1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 < set < clobber < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offset of variables addressed using stack pointer),
48 the table of structures describing the locations of parts of a variable
49 and, for each physical register, a linked list describing its contents.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 efficiently deleting the appropriate variable parts when we set or clobber the
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short so it is a good data structure here.
58 For example in the following code, register allocator may assign same
59 register to variables A and B, and both of them are stored in the same
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in RTL code. Each such note describes
74 the location of one variable at the point in instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction, we only emit these notes where the location of variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for large variables
85 which consist of several parts, for example long long).
91 #include "coretypes.h"
95 #include "hard-reg-set.h"
96 #include "basic-block.h"
99 #include "insn-config.h"
102 #include "alloc-pool.h"
108 #include "tree-pass.h"
109 #include "tree-flow.h"
114 #include "diagnostic.h"
115 #include "pointer-set.h"
118 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
119 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
120 Currently the value is the same as IDENTIFIER_NODE, which has such
121 a property. If this compile time assertion ever fails, make sure that
122 the new tree code that equals (int) VALUE has the same property. */
123 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
125 /* Type of micro operation. */
126 enum micro_operation_type
128 MO_USE, /* Use location (REG or MEM). */
129 MO_USE_NO_VAR,/* Use location which is not associated with a variable
130 or the variable is not trackable. */
131 MO_VAL_USE, /* Use location which is associated with a value. */
132 MO_VAL_LOC, /* Use location which appears in a debug insn. */
133 MO_VAL_SET, /* Set location associated with a value. */
134 MO_SET, /* Set location. */
135 MO_COPY, /* Copy the same portion of a variable from one
136 location to another. */
137 MO_CLOBBER, /* Clobber location. */
138 MO_CALL, /* Call insn. */
139 MO_ADJUST /* Adjust stack pointer. */
143 static const char * const ATTRIBUTE_UNUSED
144 micro_operation_type_name[] = {
157 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
158 Notes emitted as AFTER_CALL are to take effect during the call,
159 rather than after the call. */
162 EMIT_NOTE_BEFORE_INSN,
163 EMIT_NOTE_AFTER_INSN,
164 EMIT_NOTE_AFTER_CALL_INSN
167 /* Structure holding information about micro operation. */
168 typedef struct micro_operation_def
170 /* Type of micro operation. */
171 enum micro_operation_type type;
173 /* The instruction which the micro operation is in, for MO_USE,
174 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
175 instruction or note in the original flow (before any var-tracking
176 notes are inserted, to simplify emission of notes), for MO_SET
181 /* Location. For MO_SET and MO_COPY, this is the SET that
182 performs the assignment, if known, otherwise it is the target
183 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
184 CONCAT of the VALUE and the LOC associated with it. For
185 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
186 associated with it. */
189 /* Stack adjustment. */
190 HOST_WIDE_INT adjust;
194 DEF_VEC_O(micro_operation);
195 DEF_VEC_ALLOC_O(micro_operation,heap);
197 /* A declaration of a variable, or an RTL value being handled like a
199 typedef void *decl_or_value;
201 /* Structure for passing some other parameters to function
202 emit_note_insn_var_location. */
203 typedef struct emit_note_data_def
205 /* The instruction which the note will be emitted before/after. */
208 /* Where the note will be emitted (before/after insn)? */
209 enum emit_note_where where;
211 /* The variables and values active at this point. */
215 /* Description of location of a part of a variable. The content of a physical
216 register is described by a chain of these structures.
217 The chains are pretty short (usually 1 or 2 elements) and thus
218 chain is the best data structure. */
219 typedef struct attrs_def
221 /* Pointer to next member of the list. */
222 struct attrs_def *next;
224 /* The rtx of register. */
227 /* The declaration corresponding to LOC. */
230 /* Offset from start of DECL. */
231 HOST_WIDE_INT offset;
234 /* Structure holding a refcounted hash table. If refcount > 1,
235 it must be first unshared before modified. */
236 typedef struct shared_hash_def
238 /* Reference count. */
241 /* Actual hash table. */
245 /* Structure holding the IN or OUT set for a basic block. */
246 typedef struct dataflow_set_def
248 /* Adjustment of stack offset. */
249 HOST_WIDE_INT stack_adjust;
251 /* Attributes for registers (lists of attrs). */
252 attrs regs[FIRST_PSEUDO_REGISTER];
254 /* Variable locations. */
257 /* Vars that is being traversed. */
258 shared_hash traversed_vars;
261 /* The structure (one for each basic block) containing the information
262 needed for variable tracking. */
263 typedef struct variable_tracking_info_def
265 /* The vector of micro operations. */
266 VEC(micro_operation, heap) *mos;
268 /* The IN and OUT set for dataflow analysis. */
272 /* The permanent-in dataflow set for this block. This is used to
273 hold values for which we had to compute entry values. ??? This
274 should probably be dynamically allocated, to avoid using more
275 memory in non-debug builds. */
278 /* Has the block been visited in DFS? */
281 /* Has the block been flooded in VTA? */
284 } *variable_tracking_info;
286 /* Structure for chaining the locations. */
287 typedef struct location_chain_def
289 /* Next element in the chain. */
290 struct location_chain_def *next;
292 /* The location (REG, MEM or VALUE). */
295 /* The "value" stored in this location. */
299 enum var_init_status init;
302 /* Structure describing one part of variable. */
303 typedef struct variable_part_def
305 /* Chain of locations of the part. */
306 location_chain loc_chain;
308 /* Location which was last emitted to location list. */
311 /* The offset in the variable. */
312 HOST_WIDE_INT offset;
315 /* Maximum number of location parts. */
316 #define MAX_VAR_PARTS 16
318 /* Structure describing where the variable is located. */
319 typedef struct variable_def
321 /* The declaration of the variable, or an RTL value being handled
322 like a declaration. */
325 /* Reference count. */
328 /* Number of variable parts. */
331 /* True if this variable changed (any of its) cur_loc fields
332 during the current emit_notes_for_changes resp.
333 emit_notes_for_differences call. */
334 bool cur_loc_changed;
336 /* True if this variable_def struct is currently in the
337 changed_variables hash table. */
338 bool in_changed_variables;
340 /* The variable parts. */
341 variable_part var_part[1];
343 typedef const struct variable_def *const_variable;
345 /* Structure for chaining backlinks from referenced VALUEs to
346 DVs that are referencing them. */
347 typedef struct value_chain_def
349 /* Next value_chain entry. */
350 struct value_chain_def *next;
352 /* The declaration of the variable, or an RTL value
353 being handled like a declaration, whose var_parts[0].loc_chain
354 references the VALUE owning this value_chain. */
357 /* Reference count. */
360 typedef const struct value_chain_def *const_value_chain;
362 /* Pointer to the BB's information specific to variable tracking pass. */
363 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
365 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
366 #define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
368 /* Alloc pool for struct attrs_def. */
369 static alloc_pool attrs_pool;
371 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
372 static alloc_pool var_pool;
374 /* Alloc pool for struct variable_def with a single var_part entry. */
375 static alloc_pool valvar_pool;
377 /* Alloc pool for struct location_chain_def. */
378 static alloc_pool loc_chain_pool;
380 /* Alloc pool for struct shared_hash_def. */
381 static alloc_pool shared_hash_pool;
383 /* Alloc pool for struct value_chain_def. */
384 static alloc_pool value_chain_pool;
386 /* Changed variables, notes will be emitted for them. */
387 static htab_t changed_variables;
389 /* Links from VALUEs to DVs referencing them in their current loc_chains. */
390 static htab_t value_chains;
392 /* Shall notes be emitted? */
393 static bool emit_notes;
395 /* Empty shared hashtable. */
396 static shared_hash empty_shared_hash;
398 /* Scratch register bitmap used by cselib_expand_value_rtx. */
399 static bitmap scratch_regs = NULL;
401 /* Variable used to tell whether cselib_process_insn called our hook. */
402 static bool cselib_hook_called;
404 /* Local function prototypes. */
405 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
407 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
409 static bool vt_stack_adjustments (void);
410 static rtx compute_cfa_pointer (HOST_WIDE_INT);
411 static hashval_t variable_htab_hash (const void *);
412 static int variable_htab_eq (const void *, const void *);
413 static void variable_htab_free (void *);
415 static void init_attrs_list_set (attrs *);
416 static void attrs_list_clear (attrs *);
417 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
418 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
419 static void attrs_list_copy (attrs *, attrs);
420 static void attrs_list_union (attrs *, attrs);
422 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
423 enum var_init_status);
424 static int vars_copy_1 (void **, void *);
425 static void vars_copy (htab_t, htab_t);
426 static tree var_debug_decl (tree);
427 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
428 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
429 enum var_init_status, rtx);
430 static void var_reg_delete (dataflow_set *, rtx, bool);
431 static void var_regno_delete (dataflow_set *, int);
432 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
433 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
434 enum var_init_status, rtx);
435 static void var_mem_delete (dataflow_set *, rtx, bool);
437 static void dataflow_set_init (dataflow_set *);
438 static void dataflow_set_clear (dataflow_set *);
439 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
440 static int variable_union_info_cmp_pos (const void *, const void *);
441 static int variable_union (void **, void *);
442 static void dataflow_set_union (dataflow_set *, dataflow_set *);
443 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
444 static bool canon_value_cmp (rtx, rtx);
445 static int loc_cmp (rtx, rtx);
446 static bool variable_part_different_p (variable_part *, variable_part *);
447 static bool onepart_variable_different_p (variable, variable);
448 static bool variable_different_p (variable, variable);
449 static int dataflow_set_different_1 (void **, void *);
450 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
451 static void dataflow_set_destroy (dataflow_set *);
453 static bool contains_symbol_ref (rtx);
454 static bool track_expr_p (tree, bool);
455 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
456 static int add_uses (rtx *, void *);
457 static void add_uses_1 (rtx *, void *);
458 static void add_stores (rtx, const_rtx, void *);
459 static bool compute_bb_dataflow (basic_block);
460 static bool vt_find_locations (void);
462 static void dump_attrs_list (attrs);
463 static int dump_var_slot (void **, void *);
464 static void dump_var (variable);
465 static void dump_vars (htab_t);
466 static void dump_dataflow_set (dataflow_set *);
467 static void dump_dataflow_sets (void);
469 static void variable_was_changed (variable, dataflow_set *);
470 static void **set_slot_part (dataflow_set *, rtx, void **,
471 decl_or_value, HOST_WIDE_INT,
472 enum var_init_status, rtx);
473 static void set_variable_part (dataflow_set *, rtx,
474 decl_or_value, HOST_WIDE_INT,
475 enum var_init_status, rtx, enum insert_option);
476 static void **clobber_slot_part (dataflow_set *, rtx,
477 void **, HOST_WIDE_INT, rtx);
478 static void clobber_variable_part (dataflow_set *, rtx,
479 decl_or_value, HOST_WIDE_INT, rtx);
480 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
481 static void delete_variable_part (dataflow_set *, rtx,
482 decl_or_value, HOST_WIDE_INT);
483 static int emit_note_insn_var_location (void **, void *);
484 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
485 static int emit_notes_for_differences_1 (void **, void *);
486 static int emit_notes_for_differences_2 (void **, void *);
487 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
488 static void emit_notes_in_bb (basic_block, dataflow_set *);
489 static void vt_emit_notes (void);
491 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
492 static void vt_add_function_parameters (void);
493 static bool vt_initialize (void);
494 static void vt_finalize (void);
496 /* Given a SET, calculate the amount of stack adjustment it contains
497 PRE- and POST-modifying stack pointer.
498 This function is similar to stack_adjust_offset. */
501 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
504 rtx src = SET_SRC (pattern);
505 rtx dest = SET_DEST (pattern);
508 if (dest == stack_pointer_rtx)
510 /* (set (reg sp) (plus (reg sp) (const_int))) */
511 code = GET_CODE (src);
512 if (! (code == PLUS || code == MINUS)
513 || XEXP (src, 0) != stack_pointer_rtx
514 || !CONST_INT_P (XEXP (src, 1)))
518 *post += INTVAL (XEXP (src, 1));
520 *post -= INTVAL (XEXP (src, 1));
522 else if (MEM_P (dest))
524 /* (set (mem (pre_dec (reg sp))) (foo)) */
525 src = XEXP (dest, 0);
526 code = GET_CODE (src);
532 if (XEXP (src, 0) == stack_pointer_rtx)
534 rtx val = XEXP (XEXP (src, 1), 1);
535 /* We handle only adjustments by constant amount. */
536 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
539 if (code == PRE_MODIFY)
540 *pre -= INTVAL (val);
542 *post -= INTVAL (val);
548 if (XEXP (src, 0) == stack_pointer_rtx)
550 *pre += GET_MODE_SIZE (GET_MODE (dest));
556 if (XEXP (src, 0) == stack_pointer_rtx)
558 *post += GET_MODE_SIZE (GET_MODE (dest));
564 if (XEXP (src, 0) == stack_pointer_rtx)
566 *pre -= GET_MODE_SIZE (GET_MODE (dest));
572 if (XEXP (src, 0) == stack_pointer_rtx)
574 *post -= GET_MODE_SIZE (GET_MODE (dest));
585 /* Given an INSN, calculate the amount of stack adjustment it contains
586 PRE- and POST-modifying stack pointer. */
589 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
597 pattern = PATTERN (insn);
598 if (RTX_FRAME_RELATED_P (insn))
600 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
602 pattern = XEXP (expr, 0);
605 if (GET_CODE (pattern) == SET)
606 stack_adjust_offset_pre_post (pattern, pre, post);
607 else if (GET_CODE (pattern) == PARALLEL
608 || GET_CODE (pattern) == SEQUENCE)
612 /* There may be stack adjustments inside compound insns. Search
614 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
615 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
616 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
620 /* Compute stack adjustments for all blocks by traversing DFS tree.
621 Return true when the adjustments on all incoming edges are consistent.
622 Heavily borrowed from pre_and_rev_post_order_compute. */
625 vt_stack_adjustments (void)
627 edge_iterator *stack;
630 /* Initialize entry block. */
631 VTI (ENTRY_BLOCK_PTR)->visited = true;
632 VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
633 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
635 /* Allocate stack for back-tracking up CFG. */
636 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
639 /* Push the first edge on to the stack. */
640 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
648 /* Look at the edge on the top of the stack. */
650 src = ei_edge (ei)->src;
651 dest = ei_edge (ei)->dest;
653 /* Check if the edge destination has been visited yet. */
654 if (!VTI (dest)->visited)
657 HOST_WIDE_INT pre, post, offset;
658 VTI (dest)->visited = true;
659 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
661 if (dest != EXIT_BLOCK_PTR)
662 for (insn = BB_HEAD (dest);
663 insn != NEXT_INSN (BB_END (dest));
664 insn = NEXT_INSN (insn))
667 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
668 offset += pre + post;
671 VTI (dest)->out.stack_adjust = offset;
673 if (EDGE_COUNT (dest->succs) > 0)
674 /* Since the DEST node has been visited for the first
675 time, check its successors. */
676 stack[sp++] = ei_start (dest->succs);
680 /* Check whether the adjustments on the edges are the same. */
681 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
687 if (! ei_one_before_end_p (ei))
688 /* Go to the next edge. */
689 ei_next (&stack[sp - 1]);
691 /* Return to previous level if there are no more edges. */
700 /* Compute a CFA-based value for the stack pointer. */
703 compute_cfa_pointer (HOST_WIDE_INT adjustment)
707 #ifdef FRAME_POINTER_CFA_OFFSET
708 adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
709 cfa = plus_constant (frame_pointer_rtx, adjustment);
711 adjustment -= ARG_POINTER_CFA_OFFSET (current_function_decl);
712 cfa = plus_constant (arg_pointer_rtx, adjustment);
718 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
719 or -1 if the replacement shouldn't be done. */
720 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
722 /* Data for adjust_mems callback. */
724 struct adjust_mem_data
727 enum machine_mode mem_mode;
728 HOST_WIDE_INT stack_adjust;
732 /* Helper function for adjusting used MEMs. */
735 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
737 struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
738 rtx mem, addr = loc, tem;
739 enum machine_mode mem_mode_save;
741 switch (GET_CODE (loc))
744 /* Don't do any sp or fp replacements outside of MEM addresses. */
745 if (amd->mem_mode == VOIDmode)
747 if (loc == stack_pointer_rtx
748 && !frame_pointer_needed)
749 return compute_cfa_pointer (amd->stack_adjust);
750 else if (loc == hard_frame_pointer_rtx
751 && frame_pointer_needed
752 && hard_frame_pointer_adjustment != -1)
753 return compute_cfa_pointer (hard_frame_pointer_adjustment);
759 mem = targetm.delegitimize_address (mem);
760 if (mem != loc && !MEM_P (mem))
761 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
764 addr = XEXP (mem, 0);
765 mem_mode_save = amd->mem_mode;
766 amd->mem_mode = GET_MODE (mem);
767 store_save = amd->store;
769 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
770 amd->store = store_save;
771 amd->mem_mode = mem_mode_save;
773 addr = targetm.delegitimize_address (addr);
774 if (addr != XEXP (mem, 0))
775 mem = replace_equiv_address_nv (mem, addr);
777 mem = avoid_constant_pool_reference (mem);
781 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
782 GEN_INT (GET_CODE (loc) == PRE_INC
783 ? GET_MODE_SIZE (amd->mem_mode)
784 : -GET_MODE_SIZE (amd->mem_mode)));
788 addr = XEXP (loc, 0);
789 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
790 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
791 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
792 GEN_INT ((GET_CODE (loc) == PRE_INC
793 || GET_CODE (loc) == POST_INC)
794 ? GET_MODE_SIZE (amd->mem_mode)
795 : -GET_MODE_SIZE (amd->mem_mode)));
796 amd->side_effects = alloc_EXPR_LIST (0,
797 gen_rtx_SET (VOIDmode,
803 addr = XEXP (loc, 1);
806 addr = XEXP (loc, 0);
807 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
808 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
809 amd->side_effects = alloc_EXPR_LIST (0,
810 gen_rtx_SET (VOIDmode,
816 /* First try without delegitimization of whole MEMs and
817 avoid_constant_pool_reference, which is more likely to succeed. */
818 store_save = amd->store;
820 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
822 amd->store = store_save;
823 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
824 if (mem == SUBREG_REG (loc))
826 tem = simplify_gen_subreg (GET_MODE (loc), mem,
827 GET_MODE (SUBREG_REG (loc)),
831 tem = simplify_gen_subreg (GET_MODE (loc), addr,
832 GET_MODE (SUBREG_REG (loc)),
836 return gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
843 /* Helper function for replacement of uses. */
846 adjust_mem_uses (rtx *x, void *data)
848 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
850 validate_change (NULL_RTX, x, new_x, true);
853 /* Helper function for replacement of stores. */
856 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
860 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
862 if (new_dest != SET_DEST (expr))
864 rtx xexpr = CONST_CAST_RTX (expr);
865 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
870 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
871 replace them with their value in the insn and add the side-effects
872 as other sets to the insn. */
875 adjust_insn (basic_block bb, rtx insn)
877 struct adjust_mem_data amd;
879 amd.mem_mode = VOIDmode;
880 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
881 amd.side_effects = NULL_RTX;
884 note_stores (PATTERN (insn), adjust_mem_stores, &amd);
887 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
889 /* For read-only MEMs containing some constant, prefer those
891 set = single_set (insn);
892 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
894 rtx note = find_reg_equal_equiv_note (insn);
896 if (note && CONSTANT_P (XEXP (note, 0)))
897 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
900 if (amd.side_effects)
902 rtx *pat, new_pat, s;
905 pat = &PATTERN (insn);
906 if (GET_CODE (*pat) == COND_EXEC)
907 pat = &COND_EXEC_CODE (*pat);
908 if (GET_CODE (*pat) == PARALLEL)
909 oldn = XVECLEN (*pat, 0);
912 for (s = amd.side_effects, newn = 0; s; newn++)
914 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
915 if (GET_CODE (*pat) == PARALLEL)
916 for (i = 0; i < oldn; i++)
917 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
919 XVECEXP (new_pat, 0, 0) = *pat;
920 for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
921 XVECEXP (new_pat, 0, i) = XEXP (s, 0);
922 free_EXPR_LIST_list (&amd.side_effects);
923 validate_change (NULL_RTX, pat, new_pat, true);
927 /* Return true if a decl_or_value DV is a DECL or NULL. */
929 dv_is_decl_p (decl_or_value dv)
931 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
934 /* Return true if a decl_or_value is a VALUE rtl. */
936 dv_is_value_p (decl_or_value dv)
938 return dv && !dv_is_decl_p (dv);
941 /* Return the decl in the decl_or_value. */
943 dv_as_decl (decl_or_value dv)
945 #ifdef ENABLE_CHECKING
946 gcc_assert (dv_is_decl_p (dv));
951 /* Return the value in the decl_or_value. */
953 dv_as_value (decl_or_value dv)
955 #ifdef ENABLE_CHECKING
956 gcc_assert (dv_is_value_p (dv));
961 /* Return the opaque pointer in the decl_or_value. */
963 dv_as_opaque (decl_or_value dv)
968 /* Return true if a decl_or_value must not have more than one variable
971 dv_onepart_p (decl_or_value dv)
975 if (!MAY_HAVE_DEBUG_INSNS)
978 if (dv_is_value_p (dv))
981 decl = dv_as_decl (dv);
986 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
989 return (target_for_debug_bind (decl) != NULL_TREE);
992 /* Return the variable pool to be used for dv, depending on whether it
993 can have multiple parts or not. */
994 static inline alloc_pool
995 dv_pool (decl_or_value dv)
997 return dv_onepart_p (dv) ? valvar_pool : var_pool;
1000 /* Build a decl_or_value out of a decl. */
1001 static inline decl_or_value
1002 dv_from_decl (tree decl)
1006 #ifdef ENABLE_CHECKING
1007 gcc_assert (dv_is_decl_p (dv));
1012 /* Build a decl_or_value out of a value. */
1013 static inline decl_or_value
1014 dv_from_value (rtx value)
1018 #ifdef ENABLE_CHECKING
1019 gcc_assert (dv_is_value_p (dv));
1024 extern void debug_dv (decl_or_value dv);
1027 debug_dv (decl_or_value dv)
1029 if (dv_is_value_p (dv))
1030 debug_rtx (dv_as_value (dv));
1032 debug_generic_stmt (dv_as_decl (dv));
1035 typedef unsigned int dvuid;
1037 /* Return the uid of DV. */
1040 dv_uid (decl_or_value dv)
1042 if (dv_is_value_p (dv))
1043 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
1045 return DECL_UID (dv_as_decl (dv));
1048 /* Compute the hash from the uid. */
1050 static inline hashval_t
1051 dv_uid2hash (dvuid uid)
1056 /* The hash function for a mask table in a shared_htab chain. */
1058 static inline hashval_t
1059 dv_htab_hash (decl_or_value dv)
1061 return dv_uid2hash (dv_uid (dv));
1064 /* The hash function for variable_htab, computes the hash value
1065 from the declaration of variable X. */
1068 variable_htab_hash (const void *x)
1070 const_variable const v = (const_variable) x;
1072 return dv_htab_hash (v->dv);
1075 /* Compare the declaration of variable X with declaration Y. */
1078 variable_htab_eq (const void *x, const void *y)
1080 const_variable const v = (const_variable) x;
1081 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1083 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
1086 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1089 variable_htab_free (void *elem)
1092 variable var = (variable) elem;
1093 location_chain node, next;
1095 gcc_assert (var->refcount > 0);
1098 if (var->refcount > 0)
1101 for (i = 0; i < var->n_var_parts; i++)
1103 for (node = var->var_part[i].loc_chain; node; node = next)
1106 pool_free (loc_chain_pool, node);
1108 var->var_part[i].loc_chain = NULL;
1110 pool_free (dv_pool (var->dv), var);
1113 /* The hash function for value_chains htab, computes the hash value
1117 value_chain_htab_hash (const void *x)
1119 const_value_chain const v = (const_value_chain) x;
1121 return dv_htab_hash (v->dv);
1124 /* Compare the VALUE X with VALUE Y. */
1127 value_chain_htab_eq (const void *x, const void *y)
1129 const_value_chain const v = (const_value_chain) x;
1130 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1132 return dv_as_opaque (v->dv) == dv_as_opaque (dv);
1135 /* Initialize the set (array) SET of attrs to empty lists. */
1138 init_attrs_list_set (attrs *set)
1142 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1146 /* Make the list *LISTP empty. */
1149 attrs_list_clear (attrs *listp)
1153 for (list = *listp; list; list = next)
1156 pool_free (attrs_pool, list);
1161 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
1164 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1166 for (; list; list = list->next)
1167 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1172 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1175 attrs_list_insert (attrs *listp, decl_or_value dv,
1176 HOST_WIDE_INT offset, rtx loc)
1180 list = (attrs) pool_alloc (attrs_pool);
1183 list->offset = offset;
1184 list->next = *listp;
1188 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1191 attrs_list_copy (attrs *dstp, attrs src)
1195 attrs_list_clear (dstp);
1196 for (; src; src = src->next)
1198 n = (attrs) pool_alloc (attrs_pool);
1201 n->offset = src->offset;
1207 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1210 attrs_list_union (attrs *dstp, attrs src)
1212 for (; src; src = src->next)
1214 if (!attrs_list_member (*dstp, src->dv, src->offset))
1215 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1219 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1223 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1225 gcc_assert (!*dstp);
1226 for (; src; src = src->next)
1228 if (!dv_onepart_p (src->dv))
1229 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1231 for (src = src2; src; src = src->next)
1233 if (!dv_onepart_p (src->dv)
1234 && !attrs_list_member (*dstp, src->dv, src->offset))
1235 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1239 /* Shared hashtable support. */
1241 /* Return true if VARS is shared. */
1244 shared_hash_shared (shared_hash vars)
1246 return vars->refcount > 1;
1249 /* Return the hash table for VARS. */
1251 static inline htab_t
1252 shared_hash_htab (shared_hash vars)
1257 /* Return true if VAR is shared, or maybe because VARS is shared. */
1260 shared_var_p (variable var, shared_hash vars)
1262 /* Don't count an entry in the changed_variables table as a duplicate. */
1263 return ((var->refcount > 1 + (int) var->in_changed_variables)
1264 || shared_hash_shared (vars));
1267 /* Copy variables into a new hash table. */
/* Copy-on-write step: build a fresh table sized from the old one
   (+3 slack), copy every variable into it, and hand back a new
   shared_hash with refcount 1.  NOTE(review): the decrement of the
   old refcount and the return appear to be elided here.  */
1270 shared_hash_unshare (shared_hash vars)
1272 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1273 gcc_assert (vars->refcount > 1);
1274 new_vars->refcount = 1;
1276 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1277 variable_htab_eq, variable_htab_free);
1278 vars_copy (new_vars->htab, vars->htab);
1283 /* Increment reference counter on VARS and return it. */
/* NOTE(review): body elided in this listing -- presumably
   vars->refcount++; return vars; confirm against full source.  */
1285 static inline shared_hash
1286 shared_hash_copy (shared_hash vars)
1292 /* Decrement reference counter and destroy hash table if not shared
/* Frees both the hash table and the shared_hash wrapper once the
   last reference is dropped.  */
1296 shared_hash_destroy (shared_hash vars)
1298 gcc_assert (vars->refcount > 0);
1299 if (--vars->refcount == 0)
1301 htab_delete (vars->htab);
1302 pool_free (shared_hash_pool, vars);
1306 /* Unshare *PVARS if shared and return slot for DV. If INS is
1307 INSERT, insert it if not already present. */
/* The _1 variants take a precomputed hash; the plain variants
   compute it with dv_htab_hash.  This one unconditionally unshares
   first, so the returned slot is safe to write through.  */
1309 static inline void **
1310 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1311 hashval_t dvhash, enum insert_option ins)
1313 if (shared_hash_shared (*pvars))
1314 *pvars = shared_hash_unshare (*pvars);
1315 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1318 static inline void **
1319 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1320 enum insert_option ins)
1322 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1325 /* Return slot for DV, if it is already present in the hash table.
1326 If it is not present, insert it only VARS is not shared, otherwise
/* Inserting into a shared table would corrupt the other owners, so
   a shared table only allows lookup (NO_INSERT).  */
1329 static inline void **
1330 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1332 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1333 shared_hash_shared (vars)
1334 ? NO_INSERT : INSERT);
1337 static inline void **
1338 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1340 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1343 /* Return slot for DV only if it is already present in the hash table. */
1345 static inline void **
1346 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
/* NOTE(review): the NO_INSERT argument line is elided in this
   listing -- confirm against full source.  */
1349 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1353 static inline void **
1354 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1356 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1359 /* Return variable for DV or NULL if not already present in the hash
/* Read-only lookup; never unshares and never inserts.  */
1362 static inline variable
1363 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1365 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1368 static inline variable
1369 shared_hash_find (shared_hash vars, decl_or_value dv)
1371 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1374 /* Return true if TVAL is better than CVAL as a canonival value. We
1375 choose lowest-numbered VALUEs, using the RTX address as a
1376 tie-breaker. The idea is to arrange them into a star topology,
1377 such that all of them are at most one step away from the canonical
1378 value, and the canonical value has backlinks to all of them, in
1379 addition to all the actual locations. We don't enforce this
1380 topology throughout the entire dataflow analysis, though.
/* NOTE(review): the address tie-break term (tval < cval) is elided
   in this listing; only the uid comparison is visible below.  */
1384 canon_value_cmp (rtx tval, rtx cval)
1387 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
/* Set by the merge/union code when the destination set cannot keep
   sharing its variable entries with the source.  */
1390 static bool dst_can_be_shared;
1392 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
/* Deep-copies VAR (including every var_part's location chain) into a
   fresh pool-allocated variable with refcount 1, then replaces VAR's
   slot in SET with the copy.  If VAR sat in the changed_variables
   table, the copy takes over that role.  NOTE(review): several lines
   (refcount decrement on VAR, slot store, return) are elided here.  */
1395 unshare_variable (dataflow_set *set, void **slot, variable var,
1396 enum var_init_status initialized)
1401 new_var = (variable) pool_alloc (dv_pool (var->dv));
1402 new_var->dv = var->dv;
1403 new_var->refcount = 1;
1405 new_var->n_var_parts = var->n_var_parts;
1406 new_var->cur_loc_changed = var->cur_loc_changed;
1407 var->cur_loc_changed = false;
1408 new_var->in_changed_variables = false;
/* Without -fvar-tracking-uninit every location is treated as
   initialized.  */
1410 if (! flag_var_tracking_uninit)
1411 initialized = VAR_INIT_STATUS_INITIALIZED;
1413 for (i = 0; i < var->n_var_parts; i++)
1415 location_chain node;
1416 location_chain *nextp;
1418 new_var->var_part[i].offset = var->var_part[i].offset;
1419 nextp = &new_var->var_part[i].loc_chain;
1420 for (node = var->var_part[i].loc_chain; node; node = node->next)
1422 location_chain new_lc;
1424 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1425 new_lc->next = NULL;
/* Keep the stronger (more-initialized) of the node's own status and
   the caller-provided one.  */
1426 if (node->init > initialized)
1427 new_lc->init = node->init;
1429 new_lc->init = initialized;
/* set_src is only propagated for non-MEM sources.  */
1430 if (node->set_src && !(MEM_P (node->set_src)))
1431 new_lc->set_src = node->set_src;
1433 new_lc->set_src = NULL;
1434 new_lc->loc = node->loc;
1437 nextp = &new_lc->next;
1440 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1443 dst_can_be_shared = false;
/* Re-find the slot: unsharing the hash table (or traversing a
   different table) may have invalidated the one passed in.  */
1444 if (shared_hash_shared (set->vars))
1445 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1446 else if (set->traversed_vars && set->vars != set->traversed_vars)
1447 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1449 if (var->in_changed_variables)
1452 = htab_find_slot_with_hash (changed_variables, var->dv,
1453 dv_htab_hash (var->dv), NO_INSERT);
1454 gcc_assert (*cslot == (void *) var);
1455 var->in_changed_variables = false;
1456 variable_htab_free (var);
1458 new_var->in_changed_variables = true;
1463 /* Add a variable from *SLOT to hash table DATA and increase its reference
/* htab_traverse callback: shares the variable between tables by
   bumping its refcount rather than deep-copying.  NOTE(review): the
   refcount increment and the store through DSTP are elided in this
   listing -- confirm against full source.  */
1467 vars_copy_1 (void **slot, void *data)
1469 htab_t dst = (htab_t) data;
1473 src = (variable) *slot;
1476 dstp = htab_find_slot_with_hash (dst, src->dv,
1477 dv_htab_hash (src->dv),
1481 /* Continue traversing the hash table. */
1485 /* Copy all variables from hash table SRC to hash table DST. */
1488 vars_copy (htab_t dst, htab_t src)
1490 htab_traverse_noresize (src, vars_copy_1, dst);
1493 /* Map a decl to its main debug decl. */
/* If DECL carries a DECL_DEBUG_EXPR that is itself a decl, tracking
   is done on that debug decl instead; otherwise DECL is returned
   unchanged (the return is elided in this listing).  */
1496 var_debug_decl (tree decl)
1498 if (decl && DECL_P (decl)
1499 && DECL_DEBUG_EXPR_IS_FROM (decl) && DECL_DEBUG_EXPR (decl)
1500 && DECL_P (DECL_DEBUG_EXPR (decl)))
1501 decl = DECL_DEBUG_EXPR (decl);
1506 /* Set the register LOC to contain DV, OFFSET. */
/* Records the association in two places: the per-register attrs list
   set->regs[REGNO] (skipped if already present) and the variable
   hash table via set_variable_part.  */
1509 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1510 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1511 enum insert_option iopt)
1514 bool decl_p = dv_is_decl_p (dv);
/* Decls are canonicalized to their main debug decl first.  */
1517 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1519 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1520 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1521 && node->offset == offset)
1524 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1525 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1528 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
/* Convenience wrapper deriving the decl/offset from the REG rtx.  */
1531 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1534 tree decl = REG_EXPR (loc);
1535 HOST_WIDE_INT offset = REG_OFFSET (loc);
1537 var_reg_decl_set (set, loc, initialized,
1538 dv_from_decl (decl), offset, set_src, INSERT);
/* Look up the recorded initialization status of location LOC for
   variable DV in SET.  Returns VAR_INIT_STATUS_INITIALIZED when
   uninit tracking is off, VAR_INIT_STATUS_UNKNOWN when LOC is not
   found (final return elided in this listing).  */
1541 static enum var_init_status
1542 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1546 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1548 if (! flag_var_tracking_uninit)
1549 return VAR_INIT_STATUS_INITIALIZED;
1551 var = shared_hash_find (set->vars, dv);
/* Scan each part's location chain for an rtx equal to LOC; stop as
   soon as a status is found.  */
1554 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1556 location_chain nextp;
1557 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1558 if (rtx_equal_p (nextp->loc, loc))
1560 ret_val = nextp->init;
1569 /* Delete current content of register LOC in dataflow set SET and set
1570 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1571 MODIFY is true, any other live copies of the same variable part are
1572 also deleted from the dataflow set, otherwise the variable part is
1573 assumed to be copied from another location holding the same
1577 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1578 enum var_init_status initialized, rtx set_src)
1580 tree decl = REG_EXPR (loc);
1581 HOST_WIDE_INT offset = REG_OFFSET (loc);
1585 decl = var_debug_decl (decl);
1587 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1588 initialized = get_init_value (set, loc, dv_from_decl (decl));
/* Walk the register's attrs list, removing every association except
   the (decl, offset) being set.  NOTE(review): the branch bodies are
   partially elided here (the unlink/keep steps) -- confirm against
   full source before relying on the exact list surgery.  */
1590 nextp = &set->regs[REGNO (loc)];
1591 for (node = *nextp; node; node = next)
1594 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1596 delete_variable_part (set, node->loc, node->dv, node->offset);
1597 pool_free (attrs_pool, node);
1603 nextp = &node->next;
/* MODIFY additionally clobbers other live copies of the part
   (guard elided in this listing), then the new binding is set.  */
1607 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1608 var_reg_set (set, loc, initialized, set_src);
1611 /* Delete the association of register LOC in dataflow set SET with any
1612 variables that aren't onepart. If CLOBBER is true, also delete any
1613 other live copies of the same variable part, and delete the
1614 association with onepart dvs too. */
1617 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1619 attrs *nextp = &set->regs[REGNO (loc)];
/* The CLOBBER path (guard elided in this listing) also clobbers the
   variable part of the register's own decl.  */
1624 tree decl = REG_EXPR (loc);
1625 HOST_WIDE_INT offset = REG_OFFSET (loc);
1627 decl = var_debug_decl (decl);
1629 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
/* Drop every non-onepart association (all of them when clobbering);
   onepart dvs survive a plain delete since the value may still be
   live elsewhere.  */
1632 for (node = *nextp; node; node = next)
1635 if (clobber || !dv_onepart_p (node->dv))
1637 delete_variable_part (set, node->loc, node->dv, node->offset);
1638 pool_free (attrs_pool, node);
1642 nextp = &node->next;
1646 /* Delete content of register with number REGNO in dataflow set SET. */
/* Unconditionally removes every variable part held in the register
   and frees the whole attrs list (the final *reg = NULL is elided in
   this listing).  */
1649 var_regno_delete (dataflow_set *set, int regno)
1651 attrs *reg = &set->regs[regno];
1654 for (node = *reg; node; node = next)
1657 delete_variable_part (set, node->loc, node->dv, node->offset);
1658 pool_free (attrs_pool, node);
1663 /* Set the location of DV, OFFSET as the MEM LOC. */
/* MEM counterpart of var_reg_decl_set; no per-register attrs list is
   involved, only the variable hash table.  */
1666 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1667 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1668 enum insert_option iopt)
1670 if (dv_is_decl_p (dv))
1671 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1673 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1676 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1678 Adjust the address first if it is stack pointer based. */
/* Convenience wrapper deriving the decl/offset from the MEM rtx.  */
1681 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1684 tree decl = MEM_EXPR (loc);
1685 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1687 var_mem_decl_set (set, loc, initialized,
1688 dv_from_decl (decl), offset, set_src, INSERT);
1691 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1692 dataflow set SET to LOC. If MODIFY is true, any other live copies
1693 of the same variable part are also deleted from the dataflow set,
1694 otherwise the variable part is assumed to be copied from another
1695 location holding the same part.
1696 Adjust the address first if it is stack pointer based. */
1699 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1700 enum var_init_status initialized, rtx set_src)
1702 tree decl = MEM_EXPR (loc);
1703 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1705 decl = var_debug_decl (decl);
/* Fall back to the recorded status when the caller doesn't know.  */
1707 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1708 initialized = get_init_value (set, loc, dv_from_decl (decl));
/* The MODIFY guard for the clobber is elided in this listing.  */
1711 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1712 var_mem_set (set, loc, initialized, set_src);
1715 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1716 true, also delete any other live copies of the same variable part.
1717 Adjust the address first if it is stack pointer based. */
1720 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1722 tree decl = MEM_EXPR (loc);
1723 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1725 decl = var_debug_decl (decl);
/* The CLOBBER guard before clobber_variable_part is elided in this
   listing -- confirm against full source.  */
1727 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1728 delete_variable_part (set, loc, dv_from_decl (decl), offset);
1731 /* Bind a value to a location it was just stored in. If MODIFIED
1732 holds, assume the location was modified, detaching it from any
1733 values bound to it. */
1736 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
1738 cselib_val *v = CSELIB_VAL_PTR (val);
/* Only cselib-preserved values are tracked across the pass.  */
1740 gcc_assert (cselib_preserved_value_p (v));
/* Dump-file diagnostics: show the value, where it was stored, and
   its known cselib locations (dump guard elided in this listing).  */
1744 fprintf (dump_file, "%i: ", INSN_UID (insn));
1745 print_inline_rtx (dump_file, val, 0);
1746 fprintf (dump_file, " stored in ");
1747 print_inline_rtx (dump_file, loc, 0);
1750 struct elt_loc_list *l;
1751 for (l = v->locs; l; l = l->next)
1753 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
1754 print_inline_rtx (dump_file, l->loc, 0);
1757 fprintf (dump_file, "\n");
/* Dispatch on the location kind: register (clear the whole regno
   first when MODIFIED -- the REG_P/modified guards are elided here),
   memory, or anything else.  */
1763 var_regno_delete (set, REGNO (loc));
1764 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1765 dv_from_value (val), 0, NULL_RTX, INSERT);
1767 else if (MEM_P (loc))
1768 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1769 dv_from_value (val), 0, NULL_RTX, INSERT);
1771 set_variable_part (set, loc, dv_from_value (val), 0,
1772 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1775 /* Reset this node, detaching all its equivalences. Return the slot
1776 in the variable hash table that holds dv, if there is one. */
1779 val_reset (dataflow_set *set, decl_or_value dv)
1781 variable var = shared_hash_find (set->vars, dv) ;
1782 location_chain node;
/* The cval declaration/initialization is elided in this listing.  */
1785 if (!var || !var->n_var_parts)
/* Values are onepart, so exactly one var_part.  */
1788 gcc_assert (var->n_var_parts == 1);
/* Pass 1: pick the best (most canonical) VALUE on the chain as the
   new canonical value CVAL.  */
1791 for (node = var->var_part[0].loc_chain; node; node = node->next)
1792 if (GET_CODE (node->loc) == VALUE
1793 && canon_value_cmp (node->loc, cval))
/* Pass 2: repoint every other VALUE's equivalence at CVAL and drop
   its link to the value being reset.  */
1796 for (node = var->var_part[0].loc_chain; node; node = node->next)
1797 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
1799 /* Redirect the equivalence link to the new canonical
1800 value, or simply remove it if it would point at
1803 set_variable_part (set, cval, dv_from_value (node->loc),
1804 0, node->init, node->set_src, NO_INSERT);
1805 delete_variable_part (set, dv_as_value (dv),
1806 dv_from_value (node->loc), 0);
/* Pass 3 (only when a CVAL exists -- guard elided): move the
   remaining non-VALUE locations over to CVAL so the equivalence
   class stays connected.  */
1811 decl_or_value cdv = dv_from_value (cval);
1813 /* Keep the remaining values connected, accummulating links
1814 in the canonical value. */
1815 for (node = var->var_part[0].loc_chain; node; node = node->next)
1817 if (node->loc == cval)
1819 else if (GET_CODE (node->loc) == REG)
1820 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
1821 node->set_src, NO_INSERT);
1822 else if (GET_CODE (node->loc) == MEM)
1823 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
1824 node->set_src, NO_INSERT);
1826 set_variable_part (set, node->loc, cdv, 0,
1827 node->init, node->set_src, NO_INSERT);
1831 /* We remove this last, to make sure that the canonical value is not
1832 removed to the point of requiring reinsertion. */
1834 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
1836 clobber_variable_part (set, NULL, dv, 0, NULL);
1838 /* ??? Should we make sure there aren't other available values or
1839 variables whose values involve this one other than by
1840 equivalence? E.g., at the very least we should reset MEMs, those
1841 shouldn't be too hard to find cselib-looking up the value as an
1842 address, then locating the resulting value in our own hash
1846 /* Find the values in a given location and map the val to another
1847 value, if it is unique, or add the location as one holding the
1851 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
1853 decl_or_value dv = dv_from_value (val);
1855 if (dump_file && (dump_flags & TDF_DETAILS))
1858 fprintf (dump_file, "%i: ", INSN_UID (insn));
1860 fprintf (dump_file, "head: ");
1861 print_inline_rtx (dump_file, val, 0);
1862 fputs (" is at ", dump_file);
1863 print_inline_rtx (dump_file, loc, 0);
1864 fputc ('\n', dump_file);
/* Detach VAL's current equivalences before rebinding it.  */
1867 val_reset (set, dv);
/* Register case (REG_P guard elided in this listing): look for
   another same-mode VALUE already held in the register and, if
   found, record a two-way equivalence with it.  */
1871 attrs node, found = NULL;
1873 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1874 if (dv_is_value_p (node->dv)
1875 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
1879 /* Map incoming equivalences. ??? Wouldn't it be nice if
1880 we just started sharing the location lists? Maybe a
1881 circular list ending at the value itself or some
1883 set_variable_part (set, dv_as_value (node->dv),
1884 dv_from_value (val), node->offset,
1885 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1886 set_variable_part (set, val, node->dv, node->offset,
1887 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1890 /* If we didn't find any equivalence, we need to remember that
1891 this value is held in the named register. */
1893 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1894 dv_from_value (val), 0, NULL_RTX, INSERT);
1896 else if (MEM_P (loc))
1897 /* ??? Merge equivalent MEMs. */
1898 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1899 dv_from_value (val), 0, NULL_RTX, INSERT);
1901 /* ??? Merge equivalent expressions. */
1902 set_variable_part (set, loc, dv_from_value (val), 0,
1903 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1906 /* Initialize dataflow set SET to be empty.
1907 VARS_SIZE is the initial size of hash table VARS. */
/* Fresh set: empty per-register lists, a reference to the shared
   empty variable table, zero stack adjustment.  */
1910 dataflow_set_init (dataflow_set *set)
1912 init_attrs_list_set (set->regs)
1913 set->vars = shared_hash_copy (empty_shared_hash);
1914 set->stack_adjust = 0;
1915 set->traversed_vars = NULL;
1918 /* Delete the contents of dataflow set SET. */
/* Drops every register association and releases the variable table
   reference, replacing it with the shared empty table.  */
1921 dataflow_set_clear (dataflow_set *set)
1925 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1926 attrs_list_clear (&set->regs[i]);
1928 shared_hash_destroy (set->vars);
1929 set->vars = shared_hash_copy (empty_shared_hash);
1932 /* Copy the contents of dataflow set SRC to DST. */
/* Register lists are deep-copied; the variable table is shared by
   reference (copy-on-write handles later modification).  */
1935 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
1939 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1940 attrs_list_copy (&dst->regs[i], src->regs[i]);
1942 shared_hash_destroy (dst->vars);
1943 dst->vars = shared_hash_copy (src->vars);
1944 dst->stack_adjust = src->stack_adjust;
1947 /* Information for merging lists of locations for a given offset of variable.
1949 struct variable_union_info
1951 /* Node of the location chain. */
1954 /* The sum of positions in the input chains. */
1957 /* The position in the chain of DST dataflow set. */
1961 /* Buffer for location list sorting and its allocated size. */
/* Grown geometrically by variable_union; never freed between calls.  */
1962 static struct variable_union_info *vui_vec;
1963 static int vui_allocated;
1965 /* Compare function for qsort, order the structures by POS element. */
/* Primary key: POS (sum of positions in the two input chains);
   tie-break on POS_DST to keep the sort stable w.r.t. DST order.  */
1968 variable_union_info_cmp_pos (const void *n1, const void *n2)
1970 const struct variable_union_info *const i1 =
1971 (const struct variable_union_info *) n1;
1972 const struct variable_union_info *const i2 =
1973 ( const struct variable_union_info *) n2;
1975 if (i1->pos != i2->pos)
1976 return i1->pos - i2->pos;
1978 return (i1->pos_dst - i2->pos_dst);
1981 /* Compute union of location parts of variable *SLOT and the same variable
1982 from hash table DATA. Compute "sorted" union of the location chains
1983 for common offsets, i.e. the locations of a variable part are sorted by
1984 a priority where the priority is the sum of the positions in the 2 chains
1985 (if a location is only in one list the position in the second list is
1986 defined to be larger than the length of the chains).
1987 When we are updating the location parts the newest location is in the
1988 beginning of the chain, so when we do the described "sorted" union
1989 we keep the newest locations in the beginning. */
/* NOTE(review): this listing elides many lines of this function
   (variable declarations, closing braces, several guards); the
   commentary below describes only what the visible code shows.  */
1992 variable_union (void **slot, void *data)
1996 dataflow_set *set = (dataflow_set *) data;
/* If SRC's variable is absent from SET, just insert a shared
   reference to it (refcount bump and slot store elided here).  */
1999 src = (variable) *slot;
2000 dstp = shared_hash_find_slot (set->vars, src->dv);
2001 if (!dstp || !*dstp)
2005 dst_can_be_shared = false;
2007 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2011 /* Continue traversing the hash table. */
2015 dst = (variable) *dstp;
2017 gcc_assert (src->n_var_parts);
2019 /* We can combine one-part variables very efficiently, because their
2020 entries are in canonical order. */
2021 if (dv_onepart_p (src->dv))
2023 location_chain *nodep, dnode, snode;
2025 gcc_assert (src->n_var_parts == 1);
2026 gcc_assert (dst->n_var_parts == 1);
2028 snode = src->var_part[0].loc_chain;
/* Sorted-list merge of SRC's chain into DST's chain, keyed by
   loc_cmp order.  Restart label is needed because unsharing DST
   replaces the chain being walked.  */
2031 restart_onepart_unshared:
2032 nodep = &dst->var_part[0].loc_chain;
2038 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
/* r > 0: SNODE's loc is missing from DST -- insert a copy before
   DNODE (unsharing DST first if needed).  */
2042 location_chain nnode;
2044 if (shared_var_p (dst, set->vars))
2046 dstp = unshare_variable (set, dstp, dst,
2047 VAR_INIT_STATUS_INITIALIZED);
2048 dst = (variable)*dstp;
2049 goto restart_onepart_unshared;
2052 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2053 nnode->loc = snode->loc;
2054 nnode->init = snode->init;
/* set_src is only carried over for non-MEM sources.  */
2055 if (!snode->set_src || MEM_P (snode->set_src))
2056 nnode->set_src = NULL;
2058 nnode->set_src = snode->set_src;
2059 nnode->next = dnode;
2062 #ifdef ENABLE_CHECKING
/* r == 0: same loc present in both chains.  */
2064 gcc_assert (rtx_equal_p (dnode->loc, snode->loc));
2068 snode = snode->next;
2070 nodep = &dnode->next;
/* Multi-part path below.  */
2077 /* Count the number of location parts, result is K. */
2078 for (i = 0, j = 0, k = 0;
2079 i < src->n_var_parts && j < dst->n_var_parts; k++)
2081 if (src->var_part[i].offset == dst->var_part[j].offset)
2086 else if (src->var_part[i].offset < dst->var_part[j].offset)
2091 k += src->n_var_parts - i;
2092 k += dst->n_var_parts - j;
2094 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2095 thus there are at most MAX_VAR_PARTS different offsets. */
2096 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
2098 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2100 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2101 dst = (variable)*dstp;
/* Merge parts from highest offset down so DST's array can be
   filled in place without clobbering unprocessed entries.  */
2104 i = src->n_var_parts - 1;
2105 j = dst->n_var_parts - 1;
2106 dst->n_var_parts = k;
2108 for (k--; k >= 0; k--)
2110 location_chain node, node2;
2112 if (i >= 0 && j >= 0
2113 && src->var_part[i].offset == dst->var_part[j].offset)
2115 /* Compute the "sorted" union of the chains, i.e. the locations which
2116 are in both chains go first, they are sorted by the sum of
2117 positions in the chains. */
2120 struct variable_union_info *vui;
2122 /* If DST is shared compare the location chains.
2123 If they are different we will modify the chain in DST with
2124 high probability so make a copy of DST. */
2125 if (shared_var_p (dst, set->vars))
2127 for (node = src->var_part[i].loc_chain,
2128 node2 = dst->var_part[j].loc_chain; node && node2;
2129 node = node->next, node2 = node2->next)
2131 if (!((REG_P (node2->loc)
2132 && REG_P (node->loc)
2133 && REGNO (node2->loc) == REGNO (node->loc))
2134 || rtx_equal_p (node2->loc, node->loc)))
/* Chains agree at this node: just propagate the stronger
   init status.  */
2136 if (node2->init < node->init)
2137 node2->init = node->init;
/* Chains differ: unshare before modifying.  */
2143 dstp = unshare_variable (set, dstp, dst,
2144 VAR_INIT_STATUS_UNKNOWN);
2145 dst = (variable)*dstp;
/* Measure both chains (src_l/dst_l accumulation elided).  */
2150 for (node = src->var_part[i].loc_chain; node; node = node->next)
2153 for (node = dst->var_part[j].loc_chain; node; node = node->next)
/* dst_l == 1 fast path (guard elided): append SRC-only locations
   after the single DST node, no sorting required.  */
2158 /* The most common case, much simpler, no qsort is needed. */
2159 location_chain dstnode = dst->var_part[j].loc_chain;
2160 dst->var_part[k].loc_chain = dstnode;
2161 dst->var_part[k].offset = dst->var_part[j].offset;
2163 for (node = src->var_part[i].loc_chain; node; node = node->next)
2164 if (!((REG_P (dstnode->loc)
2165 && REG_P (node->loc)
2166 && REGNO (dstnode->loc) == REGNO (node->loc))
2167 || rtx_equal_p (dstnode->loc, node->loc)))
2169 location_chain new_node;
2171 /* Copy the location from SRC. */
2172 new_node = (location_chain) pool_alloc (loc_chain_pool);
2173 new_node->loc = node->loc;
2174 new_node->init = node->init;
2175 if (!node->set_src || MEM_P (node->set_src))
2176 new_node->set_src = NULL;
2178 new_node->set_src = node->set_src;
2179 new_node->next = new_node;
2186 if (src_l + dst_l > vui_allocated)
2188 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2189 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2194 /* Fill in the locations from DST. */
2195 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2196 node = node->next, jj++)
2199 vui[jj].pos_dst = jj;
2201 /* Pos plus value larger than a sum of 2 valid positions. */
2202 vui[jj].pos = jj + src_l + dst_l;
2205 /* Fill in the locations from SRC. */
2207 for (node = src->var_part[i].loc_chain, ii = 0; node;
2208 node = node->next, ii++)
2210 /* Find location from NODE. */
2211 for (jj = 0; jj < dst_l; jj++)
2213 if ((REG_P (vui[jj].lc->loc)
2214 && REG_P (node->loc)
2215 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2216 || rtx_equal_p (vui[jj].lc->loc, node->loc))
/* Present in both chains: pos becomes the sum of positions.  */
2218 vui[jj].pos = jj + ii;
2222 if (jj >= dst_l) /* The location has not been found. */
2224 location_chain new_node;
2226 /* Copy the location from SRC. */
2227 new_node = (location_chain) pool_alloc (loc_chain_pool);
2228 new_node->loc = node->loc;
2229 new_node->init = node->init;
2230 if (!node->set_src || MEM_P (node->set_src))
2231 new_node->set_src = NULL;
2233 new_node->set_src = node->set_src;
2234 vui[n].lc = new_node;
2235 vui[n].pos_dst = src_l + dst_l;
2236 vui[n].pos = ii + src_l + dst_l;
/* Hand-rolled orderings for dst_l == 2 (guards elided) avoid the
   qsort in the common small cases.  */
2243 /* Special case still very common case. For dst_l == 2
2244 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2245 vui[i].pos == i + src_l + dst_l. */
2246 if (vui[0].pos > vui[1].pos)
2248 /* Order should be 1, 0, 2... */
2249 dst->var_part[k].loc_chain = vui[1].lc;
2250 vui[1].lc->next = vui[0].lc;
2253 vui[0].lc->next = vui[2].lc;
2254 vui[n - 1].lc->next = NULL;
2257 vui[0].lc->next = NULL;
2262 dst->var_part[k].loc_chain = vui[0].lc;
2263 if (n >= 3 && vui[2].pos < vui[1].pos)
2265 /* Order should be 0, 2, 1, 3... */
2266 vui[0].lc->next = vui[2].lc;
2267 vui[2].lc->next = vui[1].lc;
2270 vui[1].lc->next = vui[3].lc;
2271 vui[n - 1].lc->next = NULL;
2274 vui[1].lc->next = NULL;
2279 /* Order should be 0, 1, 2... */
2281 vui[n - 1].lc->next = NULL;
2284 for (; ii < n; ii++)
2285 vui[ii - 1].lc->next = vui[ii].lc;
/* General case: sort by pos and relink.  */
2289 qsort (vui, n, sizeof (struct variable_union_info),
2290 variable_union_info_cmp_pos);
2292 /* Reconnect the nodes in sorted order. */
2293 for (ii = 1; ii < n; ii++)
2294 vui[ii - 1].lc->next = vui[ii].lc;
2295 vui[n - 1].lc->next = NULL;
2296 dst->var_part[k].loc_chain = vui[0].lc;
2299 dst->var_part[k].offset = dst->var_part[j].offset;
/* Offset only in DST: keep DST's part unchanged.  */
2304 else if ((i >= 0 && j >= 0
2305 && src->var_part[i].offset < dst->var_part[j].offset)
2308 dst->var_part[k] = dst->var_part[j];
/* Offset only in SRC: deep-copy SRC's chain into DST.  */
2311 else if ((i >= 0 && j >= 0
2312 && src->var_part[i].offset > dst->var_part[j].offset)
2315 location_chain *nextp;
2317 /* Copy the chain from SRC. */
2318 nextp = &dst->var_part[k].loc_chain;
2319 for (node = src->var_part[i].loc_chain; node; node = node->next)
2321 location_chain new_lc;
2323 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2324 new_lc->next = NULL;
2325 new_lc->init = node->init;
2326 if (!node->set_src || MEM_P (node->set_src))
2327 new_lc->set_src = NULL;
2329 new_lc->set_src = node->set_src;
2330 new_lc->loc = node->loc;
2333 nextp = &new_lc->next;
2336 dst->var_part[k].offset = src->var_part[i].offset;
2339 dst->var_part[k].cur_loc = NULL;
/* With uninit tracking, propagate the stronger init status for
   locations common to both variables.  */
2342 if (flag_var_tracking_uninit)
2343 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2345 location_chain node, node2;
2346 for (node = src->var_part[i].loc_chain; node; node = node->next)
2347 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2348 if (rtx_equal_p (node->loc, node2->loc))
2350 if (node->init > node2->init)
2351 node2->init = node->init;
2355 /* Continue traversing the hash table. */
2359 /* Compute union of dataflow sets SRC and DST and store it to DST. */
/* Register attrs lists are unioned directly; the variable tables are
   either shared wholesale (when DST is still empty) or merged
   per-variable via variable_union.  */
2362 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2366 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2367 attrs_list_union (&dst->regs[i], src->regs[i]);
2369 if (dst->vars == empty_shared_hash)
2371 shared_hash_destroy (dst->vars);
2372 dst->vars = shared_hash_copy (src->vars);
2375 htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
2378 /* Whether the value is currently being expanded. */
/* These flags reuse spare RTX bits ('used', 'frame_related') and the
   tree 'visited' bit to track membership/recursion state cheaply.  */
2379 #define VALUE_RECURSED_INTO(x) \
2380 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2381 /* Whether the value is in changed_variables hash table. */
2382 #define VALUE_CHANGED(x) \
2383 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2384 /* Whether the decl is in changed_variables hash table. */
2385 #define DECL_CHANGED(x) TREE_VISITED (x)
2387 /* Record that DV has been added into resp. removed from changed_variables
/* Dispatch on whether DV wraps a VALUE rtx or a decl tree.  */
2391 set_dv_changed (decl_or_value dv, bool newv)
2393 if (dv_is_value_p (dv))
2394 VALUE_CHANGED (dv_as_value (dv)) = newv;
2396 DECL_CHANGED (dv_as_decl (dv)) = newv;
2399 /* Return true if DV is present in changed_variables hash table. */
2402 dv_changed_p (decl_or_value dv)
2404 return (dv_is_value_p (dv)
2405 ? VALUE_CHANGED (dv_as_value (dv))
2406 : DECL_CHANGED (dv_as_decl (dv)));
2409 /* Return a location list node whose loc is rtx_equal to LOC, in the
2410 location list of a one-part variable or value VAR, or in that of
2411 any values recursively mentioned in the location lists. */
/* Recursion through VALUE locations is cycle-protected with the
   VALUE_RECURSED_INTO flag.  NOTE(review): early returns and the
   final return NULL are elided in this listing.  */
2413 static location_chain
2414 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2416 location_chain node;
2421 gcc_assert (dv_onepart_p (var->dv));
2423 if (!var->n_var_parts)
/* One-part variables always live at offset 0.  */
2426 gcc_assert (var->var_part[0].offset == 0);
2428 for (node = var->var_part[0].loc_chain; node; node = node->next)
2429 if (rtx_equal_p (loc, node->loc))
2431 else if (GET_CODE (node->loc) == VALUE
2432 && !VALUE_RECURSED_INTO (node->loc))
2434 decl_or_value dv = dv_from_value (node->loc);
2435 variable var = (variable)
2436 htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2440 location_chain where;
/* Mark before recursing so a cyclic equivalence chain cannot
   loop; cleared again on both outcomes.  */
2441 VALUE_RECURSED_INTO (node->loc) = true;
2442 if ((where = find_loc_in_1pdv (loc, var, vars)))
2444 VALUE_RECURSED_INTO (node->loc) = false;
2447 VALUE_RECURSED_INTO (node->loc) = false;
2454 /* Hash table iteration argument passed to variable_merge. */
/* NOTE(review): the struct tag and member declarations are elided in
   this listing; only the member comments are visible.  */
2457 /* The set in which the merge is to be inserted. */
2459 /* The set that we're iterating in. */
2461 /* The set that may contain the other dv we are to merge with. */
2463 /* Number of onepart dvs in src. */
2464 int src_onepart_cnt;
2467 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
2468 loc_cmp order, and it is maintained as such. */
/* Sorted-insert: walk until loc_cmp places LOC; on an exact match
   only weaken the init status (MIN) instead of inserting.  */
2471 insert_into_intersection (location_chain *nodep, rtx loc,
2472 enum var_init_status status)
2474 location_chain node;
2477 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2478 if ((r = loc_cmp (node->loc, loc)) == 0)
2480 node->init = MIN (node->init, status);
/* Allocate and splice a new node at *NODEP (the node->loc
   assignment is elided in this listing).  */
2486 node = (location_chain) pool_alloc (loc_chain_pool);
2489 node->set_src = NULL;
2490 node->init = status;
2491 node->next = *nodep;
2495 /* Insert in DEST the intersection the locations present in both
2496 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2497 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
/* For every S1 location also reachable from S2VAR (possibly through
   value equivalences), insert it into DEST with the weaker of the
   two init statuses.  VAL itself is skipped to avoid self-loops.  */
2501 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2502 location_chain s1node, variable s2var)
2504 dataflow_set *s1set = dsm->cur;
2505 dataflow_set *s2set = dsm->src;
2506 location_chain found;
2508 for (; s1node; s1node = s1node->next)
2510 if (s1node->loc == val)
2513 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2514 shared_hash_htab (s2set->vars))))
2516 insert_into_intersection (dest, s1node->loc,
2517 MIN (s1node->init, found->init));
/* An S1 VALUE location: recurse into that value's own chain,
   guarded against cycles by VALUE_RECURSED_INTO.  */
2521 if (GET_CODE (s1node->loc) == VALUE
2522 && !VALUE_RECURSED_INTO (s1node->loc))
2524 decl_or_value dv = dv_from_value (s1node->loc);
2525 variable svar = shared_hash_find (s1set->vars, dv);
2528 if (svar->n_var_parts == 1)
2530 VALUE_RECURSED_INTO (s1node->loc) = true;
2531 intersect_loc_chains (val, dest, dsm,
2532 svar->var_part[0].loc_chain,
2534 VALUE_RECURSED_INTO (s1node->loc) = false;
2539 /* ??? if the location is equivalent to any location in src,
2540 searched recursively
2542 add to dst the values needed to represent the equivalence
2544 telling whether locations S is equivalent to another dv's
2547 for each location D in the list
2549 if S and D satisfy rtx_equal_p, then it is present
2551 else if D is a value, recurse without cycles
2553 else if S and D have the same CODE and MODE
2555 for each operand oS and the corresponding oD
2557 if oS and oD are not equivalent, then S an D are not equivalent
2559 else if they are RTX vectors
2561 if any vector oS element is not equivalent to its respective oD,
2562 then S and D are not equivalent
2570 /* Return -1 if X should be before Y in a location list for a 1-part
2571 variable, 1 if Y should be before X, and 0 if they're equivalent
2572 and should not appear in the list. */
2575 loc_cmp (rtx x, rtx y)
2578 RTX_CODE code = GET_CODE (x);
/* Registers compare by register number; their modes must agree at the
   top level (the guarding REG test is elided in this listing).  */
2588 gcc_assert (GET_MODE (x) == GET_MODE (y));
2589 if (REGNO (x) == REGNO (y))
2591 else if (REGNO (x) < REGNO (y))
/* MEMs (presumably -- the guard is elided) compare by their address.  */
2604 gcc_assert (GET_MODE (x) == GET_MODE (y));
2605 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
/* VALUEs sort before everything else, ordered by canon_value_cmp.  */
2611 if (GET_CODE (x) == VALUE
2613 if (GET_CODE (y) != VALUE)
2615 /* Don't assert the modes are the same, that is true only
2616 when not recursing. (subreg:QI (value:SI 1:1) 0)
2617 and (subreg:QI (value:DI 2:2) 0) can be compared,
2618 even when the modes are different. */
2619 if (canon_value_cmp (x, y))
2625 if (GET_CODE (y) == VALUE)
/* Different rtx codes order by the numeric value of the code.  */
2628 if (GET_CODE (x) == GET_CODE (y))
2629 /* Compare operands below. */;
2630 else if (GET_CODE (x) < GET_CODE (y))
2635 gcc_assert (GET_MODE (x) == GET_MODE (y));
/* DEBUG_EXPRs order by the UID of their backing decl; equal UIDs
   would mean the same decl, so checking asserts strict inequality.  */
2637 if (GET_CODE (x) == DEBUG_EXPR)
2639 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2640 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
2642 #ifdef ENABLE_CHECKING
2643 gcc_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2644 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
2649 fmt = GET_RTX_FORMAT (code);
/* Same code and mode: compare operand by operand, dispatching on the
   rtx format character (elided switch).  */
2650 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2654 if (XWINT (x, i) == XWINT (y, i))
2656 else if (XWINT (x, i) < XWINT (y, i))
2663 if (XINT (x, i) == XINT (y, i))
2665 else if (XINT (x, i) < XINT (y, i))
2672 /* Compare the vector length first. */
2673 if (XVECLEN (x, i) == XVECLEN (y, i))
2674 /* Compare the vector's elements. */;
2675 else if (XVECLEN (x, i) < XVECLEN (y, i))
2680 for (j = 0; j < XVECLEN (x, i); j++)
2681 if ((r = loc_cmp (XVECEXP (x, i, j),
2682 XVECEXP (y, i, j))))
2687 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
/* Strings: pointer equality first (shared strings), then strcmp.  */
2693 if (XSTR (x, i) == XSTR (y, i))
2699 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2707 /* These are just backpointers, so they don't matter. */
2714 /* It is believed that rtx's at this level will never
2715 contain anything but integers and other rtx's,
2716 except for within LABEL_REFs and SYMBOL_REFs. */
2724 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2725 from VALUE to DVP. */
2728 add_value_chain (rtx *loc, void *dvp)
2730 decl_or_value dv, ldv;
2731 value_chain vc, nvc;
/* Only VALUEs and DEBUG_EXPRs get backlink chains; anything else is
   presumably rejected by an elided early return.  */
2734 if (GET_CODE (*loc) == VALUE)
2735 ldv = dv_from_value (*loc);
2736 else if (GET_CODE (*loc) == DEBUG_EXPR)
2737 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
/* A value does not need a backlink to itself.  */
2741 if (dv_as_opaque (ldv) == dvp)
2744 dv = (decl_or_value) dvp;
2745 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
/* First reference to LDV: allocate its chain head.  */
2749 vc = (value_chain) pool_alloc (value_chain_pool);
2753 *slot = (void *) vc;
/* Already present?  Then only bump the refcount (elided).  */
2757 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2758 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
/* Otherwise link a new node right after the chain head.  */
2766 vc = (value_chain) *slot;
2767 nvc = (value_chain) pool_alloc (value_chain_pool);
2769 nvc->next = vc->next;
2775 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2776 from those VALUEs to DVP. */
2779 add_value_chains (decl_or_value dv, rtx loc)
/* A bare VALUE or DEBUG_EXPR is handled directly ...  */
2781 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
2783 add_value_chain (&loc, dv_as_opaque (dv));
/* ... otherwise (for a MEM, presumably -- guard elided) walk the
   address and chain every VALUE found inside it.  */
2789 loc = XEXP (loc, 0);
2790 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
2793 /* If CSELIB_VAL_PTR of value DV refers to VALUEs, add backlinks from those
2797 add_cselib_value_chains (decl_or_value dv)
2799 struct elt_loc_list *l;
/* Chain every VALUE appearing in any cselib location of DV.  */
2801 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2802 for_each_rtx (&l->loc, add_value_chain, dv_as_opaque (dv));
2805 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
2806 from VALUE to DVP. */
2809 remove_value_chain (rtx *loc, void *dvp)
2811 decl_or_value dv, ldv;
/* Mirror of add_value_chain: only VALUEs and DEBUG_EXPRs carry
   backlink chains.  */
2815 if (GET_CODE (*loc) == VALUE)
2816 ldv = dv_from_value (*loc);
2817 else if (GET_CODE (*loc) == DEBUG_EXPR)
2818 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
2822 if (dv_as_opaque (ldv) == dvp)
2825 dv = (decl_or_value) dvp;
2826 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
/* Find DV in LDV's chain; the entry is expected to exist.  */
2828 for (vc = (value_chain) *slot; vc->next; vc = vc->next)
2829 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
2831 value_chain dvc = vc->next;
2832 gcc_assert (dvc->refcount > 0);
/* Unlink and free the node once its last reference goes away.  */
2833 if (--dvc->refcount == 0)
2835 vc->next = dvc->next;
2836 pool_free (value_chain_pool, dvc);
/* If the chain head is now empty, drop the hash-table entry too.  */
2837 if (vc->next == NULL && vc == (value_chain) *slot)
2839 pool_free (value_chain_pool, vc);
2840 htab_clear_slot (value_chains, slot);
2848 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
2849 from those VALUEs to DVP. */
2852 remove_value_chains (decl_or_value dv, rtx loc)
/* Mirror of add_value_chains: direct VALUE/DEBUG_EXPR first ...  */
2854 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
2856 remove_value_chain (&loc, dv_as_opaque (dv));
/* ... otherwise walk the inner rtx (a MEM address, presumably --
   guard elided) and unchain every VALUE inside it.  */
2862 loc = XEXP (loc, 0);
2863 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
2867 /* If CSELIB_VAL_PTR of value DV refers to VALUEs, remove backlinks from those
2871 remove_cselib_value_chains (decl_or_value dv)
2873 struct elt_loc_list *l;
/* Undo add_cselib_value_chains for every cselib location of DV.  */
2875 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2876 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
2879 /* Check the order of entries in one-part variables. */
2882 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
2884 variable var = (variable) *slot;
2885 decl_or_value dv = var->dv;
2886 location_chain node, next;
2888 #ifdef ENABLE_RTL_CHECKING
2890 for (i = 0; i < var->n_var_parts; i++)
/* NOTE(review): the loop index i is unused here -- var_part[0] is
   asserted on every iteration.  This looks like it was meant to be
   var_part[i]; confirm against upstream before changing.  */
2891 gcc_assert (var->var_part[0].cur_loc == NULL);
2892 gcc_assert (!var->cur_loc_changed && !var->in_changed_variables);
2895 if (!dv_onepart_p (dv))
2898 gcc_assert (var->n_var_parts == 1);
2899 node = var->var_part[0].loc_chain;
/* One-part location chains must be strictly ordered by loc_cmp.  */
2902 while ((next = node->next))
2904 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
2912 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
2913 more likely to be chosen as canonical for an equivalence set.
2914 Ensure less likely values can reach more likely neighbors, making
2915 the connections bidirectional. */
2918 canonicalize_values_mark (void **slot, void *data)
2920 dataflow_set *set = (dataflow_set *)data;
2921 variable var = (variable) *slot;
2922 decl_or_value dv = var->dv;
2924 location_chain node;
/* Only VALUE-keyed entries participate in canonicalization.  */
2926 if (!dv_is_value_p (dv))
2929 gcc_assert (var->n_var_parts == 1);
2931 val = dv_as_value (dv);
2933 for (node = var->var_part[0].loc_chain; node; node = node->next)
2934 if (GET_CODE (node->loc) == VALUE)
/* If the neighbor outranks VAL, VAL must be revisited ...  */
2936 if (canon_value_cmp (node->loc, val))
2937 VALUE_RECURSED_INTO (val) = true;
/* ... otherwise make the link bidirectional by recording VAL as a
   location of the (less canonical) neighbor, and mark the neighbor.  */
2940 decl_or_value odv = dv_from_value (node->loc);
2941 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
2943 oslot = set_slot_part (set, val, oslot, odv, 0,
2944 node->init, NULL_RTX);
2946 VALUE_RECURSED_INTO (node->loc) = true;
2953 /* Remove redundant entries from equivalence lists in onepart
2954 variables, canonicalizing equivalence sets into star shapes. */
/* NOTE(review): this numbered listing omits some original lines;
   comments below describe only the code that is visible.  */
2957 canonicalize_values_star (void **slot, void *data)
2959 dataflow_set *set = (dataflow_set *)data;
2960 variable var = (variable) *slot;
2961 decl_or_value dv = var->dv;
2962 location_chain node;
2969 if (!dv_onepart_p (dv))
2972 gcc_assert (var->n_var_parts == 1);
/* Only values that were marked by canonicalize_values_mark need work;
   clear the mark once we take the entry.  */
2974 if (dv_is_value_p (dv))
2976 cval = dv_as_value (dv);
2977 if (!VALUE_RECURSED_INTO (cval))
2979 VALUE_RECURSED_INTO (cval) = false;
2989 gcc_assert (var->n_var_parts == 1);
/* Scan the location chain for the most canonical VALUE reachable
   from here (per canon_value_cmp).  */
2991 for (node = var->var_part[0].loc_chain; node; node = node->next)
2992 if (GET_CODE (node->loc) == VALUE)
2995 if (VALUE_RECURSED_INTO (node->loc))
2997 if (canon_value_cmp (node->loc, cval))
3006 if (!has_marks || dv_is_decl_p (dv))
3009 /* Keep it marked so that we revisit it, either after visiting a
3010 child node, or after visiting a new parent that might be
3012 VALUE_RECURSED_INTO (val) = true;
3014 for (node = var->var_part[0].loc_chain; node; node = node->next)
3015 if (GET_CODE (node->loc) == VALUE
3016 && VALUE_RECURSED_INTO (node->loc))
3020 VALUE_RECURSED_INTO (cval) = false;
3021 dv = dv_from_value (cval);
3022 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3025 gcc_assert (dv_is_decl_p (var->dv));
3026 /* The canonical value was reset and dropped.
3028 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3031 var = (variable)*slot;
3032 gcc_assert (dv_is_value_p (var->dv));
3033 if (var->n_var_parts == 0)
3035 gcc_assert (var->n_var_parts == 1);
3039 VALUE_RECURSED_INTO (val) = false;
3044 /* Push values to the canonical one. */
3045 cdv = dv_from_value (cval);
3046 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
/* Move every non-canonical location onto CVAL's chain, turning the
   equivalence set into a star centered on CVAL.  */
3048 for (node = var->var_part[0].loc_chain; node; node = node->next)
3049 if (node->loc != cval)
3051 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3052 node->init, NULL_RTX);
3053 if (GET_CODE (node->loc) == VALUE)
3055 decl_or_value ndv = dv_from_value (node->loc);
3057 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3060 if (canon_value_cmp (node->loc, val))
3062 /* If it could have been a local minimum, it's not any more,
3063 since it's now neighbor to cval, so it may have to push
3064 to it. Conversely, if it wouldn't have prevailed over
3065 val, then whatever mark it has is fine: if it was to
3066 push, it will now push to a more canonical node, but if
3067 it wasn't, then it has already pushed any values it might
3069 VALUE_RECURSED_INTO (node->loc) = true;
3070 /* Make sure we visit node->loc by ensuring cval is
3072 VALUE_RECURSED_INTO (cval) = true;
3074 else if (!VALUE_RECURSED_INTO (node->loc))
3075 /* If we have no need to "recurse" into this node, it's
3076 already "canonicalized", so drop the link to the old
3078 clobber_variable_part (set, cval, ndv, 0, NULL);
3080 else if (GET_CODE (node->loc) == REG)
3082 attrs list = set->regs[REGNO (node->loc)], *listp;
3084 /* Change an existing attribute referring to dv so that it
3085 refers to cdv, removing any duplicate this might
3086 introduce, and checking that no previous duplicates
3087 existed, all in a single pass. */
3091 if (list->offset == 0
3092 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3093 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
/* Found DV first: retarget it to CDV (elided) and remove a later
   duplicate CDV entry if one exists.  */
3100 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3103 for (listp = &list->next; (list = *listp); listp = &list->next)
3108 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3110 *listp = list->next;
3111 pool_free (attrs_pool, list);
3116 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
/* Found CDV first: any later DV entry is the duplicate to drop.  */
3119 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3121 for (listp = &list->next; (list = *listp); listp = &list->next)
3126 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3128 *listp = list->next;
3129 pool_free (attrs_pool, list);
3134 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3143 if (list->offset == 0
3144 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3145 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
/* Finally record VAL itself as a location of CVAL and reduce VAL's
   own chain to just CVAL.  */
3155 cslot = set_slot_part (set, val, cslot, cdv, 0,
3156 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3158 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3160 /* Variable may have been unshared. */
3161 var = (variable)*slot;
3162 gcc_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3163 && var->var_part[0].loc_chain->next == NULL);
/* If CVAL got re-marked while pushing, restart with it as the new
   center of the star.  */
3165 if (VALUE_RECURSED_INTO (cval))
3166 goto restart_with_cval;
3171 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3172 corresponding entry in DSM->src. Multi-part variables are combined
3173 with variable_union, whereas onepart dvs are combined with
/* NOTE(review): this numbered listing omits some original lines;
   comments below describe only the code that is visible.  */
3177 variable_merge_over_cur (void **s1slot, void *data)
3179 struct dfset_merge *dsm = (struct dfset_merge *)data;
3180 dataflow_set *dst = dsm->dst;
3182 variable s1var = (variable) *s1slot;
3183 variable s2var, dvar = NULL;
3184 decl_or_value dv = s1var->dv;
3185 bool onepart = dv_onepart_p (dv);
3188 location_chain node, *nodep;
3190 /* If the incoming onepart variable has an empty location list, then
3191 the intersection will be just as empty. For other variables,
3192 it's always union. */
3193 gcc_assert (s1var->n_var_parts);
3194 gcc_assert (s1var->var_part[0].loc_chain);
/* Multi-part variables take the union path and are done here.  */
3197 return variable_union (s1slot, dst);
3199 gcc_assert (s1var->n_var_parts == 1);
3200 gcc_assert (s1var->var_part[0].offset == 0);
3202 dvhash = dv_htab_hash (dv);
3203 if (dv_is_value_p (dv))
3204 val = dv_as_value (dv);
/* One-part case: look up the counterpart in the src set; without one
   the intersection is empty and DST cannot share.  */
3208 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3211 dst_can_be_shared = false;
3215 dsm->src_onepart_cnt--;
3216 gcc_assert (s2var->var_part[0].loc_chain);
3217 gcc_assert (s2var->n_var_parts == 1);
3218 gcc_assert (s2var->var_part[0].offset == 0);
3220 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3223 dvar = (variable)*dstslot;
3224 gcc_assert (dvar->refcount == 1);
3225 gcc_assert (dvar->n_var_parts == 1);
3226 gcc_assert (dvar->var_part[0].offset == 0);
3227 nodep = &dvar->var_part[0].loc_chain;
/* If the two source variables agree, share S2VAR into DST instead of
   building a fresh intersection.  */
3235 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3237 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3239 *dstslot = dvar = s2var;
3244 dst_can_be_shared = false;
/* Otherwise compute the intersection of the two location chains.  */
3246 intersect_loc_chains (val, nodep, dsm,
3247 s1var->var_part[0].loc_chain, s2var);
/* No DST entry yet: allocate a fresh one-part variable for the
   intersection result.  */
3253 dvar = (variable) pool_alloc (dv_pool (dv));
3256 dvar->n_var_parts = 1;
3257 dvar->cur_loc_changed = false;
3258 dvar->in_changed_variables = false;
3259 dvar->var_part[0].offset = 0;
3260 dvar->var_part[0].loc_chain = node;
3261 dvar->var_part[0].cur_loc = NULL;
3264 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3266 gcc_assert (!*dstslot);
/* For a VALUE dv, reconcile its register locations with DST's
   per-register attribute lists.  */
3274 nodep = &dvar->var_part[0].loc_chain;
3275 while ((node = *nodep))
3277 location_chain *nextp = &node->next;
3279 if (GET_CODE (node->loc) == REG)
3283 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3284 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3285 && dv_is_value_p (list->dv))
/* No matching attribute: record one for VAL in this register.  */
3289 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3291 /* If this value became canonical for another value that had
3292 this register, we want to leave it alone. */
3293 else if (dv_as_value (list->dv) != val)
3295 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3297 node->init, NULL_RTX);
3298 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3300 /* Since nextp points into the removed node, we can't
3301 use it. The pointer to the next node moved to nodep.
3302 However, if the variable we're walking is unshared
3303 during our walk, we'll keep walking the location list
3304 of the previously-shared variable, in which case the
3305 node won't have been removed, and we'll want to skip
3306 it. That's why we test *nodep here. */
3312 /* Canonicalization puts registers first, so we don't have to
3318 if (dvar != (variable)*dstslot)
3319 dvar = (variable)*dstslot;
3320 nodep = &dvar->var_part[0].loc_chain;
3324 /* Mark all referenced nodes for canonicalization, and make sure
3325 we have mutual equivalence links. */
3326 VALUE_RECURSED_INTO (val) = true;
3327 for (node = *nodep; node; node = node->next)
3328 if (GET_CODE (node->loc) == VALUE)
3330 VALUE_RECURSED_INTO (node->loc) = true;
3331 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3332 node->init, NULL, INSERT);
3335 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3336 gcc_assert (*dstslot == dvar);
3337 canonicalize_values_star (dstslot, dst);
3338 #ifdef ENABLE_CHECKING
3340 == shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash));
3342 dvar = (variable)*dstslot;
/* Decl-keyed one-part case: detect a mixed VALUE/non-VALUE chain,
   which forces canonicalization below.  */
3346 bool has_value = false, has_other = false;
3348 /* If we have one value and anything else, we're going to
3349 canonicalize this, so make sure all values have an entry in
3350 the table and are marked for canonicalization. */
3351 for (node = *nodep; node; node = node->next)
3353 if (GET_CODE (node->loc) == VALUE)
3355 /* If this was marked during register canonicalization,
3356 we know we have to canonicalize values. */
3371 if (has_value && has_other)
3373 for (node = *nodep; node; node = node->next)
3375 if (GET_CODE (node->loc) == VALUE)
3377 decl_or_value dv = dv_from_value (node->loc);
3380 if (shared_hash_shared (dst->vars))
3381 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3383 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
/* The VALUE had no entry in DST: create an empty one so
   canonicalization can fill it in.  */
3387 variable var = (variable) pool_alloc (dv_pool (dv));
3390 var->n_var_parts = 1;
3391 var->cur_loc_changed = false;
3392 var->in_changed_variables = false;
3393 var->var_part[0].offset = 0;
3394 var->var_part[0].loc_chain = NULL;
3395 var->var_part[0].cur_loc = NULL;
3399 VALUE_RECURSED_INTO (node->loc) = true;
3403 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3404 gcc_assert (*dstslot == dvar);
3405 canonicalize_values_star (dstslot, dst);
3406 #ifdef ENABLE_CHECKING
3408 == shared_hash_find_slot_noinsert_1 (dst->vars,
3411 dvar = (variable)*dstslot;
/* If the result equals one of the inputs, share that input rather
   than keeping a private copy.  */
3415 if (!onepart_variable_different_p (dvar, s2var))
3417 variable_htab_free (dvar);
3418 *dstslot = dvar = s2var;
3421 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3423 variable_htab_free (dvar);
3424 *dstslot = dvar = s1var;
3426 dst_can_be_shared = false;
3429 dst_can_be_shared = false;
3434 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
3435 multi-part variable. Unions of multi-part variables and
3436 intersections of one-part ones will be handled in
3437 variable_merge_over_cur(). */
3440 variable_merge_over_src (void **s2slot, void *data)
3442 struct dfset_merge *dsm = (struct dfset_merge *)data;
3443 dataflow_set *dst = dsm->dst;
3444 variable s2var = (variable) *s2slot;
3445 decl_or_value dv = s2var->dv;
3446 bool onepart = dv_onepart_p (dv);
/* Multi-part variables are copied (shared) into DST directly (body
   elided in this listing) ...  */
3450 void **dstp = shared_hash_find_slot (dst->vars, dv);
/* ... one-part variables are only counted here; the real merge
   happens in variable_merge_over_cur.  */
3456 dsm->src_onepart_cnt++;
3460 /* Combine dataflow set information from SRC2 into DST.  (The original
3461 comment also mentioned a PDST parameter, but the visible signature
takes none -- stale comment; confirm against upstream.) */
3464 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
/* Snapshot DST as "cur"; DST itself is rebuilt as the merge result.  */
3466 dataflow_set cur = *dst;
3467 dataflow_set *src1 = &cur;
3468 struct dfset_merge dsm;
3470 size_t src1_elems, src2_elems;
3472 src1_elems = htab_elements (shared_hash_htab (src1->vars));
3473 src2_elems = htab_elements (shared_hash_htab (src2->vars));
3474 dataflow_set_init (dst);
3475 dst->stack_adjust = cur.stack_adjust;
/* Give DST a fresh, unshared variable table sized for the inputs.  */
3476 shared_hash_destroy (dst->vars);
3477 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3478 dst->vars->refcount = 1;
3480 = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
3481 variable_htab_eq, variable_htab_free);
/* Merge the per-register attribute lists of both inputs.  */
3483 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3484 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
3489 dsm.src_onepart_cnt = 0;
/* Two traversals: src contributes multi-part copies (and counts its
   one-part entries); cur then unions/intersects everything in.  */
3491 htab_traverse (shared_hash_htab (dsm.src->vars), variable_merge_over_src,
3493 htab_traverse (shared_hash_htab (dsm.cur->vars), variable_merge_over_cur,
/* One-part variables present only in src were dropped from the
   intersection, so DST no longer matches src and cannot share.  */
3496 if (dsm.src_onepart_cnt)
3497 dst_can_be_shared = false;
3499 dataflow_set_destroy (src1);
3502 /* Mark register equivalences. */
3505 dataflow_set_equiv_regs (dataflow_set *set)
3510 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3512 rtx canon[NUM_MACHINE_MODES];
3514 memset (canon, 0, sizeof (canon));
3516 for (list = set->regs[i]; list; list = list->next)
3517 if (list->offset == 0 && dv_is_value_p (list->dv))
3519 rtx val = dv_as_value (list->dv);
3520 rtx *cvalp = &canon[(int)GET_MODE (val)];
3523 if (canon_value_cmp (val, cval))
3527 for (list = set->regs[i]; list; list = list->next)
3528 if (list->offset == 0 && dv_onepart_p (list->dv))
3530 rtx cval = canon[(int)GET_MODE (list->loc)];
3535 if (dv_is_value_p (list->dv))
3537 rtx val = dv_as_value (list->dv);
3542 VALUE_RECURSED_INTO (val) = true;