1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in the instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from these notes.
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro operations.
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 < set < clobber < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along the control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors; the OUT set for BB is copied from the IN set for BB and
44 then changed according to the micro operations in BB.
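Schematically, for each basic block BB:

     IN (BB)  = union of OUT (P) over all predecessors P of BB
     OUT (BB) = IN (BB) transformed by the micro operations in BB

and the analysis iterates these equations until the sets stop changing.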
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offsets of variables addressed using the stack pointer),
48 the table of structures describing the locations of parts of a variable,
49 and a linked list for each physical register.
50 The linked list is a list of the variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 efficiently deleting the appropriate variable parts when we set or clobber the register.
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short, so a linked list is a good data structure here.
58 For example, in code like the sketch below, the register allocator may assign the same
59 register to variables A and B, so that both of them are stored in the same register over part of the code.
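An illustrative sketch of such code (hypothetical source, for exposition only):

     if (cond)
       set A;
     else
       set B;
     ... code that uses neither A nor B ...
     if (cond)
       use A;
     else
       use B;

Over the middle section both variable parts live in the same register, each as
an entry on that register's linked list.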
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in the RTL code. Each such note describes
74 the location of one variable at the point in the instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction; we only emit these notes where the location of a variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for large variables
85 which consist of several parts, for example long long). An illustrative sketch of such notes follows.
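Schematically, and only as an illustration (the exact RTL printing may differ,
and the register numbers are hypothetical), a note for a simple int variable i
held in a register might carry

     (var_location i (reg:SI 3))

while a note for a long long variable ll split across two registers might
carry a parallel with one element per part and its offset in the variable:

     (var_location ll (parallel [(expr_list (reg:SI 4) (const_int 0))
                                 (expr_list (reg:SI 5) (const_int 4))]))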
91 #include "coretypes.h"
95 #include "hard-reg-set.h"
96 #include "basic-block.h"
99 #include "insn-config.h"
102 #include "alloc-pool.h"
108 #include "tree-pass.h"
109 #include "tree-flow.h"
114 #include "diagnostic.h"
115 #include "pointer-set.h"
117 /* var-tracking.c assumes that the tree code with the same value as the VALUE rtx code
118 cannot appear in REG_EXPR/MEM_EXPRs and is not a decl.
119 Currently the value is the same as IDENTIFIER_NODE, which has such
120 a property. If this compile-time assertion ever fails, make sure that
121 the new tree code that equals (int) VALUE has the same property. */
122 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
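/* The declaration above is the classic negative-array-size idiom: when the
   condition is false the array size becomes -1, which no C compiler accepts,
   so the assumption is checked at compile time.  A hypothetical check of the
   same shape, e.g. that int is at least 4 bytes wide, could be written as

     extern char check_int_width[sizeof (int) >= 4 ? 1 : -1];

   (illustrative only, not part of this file's checks).  */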
124 /* Type of micro operation. */
125 enum micro_operation_type
127 MO_USE, /* Use location (REG or MEM). */
128 MO_USE_NO_VAR, /* Use location which is not associated with a variable
129 or the variable is not trackable. */
130 MO_VAL_USE, /* Use location which is associated with a value. */
131 MO_VAL_LOC, /* Use location which appears in a debug insn. */
132 MO_VAL_SET, /* Set location associated with a value. */
133 MO_SET, /* Set location. */
134 MO_COPY, /* Copy the same portion of a variable from one
135 location to another. */
136 MO_CLOBBER, /* Clobber location. */
137 MO_CALL, /* Call insn. */
138 MO_ADJUST /* Adjust stack pointer. */
142 static const char * const ATTRIBUTE_UNUSED
143 micro_operation_type_name[] = {
156 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
157 Notes emitted as AFTER_CALL are to take effect during the call,
158 rather than after the call. */
161 EMIT_NOTE_BEFORE_INSN,
162 EMIT_NOTE_AFTER_INSN,
163 EMIT_NOTE_AFTER_CALL_INSN
166 /* Structure holding information about micro operation. */
167 typedef struct micro_operation_def
169 /* Type of micro operation. */
170 enum micro_operation_type type;
172 /* The instruction which the micro operation is in, for MO_USE,
173 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
174 instruction or note in the original flow (before any var-tracking
175 notes are inserted, to simplify emission of notes), for MO_SET
180 /* Location. For MO_SET and MO_COPY, this is the SET that
181 performs the assignment, if known, otherwise it is the target
182 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
183 CONCAT of the VALUE and the LOC associated with it. For
184 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
185 associated with it. */
188 /* Stack adjustment. */
189 HOST_WIDE_INT adjust;
193 DEF_VEC_O(micro_operation);
194 DEF_VEC_ALLOC_O(micro_operation,heap);
196 /* A declaration of a variable, or an RTL value being handled like a declaration. */
198 typedef void *decl_or_value;
200 /* Structure for passing some other parameters to function
201 emit_note_insn_var_location. */
202 typedef struct emit_note_data_def
204 /* The instruction which the note will be emitted before/after. */
207 /* Where the note will be emitted (before/after insn)? */
208 enum emit_note_where where;
210 /* The variables and values active at this point. */
214 /* Description of location of a part of a variable. The content of a physical
215 register is described by a chain of these structures.
216 The chains are pretty short (usually 1 or 2 elements) and thus
217 a chain is the best data structure. */
218 typedef struct attrs_def
220 /* Pointer to next member of the list. */
221 struct attrs_def *next;
223 /* The rtx of register. */
226 /* The declaration corresponding to LOC. */
229 /* Offset from start of DECL. */
230 HOST_WIDE_INT offset;
233 /* Structure holding a refcounted hash table. If refcount > 1,
234 it must first be unshared before being modified. */
235 typedef struct shared_hash_def
237 /* Reference count. */
240 /* Actual hash table. */
244 /* Structure holding the IN or OUT set for a basic block. */
245 typedef struct dataflow_set_def
247 /* Adjustment of stack offset. */
248 HOST_WIDE_INT stack_adjust;
250 /* Attributes for registers (lists of attrs). */
251 attrs regs[FIRST_PSEUDO_REGISTER];
253 /* Variable locations. */
256 /* Vars that are being traversed. */
257 shared_hash traversed_vars;
260 /* The structure (one for each basic block) containing the information
261 needed for variable tracking. */
262 typedef struct variable_tracking_info_def
264 /* The vector of micro operations. */
265 VEC(micro_operation, heap) *mos;
267 /* The IN and OUT set for dataflow analysis. */
271 /* The permanent-in dataflow set for this block. This is used to
272 hold values for which we had to compute entry values. ??? This
273 should probably be dynamically allocated, to avoid using more
274 memory in non-debug builds. */
277 /* Has the block been visited in DFS? */
280 /* Has the block been flooded in VTA? */
283 } *variable_tracking_info;
285 /* Structure for chaining the locations. */
286 typedef struct location_chain_def
288 /* Next element in the chain. */
289 struct location_chain_def *next;
291 /* The location (REG, MEM or VALUE). */
294 /* The "value" stored in this location. */
298 enum var_init_status init;
301 /* Structure describing one part of a variable. */
302 typedef struct variable_part_def
304 /* Chain of locations of the part. */
305 location_chain loc_chain;
307 /* Location which was last emitted to location list. */
310 /* The offset in the variable. */
311 HOST_WIDE_INT offset;
314 /* Maximum number of location parts. */
315 #define MAX_VAR_PARTS 16
317 /* Structure describing where the variable is located. */
318 typedef struct variable_def
320 /* The declaration of the variable, or an RTL value being handled
321 like a declaration. */
324 /* Reference count. */
327 /* Number of variable parts. */
330 /* True if any of this variable's cur_loc fields changed
331 during the current emit_notes_for_changes or
332 emit_notes_for_differences call.
333 bool cur_loc_changed;
335 /* True if this variable_def struct is currently in the
336 changed_variables hash table. */
337 bool in_changed_variables;
339 /* The variable parts. */
340 variable_part var_part[1];
342 typedef const struct variable_def *const_variable;
344 /* Structure for chaining backlinks from referenced VALUEs to
345 DVs that are referencing them. */
346 typedef struct value_chain_def
348 /* Next value_chain entry. */
349 struct value_chain_def *next;
351 /* The declaration of the variable, or an RTL value
352 being handled like a declaration, whose var_parts[0].loc_chain
353 references the VALUE owning this value_chain. */
356 /* Reference count. */
359 typedef const struct value_chain_def *const_value_chain;
361 /* Pointer to the BB's information specific to variable tracking pass. */
362 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
364 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
365 #define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
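/* Note that because INT_MEM_OFFSET expands MEM twice, its argument must not
   have side effects; e.g. a hypothetical INT_MEM_OFFSET (*memp++) would
   advance MEMP twice.  */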
367 /* Alloc pool for struct attrs_def. */
368 static alloc_pool attrs_pool;
370 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
371 static alloc_pool var_pool;
373 /* Alloc pool for struct variable_def with a single var_part entry. */
374 static alloc_pool valvar_pool;
376 /* Alloc pool for struct location_chain_def. */
377 static alloc_pool loc_chain_pool;
379 /* Alloc pool for struct shared_hash_def. */
380 static alloc_pool shared_hash_pool;
382 /* Alloc pool for struct value_chain_def. */
383 static alloc_pool value_chain_pool;
385 /* Changed variables, notes will be emitted for them. */
386 static htab_t changed_variables;
388 /* Links from VALUEs to DVs referencing them in their current loc_chains. */
389 static htab_t value_chains;
391 /* Shall notes be emitted? */
392 static bool emit_notes;
394 /* Empty shared hashtable. */
395 static shared_hash empty_shared_hash;
397 /* Scratch register bitmap used by cselib_expand_value_rtx. */
398 static bitmap scratch_regs = NULL;
400 /* Variable used to tell whether cselib_process_insn called our hook. */
401 static bool cselib_hook_called;
403 /* Local function prototypes. */
404 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
406 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
408 static void bb_stack_adjust_offset (basic_block);
409 static bool vt_stack_adjustments (void);
410 static rtx adjust_stack_reference (rtx, HOST_WIDE_INT);
411 static hashval_t variable_htab_hash (const void *);
412 static int variable_htab_eq (const void *, const void *);
413 static void variable_htab_free (void *);
415 static void init_attrs_list_set (attrs *);
416 static void attrs_list_clear (attrs *);
417 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
418 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
419 static void attrs_list_copy (attrs *, attrs);
420 static void attrs_list_union (attrs *, attrs);
422 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
423 enum var_init_status);
424 static int vars_copy_1 (void **, void *);
425 static void vars_copy (htab_t, htab_t);
426 static tree var_debug_decl (tree);
427 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
428 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
429 enum var_init_status, rtx);
430 static void var_reg_delete (dataflow_set *, rtx, bool);
431 static void var_regno_delete (dataflow_set *, int);
432 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
433 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
434 enum var_init_status, rtx);
435 static void var_mem_delete (dataflow_set *, rtx, bool);
437 static void dataflow_set_init (dataflow_set *);
438 static void dataflow_set_clear (dataflow_set *);
439 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
440 static int variable_union_info_cmp_pos (const void *, const void *);
441 static int variable_union (void **, void *);
442 static void dataflow_set_union (dataflow_set *, dataflow_set *);
443 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
444 static bool canon_value_cmp (rtx, rtx);
445 static int loc_cmp (rtx, rtx);
446 static bool variable_part_different_p (variable_part *, variable_part *);
447 static bool onepart_variable_different_p (variable, variable);
448 static bool variable_different_p (variable, variable);
449 static int dataflow_set_different_1 (void **, void *);
450 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
451 static void dataflow_set_destroy (dataflow_set *);
453 static bool contains_symbol_ref (rtx);
454 static bool track_expr_p (tree, bool);
455 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
456 static int add_uses (rtx *, void *);
457 static void add_uses_1 (rtx *, void *);
458 static void add_stores (rtx, const_rtx, void *);
459 static bool compute_bb_dataflow (basic_block);
460 static bool vt_find_locations (void);
462 static void dump_attrs_list (attrs);
463 static int dump_var_slot (void **, void *);
464 static void dump_var (variable);
465 static void dump_vars (htab_t);
466 static void dump_dataflow_set (dataflow_set *);
467 static void dump_dataflow_sets (void);
469 static void variable_was_changed (variable, dataflow_set *);
470 static void **set_slot_part (dataflow_set *, rtx, void **,
471 decl_or_value, HOST_WIDE_INT,
472 enum var_init_status, rtx);
473 static void set_variable_part (dataflow_set *, rtx,
474 decl_or_value, HOST_WIDE_INT,
475 enum var_init_status, rtx, enum insert_option);
476 static void **clobber_slot_part (dataflow_set *, rtx,
477 void **, HOST_WIDE_INT, rtx);
478 static void clobber_variable_part (dataflow_set *, rtx,
479 decl_or_value, HOST_WIDE_INT, rtx);
480 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
481 static void delete_variable_part (dataflow_set *, rtx,
482 decl_or_value, HOST_WIDE_INT);
483 static int emit_note_insn_var_location (void **, void *);
484 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
485 static int emit_notes_for_differences_1 (void **, void *);
486 static int emit_notes_for_differences_2 (void **, void *);
487 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
488 static void emit_notes_in_bb (basic_block, dataflow_set *);
489 static void vt_emit_notes (void);
491 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
492 static void vt_add_function_parameters (void);
493 static void vt_initialize (void);
494 static void vt_finalize (void);
496 /* Given a SET, calculate the amount of stack adjustment it contains,
497 both PRE- and POST-modifying the stack pointer.
498 This function is similar to stack_adjust_offset. */
501 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
504 rtx src = SET_SRC (pattern);
505 rtx dest = SET_DEST (pattern);
508 if (dest == stack_pointer_rtx)
510 /* (set (reg sp) (plus (reg sp) (const_int))) */
511 code = GET_CODE (src);
512 if (! (code == PLUS || code == MINUS)
513 || XEXP (src, 0) != stack_pointer_rtx
514 || !CONST_INT_P (XEXP (src, 1)))
518 *post += INTVAL (XEXP (src, 1));
520 *post -= INTVAL (XEXP (src, 1));
522 else if (MEM_P (dest))
524 /* (set (mem (pre_dec (reg sp))) (foo)) */
525 src = XEXP (dest, 0);
526 code = GET_CODE (src);
532 if (XEXP (src, 0) == stack_pointer_rtx)
534 rtx val = XEXP (XEXP (src, 1), 1);
535 /* We handle only adjustments by constant amount. */
536 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
539 if (code == PRE_MODIFY)
540 *pre -= INTVAL (val);
542 *post -= INTVAL (val);
548 if (XEXP (src, 0) == stack_pointer_rtx)
550 *pre += GET_MODE_SIZE (GET_MODE (dest));
556 if (XEXP (src, 0) == stack_pointer_rtx)
558 *post += GET_MODE_SIZE (GET_MODE (dest));
564 if (XEXP (src, 0) == stack_pointer_rtx)
566 *pre -= GET_MODE_SIZE (GET_MODE (dest));
572 if (XEXP (src, 0) == stack_pointer_rtx)
574 *post -= GET_MODE_SIZE (GET_MODE (dest));
585 /* Given an INSN, calculate the amount of stack adjustment it contains,
586 both PRE- and POST-modifying the stack pointer. */
589 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
597 pattern = PATTERN (insn);
598 if (RTX_FRAME_RELATED_P (insn))
600 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
602 pattern = XEXP (expr, 0);
605 if (GET_CODE (pattern) == SET)
606 stack_adjust_offset_pre_post (pattern, pre, post);
607 else if (GET_CODE (pattern) == PARALLEL
608 || GET_CODE (pattern) == SEQUENCE)
612 /* There may be stack adjustments inside compound insns. Search for them. */
614 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
615 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
616 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
620 /* Compute stack adjustment in basic block BB. */
623 bb_stack_adjust_offset (basic_block bb)
625 HOST_WIDE_INT offset;
629 offset = VTI (bb)->in.stack_adjust;
630 for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
632 if (mo->type == MO_ADJUST)
633 offset += mo->u.adjust;
634 else if (mo->type != MO_CALL)
636 if (MEM_P (mo->u.loc))
637 mo->u.loc = adjust_stack_reference (mo->u.loc, -offset);
640 VTI (bb)->out.stack_adjust = offset;
643 /* Compute stack adjustments for all blocks by traversing the DFS tree.
644 Return true when the adjustments on all incoming edges are consistent.
645 Heavily borrowed from pre_and_rev_post_order_compute. */
648 vt_stack_adjustments (void)
650 edge_iterator *stack;
653 /* Initialize entry block. */
654 VTI (ENTRY_BLOCK_PTR)->visited = true;
655 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
657 /* Allocate stack for back-tracking up CFG. */
658 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
661 /* Push the first edge on to the stack. */
662 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
670 /* Look at the edge on the top of the stack. */
672 src = ei_edge (ei)->src;
673 dest = ei_edge (ei)->dest;
675 /* Check if the edge destination has been visited yet. */
676 if (!VTI (dest)->visited)
678 VTI (dest)->visited = true;
679 VTI (dest)->in.stack_adjust = VTI (src)->out.stack_adjust;
680 bb_stack_adjust_offset (dest);
682 if (EDGE_COUNT (dest->succs) > 0)
683 /* Since the DEST node has been visited for the first
684 time, check its successors. */
685 stack[sp++] = ei_start (dest->succs);
689 /* Check whether the adjustments on the edges are the same. */
690 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
696 if (! ei_one_before_end_p (ei))
697 /* Go to the next edge. */
698 ei_next (&stack[sp - 1]);
700 /* Return to previous level if there are no more edges. */
709 /* Adjust stack reference MEM by ADJUSTMENT bytes and make it relative
710 to the argument pointer. Return the new rtx. */
713 adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment)
717 #ifdef FRAME_POINTER_CFA_OFFSET
718 adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
719 cfa = plus_constant (frame_pointer_rtx, adjustment);
721 adjustment -= ARG_POINTER_CFA_OFFSET (current_function_decl);
722 cfa = plus_constant (arg_pointer_rtx, adjustment);
725 addr = replace_rtx (copy_rtx (XEXP (mem, 0)), stack_pointer_rtx, cfa);
726 tmp = simplify_rtx (addr);
730 return replace_equiv_address_nv (mem, addr);
733 /* Return true if a decl_or_value DV is a DECL or NULL. */
735 dv_is_decl_p (decl_or_value dv)
737 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
740 /* Return true if a decl_or_value is a VALUE rtl. */
742 dv_is_value_p (decl_or_value dv)
744 return dv && !dv_is_decl_p (dv);
747 /* Return the decl in the decl_or_value. */
749 dv_as_decl (decl_or_value dv)
751 #ifdef ENABLE_CHECKING
752 gcc_assert (dv_is_decl_p (dv));
757 /* Return the value in the decl_or_value. */
759 dv_as_value (decl_or_value dv)
761 #ifdef ENABLE_CHECKING
762 gcc_assert (dv_is_value_p (dv));
767 /* Return the opaque pointer in the decl_or_value. */
769 dv_as_opaque (decl_or_value dv)
774 /* Return true if a decl_or_value must not have more than one variable part. */
777 dv_onepart_p (decl_or_value dv)
781 if (!MAY_HAVE_DEBUG_INSNS)
784 if (dv_is_value_p (dv))
787 decl = dv_as_decl (dv);
792 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
795 return (target_for_debug_bind (decl) != NULL_TREE);
798 /* Return the variable pool to be used for dv, depending on whether it
799 can have multiple parts or not. */
800 static inline alloc_pool
801 dv_pool (decl_or_value dv)
803 return dv_onepart_p (dv) ? valvar_pool : var_pool;
806 /* Build a decl_or_value out of a decl. */
807 static inline decl_or_value
808 dv_from_decl (tree decl)
812 #ifdef ENABLE_CHECKING
813 gcc_assert (dv_is_decl_p (dv));
818 /* Build a decl_or_value out of a value. */
819 static inline decl_or_value
820 dv_from_value (rtx value)
824 #ifdef ENABLE_CHECKING
825 gcc_assert (dv_is_value_p (dv));
830 extern void debug_dv (decl_or_value dv);
833 debug_dv (decl_or_value dv)
835 if (dv_is_value_p (dv))
836 debug_rtx (dv_as_value (dv));
838 debug_generic_stmt (dv_as_decl (dv));
841 typedef unsigned int dvuid;
843 /* Return the uid of DV. */
846 dv_uid (decl_or_value dv)
848 if (dv_is_value_p (dv))
849 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
851 return DECL_UID (dv_as_decl (dv));
854 /* Compute the hash from the uid. */
856 static inline hashval_t
857 dv_uid2hash (dvuid uid)
862 /* The hash function for a mask table in a shared_htab chain. */
864 static inline hashval_t
865 dv_htab_hash (decl_or_value dv)
867 return dv_uid2hash (dv_uid (dv));
870 /* The hash function for variable_htab, computes the hash value
871 from the declaration of variable X. */
874 variable_htab_hash (const void *x)
876 const_variable const v = (const_variable) x;
878 return dv_htab_hash (v->dv);
881 /* Compare the declaration of variable X with declaration Y. */
884 variable_htab_eq (const void *x, const void *y)
886 const_variable const v = (const_variable) x;
887 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
889 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
892 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
895 variable_htab_free (void *elem)
898 variable var = (variable) elem;
899 location_chain node, next;
901 gcc_assert (var->refcount > 0);
904 if (var->refcount > 0)
907 for (i = 0; i < var->n_var_parts; i++)
909 for (node = var->var_part[i].loc_chain; node; node = next)
912 pool_free (loc_chain_pool, node);
914 var->var_part[i].loc_chain = NULL;
916 pool_free (dv_pool (var->dv), var);
919 /* The hash function for value_chains htab, computes the hash value
923 value_chain_htab_hash (const void *x)
925 const_value_chain const v = (const_value_chain) x;
927 return dv_htab_hash (v->dv);
930 /* Compare the VALUE X with VALUE Y. */
933 value_chain_htab_eq (const void *x, const void *y)
935 const_value_chain const v = (const_value_chain) x;
936 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
938 return dv_as_opaque (v->dv) == dv_as_opaque (dv);
941 /* Initialize the set (array) SET of attrs to empty lists. */
944 init_attrs_list_set (attrs *set)
948 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
952 /* Make the list *LISTP empty. */
955 attrs_list_clear (attrs *listp)
959 for (list = *listp; list; list = next)
962 pool_free (attrs_pool, list);
967 /* Return true if the pair of DECL and OFFSET is a member of LIST. */
970 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
972 for (; list; list = list->next)
973 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
978 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
981 attrs_list_insert (attrs *listp, decl_or_value dv,
982 HOST_WIDE_INT offset, rtx loc)
986 list = (attrs) pool_alloc (attrs_pool);
989 list->offset = offset;
994 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
997 attrs_list_copy (attrs *dstp, attrs src)
1001 attrs_list_clear (dstp);
1002 for (; src; src = src->next)
1004 n = (attrs) pool_alloc (attrs_pool);
1007 n->offset = src->offset;
1013 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1016 attrs_list_union (attrs *dstp, attrs src)
1018 for (; src; src = src->next)
1020 if (!attrs_list_member (*dstp, src->dv, src->offset))
1021 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1025 /* Combine nodes that are not onepart nodes from SRC and SRC2 into *DSTP. */
1029 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1031 gcc_assert (!*dstp);
1032 for (; src; src = src->next)
1034 if (!dv_onepart_p (src->dv))
1035 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1037 for (src = src2; src; src = src->next)
1039 if (!dv_onepart_p (src->dv)
1040 && !attrs_list_member (*dstp, src->dv, src->offset))
1041 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1045 /* Shared hashtable support. */
1047 /* Return true if VARS is shared. */
1050 shared_hash_shared (shared_hash vars)
1052 return vars->refcount > 1;
1055 /* Return the hash table for VARS. */
1057 static inline htab_t
1058 shared_hash_htab (shared_hash vars)
1063 /* Return true if VAR is shared, or might be shared because VARS is shared. */
1066 shared_var_p (variable var, shared_hash vars)
1068 /* Don't count an entry in the changed_variables table as a duplicate. */
1069 return ((var->refcount > 1 + (int) var->in_changed_variables)
1070 || shared_hash_shared (vars));
1073 /* Copy variables into a new hash table. */
1076 shared_hash_unshare (shared_hash vars)
1078 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1079 gcc_assert (vars->refcount > 1);
1080 new_vars->refcount = 1;
1082 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1083 variable_htab_eq, variable_htab_free);
1084 vars_copy (new_vars->htab, vars->htab);
1089 /* Increment reference counter on VARS and return it. */
1091 static inline shared_hash
1092 shared_hash_copy (shared_hash vars)
1098 /* Decrement reference counter and destroy hash table if not shared
1102 shared_hash_destroy (shared_hash vars)
1104 gcc_assert (vars->refcount > 0);
1105 if (--vars->refcount == 0)
1107 htab_delete (vars->htab);
1108 pool_free (shared_hash_pool, vars);
1112 /* Unshare *PVARS if shared and return slot for DV. If INS is
1113 INSERT, insert it if not already present. */
1115 static inline void **
1116 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1117 hashval_t dvhash, enum insert_option ins)
1119 if (shared_hash_shared (*pvars))
1120 *pvars = shared_hash_unshare (*pvars);
1121 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1124 static inline void **
1125 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1126 enum insert_option ins)
1128 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1131 /* Return slot for DV, if it is already present in the hash table.
1132 If it is not present, insert it only if VARS is not shared, otherwise
1135 static inline void **
1136 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1138 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1139 shared_hash_shared (vars)
1140 ? NO_INSERT : INSERT);
1143 static inline void **
1144 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1146 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1149 /* Return slot for DV only if it is already present in the hash table. */
1151 static inline void **
1152 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1155 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1159 static inline void **
1160 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1162 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1165 /* Return the variable for DV, or NULL if not already present in the hash table. */
1168 static inline variable
1169 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1171 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1174 static inline variable
1175 shared_hash_find (shared_hash vars, decl_or_value dv)
1177 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1180 /* Return true if TVAL is better than CVAL as a canonical value. We
1181 choose lowest-numbered VALUEs, using the RTX address as a
1182 tie-breaker. The idea is to arrange them into a star topology,
1183 such that all of them are at most one step away from the canonical
1184 value, and the canonical value has backlinks to all of them, in
1185 addition to all the actual locations. We don't enforce this
1186 topology throughout the entire dataflow analysis, though.
1190 canon_value_cmp (rtx tval, rtx cval)
1193 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
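/* For illustration (hypothetical uids): among equivalent VALUEs with uids 7,
   12 and 31, the VALUE with uid 7 is the canonical one; the location lists of
   12 and 31 each contain a link to 7, while 7's list accumulates backlinks to
   12 and 31 in addition to the actual registers or memory locations.  */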
1196 static bool dst_can_be_shared;
1198 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1201 unshare_variable (dataflow_set *set, void **slot, variable var,
1202 enum var_init_status initialized)
1207 new_var = (variable) pool_alloc (dv_pool (var->dv));
1208 new_var->dv = var->dv;
1209 new_var->refcount = 1;
1211 new_var->n_var_parts = var->n_var_parts;
1212 new_var->cur_loc_changed = var->cur_loc_changed;
1213 var->cur_loc_changed = false;
1214 new_var->in_changed_variables = false;
1216 if (! flag_var_tracking_uninit)
1217 initialized = VAR_INIT_STATUS_INITIALIZED;
1219 for (i = 0; i < var->n_var_parts; i++)
1221 location_chain node;
1222 location_chain *nextp;
1224 new_var->var_part[i].offset = var->var_part[i].offset;
1225 nextp = &new_var->var_part[i].loc_chain;
1226 for (node = var->var_part[i].loc_chain; node; node = node->next)
1228 location_chain new_lc;
1230 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1231 new_lc->next = NULL;
1232 if (node->init > initialized)
1233 new_lc->init = node->init;
1235 new_lc->init = initialized;
1236 if (node->set_src && !(MEM_P (node->set_src)))
1237 new_lc->set_src = node->set_src;
1239 new_lc->set_src = NULL;
1240 new_lc->loc = node->loc;
1243 nextp = &new_lc->next;
1246 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1249 dst_can_be_shared = false;
1250 if (shared_hash_shared (set->vars))
1251 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1252 else if (set->traversed_vars && set->vars != set->traversed_vars)
1253 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1255 if (var->in_changed_variables)
1258 = htab_find_slot_with_hash (changed_variables, var->dv,
1259 dv_htab_hash (var->dv), NO_INSERT);
1260 gcc_assert (*cslot == (void *) var);
1261 var->in_changed_variables = false;
1262 variable_htab_free (var);
1264 new_var->in_changed_variables = true;
1269 /* Add a variable from *SLOT to hash table DATA and increase its reference count. */
1273 vars_copy_1 (void **slot, void *data)
1275 htab_t dst = (htab_t) data;
1279 src = (variable) *slot;
1282 dstp = htab_find_slot_with_hash (dst, src->dv,
1283 dv_htab_hash (src->dv),
1287 /* Continue traversing the hash table. */
1291 /* Copy all variables from hash table SRC to hash table DST. */
1294 vars_copy (htab_t dst, htab_t src)
1296 htab_traverse_noresize (src, vars_copy_1, dst);
1299 /* Map a decl to its main debug decl. */
1302 var_debug_decl (tree decl)
1304 if (decl && DECL_P (decl)
1305 && DECL_DEBUG_EXPR_IS_FROM (decl) && DECL_DEBUG_EXPR (decl)
1306 && DECL_P (DECL_DEBUG_EXPR (decl)))
1307 decl = DECL_DEBUG_EXPR (decl);
1312 /* Set the register LOC to contain DV, OFFSET. */
1315 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1316 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1317 enum insert_option iopt)
1320 bool decl_p = dv_is_decl_p (dv);
1323 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1325 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1326 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1327 && node->offset == offset)
1330 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1331 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1334 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1337 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1340 tree decl = REG_EXPR (loc);
1341 HOST_WIDE_INT offset = REG_OFFSET (loc);
1343 var_reg_decl_set (set, loc, initialized,
1344 dv_from_decl (decl), offset, set_src, INSERT);
1347 static enum var_init_status
1348 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1352 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1354 if (! flag_var_tracking_uninit)
1355 return VAR_INIT_STATUS_INITIALIZED;
1357 var = shared_hash_find (set->vars, dv);
1360 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1362 location_chain nextp;
1363 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1364 if (rtx_equal_p (nextp->loc, loc))
1366 ret_val = nextp->init;
1375 /* Delete current content of register LOC in dataflow set SET and set
1376 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1377 MODIFY is true, any other live copies of the same variable part are
1378 also deleted from the dataflow set, otherwise the variable part is
1379 assumed to be copied from another location holding the same
1383 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1384 enum var_init_status initialized, rtx set_src)
1386 tree decl = REG_EXPR (loc);
1387 HOST_WIDE_INT offset = REG_OFFSET (loc);
1391 decl = var_debug_decl (decl);
1393 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1394 initialized = get_init_value (set, loc, dv_from_decl (decl));
1396 nextp = &set->regs[REGNO (loc)];
1397 for (node = *nextp; node; node = next)
1400 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1402 delete_variable_part (set, node->loc, node->dv, node->offset);
1403 pool_free (attrs_pool, node);
1409 nextp = &node->next;
1413 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1414 var_reg_set (set, loc, initialized, set_src);
1417 /* Delete the association of register LOC in dataflow set SET with any
1418 variables that aren't onepart. If CLOBBER is true, also delete any
1419 other live copies of the same variable part, and delete the
1420 association with onepart dvs too. */
1423 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1425 attrs *nextp = &set->regs[REGNO (loc)];
1430 tree decl = REG_EXPR (loc);
1431 HOST_WIDE_INT offset = REG_OFFSET (loc);
1433 decl = var_debug_decl (decl);
1435 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1438 for (node = *nextp; node; node = next)
1441 if (clobber || !dv_onepart_p (node->dv))
1443 delete_variable_part (set, node->loc, node->dv, node->offset);
1444 pool_free (attrs_pool, node);
1448 nextp = &node->next;
1452 /* Delete content of register with number REGNO in dataflow set SET. */
1455 var_regno_delete (dataflow_set *set, int regno)
1457 attrs *reg = &set->regs[regno];
1460 for (node = *reg; node; node = next)
1463 delete_variable_part (set, node->loc, node->dv, node->offset);
1464 pool_free (attrs_pool, node);
1469 /* Set the location of DV, OFFSET as the MEM LOC. */
1472 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1473 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1474 enum insert_option iopt)
1476 if (dv_is_decl_p (dv))
1477 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1479 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1482 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set SET to LOC.
1484 Adjust the address first if it is stack pointer based. */
1487 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1490 tree decl = MEM_EXPR (loc);
1491 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1493 var_mem_decl_set (set, loc, initialized,
1494 dv_from_decl (decl), offset, set_src, INSERT);
1497 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1498 dataflow set SET to LOC. If MODIFY is true, any other live copies
1499 of the same variable part are also deleted from the dataflow set,
1500 otherwise the variable part is assumed to be copied from another
1501 location holding the same part.
1502 Adjust the address first if it is stack pointer based. */
1505 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1506 enum var_init_status initialized, rtx set_src)
1508 tree decl = MEM_EXPR (loc);
1509 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1511 decl = var_debug_decl (decl);
1513 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1514 initialized = get_init_value (set, loc, dv_from_decl (decl));
1517 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1518 var_mem_set (set, loc, initialized, set_src);
1521 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1522 true, also delete any other live copies of the same variable part.
1523 Adjust the address first if it is stack pointer based. */
1526 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1528 tree decl = MEM_EXPR (loc);
1529 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1531 decl = var_debug_decl (decl);
1533 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1534 delete_variable_part (set, loc, dv_from_decl (decl), offset);
1537 /* Bind a value to a location it was just stored in. If MODIFIED
1538 holds, assume the location was modified, detaching it from any
1539 values bound to it. */
1542 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
1544 cselib_val *v = CSELIB_VAL_PTR (val);
1546 gcc_assert (cselib_preserved_value_p (v));
1550 fprintf (dump_file, "%i: ", INSN_UID (insn));
1551 print_inline_rtx (dump_file, val, 0);
1552 fprintf (dump_file, " stored in ");
1553 print_inline_rtx (dump_file, loc, 0);
1556 struct elt_loc_list *l;
1557 for (l = v->locs; l; l = l->next)
1559 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
1560 print_inline_rtx (dump_file, l->loc, 0);
1563 fprintf (dump_file, "\n");
1569 var_regno_delete (set, REGNO (loc));
1570 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1571 dv_from_value (val), 0, NULL_RTX, INSERT);
1573 else if (MEM_P (loc))
1574 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1575 dv_from_value (val), 0, NULL_RTX, INSERT);
1577 set_variable_part (set, loc, dv_from_value (val), 0,
1578 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1581 /* Reset this node, detaching all its equivalences. Return the slot
1582 in the variable hash table that holds dv, if there is one. */
1585 val_reset (dataflow_set *set, decl_or_value dv)
1587 variable var = shared_hash_find (set->vars, dv) ;
1588 location_chain node;
1591 if (!var || !var->n_var_parts)
1594 gcc_assert (var->n_var_parts == 1);
1597 for (node = var->var_part[0].loc_chain; node; node = node->next)
1598 if (GET_CODE (node->loc) == VALUE
1599 && canon_value_cmp (node->loc, cval))
1602 for (node = var->var_part[0].loc_chain; node; node = node->next)
1603 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
1605 /* Redirect the equivalence link to the new canonical
1606 value, or simply remove it if it would point at
1609 set_variable_part (set, cval, dv_from_value (node->loc),
1610 0, node->init, node->set_src, NO_INSERT);
1611 delete_variable_part (set, dv_as_value (dv),
1612 dv_from_value (node->loc), 0);
1617 decl_or_value cdv = dv_from_value (cval);
1619 /* Keep the remaining values connected, accumulating links
1620 in the canonical value. */
1621 for (node = var->var_part[0].loc_chain; node; node = node->next)
1623 if (node->loc == cval)
1625 else if (GET_CODE (node->loc) == REG)
1626 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
1627 node->set_src, NO_INSERT);
1628 else if (GET_CODE (node->loc) == MEM)
1629 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
1630 node->set_src, NO_INSERT);
1632 set_variable_part (set, node->loc, cdv, 0,
1633 node->init, node->set_src, NO_INSERT);
1637 /* We remove this last, to make sure that the canonical value is not
1638 removed to the point of requiring reinsertion. */
1640 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
1642 clobber_variable_part (set, NULL, dv, 0, NULL);
1644 /* ??? Should we make sure there aren't other available values or
1645 variables whose values involve this one other than by
1646 equivalence? E.g., at the very least we should reset MEMs, those
1647 shouldn't be too hard to find by cselib-looking up the value as an
1648 address, then locating the resulting value in our own hash table. */
1652 /* Find the values in a given location and map the val to another
1653 value, if it is unique, or add the location as one holding the value. */
1657 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
1659 decl_or_value dv = dv_from_value (val);
1661 if (dump_file && (dump_flags & TDF_DETAILS))
1664 fprintf (dump_file, "%i: ", INSN_UID (insn));
1666 fprintf (dump_file, "head: ");
1667 print_inline_rtx (dump_file, val, 0);
1668 fputs (" is at ", dump_file);
1669 print_inline_rtx (dump_file, loc, 0);
1670 fputc ('\n', dump_file);
1673 val_reset (set, dv);
1677 attrs node, found = NULL;
1679 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1680 if (dv_is_value_p (node->dv)
1681 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
1685 /* Map incoming equivalences. ??? Wouldn't it be nice if
1686 we just started sharing the location lists? Maybe a
1687 circular list ending at the value itself or some
1689 set_variable_part (set, dv_as_value (node->dv),
1690 dv_from_value (val), node->offset,
1691 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1692 set_variable_part (set, val, node->dv, node->offset,
1693 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1696 /* If we didn't find any equivalence, we need to remember that
1697 this value is held in the named register. */
1699 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1700 dv_from_value (val), 0, NULL_RTX, INSERT);
1702 else if (MEM_P (loc))
1703 /* ??? Merge equivalent MEMs. */
1704 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1705 dv_from_value (val), 0, NULL_RTX, INSERT);
1707 /* ??? Merge equivalent expressions. */
1708 set_variable_part (set, loc, dv_from_value (val), 0,
1709 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1712 /* Initialize dataflow set SET to be empty. */
1716 dataflow_set_init (dataflow_set *set)
1718 init_attrs_list_set (set->regs);
1719 set->vars = shared_hash_copy (empty_shared_hash);
1720 set->stack_adjust = 0;
1721 set->traversed_vars = NULL;
1724 /* Delete the contents of dataflow set SET. */
1727 dataflow_set_clear (dataflow_set *set)
1731 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1732 attrs_list_clear (&set->regs[i]);
1734 shared_hash_destroy (set->vars);
1735 set->vars = shared_hash_copy (empty_shared_hash);
1738 /* Copy the contents of dataflow set SRC to DST. */
1741 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
1745 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1746 attrs_list_copy (&dst->regs[i], src->regs[i]);
1748 shared_hash_destroy (dst->vars);
1749 dst->vars = shared_hash_copy (src->vars);
1750 dst->stack_adjust = src->stack_adjust;
1753 /* Information for merging lists of locations for a given offset of variable. */
1755 struct variable_union_info
1757 /* Node of the location chain. */
1760 /* The sum of positions in the input chains. */
1763 /* The position in the chain of DST dataflow set. */
1767 /* Buffer for location list sorting and its allocated size. */
1768 static struct variable_union_info *vui_vec;
1769 static int vui_allocated;
1771 /* Compare function for qsort, order the structures by POS element. */
1774 variable_union_info_cmp_pos (const void *n1, const void *n2)
1776 const struct variable_union_info *const i1 =
1777 (const struct variable_union_info *) n1;
1778 const struct variable_union_info *const i2 =
1779 ( const struct variable_union_info *) n2;
1781 if (i1->pos != i2->pos)
1782 return i1->pos - i2->pos;
1784 return (i1->pos_dst - i2->pos_dst);
1787 /* Compute union of location parts of variable *SLOT and the same variable
1788 from hash table DATA. Compute "sorted" union of the location chains
1789 for common offsets, i.e. the locations of a variable part are sorted by
1790 a priority where the priority is the sum of the positions in the 2 chains
1791 (if a location is only in one list the position in the second list is
1792 defined to be larger than the length of the chains).
1793 When we are updating the location parts the newest location is in the
1794 beginning of the chain, so when we do the described "sorted" union
1795 we keep the newest locations in the beginning. */
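/* A small worked example of the priority scheme above, with hypothetical
   locations A, B and C: if the SRC chain is (A B) and the DST chain is (B C),
   then B, present in both chains, gets priority 1 + 0 = 1, while A and C,
   each present in only one chain, get priorities 0 + 4 = 4 and 1 + 4 = 5
   (the missing position being replaced by src_l + dst_l = 4).  The resulting
   chain is therefore (B A C), with the common location first.  */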
1798 variable_union (void **slot, void *data)
1802 dataflow_set *set = (dataflow_set *) data;
1805 src = (variable) *slot;
1806 dstp = shared_hash_find_slot (set->vars, src->dv);
1807 if (!dstp || !*dstp)
1811 dst_can_be_shared = false;
1813 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
1817 /* Continue traversing the hash table. */
1821 dst = (variable) *dstp;
1823 gcc_assert (src->n_var_parts);
1825 /* We can combine one-part variables very efficiently, because their
1826 entries are in canonical order. */
1827 if (dv_onepart_p (src->dv))
1829 location_chain *nodep, dnode, snode;
1831 gcc_assert (src->n_var_parts == 1);
1832 gcc_assert (dst->n_var_parts == 1);
1834 snode = src->var_part[0].loc_chain;
1837 restart_onepart_unshared:
1838 nodep = &dst->var_part[0].loc_chain;
1844 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
1848 location_chain nnode;
1850 if (shared_var_p (dst, set->vars))
1852 dstp = unshare_variable (set, dstp, dst,
1853 VAR_INIT_STATUS_INITIALIZED);
1854 dst = (variable)*dstp;
1855 goto restart_onepart_unshared;
1858 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
1859 nnode->loc = snode->loc;
1860 nnode->init = snode->init;
1861 if (!snode->set_src || MEM_P (snode->set_src))
1862 nnode->set_src = NULL;
1864 nnode->set_src = snode->set_src;
1865 nnode->next = dnode;
1868 #ifdef ENABLE_CHECKING
1870 gcc_assert (rtx_equal_p (dnode->loc, snode->loc));
1874 snode = snode->next;
1876 nodep = &dnode->next;
1883 /* Count the number of location parts, result is K. */
1884 for (i = 0, j = 0, k = 0;
1885 i < src->n_var_parts && j < dst->n_var_parts; k++)
1887 if (src->var_part[i].offset == dst->var_part[j].offset)
1892 else if (src->var_part[i].offset < dst->var_part[j].offset)
1897 k += src->n_var_parts - i;
1898 k += dst->n_var_parts - j;
1900 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
1901 thus there are at most MAX_VAR_PARTS different offsets. */
1902 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
1904 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
1906 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
1907 dst = (variable)*dstp;
1910 i = src->n_var_parts - 1;
1911 j = dst->n_var_parts - 1;
1912 dst->n_var_parts = k;
1914 for (k--; k >= 0; k--)
1916 location_chain node, node2;
1918 if (i >= 0 && j >= 0
1919 && src->var_part[i].offset == dst->var_part[j].offset)
1921 /* Compute the "sorted" union of the chains, i.e. the locations which
1922 are in both chains go first, they are sorted by the sum of
1923 positions in the chains. */
1926 struct variable_union_info *vui;
1928 /* If DST is shared compare the location chains.
1929 If they are different we will modify the chain in DST with
1930 high probability so make a copy of DST. */
1931 if (shared_var_p (dst, set->vars))
1933 for (node = src->var_part[i].loc_chain,
1934 node2 = dst->var_part[j].loc_chain; node && node2;
1935 node = node->next, node2 = node2->next)
1937 if (!((REG_P (node2->loc)
1938 && REG_P (node->loc)
1939 && REGNO (node2->loc) == REGNO (node->loc))
1940 || rtx_equal_p (node2->loc, node->loc)))
1942 if (node2->init < node->init)
1943 node2->init = node->init;
1949 dstp = unshare_variable (set, dstp, dst,
1950 VAR_INIT_STATUS_UNKNOWN);
1951 dst = (variable)*dstp;
1956 for (node = src->var_part[i].loc_chain; node; node = node->next)
1959 for (node = dst->var_part[j].loc_chain; node; node = node->next)
1964 /* The most common case, much simpler, no qsort is needed. */
1965 location_chain dstnode = dst->var_part[j].loc_chain;
1966 dst->var_part[k].loc_chain = dstnode;
1967 dst->var_part[k].offset = dst->var_part[j].offset;
1969 for (node = src->var_part[i].loc_chain; node; node = node->next)
1970 if (!((REG_P (dstnode->loc)
1971 && REG_P (node->loc)
1972 && REGNO (dstnode->loc) == REGNO (node->loc))
1973 || rtx_equal_p (dstnode->loc, node->loc)))
1975 location_chain new_node;
1977 /* Copy the location from SRC. */
1978 new_node = (location_chain) pool_alloc (loc_chain_pool);
1979 new_node->loc = node->loc;
1980 new_node->init = node->init;
1981 if (!node->set_src || MEM_P (node->set_src))
1982 new_node->set_src = NULL;
1984 new_node->set_src = node->set_src;
1985 node2->next = new_node;
1992 if (src_l + dst_l > vui_allocated)
1994 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
1995 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2000 /* Fill in the locations from DST. */
2001 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2002 node = node->next, jj++)
2005 vui[jj].pos_dst = jj;
2007 /* Pos plus value larger than a sum of 2 valid positions. */
2008 vui[jj].pos = jj + src_l + dst_l;
2011 /* Fill in the locations from SRC. */
2013 for (node = src->var_part[i].loc_chain, ii = 0; node;
2014 node = node->next, ii++)
2016 /* Find location from NODE. */
2017 for (jj = 0; jj < dst_l; jj++)
2019 if ((REG_P (vui[jj].lc->loc)
2020 && REG_P (node->loc)
2021 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2022 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2024 vui[jj].pos = jj + ii;
2028 if (jj >= dst_l) /* The location has not been found. */
2030 location_chain new_node;
2032 /* Copy the location from SRC. */
2033 new_node = (location_chain) pool_alloc (loc_chain_pool);
2034 new_node->loc = node->loc;
2035 new_node->init = node->init;
2036 if (!node->set_src || MEM_P (node->set_src))
2037 new_node->set_src = NULL;
2039 new_node->set_src = node->set_src;
2040 vui[n].lc = new_node;
2041 vui[n].pos_dst = src_l + dst_l;
2042 vui[n].pos = ii + src_l + dst_l;
2049 /* Special case of a still very common case. For dst_l == 2
2050 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2051 vui[i].pos == i + src_l + dst_l. */
2052 if (vui[0].pos > vui[1].pos)
2054 /* Order should be 1, 0, 2... */
2055 dst->var_part[k].loc_chain = vui[1].lc;
2056 vui[1].lc->next = vui[0].lc;
2059 vui[0].lc->next = vui[2].lc;
2060 vui[n - 1].lc->next = NULL;
2063 vui[0].lc->next = NULL;
2068 dst->var_part[k].loc_chain = vui[0].lc;
2069 if (n >= 3 && vui[2].pos < vui[1].pos)
2071 /* Order should be 0, 2, 1, 3... */
2072 vui[0].lc->next = vui[2].lc;
2073 vui[2].lc->next = vui[1].lc;
2076 vui[1].lc->next = vui[3].lc;
2077 vui[n - 1].lc->next = NULL;
2080 vui[1].lc->next = NULL;
2085 /* Order should be 0, 1, 2... */
2087 vui[n - 1].lc->next = NULL;
2090 for (; ii < n; ii++)
2091 vui[ii - 1].lc->next = vui[ii].lc;
2095 qsort (vui, n, sizeof (struct variable_union_info),
2096 variable_union_info_cmp_pos);
2098 /* Reconnect the nodes in sorted order. */
2099 for (ii = 1; ii < n; ii++)
2100 vui[ii - 1].lc->next = vui[ii].lc;
2101 vui[n - 1].lc->next = NULL;
2102 dst->var_part[k].loc_chain = vui[0].lc;
2105 dst->var_part[k].offset = dst->var_part[j].offset;
2110 else if ((i >= 0 && j >= 0
2111 && src->var_part[i].offset < dst->var_part[j].offset)
2114 dst->var_part[k] = dst->var_part[j];
2117 else if ((i >= 0 && j >= 0
2118 && src->var_part[i].offset > dst->var_part[j].offset)
2121 location_chain *nextp;
2123 /* Copy the chain from SRC. */
2124 nextp = &dst->var_part[k].loc_chain;
2125 for (node = src->var_part[i].loc_chain; node; node = node->next)
2127 location_chain new_lc;
2129 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2130 new_lc->next = NULL;
2131 new_lc->init = node->init;
2132 if (!node->set_src || MEM_P (node->set_src))
2133 new_lc->set_src = NULL;
2135 new_lc->set_src = node->set_src;
2136 new_lc->loc = node->loc;
2139 nextp = &new_lc->next;
2142 dst->var_part[k].offset = src->var_part[i].offset;
2145 dst->var_part[k].cur_loc = NULL;
2148 if (flag_var_tracking_uninit)
2149 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2151 location_chain node, node2;
2152 for (node = src->var_part[i].loc_chain; node; node = node->next)
2153 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2154 if (rtx_equal_p (node->loc, node2->loc))
2156 if (node->init > node2->init)
2157 node2->init = node->init;
2161 /* Continue traversing the hash table. */
2165 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2168 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2172 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2173 attrs_list_union (&dst->regs[i], src->regs[i]);
2175 if (dst->vars == empty_shared_hash)
2177 shared_hash_destroy (dst->vars);
2178 dst->vars = shared_hash_copy (src->vars);
2181 htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
2184 /* Whether the value is currently being expanded. */
2185 #define VALUE_RECURSED_INTO(x) \
2186 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2187 /* Whether the value is in changed_variables hash table. */
2188 #define VALUE_CHANGED(x) \
2189 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2190 /* Whether the decl is in changed_variables hash table. */
2191 #define DECL_CHANGED(x) TREE_VISITED (x)
2193 /* Record that DV has been added to, resp. removed from, the changed_variables hash table. */
2197 set_dv_changed (decl_or_value dv, bool newv)
2199 if (dv_is_value_p (dv))
2200 VALUE_CHANGED (dv_as_value (dv)) = newv;
2202 DECL_CHANGED (dv_as_decl (dv)) = newv;
2205 /* Return true if DV is present in changed_variables hash table. */
2208 dv_changed_p (decl_or_value dv)
2210 return (dv_is_value_p (dv)
2211 ? VALUE_CHANGED (dv_as_value (dv))
2212 : DECL_CHANGED (dv_as_decl (dv)));
2215 /* Return a location list node whose loc is rtx_equal to LOC, in the
2216 location list of a one-part variable or value VAR, or in that of
2217 any values recursively mentioned in the location lists. */
2219 static location_chain
2220 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2222 location_chain node;
2227 gcc_assert (dv_onepart_p (var->dv));
2229 if (!var->n_var_parts)
2232 gcc_assert (var->var_part[0].offset == 0);
2234 for (node = var->var_part[0].loc_chain; node; node = node->next)
2235 if (rtx_equal_p (loc, node->loc))
2237 else if (GET_CODE (node->loc) == VALUE
2238 && !VALUE_RECURSED_INTO (node->loc))
2240 decl_or_value dv = dv_from_value (node->loc);
2241 variable var = (variable)
2242 htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2246 location_chain where;
2247 VALUE_RECURSED_INTO (node->loc) = true;
2248 if ((where = find_loc_in_1pdv (loc, var, vars)))
2250 VALUE_RECURSED_INTO (node->loc) = false;
2253 VALUE_RECURSED_INTO (node->loc) = false;
2260 /* Hash table iteration argument passed to variable_merge. */
2263 /* The set in which the merge is to be inserted. */
2265 /* The set that we're iterating in. */
2267 /* The set that may contain the other dv we are to merge with. */
2269 /* Number of onepart dvs in src. */
2270 int src_onepart_cnt;
2273 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
2274 loc_cmp order, and it is maintained as such. */
2277 insert_into_intersection (location_chain *nodep, rtx loc,
2278 enum var_init_status status)
2280 location_chain node;
2283 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2284 if ((r = loc_cmp (node->loc, loc)) == 0)
2286 node->init = MIN (node->init, status);
2292 node = (location_chain) pool_alloc (loc_chain_pool);
2295 node->set_src = NULL;
2296 node->init = status;
2297 node->next = *nodep;
2301 /* Insert in DEST the intersection of the locations present in both
2302 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2303 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in DSM->dst. */
2307 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2308 location_chain s1node, variable s2var)
2310 dataflow_set *s1set = dsm->cur;
2311 dataflow_set *s2set = dsm->src;
2312 location_chain found;
2314 for (; s1node; s1node = s1node->next)
2316 if (s1node->loc == val)
2319 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2320 shared_hash_htab (s2set->vars))))
2322 insert_into_intersection (dest, s1node->loc,
2323 MIN (s1node->init, found->init));
2327 if (GET_CODE (s1node->loc) == VALUE
2328 && !VALUE_RECURSED_INTO (s1node->loc))
2330 decl_or_value dv = dv_from_value (s1node->loc);
2331 variable svar = shared_hash_find (s1set->vars, dv);
2334 if (svar->n_var_parts == 1)
2336 VALUE_RECURSED_INTO (s1node->loc) = true;
2337 intersect_loc_chains (val, dest, dsm,
2338 svar->var_part[0].loc_chain,
2340 VALUE_RECURSED_INTO (s1node->loc) = false;
2345 /* ??? if the location is equivalent to any location in src,
2346 searched recursively
2348 add to dst the values needed to represent the equivalence
2350 telling whether location S is equivalent to another dv's
2353 for each location D in the list
2355 if S and D satisfy rtx_equal_p, then it is present
2357 else if D is a value, recurse without cycles
2359 else if S and D have the same CODE and MODE
2361 for each operand oS and the corresponding oD
2363 if oS and oD are not equivalent, then S and D are not equivalent
2365 else if they are RTX vectors
2367 if any vector oS element is not equivalent to its respective oD,
2368 then S and D are not equivalent
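#if 0
/* Illustrative sketch only (not part of this file): the note above
 describes deciding whether two expressions are equivalent by recursing
 over their operands while guarding against cycles in the equivalence
 graph. The toy types and names below (struct expr, expr_equiv_p) are
 made up for the example; they are not GCC internals, and the
 recursed_into member stands in for VALUE_RECURSED_INTO. */

#include <stdbool.h>
#include <stddef.h>

struct expr
{
  int code;            /* operator kind */
  int value;           /* payload for leaves */
  int n_ops;           /* number of operands */
  struct expr *ops[2]; /* operand subexpressions */
  bool recursed_into;  /* cycle guard, like VALUE_RECURSED_INTO */
};

static bool
expr_equiv_p (struct expr *s, struct expr *d)
{
  int i;

  if (s == d)
    return true;  /* same node: trivially equivalent */
  if (s->recursed_into || d->recursed_into)
    return false; /* already on the recursion stack: cut the cycle */
  if (s->code != d->code || s->n_ops != d->n_ops)
    return false;
  if (s->n_ops == 0)
    return s->value == d->value;

  s->recursed_into = d->recursed_into = true;
  for (i = 0; i < s->n_ops; i++)
    if (!expr_equiv_p (s->ops[i], d->ops[i]))
      {
        s->recursed_into = d->recursed_into = false;
        return false;
      }
  s->recursed_into = d->recursed_into = false;
  return true;
}
#endif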
2376 /* Return -1 if X should be before Y in a location list for a 1-part
2377 variable, 1 if Y should be before X, and 0 if they're equivalent
2378 and should not appear in the list. */
2381 loc_cmp (rtx x, rtx y)
2384 RTX_CODE code = GET_CODE (x);
2394 gcc_assert (GET_MODE (x) == GET_MODE (y));
2395 if (REGNO (x) == REGNO (y))
2397 else if (REGNO (x) < REGNO (y))
2410 gcc_assert (GET_MODE (x) == GET_MODE (y));
2411 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
2417 if (GET_CODE (x) == VALUE)
2419 if (GET_CODE (y) != VALUE)
2421 /* Don't assert that the modes are the same; that is true only
2422 when not recursing. (subreg:QI (value:SI 1:1) 0)
2423 and (subreg:QI (value:DI 2:2) 0) can be compared,
2424 even when the modes are different. */
2425 if (canon_value_cmp (x, y))
2431 if (GET_CODE (y) == VALUE)
2434 if (GET_CODE (x) == GET_CODE (y))
2435 /* Compare operands below. */;
2436 else if (GET_CODE (x) < GET_CODE (y))
2441 gcc_assert (GET_MODE (x) == GET_MODE (y));
2443 if (GET_CODE (x) == DEBUG_EXPR)
2445 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2446 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
2448 #ifdef ENABLE_CHECKING
2449 gcc_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2450 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
2455 fmt = GET_RTX_FORMAT (code);
2456 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2460 if (XWINT (x, i) == XWINT (y, i))
2462 else if (XWINT (x, i) < XWINT (y, i))
2469 if (XINT (x, i) == XINT (y, i))
2471 else if (XINT (x, i) < XINT (y, i))
2478 /* Compare the vector length first. */
2479 if (XVECLEN (x, i) == XVECLEN (y, i))
2480 /* Compare the vector's elements. */;
2481 else if (XVECLEN (x, i) < XVECLEN (y, i))
2486 for (j = 0; j < XVECLEN (x, i); j++)
2487 if ((r = loc_cmp (XVECEXP (x, i, j),
2488 XVECEXP (y, i, j))))
2493 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
2499 if (XSTR (x, i) == XSTR (y, i))
2505 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2513 /* These are just backpointers, so they don't matter. */
2520 /* It is believed that rtx's at this level will never
2521 contain anything but integers and other rtx's,
2522 except for within LABEL_REFs and SYMBOL_REFs. */
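#if 0
/* Illustrative sketch only (not part of this file): loc_cmp above is a
 three-way total order, and insert_into_intersection relies on such a
 comparator to keep a singly linked location list sorted and free of
 duplicates, merging the init status when an entry is already present.
 The toy list below (struct node, key_cmp, insert_sorted) is made up
 for the example. */

#include <stdlib.h>

struct node
{
  int key;
  int init; /* analogous to the MIN'ed init status */
  struct node *next;
};

static int
key_cmp (int x, int y)
{
  return (x > y) - (x < y); /* -1, 0 or 1, like loc_cmp */
}

static void
insert_sorted (struct node **nodep, int key, int init)
{
  struct node *node;
  int r;

  for (node = *nodep; node; nodep = &node->next, node = *nodep)
    if ((r = key_cmp (node->key, key)) == 0)
      {
        /* Already present: just merge the status, as
           insert_into_intersection does with node->init. */
        if (init < node->init)
          node->init = init;
        return;
      }
    else if (r > 0)
      break; /* insertion point found: keep the list sorted */

  node = (struct node *) malloc (sizeof (struct node));
  node->key = key;
  node->init = init;
  node->next = *nodep;
  *nodep = node;
}
#endif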
2530 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2531 from VALUE to DVP. */
2534 add_value_chain (rtx *loc, void *dvp)
2536 decl_or_value dv, ldv;
2537 value_chain vc, nvc;
2540 if (GET_CODE (*loc) == VALUE)
2541 ldv = dv_from_value (*loc);
2542 else if (GET_CODE (*loc) == DEBUG_EXPR)
2543 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
2547 if (dv_as_opaque (ldv) == dvp)
2550 dv = (decl_or_value) dvp;
2551 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
2555 vc = (value_chain) pool_alloc (value_chain_pool);
2559 *slot = (void *) vc;
2563 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2564 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
2572 vc = (value_chain) *slot;
2573 nvc = (value_chain) pool_alloc (value_chain_pool);
2575 nvc->next = vc->next;
2581 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2582 from those VALUEs to DVP. */
2585 add_value_chains (decl_or_value dv, rtx loc)
2587 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
2589 add_value_chain (&loc, dv_as_opaque (dv));
2595 loc = XEXP (loc, 0);
2596 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
2599 /* If CSELIB_VAL_PTR of value DV refers to VALUEs, add backlinks from those VALUEs to DV. */
2603 add_cselib_value_chains (decl_or_value dv)
2605 struct elt_loc_list *l;
2607 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2608 for_each_rtx (&l->loc, add_value_chain, dv_as_opaque (dv));
2611 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
2612 from VALUE to DVP. */
2615 remove_value_chain (rtx *loc, void *dvp)
2617 decl_or_value dv, ldv;
2621 if (GET_CODE (*loc) == VALUE)
2622 ldv = dv_from_value (*loc);
2623 else if (GET_CODE (*loc) == DEBUG_EXPR)
2624 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
2628 if (dv_as_opaque (ldv) == dvp)
2631 dv = (decl_or_value) dvp;
2632 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
2634 for (vc = (value_chain) *slot; vc->next; vc = vc->next)
2635 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
2637 value_chain dvc = vc->next;
2638 gcc_assert (dvc->refcount > 0);
2639 if (--dvc->refcount == 0)
2641 vc->next = dvc->next;
2642 pool_free (value_chain_pool, dvc);
2643 if (vc->next == NULL && vc == (value_chain) *slot)
2645 pool_free (value_chain_pool, vc);
2646 htab_clear_slot (value_chains, slot);
2654 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
2655 from those VALUEs to DVP. */
2658 remove_value_chains (decl_or_value dv, rtx loc)
2660 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
2662 remove_value_chain (&loc, dv_as_opaque (dv));
2668 loc = XEXP (loc, 0);
2669 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
2673 /* If CSELIB_VAL_PTR of value DV refers to VALUEs, remove backlinks from those VALUEs to DV. */
2677 remove_cselib_value_chains (decl_or_value dv)
2679 struct elt_loc_list *l;
2681 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2682 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
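#if 0
/* Illustrative sketch only (not part of this file): add_value_chain and
 remove_value_chain above keep, for every value, a list of refcounted
 back-references to the entities whose locations mention it; adding an
 existing link bumps its refcount, and removing frees the link once the
 count drops to zero. The toy structures below (struct backlink,
 add_backlink, remove_backlink) are made up for the example and use a
 plain list head instead of a hash table slot. */

#include <assert.h>
#include <stdlib.h>

struct backlink
{
  void *referrer; /* analogous to the decl_or_value DV */
  int refcount;
  struct backlink *next;
};

static void
add_backlink (struct backlink **headp, void *referrer)
{
  struct backlink *bl;

  for (bl = *headp; bl; bl = bl->next)
    if (bl->referrer == referrer)
      {
        bl->refcount++; /* already linked: just count the reference */
        return;
      }

  bl = (struct backlink *) malloc (sizeof (struct backlink));
  bl->referrer = referrer;
  bl->refcount = 1;
  bl->next = *headp;
  *headp = bl;
}

static void
remove_backlink (struct backlink **headp, void *referrer)
{
  struct backlink **blp, *bl;

  for (blp = headp; (bl = *blp); blp = &bl->next)
    if (bl->referrer == referrer)
      {
        assert (bl->refcount > 0);
        if (--bl->refcount == 0)
          {
            *blp = bl->next; /* unlink and free once the last reference is gone */
            free (bl);
          }
        return;
      }
}
#endif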
2685 /* Check the order of entries in one-part variables. */
2688 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
2690 variable var = (variable) *slot;
2691 decl_or_value dv = var->dv;
2692 location_chain node, next;
2694 #ifdef ENABLE_RTL_CHECKING
2696 for (i = 0; i < var->n_var_parts; i++)
2697 gcc_assert (var->var_part[i].cur_loc == NULL);
2698 gcc_assert (!var->cur_loc_changed && !var->in_changed_variables);
2701 if (!dv_onepart_p (dv))
2704 gcc_assert (var->n_var_parts == 1);
2705 node = var->var_part[0].loc_chain;
2708 while ((next = node->next))
2710 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
2718 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
2719 more likely to be chosen as canonical for an equivalence set.
2720 Ensure less likely values can reach more likely neighbors, making
2721 the connections bidirectional. */
2724 canonicalize_values_mark (void **slot, void *data)
2726 dataflow_set *set = (dataflow_set *)data;
2727 variable var = (variable) *slot;
2728 decl_or_value dv = var->dv;
2730 location_chain node;
2732 if (!dv_is_value_p (dv))
2735 gcc_assert (var->n_var_parts == 1);
2737 val = dv_as_value (dv);
2739 for (node = var->var_part[0].loc_chain; node; node = node->next)
2740 if (GET_CODE (node->loc) == VALUE)
2742 if (canon_value_cmp (node->loc, val))
2743 VALUE_RECURSED_INTO (val) = true;
2746 decl_or_value odv = dv_from_value (node->loc);
2747 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
2749 oslot = set_slot_part (set, val, oslot, odv, 0,
2750 node->init, NULL_RTX);
2752 VALUE_RECURSED_INTO (node->loc) = true;
2759 /* Remove redundant entries from equivalence lists in onepart
2760 variables, canonicalizing equivalence sets into star shapes. */
2763 canonicalize_values_star (void **slot, void *data)
2765 dataflow_set *set = (dataflow_set *)data;
2766 variable var = (variable) *slot;
2767 decl_or_value dv = var->dv;
2768 location_chain node;
2775 if (!dv_onepart_p (dv))
2778 gcc_assert (var->n_var_parts == 1);
2780 if (dv_is_value_p (dv))
2782 cval = dv_as_value (dv);
2783 if (!VALUE_RECURSED_INTO (cval))
2785 VALUE_RECURSED_INTO (cval) = false;
2795 gcc_assert (var->n_var_parts == 1);
2797 for (node = var->var_part[0].loc_chain; node; node = node->next)
2798 if (GET_CODE (node->loc) == VALUE)
2801 if (VALUE_RECURSED_INTO (node->loc))
2803 if (canon_value_cmp (node->loc, cval))
2812 if (!has_marks || dv_is_decl_p (dv))
2815 /* Keep it marked so that we revisit it, either after visiting a
2816 child node, or after visiting a new parent that might be found out to be canonical for it. */
2818 VALUE_RECURSED_INTO (val) = true;
2820 for (node = var->var_part[0].loc_chain; node; node = node->next)
2821 if (GET_CODE (node->loc) == VALUE
2822 && VALUE_RECURSED_INTO (node->loc))
2826 VALUE_RECURSED_INTO (cval) = false;
2827 dv = dv_from_value (cval);
2828 slot = shared_hash_find_slot_noinsert (set->vars, dv);
2831 gcc_assert (dv_is_decl_p (var->dv));
2832 /* The canonical value was reset and dropped. Remove it. */
2834 clobber_variable_part (set, NULL, var->dv, 0, NULL);
2837 var = (variable)*slot;
2838 gcc_assert (dv_is_value_p (var->dv));
2839 if (var->n_var_parts == 0)
2841 gcc_assert (var->n_var_parts == 1);
2845 VALUE_RECURSED_INTO (val) = false;
2850 /* Push values to the canonical one. */
2851 cdv = dv_from_value (cval);
2852 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
2854 for (node = var->var_part[0].loc_chain; node; node = node->next)
2855 if (node->loc != cval)
2857 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
2858 node->init, NULL_RTX);
2859 if (GET_CODE (node->loc) == VALUE)
2861 decl_or_value ndv = dv_from_value (node->loc);
2863 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
2866 if (canon_value_cmp (node->loc, val))
2868 /* If it could have been a local minimum, it's not any more,
2869 since it's now a neighbor of cval, so it may have to push
2870 to it. Conversely, if it wouldn't have prevailed over
2871 val, then whatever mark it has is fine: if it was to
2872 push, it will now push to a more canonical node, but if
2873 it wasn't, then it has already pushed any values it might need to. */
2875 VALUE_RECURSED_INTO (node->loc) = true;
2876 /* Make sure we visit node->loc by ensuring cval is visited too. */
2878 VALUE_RECURSED_INTO (cval) = true;
2880 else if (!VALUE_RECURSED_INTO (node->loc))
2881 /* If we have no need to "recurse" into this node, it's
2882 already "canonicalized", so drop the link to the old parent. */
2884 clobber_variable_part (set, cval, ndv, 0, NULL);
2886 else if (GET_CODE (node->loc) == REG)
2888 attrs list = set->regs[REGNO (node->loc)], *listp;
2890 /* Change an existing attribute referring to dv so that it
2891 refers to cdv, removing any duplicate this might
2892 introduce, and checking that no previous duplicates
2893 existed, all in a single pass. */
2897 if (list->offset == 0
2898 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2899 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
2906 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2909 for (listp = &list->next; (list = *listp); listp = &list->next)
2914 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2916 *listp = list->next;
2917 pool_free (attrs_pool, list);
2922 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
2925 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2927 for (listp = &list->next; (list = *listp); listp = &list->next)
2932 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2934 *listp = list->next;
2935 pool_free (attrs_pool, list);
2940 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
2949 if (list->offset == 0
2950 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2951 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
2961 cslot = set_slot_part (set, val, cslot, cdv, 0,
2962 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
2964 slot = clobber_slot_part (set, cval, slot, 0, NULL);
2966 /* Variable may have been unshared. */
2967 var = (variable)*slot;
2968 gcc_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
2969 && var->var_part[0].loc_chain->next == NULL);
2971 if (VALUE_RECURSED_INTO (cval))
2972 goto restart_with_cval;
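#if 0
/* Illustrative sketch only (not part of this file): the goal of
 canonicalize_values_star above is to reshape each equivalence class so
 that every member points directly at the single canonical value, i.e.
 the class becomes a star. The toy flattening below (find_canonical,
 canonicalize_star) is made up for the example; equivalences are stored
 as parent indices, and the root of each chain stands in for the value
 that canon_value_cmp would pick. */

static int
find_canonical (const int *parent, int x)
{
  while (parent[x] != x)
    x = parent[x]; /* follow the chain up to its root */
  return x;
}

static void
canonicalize_star (int *parent, int n)
{
  int i;

  /* After this loop every element points directly at its root, so each
     equivalence class is a star with the canonical value at the center. */
  for (i = 0; i < n; i++)
    parent[i] = find_canonical (parent, i);
}
#endif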
2977 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
2978 corresponding entry in DSM->src. Multi-part variables are combined
2979 with variable_union, whereas onepart dvs are combined with intersection. */
2983 variable_merge_over_cur (void **s1slot, void *data)
2985 struct dfset_merge *dsm = (struct dfset_merge *)data;
2986 dataflow_set *dst = dsm->dst;
2988 variable s1var = (variable) *s1slot;
2989 variable s2var, dvar = NULL;
2990 decl_or_value dv = s1var->dv;
2991 bool onepart = dv_onepart_p (dv);
2994 location_chain node, *nodep;
2996 /* If the incoming onepart variable has an empty location list, then
2997 the intersection will be just as empty. For other variables,
2998 it's always union. */
2999 gcc_assert (s1var->n_var_parts);
3000 gcc_assert (s1var->var_part[0].loc_chain);
3003 return variable_union (s1slot, dst);
3005 gcc_assert (s1var->n_var_parts == 1);
3006 gcc_assert (s1var->var_part[0].offset == 0);
3008 dvhash = dv_htab_hash (dv);
3009 if (dv_is_value_p (dv))
3010 val = dv_as_value (dv);
3014 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3017 dst_can_be_shared = false;
3021 dsm->src_onepart_cnt--;
3022 gcc_assert (s2var->var_part[0].loc_chain);
3023 gcc_assert (s2var->n_var_parts == 1);
3024 gcc_assert (s2var->var_part[0].offset == 0);
3026 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3029 dvar = (variable)*dstslot;
3030 gcc_assert (dvar->refcount == 1);
3031 gcc_assert (dvar->n_var_parts == 1);
3032 gcc_assert (dvar->var_part[0].offset == 0);
3033 nodep = &dvar->var_part[0].loc_chain;
3041 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3043 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3045 *dstslot = dvar = s2var;
3050 dst_can_be_shared = false;
3052 intersect_loc_chains (val, nodep, dsm,
3053 s1var->var_part[0].loc_chain, s2var);
3059 dvar = (variable) pool_alloc (dv_pool (dv));
3062 dvar->n_var_parts = 1;
3063 dvar->cur_loc_changed = false;
3064 dvar->in_changed_variables = false;
3065 dvar->var_part[0].offset = 0;
3066 dvar->var_part[0].loc_chain = node;
3067 dvar->var_part[0].cur_loc = NULL;
3070 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3072 gcc_assert (!*dstslot);
3080 nodep = &dvar->var_part[0].loc_chain;
3081 while ((node = *nodep))
3083 location_chain *nextp = &node->next;
3085 if (GET_CODE (node->loc) == REG)
3089 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3090 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3091 && dv_is_value_p (list->dv))
3095 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3097 /* If this value became canonical for another value that had
3098 this register, we want to leave it alone. */
3099 else if (dv_as_value (list->dv) != val)
3101 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3103 node->init, NULL_RTX);
3104 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3106 /* Since nextp points into the removed node, we can't
3107 use it. The pointer to the next node moved to nodep.
3108 However, if the variable we're walking is unshared
3109 during our walk, we'll keep walking the location list
3110 of the previously-shared variable, in which case the
3111 node won't have been removed, and we'll want to skip
3112 it. That's why we test *nodep here. */
3118 /* Canonicalization puts registers first, so we don't have to walk it all. */
3124 if (dvar != (variable)*dstslot)
3125 dvar = (variable)*dstslot;
3126 nodep = &dvar->var_part[0].loc_chain;
3130 /* Mark all referenced nodes for canonicalization, and make sure
3131 we have mutual equivalence links. */
3132 VALUE_RECURSED_INTO (val) = true;
3133 for (node = *nodep; node; node = node->next)
3134 if (GET_CODE (node->loc) == VALUE)
3136 VALUE_RECURSED_INTO (node->loc) = true;
3137 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3138 node->init, NULL, INSERT);
3141 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3142 gcc_assert (*dstslot == dvar);
3143 canonicalize_values_star (dstslot, dst);
3144 #ifdef ENABLE_CHECKING
3146 == shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash));
3148 dvar = (variable)*dstslot;
3152 bool has_value = false, has_other = false;
3154 /* If we have one value and anything else, we're going to
3155 canonicalize this, so make sure all values have an entry in
3156 the table and are marked for canonicalization. */
3157 for (node = *nodep; node; node = node->next)
3159 if (GET_CODE (node->loc) == VALUE)
3161 /* If this was marked during register canonicalization,
3162 we know we have to canonicalize values. */
3177 if (has_value && has_other)
3179 for (node = *nodep; node; node = node->next)
3181 if (GET_CODE (node->loc) == VALUE)
3183 decl_or_value dv = dv_from_value (node->loc);
3186 if (shared_hash_shared (dst->vars))
3187 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3189 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3193 variable var = (variable) pool_alloc (dv_pool (dv));
3196 var->n_var_parts = 1;
3197 var->cur_loc_changed = false;
3198 var->in_changed_variables = false;
3199 var->var_part[0].offset = 0;
3200 var->var_part[0].loc_chain = NULL;
3201 var->var_part[0].cur_loc = NULL;
3205 VALUE_RECURSED_INTO (node->loc) = true;
3209 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3210 gcc_assert (*dstslot == dvar);
3211 canonicalize_values_star (dstslot, dst);
3212 #ifdef ENABLE_CHECKING
3214 == shared_hash_find_slot_noinsert_1 (dst->vars,
3217 dvar = (variable)*dstslot;
3221 if (!onepart_variable_different_p (dvar, s2var))
3223 variable_htab_free (dvar);
3224 *dstslot = dvar = s2var;
3227 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3229 variable_htab_free (dvar);
3230 *dstslot = dvar = s1var;
3232 dst_can_be_shared = false;
3235 dst_can_be_shared = false;
3240 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
3241 multi-part variable. Unions of multi-part variables and
3242 intersections of one-part ones will be handled in
3243 variable_merge_over_cur(). */
3246 variable_merge_over_src (void **s2slot, void *data)
3248 struct dfset_merge *dsm = (struct dfset_merge *)data;
3249 dataflow_set *dst = dsm->dst;
3250 variable s2var = (variable) *s2slot;
3251 decl_or_value dv = s2var->dv;
3252 bool onepart = dv_onepart_p (dv);
3256 void **dstp = shared_hash_find_slot (dst->vars, dv);
3262 dsm->src_onepart_cnt++;
3266 /* Combine dataflow set information from SRC2 into DST. */
3270 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
3272 dataflow_set cur = *dst;
3273 dataflow_set *src1 = &cur;
3274 struct dfset_merge dsm;
3276 size_t src1_elems, src2_elems;
3278 src1_elems = htab_elements (shared_hash_htab (src1->vars));
3279 src2_elems = htab_elements (shared_hash_htab (src2->vars));
3280 dataflow_set_init (dst);
3281 dst->stack_adjust = cur.stack_adjust;
3282 shared_hash_destroy (dst->vars);
3283 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3284 dst->vars->refcount = 1;
3286 = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
3287 variable_htab_eq, variable_htab_free);
3289 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3290 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
3295 dsm.src_onepart_cnt = 0;
3297 htab_traverse (shared_hash_htab (dsm.src->vars), variable_merge_over_src,
3299 htab_traverse (shared_hash_htab (dsm.cur->vars), variable_merge_over_cur,
3302 if (dsm.src_onepart_cnt)
3303 dst_can_be_shared = false;
3305 dataflow_set_destroy (src1);
3308 /* Mark register equivalences. */
3311 dataflow_set_equiv_regs (dataflow_set *set)
3316 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3318 rtx canon[NUM_MACHINE_MODES];
3320 memset (canon, 0, sizeof (canon));
3322 for (list = set->regs[i]; list; list = list->next)
3323 if (list->offset == 0 && dv_is_value_p (list->dv))
3325 rtx val = dv_as_value (list->dv);
3326 rtx *cvalp = &canon[(int)GET_MODE (val)];
3329 if (canon_value_cmp (val, cval))
3333 for (list = set->regs[i]; list; list = list->next)
3334 if (list->offset == 0 && dv_onepart_p (list->dv))
3336 rtx cval = canon[(int)GET_MODE (list->loc)];
3341 if (dv_is_value_p (list->dv))
3343 rtx val = dv_as_value (list->dv);
3348 VALUE_RECURSED_INTO (val) = true;
3349 set_variable_part (set, val, dv_from_value (cval), 0,
3350 VAR_INIT_STATUS_INITIALIZED,
3354 VALUE_RECURSED_INTO (cval) = true;
3355 set_variable_part (set, cval, list->dv, 0,
3356 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
3359 for (listp = &set->regs[i]; (list = *listp);
3360 listp = list ? &list->next : listp)
3361 if (list->offset == 0 && dv_onepart_p (list->dv))
3363 rtx cval = canon[(int)GET_MODE (list->loc)];
3369 if (dv_is_value_p (list->dv))
3371 rtx val = dv_as_value (list->dv);
3372 if (!VALUE_RECURSED_INTO (val))
3376 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3377 canonicalize_values_star (slot, set);
3384 /* Remove any redundant values in the location list of VAR, which must
3385 be unshared and 1-part. */
3388 remove_duplicate_values (variable var)
3390 location_chain node, *nodep;
3392 gcc_assert (dv_onepart_p (var->dv));
3393 gcc_assert (var->n_var_parts == 1);
3394 gcc_assert (var->refcount == 1);
3396 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3398 if (GET_CODE (node->loc) == VALUE)
3400 if (VALUE_RECURSED_INTO (node->loc))
3402 /* Remove duplicate value node. */
3403 *nodep = node->next;
3404 pool_free (loc_chain_pool, node);
3408 VALUE_RECURSED_INTO (node->loc) = true;
3410 nodep = &node->next;
3413 for (node = var->var_part[0].loc_chain; node; node = node->next)
3414 if (GET_CODE (node->loc) == VALUE)
3416 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3417 VALUE_RECURSED_INTO (node->loc) = false;
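#if 0
/* Illustrative sketch only (not part of this file):
 remove_duplicate_values above deduplicates a location list by
 temporarily marking each VALUE it keeps (via VALUE_RECURSED_INTO) and
 unlinking any later node whose value is already marked, then clearing
 the marks in a second pass. The toy version below uses a seen[] array
 as the mark; struct node and remove_duplicates are made up for the
 example, with values assumed to lie in [0, nvals). */

#include <stdbool.h>
#include <stdlib.h>

struct node
{
  int val; /* assumed to be in [0, nvals) */
  struct node *next;
};

static void
remove_duplicates (struct node **headp, int nvals)
{
  bool *seen = (bool *) calloc (nvals, sizeof (bool));
  struct node **nodep, *node;

  if (!seen)
    return;

  /* Mark each value the first time it is seen and unlink any later
     occurrence. */
  for (nodep = headp; (node = *nodep); )
    {
      if (seen[node->val])
        {
          *nodep = node->next; /* duplicate: unlink and free */
          free (node);
        }
      else
        {
          seen[node->val] = true;
          nodep = &node->next;
        }
    }

  /* Freeing seen[] clears the marks; the real code resets
     VALUE_RECURSED_INTO in a second pass instead. */
  free (seen);
}
#endif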
3422 /* Hash table iteration argument passed to the variable_post_merge_* traversals. */
3423 struct dfset_post_merge
3425 /* The new input set for the current block. */
3427 /* Pointer to the permanent input set for the current block, or NULL. */
3429 dataflow_set **permp;
3432 /* Create values for incoming expressions associated with one-part
3433 variables that don't have value numbers for them. */
3436 variable_post_merge_new_vals (void **slot, void *info)
3438 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3439 dataflow_set *set = dfpm->set;
3440 variable var = (variable)*slot;
3441 location_chain node;
3443 if (!dv_onepart_p (var->dv) || !var->n_var_parts)
3446 gcc_assert (var->n_var_parts == 1);
3448 if (dv_is_decl_p (var->dv))
3450 bool check_dupes = false;
3453 for (node = var->var_part[0].loc_chain; node; node = node->next)
3455 if (GET_CODE (node->loc) == VALUE)
3456 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
3457 else if (GET_CODE (node->loc) == REG)
3459 attrs att, *attp, *curp = NULL;
3461 if (var->refcount != 1)
3463 slot = unshare_variable (set, slot, var,
3464 VAR_INIT_STATUS_INITIALIZED);
3465 var = (variable)*slot;
3469 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3471 if (att->offset == 0
3472 && GET_MODE (att->loc) == GET_MODE (node->loc))
3474 if (dv_is_value_p (att->dv))
3476 rtx cval = dv_as_value (att->dv);
3481 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
3489 if ((*curp)->offset == 0
3490 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
3491 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
3494 curp = &(*curp)->next;
3505 *dfpm->permp = XNEW (dataflow_set);
3506 dataflow_set_init (*dfpm->permp);
3509 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
3510 att; att = att->next)
3511 if (GET_MODE (att->loc) == GET_MODE (node->loc))
3513 gcc_assert (att->offset == 0);
3514 gcc_assert (dv_is_value_p (att->dv));
3515 val_reset (set, att->dv);
3522 cval = dv_as_value (cdv);
3526 /* Create a unique value to hold this register,
3527 one that ought to be found and reused in
3528 subsequent rounds. */
3530 gcc_assert (!cselib_lookup (node->loc,
3531 GET_MODE (node->loc), 0));
3532 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
3533 cselib_preserve_value (v);
3534 cselib_invalidate_rtx (node->loc);
3536 cdv = dv_from_value (cval);
3539 "Created new value %u:%u for reg %i\n",
3540 v->uid, v->hash, REGNO (node->loc));
3543 var_reg_decl_set (*dfpm->permp, node->loc,
3544 VAR_INIT_STATUS_INITIALIZED,
3545 cdv, 0, NULL, INSERT);
3551 /* Remove attribute referring to the decl, which now
3552 uses the value for the register, already existing or
3553 to be added when we bring perm in. */
3556 pool_free (attrs_pool, att);
3561 remove_duplicate_values (var);
3567 /* Reset values in the permanent set that are not associated with the
3568 chosen expression. */
3571 variable_post_merge_perm_vals (void **pslot, void *info)
3573 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3574 dataflow_set *set = dfpm->set;
3575 variable pvar = (variable)*pslot, var;
3576 location_chain pnode;
3580 gcc_assert (dv_is_value_p (pvar->dv));
3581 gcc_assert (pvar->n_var_parts == 1);
3582 pnode = pvar->var_part[0].loc_chain;
3584 gcc_assert (!pnode->next);
3585 gcc_assert (REG_P (pnode->loc));
3589 var = shared_hash_find (set->vars, dv);
3592 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
3594 val_reset (set, dv);
3597 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
3598 if (att->offset == 0
3599 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
3600 && dv_is_value_p (att->dv))
3603 /* If there is a value associated with this register already, create an equivalence. */
3605 if (att && dv_as_value (att->dv) != dv_as_value (dv))
3607 rtx cval = dv_as_value (att->dv);
3608 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
3609 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
3614 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
3616 variable_union (pslot, set);
3622 /* Just checking stuff and registering register attributes for values. */
3626 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
3628 struct dfset_post_merge dfpm;
3633 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
3636 htab_traverse (shared_hash_htab ((*permp)->vars),
3637 variable_post_merge_perm_vals, &dfpm);
3638 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
3641 /* Return a node whose loc is a MEM that refers to EXPR in the
3642 location list of a one-part variable or value VAR, or in that of
3643 any values recursively mentioned in the location lists. */
3645 static location_chain
3646 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
3648 location_chain node;
3651 location_chain where = NULL;
3656 gcc_assert (GET_CODE (val) == VALUE);
3658 gcc_assert (!VALUE_RECURSED_INTO (val));
3660 dv = dv_from_value (val);
3661 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
3666 gcc_assert (dv_onepart_p (var->dv));
3668 if (!var->n_var_parts)
3671 gcc_assert (var->var_part[0].offset == 0);
3673 VALUE_RECURSED_INTO (val) = true;
3675 for (node = var->var_part[0].loc_chain; node; node = node->next)
3676 if (MEM_P (node->loc) && MEM_EXPR (node->loc) == expr
3677 && MEM_OFFSET (node->loc) == 0)
3682 else if (GET_CODE (node->loc) == VALUE
3683 && !VALUE_RECURSED_INTO (node->loc)
3684 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
3687 VALUE_RECURSED_INTO (val) = false;
3692 /* Return TRUE if the value of MEM may vary across a call. */
3695 mem_dies_at_call (rtx mem)
3697 tree expr = MEM_EXPR (mem);
3703 decl = get_base_address (expr);
3711 return (may_be_aliased (decl)
3712 || (!TREE_READONLY (decl) && is_global_var (decl)));
3715 /* Remove all MEMs from the location list of a hash table entry for a
3716 one-part variable, except those whose MEM attributes map back to
3717 the variable itself, directly or within a VALUE. */
3720 dataflow_set_preserve_mem_locs (void **slot, void *data)
3722 dataflow_set *set = (dataflow_set *) data;
3723 variable var = (variable) *slot;
3725 if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
3727 tree decl = dv_as_decl (var->dv);
3728 location_chain loc, *locp;
3729 bool changed = false;
3731 if (!var->n_var_parts)
3734 gcc_assert (var->n_var_parts == 1);
3736 if (shared_var_p (var, set->vars))
3738 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3740 /* We want to remove dying MEMs that don't refer to DECL. */
3742 if (GET_CODE (loc->loc) == MEM
3743 && (MEM_EXPR (loc->loc) != decl
3744 || MEM_OFFSET (loc->loc))
3745 && !mem_dies_at_call (loc->loc))
3747 /* We want to move MEMs that do refer to DECL here. */
3748 else if (GET_CODE (loc->loc) == VALUE
3749 && find_mem_expr_in_1pdv (decl, loc->loc,
3750 shared_hash_htab (set->vars)))
3757 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3758 var = (variable)*slot;
3759 gcc_assert (var->n_var_parts == 1);
3762 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3765 rtx old_loc = loc->loc;
3766 if (GET_CODE (old_loc) == VALUE)
3768 location_chain mem_node
3769 = find_mem_expr_in_1pdv (decl, loc->loc,
3770 shared_hash_htab (set->vars));
3772 /* ??? This picks up only one out of multiple MEMs that
3773 refer to the same variable. Do we ever need to be
3774 concerned about dealing with more than one, or, given
3775 that they should all map to the same variable
3776 location, their addresses will have been merged and
3777 they will be regarded as equivalent? */
3780 loc->loc = mem_node->loc;
3781 loc->set_src = mem_node->set_src;
3782 loc->init = MIN (loc->init, mem_node->init);
3786 if (GET_CODE (loc->loc) != MEM
3787 || (MEM_EXPR (loc->loc) == decl
3788 && MEM_OFFSET (loc->loc) == 0)
3789 || !mem_dies_at_call (loc->loc))
3791 if (old_loc != loc->loc && emit_notes)
3793 if (old_loc == var->var_part[0].cur_loc)
3796 var->var_part[0].cur_loc = NULL;
3797 var->cur_loc_changed = true;
3799 add_value_chains (var->dv, loc->loc);
3800 remove_value_chains (var->dv, old_loc);
3808 remove_value_chains (var->dv, old_loc);
3809 if (old_loc == var->var_part[0].cur_loc)
3812 var->var_part[0].cur_loc = NULL;
3813 var->cur_loc_changed = true;
3817 pool_free (loc_chain_pool, loc);
3820 if (!var->var_part[0].loc_chain)
3826 variable_was_changed (var, set);
3832 /* Remove all MEMs from the location list of a hash table entry for a value. */
3836 dataflow_set_remove_mem_locs (void **slot, void *data)
3838 dataflow_set *set = (dataflow_set *) data;
3839 variable var = (variable) *slot;
3841 if (dv_is_value_p (var->dv))
3843 location_chain loc, *locp;
3844 bool changed = false;
3846 gcc_assert (var->n_var_parts == 1);
3848 if (shared_var_p (var, set->vars))
3850 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3851 if (GET_CODE (loc->loc) == MEM
3852 && mem_dies_at_call (loc->loc))
3858 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3859 var = (variable)*slot;
3860 gcc_assert (var->n_var_parts == 1);
3863 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3866 if (GET_CODE (loc->loc) != MEM
3867 || !mem_dies_at_call (loc->loc))
3874 remove_value_chains (var->dv, loc->loc);
3876 /* If we have deleted the location which was last emitted,
3877 we have to emit a new location, so add the variable to the set
3878 of changed variables. */
3879 if (var->var_part[0].cur_loc == loc->loc)
3882 var->var_part[0].cur_loc = NULL;
3883 var->cur_loc_changed = true;
3885 pool_free (loc_chain_pool, loc);
3888 if (!var->var_part[0].loc_chain)
3894 variable_was_changed (var, set);
3900 /* Remove all variable-location information about call-clobbered
3901 registers, as well as associations between MEMs and VALUEs. */
3904 dataflow_set_clear_at_call (dataflow_set *set)
3908 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
3909 if (TEST_HARD_REG_BIT (call_used_reg_set, r))
3910 var_regno_delete (set, r);
3912 if (MAY_HAVE_DEBUG_INSNS)
3914 set->traversed_vars = set->vars;
3915 htab_traverse (shared_hash_htab (set->vars),
3916 dataflow_set_preserve_mem_locs, set);
3917 set->traversed_vars = set->vars;
3918 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
3920 set->traversed_vars = NULL;
3924 /* Flag whether two dataflow sets being compared contain different data. */
3926 static bool dataflow_set_different_value;
3929 variable_part_different_p (variable_part *vp1, variable_part *vp2)
3931 location_chain lc1, lc2;
3933 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
3935 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
3937 if (REG_P (lc1->loc) && REG_P (lc2->loc))
3939 if (REGNO (lc1->loc) == REGNO (lc2->loc))
3942 if (rtx_equal_p (lc1->loc, lc2->loc))
3951 /* Return true if one-part variables VAR1 and VAR2 are different.
3952 They must be in canonical order. */
3955 onepart_variable_different_p (variable var1, variable var2)
3957 location_chain lc1, lc2;
3962 gcc_assert (var1->n_var_parts == 1);
3963 gcc_assert (var2->n_var_parts == 1);
3965 lc1 = var1->var_part[0].loc_chain;
3966 lc2 = var2->var_part[0].loc_chain;
3973 if (loc_cmp (lc1->loc, lc2->loc))
3982 /* Return true if variables VAR1 and VAR2 are different. */
3985 variable_different_p (variable var1, variable var2)
3992 if (var1->n_var_parts != var2->n_var_parts)
3995 for (i = 0; i < var1->n_var_parts; i++)
3997 if (var1->var_part[i].offset != var2->var_part[i].offset)
3999 /* One-part values have locations in a canonical order. */
4000 if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
4002 gcc_assert (var1->n_var_parts == 1);
4003 gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4004 return onepart_variable_different_p (var1, var2);
4006 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4008 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4014 /* Compare variable *SLOT with the same variable in hash table DATA
4015 and set DATAFLOW_SET_DIFFERENT_VALUE if they are different. */
4018 dataflow_set_different_1 (void **slot, void *data)
4020 htab_t htab = (htab_t) data;
4021 variable var1, var2;
4023 var1 = (variable) *slot;
4024 var2 = (variable) htab_find_with_hash (htab, var1->dv,
4025 dv_htab_hash (var1->dv));
4028 dataflow_set_different_value = true;
4030 if (dump_file && (dump_flags & TDF_DETAILS))
4032 fprintf (dump_file, "dataflow difference found: removal of:\n");
4036 /* Stop traversing the hash table. */
4040 if (variable_different_p (var1, var2))
4042 dataflow_set_different_value = true;
4044 if (dump_file && (dump_flags & TDF_DETAILS))
4046 fprintf (dump_file, "dataflow difference found: old and new follow:\n");
4051 /* Stop traversing the hash table. */
4055 /* Continue traversing the hash table. */
4059 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4062 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4064 if (old_set->vars == new_set->vars)
4067 if (htab_elements (shared_hash_htab (old_set->vars))
4068 != htab_elements (shared_hash_htab (new_set->vars)))
4071 dataflow_set_different_value = false;
4073 htab_traverse (shared_hash_htab (old_set->vars), dataflow_set_different_1,
4074 shared_hash_htab (new_set->vars));
4075 /* No need to traverse the second hashtab: if both have the same number
4076 of elements and every entry of the first one was found in the second,
4077 then the second one can't have any extra entries. */
4078 return dataflow_set_different_value;
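#if 0
/* Illustrative sketch only (not part of this file):
 dataflow_set_different above relies on the fact that when two tables
 have the same number of entries and every entry of the first is found
 (and equal) in the second, the second cannot contain anything extra,
 so only one table needs to be traversed. The toy version below
 compares two duplicate-free arrays the same way; sets_differ_p and
 contains are made up for the example. */

#include <stdbool.h>

static bool
contains (const int *b, int nb, int x)
{
  int i;
  for (i = 0; i < nb; i++)
    if (b[i] == x)
      return true;
  return false;
}

static bool
sets_differ_p (const int *a, int na, const int *b, int nb)
{
  int i;

  if (na != nb)
    return true; /* different cardinality: surely different */

  /* One-sided check suffices: a subset of b with the same number of
     distinct elements as b must be all of b. */
  for (i = 0; i < na; i++)
    if (!contains (b, nb, a[i]))
      return true;

  return false;
}
#endif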
4081 /* Free the contents of dataflow set SET. */
4084 dataflow_set_destroy (dataflow_set *set)
4088 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4089 attrs_list_clear (&set->regs[i]);
4091 shared_hash_destroy (set->vars);
4095 /* Return true if RTL X contains a SYMBOL_REF. */
4098 contains_symbol_ref (rtx x)
4107 code = GET_CODE (x);
4108 if (code == SYMBOL_REF)
4111 fmt = GET_RTX_FORMAT (code);
4112 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4116 if (contains_symbol_ref (XEXP (x, i)))
4119 else if (fmt[i] == 'E')
4122 for (j = 0; j < XVECLEN (x, i); j++)
4123 if (contains_symbol_ref (XVECEXP (x, i, j)))
4131 /* Shall EXPR be tracked? */
4134 track_expr_p (tree expr, bool need_rtl)
4139 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4140 return DECL_RTL_SET_P (expr);
4142 /* If EXPR is not a parameter or a variable, do not track it. */
4143 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4146 /* It also must have a name... */
4147 if (!DECL_NAME (expr) && need_rtl)
4150 /* ... and an RTL assigned to it. */
4151 decl_rtl = DECL_RTL_IF_SET (expr);
4152 if (!decl_rtl && need_rtl)
4155 /* If this expression is really a debug alias of some other declaration, we
4156 don't need to track this expression if the ultimate declaration is ignored. */
4159 if (DECL_DEBUG_EXPR_IS_FROM (realdecl) && DECL_DEBUG_EXPR (realdecl))
4161 realdecl = DECL_DEBUG_EXPR (realdecl);
4162 /* ??? We don't yet know how to emit DW_OP_piece for a variable
4163 that has been SRA'ed. */
4164 if (!DECL_P (realdecl))
4168 /* Do not track EXPR if REALDECL should be ignored for debugging purposes. */
4170 if (DECL_IGNORED_P (realdecl))
4173 /* Do not track global variables until we are able to emit correct location lists for them. */
4175 if (TREE_STATIC (realdecl))