1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 < set < clobber < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offset of variables addressed using stack pointer),
48 the table of structures describing the locations of parts of a variable
49 and, for each physical register, a linked list of the variable parts stored in it.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 efficient deletion of the appropriate variable parts when we set or clobber the
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short so it is a good data structure here.
58 For example in the following code, register allocator may assign same
59 register to variables A and B, and both of them are stored in the same
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in RTL code. Each such note describes
74 the location of one variable at the point in instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction, we only emit these notes where the location of variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for a large variables
85 which consist of several parts, for example long long).
91 #include "coretypes.h"
95 #include "hard-reg-set.h"
96 #include "basic-block.h"
99 #include "insn-config.h"
102 #include "alloc-pool.h"
108 #include "tree-pass.h"
109 #include "tree-flow.h"
114 #include "diagnostic.h"
115 #include "pointer-set.h"
117 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
118 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
119 Currently the value is the same as IDENTIFIER_NODE, which has such
120 a property. If this compile time assertion ever fails, make sure that
121 the new tree code that equals (int) VALUE has the same property. */
/* Pre-C11 compile-time assertion: the array size evaluates to -1 (an
   ill-formed declaration) unless (int) VALUE == (int) IDENTIFIER_NODE,
   so the build breaks as soon as the assumption above stops holding.  */
122 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
/* NOTE(review): this extract has interior lines elided (braces, the enum
   name string table, the insn member and the location union), so the
   definitions below are an incomplete view — do not edit structurally
   without consulting the full file.  */
124 /* Type of micro operation. */
125 enum micro_operation_type
127 MO_USE, /* Use location (REG or MEM). */
128 MO_USE_NO_VAR,/* Use location which is not associated with a variable
129 or the variable is not trackable. */
130 MO_VAL_USE, /* Use location which is associated with a value. */
131 MO_VAL_LOC, /* Use location which appears in a debug insn. */
132 MO_VAL_SET, /* Set location associated with a value. */
133 MO_SET, /* Set location. */
134 MO_COPY, /* Copy the same portion of a variable from one
135 location to another. */
136 MO_CLOBBER, /* Clobber location. */
137 MO_CALL, /* Call insn. */
138 MO_ADJUST /* Adjust stack pointer. */
/* Human-readable names for the micro operation types, used only by the
   dump routines (hence ATTRIBUTE_UNUSED in non-dump builds).  The table
   contents appear elided in this extract.  */
142 static const char * const ATTRIBUTE_UNUSED
143 micro_operation_type_name[] = {
156 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
157 Notes emitted as AFTER_CALL are to take effect during the call,
158 rather than after the call. */
161 EMIT_NOTE_BEFORE_INSN,
162 EMIT_NOTE_AFTER_INSN,
163 EMIT_NOTE_AFTER_CALL_INSN
166 /* Structure holding information about micro operation. */
167 typedef struct micro_operation_def
169 /* Type of micro operation. */
170 enum micro_operation_type type;
172 /* The instruction which the micro operation is in, for MO_USE,
173 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
174 instruction or note in the original flow (before any var-tracking
175 notes are inserted, to simplify emission of notes), for MO_SET
180 /* Location. For MO_SET and MO_COPY, this is the SET that
181 performs the assignment, if known, otherwise it is the target
182 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
183 CONCAT of the VALUE and the LOC associated with it. For
184 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
185 associated with it. */
188 /* Stack adjustment. */
189 HOST_WIDE_INT adjust;
/* Instantiate a heap-allocated VEC of micro_operation (GCC vec.h macros).  */
193 DEF_VEC_O(micro_operation);
194 DEF_VEC_ALLOC_O(micro_operation,heap);
196 /* A declaration of a variable, or an RTL value being handled like a
/* decl_or_value is an opaque pointer that is either a tree (decl) or a
   VALUE rtx; the dv_* helpers below discriminate between the two.  */
198 typedef void *decl_or_value;
200 /* Structure for passing some other parameters to function
201 emit_note_insn_var_location. */
202 typedef struct emit_note_data_def
204 /* The instruction which the note will be emitted before/after. */
207 /* Where the note will be emitted (before/after insn)? */
208 enum emit_note_where where;
210 /* The variables and values active at this point. */
214 /* Description of location of a part of a variable. The content of a physical
215 register is described by a chain of these structures.
216 The chains are pretty short (usually 1 or 2 elements) and thus
217 chain is the best data structure. */
218 typedef struct attrs_def
220 /* Pointer to next member of the list. */
221 struct attrs_def *next;
223 /* The rtx of register. */
226 /* The declaration corresponding to LOC. */
229 /* Offset from start of DECL. */
230 HOST_WIDE_INT offset;
233 /* Structure holding a refcounted hash table. If refcount > 1,
234 it must be first unshared before modified. */
235 typedef struct shared_hash_def
237 /* Reference count. */
240 /* Actual hash table. */
244 /* Structure holding the IN or OUT set for a basic block. */
245 typedef struct dataflow_set_def
247 /* Adjustment of stack offset. */
248 HOST_WIDE_INT stack_adjust;
250 /* Attributes for registers (lists of attrs). */
251 attrs regs[FIRST_PSEUDO_REGISTER];
253 /* Variable locations. */
256 /* Vars being traversed, to detect cycles during traversal.  */
257 shared_hash traversed_vars;
260 /* The structure (one for each basic block) containing the information
261 needed for variable tracking. */
262 typedef struct variable_tracking_info_def
264 /* The vector of micro operations. */
265 VEC(micro_operation, heap) *mos;
267 /* The IN and OUT set for dataflow analysis. */
271 /* The permanent-in dataflow set for this block. This is used to
272 hold values for which we had to compute entry values. ??? This
273 should probably be dynamically allocated, to avoid using more
274 memory in non-debug builds. */
277 /* Has the block been visited in DFS? */
280 /* Has the block been flooded in VTA? */
283 } *variable_tracking_info;
285 /* Structure for chaining the locations. */
286 typedef struct location_chain_def
288 /* Next element in the chain. */
289 struct location_chain_def *next;
291 /* The location (REG, MEM or VALUE). */
294 /* The "value" stored in this location. */
/* Initialization status of this location (unknown / uninitialized /
   initialized), tracked when -fvar-tracking-uninit is enabled.  */
298 enum var_init_status init;
301 /* Structure describing one part of variable. */
302 typedef struct variable_part_def
304 /* Chain of locations of the part. */
305 location_chain loc_chain;
307 /* Location which was last emitted to location list. */
310 /* The offset in the variable. */
311 HOST_WIDE_INT offset;
314 /* Maximum number of location parts. */
315 #define MAX_VAR_PARTS 16
317 /* Structure describing where the variable is located. */
318 typedef struct variable_def
320 /* The declaration of the variable, or an RTL value being handled
321 like a declaration. */
324 /* Reference count. */
327 /* Number of variable parts. */
330 /* True if this variable changed (any of its) cur_loc fields
331 during the current emit_notes_for_changes resp.
332 emit_notes_for_differences call. */
333 bool cur_loc_changed;
335 /* True if this variable_def struct is currently in the
336 changed_variables hash table. */
337 bool in_changed_variables;
339 /* The variable parts.  Trailing array: allocation over-sizes this to
340 hold n_var_parts entries (see var_pool vs. valvar_pool). */
340 variable_part var_part[1];
342 typedef const struct variable_def *const_variable;
344 /* Structure for chaining backlinks from referenced VALUEs to
345 DVs that are referencing them. */
346 typedef struct value_chain_def
348 /* Next value_chain entry. */
349 struct value_chain_def *next;
351 /* The declaration of the variable, or an RTL value
352 being handled like a declaration, whose var_parts[0].loc_chain
353 references the VALUE owning this value_chain. */
356 /* Reference count. */
359 typedef const struct value_chain_def *const_value_chain;
/* ---- Accessor macros and file-scope state for the pass. ---- */
361 /* Pointer to the BB's information specific to variable tracking pass. */
362 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
364 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
365 #define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
367 /* Alloc pool for struct attrs_def. */
368 static alloc_pool attrs_pool;
370 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
371 static alloc_pool var_pool;
373 /* Alloc pool for struct variable_def with a single var_part entry. */
374 static alloc_pool valvar_pool;
376 /* Alloc pool for struct location_chain_def. */
377 static alloc_pool loc_chain_pool;
379 /* Alloc pool for struct shared_hash_def. */
380 static alloc_pool shared_hash_pool;
382 /* Alloc pool for struct value_chain_def. */
383 static alloc_pool value_chain_pool;
385 /* Changed variables, notes will be emitted for them. */
386 static htab_t changed_variables;
388 /* Links from VALUEs to DVs referencing them in their current loc_chains. */
389 static htab_t value_chains;
391 /* Shall notes be emitted? */
392 static bool emit_notes;
394 /* Empty shared hashtable. */
395 static shared_hash empty_shared_hash;
397 /* Scratch register bitmap used by cselib_expand_value_rtx. */
398 static bitmap scratch_regs = NULL;
400 /* Variable used to tell whether cselib_process_insn called our hook. */
401 static bool cselib_hook_called;
403 /* Local function prototypes. */
404 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
406 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
408 static void bb_stack_adjust_offset (basic_block);
409 static bool vt_stack_adjustments (void);
410 static rtx adjust_stack_reference (rtx, HOST_WIDE_INT);
411 static hashval_t variable_htab_hash (const void *);
412 static int variable_htab_eq (const void *, const void *);
413 static void variable_htab_free (void *);
415 static void init_attrs_list_set (attrs *);
416 static void attrs_list_clear (attrs *);
417 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
418 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
419 static void attrs_list_copy (attrs *, attrs);
420 static void attrs_list_union (attrs *, attrs);
422 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
423 enum var_init_status);
424 static int vars_copy_1 (void **, void *);
425 static void vars_copy (htab_t, htab_t);
426 static tree var_debug_decl (tree);
427 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
428 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
429 enum var_init_status, rtx);
430 static void var_reg_delete (dataflow_set *, rtx, bool);
431 static void var_regno_delete (dataflow_set *, int);
432 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
433 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
434 enum var_init_status, rtx);
435 static void var_mem_delete (dataflow_set *, rtx, bool);
437 static void dataflow_set_init (dataflow_set *);
438 static void dataflow_set_clear (dataflow_set *);
439 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
440 static int variable_union_info_cmp_pos (const void *, const void *);
441 static int variable_union (void **, void *);
442 static void dataflow_set_union (dataflow_set *, dataflow_set *);
443 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
444 static bool canon_value_cmp (rtx, rtx);
445 static int loc_cmp (rtx, rtx);
446 static bool variable_part_different_p (variable_part *, variable_part *);
447 static bool onepart_variable_different_p (variable, variable);
448 static bool variable_different_p (variable, variable);
449 static int dataflow_set_different_1 (void **, void *);
450 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
451 static void dataflow_set_destroy (dataflow_set *);
453 static bool contains_symbol_ref (rtx);
454 static bool track_expr_p (tree, bool);
455 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
456 static int add_uses (rtx *, void *);
457 static void add_uses_1 (rtx *, void *);
458 static void add_stores (rtx, const_rtx, void *);
459 static bool compute_bb_dataflow (basic_block);
460 static bool vt_find_locations (void);
462 static void dump_attrs_list (attrs);
463 static int dump_var_slot (void **, void *);
464 static void dump_var (variable);
465 static void dump_vars (htab_t);
466 static void dump_dataflow_set (dataflow_set *);
467 static void dump_dataflow_sets (void);
469 static void variable_was_changed (variable, dataflow_set *);
470 static void **set_slot_part (dataflow_set *, rtx, void **,
471 decl_or_value, HOST_WIDE_INT,
472 enum var_init_status, rtx);
473 static void set_variable_part (dataflow_set *, rtx,
474 decl_or_value, HOST_WIDE_INT,
475 enum var_init_status, rtx, enum insert_option);
476 static void **clobber_slot_part (dataflow_set *, rtx,
477 void **, HOST_WIDE_INT, rtx);
478 static void clobber_variable_part (dataflow_set *, rtx,
479 decl_or_value, HOST_WIDE_INT, rtx);
480 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
481 static void delete_variable_part (dataflow_set *, rtx,
482 decl_or_value, HOST_WIDE_INT);
483 static int emit_note_insn_var_location (void **, void *);
484 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
485 static int emit_notes_for_differences_1 (void **, void *);
486 static int emit_notes_for_differences_2 (void **, void *);
487 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
488 static void emit_notes_in_bb (basic_block, dataflow_set *);
489 static void vt_emit_notes (void);
491 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
492 static void vt_add_function_parameters (void);
493 static void vt_initialize (void);
494 static void vt_finalize (void);
496 /* Given a SET, calculate the amount of stack adjustment it contains
497 PRE- and POST-modifying stack pointer.
498 This function is similar to stack_adjust_offset. */
/* NOTE(review): the signature tail, braces and the switch over the
   auto-modify codes (PRE_MODIFY/POST_MODIFY/PRE_DEC/POST_DEC/PRE_INC/
   POST_INC, presumably) are elided in this extract; the surviving arms
   accumulate the adjustment into *pre / *post.  */
501 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
504 rtx src = SET_SRC (pattern);
505 rtx dest = SET_DEST (pattern);
508 if (dest == stack_pointer_rtx)
510 /* (set (reg sp) (plus (reg sp) (const_int))) */
511 code = GET_CODE (src);
512 if (! (code == PLUS || code == MINUS)
513 || XEXP (src, 0) != stack_pointer_rtx
514 || !CONST_INT_P (XEXP (src, 1)))
/* A direct sp = sp +/- const adjusts the post-insn offset only.  */
518 *post += INTVAL (XEXP (src, 1));
520 *post -= INTVAL (XEXP (src, 1));
522 else if (MEM_P (dest))
524 /* (set (mem (pre_dec (reg sp))) (foo)) */
525 src = XEXP (dest, 0);
526 code = GET_CODE (src);
532 if (XEXP (src, 0) == stack_pointer_rtx)
534 rtx val = XEXP (XEXP (src, 1), 1);
535 /* We handle only adjustments by constant amount. */
536 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
539 if (code == PRE_MODIFY)
540 *pre -= INTVAL (val);
542 *post -= INTVAL (val);
/* The remaining arms adjust by the access size for the plain
   auto-inc/dec addressing modes on sp.  */
548 if (XEXP (src, 0) == stack_pointer_rtx)
550 *pre += GET_MODE_SIZE (GET_MODE (dest));
556 if (XEXP (src, 0) == stack_pointer_rtx)
558 *post += GET_MODE_SIZE (GET_MODE (dest));
564 if (XEXP (src, 0) == stack_pointer_rtx)
566 *pre -= GET_MODE_SIZE (GET_MODE (dest));
572 if (XEXP (src, 0) == stack_pointer_rtx)
574 *post -= GET_MODE_SIZE (GET_MODE (dest));
585 /* Given an INSN, calculate the amount of stack adjustment it contains
586 PRE- and POST-modifying stack pointer. */
589 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
597 pattern = PATTERN (insn);
598 if (RTX_FRAME_RELATED_P (insn))
/* Frame-related insns may carry the real stack effect in a
   REG_FRAME_RELATED_EXPR note; prefer that over the raw pattern.  */
600 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
602 pattern = XEXP (expr, 0);
605 if (GET_CODE (pattern) == SET)
606 stack_adjust_offset_pre_post (pattern, pre, post);
607 else if (GET_CODE (pattern) == PARALLEL
608 || GET_CODE (pattern) == SEQUENCE)
612 /* There may be stack adjustments inside compound insns. Search
/* Scan every SET inside the compound pattern (reverse order).  */
614 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
615 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
616 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
620 /* Compute stack adjustment in basic block BB.  Walks BB's micro
   operations, accumulating MO_ADJUST deltas and rewriting MEM
   locations to be relative to the argument pointer via
   adjust_stack_reference; records the running total in out.stack_adjust.  */
623 bb_stack_adjust_offset (basic_block bb)
625 HOST_WIDE_INT offset;
629 offset = VTI (bb)->in.stack_adjust;
630 for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
632 if (mo->type == MO_ADJUST)
633 offset += mo->u.adjust;
634 else if (mo->type != MO_CALL)
636 if (MEM_P (mo->u.loc))
637 mo->u.loc = adjust_stack_reference (mo->u.loc, -offset);
640 VTI (bb)->out.stack_adjust = offset;
643 /* Compute stack adjustments for all blocks by traversing DFS tree.
644 Return true when the adjustments on all incoming edges are consistent.
645 Heavily borrowed from pre_and_rev_post_order_compute. */
/* NOTE(review): the failure path (freeing STACK and returning false on
   inconsistent edges) and the loop framing are elided in this extract.  */
648 vt_stack_adjustments (void)
650 edge_iterator *stack;
653 /* Initialize entry block. */
654 VTI (ENTRY_BLOCK_PTR)->visited = true;
655 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
657 /* Allocate stack for back-tracking up CFG. */
658 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
661 /* Push the first edge on to the stack. */
662 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
670 /* Look at the edge on the top of the stack. */
672 src = ei_edge (ei)->src;
673 dest = ei_edge (ei)->dest;
675 /* Check if the edge destination has been visited yet. */
676 if (!VTI (dest)->visited)
678 VTI (dest)->visited = true;
/* First visit: propagate SRC's outgoing adjustment into DEST and
   compute DEST's own adjustment before descending.  */
679 VTI (dest)->in.stack_adjust = VTI (src)->out.stack_adjust;
680 bb_stack_adjust_offset (dest);
682 if (EDGE_COUNT (dest->succs) > 0)
683 /* Since the DEST node has been visited for the first
684 time, check its successors. */
685 stack[sp++] = ei_start (dest->succs);
689 /* Check whether the adjustments on the edges are the same. */
690 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
696 if (! ei_one_before_end_p (ei))
697 /* Go to the next edge. */
698 ei_next (&stack[sp - 1]);
700 /* Return to previous level if there are no more edges. */
709 /* Adjust stack reference MEM by ADJUSTMENT bytes and make it relative
710 to the argument pointer. Return the new rtx. */
713 adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment)
/* Build the CFA-relative base: frame pointer if the target defines
   FRAME_POINTER_CFA_OFFSET, otherwise the argument pointer.  */
717 #ifdef FRAME_POINTER_CFA_OFFSET
718 adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
719 cfa = plus_constant (frame_pointer_rtx, adjustment);
721 adjustment -= ARG_POINTER_CFA_OFFSET (current_function_decl);
722 cfa = plus_constant (arg_pointer_rtx, adjustment);
/* Replace sp in the address with the CFA expression, then try to
   simplify the result (non-validating replacement, "_nv").  */
725 addr = replace_rtx (copy_rtx (XEXP (mem, 0)), stack_pointer_rtx, cfa);
726 tmp = simplify_rtx (addr);
730 return replace_equiv_address_nv (mem, addr);
733 /* Return true if a decl_or_value DV is a DECL or NULL.  Relies on the
   check_value_val assertion above: no tree code collides with VALUE.  */
735 dv_is_decl_p (decl_or_value dv)
737 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
740 /* Return true if a decl_or_value is a VALUE rtl. */
742 dv_is_value_p (decl_or_value dv)
744 return dv && !dv_is_decl_p (dv);
747 /* Return the decl in the decl_or_value. */
749 dv_as_decl (decl_or_value dv)
751 #ifdef ENABLE_CHECKING
752 gcc_assert (dv_is_decl_p (dv));
757 /* Return the value in the decl_or_value. */
759 dv_as_value (decl_or_value dv)
761 #ifdef ENABLE_CHECKING
762 gcc_assert (dv_is_value_p (dv));
767 /* Return the opaque pointer in the decl_or_value. */
769 dv_as_opaque (decl_or_value dv)
774 /* Return true if a decl_or_value must not have more than one variable
   part, i.e. is tracked as a single (value-like) entity.  */
777 dv_onepart_p (decl_or_value dv)
781 if (!MAY_HAVE_DEBUG_INSNS)
784 if (dv_is_value_p (dv))
787 decl = dv_as_decl (dv);
792 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
795 return (target_for_debug_bind (decl) != NULL_TREE);
798 /* Return the variable pool to be used for dv, depending on whether it
799 can have multiple parts or not. */
800 static inline alloc_pool
801 dv_pool (decl_or_value dv)
803 return dv_onepart_p (dv) ? valvar_pool : var_pool;
806 /* Build a decl_or_value out of a decl. */
807 static inline decl_or_value
808 dv_from_decl (tree decl)
812 #ifdef ENABLE_CHECKING
813 gcc_assert (dv_is_decl_p (dv));
818 /* Build a decl_or_value out of a value. */
819 static inline decl_or_value
820 dv_from_value (rtx value)
824 #ifdef ENABLE_CHECKING
825 gcc_assert (dv_is_value_p (dv));
/* Debugger helper: print DV as either an rtx or a generic tree.
   Declared extern so it survives and can be called from gdb.  */
830 extern void debug_dv (decl_or_value dv);
833 debug_dv (decl_or_value dv)
835 if (dv_is_value_p (dv))
836 debug_rtx (dv_as_value (dv));
838 debug_generic_stmt (dv_as_decl (dv));
841 typedef unsigned int dvuid;
843 /* Return the uid of DV: the cselib uid for VALUEs, DECL_UID for decls. */
846 dv_uid (decl_or_value dv)
848 if (dv_is_value_p (dv))
849 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
851 return DECL_UID (dv_as_decl (dv));
854 /* Compute the hash from the uid. */
856 static inline hashval_t
857 dv_uid2hash (dvuid uid)
862 /* The hash function for a mask table in a shared_htab chain. */
864 static inline hashval_t
865 dv_htab_hash (decl_or_value dv)
867 return dv_uid2hash (dv_uid (dv));
870 /* The hash function for variable_htab, computes the hash value
871 from the declaration of variable X. */
874 variable_htab_hash (const void *x)
876 const_variable const v = (const_variable) x;
878 return dv_htab_hash (v->dv);
881 /* Compare the declaration of variable X with declaration Y.
   Y is the lookup key (a decl_or_value), not a variable.  */
884 variable_htab_eq (const void *x, const void *y)
886 const_variable const v = (const_variable) x;
887 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
889 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
892 /* Free the element of VARIABLE_HTAB (its type is struct variable_def).
   Drops one reference; only when the refcount reaches zero are the
   location chains and the variable itself returned to their pools.  */
895 variable_htab_free (void *elem)
898 variable var = (variable) elem;
899 location_chain node, next;
901 gcc_assert (var->refcount > 0);
904 if (var->refcount > 0)
907 for (i = 0; i < var->n_var_parts; i++)
909 for (node = var->var_part[i].loc_chain; node; node = next)
912 pool_free (loc_chain_pool, node);
914 var->var_part[i].loc_chain = NULL;
916 pool_free (dv_pool (var->dv), var);
919 /* The hash function for value_chains htab, computes the hash value
   from the dv of the chain entry X.  */
923 value_chain_htab_hash (const void *x)
925 const_value_chain const v = (const_value_chain) x;
927 return dv_htab_hash (v->dv);
930 /* Compare the VALUE X with VALUE Y.  Y is the lookup key (a
   decl_or_value); equality is plain pointer identity on the dv.  */
933 value_chain_htab_eq (const void *x, const void *y)
935 const_value_chain const v = (const_value_chain) x;
936 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
938 return dv_as_opaque (v->dv) == dv_as_opaque (dv);
941 /* Initialize the set (array) SET of attrs to empty lists. */
944 init_attrs_list_set (attrs *set)
948 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
952 /* Make the list *LISTP empty, returning every node to attrs_pool. */
955 attrs_list_clear (attrs *listp)
959 for (list = *listp; list; list = next)
962 pool_free (attrs_pool, list);
967 /* Return true if the pair of DECL and OFFSET is a member of LIST. */
970 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
972 for (; list; list = list->next)
973 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
978 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP.
   New node is prepended (head insertion).  */
981 attrs_list_insert (attrs *listp, decl_or_value dv,
982 HOST_WIDE_INT offset, rtx loc)
986 list = (attrs) pool_alloc (attrs_pool);
989 list->offset = offset;
994 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
997 attrs_list_copy (attrs *dstp, attrs src)
1001 attrs_list_clear (dstp);
1002 for (; src; src = src->next)
1004 n = (attrs) pool_alloc (attrs_pool);
1007 n->offset = src->offset;
1013 /* Add all nodes from SRC which are not in *DSTP to *DSTP.
   Note: O(|SRC| * |DSTP|), acceptable because these lists are short.  */
1016 attrs_list_union (attrs *dstp, attrs src)
1018 for (; src; src = src->next)
1020 if (!attrs_list_member (*dstp, src->dv, src->offset))
1021 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1025 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
   *DSTP, which must start out empty (asserted below).  */
1029 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1031 gcc_assert (!*dstp);
1032 for (; src; src = src->next)
1034 if (!dv_onepart_p (src->dv))
1035 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1037 for (src = src2; src; src = src->next)
1039 if (!dv_onepart_p (src->dv)
1040 && !attrs_list_member (*dstp, src->dv, src->offset))
1041 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1045 /* Shared hashtable support: copy-on-write reference-counted hash
   tables, so dataflow sets can share an unmodified variable table.  */
1047 /* Return true if VARS is shared. */
1050 shared_hash_shared (shared_hash vars)
1052 return vars->refcount > 1;
1055 /* Return the hash table for VARS. */
1057 static inline htab_t
1058 shared_hash_htab (shared_hash vars)
1063 /* Return true if VAR is shared — either because VAR itself has extra
   references, or because the containing VARS table is shared.  */
1066 shared_var_p (variable var, shared_hash vars)
1068 /* Don't count an entry in the changed_variables table as a duplicate. */
1069 return ((var->refcount > 1 + (int) var->in_changed_variables)
1070 || shared_hash_shared (vars))
1073 /* Copy variables into a new hash table. */
1076 shared_hash_unshare (shared_hash vars)
1078 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1079 gcc_assert (vars->refcount > 1);
1080 new_vars->refcount = 1;
1082 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1083 variable_htab_eq, variable_htab_free);
1084 vars_copy (new_vars->htab, vars->htab);
1089 /* Increment reference counter on VARS and return it. */
1091 static inline shared_hash
1092 shared_hash_copy (shared_hash vars)
1098 /* Decrement reference counter and destroy hash table if not shared
   anymore (refcount dropped to zero).  */
1102 shared_hash_destroy (shared_hash vars)
1104 gcc_assert (vars->refcount > 0);
1105 if (--vars->refcount == 0)
1107 htab_delete (vars->htab);
1108 pool_free (shared_hash_pool, vars);
1112 /* Unshare *PVARS if shared and return slot for DV. If INS is
1113 INSERT, insert it if not already present. */
1115 static inline void **
1116 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1117 hashval_t dvhash, enum insert_option ins)
1119 if (shared_hash_shared (*pvars))
1120 *pvars = shared_hash_unshare (*pvars);
1121 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1124 static inline void **
1125 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1126 enum insert_option ins)
1128 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1131 /* Return slot for DV, if it is already present in the hash table.
1132 If it is not present, insert it only if VARS is not shared, otherwise
   return NULL (NO_INSERT lookup on a shared table).  */
1135 static inline void **
1136 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1138 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1139 shared_hash_shared (vars)
1140 ? NO_INSERT : INSERT);
1143 static inline void **
1144 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1146 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1149 /* Return slot for DV only if it is already present in the hash table. */
1151 static inline void **
1152 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1155 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1159 static inline void **
1160 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1162 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1165 /* Return variable for DV or NULL if not already present in the hash
   table.  */
1168 static inline variable
1169 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1171 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1174 static inline variable
1175 shared_hash_find (shared_hash vars, decl_or_value dv)
1177 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1180 /* Return true if TVAL is better than CVAL as a canonical value. We
1181 choose lowest-numbered VALUEs, using the RTX address as a
1182 tie-breaker. The idea is to arrange them into a star topology,
1183 such that all of them are at most one step away from the canonical
1184 value, and the canonical value has backlinks to all of them, in
1185 addition to all the actual locations. We don't enforce this
1186 topology throughout the entire dataflow analysis, though.
1190 canon_value_cmp (rtx tval, rtx cval)
/* NOTE(review): the first half of the disjunction (presumably the
   !cval / pointer tie-break test) is elided in this extract.  */
1193 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
/* Set by variable_union / unshare_variable to communicate whether the
   destination table may keep sharing its entries.  */
1196 static bool dst_can_be_shared;
1198 /* Return a copy of a variable VAR and insert it to dataflow set SET.
   The copy gets refcount 1; VAR's slot in SET (and, if present, in
   changed_variables) is redirected to the copy.  */
1201 unshare_variable (dataflow_set *set, void **slot, variable var,
1202 enum var_init_status initialized)
1207 new_var = (variable) pool_alloc (dv_pool (var->dv));
1208 new_var->dv = var->dv;
1209 new_var->refcount = 1;
1211 new_var->n_var_parts = var->n_var_parts;
1212 new_var->cur_loc_changed = var->cur_loc_changed;
1213 var->cur_loc_changed = false;
1214 new_var->in_changed_variables = false;
1216 if (! flag_var_tracking_uninit)
1217 initialized = VAR_INIT_STATUS_INITIALIZED;
/* Deep-copy every part's location chain, clamping each node's init
   status to at most INITIALIZED and dropping MEM set_src references.  */
1219 for (i = 0; i < var->n_var_parts; i++)
1221 location_chain node;
1222 location_chain *nextp;
1224 new_var->var_part[i].offset = var->var_part[i].offset;
1225 nextp = &new_var->var_part[i].loc_chain;
1226 for (node = var->var_part[i].loc_chain; node; node = node->next)
1228 location_chain new_lc;
1230 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1231 new_lc->next = NULL;
1232 if (node->init > initialized)
1233 new_lc->init = node->init;
1235 new_lc->init = initialized;
1236 if (node->set_src && !(MEM_P (node->set_src)))
1237 new_lc->set_src = node->set_src;
1239 new_lc->set_src = NULL;
1240 new_lc->loc = node->loc;
1243 nextp = &new_lc->next;
1246 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
/* Re-resolve SLOT: unsharing SET->vars (or traversing a different
   table) may have invalidated the slot we were handed.  */
1249 dst_can_be_shared = false;
1250 if (shared_hash_shared (set->vars))
1251 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1252 else if (set->traversed_vars && set->vars != set->traversed_vars)
1253 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
/* If VAR sat in changed_variables, transfer that membership (and its
   reference) to the new copy.  */
1255 if (var->in_changed_variables)
1258 = htab_find_slot_with_hash (changed_variables, var->dv,
1259 dv_htab_hash (var->dv), NO_INSERT);
1260 gcc_assert (*cslot == (void *) var);
1261 var->in_changed_variables = false;
1262 variable_htab_free (var);
1264 new_var->in_changed_variables = true;
1269 /* Add a variable from *SLOT to hash table DATA and increase its reference
1273 vars_copy_1 (void **slot, void *data)
1275 htab_t dst = (htab_t) data;
1279 src = (variable) *slot;
1282 dstp = htab_find_slot_with_hash (dst, src->dv,
1283 dv_htab_hash (src->dv),
1287 /* Continue traversing the hash table. */
1291 /* Copy all variables from hash table SRC to hash table DST. */
1294 vars_copy (htab_t dst, htab_t src)
1296 htab_traverse_noresize (src, vars_copy_1, dst);
1299 /* Map a decl to its main debug decl. */
1302 var_debug_decl (tree decl)
1304 if (decl && DECL_P (decl)
1305 && DECL_DEBUG_EXPR_IS_FROM (decl) && DECL_DEBUG_EXPR (decl)
1306 && DECL_P (DECL_DEBUG_EXPR (decl)))
1307 decl = DECL_DEBUG_EXPR (decl);
1312 /* Set the register LOC to contain DV, OFFSET. */
1315 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1316 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1317 enum insert_option iopt)
1320 bool decl_p = dv_is_decl_p (dv);
1323 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1325 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1326 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1327 && node->offset == offset)
1330 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1331 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1334 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1337 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1340 tree decl = REG_EXPR (loc);
1341 HOST_WIDE_INT offset = REG_OFFSET (loc);
1343 var_reg_decl_set (set, loc, initialized,
1344 dv_from_decl (decl), offset, set_src, INSERT);
1347 static enum var_init_status
1348 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1352 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1354 if (! flag_var_tracking_uninit)
1355 return VAR_INIT_STATUS_INITIALIZED;
1357 var = shared_hash_find (set->vars, dv);
1360 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1362 location_chain nextp;
1363 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1364 if (rtx_equal_p (nextp->loc, loc))
1366 ret_val = nextp->init;
1375 /* Delete current content of register LOC in dataflow set SET and set
1376 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1377 MODIFY is true, any other live copies of the same variable part are
1378 also deleted from the dataflow set, otherwise the variable part is
1379 assumed to be copied from another location holding the same
1383 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1384 enum var_init_status initialized, rtx set_src)
1386 tree decl = REG_EXPR (loc);
1387 HOST_WIDE_INT offset = REG_OFFSET (loc);
1391 decl = var_debug_decl (decl);
1393 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1394 initialized = get_init_value (set, loc, dv_from_decl (decl));
1396 nextp = &set->regs[REGNO (loc)];
1397 for (node = *nextp; node; node = next)
1400 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1402 delete_variable_part (set, node->loc, node->dv, node->offset);
1403 pool_free (attrs_pool, node);
1409 nextp = &node->next;
1413 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1414 var_reg_set (set, loc, initialized, set_src);
1417 /* Delete the association of register LOC in dataflow set SET with any
1418 variables that aren't onepart. If CLOBBER is true, also delete any
1419 other live copies of the same variable part, and delete the
1420 association with onepart dvs too. */
1423 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1425 attrs *nextp = &set->regs[REGNO (loc)];
1430 tree decl = REG_EXPR (loc);
1431 HOST_WIDE_INT offset = REG_OFFSET (loc);
1433 decl = var_debug_decl (decl);
1435 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1438 for (node = *nextp; node; node = next)
1441 if (clobber || !dv_onepart_p (node->dv))
1443 delete_variable_part (set, node->loc, node->dv, node->offset);
1444 pool_free (attrs_pool, node);
1448 nextp = &node->next;
1452 /* Delete content of register with number REGNO in dataflow set SET. */
1455 var_regno_delete (dataflow_set *set, int regno)
1457 attrs *reg = &set->regs[regno];
1460 for (node = *reg; node; node = next)
1463 delete_variable_part (set, node->loc, node->dv, node->offset);
1464 pool_free (attrs_pool, node);
1469 /* Set the location of DV, OFFSET as the MEM LOC. */
1472 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1473 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1474 enum insert_option iopt)
1476 if (dv_is_decl_p (dv))
1477 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1479 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1482 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1484 Adjust the address first if it is stack pointer based. */
1487 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1490 tree decl = MEM_EXPR (loc);
1491 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1493 var_mem_decl_set (set, loc, initialized,
1494 dv_from_decl (decl), offset, set_src, INSERT);
1497 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1498 dataflow set SET to LOC. If MODIFY is true, any other live copies
1499 of the same variable part are also deleted from the dataflow set,
1500 otherwise the variable part is assumed to be copied from another
1501 location holding the same part.
1502 Adjust the address first if it is stack pointer based. */
1505 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1506 enum var_init_status initialized, rtx set_src)
1508 tree decl = MEM_EXPR (loc);
1509 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1511 decl = var_debug_decl (decl);
1513 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1514 initialized = get_init_value (set, loc, dv_from_decl (decl));
1517 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1518 var_mem_set (set, loc, initialized, set_src);
1521 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1522 true, also delete any other live copies of the same variable part.
1523 Adjust the address first if it is stack pointer based. */
1526 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1528 tree decl = MEM_EXPR (loc);
1529 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1531 decl = var_debug_decl (decl);
1533 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1534 delete_variable_part (set, loc, dv_from_decl (decl), offset);
1537 /* Bind a value to a location it was just stored in. If MODIFIED
1538    holds, assume the location was modified, detaching it from any
1539    values bound to it. */
/* NOTE(review): the embedded original line numbers jump here
   (1539->1542, 1544->1546, ...), so this extract is missing the
   function head, braces and some statements.  Code left byte-identical;
   comments only.  */
1542 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
1544 cselib_val *v = CSELIB_VAL_PTR (val);
/* Only preserved cselib values are tracked here.  */
1546 gcc_assert (cselib_preserved_value_p (v));
/* Dump-file tracing of the stored value and its known locations.  */
1550 fprintf (dump_file, "%i: ", INSN_UID (insn));
1551 print_inline_rtx (dump_file, val, 0);
1552 fprintf (dump_file, " stored in ");
1553 print_inline_rtx (dump_file, loc, 0);
1556 struct elt_loc_list *l;
1557 for (l = v->locs; l; l = l->next)
1559 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
1560 print_inline_rtx (dump_file, l->loc, 0);
1563 fprintf (dump_file, "\n");
/* Register store: old register contents are dropped (presumably only
   when MODIFIED -- the guard is on a missing line), then VAL bound.  */
1569 var_regno_delete (set, REGNO (loc));
1570 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1571 dv_from_value (val), 0, NULL_RTX, INSERT);
1573 else if (MEM_P (loc))
1574 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1575 dv_from_value (val), 0, NULL_RTX, INSERT);
/* Fallback for locations that are neither REG nor MEM.  */
1577 set_variable_part (set, loc, dv_from_value (val), 0,
1578 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1581 /* Reset this node, detaching all its equivalences. Return the slot
1582    in the variable hash table that holds dv, if there is one. */
/* NOTE(review): gap-damaged extract -- guard lines, braces and the
   declaration of CVAL are missing (original numbering jumps).  Kept
   byte-identical; comments only.  */
1585 val_reset (dataflow_set *set, decl_or_value dv)
1587 variable var = shared_hash_find (set->vars, dv) ;
1588 location_chain node;
1591 if (!var || !var->n_var_parts)
1594 gcc_assert (var->n_var_parts == 1);
/* First pass: pick the best (canonical) VALUE among the locations.  */
1597 for (node = var->var_part[0].loc_chain; node; node = node->next)
1598 if (GET_CODE (node->loc) == VALUE
1599 && canon_value_cmp (node->loc, cval))
/* Second pass: repoint every non-canonical equivalence at CVAL.  */
1602 for (node = var->var_part[0].loc_chain; node; node = node->next)
1603 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
1605 /* Redirect the equivalence link to the new canonical
1606    value, or simply remove it if it would point at
1609 set_variable_part (set, cval, dv_from_value (node->loc),
1610 0, node->init, node->set_src, NO_INSERT);
1611 delete_variable_part (set, dv_as_value (dv),
1612 dv_from_value (node->loc), 0);
1617 decl_or_value cdv = dv_from_value (cval);
1619 /* Keep the remaining values connected, accummulating links
1620    in the canonical value. */
1621 for (node = var->var_part[0].loc_chain; node; node = node->next)
1623 if (node->loc == cval)
1625 else if (GET_CODE (node->loc) == REG)
1626 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
1627 node->set_src, NO_INSERT);
1628 else if (GET_CODE (node->loc) == MEM)
1629 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
1630 node->set_src, NO_INSERT);
1632 set_variable_part (set, node->loc, cdv, 0,
1633 node->init, node->set_src, NO_INSERT);
1637 /* We remove this last, to make sure that the canonical value is not
1638    removed to the point of requiring reinsertion. */
1640 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
1642 clobber_variable_part (set, NULL, dv, 0, NULL);
1644 /* ??? Should we make sure there aren't other available values or
1645    variables whose values involve this one other than by
1646    equivalence? E.g., at the very least we should reset MEMs, those
1647    shouldn't be too hard to find cselib-looking up the value as an
1648    address, then locating the resulting value in our own hash
1652 /* Find the values in a given location and map the val to another
1653    value, if it is unique, or add the location as one holding the
/* NOTE(review): gap-damaged extract -- the function head, braces and
   several guards are on missing lines.  Kept byte-identical; comments
   only.  */
1657 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
1659 decl_or_value dv = dv_from_value (val);
1661 if (dump_file && (dump_flags & TDF_DETAILS))
1664 fprintf (dump_file, "%i: ", INSN_UID (insn));
1666 fprintf (dump_file, "head: ");
1667 print_inline_rtx (dump_file, val, 0);
1668 fputs (" is at ", dump_file);
1669 print_inline_rtx (dump_file, loc, 0);
1670 fputc ('\n', dump_file);
/* Detach VAL's previous equivalences before resolving it anew.  */
1673 val_reset (set, dv);
1677 attrs node, found = NULL;
/* Scan the register's attrs for a same-mode VALUE to equate with.  */
1679 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1680 if (dv_is_value_p (node->dv)
1681 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
1685 /* Map incoming equivalences. ??? Wouldn't it be nice if
1686    we just started sharing the location lists? Maybe a
1687    circular list ending at the value itself or some
1689 set_variable_part (set, dv_as_value (node->dv),
1690 dv_from_value (val), node->offset,
1691 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1692 set_variable_part (set, val, node->dv, node->offset,
1693 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1696 /* If we didn't find any equivalence, we need to remember that
1697    this value is held in the named register. */
1699 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1700 dv_from_value (val), 0, NULL_RTX, INSERT);
1702 else if (MEM_P (loc))
1703 /* ??? Merge equivalent MEMs. */
1704 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1705 dv_from_value (val), 0, NULL_RTX, INSERT);
1707 /* ??? Merge equivalent expressions. */
1708 set_variable_part (set, loc, dv_from_value (val), 0,
1709 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1712 /* Initialize dataflow set SET to be empty.
1713 VARS_SIZE is the initial size of hash table VARS. */
1716 dataflow_set_init (dataflow_set *set)
1718 init_attrs_list_set (set->regs);
1719 set->vars = shared_hash_copy (empty_shared_hash);
1720 set->stack_adjust = 0;
1721 set->traversed_vars = NULL;
1724 /* Delete the contents of dataflow set SET. */
1727 dataflow_set_clear (dataflow_set *set)
1731 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1732 attrs_list_clear (&set->regs[i]);
1734 shared_hash_destroy (set->vars);
1735 set->vars = shared_hash_copy (empty_shared_hash);
1738 /* Copy the contents of dataflow set SRC to DST. */
1741 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
1745 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1746 attrs_list_copy (&dst->regs[i], src->regs[i]);
1748 shared_hash_destroy (dst->vars);
1749 dst->vars = shared_hash_copy (src->vars);
1750 dst->stack_adjust = src->stack_adjust;
1753 /* Information for merging lists of locations for a given offset of variable.
/* NOTE(review): struct head/closing brace and field declarations are
   on missing lines (original numbering jumps); comments only.  */
1755 struct variable_union_info
1757 /* Node of the location chain. */
1760 /* The sum of positions in the input chains. */
1763 /* The position in the chain of DST dataflow set. */
1767 /* Buffer for location list sorting and its allocated size. */
1768 static struct variable_union_info *vui_vec;
1769 static int vui_allocated;
1771 /* Compare function for qsort, order the structures by POS element. */
1774 variable_union_info_cmp_pos (const void *n1, const void *n2)
1776 const struct variable_union_info *const i1 =
1777 (const struct variable_union_info *) n1;
1778 const struct variable_union_info *const i2 =
1779 ( const struct variable_union_info *) n2;
1781 if (i1->pos != i2->pos)
1782 return i1->pos - i2->pos;
1784 return (i1->pos_dst - i2->pos_dst);
1787 /* Compute union of location parts of variable *SLOT and the same variable
1788    from hash table DATA. Compute "sorted" union of the location chains
1789    for common offsets, i.e. the locations of a variable part are sorted by
1790    a priority where the priority is the sum of the positions in the 2 chains
1791    (if a location is only in one list the position in the second list is
1792    defined to be larger than the length of the chains).
1793    When we are updating the location parts the newest location is in the
1794    beginning of the chain, so when we do the described "sorted" union
1795    we keep the newest locations in the beginning. */
/* NOTE(review): heavily gap-damaged extract -- roughly a third of the
   original lines (braces, else branches, declarations of i/j/k/dst,
   src_l/dst_l/ii/jj/n, returns) are missing, as the embedded original
   numbering shows.  Kept byte-identical; comments only.  */
1798 variable_union (void **slot, void *data)
1802 dataflow_set *set = (dataflow_set *) data;
1805 src = (variable) *slot;
1806 dstp = shared_hash_find_slot (set->vars, src->dv);
/* SRC not present in DST: insert it (unsharing DST's table first if
   needed -- the unshare branch is on missing lines).  */
1807 if (!dstp || !*dstp)
1811 dst_can_be_shared = false;
1813 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
1817 /* Continue traversing the hash table. */
1821 dst = (variable) *dstp;
1823 gcc_assert (src->n_var_parts);
1825 /* We can combine one-part variables very efficiently, because their
1826    entries are in canonical order. */
1827 if (dv_onepart_p (src->dv))
1829 location_chain *nodep, dnode, snode;
1831 gcc_assert (src->n_var_parts == 1);
1832 gcc_assert (dst->n_var_parts == 1);
1834 snode = src->var_part[0].loc_chain;
1837 restart_onepart_unshared:
1838 nodep = &dst->var_part[0].loc_chain;
/* Merge the two loc_cmp-ordered chains in one pass.  */
1844 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
1848 location_chain nnode;
1850 if (shared_var_p (dst, set->vars))
1852 dstp = unshare_variable (set, dstp, dst,
1853 VAR_INIT_STATUS_INITIALIZED);
1854 dst = (variable)*dstp;
1855 goto restart_onepart_unshared;
1858 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
1859 nnode->loc = snode->loc;
1860 nnode->init = snode->init;
1861 if (!snode->set_src || MEM_P (snode->set_src))
1862 nnode->set_src = NULL;
1864 nnode->set_src = snode->set_src;
1865 nnode->next = dnode;
1868 #ifdef ENABLE_CHECKING
1870 gcc_assert (rtx_equal_p (dnode->loc, snode->loc));
1874 snode = snode->next;
1876 nodep = &dnode->next;
1883 /* Count the number of location parts, result is K. */
1884 for (i = 0, j = 0, k = 0;
1885 i < src->n_var_parts && j < dst->n_var_parts; k++)
1887 if (src->var_part[i].offset == dst->var_part[j].offset)
1892 else if (src->var_part[i].offset < dst->var_part[j].offset)
1897 k += src->n_var_parts - i;
1898 k += dst->n_var_parts - j;
1900 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
1901    thus there are at most MAX_VAR_PARTS different offsets. */
1902 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
1904 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
1906 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
1907 dst = (variable)*dstp;
/* Merge parts from highest offset down so DST can be filled in place.  */
1910 i = src->n_var_parts - 1;
1911 j = dst->n_var_parts - 1;
1912 dst->n_var_parts = k;
1914 for (k--; k >= 0; k--)
1916 location_chain node, node2;
1918 if (i >= 0 && j >= 0
1919 && src->var_part[i].offset == dst->var_part[j].offset)
1921 /* Compute the "sorted" union of the chains, i.e. the locations which
1922    are in both chains go first, they are sorted by the sum of
1923    positions in the chains. */
1926 struct variable_union_info *vui;
1928 /* If DST is shared compare the location chains.
1929    If they are different we will modify the chain in DST with
1930    high probability so make a copy of DST. */
1931 if (shared_var_p (dst, set->vars))
1933 for (node = src->var_part[i].loc_chain,
1934 node2 = dst->var_part[j].loc_chain; node && node2;
1935 node = node->next, node2 = node2->next)
1937 if (!((REG_P (node2->loc)
1938 && REG_P (node->loc)
1939 && REGNO (node2->loc) == REGNO (node->loc))
1940 || rtx_equal_p (node2->loc, node->loc)))
1942 if (node2->init < node->init)
1943 node2->init = node->init;
1949 dstp = unshare_variable (set, dstp, dst,
1950 VAR_INIT_STATUS_UNKNOWN);
1951 dst = (variable)*dstp;
/* Count chain lengths (assignments to src_l/dst_l are on missing
   lines).  */
1956 for (node = src->var_part[i].loc_chain; node; node = node->next)
1959 for (node = dst->var_part[j].loc_chain; node; node = node->next)
1964 /* The most common case, much simpler, no qsort is needed. */
1965 location_chain dstnode = dst->var_part[j].loc_chain;
1966 dst->var_part[k].loc_chain = dstnode;
1967 dst->var_part[k].offset = dst->var_part[j].offset;
1969 for (node = src->var_part[i].loc_chain; node; node = node->next)
1970 if (!((REG_P (dstnode->loc)
1971 && REG_P (node->loc)
1972 && REGNO (dstnode->loc) == REGNO (node->loc))
1973 || rtx_equal_p (dstnode->loc, node->loc)))
1975 location_chain new_node;
1977 /* Copy the location from SRC. */
1978 new_node = (location_chain) pool_alloc (loc_chain_pool);
1979 new_node->loc = node->loc;
1980 new_node->init = node->init;
1981 if (!node->set_src || MEM_P (node->set_src))
1982 new_node->set_src = NULL;
1984 new_node->set_src = node->set_src;
1985 node2->next = new_node;
/* General case: build the VUI scratch array, then sort by pos.  */
1992 if (src_l + dst_l > vui_allocated)
1994 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
1995 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2000 /* Fill in the locations from DST. */
2001 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2002 node = node->next, jj++)
2005 vui[jj].pos_dst = jj;
2007 /* Pos plus value larger than a sum of 2 valid positions. */
2008 vui[jj].pos = jj + src_l + dst_l;
2011 /* Fill in the locations from SRC. */
2013 for (node = src->var_part[i].loc_chain, ii = 0; node;
2014 node = node->next, ii++)
2016 /* Find location from NODE. */
2017 for (jj = 0; jj < dst_l; jj++)
2019 if ((REG_P (vui[jj].lc->loc)
2020 && REG_P (node->loc)
2021 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2022 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2024 vui[jj].pos = jj + ii;
2028 if (jj >= dst_l) /* The location has not been found. */
2030 location_chain new_node;
2032 /* Copy the location from SRC. */
2033 new_node = (location_chain) pool_alloc (loc_chain_pool);
2034 new_node->loc = node->loc;
2035 new_node->init = node->init;
2036 if (!node->set_src || MEM_P (node->set_src))
2037 new_node->set_src = NULL;
2039 new_node->set_src = node->set_src;
2040 vui[n].lc = new_node;
2041 vui[n].pos_dst = src_l + dst_l;
2042 vui[n].pos = ii + src_l + dst_l;
2049 /* Special case still very common case. For dst_l == 2
2050    all entries dst_l ... n-1 are sorted, with for i >= dst_l
2051    vui[i].pos == i + src_l + dst_l. */
2052 if (vui[0].pos > vui[1].pos)
2054 /* Order should be 1, 0, 2... */
2055 dst->var_part[k].loc_chain = vui[1].lc;
2056 vui[1].lc->next = vui[0].lc;
2059 vui[0].lc->next = vui[2].lc;
2060 vui[n - 1].lc->next = NULL;
2063 vui[0].lc->next = NULL;
2068 dst->var_part[k].loc_chain = vui[0].lc;
2069 if (n >= 3 && vui[2].pos < vui[1].pos)
2071 /* Order should be 0, 2, 1, 3... */
2072 vui[0].lc->next = vui[2].lc;
2073 vui[2].lc->next = vui[1].lc;
2076 vui[1].lc->next = vui[3].lc;
2077 vui[n - 1].lc->next = NULL;
2080 vui[1].lc->next = NULL;
2085 /* Order should be 0, 1, 2... */
2087 vui[n - 1].lc->next = NULL;
2090 for (; ii < n; ii++)
2091 vui[ii - 1].lc->next = vui[ii].lc;
2095 qsort (vui, n, sizeof (struct variable_union_info),
2096 variable_union_info_cmp_pos);
2098 /* Reconnect the nodes in sorted order. */
2099 for (ii = 1; ii < n; ii++)
2100 vui[ii - 1].lc->next = vui[ii].lc;
2101 vui[n - 1].lc->next = NULL;
2102 dst->var_part[k].loc_chain = vui[0].lc;
2105 dst->var_part[k].offset = dst->var_part[j].offset;
/* Offsets present in only one of SRC/DST are copied through.  */
2110 else if ((i >= 0 && j >= 0
2111 && src->var_part[i].offset < dst->var_part[j].offset)
2114 dst->var_part[k] = dst->var_part[j];
2117 else if ((i >= 0 && j >= 0
2118 && src->var_part[i].offset > dst->var_part[j].offset)
2121 location_chain *nextp;
2123 /* Copy the chain from SRC. */
2124 nextp = &dst->var_part[k].loc_chain;
2125 for (node = src->var_part[i].loc_chain; node; node = node->next)
2127 location_chain new_lc;
2129 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2130 new_lc->next = NULL;
2131 new_lc->init = node->init;
2132 if (!node->set_src || MEM_P (node->set_src))
2133 new_lc->set_src = NULL;
2135 new_lc->set_src = node->set_src;
2136 new_lc->loc = node->loc;
2139 nextp = &new_lc->next;
2142 dst->var_part[k].offset = src->var_part[i].offset;
2145 dst->var_part[k].cur_loc = NULL;
/* Propagate the stronger init status for locations in both sets.  */
2148 if (flag_var_tracking_uninit)
2149 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2151 location_chain node, node2;
2152 for (node = src->var_part[i].loc_chain; node; node = node->next)
2153 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2154 if (rtx_equal_p (node->loc, node2->loc))
2156 if (node->init > node2->init)
2157 node2->init = node->init;
2161 /* Continue traversing the hash table. */
2165 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2168 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2172 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2173 attrs_list_union (&dst->regs[i], src->regs[i]);
2175 if (dst->vars == empty_shared_hash)
2177 shared_hash_destroy (dst->vars);
2178 dst->vars = shared_hash_copy (src->vars);
2181 htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
2184 /* Whether the value is currently being expanded. */
/* NOTE(review): these flag macros reuse existing rtx/tree bits: the rtx
   `used' and `frame_related' flags and TREE_VISITED respectively.  */
2185 #define VALUE_RECURSED_INTO(x) \
2186   (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2187 /* Whether the value is in changed_variables hash table. */
2188 #define VALUE_CHANGED(x) \
2189   (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2190 /* Whether the decl is in changed_variables hash table. */
2191 #define DECL_CHANGED(x) TREE_VISITED (x)
2193 /* Record that DV has been added into resp. removed from changed_variables
2197 set_dv_changed (decl_or_value dv, bool newv)
2199 if (dv_is_value_p (dv))
2200 VALUE_CHANGED (dv_as_value (dv)) = newv;
2202 DECL_CHANGED (dv_as_decl (dv)) = newv;
2205 /* Return true if DV is present in changed_variables hash table. */
2208 dv_changed_p (decl_or_value dv)
2210 return (dv_is_value_p (dv)
2211 ? VALUE_CHANGED (dv_as_value (dv))
2212 : DECL_CHANGED (dv_as_decl (dv)));
2215 /* Return a location list node whose loc is rtx_equal to LOC, in the
2216 location list of a one-part variable or value VAR, or in that of
2217 any values recursively mentioned in the location lists. */
2219 static location_chain
2220 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2222 location_chain node;
2227 gcc_assert (dv_onepart_p (var->dv));
2229 if (!var->n_var_parts)
2232 gcc_assert (var->var_part[0].offset == 0);
2234 for (node = var->var_part[0].loc_chain; node; node = node->next)
2235 if (rtx_equal_p (loc, node->loc))
2237 else if (GET_CODE (node->loc) == VALUE
2238 && !VALUE_RECURSED_INTO (node->loc))
2240 decl_or_value dv = dv_from_value (node->loc);
2241 variable var = (variable)
2242 htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2246 location_chain where;
2247 VALUE_RECURSED_INTO (node->loc) = true;
2248 if ((where = find_loc_in_1pdv (loc, var, vars)))
2250 VALUE_RECURSED_INTO (node->loc) = false;
2253 VALUE_RECURSED_INTO (node->loc) = false;
2260 /* Hash table iteration argument passed to variable_merge. */
/* NOTE(review): the struct head (presumably `struct dfset_merge {`)
   and the member declarations for the three set pointers are on
   missing lines; comments only.  */
2263 /* The set in which the merge is to be inserted. */
2265 /* The set that we're iterating in. */
2267 /* The set that may contain the other dv we are to merge with. */
2269 /* Number of onepart dvs in src. */
2270 int src_onepart_cnt;
2273 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
2274 loc_cmp order, and it is maintained as such. */
2277 insert_into_intersection (location_chain *nodep, rtx loc,
2278 enum var_init_status status)
2280 location_chain node;
2283 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2284 if ((r = loc_cmp (node->loc, loc)) == 0)
2286 node->init = MIN (node->init, status);
2292 node = (location_chain) pool_alloc (loc_chain_pool);
2295 node->set_src = NULL;
2296 node->init = status;
2297 node->next = *nodep;
2301 /* Insert in DEST the intersection the locations present in both
2302    S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2303    variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
/* NOTE(review): gap-damaged extract -- function head, braces and some
   guards are on missing lines; comments only.  */
2307 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2308 location_chain s1node, variable s2var)
2310 dataflow_set *s1set = dsm->cur;
2311 dataflow_set *s2set = dsm->src;
2312 location_chain found;
2314 for (; s1node; s1node = s1node->next)
/* Skip VAL itself; it is the value being merged.  */
2316 if (s1node->loc == val)
2319 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2320 shared_hash_htab (s2set->vars))))
2322 insert_into_intersection (dest, s1node->loc,
2323 MIN (s1node->init, found->init));
/* Recurse through equivalent VALUEs found in S1SET, guarding
   against cycles with VALUE_RECURSED_INTO.  */
2327 if (GET_CODE (s1node->loc) == VALUE
2328 && !VALUE_RECURSED_INTO (s1node->loc))
2330 decl_or_value dv = dv_from_value (s1node->loc);
2331 variable svar = shared_hash_find (s1set->vars, dv);
2334 if (svar->n_var_parts == 1)
2336 VALUE_RECURSED_INTO (s1node->loc) = true;
2337 intersect_loc_chains (val, dest, dsm,
2338 svar->var_part[0].loc_chain,
2340 VALUE_RECURSED_INTO (s1node->loc) = false;
2345 /* ??? if the location is equivalent to any location in src,
2346    searched recursively
2348    add to dst the values needed to represent the equivalence
2350    telling whether locations S is equivalent to another dv's
2353    for each location D in the list
2355    if S and D satisfy rtx_equal_p, then it is present
2357    else if D is a value, recurse without cycles
2359    else if S and D have the same CODE and MODE
2361    for each operand oS and the corresponding oD
2363    if oS and oD are not equivalent, then S an D are not equivalent
2365    else if they are RTX vectors
2367    if any vector oS element is not equivalent to its respective oD,
2368    then S and D are not equivalent
2376 /* Return -1 if X should be before Y in a location list for a 1-part
2377    variable, 1 if Y should be before X, and 0 if they're equivalent
2378    and should not appear in the list. */
/* NOTE(review): gap-damaged extract -- the REG/MEM guards, many
   `return' statements, switch case labels and braces are on missing
   lines (the embedded numbering jumps); comments only.  */
2381 loc_cmp (rtx x, rtx y)
2384 RTX_CODE code = GET_CODE (x);
/* Registers order by REGNO (same-mode requirement asserted).  */
2394 gcc_assert (GET_MODE (x) == GET_MODE (y));
2395 if (REGNO (x) == REGNO (y))
2397 else if (REGNO (x) < REGNO (y))
/* MEMs compare by address.  */
2410 gcc_assert (GET_MODE (x) == GET_MODE (y));
2411 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
/* VALUEs sort before non-VALUEs; among VALUEs the canonical order
   (canon_value_cmp) decides.  */
2417 if (GET_CODE (x) == VALUE)
2419 if (GET_CODE (y) != VALUE)
2421 /* Don't assert the modes are the same, that is true only
2422    when not recursing. (subreg:QI (value:SI 1:1) 0)
2423    and (subreg:QI (value:DI 2:2) 0) can be compared,
2424    even when the modes are different. */
2425 if (canon_value_cmp (x, y))
2431 if (GET_CODE (y) == VALUE)
/* Otherwise order first by rtx code, then by mode, then operands.  */
2434 if (GET_CODE (x) == GET_CODE (y))
2435 /* Compare operands below. */;
2436 else if (GET_CODE (x) < GET_CODE (y))
2441 gcc_assert (GET_MODE (x) == GET_MODE (y));
2443 if (GET_CODE (x) == DEBUG_EXPR)
2445 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2446 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
2448 #ifdef ENABLE_CHECKING
2449 gcc_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2450 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
/* Generic operand-by-operand comparison driven by the rtx format.  */
2455 fmt = GET_RTX_FORMAT (code);
2456 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2460 if (XWINT (x, i) == XWINT (y, i))
2462 else if (XWINT (x, i) < XWINT (y, i))
2469 if (XINT (x, i) == XINT (y, i))
2471 else if (XINT (x, i) < XINT (y, i))
2478 /* Compare the vector length first. */
2479 if (XVECLEN (x, i) == XVECLEN (y, i))
2480 /* Compare the vectors elements. */;
2481 else if (XVECLEN (x, i) < XVECLEN (y, i))
2486 for (j = 0; j < XVECLEN (x, i); j++)
2487 if ((r = loc_cmp (XVECEXP (x, i, j),
2488 XVECEXP (y, i, j))))
2493 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
2499 if (XSTR (x, i) == XSTR (y, i))
2505 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2513 /* These are just backpointers, so they don't matter. */
2520 /* It is believed that rtx's at this level will never
2521    contain anything but integers and other rtx's,
2522    except for within LABEL_REFs and SYMBOL_REFs. */
2530 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2531    from VALUE to DVP. */
/* NOTE(review): gap-damaged extract -- function head, braces, early
   return and refcount updates are on missing lines; comments only.  */
2534 add_value_chain (rtx *loc, void *dvp)
2536 decl_or_value dv, ldv;
2537 value_chain vc, nvc;
2540 if (GET_CODE (*loc) == VALUE)
2541 ldv = dv_from_value (*loc);
2542 else if (GET_CODE (*loc) == DEBUG_EXPR)
2543 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
/* Self-references create no chain entry.  */
2547 if (dv_as_opaque (ldv) == dvp)
2550 dv = (decl_or_value) dvp;
2551 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
/* First backlink for LDV: allocate the list head.  */
2555 vc = (value_chain) pool_alloc (value_chain_pool);
2559 *slot = (void *) vc;
/* Otherwise look for an existing entry for DV (bump its refcount on
   a missing line) before prepending a new node after the head.  */
2563 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2564 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
2572 vc = (value_chain) *slot;
2573 nvc = (value_chain) pool_alloc (value_chain_pool);
2575 nvc->next = vc->next;
2581 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2582 from those VALUEs to DVP. */
2585 add_value_chains (decl_or_value dv, rtx loc)
2587 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
2589 add_value_chain (&loc, dv_as_opaque (dv));
2595 loc = XEXP (loc, 0);
2596 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
2599 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those
2603 add_cselib_value_chains (decl_or_value dv)
2605 struct elt_loc_list *l;
2607 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2608 for_each_rtx (&l->loc, add_value_chain, dv_as_opaque (dv));
2611 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
2612    from VALUE to DVP. */
/* NOTE(review): gap-damaged extract -- function head, braces, early
   return and final return are on missing lines; comments only.  */
2615 remove_value_chain (rtx *loc, void *dvp)
2617 decl_or_value dv, ldv;
2621 if (GET_CODE (*loc) == VALUE)
2622 ldv = dv_from_value (*loc);
2623 else if (GET_CODE (*loc) == DEBUG_EXPR)
2624 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
/* Self-references have no chain entry to remove.  */
2628 if (dv_as_opaque (ldv) == dvp)
2631 dv = (decl_or_value) dvp;
2632 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
/* Walk the chain one node ahead so VC->next can be unlinked; free the
   head and clear the hash slot once the list is empty.  */
2634 for (vc = (value_chain) *slot; vc->next; vc = vc->next)
2635 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
2637 value_chain dvc = vc->next;
2638 gcc_assert (dvc->refcount > 0);
2639 if (--dvc->refcount == 0)
2641 vc->next = dvc->next;
2642 pool_free (value_chain_pool, dvc);
2643 if (vc->next == NULL && vc == (value_chain) *slot)
2645 pool_free (value_chain_pool, vc);
2646 htab_clear_slot (value_chains, slot);
2654 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
2655 from those VALUEs to DVP. */
/* Mirror of add_value_chains: same traversal, dropping backlinks.  */
2658 remove_value_chains (decl_or_value dv, rtx loc)
2660 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
2662 remove_value_chain (&loc, dv_as_opaque (dv));
/* Sub-rtx walk (MEM-address guard elided from this listing).  */
2668 loc = XEXP (loc, 0);
2669 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
2673 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, remove backlinks from those
/* Inverse of add_cselib_value_chains over the same cselib loc list.  */
2677 remove_cselib_value_chains (decl_or_value dv)
2679 struct elt_loc_list *l;
2681 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2682 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
2685 /* Check the order of entries in one-part variables. */
/* htab_traverse callback: assert invariants on the variable in *SLOT and,
   for one-part variables, that the location chain is strictly sorted.  */
2688 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
2690 variable var = (variable) *slot;
2691 decl_or_value dv = var->dv;
2692 location_chain node, next;
2694 #ifdef ENABLE_RTL_CHECKING
/* NOTE(review): the loop iterates i over n_var_parts but always checks
   var_part[0] -- possibly intended var_part[i]; confirm upstream.  */
2696 for (i = 0; i < var->n_var_parts; i++)
2697 gcc_assert (var->var_part[0].cur_loc == NULL);
2698 gcc_assert (!var->cur_loc_changed && !var->in_changed_variables);
2701 if (!dv_onepart_p (dv))
2704 gcc_assert (var->n_var_parts == 1);
2705 node = var->var_part[0].loc_chain;
/* Strictly ascending by loc_cmp; equal entries would yield 0.  */
2708 while ((next = node->next))
2710 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
2718 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
2719 more likely to be chosen as canonical for an equivalence set.
2720 Ensure less likely values can reach more likely neighbors, making
2721 the connections bidirectional. */
/* htab_traverse callback over a dataflow set's variables; only VALUE
   entries are processed.  Lines are elided in this listing.  */
2724 canonicalize_values_mark (void **slot, void *data)
2726 dataflow_set *set = (dataflow_set *)data;
2727 variable var = (variable) *slot;
2728 decl_or_value dv = var->dv;
2730 location_chain node;
2732 if (!dv_is_value_p (dv))
2735 gcc_assert (var->n_var_parts == 1);
2737 val = dv_as_value (dv);
2739 for (node = var->var_part[0].loc_chain; node; node = node->next)
2740 if (GET_CODE (node->loc) == VALUE)
/* A more canonical neighbor exists: mark VAL for revisiting.  */
2742 if (canon_value_cmp (node->loc, val))
2743 VALUE_RECURSED_INTO (val) = true;
/* Otherwise make the link bidirectional: record VAL in the less
   canonical neighbor's entry and mark that neighbor.  */
2746 decl_or_value odv = dv_from_value (node->loc);
2747 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
2749 oslot = set_slot_part (set, val, oslot, odv, 0,
2750 node->init, NULL_RTX);
2752 VALUE_RECURSED_INTO (node->loc) = true;
2759 /* Remove redundant entries from equivalence lists in onepart
2760 variables, canonicalizing equivalence sets into star shapes. */
/* htab_traverse callback.  Heavily elided in this listing; control flow
   (labels such as restart_with_cval, several returns) is not visible, so
   the comments below are fragment-level only.  */
2763 canonicalize_values_star (void **slot, void *data)
2765 dataflow_set *set = (dataflow_set *)data;
2766 variable var = (variable) *slot;
2767 decl_or_value dv = var->dv;
2768 location_chain node;
2775 if (!dv_onepart_p (dv))
2778 gcc_assert (var->n_var_parts == 1);
2780 if (dv_is_value_p (dv))
2782 cval = dv_as_value (dv);
/* Unmarked values need no canonicalization work here.  */
2783 if (!VALUE_RECURSED_INTO (cval))
2785 VALUE_RECURSED_INTO (cval) = false;
2795 gcc_assert (var->n_var_parts == 1);
/* Scan the loc chain for a marked, more-canonical neighbor VALUE.  */
2797 for (node = var->var_part[0].loc_chain; node; node = node->next)
2798 if (GET_CODE (node->loc) == VALUE)
2801 if (VALUE_RECURSED_INTO (node->loc))
2803 if (canon_value_cmp (node->loc, cval))
2812 if (!has_marks || dv_is_decl_p (dv))
2815 /* Keep it marked so that we revisit it, either after visiting a
2816 child node, or after visiting a new parent that might be
2818 VALUE_RECURSED_INTO (val) = true;
2820 for (node = var->var_part[0].loc_chain; node; node = node->next)
2821 if (GET_CODE (node->loc) == VALUE
2822 && VALUE_RECURSED_INTO (node->loc))
/* Recurse into the chosen canonical value CVAL.  */
2826 VALUE_RECURSED_INTO (cval) = false;
2827 dv = dv_from_value (cval);
2828 slot = shared_hash_find_slot_noinsert (set->vars, dv);
2831 gcc_assert (dv_is_decl_p (var->dv));
2832 /* The canonical value was reset and dropped.
2834 clobber_variable_part (set, NULL, var->dv, 0, NULL);
2837 var = (variable)*slot;
2838 gcc_assert (dv_is_value_p (var->dv));
2839 if (var->n_var_parts == 0)
2841 gcc_assert (var->n_var_parts == 1);
2845 VALUE_RECURSED_INTO (val) = false;
2850 /* Push values to the canonical one. */
2851 cdv = dv_from_value (cval);
2852 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
/* Move every non-canonical location in VAL's chain onto CVAL.  */
2854 for (node = var->var_part[0].loc_chain; node; node = node->next)
2855 if (node->loc != cval)
2857 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
2858 node->init, NULL_RTX);
2859 if (GET_CODE (node->loc) == VALUE)
2861 decl_or_value ndv = dv_from_value (node->loc);
2863 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
2866 if (canon_value_cmp (node->loc, val))
2868 /* If it could have been a local minimum, it's not any more,
2869 since it's now neighbor to cval, so it may have to push
2870 to it. Conversely, if it wouldn't have prevailed over
2871 val, then whatever mark it has is fine: if it was to
2872 push, it will now push to a more canonical node, but if
2873 it wasn't, then it has already pushed any values it might
2875 VALUE_RECURSED_INTO (node->loc) = true;
2876 /* Make sure we visit node->loc by ensuring we cval is
2878 VALUE_RECURSED_INTO (cval) = true;
2880 else if (!VALUE_RECURSED_INTO (node->loc))
2881 /* If we have no need to "recurse" into this node, it's
2882 already "canonicalized", so drop the link to the old
2884 clobber_variable_part (set, cval, ndv, 0, NULL);
2886 else if (GET_CODE (node->loc) == REG)
2888 attrs list = set->regs[REGNO (node->loc)], *listp;
2890 /* Change an existing attribute referring to dv so that it
2891 refers to cdv, removing any duplicate this might
2892 introduce, and checking that no previous duplicates
2893 existed, all in a single pass. */
2897 if (list->offset == 0
2898 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2899 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
/* Case 1: attr refers to DV -- retarget it to CDV (elided) and
   purge/forbid any later CDV duplicate.  */
2906 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2909 for (listp = &list->next; (list = *listp); listp = &list->next)
2914 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2916 *listp = list->next;
2917 pool_free (attrs_pool, list);
2922 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
/* Case 2: attr already refers to CDV -- purge any later DV entry.  */
2925 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2927 for (listp = &list->next; (list = *listp); listp = &list->next)
2932 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2934 *listp = list->next;
2935 pool_free (attrs_pool, list);
2940 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
2949 if (list->offset == 0
2950 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2951 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
/* Record VAL itself as a location of CVAL, then reduce VAL's own
   entry to the single canonical location CVAL (star shape).  */
2961 cslot = set_slot_part (set, val, cslot, cdv, 0,
2962 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
2964 slot = clobber_slot_part (set, cval, slot, 0, NULL);
2966 /* Variable may have been unshared. */
2967 var = (variable)*slot;
2968 gcc_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
2969 && var->var_part[0].loc_chain->next == NULL);
/* CVAL got marked while pushing: reprocess it before returning.  */
2971 if (VALUE_RECURSED_INTO (cval))
2972 goto restart_with_cval;
2977 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
2978 corresponding entry in DSM->src. Multi-part variables are combined
2979 with variable_union, whereas onepart dvs are combined with
/* htab_traverse callback for dataflow_set_merge.  This listing elides
   many lines (returns, braces, else-arms); comments below are hedged
   to the visible fragment only.  */
2983 variable_merge_over_cur (void **s1slot, void *data)
2985 struct dfset_merge *dsm = (struct dfset_merge *)data;
2986 dataflow_set *dst = dsm->dst;
2988 variable s1var = (variable) *s1slot;
2989 variable s2var, dvar = NULL;
2990 decl_or_value dv = s1var->dv;
2991 bool onepart = dv_onepart_p (dv);
2994 location_chain node, *nodep;
2996 /* If the incoming onepart variable has an empty location list, then
2997 the intersection will be just as empty. For other variables,
2998 it's always union. */
2999 gcc_assert (s1var->n_var_parts);
3000 gcc_assert (s1var->var_part[0].loc_chain);
/* Multi-part variables take the plain union path.  */
3003 return variable_union (s1slot, dst);
3005 gcc_assert (s1var->n_var_parts == 1);
3006 gcc_assert (s1var->var_part[0].offset == 0);
3008 dvhash = dv_htab_hash (dv);
3009 if (dv_is_value_p (dv))
3010 val = dv_as_value (dv);
/* The one-part dv must also exist in the other input set, or the
   intersection is empty and sharing is broken.  */
3014 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3017 dst_can_be_shared = false;
3021 dsm->src_onepart_cnt--;
3022 gcc_assert (s2var->var_part[0].loc_chain);
3023 gcc_assert (s2var->n_var_parts == 1);
3024 gcc_assert (s2var->var_part[0].offset == 0);
3026 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3029 dvar = (variable)*dstslot;
3030 gcc_assert (dvar->refcount == 1);
3031 gcc_assert (dvar->n_var_parts == 1);
3032 gcc_assert (dvar->var_part[0].offset == 0);
3033 nodep = &dvar->var_part[0].loc_chain;
/* Fast path: if the two inputs agree, share s2var directly in DST.  */
3041 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3043 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3045 *dstslot = dvar = s2var;
3050 dst_can_be_shared = false;
/* Slow path: intersect the two location chains into *nodep.  */
3052 intersect_loc_chains (val, nodep, dsm,
3053 s1var->var_part[0].loc_chain, s2var);
/* No DST entry yet: allocate a fresh one-part variable for it.  */
3059 dvar = (variable) pool_alloc (dv_pool (dv));
3062 dvar->n_var_parts = 1;
3063 dvar->cur_loc_changed = false;
3064 dvar->in_changed_variables = false;
3065 dvar->var_part[0].offset = 0;
3066 dvar->var_part[0].loc_chain = node;
3067 dvar->var_part[0].cur_loc = NULL;
3070 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3072 gcc_assert (!*dstslot);
/* Register handling: make sure each REG location is backed by an
   attrs entry in DST, preferring an existing VALUE binding.  */
3080 nodep = &dvar->var_part[0].loc_chain;
3081 while ((node = *nodep))
3083 location_chain *nextp = &node->next;
3085 if (GET_CODE (node->loc) == REG)
3089 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3090 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3091 && dv_is_value_p (list->dv))
3095 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3097 /* If this value became canonical for another value that had
3098 this register, we want to leave it alone. */
3099 else if (dv_as_value (list->dv) != val)
3101 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3103 node->init, NULL_RTX);
3104 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3106 /* Since nextp points into the removed node, we can't
3107 use it. The pointer to the next node moved to nodep.
3108 However, if the variable we're walking is unshared
3109 during our walk, we'll keep walking the location list
3110 of the previously-shared variable, in which case the
3111 node won't have been removed, and we'll want to skip
3112 it. That's why we test *nodep here. */
3118 /* Canonicalization puts registers first, so we don't have to
/* Re-read DVAR: set/delete_slot_part may have unshared it.  */
3124 if (dvar != (variable)*dstslot)
3125 dvar = (variable)*dstslot;
3126 nodep = &dvar->var_part[0].loc_chain;
3130 /* Mark all referenced nodes for canonicalization, and make sure
3131 we have mutual equivalence links. */
3132 VALUE_RECURSED_INTO (val) = true;
3133 for (node = *nodep; node; node = node->next)
3134 if (GET_CODE (node->loc) == VALUE)
3136 VALUE_RECURSED_INTO (node->loc) = true;
3137 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3138 node->init, NULL, INSERT);
3141 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3142 gcc_assert (*dstslot == dvar);
3143 canonicalize_values_star (dstslot, dst);
3144 #ifdef ENABLE_CHECKING
3146 == shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash));
3148 dvar = (variable)*dstslot;
/* Decl case: detect mixed VALUE/non-VALUE chains, which must also be
   canonicalized.  */
3152 bool has_value = false, has_other = false;
3154 /* If we have one value and anything else, we're going to
3155 canonicalize this, so make sure all values have an entry in
3156 the table and are marked for canonicalization. */
3157 for (node = *nodep; node; node = node->next)
3159 if (GET_CODE (node->loc) == VALUE)
3161 /* If this was marked during register canonicalization,
3162 we know we have to canonicalize values. */
3177 if (has_value && has_other)
3179 for (node = *nodep; node; node = node->next)
3181 if (GET_CODE (node->loc) == VALUE)
3183 decl_or_value dv = dv_from_value (node->loc);
3186 if (shared_hash_shared (dst->vars))
3187 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3189 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
/* Ensure each referenced VALUE has a (possibly empty) entry.  */
3193 variable var = (variable) pool_alloc (dv_pool (dv));
3196 var->n_var_parts = 1;
3197 var->cur_loc_changed = false;
3198 var->in_changed_variables = false;
3199 var->var_part[0].offset = 0;
3200 var->var_part[0].loc_chain = NULL;
3201 var->var_part[0].cur_loc = NULL;
3205 VALUE_RECURSED_INTO (node->loc) = true;
3209 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3210 gcc_assert (*dstslot == dvar);
3211 canonicalize_values_star (dstslot, dst);
3212 #ifdef ENABLE_CHECKING
3214 == shared_hash_find_slot_noinsert_1 (dst->vars,
3217 dvar = (variable)*dstslot;
/* If the merged result equals one of the inputs, share it instead of
   keeping a private copy.  */
3221 if (!onepart_variable_different_p (dvar, s2var))
3223 variable_htab_free (dvar);
3224 *dstslot = dvar = s2var;
3227 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3229 variable_htab_free (dvar);
3230 *dstslot = dvar = s1var;
3232 dst_can_be_shared = false;
3235 dst_can_be_shared = false;
3240 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
3241 multi-part variable. Unions of multi-part variables and
3242 intersections of one-part ones will be handled in
3243 variable_merge_over_cur(). */
3246 variable_merge_over_src (void **s2slot, void *data)
3248 struct dfset_merge *dsm = (struct dfset_merge *)data;
3249 dataflow_set *dst = dsm->dst;
3250 variable s2var = (variable) *s2slot;
3251 decl_or_value dv = s2var->dv;
3252 bool onepart = dv_onepart_p (dv);
/* Multi-part path: copy into DST (sharing logic elided here).  */
3256 void **dstp = shared_hash_find_slot (dst->vars, dv);
/* One-part dvs are only counted; variable_merge_over_cur intersects
   them and decrements this counter for dvs present in both sets.  */
3262 dsm->src_onepart_cnt++;
3266 /* Combine dataflow set information from SRC2 into DST, using PDST
3267 to carry over information across passes. */
3270 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
/* CUR snapshots the old DST contents; DST is then rebuilt as the merge
   of CUR (src1) and SRC2.  */
3272 dataflow_set cur = *dst;
3273 dataflow_set *src1 = &cur;
3274 struct dfset_merge dsm;
3276 size_t src1_elems, src2_elems;
3278 src1_elems = htab_elements (shared_hash_htab (src1->vars));
3279 src2_elems = htab_elements (shared_hash_htab (src2->vars));
3280 dataflow_set_init (dst);
3281 dst->stack_adjust = cur.stack_adjust;
/* Replace the freshly-initialized vars with an unshared table sized
   for the larger input.  */
3282 shared_hash_destroy (dst->vars);
3283 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3284 dst->vars->refcount = 1;
3286 = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
3287 variable_htab_eq, variable_htab_free);
3289 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3290 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
3295 dsm.src_onepart_cnt = 0;
/* First union multi-part vars from src, then intersect/union from cur
   (see variable_merge_over_src/_cur).  */
3297 htab_traverse (shared_hash_htab (dsm.src->vars), variable_merge_over_src,
3299 htab_traverse (shared_hash_htab (dsm.cur->vars), variable_merge_over_cur,
/* One-part dvs present only in src break result sharing.  */
3302 if (dsm.src_onepart_cnt)
3303 dst_can_be_shared = false;
3305 dataflow_set_destroy (src1);
3308 /* Mark register equivalences. */
/* For each hard register, pick per-mode canonical VALUEs among the
   attrs bound to it, link the others to the canonical one, and then
   canonicalize the affected entries.  Lines elided in this listing.  */
3311 dataflow_set_equiv_regs (dataflow_set *set)
3316 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3318 rtx canon[NUM_MACHINE_MODES];
3320 memset (canon, 0, sizeof (canon));
/* Pass 1: choose the most canonical VALUE per machine mode.  */
3322 for (list = set->regs[i]; list; list = list->next)
3323 if (list->offset == 0 && dv_is_value_p (list->dv))
3325 rtx val = dv_as_value (list->dv);
3326 rtx *cvalp = &canon[(int)GET_MODE (val)];
3329 if (canon_value_cmp (val, cval))
/* Pass 2: link non-canonical entries to their canonical VALUE and
   mark both sides for canonicalization.  */
3333 for (list = set->regs[i]; list; list = list->next)
3334 if (list->offset == 0 && dv_onepart_p (list->dv))
3336 rtx cval = canon[(int)GET_MODE (list->loc)];
3341 if (dv_is_value_p (list->dv))
3343 rtx val = dv_as_value (list->dv);
3348 VALUE_RECURSED_INTO (val) = true;
3349 set_variable_part (set, val, dv_from_value (cval), 0,
3350 VAR_INIT_STATUS_INITIALIZED,
3354 VALUE_RECURSED_INTO (cval) = true;
3355 set_variable_part (set, cval, list->dv, 0,
3356 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
/* Pass 3: run star-canonicalization on still-marked values.  The
   "list ? ... : listp" step guards against LIST being freed during
   canonicalization.  */
3359 for (listp = &set->regs[i]; (list = *listp);
3360 listp = list ? &list->next : listp)
3361 if (list->offset == 0 && dv_onepart_p (list->dv))
3363 rtx cval = canon[(int)GET_MODE (list->loc)];
3369 if (dv_is_value_p (list->dv))
3371 rtx val = dv_as_value (list->dv);
3372 if (!VALUE_RECURSED_INTO (val))
3376 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3377 canonicalize_values_star (slot, set);
3384 /* Remove any redundant values in the location list of VAR, which must
3385 be unshared and 1-part. */
3388 remove_duplicate_values (variable var)
3390 location_chain node, *nodep;
3392 gcc_assert (dv_onepart_p (var->dv));
3393 gcc_assert (var->n_var_parts == 1);
3394 gcc_assert (var->refcount == 1);
/* First pass: use VALUE_RECURSED_INTO as a "seen" bit; a marked VALUE
   encountered again is a duplicate and is unlinked.  */
3396 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3398 if (GET_CODE (node->loc) == VALUE)
3400 if (VALUE_RECURSED_INTO (node->loc))
3402 /* Remove duplicate value node. */
3403 *nodep = node->next;
3404 pool_free (loc_chain_pool, node);
3408 VALUE_RECURSED_INTO (node->loc) = true;
3410 nodep = &node->next;
/* Second pass: clear the "seen" bits again.  */
3413 for (node = var->var_part[0].loc_chain; node; node = node->next)
3414 if (GET_CODE (node->loc) == VALUE)
3416 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3417 VALUE_RECURSED_INTO (node->loc) = false;
3422 /* Hash table iteration argument passed to variable_post_merge. */
3423 struct dfset_post_merge
3425 /* The new input set for the current block. */
/* (The member declaration for that set is elided from this listing.)  */
3427 /* Pointer to the permanent input set for the current block, or
3429 dataflow_set **permp;
3432 /* Create values for incoming expressions associated with one-part
3433 variables that don't have value numbers for them. */
/* htab_traverse callback.  Heavily elided listing; comments below are
   limited to the visible fragment.  */
3436 variable_post_merge_new_vals (void **slot, void *info)
3438 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3439 dataflow_set *set = dfpm->set;
3440 variable var = (variable)*slot;
3441 location_chain node;
3443 if (!dv_onepart_p (var->dv) || !var->n_var_parts)
3446 gcc_assert (var->n_var_parts == 1);
3448 if (dv_is_decl_p (var->dv))
3450 bool check_dupes = false;
3453 for (node = var->var_part[0].loc_chain; node; node = node->next)
3455 if (GET_CODE (node->loc) == VALUE)
3456 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
3457 else if (GET_CODE (node->loc) == REG)
3459 attrs att, *attp, *curp = NULL;
/* Unshare before mutating a shared variable.  */
3461 if (var->refcount != 1)
3463 slot = unshare_variable (set, slot, var,
3464 VAR_INIT_STATUS_INITIALIZED);
3465 var = (variable)*slot;
/* Look for an existing VALUE (or this decl's own attr) already
   bound to this register in the matching mode.  */
3469 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3471 if (att->offset == 0
3472 && GET_MODE (att->loc) == GET_MODE (node->loc))
3474 if (dv_is_value_p (att->dv))
3476 rtx cval = dv_as_value (att->dv);
3481 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
3489 if ((*curp)->offset == 0
3490 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
3491 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
3494 curp = &(*curp)->next;
/* No value found: lazily create the permanent set and look for a
   previously created value for this register there.  */
3505 *dfpm->permp = XNEW (dataflow_set);
3506 dataflow_set_init (*dfpm->permp);
3509 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
3510 att; att = att->next)
3511 if (GET_MODE (att->loc) == GET_MODE (node->loc))
3513 gcc_assert (att->offset == 0);
3514 gcc_assert (dv_is_value_p (att->dv));
3515 val_reset (set, att->dv);
3522 cval = dv_as_value (cdv);
3526 /* Create a unique value to hold this register,
3527 that ought to be found and reused in
3528 subsequent rounds. */
3530 gcc_assert (!cselib_lookup (node->loc,
3531 GET_MODE (node->loc), 0));
3532 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
3533 cselib_preserve_value (v);
/* Invalidate so cselib does not treat this synthetic value as a
   live equivalence for the register elsewhere.  */
3534 cselib_invalidate_rtx (node->loc);
3536 cdv = dv_from_value (cval);
3539 "Created new value %u:%u for reg %i\n",
3540 v->uid, v->hash, REGNO (node->loc));
3543 var_reg_decl_set (*dfpm->permp, node->loc,
3544 VAR_INIT_STATUS_INITIALIZED,
3545 cdv, 0, NULL, INSERT);
3551 /* Remove attribute referring to the decl, which now
3552 uses the value for the register, already existing or
3553 to be added when we bring perm in. */
3556 pool_free (attrs_pool, att);
/* The rewrites above may have introduced duplicate VALUEs.  */
3561 remove_duplicate_values (var);
3567 /* Reset values in the permanent set that are not associated with the
3568 chosen expression. */
/* htab_traverse callback over the permanent set; PSLOT holds a value
   variable whose single location is a register.  */
3571 variable_post_merge_perm_vals (void **pslot, void *info)
3573 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3574 dataflow_set *set = dfpm->set;
3575 variable pvar = (variable)*pslot, var;
3576 location_chain pnode;
3580 gcc_assert (dv_is_value_p (pvar->dv));
3581 gcc_assert (pvar->n_var_parts == 1);
3582 pnode = pvar->var_part[0].loc_chain;
3584 gcc_assert (!pnode->next);
3585 gcc_assert (REG_P (pnode->loc));
/* DV is presumably the decl the permanent value stands for (its
   computation is elided); check it still maps to this register.  */
3589 var = shared_hash_find (set->vars, dv);
3592 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
3594 val_reset (set, dv);
/* Find any value already bound to the register in SET.  */
3597 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
3598 if (att->offset == 0
3599 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
3600 && dv_is_value_p (att->dv))
3603 /* If there is a value associated with this register already, create
3605 if (att && dv_as_value (att->dv) != dv_as_value (dv))
/* Link the two values as equivalent in both directions.  */
3607 rtx cval = dv_as_value (att->dv);
3608 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
3609 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
/* Otherwise register the permanent value's attr and union its entry
   into SET.  */
3614 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
3616 variable_union (pslot, set);
3622 /* Just checking stuff and registering register attributes for
/* Post-merge driver: create missing values, bring in the permanent
   set's values, then star-canonicalize everything.  */
3626 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
3628 struct dfset_post_merge dfpm;
3633 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
3636 htab_traverse (shared_hash_htab ((*permp)->vars),
3637 variable_post_merge_perm_vals, &dfpm);
3638 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
3641 /* Return a node whose loc is a MEM that refers to EXPR in the
3642 location list of a one-part variable or value VAR, or in that of
3643 any values recursively mentioned in the location lists. */
3645 static location_chain
3646 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
3648 location_chain node;
3651 location_chain where = NULL;
3656 gcc_assert (GET_CODE (val) == VALUE);
3658 gcc_assert (!VALUE_RECURSED_INTO (val));
3660 dv = dv_from_value (val);
3661 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
3666 gcc_assert (dv_onepart_p (var->dv));
3668 if (!var->n_var_parts)
3671 gcc_assert (var->var_part[0].offset == 0);
3673 VALUE_RECURSED_INTO (val) = true;
3675 for (node = var->var_part[0].loc_chain; node; node = node->next)
3676 if (MEM_P (node->loc) && MEM_EXPR (node->loc) == expr
3677 && MEM_OFFSET (node->loc) == 0)
3682 else if (GET_CODE (node->loc) == VALUE
3683 && !VALUE_RECURSED_INTO (node->loc)
3684 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
3687 VALUE_RECURSED_INTO (val) = false;
3692 /* Return TRUE if the value of MEM may vary across a call. */
3695 mem_dies_at_call (rtx mem)
3697 tree expr = MEM_EXPR (mem);
3703 decl = get_base_address (expr);
/* A callee can change the MEM if the base decl is aliased, or if it is
   a writable global.  (Early returns for missing expr/decl elided.)  */
3711 return (may_be_aliased (decl)
3712 || (!TREE_READONLY (decl) && is_global_var (decl)));
3715 /* Remove all MEMs from the location list of a hash table entry for a
3716 one-part variable, except those whose MEM attributes map back to
3717 the variable itself, directly or within a VALUE. */
/* htab_traverse callback used at call sites.  Heavily elided listing;
   comments are limited to the visible fragment.  */
3720 dataflow_set_preserve_mem_locs (void **slot, void *data)
3722 dataflow_set *set = (dataflow_set *) data;
3723 variable var = (variable) *slot;
3725 if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
3727 tree decl = dv_as_decl (var->dv);
3728 location_chain loc, *locp;
3729 bool changed = false;
3731 if (!var->n_var_parts)
3734 gcc_assert (var->n_var_parts == 1);
/* Shared variables: first scan read-only for anything to change.  */
3736 if (shared_var_p (var, set->vars))
3738 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3740 /* We want to remove dying MEMs that doesn't refer to
3742 if (GET_CODE (loc->loc) == MEM
3743 && (MEM_EXPR (loc->loc) != decl
3744 || MEM_OFFSET (loc->loc))
3745 && !mem_dies_at_call (loc->loc))
3747 /* We want to move here MEMs that do refer to DECL. */
3748 else if (GET_CODE (loc->loc) == VALUE
3749 && find_mem_expr_in_1pdv (decl, loc->loc,
3750 shared_hash_htab (set->vars)))
/* Something must change: unshare before mutating.  */
3757 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3758 var = (variable)*slot;
3759 gcc_assert (var->n_var_parts == 1);
/* Mutating pass over the (now private) location chain.  */
3762 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3765 rtx old_loc = loc->loc;
3766 if (GET_CODE (old_loc) == VALUE)
3768 location_chain mem_node
3769 = find_mem_expr_in_1pdv (decl, loc->loc,
3770 shared_hash_htab (set->vars));
3772 /* ??? This picks up only one out of multiple MEMs that
3773 refer to the same variable. Do we ever need to be
3774 concerned about dealing with more than one, or, given
3775 that they should all map to the same variable
3776 location, their addresses will have been merged and
3777 they will be regarded as equivalent? */
/* Pull the MEM found inside the VALUE up into this node.  */
3780 loc->loc = mem_node->loc;
3781 loc->set_src = mem_node->set_src;
3782 loc->init = MIN (loc->init, mem_node->init);
/* Keep non-MEMs, self-referring MEMs, and MEMs surviving calls.  */
3786 if (GET_CODE (loc->loc) != MEM
3787 || (MEM_EXPR (loc->loc) == decl
3788 && MEM_OFFSET (loc->loc) == 0)
3789 || !mem_dies_at_call (loc->loc))
3791 if (old_loc != loc->loc && emit_notes)
3793 if (old_loc == var->var_part[0].cur_loc)
3796 var->var_part[0].cur_loc = NULL;
3797 var->cur_loc_changed = true;
3799 add_value_chains (var->dv, loc->loc);
3800 remove_value_chains (var->dv, old_loc);
/* Drop path: unlink the node, fixing cur_loc if it pointed here.  */
3808 remove_value_chains (var->dv, old_loc);
3809 if (old_loc == var->var_part[0].cur_loc)
3812 var->var_part[0].cur_loc = NULL;
3813 var->cur_loc_changed = true;
3817 pool_free (loc_chain_pool, loc);
3820 if (!var->var_part[0].loc_chain)
/* Notify note emission that this variable changed.  */
3826 variable_was_changed (var, set);
3832 /* Remove all MEMs from the location list of a hash table entry for a
/* Value-variable counterpart of dataflow_set_preserve_mem_locs: drop
   MEM locations of VALUEs that may die across a call.  */
3836 dataflow_set_remove_mem_locs (void **slot, void *data)
3838 dataflow_set *set = (dataflow_set *) data;
3839 variable var = (variable) *slot;
3841 if (dv_is_value_p (var->dv))
3843 location_chain loc, *locp;
3844 bool changed = false;
3846 gcc_assert (var->n_var_parts == 1);
/* Shared variables: check first whether anything must be removed.  */
3848 if (shared_var_p (var, set->vars))
3850 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3851 if (GET_CODE (loc->loc) == MEM
3852 && mem_dies_at_call (loc->loc))
3858 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3859 var = (variable)*slot;
3860 gcc_assert (var->n_var_parts == 1);
3863 for (locp = &var->var_part[0].loc_chain, loc = *locp;
/* Keep everything except call-clobbered MEMs.  */
3866 if (GET_CODE (loc->loc) != MEM
3867 || !mem_dies_at_call (loc->loc))
3874 remove_value_chains (var->dv, loc->loc);
3876 /* If we have deleted the location which was last emitted
3877 we have to emit new location so add the variable to set
3878 of changed variables. */
3879 if (var->var_part[0].cur_loc == loc->loc)
3882 var->var_part[0].cur_loc = NULL;
3883 var->cur_loc_changed = true;
3885 pool_free (loc_chain_pool, loc);
3888 if (!var->var_part[0].loc_chain)
3894 variable_was_changed (var, set);
3900 /* Remove all variable-location information about call-clobbered
3901 registers, as well as associations between MEMs and VALUEs. */
3904 dataflow_set_clear_at_call (dataflow_set *set)
/* Invalidate every call-used hard register.  */
3908 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
3909 if (TEST_HARD_REG_BIT (call_used_reg_set, r))
3910 var_regno_delete (set, r);
3912 if (MAY_HAVE_DEBUG_INSNS)
/* traversed_vars lets the callbacks detect sharing against the set
   being traversed; reset it afterwards.  */
3914 set->traversed_vars = set->vars;
3915 htab_traverse (shared_hash_htab (set->vars),
3916 dataflow_set_preserve_mem_locs, set);
3917 set->traversed_vars = set->vars;
3918 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
3920 set->traversed_vars = NULL;
3924 /* Flag whether two dataflow sets being compared contain different data. */
3926 dataflow_set_different_value;
/* Return true if some location in VP1 has no equivalent in VP2
   (asymmetric check; callers test both directions).  */
3929 variable_part_different_p (variable_part *vp1, variable_part *vp2)
3931 location_chain lc1, lc2;
3933 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
3935 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
/* Registers match by number; other rtxes by structural equality.  */
3937 if (REG_P (lc1->loc) && REG_P (lc2->loc))
3939 if (REGNO (lc1->loc) == REGNO (lc2->loc))
3942 if (rtx_equal_p (lc1->loc, lc2->loc))
3951 /* Return true if one-part variables VAR1 and VAR2 are different.
3952 They must be in canonical order. */
3955 onepart_variable_different_p (variable var1, variable var2)
3957 location_chain lc1, lc2;
3962 gcc_assert (var1->n_var_parts == 1);
3963 gcc_assert (var2->n_var_parts == 1);
/* Canonical ordering allows a single linear, element-wise walk.  */
3965 lc1 = var1->var_part[0].loc_chain;
3966 lc2 = var2->var_part[0].loc_chain;
3973 if (loc_cmp (lc1->loc, lc2->loc))
3982 /* Return true if variables VAR1 and VAR2 are different. */
3985 variable_different_p (variable var1, variable var2)
3992 if (var1->n_var_parts != var2->n_var_parts)
3995 for (i = 0; i < var1->n_var_parts; i++)
3997 if (var1->var_part[i].offset != var2->var_part[i].offset)
3999 /* One-part values have locations in a canonical order. */
4000 if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
4002 gcc_assert (var1->n_var_parts == 1);
4003 gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4004 return onepart_variable_different_p (var1, var2);
/* Multi-part: unordered chains, so compare in both directions.  */
4006 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4008 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4014 /* Compare variable *SLOT with the same variable in hash table DATA
4015 and set DATAFLOW_SET_DIFFERENT_VALUE if they are different. */
/* htab_traverse callback; returning 0 (elided) stops the traversal.  */
4018 dataflow_set_different_1 (void **slot, void *data)
4020 htab_t htab = (htab_t) data;
4021 variable var1, var2;
4023 var1 = (variable) *slot;
4024 var2 = (variable) htab_find_with_hash (htab, var1->dv,
4025 dv_htab_hash (var1->dv));
/* Missing in the other set: a definite difference.  */
4028 dataflow_set_different_value = true;
4030 if (dump_file && (dump_flags & TDF_DETAILS))
4032 fprintf (dump_file, "dataflow difference found: removal of:\n");
4036 /* Stop traversing the hash table. */
4040 if (variable_different_p (var1, var2))
4042 dataflow_set_different_value = true;
4044 if (dump_file && (dump_flags & TDF_DETAILS))
4046 fprintf (dump_file, "dataflow difference found: old and new follow:\n");
4051 /* Stop traversing the hash table. */
4055 /* Continue traversing the hash table. */
4059 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4062 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
/* Shared tables are trivially equal.  */
4064 if (old_set->vars == new_set->vars)
/* Different sizes cannot be equal.  */
4067 if (htab_elements (shared_hash_htab (old_set->vars))
4068 != htab_elements (shared_hash_htab (new_set->vars)))
4071 dataflow_set_different_value = false;
4073 htab_traverse (shared_hash_htab (old_set->vars), dataflow_set_different_1,
4074 shared_hash_htab (new_set->vars));
4075 /* No need to traverse the second hashtab, if both have the same number
4076 of elements and the second one had all entries found in the first one,
4077 then it can't have any extra entries. */
4078 return dataflow_set_different_value;
4081 /* Free the contents of dataflow set SET. */
4084 dataflow_set_destroy (dataflow_set *set)
/* Release per-register attribute lists, then the shared vars table.  */
4088 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4089 attrs_list_clear (&set->regs[i]);
4091 shared_hash_destroy (set->vars);
4095 /* Return true if RTL X contains a SYMBOL_REF. */
/* Recursive structural search over X's operands and vectors.  */
4098 contains_symbol_ref (rtx x)
4107 code = GET_CODE (x);
4108 if (code == SYMBOL_REF)
4111 fmt = GET_RTX_FORMAT (code);
4112 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
/* 'e' operands are single rtxes; 'E' operands are rtx vectors.  */
4116 if (contains_symbol_ref (XEXP (x, i)))
4119 else if (fmt[i] == 'E')
4122 for (j = 0; j < XVECLEN (x, i); j++)
4123 if (contains_symbol_ref (XVECEXP (x, i, j)))
4131 /* Shall EXPR be tracked? */
/* Predicate deciding whether location tracking applies to decl EXPR.
   NEED_RTL requires a DECL_RTL to be present.  Listing elides lines.  */
4134 track_expr_p (tree expr, bool need_rtl)
4139 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4140 return DECL_RTL_SET_P (expr);
4142 /* If EXPR is not a parameter or a variable do not track it. */
4143 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4146 /* It also must have a name... */
4147 if (!DECL_NAME (expr) && need_rtl)
4150 /* ... and a RTL assigned to it. */
4151 decl_rtl = DECL_RTL_IF_SET (expr);
4152 if (!decl_rtl && need_rtl)
4155 /* If this expression is really a debug alias of some other declaration, we
4156 don't need to track this expression if the ultimate declaration is
4159 if (DECL_DEBUG_EXPR_IS_FROM (realdecl) && DECL_DEBUG_EXPR (realdecl))
4161 realdecl = DECL_DEBUG_EXPR (realdecl);
4162 /* ??? We don't yet know how to emit DW_OP_piece for variable
4163 that has been SRA'ed. */
4164 if (!DECL_P (realdecl))
4168 /* Do not track EXPR if REALDECL it should be ignored for debugging
4170 if (DECL_IGNORED_P (realdecl))
4173 /* Do not track global variables until we are able to emit correct location
4175 if (TREE_STATIC (realdecl))
4178 /* When the EXPR is a DECL for alias of some variable (see example)
4179 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
4180 DECL_RTL contains SYMBOL_REF.
4183 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4186 if (decl_rtl && MEM_P (decl_rtl)
4187 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4190 /* If RTX is a memory it should not be very large (because it would be
4191 an array or struct). */
4192 if (decl_rtl && MEM_P (decl_rtl))
4194 /* Do not track structures and arrays. */
4195 if (GET_MODE (decl_rtl) == BLKmode
4196 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4198 if (MEM_SIZE (decl_rtl)
4199 && INTVAL (MEM_SIZE (decl_rtl)) > MAX_VAR_PARTS)
/* Trackable: clear the changed flags before returning (elided).  */
4203 DECL_CHANGED (expr) = 0;
4204 DECL_CHANGED (realdecl) = 0;
4208 /* Determine whether a given LOC refers to the same variable part as
   EXPR+OFFSET.  Returns true iff the debug decls and offsets match.
   NOTE(review): lines are elided in this listing (e.g. the REG_P
   branch header before line 4222 and the fallback `return false`
   paths) — confirm against the full file.  */
4212 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4215 HOST_WIDE_INT offset2;
/* Only tracked declarations can match.  */
4217 if (! DECL_P (expr))
/* For a register LOC, compare against its REG_EXPR/REG_OFFSET
   attributes; for a memory LOC, against MEM_EXPR/INT_MEM_OFFSET.  */
4222 expr2 = REG_EXPR (loc);
4223 offset2 = REG_OFFSET (loc);
4225 else if (MEM_P (loc))
4227 expr2 = MEM_EXPR (loc);
4228 offset2 = INT_MEM_OFFSET (loc);
4233 if (! expr2 || ! DECL_P (expr2))
/* Map both decls through their debug aliases before comparing.  */
4236 expr = var_debug_decl (expr);
4237 expr2 = var_debug_decl (expr2);
4239 return (expr == expr2 && offset == offset2);
4242 /* LOC is a REG or MEM that we would like to track if possible.
4243 If EXPR is null, we don't know what expression LOC refers to,
4244 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4245 LOC is an lvalue register.
4247 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4248 is something we can track. When returning true, store the mode of
4249 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4250 from EXPR in *OFFSET_OUT (if nonnull). */
/* NOTE(review): several lines are elided in this listing (the
   function's return statements and part of the paradoxical-subreg
   handling); the comments below describe only what is visible.  */
4253 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4254 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4256 enum machine_mode mode;
/* Nothing to track without a trackable declaration.  */
4258 if (expr == NULL || !track_expr_p (expr, true))
4261 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4262 whole subreg, but only the old inner part is really relevant. */
4263 mode = GET_MODE (loc);
4264 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4266 enum machine_mode pseudo_mode;
4268 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4269 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
/* Shift OFFSET so it refers to the inner (original pseudo) part.  */
4271 offset += byte_lowpart_offset (pseudo_mode, mode);
4276 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4277 Do the same if we are storing to a register and EXPR occupies
4278 the whole of register LOC; in that case, the whole of EXPR is
4279 being changed. We exclude complex modes from the second case
4280 because the real and imaginary parts are represented as separate
4281 pseudo registers, even if the whole complex value fits into one
   hard register.  */
4283 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4285 && !COMPLEX_MODE_P (DECL_MODE (expr))
4286 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4287 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4289 mode = DECL_MODE (expr);
/* Reject offsets outside the representable variable-part range.  */
4293 if (offset < 0 || offset >= MAX_VAR_PARTS)
4299 *offset_out = offset;
4303 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4304 want to track. When returning nonnull, make sure that the attributes
4305 on the returned value are updated. */
4308 var_lowpart (enum machine_mode mode, rtx loc)
4310 unsigned int offset, reg_offset, regno;
/* Only registers and memory references have lowparts we track.  */
4312 if (!REG_P (loc) && !MEM_P (loc))
/* Already in the requested mode: presumably LOC itself is returned
   on an elided line — TODO confirm.  */
4315 if (GET_MODE (loc) == mode)
4318 offset = byte_lowpart_offset (mode, GET_MODE (loc));
/* MEM case: re-address without validation, keeping attributes.  */
4321 return adjust_address_nv (loc, mode, offset);
/* REG case: compute the hard-register number of the lowpart and
   build a REG carrying the adjusted offset attribute.  */
4323 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4324 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4326 return gen_rtx_REG_offset (loc, mode, regno, offset);
4329 /* Carry information about uses and stores while walking rtx.
   Passed as the DATA/CUIP argument to the note_uses/note_stores
   callbacks below (add_uses, add_stores).  */
4331 struct count_use_info
4333 /* The insn where the RTX is. */
4336 /* The basic block where insn is. */
4339 /* The array of n_sets sets in the insn, as determined by cselib. */
4340 struct cselib_set *sets;
4343 /* True if we're counting stores, false otherwise. */
4347 /* Find a VALUE corresponding to X.
   For a destination appearing in CUI->sets, return the recorded
   src_elt; otherwise fall back to a cselib lookup of X in MODE.
   NOTE(review): guard lines are elided in this listing — the MEM/
   non-MEM distinction is not visible here.  */
4349 static inline cselib_val *
4350 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4356 /* This is called after uses are set up and before stores are
4357 processed by cselib, so it's safe to look up srcs, but not
4358 dsts. So we look up expressions that appear in srcs or in
4359 dest expressions, but we search the sets array for dests of
   stores instead.  */
4363 for (i = 0; i < cui->n_sets; i++)
4364 if (cui->sets[i].dest == x)
4365 return cui->sets[i].src_elt;
/* Not a known destination: ordinary lookup (no insertion).  */
4368 return cselib_lookup (x, mode, 0);
4374 /* Replace all registers and addresses in an expression with VALUE
4375 expressions that map back to them, unless the expression is a
4376 register. If no mapping is or can be performed, returns NULL. */
4379 replace_expr_with_values (rtx loc)
/* NOTE(review): the REG_P branch (which presumably returns NULL) is
   elided in this listing — confirm against the full file.  */
4383 else if (MEM_P (loc))
/* For a MEM, look up its address in the address mode of its
   address space and rebuild the MEM over the VALUE rtx.  */
4385 enum machine_mode address_mode
4386 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
4387 cselib_val *addr = cselib_lookup (XEXP (loc, 0), address_mode, 0);
4389 return replace_equiv_address_nv (loc, addr->val_rtx);
/* Anything else: substitute VALUEs throughout the expression.  */
4394 return cselib_subst_to_values (loc);
4397 /* Determine what kind of micro operation to choose for a USE. Return
4398 MO_CLOBBER if no micro operation is to be generated.
   When MODEP is nonnull, the tracked mode may be stored through it.
   NOTE(review): many branch bodies and return statements are elided
   in this listing; the micro-operation values actually returned on
   most paths are not visible here.  */
4400 static enum micro_operation_type
4401 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
/* Value-tracking paths apply only when called with cselib sets.  */
4405 if (cui && cui->sets)
4407 if (GET_CODE (loc) == VAR_LOCATION)
4409 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
4411 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
4412 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
4414 /* ??? flag_float_store and volatile mems are never
4415 given values, but we could in theory use them for
   locations.  */
4417 gcc_assert (val || 1);
4424 if (REG_P (loc) || MEM_P (loc))
4427 *modep = GET_MODE (loc);
4431 || (find_use_val (loc, GET_MODE (loc), cui)
4432 && cselib_lookup (XEXP (loc, 0), GET_MODE (loc), 0)))
4437 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4439 if (val && !cselib_preserved_value_p (val))
/* Plain (non-value) classification below.  Hard registers only
   reach here; pseudos should have been eliminated by now.  */
4447 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
4449 expr = REG_EXPR (loc);
4452 return MO_USE_NO_VAR;
4453 else if (target_for_debug_bind (var_debug_decl (expr)))
4455 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
4456 false, modep, NULL))
4459 return MO_USE_NO_VAR;
4461 else if (MEM_P (loc))
4463 expr = MEM_EXPR (loc);
4467 else if (target_for_debug_bind (var_debug_decl (expr)))
4469 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
4470 false, modep, NULL))
4479 /* Log to OUT information about micro-operation MOPT involving X in
   INSN of basic block BB.  Dump-file helper only; no side effects on
   the pass state.  */
4483 log_op_type (rtx x, basic_block bb, rtx insn,
4484 enum micro_operation_type mopt, FILE *out)
/* The printed op index is the current length of BB's micro-op
   vector, i.e. the position this operation is about to occupy.  */
4486 fprintf (out, "bb %i op %i insn %i %s ",
4487 bb->index, VEC_length (micro_operation, VTI (bb)->mos),
4488 INSN_UID (insn), micro_operation_type_name[mopt]);
4489 print_inline_rtx (out, x, 2);
4493 /* Adjust sets if needed. Currently this optimizes read-only MEM loads
4494 if REG_EQUAL/REG_EQUIV note is present. */
4497 adjust_sets (rtx insn, struct cselib_set *sets, int n_sets)
/* Only the single-set, read-only-MEM-source case is handled.  */
4499 if (n_sets == 1 && MEM_P (sets[0].src) && MEM_READONLY_P (sets[0].src))
4501 /* For read-only MEMs containing some constant, prefer those
   constants.  */
4503 rtx note = find_reg_equal_equiv_note (insn), src;
4505 if (note && CONSTANT_P (XEXP (note, 0)))
/* Replace the source with the constant; under COND_EXEC, wrap it
   so the conditional nature of the set is preserved.  */
4507 sets[0].src = src = XEXP (note, 0);
4508 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
4509 src = gen_rtx_IF_THEN_ELSE (GET_MODE (sets[0].dest),
4510 COND_EXEC_TEST (PATTERN (insn)),
/* Re-resolve the cselib value for the adjusted source.  */
4512 sets[0].src_elt = cselib_lookup (src, GET_MODE (sets[0].dest), 1);
/* Flag accessors for CONCAT rtxes built by add_uses/add_stores.
   Each reuses one of the generic rtx flag bits (volatil, used, jump,
   unchanging, return_val), checked to apply to CONCAT only.  */
4517 /* Tell whether the CONCAT used to hold a VALUE and its location
4518 needs value resolution, i.e., an attempt of mapping the location
4519 back to other incoming values. */
4520 #define VAL_NEEDS_RESOLUTION(x) \
4521 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
4522 /* Whether the location in the CONCAT is a tracked expression, that
4523 should also be handled like a MO_USE. */
4524 #define VAL_HOLDS_TRACK_EXPR(x) \
4525 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
4526 /* Whether the location in the CONCAT should be handled like a MO_COPY
   as well.  */
4528 #define VAL_EXPR_IS_COPIED(x) \
4529 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
4530 /* Whether the location in the CONCAT should be handled like a
4531 MO_CLOBBER as well. */
4532 #define VAL_EXPR_IS_CLOBBERED(x) \
4533 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
4534 /* Whether the location is a CONCAT of the MO_VAL_SET expression and
4535 a reverse operation that should be handled afterwards. */
4536 #define VAL_EXPR_HAS_REVERSE(x) \
4537 (RTL_FLAG_CHECK1 ("VAL_EXPR_HAS_REVERSE", (x), CONCAT)->return_val)
4539 /* All preserved VALUEs. */
4540 static VEC (rtx, heap) *preserved_values;
4542 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes.
   Marks VAL as preserved in cselib and appends its val_rtx to the
   file-level PRESERVED_VALUES vector.  */
4545 preserve_value (cselib_val *val)
4547 cselib_preserve_value (val);
4548 VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
4551 /* Add uses (register and memory references) LOC which will be tracked
4552 to VTI (bb)->mos. INSN is instruction which the LOC is part of.
   Callback for for_each_rtx via add_uses_1; DATA is a struct
   count_use_info.  Classifies *PLOC with use_type and pushes the
   appropriate micro operation(s).
   NOTE(review): many lines are elided in this listing (the `rtx loc`
   binding, mo.type assignments, several condition headers and the
   return statement); comments below only describe visible code.  */
4555 add_uses (rtx *ploc, void *data)
4558 enum machine_mode mode = VOIDmode;
4559 struct count_use_info *cui = (struct count_use_info *)data;
4560 enum micro_operation_type type = use_type (loc, cui, &mode);
4562 if (type != MO_CLOBBER)
4564 basic_block bb = cui->bb;
/* For MO_USE track only the lowpart we care about.  */
4568 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
4569 mo.insn = cui->insn;
4571 if (type == MO_VAL_LOC)
/* VAR_LOCATION case: OLOC is the pattern, VLOC its location.  */
4574 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
4577 gcc_assert (cui->sets);
4580 && !REG_P (XEXP (vloc, 0)) && !MEM_P (XEXP (vloc, 0)))
/* The location is a MEM with a non-trivial address: emit an extra
   MO_VAL_USE for the address value so it stays resolvable.  */
4583 enum machine_mode address_mode
4584 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4586 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4588 if (val && !cselib_preserved_value_p (val))
4590 micro_operation moa;
4591 preserve_value (val);
4592 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4593 moa.type = MO_VAL_USE;
4594 moa.insn = cui->insn;
4595 moa.u.loc = gen_rtx_CONCAT (address_mode,
4596 val->val_rtx, mloc);
4597 if (dump_file && (dump_flags & TDF_DETAILS))
4598 log_op_type (moa.u.loc, cui->bb, cui->insn,
4599 moa.type, dump_file);
4600 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
/* Known location: pair the VAR_LOCATION with its VALUE.  */
4604 if (!VAR_LOC_UNKNOWN_P (vloc)
4605 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
4607 enum machine_mode mode2;
4608 enum micro_operation_type type2;
4609 rtx nloc = replace_expr_with_values (vloc);
/* Copy before mutating: OLOC may be shared.  */
4613 oloc = shallow_copy_rtx (oloc);
4614 PAT_VAR_LOCATION_LOC (oloc) = nloc;
4617 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
4619 type2 = use_type (vloc, 0, &mode2);
4621 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4622 || type2 == MO_CLOBBER);
4624 if (type2 == MO_CLOBBER
4625 && !cselib_preserved_value_p (val))
4627 VAL_NEEDS_RESOLUTION (oloc) = 1;
4628 preserve_value (val);
/* Unknown (or unresolvable) location: record it as such.  */
4631 else if (!VAR_LOC_UNKNOWN_P (vloc))
4633 oloc = shallow_copy_rtx (oloc);
4634 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
4639 else if (type == MO_VAL_USE)
4641 enum machine_mode mode2 = VOIDmode;
4642 enum micro_operation_type type2;
4643 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4644 rtx vloc, oloc = loc, nloc;
4646 gcc_assert (cui->sets);
4649 && !REG_P (XEXP (oloc, 0)) && !MEM_P (XEXP (oloc, 0)))
/* Same address-value bookkeeping as in the MO_VAL_LOC case.  */
4652 enum machine_mode address_mode
4653 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4655 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4657 if (val && !cselib_preserved_value_p (val))
4659 micro_operation moa;
4660 preserve_value (val);
4661 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4662 moa.type = MO_VAL_USE;
4663 moa.insn = cui->insn;
4664 moa.u.loc = gen_rtx_CONCAT (address_mode,
4665 val->val_rtx, mloc);
4666 if (dump_file && (dump_flags & TDF_DETAILS))
4667 log_op_type (moa.u.loc, cui->bb, cui->insn,
4668 moa.type, dump_file);
4669 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
4673 type2 = use_type (loc, 0, &mode2);
4675 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4676 || type2 == MO_CLOBBER);
4678 if (type2 == MO_USE)
4679 vloc = var_lowpart (mode2, loc);
4683 /* The loc of a MO_VAL_USE may have two forms:
4685 (concat val src): val is at src, a value-based
   representation.
4688 (concat (concat val use) src): same as above, with use as
4689 the MO_USE tracked value, if it differs from src.
   */
4693 nloc = replace_expr_with_values (loc);
4698 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
4700 oloc = val->val_rtx;
4702 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
4704 if (type2 == MO_USE)
4705 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
4706 if (!cselib_preserved_value_p (val))
4708 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
4709 preserve_value (val);
4713 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
4715 if (dump_file && (dump_flags & TDF_DETAILS))
4716 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
4717 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
4723 /* Helper function for finding all uses of REG/MEM in X in insn INSN.
   note_uses adapter: walks *X with for_each_rtx, invoking add_uses
   on every subexpression; CUI is the count_use_info context.  */
4726 add_uses_1 (rtx *x, void *cui)
4728 for_each_rtx (x, add_uses, cui);
4731 /* Attempt to reverse the EXPR operation in the debug info. Say for
4732 reg1 = reg2 + 6 even when reg2 is no longer live we
4733 can express its value as VAL - 6.
   Returns a (concat v ret) pairing the preserved value of the
   operand with the reversed expression, as built at line 4814.
   NOTE(review): the switch case labels and several return paths are
   elided in this listing — the exact set of reversible codes is not
   visible here.  */
4736 reverse_op (rtx val, const_rtx expr)
/* Only simple single-register SETs in matching mode qualify.  */
4742 if (GET_CODE (expr) != SET)
4745 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
4748 src = SET_SRC (expr);
4749 switch (GET_CODE (src))
4763 if (!REG_P (XEXP (src, 0)) || !SCALAR_INT_MODE_P (GET_MODE (src)))
/* The operand register must have a preserved cselib value so the
   reverse expression remains meaningful later.  */
4766 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0);
4767 if (!v || !cselib_preserved_value_p (v))
4770 switch (GET_CODE (src))
4774 if (GET_MODE (v->val_rtx) != GET_MODE (val))
/* Unary case: rebuild the same unary code around VAL.  */
4776 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
4780 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
4792 if (GET_MODE (v->val_rtx) != GET_MODE (val))
/* Binary case: the second operand must reduce to a constant (or
   symbol) for the operation to be invertible here.  */
4794 arg = XEXP (src, 1);
4795 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
4797 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
4798 if (arg == NULL_RTX)
4800 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
4803 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
4805 /* Ensure ret isn't VALUE itself (which can happen e.g. for
4806 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
4807 breaks a lot of routines during var-tracking. */
4808 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
4814 return gen_rtx_CONCAT (GET_MODE (v->val_rtx), v->val_rtx, ret);
4817 /* Return SRC, or, if it is a read-only MEM for which adjust_sets
4818 replaced it with a constant from REG_EQUIV/REG_EQUAL note,
   return that constant instead.  */
4822 get_adjusted_src (struct count_use_info *cui, rtx src)
/* Mirrors the single-set condition adjust_sets used, so the same
   substitution is observed here.  */
4824 if (cui->n_sets == 1
4826 && MEM_READONLY_P (src)
4827 && CONSTANT_P (cui->sets[0].src))
4828 return cui->sets[0].src;
4832 /* Add stores (register and memory references) LOC which will be tracked
4833 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
4834 CUIP->insn is instruction which the LOC is part of.
   Callback for note_stores.  Builds MO_CLOBBER / MO_SET / MO_COPY /
   MO_VAL_SET micro operations; the possible CONCAT encodings of a
   MO_VAL_SET loc are documented at lines 5036-5052 below.
   NOTE(review): many lines are elided in this listing (mo.type
   assignments, several branch headers and goto labels such as
   log_and_return); comments describe only what is visible.  */
4837 add_stores (rtx loc, const_rtx expr, void *cuip)
4839 enum machine_mode mode = VOIDmode, mode2;
4840 struct count_use_info *cui = (struct count_use_info *)cuip;
4841 basic_block bb = cui->bb;
4843 rtx oloc = loc, nloc, src = NULL;
4844 enum micro_operation_type type = use_type (loc, cui, &mode);
4845 bool track_p = false;
4847 bool resolve, preserve;
4850 if (type == MO_CLOBBER)
/* --- Register destination --- */
4857 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
4858 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
4859 || GET_CODE (expr) == CLOBBER)
4861 mo.type = MO_CLOBBER;
4866 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
4868 src = get_adjusted_src (cui, SET_SRC (expr));
4869 src = var_lowpart (mode2, src);
4871 loc = var_lowpart (mode2, loc);
/* Re-wrap as a SET if the source was adjusted; classify as copy
   when source and destination are the same variable part.  */
4880 rtx xexpr = CONST_CAST_RTX (expr);
4882 if (SET_SRC (expr) != src)
4883 xexpr = gen_rtx_SET (VOIDmode, loc, src);
4884 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
4891 mo.insn = cui->insn;
/* --- Memory destination --- */
4893 else if (MEM_P (loc)
4894 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
4897 if (MEM_P (loc) && type == MO_VAL_SET
4898 && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
/* Complex address: emit a MO_VAL_USE for the address value, same
   pattern as in add_uses.  */
4901 enum machine_mode address_mode
4902 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4903 cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4905 if (val && !cselib_preserved_value_p (val))
4907 preserve_value (val);
4908 mo.type = MO_VAL_USE;
4909 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4910 mo.u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
4911 mo.insn = cui->insn;
4912 if (dump_file && (dump_flags & TDF_DETAILS))
4913 log_op_type (mo.u.loc, cui->bb, cui->insn,
4914 mo.type, dump_file);
4915 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
4919 if (GET_CODE (expr) == CLOBBER || !track_p)
4921 mo.type = MO_CLOBBER;
4922 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
4926 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
4928 src = get_adjusted_src (cui, SET_SRC (expr));
4929 src = var_lowpart (mode2, src);
4931 loc = var_lowpart (mode2, loc);
4940 rtx xexpr = CONST_CAST_RTX (expr);
4942 if (SET_SRC (expr) != src)
4943 xexpr = gen_rtx_SET (VOIDmode, loc, src);
4944 if (same_variable_part_p (SET_SRC (xexpr),
4946 INT_MEM_OFFSET (loc)))
4953 mo.insn = cui->insn;
/* --- Value-tracking augmentation (MO_VAL_SET) --- */
4958 if (type != MO_VAL_SET)
4959 goto log_and_return;
4961 v = find_use_val (oloc, mode, cui);
4964 goto log_and_return;
4966 resolve = preserve = !cselib_preserved_value_p (v);
4968 nloc = replace_expr_with_values (oloc);
/* Conditional execution: the destination keeps its old value when
   the condition is false, so record a use of that old value.  */
4972 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
4974 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0);
4976 gcc_assert (oval != v);
4977 gcc_assert (REG_P (oloc) || MEM_P (oloc));
4979 if (!cselib_preserved_value_p (oval))
4981 micro_operation moa;
4983 preserve_value (oval);
4985 moa.type = MO_VAL_USE;
4986 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
4987 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
4988 moa.insn = cui->insn;
4990 if (dump_file && (dump_flags & TDF_DETAILS))
4991 log_op_type (moa.u.loc, cui->bb, cui->insn,
4992 moa.type, dump_file);
4993 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
4998 else if (resolve && GET_CODE (mo.u.loc) == SET)
5000 src = get_adjusted_src (cui, SET_SRC (expr));
5001 nloc = replace_expr_with_values (src);
5003 /* Avoid the mode mismatch between oexpr and expr. */
5004 if (!nloc && mode != mode2)
5007 gcc_assert (oloc == SET_DEST (expr));
5011 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
5014 if (oloc == SET_DEST (mo.u.loc))
5015 /* No point in duplicating. */
5017 if (!REG_P (SET_SRC (mo.u.loc)))
5023 if (GET_CODE (mo.u.loc) == SET
5024 && oloc == SET_DEST (mo.u.loc))
5025 /* No point in duplicating. */
/* Pair the value with its (possibly rewritten) destination.  */
5031 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5033 if (mo.u.loc != oloc)
5034 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
5036 /* The loc of a MO_VAL_SET may have various forms:
5038 (concat val dst): dst now holds val
5040 (concat val (set dst src)): dst now holds val, copied from src
5042 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5043 after replacing mems and non-top-level regs with values.
5045 (concat (concat val dstv) (set dst src)): dst now holds val,
5046 copied from src. dstv is a value-based representation of dst, if
5047 it differs from dst. If resolution is needed, src is a REG, and
5048 its mode is the same as that of val.
5050 (concat (concat val (set dstv srcv)) (set dst src)): src
5051 copied to dst, holding val. dstv and srcv are value-based
5052 representations of dst and src, respectively.
   */
5056 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
5058 reverse = reverse_op (v->val_rtx, expr);
5061 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, reverse);
5062 VAL_EXPR_HAS_REVERSE (loc) = 1;
/* Transfer the classification decided above onto the CONCAT's
   flag bits, then emit the final MO_VAL_SET.  */
5069 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5072 VAL_NEEDS_RESOLUTION (loc) = resolve;
5075 if (mo.type == MO_CLOBBER)
5076 VAL_EXPR_IS_CLOBBERED (loc) = 1;
5077 if (mo.type == MO_COPY)
5078 VAL_EXPR_IS_COPIED (loc) = 1;
5080 mo.type = MO_VAL_SET;
5083 if (dump_file && (dump_flags & TDF_DETAILS))
5084 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5085 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5088 /* Callback for cselib_record_sets_hook, that records as micro
5089 operations uses and stores in an insn after cselib_record_sets has
5090 analyzed the sets in an insn, but before it modifies the stored
5091 values in the internal tables, unless cselib_record_sets doesn't
5092 call it directly (perhaps because we're not doing cselib in the
5093 first place, in which case sets and n_sets will be 0).
   NOTE(review): the tail of this function (the reordering swap
   bodies and its closing brace) is elided in this listing.  */
5096 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
5098 basic_block bb = BLOCK_FOR_INSN (insn);
5100 struct count_use_info cui;
5101 micro_operation *mos;
/* Record that cselib invoked us, so the caller can tell the hook
   ran for this insn.  */
5103 cselib_hook_called = true;
5105 adjust_sets (insn, sets, n_sets);
5110 cui.n_sets = n_sets;
/* Pass 1: collect uses.  */
5112 n1 = VEC_length (micro_operation, VTI (bb)->mos);
5113 cui.store_p = false;
5114 note_uses (&PATTERN (insn), add_uses_1, &cui);
5115 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5116 mos = VEC_address (micro_operation, VTI (bb)->mos);
5118 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
   the MO_VAL_LOCs last.  The loops below partition by swapping
   out-of-place entries from both ends toward the middle.  */
5122 while (n1 < n2 && mos[n1].type == MO_USE)
5124 while (n1 < n2 && mos[n2].type != MO_USE)
5136 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5139 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
5141 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
/* Presumably a MO_CALL micro op is emitted for call insns in an
   elided branch around here — TODO confirm.  */
5159 mo.u.loc = NULL_RTX;
5161 if (dump_file && (dump_flags & TDF_DETAILS))
5162 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
5163 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
/* Pass 2: collect stores.  */
5166 n1 = VEC_length (micro_operation, VTI (bb)->mos);
5167 /* This will record NEXT_INSN (insn), such that we can
5168 insert notes before it without worrying about any
5169 notes that MO_USEs might emit after the insn. */
5171 note_stores (PATTERN (insn), add_stores, &cui);
5172 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5173 mos = VEC_address (micro_operation, VTI (bb)->mos);
5175 /* Order the MO_VAL_USEs first (note_stores does nothing
5176 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
5177 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
5180 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
5182 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
5194 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5197 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
5199 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
/* Return the initialization status recorded for SRC in dataflow set
   IN, defaulting to uninitialized (or initialized when uninit
   tracking is disabled) if SRC has no associated decl.  */
5212 static enum var_init_status
5213 find_src_status (dataflow_set *in, rtx src)
5215 tree decl = NULL_TREE;
5216 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
/* Without -fvar-tracking-uninit everything counts as initialized.  */
5218 if (! flag_var_tracking_uninit)
5219 status = VAR_INIT_STATUS_INITIALIZED;
5221 if (src && REG_P (src))
5222 decl = var_debug_decl (REG_EXPR (src));
5223 else if (src && MEM_P (src))
5224 decl = var_debug_decl (MEM_EXPR (src));
5227 status = get_init_value (in, src, dv_from_decl (decl));
5232 /* SRC is the source of an assignment. Use SET to try to find what
5233 was ultimately assigned to SRC. Return that value if known,
5234 otherwise return SRC itself. */
5237 find_src_set_src (dataflow_set *set, rtx src)
5239 tree decl = NULL_TREE; /* The variable being copied around. */
5240 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
5242 location_chain nextp;
5246 if (src && REG_P (src))
5247 decl = var_debug_decl (REG_EXPR (src));
5248 else if (src && MEM_P (src))
5249 decl = var_debug_decl (MEM_EXPR (src));
5253 decl_or_value dv = dv_from_decl (decl);
5255 var = shared_hash_find (set->vars, dv);
/* Scan every location chain of every variable part for an rtx equal
   to SRC; its recorded set_src is the ultimate assigned value.  */
5259 for (i = 0; i < var->n_var_parts && !found; i++)
5260 for (nextp = var->var_part[i].loc_chain; nextp && !found;
5261 nextp = nextp->next)
5262 if (rtx_equal_p (nextp->loc, src))
5264 set_src = nextp->set_src;
5274 /* Compute the changes of variable locations in the basic block BB.
   Copies IN to OUT, replays every micro operation of BB against OUT,
   and returns whether OUT changed relative to its previous contents.
   NOTE(review): the switch statement's case labels (MO_CALL, MO_USE,
   MO_VAL_LOC, ...) are elided in this listing; the per-case intent
   below is inferred from the visible bodies and mirrors the
   micro-operation ordering documented in the file header.  */
5277 compute_bb_dataflow (basic_block bb)
5280 micro_operation *mo;
5282 dataflow_set old_out;
5283 dataflow_set *in = &VTI (bb)->in;
5284 dataflow_set *out = &VTI (bb)->out;
/* Snapshot the previous OUT so we can detect convergence.  */
5286 dataflow_set_init (&old_out);
5287 dataflow_set_copy (&old_out, out);
5288 dataflow_set_copy (out, in);
5290 for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
5292 rtx insn = mo->insn;
/* Call insns invalidate call-clobbered locations.  */
5297 dataflow_set_clear_at_call (out);
/* Plain use: (re)establish the location, status unknown.  */
5302 rtx loc = mo->u.loc;
5305 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5306 else if (MEM_P (loc))
5307 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
/* MO_VAL_LOC: bind a decl to a VALUE from a VAR_LOCATION note.  */
5313 rtx loc = mo->u.loc;
5317 if (GET_CODE (loc) == CONCAT)
5319 val = XEXP (loc, 0);
5320 vloc = XEXP (loc, 1);
5328 var = PAT_VAR_LOCATION_DECL (vloc);
5330 clobber_variable_part (out, NULL_RTX,
5331 dv_from_decl (var), 0, NULL_RTX);
5334 if (VAL_NEEDS_RESOLUTION (loc))
5335 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
5336 set_variable_part (out, val, dv_from_decl (var), 0,
5337 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
/* MO_VAL_USE: loc is (concat val vloc) or (concat (concat val
   uloc) vloc) — see add_uses.  */
5345 rtx loc = mo->u.loc;
5346 rtx val, vloc, uloc;
5348 vloc = uloc = XEXP (loc, 1);
5349 val = XEXP (loc, 0);
5351 if (GET_CODE (val) == CONCAT)
5353 uloc = XEXP (val, 1);
5354 val = XEXP (val, 0);
5357 if (VAL_NEEDS_RESOLUTION (loc))
5358 val_resolve (out, val, vloc, insn);
5360 val_store (out, val, uloc, insn, false);
5362 if (VAL_HOLDS_TRACK_EXPR (loc))
5364 if (GET_CODE (uloc) == REG)
5365 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5367 else if (GET_CODE (uloc) == MEM)
5368 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
/* MO_VAL_SET: decode the CONCAT encodings documented in
   add_stores (lines 5036-5052).  */
5376 rtx loc = mo->u.loc;
5377 rtx val, vloc, uloc, reverse = NULL_RTX;
5380 if (VAL_EXPR_HAS_REVERSE (loc))
5382 reverse = XEXP (loc, 1);
5383 vloc = XEXP (loc, 0);
5385 uloc = XEXP (vloc, 1);
5386 val = XEXP (vloc, 0);
5389 if (GET_CODE (val) == CONCAT)
5391 vloc = XEXP (val, 1);
5392 val = XEXP (val, 0);
5395 if (GET_CODE (vloc) == SET)
5397 rtx vsrc = SET_SRC (vloc);
5399 gcc_assert (val != vsrc);
5400 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
5402 vloc = SET_DEST (vloc);
5404 if (VAL_NEEDS_RESOLUTION (loc))
5405 val_resolve (out, val, vsrc, insn);
5407 else if (VAL_NEEDS_RESOLUTION (loc))
5409 gcc_assert (GET_CODE (uloc) == SET
5410 && GET_CODE (SET_SRC (uloc)) == REG);
5411 val_resolve (out, val, SET_SRC (uloc), insn);
5414 if (VAL_HOLDS_TRACK_EXPR (loc))
5416 if (VAL_EXPR_IS_CLOBBERED (loc))
5419 var_reg_delete (out, uloc, true);
5420 else if (MEM_P (uloc))
5421 var_mem_delete (out, uloc, true);
/* Set or copy of a tracked expression.  */
5425 bool copied_p = VAL_EXPR_IS_COPIED (loc);
5427 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
5429 if (GET_CODE (uloc) == SET)
5431 set_src = SET_SRC (uloc);
5432 uloc = SET_DEST (uloc);
5437 if (flag_var_tracking_uninit)
5439 status = find_src_status (in, set_src);
5441 if (status == VAR_INIT_STATUS_UNKNOWN)
5442 status = find_src_status (out, set_src);
5445 set_src = find_src_set_src (in, set_src);
5449 var_reg_delete_and_set (out, uloc, !copied_p,
5451 else if (MEM_P (uloc))
5452 var_mem_delete_and_set (out, uloc, !copied_p,
5456 else if (REG_P (uloc))
5457 var_regno_delete (out, REGNO (uloc));
5459 val_store (out, val, vloc, insn, true);
/* Apply the reverse operation recorded by reverse_op, if any.  */
5462 val_store (out, XEXP (reverse, 0), XEXP (reverse, 1),
/* MO_SET: unconditional assignment; old locations die.  */
5469 rtx loc = mo->u.loc;
5472 if (GET_CODE (loc) == SET)
5474 set_src = SET_SRC (loc);
5475 loc = SET_DEST (loc);
5479 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5481 else if (MEM_P (loc))
5482 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
/* MO_COPY: the source keeps holding the variable too; propagate
   the source's initialization status.  */
5489 rtx loc = mo->u.loc;
5490 enum var_init_status src_status;
5493 if (GET_CODE (loc) == SET)
5495 set_src = SET_SRC (loc);
5496 loc = SET_DEST (loc);
5499 if (! flag_var_tracking_uninit)
5500 src_status = VAR_INIT_STATUS_INITIALIZED;
5503 src_status = find_src_status (in, set_src);
5505 if (src_status == VAR_INIT_STATUS_UNKNOWN)
5506 src_status = find_src_status (out, set_src);
5509 set_src = find_src_set_src (in, set_src);
5512 var_reg_delete_and_set (out, loc, false, src_status, set_src);
5513 else if (MEM_P (loc))
5514 var_mem_delete_and_set (out, loc, false, src_status, set_src);
/* MO_USE_NO_VAR / MO_CLOBBER: delete the location (the boolean
   argument distinguishes the two visible pairs below).  */
5520 rtx loc = mo->u.loc;
5523 var_reg_delete (out, loc, false);
5524 else if (MEM_P (loc))
5525 var_mem_delete (out, loc, false);
5531 rtx loc = mo->u.loc;
5534 var_reg_delete (out, loc, true);
5535 else if (MEM_P (loc))
5536 var_mem_delete (out, loc, true);
/* MO_ADJUST: track the running stack-pointer adjustment.  */
5541 out->stack_adjust += mo->u.adjust;
/* Canonicalize value equivalences before comparing sets.  */
5546 if (MAY_HAVE_DEBUG_INSNS)
5548 dataflow_set_equiv_regs (out);
5549 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
5551 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
5554 htab_traverse (shared_hash_htab (out->vars),
5555 canonicalize_loc_order_check, out);
5558 changed = dataflow_set_different (&old_out, out);
5559 dataflow_set_destroy (&old_out);
5563 /* Find the locations of variables in the whole function.
   Iterative worklist dataflow over the CFG in reverse-completion
   order, alternating a current-round WORKLIST and a next-round
   PENDING fibheap until the OUT sets converge.  Returns success
   (false when the hash-table size limit is exceeded).
   NOTE(review): several lines are elided in this listing (variable
   declarations, FOR_EACH_BB headers, the `success = false` path and
   the final return); comments describe only visible code.  */
5566 vt_find_locations (void)
5568 fibheap_t worklist, pending, fibheap_swap;
5569 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
5576 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
5577 bool success = true;
5579 /* Compute reverse completion order of depth first search of the CFG
5580 so that the data-flow runs faster. */
5581 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
5582 bb_order = XNEWVEC (int, last_basic_block);
5583 pre_and_rev_post_order_compute (NULL, rc_order, false);
5584 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
5585 bb_order[rc_order[i]] = i;
/* Seed PENDING with every block; VISITED tracks blocks processed in
   the current round.  */
5588 worklist = fibheap_new ();
5589 pending = fibheap_new ();
5590 visited = sbitmap_alloc (last_basic_block);
5591 in_worklist = sbitmap_alloc (last_basic_block);
5592 in_pending = sbitmap_alloc (last_basic_block);
5593 sbitmap_zero (in_worklist);
5596 fibheap_insert (pending, bb_order[bb->index], bb);
5597 sbitmap_ones (in_pending);
5599 while (success && !fibheap_empty (pending))
/* Swap roles: last round's PENDING becomes this round's WORKLIST.  */
5601 fibheap_swap = pending;
5603 worklist = fibheap_swap;
5604 sbitmap_swap = in_pending;
5605 in_pending = in_worklist;
5606 in_worklist = sbitmap_swap;
5608 sbitmap_zero (visited);
5610 while (!fibheap_empty (worklist))
5612 bb = (basic_block) fibheap_extract_min (worklist);
5613 RESET_BIT (in_worklist, bb->index);
5614 if (!TEST_BIT (visited, bb->index))
5618 int oldinsz, oldoutsz;
5620 SET_BIT (visited, bb->index);
/* Maintain HTABSZ, the running total of hash-table sizes, for the
   PARAM_MAX_VARTRACK_SIZE limit check below.  */
5622 if (VTI (bb)->in.vars)
5625 -= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
5626 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
5628 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
5630 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
5633 oldinsz = oldoutsz = 0;
5635 if (MAY_HAVE_DEBUG_INSNS)
5637 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
5638 bool first = true, adjust = false;
5640 /* Calculate the IN set as the intersection of
5641 predecessor OUT sets. */
5643 dataflow_set_clear (in);
5644 dst_can_be_shared = true;
5646 FOR_EACH_EDGE (e, ei, bb->preds)
5647 if (!VTI (e->src)->flooded)
5648 gcc_assert (bb_order[bb->index]
5649 <= bb_order[e->src->index]);
/* First flooded predecessor is copied; the rest are merged in.  */
5652 dataflow_set_copy (in, &VTI (e->src)->out);
5653 first_out = &VTI (e->src)->out;
5658 dataflow_set_merge (in, &VTI (e->src)->out);
5664 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
5666 /* Merge and merge_adjust should keep entries in
   canonical order.  */
5668 htab_traverse (shared_hash_htab (in->vars),
5669 canonicalize_loc_order_check,
/* If the merge left the set identical to the first predecessor,
   share its hash table instead of keeping a copy.  */
5672 if (dst_can_be_shared)
5674 shared_hash_destroy (in->vars);
5675 in->vars = shared_hash_copy (first_out->vars);
5679 VTI (bb)->flooded = true;
5683 /* Calculate the IN set as union of predecessor OUT sets. */
5684 dataflow_set_clear (&VTI (bb)->in);
5685 FOR_EACH_EDGE (e, ei, bb->preds)
5686 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
5689 changed = compute_bb_dataflow (bb);
5690 htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
5691 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
5693 if (htabmax && htabsz > htabmax)
5695 if (MAY_HAVE_DEBUG_INSNS)
5696 inform (DECL_SOURCE_LOCATION (cfun->decl),
5697 "variable tracking size limit exceeded with "
5698 "-fvar-tracking-assignments, retrying without");
5700 inform (DECL_SOURCE_LOCATION (cfun->decl),
5701 "variable tracking size limit exceeded");
/* OUT changed: reschedule successors.  */
5708 FOR_EACH_EDGE (e, ei, bb->succs)
5710 if (e->dest == EXIT_BLOCK_PTR)
5713 if (TEST_BIT (visited, e->dest->index))
5715 if (!TEST_BIT (in_pending, e->dest->index))
5717 /* Send E->DEST to next round. */
5718 SET_BIT (in_pending, e->dest->index);
5719 fibheap_insert (pending,
5720 bb_order[e->dest->index],
5724 else if (!TEST_BIT (in_worklist, e->dest->index))
5726 /* Add E->DEST to current round. */
5727 SET_BIT (in_worklist, e->dest->index);
5728 fibheap_insert (worklist, bb_order[e->dest->index],
5736 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
5738 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
5740 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
5742 (int)worklist->nodes, (int)pending->nodes, htabsz);
5744 if (dump_file && (dump_flags & TDF_DETAILS))
5746 fprintf (dump_file, "BB %i IN:\n", bb->index);
5747 dump_dataflow_set (&VTI (bb)->in);
5748 fprintf (dump_file, "BB %i OUT:\n", bb->index);
5749 dump_dataflow_set (&VTI (bb)->out);
/* On success with debug insns, every block must have been flooded.  */
5755 if (success && MAY_HAVE_DEBUG_INSNS)
5757 gcc_assert (VTI (bb)->flooded);
5760 fibheap_delete (worklist);
5761 fibheap_delete (pending);
5762 sbitmap_free (visited);
5763 sbitmap_free (in_worklist);
5764 sbitmap_free (in_pending);
5769 /* Print the content of the LIST to dump file. */
5772 dump_attrs_list (attrs list)
/* Walk the singly-linked attrs list; each node carries a decl-or-value
   (list->dv) plus a HOST_WIDE_INT offset, printed as "DV+offset".  */
5774 for (; list; list = list->next)
5776 if (dv_is_decl_p (list->dv))
5777 print_mem_expr (dump_file, dv_as_decl (list->dv));
/* Non-decl DVs are VALUE rtxes, printed as a single RTL expression.  */
5779 print_rtl_single (dump_file, dv_as_value (list->dv));
5780 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
5782 fprintf (dump_file, "\n");
5785 /* Print the information about variable *SLOT to dump file. */
/* htab_traverse callback: dumps the variable stored in one hash-table
   slot.  DATA is unused.  */
5788 dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
5790 variable var = (variable) *slot;
/* Returning nonzero keeps htab_traverse going over remaining slots.  */
5794 /* Continue traversing the hash table. */
5798 /* Print the information about variable VAR to dump file. */
5801 dump_var (variable var)
5804 location_chain node;
/* Decl-based variables print their source-level name (or D.<uid> /
   D#<uid> for anonymous and debug-expr decls); VALUE-based variables
   print their RTL instead.  */
5806 if (dv_is_decl_p (var->dv))
5808 const_tree decl = dv_as_decl (var->dv);
5810 if (DECL_NAME (decl))
5812 fprintf (dump_file, " name: %s",
5813 IDENTIFIER_POINTER (DECL_NAME (decl)));
5814 if (dump_flags & TDF_UID)
5815 fprintf (dump_file, "D.%u", DECL_UID (decl));
5817 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
5818 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
5820 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
5821 fprintf (dump_file, "\n");
5825 fputc (' ', dump_file);
5826 print_rtl_single (dump_file, dv_as_value (var->dv));
/* For each part (offset within the variable) print its whole location
   chain, flagging locations whose init status is still uninitialized.  */
5829 for (i = 0; i < var->n_var_parts; i++)
5831 fprintf (dump_file, " offset %ld\n",
5832 (long) var->var_part[i].offset);
5833 for (node = var->var_part[i].loc_chain; node; node = node->next)
5835 fprintf (dump_file, " ");
5836 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
5837 fprintf (dump_file, "[uninit]");
5838 print_rtl_single (dump_file, node->loc);
5843 /* Print the information about variables from hash table VARS to dump file. */
5846 dump_vars (htab_t vars)
/* Print nothing at all for an empty table (avoids a bare header).  */
5848 if (htab_elements (vars) > 0)
5850 fprintf (dump_file, "Variables:\n");
5851 htab_traverse (vars, dump_var_slot, NULL);
5855 /* Print the dataflow set SET to dump file. */
5858 dump_dataflow_set (dataflow_set *set)
5862 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
/* Dump the per-hard-register attrs lists, then the variable table.  */
5864 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5868 fprintf (dump_file, "Reg %d:", i);
5869 dump_attrs_list (set->regs[i]);
5872 dump_vars (shared_hash_htab (set->vars));
5873 fprintf (dump_file, "\n");
5876 /* Print the IN and OUT sets for each basic block to dump file. */
5879 dump_dataflow_sets (void)
/* Per-block var-tracking data lives in VTI (bb); dump both sets.  */
5885 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
5886 fprintf (dump_file, "IN:\n");
5887 dump_dataflow_set (&VTI (bb)->in);
5888 fprintf (dump_file, "OUT:\n");
5889 dump_dataflow_set (&VTI (bb)->out);
5893 /* Add variable VAR to the hash table of changed variables and
5894 if it has no locations delete it from SET's hash table. */
5897 variable_was_changed (variable var, dataflow_set *set)
5899 hashval_t hash = dv_htab_hash (var->dv);
5904 bool old_cur_loc_changed = false;
5906 /* Remember this decl or VALUE has been added to changed_variables. */
5907 set_dv_changed (var->dv, true);
5909 slot = htab_find_slot_with_hash (changed_variables,
/* If the slot was already occupied, retire the old entry but remember
   its cur_loc_changed flag so the information is not lost.  */
5915 variable old_var = (variable) *slot;
5916 gcc_assert (old_var->in_changed_variables);
5917 old_var->in_changed_variables = false;
5918 old_cur_loc_changed = old_var->cur_loc_changed;
5919 variable_htab_free (*slot);
/* A variable with zero parts is recorded as an explicit "empty"
   placeholder so a "location lost" note can be emitted later.  */
5921 if (set && var->n_var_parts == 0)
5925 empty_var = (variable) pool_alloc (dv_pool (var->dv));
5926 empty_var->dv = var->dv;
5927 empty_var->refcount = 1;
5928 empty_var->n_var_parts = 0;
5929 empty_var->cur_loc_changed = true;
5930 empty_var->in_changed_variables = true;
5937 var->in_changed_variables = true;
5938 /* If within processing one uop a variable is deleted
5939 and then readded, we need to assume it has changed. */
5940 if (old_cur_loc_changed)
5941 var->cur_loc_changed = true;
/* When VAR has no parts left, remove it from SET's (possibly shared)
   hash table, unsharing the table first if necessary.  */
5948 if (var->n_var_parts == 0)
5953 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
5956 if (shared_hash_shared (set->vars))
5957 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
5959 htab_clear_slot (shared_hash_htab (set->vars), slot);
5965 /* Look for the index in VAR->var_part corresponding to OFFSET.
5966 Return -1 if not found. If INSERTION_POINT is non-NULL, the
5967 referenced int will be set to the index that the part has or should
5968 have, if it should be inserted. */
5971 find_variable_location_part (variable var, HOST_WIDE_INT offset,
5972 int *insertion_point)
5976 /* Find the location part. */
/* Binary search: var_part[] is kept sorted by ascending offset.  */
5978 high = var->n_var_parts;
5981 pos = (low + high) / 2;
5982 if (var->var_part[pos].offset < offset)
/* POS is now the index where OFFSET is or should be inserted.  */
5989 if (insertion_point)
5990 *insertion_point = pos;
5992 if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
/* Record location LOC for the variable part (DV, OFFSET) stored in
   *SLOT of SET's hash table, keeping the part's location chain in
   canonical order.  Returns the (possibly moved) slot.  */
5999 set_slot_part (dataflow_set *set, rtx loc, void **slot,
6000 decl_or_value dv, HOST_WIDE_INT offset,
6001 enum var_init_status initialized, rtx set_src)
6004 location_chain node, next;
6005 location_chain *nextp;
6007 bool onepart = dv_onepart_p (dv);
/* One-part DVs (VALUEs/debug exprs) always live at offset 0, and LOC
   must not be the DV's own rtx.  */
6009 gcc_assert (offset == 0 || !onepart);
6010 gcc_assert (loc != dv_as_opaque (dv));
6012 var = (variable) *slot;
6014 if (! flag_var_tracking_uninit)
6015 initialized = VAR_INIT_STATUS_INITIALIZED;
6019 /* Create new variable information. */
6020 var = (variable) pool_alloc (dv_pool (dv));
6023 var->n_var_parts = 1;
6024 var->cur_loc_changed = false;
6025 var->in_changed_variables = false;
6026 var->var_part[0].offset = offset;
6027 var->var_part[0].loc_chain = NULL;
6028 var->var_part[0].cur_loc = NULL;
6031 nextp = &var->var_part[0].loc_chain;
6037 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
/* For one-part variables the chain is ordered: find the insertion
   point for LOC depending on whether it is a VALUE, a REG or a MEM.  */
6041 if (GET_CODE (loc) == VALUE)
6043 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6044 nextp = &node->next)
6045 if (GET_CODE (node->loc) == VALUE)
6047 if (node->loc == loc)
/* VALUEs are kept in canon_value_cmp order.  */
6052 if (canon_value_cmp (node->loc, loc))
6060 else if (REG_P (node->loc) || MEM_P (node->loc))
6068 else if (REG_P (loc))
/* REG locations are ordered by ascending register number; an equal
   REGNO means LOC replaces the existing node.  */
6070 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6071 nextp = &node->next)
6072 if (REG_P (node->loc))
6074 if (REGNO (node->loc) < REGNO (loc))
6078 if (REGNO (node->loc) == REGNO (loc))
6091 else if (MEM_P (loc))
/* MEM locations go after all REGs, ordered by loc_cmp on the address.  */
6093 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6094 nextp = &node->next)
6095 if (REG_P (node->loc))
6097 else if (MEM_P (node->loc))
6099 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
/* Anything else is ordered by the generic loc_cmp total order.  */
6111 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6112 nextp = &node->next)
6113 if ((r = loc_cmp (node->loc, loc)) >= 0)
/* Copy-on-write: unshare the variable before mutating its chain.  */
6121 if (shared_var_p (var, set->vars))
6123 slot = unshare_variable (set, slot, var, initialized);
6124 var = (variable)*slot;
/* Re-find the insertion point in the unshared copy; the chains must
   match node-for-node.  */
6125 for (nextp = &var->var_part[0].loc_chain; c;
6126 nextp = &(*nextp)->next)
6128 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
6135 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
/* Multi-part path: locate (or create) the part for OFFSET.  */
6137 pos = find_variable_location_part (var, offset, &inspos);
6141 node = var->var_part[pos].loc_chain;
6144 && ((REG_P (node->loc) && REG_P (loc)
6145 && REGNO (node->loc) == REGNO (loc))
6146 || rtx_equal_p (node->loc, loc)))
6148 /* LOC is in the beginning of the chain so we have nothing
/* Just upgrade the init status / set_src in place.  */
6150 if (node->init < initialized)
6151 node->init = initialized;
6152 if (set_src != NULL)
6153 node->set_src = set_src;
6159 /* We have to make a copy of a shared variable. */
6160 if (shared_var_p (var, set->vars))
6162 slot = unshare_variable (set, slot, var, initialized);
6163 var = (variable)*slot;
6169 /* We have not found the location part, new one will be created. */
6171 /* We have to make a copy of the shared variable. */
6172 if (shared_var_p (var, set->vars))
6174 slot = unshare_variable (set, slot, var, initialized);
6175 var = (variable)*slot;
6178 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
6179 thus there are at most MAX_VAR_PARTS different offsets. */
6180 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
6181 && (!var->n_var_parts || !dv_onepart_p (var->dv)));
6183 /* We have to move the elements of array starting at index
6184 inspos to the next position. */
6185 for (pos = var->n_var_parts; pos > inspos; pos--)
6186 var->var_part[pos] = var->var_part[pos - 1];
6189 var->var_part[pos].offset = offset;
6190 var->var_part[pos].loc_chain = NULL;
6191 var->var_part[pos].cur_loc = NULL;
6194 /* Delete the location from the list. */
/* Remove any old node equal to LOC, preserving its init status and
   set_src for the fresh node added below.  */
6195 nextp = &var->var_part[pos].loc_chain;
6196 for (node = var->var_part[pos].loc_chain; node; node = next)
6199 if ((REG_P (node->loc) && REG_P (loc)
6200 && REGNO (node->loc) == REGNO (loc))
6201 || rtx_equal_p (node->loc, loc))
6203 /* Save these values, to assign to the new node, before
6204 deleting this one. */
6205 if (node->init > initialized)
6206 initialized = node->init;
6207 if (node->set_src != NULL && set_src == NULL)
6208 set_src = node->set_src;
6209 if (var->var_part[pos].cur_loc == node->loc)
6211 var->var_part[pos].cur_loc = NULL;
6212 var->cur_loc_changed = true;
6214 pool_free (loc_chain_pool, node);
6219 nextp = &node->next;
6222 nextp = &var->var_part[pos].loc_chain;
6225 /* Add the location to the beginning. */
6226 node = (location_chain) pool_alloc (loc_chain_pool);
6228 node->init = initialized;
6229 node->set_src = set_src;
6230 node->next = *nextp;
/* Keep the VALUE back-reference chains in sync when emitting notes.  */
6233 if (onepart && emit_notes)
6234 add_value_chains (var->dv, loc);
6236 /* If no location was emitted do so. */
6237 if (var->var_part[pos].cur_loc == NULL)
6238 variable_was_changed (var, set);
6243 /* Set the part of variable's location in the dataflow set SET. The
6244 variable part is specified by variable's declaration in DV and
6245 offset OFFSET and the part's location by LOC. IOPT should be
6246 NO_INSERT if the variable is known to be in SET already and the
6247 variable hash table must not be resized, and INSERT otherwise. */
6250 set_variable_part (dataflow_set *set, rtx loc,
6251 decl_or_value dv, HOST_WIDE_INT offset,
6252 enum var_init_status initialized, rtx set_src,
6253 enum insert_option iopt)
/* Resolve the hash-table slot for DV according to IOPT, then delegate
   the actual chain manipulation to set_slot_part.  */
6257 if (iopt == NO_INSERT)
6258 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6261 slot = shared_hash_find_slot (set->vars, dv);
/* Shared table: unshare before inserting a new entry.  */
6263 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
6265 slot = set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
6268 /* Remove all recorded register locations for the given variable part
6269 from dataflow set SET, except for those that are identical to loc.
6270 The variable part is specified by variable's declaration or value
6271 DV and offset OFFSET. */
6274 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
6275 HOST_WIDE_INT offset, rtx set_src)
6277 variable var = (variable) *slot;
6278 int pos = find_variable_location_part (var, offset, NULL);
6282 location_chain node, next;
6284 /* Remove the register locations from the dataflow set. */
6285 next = var->var_part[pos].loc_chain;
6286 for (node = next; node; node = next)
/* Keep nodes equal to LOC (and, with -fvar-tracking-uninit, nodes
   whose set_src matches) — everything else is clobbered.  */
6289 if (node->loc != loc
6290 && (!flag_var_tracking_uninit
6293 || !rtx_equal_p (set_src, node->set_src)))
6295 if (REG_P (node->loc))
6300 /* Remove the variable part from the register's
6301 list, but preserve any other variable parts
6302 that might be regarded as live in that same
/* Scan the register's attrs list and drop only the entry matching
   this DV/OFFSET pair.  */
6304 anextp = &set->regs[REGNO (node->loc)];
6305 for (anode = *anextp; anode; anode = anext)
6307 anext = anode->next;
6308 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
6309 && anode->offset == offset)
6311 pool_free (attrs_pool, anode);
6315 anextp = &anode->next;
6319 slot = delete_slot_part (set, node->loc, slot, offset);
6327 /* Remove all recorded register locations for the given variable part
6328 from dataflow set SET, except for those that are identical to loc.
6329 The variable part is specified by variable's declaration or value
6330 DV and offset OFFSET. */
6333 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6334 HOST_WIDE_INT offset, rtx set_src)
/* Ignore null DVs and decls that are not really declarations.  */
6338 if (!dv_as_opaque (dv)
6339 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
/* A missing slot means nothing is tracked for DV — nothing to do.  */
6342 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6346 slot = clobber_slot_part (set, loc, slot, offset, set_src);
6349 /* Delete the part of variable's location from dataflow set SET. The
6350 variable part is specified by its SET->vars slot SLOT and offset
6351 OFFSET and the part's location by LOC. */
6354 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
6355 HOST_WIDE_INT offset)
6357 variable var = (variable) *slot;
6358 int pos = find_variable_location_part (var, offset, NULL);
6362 location_chain node, next;
6363 location_chain *nextp;
/* Only unshare when the chain actually contains LOC; otherwise the
   shared copy can stay untouched.  */
6366 if (shared_var_p (var, set->vars))
6368 /* If the variable contains the location part we have to
6369 make a copy of the variable. */
6370 for (node = var->var_part[pos].loc_chain; node;
6373 if ((REG_P (node->loc) && REG_P (loc)
6374 && REGNO (node->loc) == REGNO (loc))
6375 || rtx_equal_p (node->loc, loc))
6377 slot = unshare_variable (set, slot, var,
6378 VAR_INIT_STATUS_UNKNOWN)
6379 var = (variable)*slot;
6385 /* Delete the location part. */
6387 nextp = &var->var_part[pos].loc_chain;
6388 for (node = *nextp; node; node = next)
6391 if ((REG_P (node->loc) && REG_P (loc)
6392 && REGNO (node->loc) == REGNO (loc))
6393 || rtx_equal_p (node->loc, loc))
/* One-part variables mirror their locations in the value chains;
   drop the back-reference as the node is removed.  */
6395 if (emit_notes && pos == 0 && dv_onepart_p (var->dv))
6396 remove_value_chains (var->dv, node->loc);
6397 /* If we have deleted the location which was last emitted
6398 we have to emit new location so add the variable to set
6399 of changed variables. */
6400 if (var->var_part[pos].cur_loc == node->loc)
6403 var->var_part[pos].cur_loc = NULL;
6404 var->cur_loc_changed = true;
6406 pool_free (loc_chain_pool, node);
6411 nextp = &node->next;
/* If the chain emptied out, compact var_part[] by shifting the
   remaining parts down over POS.  */
6414 if (var->var_part[pos].loc_chain == NULL)
6419 var->cur_loc_changed = true;
6420 while (pos < var->n_var_parts)
6422 var->var_part[pos] = var->var_part[pos + 1];
6427 variable_was_changed (var, set);
6433 /* Delete the part of variable's location from dataflow set SET. The
6434 variable part is specified by variable's declaration or value DV
6435 and offset OFFSET and the part's location by LOC. */
6438 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6439 HOST_WIDE_INT offset)
/* NO_INSERT lookup: absent DV means nothing to delete.  */
6441 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
6445 slot = delete_slot_part (set, loc, slot, offset);
6448 /* Structure for passing some other parameters to function
6449 vt_expand_loc_callback. */
6450 struct expand_loc_callback_data
6452 /* The variables and values active at this point. */
6455 /* True in vt_expand_loc_dummy calls, no rtl should be allocated.
6456 Non-NULL should be returned if vt_expand_loc would return
6457 non-NULL in that case, NULL otherwise. cur_loc_changed should be
6458 computed and cur_loc recomputed when possible (but just once
6459 per emit_notes_for_changes call). */
6462 /* True if expansion of subexpressions had to recompute some
6463 VALUE/DEBUG_EXPR_DECL's cur_loc or used a VALUE/DEBUG_EXPR_DECL
6464 whose cur_loc has been already recomputed during current
6465 emit_notes_for_changes call. */
/* Output flag: set by vt_expand_loc_callback, read back by
   vt_expand_loc_dummy's caller.  */
6466 bool cur_loc_changed;
6469 /* Callback for cselib_expand_value, that looks for expressions
6470 holding the value in the var-tracking hash tables. Return X for
6471 standard processing, anything else is to be used as-is. */
6474 vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
6476 struct expand_loc_callback_data *elcd
6477 = (struct expand_loc_callback_data *) data;
6478 bool dummy = elcd->dummy;
6479 bool cur_loc_changed = elcd->cur_loc_changed;
6483 rtx result, subreg, xret;
6485 switch (GET_CODE (x))
/* SUBREG: expand the inner expression first (dummy or real variant
   depending on elcd->dummy), then rebuild the subreg around it.  */
6490 if (cselib_dummy_expand_value_rtx_cb (SUBREG_REG (x), regs,
6492 vt_expand_loc_callback, data))
6498 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
6500 vt_expand_loc_callback, data);
6505 result = simplify_gen_subreg (GET_MODE (x), subreg,
6506 GET_MODE (SUBREG_REG (x)),
6509 /* Invalid SUBREGs are ok in debug info. ??? We could try
6510 alternate expansions for the VALUE as well. */
6512 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
/* DEBUG_EXPR and VALUE map to a decl-or-value key for the lookup.  */
6517 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
6522 dv = dv_from_value (x);
/* Cycle guard: a VALUE already being expanded must not recurse.  */
6530 if (VALUE_RECURSED_INTO (x))
6533 var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));
6537 if (dummy && dv_changed_p (dv))
6538 elcd->cur_loc_changed = true;
6542 if (var->n_var_parts == 0)
6545 elcd->cur_loc_changed = true;
6549 gcc_assert (var->n_var_parts == 1);
6551 VALUE_RECURSED_INTO (x) = true;
/* Try the cached cur_loc first; fall back to scanning the whole
   location chain when it no longer expands.  */
6554 if (var->var_part[0].cur_loc)
6558 if (cselib_dummy_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
6560 vt_expand_loc_callback, data))
6564 result = cselib_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
6566 vt_expand_loc_callback, data);
6568 set_dv_changed (dv, false);
6570 if (!result && dv_changed_p (dv))
6572 set_dv_changed (dv, false);
6573 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
6574 if (loc->loc == var->var_part[0].cur_loc)
6578 elcd->cur_loc_changed = cur_loc_changed;
6579 if (cselib_dummy_expand_value_rtx_cb (loc->loc, regs, max_depth,
6580 vt_expand_loc_callback,
6588 result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
6589 vt_expand_loc_callback,
/* Remember the first chain entry that expanded (or NULL) as the new
   cur_loc, and record whether it changed.  */
6595 if (dummy && (result || var->var_part[0].cur_loc))
6596 var->cur_loc_changed = true;
6597 var->var_part[0].cur_loc = loc ? loc->loc : NULL_RTX;
6601 if (var->cur_loc_changed)
6602 elcd->cur_loc_changed = true;
6603 else if (!result && var->var_part[0].cur_loc == NULL_RTX)
6604 elcd->cur_loc_changed = cur_loc_changed;
/* Clear the recursion guard before returning.  */
6607 VALUE_RECURSED_INTO (x) = false;
6614 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
6618 vt_expand_loc (rtx loc, htab_t vars)
6620 struct expand_loc_callback_data data;
/* Without debug insns there are no VALUEs to expand; LOC is usable
   as-is.  */
6622 if (!MAY_HAVE_DEBUG_INSNS)
6627 data.cur_loc_changed = false;
/* Depth limit 5 bounds the recursive VALUE expansion.  */
6628 loc = cselib_expand_value_rtx_cb (loc, scratch_regs, 5,
6629 vt_expand_loc_callback, &data);
6631 if (loc && MEM_P (loc))
6632 loc = targetm.delegitimize_address (loc);
6636 /* Like vt_expand_loc, but only return true/false (whether vt_expand_loc
6637 would succeed or not, without actually allocating new rtxes. */
6640 vt_expand_loc_dummy (rtx loc, htab_t vars, bool *pcur_loc_changed)
6642 struct expand_loc_callback_data data;
/* Only meaningful when debug insns exist; callers guarantee that.  */
6645 gcc_assert (MAY_HAVE_DEBUG_INSNS);
6648 data.cur_loc_changed = false;
6649 ret = cselib_dummy_expand_value_rtx_cb (loc, scratch_regs, 5,
6650 vt_expand_loc_callback, &data);
/* Report back whether any cur_loc had to be recomputed.  */
6651 *pcur_loc_changed = data.cur_loc_changed;
/* Checking-only map from decl to the last emitted VAR_LOCATION note,
   compiled in only under ENABLE_RTL_CHECKING.  */
6655 #ifdef ENABLE_RTL_CHECKING
6656 /* Used to verify that cur_loc_changed updating is safe. */
6657 static struct pointer_map_t *emitted_notes;
6660 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
6661 additional parameters: WHERE specifies whether the note shall be emitted
6662 before or after instruction INSN. */
6665 emit_note_insn_var_location (void **varp, void *data)
6667 variable var = (variable) *varp;
6668 rtx insn = ((emit_note_data *)data)->insn;
6669 enum emit_note_where where = ((emit_note_data *)data)->where;
6670 htab_t vars = ((emit_note_data *)data)->vars;
6672 int i, j, n_var_parts;
6674 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
6675 HOST_WIDE_INT last_limit;
6676 tree type_size_unit;
6677 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
6678 rtx loc[MAX_VAR_PARTS];
/* VALUEs and DEBUG_EXPR_DECLs get no note of their own; they only
   need their cur_loc refreshed (see value_or_debug_decl below).  */
6682 if (dv_is_value_p (var->dv))
6683 goto value_or_debug_decl;
6685 decl = dv_as_decl (var->dv);
6687 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6688 goto value_or_debug_decl;
/* Without debug stmts, default each part's cur_loc to the head of its
   location chain.  */
6693 if (!MAY_HAVE_DEBUG_STMTS)
6695 for (i = 0; i < var->n_var_parts; i++)
6696 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
6698 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
6699 var->cur_loc_changed = true;
6701 if (var->n_var_parts == 0)
6702 var->cur_loc_changed = true;
/* Fast path: skip note emission when nothing changed; with RTL
   checking we fall through so the emitted_notes map can verify it.  */
6704 #ifndef ENABLE_RTL_CHECKING
6705 if (!var->cur_loc_changed)
/* Collect expanded locations and offsets for each contiguous part.  */
6708 for (i = 0; i < var->n_var_parts; i++)
6710 enum machine_mode mode, wider_mode;
6713 if (last_limit < var->var_part[i].offset)
6718 else if (last_limit > var->var_part[i].offset)
6720 offsets[n_var_parts] = var->var_part[i].offset;
6721 if (!var->var_part[i].cur_loc)
6726 loc2 = vt_expand_loc (var->var_part[i].cur_loc, vars);
6732 loc[n_var_parts] = loc2;
6733 mode = GET_MODE (var->var_part[i].cur_loc);
6734 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
6735 if (var->var_part[i].cur_loc == lc->loc)
6737 initialized = lc->init;
6741 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
6743 /* Attempt to merge adjacent registers or memory. */
6744 wider_mode = GET_MODE_WIDER_MODE (mode);
6745 for (j = i + 1; j < var->n_var_parts; j++)
6746 if (last_limit <= var->var_part[j].offset)
6748 if (j < var->n_var_parts
6749 && wider_mode != VOIDmode
6750 && var->var_part[j].cur_loc
6751 && mode == GET_MODE (var->var_part[j].cur_loc)
6752 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
6753 && last_limit == var->var_part[j].offset
6754 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
6755 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
/* Two adjacent hard registers that together form one register of
   WIDER_MODE can be merged into a single wider REG.  */
6759 if (REG_P (loc[n_var_parts])
6760 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
6761 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
6762 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
6765 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
6766 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
6768 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
6769 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
6772 if (!REG_P (new_loc)
6773 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
6776 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
/* Likewise merge two MEMs whose addresses are base (+ const) and
   base + const + GET_MODE_SIZE — i.e. physically adjacent memory.  */
6779 else if (MEM_P (loc[n_var_parts])
6780 && GET_CODE (XEXP (loc2, 0)) == PLUS
6781 && REG_P (XEXP (XEXP (loc2, 0), 0))
6782 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
6784 if ((REG_P (XEXP (loc[n_var_parts], 0))
6785 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
6786 XEXP (XEXP (loc2, 0), 0))
6787 && INTVAL (XEXP (XEXP (loc2, 0), 1))
6788 == GET_MODE_SIZE (mode))
6789 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
6790 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
6791 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
6792 XEXP (XEXP (loc2, 0), 0))
6793 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
6794 + GET_MODE_SIZE (mode)
6795 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
6796 new_loc = adjust_address_nv (loc[n_var_parts],
6802 loc[n_var_parts] = new_loc;
6804 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
/* Tail of the decl not covered by any part => status is unknown.  */
6810 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6811 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
6814 if (! flag_var_tracking_uninit)
6815 initialized = VAR_INIT_STATUS_INITIALIZED;
/* Build the VAR_LOCATION body: NULL_RTX for "no location", a single
   EXPR_LIST for one part, a PARALLEL for several.  */
6819 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
6821 else if (n_var_parts == 1)
6824 = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
6826 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
6829 else if (n_var_parts)
6833 for (i = 0; i < n_var_parts; i++)
6835 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
6837 parallel = gen_rtx_PARALLEL (VOIDmode,
6838 gen_rtvec_v (n_var_parts, loc));
6839 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6840 parallel, (int) initialized);
/* With RTL checking, verify a "no change" claim really matches the
   previously emitted note for this decl.  */
6843 #ifdef ENABLE_RTL_CHECKING
6846 void **note_slot = pointer_map_insert (emitted_notes, decl);
6847 rtx pnote = (rtx) *note_slot;
6848 if (!var->cur_loc_changed && (pnote || PAT_VAR_LOCATION_LOC (note_vl)))
6851 gcc_assert (rtx_equal_p (PAT_VAR_LOCATION_LOC (pnote),
6852 PAT_VAR_LOCATION_LOC (note_vl)));
6854 *note_slot = (void *) note_vl;
6856 if (!var->cur_loc_changed)
/* Emit the note after or before INSN as requested; notes after call
   insns are specially flagged.  */
6860 if (where != EMIT_NOTE_BEFORE_INSN)
6862 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
6863 if (where == EMIT_NOTE_AFTER_CALL_INSN)
6864 NOTE_DURING_CALL_P (note) = true;
6867 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
6868 NOTE_VAR_LOCATION (note) = note_vl;
/* Done with this variable: clear the changed flags and remove it
   from changed_variables.  */
6871 set_dv_changed (var->dv, false);
6872 var->cur_loc_changed = false;
6873 gcc_assert (var->in_changed_variables);
6874 var->in_changed_variables = false;
6875 htab_clear_slot (changed_variables, varp);
6877 /* Continue traversing the hash table. */
6880 value_or_debug_decl:
/* For VALUEs/debug exprs only refresh cur_loc (dummy expansion, no
   rtl allocated), preferring the existing cur_loc if still valid.  */
6881 if (dv_changed_p (var->dv) && var->n_var_parts)
6884 bool cur_loc_changed;
6886 if (var->var_part[0].cur_loc
6887 && vt_expand_loc_dummy (var->var_part[0].cur_loc, vars,
6890 for (lc = var->var_part[0].loc_chain; lc; lc = lc->next)
6891 if (lc->loc != var->var_part[0].cur_loc
6892 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
6894 var->var_part[0].cur_loc = lc ? lc->loc : NULL_RTX;
/* Instantiate pointer-VEC support for 'variable' on the heap.  */
6899 DEF_VEC_P (variable);
6900 DEF_VEC_ALLOC_P (variable, heap);
6902 /* Stack of variable_def pointers that need processing with
6903 check_changed_vars_2. */
6905 static VEC (variable, heap) *changed_variables_stack;
6907 /* VALUEs with no variables that need set_dv_changed (val, false)
6908 called before check_changed_vars_3. */
6910 static VEC (rtx, heap) *changed_values_stack;
6912 /* Helper function for check_changed_vars_1 and check_changed_vars_2. */
/* Propagate "changed" status from DV to every DV that references it
   through the value_chains table, pushing newly-changed variables on
   changed_variables_stack (or VALUEs on changed_values_stack) and
   recursing through VALUE dependencies.  */
6915 check_changed_vars_0 (decl_or_value dv, htab_t htab)
6918 = (value_chain) htab_find_with_hash (value_chains, dv, dv_htab_hash (dv));
6922 for (vc = vc->next; vc; vc = vc->next)
6923 if (!dv_changed_p (vc->dv))
6926 = (variable) htab_find_with_hash (htab, vc->dv,
6927 dv_htab_hash (vc->dv));
6930 set_dv_changed (vc->dv, true);
6931 VEC_safe_push (variable, heap, changed_variables_stack, vcvar);
/* A VALUE with no variable entry still needs its changed flag set
   (and later cleared) so dependents get rechecked.  */
6933 else if (dv_is_value_p (vc->dv))
6935 set_dv_changed (vc->dv, true);
6936 VEC_safe_push (rtx, heap, changed_values_stack,
6937 dv_as_value (vc->dv));
6938 check_changed_vars_0 (vc->dv, htab);
6943 /* Populate changed_variables_stack with variable_def pointers
6944 that need variable_was_changed called on them. */
/* htab_traverse callback over changed_variables; only VALUEs and
   DEBUG_EXPR_DECLs can have dependents recorded in value_chains.  */
6947 check_changed_vars_1 (void **slot, void *data)
6949 variable var = (variable) *slot;
6950 htab_t htab = (htab_t) data;
6952 if (dv_is_value_p (var->dv)
6953 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
6954 check_changed_vars_0 (var->dv, htab);
6958 /* Add VAR to changed_variables and also for VALUEs add recursively
6959 all DVs that aren't in changed_variables yet but reference the
6960 VALUE from its loc_chain. */
6963 check_changed_vars_2 (variable var, htab_t htab)
/* Registering VAR may cascade: its dependents are found via
   check_changed_vars_0 and pushed for later processing.  */
6965 variable_was_changed (var, NULL);
6966 if (dv_is_value_p (var->dv)
6967 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
6968 check_changed_vars_0 (var->dv, htab);
6971 /* For each changed decl (except DEBUG_EXPR_DECLs) recompute
6972 cur_loc if needed (and cur_loc of all VALUEs and DEBUG_EXPR_DECLs
6973 it needs and are also in changed variables) and track whether
6974 cur_loc (or anything it uses to compute location) had to change
6975 during the current emit_notes_for_changes call. */
6978 check_changed_vars_3 (void **slot, void *data)
6980 variable var = (variable) *slot;
6981 htab_t vars = (htab_t) data;
6984 bool cur_loc_changed;
/* VALUEs and DEBUG_EXPR_DECLs are handled elsewhere; skip them here.  */
6986 if (dv_is_value_p (var->dv)
6987 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
6990 for (i = 0; i < var->n_var_parts; i++)
/* If the cached cur_loc still expands (dummy check, no rtl made),
   keep it and just record whether anything underneath changed.  */
6992 if (var->var_part[i].cur_loc
6993 && vt_expand_loc_dummy (var->var_part[i].cur_loc, vars,
6996 if (cur_loc_changed)
6997 var->cur_loc_changed = true;
/* Otherwise pick the first chain entry that does expand.  */
7000 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
7001 if (lc->loc != var->var_part[i].cur_loc
7002 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
7004 if (lc || var->var_part[i].cur_loc)
7005 var->cur_loc_changed = true;
7006 var->var_part[i].cur_loc = lc ? lc->loc : NULL_RTX;
7008 if (var->n_var_parts == 0)
7009 var->cur_loc_changed = true;
7013 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
7014 CHANGED_VARIABLES and delete this chain. WHERE specifies whether the notes
7015 shall be emitted before of after instruction INSN. */
7018 emit_notes_for_changes (rtx insn, enum emit_note_where where,
7021 emit_note_data data;
7022 htab_t htab = shared_hash_htab (vars);
/* Nothing changed => nothing to emit.  */
7024 if (!htab_elements (changed_variables))
7027 if (MAY_HAVE_DEBUG_INSNS)
7029 /* Unfortunately this has to be done in two steps, because
7030 we can't traverse a hashtab into which we are inserting
7031 through variable_was_changed. */
7032 htab_traverse (changed_variables, check_changed_vars_1, htab);
7033 while (VEC_length (variable, changed_variables_stack) > 0)
7034 check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
/* Clear the temporary changed marks on VALUEs set in step 1, then
   recompute cur_locs before the notes are produced.  */
7036 while (VEC_length (rtx, changed_values_stack) > 0)
7037 set_dv_changed (dv_from_value (VEC_pop (rtx, changed_values_stack)),
7039 htab_traverse (changed_variables, check_changed_vars_3, htab);
7046 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
7049 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
7050 same variable in hash table DATA or is not there at all. */
7053 emit_notes_for_differences_1 (void **slot, void *data)
7055 htab_t new_vars = (htab_t) data;
7056 variable old_var, new_var;
7058 old_var = (variable) *slot;
7059 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
7060 dv_htab_hash (old_var->dv));
7064 /* Variable has disappeared. */
/* Synthesize an empty placeholder so a "location lost" note gets
   emitted for the vanished variable.  */
7067 empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
7068 empty_var->dv = old_var->dv;
7069 empty_var->refcount = 0;
7070 empty_var->n_var_parts = 0;
7071 empty_var->cur_loc_changed = false;
7072 empty_var->in_changed_variables = false;
/* One-part variables track back-references; drop them for every
   location the old variable had.  */
7073 if (dv_onepart_p (old_var->dv))
7077 gcc_assert (old_var->n_var_parts == 1);
7078 for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
7079 remove_value_chains (old_var->dv, lc->loc);
7081 variable_was_changed (empty_var, NULL);
7082 /* Continue traversing the hash table. */
7085 if (variable_different_p (old_var, new_var))
/* Rebuild the value chains: skip the common chain prefix, then add
   the new tail's references and remove the old tail's.  */
7087 if (dv_onepart_p (old_var->dv))
7089 location_chain lc1, lc2;
7091 gcc_assert (old_var->n_var_parts == 1);
7092 gcc_assert (new_var->n_var_parts == 1);
7093 lc1 = old_var->var_part[0].loc_chain;
7094 lc2 = new_var->var_part[0].loc_chain;
7097 && ((REG_P (lc1->loc) && REG_P (lc2->loc))
7098 || rtx_equal_p (lc1->loc, lc2->loc)))
7103 for (; lc2; lc2 = lc2->next)
7104 add_value_chains (old_var->dv, lc2->loc);
7105 for (; lc1; lc1 = lc1->next)
7106 remove_value_chains (old_var->dv, lc1->loc);
7108 variable_was_changed (new_var, NULL);
7110 /* Update cur_loc. */
/* Carry the old cur_loc over to the new variable when the same
   location still appears in the new chain; otherwise flag a change.  */
7111 if (old_var != new_var)
7114 for (i = 0; i < new_var->n_var_parts; i++)
7116 new_var->var_part[i].cur_loc = NULL;
7117 if (old_var->n_var_parts != new_var->n_var_parts
7118 || old_var->var_part[i].offset != new_var->var_part[i].offset)
7119 new_var->cur_loc_changed = true;
7120 else if (old_var->var_part[i].cur_loc != NULL)
7123 rtx cur_loc = old_var->var_part[i].cur_loc;
7125 for (lc = new_var->var_part[i].loc_chain; lc; lc = lc->next)
7126 if (lc->loc == cur_loc
7127 || rtx_equal_p (cur_loc, lc->loc))
7129 new_var->var_part[i].cur_loc = lc->loc;
7133 new_var->cur_loc_changed = true;
7138 /* Continue traversing the hash table. */
7142 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
7146 emit_notes_for_differences_2 (void **slot, void *data)
7148 htab_t old_vars = (htab_t) data;
7149 variable old_var, new_var;
7151 new_var = (variable) *slot;
7152 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
7153 dv_htab_hash (new_var->dv));
7157 /* Variable has appeared. */
/* Register value-chain back-references for a newly appearing
   one-part variable, reset cur_locs, and queue the change.  */
7158 if (dv_onepart_p (new_var->dv))
7162 gcc_assert (new_var->n_var_parts == 1);
7163 for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
7164 add_value_chains (new_var->dv, lc->loc);
7166 for (i = 0; i < new_var->n_var_parts; i++)
7167 new_var->var_part[i].cur_loc = NULL;
7168 variable_was_changed (new_var, NULL);
7171 /* Continue traversing the hash table. */
7175 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
/* NOTE(review): the tail of this comment and the function's return type /
   braces are elided in this listing.  Two traversal passes: pass 1 over
   OLD_SET finds variables that vanished or changed (callback ..._1); pass 2
   over NEW_SET finds variables that newly appeared (callback ..._2); then
   all accumulated changes are emitted before INSN.  */
7179 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
7180 dataflow_set *new_set)
7182 htab_traverse (shared_hash_htab (old_set->vars),
7183 emit_notes_for_differences_1,
7184 shared_hash_htab (new_set->vars));
7185 htab_traverse (shared_hash_htab (new_set->vars),
7186 emit_notes_for_differences_2,
7187 shared_hash_htab (old_set->vars));
7188 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
7191 /* Emit the notes for changes of location parts in the basic block BB. */
/* NOTE(review): elided listing -- the switch statement over mo->type and its
   case labels are not visible; the case names below are inferred from the
   handler bodies and the micro-operation ordering described at the top of
   the file (adjust < use < use-no-var < call < set < clobber < adjust) --
   TODO confirm against the full source.  SET is seeded from BB's IN set and
   replayed through the micro operations, emitting notes as locations change.  */
7194 emit_notes_in_bb (basic_block bb, dataflow_set *set)
7197 micro_operation *mo;
7199 dataflow_set_clear (set);
7200 dataflow_set_copy (set, &VTI (bb)->in);
7202 for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
7204 rtx insn = mo->insn;
/* Presumably the MO_CALL case: call-clobbered locations are invalidated.  */
7209 dataflow_set_clear_at_call (set);
7210 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
/* Presumably the MO_USE case: record a use of a register or memory
   location with uninitialized status.  */
7215 rtx loc = mo->u.loc;
7218 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
7220 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
7222 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
/* Presumably the MO_VAL_LOC case: a debug-insn VAR_LOCATION, possibly
   wrapped in a CONCAT with its cselib value.  */
7228 rtx loc = mo->u.loc;
7232 if (GET_CODE (loc) == CONCAT)
7234 val = XEXP (loc, 0);
7235 vloc = XEXP (loc, 1);
7243 var = PAT_VAR_LOCATION_DECL (vloc);
7245 clobber_variable_part (set, NULL_RTX,
7246 dv_from_decl (var), 0, NULL_RTX);
7249 if (VAL_NEEDS_RESOLUTION (loc))
7250 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
7251 set_variable_part (set, val, dv_from_decl (var), 0,
7252 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
7256 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
/* Presumably the MO_VAL_USE case: a use paired with its cselib value;
   the value is resolved and/or stored before the insn's notes go out.  */
7262 rtx loc = mo->u.loc;
7263 rtx val, vloc, uloc;
7265 vloc = uloc = XEXP (loc, 1);
7266 val = XEXP (loc, 0);
7268 if (GET_CODE (val) == CONCAT)
7270 uloc = XEXP (val, 1);
7271 val = XEXP (val, 0);
7274 if (VAL_NEEDS_RESOLUTION (loc))
7275 val_resolve (set, val, vloc, insn);
7277 val_store (set, val, uloc, insn, false);
7279 if (VAL_HOLDS_TRACK_EXPR (loc))
7281 if (GET_CODE (uloc) == REG)
7282 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
7284 else if (GET_CODE (uloc) == MEM)
7285 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
7289 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
/* Presumably the MO_VAL_SET case: a store paired with a cselib value,
   optionally carrying a reverse operation (VAL_EXPR_HAS_REVERSE).  */
7295 rtx loc = mo->u.loc;
7296 rtx val, vloc, uloc, reverse = NULL_RTX;
7299 if (VAL_EXPR_HAS_REVERSE (loc))
7301 reverse = XEXP (loc, 1);
7302 vloc = XEXP (loc, 0);
7304 uloc = XEXP (vloc, 1);
7305 val = XEXP (vloc, 0);
7308 if (GET_CODE (val) == CONCAT)
7310 vloc = XEXP (val, 1);
7311 val = XEXP (val, 0);
7314 if (GET_CODE (vloc) == SET)
7316 rtx vsrc = SET_SRC (vloc);
7318 gcc_assert (val != vsrc);
7319 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
7321 vloc = SET_DEST (vloc);
7323 if (VAL_NEEDS_RESOLUTION (loc))
7324 val_resolve (set, val, vsrc, insn);
7326 else if (VAL_NEEDS_RESOLUTION (loc))
7328 gcc_assert (GET_CODE (uloc) == SET
7329 && GET_CODE (SET_SRC (uloc)) == REG);
7330 val_resolve (set, val, SET_SRC (uloc), insn);
/* If the set carries a tracked user expression, distinguish clobbers
   from copies/ordinary sets; otherwise drop stale register bindings.  */
7333 if (VAL_HOLDS_TRACK_EXPR (loc))
7335 if (VAL_EXPR_IS_CLOBBERED (loc))
7338 var_reg_delete (set, uloc, true);
7339 else if (MEM_P (uloc))
7340 var_mem_delete (set, uloc, true);
7344 bool copied_p = VAL_EXPR_IS_COPIED (loc);
7346 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
7348 if (GET_CODE (uloc) == SET)
7350 set_src = SET_SRC (uloc);
7351 uloc = SET_DEST (uloc);
7356 status = find_src_status (set, set_src);
7358 set_src = find_src_set_src (set, set_src);
7362 var_reg_delete_and_set (set, uloc, !copied_p,
7364 else if (MEM_P (uloc))
7365 var_mem_delete_and_set (set, uloc, !copied_p,
7369 else if (REG_P (uloc))
7370 var_regno_delete (set, REGNO (uloc));
7372 val_store (set, val, vloc, insn, true);
7375 val_store (set, XEXP (reverse, 0), XEXP (reverse, 1),
/* Notes for stores go out *before the next* insn.  */
7378 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
/* Presumably the MO_SET case: plain set without value tracking.  */
7385 rtx loc = mo->u.loc;
7388 if (GET_CODE (loc) == SET)
7390 set_src = SET_SRC (loc);
7391 loc = SET_DEST (loc);
7395 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7398 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7401 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
/* Presumably the MO_COPY case: the old location stays valid, so the
   modify flag passed to *_delete_and_set is false.  */
7408 rtx loc = mo->u.loc;
7409 enum var_init_status src_status;
7412 if (GET_CODE (loc) == SET)
7414 set_src = SET_SRC (loc);
7415 loc = SET_DEST (loc);
7418 src_status = find_src_status (set, set_src);
7419 set_src = find_src_set_src (set, set_src);
7422 var_reg_delete_and_set (set, loc, false, src_status, set_src);
7424 var_mem_delete_and_set (set, loc, false, src_status, set_src);
7426 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
/* Presumably the MO_USE_NO_VAR case: non-clobbering delete.  */
7433 rtx loc = mo->u.loc;
7436 var_reg_delete (set, loc, false);
7438 var_mem_delete (set, loc, false);
7440 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
/* Presumably the MO_CLOBBER case: the location is invalidated.  */
7446 rtx loc = mo->u.loc;
7449 var_reg_delete (set, loc, true);
7451 var_mem_delete (set, loc, true);
7453 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
/* Presumably the MO_ADJUST case: track the running stack adjustment.  */
7459 set->stack_adjust += mo->u.adjust;
7465 /* Emit notes for the whole function. */
/* NOTE(review): elided listing -- declarations, FOR_EACH_BB loops, braces and
   the emit_notes = true/false flips are among the missing lines.  Walks all
   basic blocks with a scratch dataflow set CUR, emitting notes for the
   differences at block boundaries and within each block, then tears the
   per-block sets down as it goes.  */
7468 vt_emit_notes (void)
7473 #ifdef ENABLE_RTL_CHECKING
7474 emitted_notes = pointer_map_create ();
/* No changes may be pending before note emission starts.  */
7476 gcc_assert (!htab_elements (changed_variables))
7478 /* Free memory occupied by the out hash tables, as they aren't used
7481 dataflow_set_clear (&VTI (bb)->out);
7483 /* Enable emitting notes by functions (mainly by set_variable_part and
7484 delete_variable_part). */
7487 if (MAY_HAVE_DEBUG_INSNS)
/* Debug-insn mode keeps value chains for every preserved cselib value and
   needs worklist stacks for changed variables/values.  */
7492 for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
7493 add_cselib_value_chains (dv_from_value (val));
7494 changed_variables_stack = VEC_alloc (variable, heap, 40);
7495 changed_values_stack = VEC_alloc (rtx, heap, 40);
7498 dataflow_set_init (&cur);
7502 /* Emit the notes for changes of variable locations between two
7503 subsequent basic blocks. */
7504 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
7506 /* Emit the notes for the changes in the basic block itself. */
7507 emit_notes_in_bb (bb, &cur);
7509 /* Free memory occupied by the in hash table, we won't need it
7511 dataflow_set_clear (&VTI (bb)->in);
7513 #ifdef ENABLE_CHECKING
/* Checking build: diffing CUR against the empty table must yield nothing.  */
7514 htab_traverse (shared_hash_htab (cur.vars),
7515 emit_notes_for_differences_1,
7516 shared_hash_htab (empty_shared_hash));
7517 if (MAY_HAVE_DEBUG_INSNS)
7522 for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
7523 remove_cselib_value_chains (dv_from_value (val));
7524 gcc_assert (htab_elements (value_chains) == 0);
7527 dataflow_set_destroy (&cur);
7529 if (MAY_HAVE_DEBUG_INSNS)
7531 VEC_free (variable, heap, changed_variables_stack);
7532 VEC_free (rtx, heap, changed_values_stack);
7535 #ifdef ENABLE_RTL_CHECKING
7536 pointer_map_destroy (emitted_notes);
7541 /* If there is a declaration and offset associated with register/memory RTL
7542 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
/* NOTE(review): elided listing -- the REG_P (rtl) test guarding the first
   branch, the `return true;` statements and the final `return false;` are
   among the missing lines.  Reads decl/offset attributes attached to a REG
   or MEM rtx; on failure the out-parameters are left untouched.  */
7545 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
7549 if (REG_ATTRS (rtl))
7551 *declp = REG_EXPR (rtl);
7552 *offsetp = REG_OFFSET (rtl);
7556 else if (MEM_P (rtl))
7558 if (MEM_ATTRS (rtl))
7560 *declp = MEM_EXPR (rtl);
7561 *offsetp = INT_MEM_OFFSET (rtl);
7568 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
/* NOTE(review): elided listing -- declarations (parm, decl, dv, out, val...),
   `continue;` statements after the early guards, and several braces are not
   visible.  Walks DECL_ARGUMENTS of the current function and seeds the
   ENTRY_BLOCK OUT set with each trackable parameter's incoming location.  */
7571 vt_add_function_parameters (void)
7575 for (parm = DECL_ARGUMENTS (current_function_decl);
7576 parm; parm = TREE_CHAIN (parm))
7578 rtx decl_rtl = DECL_RTL_IF_SET (parm);
7579 rtx incoming = DECL_INCOMING_RTL (parm);
7581 enum machine_mode mode;
7582 HOST_WIDE_INT offset;
/* Guard clauses: skip anything that is not a named PARM_DECL with both an
   RTL home and incoming RTL in a non-BLK mode (each presumably followed by
   an elided `continue;`).  */
7586 if (TREE_CODE (parm) != PARM_DECL)
7589 if (!DECL_NAME (parm))
7592 if (!decl_rtl || !incoming)
7595 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
7598 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
7600 if (REG_P (incoming) || MEM_P (incoming))
7602 /* This means argument is passed by invisible reference. */
7605 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
7609 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
7611 offset += byte_lowpart_offset (GET_MODE (incoming),
7612 GET_MODE (decl_rtl));
7621 /* Assume that DECL_RTL was a pseudo that got spilled to
7622 memory. The spill slot sharing code will force the
7623 memory to reference spill_slot_decl (%sfp), so we don't
7624 match above. That's ok, the pseudo must have referenced
7625 the entire parameter, so just reset OFFSET. */
7626 gcc_assert (decl == get_spill_slot_decl (false));
7630 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
7633 out = &VTI (ENTRY_BLOCK_PTR)->out;
7635 dv = dv_from_decl (parm);
7637 if (target_for_debug_bind (parm)
7638 /* We can't deal with these right now, because this kind of
7639 variable is single-part. ??? We could handle parallels
7640 that describe multiple locations for the same single
7641 value, but ATM we don't. */
7642 && GET_CODE (incoming) != PARALLEL)
7646 /* ??? We shouldn't ever hit this, but it may happen because
7647 arguments passed by invisible reference aren't dealt with
7648 above: incoming-rtl will have Pmode rather than the
7649 expected mode for the type. */
/* Bind the parameter to its cselib VALUE so later tracking can follow
   the value rather than the raw rtx.  */
7653 val = cselib_lookup (var_lowpart (mode, incoming), mode, true);
7655 /* ??? Float-typed values in memory are not handled by
7659 preserve_value (val);
7660 set_variable_part (out, val->val_rtx, dv, offset,
7661 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7662 dv = dv_from_value (val->val_rtx);
/* Record the actual incoming location (hard register or stack slot).  */
7666 if (REG_P (incoming))
7668 incoming = var_lowpart (mode, incoming);
7669 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
7670 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
7672 set_variable_part (out, incoming, dv, offset,
7673 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7675 else if (MEM_P (incoming))
7677 incoming = var_lowpart (mode, incoming);
7678 set_variable_part (out, incoming, dv, offset,
7679 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7683 if (MAY_HAVE_DEBUG_INSNS)
7685 cselib_preserve_only_values ();
7686 cselib_reset_table (cselib_get_next_uid ());
7691 /* Allocate and initialize the data structures for variable tracking
7692 and parse the RTL to get the micro operations. */
/* NOTE(review): elided listing -- the basic-block loop headers, cselib_init
   call, micro_operation declaration, MO_ADJUST payload assignments and many
   braces are missing.  Sequence: allocate per-block aux data, optionally set
   up cselib for debug insns, scan each extended basic block's insns into
   micro-operation vectors, then allocate the pass's pools/hashes and init
   the per-block dataflow sets.  */
7695 vt_initialize (void)
7699 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
7701 if (MAY_HAVE_DEBUG_INSNS)
/* Debug-insn mode needs a scratch-register bitmap, a pool for
   value-backed variables and a vector of preserved cselib values.  */
7704 scratch_regs = BITMAP_ALLOC (NULL);
7705 valvar_pool = create_alloc_pool ("small variable_def pool",
7706 sizeof (struct variable_def), 256);
7707 preserved_values = VEC_alloc (rtx, heap, 256);
7711 scratch_regs = NULL;
7718 HOST_WIDE_INT pre, post = 0;
7719 basic_block first_bb, last_bb;
7721 if (MAY_HAVE_DEBUG_INSNS)
7723 cselib_record_sets_hook = add_with_sets;
7724 if (dump_file && (dump_flags & TDF_DETAILS))
7725 fprintf (dump_file, "first value: %i\n",
7726 cselib_get_next_uid ());
/* Extend the scan region over single-predecessor fallthru successors so
   cselib state can persist across the extended basic block.  */
7733 if (bb->next_bb == EXIT_BLOCK_PTR
7734 || ! single_pred_p (bb->next_bb))
7736 e = find_edge (bb, bb->next_bb);
7737 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
7743 /* Add the micro-operations to the vector. */
7744 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
7746 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
7747 insn = NEXT_INSN (insn))
/* Without a frame pointer, pre/post stack adjustments become explicit
   MO_ADJUST micro operations around the insn.  */
7751 if (!frame_pointer_needed)
7753 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
7757 mo.type = MO_ADJUST;
7760 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
7763 if (dump_file && (dump_flags & TDF_DETAILS))
7764 log_op_type (PATTERN (insn), bb, insn,
7765 MO_ADJUST, dump_file);
/* cselib_process_insn fires add_with_sets via the hook; if the hook
   did not fire (or no debug insns), record the insn directly.  */
7769 cselib_hook_called = false;
7770 if (MAY_HAVE_DEBUG_INSNS)
7772 cselib_process_insn (insn);
7773 if (dump_file && (dump_flags & TDF_DETAILS))
7775 print_rtl_single (dump_file, insn);
7776 dump_cselib_table (dump_file);
7779 if (!cselib_hook_called)
7780 add_with_sets (insn, 0, 0);
7782 if (!frame_pointer_needed && post)
7785 mo.type = MO_ADJUST;
7788 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
7791 if (dump_file && (dump_flags & TDF_DETAILS))
7792 log_op_type (PATTERN (insn), bb, insn,
7793 MO_ADJUST, dump_file);
7801 if (MAY_HAVE_DEBUG_INSNS)
7803 cselib_preserve_only_values ();
7804 cselib_reset_table (cselib_get_next_uid ());
7805 cselib_record_sets_hook = NULL;
/* Allocate the pass-global pools and hash tables.  */
7809 attrs_pool = create_alloc_pool ("attrs_def pool",
7810 sizeof (struct attrs_def), 1024);
7811 var_pool = create_alloc_pool ("variable_def pool",
7812 sizeof (struct variable_def)
7813 + (MAX_VAR_PARTS - 1)
7814 * sizeof (((variable)NULL)->var_part[0]), 64);
7815 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
7816 sizeof (struct location_chain_def),
7818 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
7819 sizeof (struct shared_hash_def), 256);
7820 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
7821 empty_shared_hash->refcount = 1;
7822 empty_shared_hash->htab
7823 = htab_create (1, variable_htab_hash, variable_htab_eq,
7824 variable_htab_free);
7825 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
7826 variable_htab_free);
7827 if (MAY_HAVE_DEBUG_INSNS)
7829 value_chain_pool = create_alloc_pool ("value_chain_def pool",
7830 sizeof (struct value_chain_def),
7832 value_chains = htab_create (32, value_chain_htab_hash,
7833 value_chain_htab_eq, NULL);
7836 /* Init the IN and OUT sets. */
7839 VTI (bb)->visited = false;
7840 VTI (bb)->flooded = false;
7841 dataflow_set_init (&VTI (bb)->in);
7842 dataflow_set_init (&VTI (bb)->out);
7843 VTI (bb)->permp = NULL;
/* The entry block is flooded by definition; parameters seed its OUT set.  */
7846 VTI (ENTRY_BLOCK_PTR)->flooded = true;
7847 vt_add_function_parameters ();
7850 /* Get rid of all debug insns from the insn stream. */
/* NOTE(review): elided listing -- the early `return;`, the FOR_EACH_BB loop
   header and the call that actually deletes the insn (presumably
   delete_insn (insn); -- TODO confirm) are not visible.  */
7853 delete_debug_insns (void)
7858 if (!MAY_HAVE_DEBUG_INSNS)
7863 FOR_BB_INSNS_SAFE (bb, insn, next)
7864 if (DEBUG_INSN_P (insn))
7869 /* Run a fast, BB-local only version of var tracking, to take care of
7870 information that we don't do global analysis on, such that not all
7871 information is lost. If SKIPPED holds, we're skipping the global
7872 pass entirely, so we should try to use information it would have
7873 handled as well.. */
7876 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
7878 /* ??? Just skip it all for now. */
/* Placeholder implementation: the BB-local pass is unimplemented, so all
   debug insns are simply removed regardless of SKIPPED.  */
7879 delete_debug_insns ();
7882 /* Free the data structures needed for variable tracking. */
/* NOTE(review): the function header (presumably `static void vt_finalize
   (void)` -- TODO confirm) and the FOR_EACH_BB loop headers fall in elided
   lines.  Mirror of vt_initialize: free micro-op vectors, per-block dataflow
   sets, pools, hash tables and, in debug-insn mode, the cselib-related
   structures.  */
7891 VEC_free (micro_operation, heap, VTI (bb)->mos);
7896 dataflow_set_destroy (&VTI (bb)->in);
7897 dataflow_set_destroy (&VTI (bb)->out);
7898 if (VTI (bb)->permp)
7900 dataflow_set_destroy (VTI (bb)->permp);
7901 XDELETE (VTI (bb)->permp);
7904 free_aux_for_blocks ();
7905 htab_delete (empty_shared_hash->htab);
7906 htab_delete (changed_variables);
7907 free_alloc_pool (attrs_pool);
7908 free_alloc_pool (var_pool);
7909 free_alloc_pool (loc_chain_pool);
7910 free_alloc_pool (shared_hash_pool);
7912 if (MAY_HAVE_DEBUG_INSNS)
7914 htab_delete (value_chains);
7915 free_alloc_pool (value_chain_pool);
7916 free_alloc_pool (valvar_pool);
7917 VEC_free (rtx, heap, preserved_values);
7919 BITMAP_FREE (scratch_regs);
7920 scratch_regs = NULL;
/* vui_vec is a lazily grown scratch vector; release it between runs.  */
7924 XDELETEVEC (vui_vec);
7929 /* The entry point to variable tracking pass. */
/* NOTE(review): elided listing -- the `bool success;` declaration, the
   vt_initialize/vt_finalize/vt_emit_notes calls, several `return 0;`
   statements and braces are not visible.  Overall flow: optionally delete
   debug insns, bail out of huge CFGs, compute stack adjustments, find
   locations (retrying once without VTA on failure), then emit notes.  */
7931 static inline unsigned int
7932 variable_tracking_main_1 (void)
/* flag_var_tracking_assignments < 0 means VTA was force-disabled.  */
7936 if (flag_var_tracking_assignments < 0)
7938 delete_debug_insns ();
/* Give up on very large, very dense CFGs to bound compile time.  */
7942 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
7944 vt_debug_insns_local (true);
7948 mark_dfs_back_edges ();
7950 if (!frame_pointer_needed)
7952 if (!vt_stack_adjustments ())
7955 vt_debug_insns_local (true);
7960 success = vt_find_locations ();
/* First attempt failed with VTA enabled: strip debug insns, disable VTA
   and retry once from scratch.  */
7962 if (!success && flag_var_tracking_assignments > 0)
7966 delete_debug_insns ();
7968 /* This is later restored by our caller. */
7969 flag_var_tracking_assignments = 0;
7973 if (!frame_pointer_needed && !vt_stack_adjustments ())
7976 success = vt_find_locations ();
7982 vt_debug_insns_local (false);
7986 if (dump_file && (dump_flags & TDF_DETAILS))
7988 dump_dataflow_sets ();
7989 dump_flow_info (dump_file, dump_flags);
7995 vt_debug_insns_local (false);
/* NOTE(review): the return type line and braces are elided.  Wrapper that
   saves/restores flag_var_tracking_assignments around the worker, since
   variable_tracking_main_1 may clear it for its retry path.  */
8000 variable_tracking_main (void)
8003 int save = flag_var_tracking_assignments;
8005 ret = variable_tracking_main_1 ();
8007 flag_var_tracking_assignments = save;
/* Pass gate: run var-tracking only when -fvar-tracking is enabled.
   NOTE(review): the `static bool` return-type line is elided.  */
8013 gate_handle_var_tracking (void)
8015 return (flag_var_tracking);
8020 struct rtl_opt_pass pass_variable_tracking =
8024 "vartrack", /* name */
8025 gate_handle_var_tracking, /* gate */
8026 variable_tracking_main, /* execute */
8029 0, /* static_pass_number */
8030 TV_VAR_TRACKING, /* tv_id */
8031 0, /* properties_required */
8032 0, /* properties_provided */
8033 0, /* properties_destroyed */
8034 0, /* todo_flags_start */
8035 TODO_dump_func | TODO_verify_rtl_sharing/* todo_flags_finish */