1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 < set < clobber < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offset of variables addressed using stack pointer),
48 the table of structures describing the locations of parts of a variable
49 and a linked list for each physical register.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 effectively deleting the appropriate variable parts when we set or clobber the
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short so it is a good data structure here.
58 For example in the following code, register allocator may assign same
59 register to variables A and B, and both of them are stored in the same
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in RTL code. Each such note describes
74 the location of one variable at the point in instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction, we only emit these notes where the location of variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for a large variables
85 which consist of several parts, for example long long).
91 #include "coretypes.h"
95 #include "hard-reg-set.h"
96 #include "basic-block.h"
99 #include "insn-config.h"
102 #include "alloc-pool.h"
108 #include "tree-pass.h"
109 #include "tree-flow.h"
114 #include "diagnostic.h"
115 #include "pointer-set.h"
117 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
118 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
119 Currently the value is the same as IDENTIFIER_NODE, which has such
120 a property. If this compile time assertion ever fails, make sure that
121 the new tree code that equals (int) VALUE has the same property. */
122 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
124 /* Type of micro operation. */
125 enum micro_operation_type
127 MO_USE, /* Use location (REG or MEM). */
128 MO_USE_NO_VAR,/* Use location which is not associated with a variable
129 or the variable is not trackable. */
130 MO_VAL_USE, /* Use location which is associated with a value. */
131 MO_VAL_LOC, /* Use location which appears in a debug insn. */
132 MO_VAL_SET, /* Set location associated with a value. */
133 MO_SET, /* Set location. */
134 MO_COPY, /* Copy the same portion of a variable from one
135 location to another. */
136 MO_CLOBBER, /* Clobber location. */
137 MO_CALL, /* Call insn. */
138 MO_ADJUST /* Adjust stack pointer. */
142 static const char * const ATTRIBUTE_UNUSED
143 micro_operation_type_name[] = {
156 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
157 Notes emitted as AFTER_CALL are to take effect during the call,
158 rather than after the call. */
161 EMIT_NOTE_BEFORE_INSN,
162 EMIT_NOTE_AFTER_INSN,
163 EMIT_NOTE_AFTER_CALL_INSN
166 /* Structure holding information about micro operation. */
167 typedef struct micro_operation_def
169 /* Type of micro operation. */
170 enum micro_operation_type type;
173 /* Location. For MO_SET and MO_COPY, this is the SET that
174 performs the assignment, if known, otherwise it is the target
175 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
176 CONCAT of the VALUE and the LOC associated with it. For
177 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
178 associated with it. */
181 /* Stack adjustment. */
182 HOST_WIDE_INT adjust;
185 /* The instruction which the micro operation is in, for MO_USE,
186 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
187 instruction or note in the original flow (before any var-tracking
188 notes are inserted, to simplify emission of notes), for MO_SET
193 /* A declaration of a variable, or an RTL value being handled like a
195 typedef void *decl_or_value;
197 /* Structure for passing some other parameters to function
198 emit_note_insn_var_location. */
199 typedef struct emit_note_data_def
201 /* The instruction which the note will be emitted before/after. */
204 /* Where the note will be emitted (before/after insn)? */
205 enum emit_note_where where;
207 /* The variables and values active at this point. */
211 /* Description of location of a part of a variable. The content of a physical
212 register is described by a chain of these structures.
213 The chains are pretty short (usually 1 or 2 elements) and thus
214 chain is the best data structure. */
215 typedef struct attrs_def
217 /* Pointer to next member of the list. */
218 struct attrs_def *next;
220 /* The rtx of register. */
223 /* The declaration corresponding to LOC. */
226 /* Offset from start of DECL. */
227 HOST_WIDE_INT offset;
230 /* Structure holding a refcounted hash table. If refcount > 1,
231 it must be first unshared before modified. */
232 typedef struct shared_hash_def
234 /* Reference count. */
237 /* Actual hash table. */
241 /* Structure holding the IN or OUT set for a basic block. */
242 typedef struct dataflow_set_def
244 /* Adjustment of stack offset. */
245 HOST_WIDE_INT stack_adjust;
247 /* Attributes for registers (lists of attrs). */
248 attrs regs[FIRST_PSEUDO_REGISTER];
250 /* Variable locations. */
253 /* Vars that is being traversed. */
254 shared_hash traversed_vars;
257 /* The structure (one for each basic block) containing the information
258 needed for variable tracking. */
259 typedef struct variable_tracking_info_def
261 /* Number of micro operations stored in the MOS array. */
264 /* The array of micro operations. */
265 micro_operation *mos;
267 /* The IN and OUT set for dataflow analysis. */
271 /* The permanent-in dataflow set for this block. This is used to
272 hold values for which we had to compute entry values. ??? This
273 should probably be dynamically allocated, to avoid using more
274 memory in non-debug builds. */
277 /* Has the block been visited in DFS? */
280 /* Has the block been flooded in VTA? */
283 } *variable_tracking_info;
285 /* Structure for chaining the locations. */
286 typedef struct location_chain_def
288 /* Next element in the chain. */
289 struct location_chain_def *next;
291 /* The location (REG, MEM or VALUE). */
294 /* The "value" stored in this location. */
298 enum var_init_status init;
301 /* Structure describing one part of variable. */
302 typedef struct variable_part_def
304 /* Chain of locations of the part. */
305 location_chain loc_chain;
307 /* Location which was last emitted to location list. */
310 /* The offset in the variable. */
311 HOST_WIDE_INT offset;
314 /* Maximum number of location parts. */
315 #define MAX_VAR_PARTS 16
317 /* Structure describing where the variable is located. */
318 typedef struct variable_def
320 /* The declaration of the variable, or an RTL value being handled
321 like a declaration. */
324 /* Reference count. */
327 /* Number of variable parts. */
330 /* True if this variable changed (any of its) cur_loc fields
331 during the current emit_notes_for_changes resp.
332 emit_notes_for_differences call. */
333 bool cur_loc_changed;
335 /* True if this variable_def struct is currently in the
336 changed_variables hash table. */
337 bool in_changed_variables;
339 /* The variable parts. */
340 variable_part var_part[1];
342 typedef const struct variable_def *const_variable;
344 /* Structure for chaining backlinks from referenced VALUEs to
345 DVs that are referencing them. */
346 typedef struct value_chain_def
348 /* Next value_chain entry. */
349 struct value_chain_def *next;
351 /* The declaration of the variable, or an RTL value
352 being handled like a declaration, whose var_parts[0].loc_chain
353 references the VALUE owning this value_chain. */
356 /* Reference count. */
359 typedef const struct value_chain_def *const_value_chain;
361 /* Pointer to the BB's information specific to variable tracking pass. */
362 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
364 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
365 #define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
367 /* Alloc pool for struct attrs_def. */
368 static alloc_pool attrs_pool;
370 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
371 static alloc_pool var_pool;
373 /* Alloc pool for struct variable_def with a single var_part entry. */
374 static alloc_pool valvar_pool;
376 /* Alloc pool for struct location_chain_def. */
377 static alloc_pool loc_chain_pool;
379 /* Alloc pool for struct shared_hash_def. */
380 static alloc_pool shared_hash_pool;
382 /* Alloc pool for struct value_chain_def. */
383 static alloc_pool value_chain_pool;
385 /* Changed variables, notes will be emitted for them. */
386 static htab_t changed_variables;
388 /* Links from VALUEs to DVs referencing them in their current loc_chains. */
389 static htab_t value_chains;
391 /* Shall notes be emitted? */
392 static bool emit_notes;
394 /* Empty shared hashtable. */
395 static shared_hash empty_shared_hash;
397 /* Scratch register bitmap used by cselib_expand_value_rtx. */
398 static bitmap scratch_regs = NULL;
400 /* Variable used to tell whether cselib_process_insn called our hook. */
401 static bool cselib_hook_called;
403 /* Local function prototypes. */
404 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
406 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
408 static void bb_stack_adjust_offset (basic_block);
409 static bool vt_stack_adjustments (void);
410 static rtx adjust_stack_reference (rtx, HOST_WIDE_INT);
411 static hashval_t variable_htab_hash (const void *);
412 static int variable_htab_eq (const void *, const void *);
413 static void variable_htab_free (void *);
415 static void init_attrs_list_set (attrs *);
416 static void attrs_list_clear (attrs *);
417 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
418 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
419 static void attrs_list_copy (attrs *, attrs);
420 static void attrs_list_union (attrs *, attrs);
422 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
423 enum var_init_status);
424 static int vars_copy_1 (void **, void *);
425 static void vars_copy (htab_t, htab_t);
426 static tree var_debug_decl (tree);
427 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
428 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
429 enum var_init_status, rtx);
430 static void var_reg_delete (dataflow_set *, rtx, bool);
431 static void var_regno_delete (dataflow_set *, int);
432 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
433 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
434 enum var_init_status, rtx);
435 static void var_mem_delete (dataflow_set *, rtx, bool);
437 static void dataflow_set_init (dataflow_set *);
438 static void dataflow_set_clear (dataflow_set *);
439 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
440 static int variable_union_info_cmp_pos (const void *, const void *);
441 static int variable_union (void **, void *);
442 static void dataflow_set_union (dataflow_set *, dataflow_set *);
443 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
444 static bool canon_value_cmp (rtx, rtx);
445 static int loc_cmp (rtx, rtx);
446 static bool variable_part_different_p (variable_part *, variable_part *);
447 static bool onepart_variable_different_p (variable, variable);
448 static bool variable_different_p (variable, variable);
449 static int dataflow_set_different_1 (void **, void *);
450 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
451 static void dataflow_set_destroy (dataflow_set *);
453 static bool contains_symbol_ref (rtx);
454 static bool track_expr_p (tree, bool);
455 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
456 static int count_uses (rtx *, void *);
457 static void count_uses_1 (rtx *, void *);
458 static void count_stores (rtx, const_rtx, void *);
459 static int add_uses (rtx *, void *);
460 static void add_uses_1 (rtx *, void *);
461 static void add_stores (rtx, const_rtx, void *);
462 static bool compute_bb_dataflow (basic_block);
463 static bool vt_find_locations (void);
465 static void dump_attrs_list (attrs);
466 static int dump_var_slot (void **, void *);
467 static void dump_var (variable);
468 static void dump_vars (htab_t);
469 static void dump_dataflow_set (dataflow_set *);
470 static void dump_dataflow_sets (void);
472 static void variable_was_changed (variable, dataflow_set *);
473 static void **set_slot_part (dataflow_set *, rtx, void **,
474 decl_or_value, HOST_WIDE_INT,
475 enum var_init_status, rtx);
476 static void set_variable_part (dataflow_set *, rtx,
477 decl_or_value, HOST_WIDE_INT,
478 enum var_init_status, rtx, enum insert_option);
479 static void **clobber_slot_part (dataflow_set *, rtx,
480 void **, HOST_WIDE_INT, rtx);
481 static void clobber_variable_part (dataflow_set *, rtx,
482 decl_or_value, HOST_WIDE_INT, rtx);
483 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
484 static void delete_variable_part (dataflow_set *, rtx,
485 decl_or_value, HOST_WIDE_INT);
486 static int emit_note_insn_var_location (void **, void *);
487 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
488 static int emit_notes_for_differences_1 (void **, void *);
489 static int emit_notes_for_differences_2 (void **, void *);
490 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
491 static void emit_notes_in_bb (basic_block, dataflow_set *);
492 static void vt_emit_notes (void);
494 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
495 static void vt_add_function_parameters (void);
496 static void vt_initialize (void);
497 static void vt_finalize (void);
499 /* Given a SET, calculate the amount of stack adjustment it contains
500 PRE- and POST-modifying stack pointer.
501 This function is similar to stack_adjust_offset. */
504 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
507 rtx src = SET_SRC (pattern);
508 rtx dest = SET_DEST (pattern);
511 if (dest == stack_pointer_rtx)
513 /* (set (reg sp) (plus (reg sp) (const_int))) */
514 code = GET_CODE (src);
515 if (! (code == PLUS || code == MINUS)
516 || XEXP (src, 0) != stack_pointer_rtx
517 || !CONST_INT_P (XEXP (src, 1)))
521 *post += INTVAL (XEXP (src, 1));
523 *post -= INTVAL (XEXP (src, 1));
525 else if (MEM_P (dest))
527 /* (set (mem (pre_dec (reg sp))) (foo)) */
528 src = XEXP (dest, 0);
529 code = GET_CODE (src);
535 if (XEXP (src, 0) == stack_pointer_rtx)
537 rtx val = XEXP (XEXP (src, 1), 1);
538 /* We handle only adjustments by constant amount. */
539 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
542 if (code == PRE_MODIFY)
543 *pre -= INTVAL (val);
545 *post -= INTVAL (val);
551 if (XEXP (src, 0) == stack_pointer_rtx)
553 *pre += GET_MODE_SIZE (GET_MODE (dest));
559 if (XEXP (src, 0) == stack_pointer_rtx)
561 *post += GET_MODE_SIZE (GET_MODE (dest));
567 if (XEXP (src, 0) == stack_pointer_rtx)
569 *pre -= GET_MODE_SIZE (GET_MODE (dest));
575 if (XEXP (src, 0) == stack_pointer_rtx)
577 *post -= GET_MODE_SIZE (GET_MODE (dest));
588 /* Given an INSN, calculate the amount of stack adjustment it contains
589 PRE- and POST-modifying stack pointer. */
592 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
600 pattern = PATTERN (insn);
601 if (RTX_FRAME_RELATED_P (insn))
603 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
605 pattern = XEXP (expr, 0);
608 if (GET_CODE (pattern) == SET)
609 stack_adjust_offset_pre_post (pattern, pre, post);
610 else if (GET_CODE (pattern) == PARALLEL
611 || GET_CODE (pattern) == SEQUENCE)
615 /* There may be stack adjustments inside compound insns. Search
617 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
618 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
619 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
623 /* Compute stack adjustment in basic block BB. */
626 bb_stack_adjust_offset (basic_block bb)
628 HOST_WIDE_INT offset;
631 offset = VTI (bb)->in.stack_adjust;
632 for (i = 0; i < VTI (bb)->n_mos; i++)
634 if (VTI (bb)->mos[i].type == MO_ADJUST)
635 offset += VTI (bb)->mos[i].u.adjust;
636 else if (VTI (bb)->mos[i].type != MO_CALL)
638 if (MEM_P (VTI (bb)->mos[i].u.loc))
640 VTI (bb)->mos[i].u.loc
641 = adjust_stack_reference (VTI (bb)->mos[i].u.loc, -offset);
645 VTI (bb)->out.stack_adjust = offset;
648 /* Compute stack adjustments for all blocks by traversing DFS tree.
649 Return true when the adjustments on all incoming edges are consistent.
650 Heavily borrowed from pre_and_rev_post_order_compute. */
653 vt_stack_adjustments (void)
655 edge_iterator *stack;
658 /* Initialize entry block. */
659 VTI (ENTRY_BLOCK_PTR)->visited = true;
660 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
662 /* Allocate stack for back-tracking up CFG. */
663 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
666 /* Push the first edge on to the stack. */
667 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
675 /* Look at the edge on the top of the stack. */
677 src = ei_edge (ei)->src;
678 dest = ei_edge (ei)->dest;
680 /* Check if the edge destination has been visited yet. */
681 if (!VTI (dest)->visited)
683 VTI (dest)->visited = true;
684 VTI (dest)->in.stack_adjust = VTI (src)->out.stack_adjust;
685 bb_stack_adjust_offset (dest);
687 if (EDGE_COUNT (dest->succs) > 0)
688 /* Since the DEST node has been visited for the first
689 time, check its successors. */
690 stack[sp++] = ei_start (dest->succs);
694 /* Check whether the adjustments on the edges are the same. */
695 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
701 if (! ei_one_before_end_p (ei))
702 /* Go to the next edge. */
703 ei_next (&stack[sp - 1]);
705 /* Return to previous level if there are no more edges. */
714 /* Adjust stack reference MEM by ADJUSTMENT bytes and make it relative
715 to the argument pointer. Return the new rtx. */
718 adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment)
722 #ifdef FRAME_POINTER_CFA_OFFSET
723 adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
724 cfa = plus_constant (frame_pointer_rtx, adjustment);
726 adjustment -= ARG_POINTER_CFA_OFFSET (current_function_decl);
727 cfa = plus_constant (arg_pointer_rtx, adjustment);
730 addr = replace_rtx (copy_rtx (XEXP (mem, 0)), stack_pointer_rtx, cfa);
731 tmp = simplify_rtx (addr);
735 return replace_equiv_address_nv (mem, addr);
738 /* Return true if a decl_or_value DV is a DECL or NULL. */
740 dv_is_decl_p (decl_or_value dv)
742 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
745 /* Return true if a decl_or_value is a VALUE rtl. */
747 dv_is_value_p (decl_or_value dv)
749 return dv && !dv_is_decl_p (dv);
752 /* Return the decl in the decl_or_value. */
754 dv_as_decl (decl_or_value dv)
756 #ifdef ENABLE_CHECKING
757 gcc_assert (dv_is_decl_p (dv));
762 /* Return the value in the decl_or_value. */
764 dv_as_value (decl_or_value dv)
766 #ifdef ENABLE_CHECKING
767 gcc_assert (dv_is_value_p (dv));
772 /* Return the opaque pointer in the decl_or_value. */
774 dv_as_opaque (decl_or_value dv)
779 /* Return true if a decl_or_value must not have more than one variable
782 dv_onepart_p (decl_or_value dv)
786 if (!MAY_HAVE_DEBUG_INSNS)
789 if (dv_is_value_p (dv))
792 decl = dv_as_decl (dv);
797 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
800 return (target_for_debug_bind (decl) != NULL_TREE);
803 /* Return the variable pool to be used for dv, depending on whether it
804 can have multiple parts or not. */
805 static inline alloc_pool
806 dv_pool (decl_or_value dv)
808 return dv_onepart_p (dv) ? valvar_pool : var_pool;
811 /* Build a decl_or_value out of a decl. */
812 static inline decl_or_value
813 dv_from_decl (tree decl)
817 #ifdef ENABLE_CHECKING
818 gcc_assert (dv_is_decl_p (dv));
823 /* Build a decl_or_value out of a value. */
824 static inline decl_or_value
825 dv_from_value (rtx value)
829 #ifdef ENABLE_CHECKING
830 gcc_assert (dv_is_value_p (dv));
835 extern void debug_dv (decl_or_value dv);
838 debug_dv (decl_or_value dv)
840 if (dv_is_value_p (dv))
841 debug_rtx (dv_as_value (dv));
843 debug_generic_stmt (dv_as_decl (dv));
846 typedef unsigned int dvuid;
848 /* Return the uid of DV. */
851 dv_uid (decl_or_value dv)
853 if (dv_is_value_p (dv))
854 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
856 return DECL_UID (dv_as_decl (dv));
859 /* Compute the hash from the uid. */
861 static inline hashval_t
862 dv_uid2hash (dvuid uid)
867 /* The hash function for a mask table in a shared_htab chain. */
869 static inline hashval_t
870 dv_htab_hash (decl_or_value dv)
872 return dv_uid2hash (dv_uid (dv));
875 /* The hash function for variable_htab, computes the hash value
876 from the declaration of variable X. */
879 variable_htab_hash (const void *x)
881 const_variable const v = (const_variable) x;
883 return dv_htab_hash (v->dv);
886 /* Compare the declaration of variable X with declaration Y. */
889 variable_htab_eq (const void *x, const void *y)
891 const_variable const v = (const_variable) x;
892 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
894 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
897 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
900 variable_htab_free (void *elem)
903 variable var = (variable) elem;
904 location_chain node, next;
906 gcc_assert (var->refcount > 0);
909 if (var->refcount > 0)
912 for (i = 0; i < var->n_var_parts; i++)
914 for (node = var->var_part[i].loc_chain; node; node = next)
917 pool_free (loc_chain_pool, node);
919 var->var_part[i].loc_chain = NULL;
921 pool_free (dv_pool (var->dv), var);
924 /* The hash function for value_chains htab, computes the hash value
928 value_chain_htab_hash (const void *x)
930 const_value_chain const v = (const_value_chain) x;
932 return dv_htab_hash (v->dv);
935 /* Compare the VALUE X with VALUE Y. */
938 value_chain_htab_eq (const void *x, const void *y)
940 const_value_chain const v = (const_value_chain) x;
941 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
943 return dv_as_opaque (v->dv) == dv_as_opaque (dv);
946 /* Initialize the set (array) SET of attrs to empty lists. */
949 init_attrs_list_set (attrs *set)
953 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
957 /* Make the list *LISTP empty. */
960 attrs_list_clear (attrs *listp)
964 for (list = *listp; list; list = next)
967 pool_free (attrs_pool, list);
972 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
975 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
977 for (; list; list = list->next)
978 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
983 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
986 attrs_list_insert (attrs *listp, decl_or_value dv,
987 HOST_WIDE_INT offset, rtx loc)
991 list = (attrs) pool_alloc (attrs_pool);
994 list->offset = offset;
999 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1002 attrs_list_copy (attrs *dstp, attrs src)
1006 attrs_list_clear (dstp);
1007 for (; src; src = src->next)
1009 n = (attrs) pool_alloc (attrs_pool);
1012 n->offset = src->offset;
1018 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1021 attrs_list_union (attrs *dstp, attrs src)
1023 for (; src; src = src->next)
1025 if (!attrs_list_member (*dstp, src->dv, src->offset))
1026 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1030 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1034 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1036 gcc_assert (!*dstp);
1037 for (; src; src = src->next)
1039 if (!dv_onepart_p (src->dv))
1040 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1042 for (src = src2; src; src = src->next)
1044 if (!dv_onepart_p (src->dv)
1045 && !attrs_list_member (*dstp, src->dv, src->offset))
1046 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1050 /* Shared hashtable support. */
1052 /* Return true if VARS is shared. */
1055 shared_hash_shared (shared_hash vars)
1057 return vars->refcount > 1;
1060 /* Return the hash table for VARS. */
1062 static inline htab_t
1063 shared_hash_htab (shared_hash vars)
1068 /* Return true if VAR is shared, or maybe because VARS is shared. */
1071 shared_var_p (variable var, shared_hash vars)
1073 /* Don't count an entry in the changed_variables table as a duplicate. */
1074 return ((var->refcount > 1 + (int) var->in_changed_variables)
1075 || shared_hash_shared (vars));
1078 /* Copy variables into a new hash table. */
1081 shared_hash_unshare (shared_hash vars)
1083 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1084 gcc_assert (vars->refcount > 1);
1085 new_vars->refcount = 1;
1087 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1088 variable_htab_eq, variable_htab_free);
1089 vars_copy (new_vars->htab, vars->htab);
1094 /* Increment reference counter on VARS and return it. */
1096 static inline shared_hash
1097 shared_hash_copy (shared_hash vars)
1103 /* Decrement reference counter and destroy hash table if not shared
1107 shared_hash_destroy (shared_hash vars)
1109 gcc_assert (vars->refcount > 0);
1110 if (--vars->refcount == 0)
1112 htab_delete (vars->htab);
1113 pool_free (shared_hash_pool, vars);
1117 /* Unshare *PVARS if shared and return slot for DV. If INS is
1118 INSERT, insert it if not already present. */
1120 static inline void **
1121 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1122 hashval_t dvhash, enum insert_option ins)
1124 if (shared_hash_shared (*pvars))
1125 *pvars = shared_hash_unshare (*pvars);
1126 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1129 static inline void **
1130 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1131 enum insert_option ins)
1133 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1136 /* Return slot for DV, if it is already present in the hash table.
1137 If it is not present, insert it only VARS is not shared, otherwise
1140 static inline void **
1141 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1143 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1144 shared_hash_shared (vars)
1145 ? NO_INSERT : INSERT);
1148 static inline void **
1149 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1151 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1154 /* Return slot for DV only if it is already present in the hash table. */
1156 static inline void **
1157 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1160 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1164 static inline void **
1165 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1167 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1170 /* Return variable for DV or NULL if not already present in the hash
1173 static inline variable
1174 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1176 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1179 static inline variable
1180 shared_hash_find (shared_hash vars, decl_or_value dv)
1182 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1185 /* Return true if TVAL is better than CVAL as a canonival value. We
1186 choose lowest-numbered VALUEs, using the RTX address as a
1187 tie-breaker. The idea is to arrange them into a star topology,
1188 such that all of them are at most one step away from the canonical
1189 value, and the canonical value has backlinks to all of them, in
1190 addition to all the actual locations. We don't enforce this
1191 topology throughout the entire dataflow analysis, though.
1195 canon_value_cmp (rtx tval, rtx cval)
1198 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
1201 static bool dst_can_be_shared;
1203 /* Return a copy of a variable VAR and insert it to dataflow set SET.  */
1206 unshare_variable (dataflow_set *set, void **slot, variable var,
1207 enum var_init_status initialized)
/* Allocate the copy from the pool selected by the kind of VAR->dv.  */
1212 new_var = (variable) pool_alloc (dv_pool (var->dv));
1213 new_var->dv = var->dv;
/* The copy starts with a single reference; the original keeps its own.  */
1214 new_var->refcount = 1;
1216 new_var->n_var_parts = var->n_var_parts;
1217 new_var->cur_loc_changed = var->cur_loc_changed;
1218 var->cur_loc_changed = false;
1219 new_var->in_changed_variables = false;
/* Without -fvar-tracking-uninit every location is considered initialized.  */
1221 if (! flag_var_tracking_uninit)
1222 initialized = VAR_INIT_STATUS_INITIALIZED;
/* Deep-copy each variable part's location chain.  */
1224 for (i = 0; i < var->n_var_parts; i++)
1226 location_chain node;
1227 location_chain *nextp;
1229 new_var->var_part[i].offset = var->var_part[i].offset;
1230 nextp = &new_var->var_part[i].loc_chain;
1231 for (node = var->var_part[i].loc_chain; node; node = node->next)
1233 location_chain new_lc;
1235 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1236 new_lc->next = NULL;
1237 if (node->init > initialized)
1238 new_lc->init = node->init;
1240 new_lc->init = initialized;
/* set_src is only kept when it is not a MEM.  */
1241 if (node->set_src && !(MEM_P (node->set_src)))
1242 new_lc->set_src = node->set_src;
1244 new_lc->set_src = NULL;
1245 new_lc->loc = node->loc;
1248 nextp = &new_lc->next;
1251 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
/* The variable was copied, so the destination can no longer share it.  */
1254 dst_can_be_shared = false;
1255 if (shared_hash_shared (set->vars))
1256 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1257 else if (set->traversed_vars && set->vars != set->traversed_vars)
1258 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
/* If the original was registered in changed_variables, transfer that
   registration to the copy.  */
1260 if (var->in_changed_variables)
1263 = htab_find_slot_with_hash (changed_variables, var->dv,
1264 dv_htab_hash (var->dv), NO_INSERT);
1265 gcc_assert (*cslot == (void *) var);
1266 var->in_changed_variables = false;
1267 variable_htab_free (var);
1269 new_var->in_changed_variables = true;
1274 /* Add a variable from *SLOT to hash table DATA and increase its reference
1278 vars_copy_1 (void **slot, void *data)
1280 htab_t dst = (htab_t) data;
1284 src = (variable) *slot;
1287 dstp = htab_find_slot_with_hash (dst, src->dv,
1288 dv_htab_hash (src->dv),
1292 /* Continue traversing the hash table. */
1296 /* Copy all variables from hash table SRC to hash table DST. */
1299 vars_copy (htab_t dst, htab_t src)
/* htab_traverse_noresize avoids resizing DST while inserting.  */
1301 htab_traverse_noresize (src, vars_copy_1, dst);
1304 /* Map a decl to its main debug decl. */
1307 var_debug_decl (tree decl)
/* Follow DECL_DEBUG_EXPR only when it originates from a decl; otherwise
   DECL itself is already the debug decl.  */
1309 if (decl && DECL_P (decl)
1310 && DECL_DEBUG_EXPR_IS_FROM (decl) && DECL_DEBUG_EXPR (decl)
1311 && DECL_P (DECL_DEBUG_EXPR (decl)))
1312 decl = DECL_DEBUG_EXPR (decl);
1317 /* Set the register LOC to contain DV, OFFSET. */
1320 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1321 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1322 enum insert_option iopt)
1325 bool decl_p = dv_is_decl_p (dv);
/* For decls, track the main debug decl rather than the decl itself.  */
1328 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
/* Only add an attrs entry if (DV, OFFSET) is not already recorded for
   this register.  */
1330 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1331 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1332 && node->offset == offset)
1335 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1336 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1339 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1342 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1345 tree decl = REG_EXPR (loc);
1346 HOST_WIDE_INT offset = REG_OFFSET (loc);
1348 var_reg_decl_set (set, loc, initialized,
1349 dv_from_decl (decl), offset, set_src, INSERT);
/* Return the initialization status recorded in SET for location LOC of
   variable DV, or VAR_INIT_STATUS_UNKNOWN if LOC is not found.  When
   uninitialized-variable tracking is disabled, always return
   VAR_INIT_STATUS_INITIALIZED.  */
1352 static enum var_init_status
1353 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1357 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1359 if (! flag_var_tracking_uninit)
1360 return VAR_INIT_STATUS_INITIALIZED;
1362 var = shared_hash_find (set->vars, dv);
/* Scan every part's location chain until a location equal to LOC is found.  */
1365 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1367 location_chain nextp;
1368 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1369 if (rtx_equal_p (nextp->loc, loc))
1371 ret_val = nextp->init;
1380 /* Delete current content of register LOC in dataflow set SET and set
1381 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1382 MODIFY is true, any other live copies of the same variable part are
1383 also deleted from the dataflow set, otherwise the variable part is
1384 assumed to be copied from another location holding the same
1388 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1389 enum var_init_status initialized, rtx set_src)
1391 tree decl = REG_EXPR (loc);
1392 HOST_WIDE_INT offset = REG_OFFSET (loc);
1396 decl = var_debug_decl (decl);
/* If the caller did not know the status, look it up in SET.  */
1398 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1399 initialized = get_init_value (set, loc, dv_from_decl (decl));
/* Remove from the register's attrs list every association other than
   the (DECL, OFFSET) we are about to set.  */
1401 nextp = &set->regs[REGNO (loc)];
1402 for (node = *nextp; node; node = next)
1405 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1407 delete_variable_part (set, node->loc, node->dv, node->offset)
1408 pool_free (attrs_pool, node);
1414 nextp = &node->next;
1418 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1419 var_reg_set (set, loc, initialized, set_src);
1422 /* Delete the association of register LOC in dataflow set SET with any
1423 variables that aren't onepart. If CLOBBER is true, also delete any
1424 other live copies of the same variable part, and delete the
1425 association with onepart dvs too. */
1428 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1430 attrs *nextp = &set->regs[REGNO (loc)];
1435 tree decl = REG_EXPR (loc);
1436 HOST_WIDE_INT offset = REG_OFFSET (loc);
1438 decl = var_debug_decl (decl);
1440 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
/* Walk the register's attrs list, deleting onepart entries only when
   CLOBBER is set.  */
1443 for (node = *nextp; node; node = next)
1446 if (clobber || !dv_onepart_p (node->dv))
1448 delete_variable_part (set, node->loc, node->dv, node->offset);
1449 pool_free (attrs_pool, node);
1453 nextp = &node->next;
1457 /* Delete content of register with number REGNO in dataflow set SET. */
1460 var_regno_delete (dataflow_set *set, int regno)
1462 attrs *reg = &set->regs[regno];
/* Delete every variable part recorded for this register and free the
   attrs nodes back to their pool.  */
1465 for (node = *reg; node; node = next)
1468 delete_variable_part (set, node->loc, node->dv, node->offset);
1469 pool_free (attrs_pool, node);
1474 /* Set the location of DV, OFFSET as the MEM LOC. */
1477 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1478 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1479 enum insert_option iopt)
/* For decls, track the main debug decl rather than the decl itself.  */
1481 if (dv_is_decl_p (dv))
1482 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1484 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1487 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1489 Adjust the address first if it is stack pointer based. */
1492 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1495 tree decl = MEM_EXPR (loc);
1496 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1498 var_mem_decl_set (set, loc, initialized,
1499 dv_from_decl (decl), offset, set_src, INSERT);
1502 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1503 dataflow set SET to LOC. If MODIFY is true, any other live copies
1504 of the same variable part are also deleted from the dataflow set,
1505 otherwise the variable part is assumed to be copied from another
1506 location holding the same part.
1507 Adjust the address first if it is stack pointer based. */
1510 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1511 enum var_init_status initialized, rtx set_src)
1513 tree decl = MEM_EXPR (loc);
1514 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1516 decl = var_debug_decl (decl);
/* If the caller did not know the status, look it up in SET.  */
1518 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1519 initialized = get_init_value (set, loc, dv_from_decl (decl));
1522 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1523 var_mem_set (set, loc, initialized, set_src);
1526 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1527 true, also delete any other live copies of the same variable part.
1528 Adjust the address first if it is stack pointer based. */
1531 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1533 tree decl = MEM_EXPR (loc);
1534 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1536 decl = var_debug_decl (decl);
/* On a clobber, drop all live copies of the part before deleting LOC.  */
1538 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1539 delete_variable_part (set, loc, dv_from_decl (decl), offset);
1542 /* Bind a value to a location it was just stored in. If MODIFIED
1543 holds, assume the location was modified, detaching it from any
1544 values bound to it. */
1547 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
1549 cselib_val *v = CSELIB_VAL_PTR (val);
1551 gcc_assert (cselib_preserved_value_p (v));
/* Dump the store and the value's known locations for -fdump details.  */
1555 fprintf (dump_file, "%i: ", INSN_UID (insn));
1556 print_inline_rtx (dump_file, val, 0);
1557 fprintf (dump_file, " stored in ");
1558 print_inline_rtx (dump_file, loc, 0);
1561 struct elt_loc_list *l;
1562 for (l = v->locs; l; l = l->next)
1564 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
1565 print_inline_rtx (dump_file, l->loc, 0);
1568 fprintf (dump_file, "\n");
/* Bind VAL to LOC according to LOC's kind: register, memory, or other.  */
1574 var_regno_delete (set, REGNO (loc));
1575 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1576 dv_from_value (val), 0, NULL_RTX, INSERT);
1578 else if (MEM_P (loc))
1579 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1580 dv_from_value (val), 0, NULL_RTX, INSERT);
1582 set_variable_part (set, loc, dv_from_value (val), 0,
1583 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1586 /* Reset this node, detaching all its equivalences. Return the slot
1587 in the variable hash table that holds dv, if there is one. */
1590 val_reset (dataflow_set *set, decl_or_value dv)
1592 variable var = shared_hash_find (set->vars, dv) ;
1593 location_chain node;
1596 if (!var || !var->n_var_parts)
1599 gcc_assert (var->n_var_parts == 1);
/* First pass: pick the best (most canonical) VALUE among the
   equivalences in the location chain.  */
1602 for (node = var->var_part[0].loc_chain; node; node = node->next)
1603 if (GET_CODE (node->loc) == VALUE
1604 && canon_value_cmp (node->loc, cval))
/* Second pass: re-point every other VALUE's equivalence at the chosen
   canonical value.  */
1607 for (node = var->var_part[0].loc_chain; node; node = node->next)
1608 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
1610 /* Redirect the equivalence link to the new canonical
1611 value, or simply remove it if it would point at
1614 set_variable_part (set, cval, dv_from_value (node->loc),
1615 0, node->init, node->set_src, NO_INSERT);
1616 delete_variable_part (set, dv_as_value (dv),
1617 dv_from_value (node->loc), 0);
1622 decl_or_value cdv = dv_from_value (cval);
1624 /* Keep the remaining values connected, accumulating links
1625 in the canonical value. */
1626 for (node = var->var_part[0].loc_chain; node; node = node->next)
1628 if (node->loc == cval)
1630 else if (GET_CODE (node->loc) == REG)
1631 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
1632 node->set_src, NO_INSERT);
1633 else if (GET_CODE (node->loc) == MEM)
1634 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
1635 node->set_src, NO_INSERT);
1637 set_variable_part (set, node->loc, cdv, 0,
1638 node->init, node->set_src, NO_INSERT);
1642 /* We remove this last, to make sure that the canonical value is not
1643 removed to the point of requiring reinsertion. */
1645 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
1647 clobber_variable_part (set, NULL, dv, 0, NULL);
1649 /* ??? Should we make sure there aren't other available values or
1650 variables whose values involve this one other than by
1651 equivalence? E.g., at the very least we should reset MEMs, those
1652 shouldn't be too hard to find cselib-looking up the value as an
1653 address, then locating the resulting value in our own hash
1657 /* Find the values in a given location and map the val to another
1658 value, if it is unique, or add the location as one holding the
1662 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
1664 decl_or_value dv = dv_from_value (val);
/* Dump the resolution for -fdump details.  */
1666 if (dump_file && (dump_flags & TDF_DETAILS))
1669 fprintf (dump_file, "%i: ", INSN_UID (insn));
1671 fprintf (dump_file, "head: ");
1672 print_inline_rtx (dump_file, val, 0);
1673 fputs (" is at ", dump_file);
1674 print_inline_rtx (dump_file, loc, 0);
1675 fputc ('\n', dump_file);
/* Detach any stale equivalences of VAL before recording new ones.  */
1678 val_reset (set, dv);
1682 attrs node, found = NULL;
/* Look for a same-mode VALUE already recorded in LOC's register.  */
1684 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1685 if (dv_is_value_p (node->dv)
1686 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
1690 /* Map incoming equivalences. ??? Wouldn't it be nice if
1691 we just started sharing the location lists? Maybe a
1692 circular list ending at the value itself or some
1694 set_variable_part (set, dv_as_value (node->dv),
1695 dv_from_value (val), node->offset,
1696 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1697 set_variable_part (set, val, node->dv, node->offset,
1698 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1701 /* If we didn't find any equivalence, we need to remember that
1702 this value is held in the named register. */
1704 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1705 dv_from_value (val), 0, NULL_RTX, INSERT);
1707 else if (MEM_P (loc))
1708 /* ??? Merge equivalent MEMs. */
1709 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1710 dv_from_value (val), 0, NULL_RTX, INSERT);
1712 /* ??? Merge equivalent expressions. */
1713 set_variable_part (set, loc, dv_from_value (val), 0,
1714 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1717 /* Initialize dataflow set SET to be empty.
1718 VARS_SIZE is the initial size of hash table VARS. */
1721 dataflow_set_init (dataflow_set *set)
1723 init_attrs_list_set (set->regs)
1724 set->vars = shared_hash_copy (empty_shared_hash);
1725 set->stack_adjust = 0;
1726 set->traversed_vars = NULL;
1729 /* Delete the contents of dataflow set SET. */
1732 dataflow_set_clear (dataflow_set *set)
1736 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1737 attrs_list_clear (&set->regs[i]);
/* Drop the variables table and reset to (a share of) the empty table.  */
1739 shared_hash_destroy (set->vars);
1740 set->vars = shared_hash_copy (empty_shared_hash);
1743 /* Copy the contents of dataflow set SRC to DST. */
1746 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
1750 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1751 attrs_list_copy (&dst->regs[i], src->regs[i]);
/* The variables hash table itself is shared, not deep-copied.  */
1753 shared_hash_destroy (dst->vars);
1754 dst->vars = shared_hash_copy (src->vars);
1755 dst->stack_adjust = src->stack_adjust;
1758 /* Information for merging lists of locations for a given offset of variable.
1760 struct variable_union_info
1762 /* Node of the location chain. */
1765 /* The sum of positions in the input chains. */
1768 /* The position in the chain of DST dataflow set. */
1772 /* Buffer for location list sorting and its allocated size. */
1773 static struct variable_union_info *vui_vec;
1774 static int vui_allocated;
1776 /* Compare function for qsort, order the structures by POS element. */
1779 variable_union_info_cmp_pos (const void *n1, const void *n2)
1781 const struct variable_union_info *const i1 =
1782 (const struct variable_union_info *) n1;
1783 const struct variable_union_info *const i2 =
1784 ( const struct variable_union_info *) n2;
1786 if (i1->pos != i2->pos)
1787 return i1->pos - i2->pos;
/* Equal sums: break the tie by the position in the DST chain.  */
1789 return (i1->pos_dst - i2->pos_dst);
1792 /* Compute union of location parts of variable *SLOT and the same variable
1793 from hash table DATA. Compute "sorted" union of the location chains
1794 for common offsets, i.e. the locations of a variable part are sorted by
1795 a priority where the priority is the sum of the positions in the 2 chains
1796 (if a location is only in one list the position in the second list is
1797 defined to be larger than the length of the chains).
1798 When we are updating the location parts the newest location is in the
1799 beginning of the chain, so when we do the described "sorted" union
1800 we keep the newest locations in the beginning. */
1803 variable_union (void **slot, void *data)
1807 dataflow_set *set = (dataflow_set *) data;
1810 src = (variable) *slot;
1811 dstp = shared_hash_find_slot (set->vars, src->dv);
/* SRC is not in DST yet: insert it (unsharing the table first when it
   is shared).  */
1812 if (!dstp || !*dstp)
1816 dst_can_be_shared = false;
1818 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
1822 /* Continue traversing the hash table. */
1826 dst = (variable) *dstp;
1828 gcc_assert (src->n_var_parts);
1830 /* We can combine one-part variables very efficiently, because their
1831 entries are in canonical order. */
1832 if (dv_onepart_p (src->dv))
1834 location_chain *nodep, dnode, snode;
1836 gcc_assert (src->n_var_parts == 1);
1837 gcc_assert (dst->n_var_parts == 1);
1839 snode = src->var_part[0].loc_chain;
/* Merge the two sorted chains; restart from the head after unsharing
   DST, since its chain nodes were reallocated.  */
1842 restart_onepart_unshared:
1843 nodep = &dst->var_part[0].loc_chain;
1849 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
1853 location_chain nnode;
1855 if (shared_var_p (dst, set->vars))
1857 dstp = unshare_variable (set, dstp, dst,
1858 VAR_INIT_STATUS_INITIALIZED);
1859 dst = (variable)*dstp;
1860 goto restart_onepart_unshared;
/* SRC's location is absent from DST: splice in a copy before DNODE.  */
1863 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
1864 nnode->loc = snode->loc;
1865 nnode->init = snode->init;
1866 if (!snode->set_src || MEM_P (snode->set_src))
1867 nnode->set_src = NULL;
1869 nnode->set_src = snode->set_src;
1870 nnode->next = dnode;
1873 #ifdef ENABLE_CHECKING
1875 gcc_assert (rtx_equal_p (dnode->loc, snode->loc));
1879 snode = snode->next;
1881 nodep = &dnode->next;
1888 /* Count the number of location parts, result is K. */
1889 for (i = 0, j = 0, k = 0;
1890 i < src->n_var_parts && j < dst->n_var_parts; k++)
1892 if (src->var_part[i].offset == dst->var_part[j].offset)
1897 else if (src->var_part[i].offset < dst->var_part[j].offset)
1902 k += src->n_var_parts - i;
1903 k += dst->n_var_parts - j;
1905 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
1906 thus there are at most MAX_VAR_PARTS different offsets. */
1907 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
1909 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
1911 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
1912 dst = (variable)*dstp;
/* Merge the parts from highest offset down, writing into slot K.  */
1915 i = src->n_var_parts - 1;
1916 j = dst->n_var_parts - 1;
1917 dst->n_var_parts = k;
1919 for (k--; k >= 0; k--)
1921 location_chain node, node2;
1923 if (i >= 0 && j >= 0
1924 && src->var_part[i].offset == dst->var_part[j].offset)
1926 /* Compute the "sorted" union of the chains, i.e. the locations which
1927 are in both chains go first, they are sorted by the sum of
1928 positions in the chains. */
1931 struct variable_union_info *vui;
1933 /* If DST is shared compare the location chains.
1934 If they are different we will modify the chain in DST with
1935 high probability so make a copy of DST. */
1936 if (shared_var_p (dst, set->vars))
1938 for (node = src->var_part[i].loc_chain,
1939 node2 = dst->var_part[j].loc_chain; node && node2;
1940 node = node->next, node2 = node2->next)
1942 if (!((REG_P (node2->loc)
1943 && REG_P (node->loc)
1944 && REGNO (node2->loc) == REGNO (node->loc))
1945 || rtx_equal_p (node2->loc, node->loc)))
1947 if (node2->init < node->init)
1948 node2->init = node->init;
1954 dstp = unshare_variable (set, dstp, dst,
1955 VAR_INIT_STATUS_UNKNOWN);
1956 dst = (variable)*dstp;
/* Measure both chains to decide between the fast path and qsort.  */
1961 for (node = src->var_part[i].loc_chain; node; node = node->next)
1964 for (node = dst->var_part[j].loc_chain; node; node = node->next)
1969 /* The most common case, much simpler, no qsort is needed. */
1970 location_chain dstnode = dst->var_part[j].loc_chain;
1971 dst->var_part[k].loc_chain = dstnode;
1972 dst->var_part[k].offset = dst->var_part[j].offset;
1974 for (node = src->var_part[i].loc_chain; node; node = node->next)
1975 if (!((REG_P (dstnode->loc)
1976 && REG_P (node->loc)
1977 && REGNO (dstnode->loc) == REGNO (node->loc))
1978 || rtx_equal_p (dstnode->loc, node->loc)))
1980 location_chain new_node;
1982 /* Copy the location from SRC. */
1983 new_node = (location_chain) pool_alloc (loc_chain_pool);
1984 new_node->loc = node->loc;
1985 new_node->init = node->init;
1986 if (!node->set_src || MEM_P (node->set_src))
1987 new_node->set_src = NULL;
1989 new_node->set_src = node->set_src;
1990 node2->next = new_node;
/* General case: grow the sorting buffer as needed.  */
1997 if (src_l + dst_l > vui_allocated)
1999 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2000 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2005 /* Fill in the locations from DST. */
2006 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2007 node = node->next, jj++)
2010 vui[jj].pos_dst = jj;
2012 /* Pos plus value larger than a sum of 2 valid positions. */
2013 vui[jj].pos = jj + src_l + dst_l;
2016 /* Fill in the locations from SRC. */
2018 for (node = src->var_part[i].loc_chain, ii = 0; node;
2019 node = node->next, ii++)
2021 /* Find location from NODE. */
2022 for (jj = 0; jj < dst_l; jj++)
2024 if ((REG_P (vui[jj].lc->loc)
2025 && REG_P (node->loc)
2026 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2027 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2029 vui[jj].pos = jj + ii;
2033 if (jj >= dst_l) /* The location has not been found. */
2035 location_chain new_node;
2037 /* Copy the location from SRC. */
2038 new_node = (location_chain) pool_alloc (loc_chain_pool);
2039 new_node->loc = node->loc;
2040 new_node->init = node->init;
2041 if (!node->set_src || MEM_P (node->set_src))
2042 new_node->set_src = NULL;
2044 new_node->set_src = node->set_src;
2045 vui[n].lc = new_node;
2046 vui[n].pos_dst = src_l + dst_l;
2047 vui[n].pos = ii + src_l + dst_l;
2054 /* Special case still very common case. For dst_l == 2
2055 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2056 vui[i].pos == i + src_l + dst_l. */
2057 if (vui[0].pos > vui[1].pos)
2059 /* Order should be 1, 0, 2... */
2060 dst->var_part[k].loc_chain = vui[1].lc;
2061 vui[1].lc->next = vui[0].lc;
2064 vui[0].lc->next = vui[2].lc;
2065 vui[n - 1].lc->next = NULL;
2068 vui[0].lc->next = NULL;
2073 dst->var_part[k].loc_chain = vui[0].lc;
2074 if (n >= 3 && vui[2].pos < vui[1].pos)
2076 /* Order should be 0, 2, 1, 3... */
2077 vui[0].lc->next = vui[2].lc;
2078 vui[2].lc->next = vui[1].lc;
2081 vui[1].lc->next = vui[3].lc;
2082 vui[n - 1].lc->next = NULL;
2085 vui[1].lc->next = NULL;
2090 /* Order should be 0, 1, 2... */
2092 vui[n - 1].lc->next = NULL;
2095 for (; ii < n; ii++)
2096 vui[ii - 1].lc->next = vui[ii].lc;
2100 qsort (vui, n, sizeof (struct variable_union_info),
2101 variable_union_info_cmp_pos);
2103 /* Reconnect the nodes in sorted order. */
2104 for (ii = 1; ii < n; ii++)
2105 vui[ii - 1].lc->next = vui[ii].lc;
2106 vui[n - 1].lc->next = NULL;
2107 dst->var_part[k].loc_chain = vui[0].lc;
2110 dst->var_part[k].offset = dst->var_part[j].offset;
/* Offset present only in DST: keep DST's part.  */
2115 else if ((i >= 0 && j >= 0
2116 && src->var_part[i].offset < dst->var_part[j].offset)
2119 dst->var_part[k] = dst->var_part[j];
/* Offset present only in SRC: copy SRC's chain into the new part.  */
2122 else if ((i >= 0 && j >= 0
2123 && src->var_part[i].offset > dst->var_part[j].offset)
2126 location_chain *nextp;
2128 /* Copy the chain from SRC. */
2129 nextp = &dst->var_part[k].loc_chain;
2130 for (node = src->var_part[i].loc_chain; node; node = node->next)
2132 location_chain new_lc;
2134 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2135 new_lc->next = NULL;
2136 new_lc->init = node->init;
2137 if (!node->set_src || MEM_P (node->set_src))
2138 new_lc->set_src = NULL;
2140 new_lc->set_src = node->set_src;
2141 new_lc->loc = node->loc;
2144 nextp = &new_lc->next;
2147 dst->var_part[k].offset = src->var_part[i].offset;
2150 dst->var_part[k].cur_loc = NULL;
/* Propagate the stronger initialization status for locations present
   in both chains.  */
2153 if (flag_var_tracking_uninit)
2154 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2156 location_chain node, node2;
2157 for (node = src->var_part[i].loc_chain; node; node = node->next)
2158 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2159 if (rtx_equal_p (node->loc, node2->loc))
2161 if (node->init > node2->init)
2162 node2->init = node->init;
2166 /* Continue traversing the hash table. */
2170 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2173 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2177 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2178 attrs_list_union (&dst->regs[i], src->regs[i]);
/* Union with an empty DST is just a (shared) copy of SRC.  */
2180 if (dst->vars == empty_shared_hash)
2182 shared_hash_destroy (dst->vars);
2183 dst->vars = shared_hash_copy (src->vars);
2186 htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
2189 /* Whether the value is currently being expanded. */
2190 #define VALUE_RECURSED_INTO(x) \
2191 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2192 /* Whether the value is in changed_variables hash table. */
2193 #define VALUE_CHANGED(x) \
2194 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2195 /* Whether the decl is in changed_variables hash table. */
2196 #define DECL_CHANGED(x) TREE_VISITED (x)
2198 /* Record that DV has been added into resp. removed from changed_variables
2202 set_dv_changed (decl_or_value dv, bool newv)
/* The flag lives on the VALUE rtx or on the decl tree, depending on
   the kind of DV.  */
2204 if (dv_is_value_p (dv))
2205 VALUE_CHANGED (dv_as_value (dv)) = newv;
2207 DECL_CHANGED (dv_as_decl (dv)) = newv;
2210 /* Return true if DV is present in changed_variables hash table. */
2213 dv_changed_p (decl_or_value dv)
2215 return (dv_is_value_p (dv)
2216 ? VALUE_CHANGED (dv_as_value (dv))
2217 : DECL_CHANGED (dv_as_decl (dv)));
2220 /* Return a location list node whose loc is rtx_equal to LOC, in the
2221 location list of a one-part variable or value VAR, or in that of
2222 any values recursively mentioned in the location lists. */
2224 static location_chain
2225 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2227 location_chain node;
2232 gcc_assert (dv_onepart_p (var->dv));
2234 if (!var->n_var_parts)
2237 gcc_assert (var->var_part[0].offset == 0);
2239 for (node = var->var_part[0].loc_chain; node; node = node->next)
2240 if (rtx_equal_p (loc, node->loc))
/* A VALUE in the chain may hold LOC transitively; recurse into it,
   guarding against cycles with VALUE_RECURSED_INTO.  */
2242 else if (GET_CODE (node->loc) == VALUE
2243 && !VALUE_RECURSED_INTO (node->loc))
2245 decl_or_value dv = dv_from_value (node->loc);
2246 variable var = (variable)
2247 htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2251 location_chain where;
2252 VALUE_RECURSED_INTO (node->loc) = true;
2253 if ((where = find_loc_in_1pdv (loc, var, vars)))
2255 VALUE_RECURSED_INTO (node->loc) = false;
2258 VALUE_RECURSED_INTO (node->loc) = false;
2265 /* Hash table iteration argument passed to variable_merge. */
2268 /* The set in which the merge is to be inserted. */
2270 /* The set that we're iterating in. */
2272 /* The set that may contain the other dv we are to merge with. */
2274 /* Number of onepart dvs in src. */
2275 int src_onepart_cnt;
2278 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
2279 loc_cmp order, and it is maintained as such. */
2282 insert_into_intersection (location_chain *nodep, rtx loc,
2283 enum var_init_status status)
2285 location_chain node;
/* Walk to LOC's sorted position; if already present, just weaken the
   recorded status to the minimum of the two.  */
2288 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2289 if ((r = loc_cmp (node->loc, loc)) == 0)
2291 node->init = MIN (node->init, status);
2297 node = (location_chain) pool_alloc (loc_chain_pool);
2300 node->set_src = NULL;
2301 node->init = status;
2302 node->next = *nodep;
2306 /* Insert in DEST the intersection the locations present in both
2307 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2308 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
2312 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2313 location_chain s1node, variable s2var)
2315 dataflow_set *s1set = dsm->cur;
2316 dataflow_set *s2set = dsm->src;
2317 location_chain found;
2319 for (; s1node; s1node = s1node->next)
2321 if (s1node->loc == val)
/* A location held in both chains goes into the intersection with the
   weaker of the two initialization statuses.  */
2324 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2325 shared_hash_htab (s2set->vars))))
2327 insert_into_intersection (dest, s1node->loc,
2328 MIN (s1node->init, found->init));
/* Recurse through VALUE equivalences, with cycle protection.  */
2332 if (GET_CODE (s1node->loc) == VALUE
2333 && !VALUE_RECURSED_INTO (s1node->loc))
2335 decl_or_value dv = dv_from_value (s1node->loc);
2336 variable svar = shared_hash_find (s1set->vars, dv);
2339 if (svar->n_var_parts == 1)
2341 VALUE_RECURSED_INTO (s1node->loc) = true;
2342 intersect_loc_chains (val, dest, dsm,
2343 svar->var_part[0].loc_chain,
2345 VALUE_RECURSED_INTO (s1node->loc) = false;
2350 /* ??? if the location is equivalent to any location in src,
2351 searched recursively
2353 add to dst the values needed to represent the equivalence
2355 telling whether locations S is equivalent to another dv's
2358 for each location D in the list
2360 if S and D satisfy rtx_equal_p, then it is present
2362 else if D is a value, recurse without cycles
2364 else if S and D have the same CODE and MODE
2366 for each operand oS and the corresponding oD
2368 if oS and oD are not equivalent, then S an D are not equivalent
2370 else if they are RTX vectors
2372 if any vector oS element is not equivalent to its respective oD,
2373 then S and D are not equivalent
2381 /* Return -1 if X should be before Y in a location list for a 1-part
2382 variable, 1 if Y should be before X, and 0 if they're equivalent
2383 and should not appear in the list. */
2386 loc_cmp (rtx x, rtx y)
2389 RTX_CODE code = GET_CODE (x);
/* Registers order by register number.  */
2399 gcc_assert (GET_MODE (x) == GET_MODE (y));
2400 if (REGNO (x) == REGNO (y))
2402 else if (REGNO (x) < REGNO (y))
/* MEMs order by their address expressions.  */
2415 gcc_assert (GET_MODE (x) == GET_MODE (y));
2416 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
/* VALUEs sort before non-VALUEs, and among themselves by canonicality.  */
2422 if (GET_CODE (x) == VALUE)
2424 if (GET_CODE (y) != VALUE)
2426 /* Don't assert the modes are the same, that is true only
2427 when not recursing. (subreg:QI (value:SI 1:1) 0)
2428 and (subreg:QI (value:DI 2:2) 0) can be compared,
2429 even when the modes are different. */
2430 if (canon_value_cmp (x, y))
2436 if (GET_CODE (y) == VALUE)
2439 if (GET_CODE (x) == GET_CODE (y))
2440 /* Compare operands below. */;
2441 else if (GET_CODE (x) < GET_CODE (y))
2446 gcc_assert (GET_MODE (x) == GET_MODE (y));
/* DEBUG_EXPRs order by the uid of their decls.  */
2448 if (GET_CODE (x) == DEBUG_EXPR)
2450 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2451 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
2453 #ifdef ENABLE_CHECKING
2454 gcc_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2455 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
/* Same code and mode: compare operand by operand per the RTX format.  */
2460 fmt = GET_RTX_FORMAT (code);
2461 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2465 if (XWINT (x, i) == XWINT (y, i))
2467 else if (XWINT (x, i) < XWINT (y, i))
2474 if (XINT (x, i) == XINT (y, i))
2476 else if (XINT (x, i) < XINT (y, i))
2483 /* Compare the vector length first. */
2484 if (XVECLEN (x, i) == XVECLEN (y, i))
2485 /* Compare the vectors elements. */;
2486 else if (XVECLEN (x, i) < XVECLEN (y, i))
2491 for (j = 0; j < XVECLEN (x, i); j++)
2492 if ((r = loc_cmp (XVECEXP (x, i, j),
2493 XVECEXP (y, i, j))))
2498 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
2504 if (XSTR (x, i) == XSTR (y, i))
2510 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2518 /* These are just backpointers, so they don't matter. */
2525 /* It is believed that rtx's at this level will never
2526 contain anything but integers and other rtx's,
2527 except for within LABEL_REFs and SYMBOL_REFs. */
2535 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2536 from VALUE to DVP. */
2539 add_value_chain (rtx *loc, void *dvp)
2541 decl_or_value dv, ldv;
2542 value_chain vc, nvc;
/* Only VALUE and DEBUG_EXPR nodes get backlinks.  */
2545 if (GET_CODE (*loc) == VALUE)
2546 ldv = dv_from_value (*loc);
2547 else if (GET_CODE (*loc) == DEBUG_EXPR)
2548 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
/* Don't link a value to itself.  */
2552 if (dv_as_opaque (ldv) == dvp)
2555 dv = (decl_or_value) dvp;
2556 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
2560 vc = (value_chain) pool_alloc (value_chain_pool);
2564 *slot = (void *) vc;
/* If DV is already in LDV's chain, only its refcount needs updating.  */
2568 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2569 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
/* Otherwise insert a fresh chain node right after the head.  */
2577 vc = (value_chain) *slot;
2578 nvc = (value_chain) pool_alloc (value_chain_pool);
2580 nvc->next = vc->next;
2586 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2587 from those VALUEs to DVP. */
2590 add_value_chains (decl_or_value dv, rtx loc)
2592 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
2594 add_value_chain (&loc, dv_as_opaque (dv));
/* Otherwise walk LOC's subexpressions for embedded VALUEs.  */
2600 loc = XEXP (loc, 0);
2601 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
2604 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those
2608 add_cselib_value_chains (decl_or_value dv)
2610 struct elt_loc_list *l;
2612 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2613 for_each_rtx (&l->loc, add_value_chain, dv_as_opaque (dv));
2616 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
2617 from VALUE to DVP. */
2620 remove_value_chain (rtx *loc, void *dvp)
2622 decl_or_value dv, ldv;
2626 if (GET_CODE (*loc) == VALUE)
2627 ldv = dv_from_value (*loc);
2628 else if (GET_CODE (*loc) == DEBUG_EXPR)
2629 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
/* A value never links to itself, so there is nothing to remove.  */
2633 if (dv_as_opaque (ldv) == dvp)
2636 dv = (decl_or_value) dvp;
2637 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
/* Find DV in LDV's chain; drop the node once its refcount hits zero,
   and clear the whole hash slot when the chain becomes empty.  */
2639 for (vc = (value_chain) *slot; vc->next; vc = vc->next)
2640 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
2642 value_chain dvc = vc->next;
2643 gcc_assert (dvc->refcount > 0);
2644 if (--dvc->refcount == 0)
2646 vc->next = dvc->next;
2647 pool_free (value_chain_pool, dvc);
2648 if (vc->next == NULL && vc == (value_chain) *slot)
2650 pool_free (value_chain_pool, vc);
2651 htab_clear_slot (value_chains, slot);
2659 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
2660 from those VALUEs to DVP. */
2663 remove_value_chains (decl_or_value dv, rtx loc)
/* Exact mirror of add_value_chains: direct VALUE/DEBUG_EXPR gets a
   single removal, otherwise every sub-rtx is scanned.  */
2665 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
2667 remove_value_chain (&loc, dv_as_opaque (dv));
/* NOTE(review): guard elided in this excerpt; presumably strips a MEM
   to its address, matching add_value_chains.  */
2673 loc = XEXP (loc, 0);
2674 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
2678 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, remove backlinks from those
2682 remove_cselib_value_chains (decl_or_value dv)
2684 struct elt_loc_list *l;
/* Mirror of add_cselib_value_chains: drop the backlinks for every
   location cselib records for value DV.  */
2686 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2687 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
2690 /* Check the order of entries in one-part variables. */
2693 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
2695 variable var = (variable) *slot;
2696 decl_or_value dv = var->dv;
2697 location_chain node, next;
2699 #ifdef ENABLE_RTL_CHECKING
/* NOTE(review): the loop index i is unused in the assertion body, so
   var_part[0] is checked n_var_parts times — possibly intended to be
   var_part[i].  Verify against the full source before changing.  */
2701 for (i = 0; i < var->n_var_parts; i++)
2702 gcc_assert (var->var_part[0].cur_loc == NULL);
2703 gcc_assert (!var->cur_loc_changed && !var->in_changed_variables);
/* Only one-part variables keep their location chains sorted.  */
2706 if (!dv_onepart_p (dv))
2709 gcc_assert (var->n_var_parts == 1);
2710 node = var->var_part[0].loc_chain;
/* Assert strictly ascending loc_cmp order along the chain.  */
2713 while ((next = node->next))
2715 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
2723 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
2724 more likely to be chosen as canonical for an equivalence set.
2725 Ensure less likely values can reach more likely neighbors, making
2726 the connections bidirectional. */
2729 canonicalize_values_mark (void **slot, void *data)
2731 dataflow_set *set = (dataflow_set *)data;
2732 variable var = (variable) *slot;
2733 decl_or_value dv = var->dv;
2735 location_chain node;
/* Only VALUE entries take part in canonicalization.  */
2737 if (!dv_is_value_p (dv))
2740 gcc_assert (var->n_var_parts == 1);
2742 val = dv_as_value (dv);
2744 for (node = var->var_part[0].loc_chain; node; node = node->next)
2745 if (GET_CODE (node->loc) == VALUE)
/* A more-canonical neighbor exists: mark VAL for revisiting.  */
2747 if (canon_value_cmp (node->loc, val))
2748 VALUE_RECURSED_INTO (val) = true;
2751 decl_or_value odv = dv_from_value (node->loc);
2752 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
/* Otherwise make the link bidirectional: record VAL in the
   neighbor's own location chain and mark the neighbor.  */
2754 oslot = set_slot_part (set, val, oslot, odv, 0,
2755 node->init, NULL_RTX);
2757 VALUE_RECURSED_INTO (node->loc) = true;
2764 /* Remove redundant entries from equivalence lists in onepart
2765 variables, canonicalizing equivalence sets into star shapes. */
2768 canonicalize_values_star (void **slot, void *data)
2770 dataflow_set *set = (dataflow_set *)data;
2771 variable var = (variable) *slot;
2772 decl_or_value dv = var->dv;
2773 location_chain node;
2780 if (!dv_onepart_p (dv))
2783 gcc_assert (var->n_var_parts == 1);
/* If this entry is itself a VALUE, it must have been marked by
   canonicalize_values_mark for us to process it.  */
2785 if (dv_is_value_p (dv))
2787 cval = dv_as_value (dv);
2788 if (!VALUE_RECURSED_INTO (cval))
2790 VALUE_RECURSED_INTO (cval) = false;
2800 gcc_assert (var->n_var_parts == 1);
/* Scan the location chain for a marked, more-canonical VALUE to
   adopt as the hub CVAL of the star.  */
2802 for (node = var->var_part[0].loc_chain; node; node = node->next)
2803 if (GET_CODE (node->loc) == VALUE)
2806 if (VALUE_RECURSED_INTO (node->loc))
2808 if (canon_value_cmp (node->loc, cval))
2817 if (!has_marks || dv_is_decl_p (dv))
2820 /* Keep it marked so that we revisit it, either after visiting a
2821 child node, or after visiting a new parent that might be
2823 VALUE_RECURSED_INTO (val) = true;
2825 for (node = var->var_part[0].loc_chain; node; node = node->next)
2826 if (GET_CODE (node->loc) == VALUE
2827 && VALUE_RECURSED_INTO (node->loc))
/* Re-fetch the variable for the chosen canonical value CVAL.  */
2831 VALUE_RECURSED_INTO (cval) = false;
2832 dv = dv_from_value (cval);
2833 slot = shared_hash_find_slot_noinsert (set->vars, dv);
2836 gcc_assert (dv_is_decl_p (var->dv));
2837 /* The canonical value was reset and dropped.
2839 clobber_variable_part (set, NULL, var->dv, 0, NULL);
2842 var = (variable)*slot;
2843 gcc_assert (dv_is_value_p (var->dv));
2844 if (var->n_var_parts == 0)
2846 gcc_assert (var->n_var_parts == 1);
2850 VALUE_RECURSED_INTO (val) = false;
2855 /* Push values to the canonical one. */
2856 cdv = dv_from_value (cval);
2857 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
/* Move every non-canonical location onto CVAL's chain, and make
   each equivalent VALUE point back at CVAL.  */
2859 for (node = var->var_part[0].loc_chain; node; node = node->next)
2860 if (node->loc != cval)
2862 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
2863 node->init, NULL_RTX);
2864 if (GET_CODE (node->loc) == VALUE)
2866 decl_or_value ndv = dv_from_value (node->loc);
2868 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
2871 if (canon_value_cmp (node->loc, val))
2873 /* If it could have been a local minimum, it's not any more,
2874 since it's now neighbor to cval, so it may have to push
2875 to it. Conversely, if it wouldn't have prevailed over
2876 val, then whatever mark it has is fine: if it was to
2877 push, it will now push to a more canonical node, but if
2878 it wasn't, then it has already pushed any values it might
2880 VALUE_RECURSED_INTO (node->loc) = true;
2881 /* Make sure we visit node->loc by ensuring we cval is
2883 VALUE_RECURSED_INTO (cval) = true;
2885 else if (!VALUE_RECURSED_INTO (node->loc))
2886 /* If we have no need to "recurse" into this node, it's
2887 already "canonicalized", so drop the link to the old
2889 clobber_variable_part (set, cval, ndv, 0, NULL);
2891 else if (GET_CODE (node->loc) == REG)
2893 attrs list = set->regs[REGNO (node->loc)], *listp;
2895 /* Change an existing attribute referring to dv so that it
2896 refers to cdv, removing any duplicate this might
2897 introduce, and checking that no previous duplicates
2898 existed, all in a single pass. */
2902 if (list->offset == 0
2903 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2904 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
/* Found DV first: retarget it to CDV, then ensure any later
   CDV entry is deleted as a duplicate.  */
2911 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2914 for (listp = &list->next; (list = *listp); listp = &list->next)
2919 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2921 *listp = list->next;
2922 pool_free (attrs_pool, list);
2927 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
/* Found CDV first: any later DV entry is the duplicate.  */
2930 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2932 for (listp = &list->next; (list = *listp); listp = &list->next)
2937 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2939 *listp = list->next;
2940 pool_free (attrs_pool, list);
2945 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
2954 if (list->offset == 0
2955 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2956 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
/* Finally link VAL itself under CVAL and drop VAL's own list.  */
2966 cslot = set_slot_part (set, val, cslot, cdv, 0,
2967 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
2969 slot = clobber_slot_part (set, cval, slot, 0, NULL);
2971 /* Variable may have been unshared. */
2972 var = (variable)*slot;
2973 gcc_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
2974 && var->var_part[0].loc_chain->next == NULL);
/* CVAL got re-marked while pushing: process it again.  */
2976 if (VALUE_RECURSED_INTO (cval))
2977 goto restart_with_cval;
2982 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
2983 corresponding entry in DSM->src. Multi-part variables are combined
2984 with variable_union, whereas onepart dvs are combined with
2988 variable_merge_over_cur (void **s1slot, void *data)
2990 struct dfset_merge *dsm = (struct dfset_merge *)data;
2991 dataflow_set *dst = dsm->dst;
2993 variable s1var = (variable) *s1slot;
2994 variable s2var, dvar = NULL;
2995 decl_or_value dv = s1var->dv;
2996 bool onepart = dv_onepart_p (dv);
2999 location_chain node, *nodep;
3001 /* If the incoming onepart variable has an empty location list, then
3002 the intersection will be just as empty. For other variables,
3003 it's always union. */
3004 gcc_assert (s1var->n_var_parts);
3005 gcc_assert (s1var->var_part[0].loc_chain);
/* Multi-part variables: plain union into DST.  */
3008 return variable_union (s1slot, dst);
3010 gcc_assert (s1var->n_var_parts == 1);
3011 gcc_assert (s1var->var_part[0].offset == 0);
3013 dvhash = dv_htab_hash (dv);
3014 if (dv_is_value_p (dv))
3015 val = dv_as_value (dv);
/* One-part: require the counterpart in DSM->src; without it the
   intersection is empty and DST cannot be shared.  */
3019 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3022 dst_can_be_shared = false;
3026 dsm->src_onepart_cnt--;
3027 gcc_assert (s2var->var_part[0].loc_chain);
3028 gcc_assert (s2var->n_var_parts == 1);
3029 gcc_assert (s2var->var_part[0].offset == 0);
3031 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3034 dvar = (variable)*dstslot;
3035 gcc_assert (dvar->refcount == 1);
3036 gcc_assert (dvar->n_var_parts == 1);
3037 gcc_assert (dvar->var_part[0].offset == 0);
3038 nodep = &dvar->var_part[0].loc_chain;
/* If S1 and S2 agree, share S2's entry directly in DST.  */
3046 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3048 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3050 *dstslot = dvar = s2var;
3055 dst_can_be_shared = false;
/* Otherwise build the intersection of the two location chains.  */
3057 intersect_loc_chains (val, nodep, dsm,
3058 s1var->var_part[0].loc_chain, s2var);
3064 dvar = (variable) pool_alloc (dv_pool (dv));
3067 dvar->n_var_parts = 1;
3068 dvar->cur_loc_changed = false;
3069 dvar->in_changed_variables = false;
3070 dvar->var_part[0].offset = 0;
3071 dvar->var_part[0].loc_chain = node;
3072 dvar->var_part[0].cur_loc = NULL;
3075 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3077 gcc_assert (!*dstslot);
/* Reconcile each REG location with DST's register attribute lists,
   pushing this value to whichever value owns the register.  */
3085 nodep = &dvar->var_part[0].loc_chain;
3086 while ((node = *nodep))
3088 location_chain *nextp = &node->next;
3090 if (GET_CODE (node->loc) == REG)
3094 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3095 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3096 && dv_is_value_p (list->dv))
3100 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3102 /* If this value became canonical for another value that had
3103 this register, we want to leave it alone. */
3104 else if (dv_as_value (list->dv) != val)
3106 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3108 node->init, NULL_RTX);
3109 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3111 /* Since nextp points into the removed node, we can't
3112 use it. The pointer to the next node moved to nodep.
3113 However, if the variable we're walking is unshared
3114 during our walk, we'll keep walking the location list
3115 of the previously-shared variable, in which case the
3116 node won't have been removed, and we'll want to skip
3117 it. That's why we test *nodep here. */
3123 /* Canonicalization puts registers first, so we don't have to
3129 if (dvar != (variable)*dstslot)
3130 dvar = (variable)*dstslot;
3131 nodep = &dvar->var_part[0].loc_chain;
3135 /* Mark all referenced nodes for canonicalization, and make sure
3136 we have mutual equivalence links. */
3137 VALUE_RECURSED_INTO (val) = true;
3138 for (node = *nodep; node; node = node->next)
3139 if (GET_CODE (node->loc) == VALUE)
3141 VALUE_RECURSED_INTO (node->loc) = true;
3142 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3143 node->init, NULL, INSERT);
3146 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3147 gcc_assert (*dstslot == dvar);
3148 canonicalize_values_star (dstslot, dst);
3149 #ifdef ENABLE_CHECKING
3151 == shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash));
3153 dvar = (variable)*dstslot;
/* DV is a decl: if the chain mixes a VALUE with other locations,
   every VALUE must get a table entry and be canonicalized too.  */
3157 bool has_value = false, has_other = false;
3159 /* If we have one value and anything else, we're going to
3160 canonicalize this, so make sure all values have an entry in
3161 the table and are marked for canonicalization. */
3162 for (node = *nodep; node; node = node->next)
3164 if (GET_CODE (node->loc) == VALUE)
3166 /* If this was marked during register canonicalization,
3167 we know we have to canonicalize values. */
3182 if (has_value && has_other)
3184 for (node = *nodep; node; node = node->next)
3186 if (GET_CODE (node->loc) == VALUE)
3188 decl_or_value dv = dv_from_value (node->loc);
3191 if (shared_hash_shared (dst->vars))
3192 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3194 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3198 variable var = (variable) pool_alloc (dv_pool (dv));
3201 var->n_var_parts = 1;
3202 var->cur_loc_changed = false;
3203 var->in_changed_variables = false;
3204 var->var_part[0].offset = 0;
3205 var->var_part[0].loc_chain = NULL;
3206 var->var_part[0].cur_loc = NULL;
3210 VALUE_RECURSED_INTO (node->loc) = true;
3214 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3215 gcc_assert (*dstslot == dvar);
3216 canonicalize_values_star (dstslot, dst);
3217 #ifdef ENABLE_CHECKING
3219 == shared_hash_find_slot_noinsert_1 (dst->vars,
3222 dvar = (variable)*dstslot;
/* If the result equals one of the inputs, share that input's entry
   instead of keeping a private copy.  */
3226 if (!onepart_variable_different_p (dvar, s2var))
3228 variable_htab_free (dvar);
3229 *dstslot = dvar = s2var;
3232 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3234 variable_htab_free (dvar);
3235 *dstslot = dvar = s1var;
3237 dst_can_be_shared = false;
3240 dst_can_be_shared = false;
3245 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
3246 multi-part variable. Unions of multi-part variables and
3247 intersections of one-part ones will be handled in
3248 variable_merge_over_cur(). */
3251 variable_merge_over_src (void **s2slot, void *data)
3253 struct dfset_merge *dsm = (struct dfset_merge *)data;
3254 dataflow_set *dst = dsm->dst;
3255 variable s2var = (variable) *s2slot;
3256 decl_or_value dv = s2var->dv;
3257 bool onepart = dv_onepart_p (dv);
/* Multi-part variables are copied into DST here; one-part ones are
   only counted, to be intersected later by variable_merge_over_cur.  */
3261 void **dstp = shared_hash_find_slot (dst->vars, dv);
3267 dsm->src_onepart_cnt++;
3271 /* Combine dataflow set information from SRC2 into DST, using PDST
3272 to carry over information across passes. */
3275 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
/* Preserve the old DST as SRC1 (by value), then rebuild DST from
   scratch and merge SRC1 with SRC2 into it.  */
3277 dataflow_set cur = *dst;
3278 dataflow_set *src1 = &cur;
3279 struct dfset_merge dsm;
3281 size_t src1_elems, src2_elems;
3283 src1_elems = htab_elements (shared_hash_htab (src1->vars));
3284 src2_elems = htab_elements (shared_hash_htab (src2->vars));
3285 dataflow_set_init (dst);
3286 dst->stack_adjust = cur.stack_adjust;
3287 shared_hash_destroy (dst->vars);
/* Fresh, unshared variable table sized for the larger input.  */
3288 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3289 dst->vars->refcount = 1;
3291 = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
3292 variable_htab_eq, variable_htab_free);
3294 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3295 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
3300 dsm.src_onepart_cnt = 0;
/* Pass 1 copies multi-part vars from src; pass 2 unions/intersects
   the entries of cur against src.  */
3302 htab_traverse (shared_hash_htab (dsm.src->vars), variable_merge_over_src,
3304 htab_traverse (shared_hash_htab (dsm.cur->vars), variable_merge_over_cur,
/* One-part variables present only in src were dropped by the
   intersection, so the result cannot be shared with src.  */
3307 if (dsm.src_onepart_cnt)
3308 dst_can_be_shared = false;
3310 dataflow_set_destroy (src1);
3313 /* Mark register equivalences. */
3316 dataflow_set_equiv_regs (dataflow_set *set)
3321 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
/* Per hard register: pick, per machine mode, the most canonical
   VALUE currently bound to the register.  */
3323 rtx canon[NUM_MACHINE_MODES];
3325 memset (canon, 0, sizeof (canon));
3327 for (list = set->regs[i]; list; list = list->next)
3328 if (list->offset == 0 && dv_is_value_p (list->dv))
3330 rtx val = dv_as_value (list->dv);
3331 rtx *cvalp = &canon[(int)GET_MODE (val)];
3334 if (canon_value_cmp (val, cval))
/* Record mutual equivalence links between each bound value/decl
   and the chosen canonical value, marking both for revisiting.  */
3338 for (list = set->regs[i]; list; list = list->next)
3339 if (list->offset == 0 && dv_onepart_p (list->dv))
3341 rtx cval = canon[(int)GET_MODE (list->loc)];
3346 if (dv_is_value_p (list->dv))
3348 rtx val = dv_as_value (list->dv);
3353 VALUE_RECURSED_INTO (val) = true;
3354 set_variable_part (set, val, dv_from_value (cval), 0,
3355 VAR_INIT_STATUS_INITIALIZED,
3359 VALUE_RECURSED_INTO (cval) = true;
3360 set_variable_part (set, cval, list->dv, 0,
3361 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
/* Canonicalize every still-marked value bound to this register.
   NOTE(review): listp is re-derived from list each iteration because
   canonicalize_values_star may rewrite the attrs list.  */
3364 for (listp = &set->regs[i]; (list = *listp);
3365 listp = list ? &list->next : listp)
3366 if (list->offset == 0 && dv_onepart_p (list->dv))
3368 rtx cval = canon[(int)GET_MODE (list->loc)];
3374 if (dv_is_value_p (list->dv))
3376 rtx val = dv_as_value (list->dv);
3377 if (!VALUE_RECURSED_INTO (val))
3381 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3382 canonicalize_values_star (slot, set);
3389 /* Remove any redundant values in the location list of VAR, which must
3390 be unshared and 1-part. */
3393 remove_duplicate_values (variable var)
3395 location_chain node, *nodep;
3397 gcc_assert (dv_onepart_p (var->dv));
3398 gcc_assert (var->n_var_parts == 1);
3399 gcc_assert (var->refcount == 1);
/* First pass: use VALUE_RECURSED_INTO as a "seen" mark; delete any
   VALUE node whose value was already seen earlier in the chain.  */
3401 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3403 if (GET_CODE (node->loc) == VALUE)
3405 if (VALUE_RECURSED_INTO (node->loc))
3407 /* Remove duplicate value node. */
3408 *nodep = node->next;
3409 pool_free (loc_chain_pool, node);
3413 VALUE_RECURSED_INTO (node->loc) = true;
3415 nodep = &node->next;
/* Second pass: clear the marks so the flag is reusable.  */
3418 for (node = var->var_part[0].loc_chain; node; node = node->next)
3419 if (GET_CODE (node->loc) == VALUE)
3421 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3422 VALUE_RECURSED_INTO (node->loc) = false;
3427 /* Hash table iteration argument passed to variable_post_merge. */
3428 struct dfset_post_merge
3430 /* The new input set for the current block. */
3432 /* Pointer to the permanent input set for the current block, or
3434 dataflow_set **permp;
3437 /* Create values for incoming expressions associated with one-part
3438 variables that don't have value numbers for them. */
3441 variable_post_merge_new_vals (void **slot, void *info)
3443 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3444 dataflow_set *set = dfpm->set;
3445 variable var = (variable)*slot;
3446 location_chain node;
3448 if (!dv_onepart_p (var->dv) || !var->n_var_parts)
3451 gcc_assert (var->n_var_parts == 1);
3453 if (dv_is_decl_p (var->dv))
3455 bool check_dupes = false;
3458 for (node = var->var_part[0].loc_chain; node; node = node->next)
3460 if (GET_CODE (node->loc) == VALUE)
3461 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
/* A REG location bound directly to the decl: replace it with a
   VALUE so locations survive across the merge.  */
3462 else if (GET_CODE (node->loc) == REG)
3464 attrs att, *attp, *curp = NULL;
3466 if (var->refcount != 1)
3468 slot = unshare_variable (set, slot, var,
3469 VAR_INIT_STATUS_INITIALIZED);
3470 var = (variable)*slot;
/* Look through the register's attribute list for an existing
   VALUE of the right mode, or for the decl's own entry.  */
3474 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3476 if (att->offset == 0
3477 && GET_MODE (att->loc) == GET_MODE (node->loc))
3479 if (dv_is_value_p (att->dv))
3481 rtx cval = dv_as_value (att->dv);
3486 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
3494 if ((*curp)->offset == 0
3495 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
3496 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
3499 curp = &(*curp)->next;
/* No value yet: lazily create the permanent set and give the
   register a fresh, preserved cselib value there.  */
3510 *dfpm->permp = XNEW (dataflow_set);
3511 dataflow_set_init (*dfpm->permp);
3514 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
3515 att; att = att->next)
3516 if (GET_MODE (att->loc) == GET_MODE (node->loc))
3518 gcc_assert (att->offset == 0);
3519 gcc_assert (dv_is_value_p (att->dv));
3520 val_reset (set, att->dv);
3527 cval = dv_as_value (cdv);
3531 /* Create a unique value to hold this register,
3532 that ought to be found and reused in
3533 subsequent rounds. */
3535 gcc_assert (!cselib_lookup (node->loc,
3536 GET_MODE (node->loc), 0));
3537 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
3538 cselib_preserve_value (v);
3539 cselib_invalidate_rtx (node->loc);
3541 cdv = dv_from_value (cval);
3544 "Created new value %u:%u for reg %i\n",
3545 v->uid, v->hash, REGNO (node->loc));
3548 var_reg_decl_set (*dfpm->permp, node->loc,
3549 VAR_INIT_STATUS_INITIALIZED,
3550 cdv, 0, NULL, INSERT);
3556 /* Remove attribute referring to the decl, which now
3557 uses the value for the register, already existing or
3558 to be added when we bring perm in. */
3561 pool_free (attrs_pool, att);
/* Substitutions may have introduced duplicate VALUEs.  */
3566 remove_duplicate_values (var);
3572 /* Reset values in the permanent set that are not associated with the
3573 chosen expression. */
3576 variable_post_merge_perm_vals (void **pslot, void *info)
3578 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3579 dataflow_set *set = dfpm->set;
3580 variable pvar = (variable)*pslot, var;
3581 location_chain pnode;
/* Each permanent-set entry is one VALUE with exactly one REG loc.  */
3585 gcc_assert (dv_is_value_p (pvar->dv));
3586 gcc_assert (pvar->n_var_parts == 1);
3587 pnode = pvar->var_part[0].loc_chain;
3589 gcc_assert (!pnode->next);
3590 gcc_assert (REG_P (pnode->loc));
3594 var = shared_hash_find (set->vars, dv);
/* If the register still reaches the value in SET, keep it;
   otherwise reset the stale association.  */
3597 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
3599 val_reset (set, dv);
3602 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
3603 if (att->offset == 0
3604 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
3605 && dv_is_value_p (att->dv))
3608 /* If there is a value associated with this register already, create
3610 if (att && dv_as_value (att->dv) != dv_as_value (dv))
/* Link the existing value and the permanent value as equivalent.  */
3612 rtx cval = dv_as_value (att->dv);
3613 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
3614 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
/* No value bound to the register: adopt the permanent binding.  */
3619 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
3621 variable_union (pslot, set);
3627 /* Just checking stuff and registering register attributes for
3631 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
3633 struct dfset_post_merge dfpm;
/* Create missing values, bring in permanent-set bindings, then
   re-canonicalize the whole variable table.  */
3638 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
3641 htab_traverse (shared_hash_htab ((*permp)->vars),
3642 variable_post_merge_perm_vals, &dfpm);
3643 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
3646 /* Return a node whose loc is a MEM that refers to EXPR in the
3647 location list of a one-part variable or value VAR, or in that of
3648 any values recursively mentioned in the location lists. */
3650 static location_chain
3651 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
3653 location_chain node;
3656 location_chain where = NULL;
3661 gcc_assert (GET_CODE (val) == VALUE);
3663 gcc_assert (!VALUE_RECURSED_INTO (val));
3665 dv = dv_from_value (val);
3666 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
3671 gcc_assert (dv_onepart_p (var->dv));
3673 if (!var->n_var_parts)
3676 gcc_assert (var->var_part[0].offset == 0);
/* Mark VAL while searching so mutually-referencing values cannot
   cause infinite recursion; cleared before returning.  */
3678 VALUE_RECURSED_INTO (val) = true;
/* A matching MEM must name EXPR exactly with zero offset; otherwise
   recurse into any unvisited VALUE in the chain.  */
3680 for (node = var->var_part[0].loc_chain; node; node = node->next)
3681 if (MEM_P (node->loc) && MEM_EXPR (node->loc) == expr
3682 && MEM_OFFSET (node->loc) == 0)
3687 else if (GET_CODE (node->loc) == VALUE
3688 && !VALUE_RECURSED_INTO (node->loc)
3689 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
3692 VALUE_RECURSED_INTO (val) = false;
3697 /* Return TRUE if the value of MEM may vary across a call. */
3700 mem_dies_at_call (rtx mem)
3702 tree expr = MEM_EXPR (mem);
3708 decl = get_base_address (expr);
/* A callee can change the memory if the decl is aliased, or if it is
   a writable global.  NOTE(review): the null checks on expr/decl are
   elided in this excerpt.  */
3716 return (may_be_aliased (decl)
3717 || (!TREE_READONLY (decl) && is_global_var (decl)));
3720 /* Remove all MEMs from the location list of a hash table entry for a
3721 one-part variable, except those whose MEM attributes map back to
3722 the variable itself, directly or within a VALUE. */
3725 dataflow_set_preserve_mem_locs (void **slot, void *data)
3727 dataflow_set *set = (dataflow_set *) data;
3728 variable var = (variable) *slot;
3730 if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
3732 tree decl = dv_as_decl (var->dv);
3733 location_chain loc, *locp;
3734 bool changed = false;
3736 if (!var->n_var_parts)
3739 gcc_assert (var->n_var_parts == 1);
/* Shared variable: first scan read-only to decide whether any
   change is needed, and unshare only if so.  */
3741 if (shared_var_p (var, set->vars))
3743 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3745 /* We want to remove dying MEMs that doesn't refer to
3747 if (GET_CODE (loc->loc) == MEM
3748 && (MEM_EXPR (loc->loc) != decl
3749 || MEM_OFFSET (loc->loc))
3750 && !mem_dies_at_call (loc->loc))
3752 /* We want to move here MEMs that do refer to DECL. */
3753 else if (GET_CODE (loc->loc) == VALUE
3754 && find_mem_expr_in_1pdv (decl, loc->loc,
3755 shared_hash_htab (set->vars)))
3762 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3763 var = (variable)*slot;
3764 gcc_assert (var->n_var_parts == 1);
/* Rewrite the location chain in place: pull DECL's MEM out of any
   VALUE, keep surviving locations, drop dying MEMs.  */
3767 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3770 rtx old_loc = loc->loc;
3771 if (GET_CODE (old_loc) == VALUE)
3773 location_chain mem_node
3774 = find_mem_expr_in_1pdv (decl, loc->loc,
3775 shared_hash_htab (set->vars));
3777 /* ??? This picks up only one out of multiple MEMs that
3778 refer to the same variable. Do we ever need to be
3779 concerned about dealing with more than one, or, given
3780 that they should all map to the same variable
3781 location, their addresses will have been merged and
3782 they will be regarded as equivalent? */
3785 loc->loc = mem_node->loc;
3786 loc->set_src = mem_node->set_src;
3787 loc->init = MIN (loc->init, mem_node->init);
/* Keep non-MEMs, DECL's own zero-offset MEM, and MEMs that
   survive the call; maintain value-chain bookkeeping.  */
3791 if (GET_CODE (loc->loc) != MEM
3792 || (MEM_EXPR (loc->loc) == decl
3793 && MEM_OFFSET (loc->loc) == 0)
3794 || !mem_dies_at_call (loc->loc))
3796 if (old_loc != loc->loc && emit_notes)
3798 if (old_loc == var->var_part[0].cur_loc)
3801 var->var_part[0].cur_loc = NULL;
3802 var->cur_loc_changed = true;
3804 add_value_chains (var->dv, loc->loc);
3805 remove_value_chains (var->dv, old_loc);
3813 remove_value_chains (var->dv, old_loc);
/* Invalidate cur_loc if we deleted the last-emitted location.  */
3814 if (old_loc == var->var_part[0].cur_loc)
3817 var->var_part[0].cur_loc = NULL;
3818 var->cur_loc_changed = true;
3822 pool_free (loc_chain_pool, loc);
3825 if (!var->var_part[0].loc_chain)
3831 variable_was_changed (var, set);
3837 /* Remove all MEMs from the location list of a hash table entry for a
3841 dataflow_set_remove_mem_locs (void **slot, void *data)
3843 dataflow_set *set = (dataflow_set *) data;
3844 variable var = (variable) *slot;
3846 if (dv_is_value_p (var->dv))
3848 location_chain loc, *locp;
3849 bool changed = false;
3851 gcc_assert (var->n_var_parts == 1);
/* Shared variable: scan first; unshare only if a dying MEM exists.  */
3853 if (shared_var_p (var, set->vars))
3855 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3856 if (GET_CODE (loc->loc) == MEM
3857 && mem_dies_at_call (loc->loc))
3863 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3864 var = (variable)*slot;
3865 gcc_assert (var->n_var_parts == 1);
/* Keep every location except MEMs that may die at the call.  */
3868 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3871 if (GET_CODE (loc->loc) != MEM
3872 || !mem_dies_at_call (loc->loc))
3879 remove_value_chains (var->dv, loc->loc);
3881 /* If we have deleted the location which was last emitted
3882 we have to emit new location so add the variable to set
3883 of changed variables. */
3884 if (var->var_part[0].cur_loc == loc->loc)
3887 var->var_part[0].cur_loc = NULL;
3888 var->cur_loc_changed = true;
3890 pool_free (loc_chain_pool, loc);
3893 if (!var->var_part[0].loc_chain)
3899 variable_was_changed (var, set);
3905 /* Remove all variable-location information about call-clobbered
3906 registers, as well as associations between MEMs and VALUEs. */
3909 dataflow_set_clear_at_call (dataflow_set *set)
/* Drop locations held in registers the callee may clobber.  */
3913 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
3914 if (TEST_HARD_REG_BIT (call_used_reg_set, r))
3915 var_regno_delete (set, r);
3917 if (MAY_HAVE_DEBUG_INSNS)
/* traversed_vars lets the callbacks detect in-traversal unsharing
   of SET's own table; reset it when done.  */
3919 set->traversed_vars = set->vars;
3920 htab_traverse (shared_hash_htab (set->vars),
3921 dataflow_set_preserve_mem_locs, set);
3922 set->traversed_vars = set->vars;
3923 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
3925 set->traversed_vars = NULL;
3929 /* Flag whether two dataflow sets being compared contain different data. */
/* File-scope result flag set by dataflow_set_different_1 and read by
   dataflow_set_different.  NOTE(review): declaration specifiers are
   elided in this excerpt.  */
3931 dataflow_set_different_value;
/* Return true if VP1's location chain contains a location with no
   counterpart in VP2 (registers compare by REGNO, everything else by
   rtx_equal_p).  Note the comparison is one-directional.  */
3934 variable_part_different_p (variable_part *vp1, variable_part *vp2)
3936 location_chain lc1, lc2;
3938 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
3940 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
3942 if (REG_P (lc1->loc) && REG_P (lc2->loc))
3944 if (REGNO (lc1->loc) == REGNO (lc2->loc))
3947 if (rtx_equal_p (lc1->loc, lc2->loc))
3956 /* Return true if one-part variables VAR1 and VAR2 are different.
3957 They must be in canonical order. */
3960 onepart_variable_different_p (variable var1, variable var2)
3962 location_chain lc1, lc2;
3967 gcc_assert (var1->n_var_parts == 1);
3968 gcc_assert (var2->n_var_parts == 1);
/* Because both chains are sorted by loc_cmp, a single parallel walk
   suffices: any pairwise mismatch means the variables differ.  */
3970 lc1 = var1->var_part[0].loc_chain;
3971 lc2 = var2->var_part[0].loc_chain;
3978 if (loc_cmp (lc1->loc, lc2->loc))
3987 /* Return true if variables VAR1 and VAR2 are different. */
3990 variable_different_p (variable var1, variable var2)
3997 if (var1->n_var_parts != var2->n_var_parts)
4000 for (i = 0; i < var1->n_var_parts; i++)
4002 if (var1->var_part[i].offset != var2->var_part[i].offset)
4004 /* One-part values have locations in a canonical order. */
4005 if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
4007 gcc_assert (var1->n_var_parts == 1);
4008 gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4009 return onepart_variable_different_p (var1, var2);
/* variable_part_different_p is one-directional, so check both ways.  */
4011 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4013 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4019 /* Compare variable *SLOT with the same variable in hash table DATA
4020 and set DATAFLOW_SET_DIFFERENT_VALUE if they are different. */
4023 dataflow_set_different_1 (void **slot, void *data)
4025 htab_t htab = (htab_t) data;
4026 variable var1, var2;
4028 var1 = (variable) *slot;
4029 var2 = (variable) htab_find_with_hash (htab, var1->dv,
4030 dv_htab_hash (var1->dv));
/* VAR1 missing from the other table: the sets differ.  */
4033 dataflow_set_different_value = true;
4035 if (dump_file && (dump_flags & TDF_DETAILS))
4037 fprintf (dump_file, "dataflow difference found: removal of:\n");
4041 /* Stop traversing the hash table. */
4045 if (variable_different_p (var1, var2))
4047 dataflow_set_different_value = true;
4049 if (dump_file && (dump_flags & TDF_DETAILS))
4051 fprintf (dump_file, "dataflow difference found: old and new follow:\n");
4056 /* Stop traversing the hash table. */
4060 /* Continue traversing the hash table. */
4064 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4067 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
/* Shared table: trivially equal.  */
4069 if (old_set->vars == new_set->vars)
/* Different element counts: trivially different.  */
4072 if (htab_elements (shared_hash_htab (old_set->vars))
4073 != htab_elements (shared_hash_htab (new_set->vars)))
4076 dataflow_set_different_value = false;
4078 htab_traverse (shared_hash_htab (old_set->vars), dataflow_set_different_1,
4079 shared_hash_htab (new_set->vars));
4080 /* No need to traverse the second hashtab, if both have the same number
4081 of elements and the second one had all entries found in the first one,
4082 then it can't have any extra entries. */
4083 return dataflow_set_different_value;
4086 /* Free the contents of dataflow set SET. */
4089 dataflow_set_destroy (dataflow_set *set)
/* Release per-register attribute lists, then drop (or decrement the
   refcount of) the shared variable table.  */
4093 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4094 attrs_list_clear (&set->regs[i]);
4096 shared_hash_destroy (set->vars);
4100 /* Return true if RTL X contains a SYMBOL_REF. */
4103 contains_symbol_ref (rtx x)
4112 code = GET_CODE (x);
4113 if (code == SYMBOL_REF)
/* Recurse over all 'e' (rtx) and 'E' (rtx vector) operands.  */
4116 fmt = GET_RTX_FORMAT (code);
4117 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4121 if (contains_symbol_ref (XEXP (x, i)))
4124 else if (fmt[i] == 'E')
4127 for (j = 0; j < XVECLEN (x, i); j++)
4128 if (contains_symbol_ref (XVECEXP (x, i, j)))
4136 /* Shall EXPR be tracked? */
4139 track_expr_p (tree expr, bool need_rtl)
/* Debug-expr decls are tracked whenever they have RTL at all.  */
4144 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4145 return DECL_RTL_SET_P (expr);
4147 /* If EXPR is not a parameter or a variable do not track it. */
4148 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4151 /* It also must have a name... */
4152 if (!DECL_NAME (expr) && need_rtl)
4155 /* ... and a RTL assigned to it. */
4156 decl_rtl = DECL_RTL_IF_SET (expr);
4157 if (!decl_rtl && need_rtl)
4160 /* If this expression is really a debug alias of some other declaration, we
4161 don't need to track this expression if the ultimate declaration is
4164 if (DECL_DEBUG_EXPR_IS_FROM (realdecl) && DECL_DEBUG_EXPR (realdecl))
4166 realdecl = DECL_DEBUG_EXPR (realdecl);
4167 /* ??? We don't yet know how to emit DW_OP_piece for variable
4168 that has been SRA'ed. */
4169 if (!DECL_P (realdecl))
4173 /* Do not track EXPR if REALDECL it should be ignored for debugging
4175 if (DECL_IGNORED_P (realdecl))
4178 /* Do not track global variables until we are able to emit correct location
4180 if (TREE_STATIC (realdecl))
4183 /* When the EXPR is a DECL for alias of some variable (see example)
4184 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
4185 DECL_RTL contains SYMBOL_REF.
4188 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4191 if (decl_rtl && MEM_P (decl_rtl)
4192 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4195 /* If RTX is a memory it should not be very large (because it would be
4196 an array or struct). */
4197 if (decl_rtl && MEM_P (decl_rtl))
4199 /* Do not track structures and arrays. */
4200 if (GET_MODE (decl_rtl) == BLKmode
4201 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4203 if (MEM_SIZE (decl_rtl)
4204 && INTVAL (MEM_SIZE (decl_rtl)) > MAX_VAR_PARTS)
/* EXPR qualifies: clear the "changed" markers before tracking.  */
4208 DECL_CHANGED (expr) = 0;
4209 DECL_CHANGED (realdecl) = 0;
4213 /* Determine whether a given LOC refers to the same variable part as
4217 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4220 HOST_WIDE_INT offset2;
4222 if (! DECL_P (expr))
4227 expr2 = REG_EXPR (loc);
4228 offset2 = REG_OFFSET (loc);
4230 else if (MEM_P (loc))
4232 expr2 = MEM_EXPR (loc);
4233 offset2 = INT_MEM_OFFSET (loc);
4238 if (! expr2 || ! DECL_P (expr2))
4241 expr = var_debug_decl (expr);
4242 expr2 = var_debug_decl (expr2);
4244 return (expr == expr2 && offset == offset2);
4247 /* LOC is a REG or MEM that we would like to track if possible.
4248 If EXPR is null, we don't know what expression LOC refers to,
4249 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4250 LOC is an lvalue register.
4252 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4253 is something we can track. When returning true, store the mode of
4254 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4255 from EXPR in *OFFSET_OUT (if nonnull). */
4258 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4259 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4261 enum machine_mode mode;
4263 if (expr == NULL || !track_expr_p (expr, true))
4266 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4267 whole subreg, but only the old inner part is really relevant. */
4268 mode = GET_MODE (loc);
4269 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4271 enum machine_mode pseudo_mode;
4273 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4274 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
4276 offset += byte_lowpart_offset (pseudo_mode, mode);
4281 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4282 Do the same if we are storing to a register and EXPR occupies
4283 the whole of register LOC; in that case, the whole of EXPR is
4284 being changed. We exclude complex modes from the second case
4285 because the real and imaginary parts are represented as separate
4286 pseudo registers, even if the whole complex value fits into one
4288 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4290 && !COMPLEX_MODE_P (DECL_MODE (expr))
4291 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4292 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4294 mode = DECL_MODE (expr);
4298 if (offset < 0 || offset >= MAX_VAR_PARTS)
4304 *offset_out = offset;
4308 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4309 want to track. When returning nonnull, make sure that the attributes
4310 on the returned value are updated. */
4313 var_lowpart (enum machine_mode mode, rtx loc)
4315 unsigned int offset, reg_offset, regno;
4317 if (!REG_P (loc) && !MEM_P (loc))
4320 if (GET_MODE (loc) == mode)
4323 offset = byte_lowpart_offset (mode, GET_MODE (loc));
4326 return adjust_address_nv (loc, mode, offset);
4328 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4329 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4331 return gen_rtx_REG_offset (loc, mode, regno, offset);
4334 /* Carry information about uses and stores while walking rtx. */
4336 struct count_use_info
4338 /* The insn where the RTX is. */
4341 /* The basic block where insn is. */
4344 /* The array of n_sets sets in the insn, as determined by cselib. */
4345 struct cselib_set *sets;
4348 /* True if we're counting stores, false otherwise. */
4352 /* Find a VALUE corresponding to X. */
4354 static inline cselib_val *
4355 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4361 /* This is called after uses are set up and before stores are
4362 processed bycselib, so it's safe to look up srcs, but not
4363 dsts. So we look up expressions that appear in srcs or in
4364 dest expressions, but we search the sets array for dests of
4368 for (i = 0; i < cui->n_sets; i++)
4369 if (cui->sets[i].dest == x)
4370 return cui->sets[i].src_elt;
4373 return cselib_lookup (x, mode, 0);
4379 /* Replace all registers and addresses in an expression with VALUE
4380 expressions that map back to them, unless the expression is a
4381 register. If no mapping is or can be performed, returns NULL. */
4384 replace_expr_with_values (rtx loc)
4388 else if (MEM_P (loc))
4390 enum machine_mode address_mode
4391 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
4392 cselib_val *addr = cselib_lookup (XEXP (loc, 0), address_mode, 0);
4394 return replace_equiv_address_nv (loc, addr->val_rtx);
4399 return cselib_subst_to_values (loc);
4402 /* Determine what kind of micro operation to choose for a USE. Return
4403 MO_CLOBBER if no micro operation is to be generated. */
4405 static enum micro_operation_type
4406 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
4410 if (cui && cui->sets)
4412 if (GET_CODE (loc) == VAR_LOCATION)
4414 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
4416 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
4417 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
4419 /* ??? flag_float_store and volatile mems are never
4420 given values, but we could in theory use them for
4422 gcc_assert (val || 1);
4429 if (REG_P (loc) || MEM_P (loc))
4432 *modep = GET_MODE (loc);
4436 || (find_use_val (loc, GET_MODE (loc), cui)
4437 && cselib_lookup (XEXP (loc, 0), GET_MODE (loc), 0)))
4442 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4444 if (val && !cselib_preserved_value_p (val))
4452 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
4454 expr = REG_EXPR (loc);
4457 return MO_USE_NO_VAR;
4458 else if (target_for_debug_bind (var_debug_decl (expr)))
4460 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
4461 false, modep, NULL))
4464 return MO_USE_NO_VAR;
4466 else if (MEM_P (loc))
4468 expr = MEM_EXPR (loc);
4472 else if (target_for_debug_bind (var_debug_decl (expr)))
4474 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
4475 false, modep, NULL))
4484 /* Log to OUT information about micro-operation MOPT involving X in
4488 log_op_type (rtx x, basic_block bb, rtx insn,
4489 enum micro_operation_type mopt, FILE *out)
4491 fprintf (out, "bb %i op %i insn %i %s ",
4492 bb->index, VTI (bb)->n_mos - 1,
4493 INSN_UID (insn), micro_operation_type_name[mopt]);
4494 print_inline_rtx (out, x, 2);
4498 /* Count uses (register and memory references) LOC which will be tracked.
4499 INSN is instruction which the LOC is part of. */
4502 count_uses (rtx *ploc, void *cuip)
4505 struct count_use_info *cui = (struct count_use_info *) cuip;
4506 enum micro_operation_type mopt = use_type (loc, cui, NULL);
4508 if (mopt != MO_CLOBBER)
4511 enum machine_mode mode = GET_MODE (loc);
4516 loc = PAT_VAR_LOCATION_LOC (loc);
4517 if (VAR_LOC_UNKNOWN_P (loc))
4524 && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
4526 enum machine_mode address_mode
4527 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
4528 val = cselib_lookup (XEXP (loc, 0), address_mode, 0);
4530 if (val && !cselib_preserved_value_p (val))
4532 VTI (cui->bb)->n_mos++;
4533 cselib_preserve_value (val);
4534 if (dump_file && (dump_flags & TDF_DETAILS))
4535 log_op_type (XEXP (loc, 0), cui->bb, cui->insn,
4536 MO_VAL_USE, dump_file);
4540 val = find_use_val (loc, mode, cui);
4543 if (mopt == MO_VAL_SET
4544 && GET_CODE (PATTERN (cui->insn)) == COND_EXEC
4547 && (use_type (loc, NULL, NULL) == MO_USE
4550 cselib_val *oval = cselib_lookup (loc, GET_MODE (loc), 0);
4552 gcc_assert (oval != val);
4553 gcc_assert (REG_P (loc) || MEM_P (loc));
4555 if (!cselib_preserved_value_p (oval))
4557 VTI (cui->bb)->n_mos++;
4558 cselib_preserve_value (oval);
4559 if (dump_file && (dump_flags & TDF_DETAILS))
4560 log_op_type (loc, cui->bb, cui->insn,
4561 MO_VAL_USE, dump_file);
4565 cselib_preserve_value (val);
4568 gcc_assert (mopt == MO_VAL_LOC
4569 || (mopt == MO_VAL_SET && cui->store_p));
4577 VTI (cui->bb)->n_mos++;
4578 if (dump_file && (dump_flags & TDF_DETAILS))
4579 log_op_type (loc, cui->bb, cui->insn, mopt, dump_file);
4585 /* Helper function for finding all uses of REG/MEM in X in CUI's
4589 count_uses_1 (rtx *x, void *cui)
4591 for_each_rtx (x, count_uses, cui);
4594 /* Count stores (register and memory references) LOC which will be
4595 tracked. CUI is a count_use_info object containing the instruction
4596 which the LOC is part of. */
4599 count_stores (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *cui)
4601 count_uses (&loc, cui);
4604 /* Callback for cselib_record_sets_hook, that counts how many micro
4605 operations it takes for uses and stores in an insn after
4606 cselib_record_sets has analyzed the sets in an insn, but before it
4607 modifies the stored values in the internal tables, unless
4608 cselib_record_sets doesn't call it directly (perhaps because we're
4609 not doing cselib in the first place, in which case sets and n_sets
4613 count_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
4615 basic_block bb = BLOCK_FOR_INSN (insn);
4616 struct count_use_info cui;
4618 cselib_hook_called = true;
4623 cui.n_sets = n_sets;
4625 cui.store_p = false;
4626 note_uses (&PATTERN (insn), count_uses_1, &cui);
4628 note_stores (PATTERN (insn), count_stores, &cui);
4631 /* Tell whether the CONCAT used to holds a VALUE and its location
4632 needs value resolution, i.e., an attempt of mapping the location
4633 back to other incoming values. */
4634 #define VAL_NEEDS_RESOLUTION(x) \
4635 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
4636 /* Whether the location in the CONCAT is a tracked expression, that
4637 should also be handled like a MO_USE. */
4638 #define VAL_HOLDS_TRACK_EXPR(x) \
4639 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
4640 /* Whether the location in the CONCAT should be handled like a MO_COPY
4642 #define VAL_EXPR_IS_COPIED(x) \
4643 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
4644 /* Whether the location in the CONCAT should be handled like a
4645 MO_CLOBBER as well. */
4646 #define VAL_EXPR_IS_CLOBBERED(x) \
4647 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
4648 /* Whether the location is a CONCAT of the MO_VAL_SET expression and
4649 a reverse operation that should be handled afterwards. */
4650 #define VAL_EXPR_HAS_REVERSE(x) \
4651 (RTL_FLAG_CHECK1 ("VAL_EXPR_HAS_REVERSE", (x), CONCAT)->return_val)
4653 /* All preserved VALUEs. */
4654 static VEC (rtx, heap) *preserved_values;
4656 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes.
4657 This must be only called when cselib_preserve_only_values will be called
4658 with true, the counting phase must use cselib_preserve_value. */
4661 preserve_value (cselib_val *val)
4663 cselib_preserve_value (val);
4664 VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
4667 /* Add uses (register and memory references) LOC which will be tracked
4668 to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
4671 add_uses (rtx *ploc, void *data)
4674 enum machine_mode mode = VOIDmode;
4675 struct count_use_info *cui = (struct count_use_info *)data;
4676 enum micro_operation_type type = use_type (loc, cui, &mode);
4678 if (type != MO_CLOBBER)
4680 basic_block bb = cui->bb;
4681 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4684 mo->u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
4685 mo->insn = cui->insn;
4687 if (type == MO_VAL_LOC)
4690 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
4693 gcc_assert (cui->sets);
4696 && !REG_P (XEXP (vloc, 0)) && !MEM_P (XEXP (vloc, 0)))
4699 enum machine_mode address_mode
4700 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4702 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4704 if (val && !cselib_preserved_value_p (val))
4706 micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
4707 mon->type = mo->type;
4708 mon->u.loc = mo->u.loc;
4709 mon->insn = mo->insn;
4710 preserve_value (val);
4711 mo->type = MO_VAL_USE;
4712 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4713 mo->u.loc = gen_rtx_CONCAT (address_mode,
4714 val->val_rtx, mloc);
4715 if (dump_file && (dump_flags & TDF_DETAILS))
4716 log_op_type (mo->u.loc, cui->bb, cui->insn,
4717 mo->type, dump_file);
4722 if (!VAR_LOC_UNKNOWN_P (vloc)
4723 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
4725 enum machine_mode mode2;
4726 enum micro_operation_type type2;
4727 rtx nloc = replace_expr_with_values (vloc);
4731 oloc = shallow_copy_rtx (oloc);
4732 PAT_VAR_LOCATION_LOC (oloc) = nloc;
4735 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
4737 type2 = use_type (vloc, 0, &mode2);
4739 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4740 || type2 == MO_CLOBBER);
4742 if (type2 == MO_CLOBBER
4743 && !cselib_preserved_value_p (val))
4745 VAL_NEEDS_RESOLUTION (oloc) = 1;
4746 preserve_value (val);
4749 else if (!VAR_LOC_UNKNOWN_P (vloc))
4751 oloc = shallow_copy_rtx (oloc);
4752 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
4757 else if (type == MO_VAL_USE)
4759 enum machine_mode mode2 = VOIDmode;
4760 enum micro_operation_type type2;
4761 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4762 rtx vloc, oloc = loc, nloc;
4764 gcc_assert (cui->sets);
4767 && !REG_P (XEXP (oloc, 0)) && !MEM_P (XEXP (oloc, 0)))
4770 enum machine_mode address_mode
4771 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4773 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4775 if (val && !cselib_preserved_value_p (val))
4777 micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
4778 mon->type = mo->type;
4779 mon->u.loc = mo->u.loc;
4780 mon->insn = mo->insn;
4781 preserve_value (val);
4782 mo->type = MO_VAL_USE;
4783 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4784 mo->u.loc = gen_rtx_CONCAT (address_mode,
4785 val->val_rtx, mloc);
4786 mo->insn = cui->insn;
4787 if (dump_file && (dump_flags & TDF_DETAILS))
4788 log_op_type (mo->u.loc, cui->bb, cui->insn,
4789 mo->type, dump_file);
4794 type2 = use_type (loc, 0, &mode2);
4796 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4797 || type2 == MO_CLOBBER);
4799 if (type2 == MO_USE)
4800 vloc = var_lowpart (mode2, loc);
4804 /* The loc of a MO_VAL_USE may have two forms:
4806 (concat val src): val is at src, a value-based
4809 (concat (concat val use) src): same as above, with use as
4810 the MO_USE tracked value, if it differs from src.
4814 nloc = replace_expr_with_values (loc);
4819 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
4821 oloc = val->val_rtx;
4823 mo->u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
4825 if (type2 == MO_USE)
4826 VAL_HOLDS_TRACK_EXPR (mo->u.loc) = 1;
4827 if (!cselib_preserved_value_p (val))
4829 VAL_NEEDS_RESOLUTION (mo->u.loc) = 1;
4830 preserve_value (val);
4834 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
4836 if (dump_file && (dump_flags & TDF_DETAILS))
4837 log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
4843 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
4846 add_uses_1 (rtx *x, void *cui)
4848 for_each_rtx (x, add_uses, cui);
4851 /* Attempt to reverse the EXPR operation in the debug info. Say for
4852 reg1 = reg2 + 6 even when reg2 is no longer live we
4853 can express its value as VAL - 6. */
4856 reverse_op (rtx val, const_rtx expr)
4862 if (GET_CODE (expr) != SET)
4865 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
4868 src = SET_SRC (expr);
4869 switch (GET_CODE (src))
4883 if (!REG_P (XEXP (src, 0)) || !SCALAR_INT_MODE_P (GET_MODE (src)))
4886 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0);
4887 if (!v || !cselib_preserved_value_p (v))
4890 switch (GET_CODE (src))
4894 if (GET_MODE (v->val_rtx) != GET_MODE (val))
4896 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
4900 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
4912 if (GET_MODE (v->val_rtx) != GET_MODE (val))
4914 arg = XEXP (src, 1);
4915 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
4917 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
4918 if (arg == NULL_RTX)
4920 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
4923 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
4925 /* Ensure ret isn't VALUE itself (which can happen e.g. for
4926 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
4927 breaks a lot of routines during var-tracking. */
4928 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
4934 return gen_rtx_CONCAT (GET_MODE (v->val_rtx), v->val_rtx, ret);
4937 /* Add stores (register and memory references) LOC which will be tracked
4938 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
4939 CUIP->insn is instruction which the LOC is part of. */
4942 add_stores (rtx loc, const_rtx expr, void *cuip)
4944 enum machine_mode mode = VOIDmode, mode2;
4945 struct count_use_info *cui = (struct count_use_info *)cuip;
4946 basic_block bb = cui->bb;
4947 micro_operation *mo;
4948 rtx oloc = loc, nloc, src = NULL;
4949 enum micro_operation_type type = use_type (loc, cui, &mode);
4950 bool track_p = false;
4952 bool resolve, preserve;
4955 if (type == MO_CLOBBER)
4962 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4964 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
4965 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
4966 || GET_CODE (expr) == CLOBBER)
4968 mo->type = MO_CLOBBER;
4973 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
4974 src = var_lowpart (mode2, SET_SRC (expr));
4975 loc = var_lowpart (mode2, loc);
4984 rtx xexpr = CONST_CAST_RTX (expr);
4986 if (SET_SRC (expr) != src)
4987 xexpr = gen_rtx_SET (VOIDmode, loc, src);
4988 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
4995 mo->insn = cui->insn;
4997 else if (MEM_P (loc)
4998 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5001 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
5003 if (MEM_P (loc) && type == MO_VAL_SET
5004 && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
5007 enum machine_mode address_mode
5008 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
5009 cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
5011 if (val && !cselib_preserved_value_p (val))
5013 preserve_value (val);
5014 mo->type = MO_VAL_USE;
5015 mloc = cselib_subst_to_values (XEXP (mloc, 0));
5016 mo->u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
5017 mo->insn = cui->insn;
5018 if (dump_file && (dump_flags & TDF_DETAILS))
5019 log_op_type (mo->u.loc, cui->bb, cui->insn,
5020 mo->type, dump_file);
5021 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
5025 if (GET_CODE (expr) == CLOBBER || !track_p)
5027 mo->type = MO_CLOBBER;
5028 mo->u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5032 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
5033 src = var_lowpart (mode2, SET_SRC (expr));
5034 loc = var_lowpart (mode2, loc);
5043 rtx xexpr = CONST_CAST_RTX (expr);
5045 if (SET_SRC (expr) != src)
5046 xexpr = gen_rtx_SET (VOIDmode, loc, src);
5047 if (same_variable_part_p (SET_SRC (xexpr),
5049 INT_MEM_OFFSET (loc)))
5056 mo->insn = cui->insn;
5061 if (type != MO_VAL_SET)
5062 goto log_and_return;
5064 v = find_use_val (oloc, mode, cui);
5067 goto log_and_return;
5069 resolve = preserve = !cselib_preserved_value_p (v);
5071 nloc = replace_expr_with_values (oloc);
5075 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5077 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0);
5079 gcc_assert (oval != v);
5080 gcc_assert (REG_P (oloc) || MEM_P (oloc));
5082 if (!cselib_preserved_value_p (oval))
5084 micro_operation *nmo = VTI (bb)->mos + VTI (bb)->n_mos++;
5086 preserve_value (oval);
5088 nmo->type = MO_VAL_USE;
5089 nmo->u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5090 VAL_NEEDS_RESOLUTION (nmo->u.loc) = 1;
5091 nmo->insn = mo->insn;
5093 if (dump_file && (dump_flags & TDF_DETAILS))
5094 log_op_type (nmo->u.loc, cui->bb, cui->insn,
5095 nmo->type, dump_file);
5100 else if (resolve && GET_CODE (mo->u.loc) == SET)
5102 nloc = replace_expr_with_values (SET_SRC (expr));
5104 /* Avoid the mode mismatch between oexpr and expr. */
5105 if (!nloc && mode != mode2)
5107 nloc = SET_SRC (expr);
5108 gcc_assert (oloc == SET_DEST (expr));
5112 oloc = gen_rtx_SET (GET_MODE (mo->u.loc), oloc, nloc);
5115 if (oloc == SET_DEST (mo->u.loc))
5116 /* No point in duplicating. */
5118 if (!REG_P (SET_SRC (mo->u.loc)))
5124 if (GET_CODE (mo->u.loc) == SET
5125 && oloc == SET_DEST (mo->u.loc))
5126 /* No point in duplicating. */
5132 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5134 if (mo->u.loc != oloc)
5135 loc = gen_rtx_CONCAT (GET_MODE (mo->u.loc), loc, mo->u.loc);
5137 /* The loc of a MO_VAL_SET may have various forms:
5139 (concat val dst): dst now holds val
5141 (concat val (set dst src)): dst now holds val, copied from src
5143 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5144 after replacing mems and non-top-level regs with values.
5146 (concat (concat val dstv) (set dst src)): dst now holds val,
5147 copied from src. dstv is a value-based representation of dst, if
5148 it differs from dst. If resolution is needed, src is a REG, and
5149 its mode is the same as that of val.
5151 (concat (concat val (set dstv srcv)) (set dst src)): src
5152 copied to dst, holding val. dstv and srcv are value-based
5153 representations of dst and src, respectively.
5157 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
5159 reverse = reverse_op (v->val_rtx, expr);
5162 loc = gen_rtx_CONCAT (GET_MODE (mo->u.loc), loc, reverse);
5163 VAL_EXPR_HAS_REVERSE (loc) = 1;
5170 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5173 VAL_NEEDS_RESOLUTION (loc) = resolve;
5176 if (mo->type == MO_CLOBBER)
5177 VAL_EXPR_IS_CLOBBERED (loc) = 1;
5178 if (mo->type == MO_COPY)
5179 VAL_EXPR_IS_COPIED (loc) = 1;
5181 mo->type = MO_VAL_SET;
5184 if (dump_file && (dump_flags & TDF_DETAILS))
5185 log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
5188 /* Callback for cselib_record_sets_hook, that records as micro
5189 operations uses and stores in an insn after cselib_record_sets has
5190 analyzed the sets in an insn, but before it modifies the stored
5191 values in the internal tables, unless cselib_record_sets doesn't
5192 call it directly (perhaps because we're not doing cselib in the
5193 first place, in which case sets and n_sets will be 0). */
5196 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
5198 basic_block bb = BLOCK_FOR_INSN (insn);
5200 struct count_use_info cui;
5202 cselib_hook_called = true;
5207 cui.n_sets = n_sets;
5209 n1 = VTI (bb)->n_mos;
5210 cui.store_p = false;
5211 note_uses (&PATTERN (insn), add_uses_1, &cui);
5212 n2 = VTI (bb)->n_mos - 1;
5214 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
5218 while (n1 < n2 && VTI (bb)->mos[n1].type == MO_USE)
5220 while (n1 < n2 && VTI (bb)->mos[n2].type != MO_USE)
5226 sw = VTI (bb)->mos[n1];
5227 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5228 VTI (bb)->mos[n2] = sw;
5232 n2 = VTI (bb)->n_mos - 1;
5236 while (n1 < n2 && VTI (bb)->mos[n1].type != MO_VAL_LOC)
5238 while (n1 < n2 && VTI (bb)->mos[n2].type == MO_VAL_LOC)
5244 sw = VTI (bb)->mos[n1];
5245 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5246 VTI (bb)->mos[n2] = sw;
5252 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
5257 if (dump_file && (dump_flags & TDF_DETAILS))
5258 log_op_type (PATTERN (insn), bb, insn, mo->type, dump_file);
5261 n1 = VTI (bb)->n_mos;
5262 /* This will record NEXT_INSN (insn), such that we can
5263 insert notes before it without worrying about any
5264 notes that MO_USEs might emit after the insn. */
5266 note_stores (PATTERN (insn), add_stores, &cui);
5267 n2 = VTI (bb)->n_mos - 1;
5269 /* Order the MO_CLOBBERs to be before MO_SETs. */
5272 while (n1 < n2 && VTI (bb)->mos[n1].type == MO_CLOBBER)
5274 while (n1 < n2 && VTI (bb)->mos[n2].type != MO_CLOBBER)
5280 sw = VTI (bb)->mos[n1];
5281 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5282 VTI (bb)->mos[n2] = sw;
5287 static enum var_init_status
5288 find_src_status (dataflow_set *in, rtx src)
5290 tree decl = NULL_TREE;
5291 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
5293 if (! flag_var_tracking_uninit)
5294 status = VAR_INIT_STATUS_INITIALIZED;
5296 if (src && REG_P (src))
5297 decl = var_debug_decl (REG_EXPR (src));
5298 else if (src && MEM_P (src))
5299 decl = var_debug_decl (MEM_EXPR (src));
5302 status = get_init_value (in, src, dv_from_decl (decl));
5307 /* SRC is the source of an assignment. Use SET to try to find what
5308 was ultimately assigned to SRC. Return that value if known,
5309 otherwise return SRC itself. */
5312 find_src_set_src (dataflow_set *set, rtx src)
5314 tree decl = NULL_TREE; /* The variable being copied around. */
5315 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
5317 location_chain nextp;
5321 if (src && REG_P (src))
5322 decl = var_debug_decl (REG_EXPR (src));
5323 else if (src && MEM_P (src))
5324 decl = var_debug_decl (MEM_EXPR (src));
5328 decl_or_value dv = dv_from_decl (decl);
5330 var = shared_hash_find (set->vars, dv);
5334 for (i = 0; i < var->n_var_parts && !found; i++)
5335 for (nextp = var->var_part[i].loc_chain; nextp && !found;
5336 nextp = nextp->next)
5337 if (rtx_equal_p (nextp->loc, src))
5339 set_src = nextp->set_src;
5349 /* Compute the changes of variable locations in the basic block BB. */
5352 compute_bb_dataflow (basic_block bb)
5356 dataflow_set old_out;
5357 dataflow_set *in = &VTI (bb)->in;
5358 dataflow_set *out = &VTI (bb)->out;
5360 dataflow_set_init (&old_out);
5361 dataflow_set_copy (&old_out, out);
5362 dataflow_set_copy (out, in);
5364 n = VTI (bb)->n_mos;
5365 for (i = 0; i < n; i++)
5367 rtx insn = VTI (bb)->mos[i].insn;
5369 switch (VTI (bb)->mos[i].type)
5372 dataflow_set_clear_at_call (out);
5377 rtx loc = VTI (bb)->mos[i].u.loc;
5380 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5381 else if (MEM_P (loc))
5382 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5388 rtx loc = VTI (bb)->mos[i].u.loc;
5392 if (GET_CODE (loc) == CONCAT)
5394 val = XEXP (loc, 0);
5395 vloc = XEXP (loc, 1);
5403 var = PAT_VAR_LOCATION_DECL (vloc);
5405 clobber_variable_part (out, NULL_RTX,
5406 dv_from_decl (var), 0, NULL_RTX);
5409 if (VAL_NEEDS_RESOLUTION (loc))
5410 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
5411 set_variable_part (out, val, dv_from_decl (var), 0,
5412 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
5420 rtx loc = VTI (bb)->mos[i].u.loc;
5421 rtx val, vloc, uloc;
5423 vloc = uloc = XEXP (loc, 1);
5424 val = XEXP (loc, 0);
5426 if (GET_CODE (val) == CONCAT)
5428 uloc = XEXP (val, 1);
5429 val = XEXP (val, 0);
5432 if (VAL_NEEDS_RESOLUTION (loc))
5433 val_resolve (out, val, vloc, insn);
5435 val_store (out, val, uloc, insn, false);
5437 if (VAL_HOLDS_TRACK_EXPR (loc))
5439 if (GET_CODE (uloc) == REG)
5440 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5442 else if (GET_CODE (uloc) == MEM)
5443 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5451 rtx loc = VTI (bb)->mos[i].u.loc;
5452 rtx val, vloc, uloc, reverse = NULL_RTX;
5455 if (VAL_EXPR_HAS_REVERSE (loc))
5457 reverse = XEXP (loc, 1);
5458 vloc = XEXP (loc, 0);
5460 uloc = XEXP (vloc, 1);
5461 val = XEXP (vloc, 0);
5464 if (GET_CODE (val) == CONCAT)
5466 vloc = XEXP (val, 1);
5467 val = XEXP (val, 0);
5470 if (GET_CODE (vloc) == SET)
5472 rtx vsrc = SET_SRC (vloc);
5474 gcc_assert (val != vsrc);
5475 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
5477 vloc = SET_DEST (vloc);
5479 if (VAL_NEEDS_RESOLUTION (loc))
5480 val_resolve (out, val, vsrc, insn);
5482 else if (VAL_NEEDS_RESOLUTION (loc))
5484 gcc_assert (GET_CODE (uloc) == SET
5485 && GET_CODE (SET_SRC (uloc)) == REG);
5486 val_resolve (out, val, SET_SRC (uloc), insn);
5489 if (VAL_HOLDS_TRACK_EXPR (loc))
5491 if (VAL_EXPR_IS_CLOBBERED (loc))
5494 var_reg_delete (out, uloc, true);
5495 else if (MEM_P (uloc))
5496 var_mem_delete (out, uloc, true);
5500 bool copied_p = VAL_EXPR_IS_COPIED (loc);
5502 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
5504 if (GET_CODE (uloc) == SET)
5506 set_src = SET_SRC (uloc);
5507 uloc = SET_DEST (uloc);
5512 if (flag_var_tracking_uninit)
5514 status = find_src_status (in, set_src);
5516 if (status == VAR_INIT_STATUS_UNKNOWN)
5517 status = find_src_status (out, set_src);
5520 set_src = find_src_set_src (in, set_src);
5524 var_reg_delete_and_set (out, uloc, !copied_p,
5526 else if (MEM_P (uloc))
5527 var_mem_delete_and_set (out, uloc, !copied_p,
5531 else if (REG_P (uloc))
5532 var_regno_delete (out, REGNO (uloc));
5534 val_store (out, val, vloc, insn, true);
5537 val_store (out, XEXP (reverse, 0), XEXP (reverse, 1),
5544 rtx loc = VTI (bb)->mos[i].u.loc;
5547 if (GET_CODE (loc) == SET)
5549 set_src = SET_SRC (loc);
5550 loc = SET_DEST (loc);
5554 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5556 else if (MEM_P (loc))
5557 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5564 rtx loc = VTI (bb)->mos[i].u.loc;
5565 enum var_init_status src_status;
5568 if (GET_CODE (loc) == SET)
5570 set_src = SET_SRC (loc);
5571 loc = SET_DEST (loc);
5574 if (! flag_var_tracking_uninit)
5575 src_status = VAR_INIT_STATUS_INITIALIZED;
5578 src_status = find_src_status (in, set_src);
5580 if (src_status == VAR_INIT_STATUS_UNKNOWN)
5581 src_status = find_src_status (out, set_src);
5584 set_src = find_src_set_src (in, set_src);
5587 var_reg_delete_and_set (out, loc, false, src_status, set_src);
5588 else if (MEM_P (loc))
5589 var_mem_delete_and_set (out, loc, false, src_status, set_src);
5595 rtx loc = VTI (bb)->mos[i].u.loc;
5598 var_reg_delete (out, loc, false);
5599 else if (MEM_P (loc))
5600 var_mem_delete (out, loc, false);
5606 rtx loc = VTI (bb)->mos[i].u.loc;
5609 var_reg_delete (out, loc, true);
5610 else if (MEM_P (loc))
5611 var_mem_delete (out, loc, true);
5616 out->stack_adjust += VTI (bb)->mos[i].u.adjust;
5621 if (MAY_HAVE_DEBUG_INSNS)
5623 dataflow_set_equiv_regs (out);
5624 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
5626 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
5629 htab_traverse (shared_hash_htab (out->vars),
5630 canonicalize_loc_order_check, out);
5633 changed = dataflow_set_different (&old_out, out);
5634 dataflow_set_destroy (&old_out);
5638 /* Find the locations of variables in the whole function. */
/* NOTE(review): this listing is elided (original line numbers jump);
   the comments below describe only the visible lines.
   Overall shape: a two-queue worklist iteration over the CFG.  Blocks
   are processed in reverse-completion order (bb_order built from
   pre_and_rev_post_order_compute) from the current-round fibheap
   WORKLIST; successors whose IN may have changed are queued either
   into WORKLIST (same round, if already visited) or into PENDING
   (next round).  The rounds alternate by swapping the heaps and the
   in_worklist/in_pending sbitmaps.  Iteration stops at a fixed point
   or when the combined hash-table size exceeds
   PARAM_MAX_VARTRACK_SIZE, in which case SUCCESS becomes false.  */
5641 vt_find_locations (void)
5643 fibheap_t worklist, pending, fibheap_swap;
5644 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
5651 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
5652 bool success = true;
5654 /* Compute reverse completion order of depth first search of the CFG
5655 so that the data-flow runs faster. */
5656 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
5657 bb_order = XNEWVEC (int, last_basic_block);
5658 pre_and_rev_post_order_compute (NULL, rc_order, false);
5659 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
5660 bb_order[rc_order[i]] = i;
5663 worklist = fibheap_new ();
5664 pending = fibheap_new ();
5665 visited = sbitmap_alloc (last_basic_block);
5666 in_worklist = sbitmap_alloc (last_basic_block);
5667 in_pending = sbitmap_alloc (last_basic_block);
5668 sbitmap_zero (in_worklist);
/* Seed the first round: every block starts out pending.  */
5671 fibheap_insert (pending, bb_order[bb->index], bb);
5672 sbitmap_ones (in_pending);
5674 while (success && !fibheap_empty (pending))
/* Swap PENDING into WORKLIST (and the membership bitmaps with
   them) to begin the next round.  */
5676 fibheap_swap = pending;
5678 worklist = fibheap_swap;
5679 sbitmap_swap = in_pending;
5680 in_pending = in_worklist;
5681 in_worklist = sbitmap_swap;
5683 sbitmap_zero (visited);
5685 while (!fibheap_empty (worklist))
5687 bb = (basic_block) fibheap_extract_min (worklist);
5688 RESET_BIT (in_worklist, bb->index);
5689 if (!TEST_BIT (visited, bb->index))
5693 int oldinsz, oldoutsz;
5695 SET_BIT (visited, bb->index);
5697 if (VTI (bb)->in.vars)
/* Subtract the old table sizes; they are re-added after
   compute_bb_dataflow so HTABSZ tracks the current total.  */
5700 -= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
5701 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
5703 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
5705 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
5708 oldinsz = oldoutsz = 0;
5710 if (MAY_HAVE_DEBUG_INSNS)
5712 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
5713 bool first = true, adjust = false;
5715 /* Calculate the IN set as the intersection of
5716 predecessor OUT sets. */
5718 dataflow_set_clear (in);
5719 dst_can_be_shared = true;
5721 FOR_EACH_EDGE (e, ei, bb->preds)
5722 if (!VTI (e->src)->flooded)
/* Unflooded predecessors must come later in BB_ORDER,
   otherwise the intersection below would be wrong.  */
5723 gcc_assert (bb_order[bb->index]
5724 <= bb_order[e->src->index]);
5727 dataflow_set_copy (in, &VTI (e->src)->out);
5728 first_out = &VTI (e->src)->out;
5733 dataflow_set_merge (in, &VTI (e->src)->out);
5739 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
5741 /* Merge and merge_adjust should keep entries in
5743 htab_traverse (shared_hash_htab (in->vars),
5744 canonicalize_loc_order_check,
5747 if (dst_can_be_shared)
/* The merged IN equals the first predecessor's OUT, so
   share its hash table instead of keeping a copy.  */
5749 shared_hash_destroy (in->vars);
5750 in->vars = shared_hash_copy (first_out->vars);
5754 VTI (bb)->flooded = true;
5758 /* Calculate the IN set as union of predecessor OUT sets. */
5759 dataflow_set_clear (&VTI (bb)->in);
5760 FOR_EACH_EDGE (e, ei, bb->preds)
5761 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
5764 changed = compute_bb_dataflow (bb);
5765 htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
5766 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
5768 if (htabmax && htabsz > htabmax)
/* Size limit exceeded: report and bail out (SUCCESS is
   cleared on an elided line).  */
5770 if (MAY_HAVE_DEBUG_INSNS)
5771 inform (DECL_SOURCE_LOCATION (cfun->decl),
5772 "variable tracking size limit exceeded with "
5773 "-fvar-tracking-assignments, retrying without");
5775 inform (DECL_SOURCE_LOCATION (cfun->decl),
5776 "variable tracking size limit exceeded");
/* OUT changed: propagate to successors.  */
5783 FOR_EACH_EDGE (e, ei, bb->succs)
5785 if (e->dest == EXIT_BLOCK_PTR)
5788 if (TEST_BIT (visited, e->dest->index))
5790 if (!TEST_BIT (in_pending, e->dest->index))
5792 /* Send E->DEST to next round. */
5793 SET_BIT (in_pending, e->dest->index);
5794 fibheap_insert (pending,
5795 bb_order[e->dest->index],
5799 else if (!TEST_BIT (in_worklist, e->dest->index))
5801 /* Add E->DEST to current round. */
5802 SET_BIT (in_worklist, e->dest->index);
5803 fibheap_insert (worklist, bb_order[e->dest->index],
5811 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
5813 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
5815 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
5817 (int)worklist->nodes, (int)pending->nodes, htabsz)
5819 if (dump_file && (dump_flags & TDF_DETAILS))
5821 fprintf (dump_file, "BB %i IN:\n", bb->index);
5822 dump_dataflow_set (&VTI (bb)->in);
5823 fprintf (dump_file, "BB %i OUT:\n", bb->index);
5824 dump_dataflow_set (&VTI (bb)->out);
/* With debug insns every block must have been flooded by the
   intersection pass above.  */
5830 if (success && MAY_HAVE_DEBUG_INSNS)
5832 gcc_assert (VTI (bb)->flooded);
5835 fibheap_delete (worklist);
5836 fibheap_delete (pending);
5837 sbitmap_free (visited);
5838 sbitmap_free (in_worklist);
5839 sbitmap_free (in_pending);
5844 /* Print the content of the LIST to dump file. */
5847 dump_attrs_list (attrs list)
5849 for (; list; list = list->next)
/* A dv is either a decl or a VALUE; pick the matching printer.  */
5851 if (dv_is_decl_p (list->dv))
5852 print_mem_expr (dump_file, dv_as_decl (list->dv));
5854 print_rtl_single (dump_file, dv_as_value (list->dv));
/* Each entry is followed by its byte offset within the variable.  */
5855 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
5857 fprintf (dump_file, "\n");
5860 /* Print the information about variable *SLOT to dump file. */
/* htab_traverse callback wrapper around dump_var; DATA is unused.  */
5863 dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
5865 variable var = (variable) *slot;
5869 /* Continue traversing the hash table. */
5873 /* Print the information about variable VAR to dump file. */
5876 dump_var (variable var)
5879 location_chain node;
5881 if (dv_is_decl_p (var->dv))
5883 const_tree decl = dv_as_decl (var->dv);
/* Named decl: print its identifier, plus the DECL_UID when TDF_UID
   dumping is requested.  */
5885 if (DECL_NAME (decl))
5887 fprintf (dump_file, " name: %s",
5888 IDENTIFIER_POINTER (DECL_NAME (decl)));
5889 if (dump_flags & TDF_UID)
5890 fprintf (dump_file, "D.%u", DECL_UID (decl));
/* Anonymous decls: debug temporaries print as D#n, others as D.uid.  */
5892 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
5893 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
5895 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
5896 fprintf (dump_file, "\n");
/* The dv is a VALUE; print the rtx itself.  */
5900 fputc (' ', dump_file);
5901 print_rtl_single (dump_file, dv_as_value (var->dv));
/* Dump every part: its offset and full location chain.  */
5904 for (i = 0; i < var->n_var_parts; i++)
5906 fprintf (dump_file, " offset %ld\n",
5907 (long) var->var_part[i].offset);
5908 for (node = var->var_part[i].loc_chain; node; node = node->next)
5910 fprintf (dump_file, " ");
5911 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
5912 fprintf (dump_file, "[uninit]");
5913 print_rtl_single (dump_file, node->loc);
5918 /* Print the information about variables from hash table VARS to dump file. */
5921 dump_vars (htab_t vars)
/* Print nothing at all for an empty table.  */
5923 if (htab_elements (vars) > 0)
5925 fprintf (dump_file, "Variables:\n");
5926 htab_traverse (vars, dump_var_slot, NULL);
5930 /* Print the dataflow set SET to dump file. */
5933 dump_dataflow_set (dataflow_set *set)
5937 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
/* Dump the attribute list of every hard register that has one.  */
5939 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5943 fprintf (dump_file, "Reg %d:", i);
5944 dump_attrs_list (set->regs[i]);
5947 dump_vars (shared_hash_htab (set->vars));
5948 fprintf (dump_file, "\n");
5951 /* Print the IN and OUT sets for each basic block to dump file. */
5954 dump_dataflow_sets (void)
5960 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
5961 fprintf (dump_file, "IN:\n");
5962 dump_dataflow_set (&VTI (bb)->in);
5963 fprintf (dump_file, "OUT:\n");
5964 dump_dataflow_set (&VTI (bb)->out);
5968 /* Add variable VAR to the hash table of changed variables and
5969 if it has no locations delete it from SET's hash table. */
/* NOTE(review): elided listing; comments describe visible lines only.  */
5972 variable_was_changed (variable var, dataflow_set *set)
5974 hashval_t hash = dv_htab_hash (var->dv);
5979 bool old_cur_loc_changed = false;
5981 /* Remember this decl or VALUE has been added to changed_variables. */
5982 set_dv_changed (var->dv, true);
5984 slot = htab_find_slot_with_hash (changed_variables,
/* The dv was already queued: free the stale entry but remember its
   cur_loc_changed flag so the information is not lost.  */
5990 variable old_var = (variable) *slot;
5991 gcc_assert (old_var->in_changed_variables);
5992 old_var->in_changed_variables = false;
5993 old_cur_loc_changed = old_var->cur_loc_changed;
5994 variable_htab_free (*slot);
/* A variable with no parts left is queued as a fresh empty stub so the
   note emitter sees "location lost" rather than the live object.  */
5996 if (set && var->n_var_parts == 0)
6000 empty_var = (variable) pool_alloc (dv_pool (var->dv));
6001 empty_var->dv = var->dv;
6002 empty_var->refcount = 1;
6003 empty_var->n_var_parts = 0;
6004 empty_var->cur_loc_changed = true;
6005 empty_var->in_changed_variables = true;
6012 var->in_changed_variables = true;
6013 /* If within processing one uop a variable is deleted
6014 and then readded, we need to assume it has changed. */
6015 if (old_cur_loc_changed)
6016 var->cur_loc_changed = true;
/* Emptied variables are also removed from SET's own hash table,
   unsharing it first if the table is shared.  */
6023 if (var->n_var_parts == 0)
6028 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
6031 if (shared_hash_shared (set->vars))
6032 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
6034 htab_clear_slot (shared_hash_htab (set->vars), slot);
6040 /* Look for the index in VAR->var_part corresponding to OFFSET.
6041 Return -1 if not found. If INSERTION_POINT is non-NULL, the
6042 referenced int will be set to the index that the part has or should
6043 have, if it should be inserted. */
6046 find_variable_location_part (variable var, HOST_WIDE_INT offset,
6047 int *insertion_point)
6051 /* Find the location part. */
/* Binary search: var_part[] is kept sorted by offset.  */
6053 high = var->n_var_parts;
6056 pos = (low + high) / 2;
6057 if (var->var_part[pos].offset < offset)
6064 if (insertion_point)
6065 *insertion_point = pos;
/* POS is the lower bound; it is a hit only if the offset matches.  */
6067 if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
/* Add location LOC (with initialization status INITIALIZED and source
   SET_SRC) to the part of the variable *SLOT (for DV at OFFSET) in
   dataflow set SET, creating the variable or the part as needed.
   NOTE(review): elided listing; comments describe visible lines only.  */
6074 set_slot_part (dataflow_set *set, rtx loc, void **slot,
6075 decl_or_value dv, HOST_WIDE_INT offset,
6076 enum var_init_status initialized, rtx set_src)
6079 location_chain node, next;
6080 location_chain *nextp;
/* One-part dvs (VALUEs and the like) always live at offset 0.  */
6082 bool onepart = dv_onepart_p (dv);
6084 gcc_assert (offset == 0 || !onepart);
6085 gcc_assert (loc != dv_as_opaque (dv));
6087 var = (variable) *slot;
6089 if (! flag_var_tracking_uninit)
6090 initialized = VAR_INIT_STATUS_INITIALIZED;
6094 /* Create new variable information. */
6095 var = (variable) pool_alloc (dv_pool (dv));
6098 var->n_var_parts = 1;
6099 var->cur_loc_changed = false;
6100 var->in_changed_variables = false;
6101 var->var_part[0].offset = offset;
6102 var->var_part[0].loc_chain = NULL;
6103 var->var_part[0].cur_loc = NULL;
6106 nextp = &var->var_part[0].loc_chain;
/* Existing one-part variable: find the canonical insertion point for
   LOC in the sorted location chain.  The chain for one-part dvs keeps
   VALUEs first (in canon_value_cmp order), then REGs, then MEMs.  */
6112 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
6116 if (GET_CODE (loc) == VALUE)
6118 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6119 nextp = &node->next)
6120 if (GET_CODE (node->loc) == VALUE)
6122 if (node->loc == loc)
6127 if (canon_value_cmp (node->loc, loc))
6135 else if (REG_P (node->loc) || MEM_P (node->loc))
6143 else if (REG_P (loc))
/* Registers are ordered by REGNO; an equal REGNO means LOC replaces
   that node.  */
6145 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6146 nextp = &node->next)
6147 if (REG_P (node->loc))
6149 if (REGNO (node->loc) < REGNO (loc))
6153 if (REGNO (node->loc) == REGNO (loc))
6166 else if (MEM_P (loc))
/* MEMs go after all registers, ordered by loc_cmp on their address.  */
6168 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6169 nextp = &node->next)
6170 if (REG_P (node->loc))
6172 else if (MEM_P (node->loc))
6174 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
6186 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6187 nextp = &node->next)
6188 if ((r = loc_cmp (node->loc, loc)) >= 0)
/* If the chain is shared, unshare it and re-walk to the same
   position (C nodes were skipped before the unshare).  */
6196 if (shared_var_p (var, set->vars))
6198 slot = unshare_variable (set, slot, var, initialized);
6199 var = (variable)*slot;
6200 for (nextp = &var->var_part[0].loc_chain; c;
6201 nextp = &(*nextp)->next)
6203 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
/* Multi-part decl path: locate (or create) the part for OFFSET.  */
6210 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
6212 pos = find_variable_location_part (var, offset, &inspos);
6216 node = var->var_part[pos].loc_chain;
/* LOC already heads the chain: just refresh init status/set_src.  */
6219 && ((REG_P (node->loc) && REG_P (loc)
6220 && REGNO (node->loc) == REGNO (loc))
6221 || rtx_equal_p (node->loc, loc)))
6223 /* LOC is in the beginning of the chain so we have nothing
6225 if (node->init < initialized)
6226 node->init = initialized;
6227 if (set_src != NULL)
6228 node->set_src = set_src;
6234 /* We have to make a copy of a shared variable. */
6235 if (shared_var_p (var, set->vars))
6237 slot = unshare_variable (set, slot, var, initialized);
6238 var = (variable)*slot;
6244 /* We have not found the location part, new one will be created. */
6246 /* We have to make a copy of the shared variable. */
6247 if (shared_var_p (var, set->vars))
6249 slot = unshare_variable (set, slot, var, initialized);
6250 var = (variable)*slot;
6253 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
6254 thus there are at most MAX_VAR_PARTS different offsets. */
6255 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
6256 && (!var->n_var_parts || !dv_onepart_p (var->dv)));
6258 /* We have to move the elements of array starting at index
6259 inspos to the next position. */
6260 for (pos = var->n_var_parts; pos > inspos; pos--)
6261 var->var_part[pos] = var->var_part[pos - 1];
6264 var->var_part[pos].offset = offset;
6265 var->var_part[pos].loc_chain = NULL;
6266 var->var_part[pos].cur_loc = NULL;
6269 /* Delete the location from the list. */
/* Remove any old node equal to LOC, keeping its stronger init status
   and its set_src for the node about to be inserted.  */
6270 nextp = &var->var_part[pos].loc_chain;
6271 for (node = var->var_part[pos].loc_chain; node; node = next)
6274 if ((REG_P (node->loc) && REG_P (loc)
6275 && REGNO (node->loc) == REGNO (loc))
6276 || rtx_equal_p (node->loc, loc))
6278 /* Save these values, to assign to the new node, before
6279 deleting this one. */
6280 if (node->init > initialized)
6281 initialized = node->init;
6282 if (node->set_src != NULL && set_src == NULL)
6283 set_src = node->set_src;
6284 if (var->var_part[pos].cur_loc == node->loc)
6286 var->var_part[pos].cur_loc = NULL;
6287 var->cur_loc_changed = true;
6289 pool_free (loc_chain_pool, node);
6294 nextp = &node->next;
6297 nextp = &var->var_part[pos].loc_chain;
6300 /* Add the location to the beginning. */
6301 node = (location_chain) pool_alloc (loc_chain_pool);
6303 node->init = initialized;
6304 node->set_src = set_src;
6305 node->next = *nextp;
/* One-part dvs participate in the value-chain bookkeeping used while
   emitting notes.  */
6308 if (onepart && emit_notes)
6309 add_value_chains (var->dv, loc);
6311 /* If no location was emitted do so. */
6312 if (var->var_part[pos].cur_loc == NULL)
6313 variable_was_changed (var, set);
6318 /* Set the part of variable's location in the dataflow set SET. The
6319 variable part is specified by variable's declaration in DV and
6320 offset OFFSET and the part's location by LOC. IOPT should be
6321 NO_INSERT if the variable is known to be in SET already and the
6322 variable hash table must not be resized, and INSERT otherwise. */
6325 set_variable_part (dataflow_set *set, rtx loc,
6326 decl_or_value dv, HOST_WIDE_INT offset,
6327 enum var_init_status initialized, rtx set_src,
6328 enum insert_option iopt)
/* Find the slot for DV — without inserting when the caller guarantees
   presence, otherwise unsharing the table first if it is shared —
   then delegate the real work to set_slot_part.  */
6332 if (iopt == NO_INSERT)
6333 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6336 slot = shared_hash_find_slot (set->vars, dv);
6338 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
6340 slot = set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
6343 /* Remove all recorded register locations for the given variable part
6344 from dataflow set SET, except for those that are identical to loc.
6345 The variable part is specified by variable's declaration or value
6346 DV and offset OFFSET. */
/* NOTE(review): elided listing; comments describe visible lines only.  */
6349 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
6350 HOST_WIDE_INT offset, rtx set_src)
6352 variable var = (variable) *slot;
6353 int pos = find_variable_location_part (var, offset, NULL);
6357 location_chain node, next;
6359 /* Remove the register locations from the dataflow set. */
6360 next = var->var_part[pos].loc_chain;
6361 for (node = next; node; node = next)
/* Keep LOC itself, and (with -fvar-tracking-uninit) nodes whose
   set_src matches SET_SRC.  */
6364 if (node->loc != loc
6365 && (!flag_var_tracking_uninit
6368 || !rtx_equal_p (set_src, node->set_src)))
6370 if (REG_P (node->loc))
6375 /* Remove the variable part from the register's
6376 list, but preserve any other variable parts
6377 that might be regarded as live in that same
6379 anextp = &set->regs[REGNO (node->loc)];
6380 for (anode = *anextp; anode; anode = anext)
6382 anext = anode->next;
/* Only drop attrs entries that refer to exactly this
   dv/offset pair.  */
6383 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
6384 && anode->offset == offset)
6386 pool_free (attrs_pool, anode);
6390 anextp = &anode->next;
6394 slot = delete_slot_part (set, node->loc, slot, offset);
6402 /* Remove all recorded register locations for the given variable part
6403 from dataflow set SET, except for those that are identical to loc.
6404 The variable part is specified by variable's declaration or value
6405 DV and offset OFFSET. */
6408 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6409 HOST_WIDE_INT offset, rtx set_src)
/* Ignore dvs that are neither a VALUE nor a real decl.  */
6413 if (!dv_as_opaque (dv)
6414 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
/* Nothing to do when DV is not tracked in SET at all.  */
6417 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6421 slot = clobber_slot_part (set, loc, slot, offset, set_src);
6424 /* Delete the part of variable's location from dataflow set SET. The
6425 variable part is specified by its SET->vars slot SLOT and offset
6426 OFFSET and the part's location by LOC. */
/* NOTE(review): elided listing; comments describe visible lines only.  */
6429 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
6430 HOST_WIDE_INT offset)
6432 variable var = (variable) *slot;
6433 int pos = find_variable_location_part (var, offset, NULL);
6437 location_chain node, next;
6438 location_chain *nextp;
6441 if (shared_var_p (var, set->vars))
6443 /* If the variable contains the location part we have to
6444 make a copy of the variable. */
6445 for (node = var->var_part[pos].loc_chain; node;
/* A REG matches by REGNO; anything else by rtx_equal_p.  */
6448 if ((REG_P (node->loc) && REG_P (loc)
6449 && REGNO (node->loc) == REGNO (loc))
6450 || rtx_equal_p (node->loc, loc))
6452 slot = unshare_variable (set, slot, var,
6453 VAR_INIT_STATUS_UNKNOWN);
6454 var = (variable)*slot;
6460 /* Delete the location part. */
6462 nextp = &var->var_part[pos].loc_chain;
6463 for (node = *nextp; node; node = next)
6466 if ((REG_P (node->loc) && REG_P (loc)
6467 && REGNO (node->loc) == REGNO (loc))
6468 || rtx_equal_p (node->loc, loc))
/* One-part dvs must drop their value-chain references too.  */
6470 if (emit_notes && pos == 0 && dv_onepart_p (var->dv))
6471 remove_value_chains (var->dv, node->loc);
6472 /* If we have deleted the location which was last emitted
6473 we have to emit new location so add the variable to set
6474 of changed variables. */
6475 if (var->var_part[pos].cur_loc == node->loc)
6478 var->var_part[pos].cur_loc = NULL;
6479 var->cur_loc_changed = true;
6481 pool_free (loc_chain_pool, node);
6486 nextp = &node->next;
/* The part became empty: compact var_part[] over it.  */
6489 if (var->var_part[pos].loc_chain == NULL)
6494 var->cur_loc_changed = true;
6495 while (pos < var->n_var_parts)
6497 var->var_part[pos] = var->var_part[pos + 1];
6502 variable_was_changed (var, set);
6508 /* Delete the part of variable's location from dataflow set SET. The
6509 variable part is specified by variable's declaration or value DV
6510 and offset OFFSET and the part's location by LOC. */
6513 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6514 HOST_WIDE_INT offset)
/* Look DV up without inserting; delegate to delete_slot_part when
   present.  */
6516 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
6520 slot = delete_slot_part (set, loc, slot, offset);
6523 /* Structure for passing some other parameters to function
6524 vt_expand_loc_callback. */
6525 struct expand_loc_callback_data
6527 /* The variables and values active at this point. */
6530 /* True in vt_expand_loc_dummy calls, no rtl should be allocated.
6531 Non-NULL should be returned if vt_expand_loc would return
6532 non-NULL in that case, NULL otherwise. cur_loc_changed should be
6533 computed and cur_loc recomputed when possible (but just once
6534 per emit_notes_for_changes call). */
6537 /* True if expansion of subexpressions had to recompute some
6538 VALUE/DEBUG_EXPR_DECL's cur_loc or used a VALUE/DEBUG_EXPR_DECL
6539 whose cur_loc has been already recomputed during current
6540 emit_notes_for_changes call. */
6541 bool cur_loc_changed;
6544 /* Callback for cselib_expand_value, that looks for expressions
6545 holding the value in the var-tracking hash tables. Return X for
6546 standard processing, anything else is to be used as-is. */
/* NOTE(review): elided listing; comments describe visible lines only.  */
6549 vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
6551 struct expand_loc_callback_data *elcd
6552 = (struct expand_loc_callback_data *) data;
6553 bool dummy = elcd->dummy;
6554 bool cur_loc_changed = elcd->cur_loc_changed;
6558 rtx result, subreg, xret;
6560 switch (GET_CODE (x))
/* SUBREG: expand the inner rtx recursively, then re-wrap it.  */
6563 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
6565 vt_expand_loc_callback, data);
6573 result = simplify_gen_subreg (GET_MODE (x), subreg,
6574 GET_MODE (SUBREG_REG (x)),
6577 /* Invalid SUBREGs are ok in debug info. ??? We could try
6578 alternate expansions for the VALUE as well. */
6580 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
/* DEBUG_EXPR and VALUE both map to a dv looked up below.  */
6585 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
6590 dv = dv_from_value (x);
/* Guard against cycles through mutually-referencing VALUEs.  */
6598 if (VALUE_RECURSED_INTO (x))
6601 var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));
6605 if (dummy && dv_changed_p (dv))
6606 elcd->cur_loc_changed = true;
6610 if (var->n_var_parts == 0)
6613 elcd->cur_loc_changed = true;
6617 gcc_assert (var->n_var_parts == 1);
6619 VALUE_RECURSED_INTO (x) = true;
/* First try the cached cur_loc; in dummy mode only test expandability
   without allocating rtl.  */
6622 if (var->var_part[0].cur_loc)
6626 if (cselib_dummy_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
6628 vt_expand_loc_callback, data))
6632 result = cselib_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
6634 vt_expand_loc_callback, data);
6636 set_dv_changed (dv, false);
/* cur_loc failed: walk the rest of the location chain looking for
   another expandable location and cache it as the new cur_loc.  */
6638 if (!result && dv_changed_p (dv))
6640 set_dv_changed (dv, false);
6641 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
6642 if (loc->loc == var->var_part[0].cur_loc)
6646 elcd->cur_loc_changed = cur_loc_changed;
6647 if (cselib_dummy_expand_value_rtx_cb (loc->loc, regs, max_depth,
6648 vt_expand_loc_callback,
6656 result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
6657 vt_expand_loc_callback,
6663 if (dummy && (result || var->var_part[0].cur_loc))
6664 var->cur_loc_changed = true;
6665 var->var_part[0].cur_loc = loc ? loc->loc : NULL_RTX;
/* Propagate recomputation status back to the caller's data.  */
6669 if (var->cur_loc_changed)
6670 elcd->cur_loc_changed = true;
6671 else if (!result && var->var_part[0].cur_loc == NULL_RTX)
6672 elcd->cur_loc_changed = cur_loc_changed;
6675 VALUE_RECURSED_INTO (x) = false;
6682 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
6686 vt_expand_loc (rtx loc, htab_t vars)
6688 struct expand_loc_callback_data data;
/* Without debug insns there are no VALUEs to expand.  */
6690 if (!MAY_HAVE_DEBUG_INSNS)
6695 data.cur_loc_changed = false;
/* Depth limit 5 bounds the recursive expansion.  */
6696 loc = cselib_expand_value_rtx_cb (loc, scratch_regs, 5,
6697 vt_expand_loc_callback, &data);
6699 if (loc && MEM_P (loc))
6700 loc = targetm.delegitimize_address (loc);
6704 /* Like vt_expand_loc, but only return true/false (whether vt_expand_loc
6705 would succeed or not, without actually allocating new rtxes. */
6708 vt_expand_loc_dummy (rtx loc, htab_t vars, bool *pcur_loc_changed)
6710 struct expand_loc_callback_data data;
/* Only meaningful when VALUEs can occur at all.  */
6713 gcc_assert (MAY_HAVE_DEBUG_INSNS);
6716 data.cur_loc_changed = false;
6717 ret = cselib_dummy_expand_value_rtx_cb (loc, scratch_regs, 5,
6718 vt_expand_loc_callback, &data);
/* Report whether any cur_loc had to be recomputed during the walk.  */
6719 *pcur_loc_changed = data.cur_loc_changed;
6723 #ifdef ENABLE_RTL_CHECKING
6724 /* Used to verify that cur_loc_changed updating is safe. */
/* Maps a decl to the last VAR_LOCATION note pattern emitted for it;
   only compiled into checking-enabled builds.  */
6725 static struct pointer_map_t *emitted_notes;
6728 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
6729 additional parameters: WHERE specifies whether the note shall be emitted
6730 before or after instruction INSN. */
/* NOTE(review): elided listing; comments describe visible lines only.  */
6733 emit_note_insn_var_location (void **varp, void *data)
6735 variable var = (variable) *varp;
6736 rtx insn = ((emit_note_data *)data)->insn;
6737 enum emit_note_where where = ((emit_note_data *)data)->where;
6738 htab_t vars = ((emit_note_data *)data)->vars;
6740 int i, j, n_var_parts;
6742 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
6743 HOST_WIDE_INT last_limit;
6744 tree type_size_unit;
6745 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
6746 rtx loc[MAX_VAR_PARTS];
/* VALUEs and DEBUG_EXPR_DECLs do not get notes of their own; they get
   the cur_loc refresh at value_or_debug_decl below.  */
6750 if (dv_is_value_p (var->dv))
6751 goto value_or_debug_decl;
6753 decl = dv_as_decl (var->dv);
6755 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6756 goto value_or_debug_decl;
/* Without debug stmts cur_loc is simply the head of each chain.  */
6761 if (!MAY_HAVE_DEBUG_STMTS)
6763 for (i = 0; i < var->n_var_parts; i++)
6764 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
6766 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
6767 var->cur_loc_changed = true;
6769 if (var->n_var_parts == 0)
6770 var->cur_loc_changed = true;
6772 #ifndef ENABLE_RTL_CHECKING
/* Fast path: nothing visible changed, so no new note is needed
   (checking builds fall through to verify via emitted_notes).  */
6773 if (!var->cur_loc_changed)
/* Collect the expanded location of each part into loc[]/offsets[],
   tracking the covered byte range in last_limit.  */
6776 for (i = 0; i < var->n_var_parts; i++)
6778 enum machine_mode mode, wider_mode;
6781 if (last_limit < var->var_part[i].offset)
6786 else if (last_limit > var->var_part[i].offset)
6788 offsets[n_var_parts] = var->var_part[i].offset;
6789 if (!var->var_part[i].cur_loc)
6794 loc2 = vt_expand_loc (var->var_part[i].cur_loc, vars);
6800 loc[n_var_parts] = loc2;
6801 mode = GET_MODE (var->var_part[i].cur_loc);
/* Pick up the initialization status recorded for cur_loc.  */
6802 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
6803 if (var->var_part[i].cur_loc == lc->loc)
6805 initialized = lc->init;
6809 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
6811 /* Attempt to merge adjacent registers or memory. */
6812 wider_mode = GET_MODE_WIDER_MODE (mode);
6813 for (j = i + 1; j < var->n_var_parts; j++)
6814 if (last_limit <= var->var_part[j].offset)
6816 if (j < var->n_var_parts
6817 && wider_mode != VOIDmode
6818 && var->var_part[j].cur_loc
6819 && mode == GET_MODE (var->var_part[j].cur_loc)
6820 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
6821 && last_limit == var->var_part[j].offset
6822 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
6823 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
/* Two consecutive hard registers forming one wider register.  */
6827 if (REG_P (loc[n_var_parts])
6828 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
6829 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
6830 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
6833 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
6834 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
6836 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
6837 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
6840 if (!REG_P (new_loc)
6841 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
6844 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
/* Two MEMs whose addresses differ by exactly GET_MODE_SIZE.  */
6847 else if (MEM_P (loc[n_var_parts])
6848 && GET_CODE (XEXP (loc2, 0)) == PLUS
6849 && REG_P (XEXP (XEXP (loc2, 0), 0))
6850 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
6852 if ((REG_P (XEXP (loc[n_var_parts], 0))
6853 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
6854 XEXP (XEXP (loc2, 0), 0))
6855 && INTVAL (XEXP (XEXP (loc2, 0), 1))
6856 == GET_MODE_SIZE (mode))
6857 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
6858 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
6859 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
6860 XEXP (XEXP (loc2, 0), 0))
6861 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
6862 + GET_MODE_SIZE (mode)
6863 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
6864 new_loc = adjust_address_nv (loc[n_var_parts],
6870 loc[n_var_parts] = new_loc;
6872 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
/* If the parts do not cover the whole decl, the location is
   incomplete (handled on elided lines).  */
6878 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6879 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
6882 if (! flag_var_tracking_uninit)
6883 initialized = VAR_INIT_STATUS_INITIALIZED;
/* Build the VAR_LOCATION pattern: NULL_RTX for "no location", a
   single EXPR_LIST for one part, or a PARALLEL for several.  */
6887 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
6889 else if (n_var_parts == 1)
6892 = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
6894 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
6897 else if (n_var_parts)
6901 for (i = 0; i < n_var_parts; i++)
6903 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
6905 parallel = gen_rtx_PARALLEL (VOIDmode,
6906 gen_rtvec_v (n_var_parts, loc));
6907 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6908 parallel, (int) initialized);
6911 #ifdef ENABLE_RTL_CHECKING
/* Checking build: verify that a "not changed" variable would have
   produced the same pattern as last time.  */
6914 void **note_slot = pointer_map_insert (emitted_notes, decl);
6915 rtx pnote = (rtx) *note_slot;
6916 if (!var->cur_loc_changed && (pnote || PAT_VAR_LOCATION_LOC (note_vl)))
6919 gcc_assert (rtx_equal_p (PAT_VAR_LOCATION_LOC (pnote),
6920 PAT_VAR_LOCATION_LOC (note_vl)));
6922 *note_slot = (void *) note_vl;
6924 if (!var->cur_loc_changed)
/* Place the note before or after INSN as requested by WHERE.  */
6928 if (where != EMIT_NOTE_BEFORE_INSN)
6930 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
6931 if (where == EMIT_NOTE_AFTER_CALL_INSN)
6932 NOTE_DURING_CALL_P (note) = true;
6935 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
6936 NOTE_VAR_LOCATION (note) = note_vl;
/* The variable has been handled; remove it from changed_variables.  */
6939 set_dv_changed (var->dv, false);
6940 var->cur_loc_changed = false;
6941 gcc_assert (var->in_changed_variables);
6942 var->in_changed_variables = false;
6943 htab_clear_slot (changed_variables, varp);
6945 /* Continue traversing the hash table. */
6948 value_or_debug_decl:
/* For VALUEs/DEBUG_EXPR_DECLs just refresh cur_loc to the first
   still-expandable location in the chain.  */
6949 if (dv_changed_p (var->dv) && var->n_var_parts)
6952 bool cur_loc_changed;
6954 if (var->var_part[0].cur_loc
6955 && vt_expand_loc_dummy (var->var_part[0].cur_loc, vars,
6958 for (lc = var->var_part[0].loc_chain; lc; lc = lc->next)
6959 if (lc->loc != var->var_part[0].cur_loc
6960 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
6962 var->var_part[0].cur_loc = lc ? lc->loc : NULL_RTX;
6967 DEF_VEC_P (variable);
6968 DEF_VEC_ALLOC_P (variable, heap);
6970 /* Stack of variable_def pointers that need processing with
6971 check_changed_vars_2. */
6973 static VEC (variable, heap) *changed_variables_stack;
6975 /* VALUEs with no variables that need set_dv_changed (val, false)
6976 called before check_changed_vars_3. */
6978 static VEC (rtx, heap) *changed_values_stack;
6980 /* Helper function for check_changed_vars_1 and check_changed_vars_2. */
/* Walk the value chain of DV and transitively mark every dependent dv
   as changed, queueing live variables on changed_variables_stack and
   bare VALUEs on changed_values_stack for later processing.  */
6983 check_changed_vars_0 (decl_or_value dv, htab_t htab)
6986 = (value_chain) htab_find_with_hash (value_chains, dv, dv_htab_hash (dv));
6990 for (vc = vc->next; vc; vc = vc->next)
6991 if (!dv_changed_p (vc->dv))
6994 = (variable) htab_find_with_hash (htab, vc->dv,
6995 dv_htab_hash (vc->dv));
6998 set_dv_changed (vc->dv, true);
6999 VEC_safe_push (variable, heap, changed_variables_stack, vcvar);
/* A changed VALUE with no variable entry still needs its dependents
   visited; recurse directly.  */
7001 else if (dv_is_value_p (vc->dv))
7003 set_dv_changed (vc->dv, true);
7004 VEC_safe_push (rtx, heap, changed_values_stack,
7005 dv_as_value (vc->dv));
7006 check_changed_vars_0 (vc->dv, htab);
7011 /* Populate changed_variables_stack with variable_def pointers
7012 that need variable_was_changed called on them. */
/* htab_traverse callback over changed_variables; DATA is the active
   variables hash table.  */
7015 check_changed_vars_1 (void **slot, void *data)
7017 variable var = (variable) *slot;
7018 htab_t htab = (htab_t) data;
/* Only VALUEs and DEBUG_EXPR_DECLs have value-chain dependents.  */
7020 if (dv_is_value_p (var->dv)
7021 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7022 check_changed_vars_0 (var->dv, htab);
7026 /* Add VAR to changed_variables and also for VALUEs add recursively
7027 all DVs that aren't in changed_variables yet but reference the
7028 VALUE from its loc_chain. */
7031 check_changed_vars_2 (variable var, htab_t htab)
/* Queue VAR itself (SET is NULL: no dataflow-set maintenance here).  */
7033 variable_was_changed (var, NULL);
7034 if (dv_is_value_p (var->dv)
7035 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7036 check_changed_vars_0 (var->dv, htab);
7039 /* For each changed decl (except DEBUG_EXPR_DECLs) recompute
7040 cur_loc if needed (and cur_loc of all VALUEs and DEBUG_EXPR_DECLs
7041 it needs and are also in changed variables) and track whether
7042 cur_loc (or anything it uses to compute location) had to change
7043 during the current emit_notes_for_changes call. */
/* htab_traverse callback over changed_variables; DATA is the active
   variables hash table.  */
7046 check_changed_vars_3 (void **slot, void *data)
7048 variable var = (variable) *slot;
7049 htab_t vars = (htab_t) data;
7052 bool cur_loc_changed;
/* VALUEs and DEBUG_EXPR_DECLs were handled earlier; skip them.  */
7054 if (dv_is_value_p (var->dv)
7055 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7058 for (i = 0; i < var->n_var_parts; i++)
/* If the existing cur_loc still expands, keep it, only noting
   whether its expansion needed recomputation.  */
7060 if (var->var_part[i].cur_loc
7061 && vt_expand_loc_dummy (var->var_part[i].cur_loc, vars,
7064 if (cur_loc_changed)
7065 var->cur_loc_changed = true;
/* Otherwise search the chain for the first expandable location.  */
7068 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
7069 if (lc->loc != var->var_part[i].cur_loc
7070 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
7072 if (lc || var->var_part[i].cur_loc)
7073 var->cur_loc_changed = true;
7074 var->var_part[i].cur_loc = lc ? lc->loc : NULL_RTX;
7076 if (var->n_var_parts == 0)
7077 var->cur_loc_changed = true;
7081 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
7082 CHANGED_VARIABLES and delete this chain. WHERE specifies whether the notes
7083 shall be emitted before of after instruction INSN. */
7086 emit_notes_for_changes (rtx insn, enum emit_note_where where,
7089 emit_note_data data;
7090 htab_t htab = shared_hash_htab (vars);
/* Nothing queued: nothing to emit.  */
7092 if (!htab_elements (changed_variables))
7095 if (MAY_HAVE_DEBUG_INSNS)
7097 /* Unfortunately this has to be done in two steps, because
7098 we can't traverse a hashtab into which we are inserting
7099 through variable_was_changed. */
7100 htab_traverse (changed_variables, check_changed_vars_1, htab);
7101 while (VEC_length (variable, changed_variables_stack) > 0)
7102 check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
/* Clear the changed flag of bare VALUEs queued by
   check_changed_vars_0 before the cur_loc recomputation pass.  */
7104 while (VEC_length (rtx, changed_values_stack) > 0)
7105 set_dv_changed (dv_from_value (VEC_pop (rtx, changed_values_stack)),
7107 htab_traverse (changed_variables, check_changed_vars_3, htab);
/* Finally emit (or skip) a note for every queued variable.  */
7114 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
7117 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
7118 same variable in hash table DATA or is not there at all. */
/* NOTE(review): elided listing; comments describe visible lines only.  */
7121 emit_notes_for_differences_1 (void **slot, void *data)
7123 htab_t new_vars = (htab_t) data;
7124 variable old_var, new_var;
7126 old_var = (variable) *slot;
7127 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
7128 dv_htab_hash (old_var->dv));
7132 /* Variable has disappeared. */
/* Queue an empty stub so a "location lost" note is emitted.  */
7135 empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
7136 empty_var->dv = old_var->dv;
7137 empty_var->refcount = 0;
7138 empty_var->n_var_parts = 0;
7139 empty_var->cur_loc_changed = false;
7140 empty_var->in_changed_variables = false;
/* One-part dvs also drop the value-chain references of every
   location they used to have.  */
7141 if (dv_onepart_p (old_var->dv))
7145 gcc_assert (old_var->n_var_parts == 1);
7146 for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
7147 remove_value_chains (old_var->dv, lc->loc);
7149 variable_was_changed (empty_var, NULL);
7150 /* Continue traversing the hash table. */
7153 if (variable_different_p (old_var, new_var))
7155 if (dv_onepart_p (old_var->dv))
7157 location_chain lc1, lc2;
7159 gcc_assert (old_var->n_var_parts == 1);
7160 gcc_assert (new_var->n_var_parts == 1);
/* Skip the common prefix of the two chains, then swap the value
   chains: add for the new tail, remove for the old tail.  */
7161 lc1 = old_var->var_part[0].loc_chain;
7162 lc2 = new_var->var_part[0].loc_chain;
7165 && ((REG_P (lc1->loc) && REG_P (lc2->loc))
7166 || rtx_equal_p (lc1->loc, lc2->loc)))
7171 for (; lc2; lc2 = lc2->next)
7172 add_value_chains (old_var->dv, lc2->loc);
7173 for (; lc1; lc1 = lc1->next)
7174 remove_value_chains (old_var->dv, lc1->loc);
7176 variable_was_changed (new_var, NULL);
7178 /* Update cur_loc. */
7179 if (old_var != new_var)
7182 for (i = 0; i < new_var->n_var_parts; i++)
7184 new_var->var_part[i].cur_loc = NULL;
7185 if (old_var->n_var_parts != new_var->n_var_parts
7186 || old_var->var_part[i].offset != new_var->var_part[i].offset)
7187 new_var->cur_loc_changed = true;
/* Carry the old cur_loc over when it still exists in the new
   chain, so an unchanged note is not re-emitted.  */
7188 else if (old_var->var_part[i].cur_loc != NULL)
7191 rtx cur_loc = old_var->var_part[i].cur_loc;
7193 for (lc = new_var->var_part[i].loc_chain; lc; lc = lc->next)
7194 if (lc->loc == cur_loc
7195 || rtx_equal_p (cur_loc, lc->loc))
7197 new_var->var_part[i].cur_loc = lc->loc;
7201 new_var->cur_loc_changed = true;
7206 /* Continue traversing the hash table. */
7210 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
     table DATA, i.e. the variable has newly appeared.  */
7214 emit_notes_for_differences_2 (void **slot, void *data)
7216 htab_t old_vars = (htab_t) data;
7217 variable old_var, new_var;
7219 new_var = (variable) *slot;
/* Look up the same decl/value in the OLD hash table.  */
7220 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
7221 dv_htab_hash (new_var->dv));
7225 /* Variable has appeared.  */
7226 if (dv_onepart_p (new_var->dv))
/* One-part variables maintain value chains; register the chains of all
   the newly live locations.  */
7230 gcc_assert (new_var->n_var_parts == 1);
7231 for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
7232 add_value_chains (new_var->dv, lc->loc);
/* Force re-selection of cur_loc when the note is emitted.  */
7234 for (i = 0; i < new_var->n_var_parts; i++)
7235 new_var->var_part[i].cur_loc = NULL;
7236 variable_was_changed (new_var, NULL);
7239 /* Continue traversing the hash table.  */
7243 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
     NEW_SET.  */
7247 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
7248 dataflow_set *new_set)
/* Queue variables that changed or disappeared ...  */
7250 htab_traverse (shared_hash_htab (old_set->vars),
7251 emit_notes_for_differences_1,
7252 shared_hash_htab (new_set->vars));
/* ... then those that newly appeared ...  */
7253 htab_traverse (shared_hash_htab (new_set->vars),
7254 emit_notes_for_differences_2,
7255 shared_hash_htab (old_set->vars));
/* ... and emit one note per queued variable before INSN.  */
7256 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
7259 /* Emit the notes for changes of location parts in the basic block BB.
     SET is scratch dataflow state: it is seeded from BB's IN set and updated
     as each micro operation is replayed.  */
7262 emit_notes_in_bb (basic_block bb, dataflow_set *set)
7266 dataflow_set_clear (set);
7267 dataflow_set_copy (set, &VTI (bb)->in);
/* Replay the block's micro operations in order.  NOTE(review): the
   switch's case labels are elided in this excerpt; the comments below
   describe what each visible arm does, inferred from the calls it makes.  */
7269 for (i = 0; i < VTI (bb)->n_mos; i++)
7271 rtx insn = VTI (bb)->mos[i].insn;
7273 switch (VTI (bb)->mos[i].type)
/* Call insn: locations live in call-clobbered registers die.  */
7276 dataflow_set_clear_at_call (set);
7277 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
/* Use of a register or memory location — recorded as uninitialized.  */
7282 rtx loc = VTI (bb)->mos[i].u.loc;
7285 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
7287 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
7289 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
/* Debug insn binding a value to a declaration (VAR_LOCATION pattern).  */
7295 rtx loc = VTI (bb)->mos[i].u.loc;
7299 if (GET_CODE (loc) == CONCAT)
7301 val = XEXP (loc, 0);
7302 vloc = XEXP (loc, 1);
7310 var = PAT_VAR_LOCATION_DECL (vloc);
/* An empty location clobbers the binding ...  */
7312 clobber_variable_part (set, NULL_RTX,
7313 dv_from_decl (var), 0, NULL_RTX);
/* ... otherwise resolve the value and record it as the decl's location.  */
7316 if (VAL_NEEDS_RESOLUTION (loc))
7317 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
7318 set_variable_part (set, val, dv_from_decl (var), 0,
7319 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
7323 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
/* Use of a tracked value.  */
7329 rtx loc = VTI (bb)->mos[i].u.loc;
7330 rtx val, vloc, uloc;
7332 vloc = uloc = XEXP (loc, 1);
7333 val = XEXP (loc, 0);
7335 if (GET_CODE (val) == CONCAT)
7337 uloc = XEXP (val, 1);
7338 val = XEXP (val, 0);
7341 if (VAL_NEEDS_RESOLUTION (loc))
7342 val_resolve (set, val, vloc, insn);
/* NOTE(review): the guard separating val_resolve from val_store
   (presumably an else) is elided in this excerpt.  */
7344 val_store (set, val, uloc, insn, false);
7346 if (VAL_HOLDS_TRACK_EXPR (loc))
7348 if (GET_CODE (uloc) == REG)
7349 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
7351 else if (GET_CODE (uloc) == MEM)
7352 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
7356 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
/* Store into a tracked value.  */
7362 rtx loc = VTI (bb)->mos[i].u.loc;
7363 rtx val, vloc, uloc, reverse = NULL_RTX;
/* Peel off an optional reverse operation recorded alongside LOC.  */
7366 if (VAL_EXPR_HAS_REVERSE (loc))
7368 reverse = XEXP (loc, 1);
7369 vloc = XEXP (loc, 0);
7371 uloc = XEXP (vloc, 1);
7372 val = XEXP (vloc, 0);
7375 if (GET_CODE (val) == CONCAT)
7377 vloc = XEXP (val, 1);
7378 val = XEXP (val, 0);
7381 if (GET_CODE (vloc) == SET)
7383 rtx vsrc = SET_SRC (vloc);
7385 gcc_assert (val != vsrc);
7386 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
7388 vloc = SET_DEST (vloc);
7390 if (VAL_NEEDS_RESOLUTION (loc))
7391 val_resolve (set, val, vsrc, insn);
7393 else if (VAL_NEEDS_RESOLUTION (loc))
7395 gcc_assert (GET_CODE (uloc) == SET
7396 && GET_CODE (SET_SRC (uloc)) == REG);
7397 val_resolve (set, val, SET_SRC (uloc), insn);
7400 if (VAL_HOLDS_TRACK_EXPR (loc))
7402 if (VAL_EXPR_IS_CLOBBERED (loc))
/* Clobber: the tracked expression's old location dies.  */
7405 var_reg_delete (set, uloc, true);
7406 else if (MEM_P (uloc))
7407 var_mem_delete (set, uloc, true);
7411 bool copied_p = VAL_EXPR_IS_COPIED (loc);
7413 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
7415 if (GET_CODE (uloc) == SET)
7417 set_src = SET_SRC (uloc);
7418 uloc = SET_DEST (uloc);
/* For a copy, inherit the initialization status and the ultimate
   source of the copied value.  */
7423 status = find_src_status (set, set_src);
7425 set_src = find_src_set_src (set, set_src);
7429 var_reg_delete_and_set (set, uloc, !copied_p,
7431 else if (MEM_P (uloc))
7432 var_mem_delete_and_set (set, uloc, !copied_p,
7436 else if (REG_P (uloc))
7437 var_regno_delete (set, REGNO (uloc));
7439 val_store (set, val, vloc, insn, true);
/* Record the reverse operation too, if one was attached.  */
7442 val_store (set, XEXP (reverse, 0), XEXP (reverse, 1),
/* Notes are placed before the NEXT insn so they reflect the
   post-store state.  */
7445 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
/* Plain set of a register or memory location.  */
7452 rtx loc = VTI (bb)->mos[i].u.loc;
7455 if (GET_CODE (loc) == SET)
7457 set_src = SET_SRC (loc);
7458 loc = SET_DEST (loc);
7462 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7465 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7468 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
/* Copy: like a set, but the initialization status comes from the source.  */
7475 rtx loc = VTI (bb)->mos[i].u.loc;
7476 enum var_init_status src_status;
7479 if (GET_CODE (loc) == SET)
7481 set_src = SET_SRC (loc);
7482 loc = SET_DEST (loc);
7485 src_status = find_src_status (set, set_src);
7486 set_src = find_src_set_src (set, set_src);
7489 var_reg_delete_and_set (set, loc, false, src_status, set_src);
7491 var_mem_delete_and_set (set, loc, false, src_status, set_src);
7493 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
/* Use with no associated variable: delete without clobbering.  */
7500 rtx loc = VTI (bb)->mos[i].u.loc;
7503 var_reg_delete (set, loc, false);
7505 var_mem_delete (set, loc, false);
7507 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
/* Clobber: delete the location including overlapping parts.  */
7513 rtx loc = VTI (bb)->mos[i].u.loc;
7516 var_reg_delete (set, loc, true);
7518 var_mem_delete (set, loc, true);
7520 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
/* Stack adjustment: keep the running frame offset up to date.  */
7526 set->stack_adjust += VTI (bb)->mos[i].u.adjust;
7532 /* Emit notes for the whole function.  */
7535 vt_emit_notes (void)
7540 #ifdef ENABLE_RTL_CHECKING
7541 emitted_notes = pointer_map_create ();
7543 gcc_assert (!htab_elements (changed_variables));
7545 /* Free memory occupied by the out hash tables, as they aren't used
     anymore.  */
7548 dataflow_set_clear (&VTI (bb)->out);
7550 /* Enable emitting notes by functions (mainly by set_variable_part and
7551 delete_variable_part).  */
7554 if (MAY_HAVE_DEBUG_INSNS)
/* Register value chains for every preserved cselib value and allocate
   the scratch stacks used while emitting notes.  */
7559 for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
7560 add_cselib_value_chains (dv_from_value (val));
7561 changed_variables_stack = VEC_alloc (variable, heap, 40);
7562 changed_values_stack = VEC_alloc (rtx, heap, 40);
7565 dataflow_set_init (&cur);
/* Walk the basic blocks (the loop header is elided in this excerpt).  */
7569 /* Emit the notes for changes of variable locations between two
7570 subsequent basic blocks.  */
7571 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
7573 /* Emit the notes for the changes in the basic block itself.  */
7574 emit_notes_in_bb (bb, &cur);
7576 /* Free memory occupied by the in hash table, we won't need it
     again.  */
7578 dataflow_set_clear (&VTI (bb)->in);
7580 #ifdef ENABLE_CHECKING
/* Sanity check: diffing the final set against the empty set must queue
   nothing new — all variables should have gone out of scope.  */
7581 htab_traverse (shared_hash_htab (cur.vars),
7582 emit_notes_for_differences_1,
7583 shared_hash_htab (empty_shared_hash));
7584 if (MAY_HAVE_DEBUG_INSNS)
7589 for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
7590 remove_cselib_value_chains (dv_from_value (val));
7591 gcc_assert (htab_elements (value_chains) == 0);
7594 dataflow_set_destroy (&cur);
7596 if (MAY_HAVE_DEBUG_INSNS)
7598 VEC_free (variable, heap, changed_variables_stack);
7599 VEC_free (rtx, heap, changed_values_stack);
7602 #ifdef ENABLE_RTL_CHECKING
7603 pointer_map_destroy (emitted_notes);
7608 /* If there is a declaration and offset associated with register/memory RTL
7609 assign declaration to *DECLP and offset to *OFFSETP, and return true.  */
7612 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
/* Register case: take the decl and offset from the REG attributes.  */
7616 if (REG_ATTRS (rtl))
7618 *declp = REG_EXPR (rtl);
7619 *offsetp = REG_OFFSET (rtl);
/* Memory case: take them from the MEM attributes instead.  */
7623 else if (MEM_P (rtl))
7625 if (MEM_ATTRS (rtl))
7627 *declp = MEM_EXPR (rtl);
7628 *offsetp = INT_MEM_OFFSET (rtl);
7635 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK.  */
7638 vt_add_function_parameters (void)
7642 for (parm = DECL_ARGUMENTS (current_function_decl);
7643 parm; parm = TREE_CHAIN (parm))
7645 rtx decl_rtl = DECL_RTL_IF_SET (parm);
7646 rtx incoming = DECL_INCOMING_RTL (parm);
7648 enum machine_mode mode;
7649 HOST_WIDE_INT offset;
/* Skip anything that is not a named parameter with known RTL.  */
7653 if (TREE_CODE (parm) != PARM_DECL)
7656 if (!DECL_NAME (parm))
7659 if (!decl_rtl || !incoming)
7662 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
7665 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
7667 if (REG_P (incoming) || MEM_P (incoming))
7669 /* This means argument is passed by invisible reference.  */
7672 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
7676 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
/* Account for the lowpart subreg when the incoming mode is narrower
   than the declared mode.  */
7678 offset += byte_lowpart_offset (GET_MODE (incoming),
7679 GET_MODE (decl_rtl));
7688 /* Assume that DECL_RTL was a pseudo that got spilled to
7689 memory.  The spill slot sharing code will force the
7690 memory to reference spill_slot_decl (%sfp), so we don't
7691 match above.  That's ok, the pseudo must have referenced
7692 the entire parameter, so just reset OFFSET.  */
7693 gcc_assert (decl == get_spill_slot_decl (false));
7697 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
7700 out = &VTI (ENTRY_BLOCK_PTR)->out;
7702 dv = dv_from_decl (parm);
7704 if (target_for_debug_bind (parm)
7705 /* We can't deal with these right now, because this kind of
7706 variable is single-part.  ??? We could handle parallels
7707 that describe multiple locations for the same single
7708 value, but ATM we don't.  */
7709 && GET_CODE (incoming) != PARALLEL)
7713 /* ??? We shouldn't ever hit this, but it may happen because
7714 arguments passed by invisible reference aren't dealt with
7715 above: incoming-rtl will have Pmode rather than the
7716 expected mode for the type.  */
7720 val = cselib_lookup (var_lowpart (mode, incoming), mode, true);
7722 /* ??? Float-typed values in memory are not handled by
     cselib.  */
7726 preserve_value (val);
/* Bind the cselib value to the parameter, then track the value itself
   from here on instead of the decl.  */
7727 set_variable_part (out, val->val_rtx, dv, offset,
7728 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7729 dv = dv_from_value (val->val_rtx);
7733 if (REG_P (incoming))
7735 incoming = var_lowpart (mode, incoming);
7736 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
/* Record the register location in the ENTRY block's OUT set.  */
7737 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
7739 set_variable_part (out, incoming, dv, offset,
7740 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7742 else if (MEM_P (incoming))
7744 incoming = var_lowpart (mode, incoming);
7745 set_variable_part (out, incoming, dv, offset,
7746 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7750 if (MAY_HAVE_DEBUG_INSNS)
/* Keep only the preserved values in the cselib tables.  */
7752 cselib_preserve_only_values (true);
7753 cselib_reset_table (cselib_get_next_uid ());
7758 /* Allocate and initialize the data structures for variable tracking
7759 and parse the RTL to get the micro operations.  */
7762 vt_initialize (void)
7766 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
7768 if (MAY_HAVE_DEBUG_INSNS)
/* Debug-insn mode: allocate cselib scratch state.  */
7771 scratch_regs = BITMAP_ALLOC (NULL);
7772 valvar_pool = create_alloc_pool ("small variable_def pool",
7773 sizeof (struct variable_def), 256);
7774 preserved_values = VEC_alloc (rtx, heap, 256);
7778 scratch_regs = NULL;
/* NOTE(review): the outer loop header over basic blocks is elided in
   this excerpt.  Each iteration covers a run of fallthrough blocks and
   scans it twice: first to count micro operations, then to record them.  */
7785 HOST_WIDE_INT pre, post = 0;
7786 unsigned int next_uid_before = cselib_get_next_uid ();
7787 unsigned int next_uid_after = next_uid_before;
7788 basic_block first_bb, last_bb;
7790 if (MAY_HAVE_DEBUG_INSNS)
7792 cselib_record_sets_hook = count_with_sets;
7793 if (dump_file && (dump_flags & TDF_DETAILS))
7794 fprintf (dump_file, "first value: %i\n",
7795 cselib_get_next_uid ());
/* Extend the region while the next block is the unique fallthrough
   successor.  */
7802 if (bb->next_bb == EXIT_BLOCK_PTR
7803 || ! single_pred_p (bb->next_bb))
7805 e = find_edge (bb, bb->next_bb);
7806 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
7812 /* Count the number of micro operations.  */
7813 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
7815 VTI (bb)->n_mos = 0;
7816 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
7817 insn = NEXT_INSN (insn))
7821 if (!frame_pointer_needed)
/* Pre/post stack adjustments each contribute a MO_ADJUST.  */
7823 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
7827 if (dump_file && (dump_flags & TDF_DETAILS))
7828 log_op_type (GEN_INT (pre), bb, insn,
7829 MO_ADJUST, dump_file);
7834 if (dump_file && (dump_flags & TDF_DETAILS))
7835 log_op_type (GEN_INT (post), bb, insn,
7836 MO_ADJUST, dump_file);
/* Let the cselib hook count the sets; fall back to counting
   directly when the hook was not invoked for this insn.  */
7839 cselib_hook_called = false;
7840 if (MAY_HAVE_DEBUG_INSNS)
7842 cselib_process_insn (insn);
7843 if (dump_file && (dump_flags & TDF_DETAILS))
7845 print_rtl_single (dump_file, insn);
7846 dump_cselib_table (dump_file);
7849 if (!cselib_hook_called)
7850 count_with_sets (insn, 0, 0);
7854 if (dump_file && (dump_flags & TDF_DETAILS))
7855 log_op_type (PATTERN (insn), bb, insn,
7856 MO_CALL, dump_file);
/* Prepare the second pass: rewind cselib so replaying the region
   regenerates the same value uids.  */
7862 if (MAY_HAVE_DEBUG_INSNS)
7864 cselib_preserve_only_values (false);
7865 next_uid_after = cselib_get_next_uid ();
7866 cselib_reset_table (next_uid_before);
7867 cselib_record_sets_hook = add_with_sets;
7868 if (dump_file && (dump_flags & TDF_DETAILS))
7869 fprintf (dump_file, "first value: %i\n",
7870 cselib_get_next_uid ());
7873 /* Add the micro-operations to the array.  */
7874 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
7876 int count = VTI (bb)->n_mos;
7877 VTI (bb)->mos = XNEWVEC (micro_operation, count);
7878 VTI (bb)->n_mos = 0;
7879 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
7880 insn = NEXT_INSN (insn))
7884 if (!frame_pointer_needed)
7886 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
7890 = VTI (bb)->mos + VTI (bb)->n_mos++;
7892 mo->type = MO_ADJUST;
7896 if (dump_file && (dump_flags & TDF_DETAILS))
7897 log_op_type (PATTERN (insn), bb, insn,
7898 MO_ADJUST, dump_file);
7902 cselib_hook_called = false;
7903 if (MAY_HAVE_DEBUG_INSNS)
7905 cselib_process_insn (insn);
7906 if (dump_file && (dump_flags & TDF_DETAILS))
7908 print_rtl_single (dump_file, insn);
7909 dump_cselib_table (dump_file);
7912 if (!cselib_hook_called)
7913 add_with_sets (insn, 0, 0);
7915 if (!frame_pointer_needed && post)
7917 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
7919 mo->type = MO_ADJUST;
7920 mo->u.adjust = post;
7923 if (dump_file && (dump_flags & TDF_DETAILS))
7924 log_op_type (PATTERN (insn), bb, insn,
7925 MO_ADJUST, dump_file);
/* Both passes must agree on the number of micro operations.  */
7929 gcc_assert (count == VTI (bb)->n_mos);
7934 if (MAY_HAVE_DEBUG_INSNS)
7936 cselib_preserve_only_values (true);
/* Replaying must have produced exactly the same value uids.  */
7937 gcc_assert (next_uid_after == cselib_get_next_uid ());
7938 cselib_reset_table (next_uid_after);
7939 cselib_record_sets_hook = NULL;
/* Allocate the pools and hash tables used by the dataflow phase.  */
7943 attrs_pool = create_alloc_pool ("attrs_def pool",
7944 sizeof (struct attrs_def), 1024);
7945 var_pool = create_alloc_pool ("variable_def pool",
7946 sizeof (struct variable_def)
7947 + (MAX_VAR_PARTS - 1)
7948 * sizeof (((variable)NULL)->var_part[0]), 64);
7949 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
7950 sizeof (struct location_chain_def),
7952 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
7953 sizeof (struct shared_hash_def), 256);
7954 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
7955 empty_shared_hash->refcount = 1;
7956 empty_shared_hash->htab
7957 = htab_create (1, variable_htab_hash, variable_htab_eq,
7958 variable_htab_free);
7959 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
7960 variable_htab_free);
7961 if (MAY_HAVE_DEBUG_INSNS)
7963 value_chain_pool = create_alloc_pool ("value_chain_def pool",
7964 sizeof (struct value_chain_def),
7966 value_chains = htab_create (32, value_chain_htab_hash,
7967 value_chain_htab_eq, NULL);
7970 /* Init the IN and OUT sets.  */
7973 VTI (bb)->visited = false;
7974 VTI (bb)->flooded = false;
7975 dataflow_set_init (&VTI (bb)->in);
7976 dataflow_set_init (&VTI (bb)->out);
7977 VTI (bb)->permp = NULL;
/* The entry block needs no flooding; seed it with the parameters.  */
7980 VTI (ENTRY_BLOCK_PTR)->flooded = true;
7981 vt_add_function_parameters ();
7984 /* Get rid of all debug insns from the insn stream.  */
7987 delete_debug_insns (void)
7992 if (!MAY_HAVE_DEBUG_INSNS)
/* Walk every insn in every basic block and delete the debug insns
   (the FOR_EACH_BB loop header is elided in this excerpt).  */
7997 FOR_BB_INSNS_SAFE (bb, insn, next)
7998 if (DEBUG_INSN_P (insn))
8003 /* Run a fast, BB-local only version of var tracking, to take care of
8004 information that we don't do global analysis on, such that not all
8005 information is lost.  If SKIPPED holds, we're skipping the global
8006 pass entirely, so we should try to use information it would have
8007 handled as well.  */
8010 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
8012 /* ??? Just skip it all for now.  */
8013 delete_debug_insns ();
8016 /* Free the data structures needed for variable tracking.  */
/* NOTE(review): the function header itself (presumably vt_finalize) is
   elided in this excerpt; the statements below free the per-block micro
   operations and dataflow sets, then the global pools and tables.  */
8025 free (VTI (bb)->mos);
8030 dataflow_set_destroy (&VTI (bb)->in);
8031 dataflow_set_destroy (&VTI (bb)->out);
8032 if (VTI (bb)->permp)
8034 dataflow_set_destroy (VTI (bb)->permp);
8035 XDELETE (VTI (bb)->permp);
8038 free_aux_for_blocks ();
8039 htab_delete (empty_shared_hash->htab);
8040 htab_delete (changed_variables);
8041 free_alloc_pool (attrs_pool);
8042 free_alloc_pool (var_pool);
8043 free_alloc_pool (loc_chain_pool);
8044 free_alloc_pool (shared_hash_pool);
/* Debug-insn-only state is released last.  */
8046 if (MAY_HAVE_DEBUG_INSNS)
8048 htab_delete (value_chains);
8049 free_alloc_pool (value_chain_pool);
8050 free_alloc_pool (valvar_pool);
8051 VEC_free (rtx, heap, preserved_values);
8053 BITMAP_FREE (scratch_regs);
8054 scratch_regs = NULL;
8058 XDELETEVEC (vui_vec);
8063 /* The entry point to variable tracking pass.  */
8065 static inline unsigned int
8066 variable_tracking_main_1 (void)
/* A negative flag value means assignments tracking is off; drop the
   debug insns up front.  */
8070 if (flag_var_tracking_assignments < 0)
8072 delete_debug_insns ();
/* Give up on very large, densely connected CFGs to bound compile time.  */
8076 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
8078 vt_debug_insns_local (true);
8082 mark_dfs_back_edges ();
8084 if (!frame_pointer_needed)
8086 if (!vt_stack_adjustments ())
8089 vt_debug_insns_local (true);
8094 success = vt_find_locations ();
/* If the dataflow analysis failed with assignment tracking enabled,
   retry once with it disabled.  */
8096 if (!success && flag_var_tracking_assignments > 0)
8100 delete_debug_insns ();
8102 /* This is later restored by our caller.  */
8103 flag_var_tracking_assignments = 0;
8107 if (!frame_pointer_needed && !vt_stack_adjustments ())
8110 success = vt_find_locations ();
8116 vt_debug_insns_local (false);
8120 if (dump_file && (dump_flags & TDF_DETAILS))
8122 dump_dataflow_sets ();
8123 dump_flow_info (dump_file, dump_flags);
8129 vt_debug_insns_local (false);
/* Pass wrapper: run the tracker while preserving the user-visible flag,
   since variable_tracking_main_1 may clear it on its retry path.  */
8134 variable_tracking_main (void)
8137 int save = flag_var_tracking_assignments;
8139 ret = variable_tracking_main_1 ();
8141 flag_var_tracking_assignments = save;
/* Pass gate: run only when -fvar-tracking is enabled.  */
8147 gate_handle_var_tracking (void)
8149 return (flag_var_tracking);
8154 struct rtl_opt_pass pass_variable_tracking =
8158 "vartrack", /* name */
8159 gate_handle_var_tracking, /* gate */
8160 variable_tracking_main, /* execute */
8163 0, /* static_pass_number */
8164 TV_VAR_TRACKING, /* tv_id */
8165 0, /* properties_required */
8166 0, /* properties_provided */
8167 0, /* properties_destroyed */
8168 0, /* todo_flags_start */
8169 TODO_dump_func | TODO_verify_rtl_sharing/* todo_flags_finish */