1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 < set < clobber < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offset of variables addressed using stack pointer),
48 the table of structures describing the locations of parts of a variable
49 and a linked list for each physical register.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 effectively deleting the appropriate variable parts when we set or clobber the
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short so it is a good data structure here.
58 For example in the following code, register allocator may assign same
59 register to variables A and B, and both of them are stored in the same
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in RTL code. Each such note describes
74 the location of one variable at the point in instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction, we only emit these notes where the location of variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for large variables
85 which consist of several parts, for example long long).
91 #include "coretypes.h"
95 #include "hard-reg-set.h"
96 #include "basic-block.h"
99 #include "insn-config.h"
102 #include "alloc-pool.h"
108 #include "tree-pass.h"
109 #include "tree-flow.h"
113 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
114 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
115 Currently the value is the same as IDENTIFIER_NODE, which has such
116 a property. If this compile time assertion ever fails, make sure that
117 the new tree code that equals (int) VALUE has the same property. */
118 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
120 /* Type of micro operation. */
121 enum micro_operation_type
123 MO_USE, /* Use location (REG or MEM). */
124 MO_USE_NO_VAR,/* Use location which is not associated with a variable
125 or the variable is not trackable. */
126 MO_VAL_USE, /* Use location which is associated with a value. */
127 MO_VAL_LOC, /* Use location which appears in a debug insn. */
128 MO_VAL_SET, /* Set location associated with a value. */
129 MO_SET, /* Set location. */
130 MO_COPY, /* Copy the same portion of a variable from one
131 location to another. */
132 MO_CLOBBER, /* Clobber location. */
133 MO_CALL, /* Call insn. */
134 MO_ADJUST /* Adjust stack pointer. */
138 static const char * const ATTRIBUTE_UNUSED
139 micro_operation_type_name[] = {
152 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
153 Notes emitted as AFTER_CALL are to take effect during the call,
154 rather than after the call. */
157 EMIT_NOTE_BEFORE_INSN,
158 EMIT_NOTE_AFTER_INSN,
159 EMIT_NOTE_AFTER_CALL_INSN
162 /* Structure holding information about micro operation. */
163 typedef struct micro_operation_def
165 /* Type of micro operation. */
166 enum micro_operation_type type;
169 /* Location. For MO_SET and MO_COPY, this is the SET that
170 performs the assignment, if known, otherwise it is the target
171 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
172 CONCAT of the VALUE and the LOC associated with it. For
173 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
174 associated with it. */
177 /* Stack adjustment. */
178 HOST_WIDE_INT adjust;
181 /* The instruction which the micro operation is in, for MO_USE,
182 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
183 instruction or note in the original flow (before any var-tracking
184 notes are inserted, to simplify emission of notes), for MO_SET
189 /* A declaration of a variable, or an RTL value being handled like a
191 typedef void *decl_or_value;
193 /* Structure for passing some other parameters to function
194 emit_note_insn_var_location. */
195 typedef struct emit_note_data_def
197 /* The instruction which the note will be emitted before/after. */
200 /* Where the note will be emitted (before/after insn)? */
201 enum emit_note_where where;
203 /* The variables and values active at this point. */
207 /* Description of location of a part of a variable. The content of a physical
208 register is described by a chain of these structures.
209 The chains are pretty short (usually 1 or 2 elements) and thus
210 chain is the best data structure. */
211 typedef struct attrs_def
213 /* Pointer to next member of the list. */
214 struct attrs_def *next;
216 /* The rtx of register. */
219 /* The declaration corresponding to LOC. */
222 /* Offset from start of DECL. */
223 HOST_WIDE_INT offset;
226 /* Structure holding a refcounted hash table. If refcount > 1,
227 it must be first unshared before modified. */
228 typedef struct shared_hash_def
230 /* Reference count. */
233 /* Actual hash table. */
237 /* Structure holding the IN or OUT set for a basic block. */
238 typedef struct dataflow_set_def
240 /* Adjustment of stack offset. */
241 HOST_WIDE_INT stack_adjust;
243 /* Attributes for registers (lists of attrs). */
244 attrs regs[FIRST_PSEUDO_REGISTER];
246 /* Variable locations. */
249 /* Vars that is being traversed. */
250 shared_hash traversed_vars;
253 /* The structure (one for each basic block) containing the information
254 needed for variable tracking. */
255 typedef struct variable_tracking_info_def
257 /* Number of micro operations stored in the MOS array. */
260 /* The array of micro operations. */
261 micro_operation *mos;
263 /* The IN and OUT set for dataflow analysis. */
267 /* The permanent-in dataflow set for this block. This is used to
268 hold values for which we had to compute entry values. ??? This
269 should probably be dynamically allocated, to avoid using more
270 memory in non-debug builds. */
273 /* Has the block been visited in DFS? */
276 /* Has the block been flooded in VTA? */
279 } *variable_tracking_info;
281 /* Structure for chaining the locations. */
282 typedef struct location_chain_def
284 /* Next element in the chain. */
285 struct location_chain_def *next;
287 /* The location (REG, MEM or VALUE). */
290 /* The "value" stored in this location. */
294 enum var_init_status init;
297 /* Structure describing one part of variable. */
298 typedef struct variable_part_def
300 /* Chain of locations of the part. */
301 location_chain loc_chain;
303 /* Location which was last emitted to location list. */
306 /* The offset in the variable. */
307 HOST_WIDE_INT offset;
310 /* Maximum number of location parts. */
311 #define MAX_VAR_PARTS 16
313 /* Structure describing where the variable is located. */
314 typedef struct variable_def
316 /* The declaration of the variable, or an RTL value being handled
317 like a declaration. */
320 /* Reference count. */
323 /* Number of variable parts. */
326 /* The variable parts. */
327 variable_part var_part[1];
329 typedef const struct variable_def *const_variable;
331 /* Structure for chaining backlinks from referenced VALUEs to
332 DVs that are referencing them. */
333 typedef struct value_chain_def
335 /* Next value_chain entry. */
336 struct value_chain_def *next;
338 /* The declaration of the variable, or an RTL value
339 being handled like a declaration, whose var_parts[0].loc_chain
340 references the VALUE owning this value_chain. */
343 /* Reference count. */
346 typedef const struct value_chain_def *const_value_chain;
348 /* Hash function for DECL for VARIABLE_HTAB. */
349 #define VARIABLE_HASH_VAL(decl) (DECL_UID (decl))
351 /* Pointer to the BB's information specific to variable tracking pass. */
352 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
354 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
355 #define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
357 /* Alloc pool for struct attrs_def. */
358 static alloc_pool attrs_pool;
360 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
361 static alloc_pool var_pool;
363 /* Alloc pool for struct variable_def with a single var_part entry. */
364 static alloc_pool valvar_pool;
366 /* Alloc pool for struct location_chain_def. */
367 static alloc_pool loc_chain_pool;
369 /* Alloc pool for struct shared_hash_def. */
370 static alloc_pool shared_hash_pool;
372 /* Alloc pool for struct value_chain_def. */
373 static alloc_pool value_chain_pool;
375 /* Changed variables, notes will be emitted for them. */
376 static htab_t changed_variables;
378 /* Links from VALUEs to DVs referencing them in their current loc_chains. */
379 static htab_t value_chains;
381 /* Shall notes be emitted? */
382 static bool emit_notes;
384 /* Empty shared hashtable. */
385 static shared_hash empty_shared_hash;
387 /* Scratch register bitmap used by cselib_expand_value_rtx. */
388 static bitmap scratch_regs = NULL;
390 /* Variable used to tell whether cselib_process_insn called our hook. */
391 static bool cselib_hook_called;
393 /* Local function prototypes. */
394 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
396 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
398 static void bb_stack_adjust_offset (basic_block);
399 static bool vt_stack_adjustments (void);
400 static rtx adjust_stack_reference (rtx, HOST_WIDE_INT);
401 static hashval_t variable_htab_hash (const void *);
402 static int variable_htab_eq (const void *, const void *);
403 static void variable_htab_free (void *);
405 static void init_attrs_list_set (attrs *);
406 static void attrs_list_clear (attrs *);
407 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
408 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
409 static void attrs_list_copy (attrs *, attrs);
410 static void attrs_list_union (attrs *, attrs);
412 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
413 enum var_init_status);
414 static int vars_copy_1 (void **, void *);
415 static void vars_copy (htab_t, htab_t);
416 static tree var_debug_decl (tree);
417 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
418 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
419 enum var_init_status, rtx);
420 static void var_reg_delete (dataflow_set *, rtx, bool);
421 static void var_regno_delete (dataflow_set *, int);
422 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
423 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
424 enum var_init_status, rtx);
425 static void var_mem_delete (dataflow_set *, rtx, bool);
427 static void dataflow_set_init (dataflow_set *);
428 static void dataflow_set_clear (dataflow_set *);
429 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
430 static int variable_union_info_cmp_pos (const void *, const void *);
431 static int variable_union (void **, void *);
432 static int variable_canonicalize (void **, void *);
433 static void dataflow_set_union (dataflow_set *, dataflow_set *);
434 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
435 static bool canon_value_cmp (rtx, rtx);
436 static int loc_cmp (rtx, rtx);
437 static bool variable_part_different_p (variable_part *, variable_part *);
438 static bool onepart_variable_different_p (variable, variable);
439 static bool variable_different_p (variable, variable, bool);
440 static int dataflow_set_different_1 (void **, void *);
441 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
442 static void dataflow_set_destroy (dataflow_set *);
444 static bool contains_symbol_ref (rtx);
445 static bool track_expr_p (tree, bool);
446 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
447 static int count_uses (rtx *, void *);
448 static void count_uses_1 (rtx *, void *);
449 static void count_stores (rtx, const_rtx, void *);
450 static int add_uses (rtx *, void *);
451 static void add_uses_1 (rtx *, void *);
452 static void add_stores (rtx, const_rtx, void *);
453 static bool compute_bb_dataflow (basic_block);
454 static void vt_find_locations (void);
456 static void dump_attrs_list (attrs);
457 static int dump_var_slot (void **, void *);
458 static void dump_var (variable);
459 static void dump_vars (htab_t);
460 static void dump_dataflow_set (dataflow_set *);
461 static void dump_dataflow_sets (void);
463 static void variable_was_changed (variable, dataflow_set *);
464 static void **set_slot_part (dataflow_set *, rtx, void **,
465 decl_or_value, HOST_WIDE_INT,
466 enum var_init_status, rtx);
467 static void set_variable_part (dataflow_set *, rtx,
468 decl_or_value, HOST_WIDE_INT,
469 enum var_init_status, rtx, enum insert_option);
470 static void **clobber_slot_part (dataflow_set *, rtx,
471 void **, HOST_WIDE_INT, rtx);
472 static void clobber_variable_part (dataflow_set *, rtx,
473 decl_or_value, HOST_WIDE_INT, rtx);
474 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
475 static void delete_variable_part (dataflow_set *, rtx,
476 decl_or_value, HOST_WIDE_INT);
477 static int emit_note_insn_var_location (void **, void *);
478 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
479 static int emit_notes_for_differences_1 (void **, void *);
480 static int emit_notes_for_differences_2 (void **, void *);
481 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
482 static void emit_notes_in_bb (basic_block, dataflow_set *);
483 static void vt_emit_notes (void);
485 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
486 static void vt_add_function_parameters (void);
487 static void vt_initialize (void);
488 static void vt_finalize (void);
490 /* Given a SET, calculate the amount of stack adjustment it contains
491 PRE- and POST-modifying stack pointer.
492 This function is similar to stack_adjust_offset. */
/* PRE accumulates adjustments that take effect before the insn
   executes (pre-modify addressing); POST accumulates adjustments that
   take effect after it.  Both counters are caller-supplied.
   NOTE(review): this listing is elided -- braces, the switch over
   CODE, case labels and early returns are missing between the
   numbered lines below.  */
495 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
498 rtx src = SET_SRC (pattern);
499 rtx dest = SET_DEST (pattern);
/* Case 1: a direct SP update.  */
502 if (dest == stack_pointer_rtx)
504 /* (set (reg sp) (plus (reg sp) (const_int))) */
505 code = GET_CODE (src);
/* Only SP +/- constant is recognized; anything else is ignored
   (the early-return is elided).  */
506 if (! (code == PLUS || code == MINUS)
507 || XEXP (src, 0) != stack_pointer_rtx
508 || !CONST_INT_P (XEXP (src, 1)))
/* A plain SP assignment takes effect after the insn: record in POST.  */
512 *post += INTVAL (XEXP (src, 1));
514 *post -= INTVAL (XEXP (src, 1));
/* Case 2: SP modified via auto-modify addressing of a store.  */
516 else if (MEM_P (dest))
518 /* (set (mem (pre_dec (reg sp))) (foo)) */
519 src = XEXP (dest, 0);
520 code = GET_CODE (src);
/* PRE_MODIFY / POST_MODIFY: (pre_modify (reg sp) (plus sp const)).  */
526 if (XEXP (src, 0) == stack_pointer_rtx)
528 rtx val = XEXP (XEXP (src, 1), 1);
529 /* We handle only adjustments by constant amount. */
530 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
533 if (code == PRE_MODIFY)
534 *pre -= INTVAL (val);
536 *post -= INTVAL (val);
/* The four arms below presumably correspond to the PRE_DEC, POST_DEC,
   PRE_INC and POST_INC case labels (elided) -- TODO confirm against
   the full source.  */
542 if (XEXP (src, 0) == stack_pointer_rtx)
544 *pre += GET_MODE_SIZE (GET_MODE (dest));
550 if (XEXP (src, 0) == stack_pointer_rtx)
552 *post += GET_MODE_SIZE (GET_MODE (dest));
558 if (XEXP (src, 0) == stack_pointer_rtx)
560 *pre -= GET_MODE_SIZE (GET_MODE (dest));
566 if (XEXP (src, 0) == stack_pointer_rtx)
568 *post -= GET_MODE_SIZE (GET_MODE (dest));
579 /* Given an INSN, calculate the amount of stack adjustment it contains
580 PRE- and POST-modifying stack pointer. */
583 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
591 pattern = PATTERN (insn);
/* For frame-related insns a REG_FRAME_RELATED_EXPR note, when present,
   overrides the pattern as the description of the stack effect.  */
592 if (RTX_FRAME_RELATED_P (insn))
594 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
596 pattern = XEXP (expr, 0);
/* Single SET: analyze it directly.  */
599 if (GET_CODE (pattern) == SET)
600 stack_adjust_offset_pre_post (pattern, pre, post);
601 else if (GET_CODE (pattern) == PARALLEL
602 || GET_CODE (pattern) == SEQUENCE)
606 /* There may be stack adjustments inside compound insns. Search
/* Scan every element; only SETs can adjust the stack pointer.  */
608 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
609 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
610 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
614 /* Compute stack adjustment in basic block BB. */
617 bb_stack_adjust_offset (basic_block bb)
619 HOST_WIDE_INT offset;
/* Walk the micro operations in order, maintaining the running stack
   adjustment and rewriting MEM locations so that they no longer
   depend on the current SP offset.  */
622 offset = VTI (bb)->in.stack_adjust;
623 for (i = 0; i < VTI (bb)->n_mos; i++)
625 if (VTI (bb)->mos[i].type == MO_ADJUST)
626 offset += VTI (bb)->mos[i].u.adjust;
/* MO_CALL carries no location; every other type may hold a MEM.  */
627 else if (VTI (bb)->mos[i].type != MO_CALL)
629 if (MEM_P (VTI (bb)->mos[i].u.loc))
631 VTI (bb)->mos[i].u.loc
632 = adjust_stack_reference (VTI (bb)->mos[i].u.loc, -offset);
/* Record the block's outgoing adjustment for the DFS in
   vt_stack_adjustments.  */
636 VTI (bb)->out.stack_adjust = offset;
639 /* Compute stack adjustments for all blocks by traversing DFS tree.
640 Return true when the adjustments on all incoming edges are consistent.
641 Heavily borrowed from pre_and_rev_post_order_compute. */
644 vt_stack_adjustments (void)
646 edge_iterator *stack;
649 /* Initialize entry block. */
650 VTI (ENTRY_BLOCK_PTR)->visited = true;
651 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
653 /* Allocate stack for back-tracking up CFG. */
654 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
657 /* Push the first edge on to the stack. */
658 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
/* Iterative DFS over the CFG; the loop header is elided in this
   listing (presumably "while (sp)").  */
666 /* Look at the edge on the top of the stack. */
668 src = ei_edge (ei)->src;
669 dest = ei_edge (ei)->dest;
671 /* Check if the edge destination has been visited yet. */
672 if (!VTI (dest)->visited)
/* First visit: propagate SRC's outgoing adjustment into DEST and
   process DEST's micro operations.  */
674 VTI (dest)->visited = true;
675 VTI (dest)->in.stack_adjust = VTI (src)->out.stack_adjust;
676 bb_stack_adjust_offset (dest);
678 if (EDGE_COUNT (dest->succs) > 0)
679 /* Since the DEST node has been visited for the first
680 time, check its successors. */
681 stack[sp++] = ei_start (dest->succs);
685 /* Check whether the adjustments on the edges are the same. */
/* Inconsistent incoming adjustments make SP-relative tracking
   impossible; the elided code presumably frees STACK and returns
   false here -- TODO confirm.  */
686 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
692 if (! ei_one_before_end_p (ei))
693 /* Go to the next edge. */
694 ei_next (&stack[sp - 1]);
696 /* Return to previous level if there are no more edges. */
705 /* Adjust stack reference MEM by ADJUSTMENT bytes and make it relative
706 to the argument pointer. Return the new rtx. */
709 adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment)
/* Build the CFA-relative base: frame pointer when the target defines
   FRAME_POINTER_CFA_OFFSET, argument pointer otherwise (the #else
   line is elided in this listing).  */
713 #ifdef FRAME_POINTER_CFA_OFFSET
714 adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
715 cfa = plus_constant (frame_pointer_rtx, adjustment);
717 adjustment -= ARG_POINTER_CFA_OFFSET (current_function_decl);
718 cfa = plus_constant (arg_pointer_rtx, adjustment);
/* Substitute the CFA expression for SP in the address, then try to
   simplify the result.  */
721 addr = replace_rtx (copy_rtx (XEXP (mem, 0)), stack_pointer_rtx, cfa);
722 tmp = simplify_rtx (addr);
/* Elided: when simplification succeeds, ADDR is presumably replaced by
   TMP before the rewrite below -- TODO confirm against full source.  */
726 return replace_equiv_address_nv (mem, addr);
729 /* Return true if a decl_or_value DV is a DECL or NULL. */
731 dv_is_decl_p (decl_or_value dv)
733 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
736 /* Return true if a decl_or_value is a VALUE rtl. */
738 dv_is_value_p (decl_or_value dv)
740 return dv && !dv_is_decl_p (dv);
743 /* Return the decl in the decl_or_value. */
745 dv_as_decl (decl_or_value dv)
747 #ifdef ENABLE_CHECKING
748 gcc_assert (dv_is_decl_p (dv));
753 /* Return the value in the decl_or_value. */
755 dv_as_value (decl_or_value dv)
757 #ifdef ENABLE_CHECKING
758 gcc_assert (dv_is_value_p (dv));
763 /* Return the opaque pointer in the decl_or_value. */
765 dv_as_opaque (decl_or_value dv)
770 /* Return true if a decl_or_value must not have more than one variable
773 dv_onepart_p (decl_or_value dv)
777 if (!MAY_HAVE_DEBUG_INSNS)
780 if (dv_is_value_p (dv))
783 decl = dv_as_decl (dv);
788 return (target_for_debug_bind (decl) != NULL_TREE);
791 /* Return the variable pool to be used for dv, depending on whether it
792 can have multiple parts or not. */
793 static inline alloc_pool
794 dv_pool (decl_or_value dv)
796 return dv_onepart_p (dv) ? valvar_pool : var_pool;
799 /* Build a decl_or_value out of a decl. */
800 static inline decl_or_value
801 dv_from_decl (tree decl)
805 #ifdef ENABLE_CHECKING
806 gcc_assert (dv_is_decl_p (dv));
811 /* Build a decl_or_value out of a value. */
812 static inline decl_or_value
813 dv_from_value (rtx value)
817 #ifdef ENABLE_CHECKING
818 gcc_assert (dv_is_value_p (dv));
823 static inline hashval_t
824 dv_htab_hash (decl_or_value dv)
826 if (dv_is_value_p (dv))
827 return -(hashval_t)(CSELIB_VAL_PTR (dv_as_value (dv))->value);
829 return (VARIABLE_HASH_VAL (dv_as_decl (dv)));
832 /* The hash function for variable_htab, computes the hash value
833 from the declaration of variable X. */
836 variable_htab_hash (const void *x)
838 const_variable const v = (const_variable) x;
840 return dv_htab_hash (v->dv);
843 /* Compare the declaration of variable X with declaration Y. */
846 variable_htab_eq (const void *x, const void *y)
848 const_variable const v = (const_variable) x;
849 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
851 if (dv_as_opaque (v->dv) == dv_as_opaque (dv))
858 visv = dv_is_value_p (v->dv);
859 dvisv = dv_is_value_p (dv);
865 gcc_assert (CSELIB_VAL_PTR (dv_as_value (v->dv))
866 != CSELIB_VAL_PTR (dv_as_value (dv)));
868 gcc_assert (VARIABLE_HASH_VAL (dv_as_decl (v->dv))
869 != VARIABLE_HASH_VAL (dv_as_decl (dv)));
876 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
879 variable_htab_free (void *elem)
882 variable var = (variable) elem;
883 location_chain node, next;
885 gcc_assert (var->refcount > 0);
/* NOTE(review): the refcount decrement is elided between the assert
   and the test below; the test presumably returns early while other
   references remain.  */
888 if (var->refcount > 0)
/* Last reference: release every location-chain node of every part,
   then the variable itself, back to their allocation pools.  */
891 for (i = 0; i < var->n_var_parts; i++)
893 for (node = var->var_part[i].loc_chain; node; node = next)
896 pool_free (loc_chain_pool, node);
898 var->var_part[i].loc_chain = NULL;
/* dv_pool picks valvar_pool or var_pool to match the allocation.  */
900 pool_free (dv_pool (var->dv), var);
903 /* The hash function for value_chains htab, computes the hash value
907 value_chain_htab_hash (const void *x)
909 const_value_chain const v = (const_value_chain) x;
911 return dv_htab_hash (v->dv);
914 /* Compare the VALUE X with VALUE Y. */
917 value_chain_htab_eq (const void *x, const void *y)
919 const_value_chain const v = (const_value_chain) x;
920 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
922 return dv_as_opaque (v->dv) == dv_as_opaque (dv);
925 /* Initialize the set (array) SET of attrs to empty lists. */
928 init_attrs_list_set (attrs *set)
932 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
936 /* Make the list *LISTP empty. */
939 attrs_list_clear (attrs *listp)
943 for (list = *listp; list; list = next)
946 pool_free (attrs_pool, list);
951 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
954 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
956 for (; list; list = list->next)
957 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
962 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
965 attrs_list_insert (attrs *listp, decl_or_value dv,
966 HOST_WIDE_INT offset, rtx loc)
970 list = (attrs) pool_alloc (attrs_pool);
973 list->offset = offset;
978 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
981 attrs_list_copy (attrs *dstp, attrs src)
985 attrs_list_clear (dstp);
986 for (; src; src = src->next)
988 n = (attrs) pool_alloc (attrs_pool);
991 n->offset = src->offset;
997 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
/* Set union keyed on (dv, offset); the LOC of an already-present
   entry is kept, SRC's LOC is only used for new entries.  */
1000 attrs_list_union (attrs *dstp, attrs src)
1002 for (; src; src = src->next)
1004 if (!attrs_list_member (*dstp, src->dv, src->offset))
1005 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1009 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
/* *DSTP must start empty; SRC is copied unconditionally, SRC2 only
   where it does not duplicate an (dv, offset) pair already added.  */
1013 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1015 gcc_assert (!*dstp);
1016 for (; src; src = src->next)
1018 if (!dv_onepart_p (src->dv))
1019 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1021 for (src = src2; src; src = src->next)
1023 if (!dv_onepart_p (src->dv)
1024 && !attrs_list_member (*dstp, src->dv, src->offset))
1025 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1029 /* Shared hashtable support. */
1031 /* Return true if VARS is shared. */
1034 shared_hash_shared (shared_hash vars)
1036 return vars->refcount > 1;
1039 /* Return the hash table for VARS. */
1041 static inline htab_t
1042 shared_hash_htab (shared_hash vars)
1047 /* Copy variables into a new hash table. */
/* Copy-on-write split: called only while VARS is shared (refcount > 1);
   returns a fresh wrapper with refcount 1.  NOTE(review): the
   assignment target of line 1056 and the decrement of the old
   refcount are elided in this listing.  */
1050 shared_hash_unshare (shared_hash vars)
1052 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1053 gcc_assert (vars->refcount > 1);
1054 new_vars->refcount = 1;
/* Size the new table for the current element count (+3 slack) so the
   copy below does not trigger resizes.  */
1056 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1057 variable_htab_eq, variable_htab_free);
1058 vars_copy (new_vars->htab, vars->htab);
1063 /* Increment reference counter on VARS and return it. */
1065 static inline shared_hash
1066 shared_hash_copy (shared_hash vars)
1072 /* Decrement reference counter and destroy hash table if not shared
1076 shared_hash_destroy (shared_hash vars)
1078 gcc_assert (vars->refcount > 0);
/* Last reference gone: free the underlying htab (which frees its
   variables via variable_htab_free) and the wrapper itself.  */
1079 if (--vars->refcount == 0)
1081 htab_delete (vars->htab);
1082 pool_free (shared_hash_pool, vars);
1086 /* Unshare *PVARS if shared and return slot for DV. If INS is
1087 INSERT, insert it if not already present. */
1089 static inline void **
1090 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1091 hashval_t dvhash, enum insert_option ins)
1093 if (shared_hash_shared (*pvars))
1094 *pvars = shared_hash_unshare (*pvars);
1095 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1098 static inline void **
1099 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1100 enum insert_option ins)
1102 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1105 /* Return slot for DV, if it is already present in the hash table.
1106 If it is not present, insert it only if VARS is not shared, otherwise
1109 static inline void **
1110 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1112 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1113 shared_hash_shared (vars)
1114 ? NO_INSERT : INSERT);
1117 static inline void **
1118 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1120 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1123 /* Return slot for DV only if it is already present in the hash table. */
1125 static inline void **
1126 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1129 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1133 static inline void **
1134 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1136 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1139 /* Return variable for DV or NULL if not already present in the hash
1142 static inline variable
1143 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1145 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1148 static inline variable
1149 shared_hash_find (shared_hash vars, decl_or_value dv)
1151 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1154 /* Determine a total order between two distinct pointers. Compare the
1155 pointers as integral types if size_t is wide enough, otherwise
1156 resort to bitwise memory compare. The actual order does not
1157 matter, we just need to be consistent, so endianness is
1161 tie_break_pointers (const void *p1, const void *p2)
1163 gcc_assert (p1 != p2);
/* Common case: pointers fit in size_t, compare numerically.  */
1165 if (sizeof (size_t) >= sizeof (void*))
1166 return (size_t)p1 < (size_t)p2 ? -1 : 1;
/* Fallback: byte-wise compare of the pointer representations; any
   consistent total order suffices for the callers.  */
1168 return memcmp (&p1, &p2, sizeof (p1));
1171 /* Return true if TVAL is better than CVAL as a canonical value. We
1172 choose lowest-numbered VALUEs, using the RTX address as a
1173 tie-breaker. The idea is to arrange them into a star topology,
1174 such that all of them are at most one step away from the canonical
1175 value, and the canonical value has backlinks to all of them, in
1176 addition to all the actual locations. We don't enforce this
1177 topology throughout the entire dataflow analysis, though.
1181 canon_value_cmp (rtx tval, rtx cval)
/* Order: a missing CVAL loses (first disjunct elided in this listing),
   then the lower cselib value number wins, then the lower pointer
   serves as a deterministic tie-break.  */
1184 || CSELIB_VAL_PTR (tval)->value < CSELIB_VAL_PTR (cval)->value
1185 || (CSELIB_VAL_PTR (tval)->value == CSELIB_VAL_PTR (cval)->value
1186 && tie_break_pointers (tval, cval) < 0);
1189 static bool dst_can_be_shared;
1191 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1194 unshare_variable (dataflow_set *set, void **slot, variable var,
1195 enum var_init_status initialized)
/* Allocate a private copy with a fresh refcount of 1.  */
1200 new_var = (variable) pool_alloc (dv_pool (var->dv));
1201 new_var->dv = var->dv;
1202 new_var->refcount = 1;
1204 new_var->n_var_parts = var->n_var_parts;
/* Without -fvar-tracking-uninit every copy is treated as initialized.  */
1206 if (! flag_var_tracking_uninit)
1207 initialized = VAR_INIT_STATUS_INITIALIZED;
/* Deep-copy every location chain of every variable part.  */
1209 for (i = 0; i < var->n_var_parts; i++)
1211 location_chain node;
1212 location_chain *nextp;
1214 new_var->var_part[i].offset = var->var_part[i].offset;
1215 nextp = &new_var->var_part[i].loc_chain;
1216 for (node = var->var_part[i].loc_chain; node; node = node->next)
1218 location_chain new_lc;
1220 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1221 new_lc->next = NULL;
/* Keep the stronger of the node's recorded status and the requested
   INITIALIZED status.  */
1222 if (node->init > initialized)
1223 new_lc->init = node->init;
1225 new_lc->init = initialized;
/* MEM set_src rtxes are dropped in the copy -- presumably because a
   later store could invalidate them; TODO confirm.  */
1226 if (node->set_src && !(MEM_P (node->set_src)))
1227 new_lc->set_src = node->set_src;
1229 new_lc->set_src = NULL;
1230 new_lc->loc = node->loc;
1233 nextp = &new_lc->next;
1236 /* We are at the basic block boundary when copying variable description
1237 so set the CUR_LOC to be the first element of the chain. */
1238 if (new_var->var_part[i].loc_chain)
1239 new_var->var_part[i].cur_loc = new_var->var_part[i].loc_chain->loc;
1241 new_var->var_part[i].cur_loc = NULL;
/* Re-fetch SLOT: unsharing SET->vars below may move the hash table,
   invalidating the caller's slot pointer.  */
1244 dst_can_be_shared = false;
1245 if (shared_hash_shared (set->vars))
1246 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1247 else if (set->traversed_vars && set->vars != set->traversed_vars)
1248 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1253 /* Add a variable from *SLOT to hash table DATA and increase its reference
     count.  htab_traverse callback used by vars_copy; always returns
     nonzero so traversal continues.  NOTE(review): the refcount bump and
     store into *dstp are elided in this excerpt.  */
1257 vars_copy_1 (void **slot, void *data)
1259 htab_t dst = (htab_t) data;
1263 src = (variable) *slot;
/* Insert under the same decl-or-value key/hash as in the source table.  */
1266 dstp = htab_find_slot_with_hash (dst, src->dv,
1267 dv_htab_hash (src->dv),
1271 /* Continue traversing the hash table.  */
1275 /* Copy all variables from hash table SRC to hash table DST
     (shallow copy: entries are shared and their refcounts bumped
     by vars_copy_1).  */
1278 vars_copy (htab_t dst, htab_t src)
1280 htab_traverse_noresize (src, vars_copy_1, dst);
1283 /* Map a decl to its main debug decl: if DECL carries a DEBUG_EXPR
     that is itself a decl, return that instead, so all pieces of a
     split variable are tracked under one debug decl.  NOTE(review):
     the `return decl;` line is elided in this excerpt.  */
1286 var_debug_decl (tree decl)
1288 if (decl && DECL_P (decl)
1289 && DECL_DEBUG_EXPR_IS_FROM (decl) && DECL_DEBUG_EXPR (decl)
1290 && DECL_P (DECL_DEBUG_EXPR (decl)))
1291 decl = DECL_DEBUG_EXPR (decl);
1296 /* Set the register LOC to contain DV, OFFSET.  Adds an attrs entry for
     the register (unless one for DV/OFFSET already exists — the loop
     below presumably breaks out early; the break is elided here) and
     records the location part with set_variable_part.  */
1299 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1300 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1301 enum insert_option iopt)
1304 bool decl_p = dv_is_decl_p (dv);
/* Decls are canonicalized to their main debug decl first.  */
1307 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
/* Check whether this register already has an attrs node for DV/OFFSET.  */
1309 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1310 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1311 && node->offset == offset)
1314 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1315 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1318 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).
     Thin wrapper over var_reg_decl_set using the decl/offset recorded
     on the REG rtx itself.  */
1321 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1324 tree decl = REG_EXPR (loc);
1325 HOST_WIDE_INT offset = REG_OFFSET (loc);
1327 var_reg_decl_set (set, loc, initialized,
1328 dv_from_decl (decl), offset, set_src, INSERT);
/* Look up the recorded initialization status of location LOC for DV in
   SET.  Returns VAR_INIT_STATUS_INITIALIZED unconditionally when
   -fvar-tracking-uninit is off; otherwise scans the variable's
   location chains for a node rtx_equal to LOC and returns its init
   status, or VAR_INIT_STATUS_UNKNOWN if not found.  */
1331 static enum var_init_status
1332 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1336 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1338 if (! flag_var_tracking_uninit)
1339 return VAR_INIT_STATUS_INITIALIZED;
1341 var = shared_hash_find (set->vars, dv);
1344 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1346 location_chain nextp;
1347 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1348 if (rtx_equal_p (nextp->loc, loc))
1350 ret_val = nextp->init;
1359 /* Delete current content of register LOC in dataflow set SET and set
1360 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  If
1361 MODIFY is true, any other live copies of the same variable part are
1362 also deleted from the dataflow set, otherwise the variable part is
1363 assumed to be copied from another location holding the same
     part.  NOTE(review): several lines (the MODIFY test guarding the
     clobber, node/next declarations, braces) are elided here.  */
1367 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1368 enum var_init_status initialized, rtx set_src)
1370 tree decl = REG_EXPR (loc);
1371 HOST_WIDE_INT offset = REG_OFFSET (loc);
1375 decl = var_debug_decl (decl);
/* Recover the init status from the set when the caller didn't know it.  */
1377 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1378 initialized = get_init_value (set, loc, dv_from_decl (decl));
/* Drop every attrs entry on this register that describes a different
   variable part; entries for DECL/OFFSET itself are kept.  */
1380 nextp = &set->regs[REGNO (loc)];
1381 for (node = *nextp; node; node = next)
1384 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1386 delete_variable_part (set, node->loc, node->dv, node->offset);
1387 pool_free (attrs_pool, node);
1393 nextp = &node->next;
1397 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1398 var_reg_set (set, loc, initialized, set_src);
1401 /* Delete the association of register LOC in dataflow set SET with any
1402 variables that aren't onepart.  If CLOBBER is true, also delete any
1403 other live copies of the same variable part, and delete the
1404 association with onepart dvs too.
     NOTE(review): the `if (clobber)` guard around the REG_EXPR block and
     several declarations/braces are elided in this excerpt.  */
1407 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1409 attrs *nextp = &set->regs[REGNO (loc)];
1414 tree decl = REG_EXPR (loc);
1415 HOST_WIDE_INT offset = REG_OFFSET (loc);
1417 decl = var_debug_decl (decl);
1419 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
/* Remove attrs entries: all of them when clobbering, otherwise only
   those whose dv is not a one-part decl/value.  */
1422 for (node = *nextp; node; node = next)
1425 if (clobber || !dv_onepart_p (node->dv))
1427 delete_variable_part (set, node->loc, node->dv, node->offset);
1428 pool_free (attrs_pool, node);
1432 nextp = &node->next;
1436 /* Delete content of register with number REGNO in dataflow set SET:
     remove every variable part recorded for the register and free its
     attrs nodes (the list head is presumably cleared afterwards; that
     line is elided here).  */
1439 var_regno_delete (dataflow_set *set, int regno)
1441 attrs *reg = &set->regs[regno];
1444 for (node = *reg; node; node = next)
1447 delete_variable_part (set, node->loc, node->dv, node->offset);
1448 pool_free (attrs_pool, node);
1453 /* Set the location of DV, OFFSET as the MEM LOC.  Unlike the register
     variant there is no attrs bookkeeping for MEMs; decls are first
     canonicalized to their main debug decl.  */
1456 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1457 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1458 enum insert_option iopt)
1460 if (dv_is_decl_p (dv))
1461 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1463 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1466 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
     SET to LOC.
1468 Adjust the address first if it is stack pointer based.  */
1471 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1474 tree decl = MEM_EXPR (loc);
1475 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1477 var_mem_decl_set (set, loc, initialized,
1478 dv_from_decl (decl), offset, set_src, INSERT);
1481 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1482 dataflow set SET to LOC.  If MODIFY is true, any other live copies
1483 of the same variable part are also deleted from the dataflow set,
1484 otherwise the variable part is assumed to be copied from another
1485 location holding the same part.
1486 Adjust the address first if it is stack pointer based.
     NOTE(review): the `if (modify)` guard before the clobber call is
     elided in this excerpt.  */
1489 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1490 enum var_init_status initialized, rtx set_src)
1492 tree decl = MEM_EXPR (loc);
1493 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1495 decl = var_debug_decl (decl);
1497 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1498 initialized = get_init_value (set, loc, dv_from_decl (decl));
1501 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1502 var_mem_set (set, loc, initialized, set_src);
1505 /* Delete the location part LOC from dataflow set SET.  If CLOBBER is
1506 true, also delete any other live copies of the same variable part.
1507 Adjust the address first if it is stack pointer based.
     NOTE(review): the `if (clobber)` guard before clobber_variable_part
     is elided in this excerpt.  */
1510 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1512 tree decl = MEM_EXPR (loc);
1513 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1515 decl = var_debug_decl (decl);
1517 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1518 delete_variable_part (set, loc, dv_from_decl (decl), offset);
1521 /* Bind a value to a location it was just stored in.  If MODIFIED
1522 holds, assume the location was modified, detaching it from any
1523 values bound to it.  VAL must be a preserved cselib value.  */
1526 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
1528 cselib_val *v = CSELIB_VAL_PTR (val);
1530 gcc_assert (cselib_preserved_value_p (v));
/* Optional dump of the binding and the value's cselib location list
   (the dump_file guard for this section is elided in the excerpt).  */
1534 fprintf (dump_file, "%i: ", INSN_UID (insn));
1535 print_inline_rtx (dump_file, val, 0);
1536 fprintf (dump_file, " stored in ");
1537 print_inline_rtx (dump_file, loc, 0);
1540 struct elt_loc_list *l;
1541 for (l = v->locs; l; l = l->next)
1543 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
1544 print_inline_rtx (dump_file, l->loc, 0);
1547 fprintf (dump_file, "\n");
/* Dispatch on the kind of LOC: registers clear prior contents first,
   MEMs and other rtxes record the value location directly.  */
1553 var_regno_delete (set, REGNO (loc));
1554 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1555 dv_from_value (val), 0, NULL_RTX, INSERT);
1557 else if (MEM_P (loc))
1558 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1559 dv_from_value (val), 0, NULL_RTX, INSERT);
1561 set_variable_part (set, loc, dv_from_value (val), 0,
1562 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1565 /* Reset this node, detaching all its equivalences.  Return the slot
1566 in the variable hash table that holds dv, if there is one.
     Picks the most canonical VALUE (per canon_value_cmp) among DV's
     equivalences as CVAL, redirects all other value links to CVAL,
     then detaches DV itself.  NOTE(review): many lines (cval
     declaration/assignment, braces, early return) are elided.  */
1569 val_reset (dataflow_set *set, decl_or_value dv)
1571 variable var = shared_hash_find (set->vars, dv) ;
1572 location_chain node;
1575 if (!var || !var->n_var_parts)
1578 gcc_assert (var->n_var_parts == 1);
/* First pass: find the canonical value CVAL among the equivalences.  */
1581 for (node = var->var_part[0].loc_chain; node; node = node->next)
1582 if (GET_CODE (node->loc) == VALUE
1583 && canon_value_cmp (node->loc, cval))
/* Second pass: re-point every non-canonical value equivalence at CVAL
   and drop its link to DV.  */
1586 for (node = var->var_part[0].loc_chain; node; node = node->next)
1587 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
1589 /* Redirect the equivalence link to the new canonical
1590 value, or simply remove it if it would point at
      itself (continuation elided in this excerpt).  */
1593 set_variable_part (set, cval, dv_from_value (node->loc),
1594 0, node->init, node->set_src, NO_INSERT);
1595 delete_variable_part (set, dv_as_value (dv),
1596 dv_from_value (node->loc), 0);
1601 decl_or_value cdv = dv_from_value (cval);
1603 /* Keep the remaining values connected, accumulating links
1604 in the canonical value.  */
1605 for (node = var->var_part[0].loc_chain; node; node = node->next)
1607 if (node->loc == cval)
1609 else if (GET_CODE (node->loc) == REG)
1610 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
1611 node->set_src, NO_INSERT);
1612 else if (GET_CODE (node->loc) == MEM)
1613 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
1614 node->set_src, NO_INSERT);
1616 set_variable_part (set, node->loc, cdv, 0,
1617 node->init, node->set_src, NO_INSERT);
1621 /* We remove this last, to make sure that the canonical value is not
1622 removed to the point of requiring reinsertion.  */
1624 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
1626 clobber_variable_part (set, NULL, dv, 0, NULL);
1628 /* ??? Should we make sure there aren't other available values or
1629 variables whose values involve this one other than by
1630 equivalence?  E.g., at the very least we should reset MEMs, those
1631 shouldn't be too hard to find cselib-looking up the value as an
1632 address, then locating the resulting value in our own hash
1636 /* Find the values in a given location and map the val to another
1637 value, if it is unique, or add the location as one holding the
     value.  Resets VAL's prior equivalences first via val_reset.
     NOTE(review): the REG_P(loc) branch header and some braces are
     elided in this excerpt.  */
1641 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
1643 decl_or_value dv = dv_from_value (val);
1645 if (dump_file && (dump_flags & TDF_DETAILS))
1648 fprintf (dump_file, "%i: ", INSN_UID (insn));
1650 fprintf (dump_file, "head: ");
1651 print_inline_rtx (dump_file, val, 0);
1652 fputs (" is at ", dump_file);
1653 print_inline_rtx (dump_file, loc, 0);
1654 fputc ('\n', dump_file);
1657 val_reset (set, dv);
/* For a register, look for another VALUE of the same mode already
   recorded in the register's attrs list.  */
1661 attrs node, found = NULL;
1663 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1664 if (dv_is_value_p (node->dv)
1665 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
1669 /* Map incoming equivalences.  ??? Wouldn't it be nice if
1670 we just started sharing the location lists?  Maybe a
1671 circular list ending at the value itself or some
      such arrangement (continuation elided).  */
1673 set_variable_part (set, dv_as_value (node->dv),
1674 dv_from_value (val), node->offset,
1675 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1676 set_variable_part (set, val, node->dv, node->offset,
1677 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1680 /* If we didn't find any equivalence, we need to remember that
1681 this value is held in the named register.  */
1683 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1684 dv_from_value (val), 0, NULL_RTX, INSERT);
1686 else if (MEM_P (loc))
1687 /* ??? Merge equivalent MEMs.  */
1688 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1689 dv_from_value (val), 0, NULL_RTX, INSERT);
1691 /* ??? Merge equivalent expressions.  */
1692 set_variable_part (set, loc, dv_from_value (val), 0,
1693 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1696 /* Initialize dataflow set SET to be empty.
1697 VARS_SIZE is the initial size of hash table VARS.
     (The VARS_SIZE parameter no longer appears in the signature below;
     the comment is stale relative to the code — TODO confirm.)  */
1700 dataflow_set_init (dataflow_set *set)
1702 init_attrs_list_set (set->regs);
1703 set->vars = shared_hash_copy (empty_shared_hash);
1704 set->stack_adjust = 0;
1705 set->traversed_vars = NULL;
1708 /* Delete the contents of dataflow set SET: clear each hard register's
     attrs list and replace the variable table with a fresh reference to
     the shared empty table.  */
1711 dataflow_set_clear (dataflow_set *set)
1715 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1716 attrs_list_clear (&set->regs[i]);
1718 shared_hash_destroy (set->vars);
1719 set->vars = shared_hash_copy (empty_shared_hash);
1722 /* Copy the contents of dataflow set SRC to DST: per-register attrs
     lists are copied, the variable table is shared (refcounted copy),
     and the stack adjustment carried over.  */
1725 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
1729 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1730 attrs_list_copy (&dst->regs[i], src->regs[i]);
1732 shared_hash_destroy (dst->vars);
1733 dst->vars = shared_hash_copy (src->vars);
1734 dst->stack_adjust = src->stack_adjust;
1737 /* Information for merging lists of locations for a given offset of
     variable — one record per location node being merged by
     variable_union.  */
1739 struct variable_union_info
1741 /* Node of the location chain.  */
1744 /* The sum of positions in the input chains.  */
1747 /* The position in the chain of DST dataflow set.  */
1751 /* Buffer for location list sorting and its allocated size.
     Grown on demand in variable_union; never freed between calls.  */
1752 static struct variable_union_info *vui_vec;
1753 static int vui_allocated;
1755 /* Compare function for qsort, order the structures by POS element,
     breaking ties by POS_DST so the sort is stable with respect to the
     destination chain order.  */
1758 variable_union_info_cmp_pos (const void *n1, const void *n2)
1760 const struct variable_union_info *const i1 =
1761 (const struct variable_union_info *) n1;
1762 const struct variable_union_info *const i2 =
1763 ( const struct variable_union_info *) n2;
1765 if (i1->pos != i2->pos)
1766 return i1->pos - i2->pos;
1768 return (i1->pos_dst - i2->pos_dst);
1771 /* Compute union of location parts of variable *SLOT and the same variable
1772 from hash table DATA.  Compute "sorted" union of the location chains
1773 for common offsets, i.e. the locations of a variable part are sorted by
1774 a priority where the priority is the sum of the positions in the 2 chains
1775 (if a location is only in one list the position in the second list is
1776 defined to be larger than the length of the chains).
1777 When we are updating the location parts the newest location is in the
1778 beginning of the chain, so when we do the described "sorted" union
1779 we keep the newest locations in the beginning.
     htab_traverse callback; DATA is the destination dataflow_set.
     NOTE(review): this excerpt elides many lines of the original
     function (declarations, braces, else-arms, returns); the comments
     below describe only what the visible lines establish.  */
1782 variable_union (void **slot, void *data)
1786 dataflow_set *set = (dataflow_set *) data;
1789 src = (variable) *slot;
1790 dstp = shared_hash_find_slot (set->vars, src->dv);
/* SRC not present in DST: insert it (unsharing the table if needed).  */
1791 if (!dstp || !*dstp)
1795 dst_can_be_shared = false;
1797 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
1801 /* If CUR_LOC of some variable part is not the first element of
1802 the location chain we are going to change it so we have to make
1803 a copy of the variable.  */
1804 for (k = 0; k < src->n_var_parts; k++)
1806 gcc_assert (!src->var_part[k].loc_chain
1807 == !src->var_part[k].cur_loc);
1808 if (src->var_part[k].loc_chain)
1810 gcc_assert (src->var_part[k].cur_loc);
1811 if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
1815 if (k < src->n_var_parts)
1816 dstp = unshare_variable (set, dstp, src, VAR_INIT_STATUS_UNKNOWN);
1818 /* Continue traversing the hash table.  */
1822 dst = (variable) *dstp;
1824 gcc_assert (src->n_var_parts);
1826 /* We can combine one-part variables very efficiently, because their
1827 entries are in canonical order.  */
1828 if (dv_onepart_p (src->dv))
1830 location_chain *nodep, dnode, snode;
1832 gcc_assert (src->n_var_parts == 1);
1833 gcc_assert (dst->n_var_parts == 1);
1835 snode = src->var_part[0].loc_chain;
/* Merge the two loc_cmp-ordered chains in place; restart here after
   DST had to be unshared mid-merge.  */
1838 restart_onepart_unshared:
1839 nodep = &dst->var_part[0].loc_chain;
1845 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
1849 location_chain nnode;
1851 if (dst->refcount != 1 || shared_hash_shared (set->vars))
1853 dstp = unshare_variable (set, dstp, dst,
1854 VAR_INIT_STATUS_INITIALIZED);
1855 dst = (variable)*dstp;
1856 goto restart_onepart_unshared;
/* SRC node is missing from DST: splice in a fresh copy before DNODE.  */
1859 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
1860 nnode->loc = snode->loc;
1861 nnode->init = snode->init;
1862 if (!snode->set_src || MEM_P (snode->set_src))
1863 nnode->set_src = NULL;
1865 nnode->set_src = snode->set_src;
1866 nnode->next = dnode;
1869 #ifdef ENABLE_CHECKING
1871 gcc_assert (rtx_equal_p (dnode->loc, snode->loc));
1875 snode = snode->next;
1877 nodep = &dnode->next;
1881 dst->var_part[0].cur_loc = dst->var_part[0].loc_chain->loc;
1886 /* Count the number of location parts, result is K.  */
1887 for (i = 0, j = 0, k = 0;
1888 i < src->n_var_parts && j < dst->n_var_parts; k++)
1890 if (src->var_part[i].offset == dst->var_part[j].offset)
1895 else if (src->var_part[i].offset < dst->var_part[j].offset)
1900 k += src->n_var_parts - i;
1901 k += dst->n_var_parts - j;
1903 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
1904 thus there are at most MAX_VAR_PARTS different offsets.  */
1905 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
1907 if ((dst->refcount > 1 || shared_hash_shared (set->vars))
1908 && dst->n_var_parts != k)
1910 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
1911 dst = (variable)*dstp;
/* Merge parts from highest offset down so DST can be filled in place.  */
1914 i = src->n_var_parts - 1;
1915 j = dst->n_var_parts - 1;
1916 dst->n_var_parts = k;
1918 for (k--; k >= 0; k--)
1920 location_chain node, node2;
1922 if (i >= 0 && j >= 0
1923 && src->var_part[i].offset == dst->var_part[j].offset)
1925 /* Compute the "sorted" union of the chains, i.e. the locations which
1926 are in both chains go first, they are sorted by the sum of
1927 positions in the chains.  */
1930 struct variable_union_info *vui;
1932 /* If DST is shared compare the location chains.
1933 If they are different we will modify the chain in DST with
1934 high probability so make a copy of DST.  */
1935 if (dst->refcount > 1 || shared_hash_shared (set->vars))
1937 for (node = src->var_part[i].loc_chain,
1938 node2 = dst->var_part[j].loc_chain; node && node2;
1939 node = node->next, node2 = node2->next)
1941 if (!((REG_P (node2->loc)
1942 && REG_P (node->loc)
1943 && REGNO (node2->loc) == REGNO (node->loc))
1944 || rtx_equal_p (node2->loc, node->loc)))
/* Chains agree at this position: just raise the init status.  */
1946 if (node2->init < node->init)
1947 node2->init = node->init;
1953 dstp = unshare_variable (set, dstp, dst,
1954 VAR_INIT_STATUS_UNKNOWN);
1955 dst = (variable)*dstp;
/* Measure both chains (src_l / dst_l; assignments elided here).  */
1960 for (node = src->var_part[i].loc_chain; node; node = node->next)
1963 for (node = dst->var_part[j].loc_chain; node; node = node->next)
1968 /* The most common case, much simpler, no qsort is needed.  */
1969 location_chain dstnode = dst->var_part[j].loc_chain;
1970 dst->var_part[k].loc_chain = dstnode;
1971 dst->var_part[k].offset = dst->var_part[j].offset;
1973 for (node = src->var_part[i].loc_chain; node; node = node->next)
1974 if (!((REG_P (dstnode->loc)
1975 && REG_P (node->loc)
1976 && REGNO (dstnode->loc) == REGNO (node->loc))
1977 || rtx_equal_p (dstnode->loc, node->loc)))
1979 location_chain new_node;
1981 /* Copy the location from SRC.  */
1982 new_node = (location_chain) pool_alloc (loc_chain_pool);
1983 new_node->loc = node->loc;
1984 new_node->init = node->init;
1985 if (!node->set_src || MEM_P (node->set_src))
1986 new_node->set_src = NULL;
1988 new_node->set_src = node->set_src;
1989 node2->next = new_node;
/* General case: build VUI records for both chains and sort them.  */
1996 if (src_l + dst_l > vui_allocated)
1998 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
1999 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2004 /* Fill in the locations from DST.  */
2005 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2006 node = node->next, jj++)
2009 vui[jj].pos_dst = jj;
2011 /* Pos plus value larger than a sum of 2 valid positions.  */
2012 vui[jj].pos = jj + src_l + dst_l;
2015 /* Fill in the locations from SRC.  */
2017 for (node = src->var_part[i].loc_chain, ii = 0; node;
2018 node = node->next, ii++)
2020 /* Find location from NODE.  */
2021 for (jj = 0; jj < dst_l; jj++)
2023 if ((REG_P (vui[jj].lc->loc)
2024 && REG_P (node->loc)
2025 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2026 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2028 vui[jj].pos = jj + ii;
2032 if (jj >= dst_l) /* The location has not been found.  */
2034 location_chain new_node;
2036 /* Copy the location from SRC.  */
2037 new_node = (location_chain) pool_alloc (loc_chain_pool);
2038 new_node->loc = node->loc;
2039 new_node->init = node->init;
2040 if (!node->set_src || MEM_P (node->set_src))
2041 new_node->set_src = NULL;
2043 new_node->set_src = node->set_src;
2044 vui[n].lc = new_node;
2045 vui[n].pos_dst = src_l + dst_l;
2046 vui[n].pos = ii + src_l + dst_l;
2053 /* Special case still very common case.  For dst_l == 2
2054 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2055 vui[i].pos == i + src_l + dst_l.  */
2056 if (vui[0].pos > vui[1].pos)
2058 /* Order should be 1, 0, 2...  */
2059 dst->var_part[k].loc_chain = vui[1].lc;
2060 vui[1].lc->next = vui[0].lc;
2063 vui[0].lc->next = vui[2].lc;
2064 vui[n - 1].lc->next = NULL;
2067 vui[0].lc->next = NULL;
2072 dst->var_part[k].loc_chain = vui[0].lc;
2073 if (n >= 3 && vui[2].pos < vui[1].pos)
2075 /* Order should be 0, 2, 1, 3...  */
2076 vui[0].lc->next = vui[2].lc;
2077 vui[2].lc->next = vui[1].lc;
2080 vui[1].lc->next = vui[3].lc;
2081 vui[n - 1].lc->next = NULL;
2084 vui[1].lc->next = NULL;
2089 /* Order should be 0, 1, 2...  */
2091 vui[n - 1].lc->next = NULL;
2094 for (; ii < n; ii++)
2095 vui[ii - 1].lc->next = vui[ii].lc;
/* Fallback: full qsort by summed position, then relink the chain.  */
2099 qsort (vui, n, sizeof (struct variable_union_info),
2100 variable_union_info_cmp_pos);
2102 /* Reconnect the nodes in sorted order.  */
2103 for (ii = 1; ii < n; ii++)
2104 vui[ii - 1].lc->next = vui[ii].lc;
2105 vui[n - 1].lc->next = NULL;
2106 dst->var_part[k].loc_chain = vui[0].lc;
2109 dst->var_part[k].offset = dst->var_part[j].offset;
/* Offset present only in DST: keep DST's part unchanged.  */
2114 else if ((i >= 0 && j >= 0
2115 && src->var_part[i].offset < dst->var_part[j].offset)
2118 dst->var_part[k] = dst->var_part[j];
/* Offset present only in SRC: deep-copy SRC's chain into DST.  */
2121 else if ((i >= 0 && j >= 0
2122 && src->var_part[i].offset > dst->var_part[j].offset)
2125 location_chain *nextp;
2127 /* Copy the chain from SRC.  */
2128 nextp = &dst->var_part[k].loc_chain;
2129 for (node = src->var_part[i].loc_chain; node; node = node->next)
2131 location_chain new_lc;
2133 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2134 new_lc->next = NULL;
2135 new_lc->init = node->init;
2136 if (!node->set_src || MEM_P (node->set_src))
2137 new_lc->set_src = NULL;
2139 new_lc->set_src = node->set_src;
2140 new_lc->loc = node->loc;
2143 nextp = &new_lc->next;
2146 dst->var_part[k].offset = src->var_part[i].offset;
2150 /* We are at the basic block boundary when computing union
2151 so set the CUR_LOC to be the first element of the chain.  */
2152 if (dst->var_part[k].loc_chain)
2153 dst->var_part[k].cur_loc = dst->var_part[k].loc_chain->loc;
2155 dst->var_part[k].cur_loc = NULL;
/* Propagate the higher init status across matching locations.  */
2158 if (flag_var_tracking_uninit)
2159 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2161 location_chain node, node2;
2162 for (node = src->var_part[i].loc_chain; node; node = node->next)
2163 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2164 if (rtx_equal_p (node->loc, node2->loc))
2166 if (node->init > node2->init)
2167 node2->init = node->init;
2171 /* Continue traversing the hash table.  */
2175 /* Like variable_union, but only used when doing dataflow_set_union
2176 into an empty hashtab.  To allow sharing, dst is initially shared
2177 with src (so all variables are "copied" from src to dst hashtab),
2178 so only unshare_variable for variables that need canonicalization
     (i.e. whose CUR_LOC is not the head of its location chain).
     htab_traverse callback; DATA is the destination dataflow_set.  */
2182 variable_canonicalize (void **slot, void *data)
2185 dataflow_set *set = (dataflow_set *) data;
2188 src = *(variable *) slot;
2190 /* If CUR_LOC of some variable part is not the first element of
2191 the location chain we are going to change it so we have to make
2192 a copy of the variable.  */
2193 for (k = 0; k < src->n_var_parts; k++)
2195 gcc_assert (!src->var_part[k].loc_chain == !src->var_part[k].cur_loc);
2196 if (src->var_part[k].loc_chain)
2198 gcc_assert (src->var_part[k].cur_loc);
2199 if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
2203 if (k < src->n_var_parts)
2204 slot = unshare_variable (set, slot, src, VAR_INIT_STATUS_UNKNOWN);
2208 /* Compute union of dataflow sets SRC and DST and store it to DST.
     When DST is empty the union is just a shared copy of SRC followed
     by canonicalization; otherwise each SRC variable is merged in via
     variable_union.  */
2211 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2215 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2216 attrs_list_union (&dst->regs[i], src->regs[i]);
2218 if (dst->vars == empty_shared_hash)
2220 shared_hash_destroy (dst->vars);
2221 dst->vars = shared_hash_copy (src->vars);
2222 dst->traversed_vars = dst->vars;
2223 htab_traverse (shared_hash_htab (dst->vars), variable_canonicalize, dst);
2224 dst->traversed_vars = NULL;
2227 htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
2230 /* Whether the value is currently being expanded — stored in the rtx
     `used` flag of VALUE/DEBUG_EXPR rtxes to break recursion cycles.  */
2231 #define VALUE_RECURSED_INTO(x) \
2232 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2233 /* Whether the value is in changed_variables hash table — stored in the
     rtx `frame_related` flag of VALUE rtxes.  */
2234 #define VALUE_CHANGED(x) \
2235 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2236 /* Whether the decl is in changed_variables hash table — reuses the
     tree's TREE_VISITED bit.  */
2237 #define DECL_CHANGED(x) TREE_VISITED (x)
2239 /* Record that DV has been added into resp. removed from changed_variables
     according to NEWV, using the VALUE_CHANGED / DECL_CHANGED flag
     appropriate to the kind of DV.  */
2243 set_dv_changed (decl_or_value dv, bool newv)
2245 if (dv_is_value_p (dv))
2246 VALUE_CHANGED (dv_as_value (dv)) = newv;
2248 DECL_CHANGED (dv_as_decl (dv)) = newv;
2251 /* Return true if DV is present in changed_variables hash table,
     reading whichever flag set_dv_changed maintains for DV's kind.  */
2254 dv_changed_p (decl_or_value dv)
2256 return (dv_is_value_p (dv)
2257 ? VALUE_CHANGED (dv_as_value (dv))
2258 : DECL_CHANGED (dv_as_decl (dv)));
2261 /* Vector of VALUEs that should have VALUE_RECURSED_INTO bit cleared
2262 at the end of find_loc_in_1pdv.  Not a static variable in find_loc_in_1pdv
2263 to avoid constant allocation/freeing of it.  */
2264 static VEC(rtx, heap) *values_to_unmark;
2266 /* Helper function for find_loc_in_1pdv.
2267 Return a location list node whose loc is rtx_equal to LOC, in the
2268 location list of a one-part variable or value VAR, or in that of
2269 any values recursively mentioned in the location lists.  Marks each
     VALUE it descends into with VALUE_RECURSED_INTO (pushed onto
     values_to_unmark so the caller can clear the bits afterwards).  */
2271 static location_chain
2272 find_loc_in_1pdv_1 (rtx loc, variable var, htab_t vars)
2274 location_chain node;
2279 gcc_assert (dv_onepart_p (var->dv));
2281 if (!var->n_var_parts)
2284 gcc_assert (var->var_part[0].offset == 0);
2286 for (node = var->var_part[0].loc_chain; node; node = node->next)
2287 if (rtx_equal_p (loc, node->loc))
/* A VALUE in the chain may hold LOC indirectly: recurse into its
   own variable, guarding against cycles via VALUE_RECURSED_INTO.  */
2289 else if (GET_CODE (node->loc) == VALUE
2290 && !VALUE_RECURSED_INTO (node->loc))
2292 decl_or_value dv = dv_from_value (node->loc);
2293 variable var = (variable)
2294 htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2298 location_chain where;
2299 VALUE_RECURSED_INTO (node->loc) = true;
2300 VEC_safe_push (rtx, heap, values_to_unmark, node->loc);
2301 if ((where = find_loc_in_1pdv_1 (loc, var, vars)))
2309 /* Return a location list node whose loc is rtx_equal to LOC, in the
2310 location list of a one-part variable or value VAR, or in that of
2311 any values recursively mentioned in the location lists.  Wrapper
     around find_loc_in_1pdv_1 that clears the VALUE_RECURSED_INTO
     marks collected in values_to_unmark before returning.  */
2313 static location_chain
2314 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2320 ret = find_loc_in_1pdv_1 (loc, var, vars);
2321 for (i = 0; VEC_iterate (rtx, values_to_unmark, i, value); i++)
2322 VALUE_RECURSED_INTO (value) = false;
2323 VEC_truncate (rtx, values_to_unmark, 0);
2327 /* Hash table iteration argument passed to variable_merge.
     NOTE(review): the struct keyword/name line and member declarations
     are partly elided; only the member comments and one field remain.  */
2330 /* The set in which the merge is to be inserted.  */
2332 /* The set that we're iterating in.  */
2334 /* The set that may contain the other dv we are to merge with.  */
2336 /* Number of onepart dvs in src.  */
2337 int src_onepart_cnt;
2340 /* Insert LOC in *DNODE, if it's not there yet.  The list must be in
2341 loc_cmp order, and it is maintained as such.  If LOC is already
     present its init status is lowered to MIN with STATUS; otherwise a
     new node (no set_src) is spliced in at the ordered position.  */
2344 insert_into_intersection (location_chain *nodep, rtx loc,
2345 enum var_init_status status)
2347 location_chain node;
/* Walk to the first node >= LOC in loc_cmp order.  */
2350 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2351 if ((r = loc_cmp (node->loc, loc)) == 0)
2353 node->init = MIN (node->init, status);
2359 node = (location_chain) pool_alloc (loc_chain_pool);
2362 node->set_src = NULL;
2363 node->init = status;
2364 node->next = *nodep;
2368 /* Insert in DEST the intersection the locations present in both
2369 S1NODE and S2VAR, directly or indirectly.  S1NODE is from a
2370 variable in DSM->cur, whereas S2VAR is from DSM->src.  dvar is in
     the destination set (continuation elided in this excerpt).
     Recurses through VALUE equivalences in the cur-side chain, using
     VALUE_RECURSED_INTO to avoid cycles.  */
2374 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2375 location_chain s1node, variable s2var)
2377 dataflow_set *s1set = dsm->cur;
2378 dataflow_set *s2set = dsm->src;
2379 location_chain found;
2381 for (; s1node; s1node = s1node->next)
/* Skip the value being merged itself.  */
2383 if (s1node->loc == val)
/* Direct hit: the location also appears (possibly indirectly) in
   S2VAR's chain — keep it, with the weaker init status.  */
2386 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2387 shared_hash_htab (s2set->vars))))
2389 insert_into_intersection (dest, s1node->loc,
2390 MIN (s1node->init, found->init));
2394 if (GET_CODE (s1node->loc) == VALUE
2395 && !VALUE_RECURSED_INTO (s1node->loc))
2397 decl_or_value dv = dv_from_value (s1node->loc);
2398 variable svar = shared_hash_find (s1set->vars, dv);
2401 if (svar->n_var_parts == 1)
2403 VALUE_RECURSED_INTO (s1node->loc) = true;
2404 intersect_loc_chains (val, dest, dsm,
2405 svar->var_part[0].loc_chain,
2407 VALUE_RECURSED_INTO (s1node->loc) = false;
2412 /* ??? if the location is equivalent to any location in src,
2413 searched recursively
2415 add to dst the values needed to represent the equivalence
2417 telling whether locations S is equivalent to another dv's
2420 for each location D in the list
2422 if S and D satisfy rtx_equal_p, then it is present
2424 else if D is a value, recurse without cycles
2426 else if S and D have the same CODE and MODE
2428 for each operand oS and the corresponding oD
2430 if oS and oD are not equivalent, then S an D are not equivalent
2432 else if they are RTX vectors
2434 if any vector oS element is not equivalent to its respective oD,
2435 then S and D are not equivalent
2443 /* Return -1 if X should be before Y in a location list for a 1-part
2444 variable, 1 if Y should be before X, and 0 if they're equivalent
2445 and should not appear in the list.
     Ordering: REGs first by REGNO, then MEMs by address, then VALUEs by
     canon_value_cmp, then other rtxes by code/mode/operands.
     NOTE(review): the switch/case labels, early returns and fallthrough
     structure are elided in this excerpt.  */
2448 loc_cmp (rtx x, rtx y)
2451 RTX_CODE code = GET_CODE (x);
/* REG vs REG: order by register number.  */
2461 gcc_assert (GET_MODE (x) == GET_MODE (y));
2462 if (REGNO (x) == REGNO (y))
2464 else if (REGNO (x) < REGNO (y))
/* MEM vs MEM: compare the addresses recursively.  */
2477 gcc_assert (GET_MODE (x) == GET_MODE (y));
2478 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
/* VALUEs: the more canonical value (canon_value_cmp) sorts first.  */
2484 if (GET_CODE (x) == VALUE)
2486 if (GET_CODE (y) != VALUE)
2488 gcc_assert (GET_MODE (x) == GET_MODE (y));
2489 if (canon_value_cmp (x, y))
2495 if (GET_CODE (y) == VALUE)
2498 if (GET_CODE (x) == GET_CODE (y))
2499 /* Compare operands below.  */;
2500 else if (GET_CODE (x) < GET_CODE (y))
2505 gcc_assert (GET_MODE (x) == GET_MODE (y));
/* Same code and mode: compare operand by operand per the RTX format.  */
2507 fmt = GET_RTX_FORMAT (code);
2508 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2512 if (XWINT (x, i) == XWINT (y, i))
2514 else if (XWINT (x, i) < XWINT (y, i))
2521 if (XINT (x, i) == XINT (y, i))
2523 else if (XINT (x, i) < XINT (y, i))
2530 /* Compare the vector length first.  */
2531 if (XVECLEN (x, i) == XVECLEN (y, i))
2532 /* Compare the vectors elements.  */;
2533 else if (XVECLEN (x, i) < XVECLEN (y, i))
2538 for (j = 0; j < XVECLEN (x, i); j++)
2539 if ((r = loc_cmp (XVECEXP (x, i, j),
2540 XVECEXP (y, i, j))))
2545 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
2551 if (XSTR (x, i) == XSTR (y, i))
2557 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2565 /* These are just backpointers, so they don't matter.  */
2572 /* It is believed that rtx's at this level will never
2573 contain anything but integers and other rtx's,
2574 except for within LABEL_REFs and SYMBOL_REFs.  */
2582 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2583 from VALUE to DVP.  for_each_rtx callback; keeps a refcounted
     value_chain list per VALUE in the value_chains hash table.
     NOTE(review): list-head initialization and refcount increments for
     the existing-entry path are elided in this excerpt.  */
2586 add_value_chain (rtx *loc, void *dvp)
2588 if (GET_CODE (*loc) == VALUE && (void *) *loc != dvp)
2590 decl_or_value dv = (decl_or_value) dvp;
2591 decl_or_value ldv = dv_from_value (*loc);
2592 value_chain vc, nvc;
2593 void **slot = htab_find_slot_with_hash (value_chains, ldv,
2594 dv_htab_hash (ldv), INSERT);
/* First backlink for this VALUE: create the chain head.  */
2597 vc = (value_chain) pool_alloc (value_chain_pool);
2601 *slot = (void *) vc;
/* Otherwise look for an existing entry for DV to bump its refcount.  */
2605 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2606 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
2614 vc = (value_chain) *slot;
2615 nvc = (value_chain) pool_alloc (value_chain_pool);
2617 nvc->next = vc->next;
2624 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2625 from those VALUEs to DVP. */
2628 add_value_chains (decl_or_value dv, rtx loc)
/* A bare VALUE is handled directly ...  */
2630 if (GET_CODE (loc) == VALUE)
2632 add_value_chain (&loc, dv_as_opaque (dv));
/* ... otherwise strip one level and walk the whole sub-expression
   for embedded VALUEs.  */
2638 loc = XEXP (loc, 0);
2639 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
2642 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those
   VALUEs to DV.  */
2646 add_cselib_value_chains (decl_or_value dv)
2648 struct elt_loc_list *l;
/* Walk every location cselib knows for this value.  */
2650 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2651 for_each_rtx (&l->loc, add_value_chain, dv_as_opaque (dv));
2654 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
2655 from VALUE to DVP. */
/* for_each_rtx callback; exact inverse of add_value_chain.  */
2658 remove_value_chain (rtx *loc, void *dvp)
2660 if (GET_CODE (*loc) == VALUE && (void *) *loc != dvp)
2662 decl_or_value dv = (decl_or_value) dvp;
2663 decl_or_value ldv = dv_from_value (*loc);
2664 value_chain vc, dvc = NULL;
/* NO_INSERT: the chain entry must already exist.  */
2665 void **slot = htab_find_slot_with_hash (value_chains, ldv,
2666 dv_htab_hash (ldv), NO_INSERT);
/* Find the node recording DV; VC trails one behind for unlinking.  */
2667 for (vc = (value_chain) *slot; vc->next; vc = vc->next)
2668 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
2671 gcc_assert (dvc->refcount > 0);
/* Unlink and free the node once its refcount drops to zero.  */
2672 if (--dvc->refcount == 0)
2674 vc->next = dvc->next;
2675 pool_free (value_chain_pool, dvc);
/* If only the empty list head remains, drop LDV's hash entry too.  */
2676 if (vc->next == NULL && vc == (value_chain) *slot)
2678 pool_free (value_chain_pool, vc);
2679 htab_clear_slot (value_chains, slot);
2689 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
2690 from those VALUEs to DVP. */
2693 remove_value_chains (decl_or_value dv, rtx loc)
/* A bare VALUE is handled directly ...  */
2695 if (GET_CODE (loc) == VALUE)
2697 remove_value_chain (&loc, dv_as_opaque (dv));
/* ... otherwise strip one level and walk the whole sub-expression,
   mirroring add_value_chains.  */
2703 loc = XEXP (loc, 0);
2704 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
2707 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, remove backlinks from those
   VALUEs to DV.  Inverse of add_cselib_value_chains.  */
2711 remove_cselib_value_chains (decl_or_value dv)
2713 struct elt_loc_list *l;
2715 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2716 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
2720 /* Check the order of entries in one-part variables. */
/* htab_traverse callback: asserts that the location chain of each
   one-part variable is strictly sorted by loc_cmp.  Debug-checking
   only; does not modify anything.  */
2723 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
2725 variable var = (variable) *slot;
2726 decl_or_value dv = var->dv;
2727 location_chain node, next;
/* Multi-part variables are not kept sorted; nothing to check.  */
2729 if (!dv_onepart_p (dv))
2732 gcc_assert (var->n_var_parts == 1);
2733 node = var->var_part[0].loc_chain;
2736 while ((next = node->next))
2738 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
2746 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
2747 more likely to be chosen as canonical for an equivalence set.
2748 Ensure less likely values can reach more likely neighbors, making
2749 the connections bidirectional. */
/* htab_traverse callback over SET->vars (DATA is the dataflow_set).  */
2752 canonicalize_values_mark (void **slot, void *data)
2754 dataflow_set *set = (dataflow_set *)data;
2755 variable var = (variable) *slot;
2756 decl_or_value dv = var->dv;
2758 location_chain node;
/* Only VALUE-keyed entries participate in equivalence sets.  */
2760 if (!dv_is_value_p (dv))
2763 gcc_assert (var->n_var_parts == 1);
2765 val = dv_as_value (dv);
2767 for (node = var->var_part[0].loc_chain; node; node = node->next)
2768 if (GET_CODE (node->loc) == VALUE)
/* A more canonical neighbor exists: mark VAL for revisiting.  */
2770 if (canon_value_cmp (node->loc, val))
2771 VALUE_RECURSED_INTO (val) = true;
/* Otherwise make sure the less canonical neighbor links back to
   VAL (bidirectional edge) and mark it instead.  */
2774 decl_or_value odv = dv_from_value (node->loc);
2775 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
2777 oslot = set_slot_part (set, val, oslot, odv, 0,
2778 node->init, NULL_RTX);
2780 VALUE_RECURSED_INTO (node->loc) = true;
2787 /* Remove redundant entries from equivalence lists in onepart
2788 variables, canonicalizing equivalence sets into star shapes. */
/* htab_traverse callback.  Finds the most canonical VALUE (CVAL) of
   an equivalence set and redirects the other members' locations and
   register attributes to it, so every member points at CVAL (a star).
   NOTE(review): many original lines are elided in this excerpt.  */
2791 canonicalize_values_star (void **slot, void *data)
2793 dataflow_set *set = (dataflow_set *)data;
2794 variable var = (variable) *slot;
2795 decl_or_value dv = var->dv;
2796 location_chain node;
2803 if (!dv_onepart_p (dv))
2806 gcc_assert (var->n_var_parts == 1);
/* Only entries marked by canonicalize_values_mark need work.  */
2808 if (dv_is_value_p (dv))
2810 cval = dv_as_value (dv);
2811 if (!VALUE_RECURSED_INTO (cval))
2813 VALUE_RECURSED_INTO (cval) = false;
2823 gcc_assert (var->n_var_parts == 1);
/* Look among the neighbors for a marked VALUE more canonical than
   the current CVAL candidate.  */
2825 for (node = var->var_part[0].loc_chain; node; node = node->next)
2826 if (GET_CODE (node->loc) == VALUE)
2829 if (VALUE_RECURSED_INTO (node->loc))
2831 if (canon_value_cmp (node->loc, cval))
2840 if (!has_marks || dv_is_decl_p (dv))
2843 /* Keep it marked so that we revisit it, either after visiting a
2844 child node, or after visiting a new parent that might be
2846 VALUE_RECURSED_INTO (val) = true;
2848 for (node = var->var_part[0].loc_chain; node; node = node->next)
2849 if (GET_CODE (node->loc) == VALUE
2850 && VALUE_RECURSED_INTO (node->loc))
/* Recurse into the chosen canonical value's own entry.  */
2854 VALUE_RECURSED_INTO (cval) = false;
2855 dv = dv_from_value (cval);
2856 slot = shared_hash_find_slot_noinsert (set->vars, dv);
2859 gcc_assert (dv_is_decl_p (var->dv));
2860 /* The canonical value was reset and dropped.
2862 clobber_variable_part (set, NULL, var->dv, 0, NULL);
2865 var = (variable)*slot;
2866 gcc_assert (dv_is_value_p (var->dv));
2867 if (var->n_var_parts == 0)
2869 gcc_assert (var->n_var_parts == 1);
2873 VALUE_RECURSED_INTO (val) = false;
2878 /* Push values to the canonical one. */
2879 cdv = dv_from_value (cval);
2880 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
2882 for (node = var->var_part[0].loc_chain; node; node = node->next)
2883 if (node->loc != cval)
/* Move each non-canonical location onto CVAL's chain.  */
2885 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
2886 node->init, NULL_RTX);
2887 if (GET_CODE (node->loc) == VALUE)
2889 decl_or_value ndv = dv_from_value (node->loc);
2891 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
2894 if (canon_value_cmp (node->loc, val))
2896 /* If it could have been a local minimum, it's not any more,
2897 since it's now neighbor to cval, so it may have to push
2898 to it. Conversely, if it wouldn't have prevailed over
2899 val, then whatever mark it has is fine: if it was to
2900 push, it will now push to a more canonical node, but if
2901 it wasn't, then it has already pushed any values it might
2903 VALUE_RECURSED_INTO (node->loc) = true;
2904 /* Make sure we visit node->loc by ensuring we cval is
2906 VALUE_RECURSED_INTO (cval) = true;
2908 else if (!VALUE_RECURSED_INTO (node->loc))
2909 /* If we have no need to "recurse" into this node, it's
2910 already "canonicalized", so drop the link to the old
2912 clobber_variable_part (set, cval, ndv, 0, NULL);
2914 else if (GET_CODE (node->loc) == REG)
2916 attrs list = set->regs[REGNO (node->loc)], *listp;
2918 /* Change an existing attribute referring to dv so that it
2919 refers to cdv, removing any duplicate this might
2920 introduce, and checking that no previous duplicates
2921 existed, all in a single pass. */
2925 if (list->offset == 0
2926 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2927 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
/* Found the attribute for DV first: retarget it to CDV, then
   make sure no later attribute duplicates CDV.  */
2934 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2937 for (listp = &list->next; (list = *listp); listp = &list->next)
2942 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2944 *listp = list->next;
2945 pool_free (attrs_pool, list);
2950 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
/* Found CDV first: any later attribute for DV is a duplicate
   and gets removed instead.  */
2953 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2955 for (listp = &list->next; (list = *listp); listp = &list->next)
2960 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2962 *listp = list->next;
2963 pool_free (attrs_pool, list);
2968 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv))
2977 if (list->offset == 0
2978 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2979 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
/* Record VAL on CVAL's chain and strip VAL's own entry down to
   just CVAL.  */
2989 cslot = set_slot_part (set, val, cslot, cdv, 0,
2990 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
2992 slot = clobber_slot_part (set, cval, slot, 0, NULL);
2994 /* Variable may have been unshared. */
2995 var = (variable)*slot;
2996 gcc_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
2997 && var->var_part[0].loc_chain->next == NULL);
/* CVAL was re-marked above; process its entry as well.  */
2999 if (VALUE_RECURSED_INTO (cval))
3000 goto restart_with_cval;
3005 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3006 corresponding entry in DSM->src. Multi-part variables are combined
3007 with variable_union, whereas onepart dvs are combined with
   intersection (see intersect_loc_chains).  */
3011 variable_merge_over_cur (void **s1slot, void *data)
3013 struct dfset_merge *dsm = (struct dfset_merge *)data;
3014 dataflow_set *dst = dsm->dst;
3016 variable s1var = (variable) *s1slot;
3017 variable s2var, dvar = NULL;
3018 decl_or_value dv = s1var->dv;
3019 bool onepart = dv_onepart_p (dv);
3022 location_chain node, *nodep;
3024 /* If the incoming onepart variable has an empty location list, then
3025 the intersection will be just as empty. For other variables,
3026 it's always union. */
3027 gcc_assert (s1var->n_var_parts);
3028 gcc_assert (s1var->var_part[0].loc_chain);
/* Multi-part variables: plain union into DST.  */
3031 return variable_union (s1slot, dst);
3033 gcc_assert (s1var->n_var_parts == 1);
3034 gcc_assert (s1var->var_part[0].offset == 0);
3036 dvhash = dv_htab_hash (dv);
3037 if (dv_is_value_p (dv))
3038 val = dv_as_value (dv);
/* Find the counterpart entry in the other input set.  */
3042 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3045 dst_can_be_shared = false;
3049 dsm->src_onepart_cnt--;
3050 gcc_assert (s2var->var_part[0].loc_chain);
3051 gcc_assert (s2var->n_var_parts == 1);
3052 gcc_assert (s2var->var_part[0].offset == 0);
3054 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3057 dvar = (variable)*dstslot;
3058 gcc_assert (dvar->refcount == 1);
3059 gcc_assert (dvar->n_var_parts == 1);
3060 gcc_assert (dvar->var_part[0].offset == 0);
3061 nodep = &dvar->var_part[0].loc_chain;
/* If DST has no entry yet and S1VAR equals S2VAR, share S2VAR
   directly instead of building a fresh intersection.  */
3069 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3071 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3073 *dstslot = dvar = s2var;
3078 dst_can_be_shared = false;
/* One-part: intersect S1VAR's location chain with S2VAR's.  */
3080 intersect_loc_chains (val, nodep, dsm,
3081 s1var->var_part[0].loc_chain, s2var);
/* No reusable entry in DST: allocate a fresh variable holding the
   intersection result.  */
3087 dvar = (variable) pool_alloc (dv_pool (dv));
3090 dvar->n_var_parts = 1;
3091 dvar->var_part[0].offset = 0;
3092 dvar->var_part[0].loc_chain = node;
3093 dvar->var_part[0].cur_loc = node->loc;
3096 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3098 gcc_assert (!*dstslot);
/* Walk the resulting chain, reconciling register locations with
   DST's per-register attribute lists.  */
3106 nodep = &dvar->var_part[0].loc_chain;
3107 while ((node = *nodep))
3109 location_chain *nextp = &node->next;
3111 if (GET_CODE (node->loc) == REG)
3115 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3116 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3117 && dv_is_value_p (list->dv))
/* Register not yet attributed in DST: record it.  */
3121 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3123 /* If this value became canonical for another value that had
3124 this register, we want to leave it alone. */
3125 else if (dv_as_value (list->dv) != val)
3127 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3129 node->init, NULL_RTX);
3130 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3132 /* Since nextp points into the removed node, we can't
3133 use it. The pointer to the next node moved to nodep.
3134 However, if the variable we're walking is unshared
3135 during our walk, we'll keep walking the location list
3136 of the previously-shared variable, in which case the
3137 node won't have been removed, and we'll want to skip
3138 it. That's why we test *nodep here. */
3144 /* Canonicalization puts registers first, so we don't have to
3150 if (dvar != (variable)*dstslot)
3151 dvar = (variable)*dstslot;
3152 nodep = &dvar->var_part[0].loc_chain;
3156 /* Mark all referenced nodes for canonicalization, and make sure
3157 we have mutual equivalence links. */
3158 VALUE_RECURSED_INTO (val) = true;
3159 for (node = *nodep; node; node = node->next)
3160 if (GET_CODE (node->loc) == VALUE)
3162 VALUE_RECURSED_INTO (node->loc) = true;
3163 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3164 node->init, NULL, INSERT);
3167 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3168 gcc_assert (*dstslot == dvar);
3169 canonicalize_values_star (dstslot, dst);
3170 #ifdef ENABLE_CHECKING
3172 == shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash));
3174 dvar = (variable)*dstslot;
3178 bool has_value = false, has_other = false;
3180 /* If we have one value and anything else, we're going to
3181 canonicalize this, so make sure all values have an entry in
3182 the table and are marked for canonicalization. */
3183 for (node = *nodep; node; node = node->next)
3185 if (GET_CODE (node->loc) == VALUE)
3187 /* If this was marked during register canonicalization,
3188 we know we have to canonicalize values. */
3203 if (has_value && has_other)
3205 for (node = *nodep; node; node = node->next)
3207 if (GET_CODE (node->loc) == VALUE)
3209 decl_or_value dv = dv_from_value (node->loc);
3212 if (shared_hash_shared (dst->vars))
3213 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3215 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
/* Missing table entry for the value: create an empty one so
   canonicalization has somewhere to push.  */
3219 variable var = (variable) pool_alloc (dv_pool (dv));
3222 var->n_var_parts = 1;
3223 var->var_part[0].offset = 0;
3224 var->var_part[0].loc_chain = NULL;
3225 var->var_part[0].cur_loc = NULL;
3229 VALUE_RECURSED_INTO (node->loc) = true;
3233 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3234 gcc_assert (*dstslot == dvar);
3235 canonicalize_values_star (dstslot, dst);
3236 #ifdef ENABLE_CHECKING
3238 == shared_hash_find_slot_noinsert_1 (dst->vars,
3241 dvar = (variable)*dstslot;
/* If the final result equals one of the inputs, share it rather
   than keeping a private copy.  */
3245 if (!onepart_variable_different_p (dvar, s2var))
3247 variable_htab_free (dvar);
3248 *dstslot = dvar = s2var;
3251 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3253 variable_htab_free (dvar);
3254 *dstslot = dvar = s1var;
3256 dst_can_be_shared = false;
3260 if (dvar->refcount == 1)
3261 dvar->var_part[0].cur_loc = dvar->var_part[0].loc_chain->loc;
3262 dst_can_be_shared = false;
3268 /* Combine variable in *S2SLOT (in DSM->src) with the corresponding
3269 entry in DSM->dst. Only multi-part variables are combined, using
3270 variable_union. onepart dvs were already combined with
3271 intersection in variable_merge_over_cur(). */
3274 variable_merge_over_src (void **s2slot, void *data)
3276 struct dfset_merge *dsm = (struct dfset_merge *)data;
3277 dataflow_set *dst = dsm->dst;
3278 variable s2var = (variable) *s2slot;
3279 decl_or_value dv = s2var->dv;
3280 bool onepart = dv_onepart_p (dv);
/* Multi-part: union into the destination set.  */
3284 void **dstp = shared_hash_find_slot (dst->vars, dv);
3287 return variable_canonicalize (dstp, dst);
/* One-part dvs are only counted here; the real combining is the
   intersection done in variable_merge_over_cur.  */
3290 dsm->src_onepart_cnt++;
3294 /* Combine dataflow set information from SRC into DST, using PDST
3295 to carry over information across passes. */
/* NOTE(review): the header mentions PDST but this overload takes only
   DST and SRC — comment looks stale; confirm against callers.  */
3298 dataflow_set_merge (dataflow_set *dst, dataflow_set *src)
/* Save the old DST (as SRC2) and rebuild DST from scratch, sized for
   the larger of the two inputs.  */
3300 dataflow_set src2 = *dst;
3301 struct dfset_merge dsm;
3303 size_t src_elems, dst_elems;
3305 src_elems = htab_elements (shared_hash_htab (src->vars));
3306 dst_elems = htab_elements (shared_hash_htab (src2.vars));
3307 dataflow_set_init (dst);
3308 dst->stack_adjust = src2.stack_adjust;
3309 shared_hash_destroy (dst->vars);
3310 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3311 dst->vars->refcount = 1;
3313 = htab_create (MAX (src_elems, dst_elems), variable_htab_hash,
3314 variable_htab_eq, variable_htab_free);
/* Merge per-register attribute lists first.  */
3316 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3317 attrs_list_mpdv_union (&dst->regs[i], src->regs[i], src2.regs[i]);
3322 dsm.src_onepart_cnt = 0;
/* Union multi-part variables from SRC, then intersect/union against
   the saved old DST contents.  */
3324 htab_traverse (shared_hash_htab (dsm.src->vars), variable_merge_over_src,
3326 htab_traverse (shared_hash_htab (dsm.cur->vars), variable_merge_over_cur,
/* One-part entries present only in SRC mean DST is not a plain copy.  */
3329 if (dsm.src_onepart_cnt)
3330 dst_can_be_shared = false;
3332 dataflow_set_destroy (&src2);
3335 /* Mark register equivalences. */
/* For each hard register, pick the most canonical VALUE bound to it
   per machine mode, link the register's other values/one-part decls
   to that canonical value, then run star canonicalization.  */
3338 dataflow_set_equiv_regs (dataflow_set *set)
3343 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
/* CANON[mode] will hold the most canonical VALUE seen for this
   register in each mode.  */
3345 rtx canon[NUM_MACHINE_MODES];
3347 memset (canon, 0, sizeof (canon));
/* Pass 1: choose the canonical value per mode.  */
3349 for (list = set->regs[i]; list; list = list->next)
3350 if (list->offset == 0 && dv_is_value_p (list->dv))
3352 rtx val = dv_as_value (list->dv);
3353 rtx *cvalp = &canon[(int)GET_MODE (val)];
3356 if (canon_value_cmp (val, cval))
/* Pass 2: link every other value / decl to the canonical one,
   marking both sides for canonicalization.  */
3360 for (list = set->regs[i]; list; list = list->next)
3361 if (list->offset == 0 && dv_onepart_p (list->dv))
3363 rtx cval = canon[(int)GET_MODE (list->loc)];
3368 if (dv_is_value_p (list->dv))
3370 rtx val = dv_as_value (list->dv);
3375 VALUE_RECURSED_INTO (val) = true;
3376 set_variable_part (set, val, dv_from_value (cval), 0,
3377 VAR_INIT_STATUS_INITIALIZED,
3381 VALUE_RECURSED_INTO (cval) = true;
3382 set_variable_part (set, cval, list->dv, 0,
3383 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
/* Pass 3: canonicalize every marked value on this register.  */
3386 for (listp = &set->regs[i]; (list = *listp);
3387 listp = list ? &list->next : listp)
3388 if (list->offset == 0 && dv_onepart_p (list->dv))
3390 rtx cval = canon[(int)GET_MODE (list->loc)];
3396 if (dv_is_value_p (list->dv))
3398 rtx val = dv_as_value (list->dv);
3399 if (!VALUE_RECURSED_INTO (val))
3403 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3404 canonicalize_values_star (slot, set);
3411 /* Remove any redundant values in the location list of VAR, which must
3412 be unshared and 1-part. */
3415 remove_duplicate_values (variable var)
3417 location_chain node, *nodep;
3419 gcc_assert (dv_onepart_p (var->dv));
3420 gcc_assert (var->n_var_parts == 1);
3421 gcc_assert (var->refcount == 1);
/* First pass: VALUE_RECURSED_INTO doubles as a "seen" bit; a VALUE
   already marked is a duplicate and is unlinked.  */
3423 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3425 if (GET_CODE (node->loc) == VALUE)
3427 if (VALUE_RECURSED_INTO (node->loc))
3429 /* Remove duplicate value node. */
3430 *nodep = node->next;
3431 pool_free (loc_chain_pool, node);
3435 VALUE_RECURSED_INTO (node->loc) = true;
3437 nodep = &node->next;
/* Second pass: clear the "seen" bits so the flag is left clean.  */
3440 for (node = var->var_part[0].loc_chain; node; node = node->next)
3441 if (GET_CODE (node->loc) == VALUE)
3443 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3444 VALUE_RECURSED_INTO (node->loc) = false;
3449 /* Hash table iteration argument passed to variable_post_merge. */
3450 struct dfset_post_merge
3452 /* The new input set for the current block. */
3454 /* Pointer to the permanent input set for the current block, or
   NULL.  */
3456 dataflow_set **permp;
3459 /* Create values for incoming expressions associated with one-part
3460 variables that don't have value numbers for them. */
/* htab_traverse callback over the merged SET->vars; INFO is a
   struct dfset_post_merge.  NOTE(review): elided lines here.  */
3463 variable_post_merge_new_vals (void **slot, void *info)
3465 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3466 dataflow_set *set = dfpm->set;
3467 variable var = (variable)*slot;
3468 location_chain node;
3470 if (!dv_onepart_p (var->dv) || !var->n_var_parts)
3473 gcc_assert (var->n_var_parts == 1);
3475 if (dv_is_decl_p (var->dv))
3477 bool check_dupes = false;
3480 for (node = var->var_part[0].loc_chain; node; node = node->next)
3482 if (GET_CODE (node->loc) == VALUE)
3483 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
/* A register location held directly by a decl: it needs a VALUE.  */
3484 else if (GET_CODE (node->loc) == REG)
3486 attrs att, *attp, *curp = NULL;
/* Unshare before mutating the variable in place.  */
3488 if (var->refcount != 1)
3490 slot = unshare_variable (set, slot, var,
3491 VAR_INIT_STATUS_INITIALIZED);
3492 var = (variable)*slot;
/* Look through the register's attributes for an existing VALUE
   of the right mode, or for the decl's own attribute.  */
3496 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3498 if (att->offset == 0
3499 && GET_MODE (att->loc) == GET_MODE (node->loc))
3501 if (dv_is_value_p (att->dv))
3503 rtx cval = dv_as_value (att->dv);
3508 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
3516 if ((*curp)->offset == 0
3517 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
3518 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
3521 curp = &(*curp)->next
/* No value found: lazily create the permanent set ...  */
3532 *dfpm->permp = XNEW (dataflow_set);
3533 dataflow_set_init (*dfpm->permp);
/* ... and reset any stale same-mode value the permanent set
   already associates with this register.  */
3536 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
3537 att; att = att->next)
3538 if (GET_MODE (att->loc) == GET_MODE (node->loc))
3540 gcc_assert (att->offset == 0);
3541 gcc_assert (dv_is_value_p (att->dv));
3542 val_reset (set, att->dv);
3549 cval = dv_as_value (cdv);
3553 /* Create a unique value to hold this register,
3554 that ought to be found and reused in
3555 subsequent rounds. */
3557 gcc_assert (!cselib_lookup (node->loc,
3558 GET_MODE (node->loc), 0));
3559 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
3560 cselib_preserve_value (v);
3561 cselib_invalidate_rtx (node->loc);
3563 cdv = dv_from_value (cval);
3566 "Created new value %i for reg %i\n",
3567 v->value, REGNO (node->loc));
/* Record the new value in the permanent set so later rounds
   find and reuse it.  */
3570 var_reg_decl_set (*dfpm->permp, node->loc,
3571 VAR_INIT_STATUS_INITIALIZED,
3572 cdv, 0, NULL, INSERT);
3578 /* Remove attribute referring to the decl, which now
3579 uses the value for the register, already existing or
3580 to be added when we bring perm in. */
3583 pool_free (attrs_pool, att);
/* Retargeting locations may have introduced duplicates.  */
3588 remove_duplicate_values (var);
3594 /* Reset values in the permanent set that are not associated with the
3595 chosen expression. */
/* htab_traverse callback over the permanent set; INFO is a
   struct dfset_post_merge.  Each permanent entry is a VALUE with a
   single register location.  */
3598 variable_post_merge_perm_vals (void **pslot, void *info)
3600 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3601 dataflow_set *set = dfpm->set;
3602 variable pvar = (variable)*pslot, var;
3603 location_chain pnode;
3607 gcc_assert (dv_is_value_p (pvar->dv));
3608 gcc_assert (pvar->n_var_parts == 1);
3609 pnode = pvar->var_part[0].loc_chain;
3611 gcc_assert (!pnode->next);
3612 gcc_assert (REG_P (pnode->loc));
3616 var = shared_hash_find (set->vars, dv);
/* If SET still maps the value to the same register, discard any
   other locations it may have accumulated.  */
3619 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
3621 val_reset (set, dv);
/* Look for a value SET currently associates with the register.  */
3624 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
3625 if (att->offset == 0
3626 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
3627 && dv_is_value_p (att->dv))
3630 /* If there is a value associated with this register already, create
3632 if (att && dv_as_value (att->dv) != dv_as_value (dv))
/* Cross-link the two values as equivalent.  */
3634 rtx cval = dv_as_value (att->dv);
3635 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
3636 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
/* Otherwise attribute the register to the permanent value and
   union the permanent entry into SET.  */
3641 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
3643 variable_union (pslot, set);
3649 /* Just checking stuff and registering register attributes for
   new values.  */
3653 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
3655 struct dfset_post_merge dfpm;
/* Create values for incoming one-part expressions ...  */
3660 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
/* ... reconcile SET with the permanent set ...  */
3663 htab_traverse (shared_hash_htab ((*permp)->vars),
3664 variable_post_merge_perm_vals, &dfpm);
/* ... and restore the star-shaped equivalence invariant.  */
3665 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
3668 /* Return a node whose loc is a MEM that refers to EXPR in the
3669 location list of a one-part variable or value VAR, or in that of
3670 any values recursively mentioned in the location lists. */
3672 static location_chain
3673 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
3675 location_chain node;
3678 location_chain where = NULL;
3683 gcc_assert (GET_CODE (val) == VALUE);
3685 gcc_assert (!VALUE_RECURSED_INTO (val));
3687 dv = dv_from_value (val);
3688 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
3693 gcc_assert (dv_onepart_p (var->dv));
3695 if (!var->n_var_parts)
3698 gcc_assert (var->var_part[0].offset == 0);
3700 VALUE_RECURSED_INTO (val) = true;
3702 for (node = var->var_part[0].loc_chain; node; node = node->next)
3703 if (MEM_P (node->loc) && MEM_EXPR (node->loc) == expr
3704 && MEM_OFFSET (node->loc) == 0)
3709 else if (GET_CODE (node->loc) == VALUE
3710 && !VALUE_RECURSED_INTO (node->loc)
3711 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
3714 VALUE_RECURSED_INTO (val) = false;
3719 /* Return TRUE if the value of MEM may vary across a call. */
3722 mem_dies_at_call (rtx mem)
3724 tree expr = MEM_EXPR (mem);
3730 decl = get_base_address (expr);
/* Aliased memory, or a writable global, may be modified by the
   callee.  */
3738 return (may_be_aliased (decl)
3739 || (!TREE_READONLY (decl) && is_global_var (decl)));
3742 /* Remove all MEMs from the location list of a hash table entry for a
3743 one-part variable, except those whose MEM attributes map back to
3744 the variable itself, directly or within a VALUE. */
/* htab_traverse callback; DATA is the dataflow_set.  Called at call
   insns (see dataflow_set_clear_at_call).  */
3747 dataflow_set_preserve_mem_locs (void **slot, void *data)
3749 dataflow_set *set = (dataflow_set *) data;
3750 variable var = (variable) *slot;
3752 if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
3754 tree decl = dv_as_decl (var->dv);
3755 location_chain loc, *locp;
3757 if (!var->n_var_parts)
3760 gcc_assert (var->n_var_parts == 1);
/* For shared variables, first check whether anything would change
   at all before paying for an unshare.  */
3762 if (var->refcount > 1 || shared_hash_shared (set->vars))
3764 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3766 /* We want to remove dying MEMs that doesn't refer to
3768 if (GET_CODE (loc->loc) == MEM
3769 && (MEM_EXPR (loc->loc) != decl
3770 || MEM_OFFSET (loc->loc))
3771 && !mem_dies_at_call (loc->loc))
3773 /* We want to move here MEMs that do refer to DECL. */
3774 else if (GET_CODE (loc->loc) == VALUE
3775 && find_mem_expr_in_1pdv (decl, loc->loc,
3776 shared_hash_htab (set->vars)))
3783 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3784 var = (variable)*slot;
3785 gcc_assert (var->n_var_parts == 1);
/* Main pass over the (now private) location chain.  */
3788 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3791 rtx old_loc = loc->loc;
/* A VALUE that reaches a MEM for DECL gets replaced by that MEM.  */
3792 if (GET_CODE (old_loc) == VALUE)
3794 location_chain mem_node
3795 = find_mem_expr_in_1pdv (decl, loc->loc,
3796 shared_hash_htab (set->vars));
3798 /* ??? This picks up only one out of multiple MEMs that
3799 refer to the same variable. Do we ever need to be
3800 concerned about dealing with more than one, or, given
3801 that they should all map to the same variable
3802 location, their addresses will have been merged and
3803 they will be regarded as equivalent? */
3806 loc->loc = mem_node->loc;
3807 loc->set_src = mem_node->set_src;
3808 loc->init = MIN (loc->init, mem_node->init);
/* Keep anything that is not a dying, foreign MEM.  */
3812 if (GET_CODE (loc->loc) != MEM
3813 || (MEM_EXPR (loc->loc) == decl
3814 && MEM_OFFSET (loc->loc) == 0)
3815 || !mem_dies_at_call (loc->loc))
/* Maintain the value-chain backlinks for the swap done above.  */
3817 if (old_loc != loc->loc && emit_notes)
3819 add_value_chains (var->dv, loc->loc);
3820 remove_value_chains (var->dv, old_loc);
/* Drop the dying MEM location.  */
3827 remove_value_chains (var->dv, old_loc);
3829 pool_free (loc_chain_pool, loc);
/* If every location was removed, report the variable as changed.  */
3832 if (!var->var_part[0].loc_chain)
3835 if (emit_notes && dv_is_value_p (var->dv))
3836 remove_cselib_value_chains (var->dv);
3837 variable_was_changed (var, set);
3844 /* Remove all MEMs from the location list of a hash table entry for a
   value that die at a call.  */
/* htab_traverse callback; DATA is the dataflow_set.  Companion of
   dataflow_set_preserve_mem_locs, but for VALUE-keyed entries.  */
3848 dataflow_set_remove_mem_locs (void **slot, void *data)
3850 dataflow_set *set = (dataflow_set *) data;
3851 variable var = (variable) *slot;
3853 if (dv_is_value_p (var->dv))
3855 location_chain loc, *locp;
3856 bool changed = false;
3858 gcc_assert (var->n_var_parts == 1);
/* For shared variables, only unshare if some MEM actually dies.  */
3860 if (var->refcount > 1 || shared_hash_shared (set->vars))
3862 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3863 if (GET_CODE (loc->loc) == MEM
3864 && mem_dies_at_call (loc->loc))
3870 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3871 var = (variable)*slot;
3872 gcc_assert (var->n_var_parts == 1);
/* Remove every MEM location that dies at the call.  */
3875 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3878 if (GET_CODE (loc->loc) != MEM
3879 || !mem_dies_at_call (loc->loc))
3886 remove_value_chains (var->dv, loc->loc);
3888 /* If we have deleted the location which was last emitted
3889 we have to emit new location so add the variable to set
3890 of changed variables. */
3891 if (var->var_part[0].cur_loc
3892 && rtx_equal_p (loc->loc, var->var_part[0].cur_loc))
3894 pool_free (loc_chain_pool, loc);
3897 if (!var->var_part[0].loc_chain)
3900 if (emit_notes && dv_is_value_p (var->dv))
3901 remove_cselib_value_chains (var->dv);
3902 gcc_assert (changed);
/* Refresh cur_loc and report the change if needed.  */
3906 if (var->n_var_parts && var->var_part[0].loc_chain)
3907 var->var_part[0].cur_loc = var->var_part[0].loc_chain->loc;
3908 variable_was_changed (var, set);
3915 /* Remove all variable-location information about call-clobbered
3916 registers, as well as associations between MEMs and VALUEs. */
3919 dataflow_set_clear_at_call (dataflow_set *set)
/* Drop every call-used hard register from the set.  */
3923 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
3924 if (TEST_HARD_REG_BIT (call_used_reg_set, r))
3925 var_regno_delete (set, r);
/* With debug insns, also prune MEM locations that may die at the
   call; traversed_vars guards against re-traversal on unshare.  */
3927 if (MAY_HAVE_DEBUG_INSNS)
3929 set->traversed_vars = set->vars;
3930 htab_traverse (shared_hash_htab (set->vars),
3931 dataflow_set_preserve_mem_locs, set);
3932 set->traversed_vars = set->vars;
3933 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
3935 set->traversed_vars = NULL;
3939 /* Flag whether two dataflow sets being compared contain different data. */
/* Set by dataflow_set_different_1; read and reset by
   dataflow_set_different.  */
3941 dataflow_set_different_value;
/* Return true if VP1 has a location that VP2 lacks.  Note the check
   is one-directional; callers test both orders.  */
3944 variable_part_different_p (variable_part *vp1, variable_part *vp2)
3946 location_chain lc1, lc2;
3948 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
/* Look for a matching location in VP2's chain: registers match by
   REGNO, everything else by rtx_equal_p.  */
3950 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
3952 if (REG_P (lc1->loc) && REG_P (lc2->loc))
3954 if (REGNO (lc1->loc) == REGNO (lc2->loc))
3957 if (rtx_equal_p (lc1->loc, lc2->loc))
3966 /* Return true if one-part variables VAR1 and VAR2 are different.
3967 They must be in canonical order. */
3970 onepart_variable_different_p (variable var1, variable var2)
3972 location_chain lc1, lc2;
3977 gcc_assert (var1->n_var_parts == 1);
3978 gcc_assert (var2->n_var_parts == 1);
/* Because both chains are loc_cmp-sorted, a single parallel walk
   comparing element by element suffices.  */
3980 lc1 = var1->var_part[0].loc_chain;
3981 lc2 = var2->var_part[0].loc_chain;
3988 if (loc_cmp (lc1->loc, lc2->loc))
3997 /* Return true if variables VAR1 and VAR2 are different.
3998 If COMPARE_CURRENT_LOCATION is true compare also the cur_loc of each
   part.  */
4002 variable_different_p (variable var1, variable var2,
4003 bool compare_current_location)
4010 if (var1->n_var_parts != var2->n_var_parts)
4013 for (i = 0; i < var1->n_var_parts; i++)
4015 if (var1->var_part[i].offset != var2->var_part[i].offset)
/* cur_loc comparison: registers by number, otherwise structural
   rtx equality.  */
4017 if (compare_current_location)
4019 if (!((REG_P (var1->var_part[i].cur_loc)
4020 && REG_P (var2->var_part[i].cur_loc)
4021 && (REGNO (var1->var_part[i].cur_loc)
4022 == REGNO (var2->var_part[i].cur_loc)))
4023 || rtx_equal_p (var1->var_part[i].cur_loc,
4024 var2->var_part[i].cur_loc)))
4027 /* One-part values have locations in a canonical order. */
4028 if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
4030 gcc_assert (var1->n_var_parts == 1);
4031 gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4032 return onepart_variable_different_p (var1, var2);
/* Otherwise check containment in both directions.  */
4034 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4036 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4042 /* Compare variable *SLOT with the same variable in hash table DATA
4043 and set DATAFLOW_SET_DIFFERENT_VALUE if they are different. */
/* htab_traverse callback used by dataflow_set_different.  */
4046 dataflow_set_different_1 (void **slot, void *data)
4048 htab_t htab = (htab_t) data;
4049 variable var1, var2;
4051 var1 = (variable) *slot;
4052 var2 = (variable) htab_find_with_hash (htab, var1->dv,
4053 dv_htab_hash (var1->dv));
/* Variable missing from the other table: definite difference.  */
4056 dataflow_set_different_value = true;
4058 if (dump_file && (dump_flags & TDF_DETAILS))
4060 fprintf (dump_file, "dataflow difference found: removal of:\n");
4064 /* Stop traversing the hash table. */
/* Present in both tables: compare contents.  */
4068 if (variable_different_p (var1, var2, false))
4070 dataflow_set_different_value = true;
4072 if (dump_file && (dump_flags & TDF_DETAILS))
4074 fprintf (dump_file, "dataflow difference found: old and new follow:\n");
4079 /* Stop traversing the hash table. */
4083 /* Continue traversing the hash table. */
4087 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4090 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
/* Shared hash table means trivially equal.  */
4092 if (old_set->vars == new_set->vars)
/* Different element counts mean trivially different.  */
4095 if (htab_elements (shared_hash_htab (old_set->vars))
4096 != htab_elements (shared_hash_htab (new_set->vars)))
/* Otherwise compare entry by entry via the global flag.  */
4099 dataflow_set_different_value = false;
4101 htab_traverse (shared_hash_htab (old_set->vars), dataflow_set_different_1,
4102 shared_hash_htab (new_set->vars));
4103 /* No need to traverse the second hashtab, if both have the same number
4104 of elements and the second one had all entries found in the first one,
4105 then it can't have any extra entries. */
4106 return dataflow_set_different_value;
4109 /* Free the contents of dataflow set SET. */
4112 dataflow_set_destroy (dataflow_set *set)
/* Release the per-register attribute lists, then drop a reference
   on the (possibly shared) variable table.  */
4116 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4117 attrs_list_clear (&set->regs[i]);
4119 shared_hash_destroy (set->vars);
4123 /* Return true if RTL X contains a SYMBOL_REF. */
/* Recursive walk over X's operands using the RTX format string:
   'e' operands are sub-expressions, 'E' operands are vectors.  */
4126 contains_symbol_ref (rtx x)
4135 code = GET_CODE (x);
4136 if (code == SYMBOL_REF)
4139 fmt = GET_RTX_FORMAT (code);
4140 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4144 if (contains_symbol_ref (XEXP (x, i)))
4147 else if (fmt[i] == 'E')
4150 for (j = 0; j < XVECLEN (x, i); j++)
4151 if (contains_symbol_ref (XVECEXP (x, i, j)))
4159 /* Shall EXPR be tracked? */
/* Decide whether variable-location tracking should be attempted for
   tree EXPR.  If NEED_RTL, EXPR must also have RTL assigned.  A series
   of early filters rejects unnamed decls, ignored decls, globals, and
   MEM-based decls that are too large or symbol-based.  */
4162 track_expr_p (tree expr, bool need_rtl)
/* Debug-expr decls are tracked iff they already carry RTL.  */
4167 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4168 return DECL_RTL_SET_P (expr);
4170 /* If EXPR is not a parameter or a variable do not track it. */
4171 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4174 /* It also must have a name... */
4175 if (!DECL_NAME (expr))
4178 /* ... and a RTL assigned to it. */
4179 decl_rtl = DECL_RTL_IF_SET (expr);
4180 if (!decl_rtl && need_rtl)
4183 /* If this expression is really a debug alias of some other declaration, we
4184 don't need to track this expression if the ultimate declaration is
/* Follow the DECL_DEBUG_EXPR link to the underlying declaration.  */
4187 if (DECL_DEBUG_EXPR_IS_FROM (realdecl) && DECL_DEBUG_EXPR (realdecl))
4189 realdecl = DECL_DEBUG_EXPR (realdecl);
4190 /* ??? We don't yet know how to emit DW_OP_piece for variable
4191 that has been SRA'ed. */
4192 if (!DECL_P (realdecl))
4196 /* Do not track EXPR if REALDECL it should be ignored for debugging
4198 if (DECL_IGNORED_P (realdecl))
4201 /* Do not track global variables until we are able to emit correct location
4203 if (TREE_STATIC (realdecl))
4206 /* When the EXPR is a DECL for alias of some variable (see example)
4207 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
4208 DECL_RTL contains SYMBOL_REF.
4211 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4214 if (decl_rtl && MEM_P (decl_rtl)
4215 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4218 /* If RTX is a memory it should not be very large (because it would be
4219 an array or struct). */
4220 if (decl_rtl && MEM_P (decl_rtl))
4222 /* Do not track structures and arrays. */
4223 if (GET_MODE (decl_rtl) == BLKmode
4224 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
/* Reject memory decls wider than the per-variable part limit.  */
4226 if (MEM_SIZE (decl_rtl)
4227 && INTVAL (MEM_SIZE (decl_rtl)) > MAX_VAR_PARTS)
/* Passed all filters: clear the "changed" marks and track it.  */
4231 DECL_CHANGED (expr) = 0;
4232 DECL_CHANGED (realdecl) = 0;
4236 /* Determine whether a given LOC refers to the same variable part as
/* ... EXPR+OFFSET.  LOC must be a REG or MEM carrying decl/offset
   attributes; both decls are canonicalized through var_debug_decl
   before comparing.  */
4240 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4243 HOST_WIDE_INT offset2;
4245 if (! DECL_P (expr))
/* REG case: read decl and offset from the register attributes.  */
4250 expr2 = REG_EXPR (loc);
4251 offset2 = REG_OFFSET (loc);
4253 else if (MEM_P (loc))
/* MEM case: read decl and offset from the memory attributes.  */
4255 expr2 = MEM_EXPR (loc);
4256 offset2 = INT_MEM_OFFSET (loc);
4261 if (! expr2 || ! DECL_P (expr2))
/* Map both decls to their debug counterparts so aliases compare
   equal.  */
4264 expr = var_debug_decl (expr);
4265 expr2 = var_debug_decl (expr2);
4267 return (expr == expr2 && offset == offset2);
4270 /* LOC is a REG or MEM that we would like to track if possible.
4271 If EXPR is null, we don't know what expression LOC refers to,
4272 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4273 LOC is an lvalue register.
4275 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4276 is something we can track. When returning true, store the mode of
4277 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4278 from EXPR in *OFFSET_OUT (if nonnull). */
4281 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4282 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4284 enum machine_mode mode;
4286 if (expr == NULL || !track_expr_p (expr, true))
4289 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4290 whole subreg, but only the old inner part is really relevant. */
4291 mode = GET_MODE (loc);
4292 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4294 enum machine_mode pseudo_mode;
4296 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4297 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
/* Narrow to the original pseudo's mode; adjust OFFSET to point at
   the lowpart within the paradoxical subreg.  */
4299 offset += byte_lowpart_offset (pseudo_mode, mode);
4304 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4305 Do the same if we are storing to a register and EXPR occupies
4306 the whole of register LOC; in that case, the whole of EXPR is
4307 being changed. We exclude complex modes from the second case
4308 because the real and imaginary parts are represented as separate
4309 pseudo registers, even if the whole complex value fits into one
4311 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4313 && !COMPLEX_MODE_P (DECL_MODE (expr))
4314 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4315 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4317 mode = DECL_MODE (expr);
/* The offset must name a valid variable part.  */
4321 if (offset < 0 || offset >= MAX_VAR_PARTS)
4327 *offset_out = offset;
4331 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4332 want to track. When returning nonnull, make sure that the attributes
4333 on the returned value are updated. */
4336 var_lowpart (enum machine_mode mode, rtx loc)
4338 unsigned int offset, reg_offset, regno;
4340 if (!REG_P (loc) && !MEM_P (loc))
/* Already in the requested mode: nothing to do.  */
4343 if (GET_MODE (loc) == mode)
4346 offset = byte_lowpart_offset (mode, GET_MODE (loc));
/* MEM: re-address at the lowpart byte offset, keeping attributes.  */
4349 return adjust_address_nv (loc, mode, offset);
/* REG: compute the hard-register number of the lowpart word.  */
4351 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4352 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4354 return gen_rtx_REG_offset (loc, mode, regno, offset);
4357 /* Carry information about uses and stores while walking rtx. */
/* Shared context passed to the note_uses/note_stores callbacks
   below (count_uses, add_uses, add_stores, ...).  */
4359 struct count_use_info
4361 /* The insn where the RTX is. */
4364 /* The basic block where insn is. */
4367 /* The array of n_sets sets in the insn, as determined by cselib. */
4368 struct cselib_set *sets;
4371 /* True if we're counting stores, false otherwise. */
4375 /* Find a VALUE corresponding to X. */
4377 static inline cselib_val *
4378 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4384 /* This is called after uses are set up and before stores are
4385 processed bycselib, so it's safe to look up srcs, but not
4386 dsts. So we look up expressions that appear in srcs or in
4387 dest expressions, but we search the sets array for dests of
/* X is a set destination: return the value cselib computed for the
   corresponding source.  */
4391 for (i = 0; i < cui->n_sets; i++)
4392 if (cui->sets[i].dest == x)
4393 return cui->sets[i].src_elt;
/* Otherwise a plain (non-creating) cselib lookup is safe.  */
4396 return cselib_lookup (x, mode, 0);
4402 /* Replace all registers and addresses in an expression with VALUE
4403 expressions that map back to them, unless the expression is a
4404 register. If no mapping is or can be performed, returns NULL. */
4407 replace_expr_with_values (rtx loc)
/* NOTE(review): the REG_P case (returning NULL) sits on a line elided
   from this excerpt.  */
4411 else if (MEM_P (loc))
/* MEM: look up the address in the address space's address mode and
   substitute the VALUE rtx for it.  */
4413 enum machine_mode address_mode
4414 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
4415 cselib_val *addr = cselib_lookup (XEXP (loc, 0), address_mode, 0);
4417 return replace_equiv_address_nv (loc, addr->val_rtx);
/* Anything else: substitute values throughout the expression.  */
4422 return cselib_subst_to_values (loc);
4425 /* Determine what kind of micro operation to choose for a USE. Return
4426 MO_CLOBBER if no micro operation is to be generated. */
/* If MODEP is nonnull it receives the tracked lowpart mode.  CUI
   non-null with sets means we are in the cselib (value-tracking)
   path and may return MO_VAL_* types.  */
4428 static enum micro_operation_type
4429 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
4433 if (cui && cui->sets)
4435 if (GET_CODE (loc) == VAR_LOCATION)
4437 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
4439 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
4440 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
4442 /* ??? flag_float_store and volatile mems are never
4443 given values, but we could in theory use them for
4445 gcc_assert (val || 1);
4452 if (REG_P (loc) || MEM_P (loc))
4455 *modep = GET_MODE (loc);
/* In the store pass, a MEM use whose address also has a value is
   interesting; see the elided condition context.  */
4459 || (find_use_val (loc, GET_MODE (loc), cui)
4460 && cselib_lookup (XEXP (loc, 0), GET_MODE (loc), 0)))
4465 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4467 if (val && !cselib_preserved_value_p (val))
/* Non-value path: classify plain REG/MEM uses.  */
4475 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
4477 expr = REG_EXPR (loc);
4480 return MO_USE_NO_VAR;
4481 else if (target_for_debug_bind (var_debug_decl (expr)))
4483 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
4484 false, modep, NULL))
4487 return MO_USE_NO_VAR;
4489 else if (MEM_P (loc))
4491 expr = MEM_EXPR (loc);
4495 else if (target_for_debug_bind (var_debug_decl (expr)))
4497 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
4498 false, modep, NULL))
4507 /* Log to OUT information about micro-operation MOPT involving X in
/* ... basic block BB / insn INSN: one header line with block, op
   index, insn uid and op-type name, then the rtx itself.  */
4511 log_op_type (rtx x, basic_block bb, rtx insn,
4512 enum micro_operation_type mopt, FILE *out)
4514 fprintf (out, "bb %i op %i insn %i %s ",
4515 bb->index, VTI (bb)->n_mos - 1,
4516 INSN_UID (insn), micro_operation_type_name[mopt]);
4517 print_inline_rtx (out, x, 2);
4521 /* Count uses (register and memory references) LOC which will be tracked.
4522 INSN is instruction which the LOC is part of. */
/* for_each_rtx callback: only counts micro operations (bumps
   VTI (bb)->n_mos) and preserves cselib values; the actual micro ops
   are built later by add_uses/add_stores.  */
4525 count_uses (rtx *ploc, void *cuip)
4528 struct count_use_info *cui = (struct count_use_info *) cuip;
4529 enum micro_operation_type mopt = use_type (loc, cui, NULL);
4531 if (mopt != MO_CLOBBER)
4534 enum machine_mode mode = GET_MODE (loc);
4539 loc = PAT_VAR_LOCATION_LOC (loc);
4540 if (VAR_LOC_UNKNOWN_P (loc))
/* A MEM whose address is neither REG nor MEM may need an extra
   MO_VAL_USE op for the address value.  */
4547 && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
4549 enum machine_mode address_mode
4550 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
4551 val = cselib_lookup (XEXP (loc, 0), address_mode, 0);
4553 if (val && !cselib_preserved_value_p (val))
4555 VTI (cui->bb)->n_mos++;
4556 cselib_preserve_value (val);
4557 if (dump_file && (dump_flags & TDF_DETAILS))
4558 log_op_type (XEXP (loc, 0), cui->bb, cui->insn,
4559 MO_VAL_USE, dump_file);
4563 val = find_use_val (loc, mode, cui);
/* COND_EXEC stores also read the old destination value; count a
   MO_VAL_USE for it when it is not yet preserved.  */
4566 if (mopt == MO_VAL_SET
4567 && GET_CODE (PATTERN (cui->insn)) == COND_EXEC
4570 && (use_type (loc, NULL, NULL) == MO_USE
4573 cselib_val *oval = cselib_lookup (loc, GET_MODE (loc), 0);
4575 gcc_assert (oval != val);
4576 gcc_assert (REG_P (loc) || MEM_P (loc));
4578 if (!cselib_preserved_value_p (oval))
4580 VTI (cui->bb)->n_mos++;
4581 cselib_preserve_value (oval);
4582 if (dump_file && (dump_flags & TDF_DETAILS))
4583 log_op_type (loc, cui->bb, cui->insn,
4584 MO_VAL_USE, dump_file);
4588 cselib_preserve_value (val);
4591 gcc_assert (mopt == MO_VAL_LOC
4592 || (mopt == MO_VAL_SET && cui->store_p));
/* Count the micro operation for LOC itself.  */
4600 VTI (cui->bb)->n_mos++;
4601 if (dump_file && (dump_flags & TDF_DETAILS))
4602 log_op_type (loc, cui->bb, cui->insn, mopt, dump_file);
4608 /* Helper function for finding all uses of REG/MEM in X in CUI's
/* note_uses callback: recurse into X with count_uses.  */
4612 count_uses_1 (rtx *x, void *cui)
4614 for_each_rtx (x, count_uses, cui);
4617 /* Count stores (register and memory references) LOC which will be
4618 tracked. CUI is a count_use_info object containing the instruction
4619 which the LOC is part of. */
/* note_stores callback; counting stores reuses count_uses (CUI's
   store_p flag distinguishes the pass).  */
4622 count_stores (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *cui)
4624 count_uses (&loc, cui);
4627 /* Callback for cselib_record_sets_hook, that counts how many micro
4628 operations it takes for uses and stores in an insn after
4629 cselib_record_sets has analyzed the sets in an insn, but before it
4630 modifies the stored values in the internal tables, unless
4631 cselib_record_sets doesn't call it directly (perhaps because we're
4632 not doing cselib in the first place, in which case sets and n_sets
4636 count_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
4638 basic_block bb = BLOCK_FOR_INSN (insn);
4639 struct count_use_info cui;
/* Record that cselib invoked us so the caller need not do the
   counting itself.  */
4641 cselib_hook_called = true;
4646 cui.n_sets = n_sets;
/* First count the uses, then the stores, via the two walkers.  */
4648 cui.store_p = false;
4649 note_uses (&PATTERN (insn), count_uses_1, &cui);
4651 note_stores (PATTERN (insn), count_stores, &cui);
4654 /* Tell whether the CONCAT used to holds a VALUE and its location
4655 needs value resolution, i.e., an attempt of mapping the location
4656 back to other incoming values. */
4657 #define VAL_NEEDS_RESOLUTION(x) \
4658 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
4659 /* Whether the location in the CONCAT is a tracked expression, that
4660 should also be handled like a MO_USE. */
4661 #define VAL_HOLDS_TRACK_EXPR(x) \
4662 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
4663 /* Whether the location in the CONCAT should be handled like a MO_COPY
4665 #define VAL_EXPR_IS_COPIED(x) \
4666 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
4667 /* Whether the location in the CONCAT should be handled like a
4668 MO_CLOBBER as well. */
/* These four macros repurpose spare rtx flag bits (volatil, used,
   jump, unchanging) on MO_VAL_* CONCATs; RTL_FLAG_CHECK1 asserts the
   rtx really is a CONCAT in checking builds.  */
4669 #define VAL_EXPR_IS_CLOBBERED(x) \
4670 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
4672 /* Add uses (register and memory references) LOC which will be tracked
4673 to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
/* for_each_rtx callback that actually builds the micro operations
   that count_uses only counted; the two must stay in sync.  */
4676 add_uses (rtx *ploc, void *data)
4679 enum machine_mode mode = VOIDmode;
4680 struct count_use_info *cui = (struct count_use_info *)data;
4681 enum micro_operation_type type = use_type (loc, cui, &mode);
4683 if (type != MO_CLOBBER)
4685 basic_block bb = cui->bb;
4686 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4689 mo->u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
4690 mo->insn = cui->insn;
4692 if (type == MO_VAL_LOC)
/* Debug insn VAR_LOCATION: bind the value of its location
   expression to the variable.  */
4695 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
4698 gcc_assert (cui->sets);
/* If the location is a MEM with a non-trivial address, emit an
   extra MO_VAL_USE for the address value first.  */
4701 && !REG_P (XEXP (vloc, 0)) && !MEM_P (XEXP (vloc, 0)))
4704 enum machine_mode address_mode
4705 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4707 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4709 if (val && !cselib_preserved_value_p (val))
4711 micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
4712 mon->type = mo->type;
4713 mon->u.loc = mo->u.loc;
4714 mon->insn = mo->insn;
4715 cselib_preserve_value (val);
4716 mo->type = MO_VAL_USE;
4717 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4718 mo->u.loc = gen_rtx_CONCAT (address_mode,
4719 val->val_rtx, mloc);
4720 if (dump_file && (dump_flags & TDF_DETAILS))
4721 log_op_type (mo->u.loc, cui->bb, cui->insn,
4722 mo->type, dump_file);
4727 if (!VAR_LOC_UNKNOWN_P (vloc)
4728 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
4730 enum machine_mode mode2;
4731 enum micro_operation_type type2;
4732 rtx nloc = replace_expr_with_values (vloc);
/* Shallow-copy before rewriting so the insn's own pattern is
   left untouched.  */
4736 oloc = shallow_copy_rtx (oloc);
4737 PAT_VAR_LOCATION_LOC (oloc) = nloc;
4740 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
4742 type2 = use_type (vloc, 0, &mode2);
4744 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4745 || type2 == MO_CLOBBER);
4747 if (type2 == MO_CLOBBER
4748 && !cselib_preserved_value_p (val))
4750 VAL_NEEDS_RESOLUTION (oloc) = 1;
4751 cselib_preserve_value (val);
4754 else if (!VAR_LOC_UNKNOWN_P (vloc))
/* No usable value: mark the location as unknown.  */
4756 oloc = shallow_copy_rtx (oloc);
4757 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
4762 else if (type == MO_VAL_USE)
4764 enum machine_mode mode2 = VOIDmode;
4765 enum micro_operation_type type2;
4766 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4767 rtx vloc, oloc = loc, nloc;
4769 gcc_assert (cui->sets);
/* Same address-value handling as above for MEM uses.  */
4772 && !REG_P (XEXP (oloc, 0)) && !MEM_P (XEXP (oloc, 0)))
4775 enum machine_mode address_mode
4776 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4778 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4780 if (val && !cselib_preserved_value_p (val))
4782 micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
4783 mon->type = mo->type;
4784 mon->u.loc = mo->u.loc;
4785 mon->insn = mo->insn;
4786 cselib_preserve_value (val);
4787 mo->type = MO_VAL_USE;
4788 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4789 mo->u.loc = gen_rtx_CONCAT (address_mode,
4790 val->val_rtx, mloc);
4791 mo->insn = cui->insn;
4792 if (dump_file && (dump_flags & TDF_DETAILS))
4793 log_op_type (mo->u.loc, cui->bb, cui->insn,
4794 mo->type, dump_file);
4799 type2 = use_type (loc, 0, &mode2);
4801 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4802 || type2 == MO_CLOBBER);
4804 if (type2 == MO_USE)
4805 vloc = var_lowpart (mode2, loc);
4809 /* The loc of a MO_VAL_USE may have two forms:
4811 (concat val src): val is at src, a value-based
4814 (concat (concat val use) src): same as above, with use as
4815 the MO_USE tracked value, if it differs from src.
4819 nloc = replace_expr_with_values (loc);
4824 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
4826 oloc = val->val_rtx;
4828 mo->u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
4830 if (type2 == MO_USE)
4831 VAL_HOLDS_TRACK_EXPR (mo->u.loc) = 1;
4832 if (!cselib_preserved_value_p (val))
4834 VAL_NEEDS_RESOLUTION (mo->u.loc) = 1;
4835 cselib_preserve_value (val);
4839 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
4841 if (dump_file && (dump_flags & TDF_DETAILS))
4842 log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
4848 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
/* note_uses callback: recurse into X with add_uses.  */
4851 add_uses_1 (rtx *x, void *cui)
4853 for_each_rtx (x, add_uses, cui);
4856 /* Add stores (register and memory references) LOC which will be tracked
4857 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
4858 CUIP->insn is instruction which the LOC is part of. */
/* note_stores callback building MO_SET/MO_COPY/MO_CLOBBER and, in the
   cselib path, MO_VAL_SET micro operations.  Must stay in sync with
   the counting done in count_uses.  */
4861 add_stores (rtx loc, const_rtx expr, void *cuip)
4863 enum machine_mode mode = VOIDmode, mode2;
4864 struct count_use_info *cui = (struct count_use_info *)cuip;
4865 basic_block bb = cui->bb;
4866 micro_operation *mo;
4867 rtx oloc = loc, nloc, src = NULL;
4868 enum micro_operation_type type = use_type (loc, cui, &mode);
4869 bool track_p = false;
4871 bool resolve, preserve;
4873 if (type == MO_CLOBBER)
/* REG destination.  */
4880 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4882 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
4883 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
4884 || GET_CODE (expr) == CLOBBER)
4886 mo->type = MO_CLOBBER;
/* Tracked register set: narrow destination and source to the
   tracked lowpart mode.  */
4891 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
4892 src = var_lowpart (mode2, SET_SRC (expr));
4893 loc = var_lowpart (mode2, loc);
4902 rtx xexpr = CONST_CAST_RTX (expr);
4904 if (SET_SRC (expr) != src)
4905 xexpr = gen_rtx_SET (VOIDmode, loc, src);
/* Same variable part on both sides makes this a MO_COPY rather
   than a fresh MO_SET.  */
4906 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
4913 mo->insn = cui->insn;
4915 else if (MEM_P (loc)
4916 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
4919 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
/* MEM destination with a non-trivial address: emit a MO_VAL_USE
   for the address value first.  */
4921 if (MEM_P (loc) && type == MO_VAL_SET
4922 && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
4925 enum machine_mode address_mode
4926 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4927 cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4929 if (val && !cselib_preserved_value_p (val))
4931 cselib_preserve_value (val);
4932 mo->type = MO_VAL_USE;
4933 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4934 mo->u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
4935 mo->insn = cui->insn;
4936 if (dump_file && (dump_flags & TDF_DETAILS))
4937 log_op_type (mo->u.loc, cui->bb, cui->insn,
4938 mo->type, dump_file);
4939 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4943 if (GET_CODE (expr) == CLOBBER || !track_p)
4945 mo->type = MO_CLOBBER;
4946 mo->u.loc = track_p ? var_lowpart (mode2, loc) : loc;
4950 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
4951 src = var_lowpart (mode2, SET_SRC (expr));
4952 loc = var_lowpart (mode2, loc);
4961 rtx xexpr = CONST_CAST_RTX (expr);
4963 if (SET_SRC (expr) != src)
4964 xexpr = gen_rtx_SET (VOIDmode, loc, src);
4965 if (same_variable_part_p (SET_SRC (xexpr),
4967 INT_MEM_OFFSET (loc)))
4974 mo->insn = cui->insn;
/* Value-tracking epilogue: wrap the micro operation with the
   VALUE for the stored expression.  */
4979 if (type != MO_VAL_SET)
4980 goto log_and_return;
4982 v = find_use_val (oloc, mode, cui);
4985 goto log_and_return;
4987 resolve = preserve = !cselib_preserved_value_p (v);
4989 nloc = replace_expr_with_values (oloc);
/* COND_EXEC: the destination's previous value is also used; emit
   an extra MO_VAL_USE for it.  */
4993 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
4995 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0);
4997 gcc_assert (oval != v);
4998 gcc_assert (REG_P (oloc) || MEM_P (oloc));
5000 if (!cselib_preserved_value_p (oval))
5002 micro_operation *nmo = VTI (bb)->mos + VTI (bb)->n_mos++;
5004 cselib_preserve_value (oval);
5006 nmo->type = MO_VAL_USE;
5007 nmo->u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5008 VAL_NEEDS_RESOLUTION (nmo->u.loc) = 1;
5009 nmo->insn = mo->insn;
5011 if (dump_file && (dump_flags & TDF_DETAILS))
5012 log_op_type (nmo->u.loc, cui->bb, cui->insn,
5013 nmo->type, dump_file);
5018 else if (resolve && GET_CODE (mo->u.loc) == SET)
5020 nloc = replace_expr_with_values (SET_SRC (expr));
5022 /* Avoid the mode mismatch between oexpr and expr. */
5023 if (!nloc && mode != mode2)
5025 nloc = SET_SRC (expr);
5026 gcc_assert (oloc == SET_DEST (expr));
5030 oloc = gen_rtx_SET (GET_MODE (mo->u.loc), oloc, nloc);
5033 if (oloc == SET_DEST (mo->u.loc))
5034 /* No point in duplicating. */
5036 if (!REG_P (SET_SRC (mo->u.loc)))
5042 if (GET_CODE (mo->u.loc) == SET
5043 && oloc == SET_DEST (mo->u.loc))
5044 /* No point in duplicating. */
5050 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5052 if (mo->u.loc != oloc)
5053 loc = gen_rtx_CONCAT (GET_MODE (mo->u.loc), loc, mo->u.loc);
5055 /* The loc of a MO_VAL_SET may have various forms:
5057 (concat val dst): dst now holds val
5059 (concat val (set dst src)): dst now holds val, copied from src
5061 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5062 after replacing mems and non-top-level regs with values.
5064 (concat (concat val dstv) (set dst src)): dst now holds val,
5065 copied from src. dstv is a value-based representation of dst, if
5066 it differs from dst. If resolution is needed, src is a REG, and
5067 its mode is the same as that of val.
5069 (concat (concat val (set dstv srcv)) (set dst src)): src
5070 copied to dst, holding val. dstv and srcv are value-based
5071 representations of dst and src, respectively.
/* Translate the plain micro op's type into the CONCAT flag bits
   consumed by compute_bb_dataflow / emit passes.  */
5078 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5081 VAL_NEEDS_RESOLUTION (loc) = resolve;
5082 cselib_preserve_value (v);
5084 if (mo->type == MO_CLOBBER)
5085 VAL_EXPR_IS_CLOBBERED (loc) = 1;
5086 if (mo->type == MO_COPY)
5087 VAL_EXPR_IS_COPIED (loc) = 1;
5089 mo->type = MO_VAL_SET;
5092 if (dump_file && (dump_flags & TDF_DETAILS))
5093 log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
5096 /* Callback for cselib_record_sets_hook, that records as micro
5097 operations uses and stores in an insn after cselib_record_sets has
5098 analyzed the sets in an insn, but before it modifies the stored
5099 values in the internal tables, unless cselib_record_sets doesn't
5100 call it directly (perhaps because we're not doing cselib in the
5101 first place, in which case sets and n_sets will be 0). */
5104 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
5106 basic_block bb = BLOCK_FOR_INSN (insn);
5108 struct count_use_info cui;
5110 cselib_hook_called = true;
5115 cui.n_sets = n_sets;
5117 n1 = VTI (bb)->n_mos;
5118 cui.store_p = false;
5119 note_uses (&PATTERN (insn), add_uses_1, &cui);
5120 n2 = VTI (bb)->n_mos - 1;
5122 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
/* Two-pointer partition over the just-added ops: advance N1 past
   MO_USEs, retreat N2 past non-MO_USEs, swap, repeat.  */
5126 while (n1 < n2 && VTI (bb)->mos[n1].type == MO_USE)
5128 while (n1 < n2 && VTI (bb)->mos[n2].type != MO_USE)
5134 sw = VTI (bb)->mos[n1];
5135 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5136 VTI (bb)->mos[n2] = sw;
5140 n2 = VTI (bb)->n_mos - 1;
/* Second partition: push MO_VAL_LOCs to the end.  */
5144 while (n1 < n2 && VTI (bb)->mos[n1].type != MO_VAL_LOC)
5146 while (n1 < n2 && VTI (bb)->mos[n2].type == MO_VAL_LOC)
5152 sw = VTI (bb)->mos[n1];
5153 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5154 VTI (bb)->mos[n2] = sw;
/* Add the per-call micro operation (NOTE(review): the condition
   guarding this is on an elided line — confirm it is CALL_P).  */
5160 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
5165 if (dump_file && (dump_flags & TDF_DETAILS))
5166 log_op_type (PATTERN (insn), bb, insn, mo->type, dump_file);
5169 n1 = VTI (bb)->n_mos;
5170 /* This will record NEXT_INSN (insn), such that we can
5171 insert notes before it without worrying about any
5172 notes that MO_USEs might emit after the insn. */
5174 note_stores (PATTERN (insn), add_stores, &cui);
5175 n2 = VTI (bb)->n_mos - 1;
5177 /* Order the MO_CLOBBERs to be before MO_SETs. */
5180 while (n1 < n2 && VTI (bb)->mos[n1].type == MO_CLOBBER)
5182 while (n1 < n2 && VTI (bb)->mos[n2].type != MO_CLOBBER)
5188 sw = VTI (bb)->mos[n1];
5189 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5190 VTI (bb)->mos[n2] = sw;
/* Return the initialization status in dataflow set IN of the variable
   behind SRC (a REG or MEM), defaulting to UNINITIALIZED — or to
   INITIALIZED when uninitialized tracking is disabled.  */
5195 static enum var_init_status
5196 find_src_status (dataflow_set *in, rtx src)
5198 tree decl = NULL_TREE;
5199 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
5201 if (! flag_var_tracking_uninit)
5202 status = VAR_INIT_STATUS_INITIALIZED;
5204 if (src && REG_P (src))
5205 decl = var_debug_decl (REG_EXPR (src));
5206 else if (src && MEM_P (src))
5207 decl = var_debug_decl (MEM_EXPR (src));
5210 status = get_init_value (in, src, dv_from_decl (decl));
5215 /* SRC is the source of an assignment. Use SET to try to find what
5216 was ultimately assigned to SRC. Return that value if known,
5217 otherwise return SRC itself. */
5220 find_src_set_src (dataflow_set *set, rtx src)
5222 tree decl = NULL_TREE; /* The variable being copied around. */
5223 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
5225 location_chain nextp;
5229 if (src && REG_P (src))
5230 decl = var_debug_decl (REG_EXPR (src));
5231 else if (src && MEM_P (src))
5232 decl = var_debug_decl (MEM_EXPR (src));
5236 decl_or_value dv = dv_from_decl (decl);
5238 var = shared_hash_find (set->vars, dv);
/* Scan the variable's location chains for an entry matching SRC
   and report the set_src recorded there.  */
5242 for (i = 0; i < var->n_var_parts && !found; i++)
5243 for (nextp = var->var_part[i].loc_chain; nextp && !found;
5244 nextp = nextp->next)
5245 if (rtx_equal_p (nextp->loc, src))
5247 set_src = nextp->set_src;
5257 /* Compute the changes of variable locations in the basic block BB. */
/* Transfer function of the dataflow iteration: OUT := IN transformed
   by BB's micro operations, returning whether OUT changed.  */
5260 compute_bb_dataflow (basic_block bb)
5264 dataflow_set old_out;
5265 dataflow_set *in = &VTI (bb)->in;
5266 dataflow_set *out = &VTI (bb)->out;
/* Keep a snapshot of the previous OUT to detect changes at the end.  */
5268 dataflow_set_init (&old_out);
5269 dataflow_set_copy (&old_out, out);
5270 dataflow_set_copy (out, in);
5272 n = VTI (bb)->n_mos;
5273 for (i = 0; i < n; i++)
5275 rtx insn = VTI (bb)->mos[i].insn;
5277 switch (VTI (bb)->mos[i].type)
/* MO_CALL: call-clobbered locations become invalid.  */
5280 dataflow_set_clear_at_call (out);
5285 rtx loc = VTI (bb)->mos[i].u.loc;
5288 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5289 else if (MEM_P (loc))
5290 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
/* MO_VAL_LOC: bind a value (or unknown) to a variable from a
   debug insn's VAR_LOCATION.  */
5296 rtx loc = VTI (bb)->mos[i].u.loc;
5300 if (GET_CODE (loc) == CONCAT)
5302 val = XEXP (loc, 0);
5303 vloc = XEXP (loc, 1);
5311 var = PAT_VAR_LOCATION_DECL (vloc);
5313 clobber_variable_part (out, NULL_RTX,
5314 dv_from_decl (var), 0, NULL_RTX);
5317 if (VAL_NEEDS_RESOLUTION (loc))
5318 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
5319 set_variable_part (out, val, dv_from_decl (var), 0,
5320 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
/* MO_VAL_USE: unpack (concat val use) or
   (concat (concat val use) src) — see add_uses.  */
5328 rtx loc = VTI (bb)->mos[i].u.loc;
5329 rtx val, vloc, uloc;
5331 vloc = uloc = XEXP (loc, 1);
5332 val = XEXP (loc, 0);
5334 if (GET_CODE (val) == CONCAT)
5336 uloc = XEXP (val, 1);
5337 val = XEXP (val, 0);
5340 if (VAL_NEEDS_RESOLUTION (loc))
5341 val_resolve (out, val, vloc, insn);
5343 val_store (out, val, uloc, insn, false);
5345 if (VAL_HOLDS_TRACK_EXPR (loc))
5347 if (GET_CODE (uloc) == REG)
5348 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5350 else if (GET_CODE (uloc) == MEM)
5351 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
/* MO_VAL_SET: unpack the CONCAT forms documented in add_stores
   and update values and tracked expressions accordingly.  */
5359 rtx loc = VTI (bb)->mos[i].u.loc;
5360 rtx val, vloc, uloc;
5362 vloc = uloc = XEXP (loc, 1);
5363 val = XEXP (loc, 0);
5365 if (GET_CODE (val) == CONCAT)
5367 vloc = XEXP (val, 1);
5368 val = XEXP (val, 0);
5371 if (GET_CODE (vloc) == SET)
5373 rtx vsrc = SET_SRC (vloc);
5375 gcc_assert (val != vsrc);
5376 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
5378 vloc = SET_DEST (vloc);
5380 if (VAL_NEEDS_RESOLUTION (loc))
5381 val_resolve (out, val, vsrc, insn);
5383 else if (VAL_NEEDS_RESOLUTION (loc))
5385 gcc_assert (GET_CODE (uloc) == SET
5386 && GET_CODE (SET_SRC (uloc)) == REG);
5387 val_resolve (out, val, SET_SRC (uloc), insn);
5390 if (VAL_HOLDS_TRACK_EXPR (loc))
5392 if (VAL_EXPR_IS_CLOBBERED (loc))
5395 var_reg_delete (out, uloc, true);
5396 else if (MEM_P (uloc))
5397 var_mem_delete (out, uloc, true);
5401 bool copied_p = VAL_EXPR_IS_COPIED (loc);
5403 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
5405 if (GET_CODE (uloc) == SET)
5407 set_src = SET_SRC (uloc);
5408 uloc = SET_DEST (uloc);
/* For copies, look up the ultimate source's init status in
   IN first, falling back to OUT.  */
5413 if (flag_var_tracking_uninit)
5415 status = find_src_status (in, set_src);
5417 if (status == VAR_INIT_STATUS_UNKNOWN)
5418 status = find_src_status (out, set_src);
5421 set_src = find_src_set_src (in, set_src);
5425 var_reg_delete_and_set (out, uloc, !copied_p,
5427 else if (MEM_P (uloc))
5428 var_mem_delete_and_set (out, uloc, !copied_p,
5432 else if (REG_P (uloc))
5433 var_regno_delete (out, REGNO (uloc));
5435 val_store (out, val, vloc, insn, true);
/* MO_SET: a plain (possibly (set dst src)) assignment.  */
5441 rtx loc = VTI (bb)->mos[i].u.loc;
5444 if (GET_CODE (loc) == SET)
5446 set_src = SET_SRC (loc);
5447 loc = SET_DEST (loc);
5451 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5453 else if (MEM_P (loc))
5454 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
/* MO_COPY: variable keeps its old locations too.  */
5461 rtx loc = VTI (bb)->mos[i].u.loc;
5462 enum var_init_status src_status;
5465 if (GET_CODE (loc) == SET)
5467 set_src = SET_SRC (loc);
5468 loc = SET_DEST (loc);
5471 if (! flag_var_tracking_uninit)
5472 src_status = VAR_INIT_STATUS_INITIALIZED;
5475 src_status = find_src_status (in, set_src);
5477 if (src_status == VAR_INIT_STATUS_UNKNOWN)
5478 src_status = find_src_status (out, set_src);
5481 set_src = find_src_set_src (in, set_src);
5484 var_reg_delete_and_set (out, loc, false, src_status, set_src);
5485 else if (MEM_P (loc))
5486 var_mem_delete_and_set (out, loc, false, src_status, set_src);
/* MO_USE_NO_VAR / MO_CLOBBER: drop the location (the bool flag
   distinguishes keeping vs. clobbering other parts).  */
5492 rtx loc = VTI (bb)->mos[i].u.loc;
5495 var_reg_delete (out, loc, false);
5496 else if (MEM_P (loc))
5497 var_mem_delete (out, loc, false);
5503 rtx loc = VTI (bb)->mos[i].u.loc;
5506 var_reg_delete (out, loc, true);
5507 else if (MEM_P (loc))
5508 var_mem_delete (out, loc, true);
/* MO_ADJUST: track the running stack pointer offset.  */
5513 out->stack_adjust += VTI (bb)->mos[i].u.adjust;
5518 if (MAY_HAVE_DEBUG_INSNS)
/* Canonicalize value equivalences so set comparison is stable.  */
5520 dataflow_set_equiv_regs (out);
5521 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
5523 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
5526 htab_traverse (shared_hash_htab (out->vars),
5527 canonicalize_loc_order_check, out);
5530 changed = dataflow_set_different (&old_out, out);
5531 dataflow_set_destroy (&old_out);
5535 /* Find the locations of variables in the whole function. */
5538 vt_find_locations (void)
/* Two fibonacci heaps, swapped between iteration rounds, keyed by the
   reverse-completion order of the basic blocks so cheaper-to-converge
   blocks are processed first.  */
5540 fibheap_t worklist, pending, fibheap_swap;
5541 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
5549 /* Compute reverse completion order of depth first search of the CFG
5550 so that the data-flow runs faster. */
5551 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
5552 bb_order = XNEWVEC (int, last_basic_block);
5553 pre_and_rev_post_order_compute (NULL, rc_order, false);
/* Invert the rc_order permutation: bb_order maps a block index to its
   priority in the worklist heaps.  */
5554 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
5555 bb_order[rc_order[i]] = i;
5558 worklist = fibheap_new ();
5559 pending = fibheap_new ();
5560 visited = sbitmap_alloc (last_basic_block);
5561 in_worklist = sbitmap_alloc (last_basic_block);
5562 in_pending = sbitmap_alloc (last_basic_block);
5563 sbitmap_zero (in_worklist);
/* Seed the pending heap with every block; all start in "pending".  */
5566 fibheap_insert (pending, bb_order[bb->index], bb);
5567 sbitmap_ones (in_pending);
/* Outer loop: one round per pass over the not-yet-converged blocks.
   PENDING becomes the WORKLIST for this round and an empty heap
   becomes the next PENDING.  */
5569 while (!fibheap_empty (pending))
5571 fibheap_swap = pending;
5573 worklist = fibheap_swap;
5574 sbitmap_swap = in_pending;
5575 in_pending = in_worklist;
5576 in_worklist = sbitmap_swap;
5578 sbitmap_zero (visited);
5580 while (!fibheap_empty (worklist))
5582 bb = (basic_block) fibheap_extract_min (worklist);
5583 RESET_BIT (in_worklist, bb->index);
/* Process each block at most once per round.  */
5584 if (!TEST_BIT (visited, bb->index))
5588 int oldinsz, oldoutsz;
5590 SET_BIT (visited, bb->index);
/* Bookkeeping of hash-table sizes, used only for the dump below.  */
5592 if (dump_file && VTI (bb)->in.vars)
5595 -= htab_size (shared_hash_htab (VTI (bb)->in.vars))
5596 + htab_size (shared_hash_htab (VTI (bb)->out.vars));
5598 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
5600 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
5603 oldinsz = oldoutsz = 0;
5605 if (MAY_HAVE_DEBUG_INSNS)
5607 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
5608 bool first = true, adjust = false;
5610 /* Calculate the IN set as the intersection of
5611 predecessor OUT sets. */
5613 dataflow_set_clear (in);
5614 dst_can_be_shared = true;
5616 FOR_EACH_EDGE (e, ei, bb->preds)
/* A not-yet-flooded predecessor must come later in the order,
   i.e. only back edges may reach an unflooded source.  */
5617 if (!VTI (e->src)->flooded)
5618 gcc_assert (bb_order[bb->index]
5619 <= bb_order[e->src->index]);
/* First predecessor: copy its OUT; further ones: merge.  */
5622 dataflow_set_copy (in, &VTI (e->src)->out);
5623 first_out = &VTI (e->src)->out;
5628 dataflow_set_merge (in, &VTI (e->src)->out);
5634 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
5636 /* Merge and merge_adjust should keep entries in
5638 htab_traverse (shared_hash_htab (in->vars),
5639 canonicalize_loc_order_check,
/* If the merge did not change anything, share the first
   predecessor's table instead of keeping a copy.  */
5642 if (dst_can_be_shared)
5644 shared_hash_destroy (in->vars);
5645 in->vars = shared_hash_copy (first_out->vars);
5649 VTI (bb)->flooded = true;
5653 /* Calculate the IN set as union of predecessor OUT sets. */
5654 dataflow_set_clear (&VTI (bb)->in);
5655 FOR_EACH_EDGE (e, ei, bb->preds)
5656 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
/* Propagate IN through the block's micro operations; CHANGED is
   true when OUT differs from the previous iteration.  */
5659 changed = compute_bb_dataflow (bb);
5661 htabsz += htab_size (shared_hash_htab (VTI (bb)->in.vars))
5662 + htab_size (shared_hash_htab (VTI (bb)->out.vars));
/* OUT changed: reschedule all successors.  */
5666 FOR_EACH_EDGE (e, ei, bb->succs)
5668 if (e->dest == EXIT_BLOCK_PTR)
/* Already visited this round -> defer to the next round via
   PENDING; otherwise add to the current round's WORKLIST.  */
5671 if (TEST_BIT (visited, e->dest->index))
5673 if (!TEST_BIT (in_pending, e->dest->index))
5675 /* Send E->DEST to next round. */
5676 SET_BIT (in_pending, e->dest->index);
5677 fibheap_insert (pending,
5678 bb_order[e->dest->index],
5682 else if (!TEST_BIT (in_worklist, e->dest->index))
5684 /* Add E->DEST to current round. */
5685 SET_BIT (in_worklist, e->dest->index);
5686 fibheap_insert (worklist, bb_order[e->dest->index],
5694 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
5696 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
5698 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
5700 (int)worklist->nodes, (int)pending->nodes, htabsz);
5702 if (dump_file && (dump_flags & TDF_DETAILS))
5704 fprintf (dump_file, "BB %i IN:\n", bb->index);
5705 dump_dataflow_set (&VTI (bb)->in);
5706 fprintf (dump_file, "BB %i OUT:\n", bb->index);
5707 dump_dataflow_set (&VTI (bb)->out);
/* With debug insns every block must have been flooded at least once.  */
5713 if (MAY_HAVE_DEBUG_INSNS)
5715 gcc_assert (VTI (bb)->flooded);
5717 VEC_free (rtx, heap, values_to_unmark);
/* Release the worklist machinery.  */
5719 fibheap_delete (worklist);
5720 fibheap_delete (pending);
5721 sbitmap_free (visited);
5722 sbitmap_free (in_worklist);
5723 sbitmap_free (in_pending);
5726 /* Print the content of the LIST to dump file. */
5729 dump_attrs_list (attrs list)
5731 for (; list; list = list->next)
/* Each node tracks either a decl or a VALUE rtx; print whichever
   this node holds, followed by its offset.  */
5733 if (dv_is_decl_p (list->dv))
5734 print_mem_expr (dump_file, dv_as_decl (list->dv))
5736 print_rtl_single (dump_file, dv_as_value (list->dv));
5737 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
5739 fprintf (dump_file, "\n");
5742 /* Print the information about variable *SLOT to dump file.
   htab_traverse callback; DATA is unused.  */
5745 dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
5747 variable var = (variable) *slot;
5751 /* Continue traversing the hash table. */
5755 /* Print the information about variable VAR to dump file. */
5758 dump_var (variable var)
5761 location_chain node;
/* A variable is identified by either a tree decl or a VALUE rtx.  */
5763 if (dv_is_decl_p (var->dv))
5765 const_tree decl = dv_as_decl (var->dv);
5767 if (DECL_NAME (decl))
5768 fprintf (dump_file, " name: %s",
5769 IDENTIFIER_POINTER (DECL_NAME (decl)));
/* Anonymous decl: fall back to its UID.  */
5771 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
5772 if (dump_flags & TDF_UID)
5773 fprintf (dump_file, " D.%u\n", DECL_UID (decl));
5775 fprintf (dump_file, "\n");
5779 fputc (' ', dump_file);
5780 print_rtl_single (dump_file, dv_as_value (var->dv));
/* Dump every part's offset and its chain of candidate locations.  */
5783 for (i = 0; i < var->n_var_parts; i++)
5785 fprintf (dump_file, " offset %ld\n",
5786 (long) var->var_part[i].offset);
5787 for (node = var->var_part[i].loc_chain; node; node = node->next)
5789 fprintf (dump_file, " ");
5790 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
5791 fprintf (dump_file, "[uninit]");
5792 print_rtl_single (dump_file, node->loc);
5797 /* Print the information about variables from hash table VARS to dump file. */
5800 dump_vars (htab_t vars)
/* Nothing is printed for an empty table, not even the header.  */
5802 if (htab_elements (vars) > 0)
5804 fprintf (dump_file, "Variables:\n");
5805 htab_traverse (vars, dump_var_slot, NULL);
5809 /* Print the dataflow set SET to dump file. */
5812 dump_dataflow_set (dataflow_set *set)
5816 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
/* Per-hard-register attribute lists, then the variable hash table.  */
5818 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5822 fprintf (dump_file, "Reg %d:", i);
5823 dump_attrs_list (set->regs[i]);
5826 dump_vars (shared_hash_htab (set->vars));
5827 fprintf (dump_file, "\n");
5830 /* Print the IN and OUT sets for each basic block to dump file. */
5833 dump_dataflow_sets (void)
5839 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
5840 fprintf (dump_file, "IN:\n");
5841 dump_dataflow_set (&VTI (bb)->in);
5842 fprintf (dump_file, "OUT:\n");
5843 dump_dataflow_set (&VTI (bb)->out);
5847 /* Add variable VAR to the hash table of changed variables and
5848 if it has no locations delete it from SET's hash table. */
5851 variable_was_changed (variable var, dataflow_set *set)
5853 hashval_t hash = dv_htab_hash (var->dv);
5859 /* Remember this decl or VALUE has been added to changed_variables. */
5860 set_dv_changed (var->dv, true);
5862 slot = htab_find_slot_with_hash (changed_variables,
/* A variable with no parts left is recorded as an "empty" copy so
   the note emitter knows its location list must be terminated.  */
5866 if (set && var->n_var_parts == 0)
5870 empty_var = (variable) pool_alloc (dv_pool (var->dv));
5871 empty_var->dv = var->dv;
5872 empty_var->refcount = 1;
5873 empty_var->n_var_parts = 0;
5886 if (var->n_var_parts == 0)
/* Remove the now-empty variable from SET, unsharing the hash
   table first if it is shared with another dataflow set.  */
5891 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
5894 if (shared_hash_shared (set->vars))
5895 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
5897 htab_clear_slot (shared_hash_htab (set->vars), slot);
5903 /* Look for the index in VAR->var_part corresponding to OFFSET.
5904 Return -1 if not found. If INSERTION_POINT is non-NULL, the
5905 referenced int will be set to the index that the part has or should
5906 have, if it should be inserted. */
5909 find_variable_location_part (variable var, HOST_WIDE_INT offset,
5910 int *insertion_point)
5914 /* Find the location part. */
/* Binary search: var_part[] is kept sorted by offset.  */
5916 high = var->n_var_parts;
5919 pos = (low + high) / 2;
5920 if (var->var_part[pos].offset < offset)
5927 if (insertion_point)
5928 *insertion_point = pos;
/* POS is the lower bound; it is a hit only on an exact offset match.  */
5930 if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
/* Add location LOC for the part of the variable DV at OFFSET in the
   slot SLOT of SET's variable hash table, keeping the location chain
   sorted (VALUEs by canonical order, then registers by REGNO, then
   MEMs by address) and unsharing shared variables before mutation.  */
5937 set_slot_part (dataflow_set *set, rtx loc, void **slot,
5938 decl_or_value dv, HOST_WIDE_INT offset,
5939 enum var_init_status initialized, rtx set_src)
5942 location_chain node, next;
5943 location_chain *nextp;
/* One-part variables (VALUEs and the like) always use offset 0.  */
5945 bool onepart = dv_onepart_p (dv);
5947 gcc_assert (offset == 0 || !onepart);
5948 gcc_assert (loc != dv_as_opaque (dv));
5950 var = (variable) *slot;
/* Without -fvar-tracking-uninit every location counts as initialized.  */
5952 if (! flag_var_tracking_uninit)
5953 initialized = VAR_INIT_STATUS_INITIALIZED;
5957 /* Create new variable information. */
5958 var = (variable) pool_alloc (dv_pool (dv));
5961 var->n_var_parts = 1;
5962 var->var_part[0].offset = offset;
5963 var->var_part[0].loc_chain = NULL;
5964 var->var_part[0].cur_loc = NULL;
5967 nextp = &var->var_part[0].loc_chain;
5968 if (emit_notes && dv_is_value_p (dv))
5969 add_cselib_value_chains (dv);
/* Existing one-part variable: find LOC's sorted position in the
   single location chain.  */
5975 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
5979 if (GET_CODE (loc) == VALUE)
5981 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5982 nextp = &node->next)
5983 if (GET_CODE (node->loc) == VALUE)
5985 if (node->loc == loc)
/* VALUEs are ordered by canon_value_cmp.  */
5990 if (canon_value_cmp (node->loc, loc))
5998 else if (REG_P (node->loc) || MEM_P (node->loc))
/* Registers sort after VALUEs, ordered by register number.  */
6006 else if (REG_P (loc))
6008 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6009 nextp = &node->next)
6010 if (REG_P (node->loc))
6012 if (REGNO (node->loc) < REGNO (loc))
6016 if (REGNO (node->loc) == REGNO (loc))
/* MEMs sort after registers, ordered by loc_cmp on the address.  */
6029 else if (MEM_P (loc))
6031 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6032 nextp = &node->next)
6033 if (REG_P (node->loc))
6035 else if (MEM_P (node->loc))
6037 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
/* Anything else: generic loc_cmp ordering.  */
6049 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6050 nextp = &node->next)
6051 if ((r = loc_cmp (node->loc, loc)) >= 0)
/* The variable is shared: make a private copy before mutating, then
   re-locate the insertion point in the copied chain.  */
6059 if (var->refcount > 1 || shared_hash_shared (set->vars))
6061 slot = unshare_variable (set, slot, var, initialized);
6062 var = (variable)*slot;
6063 for (nextp = &var->var_part[0].loc_chain; c;
6064 nextp = &(*nextp)->next)
6066 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
/* Multi-part (decl) variable path.  */
6073 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
6075 pos = find_variable_location_part (var, offset, &inspos);
6079 node = var->var_part[pos].loc_chain;
/* LOC already heads the chain: just refresh init status/set_src.  */
6082 && ((REG_P (node->loc) && REG_P (loc)
6083 && REGNO (node->loc) == REGNO (loc))
6084 || rtx_equal_p (node->loc, loc)))
6086 /* LOC is in the beginning of the chain so we have nothing
6088 if (node->init < initialized)
6089 node->init = initialized;
6090 if (set_src != NULL)
6091 node->set_src = set_src;
6097 /* We have to make a copy of a shared variable. */
6098 if (var->refcount > 1 || shared_hash_shared (set->vars))
6100 slot = unshare_variable (set, slot, var, initialized);
6101 var = (variable)*slot;
6107 /* We have not found the location part, new one will be created. */
6109 /* We have to make a copy of the shared variable. */
6110 if (var->refcount > 1 || shared_hash_shared (set->vars))
6112 slot = unshare_variable (set, slot, var, initialized);
6113 var = (variable)*slot;
6116 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
6117 thus there are at most MAX_VAR_PARTS different offsets. */
6118 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
6119 && (!var->n_var_parts || !dv_onepart_p (var->dv)));
6121 /* We have to move the elements of array starting at index
6122 inspos to the next position. */
6123 for (pos = var->n_var_parts; pos > inspos; pos--)
6124 var->var_part[pos] = var->var_part[pos - 1];
6127 var->var_part[pos].offset = offset;
6128 var->var_part[pos].loc_chain = NULL;
6129 var->var_part[pos].cur_loc = NULL;
6132 /* Delete the location from the list. */
6133 nextp = &var->var_part[pos].loc_chain;
6134 for (node = var->var_part[pos].loc_chain; node; node = next)
6137 if ((REG_P (node->loc) && REG_P (loc)
6138 && REGNO (node->loc) == REGNO (loc))
6139 || rtx_equal_p (node->loc, loc))
6141 /* Save these values, to assign to the new node, before
6142 deleting this one. */
6143 if (node->init > initialized)
6144 initialized = node->init;
6145 if (node->set_src != NULL && set_src == NULL)
6146 set_src = node->set_src;
6147 pool_free (loc_chain_pool, node);
6152 nextp = &node->next;
6155 nextp = &var->var_part[pos].loc_chain;
6158 /* Add the location to the beginning. */
6159 node = (location_chain) pool_alloc (loc_chain_pool);
6161 node->init = initialized;
6162 node->set_src = set_src;
6163 node->next = *nextp;
6166 if (onepart && emit_notes)
6167 add_value_chains (var->dv, loc);
6169 /* If no location was emitted do so. */
6170 if (var->var_part[pos].cur_loc == NULL)
6172 var->var_part[pos].cur_loc = loc;
6173 variable_was_changed (var, set);
6179 /* Set the part of variable's location in the dataflow set SET. The
6180 variable part is specified by variable's declaration in DV and
6181 offset OFFSET and the part's location by LOC. IOPT should be
6182 NO_INSERT if the variable is known to be in SET already and the
6183 variable hash table must not be resized, and INSERT otherwise. */
6186 set_variable_part (dataflow_set *set, rtx loc,
6187 decl_or_value dv, HOST_WIDE_INT offset,
6188 enum var_init_status initialized, rtx set_src,
6189 enum insert_option iopt)
/* Look up (or create) DV's slot, unsharing the table if an insert
   into a shared table is needed, then delegate to set_slot_part.  */
6193 if (iopt == NO_INSERT)
6194 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6197 slot = shared_hash_find_slot (set->vars, dv);
6199 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
6201 slot = set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
6204 /* Remove all recorded register locations for the given variable part
6205 from dataflow set SET, except for those that are identical to loc.
6206 The variable part is specified by variable's declaration or value
6207 DV and offset OFFSET. */
6210 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
6211 HOST_WIDE_INT offset, rtx set_src)
6213 variable var = (variable) *slot;
6214 int pos = find_variable_location_part (var, offset, NULL);
6218 location_chain node, next;
6220 /* Remove the register locations from the dataflow set. */
6221 next = var->var_part[pos].loc_chain;
6222 for (node = next; node; node = next)
/* Keep the node when it equals LOC (or, with uninit tracking,
   when its set_src matches).  */
6225 if (node->loc != loc
6226 && (!flag_var_tracking_uninit
6229 || !rtx_equal_p (set_src, node->set_src)))
6231 if (REG_P (node->loc))
6236 /* Remove the variable part from the register's
6237 list, but preserve any other variable parts
6238 that might be regarded as live in that same
6240 anextp = &set->regs[REGNO (node->loc)];
6241 for (anode = *anextp; anode; anode = anext)
6243 anext = anode->next;
/* Only drop the attrs node for exactly this DV/offset pair.  */
6244 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
6245 && anode->offset == offset)
6247 pool_free (attrs_pool, anode);
6251 anextp = &anode->next;
/* Drop the location itself from the variable's chain.  */
6255 slot = delete_slot_part (set, node->loc, slot, offset);
6263 /* Remove all recorded register locations for the given variable part
6264 from dataflow set SET, except for those that are identical to loc.
6265 The variable part is specified by variable's declaration or value
6266 DV and offset OFFSET. */
6269 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6270 HOST_WIDE_INT offset, rtx set_src)
/* Ignore DVs that are neither a VALUE nor a real decl.  */
6274 if (!dv_as_opaque (dv)
6275 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
6278 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6282 slot = clobber_slot_part (set, loc, slot, offset, set_src);
6285 /* Delete the part of variable's location from dataflow set SET. The
6286 variable part is specified by its SET->vars slot SLOT and offset
6287 OFFSET and the part's location by LOC. */
6290 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
6291 HOST_WIDE_INT offset)
6293 variable var = (variable) *slot;
6294 int pos = find_variable_location_part (var, offset, NULL);
6298 location_chain node, next;
6299 location_chain *nextp;
/* Unshare the variable first, but only if the chain actually
   contains LOC and a modification will happen.  */
6302 if (var->refcount > 1 || shared_hash_shared (set->vars))
6304 /* If the variable contains the location part we have to
6305 make a copy of the variable. */
6306 for (node = var->var_part[pos].loc_chain; node;
6309 if ((REG_P (node->loc) && REG_P (loc)
6310 && REGNO (node->loc) == REGNO (loc))
6311 || rtx_equal_p (node->loc, loc))
6313 slot = unshare_variable (set, slot, var,
6314 VAR_INIT_STATUS_UNKNOWN);
6315 var = (variable)*slot;
6321 /* Delete the location part. */
6322 nextp = &var->var_part[pos].loc_chain;
6323 for (node = *nextp; node; node = next)
/* Registers match on REGNO; everything else on rtx equality.  */
6326 if ((REG_P (node->loc) && REG_P (loc)
6327 && REGNO (node->loc) == REGNO (loc))
6328 || rtx_equal_p (node->loc, loc))
6330 if (emit_notes && pos == 0 && dv_onepart_p (var->dv))
6331 remove_value_chains (var->dv, node->loc);
6332 pool_free (loc_chain_pool, node);
6337 nextp = &node->next;
6340 /* If we have deleted the location which was last emitted
6341 we have to emit new location so add the variable to set
6342 of changed variables. */
6343 if (var->var_part[pos].cur_loc
6345 && REG_P (var->var_part[pos].cur_loc)
6346 && REGNO (loc) == REGNO (var->var_part[pos].cur_loc))
6347 || rtx_equal_p (loc, var->var_part[pos].cur_loc)))
/* Promote the new chain head (if any) to be the current location.  */
6350 if (var->var_part[pos].loc_chain)
6351 var->var_part[pos].cur_loc = var->var_part[pos].loc_chain->loc;
/* The part lost its last location: remove the part entirely,
   shifting the following parts down.  */
6356 if (var->var_part[pos].loc_chain == NULL)
6358 gcc_assert (changed);
6360 if (emit_notes && var->n_var_parts == 0 && dv_is_value_p (var->dv))
6361 remove_cselib_value_chains (var->dv);
6362 while (pos < var->n_var_parts)
6364 var->var_part[pos] = var->var_part[pos + 1];
6369 variable_was_changed (var, set);
6375 /* Delete the part of variable's location from dataflow set SET. The
6376 variable part is specified by variable's declaration or value DV
6377 and offset OFFSET and the part's location by LOC. */
6380 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6381 HOST_WIDE_INT offset)
/* Look up DV without inserting; delegate to delete_slot_part.  */
6383 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
6387 slot = delete_slot_part (set, loc, slot, offset);
6390 /* Callback for cselib_expand_value, that looks for expressions
6391 holding the value in the var-tracking hash tables. Return X for
6392 standard processing, anything else is to be used as-is. */
6395 vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
6397 htab_t vars = (htab_t)data;
6401 rtx result, subreg, xret;
6403 switch (GET_CODE (x))
/* SUBREG: expand the inner VALUE first, then re-fold the subreg.  */
6406 subreg = SUBREG_REG (x);
6408 if (GET_CODE (SUBREG_REG (x)) != VALUE)
6411 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
6413 vt_expand_loc_callback, data);
6418 result = simplify_gen_subreg (GET_MODE (x), subreg,
6419 GET_MODE (SUBREG_REG (x)),
6422 /* Invalid SUBREGs are ok in debug info. ??? We could try
6423 alternate expansions for the VALUE as well. */
6424 if (!result && (REG_P (subreg) || MEM_P (subreg)))
6425 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
/* DEBUG_EXPR / VALUE: look the dv up in the var-tracking table.  */
6430 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
6435 dv = dv_from_value (x);
/* Cycle guard: a VALUE already being expanded must not recurse.  */
6443 if (VALUE_RECURSED_INTO (x))
6446 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
6451 if (var->n_var_parts == 0)
6454 gcc_assert (var->n_var_parts == 1);
6456 VALUE_RECURSED_INTO (x) = true;
/* Try each recorded location until one expands successfully.  */
6459 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
6461 result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
6462 vt_expand_loc_callback, vars);
6467 VALUE_RECURSED_INTO (x) = false;
6474 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
6478 vt_expand_loc (rtx loc, htab_t vars)
/* Without debug insns there are no VALUEs to expand.  */
6480 if (!MAY_HAVE_DEBUG_INSNS)
/* Depth limit 5 bounds the recursive expansion.  */
6483 loc = cselib_expand_value_rtx_cb (loc, scratch_regs, 5,
6484 vt_expand_loc_callback, vars);
/* Undo target-specific address obfuscation (e.g. PIC) for MEMs.  */
6486 if (loc && MEM_P (loc))
6487 loc = targetm.delegitimize_address (loc);
6492 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
6493 additional parameters: WHERE specifies whether the note shall be emitted
6494 before or after instruction INSN. */
6497 emit_note_insn_var_location (void **varp, void *data)
6499 variable var = (variable) *varp;
6500 rtx insn = ((emit_note_data *)data)->insn;
6501 enum emit_note_where where = ((emit_note_data *)data)->where;
6502 htab_t vars = ((emit_note_data *)data)->vars;
6504 int i, j, n_var_parts;
6506 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
6507 HOST_WIDE_INT last_limit;
6508 tree type_size_unit;
/* Collected per-part offsets and expanded locations.  */
6509 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
6510 rtx loc[MAX_VAR_PARTS];
6513 if (dv_is_value_p (var->dv))
6516 decl = dv_as_decl (var->dv);
6518 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
/* Walk the parts in offset order, expanding each part's current
   location and coalescing adjacent parts where possible.  */
6526 for (i = 0; i < var->n_var_parts; i++)
6528 enum machine_mode mode, wider_mode;
6531 if (last_limit < var->var_part[i].offset)
6536 else if (last_limit > var->var_part[i].offset)
6538 offsets[n_var_parts] = var->var_part[i].offset;
6539 loc2 = vt_expand_loc (var->var_part[i].loc_chain->loc, vars);
6545 loc[n_var_parts] = loc2;
6546 mode = GET_MODE (var->var_part[i].loc_chain->loc);
6547 initialized = var->var_part[i].loc_chain->init;
6548 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
6550 /* Attempt to merge adjacent registers or memory. */
6551 wider_mode = GET_MODE_WIDER_MODE (mode);
6552 for (j = i + 1; j < var->n_var_parts; j++)
6553 if (last_limit <= var->var_part[j].offset)
6555 if (j < var->n_var_parts
6556 && wider_mode != VOIDmode
6557 && mode == GET_MODE (var->var_part[j].loc_chain->loc)
6558 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
6559 && (loc2 = vt_expand_loc (var->var_part[j].loc_chain->loc, vars))
6560 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2)
6561 && last_limit == var->var_part[j].offset)
/* Two consecutive hard registers that together span the wider
   mode can be merged into a single wider-mode register.  */
6565 if (REG_P (loc[n_var_parts])
6566 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
6567 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
6568 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
/* Which half supplies the low part depends on endianness.  */
6571 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
6572 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
6574 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
6575 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
6578 if (!REG_P (new_loc)
6579 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
6582 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
/* Two MEMs whose addresses differ by exactly the mode size can
   be merged into one wider MEM.  */
6585 else if (MEM_P (loc[n_var_parts])
6586 && GET_CODE (XEXP (loc2, 0)) == PLUS
6587 && REG_P (XEXP (XEXP (loc2, 0), 0))
6588 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
6590 if ((REG_P (XEXP (loc[n_var_parts], 0))
6591 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
6592 XEXP (XEXP (loc2, 0), 0))
6593 && INTVAL (XEXP (XEXP (loc2, 0), 1))
6594 == GET_MODE_SIZE (mode))
6595 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
6596 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
6597 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
6598 XEXP (XEXP (loc2, 0), 0))
6599 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
6600 + GET_MODE_SIZE (mode)
6601 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
6602 new_loc = adjust_address_nv (loc[n_var_parts],
6608 loc[n_var_parts] = new_loc;
6610 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
/* Unless the whole decl is covered, emit an empty location note.  */
6616 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6617 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
6620 if (where != EMIT_NOTE_BEFORE_INSN)
6622 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
6623 if (where == EMIT_NOTE_AFTER_CALL_INSN)
6624 NOTE_DURING_CALL_P (note) = true;
6627 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
6629 if (! flag_var_tracking_uninit)
6630 initialized = VAR_INIT_STATUS_INITIALIZED;
/* Zero parts: location unknown; one part: single EXPR_LIST;
   several parts: a PARALLEL of (loc, offset) pairs.  */
6634 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6635 NULL_RTX, (int) initialized);
6637 else if (n_var_parts == 1)
6640 = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
6642 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6646 else if (n_var_parts)
6650 for (i = 0; i < n_var_parts; i++)
6652 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
6654 parallel = gen_rtx_PARALLEL (VOIDmode,
6655 gen_rtvec_v (n_var_parts, loc));
6656 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
/* Mark the variable as no longer pending and drop it from the
   changed_variables table.  */
6662 set_dv_changed (var->dv, false);
6663 htab_clear_slot (changed_variables, varp);
6665 /* Continue traversing the hash table. */
/* Instantiate a heap-allocated VEC of 'variable' pointers.  */
6669 DEF_VEC_P (variable);
6670 DEF_VEC_ALLOC_P (variable, heap);
6672 /* Stack of variable_def pointers that need processing with
6673 check_changed_vars_2. */
6675 static VEC (variable, heap) *changed_variables_stack;
6677 /* Populate changed_variables_stack with variable_def pointers
6678 that need variable_was_changed called on them.
   htab_traverse callback over changed_variables; DATA is the
   current variable hash table.  */
6681 check_changed_vars_1 (void **slot, void *data)
6683 variable var = (variable) *slot;
6684 htab_t htab = (htab_t) data;
6686 if (dv_is_value_p (var->dv))
/* Find the chain of DVs whose location lists reference this VALUE.  */
6689 = (value_chain) htab_find_with_hash (value_chains, var->dv,
6690 dv_htab_hash (var->dv));
/* Queue every dependent DV that is not already marked changed.  */
6694 for (vc = vc->next; vc; vc = vc->next)
6695 if (!dv_changed_p (vc->dv))
6698 = (variable) htab_find_with_hash (htab, vc->dv,
6699 dv_htab_hash (vc->dv));
6701 VEC_safe_push (variable, heap, changed_variables_stack,
6708 /* Add VAR to changed_variables and also for VALUEs add recursively
6709 all DVs that aren't in changed_variables yet but reference the
6710 VALUE from its loc_chain. */
6713 check_changed_vars_2 (variable var, htab_t htab)
6715 variable_was_changed (var, NULL);
6716 if (dv_is_value_p (var->dv))
/* Walk the value's dependency chain, recursing into each DV that
   is not yet marked changed.  */
6719 = (value_chain) htab_find_with_hash (value_chains, var->dv,
6720 dv_htab_hash (var->dv));
6724 for (vc = vc->next; vc; vc = vc->next)
6725 if (!dv_changed_p (vc->dv))
6728 = (variable) htab_find_with_hash (htab, vc->dv,
6729 dv_htab_hash (vc->dv));
6731 check_changed_vars_2 (vcvar, htab);
6736 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
6737 CHANGED_VARIABLES and delete this chain. WHERE specifies whether the notes
6738 shall be emitted before of after instruction INSN. */
6741 emit_notes_for_changes (rtx insn, enum emit_note_where where,
6744 emit_note_data data;
6745 htab_t htab = shared_hash_htab (vars);
/* Nothing changed: nothing to emit.  */
6747 if (!htab_elements (changed_variables))
6750 if (MAY_HAVE_DEBUG_INSNS)
6752 /* Unfortunately this has to be done in two steps, because
6753 we can't traverse a hashtab into which we are inserting
6754 through variable_was_changed. */
6755 htab_traverse (changed_variables, check_changed_vars_1, htab);
6756 while (VEC_length (variable, changed_variables_stack) > 0)
6757 check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
/* Emit a note for every queued variable; the callback also clears
   each slot from changed_variables.  */
6765 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
6768 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
6769 same variable in hash table DATA or is not there at all. */
6772 emit_notes_for_differences_1 (void **slot, void *data)
6774 htab_t new_vars = (htab_t) data;
6775 variable old_var, new_var;
6777 old_var = (variable) *slot;
6778 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
6779 dv_htab_hash (old_var->dv));
6783 /* Variable has disappeared. */
/* Record a zero-part placeholder so the note emitter terminates
   the variable's location list.  */
6786 empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
6787 empty_var->dv = old_var->dv;
6788 empty_var->refcount = 0;
6789 empty_var->n_var_parts = 0;
6790 if (dv_onepart_p (old_var->dv))
/* Drop the value-chain references held by the vanished locations.  */
6794 gcc_assert (old_var->n_var_parts == 1);
6795 for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
6796 remove_value_chains (old_var->dv, lc->loc);
6797 if (dv_is_value_p (old_var->dv))
6798 remove_cselib_value_chains (old_var->dv);
6800 variable_was_changed (empty_var, NULL);
6802 else if (variable_different_p (old_var, new_var, true))
6804 if (dv_onepart_p (old_var->dv))
6806 location_chain lc1, lc2;
6808 gcc_assert (old_var->n_var_parts == 1);
6809 gcc_assert (new_var->n_var_parts == 1);
/* Skip the common chain prefix, then rewire value chains: add
   for the new tail, remove for the old tail.  */
6810 lc1 = old_var->var_part[0].loc_chain;
6811 lc2 = new_var->var_part[0].loc_chain;
6814 && ((REG_P (lc1->loc) && REG_P (lc2->loc))
6815 || rtx_equal_p (lc1->loc, lc2->loc)))
6820 for (; lc2; lc2 = lc2->next)
6821 add_value_chains (old_var->dv, lc2->loc);
6822 for (; lc1; lc1 = lc1->next)
6823 remove_value_chains (old_var->dv, lc1->loc);
6825 variable_was_changed (new_var, NULL);
6828 /* Continue traversing the hash table. */
6832 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
6836 emit_notes_for_differences_2 (void **slot, void *data)
6838 htab_t old_vars = (htab_t) data;
6839 variable old_var, new_var;
6841 new_var = (variable) *slot;
6842 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
6843 dv_htab_hash (new_var->dv));
6846 /* Variable has appeared. */
/* Register value-chain references for the new locations before
   queueing the variable for note emission.  */
6847 if (dv_onepart_p (new_var->dv))
6851 gcc_assert (new_var->n_var_parts == 1);
6852 for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
6853 add_value_chains (new_var->dv, lc->loc);
6854 if (dv_is_value_p (new_var->dv))
6855 add_cselib_value_chains (new_var->dv);
6857 variable_was_changed (new_var, NULL);
6860 /* Continue traversing the hash table. */
6864 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
6868 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
6869 dataflow_set *new_set)
/* Pass 1: variables that vanished or changed relative to NEW_SET.  */
6871 htab_traverse (shared_hash_htab (old_set->vars),
6872 emit_notes_for_differences_1,
6873 shared_hash_htab (new_set->vars));
/* Pass 2: variables that newly appeared in NEW_SET.  */
6874 htab_traverse (shared_hash_htab (new_set->vars),
6875 emit_notes_for_differences_2,
6876 shared_hash_htab (old_set->vars));
6877 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
6880 /* Emit the notes for changes of location parts in the basic block BB.
   SET is the scratch dataflow set, initialized from BB's IN set and
   updated micro operation by micro operation, mirroring the transfer
   function of compute_bb_dataflow while emitting notes.  */
6883 emit_notes_in_bb (basic_block bb, dataflow_set *set)
6887 dataflow_set_clear (set);
6888 dataflow_set_copy (set, &VTI (bb)->in);
/* Replay the block's micro operations in order.  */
6890 for (i = 0; i < VTI (bb)->n_mos; i++)
6892 rtx insn = VTI (bb)->mos[i].insn;
6894 switch (VTI (bb)->mos[i].type)
/* Call insn: call-clobbered locations die.  */
6897 dataflow_set_clear_at_call (set);
6898 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
/* Use: a register/memory reference seen by the insn.  */
6903 rtx loc = VTI (bb)->mos[i].u.loc;
6906 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6908 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6910 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
/* Debug-insn binding: (CONCAT value pat) pairs a VALUE with a
   VAR_LOCATION pattern for a user variable.  */
6916 rtx loc = VTI (bb)->mos[i].u.loc;
6920 if (GET_CODE (loc) == CONCAT)
6922 val = XEXP (loc, 0);
6923 vloc = XEXP (loc, 1);
6931 var = PAT_VAR_LOCATION_DECL (vloc);
/* No value: just drop the variable's previous binding.  */
6933 clobber_variable_part (set, NULL_RTX,
6934 dv_from_decl (var), 0, NULL_RTX);
6937 if (VAL_NEEDS_RESOLUTION (loc))
6938 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6939 set_variable_part (set, val, dv_from_decl (var), 0,
6940 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6944 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
/* Value use: resolve/store a cselib VALUE.  */
6950 rtx loc = VTI (bb)->mos[i].u.loc;
6951 rtx val, vloc, uloc;
6953 vloc = uloc = XEXP (loc, 1);
6954 val = XEXP (loc, 0);
6956 if (GET_CODE (val) == CONCAT)
6958 uloc = XEXP (val, 1);
6959 val = XEXP (val, 0);
6962 if (VAL_NEEDS_RESOLUTION (loc))
6963 val_resolve (set, val, vloc, insn);
6965 val_store (set, val, uloc, insn, false);
6967 if (VAL_HOLDS_TRACK_EXPR (loc))
6969 if (GET_CODE (uloc) == REG)
6970 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6972 else if (GET_CODE (uloc) == MEM)
6973 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6977 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
/* Value set: a store that defines a cselib VALUE.  */
6983 rtx loc = VTI (bb)->mos[i].u.loc;
6984 rtx val, vloc, uloc;
6986 vloc = uloc = XEXP (loc, 1);
6987 val = XEXP (loc, 0);
6989 if (GET_CODE (val) == CONCAT)
6991 vloc = XEXP (val, 1);
6992 val = XEXP (val, 0);
6995 if (GET_CODE (vloc) == SET)
6997 rtx vsrc = SET_SRC (vloc);
6999 gcc_assert (val != vsrc);
7000 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
7002 vloc = SET_DEST (vloc);
7004 if (VAL_NEEDS_RESOLUTION (loc))
7005 val_resolve (set, val, vsrc, insn);
7007 else if (VAL_NEEDS_RESOLUTION (loc))
7009 gcc_assert (GET_CODE (uloc) == SET
7010 && GET_CODE (SET_SRC (uloc)) == REG);
7011 val_resolve (set, val, SET_SRC (uloc), insn);
7014 if (VAL_HOLDS_TRACK_EXPR (loc))
7016 if (VAL_EXPR_IS_CLOBBERED (loc))
7019 var_reg_delete (set, uloc, true);
7020 else if (MEM_P (uloc))
7021 var_mem_delete (set, uloc, true);
/* Ordinary set or copy of a tracked expression.  */
7025 bool copied_p = VAL_EXPR_IS_COPIED (loc);
7027 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
7029 if (GET_CODE (uloc) == SET)
7031 set_src = SET_SRC (uloc);
7032 uloc = SET_DEST (uloc);
7037 status = find_src_status (set, set_src);
7039 set_src = find_src_set_src (set, set_src);
7043 var_reg_delete_and_set (set, uloc, !copied_p,
7045 else if (MEM_P (uloc))
7046 var_mem_delete_and_set (set, uloc, !copied_p,
7050 else if (REG_P (uloc))
7051 var_regno_delete (set, REGNO (uloc));
7053 val_store (set, val, vloc, insn, true);
/* Notes for a set go before the following insn.  */
7055 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
/* Plain set (no VALUE tracking).  */
7062 rtx loc = VTI (bb)->mos[i].u.loc;
7065 if (GET_CODE (loc) == SET)
7067 set_src = SET_SRC (loc);
7068 loc = SET_DEST (loc);
7072 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7075 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7078 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
/* Copy: like a set but keeps other locations, propagating the
   source's initialization status.  */
7085 rtx loc = VTI (bb)->mos[i].u.loc;
7086 enum var_init_status src_status;
7089 if (GET_CODE (loc) == SET)
7091 set_src = SET_SRC (loc);
7092 loc = SET_DEST (loc);
7095 src_status = find_src_status (set, set_src);
7096 set_src = find_src_set_src (set, set_src);
7099 var_reg_delete_and_set (set, loc, false, src_status, set_src);
7101 var_mem_delete_and_set (set, loc, false, src_status, set_src);
7103 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
/* Use-then-clobber of a location without a fresh value.  */
7110 rtx loc = VTI (bb)->mos[i].u.loc;
7113 var_reg_delete (set, loc, false);
7115 var_mem_delete (set, loc, false);
7117 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
/* Clobber: the location's contents are destroyed.  */
7123 rtx loc = VTI (bb)->mos[i].u.loc;
7126 var_reg_delete (set, loc, true);
7128 var_mem_delete (set, loc, true);
7130 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
/* Stack adjustment micro operation.  */
7136 set->stack_adjust += VTI (bb)->mos[i].u.adjust;
7142 /* Emit notes for the whole function. */
7145 vt_emit_notes (void)
7150 gcc_assert (!htab_elements (changed_variables));
7152 /* Free memory occupied by the out hash tables, as they aren't used
7155 dataflow_set_clear (&VTI (bb)->out);
7157 /* Enable emitting notes by functions (mainly by set_variable_part and
7158 delete_variable_part). */
7161 if (MAY_HAVE_DEBUG_INSNS)
7162 changed_variables_stack = VEC_alloc (variable, heap, 40);
/* CUR tracks the running location state across blocks.  */
7164 dataflow_set_init (&cur);
7168 /* Emit the notes for changes of variable locations between two
7169 subsequent basic blocks. */
7170 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
7172 /* Emit the notes for the changes in the basic block itself. */
7173 emit_notes_in_bb (bb, &cur);
7175 /* Free memory occupied by the in hash table, we won't need it
7177 dataflow_set_clear (&VTI (bb)->in);
/* Sanity check: after the last block nothing should remain live,
   diffing CUR against the empty table must find no changes.  */
7179 #ifdef ENABLE_CHECKING
7180 htab_traverse (shared_hash_htab (cur.vars),
7181 emit_notes_for_differences_1,
7182 shared_hash_htab (empty_shared_hash));
7183 if (MAY_HAVE_DEBUG_INSNS)
7184 gcc_assert (htab_elements (value_chains) == 0);
7186 dataflow_set_destroy (&cur);
7188 if (MAY_HAVE_DEBUG_INSNS)
7189 VEC_free (variable, heap, changed_variables_stack);
7194 /* If there is a declaration and offset associated with register/memory RTL
7195 assign declaration to *DECLP and offset to *OFFSETP, and return true.  */
/* NOTE(review): the sampled text omits the return statements and the
   final "return false" path; only the attribute-extraction lines are
   visible here.  */
7198 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
/* Register case: decl/offset come from the REG's attached attributes.  */
7202 if (REG_ATTRS (rtl))
7204 *declp = REG_EXPR (rtl);
7205 *offsetp = REG_OFFSET (rtl);
/* Memory case: decl/offset come from the MEM's attached attributes.  */
7209 else if (MEM_P (rtl))
7211 if (MEM_ATTRS (rtl))
7213 *declp = MEM_EXPR (rtl);
7214 *offsetp = INT_MEM_OFFSET (rtl);
7221 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK.  */
/* NOTE(review): line-sampled extract; "continue" statements after the
   early guards and several braces are missing from view.  */
7224 vt_add_function_parameters (void)
7228 for (parm = DECL_ARGUMENTS (current_function_decl);
7229 parm; parm = TREE_CHAIN (parm))
7231 rtx decl_rtl = DECL_RTL_IF_SET (parm);
7232 rtx incoming = DECL_INCOMING_RTL (parm);
7234 enum machine_mode mode;
7235 HOST_WIDE_INT offset;
/* Skip anything that is not a named parameter with both an assigned
   DECL_RTL and incoming RTL in a non-BLK mode.  */
7239 if (TREE_CODE (parm) != PARM_DECL)
7242 if (!DECL_NAME (parm))
7245 if (!decl_rtl || !incoming)
7248 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
7251 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
7253 if (REG_P (incoming) || MEM_P (incoming))
7255 /* This means argument is passed by invisible reference.  */
7258 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
7262 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
7264 offset += byte_lowpart_offset (GET_MODE (incoming),
7265 GET_MODE (decl_rtl));
7274 /* Assume that DECL_RTL was a pseudo that got spilled to
7275 memory.  The spill slot sharing code will force the
7276 memory to reference spill_slot_decl (%sfp), so we don't
7277 match above.  That's ok, the pseudo must have referenced
7278 the entire parameter, so just reset OFFSET.  */
7279 gcc_assert (decl == get_spill_slot_decl (false));
7283 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
/* Record the parameter's location in the OUT set of the entry block.  */
7286 out = &VTI (ENTRY_BLOCK_PTR)->out;
7288 dv = dv_from_decl (parm);
7290 if (target_for_debug_bind (parm)
7291 /* We can't deal with these right now, because this kind of
7292 variable is single-part.  ??? We could handle parallels
7293 that describe multiple locations for the same single
7294 value, but ATM we don't.  */
7295 && GET_CODE (incoming) != PARALLEL)
7299 /* ??? We shouldn't ever hit this, but it may happen because
7300 arguments passed by invisible reference aren't dealt with
7301 above: incoming-rtl will have Pmode rather than the
7302 expected mode for the type.  */
/* Bind the parameter to a cselib VALUE so debug insns can refer to
   it; the variable part is then keyed by that value.  */
7306 val = cselib_lookup (var_lowpart (mode, incoming), mode, true);
7308 /* ??? Float-typed values in memory are not handled by
7312 cselib_preserve_value (val);
7313 set_variable_part (out, val->val_rtx, dv, offset,
7314 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7315 dv = dv_from_value (val->val_rtx);
/* Register-resident parameter: also note it in the per-register
   attribute list of the entry block's OUT set.  */
7319 if (REG_P (incoming))
7321 incoming = var_lowpart (mode, incoming);
7322 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
7323 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
7325 set_variable_part (out, incoming, dv, offset,
7326 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
/* Memory-resident parameter.  */
7328 else if (MEM_P (incoming))
7330 incoming = var_lowpart (mode, incoming);
7331 set_variable_part (out, incoming, dv, offset,
7332 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
/* Keep only preserved values in the cselib table before the pass
   proper starts processing insns.  */
7336 if (MAY_HAVE_DEBUG_INSNS)
7338 cselib_preserve_only_values (true);
7339 cselib_reset_table_with_next_value (cselib_get_next_unknown_value ());
7344 /* Allocate and initialize the data structures for variable tracking
7345 and parse the RTL to get the micro operations.  */
/* NOTE(review): line-sampled extract; the FOR_EACH_BB loop headers,
   several braces and some statements are missing between the numbered
   lines.  The visible structure is: per-block auxiliary allocation,
   debug-insn setup, a two-pass scan of each block (first count the
   micro operations, then fill the array), pool/hashtab allocation, and
   finally dataflow-set initialization.  */
7348 vt_initialize (void)
7352 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
/* Extra state only needed when tracking debug insns via cselib.  */
7354 if (MAY_HAVE_DEBUG_INSNS)
7357 scratch_regs = BITMAP_ALLOC (NULL);
7358 valvar_pool = create_alloc_pool ("small variable_def pool",
7359 sizeof (struct variable_def), 256);
7363 scratch_regs = NULL;
7370 HOST_WIDE_INT pre, post = 0;
/* Remember the cselib value counter so the second pass can replay
   the same value numbering (asserted below at 7499).  */
7372 unsigned int next_value_before = cselib_get_next_unknown_value ();
7373 unsigned int next_value_after = next_value_before;
7375 if (MAY_HAVE_DEBUG_INSNS)
7377 cselib_record_sets_hook = count_with_sets;
7378 if (dump_file && (dump_flags & TDF_DETAILS))
7379 fprintf (dump_file, "first value: %i\n",
7380 cselib_get_next_unknown_value ());
7383 /* Count the number of micro operations.  */
7384 VTI (bb)->n_mos = 0;
7385 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
7386 insn = NEXT_INSN (insn))
/* Without a frame pointer, stack adjustments become MO_ADJUST
   micro operations (pre- and post-modification separately).  */
7390 if (!frame_pointer_needed)
7392 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
7396 if (dump_file && (dump_flags & TDF_DETAILS))
7397 log_op_type (GEN_INT (pre), bb, insn,
7398 MO_ADJUST, dump_file);
7403 if (dump_file && (dump_flags & TDF_DETAILS))
7404 log_op_type (GEN_INT (post), bb, insn,
7405 MO_ADJUST, dump_file);
/* Let cselib drive counting; fall back to count_with_sets
   directly when the hook was not invoked for this insn.  */
7408 cselib_hook_called = false;
7409 if (MAY_HAVE_DEBUG_INSNS)
7411 cselib_process_insn (insn);
7412 if (dump_file && (dump_flags & TDF_DETAILS))
7414 print_rtl_single (dump_file, insn);
7415 dump_cselib_table (dump_file);
7418 if (!cselib_hook_called)
7419 count_with_sets (insn, 0, 0);
7423 if (dump_file && (dump_flags & TDF_DETAILS))
7424 log_op_type (PATTERN (insn), bb, insn,
7425 MO_CALL, dump_file);
7430 count = VTI (bb)->n_mos;
/* Reset cselib to the pre-count state so the second (recording)
   pass reproduces identical value numbers.  */
7432 if (MAY_HAVE_DEBUG_INSNS)
7434 cselib_preserve_only_values (false);
7435 next_value_after = cselib_get_next_unknown_value ();
7436 cselib_reset_table_with_next_value (next_value_before);
7437 cselib_record_sets_hook = add_with_sets;
7438 if (dump_file && (dump_flags & TDF_DETAILS))
7439 fprintf (dump_file, "first value: %i\n",
7440 cselib_get_next_unknown_value ());
7443 /* Add the micro-operations to the array.  */
7444 VTI (bb)->mos = XNEWVEC (micro_operation, VTI (bb)->n_mos);
7445 VTI (bb)->n_mos = 0;
7446 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
7447 insn = NEXT_INSN (insn))
7451 if (!frame_pointer_needed)
7453 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
7456 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
7458 mo->type = MO_ADJUST;
7462 if (dump_file && (dump_flags & TDF_DETAILS))
7463 log_op_type (PATTERN (insn), bb, insn,
7464 MO_ADJUST, dump_file);
7468 cselib_hook_called = false;
7469 if (MAY_HAVE_DEBUG_INSNS)
7471 cselib_process_insn (insn);
7472 if (dump_file && (dump_flags & TDF_DETAILS))
7474 print_rtl_single (dump_file, insn);
7475 dump_cselib_table (dump_file);
7478 if (!cselib_hook_called)
7479 add_with_sets (insn, 0, 0);
/* Post-modifying stack adjustment is ordered after the insn's
   other micro operations (see the ordering comment at the top
   of the file).  */
7481 if (!frame_pointer_needed && post)
7483 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
7485 mo->type = MO_ADJUST;
7486 mo->u.adjust = post;
7489 if (dump_file && (dump_flags & TDF_DETAILS))
7490 log_op_type (PATTERN (insn), bb, insn,
7491 MO_ADJUST, dump_file);
/* Both passes must have produced the same number of micro
   operations and the same cselib value numbering.  */
7495 gcc_assert (count == VTI (bb)->n_mos);
7496 if (MAY_HAVE_DEBUG_INSNS)
7498 cselib_preserve_only_values (true);
7499 gcc_assert (next_value_after == cselib_get_next_unknown_value ());
7500 cselib_reset_table_with_next_value (next_value_after);
7501 cselib_record_sets_hook = NULL;
/* Allocation pools and hash tables used by the rest of the pass.  */
7505 attrs_pool = create_alloc_pool ("attrs_def pool",
7506 sizeof (struct attrs_def), 1024);
7507 var_pool = create_alloc_pool ("variable_def pool",
7508 sizeof (struct variable_def)
7509 + (MAX_VAR_PARTS - 1)
7510 * sizeof (((variable)NULL)->var_part[0]), 64);
7511 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
7512 sizeof (struct location_chain_def),
7514 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
7515 sizeof (struct shared_hash_def), 256);
7516 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
7517 empty_shared_hash->refcount = 1;
7518 empty_shared_hash->htab
7519 = htab_create (1, variable_htab_hash, variable_htab_eq,
7520 variable_htab_free);
7521 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
7522 variable_htab_free);
7523 if (MAY_HAVE_DEBUG_INSNS)
7525 value_chain_pool = create_alloc_pool ("value_chain_def pool",
7526 sizeof (struct value_chain_def),
7528 value_chains = htab_create (32, value_chain_htab_hash,
7529 value_chain_htab_eq, NULL);
7532 /* Init the IN and OUT sets.  */
7535 VTI (bb)->visited = false;
7536 VTI (bb)->flooded = false;
7537 dataflow_set_init (&VTI (bb)->in);
7538 dataflow_set_init (&VTI (bb)->out);
7539 VTI (bb)->permp = NULL;
/* Seed the dataflow problem: the entry block is flooded and holds
   the locations of the function's parameters.  */
7542 VTI (ENTRY_BLOCK_PTR)->flooded = true;
7543 vt_add_function_parameters ();
7546 /* Get rid of all debug insns from the insn stream.  */
/* NOTE(review): sampled extract -- the early "return" body after the
   MAY_HAVE_DEBUG_INSNS guard and the deletion call inside the loop are
   not visible here.  */
7549 delete_debug_insns (void)
/* Nothing to do when the function cannot contain debug insns.  */
7554 if (!MAY_HAVE_DEBUG_INSNS)
/* Walk every insn of every block (SAFE variant, since insns are
   being removed while iterating) and drop the DEBUG_INSNs.  */
7559 FOR_BB_INSNS_SAFE (bb, insn, next)
7560 if (DEBUG_INSN_P (insn))
7565 /* Run a fast, BB-local only version of var tracking, to take care of
7566 information that we don't do global analysis on, such that not all
7567 information is lost.  If SKIPPED holds, we're skipping the global
7568 pass entirely, so we should try to use information it would have
7569 handled as well..  */
7572 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
/* Placeholder implementation: the local analysis described above is
   not implemented yet; all debug insns are simply discarded.  */
7574 /* ??? Just skip it all for now.  */
7575 delete_debug_insns ();
7578 /* Free the data structures needed for variable tracking.  */
/* NOTE(review): the function header line itself (presumably
   "vt_finalize (void)") is missing from this sampled extract; only the
   teardown body is visible.  It releases, in order: per-block micro
   operation arrays and dataflow sets, the block aux data, the hash
   tables and allocation pools, and the debug-insn-only state.  */
7587 free (VTI (bb)->mos);
7592 dataflow_set_destroy (&VTI (bb)->in);
7593 dataflow_set_destroy (&VTI (bb)->out);
7594 if (VTI (bb)->permp)
7596 dataflow_set_destroy (VTI (bb)->permp);
7597 XDELETE (VTI (bb)->permp);
7600 free_aux_for_blocks ();
7601 htab_delete (empty_shared_hash->htab);
7602 htab_delete (changed_variables);
7603 free_alloc_pool (attrs_pool);
7604 free_alloc_pool (var_pool);
7605 free_alloc_pool (loc_chain_pool);
7606 free_alloc_pool (shared_hash_pool);
/* State allocated only when MAY_HAVE_DEBUG_INSNS held in
   vt_initialize; freed under the same condition.  */
7608 if (MAY_HAVE_DEBUG_INSNS)
7610 htab_delete (value_chains);
7611 free_alloc_pool (value_chain_pool);
7612 free_alloc_pool (valvar_pool);
7614 BITMAP_FREE (scratch_regs);
7615 scratch_regs = NULL;
7619 XDELETEVEC (vui_vec);
7624 /* The entry point to variable tracking pass.  */
/* NOTE(review): sampled extract; return statements, the calls to
   vt_initialize/vt_finalize and some braces are not visible between
   the numbered lines.  */
7627 variable_tracking_main (void)
/* flag_var_tracking_assignments < 0 means "auto/disabled": drop all
   debug insns up front.  */
7629 if (flag_var_tracking_assignments < 0)
7631 delete_debug_insns ();
/* Bail out on very large, densely connected CFGs -- the dataflow
   iteration would be too expensive; fall back to the local pass.  */
7635 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
7637 vt_debug_insns_local (true);
7641 mark_dfs_back_edges ();
/* Without a frame pointer the stack adjustments must be computable;
   if not, give up and run only the local pass.  */
7643 if (!frame_pointer_needed)
7645 if (!vt_stack_adjustments ())
7648 vt_debug_insns_local (true);
/* The main dataflow computation.  */
7653 vt_find_locations ();
7655 if (dump_file && (dump_flags & TDF_DETAILS))
7657 dump_dataflow_sets ();
7658 dump_flow_info (dump_file, dump_flags);
7664 vt_debug_insns_local (false);
/* Pass gate: run variable tracking only when -fvar-tracking is on.  */
7669 gate_handle_var_tracking (void)
7671 return (flag_var_tracking);
7676 struct rtl_opt_pass pass_variable_tracking =
7680 "vartrack", /* name */
7681 gate_handle_var_tracking, /* gate */
7682 variable_tracking_main, /* execute */
7685 0, /* static_pass_number */
7686 TV_VAR_TRACKING, /* tv_id */
7687 0, /* properties_required */
7688 0, /* properties_provided */
7689 0, /* properties_destroyed */
7690 0, /* todo_flags_start */
7691 TODO_dump_func | TODO_verify_rtl_sharing/* todo_flags_finish */