1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 < set < clobber < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offset of variables addressed using stack pointer),
48 the table of structures describing the locations of parts of a variable
49 and, for each physical register, a linked list of attrs.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 effective deleting appropriate variable parts when we set or clobber the
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short so it is a good data structure here.
58 For example in the following code, register allocator may assign same
59 register to variables A and B, and both of them are stored in the same
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in RTL code. Each such note describes
74 the location of one variable at the point in instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction, we only emit these notes where the location of variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for a large variables
85 which consist of several parts, for example long long).
91 #include "coretypes.h"
95 #include "hard-reg-set.h"
96 #include "basic-block.h"
99 #include "insn-config.h"
102 #include "alloc-pool.h"
108 #include "tree-pass.h"
109 #include "tree-flow.h"
113 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
114 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
115 Currently the value is the same as IDENTIFIER_NODE, which has such
116 a property. If this compile time assertion ever fails, make sure that
117 the new tree code that equals (int) VALUE has the same property. */
/* The array is declared but never defined: if the codes ever diverge the
   conditional yields -1 and the negative array size makes this declaration
   ill-formed, turning the violated assumption into a compile error.  */
118 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
120 /* Type of micro operation. */
121 enum micro_operation_type
123 MO_USE, /* Use location (REG or MEM). */
124 MO_USE_NO_VAR,/* Use location which is not associated with a variable
125 or the variable is not trackable. */
126 MO_VAL_USE, /* Use location which is associated with a value. */
127 MO_VAL_LOC, /* Use location which appears in a debug insn. */
128 MO_VAL_SET, /* Set location associated with a value. */
129 MO_SET, /* Set location. */
130 MO_COPY, /* Copy the same portion of a variable from one
131 location to another. */
132 MO_CLOBBER, /* Clobber location. */
133 MO_CALL, /* Call insn. */
134 MO_ADJUST /* Adjust stack pointer. */
138 static const char * const ATTRIBUTE_UNUSED
139 micro_operation_type_name[] = {
152 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
153 Notes emitted as AFTER_CALL are to take effect during the call,
154 rather than after the call. */
157 EMIT_NOTE_BEFORE_INSN,
158 EMIT_NOTE_AFTER_INSN,
159 EMIT_NOTE_AFTER_CALL_INSN
162 /* Structure holding information about micro operation. */
163 typedef struct micro_operation_def
165 /* Type of micro operation. */
166 enum micro_operation_type type;
169 /* Location. For MO_SET and MO_COPY, this is the SET that
170 performs the assignment, if known, otherwise it is the target
171 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
172 CONCAT of the VALUE and the LOC associated with it. For
173 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
174 associated with it. */
177 /* Stack adjustment. */
178 HOST_WIDE_INT adjust;
181 /* The instruction which the micro operation is in, for MO_USE,
182 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
183 instruction or note in the original flow (before any var-tracking
184 notes are inserted, to simplify emission of notes), for MO_SET
189 /* A declaration of a variable, or an RTL value being handled like a
191 typedef void *decl_or_value;
193 /* Structure for passing some other parameters to function
194 emit_note_insn_var_location. */
195 typedef struct emit_note_data_def
197 /* The instruction which the note will be emitted before/after. */
200 /* Where the note will be emitted (before/after insn)? */
201 enum emit_note_where where;
203 /* The variables and values active at this point. */
207 /* Description of location of a part of a variable. The content of a physical
208 register is described by a chain of these structures.
209 The chains are pretty short (usually 1 or 2 elements) and thus
210 chain is the best data structure. */
211 typedef struct attrs_def
213 /* Pointer to next member of the list. */
214 struct attrs_def *next;
216 /* The rtx of register. */
219 /* The declaration corresponding to LOC. */
222 /* Offset from start of DECL. */
223 HOST_WIDE_INT offset;
226 /* Structure holding a refcounted hash table. If refcount > 1,
227 it must be first unshared before modified. */
228 typedef struct shared_hash_def
230 /* Reference count. */
233 /* Actual hash table. */
237 /* Structure holding the IN or OUT set for a basic block. */
238 typedef struct dataflow_set_def
240 /* Adjustment of stack offset. */
241 HOST_WIDE_INT stack_adjust;
243 /* Attributes for registers (lists of attrs). */
244 attrs regs[FIRST_PSEUDO_REGISTER];
246 /* Variable locations. */
250 /* Vars that are being traversed. */
250 shared_hash traversed_vars;
253 /* The structure (one for each basic block) containing the information
254 needed for variable tracking. */
255 typedef struct variable_tracking_info_def
257 /* Number of micro operations stored in the MOS array. */
260 /* The array of micro operations. */
261 micro_operation *mos;
263 /* The IN and OUT set for dataflow analysis. */
267 /* The permanent-in dataflow set for this block. This is used to
268 hold values for which we had to compute entry values. ??? This
269 should probably be dynamically allocated, to avoid using more
270 memory in non-debug builds. */
273 /* Has the block been visited in DFS? */
276 /* Has the block been flooded in VTA? */
279 } *variable_tracking_info;
281 /* Structure for chaining the locations. */
282 typedef struct location_chain_def
284 /* Next element in the chain. */
285 struct location_chain_def *next;
287 /* The location (REG, MEM or VALUE). */
290 /* The "value" stored in this location. */
294 enum var_init_status init;
297 /* Structure describing one part of variable. */
298 typedef struct variable_part_def
300 /* Chain of locations of the part. */
301 location_chain loc_chain;
303 /* Location which was last emitted to location list. */
306 /* The offset in the variable. */
307 HOST_WIDE_INT offset;
310 /* Maximum number of location parts.  A single tracked variable is
   described by at most this many variable_part entries (bounds
   variable_def.n_var_parts below).  */
311 #define MAX_VAR_PARTS 16
313 /* Structure describing where the variable is located. */
314 typedef struct variable_def
316 /* The declaration of the variable, or an RTL value being handled
317 like a declaration. */
320 /* Reference count. */
323 /* Number of variable parts. */
326 /* The variable parts. */
327 variable_part var_part[1];
329 typedef const struct variable_def *const_variable;
331 /* Structure for chaining backlinks from referenced VALUEs to
332 DVs that are referencing them. */
333 typedef struct value_chain_def
335 /* Next value_chain entry. */
336 struct value_chain_def *next;
338 /* The declaration of the variable, or an RTL value
339 being handled like a declaration, whose var_parts[0].loc_chain
340 references the VALUE owning this value_chain. */
343 /* Reference count. */
346 typedef const struct value_chain_def *const_value_chain;
348 /* Hash function for DECL for VARIABLE_HTAB.  Uses the decl's UID, which
   is stable for the whole compilation.  */
349 #define VARIABLE_HASH_VAL(decl) (DECL_UID (decl))
351 /* Pointer to the BB's information specific to variable tracking pass.
   The per-block data is stashed in the basic block's AUX field.  */
352 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
354 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT.  Evaluates MEM twice,
   so the argument must be side-effect free.  */
355 #define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
357 /* Alloc pool for struct attrs_def.  */
358 static alloc_pool attrs_pool;
360 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */
361 static alloc_pool var_pool;
363 /* Alloc pool for struct variable_def with a single var_part entry,
   used for one-part variables and VALUEs.  */
364 static alloc_pool valvar_pool;
366 /* Alloc pool for struct location_chain_def.  */
367 static alloc_pool loc_chain_pool;
369 /* Alloc pool for struct shared_hash_def.  */
370 static alloc_pool shared_hash_pool;
372 /* Alloc pool for struct value_chain_def.  */
373 static alloc_pool value_chain_pool;
375 /* Changed variables for which notes will be emitted.  */
376 static htab_t changed_variables;
378 /* Links from VALUEs to DVs referencing them in their current loc_chains.  */
379 static htab_t value_chains;
381 /* Shall notes be emitted?  */
382 static bool emit_notes;
384 /* Empty shared hashtable, shared by all initially-empty dataflow sets.  */
385 static shared_hash empty_shared_hash;
387 /* Scratch register bitmap used by cselib_expand_value_rtx.  */
388 static bitmap scratch_regs = NULL;
390 /* Variable used to tell whether cselib_process_insn called our hook.  */
391 static bool cselib_hook_called;
393 /* Local function prototypes. */
394 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
396 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
398 static void bb_stack_adjust_offset (basic_block);
399 static bool vt_stack_adjustments (void);
400 static rtx adjust_stack_reference (rtx, HOST_WIDE_INT);
401 static hashval_t variable_htab_hash (const void *);
402 static int variable_htab_eq (const void *, const void *);
403 static void variable_htab_free (void *);
405 static void init_attrs_list_set (attrs *);
406 static void attrs_list_clear (attrs *);
407 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
408 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
409 static void attrs_list_copy (attrs *, attrs);
410 static void attrs_list_union (attrs *, attrs);
412 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
413 enum var_init_status);
414 static int vars_copy_1 (void **, void *);
415 static void vars_copy (htab_t, htab_t);
416 static tree var_debug_decl (tree);
417 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
418 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
419 enum var_init_status, rtx);
420 static void var_reg_delete (dataflow_set *, rtx, bool);
421 static void var_regno_delete (dataflow_set *, int);
422 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
423 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
424 enum var_init_status, rtx);
425 static void var_mem_delete (dataflow_set *, rtx, bool);
427 static void dataflow_set_init (dataflow_set *);
428 static void dataflow_set_clear (dataflow_set *);
429 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
430 static int variable_union_info_cmp_pos (const void *, const void *);
431 static int variable_union (void **, void *);
432 static int variable_canonicalize (void **, void *);
433 static void dataflow_set_union (dataflow_set *, dataflow_set *);
434 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
435 static bool canon_value_cmp (rtx, rtx);
436 static int loc_cmp (rtx, rtx);
437 static bool variable_part_different_p (variable_part *, variable_part *);
438 static bool onepart_variable_different_p (variable, variable);
439 static bool variable_different_p (variable, variable, bool);
440 static int dataflow_set_different_1 (void **, void *);
441 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
442 static void dataflow_set_destroy (dataflow_set *);
444 static bool contains_symbol_ref (rtx);
445 static bool track_expr_p (tree, bool);
446 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
447 static int count_uses (rtx *, void *);
448 static void count_uses_1 (rtx *, void *);
449 static void count_stores (rtx, const_rtx, void *);
450 static int add_uses (rtx *, void *);
451 static void add_uses_1 (rtx *, void *);
452 static void add_stores (rtx, const_rtx, void *);
453 static bool compute_bb_dataflow (basic_block);
454 static void vt_find_locations (void);
456 static void dump_attrs_list (attrs);
457 static int dump_var_slot (void **, void *);
458 static void dump_var (variable);
459 static void dump_vars (htab_t);
460 static void dump_dataflow_set (dataflow_set *);
461 static void dump_dataflow_sets (void);
463 static void variable_was_changed (variable, dataflow_set *);
464 static void **set_slot_part (dataflow_set *, rtx, void **,
465 decl_or_value, HOST_WIDE_INT,
466 enum var_init_status, rtx);
467 static void set_variable_part (dataflow_set *, rtx,
468 decl_or_value, HOST_WIDE_INT,
469 enum var_init_status, rtx, enum insert_option);
470 static void **clobber_slot_part (dataflow_set *, rtx,
471 void **, HOST_WIDE_INT, rtx);
472 static void clobber_variable_part (dataflow_set *, rtx,
473 decl_or_value, HOST_WIDE_INT, rtx);
474 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
475 static void delete_variable_part (dataflow_set *, rtx,
476 decl_or_value, HOST_WIDE_INT);
477 static int emit_note_insn_var_location (void **, void *);
478 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
479 static int emit_notes_for_differences_1 (void **, void *);
480 static int emit_notes_for_differences_2 (void **, void *);
481 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
482 static void emit_notes_in_bb (basic_block, dataflow_set *);
483 static void vt_emit_notes (void);
485 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
486 static void vt_add_function_parameters (void);
487 static void vt_initialize (void);
488 static void vt_finalize (void);
490 /* Given a SET, calculate the amount of stack adjustment it contains
491 PRE- and POST-modifying stack pointer.
492 This function is similar to stack_adjust_offset. */
495 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
498 rtx src = SET_SRC (pattern);
499 rtx dest = SET_DEST (pattern);
502 if (dest == stack_pointer_rtx)
504 /* (set (reg sp) (plus (reg sp) (const_int))) */
505 code = GET_CODE (src);
506 if (! (code == PLUS || code == MINUS)
507 || XEXP (src, 0) != stack_pointer_rtx
508 || !CONST_INT_P (XEXP (src, 1)))
512 *post += INTVAL (XEXP (src, 1));
514 *post -= INTVAL (XEXP (src, 1));
516 else if (MEM_P (dest))
518 /* (set (mem (pre_dec (reg sp))) (foo)) */
519 src = XEXP (dest, 0);
520 code = GET_CODE (src);
526 if (XEXP (src, 0) == stack_pointer_rtx)
528 rtx val = XEXP (XEXP (src, 1), 1);
529 /* We handle only adjustments by constant amount. */
530 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
533 if (code == PRE_MODIFY)
534 *pre -= INTVAL (val);
536 *post -= INTVAL (val);
542 if (XEXP (src, 0) == stack_pointer_rtx)
544 *pre += GET_MODE_SIZE (GET_MODE (dest));
550 if (XEXP (src, 0) == stack_pointer_rtx)
552 *post += GET_MODE_SIZE (GET_MODE (dest));
558 if (XEXP (src, 0) == stack_pointer_rtx)
560 *pre -= GET_MODE_SIZE (GET_MODE (dest));
566 if (XEXP (src, 0) == stack_pointer_rtx)
568 *post -= GET_MODE_SIZE (GET_MODE (dest));
579 /* Given an INSN, calculate the amount of stack adjustment it contains
580 PRE- and POST-modifying stack pointer. */
583 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
591 pattern = PATTERN (insn);
592 if (RTX_FRAME_RELATED_P (insn))
594 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
596 pattern = XEXP (expr, 0);
599 if (GET_CODE (pattern) == SET)
600 stack_adjust_offset_pre_post (pattern, pre, post);
601 else if (GET_CODE (pattern) == PARALLEL
602 || GET_CODE (pattern) == SEQUENCE)
606 /* There may be stack adjustments inside compound insns. Search
608 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
609 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
610 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
614 /* Compute stack adjustment in basic block BB. */
617 bb_stack_adjust_offset (basic_block bb)
619 HOST_WIDE_INT offset;
622 offset = VTI (bb)->in.stack_adjust;
623 for (i = 0; i < VTI (bb)->n_mos; i++)
625 if (VTI (bb)->mos[i].type == MO_ADJUST)
626 offset += VTI (bb)->mos[i].u.adjust;
627 else if (VTI (bb)->mos[i].type != MO_CALL)
629 if (MEM_P (VTI (bb)->mos[i].u.loc))
631 VTI (bb)->mos[i].u.loc
632 = adjust_stack_reference (VTI (bb)->mos[i].u.loc, -offset);
636 VTI (bb)->out.stack_adjust = offset;
639 /* Compute stack adjustments for all blocks by traversing DFS tree.
640 Return true when the adjustments on all incoming edges are consistent.
641 Heavily borrowed from pre_and_rev_post_order_compute. */
644 vt_stack_adjustments (void)
646 edge_iterator *stack;
649 /* Initialize entry block. */
650 VTI (ENTRY_BLOCK_PTR)->visited = true;
651 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
653 /* Allocate stack for back-tracking up CFG. */
654 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
657 /* Push the first edge on to the stack. */
658 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
666 /* Look at the edge on the top of the stack. */
668 src = ei_edge (ei)->src;
669 dest = ei_edge (ei)->dest;
671 /* Check if the edge destination has been visited yet. */
672 if (!VTI (dest)->visited)
674 VTI (dest)->visited = true;
675 VTI (dest)->in.stack_adjust = VTI (src)->out.stack_adjust;
676 bb_stack_adjust_offset (dest);
678 if (EDGE_COUNT (dest->succs) > 0)
679 /* Since the DEST node has been visited for the first
680 time, check its successors. */
681 stack[sp++] = ei_start (dest->succs);
685 /* Check whether the adjustments on the edges are the same. */
686 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
692 if (! ei_one_before_end_p (ei))
693 /* Go to the next edge. */
694 ei_next (&stack[sp - 1]);
696 /* Return to previous level if there are no more edges. */
705 /* Adjust stack reference MEM by ADJUSTMENT bytes and make it relative
706 to the argument pointer. Return the new rtx. */
709 adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment)
713 #ifdef FRAME_POINTER_CFA_OFFSET
714 adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
715 cfa = plus_constant (frame_pointer_rtx, adjustment);
717 adjustment -= ARG_POINTER_CFA_OFFSET (current_function_decl);
718 cfa = plus_constant (arg_pointer_rtx, adjustment);
721 addr = replace_rtx (copy_rtx (XEXP (mem, 0)), stack_pointer_rtx, cfa);
722 tmp = simplify_rtx (addr);
726 return replace_equiv_address_nv (mem, addr);
729 /* Return true if a decl_or_value DV is a DECL or NULL. */
731 dv_is_decl_p (decl_or_value dv)
733 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
736 /* Return true if a decl_or_value is a VALUE rtl. */
738 dv_is_value_p (decl_or_value dv)
740 return dv && !dv_is_decl_p (dv);
743 /* Return the decl in the decl_or_value. */
745 dv_as_decl (decl_or_value dv)
747 #ifdef ENABLE_CHECKING
748 gcc_assert (dv_is_decl_p (dv));
753 /* Return the value in the decl_or_value. */
755 dv_as_value (decl_or_value dv)
757 #ifdef ENABLE_CHECKING
758 gcc_assert (dv_is_value_p (dv));
763 /* Return the opaque pointer in the decl_or_value. */
765 dv_as_opaque (decl_or_value dv)
770 /* Return true if a decl_or_value must not have more than one variable
773 dv_onepart_p (decl_or_value dv)
777 if (!MAY_HAVE_DEBUG_INSNS)
780 if (dv_is_value_p (dv))
783 decl = dv_as_decl (dv);
788 return (target_for_debug_bind (decl) != NULL_TREE);
791 /* Return the variable pool to be used for dv, depending on whether it
792 can have multiple parts or not. */
793 static inline alloc_pool
794 dv_pool (decl_or_value dv)
796 return dv_onepart_p (dv) ? valvar_pool : var_pool;
799 /* Build a decl_or_value out of a decl. */
800 static inline decl_or_value
801 dv_from_decl (tree decl)
805 #ifdef ENABLE_CHECKING
806 gcc_assert (dv_is_decl_p (dv));
811 /* Build a decl_or_value out of a value. */
812 static inline decl_or_value
813 dv_from_value (rtx value)
817 #ifdef ENABLE_CHECKING
818 gcc_assert (dv_is_value_p (dv));
823 static inline hashval_t
824 dv_htab_hash (decl_or_value dv)
826 if (dv_is_value_p (dv))
827 return -(hashval_t)(CSELIB_VAL_PTR (dv_as_value (dv))->value);
829 return (VARIABLE_HASH_VAL (dv_as_decl (dv)));
832 /* The hash function for variable_htab, computes the hash value
833 from the declaration of variable X. */
836 variable_htab_hash (const void *x)
838 const_variable const v = (const_variable) x;
840 return dv_htab_hash (v->dv);
843 /* Compare the declaration of variable X with declaration Y. */
846 variable_htab_eq (const void *x, const void *y)
848 const_variable const v = (const_variable) x;
849 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
851 if (dv_as_opaque (v->dv) == dv_as_opaque (dv))
858 visv = dv_is_value_p (v->dv);
859 dvisv = dv_is_value_p (dv);
865 gcc_assert (CSELIB_VAL_PTR (dv_as_value (v->dv))
866 != CSELIB_VAL_PTR (dv_as_value (dv)));
868 gcc_assert (VARIABLE_HASH_VAL (dv_as_decl (v->dv))
869 != VARIABLE_HASH_VAL (dv_as_decl (dv)));
876 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
879 variable_htab_free (void *elem)
882 variable var = (variable) elem;
883 location_chain node, next;
885 gcc_assert (var->refcount > 0);
888 if (var->refcount > 0)
891 for (i = 0; i < var->n_var_parts; i++)
893 for (node = var->var_part[i].loc_chain; node; node = next)
896 pool_free (loc_chain_pool, node);
898 var->var_part[i].loc_chain = NULL;
900 pool_free (dv_pool (var->dv), var);
903 /* The hash function for value_chains htab, computes the hash value
907 value_chain_htab_hash (const void *x)
909 const_value_chain const v = (const_value_chain) x;
911 return dv_htab_hash (v->dv);
914 /* Compare the VALUE X with VALUE Y. */
917 value_chain_htab_eq (const void *x, const void *y)
919 const_value_chain const v = (const_value_chain) x;
920 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
922 return dv_as_opaque (v->dv) == dv_as_opaque (dv);
925 /* Initialize the set (array) SET of attrs to empty lists. */
928 init_attrs_list_set (attrs *set)
932 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
936 /* Make the list *LISTP empty. */
939 attrs_list_clear (attrs *listp)
943 for (list = *listp; list; list = next)
946 pool_free (attrs_pool, list);
951 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
954 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
956 for (; list; list = list->next)
957 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
962 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
965 attrs_list_insert (attrs *listp, decl_or_value dv,
966 HOST_WIDE_INT offset, rtx loc)
970 list = (attrs) pool_alloc (attrs_pool);
973 list->offset = offset;
978 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
981 attrs_list_copy (attrs *dstp, attrs src)
985 attrs_list_clear (dstp);
986 for (; src; src = src->next)
988 n = (attrs) pool_alloc (attrs_pool);
991 n->offset = src->offset;
997 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1000 attrs_list_union (attrs *dstp, attrs src)
1002 for (; src; src = src->next)
1004 if (!attrs_list_member (*dstp, src->dv, src->offset))
1005 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1009 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1013 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1015 gcc_assert (!*dstp);
1016 for (; src; src = src->next)
1018 if (!dv_onepart_p (src->dv))
1019 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1021 for (src = src2; src; src = src->next)
1023 if (!dv_onepart_p (src->dv)
1024 && !attrs_list_member (*dstp, src->dv, src->offset))
1025 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1029 /* Shared hashtable support. */
1031 /* Return true if VARS is shared. */
1034 shared_hash_shared (shared_hash vars)
1036 return vars->refcount > 1;
1039 /* Return the hash table for VARS. */
1041 static inline htab_t
1042 shared_hash_htab (shared_hash vars)
1047 /* Copy variables into a new hash table. */
1050 shared_hash_unshare (shared_hash vars)
1052 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1053 gcc_assert (vars->refcount > 1);
1054 new_vars->refcount = 1;
1056 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1057 variable_htab_eq, variable_htab_free);
1058 vars_copy (new_vars->htab, vars->htab);
1063 /* Increment reference counter on VARS and return it. */
1065 static inline shared_hash
1066 shared_hash_copy (shared_hash vars)
1072 /* Decrement reference counter and destroy hash table if not shared
1076 shared_hash_destroy (shared_hash vars)
1078 gcc_assert (vars->refcount > 0);
1079 if (--vars->refcount == 0)
1081 htab_delete (vars->htab);
1082 pool_free (shared_hash_pool, vars);
1086 /* Unshare *PVARS if shared and return slot for DV. If INS is
1087 INSERT, insert it if not already present. */
1089 static inline void **
1090 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1091 hashval_t dvhash, enum insert_option ins)
1093 if (shared_hash_shared (*pvars))
1094 *pvars = shared_hash_unshare (*pvars);
1095 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1098 static inline void **
1099 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1100 enum insert_option ins)
1102 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1105 /* Return slot for DV, if it is already present in the hash table.
1106 If it is not present, insert it only if VARS is not shared, otherwise
1109 static inline void **
1110 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1112 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1113 shared_hash_shared (vars)
1114 ? NO_INSERT : INSERT);
1117 static inline void **
1118 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1120 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1123 /* Return slot for DV only if it is already present in the hash table. */
1125 static inline void **
1126 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1129 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1133 static inline void **
1134 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1136 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1139 /* Return variable for DV or NULL if not already present in the hash
1142 static inline variable
1143 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1145 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1148 static inline variable
1149 shared_hash_find (shared_hash vars, decl_or_value dv)
1151 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1154 /* Determine a total order between two distinct pointers. Compare the
1155 pointers as integral types if size_t is wide enough, otherwise
1156 resort to bitwise memory compare. The actual order does not
1157 matter, we just need to be consistent, so endianness is
1161 tie_break_pointers (const void *p1, const void *p2)
1163 gcc_assert (p1 != p2);
1165 if (sizeof (size_t) >= sizeof (void*))
1166 return (size_t)p1 < (size_t)p2 ? -1 : 1;
1168 return memcmp (&p1, &p2, sizeof (p1));
1171 /* Return true if TVAL is better than CVAL as a canonical value. We
1172 choose lowest-numbered VALUEs, using the RTX address as a
1173 tie-breaker. The idea is to arrange them into a star topology,
1174 such that all of them are at most one step away from the canonical
1175 value, and the canonical value has backlinks to all of them, in
1176 addition to all the actual locations. We don't enforce this
1177 topology throughout the entire dataflow analysis, though.
1181 canon_value_cmp (rtx tval, rtx cval)
1184 || CSELIB_VAL_PTR (tval)->value < CSELIB_VAL_PTR (cval)->value
1185 || (CSELIB_VAL_PTR (tval)->value == CSELIB_VAL_PTR (cval)->value
1186 && tie_break_pointers (tval, cval) < 0);
1189 static bool dst_can_be_shared;
1191 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
/* NOTE(review): the embedded line numbers jump (1191->1194, 1195->1200, ...);
   the return type, braces, local declarations, else-arms and the final
   return are missing from this dump.  Code lines kept verbatim.  */
1194 unshare_variable (dataflow_set *set, void **slot, variable var,
1195 enum var_init_status initialized)
/* Allocate a fresh variable with refcount 1 and copy identity and
   part count from VAR.  */
1200 new_var = (variable) pool_alloc (dv_pool (var->dv));
1201 new_var->dv = var->dv;
1202 new_var->refcount = 1;
1204 new_var->n_var_parts = var->n_var_parts;
/* Without -fvar-tracking-uninit everything counts as initialized.  */
1206 if (! flag_var_tracking_uninit)
1207 initialized = VAR_INIT_STATUS_INITIALIZED;
/* Deep-copy the location chain of every variable part.  */
1209 for (i = 0; i < var->n_var_parts; i++)
1211 location_chain node;
1212 location_chain *nextp;
1214 new_var->var_part[i].offset = var->var_part[i].offset;
1215 nextp = &new_var->var_part[i].loc_chain;
1216 for (node = var->var_part[i].loc_chain; node; node = node->next)
1218 location_chain new_lc;
1220 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1221 new_lc->next = NULL;
1222 if (node->init > initialized)
1223 new_lc->init = node->init;
1225 new_lc->init = initialized;
/* set_src is only propagated when it is not a MEM.  */
1226 if (node->set_src && !(MEM_P (node->set_src)))
1227 new_lc->set_src = node->set_src;
1229 new_lc->set_src = NULL;
1230 new_lc->loc = node->loc;
1233 nextp = &new_lc->next;
1236 /* We are at the basic block boundary when copying variable description
1237 so set the CUR_LOC to be the first element of the chain. */
1238 if (new_var->var_part[i].loc_chain)
1239 new_var->var_part[i].cur_loc = new_var->var_part[i].loc_chain->loc;
1241 new_var->var_part[i].cur_loc = NULL;
/* The copy is private, so sharing is no longer possible; re-find the
   slot when the containing hash table was shared or is being
   traversed.  */
1244 dst_can_be_shared = false;
1245 if (shared_hash_shared (set->vars))
1246 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1247 else if (set->traversed_vars && set->vars != set->traversed_vars)
1248 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1253 /* Add a variable from *SLOT to hash table DATA and increase its reference
/* NOTE(review): numbering gaps — the rest of this comment, the return
   type, braces, the refcount increment and the slot store are missing
   from this dump.  Code lines kept verbatim.  */
1257 vars_copy_1 (void **slot, void *data)
1259 htab_t dst = (htab_t) data;
1263 src = (variable) *slot;
/* Find (or create) the slot for SRC's decl-or-value in DST.  */
1266 dstp = htab_find_slot_with_hash (dst, src->dv,
1267 dv_htab_hash (src->dv),
1271 /* Continue traversing the hash table. */
1275 /* Copy all variables from hash table SRC to hash table DST. */
1278 vars_copy (htab_t dst, htab_t src)
1280 htab_traverse_noresize (src, vars_copy_1, dst);
1283 /* Map a decl to its main debug decl. */
1286 var_debug_decl (tree decl)
1288 if (decl && DECL_P (decl)
1289 && DECL_DEBUG_EXPR_IS_FROM (decl) && DECL_DEBUG_EXPR (decl)
1290 && DECL_P (DECL_DEBUG_EXPR (decl)))
1291 decl = DECL_DEBUG_EXPR (decl);
1296 /* Set the register LOC to contain DV, OFFSET. */
/* NOTE(review): numbering gaps — the return type, braces, the attrs
   node declaration and the guard that skips attrs_list_insert when an
   existing (dv, offset) node was found are missing from this dump.  */
1299 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1300 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1301 enum insert_option iopt)
1304 bool decl_p = dv_is_decl_p (dv);
/* Decls are recorded under their main debug decl.  */
1307 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
/* Search the register's attribute list for an existing (dv, offset)
   entry before inserting a new one.  */
1309 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1310 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1311 && node->offset == offset)
1314 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1315 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1318 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
/* NOTE(review): numbering gaps — the return type, the rest of the
   parameter list (presumably 'rtx set_src)'), braces and blank lines
   are missing from this dump.  Code lines kept verbatim.  */
1321 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1324 tree decl = REG_EXPR (loc);
1325 HOST_WIDE_INT offset = REG_OFFSET (loc);
/* Delegate to the decl/offset flavor, always inserting.  */
1327 var_reg_decl_set (set, loc, initialized,
1328 dv_from_decl (decl), offset, set_src, INSERT);
/* Return the recorded initialization status of location LOC for
   variable DV in SET.  NOTE(review): numbering gaps — braces, local
   declarations and the final 'return ret_val;' are missing from this
   dump.  Code lines kept verbatim.  */
1331 static enum var_init_status
1332 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1336 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
/* Uninit tracking disabled: everything counts as initialized.  */
1338 if (! flag_var_tracking_uninit)
1339 return VAR_INIT_STATUS_INITIALIZED;
1341 var = shared_hash_find (set->vars, dv);
/* Scan each part's location chain until a matching location yields a
   status.  */
1344 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1346 location_chain nextp;
1347 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1348 if (rtx_equal_p (nextp->loc, loc))
1350 ret_val = nextp->init;
1359 /* Delete current content of register LOC in dataflow set SET and set
1360 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1361 MODIFY is true, any other live copies of the same variable part are
1362 also deleted from the dataflow set, otherwise the variable part is
1363 assumed to be copied from another location holding the same
/* NOTE(review): numbering gaps — the end of the comment, return type,
   braces, node/next declarations and loop else-arms are missing from
   this dump.  Code lines kept verbatim.  */
1367 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1368 enum var_init_status initialized, rtx set_src)
1370 tree decl = REG_EXPR (loc);
1371 HOST_WIDE_INT offset = REG_OFFSET (loc);
1375 decl = var_debug_decl (decl);
/* Look up the status from the existing record when unknown.  */
1377 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1378 initialized = get_init_value (set, loc, dv_from_decl (decl));
/* Drop every other (dv, offset) association held by this register.  */
1380 nextp = &set->regs[REGNO (loc)];
1381 for (node = *nextp; node; node = next)
1384 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1386 delete_variable_part (set, node->loc, node->dv, node->offset);
1387 pool_free (attrs_pool, node);
1393 nextp = &node->next;
/* MODIFY presumably gates this clobber — guard line missing here;
   confirm against the original.  */
1397 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1398 var_reg_set (set, loc, initialized, set_src);
1401 /* Delete the association of register LOC in dataflow set SET with any
1402 variables that aren't onepart. If CLOBBER is true, also delete any
1403 other live copies of the same variable part, and delete the
1404 association with onepart dvs too. */
/* NOTE(review): numbering gaps — return type, braces, node/next
   declarations, and the 'if (clobber)' guard implied by the comment
   are missing from this dump.  Code lines kept verbatim.  */
1407 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1409 attrs *nextp = &set->regs[REGNO (loc)];
1414 tree decl = REG_EXPR (loc);
1415 HOST_WIDE_INT offset = REG_OFFSET (loc);
1417 decl = var_debug_decl (decl);
1419 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
/* Remove register attributes; non-onepart ones unconditionally,
   onepart ones only when clobbering.  */
1422 for (node = *nextp; node; node = next)
1425 if (clobber || !dv_onepart_p (node->dv))
1427 delete_variable_part (set, node->loc, node->dv, node->offset);
1428 pool_free (attrs_pool, node);
1432 nextp = &node->next;
1436 /* Delete content of register with number REGNO in dataflow set SET. */
/* NOTE(review): numbering gaps — return type, braces, node/next
   declarations and the final '*reg = NULL;' style reset are missing
   from this dump.  Code lines kept verbatim.  */
1439 var_regno_delete (dataflow_set *set, int regno)
1441 attrs *reg = &set->regs[regno];
/* Free every attribute node and its variable-part association.  */
1444 for (node = *reg; node; node = next)
1447 delete_variable_part (set, node->loc, node->dv, node->offset);
1448 pool_free (attrs_pool, node);
1453 /* Set the location of DV, OFFSET as the MEM LOC. */
1456 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1457 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1458 enum insert_option iopt)
1460 if (dv_is_decl_p (dv))
1461 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1463 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1466 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1468 Adjust the address first if it is stack pointer based. */
1471 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1474 tree decl = MEM_EXPR (loc);
1475 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1477 var_mem_decl_set (set, loc, initialized,
1478 dv_from_decl (decl), offset, set_src, INSERT);
1481 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1482 dataflow set SET to LOC. If MODIFY is true, any other live copies
1483 of the same variable part are also deleted from the dataflow set,
1484 otherwise the variable part is assumed to be copied from another
1485 location holding the same part.
1486 Adjust the address first if it is stack pointer based. */
/* NOTE(review): numbering gaps — return type, braces and the
   'if (modify)' guard implied by the comment are missing from this
   dump.  Code lines kept verbatim.  */
1489 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1490 enum var_init_status initialized, rtx set_src)
1492 tree decl = MEM_EXPR (loc);
1493 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1495 decl = var_debug_decl (decl);
/* Look up the status from the existing record when unknown.  */
1497 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1498 initialized = get_init_value (set, loc, dv_from_decl (decl));
1501 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1502 var_mem_set (set, loc, initialized, set_src);
1505 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1506 true, also delete any other live copies of the same variable part.
1507 Adjust the address first if it is stack pointer based. */
1510 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1512 tree decl = MEM_EXPR (loc);
1513 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1515 decl = var_debug_decl (decl);
1517 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1518 delete_variable_part (set, loc, dv_from_decl (decl), offset);
1521 /* Map a value to a location it was just stored in. */
/* NOTE(review): numbering gaps — return type, braces, the dump_file
   guard around the fprintf block and the 'if (REG_P (loc))' test
   implied by the else-if chain below are missing from this dump.  */
1524 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn)
1526 cselib_val *v = CSELIB_VAL_PTR (val);
1528 gcc_assert (cselib_preserved_value_p (v));
/* Dump-file tracing of the value, its new location and its existing
   cselib locations.  */
1532 fprintf (dump_file, "%i: ", INSN_UID (insn));
1533 print_inline_rtx (dump_file, val, 0);
1534 fprintf (dump_file, " stored in ");
1535 print_inline_rtx (dump_file, loc, 0);
1538 struct elt_loc_list *l;
1539 for (l = v->locs; l; l = l->next)
1541 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
1542 print_inline_rtx (dump_file, l->loc, 0);
1545 fprintf (dump_file, "\n");
/* Registers, MEMs and other locations each take a different path.  */
1550 var_regno_delete (set, REGNO (loc));
1551 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1552 dv_from_value (val), 0, NULL_RTX, INSERT);
1554 else if (MEM_P (loc))
1555 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1556 dv_from_value (val), 0, NULL_RTX, INSERT);
1558 set_variable_part (set, loc, dv_from_value (val), 0,
1559 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1562 /* Reset this node, detaching all its equivalences. Return the slot
1563 in the variable hash table that holds dv, if there is one. */
/* NOTE(review): numbering gaps — return type, braces, the 'cval'
   declaration/initialization and several else-arms are missing from
   this dump.  Code lines kept verbatim.  */
1566 val_reset (dataflow_set *set, decl_or_value dv)
1568 variable var = shared_hash_find (set->vars, dv) ;
1569 location_chain node;
1572 if (!var || !var->n_var_parts)
1575 gcc_assert (var->n_var_parts == 1);
/* First pass: pick the best canonical value among the VALUE
   locations (comparison via canon_value_cmp).  */
1578 for (node = var->var_part[0].loc_chain; node; node = node->next)
1579 if (GET_CODE (node->loc) == VALUE
1580 && canon_value_cmp (node->loc, cval))
/* Second pass: repoint every other VALUE's equivalence at CVAL.  */
1583 for (node = var->var_part[0].loc_chain; node; node = node->next)
1584 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
1586 /* Redirect the equivalence link to the new canonical
1587 value, or simply remove it if it would point at
1590 set_variable_part (set, cval, dv_from_value (node->loc),
1591 0, node->init, node->set_src, NO_INSERT);
1592 delete_variable_part (set, dv_as_value (dv),
1593 dv_from_value (node->loc), 0);
1598 decl_or_value cdv = dv_from_value (cval);
1600 /* Keep the remaining values connected, accumulating links
1601 in the canonical value. */
1602 for (node = var->var_part[0].loc_chain; node; node = node->next)
1604 if (node->loc == cval)
1606 else if (GET_CODE (node->loc) == REG)
1607 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
1608 node->set_src, NO_INSERT);
1609 else if (GET_CODE (node->loc) == MEM)
1610 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
1611 node->set_src, NO_INSERT);
1613 set_variable_part (set, node->loc, cdv, 0,
1614 node->init, node->set_src, NO_INSERT);
1618 /* We remove this last, to make sure that the canonical value is not
1619 removed to the point of requiring reinsertion. */
1621 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
1623 clobber_variable_part (set, NULL, dv, 0, NULL);
1625 /* ??? Should we make sure there aren't other available values or
1626 variables whose values involve this one other than by
1627 equivalence? E.g., at the very least we should reset MEMs, those
1628 shouldn't be too hard to find cselib-looking up the value as an
1629 address, then locating the resulting value in our own hash
1633 /* Find the values in a given location and map the val to another
1634 value, if it is unique, or add the location as one holding the
/* NOTE(review): numbering gaps — the end of this comment, return
   type, braces, the 'if (REG_P (loc))' test implied by the else-if at
   line 1683, the loop's 'found = node' arm and the '!found' guard
   before line 1680 are missing from this dump.  */
1638 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
1640 decl_or_value dv = dv_from_value (val);
1642 if (dump_file && (dump_flags & TDF_DETAILS))
1645 fprintf (dump_file, "%i: ", INSN_UID (insn));
1647 fprintf (dump_file, "head: ");
1648 print_inline_rtx (dump_file, val, 0);
1649 fputs (" is at ", dump_file);
1650 print_inline_rtx (dump_file, loc, 0);
1651 fputc ('\n', dump_file);
/* Detach VAL's previous equivalences before remapping it.  */
1654 val_reset (set, dv);
1658 attrs node, found = NULL;
/* Search the register's attributes for a same-mode value.  */
1660 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1661 if (dv_is_value_p (node->dv)
1662 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
1666 /* Map incoming equivalences. ??? Wouldn't it be nice if
1667 we just started sharing the location lists? Maybe a
1668 circular list ending at the value itself or some
1670 set_variable_part (set, dv_as_value (node->dv),
1671 dv_from_value (val), node->offset,
1672 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1673 set_variable_part (set, val, node->dv, node->offset,
1674 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1677 /* If we didn't find any equivalence, we need to remember that
1678 this value is held in the named register. */
1680 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1681 dv_from_value (val), 0, NULL_RTX, INSERT);
1683 else if (MEM_P (loc))
1684 /* ??? Merge equivalent MEMs. */
1685 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1686 dv_from_value (val), 0, NULL_RTX, INSERT);
1688 /* ??? Merge equivalent expressions. */
1689 set_variable_part (set, loc, dv_from_value (val), 0,
1690 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1693 /* Initialize dataflow set SET to be empty.
1694 VARS_SIZE is the initial size of hash table VARS. */
1697 dataflow_set_init (dataflow_set *set)
1699 init_attrs_list_set (set->regs);
1700 set->vars = shared_hash_copy (empty_shared_hash);
1701 set->stack_adjust = 0;
1702 set->traversed_vars = NULL;
1705 /* Delete the contents of dataflow set SET. */
1708 dataflow_set_clear (dataflow_set *set)
1712 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1713 attrs_list_clear (&set->regs[i]);
1715 shared_hash_destroy (set->vars);
1716 set->vars = shared_hash_copy (empty_shared_hash);
1719 /* Copy the contents of dataflow set SRC to DST. */
1722 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
1726 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1727 attrs_list_copy (&dst->regs[i], src->regs[i]);
1729 shared_hash_destroy (dst->vars);
1730 dst->vars = shared_hash_copy (src->vars);
1731 dst->stack_adjust = src->stack_adjust;
1734 /* Information for merging lists of locations for a given offset of variable.
/* NOTE(review): numbering gaps — the end of this comment, the
   struct's braces and the field declarations following each field
   comment (lc, pos, pos_dst) are missing from this dump.  */
1736 struct variable_union_info
1738 /* Node of the location chain. */
1741 /* The sum of positions in the input chains. */
1744 /* The position in the chain of DST dataflow set. */
1748 /* Buffer for location list sorting and its allocated size. */
1749 static struct variable_union_info *vui_vec;
1750 static int vui_allocated;
1752 /* Compare function for qsort, order the structures by POS element. */
1755 variable_union_info_cmp_pos (const void *n1, const void *n2)
1757 const struct variable_union_info *const i1 =
1758 (const struct variable_union_info *) n1;
1759 const struct variable_union_info *const i2 =
1760 ( const struct variable_union_info *) n2;
1762 if (i1->pos != i2->pos)
1763 return i1->pos - i2->pos;
1765 return (i1->pos_dst - i2->pos_dst);
1768 /* Compute union of location parts of variable *SLOT and the same variable
1769 from hash table DATA. Compute "sorted" union of the location chains
1770 for common offsets, i.e. the locations of a variable part are sorted by
1771 a priority where the priority is the sum of the positions in the 2 chains
1772 (if a location is only in one list the position in the second list is
1773 defined to be larger than the length of the chains).
1774 When we are updating the location parts the newest location is in the
1775 beginning of the chain, so when we do the described "sorted" union
1776 we keep the newest locations in the beginning. */
/* NOTE(review): heavy numbering gaps throughout — return type, braces,
   many local declarations, else-arms, break/goto statements and the
   final 'return 1;' are missing from this dump.  Code lines kept
   verbatim.  */
1779 variable_union (void **slot, void *data)
1783 dataflow_set *set = (dataflow_set *) data;
1786 src = (variable) *slot;
/* If DST has no entry for this dv yet, insert SRC (unshared).  */
1787 dstp = shared_hash_find_slot (set->vars, src->dv);
1788 if (!dstp || !*dstp)
1792 dst_can_be_shared = false;
1794 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
1798 /* If CUR_LOC of some variable part is not the first element of
1799 the location chain we are going to change it so we have to make
1800 a copy of the variable. */
1801 for (k = 0; k < src->n_var_parts; k++)
1803 gcc_assert (!src->var_part[k].loc_chain
1804 == !src->var_part[k].cur_loc);
1805 if (src->var_part[k].loc_chain)
1807 gcc_assert (src->var_part[k].cur_loc);
1808 if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
1812 if (k < src->n_var_parts)
1813 dstp = unshare_variable (set, dstp, src, VAR_INIT_STATUS_UNKNOWN);
1815 /* Continue traversing the hash table. */
1819 dst = (variable) *dstp;
1821 gcc_assert (src->n_var_parts);
1823 /* We can combine one-part variables very efficiently, because their
1824 entries are in canonical order. */
1825 if (dv_onepart_p (src->dv))
1827 location_chain *nodep, dnode, snode;
1829 gcc_assert (src->n_var_parts == 1);
1830 gcc_assert (dst->n_var_parts == 1);
/* Merge the two loc_cmp-ordered chains, inserting SRC-only nodes
   into DST, unsharing DST on first modification.  */
1832 snode = src->var_part[0].loc_chain;
1835 restart_onepart_unshared:
1836 nodep = &dst->var_part[0].loc_chain;
1842 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
1846 location_chain nnode;
1848 if (dst->refcount != 1 || shared_hash_shared (set->vars))
1850 dstp = unshare_variable (set, dstp, dst,
1851 VAR_INIT_STATUS_INITIALIZED);
1852 dst = (variable)*dstp;
1853 goto restart_onepart_unshared;
1856 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
1857 nnode->loc = snode->loc;
1858 nnode->init = snode->init;
1859 if (!snode->set_src || MEM_P (snode->set_src))
1860 nnode->set_src = NULL;
1862 nnode->set_src = snode->set_src;
1863 nnode->next = dnode;
1866 #ifdef ENABLE_CHECKING
1868 gcc_assert (rtx_equal_p (dnode->loc, snode->loc));
1872 snode = snode->next;
1874 nodep = &dnode->next;
1878 dst->var_part[0].cur_loc = dst->var_part[0].loc_chain->loc;
1883 /* Count the number of location parts, result is K. */
1884 for (i = 0, j = 0, k = 0;
1885 i < src->n_var_parts && j < dst->n_var_parts; k++)
1887 if (src->var_part[i].offset == dst->var_part[j].offset)
1892 else if (src->var_part[i].offset < dst->var_part[j].offset)
1897 k += src->n_var_parts - i;
1898 k += dst->n_var_parts - j;
1900 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
1901 thus there are at most MAX_VAR_PARTS different offsets. */
1902 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
1904 if ((dst->refcount > 1 || shared_hash_shared (set->vars))
1905 && dst->n_var_parts != k)
1907 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
1908 dst = (variable)*dstp;
/* Merge the parts from highest offset down, writing into slot K.  */
1911 i = src->n_var_parts - 1;
1912 j = dst->n_var_parts - 1;
1913 dst->n_var_parts = k;
1915 for (k--; k >= 0; k--)
1917 location_chain node, node2;
1919 if (i >= 0 && j >= 0
1920 && src->var_part[i].offset == dst->var_part[j].offset)
1922 /* Compute the "sorted" union of the chains, i.e. the locations which
1923 are in both chains go first, they are sorted by the sum of
1924 positions in the chains. */
1927 struct variable_union_info *vui;
1929 /* If DST is shared compare the location chains.
1930 If they are different we will modify the chain in DST with
1931 high probability so make a copy of DST. */
1932 if (dst->refcount > 1 || shared_hash_shared (set->vars))
1934 for (node = src->var_part[i].loc_chain,
1935 node2 = dst->var_part[j].loc_chain; node && node2;
1936 node = node->next, node2 = node2->next)
1938 if (!((REG_P (node2->loc)
1939 && REG_P (node->loc)
1940 && REGNO (node2->loc) == REGNO (node->loc))
1941 || rtx_equal_p (node2->loc, node->loc)))
1943 if (node2->init < node->init)
1944 node2->init = node->init;
1950 dstp = unshare_variable (set, dstp, dst,
1951 VAR_INIT_STATUS_UNKNOWN);
1952 dst = (variable)*dstp;
/* Measure both chains (src_l, dst_l — declarations missing here).  */
1957 for (node = src->var_part[i].loc_chain; node; node = node->next)
1960 for (node = dst->var_part[j].loc_chain; node; node = node->next)
1965 /* The most common case, much simpler, no qsort is needed. */
1966 location_chain dstnode = dst->var_part[j].loc_chain;
1967 dst->var_part[k].loc_chain = dstnode;
1968 dst->var_part[k].offset = dst->var_part[j].offset;
1970 for (node = src->var_part[i].loc_chain; node; node = node->next)
1971 if (!((REG_P (dstnode->loc)
1972 && REG_P (node->loc)
1973 && REGNO (dstnode->loc) == REGNO (node->loc))
1974 || rtx_equal_p (dstnode->loc, node->loc)))
1976 location_chain new_node;
1978 /* Copy the location from SRC. */
1979 new_node = (location_chain) pool_alloc (loc_chain_pool);
1980 new_node->loc = node->loc;
1981 new_node->init = node->init;
1982 if (!node->set_src || MEM_P (node->set_src))
1983 new_node->set_src = NULL;
1985 new_node->set_src = node->set_src;
1986 node2->next = new_node;
/* General case: build the VUI array and sort by priority.  */
1993 if (src_l + dst_l > vui_allocated)
1995 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
1996 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2001 /* Fill in the locations from DST. */
2002 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2003 node = node->next, jj++)
2006 vui[jj].pos_dst = jj;
2008 /* Pos plus value larger than a sum of 2 valid positions. */
2009 vui[jj].pos = jj + src_l + dst_l;
2012 /* Fill in the locations from SRC. */
2014 for (node = src->var_part[i].loc_chain, ii = 0; node;
2015 node = node->next, ii++)
2017 /* Find location from NODE. */
2018 for (jj = 0; jj < dst_l; jj++)
2020 if ((REG_P (vui[jj].lc->loc)
2021 && REG_P (node->loc)
2022 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2023 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2025 vui[jj].pos = jj + ii;
2029 if (jj >= dst_l) /* The location has not been found. */
2031 location_chain new_node;
2033 /* Copy the location from SRC. */
2034 new_node = (location_chain) pool_alloc (loc_chain_pool);
2035 new_node->loc = node->loc;
2036 new_node->init = node->init;
2037 if (!node->set_src || MEM_P (node->set_src))
2038 new_node->set_src = NULL;
2040 new_node->set_src = node->set_src;
2041 vui[n].lc = new_node;
2042 vui[n].pos_dst = src_l + dst_l;
2043 vui[n].pos = ii + src_l + dst_l;
2050 /* Special case still very common case. For dst_l == 2
2051 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2052 vui[i].pos == i + src_l + dst_l. */
2053 if (vui[0].pos > vui[1].pos)
2055 /* Order should be 1, 0, 2... */
2056 dst->var_part[k].loc_chain = vui[1].lc;
2057 vui[1].lc->next = vui[0].lc;
2060 vui[0].lc->next = vui[2].lc;
2061 vui[n - 1].lc->next = NULL;
2064 vui[0].lc->next = NULL;
2069 dst->var_part[k].loc_chain = vui[0].lc;
2070 if (n >= 3 && vui[2].pos < vui[1].pos)
2072 /* Order should be 0, 2, 1, 3... */
2073 vui[0].lc->next = vui[2].lc;
2074 vui[2].lc->next = vui[1].lc;
2077 vui[1].lc->next = vui[3].lc;
2078 vui[n - 1].lc->next = NULL;
2081 vui[1].lc->next = NULL;
2086 /* Order should be 0, 1, 2... */
2088 vui[n - 1].lc->next = NULL;
2091 for (; ii < n; ii++)
2092 vui[ii - 1].lc->next = vui[ii].lc;
/* Fallback: full qsort by priority, then relink.  */
2096 qsort (vui, n, sizeof (struct variable_union_info),
2097 variable_union_info_cmp_pos);
2099 /* Reconnect the nodes in sorted order. */
2100 for (ii = 1; ii < n; ii++)
2101 vui[ii - 1].lc->next = vui[ii].lc;
2102 vui[n - 1].lc->next = NULL;
2103 dst->var_part[k].loc_chain = vui[0].lc;
2106 dst->var_part[k].offset = dst->var_part[j].offset;
/* Offset present only in DST: keep DST's part.  */
2111 else if ((i >= 0 && j >= 0
2112 && src->var_part[i].offset < dst->var_part[j].offset)
2115 dst->var_part[k] = dst->var_part[j];
/* Offset present only in SRC: deep-copy SRC's chain.  */
2118 else if ((i >= 0 && j >= 0
2119 && src->var_part[i].offset > dst->var_part[j].offset)
2122 location_chain *nextp;
2124 /* Copy the chain from SRC. */
2125 nextp = &dst->var_part[k].loc_chain;
2126 for (node = src->var_part[i].loc_chain; node; node = node->next)
2128 location_chain new_lc;
2130 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2131 new_lc->next = NULL;
2132 new_lc->init = node->init;
2133 if (!node->set_src || MEM_P (node->set_src))
2134 new_lc->set_src = NULL;
2136 new_lc->set_src = node->set_src;
2137 new_lc->loc = node->loc;
2140 nextp = &new_lc->next;
2143 dst->var_part[k].offset = src->var_part[i].offset;
2147 /* We are at the basic block boundary when computing union
2148 so set the CUR_LOC to be the first element of the chain. */
2149 if (dst->var_part[k].loc_chain)
2150 dst->var_part[k].cur_loc = dst->var_part[k].loc_chain->loc;
2152 dst->var_part[k].cur_loc = NULL;
/* Propagate the stronger init status to locations present in both.  */
2155 if (flag_var_tracking_uninit)
2156 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2158 location_chain node, node2;
2159 for (node = src->var_part[i].loc_chain; node; node = node->next)
2160 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2161 if (rtx_equal_p (node->loc, node2->loc))
2163 if (node->init > node2->init)
2164 node2->init = node->init;
2168 /* Continue traversing the hash table. */
2172 /* Like variable_union, but only used when doing dataflow_set_union
2173 into an empty hashtab. To allow sharing, dst is initially shared
2174 with src (so all variables are "copied" from src to dst hashtab),
2175 so only unshare_variable for variables that need canonicalization
/* NOTE(review): numbering gaps — the end of the comment, return type,
   braces, loop break and the final 'return 1;' are missing from this
   dump.  Code lines kept verbatim.  */
2179 variable_canonicalize (void **slot, void *data)
2182 dataflow_set *set = (dataflow_set *) data;
2185 src = *(variable *) slot;
2187 /* If CUR_LOC of some variable part is not the first element of
2188 the location chain we are going to change it so we have to make
2189 a copy of the variable. */
2190 for (k = 0; k < src->n_var_parts; k++)
2192 gcc_assert (!src->var_part[k].loc_chain == !src->var_part[k].cur_loc);
2193 if (src->var_part[k].loc_chain)
2195 gcc_assert (src->var_part[k].cur_loc);
2196 if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
2200 if (k < src->n_var_parts)
2201 slot = unshare_variable (set, slot, src, VAR_INIT_STATUS_UNKNOWN);
2205 /* Compute union of dataflow sets SRC and DST and store it to DST. */
/* NOTE(review): numbering gaps — return type, braces and the 'else'
   between the two htab_traverse paths are missing from this dump.  */
2208 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2212 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2213 attrs_list_union (&dst->regs[i], src->regs[i]);
/* Fast path: DST empty — share SRC's table and only canonicalize;
   otherwise do a full per-variable union.  */
2215 if (dst->vars == empty_shared_hash)
2217 shared_hash_destroy (dst->vars);
2218 dst->vars = shared_hash_copy (src->vars);
2219 dst->traversed_vars = dst->vars;
2220 htab_traverse (shared_hash_htab (dst->vars), variable_canonicalize, dst);
2221 dst->traversed_vars = NULL;
2224 htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
/* These flags reuse existing rtx/tree bits ('used', 'frame_related',
   TREE_VISITED) for var-tracking bookkeeping; they are only
   meaningful on VALUE/DEBUG_EXPR rtxes and decls within this pass.  */
2227 /* Whether the value is currently being expanded. */
2228 #define VALUE_RECURSED_INTO(x) \
2229 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2230 /* Whether the value is in changed_variables hash table. */
2231 #define VALUE_CHANGED(x) \
2232 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2233 /* Whether the decl is in changed_variables hash table. */
2234 #define DECL_CHANGED(x) TREE_VISITED (x)
2236 /* Record that DV has been added into resp. removed from changed_variables
2240 set_dv_changed (decl_or_value dv, bool newv)
2242 if (dv_is_value_p (dv))
2243 VALUE_CHANGED (dv_as_value (dv)) = newv;
2245 DECL_CHANGED (dv_as_decl (dv)) = newv;
2248 /* Return true if DV is present in changed_variables hash table. */
2251 dv_changed_p (decl_or_value dv)
2253 return (dv_is_value_p (dv)
2254 ? VALUE_CHANGED (dv_as_value (dv))
2255 : DECL_CHANGED (dv_as_decl (dv)));
2258 /* Vector of VALUEs that should have VALUE_RECURSED_INTO bit cleared
2259 at the end of find_loc_in_1pdv. Not a static variable in find_loc_in_1pdv
2260 to avoid constant allocation/freeing of it. */
/* Pushed to in find_loc_in_1pdv_1; drained and truncated in
   find_loc_in_1pdv.  */
2261 static VEC(rtx, heap) *values_to_unmark;
2263 /* Helper function for find_loc_in_1pdv.
2264 Return a location list node whose loc is rtx_equal to LOC, in the
2265 location list of a one-part variable or value VAR, or in that of
2266 any values recursively mentioned in the location lists. */
/* NOTE(review): numbering gaps — braces, early returns (e.g. for the
   !n_var_parts case), the 'return where;' and the final 'return
   NULL;' are missing from this dump.  Code lines kept verbatim.  */
2268 static location_chain
2269 find_loc_in_1pdv_1 (rtx loc, variable var, htab_t vars)
2271 location_chain node;
2276 gcc_assert (dv_onepart_p (var->dv));
2278 if (!var->n_var_parts)
2281 gcc_assert (var->var_part[0].offset == 0);
/* Direct hit first; otherwise recurse through VALUE locations,
   marking each so cycles are not re-entered.  */
2283 for (node = var->var_part[0].loc_chain; node; node = node->next)
2284 if (rtx_equal_p (loc, node->loc))
2286 else if (GET_CODE (node->loc) == VALUE
2287 && !VALUE_RECURSED_INTO (node->loc))
2289 decl_or_value dv = dv_from_value (node->loc);
2290 variable var = (variable)
2291 htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2295 location_chain where;
2296 VALUE_RECURSED_INTO (node->loc) = true;
2297 VEC_safe_push (rtx, heap, values_to_unmark, node->loc);
2298 if ((where = find_loc_in_1pdv_1 (loc, var, vars)))
2306 /* Return a location list node whose loc is rtx_equal to LOC, in the
2307 location list of a one-part variable or value VAR, or in that of
2308 any values recursively mentioned in the location lists. */
/* NOTE(review): numbering gaps — braces, local declarations (ret, i,
   value) and the final 'return ret;' are missing from this dump.  */
2310 static location_chain
2311 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2317 ret = find_loc_in_1pdv_1 (loc, var, vars);
/* Clear the recursion marks the helper set and empty the vector for
   the next call.  */
2318 for (i = 0; VEC_iterate (rtx, values_to_unmark, i, value); i++)
2319 VALUE_RECURSED_INTO (value) = false;
2320 VEC_truncate (rtx, values_to_unmark, 0);
2324 /* Hash table iteration argument passed to variable_merge. */
/* NOTE(review): numbering gaps — the 'struct dfset_merge' keyword
   line, braces and the field declarations following the first three
   comments are missing from this dump.  */
2327 /* The set in which the merge is to be inserted. */
2329 /* The set that we're iterating in. */
2331 /* The set that may contain the other dv we are to merge with. */
2333 /* Number of onepart dvs in src. */
2334 int src_onepart_cnt;
2337 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
2338 loc_cmp order, and it is maintained as such. */
/* NOTE(review): numbering gaps — return type, braces, the 'r'
   declaration, the early return / break for r == 0 / r > 0, and the
   'node->loc = loc;' and '*nodep = node;' lines are missing from this
   dump.  Code lines kept verbatim.  */
2341 insert_into_intersection (location_chain *nodep, rtx loc,
2342 enum var_init_status status)
2344 location_chain node;
/* Walk until the insertion point; on an exact match, only weaken the
   init status.  */
2347 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2348 if ((r = loc_cmp (node->loc, loc)) == 0)
2350 node->init = MIN (node->init, status);
2356 node = (location_chain) pool_alloc (loc_chain_pool);
2359 node->set_src = NULL;
2360 node->init = status;
2361 node->next = *nodep;
2365 /* Insert in DEST the intersection of the locations present in both
2366 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2367 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
/* NOTE(review): numbering gaps — the end of this comment, return
   type, braces, 'continue' statements and the recursion's last
   argument (presumably s2var) are missing from this dump.  */
2371 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2372 location_chain s1node, variable s2var)
2374 dataflow_set *s1set = dsm->cur;
2375 dataflow_set *s2set = dsm->src;
2376 location_chain found;
2378 for (; s1node; s1node = s1node->next)
2380 if (s1node->loc == val)
/* A location present in both chains enters the intersection with the
   weaker of the two init statuses.  */
2383 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2384 shared_hash_htab (s2set->vars))))
2386 insert_into_intersection (dest, s1node->loc,
2387 MIN (s1node->init, found->init));
/* Recurse through VALUE equivalences, guarding against cycles.  */
2391 if (GET_CODE (s1node->loc) == VALUE
2392 && !VALUE_RECURSED_INTO (s1node->loc))
2394 decl_or_value dv = dv_from_value (s1node->loc);
2395 variable svar = shared_hash_find (s1set->vars, dv);
2398 if (svar->n_var_parts == 1)
2400 VALUE_RECURSED_INTO (s1node->loc) = true;
2401 intersect_loc_chains (val, dest, dsm,
2402 svar->var_part[0].loc_chain,
2404 VALUE_RECURSED_INTO (s1node->loc) = false;
2409 /* ??? if the location is equivalent to any location in src,
2410 searched recursively
2412 add to dst the values needed to represent the equivalence
2414 telling whether locations S is equivalent to another dv's
2417 for each location D in the list
2419 if S and D satisfy rtx_equal_p, then it is present
2421 else if D is a value, recurse without cycles
2423 else if S and D have the same CODE and MODE
2425 for each operand oS and the corresponding oD
2427 if oS and oD are not equivalent, then S an D are not equivalent
2429 else if they are RTX vectors
2431 if any vector oS element is not equivalent to its respective oD,
2432 then S and D are not equivalent
2440 /* Return -1 if X should be before Y in a location list for a 1-part
2441 variable, 1 if Y should be before X, and 0 if they're equivalent
2442 and should not appear in the list. */
/* NOTE(review): heavy numbering gaps — return type, braces, many
   'return -1/0/1' lines, the REG_P/MEM_P tests and parts of the
   format-string switch are missing from this dump.  Code lines kept
   verbatim.  */
2445 loc_cmp (rtx x, rtx y)
2448 RTX_CODE code = GET_CODE (x);
/* Registers order by REGNO.  */
2458 gcc_assert (GET_MODE (x) == GET_MODE (y));
2459 if (REGNO (x) == REGNO (y))
2461 else if (REGNO (x) < REGNO (y))
/* MEMs compare by their address.  */
2474 gcc_assert (GET_MODE (x) == GET_MODE (y));
2475 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
/* VALUEs sort before anything else; two VALUEs order by
   canon_value_cmp.  */
2481 if (GET_CODE (x) == VALUE)
2483 if (GET_CODE (y) != VALUE)
2485 gcc_assert (GET_MODE (x) == GET_MODE (y));
2486 if (canon_value_cmp (x, y))
2492 if (GET_CODE (y) == VALUE)
2495 if (GET_CODE (x) == GET_CODE (y))
2496 /* Compare operands below. */;
2497 else if (GET_CODE (x) < GET_CODE (y))
2502 gcc_assert (GET_MODE (x) == GET_MODE (y));
/* Same code and mode: compare operand-by-operand per the rtx format
   string.  */
2504 fmt = GET_RTX_FORMAT (code);
2505 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2509 if (XWINT (x, i) == XWINT (y, i))
2511 else if (XWINT (x, i) < XWINT (y, i))
2518 if (XINT (x, i) == XINT (y, i))
2520 else if (XINT (x, i) < XINT (y, i))
2527 /* Compare the vector length first. */
2528 if (XVECLEN (x, i) == XVECLEN (y, i))
2529 /* Compare the vectors elements. */;
2530 else if (XVECLEN (x, i) < XVECLEN (y, i))
2535 for (j = 0; j < XVECLEN (x, i); j++)
2536 if ((r = loc_cmp (XVECEXP (x, i, j),
2537 XVECEXP (y, i, j))))
2542 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
2548 if (XSTR (x, i) == XSTR (y, i))
2554 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2562 /* These are just backpointers, so they don't matter. */
2569 /* It is believed that rtx's at this level will never
2570 contain anything but integers and other rtx's,
2571 except for within LABEL_REFs and SYMBOL_REFs. */
2579 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2580 from VALUE to DVP. */
/* NOTE(review): numbering gaps — return type, braces, the '!*slot'
   test before line 2594, the vc field assignments, the refcount
   increment on a found entry and the final return are missing from
   this dump.  Code lines kept verbatim.  */
2583 add_value_chain (rtx *loc, void *dvp)
2585 if (GET_CODE (*loc) == VALUE && (void *) *loc != dvp)
2587 decl_or_value dv = (decl_or_value) dvp;
2588 decl_or_value ldv = dv_from_value (*loc);
2589 value_chain vc, nvc;
2590 void **slot = htab_find_slot_with_hash (value_chains, ldv,
2591 dv_htab_hash (ldv), INSERT);
/* Fresh slot: allocate the list head.  */
2594 vc = (value_chain) pool_alloc (value_chain_pool);
2598 *slot = (void *) vc;
/* Existing slot: look for DV in the chain before prepending a new
   node after the head.  */
2602 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2603 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
2611 vc = (value_chain) *slot;
2612 nvc = (value_chain) pool_alloc (value_chain_pool);
2614 nvc->next = vc->next;
2621 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2622 from those VALUEs to DVP. */
2625 add_value_chains (decl_or_value dv, rtx loc)
/* A bare VALUE is chained directly...  */
2627 if (GET_CODE (loc) == VALUE)
2629 add_value_chain (&loc, dv_as_opaque (dv));
/* ...otherwise strip one level (presumably a MEM address -- TODO
   confirm against the elided condition) and walk every sub-rtx.  */
2635 loc = XEXP (loc, 0);
2636 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
2639 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those
/* (continuation of the above: ...VALUEs back to DV.)  */
2643 add_cselib_value_chains (decl_or_value dv)
2645 struct elt_loc_list *l;
/* Register a backlink for every VALUE embedded in each cselib
   location of DV.  */
2647 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2648 for_each_rtx (&l->loc, add_value_chain, dv_as_opaque (dv));
2651 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
2652 from VALUE to DVP. */
/* Mirror of add_value_chain; also a for_each_rtx callback.  */
2655 remove_value_chain (rtx *loc, void *dvp)
2657 if (GET_CODE (*loc) == VALUE && (void *) *loc != dvp)
2659 decl_or_value dv = (decl_or_value) dvp;
2660 decl_or_value ldv = dv_from_value (*loc);
2661 value_chain vc, dvc = NULL;
/* NO_INSERT: a chain for this VALUE must already exist.  */
2662 void **slot = htab_find_slot_with_hash (value_chains, ldv,
2663 dv_htab_hash (ldv), NO_INSERT);
/* Walk to the node preceding the entry for DV so it can be unlinked.  */
2664 for (vc = (value_chain) *slot; vc->next; vc = vc->next)
2665 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
2668 gcc_assert (dvc->refcount > 0);
/* Unlink and free the node only when the last reference goes away.  */
2669 if (--dvc->refcount == 0)
2671 vc->next = dvc->next;
2672 pool_free (value_chain_pool, dvc);
/* If the chain is now just an empty head, drop the hash entry too.  */
2673 if (vc->next == NULL && vc == (value_chain) *slot)
2675 pool_free (value_chain_pool, vc);
2676 htab_clear_slot (value_chains, slot);
2686 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
2687 from those VALUEs to DVP. */
/* Exact mirror of add_value_chains.  */
2690 remove_value_chains (decl_or_value dv, rtx loc)
2692 if (GET_CODE (loc) == VALUE)
2694 remove_value_chain (&loc, dv_as_opaque (dv));
2700 loc = XEXP (loc, 0);
2701 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
2704 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, remove backlinks from those
/* (continuation: ...VALUEs back to DV.)  Mirror of
   add_cselib_value_chains.  */
2708 remove_cselib_value_chains (decl_or_value dv)
2710 struct elt_loc_list *l;
2712 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2713 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
2717 /* Check the order of entries in one-part variables. */
/* htab_traverse callback: asserts that each one-part variable's
   location chain is strictly sorted by loc_cmp.  */
2720 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
2722 variable var = (variable) *slot;
2723 decl_or_value dv = var->dv;
2724 location_chain node, next;
/* Only one-part variables keep a canonical ordering.  */
2726 if (!dv_onepart_p (dv))
2729 gcc_assert (var->n_var_parts == 1);
2730 node = var->var_part[0].loc_chain;
2733 while ((next = node->next))
2735 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
2743 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
2744 more likely to be chosen as canonical for an equivalence set.
2745 Ensure less likely values can reach more likely neighbors, making
2746 the connections bidirectional. */
/* htab_traverse callback over SET->vars (DATA is the dataflow_set).  */
2749 canonicalize_values_mark (void **slot, void *data)
2751 dataflow_set *set = (dataflow_set *)data;
2752 variable var = (variable) *slot;
2753 decl_or_value dv = var->dv;
2755 location_chain node;
/* Only VALUE-keyed entries participate in equivalence sets.  */
2757 if (!dv_is_value_p (dv))
2760 gcc_assert (var->n_var_parts == 1);
2762 val = dv_as_value (dv);
2764 for (node = var->var_part[0].loc_chain; node; node = node->next)
2765 if (GET_CODE (node->loc) == VALUE)
/* A more-canonical neighbor exists: mark VAL for revisiting.  */
2767 if (canon_value_cmp (node->loc, val))
2768 VALUE_RECURSED_INTO (val) = true;
/* Otherwise make the link bidirectional by recording VAL in the
   neighbor's own location chain, and mark the neighbor.  */
2771 decl_or_value odv = dv_from_value (node->loc);
2772 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
2774 oslot = set_slot_part (set, val, oslot, odv, 0,
2775 node->init, NULL_RTX);
2777 VALUE_RECURSED_INTO (node->loc) = true;
2784 /* Remove redundant entries from equivalence lists in onepart
2785 variables, canonicalizing equivalence sets into star shapes. */
/* htab_traverse callback (DATA is the dataflow_set).  Chooses the most
   canonical VALUE (per canon_value_cmp) reachable from this entry and
   re-points all equivalent values and register attributes at it.  */
2788 canonicalize_values_star (void **slot, void *data)
2790 dataflow_set *set = (dataflow_set *)data;
2791 variable var = (variable) *slot;
2792 decl_or_value dv = var->dv;
2793 location_chain node;
2800 if (!dv_onepart_p (dv))
2803 gcc_assert (var->n_var_parts == 1);
2805 if (dv_is_value_p (dv))
2807 cval = dv_as_value (dv);
/* Only entries marked by canonicalize_values_mark need work.  */
2808 if (!VALUE_RECURSED_INTO (cval))
2810 VALUE_RECURSED_INTO (cval) = false;
2820 gcc_assert (var->n_var_parts == 1);
/* Scan for a marked, more-canonical VALUE to become the new center.  */
2822 for (node = var->var_part[0].loc_chain; node; node = node->next)
2823 if (GET_CODE (node->loc) == VALUE)
2826 if (VALUE_RECURSED_INTO (node->loc))
2828 if (canon_value_cmp (node->loc, cval))
2837 if (!has_marks || dv_is_decl_p (dv))
2840 /* Keep it marked so that we revisit it, either after visiting a
2841 child node, or after visiting a new parent that might be
2843 VALUE_RECURSED_INTO (val) = true;
2845 for (node = var->var_part[0].loc_chain; node; node = node->next)
2846 if (GET_CODE (node->loc) == VALUE
2847 && VALUE_RECURSED_INTO (node->loc))
/* Found a better canonical value: restart the walk from it.  */
2851 VALUE_RECURSED_INTO (cval) = false;
2852 dv = dv_from_value (cval);
2853 slot = shared_hash_find_slot_noinsert (set->vars, dv);
2856 gcc_assert (dv_is_decl_p (var->dv));
2857 /* The canonical value was reset and dropped.
2859 clobber_variable_part (set, NULL, var->dv, 0, NULL);
2862 var = (variable)*slot;
2863 gcc_assert (dv_is_value_p (var->dv));
2864 if (var->n_var_parts == 0)
2866 gcc_assert (var->n_var_parts == 1);
2870 VALUE_RECURSED_INTO (val) = false;
2875 /* Push values to the canonical one. */
2876 cdv = dv_from_value (cval);
2877 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
2879 for (node = var->var_part[0].loc_chain; node; node = node->next)
2880 if (node->loc != cval)
/* Every non-canonical location is recorded on CVAL's chain...  */
2882 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
2883 node->init, NULL_RTX);
2884 if (GET_CODE (node->loc) == VALUE)
/* ...and each equivalent VALUE gets a back-link to CVAL.  */
2886 decl_or_value ndv = dv_from_value (node->loc);
2888 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
2891 if (canon_value_cmp (node->loc, val))
2893 /* If it could have been a local minimum, it's not any more,
2894 since it's now neighbor to cval, so it may have to push
2895 to it. Conversely, if it wouldn't have prevailed over
2896 val, then whatever mark it has is fine: if it was to
2897 push, it will now push to a more canonical node, but if
2898 it wasn't, then it has already pushed any values it might
2900 VALUE_RECURSED_INTO (node->loc) = true;
2901 /* Make sure we visit node->loc by ensuring we cval is
2903 VALUE_RECURSED_INTO (cval) = true;
2905 else if (!VALUE_RECURSED_INTO (node->loc))
2906 /* If we have no need to "recurse" into this node, it's
2907 already "canonicalized", so drop the link to the old
2909 clobber_variable_part (set, cval, ndv, 0, NULL);
2911 else if (GET_CODE (node->loc) == REG)
2913 attrs list = set->regs[REGNO (node->loc)], *listp;
2915 /* Change an existing attribute referring to dv so that it
2916 refers to cdv, removing any duplicate this might
2917 introduce, and checking that no previous duplicates
2918 existed, all in a single pass. */
2922 if (list->offset == 0
2923 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2924 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
2931 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
/* The attribute referred to DV: remove any later CDV duplicate.  */
2934 for (listp = &list->next; (list = *listp); listp = &list->next)
2939 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2941 *listp = list->next;
2942 pool_free (attrs_pool, list);
2947 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
2950 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
/* The attribute already referred to CDV: remove any later DV entry.  */
2952 for (listp = &list->next; (list = *listp); listp = &list->next)
2957 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2959 *listp = list->next;
2960 pool_free (attrs_pool, list);
2965 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
2974 if (list->offset == 0
2975 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2976 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
/* Record VAL on the canonical chain and cut this entry down to the
   single canonical location.  */
2986 cslot = set_slot_part (set, val, cslot, cdv, 0,
2987 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
2989 slot = clobber_slot_part (set, cval, slot, 0, NULL);
2991 /* Variable may have been unshared. */
2992 var = (variable)*slot;
2993 gcc_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
2994 && var->var_part[0].loc_chain->next == NULL);
/* If CVAL itself got re-marked during pushing, canonicalize again.  */
2996 if (VALUE_RECURSED_INTO (cval))
2997 goto restart_with_cval;
3002 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3003 corresponding entry in DSM->src. Multi-part variables are combined
3004 with variable_union, whereas onepart dvs are combined with
/* (continuation: ...intersection of their location chains.)
   htab_traverse callback; DATA is the struct dfset_merge.  */
3008 variable_merge_over_cur (void **s1slot, void *data)
3010 struct dfset_merge *dsm = (struct dfset_merge *)data;
3011 dataflow_set *dst = dsm->dst;
3013 variable s1var = (variable) *s1slot;
3014 variable s2var, dvar = NULL;
3015 decl_or_value dv = s1var->dv;
3016 bool onepart = dv_onepart_p (dv);
3019 location_chain node, *nodep;
3021 /* If the incoming onepart variable has an empty location list, then
3022 the intersection will be just as empty. For other variables,
3023 it's always union. */
3024 gcc_assert (s1var->n_var_parts);
3025 gcc_assert (s1var->var_part[0].loc_chain);
/* Multi-part variables: plain union and done.  */
3028 return variable_union (s1slot, dst);
3030 gcc_assert (s1var->n_var_parts == 1);
3031 gcc_assert (s1var->var_part[0].offset == 0);
3033 dvhash = dv_htab_hash (dv);
3034 if (dv_is_value_p (dv))
3035 val = dv_as_value (dv);
3039 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
/* No counterpart in the other set: nothing survives intersection.  */
3042 dst_can_be_shared = false;
3046 dsm->src_onepart_cnt--;
3047 gcc_assert (s2var->var_part[0].loc_chain);
3048 gcc_assert (s2var->n_var_parts == 1);
3049 gcc_assert (s2var->var_part[0].offset == 0);
3051 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3054 dvar = (variable)*dstslot;
3055 gcc_assert (dvar->refcount == 1);
3056 gcc_assert (dvar->n_var_parts == 1);
3057 gcc_assert (dvar->var_part[0].offset == 0);
3058 nodep = &dvar->var_part[0].loc_chain;
/* Fast path: if the two inputs don't differ, share S2VAR directly.  */
3066 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3068 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3070 *dstslot = dvar = s2var;
3075 dst_can_be_shared = false;
/* Build the intersection of the two location chains into NODEP.  */
3077 intersect_loc_chains (val, nodep, dsm,
3078 s1var->var_part[0].loc_chain, s2var);
/* No destination entry yet: allocate a fresh one-part variable.  */
3084 dvar = (variable) pool_alloc (dv_pool (dv));
3087 dvar->n_var_parts = 1;
3088 dvar->var_part[0].offset = 0;
3089 dvar->var_part[0].loc_chain = node;
3090 dvar->var_part[0].cur_loc = node->loc;
3093 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3095 gcc_assert (!*dstslot);
3103 nodep = &dvar->var_part[0].loc_chain;
/* Walk the merged chain, fixing up register attributes.  */
3104 while ((node = *nodep))
3106 location_chain *nextp = &node->next;
3108 if (GET_CODE (node->loc) == REG)
3112 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3113 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3114 && dv_is_value_p (list->dv))
/* Register not yet associated with a VALUE: attach it to VAL.  */
3118 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3120 /* If this value became canonical for another value that had
3121 this register, we want to leave it alone. */
3122 else if (dv_as_value (list->dv) != val)
/* Another VALUE owns the register: record an equivalence and drop
   the register from this chain.  */
3124 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3126 node->init, NULL_RTX);
3127 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3129 /* Since nextp points into the removed node, we can't
3130 use it. The pointer to the next node moved to nodep.
3131 However, if the variable we're walking is unshared
3132 during our walk, we'll keep walking the location list
3133 of the previously-shared variable, in which case the
3134 node won't have been removed, and we'll want to skip
3135 it. That's why we test *nodep here. */
3141 /* Canonicalization puts registers first, so we don't have to
3147 if (dvar != (variable)*dstslot)
3148 dvar = (variable)*dstslot;
3149 nodep = &dvar->var_part[0].loc_chain;
3153 /* Mark all referenced nodes for canonicalization, and make sure
3154 we have mutual equivalence links. */
3155 VALUE_RECURSED_INTO (val) = true;
3156 for (node = *nodep; node; node = node->next)
3157 if (GET_CODE (node->loc) == VALUE)
3159 VALUE_RECURSED_INTO (node->loc) = true;
3160 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3161 node->init, NULL, INSERT);
3164 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3165 gcc_assert (*dstslot == dvar);
3166 canonicalize_values_star (dstslot, dst);
3167 #ifdef ENABLE_CHECKING
3169 == shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash));
3171 dvar = (variable)*dstslot;
3175 bool has_value = false, has_other = false;
3177 /* If we have one value and anything else, we're going to
3178 canonicalize this, so make sure all values have an entry in
3179 the table and are marked for canonicalization. */
3180 for (node = *nodep; node; node = node->next)
3182 if (GET_CODE (node->loc) == VALUE)
3184 /* If this was marked during register canonicalization,
3185 we know we have to canonicalize values. */
3200 if (has_value && has_other)
3202 for (node = *nodep; node; node = node->next)
3204 if (GET_CODE (node->loc) == VALUE)
3206 decl_or_value dv = dv_from_value (node->loc);
3209 if (shared_hash_shared (dst->vars))
3210 slot = shared_hash_find_slot_noinsert (dst->vars, dv)
3212 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
/* Create an empty entry for VALUEs not yet in the table.  */
3216 variable var = (variable) pool_alloc (dv_pool (dv));
3219 var->n_var_parts = 1;
3220 var->var_part[0].offset = 0;
3221 var->var_part[0].loc_chain = NULL;
3222 var->var_part[0].cur_loc = NULL;
3226 VALUE_RECURSED_INTO (node->loc) = true;
3230 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3231 gcc_assert (*dstslot == dvar);
3232 canonicalize_values_star (dstslot, dst);
3233 #ifdef ENABLE_CHECKING
3235 == shared_hash_find_slot_noinsert_1 (dst->vars,
3238 dvar = (variable)*dstslot;
/* If the result equals one of the inputs, share it instead of keeping
   the freshly-built copy.  */
3242 if (!onepart_variable_different_p (dvar, s2var))
3244 variable_htab_free (dvar);
3245 *dstslot = dvar = s2var;
3248 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3250 variable_htab_free (dvar);
3251 *dstslot = dvar = s1var;
3253 dst_can_be_shared = false;
3257 if (dvar->refcount == 1)
3258 dvar->var_part[0].cur_loc = dvar->var_part[0].loc_chain->loc;
3259 dst_can_be_shared = false;
3265 /* Combine variable in *S2SLOT (in DSM->src) with the corresponding
3266 entry in DSM->dst. Only multi-part variables are combined, using
3267 variable_union. onepart dvs were already combined with
3268 intersection in variable_merge_over_cur(). */
3271 variable_merge_over_src (void **s2slot, void *data)
3273 struct dfset_merge *dsm = (struct dfset_merge *)data;
3274 dataflow_set *dst = dsm->dst;
3275 variable s2var = (variable) *s2slot;
3276 decl_or_value dv = s2var->dv;
3277 bool onepart = dv_onepart_p (dv);
/* Multi-part: union into DST and canonicalize the result slot.  */
3281 void **dstp = shared_hash_find_slot (dst->vars, dv);
3284 return variable_canonicalize (dstp, dst);
/* One-part entries are just counted; variable_merge_over_cur handles
   their actual merging.  */
3287 dsm->src_onepart_cnt++;
3291 /* Combine dataflow set information from SRC into DST, discarding
3292 the original contents of DST. */
3295 dataflow_set_merge (dataflow_set *dst, dataflow_set *src)
/* Keep the old DST contents alive as SRC2 while DST is rebuilt.  */
3297 dataflow_set src2 = *dst;
3298 struct dfset_merge dsm;
3300 size_t src_elems, dst_elems;
3302 src_elems = htab_elements (shared_hash_htab (src->vars));
3303 dst_elems = htab_elements (shared_hash_htab (src2.vars));
3304 dataflow_set_init (dst);
3305 dst->stack_adjust = src2.stack_adjust;
/* Give DST a fresh, unshared hash table sized for the larger input.  */
3306 shared_hash_destroy (dst->vars);
3307 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3308 dst->vars->refcount = 1;
3310 = htab_create (MAX (src_elems, dst_elems), variable_htab_hash,
3311 variable_htab_eq, variable_htab_free);
3313 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3314 attrs_list_mpdv_union (&dst->regs[i], src->regs[i], src2.regs[i]);
3319 dsm.src_onepart_cnt = 0;
/* Two passes: union multi-part entries from the source, then
   intersect one-part entries from the current set.  */
3321 htab_traverse (shared_hash_htab (dsm.src->vars), variable_merge_over_src,
3323 htab_traverse (shared_hash_htab (dsm.cur->vars), variable_merge_over_cur,
/* Any one-part entry present only in the source means DST differs.  */
3326 if (dsm.src_onepart_cnt)
3327 dst_can_be_shared = false;
3329 dataflow_set_destroy (&src2);
3332 /* Mark register equivalences. */
/* For each hard register, pick per-mode the most canonical VALUE held
   in it and link all other VALUEs in that register to it.  */
3335 dataflow_set_equiv_regs (dataflow_set *set)
3340 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3342 rtx canon[NUM_MACHINE_MODES];
3344 memset (canon, 0, sizeof (canon));
/* First pass: find the canonical VALUE for each mode.  */
3346 for (list = set->regs[i]; list; list = list->next)
3347 if (list->offset == 0 && dv_is_value_p (list->dv))
3349 rtx val = dv_as_value (list->dv);
3350 rtx *cvalp = &canon[(int)GET_MODE (val)];
3353 if (canon_value_cmp (val, cval))
/* Second pass: create mutual equivalence links with the canonical
   VALUE and mark both sides for canonicalization.  */
3357 for (list = set->regs[i]; list; list = list->next)
3358 if (list->offset == 0 && dv_onepart_p (list->dv))
3360 rtx cval = canon[(int)GET_MODE (list->loc)];
3365 if (dv_is_value_p (list->dv))
3367 rtx val = dv_as_value (list->dv);
3372 VALUE_RECURSED_INTO (val) = true;
3373 set_variable_part (set, val, dv_from_value (cval), 0,
3374 VAR_INIT_STATUS_INITIALIZED,
3378 VALUE_RECURSED_INTO (cval) = true;
3379 set_variable_part (set, cval, list->dv, 0,
3380 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
/* Third pass: run star-canonicalization on every still-marked VALUE.
   LISTP guards against LIST being freed under us.  */
3383 for (listp = &set->regs[i]; (list = *listp);
3384 listp = list ? &list->next : listp)
3385 if (list->offset == 0 && dv_onepart_p (list->dv))
3387 rtx cval = canon[(int)GET_MODE (list->loc)];
3393 if (dv_is_value_p (list->dv))
3395 rtx val = dv_as_value (list->dv);
3396 if (!VALUE_RECURSED_INTO (val))
3400 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3401 canonicalize_values_star (slot, set);
3408 /* Remove any redundant values in the location list of VAR, which must
3409 be unshared and 1-part. */
/* Uses VALUE_RECURSED_INTO as a visited bit: first pass deletes
   repeats, second pass clears the bits again.  */
3412 remove_duplicate_values (variable var)
3414 location_chain node, *nodep;
3416 gcc_assert (dv_onepart_p (var->dv));
3417 gcc_assert (var->n_var_parts == 1);
3418 gcc_assert (var->refcount == 1);
3420 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3422 if (GET_CODE (node->loc) == VALUE)
3424 if (VALUE_RECURSED_INTO (node->loc))
3426 /* Remove duplicate value node. */
3427 *nodep = node->next;
3428 pool_free (loc_chain_pool, node);
3432 VALUE_RECURSED_INTO (node->loc) = true;
3434 nodep = &node->next;
/* Reset the visited bits so the flag stays clean for other users.  */
3437 for (node = var->var_part[0].loc_chain; node; node = node->next)
3438 if (GET_CODE (node->loc) == VALUE)
3440 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3441 VALUE_RECURSED_INTO (node->loc) = false;
3446 /* Hash table iteration argument passed to variable_post_merge. */
3447 struct dfset_post_merge
3449 /* The new input set for the current block. */
3451 /* Pointer to the permanent input set for the current block, or
3453 dataflow_set **permp;
3456 /* Create values for incoming expressions associated with one-part
3457 variables that don't have value numbers for them. */
/* htab_traverse callback; INFO is the struct dfset_post_merge.  */
3460 variable_post_merge_new_vals (void **slot, void *info)
3462 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3463 dataflow_set *set = dfpm->set;
3464 variable var = (variable)*slot;
3465 location_chain node;
3467 if (!dv_onepart_p (var->dv) || !var->n_var_parts)
3470 gcc_assert (var->n_var_parts == 1);
3472 if (dv_is_decl_p (var->dv))
3474 bool check_dupes = false;
3477 for (node = var->var_part[0].loc_chain; node; node = node->next)
3479 if (GET_CODE (node->loc) == VALUE)
3480 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
3481 else if (GET_CODE (node->loc) == REG)
3483 attrs att, *attp, *curp = NULL;
/* Unshare before mutating a shared variable.  */
3485 if (var->refcount != 1)
3487 slot = unshare_variable (set, slot, var,
3488 VAR_INIT_STATUS_INITIALIZED)
3489 var = (variable)*slot;
/* Look for an existing VALUE attribute for this register/mode.  */
3493 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3495 if (att->offset == 0
3496 && GET_MODE (att->loc) == GET_MODE (node->loc))
3498 if (dv_is_value_p (att->dv))
3500 rtx cval = dv_as_value (att->dv);
3505 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
3513 if ((*curp)->offset == 0
3514 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
3515 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
3518 curp = &(*curp)->next;
/* Lazily create the permanent set on first use.  */
3529 *dfpm->permp = XNEW (dataflow_set);
3530 dataflow_set_init (*dfpm->permp);
/* Reuse a value from the permanent set for this register/mode.  */
3533 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
3534 att; att = att->next)
3535 if (GET_MODE (att->loc) == GET_MODE (node->loc))
3537 gcc_assert (att->offset == 0);
3538 gcc_assert (dv_is_value_p (att->dv));
3539 val_reset (set, att->dv);
3546 cval = dv_as_value (cdv);
3550 /* Create a unique value to hold this register,
3551 that ought to be found and reused in
3552 subsequent rounds. */
3554 gcc_assert (!cselib_lookup (node->loc,
3555 GET_MODE (node->loc), 0));
3556 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
3557 cselib_preserve_value (v);
3558 cselib_invalidate_rtx (node->loc);
3560 cdv = dv_from_value (cval);
3563 "Created new value %i for reg %i\n",
3564 v->value, REGNO (node->loc));
/* Record the new value permanently so later rounds find it.  */
3567 var_reg_decl_set (*dfpm->permp, node->loc,
3568 VAR_INIT_STATUS_INITIALIZED,
3569 cdv, 0, NULL, INSERT);
3575 /* Remove attribute referring to the decl, which now
3576 uses the value for the register, already existing or
3577 to be added when we bring perm in. */
3580 pool_free (attrs_pool, att);
3585 remove_duplicate_values (var);
3591 /* Reset values in the permanent set that are not associated with the
3592 chosen expression. */
/* htab_traverse callback over the permanent set (PSLOT); INFO is the
   struct dfset_post_merge.  */
3595 variable_post_merge_perm_vals (void **pslot, void *info)
3597 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3598 dataflow_set *set = dfpm->set;
3599 variable pvar = (variable)*pslot, var;
3600 location_chain pnode;
/* Permanent entries are single-location registers held by VALUEs.  */
3604 gcc_assert (dv_is_value_p (pvar->dv));
3605 gcc_assert (pvar->n_var_parts == 1);
3606 pnode = pvar->var_part[0].loc_chain;
3608 gcc_assert (!pnode->next);
3609 gcc_assert (REG_P (pnode->loc));
3613 var = shared_hash_find (set->vars, dv);
/* If the register is still a live location of DV, reset the value.  */
3616 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
3618 val_reset (set, dv);
3621 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
3622 if (att->offset == 0
3623 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
3624 && dv_is_value_p (att->dv))
3627 /* If there is a value associated with this register already, create
3629 if (att && dv_as_value (att->dv) != dv_as_value (dv))
/* Link the two values as equivalent.  */
3631 rtx cval = dv_as_value (att->dv);
3632 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
3633 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
/* Otherwise take over the register attribute and union the entry in.  */
3638 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
3640 variable_union (pslot, set);
3646 /* Just checking stuff and registering register attributes for
/* (continuation: ...the merged set.)  Runs the three post-merge
   passes over SET and, if present, the permanent set *PERMP.  */
3650 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
3652 struct dfset_post_merge dfpm;
3657 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
3660 htab_traverse (shared_hash_htab ((*permp)->vars),
3661 variable_post_merge_perm_vals, &dfpm);
3662 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
3665 /* Return a node whose loc is a MEM that refers to EXPR in the
3666 location list of a one-part variable or value VAR, or in that of
3667 any values recursively mentioned in the location lists. */
3669 static location_chain
3670 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
3672 location_chain node;
3675 location_chain where = NULL;
3680 gcc_assert (GET_CODE (val) == VALUE);
3682 gcc_assert (!VALUE_RECURSED_INTO (val));
3684 dv = dv_from_value (val);
3685 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
3690 gcc_assert (dv_onepart_p (var->dv));
3692 if (!var->n_var_parts)
3695 gcc_assert (var->var_part[0].offset == 0);
/* Guard against cycles among mutually-referencing VALUEs.  */
3697 VALUE_RECURSED_INTO (val) = true;
/* Accept only MEMs whose attribute maps exactly (zero offset) to
   EXPR; recurse through unvisited VALUEs otherwise.  */
3699 for (node = var->var_part[0].loc_chain; node; node = node->next)
3700 if (MEM_P (node->loc) && MEM_EXPR (node->loc) == expr
3701 && MEM_OFFSET (node->loc) == 0)
3706 else if (GET_CODE (node->loc) == VALUE
3707 && !VALUE_RECURSED_INTO (node->loc)
3708 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
3711 VALUE_RECURSED_INTO (val) = false;
3716 /* Return TRUE if the value of MEM may vary across a call. */
3719 mem_dies_at_call (rtx mem)
3721 tree expr = MEM_EXPR (mem);
3727 decl = get_base_address (expr);
/* A call can change the MEM if its base decl may be aliased, or is a
   writable global.  */
3735 return (may_be_aliased (decl)
3736 || (!TREE_READONLY (decl) && is_global_var (decl)));
3739 /* Remove all MEMs from the location list of a hash table entry for a
3740 one-part variable, except those whose MEM attributes map back to
3741 the variable itself, directly or within a VALUE. */
/* htab_traverse callback; DATA is the dataflow_set.  Used at call
   sites (see dataflow_set_clear_at_call).  */
3744 dataflow_set_preserve_mem_locs (void **slot, void *data)
3746 dataflow_set *set = (dataflow_set *) data;
3747 variable var = (variable) *slot;
3749 if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
3751 tree decl = dv_as_decl (var->dv);
3752 location_chain loc, *locp;
3754 if (!var->n_var_parts)
3757 gcc_assert (var->n_var_parts == 1);
/* If shared, first check whether any change is needed at all.  */
3759 if (var->refcount > 1 || shared_hash_shared (set->vars))
3761 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3763 /* We want to remove dying MEMs that don't refer to
3765 if (GET_CODE (loc->loc) == MEM
3766 && (MEM_EXPR (loc->loc) != decl
3767 || MEM_OFFSET (loc->loc))
3768 && !mem_dies_at_call (loc->loc))
3770 /* We want to move here MEMs that do refer to DECL. */
3771 else if (GET_CODE (loc->loc) == VALUE
3772 && find_mem_expr_in_1pdv (decl, loc->loc,
3773 shared_hash_htab (set->vars)))
/* Changes are needed: get a private copy to mutate.  */
3780 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3781 var = (variable)*slot;
3782 gcc_assert (var->n_var_parts == 1);
3785 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3788 rtx old_loc = loc->loc;
/* Replace a VALUE location by the surviving MEM it contains.  */
3789 if (GET_CODE (old_loc) == VALUE)
3791 location_chain mem_node
3792 = find_mem_expr_in_1pdv (decl, loc->loc,
3793 shared_hash_htab (set->vars));
3795 /* ??? This picks up only one out of multiple MEMs that
3796 refer to the same variable. Do we ever need to be
3797 concerned about dealing with more than one, or, given
3798 that they should all map to the same variable
3799 location, their addresses will have been merged and
3800 they will be regarded as equivalent? */
3803 loc->loc = mem_node->loc;
3804 loc->set_src = mem_node->set_src;
3805 loc->init = MIN (loc->init, mem_node->init);
/* Keep non-MEMs, MEMs mapping exactly to DECL, and MEMs that
   survive calls.  */
3809 if (GET_CODE (loc->loc) != MEM
3810 || (MEM_EXPR (loc->loc) == decl
3811 && MEM_OFFSET (loc->loc) == 0)
3812 || !mem_dies_at_call (loc->loc))
/* Keep the value-chain bookkeeping in sync when emitting notes.  */
3814 if (old_loc != loc->loc && emit_notes)
3816 add_value_chains (var->dv, loc->loc);
3817 remove_value_chains (var->dv, old_loc);
/* Otherwise delete the node.  */
3824 remove_value_chains (var->dv, old_loc);
3826 pool_free (loc_chain_pool, loc);
/* Emptied chain: flag the variable as changed for note emission.  */
3829 if (!var->var_part[0].loc_chain)
3832 if (emit_notes && dv_is_value_p (var->dv))
3833 remove_cselib_value_chains (var->dv);
3834 variable_was_changed (var, set);
3841 /* Remove all MEMs from the location list of a hash table entry for a
/* (continuation: ...value, where the MEM may die at a call.)
   htab_traverse callback; DATA is the dataflow_set.  */
3845 dataflow_set_remove_mem_locs (void **slot, void *data)
3847 dataflow_set *set = (dataflow_set *) data;
3848 variable var = (variable) *slot;
3850 if (dv_is_value_p (var->dv))
3852 location_chain loc, *locp;
3853 bool changed = false;
3855 gcc_assert (var->n_var_parts == 1);
/* If shared, only unshare when a dying MEM is actually present.  */
3857 if (var->refcount > 1 || shared_hash_shared (set->vars))
3859 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3860 if (GET_CODE (loc->loc) == MEM
3861 && mem_dies_at_call (loc->loc))
3867 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3868 var = (variable)*slot;
3869 gcc_assert (var->n_var_parts == 1);
3872 for (locp = &var->var_part[0].loc_chain, loc = *locp;
/* Keep everything except MEMs that die at the call.  */
3875 if (GET_CODE (loc->loc) != MEM
3876 || !mem_dies_at_call (loc->loc))
3883 remove_value_chains (var->dv, loc->loc);
3885 /* If we have deleted the location which was last emitted
3886 we have to emit new location so add the variable to set
3887 of changed variables. */
3888 if (var->var_part[0].cur_loc
3889 && rtx_equal_p (loc->loc, var->var_part[0].cur_loc))
3891 pool_free (loc_chain_pool, loc)
3894 if (!var->var_part[0].loc_chain)
3897 if (emit_notes && dv_is_value_p (var->dv))
3898 remove_cselib_value_chains (var->dv);
3899 gcc_assert (changed);
/* Refresh cur_loc from the (possibly shortened) chain head.  */
3903 if (var->n_var_parts && var->var_part[0].loc_chain)
3904 var->var_part[0].cur_loc = var->var_part[0].loc_chain->loc;
3905 variable_was_changed (var, set);
3912 /* Remove all variable-location information about call-clobbered
3913 registers, as well as associations between MEMs and VALUEs. */
3916 dataflow_set_clear_at_call (dataflow_set *set)
/* Drop every call-used hard register.  */
3920 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
3921 if (TEST_HARD_REG_BIT (call_used_reg_set, r))
3922 var_regno_delete (set, r);
/* With debug insns, also prune MEM locations that may die at the call:
   first preserve MEMs tied to their own decl, then remove the rest.  */
3924 if (MAY_HAVE_DEBUG_INSNS)
3926 set->traversed_vars = set->vars;
3927 htab_traverse (shared_hash_htab (set->vars),
3928 dataflow_set_preserve_mem_locs, set);
3929 set->traversed_vars = set->vars;
3930 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
3932 set->traversed_vars = NULL;
3936 /* Flag whether two dataflow sets being compared contain different data. */
3938 dataflow_set_different_value;
/* Return true if VP1's location chain contains an entry with no
   equivalent location in VP2.  */
3941 variable_part_different_p (variable_part *vp1, variable_part *vp2)
3943 location_chain lc1, lc2;
3945 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
3947 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
/* Registers match by number; everything else by rtx_equal_p.  */
3949 if (REG_P (lc1->loc) && REG_P (lc2->loc))
3951 if (REGNO (lc1->loc) == REGNO (lc2->loc))
3954 if (rtx_equal_p (lc1->loc, lc2->loc))
3963 /* Return true if one-part variables VAR1 and VAR2 are different.
3964 They must be in canonical order. */
/* Because both chains are loc_cmp-sorted, a single parallel walk
   comparing element-by-element suffices.  */
3967 onepart_variable_different_p (variable var1, variable var2)
3969 location_chain lc1, lc2;
3974 gcc_assert (var1->n_var_parts == 1);
3975 gcc_assert (var2->n_var_parts == 1);
3977 lc1 = var1->var_part[0].loc_chain;
3978 lc2 = var2->var_part[0].loc_chain;
3985 if (loc_cmp (lc1->loc, lc2->loc))
3994 /* Return true if variables VAR1 and VAR2 are different.
3995 If COMPARE_CURRENT_LOCATION is true compare also the cur_loc of each
/* (continuation: ...variable part.)  */
3999 variable_different_p (variable var1, variable var2,
4000 bool compare_current_location)
4007 if (var1->n_var_parts != var2->n_var_parts)
4010 for (i = 0; i < var1->n_var_parts; i++)
4012 if (var1->var_part[i].offset != var2->var_part[i].offset)
4014 if (compare_current_location)
/* cur_loc matches if both are the same register, or rtx-equal.  */
4016 if (!((REG_P (var1->var_part[i].cur_loc)
4017 && REG_P (var2->var_part[i].cur_loc)
4018 && (REGNO (var1->var_part[i].cur_loc)
4019 == REGNO (var2->var_part[i].cur_loc)))
4020 || rtx_equal_p (var1->var_part[i].cur_loc,
4021 var2->var_part[i].cur_loc)))
4024 /* One-part values have locations in a canonical order. */
4025 if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
4027 gcc_assert (var1->n_var_parts == 1);
4028 gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4029 return onepart_variable_different_p (var1, var2);
/* Multi-part: each chain must be a subset of the other.  */
4031 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4033 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4039 /* Compare variable *SLOT with the same variable in hash table DATA
4040 and set DATAFLOW_SET_DIFFERENT_VALUE if they are different. */
/* htab_traverse callback; returning zero stops the traversal early.  */
4043 dataflow_set_different_1 (void **slot, void *data)
4045 htab_t htab = (htab_t) data;
4046 variable var1, var2;
4048 var1 = (variable) *slot;
4049 var2 = (variable) htab_find_with_hash (htab, var1->dv,
4050 dv_htab_hash (var1->dv));
/* Entry missing from the other set: the sets differ.  */
4053 dataflow_set_different_value = true;
4055 if (dump_file && (dump_flags & TDF_DETAILS))
4057 fprintf (dump_file, "dataflow difference found: removal of:\n");
4061 /* Stop traversing the hash table. */
4065 if (variable_different_p (var1, var2, false))
4067 dataflow_set_different_value = true;
4069 if (dump_file && (dump_flags & TDF_DETAILS))
4071 fprintf (dump_file, "dataflow difference found: old and new follow:\n");
4076 /* Stop traversing the hash table. */
4080 /* Continue traversing the hash table. */
4084 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4087 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
/* Shared tables are trivially equal.  */
4089 if (old_set->vars == new_set->vars)
/* Different sizes imply a difference without any element walk.  */
4092 if (htab_elements (shared_hash_htab (old_set->vars))
4093 != htab_elements (shared_hash_htab (new_set->vars)))
4096 dataflow_set_different_value = false;
4098 htab_traverse (shared_hash_htab (old_set->vars), dataflow_set_different_1,
4099 shared_hash_htab (new_set->vars));
4100 /* No need to traverse the second hashtab, if both have the same number
4101 of elements and the second one had all entries found in the first one,
4102 then it can't have any extra entries. */
4103 return dataflow_set_different_value;
4106 /* Free the contents of dataflow set SET. */
4109 dataflow_set_destroy (dataflow_set *set)
/* Release per-register attribute lists, then the shared hash.  */
4113 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4114 attrs_list_clear (&set->regs[i]);
4116 shared_hash_destroy (set->vars);
4120 /* Return true if RTL X contains a SYMBOL_REF. */
/* Recursive walk over X's operands using the rtx format string.  */
4123 contains_symbol_ref (rtx x)
4132 code = GET_CODE (x);
4133 if (code == SYMBOL_REF)
4136 fmt = GET_RTX_FORMAT (code);
4137 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
/* 'e' operands are single rtxes; 'E' operands are rtx vectors.  */
4141 if (contains_symbol_ref (XEXP (x, i)))
4144 else if (fmt[i] == 'E')
4147 for (j = 0; j < XVECLEN (x, i); j++)
4148 if (contains_symbol_ref (XVECEXP (x, i, j)))
4156 /* Shall EXPR be tracked?  Returns nonzero if the declaration EXPR is
4157 a candidate for location tracking.  If NEED_RTL, additionally demand
4158 that EXPR has RTL assigned.  */
4159 track_expr_p (tree expr, bool need_rtl)
/* Debug temporaries are tracked whenever they have RTL.  */
4164 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4165 return DECL_RTL_SET_P (expr);
4167 /* If EXPR is not a parameter or a variable do not track it. */
4168 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4171 /* It also must have a name... */
4172 if (!DECL_NAME (expr))
4175 /* ... and a RTL assigned to it. */
4176 decl_rtl = DECL_RTL_IF_SET (expr);
4177 if (!decl_rtl && need_rtl)
4180 /* If this expression is really a debug alias of some other declaration, we
4181 don't need to track this expression if the ultimate declaration is
4182 ignored. */
4184 if (DECL_DEBUG_EXPR_IS_FROM (realdecl) && DECL_DEBUG_EXPR (realdecl))
4186 realdecl = DECL_DEBUG_EXPR (realdecl);
4187 /* ??? We don't yet know how to emit DW_OP_piece for variable
4188 that has been SRA'ed. */
4189 if (!DECL_P (realdecl))
4193 /* Do not track EXPR if REALDECL it should be ignored for debugging
4194 purposes. */
4195 if (DECL_IGNORED_P (realdecl))
4198 /* Do not track global variables until we are able to emit correct location
4199 list for them. */
4200 if (TREE_STATIC (realdecl))
4203 /* When the EXPR is a DECL for alias of some variable (see example)
4204 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
4205 DECL_RTL contains SYMBOL_REF.
4206
4207 Example:
4208 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4209 */
4211 if (decl_rtl && MEM_P (decl_rtl)
4212 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4215 /* If RTX is a memory it should not be very large (because it would be
4216 an array or struct). */
4217 if (decl_rtl && MEM_P (decl_rtl))
4219 /* Do not track structures and arrays. */
4220 if (GET_MODE (decl_rtl) == BLKmode
4221 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
/* Reject memories wider than the tracked-parts limit; MEM_SIZE may be
   absent, in which case the size test is skipped.  */
4223 if (MEM_SIZE (decl_rtl)
4224 && INTVAL (MEM_SIZE (decl_rtl)) > MAX_VAR_PARTS)
/* EXPR passed all filters; clear the changed marks before tracking.  */
4228 DECL_CHANGED (expr) = 0;
4229 DECL_CHANGED (realdecl) = 0;
4233 /* Determine whether a given LOC refers to the same variable part as
4234 EXPR+OFFSET.  Compares the decl/offset recorded in LOC's REG or MEM
4235 attributes (after mapping both decls through their debug aliases)
4236 against EXPR and OFFSET. */
4237 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4240 HOST_WIDE_INT offset2;
4242 if (! DECL_P (expr))
/* Presumably inside an `if (REG_P (loc))` arm elided here — TODO
   confirm; REG attributes are only valid on registers.  */
4247 expr2 = REG_EXPR (loc);
4248 offset2 = REG_OFFSET (loc);
4250 else if (MEM_P (loc))
4252 expr2 = MEM_EXPR (loc);
4253 offset2 = INT_MEM_OFFSET (loc);
4258 if (! expr2 || ! DECL_P (expr2))
/* Map both declarations through their debug-expr aliases so that an
   SRA'ed copy and its original compare equal.  */
4261 expr = var_debug_decl (expr);
4262 expr2 = var_debug_decl (expr2);
4264 return (expr == expr2 && offset == offset2);
4267 /* LOC is a REG or MEM that we would like to track if possible.
4268 If EXPR is null, we don't know what expression LOC refers to,
4269 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4270 LOC is an lvalue register.
4271
4272 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4273 is something we can track. When returning true, store the mode of
4274 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4275 from EXPR in *OFFSET_OUT (if nonnull). */
4278 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4279 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4281 enum machine_mode mode;
4283 if (expr == NULL || !track_expr_p (expr, true))
4286 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4287 whole subreg, but only the old inner part is really relevant. */
4288 mode = GET_MODE (loc);
4289 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4291 enum machine_mode pseudo_mode;
4293 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4294 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
/* Narrow to the original pseudo's mode; adjust OFFSET to point at
   the byte lowpart within the paradoxical subreg.  */
4296 offset += byte_lowpart_offset (pseudo_mode, mode);
4301 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4302 Do the same if we are storing to a register and EXPR occupies
4303 the whole of register LOC; in that case, the whole of EXPR is
4304 being changed. We exclude complex modes from the second case
4305 because the real and imaginary parts are represented as separate
4306 pseudo registers, even if the whole complex value fits into one
4307 hard register. */
4308 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4310 && !COMPLEX_MODE_P (DECL_MODE (expr))
4311 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4312 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4314 mode = DECL_MODE (expr);
/* Parts outside the tracked window [0, MAX_VAR_PARTS) are rejected.  */
4318 if (offset < 0 || offset >= MAX_VAR_PARTS)
4324 *offset_out = offset;
4328 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4329 want to track. When returning nonnull, make sure that the attributes
4330 on the returned value are updated. */
4333 var_lowpart (enum machine_mode mode, rtx loc)
4335 unsigned int offset, reg_offset, regno;
4337 if (!REG_P (loc) && !MEM_P (loc))
/* Already in the requested mode: nothing to adjust.  */
4340 if (GET_MODE (loc) == mode)
4343 offset = byte_lowpart_offset (mode, GET_MODE (loc));
/* MEM case: re-address the memory at the lowpart offset without
   validating the new address (the _nv variant).  */
4346 return adjust_address_nv (loc, mode, offset);
/* REG case: compute the hard-register number of the lowpart and build
   a new REG carrying updated offset attributes.  */
4348 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4349 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4350 reg_offset, mode);
4351 return gen_rtx_REG_offset (loc, mode, regno, offset);
4354 /* Carry information about uses and stores while walking rtx.
4355 Shared context object passed through the note_uses/note_stores
4356 callbacks (count_uses, add_uses, add_stores, ...). */
4356 struct count_use_info
4358 /* The insn where the RTX is. */
4361 /* The basic block where insn is. */
4364 /* The array of n_sets sets in the insn, as determined by cselib. */
4365 struct cselib_set *sets;
4368 /* True if we're counting stores, false otherwise. */
4372 /* Find a VALUE corresponding to X.  Returns the cselib value for X,
4373 or presumably NULL when none exists — TODO confirm the elided
4374 fallthrough paths against the full source. */
4374 static inline cselib_val *
4375 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4381 /* This is called after uses are set up and before stores are
4382 processed bycselib, so it's safe to look up srcs, but not
4383 dsts. So we look up expressions that appear in srcs or in
4384 dest expressions, but we search the sets array for dests of
4385 stores instead. */
/* Store phase: X must match one of the insn's recorded destinations.  */
4388 for (i = 0; i < cui->n_sets; i++)
4389 if (cui->sets[i].dest == x)
4390 return cui->sets[i].src_elt;
/* Use phase: a plain cselib lookup (create = 0, no new value made).  */
4393 return cselib_lookup (x, mode, 0);
4399 /* Replace all registers and addresses in an expression with VALUE
4400 expressions that map back to them, unless the expression is a
4401 register. If no mapping is or can be performed, returns NULL. */
4404 replace_expr_with_values (rtx loc)
/* The REG arm is elided in this excerpt; for a MEM we substitute the
   address with its value equivalence.  */
4408 else if (MEM_P (loc))
4410 enum machine_mode address_mode
4411 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
4412 cselib_val *addr = cselib_lookup (XEXP (loc, 0), address_mode, 0);
4414 return replace_equiv_address_nv (loc, addr->val_rtx);
/* Anything else: let cselib substitute values throughout.  */
4419 return cselib_subst_to_values (loc);
4422 /* Determine what kind of micro operation to choose for a USE. Return
4423 MO_CLOBBER if no micro operation is to be generated.  If MODEP is
4424 nonnull, it may receive the mode of the tracked lowpart. */
4425 static enum micro_operation_type
4426 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
/* Value-tracking path: only taken when cselib sets are available.  */
4430 if (cui && cui->sets)
4432 if (GET_CODE (loc) == VAR_LOCATION)
4434 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
4436 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
4437 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
4439 /* ??? flag_float_store and volatile mems are never
4440 given values, but we could in theory use them for
4441 locations. */
4442 gcc_assert (val || 1);
4449 if (REG_P (loc) || MEM_P (loc))
4452 *modep = GET_MODE (loc);
/* For MEMs, the address must also have a known value.  */
4456 || (find_use_val (loc, GET_MODE (loc), cui)
4457 && cselib_lookup (XEXP (loc, 0), GET_MODE (loc), 0)))
4462 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4464 if (val && !cselib_preserved_value_p (val))
/* Classic (non-value) classification follows: REG case first.  */
4472 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
4474 expr = REG_EXPR (loc);
4477 return MO_USE_NO_VAR;
4478 else if (target_for_debug_bind (var_debug_decl (expr)))
4480 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
4481 false, modep, NULL))
4484 return MO_USE_NO_VAR;
4486 else if (MEM_P (loc))
4488 expr = MEM_EXPR (loc);
4492 else if (target_for_debug_bind (var_debug_decl (expr)))
4494 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
4495 false, modep, NULL))
4504 /* Log to OUT information about micro-operation MOPT involving X in
4505 INSN of basic block BB.  Dump-file helper only; no side effects on
4506 the pass state. */
4508 log_op_type (rtx x, basic_block bb, rtx insn,
4509 enum micro_operation_type mopt, FILE *out)
4511 fprintf (out, "bb %i op %i insn %i %s ",
4512 bb->index, VTI (bb)->n_mos - 1,
4513 INSN_UID (insn), micro_operation_type_name[mopt]);
4514 print_inline_rtx (out, x, 2);
4518 /* Count uses (register and memory references) LOC which will be tracked.
4519 INSN is instruction which the LOC is part of.  for_each_rtx callback:
4520 increments VTI (bb)->n_mos for each micro operation the later add_*
4521 pass will emit, and preserves the cselib values involved. */
4522 count_uses (rtx *ploc, void *cuip)
4525 struct count_use_info *cui = (struct count_use_info *) cuip;
4526 enum micro_operation_type mopt = use_type (loc, cui, NULL);
4528 if (mopt != MO_CLOBBER)
4531 enum machine_mode mode = GET_MODE (loc);
4533 VTI (cui->bb)->n_mos++;
4535 if (dump_file && (dump_flags & TDF_DETAILS))
4536 log_op_type (loc, cui->bb, cui->insn, mopt, dump_file);
/* VAR_LOCATION case (elided switch structure): dig out the real
   location from the pattern.  */
4541 loc = PAT_VAR_LOCATION_LOC (loc);
4542 if (VAR_LOC_UNKNOWN_P (loc))
/* A MEM whose address is itself a computed expression needs an extra
   micro operation for the address value.  */
4549 && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
4551 enum machine_mode address_mode
4552 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
4553 val = cselib_lookup (XEXP (loc, 0), address_mode, false);
4555 if (val && !cselib_preserved_value_p (val))
4557 VTI (cui->bb)->n_mos++;
4558 cselib_preserve_value (val);
4562 val = find_use_val (loc, mode, cui);
/* Conditionally-executed sets also read their old destination value.  */
4565 if (mopt == MO_VAL_SET
4566 && GET_CODE (PATTERN (cui->insn)) == COND_EXEC
4569 && (use_type (loc, NULL, NULL) == MO_USE
4572 cselib_val *oval = cselib_lookup (loc, GET_MODE (loc), 0);
4574 gcc_assert (oval != val);
4575 gcc_assert (REG_P (loc) || MEM_P (loc));
4577 if (!cselib_preserved_value_p (oval))
4579 VTI (cui->bb)->n_mos++;
4580 cselib_preserve_value (oval);
4584 cselib_preserve_value (val);
4587 gcc_assert (mopt == MO_VAL_LOC
4588 || (mopt == MO_VAL_SET && cui->store_p));
4600 /* Helper function for finding all uses of REG/MEM in X in CUI's
4601 insn: walks every sub-rtx of *X with the count_uses callback. */
4604 count_uses_1 (rtx *x, void *cui)
4606 for_each_rtx (x, count_uses, cui);
4609 /* Count stores (register and memory references) LOC which will be
4610 tracked. CUI is a count_use_info object containing the instruction
4611 which the LOC is part of.  note_stores callback; EXPR (the SET or
4612 CLOBBER containing LOC) is unused here. */
4614 count_stores (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *cui)
4616 count_uses (&loc, cui);
4619 /* Callback for cselib_record_sets_hook, that counts how many micro
4620 operations it takes for uses and stores in an insn after
4621 cselib_record_sets has analyzed the sets in an insn, but before it
4622 modifies the stored values in the internal tables, unless
4623 cselib_record_sets doesn't call it directly (perhaps because we're
4624 not doing cselib in the first place, in which case sets and n_sets
4625 will be 0). */
4628 count_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
4630 basic_block bb = BLOCK_FOR_INSN (insn);
4631 struct count_use_info cui;
/* Record that the hook ran so the caller can tell it was invoked.  */
4633 cselib_hook_called = true;
4638 cui.n_sets = n_sets;
/* First count uses (store_p false), then count stores via note_stores;
   intermediate lines (cui.insn/bb/sets setup, store_p = true) are
   elided in this excerpt.  */
4640 cui.store_p = false;
4641 note_uses (&PATTERN (insn), count_uses_1, &cui);
4643 note_stores (PATTERN (insn), count_stores, &cui);
4646 /* Tell whether the CONCAT used to hold a VALUE and its location
4647 needs value resolution, i.e., an attempt of mapping the location
4648 back to other incoming values.  These four macros reuse spare RTL
4649 flag bits (volatil, used, jump, unchanging) on CONCAT rtxes as
4650 per-micro-operation annotations. */
4649 #define VAL_NEEDS_RESOLUTION(x) \
4650 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
4651 /* Whether the location in the CONCAT is a tracked expression, that
4652 should also be handled like a MO_USE. */
4653 #define VAL_HOLDS_TRACK_EXPR(x) \
4654 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
4655 /* Whether the location in the CONCAT should be handled like a MO_COPY
4656 as well. */
4657 #define VAL_EXPR_IS_COPIED(x) \
4658 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
4659 /* Whether the location in the CONCAT should be handled like a
4660 MO_CLOBBER as well. */
4661 #define VAL_EXPR_IS_CLOBBERED(x) \
4662 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
4664 /* Add uses (register and memory references) LOC which will be tracked
4665 to VTI (bb)->mos. INSN is instruction which the LOC is part of.
4666 for_each_rtx callback; the slots consumed here were reserved by the
4667 earlier count_uses pass. */
4668 add_uses (rtx *ploc, void *data)
4671 enum machine_mode mode = VOIDmode;
4672 struct count_use_info *cui = (struct count_use_info *)data;
4673 enum micro_operation_type type = use_type (loc, cui, &mode);
4675 if (type != MO_CLOBBER)
4677 basic_block bb = cui->bb;
4678 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4681 mo->u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
4682 mo->insn = cui->insn;
/* --- MO_VAL_LOC: a debug-insn VAR_LOCATION binding --- */
4684 if (type == MO_VAL_LOC)
4687 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
4690 gcc_assert (cui->sets);
/* If the bound location is a MEM with a computed address, emit an
   extra MO_VAL_USE for the address value first.  */
4693 && !REG_P (XEXP (vloc, 0)) && !MEM_P (XEXP (vloc, 0)))
4696 enum machine_mode address_mode
4697 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4699 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4701 if (val && !cselib_preserved_value_p (val))
4703 micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
4704 mon->type = mo->type;
4705 mon->u.loc = mo->u.loc;
4706 mon->insn = mo->insn;
4707 cselib_preserve_value (val);
4708 mo->type = MO_VAL_USE;
4709 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4710 mo->u.loc = gen_rtx_CONCAT (address_mode,
4711 val->val_rtx, mloc);
4712 if (dump_file && (dump_flags & TDF_DETAILS))
4713 log_op_type (mo->u.loc, cui->bb, cui->insn,
4714 mo->type, dump_file);
/* Known location with a value: rewrite the VAR_LOCATION to refer to
   values and wrap it in (concat val oloc).  */
4719 if (!VAR_LOC_UNKNOWN_P (vloc)
4720 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
4722 enum machine_mode mode2;
4723 enum micro_operation_type type2;
4724 rtx nloc = replace_expr_with_values (vloc);
4728 oloc = shallow_copy_rtx (oloc);
4729 PAT_VAR_LOCATION_LOC (oloc) = nloc;
4732 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
4734 type2 = use_type (vloc, 0, &mode2);
4736 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4737 || type2 == MO_CLOBBER);
4739 if (type2 == MO_CLOBBER
4740 && !cselib_preserved_value_p (val))
4742 VAL_NEEDS_RESOLUTION (oloc) = 1;
4743 cselib_preserve_value (val);
4746 else if (!VAR_LOC_UNKNOWN_P (vloc))
/* No value for the location: record it as explicitly unknown.  */
4748 oloc = shallow_copy_rtx (oloc);
4749 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
/* --- MO_VAL_USE: an ordinary use with an associated value --- */
4754 else if (type == MO_VAL_USE)
4756 enum machine_mode mode2 = VOIDmode;
4757 enum micro_operation_type type2;
4758 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4759 rtx vloc, oloc = loc, nloc;
4761 gcc_assert (cui->sets);
/* Same extra MO_VAL_USE for a computed MEM address as above.  */
4764 && !REG_P (XEXP (oloc, 0)) && !MEM_P (XEXP (oloc, 0)))
4767 enum machine_mode address_mode
4768 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4770 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4772 if (val && !cselib_preserved_value_p (val))
4774 micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
4775 mon->type = mo->type;
4776 mon->u.loc = mo->u.loc;
4777 mon->insn = mo->insn;
4778 cselib_preserve_value (val);
4779 mo->type = MO_VAL_USE;
4780 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4781 mo->u.loc = gen_rtx_CONCAT (address_mode,
4782 val->val_rtx, mloc);
4783 mo->insn = cui->insn;
4784 if (dump_file && (dump_flags & TDF_DETAILS))
4785 log_op_type (mo->u.loc, cui->bb, cui->insn,
4786 mo->type, dump_file);
4791 type2 = use_type (loc, 0, &mode2);
4793 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4794 || type2 == MO_CLOBBER);
4796 if (type2 == MO_USE)
4797 vloc = var_lowpart (mode2, loc);
4801 /* The loc of a MO_VAL_USE may have two forms:
4802
4803 (concat val src): val is at src, a value-based
4804 representation.
4805
4806 (concat (concat val use) src): same as above, with use as
4807 the MO_USE tracked value, if it differs from src.
4808 */
4811 nloc = replace_expr_with_values (loc);
4816 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
4818 oloc = val->val_rtx;
4820 mo->u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
4822 if (type2 == MO_USE)
4823 VAL_HOLDS_TRACK_EXPR (mo->u.loc) = 1;
4824 if (!cselib_preserved_value_p (val))
4826 VAL_NEEDS_RESOLUTION (mo->u.loc) = 1;
4827 cselib_preserve_value (val);
/* Otherwise only plain uses remain.  */
4831 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
4833 if (dump_file && (dump_flags & TDF_DETAILS))
4834 log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
4840 /* Helper function for finding all uses of REG/MEM in X in insn INSN:
4841 walks every sub-rtx of *X with the add_uses callback. */
4843 add_uses_1 (rtx *x, void *cui)
4845 for_each_rtx (x, add_uses, cui);
4848 /* Add stores (register and memory references) LOC which will be tracked
4849 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
4850 CUIP->insn is instruction which the LOC is part of.  note_stores
4851 callback; classifies the store as MO_CLOBBER / MO_COPY / MO_SET and,
4852 in the value-tracking case, wraps it into a MO_VAL_SET concat. */
4853 add_stores (rtx loc, const_rtx expr, void *cuip)
4855 enum machine_mode mode = VOIDmode, mode2;
4856 struct count_use_info *cui = (struct count_use_info *)cuip;
4857 basic_block bb = cui->bb;
4858 micro_operation *mo;
4859 rtx oloc = loc, nloc, src = NULL;
4860 enum micro_operation_type type = use_type (loc, cui, &mode);
4861 bool track_p = false;
4863 bool resolve, preserve;
4865 if (type == MO_CLOBBER)
/* --- REG destination (enclosing REG_P test elided in excerpt) --- */
4872 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4874 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
4875 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
4876 || GET_CODE (expr) == CLOBBER)
4878 mo->type = MO_CLOBBER;
/* Tracked register store: narrow both sides to the tracked lowpart.  */
4883 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
4884 src = var_lowpart (mode2, SET_SRC (expr));
4885 loc = var_lowpart (mode2, loc);
4894 rtx xexpr = CONST_CAST_RTX (expr);
4896 if (SET_SRC (expr) != src)
4897 xexpr = gen_rtx_SET (VOIDmode, loc, src);
/* A copy within the same variable part becomes MO_COPY rather than
   MO_SET (elided assignment presumably follows — TODO confirm).  */
4898 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
4905 mo->insn = cui->insn;
/* --- MEM destination --- */
4907 else if (MEM_P (loc)
4908 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
4911 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
/* Computed MEM address: emit a preceding MO_VAL_USE for the address
   value, then grab a fresh micro-operation slot for the store.  */
4913 if (MEM_P (loc) && type == MO_VAL_SET
4914 && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
4917 enum machine_mode address_mode
4918 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4919 cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4921 if (val && !cselib_preserved_value_p (val))
4923 cselib_preserve_value (val);
4924 mo->type = MO_VAL_USE;
4925 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4926 mo->u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
4927 mo->insn = cui->insn;
4928 if (dump_file && (dump_flags & TDF_DETAILS))
4929 log_op_type (mo->u.loc, cui->bb, cui->insn,
4930 mo->type, dump_file);
4931 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4935 if (GET_CODE (expr) == CLOBBER || !track_p)
4937 mo->type = MO_CLOBBER;
4938 mo->u.loc = track_p ? var_lowpart (mode2, loc) : loc;
4942 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
4943 src = var_lowpart (mode2, SET_SRC (expr));
4944 loc = var_lowpart (mode2, loc);
4953 rtx xexpr = CONST_CAST_RTX (expr);
4955 if (SET_SRC (expr) != src)
4956 xexpr = gen_rtx_SET (VOIDmode, loc, src);
4957 if (same_variable_part_p (SET_SRC (xexpr),
4958 MEM_EXPR (loc),
4959 INT_MEM_OFFSET (loc)))
4966 mo->insn = cui->insn;
/* --- Value-tracking wrap-up: turn the recorded op into MO_VAL_SET --- */
4971 if (type != MO_VAL_SET)
4972 goto log_and_return;
4974 v = find_use_val (oloc, mode, cui);
4977 goto log_and_return;
4979 resolve = preserve = !cselib_preserved_value_p (v);
4981 nloc = replace_expr_with_values (oloc);
/* Conditionally executed set: the old destination value is also an
   incoming use and may itself need resolution.  */
4985 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
4987 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0);
4989 gcc_assert (oval != v);
4990 gcc_assert (REG_P (oloc) || MEM_P (oloc));
4992 if (!cselib_preserved_value_p (oval))
4994 micro_operation *nmo = VTI (bb)->mos + VTI (bb)->n_mos++;
4996 cselib_preserve_value (oval);
4998 nmo->type = MO_VAL_USE;
4999 nmo->u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5000 VAL_NEEDS_RESOLUTION (nmo->u.loc) = 1;
5001 nmo->insn = mo->insn;
5003 if (dump_file && (dump_flags & TDF_DETAILS))
5004 log_op_type (nmo->u.loc, cui->bb, cui->insn,
5005 nmo->type, dump_file);
5010 else if (resolve && GET_CODE (mo->u.loc) == SET)
5012 nloc = replace_expr_with_values (SET_SRC (expr));
5014 /* Avoid the mode mismatch between oexpr and expr. */
5015 if (!nloc && mode != mode2)
5017 nloc = SET_SRC (expr);
5018 gcc_assert (oloc == SET_DEST (expr));
5022 oloc = gen_rtx_SET (GET_MODE (mo->u.loc), oloc, nloc);
5025 if (oloc == SET_DEST (mo->u.loc))
5026 /* No point in duplicating. */
5028 if (!REG_P (SET_SRC (mo->u.loc)))
5034 if (GET_CODE (mo->u.loc) == SET
5035 && oloc == SET_DEST (mo->u.loc))
5036 /* No point in duplicating. */
/* Build the final MO_VAL_SET location: value on the left, the
   (possibly value-substituted) store on the right.  */
5042 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5044 if (mo->u.loc != oloc)
5045 loc = gen_rtx_CONCAT (GET_MODE (mo->u.loc), loc, mo->u.loc);
5047 /* The loc of a MO_VAL_SET may have various forms:
5048
5049 (concat val dst): dst now holds val
5050
5051 (concat val (set dst src)): dst now holds val, copied from src
5052
5053 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5054 after replacing mems and non-top-level regs with values.
5055
5056 (concat (concat val dstv) (set dst src)): dst now holds val,
5057 copied from src. dstv is a value-based representation of dst, if
5058 it differs from dst. If resolution is needed, src is a REG, and
5059 its mode is the same as that of val.
5060
5061 (concat (concat val (set dstv srcv)) (set dst src)): src
5062 copied to dst, holding val. dstv and srcv are value-based
5063 representations of dst and src, respectively.
5064 */
5070 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5073 VAL_NEEDS_RESOLUTION (loc) = resolve;
5074 cselib_preserve_value (v);
/* Record how the original op classified the store so the dataflow
   phase can apply clobber/copy semantics as well.  */
5076 if (mo->type == MO_CLOBBER)
5077 VAL_EXPR_IS_CLOBBERED (loc) = 1;
5078 if (mo->type == MO_COPY)
5079 VAL_EXPR_IS_COPIED (loc) = 1;
5081 mo->type = MO_VAL_SET;
5084 if (dump_file && (dump_flags & TDF_DETAILS))
5085 log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
5088 /* Callback for cselib_record_sets_hook, that records as micro
5089 operations uses and stores in an insn after cselib_record_sets has
5090 analyzed the sets in an insn, but before it modifies the stored
5091 values in the internal tables, unless cselib_record_sets doesn't
5092 call it directly (perhaps because we're not doing cselib in the
5093 first place, in which case sets and n_sets will be 0). */
5096 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
5098 basic_block bb = BLOCK_FOR_INSN (insn);
5100 struct count_use_info cui;
5102 cselib_hook_called = true;
5107 cui.n_sets = n_sets;
5109 n1 = VTI (bb)->n_mos;
5110 cui.store_p = false;
5111 note_uses (&PATTERN (insn), add_uses_1, &cui);
5112 n2 = VTI (bb)->n_mos - 1;
5114 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
5115 MO_VAL_LOC last.  Partition [n1, n2] two-pointer style, swapping
5116 out-of-place entries. */
5118 while (n1 < n2 && VTI (bb)->mos[n1].type == MO_USE)
5120 while (n1 < n2 && VTI (bb)->mos[n2].type != MO_USE)
5126 sw = VTI (bb)->mos[n1];
5127 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5128 VTI (bb)->mos[n2] = sw;
/* Second partition pass: push MO_VAL_LOCs to the end of the range.  */
5132 n2 = VTI (bb)->n_mos - 1;
5136 while (n1 < n2 && VTI (bb)->mos[n1].type != MO_VAL_LOC)
5138 while (n1 < n2 && VTI (bb)->mos[n2].type == MO_VAL_LOC)
5144 sw = VTI (bb)->mos[n1];
5145 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5146 VTI (bb)->mos[n2] = sw;
/* Call insns get their own micro operation (elided condition
   presumably CALL_P (insn) — TODO confirm).  */
5152 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
5157 if (dump_file && (dump_flags & TDF_DETAILS))
5158 log_op_type (PATTERN (insn), bb, insn, mo->type, dump_file);
5161 n1 = VTI (bb)->n_mos;
5162 /* This will record NEXT_INSN (insn), such that we can
5163 insert notes before it without worrying about any
5164 notes that MO_USEs might emit after the insn. */
5166 note_stores (PATTERN (insn), add_stores, &cui);
5167 n2 = VTI (bb)->n_mos - 1;
5169 /* Order the MO_CLOBBERs to be before MO_SETs. */
5172 while (n1 < n2 && VTI (bb)->mos[n1].type == MO_CLOBBER)
5174 while (n1 < n2 && VTI (bb)->mos[n2].type != MO_CLOBBER)
5180 sw = VTI (bb)->mos[n1];
5181 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5182 VTI (bb)->mos[n2] = sw;
/* Return the initialization status of SRC as recorded in dataflow set
   IN.  When uninitialized-tracking is disabled everything is reported
   as initialized. */
5187 static enum var_init_status
5188 find_src_status (dataflow_set *in, rtx src)
5190 tree decl = NULL_TREE;
5191 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
5193 if (! flag_var_tracking_uninit)
5194 status = VAR_INIT_STATUS_INITIALIZED;
/* Identify the declaration SRC refers to via its REG/MEM attributes.  */
5196 if (src && REG_P (src))
5197 decl = var_debug_decl (REG_EXPR (src));
5198 else if (src && MEM_P (src))
5199 decl = var_debug_decl (MEM_EXPR (src));
5202 status = get_init_value (in, src, dv_from_decl (decl));
5207 /* SRC is the source of an assignment. Use SET to try to find what
5208 was ultimately assigned to SRC. Return that value if known,
5209 otherwise return SRC itself. */
5212 find_src_set_src (dataflow_set *set, rtx src)
5214 tree decl = NULL_TREE; /* The variable being copied around. */
5215 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
5217 location_chain nextp;
5221 if (src && REG_P (src))
5222 decl = var_debug_decl (REG_EXPR (src));
5223 else if (src && MEM_P (src))
5224 decl = var_debug_decl (MEM_EXPR (src));
5228 decl_or_value dv = dv_from_decl (decl);
5230 var = shared_hash_find (set->vars, dv);
/* Scan the variable's location chains for an entry matching SRC and
   remember the set_src recorded there.  */
5234 for (i = 0; i < var->n_var_parts && !found; i++)
5235 for (nextp = var->var_part[i].loc_chain; nextp && !found;
5236 nextp = nextp->next)
5237 if (rtx_equal_p (nextp->loc, src))
5239 set_src = nextp->set_src;
5249 /* Compute the changes of variable locations in the basic block BB.
5250 Applies every micro operation of BB to a copy of the IN set to form
5251 the OUT set, and returns whether OUT changed. */
5252 compute_bb_dataflow (basic_block bb)
5256 dataflow_set old_out;
5257 dataflow_set *in = &VTI (bb)->in;
5258 dataflow_set *out = &VTI (bb)->out;
/* Keep the previous OUT to detect whether this iteration changed it.  */
5260 dataflow_set_init (&old_out);
5261 dataflow_set_copy (&old_out, out);
5262 dataflow_set_copy (out, in);
5264 n = VTI (bb)->n_mos;
5265 for (i = 0; i < n; i++)
5267 rtx insn = VTI (bb)->mos[i].insn;
/* Dispatch on the micro-operation type; case labels are elided in
   this excerpt but each arm below is self-describing.  */
5269 switch (VTI (bb)->mos[i].type)
/* MO_CALL: call-clobbered locations die here.  */
5272 dataflow_set_clear_at_call (out);
/* MO_USE: a use marks the location as (at least) uninitialized.  */
5277 rtx loc = VTI (bb)->mos[i].u.loc;
5280 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5281 else if (MEM_P (loc))
5282 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
/* MO_VAL_LOC: a debug-insn binding of a decl to a value/location.  */
5288 rtx loc = VTI (bb)->mos[i].u.loc;
5292 if (GET_CODE (loc) == CONCAT)
5294 val = XEXP (loc, 0);
5295 vloc = XEXP (loc, 1);
5303 var = PAT_VAR_LOCATION_DECL (vloc);
5305 clobber_variable_part (out, NULL_RTX,
5306 dv_from_decl (var), 0, NULL_RTX);
5309 if (VAL_NEEDS_RESOLUTION (loc))
5310 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
5311 set_variable_part (out, val, dv_from_decl (var), 0,
5312 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
5313 INSERT);
/* MO_VAL_USE: unpack (concat [concat val use] src).  */
5320 rtx loc = VTI (bb)->mos[i].u.loc;
5321 rtx val, vloc, uloc;
5323 vloc = uloc = XEXP (loc, 1);
5324 val = XEXP (loc, 0);
5326 if (GET_CODE (val) == CONCAT)
5328 uloc = XEXP (val, 1);
5329 val = XEXP (val, 0);
5332 if (VAL_NEEDS_RESOLUTION (loc))
5333 val_resolve (out, val, vloc, insn);
5335 if (VAL_HOLDS_TRACK_EXPR (loc))
5337 if (GET_CODE (uloc) == REG)
5338 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5339 NULL);
5340 else if (GET_CODE (uloc) == MEM)
5341 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5342 NULL);
/* MO_VAL_SET: unpack the forms documented in add_stores.  */
5349 rtx loc = VTI (bb)->mos[i].u.loc;
5350 rtx val, vloc, uloc;
5352 vloc = uloc = XEXP (loc, 1);
5353 val = XEXP (loc, 0);
5355 if (GET_CODE (val) == CONCAT)
5357 vloc = XEXP (val, 1);
5358 val = XEXP (val, 0);
5361 if (GET_CODE (vloc) == SET)
5363 rtx vsrc = SET_SRC (vloc);
5365 gcc_assert (val != vsrc);
5366 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
5368 vloc = SET_DEST (vloc);
5370 if (VAL_NEEDS_RESOLUTION (loc))
5371 val_resolve (out, val, vsrc, insn);
5373 else if (VAL_NEEDS_RESOLUTION (loc))
5375 gcc_assert (GET_CODE (uloc) == SET
5376 && GET_CODE (SET_SRC (uloc)) == REG);
5377 val_resolve (out, val, SET_SRC (uloc), insn);
5380 if (VAL_HOLDS_TRACK_EXPR (loc))
5382 if (VAL_EXPR_IS_CLOBBERED (loc))
5385 var_reg_delete (out, uloc, true);
5386 else if (MEM_P (uloc))
5387 var_mem_delete (out, uloc, true);
5391 bool copied_p = VAL_EXPR_IS_COPIED (loc);
5393 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
5395 if (GET_CODE (uloc) == SET)
5397 set_src = SET_SRC (uloc);
5398 uloc = SET_DEST (uloc);
/* For copies, figure out where the source was initialized from,
   checking IN first and falling back to OUT.  */
5403 if (flag_var_tracking_uninit)
5405 status = find_src_status (in, set_src);
5407 if (status == VAR_INIT_STATUS_UNKNOWN)
5408 status = find_src_status (out, set_src);
5411 set_src = find_src_set_src (in, set_src);
5415 var_reg_delete_and_set (out, uloc, !copied_p,
5416 status, set_src);
5417 else if (MEM_P (uloc))
5418 var_mem_delete_and_set (out, uloc, !copied_p,
5419 status, set_src);
5422 else if (REG_P (uloc))
5423 var_regno_delete (out, REGNO (uloc));
5425 val_store (out, val, vloc, insn);
/* MO_SET: unconditional modification of the destination.  */
5431 rtx loc = VTI (bb)->mos[i].u.loc;
5434 if (GET_CODE (loc) == SET)
5436 set_src = SET_SRC (loc);
5437 loc = SET_DEST (loc);
5441 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5442 set_src);
5443 else if (MEM_P (loc))
5444 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5445 set_src);
/* MO_COPY: destination gains a location without losing others.  */
5451 rtx loc = VTI (bb)->mos[i].u.loc;
5452 enum var_init_status src_status;
5455 if (GET_CODE (loc) == SET)
5457 set_src = SET_SRC (loc);
5458 loc = SET_DEST (loc);
5461 if (! flag_var_tracking_uninit)
5462 src_status = VAR_INIT_STATUS_INITIALIZED;
5465 src_status = find_src_status (in, set_src);
5467 if (src_status == VAR_INIT_STATUS_UNKNOWN)
5468 src_status = find_src_status (out, set_src);
5471 set_src = find_src_set_src (in, set_src);
5474 var_reg_delete_and_set (out, loc, false, src_status, set_src);
5475 else if (MEM_P (loc))
5476 var_mem_delete_and_set (out, loc, false, src_status, set_src);
/* MO_USE_NO_VAR: drop the location but keep other locations alive.  */
5482 rtx loc = VTI (bb)->mos[i].u.loc;
5485 var_reg_delete (out, loc, false);
5486 else if (MEM_P (loc))
5487 var_mem_delete (out, loc, false);
/* MO_CLOBBER: the location's old contents are gone entirely.  */
5493 rtx loc = VTI (bb)->mos[i].u.loc;
5496 var_reg_delete (out, loc, true);
5497 else if (MEM_P (loc))
5498 var_mem_delete (out, loc, true);
/* MO_ADJUST: track the running stack-pointer offset.  */
5503 out->stack_adjust += VTI (bb)->mos[i].u.adjust;
/* With value tracking, canonicalize the resulting OUT set.  */
5508 if (MAY_HAVE_DEBUG_INSNS)
5510 dataflow_set_equiv_regs (out);
5511 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
5512 out);
5513 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
5514 out);
5516 htab_traverse (shared_hash_htab (out->vars),
5517 canonicalize_loc_order_check, out);
5520 changed = dataflow_set_different (&old_out, out);
5521 dataflow_set_destroy (&old_out);
5525 /* Find the locations of variables in the whole function.  Iterative
5526 worklist dataflow over the CFG in reverse-completion order, using
5527 two fibonacci heaps (current round and next round) until the OUT
5528 sets reach a fixed point. */
5528 vt_find_locations (void)
5530 fibheap_t worklist, pending, fibheap_swap;
5531 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
5539 /* Compute reverse completion order of depth first search of the CFG
5540 so that the data-flow runs faster. */
5541 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
5542 bb_order = XNEWVEC (int, last_basic_block);
5543 pre_and_rev_post_order_compute (NULL, rc_order, false);
5544 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
5545 bb_order[rc_order[i]] = i;
/* Seed the pending heap with every block, keyed by its order.  */
5548 worklist = fibheap_new ();
5549 pending = fibheap_new ();
5550 visited = sbitmap_alloc (last_basic_block);
5551 in_worklist = sbitmap_alloc (last_basic_block);
5552 in_pending = sbitmap_alloc (last_basic_block);
5553 sbitmap_zero (in_worklist);
5556 fibheap_insert (pending, bb_order[bb->index], bb);
5557 sbitmap_ones (in_pending);
/* Each outer iteration is one round: swap pending into worklist.  */
5559 while (!fibheap_empty (pending))
5561 fibheap_swap = pending;
5563 worklist = fibheap_swap;
5564 sbitmap_swap = in_pending;
5565 in_pending = in_worklist;
5566 in_worklist = sbitmap_swap;
5568 sbitmap_zero (visited);
5570 while (!fibheap_empty (worklist))
5572 bb = (basic_block) fibheap_extract_min (worklist);
5573 RESET_BIT (in_worklist, bb->index);
5574 if (!TEST_BIT (visited, bb->index))
5578 int oldinsz, oldoutsz;
5580 SET_BIT (visited, bb->index);
/* Bookkeeping of hash-table sizes for the dump below.  */
5582 if (dump_file && VTI (bb)->in.vars)
5585 -= htab_size (shared_hash_htab (VTI (bb)->in.vars))
5586 + htab_size (shared_hash_htab (VTI (bb)->out.vars));
5588 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
5590 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
5593 oldinsz = oldoutsz = 0;
5595 if (MAY_HAVE_DEBUG_INSNS)
5597 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
5598 bool first = true, adjust = false;
5600 /* Calculate the IN set as the intersection of
5601 predecessor OUT sets. */
5603 dataflow_set_clear (in);
5604 dst_can_be_shared = true;
5606 FOR_EACH_EDGE (e, ei, bb->preds)
5607 if (!VTI (e->src)->flooded)
5608 gcc_assert (bb_order[bb->index]
5609 <= bb_order[e->src->index]);
/* First flooded predecessor seeds IN; subsequent ones merge.  */
5612 dataflow_set_copy (in, &VTI (e->src)->out);
5613 first_out = &VTI (e->src)->out;
5618 dataflow_set_merge (in, &VTI (e->src)->out);
5624 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
5626 /* Merge and merge_adjust should keep entries in
5627 canonical order. */
5628 htab_traverse (shared_hash_htab (in->vars),
5629 canonicalize_loc_order_check,
5630 in);
/* If nothing changed relative to the seed, share its table.  */
5632 if (dst_can_be_shared)
5634 shared_hash_destroy (in->vars);
5635 in->vars = shared_hash_copy (first_out->vars);
5639 VTI (bb)->flooded = true;
5643 /* Calculate the IN set as union of predecessor OUT sets. */
5644 dataflow_set_clear (&VTI (bb)->in);
5645 FOR_EACH_EDGE (e, ei, bb->preds)
5646 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
5649 changed = compute_bb_dataflow (bb);
5651 htabsz += htab_size (shared_hash_htab (VTI (bb)->in.vars))
5652 + htab_size (shared_hash_htab (VTI (bb)->out.vars));
/* OUT changed: reschedule successors for this or the next round.  */
5656 FOR_EACH_EDGE (e, ei, bb->succs)
5658 if (e->dest == EXIT_BLOCK_PTR)
5661 if (TEST_BIT (visited, e->dest->index))
5663 if (!TEST_BIT (in_pending, e->dest->index))
5665 /* Send E->DEST to next round. */
5666 SET_BIT (in_pending, e->dest->index);
5667 fibheap_insert (pending,
5668 bb_order[e->dest->index],
5669 e->dest);
5672 else if (!TEST_BIT (in_worklist, e->dest->index))
5674 /* Add E->DEST to current round. */
5675 SET_BIT (in_worklist, e->dest->index);
5676 fibheap_insert (worklist, bb_order[e->dest->index],
5677 e->dest);
5684 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
5686 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
5688 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
5690 (int)worklist->nodes, (int)pending->nodes, htabsz);
5692 if (dump_file && (dump_flags & TDF_DETAILS))
5694 fprintf (dump_file, "BB %i IN:\n", bb->index);
5695 dump_dataflow_set (&VTI (bb)->in);
5696 fprintf (dump_file, "BB %i OUT:\n", bb->index);
5697 dump_dataflow_set (&VTI (bb)->out);
/* Sanity: with value tracking, every block must have been flooded.  */
5703 if (MAY_HAVE_DEBUG_INSNS)
5705 gcc_assert (VTI (bb)->flooded);
5707 VEC_free (rtx, heap, values_to_unmark);
5709 fibheap_delete (worklist);
5710 fibheap_delete (pending);
5711 sbitmap_free (visited);
5712 sbitmap_free (in_worklist);
5713 sbitmap_free (in_pending);
5716 /* Print the content of the LIST to dump file. */
/* NOTE(review): numeric prefixes are original line numbers; some lines
   of this function are elided in this extraction.  Prints each entry of
   the attrs LIST as "<decl-or-VALUE>+<offset>".  */
5719 dump_attrs_list (attrs list)
5721 for (; list; list = list->next)
       /* A dv is either a decl or a cselib VALUE; print accordingly.  */
5723 if (dv_is_decl_p (list->dv))
5724 print_mem_expr (dump_file, dv_as_decl (list->dv));
5726 print_rtl_single (dump_file, dv_as_value (list->dv));
       /* Offset of this part within the tracked variable.  */
5727 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
5729 fprintf (dump_file, "\n");
5732 /* Print the information about variable *SLOT to dump file. */
/* htab_traverse callback: print the variable stored in *SLOT.
   (The actual printing call is elided in this view — presumably
   dump_var (var); confirm against the full source.)  Returning
   nonzero keeps the hash-table traversal going.  */
5735 dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
5737 variable var = (variable) *slot;
5741 /* Continue traversing the hash table. */
5745 /* Print the information about variable VAR to dump file. */
/* Print variable VAR to dump_file: its name (or VALUE RTL) followed by
   each variable part's offset and location chain.  Some original lines
   are elided in this extraction.  */
5748 dump_var (variable var)
5751 location_chain node;
5753 if (dv_is_decl_p (var->dv))
5755 const_tree decl = dv_as_decl (var->dv);
       /* Named decls print their identifier; anonymous ones print the UID.  */
5757 if (DECL_NAME (decl))
5758 fprintf (dump_file, " name: %s",
5759 IDENTIFIER_POINTER (DECL_NAME (decl)));
5761 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
5762 if (dump_flags & TDF_UID)
5763 fprintf (dump_file, " D.%u\n", DECL_UID (decl));
5765 fprintf (dump_file, "\n");
       /* Not a decl: the dv is a VALUE; dump its RTL form.  */
5769 fputc (' ', dump_file);
5770 print_rtl_single (dump_file, dv_as_value (var->dv));
     /* One section per variable part: its offset, then every recorded
        location, flagging uninitialized ones.  */
5773 for (i = 0; i < var->n_var_parts; i++)
5775 fprintf (dump_file, " offset %ld\n",
5776 (long) var->var_part[i].offset);
5777 for (node = var->var_part[i].loc_chain; node; node = node->next)
5779 fprintf (dump_file, " ");
5780 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
5781 fprintf (dump_file, "[uninit]");
5782 print_rtl_single (dump_file, node->loc)
5787 /* Print the information about variables from hash table VARS to dump file. */
/* Print every variable in hash table VARS to dump_file,
   preceded by a "Variables:" header; prints nothing when empty.  */
5790 dump_vars (htab_t vars)
5792 if (htab_elements (vars) > 0)
5794 fprintf (dump_file, "Variables:\n");
5795 htab_traverse (vars, dump_var_slot, NULL);
5799 /* Print the dataflow set SET to dump file. */
/* Print dataflow set SET: the stack adjustment, the attrs list for
   each hard register that has one, and the variable hash table.  */
5802 dump_dataflow_set (dataflow_set *set)
5806 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
     /* Only hard registers carry per-register attribute lists.  */
5808 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5812 fprintf (dump_file, "Reg %d:", i);
5813 dump_attrs_list (set->regs[i]);
5816 dump_vars (shared_hash_htab (set->vars));
5817 fprintf (dump_file, "\n");
5820 /* Print the IN and OUT sets for each basic block to dump file. */
/* For every basic block, print its IN and OUT dataflow sets
   (per-block VTI data) to dump_file.  */
5823 dump_dataflow_sets (void)
5829 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
5830 fprintf (dump_file, "IN:\n");
5831 dump_dataflow_set (&VTI (bb)->in);
5832 fprintf (dump_file, "OUT:\n");
5833 dump_dataflow_set (&VTI (bb)->out);
5837 /* Add variable VAR to the hash table of changed variables and
5838 if it has no locations delete it from SET's hash table. */
/* Record VAR in the changed_variables hash table so a location note
   will be emitted for it; if SET is non-NULL and VAR has no parts
   left, also remove VAR from SET's variable table.  Some original
   lines are elided in this extraction.  */
5841 variable_was_changed (variable var, dataflow_set *set)
5843 hashval_t hash = dv_htab_hash (var->dv);
5849 /* Remember this decl or VALUE has been added to changed_variables. */
5850 set_dv_changed (var->dv, true);
5852 slot = htab_find_slot_with_hash (changed_variables,
     /* A variable with zero parts is recorded via a fresh "empty"
        placeholder so the note emitter sees it disappeared.  */
5856 if (set && var->n_var_parts == 0)
5860 empty_var = (variable) pool_alloc (dv_pool (var->dv));
5861 empty_var->dv = var->dv;
5862 empty_var->refcount = 1;
5863 empty_var->n_var_parts = 0;
5876 if (var->n_var_parts == 0)
5881 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
       /* Unshare before mutating a shared hash table.  */
5884 if (shared_hash_shared (set->vars))
5885 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
5887 htab_clear_slot (shared_hash_htab (set->vars), slot);
5893 /* Look for the index in VAR->var_part corresponding to OFFSET.
5894 Return -1 if not found. If INSERTION_POINT is non-NULL, the
5895 referenced int will be set to the index that the part has or should
5896 have, if it should be inserted. */
/* Binary-search VAR->var_part (sorted by offset) for OFFSET.
   Returns the index when found, -1 otherwise; *INSERTION_POINT (if
   non-NULL) receives the index where the part is or should be.  */
5899 find_variable_location_part (variable var, HOST_WIDE_INT offset,
5900 int *insertion_point)
5904 /* Find the location part. */
5906 high = var->n_var_parts;
5909 pos = (low + high) / 2;
5910 if (var->var_part[pos].offset < offset)
5917 if (insertion_point)
5918 *insertion_point = pos;
5920 if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
/* Record location LOC for the variable part (DV, OFFSET) in SET via
   hash-table slot SLOT, with initialization status INITIALIZED and
   source SET_SRC.  Creates the variable if *SLOT is empty; keeps
   one-part (VALUE/debug-expr) location chains in canonical order
   (VALUEs by canon_value_cmp, then registers by REGNO, then MEMs by
   address).  Returns the (possibly moved) slot.  NOTE(review): the
   numeric prefixes are original line numbers; several lines are
   elided in this extraction, so control-flow comments below cover
   only the visible code.  */
5927 set_slot_part (dataflow_set *set, rtx loc, void **slot,
5928 decl_or_value dv, HOST_WIDE_INT offset,
5929 enum var_init_status initialized, rtx set_src)
5932 location_chain node, next;
5933 location_chain *nextp;
     /* One-part dvs (VALUEs, debug exprs) always live at offset 0.  */
5935 bool onepart = dv_onepart_p (dv);
5937 gcc_assert (offset == 0 || !onepart);
5938 gcc_assert (loc != dv_as_opaque (dv));
5940 var = (variable) *slot;
     /* Without -fvar-tracking-uninit every location counts as
        initialized.  */
5942 if (! flag_var_tracking_uninit)
5943 initialized = VAR_INIT_STATUS_INITIALIZED;
5947 /* Create new variable information. */
5948 var = (variable) pool_alloc (dv_pool (dv));
5951 var->n_var_parts = 1;
5952 var->var_part[0].offset = offset;
5953 var->var_part[0].loc_chain = NULL;
5954 var->var_part[0].cur_loc = NULL;
5957 nextp = &var->var_part[0].loc_chain;
5958 if (emit_notes && dv_is_value_p (dv))
5959 add_cselib_value_chains (dv);
5965 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
     /* One-part case: find LOC's canonical insertion point in the
        single loc chain.  VALUEs sort first, ordered by
        canon_value_cmp; REGs next by REGNO; MEMs by address.  */
5969 if (GET_CODE (loc) == VALUE)
5971 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5972 nextp = &node->next)
5973 if (GET_CODE (node->loc) == VALUE)
5975 if (node->loc == loc)
5980 if (canon_value_cmp (node->loc, loc))
5988 else if (REG_P (node->loc) || MEM_P (node->loc))
5996 else if (REG_P (loc))
5998 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5999 nextp = &node->next)
6000 if (REG_P (node->loc))
6002 if (REGNO (node->loc) < REGNO (loc))
6006 if (REGNO (node->loc) == REGNO (loc))
6019 else if (MEM_P (loc))
6021 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6022 nextp = &node->next)
6023 if (REG_P (node->loc))
6025 else if (MEM_P (node->loc))
6027 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
     /* Fallback ordering for any other RTL, via loc_cmp.  */
6039 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6040 nextp = &node->next)
6041 if ((r = loc_cmp (node->loc, loc)) >= 0)
     /* Copy-on-write: unshare before mutating a shared variable, then
        re-locate the insertion point in the fresh copy.  */
6049 if (var->refcount > 1 || shared_hash_shared (set->vars))
6051 slot = unshare_variable (set, slot, var, initialized);
6052 var = (variable)*slot;
6053 for (nextp = &var->var_part[0].loc_chain; c;
6054 nextp = &(*nextp)->next)
6056 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
     /* Multi-part (decl) case from here on.  */
6063 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
6065 pos = find_variable_location_part (var, offset, &inspos);
6069 node = var->var_part[pos].loc_chain;
     /* LOC already heads the chain: just upgrade init status/set_src.  */
6072 && ((REG_P (node->loc) && REG_P (loc)
6073 && REGNO (node->loc) == REGNO (loc))
6074 || rtx_equal_p (node->loc, loc)))
6076 /* LOC is in the beginning of the chain so we have nothing
6078 if (node->init < initialized)
6079 node->init = initialized;
6080 if (set_src != NULL)
6081 node->set_src = set_src;
6087 /* We have to make a copy of a shared variable. */
6088 if (var->refcount > 1 || shared_hash_shared (set->vars))
6090 slot = unshare_variable (set, slot, var, initialized);
6091 var = (variable)*slot;
6097 /* We have not found the location part, new one will be created. */
6099 /* We have to make a copy of the shared variable. */
6100 if (var->refcount > 1 || shared_hash_shared (set->vars))
6102 slot = unshare_variable (set, slot, var, initialized);
6103 var = (variable)*slot;
6106 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
6107 thus there are at most MAX_VAR_PARTS different offsets. */
6108 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
6109 && (!var->n_var_parts || !dv_onepart_p (var->dv)));
6111 /* We have to move the elements of array starting at index
6112 inspos to the next position. */
6113 for (pos = var->n_var_parts; pos > inspos; pos--)
6114 var->var_part[pos] = var->var_part[pos - 1];
6117 var->var_part[pos].offset = offset;
6118 var->var_part[pos].loc_chain = NULL;
6119 var->var_part[pos].cur_loc = NULL;
6122 /* Delete the location from the list. */
6123 nextp = &var->var_part[pos].loc_chain;
6124 for (node = var->var_part[pos].loc_chain; node; node = next)
6127 if ((REG_P (node->loc) && REG_P (loc)
6128 && REGNO (node->loc) == REGNO (loc))
6129 || rtx_equal_p (node->loc, loc))
6131 /* Save these values, to assign to the new node, before
6132 deleting this one. */
6133 if (node->init > initialized)
6134 initialized = node->init;
6135 if (node->set_src != NULL && set_src == NULL)
6136 set_src = node->set_src;
6137 pool_free (loc_chain_pool, node);
6142 nextp = &node->next;
6145 nextp = &var->var_part[pos].loc_chain;
6148 /* Add the location to the beginning. */
6149 node = (location_chain) pool_alloc (loc_chain_pool);
6151 node->init = initialized;
6152 node->set_src = set_src;
6153 node->next = *nextp;
6156 if (onepart && emit_notes)
6157 add_value_chains (var->dv, loc);
6159 /* If no location was emitted do so. */
6160 if (var->var_part[pos].cur_loc == NULL)
6162 var->var_part[pos].cur_loc = loc;
6163 variable_was_changed (var, set);
6169 /* Set the part of variable's location in the dataflow set SET. The
6170 variable part is specified by variable's declaration in DV and
6171 offset OFFSET and the part's location by LOC. IOPT should be
6172 NO_INSERT if the variable is known to be in SET already and the
6173 variable hash table must not be resized, and INSERT otherwise. */
/* Public wrapper: look up (or create) the slot for DV in SET->vars,
   unsharing the table when an insert into a shared table is needed,
   then delegate the actual update to set_slot_part.  IOPT == NO_INSERT
   means DV is known to be present already.  */
6176 set_variable_part (dataflow_set *set, rtx loc,
6177 decl_or_value dv, HOST_WIDE_INT offset,
6178 enum var_init_status initialized, rtx set_src,
6179 enum insert_option iopt)
6183 if (iopt == NO_INSERT)
6184 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6187 slot = shared_hash_find_slot (set->vars, dv);
6189 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
6191 slot = set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
6194 /* Remove all recorded register locations for the given variable part
6195 from dataflow set SET, except for those that are identical to loc.
6196 The variable part is specified by variable's declaration or value
6197 DV and offset OFFSET. */
/* Remove register locations for the variable part at OFFSET in *SLOT,
   except locations equal to LOC (or, with uninit tracking, matching
   SET_SRC); also drops the matching attrs entries from SET->regs.
   Some original lines are elided in this extraction.  */
6200 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
6201 HOST_WIDE_INT offset, rtx set_src)
6203 variable var = (variable) *slot;
6204 int pos = find_variable_location_part (var, offset, NULL);
6208 location_chain node, next;
6210 /* Remove the register locations from the dataflow set. */
6211 next = var->var_part[pos].loc_chain;
6212 for (node = next; node; node = next)
       /* Keep LOC itself, and (when tracking uninitialized status)
          locations whose set_src matches.  */
6215 if (node->loc != loc
6216 && (!flag_var_tracking_uninit
6219 || !rtx_equal_p (set_src, node->set_src)))
6221 if (REG_P (node->loc))
6226 /* Remove the variable part from the register's
6227 list, but preserve any other variable parts
6228 that might be regarded as live in that same
6230 anextp = &set->regs[REGNO (node->loc)];
6231 for (anode = *anextp; anode; anode = anext)
6233 anext = anode->next;
                 /* Only the attrs entry for this exact (dv, offset)
                    pair is freed.  */
6234 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
6235 && anode->offset == offset)
6237 pool_free (attrs_pool, anode);
6241 anextp = &anode->next;
6245 slot = delete_slot_part (set, node->loc, slot, offset);
6253 /* Remove all recorded register locations for the given variable part
6254 from dataflow set SET, except for those that are identical to loc.
6255 The variable part is specified by variable's declaration or value
6256 DV and offset OFFSET. */
/* Wrapper around clobber_slot_part: bail out early for a null dv or a
   decl dv that is not actually a declaration, otherwise look up DV's
   slot (no insert) and clobber that part.  */
6259 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6260 HOST_WIDE_INT offset, rtx set_src)
6264 if (!dv_as_opaque (dv)
6265 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
6268 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6272 slot = clobber_slot_part (set, loc, slot, offset, set_src);
6275 /* Delete the part of variable's location from dataflow set SET. The
6276 variable part is specified by its SET->vars slot SLOT and offset
6277 OFFSET and the part's location by LOC. */
/* Delete location LOC from the part at OFFSET of the variable in
   *SLOT, unsharing the variable first if it is shared, updating
   cur_loc / value chains, and compacting var_part when the chain
   becomes empty.  Returns the (possibly moved) slot.  Some original
   lines are elided in this extraction.  */
6280 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
6281 HOST_WIDE_INT offset)
6283 variable var = (variable) *slot;
6284 int pos = find_variable_location_part (var, offset, NULL);
6288 location_chain node, next;
6289 location_chain *nextp;
     /* Copy-on-write: only unshare if LOC is actually present.  */
6292 if (var->refcount > 1 || shared_hash_shared (set->vars))
6294 /* If the variable contains the location part we have to
6295 make a copy of the variable. */
6296 for (node = var->var_part[pos].loc_chain; node;
6299 if ((REG_P (node->loc) && REG_P (loc)
6300 && REGNO (node->loc) == REGNO (loc))
6301 || rtx_equal_p (node->loc, loc))
6303 slot = unshare_variable (set, slot, var,
6304 VAR_INIT_STATUS_UNKNOWN);
6305 var = (variable)*slot;
6311 /* Delete the location part. */
6312 nextp = &var->var_part[pos].loc_chain;
6313 for (node = *nextp; node; node = next)
       /* Registers match by REGNO; everything else by rtx equality.  */
6316 if ((REG_P (node->loc) && REG_P (loc)
6317 && REGNO (node->loc) == REGNO (loc))
6318 || rtx_equal_p (node->loc, loc))
6320 if (emit_notes && pos == 0 && dv_onepart_p (var->dv))
6321 remove_value_chains (var->dv, node->loc);
6322 pool_free (loc_chain_pool, node);
6327 nextp = &node->next;
6330 /* If we have deleted the location which was last emitted
6331 we have to emit new location so add the variable to set
6332 of changed variables. */
6333 if (var->var_part[pos].cur_loc
6335 && REG_P (var->var_part[pos].cur_loc)
6336 && REGNO (loc) == REGNO (var->var_part[pos].cur_loc))
6337 || rtx_equal_p (loc, var->var_part[pos].cur_loc)))
6340 if (var->var_part[pos].loc_chain)
6341 var->var_part[pos].cur_loc = var->var_part[pos].loc_chain->loc;
     /* Chain emptied: drop this var_part entry and shift the rest
        down to keep the array dense.  */
6346 if (var->var_part[pos].loc_chain == NULL)
6348 gcc_assert (changed);
6350 if (emit_notes && var->n_var_parts == 0 && dv_is_value_p (var->dv))
6351 remove_cselib_value_chains (var->dv);
6352 while (pos < var->n_var_parts)
6354 var->var_part[pos] = var->var_part[pos + 1];
6359 variable_was_changed (var, set);
6365 /* Delete the part of variable's location from dataflow set SET. The
6366 variable part is specified by variable's declaration or value DV
6367 and offset OFFSET and the part's location by LOC. */
/* Wrapper around delete_slot_part: look up DV's slot in SET->vars
   (no insert) and delete LOC from the part at OFFSET.  */
6370 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6371 HOST_WIDE_INT offset)
6373 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
6377 slot = delete_slot_part (set, loc, slot, offset);
6380 /* Callback for cselib_expand_value, that looks for expressions
6381 holding the value in the var-tracking hash tables. Return X for
6382 standard processing, anything else is to be used as-is. */
/* cselib_expand_value callback: resolve VALUEs/DEBUG_EXPRs in X by
   looking them up in the var-tracking hash table DATA and recursively
   expanding their location chains.  VALUE_RECURSED_INTO guards
   against cycles in the value graph.  Some original lines are elided
   in this extraction.  */
6385 vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
6387 htab_t vars = (htab_t)data;
6391 rtx result, subreg, xret;
6393 switch (GET_CODE (x))
6396 subreg = SUBREG_REG (x);
       /* Only SUBREGs of VALUEs need the recursive expansion.  */
6398 if (GET_CODE (SUBREG_REG (x)) != VALUE)
6401 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
6403 vt_expand_loc_callback, data);
6408 result = simplify_gen_subreg (GET_MODE (x), subreg,
6409 GET_MODE (SUBREG_REG (x)),
6412 /* Invalid SUBREGs are ok in debug info. ??? We could try
6413 alternate expansions for the VALUE as well. */
6414 if (!result && (REG_P (subreg) || MEM_P (subreg)))
6415 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
     /* DEBUG_EXPRs are looked up by their decl, VALUEs directly.  */
6420 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
6425 dv = dv_from_value (x);
     /* Cycle in the value graph — give up on this VALUE.  */
6433 if (VALUE_RECURSED_INTO (x))
6436 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
6441 if (var->n_var_parts == 0)
6444 gcc_assert (var->n_var_parts == 1);
6446 VALUE_RECURSED_INTO (x) = true;
     /* Try each recorded location until one expands successfully.  */
6449 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
6451 result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
6452 vt_expand_loc_callback, vars);
6457 VALUE_RECURSED_INTO (x) = false;
6464 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
/* Expand any VALUEs inside LOC using the tracked locations in VARS
   (max recursion depth 5), then delegitimize resulting MEM addresses
   for cleaner debug output.  No-op without debug insns.  */
6468 vt_expand_loc (rtx loc, htab_t vars)
6470 if (!MAY_HAVE_DEBUG_INSNS)
6473 loc = cselib_expand_value_rtx_cb (loc, scratch_regs, 5,
6474 vt_expand_loc_callback, vars);
6476 if (loc && MEM_P (loc))
6477 loc = targetm.delegitimize_address (loc);
6482 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
6483 additional parameters: WHERE specifies whether the note shall be emitted
6484 before or after instruction INSN. */
/* changed_variables traversal callback: build and emit a
   NOTE_INSN_VAR_LOCATION for *VARP near DATA->insn (before or after,
   per DATA->where), merging adjacent register/memory parts into wider
   locations where possible, then remove the entry from
   changed_variables.  NOTE(review): numeric prefixes are original
   line numbers; several lines are elided in this extraction.  */
6487 emit_note_insn_var_location (void **varp, void *data)
6489 variable var = (variable) *varp;
6490 rtx insn = ((emit_note_data *)data)->insn;
6491 enum emit_note_where where = ((emit_note_data *)data)->where;
6492 htab_t vars = ((emit_note_data *)data)->vars;
6494 int i, j, n_var_parts;
6496 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
6497 HOST_WIDE_INT last_limit;
6498 tree type_size_unit;
6499 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
6500 rtx loc[MAX_VAR_PARTS];
6503 if (dv_is_value_p (var->dv))
6506 decl = dv_as_decl (var->dv);
6508 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
     /* Collect one expanded location per variable part, tracking the
        byte range covered so far in last_limit.  */
6516 for (i = 0; i < var->n_var_parts; i++)
6518 enum machine_mode mode, wider_mode;
6521 if (last_limit < var->var_part[i].offset)
6526 else if (last_limit > var->var_part[i].offset)
6528 offsets[n_var_parts] = var->var_part[i].offset;
6529 loc2 = vt_expand_loc (var->var_part[i].loc_chain->loc, vars);
6535 loc[n_var_parts] = loc2;
6536 mode = GET_MODE (var->var_part[i].loc_chain->loc);
6537 initialized = var->var_part[i].loc_chain->init;
6538 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
6540 /* Attempt to merge adjacent registers or memory. */
6541 wider_mode = GET_MODE_WIDER_MODE (mode);
6542 for (j = i + 1; j < var->n_var_parts; j++)
6543 if (last_limit <= var->var_part[j].offset)
6545 if (j < var->n_var_parts
6546 && wider_mode != VOIDmode
6547 && mode == GET_MODE (var->var_part[j].loc_chain->loc)
6548 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
6549 && (loc2 = vt_expand_loc (var->var_part[j].loc_chain->loc, vars))
6550 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2)
6551 && last_limit == var->var_part[j].offset)
         /* Register case: the two parts must together occupy exactly
            the hard registers of the wider mode.  */
6555 if (REG_P (loc[n_var_parts])
6556 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
6557 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
6558 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
6561 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
6562 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
6564 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
6565 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
6568 if (!REG_P (new_loc)
6569 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
6572 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
         /* Memory case: merge when the second MEM's address is the
            first's plus the mode size (contiguous memory).  */
6575 else if (MEM_P (loc[n_var_parts])
6576 && GET_CODE (XEXP (loc2, 0)) == PLUS
6577 && REG_P (XEXP (XEXP (loc2, 0), 0))
6578 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
6580 if ((REG_P (XEXP (loc[n_var_parts], 0))
6581 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
6582 XEXP (XEXP (loc2, 0), 0))
6583 && INTVAL (XEXP (XEXP (loc2, 0), 1))
6584 == GET_MODE_SIZE (mode))
6585 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
6586 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
6587 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
6588 XEXP (XEXP (loc2, 0), 0))
6589 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
6590 + GET_MODE_SIZE (mode)
6591 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
6592 new_loc = adjust_address_nv (loc[n_var_parts],
6598 loc[n_var_parts] = new_loc;
6600 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
     /* If the collected parts don't cover the whole decl, the
        location is incomplete.  */
6606 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6607 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
6610 if (where != EMIT_NOTE_BEFORE_INSN)
6612 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
6613 if (where == EMIT_NOTE_AFTER_CALL_INSN)
6614 NOTE_DURING_CALL_P (note) = true;
6617 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
6619 if (! flag_var_tracking_uninit)
6620 initialized = VAR_INIT_STATUS_INITIALIZED;
     /* Note payload: no location / a single location / a PARALLEL of
        (location, offset) pairs.  */
6624 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6625 NULL_RTX, (int) initialized);
6627 else if (n_var_parts == 1)
6630 = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
6632 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6636 else if (n_var_parts)
6640 for (i = 0; i < n_var_parts; i++)
6642 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
6644 parallel = gen_rtx_PARALLEL (VOIDmode,
6645 gen_rtvec_v (n_var_parts, loc));
6646 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
     /* Done with this variable; drop it from changed_variables.  */
6652 set_dv_changed (var->dv, false);
6653 htab_clear_slot (changed_variables, varp);
6655 /* Continue traversing the hash table. */
6659 DEF_VEC_P (variable);
6660 DEF_VEC_ALLOC_P (variable, heap);
6662 /* Stack of variable_def pointers that need processing with
6663 check_changed_vars_2. */
6665 static VEC (variable, heap) *changed_variables_stack;
6667 /* Populate changed_variables_stack with variable_def pointers
6668 that need variable_was_changed called on them. */
/* changed_variables traversal callback: for a VALUE dv, push onto
   changed_variables_stack every variable (from hash table DATA) whose
   loc chain references this VALUE and that is not yet marked changed.
   Deferred to a stack because we cannot insert into a hash table
   while traversing it.  */
6671 check_changed_vars_1 (void **slot, void *data)
6673 variable var = (variable) *slot;
6674 htab_t htab = (htab_t) data;
6676 if (dv_is_value_p (var->dv))
     /* Back-references to this VALUE are recorded in value_chains.  */
6679 = (value_chain) htab_find_with_hash (value_chains, var->dv,
6680 dv_htab_hash (var->dv));
6684 for (vc = vc->next; vc; vc = vc->next)
6685 if (!dv_changed_p (vc->dv))
6688 = (variable) htab_find_with_hash (htab, vc->dv,
6689 dv_htab_hash (vc->dv));
6691 VEC_safe_push (variable, heap, changed_variables_stack,
6698 /* Add VAR to changed_variables and also for VALUEs add recursively
6699 all DVs that aren't in changed_variables yet but reference the
6700 VALUE from its loc_chain. */
/* Mark VAR changed, then for a VALUE dv recursively do the same for
   every not-yet-changed variable in HTAB that references the VALUE
   (via value_chains).  Recursive counterpart of check_changed_vars_1,
   safe here because the traversal has already finished.  */
6703 check_changed_vars_2 (variable var, htab_t htab)
6705 variable_was_changed (var, NULL);
6706 if (dv_is_value_p (var->dv))
6709 = (value_chain) htab_find_with_hash (value_chains, var->dv,
6710 dv_htab_hash (var->dv));
6714 for (vc = vc->next; vc; vc = vc->next)
6715 if (!dv_changed_p (vc->dv))
6718 = (variable) htab_find_with_hash (htab, vc->dv,
6719 dv_htab_hash (vc->dv));
6721 check_changed_vars_2 (vcvar, htab);
6726 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
6727 CHANGED_VARIABLES and delete this chain. WHERE specifies whether the notes
6728 shall be emitted before of after instruction INSN. */
/* Emit NOTE_INSN_VAR_LOCATION notes (relative to INSN, per WHERE) for
   everything in changed_variables, first transitively pulling in
   variables that reference changed VALUEs, then draining the table
   via emit_note_insn_var_location.  */
6731 emit_notes_for_changes (rtx insn, enum emit_note_where where,
6734 emit_note_data data;
6735 htab_t htab = shared_hash_htab (vars);
6737 if (!htab_elements (changed_variables))
6740 if (MAY_HAVE_DEBUG_INSNS)
6742 /* Unfortunately this has to be done in two steps, because
6743 we can't traverse a hashtab into which we are inserting
6744 through variable_was_changed. */
6745 htab_traverse (changed_variables, check_changed_vars_1, htab);
6746 while (VEC_length (variable, changed_variables_stack) > 0)
6747 check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
6755 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
6758 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
6759 same variable in hash table DATA or is not there at all. */
/* Traversal callback over the OLD variable table: if the variable in
   *SLOT is missing from the NEW table (DATA) or differs from its NEW
   counterpart, queue it in changed_variables, maintaining value
   chains for one-part dvs.  Some original lines are elided in this
   extraction.  */
6762 emit_notes_for_differences_1 (void **slot, void *data)
6764 htab_t new_vars = (htab_t) data;
6765 variable old_var, new_var;
6767 old_var = (variable) *slot;
6768 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
6769 dv_htab_hash (old_var->dv));
6773 /* Variable has disappeared. */
     /* Queue an empty placeholder so a "no location" note is emitted.  */
6776 empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
6777 empty_var->dv = old_var->dv;
6778 empty_var->refcount = 0;
6779 empty_var->n_var_parts = 0;
6780 if (dv_onepart_p (old_var->dv))
6784 gcc_assert (old_var->n_var_parts == 1);
6785 for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
6786 remove_value_chains (old_var->dv, lc->loc);
6787 if (dv_is_value_p (old_var->dv))
6788 remove_cselib_value_chains (old_var->dv);
6790 variable_was_changed (empty_var, NULL);
6792 else if (variable_different_p (old_var, new_var, true))
     /* Variable changed: rebuild value chains from the old chain to
        the new one before queueing.  */
6794 if (dv_onepart_p (old_var->dv))
6796 location_chain lc1, lc2;
6798 gcc_assert (old_var->n_var_parts == 1);
6799 gcc_assert (new_var->n_var_parts == 1);
6800 lc1 = old_var->var_part[0].loc_chain;
6801 lc2 = new_var->var_part[0].loc_chain;
6804 && ((REG_P (lc1->loc) && REG_P (lc2->loc))
6805 || rtx_equal_p (lc1->loc, lc2->loc)))
6810 for (; lc2; lc2 = lc2->next)
6811 add_value_chains (old_var->dv, lc2->loc);
6812 for (; lc1; lc1 = lc1->next)
6813 remove_value_chains (old_var->dv, lc1->loc);
6815 variable_was_changed (new_var, NULL);
6818 /* Continue traversing the hash table. */
6822 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
/* Traversal callback over the NEW variable table: if the variable in
   *SLOT is absent from the OLD table (DATA), it just appeared — add
   its value chains (for one-part dvs) and queue it in
   changed_variables.  Complements emit_notes_for_differences_1.  */
6826 emit_notes_for_differences_2 (void **slot, void *data)
6828 htab_t old_vars = (htab_t) data;
6829 variable old_var, new_var;
6831 new_var = (variable) *slot;
6832 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
6833 dv_htab_hash (new_var->dv));
6836 /* Variable has appeared. */
6837 if (dv_onepart_p (new_var->dv))
6841 gcc_assert (new_var->n_var_parts == 1);
6842 for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
6843 add_value_chains (new_var->dv, lc->loc);
6844 if (dv_is_value_p (new_var->dv))
6845 add_cselib_value_chains (new_var->dv);
6847 variable_was_changed (new_var, NULL);
6850 /* Continue traversing the hash table. */
6854 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
/* Queue every variable that disappeared or changed (old vs. new) and
   every variable that appeared (new vs. old), then emit the
   corresponding notes before INSN.  */
6858 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
6859 dataflow_set *new_set)
6861 htab_traverse (shared_hash_htab (old_set->vars),
6862 emit_notes_for_differences_1,
6863 shared_hash_htab (new_set->vars));
6864 htab_traverse (shared_hash_htab (new_set->vars),
6865 emit_notes_for_differences_2,
6866 shared_hash_htab (old_set->vars));
6867 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
6870 /* Emit the notes for changes of location parts in the basic block BB. */
/* Replay the micro operations of basic block BB on the working
   dataflow set SET (starting from BB's IN set) and emit location
   notes after each state change.  Mirrors compute_bb_dataflow but
   with note emission.  NOTE(review): numeric prefixes are original
   line numbers; several lines (case labels, braces) are elided in
   this extraction.  */
6873 emit_notes_in_bb (basic_block bb, dataflow_set *set)
6877 dataflow_set_clear (set);
6878 dataflow_set_copy (set, &VTI (bb)->in);
6880 for (i = 0; i < VTI (bb)->n_mos; i++)
6882 rtx insn = VTI (bb)->mos[i].insn;
6884 switch (VTI (bb)->mos[i].type)
       /* Call: invalidate call-clobbered state, note after the call.  */
6887 dataflow_set_clear_at_call (set);
6888 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
       /* Use: record a read of a register or memory location.  */
6893 rtx loc = VTI (bb)->mos[i].u.loc;
6896 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6898 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6900 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
       /* Debug-insn binding: bind a VALUE to a tracked decl.  */
6906 rtx loc = VTI (bb)->mos[i].u.loc;
6910 if (GET_CODE (loc) == CONCAT)
6912 val = XEXP (loc, 0);
6913 vloc = XEXP (loc, 1);
6921 var = PAT_VAR_LOCATION_DECL (vloc);
6923 clobber_variable_part (set, NULL_RTX,
6924 dv_from_decl (var), 0, NULL_RTX);
6927 if (VAL_NEEDS_RESOLUTION (loc))
6928 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6929 set_variable_part (set, val, dv_from_decl (var), 0,
6930 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6934 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
       /* Value use: resolve a VALUE against its underlying location.  */
6940 rtx loc = VTI (bb)->mos[i].u.loc;
6941 rtx val, vloc, uloc;
6943 vloc = uloc = XEXP (loc, 1);
6944 val = XEXP (loc, 0);
6946 if (GET_CODE (val) == CONCAT)
6948 uloc = XEXP (val, 1);
6949 val = XEXP (val, 0);
6952 if (VAL_NEEDS_RESOLUTION (loc))
6953 val_resolve (set, val, vloc, insn);
6955 if (VAL_HOLDS_TRACK_EXPR (loc))
6957 if (GET_CODE (uloc) == REG)
6958 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6960 else if (GET_CODE (uloc) == MEM)
6961 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6965 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
       /* Value store: a VALUE is assigned; unpack CONCAT/SET wrappers
          to find the value, its location, and the underlying store.  */
6971 rtx loc = VTI (bb)->mos[i].u.loc;
6972 rtx val, vloc, uloc;
6974 vloc = uloc = XEXP (loc, 1);
6975 val = XEXP (loc, 0);
6977 if (GET_CODE (val) == CONCAT)
6979 vloc = XEXP (val, 1);
6980 val = XEXP (val, 0);
6983 if (GET_CODE (vloc) == SET)
6985 rtx vsrc = SET_SRC (vloc);
6987 gcc_assert (val != vsrc);
6988 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6990 vloc = SET_DEST (vloc);
6992 if (VAL_NEEDS_RESOLUTION (loc))
6993 val_resolve (set, val, vsrc, insn);
6995 else if (VAL_NEEDS_RESOLUTION (loc))
6997 gcc_assert (GET_CODE (uloc) == SET
6998 && GET_CODE (SET_SRC (uloc)) == REG);
6999 val_resolve (set, val, SET_SRC (uloc), insn);
7002 if (VAL_HOLDS_TRACK_EXPR (loc))
7004 if (VAL_EXPR_IS_CLOBBERED (loc))
7007 var_reg_delete (set, uloc, true);
7008 else if (MEM_P (uloc))
7009 var_mem_delete (set, uloc, true);
           /* Copies preserve the source's init status and set_src.  */
7013 bool copied_p = VAL_EXPR_IS_COPIED (loc);
7015 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
7017 if (GET_CODE (uloc) == SET)
7019 set_src = SET_SRC (uloc);
7020 uloc = SET_DEST (uloc);
7025 status = find_src_status (set, set_src);
7027 set_src = find_src_set_src (set, set_src);
7031 var_reg_delete_and_set (set, uloc, !copied_p,
7033 else if (MEM_P (uloc))
7034 var_mem_delete_and_set (set, uloc, !copied_p,
7038 else if (REG_P (uloc))
7039 var_regno_delete (set, REGNO (uloc));
7041 val_store (set, val, vloc, insn);
7043 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
       /* Plain set: destination gets a new, initialized location.  */
7050 rtx loc = VTI (bb)->mos[i].u.loc;
7053 if (GET_CODE (loc) == SET)
7055 set_src = SET_SRC (loc);
7056 loc = SET_DEST (loc);
7060 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7063 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7066 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
       /* Copy: like set, but init status is inherited from the source.  */
7073 rtx loc = VTI (bb)->mos[i].u.loc;
7074 enum var_init_status src_status;
7077 if (GET_CODE (loc) == SET)
7079 set_src = SET_SRC (loc);
7080 loc = SET_DEST (loc);
7083 src_status = find_src_status (set, set_src);
7084 set_src = find_src_set_src (set, set_src);
7087 var_reg_delete_and_set (set, loc, false, src_status, set_src);
7089 var_mem_delete_and_set (set, loc, false, src_status, set_src);
7091 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
       /* Use-then-clobber: location dies after this insn.  */
7098 rtx loc = VTI (bb)->mos[i].u.loc;
7101 var_reg_delete (set, loc, false);
7103 var_mem_delete (set, loc, false);
7105 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
       /* Clobber: location is invalidated by this insn.  */
7111 rtx loc = VTI (bb)->mos[i].u.loc;
7114 var_reg_delete (set, loc, true);
7116 var_mem_delete (set, loc, true);
7118 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
       /* Stack adjustment: track the running frame offset.  */
7124 set->stack_adjust += VTI (bb)->mos[i].u.adjust;
7130 /* Emit notes for the whole function. */
/* Driver for note emission over the whole function: walk the basic
   blocks, emitting notes for inter-block location differences and for
   changes inside each block, freeing the per-block hash tables as it
   goes.  Some original lines are elided in this extraction.  */
7133 vt_emit_notes (void)
7138 gcc_assert (!htab_elements (changed_variables));
7140 /* Free memory occupied by the out hash tables, as they aren't used
7143 dataflow_set_clear (&VTI (bb)->out);
7145 /* Enable emitting notes by functions (mainly by set_variable_part and
7146 delete_variable_part). */
7149 if (MAY_HAVE_DEBUG_INSNS)
7150 changed_variables_stack = VEC_alloc (variable, heap, 40);
     /* CUR carries the variable state across block boundaries.  */
7152 dataflow_set_init (&cur);
7156 /* Emit the notes for changes of variable locations between two
7157 subsequent basic blocks. */
7158 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
7160 /* Emit the notes for the changes in the basic block itself. */
7161 emit_notes_in_bb (bb, &cur);
7163 /* Free memory occupied by the in hash table, we won't need it
7165 dataflow_set_clear (&VTI (bb)->in);
7167 #ifdef ENABLE_CHECKING
     /* Sanity check: at the end, no variable should differ from the
        empty set and no value chains should remain.  */
7168 htab_traverse (shared_hash_htab (cur.vars),
7169 emit_notes_for_differences_1,
7170 shared_hash_htab (empty_shared_hash));
7171 if (MAY_HAVE_DEBUG_INSNS)
7172 gcc_assert (htab_elements (value_chains) == 0);
7174 dataflow_set_destroy (&cur);
7176 if (MAY_HAVE_DEBUG_INSNS)
7177 VEC_free (variable, heap, changed_variables_stack);
7182 /* If there is a declaration and offset associated with register/memory RTL
7183 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
/* NOTE(review): elided extract -- the REG_P test, braces and the
   return statements between the numbered lines are not visible here.  */
7186 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
/* Register case: decl and offset come from the REG's attached attributes.  */
7190 if (REG_ATTRS (rtl))
7192 *declp = REG_EXPR (rtl);
7193 *offsetp = REG_OFFSET (rtl);
/* Memory case: decl and offset come from the MEM's attached attributes.  */
7197 else if (MEM_P (rtl))
7199 if (MEM_ATTRS (rtl))
7201 *declp = MEM_EXPR (rtl);
7202 *offsetp = INT_MEM_OFFSET (rtl);
7209 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
/* NOTE(review): elided extract -- braces, 'continue' statements and a
   few declarations between the numbered lines are missing from view.  */
7212 vt_add_function_parameters (void)
7216 for (parm = DECL_ARGUMENTS (current_function_decl);
7217 parm; parm = TREE_CHAIN (parm))
7219 rtx decl_rtl = DECL_RTL_IF_SET (parm);
7220 rtx incoming = DECL_INCOMING_RTL (parm);
7222 enum machine_mode mode;
7223 HOST_WIDE_INT offset;
/* Filter out parameters that cannot be tracked: non-PARM_DECLs,
   unnamed parameters, missing RTL, and BLKmode locations.  */
7227 if (TREE_CODE (parm) != PARM_DECL)
7230 if (!DECL_NAME (parm))
7233 if (!decl_rtl || !incoming)
7236 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
7239 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
7241 if (REG_P (incoming) || MEM_P (incoming))
7243 /* This means argument is passed by invisible reference. */
7246 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
/* Fall back to DECL_RTL when the incoming RTL carried no decl/offset;
   adjust OFFSET for the lowpart difference between the two modes.  */
7250 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
7252 offset += byte_lowpart_offset (GET_MODE (incoming),
7253 GET_MODE (decl_rtl));
7262 /* Assume that DECL_RTL was a pseudo that got spilled to
7263 memory. The spill slot sharing code will force the
7264 memory to reference spill_slot_decl (%sfp), so we don't
7265 match above. That's ok, the pseudo must have referenced
7266 the entire parameter, so just reset OFFSET. */
7267 gcc_assert (decl == get_spill_slot_decl (false));
7271 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
/* Record the parameter's location in the ENTRY block's OUT set.  */
7274 out = &VTI (ENTRY_BLOCK_PTR)->out;
7276 dv = dv_from_decl (parm);
7278 if (target_for_debug_bind (parm)
7279 /* We can't deal with these right now, because this kind of
7280 variable is single-part. ??? We could handle parallels
7281 that describe multiple locations for the same single
7282 value, but ATM we don't. */
7283 && GET_CODE (incoming) != PARALLEL)
7287 /* ??? We shouldn't ever hit this, but it may happen because
7288 arguments passed by invisible reference aren't dealt with
7289 above: incoming-rtl will have Pmode rather than the
7290 expected mode for the type. */
/* Bind the parameter to a cselib VALUE and then track the VALUE
   instead of the decl, preserving it across table resets.  */
7294 val = cselib_lookup (var_lowpart (mode, incoming), mode, true);
7296 /* ??? Float-typed values in memory are not handled by
7300 cselib_preserve_value (val);
7301 set_variable_part (out, val->val_rtx, dv, offset,
7302 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7303 dv = dv_from_value (val->val_rtx);
/* Register-passed parameter: record both the register attribute list
   entry and the variable part; the assert documents that only hard
   registers are expected at function entry.  */
7307 if (REG_P (incoming))
7309 incoming = var_lowpart (mode, incoming);
7310 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
7311 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
7313 set_variable_part (out, incoming, dv, offset,
7314 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
/* Memory-passed (stack) parameter.  */
7316 else if (MEM_P (incoming))
7318 incoming = var_lowpart (mode, incoming);
7319 set_variable_part (out, incoming, dv, offset,
7320 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
/* Once all parameters are in, keep only preserved values in the
   cselib table and restart value numbering after them.  */
7324 if (MAY_HAVE_DEBUG_INSNS)
7326 cselib_preserve_only_values (true);
7327 cselib_reset_table_with_next_value (cselib_get_next_unknown_value ());
7332 /* Allocate and initialize the data structures for variable tracking
7333 and parse the RTL to get the micro operations. */
/* NOTE(review): elided extract -- the function header's storage class,
   braces, FOR_EACH_BB loop headers and several short lines between
   the numbered statements are missing from this view.  */
7336 vt_initialize (void)
/* Per-basic-block auxiliary info, accessed via the VTI() macro.  */
7340 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
7342 if (MAY_HAVE_DEBUG_INSNS)
7345 scratch_regs = BITMAP_ALLOC (NULL);
7346 valvar_pool = create_alloc_pool ("small variable_def pool",
7347 sizeof (struct variable_def), 256);
7351 scratch_regs = NULL;
/* Pre/post stack adjustments of the insn currently being scanned.  */
7358 HOST_WIDE_INT pre, post = 0;
/* Remember cselib's next VALUE number so the table can be rewound and
   the second scan below assigns identical VALUE numbers.  */
7360 unsigned int next_value_before = cselib_get_next_unknown_value ();
7361 unsigned int next_value_after = next_value_before;
7363 if (MAY_HAVE_DEBUG_INSNS)
/* First scan only counts micro operations (count_with_sets).  */
7365 cselib_record_sets_hook = count_with_sets;
7366 if (dump_file && (dump_flags & TDF_DETAILS))
7367 fprintf (dump_file, "first value: %i\n",
7368 cselib_get_next_unknown_value ());
7371 /* Count the number of micro operations. */
7372 VTI (bb)->n_mos = 0;
7373 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
7374 insn = NEXT_INSN (insn))
/* Without a frame pointer, explicit stack adjustments become MO_ADJUST
   micro operations.  */
7378 if (!frame_pointer_needed)
7380 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
7384 if (dump_file && (dump_flags & TDF_DETAILS))
7385 log_op_type (GEN_INT (pre), bb, insn,
7386 MO_ADJUST, dump_file);
7391 if (dump_file && (dump_flags & TDF_DETAILS))
7392 log_op_type (GEN_INT (post), bb, insn,
7393 MO_ADJUST, dump_file);
/* cselib invokes count_with_sets via the hook for insns it handles;
   anything it skipped is counted directly below.  */
7396 cselib_hook_called = false;
7397 if (MAY_HAVE_DEBUG_INSNS)
7399 cselib_process_insn (insn);
7400 if (dump_file && (dump_flags & TDF_DETAILS))
7402 print_rtl_single (dump_file, insn);
7403 dump_cselib_table (dump_file);
7406 if (!cselib_hook_called)
7407 count_with_sets (insn, 0, 0);
7411 if (dump_file && (dump_flags & TDF_DETAILS))
7412 log_op_type (PATTERN (insn), bb, insn,
7413 MO_CALL, dump_file);
7418 count = VTI (bb)->n_mos;
/* Rewind cselib to the pre-scan state so the second scan replays the
   same VALUE numbers; switch the hook to the recording variant.  */
7420 if (MAY_HAVE_DEBUG_INSNS)
7422 cselib_preserve_only_values (false);
7423 next_value_after = cselib_get_next_unknown_value ();
7424 cselib_reset_table_with_next_value (next_value_before);
7425 cselib_record_sets_hook = add_with_sets;
7426 if (dump_file && (dump_flags & TDF_DETAILS))
7427 fprintf (dump_file, "first value: %i\n",
7428 cselib_get_next_unknown_value ());
7431 /* Add the micro-operations to the array. */
7432 VTI (bb)->mos = XNEWVEC (micro_operation, VTI (bb)->n_mos);
7433 VTI (bb)->n_mos = 0;
7434 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
7435 insn = NEXT_INSN (insn))
7439 if (!frame_pointer_needed)
7441 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
7444 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
7446 mo->type = MO_ADJUST;
7450 if (dump_file && (dump_flags & TDF_DETAILS))
7451 log_op_type (PATTERN (insn), bb, insn,
7452 MO_ADJUST, dump_file);
7456 cselib_hook_called = false;
7457 if (MAY_HAVE_DEBUG_INSNS)
7459 cselib_process_insn (insn);
7460 if (dump_file && (dump_flags & TDF_DETAILS))
7462 print_rtl_single (dump_file, insn);
7463 dump_cselib_table (dump_file);
7466 if (!cselib_hook_called)
7467 add_with_sets (insn, 0, 0);
/* Post-modifying stack adjustment is emitted after the insn's other
   micro operations, matching the ordering in the file head comment.  */
7469 if (!frame_pointer_needed && post)
7471 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
7473 mo->type = MO_ADJUST;
7474 mo->u.adjust = post;
7477 if (dump_file && (dump_flags & TDF_DETAILS))
7478 log_op_type (PATTERN (insn), bb, insn,
7479 MO_ADJUST, dump_file);
/* Both scans must have produced exactly the same micro operations.  */
7483 gcc_assert (count == VTI (bb)->n_mos);
7484 if (MAY_HAVE_DEBUG_INSNS)
7486 cselib_preserve_only_values (true);
7487 gcc_assert (next_value_after == cselib_get_next_unknown_value ());
7488 cselib_reset_table_with_next_value (next_value_after);
7489 cselib_record_sets_hook = NULL;
/* Allocation pools and hash tables used throughout the pass.  */
7493 attrs_pool = create_alloc_pool ("attrs_def pool",
7494 sizeof (struct attrs_def), 1024);
/* variable_def carries a trailing array of MAX_VAR_PARTS parts; one
   part is already in sizeof, hence the (MAX_VAR_PARTS - 1) term.  */
7495 var_pool = create_alloc_pool ("variable_def pool",
7496 sizeof (struct variable_def)
7497 + (MAX_VAR_PARTS - 1)
7498 * sizeof (((variable)NULL)->var_part[0]), 64);
7499 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
7500 sizeof (struct location_chain_def),
7502 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
7503 sizeof (struct shared_hash_def), 256);
/* The canonical empty set, shared via reference counting.  */
7504 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
7505 empty_shared_hash->refcount = 1;
7506 empty_shared_hash->htab
7507 = htab_create (1, variable_htab_hash, variable_htab_eq,
7508 variable_htab_free);
7509 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
7510 variable_htab_free);
7511 if (MAY_HAVE_DEBUG_INSNS)
7513 value_chain_pool = create_alloc_pool ("value_chain_def pool",
7514 sizeof (struct value_chain_def),
7516 value_chains = htab_create (32, value_chain_htab_hash,
7517 value_chain_htab_eq, NULL);
7520 /* Init the IN and OUT sets. */
7523 VTI (bb)->visited = false;
7524 VTI (bb)->flooded = false;
7525 dataflow_set_init (&VTI (bb)->in);
7526 dataflow_set_init (&VTI (bb)->out);
7527 VTI (bb)->permp = NULL;
/* The entry block needs no flooding; seed its OUT set with the
   incoming function parameters.  */
7530 VTI (ENTRY_BLOCK_PTR)->flooded = true;
7531 vt_add_function_parameters ();
7534 /* Get rid of all debug insns from the insn stream. */
/* NOTE(review): elided extract -- the storage class, braces and the
   actual insn-deletion line are not visible here.  */
7537 delete_debug_insns (void)
/* Nothing to do unless the function may contain debug insns at all.  */
7542 if (!MAY_HAVE_DEBUG_INSNS)
/* _SAFE iteration: insns are removed while the block is being walked.  */
7547 FOR_BB_INSNS_SAFE (bb, insn, next)
7548 if (DEBUG_INSN_P (insn))
7553 /* Run a fast, BB-local only version of var tracking, to take care of
7554 information that we don't do global analysis on, such that not all
7555 information is lost. If SKIPPED holds, we're skipping the global
7556 pass entirely, so we should try to use information it would have
7557 handled as well.. */
/* NOTE(review): elided extract -- storage class and braces not visible.
   As the ??? below says, no local tracking is implemented yet: the
   function currently just discards all debug insns.  */
7560 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
7562 /* ??? Just skip it all for now. */
7563 delete_debug_insns ();
7566 /* Free the data structures needed for variable tracking. */
/* NOTE(review): the function header itself (presumably vt_finalize --
   TODO confirm against the full file) and the FOR_EACH_BB loop headers
   are not visible in this elided extract.  */
7575 free (VTI (bb)->mos);
7580 dataflow_set_destroy (&VTI (bb)->in);
7581 dataflow_set_destroy (&VTI (bb)->out);
/* PERMP is heap-allocated on demand, so destroy and delete it only
   when it was actually created.  */
7582 if (VTI (bb)->permp)
7584 dataflow_set_destroy (VTI (bb)->permp);
7585 XDELETE (VTI (bb)->permp);
/* Tear down in reverse of vt_initialize: per-block aux info, hash
   tables, allocation pools, then the debug-insn-only structures.  */
7588 free_aux_for_blocks ();
7589 htab_delete (empty_shared_hash->htab);
7590 htab_delete (changed_variables);
7591 free_alloc_pool (attrs_pool);
7592 free_alloc_pool (var_pool);
7593 free_alloc_pool (loc_chain_pool);
7594 free_alloc_pool (shared_hash_pool);
7596 if (MAY_HAVE_DEBUG_INSNS)
7598 htab_delete (value_chains);
7599 free_alloc_pool (value_chain_pool);
7600 free_alloc_pool (valvar_pool);
7602 BITMAP_FREE (scratch_regs);
7603 scratch_regs = NULL;
7607 XDELETEVEC (vui_vec);
7612 /* The entry point to variable tracking pass. */
/* NOTE(review): elided extract -- return statements and the calls to
   vt_initialize/vt_emit_notes/vt_finalize between the numbered lines
   are not visible here.  */
7615 variable_tracking_main (void)
/* A negative flag value means -fvar-tracking-assignments resolved to
   "off": drop debug insns up front -- TODO confirm flag semantics.  */
7617 if (flag_var_tracking_assignments < 0)
7619 delete_debug_insns ();
/* Bail out to the cheap BB-local pass on huge, edge-dense CFGs where
   the global dataflow analysis would be too expensive.  */
7623 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
7625 vt_debug_insns_local (true);
7629 mark_dfs_back_edges ();
7631 if (!frame_pointer_needed)
/* Stack-pointer adjustment tracking failed; give up on the global
   analysis and fall back to the local pass.  */
7633 if (!vt_stack_adjustments ())
7636 vt_debug_insns_local (true);
/* The forward dataflow analysis described in the file head comment.  */
7641 vt_find_locations ();
7643 if (dump_file && (dump_flags & TDF_DETAILS))
7645 dump_dataflow_sets ();
7646 dump_flow_info (dump_file, dump_flags);
7652 vt_debug_insns_local (false);
/* Pass gate: run variable tracking only when -fvar-tracking is on.  */
7657 gate_handle_var_tracking (void)
7659 return (flag_var_tracking);
7664 struct rtl_opt_pass pass_variable_tracking =
7668 "vartrack", /* name */
7669 gate_handle_var_tracking, /* gate */
7670 variable_tracking_main, /* execute */
7673 0, /* static_pass_number */
7674 TV_VAR_TRACKING, /* tv_id */
7675 0, /* properties_required */
7676 0, /* properties_provided */
7677 0, /* properties_destroyed */
7678 0, /* todo_flags_start */
7679 TODO_dump_func | TODO_verify_rtl_sharing/* todo_flags_finish */