1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 < clobber < set < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offset of variables addressed using stack pointer),
48 the table of structures describing the locations of parts of a variable
49 and for each physical register a linked list for each physical register.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 effective deleting appropriate variable parts when we set or clobber the
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short so it is a good data structure here.
58 For example in the following code, register allocator may assign same
59 register to variables A and B, and both of them are stored in the same
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in RTL code. Each such a note describes
74 the location of one variable at the point in instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction, we only emit these notes where the location of variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for a large variables
85 which consist of several parts, for example long long).
91 #include "coretypes.h"
96 #include "hard-reg-set.h"
97 #include "basic-block.h"
100 #include "insn-config.h"
103 #include "alloc-pool.h"
109 #include "tree-pass.h"
110 #include "tree-flow.h"
114 #include "diagnostic.h"
115 #include "tree-pretty-print.h"
116 #include "pointer-set.h"
120 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
121 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
122 Currently the value is the same as IDENTIFIER_NODE, which has such
123 a property. If this compile time assertion ever fails, make sure that
124 the new tree code that equals (int) VALUE has the same property. */
125 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
127 /* Type of micro operation. */
128 enum micro_operation_type
130 MO_USE, /* Use location (REG or MEM). */
131 MO_USE_NO_VAR,/* Use location which is not associated with a variable
132 or the variable is not trackable. */
133 MO_VAL_USE, /* Use location which is associated with a value. */
134 MO_VAL_LOC, /* Use location which appears in a debug insn. */
135 MO_VAL_SET, /* Set location associated with a value. */
136 MO_SET, /* Set location. */
137 MO_COPY, /* Copy the same portion of a variable from one
138 location to another. */
139 MO_CLOBBER, /* Clobber location. */
140 MO_CALL, /* Call insn. */
141 MO_ADJUST /* Adjust stack pointer. */
145 static const char * const ATTRIBUTE_UNUSED
146 micro_operation_type_name[] = {
159 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
160 Notes emitted as AFTER_CALL are to take effect during the call,
161 rather than after the call. */
164 EMIT_NOTE_BEFORE_INSN,
165 EMIT_NOTE_AFTER_INSN,
166 EMIT_NOTE_AFTER_CALL_INSN
169 /* Structure holding information about micro operation. */
170 typedef struct micro_operation_def
172 /* Type of micro operation. */
173 enum micro_operation_type type;
175 /* The instruction which the micro operation is in, for MO_USE,
176 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
177 instruction or note in the original flow (before any var-tracking
178 notes are inserted, to simplify emission of notes), for MO_SET
183 /* Location. For MO_SET and MO_COPY, this is the SET that
184 performs the assignment, if known, otherwise it is the target
185 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
186 CONCAT of the VALUE and the LOC associated with it. For
187 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
188 associated with it. */
191 /* Stack adjustment. */
192 HOST_WIDE_INT adjust;
196 DEF_VEC_O(micro_operation);
197 DEF_VEC_ALLOC_O(micro_operation,heap);
199 /* A declaration of a variable, or an RTL value being handled like a
201 typedef void *decl_or_value;
203 /* Structure for passing some other parameters to function
204 emit_note_insn_var_location. */
205 typedef struct emit_note_data_def
207 /* The instruction which the note will be emitted before/after. */
210 /* Where the note will be emitted (before/after insn)? */
211 enum emit_note_where where;
213 /* The variables and values active at this point. */
217 /* Description of location of a part of a variable. The content of a physical
218 register is described by a chain of these structures.
219 The chains are pretty short (usually 1 or 2 elements) and thus
220 chain is the best data structure. */
221 typedef struct attrs_def
223 /* Pointer to next member of the list. */
224 struct attrs_def *next;
226 /* The rtx of register. */
229 /* The declaration corresponding to LOC. */
232 /* Offset from start of DECL. */
233 HOST_WIDE_INT offset;
236 /* Structure holding a refcounted hash table. If refcount > 1,
237 it must be first unshared before modified. */
238 typedef struct shared_hash_def
240 /* Reference count. */
243 /* Actual hash table. */
247 /* Structure holding the IN or OUT set for a basic block. */
248 typedef struct dataflow_set_def
250 /* Adjustment of stack offset. */
251 HOST_WIDE_INT stack_adjust;
253 /* Attributes for registers (lists of attrs). */
254 attrs regs[FIRST_PSEUDO_REGISTER];
256 /* Variable locations. */
259 /* Vars that is being traversed. */
260 shared_hash traversed_vars;
263 /* The structure (one for each basic block) containing the information
264 needed for variable tracking. */
265 typedef struct variable_tracking_info_def
267 /* The vector of micro operations. */
268 VEC(micro_operation, heap) *mos;
270 /* The IN and OUT set for dataflow analysis. */
274 /* The permanent-in dataflow set for this block. This is used to
275 hold values for which we had to compute entry values. ??? This
276 should probably be dynamically allocated, to avoid using more
277 memory in non-debug builds. */
280 /* Has the block been visited in DFS? */
283 /* Has the block been flooded in VTA? */
286 } *variable_tracking_info;
288 /* Structure for chaining the locations. */
289 typedef struct location_chain_def
291 /* Next element in the chain. */
292 struct location_chain_def *next;
294 /* The location (REG, MEM or VALUE). */
297 /* The "value" stored in this location. */
301 enum var_init_status init;
304 /* Structure describing one part of variable. */
305 typedef struct variable_part_def
307 /* Chain of locations of the part. */
308 location_chain loc_chain;
310 /* Location which was last emitted to location list. */
313 /* The offset in the variable. */
314 HOST_WIDE_INT offset;
317 /* Maximum number of location parts. */
318 #define MAX_VAR_PARTS 16
320 /* Structure describing where the variable is located. */
321 typedef struct variable_def
323 /* The declaration of the variable, or an RTL value being handled
324 like a declaration. */
327 /* Reference count. */
330 /* Number of variable parts. */
333 /* True if this variable changed (any of its) cur_loc fields
334 during the current emit_notes_for_changes resp.
335 emit_notes_for_differences call. */
336 bool cur_loc_changed;
338 /* True if this variable_def struct is currently in the
339 changed_variables hash table. */
340 bool in_changed_variables;
342 /* The variable parts. */
343 variable_part var_part[1];
345 typedef const struct variable_def *const_variable;
347 /* Structure for chaining backlinks from referenced VALUEs to
348 DVs that are referencing them. */
349 typedef struct value_chain_def
351 /* Next value_chain entry. */
352 struct value_chain_def *next;
354 /* The declaration of the variable, or an RTL value
355 being handled like a declaration, whose var_parts[0].loc_chain
356 references the VALUE owning this value_chain. */
359 /* Reference count. */
362 typedef const struct value_chain_def *const_value_chain;
364 /* Pointer to the BB's information specific to variable tracking pass. */
365 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
367 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
368 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
370 /* Alloc pool for struct attrs_def. */
371 static alloc_pool attrs_pool;
373 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
374 static alloc_pool var_pool;
376 /* Alloc pool for struct variable_def with a single var_part entry. */
377 static alloc_pool valvar_pool;
379 /* Alloc pool for struct location_chain_def. */
380 static alloc_pool loc_chain_pool;
382 /* Alloc pool for struct shared_hash_def. */
383 static alloc_pool shared_hash_pool;
385 /* Alloc pool for struct value_chain_def. */
386 static alloc_pool value_chain_pool;
388 /* Changed variables, notes will be emitted for them. */
389 static htab_t changed_variables;
391 /* Links from VALUEs to DVs referencing them in their current loc_chains. */
392 static htab_t value_chains;
394 /* Shall notes be emitted? */
395 static bool emit_notes;
397 /* Empty shared hashtable. */
398 static shared_hash empty_shared_hash;
400 /* Scratch register bitmap used by cselib_expand_value_rtx. */
401 static bitmap scratch_regs = NULL;
403 typedef struct GTY(()) parm_reg {
408 DEF_VEC_O(parm_reg_t);
409 DEF_VEC_ALLOC_O(parm_reg_t, gc);
411 /* Vector of windowed parameter registers, if any. */
412 static VEC(parm_reg_t, gc) *windowed_parm_regs = NULL;
414 /* Variable used to tell whether cselib_process_insn called our hook. */
415 static bool cselib_hook_called;
417 /* Local function prototypes. */
418 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
420 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
422 static bool vt_stack_adjustments (void);
423 static void note_register_arguments (rtx);
424 static hashval_t variable_htab_hash (const void *);
425 static int variable_htab_eq (const void *, const void *);
426 static void variable_htab_free (void *);
428 static void init_attrs_list_set (attrs *);
429 static void attrs_list_clear (attrs *);
430 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
431 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
432 static void attrs_list_copy (attrs *, attrs);
433 static void attrs_list_union (attrs *, attrs);
435 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
436 enum var_init_status);
437 static void vars_copy (htab_t, htab_t);
438 static tree var_debug_decl (tree);
439 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
440 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
441 enum var_init_status, rtx);
442 static void var_reg_delete (dataflow_set *, rtx, bool);
443 static void var_regno_delete (dataflow_set *, int);
444 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
445 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
446 enum var_init_status, rtx);
447 static void var_mem_delete (dataflow_set *, rtx, bool);
449 static void dataflow_set_init (dataflow_set *);
450 static void dataflow_set_clear (dataflow_set *);
451 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
452 static int variable_union_info_cmp_pos (const void *, const void *);
453 static void dataflow_set_union (dataflow_set *, dataflow_set *);
454 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
455 static bool canon_value_cmp (rtx, rtx);
456 static int loc_cmp (rtx, rtx);
457 static bool variable_part_different_p (variable_part *, variable_part *);
458 static bool onepart_variable_different_p (variable, variable);
459 static bool variable_different_p (variable, variable);
460 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
461 static void dataflow_set_destroy (dataflow_set *);
463 static bool contains_symbol_ref (rtx);
464 static bool track_expr_p (tree, bool);
465 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
466 static int add_uses (rtx *, void *);
467 static void add_uses_1 (rtx *, void *);
468 static void add_stores (rtx, const_rtx, void *);
469 static bool compute_bb_dataflow (basic_block);
470 static bool vt_find_locations (void);
472 static void dump_attrs_list (attrs);
473 static int dump_var_slot (void **, void *);
474 static void dump_var (variable);
475 static void dump_vars (htab_t);
476 static void dump_dataflow_set (dataflow_set *);
477 static void dump_dataflow_sets (void);
479 static void variable_was_changed (variable, dataflow_set *);
480 static void **set_slot_part (dataflow_set *, rtx, void **,
481 decl_or_value, HOST_WIDE_INT,
482 enum var_init_status, rtx);
483 static void set_variable_part (dataflow_set *, rtx,
484 decl_or_value, HOST_WIDE_INT,
485 enum var_init_status, rtx, enum insert_option);
486 static void **clobber_slot_part (dataflow_set *, rtx,
487 void **, HOST_WIDE_INT, rtx);
488 static void clobber_variable_part (dataflow_set *, rtx,
489 decl_or_value, HOST_WIDE_INT, rtx);
490 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
491 static void delete_variable_part (dataflow_set *, rtx,
492 decl_or_value, HOST_WIDE_INT);
493 static int emit_note_insn_var_location (void **, void *);
494 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
495 static int emit_notes_for_differences_1 (void **, void *);
496 static int emit_notes_for_differences_2 (void **, void *);
497 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
498 static void emit_notes_in_bb (basic_block, dataflow_set *);
499 static void vt_emit_notes (void);
501 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
502 static void vt_add_function_parameters (void);
503 static bool vt_initialize (void);
504 static void vt_finalize (void);
506 /* Given a SET, calculate the amount of stack adjustment it contains
507 PRE- and POST-modifying stack pointer.
508 This function is similar to stack_adjust_offset. */
511 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
514 rtx src = SET_SRC (pattern);
515 rtx dest = SET_DEST (pattern);
518 if (dest == stack_pointer_rtx)
520 /* (set (reg sp) (plus (reg sp) (const_int))) */
521 code = GET_CODE (src);
522 if (! (code == PLUS || code == MINUS)
523 || XEXP (src, 0) != stack_pointer_rtx
524 || !CONST_INT_P (XEXP (src, 1)))
528 *post += INTVAL (XEXP (src, 1));
530 *post -= INTVAL (XEXP (src, 1));
532 else if (MEM_P (dest))
534 /* (set (mem (pre_dec (reg sp))) (foo)) */
535 src = XEXP (dest, 0);
536 code = GET_CODE (src);
542 if (XEXP (src, 0) == stack_pointer_rtx)
544 rtx val = XEXP (XEXP (src, 1), 1);
545 /* We handle only adjustments by constant amount. */
546 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
549 if (code == PRE_MODIFY)
550 *pre -= INTVAL (val);
552 *post -= INTVAL (val);
558 if (XEXP (src, 0) == stack_pointer_rtx)
560 *pre += GET_MODE_SIZE (GET_MODE (dest));
566 if (XEXP (src, 0) == stack_pointer_rtx)
568 *post += GET_MODE_SIZE (GET_MODE (dest));
574 if (XEXP (src, 0) == stack_pointer_rtx)
576 *pre -= GET_MODE_SIZE (GET_MODE (dest));
582 if (XEXP (src, 0) == stack_pointer_rtx)
584 *post -= GET_MODE_SIZE (GET_MODE (dest));
595 /* Given an INSN, calculate the amount of stack adjustment it contains
596 PRE- and POST-modifying stack pointer. */
599 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
607 pattern = PATTERN (insn);
608 if (RTX_FRAME_RELATED_P (insn))
610 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
612 pattern = XEXP (expr, 0);
615 if (GET_CODE (pattern) == SET)
616 stack_adjust_offset_pre_post (pattern, pre, post);
617 else if (GET_CODE (pattern) == PARALLEL
618 || GET_CODE (pattern) == SEQUENCE)
622 /* There may be stack adjustments inside compound insns. Search
624 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
625 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
626 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
630 /* Compute stack adjustments for all blocks by traversing DFS tree.
631 Return true when the adjustments on all incoming edges are consistent.
632 Heavily borrowed from pre_and_rev_post_order_compute. */
635 vt_stack_adjustments (void)
637 edge_iterator *stack;
640 /* Initialize entry block. */
641 VTI (ENTRY_BLOCK_PTR)->visited = true;
642 VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
643 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
645 /* Allocate stack for back-tracking up CFG. */
646 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
649 /* Push the first edge on to the stack. */
650 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
658 /* Look at the edge on the top of the stack. */
660 src = ei_edge (ei)->src;
661 dest = ei_edge (ei)->dest;
663 /* Check if the edge destination has been visited yet. */
664 if (!VTI (dest)->visited)
667 HOST_WIDE_INT pre, post, offset;
668 VTI (dest)->visited = true;
669 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
671 if (dest != EXIT_BLOCK_PTR)
672 for (insn = BB_HEAD (dest);
673 insn != NEXT_INSN (BB_END (dest));
674 insn = NEXT_INSN (insn))
678 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
679 offset += pre + post;
682 note_register_arguments (insn);
685 VTI (dest)->out.stack_adjust = offset;
687 if (EDGE_COUNT (dest->succs) > 0)
688 /* Since the DEST node has been visited for the first
689 time, check its successors. */
690 stack[sp++] = ei_start (dest->succs);
694 /* Check whether the adjustments on the edges are the same. */
695 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
701 if (! ei_one_before_end_p (ei))
702 /* Go to the next edge. */
703 ei_next (&stack[sp - 1]);
705 /* Return to previous level if there are no more edges. */
714 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
715 hard_frame_pointer_rtx is being mapped to it and offset for it. */
716 static rtx cfa_base_rtx;
717 static HOST_WIDE_INT cfa_base_offset;
719 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
720 or hard_frame_pointer_rtx. */
723 compute_cfa_pointer (HOST_WIDE_INT adjustment)
725 return plus_constant (cfa_base_rtx, adjustment + cfa_base_offset);
728 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
729 or -1 if the replacement shouldn't be done. */
730 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
732 /* Data for adjust_mems callback. */
734 struct adjust_mem_data
737 enum machine_mode mem_mode;
738 HOST_WIDE_INT stack_adjust;
742 /* Helper for adjust_mems. Return 1 if *loc is unsuitable for
743 transformation of wider mode arithmetics to narrower mode,
744 -1 if it is suitable and subexpressions shouldn't be
745 traversed and 0 if it is suitable and subexpressions should
746 be traversed. Called through for_each_rtx. */
749 use_narrower_mode_test (rtx *loc, void *data)
751 rtx subreg = (rtx) data;
753 if (CONSTANT_P (*loc))
755 switch (GET_CODE (*loc))
758 if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
760 if (!validate_subreg (GET_MODE (subreg), GET_MODE (*loc),
761 *loc, subreg_lowpart_offset (GET_MODE (subreg),
770 if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
779 /* Transform X into narrower mode MODE from wider mode WMODE. */
782 use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
786 return lowpart_subreg (mode, x, wmode);
787 switch (GET_CODE (x))
790 return lowpart_subreg (mode, x, wmode);
794 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
795 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
796 return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
798 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
799 return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
805 /* Helper function for adjusting used MEMs. */
808 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
810 struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
811 rtx mem, addr = loc, tem;
812 enum machine_mode mem_mode_save;
814 switch (GET_CODE (loc))
817 /* Don't do any sp or fp replacements outside of MEM addresses
819 if (amd->mem_mode == VOIDmode && amd->store)
821 if (loc == stack_pointer_rtx
822 && !frame_pointer_needed
824 return compute_cfa_pointer (amd->stack_adjust);
825 else if (loc == hard_frame_pointer_rtx
826 && frame_pointer_needed
827 && hard_frame_pointer_adjustment != -1
829 return compute_cfa_pointer (hard_frame_pointer_adjustment);
830 gcc_checking_assert (loc != virtual_incoming_args_rtx);
836 mem = targetm.delegitimize_address (mem);
837 if (mem != loc && !MEM_P (mem))
838 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
841 addr = XEXP (mem, 0);
842 mem_mode_save = amd->mem_mode;
843 amd->mem_mode = GET_MODE (mem);
844 store_save = amd->store;
846 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
847 amd->store = store_save;
848 amd->mem_mode = mem_mode_save;
850 addr = targetm.delegitimize_address (addr);
851 if (addr != XEXP (mem, 0))
852 mem = replace_equiv_address_nv (mem, addr);
854 mem = avoid_constant_pool_reference (mem);
858 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
859 GEN_INT (GET_CODE (loc) == PRE_INC
860 ? GET_MODE_SIZE (amd->mem_mode)
861 : -GET_MODE_SIZE (amd->mem_mode)));
865 addr = XEXP (loc, 0);
866 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
867 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
868 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
869 GEN_INT ((GET_CODE (loc) == PRE_INC
870 || GET_CODE (loc) == POST_INC)
871 ? GET_MODE_SIZE (amd->mem_mode)
872 : -GET_MODE_SIZE (amd->mem_mode)));
873 amd->side_effects = alloc_EXPR_LIST (0,
874 gen_rtx_SET (VOIDmode,
880 addr = XEXP (loc, 1);
883 addr = XEXP (loc, 0);
884 gcc_assert (amd->mem_mode != VOIDmode);
885 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
886 amd->side_effects = alloc_EXPR_LIST (0,
887 gen_rtx_SET (VOIDmode,
893 /* First try without delegitimization of whole MEMs and
894 avoid_constant_pool_reference, which is more likely to succeed. */
895 store_save = amd->store;
897 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
899 amd->store = store_save;
900 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
901 if (mem == SUBREG_REG (loc))
906 tem = simplify_gen_subreg (GET_MODE (loc), mem,
907 GET_MODE (SUBREG_REG (loc)),
911 tem = simplify_gen_subreg (GET_MODE (loc), addr,
912 GET_MODE (SUBREG_REG (loc)),
915 tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
917 if (MAY_HAVE_DEBUG_INSNS
918 && GET_CODE (tem) == SUBREG
919 && (GET_CODE (SUBREG_REG (tem)) == PLUS
920 || GET_CODE (SUBREG_REG (tem)) == MINUS
921 || GET_CODE (SUBREG_REG (tem)) == MULT
922 || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
923 && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
924 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
925 && GET_MODE_SIZE (GET_MODE (tem))
926 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
927 && subreg_lowpart_p (tem)
928 && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
929 return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
930 GET_MODE (SUBREG_REG (tem)));
933 /* Don't do any replacements in second and following
934 ASM_OPERANDS of inline-asm with multiple sets.
935 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
936 and ASM_OPERANDS_LABEL_VEC need to be equal between
937 all the ASM_OPERANDs in the insn and adjust_insn will
939 if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
948 /* Helper function for replacement of uses. */
951 adjust_mem_uses (rtx *x, void *data)
953 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
955 validate_change (NULL_RTX, x, new_x, true);
958 /* Helper function for replacement of stores. */
961 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
965 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
967 if (new_dest != SET_DEST (expr))
969 rtx xexpr = CONST_CAST_RTX (expr);
970 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
975 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
976 replace them with their value in the insn and add the side-effects
977 as other sets to the insn. */
980 adjust_insn (basic_block bb, rtx insn)
982 struct adjust_mem_data amd;
985 #ifdef HAVE_window_save
986 /* If the target machine has an explicit window save instruction, the
987 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */
988 if (RTX_FRAME_RELATED_P (insn)
989 && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
991 unsigned int i, nregs = VEC_length(parm_reg_t, windowed_parm_regs);
992 rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
995 FOR_EACH_VEC_ELT (parm_reg_t, windowed_parm_regs, i, p)
997 XVECEXP (rtl, 0, i * 2)
998 = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
999 /* Do not clobber the attached DECL, but only the REG. */
1000 XVECEXP (rtl, 0, i * 2 + 1)
1001 = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
1002 gen_raw_REG (GET_MODE (p->outgoing),
1003 REGNO (p->outgoing)));
1006 validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
1011 amd.mem_mode = VOIDmode;
1012 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
1013 amd.side_effects = NULL_RTX;
1016 note_stores (PATTERN (insn), adjust_mem_stores, &amd);
1019 if (GET_CODE (PATTERN (insn)) == PARALLEL
1020 && asm_noperands (PATTERN (insn)) > 0
1021 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1026 /* inline-asm with multiple sets is tiny bit more complicated,
1027 because the 3 vectors in ASM_OPERANDS need to be shared between
1028 all ASM_OPERANDS in the instruction. adjust_mems will
1029 not touch ASM_OPERANDS other than the first one, asm_noperands
1030 test above needs to be called before that (otherwise it would fail)
1031 and afterwards this code fixes it up. */
1032 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1033 body = PATTERN (insn);
1034 set0 = XVECEXP (body, 0, 0);
1035 gcc_checking_assert (GET_CODE (set0) == SET
1036 && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
1037 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
1038 for (i = 1; i < XVECLEN (body, 0); i++)
1039 if (GET_CODE (XVECEXP (body, 0, i)) != SET)
1043 set = XVECEXP (body, 0, i);
1044 gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
1045 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
1047 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
1048 != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
1049 || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
1050 != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
1051 || ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
1052 != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
1054 rtx newsrc = shallow_copy_rtx (SET_SRC (set));
1055 ASM_OPERANDS_INPUT_VEC (newsrc)
1056 = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
1057 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
1058 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
1059 ASM_OPERANDS_LABEL_VEC (newsrc)
1060 = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
1061 validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
1066 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1068 /* For read-only MEMs containing some constant, prefer those
1070 set = single_set (insn);
1071 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
1073 rtx note = find_reg_equal_equiv_note (insn);
1075 if (note && CONSTANT_P (XEXP (note, 0)))
1076 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
1079 if (amd.side_effects)
1081 rtx *pat, new_pat, s;
1084 pat = &PATTERN (insn);
1085 if (GET_CODE (*pat) == COND_EXEC)
1086 pat = &COND_EXEC_CODE (*pat);
1087 if (GET_CODE (*pat) == PARALLEL)
1088 oldn = XVECLEN (*pat, 0);
1091 for (s = amd.side_effects, newn = 0; s; newn++)
1093 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
1094 if (GET_CODE (*pat) == PARALLEL)
1095 for (i = 0; i < oldn; i++)
1096 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
1098 XVECEXP (new_pat, 0, 0) = *pat;
1099 for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
1100 XVECEXP (new_pat, 0, i) = XEXP (s, 0);
1101 free_EXPR_LIST_list (&amd.side_effects);
1102 validate_change (NULL_RTX, pat, new_pat, true);
1106 /* Return true if a decl_or_value DV is a DECL or NULL. */
1108 dv_is_decl_p (decl_or_value dv)
1110 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
1113 /* Return true if a decl_or_value is a VALUE rtl. */
1115 dv_is_value_p (decl_or_value dv)
1117 return dv && !dv_is_decl_p (dv);
1120 /* Return the decl in the decl_or_value. */
1122 dv_as_decl (decl_or_value dv)
1124 gcc_checking_assert (dv_is_decl_p (dv));
1128 /* Return the value in the decl_or_value. */
1130 dv_as_value (decl_or_value dv)
1132 gcc_checking_assert (dv_is_value_p (dv));
1136 /* Return the opaque pointer in the decl_or_value. */
1137 static inline void *
1138 dv_as_opaque (decl_or_value dv)
1143 /* Return true if a decl_or_value must not have more than one variable
1146 dv_onepart_p (decl_or_value dv)
1150 if (!MAY_HAVE_DEBUG_INSNS)
1153 if (dv_is_value_p (dv))
1156 decl = dv_as_decl (dv);
1161 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1164 return (target_for_debug_bind (decl) != NULL_TREE);
1167 /* Return the variable pool to be used for dv, depending on whether it
1168 can have multiple parts or not. */
1169 static inline alloc_pool
1170 dv_pool (decl_or_value dv)
1172 return dv_onepart_p (dv) ? valvar_pool : var_pool;
1175 /* Build a decl_or_value out of a decl. */
1176 static inline decl_or_value
1177 dv_from_decl (tree decl)
1181 gcc_checking_assert (dv_is_decl_p (dv));
1185 /* Build a decl_or_value out of a value. */
1186 static inline decl_or_value
1187 dv_from_value (rtx value)
1191 gcc_checking_assert (dv_is_value_p (dv));
1195 extern void debug_dv (decl_or_value dv);
1198 debug_dv (decl_or_value dv)
1200 if (dv_is_value_p (dv))
1201 debug_rtx (dv_as_value (dv));
1203 debug_generic_stmt (dv_as_decl (dv));
1206 typedef unsigned int dvuid;
1208 /* Return the uid of DV. */
1211 dv_uid (decl_or_value dv)
1213 if (dv_is_value_p (dv))
1214 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
1216 return DECL_UID (dv_as_decl (dv));
1219 /* Compute the hash from the uid. */
1221 static inline hashval_t
1222 dv_uid2hash (dvuid uid)
1227 /* The hash function for a mask table in a shared_htab chain. */
1229 static inline hashval_t
1230 dv_htab_hash (decl_or_value dv)
1232 return dv_uid2hash (dv_uid (dv));
1235 /* The hash function for variable_htab, computes the hash value
1236 from the declaration of variable X. */
1239 variable_htab_hash (const void *x)
1241 const_variable const v = (const_variable) x;
1243 return dv_htab_hash (v->dv);
1246 /* Compare the declaration of variable X with declaration Y. */
1249 variable_htab_eq (const void *x, const void *y)
1251 const_variable const v = (const_variable) x;
1252 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1254 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
1257 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1260 variable_htab_free (void *elem)
1263 variable var = (variable) elem;
1264 location_chain node, next;
1266 gcc_checking_assert (var->refcount > 0);
1269 if (var->refcount > 0)
1272 for (i = 0; i < var->n_var_parts; i++)
1274 for (node = var->var_part[i].loc_chain; node; node = next)
1277 pool_free (loc_chain_pool, node);
1279 var->var_part[i].loc_chain = NULL;
1281 pool_free (dv_pool (var->dv), var);
1284 /* The hash function for value_chains htab, computes the hash value
1288 value_chain_htab_hash (const void *x)
1290 const_value_chain const v = (const_value_chain) x;
1292 return dv_htab_hash (v->dv);
1295 /* Compare the VALUE X with VALUE Y. */
1298 value_chain_htab_eq (const void *x, const void *y)
1300 const_value_chain const v = (const_value_chain) x;
1301 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1303 return dv_as_opaque (v->dv) == dv_as_opaque (dv);
1306 /* Initialize the set (array) SET of attrs to empty lists. */
1309 init_attrs_list_set (attrs *set)
1313 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1317 /* Make the list *LISTP empty. */
1320 attrs_list_clear (attrs *listp)
1324 for (list = *listp; list; list = next)
1327 pool_free (attrs_pool, list);
1332 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
1335 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1337 for (; list; list = list->next)
1338 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1343 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1346 attrs_list_insert (attrs *listp, decl_or_value dv,
1347 HOST_WIDE_INT offset, rtx loc)
1351 list = (attrs) pool_alloc (attrs_pool);
1354 list->offset = offset;
1355 list->next = *listp;
1359 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1362 attrs_list_copy (attrs *dstp, attrs src)
1366 attrs_list_clear (dstp);
1367 for (; src; src = src->next)
1369 n = (attrs) pool_alloc (attrs_pool);
1372 n->offset = src->offset;
1378 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1381 attrs_list_union (attrs *dstp, attrs src)
1383 for (; src; src = src->next)
1385 if (!attrs_list_member (*dstp, src->dv, src->offset))
1386 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1390 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1394 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1396 gcc_assert (!*dstp);
1397 for (; src; src = src->next)
1399 if (!dv_onepart_p (src->dv))
1400 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1402 for (src = src2; src; src = src->next)
1404 if (!dv_onepart_p (src->dv)
1405 && !attrs_list_member (*dstp, src->dv, src->offset))
1406 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1410 /* Shared hashtable support. */
1412 /* Return true if VARS is shared. */
1415 shared_hash_shared (shared_hash vars)
1417 return vars->refcount > 1;
1420 /* Return the hash table for VARS. */
1422 static inline htab_t
1423 shared_hash_htab (shared_hash vars)
1428 /* Return true if VAR is shared, or maybe because VARS is shared. */
1431 shared_var_p (variable var, shared_hash vars)
1433 /* Don't count an entry in the changed_variables table as a duplicate. */
1434 return ((var->refcount > 1 + (int) var->in_changed_variables)
1435 || shared_hash_shared (vars));
1438 /* Copy variables into a new hash table. */
1441 shared_hash_unshare (shared_hash vars)
1443 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1444 gcc_assert (vars->refcount > 1);
1445 new_vars->refcount = 1;
1447 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1448 variable_htab_eq, variable_htab_free);
1449 vars_copy (new_vars->htab, vars->htab);
1454 /* Increment reference counter on VARS and return it. */
1456 static inline shared_hash
1457 shared_hash_copy (shared_hash vars)
1463 /* Decrement reference counter and destroy hash table if not shared
1467 shared_hash_destroy (shared_hash vars)
1469 gcc_checking_assert (vars->refcount > 0);
1470 if (--vars->refcount == 0)
1472 htab_delete (vars->htab);
1473 pool_free (shared_hash_pool, vars);
1477 /* Unshare *PVARS if shared and return slot for DV. If INS is
1478 INSERT, insert it if not already present. */
1480 static inline void **
1481 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1482 hashval_t dvhash, enum insert_option ins)
1484 if (shared_hash_shared (*pvars))
1485 *pvars = shared_hash_unshare (*pvars);
1486 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1489 static inline void **
1490 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1491 enum insert_option ins)
1493 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1496 /* Return slot for DV, if it is already present in the hash table.
1497 If it is not present, insert it only VARS is not shared, otherwise
1500 static inline void **
1501 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1503 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1504 shared_hash_shared (vars)
1505 ? NO_INSERT : INSERT);
1508 static inline void **
1509 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1511 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1514 /* Return slot for DV only if it is already present in the hash table. */
1516 static inline void **
1517 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1520 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1524 static inline void **
1525 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1527 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1530 /* Return variable for DV or NULL if not already present in the hash
1533 static inline variable
1534 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1536 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1539 static inline variable
1540 shared_hash_find (shared_hash vars, decl_or_value dv)
1542 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1545 /* Return true if TVAL is better than CVAL as a canonival value. We
1546 choose lowest-numbered VALUEs, using the RTX address as a
1547 tie-breaker. The idea is to arrange them into a star topology,
1548 such that all of them are at most one step away from the canonical
1549 value, and the canonical value has backlinks to all of them, in
1550 addition to all the actual locations. We don't enforce this
1551 topology throughout the entire dataflow analysis, though.
1555 canon_value_cmp (rtx tval, rtx cval)
1558 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
1561 static bool dst_can_be_shared;
1563 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1566 unshare_variable (dataflow_set *set, void **slot, variable var,
1567 enum var_init_status initialized)
1572 new_var = (variable) pool_alloc (dv_pool (var->dv));
1573 new_var->dv = var->dv;
1574 new_var->refcount = 1;
1576 new_var->n_var_parts = var->n_var_parts;
1577 new_var->cur_loc_changed = var->cur_loc_changed;
1578 var->cur_loc_changed = false;
1579 new_var->in_changed_variables = false;
1581 if (! flag_var_tracking_uninit)
1582 initialized = VAR_INIT_STATUS_INITIALIZED;
1584 for (i = 0; i < var->n_var_parts; i++)
1586 location_chain node;
1587 location_chain *nextp;
1589 new_var->var_part[i].offset = var->var_part[i].offset;
1590 nextp = &new_var->var_part[i].loc_chain;
1591 for (node = var->var_part[i].loc_chain; node; node = node->next)
1593 location_chain new_lc;
1595 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1596 new_lc->next = NULL;
1597 if (node->init > initialized)
1598 new_lc->init = node->init;
1600 new_lc->init = initialized;
1601 if (node->set_src && !(MEM_P (node->set_src)))
1602 new_lc->set_src = node->set_src;
1604 new_lc->set_src = NULL;
1605 new_lc->loc = node->loc;
1608 nextp = &new_lc->next;
1611 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1614 dst_can_be_shared = false;
1615 if (shared_hash_shared (set->vars))
1616 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1617 else if (set->traversed_vars && set->vars != set->traversed_vars)
1618 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1620 if (var->in_changed_variables)
1623 = htab_find_slot_with_hash (changed_variables, var->dv,
1624 dv_htab_hash (var->dv), NO_INSERT);
1625 gcc_assert (*cslot == (void *) var);
1626 var->in_changed_variables = false;
1627 variable_htab_free (var);
1629 new_var->in_changed_variables = true;
1634 /* Copy all variables from hash table SRC to hash table DST. */
1637 vars_copy (htab_t dst, htab_t src)
1642 FOR_EACH_HTAB_ELEMENT (src, var, variable, hi)
1646 dstp = htab_find_slot_with_hash (dst, var->dv,
1647 dv_htab_hash (var->dv),
1653 /* Map a decl to its main debug decl. */
1656 var_debug_decl (tree decl)
1658 if (decl && DECL_P (decl)
1659 && DECL_DEBUG_EXPR_IS_FROM (decl))
1661 tree debugdecl = DECL_DEBUG_EXPR (decl);
1662 if (debugdecl && DECL_P (debugdecl))
1669 /* Set the register LOC to contain DV, OFFSET. */
1672 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1673 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1674 enum insert_option iopt)
1677 bool decl_p = dv_is_decl_p (dv);
1680 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1682 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1683 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1684 && node->offset == offset)
1687 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1688 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1691 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1694 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1697 tree decl = REG_EXPR (loc);
1698 HOST_WIDE_INT offset = REG_OFFSET (loc);
1700 var_reg_decl_set (set, loc, initialized,
1701 dv_from_decl (decl), offset, set_src, INSERT);
1704 static enum var_init_status
1705 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1709 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1711 if (! flag_var_tracking_uninit)
1712 return VAR_INIT_STATUS_INITIALIZED;
1714 var = shared_hash_find (set->vars, dv);
1717 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1719 location_chain nextp;
1720 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1721 if (rtx_equal_p (nextp->loc, loc))
1723 ret_val = nextp->init;
1732 /* Delete current content of register LOC in dataflow set SET and set
1733 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1734 MODIFY is true, any other live copies of the same variable part are
1735 also deleted from the dataflow set, otherwise the variable part is
1736 assumed to be copied from another location holding the same
1740 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1741 enum var_init_status initialized, rtx set_src)
1743 tree decl = REG_EXPR (loc);
1744 HOST_WIDE_INT offset = REG_OFFSET (loc);
1748 decl = var_debug_decl (decl);
1750 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1751 initialized = get_init_value (set, loc, dv_from_decl (decl));
1753 nextp = &set->regs[REGNO (loc)];
1754 for (node = *nextp; node; node = next)
1757 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1759 delete_variable_part (set, node->loc, node->dv, node->offset);
1760 pool_free (attrs_pool, node);
1766 nextp = &node->next;
1770 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1771 var_reg_set (set, loc, initialized, set_src);
1774 /* Delete the association of register LOC in dataflow set SET with any
1775 variables that aren't onepart. If CLOBBER is true, also delete any
1776 other live copies of the same variable part, and delete the
1777 association with onepart dvs too. */
1780 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1782 attrs *nextp = &set->regs[REGNO (loc)];
1787 tree decl = REG_EXPR (loc);
1788 HOST_WIDE_INT offset = REG_OFFSET (loc);
1790 decl = var_debug_decl (decl);
1792 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1795 for (node = *nextp; node; node = next)
1798 if (clobber || !dv_onepart_p (node->dv))
1800 delete_variable_part (set, node->loc, node->dv, node->offset);
1801 pool_free (attrs_pool, node);
1805 nextp = &node->next;
1809 /* Delete content of register with number REGNO in dataflow set SET. */
1812 var_regno_delete (dataflow_set *set, int regno)
1814 attrs *reg = &set->regs[regno];
1817 for (node = *reg; node; node = next)
1820 delete_variable_part (set, node->loc, node->dv, node->offset);
1821 pool_free (attrs_pool, node);
1826 /* Set the location of DV, OFFSET as the MEM LOC. */
1829 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1830 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1831 enum insert_option iopt)
1833 if (dv_is_decl_p (dv))
1834 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1836 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1839 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1841 Adjust the address first if it is stack pointer based. */
1844 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1847 tree decl = MEM_EXPR (loc);
1848 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1850 var_mem_decl_set (set, loc, initialized,
1851 dv_from_decl (decl), offset, set_src, INSERT);
1854 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1855 dataflow set SET to LOC. If MODIFY is true, any other live copies
1856 of the same variable part are also deleted from the dataflow set,
1857 otherwise the variable part is assumed to be copied from another
1858 location holding the same part.
1859 Adjust the address first if it is stack pointer based. */
1862 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1863 enum var_init_status initialized, rtx set_src)
1865 tree decl = MEM_EXPR (loc);
1866 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1868 decl = var_debug_decl (decl);
1870 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1871 initialized = get_init_value (set, loc, dv_from_decl (decl));
1874 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1875 var_mem_set (set, loc, initialized, set_src);
1878 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1879 true, also delete any other live copies of the same variable part.
1880 Adjust the address first if it is stack pointer based. */
1883 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1885 tree decl = MEM_EXPR (loc);
1886 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1888 decl = var_debug_decl (decl);
1890 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1891 delete_variable_part (set, loc, dv_from_decl (decl), offset);
1894 /* Bind a value to a location it was just stored in. If MODIFIED
1895 holds, assume the location was modified, detaching it from any
1896 values bound to it. */
1899 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
1901 cselib_val *v = CSELIB_VAL_PTR (val);
1903 gcc_assert (cselib_preserved_value_p (v));
1907 fprintf (dump_file, "%i: ", INSN_UID (insn));
1908 print_inline_rtx (dump_file, val, 0);
1909 fprintf (dump_file, " stored in ");
1910 print_inline_rtx (dump_file, loc, 0);
1913 struct elt_loc_list *l;
1914 for (l = v->locs; l; l = l->next)
1916 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
1917 print_inline_rtx (dump_file, l->loc, 0);
1920 fprintf (dump_file, "\n");
1926 var_regno_delete (set, REGNO (loc));
1927 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1928 dv_from_value (val), 0, NULL_RTX, INSERT);
1930 else if (MEM_P (loc))
1931 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1932 dv_from_value (val), 0, NULL_RTX, INSERT);
1934 set_variable_part (set, loc, dv_from_value (val), 0,
1935 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1938 /* Reset this node, detaching all its equivalences. Return the slot
1939 in the variable hash table that holds dv, if there is one. */
1942 val_reset (dataflow_set *set, decl_or_value dv)
1944 variable var = shared_hash_find (set->vars, dv) ;
1945 location_chain node;
1948 if (!var || !var->n_var_parts)
1951 gcc_assert (var->n_var_parts == 1);
1954 for (node = var->var_part[0].loc_chain; node; node = node->next)
1955 if (GET_CODE (node->loc) == VALUE
1956 && canon_value_cmp (node->loc, cval))
1959 for (node = var->var_part[0].loc_chain; node; node = node->next)
1960 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
1962 /* Redirect the equivalence link to the new canonical
1963 value, or simply remove it if it would point at
1966 set_variable_part (set, cval, dv_from_value (node->loc),
1967 0, node->init, node->set_src, NO_INSERT);
1968 delete_variable_part (set, dv_as_value (dv),
1969 dv_from_value (node->loc), 0);
1974 decl_or_value cdv = dv_from_value (cval);
1976 /* Keep the remaining values connected, accummulating links
1977 in the canonical value. */
1978 for (node = var->var_part[0].loc_chain; node; node = node->next)
1980 if (node->loc == cval)
1982 else if (GET_CODE (node->loc) == REG)
1983 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
1984 node->set_src, NO_INSERT);
1985 else if (GET_CODE (node->loc) == MEM)
1986 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
1987 node->set_src, NO_INSERT);
1989 set_variable_part (set, node->loc, cdv, 0,
1990 node->init, node->set_src, NO_INSERT);
1994 /* We remove this last, to make sure that the canonical value is not
1995 removed to the point of requiring reinsertion. */
1997 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
1999 clobber_variable_part (set, NULL, dv, 0, NULL);
2001 /* ??? Should we make sure there aren't other available values or
2002 variables whose values involve this one other than by
2003 equivalence? E.g., at the very least we should reset MEMs, those
2004 shouldn't be too hard to find cselib-looking up the value as an
2005 address, then locating the resulting value in our own hash
2009 /* Find the values in a given location and map the val to another
2010 value, if it is unique, or add the location as one holding the
2014 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
2016 decl_or_value dv = dv_from_value (val);
2018 if (dump_file && (dump_flags & TDF_DETAILS))
2021 fprintf (dump_file, "%i: ", INSN_UID (insn));
2023 fprintf (dump_file, "head: ");
2024 print_inline_rtx (dump_file, val, 0);
2025 fputs (" is at ", dump_file);
2026 print_inline_rtx (dump_file, loc, 0);
2027 fputc ('\n', dump_file);
2030 val_reset (set, dv);
2034 attrs node, found = NULL;
2036 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2037 if (dv_is_value_p (node->dv)
2038 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2042 /* Map incoming equivalences. ??? Wouldn't it be nice if
2043 we just started sharing the location lists? Maybe a
2044 circular list ending at the value itself or some
2046 set_variable_part (set, dv_as_value (node->dv),
2047 dv_from_value (val), node->offset,
2048 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2049 set_variable_part (set, val, node->dv, node->offset,
2050 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2053 /* If we didn't find any equivalence, we need to remember that
2054 this value is held in the named register. */
2056 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2057 dv_from_value (val), 0, NULL_RTX, INSERT);
2059 else if (MEM_P (loc))
2060 /* ??? Merge equivalent MEMs. */
2061 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2062 dv_from_value (val), 0, NULL_RTX, INSERT);
2064 /* ??? Merge equivalent expressions. */
2065 set_variable_part (set, loc, dv_from_value (val), 0,
2066 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2069 /* Initialize dataflow set SET to be empty.
2070 VARS_SIZE is the initial size of hash table VARS. */
2073 dataflow_set_init (dataflow_set *set)
2075 init_attrs_list_set (set->regs);
2076 set->vars = shared_hash_copy (empty_shared_hash);
2077 set->stack_adjust = 0;
2078 set->traversed_vars = NULL;
2081 /* Delete the contents of dataflow set SET. */
2084 dataflow_set_clear (dataflow_set *set)
2088 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2089 attrs_list_clear (&set->regs[i]);
2091 shared_hash_destroy (set->vars);
2092 set->vars = shared_hash_copy (empty_shared_hash);
2095 /* Copy the contents of dataflow set SRC to DST. */
2098 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2102 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2103 attrs_list_copy (&dst->regs[i], src->regs[i]);
2105 shared_hash_destroy (dst->vars);
2106 dst->vars = shared_hash_copy (src->vars);
2107 dst->stack_adjust = src->stack_adjust;
2110 /* Information for merging lists of locations for a given offset of variable.
2112 struct variable_union_info
2114 /* Node of the location chain. */
2117 /* The sum of positions in the input chains. */
2120 /* The position in the chain of DST dataflow set. */
2124 /* Buffer for location list sorting and its allocated size. */
2125 static struct variable_union_info *vui_vec;
2126 static int vui_allocated;
2128 /* Compare function for qsort, order the structures by POS element. */
2131 variable_union_info_cmp_pos (const void *n1, const void *n2)
2133 const struct variable_union_info *const i1 =
2134 (const struct variable_union_info *) n1;
2135 const struct variable_union_info *const i2 =
2136 ( const struct variable_union_info *) n2;
2138 if (i1->pos != i2->pos)
2139 return i1->pos - i2->pos;
2141 return (i1->pos_dst - i2->pos_dst);
2144 /* Compute union of location parts of variable *SLOT and the same variable
2145 from hash table DATA. Compute "sorted" union of the location chains
2146 for common offsets, i.e. the locations of a variable part are sorted by
2147 a priority where the priority is the sum of the positions in the 2 chains
2148 (if a location is only in one list the position in the second list is
2149 defined to be larger than the length of the chains).
2150 When we are updating the location parts the newest location is in the
2151 beginning of the chain, so when we do the described "sorted" union
2152 we keep the newest locations in the beginning. */
2155 variable_union (variable src, dataflow_set *set)
2161 dstp = shared_hash_find_slot (set->vars, src->dv);
2162 if (!dstp || !*dstp)
2166 dst_can_be_shared = false;
2168 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2172 /* Continue traversing the hash table. */
2176 dst = (variable) *dstp;
2178 gcc_assert (src->n_var_parts);
2180 /* We can combine one-part variables very efficiently, because their
2181 entries are in canonical order. */
2182 if (dv_onepart_p (src->dv))
2184 location_chain *nodep, dnode, snode;
2186 gcc_assert (src->n_var_parts == 1
2187 && dst->n_var_parts == 1);
2189 snode = src->var_part[0].loc_chain;
2192 restart_onepart_unshared:
2193 nodep = &dst->var_part[0].loc_chain;
2199 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2203 location_chain nnode;
2205 if (shared_var_p (dst, set->vars))
2207 dstp = unshare_variable (set, dstp, dst,
2208 VAR_INIT_STATUS_INITIALIZED);
2209 dst = (variable)*dstp;
2210 goto restart_onepart_unshared;
2213 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2214 nnode->loc = snode->loc;
2215 nnode->init = snode->init;
2216 if (!snode->set_src || MEM_P (snode->set_src))
2217 nnode->set_src = NULL;
2219 nnode->set_src = snode->set_src;
2220 nnode->next = dnode;
2224 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2227 snode = snode->next;
2229 nodep = &dnode->next;
2236 /* Count the number of location parts, result is K. */
2237 for (i = 0, j = 0, k = 0;
2238 i < src->n_var_parts && j < dst->n_var_parts; k++)
2240 if (src->var_part[i].offset == dst->var_part[j].offset)
2245 else if (src->var_part[i].offset < dst->var_part[j].offset)
2250 k += src->n_var_parts - i;
2251 k += dst->n_var_parts - j;
2253 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2254 thus there are at most MAX_VAR_PARTS different offsets. */
2255 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
2257 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2259 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2260 dst = (variable)*dstp;
2263 i = src->n_var_parts - 1;
2264 j = dst->n_var_parts - 1;
2265 dst->n_var_parts = k;
2267 for (k--; k >= 0; k--)
2269 location_chain node, node2;
2271 if (i >= 0 && j >= 0
2272 && src->var_part[i].offset == dst->var_part[j].offset)
2274 /* Compute the "sorted" union of the chains, i.e. the locations which
2275 are in both chains go first, they are sorted by the sum of
2276 positions in the chains. */
2279 struct variable_union_info *vui;
2281 /* If DST is shared compare the location chains.
2282 If they are different we will modify the chain in DST with
2283 high probability so make a copy of DST. */
2284 if (shared_var_p (dst, set->vars))
2286 for (node = src->var_part[i].loc_chain,
2287 node2 = dst->var_part[j].loc_chain; node && node2;
2288 node = node->next, node2 = node2->next)
2290 if (!((REG_P (node2->loc)
2291 && REG_P (node->loc)
2292 && REGNO (node2->loc) == REGNO (node->loc))
2293 || rtx_equal_p (node2->loc, node->loc)))
2295 if (node2->init < node->init)
2296 node2->init = node->init;
2302 dstp = unshare_variable (set, dstp, dst,
2303 VAR_INIT_STATUS_UNKNOWN);
2304 dst = (variable)*dstp;
2309 for (node = src->var_part[i].loc_chain; node; node = node->next)
2312 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2317 /* The most common case, much simpler, no qsort is needed. */
2318 location_chain dstnode = dst->var_part[j].loc_chain;
2319 dst->var_part[k].loc_chain = dstnode;
2320 dst->var_part[k].offset = dst->var_part[j].offset;
2322 for (node = src->var_part[i].loc_chain; node; node = node->next)
2323 if (!((REG_P (dstnode->loc)
2324 && REG_P (node->loc)
2325 && REGNO (dstnode->loc) == REGNO (node->loc))
2326 || rtx_equal_p (dstnode->loc, node->loc)))
2328 location_chain new_node;
2330 /* Copy the location from SRC. */
2331 new_node = (location_chain) pool_alloc (loc_chain_pool);
2332 new_node->loc = node->loc;
2333 new_node->init = node->init;
2334 if (!node->set_src || MEM_P (node->set_src))
2335 new_node->set_src = NULL;
2337 new_node->set_src = node->set_src;
2338 node2->next = new_node;
2345 if (src_l + dst_l > vui_allocated)
2347 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2348 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2353 /* Fill in the locations from DST. */
2354 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2355 node = node->next, jj++)
2358 vui[jj].pos_dst = jj;
2360 /* Pos plus value larger than a sum of 2 valid positions. */
2361 vui[jj].pos = jj + src_l + dst_l;
2364 /* Fill in the locations from SRC. */
2366 for (node = src->var_part[i].loc_chain, ii = 0; node;
2367 node = node->next, ii++)
2369 /* Find location from NODE. */
2370 for (jj = 0; jj < dst_l; jj++)
2372 if ((REG_P (vui[jj].lc->loc)
2373 && REG_P (node->loc)
2374 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2375 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2377 vui[jj].pos = jj + ii;
2381 if (jj >= dst_l) /* The location has not been found. */
2383 location_chain new_node;
2385 /* Copy the location from SRC. */
2386 new_node = (location_chain) pool_alloc (loc_chain_pool);
2387 new_node->loc = node->loc;
2388 new_node->init = node->init;
2389 if (!node->set_src || MEM_P (node->set_src))
2390 new_node->set_src = NULL;
2392 new_node->set_src = node->set_src;
2393 vui[n].lc = new_node;
2394 vui[n].pos_dst = src_l + dst_l;
2395 vui[n].pos = ii + src_l + dst_l;
2402 /* Special case still very common case. For dst_l == 2
2403 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2404 vui[i].pos == i + src_l + dst_l. */
2405 if (vui[0].pos > vui[1].pos)
2407 /* Order should be 1, 0, 2... */
2408 dst->var_part[k].loc_chain = vui[1].lc;
2409 vui[1].lc->next = vui[0].lc;
2412 vui[0].lc->next = vui[2].lc;
2413 vui[n - 1].lc->next = NULL;
2416 vui[0].lc->next = NULL;
2421 dst->var_part[k].loc_chain = vui[0].lc;
2422 if (n >= 3 && vui[2].pos < vui[1].pos)
2424 /* Order should be 0, 2, 1, 3... */
2425 vui[0].lc->next = vui[2].lc;
2426 vui[2].lc->next = vui[1].lc;
2429 vui[1].lc->next = vui[3].lc;
2430 vui[n - 1].lc->next = NULL;
2433 vui[1].lc->next = NULL;
2438 /* Order should be 0, 1, 2... */
2440 vui[n - 1].lc->next = NULL;
2443 for (; ii < n; ii++)
2444 vui[ii - 1].lc->next = vui[ii].lc;
2448 qsort (vui, n, sizeof (struct variable_union_info),
2449 variable_union_info_cmp_pos);
2451 /* Reconnect the nodes in sorted order. */
2452 for (ii = 1; ii < n; ii++)
2453 vui[ii - 1].lc->next = vui[ii].lc;
2454 vui[n - 1].lc->next = NULL;
2455 dst->var_part[k].loc_chain = vui[0].lc;
2458 dst->var_part[k].offset = dst->var_part[j].offset;
2463 else if ((i >= 0 && j >= 0
2464 && src->var_part[i].offset < dst->var_part[j].offset)
2467 dst->var_part[k] = dst->var_part[j];
2470 else if ((i >= 0 && j >= 0
2471 && src->var_part[i].offset > dst->var_part[j].offset)
2474 location_chain *nextp;
2476 /* Copy the chain from SRC. */
2477 nextp = &dst->var_part[k].loc_chain;
2478 for (node = src->var_part[i].loc_chain; node; node = node->next)
2480 location_chain new_lc;
2482 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2483 new_lc->next = NULL;
2484 new_lc->init = node->init;
2485 if (!node->set_src || MEM_P (node->set_src))
2486 new_lc->set_src = NULL;
2488 new_lc->set_src = node->set_src;
2489 new_lc->loc = node->loc;
2492 nextp = &new_lc->next;
2495 dst->var_part[k].offset = src->var_part[i].offset;
2498 dst->var_part[k].cur_loc = NULL;
2501 if (flag_var_tracking_uninit)
2502 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2504 location_chain node, node2;
2505 for (node = src->var_part[i].loc_chain; node; node = node->next)
2506 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2507 if (rtx_equal_p (node->loc, node2->loc))
2509 if (node->init > node2->init)
2510 node2->init = node->init;
2514 /* Continue traversing the hash table. */
2518 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2521 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2525 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2526 attrs_list_union (&dst->regs[i], src->regs[i]);
2528 if (dst->vars == empty_shared_hash)
2530 shared_hash_destroy (dst->vars);
2531 dst->vars = shared_hash_copy (src->vars);
2538 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src->vars), var, variable, hi)
2539 variable_union (var, dst);
2543 /* Whether the value is currently being expanded. */
2544 #define VALUE_RECURSED_INTO(x) \
2545 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2546 /* Whether the value is in changed_variables hash table. */
2547 #define VALUE_CHANGED(x) \
2548 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2549 /* Whether the decl is in changed_variables hash table. */
2550 #define DECL_CHANGED(x) TREE_VISITED (x)
2552 /* Record that DV has been added into resp. removed from changed_variables
2556 set_dv_changed (decl_or_value dv, bool newv)
2558 if (dv_is_value_p (dv))
2559 VALUE_CHANGED (dv_as_value (dv)) = newv;
2561 DECL_CHANGED (dv_as_decl (dv)) = newv;
2564 /* Return true if DV is present in changed_variables hash table. */
2567 dv_changed_p (decl_or_value dv)
2569 return (dv_is_value_p (dv)
2570 ? VALUE_CHANGED (dv_as_value (dv))
2571 : DECL_CHANGED (dv_as_decl (dv)));
2574 /* Return a location list node whose loc is rtx_equal to LOC, in the
2575 location list of a one-part variable or value VAR, or in that of
2576 any values recursively mentioned in the location lists. VARS must
2577 be in star-canonical form. */
2579 static location_chain
2580 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2582 location_chain node;
2583 enum rtx_code loc_code;
2588 gcc_checking_assert (dv_onepart_p (var->dv));
2590 if (!var->n_var_parts)
2593 gcc_checking_assert (var->var_part[0].offset == 0);
2594 gcc_checking_assert (loc != dv_as_opaque (var->dv));
2596 loc_code = GET_CODE (loc);
2597 for (node = var->var_part[0].loc_chain; node; node = node->next)
2602 if (GET_CODE (node->loc) != loc_code)
2604 if (GET_CODE (node->loc) != VALUE)
2607 else if (loc == node->loc)
2609 else if (loc_code != VALUE)
2611 if (rtx_equal_p (loc, node->loc))
2616 /* Since we're in star-canonical form, we don't need to visit
2617 non-canonical nodes: one-part variables and non-canonical
2618 values would only point back to the canonical node. */
2619 if (dv_is_value_p (var->dv)
2620 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
2622 /* Skip all subsequent VALUEs. */
2623 while (node->next && GET_CODE (node->next->loc) == VALUE)
2626 gcc_checking_assert (!canon_value_cmp (node->loc,
2627 dv_as_value (var->dv)));
2628 if (loc == node->loc)
2634 gcc_checking_assert (node == var->var_part[0].loc_chain);
2635 gcc_checking_assert (!node->next);
2637 dv = dv_from_value (node->loc);
2638 rvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2639 return find_loc_in_1pdv (loc, rvar, vars);
2645 /* Hash table iteration argument passed to variable_merge. */
2648 /* The set in which the merge is to be inserted. */
2650 /* The set that we're iterating in. */
2652 /* The set that may contain the other dv we are to merge with. */
2654 /* Number of onepart dvs in src. */
2655 int src_onepart_cnt;
2658 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
2659 loc_cmp order, and it is maintained as such. */
2662 insert_into_intersection (location_chain *nodep, rtx loc,
2663 enum var_init_status status)
2665 location_chain node;
2668 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2669 if ((r = loc_cmp (node->loc, loc)) == 0)
2671 node->init = MIN (node->init, status);
2677 node = (location_chain) pool_alloc (loc_chain_pool);
2680 node->set_src = NULL;
2681 node->init = status;
2682 node->next = *nodep;
2686 /* Insert in DEST the intersection the locations present in both
2687 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2688 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
2692 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2693 location_chain s1node, variable s2var)
2695 dataflow_set *s1set = dsm->cur;
2696 dataflow_set *s2set = dsm->src;
2697 location_chain found;
2701 location_chain s2node;
2703 gcc_checking_assert (dv_onepart_p (s2var->dv));
2705 if (s2var->n_var_parts)
2707 gcc_checking_assert (s2var->var_part[0].offset == 0);
2708 s2node = s2var->var_part[0].loc_chain;
2710 for (; s1node && s2node;
2711 s1node = s1node->next, s2node = s2node->next)
2712 if (s1node->loc != s2node->loc)
2714 else if (s1node->loc == val)
2717 insert_into_intersection (dest, s1node->loc,
2718 MIN (s1node->init, s2node->init));
2722 for (; s1node; s1node = s1node->next)
2724 if (s1node->loc == val)
2727 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2728 shared_hash_htab (s2set->vars))))
2730 insert_into_intersection (dest, s1node->loc,
2731 MIN (s1node->init, found->init));
2735 if (GET_CODE (s1node->loc) == VALUE
2736 && !VALUE_RECURSED_INTO (s1node->loc))
2738 decl_or_value dv = dv_from_value (s1node->loc);
2739 variable svar = shared_hash_find (s1set->vars, dv);
2742 if (svar->n_var_parts == 1)
2744 VALUE_RECURSED_INTO (s1node->loc) = true;
2745 intersect_loc_chains (val, dest, dsm,
2746 svar->var_part[0].loc_chain,
2748 VALUE_RECURSED_INTO (s1node->loc) = false;
2753 /* ??? if the location is equivalent to any location in src,
2754 searched recursively
2756 add to dst the values needed to represent the equivalence
2758 telling whether locations S is equivalent to another dv's
2761 for each location D in the list
2763 if S and D satisfy rtx_equal_p, then it is present
2765 else if D is a value, recurse without cycles
2767 else if S and D have the same CODE and MODE
2769 for each operand oS and the corresponding oD
2771 if oS and oD are not equivalent, then S an D are not equivalent
2773 else if they are RTX vectors
2775 if any vector oS element is not equivalent to its respective oD,
2776 then S and D are not equivalent
2784 /* Return -1 if X should be before Y in a location list for a 1-part
2785 variable, 1 if Y should be before X, and 0 if they're equivalent
2786 and should not appear in the list. */
2789 loc_cmp (rtx x, rtx y)
2792 RTX_CODE code = GET_CODE (x);
2802 gcc_assert (GET_MODE (x) == GET_MODE (y));
2803 if (REGNO (x) == REGNO (y))
2805 else if (REGNO (x) < REGNO (y))
2818 gcc_assert (GET_MODE (x) == GET_MODE (y));
2819 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
2825 if (GET_CODE (x) == VALUE)
2827 if (GET_CODE (y) != VALUE)
2829 /* Don't assert the modes are the same, that is true only
2830 when not recursing. (subreg:QI (value:SI 1:1) 0)
2831 and (subreg:QI (value:DI 2:2) 0) can be compared,
2832 even when the modes are different. */
2833 if (canon_value_cmp (x, y))
2839 if (GET_CODE (y) == VALUE)
2842 if (GET_CODE (x) == GET_CODE (y))
2843 /* Compare operands below. */;
2844 else if (GET_CODE (x) < GET_CODE (y))
2849 gcc_assert (GET_MODE (x) == GET_MODE (y));
2851 if (GET_CODE (x) == DEBUG_EXPR)
2853 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2854 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
2856 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2857 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
2861 fmt = GET_RTX_FORMAT (code);
2862 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2866 if (XWINT (x, i) == XWINT (y, i))
2868 else if (XWINT (x, i) < XWINT (y, i))
2875 if (XINT (x, i) == XINT (y, i))
2877 else if (XINT (x, i) < XINT (y, i))
2884 /* Compare the vector length first. */
2885 if (XVECLEN (x, i) == XVECLEN (y, i))
2886 /* Compare the vectors elements. */;
2887 else if (XVECLEN (x, i) < XVECLEN (y, i))
2892 for (j = 0; j < XVECLEN (x, i); j++)
2893 if ((r = loc_cmp (XVECEXP (x, i, j),
2894 XVECEXP (y, i, j))))
2899 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
2905 if (XSTR (x, i) == XSTR (y, i))
2911 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2919 /* These are just backpointers, so they don't matter. */
2926 /* It is believed that rtx's at this level will never
2927 contain anything but integers and other rtx's,
2928 except for within LABEL_REFs and SYMBOL_REFs. */
2936 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2937 from VALUE to DVP. */
2940 add_value_chain (rtx *loc, void *dvp)
2942 decl_or_value dv, ldv;
2943 value_chain vc, nvc;
2946 if (GET_CODE (*loc) == VALUE)
2947 ldv = dv_from_value (*loc);
2948 else if (GET_CODE (*loc) == DEBUG_EXPR)
2949 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
2953 if (dv_as_opaque (ldv) == dvp)
2956 dv = (decl_or_value) dvp;
2957 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
2961 vc = (value_chain) pool_alloc (value_chain_pool);
2965 *slot = (void *) vc;
2969 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2970 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
2978 vc = (value_chain) *slot;
2979 nvc = (value_chain) pool_alloc (value_chain_pool);
2981 nvc->next = vc->next;
2987 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2988 from those VALUEs to DVP. */
2991 add_value_chains (decl_or_value dv, rtx loc)
2993 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
2995 add_value_chain (&loc, dv_as_opaque (dv));
3001 loc = XEXP (loc, 0);
3002 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
3005 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those
3006 VALUEs to DV. Add the same time get rid of ASM_OPERANDS from locs list,
3007 that is something we never can express in .debug_info and can prevent
3008 reverse ops from being used. */
3011 add_cselib_value_chains (decl_or_value dv)
3013 struct elt_loc_list **l;
3015 for (l = &CSELIB_VAL_PTR (dv_as_value (dv))->locs; *l;)
3016 if (GET_CODE ((*l)->loc) == ASM_OPERANDS)
3020 for_each_rtx (&(*l)->loc, add_value_chain, dv_as_opaque (dv));
3025 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
3026 from VALUE to DVP. */
3029 remove_value_chain (rtx *loc, void *dvp)
3031 decl_or_value dv, ldv;
3035 if (GET_CODE (*loc) == VALUE)
3036 ldv = dv_from_value (*loc);
3037 else if (GET_CODE (*loc) == DEBUG_EXPR)
3038 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
3042 if (dv_as_opaque (ldv) == dvp)
3045 dv = (decl_or_value) dvp;
3046 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
3048 for (vc = (value_chain) *slot; vc->next; vc = vc->next)
3049 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
3051 value_chain dvc = vc->next;
3052 gcc_assert (dvc->refcount > 0);
3053 if (--dvc->refcount == 0)
3055 vc->next = dvc->next;
3056 pool_free (value_chain_pool, dvc);
3057 if (vc->next == NULL && vc == (value_chain) *slot)
3059 pool_free (value_chain_pool, vc);
3060 htab_clear_slot (value_chains, slot);
3068 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
3069 from those VALUEs to DVP. */
3072 remove_value_chains (decl_or_value dv, rtx loc)
3074 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
3076 remove_value_chain (&loc, dv_as_opaque (dv));
3082 loc = XEXP (loc, 0);
3083 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
3087 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, remove backlinks from those
3091 remove_cselib_value_chains (decl_or_value dv)
3093 struct elt_loc_list *l;
3095 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
3096 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
3099 /* Check the order of entries in one-part variables. */
3102 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
3104 variable var = (variable) *slot;
3105 decl_or_value dv = var->dv;
3106 location_chain node, next;
3108 #ifdef ENABLE_RTL_CHECKING
3110 for (i = 0; i < var->n_var_parts; i++)
3111 gcc_assert (var->var_part[0].cur_loc == NULL);
3112 gcc_assert (!var->cur_loc_changed && !var->in_changed_variables);
3115 if (!dv_onepart_p (dv))
3118 gcc_assert (var->n_var_parts == 1);
3119 node = var->var_part[0].loc_chain;
3122 while ((next = node->next))
3124 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3132 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3133 more likely to be chosen as canonical for an equivalence set.
3134 Ensure less likely values can reach more likely neighbors, making
3135 the connections bidirectional. */
3138 canonicalize_values_mark (void **slot, void *data)
3140 dataflow_set *set = (dataflow_set *)data;
3141 variable var = (variable) *slot;
3142 decl_or_value dv = var->dv;
3144 location_chain node;
3146 if (!dv_is_value_p (dv))
3149 gcc_checking_assert (var->n_var_parts == 1);
3151 val = dv_as_value (dv);
3153 for (node = var->var_part[0].loc_chain; node; node = node->next)
3154 if (GET_CODE (node->loc) == VALUE)
3156 if (canon_value_cmp (node->loc, val))
3157 VALUE_RECURSED_INTO (val) = true;
3160 decl_or_value odv = dv_from_value (node->loc);
3161 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3163 set_slot_part (set, val, oslot, odv, 0,
3164 node->init, NULL_RTX);
3166 VALUE_RECURSED_INTO (node->loc) = true;
3173 /* Remove redundant entries from equivalence lists in onepart
3174 variables, canonicalizing equivalence sets into star shapes. */
3177 canonicalize_values_star (void **slot, void *data)
3179 dataflow_set *set = (dataflow_set *)data;
3180 variable var = (variable) *slot;
3181 decl_or_value dv = var->dv;
3182 location_chain node;
3189 if (!dv_onepart_p (dv))
3192 gcc_checking_assert (var->n_var_parts == 1);
3194 if (dv_is_value_p (dv))
3196 cval = dv_as_value (dv);
3197 if (!VALUE_RECURSED_INTO (cval))
3199 VALUE_RECURSED_INTO (cval) = false;
3209 gcc_assert (var->n_var_parts == 1);
3211 for (node = var->var_part[0].loc_chain; node; node = node->next)
3212 if (GET_CODE (node->loc) == VALUE)
3215 if (VALUE_RECURSED_INTO (node->loc))
3217 if (canon_value_cmp (node->loc, cval))
3226 if (!has_marks || dv_is_decl_p (dv))
3229 /* Keep it marked so that we revisit it, either after visiting a
3230 child node, or after visiting a new parent that might be
3232 VALUE_RECURSED_INTO (val) = true;
3234 for (node = var->var_part[0].loc_chain; node; node = node->next)
3235 if (GET_CODE (node->loc) == VALUE
3236 && VALUE_RECURSED_INTO (node->loc))
3240 VALUE_RECURSED_INTO (cval) = false;
3241 dv = dv_from_value (cval);
3242 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3245 gcc_assert (dv_is_decl_p (var->dv));
3246 /* The canonical value was reset and dropped.
3248 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3251 var = (variable)*slot;
3252 gcc_assert (dv_is_value_p (var->dv));
3253 if (var->n_var_parts == 0)
3255 gcc_assert (var->n_var_parts == 1);
3259 VALUE_RECURSED_INTO (val) = false;
3264 /* Push values to the canonical one. */
3265 cdv = dv_from_value (cval);
3266 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3268 for (node = var->var_part[0].loc_chain; node; node = node->next)
3269 if (node->loc != cval)
3271 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3272 node->init, NULL_RTX);
3273 if (GET_CODE (node->loc) == VALUE)
3275 decl_or_value ndv = dv_from_value (node->loc);
3277 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3280 if (canon_value_cmp (node->loc, val))
3282 /* If it could have been a local minimum, it's not any more,
3283 since it's now neighbor to cval, so it may have to push
3284 to it. Conversely, if it wouldn't have prevailed over
3285 val, then whatever mark it has is fine: if it was to
3286 push, it will now push to a more canonical node, but if
3287 it wasn't, then it has already pushed any values it might
3289 VALUE_RECURSED_INTO (node->loc) = true;
3290 /* Make sure we visit node->loc by ensuring we cval is
3292 VALUE_RECURSED_INTO (cval) = true;
3294 else if (!VALUE_RECURSED_INTO (node->loc))
3295 /* If we have no need to "recurse" into this node, it's
3296 already "canonicalized", so drop the link to the old
3298 clobber_variable_part (set, cval, ndv, 0, NULL);
3300 else if (GET_CODE (node->loc) == REG)
3302 attrs list = set->regs[REGNO (node->loc)], *listp;
3304 /* Change an existing attribute referring to dv so that it
3305 refers to cdv, removing any duplicate this might
3306 introduce, and checking that no previous duplicates
3307 existed, all in a single pass. */
3311 if (list->offset == 0
3312 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3313 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3320 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3323 for (listp = &list->next; (list = *listp); listp = &list->next)
3328 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3330 *listp = list->next;
3331 pool_free (attrs_pool, list);
3336 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3339 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3341 for (listp = &list->next; (list = *listp); listp = &list->next)
3346 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3348 *listp = list->next;
3349 pool_free (attrs_pool, list);
3354 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3363 if (list->offset == 0
3364 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3365 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3375 set_slot_part (set, val, cslot, cdv, 0,
3376 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3378 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3380 /* Variable may have been unshared. */
3381 var = (variable)*slot;
3382 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3383 && var->var_part[0].loc_chain->next == NULL);
3385 if (VALUE_RECURSED_INTO (cval))
3386 goto restart_with_cval;
3391 /* Bind one-part variables to the canonical value in an equivalence
3392 set. Not doing this causes dataflow convergence failure in rare
3393 circumstances, see PR42873. Unfortunately we can't do this
3394 efficiently as part of canonicalize_values_star, since we may not
3395 have determined or even seen the canonical value of a set when we
3396 get to a variable that references another member of the set. */
3399 canonicalize_vars_star (void **slot, void *data)
3401 dataflow_set *set = (dataflow_set *)data;
3402 variable var = (variable) *slot;
3403 decl_or_value dv = var->dv;
3404 location_chain node;
3409 location_chain cnode;
3411 if (!dv_onepart_p (dv) || dv_is_value_p (dv))
3414 gcc_assert (var->n_var_parts == 1);
3416 node = var->var_part[0].loc_chain;
3418 if (GET_CODE (node->loc) != VALUE)
3421 gcc_assert (!node->next);
3424 /* Push values to the canonical one. */
3425 cdv = dv_from_value (cval);
3426 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3429 cvar = (variable)*cslot;
3430 gcc_assert (cvar->n_var_parts == 1);
3432 cnode = cvar->var_part[0].loc_chain;
3434 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3435 that are not “more canonical” than it. */
3436 if (GET_CODE (cnode->loc) != VALUE
3437 || !canon_value_cmp (cnode->loc, cval))
3440 /* CVAL was found to be non-canonical. Change the variable to point
3441 to the canonical VALUE. */
3442 gcc_assert (!cnode->next);
3445 slot = set_slot_part (set, cval, slot, dv, 0,
3446 node->init, node->set_src);
3447 clobber_slot_part (set, cval, slot, 0, node->set_src);
3452 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3453 corresponding entry in DSM->src. Multi-part variables are combined
3454 with variable_union, whereas onepart dvs are combined with
3458 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3460 dataflow_set *dst = dsm->dst;
3462 variable s2var, dvar = NULL;
3463 decl_or_value dv = s1var->dv;
3464 bool onepart = dv_onepart_p (dv);
3467 location_chain node, *nodep;
3469 /* If the incoming onepart variable has an empty location list, then
3470 the intersection will be just as empty. For other variables,
3471 it's always union. */
3472 gcc_checking_assert (s1var->n_var_parts
3473 && s1var->var_part[0].loc_chain);
3476 return variable_union (s1var, dst);
3478 gcc_checking_assert (s1var->n_var_parts == 1
3479 && s1var->var_part[0].offset == 0);
3481 dvhash = dv_htab_hash (dv);
3482 if (dv_is_value_p (dv))
3483 val = dv_as_value (dv);
3487 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3490 dst_can_be_shared = false;
3494 dsm->src_onepart_cnt--;
3495 gcc_assert (s2var->var_part[0].loc_chain
3496 && s2var->n_var_parts == 1
3497 && s2var->var_part[0].offset == 0);
3499 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3502 dvar = (variable)*dstslot;
3503 gcc_assert (dvar->refcount == 1
3504 && dvar->n_var_parts == 1
3505 && dvar->var_part[0].offset == 0);
3506 nodep = &dvar->var_part[0].loc_chain;
3514 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3516 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3518 *dstslot = dvar = s2var;
3523 dst_can_be_shared = false;
3525 intersect_loc_chains (val, nodep, dsm,
3526 s1var->var_part[0].loc_chain, s2var);
3532 dvar = (variable) pool_alloc (dv_pool (dv));
3535 dvar->n_var_parts = 1;
3536 dvar->cur_loc_changed = false;
3537 dvar->in_changed_variables = false;
3538 dvar->var_part[0].offset = 0;
3539 dvar->var_part[0].loc_chain = node;
3540 dvar->var_part[0].cur_loc = NULL;
3543 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3545 gcc_assert (!*dstslot);
3553 nodep = &dvar->var_part[0].loc_chain;
3554 while ((node = *nodep))
3556 location_chain *nextp = &node->next;
3558 if (GET_CODE (node->loc) == REG)
3562 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3563 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3564 && dv_is_value_p (list->dv))
3568 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3570 /* If this value became canonical for another value that had
3571 this register, we want to leave it alone. */
3572 else if (dv_as_value (list->dv) != val)
3574 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3576 node->init, NULL_RTX);
3577 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3579 /* Since nextp points into the removed node, we can't
3580 use it. The pointer to the next node moved to nodep.
3581 However, if the variable we're walking is unshared
3582 during our walk, we'll keep walking the location list
3583 of the previously-shared variable, in which case the
3584 node won't have been removed, and we'll want to skip
3585 it. That's why we test *nodep here. */
3591 /* Canonicalization puts registers first, so we don't have to
3597 if (dvar != (variable)*dstslot)
3598 dvar = (variable)*dstslot;
3599 nodep = &dvar->var_part[0].loc_chain;
3603 /* Mark all referenced nodes for canonicalization, and make sure
3604 we have mutual equivalence links. */
3605 VALUE_RECURSED_INTO (val) = true;
3606 for (node = *nodep; node; node = node->next)
3607 if (GET_CODE (node->loc) == VALUE)
3609 VALUE_RECURSED_INTO (node->loc) = true;
3610 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3611 node->init, NULL, INSERT);
3614 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3615 gcc_assert (*dstslot == dvar);
3616 canonicalize_values_star (dstslot, dst);
3617 gcc_checking_assert (dstslot
3618 == shared_hash_find_slot_noinsert_1 (dst->vars,
3620 dvar = (variable)*dstslot;
3624 bool has_value = false, has_other = false;
3626 /* If we have one value and anything else, we're going to
3627 canonicalize this, so make sure all values have an entry in
3628 the table and are marked for canonicalization. */
3629 for (node = *nodep; node; node = node->next)
3631 if (GET_CODE (node->loc) == VALUE)
3633 /* If this was marked during register canonicalization,
3634 we know we have to canonicalize values. */
3649 if (has_value && has_other)
3651 for (node = *nodep; node; node = node->next)
3653 if (GET_CODE (node->loc) == VALUE)
3655 decl_or_value dv = dv_from_value (node->loc);
3658 if (shared_hash_shared (dst->vars))
3659 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3661 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3665 variable var = (variable) pool_alloc (dv_pool (dv));
3668 var->n_var_parts = 1;
3669 var->cur_loc_changed = false;
3670 var->in_changed_variables = false;
3671 var->var_part[0].offset = 0;
3672 var->var_part[0].loc_chain = NULL;
3673 var->var_part[0].cur_loc = NULL;
3677 VALUE_RECURSED_INTO (node->loc) = true;
3681 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3682 gcc_assert (*dstslot == dvar);
3683 canonicalize_values_star (dstslot, dst);
3684 gcc_checking_assert (dstslot
3685 == shared_hash_find_slot_noinsert_1 (dst->vars,
3687 dvar = (variable)*dstslot;
3691 if (!onepart_variable_different_p (dvar, s2var))
3693 variable_htab_free (dvar);
3694 *dstslot = dvar = s2var;
3697 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3699 variable_htab_free (dvar);
3700 *dstslot = dvar = s1var;
3702 dst_can_be_shared = false;
3705 dst_can_be_shared = false;
3710 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
3711 multi-part variable. Unions of multi-part variables and
3712 intersections of one-part ones will be handled in
3713 variable_merge_over_cur(). */
3716 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
3718 dataflow_set *dst = dsm->dst;
3719 decl_or_value dv = s2var->dv;
3720 bool onepart = dv_onepart_p (dv);
3724 void **dstp = shared_hash_find_slot (dst->vars, dv);
3730 dsm->src_onepart_cnt++;
3734 /* Combine dataflow set information from SRC2 into DST, using PDST
3735 to carry over information across passes. */
3738 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
3740 dataflow_set cur = *dst;
3741 dataflow_set *src1 = &cur;
3742 struct dfset_merge dsm;
3744 size_t src1_elems, src2_elems;
3748 src1_elems = htab_elements (shared_hash_htab (src1->vars));
3749 src2_elems = htab_elements (shared_hash_htab (src2->vars));
3750 dataflow_set_init (dst);
3751 dst->stack_adjust = cur.stack_adjust;
3752 shared_hash_destroy (dst->vars);
3753 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3754 dst->vars->refcount = 1;
3756 = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
3757 variable_htab_eq, variable_htab_free);
3759 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3760 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
3765 dsm.src_onepart_cnt = 0;
3767 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.src->vars), var, variable, hi)
3768 variable_merge_over_src (var, &dsm);
3769 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.cur->vars), var, variable, hi)
3770 variable_merge_over_cur (var, &dsm);
3772 if (dsm.src_onepart_cnt)
3773 dst_can_be_shared = false;
3775 dataflow_set_destroy (src1);
3778 /* Mark register equivalences. */
3781 dataflow_set_equiv_regs (dataflow_set *set)
3786 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3788 rtx canon[NUM_MACHINE_MODES];
3790 /* If the list is empty or one entry, no need to canonicalize
3792 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
3795 memset (canon, 0, sizeof (canon));
3797 for (list = set->regs[i]; list; list = list->next)
3798 if (list->offset == 0 && dv_is_value_p (list->dv))
3800 rtx val = dv_as_value (list->dv);
3801 rtx *cvalp = &canon[(int)GET_MODE (val)];
3804 if (canon_value_cmp (val, cval))
3808 for (list = set->regs[i]; list; list = list->next)
3809 if (list->offset == 0 && dv_onepart_p (list->dv))
3811 rtx cval = canon[(int)GET_MODE (list->loc)];
3816 if (dv_is_value_p (list->dv))
3818 rtx val = dv_as_value (list->dv);
3823 VALUE_RECURSED_INTO (val) = true;
3824 set_variable_part (set, val, dv_from_value (cval), 0,
3825 VAR_INIT_STATUS_INITIALIZED,
3829 VALUE_RECURSED_INTO (cval) = true;
3830 set_variable_part (set, cval, list->dv, 0,
3831 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
3834 for (listp = &set->regs[i]; (list = *listp);
3835 listp = list ? &list->next : listp)
3836 if (list->offset == 0 && dv_onepart_p (list->dv))
3838 rtx cval = canon[(int)GET_MODE (list->loc)];
3844 if (dv_is_value_p (list->dv))
3846 rtx val = dv_as_value (list->dv);
3847 if (!VALUE_RECURSED_INTO (val))
3851 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3852 canonicalize_values_star (slot, set);
3859 /* Remove any redundant values in the location list of VAR, which must
3860 be unshared and 1-part. */
3863 remove_duplicate_values (variable var)
3865 location_chain node, *nodep;
3867 gcc_assert (dv_onepart_p (var->dv));
3868 gcc_assert (var->n_var_parts == 1);
3869 gcc_assert (var->refcount == 1);
3871 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3873 if (GET_CODE (node->loc) == VALUE)
3875 if (VALUE_RECURSED_INTO (node->loc))
3877 /* Remove duplicate value node. */
3878 *nodep = node->next;
3879 pool_free (loc_chain_pool, node);
3883 VALUE_RECURSED_INTO (node->loc) = true;
3885 nodep = &node->next;
3888 for (node = var->var_part[0].loc_chain; node; node = node->next)
3889 if (GET_CODE (node->loc) == VALUE)
3891 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3892 VALUE_RECURSED_INTO (node->loc) = false;
3897 /* Hash table iteration argument passed to variable_post_merge. */
3898 struct dfset_post_merge
3900 /* The new input set for the current block. */
3902 /* Pointer to the permanent input set for the current block, or
3904 dataflow_set **permp;
3907 /* Create values for incoming expressions associated with one-part
3908 variables that don't have value numbers for them. */
3911 variable_post_merge_new_vals (void **slot, void *info)
3913 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3914 dataflow_set *set = dfpm->set;
3915 variable var = (variable)*slot;
3916 location_chain node;
3918 if (!dv_onepart_p (var->dv) || !var->n_var_parts)
3921 gcc_assert (var->n_var_parts == 1);
3923 if (dv_is_decl_p (var->dv))
3925 bool check_dupes = false;
3928 for (node = var->var_part[0].loc_chain; node; node = node->next)
3930 if (GET_CODE (node->loc) == VALUE)
3931 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
3932 else if (GET_CODE (node->loc) == REG)
3934 attrs att, *attp, *curp = NULL;
3936 if (var->refcount != 1)
3938 slot = unshare_variable (set, slot, var,
3939 VAR_INIT_STATUS_INITIALIZED);
3940 var = (variable)*slot;
3944 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3946 if (att->offset == 0
3947 && GET_MODE (att->loc) == GET_MODE (node->loc))
3949 if (dv_is_value_p (att->dv))
3951 rtx cval = dv_as_value (att->dv);
3956 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
3964 if ((*curp)->offset == 0
3965 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
3966 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
3969 curp = &(*curp)->next;
3980 *dfpm->permp = XNEW (dataflow_set);
3981 dataflow_set_init (*dfpm->permp);
3984 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
3985 att; att = att->next)
3986 if (GET_MODE (att->loc) == GET_MODE (node->loc))
3988 gcc_assert (att->offset == 0
3989 && dv_is_value_p (att->dv));
3990 val_reset (set, att->dv);
3997 cval = dv_as_value (cdv);
4001 /* Create a unique value to hold this register,
4002 that ought to be found and reused in
4003 subsequent rounds. */
4005 gcc_assert (!cselib_lookup (node->loc,
4006 GET_MODE (node->loc), 0,
4008 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4010 cselib_preserve_value (v);
4011 cselib_invalidate_rtx (node->loc);
4013 cdv = dv_from_value (cval);
4016 "Created new value %u:%u for reg %i\n",
4017 v->uid, v->hash, REGNO (node->loc));
4020 var_reg_decl_set (*dfpm->permp, node->loc,
4021 VAR_INIT_STATUS_INITIALIZED,
4022 cdv, 0, NULL, INSERT);
4028 /* Remove attribute referring to the decl, which now
4029 uses the value for the register, already existing or
4030 to be added when we bring perm in. */
4033 pool_free (attrs_pool, att);
4038 remove_duplicate_values (var);
4044 /* Reset values in the permanent set that are not associated with the
4045 chosen expression. */
4048 variable_post_merge_perm_vals (void **pslot, void *info)
4050 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
4051 dataflow_set *set = dfpm->set;
4052 variable pvar = (variable)*pslot, var;
4053 location_chain pnode;
4057 gcc_assert (dv_is_value_p (pvar->dv)
4058 && pvar->n_var_parts == 1);
4059 pnode = pvar->var_part[0].loc_chain;
4062 && REG_P (pnode->loc));
4066 var = shared_hash_find (set->vars, dv);
4069 /* Although variable_post_merge_new_vals may have made decls
4070 non-star-canonical, values that pre-existed in canonical form
4071 remain canonical, and newly-created values reference a single
4072 REG, so they are canonical as well. Since VAR has the
4073 location list for a VALUE, using find_loc_in_1pdv for it is
4074 fine, since VALUEs don't map back to DECLs. */
4075 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4077 val_reset (set, dv);
4080 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4081 if (att->offset == 0
4082 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4083 && dv_is_value_p (att->dv))
4086 /* If there is a value associated with this register already, create
4088 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4090 rtx cval = dv_as_value (att->dv);
4091 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4092 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4097 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4099 variable_union (pvar, set);
4105 /* Just checking stuff and registering register attributes for
4109 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4111 struct dfset_post_merge dfpm;
4116 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
4119 htab_traverse (shared_hash_htab ((*permp)->vars),
4120 variable_post_merge_perm_vals, &dfpm);
4121 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
4122 htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
4125 /* Return a node whose loc is a MEM that refers to EXPR in the
4126 location list of a one-part variable or value VAR, or in that of
4127 any values recursively mentioned in the location lists. */
4129 static location_chain
4130 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
4132 location_chain node;
4135 location_chain where = NULL;
4140 gcc_assert (GET_CODE (val) == VALUE
4141 && !VALUE_RECURSED_INTO (val));
4143 dv = dv_from_value (val);
4144 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
4149 gcc_assert (dv_onepart_p (var->dv));
4151 if (!var->n_var_parts)
4154 gcc_assert (var->var_part[0].offset == 0);
4156 VALUE_RECURSED_INTO (val) = true;
4158 for (node = var->var_part[0].loc_chain; node; node = node->next)
4159 if (MEM_P (node->loc)
4160 && MEM_EXPR (node->loc) == expr
4161 && INT_MEM_OFFSET (node->loc) == 0)
4166 else if (GET_CODE (node->loc) == VALUE
4167 && !VALUE_RECURSED_INTO (node->loc)
4168 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4171 VALUE_RECURSED_INTO (val) = false;
4176 /* Return TRUE if the value of MEM may vary across a call. */
4179 mem_dies_at_call (rtx mem)
4181 tree expr = MEM_EXPR (mem);
4187 decl = get_base_address (expr);
4195 return (may_be_aliased (decl)
4196 || (!TREE_READONLY (decl) && is_global_var (decl)));
4199 /* Remove all MEMs from the location list of a hash table entry for a
4200 one-part variable, except those whose MEM attributes map back to
4201 the variable itself, directly or within a VALUE. */
4204 dataflow_set_preserve_mem_locs (void **slot, void *data)
4206 dataflow_set *set = (dataflow_set *) data;
4207 variable var = (variable) *slot;
4209 if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
4211 tree decl = dv_as_decl (var->dv);
4212 location_chain loc, *locp;
4213 bool changed = false;
4215 if (!var->n_var_parts)
4218 gcc_assert (var->n_var_parts == 1);
4220 if (shared_var_p (var, set->vars))
4222 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4224 /* We want to remove dying MEMs that doesn't refer to DECL. */
4225 if (GET_CODE (loc->loc) == MEM
4226 && (MEM_EXPR (loc->loc) != decl
4227 || INT_MEM_OFFSET (loc->loc) != 0)
4228 && !mem_dies_at_call (loc->loc))
4230 /* We want to move here MEMs that do refer to DECL. */
4231 else if (GET_CODE (loc->loc) == VALUE
4232 && find_mem_expr_in_1pdv (decl, loc->loc,
4233 shared_hash_htab (set->vars)))
4240 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4241 var = (variable)*slot;
4242 gcc_assert (var->n_var_parts == 1);
4245 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4248 rtx old_loc = loc->loc;
4249 if (GET_CODE (old_loc) == VALUE)
4251 location_chain mem_node
4252 = find_mem_expr_in_1pdv (decl, loc->loc,
4253 shared_hash_htab (set->vars));
4255 /* ??? This picks up only one out of multiple MEMs that
4256 refer to the same variable. Do we ever need to be
4257 concerned about dealing with more than one, or, given
4258 that they should all map to the same variable
4259 location, their addresses will have been merged and
4260 they will be regarded as equivalent? */
4263 loc->loc = mem_node->loc;
4264 loc->set_src = mem_node->set_src;
4265 loc->init = MIN (loc->init, mem_node->init);
4269 if (GET_CODE (loc->loc) != MEM
4270 || (MEM_EXPR (loc->loc) == decl
4271 && INT_MEM_OFFSET (loc->loc) == 0)
4272 || !mem_dies_at_call (loc->loc))
4274 if (old_loc != loc->loc && emit_notes)
4276 if (old_loc == var->var_part[0].cur_loc)
4279 var->var_part[0].cur_loc = NULL;
4280 var->cur_loc_changed = true;
4282 add_value_chains (var->dv, loc->loc);
4283 remove_value_chains (var->dv, old_loc);
4291 remove_value_chains (var->dv, old_loc);
4292 if (old_loc == var->var_part[0].cur_loc)
4295 var->var_part[0].cur_loc = NULL;
4296 var->cur_loc_changed = true;
4300 pool_free (loc_chain_pool, loc);
4303 if (!var->var_part[0].loc_chain)
4309 variable_was_changed (var, set);
4315 /* Remove all MEMs from the location list of a hash table entry for a
4319 dataflow_set_remove_mem_locs (void **slot, void *data)
4321 dataflow_set *set = (dataflow_set *) data;
4322 variable var = (variable) *slot;
4324 if (dv_is_value_p (var->dv))
4326 location_chain loc, *locp;
4327 bool changed = false;
4329 gcc_assert (var->n_var_parts == 1);
4331 if (shared_var_p (var, set->vars))
4333 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4334 if (GET_CODE (loc->loc) == MEM
4335 && mem_dies_at_call (loc->loc))
4341 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4342 var = (variable)*slot;
4343 gcc_assert (var->n_var_parts == 1);
4346 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4349 if (GET_CODE (loc->loc) != MEM
4350 || !mem_dies_at_call (loc->loc))
4357 remove_value_chains (var->dv, loc->loc);
4359 /* If we have deleted the location which was last emitted
4360 we have to emit new location so add the variable to set
4361 of changed variables. */
4362 if (var->var_part[0].cur_loc == loc->loc)
4365 var->var_part[0].cur_loc = NULL;
4366 var->cur_loc_changed = true;
4368 pool_free (loc_chain_pool, loc);
4371 if (!var->var_part[0].loc_chain)
4377 variable_was_changed (var, set);
4383 /* Remove all variable-location information about call-clobbered
4384 registers, as well as associations between MEMs and VALUEs. */
4387 dataflow_set_clear_at_call (dataflow_set *set)
4391 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
4392 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
4393 var_regno_delete (set, r);
4395 if (MAY_HAVE_DEBUG_INSNS)
4397 set->traversed_vars = set->vars;
4398 htab_traverse (shared_hash_htab (set->vars),
4399 dataflow_set_preserve_mem_locs, set);
4400 set->traversed_vars = set->vars;
4401 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
4403 set->traversed_vars = NULL;
4408 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4410 location_chain lc1, lc2;
4412 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4414 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4416 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4418 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4421 if (rtx_equal_p (lc1->loc, lc2->loc))
4430 /* Return true if one-part variables VAR1 and VAR2 are different.
4431 They must be in canonical order. */
4434 onepart_variable_different_p (variable var1, variable var2)
4436 location_chain lc1, lc2;
4441 gcc_assert (var1->n_var_parts == 1
4442 && var2->n_var_parts == 1);
4444 lc1 = var1->var_part[0].loc_chain;
4445 lc2 = var2->var_part[0].loc_chain;
4447 gcc_assert (lc1 && lc2);
4451 if (loc_cmp (lc1->loc, lc2->loc))
4460 /* Return true if variables VAR1 and VAR2 are different. */
4463 variable_different_p (variable var1, variable var2)
4470 if (var1->n_var_parts != var2->n_var_parts)
4473 for (i = 0; i < var1->n_var_parts; i++)
4475 if (var1->var_part[i].offset != var2->var_part[i].offset)
4477 /* One-part values have locations in a canonical order. */
4478 if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
4480 gcc_assert (var1->n_var_parts == 1
4481 && dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4482 return onepart_variable_different_p (var1, var2);
4484 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4486 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4492 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4495 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4500 if (old_set->vars == new_set->vars)
4503 if (htab_elements (shared_hash_htab (old_set->vars))
4504 != htab_elements (shared_hash_htab (new_set->vars)))
4507 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (old_set->vars), var1, variable, hi)
4509 htab_t htab = shared_hash_htab (new_set->vars);
4510 variable var2 = (variable) htab_find_with_hash (htab, var1->dv,
4511 dv_htab_hash (var1->dv));
4514 if (dump_file && (dump_flags & TDF_DETAILS))
4516 fprintf (dump_file, "dataflow difference found: removal of:\n");
4522 if (variable_different_p (var1, var2))
4524 if (dump_file && (dump_flags & TDF_DETAILS))
4526 fprintf (dump_file, "dataflow difference found: "
4527 "old and new follow:\n");
4535 /* No need to traverse the second hashtab, if both have the same number
4536 of elements and the second one had all entries found in the first one,
4537 then it can't have any extra entries. */
4541 /* Free the contents of dataflow set SET. */
4544 dataflow_set_destroy (dataflow_set *set)
4548 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4549 attrs_list_clear (&set->regs[i]);
4551 shared_hash_destroy (set->vars);
4555 /* Return true if RTL X contains a SYMBOL_REF. */
4558 contains_symbol_ref (rtx x)
4567 code = GET_CODE (x);
4568 if (code == SYMBOL_REF)
4571 fmt = GET_RTX_FORMAT (code);
4572 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4576 if (contains_symbol_ref (XEXP (x, i)))
4579 else if (fmt[i] == 'E')
4582 for (j = 0; j < XVECLEN (x, i); j++)
4583 if (contains_symbol_ref (XVECEXP (x, i, j)))
4591 /* Shall EXPR be tracked? */
4594 track_expr_p (tree expr, bool need_rtl)
4599 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4600 return DECL_RTL_SET_P (expr);
4602 /* If EXPR is not a parameter or a variable do not track it. */
4603 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4606 /* It also must have a name... */
4607 if (!DECL_NAME (expr) && need_rtl)
4610 /* ... and a RTL assigned to it. */
4611 decl_rtl = DECL_RTL_IF_SET (expr);
4612 if (!decl_rtl && need_rtl)
4615 /* If this expression is really a debug alias of some other declaration, we
4616 don't need to track this expression if the ultimate declaration is
4619 if (DECL_DEBUG_EXPR_IS_FROM (realdecl))
4621 realdecl = DECL_DEBUG_EXPR (realdecl);
4622 if (realdecl == NULL_TREE)
4624 else if (!DECL_P (realdecl))
4626 if (handled_component_p (realdecl))
4628 HOST_WIDE_INT bitsize, bitpos, maxsize;
4630 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
4632 if (!DECL_P (innerdecl)
4633 || DECL_IGNORED_P (innerdecl)
4634 || TREE_STATIC (innerdecl)
4636 || bitpos + bitsize > 256
4637 || bitsize != maxsize)
4647 /* Do not track EXPR if REALDECL it should be ignored for debugging
4649 if (DECL_IGNORED_P (realdecl))
4652 /* Do not track global variables until we are able to emit correct location
4654 if (TREE_STATIC (realdecl))
4657 /* When the EXPR is a DECL for alias of some variable (see example)
4658 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
4659 DECL_RTL contains SYMBOL_REF.
4662 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4665 if (decl_rtl && MEM_P (decl_rtl)
4666 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4669 /* If RTX is a memory it should not be very large (because it would be
4670 an array or struct). */
4671 if (decl_rtl && MEM_P (decl_rtl))
4673 /* Do not track structures and arrays. */
4674 if (GET_MODE (decl_rtl) == BLKmode
4675 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4677 if (MEM_SIZE_KNOWN_P (decl_rtl)
4678 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
4682 DECL_CHANGED (expr) = 0;
4683 DECL_CHANGED (realdecl) = 0;
4687 /* Determine whether a given LOC refers to the same variable part as
4691 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4694 HOST_WIDE_INT offset2;
4696 if (! DECL_P (expr))
4701 expr2 = REG_EXPR (loc);
4702 offset2 = REG_OFFSET (loc);
4704 else if (MEM_P (loc))
4706 expr2 = MEM_EXPR (loc);
4707 offset2 = INT_MEM_OFFSET (loc);
4712 if (! expr2 || ! DECL_P (expr2))
4715 expr = var_debug_decl (expr);
4716 expr2 = var_debug_decl (expr2);
4718 return (expr == expr2 && offset == offset2);
4721 /* LOC is a REG or MEM that we would like to track if possible.
4722 If EXPR is null, we don't know what expression LOC refers to,
4723 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4724 LOC is an lvalue register.
4726 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4727 is something we can track. When returning true, store the mode of
4728 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4729 from EXPR in *OFFSET_OUT (if nonnull). */
4732 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4733 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4735 enum machine_mode mode;
4737 if (expr == NULL || !track_expr_p (expr, true))
4740 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4741 whole subreg, but only the old inner part is really relevant. */
4742 mode = GET_MODE (loc);
4743 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4745 enum machine_mode pseudo_mode;
4747 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4748 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
4750 offset += byte_lowpart_offset (pseudo_mode, mode);
4755 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4756 Do the same if we are storing to a register and EXPR occupies
4757 the whole of register LOC; in that case, the whole of EXPR is
4758 being changed. We exclude complex modes from the second case
4759 because the real and imaginary parts are represented as separate
4760 pseudo registers, even if the whole complex value fits into one
4762 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4764 && !COMPLEX_MODE_P (DECL_MODE (expr))
4765 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4766 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4768 mode = DECL_MODE (expr);
4772 if (offset < 0 || offset >= MAX_VAR_PARTS)
4778 *offset_out = offset;
4782 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4783 want to track. When returning nonnull, make sure that the attributes
4784 on the returned value are updated. */
4787 var_lowpart (enum machine_mode mode, rtx loc)
4789 unsigned int offset, reg_offset, regno;
4791 if (!REG_P (loc) && !MEM_P (loc))
4794 if (GET_MODE (loc) == mode)
4797 offset = byte_lowpart_offset (mode, GET_MODE (loc));
4800 return adjust_address_nv (loc, mode, offset);
4802 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4803 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4805 return gen_rtx_REG_offset (loc, mode, regno, offset);
4808 /* Carry information about uses and stores while walking rtx. */
4810 struct count_use_info
4812 /* The insn where the RTX is. */
4815 /* The basic block where insn is. */
4818 /* The array of n_sets sets in the insn, as determined by cselib. */
4819 struct cselib_set *sets;
4822 /* True if we're counting stores, false otherwise. */
4826 /* Find a VALUE corresponding to X. */
4828 static inline cselib_val *
4829 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4835 /* This is called after uses are set up and before stores are
4836 processed by cselib, so it's safe to look up srcs, but not
4837 dsts. So we look up expressions that appear in srcs or in
4838 dest expressions, but we search the sets array for dests of
4842 /* Some targets represent memset and memcpy patterns
4843 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
4844 (set (mem:BLK ...) (const_int ...)) or
4845 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
4846 in that case, otherwise we end up with mode mismatches. */
4847 if (mode == BLKmode && MEM_P (x))
4849 for (i = 0; i < cui->n_sets; i++)
4850 if (cui->sets[i].dest == x)
4851 return cui->sets[i].src_elt;
4854 return cselib_lookup (x, mode, 0, VOIDmode);
4860 /* Helper function to get mode of MEM's address. */
4862 static inline enum machine_mode
4863 get_address_mode (rtx mem)
4865 enum machine_mode mode = GET_MODE (XEXP (mem, 0));
4866 if (mode != VOIDmode)
4868 return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
4871 /* Replace all registers and addresses in an expression with VALUE
4872 expressions that map back to them, unless the expression is a
4873 register. If no mapping is or can be performed, returns NULL. */
4876 replace_expr_with_values (rtx loc)
4878 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
4880 else if (MEM_P (loc))
4882 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
4883 get_address_mode (loc), 0,
4886 return replace_equiv_address_nv (loc, addr->val_rtx);
4891 return cselib_subst_to_values (loc, VOIDmode);
4894 /* Determine what kind of micro operation to choose for a USE. Return
4895 MO_CLOBBER if no micro operation is to be generated. */
4897 static enum micro_operation_type
4898 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
4902 if (cui && cui->sets)
4904 if (GET_CODE (loc) == VAR_LOCATION)
4906 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
4908 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
4909 if (! VAR_LOC_UNKNOWN_P (ploc))
4911 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
4914 /* ??? flag_float_store and volatile mems are never
4915 given values, but we could in theory use them for
4917 gcc_assert (val || 1);
4925 if (REG_P (loc) || MEM_P (loc))
4928 *modep = GET_MODE (loc);
4932 || (find_use_val (loc, GET_MODE (loc), cui)
4933 && cselib_lookup (XEXP (loc, 0),
4934 get_address_mode (loc), 0,
4940 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4942 if (val && !cselib_preserved_value_p (val))
4950 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
4952 if (loc == cfa_base_rtx)
4954 expr = REG_EXPR (loc);
4957 return MO_USE_NO_VAR;
4958 else if (target_for_debug_bind (var_debug_decl (expr)))
4960 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
4961 false, modep, NULL))
4964 return MO_USE_NO_VAR;
4966 else if (MEM_P (loc))
4968 expr = MEM_EXPR (loc);
4972 else if (target_for_debug_bind (var_debug_decl (expr)))
4974 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
4975 false, modep, NULL))
4984 /* Log to OUT information about micro-operation MOPT involving X in
4988 log_op_type (rtx x, basic_block bb, rtx insn,
4989 enum micro_operation_type mopt, FILE *out)
4991 fprintf (out, "bb %i op %i insn %i %s ",
4992 bb->index, VEC_length (micro_operation, VTI (bb)->mos),
4993 INSN_UID (insn), micro_operation_type_name[mopt]);
4994 print_inline_rtx (out, x, 2);
4998 /* Tell whether the CONCAT used to holds a VALUE and its location
4999 needs value resolution, i.e., an attempt of mapping the location
5000 back to other incoming values. */
5001 #define VAL_NEEDS_RESOLUTION(x) \
5002 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5003 /* Whether the location in the CONCAT is a tracked expression, that
5004 should also be handled like a MO_USE. */
5005 #define VAL_HOLDS_TRACK_EXPR(x) \
5006 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5007 /* Whether the location in the CONCAT should be handled like a MO_COPY
5009 #define VAL_EXPR_IS_COPIED(x) \
5010 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5011 /* Whether the location in the CONCAT should be handled like a
5012 MO_CLOBBER as well. */
5013 #define VAL_EXPR_IS_CLOBBERED(x) \
5014 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5015 /* Whether the location is a CONCAT of the MO_VAL_SET expression and
5016 a reverse operation that should be handled afterwards. */
5017 #define VAL_EXPR_HAS_REVERSE(x) \
5018 (RTL_FLAG_CHECK1 ("VAL_EXPR_HAS_REVERSE", (x), CONCAT)->return_val)
5020 /* All preserved VALUEs. */
5021 static VEC (rtx, heap) *preserved_values;
5023 /* Registers used in the current function for passing parameters. */
5024 static HARD_REG_SET argument_reg_set;
5026 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5029 preserve_value (cselib_val *val)
5031 cselib_preserve_value (val);
5032 VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
5035 /* Helper function for MO_VAL_LOC handling. Return non-zero if
5036 any rtxes not suitable for CONST use not replaced by VALUEs
5040 non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
5045 switch (GET_CODE (*x))
5056 return !MEM_READONLY_P (*x);
5062 /* Add uses (register and memory references) LOC which will be tracked
5063 to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
5066 add_uses (rtx *ploc, void *data)
5069 enum machine_mode mode = VOIDmode;
5070 struct count_use_info *cui = (struct count_use_info *)data;
5071 enum micro_operation_type type = use_type (loc, cui, &mode);
5073 if (type != MO_CLOBBER)
5075 basic_block bb = cui->bb;
5079 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5080 mo.insn = cui->insn;
5082 if (type == MO_VAL_LOC)
5085 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5088 gcc_assert (cui->sets);
5091 && !REG_P (XEXP (vloc, 0))
5092 && !MEM_P (XEXP (vloc, 0))
5093 && GET_CODE (XEXP (vloc, 0)) != ENTRY_VALUE
5094 && (GET_CODE (XEXP (vloc, 0)) != PLUS
5095 || XEXP (XEXP (vloc, 0), 0) != cfa_base_rtx
5096 || !CONST_INT_P (XEXP (XEXP (vloc, 0), 1))))
5099 enum machine_mode address_mode = get_address_mode (mloc);
5101 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5104 if (val && !cselib_preserved_value_p (val))
5106 micro_operation moa;
5107 preserve_value (val);
5108 mloc = cselib_subst_to_values (XEXP (mloc, 0),
5110 moa.type = MO_VAL_USE;
5111 moa.insn = cui->insn;
5112 moa.u.loc = gen_rtx_CONCAT (address_mode,
5113 val->val_rtx, mloc);
5114 if (dump_file && (dump_flags & TDF_DETAILS))
5115 log_op_type (moa.u.loc, cui->bb, cui->insn,
5116 moa.type, dump_file);
5117 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5121 if (CONSTANT_P (vloc)
5122 && (GET_CODE (vloc) != CONST
5123 || for_each_rtx (&vloc, non_suitable_const, NULL)))
5124 /* For constants don't look up any value. */;
5125 else if (!VAR_LOC_UNKNOWN_P (vloc)
5126 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5128 enum machine_mode mode2;
5129 enum micro_operation_type type2;
5130 rtx nloc = replace_expr_with_values (vloc);
5134 oloc = shallow_copy_rtx (oloc);
5135 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5138 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5140 type2 = use_type (vloc, 0, &mode2);
5142 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5143 || type2 == MO_CLOBBER);
5145 if (type2 == MO_CLOBBER
5146 && !cselib_preserved_value_p (val))
5148 VAL_NEEDS_RESOLUTION (oloc) = 1;
5149 preserve_value (val);
5152 else if (!VAR_LOC_UNKNOWN_P (vloc))
5154 oloc = shallow_copy_rtx (oloc);
5155 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5160 else if (type == MO_VAL_USE)
5162 enum machine_mode mode2 = VOIDmode;
5163 enum micro_operation_type type2;
5164 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5165 rtx vloc, oloc = loc, nloc;
5167 gcc_assert (cui->sets);
5170 && !REG_P (XEXP (oloc, 0))
5171 && !MEM_P (XEXP (oloc, 0))
5172 && GET_CODE (XEXP (oloc, 0)) != ENTRY_VALUE
5173 && (GET_CODE (XEXP (oloc, 0)) != PLUS
5174 || XEXP (XEXP (oloc, 0), 0) != cfa_base_rtx
5175 || !CONST_INT_P (XEXP (XEXP (oloc, 0), 1))))
5178 enum machine_mode address_mode = get_address_mode (mloc);
5180 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5183 if (val && !cselib_preserved_value_p (val))
5185 micro_operation moa;
5186 preserve_value (val);
5187 mloc = cselib_subst_to_values (XEXP (mloc, 0),
5189 moa.type = MO_VAL_USE;
5190 moa.insn = cui->insn;
5191 moa.u.loc = gen_rtx_CONCAT (address_mode,
5192 val->val_rtx, mloc);
5193 if (dump_file && (dump_flags & TDF_DETAILS))
5194 log_op_type (moa.u.loc, cui->bb, cui->insn,
5195 moa.type, dump_file);
5196 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5200 type2 = use_type (loc, 0, &mode2);
5202 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5203 || type2 == MO_CLOBBER);
5205 if (type2 == MO_USE)
5206 vloc = var_lowpart (mode2, loc);
5210 /* The loc of a MO_VAL_USE may have two forms:
5212 (concat val src): val is at src, a value-based
5215 (concat (concat val use) src): same as above, with use as
5216 the MO_USE tracked value, if it differs from src.
5220 nloc = replace_expr_with_values (loc);
5225 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5227 oloc = val->val_rtx;
5229 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5231 if (type2 == MO_USE)
5232 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5233 if (!cselib_preserved_value_p (val))
5235 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5236 preserve_value (val);
5240 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5242 if (dump_file && (dump_flags & TDF_DETAILS))
5243 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5244 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5250 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5253 add_uses_1 (rtx *x, void *cui)
5255 for_each_rtx (x, add_uses, cui);
5258 #define EXPR_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5260 /* Attempt to reverse the EXPR operation in the debug info. Say for
5261 reg1 = reg2 + 6 even when reg2 is no longer live we
5262 can express its value as VAL - 6. */
5265 reverse_op (rtx val, const_rtx expr)
5271 if (GET_CODE (expr) != SET)
5274 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5277 src = SET_SRC (expr);
5278 switch (GET_CODE (src))
5285 if (!REG_P (XEXP (src, 0)))
5290 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5297 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5300 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5301 if (!v || !cselib_preserved_value_p (v))
5304 switch (GET_CODE (src))
5308 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5310 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5314 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5326 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5328 arg = XEXP (src, 1);
5329 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5331 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5332 if (arg == NULL_RTX)
5334 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5337 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5339 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5340 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5341 breaks a lot of routines during var-tracking. */
5342 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5348 return gen_rtx_CONCAT (GET_MODE (v->val_rtx), v->val_rtx, ret);
5351 /* Add stores (register and memory references) LOC which will be tracked
5352 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5353 CUIP->insn is instruction which the LOC is part of. */
5356 add_stores (rtx loc, const_rtx expr, void *cuip)
5358 enum machine_mode mode = VOIDmode, mode2;
5359 struct count_use_info *cui = (struct count_use_info *)cuip;
5360 basic_block bb = cui->bb;
5362 rtx oloc = loc, nloc, src = NULL;
5363 enum micro_operation_type type = use_type (loc, cui, &mode);
5364 bool track_p = false;
5366 bool resolve, preserve;
5369 if (type == MO_CLOBBER)
5376 gcc_assert (loc != cfa_base_rtx);
5377 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5378 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5379 || GET_CODE (expr) == CLOBBER)
5381 mo.type = MO_CLOBBER;
5383 if (GET_CODE (expr) == SET
5384 && SET_DEST (expr) == loc
5385 && REGNO (loc) < FIRST_PSEUDO_REGISTER
5386 && TEST_HARD_REG_BIT (argument_reg_set, REGNO (loc))
5387 && find_use_val (loc, mode, cui)
5388 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5390 gcc_checking_assert (type == MO_VAL_SET);
5391 mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
5396 if (GET_CODE (expr) == SET
5397 && SET_DEST (expr) == loc
5398 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5399 src = var_lowpart (mode2, SET_SRC (expr));
5400 loc = var_lowpart (mode2, loc);
5409 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5410 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5417 mo.insn = cui->insn;
5419 else if (MEM_P (loc)
5420 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5423 if (MEM_P (loc) && type == MO_VAL_SET
5424 && !REG_P (XEXP (loc, 0))
5425 && !MEM_P (XEXP (loc, 0))
5426 && GET_CODE (XEXP (loc, 0)) != ENTRY_VALUE
5427 && (GET_CODE (XEXP (loc, 0)) != PLUS
5428 || XEXP (XEXP (loc, 0), 0) != cfa_base_rtx
5429 || !CONST_INT_P (XEXP (XEXP (loc, 0), 1))))
5432 enum machine_mode address_mode = get_address_mode (mloc);
5433 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5437 if (val && !cselib_preserved_value_p (val))
5439 preserve_value (val);
5440 mo.type = MO_VAL_USE;
5441 mloc = cselib_subst_to_values (XEXP (mloc, 0),
5443 mo.u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
5444 mo.insn = cui->insn;
5445 if (dump_file && (dump_flags & TDF_DETAILS))
5446 log_op_type (mo.u.loc, cui->bb, cui->insn,
5447 mo.type, dump_file);
5448 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5452 if (GET_CODE (expr) == CLOBBER || !track_p)
5454 mo.type = MO_CLOBBER;
5455 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5459 if (GET_CODE (expr) == SET
5460 && SET_DEST (expr) == loc
5461 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5462 src = var_lowpart (mode2, SET_SRC (expr));
5463 loc = var_lowpart (mode2, loc);
5472 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5473 if (same_variable_part_p (SET_SRC (xexpr),
5475 INT_MEM_OFFSET (loc)))
5482 mo.insn = cui->insn;
5487 if (type != MO_VAL_SET)
5488 goto log_and_return;
5490 v = find_use_val (oloc, mode, cui);
5493 goto log_and_return;
5495 resolve = preserve = !cselib_preserved_value_p (v);
5497 nloc = replace_expr_with_values (oloc);
5501 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5503 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
5505 gcc_assert (oval != v);
5506 gcc_assert (REG_P (oloc) || MEM_P (oloc));
5508 if (!cselib_preserved_value_p (oval))
5510 micro_operation moa;
5512 preserve_value (oval);
5514 moa.type = MO_VAL_USE;
5515 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5516 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
5517 moa.insn = cui->insn;
5519 if (dump_file && (dump_flags & TDF_DETAILS))
5520 log_op_type (moa.u.loc, cui->bb, cui->insn,
5521 moa.type, dump_file);
5522 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5527 else if (resolve && GET_CODE (mo.u.loc) == SET)
5529 nloc = replace_expr_with_values (SET_SRC (expr));
5531 /* Avoid the mode mismatch between oexpr and expr. */
5532 if (!nloc && mode != mode2)
5534 nloc = SET_SRC (expr);
5535 gcc_assert (oloc == SET_DEST (expr));
5539 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
5542 if (oloc == SET_DEST (mo.u.loc))
5543 /* No point in duplicating. */
5545 if (!REG_P (SET_SRC (mo.u.loc)))
5551 if (GET_CODE (mo.u.loc) == SET
5552 && oloc == SET_DEST (mo.u.loc))
5553 /* No point in duplicating. */
5559 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5561 if (mo.u.loc != oloc)
5562 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
5564 /* The loc of a MO_VAL_SET may have various forms:
5566 (concat val dst): dst now holds val
5568 (concat val (set dst src)): dst now holds val, copied from src
5570 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5571 after replacing mems and non-top-level regs with values.
5573 (concat (concat val dstv) (set dst src)): dst now holds val,
5574 copied from src. dstv is a value-based representation of dst, if
5575 it differs from dst. If resolution is needed, src is a REG, and
5576 its mode is the same as that of val.
5578 (concat (concat val (set dstv srcv)) (set dst src)): src
5579 copied to dst, holding val. dstv and srcv are value-based
5580 representations of dst and src, respectively.
5584 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
5586 reverse = reverse_op (v->val_rtx, expr);
5589 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, reverse);
5590 VAL_EXPR_HAS_REVERSE (loc) = 1;
5597 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5600 VAL_NEEDS_RESOLUTION (loc) = resolve;
5603 if (mo.type == MO_CLOBBER)
5604 VAL_EXPR_IS_CLOBBERED (loc) = 1;
5605 if (mo.type == MO_COPY)
5606 VAL_EXPR_IS_COPIED (loc) = 1;
5608 mo.type = MO_VAL_SET;
5611 if (dump_file && (dump_flags & TDF_DETAILS))
5612 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5613 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5616 /* Arguments to the call. */
5617 static rtx call_arguments;
5619 /* Compute call_arguments. */
5622 prepare_call_arguments (basic_block bb, rtx insn)
5625 rtx prev, cur, next;
5626 rtx call = PATTERN (insn);
5627 rtx this_arg = NULL_RTX;
5628 tree type = NULL_TREE, t, fndecl = NULL_TREE;
5629 tree obj_type_ref = NULL_TREE;
5630 CUMULATIVE_ARGS args_so_far_v;
5631 cumulative_args_t args_so_far;
5633 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
5634 args_so_far = pack_cumulative_args (&args_so_far_v);
5635 if (GET_CODE (call) == PARALLEL)
5636 call = XVECEXP (call, 0, 0);
5637 if (GET_CODE (call) == SET)
5638 call = SET_SRC (call);
5639 if (GET_CODE (call) == CALL && MEM_P (XEXP (call, 0)))
5641 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
5643 rtx symbol = XEXP (XEXP (call, 0), 0);
5644 if (SYMBOL_REF_DECL (symbol))
5645 fndecl = SYMBOL_REF_DECL (symbol);
5647 if (fndecl == NULL_TREE)
5648 fndecl = MEM_EXPR (XEXP (call, 0));
5650 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
5651 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
5653 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5654 type = TREE_TYPE (fndecl);
5655 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
5657 if (TREE_CODE (fndecl) == INDIRECT_REF
5658 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
5659 obj_type_ref = TREE_OPERAND (fndecl, 0);
5664 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
5666 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
5667 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
5669 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
5673 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
5674 link = CALL_INSN_FUNCTION_USAGE (insn);
5675 #ifndef PCC_STATIC_STRUCT_RETURN
5676 if (aggregate_value_p (TREE_TYPE (type), type)
5677 && targetm.calls.struct_value_rtx (type, 0) == 0)
5679 tree struct_addr = build_pointer_type (TREE_TYPE (type));
5680 enum machine_mode mode = TYPE_MODE (struct_addr);
5682 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
5684 reg = targetm.calls.function_arg (args_so_far, mode,
5686 targetm.calls.function_arg_advance (args_so_far, mode,
5688 if (reg == NULL_RTX)
5690 for (; link; link = XEXP (link, 1))
5691 if (GET_CODE (XEXP (link, 0)) == USE
5692 && MEM_P (XEXP (XEXP (link, 0), 0)))
5694 link = XEXP (link, 1);
5701 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
5703 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
5705 enum machine_mode mode;
5706 t = TYPE_ARG_TYPES (type);
5707 mode = TYPE_MODE (TREE_VALUE (t));
5708 this_arg = targetm.calls.function_arg (args_so_far, mode,
5709 TREE_VALUE (t), true);
5710 if (this_arg && !REG_P (this_arg))
5711 this_arg = NULL_RTX;
5712 else if (this_arg == NULL_RTX)
5714 for (; link; link = XEXP (link, 1))
5715 if (GET_CODE (XEXP (link, 0)) == USE
5716 && MEM_P (XEXP (XEXP (link, 0), 0)))
5718 this_arg = XEXP (XEXP (link, 0), 0);
5726 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
5728 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
5729 if (GET_CODE (XEXP (link, 0)) == USE)
5731 rtx item = NULL_RTX;
5732 x = XEXP (XEXP (link, 0), 0);
5735 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
5736 if (val && cselib_preserved_value_p (val))
5737 item = gen_rtx_CONCAT (GET_MODE (x), x, val->val_rtx);
5738 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
5740 enum machine_mode mode = GET_MODE (x);
5742 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
5743 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
5745 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
5747 if (reg == NULL_RTX || !REG_P (reg))
5749 val = cselib_lookup (reg, mode, 0, VOIDmode);
5750 if (val && cselib_preserved_value_p (val))
5752 item = gen_rtx_CONCAT (GET_MODE (x), x,
5753 lowpart_subreg (GET_MODE (x),
5766 if (!frame_pointer_needed)
5768 struct adjust_mem_data amd;
5769 amd.mem_mode = VOIDmode;
5770 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
5771 amd.side_effects = NULL_RTX;
5773 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
5775 gcc_assert (amd.side_effects == NULL_RTX);
5777 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
5778 if (val && cselib_preserved_value_p (val))
5779 item = gen_rtx_CONCAT (GET_MODE (x), copy_rtx (x), val->val_rtx);
5782 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
5783 if (t && t != void_list_node)
5785 tree argtype = TREE_VALUE (t);
5786 enum machine_mode mode = TYPE_MODE (argtype);
5788 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
5790 argtype = build_pointer_type (argtype);
5791 mode = TYPE_MODE (argtype);
5793 reg = targetm.calls.function_arg (args_so_far, mode,
5795 if (TREE_CODE (argtype) == REFERENCE_TYPE
5796 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
5799 && GET_MODE (reg) == mode
5800 && GET_MODE_CLASS (mode) == MODE_INT
5802 && REGNO (x) == REGNO (reg)
5803 && GET_MODE (x) == mode
5806 enum machine_mode indmode
5807 = TYPE_MODE (TREE_TYPE (argtype));
5808 rtx mem = gen_rtx_MEM (indmode, x);
5809 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
5810 if (val && cselib_preserved_value_p (val))
5812 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
5813 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
5818 struct elt_loc_list *l;
5821 /* Try harder, when passing address of a constant
5822 pool integer it can be easily read back. */
5823 item = XEXP (item, 1);
5824 if (GET_CODE (item) == SUBREG)
5825 item = SUBREG_REG (item);
5826 gcc_assert (GET_CODE (item) == VALUE);
5827 val = CSELIB_VAL_PTR (item);
5828 for (l = val->locs; l; l = l->next)
5829 if (GET_CODE (l->loc) == SYMBOL_REF
5830 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
5831 && SYMBOL_REF_DECL (l->loc)
5832 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
5834 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
5835 if (host_integerp (initial, 0))
5837 item = GEN_INT (tree_low_cst (initial, 0));
5838 item = gen_rtx_CONCAT (indmode, mem, item);
5840 = gen_rtx_EXPR_LIST (VOIDmode, item,
5847 targetm.calls.function_arg_advance (args_so_far, mode,
5853 /* Add debug arguments. */
5855 && TREE_CODE (fndecl) == FUNCTION_DECL
5856 && DECL_HAS_DEBUG_ARGS_P (fndecl))
5858 VEC(tree, gc) **debug_args = decl_debug_args_lookup (fndecl);
5863 for (ix = 0; VEC_iterate (tree, *debug_args, ix, param); ix += 2)
5866 tree dtemp = VEC_index (tree, *debug_args, ix + 1);
5867 enum machine_mode mode = DECL_MODE (dtemp);
5868 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
5869 item = gen_rtx_CONCAT (mode, item, DECL_RTL (dtemp));
5870 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
5876 /* Reverse call_arguments chain. */
5878 for (cur = call_arguments; cur; cur = next)
5880 next = XEXP (cur, 1);
5881 XEXP (cur, 1) = prev;
5884 call_arguments = prev;
5887 if (GET_CODE (x) == PARALLEL)
5888 x = XVECEXP (x, 0, 0);
5889 if (GET_CODE (x) == SET)
5891 if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
5893 x = XEXP (XEXP (x, 0), 0);
5894 if (GET_CODE (x) == SYMBOL_REF)
5895 /* Don't record anything. */;
5896 else if (CONSTANT_P (x))
5898 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
5901 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
5905 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
5906 if (val && cselib_preserved_value_p (val))
5908 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
5910 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
5916 enum machine_mode mode
5917 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
5918 rtx clobbered = gen_rtx_MEM (mode, this_arg);
5920 = tree_low_cst (OBJ_TYPE_REF_TOKEN (obj_type_ref), 0);
5922 clobbered = plus_constant (clobbered, token * GET_MODE_SIZE (mode));
5923 clobbered = gen_rtx_MEM (mode, clobbered);
5924 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
5926 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
5930 /* Callback for cselib_record_sets_hook, that records as micro
5931 operations uses and stores in an insn after cselib_record_sets has
5932 analyzed the sets in an insn, but before it modifies the stored
5933 values in the internal tables, unless cselib_record_sets doesn't
5934 call it directly (perhaps because we're not doing cselib in the
5935 first place, in which case sets and n_sets will be 0). */
5938 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
5940 basic_block bb = BLOCK_FOR_INSN (insn);
5942 struct count_use_info cui;
5943 micro_operation *mos;
5945 cselib_hook_called = true;
5950 cui.n_sets = n_sets;
5952 n1 = VEC_length (micro_operation, VTI (bb)->mos);
5953 cui.store_p = false;
5954 note_uses (&PATTERN (insn), add_uses_1, &cui);
5955 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5956 mos = VEC_address (micro_operation, VTI (bb)->mos);
5958 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
5962 while (n1 < n2 && mos[n1].type == MO_USE)
5964 while (n1 < n2 && mos[n2].type != MO_USE)
5976 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5979 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
5981 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
5999 mo.u.loc = call_arguments;
6000 call_arguments = NULL_RTX;
6002 if (dump_file && (dump_flags & TDF_DETAILS))
6003 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6004 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
6007 n1 = VEC_length (micro_operation, VTI (bb)->mos);
6008 /* This will record NEXT_INSN (insn), such that we can
6009 insert notes before it without worrying about any
6010 notes that MO_USEs might emit after the insn. */
6012 note_stores (PATTERN (insn), add_stores, &cui);
6013 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6014 mos = VEC_address (micro_operation, VTI (bb)->mos);
6016 /* Order the MO_VAL_USEs first (note_stores does nothing
6017 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6018 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6021 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6023 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6035 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
6038 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6040 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6053 static enum var_init_status
6054 find_src_status (dataflow_set *in, rtx src)
6056 tree decl = NULL_TREE;
6057 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6059 if (! flag_var_tracking_uninit)
6060 status = VAR_INIT_STATUS_INITIALIZED;
6062 if (src && REG_P (src))
6063 decl = var_debug_decl (REG_EXPR (src));
6064 else if (src && MEM_P (src))
6065 decl = var_debug_decl (MEM_EXPR (src));
6068 status = get_init_value (in, src, dv_from_decl (decl));
6073 /* SRC is the source of an assignment. Use SET to try to find what
6074 was ultimately assigned to SRC. Return that value if known,
6075 otherwise return SRC itself. */
6078 find_src_set_src (dataflow_set *set, rtx src)
6080 tree decl = NULL_TREE; /* The variable being copied around. */
6081 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6083 location_chain nextp;
6087 if (src && REG_P (src))
6088 decl = var_debug_decl (REG_EXPR (src));
6089 else if (src && MEM_P (src))
6090 decl = var_debug_decl (MEM_EXPR (src));
6094 decl_or_value dv = dv_from_decl (decl);
6096 var = shared_hash_find (set->vars, dv);
6100 for (i = 0; i < var->n_var_parts && !found; i++)
6101 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6102 nextp = nextp->next)
6103 if (rtx_equal_p (nextp->loc, src))
6105 set_src = nextp->set_src;
6115 /* Compute the changes of variable locations in the basic block BB. */
6118 compute_bb_dataflow (basic_block bb)
6121 micro_operation *mo;
6123 dataflow_set old_out;
6124 dataflow_set *in = &VTI (bb)->in;
6125 dataflow_set *out = &VTI (bb)->out;
6127 dataflow_set_init (&old_out);
6128 dataflow_set_copy (&old_out, out);
6129 dataflow_set_copy (out, in);
6131 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
6133 rtx insn = mo->insn;
6138 dataflow_set_clear_at_call (out);
6143 rtx loc = mo->u.loc;
6146 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6147 else if (MEM_P (loc))
6148 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6154 rtx loc = mo->u.loc;
6158 if (GET_CODE (loc) == CONCAT)
6160 val = XEXP (loc, 0);
6161 vloc = XEXP (loc, 1);
6169 var = PAT_VAR_LOCATION_DECL (vloc);
6171 clobber_variable_part (out, NULL_RTX,
6172 dv_from_decl (var), 0, NULL_RTX);
6175 if (VAL_NEEDS_RESOLUTION (loc))
6176 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6177 set_variable_part (out, val, dv_from_decl (var), 0,
6178 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6181 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6182 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6183 dv_from_decl (var), 0,
6184 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6191 rtx loc = mo->u.loc;
6192 rtx val, vloc, uloc;
6194 vloc = uloc = XEXP (loc, 1);
6195 val = XEXP (loc, 0);
6197 if (GET_CODE (val) == CONCAT)
6199 uloc = XEXP (val, 1);
6200 val = XEXP (val, 0);
6203 if (VAL_NEEDS_RESOLUTION (loc))
6204 val_resolve (out, val, vloc, insn);
6206 val_store (out, val, uloc, insn, false);
6208 if (VAL_HOLDS_TRACK_EXPR (loc))
6210 if (GET_CODE (uloc) == REG)
6211 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6213 else if (GET_CODE (uloc) == MEM)
6214 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6222 rtx loc = mo->u.loc;
6223 rtx val, vloc, uloc, reverse = NULL_RTX;
6226 if (VAL_EXPR_HAS_REVERSE (loc))
6228 reverse = XEXP (loc, 1);
6229 vloc = XEXP (loc, 0);
6231 uloc = XEXP (vloc, 1);
6232 val = XEXP (vloc, 0);
6235 if (GET_CODE (val) == CONCAT)
6237 vloc = XEXP (val, 1);
6238 val = XEXP (val, 0);
6241 if (GET_CODE (vloc) == SET)
6243 rtx vsrc = SET_SRC (vloc);
6245 gcc_assert (val != vsrc);
6246 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6248 vloc = SET_DEST (vloc);
6250 if (VAL_NEEDS_RESOLUTION (loc))
6251 val_resolve (out, val, vsrc, insn);
6253 else if (VAL_NEEDS_RESOLUTION (loc))
6255 gcc_assert (GET_CODE (uloc) == SET
6256 && GET_CODE (SET_SRC (uloc)) == REG);
6257 val_resolve (out, val, SET_SRC (uloc), insn);
6260 if (VAL_HOLDS_TRACK_EXPR (loc))
6262 if (VAL_EXPR_IS_CLOBBERED (loc))
6265 var_reg_delete (out, uloc, true);
6266 else if (MEM_P (uloc))
6267 var_mem_delete (out, uloc, true);
6271 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6273 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6275 if (GET_CODE (uloc) == SET)
6277 set_src = SET_SRC (uloc);
6278 uloc = SET_DEST (uloc);
6283 if (flag_var_tracking_uninit)
6285 status = find_src_status (in, set_src);
6287 if (status == VAR_INIT_STATUS_UNKNOWN)
6288 status = find_src_status (out, set_src);
6291 set_src = find_src_set_src (in, set_src);
6295 var_reg_delete_and_set (out, uloc, !copied_p,
6297 else if (MEM_P (uloc))
6298 var_mem_delete_and_set (out, uloc, !copied_p,
6302 else if (REG_P (uloc))
6303 var_regno_delete (out, REGNO (uloc));
6305 val_store (out, val, vloc, insn, true);
6308 val_store (out, XEXP (reverse, 0), XEXP (reverse, 1),
6315 rtx loc = mo->u.loc;
6318 if (GET_CODE (loc) == SET)
6320 set_src = SET_SRC (loc);
6321 loc = SET_DEST (loc);
6325 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6327 else if (MEM_P (loc))
6328 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6335 rtx loc = mo->u.loc;
6336 enum var_init_status src_status;
6339 if (GET_CODE (loc) == SET)
6341 set_src = SET_SRC (loc);
6342 loc = SET_DEST (loc);
6345 if (! flag_var_tracking_uninit)
6346 src_status = VAR_INIT_STATUS_INITIALIZED;
6349 src_status = find_src_status (in, set_src);
6351 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6352 src_status = find_src_status (out, set_src);
6355 set_src = find_src_set_src (in, set_src);
6358 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6359 else if (MEM_P (loc))
6360 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6366 rtx loc = mo->u.loc;
6369 var_reg_delete (out, loc, false);
6370 else if (MEM_P (loc))
6371 var_mem_delete (out, loc, false);
6377 rtx loc = mo->u.loc;
6380 var_reg_delete (out, loc, true);
6381 else if (MEM_P (loc))
6382 var_mem_delete (out, loc, true);
6387 out->stack_adjust += mo->u.adjust;
6392 if (MAY_HAVE_DEBUG_INSNS)
6394 dataflow_set_equiv_regs (out);
6395 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
6397 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
6400 htab_traverse (shared_hash_htab (out->vars),
6401 canonicalize_loc_order_check, out);
6404 changed = dataflow_set_different (&old_out, out);
6405 dataflow_set_destroy (&old_out);
6409 /* Find the locations of variables in the whole function. */
6412 vt_find_locations (void)
6414 fibheap_t worklist, pending, fibheap_swap;
6415 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6422 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6423 bool success = true;
6425 timevar_push (TV_VAR_TRACKING_DATAFLOW);
6426 /* Compute reverse completion order of depth first search of the CFG
6427 so that the data-flow runs faster. */
6428 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
6429 bb_order = XNEWVEC (int, last_basic_block);
6430 pre_and_rev_post_order_compute (NULL, rc_order, false);
6431 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
6432 bb_order[rc_order[i]] = i;
6435 worklist = fibheap_new ();
6436 pending = fibheap_new ();
6437 visited = sbitmap_alloc (last_basic_block);
6438 in_worklist = sbitmap_alloc (last_basic_block);
6439 in_pending = sbitmap_alloc (last_basic_block);
6440 sbitmap_zero (in_worklist);
6443 fibheap_insert (pending, bb_order[bb->index], bb);
6444 sbitmap_ones (in_pending);
6446 while (success && !fibheap_empty (pending))
6448 fibheap_swap = pending;
6450 worklist = fibheap_swap;
6451 sbitmap_swap = in_pending;
6452 in_pending = in_worklist;
6453 in_worklist = sbitmap_swap;
6455 sbitmap_zero (visited);
6457 while (!fibheap_empty (worklist))
6459 bb = (basic_block) fibheap_extract_min (worklist);
6460 RESET_BIT (in_worklist, bb->index);
6461 gcc_assert (!TEST_BIT (visited, bb->index));
6462 if (!TEST_BIT (visited, bb->index))
6466 int oldinsz, oldoutsz;
6468 SET_BIT (visited, bb->index);
6470 if (VTI (bb)->in.vars)
6473 -= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6474 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6476 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
6478 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
6481 oldinsz = oldoutsz = 0;
6483 if (MAY_HAVE_DEBUG_INSNS)
6485 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
6486 bool first = true, adjust = false;
6488 /* Calculate the IN set as the intersection of
6489 predecessor OUT sets. */
6491 dataflow_set_clear (in);
6492 dst_can_be_shared = true;
6494 FOR_EACH_EDGE (e, ei, bb->preds)
6495 if (!VTI (e->src)->flooded)
6496 gcc_assert (bb_order[bb->index]
6497 <= bb_order[e->src->index]);
6500 dataflow_set_copy (in, &VTI (e->src)->out);
6501 first_out = &VTI (e->src)->out;
6506 dataflow_set_merge (in, &VTI (e->src)->out);
6512 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
6514 /* Merge and merge_adjust should keep entries in
6516 htab_traverse (shared_hash_htab (in->vars),
6517 canonicalize_loc_order_check,
6520 if (dst_can_be_shared)
6522 shared_hash_destroy (in->vars);
6523 in->vars = shared_hash_copy (first_out->vars);
6527 VTI (bb)->flooded = true;
6531 /* Calculate the IN set as union of predecessor OUT sets. */
6532 dataflow_set_clear (&VTI (bb)->in);
6533 FOR_EACH_EDGE (e, ei, bb->preds)
6534 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
6537 changed = compute_bb_dataflow (bb);
6538 htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6539 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6541 if (htabmax && htabsz > htabmax)
6543 if (MAY_HAVE_DEBUG_INSNS)
6544 inform (DECL_SOURCE_LOCATION (cfun->decl),
6545 "variable tracking size limit exceeded with "
6546 "-fvar-tracking-assignments, retrying without");
6548 inform (DECL_SOURCE_LOCATION (cfun->decl),
6549 "variable tracking size limit exceeded");
6556 FOR_EACH_EDGE (e, ei, bb->succs)
6558 if (e->dest == EXIT_BLOCK_PTR)
6561 if (TEST_BIT (visited, e->dest->index))
6563 if (!TEST_BIT (in_pending, e->dest->index))
6565 /* Send E->DEST to next round. */
6566 SET_BIT (in_pending, e->dest->index);
6567 fibheap_insert (pending,
6568 bb_order[e->dest->index],
6572 else if (!TEST_BIT (in_worklist, e->dest->index))
6574 /* Add E->DEST to current round. */
6575 SET_BIT (in_worklist, e->dest->index);
6576 fibheap_insert (worklist, bb_order[e->dest->index],
6584 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
6586 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
6588 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
6590 (int)worklist->nodes, (int)pending->nodes, htabsz);
6592 if (dump_file && (dump_flags & TDF_DETAILS))
6594 fprintf (dump_file, "BB %i IN:\n", bb->index);
6595 dump_dataflow_set (&VTI (bb)->in);
6596 fprintf (dump_file, "BB %i OUT:\n", bb->index);
6597 dump_dataflow_set (&VTI (bb)->out);
6603 if (success && MAY_HAVE_DEBUG_INSNS)
6605 gcc_assert (VTI (bb)->flooded);
6608 fibheap_delete (worklist);
6609 fibheap_delete (pending);
6610 sbitmap_free (visited);
6611 sbitmap_free (in_worklist);
6612 sbitmap_free (in_pending);
6614 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
6618 /* Print the content of the LIST to dump file. */
6621 dump_attrs_list (attrs list)
6623 for (; list; list = list->next)
6625 if (dv_is_decl_p (list->dv))
6626 print_mem_expr (dump_file, dv_as_decl (list->dv));
6628 print_rtl_single (dump_file, dv_as_value (list->dv));
6629 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
6631 fprintf (dump_file, "\n");
6634 /* Print the information about variable *SLOT to dump file. */
6637 dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
6639 variable var = (variable) *slot;
6643 /* Continue traversing the hash table. */
6647 /* Print the information about variable VAR to dump file. */
6650 dump_var (variable var)
6653 location_chain node;
6655 if (dv_is_decl_p (var->dv))
6657 const_tree decl = dv_as_decl (var->dv);
6659 if (DECL_NAME (decl))
6661 fprintf (dump_file, " name: %s",
6662 IDENTIFIER_POINTER (DECL_NAME (decl)));
6663 if (dump_flags & TDF_UID)
6664 fprintf (dump_file, "D.%u", DECL_UID (decl));
6666 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6667 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
6669 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
6670 fprintf (dump_file, "\n");
6674 fputc (' ', dump_file);
6675 print_rtl_single (dump_file, dv_as_value (var->dv));
6678 for (i = 0; i < var->n_var_parts; i++)
6680 fprintf (dump_file, " offset %ld\n",
6681 (long) var->var_part[i].offset);
6682 for (node = var->var_part[i].loc_chain; node; node = node->next)
6684 fprintf (dump_file, " ");
6685 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
6686 fprintf (dump_file, "[uninit]");
6687 print_rtl_single (dump_file, node->loc);
6692 /* Print the information about variables from hash table VARS to dump file. */
6695 dump_vars (htab_t vars)
6697 if (htab_elements (vars) > 0)
6699 fprintf (dump_file, "Variables:\n");
6700 htab_traverse (vars, dump_var_slot, NULL);
6704 /* Print the dataflow set SET to dump file. */
6707 dump_dataflow_set (dataflow_set *set)
6711 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
6713 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6717 fprintf (dump_file, "Reg %d:", i);
6718 dump_attrs_list (set->regs[i]);
6721 dump_vars (shared_hash_htab (set->vars));
6722 fprintf (dump_file, "\n");
6725 /* Print the IN and OUT sets for each basic block to dump file. */
6728 dump_dataflow_sets (void)
6734 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
6735 fprintf (dump_file, "IN:\n");
6736 dump_dataflow_set (&VTI (bb)->in);
6737 fprintf (dump_file, "OUT:\n");
6738 dump_dataflow_set (&VTI (bb)->out);
6742 /* Add variable VAR to the hash table of changed variables and
6743 if it has no locations delete it from SET's hash table. */
6746 variable_was_changed (variable var, dataflow_set *set)
6748 hashval_t hash = dv_htab_hash (var->dv);
6753 bool old_cur_loc_changed = false;
6755 /* Remember this decl or VALUE has been added to changed_variables. */
6756 set_dv_changed (var->dv, true);
6758 slot = htab_find_slot_with_hash (changed_variables,
6764 variable old_var = (variable) *slot;
6765 gcc_assert (old_var->in_changed_variables);
6766 old_var->in_changed_variables = false;
6767 old_cur_loc_changed = old_var->cur_loc_changed;
6768 variable_htab_free (*slot);
6770 if (set && var->n_var_parts == 0)
6774 empty_var = (variable) pool_alloc (dv_pool (var->dv));
6775 empty_var->dv = var->dv;
6776 empty_var->refcount = 1;
6777 empty_var->n_var_parts = 0;
6778 empty_var->cur_loc_changed = true;
6779 empty_var->in_changed_variables = true;
6786 var->in_changed_variables = true;
6787 /* If within processing one uop a variable is deleted
6788 and then readded, we need to assume it has changed. */
6789 if (old_cur_loc_changed)
6790 var->cur_loc_changed = true;
6797 if (var->n_var_parts == 0)
6802 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
6805 if (shared_hash_shared (set->vars))
6806 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
6808 htab_clear_slot (shared_hash_htab (set->vars), slot);
6814 /* Look for the index in VAR->var_part corresponding to OFFSET.
6815 Return -1 if not found. If INSERTION_POINT is non-NULL, the
6816 referenced int will be set to the index that the part has or should
6817 have, if it should be inserted. */
6820 find_variable_location_part (variable var, HOST_WIDE_INT offset,
6821 int *insertion_point)
6825 /* Find the location part. */
6827 high = var->n_var_parts;
6830 pos = (low + high) / 2;
6831 if (var->var_part[pos].offset < offset)
6838 if (insertion_point)
6839 *insertion_point = pos;
6841 if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
6848 set_slot_part (dataflow_set *set, rtx loc, void **slot,
6849 decl_or_value dv, HOST_WIDE_INT offset,
6850 enum var_init_status initialized, rtx set_src)
6853 location_chain node, next;
6854 location_chain *nextp;
6856 bool onepart = dv_onepart_p (dv);
6858 gcc_assert (offset == 0 || !onepart);
6859 gcc_assert (loc != dv_as_opaque (dv));
6861 var = (variable) *slot;
6863 if (! flag_var_tracking_uninit)
6864 initialized = VAR_INIT_STATUS_INITIALIZED;
6868 /* Create new variable information. */
6869 var = (variable) pool_alloc (dv_pool (dv));
6872 var->n_var_parts = 1;
6873 var->cur_loc_changed = false;
6874 var->in_changed_variables = false;
6875 var->var_part[0].offset = offset;
6876 var->var_part[0].loc_chain = NULL;
6877 var->var_part[0].cur_loc = NULL;
6880 nextp = &var->var_part[0].loc_chain;
6886 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
6890 if (GET_CODE (loc) == VALUE)
6892 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6893 nextp = &node->next)
6894 if (GET_CODE (node->loc) == VALUE)
6896 if (node->loc == loc)
6901 if (canon_value_cmp (node->loc, loc))
6909 else if (REG_P (node->loc) || MEM_P (node->loc))
6917 else if (REG_P (loc))
6919 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6920 nextp = &node->next)
6921 if (REG_P (node->loc))
6923 if (REGNO (node->loc) < REGNO (loc))
6927 if (REGNO (node->loc) == REGNO (loc))
6940 else if (MEM_P (loc))
6942 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6943 nextp = &node->next)
6944 if (REG_P (node->loc))
6946 else if (MEM_P (node->loc))
6948 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
6960 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6961 nextp = &node->next)
6962 if ((r = loc_cmp (node->loc, loc)) >= 0)
6970 if (shared_var_p (var, set->vars))
6972 slot = unshare_variable (set, slot, var, initialized);
6973 var = (variable)*slot;
6974 for (nextp = &var->var_part[0].loc_chain; c;
6975 nextp = &(*nextp)->next)
6977 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
6984 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
6986 pos = find_variable_location_part (var, offset, &inspos);
6990 node = var->var_part[pos].loc_chain;
6993 && ((REG_P (node->loc) && REG_P (loc)
6994 && REGNO (node->loc) == REGNO (loc))
6995 || rtx_equal_p (node->loc, loc)))
6997 /* LOC is in the beginning of the chain so we have nothing
6999 if (node->init < initialized)
7000 node->init = initialized;
7001 if (set_src != NULL)
7002 node->set_src = set_src;
7008 /* We have to make a copy of a shared variable. */
7009 if (shared_var_p (var, set->vars))
7011 slot = unshare_variable (set, slot, var, initialized);
7012 var = (variable)*slot;
7018 /* We have not found the location part, new one will be created. */
7020 /* We have to make a copy of the shared variable. */
7021 if (shared_var_p (var, set->vars))
7023 slot = unshare_variable (set, slot, var, initialized);
7024 var = (variable)*slot;
7027 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
7028 thus there are at most MAX_VAR_PARTS different offsets. */
7029 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7030 && (!var->n_var_parts || !dv_onepart_p (var->dv)));
7032 /* We have to move the elements of array starting at index
7033 inspos to the next position. */
7034 for (pos = var->n_var_parts; pos > inspos; pos--)
7035 var->var_part[pos] = var->var_part[pos - 1];
7038 var->var_part[pos].offset = offset;
7039 var->var_part[pos].loc_chain = NULL;
7040 var->var_part[pos].cur_loc = NULL;
7043 /* Delete the location from the list. */
7044 nextp = &var->var_part[pos].loc_chain;
7045 for (node = var->var_part[pos].loc_chain; node; node = next)
7048 if ((REG_P (node->loc) && REG_P (loc)
7049 && REGNO (node->loc) == REGNO (loc))
7050 || rtx_equal_p (node->loc, loc))
7052 /* Save these values, to assign to the new node, before
7053 deleting this one. */
7054 if (node->init > initialized)
7055 initialized = node->init;
7056 if (node->set_src != NULL && set_src == NULL)
7057 set_src = node->set_src;
7058 if (var->var_part[pos].cur_loc == node->loc)
7060 var->var_part[pos].cur_loc = NULL;
7061 var->cur_loc_changed = true;
7063 pool_free (loc_chain_pool, node);
7068 nextp = &node->next;
7071 nextp = &var->var_part[pos].loc_chain;
7074 /* Add the location to the beginning. */
7075 node = (location_chain) pool_alloc (loc_chain_pool);
7077 node->init = initialized;
7078 node->set_src = set_src;
7079 node->next = *nextp;
7082 if (onepart && emit_notes)
7083 add_value_chains (var->dv, loc);
7085 /* If no location was emitted do so. */
7086 if (var->var_part[pos].cur_loc == NULL)
7087 variable_was_changed (var, set);
7092 /* Set the part of variable's location in the dataflow set SET. The
7093 variable part is specified by variable's declaration in DV and
7094 offset OFFSET and the part's location by LOC. IOPT should be
7095 NO_INSERT if the variable is known to be in SET already and the
7096 variable hash table must not be resized, and INSERT otherwise. */
7099 set_variable_part (dataflow_set *set, rtx loc,
7100 decl_or_value dv, HOST_WIDE_INT offset,
7101 enum var_init_status initialized, rtx set_src,
7102 enum insert_option iopt)
7106 if (iopt == NO_INSERT)
7107 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7110 slot = shared_hash_find_slot (set->vars, dv);
7112 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7114 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7117 /* Remove all recorded register locations for the given variable part
7118 from dataflow set SET, except for those that are identical to loc.
7119 The variable part is specified by variable's declaration or value
7120 DV and offset OFFSET. */
7123 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
7124 HOST_WIDE_INT offset, rtx set_src)
7126 variable var = (variable) *slot;
7127 int pos = find_variable_location_part (var, offset, NULL);
7131 location_chain node, next;
7133 /* Remove the register locations from the dataflow set. */
7134 next = var->var_part[pos].loc_chain;
7135 for (node = next; node; node = next)
7138 if (node->loc != loc
7139 && (!flag_var_tracking_uninit
7142 || !rtx_equal_p (set_src, node->set_src)))
7144 if (REG_P (node->loc))
7149 /* Remove the variable part from the register's
7150 list, but preserve any other variable parts
7151 that might be regarded as live in that same
7153 anextp = &set->regs[REGNO (node->loc)];
7154 for (anode = *anextp; anode; anode = anext)
7156 anext = anode->next;
7157 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7158 && anode->offset == offset)
7160 pool_free (attrs_pool, anode);
7164 anextp = &anode->next;
7168 slot = delete_slot_part (set, node->loc, slot, offset);
7176 /* Remove all recorded register locations for the given variable part
7177 from dataflow set SET, except for those that are identical to loc.
7178 The variable part is specified by variable's declaration or value
7179 DV and offset OFFSET. */
7182 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7183 HOST_WIDE_INT offset, rtx set_src)
7187 if (!dv_as_opaque (dv)
7188 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7191 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7195 clobber_slot_part (set, loc, slot, offset, set_src);
7198 /* Delete the part of variable's location from dataflow set SET. The
7199 variable part is specified by its SET->vars slot SLOT and offset
7200 OFFSET and the part's location by LOC. */
7203 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
7204 HOST_WIDE_INT offset)
7206 variable var = (variable) *slot;
7207 int pos = find_variable_location_part (var, offset, NULL);
7211 location_chain node, next;
7212 location_chain *nextp;
7215 if (shared_var_p (var, set->vars))
7217 /* If the variable contains the location part we have to
7218 make a copy of the variable. */
7219 for (node = var->var_part[pos].loc_chain; node;
7222 if ((REG_P (node->loc) && REG_P (loc)
7223 && REGNO (node->loc) == REGNO (loc))
7224 || rtx_equal_p (node->loc, loc))
7226 slot = unshare_variable (set, slot, var,
7227 VAR_INIT_STATUS_UNKNOWN);
7228 var = (variable)*slot;
7234 /* Delete the location part. */
7236 nextp = &var->var_part[pos].loc_chain;
7237 for (node = *nextp; node; node = next)
7240 if ((REG_P (node->loc) && REG_P (loc)
7241 && REGNO (node->loc) == REGNO (loc))
7242 || rtx_equal_p (node->loc, loc))
7244 if (emit_notes && pos == 0 && dv_onepart_p (var->dv))
7245 remove_value_chains (var->dv, node->loc);
7246 /* If we have deleted the location which was last emitted
7247 we have to emit new location so add the variable to set
7248 of changed variables. */
7249 if (var->var_part[pos].cur_loc == node->loc)
7252 var->var_part[pos].cur_loc = NULL;
7253 var->cur_loc_changed = true;
7255 pool_free (loc_chain_pool, node);
7260 nextp = &node->next;
7263 if (var->var_part[pos].loc_chain == NULL)
7268 var->cur_loc_changed = true;
7269 while (pos < var->n_var_parts)
7271 var->var_part[pos] = var->var_part[pos + 1];
7276 variable_was_changed (var, set);
7282 /* Delete the part of variable's location from dataflow set SET. The
7283 variable part is specified by variable's declaration or value DV
7284 and offset OFFSET and the part's location by LOC. */
7287 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7288 HOST_WIDE_INT offset)
7290 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7294 delete_slot_part (set, loc, slot, offset);
7297 /* Structure for passing some other parameters to function
7298 vt_expand_loc_callback. */
7299 struct expand_loc_callback_data
7301 /* The variables and values active at this point. */
7304 /* True in vt_expand_loc_dummy calls, no rtl should be allocated.
7305 Non-NULL should be returned if vt_expand_loc would return
7306 non-NULL in that case, NULL otherwise. cur_loc_changed should be
7307 computed and cur_loc recomputed when possible (but just once
7308 per emit_notes_for_changes call). */
7311 /* True if expansion of subexpressions had to recompute some
7312 VALUE/DEBUG_EXPR_DECL's cur_loc or used a VALUE/DEBUG_EXPR_DECL
7313 whose cur_loc has been already recomputed during current
7314 emit_notes_for_changes call. */
7315 bool cur_loc_changed;
7317 /* True if cur_loc should be ignored and any possible location
7319 bool ignore_cur_loc;
7322 /* Callback for cselib_expand_value, that looks for expressions
7323 holding the value in the var-tracking hash tables. Return X for
7324 standard processing, anything else is to be used as-is. */
7327 vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
7329 struct expand_loc_callback_data *elcd
7330 = (struct expand_loc_callback_data *) data;
7331 bool dummy = elcd->dummy;
7332 bool cur_loc_changed = elcd->cur_loc_changed;
7337 rtx result, subreg, xret;
7339 switch (GET_CODE (x))
7344 if (cselib_dummy_expand_value_rtx_cb (SUBREG_REG (x), regs,
7346 vt_expand_loc_callback, data))
7352 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
7354 vt_expand_loc_callback, data);
7359 result = simplify_gen_subreg (GET_MODE (x), subreg,
7360 GET_MODE (SUBREG_REG (x)),
7363 /* Invalid SUBREGs are ok in debug info. ??? We could try
7364 alternate expansions for the VALUE as well. */
7366 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
7371 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
7376 dv = dv_from_value (x);
7384 if (VALUE_RECURSED_INTO (x))
7387 var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));
7391 if (dummy && dv_changed_p (dv))
7392 elcd->cur_loc_changed = true;
7396 if (var->n_var_parts == 0)
7399 elcd->cur_loc_changed = true;
7403 gcc_assert (var->n_var_parts == 1);
7405 VALUE_RECURSED_INTO (x) = true;
7408 if (var->var_part[0].cur_loc && !elcd->ignore_cur_loc)
7412 if (cselib_dummy_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
7414 vt_expand_loc_callback, data))
7418 result = cselib_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
7420 vt_expand_loc_callback, data);
7422 set_dv_changed (dv, false);
7423 cur_loc = var->var_part[0].cur_loc;
7427 if (!result && (dv_changed_p (dv) || elcd->ignore_cur_loc))
7429 if (!elcd->ignore_cur_loc)
7430 set_dv_changed (dv, false);
7431 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
7432 if (loc->loc == cur_loc)
7436 elcd->cur_loc_changed = cur_loc_changed;
7437 if (cselib_dummy_expand_value_rtx_cb (loc->loc, regs, max_depth,
7438 vt_expand_loc_callback,
7447 result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
7448 vt_expand_loc_callback, data);
7452 if (dummy && (result || var->var_part[0].cur_loc))
7453 var->cur_loc_changed = true;
7454 if (!elcd->ignore_cur_loc)
7455 var->var_part[0].cur_loc = loc ? loc->loc : NULL_RTX;
7459 if (var->cur_loc_changed)
7460 elcd->cur_loc_changed = true;
7461 else if (!result && var->var_part[0].cur_loc == NULL_RTX)
7462 elcd->cur_loc_changed = cur_loc_changed;
7465 VALUE_RECURSED_INTO (x) = false;
7472 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
7476 vt_expand_loc (rtx loc, htab_t vars, bool ignore_cur_loc)
7478 struct expand_loc_callback_data data;
7480 if (!MAY_HAVE_DEBUG_INSNS)
7485 data.cur_loc_changed = false;
7486 data.ignore_cur_loc = ignore_cur_loc;
7487 loc = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
7488 vt_expand_loc_callback, &data);
7490 if (loc && MEM_P (loc))
7491 loc = targetm.delegitimize_address (loc);
7495 /* Like vt_expand_loc, but only return true/false (whether vt_expand_loc
7496 would succeed or not, without actually allocating new rtxes. */
7499 vt_expand_loc_dummy (rtx loc, htab_t vars, bool *pcur_loc_changed)
7501 struct expand_loc_callback_data data;
7504 gcc_assert (MAY_HAVE_DEBUG_INSNS);
7507 data.cur_loc_changed = false;
7508 data.ignore_cur_loc = false;
7509 ret = cselib_dummy_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
7510 vt_expand_loc_callback, &data);
7511 *pcur_loc_changed = data.cur_loc_changed;
7515 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
7516 additional parameters: WHERE specifies whether the note shall be emitted
7517 before or after instruction INSN. */
7520 emit_note_insn_var_location (void **varp, void *data)
7522 variable var = (variable) *varp;
7523 rtx insn = ((emit_note_data *)data)->insn;
7524 enum emit_note_where where = ((emit_note_data *)data)->where;
7525 htab_t vars = ((emit_note_data *)data)->vars;
7527 int i, j, n_var_parts;
7529 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
7530 HOST_WIDE_INT last_limit;
7531 tree type_size_unit;
7532 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
7533 rtx loc[MAX_VAR_PARTS];
7537 if (dv_is_value_p (var->dv))
7538 goto value_or_debug_decl;
7540 decl = dv_as_decl (var->dv);
7542 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7543 goto value_or_debug_decl;
7548 if (!MAY_HAVE_DEBUG_INSNS)
7550 for (i = 0; i < var->n_var_parts; i++)
7551 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
7553 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
7554 var->cur_loc_changed = true;
7556 if (var->n_var_parts == 0)
7557 var->cur_loc_changed = true;
7559 if (!var->cur_loc_changed)
7561 for (i = 0; i < var->n_var_parts; i++)
7563 enum machine_mode mode, wider_mode;
7566 if (last_limit < var->var_part[i].offset)
7571 else if (last_limit > var->var_part[i].offset)
7573 offsets[n_var_parts] = var->var_part[i].offset;
7574 if (!var->var_part[i].cur_loc)
7579 loc2 = vt_expand_loc (var->var_part[i].cur_loc, vars, false);
7585 loc[n_var_parts] = loc2;
7586 mode = GET_MODE (var->var_part[i].cur_loc);
7587 if (mode == VOIDmode && dv_onepart_p (var->dv))
7588 mode = DECL_MODE (decl);
7589 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
7590 if (var->var_part[i].cur_loc == lc->loc)
7592 initialized = lc->init;
7596 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
7598 /* Attempt to merge adjacent registers or memory. */
7599 wider_mode = GET_MODE_WIDER_MODE (mode);
7600 for (j = i + 1; j < var->n_var_parts; j++)
7601 if (last_limit <= var->var_part[j].offset)
7603 if (j < var->n_var_parts
7604 && wider_mode != VOIDmode
7605 && var->var_part[j].cur_loc
7606 && mode == GET_MODE (var->var_part[j].cur_loc)
7607 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
7608 && last_limit == var->var_part[j].offset
7609 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars, false))
7610 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
7614 if (REG_P (loc[n_var_parts])
7615 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
7616 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
7617 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
7620 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
7621 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
7623 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
7624 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
7627 if (!REG_P (new_loc)
7628 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
7631 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
7634 else if (MEM_P (loc[n_var_parts])
7635 && GET_CODE (XEXP (loc2, 0)) == PLUS
7636 && REG_P (XEXP (XEXP (loc2, 0), 0))
7637 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
7639 if ((REG_P (XEXP (loc[n_var_parts], 0))
7640 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
7641 XEXP (XEXP (loc2, 0), 0))
7642 && INTVAL (XEXP (XEXP (loc2, 0), 1))
7643 == GET_MODE_SIZE (mode))
7644 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
7645 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
7646 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
7647 XEXP (XEXP (loc2, 0), 0))
7648 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
7649 + GET_MODE_SIZE (mode)
7650 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
7651 new_loc = adjust_address_nv (loc[n_var_parts],
7657 loc[n_var_parts] = new_loc;
7659 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
7665 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7666 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
7669 if (! flag_var_tracking_uninit)
7670 initialized = VAR_INIT_STATUS_INITIALIZED;
7674 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
7676 else if (n_var_parts == 1)
7680 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
7681 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
7685 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
7688 else if (n_var_parts)
7692 for (i = 0; i < n_var_parts; i++)
7694 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
7696 parallel = gen_rtx_PARALLEL (VOIDmode,
7697 gen_rtvec_v (n_var_parts, loc));
7698 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
7699 parallel, (int) initialized);
7702 if (where != EMIT_NOTE_BEFORE_INSN)
7704 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
7705 if (where == EMIT_NOTE_AFTER_CALL_INSN)
7706 NOTE_DURING_CALL_P (note) = true;
7710 /* Make sure that the call related notes come first. */
7711 while (NEXT_INSN (insn)
7713 && NOTE_DURING_CALL_P (insn))
7714 insn = NEXT_INSN (insn);
7715 if (NOTE_P (insn) && NOTE_DURING_CALL_P (insn))
7716 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
7718 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
7720 NOTE_VAR_LOCATION (note) = note_vl;
7723 set_dv_changed (var->dv, false);
7724 var->cur_loc_changed = false;
7725 gcc_assert (var->in_changed_variables);
7726 var->in_changed_variables = false;
7727 htab_clear_slot (changed_variables, varp);
7729 /* Continue traversing the hash table. */
7732 value_or_debug_decl:
7733 if (dv_changed_p (var->dv) && var->n_var_parts)
7736 bool cur_loc_changed;
7738 if (var->var_part[0].cur_loc
7739 && vt_expand_loc_dummy (var->var_part[0].cur_loc, vars,
7742 for (lc = var->var_part[0].loc_chain; lc; lc = lc->next)
7743 if (lc->loc != var->var_part[0].cur_loc
7744 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
7746 var->var_part[0].cur_loc = lc ? lc->loc : NULL_RTX;
7751 DEF_VEC_P (variable);
7752 DEF_VEC_ALLOC_P (variable, heap);
7754 /* Stack of variable_def pointers that need processing with
7755 check_changed_vars_2. */
7757 static VEC (variable, heap) *changed_variables_stack;
7759 /* VALUEs with no variables that need set_dv_changed (val, false)
7760 called before check_changed_vars_3. */
7762 static VEC (rtx, heap) *changed_values_stack;
7764 /* Helper function for check_changed_vars_1 and check_changed_vars_2. */
7767 check_changed_vars_0 (decl_or_value dv, htab_t htab)
7770 = (value_chain) htab_find_with_hash (value_chains, dv, dv_htab_hash (dv));
7774 for (vc = vc->next; vc; vc = vc->next)
7775 if (!dv_changed_p (vc->dv))
7778 = (variable) htab_find_with_hash (htab, vc->dv,
7779 dv_htab_hash (vc->dv));
7782 set_dv_changed (vc->dv, true);
7783 VEC_safe_push (variable, heap, changed_variables_stack, vcvar);
7785 else if (dv_is_value_p (vc->dv))
7787 set_dv_changed (vc->dv, true);
7788 VEC_safe_push (rtx, heap, changed_values_stack,
7789 dv_as_value (vc->dv));
7790 check_changed_vars_0 (vc->dv, htab);
7795 /* Populate changed_variables_stack with variable_def pointers
7796 that need variable_was_changed called on them. */
7799 check_changed_vars_1 (void **slot, void *data)
7801 variable var = (variable) *slot;
7802 htab_t htab = (htab_t) data;
7804 if (dv_is_value_p (var->dv)
7805 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7806 check_changed_vars_0 (var->dv, htab);
7810 /* Add VAR to changed_variables and also for VALUEs add recursively
7811 all DVs that aren't in changed_variables yet but reference the
7812 VALUE from its loc_chain. */
7815 check_changed_vars_2 (variable var, htab_t htab)
7817 variable_was_changed (var, NULL);
7818 if (dv_is_value_p (var->dv)
7819 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7820 check_changed_vars_0 (var->dv, htab);
7823 /* For each changed decl (except DEBUG_EXPR_DECLs) recompute
7824 cur_loc if needed (and cur_loc of all VALUEs and DEBUG_EXPR_DECLs
7825 it needs and are also in changed variables) and track whether
7826 cur_loc (or anything it uses to compute location) had to change
7827 during the current emit_notes_for_changes call. */
7830 check_changed_vars_3 (void **slot, void *data)
7832 variable var = (variable) *slot;
7833 htab_t vars = (htab_t) data;
7836 bool cur_loc_changed;
7838 if (dv_is_value_p (var->dv)
7839 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7842 for (i = 0; i < var->n_var_parts; i++)
7844 if (var->var_part[i].cur_loc
7845 && vt_expand_loc_dummy (var->var_part[i].cur_loc, vars,
7848 if (cur_loc_changed)
7849 var->cur_loc_changed = true;
7852 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
7853 if (lc->loc != var->var_part[i].cur_loc
7854 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
7856 if (lc || var->var_part[i].cur_loc)
7857 var->cur_loc_changed = true;
7858 var->var_part[i].cur_loc = lc ? lc->loc : NULL_RTX;
7860 if (var->n_var_parts == 0)
7861 var->cur_loc_changed = true;
7865 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
7866 CHANGED_VARIABLES and delete this chain. WHERE specifies whether the notes
7867 shall be emitted before of after instruction INSN. */
7870 emit_notes_for_changes (rtx insn, enum emit_note_where where,
7873 emit_note_data data;
7874 htab_t htab = shared_hash_htab (vars);
7876 if (!htab_elements (changed_variables))
7879 if (MAY_HAVE_DEBUG_INSNS)
7881 /* Unfortunately this has to be done in two steps, because
7882 we can't traverse a hashtab into which we are inserting
7883 through variable_was_changed. */
7884 htab_traverse (changed_variables, check_changed_vars_1, htab);
7885 while (VEC_length (variable, changed_variables_stack) > 0)
7886 check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
7888 while (VEC_length (rtx, changed_values_stack) > 0)
7889 set_dv_changed (dv_from_value (VEC_pop (rtx, changed_values_stack)),
7891 htab_traverse (changed_variables, check_changed_vars_3, htab);
7898 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
7901 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
7902 same variable in hash table DATA or is not there at all. */
7905 emit_notes_for_differences_1 (void **slot, void *data)
7907 htab_t new_vars = (htab_t) data;
7908 variable old_var, new_var;
7910 old_var = (variable) *slot;
7911 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
7912 dv_htab_hash (old_var->dv));
7916 /* Variable has disappeared. */
7919 empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
7920 empty_var->dv = old_var->dv;
7921 empty_var->refcount = 0;
7922 empty_var->n_var_parts = 0;
7923 empty_var->cur_loc_changed = false;
7924 empty_var->in_changed_variables = false;
7925 if (dv_onepart_p (old_var->dv))
7929 gcc_assert (old_var->n_var_parts == 1);
7930 for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
7931 remove_value_chains (old_var->dv, lc->loc);
7933 variable_was_changed (empty_var, NULL);
7934 /* Continue traversing the hash table. */
7937 if (variable_different_p (old_var, new_var))
7939 if (dv_onepart_p (old_var->dv))
7941 location_chain lc1, lc2;
7943 gcc_assert (old_var->n_var_parts == 1
7944 && new_var->n_var_parts == 1);
7945 lc1 = old_var->var_part[0].loc_chain;
7946 lc2 = new_var->var_part[0].loc_chain;
7949 && ((REG_P (lc1->loc) && REG_P (lc2->loc))
7950 || rtx_equal_p (lc1->loc, lc2->loc)))
7955 for (; lc2; lc2 = lc2->next)
7956 add_value_chains (old_var->dv, lc2->loc);
7957 for (; lc1; lc1 = lc1->next)
7958 remove_value_chains (old_var->dv, lc1->loc);
7960 variable_was_changed (new_var, NULL);
7962 /* Update cur_loc. */
7963 if (old_var != new_var)
7966 for (i = 0; i < new_var->n_var_parts; i++)
7968 new_var->var_part[i].cur_loc = NULL;
7969 if (old_var->n_var_parts != new_var->n_var_parts
7970 || old_var->var_part[i].offset != new_var->var_part[i].offset)
7971 new_var->cur_loc_changed = true;
7972 else if (old_var->var_part[i].cur_loc != NULL)
7975 rtx cur_loc = old_var->var_part[i].cur_loc;
7977 for (lc = new_var->var_part[i].loc_chain; lc; lc = lc->next)
7978 if (lc->loc == cur_loc
7979 || rtx_equal_p (cur_loc, lc->loc))
7981 new_var->var_part[i].cur_loc = lc->loc;
7985 new_var->cur_loc_changed = true;
7990 /* Continue traversing the hash table. */
7994 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
7998 emit_notes_for_differences_2 (void **slot, void *data)
8000 htab_t old_vars = (htab_t) data;
8001 variable old_var, new_var;
8003 new_var = (variable) *slot;
8004 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
8005 dv_htab_hash (new_var->dv));
8009 /* Variable has appeared. */
8010 if (dv_onepart_p (new_var->dv))
8014 gcc_assert (new_var->n_var_parts == 1);
8015 for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
8016 add_value_chains (new_var->dv, lc->loc);
8018 for (i = 0; i < new_var->n_var_parts; i++)
8019 new_var->var_part[i].cur_loc = NULL;
8020 variable_was_changed (new_var, NULL);
8023 /* Continue traversing the hash table. */
8027 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
8031 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
8032 dataflow_set *new_set)
8034 htab_traverse (shared_hash_htab (old_set->vars),
8035 emit_notes_for_differences_1,
8036 shared_hash_htab (new_set->vars));
8037 htab_traverse (shared_hash_htab (new_set->vars),
8038 emit_notes_for_differences_2,
8039 shared_hash_htab (old_set->vars));
8040 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
8043 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
8046 next_non_note_insn_var_location (rtx insn)
8050 insn = NEXT_INSN (insn);
8053 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
8060 /* Emit the notes for changes of location parts in the basic block BB. */
8063 emit_notes_in_bb (basic_block bb, dataflow_set *set)
8066 micro_operation *mo;
8068 dataflow_set_clear (set);
8069 dataflow_set_copy (set, &VTI (bb)->in);
8071 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
8073 rtx insn = mo->insn;
8074 rtx next_insn = next_non_note_insn_var_location (insn);
8079 dataflow_set_clear_at_call (set);
8080 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
8082 rtx arguments = mo->u.loc, *p = &arguments, note;
8085 XEXP (XEXP (*p, 0), 1)
8086 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
8087 shared_hash_htab (set->vars), true);
8088 /* If expansion is successful, keep it in the list. */
8089 if (XEXP (XEXP (*p, 0), 1))
8091 /* Otherwise, if the following item is data_value for it,
8093 else if (XEXP (*p, 1)
8094 && REG_P (XEXP (XEXP (*p, 0), 0))
8095 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
8096 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
8098 && REGNO (XEXP (XEXP (*p, 0), 0))
8099 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
8101 *p = XEXP (XEXP (*p, 1), 1);
8102 /* Just drop this item. */
8106 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
8107 NOTE_VAR_LOCATION (note) = arguments;
8113 rtx loc = mo->u.loc;
8116 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8118 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
8120 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8126 rtx loc = mo->u.loc;
8130 if (GET_CODE (loc) == CONCAT)
8132 val = XEXP (loc, 0);
8133 vloc = XEXP (loc, 1);
8141 var = PAT_VAR_LOCATION_DECL (vloc);
8143 clobber_variable_part (set, NULL_RTX,
8144 dv_from_decl (var), 0, NULL_RTX);
8147 if (VAL_NEEDS_RESOLUTION (loc))
8148 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
8149 set_variable_part (set, val, dv_from_decl (var), 0,
8150 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8153 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
8154 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
8155 dv_from_decl (var), 0,
8156 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
8159 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8165 rtx loc = mo->u.loc;
8166 rtx val, vloc, uloc;
8168 vloc = uloc = XEXP (loc, 1);
8169 val = XEXP (loc, 0);
8171 if (GET_CODE (val) == CONCAT)
8173 uloc = XEXP (val, 1);
8174 val = XEXP (val, 0);
8177 if (VAL_NEEDS_RESOLUTION (loc))
8178 val_resolve (set, val, vloc, insn);
8180 val_store (set, val, uloc, insn, false);
8182 if (VAL_HOLDS_TRACK_EXPR (loc))
8184 if (GET_CODE (uloc) == REG)
8185 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
8187 else if (GET_CODE (uloc) == MEM)
8188 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
8192 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
8198 rtx loc = mo->u.loc;
8199 rtx val, vloc, uloc, reverse = NULL_RTX;
8202 if (VAL_EXPR_HAS_REVERSE (loc))
8204 reverse = XEXP (loc, 1);
8205 vloc = XEXP (loc, 0);
8207 uloc = XEXP (vloc, 1);
8208 val = XEXP (vloc, 0);
8211 if (GET_CODE (val) == CONCAT)
8213 vloc = XEXP (val, 1);
8214 val = XEXP (val, 0);
8217 if (GET_CODE (vloc) == SET)
8219 rtx vsrc = SET_SRC (vloc);
8221 gcc_assert (val != vsrc);
8222 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
8224 vloc = SET_DEST (vloc);
8226 if (VAL_NEEDS_RESOLUTION (loc))
8227 val_resolve (set, val, vsrc, insn);
8229 else if (VAL_NEEDS_RESOLUTION (loc))
8231 gcc_assert (GET_CODE (uloc) == SET
8232 && GET_CODE (SET_SRC (uloc)) == REG);
8233 val_resolve (set, val, SET_SRC (uloc), insn);
8236 if (VAL_HOLDS_TRACK_EXPR (loc))
8238 if (VAL_EXPR_IS_CLOBBERED (loc))
8241 var_reg_delete (set, uloc, true);
8242 else if (MEM_P (uloc))
8243 var_mem_delete (set, uloc, true);
8247 bool copied_p = VAL_EXPR_IS_COPIED (loc);
8249 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
8251 if (GET_CODE (uloc) == SET)
8253 set_src = SET_SRC (uloc);
8254 uloc = SET_DEST (uloc);
8259 status = find_src_status (set, set_src);
8261 set_src = find_src_set_src (set, set_src);
8265 var_reg_delete_and_set (set, uloc, !copied_p,
8267 else if (MEM_P (uloc))
8268 var_mem_delete_and_set (set, uloc, !copied_p,
8272 else if (REG_P (uloc))
8273 var_regno_delete (set, REGNO (uloc));
8275 val_store (set, val, vloc, insn, true);
8278 val_store (set, XEXP (reverse, 0), XEXP (reverse, 1),
8281 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8288 rtx loc = mo->u.loc;
8291 if (GET_CODE (loc) == SET)
8293 set_src = SET_SRC (loc);
8294 loc = SET_DEST (loc);
8298 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
8301 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
8304 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8311 rtx loc = mo->u.loc;
8312 enum var_init_status src_status;
8315 if (GET_CODE (loc) == SET)
8317 set_src = SET_SRC (loc);
8318 loc = SET_DEST (loc);
8321 src_status = find_src_status (set, set_src);
8322 set_src = find_src_set_src (set, set_src);
8325 var_reg_delete_and_set (set, loc, false, src_status, set_src);
8327 var_mem_delete_and_set (set, loc, false, src_status, set_src);
8329 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8336 rtx loc = mo->u.loc;
8339 var_reg_delete (set, loc, false);
8341 var_mem_delete (set, loc, false);
8343 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
8349 rtx loc = mo->u.loc;
8352 var_reg_delete (set, loc, true);
8354 var_mem_delete (set, loc, true);
8356 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
8362 set->stack_adjust += mo->u.adjust;
8368 /* Emit notes for the whole function. */
8371 vt_emit_notes (void)
8376 gcc_assert (!htab_elements (changed_variables));
8378 /* Free memory occupied by the out hash tables, as they aren't used
8381 dataflow_set_clear (&VTI (bb)->out);
8383 /* Enable emitting notes by functions (mainly by set_variable_part and
8384 delete_variable_part). */
8387 if (MAY_HAVE_DEBUG_INSNS)
8392 FOR_EACH_VEC_ELT (rtx, preserved_values, i, val)
8393 add_cselib_value_chains (dv_from_value (val));
8394 changed_variables_stack = VEC_alloc (variable, heap, 40);
8395 changed_values_stack = VEC_alloc (rtx, heap, 40);
8398 dataflow_set_init (&cur);
8402 /* Emit the notes for changes of variable locations between two
8403 subsequent basic blocks. */
8404 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
8406 /* Emit the notes for the changes in the basic block itself. */
8407 emit_notes_in_bb (bb, &cur);
8409 /* Free memory occupied by the in hash table, we won't need it
8411 dataflow_set_clear (&VTI (bb)->in);
8413 #ifdef ENABLE_CHECKING
8414 htab_traverse (shared_hash_htab (cur.vars),
8415 emit_notes_for_differences_1,
8416 shared_hash_htab (empty_shared_hash));
8417 if (MAY_HAVE_DEBUG_INSNS)
8422 FOR_EACH_VEC_ELT (rtx, preserved_values, i, val)
8423 remove_cselib_value_chains (dv_from_value (val));
8424 gcc_assert (htab_elements (value_chains) == 0);
8427 dataflow_set_destroy (&cur);
8429 if (MAY_HAVE_DEBUG_INSNS)
8431 VEC_free (variable, heap, changed_variables_stack);
8432 VEC_free (rtx, heap, changed_values_stack);
8438 /* If there is a declaration and offset associated with register/memory RTL
8439 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
8442 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
8446 if (REG_ATTRS (rtl))
8448 *declp = REG_EXPR (rtl);
8449 *offsetp = REG_OFFSET (rtl);
8453 else if (MEM_P (rtl))
8455 if (MEM_ATTRS (rtl))
8457 *declp = MEM_EXPR (rtl);
8458 *offsetp = INT_MEM_OFFSET (rtl);
8465 /* Helper function for vt_add_function_parameter. RTL is
8466 the expression and VAL corresponding cselib_val pointer
8467 for which ENTRY_VALUE should be created. */
8470 create_entry_value (rtx rtl, cselib_val *val)
8473 struct elt_loc_list *el;
8474 el = (struct elt_loc_list *) ggc_alloc_cleared_atomic (sizeof (*el));
8475 el->next = val->locs;
8476 el->loc = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
8477 ENTRY_VALUE_EXP (el->loc) = rtl;
8478 el->setting_insn = get_insns ();
8480 val2 = cselib_lookup_from_insn (el->loc, GET_MODE (rtl), true,
8481 VOIDmode, get_insns ());
8485 && rtx_equal_p (val2->locs->loc, el->loc))
8487 struct elt_loc_list *el2;
8489 preserve_value (val2);
8490 el2 = (struct elt_loc_list *) ggc_alloc_cleared_atomic (sizeof (*el2));
8491 el2->next = val2->locs;
8492 el2->loc = val->val_rtx;
8493 el2->setting_insn = get_insns ();
8498 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */
8501 vt_add_function_parameter (tree parm)
8503 rtx decl_rtl = DECL_RTL_IF_SET (parm);
8504 rtx incoming = DECL_INCOMING_RTL (parm);
8506 enum machine_mode mode;
8507 HOST_WIDE_INT offset;
8511 if (TREE_CODE (parm) != PARM_DECL)
8514 if (!decl_rtl || !incoming)
8517 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
8520 /* If there is a DRAP register, rewrite the incoming location of parameters
8521 passed on the stack into MEMs based on the argument pointer, as the DRAP
8522 register can be reused for other purposes and we do not track locations
8523 based on generic registers. But the prerequisite is that this argument
8524 pointer be also the virtual CFA pointer, see vt_initialize. */
8525 if (MEM_P (incoming)
8526 && stack_realign_drap
8527 && arg_pointer_rtx == cfa_base_rtx
8528 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
8529 || (GET_CODE (XEXP (incoming, 0)) == PLUS
8530 && XEXP (XEXP (incoming, 0), 0)
8531 == crtl->args.internal_arg_pointer
8532 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
8534 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
8535 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
8536 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
8538 = replace_equiv_address_nv (incoming,
8539 plus_constant (arg_pointer_rtx, off));
8542 #ifdef HAVE_window_save
8543 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
8544 If the target machine has an explicit window save instruction, the
8545 actual entry value is the corresponding OUTGOING_REGNO instead. */
8546 if (REG_P (incoming)
8547 && HARD_REGISTER_P (incoming)
8548 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
8551 = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
8552 p->incoming = incoming;
8554 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
8555 OUTGOING_REGNO (REGNO (incoming)), 0);
8556 p->outgoing = incoming;
8558 else if (MEM_P (incoming)
8559 && REG_P (XEXP (incoming, 0))
8560 && HARD_REGISTER_P (XEXP (incoming, 0)))
8562 rtx reg = XEXP (incoming, 0);
8563 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
8566 = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
8568 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
8570 incoming = replace_equiv_address_nv (incoming, reg);
8575 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
8577 if (REG_P (incoming) || MEM_P (incoming))
8579 /* This means argument is passed by invisible reference. */
8582 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
8586 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
8588 offset += byte_lowpart_offset (GET_MODE (incoming),
8589 GET_MODE (decl_rtl));
8598 /* Assume that DECL_RTL was a pseudo that got spilled to
8599 memory. The spill slot sharing code will force the
8600 memory to reference spill_slot_decl (%sfp), so we don't
8601 match above. That's ok, the pseudo must have referenced
8602 the entire parameter, so just reset OFFSET. */
8603 gcc_assert (decl == get_spill_slot_decl (false));
8607 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
8610 out = &VTI (ENTRY_BLOCK_PTR)->out;
8612 dv = dv_from_decl (parm);
8614 if (target_for_debug_bind (parm)
8615 /* We can't deal with these right now, because this kind of
8616 variable is single-part. ??? We could handle parallels
8617 that describe multiple locations for the same single
8618 value, but ATM we don't. */
8619 && GET_CODE (incoming) != PARALLEL)
8623 /* ??? We shouldn't ever hit this, but it may happen because
8624 arguments passed by invisible reference aren't dealt with
8625 above: incoming-rtl will have Pmode rather than the
8626 expected mode for the type. */
8630 val = cselib_lookup_from_insn (var_lowpart (mode, incoming), mode, true,
8631 VOIDmode, get_insns ());
8633 /* ??? Float-typed values in memory are not handled by
8637 preserve_value (val);
8638 set_variable_part (out, val->val_rtx, dv, offset,
8639 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8640 dv = dv_from_value (val->val_rtx);
8644 if (REG_P (incoming))
8646 incoming = var_lowpart (mode, incoming);
8647 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
8648 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
8650 set_variable_part (out, incoming, dv, offset,
8651 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8652 if (dv_is_value_p (dv))
8654 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (dv));
8655 create_entry_value (incoming, val);
8656 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
8657 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
8659 enum machine_mode indmode
8660 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
8661 rtx mem = gen_rtx_MEM (indmode, incoming);
8662 val = cselib_lookup_from_insn (mem, indmode, true,
8663 VOIDmode, get_insns ());
8666 preserve_value (val);
8667 create_entry_value (mem, val);
8672 else if (MEM_P (incoming))
8674 incoming = var_lowpart (mode, incoming);
8675 set_variable_part (out, incoming, dv, offset,
8676 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8680 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
8683 vt_add_function_parameters (void)
8687 for (parm = DECL_ARGUMENTS (current_function_decl);
8688 parm; parm = DECL_CHAIN (parm))
8689 vt_add_function_parameter (parm);
8691 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
8693 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
8695 if (TREE_CODE (vexpr) == INDIRECT_REF)
8696 vexpr = TREE_OPERAND (vexpr, 0);
8698 if (TREE_CODE (vexpr) == PARM_DECL
8699 && DECL_ARTIFICIAL (vexpr)
8700 && !DECL_IGNORED_P (vexpr)
8701 && DECL_NAMELESS (vexpr))
8702 vt_add_function_parameter (vexpr);
8706 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
8709 fp_setter (rtx insn)
8711 rtx pat = PATTERN (insn);
8712 if (RTX_FRAME_RELATED_P (insn))
8714 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
8716 pat = XEXP (expr, 0);
8718 if (GET_CODE (pat) == SET)
8719 return SET_DEST (pat) == hard_frame_pointer_rtx;
8720 else if (GET_CODE (pat) == PARALLEL)
8723 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
8724 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
8725 && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
8731 /* Gather all registers used for passing arguments to other functions
8732 called from the current routine. */
8735 note_register_arguments (rtx insn)
8739 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
8740 if (GET_CODE (XEXP (link, 0)) == USE)
8742 x = XEXP (XEXP (link, 0), 0);
8743 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
8744 SET_HARD_REG_BIT (argument_reg_set, REGNO (x));
8748 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
8749 ensure it isn't flushed during cselib_reset_table.
8750 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
8751 has been eliminated. */
8754 vt_init_cfa_base (void)
8758 #ifdef FRAME_POINTER_CFA_OFFSET
8759 cfa_base_rtx = frame_pointer_rtx;
8760 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
8762 cfa_base_rtx = arg_pointer_rtx;
8763 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
8765 if (cfa_base_rtx == hard_frame_pointer_rtx
8766 || !fixed_regs[REGNO (cfa_base_rtx)])
8768 cfa_base_rtx = NULL_RTX;
8771 if (!MAY_HAVE_DEBUG_INSNS)
8774 /* Tell alias analysis that cfa_base_rtx should share
8775 find_base_term value with stack pointer or hard frame pointer. */
8776 if (!frame_pointer_needed)
8777 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
8778 else if (!crtl->stack_realign_tried)
8779 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
8781 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
8782 VOIDmode, get_insns ());
8783 preserve_value (val);
8784 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
8785 var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,
8786 VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx),
8787 0, NULL_RTX, INSERT);
8790 /* Allocate and initialize the data structures for variable tracking
8791 and parse the RTL to get the micro operations. */
8794 vt_initialize (void)
8796 basic_block bb, prologue_bb = single_succ (ENTRY_BLOCK_PTR);
8797 HOST_WIDE_INT fp_cfa_offset = -1;
8799 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
8801 attrs_pool = create_alloc_pool ("attrs_def pool",
8802 sizeof (struct attrs_def), 1024);
8803 var_pool = create_alloc_pool ("variable_def pool",
8804 sizeof (struct variable_def)
8805 + (MAX_VAR_PARTS - 1)
8806 * sizeof (((variable)NULL)->var_part[0]), 64);
8807 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
8808 sizeof (struct location_chain_def),
8810 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
8811 sizeof (struct shared_hash_def), 256);
8812 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
8813 empty_shared_hash->refcount = 1;
8814 empty_shared_hash->htab
8815 = htab_create (1, variable_htab_hash, variable_htab_eq,
8816 variable_htab_free);
8817 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
8818 variable_htab_free);
8819 if (MAY_HAVE_DEBUG_INSNS)
8821 value_chain_pool = create_alloc_pool ("value_chain_def pool",
8822 sizeof (struct value_chain_def),
8824 value_chains = htab_create (32, value_chain_htab_hash,
8825 value_chain_htab_eq, NULL);
8828 /* Init the IN and OUT sets. */
8831 VTI (bb)->visited = false;
8832 VTI (bb)->flooded = false;
8833 dataflow_set_init (&VTI (bb)->in);
8834 dataflow_set_init (&VTI (bb)->out);
8835 VTI (bb)->permp = NULL;
8838 if (MAY_HAVE_DEBUG_INSNS)
8840 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
8841 scratch_regs = BITMAP_ALLOC (NULL);
8842 valvar_pool = create_alloc_pool ("small variable_def pool",
8843 sizeof (struct variable_def), 256);
8844 preserved_values = VEC_alloc (rtx, heap, 256);
8848 scratch_regs = NULL;
8852 CLEAR_HARD_REG_SET (argument_reg_set);
8854 /* In order to factor out the adjustments made to the stack pointer or to
8855 the hard frame pointer and thus be able to use DW_OP_fbreg operations
8856 instead of individual location lists, we're going to rewrite MEMs based
8857 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
8858 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
8859 resp. arg_pointer_rtx. We can do this either when there is no frame
8860 pointer in the function and stack adjustments are consistent for all
8861 basic blocks or when there is a frame pointer and no stack realignment.
8862 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
8863 has been eliminated. */
8864 if (!frame_pointer_needed)
8868 if (!vt_stack_adjustments ())
8871 #ifdef FRAME_POINTER_CFA_OFFSET
8872 reg = frame_pointer_rtx;
8874 reg = arg_pointer_rtx;
8876 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
8879 if (GET_CODE (elim) == PLUS)
8880 elim = XEXP (elim, 0);
8881 if (elim == stack_pointer_rtx)
8882 vt_init_cfa_base ();
8885 else if (!crtl->stack_realign_tried)
8889 #ifdef FRAME_POINTER_CFA_OFFSET
8890 reg = frame_pointer_rtx;
8891 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
8893 reg = arg_pointer_rtx;
8894 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
8896 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
8899 if (GET_CODE (elim) == PLUS)
8901 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
8902 elim = XEXP (elim, 0);
8904 if (elim != hard_frame_pointer_rtx)
8911 /* If the stack is realigned and a DRAP register is used, we're going to
8912 rewrite MEMs based on it representing incoming locations of parameters
8913 passed on the stack into MEMs based on the argument pointer. Although
8914 we aren't going to rewrite other MEMs, we still need to initialize the
8915 virtual CFA pointer in order to ensure that the argument pointer will
8916 be seen as a constant throughout the function.
8918 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
8919 else if (stack_realign_drap)
8923 #ifdef FRAME_POINTER_CFA_OFFSET
8924 reg = frame_pointer_rtx;
8926 reg = arg_pointer_rtx;
8928 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
8931 if (GET_CODE (elim) == PLUS)
8932 elim = XEXP (elim, 0);
8933 if (elim == hard_frame_pointer_rtx)
8934 vt_init_cfa_base ();
8938 if (frame_pointer_needed)
8941 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8943 note_register_arguments (insn);
8946 hard_frame_pointer_adjustment = -1;
8948 vt_add_function_parameters ();
8953 HOST_WIDE_INT pre, post = 0;
8954 basic_block first_bb, last_bb;
8956 if (MAY_HAVE_DEBUG_INSNS)
8958 cselib_record_sets_hook = add_with_sets;
8959 if (dump_file && (dump_flags & TDF_DETAILS))
8960 fprintf (dump_file, "first value: %i\n",
8961 cselib_get_next_uid ());
8968 if (bb->next_bb == EXIT_BLOCK_PTR
8969 || ! single_pred_p (bb->next_bb))
8971 e = find_edge (bb, bb->next_bb);
8972 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
8978 /* Add the micro-operations to the vector. */
8979 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
8981 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
8982 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
8983 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
8984 insn = NEXT_INSN (insn))
8988 if (!frame_pointer_needed)
8990 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
8994 mo.type = MO_ADJUST;
8997 if (dump_file && (dump_flags & TDF_DETAILS))
8998 log_op_type (PATTERN (insn), bb, insn,
8999 MO_ADJUST, dump_file);
9000 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
9002 VTI (bb)->out.stack_adjust += pre;
9006 cselib_hook_called = false;
9007 adjust_insn (bb, insn);
9008 if (MAY_HAVE_DEBUG_INSNS)
9011 prepare_call_arguments (bb, insn);
9012 cselib_process_insn (insn);
9013 if (dump_file && (dump_flags & TDF_DETAILS))
9015 print_rtl_single (dump_file, insn);
9016 dump_cselib_table (dump_file);
9019 if (!cselib_hook_called)
9020 add_with_sets (insn, 0, 0);
9023 if (!frame_pointer_needed && post)
9026 mo.type = MO_ADJUST;
9029 if (dump_file && (dump_flags & TDF_DETAILS))
9030 log_op_type (PATTERN (insn), bb, insn,
9031 MO_ADJUST, dump_file);
9032 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
9034 VTI (bb)->out.stack_adjust += post;
9037 if (bb == prologue_bb
9038 && fp_cfa_offset != -1
9039 && hard_frame_pointer_adjustment == -1
9040 && RTX_FRAME_RELATED_P (insn)
9041 && fp_setter (insn))
9043 vt_init_cfa_base ();
9044 hard_frame_pointer_adjustment = fp_cfa_offset;
9048 gcc_assert (offset == VTI (bb)->out.stack_adjust);
9053 if (MAY_HAVE_DEBUG_INSNS)
9055 cselib_preserve_only_values ();
9056 cselib_reset_table (cselib_get_next_uid ());
9057 cselib_record_sets_hook = NULL;
9061 hard_frame_pointer_adjustment = -1;
9062 VTI (ENTRY_BLOCK_PTR)->flooded = true;
9063 cfa_base_rtx = NULL_RTX;
9067 /* Get rid of all debug insns from the insn stream. */
9070 delete_debug_insns (void)
9075 if (!MAY_HAVE_DEBUG_INSNS)
9080 FOR_BB_INSNS_SAFE (bb, insn, next)
9081 if (DEBUG_INSN_P (insn))
9086 /* Run a fast, BB-local only version of var tracking, to take care of
9087 information that we don't do global analysis on, such that not all
9088 information is lost. If SKIPPED holds, we're skipping the global
9089 pass entirely, so we should try to use information it would have
9090 handled as well.. */
9093 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
9095 /* ??? Just skip it all for now. */
9096 delete_debug_insns ();
9099 /* Free the data structures needed for variable tracking. */
9108 VEC_free (micro_operation, heap, VTI (bb)->mos);
9113 dataflow_set_destroy (&VTI (bb)->in);
9114 dataflow_set_destroy (&VTI (bb)->out);
9115 if (VTI (bb)->permp)
9117 dataflow_set_destroy (VTI (bb)->permp);
9118 XDELETE (VTI (bb)->permp);
9121 free_aux_for_blocks ();
9122 htab_delete (empty_shared_hash->htab);
9123 htab_delete (changed_variables);
9124 free_alloc_pool (attrs_pool);
9125 free_alloc_pool (var_pool);
9126 free_alloc_pool (loc_chain_pool);
9127 free_alloc_pool (shared_hash_pool);
9129 if (MAY_HAVE_DEBUG_INSNS)
9131 htab_delete (value_chains);
9132 free_alloc_pool (value_chain_pool);
9133 free_alloc_pool (valvar_pool);
9134 VEC_free (rtx, heap, preserved_values);
9136 BITMAP_FREE (scratch_regs);
9137 scratch_regs = NULL;
9140 VEC_free (parm_reg_t, gc, windowed_parm_regs);
9143 XDELETEVEC (vui_vec);
9148 /* The entry point to variable tracking pass. */
9150 static inline unsigned int
9151 variable_tracking_main_1 (void)
9155 if (flag_var_tracking_assignments < 0)
9157 delete_debug_insns ();
9161 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
9163 vt_debug_insns_local (true);
9167 mark_dfs_back_edges ();
9168 if (!vt_initialize ())
9171 vt_debug_insns_local (true);
9175 success = vt_find_locations ();
9177 if (!success && flag_var_tracking_assignments > 0)
9181 delete_debug_insns ();
9183 /* This is later restored by our caller. */
9184 flag_var_tracking_assignments = 0;
9186 success = vt_initialize ();
9187 gcc_assert (success);
9189 success = vt_find_locations ();
9195 vt_debug_insns_local (false);
9199 if (dump_file && (dump_flags & TDF_DETAILS))
9201 dump_dataflow_sets ();
9202 dump_flow_info (dump_file, dump_flags);
9205 timevar_push (TV_VAR_TRACKING_EMIT);
9207 timevar_pop (TV_VAR_TRACKING_EMIT);
9210 vt_debug_insns_local (false);
9215 variable_tracking_main (void)
9218 int save = flag_var_tracking_assignments;
9220 ret = variable_tracking_main_1 ();
9222 flag_var_tracking_assignments = save;
9228 gate_handle_var_tracking (void)
9230 return (flag_var_tracking && !targetm.delay_vartrack);
9235 struct rtl_opt_pass pass_variable_tracking =
9239 "vartrack", /* name */
9240 gate_handle_var_tracking, /* gate */
9241 variable_tracking_main, /* execute */
9244 0, /* static_pass_number */
9245 TV_VAR_TRACKING, /* tv_id */
9246 0, /* properties_required */
9247 0, /* properties_provided */
9248 0, /* properties_destroyed */
9249 0, /* todo_flags_start */
9250 TODO_verify_rtl_sharing /* todo_flags_finish */