1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 < set < clobber < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offset of variables addressed using stack pointer),
48 the table of structures describing the locations of parts of a variable
49 and for each physical register a linked list for each physical register.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 effective deleting appropriate variable parts when we set or clobber the
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short so it is a good data structure here.
58 For example in the following code, register allocator may assign same
59 register to variables A and B, and both of them are stored in the same
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in RTL code. Each such a note describes
74 the location of one variable at the point in instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction, we only emit these notes where the location of variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for a large variables
85 which consist of several parts, for example long long).
91 #include "coretypes.h"
95 #include "hard-reg-set.h"
96 #include "basic-block.h"
99 #include "insn-config.h"
102 #include "alloc-pool.h"
108 #include "tree-pass.h"
109 #include "tree-flow.h"
114 #include "diagnostic.h"
115 #include "pointer-set.h"
118 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
119 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
120 Currently the value is the same as IDENTIFIER_NODE, which has such
121 a property. If this compile time assertion ever fails, make sure that
122 the new tree code that equals (int) VALUE has the same property. */
123 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
125 /* Type of micro operation. */
126 enum micro_operation_type
128 MO_USE, /* Use location (REG or MEM). */
129 MO_USE_NO_VAR,/* Use location which is not associated with a variable
130 or the variable is not trackable. */
131 MO_VAL_USE, /* Use location which is associated with a value. */
132 MO_VAL_LOC, /* Use location which appears in a debug insn. */
133 MO_VAL_SET, /* Set location associated with a value. */
134 MO_SET, /* Set location. */
135 MO_COPY, /* Copy the same portion of a variable from one
136 location to another. */
137 MO_CLOBBER, /* Clobber location. */
138 MO_CALL, /* Call insn. */
139 MO_ADJUST /* Adjust stack pointer. */
143 static const char * const ATTRIBUTE_UNUSED
144 micro_operation_type_name[] = {
157 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
158 Notes emitted as AFTER_CALL are to take effect during the call,
159 rather than after the call. */
162 EMIT_NOTE_BEFORE_INSN,
163 EMIT_NOTE_AFTER_INSN,
164 EMIT_NOTE_AFTER_CALL_INSN
167 /* Structure holding information about micro operation. */
168 typedef struct micro_operation_def
170 /* Type of micro operation. */
171 enum micro_operation_type type;
173 /* The instruction which the micro operation is in, for MO_USE,
174 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
175 instruction or note in the original flow (before any var-tracking
176 notes are inserted, to simplify emission of notes), for MO_SET
181 /* Location. For MO_SET and MO_COPY, this is the SET that
182 performs the assignment, if known, otherwise it is the target
183 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
184 CONCAT of the VALUE and the LOC associated with it. For
185 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
186 associated with it. */
189 /* Stack adjustment. */
190 HOST_WIDE_INT adjust;
194 DEF_VEC_O(micro_operation);
195 DEF_VEC_ALLOC_O(micro_operation,heap);
197 /* A declaration of a variable, or an RTL value being handled like a
199 typedef void *decl_or_value;
201 /* Structure for passing some other parameters to function
202 emit_note_insn_var_location. */
203 typedef struct emit_note_data_def
205 /* The instruction which the note will be emitted before/after. */
208 /* Where the note will be emitted (before/after insn)? */
209 enum emit_note_where where;
211 /* The variables and values active at this point. */
215 /* Description of location of a part of a variable. The content of a physical
216 register is described by a chain of these structures.
217 The chains are pretty short (usually 1 or 2 elements) and thus
218 chain is the best data structure. */
219 typedef struct attrs_def
221 /* Pointer to next member of the list. */
222 struct attrs_def *next;
224 /* The rtx of register. */
227 /* The declaration corresponding to LOC. */
230 /* Offset from start of DECL. */
231 HOST_WIDE_INT offset;
234 /* Structure holding a refcounted hash table. If refcount > 1,
235 it must be first unshared before modified. */
236 typedef struct shared_hash_def
238 /* Reference count. */
241 /* Actual hash table. */
245 /* Structure holding the IN or OUT set for a basic block. */
246 typedef struct dataflow_set_def
248 /* Adjustment of stack offset. */
249 HOST_WIDE_INT stack_adjust;
251 /* Attributes for registers (lists of attrs). */
252 attrs regs[FIRST_PSEUDO_REGISTER];
254 /* Variable locations. */
257 /* Vars that is being traversed. */
258 shared_hash traversed_vars;
261 /* The structure (one for each basic block) containing the information
262 needed for variable tracking. */
263 typedef struct variable_tracking_info_def
265 /* The vector of micro operations. */
266 VEC(micro_operation, heap) *mos;
268 /* The IN and OUT set for dataflow analysis. */
272 /* The permanent-in dataflow set for this block. This is used to
273 hold values for which we had to compute entry values. ??? This
274 should probably be dynamically allocated, to avoid using more
275 memory in non-debug builds. */
278 /* Has the block been visited in DFS? */
281 /* Has the block been flooded in VTA? */
284 } *variable_tracking_info;
286 /* Structure for chaining the locations. */
287 typedef struct location_chain_def
289 /* Next element in the chain. */
290 struct location_chain_def *next;
292 /* The location (REG, MEM or VALUE). */
295 /* The "value" stored in this location. */
299 enum var_init_status init;
302 /* Structure describing one part of variable. */
303 typedef struct variable_part_def
305 /* Chain of locations of the part. */
306 location_chain loc_chain;
308 /* Location which was last emitted to location list. */
311 /* The offset in the variable. */
312 HOST_WIDE_INT offset;
315 /* Maximum number of location parts. */
316 #define MAX_VAR_PARTS 16
318 /* Structure describing where the variable is located. */
319 typedef struct variable_def
321 /* The declaration of the variable, or an RTL value being handled
322 like a declaration. */
325 /* Reference count. */
328 /* Number of variable parts. */
331 /* True if this variable changed (any of its) cur_loc fields
332 during the current emit_notes_for_changes resp.
333 emit_notes_for_differences call. */
334 bool cur_loc_changed;
336 /* True if this variable_def struct is currently in the
337 changed_variables hash table. */
338 bool in_changed_variables;
340 /* The variable parts. */
341 variable_part var_part[1];
343 typedef const struct variable_def *const_variable;
345 /* Structure for chaining backlinks from referenced VALUEs to
346 DVs that are referencing them. */
347 typedef struct value_chain_def
349 /* Next value_chain entry. */
350 struct value_chain_def *next;
352 /* The declaration of the variable, or an RTL value
353 being handled like a declaration, whose var_parts[0].loc_chain
354 references the VALUE owning this value_chain. */
357 /* Reference count. */
360 typedef const struct value_chain_def *const_value_chain;
362 /* Pointer to the BB's information specific to variable tracking pass. */
363 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
365 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
366 #define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
368 /* Alloc pool for struct attrs_def. */
369 static alloc_pool attrs_pool;
371 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
372 static alloc_pool var_pool;
374 /* Alloc pool for struct variable_def with a single var_part entry. */
375 static alloc_pool valvar_pool;
377 /* Alloc pool for struct location_chain_def. */
378 static alloc_pool loc_chain_pool;
380 /* Alloc pool for struct shared_hash_def. */
381 static alloc_pool shared_hash_pool;
383 /* Alloc pool for struct value_chain_def. */
384 static alloc_pool value_chain_pool;
386 /* Changed variables, notes will be emitted for them. */
387 static htab_t changed_variables;
389 /* Links from VALUEs to DVs referencing them in their current loc_chains. */
390 static htab_t value_chains;
392 /* Shall notes be emitted? */
393 static bool emit_notes;
395 /* Empty shared hashtable. */
396 static shared_hash empty_shared_hash;
398 /* Scratch register bitmap used by cselib_expand_value_rtx. */
399 static bitmap scratch_regs = NULL;
401 /* Variable used to tell whether cselib_process_insn called our hook. */
402 static bool cselib_hook_called;
404 /* Local function prototypes. */
405 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
407 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
409 static bool vt_stack_adjustments (void);
410 static rtx compute_cfa_pointer (HOST_WIDE_INT);
411 static hashval_t variable_htab_hash (const void *);
412 static int variable_htab_eq (const void *, const void *);
413 static void variable_htab_free (void *);
415 static void init_attrs_list_set (attrs *);
416 static void attrs_list_clear (attrs *);
417 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
418 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
419 static void attrs_list_copy (attrs *, attrs);
420 static void attrs_list_union (attrs *, attrs);
422 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
423 enum var_init_status);
424 static int vars_copy_1 (void **, void *);
425 static void vars_copy (htab_t, htab_t);
426 static tree var_debug_decl (tree);
427 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
428 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
429 enum var_init_status, rtx);
430 static void var_reg_delete (dataflow_set *, rtx, bool);
431 static void var_regno_delete (dataflow_set *, int);
432 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
433 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
434 enum var_init_status, rtx);
435 static void var_mem_delete (dataflow_set *, rtx, bool);
437 static void dataflow_set_init (dataflow_set *);
438 static void dataflow_set_clear (dataflow_set *);
439 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
440 static int variable_union_info_cmp_pos (const void *, const void *);
441 static int variable_union (void **, void *);
442 static void dataflow_set_union (dataflow_set *, dataflow_set *);
443 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
444 static bool canon_value_cmp (rtx, rtx);
445 static int loc_cmp (rtx, rtx);
446 static bool variable_part_different_p (variable_part *, variable_part *);
447 static bool onepart_variable_different_p (variable, variable);
448 static bool variable_different_p (variable, variable);
449 static int dataflow_set_different_1 (void **, void *);
450 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
451 static void dataflow_set_destroy (dataflow_set *);
453 static bool contains_symbol_ref (rtx);
454 static bool track_expr_p (tree, bool);
455 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
456 static int add_uses (rtx *, void *);
457 static void add_uses_1 (rtx *, void *);
458 static void add_stores (rtx, const_rtx, void *);
459 static bool compute_bb_dataflow (basic_block);
460 static bool vt_find_locations (void);
462 static void dump_attrs_list (attrs);
463 static int dump_var_slot (void **, void *);
464 static void dump_var (variable);
465 static void dump_vars (htab_t);
466 static void dump_dataflow_set (dataflow_set *);
467 static void dump_dataflow_sets (void);
469 static void variable_was_changed (variable, dataflow_set *);
470 static void **set_slot_part (dataflow_set *, rtx, void **,
471 decl_or_value, HOST_WIDE_INT,
472 enum var_init_status, rtx);
473 static void set_variable_part (dataflow_set *, rtx,
474 decl_or_value, HOST_WIDE_INT,
475 enum var_init_status, rtx, enum insert_option);
476 static void **clobber_slot_part (dataflow_set *, rtx,
477 void **, HOST_WIDE_INT, rtx);
478 static void clobber_variable_part (dataflow_set *, rtx,
479 decl_or_value, HOST_WIDE_INT, rtx);
480 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
481 static void delete_variable_part (dataflow_set *, rtx,
482 decl_or_value, HOST_WIDE_INT);
483 static int emit_note_insn_var_location (void **, void *);
484 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
485 static int emit_notes_for_differences_1 (void **, void *);
486 static int emit_notes_for_differences_2 (void **, void *);
487 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
488 static void emit_notes_in_bb (basic_block, dataflow_set *);
489 static void vt_emit_notes (void);
491 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
492 static void vt_add_function_parameters (void);
493 static bool vt_initialize (void);
494 static void vt_finalize (void);
496 /* Given a SET, calculate the amount of stack adjustment it contains
497 PRE- and POST-modifying stack pointer.
498 This function is similar to stack_adjust_offset. */
501 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
504 rtx src = SET_SRC (pattern);
505 rtx dest = SET_DEST (pattern);
508 if (dest == stack_pointer_rtx)
510 /* (set (reg sp) (plus (reg sp) (const_int))) */
511 code = GET_CODE (src);
512 if (! (code == PLUS || code == MINUS)
513 || XEXP (src, 0) != stack_pointer_rtx
514 || !CONST_INT_P (XEXP (src, 1)))
518 *post += INTVAL (XEXP (src, 1));
520 *post -= INTVAL (XEXP (src, 1));
522 else if (MEM_P (dest))
524 /* (set (mem (pre_dec (reg sp))) (foo)) */
525 src = XEXP (dest, 0);
526 code = GET_CODE (src);
532 if (XEXP (src, 0) == stack_pointer_rtx)
534 rtx val = XEXP (XEXP (src, 1), 1);
535 /* We handle only adjustments by constant amount. */
536 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
539 if (code == PRE_MODIFY)
540 *pre -= INTVAL (val);
542 *post -= INTVAL (val);
548 if (XEXP (src, 0) == stack_pointer_rtx)
550 *pre += GET_MODE_SIZE (GET_MODE (dest));
556 if (XEXP (src, 0) == stack_pointer_rtx)
558 *post += GET_MODE_SIZE (GET_MODE (dest));
564 if (XEXP (src, 0) == stack_pointer_rtx)
566 *pre -= GET_MODE_SIZE (GET_MODE (dest));
572 if (XEXP (src, 0) == stack_pointer_rtx)
574 *post -= GET_MODE_SIZE (GET_MODE (dest));
585 /* Given an INSN, calculate the amount of stack adjustment it contains
586 PRE- and POST-modifying stack pointer. */
589 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
597 pattern = PATTERN (insn);
598 if (RTX_FRAME_RELATED_P (insn))
600 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
602 pattern = XEXP (expr, 0);
605 if (GET_CODE (pattern) == SET)
606 stack_adjust_offset_pre_post (pattern, pre, post);
607 else if (GET_CODE (pattern) == PARALLEL
608 || GET_CODE (pattern) == SEQUENCE)
612 /* There may be stack adjustments inside compound insns. Search
614 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
615 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
616 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
620 /* Compute stack adjustments for all blocks by traversing DFS tree.
621 Return true when the adjustments on all incoming edges are consistent.
622 Heavily borrowed from pre_and_rev_post_order_compute. */
625 vt_stack_adjustments (void)
627 edge_iterator *stack;
630 /* Initialize entry block. */
631 VTI (ENTRY_BLOCK_PTR)->visited = true;
632 VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
633 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
635 /* Allocate stack for back-tracking up CFG. */
636 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
639 /* Push the first edge on to the stack. */
640 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
648 /* Look at the edge on the top of the stack. */
650 src = ei_edge (ei)->src;
651 dest = ei_edge (ei)->dest;
653 /* Check if the edge destination has been visited yet. */
654 if (!VTI (dest)->visited)
657 HOST_WIDE_INT pre, post, offset;
658 VTI (dest)->visited = true;
659 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
661 if (dest != EXIT_BLOCK_PTR)
662 for (insn = BB_HEAD (dest);
663 insn != NEXT_INSN (BB_END (dest));
664 insn = NEXT_INSN (insn))
667 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
668 offset += pre + post;
671 VTI (dest)->out.stack_adjust = offset;
673 if (EDGE_COUNT (dest->succs) > 0)
674 /* Since the DEST node has been visited for the first
675 time, check its successors. */
676 stack[sp++] = ei_start (dest->succs);
680 /* Check whether the adjustments on the edges are the same. */
681 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
687 if (! ei_one_before_end_p (ei))
688 /* Go to the next edge. */
689 ei_next (&stack[sp - 1]);
691 /* Return to previous level if there are no more edges. */
700 /* Compute a CFA-based value for the stack pointer. */
703 compute_cfa_pointer (HOST_WIDE_INT adjustment)
707 #ifdef FRAME_POINTER_CFA_OFFSET
708 adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
709 cfa = plus_constant (frame_pointer_rtx, adjustment);
711 adjustment -= ARG_POINTER_CFA_OFFSET (current_function_decl);
712 cfa = plus_constant (arg_pointer_rtx, adjustment);
718 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
719 or -1 if the replacement shouldn't be done. */
720 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
722 /* Data for adjust_mems callback. */
724 struct adjust_mem_data
727 enum machine_mode mem_mode;
728 HOST_WIDE_INT stack_adjust;
732 /* Helper for adjust_mems. Return 1 if *loc is unsuitable for
733 transformation of wider mode arithmetics to narrower mode,
734 -1 if it is suitable and subexpressions shouldn't be
735 traversed and 0 if it is suitable and subexpressions should
736 be traversed. Called through for_each_rtx. */
739 use_narrower_mode_test (rtx *loc, void *data)
741 rtx subreg = (rtx) data;
743 if (CONSTANT_P (*loc))
745 switch (GET_CODE (*loc))
748 if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0))
756 if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
765 /* Transform X into narrower mode MODE from wider mode WMODE. */
768 use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
772 return lowpart_subreg (mode, x, wmode);
773 switch (GET_CODE (x))
776 return lowpart_subreg (mode, x, wmode);
780 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
781 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
782 return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
784 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
785 return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
791 /* Helper function for adjusting used MEMs. */
794 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
796 struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
797 rtx mem, addr = loc, tem;
798 enum machine_mode mem_mode_save;
800 switch (GET_CODE (loc))
803 /* Don't do any sp or fp replacements outside of MEM addresses. */
804 if (amd->mem_mode == VOIDmode)
806 if (loc == stack_pointer_rtx
807 && !frame_pointer_needed)
808 return compute_cfa_pointer (amd->stack_adjust);
809 else if (loc == hard_frame_pointer_rtx
810 && frame_pointer_needed
811 && hard_frame_pointer_adjustment != -1)
812 return compute_cfa_pointer (hard_frame_pointer_adjustment);
818 mem = targetm.delegitimize_address (mem);
819 if (mem != loc && !MEM_P (mem))
820 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
823 addr = XEXP (mem, 0);
824 mem_mode_save = amd->mem_mode;
825 amd->mem_mode = GET_MODE (mem);
826 store_save = amd->store;
828 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
829 amd->store = store_save;
830 amd->mem_mode = mem_mode_save;
832 addr = targetm.delegitimize_address (addr);
833 if (addr != XEXP (mem, 0))
834 mem = replace_equiv_address_nv (mem, addr);
836 mem = avoid_constant_pool_reference (mem);
840 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
841 GEN_INT (GET_CODE (loc) == PRE_INC
842 ? GET_MODE_SIZE (amd->mem_mode)
843 : -GET_MODE_SIZE (amd->mem_mode)));
847 addr = XEXP (loc, 0);
848 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
849 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
850 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
851 GEN_INT ((GET_CODE (loc) == PRE_INC
852 || GET_CODE (loc) == POST_INC)
853 ? GET_MODE_SIZE (amd->mem_mode)
854 : -GET_MODE_SIZE (amd->mem_mode)));
855 amd->side_effects = alloc_EXPR_LIST (0,
856 gen_rtx_SET (VOIDmode,
862 addr = XEXP (loc, 1);
865 addr = XEXP (loc, 0);
866 gcc_assert (amd->mem_mode != VOIDmode);
867 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
868 amd->side_effects = alloc_EXPR_LIST (0,
869 gen_rtx_SET (VOIDmode,
875 /* First try without delegitimization of whole MEMs and
876 avoid_constant_pool_reference, which is more likely to succeed. */
877 store_save = amd->store;
879 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
881 amd->store = store_save;
882 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
883 if (mem == SUBREG_REG (loc))
888 tem = simplify_gen_subreg (GET_MODE (loc), mem,
889 GET_MODE (SUBREG_REG (loc)),
893 tem = simplify_gen_subreg (GET_MODE (loc), addr,
894 GET_MODE (SUBREG_REG (loc)),
897 tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
899 if (MAY_HAVE_DEBUG_INSNS
900 && GET_CODE (tem) == SUBREG
901 && (GET_CODE (SUBREG_REG (tem)) == PLUS
902 || GET_CODE (SUBREG_REG (tem)) == MINUS
903 || GET_CODE (SUBREG_REG (tem)) == MULT
904 || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
905 && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
906 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
907 && GET_MODE_SIZE (GET_MODE (tem))
908 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
909 && subreg_lowpart_p (tem)
910 && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
911 return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
912 GET_MODE (SUBREG_REG (tem)));
920 /* Helper function for replacement of uses. */
923 adjust_mem_uses (rtx *x, void *data)
925 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
927 validate_change (NULL_RTX, x, new_x, true);
930 /* Helper function for replacement of stores. */
933 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
937 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
939 if (new_dest != SET_DEST (expr))
941 rtx xexpr = CONST_CAST_RTX (expr);
942 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
947 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
948 replace them with their value in the insn and add the side-effects
949 as other sets to the insn. */
952 adjust_insn (basic_block bb, rtx insn)
954 struct adjust_mem_data amd;
956 amd.mem_mode = VOIDmode;
957 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
958 amd.side_effects = NULL_RTX;
961 note_stores (PATTERN (insn), adjust_mem_stores, &amd);
964 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
966 /* For read-only MEMs containing some constant, prefer those
968 set = single_set (insn);
969 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
971 rtx note = find_reg_equal_equiv_note (insn);
973 if (note && CONSTANT_P (XEXP (note, 0)))
974 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
977 if (amd.side_effects)
979 rtx *pat, new_pat, s;
982 pat = &PATTERN (insn);
983 if (GET_CODE (*pat) == COND_EXEC)
984 pat = &COND_EXEC_CODE (*pat);
985 if (GET_CODE (*pat) == PARALLEL)
986 oldn = XVECLEN (*pat, 0);
989 for (s = amd.side_effects, newn = 0; s; newn++)
991 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
992 if (GET_CODE (*pat) == PARALLEL)
993 for (i = 0; i < oldn; i++)
994 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
996 XVECEXP (new_pat, 0, 0) = *pat;
997 for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
998 XVECEXP (new_pat, 0, i) = XEXP (s, 0);
999 free_EXPR_LIST_list (&amd.side_effects);
1000 validate_change (NULL_RTX, pat, new_pat, true);
1004 /* Return true if a decl_or_value DV is a DECL or NULL. */
1006 dv_is_decl_p (decl_or_value dv)
1008 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
1011 /* Return true if a decl_or_value is a VALUE rtl. */
1013 dv_is_value_p (decl_or_value dv)
1015 return dv && !dv_is_decl_p (dv);
1018 /* Return the decl in the decl_or_value. */
1020 dv_as_decl (decl_or_value dv)
1022 #ifdef ENABLE_CHECKING
1023 gcc_assert (dv_is_decl_p (dv));
1028 /* Return the value in the decl_or_value. */
1030 dv_as_value (decl_or_value dv)
1032 #ifdef ENABLE_CHECKING
1033 gcc_assert (dv_is_value_p (dv));
1038 /* Return the opaque pointer in the decl_or_value. */
1039 static inline void *
1040 dv_as_opaque (decl_or_value dv)
1045 /* Return true if a decl_or_value must not have more than one variable
1048 dv_onepart_p (decl_or_value dv)
1052 if (!MAY_HAVE_DEBUG_INSNS)
1055 if (dv_is_value_p (dv))
1058 decl = dv_as_decl (dv);
1063 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1066 return (target_for_debug_bind (decl) != NULL_TREE);
1069 /* Return the variable pool to be used for dv, depending on whether it
1070 can have multiple parts or not. */
1071 static inline alloc_pool
1072 dv_pool (decl_or_value dv)
1074 return dv_onepart_p (dv) ? valvar_pool : var_pool;
1077 /* Build a decl_or_value out of a decl. */
1078 static inline decl_or_value
1079 dv_from_decl (tree decl)
1083 #ifdef ENABLE_CHECKING
1084 gcc_assert (dv_is_decl_p (dv));
1089 /* Build a decl_or_value out of a value. */
1090 static inline decl_or_value
1091 dv_from_value (rtx value)
1095 #ifdef ENABLE_CHECKING
1096 gcc_assert (dv_is_value_p (dv));
1101 extern void debug_dv (decl_or_value dv);
1104 debug_dv (decl_or_value dv)
1106 if (dv_is_value_p (dv))
1107 debug_rtx (dv_as_value (dv));
1109 debug_generic_stmt (dv_as_decl (dv));
1112 typedef unsigned int dvuid;
1114 /* Return the uid of DV. */
1117 dv_uid (decl_or_value dv)
1119 if (dv_is_value_p (dv))
1120 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
1122 return DECL_UID (dv_as_decl (dv));
1125 /* Compute the hash from the uid. */
1127 static inline hashval_t
1128 dv_uid2hash (dvuid uid)
1133 /* The hash function for a mask table in a shared_htab chain. */
1135 static inline hashval_t
1136 dv_htab_hash (decl_or_value dv)
1138 return dv_uid2hash (dv_uid (dv));
1141 /* The hash function for variable_htab, computes the hash value
1142 from the declaration of variable X. */
1145 variable_htab_hash (const void *x)
1147 const_variable const v = (const_variable) x;
1149 return dv_htab_hash (v->dv);
1152 /* Compare the declaration of variable X with declaration Y. */
1155 variable_htab_eq (const void *x, const void *y)
1157 const_variable const v = (const_variable) x;
1158 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1160 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
1163 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1166 variable_htab_free (void *elem)
1169 variable var = (variable) elem;
1170 location_chain node, next;
1172 gcc_assert (var->refcount > 0);
1175 if (var->refcount > 0)
1178 for (i = 0; i < var->n_var_parts; i++)
1180 for (node = var->var_part[i].loc_chain; node; node = next)
1183 pool_free (loc_chain_pool, node);
1185 var->var_part[i].loc_chain = NULL;
1187 pool_free (dv_pool (var->dv), var);
1190 /* The hash function for value_chains htab, computes the hash value
1194 value_chain_htab_hash (const void *x)
1196 const_value_chain const v = (const_value_chain) x;
1198 return dv_htab_hash (v->dv);
1201 /* Compare the VALUE X with VALUE Y. */
1204 value_chain_htab_eq (const void *x, const void *y)
1206 const_value_chain const v = (const_value_chain) x;
1207 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1209 return dv_as_opaque (v->dv) == dv_as_opaque (dv);
1212 /* Initialize the set (array) SET of attrs to empty lists. */
1215 init_attrs_list_set (attrs *set)
1219 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1223 /* Make the list *LISTP empty. */
1226 attrs_list_clear (attrs *listp)
1230 for (list = *listp; list; list = next)
1233 pool_free (attrs_pool, list);
1238 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
1241 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1243 for (; list; list = list->next)
1244 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1249 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1252 attrs_list_insert (attrs *listp, decl_or_value dv,
1253 HOST_WIDE_INT offset, rtx loc)
1257 list = (attrs) pool_alloc (attrs_pool);
1260 list->offset = offset;
1261 list->next = *listp;
1265 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1268 attrs_list_copy (attrs *dstp, attrs src)
1272 attrs_list_clear (dstp);
1273 for (; src; src = src->next)
1275 n = (attrs) pool_alloc (attrs_pool);
1278 n->offset = src->offset;
1284 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1287 attrs_list_union (attrs *dstp, attrs src)
1289 for (; src; src = src->next)
1291 if (!attrs_list_member (*dstp, src->dv, src->offset))
1292 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1296 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1300 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1302 gcc_assert (!*dstp);
1303 for (; src; src = src->next)
1305 if (!dv_onepart_p (src->dv))
1306 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1308 for (src = src2; src; src = src->next)
1310 if (!dv_onepart_p (src->dv)
1311 && !attrs_list_member (*dstp, src->dv, src->offset))
1312 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1316 /* Shared hashtable support. */
1318 /* Return true if VARS is shared. */
1321 shared_hash_shared (shared_hash vars)
1323 return vars->refcount > 1;
1326 /* Return the hash table for VARS. */
1328 static inline htab_t
1329 shared_hash_htab (shared_hash vars)
1334 /* Return true if VAR is shared, or maybe because VARS is shared. */
1337 shared_var_p (variable var, shared_hash vars)
1339 /* Don't count an entry in the changed_variables table as a duplicate. */
1340 return ((var->refcount > 1 + (int) var->in_changed_variables)
1341 || shared_hash_shared (vars));
1344 /* Copy variables into a new hash table. */
1347 shared_hash_unshare (shared_hash vars)
1349 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1350 gcc_assert (vars->refcount > 1);
1351 new_vars->refcount = 1;
1353 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1354 variable_htab_eq, variable_htab_free);
1355 vars_copy (new_vars->htab, vars->htab);
1360 /* Increment reference counter on VARS and return it. */
1362 static inline shared_hash
1363 shared_hash_copy (shared_hash vars)
1369 /* Decrement reference counter and destroy hash table if not shared
1373 shared_hash_destroy (shared_hash vars)
1375 gcc_assert (vars->refcount > 0);
1376 if (--vars->refcount == 0)
1378 htab_delete (vars->htab);
1379 pool_free (shared_hash_pool, vars);
1383 /* Unshare *PVARS if shared and return slot for DV. If INS is
1384 INSERT, insert it if not already present. */
1386 static inline void **
1387 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1388 hashval_t dvhash, enum insert_option ins)
1390 if (shared_hash_shared (*pvars))
1391 *pvars = shared_hash_unshare (*pvars);
1392 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1395 static inline void **
1396 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1397 enum insert_option ins)
1399 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1402 /* Return slot for DV, if it is already present in the hash table.
1403 If it is not present, insert it only VARS is not shared, otherwise
1406 static inline void **
1407 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1409 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1410 shared_hash_shared (vars)
1411 ? NO_INSERT : INSERT);
1414 static inline void **
1415 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1417 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1420 /* Return slot for DV only if it is already present in the hash table. */
1422 static inline void **
1423 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1426 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1430 static inline void **
1431 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1433 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1436 /* Return variable for DV or NULL if not already present in the hash
1439 static inline variable
1440 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1442 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1445 static inline variable
1446 shared_hash_find (shared_hash vars, decl_or_value dv)
1448 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1451 /* Return true if TVAL is better than CVAL as a canonival value. We
1452 choose lowest-numbered VALUEs, using the RTX address as a
1453 tie-breaker. The idea is to arrange them into a star topology,
1454 such that all of them are at most one step away from the canonical
1455 value, and the canonical value has backlinks to all of them, in
1456 addition to all the actual locations. We don't enforce this
1457 topology throughout the entire dataflow analysis, though.
1461 canon_value_cmp (rtx tval, rtx cval)
1464 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
1467 static bool dst_can_be_shared;
1469 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1472 unshare_variable (dataflow_set *set, void **slot, variable var,
1473 enum var_init_status initialized)
1478 new_var = (variable) pool_alloc (dv_pool (var->dv));
1479 new_var->dv = var->dv;
1480 new_var->refcount = 1;
1482 new_var->n_var_parts = var->n_var_parts;
1483 new_var->cur_loc_changed = var->cur_loc_changed;
1484 var->cur_loc_changed = false;
1485 new_var->in_changed_variables = false;
1487 if (! flag_var_tracking_uninit)
1488 initialized = VAR_INIT_STATUS_INITIALIZED;
1490 for (i = 0; i < var->n_var_parts; i++)
1492 location_chain node;
1493 location_chain *nextp;
1495 new_var->var_part[i].offset = var->var_part[i].offset;
1496 nextp = &new_var->var_part[i].loc_chain;
1497 for (node = var->var_part[i].loc_chain; node; node = node->next)
1499 location_chain new_lc;
1501 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1502 new_lc->next = NULL;
1503 if (node->init > initialized)
1504 new_lc->init = node->init;
1506 new_lc->init = initialized;
1507 if (node->set_src && !(MEM_P (node->set_src)))
1508 new_lc->set_src = node->set_src;
1510 new_lc->set_src = NULL;
1511 new_lc->loc = node->loc;
1514 nextp = &new_lc->next;
1517 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1520 dst_can_be_shared = false;
1521 if (shared_hash_shared (set->vars))
1522 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1523 else if (set->traversed_vars && set->vars != set->traversed_vars)
1524 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1526 if (var->in_changed_variables)
1529 = htab_find_slot_with_hash (changed_variables, var->dv,
1530 dv_htab_hash (var->dv), NO_INSERT);
1531 gcc_assert (*cslot == (void *) var);
1532 var->in_changed_variables = false;
1533 variable_htab_free (var);
1535 new_var->in_changed_variables = true;
1540 /* Add a variable from *SLOT to hash table DATA and increase its reference
1544 vars_copy_1 (void **slot, void *data)
1546 htab_t dst = (htab_t) data;
1550 src = (variable) *slot;
1553 dstp = htab_find_slot_with_hash (dst, src->dv,
1554 dv_htab_hash (src->dv),
1558 /* Continue traversing the hash table. */
1562 /* Copy all variables from hash table SRC to hash table DST. */
1565 vars_copy (htab_t dst, htab_t src)
1567 htab_traverse_noresize (src, vars_copy_1, dst);
1570 /* Map a decl to its main debug decl. */
1573 var_debug_decl (tree decl)
1575 if (decl && DECL_P (decl)
1576 && DECL_DEBUG_EXPR_IS_FROM (decl))
1578 tree debugdecl = DECL_DEBUG_EXPR (decl);
1579 if (debugdecl && DECL_P (debugdecl))
1586 /* Set the register LOC to contain DV, OFFSET. */
1589 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1590 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1591 enum insert_option iopt)
1594 bool decl_p = dv_is_decl_p (dv);
1597 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1599 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1600 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1601 && node->offset == offset)
1604 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1605 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1608 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1611 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1614 tree decl = REG_EXPR (loc);
1615 HOST_WIDE_INT offset = REG_OFFSET (loc);
1617 var_reg_decl_set (set, loc, initialized,
1618 dv_from_decl (decl), offset, set_src, INSERT);
1621 static enum var_init_status
1622 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1626 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1628 if (! flag_var_tracking_uninit)
1629 return VAR_INIT_STATUS_INITIALIZED;
1631 var = shared_hash_find (set->vars, dv);
1634 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1636 location_chain nextp;
1637 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1638 if (rtx_equal_p (nextp->loc, loc))
1640 ret_val = nextp->init;
1649 /* Delete current content of register LOC in dataflow set SET and set
1650 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1651 MODIFY is true, any other live copies of the same variable part are
1652 also deleted from the dataflow set, otherwise the variable part is
1653 assumed to be copied from another location holding the same
1657 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1658 enum var_init_status initialized, rtx set_src)
1660 tree decl = REG_EXPR (loc);
1661 HOST_WIDE_INT offset = REG_OFFSET (loc);
1665 decl = var_debug_decl (decl);
1667 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1668 initialized = get_init_value (set, loc, dv_from_decl (decl));
1670 nextp = &set->regs[REGNO (loc)];
1671 for (node = *nextp; node; node = next)
1674 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1676 delete_variable_part (set, node->loc, node->dv, node->offset);
1677 pool_free (attrs_pool, node);
1683 nextp = &node->next;
1687 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1688 var_reg_set (set, loc, initialized, set_src);
1691 /* Delete the association of register LOC in dataflow set SET with any
1692 variables that aren't onepart. If CLOBBER is true, also delete any
1693 other live copies of the same variable part, and delete the
1694 association with onepart dvs too. */
1697 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1699 attrs *nextp = &set->regs[REGNO (loc)];
1704 tree decl = REG_EXPR (loc);
1705 HOST_WIDE_INT offset = REG_OFFSET (loc);
1707 decl = var_debug_decl (decl);
1709 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1712 for (node = *nextp; node; node = next)
1715 if (clobber || !dv_onepart_p (node->dv))
1717 delete_variable_part (set, node->loc, node->dv, node->offset);
1718 pool_free (attrs_pool, node);
1722 nextp = &node->next;
1726 /* Delete content of register with number REGNO in dataflow set SET. */
1729 var_regno_delete (dataflow_set *set, int regno)
1731 attrs *reg = &set->regs[regno];
1734 for (node = *reg; node; node = next)
1737 delete_variable_part (set, node->loc, node->dv, node->offset);
1738 pool_free (attrs_pool, node);
1743 /* Set the location of DV, OFFSET as the MEM LOC. */
1746 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1747 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1748 enum insert_option iopt)
1750 if (dv_is_decl_p (dv))
1751 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1753 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1756 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1758 Adjust the address first if it is stack pointer based. */
1761 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1764 tree decl = MEM_EXPR (loc);
1765 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1767 var_mem_decl_set (set, loc, initialized,
1768 dv_from_decl (decl), offset, set_src, INSERT);
1771 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1772 dataflow set SET to LOC. If MODIFY is true, any other live copies
1773 of the same variable part are also deleted from the dataflow set,
1774 otherwise the variable part is assumed to be copied from another
1775 location holding the same part.
1776 Adjust the address first if it is stack pointer based. */
1779 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1780 enum var_init_status initialized, rtx set_src)
1782 tree decl = MEM_EXPR (loc);
1783 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1785 decl = var_debug_decl (decl);
1787 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1788 initialized = get_init_value (set, loc, dv_from_decl (decl));
1791 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1792 var_mem_set (set, loc, initialized, set_src);
1795 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1796 true, also delete any other live copies of the same variable part.
1797 Adjust the address first if it is stack pointer based. */
1800 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1802 tree decl = MEM_EXPR (loc);
1803 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1805 decl = var_debug_decl (decl);
1807 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1808 delete_variable_part (set, loc, dv_from_decl (decl), offset);
1811 /* Bind a value to a location it was just stored in. If MODIFIED
1812 holds, assume the location was modified, detaching it from any
1813 values bound to it. */
1816 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
1818 cselib_val *v = CSELIB_VAL_PTR (val);
1820 gcc_assert (cselib_preserved_value_p (v));
1824 fprintf (dump_file, "%i: ", INSN_UID (insn));
1825 print_inline_rtx (dump_file, val, 0);
1826 fprintf (dump_file, " stored in ");
1827 print_inline_rtx (dump_file, loc, 0);
1830 struct elt_loc_list *l;
1831 for (l = v->locs; l; l = l->next)
1833 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
1834 print_inline_rtx (dump_file, l->loc, 0);
1837 fprintf (dump_file, "\n");
1843 var_regno_delete (set, REGNO (loc));
1844 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1845 dv_from_value (val), 0, NULL_RTX, INSERT);
1847 else if (MEM_P (loc))
1848 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1849 dv_from_value (val), 0, NULL_RTX, INSERT);
1851 set_variable_part (set, loc, dv_from_value (val), 0,
1852 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1855 /* Reset this node, detaching all its equivalences. Return the slot
1856 in the variable hash table that holds dv, if there is one. */
1859 val_reset (dataflow_set *set, decl_or_value dv)
1861 variable var = shared_hash_find (set->vars, dv) ;
1862 location_chain node;
1865 if (!var || !var->n_var_parts)
1868 gcc_assert (var->n_var_parts == 1);
1871 for (node = var->var_part[0].loc_chain; node; node = node->next)
1872 if (GET_CODE (node->loc) == VALUE
1873 && canon_value_cmp (node->loc, cval))
1876 for (node = var->var_part[0].loc_chain; node; node = node->next)
1877 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
1879 /* Redirect the equivalence link to the new canonical
1880 value, or simply remove it if it would point at
1883 set_variable_part (set, cval, dv_from_value (node->loc),
1884 0, node->init, node->set_src, NO_INSERT);
1885 delete_variable_part (set, dv_as_value (dv),
1886 dv_from_value (node->loc), 0);
1891 decl_or_value cdv = dv_from_value (cval);
1893 /* Keep the remaining values connected, accummulating links
1894 in the canonical value. */
1895 for (node = var->var_part[0].loc_chain; node; node = node->next)
1897 if (node->loc == cval)
1899 else if (GET_CODE (node->loc) == REG)
1900 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
1901 node->set_src, NO_INSERT);
1902 else if (GET_CODE (node->loc) == MEM)
1903 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
1904 node->set_src, NO_INSERT);
1906 set_variable_part (set, node->loc, cdv, 0,
1907 node->init, node->set_src, NO_INSERT);
1911 /* We remove this last, to make sure that the canonical value is not
1912 removed to the point of requiring reinsertion. */
1914 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
1916 clobber_variable_part (set, NULL, dv, 0, NULL);
1918 /* ??? Should we make sure there aren't other available values or
1919 variables whose values involve this one other than by
1920 equivalence? E.g., at the very least we should reset MEMs, those
1921 shouldn't be too hard to find cselib-looking up the value as an
1922 address, then locating the resulting value in our own hash
1926 /* Find the values in a given location and map the val to another
1927 value, if it is unique, or add the location as one holding the
1931 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
1933 decl_or_value dv = dv_from_value (val);
1935 if (dump_file && (dump_flags & TDF_DETAILS))
1938 fprintf (dump_file, "%i: ", INSN_UID (insn));
1940 fprintf (dump_file, "head: ");
1941 print_inline_rtx (dump_file, val, 0);
1942 fputs (" is at ", dump_file);
1943 print_inline_rtx (dump_file, loc, 0);
1944 fputc ('\n', dump_file);
1947 val_reset (set, dv);
1951 attrs node, found = NULL;
1953 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1954 if (dv_is_value_p (node->dv)
1955 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
1959 /* Map incoming equivalences. ??? Wouldn't it be nice if
1960 we just started sharing the location lists? Maybe a
1961 circular list ending at the value itself or some
1963 set_variable_part (set, dv_as_value (node->dv),
1964 dv_from_value (val), node->offset,
1965 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1966 set_variable_part (set, val, node->dv, node->offset,
1967 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1970 /* If we didn't find any equivalence, we need to remember that
1971 this value is held in the named register. */
1973 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1974 dv_from_value (val), 0, NULL_RTX, INSERT);
1976 else if (MEM_P (loc))
1977 /* ??? Merge equivalent MEMs. */
1978 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1979 dv_from_value (val), 0, NULL_RTX, INSERT);
1981 /* ??? Merge equivalent expressions. */
1982 set_variable_part (set, loc, dv_from_value (val), 0,
1983 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1986 /* Initialize dataflow set SET to be empty.
1987 VARS_SIZE is the initial size of hash table VARS. */
1990 dataflow_set_init (dataflow_set *set)
1992 init_attrs_list_set (set->regs);
1993 set->vars = shared_hash_copy (empty_shared_hash);
1994 set->stack_adjust = 0;
1995 set->traversed_vars = NULL;
1998 /* Delete the contents of dataflow set SET. */
2001 dataflow_set_clear (dataflow_set *set)
2005 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2006 attrs_list_clear (&set->regs[i]);
2008 shared_hash_destroy (set->vars);
2009 set->vars = shared_hash_copy (empty_shared_hash);
2012 /* Copy the contents of dataflow set SRC to DST. */
2015 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2019 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2020 attrs_list_copy (&dst->regs[i], src->regs[i]);
2022 shared_hash_destroy (dst->vars);
2023 dst->vars = shared_hash_copy (src->vars);
2024 dst->stack_adjust = src->stack_adjust;
2027 /* Information for merging lists of locations for a given offset of variable.
2029 struct variable_union_info
2031 /* Node of the location chain. */
2034 /* The sum of positions in the input chains. */
2037 /* The position in the chain of DST dataflow set. */
2041 /* Buffer for location list sorting and its allocated size. */
2042 static struct variable_union_info *vui_vec;
2043 static int vui_allocated;
2045 /* Compare function for qsort, order the structures by POS element. */
2048 variable_union_info_cmp_pos (const void *n1, const void *n2)
2050 const struct variable_union_info *const i1 =
2051 (const struct variable_union_info *) n1;
2052 const struct variable_union_info *const i2 =
2053 ( const struct variable_union_info *) n2;
2055 if (i1->pos != i2->pos)
2056 return i1->pos - i2->pos;
2058 return (i1->pos_dst - i2->pos_dst);
2061 /* Compute union of location parts of variable *SLOT and the same variable
2062 from hash table DATA. Compute "sorted" union of the location chains
2063 for common offsets, i.e. the locations of a variable part are sorted by
2064 a priority where the priority is the sum of the positions in the 2 chains
2065 (if a location is only in one list the position in the second list is
2066 defined to be larger than the length of the chains).
2067 When we are updating the location parts the newest location is in the
2068 beginning of the chain, so when we do the described "sorted" union
2069 we keep the newest locations in the beginning. */
2072 variable_union (void **slot, void *data)
2076 dataflow_set *set = (dataflow_set *) data;
2079 src = (variable) *slot;
2080 dstp = shared_hash_find_slot (set->vars, src->dv);
2081 if (!dstp || !*dstp)
2085 dst_can_be_shared = false;
2087 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2091 /* Continue traversing the hash table. */
2095 dst = (variable) *dstp;
2097 gcc_assert (src->n_var_parts);
2099 /* We can combine one-part variables very efficiently, because their
2100 entries are in canonical order. */
2101 if (dv_onepart_p (src->dv))
2103 location_chain *nodep, dnode, snode;
2105 gcc_assert (src->n_var_parts == 1);
2106 gcc_assert (dst->n_var_parts == 1);
2108 snode = src->var_part[0].loc_chain;
2111 restart_onepart_unshared:
2112 nodep = &dst->var_part[0].loc_chain;
2118 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2122 location_chain nnode;
2124 if (shared_var_p (dst, set->vars))
2126 dstp = unshare_variable (set, dstp, dst,
2127 VAR_INIT_STATUS_INITIALIZED);
2128 dst = (variable)*dstp;
2129 goto restart_onepart_unshared;
2132 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2133 nnode->loc = snode->loc;
2134 nnode->init = snode->init;
2135 if (!snode->set_src || MEM_P (snode->set_src))
2136 nnode->set_src = NULL;
2138 nnode->set_src = snode->set_src;
2139 nnode->next = dnode;
2142 #ifdef ENABLE_CHECKING
2144 gcc_assert (rtx_equal_p (dnode->loc, snode->loc));
2148 snode = snode->next;
2150 nodep = &dnode->next;
2157 /* Count the number of location parts, result is K. */
2158 for (i = 0, j = 0, k = 0;
2159 i < src->n_var_parts && j < dst->n_var_parts; k++)
2161 if (src->var_part[i].offset == dst->var_part[j].offset)
2166 else if (src->var_part[i].offset < dst->var_part[j].offset)
2171 k += src->n_var_parts - i;
2172 k += dst->n_var_parts - j;
2174 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2175 thus there are at most MAX_VAR_PARTS different offsets. */
2176 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
2178 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2180 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2181 dst = (variable)*dstp;
2184 i = src->n_var_parts - 1;
2185 j = dst->n_var_parts - 1;
2186 dst->n_var_parts = k;
2188 for (k--; k >= 0; k--)
2190 location_chain node, node2;
2192 if (i >= 0 && j >= 0
2193 && src->var_part[i].offset == dst->var_part[j].offset)
2195 /* Compute the "sorted" union of the chains, i.e. the locations which
2196 are in both chains go first, they are sorted by the sum of
2197 positions in the chains. */
2200 struct variable_union_info *vui;
2202 /* If DST is shared compare the location chains.
2203 If they are different we will modify the chain in DST with
2204 high probability so make a copy of DST. */
2205 if (shared_var_p (dst, set->vars))
2207 for (node = src->var_part[i].loc_chain,
2208 node2 = dst->var_part[j].loc_chain; node && node2;
2209 node = node->next, node2 = node2->next)
2211 if (!((REG_P (node2->loc)
2212 && REG_P (node->loc)
2213 && REGNO (node2->loc) == REGNO (node->loc))
2214 || rtx_equal_p (node2->loc, node->loc)))
2216 if (node2->init < node->init)
2217 node2->init = node->init;
2223 dstp = unshare_variable (set, dstp, dst,
2224 VAR_INIT_STATUS_UNKNOWN);
2225 dst = (variable)*dstp;
2230 for (node = src->var_part[i].loc_chain; node; node = node->next)
2233 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2238 /* The most common case, much simpler, no qsort is needed. */
2239 location_chain dstnode = dst->var_part[j].loc_chain;
2240 dst->var_part[k].loc_chain = dstnode;
2241 dst->var_part[k].offset = dst->var_part[j].offset;
2243 for (node = src->var_part[i].loc_chain; node; node = node->next)
2244 if (!((REG_P (dstnode->loc)
2245 && REG_P (node->loc)
2246 && REGNO (dstnode->loc) == REGNO (node->loc))
2247 || rtx_equal_p (dstnode->loc, node->loc)))
2249 location_chain new_node;
2251 /* Copy the location from SRC. */
2252 new_node = (location_chain) pool_alloc (loc_chain_pool);
2253 new_node->loc = node->loc;
2254 new_node->init = node->init;
2255 if (!node->set_src || MEM_P (node->set_src))
2256 new_node->set_src = NULL;
2258 new_node->set_src = node->set_src;
2259 node2->next = new_node;
2266 if (src_l + dst_l > vui_allocated)
2268 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2269 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2274 /* Fill in the locations from DST. */
2275 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2276 node = node->next, jj++)
2279 vui[jj].pos_dst = jj;
2281 /* Pos plus value larger than a sum of 2 valid positions. */
2282 vui[jj].pos = jj + src_l + dst_l;
2285 /* Fill in the locations from SRC. */
2287 for (node = src->var_part[i].loc_chain, ii = 0; node;
2288 node = node->next, ii++)
2290 /* Find location from NODE. */
2291 for (jj = 0; jj < dst_l; jj++)
2293 if ((REG_P (vui[jj].lc->loc)
2294 && REG_P (node->loc)
2295 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2296 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2298 vui[jj].pos = jj + ii;
2302 if (jj >= dst_l) /* The location has not been found. */
2304 location_chain new_node;
2306 /* Copy the location from SRC. */
2307 new_node = (location_chain) pool_alloc (loc_chain_pool);
2308 new_node->loc = node->loc;
2309 new_node->init = node->init;
2310 if (!node->set_src || MEM_P (node->set_src))
2311 new_node->set_src = NULL;
2313 new_node->set_src = node->set_src;
2314 vui[n].lc = new_node;
2315 vui[n].pos_dst = src_l + dst_l;
2316 vui[n].pos = ii + src_l + dst_l;
2323 /* Special case still very common case. For dst_l == 2
2324 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2325 vui[i].pos == i + src_l + dst_l. */
2326 if (vui[0].pos > vui[1].pos)
2328 /* Order should be 1, 0, 2... */
2329 dst->var_part[k].loc_chain = vui[1].lc;
2330 vui[1].lc->next = vui[0].lc;
2333 vui[0].lc->next = vui[2].lc;
2334 vui[n - 1].lc->next = NULL;
2337 vui[0].lc->next = NULL;
2342 dst->var_part[k].loc_chain = vui[0].lc;
2343 if (n >= 3 && vui[2].pos < vui[1].pos)
2345 /* Order should be 0, 2, 1, 3... */
2346 vui[0].lc->next = vui[2].lc;
2347 vui[2].lc->next = vui[1].lc;
2350 vui[1].lc->next = vui[3].lc;
2351 vui[n - 1].lc->next = NULL;
2354 vui[1].lc->next = NULL;
2359 /* Order should be 0, 1, 2... */
2361 vui[n - 1].lc->next = NULL;
2364 for (; ii < n; ii++)
2365 vui[ii - 1].lc->next = vui[ii].lc;
2369 qsort (vui, n, sizeof (struct variable_union_info),
2370 variable_union_info_cmp_pos);
2372 /* Reconnect the nodes in sorted order. */
2373 for (ii = 1; ii < n; ii++)
2374 vui[ii - 1].lc->next = vui[ii].lc;
2375 vui[n - 1].lc->next = NULL;
2376 dst->var_part[k].loc_chain = vui[0].lc;
2379 dst->var_part[k].offset = dst->var_part[j].offset;
2384 else if ((i >= 0 && j >= 0
2385 && src->var_part[i].offset < dst->var_part[j].offset)
2388 dst->var_part[k] = dst->var_part[j];
2391 else if ((i >= 0 && j >= 0
2392 && src->var_part[i].offset > dst->var_part[j].offset)
2395 location_chain *nextp;
2397 /* Copy the chain from SRC. */
2398 nextp = &dst->var_part[k].loc_chain;
2399 for (node = src->var_part[i].loc_chain; node; node = node->next)
2401 location_chain new_lc;
2403 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2404 new_lc->next = NULL;
2405 new_lc->init = node->init;
2406 if (!node->set_src || MEM_P (node->set_src))
2407 new_lc->set_src = NULL;
2409 new_lc->set_src = node->set_src;
2410 new_lc->loc = node->loc;
2413 nextp = &new_lc->next;
2416 dst->var_part[k].offset = src->var_part[i].offset;
2419 dst->var_part[k].cur_loc = NULL;
2422 if (flag_var_tracking_uninit)
2423 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2425 location_chain node, node2;
2426 for (node = src->var_part[i].loc_chain; node; node = node->next)
2427 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2428 if (rtx_equal_p (node->loc, node2->loc))
2430 if (node->init > node2->init)
2431 node2->init = node->init;
2435 /* Continue traversing the hash table. */
2439 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2442 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2446 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2447 attrs_list_union (&dst->regs[i], src->regs[i]);
2449 if (dst->vars == empty_shared_hash)
2451 shared_hash_destroy (dst->vars);
2452 dst->vars = shared_hash_copy (src->vars);
2455 htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
2458 /* Whether the value is currently being expanded. */
2459 #define VALUE_RECURSED_INTO(x) \
2460 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2461 /* Whether the value is in changed_variables hash table. */
2462 #define VALUE_CHANGED(x) \
2463 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2464 /* Whether the decl is in changed_variables hash table. */
2465 #define DECL_CHANGED(x) TREE_VISITED (x)
2467 /* Record that DV has been added into resp. removed from changed_variables
2471 set_dv_changed (decl_or_value dv, bool newv)
2473 if (dv_is_value_p (dv))
2474 VALUE_CHANGED (dv_as_value (dv)) = newv;
2476 DECL_CHANGED (dv_as_decl (dv)) = newv;
2479 /* Return true if DV is present in changed_variables hash table. */
2482 dv_changed_p (decl_or_value dv)
2484 return (dv_is_value_p (dv)
2485 ? VALUE_CHANGED (dv_as_value (dv))
2486 : DECL_CHANGED (dv_as_decl (dv)));
2489 /* Return a location list node whose loc is rtx_equal to LOC, in the
2490 location list of a one-part variable or value VAR, or in that of
2491 any values recursively mentioned in the location lists. */
2493 static location_chain
2494 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2496 location_chain node;
2501 gcc_assert (dv_onepart_p (var->dv));
2503 if (!var->n_var_parts)
2506 gcc_assert (var->var_part[0].offset == 0);
2508 for (node = var->var_part[0].loc_chain; node; node = node->next)
2509 if (rtx_equal_p (loc, node->loc))
2511 else if (GET_CODE (node->loc) == VALUE
2512 && !VALUE_RECURSED_INTO (node->loc))
2514 decl_or_value dv = dv_from_value (node->loc);
2515 variable var = (variable)
2516 htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2520 location_chain where;
2521 VALUE_RECURSED_INTO (node->loc) = true;
2522 if ((where = find_loc_in_1pdv (loc, var, vars)))
2524 VALUE_RECURSED_INTO (node->loc) = false;
2527 VALUE_RECURSED_INTO (node->loc) = false;
2534 /* Hash table iteration argument passed to variable_merge. */
2537 /* The set in which the merge is to be inserted. */
2539 /* The set that we're iterating in. */
2541 /* The set that may contain the other dv we are to merge with. */
2543 /* Number of onepart dvs in src. */
2544 int src_onepart_cnt;
2547 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
2548 loc_cmp order, and it is maintained as such. */
2551 insert_into_intersection (location_chain *nodep, rtx loc,
2552 enum var_init_status status)
2554 location_chain node;
2557 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2558 if ((r = loc_cmp (node->loc, loc)) == 0)
2560 node->init = MIN (node->init, status);
2566 node = (location_chain) pool_alloc (loc_chain_pool);
2569 node->set_src = NULL;
2570 node->init = status;
2571 node->next = *nodep;
2575 /* Insert in DEST the intersection the locations present in both
2576 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2577 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
2581 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2582 location_chain s1node, variable s2var)
2584 dataflow_set *s1set = dsm->cur;
2585 dataflow_set *s2set = dsm->src;
2586 location_chain found;
2588 for (; s1node; s1node = s1node->next)
2590 if (s1node->loc == val)
2593 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2594 shared_hash_htab (s2set->vars))))
2596 insert_into_intersection (dest, s1node->loc,
2597 MIN (s1node->init, found->init));
2601 if (GET_CODE (s1node->loc) == VALUE
2602 && !VALUE_RECURSED_INTO (s1node->loc))
2604 decl_or_value dv = dv_from_value (s1node->loc);
2605 variable svar = shared_hash_find (s1set->vars, dv);
2608 if (svar->n_var_parts == 1)
2610 VALUE_RECURSED_INTO (s1node->loc) = true;
2611 intersect_loc_chains (val, dest, dsm,
2612 svar->var_part[0].loc_chain,
2614 VALUE_RECURSED_INTO (s1node->loc) = false;
2619 /* ??? if the location is equivalent to any location in src,
2620 searched recursively
2622 add to dst the values needed to represent the equivalence
2624 telling whether locations S is equivalent to another dv's
2627 for each location D in the list
2629 if S and D satisfy rtx_equal_p, then it is present
2631 else if D is a value, recurse without cycles
2633 else if S and D have the same CODE and MODE
2635 for each operand oS and the corresponding oD
2637 if oS and oD are not equivalent, then S an D are not equivalent
2639 else if they are RTX vectors
2641 if any vector oS element is not equivalent to its respective oD,
2642 then S and D are not equivalent
2650 /* Return -1 if X should be before Y in a location list for a 1-part
2651 variable, 1 if Y should be before X, and 0 if they're equivalent
2652 and should not appear in the list. */
2655 loc_cmp (rtx x, rtx y)
2658 RTX_CODE code = GET_CODE (x);
2668 gcc_assert (GET_MODE (x) == GET_MODE (y));
2669 if (REGNO (x) == REGNO (y))
2671 else if (REGNO (x) < REGNO (y))
2684 gcc_assert (GET_MODE (x) == GET_MODE (y));
2685 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
2691 if (GET_CODE (x) == VALUE)
2693 if (GET_CODE (y) != VALUE)
2695 /* Don't assert the modes are the same, that is true only
2696 when not recursing. (subreg:QI (value:SI 1:1) 0)
2697 and (subreg:QI (value:DI 2:2) 0) can be compared,
2698 even when the modes are different. */
2699 if (canon_value_cmp (x, y))
2705 if (GET_CODE (y) == VALUE)
2708 if (GET_CODE (x) == GET_CODE (y))
2709 /* Compare operands below. */;
2710 else if (GET_CODE (x) < GET_CODE (y))
2715 gcc_assert (GET_MODE (x) == GET_MODE (y));
2717 if (GET_CODE (x) == DEBUG_EXPR)
2719 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2720 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
2722 #ifdef ENABLE_CHECKING
2723 gcc_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2724 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
2729 fmt = GET_RTX_FORMAT (code);
2730 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2734 if (XWINT (x, i) == XWINT (y, i))
2736 else if (XWINT (x, i) < XWINT (y, i))
2743 if (XINT (x, i) == XINT (y, i))
2745 else if (XINT (x, i) < XINT (y, i))
2752 /* Compare the vector length first. */
2753 if (XVECLEN (x, i) == XVECLEN (y, i))
2754 /* Compare the vectors elements. */;
2755 else if (XVECLEN (x, i) < XVECLEN (y, i))
2760 for (j = 0; j < XVECLEN (x, i); j++)
2761 if ((r = loc_cmp (XVECEXP (x, i, j),
2762 XVECEXP (y, i, j))))
2767 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
2773 if (XSTR (x, i) == XSTR (y, i))
2779 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2787 /* These are just backpointers, so they don't matter. */
2794 /* It is believed that rtx's at this level will never
2795 contain anything but integers and other rtx's,
2796 except for within LABEL_REFs and SYMBOL_REFs. */
2804 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2805 from VALUE to DVP. */
2808 add_value_chain (rtx *loc, void *dvp)
2810 decl_or_value dv, ldv;
2811 value_chain vc, nvc;
2814 if (GET_CODE (*loc) == VALUE)
2815 ldv = dv_from_value (*loc);
2816 else if (GET_CODE (*loc) == DEBUG_EXPR)
2817 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
2821 if (dv_as_opaque (ldv) == dvp)
2824 dv = (decl_or_value) dvp;
2825 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
2829 vc = (value_chain) pool_alloc (value_chain_pool);
2833 *slot = (void *) vc;
2837 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2838 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
2846 vc = (value_chain) *slot;
2847 nvc = (value_chain) pool_alloc (value_chain_pool);
2849 nvc->next = vc->next;
2855 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2856 from those VALUEs to DVP. */
2859 add_value_chains (decl_or_value dv, rtx loc)
2861 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
2863 add_value_chain (&loc, dv_as_opaque (dv));
2869 loc = XEXP (loc, 0);
2870 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
2873 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those
2874 VALUEs to DV. Add the same time get rid of ASM_OPERANDS from locs list,
2875 that is something we never can express in .debug_info and can prevent
2876 reverse ops from being used. */
2879 add_cselib_value_chains (decl_or_value dv)
2881 struct elt_loc_list **l;
2883 for (l = &CSELIB_VAL_PTR (dv_as_value (dv))->locs; *l;)
2884 if (GET_CODE ((*l)->loc) == ASM_OPERANDS)
2888 for_each_rtx (&(*l)->loc, add_value_chain, dv_as_opaque (dv));
2893 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
2894 from VALUE to DVP. */
2897 remove_value_chain (rtx *loc, void *dvp)
2899 decl_or_value dv, ldv;
2903 if (GET_CODE (*loc) == VALUE)
2904 ldv = dv_from_value (*loc);
2905 else if (GET_CODE (*loc) == DEBUG_EXPR)
2906 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
2910 if (dv_as_opaque (ldv) == dvp)
2913 dv = (decl_or_value) dvp;
2914 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
2916 for (vc = (value_chain) *slot; vc->next; vc = vc->next)
2917 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
2919 value_chain dvc = vc->next;
2920 gcc_assert (dvc->refcount > 0);
2921 if (--dvc->refcount == 0)
2923 vc->next = dvc->next;
2924 pool_free (value_chain_pool, dvc);
2925 if (vc->next == NULL && vc == (value_chain) *slot)
2927 pool_free (value_chain_pool, vc);
2928 htab_clear_slot (value_chains, slot);
2936 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
2937 from those VALUEs to DVP. */
2940 remove_value_chains (decl_or_value dv, rtx loc)
2942 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
2944 remove_value_chain (&loc, dv_as_opaque (dv));
2950 loc = XEXP (loc, 0);
2951 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
2955 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, remove backlinks from those
2959 remove_cselib_value_chains (decl_or_value dv)
2961 struct elt_loc_list *l;
2963 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2964 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
2967 /* Check the order of entries in one-part variables. */
2970 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
2972 variable var = (variable) *slot;
2973 decl_or_value dv = var->dv;
2974 location_chain node, next;
2976 #ifdef ENABLE_RTL_CHECKING
2978 for (i = 0; i < var->n_var_parts; i++)
2979 gcc_assert (var->var_part[0].cur_loc == NULL);
2980 gcc_assert (!var->cur_loc_changed && !var->in_changed_variables);
2983 if (!dv_onepart_p (dv))
2986 gcc_assert (var->n_var_parts == 1);
2987 node = var->var_part[0].loc_chain;
2990 while ((next = node->next))
2992 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3000 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3001 more likely to be chosen as canonical for an equivalence set.
3002 Ensure less likely values can reach more likely neighbors, making
3003 the connections bidirectional. */
3006 canonicalize_values_mark (void **slot, void *data)
3008 dataflow_set *set = (dataflow_set *)data;
3009 variable var = (variable) *slot;
3010 decl_or_value dv = var->dv;
3012 location_chain node;
3014 if (!dv_is_value_p (dv))
3017 gcc_assert (var->n_var_parts == 1);
3019 val = dv_as_value (dv);
3021 for (node = var->var_part[0].loc_chain; node; node = node->next)
3022 if (GET_CODE (node->loc) == VALUE)
3024 if (canon_value_cmp (node->loc, val))
3025 VALUE_RECURSED_INTO (val) = true;
3028 decl_or_value odv = dv_from_value (node->loc);
3029 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3031 oslot = set_slot_part (set, val, oslot, odv, 0,
3032 node->init, NULL_RTX);
3034 VALUE_RECURSED_INTO (node->loc) = true;
3041 /* Remove redundant entries from equivalence lists in onepart
3042 variables, canonicalizing equivalence sets into star shapes. */
3045 canonicalize_values_star (void **slot, void *data)
3047 dataflow_set *set = (dataflow_set *)data;
3048 variable var = (variable) *slot;
3049 decl_or_value dv = var->dv;
3050 location_chain node;
3057 if (!dv_onepart_p (dv))
3060 gcc_assert (var->n_var_parts == 1);
3062 if (dv_is_value_p (dv))
3064 cval = dv_as_value (dv);
3065 if (!VALUE_RECURSED_INTO (cval))
3067 VALUE_RECURSED_INTO (cval) = false;
3077 gcc_assert (var->n_var_parts == 1);
3079 for (node = var->var_part[0].loc_chain; node; node = node->next)
3080 if (GET_CODE (node->loc) == VALUE)
3083 if (VALUE_RECURSED_INTO (node->loc))
3085 if (canon_value_cmp (node->loc, cval))
3094 if (!has_marks || dv_is_decl_p (dv))
3097 /* Keep it marked so that we revisit it, either after visiting a
3098 child node, or after visiting a new parent that might be
3100 VALUE_RECURSED_INTO (val) = true;
3102 for (node = var->var_part[0].loc_chain; node; node = node->next)
3103 if (GET_CODE (node->loc) == VALUE
3104 && VALUE_RECURSED_INTO (node->loc))
3108 VALUE_RECURSED_INTO (cval) = false;
3109 dv = dv_from_value (cval);
3110 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3113 gcc_assert (dv_is_decl_p (var->dv));
3114 /* The canonical value was reset and dropped.
3116 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3119 var = (variable)*slot;
3120 gcc_assert (dv_is_value_p (var->dv));
3121 if (var->n_var_parts == 0)
3123 gcc_assert (var->n_var_parts == 1);
3127 VALUE_RECURSED_INTO (val) = false;
3132 /* Push values to the canonical one. */
3133 cdv = dv_from_value (cval);
3134 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3136 for (node = var->var_part[0].loc_chain; node; node = node->next)
3137 if (node->loc != cval)
3139 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3140 node->init, NULL_RTX);
3141 if (GET_CODE (node->loc) == VALUE)
3143 decl_or_value ndv = dv_from_value (node->loc);
3145 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3148 if (canon_value_cmp (node->loc, val))
3150 /* If it could have been a local minimum, it's not any more,
3151 since it's now neighbor to cval, so it may have to push
3152 to it. Conversely, if it wouldn't have prevailed over
3153 val, then whatever mark it has is fine: if it was to
3154 push, it will now push to a more canonical node, but if
3155 it wasn't, then it has already pushed any values it might
3157 VALUE_RECURSED_INTO (node->loc) = true;
3158 /* Make sure we visit node->loc by ensuring we cval is
3160 VALUE_RECURSED_INTO (cval) = true;
3162 else if (!VALUE_RECURSED_INTO (node->loc))
3163 /* If we have no need to "recurse" into this node, it's
3164 already "canonicalized", so drop the link to the old
3166 clobber_variable_part (set, cval, ndv, 0, NULL);
3168 else if (GET_CODE (node->loc) == REG)
3170 attrs list = set->regs[REGNO (node->loc)], *listp;
3172 /* Change an existing attribute referring to dv so that it
3173 refers to cdv, removing any duplicate this might
3174 introduce, and checking that no previous duplicates
3175 existed, all in a single pass. */
3179 if (list->offset == 0
3180 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3181 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3188 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3191 for (listp = &list->next; (list = *listp); listp = &list->next)
3196 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3198 *listp = list->next;
3199 pool_free (attrs_pool, list);
3204 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3207 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3209 for (listp = &list->next; (list = *listp); listp = &list->next)
3214 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3216 *listp = list->next;
3217 pool_free (attrs_pool, list);
3222 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3231 if (list->offset == 0
3232 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3233 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3243 cslot = set_slot_part (set, val, cslot, cdv, 0,
3244 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3246 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3248 /* Variable may have been unshared. */
3249 var = (variable)*slot;
3250 gcc_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3251 && var->var_part[0].loc_chain->next == NULL);
3253 if (VALUE_RECURSED_INTO (cval))
3254 goto restart_with_cval;
3259 /* Bind one-part variables to the canonical value in an equivalence
3260 set. Not doing this causes dataflow convergence failure in rare
3261 circumstances, see PR42873. Unfortunately we can't do this
3262 efficiently as part of canonicalize_values_star, since we may not
3263 have determined or even seen the canonical value of a set when we
3264 get to a variable that references another member of the set. */
3267 canonicalize_vars_star (void **slot, void *data)
3269 dataflow_set *set = (dataflow_set *)data;
3270 variable var = (variable) *slot;
3271 decl_or_value dv = var->dv;
3272 location_chain node;
3277 location_chain cnode;
3279 if (!dv_onepart_p (dv) || dv_is_value_p (dv))
3282 gcc_assert (var->n_var_parts == 1);
3284 node = var->var_part[0].loc_chain;
3286 if (GET_CODE (node->loc) != VALUE)
3289 gcc_assert (!node->next);
3292 /* Push values to the canonical one. */
3293 cdv = dv_from_value (cval);
3294 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3297 cvar = (variable)*cslot;
3298 gcc_assert (cvar->n_var_parts == 1);
3300 cnode = cvar->var_part[0].loc_chain;
3302 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3303 that are not “more canonical” than it. */
3304 if (GET_CODE (cnode->loc) != VALUE
3305 || !canon_value_cmp (cnode->loc, cval))
3308 /* CVAL was found to be non-canonical. Change the variable to point
3309 to the canonical VALUE. */
3310 gcc_assert (!cnode->next);
3313 slot = set_slot_part (set, cval, slot, dv, 0,
3314 node->init, node->set_src);
3315 slot = clobber_slot_part (set, cval, slot, 0, node->set_src);
3320 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3321 corresponding entry in DSM->src. Multi-part variables are combined
3322 with variable_union, whereas onepart dvs are combined with
3326 variable_merge_over_cur (void **s1slot, void *data)
3328 struct dfset_merge *dsm = (struct dfset_merge *)data;
3329 dataflow_set *dst = dsm->dst;
3331 variable s1var = (variable) *s1slot;
3332 variable s2var, dvar = NULL;
3333 decl_or_value dv = s1var->dv;
3334 bool onepart = dv_onepart_p (dv);
3337 location_chain node, *nodep;
3339 /* If the incoming onepart variable has an empty location list, then
3340 the intersection will be just as empty. For other variables,
3341 it's always union. */
3342 gcc_assert (s1var->n_var_parts);
3343 gcc_assert (s1var->var_part[0].loc_chain);
3346 return variable_union (s1slot, dst);
3348 gcc_assert (s1var->n_var_parts == 1);
3349 gcc_assert (s1var->var_part[0].offset == 0);
3351 dvhash = dv_htab_hash (dv);
3352 if (dv_is_value_p (dv))
3353 val = dv_as_value (dv);
3357 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3360 dst_can_be_shared = false;
3364 dsm->src_onepart_cnt--;
3365 gcc_assert (s2var->var_part[0].loc_chain);
3366 gcc_assert (s2var->n_var_parts == 1);
3367 gcc_assert (s2var->var_part[0].offset == 0);
3369 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3372 dvar = (variable)*dstslot;
3373 gcc_assert (dvar->refcount == 1);
3374 gcc_assert (dvar->n_var_parts == 1);
3375 gcc_assert (dvar->var_part[0].offset == 0);
3376 nodep = &dvar->var_part[0].loc_chain;
3384 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3386 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3388 *dstslot = dvar = s2var;
3393 dst_can_be_shared = false;
3395 intersect_loc_chains (val, nodep, dsm,
3396 s1var->var_part[0].loc_chain, s2var);
3402 dvar = (variable) pool_alloc (dv_pool (dv));
3405 dvar->n_var_parts = 1;
3406 dvar->cur_loc_changed = false;
3407 dvar->in_changed_variables = false;
3408 dvar->var_part[0].offset = 0;
3409 dvar->var_part[0].loc_chain = node;
3410 dvar->var_part[0].cur_loc = NULL;
3413 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3415 gcc_assert (!*dstslot);
3423 nodep = &dvar->var_part[0].loc_chain;
3424 while ((node = *nodep))
3426 location_chain *nextp = &node->next;
3428 if (GET_CODE (node->loc) == REG)
3432 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3433 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3434 && dv_is_value_p (list->dv))
3438 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3440 /* If this value became canonical for another value that had
3441 this register, we want to leave it alone. */
3442 else if (dv_as_value (list->dv) != val)
3444 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3446 node->init, NULL_RTX);
3447 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3449 /* Since nextp points into the removed node, we can't
3450 use it. The pointer to the next node moved to nodep.
3451 However, if the variable we're walking is unshared
3452 during our walk, we'll keep walking the location list
3453 of the previously-shared variable, in which case the
3454 node won't have been removed, and we'll want to skip
3455 it. That's why we test *nodep here. */
3461 /* Canonicalization puts registers first, so we don't have to
3467 if (dvar != (variable)*dstslot)
3468 dvar = (variable)*dstslot;
3469 nodep = &dvar->var_part[0].loc_chain;
3473 /* Mark all referenced nodes for canonicalization, and make sure
3474 we have mutual equivalence links. */
3475 VALUE_RECURSED_INTO (val) = true;
3476 for (node = *nodep; node; node = node->next)
3477 if (GET_CODE (node->loc) == VALUE)
3479 VALUE_RECURSED_INTO (node->loc) = true;
3480 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3481 node->init, NULL, INSERT);
3484 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3485 gcc_assert (*dstslot == dvar);
3486 canonicalize_values_star (dstslot, dst);
3487 #ifdef ENABLE_CHECKING
3489 == shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash));
3491 dvar = (variable)*dstslot;
3495 bool has_value = false, has_other = false;
3497 /* If we have one value and anything else, we're going to
3498 canonicalize this, so make sure all values have an entry in
3499 the table and are marked for canonicalization. */
3500 for (node = *nodep; node; node = node->next)
3502 if (GET_CODE (node->loc) == VALUE)
3504 /* If this was marked during register canonicalization,
3505 we know we have to canonicalize values. */
3520 if (has_value && has_other)
3522 for (node = *nodep; node; node = node->next)
3524 if (GET_CODE (node->loc) == VALUE)
3526 decl_or_value dv = dv_from_value (node->loc);
3529 if (shared_hash_shared (dst->vars))
3530 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3532 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3536 variable var = (variable) pool_alloc (dv_pool (dv));
3539 var->n_var_parts = 1;
3540 var->cur_loc_changed = false;
3541 var->in_changed_variables = false;
3542 var->var_part[0].offset = 0;
3543 var->var_part[0].loc_chain = NULL;
3544 var->var_part[0].cur_loc = NULL;
3548 VALUE_RECURSED_INTO (node->loc) = true;
3552 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3553 gcc_assert (*dstslot == dvar);
3554 canonicalize_values_star (dstslot, dst);
3555 #ifdef ENABLE_CHECKING
3557 == shared_hash_find_slot_noinsert_1 (dst->vars,
3560 dvar = (variable)*dstslot;
3564 if (!onepart_variable_different_p (dvar, s2var))
3566 variable_htab_free (dvar);
3567 *dstslot = dvar = s2var;
3570 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3572 variable_htab_free (dvar);
3573 *dstslot = dvar = s1var;
3575 dst_can_be_shared = false;
3578 dst_can_be_shared = false;
3583 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
3584 multi-part variable. Unions of multi-part variables and
3585 intersections of one-part ones will be handled in
3586 variable_merge_over_cur(). */
3589 variable_merge_over_src (void **s2slot, void *data)
3591 struct dfset_merge *dsm = (struct dfset_merge *)data;
3592 dataflow_set *dst = dsm->dst;
3593 variable s2var = (variable) *s2slot;
3594 decl_or_value dv = s2var->dv;
3595 bool onepart = dv_onepart_p (dv);
3599 void **dstp = shared_hash_find_slot (dst->vars, dv);
3605 dsm->src_onepart_cnt++;
3609 /* Combine dataflow set information from SRC2 into DST, using PDST
3610 to carry over information across passes. */
3613 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
3615 dataflow_set cur = *dst;
3616 dataflow_set *src1 = &cur;
3617 struct dfset_merge dsm;
3619 size_t src1_elems, src2_elems;
3621 src1_elems = htab_elements (shared_hash_htab (src1->vars));
3622 src2_elems = htab_elements (shared_hash_htab (src2->vars));
3623 dataflow_set_init (dst);
3624 dst->stack_adjust = cur.stack_adjust;
3625 shared_hash_destroy (dst->vars);
3626 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3627 dst->vars->refcount = 1;
3629 = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
3630 variable_htab_eq, variable_htab_free);
3632 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3633 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
3638 dsm.src_onepart_cnt = 0;
3640 htab_traverse (shared_hash_htab (dsm.src->vars), variable_merge_over_src,
3642 htab_traverse (shared_hash_htab (dsm.cur->vars), variable_merge_over_cur,
3645 if (dsm.src_onepart_cnt)
3646 dst_can_be_shared = false;
3648 dataflow_set_destroy (src1);
3651 /* Mark register equivalences. */
3654 dataflow_set_equiv_regs (dataflow_set *set)
3659 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3661 rtx canon[NUM_MACHINE_MODES];
3663 memset (canon, 0, sizeof (canon));
3665 for (list = set->regs[i]; list; list = list->next)
3666 if (list->offset == 0 && dv_is_value_p (list->dv))
3668 rtx val = dv_as_value (list->dv);
3669 rtx *cvalp = &canon[(int)GET_MODE (val)];
3672 if (canon_value_cmp (val, cval))
3676 for (list = set->regs[i]; list; list = list->next)
3677 if (list->offset == 0 && dv_onepart_p (list->dv))
3679 rtx cval = canon[(int)GET_MODE (list->loc)];
3684 if (dv_is_value_p (list->dv))
3686 rtx val = dv_as_value (list->dv);
3691 VALUE_RECURSED_INTO (val) = true;
3692 set_variable_part (set, val, dv_from_value (cval), 0,
3693 VAR_INIT_STATUS_INITIALIZED,
3697 VALUE_RECURSED_INTO (cval) = true;
3698 set_variable_part (set, cval, list->dv, 0,
3699 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
3702 for (listp = &set->regs[i]; (list = *listp);
3703 listp = list ? &list->next : listp)
3704 if (list->offset == 0 && dv_onepart_p (list->dv))
3706 rtx cval = canon[(int)GET_MODE (list->loc)];
3712 if (dv_is_value_p (list->dv))
3714 rtx val = dv_as_value (list->dv);
3715 if (!VALUE_RECURSED_INTO (val))
3719 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3720 canonicalize_values_star (slot, set);
3727 /* Remove any redundant values in the location list of VAR, which must
3728 be unshared and 1-part. */
3731 remove_duplicate_values (variable var)
3733 location_chain node, *nodep;
3735 gcc_assert (dv_onepart_p (var->dv));
3736 gcc_assert (var->n_var_parts == 1);
3737 gcc_assert (var->refcount == 1);
3739 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3741 if (GET_CODE (node->loc) == VALUE)
3743 if (VALUE_RECURSED_INTO (node->loc))
3745 /* Remove duplicate value node. */
3746 *nodep = node->next;
3747 pool_free (loc_chain_pool, node);
3751 VALUE_RECURSED_INTO (node->loc) = true;
3753 nodep = &node->next;
3756 for (node = var->var_part[0].loc_chain; node; node = node->next)
3757 if (GET_CODE (node->loc) == VALUE)
3759 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3760 VALUE_RECURSED_INTO (node->loc) = false;
3765 /* Hash table iteration argument passed to variable_post_merge. */
3766 struct dfset_post_merge
3768 /* The new input set for the current block. */
3770 /* Pointer to the permanent input set for the current block, or
3772 dataflow_set **permp;
3775 /* Create values for incoming expressions associated with one-part
3776 variables that don't have value numbers for them. */
3779 variable_post_merge_new_vals (void **slot, void *info)
3781 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3782 dataflow_set *set = dfpm->set;
3783 variable var = (variable)*slot;
3784 location_chain node;
3786 if (!dv_onepart_p (var->dv) || !var->n_var_parts)
3789 gcc_assert (var->n_var_parts == 1);
3791 if (dv_is_decl_p (var->dv))
3793 bool check_dupes = false;
3796 for (node = var->var_part[0].loc_chain; node; node = node->next)
3798 if (GET_CODE (node->loc) == VALUE)
3799 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
3800 else if (GET_CODE (node->loc) == REG)
3802 attrs att, *attp, *curp = NULL;
3804 if (var->refcount != 1)
3806 slot = unshare_variable (set, slot, var,
3807 VAR_INIT_STATUS_INITIALIZED);
3808 var = (variable)*slot;
3812 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3814 if (att->offset == 0
3815 && GET_MODE (att->loc) == GET_MODE (node->loc))
3817 if (dv_is_value_p (att->dv))
3819 rtx cval = dv_as_value (att->dv);
3824 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
3832 if ((*curp)->offset == 0
3833 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
3834 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
3837 curp = &(*curp)->next;
3848 *dfpm->permp = XNEW (dataflow_set);
3849 dataflow_set_init (*dfpm->permp);
3852 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
3853 att; att = att->next)
3854 if (GET_MODE (att->loc) == GET_MODE (node->loc))
3856 gcc_assert (att->offset == 0);
3857 gcc_assert (dv_is_value_p (att->dv));
3858 val_reset (set, att->dv);
3865 cval = dv_as_value (cdv);
3869 /* Create a unique value to hold this register,
3870 that ought to be found and reused in
3871 subsequent rounds. */
3873 gcc_assert (!cselib_lookup (node->loc,
3874 GET_MODE (node->loc), 0));
3875 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
3876 cselib_preserve_value (v);
3877 cselib_invalidate_rtx (node->loc);
3879 cdv = dv_from_value (cval);
3882 "Created new value %u:%u for reg %i\n",
3883 v->uid, v->hash, REGNO (node->loc));
3886 var_reg_decl_set (*dfpm->permp, node->loc,
3887 VAR_INIT_STATUS_INITIALIZED,
3888 cdv, 0, NULL, INSERT);
3894 /* Remove attribute referring to the decl, which now
3895 uses the value for the register, already existing or
3896 to be added when we bring perm in. */
3899 pool_free (attrs_pool, att);
3904 remove_duplicate_values (var);
3910 /* Reset values in the permanent set that are not associated with the
3911 chosen expression. */
3914 variable_post_merge_perm_vals (void **pslot, void *info)
3916 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3917 dataflow_set *set = dfpm->set;
3918 variable pvar = (variable)*pslot, var;
3919 location_chain pnode;
3923 gcc_assert (dv_is_value_p (pvar->dv));
3924 gcc_assert (pvar->n_var_parts == 1);
3925 pnode = pvar->var_part[0].loc_chain;
3927 gcc_assert (!pnode->next);
3928 gcc_assert (REG_P (pnode->loc));
3932 var = shared_hash_find (set->vars, dv);
3935 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
3937 val_reset (set, dv);
3940 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
3941 if (att->offset == 0
3942 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
3943 && dv_is_value_p (att->dv))
3946 /* If there is a value associated with this register already, create
3948 if (att && dv_as_value (att->dv) != dv_as_value (dv))
3950 rtx cval = dv_as_value (att->dv);
3951 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
3952 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
3957 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
3959 variable_union (pslot, set);
3965 /* Just checking stuff and registering register attributes for
3969 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
3971 struct dfset_post_merge dfpm;
3976 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
3979 htab_traverse (shared_hash_htab ((*permp)->vars),
3980 variable_post_merge_perm_vals, &dfpm);
3981 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
3982 htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
3985 /* Return a node whose loc is a MEM that refers to EXPR in the
3986 location list of a one-part variable or value VAR, or in that of
3987 any values recursively mentioned in the location lists. */
3989 static location_chain
3990 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
3992 location_chain node;
3995 location_chain where = NULL;
4000 gcc_assert (GET_CODE (val) == VALUE);
4002 gcc_assert (!VALUE_RECURSED_INTO (val));
4004 dv = dv_from_value (val);
4005 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
4010 gcc_assert (dv_onepart_p (var->dv));
4012 if (!var->n_var_parts)
4015 gcc_assert (var->var_part[0].offset == 0);
4017 VALUE_RECURSED_INTO (val) = true;
4019 for (node = var->var_part[0].loc_chain; node; node = node->next)
4020 if (MEM_P (node->loc) && MEM_EXPR (node->loc) == expr
4021 && MEM_OFFSET (node->loc) == 0)
4026 else if (GET_CODE (node->loc) == VALUE
4027 && !VALUE_RECURSED_INTO (node->loc)
4028 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4031 VALUE_RECURSED_INTO (val) = false;
4036 /* Return TRUE if the value of MEM may vary across a call. */
4039 mem_dies_at_call (rtx mem)
4041 tree expr = MEM_EXPR (mem);
4047 decl = get_base_address (expr);
4055 return (may_be_aliased (decl)
4056 || (!TREE_READONLY (decl) && is_global_var (decl)));
4059 /* Remove all MEMs from the location list of a hash table entry for a
4060 one-part variable, except those whose MEM attributes map back to
4061 the variable itself, directly or within a VALUE. */
4064 dataflow_set_preserve_mem_locs (void **slot, void *data)
4066 dataflow_set *set = (dataflow_set *) data;
4067 variable var = (variable) *slot;
4069 if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
4071 tree decl = dv_as_decl (var->dv);
4072 location_chain loc, *locp;
4073 bool changed = false;
4075 if (!var->n_var_parts)
4078 gcc_assert (var->n_var_parts == 1);
4080 if (shared_var_p (var, set->vars))
4082 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4084 /* We want to remove dying MEMs that doesn't refer to
4086 if (GET_CODE (loc->loc) == MEM
4087 && (MEM_EXPR (loc->loc) != decl
4088 || MEM_OFFSET (loc->loc))
4089 && !mem_dies_at_call (loc->loc))
4091 /* We want to move here MEMs that do refer to DECL. */
4092 else if (GET_CODE (loc->loc) == VALUE
4093 && find_mem_expr_in_1pdv (decl, loc->loc,
4094 shared_hash_htab (set->vars)))
4101 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4102 var = (variable)*slot;
4103 gcc_assert (var->n_var_parts == 1);
4106 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4109 rtx old_loc = loc->loc;
4110 if (GET_CODE (old_loc) == VALUE)
4112 location_chain mem_node
4113 = find_mem_expr_in_1pdv (decl, loc->loc,
4114 shared_hash_htab (set->vars));
4116 /* ??? This picks up only one out of multiple MEMs that
4117 refer to the same variable. Do we ever need to be
4118 concerned about dealing with more than one, or, given
4119 that they should all map to the same variable
4120 location, their addresses will have been merged and
4121 they will be regarded as equivalent? */
4124 loc->loc = mem_node->loc;
4125 loc->set_src = mem_node->set_src;
4126 loc->init = MIN (loc->init, mem_node->init);
4130 if (GET_CODE (loc->loc) != MEM
4131 || (MEM_EXPR (loc->loc) == decl
4132 && MEM_OFFSET (loc->loc) == 0)
4133 || !mem_dies_at_call (loc->loc))
4135 if (old_loc != loc->loc && emit_notes)
4137 if (old_loc == var->var_part[0].cur_loc)
4140 var->var_part[0].cur_loc = NULL;
4141 var->cur_loc_changed = true;
4143 add_value_chains (var->dv, loc->loc);
4144 remove_value_chains (var->dv, old_loc);
4152 remove_value_chains (var->dv, old_loc);
4153 if (old_loc == var->var_part[0].cur_loc)
4156 var->var_part[0].cur_loc = NULL;
4157 var->cur_loc_changed = true;
4161 pool_free (loc_chain_pool, loc);
4164 if (!var->var_part[0].loc_chain)
4170 variable_was_changed (var, set);
4176 /* Remove all MEMs from the location list of a hash table entry for a
4180 dataflow_set_remove_mem_locs (void **slot, void *data)
4182 dataflow_set *set = (dataflow_set *) data;
4183 variable var = (variable) *slot;
4185 if (dv_is_value_p (var->dv))
4187 location_chain loc, *locp;
4188 bool changed = false;
4190 gcc_assert (var->n_var_parts == 1);
4192 if (shared_var_p (var, set->vars))
4194 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4195 if (GET_CODE (loc->loc) == MEM
4196 && mem_dies_at_call (loc->loc))
4202 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4203 var = (variable)*slot;
4204 gcc_assert (var->n_var_parts == 1);
4207 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4210 if (GET_CODE (loc->loc) != MEM
4211 || !mem_dies_at_call (loc->loc))
4218 remove_value_chains (var->dv, loc->loc);
4220 /* If we have deleted the location which was last emitted
4221 we have to emit new location so add the variable to set
4222 of changed variables. */
4223 if (var->var_part[0].cur_loc == loc->loc)
4226 var->var_part[0].cur_loc = NULL;
4227 var->cur_loc_changed = true;
4229 pool_free (loc_chain_pool, loc);
4232 if (!var->var_part[0].loc_chain)
4238 variable_was_changed (var, set);
4244 /* Remove all variable-location information about call-clobbered
4245 registers, as well as associations between MEMs and VALUEs. */
4248 dataflow_set_clear_at_call (dataflow_set *set)
4252 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
4253 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
4254 var_regno_delete (set, r);
4256 if (MAY_HAVE_DEBUG_INSNS)
4258 set->traversed_vars = set->vars;
4259 htab_traverse (shared_hash_htab (set->vars),
4260 dataflow_set_preserve_mem_locs, set);
4261 set->traversed_vars = set->vars;
4262 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
4264 set->traversed_vars = NULL;
4268 /* Flag whether two dataflow sets being compared contain different data. */
4270 dataflow_set_different_value;
4273 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4275 location_chain lc1, lc2;
4277 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4279 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4281 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4283 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4286 if (rtx_equal_p (lc1->loc, lc2->loc))
4295 /* Return true if one-part variables VAR1 and VAR2 are different.
4296 They must be in canonical order. */
4299 onepart_variable_different_p (variable var1, variable var2)
4301 location_chain lc1, lc2;
4306 gcc_assert (var1->n_var_parts == 1);
4307 gcc_assert (var2->n_var_parts == 1);
4309 lc1 = var1->var_part[0].loc_chain;
4310 lc2 = var2->var_part[0].loc_chain;
4317 if (loc_cmp (lc1->loc, lc2->loc))
4326 /* Return true if variables VAR1 and VAR2 are different. */
4329 variable_different_p (variable var1, variable var2)
4336 if (var1->n_var_parts != var2->n_var_parts)
4339 for (i = 0; i < var1->n_var_parts; i++)
4341 if (var1->var_part[i].offset != var2->var_part[i].offset)
4343 /* One-part values have locations in a canonical order. */
4344 if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
4346 gcc_assert (var1->n_var_parts == 1);
4347 gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4348 return onepart_variable_different_p (var1, var2);
4350 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4352 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4358 /* Compare variable *SLOT with the same variable in hash table DATA
4359 and set DATAFLOW_SET_DIFFERENT_VALUE if they are different. */
4362 dataflow_set_different_1 (void **slot, void *data)
4364 htab_t htab = (htab_t) data;
4365 variable var1, var2;
4367 var1 = (variable) *slot;
4368 var2 = (variable) htab_find_with_hash (htab, var1->dv,
4369 dv_htab_hash (var1->dv));
4372 dataflow_set_different_value = true;
4374 if (dump_file && (dump_flags & TDF_DETAILS))
4376 fprintf (dump_file, "dataflow difference found: removal of:\n");
4380 /* Stop traversing the hash table. */
4384 if (variable_different_p (var1, var2))
4386 dataflow_set_different_value = true;
4388 if (dump_file && (dump_flags & TDF_DETAILS))
4390 fprintf (dump_file, "dataflow difference found: old and new follow:\n");
4395 /* Stop traversing the hash table. */
4399 /* Continue traversing the hash table. */
4403 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4406 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4408 if (old_set->vars == new_set->vars)
4411 if (htab_elements (shared_hash_htab (old_set->vars))
4412 != htab_elements (shared_hash_htab (new_set->vars)))
4415 dataflow_set_different_value = false;
4417 htab_traverse (shared_hash_htab (old_set->vars), dataflow_set_different_1,
4418 shared_hash_htab (new_set->vars));
4419 /* No need to traverse the second hashtab, if both have the same number
4420 of elements and the second one had all entries found in the first one,
4421 then it can't have any extra entries. */
4422 return dataflow_set_different_value;
4425 /* Free the contents of dataflow set SET. */
4428 dataflow_set_destroy (dataflow_set *set)
4432 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4433 attrs_list_clear (&set->regs[i]);
4435 shared_hash_destroy (set->vars);
4439 /* Return true if RTL X contains a SYMBOL_REF. */
4442 contains_symbol_ref (rtx x)
4451 code = GET_CODE (x);
4452 if (code == SYMBOL_REF)
4455 fmt = GET_RTX_FORMAT (code);
4456 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4460 if (contains_symbol_ref (XEXP (x, i)))
4463 else if (fmt[i] == 'E')
4466 for (j = 0; j < XVECLEN (x, i); j++)
4467 if (contains_symbol_ref (XVECEXP (x, i, j)))
4475 /* Shall EXPR be tracked? */
4478 track_expr_p (tree expr, bool need_rtl)
4483 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4484 return DECL_RTL_SET_P (expr);
4486 /* If EXPR is not a parameter or a variable do not track it. */
4487 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4490 /* It also must have a name... */
4491 if (!DECL_NAME (expr) && need_rtl)
4494 /* ... and a RTL assigned to it. */
4495 decl_rtl = DECL_RTL_IF_SET (expr);
4496 if (!decl_rtl && need_rtl)
4499 /* If this expression is really a debug alias of some other declaration, we
4500 don't need to track this expression if the ultimate declaration is
4503 if (DECL_DEBUG_EXPR_IS_FROM (realdecl))
4505 realdecl = DECL_DEBUG_EXPR (realdecl);
4506 if (realdecl == NULL_TREE)
4508 else if (!DECL_P (realdecl))
4510 if (handled_component_p (realdecl))
4512 HOST_WIDE_INT bitsize, bitpos, maxsize;
4514 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
4516 if (!DECL_P (innerdecl)
4517 || DECL_IGNORED_P (innerdecl)
4518 || TREE_STATIC (innerdecl)
4520 || bitpos + bitsize > 256
4521 || bitsize != maxsize)
4531 /* Do not track EXPR if REALDECL it should be ignored for debugging
4533 if (DECL_IGNORED_P (realdecl))
4536 /* Do not track global variables until we are able to emit correct location
4538 if (TREE_STATIC (realdecl))
4541 /* When the EXPR is a DECL for alias of some variable (see example)
4542 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
4543 DECL_RTL contains SYMBOL_REF.
4546 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4549 if (decl_rtl && MEM_P (decl_rtl)
4550 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4553 /* If RTX is a memory it should not be very large (because it would be
4554 an array or struct). */
4555 if (decl_rtl && MEM_P (decl_rtl))
4557 /* Do not track structures and arrays. */
4558 if (GET_MODE (decl_rtl) == BLKmode
4559 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4561 if (MEM_SIZE (decl_rtl)
4562 && INTVAL (MEM_SIZE (decl_rtl)) > MAX_VAR_PARTS)
4566 DECL_CHANGED (expr) = 0;
4567 DECL_CHANGED (realdecl) = 0;
4571 /* Determine whether a given LOC refers to the same variable part as
4575 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4578 HOST_WIDE_INT offset2;
4580 if (! DECL_P (expr))
4585 expr2 = REG_EXPR (loc);
4586 offset2 = REG_OFFSET (loc);
4588 else if (MEM_P (loc))
4590 expr2 = MEM_EXPR (loc);
4591 offset2 = INT_MEM_OFFSET (loc);
4596 if (! expr2 || ! DECL_P (expr2))
4599 expr = var_debug_decl (expr);
4600 expr2 = var_debug_decl (expr2);
4602 return (expr == expr2 && offset == offset2);
4605 /* LOC is a REG or MEM that we would like to track if possible.
4606 If EXPR is null, we don't know what expression LOC refers to,
4607 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4608 LOC is an lvalue register.
4610 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4611 is something we can track. When returning true, store the mode of
4612 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4613 from EXPR in *OFFSET_OUT (if nonnull). */
4616 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4617 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4619 enum machine_mode mode;
4621 if (expr == NULL || !track_expr_p (expr, true))
4624 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4625 whole subreg, but only the old inner part is really relevant. */
4626 mode = GET_MODE (loc);
4627 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4629 enum machine_mode pseudo_mode;
4631 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4632 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
4634 offset += byte_lowpart_offset (pseudo_mode, mode);
4639 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4640 Do the same if we are storing to a register and EXPR occupies
4641 the whole of register LOC; in that case, the whole of EXPR is
4642 being changed. We exclude complex modes from the second case
4643 because the real and imaginary parts are represented as separate
4644 pseudo registers, even if the whole complex value fits into one
4646 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4648 && !COMPLEX_MODE_P (DECL_MODE (expr))
4649 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4650 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4652 mode = DECL_MODE (expr);
4656 if (offset < 0 || offset >= MAX_VAR_PARTS)
4662 *offset_out = offset;
4666 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4667 want to track. When returning nonnull, make sure that the attributes
4668 on the returned value are updated. */
4671 var_lowpart (enum machine_mode mode, rtx loc)
4673 unsigned int offset, reg_offset, regno;
4675 if (!REG_P (loc) && !MEM_P (loc))
4678 if (GET_MODE (loc) == mode)
4681 offset = byte_lowpart_offset (mode, GET_MODE (loc));
4684 return adjust_address_nv (loc, mode, offset);
4686 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4687 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4689 return gen_rtx_REG_offset (loc, mode, regno, offset);
4692 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
4693 hard_frame_pointer_rtx is being mapped to it. */
4694 static rtx cfa_base_rtx;
4696 /* Carry information about uses and stores while walking rtx. */
4698 struct count_use_info
4700 /* The insn where the RTX is. */
4703 /* The basic block where insn is. */
4706 /* The array of n_sets sets in the insn, as determined by cselib. */
4707 struct cselib_set *sets;
4710 /* True if we're counting stores, false otherwise. */
4714 /* Find a VALUE corresponding to X. */
4716 static inline cselib_val *
4717 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4723 /* This is called after uses are set up and before stores are
4724 processed bycselib, so it's safe to look up srcs, but not
4725 dsts. So we look up expressions that appear in srcs or in
4726 dest expressions, but we search the sets array for dests of
4730 for (i = 0; i < cui->n_sets; i++)
4731 if (cui->sets[i].dest == x)
4732 return cui->sets[i].src_elt;
4735 return cselib_lookup (x, mode, 0);
4741 /* Helper function to get mode of MEM's address. */
4743 static inline enum machine_mode
4744 get_address_mode (rtx mem)
4746 enum machine_mode mode = GET_MODE (XEXP (mem, 0));
4747 if (mode != VOIDmode)
4749 return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
4752 /* Replace all registers and addresses in an expression with VALUE
4753 expressions that map back to them, unless the expression is a
4754 register. If no mapping is or can be performed, returns NULL. */
4757 replace_expr_with_values (rtx loc)
4761 else if (MEM_P (loc))
4763 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
4764 get_address_mode (loc), 0);
4766 return replace_equiv_address_nv (loc, addr->val_rtx);
4771 return cselib_subst_to_values (loc);
4774 /* Determine what kind of micro operation to choose for a USE. Return
4775 MO_CLOBBER if no micro operation is to be generated. */
4777 static enum micro_operation_type
4778 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
4782 if (cui && cui->sets)
4784 if (GET_CODE (loc) == VAR_LOCATION)
4786 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
4788 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
4789 if (! VAR_LOC_UNKNOWN_P (ploc))
4791 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
4793 /* ??? flag_float_store and volatile mems are never
4794 given values, but we could in theory use them for
4796 gcc_assert (val || 1);
4804 if (REG_P (loc) || MEM_P (loc))
4807 *modep = GET_MODE (loc);
4811 || (find_use_val (loc, GET_MODE (loc), cui)
4812 && cselib_lookup (XEXP (loc, 0),
4813 get_address_mode (loc), 0)))
4818 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4820 if (val && !cselib_preserved_value_p (val))
4828 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
4830 if (loc == cfa_base_rtx)
4832 expr = REG_EXPR (loc);
4835 return MO_USE_NO_VAR;
4836 else if (target_for_debug_bind (var_debug_decl (expr)))
4838 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
4839 false, modep, NULL))
4842 return MO_USE_NO_VAR;
4844 else if (MEM_P (loc))
4846 expr = MEM_EXPR (loc);
4850 else if (target_for_debug_bind (var_debug_decl (expr)))
4852 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
4853 false, modep, NULL))
4862 /* Log to OUT information about micro-operation MOPT involving X in
4866 log_op_type (rtx x, basic_block bb, rtx insn,
4867 enum micro_operation_type mopt, FILE *out)
4869 fprintf (out, "bb %i op %i insn %i %s ",
4870 bb->index, VEC_length (micro_operation, VTI (bb)->mos),
4871 INSN_UID (insn), micro_operation_type_name[mopt]);
4872 print_inline_rtx (out, x, 2);
4876 /* Tell whether the CONCAT used to holds a VALUE and its location
4877 needs value resolution, i.e., an attempt of mapping the location
4878 back to other incoming values. */
4879 #define VAL_NEEDS_RESOLUTION(x) \
4880 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
4881 /* Whether the location in the CONCAT is a tracked expression, that
4882 should also be handled like a MO_USE. */
4883 #define VAL_HOLDS_TRACK_EXPR(x) \
4884 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
4885 /* Whether the location in the CONCAT should be handled like a MO_COPY
4887 #define VAL_EXPR_IS_COPIED(x) \
4888 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
4889 /* Whether the location in the CONCAT should be handled like a
4890 MO_CLOBBER as well. */
4891 #define VAL_EXPR_IS_CLOBBERED(x) \
4892 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
4893 /* Whether the location is a CONCAT of the MO_VAL_SET expression and
4894 a reverse operation that should be handled afterwards. */
4895 #define VAL_EXPR_HAS_REVERSE(x) \
4896 (RTL_FLAG_CHECK1 ("VAL_EXPR_HAS_REVERSE", (x), CONCAT)->return_val)
4898 /* All preserved VALUEs. */
4899 static VEC (rtx, heap) *preserved_values;
4901 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
4904 preserve_value (cselib_val *val)
4906 cselib_preserve_value (val);
4907 VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
4910 /* Helper function for MO_VAL_LOC handling. Return non-zero if
4911 any rtxes not suitable for CONST use not replaced by VALUEs
4915 non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
4920 switch (GET_CODE (*x))
4931 return !MEM_READONLY_P (*x);
4937 /* Add uses (register and memory references) LOC which will be tracked
4938 to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
4941 add_uses (rtx *ploc, void *data)
4944 enum machine_mode mode = VOIDmode;
4945 struct count_use_info *cui = (struct count_use_info *)data;
4946 enum micro_operation_type type = use_type (loc, cui, &mode);
4948 if (type != MO_CLOBBER)
4950 basic_block bb = cui->bb;
4954 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
4955 mo.insn = cui->insn;
4957 if (type == MO_VAL_LOC)
4960 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
4963 gcc_assert (cui->sets);
4966 && !REG_P (XEXP (vloc, 0))
4967 && !MEM_P (XEXP (vloc, 0))
4968 && (GET_CODE (XEXP (vloc, 0)) != PLUS
4969 || XEXP (XEXP (vloc, 0), 0) != cfa_base_rtx
4970 || !CONST_INT_P (XEXP (XEXP (vloc, 0), 1))))
4973 enum machine_mode address_mode = get_address_mode (mloc);
4975 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4977 if (val && !cselib_preserved_value_p (val))
4979 micro_operation moa;
4980 preserve_value (val);
4981 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4982 moa.type = MO_VAL_USE;
4983 moa.insn = cui->insn;
4984 moa.u.loc = gen_rtx_CONCAT (address_mode,
4985 val->val_rtx, mloc);
4986 if (dump_file && (dump_flags & TDF_DETAILS))
4987 log_op_type (moa.u.loc, cui->bb, cui->insn,
4988 moa.type, dump_file);
4989 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
4993 if (CONSTANT_P (vloc)
4994 && (GET_CODE (vloc) != CONST
4995 || for_each_rtx (&vloc, non_suitable_const, NULL)))
4996 /* For constants don't look up any value. */;
4997 else if (!VAR_LOC_UNKNOWN_P (vloc)
4998 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5000 enum machine_mode mode2;
5001 enum micro_operation_type type2;
5002 rtx nloc = replace_expr_with_values (vloc);
5006 oloc = shallow_copy_rtx (oloc);
5007 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5010 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5012 type2 = use_type (vloc, 0, &mode2);
5014 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5015 || type2 == MO_CLOBBER);
5017 if (type2 == MO_CLOBBER
5018 && !cselib_preserved_value_p (val))
5020 VAL_NEEDS_RESOLUTION (oloc) = 1;
5021 preserve_value (val);
5024 else if (!VAR_LOC_UNKNOWN_P (vloc))
5026 oloc = shallow_copy_rtx (oloc);
5027 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5032 else if (type == MO_VAL_USE)
5034 enum machine_mode mode2 = VOIDmode;
5035 enum micro_operation_type type2;
5036 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5037 rtx vloc, oloc = loc, nloc;
5039 gcc_assert (cui->sets);
5042 && !REG_P (XEXP (oloc, 0))
5043 && !MEM_P (XEXP (oloc, 0))
5044 && (GET_CODE (XEXP (oloc, 0)) != PLUS
5045 || XEXP (XEXP (oloc, 0), 0) != cfa_base_rtx
5046 || !CONST_INT_P (XEXP (XEXP (oloc, 0), 1))))
5049 enum machine_mode address_mode = get_address_mode (mloc);
5051 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
5053 if (val && !cselib_preserved_value_p (val))
5055 micro_operation moa;
5056 preserve_value (val);
5057 mloc = cselib_subst_to_values (XEXP (mloc, 0));
5058 moa.type = MO_VAL_USE;
5059 moa.insn = cui->insn;
5060 moa.u.loc = gen_rtx_CONCAT (address_mode,
5061 val->val_rtx, mloc);
5062 if (dump_file && (dump_flags & TDF_DETAILS))
5063 log_op_type (moa.u.loc, cui->bb, cui->insn,
5064 moa.type, dump_file);
5065 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5069 type2 = use_type (loc, 0, &mode2);
5071 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5072 || type2 == MO_CLOBBER);
5074 if (type2 == MO_USE)
5075 vloc = var_lowpart (mode2, loc);
5079 /* The loc of a MO_VAL_USE may have two forms:
5081 (concat val src): val is at src, a value-based
5084 (concat (concat val use) src): same as above, with use as
5085 the MO_USE tracked value, if it differs from src.
5089 nloc = replace_expr_with_values (loc);
5094 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5096 oloc = val->val_rtx;
5098 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5100 if (type2 == MO_USE)
5101 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5102 if (!cselib_preserved_value_p (val))
5104 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5105 preserve_value (val);
5109 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5111 if (dump_file && (dump_flags & TDF_DETAILS))
5112 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5113 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5119 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5122 add_uses_1 (rtx *x, void *cui)
5124 for_each_rtx (x, add_uses, cui);
5127 /* Attempt to reverse the EXPR operation in the debug info. Say for
5128 reg1 = reg2 + 6 even when reg2 is no longer live we
5129 can express its value as VAL - 6. */
5132 reverse_op (rtx val, const_rtx expr)
5138 if (GET_CODE (expr) != SET)
5141 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5144 src = SET_SRC (expr);
5145 switch (GET_CODE (src))
5159 if (!REG_P (XEXP (src, 0)) || !SCALAR_INT_MODE_P (GET_MODE (src)))
5162 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0);
5163 if (!v || !cselib_preserved_value_p (v))
5166 switch (GET_CODE (src))
5170 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5172 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5176 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5188 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5190 arg = XEXP (src, 1);
5191 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5193 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5194 if (arg == NULL_RTX)
5196 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5199 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5201 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5202 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5203 breaks a lot of routines during var-tracking. */
5204 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5210 return gen_rtx_CONCAT (GET_MODE (v->val_rtx), v->val_rtx, ret);
5213 /* Add stores (register and memory references) LOC which will be tracked
5214 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5215 CUIP->insn is instruction which the LOC is part of. */
5218 add_stores (rtx loc, const_rtx expr, void *cuip)
5220 enum machine_mode mode = VOIDmode, mode2;
5221 struct count_use_info *cui = (struct count_use_info *)cuip;
5222 basic_block bb = cui->bb;
5224 rtx oloc = loc, nloc, src = NULL;
5225 enum micro_operation_type type = use_type (loc, cui, &mode);
5226 bool track_p = false;
5228 bool resolve, preserve;
5231 if (type == MO_CLOBBER)
5238 gcc_assert (loc != cfa_base_rtx);
5239 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5240 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5241 || GET_CODE (expr) == CLOBBER)
5243 mo.type = MO_CLOBBER;
5248 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
5249 src = var_lowpart (mode2, SET_SRC (expr));
5250 loc = var_lowpart (mode2, loc);
5259 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5260 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5267 mo.insn = cui->insn;
5269 else if (MEM_P (loc)
5270 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5273 if (MEM_P (loc) && type == MO_VAL_SET
5274 && !REG_P (XEXP (loc, 0))
5275 && !MEM_P (XEXP (loc, 0))
5276 && (GET_CODE (XEXP (loc, 0)) != PLUS
5277 || XEXP (XEXP (loc, 0), 0) != cfa_base_rtx
5278 || !CONST_INT_P (XEXP (XEXP (loc, 0), 1))))
5281 enum machine_mode address_mode = get_address_mode (mloc);
5282 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5285 if (val && !cselib_preserved_value_p (val))
5287 preserve_value (val);
5288 mo.type = MO_VAL_USE;
5289 mloc = cselib_subst_to_values (XEXP (mloc, 0));
5290 mo.u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
5291 mo.insn = cui->insn;
5292 if (dump_file && (dump_flags & TDF_DETAILS))
5293 log_op_type (mo.u.loc, cui->bb, cui->insn,
5294 mo.type, dump_file);
5295 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5299 if (GET_CODE (expr) == CLOBBER || !track_p)
5301 mo.type = MO_CLOBBER;
5302 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5306 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
5307 src = var_lowpart (mode2, SET_SRC (expr));
5308 loc = var_lowpart (mode2, loc);
5317 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5318 if (same_variable_part_p (SET_SRC (xexpr),
5320 INT_MEM_OFFSET (loc)))
5327 mo.insn = cui->insn;
5332 if (type != MO_VAL_SET)
5333 goto log_and_return;
5335 v = find_use_val (oloc, mode, cui);
5338 goto log_and_return;
5340 resolve = preserve = !cselib_preserved_value_p (v);
5342 nloc = replace_expr_with_values (oloc);
5346 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5348 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0);
5350 gcc_assert (oval != v);
5351 gcc_assert (REG_P (oloc) || MEM_P (oloc));
5353 if (!cselib_preserved_value_p (oval))
5355 micro_operation moa;
5357 preserve_value (oval);
5359 moa.type = MO_VAL_USE;
5360 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5361 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
5362 moa.insn = cui->insn;
5364 if (dump_file && (dump_flags & TDF_DETAILS))
5365 log_op_type (moa.u.loc, cui->bb, cui->insn,
5366 moa.type, dump_file);
5367 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5372 else if (resolve && GET_CODE (mo.u.loc) == SET)
5374 nloc = replace_expr_with_values (SET_SRC (expr));
5376 /* Avoid the mode mismatch between oexpr and expr. */
5377 if (!nloc && mode != mode2)
5379 nloc = SET_SRC (expr);
5380 gcc_assert (oloc == SET_DEST (expr));
5384 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
5387 if (oloc == SET_DEST (mo.u.loc))
5388 /* No point in duplicating. */
5390 if (!REG_P (SET_SRC (mo.u.loc)))
5396 if (GET_CODE (mo.u.loc) == SET
5397 && oloc == SET_DEST (mo.u.loc))
5398 /* No point in duplicating. */
5404 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5406 if (mo.u.loc != oloc)
5407 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
5409 /* The loc of a MO_VAL_SET may have various forms:
5411 (concat val dst): dst now holds val
5413 (concat val (set dst src)): dst now holds val, copied from src
5415 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5416 after replacing mems and non-top-level regs with values.
5418 (concat (concat val dstv) (set dst src)): dst now holds val,
5419 copied from src. dstv is a value-based representation of dst, if
5420 it differs from dst. If resolution is needed, src is a REG, and
5421 its mode is the same as that of val.
5423 (concat (concat val (set dstv srcv)) (set dst src)): src
5424 copied to dst, holding val. dstv and srcv are value-based
5425 representations of dst and src, respectively.
5429 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
5431 reverse = reverse_op (v->val_rtx, expr);
5434 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, reverse);
5435 VAL_EXPR_HAS_REVERSE (loc) = 1;
5442 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5445 VAL_NEEDS_RESOLUTION (loc) = resolve;
5448 if (mo.type == MO_CLOBBER)
5449 VAL_EXPR_IS_CLOBBERED (loc) = 1;
5450 if (mo.type == MO_COPY)
5451 VAL_EXPR_IS_COPIED (loc) = 1;
5453 mo.type = MO_VAL_SET;
5456 if (dump_file && (dump_flags & TDF_DETAILS))
5457 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5458 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5461 /* Callback for cselib_record_sets_hook, that records as micro
5462 operations uses and stores in an insn after cselib_record_sets has
5463 analyzed the sets in an insn, but before it modifies the stored
5464 values in the internal tables, unless cselib_record_sets doesn't
5465 call it directly (perhaps because we're not doing cselib in the
5466 first place, in which case sets and n_sets will be 0). */
5469 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
5471 basic_block bb = BLOCK_FOR_INSN (insn);
5473 struct count_use_info cui;
5474 micro_operation *mos;
5476 cselib_hook_called = true;
5481 cui.n_sets = n_sets;
5483 n1 = VEC_length (micro_operation, VTI (bb)->mos);
5484 cui.store_p = false;
5485 note_uses (&PATTERN (insn), add_uses_1, &cui);
5486 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5487 mos = VEC_address (micro_operation, VTI (bb)->mos);
5489 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
5493 while (n1 < n2 && mos[n1].type == MO_USE)
5495 while (n1 < n2 && mos[n2].type != MO_USE)
5507 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5510 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
5512 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
5530 mo.u.loc = NULL_RTX;
5532 if (dump_file && (dump_flags & TDF_DETAILS))
5533 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
5534 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5537 n1 = VEC_length (micro_operation, VTI (bb)->mos);
5538 /* This will record NEXT_INSN (insn), such that we can
5539 insert notes before it without worrying about any
5540 notes that MO_USEs might emit after the insn. */
5542 note_stores (PATTERN (insn), add_stores, &cui);
5543 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5544 mos = VEC_address (micro_operation, VTI (bb)->mos);
5546 /* Order the MO_VAL_USEs first (note_stores does nothing
5547 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
5548 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
5551 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
5553 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
5565 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5568 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
5570 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
5583 static enum var_init_status
5584 find_src_status (dataflow_set *in, rtx src)
5586 tree decl = NULL_TREE;
5587 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
5589 if (! flag_var_tracking_uninit)
5590 status = VAR_INIT_STATUS_INITIALIZED;
5592 if (src && REG_P (src))
5593 decl = var_debug_decl (REG_EXPR (src));
5594 else if (src && MEM_P (src))
5595 decl = var_debug_decl (MEM_EXPR (src));
5598 status = get_init_value (in, src, dv_from_decl (decl));
5603 /* SRC is the source of an assignment. Use SET to try to find what
5604 was ultimately assigned to SRC. Return that value if known,
5605 otherwise return SRC itself. */
5608 find_src_set_src (dataflow_set *set, rtx src)
5610 tree decl = NULL_TREE; /* The variable being copied around. */
5611 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
5613 location_chain nextp;
5617 if (src && REG_P (src))
5618 decl = var_debug_decl (REG_EXPR (src));
5619 else if (src && MEM_P (src))
5620 decl = var_debug_decl (MEM_EXPR (src));
5624 decl_or_value dv = dv_from_decl (decl);
5626 var = shared_hash_find (set->vars, dv);
5630 for (i = 0; i < var->n_var_parts && !found; i++)
5631 for (nextp = var->var_part[i].loc_chain; nextp && !found;
5632 nextp = nextp->next)
5633 if (rtx_equal_p (nextp->loc, src))
5635 set_src = nextp->set_src;
5645 /* Compute the changes of variable locations in the basic block BB. */
5648 compute_bb_dataflow (basic_block bb)
5651 micro_operation *mo;
5653 dataflow_set old_out;
5654 dataflow_set *in = &VTI (bb)->in;
5655 dataflow_set *out = &VTI (bb)->out;
5657 dataflow_set_init (&old_out);
5658 dataflow_set_copy (&old_out, out);
5659 dataflow_set_copy (out, in);
5661 for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
5663 rtx insn = mo->insn;
5668 dataflow_set_clear_at_call (out);
5673 rtx loc = mo->u.loc;
5676 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5677 else if (MEM_P (loc))
5678 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5684 rtx loc = mo->u.loc;
5688 if (GET_CODE (loc) == CONCAT)
5690 val = XEXP (loc, 0);
5691 vloc = XEXP (loc, 1);
5699 var = PAT_VAR_LOCATION_DECL (vloc);
5701 clobber_variable_part (out, NULL_RTX,
5702 dv_from_decl (var), 0, NULL_RTX);
5705 if (VAL_NEEDS_RESOLUTION (loc))
5706 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
5707 set_variable_part (out, val, dv_from_decl (var), 0,
5708 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
5711 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
5712 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
5713 dv_from_decl (var), 0,
5714 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
5721 rtx loc = mo->u.loc;
5722 rtx val, vloc, uloc;
5724 vloc = uloc = XEXP (loc, 1);
5725 val = XEXP (loc, 0);
5727 if (GET_CODE (val) == CONCAT)
5729 uloc = XEXP (val, 1);
5730 val = XEXP (val, 0);
5733 if (VAL_NEEDS_RESOLUTION (loc))
5734 val_resolve (out, val, vloc, insn);
5736 val_store (out, val, uloc, insn, false);
5738 if (VAL_HOLDS_TRACK_EXPR (loc))
5740 if (GET_CODE (uloc) == REG)
5741 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5743 else if (GET_CODE (uloc) == MEM)
5744 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5752 rtx loc = mo->u.loc;
5753 rtx val, vloc, uloc, reverse = NULL_RTX;
5756 if (VAL_EXPR_HAS_REVERSE (loc))
5758 reverse = XEXP (loc, 1);
5759 vloc = XEXP (loc, 0);
5761 uloc = XEXP (vloc, 1);
5762 val = XEXP (vloc, 0);
5765 if (GET_CODE (val) == CONCAT)
5767 vloc = XEXP (val, 1);
5768 val = XEXP (val, 0);
5771 if (GET_CODE (vloc) == SET)
5773 rtx vsrc = SET_SRC (vloc);
5775 gcc_assert (val != vsrc);
5776 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
5778 vloc = SET_DEST (vloc);
5780 if (VAL_NEEDS_RESOLUTION (loc))
5781 val_resolve (out, val, vsrc, insn);
5783 else if (VAL_NEEDS_RESOLUTION (loc))
5785 gcc_assert (GET_CODE (uloc) == SET
5786 && GET_CODE (SET_SRC (uloc)) == REG);
5787 val_resolve (out, val, SET_SRC (uloc), insn);
5790 if (VAL_HOLDS_TRACK_EXPR (loc))
5792 if (VAL_EXPR_IS_CLOBBERED (loc))
5795 var_reg_delete (out, uloc, true);
5796 else if (MEM_P (uloc))
5797 var_mem_delete (out, uloc, true);
5801 bool copied_p = VAL_EXPR_IS_COPIED (loc);
5803 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
5805 if (GET_CODE (uloc) == SET)
5807 set_src = SET_SRC (uloc);
5808 uloc = SET_DEST (uloc);
5813 if (flag_var_tracking_uninit)
5815 status = find_src_status (in, set_src);
5817 if (status == VAR_INIT_STATUS_UNKNOWN)
5818 status = find_src_status (out, set_src);
5821 set_src = find_src_set_src (in, set_src);
5825 var_reg_delete_and_set (out, uloc, !copied_p,
5827 else if (MEM_P (uloc))
5828 var_mem_delete_and_set (out, uloc, !copied_p,
5832 else if (REG_P (uloc))
5833 var_regno_delete (out, REGNO (uloc));
5835 val_store (out, val, vloc, insn, true);
5838 val_store (out, XEXP (reverse, 0), XEXP (reverse, 1),
5845 rtx loc = mo->u.loc;
5848 if (GET_CODE (loc) == SET)
5850 set_src = SET_SRC (loc);
5851 loc = SET_DEST (loc);
5855 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5857 else if (MEM_P (loc))
5858 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5865 rtx loc = mo->u.loc;
5866 enum var_init_status src_status;
5869 if (GET_CODE (loc) == SET)
5871 set_src = SET_SRC (loc);
5872 loc = SET_DEST (loc);
5875 if (! flag_var_tracking_uninit)
5876 src_status = VAR_INIT_STATUS_INITIALIZED;
5879 src_status = find_src_status (in, set_src);
5881 if (src_status == VAR_INIT_STATUS_UNKNOWN)
5882 src_status = find_src_status (out, set_src);
5885 set_src = find_src_set_src (in, set_src);
5888 var_reg_delete_and_set (out, loc, false, src_status, set_src);
5889 else if (MEM_P (loc))
5890 var_mem_delete_and_set (out, loc, false, src_status, set_src);
5896 rtx loc = mo->u.loc;
5899 var_reg_delete (out, loc, false);
5900 else if (MEM_P (loc))
5901 var_mem_delete (out, loc, false);
5907 rtx loc = mo->u.loc;
5910 var_reg_delete (out, loc, true);
5911 else if (MEM_P (loc))
5912 var_mem_delete (out, loc, true);
5917 out->stack_adjust += mo->u.adjust;
5922 if (MAY_HAVE_DEBUG_INSNS)
5924 dataflow_set_equiv_regs (out);
5925 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
5927 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
5930 htab_traverse (shared_hash_htab (out->vars),
5931 canonicalize_loc_order_check, out);
5934 changed = dataflow_set_different (&old_out, out);
5935 dataflow_set_destroy (&old_out);
5939 /* Find the locations of variables in the whole function. */
5942 vt_find_locations (void)
5944 fibheap_t worklist, pending, fibheap_swap;
5945 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
5952 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
5953 bool success = true;
5955 /* Compute reverse completion order of depth first search of the CFG
5956 so that the data-flow runs faster. */
5957 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
5958 bb_order = XNEWVEC (int, last_basic_block);
5959 pre_and_rev_post_order_compute (NULL, rc_order, false);
5960 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
5961 bb_order[rc_order[i]] = i;
5964 worklist = fibheap_new ();
5965 pending = fibheap_new ();
5966 visited = sbitmap_alloc (last_basic_block);
5967 in_worklist = sbitmap_alloc (last_basic_block);
5968 in_pending = sbitmap_alloc (last_basic_block);
5969 sbitmap_zero (in_worklist);
5972 fibheap_insert (pending, bb_order[bb->index], bb);
5973 sbitmap_ones (in_pending);
5975 while (success && !fibheap_empty (pending))
5977 fibheap_swap = pending;
5979 worklist = fibheap_swap;
5980 sbitmap_swap = in_pending;
5981 in_pending = in_worklist;
5982 in_worklist = sbitmap_swap;
5984 sbitmap_zero (visited);
5986 while (!fibheap_empty (worklist))
5988 bb = (basic_block) fibheap_extract_min (worklist);
5989 RESET_BIT (in_worklist, bb->index);
5990 if (!TEST_BIT (visited, bb->index))
5994 int oldinsz, oldoutsz;
5996 SET_BIT (visited, bb->index);
5998 if (VTI (bb)->in.vars)
6001 -= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6002 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6004 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
6006 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
6009 oldinsz = oldoutsz = 0;
6011 if (MAY_HAVE_DEBUG_INSNS)
6013 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
6014 bool first = true, adjust = false;
6016 /* Calculate the IN set as the intersection of
6017 predecessor OUT sets. */
6019 dataflow_set_clear (in);
6020 dst_can_be_shared = true;
6022 FOR_EACH_EDGE (e, ei, bb->preds)
6023 if (!VTI (e->src)->flooded)
6024 gcc_assert (bb_order[bb->index]
6025 <= bb_order[e->src->index]);
6028 dataflow_set_copy (in, &VTI (e->src)->out);
6029 first_out = &VTI (e->src)->out;
6034 dataflow_set_merge (in, &VTI (e->src)->out);
6040 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
6042 /* Merge and merge_adjust should keep entries in
6044 htab_traverse (shared_hash_htab (in->vars),
6045 canonicalize_loc_order_check,
6048 if (dst_can_be_shared)
6050 shared_hash_destroy (in->vars);
6051 in->vars = shared_hash_copy (first_out->vars);
6055 VTI (bb)->flooded = true;
6059 /* Calculate the IN set as union of predecessor OUT sets. */
6060 dataflow_set_clear (&VTI (bb)->in);
6061 FOR_EACH_EDGE (e, ei, bb->preds)
6062 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
6065 changed = compute_bb_dataflow (bb);
6066 htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6067 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6069 if (htabmax && htabsz > htabmax)
6071 if (MAY_HAVE_DEBUG_INSNS)
6072 inform (DECL_SOURCE_LOCATION (cfun->decl),
6073 "variable tracking size limit exceeded with "
6074 "-fvar-tracking-assignments, retrying without");
6076 inform (DECL_SOURCE_LOCATION (cfun->decl),
6077 "variable tracking size limit exceeded");
6084 FOR_EACH_EDGE (e, ei, bb->succs)
6086 if (e->dest == EXIT_BLOCK_PTR)
6089 if (TEST_BIT (visited, e->dest->index))
6091 if (!TEST_BIT (in_pending, e->dest->index))
6093 /* Send E->DEST to next round. */
6094 SET_BIT (in_pending, e->dest->index);
6095 fibheap_insert (pending,
6096 bb_order[e->dest->index],
6100 else if (!TEST_BIT (in_worklist, e->dest->index))
6102 /* Add E->DEST to current round. */
6103 SET_BIT (in_worklist, e->dest->index);
6104 fibheap_insert (worklist, bb_order[e->dest->index],
6112 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
6114 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
6116 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
6118 (int)worklist->nodes, (int)pending->nodes, htabsz);
6120 if (dump_file && (dump_flags & TDF_DETAILS))
6122 fprintf (dump_file, "BB %i IN:\n", bb->index);
6123 dump_dataflow_set (&VTI (bb)->in);
6124 fprintf (dump_file, "BB %i OUT:\n", bb->index);
6125 dump_dataflow_set (&VTI (bb)->out);
6131 if (success && MAY_HAVE_DEBUG_INSNS)
6133 gcc_assert (VTI (bb)->flooded);
6136 fibheap_delete (worklist);
6137 fibheap_delete (pending);
6138 sbitmap_free (visited);
6139 sbitmap_free (in_worklist);
6140 sbitmap_free (in_pending);
6145 /* Print the content of the LIST to dump file. */
6148 dump_attrs_list (attrs list)
6150 for (; list; list = list->next)
6152 if (dv_is_decl_p (list->dv))
6153 print_mem_expr (dump_file, dv_as_decl (list->dv));
6155 print_rtl_single (dump_file, dv_as_value (list->dv));
6156 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
6158 fprintf (dump_file, "\n");
6161 /* Print the information about variable *SLOT to dump file. */
6164 dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
6166 variable var = (variable) *slot;
6170 /* Continue traversing the hash table. */
6174 /* Print the information about variable VAR to dump file. */
6177 dump_var (variable var)
6180 location_chain node;
6182 if (dv_is_decl_p (var->dv))
6184 const_tree decl = dv_as_decl (var->dv);
6186 if (DECL_NAME (decl))
6188 fprintf (dump_file, " name: %s",
6189 IDENTIFIER_POINTER (DECL_NAME (decl)));
6190 if (dump_flags & TDF_UID)
6191 fprintf (dump_file, "D.%u", DECL_UID (decl));
6193 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6194 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
6196 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
6197 fprintf (dump_file, "\n");
6201 fputc (' ', dump_file);
6202 print_rtl_single (dump_file, dv_as_value (var->dv));
6205 for (i = 0; i < var->n_var_parts; i++)
6207 fprintf (dump_file, " offset %ld\n",
6208 (long) var->var_part[i].offset);
6209 for (node = var->var_part[i].loc_chain; node; node = node->next)
6211 fprintf (dump_file, " ");
6212 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
6213 fprintf (dump_file, "[uninit]");
6214 print_rtl_single (dump_file, node->loc);
6219 /* Print the information about variables from hash table VARS to dump file. */
6222 dump_vars (htab_t vars)
6224 if (htab_elements (vars) > 0)
6226 fprintf (dump_file, "Variables:\n");
6227 htab_traverse (vars, dump_var_slot, NULL);
6231 /* Print the dataflow set SET to dump file. */
6234 dump_dataflow_set (dataflow_set *set)
6238 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
6240 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6244 fprintf (dump_file, "Reg %d:", i);
6245 dump_attrs_list (set->regs[i]);
6248 dump_vars (shared_hash_htab (set->vars));
6249 fprintf (dump_file, "\n");
6252 /* Print the IN and OUT sets for each basic block to dump file. */
6255 dump_dataflow_sets (void)
6261 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
6262 fprintf (dump_file, "IN:\n");
6263 dump_dataflow_set (&VTI (bb)->in);
6264 fprintf (dump_file, "OUT:\n");
6265 dump_dataflow_set (&VTI (bb)->out);
6269 /* Add variable VAR to the hash table of changed variables and
6270 if it has no locations delete it from SET's hash table. */
6273 variable_was_changed (variable var, dataflow_set *set)
6275 hashval_t hash = dv_htab_hash (var->dv);
6280 bool old_cur_loc_changed = false;
6282 /* Remember this decl or VALUE has been added to changed_variables. */
6283 set_dv_changed (var->dv, true);
6285 slot = htab_find_slot_with_hash (changed_variables,
6291 variable old_var = (variable) *slot;
6292 gcc_assert (old_var->in_changed_variables);
6293 old_var->in_changed_variables = false;
6294 old_cur_loc_changed = old_var->cur_loc_changed;
6295 variable_htab_free (*slot);
6297 if (set && var->n_var_parts == 0)
6301 empty_var = (variable) pool_alloc (dv_pool (var->dv));
6302 empty_var->dv = var->dv;
6303 empty_var->refcount = 1;
6304 empty_var->n_var_parts = 0;
6305 empty_var->cur_loc_changed = true;
6306 empty_var->in_changed_variables = true;
6313 var->in_changed_variables = true;
6314 /* If within processing one uop a variable is deleted
6315 and then readded, we need to assume it has changed. */
6316 if (old_cur_loc_changed)
6317 var->cur_loc_changed = true;
6324 if (var->n_var_parts == 0)
6329 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
6332 if (shared_hash_shared (set->vars))
6333 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
6335 htab_clear_slot (shared_hash_htab (set->vars), slot);
6341 /* Look for the index in VAR->var_part corresponding to OFFSET.
6342 Return -1 if not found. If INSERTION_POINT is non-NULL, the
6343 referenced int will be set to the index that the part has or should
6344 have, if it should be inserted. */
6347 find_variable_location_part (variable var, HOST_WIDE_INT offset,
6348 int *insertion_point)
6352 /* Find the location part. */
6354 high = var->n_var_parts;
6357 pos = (low + high) / 2;
6358 if (var->var_part[pos].offset < offset)
6365 if (insertion_point)
6366 *insertion_point = pos;
6368 if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
6375 set_slot_part (dataflow_set *set, rtx loc, void **slot,
6376 decl_or_value dv, HOST_WIDE_INT offset,
6377 enum var_init_status initialized, rtx set_src)
6380 location_chain node, next;
6381 location_chain *nextp;
6383 bool onepart = dv_onepart_p (dv);
6385 gcc_assert (offset == 0 || !onepart);
6386 gcc_assert (loc != dv_as_opaque (dv));
6388 var = (variable) *slot;
6390 if (! flag_var_tracking_uninit)
6391 initialized = VAR_INIT_STATUS_INITIALIZED;
6395 /* Create new variable information. */
6396 var = (variable) pool_alloc (dv_pool (dv));
6399 var->n_var_parts = 1;
6400 var->cur_loc_changed = false;
6401 var->in_changed_variables = false;
6402 var->var_part[0].offset = offset;
6403 var->var_part[0].loc_chain = NULL;
6404 var->var_part[0].cur_loc = NULL;
6407 nextp = &var->var_part[0].loc_chain;
6413 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
6417 if (GET_CODE (loc) == VALUE)
6419 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6420 nextp = &node->next)
6421 if (GET_CODE (node->loc) == VALUE)
6423 if (node->loc == loc)
6428 if (canon_value_cmp (node->loc, loc))
6436 else if (REG_P (node->loc) || MEM_P (node->loc))
6444 else if (REG_P (loc))
6446 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6447 nextp = &node->next)
6448 if (REG_P (node->loc))
6450 if (REGNO (node->loc) < REGNO (loc))
6454 if (REGNO (node->loc) == REGNO (loc))
6467 else if (MEM_P (loc))
6469 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6470 nextp = &node->next)
6471 if (REG_P (node->loc))
6473 else if (MEM_P (node->loc))
6475 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
6487 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6488 nextp = &node->next)
6489 if ((r = loc_cmp (node->loc, loc)) >= 0)
6497 if (shared_var_p (var, set->vars))
6499 slot = unshare_variable (set, slot, var, initialized);
6500 var = (variable)*slot;
6501 for (nextp = &var->var_part[0].loc_chain; c;
6502 nextp = &(*nextp)->next)
6504 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
6511 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
6513 pos = find_variable_location_part (var, offset, &inspos);
6517 node = var->var_part[pos].loc_chain;
6520 && ((REG_P (node->loc) && REG_P (loc)
6521 && REGNO (node->loc) == REGNO (loc))
6522 || rtx_equal_p (node->loc, loc)))
6524 /* LOC is in the beginning of the chain so we have nothing
6526 if (node->init < initialized)
6527 node->init = initialized;
6528 if (set_src != NULL)
6529 node->set_src = set_src;
6535 /* We have to make a copy of a shared variable. */
6536 if (shared_var_p (var, set->vars))
6538 slot = unshare_variable (set, slot, var, initialized);
6539 var = (variable)*slot;
6545 /* We have not found the location part, new one will be created. */
6547 /* We have to make a copy of the shared variable. */
6548 if (shared_var_p (var, set->vars))
6550 slot = unshare_variable (set, slot, var, initialized);
6551 var = (variable)*slot;
6554 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
6555 thus there are at most MAX_VAR_PARTS different offsets. */
6556 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
6557 && (!var->n_var_parts || !dv_onepart_p (var->dv)));
6559 /* We have to move the elements of array starting at index
6560 inspos to the next position. */
6561 for (pos = var->n_var_parts; pos > inspos; pos--)
6562 var->var_part[pos] = var->var_part[pos - 1];
6565 var->var_part[pos].offset = offset;
6566 var->var_part[pos].loc_chain = NULL;
6567 var->var_part[pos].cur_loc = NULL;
6570 /* Delete the location from the list. */
6571 nextp = &var->var_part[pos].loc_chain;
6572 for (node = var->var_part[pos].loc_chain; node; node = next)
6575 if ((REG_P (node->loc) && REG_P (loc)
6576 && REGNO (node->loc) == REGNO (loc))
6577 || rtx_equal_p (node->loc, loc))
6579 /* Save these values, to assign to the new node, before
6580 deleting this one. */
6581 if (node->init > initialized)
6582 initialized = node->init;
6583 if (node->set_src != NULL && set_src == NULL)
6584 set_src = node->set_src;
6585 if (var->var_part[pos].cur_loc == node->loc)
6587 var->var_part[pos].cur_loc = NULL;
6588 var->cur_loc_changed = true;
6590 pool_free (loc_chain_pool, node);
6595 nextp = &node->next;
6598 nextp = &var->var_part[pos].loc_chain;
6601 /* Add the location to the beginning. */
6602 node = (location_chain) pool_alloc (loc_chain_pool);
6604 node->init = initialized;
6605 node->set_src = set_src;
6606 node->next = *nextp;
6609 if (onepart && emit_notes)
6610 add_value_chains (var->dv, loc);
6612 /* If no location was emitted do so. */
6613 if (var->var_part[pos].cur_loc == NULL)
6614 variable_was_changed (var, set);
6619 /* Set the part of variable's location in the dataflow set SET. The
6620 variable part is specified by variable's declaration in DV and
6621 offset OFFSET and the part's location by LOC. IOPT should be
6622 NO_INSERT if the variable is known to be in SET already and the
6623 variable hash table must not be resized, and INSERT otherwise. */
6626 set_variable_part (dataflow_set *set, rtx loc,
6627 decl_or_value dv, HOST_WIDE_INT offset,
6628 enum var_init_status initialized, rtx set_src,
6629 enum insert_option iopt)
6633 if (iopt == NO_INSERT)
6634 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6637 slot = shared_hash_find_slot (set->vars, dv);
6639 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
6641 slot = set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
6644 /* Remove all recorded register locations for the given variable part
6645 from dataflow set SET, except for those that are identical to loc.
6646 The variable part is specified by variable's declaration or value
6647 DV and offset OFFSET. */
6650 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
6651 HOST_WIDE_INT offset, rtx set_src)
6653 variable var = (variable) *slot;
6654 int pos = find_variable_location_part (var, offset, NULL);
6658 location_chain node, next;
6660 /* Remove the register locations from the dataflow set. */
6661 next = var->var_part[pos].loc_chain;
6662 for (node = next; node; node = next)
6665 if (node->loc != loc
6666 && (!flag_var_tracking_uninit
6669 || !rtx_equal_p (set_src, node->set_src)))
6671 if (REG_P (node->loc))
6676 /* Remove the variable part from the register's
6677 list, but preserve any other variable parts
6678 that might be regarded as live in that same
6680 anextp = &set->regs[REGNO (node->loc)];
6681 for (anode = *anextp; anode; anode = anext)
6683 anext = anode->next;
6684 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
6685 && anode->offset == offset)
6687 pool_free (attrs_pool, anode);
6691 anextp = &anode->next;
6695 slot = delete_slot_part (set, node->loc, slot, offset);
6703 /* Remove all recorded register locations for the given variable part
6704 from dataflow set SET, except for those that are identical to loc.
6705 The variable part is specified by variable's declaration or value
6706 DV and offset OFFSET. */
6709 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6710 HOST_WIDE_INT offset, rtx set_src)
6714 if (!dv_as_opaque (dv)
6715 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
6718 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6722 slot = clobber_slot_part (set, loc, slot, offset, set_src);
6725 /* Delete the part of variable's location from dataflow set SET. The
6726 variable part is specified by its SET->vars slot SLOT and offset
6727 OFFSET and the part's location by LOC. */
6730 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
6731 HOST_WIDE_INT offset)
6733 variable var = (variable) *slot;
6734 int pos = find_variable_location_part (var, offset, NULL);
6738 location_chain node, next;
6739 location_chain *nextp;
6742 if (shared_var_p (var, set->vars))
6744 /* If the variable contains the location part we have to
6745 make a copy of the variable. */
6746 for (node = var->var_part[pos].loc_chain; node;
6749 if ((REG_P (node->loc) && REG_P (loc)
6750 && REGNO (node->loc) == REGNO (loc))
6751 || rtx_equal_p (node->loc, loc))
6753 slot = unshare_variable (set, slot, var,
6754 VAR_INIT_STATUS_UNKNOWN);
6755 var = (variable)*slot;
6761 /* Delete the location part. */
6763 nextp = &var->var_part[pos].loc_chain;
6764 for (node = *nextp; node; node = next)
6767 if ((REG_P (node->loc) && REG_P (loc)
6768 && REGNO (node->loc) == REGNO (loc))
6769 || rtx_equal_p (node->loc, loc))
6771 if (emit_notes && pos == 0 && dv_onepart_p (var->dv))
6772 remove_value_chains (var->dv, node->loc);
6773 /* If we have deleted the location which was last emitted
6774 we have to emit new location so add the variable to set
6775 of changed variables. */
6776 if (var->var_part[pos].cur_loc == node->loc)
6779 var->var_part[pos].cur_loc = NULL;
6780 var->cur_loc_changed = true;
6782 pool_free (loc_chain_pool, node);
6787 nextp = &node->next;
6790 if (var->var_part[pos].loc_chain == NULL)
6795 var->cur_loc_changed = true;
6796 while (pos < var->n_var_parts)
6798 var->var_part[pos] = var->var_part[pos + 1];
6803 variable_was_changed (var, set);
6809 /* Delete the part of variable's location from dataflow set SET. The
6810 variable part is specified by variable's declaration or value DV
6811 and offset OFFSET and the part's location by LOC. */
6814 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6815 HOST_WIDE_INT offset)
6817 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
6821 slot = delete_slot_part (set, loc, slot, offset);
6824 /* Structure for passing some other parameters to function
6825 vt_expand_loc_callback. */
6826 struct expand_loc_callback_data
6828 /* The variables and values active at this point. */
6831 /* True in vt_expand_loc_dummy calls, no rtl should be allocated.
6832 Non-NULL should be returned if vt_expand_loc would return
6833 non-NULL in that case, NULL otherwise. cur_loc_changed should be
6834 computed and cur_loc recomputed when possible (but just once
6835 per emit_notes_for_changes call). */
6838 /* True if expansion of subexpressions had to recompute some
6839 VALUE/DEBUG_EXPR_DECL's cur_loc or used a VALUE/DEBUG_EXPR_DECL
6840 whose cur_loc has been already recomputed during current
6841 emit_notes_for_changes call. */
6842 bool cur_loc_changed;
6845 /* Callback for cselib_expand_value, that looks for expressions
6846 holding the value in the var-tracking hash tables. Return X for
6847 standard processing, anything else is to be used as-is. */
6850 vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
6852 struct expand_loc_callback_data *elcd
6853 = (struct expand_loc_callback_data *) data;
6854 bool dummy = elcd->dummy;
6855 bool cur_loc_changed = elcd->cur_loc_changed;
6859 rtx result, subreg, xret;
6861 switch (GET_CODE (x))
6866 if (cselib_dummy_expand_value_rtx_cb (SUBREG_REG (x), regs,
6868 vt_expand_loc_callback, data))
6874 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
6876 vt_expand_loc_callback, data);
6881 result = simplify_gen_subreg (GET_MODE (x), subreg,
6882 GET_MODE (SUBREG_REG (x)),
6885 /* Invalid SUBREGs are ok in debug info. ??? We could try
6886 alternate expansions for the VALUE as well. */
6888 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
6893 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
6898 dv = dv_from_value (x);
6906 if (VALUE_RECURSED_INTO (x))
6909 var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));
6913 if (dummy && dv_changed_p (dv))
6914 elcd->cur_loc_changed = true;
6918 if (var->n_var_parts == 0)
6921 elcd->cur_loc_changed = true;
6925 gcc_assert (var->n_var_parts == 1);
6927 VALUE_RECURSED_INTO (x) = true;
6930 if (var->var_part[0].cur_loc)
6934 if (cselib_dummy_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
6936 vt_expand_loc_callback, data))
6940 result = cselib_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
6942 vt_expand_loc_callback, data);
6944 set_dv_changed (dv, false);
6946 if (!result && dv_changed_p (dv))
6948 set_dv_changed (dv, false);
6949 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
6950 if (loc->loc == var->var_part[0].cur_loc)
6954 elcd->cur_loc_changed = cur_loc_changed;
6955 if (cselib_dummy_expand_value_rtx_cb (loc->loc, regs, max_depth,
6956 vt_expand_loc_callback,
6965 result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
6966 vt_expand_loc_callback, data);
6970 if (dummy && (result || var->var_part[0].cur_loc))
6971 var->cur_loc_changed = true;
6972 var->var_part[0].cur_loc = loc ? loc->loc : NULL_RTX;
6976 if (var->cur_loc_changed)
6977 elcd->cur_loc_changed = true;
6978 else if (!result && var->var_part[0].cur_loc == NULL_RTX)
6979 elcd->cur_loc_changed = cur_loc_changed;
6982 VALUE_RECURSED_INTO (x) = false;
6989 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
6993 vt_expand_loc (rtx loc, htab_t vars)
6995 struct expand_loc_callback_data data;
6997 if (!MAY_HAVE_DEBUG_INSNS)
7002 data.cur_loc_changed = false;
7003 loc = cselib_expand_value_rtx_cb (loc, scratch_regs, 5,
7004 vt_expand_loc_callback, &data);
7006 if (loc && MEM_P (loc))
7007 loc = targetm.delegitimize_address (loc);
7011 /* Like vt_expand_loc, but only return true/false (whether vt_expand_loc
7012 would succeed or not, without actually allocating new rtxes. */
7015 vt_expand_loc_dummy (rtx loc, htab_t vars, bool *pcur_loc_changed)
7017 struct expand_loc_callback_data data;
7020 gcc_assert (MAY_HAVE_DEBUG_INSNS);
7023 data.cur_loc_changed = false;
7024 ret = cselib_dummy_expand_value_rtx_cb (loc, scratch_regs, 5,
7025 vt_expand_loc_callback, &data);
7026 *pcur_loc_changed = data.cur_loc_changed;
7030 #ifdef ENABLE_RTL_CHECKING
7031 /* Used to verify that cur_loc_changed updating is safe. */
7032 static struct pointer_map_t *emitted_notes;
7035 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
7036 additional parameters: WHERE specifies whether the note shall be emitted
7037 before or after instruction INSN. */
7040 emit_note_insn_var_location (void **varp, void *data)
7042 variable var = (variable) *varp;
7043 rtx insn = ((emit_note_data *)data)->insn;
7044 enum emit_note_where where = ((emit_note_data *)data)->where;
7045 htab_t vars = ((emit_note_data *)data)->vars;
7047 int i, j, n_var_parts;
7049 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
7050 HOST_WIDE_INT last_limit;
7051 tree type_size_unit;
7052 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
7053 rtx loc[MAX_VAR_PARTS];
7057 if (dv_is_value_p (var->dv))
7058 goto value_or_debug_decl;
7060 decl = dv_as_decl (var->dv);
7062 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7063 goto value_or_debug_decl;
7068 if (!MAY_HAVE_DEBUG_INSNS)
7070 for (i = 0; i < var->n_var_parts; i++)
7071 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
7073 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
7074 var->cur_loc_changed = true;
7076 if (var->n_var_parts == 0)
7077 var->cur_loc_changed = true;
7079 #ifndef ENABLE_RTL_CHECKING
7080 if (!var->cur_loc_changed)
7083 for (i = 0; i < var->n_var_parts; i++)
7085 enum machine_mode mode, wider_mode;
7088 if (last_limit < var->var_part[i].offset)
7093 else if (last_limit > var->var_part[i].offset)
7095 offsets[n_var_parts] = var->var_part[i].offset;
7096 if (!var->var_part[i].cur_loc)
7101 loc2 = vt_expand_loc (var->var_part[i].cur_loc, vars);
7107 loc[n_var_parts] = loc2;
7108 mode = GET_MODE (var->var_part[i].cur_loc);
7109 if (mode == VOIDmode && dv_onepart_p (var->dv))
7110 mode = DECL_MODE (decl);
7111 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
7112 if (var->var_part[i].cur_loc == lc->loc)
7114 initialized = lc->init;
7118 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
7120 /* Attempt to merge adjacent registers or memory. */
7121 wider_mode = GET_MODE_WIDER_MODE (mode);
7122 for (j = i + 1; j < var->n_var_parts; j++)
7123 if (last_limit <= var->var_part[j].offset)
7125 if (j < var->n_var_parts
7126 && wider_mode != VOIDmode
7127 && var->var_part[j].cur_loc
7128 && mode == GET_MODE (var->var_part[j].cur_loc)
7129 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
7130 && last_limit == var->var_part[j].offset
7131 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
7132 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
7136 if (REG_P (loc[n_var_parts])
7137 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
7138 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
7139 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
7142 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
7143 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
7145 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
7146 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
7149 if (!REG_P (new_loc)
7150 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
7153 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
7156 else if (MEM_P (loc[n_var_parts])
7157 && GET_CODE (XEXP (loc2, 0)) == PLUS
7158 && REG_P (XEXP (XEXP (loc2, 0), 0))
7159 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
7161 if ((REG_P (XEXP (loc[n_var_parts], 0))
7162 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
7163 XEXP (XEXP (loc2, 0), 0))
7164 && INTVAL (XEXP (XEXP (loc2, 0), 1))
7165 == GET_MODE_SIZE (mode))
7166 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
7167 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
7168 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
7169 XEXP (XEXP (loc2, 0), 0))
7170 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
7171 + GET_MODE_SIZE (mode)
7172 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
7173 new_loc = adjust_address_nv (loc[n_var_parts],
7179 loc[n_var_parts] = new_loc;
7181 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
7187 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7188 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
7191 if (! flag_var_tracking_uninit)
7192 initialized = VAR_INIT_STATUS_INITIALIZED;
7196 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
7198 else if (n_var_parts == 1)
7202 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
7203 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
7207 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
7210 else if (n_var_parts)
7214 for (i = 0; i < n_var_parts; i++)
7216 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
7218 parallel = gen_rtx_PARALLEL (VOIDmode,
7219 gen_rtvec_v (n_var_parts, loc));
7220 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
7221 parallel, (int) initialized);
7224 #ifdef ENABLE_RTL_CHECKING
7227 void **note_slot = pointer_map_insert (emitted_notes, decl);
7228 rtx pnote = (rtx) *note_slot;
7229 if (!var->cur_loc_changed && (pnote || PAT_VAR_LOCATION_LOC (note_vl)))
7232 gcc_assert (rtx_equal_p (PAT_VAR_LOCATION_LOC (pnote),
7233 PAT_VAR_LOCATION_LOC (note_vl)));
7235 *note_slot = (void *) note_vl;
7237 if (!var->cur_loc_changed)
7241 if (where != EMIT_NOTE_BEFORE_INSN)
7243 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
7244 if (where == EMIT_NOTE_AFTER_CALL_INSN)
7245 NOTE_DURING_CALL_P (note) = true;
7248 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
7249 NOTE_VAR_LOCATION (note) = note_vl;
7252 set_dv_changed (var->dv, false);
7253 var->cur_loc_changed = false;
7254 gcc_assert (var->in_changed_variables);
7255 var->in_changed_variables = false;
7256 htab_clear_slot (changed_variables, varp);
7258 /* Continue traversing the hash table. */
7261 value_or_debug_decl:
7262 if (dv_changed_p (var->dv) && var->n_var_parts)
7265 bool cur_loc_changed;
7267 if (var->var_part[0].cur_loc
7268 && vt_expand_loc_dummy (var->var_part[0].cur_loc, vars,
7271 for (lc = var->var_part[0].loc_chain; lc; lc = lc->next)
7272 if (lc->loc != var->var_part[0].cur_loc
7273 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
7275 var->var_part[0].cur_loc = lc ? lc->loc : NULL_RTX;
7280 DEF_VEC_P (variable);
7281 DEF_VEC_ALLOC_P (variable, heap);
7283 /* Stack of variable_def pointers that need processing with
7284 check_changed_vars_2. */
7286 static VEC (variable, heap) *changed_variables_stack;
7288 /* VALUEs with no variables that need set_dv_changed (val, false)
7289 called before check_changed_vars_3. */
7291 static VEC (rtx, heap) *changed_values_stack;
7293 /* Helper function for check_changed_vars_1 and check_changed_vars_2. */
7296 check_changed_vars_0 (decl_or_value dv, htab_t htab)
7299 = (value_chain) htab_find_with_hash (value_chains, dv, dv_htab_hash (dv));
7303 for (vc = vc->next; vc; vc = vc->next)
7304 if (!dv_changed_p (vc->dv))
7307 = (variable) htab_find_with_hash (htab, vc->dv,
7308 dv_htab_hash (vc->dv));
7311 set_dv_changed (vc->dv, true);
7312 VEC_safe_push (variable, heap, changed_variables_stack, vcvar);
7314 else if (dv_is_value_p (vc->dv))
7316 set_dv_changed (vc->dv, true);
7317 VEC_safe_push (rtx, heap, changed_values_stack,
7318 dv_as_value (vc->dv));
7319 check_changed_vars_0 (vc->dv, htab);
7324 /* Populate changed_variables_stack with variable_def pointers
7325 that need variable_was_changed called on them. */
7328 check_changed_vars_1 (void **slot, void *data)
7330 variable var = (variable) *slot;
7331 htab_t htab = (htab_t) data;
7333 if (dv_is_value_p (var->dv)
7334 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7335 check_changed_vars_0 (var->dv, htab);
7339 /* Add VAR to changed_variables and also for VALUEs add recursively
7340 all DVs that aren't in changed_variables yet but reference the
7341 VALUE from its loc_chain. */
7344 check_changed_vars_2 (variable var, htab_t htab)
7346 variable_was_changed (var, NULL);
7347 if (dv_is_value_p (var->dv)
7348 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7349 check_changed_vars_0 (var->dv, htab);
7352 /* For each changed decl (except DEBUG_EXPR_DECLs) recompute
7353 cur_loc if needed (and cur_loc of all VALUEs and DEBUG_EXPR_DECLs
7354 it needs and are also in changed variables) and track whether
7355 cur_loc (or anything it uses to compute location) had to change
7356 during the current emit_notes_for_changes call. */
7359 check_changed_vars_3 (void **slot, void *data)
7361 variable var = (variable) *slot;
7362 htab_t vars = (htab_t) data;
7365 bool cur_loc_changed;
7367 if (dv_is_value_p (var->dv)
7368 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7371 for (i = 0; i < var->n_var_parts; i++)
7373 if (var->var_part[i].cur_loc
7374 && vt_expand_loc_dummy (var->var_part[i].cur_loc, vars,
7377 if (cur_loc_changed)
7378 var->cur_loc_changed = true;
7381 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
7382 if (lc->loc != var->var_part[i].cur_loc
7383 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
7385 if (lc || var->var_part[i].cur_loc)
7386 var->cur_loc_changed = true;
7387 var->var_part[i].cur_loc = lc ? lc->loc : NULL_RTX;
7389 if (var->n_var_parts == 0)
7390 var->cur_loc_changed = true;
7394 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
7395 CHANGED_VARIABLES and delete this chain. WHERE specifies whether the notes
7396 shall be emitted before of after instruction INSN. */
7399 emit_notes_for_changes (rtx insn, enum emit_note_where where,
7402 emit_note_data data;
7403 htab_t htab = shared_hash_htab (vars);
7405 if (!htab_elements (changed_variables))
7408 if (MAY_HAVE_DEBUG_INSNS)
7410 /* Unfortunately this has to be done in two steps, because
7411 we can't traverse a hashtab into which we are inserting
7412 through variable_was_changed. */
7413 htab_traverse (changed_variables, check_changed_vars_1, htab);
7414 while (VEC_length (variable, changed_variables_stack) > 0)
7415 check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
7417 while (VEC_length (rtx, changed_values_stack) > 0)
7418 set_dv_changed (dv_from_value (VEC_pop (rtx, changed_values_stack)),
7420 htab_traverse (changed_variables, check_changed_vars_3, htab);
7427 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
7430 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
7431 same variable in hash table DATA or is not there at all. */
7434 emit_notes_for_differences_1 (void **slot, void *data)
7436 htab_t new_vars = (htab_t) data;
7437 variable old_var, new_var;
7439 old_var = (variable) *slot;
7440 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
7441 dv_htab_hash (old_var->dv));
7445 /* Variable has disappeared. */
7448 empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
7449 empty_var->dv = old_var->dv;
7450 empty_var->refcount = 0;
7451 empty_var->n_var_parts = 0;
7452 empty_var->cur_loc_changed = false;
7453 empty_var->in_changed_variables = false;
7454 if (dv_onepart_p (old_var->dv))
7458 gcc_assert (old_var->n_var_parts == 1);
7459 for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
7460 remove_value_chains (old_var->dv, lc->loc);
7462 variable_was_changed (empty_var, NULL);
7463 /* Continue traversing the hash table. */
7466 if (variable_different_p (old_var, new_var))
7468 if (dv_onepart_p (old_var->dv))
7470 location_chain lc1, lc2;
7472 gcc_assert (old_var->n_var_parts == 1);
7473 gcc_assert (new_var->n_var_parts == 1);
7474 lc1 = old_var->var_part[0].loc_chain;
7475 lc2 = new_var->var_part[0].loc_chain;
7478 && ((REG_P (lc1->loc) && REG_P (lc2->loc))
7479 || rtx_equal_p (lc1->loc, lc2->loc)))
7484 for (; lc2; lc2 = lc2->next)
7485 add_value_chains (old_var->dv, lc2->loc);
7486 for (; lc1; lc1 = lc1->next)
7487 remove_value_chains (old_var->dv, lc1->loc);
7489 variable_was_changed (new_var, NULL);
7491 /* Update cur_loc. */
7492 if (old_var != new_var)
7495 for (i = 0; i < new_var->n_var_parts; i++)
7497 new_var->var_part[i].cur_loc = NULL;
7498 if (old_var->n_var_parts != new_var->n_var_parts
7499 || old_var->var_part[i].offset != new_var->var_part[i].offset)
7500 new_var->cur_loc_changed = true;
7501 else if (old_var->var_part[i].cur_loc != NULL)
7504 rtx cur_loc = old_var->var_part[i].cur_loc;
7506 for (lc = new_var->var_part[i].loc_chain; lc; lc = lc->next)
7507 if (lc->loc == cur_loc
7508 || rtx_equal_p (cur_loc, lc->loc))
7510 new_var->var_part[i].cur_loc = lc->loc;
7514 new_var->cur_loc_changed = true;
7519 /* Continue traversing the hash table. */
7523 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
7527 emit_notes_for_differences_2 (void **slot, void *data)
7529 htab_t old_vars = (htab_t) data;
7530 variable old_var, new_var;
7532 new_var = (variable) *slot;
7533 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
7534 dv_htab_hash (new_var->dv));
7538 /* Variable has appeared. */
7539 if (dv_onepart_p (new_var->dv))
7543 gcc_assert (new_var->n_var_parts == 1);
7544 for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
7545 add_value_chains (new_var->dv, lc->loc);
7547 for (i = 0; i < new_var->n_var_parts; i++)
7548 new_var->var_part[i].cur_loc = NULL;
7549 variable_was_changed (new_var, NULL);
7552 /* Continue traversing the hash table. */
7556 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
7560 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
7561 dataflow_set *new_set)
7563 htab_traverse (shared_hash_htab (old_set->vars),
7564 emit_notes_for_differences_1,
7565 shared_hash_htab (new_set->vars));
7566 htab_traverse (shared_hash_htab (new_set->vars),
7567 emit_notes_for_differences_2,
7568 shared_hash_htab (old_set->vars));
7569 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
7572 /* Emit the notes for changes of location parts in the basic block BB. */
7575 emit_notes_in_bb (basic_block bb, dataflow_set *set)
7578 micro_operation *mo;
7580 dataflow_set_clear (set);
7581 dataflow_set_copy (set, &VTI (bb)->in);
7583 for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
7585 rtx insn = mo->insn;
7590 dataflow_set_clear_at_call (set);
7591 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
7596 rtx loc = mo->u.loc;
7599 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
7601 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
7603 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
7609 rtx loc = mo->u.loc;
7613 if (GET_CODE (loc) == CONCAT)
7615 val = XEXP (loc, 0);
7616 vloc = XEXP (loc, 1);
7624 var = PAT_VAR_LOCATION_DECL (vloc);
7626 clobber_variable_part (set, NULL_RTX,
7627 dv_from_decl (var), 0, NULL_RTX);
7630 if (VAL_NEEDS_RESOLUTION (loc))
7631 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
7632 set_variable_part (set, val, dv_from_decl (var), 0,
7633 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
7636 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
7637 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
7638 dv_from_decl (var), 0,
7639 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
7642 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
7648 rtx loc = mo->u.loc;
7649 rtx val, vloc, uloc;
7651 vloc = uloc = XEXP (loc, 1);
7652 val = XEXP (loc, 0);
7654 if (GET_CODE (val) == CONCAT)
7656 uloc = XEXP (val, 1);
7657 val = XEXP (val, 0);
7660 if (VAL_NEEDS_RESOLUTION (loc))
7661 val_resolve (set, val, vloc, insn);
7663 val_store (set, val, uloc, insn, false);
7665 if (VAL_HOLDS_TRACK_EXPR (loc))
7667 if (GET_CODE (uloc) == REG)
7668 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
7670 else if (GET_CODE (uloc) == MEM)
7671 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
7675 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
7681 rtx loc = mo->u.loc;
7682 rtx val, vloc, uloc, reverse = NULL_RTX;
7685 if (VAL_EXPR_HAS_REVERSE (loc))
7687 reverse = XEXP (loc, 1);
7688 vloc = XEXP (loc, 0);
7690 uloc = XEXP (vloc, 1);
7691 val = XEXP (vloc, 0);
7694 if (GET_CODE (val) == CONCAT)
7696 vloc = XEXP (val, 1);
7697 val = XEXP (val, 0);
7700 if (GET_CODE (vloc) == SET)
7702 rtx vsrc = SET_SRC (vloc);
7704 gcc_assert (val != vsrc);
7705 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
7707 vloc = SET_DEST (vloc);
7709 if (VAL_NEEDS_RESOLUTION (loc))
7710 val_resolve (set, val, vsrc, insn);
7712 else if (VAL_NEEDS_RESOLUTION (loc))
7714 gcc_assert (GET_CODE (uloc) == SET
7715 && GET_CODE (SET_SRC (uloc)) == REG);
7716 val_resolve (set, val, SET_SRC (uloc), insn);
7719 if (VAL_HOLDS_TRACK_EXPR (loc))
7721 if (VAL_EXPR_IS_CLOBBERED (loc))
7724 var_reg_delete (set, uloc, true);
7725 else if (MEM_P (uloc))
7726 var_mem_delete (set, uloc, true);
7730 bool copied_p = VAL_EXPR_IS_COPIED (loc);
7732 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
7734 if (GET_CODE (uloc) == SET)
7736 set_src = SET_SRC (uloc);
7737 uloc = SET_DEST (uloc);
7742 status = find_src_status (set, set_src);
7744 set_src = find_src_set_src (set, set_src);
7748 var_reg_delete_and_set (set, uloc, !copied_p,
7750 else if (MEM_P (uloc))
7751 var_mem_delete_and_set (set, uloc, !copied_p,
7755 else if (REG_P (uloc))
7756 var_regno_delete (set, REGNO (uloc));
7758 val_store (set, val, vloc, insn, true);
7761 val_store (set, XEXP (reverse, 0), XEXP (reverse, 1),
7764 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7771 rtx loc = mo->u.loc;
7774 if (GET_CODE (loc) == SET)
7776 set_src = SET_SRC (loc);
7777 loc = SET_DEST (loc);
7781 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7784 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7787 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7794 rtx loc = mo->u.loc;
7795 enum var_init_status src_status;
7798 if (GET_CODE (loc) == SET)
7800 set_src = SET_SRC (loc);
7801 loc = SET_DEST (loc);
7804 src_status = find_src_status (set, set_src);
7805 set_src = find_src_set_src (set, set_src);
7808 var_reg_delete_and_set (set, loc, false, src_status, set_src);
7810 var_mem_delete_and_set (set, loc, false, src_status, set_src);
7812 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7819 rtx loc = mo->u.loc;
7822 var_reg_delete (set, loc, false);
7824 var_mem_delete (set, loc, false);
7826 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
7832 rtx loc = mo->u.loc;
7835 var_reg_delete (set, loc, true);
7837 var_mem_delete (set, loc, true);
7839 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7845 set->stack_adjust += mo->u.adjust;
7851 /* Emit notes for the whole function. */
7854 vt_emit_notes (void)
7859 #ifdef ENABLE_RTL_CHECKING
7860 emitted_notes = pointer_map_create ();
7862 gcc_assert (!htab_elements (changed_variables));
7864 /* Free memory occupied by the out hash tables, as they aren't used
7867 dataflow_set_clear (&VTI (bb)->out);
7869 /* Enable emitting notes by functions (mainly by set_variable_part and
7870 delete_variable_part). */
7873 if (MAY_HAVE_DEBUG_INSNS)
7878 for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
7879 add_cselib_value_chains (dv_from_value (val));
7880 changed_variables_stack = VEC_alloc (variable, heap, 40);
7881 changed_values_stack = VEC_alloc (rtx, heap, 40);
7884 dataflow_set_init (&cur);
7888 /* Emit the notes for changes of variable locations between two
7889 subsequent basic blocks. */
7890 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
7892 /* Emit the notes for the changes in the basic block itself. */
7893 emit_notes_in_bb (bb, &cur);
7895 /* Free memory occupied by the in hash table, we won't need it
7897 dataflow_set_clear (&VTI (bb)->in);
7899 #ifdef ENABLE_CHECKING
7900 htab_traverse (shared_hash_htab (cur.vars),
7901 emit_notes_for_differences_1,
7902 shared_hash_htab (empty_shared_hash));
7903 if (MAY_HAVE_DEBUG_INSNS)
7908 for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
7909 remove_cselib_value_chains (dv_from_value (val));
7910 gcc_assert (htab_elements (value_chains) == 0);
7913 dataflow_set_destroy (&cur);
7915 if (MAY_HAVE_DEBUG_INSNS)
7917 VEC_free (variable, heap, changed_variables_stack);
7918 VEC_free (rtx, heap, changed_values_stack);
7921 #ifdef ENABLE_RTL_CHECKING
7922 pointer_map_destroy (emitted_notes);
7927 /* If there is a declaration and offset associated with register/memory RTL
7928 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
7931 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
7935 if (REG_ATTRS (rtl))
7937 *declp = REG_EXPR (rtl);
7938 *offsetp = REG_OFFSET (rtl);
7942 else if (MEM_P (rtl))
7944 if (MEM_ATTRS (rtl))
7946 *declp = MEM_EXPR (rtl);
7947 *offsetp = INT_MEM_OFFSET (rtl);
7954 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
7957 vt_add_function_parameters (void)
7961 for (parm = DECL_ARGUMENTS (current_function_decl);
7962 parm; parm = TREE_CHAIN (parm))
7964 rtx decl_rtl = DECL_RTL_IF_SET (parm);
7965 rtx incoming = DECL_INCOMING_RTL (parm);
7967 enum machine_mode mode;
7968 HOST_WIDE_INT offset;
7972 if (TREE_CODE (parm) != PARM_DECL)
7975 if (!DECL_NAME (parm))
7978 if (!decl_rtl || !incoming)
7981 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
7984 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
7986 if (REG_P (incoming) || MEM_P (incoming))
7988 /* This means argument is passed by invisible reference. */
7991 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
7995 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
7997 offset += byte_lowpart_offset (GET_MODE (incoming),
7998 GET_MODE (decl_rtl));
8007 /* Assume that DECL_RTL was a pseudo that got spilled to
8008 memory. The spill slot sharing code will force the
8009 memory to reference spill_slot_decl (%sfp), so we don't
8010 match above. That's ok, the pseudo must have referenced
8011 the entire parameter, so just reset OFFSET. */
8012 gcc_assert (decl == get_spill_slot_decl (false));
8016 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
8019 out = &VTI (ENTRY_BLOCK_PTR)->out;
8021 dv = dv_from_decl (parm);
8023 if (target_for_debug_bind (parm)
8024 /* We can't deal with these right now, because this kind of
8025 variable is single-part. ??? We could handle parallels
8026 that describe multiple locations for the same single
8027 value, but ATM we don't. */
8028 && GET_CODE (incoming) != PARALLEL)
8032 /* ??? We shouldn't ever hit this, but it may happen because
8033 arguments passed by invisible reference aren't dealt with
8034 above: incoming-rtl will have Pmode rather than the
8035 expected mode for the type. */
8039 val = cselib_lookup (var_lowpart (mode, incoming), mode, true);
8041 /* ??? Float-typed values in memory are not handled by
8045 preserve_value (val);
8046 set_variable_part (out, val->val_rtx, dv, offset,
8047 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8048 dv = dv_from_value (val->val_rtx);
8052 if (REG_P (incoming))
8054 incoming = var_lowpart (mode, incoming);
8055 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
8056 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
8058 set_variable_part (out, incoming, dv, offset,
8059 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8061 else if (MEM_P (incoming))
8063 incoming = var_lowpart (mode, incoming);
8064 set_variable_part (out, incoming, dv, offset,
8065 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8069 if (MAY_HAVE_DEBUG_INSNS)
8071 cselib_preserve_only_values ();
8072 cselib_reset_table (cselib_get_next_uid ());
8077 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
8080 fp_setter (rtx insn)
8082 rtx pat = PATTERN (insn);
8083 if (RTX_FRAME_RELATED_P (insn))
8085 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
8087 pat = XEXP (expr, 0);
8089 if (GET_CODE (pat) == SET)
8090 return SET_DEST (pat) == hard_frame_pointer_rtx;
8091 else if (GET_CODE (pat) == PARALLEL)
8094 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
8095 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
8096 && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
8102 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
8103 ensure it isn't flushed during cselib_reset_table.
8104 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
8105 has been eliminated. */
8108 vt_init_cfa_base (void)
8112 #ifdef FRAME_POINTER_CFA_OFFSET
8113 cfa_base_rtx = frame_pointer_rtx;
8115 cfa_base_rtx = arg_pointer_rtx;
8117 if (cfa_base_rtx == hard_frame_pointer_rtx
8118 || !fixed_regs[REGNO (cfa_base_rtx)])
8120 cfa_base_rtx = NULL_RTX;
8123 if (!MAY_HAVE_DEBUG_INSNS)
8126 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
8128 preserve_value (val);
8129 cselib_preserve_cfa_base_value (val);
8130 var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,
8131 VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx),
8132 0, NULL_RTX, INSERT);
8135 /* Allocate and initialize the data structures for variable tracking
8136 and parse the RTL to get the micro operations. */
8139 vt_initialize (void)
8141 basic_block bb, prologue_bb = NULL;
8142 HOST_WIDE_INT fp_cfa_offset = -1;
8144 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
8146 attrs_pool = create_alloc_pool ("attrs_def pool",
8147 sizeof (struct attrs_def), 1024);
8148 var_pool = create_alloc_pool ("variable_def pool",
8149 sizeof (struct variable_def)
8150 + (MAX_VAR_PARTS - 1)
8151 * sizeof (((variable)NULL)->var_part[0]), 64);
8152 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
8153 sizeof (struct location_chain_def),
8155 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
8156 sizeof (struct shared_hash_def), 256);
8157 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
8158 empty_shared_hash->refcount = 1;
8159 empty_shared_hash->htab
8160 = htab_create (1, variable_htab_hash, variable_htab_eq,
8161 variable_htab_free);
8162 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
8163 variable_htab_free);
8164 if (MAY_HAVE_DEBUG_INSNS)
8166 value_chain_pool = create_alloc_pool ("value_chain_def pool",
8167 sizeof (struct value_chain_def),
8169 value_chains = htab_create (32, value_chain_htab_hash,
8170 value_chain_htab_eq, NULL);
8173 /* Init the IN and OUT sets. */
8176 VTI (bb)->visited = false;
8177 VTI (bb)->flooded = false;
8178 dataflow_set_init (&VTI (bb)->in);
8179 dataflow_set_init (&VTI (bb)->out);
8180 VTI (bb)->permp = NULL;
8183 if (MAY_HAVE_DEBUG_INSNS)
8185 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
8186 scratch_regs = BITMAP_ALLOC (NULL);
8187 valvar_pool = create_alloc_pool ("small variable_def pool",
8188 sizeof (struct variable_def), 256);
8189 preserved_values = VEC_alloc (rtx, heap, 256);
8193 scratch_regs = NULL;
8197 if (!frame_pointer_needed)
8201 if (!vt_stack_adjustments ())
8204 #ifdef FRAME_POINTER_CFA_OFFSET
8205 reg = frame_pointer_rtx;
8207 reg = arg_pointer_rtx;
8209 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
8212 if (GET_CODE (elim) == PLUS)
8213 elim = XEXP (elim, 0);
8214 if (elim == stack_pointer_rtx)
8215 vt_init_cfa_base ();
8218 else if (!crtl->stack_realign_tried)
8222 #ifdef FRAME_POINTER_CFA_OFFSET
8223 reg = frame_pointer_rtx;
8224 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
8226 reg = arg_pointer_rtx;
8227 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
8229 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
8232 if (GET_CODE (elim) == PLUS)
8234 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
8235 elim = XEXP (elim, 0);
8237 if (elim != hard_frame_pointer_rtx)
8240 prologue_bb = single_succ (ENTRY_BLOCK_PTR);
8244 hard_frame_pointer_adjustment = -1;
8249 HOST_WIDE_INT pre, post = 0;
8250 basic_block first_bb, last_bb;
8252 if (MAY_HAVE_DEBUG_INSNS)
8254 cselib_record_sets_hook = add_with_sets;
8255 if (dump_file && (dump_flags & TDF_DETAILS))
8256 fprintf (dump_file, "first value: %i\n",
8257 cselib_get_next_uid ());
8264 if (bb->next_bb == EXIT_BLOCK_PTR
8265 || ! single_pred_p (bb->next_bb))
8267 e = find_edge (bb, bb->next_bb);
8268 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
8274 /* Add the micro-operations to the vector. */
8275 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
8277 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
8278 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
8279 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
8280 insn = NEXT_INSN (insn))
8284 if (!frame_pointer_needed)
8286 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
8290 mo.type = MO_ADJUST;
8293 if (dump_file && (dump_flags & TDF_DETAILS))
8294 log_op_type (PATTERN (insn), bb, insn,
8295 MO_ADJUST, dump_file);
8296 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
8298 VTI (bb)->out.stack_adjust += pre;
8302 cselib_hook_called = false;
8303 adjust_insn (bb, insn);
8304 if (MAY_HAVE_DEBUG_INSNS)
8306 cselib_process_insn (insn);
8307 if (dump_file && (dump_flags & TDF_DETAILS))
8309 print_rtl_single (dump_file, insn);
8310 dump_cselib_table (dump_file);
8313 if (!cselib_hook_called)
8314 add_with_sets (insn, 0, 0);
8317 if (!frame_pointer_needed && post)
8320 mo.type = MO_ADJUST;
8323 if (dump_file && (dump_flags & TDF_DETAILS))
8324 log_op_type (PATTERN (insn), bb, insn,
8325 MO_ADJUST, dump_file);
8326 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
8328 VTI (bb)->out.stack_adjust += post;
8331 if (bb == prologue_bb
8332 && hard_frame_pointer_adjustment == -1
8333 && RTX_FRAME_RELATED_P (insn)
8334 && fp_setter (insn))
8336 vt_init_cfa_base ();
8337 hard_frame_pointer_adjustment = fp_cfa_offset;
8341 gcc_assert (offset == VTI (bb)->out.stack_adjust);
8346 if (MAY_HAVE_DEBUG_INSNS)
8348 cselib_preserve_only_values ();
8349 cselib_reset_table (cselib_get_next_uid ());
8350 cselib_record_sets_hook = NULL;
8354 hard_frame_pointer_adjustment = -1;
8355 VTI (ENTRY_BLOCK_PTR)->flooded = true;
8356 vt_add_function_parameters ();
8357 cfa_base_rtx = NULL_RTX;
8361 /* Get rid of all debug insns from the insn stream. */
8364 delete_debug_insns (void)
8369 if (!MAY_HAVE_DEBUG_INSNS)
8374 FOR_BB_INSNS_SAFE (bb, insn, next)
8375 if (DEBUG_INSN_P (insn))
8380 /* Run a fast, BB-local only version of var tracking, to take care of
8381 information that we don't do global analysis on, such that not all
8382 information is lost. If SKIPPED holds, we're skipping the global
8383 pass entirely, so we should try to use information it would have
8384 handled as well.. */
8387 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
8389 /* ??? Just skip it all for now. */
8390 delete_debug_insns ();
8393 /* Free the data structures needed for variable tracking. */
8402 VEC_free (micro_operation, heap, VTI (bb)->mos);
8407 dataflow_set_destroy (&VTI (bb)->in);
8408 dataflow_set_destroy (&VTI (bb)->out);
8409 if (VTI (bb)->permp)
8411 dataflow_set_destroy (VTI (bb)->permp);
8412 XDELETE (VTI (bb)->permp);
8415 free_aux_for_blocks ();
8416 htab_delete (empty_shared_hash->htab);
8417 htab_delete (changed_variables);
8418 free_alloc_pool (attrs_pool);
8419 free_alloc_pool (var_pool);
8420 free_alloc_pool (loc_chain_pool);
8421 free_alloc_pool (shared_hash_pool);
8423 if (MAY_HAVE_DEBUG_INSNS)
8425 htab_delete (value_chains);
8426 free_alloc_pool (value_chain_pool);
8427 free_alloc_pool (valvar_pool);
8428 VEC_free (rtx, heap, preserved_values);
8430 BITMAP_FREE (scratch_regs);
8431 scratch_regs = NULL;
8435 XDELETEVEC (vui_vec);
8440 /* The entry point to variable tracking pass. */
8442 static inline unsigned int
8443 variable_tracking_main_1 (void)
8447 if (flag_var_tracking_assignments < 0)
8449 delete_debug_insns ();
8453 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
8455 vt_debug_insns_local (true);
8459 mark_dfs_back_edges ();
8460 if (!vt_initialize ())
8463 vt_debug_insns_local (true);
8467 success = vt_find_locations ();
8469 if (!success && flag_var_tracking_assignments > 0)
8473 delete_debug_insns ();
8475 /* This is later restored by our caller. */
8476 flag_var_tracking_assignments = 0;
8478 success = vt_initialize ();
8479 gcc_assert (success);
8481 success = vt_find_locations ();
8487 vt_debug_insns_local (false);
8491 if (dump_file && (dump_flags & TDF_DETAILS))
8493 dump_dataflow_sets ();
8494 dump_flow_info (dump_file, dump_flags);
8500 vt_debug_insns_local (false);
8505 variable_tracking_main (void)
8508 int save = flag_var_tracking_assignments;
8510 ret = variable_tracking_main_1 ();
8512 flag_var_tracking_assignments = save;
8518 gate_handle_var_tracking (void)
8520 return (flag_var_tracking);
8525 struct rtl_opt_pass pass_variable_tracking =
8529 "vartrack", /* name */
8530 gate_handle_var_tracking, /* gate */
8531 variable_tracking_main, /* execute */
8534 0, /* static_pass_number */
8535 TV_VAR_TRACKING, /* tv_id */
8536 0, /* properties_required */
8537 0, /* properties_provided */
8538 0, /* properties_destroyed */
8539 0, /* todo_flags_start */
8540 TODO_dump_func | TODO_verify_rtl_sharing/* todo_flags_finish */