1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 < set < clobber < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offset of variables addressed using stack pointer),
48 the table of structures describing the locations of parts of a variable
49 and for each physical register a linked list for each physical register.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 effective deleting appropriate variable parts when we set or clobber the
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short so it is a good data structure here.
58 For example in the following code, register allocator may assign same
59 register to variables A and B, and both of them are stored in the same
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in RTL code. Each such a note describes
74 the location of one variable at the point in instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction, we only emit these notes where the location of variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for a large variables
85 which consist of several parts, for example long long).
91 #include "coretypes.h"
95 #include "hard-reg-set.h"
96 #include "basic-block.h"
99 #include "insn-config.h"
102 #include "alloc-pool.h"
108 #include "tree-pass.h"
109 #include "tree-flow.h"
114 #include "diagnostic.h"
115 #include "pointer-set.h"
118 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
119 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
120 Currently the value is the same as IDENTIFIER_NODE, which has such
121 a property. If this compile time assertion ever fails, make sure that
122 the new tree code that equals (int) VALUE has the same property. */
123 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
125 /* Type of micro operation. */
126 enum micro_operation_type
128 MO_USE, /* Use location (REG or MEM). */
129 MO_USE_NO_VAR,/* Use location which is not associated with a variable
130 or the variable is not trackable. */
131 MO_VAL_USE, /* Use location which is associated with a value. */
132 MO_VAL_LOC, /* Use location which appears in a debug insn. */
133 MO_VAL_SET, /* Set location associated with a value. */
134 MO_SET, /* Set location. */
135 MO_COPY, /* Copy the same portion of a variable from one
136 location to another. */
137 MO_CLOBBER, /* Clobber location. */
138 MO_CALL, /* Call insn. */
139 MO_ADJUST /* Adjust stack pointer. */
143 static const char * const ATTRIBUTE_UNUSED
144 micro_operation_type_name[] = {
157 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
158 Notes emitted as AFTER_CALL are to take effect during the call,
159 rather than after the call. */
162 EMIT_NOTE_BEFORE_INSN,
163 EMIT_NOTE_AFTER_INSN,
164 EMIT_NOTE_AFTER_CALL_INSN
167 /* Structure holding information about micro operation. */
168 typedef struct micro_operation_def
170 /* Type of micro operation. */
171 enum micro_operation_type type;
173 /* The instruction which the micro operation is in, for MO_USE,
174 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
175 instruction or note in the original flow (before any var-tracking
176 notes are inserted, to simplify emission of notes), for MO_SET
181 /* Location. For MO_SET and MO_COPY, this is the SET that
182 performs the assignment, if known, otherwise it is the target
183 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
184 CONCAT of the VALUE and the LOC associated with it. For
185 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
186 associated with it. */
189 /* Stack adjustment. */
190 HOST_WIDE_INT adjust;
194 DEF_VEC_O(micro_operation);
195 DEF_VEC_ALLOC_O(micro_operation,heap);
197 /* A declaration of a variable, or an RTL value being handled like a
199 typedef void *decl_or_value;
201 /* Structure for passing some other parameters to function
202 emit_note_insn_var_location. */
203 typedef struct emit_note_data_def
205 /* The instruction which the note will be emitted before/after. */
208 /* Where the note will be emitted (before/after insn)? */
209 enum emit_note_where where;
211 /* The variables and values active at this point. */
215 /* Description of location of a part of a variable. The content of a physical
216 register is described by a chain of these structures.
217 The chains are pretty short (usually 1 or 2 elements) and thus
218 chain is the best data structure. */
219 typedef struct attrs_def
221 /* Pointer to next member of the list. */
222 struct attrs_def *next;
224 /* The rtx of register. */
227 /* The declaration corresponding to LOC. */
230 /* Offset from start of DECL. */
231 HOST_WIDE_INT offset;
234 /* Structure holding a refcounted hash table. If refcount > 1,
235 it must be first unshared before modified. */
236 typedef struct shared_hash_def
238 /* Reference count. */
241 /* Actual hash table. */
245 /* Structure holding the IN or OUT set for a basic block. */
246 typedef struct dataflow_set_def
248 /* Adjustment of stack offset. */
249 HOST_WIDE_INT stack_adjust;
251 /* Attributes for registers (lists of attrs). */
252 attrs regs[FIRST_PSEUDO_REGISTER];
254 /* Variable locations. */
257 /* Vars that is being traversed. */
258 shared_hash traversed_vars;
261 /* The structure (one for each basic block) containing the information
262 needed for variable tracking. */
263 typedef struct variable_tracking_info_def
265 /* The vector of micro operations. */
266 VEC(micro_operation, heap) *mos;
268 /* The IN and OUT set for dataflow analysis. */
272 /* The permanent-in dataflow set for this block. This is used to
273 hold values for which we had to compute entry values. ??? This
274 should probably be dynamically allocated, to avoid using more
275 memory in non-debug builds. */
278 /* Has the block been visited in DFS? */
281 /* Has the block been flooded in VTA? */
284 } *variable_tracking_info;
286 /* Structure for chaining the locations. */
287 typedef struct location_chain_def
289 /* Next element in the chain. */
290 struct location_chain_def *next;
292 /* The location (REG, MEM or VALUE). */
295 /* The "value" stored in this location. */
299 enum var_init_status init;
302 /* Structure describing one part of variable. */
303 typedef struct variable_part_def
305 /* Chain of locations of the part. */
306 location_chain loc_chain;
308 /* Location which was last emitted to location list. */
311 /* The offset in the variable. */
312 HOST_WIDE_INT offset;
315 /* Maximum number of location parts. */
316 #define MAX_VAR_PARTS 16
318 /* Structure describing where the variable is located. */
319 typedef struct variable_def
321 /* The declaration of the variable, or an RTL value being handled
322 like a declaration. */
325 /* Reference count. */
328 /* Number of variable parts. */
331 /* True if this variable changed (any of its) cur_loc fields
332 during the current emit_notes_for_changes resp.
333 emit_notes_for_differences call. */
334 bool cur_loc_changed;
336 /* True if this variable_def struct is currently in the
337 changed_variables hash table. */
338 bool in_changed_variables;
340 /* The variable parts. */
341 variable_part var_part[1];
343 typedef const struct variable_def *const_variable;
345 /* Structure for chaining backlinks from referenced VALUEs to
346 DVs that are referencing them. */
347 typedef struct value_chain_def
349 /* Next value_chain entry. */
350 struct value_chain_def *next;
352 /* The declaration of the variable, or an RTL value
353 being handled like a declaration, whose var_parts[0].loc_chain
354 references the VALUE owning this value_chain. */
357 /* Reference count. */
360 typedef const struct value_chain_def *const_value_chain;
362 /* Pointer to the BB's information specific to variable tracking pass. */
363 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
365 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
366 #define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
368 /* Alloc pool for struct attrs_def. */
369 static alloc_pool attrs_pool;
371 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
372 static alloc_pool var_pool;
374 /* Alloc pool for struct variable_def with a single var_part entry. */
375 static alloc_pool valvar_pool;
377 /* Alloc pool for struct location_chain_def. */
378 static alloc_pool loc_chain_pool;
380 /* Alloc pool for struct shared_hash_def. */
381 static alloc_pool shared_hash_pool;
383 /* Alloc pool for struct value_chain_def. */
384 static alloc_pool value_chain_pool;
386 /* Changed variables, notes will be emitted for them. */
387 static htab_t changed_variables;
389 /* Links from VALUEs to DVs referencing them in their current loc_chains. */
390 static htab_t value_chains;
392 /* Shall notes be emitted? */
393 static bool emit_notes;
395 /* Empty shared hashtable. */
396 static shared_hash empty_shared_hash;
398 /* Scratch register bitmap used by cselib_expand_value_rtx. */
399 static bitmap scratch_regs = NULL;
401 /* Variable used to tell whether cselib_process_insn called our hook. */
402 static bool cselib_hook_called;
404 /* Local function prototypes. */
405 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
407 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
409 static bool vt_stack_adjustments (void);
410 static rtx compute_cfa_pointer (HOST_WIDE_INT);
411 static hashval_t variable_htab_hash (const void *);
412 static int variable_htab_eq (const void *, const void *);
413 static void variable_htab_free (void *);
415 static void init_attrs_list_set (attrs *);
416 static void attrs_list_clear (attrs *);
417 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
418 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
419 static void attrs_list_copy (attrs *, attrs);
420 static void attrs_list_union (attrs *, attrs);
422 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
423 enum var_init_status);
424 static int vars_copy_1 (void **, void *);
425 static void vars_copy (htab_t, htab_t);
426 static tree var_debug_decl (tree);
427 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
428 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
429 enum var_init_status, rtx);
430 static void var_reg_delete (dataflow_set *, rtx, bool);
431 static void var_regno_delete (dataflow_set *, int);
432 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
433 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
434 enum var_init_status, rtx);
435 static void var_mem_delete (dataflow_set *, rtx, bool);
437 static void dataflow_set_init (dataflow_set *);
438 static void dataflow_set_clear (dataflow_set *);
439 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
440 static int variable_union_info_cmp_pos (const void *, const void *);
441 static int variable_union (void **, void *);
442 static void dataflow_set_union (dataflow_set *, dataflow_set *);
443 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
444 static bool canon_value_cmp (rtx, rtx);
445 static int loc_cmp (rtx, rtx);
446 static bool variable_part_different_p (variable_part *, variable_part *);
447 static bool onepart_variable_different_p (variable, variable);
448 static bool variable_different_p (variable, variable);
449 static int dataflow_set_different_1 (void **, void *);
450 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
451 static void dataflow_set_destroy (dataflow_set *);
453 static bool contains_symbol_ref (rtx);
454 static bool track_expr_p (tree, bool);
455 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
456 static int add_uses (rtx *, void *);
457 static void add_uses_1 (rtx *, void *);
458 static void add_stores (rtx, const_rtx, void *);
459 static bool compute_bb_dataflow (basic_block);
460 static bool vt_find_locations (void);
462 static void dump_attrs_list (attrs);
463 static int dump_var_slot (void **, void *);
464 static void dump_var (variable);
465 static void dump_vars (htab_t);
466 static void dump_dataflow_set (dataflow_set *);
467 static void dump_dataflow_sets (void);
469 static void variable_was_changed (variable, dataflow_set *);
470 static void **set_slot_part (dataflow_set *, rtx, void **,
471 decl_or_value, HOST_WIDE_INT,
472 enum var_init_status, rtx);
473 static void set_variable_part (dataflow_set *, rtx,
474 decl_or_value, HOST_WIDE_INT,
475 enum var_init_status, rtx, enum insert_option);
476 static void **clobber_slot_part (dataflow_set *, rtx,
477 void **, HOST_WIDE_INT, rtx);
478 static void clobber_variable_part (dataflow_set *, rtx,
479 decl_or_value, HOST_WIDE_INT, rtx);
480 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
481 static void delete_variable_part (dataflow_set *, rtx,
482 decl_or_value, HOST_WIDE_INT);
483 static int emit_note_insn_var_location (void **, void *);
484 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
485 static int emit_notes_for_differences_1 (void **, void *);
486 static int emit_notes_for_differences_2 (void **, void *);
487 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
488 static void emit_notes_in_bb (basic_block, dataflow_set *);
489 static void vt_emit_notes (void);
491 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
492 static void vt_add_function_parameters (void);
493 static bool vt_initialize (void);
494 static void vt_finalize (void);
496 /* Given a SET, calculate the amount of stack adjustment it contains
497 PRE- and POST-modifying stack pointer.
498 This function is similar to stack_adjust_offset. */
501 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
504 rtx src = SET_SRC (pattern);
505 rtx dest = SET_DEST (pattern);
508 if (dest == stack_pointer_rtx)
510 /* (set (reg sp) (plus (reg sp) (const_int))) */
511 code = GET_CODE (src);
512 if (! (code == PLUS || code == MINUS)
513 || XEXP (src, 0) != stack_pointer_rtx
514 || !CONST_INT_P (XEXP (src, 1)))
518 *post += INTVAL (XEXP (src, 1));
520 *post -= INTVAL (XEXP (src, 1));
522 else if (MEM_P (dest))
524 /* (set (mem (pre_dec (reg sp))) (foo)) */
525 src = XEXP (dest, 0);
526 code = GET_CODE (src);
532 if (XEXP (src, 0) == stack_pointer_rtx)
534 rtx val = XEXP (XEXP (src, 1), 1);
535 /* We handle only adjustments by constant amount. */
536 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
539 if (code == PRE_MODIFY)
540 *pre -= INTVAL (val);
542 *post -= INTVAL (val);
548 if (XEXP (src, 0) == stack_pointer_rtx)
550 *pre += GET_MODE_SIZE (GET_MODE (dest));
556 if (XEXP (src, 0) == stack_pointer_rtx)
558 *post += GET_MODE_SIZE (GET_MODE (dest));
564 if (XEXP (src, 0) == stack_pointer_rtx)
566 *pre -= GET_MODE_SIZE (GET_MODE (dest));
572 if (XEXP (src, 0) == stack_pointer_rtx)
574 *post -= GET_MODE_SIZE (GET_MODE (dest));
585 /* Given an INSN, calculate the amount of stack adjustment it contains
586 PRE- and POST-modifying stack pointer. */
589 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
597 pattern = PATTERN (insn);
598 if (RTX_FRAME_RELATED_P (insn))
600 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
602 pattern = XEXP (expr, 0);
605 if (GET_CODE (pattern) == SET)
606 stack_adjust_offset_pre_post (pattern, pre, post);
607 else if (GET_CODE (pattern) == PARALLEL
608 || GET_CODE (pattern) == SEQUENCE)
612 /* There may be stack adjustments inside compound insns. Search
614 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
615 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
616 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
620 /* Compute stack adjustments for all blocks by traversing DFS tree.
621 Return true when the adjustments on all incoming edges are consistent.
622 Heavily borrowed from pre_and_rev_post_order_compute. */
625 vt_stack_adjustments (void)
627 edge_iterator *stack;
630 /* Initialize entry block. */
631 VTI (ENTRY_BLOCK_PTR)->visited = true;
632 VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
633 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
635 /* Allocate stack for back-tracking up CFG. */
636 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
639 /* Push the first edge on to the stack. */
640 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
648 /* Look at the edge on the top of the stack. */
650 src = ei_edge (ei)->src;
651 dest = ei_edge (ei)->dest;
653 /* Check if the edge destination has been visited yet. */
654 if (!VTI (dest)->visited)
657 HOST_WIDE_INT pre, post, offset;
658 VTI (dest)->visited = true;
659 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
661 if (dest != EXIT_BLOCK_PTR)
662 for (insn = BB_HEAD (dest);
663 insn != NEXT_INSN (BB_END (dest));
664 insn = NEXT_INSN (insn))
667 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
668 offset += pre + post;
671 VTI (dest)->out.stack_adjust = offset;
673 if (EDGE_COUNT (dest->succs) > 0)
674 /* Since the DEST node has been visited for the first
675 time, check its successors. */
676 stack[sp++] = ei_start (dest->succs);
680 /* Check whether the adjustments on the edges are the same. */
681 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
687 if (! ei_one_before_end_p (ei))
688 /* Go to the next edge. */
689 ei_next (&stack[sp - 1]);
691 /* Return to previous level if there are no more edges. */
700 /* Compute a CFA-based value for the stack pointer. */
703 compute_cfa_pointer (HOST_WIDE_INT adjustment)
707 #ifdef FRAME_POINTER_CFA_OFFSET
708 adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
709 cfa = plus_constant (frame_pointer_rtx, adjustment);
711 adjustment -= ARG_POINTER_CFA_OFFSET (current_function_decl);
712 cfa = plus_constant (arg_pointer_rtx, adjustment);
718 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
719 or -1 if the replacement shouldn't be done. */
720 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
722 /* Data for adjust_mems callback. */
724 struct adjust_mem_data
727 enum machine_mode mem_mode;
728 HOST_WIDE_INT stack_adjust;
732 /* Helper for adjust_mems. Return 1 if *loc is unsuitable for
733 transformation of wider mode arithmetics to narrower mode,
734 -1 if it is suitable and subexpressions shouldn't be
735 traversed and 0 if it is suitable and subexpressions should
736 be traversed. Called through for_each_rtx. */
739 use_narrower_mode_test (rtx *loc, void *data)
741 rtx subreg = (rtx) data;
743 if (CONSTANT_P (*loc))
745 switch (GET_CODE (*loc))
748 if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0))
756 if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
765 /* Transform X into narrower mode MODE from wider mode WMODE. */
768 use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
772 return lowpart_subreg (mode, x, wmode);
773 switch (GET_CODE (x))
776 return lowpart_subreg (mode, x, wmode);
780 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
781 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
782 return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
784 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
785 return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
791 /* Helper function for adjusting used MEMs. */
794 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
796 struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
797 rtx mem, addr = loc, tem;
798 enum machine_mode mem_mode_save;
800 switch (GET_CODE (loc))
803 /* Don't do any sp or fp replacements outside of MEM addresses. */
804 if (amd->mem_mode == VOIDmode)
806 if (loc == stack_pointer_rtx
807 && !frame_pointer_needed)
808 return compute_cfa_pointer (amd->stack_adjust);
809 else if (loc == hard_frame_pointer_rtx
810 && frame_pointer_needed
811 && hard_frame_pointer_adjustment != -1)
812 return compute_cfa_pointer (hard_frame_pointer_adjustment);
818 mem = targetm.delegitimize_address (mem);
819 if (mem != loc && !MEM_P (mem))
820 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
823 addr = XEXP (mem, 0);
824 mem_mode_save = amd->mem_mode;
825 amd->mem_mode = GET_MODE (mem);
826 store_save = amd->store;
828 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
829 amd->store = store_save;
830 amd->mem_mode = mem_mode_save;
832 addr = targetm.delegitimize_address (addr);
833 if (addr != XEXP (mem, 0))
834 mem = replace_equiv_address_nv (mem, addr);
836 mem = avoid_constant_pool_reference (mem);
840 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
841 GEN_INT (GET_CODE (loc) == PRE_INC
842 ? GET_MODE_SIZE (amd->mem_mode)
843 : -GET_MODE_SIZE (amd->mem_mode)));
847 addr = XEXP (loc, 0);
848 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
849 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
850 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
851 GEN_INT ((GET_CODE (loc) == PRE_INC
852 || GET_CODE (loc) == POST_INC)
853 ? GET_MODE_SIZE (amd->mem_mode)
854 : -GET_MODE_SIZE (amd->mem_mode)));
855 amd->side_effects = alloc_EXPR_LIST (0,
856 gen_rtx_SET (VOIDmode,
862 addr = XEXP (loc, 1);
865 addr = XEXP (loc, 0);
866 gcc_assert (amd->mem_mode != VOIDmode);
867 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
868 amd->side_effects = alloc_EXPR_LIST (0,
869 gen_rtx_SET (VOIDmode,
875 /* First try without delegitimization of whole MEMs and
876 avoid_constant_pool_reference, which is more likely to succeed. */
877 store_save = amd->store;
879 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
881 amd->store = store_save;
882 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
883 if (mem == SUBREG_REG (loc))
888 tem = simplify_gen_subreg (GET_MODE (loc), mem,
889 GET_MODE (SUBREG_REG (loc)),
893 tem = simplify_gen_subreg (GET_MODE (loc), addr,
894 GET_MODE (SUBREG_REG (loc)),
897 tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
899 if (MAY_HAVE_DEBUG_INSNS
900 && GET_CODE (tem) == SUBREG
901 && (GET_CODE (SUBREG_REG (tem)) == PLUS
902 || GET_CODE (SUBREG_REG (tem)) == MINUS
903 || GET_CODE (SUBREG_REG (tem)) == MULT
904 || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
905 && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
906 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
907 && GET_MODE_SIZE (GET_MODE (tem))
908 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
909 && subreg_lowpart_p (tem)
910 && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
911 return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
912 GET_MODE (SUBREG_REG (tem)));
920 /* Helper function for replacement of uses. */
923 adjust_mem_uses (rtx *x, void *data)
925 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
927 validate_change (NULL_RTX, x, new_x, true);
930 /* Helper function for replacement of stores. */
933 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
937 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
939 if (new_dest != SET_DEST (expr))
941 rtx xexpr = CONST_CAST_RTX (expr);
942 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
947 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
948 replace them with their value in the insn and add the side-effects
949 as other sets to the insn. */
952 adjust_insn (basic_block bb, rtx insn)
954 struct adjust_mem_data amd;
956 amd.mem_mode = VOIDmode;
957 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
958 amd.side_effects = NULL_RTX;
961 note_stores (PATTERN (insn), adjust_mem_stores, &amd);
964 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
966 /* For read-only MEMs containing some constant, prefer those
968 set = single_set (insn);
969 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
971 rtx note = find_reg_equal_equiv_note (insn);
973 if (note && CONSTANT_P (XEXP (note, 0)))
974 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
977 if (amd.side_effects)
979 rtx *pat, new_pat, s;
982 pat = &PATTERN (insn);
983 if (GET_CODE (*pat) == COND_EXEC)
984 pat = &COND_EXEC_CODE (*pat);
985 if (GET_CODE (*pat) == PARALLEL)
986 oldn = XVECLEN (*pat, 0);
989 for (s = amd.side_effects, newn = 0; s; newn++)
991 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
992 if (GET_CODE (*pat) == PARALLEL)
993 for (i = 0; i < oldn; i++)
994 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
996 XVECEXP (new_pat, 0, 0) = *pat;
997 for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
998 XVECEXP (new_pat, 0, i) = XEXP (s, 0);
999 free_EXPR_LIST_list (&amd.side_effects);
1000 validate_change (NULL_RTX, pat, new_pat, true);
1004 /* Return true if a decl_or_value DV is a DECL or NULL. */
1006 dv_is_decl_p (decl_or_value dv)
1008 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
1011 /* Return true if a decl_or_value is a VALUE rtl. */
1013 dv_is_value_p (decl_or_value dv)
1015 return dv && !dv_is_decl_p (dv);
1018 /* Return the decl in the decl_or_value. */
1020 dv_as_decl (decl_or_value dv)
1022 #ifdef ENABLE_CHECKING
1023 gcc_assert (dv_is_decl_p (dv));
1028 /* Return the value in the decl_or_value. */
1030 dv_as_value (decl_or_value dv)
1032 #ifdef ENABLE_CHECKING
1033 gcc_assert (dv_is_value_p (dv));
1038 /* Return the opaque pointer in the decl_or_value. */
1039 static inline void *
1040 dv_as_opaque (decl_or_value dv)
1045 /* Return true if a decl_or_value must not have more than one variable
1048 dv_onepart_p (decl_or_value dv)
1052 if (!MAY_HAVE_DEBUG_INSNS)
1055 if (dv_is_value_p (dv))
1058 decl = dv_as_decl (dv);
1063 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1066 return (target_for_debug_bind (decl) != NULL_TREE);
1069 /* Return the variable pool to be used for dv, depending on whether it
1070 can have multiple parts or not. */
1071 static inline alloc_pool
1072 dv_pool (decl_or_value dv)
1074 return dv_onepart_p (dv) ? valvar_pool : var_pool;
1077 /* Build a decl_or_value out of a decl. */
1078 static inline decl_or_value
1079 dv_from_decl (tree decl)
1083 #ifdef ENABLE_CHECKING
1084 gcc_assert (dv_is_decl_p (dv));
1089 /* Build a decl_or_value out of a value. */
1090 static inline decl_or_value
1091 dv_from_value (rtx value)
1095 #ifdef ENABLE_CHECKING
1096 gcc_assert (dv_is_value_p (dv));
1101 extern void debug_dv (decl_or_value dv);
1104 debug_dv (decl_or_value dv)
1106 if (dv_is_value_p (dv))
1107 debug_rtx (dv_as_value (dv));
1109 debug_generic_stmt (dv_as_decl (dv));
1112 typedef unsigned int dvuid;
1114 /* Return the uid of DV. */
1117 dv_uid (decl_or_value dv)
1119 if (dv_is_value_p (dv))
1120 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
1122 return DECL_UID (dv_as_decl (dv));
1125 /* Compute the hash from the uid. */
1127 static inline hashval_t
1128 dv_uid2hash (dvuid uid)
1133 /* The hash function for a mask table in a shared_htab chain. */
1135 static inline hashval_t
1136 dv_htab_hash (decl_or_value dv)
1138 return dv_uid2hash (dv_uid (dv));
1141 /* The hash function for variable_htab, computes the hash value
1142 from the declaration of variable X. */
1145 variable_htab_hash (const void *x)
1147 const_variable const v = (const_variable) x;
1149 return dv_htab_hash (v->dv);
1152 /* Compare the declaration of variable X with declaration Y. */
1155 variable_htab_eq (const void *x, const void *y)
1157 const_variable const v = (const_variable) x;
1158 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1160 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
1163 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1166 variable_htab_free (void *elem)
1169 variable var = (variable) elem;
1170 location_chain node, next;
1172 gcc_assert (var->refcount > 0);
1175 if (var->refcount > 0)
1178 for (i = 0; i < var->n_var_parts; i++)
1180 for (node = var->var_part[i].loc_chain; node; node = next)
1183 pool_free (loc_chain_pool, node);
1185 var->var_part[i].loc_chain = NULL;
1187 pool_free (dv_pool (var->dv), var);
1190 /* The hash function for value_chains htab, computes the hash value
1194 value_chain_htab_hash (const void *x)
1196 const_value_chain const v = (const_value_chain) x;
1198 return dv_htab_hash (v->dv);
1201 /* Compare the VALUE X with VALUE Y. */
1204 value_chain_htab_eq (const void *x, const void *y)
1206 const_value_chain const v = (const_value_chain) x;
1207 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1209 return dv_as_opaque (v->dv) == dv_as_opaque (dv);
1212 /* Initialize the set (array) SET of attrs to empty lists. */
1215 init_attrs_list_set (attrs *set)
1219 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1223 /* Make the list *LISTP empty. */
1226 attrs_list_clear (attrs *listp)
1230 for (list = *listp; list; list = next)
1233 pool_free (attrs_pool, list);
1238 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
1241 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1243 for (; list; list = list->next)
1244 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1249 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1252 attrs_list_insert (attrs *listp, decl_or_value dv,
1253 HOST_WIDE_INT offset, rtx loc)
1257 list = (attrs) pool_alloc (attrs_pool);
1260 list->offset = offset;
1261 list->next = *listp;
1265 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1268 attrs_list_copy (attrs *dstp, attrs src)
1272 attrs_list_clear (dstp);
1273 for (; src; src = src->next)
1275 n = (attrs) pool_alloc (attrs_pool);
1278 n->offset = src->offset;
1284 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1287 attrs_list_union (attrs *dstp, attrs src)
1289 for (; src; src = src->next)
1291 if (!attrs_list_member (*dstp, src->dv, src->offset))
1292 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1296 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1300 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1302 gcc_assert (!*dstp);
1303 for (; src; src = src->next)
1305 if (!dv_onepart_p (src->dv))
1306 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1308 for (src = src2; src; src = src->next)
1310 if (!dv_onepart_p (src->dv)
1311 && !attrs_list_member (*dstp, src->dv, src->offset))
1312 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1316 /* Shared hashtable support. */
1318 /* Return true if VARS is shared. */
1321 shared_hash_shared (shared_hash vars)
1323 return vars->refcount > 1;
1326 /* Return the hash table for VARS. */
1328 static inline htab_t
1329 shared_hash_htab (shared_hash vars)
1334 /* Return true if VAR is shared, or maybe because VARS is shared. */
1337 shared_var_p (variable var, shared_hash vars)
1339 /* Don't count an entry in the changed_variables table as a duplicate. */
1340 return ((var->refcount > 1 + (int) var->in_changed_variables)
1341 || shared_hash_shared (vars));
1344 /* Copy variables into a new hash table. */
1347 shared_hash_unshare (shared_hash vars)
1349 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1350 gcc_assert (vars->refcount > 1);
1351 new_vars->refcount = 1;
1353 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1354 variable_htab_eq, variable_htab_free);
1355 vars_copy (new_vars->htab, vars->htab);
1360 /* Increment reference counter on VARS and return it. */
1362 static inline shared_hash
1363 shared_hash_copy (shared_hash vars)
1369 /* Decrement reference counter and destroy hash table if not shared
1373 shared_hash_destroy (shared_hash vars)
1375 gcc_assert (vars->refcount > 0);
1376 if (--vars->refcount == 0)
1378 htab_delete (vars->htab);
1379 pool_free (shared_hash_pool, vars);
1383 /* Unshare *PVARS if shared and return slot for DV. If INS is
1384 INSERT, insert it if not already present. */
1386 static inline void **
1387 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1388 hashval_t dvhash, enum insert_option ins)
1390 if (shared_hash_shared (*pvars))
1391 *pvars = shared_hash_unshare (*pvars);
1392 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1395 static inline void **
1396 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1397 enum insert_option ins)
1399 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1402 /* Return slot for DV, if it is already present in the hash table.
1403 If it is not present, insert it only VARS is not shared, otherwise
1406 static inline void **
1407 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1409 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1410 shared_hash_shared (vars)
1411 ? NO_INSERT : INSERT);
1414 static inline void **
1415 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1417 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1420 /* Return slot for DV only if it is already present in the hash table. */
1422 static inline void **
1423 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1426 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1430 static inline void **
1431 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1433 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1436 /* Return variable for DV or NULL if not already present in the hash
1439 static inline variable
1440 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1442 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1445 static inline variable
1446 shared_hash_find (shared_hash vars, decl_or_value dv)
1448 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1451 /* Return true if TVAL is better than CVAL as a canonival value. We
1452 choose lowest-numbered VALUEs, using the RTX address as a
1453 tie-breaker. The idea is to arrange them into a star topology,
1454 such that all of them are at most one step away from the canonical
1455 value, and the canonical value has backlinks to all of them, in
1456 addition to all the actual locations. We don't enforce this
1457 topology throughout the entire dataflow analysis, though.
1461 canon_value_cmp (rtx tval, rtx cval)
1464 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
1467 static bool dst_can_be_shared;
1469 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1472 unshare_variable (dataflow_set *set, void **slot, variable var,
1473 enum var_init_status initialized)
1478 new_var = (variable) pool_alloc (dv_pool (var->dv));
1479 new_var->dv = var->dv;
1480 new_var->refcount = 1;
1482 new_var->n_var_parts = var->n_var_parts;
1483 new_var->cur_loc_changed = var->cur_loc_changed;
1484 var->cur_loc_changed = false;
1485 new_var->in_changed_variables = false;
1487 if (! flag_var_tracking_uninit)
1488 initialized = VAR_INIT_STATUS_INITIALIZED;
1490 for (i = 0; i < var->n_var_parts; i++)
1492 location_chain node;
1493 location_chain *nextp;
1495 new_var->var_part[i].offset = var->var_part[i].offset;
1496 nextp = &new_var->var_part[i].loc_chain;
1497 for (node = var->var_part[i].loc_chain; node; node = node->next)
1499 location_chain new_lc;
1501 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1502 new_lc->next = NULL;
1503 if (node->init > initialized)
1504 new_lc->init = node->init;
1506 new_lc->init = initialized;
1507 if (node->set_src && !(MEM_P (node->set_src)))
1508 new_lc->set_src = node->set_src;
1510 new_lc->set_src = NULL;
1511 new_lc->loc = node->loc;
1514 nextp = &new_lc->next;
1517 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1520 dst_can_be_shared = false;
1521 if (shared_hash_shared (set->vars))
1522 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1523 else if (set->traversed_vars && set->vars != set->traversed_vars)
1524 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1526 if (var->in_changed_variables)
1529 = htab_find_slot_with_hash (changed_variables, var->dv,
1530 dv_htab_hash (var->dv), NO_INSERT);
1531 gcc_assert (*cslot == (void *) var);
1532 var->in_changed_variables = false;
1533 variable_htab_free (var);
1535 new_var->in_changed_variables = true;
1540 /* Add a variable from *SLOT to hash table DATA and increase its reference
1544 vars_copy_1 (void **slot, void *data)
1546 htab_t dst = (htab_t) data;
1550 src = (variable) *slot;
1553 dstp = htab_find_slot_with_hash (dst, src->dv,
1554 dv_htab_hash (src->dv),
1558 /* Continue traversing the hash table. */
1562 /* Copy all variables from hash table SRC to hash table DST. */
1565 vars_copy (htab_t dst, htab_t src)
1567 htab_traverse_noresize (src, vars_copy_1, dst);
1570 /* Map a decl to its main debug decl. */
1573 var_debug_decl (tree decl)
1575 if (decl && DECL_P (decl)
1576 && DECL_DEBUG_EXPR_IS_FROM (decl))
1578 tree debugdecl = DECL_DEBUG_EXPR (decl);
1579 if (debugdecl && DECL_P (debugdecl))
1586 /* Set the register LOC to contain DV, OFFSET. */
1589 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1590 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1591 enum insert_option iopt)
1594 bool decl_p = dv_is_decl_p (dv);
1597 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1599 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1600 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1601 && node->offset == offset)
1604 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1605 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1608 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1611 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1614 tree decl = REG_EXPR (loc);
1615 HOST_WIDE_INT offset = REG_OFFSET (loc);
1617 var_reg_decl_set (set, loc, initialized,
1618 dv_from_decl (decl), offset, set_src, INSERT);
1621 static enum var_init_status
1622 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1626 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1628 if (! flag_var_tracking_uninit)
1629 return VAR_INIT_STATUS_INITIALIZED;
1631 var = shared_hash_find (set->vars, dv);
1634 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1636 location_chain nextp;
1637 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1638 if (rtx_equal_p (nextp->loc, loc))
1640 ret_val = nextp->init;
1649 /* Delete current content of register LOC in dataflow set SET and set
1650 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1651 MODIFY is true, any other live copies of the same variable part are
1652 also deleted from the dataflow set, otherwise the variable part is
1653 assumed to be copied from another location holding the same
1657 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1658 enum var_init_status initialized, rtx set_src)
1660 tree decl = REG_EXPR (loc);
1661 HOST_WIDE_INT offset = REG_OFFSET (loc);
1665 decl = var_debug_decl (decl);
1667 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1668 initialized = get_init_value (set, loc, dv_from_decl (decl));
1670 nextp = &set->regs[REGNO (loc)];
1671 for (node = *nextp; node; node = next)
1674 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1676 delete_variable_part (set, node->loc, node->dv, node->offset);
1677 pool_free (attrs_pool, node);
1683 nextp = &node->next;
1687 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1688 var_reg_set (set, loc, initialized, set_src);
1691 /* Delete the association of register LOC in dataflow set SET with any
1692 variables that aren't onepart. If CLOBBER is true, also delete any
1693 other live copies of the same variable part, and delete the
1694 association with onepart dvs too. */
1697 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1699 attrs *nextp = &set->regs[REGNO (loc)];
1704 tree decl = REG_EXPR (loc);
1705 HOST_WIDE_INT offset = REG_OFFSET (loc);
1707 decl = var_debug_decl (decl);
1709 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1712 for (node = *nextp; node; node = next)
1715 if (clobber || !dv_onepart_p (node->dv))
1717 delete_variable_part (set, node->loc, node->dv, node->offset);
1718 pool_free (attrs_pool, node);
1722 nextp = &node->next;
1726 /* Delete content of register with number REGNO in dataflow set SET. */
1729 var_regno_delete (dataflow_set *set, int regno)
1731 attrs *reg = &set->regs[regno];
1734 for (node = *reg; node; node = next)
1737 delete_variable_part (set, node->loc, node->dv, node->offset);
1738 pool_free (attrs_pool, node);
1743 /* Set the location of DV, OFFSET as the MEM LOC. */
1746 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1747 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1748 enum insert_option iopt)
1750 if (dv_is_decl_p (dv))
1751 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1753 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1756 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1758 Adjust the address first if it is stack pointer based. */
1761 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1764 tree decl = MEM_EXPR (loc);
1765 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1767 var_mem_decl_set (set, loc, initialized,
1768 dv_from_decl (decl), offset, set_src, INSERT);
1771 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1772 dataflow set SET to LOC. If MODIFY is true, any other live copies
1773 of the same variable part are also deleted from the dataflow set,
1774 otherwise the variable part is assumed to be copied from another
1775 location holding the same part.
1776 Adjust the address first if it is stack pointer based. */
1779 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1780 enum var_init_status initialized, rtx set_src)
1782 tree decl = MEM_EXPR (loc);
1783 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1785 decl = var_debug_decl (decl);
1787 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1788 initialized = get_init_value (set, loc, dv_from_decl (decl));
1791 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1792 var_mem_set (set, loc, initialized, set_src);
1795 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1796 true, also delete any other live copies of the same variable part.
1797 Adjust the address first if it is stack pointer based. */
1800 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1802 tree decl = MEM_EXPR (loc);
1803 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1805 decl = var_debug_decl (decl);
1807 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1808 delete_variable_part (set, loc, dv_from_decl (decl), offset);
1811 /* Bind a value to a location it was just stored in. If MODIFIED
1812 holds, assume the location was modified, detaching it from any
1813 values bound to it. */
1816 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
1818 cselib_val *v = CSELIB_VAL_PTR (val);
1820 gcc_assert (cselib_preserved_value_p (v));
1824 fprintf (dump_file, "%i: ", INSN_UID (insn));
1825 print_inline_rtx (dump_file, val, 0);
1826 fprintf (dump_file, " stored in ");
1827 print_inline_rtx (dump_file, loc, 0);
1830 struct elt_loc_list *l;
1831 for (l = v->locs; l; l = l->next)
1833 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
1834 print_inline_rtx (dump_file, l->loc, 0);
1837 fprintf (dump_file, "\n");
1843 var_regno_delete (set, REGNO (loc));
1844 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1845 dv_from_value (val), 0, NULL_RTX, INSERT);
1847 else if (MEM_P (loc))
1848 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1849 dv_from_value (val), 0, NULL_RTX, INSERT);
1851 set_variable_part (set, loc, dv_from_value (val), 0,
1852 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1855 /* Reset this node, detaching all its equivalences. Return the slot
1856 in the variable hash table that holds dv, if there is one. */
1859 val_reset (dataflow_set *set, decl_or_value dv)
1861 variable var = shared_hash_find (set->vars, dv) ;
1862 location_chain node;
1865 if (!var || !var->n_var_parts)
1868 gcc_assert (var->n_var_parts == 1);
1871 for (node = var->var_part[0].loc_chain; node; node = node->next)
1872 if (GET_CODE (node->loc) == VALUE
1873 && canon_value_cmp (node->loc, cval))
1876 for (node = var->var_part[0].loc_chain; node; node = node->next)
1877 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
1879 /* Redirect the equivalence link to the new canonical
1880 value, or simply remove it if it would point at
1883 set_variable_part (set, cval, dv_from_value (node->loc),
1884 0, node->init, node->set_src, NO_INSERT);
1885 delete_variable_part (set, dv_as_value (dv),
1886 dv_from_value (node->loc), 0);
1891 decl_or_value cdv = dv_from_value (cval);
1893 /* Keep the remaining values connected, accummulating links
1894 in the canonical value. */
1895 for (node = var->var_part[0].loc_chain; node; node = node->next)
1897 if (node->loc == cval)
1899 else if (GET_CODE (node->loc) == REG)
1900 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
1901 node->set_src, NO_INSERT);
1902 else if (GET_CODE (node->loc) == MEM)
1903 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
1904 node->set_src, NO_INSERT);
1906 set_variable_part (set, node->loc, cdv, 0,
1907 node->init, node->set_src, NO_INSERT);
1911 /* We remove this last, to make sure that the canonical value is not
1912 removed to the point of requiring reinsertion. */
1914 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
1916 clobber_variable_part (set, NULL, dv, 0, NULL);
1918 /* ??? Should we make sure there aren't other available values or
1919 variables whose values involve this one other than by
1920 equivalence? E.g., at the very least we should reset MEMs, those
1921 shouldn't be too hard to find cselib-looking up the value as an
1922 address, then locating the resulting value in our own hash
1926 /* Find the values in a given location and map the val to another
1927 value, if it is unique, or add the location as one holding the
1931 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
1933 decl_or_value dv = dv_from_value (val);
1935 if (dump_file && (dump_flags & TDF_DETAILS))
1938 fprintf (dump_file, "%i: ", INSN_UID (insn));
1940 fprintf (dump_file, "head: ");
1941 print_inline_rtx (dump_file, val, 0);
1942 fputs (" is at ", dump_file);
1943 print_inline_rtx (dump_file, loc, 0);
1944 fputc ('\n', dump_file);
1947 val_reset (set, dv);
1951 attrs node, found = NULL;
1953 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1954 if (dv_is_value_p (node->dv)
1955 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
1959 /* Map incoming equivalences. ??? Wouldn't it be nice if
1960 we just started sharing the location lists? Maybe a
1961 circular list ending at the value itself or some
1963 set_variable_part (set, dv_as_value (node->dv),
1964 dv_from_value (val), node->offset,
1965 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1966 set_variable_part (set, val, node->dv, node->offset,
1967 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1970 /* If we didn't find any equivalence, we need to remember that
1971 this value is held in the named register. */
1973 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1974 dv_from_value (val), 0, NULL_RTX, INSERT);
1976 else if (MEM_P (loc))
1977 /* ??? Merge equivalent MEMs. */
1978 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1979 dv_from_value (val), 0, NULL_RTX, INSERT);
1981 /* ??? Merge equivalent expressions. */
1982 set_variable_part (set, loc, dv_from_value (val), 0,
1983 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1986 /* Initialize dataflow set SET to be empty.
1987 VARS_SIZE is the initial size of hash table VARS. */
1990 dataflow_set_init (dataflow_set *set)
1992 init_attrs_list_set (set->regs);
1993 set->vars = shared_hash_copy (empty_shared_hash);
1994 set->stack_adjust = 0;
1995 set->traversed_vars = NULL;
1998 /* Delete the contents of dataflow set SET. */
2001 dataflow_set_clear (dataflow_set *set)
2005 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2006 attrs_list_clear (&set->regs[i]);
2008 shared_hash_destroy (set->vars);
2009 set->vars = shared_hash_copy (empty_shared_hash);
2012 /* Copy the contents of dataflow set SRC to DST. */
2015 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2019 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2020 attrs_list_copy (&dst->regs[i], src->regs[i]);
2022 shared_hash_destroy (dst->vars);
2023 dst->vars = shared_hash_copy (src->vars);
2024 dst->stack_adjust = src->stack_adjust;
2027 /* Information for merging lists of locations for a given offset of variable.
2029 struct variable_union_info
2031 /* Node of the location chain. */
2034 /* The sum of positions in the input chains. */
2037 /* The position in the chain of DST dataflow set. */
2041 /* Buffer for location list sorting and its allocated size. */
2042 static struct variable_union_info *vui_vec;
2043 static int vui_allocated;
2045 /* Compare function for qsort, order the structures by POS element. */
2048 variable_union_info_cmp_pos (const void *n1, const void *n2)
2050 const struct variable_union_info *const i1 =
2051 (const struct variable_union_info *) n1;
2052 const struct variable_union_info *const i2 =
2053 ( const struct variable_union_info *) n2;
2055 if (i1->pos != i2->pos)
2056 return i1->pos - i2->pos;
2058 return (i1->pos_dst - i2->pos_dst);
2061 /* Compute union of location parts of variable *SLOT and the same variable
2062 from hash table DATA. Compute "sorted" union of the location chains
2063 for common offsets, i.e. the locations of a variable part are sorted by
2064 a priority where the priority is the sum of the positions in the 2 chains
2065 (if a location is only in one list the position in the second list is
2066 defined to be larger than the length of the chains).
2067 When we are updating the location parts the newest location is in the
2068 beginning of the chain, so when we do the described "sorted" union
2069 we keep the newest locations in the beginning. */
2072 variable_union (void **slot, void *data)
2076 dataflow_set *set = (dataflow_set *) data;
2079 src = (variable) *slot;
2080 dstp = shared_hash_find_slot (set->vars, src->dv);
2081 if (!dstp || !*dstp)
2085 dst_can_be_shared = false;
2087 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2091 /* Continue traversing the hash table. */
2095 dst = (variable) *dstp;
2097 gcc_assert (src->n_var_parts);
2099 /* We can combine one-part variables very efficiently, because their
2100 entries are in canonical order. */
2101 if (dv_onepart_p (src->dv))
2103 location_chain *nodep, dnode, snode;
2105 gcc_assert (src->n_var_parts == 1);
2106 gcc_assert (dst->n_var_parts == 1);
2108 snode = src->var_part[0].loc_chain;
2111 restart_onepart_unshared:
2112 nodep = &dst->var_part[0].loc_chain;
2118 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2122 location_chain nnode;
2124 if (shared_var_p (dst, set->vars))
2126 dstp = unshare_variable (set, dstp, dst,
2127 VAR_INIT_STATUS_INITIALIZED);
2128 dst = (variable)*dstp;
2129 goto restart_onepart_unshared;
2132 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2133 nnode->loc = snode->loc;
2134 nnode->init = snode->init;
2135 if (!snode->set_src || MEM_P (snode->set_src))
2136 nnode->set_src = NULL;
2138 nnode->set_src = snode->set_src;
2139 nnode->next = dnode;
2142 #ifdef ENABLE_CHECKING
2144 gcc_assert (rtx_equal_p (dnode->loc, snode->loc));
2148 snode = snode->next;
2150 nodep = &dnode->next;
2157 /* Count the number of location parts, result is K. */
2158 for (i = 0, j = 0, k = 0;
2159 i < src->n_var_parts && j < dst->n_var_parts; k++)
2161 if (src->var_part[i].offset == dst->var_part[j].offset)
2166 else if (src->var_part[i].offset < dst->var_part[j].offset)
2171 k += src->n_var_parts - i;
2172 k += dst->n_var_parts - j;
2174 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2175 thus there are at most MAX_VAR_PARTS different offsets. */
2176 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
2178 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2180 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2181 dst = (variable)*dstp;
2184 i = src->n_var_parts - 1;
2185 j = dst->n_var_parts - 1;
2186 dst->n_var_parts = k;
2188 for (k--; k >= 0; k--)
2190 location_chain node, node2;
2192 if (i >= 0 && j >= 0
2193 && src->var_part[i].offset == dst->var_part[j].offset)
2195 /* Compute the "sorted" union of the chains, i.e. the locations which
2196 are in both chains go first, they are sorted by the sum of
2197 positions in the chains. */
2200 struct variable_union_info *vui;
2202 /* If DST is shared compare the location chains.
2203 If they are different we will modify the chain in DST with
2204 high probability so make a copy of DST. */
2205 if (shared_var_p (dst, set->vars))
2207 for (node = src->var_part[i].loc_chain,
2208 node2 = dst->var_part[j].loc_chain; node && node2;
2209 node = node->next, node2 = node2->next)
2211 if (!((REG_P (node2->loc)
2212 && REG_P (node->loc)
2213 && REGNO (node2->loc) == REGNO (node->loc))
2214 || rtx_equal_p (node2->loc, node->loc)))
2216 if (node2->init < node->init)
2217 node2->init = node->init;
2223 dstp = unshare_variable (set, dstp, dst,
2224 VAR_INIT_STATUS_UNKNOWN);
2225 dst = (variable)*dstp;
2230 for (node = src->var_part[i].loc_chain; node; node = node->next)
2233 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2238 /* The most common case, much simpler, no qsort is needed. */
2239 location_chain dstnode = dst->var_part[j].loc_chain;
2240 dst->var_part[k].loc_chain = dstnode;
2241 dst->var_part[k].offset = dst->var_part[j].offset;
2243 for (node = src->var_part[i].loc_chain; node; node = node->next)
2244 if (!((REG_P (dstnode->loc)
2245 && REG_P (node->loc)
2246 && REGNO (dstnode->loc) == REGNO (node->loc))
2247 || rtx_equal_p (dstnode->loc, node->loc)))
2249 location_chain new_node;
2251 /* Copy the location from SRC. */
2252 new_node = (location_chain) pool_alloc (loc_chain_pool);
2253 new_node->loc = node->loc;
2254 new_node->init = node->init;
2255 if (!node->set_src || MEM_P (node->set_src))
2256 new_node->set_src = NULL;
2258 new_node->set_src = node->set_src;
2259 node2->next = new_node;
2266 if (src_l + dst_l > vui_allocated)
2268 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2269 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2274 /* Fill in the locations from DST. */
2275 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2276 node = node->next, jj++)
2279 vui[jj].pos_dst = jj;
2281 /* Pos plus value larger than a sum of 2 valid positions. */
2282 vui[jj].pos = jj + src_l + dst_l;
2285 /* Fill in the locations from SRC. */
2287 for (node = src->var_part[i].loc_chain, ii = 0; node;
2288 node = node->next, ii++)
2290 /* Find location from NODE. */
2291 for (jj = 0; jj < dst_l; jj++)
2293 if ((REG_P (vui[jj].lc->loc)
2294 && REG_P (node->loc)
2295 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2296 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2298 vui[jj].pos = jj + ii;
2302 if (jj >= dst_l) /* The location has not been found. */
2304 location_chain new_node;
2306 /* Copy the location from SRC. */
2307 new_node = (location_chain) pool_alloc (loc_chain_pool);
2308 new_node->loc = node->loc;
2309 new_node->init = node->init;
2310 if (!node->set_src || MEM_P (node->set_src))
2311 new_node->set_src = NULL;
2313 new_node->set_src = node->set_src;
2314 vui[n].lc = new_node;
2315 vui[n].pos_dst = src_l + dst_l;
2316 vui[n].pos = ii + src_l + dst_l;
2323 /* Special case still very common case. For dst_l == 2
2324 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2325 vui[i].pos == i + src_l + dst_l. */
2326 if (vui[0].pos > vui[1].pos)
2328 /* Order should be 1, 0, 2... */
2329 dst->var_part[k].loc_chain = vui[1].lc;
2330 vui[1].lc->next = vui[0].lc;
2333 vui[0].lc->next = vui[2].lc;
2334 vui[n - 1].lc->next = NULL;
2337 vui[0].lc->next = NULL;
2342 dst->var_part[k].loc_chain = vui[0].lc;
2343 if (n >= 3 && vui[2].pos < vui[1].pos)
2345 /* Order should be 0, 2, 1, 3... */
2346 vui[0].lc->next = vui[2].lc;
2347 vui[2].lc->next = vui[1].lc;
2350 vui[1].lc->next = vui[3].lc;
2351 vui[n - 1].lc->next = NULL;
2354 vui[1].lc->next = NULL;
2359 /* Order should be 0, 1, 2... */
2361 vui[n - 1].lc->next = NULL;
2364 for (; ii < n; ii++)
2365 vui[ii - 1].lc->next = vui[ii].lc;
2369 qsort (vui, n, sizeof (struct variable_union_info),
2370 variable_union_info_cmp_pos);
2372 /* Reconnect the nodes in sorted order. */
2373 for (ii = 1; ii < n; ii++)
2374 vui[ii - 1].lc->next = vui[ii].lc;
2375 vui[n - 1].lc->next = NULL;
2376 dst->var_part[k].loc_chain = vui[0].lc;
2379 dst->var_part[k].offset = dst->var_part[j].offset;
2384 else if ((i >= 0 && j >= 0
2385 && src->var_part[i].offset < dst->var_part[j].offset)
2388 dst->var_part[k] = dst->var_part[j];
2391 else if ((i >= 0 && j >= 0
2392 && src->var_part[i].offset > dst->var_part[j].offset)
2395 location_chain *nextp;
2397 /* Copy the chain from SRC. */
2398 nextp = &dst->var_part[k].loc_chain;
2399 for (node = src->var_part[i].loc_chain; node; node = node->next)
2401 location_chain new_lc;
2403 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2404 new_lc->next = NULL;
2405 new_lc->init = node->init;
2406 if (!node->set_src || MEM_P (node->set_src))
2407 new_lc->set_src = NULL;
2409 new_lc->set_src = node->set_src;
2410 new_lc->loc = node->loc;
2413 nextp = &new_lc->next;
2416 dst->var_part[k].offset = src->var_part[i].offset;
2419 dst->var_part[k].cur_loc = NULL;
2422 if (flag_var_tracking_uninit)
2423 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2425 location_chain node, node2;
2426 for (node = src->var_part[i].loc_chain; node; node = node->next)
2427 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2428 if (rtx_equal_p (node->loc, node2->loc))
2430 if (node->init > node2->init)
2431 node2->init = node->init;
2435 /* Continue traversing the hash table. */
2439 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2442 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2446 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2447 attrs_list_union (&dst->regs[i], src->regs[i]);
2449 if (dst->vars == empty_shared_hash)
2451 shared_hash_destroy (dst->vars);
2452 dst->vars = shared_hash_copy (src->vars);
2455 htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
2458 /* Whether the value is currently being expanded. */
2459 #define VALUE_RECURSED_INTO(x) \
2460 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2461 /* Whether the value is in changed_variables hash table. */
2462 #define VALUE_CHANGED(x) \
2463 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2464 /* Whether the decl is in changed_variables hash table. */
2465 #define DECL_CHANGED(x) TREE_VISITED (x)
2467 /* Record that DV has been added into resp. removed from changed_variables
2471 set_dv_changed (decl_or_value dv, bool newv)
2473 if (dv_is_value_p (dv))
2474 VALUE_CHANGED (dv_as_value (dv)) = newv;
2476 DECL_CHANGED (dv_as_decl (dv)) = newv;
2479 /* Return true if DV is present in changed_variables hash table. */
2482 dv_changed_p (decl_or_value dv)
2484 return (dv_is_value_p (dv)
2485 ? VALUE_CHANGED (dv_as_value (dv))
2486 : DECL_CHANGED (dv_as_decl (dv)));
2489 /* Return a location list node whose loc is rtx_equal to LOC, in the
2490 location list of a one-part variable or value VAR, or in that of
2491 any values recursively mentioned in the location lists. */
2493 static location_chain
2494 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2496 location_chain node;
2501 gcc_assert (dv_onepart_p (var->dv));
2503 if (!var->n_var_parts)
2506 gcc_assert (var->var_part[0].offset == 0);
2508 for (node = var->var_part[0].loc_chain; node; node = node->next)
2509 if (rtx_equal_p (loc, node->loc))
2511 else if (GET_CODE (node->loc) == VALUE
2512 && !VALUE_RECURSED_INTO (node->loc))
2514 decl_or_value dv = dv_from_value (node->loc);
2515 variable var = (variable)
2516 htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2520 location_chain where;
2521 VALUE_RECURSED_INTO (node->loc) = true;
2522 if ((where = find_loc_in_1pdv (loc, var, vars)))
2524 VALUE_RECURSED_INTO (node->loc) = false;
2527 VALUE_RECURSED_INTO (node->loc) = false;
2534 /* Hash table iteration argument passed to variable_merge. */
2537 /* The set in which the merge is to be inserted. */
2539 /* The set that we're iterating in. */
2541 /* The set that may contain the other dv we are to merge with. */
2543 /* Number of onepart dvs in src. */
2544 int src_onepart_cnt;
2547 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
2548 loc_cmp order, and it is maintained as such. */
2551 insert_into_intersection (location_chain *nodep, rtx loc,
2552 enum var_init_status status)
2554 location_chain node;
2557 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2558 if ((r = loc_cmp (node->loc, loc)) == 0)
2560 node->init = MIN (node->init, status);
2566 node = (location_chain) pool_alloc (loc_chain_pool);
2569 node->set_src = NULL;
2570 node->init = status;
2571 node->next = *nodep;
2575 /* Insert in DEST the intersection the locations present in both
2576 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2577 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
2581 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2582 location_chain s1node, variable s2var)
2584 dataflow_set *s1set = dsm->cur;
2585 dataflow_set *s2set = dsm->src;
2586 location_chain found;
2588 for (; s1node; s1node = s1node->next)
2590 if (s1node->loc == val)
2593 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2594 shared_hash_htab (s2set->vars))))
2596 insert_into_intersection (dest, s1node->loc,
2597 MIN (s1node->init, found->init));
2601 if (GET_CODE (s1node->loc) == VALUE
2602 && !VALUE_RECURSED_INTO (s1node->loc))
2604 decl_or_value dv = dv_from_value (s1node->loc);
2605 variable svar = shared_hash_find (s1set->vars, dv);
2608 if (svar->n_var_parts == 1)
2610 VALUE_RECURSED_INTO (s1node->loc) = true;
2611 intersect_loc_chains (val, dest, dsm,
2612 svar->var_part[0].loc_chain,
2614 VALUE_RECURSED_INTO (s1node->loc) = false;
2619 /* ??? if the location is equivalent to any location in src,
2620 searched recursively
2622 add to dst the values needed to represent the equivalence
2624 telling whether locations S is equivalent to another dv's
2627 for each location D in the list
2629 if S and D satisfy rtx_equal_p, then it is present
2631 else if D is a value, recurse without cycles
2633 else if S and D have the same CODE and MODE
2635 for each operand oS and the corresponding oD
2637 if oS and oD are not equivalent, then S an D are not equivalent
2639 else if they are RTX vectors
2641 if any vector oS element is not equivalent to its respective oD,
2642 then S and D are not equivalent
2650 /* Return -1 if X should be before Y in a location list for a 1-part
2651 variable, 1 if Y should be before X, and 0 if they're equivalent
2652 and should not appear in the list. */
2655 loc_cmp (rtx x, rtx y)
2658 RTX_CODE code = GET_CODE (x);
2668 gcc_assert (GET_MODE (x) == GET_MODE (y));
2669 if (REGNO (x) == REGNO (y))
2671 else if (REGNO (x) < REGNO (y))
2684 gcc_assert (GET_MODE (x) == GET_MODE (y));
2685 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
2691 if (GET_CODE (x) == VALUE)
2693 if (GET_CODE (y) != VALUE)
2695 /* Don't assert the modes are the same, that is true only
2696 when not recursing. (subreg:QI (value:SI 1:1) 0)
2697 and (subreg:QI (value:DI 2:2) 0) can be compared,
2698 even when the modes are different. */
2699 if (canon_value_cmp (x, y))
2705 if (GET_CODE (y) == VALUE)
2708 if (GET_CODE (x) == GET_CODE (y))
2709 /* Compare operands below. */;
2710 else if (GET_CODE (x) < GET_CODE (y))
2715 gcc_assert (GET_MODE (x) == GET_MODE (y));
2717 if (GET_CODE (x) == DEBUG_EXPR)
2719 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2720 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
2722 #ifdef ENABLE_CHECKING
2723 gcc_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2724 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
2729 fmt = GET_RTX_FORMAT (code);
2730 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2734 if (XWINT (x, i) == XWINT (y, i))
2736 else if (XWINT (x, i) < XWINT (y, i))
2743 if (XINT (x, i) == XINT (y, i))
2745 else if (XINT (x, i) < XINT (y, i))
2752 /* Compare the vector length first. */
2753 if (XVECLEN (x, i) == XVECLEN (y, i))
2754 /* Compare the vectors elements. */;
2755 else if (XVECLEN (x, i) < XVECLEN (y, i))
2760 for (j = 0; j < XVECLEN (x, i); j++)
2761 if ((r = loc_cmp (XVECEXP (x, i, j),
2762 XVECEXP (y, i, j))))
2767 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
2773 if (XSTR (x, i) == XSTR (y, i))
2779 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2787 /* These are just backpointers, so they don't matter. */
2794 /* It is believed that rtx's at this level will never
2795 contain anything but integers and other rtx's,
2796 except for within LABEL_REFs and SYMBOL_REFs. */
2804 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2805 from VALUE to DVP. */
2808 add_value_chain (rtx *loc, void *dvp)
2810 decl_or_value dv, ldv;
2811 value_chain vc, nvc;
2814 if (GET_CODE (*loc) == VALUE)
2815 ldv = dv_from_value (*loc);
2816 else if (GET_CODE (*loc) == DEBUG_EXPR)
2817 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
2821 if (dv_as_opaque (ldv) == dvp)
2824 dv = (decl_or_value) dvp;
2825 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
2829 vc = (value_chain) pool_alloc (value_chain_pool);
2833 *slot = (void *) vc;
2837 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2838 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
2846 vc = (value_chain) *slot;
2847 nvc = (value_chain) pool_alloc (value_chain_pool);
2849 nvc->next = vc->next;
2855 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2856 from those VALUEs to DVP. */
2859 add_value_chains (decl_or_value dv, rtx loc)
2861 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
2863 add_value_chain (&loc, dv_as_opaque (dv));
2869 loc = XEXP (loc, 0);
2870 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
2873 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those
2874 VALUEs to DV. Add the same time get rid of ASM_OPERANDS from locs list,
2875 that is something we never can express in .debug_info and can prevent
2876 reverse ops from being used. */
2879 add_cselib_value_chains (decl_or_value dv)
2881 struct elt_loc_list **l;
2883 for (l = &CSELIB_VAL_PTR (dv_as_value (dv))->locs; *l;)
2884 if (GET_CODE ((*l)->loc) == ASM_OPERANDS)
2888 for_each_rtx (&(*l)->loc, add_value_chain, dv_as_opaque (dv));
2893 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
2894 from VALUE to DVP. */
2897 remove_value_chain (rtx *loc, void *dvp)
2899 decl_or_value dv, ldv;
2903 if (GET_CODE (*loc) == VALUE)
2904 ldv = dv_from_value (*loc);
2905 else if (GET_CODE (*loc) == DEBUG_EXPR)
2906 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
2910 if (dv_as_opaque (ldv) == dvp)
2913 dv = (decl_or_value) dvp;
2914 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
2916 for (vc = (value_chain) *slot; vc->next; vc = vc->next)
2917 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
2919 value_chain dvc = vc->next;
2920 gcc_assert (dvc->refcount > 0);
2921 if (--dvc->refcount == 0)
2923 vc->next = dvc->next;
2924 pool_free (value_chain_pool, dvc);
2925 if (vc->next == NULL && vc == (value_chain) *slot)
2927 pool_free (value_chain_pool, vc);
2928 htab_clear_slot (value_chains, slot);
2936 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
2937 from those VALUEs to DVP. */
2940 remove_value_chains (decl_or_value dv, rtx loc)
2942 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
2944 remove_value_chain (&loc, dv_as_opaque (dv));
2950 loc = XEXP (loc, 0);
2951 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
2955 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, remove backlinks from those
2959 remove_cselib_value_chains (decl_or_value dv)
2961 struct elt_loc_list *l;
2963 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2964 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
2967 /* Check the order of entries in one-part variables. */
2970 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
2972 variable var = (variable) *slot;
2973 decl_or_value dv = var->dv;
2974 location_chain node, next;
2976 #ifdef ENABLE_RTL_CHECKING
2978 for (i = 0; i < var->n_var_parts; i++)
2979 gcc_assert (var->var_part[0].cur_loc == NULL);
2980 gcc_assert (!var->cur_loc_changed && !var->in_changed_variables);
2983 if (!dv_onepart_p (dv))
2986 gcc_assert (var->n_var_parts == 1);
2987 node = var->var_part[0].loc_chain;
2990 while ((next = node->next))
2992 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3000 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3001 more likely to be chosen as canonical for an equivalence set.
3002 Ensure less likely values can reach more likely neighbors, making
3003 the connections bidirectional. */
3006 canonicalize_values_mark (void **slot, void *data)
3008 dataflow_set *set = (dataflow_set *)data;
3009 variable var = (variable) *slot;
3010 decl_or_value dv = var->dv;
3012 location_chain node;
3014 if (!dv_is_value_p (dv))
3017 gcc_assert (var->n_var_parts == 1);
3019 val = dv_as_value (dv);
3021 for (node = var->var_part[0].loc_chain; node; node = node->next)
3022 if (GET_CODE (node->loc) == VALUE)
3024 if (canon_value_cmp (node->loc, val))
3025 VALUE_RECURSED_INTO (val) = true;
3028 decl_or_value odv = dv_from_value (node->loc);
3029 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3031 oslot = set_slot_part (set, val, oslot, odv, 0,
3032 node->init, NULL_RTX);
3034 VALUE_RECURSED_INTO (node->loc) = true;
3041 /* Remove redundant entries from equivalence lists in onepart
3042 variables, canonicalizing equivalence sets into star shapes. */
3045 canonicalize_values_star (void **slot, void *data)
3047 dataflow_set *set = (dataflow_set *)data;
3048 variable var = (variable) *slot;
3049 decl_or_value dv = var->dv;
3050 location_chain node;
3057 if (!dv_onepart_p (dv))
3060 gcc_assert (var->n_var_parts == 1);
3062 if (dv_is_value_p (dv))
3064 cval = dv_as_value (dv);
3065 if (!VALUE_RECURSED_INTO (cval))
3067 VALUE_RECURSED_INTO (cval) = false;
3077 gcc_assert (var->n_var_parts == 1);
3079 for (node = var->var_part[0].loc_chain; node; node = node->next)
3080 if (GET_CODE (node->loc) == VALUE)
3083 if (VALUE_RECURSED_INTO (node->loc))
3085 if (canon_value_cmp (node->loc, cval))
3094 if (!has_marks || dv_is_decl_p (dv))
3097 /* Keep it marked so that we revisit it, either after visiting a
3098 child node, or after visiting a new parent that might be
3100 VALUE_RECURSED_INTO (val) = true;
3102 for (node = var->var_part[0].loc_chain; node; node = node->next)
3103 if (GET_CODE (node->loc) == VALUE
3104 && VALUE_RECURSED_INTO (node->loc))
3108 VALUE_RECURSED_INTO (cval) = false;
3109 dv = dv_from_value (cval);
3110 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3113 gcc_assert (dv_is_decl_p (var->dv));
3114 /* The canonical value was reset and dropped.
3116 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3119 var = (variable)*slot;
3120 gcc_assert (dv_is_value_p (var->dv));
3121 if (var->n_var_parts == 0)
3123 gcc_assert (var->n_var_parts == 1);
3127 VALUE_RECURSED_INTO (val) = false;
3132 /* Push values to the canonical one. */
3133 cdv = dv_from_value (cval);
3134 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3136 for (node = var->var_part[0].loc_chain; node; node = node->next)
3137 if (node->loc != cval)
3139 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3140 node->init, NULL_RTX);
3141 if (GET_CODE (node->loc) == VALUE)
3143 decl_or_value ndv = dv_from_value (node->loc);
3145 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3148 if (canon_value_cmp (node->loc, val))
3150 /* If it could have been a local minimum, it's not any more,
3151 since it's now neighbor to cval, so it may have to push
3152 to it. Conversely, if it wouldn't have prevailed over
3153 val, then whatever mark it has is fine: if it was to
3154 push, it will now push to a more canonical node, but if
3155 it wasn't, then it has already pushed any values it might
3157 VALUE_RECURSED_INTO (node->loc) = true;
3158 /* Make sure we visit node->loc by ensuring we cval is
3160 VALUE_RECURSED_INTO (cval) = true;
3162 else if (!VALUE_RECURSED_INTO (node->loc))
3163 /* If we have no need to "recurse" into this node, it's
3164 already "canonicalized", so drop the link to the old
3166 clobber_variable_part (set, cval, ndv, 0, NULL);
3168 else if (GET_CODE (node->loc) == REG)
3170 attrs list = set->regs[REGNO (node->loc)], *listp;
3172 /* Change an existing attribute referring to dv so that it
3173 refers to cdv, removing any duplicate this might
3174 introduce, and checking that no previous duplicates
3175 existed, all in a single pass. */
3179 if (list->offset == 0
3180 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3181 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3188 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3191 for (listp = &list->next; (list = *listp); listp = &list->next)
3196 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3198 *listp = list->next;
3199 pool_free (attrs_pool, list);
3204 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3207 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3209 for (listp = &list->next; (list = *listp); listp = &list->next)
3214 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3216 *listp = list->next;
3217 pool_free (attrs_pool, list);
3222 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3231 if (list->offset == 0
3232 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3233 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3243 cslot = set_slot_part (set, val, cslot, cdv, 0,
3244 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3246 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3248 /* Variable may have been unshared. */
3249 var = (variable)*slot;
3250 gcc_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3251 && var->var_part[0].loc_chain->next == NULL);
3253 if (VALUE_RECURSED_INTO (cval))
3254 goto restart_with_cval;
3259 /* Bind one-part variables to the canonical value in an equivalence
3260 set. Not doing this causes dataflow convergence failure in rare
3261 circumstances, see PR42873. Unfortunately we can't do this
3262 efficiently as part of canonicalize_values_star, since we may not
3263 have determined or even seen the canonical value of a set when we
3264 get to a variable that references another member of the set. */
3267 canonicalize_vars_star (void **slot, void *data)
3269 dataflow_set *set = (dataflow_set *)data;
3270 variable var = (variable) *slot;
3271 decl_or_value dv = var->dv;
3272 location_chain node;
3277 location_chain cnode;
3279 if (!dv_onepart_p (dv) || dv_is_value_p (dv))
3282 gcc_assert (var->n_var_parts == 1);
3284 node = var->var_part[0].loc_chain;
3286 if (GET_CODE (node->loc) != VALUE)
3289 gcc_assert (!node->next);
3292 /* Push values to the canonical one. */
3293 cdv = dv_from_value (cval);
3294 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3297 cvar = (variable)*cslot;
3298 gcc_assert (cvar->n_var_parts == 1);
3300 cnode = cvar->var_part[0].loc_chain;
3302 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3303 that are not “more canonical” than it. */
3304 if (GET_CODE (cnode->loc) != VALUE
3305 || !canon_value_cmp (cnode->loc, cval))
3308 /* CVAL was found to be non-canonical. Change the variable to point
3309 to the canonical VALUE. */
3310 gcc_assert (!cnode->next);
3313 slot = set_slot_part (set, cval, slot, dv, 0,
3314 node->init, node->set_src);
3315 slot = clobber_slot_part (set, cval, slot, 0, node->set_src);
3320 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3321 corresponding entry in DSM->src. Multi-part variables are combined
3322 with variable_union, whereas onepart dvs are combined with
3326 variable_merge_over_cur (void **s1slot, void *data)
3328 struct dfset_merge *dsm = (struct dfset_merge *)data;
3329 dataflow_set *dst = dsm->dst;
3331 variable s1var = (variable) *s1slot;
3332 variable s2var, dvar = NULL;
3333 decl_or_value dv = s1var->dv;
3334 bool onepart = dv_onepart_p (dv);
3337 location_chain node, *nodep;
3339 /* If the incoming onepart variable has an empty location list, then
3340 the intersection will be just as empty. For other variables,
3341 it's always union. */
3342 gcc_assert (s1var->n_var_parts);
3343 gcc_assert (s1var->var_part[0].loc_chain);
3346 return variable_union (s1slot, dst);
3348 gcc_assert (s1var->n_var_parts == 1);
3349 gcc_assert (s1var->var_part[0].offset == 0);
3351 dvhash = dv_htab_hash (dv);
3352 if (dv_is_value_p (dv))
3353 val = dv_as_value (dv);
3357 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3360 dst_can_be_shared = false;
3364 dsm->src_onepart_cnt--;
3365 gcc_assert (s2var->var_part[0].loc_chain);
3366 gcc_assert (s2var->n_var_parts == 1);
3367 gcc_assert (s2var->var_part[0].offset == 0);
3369 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3372 dvar = (variable)*dstslot;
3373 gcc_assert (dvar->refcount == 1);
3374 gcc_assert (dvar->n_var_parts == 1);
3375 gcc_assert (dvar->var_part[0].offset == 0);
3376 nodep = &dvar->var_part[0].loc_chain;
3384 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3386 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3388 *dstslot = dvar = s2var;
3393 dst_can_be_shared = false;
3395 intersect_loc_chains (val, nodep, dsm,
3396 s1var->var_part[0].loc_chain, s2var);
3402 dvar = (variable) pool_alloc (dv_pool (dv));
3405 dvar->n_var_parts = 1;
3406 dvar->cur_loc_changed = false;
3407 dvar->in_changed_variables = false;
3408 dvar->var_part[0].offset = 0;
3409 dvar->var_part[0].loc_chain = node;
3410 dvar->var_part[0].cur_loc = NULL;
3413 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3415 gcc_assert (!*dstslot);
3423 nodep = &dvar->var_part[0].loc_chain;
3424 while ((node = *nodep))
3426 location_chain *nextp = &node->next;
3428 if (GET_CODE (node->loc) == REG)
3432 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3433 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3434 && dv_is_value_p (list->dv))
3438 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3440 /* If this value became canonical for another value that had
3441 this register, we want to leave it alone. */
3442 else if (dv_as_value (list->dv) != val)
3444 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3446 node->init, NULL_RTX);
3447 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3449 /* Since nextp points into the removed node, we can't
3450 use it. The pointer to the next node moved to nodep.
3451 However, if the variable we're walking is unshared
3452 during our walk, we'll keep walking the location list
3453 of the previously-shared variable, in which case the
3454 node won't have been removed, and we'll want to skip
3455 it. That's why we test *nodep here. */
3461 /* Canonicalization puts registers first, so we don't have to
3467 if (dvar != (variable)*dstslot)
3468 dvar = (variable)*dstslot;
3469 nodep = &dvar->var_part[0].loc_chain;
3473 /* Mark all referenced nodes for canonicalization, and make sure
3474 we have mutual equivalence links. */
3475 VALUE_RECURSED_INTO (val) = true;
3476 for (node = *nodep; node; node = node->next)
3477 if (GET_CODE (node->loc) == VALUE)
3479 VALUE_RECURSED_INTO (node->loc) = true;
3480 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3481 node->init, NULL, INSERT);
3484 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3485 gcc_assert (*dstslot == dvar);
3486 canonicalize_values_star (dstslot, dst);
3487 #ifdef ENABLE_CHECKING
3489 == shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash));
3491 dvar = (variable)*dstslot;
3495 bool has_value = false, has_other = false;
3497 /* If we have one value and anything else, we're going to
3498 canonicalize this, so make sure all values have an entry in
3499 the table and are marked for canonicalization. */
3500 for (node = *nodep; node; node = node->next)
3502 if (GET_CODE (node->loc) == VALUE)
3504 /* If this was marked during register canonicalization,
3505 we know we have to canonicalize values. */
3520 if (has_value && has_other)
3522 for (node = *nodep; node; node = node->next)
3524 if (GET_CODE (node->loc) == VALUE)
3526 decl_or_value dv = dv_from_value (node->loc);
3529 if (shared_hash_shared (dst->vars))
3530 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3532 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3536 variable var = (variable) pool_alloc (dv_pool (dv));
3539 var->n_var_parts = 1;
3540 var->cur_loc_changed = false;
3541 var->in_changed_variables = false;
3542 var->var_part[0].offset = 0;
3543 var->var_part[0].loc_chain = NULL;
3544 var->var_part[0].cur_loc = NULL;
3548 VALUE_RECURSED_INTO (node->loc) = true;
3552 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3553 gcc_assert (*dstslot == dvar);
3554 canonicalize_values_star (dstslot, dst);
3555 #ifdef ENABLE_CHECKING
3557 == shared_hash_find_slot_noinsert_1 (dst->vars,
3560 dvar = (variable)*dstslot;
3564 if (!onepart_variable_different_p (dvar, s2var))
3566 variable_htab_free (dvar);
3567 *dstslot = dvar = s2var;
3570 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3572 variable_htab_free (dvar);
3573 *dstslot = dvar = s1var;
3575 dst_can_be_shared = false;
3578 dst_can_be_shared = false;
3583 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
3584 multi-part variable. Unions of multi-part variables and
3585 intersections of one-part ones will be handled in
3586 variable_merge_over_cur(). */
3589 variable_merge_over_src (void **s2slot, void *data)
3591 struct dfset_merge *dsm = (struct dfset_merge *)data;
3592 dataflow_set *dst = dsm->dst;
3593 variable s2var = (variable) *s2slot;
3594 decl_or_value dv = s2var->dv;
3595 bool onepart = dv_onepart_p (dv);
3599 void **dstp = shared_hash_find_slot (dst->vars, dv);
3605 dsm->src_onepart_cnt++;
3609 /* Combine dataflow set information from SRC2 into DST, using PDST
3610 to carry over information across passes. */
3613 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
3615 dataflow_set cur = *dst;
3616 dataflow_set *src1 = &cur;
3617 struct dfset_merge dsm;
3619 size_t src1_elems, src2_elems;
3621 src1_elems = htab_elements (shared_hash_htab (src1->vars));
3622 src2_elems = htab_elements (shared_hash_htab (src2->vars));
3623 dataflow_set_init (dst);
3624 dst->stack_adjust = cur.stack_adjust;
3625 shared_hash_destroy (dst->vars);
3626 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3627 dst->vars->refcount = 1;
3629 = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
3630 variable_htab_eq, variable_htab_free);
3632 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3633 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
3638 dsm.src_onepart_cnt = 0;
3640 htab_traverse (shared_hash_htab (dsm.src->vars), variable_merge_over_src,
3642 htab_traverse (shared_hash_htab (dsm.cur->vars), variable_merge_over_cur,
3645 if (dsm.src_onepart_cnt)
3646 dst_can_be_shared = false;
3648 dataflow_set_destroy (src1);
3651 /* Mark register equivalences. */
3654 dataflow_set_equiv_regs (dataflow_set *set)
3659 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3661 rtx canon[NUM_MACHINE_MODES];
3663 memset (canon, 0, sizeof (canon));
3665 for (list = set->regs[i]; list; list = list->next)
3666 if (list->offset == 0 && dv_is_value_p (list->dv))
3668 rtx val = dv_as_value (list->dv);
3669 rtx *cvalp = &canon[(int)GET_MODE (val)];
3672 if (canon_value_cmp (val, cval))
3676 for (list = set->regs[i]; list; list = list->next)
3677 if (list->offset == 0 && dv_onepart_p (list->dv))
3679 rtx cval = canon[(int)GET_MODE (list->loc)];
3684 if (dv_is_value_p (list->dv))
3686 rtx val = dv_as_value (list->dv);
3691 VALUE_RECURSED_INTO (val) = true;
3692 set_variable_part (set, val, dv_from_value (cval), 0,
3693 VAR_INIT_STATUS_INITIALIZED,
3697 VALUE_RECURSED_INTO (cval) = true;
3698 set_variable_part (set, cval, list->dv, 0,
3699 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
3702 for (listp = &set->regs[i]; (list = *listp);
3703 listp = list ? &list->next : listp)
3704 if (list->offset == 0 && dv_onepart_p (list->dv))
3706 rtx cval = canon[(int)GET_MODE (list->loc)];
3712 if (dv_is_value_p (list->dv))
3714 rtx val = dv_as_value (list->dv);
3715 if (!VALUE_RECURSED_INTO (val))
3719 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3720 canonicalize_values_star (slot, set);
3727 /* Remove any redundant values in the location list of VAR, which must
3728 be unshared and 1-part. */
3731 remove_duplicate_values (variable var)
3733 location_chain node, *nodep;
3735 gcc_assert (dv_onepart_p (var->dv));
3736 gcc_assert (var->n_var_parts == 1);
3737 gcc_assert (var->refcount == 1);
3739 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3741 if (GET_CODE (node->loc) == VALUE)
3743 if (VALUE_RECURSED_INTO (node->loc))
3745 /* Remove duplicate value node. */
3746 *nodep = node->next;
3747 pool_free (loc_chain_pool, node);
3751 VALUE_RECURSED_INTO (node->loc) = true;
3753 nodep = &node->next;
3756 for (node = var->var_part[0].loc_chain; node; node = node->next)
3757 if (GET_CODE (node->loc) == VALUE)
3759 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3760 VALUE_RECURSED_INTO (node->loc) = false;
3765 /* Hash table iteration argument passed to variable_post_merge. */
3766 struct dfset_post_merge
3768 /* The new input set for the current block. */
3770 /* Pointer to the permanent input set for the current block, or
3772 dataflow_set **permp;
3775 /* Create values for incoming expressions associated with one-part
3776 variables that don't have value numbers for them. */
3779 variable_post_merge_new_vals (void **slot, void *info)
3781 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3782 dataflow_set *set = dfpm->set;
3783 variable var = (variable)*slot;
3784 location_chain node;
3786 if (!dv_onepart_p (var->dv) || !var->n_var_parts)
3789 gcc_assert (var->n_var_parts == 1);
3791 if (dv_is_decl_p (var->dv))
3793 bool check_dupes = false;
3796 for (node = var->var_part[0].loc_chain; node; node = node->next)
3798 if (GET_CODE (node->loc) == VALUE)
3799 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
3800 else if (GET_CODE (node->loc) == REG)
3802 attrs att, *attp, *curp = NULL;
3804 if (var->refcount != 1)
3806 slot = unshare_variable (set, slot, var,
3807 VAR_INIT_STATUS_INITIALIZED);
3808 var = (variable)*slot;
3812 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3814 if (att->offset == 0
3815 && GET_MODE (att->loc) == GET_MODE (node->loc))
3817 if (dv_is_value_p (att->dv))
3819 rtx cval = dv_as_value (att->dv);
3824 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
3832 if ((*curp)->offset == 0
3833 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
3834 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
3837 curp = &(*curp)->next;
3848 *dfpm->permp = XNEW (dataflow_set);
3849 dataflow_set_init (*dfpm->permp);
3852 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
3853 att; att = att->next)
3854 if (GET_MODE (att->loc) == GET_MODE (node->loc))
3856 gcc_assert (att->offset == 0);
3857 gcc_assert (dv_is_value_p (att->dv));
3858 val_reset (set, att->dv);
3865 cval = dv_as_value (cdv);
3869 /* Create a unique value to hold this register,
3870 that ought to be found and reused in
3871 subsequent rounds. */
3873 gcc_assert (!cselib_lookup (node->loc,
3874 GET_MODE (node->loc), 0));
3875 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
3876 cselib_preserve_value (v);
3877 cselib_invalidate_rtx (node->loc);
3879 cdv = dv_from_value (cval);
3882 "Created new value %u:%u for reg %i\n",
3883 v->uid, v->hash, REGNO (node->loc));
3886 var_reg_decl_set (*dfpm->permp, node->loc,
3887 VAR_INIT_STATUS_INITIALIZED,
3888 cdv, 0, NULL, INSERT);
3894 /* Remove attribute referring to the decl, which now
3895 uses the value for the register, already existing or
3896 to be added when we bring perm in. */
3899 pool_free (attrs_pool, att);
3904 remove_duplicate_values (var);
3910 /* Reset values in the permanent set that are not associated with the
3911 chosen expression. */
3914 variable_post_merge_perm_vals (void **pslot, void *info)
3916 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3917 dataflow_set *set = dfpm->set;
3918 variable pvar = (variable)*pslot, var;
3919 location_chain pnode;
3923 gcc_assert (dv_is_value_p (pvar->dv));
3924 gcc_assert (pvar->n_var_parts == 1);
3925 pnode = pvar->var_part[0].loc_chain;
3927 gcc_assert (!pnode->next);
3928 gcc_assert (REG_P (pnode->loc));
3932 var = shared_hash_find (set->vars, dv);
3935 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
3937 val_reset (set, dv);
3940 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
3941 if (att->offset == 0
3942 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
3943 && dv_is_value_p (att->dv))
3946 /* If there is a value associated with this register already, create
3948 if (att && dv_as_value (att->dv) != dv_as_value (dv))
3950 rtx cval = dv_as_value (att->dv);
3951 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
3952 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
3957 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
3959 variable_union (pslot, set);
3965 /* Just checking stuff and registering register attributes for
3969 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
3971 struct dfset_post_merge dfpm;
3976 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
3979 htab_traverse (shared_hash_htab ((*permp)->vars),
3980 variable_post_merge_perm_vals, &dfpm);
3981 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
3982 htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
3985 /* Return a node whose loc is a MEM that refers to EXPR in the
3986 location list of a one-part variable or value VAR, or in that of
3987 any values recursively mentioned in the location lists. */
3989 static location_chain
3990 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
3992 location_chain node;
3995 location_chain where = NULL;
4000 gcc_assert (GET_CODE (val) == VALUE);
4002 gcc_assert (!VALUE_RECURSED_INTO (val));
4004 dv = dv_from_value (val);
4005 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
4010 gcc_assert (dv_onepart_p (var->dv));
4012 if (!var->n_var_parts)
4015 gcc_assert (var->var_part[0].offset == 0);
4017 VALUE_RECURSED_INTO (val) = true;
4019 for (node = var->var_part[0].loc_chain; node; node = node->next)
4020 if (MEM_P (node->loc) && MEM_EXPR (node->loc) == expr
4021 && MEM_OFFSET (node->loc) == 0)
4026 else if (GET_CODE (node->loc) == VALUE
4027 && !VALUE_RECURSED_INTO (node->loc)
4028 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4031 VALUE_RECURSED_INTO (val) = false;
4036 /* Return TRUE if the value of MEM may vary across a call. */
4039 mem_dies_at_call (rtx mem)
4041 tree expr = MEM_EXPR (mem);
4047 decl = get_base_address (expr);
4055 return (may_be_aliased (decl)
4056 || (!TREE_READONLY (decl) && is_global_var (decl)));
4059 /* Remove all MEMs from the location list of a hash table entry for a
4060 one-part variable, except those whose MEM attributes map back to
4061 the variable itself, directly or within a VALUE. */
4064 dataflow_set_preserve_mem_locs (void **slot, void *data)
4066 dataflow_set *set = (dataflow_set *) data;
4067 variable var = (variable) *slot;
4069 if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
4071 tree decl = dv_as_decl (var->dv);
4072 location_chain loc, *locp;
4073 bool changed = false;
4075 if (!var->n_var_parts)
4078 gcc_assert (var->n_var_parts == 1);
4080 if (shared_var_p (var, set->vars))
4082 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4084 /* We want to remove dying MEMs that doesn't refer to
4086 if (GET_CODE (loc->loc) == MEM
4087 && (MEM_EXPR (loc->loc) != decl
4088 || MEM_OFFSET (loc->loc))
4089 && !mem_dies_at_call (loc->loc))
4091 /* We want to move here MEMs that do refer to DECL. */
4092 else if (GET_CODE (loc->loc) == VALUE
4093 && find_mem_expr_in_1pdv (decl, loc->loc,
4094 shared_hash_htab (set->vars)))
4101 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4102 var = (variable)*slot;
4103 gcc_assert (var->n_var_parts == 1);
4106 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4109 rtx old_loc = loc->loc;
4110 if (GET_CODE (old_loc) == VALUE)
4112 location_chain mem_node
4113 = find_mem_expr_in_1pdv (decl, loc->loc,
4114 shared_hash_htab (set->vars));
4116 /* ??? This picks up only one out of multiple MEMs that
4117 refer to the same variable. Do we ever need to be
4118 concerned about dealing with more than one, or, given
4119 that they should all map to the same variable
4120 location, their addresses will have been merged and
4121 they will be regarded as equivalent? */
4124 loc->loc = mem_node->loc;
4125 loc->set_src = mem_node->set_src;
4126 loc->init = MIN (loc->init, mem_node->init);
4130 if (GET_CODE (loc->loc) != MEM
4131 || (MEM_EXPR (loc->loc) == decl
4132 && MEM_OFFSET (loc->loc) == 0)
4133 || !mem_dies_at_call (loc->loc))
4135 if (old_loc != loc->loc && emit_notes)
4137 if (old_loc == var->var_part[0].cur_loc)
4140 var->var_part[0].cur_loc = NULL;
4141 var->cur_loc_changed = true;
4143 add_value_chains (var->dv, loc->loc);
4144 remove_value_chains (var->dv, old_loc);
4152 remove_value_chains (var->dv, old_loc);
4153 if (old_loc == var->var_part[0].cur_loc)
4156 var->var_part[0].cur_loc = NULL;
4157 var->cur_loc_changed = true;
4161 pool_free (loc_chain_pool, loc);
4164 if (!var->var_part[0].loc_chain)
4170 variable_was_changed (var, set);
4176 /* Remove all MEMs from the location list of a hash table entry for a
4180 dataflow_set_remove_mem_locs (void **slot, void *data)
4182 dataflow_set *set = (dataflow_set *) data;
4183 variable var = (variable) *slot;
4185 if (dv_is_value_p (var->dv))
4187 location_chain loc, *locp;
4188 bool changed = false;
4190 gcc_assert (var->n_var_parts == 1);
4192 if (shared_var_p (var, set->vars))
4194 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4195 if (GET_CODE (loc->loc) == MEM
4196 && mem_dies_at_call (loc->loc))
4202 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4203 var = (variable)*slot;
4204 gcc_assert (var->n_var_parts == 1);
4207 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4210 if (GET_CODE (loc->loc) != MEM
4211 || !mem_dies_at_call (loc->loc))
4218 remove_value_chains (var->dv, loc->loc);
4220 /* If we have deleted the location which was last emitted
4221 we have to emit new location so add the variable to set
4222 of changed variables. */
4223 if (var->var_part[0].cur_loc == loc->loc)
4226 var->var_part[0].cur_loc = NULL;
4227 var->cur_loc_changed = true;
4229 pool_free (loc_chain_pool, loc);
4232 if (!var->var_part[0].loc_chain)
4238 variable_was_changed (var, set);
4244 /* Remove all variable-location information about call-clobbered
4245 registers, as well as associations between MEMs and VALUEs. */
4248 dataflow_set_clear_at_call (dataflow_set *set)
4252 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
4253 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
4254 var_regno_delete (set, r);
4256 if (MAY_HAVE_DEBUG_INSNS)
4258 set->traversed_vars = set->vars;
4259 htab_traverse (shared_hash_htab (set->vars),
4260 dataflow_set_preserve_mem_locs, set);
4261 set->traversed_vars = set->vars;
4262 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
4264 set->traversed_vars = NULL;
4268 /* Flag whether two dataflow sets being compared contain different data. */
4270 dataflow_set_different_value;
4273 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4275 location_chain lc1, lc2;
4277 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4279 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4281 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4283 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4286 if (rtx_equal_p (lc1->loc, lc2->loc))
4295 /* Return true if one-part variables VAR1 and VAR2 are different.
4296 They must be in canonical order. */
4299 onepart_variable_different_p (variable var1, variable var2)
4301 location_chain lc1, lc2;
4306 gcc_assert (var1->n_var_parts == 1);
4307 gcc_assert (var2->n_var_parts == 1);
4309 lc1 = var1->var_part[0].loc_chain;
4310 lc2 = var2->var_part[0].loc_chain;
4317 if (loc_cmp (lc1->loc, lc2->loc))
4326 /* Return true if variables VAR1 and VAR2 are different. */
4329 variable_different_p (variable var1, variable var2)
4336 if (var1->n_var_parts != var2->n_var_parts)
4339 for (i = 0; i < var1->n_var_parts; i++)
4341 if (var1->var_part[i].offset != var2->var_part[i].offset)
4343 /* One-part values have locations in a canonical order. */
4344 if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
4346 gcc_assert (var1->n_var_parts == 1);
4347 gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4348 return onepart_variable_different_p (var1, var2);
4350 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4352 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4358 /* Compare variable *SLOT with the same variable in hash table DATA
4359 and set DATAFLOW_SET_DIFFERENT_VALUE if they are different. */
4362 dataflow_set_different_1 (void **slot, void *data)
4364 htab_t htab = (htab_t) data;
4365 variable var1, var2;
4367 var1 = (variable) *slot;
4368 var2 = (variable) htab_find_with_hash (htab, var1->dv,
4369 dv_htab_hash (var1->dv));
4372 dataflow_set_different_value = true;
4374 if (dump_file && (dump_flags & TDF_DETAILS))
4376 fprintf (dump_file, "dataflow difference found: removal of:\n");
4380 /* Stop traversing the hash table. */
4384 if (variable_different_p (var1, var2))
4386 dataflow_set_different_value = true;
4388 if (dump_file && (dump_flags & TDF_DETAILS))
4390 fprintf (dump_file, "dataflow difference found: old and new follow:\n");
4395 /* Stop traversing the hash table. */
4399 /* Continue traversing the hash table. */
4403 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4406 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4408 if (old_set->vars == new_set->vars)
4411 if (htab_elements (shared_hash_htab (old_set->vars))
4412 != htab_elements (shared_hash_htab (new_set->vars)))
4415 dataflow_set_different_value = false;
4417 htab_traverse (shared_hash_htab (old_set->vars), dataflow_set_different_1,
4418 shared_hash_htab (new_set->vars));
4419 /* No need to traverse the second hashtab, if both have the same number
4420 of elements and the second one had all entries found in the first one,
4421 then it can't have any extra entries. */
4422 return dataflow_set_different_value;
4425 /* Free the contents of dataflow set SET. */
4428 dataflow_set_destroy (dataflow_set *set)
4432 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4433 attrs_list_clear (&set->regs[i]);
4435 shared_hash_destroy (set->vars);
4439 /* Return true if RTL X contains a SYMBOL_REF. */
4442 contains_symbol_ref (rtx x)
4451 code = GET_CODE (x);
4452 if (code == SYMBOL_REF)
4455 fmt = GET_RTX_FORMAT (code);
4456 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4460 if (contains_symbol_ref (XEXP (x, i)))
4463 else if (fmt[i] == 'E')
4466 for (j = 0; j < XVECLEN (x, i); j++)
4467 if (contains_symbol_ref (XVECEXP (x, i, j)))
4475 /* Shall EXPR be tracked? */
4478 track_expr_p (tree expr, bool need_rtl)
4483 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4484 return DECL_RTL_SET_P (expr);
4486 /* If EXPR is not a parameter or a variable do not track it. */
4487 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4490 /* It also must have a name... */
4491 if (!DECL_NAME (expr) && need_rtl)
4494 /* ... and a RTL assigned to it. */
4495 decl_rtl = DECL_RTL_IF_SET (expr);
4496 if (!decl_rtl && need_rtl)
4499 /* If this expression is really a debug alias of some other declaration, we
4500 don't need to track this expression if the ultimate declaration is
4503 if (DECL_DEBUG_EXPR_IS_FROM (realdecl))
4505 realdecl = DECL_DEBUG_EXPR (realdecl);
4506 if (realdecl == NULL_TREE)
4508 /* ??? We don't yet know how to emit DW_OP_piece for variable
4509 that has been SRA'ed. */
4510 else if (!DECL_P (realdecl))
4514 /* Do not track EXPR if REALDECL it should be ignored for debugging
4516 if (DECL_IGNORED_P (realdecl))
4519 /* Do not track global variables until we are able to emit correct location
4521 if (TREE_STATIC (realdecl))
4524 /* When the EXPR is a DECL for alias of some variable (see example)
4525 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
4526 DECL_RTL contains SYMBOL_REF.
4529 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4532 if (decl_rtl && MEM_P (decl_rtl)
4533 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4536 /* If RTX is a memory it should not be very large (because it would be
4537 an array or struct). */
4538 if (decl_rtl && MEM_P (decl_rtl))
4540 /* Do not track structures and arrays. */
4541 if (GET_MODE (decl_rtl) == BLKmode
4542 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4544 if (MEM_SIZE (decl_rtl)
4545 && INTVAL (MEM_SIZE (decl_rtl)) > MAX_VAR_PARTS)
4549 DECL_CHANGED (expr) = 0;
4550 DECL_CHANGED (realdecl) = 0;
4554 /* Determine whether a given LOC refers to the same variable part as
4558 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4561 HOST_WIDE_INT offset2;
4563 if (! DECL_P (expr))
4568 expr2 = REG_EXPR (loc);
4569 offset2 = REG_OFFSET (loc);
4571 else if (MEM_P (loc))
4573 expr2 = MEM_EXPR (loc);
4574 offset2 = INT_MEM_OFFSET (loc);
4579 if (! expr2 || ! DECL_P (expr2))
4582 expr = var_debug_decl (expr);
4583 expr2 = var_debug_decl (expr2);
4585 return (expr == expr2 && offset == offset2);
4588 /* LOC is a REG or MEM that we would like to track if possible.
4589 If EXPR is null, we don't know what expression LOC refers to,
4590 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4591 LOC is an lvalue register.
4593 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4594 is something we can track. When returning true, store the mode of
4595 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4596 from EXPR in *OFFSET_OUT (if nonnull). */
4599 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4600 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4602 enum machine_mode mode;
4604 if (expr == NULL || !track_expr_p (expr, true))
4607 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4608 whole subreg, but only the old inner part is really relevant. */
4609 mode = GET_MODE (loc);
4610 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4612 enum machine_mode pseudo_mode;
4614 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4615 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
4617 offset += byte_lowpart_offset (pseudo_mode, mode);
4622 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4623 Do the same if we are storing to a register and EXPR occupies
4624 the whole of register LOC; in that case, the whole of EXPR is
4625 being changed. We exclude complex modes from the second case
4626 because the real and imaginary parts are represented as separate
4627 pseudo registers, even if the whole complex value fits into one
4629 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4631 && !COMPLEX_MODE_P (DECL_MODE (expr))
4632 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4633 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4635 mode = DECL_MODE (expr);
4639 if (offset < 0 || offset >= MAX_VAR_PARTS)
4645 *offset_out = offset;
4649 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4650 want to track. When returning nonnull, make sure that the attributes
4651 on the returned value are updated. */
4654 var_lowpart (enum machine_mode mode, rtx loc)
4656 unsigned int offset, reg_offset, regno;
4658 if (!REG_P (loc) && !MEM_P (loc))
4661 if (GET_MODE (loc) == mode)
4664 offset = byte_lowpart_offset (mode, GET_MODE (loc));
4667 return adjust_address_nv (loc, mode, offset);
4669 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4670 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4672 return gen_rtx_REG_offset (loc, mode, regno, offset);
4675 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
4676 hard_frame_pointer_rtx is being mapped to it. */
4677 static rtx cfa_base_rtx;
4679 /* Carry information about uses and stores while walking rtx. */
4681 struct count_use_info
4683 /* The insn where the RTX is. */
4686 /* The basic block where insn is. */
4689 /* The array of n_sets sets in the insn, as determined by cselib. */
4690 struct cselib_set *sets;
4693 /* True if we're counting stores, false otherwise. */
4697 /* Find a VALUE corresponding to X. */
4699 static inline cselib_val *
4700 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4706 /* This is called after uses are set up and before stores are
4707 processed bycselib, so it's safe to look up srcs, but not
4708 dsts. So we look up expressions that appear in srcs or in
4709 dest expressions, but we search the sets array for dests of
4713 for (i = 0; i < cui->n_sets; i++)
4714 if (cui->sets[i].dest == x)
4715 return cui->sets[i].src_elt;
4718 return cselib_lookup (x, mode, 0);
4724 /* Helper function to get mode of MEM's address. */
4726 static inline enum machine_mode
4727 get_address_mode (rtx mem)
4729 enum machine_mode mode = GET_MODE (XEXP (mem, 0));
4730 if (mode != VOIDmode)
4732 return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
4735 /* Replace all registers and addresses in an expression with VALUE
4736 expressions that map back to them, unless the expression is a
4737 register. If no mapping is or can be performed, returns NULL. */
4740 replace_expr_with_values (rtx loc)
4744 else if (MEM_P (loc))
4746 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
4747 get_address_mode (loc), 0);
4749 return replace_equiv_address_nv (loc, addr->val_rtx);
4754 return cselib_subst_to_values (loc);
4757 /* Determine what kind of micro operation to choose for a USE. Return
4758 MO_CLOBBER if no micro operation is to be generated. */
4760 static enum micro_operation_type
4761 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
4765 if (cui && cui->sets)
4767 if (GET_CODE (loc) == VAR_LOCATION)
4769 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
4771 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
4772 if (! VAR_LOC_UNKNOWN_P (ploc))
4774 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
4776 /* ??? flag_float_store and volatile mems are never
4777 given values, but we could in theory use them for
4779 gcc_assert (val || 1);
4787 if (REG_P (loc) || MEM_P (loc))
4790 *modep = GET_MODE (loc);
4794 || (find_use_val (loc, GET_MODE (loc), cui)
4795 && cselib_lookup (XEXP (loc, 0),
4796 get_address_mode (loc), 0)))
4801 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4803 if (val && !cselib_preserved_value_p (val))
4811 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
4813 if (loc == cfa_base_rtx)
4815 expr = REG_EXPR (loc);
4818 return MO_USE_NO_VAR;
4819 else if (target_for_debug_bind (var_debug_decl (expr)))
4821 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
4822 false, modep, NULL))
4825 return MO_USE_NO_VAR;
4827 else if (MEM_P (loc))
4829 expr = MEM_EXPR (loc);
4833 else if (target_for_debug_bind (var_debug_decl (expr)))
4835 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
4836 false, modep, NULL))
4845 /* Log to OUT information about micro-operation MOPT involving X in
4849 log_op_type (rtx x, basic_block bb, rtx insn,
4850 enum micro_operation_type mopt, FILE *out)
4852 fprintf (out, "bb %i op %i insn %i %s ",
4853 bb->index, VEC_length (micro_operation, VTI (bb)->mos),
4854 INSN_UID (insn), micro_operation_type_name[mopt]);
4855 print_inline_rtx (out, x, 2);
4859 /* Tell whether the CONCAT used to holds a VALUE and its location
4860 needs value resolution, i.e., an attempt of mapping the location
4861 back to other incoming values. */
4862 #define VAL_NEEDS_RESOLUTION(x) \
4863 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
4864 /* Whether the location in the CONCAT is a tracked expression, that
4865 should also be handled like a MO_USE. */
4866 #define VAL_HOLDS_TRACK_EXPR(x) \
4867 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
4868 /* Whether the location in the CONCAT should be handled like a MO_COPY
4870 #define VAL_EXPR_IS_COPIED(x) \
4871 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
4872 /* Whether the location in the CONCAT should be handled like a
4873 MO_CLOBBER as well. */
4874 #define VAL_EXPR_IS_CLOBBERED(x) \
4875 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
4876 /* Whether the location is a CONCAT of the MO_VAL_SET expression and
4877 a reverse operation that should be handled afterwards. */
4878 #define VAL_EXPR_HAS_REVERSE(x) \
4879 (RTL_FLAG_CHECK1 ("VAL_EXPR_HAS_REVERSE", (x), CONCAT)->return_val)
4881 /* All preserved VALUEs. */
4882 static VEC (rtx, heap) *preserved_values;
4884 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
4887 preserve_value (cselib_val *val)
4889 cselib_preserve_value (val);
4890 VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
4893 /* Helper function for MO_VAL_LOC handling. Return non-zero if
4894 any rtxes not suitable for CONST use not replaced by VALUEs
4898 non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
4903 switch (GET_CODE (*x))
4914 return !MEM_READONLY_P (*x);
4920 /* Add uses (register and memory references) LOC which will be tracked
4921 to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
4924 add_uses (rtx *ploc, void *data)
4927 enum machine_mode mode = VOIDmode;
4928 struct count_use_info *cui = (struct count_use_info *)data;
4929 enum micro_operation_type type = use_type (loc, cui, &mode);
4931 if (type != MO_CLOBBER)
4933 basic_block bb = cui->bb;
4937 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
4938 mo.insn = cui->insn;
4940 if (type == MO_VAL_LOC)
4943 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
4946 gcc_assert (cui->sets);
4949 && !REG_P (XEXP (vloc, 0))
4950 && !MEM_P (XEXP (vloc, 0))
4951 && (GET_CODE (XEXP (vloc, 0)) != PLUS
4952 || XEXP (XEXP (vloc, 0), 0) != cfa_base_rtx
4953 || !CONST_INT_P (XEXP (XEXP (vloc, 0), 1))))
4956 enum machine_mode address_mode = get_address_mode (mloc);
4958 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4960 if (val && !cselib_preserved_value_p (val))
4962 micro_operation moa;
4963 preserve_value (val);
4964 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4965 moa.type = MO_VAL_USE;
4966 moa.insn = cui->insn;
4967 moa.u.loc = gen_rtx_CONCAT (address_mode,
4968 val->val_rtx, mloc);
4969 if (dump_file && (dump_flags & TDF_DETAILS))
4970 log_op_type (moa.u.loc, cui->bb, cui->insn,
4971 moa.type, dump_file);
4972 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
4976 if (CONSTANT_P (vloc)
4977 && (GET_CODE (vloc) != CONST
4978 || for_each_rtx (&vloc, non_suitable_const, NULL)))
4979 /* For constants don't look up any value. */;
4980 else if (!VAR_LOC_UNKNOWN_P (vloc)
4981 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
4983 enum machine_mode mode2;
4984 enum micro_operation_type type2;
4985 rtx nloc = replace_expr_with_values (vloc);
4989 oloc = shallow_copy_rtx (oloc);
4990 PAT_VAR_LOCATION_LOC (oloc) = nloc;
4993 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
4995 type2 = use_type (vloc, 0, &mode2);
4997 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4998 || type2 == MO_CLOBBER);
5000 if (type2 == MO_CLOBBER
5001 && !cselib_preserved_value_p (val))
5003 VAL_NEEDS_RESOLUTION (oloc) = 1;
5004 preserve_value (val);
5007 else if (!VAR_LOC_UNKNOWN_P (vloc))
5009 oloc = shallow_copy_rtx (oloc);
5010 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5015 else if (type == MO_VAL_USE)
5017 enum machine_mode mode2 = VOIDmode;
5018 enum micro_operation_type type2;
5019 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5020 rtx vloc, oloc = loc, nloc;
5022 gcc_assert (cui->sets);
5025 && !REG_P (XEXP (oloc, 0))
5026 && !MEM_P (XEXP (oloc, 0))
5027 && (GET_CODE (XEXP (oloc, 0)) != PLUS
5028 || XEXP (XEXP (oloc, 0), 0) != cfa_base_rtx
5029 || !CONST_INT_P (XEXP (XEXP (oloc, 0), 1))))
5032 enum machine_mode address_mode = get_address_mode (mloc);
5034 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
5036 if (val && !cselib_preserved_value_p (val))
5038 micro_operation moa;
5039 preserve_value (val);
5040 mloc = cselib_subst_to_values (XEXP (mloc, 0));
5041 moa.type = MO_VAL_USE;
5042 moa.insn = cui->insn;
5043 moa.u.loc = gen_rtx_CONCAT (address_mode,
5044 val->val_rtx, mloc);
5045 if (dump_file && (dump_flags & TDF_DETAILS))
5046 log_op_type (moa.u.loc, cui->bb, cui->insn,
5047 moa.type, dump_file);
5048 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5052 type2 = use_type (loc, 0, &mode2);
5054 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5055 || type2 == MO_CLOBBER);
5057 if (type2 == MO_USE)
5058 vloc = var_lowpart (mode2, loc);
5062 /* The loc of a MO_VAL_USE may have two forms:
5064 (concat val src): val is at src, a value-based
5067 (concat (concat val use) src): same as above, with use as
5068 the MO_USE tracked value, if it differs from src.
5072 nloc = replace_expr_with_values (loc);
5077 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5079 oloc = val->val_rtx;
5081 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5083 if (type2 == MO_USE)
5084 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5085 if (!cselib_preserved_value_p (val))
5087 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5088 preserve_value (val);
5092 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5094 if (dump_file && (dump_flags & TDF_DETAILS))
5095 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5096 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5102 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5105 add_uses_1 (rtx *x, void *cui)
5107 for_each_rtx (x, add_uses, cui);
5110 /* Attempt to reverse the EXPR operation in the debug info. Say for
5111 reg1 = reg2 + 6 even when reg2 is no longer live we
5112 can express its value as VAL - 6. */
5115 reverse_op (rtx val, const_rtx expr)
5121 if (GET_CODE (expr) != SET)
5124 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5127 src = SET_SRC (expr);
5128 switch (GET_CODE (src))
5142 if (!REG_P (XEXP (src, 0)) || !SCALAR_INT_MODE_P (GET_MODE (src)))
5145 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0);
5146 if (!v || !cselib_preserved_value_p (v))
5149 switch (GET_CODE (src))
5153 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5155 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5159 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5171 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5173 arg = XEXP (src, 1);
5174 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5176 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5177 if (arg == NULL_RTX)
5179 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5182 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5184 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5185 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5186 breaks a lot of routines during var-tracking. */
5187 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5193 return gen_rtx_CONCAT (GET_MODE (v->val_rtx), v->val_rtx, ret);
5196 /* Add stores (register and memory references) LOC which will be tracked
5197 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5198 CUIP->insn is instruction which the LOC is part of. */
5201 add_stores (rtx loc, const_rtx expr, void *cuip)
5203 enum machine_mode mode = VOIDmode, mode2;
5204 struct count_use_info *cui = (struct count_use_info *)cuip;
5205 basic_block bb = cui->bb;
5207 rtx oloc = loc, nloc, src = NULL;
5208 enum micro_operation_type type = use_type (loc, cui, &mode);
5209 bool track_p = false;
5211 bool resolve, preserve;
5214 if (type == MO_CLOBBER)
5221 gcc_assert (loc != cfa_base_rtx);
5222 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5223 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5224 || GET_CODE (expr) == CLOBBER)
5226 mo.type = MO_CLOBBER;
5231 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
5232 src = var_lowpart (mode2, SET_SRC (expr));
5233 loc = var_lowpart (mode2, loc);
5242 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5243 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5250 mo.insn = cui->insn;
5252 else if (MEM_P (loc)
5253 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5256 if (MEM_P (loc) && type == MO_VAL_SET
5257 && !REG_P (XEXP (loc, 0))
5258 && !MEM_P (XEXP (loc, 0))
5259 && (GET_CODE (XEXP (loc, 0)) != PLUS
5260 || XEXP (XEXP (loc, 0), 0) != cfa_base_rtx
5261 || !CONST_INT_P (XEXP (XEXP (loc, 0), 1))))
5264 enum machine_mode address_mode = get_address_mode (mloc);
5265 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5268 if (val && !cselib_preserved_value_p (val))
5270 preserve_value (val);
5271 mo.type = MO_VAL_USE;
5272 mloc = cselib_subst_to_values (XEXP (mloc, 0));
5273 mo.u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
5274 mo.insn = cui->insn;
5275 if (dump_file && (dump_flags & TDF_DETAILS))
5276 log_op_type (mo.u.loc, cui->bb, cui->insn,
5277 mo.type, dump_file);
5278 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5282 if (GET_CODE (expr) == CLOBBER || !track_p)
5284 mo.type = MO_CLOBBER;
5285 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5289 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
5290 src = var_lowpart (mode2, SET_SRC (expr));
5291 loc = var_lowpart (mode2, loc);
5300 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5301 if (same_variable_part_p (SET_SRC (xexpr),
5303 INT_MEM_OFFSET (loc)))
5310 mo.insn = cui->insn;
5315 if (type != MO_VAL_SET)
5316 goto log_and_return;
5318 v = find_use_val (oloc, mode, cui);
5321 goto log_and_return;
5323 resolve = preserve = !cselib_preserved_value_p (v);
5325 nloc = replace_expr_with_values (oloc);
5329 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5331 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0);
5333 gcc_assert (oval != v);
5334 gcc_assert (REG_P (oloc) || MEM_P (oloc));
5336 if (!cselib_preserved_value_p (oval))
5338 micro_operation moa;
5340 preserve_value (oval);
5342 moa.type = MO_VAL_USE;
5343 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5344 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
5345 moa.insn = cui->insn;
5347 if (dump_file && (dump_flags & TDF_DETAILS))
5348 log_op_type (moa.u.loc, cui->bb, cui->insn,
5349 moa.type, dump_file);
5350 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5355 else if (resolve && GET_CODE (mo.u.loc) == SET)
5357 nloc = replace_expr_with_values (SET_SRC (expr));
5359 /* Avoid the mode mismatch between oexpr and expr. */
5360 if (!nloc && mode != mode2)
5362 nloc = SET_SRC (expr);
5363 gcc_assert (oloc == SET_DEST (expr));
5367 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
5370 if (oloc == SET_DEST (mo.u.loc))
5371 /* No point in duplicating. */
5373 if (!REG_P (SET_SRC (mo.u.loc)))
5379 if (GET_CODE (mo.u.loc) == SET
5380 && oloc == SET_DEST (mo.u.loc))
5381 /* No point in duplicating. */
5387 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5389 if (mo.u.loc != oloc)
5390 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
5392 /* The loc of a MO_VAL_SET may have various forms:
5394 (concat val dst): dst now holds val
5396 (concat val (set dst src)): dst now holds val, copied from src
5398 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5399 after replacing mems and non-top-level regs with values.
5401 (concat (concat val dstv) (set dst src)): dst now holds val,
5402 copied from src. dstv is a value-based representation of dst, if
5403 it differs from dst. If resolution is needed, src is a REG, and
5404 its mode is the same as that of val.
5406 (concat (concat val (set dstv srcv)) (set dst src)): src
5407 copied to dst, holding val. dstv and srcv are value-based
5408 representations of dst and src, respectively.
5412 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
5414 reverse = reverse_op (v->val_rtx, expr);
5417 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, reverse);
5418 VAL_EXPR_HAS_REVERSE (loc) = 1;
5425 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5428 VAL_NEEDS_RESOLUTION (loc) = resolve;
5431 if (mo.type == MO_CLOBBER)
5432 VAL_EXPR_IS_CLOBBERED (loc) = 1;
5433 if (mo.type == MO_COPY)
5434 VAL_EXPR_IS_COPIED (loc) = 1;
5436 mo.type = MO_VAL_SET;
5439 if (dump_file && (dump_flags & TDF_DETAILS))
5440 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5441 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5444 /* Callback for cselib_record_sets_hook, that records as micro
5445 operations uses and stores in an insn after cselib_record_sets has
5446 analyzed the sets in an insn, but before it modifies the stored
5447 values in the internal tables, unless cselib_record_sets doesn't
5448 call it directly (perhaps because we're not doing cselib in the
5449 first place, in which case sets and n_sets will be 0). */
5452 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
5454 basic_block bb = BLOCK_FOR_INSN (insn);
5456 struct count_use_info cui;
5457 micro_operation *mos;
5459 cselib_hook_called = true;
5464 cui.n_sets = n_sets;
5466 n1 = VEC_length (micro_operation, VTI (bb)->mos);
5467 cui.store_p = false;
5468 note_uses (&PATTERN (insn), add_uses_1, &cui);
5469 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5470 mos = VEC_address (micro_operation, VTI (bb)->mos);
5472 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
5476 while (n1 < n2 && mos[n1].type == MO_USE)
5478 while (n1 < n2 && mos[n2].type != MO_USE)
5490 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5493 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
5495 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
5513 mo.u.loc = NULL_RTX;
5515 if (dump_file && (dump_flags & TDF_DETAILS))
5516 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
5517 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5520 n1 = VEC_length (micro_operation, VTI (bb)->mos);
5521 /* This will record NEXT_INSN (insn), such that we can
5522 insert notes before it without worrying about any
5523 notes that MO_USEs might emit after the insn. */
5525 note_stores (PATTERN (insn), add_stores, &cui);
5526 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5527 mos = VEC_address (micro_operation, VTI (bb)->mos);
5529 /* Order the MO_VAL_USEs first (note_stores does nothing
5530 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
5531 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
5534 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
5536 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
5548 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5551 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
5553 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
5566 static enum var_init_status
5567 find_src_status (dataflow_set *in, rtx src)
5569 tree decl = NULL_TREE;
5570 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
5572 if (! flag_var_tracking_uninit)
5573 status = VAR_INIT_STATUS_INITIALIZED;
5575 if (src && REG_P (src))
5576 decl = var_debug_decl (REG_EXPR (src));
5577 else if (src && MEM_P (src))
5578 decl = var_debug_decl (MEM_EXPR (src));
5581 status = get_init_value (in, src, dv_from_decl (decl));
5586 /* SRC is the source of an assignment. Use SET to try to find what
5587 was ultimately assigned to SRC. Return that value if known,
5588 otherwise return SRC itself. */
5591 find_src_set_src (dataflow_set *set, rtx src)
5593 tree decl = NULL_TREE; /* The variable being copied around. */
5594 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
5596 location_chain nextp;
5600 if (src && REG_P (src))
5601 decl = var_debug_decl (REG_EXPR (src));
5602 else if (src && MEM_P (src))
5603 decl = var_debug_decl (MEM_EXPR (src));
5607 decl_or_value dv = dv_from_decl (decl);
5609 var = shared_hash_find (set->vars, dv);
5613 for (i = 0; i < var->n_var_parts && !found; i++)
5614 for (nextp = var->var_part[i].loc_chain; nextp && !found;
5615 nextp = nextp->next)
5616 if (rtx_equal_p (nextp->loc, src))
5618 set_src = nextp->set_src;
5628 /* Compute the changes of variable locations in the basic block BB. */
5631 compute_bb_dataflow (basic_block bb)
5634 micro_operation *mo;
5636 dataflow_set old_out;
5637 dataflow_set *in = &VTI (bb)->in;
5638 dataflow_set *out = &VTI (bb)->out;
5640 dataflow_set_init (&old_out);
5641 dataflow_set_copy (&old_out, out);
5642 dataflow_set_copy (out, in);
5644 for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
5646 rtx insn = mo->insn;
5651 dataflow_set_clear_at_call (out);
5656 rtx loc = mo->u.loc;
5659 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5660 else if (MEM_P (loc))
5661 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5667 rtx loc = mo->u.loc;
5671 if (GET_CODE (loc) == CONCAT)
5673 val = XEXP (loc, 0);
5674 vloc = XEXP (loc, 1);
5682 var = PAT_VAR_LOCATION_DECL (vloc);
5684 clobber_variable_part (out, NULL_RTX,
5685 dv_from_decl (var), 0, NULL_RTX);
5688 if (VAL_NEEDS_RESOLUTION (loc))
5689 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
5690 set_variable_part (out, val, dv_from_decl (var), 0,
5691 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
5694 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
5695 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
5696 dv_from_decl (var), 0,
5697 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
5704 rtx loc = mo->u.loc;
5705 rtx val, vloc, uloc;
5707 vloc = uloc = XEXP (loc, 1);
5708 val = XEXP (loc, 0);
5710 if (GET_CODE (val) == CONCAT)
5712 uloc = XEXP (val, 1);
5713 val = XEXP (val, 0);
5716 if (VAL_NEEDS_RESOLUTION (loc))
5717 val_resolve (out, val, vloc, insn);
5719 val_store (out, val, uloc, insn, false);
5721 if (VAL_HOLDS_TRACK_EXPR (loc))
5723 if (GET_CODE (uloc) == REG)
5724 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5726 else if (GET_CODE (uloc) == MEM)
5727 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5735 rtx loc = mo->u.loc;
5736 rtx val, vloc, uloc, reverse = NULL_RTX;
5739 if (VAL_EXPR_HAS_REVERSE (loc))
5741 reverse = XEXP (loc, 1);
5742 vloc = XEXP (loc, 0);
5744 uloc = XEXP (vloc, 1);
5745 val = XEXP (vloc, 0);
5748 if (GET_CODE (val) == CONCAT)
5750 vloc = XEXP (val, 1);
5751 val = XEXP (val, 0);
5754 if (GET_CODE (vloc) == SET)
5756 rtx vsrc = SET_SRC (vloc);
5758 gcc_assert (val != vsrc);
5759 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
5761 vloc = SET_DEST (vloc);
5763 if (VAL_NEEDS_RESOLUTION (loc))
5764 val_resolve (out, val, vsrc, insn);
5766 else if (VAL_NEEDS_RESOLUTION (loc))
5768 gcc_assert (GET_CODE (uloc) == SET
5769 && GET_CODE (SET_SRC (uloc)) == REG);
5770 val_resolve (out, val, SET_SRC (uloc), insn);
5773 if (VAL_HOLDS_TRACK_EXPR (loc))
5775 if (VAL_EXPR_IS_CLOBBERED (loc))
5778 var_reg_delete (out, uloc, true);
5779 else if (MEM_P (uloc))
5780 var_mem_delete (out, uloc, true);
5784 bool copied_p = VAL_EXPR_IS_COPIED (loc);
5786 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
5788 if (GET_CODE (uloc) == SET)
5790 set_src = SET_SRC (uloc);
5791 uloc = SET_DEST (uloc);
5796 if (flag_var_tracking_uninit)
5798 status = find_src_status (in, set_src);
5800 if (status == VAR_INIT_STATUS_UNKNOWN)
5801 status = find_src_status (out, set_src);
5804 set_src = find_src_set_src (in, set_src);
5808 var_reg_delete_and_set (out, uloc, !copied_p,
5810 else if (MEM_P (uloc))
5811 var_mem_delete_and_set (out, uloc, !copied_p,
5815 else if (REG_P (uloc))
5816 var_regno_delete (out, REGNO (uloc));
5818 val_store (out, val, vloc, insn, true);
5821 val_store (out, XEXP (reverse, 0), XEXP (reverse, 1),
5828 rtx loc = mo->u.loc;
5831 if (GET_CODE (loc) == SET)
5833 set_src = SET_SRC (loc);
5834 loc = SET_DEST (loc);
5838 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5840 else if (MEM_P (loc))
5841 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5848 rtx loc = mo->u.loc;
5849 enum var_init_status src_status;
5852 if (GET_CODE (loc) == SET)
5854 set_src = SET_SRC (loc);
5855 loc = SET_DEST (loc);
5858 if (! flag_var_tracking_uninit)
5859 src_status = VAR_INIT_STATUS_INITIALIZED;
5862 src_status = find_src_status (in, set_src);
5864 if (src_status == VAR_INIT_STATUS_UNKNOWN)
5865 src_status = find_src_status (out, set_src);
5868 set_src = find_src_set_src (in, set_src);
5871 var_reg_delete_and_set (out, loc, false, src_status, set_src);
5872 else if (MEM_P (loc))
5873 var_mem_delete_and_set (out, loc, false, src_status, set_src);
5879 rtx loc = mo->u.loc;
5882 var_reg_delete (out, loc, false);
5883 else if (MEM_P (loc))
5884 var_mem_delete (out, loc, false);
5890 rtx loc = mo->u.loc;
5893 var_reg_delete (out, loc, true);
5894 else if (MEM_P (loc))
5895 var_mem_delete (out, loc, true);
5900 out->stack_adjust += mo->u.adjust;
5905 if (MAY_HAVE_DEBUG_INSNS)
5907 dataflow_set_equiv_regs (out);
5908 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
5910 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
5913 htab_traverse (shared_hash_htab (out->vars),
5914 canonicalize_loc_order_check, out);
5917 changed = dataflow_set_different (&old_out, out);
5918 dataflow_set_destroy (&old_out);
5922 /* Find the locations of variables in the whole function. */
5925 vt_find_locations (void)
5927 fibheap_t worklist, pending, fibheap_swap;
5928 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
5935 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
5936 bool success = true;
5938 /* Compute reverse completion order of depth first search of the CFG
5939 so that the data-flow runs faster. */
5940 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
5941 bb_order = XNEWVEC (int, last_basic_block);
5942 pre_and_rev_post_order_compute (NULL, rc_order, false);
5943 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
5944 bb_order[rc_order[i]] = i;
5947 worklist = fibheap_new ();
5948 pending = fibheap_new ();
5949 visited = sbitmap_alloc (last_basic_block);
5950 in_worklist = sbitmap_alloc (last_basic_block);
5951 in_pending = sbitmap_alloc (last_basic_block);
5952 sbitmap_zero (in_worklist);
5955 fibheap_insert (pending, bb_order[bb->index], bb);
5956 sbitmap_ones (in_pending);
5958 while (success && !fibheap_empty (pending))
5960 fibheap_swap = pending;
5962 worklist = fibheap_swap;
5963 sbitmap_swap = in_pending;
5964 in_pending = in_worklist;
5965 in_worklist = sbitmap_swap;
5967 sbitmap_zero (visited);
5969 while (!fibheap_empty (worklist))
5971 bb = (basic_block) fibheap_extract_min (worklist);
5972 RESET_BIT (in_worklist, bb->index);
5973 if (!TEST_BIT (visited, bb->index))
5977 int oldinsz, oldoutsz;
5979 SET_BIT (visited, bb->index);
5981 if (VTI (bb)->in.vars)
5984 -= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
5985 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
5987 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
5989 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
5992 oldinsz = oldoutsz = 0;
5994 if (MAY_HAVE_DEBUG_INSNS)
5996 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
5997 bool first = true, adjust = false;
5999 /* Calculate the IN set as the intersection of
6000 predecessor OUT sets. */
6002 dataflow_set_clear (in);
6003 dst_can_be_shared = true;
6005 FOR_EACH_EDGE (e, ei, bb->preds)
6006 if (!VTI (e->src)->flooded)
6007 gcc_assert (bb_order[bb->index]
6008 <= bb_order[e->src->index]);
6011 dataflow_set_copy (in, &VTI (e->src)->out);
6012 first_out = &VTI (e->src)->out;
6017 dataflow_set_merge (in, &VTI (e->src)->out);
6023 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
6025 /* Merge and merge_adjust should keep entries in
6027 htab_traverse (shared_hash_htab (in->vars),
6028 canonicalize_loc_order_check,
6031 if (dst_can_be_shared)
6033 shared_hash_destroy (in->vars);
6034 in->vars = shared_hash_copy (first_out->vars);
6038 VTI (bb)->flooded = true;
6042 /* Calculate the IN set as union of predecessor OUT sets. */
6043 dataflow_set_clear (&VTI (bb)->in);
6044 FOR_EACH_EDGE (e, ei, bb->preds)
6045 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
6048 changed = compute_bb_dataflow (bb);
6049 htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6050 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6052 if (htabmax && htabsz > htabmax)
6054 if (MAY_HAVE_DEBUG_INSNS)
6055 inform (DECL_SOURCE_LOCATION (cfun->decl),
6056 "variable tracking size limit exceeded with "
6057 "-fvar-tracking-assignments, retrying without");
6059 inform (DECL_SOURCE_LOCATION (cfun->decl),
6060 "variable tracking size limit exceeded");
6067 FOR_EACH_EDGE (e, ei, bb->succs)
6069 if (e->dest == EXIT_BLOCK_PTR)
6072 if (TEST_BIT (visited, e->dest->index))
6074 if (!TEST_BIT (in_pending, e->dest->index))
6076 /* Send E->DEST to next round. */
6077 SET_BIT (in_pending, e->dest->index);
6078 fibheap_insert (pending,
6079 bb_order[e->dest->index],
6083 else if (!TEST_BIT (in_worklist, e->dest->index))
6085 /* Add E->DEST to current round. */
6086 SET_BIT (in_worklist, e->dest->index);
6087 fibheap_insert (worklist, bb_order[e->dest->index],
6095 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
6097 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
6099 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
6101 (int)worklist->nodes, (int)pending->nodes, htabsz);
6103 if (dump_file && (dump_flags & TDF_DETAILS))
6105 fprintf (dump_file, "BB %i IN:\n", bb->index);
6106 dump_dataflow_set (&VTI (bb)->in);
6107 fprintf (dump_file, "BB %i OUT:\n", bb->index);
6108 dump_dataflow_set (&VTI (bb)->out);
6114 if (success && MAY_HAVE_DEBUG_INSNS)
6116 gcc_assert (VTI (bb)->flooded);
6119 fibheap_delete (worklist);
6120 fibheap_delete (pending);
6121 sbitmap_free (visited);
6122 sbitmap_free (in_worklist);
6123 sbitmap_free (in_pending);
6128 /* Print the content of the LIST to dump file. */
6131 dump_attrs_list (attrs list)
6133 for (; list; list = list->next)
6135 if (dv_is_decl_p (list->dv))
6136 print_mem_expr (dump_file, dv_as_decl (list->dv));
6138 print_rtl_single (dump_file, dv_as_value (list->dv));
6139 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
6141 fprintf (dump_file, "\n");
6144 /* Print the information about variable *SLOT to dump file. */
6147 dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
6149 variable var = (variable) *slot;
6153 /* Continue traversing the hash table. */
6157 /* Print the information about variable VAR to dump file. */
6160 dump_var (variable var)
6163 location_chain node;
6165 if (dv_is_decl_p (var->dv))
6167 const_tree decl = dv_as_decl (var->dv);
6169 if (DECL_NAME (decl))
6171 fprintf (dump_file, " name: %s",
6172 IDENTIFIER_POINTER (DECL_NAME (decl)));
6173 if (dump_flags & TDF_UID)
6174 fprintf (dump_file, "D.%u", DECL_UID (decl));
6176 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6177 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
6179 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
6180 fprintf (dump_file, "\n");
6184 fputc (' ', dump_file);
6185 print_rtl_single (dump_file, dv_as_value (var->dv));
6188 for (i = 0; i < var->n_var_parts; i++)
6190 fprintf (dump_file, " offset %ld\n",
6191 (long) var->var_part[i].offset);
6192 for (node = var->var_part[i].loc_chain; node; node = node->next)
6194 fprintf (dump_file, " ");
6195 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
6196 fprintf (dump_file, "[uninit]");
6197 print_rtl_single (dump_file, node->loc);
6202 /* Print the information about variables from hash table VARS to dump file. */
6205 dump_vars (htab_t vars)
6207 if (htab_elements (vars) > 0)
6209 fprintf (dump_file, "Variables:\n");
6210 htab_traverse (vars, dump_var_slot, NULL);
6214 /* Print the dataflow set SET to dump file. */
6217 dump_dataflow_set (dataflow_set *set)
6221 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
6223 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6227 fprintf (dump_file, "Reg %d:", i);
6228 dump_attrs_list (set->regs[i]);
6231 dump_vars (shared_hash_htab (set->vars));
6232 fprintf (dump_file, "\n");
6235 /* Print the IN and OUT sets for each basic block to dump file. */
6238 dump_dataflow_sets (void)
6244 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
6245 fprintf (dump_file, "IN:\n");
6246 dump_dataflow_set (&VTI (bb)->in);
6247 fprintf (dump_file, "OUT:\n");
6248 dump_dataflow_set (&VTI (bb)->out);
6252 /* Add variable VAR to the hash table of changed variables and
6253 if it has no locations delete it from SET's hash table. */
6256 variable_was_changed (variable var, dataflow_set *set)
6258 hashval_t hash = dv_htab_hash (var->dv);
6263 bool old_cur_loc_changed = false;
6265 /* Remember this decl or VALUE has been added to changed_variables. */
6266 set_dv_changed (var->dv, true);
6268 slot = htab_find_slot_with_hash (changed_variables,
6274 variable old_var = (variable) *slot;
6275 gcc_assert (old_var->in_changed_variables);
6276 old_var->in_changed_variables = false;
6277 old_cur_loc_changed = old_var->cur_loc_changed;
6278 variable_htab_free (*slot);
6280 if (set && var->n_var_parts == 0)
6284 empty_var = (variable) pool_alloc (dv_pool (var->dv));
6285 empty_var->dv = var->dv;
6286 empty_var->refcount = 1;
6287 empty_var->n_var_parts = 0;
6288 empty_var->cur_loc_changed = true;
6289 empty_var->in_changed_variables = true;
6296 var->in_changed_variables = true;
6297 /* If within processing one uop a variable is deleted
6298 and then readded, we need to assume it has changed. */
6299 if (old_cur_loc_changed)
6300 var->cur_loc_changed = true;
6307 if (var->n_var_parts == 0)
6312 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
6315 if (shared_hash_shared (set->vars))
6316 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
6318 htab_clear_slot (shared_hash_htab (set->vars), slot);
6324 /* Look for the index in VAR->var_part corresponding to OFFSET.
6325 Return -1 if not found. If INSERTION_POINT is non-NULL, the
6326 referenced int will be set to the index that the part has or should
6327 have, if it should be inserted. */
6330 find_variable_location_part (variable var, HOST_WIDE_INT offset,
6331 int *insertion_point)
6335 /* Find the location part. */
6337 high = var->n_var_parts;
6340 pos = (low + high) / 2;
6341 if (var->var_part[pos].offset < offset)
6348 if (insertion_point)
6349 *insertion_point = pos;
6351 if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
6358 set_slot_part (dataflow_set *set, rtx loc, void **slot,
6359 decl_or_value dv, HOST_WIDE_INT offset,
6360 enum var_init_status initialized, rtx set_src)
6363 location_chain node, next;
6364 location_chain *nextp;
6366 bool onepart = dv_onepart_p (dv);
6368 gcc_assert (offset == 0 || !onepart);
6369 gcc_assert (loc != dv_as_opaque (dv));
6371 var = (variable) *slot;
6373 if (! flag_var_tracking_uninit)
6374 initialized = VAR_INIT_STATUS_INITIALIZED;
6378 /* Create new variable information. */
6379 var = (variable) pool_alloc (dv_pool (dv));
6382 var->n_var_parts = 1;
6383 var->cur_loc_changed = false;
6384 var->in_changed_variables = false;
6385 var->var_part[0].offset = offset;
6386 var->var_part[0].loc_chain = NULL;
6387 var->var_part[0].cur_loc = NULL;
6390 nextp = &var->var_part[0].loc_chain;
6396 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
6400 if (GET_CODE (loc) == VALUE)
6402 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6403 nextp = &node->next)
6404 if (GET_CODE (node->loc) == VALUE)
6406 if (node->loc == loc)
6411 if (canon_value_cmp (node->loc, loc))
6419 else if (REG_P (node->loc) || MEM_P (node->loc))
6427 else if (REG_P (loc))
6429 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6430 nextp = &node->next)
6431 if (REG_P (node->loc))
6433 if (REGNO (node->loc) < REGNO (loc))
6437 if (REGNO (node->loc) == REGNO (loc))
6450 else if (MEM_P (loc))
6452 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6453 nextp = &node->next)
6454 if (REG_P (node->loc))
6456 else if (MEM_P (node->loc))
6458 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
6470 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6471 nextp = &node->next)
6472 if ((r = loc_cmp (node->loc, loc)) >= 0)
6480 if (shared_var_p (var, set->vars))
6482 slot = unshare_variable (set, slot, var, initialized);
6483 var = (variable)*slot;
6484 for (nextp = &var->var_part[0].loc_chain; c;
6485 nextp = &(*nextp)->next)
6487 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
6494 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
6496 pos = find_variable_location_part (var, offset, &inspos);
6500 node = var->var_part[pos].loc_chain;
6503 && ((REG_P (node->loc) && REG_P (loc)
6504 && REGNO (node->loc) == REGNO (loc))
6505 || rtx_equal_p (node->loc, loc)))
6507 /* LOC is in the beginning of the chain so we have nothing
6509 if (node->init < initialized)
6510 node->init = initialized;
6511 if (set_src != NULL)
6512 node->set_src = set_src;
6518 /* We have to make a copy of a shared variable. */
6519 if (shared_var_p (var, set->vars))
6521 slot = unshare_variable (set, slot, var, initialized);
6522 var = (variable)*slot;
6528 /* We have not found the location part, new one will be created. */
6530 /* We have to make a copy of the shared variable. */
6531 if (shared_var_p (var, set->vars))
6533 slot = unshare_variable (set, slot, var, initialized);
6534 var = (variable)*slot;
6537 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
6538 thus there are at most MAX_VAR_PARTS different offsets. */
6539 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
6540 && (!var->n_var_parts || !dv_onepart_p (var->dv)));
6542 /* We have to move the elements of array starting at index
6543 inspos to the next position. */
6544 for (pos = var->n_var_parts; pos > inspos; pos--)
6545 var->var_part[pos] = var->var_part[pos - 1];
6548 var->var_part[pos].offset = offset;
6549 var->var_part[pos].loc_chain = NULL;
6550 var->var_part[pos].cur_loc = NULL;
6553 /* Delete the location from the list. */
6554 nextp = &var->var_part[pos].loc_chain;
6555 for (node = var->var_part[pos].loc_chain; node; node = next)
6558 if ((REG_P (node->loc) && REG_P (loc)
6559 && REGNO (node->loc) == REGNO (loc))
6560 || rtx_equal_p (node->loc, loc))
6562 /* Save these values, to assign to the new node, before
6563 deleting this one. */
6564 if (node->init > initialized)
6565 initialized = node->init;
6566 if (node->set_src != NULL && set_src == NULL)
6567 set_src = node->set_src;
6568 if (var->var_part[pos].cur_loc == node->loc)
6570 var->var_part[pos].cur_loc = NULL;
6571 var->cur_loc_changed = true;
6573 pool_free (loc_chain_pool, node);
6578 nextp = &node->next;
6581 nextp = &var->var_part[pos].loc_chain;
6584 /* Add the location to the beginning. */
6585 node = (location_chain) pool_alloc (loc_chain_pool);
6587 node->init = initialized;
6588 node->set_src = set_src;
6589 node->next = *nextp;
6592 if (onepart && emit_notes)
6593 add_value_chains (var->dv, loc);
6595 /* If no location was emitted do so. */
6596 if (var->var_part[pos].cur_loc == NULL)
6597 variable_was_changed (var, set);
6602 /* Set the part of variable's location in the dataflow set SET. The
6603 variable part is specified by variable's declaration in DV and
6604 offset OFFSET and the part's location by LOC. IOPT should be
6605 NO_INSERT if the variable is known to be in SET already and the
6606 variable hash table must not be resized, and INSERT otherwise. */
6609 set_variable_part (dataflow_set *set, rtx loc,
6610 decl_or_value dv, HOST_WIDE_INT offset,
6611 enum var_init_status initialized, rtx set_src,
6612 enum insert_option iopt)
6616 if (iopt == NO_INSERT)
6617 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6620 slot = shared_hash_find_slot (set->vars, dv);
6622 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
6624 slot = set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
6627 /* Remove all recorded register locations for the given variable part
6628 from dataflow set SET, except for those that are identical to loc.
6629 The variable part is specified by variable's declaration or value
6630 DV and offset OFFSET. */
6633 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
6634 HOST_WIDE_INT offset, rtx set_src)
6636 variable var = (variable) *slot;
6637 int pos = find_variable_location_part (var, offset, NULL);
6641 location_chain node, next;
6643 /* Remove the register locations from the dataflow set. */
6644 next = var->var_part[pos].loc_chain;
6645 for (node = next; node; node = next)
6648 if (node->loc != loc
6649 && (!flag_var_tracking_uninit
6652 || !rtx_equal_p (set_src, node->set_src)))
6654 if (REG_P (node->loc))
6659 /* Remove the variable part from the register's
6660 list, but preserve any other variable parts
6661 that might be regarded as live in that same
6663 anextp = &set->regs[REGNO (node->loc)];
6664 for (anode = *anextp; anode; anode = anext)
6666 anext = anode->next;
6667 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
6668 && anode->offset == offset)
6670 pool_free (attrs_pool, anode);
6674 anextp = &anode->next;
6678 slot = delete_slot_part (set, node->loc, slot, offset);
6686 /* Remove all recorded register locations for the given variable part
6687 from dataflow set SET, except for those that are identical to loc.
6688 The variable part is specified by variable's declaration or value
6689 DV and offset OFFSET. */
6692 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6693 HOST_WIDE_INT offset, rtx set_src)
6697 if (!dv_as_opaque (dv)
6698 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
6701 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6705 slot = clobber_slot_part (set, loc, slot, offset, set_src);
6708 /* Delete the part of variable's location from dataflow set SET. The
6709 variable part is specified by its SET->vars slot SLOT and offset
6710 OFFSET and the part's location by LOC. */
6713 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
6714 HOST_WIDE_INT offset)
6716 variable var = (variable) *slot;
6717 int pos = find_variable_location_part (var, offset, NULL);
6721 location_chain node, next;
6722 location_chain *nextp;
6725 if (shared_var_p (var, set->vars))
6727 /* If the variable contains the location part we have to
6728 make a copy of the variable. */
6729 for (node = var->var_part[pos].loc_chain; node;
6732 if ((REG_P (node->loc) && REG_P (loc)
6733 && REGNO (node->loc) == REGNO (loc))
6734 || rtx_equal_p (node->loc, loc))
6736 slot = unshare_variable (set, slot, var,
6737 VAR_INIT_STATUS_UNKNOWN);
6738 var = (variable)*slot;
6744 /* Delete the location part. */
6746 nextp = &var->var_part[pos].loc_chain;
6747 for (node = *nextp; node; node = next)
6750 if ((REG_P (node->loc) && REG_P (loc)
6751 && REGNO (node->loc) == REGNO (loc))
6752 || rtx_equal_p (node->loc, loc))
6754 if (emit_notes && pos == 0 && dv_onepart_p (var->dv))
6755 remove_value_chains (var->dv, node->loc);
6756 /* If we have deleted the location which was last emitted
6757 we have to emit new location so add the variable to set
6758 of changed variables. */
6759 if (var->var_part[pos].cur_loc == node->loc)
6762 var->var_part[pos].cur_loc = NULL;
6763 var->cur_loc_changed = true;
6765 pool_free (loc_chain_pool, node);
6770 nextp = &node->next;
6773 if (var->var_part[pos].loc_chain == NULL)
6778 var->cur_loc_changed = true;
6779 while (pos < var->n_var_parts)
6781 var->var_part[pos] = var->var_part[pos + 1];
6786 variable_was_changed (var, set);
6792 /* Delete the part of variable's location from dataflow set SET. The
6793 variable part is specified by variable's declaration or value DV
6794 and offset OFFSET and the part's location by LOC. */
6797 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6798 HOST_WIDE_INT offset)
6800 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
6804 slot = delete_slot_part (set, loc, slot, offset);
6807 /* Structure for passing some other parameters to function
6808 vt_expand_loc_callback. */
6809 struct expand_loc_callback_data
6811 /* The variables and values active at this point. */
6814 /* True in vt_expand_loc_dummy calls, no rtl should be allocated.
6815 Non-NULL should be returned if vt_expand_loc would return
6816 non-NULL in that case, NULL otherwise. cur_loc_changed should be
6817 computed and cur_loc recomputed when possible (but just once
6818 per emit_notes_for_changes call). */
6821 /* True if expansion of subexpressions had to recompute some
6822 VALUE/DEBUG_EXPR_DECL's cur_loc or used a VALUE/DEBUG_EXPR_DECL
6823 whose cur_loc has been already recomputed during current
6824 emit_notes_for_changes call. */
6825 bool cur_loc_changed;
6828 /* Callback for cselib_expand_value, that looks for expressions
6829 holding the value in the var-tracking hash tables. Return X for
6830 standard processing, anything else is to be used as-is. */
6833 vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
6835 struct expand_loc_callback_data *elcd
6836 = (struct expand_loc_callback_data *) data;
6837 bool dummy = elcd->dummy;
6838 bool cur_loc_changed = elcd->cur_loc_changed;
6842 rtx result, subreg, xret;
6844 switch (GET_CODE (x))
6849 if (cselib_dummy_expand_value_rtx_cb (SUBREG_REG (x), regs,
6851 vt_expand_loc_callback, data))
6857 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
6859 vt_expand_loc_callback, data);
6864 result = simplify_gen_subreg (GET_MODE (x), subreg,
6865 GET_MODE (SUBREG_REG (x)),
6868 /* Invalid SUBREGs are ok in debug info. ??? We could try
6869 alternate expansions for the VALUE as well. */
6871 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
6876 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
6881 dv = dv_from_value (x);
6889 if (VALUE_RECURSED_INTO (x))
6892 var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));
6896 if (dummy && dv_changed_p (dv))
6897 elcd->cur_loc_changed = true;
6901 if (var->n_var_parts == 0)
6904 elcd->cur_loc_changed = true;
6908 gcc_assert (var->n_var_parts == 1);
6910 VALUE_RECURSED_INTO (x) = true;
6913 if (var->var_part[0].cur_loc)
6917 if (cselib_dummy_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
6919 vt_expand_loc_callback, data))
6923 result = cselib_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
6925 vt_expand_loc_callback, data);
6927 set_dv_changed (dv, false);
6929 if (!result && dv_changed_p (dv))
6931 set_dv_changed (dv, false);
6932 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
6933 if (loc->loc == var->var_part[0].cur_loc)
6937 elcd->cur_loc_changed = cur_loc_changed;
6938 if (cselib_dummy_expand_value_rtx_cb (loc->loc, regs, max_depth,
6939 vt_expand_loc_callback,
6948 result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
6949 vt_expand_loc_callback, data);
6953 if (dummy && (result || var->var_part[0].cur_loc))
6954 var->cur_loc_changed = true;
6955 var->var_part[0].cur_loc = loc ? loc->loc : NULL_RTX;
6959 if (var->cur_loc_changed)
6960 elcd->cur_loc_changed = true;
6961 else if (!result && var->var_part[0].cur_loc == NULL_RTX)
6962 elcd->cur_loc_changed = cur_loc_changed;
6965 VALUE_RECURSED_INTO (x) = false;
6972 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
6976 vt_expand_loc (rtx loc, htab_t vars)
6978 struct expand_loc_callback_data data;
6980 if (!MAY_HAVE_DEBUG_INSNS)
6985 data.cur_loc_changed = false;
6986 loc = cselib_expand_value_rtx_cb (loc, scratch_regs, 5,
6987 vt_expand_loc_callback, &data);
6989 if (loc && MEM_P (loc))
6990 loc = targetm.delegitimize_address (loc);
6994 /* Like vt_expand_loc, but only return true/false (whether vt_expand_loc
6995 would succeed or not, without actually allocating new rtxes. */
6998 vt_expand_loc_dummy (rtx loc, htab_t vars, bool *pcur_loc_changed)
7000 struct expand_loc_callback_data data;
7003 gcc_assert (MAY_HAVE_DEBUG_INSNS);
7006 data.cur_loc_changed = false;
7007 ret = cselib_dummy_expand_value_rtx_cb (loc, scratch_regs, 5,
7008 vt_expand_loc_callback, &data);
7009 *pcur_loc_changed = data.cur_loc_changed;
7013 #ifdef ENABLE_RTL_CHECKING
7014 /* Used to verify that cur_loc_changed updating is safe. */
7015 static struct pointer_map_t *emitted_notes;
7018 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
7019 additional parameters: WHERE specifies whether the note shall be emitted
7020 before or after instruction INSN. */
7023 emit_note_insn_var_location (void **varp, void *data)
7025 variable var = (variable) *varp;
7026 rtx insn = ((emit_note_data *)data)->insn;
7027 enum emit_note_where where = ((emit_note_data *)data)->where;
7028 htab_t vars = ((emit_note_data *)data)->vars;
7030 int i, j, n_var_parts;
7032 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
7033 HOST_WIDE_INT last_limit;
7034 tree type_size_unit;
7035 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
7036 rtx loc[MAX_VAR_PARTS];
7040 if (dv_is_value_p (var->dv))
7041 goto value_or_debug_decl;
7043 decl = dv_as_decl (var->dv);
7045 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7046 goto value_or_debug_decl;
7051 if (!MAY_HAVE_DEBUG_INSNS)
7053 for (i = 0; i < var->n_var_parts; i++)
7054 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
7056 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
7057 var->cur_loc_changed = true;
7059 if (var->n_var_parts == 0)
7060 var->cur_loc_changed = true;
7062 #ifndef ENABLE_RTL_CHECKING
7063 if (!var->cur_loc_changed)
7066 for (i = 0; i < var->n_var_parts; i++)
7068 enum machine_mode mode, wider_mode;
7071 if (last_limit < var->var_part[i].offset)
7076 else if (last_limit > var->var_part[i].offset)
7078 offsets[n_var_parts] = var->var_part[i].offset;
7079 if (!var->var_part[i].cur_loc)
7084 loc2 = vt_expand_loc (var->var_part[i].cur_loc, vars);
7090 loc[n_var_parts] = loc2;
7091 mode = GET_MODE (var->var_part[i].cur_loc);
7092 if (mode == VOIDmode && dv_onepart_p (var->dv))
7093 mode = DECL_MODE (decl);
7094 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
7095 if (var->var_part[i].cur_loc == lc->loc)
7097 initialized = lc->init;
7101 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
7103 /* Attempt to merge adjacent registers or memory. */
7104 wider_mode = GET_MODE_WIDER_MODE (mode);
7105 for (j = i + 1; j < var->n_var_parts; j++)
7106 if (last_limit <= var->var_part[j].offset)
7108 if (j < var->n_var_parts
7109 && wider_mode != VOIDmode
7110 && var->var_part[j].cur_loc
7111 && mode == GET_MODE (var->var_part[j].cur_loc)
7112 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
7113 && last_limit == var->var_part[j].offset
7114 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
7115 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
7119 if (REG_P (loc[n_var_parts])
7120 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
7121 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
7122 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
7125 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
7126 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
7128 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
7129 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
7132 if (!REG_P (new_loc)
7133 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
7136 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
7139 else if (MEM_P (loc[n_var_parts])
7140 && GET_CODE (XEXP (loc2, 0)) == PLUS
7141 && REG_P (XEXP (XEXP (loc2, 0), 0))
7142 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
7144 if ((REG_P (XEXP (loc[n_var_parts], 0))
7145 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
7146 XEXP (XEXP (loc2, 0), 0))
7147 && INTVAL (XEXP (XEXP (loc2, 0), 1))
7148 == GET_MODE_SIZE (mode))
7149 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
7150 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
7151 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
7152 XEXP (XEXP (loc2, 0), 0))
7153 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
7154 + GET_MODE_SIZE (mode)
7155 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
7156 new_loc = adjust_address_nv (loc[n_var_parts],
7162 loc[n_var_parts] = new_loc;
7164 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
7170 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7171 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
7174 if (! flag_var_tracking_uninit)
7175 initialized = VAR_INIT_STATUS_INITIALIZED;
7179 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
7181 else if (n_var_parts == 1)
7185 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
7186 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
7190 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
7193 else if (n_var_parts)
7197 for (i = 0; i < n_var_parts; i++)
7199 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
7201 parallel = gen_rtx_PARALLEL (VOIDmode,
7202 gen_rtvec_v (n_var_parts, loc));
7203 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
7204 parallel, (int) initialized);
7207 #ifdef ENABLE_RTL_CHECKING
7210 void **note_slot = pointer_map_insert (emitted_notes, decl);
7211 rtx pnote = (rtx) *note_slot;
7212 if (!var->cur_loc_changed && (pnote || PAT_VAR_LOCATION_LOC (note_vl)))
7215 gcc_assert (rtx_equal_p (PAT_VAR_LOCATION_LOC (pnote),
7216 PAT_VAR_LOCATION_LOC (note_vl)));
7218 *note_slot = (void *) note_vl;
7220 if (!var->cur_loc_changed)
7224 if (where != EMIT_NOTE_BEFORE_INSN)
7226 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
7227 if (where == EMIT_NOTE_AFTER_CALL_INSN)
7228 NOTE_DURING_CALL_P (note) = true;
7231 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
7232 NOTE_VAR_LOCATION (note) = note_vl;
7235 set_dv_changed (var->dv, false);
7236 var->cur_loc_changed = false;
7237 gcc_assert (var->in_changed_variables);
7238 var->in_changed_variables = false;
7239 htab_clear_slot (changed_variables, varp);
7241 /* Continue traversing the hash table. */
7244 value_or_debug_decl:
7245 if (dv_changed_p (var->dv) && var->n_var_parts)
7248 bool cur_loc_changed;
7250 if (var->var_part[0].cur_loc
7251 && vt_expand_loc_dummy (var->var_part[0].cur_loc, vars,
7254 for (lc = var->var_part[0].loc_chain; lc; lc = lc->next)
7255 if (lc->loc != var->var_part[0].cur_loc
7256 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
7258 var->var_part[0].cur_loc = lc ? lc->loc : NULL_RTX;
7263 DEF_VEC_P (variable);
7264 DEF_VEC_ALLOC_P (variable, heap);
7266 /* Stack of variable_def pointers that need processing with
7267 check_changed_vars_2. */
7269 static VEC (variable, heap) *changed_variables_stack;
7271 /* VALUEs with no variables that need set_dv_changed (val, false)
7272 called before check_changed_vars_3. */
7274 static VEC (rtx, heap) *changed_values_stack;
7276 /* Helper function for check_changed_vars_1 and check_changed_vars_2. */
7279 check_changed_vars_0 (decl_or_value dv, htab_t htab)
7282 = (value_chain) htab_find_with_hash (value_chains, dv, dv_htab_hash (dv));
7286 for (vc = vc->next; vc; vc = vc->next)
7287 if (!dv_changed_p (vc->dv))
7290 = (variable) htab_find_with_hash (htab, vc->dv,
7291 dv_htab_hash (vc->dv));
7294 set_dv_changed (vc->dv, true);
7295 VEC_safe_push (variable, heap, changed_variables_stack, vcvar);
7297 else if (dv_is_value_p (vc->dv))
7299 set_dv_changed (vc->dv, true);
7300 VEC_safe_push (rtx, heap, changed_values_stack,
7301 dv_as_value (vc->dv));
7302 check_changed_vars_0 (vc->dv, htab);
7307 /* Populate changed_variables_stack with variable_def pointers
7308 that need variable_was_changed called on them. */
7311 check_changed_vars_1 (void **slot, void *data)
7313 variable var = (variable) *slot;
7314 htab_t htab = (htab_t) data;
7316 if (dv_is_value_p (var->dv)
7317 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7318 check_changed_vars_0 (var->dv, htab);
7322 /* Add VAR to changed_variables and also for VALUEs add recursively
7323 all DVs that aren't in changed_variables yet but reference the
7324 VALUE from its loc_chain. */
7327 check_changed_vars_2 (variable var, htab_t htab)
7329 variable_was_changed (var, NULL);
7330 if (dv_is_value_p (var->dv)
7331 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7332 check_changed_vars_0 (var->dv, htab);
7335 /* For each changed decl (except DEBUG_EXPR_DECLs) recompute
7336 cur_loc if needed (and cur_loc of all VALUEs and DEBUG_EXPR_DECLs
7337 it needs and are also in changed variables) and track whether
7338 cur_loc (or anything it uses to compute location) had to change
7339 during the current emit_notes_for_changes call. */
7342 check_changed_vars_3 (void **slot, void *data)
7344 variable var = (variable) *slot;
7345 htab_t vars = (htab_t) data;
7348 bool cur_loc_changed;
7350 if (dv_is_value_p (var->dv)
7351 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7354 for (i = 0; i < var->n_var_parts; i++)
7356 if (var->var_part[i].cur_loc
7357 && vt_expand_loc_dummy (var->var_part[i].cur_loc, vars,
7360 if (cur_loc_changed)
7361 var->cur_loc_changed = true;
7364 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
7365 if (lc->loc != var->var_part[i].cur_loc
7366 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
7368 if (lc || var->var_part[i].cur_loc)
7369 var->cur_loc_changed = true;
7370 var->var_part[i].cur_loc = lc ? lc->loc : NULL_RTX;
7372 if (var->n_var_parts == 0)
7373 var->cur_loc_changed = true;
7377 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
7378 CHANGED_VARIABLES and delete this chain. WHERE specifies whether the notes
7379 shall be emitted before of after instruction INSN. */
7382 emit_notes_for_changes (rtx insn, enum emit_note_where where,
7385 emit_note_data data;
7386 htab_t htab = shared_hash_htab (vars);
7388 if (!htab_elements (changed_variables))
7391 if (MAY_HAVE_DEBUG_INSNS)
7393 /* Unfortunately this has to be done in two steps, because
7394 we can't traverse a hashtab into which we are inserting
7395 through variable_was_changed. */
7396 htab_traverse (changed_variables, check_changed_vars_1, htab);
7397 while (VEC_length (variable, changed_variables_stack) > 0)
7398 check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
7400 while (VEC_length (rtx, changed_values_stack) > 0)
7401 set_dv_changed (dv_from_value (VEC_pop (rtx, changed_values_stack)),
7403 htab_traverse (changed_variables, check_changed_vars_3, htab);
7410 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
7413 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
7414 same variable in hash table DATA or is not there at all. */
7417 emit_notes_for_differences_1 (void **slot, void *data)
7419 htab_t new_vars = (htab_t) data;
7420 variable old_var, new_var;
7422 old_var = (variable) *slot;
7423 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
7424 dv_htab_hash (old_var->dv));
7428 /* Variable has disappeared. */
7431 empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
7432 empty_var->dv = old_var->dv;
7433 empty_var->refcount = 0;
7434 empty_var->n_var_parts = 0;
7435 empty_var->cur_loc_changed = false;
7436 empty_var->in_changed_variables = false;
7437 if (dv_onepart_p (old_var->dv))
7441 gcc_assert (old_var->n_var_parts == 1);
7442 for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
7443 remove_value_chains (old_var->dv, lc->loc);
7445 variable_was_changed (empty_var, NULL);
7446 /* Continue traversing the hash table. */
7449 if (variable_different_p (old_var, new_var))
7451 if (dv_onepart_p (old_var->dv))
7453 location_chain lc1, lc2;
7455 gcc_assert (old_var->n_var_parts == 1);
7456 gcc_assert (new_var->n_var_parts == 1);
7457 lc1 = old_var->var_part[0].loc_chain;
7458 lc2 = new_var->var_part[0].loc_chain;
7461 && ((REG_P (lc1->loc) && REG_P (lc2->loc))
7462 || rtx_equal_p (lc1->loc, lc2->loc)))
7467 for (; lc2; lc2 = lc2->next)
7468 add_value_chains (old_var->dv, lc2->loc);
7469 for (; lc1; lc1 = lc1->next)
7470 remove_value_chains (old_var->dv, lc1->loc);
7472 variable_was_changed (new_var, NULL);
7474 /* Update cur_loc. */
7475 if (old_var != new_var)
7478 for (i = 0; i < new_var->n_var_parts; i++)
7480 new_var->var_part[i].cur_loc = NULL;
7481 if (old_var->n_var_parts != new_var->n_var_parts
7482 || old_var->var_part[i].offset != new_var->var_part[i].offset)
7483 new_var->cur_loc_changed = true;
7484 else if (old_var->var_part[i].cur_loc != NULL)
7487 rtx cur_loc = old_var->var_part[i].cur_loc;
7489 for (lc = new_var->var_part[i].loc_chain; lc; lc = lc->next)
7490 if (lc->loc == cur_loc
7491 || rtx_equal_p (cur_loc, lc->loc))
7493 new_var->var_part[i].cur_loc = lc->loc;
7497 new_var->cur_loc_changed = true;
7502 /* Continue traversing the hash table. */
7506 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
7510 emit_notes_for_differences_2 (void **slot, void *data)
7512 htab_t old_vars = (htab_t) data;
7513 variable old_var, new_var;
7515 new_var = (variable) *slot;
7516 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
7517 dv_htab_hash (new_var->dv));
7521 /* Variable has appeared. */
7522 if (dv_onepart_p (new_var->dv))
7526 gcc_assert (new_var->n_var_parts == 1);
7527 for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
7528 add_value_chains (new_var->dv, lc->loc);
7530 for (i = 0; i < new_var->n_var_parts; i++)
7531 new_var->var_part[i].cur_loc = NULL;
7532 variable_was_changed (new_var, NULL);
7535 /* Continue traversing the hash table. */
7539 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
7543 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
7544 dataflow_set *new_set)
7546 htab_traverse (shared_hash_htab (old_set->vars),
7547 emit_notes_for_differences_1,
7548 shared_hash_htab (new_set->vars));
7549 htab_traverse (shared_hash_htab (new_set->vars),
7550 emit_notes_for_differences_2,
7551 shared_hash_htab (old_set->vars));
7552 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
7555 /* Emit the notes for changes of location parts in the basic block BB. */
7558 emit_notes_in_bb (basic_block bb, dataflow_set *set)
7561 micro_operation *mo;
7563 dataflow_set_clear (set);
7564 dataflow_set_copy (set, &VTI (bb)->in);
7566 for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
7568 rtx insn = mo->insn;
7573 dataflow_set_clear_at_call (set);
7574 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
7579 rtx loc = mo->u.loc;
7582 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
7584 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
7586 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
7592 rtx loc = mo->u.loc;
7596 if (GET_CODE (loc) == CONCAT)
7598 val = XEXP (loc, 0);
7599 vloc = XEXP (loc, 1);
7607 var = PAT_VAR_LOCATION_DECL (vloc);
7609 clobber_variable_part (set, NULL_RTX,
7610 dv_from_decl (var), 0, NULL_RTX);
7613 if (VAL_NEEDS_RESOLUTION (loc))
7614 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
7615 set_variable_part (set, val, dv_from_decl (var), 0,
7616 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
7619 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
7620 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
7621 dv_from_decl (var), 0,
7622 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
7625 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
7631 rtx loc = mo->u.loc;
7632 rtx val, vloc, uloc;
7634 vloc = uloc = XEXP (loc, 1);
7635 val = XEXP (loc, 0);
7637 if (GET_CODE (val) == CONCAT)
7639 uloc = XEXP (val, 1);
7640 val = XEXP (val, 0);
7643 if (VAL_NEEDS_RESOLUTION (loc))
7644 val_resolve (set, val, vloc, insn);
7646 val_store (set, val, uloc, insn, false);
7648 if (VAL_HOLDS_TRACK_EXPR (loc))
7650 if (GET_CODE (uloc) == REG)
7651 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
7653 else if (GET_CODE (uloc) == MEM)
7654 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
7658 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
7664 rtx loc = mo->u.loc;
7665 rtx val, vloc, uloc, reverse = NULL_RTX;
7668 if (VAL_EXPR_HAS_REVERSE (loc))
7670 reverse = XEXP (loc, 1);
7671 vloc = XEXP (loc, 0);
7673 uloc = XEXP (vloc, 1);
7674 val = XEXP (vloc, 0);
7677 if (GET_CODE (val) == CONCAT)
7679 vloc = XEXP (val, 1);
7680 val = XEXP (val, 0);
7683 if (GET_CODE (vloc) == SET)
7685 rtx vsrc = SET_SRC (vloc);
7687 gcc_assert (val != vsrc);
7688 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
7690 vloc = SET_DEST (vloc);
7692 if (VAL_NEEDS_RESOLUTION (loc))
7693 val_resolve (set, val, vsrc, insn);
7695 else if (VAL_NEEDS_RESOLUTION (loc))
7697 gcc_assert (GET_CODE (uloc) == SET
7698 && GET_CODE (SET_SRC (uloc)) == REG);
7699 val_resolve (set, val, SET_SRC (uloc), insn);
7702 if (VAL_HOLDS_TRACK_EXPR (loc))
7704 if (VAL_EXPR_IS_CLOBBERED (loc))
7707 var_reg_delete (set, uloc, true);
7708 else if (MEM_P (uloc))
7709 var_mem_delete (set, uloc, true);
7713 bool copied_p = VAL_EXPR_IS_COPIED (loc);
7715 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
7717 if (GET_CODE (uloc) == SET)
7719 set_src = SET_SRC (uloc);
7720 uloc = SET_DEST (uloc);
7725 status = find_src_status (set, set_src);
7727 set_src = find_src_set_src (set, set_src);
7731 var_reg_delete_and_set (set, uloc, !copied_p,
7733 else if (MEM_P (uloc))
7734 var_mem_delete_and_set (set, uloc, !copied_p,
7738 else if (REG_P (uloc))
7739 var_regno_delete (set, REGNO (uloc));
7741 val_store (set, val, vloc, insn, true);
7744 val_store (set, XEXP (reverse, 0), XEXP (reverse, 1),
7747 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7754 rtx loc = mo->u.loc;
7757 if (GET_CODE (loc) == SET)
7759 set_src = SET_SRC (loc);
7760 loc = SET_DEST (loc);
7764 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7767 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7770 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7777 rtx loc = mo->u.loc;
7778 enum var_init_status src_status;
7781 if (GET_CODE (loc) == SET)
7783 set_src = SET_SRC (loc);
7784 loc = SET_DEST (loc);
7787 src_status = find_src_status (set, set_src);
7788 set_src = find_src_set_src (set, set_src);
7791 var_reg_delete_and_set (set, loc, false, src_status, set_src);
7793 var_mem_delete_and_set (set, loc, false, src_status, set_src);
7795 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7802 rtx loc = mo->u.loc;
7805 var_reg_delete (set, loc, false);
7807 var_mem_delete (set, loc, false);
7809 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
7815 rtx loc = mo->u.loc;
7818 var_reg_delete (set, loc, true);
7820 var_mem_delete (set, loc, true);
7822 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7828 set->stack_adjust += mo->u.adjust;
7834 /* Emit notes for the whole function. */
7837 vt_emit_notes (void)
7842 #ifdef ENABLE_RTL_CHECKING
7843 emitted_notes = pointer_map_create ();
7845 gcc_assert (!htab_elements (changed_variables));
7847 /* Free memory occupied by the out hash tables, as they aren't used
7850 dataflow_set_clear (&VTI (bb)->out);
7852 /* Enable emitting notes by functions (mainly by set_variable_part and
7853 delete_variable_part). */
7856 if (MAY_HAVE_DEBUG_INSNS)
7861 for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
7862 add_cselib_value_chains (dv_from_value (val));
7863 changed_variables_stack = VEC_alloc (variable, heap, 40);
7864 changed_values_stack = VEC_alloc (rtx, heap, 40);
7867 dataflow_set_init (&cur);
7871 /* Emit the notes for changes of variable locations between two
7872 subsequent basic blocks. */
7873 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
7875 /* Emit the notes for the changes in the basic block itself. */
7876 emit_notes_in_bb (bb, &cur);
7878 /* Free memory occupied by the in hash table, we won't need it
7880 dataflow_set_clear (&VTI (bb)->in);
7882 #ifdef ENABLE_CHECKING
7883 htab_traverse (shared_hash_htab (cur.vars),
7884 emit_notes_for_differences_1,
7885 shared_hash_htab (empty_shared_hash));
7886 if (MAY_HAVE_DEBUG_INSNS)
7891 for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
7892 remove_cselib_value_chains (dv_from_value (val));
7893 gcc_assert (htab_elements (value_chains) == 0);
7896 dataflow_set_destroy (&cur);
7898 if (MAY_HAVE_DEBUG_INSNS)
7900 VEC_free (variable, heap, changed_variables_stack);
7901 VEC_free (rtx, heap, changed_values_stack);
7904 #ifdef ENABLE_RTL_CHECKING
7905 pointer_map_destroy (emitted_notes);
7910 /* If there is a declaration and offset associated with register/memory RTL
7911 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
7914 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
7918 if (REG_ATTRS (rtl))
7920 *declp = REG_EXPR (rtl);
7921 *offsetp = REG_OFFSET (rtl);
7925 else if (MEM_P (rtl))
7927 if (MEM_ATTRS (rtl))
7929 *declp = MEM_EXPR (rtl);
7930 *offsetp = INT_MEM_OFFSET (rtl);
7937 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
7940 vt_add_function_parameters (void)
7944 for (parm = DECL_ARGUMENTS (current_function_decl);
7945 parm; parm = TREE_CHAIN (parm))
7947 rtx decl_rtl = DECL_RTL_IF_SET (parm);
7948 rtx incoming = DECL_INCOMING_RTL (parm);
7950 enum machine_mode mode;
7951 HOST_WIDE_INT offset;
7955 if (TREE_CODE (parm) != PARM_DECL)
7958 if (!DECL_NAME (parm))
7961 if (!decl_rtl || !incoming)
7964 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
7967 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
7969 if (REG_P (incoming) || MEM_P (incoming))
7971 /* This means argument is passed by invisible reference. */
7974 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
7978 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
7980 offset += byte_lowpart_offset (GET_MODE (incoming),
7981 GET_MODE (decl_rtl));
7990 /* Assume that DECL_RTL was a pseudo that got spilled to
7991 memory. The spill slot sharing code will force the
7992 memory to reference spill_slot_decl (%sfp), so we don't
7993 match above. That's ok, the pseudo must have referenced
7994 the entire parameter, so just reset OFFSET. */
7995 gcc_assert (decl == get_spill_slot_decl (false));
7999 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
8002 out = &VTI (ENTRY_BLOCK_PTR)->out;
8004 dv = dv_from_decl (parm);
8006 if (target_for_debug_bind (parm)
8007 /* We can't deal with these right now, because this kind of
8008 variable is single-part. ??? We could handle parallels
8009 that describe multiple locations for the same single
8010 value, but ATM we don't. */
8011 && GET_CODE (incoming) != PARALLEL)
8015 /* ??? We shouldn't ever hit this, but it may happen because
8016 arguments passed by invisible reference aren't dealt with
8017 above: incoming-rtl will have Pmode rather than the
8018 expected mode for the type. */
8022 val = cselib_lookup (var_lowpart (mode, incoming), mode, true);
8024 /* ??? Float-typed values in memory are not handled by
8028 preserve_value (val);
8029 set_variable_part (out, val->val_rtx, dv, offset,
8030 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8031 dv = dv_from_value (val->val_rtx);
8035 if (REG_P (incoming))
8037 incoming = var_lowpart (mode, incoming);
8038 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
8039 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
8041 set_variable_part (out, incoming, dv, offset,
8042 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8044 else if (MEM_P (incoming))
8046 incoming = var_lowpart (mode, incoming);
8047 set_variable_part (out, incoming, dv, offset,
8048 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8052 if (MAY_HAVE_DEBUG_INSNS)
8054 cselib_preserve_only_values ();
8055 cselib_reset_table (cselib_get_next_uid ());
8060 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
8063 fp_setter (rtx insn)
8065 rtx pat = PATTERN (insn);
8066 if (RTX_FRAME_RELATED_P (insn))
8068 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
8070 pat = XEXP (expr, 0);
8072 if (GET_CODE (pat) == SET)
8073 return SET_DEST (pat) == hard_frame_pointer_rtx;
8074 else if (GET_CODE (pat) == PARALLEL)
8077 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
8078 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
8079 && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
8085 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
8086 ensure it isn't flushed during cselib_reset_table.
8087 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
8088 has been eliminated. */
8091 vt_init_cfa_base (void)
8095 #ifdef FRAME_POINTER_CFA_OFFSET
8096 cfa_base_rtx = frame_pointer_rtx;
8098 cfa_base_rtx = arg_pointer_rtx;
8100 if (cfa_base_rtx == hard_frame_pointer_rtx
8101 || !fixed_regs[REGNO (cfa_base_rtx)])
8103 cfa_base_rtx = NULL_RTX;
8106 if (!MAY_HAVE_DEBUG_INSNS)
8109 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
8111 preserve_value (val);
8112 cselib_preserve_cfa_base_value (val);
8113 var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,
8114 VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx),
8115 0, NULL_RTX, INSERT);
8118 /* Allocate and initialize the data structures for variable tracking
8119 and parse the RTL to get the micro operations. */
8122 vt_initialize (void)
8124 basic_block bb, prologue_bb = NULL;
8125 HOST_WIDE_INT fp_cfa_offset = -1;
8127 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
8129 attrs_pool = create_alloc_pool ("attrs_def pool",
8130 sizeof (struct attrs_def), 1024);
8131 var_pool = create_alloc_pool ("variable_def pool",
8132 sizeof (struct variable_def)
8133 + (MAX_VAR_PARTS - 1)
8134 * sizeof (((variable)NULL)->var_part[0]), 64);
8135 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
8136 sizeof (struct location_chain_def),
8138 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
8139 sizeof (struct shared_hash_def), 256);
8140 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
8141 empty_shared_hash->refcount = 1;
8142 empty_shared_hash->htab
8143 = htab_create (1, variable_htab_hash, variable_htab_eq,
8144 variable_htab_free);
8145 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
8146 variable_htab_free);
8147 if (MAY_HAVE_DEBUG_INSNS)
8149 value_chain_pool = create_alloc_pool ("value_chain_def pool",
8150 sizeof (struct value_chain_def),
8152 value_chains = htab_create (32, value_chain_htab_hash,
8153 value_chain_htab_eq, NULL);
8156 /* Init the IN and OUT sets. */
8159 VTI (bb)->visited = false;
8160 VTI (bb)->flooded = false;
8161 dataflow_set_init (&VTI (bb)->in);
8162 dataflow_set_init (&VTI (bb)->out);
8163 VTI (bb)->permp = NULL;
8166 if (MAY_HAVE_DEBUG_INSNS)
8168 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
8169 scratch_regs = BITMAP_ALLOC (NULL);
8170 valvar_pool = create_alloc_pool ("small variable_def pool",
8171 sizeof (struct variable_def), 256);
8172 preserved_values = VEC_alloc (rtx, heap, 256);
8176 scratch_regs = NULL;
8180 if (!frame_pointer_needed)
8184 if (!vt_stack_adjustments ())
8187 #ifdef FRAME_POINTER_CFA_OFFSET
8188 reg = frame_pointer_rtx;
8190 reg = arg_pointer_rtx;
8192 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
8195 if (GET_CODE (elim) == PLUS)
8196 elim = XEXP (elim, 0);
8197 if (elim == stack_pointer_rtx)
8198 vt_init_cfa_base ();
8201 else if (!crtl->stack_realign_tried)
8205 #ifdef FRAME_POINTER_CFA_OFFSET
8206 reg = frame_pointer_rtx;
8207 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
8209 reg = arg_pointer_rtx;
8210 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
8212 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
8215 if (GET_CODE (elim) == PLUS)
8217 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
8218 elim = XEXP (elim, 0);
8220 if (elim != hard_frame_pointer_rtx)
8223 prologue_bb = single_succ (ENTRY_BLOCK_PTR);
8227 hard_frame_pointer_adjustment = -1;
8232 HOST_WIDE_INT pre, post = 0;
8233 basic_block first_bb, last_bb;
8235 if (MAY_HAVE_DEBUG_INSNS)
8237 cselib_record_sets_hook = add_with_sets;
8238 if (dump_file && (dump_flags & TDF_DETAILS))
8239 fprintf (dump_file, "first value: %i\n",
8240 cselib_get_next_uid ());
8247 if (bb->next_bb == EXIT_BLOCK_PTR
8248 || ! single_pred_p (bb->next_bb))
8250 e = find_edge (bb, bb->next_bb);
8251 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
8257 /* Add the micro-operations to the vector. */
8258 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
8260 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
8261 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
8262 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
8263 insn = NEXT_INSN (insn))
8267 if (!frame_pointer_needed)
8269 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
8273 mo.type = MO_ADJUST;
8276 if (dump_file && (dump_flags & TDF_DETAILS))
8277 log_op_type (PATTERN (insn), bb, insn,
8278 MO_ADJUST, dump_file);
8279 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
8281 VTI (bb)->out.stack_adjust += pre;
8285 cselib_hook_called = false;
8286 adjust_insn (bb, insn);
8287 if (MAY_HAVE_DEBUG_INSNS)
8289 cselib_process_insn (insn);
8290 if (dump_file && (dump_flags & TDF_DETAILS))
8292 print_rtl_single (dump_file, insn);
8293 dump_cselib_table (dump_file);
8296 if (!cselib_hook_called)
8297 add_with_sets (insn, 0, 0);
8300 if (!frame_pointer_needed && post)
8303 mo.type = MO_ADJUST;
8306 if (dump_file && (dump_flags & TDF_DETAILS))
8307 log_op_type (PATTERN (insn), bb, insn,
8308 MO_ADJUST, dump_file);
8309 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
8311 VTI (bb)->out.stack_adjust += post;
8314 if (bb == prologue_bb
8315 && hard_frame_pointer_adjustment == -1
8316 && RTX_FRAME_RELATED_P (insn)
8317 && fp_setter (insn))
8319 vt_init_cfa_base ();
8320 hard_frame_pointer_adjustment = fp_cfa_offset;
8324 gcc_assert (offset == VTI (bb)->out.stack_adjust);
8329 if (MAY_HAVE_DEBUG_INSNS)
8331 cselib_preserve_only_values ();
8332 cselib_reset_table (cselib_get_next_uid ());
8333 cselib_record_sets_hook = NULL;
8337 hard_frame_pointer_adjustment = -1;
8338 VTI (ENTRY_BLOCK_PTR)->flooded = true;
8339 vt_add_function_parameters ();
8340 cfa_base_rtx = NULL_RTX;
8344 /* Get rid of all debug insns from the insn stream. */
8347 delete_debug_insns (void)
8352 if (!MAY_HAVE_DEBUG_INSNS)
8357 FOR_BB_INSNS_SAFE (bb, insn, next)
8358 if (DEBUG_INSN_P (insn))
8363 /* Run a fast, BB-local only version of var tracking, to take care of
8364 information that we don't do global analysis on, such that not all
8365 information is lost. If SKIPPED holds, we're skipping the global
8366 pass entirely, so we should try to use information it would have
8367 handled as well.. */
8370 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
8372 /* ??? Just skip it all for now. */
8373 delete_debug_insns ();
8376 /* Free the data structures needed for variable tracking. */
8385 VEC_free (micro_operation, heap, VTI (bb)->mos);
8390 dataflow_set_destroy (&VTI (bb)->in);
8391 dataflow_set_destroy (&VTI (bb)->out);
8392 if (VTI (bb)->permp)
8394 dataflow_set_destroy (VTI (bb)->permp);
8395 XDELETE (VTI (bb)->permp);
8398 free_aux_for_blocks ();
8399 htab_delete (empty_shared_hash->htab);
8400 htab_delete (changed_variables);
8401 free_alloc_pool (attrs_pool);
8402 free_alloc_pool (var_pool);
8403 free_alloc_pool (loc_chain_pool);
8404 free_alloc_pool (shared_hash_pool);
8406 if (MAY_HAVE_DEBUG_INSNS)
8408 htab_delete (value_chains);
8409 free_alloc_pool (value_chain_pool);
8410 free_alloc_pool (valvar_pool);
8411 VEC_free (rtx, heap, preserved_values);
8413 BITMAP_FREE (scratch_regs);
8414 scratch_regs = NULL;
8418 XDELETEVEC (vui_vec);
8423 /* The entry point to variable tracking pass. */
8425 static inline unsigned int
8426 variable_tracking_main_1 (void)
8430 if (flag_var_tracking_assignments < 0)
8432 delete_debug_insns ();
8436 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
8438 vt_debug_insns_local (true);
8442 mark_dfs_back_edges ();
8443 if (!vt_initialize ())
8446 vt_debug_insns_local (true);
8450 success = vt_find_locations ();
8452 if (!success && flag_var_tracking_assignments > 0)
8456 delete_debug_insns ();
8458 /* This is later restored by our caller. */
8459 flag_var_tracking_assignments = 0;
8461 success = vt_initialize ();
8462 gcc_assert (success);
8464 success = vt_find_locations ();
8470 vt_debug_insns_local (false);
8474 if (dump_file && (dump_flags & TDF_DETAILS))
8476 dump_dataflow_sets ();
8477 dump_flow_info (dump_file, dump_flags);
8483 vt_debug_insns_local (false);
8488 variable_tracking_main (void)
8491 int save = flag_var_tracking_assignments;
8493 ret = variable_tracking_main_1 ();
8495 flag_var_tracking_assignments = save;
8501 gate_handle_var_tracking (void)
8503 return (flag_var_tracking);
8508 struct rtl_opt_pass pass_variable_tracking =
8512 "vartrack", /* name */
8513 gate_handle_var_tracking, /* gate */
8514 variable_tracking_main, /* execute */
8517 0, /* static_pass_number */
8518 TV_VAR_TRACKING, /* tv_id */
8519 0, /* properties_required */
8520 0, /* properties_provided */
8521 0, /* properties_destroyed */
8522 0, /* todo_flags_start */
8523 TODO_dump_func | TODO_verify_rtl_sharing/* todo_flags_finish */