1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 < set < clobber < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offset of variables addressed using stack pointer),
48 the table of structures describing the locations of parts of a variable
49 and for each physical register a linked list for each physical register.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 effective deleting appropriate variable parts when we set or clobber the
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short so it is a good data structure here.
58 For example in the following code, register allocator may assign same
59 register to variables A and B, and both of them are stored in the same
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in RTL code. Each such a note describes
74 the location of one variable at the point in instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction, we only emit these notes where the location of variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for a large variables
85 which consist of several parts, for example long long).
91 #include "coretypes.h"
95 #include "hard-reg-set.h"
96 #include "basic-block.h"
99 #include "insn-config.h"
102 #include "alloc-pool.h"
108 #include "tree-pass.h"
109 #include "tree-flow.h"
114 #include "diagnostic.h"
115 #include "tree-pretty-print.h"
116 #include "pointer-set.h"
119 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code
120 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
121 Currently the value is the same as IDENTIFIER_NODE, which has such
122 a property. If this compile time assertion ever fails, make sure that
123 the new tree code that equals (int) VALUE has the same property. */
124 extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
126 /* Type of micro operation. */
127 enum micro_operation_type
129 MO_USE, /* Use location (REG or MEM). */
130 MO_USE_NO_VAR,/* Use location which is not associated with a variable
131 or the variable is not trackable. */
132 MO_VAL_USE, /* Use location which is associated with a value. */
133 MO_VAL_LOC, /* Use location which appears in a debug insn. */
134 MO_VAL_SET, /* Set location associated with a value. */
135 MO_SET, /* Set location. */
136 MO_COPY, /* Copy the same portion of a variable from one
137 location to another. */
138 MO_CLOBBER, /* Clobber location. */
139 MO_CALL, /* Call insn. */
140 MO_ADJUST /* Adjust stack pointer. */
144 static const char * const ATTRIBUTE_UNUSED
145 micro_operation_type_name[] = {
158 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
159 Notes emitted as AFTER_CALL are to take effect during the call,
160 rather than after the call. */
163 EMIT_NOTE_BEFORE_INSN,
164 EMIT_NOTE_AFTER_INSN,
165 EMIT_NOTE_AFTER_CALL_INSN
168 /* Structure holding information about micro operation. */
169 typedef struct micro_operation_def
171 /* Type of micro operation. */
172 enum micro_operation_type type;
174 /* The instruction which the micro operation is in, for MO_USE,
175 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
176 instruction or note in the original flow (before any var-tracking
177 notes are inserted, to simplify emission of notes), for MO_SET
182 /* Location. For MO_SET and MO_COPY, this is the SET that
183 performs the assignment, if known, otherwise it is the target
184 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
185 CONCAT of the VALUE and the LOC associated with it. For
186 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
187 associated with it. */
190 /* Stack adjustment. */
191 HOST_WIDE_INT adjust;
195 DEF_VEC_O(micro_operation);
196 DEF_VEC_ALLOC_O(micro_operation,heap);
198 /* A declaration of a variable, or an RTL value being handled like a
200 typedef void *decl_or_value;
202 /* Structure for passing some other parameters to function
203 emit_note_insn_var_location. */
204 typedef struct emit_note_data_def
206 /* The instruction which the note will be emitted before/after. */
209 /* Where the note will be emitted (before/after insn)? */
210 enum emit_note_where where;
212 /* The variables and values active at this point. */
216 /* Description of location of a part of a variable. The content of a physical
217 register is described by a chain of these structures.
218 The chains are pretty short (usually 1 or 2 elements) and thus
219 chain is the best data structure. */
220 typedef struct attrs_def
222 /* Pointer to next member of the list. */
223 struct attrs_def *next;
225 /* The rtx of register. */
228 /* The declaration corresponding to LOC. */
231 /* Offset from start of DECL. */
232 HOST_WIDE_INT offset;
235 /* Structure holding a refcounted hash table. If refcount > 1,
236 it must be first unshared before modified. */
237 typedef struct shared_hash_def
239 /* Reference count. */
242 /* Actual hash table. */
246 /* Structure holding the IN or OUT set for a basic block. */
247 typedef struct dataflow_set_def
249 /* Adjustment of stack offset. */
250 HOST_WIDE_INT stack_adjust;
252 /* Attributes for registers (lists of attrs). */
253 attrs regs[FIRST_PSEUDO_REGISTER];
255 /* Variable locations. */
258 /* Vars that is being traversed. */
259 shared_hash traversed_vars;
262 /* The structure (one for each basic block) containing the information
263 needed for variable tracking. */
264 typedef struct variable_tracking_info_def
266 /* The vector of micro operations. */
267 VEC(micro_operation, heap) *mos;
269 /* The IN and OUT set for dataflow analysis. */
273 /* The permanent-in dataflow set for this block. This is used to
274 hold values for which we had to compute entry values. ??? This
275 should probably be dynamically allocated, to avoid using more
276 memory in non-debug builds. */
279 /* Has the block been visited in DFS? */
282 /* Has the block been flooded in VTA? */
285 } *variable_tracking_info;
287 /* Structure for chaining the locations. */
288 typedef struct location_chain_def
290 /* Next element in the chain. */
291 struct location_chain_def *next;
293 /* The location (REG, MEM or VALUE). */
296 /* The "value" stored in this location. */
300 enum var_init_status init;
303 /* Structure describing one part of variable. */
304 typedef struct variable_part_def
306 /* Chain of locations of the part. */
307 location_chain loc_chain;
309 /* Location which was last emitted to location list. */
312 /* The offset in the variable. */
313 HOST_WIDE_INT offset;
316 /* Maximum number of location parts. */
317 #define MAX_VAR_PARTS 16
319 /* Structure describing where the variable is located. */
320 typedef struct variable_def
322 /* The declaration of the variable, or an RTL value being handled
323 like a declaration. */
326 /* Reference count. */
329 /* Number of variable parts. */
332 /* True if this variable changed (any of its) cur_loc fields
333 during the current emit_notes_for_changes resp.
334 emit_notes_for_differences call. */
335 bool cur_loc_changed;
337 /* True if this variable_def struct is currently in the
338 changed_variables hash table. */
339 bool in_changed_variables;
341 /* The variable parts. */
342 variable_part var_part[1];
344 typedef const struct variable_def *const_variable;
346 /* Structure for chaining backlinks from referenced VALUEs to
347 DVs that are referencing them. */
348 typedef struct value_chain_def
350 /* Next value_chain entry. */
351 struct value_chain_def *next;
353 /* The declaration of the variable, or an RTL value
354 being handled like a declaration, whose var_parts[0].loc_chain
355 references the VALUE owning this value_chain. */
358 /* Reference count. */
361 typedef const struct value_chain_def *const_value_chain;
363 /* Pointer to the BB's information specific to variable tracking pass. */
364 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
366 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
367 #define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
369 /* Alloc pool for struct attrs_def. */
370 static alloc_pool attrs_pool;
372 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
373 static alloc_pool var_pool;
375 /* Alloc pool for struct variable_def with a single var_part entry. */
376 static alloc_pool valvar_pool;
378 /* Alloc pool for struct location_chain_def. */
379 static alloc_pool loc_chain_pool;
381 /* Alloc pool for struct shared_hash_def. */
382 static alloc_pool shared_hash_pool;
384 /* Alloc pool for struct value_chain_def. */
385 static alloc_pool value_chain_pool;
387 /* Changed variables, notes will be emitted for them. */
388 static htab_t changed_variables;
390 /* Links from VALUEs to DVs referencing them in their current loc_chains. */
391 static htab_t value_chains;
393 /* Shall notes be emitted? */
394 static bool emit_notes;
396 /* Empty shared hashtable. */
397 static shared_hash empty_shared_hash;
399 /* Scratch register bitmap used by cselib_expand_value_rtx. */
400 static bitmap scratch_regs = NULL;
402 /* Variable used to tell whether cselib_process_insn called our hook. */
403 static bool cselib_hook_called;
405 /* Local function prototypes. */
406 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
408 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
410 static bool vt_stack_adjustments (void);
411 static rtx compute_cfa_pointer (HOST_WIDE_INT);
412 static hashval_t variable_htab_hash (const void *);
413 static int variable_htab_eq (const void *, const void *);
414 static void variable_htab_free (void *);
416 static void init_attrs_list_set (attrs *);
417 static void attrs_list_clear (attrs *);
418 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
419 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
420 static void attrs_list_copy (attrs *, attrs);
421 static void attrs_list_union (attrs *, attrs);
423 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
424 enum var_init_status);
425 static void vars_copy (htab_t, htab_t);
426 static tree var_debug_decl (tree);
427 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
428 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
429 enum var_init_status, rtx);
430 static void var_reg_delete (dataflow_set *, rtx, bool);
431 static void var_regno_delete (dataflow_set *, int);
432 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
433 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
434 enum var_init_status, rtx);
435 static void var_mem_delete (dataflow_set *, rtx, bool);
437 static void dataflow_set_init (dataflow_set *);
438 static void dataflow_set_clear (dataflow_set *);
439 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
440 static int variable_union_info_cmp_pos (const void *, const void *);
441 static void dataflow_set_union (dataflow_set *, dataflow_set *);
442 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
443 static bool canon_value_cmp (rtx, rtx);
444 static int loc_cmp (rtx, rtx);
445 static bool variable_part_different_p (variable_part *, variable_part *);
446 static bool onepart_variable_different_p (variable, variable);
447 static bool variable_different_p (variable, variable);
448 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
449 static void dataflow_set_destroy (dataflow_set *);
451 static bool contains_symbol_ref (rtx);
452 static bool track_expr_p (tree, bool);
453 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
454 static int add_uses (rtx *, void *);
455 static void add_uses_1 (rtx *, void *);
456 static void add_stores (rtx, const_rtx, void *);
457 static bool compute_bb_dataflow (basic_block);
458 static bool vt_find_locations (void);
460 static void dump_attrs_list (attrs);
461 static int dump_var_slot (void **, void *);
462 static void dump_var (variable);
463 static void dump_vars (htab_t);
464 static void dump_dataflow_set (dataflow_set *);
465 static void dump_dataflow_sets (void);
467 static void variable_was_changed (variable, dataflow_set *);
468 static void **set_slot_part (dataflow_set *, rtx, void **,
469 decl_or_value, HOST_WIDE_INT,
470 enum var_init_status, rtx);
471 static void set_variable_part (dataflow_set *, rtx,
472 decl_or_value, HOST_WIDE_INT,
473 enum var_init_status, rtx, enum insert_option);
474 static void **clobber_slot_part (dataflow_set *, rtx,
475 void **, HOST_WIDE_INT, rtx);
476 static void clobber_variable_part (dataflow_set *, rtx,
477 decl_or_value, HOST_WIDE_INT, rtx);
478 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
479 static void delete_variable_part (dataflow_set *, rtx,
480 decl_or_value, HOST_WIDE_INT);
481 static int emit_note_insn_var_location (void **, void *);
482 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
483 static int emit_notes_for_differences_1 (void **, void *);
484 static int emit_notes_for_differences_2 (void **, void *);
485 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
486 static void emit_notes_in_bb (basic_block, dataflow_set *);
487 static void vt_emit_notes (void);
489 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
490 static void vt_add_function_parameters (void);
491 static bool vt_initialize (void);
492 static void vt_finalize (void);
494 /* Given a SET, calculate the amount of stack adjustment it contains
495 PRE- and POST-modifying stack pointer.
496 This function is similar to stack_adjust_offset. */
499 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
502 rtx src = SET_SRC (pattern);
503 rtx dest = SET_DEST (pattern);
506 if (dest == stack_pointer_rtx)
508 /* (set (reg sp) (plus (reg sp) (const_int))) */
509 code = GET_CODE (src);
510 if (! (code == PLUS || code == MINUS)
511 || XEXP (src, 0) != stack_pointer_rtx
512 || !CONST_INT_P (XEXP (src, 1)))
516 *post += INTVAL (XEXP (src, 1));
518 *post -= INTVAL (XEXP (src, 1));
520 else if (MEM_P (dest))
522 /* (set (mem (pre_dec (reg sp))) (foo)) */
523 src = XEXP (dest, 0);
524 code = GET_CODE (src);
530 if (XEXP (src, 0) == stack_pointer_rtx)
532 rtx val = XEXP (XEXP (src, 1), 1);
533 /* We handle only adjustments by constant amount. */
534 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
537 if (code == PRE_MODIFY)
538 *pre -= INTVAL (val);
540 *post -= INTVAL (val);
546 if (XEXP (src, 0) == stack_pointer_rtx)
548 *pre += GET_MODE_SIZE (GET_MODE (dest));
554 if (XEXP (src, 0) == stack_pointer_rtx)
556 *post += GET_MODE_SIZE (GET_MODE (dest));
562 if (XEXP (src, 0) == stack_pointer_rtx)
564 *pre -= GET_MODE_SIZE (GET_MODE (dest));
570 if (XEXP (src, 0) == stack_pointer_rtx)
572 *post -= GET_MODE_SIZE (GET_MODE (dest));
583 /* Given an INSN, calculate the amount of stack adjustment it contains
584 PRE- and POST-modifying stack pointer. */
587 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
595 pattern = PATTERN (insn);
596 if (RTX_FRAME_RELATED_P (insn))
598 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
600 pattern = XEXP (expr, 0);
603 if (GET_CODE (pattern) == SET)
604 stack_adjust_offset_pre_post (pattern, pre, post);
605 else if (GET_CODE (pattern) == PARALLEL
606 || GET_CODE (pattern) == SEQUENCE)
610 /* There may be stack adjustments inside compound insns. Search
612 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
613 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
614 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
618 /* Compute stack adjustments for all blocks by traversing DFS tree.
619 Return true when the adjustments on all incoming edges are consistent.
620 Heavily borrowed from pre_and_rev_post_order_compute. */
623 vt_stack_adjustments (void)
625 edge_iterator *stack;
628 /* Initialize entry block. */
629 VTI (ENTRY_BLOCK_PTR)->visited = true;
630 VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
631 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
633 /* Allocate stack for back-tracking up CFG. */
634 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
637 /* Push the first edge on to the stack. */
638 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
646 /* Look at the edge on the top of the stack. */
648 src = ei_edge (ei)->src;
649 dest = ei_edge (ei)->dest;
651 /* Check if the edge destination has been visited yet. */
652 if (!VTI (dest)->visited)
655 HOST_WIDE_INT pre, post, offset;
656 VTI (dest)->visited = true;
657 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
659 if (dest != EXIT_BLOCK_PTR)
660 for (insn = BB_HEAD (dest);
661 insn != NEXT_INSN (BB_END (dest));
662 insn = NEXT_INSN (insn))
665 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
666 offset += pre + post;
669 VTI (dest)->out.stack_adjust = offset;
671 if (EDGE_COUNT (dest->succs) > 0)
672 /* Since the DEST node has been visited for the first
673 time, check its successors. */
674 stack[sp++] = ei_start (dest->succs);
678 /* Check whether the adjustments on the edges are the same. */
679 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
685 if (! ei_one_before_end_p (ei))
686 /* Go to the next edge. */
687 ei_next (&stack[sp - 1]);
689 /* Return to previous level if there are no more edges. */
698 /* Compute a CFA-based value for the stack pointer. */
701 compute_cfa_pointer (HOST_WIDE_INT adjustment)
705 #ifdef FRAME_POINTER_CFA_OFFSET
706 adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
707 cfa = plus_constant (frame_pointer_rtx, adjustment);
709 adjustment -= ARG_POINTER_CFA_OFFSET (current_function_decl);
710 cfa = plus_constant (arg_pointer_rtx, adjustment);
716 /* Adjustment for hard_frame_pointer_rtx to cfa base reg,
717 or -1 if the replacement shouldn't be done. */
718 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
720 /* Data for adjust_mems callback. */
722 struct adjust_mem_data
725 enum machine_mode mem_mode;
726 HOST_WIDE_INT stack_adjust;
730 /* Helper for adjust_mems. Return 1 if *loc is unsuitable for
731 transformation of wider mode arithmetics to narrower mode,
732 -1 if it is suitable and subexpressions shouldn't be
733 traversed and 0 if it is suitable and subexpressions should
734 be traversed. Called through for_each_rtx. */
737 use_narrower_mode_test (rtx *loc, void *data)
739 rtx subreg = (rtx) data;
741 if (CONSTANT_P (*loc))
743 switch (GET_CODE (*loc))
746 if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0))
754 if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
763 /* Transform X into narrower mode MODE from wider mode WMODE. */
766 use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
770 return lowpart_subreg (mode, x, wmode);
771 switch (GET_CODE (x))
774 return lowpart_subreg (mode, x, wmode);
778 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
779 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
780 return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
782 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
783 return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
789 /* Helper function for adjusting used MEMs. */
792 adjust_mems (rtx loc, const_rtx old_rtx, void *data)
794 struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
795 rtx mem, addr = loc, tem;
796 enum machine_mode mem_mode_save;
798 switch (GET_CODE (loc))
801 /* Don't do any sp or fp replacements outside of MEM addresses. */
802 if (amd->mem_mode == VOIDmode)
804 if (loc == stack_pointer_rtx
805 && !frame_pointer_needed)
806 return compute_cfa_pointer (amd->stack_adjust);
807 else if (loc == hard_frame_pointer_rtx
808 && frame_pointer_needed
809 && hard_frame_pointer_adjustment != -1)
810 return compute_cfa_pointer (hard_frame_pointer_adjustment);
816 mem = targetm.delegitimize_address (mem);
817 if (mem != loc && !MEM_P (mem))
818 return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
821 addr = XEXP (mem, 0);
822 mem_mode_save = amd->mem_mode;
823 amd->mem_mode = GET_MODE (mem);
824 store_save = amd->store;
826 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
827 amd->store = store_save;
828 amd->mem_mode = mem_mode_save;
830 addr = targetm.delegitimize_address (addr);
831 if (addr != XEXP (mem, 0))
832 mem = replace_equiv_address_nv (mem, addr);
834 mem = avoid_constant_pool_reference (mem);
838 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
839 GEN_INT (GET_CODE (loc) == PRE_INC
840 ? GET_MODE_SIZE (amd->mem_mode)
841 : -GET_MODE_SIZE (amd->mem_mode)));
845 addr = XEXP (loc, 0);
846 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
847 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
848 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
849 GEN_INT ((GET_CODE (loc) == PRE_INC
850 || GET_CODE (loc) == POST_INC)
851 ? GET_MODE_SIZE (amd->mem_mode)
852 : -GET_MODE_SIZE (amd->mem_mode)));
853 amd->side_effects = alloc_EXPR_LIST (0,
854 gen_rtx_SET (VOIDmode,
860 addr = XEXP (loc, 1);
863 addr = XEXP (loc, 0);
864 gcc_assert (amd->mem_mode != VOIDmode);
865 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
866 amd->side_effects = alloc_EXPR_LIST (0,
867 gen_rtx_SET (VOIDmode,
873 /* First try without delegitimization of whole MEMs and
874 avoid_constant_pool_reference, which is more likely to succeed. */
875 store_save = amd->store;
877 addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
879 amd->store = store_save;
880 mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
881 if (mem == SUBREG_REG (loc))
886 tem = simplify_gen_subreg (GET_MODE (loc), mem,
887 GET_MODE (SUBREG_REG (loc)),
891 tem = simplify_gen_subreg (GET_MODE (loc), addr,
892 GET_MODE (SUBREG_REG (loc)),
895 tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
897 if (MAY_HAVE_DEBUG_INSNS
898 && GET_CODE (tem) == SUBREG
899 && (GET_CODE (SUBREG_REG (tem)) == PLUS
900 || GET_CODE (SUBREG_REG (tem)) == MINUS
901 || GET_CODE (SUBREG_REG (tem)) == MULT
902 || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
903 && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
904 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
905 && GET_MODE_SIZE (GET_MODE (tem))
906 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
907 && subreg_lowpart_p (tem)
908 && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
909 return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
910 GET_MODE (SUBREG_REG (tem)));
918 /* Helper function for replacement of uses. */
921 adjust_mem_uses (rtx *x, void *data)
923 rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
925 validate_change (NULL_RTX, x, new_x, true);
928 /* Helper function for replacement of stores. */
931 adjust_mem_stores (rtx loc, const_rtx expr, void *data)
935 rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
937 if (new_dest != SET_DEST (expr))
939 rtx xexpr = CONST_CAST_RTX (expr);
940 validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
945 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
946 replace them with their value in the insn and add the side-effects
947 as other sets to the insn. */
950 adjust_insn (basic_block bb, rtx insn)
952 struct adjust_mem_data amd;
954 amd.mem_mode = VOIDmode;
955 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
956 amd.side_effects = NULL_RTX;
959 note_stores (PATTERN (insn), adjust_mem_stores, &amd);
962 note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
964 /* For read-only MEMs containing some constant, prefer those
966 set = single_set (insn);
967 if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
969 rtx note = find_reg_equal_equiv_note (insn);
971 if (note && CONSTANT_P (XEXP (note, 0)))
972 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
975 if (amd.side_effects)
977 rtx *pat, new_pat, s;
980 pat = &PATTERN (insn);
981 if (GET_CODE (*pat) == COND_EXEC)
982 pat = &COND_EXEC_CODE (*pat);
983 if (GET_CODE (*pat) == PARALLEL)
984 oldn = XVECLEN (*pat, 0);
987 for (s = amd.side_effects, newn = 0; s; newn++)
989 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
990 if (GET_CODE (*pat) == PARALLEL)
991 for (i = 0; i < oldn; i++)
992 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
994 XVECEXP (new_pat, 0, 0) = *pat;
995 for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
996 XVECEXP (new_pat, 0, i) = XEXP (s, 0);
997 free_EXPR_LIST_list (&amd.side_effects);
998 validate_change (NULL_RTX, pat, new_pat, true);
1002 /* Return true if a decl_or_value DV is a DECL or NULL. */
1004 dv_is_decl_p (decl_or_value dv)
1006 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
1009 /* Return true if a decl_or_value is a VALUE rtl. */
1011 dv_is_value_p (decl_or_value dv)
1013 return dv && !dv_is_decl_p (dv);
1016 /* Return the decl in the decl_or_value. */
1018 dv_as_decl (decl_or_value dv)
1020 #ifdef ENABLE_CHECKING
1021 gcc_assert (dv_is_decl_p (dv));
1026 /* Return the value in the decl_or_value. */
1028 dv_as_value (decl_or_value dv)
1030 #ifdef ENABLE_CHECKING
1031 gcc_assert (dv_is_value_p (dv));
1036 /* Return the opaque pointer in the decl_or_value. */
1037 static inline void *
1038 dv_as_opaque (decl_or_value dv)
1043 /* Return true if a decl_or_value must not have more than one variable
1046 dv_onepart_p (decl_or_value dv)
1050 if (!MAY_HAVE_DEBUG_INSNS)
1053 if (dv_is_value_p (dv))
1056 decl = dv_as_decl (dv);
1061 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1064 return (target_for_debug_bind (decl) != NULL_TREE);
1067 /* Return the variable pool to be used for dv, depending on whether it
1068 can have multiple parts or not. */
1069 static inline alloc_pool
1070 dv_pool (decl_or_value dv)
1072 return dv_onepart_p (dv) ? valvar_pool : var_pool;
1075 /* Build a decl_or_value out of a decl. */
1076 static inline decl_or_value
1077 dv_from_decl (tree decl)
1081 #ifdef ENABLE_CHECKING
1082 gcc_assert (dv_is_decl_p (dv));
1087 /* Build a decl_or_value out of a value. */
1088 static inline decl_or_value
1089 dv_from_value (rtx value)
1093 #ifdef ENABLE_CHECKING
1094 gcc_assert (dv_is_value_p (dv));
1099 extern void debug_dv (decl_or_value dv);
1102 debug_dv (decl_or_value dv)
1104 if (dv_is_value_p (dv))
1105 debug_rtx (dv_as_value (dv));
1107 debug_generic_stmt (dv_as_decl (dv));
1110 typedef unsigned int dvuid;
1112 /* Return the uid of DV. */
1115 dv_uid (decl_or_value dv)
1117 if (dv_is_value_p (dv))
1118 return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
1120 return DECL_UID (dv_as_decl (dv));
1123 /* Compute the hash from the uid. */
1125 static inline hashval_t
1126 dv_uid2hash (dvuid uid)
1131 /* The hash function for a mask table in a shared_htab chain. */
1133 static inline hashval_t
1134 dv_htab_hash (decl_or_value dv)
1136 return dv_uid2hash (dv_uid (dv));
1139 /* The hash function for variable_htab, computes the hash value
1140 from the declaration of variable X. */
1143 variable_htab_hash (const void *x)
1145 const_variable const v = (const_variable) x;
1147 return dv_htab_hash (v->dv);
1150 /* Compare the declaration of variable X with declaration Y. */
1153 variable_htab_eq (const void *x, const void *y)
1155 const_variable const v = (const_variable) x;
1156 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1158 return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
1161 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
1164 variable_htab_free (void *elem)
1167 variable var = (variable) elem;
1168 location_chain node, next;
1170 gcc_assert (var->refcount > 0);
1173 if (var->refcount > 0)
1176 for (i = 0; i < var->n_var_parts; i++)
1178 for (node = var->var_part[i].loc_chain; node; node = next)
1181 pool_free (loc_chain_pool, node);
1183 var->var_part[i].loc_chain = NULL;
1185 pool_free (dv_pool (var->dv), var);
1188 /* The hash function for value_chains htab, computes the hash value
1192 value_chain_htab_hash (const void *x)
1194 const_value_chain const v = (const_value_chain) x;
1196 return dv_htab_hash (v->dv);
1199 /* Compare the VALUE X with VALUE Y. */
1202 value_chain_htab_eq (const void *x, const void *y)
1204 const_value_chain const v = (const_value_chain) x;
1205 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1207 return dv_as_opaque (v->dv) == dv_as_opaque (dv);
1210 /* Initialize the set (array) SET of attrs to empty lists. */
1213 init_attrs_list_set (attrs *set)
1217 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1221 /* Make the list *LISTP empty. */
1224 attrs_list_clear (attrs *listp)
1228 for (list = *listp; list; list = next)
1231 pool_free (attrs_pool, list);
1236 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
1239 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1241 for (; list; list = list->next)
1242 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1247 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
1250 attrs_list_insert (attrs *listp, decl_or_value dv,
1251 HOST_WIDE_INT offset, rtx loc)
1255 list = (attrs) pool_alloc (attrs_pool);
1258 list->offset = offset;
1259 list->next = *listp;
1263 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
1266 attrs_list_copy (attrs *dstp, attrs src)
1270 attrs_list_clear (dstp);
1271 for (; src; src = src->next)
1273 n = (attrs) pool_alloc (attrs_pool);
1276 n->offset = src->offset;
1282 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1285 attrs_list_union (attrs *dstp, attrs src)
1287 for (; src; src = src->next)
1289 if (!attrs_list_member (*dstp, src->dv, src->offset))
1290 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1294 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1298 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1300 gcc_assert (!*dstp);
1301 for (; src; src = src->next)
1303 if (!dv_onepart_p (src->dv))
1304 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1306 for (src = src2; src; src = src->next)
1308 if (!dv_onepart_p (src->dv)
1309 && !attrs_list_member (*dstp, src->dv, src->offset))
1310 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1314 /* Shared hashtable support. */
1316 /* Return true if VARS is shared. */
1319 shared_hash_shared (shared_hash vars)
1321 return vars->refcount > 1;
1324 /* Return the hash table for VARS. */
1326 static inline htab_t
1327 shared_hash_htab (shared_hash vars)
1332 /* Return true if VAR is shared, or maybe because VARS is shared. */
1335 shared_var_p (variable var, shared_hash vars)
1337 /* Don't count an entry in the changed_variables table as a duplicate. */
1338 return ((var->refcount > 1 + (int) var->in_changed_variables)
1339 || shared_hash_shared (vars));
1342 /* Copy variables into a new hash table. */
1345 shared_hash_unshare (shared_hash vars)
1347 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1348 gcc_assert (vars->refcount > 1);
1349 new_vars->refcount = 1;
1351 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1352 variable_htab_eq, variable_htab_free);
1353 vars_copy (new_vars->htab, vars->htab);
1358 /* Increment reference counter on VARS and return it. */
1360 static inline shared_hash
1361 shared_hash_copy (shared_hash vars)
1367 /* Decrement reference counter and destroy hash table if not shared
1371 shared_hash_destroy (shared_hash vars)
1373 gcc_assert (vars->refcount > 0);
1374 if (--vars->refcount == 0)
1376 htab_delete (vars->htab);
1377 pool_free (shared_hash_pool, vars);
1381 /* Unshare *PVARS if shared and return slot for DV. If INS is
1382 INSERT, insert it if not already present. */
1384 static inline void **
1385 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1386 hashval_t dvhash, enum insert_option ins)
1388 if (shared_hash_shared (*pvars))
1389 *pvars = shared_hash_unshare (*pvars);
1390 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1393 static inline void **
1394 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1395 enum insert_option ins)
1397 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1400 /* Return slot for DV, if it is already present in the hash table.
1401 If it is not present, insert it only VARS is not shared, otherwise
1404 static inline void **
1405 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1407 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1408 shared_hash_shared (vars)
1409 ? NO_INSERT : INSERT);
1412 static inline void **
1413 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1415 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1418 /* Return slot for DV only if it is already present in the hash table. */
1420 static inline void **
1421 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1424 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1428 static inline void **
1429 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1431 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1434 /* Return variable for DV or NULL if not already present in the hash
1437 static inline variable
1438 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1440 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1443 static inline variable
1444 shared_hash_find (shared_hash vars, decl_or_value dv)
1446 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1449 /* Return true if TVAL is better than CVAL as a canonival value. We
1450 choose lowest-numbered VALUEs, using the RTX address as a
1451 tie-breaker. The idea is to arrange them into a star topology,
1452 such that all of them are at most one step away from the canonical
1453 value, and the canonical value has backlinks to all of them, in
1454 addition to all the actual locations. We don't enforce this
1455 topology throughout the entire dataflow analysis, though.
1459 canon_value_cmp (rtx tval, rtx cval)
1462 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
1465 static bool dst_can_be_shared;
1467 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1470 unshare_variable (dataflow_set *set, void **slot, variable var,
1471 enum var_init_status initialized)
1476 new_var = (variable) pool_alloc (dv_pool (var->dv));
1477 new_var->dv = var->dv;
1478 new_var->refcount = 1;
1480 new_var->n_var_parts = var->n_var_parts;
1481 new_var->cur_loc_changed = var->cur_loc_changed;
1482 var->cur_loc_changed = false;
1483 new_var->in_changed_variables = false;
1485 if (! flag_var_tracking_uninit)
1486 initialized = VAR_INIT_STATUS_INITIALIZED;
1488 for (i = 0; i < var->n_var_parts; i++)
1490 location_chain node;
1491 location_chain *nextp;
1493 new_var->var_part[i].offset = var->var_part[i].offset;
1494 nextp = &new_var->var_part[i].loc_chain;
1495 for (node = var->var_part[i].loc_chain; node; node = node->next)
1497 location_chain new_lc;
1499 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1500 new_lc->next = NULL;
1501 if (node->init > initialized)
1502 new_lc->init = node->init;
1504 new_lc->init = initialized;
1505 if (node->set_src && !(MEM_P (node->set_src)))
1506 new_lc->set_src = node->set_src;
1508 new_lc->set_src = NULL;
1509 new_lc->loc = node->loc;
1512 nextp = &new_lc->next;
1515 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1518 dst_can_be_shared = false;
1519 if (shared_hash_shared (set->vars))
1520 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1521 else if (set->traversed_vars && set->vars != set->traversed_vars)
1522 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1524 if (var->in_changed_variables)
1527 = htab_find_slot_with_hash (changed_variables, var->dv,
1528 dv_htab_hash (var->dv), NO_INSERT);
1529 gcc_assert (*cslot == (void *) var);
1530 var->in_changed_variables = false;
1531 variable_htab_free (var);
1533 new_var->in_changed_variables = true;
1538 /* Copy all variables from hash table SRC to hash table DST. */
1541 vars_copy (htab_t dst, htab_t src)
1546 FOR_EACH_HTAB_ELEMENT (src, var, variable, hi)
1550 dstp = htab_find_slot_with_hash (dst, var->dv,
1551 dv_htab_hash (var->dv),
1557 /* Map a decl to its main debug decl. */
1560 var_debug_decl (tree decl)
1562 if (decl && DECL_P (decl)
1563 && DECL_DEBUG_EXPR_IS_FROM (decl))
1565 tree debugdecl = DECL_DEBUG_EXPR (decl);
1566 if (debugdecl && DECL_P (debugdecl))
1573 /* Set the register LOC to contain DV, OFFSET. */
1576 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1577 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1578 enum insert_option iopt)
1581 bool decl_p = dv_is_decl_p (dv);
1584 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1586 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1587 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1588 && node->offset == offset)
1591 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1592 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1595 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1598 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1601 tree decl = REG_EXPR (loc);
1602 HOST_WIDE_INT offset = REG_OFFSET (loc);
1604 var_reg_decl_set (set, loc, initialized,
1605 dv_from_decl (decl), offset, set_src, INSERT);
1608 static enum var_init_status
1609 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1613 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1615 if (! flag_var_tracking_uninit)
1616 return VAR_INIT_STATUS_INITIALIZED;
1618 var = shared_hash_find (set->vars, dv);
1621 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1623 location_chain nextp;
1624 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1625 if (rtx_equal_p (nextp->loc, loc))
1627 ret_val = nextp->init;
1636 /* Delete current content of register LOC in dataflow set SET and set
1637 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1638 MODIFY is true, any other live copies of the same variable part are
1639 also deleted from the dataflow set, otherwise the variable part is
1640 assumed to be copied from another location holding the same
1644 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1645 enum var_init_status initialized, rtx set_src)
1647 tree decl = REG_EXPR (loc);
1648 HOST_WIDE_INT offset = REG_OFFSET (loc);
1652 decl = var_debug_decl (decl);
1654 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1655 initialized = get_init_value (set, loc, dv_from_decl (decl));
1657 nextp = &set->regs[REGNO (loc)];
1658 for (node = *nextp; node; node = next)
1661 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1663 delete_variable_part (set, node->loc, node->dv, node->offset);
1664 pool_free (attrs_pool, node);
1670 nextp = &node->next;
1674 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1675 var_reg_set (set, loc, initialized, set_src);
1678 /* Delete the association of register LOC in dataflow set SET with any
1679 variables that aren't onepart. If CLOBBER is true, also delete any
1680 other live copies of the same variable part, and delete the
1681 association with onepart dvs too. */
1684 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1686 attrs *nextp = &set->regs[REGNO (loc)];
1691 tree decl = REG_EXPR (loc);
1692 HOST_WIDE_INT offset = REG_OFFSET (loc);
1694 decl = var_debug_decl (decl);
1696 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1699 for (node = *nextp; node; node = next)
1702 if (clobber || !dv_onepart_p (node->dv))
1704 delete_variable_part (set, node->loc, node->dv, node->offset);
1705 pool_free (attrs_pool, node);
1709 nextp = &node->next;
1713 /* Delete content of register with number REGNO in dataflow set SET. */
1716 var_regno_delete (dataflow_set *set, int regno)
1718 attrs *reg = &set->regs[regno];
1721 for (node = *reg; node; node = next)
1724 delete_variable_part (set, node->loc, node->dv, node->offset);
1725 pool_free (attrs_pool, node);
1730 /* Set the location of DV, OFFSET as the MEM LOC. */
1733 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1734 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1735 enum insert_option iopt)
1737 if (dv_is_decl_p (dv))
1738 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1740 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1743 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1745 Adjust the address first if it is stack pointer based. */
1748 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1751 tree decl = MEM_EXPR (loc);
1752 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1754 var_mem_decl_set (set, loc, initialized,
1755 dv_from_decl (decl), offset, set_src, INSERT);
1758 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1759 dataflow set SET to LOC. If MODIFY is true, any other live copies
1760 of the same variable part are also deleted from the dataflow set,
1761 otherwise the variable part is assumed to be copied from another
1762 location holding the same part.
1763 Adjust the address first if it is stack pointer based. */
1766 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1767 enum var_init_status initialized, rtx set_src)
1769 tree decl = MEM_EXPR (loc);
1770 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1772 decl = var_debug_decl (decl);
1774 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1775 initialized = get_init_value (set, loc, dv_from_decl (decl));
1778 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1779 var_mem_set (set, loc, initialized, set_src);
1782 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1783 true, also delete any other live copies of the same variable part.
1784 Adjust the address first if it is stack pointer based. */
1787 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1789 tree decl = MEM_EXPR (loc);
1790 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1792 decl = var_debug_decl (decl);
1794 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1795 delete_variable_part (set, loc, dv_from_decl (decl), offset);
1798 /* Bind a value to a location it was just stored in. If MODIFIED
1799 holds, assume the location was modified, detaching it from any
1800 values bound to it. */
1803 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
1805 cselib_val *v = CSELIB_VAL_PTR (val);
1807 gcc_assert (cselib_preserved_value_p (v));
1811 fprintf (dump_file, "%i: ", INSN_UID (insn));
1812 print_inline_rtx (dump_file, val, 0);
1813 fprintf (dump_file, " stored in ");
1814 print_inline_rtx (dump_file, loc, 0);
1817 struct elt_loc_list *l;
1818 for (l = v->locs; l; l = l->next)
1820 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
1821 print_inline_rtx (dump_file, l->loc, 0);
1824 fprintf (dump_file, "\n");
1830 var_regno_delete (set, REGNO (loc));
1831 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1832 dv_from_value (val), 0, NULL_RTX, INSERT);
1834 else if (MEM_P (loc))
1835 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1836 dv_from_value (val), 0, NULL_RTX, INSERT);
1838 set_variable_part (set, loc, dv_from_value (val), 0,
1839 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1842 /* Reset this node, detaching all its equivalences. Return the slot
1843 in the variable hash table that holds dv, if there is one. */
1846 val_reset (dataflow_set *set, decl_or_value dv)
1848 variable var = shared_hash_find (set->vars, dv) ;
1849 location_chain node;
1852 if (!var || !var->n_var_parts)
1855 gcc_assert (var->n_var_parts == 1);
1858 for (node = var->var_part[0].loc_chain; node; node = node->next)
1859 if (GET_CODE (node->loc) == VALUE
1860 && canon_value_cmp (node->loc, cval))
1863 for (node = var->var_part[0].loc_chain; node; node = node->next)
1864 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
1866 /* Redirect the equivalence link to the new canonical
1867 value, or simply remove it if it would point at
1870 set_variable_part (set, cval, dv_from_value (node->loc),
1871 0, node->init, node->set_src, NO_INSERT);
1872 delete_variable_part (set, dv_as_value (dv),
1873 dv_from_value (node->loc), 0);
1878 decl_or_value cdv = dv_from_value (cval);
1880 /* Keep the remaining values connected, accummulating links
1881 in the canonical value. */
1882 for (node = var->var_part[0].loc_chain; node; node = node->next)
1884 if (node->loc == cval)
1886 else if (GET_CODE (node->loc) == REG)
1887 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
1888 node->set_src, NO_INSERT);
1889 else if (GET_CODE (node->loc) == MEM)
1890 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
1891 node->set_src, NO_INSERT);
1893 set_variable_part (set, node->loc, cdv, 0,
1894 node->init, node->set_src, NO_INSERT);
1898 /* We remove this last, to make sure that the canonical value is not
1899 removed to the point of requiring reinsertion. */
1901 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
1903 clobber_variable_part (set, NULL, dv, 0, NULL);
1905 /* ??? Should we make sure there aren't other available values or
1906 variables whose values involve this one other than by
1907 equivalence? E.g., at the very least we should reset MEMs, those
1908 shouldn't be too hard to find cselib-looking up the value as an
1909 address, then locating the resulting value in our own hash
1913 /* Find the values in a given location and map the val to another
1914 value, if it is unique, or add the location as one holding the
1918 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
1920 decl_or_value dv = dv_from_value (val);
1922 if (dump_file && (dump_flags & TDF_DETAILS))
1925 fprintf (dump_file, "%i: ", INSN_UID (insn));
1927 fprintf (dump_file, "head: ");
1928 print_inline_rtx (dump_file, val, 0);
1929 fputs (" is at ", dump_file);
1930 print_inline_rtx (dump_file, loc, 0);
1931 fputc ('\n', dump_file);
1934 val_reset (set, dv);
1938 attrs node, found = NULL;
1940 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1941 if (dv_is_value_p (node->dv)
1942 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
1946 /* Map incoming equivalences. ??? Wouldn't it be nice if
1947 we just started sharing the location lists? Maybe a
1948 circular list ending at the value itself or some
1950 set_variable_part (set, dv_as_value (node->dv),
1951 dv_from_value (val), node->offset,
1952 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1953 set_variable_part (set, val, node->dv, node->offset,
1954 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1957 /* If we didn't find any equivalence, we need to remember that
1958 this value is held in the named register. */
1960 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1961 dv_from_value (val), 0, NULL_RTX, INSERT);
1963 else if (MEM_P (loc))
1964 /* ??? Merge equivalent MEMs. */
1965 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1966 dv_from_value (val), 0, NULL_RTX, INSERT);
1968 /* ??? Merge equivalent expressions. */
1969 set_variable_part (set, loc, dv_from_value (val), 0,
1970 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1973 /* Initialize dataflow set SET to be empty.
1974 VARS_SIZE is the initial size of hash table VARS. */
1977 dataflow_set_init (dataflow_set *set)
1979 init_attrs_list_set (set->regs);
1980 set->vars = shared_hash_copy (empty_shared_hash);
1981 set->stack_adjust = 0;
1982 set->traversed_vars = NULL;
1985 /* Delete the contents of dataflow set SET. */
1988 dataflow_set_clear (dataflow_set *set)
1992 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1993 attrs_list_clear (&set->regs[i]);
1995 shared_hash_destroy (set->vars);
1996 set->vars = shared_hash_copy (empty_shared_hash);
1999 /* Copy the contents of dataflow set SRC to DST. */
2002 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2006 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2007 attrs_list_copy (&dst->regs[i], src->regs[i]);
2009 shared_hash_destroy (dst->vars);
2010 dst->vars = shared_hash_copy (src->vars);
2011 dst->stack_adjust = src->stack_adjust;
2014 /* Information for merging lists of locations for a given offset of variable.
2016 struct variable_union_info
2018 /* Node of the location chain. */
2021 /* The sum of positions in the input chains. */
2024 /* The position in the chain of DST dataflow set. */
2028 /* Buffer for location list sorting and its allocated size. */
2029 static struct variable_union_info *vui_vec;
2030 static int vui_allocated;
2032 /* Compare function for qsort, order the structures by POS element. */
2035 variable_union_info_cmp_pos (const void *n1, const void *n2)
2037 const struct variable_union_info *const i1 =
2038 (const struct variable_union_info *) n1;
2039 const struct variable_union_info *const i2 =
2040 ( const struct variable_union_info *) n2;
2042 if (i1->pos != i2->pos)
2043 return i1->pos - i2->pos;
2045 return (i1->pos_dst - i2->pos_dst);
2048 /* Compute union of location parts of variable *SLOT and the same variable
2049 from hash table DATA. Compute "sorted" union of the location chains
2050 for common offsets, i.e. the locations of a variable part are sorted by
2051 a priority where the priority is the sum of the positions in the 2 chains
2052 (if a location is only in one list the position in the second list is
2053 defined to be larger than the length of the chains).
2054 When we are updating the location parts the newest location is in the
2055 beginning of the chain, so when we do the described "sorted" union
2056 we keep the newest locations in the beginning. */
2059 variable_union (variable src, dataflow_set *set)
2065 dstp = shared_hash_find_slot (set->vars, src->dv);
2066 if (!dstp || !*dstp)
2070 dst_can_be_shared = false;
2072 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2076 /* Continue traversing the hash table. */
2080 dst = (variable) *dstp;
2082 gcc_assert (src->n_var_parts);
2084 /* We can combine one-part variables very efficiently, because their
2085 entries are in canonical order. */
2086 if (dv_onepart_p (src->dv))
2088 location_chain *nodep, dnode, snode;
2090 gcc_assert (src->n_var_parts == 1
2091 && dst->n_var_parts == 1);
2093 snode = src->var_part[0].loc_chain;
2096 restart_onepart_unshared:
2097 nodep = &dst->var_part[0].loc_chain;
2103 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2107 location_chain nnode;
2109 if (shared_var_p (dst, set->vars))
2111 dstp = unshare_variable (set, dstp, dst,
2112 VAR_INIT_STATUS_INITIALIZED);
2113 dst = (variable)*dstp;
2114 goto restart_onepart_unshared;
2117 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2118 nnode->loc = snode->loc;
2119 nnode->init = snode->init;
2120 if (!snode->set_src || MEM_P (snode->set_src))
2121 nnode->set_src = NULL;
2123 nnode->set_src = snode->set_src;
2124 nnode->next = dnode;
2127 #ifdef ENABLE_CHECKING
2129 gcc_assert (rtx_equal_p (dnode->loc, snode->loc));
2133 snode = snode->next;
2135 nodep = &dnode->next;
2142 /* Count the number of location parts, result is K. */
2143 for (i = 0, j = 0, k = 0;
2144 i < src->n_var_parts && j < dst->n_var_parts; k++)
2146 if (src->var_part[i].offset == dst->var_part[j].offset)
2151 else if (src->var_part[i].offset < dst->var_part[j].offset)
2156 k += src->n_var_parts - i;
2157 k += dst->n_var_parts - j;
2159 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
2160 thus there are at most MAX_VAR_PARTS different offsets. */
2161 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
2163 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2165 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2166 dst = (variable)*dstp;
2169 i = src->n_var_parts - 1;
2170 j = dst->n_var_parts - 1;
2171 dst->n_var_parts = k;
2173 for (k--; k >= 0; k--)
2175 location_chain node, node2;
2177 if (i >= 0 && j >= 0
2178 && src->var_part[i].offset == dst->var_part[j].offset)
2180 /* Compute the "sorted" union of the chains, i.e. the locations which
2181 are in both chains go first, they are sorted by the sum of
2182 positions in the chains. */
2185 struct variable_union_info *vui;
2187 /* If DST is shared compare the location chains.
2188 If they are different we will modify the chain in DST with
2189 high probability so make a copy of DST. */
2190 if (shared_var_p (dst, set->vars))
2192 for (node = src->var_part[i].loc_chain,
2193 node2 = dst->var_part[j].loc_chain; node && node2;
2194 node = node->next, node2 = node2->next)
2196 if (!((REG_P (node2->loc)
2197 && REG_P (node->loc)
2198 && REGNO (node2->loc) == REGNO (node->loc))
2199 || rtx_equal_p (node2->loc, node->loc)))
2201 if (node2->init < node->init)
2202 node2->init = node->init;
2208 dstp = unshare_variable (set, dstp, dst,
2209 VAR_INIT_STATUS_UNKNOWN);
2210 dst = (variable)*dstp;
2215 for (node = src->var_part[i].loc_chain; node; node = node->next)
2218 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2223 /* The most common case, much simpler, no qsort is needed. */
2224 location_chain dstnode = dst->var_part[j].loc_chain;
2225 dst->var_part[k].loc_chain = dstnode;
2226 dst->var_part[k].offset = dst->var_part[j].offset;
2228 for (node = src->var_part[i].loc_chain; node; node = node->next)
2229 if (!((REG_P (dstnode->loc)
2230 && REG_P (node->loc)
2231 && REGNO (dstnode->loc) == REGNO (node->loc))
2232 || rtx_equal_p (dstnode->loc, node->loc)))
2234 location_chain new_node;
2236 /* Copy the location from SRC. */
2237 new_node = (location_chain) pool_alloc (loc_chain_pool);
2238 new_node->loc = node->loc;
2239 new_node->init = node->init;
2240 if (!node->set_src || MEM_P (node->set_src))
2241 new_node->set_src = NULL;
2243 new_node->set_src = node->set_src;
2244 node2->next = new_node;
2251 if (src_l + dst_l > vui_allocated)
2253 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2254 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2259 /* Fill in the locations from DST. */
2260 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2261 node = node->next, jj++)
2264 vui[jj].pos_dst = jj;
2266 /* Pos plus value larger than a sum of 2 valid positions. */
2267 vui[jj].pos = jj + src_l + dst_l;
2270 /* Fill in the locations from SRC. */
2272 for (node = src->var_part[i].loc_chain, ii = 0; node;
2273 node = node->next, ii++)
2275 /* Find location from NODE. */
2276 for (jj = 0; jj < dst_l; jj++)
2278 if ((REG_P (vui[jj].lc->loc)
2279 && REG_P (node->loc)
2280 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2281 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2283 vui[jj].pos = jj + ii;
2287 if (jj >= dst_l) /* The location has not been found. */
2289 location_chain new_node;
2291 /* Copy the location from SRC. */
2292 new_node = (location_chain) pool_alloc (loc_chain_pool);
2293 new_node->loc = node->loc;
2294 new_node->init = node->init;
2295 if (!node->set_src || MEM_P (node->set_src))
2296 new_node->set_src = NULL;
2298 new_node->set_src = node->set_src;
2299 vui[n].lc = new_node;
2300 vui[n].pos_dst = src_l + dst_l;
2301 vui[n].pos = ii + src_l + dst_l;
2308 /* Special case still very common case. For dst_l == 2
2309 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2310 vui[i].pos == i + src_l + dst_l. */
2311 if (vui[0].pos > vui[1].pos)
2313 /* Order should be 1, 0, 2... */
2314 dst->var_part[k].loc_chain = vui[1].lc;
2315 vui[1].lc->next = vui[0].lc;
2318 vui[0].lc->next = vui[2].lc;
2319 vui[n - 1].lc->next = NULL;
2322 vui[0].lc->next = NULL;
2327 dst->var_part[k].loc_chain = vui[0].lc;
2328 if (n >= 3 && vui[2].pos < vui[1].pos)
2330 /* Order should be 0, 2, 1, 3... */
2331 vui[0].lc->next = vui[2].lc;
2332 vui[2].lc->next = vui[1].lc;
2335 vui[1].lc->next = vui[3].lc;
2336 vui[n - 1].lc->next = NULL;
2339 vui[1].lc->next = NULL;
2344 /* Order should be 0, 1, 2... */
2346 vui[n - 1].lc->next = NULL;
2349 for (; ii < n; ii++)
2350 vui[ii - 1].lc->next = vui[ii].lc;
2354 qsort (vui, n, sizeof (struct variable_union_info),
2355 variable_union_info_cmp_pos);
2357 /* Reconnect the nodes in sorted order. */
2358 for (ii = 1; ii < n; ii++)
2359 vui[ii - 1].lc->next = vui[ii].lc;
2360 vui[n - 1].lc->next = NULL;
2361 dst->var_part[k].loc_chain = vui[0].lc;
2364 dst->var_part[k].offset = dst->var_part[j].offset;
2369 else if ((i >= 0 && j >= 0
2370 && src->var_part[i].offset < dst->var_part[j].offset)
2373 dst->var_part[k] = dst->var_part[j];
2376 else if ((i >= 0 && j >= 0
2377 && src->var_part[i].offset > dst->var_part[j].offset)
2380 location_chain *nextp;
2382 /* Copy the chain from SRC. */
2383 nextp = &dst->var_part[k].loc_chain;
2384 for (node = src->var_part[i].loc_chain; node; node = node->next)
2386 location_chain new_lc;
2388 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2389 new_lc->next = NULL;
2390 new_lc->init = node->init;
2391 if (!node->set_src || MEM_P (node->set_src))
2392 new_lc->set_src = NULL;
2394 new_lc->set_src = node->set_src;
2395 new_lc->loc = node->loc;
2398 nextp = &new_lc->next;
2401 dst->var_part[k].offset = src->var_part[i].offset;
2404 dst->var_part[k].cur_loc = NULL;
2407 if (flag_var_tracking_uninit)
2408 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2410 location_chain node, node2;
2411 for (node = src->var_part[i].loc_chain; node; node = node->next)
2412 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2413 if (rtx_equal_p (node->loc, node2->loc))
2415 if (node->init > node2->init)
2416 node2->init = node->init;
2420 /* Continue traversing the hash table. */
2424 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2427 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2431 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2432 attrs_list_union (&dst->regs[i], src->regs[i]);
2434 if (dst->vars == empty_shared_hash)
2436 shared_hash_destroy (dst->vars);
2437 dst->vars = shared_hash_copy (src->vars);
2444 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src->vars), var, variable, hi)
2445 variable_union (var, dst);
2449 /* Whether the value is currently being expanded. */
2450 #define VALUE_RECURSED_INTO(x) \
2451 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2452 /* Whether the value is in changed_variables hash table. */
2453 #define VALUE_CHANGED(x) \
2454 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2455 /* Whether the decl is in changed_variables hash table. */
2456 #define DECL_CHANGED(x) TREE_VISITED (x)
2458 /* Record that DV has been added into resp. removed from changed_variables
2462 set_dv_changed (decl_or_value dv, bool newv)
2464 if (dv_is_value_p (dv))
2465 VALUE_CHANGED (dv_as_value (dv)) = newv;
2467 DECL_CHANGED (dv_as_decl (dv)) = newv;
2470 /* Return true if DV is present in changed_variables hash table. */
2473 dv_changed_p (decl_or_value dv)
2475 return (dv_is_value_p (dv)
2476 ? VALUE_CHANGED (dv_as_value (dv))
2477 : DECL_CHANGED (dv_as_decl (dv)));
2480 /* Return a location list node whose loc is rtx_equal to LOC, in the
2481 location list of a one-part variable or value VAR, or in that of
2482 any values recursively mentioned in the location lists. */
2484 static location_chain
2485 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2487 location_chain node;
2488 enum rtx_code loc_code;
2489 location_chain ret = NULL;
2490 int unmark_self = 0;
2491 #ifdef ENABLE_CHECKING
2492 static int mark_count;
2498 #ifdef ENABLE_CHECKING
2499 gcc_assert (dv_onepart_p (var->dv));
2502 if (!var->n_var_parts)
2505 #ifdef ENABLE_CHECKING
2506 gcc_assert (var->var_part[0].offset == 0);
2509 loc_code = GET_CODE (loc);
2510 for (node = var->var_part[0].loc_chain; node; node = node->next)
2512 if (GET_CODE (node->loc) != loc_code)
2514 if (GET_CODE (node->loc) != VALUE)
2517 else if (loc == node->loc)
2522 else if (loc_code != VALUE)
2524 if (rtx_equal_p (loc, node->loc))
2531 if (!VALUE_RECURSED_INTO (node->loc))
2533 decl_or_value dv = dv_from_value (node->loc);
2534 variable rvar = (variable)
2535 htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2539 location_chain where;
2543 if (dv_is_value_p (var->dv)
2544 && !VALUE_RECURSED_INTO (dv_as_value (var->dv)))
2547 #ifdef ENABLE_CHECKING
2550 VALUE_RECURSED_INTO (dv_as_value (var->dv)) = true;
2556 #ifdef ENABLE_CHECKING
2558 /* The recursion count is bounded because we're
2559 searching in a star-canonicalized set, i.e., each
2560 equivalence set of values is arranged so that the
2561 canonical value has all locations and equivalent
2562 values, whereas equivalent values only point back to
2563 the canonical. So, if we start at the canonical
2564 value, we'll recurse at most into each sibling, so
2565 the recurse limit will be 2. If we start at a
2566 non-canonical value, we'll recurse into the
2567 canonical, and from there to other siblings, so
2568 recurse limit will be 3. If we start at a one-part
2569 variable, we add one level of recursion, but we don't
2571 gcc_assert (mark_count <= 3);
2573 VALUE_RECURSED_INTO (node->loc) = true;
2574 if ((where = find_loc_in_1pdv (loc, rvar, vars)))
2576 #ifdef ENABLE_CHECKING
2579 VALUE_RECURSED_INTO (node->loc) = false;
2583 VALUE_RECURSED_INTO (node->loc) = false;
2584 #ifdef ENABLE_CHECKING
2591 if (unmark_self > 0)
2593 VALUE_RECURSED_INTO (dv_as_value (var->dv)) = false;
2594 #ifdef ENABLE_CHECKING
2596 gcc_assert (mark_count == 0);
2603 /* Hash table iteration argument passed to variable_merge. */
2606 /* The set in which the merge is to be inserted. */
2608 /* The set that we're iterating in. */
2610 /* The set that may contain the other dv we are to merge with. */
2612 /* Number of onepart dvs in src. */
2613 int src_onepart_cnt;
2616 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
2617 loc_cmp order, and it is maintained as such. */
2620 insert_into_intersection (location_chain *nodep, rtx loc,
2621 enum var_init_status status)
2623 location_chain node;
2626 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2627 if ((r = loc_cmp (node->loc, loc)) == 0)
2629 node->init = MIN (node->init, status);
2635 node = (location_chain) pool_alloc (loc_chain_pool);
2638 node->set_src = NULL;
2639 node->init = status;
2640 node->next = *nodep;
2644 /* Insert in DEST the intersection the locations present in both
2645 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2646 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
2650 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2651 location_chain s1node, variable s2var)
2653 dataflow_set *s1set = dsm->cur;
2654 dataflow_set *s2set = dsm->src;
2655 location_chain found;
2659 location_chain s2node;
2661 #ifdef ENABLE_CHECKING
2662 gcc_assert (dv_onepart_p (s2var->dv));
2665 if (s2var->n_var_parts)
2667 #ifdef ENABLE_CHECKING
2668 gcc_assert (s2var->var_part[0].offset == 0);
2670 s2node = s2var->var_part[0].loc_chain;
2672 for (; s1node && s2node;
2673 s1node = s1node->next, s2node = s2node->next)
2674 if (s1node->loc != s2node->loc)
2676 else if (s1node->loc == val)
2679 insert_into_intersection (dest, s1node->loc,
2680 MIN (s1node->init, s2node->init));
2684 for (; s1node; s1node = s1node->next)
2686 if (s1node->loc == val)
2689 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2690 shared_hash_htab (s2set->vars))))
2692 insert_into_intersection (dest, s1node->loc,
2693 MIN (s1node->init, found->init));
2697 if (GET_CODE (s1node->loc) == VALUE
2698 && !VALUE_RECURSED_INTO (s1node->loc))
2700 decl_or_value dv = dv_from_value (s1node->loc);
2701 variable svar = shared_hash_find (s1set->vars, dv);
2704 if (svar->n_var_parts == 1)
2706 VALUE_RECURSED_INTO (s1node->loc) = true;
2707 intersect_loc_chains (val, dest, dsm,
2708 svar->var_part[0].loc_chain,
2710 VALUE_RECURSED_INTO (s1node->loc) = false;
2715 /* ??? if the location is equivalent to any location in src,
2716 searched recursively
2718 add to dst the values needed to represent the equivalence
2720 telling whether locations S is equivalent to another dv's
2723 for each location D in the list
2725 if S and D satisfy rtx_equal_p, then it is present
2727 else if D is a value, recurse without cycles
2729 else if S and D have the same CODE and MODE
2731 for each operand oS and the corresponding oD
2733 if oS and oD are not equivalent, then S an D are not equivalent
2735 else if they are RTX vectors
2737 if any vector oS element is not equivalent to its respective oD,
2738 then S and D are not equivalent
2746 /* Return -1 if X should be before Y in a location list for a 1-part
2747 variable, 1 if Y should be before X, and 0 if they're equivalent
2748 and should not appear in the list. */
2751 loc_cmp (rtx x, rtx y)
2754 RTX_CODE code = GET_CODE (x);
2764 gcc_assert (GET_MODE (x) == GET_MODE (y));
2765 if (REGNO (x) == REGNO (y))
2767 else if (REGNO (x) < REGNO (y))
2780 gcc_assert (GET_MODE (x) == GET_MODE (y));
2781 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
2787 if (GET_CODE (x) == VALUE)
2789 if (GET_CODE (y) != VALUE)
2791 /* Don't assert the modes are the same, that is true only
2792 when not recursing. (subreg:QI (value:SI 1:1) 0)
2793 and (subreg:QI (value:DI 2:2) 0) can be compared,
2794 even when the modes are different. */
2795 if (canon_value_cmp (x, y))
2801 if (GET_CODE (y) == VALUE)
2804 if (GET_CODE (x) == GET_CODE (y))
2805 /* Compare operands below. */;
2806 else if (GET_CODE (x) < GET_CODE (y))
2811 gcc_assert (GET_MODE (x) == GET_MODE (y));
2813 if (GET_CODE (x) == DEBUG_EXPR)
2815 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2816 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
2818 #ifdef ENABLE_CHECKING
2819 gcc_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2820 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
2825 fmt = GET_RTX_FORMAT (code);
2826 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2830 if (XWINT (x, i) == XWINT (y, i))
2832 else if (XWINT (x, i) < XWINT (y, i))
2839 if (XINT (x, i) == XINT (y, i))
2841 else if (XINT (x, i) < XINT (y, i))
2848 /* Compare the vector length first. */
2849 if (XVECLEN (x, i) == XVECLEN (y, i))
2850 /* Compare the vectors elements. */;
2851 else if (XVECLEN (x, i) < XVECLEN (y, i))
2856 for (j = 0; j < XVECLEN (x, i); j++)
2857 if ((r = loc_cmp (XVECEXP (x, i, j),
2858 XVECEXP (y, i, j))))
2863 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
2869 if (XSTR (x, i) == XSTR (y, i))
2875 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2883 /* These are just backpointers, so they don't matter. */
2890 /* It is believed that rtx's at this level will never
2891 contain anything but integers and other rtx's,
2892 except for within LABEL_REFs and SYMBOL_REFs. */
2900 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2901 from VALUE to DVP. */
2904 add_value_chain (rtx *loc, void *dvp)
2906 decl_or_value dv, ldv;
2907 value_chain vc, nvc;
2910 if (GET_CODE (*loc) == VALUE)
2911 ldv = dv_from_value (*loc);
2912 else if (GET_CODE (*loc) == DEBUG_EXPR)
2913 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
2917 if (dv_as_opaque (ldv) == dvp)
2920 dv = (decl_or_value) dvp;
2921 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
2925 vc = (value_chain) pool_alloc (value_chain_pool);
2929 *slot = (void *) vc;
2933 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2934 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
2942 vc = (value_chain) *slot;
2943 nvc = (value_chain) pool_alloc (value_chain_pool);
2945 nvc->next = vc->next;
2951 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2952 from those VALUEs to DVP. */
2955 add_value_chains (decl_or_value dv, rtx loc)
2957 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
2959 add_value_chain (&loc, dv_as_opaque (dv));
2965 loc = XEXP (loc, 0);
2966 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
2969 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those
2970 VALUEs to DV. Add the same time get rid of ASM_OPERANDS from locs list,
2971 that is something we never can express in .debug_info and can prevent
2972 reverse ops from being used. */
2975 add_cselib_value_chains (decl_or_value dv)
2977 struct elt_loc_list **l;
2979 for (l = &CSELIB_VAL_PTR (dv_as_value (dv))->locs; *l;)
2980 if (GET_CODE ((*l)->loc) == ASM_OPERANDS)
2984 for_each_rtx (&(*l)->loc, add_value_chain, dv_as_opaque (dv));
2989 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
2990 from VALUE to DVP. */
2993 remove_value_chain (rtx *loc, void *dvp)
2995 decl_or_value dv, ldv;
2999 if (GET_CODE (*loc) == VALUE)
3000 ldv = dv_from_value (*loc);
3001 else if (GET_CODE (*loc) == DEBUG_EXPR)
3002 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
3006 if (dv_as_opaque (ldv) == dvp)
3009 dv = (decl_or_value) dvp;
3010 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
3012 for (vc = (value_chain) *slot; vc->next; vc = vc->next)
3013 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
3015 value_chain dvc = vc->next;
3016 gcc_assert (dvc->refcount > 0);
3017 if (--dvc->refcount == 0)
3019 vc->next = dvc->next;
3020 pool_free (value_chain_pool, dvc);
3021 if (vc->next == NULL && vc == (value_chain) *slot)
3023 pool_free (value_chain_pool, vc);
3024 htab_clear_slot (value_chains, slot);
3032 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
3033 from those VALUEs to DVP. */
3036 remove_value_chains (decl_or_value dv, rtx loc)
3038 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
3040 remove_value_chain (&loc, dv_as_opaque (dv));
3046 loc = XEXP (loc, 0);
3047 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
3051 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, remove backlinks from those
3055 remove_cselib_value_chains (decl_or_value dv)
3057 struct elt_loc_list *l;
3059 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
3060 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
3063 /* Check the order of entries in one-part variables. */
3066 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
3068 variable var = (variable) *slot;
3069 decl_or_value dv = var->dv;
3070 location_chain node, next;
3072 #ifdef ENABLE_RTL_CHECKING
3074 for (i = 0; i < var->n_var_parts; i++)
3075 gcc_assert (var->var_part[0].cur_loc == NULL);
3076 gcc_assert (!var->cur_loc_changed && !var->in_changed_variables);
3079 if (!dv_onepart_p (dv))
3082 gcc_assert (var->n_var_parts == 1);
3083 node = var->var_part[0].loc_chain;
3086 while ((next = node->next))
3088 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3096 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3097 more likely to be chosen as canonical for an equivalence set.
3098 Ensure less likely values can reach more likely neighbors, making
3099 the connections bidirectional. */
3102 canonicalize_values_mark (void **slot, void *data)
3104 dataflow_set *set = (dataflow_set *)data;
3105 variable var = (variable) *slot;
3106 decl_or_value dv = var->dv;
3108 location_chain node;
3110 if (!dv_is_value_p (dv))
3113 gcc_assert (var->n_var_parts == 1);
3115 val = dv_as_value (dv);
3117 for (node = var->var_part[0].loc_chain; node; node = node->next)
3118 if (GET_CODE (node->loc) == VALUE)
3120 if (canon_value_cmp (node->loc, val))
3121 VALUE_RECURSED_INTO (val) = true;
3124 decl_or_value odv = dv_from_value (node->loc);
3125 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3127 oslot = set_slot_part (set, val, oslot, odv, 0,
3128 node->init, NULL_RTX);
3130 VALUE_RECURSED_INTO (node->loc) = true;
3137 /* Remove redundant entries from equivalence lists in onepart
3138 variables, canonicalizing equivalence sets into star shapes. */
3141 canonicalize_values_star (void **slot, void *data)
3143 dataflow_set *set = (dataflow_set *)data;
3144 variable var = (variable) *slot;
3145 decl_or_value dv = var->dv;
3146 location_chain node;
3153 if (!dv_onepart_p (dv))
3156 gcc_assert (var->n_var_parts == 1);
3158 if (dv_is_value_p (dv))
3160 cval = dv_as_value (dv);
3161 if (!VALUE_RECURSED_INTO (cval))
3163 VALUE_RECURSED_INTO (cval) = false;
3173 gcc_assert (var->n_var_parts == 1);
3175 for (node = var->var_part[0].loc_chain; node; node = node->next)
3176 if (GET_CODE (node->loc) == VALUE)
3179 if (VALUE_RECURSED_INTO (node->loc))
3181 if (canon_value_cmp (node->loc, cval))
3190 if (!has_marks || dv_is_decl_p (dv))
3193 /* Keep it marked so that we revisit it, either after visiting a
3194 child node, or after visiting a new parent that might be
3196 VALUE_RECURSED_INTO (val) = true;
3198 for (node = var->var_part[0].loc_chain; node; node = node->next)
3199 if (GET_CODE (node->loc) == VALUE
3200 && VALUE_RECURSED_INTO (node->loc))
3204 VALUE_RECURSED_INTO (cval) = false;
3205 dv = dv_from_value (cval);
3206 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3209 gcc_assert (dv_is_decl_p (var->dv));
3210 /* The canonical value was reset and dropped.
3212 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3215 var = (variable)*slot;
3216 gcc_assert (dv_is_value_p (var->dv));
3217 if (var->n_var_parts == 0)
3219 gcc_assert (var->n_var_parts == 1);
3223 VALUE_RECURSED_INTO (val) = false;
3228 /* Push values to the canonical one. */
3229 cdv = dv_from_value (cval);
3230 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3232 for (node = var->var_part[0].loc_chain; node; node = node->next)
3233 if (node->loc != cval)
3235 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3236 node->init, NULL_RTX);
3237 if (GET_CODE (node->loc) == VALUE)
3239 decl_or_value ndv = dv_from_value (node->loc);
3241 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3244 if (canon_value_cmp (node->loc, val))
3246 /* If it could have been a local minimum, it's not any more,
3247 since it's now neighbor to cval, so it may have to push
3248 to it. Conversely, if it wouldn't have prevailed over
3249 val, then whatever mark it has is fine: if it was to
3250 push, it will now push to a more canonical node, but if
3251 it wasn't, then it has already pushed any values it might
3253 VALUE_RECURSED_INTO (node->loc) = true;
3254 /* Make sure we visit node->loc by ensuring we cval is
3256 VALUE_RECURSED_INTO (cval) = true;
3258 else if (!VALUE_RECURSED_INTO (node->loc))
3259 /* If we have no need to "recurse" into this node, it's
3260 already "canonicalized", so drop the link to the old
3262 clobber_variable_part (set, cval, ndv, 0, NULL);
3264 else if (GET_CODE (node->loc) == REG)
3266 attrs list = set->regs[REGNO (node->loc)], *listp;
3268 /* Change an existing attribute referring to dv so that it
3269 refers to cdv, removing any duplicate this might
3270 introduce, and checking that no previous duplicates
3271 existed, all in a single pass. */
3275 if (list->offset == 0
3276 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3277 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3284 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3287 for (listp = &list->next; (list = *listp); listp = &list->next)
3292 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3294 *listp = list->next;
3295 pool_free (attrs_pool, list);
3300 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3303 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3305 for (listp = &list->next; (list = *listp); listp = &list->next)
3310 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3312 *listp = list->next;
3313 pool_free (attrs_pool, list);
3318 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3327 if (list->offset == 0
3328 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3329 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3339 cslot = set_slot_part (set, val, cslot, cdv, 0,
3340 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3342 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3344 /* Variable may have been unshared. */
3345 var = (variable)*slot;
3346 gcc_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3347 && var->var_part[0].loc_chain->next == NULL);
3349 if (VALUE_RECURSED_INTO (cval))
3350 goto restart_with_cval;
3355 /* Bind one-part variables to the canonical value in an equivalence
3356 set. Not doing this causes dataflow convergence failure in rare
3357 circumstances, see PR42873. Unfortunately we can't do this
3358 efficiently as part of canonicalize_values_star, since we may not
3359 have determined or even seen the canonical value of a set when we
3360 get to a variable that references another member of the set. */
3363 canonicalize_vars_star (void **slot, void *data)
3365 dataflow_set *set = (dataflow_set *)data;
3366 variable var = (variable) *slot;
3367 decl_or_value dv = var->dv;
3368 location_chain node;
3373 location_chain cnode;
3375 if (!dv_onepart_p (dv) || dv_is_value_p (dv))
3378 gcc_assert (var->n_var_parts == 1);
3380 node = var->var_part[0].loc_chain;
3382 if (GET_CODE (node->loc) != VALUE)
3385 gcc_assert (!node->next);
3388 /* Push values to the canonical one. */
3389 cdv = dv_from_value (cval);
3390 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3393 cvar = (variable)*cslot;
3394 gcc_assert (cvar->n_var_parts == 1);
3396 cnode = cvar->var_part[0].loc_chain;
3398 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3399 that are not “more canonical” than it. */
3400 if (GET_CODE (cnode->loc) != VALUE
3401 || !canon_value_cmp (cnode->loc, cval))
3404 /* CVAL was found to be non-canonical. Change the variable to point
3405 to the canonical VALUE. */
3406 gcc_assert (!cnode->next);
3409 slot = set_slot_part (set, cval, slot, dv, 0,
3410 node->init, node->set_src);
3411 slot = clobber_slot_part (set, cval, slot, 0, node->set_src);
3416 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3417 corresponding entry in DSM->src. Multi-part variables are combined
3418 with variable_union, whereas onepart dvs are combined with
3422 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3424 dataflow_set *dst = dsm->dst;
3426 variable s2var, dvar = NULL;
3427 decl_or_value dv = s1var->dv;
3428 bool onepart = dv_onepart_p (dv);
3431 location_chain node, *nodep;
3433 /* If the incoming onepart variable has an empty location list, then
3434 the intersection will be just as empty. For other variables,
3435 it's always union. */
3436 gcc_assert (s1var->n_var_parts
3437 && s1var->var_part[0].loc_chain);
3440 return variable_union (s1var, dst);
3442 gcc_assert (s1var->n_var_parts == 1
3443 && s1var->var_part[0].offset == 0);
3445 dvhash = dv_htab_hash (dv);
3446 if (dv_is_value_p (dv))
3447 val = dv_as_value (dv);
3451 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3454 dst_can_be_shared = false;
3458 dsm->src_onepart_cnt--;
3459 gcc_assert (s2var->var_part[0].loc_chain
3460 && s2var->n_var_parts == 1
3461 && s2var->var_part[0].offset == 0);
3463 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3466 dvar = (variable)*dstslot;
3467 gcc_assert (dvar->refcount == 1
3468 && dvar->n_var_parts == 1
3469 && dvar->var_part[0].offset == 0);
3470 nodep = &dvar->var_part[0].loc_chain;
3478 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3480 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3482 *dstslot = dvar = s2var;
3487 dst_can_be_shared = false;
3489 intersect_loc_chains (val, nodep, dsm,
3490 s1var->var_part[0].loc_chain, s2var);
3496 dvar = (variable) pool_alloc (dv_pool (dv));
3499 dvar->n_var_parts = 1;
3500 dvar->cur_loc_changed = false;
3501 dvar->in_changed_variables = false;
3502 dvar->var_part[0].offset = 0;
3503 dvar->var_part[0].loc_chain = node;
3504 dvar->var_part[0].cur_loc = NULL;
3507 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3509 gcc_assert (!*dstslot);
3517 nodep = &dvar->var_part[0].loc_chain;
3518 while ((node = *nodep))
3520 location_chain *nextp = &node->next;
3522 if (GET_CODE (node->loc) == REG)
3526 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3527 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3528 && dv_is_value_p (list->dv))
3532 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3534 /* If this value became canonical for another value that had
3535 this register, we want to leave it alone. */
3536 else if (dv_as_value (list->dv) != val)
3538 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3540 node->init, NULL_RTX);
3541 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3543 /* Since nextp points into the removed node, we can't
3544 use it. The pointer to the next node moved to nodep.
3545 However, if the variable we're walking is unshared
3546 during our walk, we'll keep walking the location list
3547 of the previously-shared variable, in which case the
3548 node won't have been removed, and we'll want to skip
3549 it. That's why we test *nodep here. */
3555 /* Canonicalization puts registers first, so we don't have to
3561 if (dvar != (variable)*dstslot)
3562 dvar = (variable)*dstslot;
3563 nodep = &dvar->var_part[0].loc_chain;
3567 /* Mark all referenced nodes for canonicalization, and make sure
3568 we have mutual equivalence links. */
3569 VALUE_RECURSED_INTO (val) = true;
3570 for (node = *nodep; node; node = node->next)
3571 if (GET_CODE (node->loc) == VALUE)
3573 VALUE_RECURSED_INTO (node->loc) = true;
3574 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3575 node->init, NULL, INSERT);
3578 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3579 gcc_assert (*dstslot == dvar);
3580 canonicalize_values_star (dstslot, dst);
3581 #ifdef ENABLE_CHECKING
3583 == shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash));
3585 dvar = (variable)*dstslot;
3589 bool has_value = false, has_other = false;
3591 /* If we have one value and anything else, we're going to
3592 canonicalize this, so make sure all values have an entry in
3593 the table and are marked for canonicalization. */
3594 for (node = *nodep; node; node = node->next)
3596 if (GET_CODE (node->loc) == VALUE)
3598 /* If this was marked during register canonicalization,
3599 we know we have to canonicalize values. */
3614 if (has_value && has_other)
3616 for (node = *nodep; node; node = node->next)
3618 if (GET_CODE (node->loc) == VALUE)
3620 decl_or_value dv = dv_from_value (node->loc);
3623 if (shared_hash_shared (dst->vars))
3624 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3626 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3630 variable var = (variable) pool_alloc (dv_pool (dv));
3633 var->n_var_parts = 1;
3634 var->cur_loc_changed = false;
3635 var->in_changed_variables = false;
3636 var->var_part[0].offset = 0;
3637 var->var_part[0].loc_chain = NULL;
3638 var->var_part[0].cur_loc = NULL;
3642 VALUE_RECURSED_INTO (node->loc) = true;
3646 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3647 gcc_assert (*dstslot == dvar);
3648 canonicalize_values_star (dstslot, dst);
3649 #ifdef ENABLE_CHECKING
3651 == shared_hash_find_slot_noinsert_1 (dst->vars,
3654 dvar = (variable)*dstslot;
3658 if (!onepart_variable_different_p (dvar, s2var))
3660 variable_htab_free (dvar);
3661 *dstslot = dvar = s2var;
3664 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3666 variable_htab_free (dvar);
3667 *dstslot = dvar = s1var;
3669 dst_can_be_shared = false;
3672 dst_can_be_shared = false;
3677 /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
3678 multi-part variable. Unions of multi-part variables and
3679 intersections of one-part ones will be handled in
3680 variable_merge_over_cur(). */
3683 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
3685 dataflow_set *dst = dsm->dst;
3686 decl_or_value dv = s2var->dv;
3687 bool onepart = dv_onepart_p (dv);
3691 void **dstp = shared_hash_find_slot (dst->vars, dv);
3697 dsm->src_onepart_cnt++;
3701 /* Combine dataflow set information from SRC2 into DST, using PDST
3702 to carry over information across passes. */
3705 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
3707 dataflow_set cur = *dst;
3708 dataflow_set *src1 = &cur;
3709 struct dfset_merge dsm;
3711 size_t src1_elems, src2_elems;
3715 src1_elems = htab_elements (shared_hash_htab (src1->vars));
3716 src2_elems = htab_elements (shared_hash_htab (src2->vars));
3717 dataflow_set_init (dst);
3718 dst->stack_adjust = cur.stack_adjust;
3719 shared_hash_destroy (dst->vars);
3720 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3721 dst->vars->refcount = 1;
3723 = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
3724 variable_htab_eq, variable_htab_free);
3726 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3727 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
3732 dsm.src_onepart_cnt = 0;
3734 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.src->vars), var, variable, hi)
3735 variable_merge_over_src (var, &dsm);
3736 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.cur->vars), var, variable, hi)
3737 variable_merge_over_cur (var, &dsm);
3739 if (dsm.src_onepart_cnt)
3740 dst_can_be_shared = false;
3742 dataflow_set_destroy (src1);
3745 /* Mark register equivalences. */
3748 dataflow_set_equiv_regs (dataflow_set *set)
3753 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3755 rtx canon[NUM_MACHINE_MODES];
3757 /* If the list is empty or one entry, no need to canonicalize
3759 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
3762 memset (canon, 0, sizeof (canon));
3764 for (list = set->regs[i]; list; list = list->next)
3765 if (list->offset == 0 && dv_is_value_p (list->dv))
3767 rtx val = dv_as_value (list->dv);
3768 rtx *cvalp = &canon[(int)GET_MODE (val)];
3771 if (canon_value_cmp (val, cval))
3775 for (list = set->regs[i]; list; list = list->next)
3776 if (list->offset == 0 && dv_onepart_p (list->dv))
3778 rtx cval = canon[(int)GET_MODE (list->loc)];
3783 if (dv_is_value_p (list->dv))
3785 rtx val = dv_as_value (list->dv);
3790 VALUE_RECURSED_INTO (val) = true;
3791 set_variable_part (set, val, dv_from_value (cval), 0,
3792 VAR_INIT_STATUS_INITIALIZED,
3796 VALUE_RECURSED_INTO (cval) = true;
3797 set_variable_part (set, cval, list->dv, 0,
3798 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
3801 for (listp = &set->regs[i]; (list = *listp);
3802 listp = list ? &list->next : listp)
3803 if (list->offset == 0 && dv_onepart_p (list->dv))
3805 rtx cval = canon[(int)GET_MODE (list->loc)];
3811 if (dv_is_value_p (list->dv))
3813 rtx val = dv_as_value (list->dv);
3814 if (!VALUE_RECURSED_INTO (val))
3818 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3819 canonicalize_values_star (slot, set);
3826 /* Remove any redundant values in the location list of VAR, which must
3827 be unshared and 1-part. */
3830 remove_duplicate_values (variable var)
3832 location_chain node, *nodep;
3834 gcc_assert (dv_onepart_p (var->dv));
3835 gcc_assert (var->n_var_parts == 1);
3836 gcc_assert (var->refcount == 1);
3838 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3840 if (GET_CODE (node->loc) == VALUE)
3842 if (VALUE_RECURSED_INTO (node->loc))
3844 /* Remove duplicate value node. */
3845 *nodep = node->next;
3846 pool_free (loc_chain_pool, node);
3850 VALUE_RECURSED_INTO (node->loc) = true;
3852 nodep = &node->next;
3855 for (node = var->var_part[0].loc_chain; node; node = node->next)
3856 if (GET_CODE (node->loc) == VALUE)
3858 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3859 VALUE_RECURSED_INTO (node->loc) = false;
3864 /* Hash table iteration argument passed to variable_post_merge. */
3865 struct dfset_post_merge
3867 /* The new input set for the current block. */
3869 /* Pointer to the permanent input set for the current block, or
3871 dataflow_set **permp;
3874 /* Create values for incoming expressions associated with one-part
3875 variables that don't have value numbers for them. */
3878 variable_post_merge_new_vals (void **slot, void *info)
3880 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3881 dataflow_set *set = dfpm->set;
3882 variable var = (variable)*slot;
3883 location_chain node;
3885 if (!dv_onepart_p (var->dv) || !var->n_var_parts)
3888 gcc_assert (var->n_var_parts == 1);
3890 if (dv_is_decl_p (var->dv))
3892 bool check_dupes = false;
3895 for (node = var->var_part[0].loc_chain; node; node = node->next)
3897 if (GET_CODE (node->loc) == VALUE)
3898 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
3899 else if (GET_CODE (node->loc) == REG)
3901 attrs att, *attp, *curp = NULL;
3903 if (var->refcount != 1)
3905 slot = unshare_variable (set, slot, var,
3906 VAR_INIT_STATUS_INITIALIZED);
3907 var = (variable)*slot;
3911 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3913 if (att->offset == 0
3914 && GET_MODE (att->loc) == GET_MODE (node->loc))
3916 if (dv_is_value_p (att->dv))
3918 rtx cval = dv_as_value (att->dv);
3923 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
3931 if ((*curp)->offset == 0
3932 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
3933 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
3936 curp = &(*curp)->next;
3947 *dfpm->permp = XNEW (dataflow_set);
3948 dataflow_set_init (*dfpm->permp);
3951 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
3952 att; att = att->next)
3953 if (GET_MODE (att->loc) == GET_MODE (node->loc))
3955 gcc_assert (att->offset == 0
3956 && dv_is_value_p (att->dv));
3957 val_reset (set, att->dv);
3964 cval = dv_as_value (cdv);
3968 /* Create a unique value to hold this register,
3969 that ought to be found and reused in
3970 subsequent rounds. */
3972 gcc_assert (!cselib_lookup (node->loc,
3973 GET_MODE (node->loc), 0));
3974 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
3975 cselib_preserve_value (v);
3976 cselib_invalidate_rtx (node->loc);
3978 cdv = dv_from_value (cval);
3981 "Created new value %u:%u for reg %i\n",
3982 v->uid, v->hash, REGNO (node->loc));
3985 var_reg_decl_set (*dfpm->permp, node->loc,
3986 VAR_INIT_STATUS_INITIALIZED,
3987 cdv, 0, NULL, INSERT);
3993 /* Remove attribute referring to the decl, which now
3994 uses the value for the register, already existing or
3995 to be added when we bring perm in. */
3998 pool_free (attrs_pool, att);
4003 remove_duplicate_values (var);
4009 /* Reset values in the permanent set that are not associated with the
4010 chosen expression. */
4013 variable_post_merge_perm_vals (void **pslot, void *info)
4015 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
4016 dataflow_set *set = dfpm->set;
4017 variable pvar = (variable)*pslot, var;
4018 location_chain pnode;
4022 gcc_assert (dv_is_value_p (pvar->dv)
4023 && pvar->n_var_parts == 1);
4024 pnode = pvar->var_part[0].loc_chain;
4027 && REG_P (pnode->loc));
4031 var = shared_hash_find (set->vars, dv);
4034 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4036 val_reset (set, dv);
4039 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4040 if (att->offset == 0
4041 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4042 && dv_is_value_p (att->dv))
4045 /* If there is a value associated with this register already, create
4047 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4049 rtx cval = dv_as_value (att->dv);
4050 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4051 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4056 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4058 variable_union (pvar, set);
4064 /* Just checking stuff and registering register attributes for
4068 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4070 struct dfset_post_merge dfpm;
4075 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
4078 htab_traverse (shared_hash_htab ((*permp)->vars),
4079 variable_post_merge_perm_vals, &dfpm);
4080 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
4081 htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
4084 /* Return a node whose loc is a MEM that refers to EXPR in the
4085 location list of a one-part variable or value VAR, or in that of
4086 any values recursively mentioned in the location lists. */
4088 static location_chain
4089 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
4091 location_chain node;
4094 location_chain where = NULL;
4099 gcc_assert (GET_CODE (val) == VALUE
4100 && !VALUE_RECURSED_INTO (val));
4102 dv = dv_from_value (val);
4103 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
4108 gcc_assert (dv_onepart_p (var->dv));
4110 if (!var->n_var_parts)
4113 gcc_assert (var->var_part[0].offset == 0);
4115 VALUE_RECURSED_INTO (val) = true;
4117 for (node = var->var_part[0].loc_chain; node; node = node->next)
4118 if (MEM_P (node->loc) && MEM_EXPR (node->loc) == expr
4119 && MEM_OFFSET (node->loc) == 0)
4124 else if (GET_CODE (node->loc) == VALUE
4125 && !VALUE_RECURSED_INTO (node->loc)
4126 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4129 VALUE_RECURSED_INTO (val) = false;
4134 /* Return TRUE if the value of MEM may vary across a call. */
4137 mem_dies_at_call (rtx mem)
4139 tree expr = MEM_EXPR (mem);
4145 decl = get_base_address (expr);
4153 return (may_be_aliased (decl)
4154 || (!TREE_READONLY (decl) && is_global_var (decl)));
4157 /* Remove all MEMs from the location list of a hash table entry for a
4158 one-part variable, except those whose MEM attributes map back to
4159 the variable itself, directly or within a VALUE. */
4162 dataflow_set_preserve_mem_locs (void **slot, void *data)
4164 dataflow_set *set = (dataflow_set *) data;
4165 variable var = (variable) *slot;
4167 if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
4169 tree decl = dv_as_decl (var->dv);
4170 location_chain loc, *locp;
4171 bool changed = false;
4173 if (!var->n_var_parts)
4176 gcc_assert (var->n_var_parts == 1);
4178 if (shared_var_p (var, set->vars))
4180 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4182 /* We want to remove dying MEMs that doesn't refer to
4184 if (GET_CODE (loc->loc) == MEM
4185 && (MEM_EXPR (loc->loc) != decl
4186 || MEM_OFFSET (loc->loc))
4187 && !mem_dies_at_call (loc->loc))
4189 /* We want to move here MEMs that do refer to DECL. */
4190 else if (GET_CODE (loc->loc) == VALUE
4191 && find_mem_expr_in_1pdv (decl, loc->loc,
4192 shared_hash_htab (set->vars)))
4199 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4200 var = (variable)*slot;
4201 gcc_assert (var->n_var_parts == 1);
4204 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4207 rtx old_loc = loc->loc;
4208 if (GET_CODE (old_loc) == VALUE)
4210 location_chain mem_node
4211 = find_mem_expr_in_1pdv (decl, loc->loc,
4212 shared_hash_htab (set->vars));
4214 /* ??? This picks up only one out of multiple MEMs that
4215 refer to the same variable. Do we ever need to be
4216 concerned about dealing with more than one, or, given
4217 that they should all map to the same variable
4218 location, their addresses will have been merged and
4219 they will be regarded as equivalent? */
4222 loc->loc = mem_node->loc;
4223 loc->set_src = mem_node->set_src;
4224 loc->init = MIN (loc->init, mem_node->init);
4228 if (GET_CODE (loc->loc) != MEM
4229 || (MEM_EXPR (loc->loc) == decl
4230 && MEM_OFFSET (loc->loc) == 0)
4231 || !mem_dies_at_call (loc->loc))
4233 if (old_loc != loc->loc && emit_notes)
4235 if (old_loc == var->var_part[0].cur_loc)
4238 var->var_part[0].cur_loc = NULL;
4239 var->cur_loc_changed = true;
4241 add_value_chains (var->dv, loc->loc);
4242 remove_value_chains (var->dv, old_loc);
4250 remove_value_chains (var->dv, old_loc);
4251 if (old_loc == var->var_part[0].cur_loc)
4254 var->var_part[0].cur_loc = NULL;
4255 var->cur_loc_changed = true;
4259 pool_free (loc_chain_pool, loc);
4262 if (!var->var_part[0].loc_chain)
4268 variable_was_changed (var, set);
4274 /* Remove all MEMs from the location list of a hash table entry for a
4278 dataflow_set_remove_mem_locs (void **slot, void *data)
4280 dataflow_set *set = (dataflow_set *) data;
4281 variable var = (variable) *slot;
4283 if (dv_is_value_p (var->dv))
4285 location_chain loc, *locp;
4286 bool changed = false;
4288 gcc_assert (var->n_var_parts == 1);
4290 if (shared_var_p (var, set->vars))
4292 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4293 if (GET_CODE (loc->loc) == MEM
4294 && mem_dies_at_call (loc->loc))
4300 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4301 var = (variable)*slot;
4302 gcc_assert (var->n_var_parts == 1);
4305 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4308 if (GET_CODE (loc->loc) != MEM
4309 || !mem_dies_at_call (loc->loc))
4316 remove_value_chains (var->dv, loc->loc);
4318 /* If we have deleted the location which was last emitted
4319 we have to emit new location so add the variable to set
4320 of changed variables. */
4321 if (var->var_part[0].cur_loc == loc->loc)
4324 var->var_part[0].cur_loc = NULL;
4325 var->cur_loc_changed = true;
4327 pool_free (loc_chain_pool, loc);
4330 if (!var->var_part[0].loc_chain)
4336 variable_was_changed (var, set);
4342 /* Remove all variable-location information about call-clobbered
4343 registers, as well as associations between MEMs and VALUEs. */
4346 dataflow_set_clear_at_call (dataflow_set *set)
4350 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
4351 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
4352 var_regno_delete (set, r);
4354 if (MAY_HAVE_DEBUG_INSNS)
4356 set->traversed_vars = set->vars;
4357 htab_traverse (shared_hash_htab (set->vars),
4358 dataflow_set_preserve_mem_locs, set);
4359 set->traversed_vars = set->vars;
4360 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
4362 set->traversed_vars = NULL;
4367 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4369 location_chain lc1, lc2;
4371 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4373 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4375 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4377 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4380 if (rtx_equal_p (lc1->loc, lc2->loc))
4389 /* Return true if one-part variables VAR1 and VAR2 are different.
4390 They must be in canonical order. */
4393 onepart_variable_different_p (variable var1, variable var2)
4395 location_chain lc1, lc2;
4400 gcc_assert (var1->n_var_parts == 1
4401 && var2->n_var_parts == 1);
4403 lc1 = var1->var_part[0].loc_chain;
4404 lc2 = var2->var_part[0].loc_chain;
4406 gcc_assert (lc1 && lc2);
4410 if (loc_cmp (lc1->loc, lc2->loc))
4419 /* Return true if variables VAR1 and VAR2 are different. */
4422 variable_different_p (variable var1, variable var2)
4429 if (var1->n_var_parts != var2->n_var_parts)
4432 for (i = 0; i < var1->n_var_parts; i++)
4434 if (var1->var_part[i].offset != var2->var_part[i].offset)
4436 /* One-part values have locations in a canonical order. */
4437 if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
4439 gcc_assert (var1->n_var_parts == 1
4440 && dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4441 return onepart_variable_different_p (var1, var2);
4443 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4445 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4451 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4454 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4459 if (old_set->vars == new_set->vars)
4462 if (htab_elements (shared_hash_htab (old_set->vars))
4463 != htab_elements (shared_hash_htab (new_set->vars)))
4466 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (old_set->vars), var1, variable, hi)
4468 htab_t htab = shared_hash_htab (new_set->vars);
4469 variable var2 = (variable) htab_find_with_hash (htab, var1->dv,
4470 dv_htab_hash (var1->dv));
4473 if (dump_file && (dump_flags & TDF_DETAILS))
4475 fprintf (dump_file, "dataflow difference found: removal of:\n");
4481 if (variable_different_p (var1, var2))
4483 if (dump_file && (dump_flags & TDF_DETAILS))
4485 fprintf (dump_file, "dataflow difference found: "
4486 "old and new follow:\n");
4494 /* No need to traverse the second hashtab, if both have the same number
4495 of elements and the second one had all entries found in the first one,
4496 then it can't have any extra entries. */
4500 /* Free the contents of dataflow set SET. */
4503 dataflow_set_destroy (dataflow_set *set)
4507 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4508 attrs_list_clear (&set->regs[i]);
4510 shared_hash_destroy (set->vars);
4514 /* Return true if RTL X contains a SYMBOL_REF. */
4517 contains_symbol_ref (rtx x)
4526 code = GET_CODE (x);
4527 if (code == SYMBOL_REF)
4530 fmt = GET_RTX_FORMAT (code);
4531 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4535 if (contains_symbol_ref (XEXP (x, i)))
4538 else if (fmt[i] == 'E')
4541 for (j = 0; j < XVECLEN (x, i); j++)
4542 if (contains_symbol_ref (XVECEXP (x, i, j)))
4550 /* Shall EXPR be tracked? */
4553 track_expr_p (tree expr, bool need_rtl)
4558 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4559 return DECL_RTL_SET_P (expr);
4561 /* If EXPR is not a parameter or a variable do not track it. */
4562 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4565 /* It also must have a name... */
4566 if (!DECL_NAME (expr) && need_rtl)
4569 /* ... and a RTL assigned to it. */
4570 decl_rtl = DECL_RTL_IF_SET (expr);
4571 if (!decl_rtl && need_rtl)
4574 /* If this expression is really a debug alias of some other declaration, we
4575 don't need to track this expression if the ultimate declaration is
4578 if (DECL_DEBUG_EXPR_IS_FROM (realdecl))
4580 realdecl = DECL_DEBUG_EXPR (realdecl);
4581 if (realdecl == NULL_TREE)
4583 else if (!DECL_P (realdecl))
4585 if (handled_component_p (realdecl))
4587 HOST_WIDE_INT bitsize, bitpos, maxsize;
4589 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
4591 if (!DECL_P (innerdecl)
4592 || DECL_IGNORED_P (innerdecl)
4593 || TREE_STATIC (innerdecl)
4595 || bitpos + bitsize > 256
4596 || bitsize != maxsize)
4606 /* Do not track EXPR if REALDECL it should be ignored for debugging
4608 if (DECL_IGNORED_P (realdecl))
4611 /* Do not track global variables until we are able to emit correct location
4613 if (TREE_STATIC (realdecl))
4616 /* When the EXPR is a DECL for alias of some variable (see example)
4617 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
4618 DECL_RTL contains SYMBOL_REF.
4621 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4624 if (decl_rtl && MEM_P (decl_rtl)
4625 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4628 /* If RTX is a memory it should not be very large (because it would be
4629 an array or struct). */
4630 if (decl_rtl && MEM_P (decl_rtl))
4632 /* Do not track structures and arrays. */
4633 if (GET_MODE (decl_rtl) == BLKmode
4634 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4636 if (MEM_SIZE (decl_rtl)
4637 && INTVAL (MEM_SIZE (decl_rtl)) > MAX_VAR_PARTS)
4641 DECL_CHANGED (expr) = 0;
4642 DECL_CHANGED (realdecl) = 0;
4646 /* Determine whether a given LOC refers to the same variable part as
4650 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4653 HOST_WIDE_INT offset2;
4655 if (! DECL_P (expr))
4660 expr2 = REG_EXPR (loc);
4661 offset2 = REG_OFFSET (loc);
4663 else if (MEM_P (loc))
4665 expr2 = MEM_EXPR (loc);
4666 offset2 = INT_MEM_OFFSET (loc);
4671 if (! expr2 || ! DECL_P (expr2))
4674 expr = var_debug_decl (expr);
4675 expr2 = var_debug_decl (expr2);
4677 return (expr == expr2 && offset == offset2);
4680 /* LOC is a REG or MEM that we would like to track if possible.
4681 If EXPR is null, we don't know what expression LOC refers to,
4682 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4683 LOC is an lvalue register.
4685 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4686 is something we can track. When returning true, store the mode of
4687 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4688 from EXPR in *OFFSET_OUT (if nonnull). */
4691 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4692 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4694 enum machine_mode mode;
4696 if (expr == NULL || !track_expr_p (expr, true))
4699 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4700 whole subreg, but only the old inner part is really relevant. */
4701 mode = GET_MODE (loc);
4702 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4704 enum machine_mode pseudo_mode;
4706 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4707 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
4709 offset += byte_lowpart_offset (pseudo_mode, mode);
4714 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4715 Do the same if we are storing to a register and EXPR occupies
4716 the whole of register LOC; in that case, the whole of EXPR is
4717 being changed. We exclude complex modes from the second case
4718 because the real and imaginary parts are represented as separate
4719 pseudo registers, even if the whole complex value fits into one
4721 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4723 && !COMPLEX_MODE_P (DECL_MODE (expr))
4724 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4725 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4727 mode = DECL_MODE (expr);
4731 if (offset < 0 || offset >= MAX_VAR_PARTS)
4737 *offset_out = offset;
4741 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4742 want to track. When returning nonnull, make sure that the attributes
4743 on the returned value are updated. */
4746 var_lowpart (enum machine_mode mode, rtx loc)
4748 unsigned int offset, reg_offset, regno;
4750 if (!REG_P (loc) && !MEM_P (loc))
4753 if (GET_MODE (loc) == mode)
4756 offset = byte_lowpart_offset (mode, GET_MODE (loc));
4759 return adjust_address_nv (loc, mode, offset);
4761 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4762 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4764 return gen_rtx_REG_offset (loc, mode, regno, offset);
4767 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
4768 hard_frame_pointer_rtx is being mapped to it. */
4769 static rtx cfa_base_rtx;
4771 /* Carry information about uses and stores while walking rtx. */
4773 struct count_use_info
4775 /* The insn where the RTX is. */
4778 /* The basic block where insn is. */
4781 /* The array of n_sets sets in the insn, as determined by cselib. */
4782 struct cselib_set *sets;
4785 /* True if we're counting stores, false otherwise. */
4789 /* Find a VALUE corresponding to X. */
4791 static inline cselib_val *
4792 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4798 /* This is called after uses are set up and before stores are
4799 processed bycselib, so it's safe to look up srcs, but not
4800 dsts. So we look up expressions that appear in srcs or in
4801 dest expressions, but we search the sets array for dests of
4805 for (i = 0; i < cui->n_sets; i++)
4806 if (cui->sets[i].dest == x)
4807 return cui->sets[i].src_elt;
4810 return cselib_lookup (x, mode, 0);
4816 /* Helper function to get mode of MEM's address. */
4818 static inline enum machine_mode
4819 get_address_mode (rtx mem)
4821 enum machine_mode mode = GET_MODE (XEXP (mem, 0));
4822 if (mode != VOIDmode)
4824 return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
4827 /* Replace all registers and addresses in an expression with VALUE
4828 expressions that map back to them, unless the expression is a
4829 register. If no mapping is or can be performed, returns NULL. */
4832 replace_expr_with_values (rtx loc)
4836 else if (MEM_P (loc))
4838 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
4839 get_address_mode (loc), 0);
4841 return replace_equiv_address_nv (loc, addr->val_rtx);
4846 return cselib_subst_to_values (loc);
4849 /* Determine what kind of micro operation to choose for a USE. Return
4850 MO_CLOBBER if no micro operation is to be generated. */
4852 static enum micro_operation_type
4853 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
4857 if (cui && cui->sets)
4859 if (GET_CODE (loc) == VAR_LOCATION)
4861 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
4863 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
4864 if (! VAR_LOC_UNKNOWN_P (ploc))
4866 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
4868 /* ??? flag_float_store and volatile mems are never
4869 given values, but we could in theory use them for
4871 gcc_assert (val || 1);
4879 if (REG_P (loc) || MEM_P (loc))
4882 *modep = GET_MODE (loc);
4886 || (find_use_val (loc, GET_MODE (loc), cui)
4887 && cselib_lookup (XEXP (loc, 0),
4888 get_address_mode (loc), 0)))
4893 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4895 if (val && !cselib_preserved_value_p (val))
4903 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
4905 if (loc == cfa_base_rtx)
4907 expr = REG_EXPR (loc);
4910 return MO_USE_NO_VAR;
4911 else if (target_for_debug_bind (var_debug_decl (expr)))
4913 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
4914 false, modep, NULL))
4917 return MO_USE_NO_VAR;
4919 else if (MEM_P (loc))
4921 expr = MEM_EXPR (loc);
4925 else if (target_for_debug_bind (var_debug_decl (expr)))
4927 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
4928 false, modep, NULL))
4937 /* Log to OUT information about micro-operation MOPT involving X in
4941 log_op_type (rtx x, basic_block bb, rtx insn,
4942 enum micro_operation_type mopt, FILE *out)
4944 fprintf (out, "bb %i op %i insn %i %s ",
4945 bb->index, VEC_length (micro_operation, VTI (bb)->mos),
4946 INSN_UID (insn), micro_operation_type_name[mopt]);
4947 print_inline_rtx (out, x, 2);
4951 /* Tell whether the CONCAT used to holds a VALUE and its location
4952 needs value resolution, i.e., an attempt of mapping the location
4953 back to other incoming values. */
4954 #define VAL_NEEDS_RESOLUTION(x) \
4955 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
4956 /* Whether the location in the CONCAT is a tracked expression, that
4957 should also be handled like a MO_USE. */
4958 #define VAL_HOLDS_TRACK_EXPR(x) \
4959 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
4960 /* Whether the location in the CONCAT should be handled like a MO_COPY
4962 #define VAL_EXPR_IS_COPIED(x) \
4963 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
4964 /* Whether the location in the CONCAT should be handled like a
4965 MO_CLOBBER as well. */
4966 #define VAL_EXPR_IS_CLOBBERED(x) \
4967 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
4968 /* Whether the location is a CONCAT of the MO_VAL_SET expression and
4969 a reverse operation that should be handled afterwards. */
4970 #define VAL_EXPR_HAS_REVERSE(x) \
4971 (RTL_FLAG_CHECK1 ("VAL_EXPR_HAS_REVERSE", (x), CONCAT)->return_val)
4973 /* All preserved VALUEs. */
4974 static VEC (rtx, heap) *preserved_values;
4976 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
4979 preserve_value (cselib_val *val)
4981 cselib_preserve_value (val);
4982 VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
4985 /* Helper function for MO_VAL_LOC handling. Return non-zero if
4986 any rtxes not suitable for CONST use not replaced by VALUEs
4990 non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
4995 switch (GET_CODE (*x))
5006 return !MEM_READONLY_P (*x);
5012 /* Add uses (register and memory references) LOC which will be tracked
5013 to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
5016 add_uses (rtx *ploc, void *data)
5019 enum machine_mode mode = VOIDmode;
5020 struct count_use_info *cui = (struct count_use_info *)data;
5021 enum micro_operation_type type = use_type (loc, cui, &mode);
5023 if (type != MO_CLOBBER)
5025 basic_block bb = cui->bb;
5029 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5030 mo.insn = cui->insn;
5032 if (type == MO_VAL_LOC)
5035 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5038 gcc_assert (cui->sets);
5041 && !REG_P (XEXP (vloc, 0))
5042 && !MEM_P (XEXP (vloc, 0))
5043 && (GET_CODE (XEXP (vloc, 0)) != PLUS
5044 || XEXP (XEXP (vloc, 0), 0) != cfa_base_rtx
5045 || !CONST_INT_P (XEXP (XEXP (vloc, 0), 1))))
5048 enum machine_mode address_mode = get_address_mode (mloc);
5050 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
5052 if (val && !cselib_preserved_value_p (val))
5054 micro_operation moa;
5055 preserve_value (val);
5056 mloc = cselib_subst_to_values (XEXP (mloc, 0));
5057 moa.type = MO_VAL_USE;
5058 moa.insn = cui->insn;
5059 moa.u.loc = gen_rtx_CONCAT (address_mode,
5060 val->val_rtx, mloc);
5061 if (dump_file && (dump_flags & TDF_DETAILS))
5062 log_op_type (moa.u.loc, cui->bb, cui->insn,
5063 moa.type, dump_file);
5064 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5068 if (CONSTANT_P (vloc)
5069 && (GET_CODE (vloc) != CONST
5070 || for_each_rtx (&vloc, non_suitable_const, NULL)))
5071 /* For constants don't look up any value. */;
5072 else if (!VAR_LOC_UNKNOWN_P (vloc)
5073 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5075 enum machine_mode mode2;
5076 enum micro_operation_type type2;
5077 rtx nloc = replace_expr_with_values (vloc);
5081 oloc = shallow_copy_rtx (oloc);
5082 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5085 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5087 type2 = use_type (vloc, 0, &mode2);
5089 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5090 || type2 == MO_CLOBBER);
5092 if (type2 == MO_CLOBBER
5093 && !cselib_preserved_value_p (val))
5095 VAL_NEEDS_RESOLUTION (oloc) = 1;
5096 preserve_value (val);
5099 else if (!VAR_LOC_UNKNOWN_P (vloc))
5101 oloc = shallow_copy_rtx (oloc);
5102 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5107 else if (type == MO_VAL_USE)
5109 enum machine_mode mode2 = VOIDmode;
5110 enum micro_operation_type type2;
5111 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5112 rtx vloc, oloc = loc, nloc;
5114 gcc_assert (cui->sets);
5117 && !REG_P (XEXP (oloc, 0))
5118 && !MEM_P (XEXP (oloc, 0))
5119 && (GET_CODE (XEXP (oloc, 0)) != PLUS
5120 || XEXP (XEXP (oloc, 0), 0) != cfa_base_rtx
5121 || !CONST_INT_P (XEXP (XEXP (oloc, 0), 1))))
5124 enum machine_mode address_mode = get_address_mode (mloc);
5126 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
5128 if (val && !cselib_preserved_value_p (val))
5130 micro_operation moa;
5131 preserve_value (val);
5132 mloc = cselib_subst_to_values (XEXP (mloc, 0));
5133 moa.type = MO_VAL_USE;
5134 moa.insn = cui->insn;
5135 moa.u.loc = gen_rtx_CONCAT (address_mode,
5136 val->val_rtx, mloc);
5137 if (dump_file && (dump_flags & TDF_DETAILS))
5138 log_op_type (moa.u.loc, cui->bb, cui->insn,
5139 moa.type, dump_file);
5140 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5144 type2 = use_type (loc, 0, &mode2);
5146 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5147 || type2 == MO_CLOBBER);
5149 if (type2 == MO_USE)
5150 vloc = var_lowpart (mode2, loc);
5154 /* The loc of a MO_VAL_USE may have two forms:
5156 (concat val src): val is at src, a value-based
5159 (concat (concat val use) src): same as above, with use as
5160 the MO_USE tracked value, if it differs from src.
5164 nloc = replace_expr_with_values (loc);
5169 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5171 oloc = val->val_rtx;
5173 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5175 if (type2 == MO_USE)
5176 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5177 if (!cselib_preserved_value_p (val))
5179 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5180 preserve_value (val);
5184 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5186 if (dump_file && (dump_flags & TDF_DETAILS))
5187 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5188 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5194 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5197 add_uses_1 (rtx *x, void *cui)
5199 for_each_rtx (x, add_uses, cui);
5202 /* Attempt to reverse the EXPR operation in the debug info. Say for
5203 reg1 = reg2 + 6 even when reg2 is no longer live we
5204 can express its value as VAL - 6. */
5207 reverse_op (rtx val, const_rtx expr)
5213 if (GET_CODE (expr) != SET)
5216 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5219 src = SET_SRC (expr);
5220 switch (GET_CODE (src))
5234 if (!REG_P (XEXP (src, 0)) || !SCALAR_INT_MODE_P (GET_MODE (src)))
5237 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0);
5238 if (!v || !cselib_preserved_value_p (v))
5241 switch (GET_CODE (src))
5245 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5247 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5251 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5263 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5265 arg = XEXP (src, 1);
5266 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5268 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5269 if (arg == NULL_RTX)
5271 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5274 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5276 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5277 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5278 breaks a lot of routines during var-tracking. */
5279 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5285 return gen_rtx_CONCAT (GET_MODE (v->val_rtx), v->val_rtx, ret);
5288 /* Add stores (register and memory references) LOC which will be tracked
5289 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5290 CUIP->insn is instruction which the LOC is part of. */
5293 add_stores (rtx loc, const_rtx expr, void *cuip)
5295 enum machine_mode mode = VOIDmode, mode2;
5296 struct count_use_info *cui = (struct count_use_info *)cuip;
5297 basic_block bb = cui->bb;
5299 rtx oloc = loc, nloc, src = NULL;
5300 enum micro_operation_type type = use_type (loc, cui, &mode);
5301 bool track_p = false;
5303 bool resolve, preserve;
5306 if (type == MO_CLOBBER)
5313 gcc_assert (loc != cfa_base_rtx);
5314 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5315 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5316 || GET_CODE (expr) == CLOBBER)
5318 mo.type = MO_CLOBBER;
5323 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
5324 src = var_lowpart (mode2, SET_SRC (expr));
5325 loc = var_lowpart (mode2, loc);
5334 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5335 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5342 mo.insn = cui->insn;
5344 else if (MEM_P (loc)
5345 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5348 if (MEM_P (loc) && type == MO_VAL_SET
5349 && !REG_P (XEXP (loc, 0))
5350 && !MEM_P (XEXP (loc, 0))
5351 && (GET_CODE (XEXP (loc, 0)) != PLUS
5352 || XEXP (XEXP (loc, 0), 0) != cfa_base_rtx
5353 || !CONST_INT_P (XEXP (XEXP (loc, 0), 1))))
5356 enum machine_mode address_mode = get_address_mode (mloc);
5357 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5360 if (val && !cselib_preserved_value_p (val))
5362 preserve_value (val);
5363 mo.type = MO_VAL_USE;
5364 mloc = cselib_subst_to_values (XEXP (mloc, 0));
5365 mo.u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
5366 mo.insn = cui->insn;
5367 if (dump_file && (dump_flags & TDF_DETAILS))
5368 log_op_type (mo.u.loc, cui->bb, cui->insn,
5369 mo.type, dump_file);
5370 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5374 if (GET_CODE (expr) == CLOBBER || !track_p)
5376 mo.type = MO_CLOBBER;
5377 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5381 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
5382 src = var_lowpart (mode2, SET_SRC (expr));
5383 loc = var_lowpart (mode2, loc);
5392 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5393 if (same_variable_part_p (SET_SRC (xexpr),
5395 INT_MEM_OFFSET (loc)))
5402 mo.insn = cui->insn;
5407 if (type != MO_VAL_SET)
5408 goto log_and_return;
5410 v = find_use_val (oloc, mode, cui);
5413 goto log_and_return;
5415 resolve = preserve = !cselib_preserved_value_p (v);
5417 nloc = replace_expr_with_values (oloc);
5421 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5423 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0);
5425 gcc_assert (oval != v);
5426 gcc_assert (REG_P (oloc) || MEM_P (oloc));
5428 if (!cselib_preserved_value_p (oval))
5430 micro_operation moa;
5432 preserve_value (oval);
5434 moa.type = MO_VAL_USE;
5435 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5436 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
5437 moa.insn = cui->insn;
5439 if (dump_file && (dump_flags & TDF_DETAILS))
5440 log_op_type (moa.u.loc, cui->bb, cui->insn,
5441 moa.type, dump_file);
5442 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5447 else if (resolve && GET_CODE (mo.u.loc) == SET)
5449 nloc = replace_expr_with_values (SET_SRC (expr));
5451 /* Avoid the mode mismatch between oexpr and expr. */
5452 if (!nloc && mode != mode2)
5454 nloc = SET_SRC (expr);
5455 gcc_assert (oloc == SET_DEST (expr));
5459 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
5462 if (oloc == SET_DEST (mo.u.loc))
5463 /* No point in duplicating. */
5465 if (!REG_P (SET_SRC (mo.u.loc)))
5471 if (GET_CODE (mo.u.loc) == SET
5472 && oloc == SET_DEST (mo.u.loc))
5473 /* No point in duplicating. */
5479 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5481 if (mo.u.loc != oloc)
5482 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
5484 /* The loc of a MO_VAL_SET may have various forms:
5486 (concat val dst): dst now holds val
5488 (concat val (set dst src)): dst now holds val, copied from src
5490 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5491 after replacing mems and non-top-level regs with values.
5493 (concat (concat val dstv) (set dst src)): dst now holds val,
5494 copied from src. dstv is a value-based representation of dst, if
5495 it differs from dst. If resolution is needed, src is a REG, and
5496 its mode is the same as that of val.
5498 (concat (concat val (set dstv srcv)) (set dst src)): src
5499 copied to dst, holding val. dstv and srcv are value-based
5500 representations of dst and src, respectively.
5504 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
5506 reverse = reverse_op (v->val_rtx, expr);
5509 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, reverse);
5510 VAL_EXPR_HAS_REVERSE (loc) = 1;
5517 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5520 VAL_NEEDS_RESOLUTION (loc) = resolve;
5523 if (mo.type == MO_CLOBBER)
5524 VAL_EXPR_IS_CLOBBERED (loc) = 1;
5525 if (mo.type == MO_COPY)
5526 VAL_EXPR_IS_COPIED (loc) = 1;
5528 mo.type = MO_VAL_SET;
5531 if (dump_file && (dump_flags & TDF_DETAILS))
5532 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5533 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5536 /* Callback for cselib_record_sets_hook, that records as micro
5537 operations uses and stores in an insn after cselib_record_sets has
5538 analyzed the sets in an insn, but before it modifies the stored
5539 values in the internal tables, unless cselib_record_sets doesn't
5540 call it directly (perhaps because we're not doing cselib in the
5541 first place, in which case sets and n_sets will be 0). */
5544 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
5546 basic_block bb = BLOCK_FOR_INSN (insn);
5548 struct count_use_info cui;
5549 micro_operation *mos;
5551 cselib_hook_called = true;
5556 cui.n_sets = n_sets;
5558 n1 = VEC_length (micro_operation, VTI (bb)->mos);
5559 cui.store_p = false;
5560 note_uses (&PATTERN (insn), add_uses_1, &cui);
5561 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5562 mos = VEC_address (micro_operation, VTI (bb)->mos);
5564 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
5568 while (n1 < n2 && mos[n1].type == MO_USE)
5570 while (n1 < n2 && mos[n2].type != MO_USE)
5582 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5585 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
5587 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
5605 mo.u.loc = NULL_RTX;
5607 if (dump_file && (dump_flags & TDF_DETAILS))
5608 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
5609 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5612 n1 = VEC_length (micro_operation, VTI (bb)->mos);
5613 /* This will record NEXT_INSN (insn), such that we can
5614 insert notes before it without worrying about any
5615 notes that MO_USEs might emit after the insn. */
5617 note_stores (PATTERN (insn), add_stores, &cui);
5618 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5619 mos = VEC_address (micro_operation, VTI (bb)->mos);
5621 /* Order the MO_VAL_USEs first (note_stores does nothing
5622 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
5623 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
5626 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
5628 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
5640 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5643 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
5645 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
5658 static enum var_init_status
5659 find_src_status (dataflow_set *in, rtx src)
5661 tree decl = NULL_TREE;
5662 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
5664 if (! flag_var_tracking_uninit)
5665 status = VAR_INIT_STATUS_INITIALIZED;
5667 if (src && REG_P (src))
5668 decl = var_debug_decl (REG_EXPR (src));
5669 else if (src && MEM_P (src))
5670 decl = var_debug_decl (MEM_EXPR (src));
5673 status = get_init_value (in, src, dv_from_decl (decl));
5678 /* SRC is the source of an assignment. Use SET to try to find what
5679 was ultimately assigned to SRC. Return that value if known,
5680 otherwise return SRC itself. */
5683 find_src_set_src (dataflow_set *set, rtx src)
5685 tree decl = NULL_TREE; /* The variable being copied around. */
5686 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
5688 location_chain nextp;
5692 if (src && REG_P (src))
5693 decl = var_debug_decl (REG_EXPR (src));
5694 else if (src && MEM_P (src))
5695 decl = var_debug_decl (MEM_EXPR (src));
5699 decl_or_value dv = dv_from_decl (decl);
5701 var = shared_hash_find (set->vars, dv);
5705 for (i = 0; i < var->n_var_parts && !found; i++)
5706 for (nextp = var->var_part[i].loc_chain; nextp && !found;
5707 nextp = nextp->next)
5708 if (rtx_equal_p (nextp->loc, src))
5710 set_src = nextp->set_src;
5720 /* Compute the changes of variable locations in the basic block BB. */
5723 compute_bb_dataflow (basic_block bb)
5726 micro_operation *mo;
5728 dataflow_set old_out;
5729 dataflow_set *in = &VTI (bb)->in;
5730 dataflow_set *out = &VTI (bb)->out;
5732 dataflow_set_init (&old_out);
5733 dataflow_set_copy (&old_out, out);
5734 dataflow_set_copy (out, in);
5736 for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
5738 rtx insn = mo->insn;
5743 dataflow_set_clear_at_call (out);
5748 rtx loc = mo->u.loc;
5751 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5752 else if (MEM_P (loc))
5753 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5759 rtx loc = mo->u.loc;
5763 if (GET_CODE (loc) == CONCAT)
5765 val = XEXP (loc, 0);
5766 vloc = XEXP (loc, 1);
5774 var = PAT_VAR_LOCATION_DECL (vloc);
5776 clobber_variable_part (out, NULL_RTX,
5777 dv_from_decl (var), 0, NULL_RTX);
5780 if (VAL_NEEDS_RESOLUTION (loc))
5781 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
5782 set_variable_part (out, val, dv_from_decl (var), 0,
5783 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
5786 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
5787 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
5788 dv_from_decl (var), 0,
5789 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
5796 rtx loc = mo->u.loc;
5797 rtx val, vloc, uloc;
5799 vloc = uloc = XEXP (loc, 1);
5800 val = XEXP (loc, 0);
5802 if (GET_CODE (val) == CONCAT)
5804 uloc = XEXP (val, 1);
5805 val = XEXP (val, 0);
5808 if (VAL_NEEDS_RESOLUTION (loc))
5809 val_resolve (out, val, vloc, insn);
5811 val_store (out, val, uloc, insn, false);
5813 if (VAL_HOLDS_TRACK_EXPR (loc))
5815 if (GET_CODE (uloc) == REG)
5816 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5818 else if (GET_CODE (uloc) == MEM)
5819 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5827 rtx loc = mo->u.loc;
5828 rtx val, vloc, uloc, reverse = NULL_RTX;
5831 if (VAL_EXPR_HAS_REVERSE (loc))
5833 reverse = XEXP (loc, 1);
5834 vloc = XEXP (loc, 0);
5836 uloc = XEXP (vloc, 1);
5837 val = XEXP (vloc, 0);
5840 if (GET_CODE (val) == CONCAT)
5842 vloc = XEXP (val, 1);
5843 val = XEXP (val, 0);
5846 if (GET_CODE (vloc) == SET)
5848 rtx vsrc = SET_SRC (vloc);
5850 gcc_assert (val != vsrc);
5851 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
5853 vloc = SET_DEST (vloc);
5855 if (VAL_NEEDS_RESOLUTION (loc))
5856 val_resolve (out, val, vsrc, insn);
5858 else if (VAL_NEEDS_RESOLUTION (loc))
5860 gcc_assert (GET_CODE (uloc) == SET
5861 && GET_CODE (SET_SRC (uloc)) == REG);
5862 val_resolve (out, val, SET_SRC (uloc), insn);
5865 if (VAL_HOLDS_TRACK_EXPR (loc))
5867 if (VAL_EXPR_IS_CLOBBERED (loc))
5870 var_reg_delete (out, uloc, true);
5871 else if (MEM_P (uloc))
5872 var_mem_delete (out, uloc, true);
5876 bool copied_p = VAL_EXPR_IS_COPIED (loc);
5878 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
5880 if (GET_CODE (uloc) == SET)
5882 set_src = SET_SRC (uloc);
5883 uloc = SET_DEST (uloc);
5888 if (flag_var_tracking_uninit)
5890 status = find_src_status (in, set_src);
5892 if (status == VAR_INIT_STATUS_UNKNOWN)
5893 status = find_src_status (out, set_src);
5896 set_src = find_src_set_src (in, set_src);
5900 var_reg_delete_and_set (out, uloc, !copied_p,
5902 else if (MEM_P (uloc))
5903 var_mem_delete_and_set (out, uloc, !copied_p,
5907 else if (REG_P (uloc))
5908 var_regno_delete (out, REGNO (uloc));
5910 val_store (out, val, vloc, insn, true);
5913 val_store (out, XEXP (reverse, 0), XEXP (reverse, 1),
5920 rtx loc = mo->u.loc;
5923 if (GET_CODE (loc) == SET)
5925 set_src = SET_SRC (loc);
5926 loc = SET_DEST (loc);
5930 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5932 else if (MEM_P (loc))
5933 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5940 rtx loc = mo->u.loc;
5941 enum var_init_status src_status;
5944 if (GET_CODE (loc) == SET)
5946 set_src = SET_SRC (loc);
5947 loc = SET_DEST (loc);
5950 if (! flag_var_tracking_uninit)
5951 src_status = VAR_INIT_STATUS_INITIALIZED;
5954 src_status = find_src_status (in, set_src);
5956 if (src_status == VAR_INIT_STATUS_UNKNOWN)
5957 src_status = find_src_status (out, set_src);
5960 set_src = find_src_set_src (in, set_src);
5963 var_reg_delete_and_set (out, loc, false, src_status, set_src);
5964 else if (MEM_P (loc))
5965 var_mem_delete_and_set (out, loc, false, src_status, set_src);
5971 rtx loc = mo->u.loc;
5974 var_reg_delete (out, loc, false);
5975 else if (MEM_P (loc))
5976 var_mem_delete (out, loc, false);
5982 rtx loc = mo->u.loc;
5985 var_reg_delete (out, loc, true);
5986 else if (MEM_P (loc))
5987 var_mem_delete (out, loc, true);
5992 out->stack_adjust += mo->u.adjust;
5997 if (MAY_HAVE_DEBUG_INSNS)
5999 dataflow_set_equiv_regs (out);
6000 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
6002 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
6005 htab_traverse (shared_hash_htab (out->vars),
6006 canonicalize_loc_order_check, out);
6009 changed = dataflow_set_different (&old_out, out);
6010 dataflow_set_destroy (&old_out);
6014 /* Find the locations of variables in the whole function. */
6017 vt_find_locations (void)
6019 fibheap_t worklist, pending, fibheap_swap;
6020 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6027 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6028 bool success = true;
6030 /* Compute reverse completion order of depth first search of the CFG
6031 so that the data-flow runs faster. */
6032 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
6033 bb_order = XNEWVEC (int, last_basic_block);
6034 pre_and_rev_post_order_compute (NULL, rc_order, false);
6035 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
6036 bb_order[rc_order[i]] = i;
6039 worklist = fibheap_new ();
6040 pending = fibheap_new ();
6041 visited = sbitmap_alloc (last_basic_block);
6042 in_worklist = sbitmap_alloc (last_basic_block);
6043 in_pending = sbitmap_alloc (last_basic_block);
6044 sbitmap_zero (in_worklist);
6047 fibheap_insert (pending, bb_order[bb->index], bb);
6048 sbitmap_ones (in_pending);
6050 while (success && !fibheap_empty (pending))
6052 fibheap_swap = pending;
6054 worklist = fibheap_swap;
6055 sbitmap_swap = in_pending;
6056 in_pending = in_worklist;
6057 in_worklist = sbitmap_swap;
6059 sbitmap_zero (visited);
6061 while (!fibheap_empty (worklist))
6063 bb = (basic_block) fibheap_extract_min (worklist);
6064 RESET_BIT (in_worklist, bb->index);
6065 if (!TEST_BIT (visited, bb->index))
6069 int oldinsz, oldoutsz;
6071 SET_BIT (visited, bb->index);
6073 if (VTI (bb)->in.vars)
6076 -= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6077 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6079 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
6081 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
6084 oldinsz = oldoutsz = 0;
6086 if (MAY_HAVE_DEBUG_INSNS)
6088 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
6089 bool first = true, adjust = false;
6091 /* Calculate the IN set as the intersection of
6092 predecessor OUT sets. */
6094 dataflow_set_clear (in);
6095 dst_can_be_shared = true;
6097 FOR_EACH_EDGE (e, ei, bb->preds)
6098 if (!VTI (e->src)->flooded)
6099 gcc_assert (bb_order[bb->index]
6100 <= bb_order[e->src->index]);
6103 dataflow_set_copy (in, &VTI (e->src)->out);
6104 first_out = &VTI (e->src)->out;
6109 dataflow_set_merge (in, &VTI (e->src)->out);
6115 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
6117 /* Merge and merge_adjust should keep entries in
6119 htab_traverse (shared_hash_htab (in->vars),
6120 canonicalize_loc_order_check,
6123 if (dst_can_be_shared)
6125 shared_hash_destroy (in->vars);
6126 in->vars = shared_hash_copy (first_out->vars);
6130 VTI (bb)->flooded = true;
6134 /* Calculate the IN set as union of predecessor OUT sets. */
6135 dataflow_set_clear (&VTI (bb)->in);
6136 FOR_EACH_EDGE (e, ei, bb->preds)
6137 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
6140 changed = compute_bb_dataflow (bb);
6141 htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6142 + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6144 if (htabmax && htabsz > htabmax)
6146 if (MAY_HAVE_DEBUG_INSNS)
6147 inform (DECL_SOURCE_LOCATION (cfun->decl),
6148 "variable tracking size limit exceeded with "
6149 "-fvar-tracking-assignments, retrying without");
6151 inform (DECL_SOURCE_LOCATION (cfun->decl),
6152 "variable tracking size limit exceeded");
6159 FOR_EACH_EDGE (e, ei, bb->succs)
6161 if (e->dest == EXIT_BLOCK_PTR)
6164 if (TEST_BIT (visited, e->dest->index))
6166 if (!TEST_BIT (in_pending, e->dest->index))
6168 /* Send E->DEST to next round. */
6169 SET_BIT (in_pending, e->dest->index);
6170 fibheap_insert (pending,
6171 bb_order[e->dest->index],
6175 else if (!TEST_BIT (in_worklist, e->dest->index))
6177 /* Add E->DEST to current round. */
6178 SET_BIT (in_worklist, e->dest->index);
6179 fibheap_insert (worklist, bb_order[e->dest->index],
6187 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
6189 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
6191 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
6193 (int)worklist->nodes, (int)pending->nodes, htabsz);
6195 if (dump_file && (dump_flags & TDF_DETAILS))
6197 fprintf (dump_file, "BB %i IN:\n", bb->index);
6198 dump_dataflow_set (&VTI (bb)->in);
6199 fprintf (dump_file, "BB %i OUT:\n", bb->index);
6200 dump_dataflow_set (&VTI (bb)->out);
6206 if (success && MAY_HAVE_DEBUG_INSNS)
6208 gcc_assert (VTI (bb)->flooded);
6211 fibheap_delete (worklist);
6212 fibheap_delete (pending);
6213 sbitmap_free (visited);
6214 sbitmap_free (in_worklist);
6215 sbitmap_free (in_pending);
6220 /* Print the content of the LIST to dump file. */
6223 dump_attrs_list (attrs list)
6225 for (; list; list = list->next)
6227 if (dv_is_decl_p (list->dv))
6228 print_mem_expr (dump_file, dv_as_decl (list->dv));
6230 print_rtl_single (dump_file, dv_as_value (list->dv));
6231 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
6233 fprintf (dump_file, "\n");
6236 /* Print the information about variable *SLOT to dump file. */
6239 dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
6241 variable var = (variable) *slot;
6245 /* Continue traversing the hash table. */
6249 /* Print the information about variable VAR to dump file. */
6252 dump_var (variable var)
6255 location_chain node;
6257 if (dv_is_decl_p (var->dv))
6259 const_tree decl = dv_as_decl (var->dv);
6261 if (DECL_NAME (decl))
6263 fprintf (dump_file, " name: %s",
6264 IDENTIFIER_POINTER (DECL_NAME (decl)));
6265 if (dump_flags & TDF_UID)
6266 fprintf (dump_file, "D.%u", DECL_UID (decl));
6268 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6269 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
6271 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
6272 fprintf (dump_file, "\n");
6276 fputc (' ', dump_file);
6277 print_rtl_single (dump_file, dv_as_value (var->dv));
6280 for (i = 0; i < var->n_var_parts; i++)
6282 fprintf (dump_file, " offset %ld\n",
6283 (long) var->var_part[i].offset);
6284 for (node = var->var_part[i].loc_chain; node; node = node->next)
6286 fprintf (dump_file, " ");
6287 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
6288 fprintf (dump_file, "[uninit]");
6289 print_rtl_single (dump_file, node->loc);
6294 /* Print the information about variables from hash table VARS to dump file. */
6297 dump_vars (htab_t vars)
6299 if (htab_elements (vars) > 0)
6301 fprintf (dump_file, "Variables:\n");
6302 htab_traverse (vars, dump_var_slot, NULL);
6306 /* Print the dataflow set SET to dump file. */
6309 dump_dataflow_set (dataflow_set *set)
6313 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
6315 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6319 fprintf (dump_file, "Reg %d:", i);
6320 dump_attrs_list (set->regs[i]);
6323 dump_vars (shared_hash_htab (set->vars));
6324 fprintf (dump_file, "\n");
6327 /* Print the IN and OUT sets for each basic block to dump file. */
6330 dump_dataflow_sets (void)
6336 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
6337 fprintf (dump_file, "IN:\n");
6338 dump_dataflow_set (&VTI (bb)->in);
6339 fprintf (dump_file, "OUT:\n");
6340 dump_dataflow_set (&VTI (bb)->out);
6344 /* Add variable VAR to the hash table of changed variables and
6345 if it has no locations delete it from SET's hash table. */
6348 variable_was_changed (variable var, dataflow_set *set)
6350 hashval_t hash = dv_htab_hash (var->dv);
6355 bool old_cur_loc_changed = false;
6357 /* Remember this decl or VALUE has been added to changed_variables. */
6358 set_dv_changed (var->dv, true);
6360 slot = htab_find_slot_with_hash (changed_variables,
6366 variable old_var = (variable) *slot;
6367 gcc_assert (old_var->in_changed_variables);
6368 old_var->in_changed_variables = false;
6369 old_cur_loc_changed = old_var->cur_loc_changed;
6370 variable_htab_free (*slot);
6372 if (set && var->n_var_parts == 0)
6376 empty_var = (variable) pool_alloc (dv_pool (var->dv));
6377 empty_var->dv = var->dv;
6378 empty_var->refcount = 1;
6379 empty_var->n_var_parts = 0;
6380 empty_var->cur_loc_changed = true;
6381 empty_var->in_changed_variables = true;
6388 var->in_changed_variables = true;
6389 /* If within processing one uop a variable is deleted
6390 and then readded, we need to assume it has changed. */
6391 if (old_cur_loc_changed)
6392 var->cur_loc_changed = true;
6399 if (var->n_var_parts == 0)
6404 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
6407 if (shared_hash_shared (set->vars))
6408 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
6410 htab_clear_slot (shared_hash_htab (set->vars), slot);
6416 /* Look for the index in VAR->var_part corresponding to OFFSET.
6417 Return -1 if not found. If INSERTION_POINT is non-NULL, the
6418 referenced int will be set to the index that the part has or should
6419 have, if it should be inserted. */
6422 find_variable_location_part (variable var, HOST_WIDE_INT offset,
6423 int *insertion_point)
6427 /* Find the location part. */
6429 high = var->n_var_parts;
6432 pos = (low + high) / 2;
6433 if (var->var_part[pos].offset < offset)
6440 if (insertion_point)
6441 *insertion_point = pos;
6443 if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
6450 set_slot_part (dataflow_set *set, rtx loc, void **slot,
6451 decl_or_value dv, HOST_WIDE_INT offset,
6452 enum var_init_status initialized, rtx set_src)
6455 location_chain node, next;
6456 location_chain *nextp;
6458 bool onepart = dv_onepart_p (dv);
6460 gcc_assert (offset == 0 || !onepart);
6461 gcc_assert (loc != dv_as_opaque (dv));
6463 var = (variable) *slot;
6465 if (! flag_var_tracking_uninit)
6466 initialized = VAR_INIT_STATUS_INITIALIZED;
6470 /* Create new variable information. */
6471 var = (variable) pool_alloc (dv_pool (dv));
6474 var->n_var_parts = 1;
6475 var->cur_loc_changed = false;
6476 var->in_changed_variables = false;
6477 var->var_part[0].offset = offset;
6478 var->var_part[0].loc_chain = NULL;
6479 var->var_part[0].cur_loc = NULL;
6482 nextp = &var->var_part[0].loc_chain;
6488 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
6492 if (GET_CODE (loc) == VALUE)
6494 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6495 nextp = &node->next)
6496 if (GET_CODE (node->loc) == VALUE)
6498 if (node->loc == loc)
6503 if (canon_value_cmp (node->loc, loc))
6511 else if (REG_P (node->loc) || MEM_P (node->loc))
6519 else if (REG_P (loc))
6521 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6522 nextp = &node->next)
6523 if (REG_P (node->loc))
6525 if (REGNO (node->loc) < REGNO (loc))
6529 if (REGNO (node->loc) == REGNO (loc))
6542 else if (MEM_P (loc))
6544 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6545 nextp = &node->next)
6546 if (REG_P (node->loc))
6548 else if (MEM_P (node->loc))
6550 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
6562 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6563 nextp = &node->next)
6564 if ((r = loc_cmp (node->loc, loc)) >= 0)
6572 if (shared_var_p (var, set->vars))
6574 slot = unshare_variable (set, slot, var, initialized);
6575 var = (variable)*slot;
6576 for (nextp = &var->var_part[0].loc_chain; c;
6577 nextp = &(*nextp)->next)
6579 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
6586 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
6588 pos = find_variable_location_part (var, offset, &inspos);
6592 node = var->var_part[pos].loc_chain;
6595 && ((REG_P (node->loc) && REG_P (loc)
6596 && REGNO (node->loc) == REGNO (loc))
6597 || rtx_equal_p (node->loc, loc)))
6599 /* LOC is in the beginning of the chain so we have nothing
6601 if (node->init < initialized)
6602 node->init = initialized;
6603 if (set_src != NULL)
6604 node->set_src = set_src;
6610 /* We have to make a copy of a shared variable. */
6611 if (shared_var_p (var, set->vars))
6613 slot = unshare_variable (set, slot, var, initialized);
6614 var = (variable)*slot;
6620 /* We have not found the location part, new one will be created. */
6622 /* We have to make a copy of the shared variable. */
6623 if (shared_var_p (var, set->vars))
6625 slot = unshare_variable (set, slot, var, initialized);
6626 var = (variable)*slot;
6629 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
6630 thus there are at most MAX_VAR_PARTS different offsets. */
6631 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
6632 && (!var->n_var_parts || !dv_onepart_p (var->dv)));
6634 /* We have to move the elements of array starting at index
6635 inspos to the next position. */
6636 for (pos = var->n_var_parts; pos > inspos; pos--)
6637 var->var_part[pos] = var->var_part[pos - 1];
6640 var->var_part[pos].offset = offset;
6641 var->var_part[pos].loc_chain = NULL;
6642 var->var_part[pos].cur_loc = NULL;
6645 /* Delete the location from the list. */
6646 nextp = &var->var_part[pos].loc_chain;
6647 for (node = var->var_part[pos].loc_chain; node; node = next)
6650 if ((REG_P (node->loc) && REG_P (loc)
6651 && REGNO (node->loc) == REGNO (loc))
6652 || rtx_equal_p (node->loc, loc))
6654 /* Save these values, to assign to the new node, before
6655 deleting this one. */
6656 if (node->init > initialized)
6657 initialized = node->init;
6658 if (node->set_src != NULL && set_src == NULL)
6659 set_src = node->set_src;
6660 if (var->var_part[pos].cur_loc == node->loc)
6662 var->var_part[pos].cur_loc = NULL;
6663 var->cur_loc_changed = true;
6665 pool_free (loc_chain_pool, node);
6670 nextp = &node->next;
6673 nextp = &var->var_part[pos].loc_chain;
6676 /* Add the location to the beginning. */
6677 node = (location_chain) pool_alloc (loc_chain_pool);
6679 node->init = initialized;
6680 node->set_src = set_src;
6681 node->next = *nextp;
6684 if (onepart && emit_notes)
6685 add_value_chains (var->dv, loc);
6687 /* If no location was emitted do so. */
6688 if (var->var_part[pos].cur_loc == NULL)
6689 variable_was_changed (var, set);
6694 /* Set the part of variable's location in the dataflow set SET. The
6695 variable part is specified by variable's declaration in DV and
6696 offset OFFSET and the part's location by LOC. IOPT should be
6697 NO_INSERT if the variable is known to be in SET already and the
6698 variable hash table must not be resized, and INSERT otherwise. */
6701 set_variable_part (dataflow_set *set, rtx loc,
6702 decl_or_value dv, HOST_WIDE_INT offset,
6703 enum var_init_status initialized, rtx set_src,
6704 enum insert_option iopt)
6708 if (iopt == NO_INSERT)
6709 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6712 slot = shared_hash_find_slot (set->vars, dv);
6714 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
6716 slot = set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
6719 /* Remove all recorded register locations for the given variable part
6720 from dataflow set SET, except for those that are identical to loc.
6721 The variable part is specified by variable's declaration or value
6722 DV and offset OFFSET. */
6725 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
6726 HOST_WIDE_INT offset, rtx set_src)
6728 variable var = (variable) *slot;
6729 int pos = find_variable_location_part (var, offset, NULL);
6733 location_chain node, next;
6735 /* Remove the register locations from the dataflow set. */
6736 next = var->var_part[pos].loc_chain;
6737 for (node = next; node; node = next)
6740 if (node->loc != loc
6741 && (!flag_var_tracking_uninit
6744 || !rtx_equal_p (set_src, node->set_src)))
6746 if (REG_P (node->loc))
6751 /* Remove the variable part from the register's
6752 list, but preserve any other variable parts
6753 that might be regarded as live in that same
6755 anextp = &set->regs[REGNO (node->loc)];
6756 for (anode = *anextp; anode; anode = anext)
6758 anext = anode->next;
6759 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
6760 && anode->offset == offset)
6762 pool_free (attrs_pool, anode);
6766 anextp = &anode->next;
6770 slot = delete_slot_part (set, node->loc, slot, offset);
6778 /* Remove all recorded register locations for the given variable part
6779 from dataflow set SET, except for those that are identical to loc.
6780 The variable part is specified by variable's declaration or value
6781 DV and offset OFFSET. */
6784 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6785 HOST_WIDE_INT offset, rtx set_src)
6789 if (!dv_as_opaque (dv)
6790 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
6793 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6797 slot = clobber_slot_part (set, loc, slot, offset, set_src);
6800 /* Delete the part of variable's location from dataflow set SET. The
6801 variable part is specified by its SET->vars slot SLOT and offset
6802 OFFSET and the part's location by LOC. */
6805 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
6806 HOST_WIDE_INT offset)
6808 variable var = (variable) *slot;
6809 int pos = find_variable_location_part (var, offset, NULL);
6813 location_chain node, next;
6814 location_chain *nextp;
6817 if (shared_var_p (var, set->vars))
6819 /* If the variable contains the location part we have to
6820 make a copy of the variable. */
6821 for (node = var->var_part[pos].loc_chain; node;
6824 if ((REG_P (node->loc) && REG_P (loc)
6825 && REGNO (node->loc) == REGNO (loc))
6826 || rtx_equal_p (node->loc, loc))
6828 slot = unshare_variable (set, slot, var,
6829 VAR_INIT_STATUS_UNKNOWN);
6830 var = (variable)*slot;
6836 /* Delete the location part. */
6838 nextp = &var->var_part[pos].loc_chain;
6839 for (node = *nextp; node; node = next)
6842 if ((REG_P (node->loc) && REG_P (loc)
6843 && REGNO (node->loc) == REGNO (loc))
6844 || rtx_equal_p (node->loc, loc))
6846 if (emit_notes && pos == 0 && dv_onepart_p (var->dv))
6847 remove_value_chains (var->dv, node->loc);
6848 /* If we have deleted the location which was last emitted
6849 we have to emit new location so add the variable to set
6850 of changed variables. */
6851 if (var->var_part[pos].cur_loc == node->loc)
6854 var->var_part[pos].cur_loc = NULL;
6855 var->cur_loc_changed = true;
6857 pool_free (loc_chain_pool, node);
6862 nextp = &node->next;
6865 if (var->var_part[pos].loc_chain == NULL)
6870 var->cur_loc_changed = true;
6871 while (pos < var->n_var_parts)
6873 var->var_part[pos] = var->var_part[pos + 1];
6878 variable_was_changed (var, set);
6884 /* Delete the part of variable's location from dataflow set SET. The
6885 variable part is specified by variable's declaration or value DV
6886 and offset OFFSET and the part's location by LOC. */
6889 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6890 HOST_WIDE_INT offset)
6892 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
6896 slot = delete_slot_part (set, loc, slot, offset);
6899 /* Structure for passing some other parameters to function
6900 vt_expand_loc_callback. */
6901 struct expand_loc_callback_data
6903 /* The variables and values active at this point. */
6906 /* True in vt_expand_loc_dummy calls, no rtl should be allocated.
6907 Non-NULL should be returned if vt_expand_loc would return
6908 non-NULL in that case, NULL otherwise. cur_loc_changed should be
6909 computed and cur_loc recomputed when possible (but just once
6910 per emit_notes_for_changes call). */
6913 /* True if expansion of subexpressions had to recompute some
6914 VALUE/DEBUG_EXPR_DECL's cur_loc or used a VALUE/DEBUG_EXPR_DECL
6915 whose cur_loc has been already recomputed during current
6916 emit_notes_for_changes call. */
6917 bool cur_loc_changed;
6920 /* Callback for cselib_expand_value, that looks for expressions
6921 holding the value in the var-tracking hash tables. Return X for
6922 standard processing, anything else is to be used as-is. */
6925 vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
6927 struct expand_loc_callback_data *elcd
6928 = (struct expand_loc_callback_data *) data;
6929 bool dummy = elcd->dummy;
6930 bool cur_loc_changed = elcd->cur_loc_changed;
6934 rtx result, subreg, xret;
6936 switch (GET_CODE (x))
6941 if (cselib_dummy_expand_value_rtx_cb (SUBREG_REG (x), regs,
6943 vt_expand_loc_callback, data))
6949 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
6951 vt_expand_loc_callback, data);
6956 result = simplify_gen_subreg (GET_MODE (x), subreg,
6957 GET_MODE (SUBREG_REG (x)),
6960 /* Invalid SUBREGs are ok in debug info. ??? We could try
6961 alternate expansions for the VALUE as well. */
6963 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
6968 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
6973 dv = dv_from_value (x);
6981 if (VALUE_RECURSED_INTO (x))
6984 var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));
6988 if (dummy && dv_changed_p (dv))
6989 elcd->cur_loc_changed = true;
6993 if (var->n_var_parts == 0)
6996 elcd->cur_loc_changed = true;
7000 gcc_assert (var->n_var_parts == 1);
7002 VALUE_RECURSED_INTO (x) = true;
7005 if (var->var_part[0].cur_loc)
7009 if (cselib_dummy_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
7011 vt_expand_loc_callback, data))
7015 result = cselib_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
7017 vt_expand_loc_callback, data);
7019 set_dv_changed (dv, false);
7021 if (!result && dv_changed_p (dv))
7023 set_dv_changed (dv, false);
7024 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
7025 if (loc->loc == var->var_part[0].cur_loc)
7029 elcd->cur_loc_changed = cur_loc_changed;
7030 if (cselib_dummy_expand_value_rtx_cb (loc->loc, regs, max_depth,
7031 vt_expand_loc_callback,
7040 result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
7041 vt_expand_loc_callback, data);
7045 if (dummy && (result || var->var_part[0].cur_loc))
7046 var->cur_loc_changed = true;
7047 var->var_part[0].cur_loc = loc ? loc->loc : NULL_RTX;
7051 if (var->cur_loc_changed)
7052 elcd->cur_loc_changed = true;
7053 else if (!result && var->var_part[0].cur_loc == NULL_RTX)
7054 elcd->cur_loc_changed = cur_loc_changed;
7057 VALUE_RECURSED_INTO (x) = false;
7064 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
7068 vt_expand_loc (rtx loc, htab_t vars)
7070 struct expand_loc_callback_data data;
7072 if (!MAY_HAVE_DEBUG_INSNS)
7077 data.cur_loc_changed = false;
7078 loc = cselib_expand_value_rtx_cb (loc, scratch_regs, 5,
7079 vt_expand_loc_callback, &data);
7081 if (loc && MEM_P (loc))
7082 loc = targetm.delegitimize_address (loc);
7086 /* Like vt_expand_loc, but only return true/false (whether vt_expand_loc
7087 would succeed or not, without actually allocating new rtxes. */
7090 vt_expand_loc_dummy (rtx loc, htab_t vars, bool *pcur_loc_changed)
7092 struct expand_loc_callback_data data;
7095 gcc_assert (MAY_HAVE_DEBUG_INSNS);
7098 data.cur_loc_changed = false;
7099 ret = cselib_dummy_expand_value_rtx_cb (loc, scratch_regs, 5,
7100 vt_expand_loc_callback, &data);
7101 *pcur_loc_changed = data.cur_loc_changed;
7105 #ifdef ENABLE_RTL_CHECKING
7106 /* Used to verify that cur_loc_changed updating is safe. */
7107 static struct pointer_map_t *emitted_notes;
7110 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
7111 additional parameters: WHERE specifies whether the note shall be emitted
7112 before or after instruction INSN. */
7115 emit_note_insn_var_location (void **varp, void *data)
7117 variable var = (variable) *varp;
7118 rtx insn = ((emit_note_data *)data)->insn;
7119 enum emit_note_where where = ((emit_note_data *)data)->where;
7120 htab_t vars = ((emit_note_data *)data)->vars;
7122 int i, j, n_var_parts;
7124 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
7125 HOST_WIDE_INT last_limit;
7126 tree type_size_unit;
7127 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
7128 rtx loc[MAX_VAR_PARTS];
7132 if (dv_is_value_p (var->dv))
7133 goto value_or_debug_decl;
7135 decl = dv_as_decl (var->dv);
7137 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7138 goto value_or_debug_decl;
7143 if (!MAY_HAVE_DEBUG_INSNS)
7145 for (i = 0; i < var->n_var_parts; i++)
7146 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
7148 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
7149 var->cur_loc_changed = true;
7151 if (var->n_var_parts == 0)
7152 var->cur_loc_changed = true;
7154 #ifndef ENABLE_RTL_CHECKING
7155 if (!var->cur_loc_changed)
7158 for (i = 0; i < var->n_var_parts; i++)
7160 enum machine_mode mode, wider_mode;
7163 if (last_limit < var->var_part[i].offset)
7168 else if (last_limit > var->var_part[i].offset)
7170 offsets[n_var_parts] = var->var_part[i].offset;
7171 if (!var->var_part[i].cur_loc)
7176 loc2 = vt_expand_loc (var->var_part[i].cur_loc, vars);
7182 loc[n_var_parts] = loc2;
7183 mode = GET_MODE (var->var_part[i].cur_loc);
7184 if (mode == VOIDmode && dv_onepart_p (var->dv))
7185 mode = DECL_MODE (decl);
7186 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
7187 if (var->var_part[i].cur_loc == lc->loc)
7189 initialized = lc->init;
7193 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
7195 /* Attempt to merge adjacent registers or memory. */
7196 wider_mode = GET_MODE_WIDER_MODE (mode);
7197 for (j = i + 1; j < var->n_var_parts; j++)
7198 if (last_limit <= var->var_part[j].offset)
7200 if (j < var->n_var_parts
7201 && wider_mode != VOIDmode
7202 && var->var_part[j].cur_loc
7203 && mode == GET_MODE (var->var_part[j].cur_loc)
7204 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
7205 && last_limit == var->var_part[j].offset
7206 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
7207 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
7211 if (REG_P (loc[n_var_parts])
7212 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
7213 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
7214 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
7217 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
7218 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
7220 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
7221 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
7224 if (!REG_P (new_loc)
7225 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
7228 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
7231 else if (MEM_P (loc[n_var_parts])
7232 && GET_CODE (XEXP (loc2, 0)) == PLUS
7233 && REG_P (XEXP (XEXP (loc2, 0), 0))
7234 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
7236 if ((REG_P (XEXP (loc[n_var_parts], 0))
7237 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
7238 XEXP (XEXP (loc2, 0), 0))
7239 && INTVAL (XEXP (XEXP (loc2, 0), 1))
7240 == GET_MODE_SIZE (mode))
7241 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
7242 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
7243 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
7244 XEXP (XEXP (loc2, 0), 0))
7245 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
7246 + GET_MODE_SIZE (mode)
7247 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
7248 new_loc = adjust_address_nv (loc[n_var_parts],
7254 loc[n_var_parts] = new_loc;
7256 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
7262 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7263 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
7266 if (! flag_var_tracking_uninit)
7267 initialized = VAR_INIT_STATUS_INITIALIZED;
7271 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
7273 else if (n_var_parts == 1)
7277 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
7278 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
7282 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
7285 else if (n_var_parts)
7289 for (i = 0; i < n_var_parts; i++)
7291 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
7293 parallel = gen_rtx_PARALLEL (VOIDmode,
7294 gen_rtvec_v (n_var_parts, loc));
7295 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
7296 parallel, (int) initialized);
7299 #ifdef ENABLE_RTL_CHECKING
7302 void **note_slot = pointer_map_insert (emitted_notes, decl);
7303 rtx pnote = (rtx) *note_slot;
7304 if (!var->cur_loc_changed && (pnote || PAT_VAR_LOCATION_LOC (note_vl)))
7307 gcc_assert (rtx_equal_p (PAT_VAR_LOCATION_LOC (pnote),
7308 PAT_VAR_LOCATION_LOC (note_vl)));
7310 *note_slot = (void *) note_vl;
7312 if (!var->cur_loc_changed)
7316 if (where != EMIT_NOTE_BEFORE_INSN)
7318 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
7319 if (where == EMIT_NOTE_AFTER_CALL_INSN)
7320 NOTE_DURING_CALL_P (note) = true;
7323 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
7324 NOTE_VAR_LOCATION (note) = note_vl;
7327 set_dv_changed (var->dv, false);
7328 var->cur_loc_changed = false;
7329 gcc_assert (var->in_changed_variables);
7330 var->in_changed_variables = false;
7331 htab_clear_slot (changed_variables, varp);
7333 /* Continue traversing the hash table. */
7336 value_or_debug_decl:
7337 if (dv_changed_p (var->dv) && var->n_var_parts)
7340 bool cur_loc_changed;
7342 if (var->var_part[0].cur_loc
7343 && vt_expand_loc_dummy (var->var_part[0].cur_loc, vars,
7346 for (lc = var->var_part[0].loc_chain; lc; lc = lc->next)
7347 if (lc->loc != var->var_part[0].cur_loc
7348 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
7350 var->var_part[0].cur_loc = lc ? lc->loc : NULL_RTX;
7355 DEF_VEC_P (variable);
7356 DEF_VEC_ALLOC_P (variable, heap);
7358 /* Stack of variable_def pointers that need processing with
7359 check_changed_vars_2. */
7361 static VEC (variable, heap) *changed_variables_stack;
7363 /* VALUEs with no variables that need set_dv_changed (val, false)
7364 called before check_changed_vars_3. */
7366 static VEC (rtx, heap) *changed_values_stack;
7368 /* Helper function for check_changed_vars_1 and check_changed_vars_2. */
7371 check_changed_vars_0 (decl_or_value dv, htab_t htab)
7374 = (value_chain) htab_find_with_hash (value_chains, dv, dv_htab_hash (dv));
7378 for (vc = vc->next; vc; vc = vc->next)
7379 if (!dv_changed_p (vc->dv))
7382 = (variable) htab_find_with_hash (htab, vc->dv,
7383 dv_htab_hash (vc->dv));
7386 set_dv_changed (vc->dv, true);
7387 VEC_safe_push (variable, heap, changed_variables_stack, vcvar);
7389 else if (dv_is_value_p (vc->dv))
7391 set_dv_changed (vc->dv, true);
7392 VEC_safe_push (rtx, heap, changed_values_stack,
7393 dv_as_value (vc->dv));
7394 check_changed_vars_0 (vc->dv, htab);
7399 /* Populate changed_variables_stack with variable_def pointers
7400 that need variable_was_changed called on them. */
7403 check_changed_vars_1 (void **slot, void *data)
7405 variable var = (variable) *slot;
7406 htab_t htab = (htab_t) data;
7408 if (dv_is_value_p (var->dv)
7409 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7410 check_changed_vars_0 (var->dv, htab);
7414 /* Add VAR to changed_variables and also for VALUEs add recursively
7415 all DVs that aren't in changed_variables yet but reference the
7416 VALUE from its loc_chain. */
7419 check_changed_vars_2 (variable var, htab_t htab)
7421 variable_was_changed (var, NULL);
7422 if (dv_is_value_p (var->dv)
7423 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7424 check_changed_vars_0 (var->dv, htab);
7427 /* For each changed decl (except DEBUG_EXPR_DECLs) recompute
7428 cur_loc if needed (and cur_loc of all VALUEs and DEBUG_EXPR_DECLs
7429 it needs and are also in changed variables) and track whether
7430 cur_loc (or anything it uses to compute location) had to change
7431 during the current emit_notes_for_changes call. */
7434 check_changed_vars_3 (void **slot, void *data)
7436 variable var = (variable) *slot;
7437 htab_t vars = (htab_t) data;
7440 bool cur_loc_changed;
7442 if (dv_is_value_p (var->dv)
7443 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7446 for (i = 0; i < var->n_var_parts; i++)
7448 if (var->var_part[i].cur_loc
7449 && vt_expand_loc_dummy (var->var_part[i].cur_loc, vars,
7452 if (cur_loc_changed)
7453 var->cur_loc_changed = true;
7456 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
7457 if (lc->loc != var->var_part[i].cur_loc
7458 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
7460 if (lc || var->var_part[i].cur_loc)
7461 var->cur_loc_changed = true;
7462 var->var_part[i].cur_loc = lc ? lc->loc : NULL_RTX;
7464 if (var->n_var_parts == 0)
7465 var->cur_loc_changed = true;
7469 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
7470 CHANGED_VARIABLES and delete this chain. WHERE specifies whether the notes
7471 shall be emitted before of after instruction INSN. */
7474 emit_notes_for_changes (rtx insn, enum emit_note_where where,
7477 emit_note_data data;
7478 htab_t htab = shared_hash_htab (vars);
7480 if (!htab_elements (changed_variables))
7483 if (MAY_HAVE_DEBUG_INSNS)
7485 /* Unfortunately this has to be done in two steps, because
7486 we can't traverse a hashtab into which we are inserting
7487 through variable_was_changed. */
7488 htab_traverse (changed_variables, check_changed_vars_1, htab);
7489 while (VEC_length (variable, changed_variables_stack) > 0)
7490 check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
7492 while (VEC_length (rtx, changed_values_stack) > 0)
7493 set_dv_changed (dv_from_value (VEC_pop (rtx, changed_values_stack)),
7495 htab_traverse (changed_variables, check_changed_vars_3, htab);
7502 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
7505 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
7506 same variable in hash table DATA or is not there at all. */
7509 emit_notes_for_differences_1 (void **slot, void *data)
7511 htab_t new_vars = (htab_t) data;
7512 variable old_var, new_var;
7514 old_var = (variable) *slot;
7515 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
7516 dv_htab_hash (old_var->dv));
7520 /* Variable has disappeared. */
7523 empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
7524 empty_var->dv = old_var->dv;
7525 empty_var->refcount = 0;
7526 empty_var->n_var_parts = 0;
7527 empty_var->cur_loc_changed = false;
7528 empty_var->in_changed_variables = false;
7529 if (dv_onepart_p (old_var->dv))
7533 gcc_assert (old_var->n_var_parts == 1);
7534 for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
7535 remove_value_chains (old_var->dv, lc->loc);
7537 variable_was_changed (empty_var, NULL);
7538 /* Continue traversing the hash table. */
7541 if (variable_different_p (old_var, new_var))
7543 if (dv_onepart_p (old_var->dv))
7545 location_chain lc1, lc2;
7547 gcc_assert (old_var->n_var_parts == 1
7548 && new_var->n_var_parts == 1);
7549 lc1 = old_var->var_part[0].loc_chain;
7550 lc2 = new_var->var_part[0].loc_chain;
7553 && ((REG_P (lc1->loc) && REG_P (lc2->loc))
7554 || rtx_equal_p (lc1->loc, lc2->loc)))
7559 for (; lc2; lc2 = lc2->next)
7560 add_value_chains (old_var->dv, lc2->loc);
7561 for (; lc1; lc1 = lc1->next)
7562 remove_value_chains (old_var->dv, lc1->loc);
7564 variable_was_changed (new_var, NULL);
7566 /* Update cur_loc. */
7567 if (old_var != new_var)
7570 for (i = 0; i < new_var->n_var_parts; i++)
7572 new_var->var_part[i].cur_loc = NULL;
7573 if (old_var->n_var_parts != new_var->n_var_parts
7574 || old_var->var_part[i].offset != new_var->var_part[i].offset)
7575 new_var->cur_loc_changed = true;
7576 else if (old_var->var_part[i].cur_loc != NULL)
7579 rtx cur_loc = old_var->var_part[i].cur_loc;
7581 for (lc = new_var->var_part[i].loc_chain; lc; lc = lc->next)
7582 if (lc->loc == cur_loc
7583 || rtx_equal_p (cur_loc, lc->loc))
7585 new_var->var_part[i].cur_loc = lc->loc;
7589 new_var->cur_loc_changed = true;
7594 /* Continue traversing the hash table. */
7598 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
7602 emit_notes_for_differences_2 (void **slot, void *data)
7604 htab_t old_vars = (htab_t) data;
7605 variable old_var, new_var;
7607 new_var = (variable) *slot;
7608 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
7609 dv_htab_hash (new_var->dv));
7613 /* Variable has appeared. */
7614 if (dv_onepart_p (new_var->dv))
7618 gcc_assert (new_var->n_var_parts == 1);
7619 for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
7620 add_value_chains (new_var->dv, lc->loc);
7622 for (i = 0; i < new_var->n_var_parts; i++)
7623 new_var->var_part[i].cur_loc = NULL;
7624 variable_was_changed (new_var, NULL);
7627 /* Continue traversing the hash table. */
7631 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
7635 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
7636 dataflow_set *new_set)
7638 htab_traverse (shared_hash_htab (old_set->vars),
7639 emit_notes_for_differences_1,
7640 shared_hash_htab (new_set->vars));
7641 htab_traverse (shared_hash_htab (new_set->vars),
7642 emit_notes_for_differences_2,
7643 shared_hash_htab (old_set->vars));
7644 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
7647 /* Emit the notes for changes of location parts in the basic block BB. */
7650 emit_notes_in_bb (basic_block bb, dataflow_set *set)
7653 micro_operation *mo;
7655 dataflow_set_clear (set);
7656 dataflow_set_copy (set, &VTI (bb)->in);
7658 for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
7660 rtx insn = mo->insn;
7665 dataflow_set_clear_at_call (set);
7666 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
7671 rtx loc = mo->u.loc;
7674 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
7676 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
7678 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
7684 rtx loc = mo->u.loc;
7688 if (GET_CODE (loc) == CONCAT)
7690 val = XEXP (loc, 0);
7691 vloc = XEXP (loc, 1);
7699 var = PAT_VAR_LOCATION_DECL (vloc);
7701 clobber_variable_part (set, NULL_RTX,
7702 dv_from_decl (var), 0, NULL_RTX);
7705 if (VAL_NEEDS_RESOLUTION (loc))
7706 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
7707 set_variable_part (set, val, dv_from_decl (var), 0,
7708 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
7711 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
7712 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
7713 dv_from_decl (var), 0,
7714 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
7717 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
7723 rtx loc = mo->u.loc;
7724 rtx val, vloc, uloc;
7726 vloc = uloc = XEXP (loc, 1);
7727 val = XEXP (loc, 0);
7729 if (GET_CODE (val) == CONCAT)
7731 uloc = XEXP (val, 1);
7732 val = XEXP (val, 0);
7735 if (VAL_NEEDS_RESOLUTION (loc))
7736 val_resolve (set, val, vloc, insn);
7738 val_store (set, val, uloc, insn, false);
7740 if (VAL_HOLDS_TRACK_EXPR (loc))
7742 if (GET_CODE (uloc) == REG)
7743 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
7745 else if (GET_CODE (uloc) == MEM)
7746 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
7750 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
7756 rtx loc = mo->u.loc;
7757 rtx val, vloc, uloc, reverse = NULL_RTX;
7760 if (VAL_EXPR_HAS_REVERSE (loc))
7762 reverse = XEXP (loc, 1);
7763 vloc = XEXP (loc, 0);
7765 uloc = XEXP (vloc, 1);
7766 val = XEXP (vloc, 0);
7769 if (GET_CODE (val) == CONCAT)
7771 vloc = XEXP (val, 1);
7772 val = XEXP (val, 0);
7775 if (GET_CODE (vloc) == SET)
7777 rtx vsrc = SET_SRC (vloc);
7779 gcc_assert (val != vsrc);
7780 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
7782 vloc = SET_DEST (vloc);
7784 if (VAL_NEEDS_RESOLUTION (loc))
7785 val_resolve (set, val, vsrc, insn);
7787 else if (VAL_NEEDS_RESOLUTION (loc))
7789 gcc_assert (GET_CODE (uloc) == SET
7790 && GET_CODE (SET_SRC (uloc)) == REG);
7791 val_resolve (set, val, SET_SRC (uloc), insn);
7794 if (VAL_HOLDS_TRACK_EXPR (loc))
7796 if (VAL_EXPR_IS_CLOBBERED (loc))
7799 var_reg_delete (set, uloc, true);
7800 else if (MEM_P (uloc))
7801 var_mem_delete (set, uloc, true);
7805 bool copied_p = VAL_EXPR_IS_COPIED (loc);
7807 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
7809 if (GET_CODE (uloc) == SET)
7811 set_src = SET_SRC (uloc);
7812 uloc = SET_DEST (uloc);
7817 status = find_src_status (set, set_src);
7819 set_src = find_src_set_src (set, set_src);
7823 var_reg_delete_and_set (set, uloc, !copied_p,
7825 else if (MEM_P (uloc))
7826 var_mem_delete_and_set (set, uloc, !copied_p,
7830 else if (REG_P (uloc))
7831 var_regno_delete (set, REGNO (uloc));
7833 val_store (set, val, vloc, insn, true);
7836 val_store (set, XEXP (reverse, 0), XEXP (reverse, 1),
7839 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7846 rtx loc = mo->u.loc;
7849 if (GET_CODE (loc) == SET)
7851 set_src = SET_SRC (loc);
7852 loc = SET_DEST (loc);
7856 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7859 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7862 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7869 rtx loc = mo->u.loc;
7870 enum var_init_status src_status;
7873 if (GET_CODE (loc) == SET)
7875 set_src = SET_SRC (loc);
7876 loc = SET_DEST (loc);
7879 src_status = find_src_status (set, set_src);
7880 set_src = find_src_set_src (set, set_src);
7883 var_reg_delete_and_set (set, loc, false, src_status, set_src);
7885 var_mem_delete_and_set (set, loc, false, src_status, set_src);
7887 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7894 rtx loc = mo->u.loc;
7897 var_reg_delete (set, loc, false);
7899 var_mem_delete (set, loc, false);
7901 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
7907 rtx loc = mo->u.loc;
7910 var_reg_delete (set, loc, true);
7912 var_mem_delete (set, loc, true);
7914 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7920 set->stack_adjust += mo->u.adjust;
7926 /* Emit notes for the whole function. */
7929 vt_emit_notes (void)
7934 #ifdef ENABLE_RTL_CHECKING
7935 emitted_notes = pointer_map_create ();
7937 gcc_assert (!htab_elements (changed_variables));
7939 /* Free memory occupied by the out hash tables, as they aren't used
7942 dataflow_set_clear (&VTI (bb)->out);
7944 /* Enable emitting notes by functions (mainly by set_variable_part and
7945 delete_variable_part). */
7948 if (MAY_HAVE_DEBUG_INSNS)
7953 for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
7954 add_cselib_value_chains (dv_from_value (val));
7955 changed_variables_stack = VEC_alloc (variable, heap, 40);
7956 changed_values_stack = VEC_alloc (rtx, heap, 40);
7959 dataflow_set_init (&cur);
7963 /* Emit the notes for changes of variable locations between two
7964 subsequent basic blocks. */
7965 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
7967 /* Emit the notes for the changes in the basic block itself. */
7968 emit_notes_in_bb (bb, &cur);
7970 /* Free memory occupied by the in hash table, we won't need it
7972 dataflow_set_clear (&VTI (bb)->in);
7974 #ifdef ENABLE_CHECKING
7975 htab_traverse (shared_hash_htab (cur.vars),
7976 emit_notes_for_differences_1,
7977 shared_hash_htab (empty_shared_hash));
7978 if (MAY_HAVE_DEBUG_INSNS)
7983 for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
7984 remove_cselib_value_chains (dv_from_value (val));
7985 gcc_assert (htab_elements (value_chains) == 0);
7988 dataflow_set_destroy (&cur);
7990 if (MAY_HAVE_DEBUG_INSNS)
7992 VEC_free (variable, heap, changed_variables_stack);
7993 VEC_free (rtx, heap, changed_values_stack);
7996 #ifdef ENABLE_RTL_CHECKING
7997 pointer_map_destroy (emitted_notes);
8002 /* If there is a declaration and offset associated with register/memory RTL
8003 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
8006 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
8010 if (REG_ATTRS (rtl))
8012 *declp = REG_EXPR (rtl);
8013 *offsetp = REG_OFFSET (rtl);
8017 else if (MEM_P (rtl))
8019 if (MEM_ATTRS (rtl))
8021 *declp = MEM_EXPR (rtl);
8022 *offsetp = INT_MEM_OFFSET (rtl);
8029 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
8032 vt_add_function_parameters (void)
8036 for (parm = DECL_ARGUMENTS (current_function_decl);
8037 parm; parm = TREE_CHAIN (parm))
8039 rtx decl_rtl = DECL_RTL_IF_SET (parm);
8040 rtx incoming = DECL_INCOMING_RTL (parm);
8042 enum machine_mode mode;
8043 HOST_WIDE_INT offset;
8047 if (TREE_CODE (parm) != PARM_DECL)
8050 if (!DECL_NAME (parm))
8053 if (!decl_rtl || !incoming)
8056 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
8059 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
8061 if (REG_P (incoming) || MEM_P (incoming))
8063 /* This means argument is passed by invisible reference. */
8066 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
8070 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
8072 offset += byte_lowpart_offset (GET_MODE (incoming),
8073 GET_MODE (decl_rtl));
8082 /* Assume that DECL_RTL was a pseudo that got spilled to
8083 memory. The spill slot sharing code will force the
8084 memory to reference spill_slot_decl (%sfp), so we don't
8085 match above. That's ok, the pseudo must have referenced
8086 the entire parameter, so just reset OFFSET. */
8087 gcc_assert (decl == get_spill_slot_decl (false));
8091 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
8094 out = &VTI (ENTRY_BLOCK_PTR)->out;
8096 dv = dv_from_decl (parm);
8098 if (target_for_debug_bind (parm)
8099 /* We can't deal with these right now, because this kind of
8100 variable is single-part. ??? We could handle parallels
8101 that describe multiple locations for the same single
8102 value, but ATM we don't. */
8103 && GET_CODE (incoming) != PARALLEL)
8107 /* ??? We shouldn't ever hit this, but it may happen because
8108 arguments passed by invisible reference aren't dealt with
8109 above: incoming-rtl will have Pmode rather than the
8110 expected mode for the type. */
8114 val = cselib_lookup (var_lowpart (mode, incoming), mode, true);
8116 /* ??? Float-typed values in memory are not handled by
8120 preserve_value (val);
8121 set_variable_part (out, val->val_rtx, dv, offset,
8122 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8123 dv = dv_from_value (val->val_rtx);
8127 if (REG_P (incoming))
8129 incoming = var_lowpart (mode, incoming);
8130 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
8131 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
8133 set_variable_part (out, incoming, dv, offset,
8134 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8136 else if (MEM_P (incoming))
8138 incoming = var_lowpart (mode, incoming);
8139 set_variable_part (out, incoming, dv, offset,
8140 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8144 if (MAY_HAVE_DEBUG_INSNS)
8146 cselib_preserve_only_values ();
8147 cselib_reset_table (cselib_get_next_uid ());
8152 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
8155 fp_setter (rtx insn)
8157 rtx pat = PATTERN (insn);
8158 if (RTX_FRAME_RELATED_P (insn))
8160 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
8162 pat = XEXP (expr, 0);
8164 if (GET_CODE (pat) == SET)
8165 return SET_DEST (pat) == hard_frame_pointer_rtx;
8166 else if (GET_CODE (pat) == PARALLEL)
8169 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
8170 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
8171 && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
8177 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
8178 ensure it isn't flushed during cselib_reset_table.
8179 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
8180 has been eliminated. */
8183 vt_init_cfa_base (void)
8187 #ifdef FRAME_POINTER_CFA_OFFSET
8188 cfa_base_rtx = frame_pointer_rtx;
8190 cfa_base_rtx = arg_pointer_rtx;
8192 if (cfa_base_rtx == hard_frame_pointer_rtx
8193 || !fixed_regs[REGNO (cfa_base_rtx)])
8195 cfa_base_rtx = NULL_RTX;
8198 if (!MAY_HAVE_DEBUG_INSNS)
8201 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
8203 preserve_value (val);
8204 cselib_preserve_cfa_base_value (val);
8205 var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,
8206 VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx),
8207 0, NULL_RTX, INSERT);
8210 /* Allocate and initialize the data structures for variable tracking
8211 and parse the RTL to get the micro operations. */
8214 vt_initialize (void)
8216 basic_block bb, prologue_bb = NULL;
8217 HOST_WIDE_INT fp_cfa_offset = -1;
8219 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
8221 attrs_pool = create_alloc_pool ("attrs_def pool",
8222 sizeof (struct attrs_def), 1024);
8223 var_pool = create_alloc_pool ("variable_def pool",
8224 sizeof (struct variable_def)
8225 + (MAX_VAR_PARTS - 1)
8226 * sizeof (((variable)NULL)->var_part[0]), 64);
8227 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
8228 sizeof (struct location_chain_def),
8230 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
8231 sizeof (struct shared_hash_def), 256);
8232 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
8233 empty_shared_hash->refcount = 1;
8234 empty_shared_hash->htab
8235 = htab_create (1, variable_htab_hash, variable_htab_eq,
8236 variable_htab_free);
8237 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
8238 variable_htab_free);
8239 if (MAY_HAVE_DEBUG_INSNS)
8241 value_chain_pool = create_alloc_pool ("value_chain_def pool",
8242 sizeof (struct value_chain_def),
8244 value_chains = htab_create (32, value_chain_htab_hash,
8245 value_chain_htab_eq, NULL);
8248 /* Init the IN and OUT sets. */
8251 VTI (bb)->visited = false;
8252 VTI (bb)->flooded = false;
8253 dataflow_set_init (&VTI (bb)->in);
8254 dataflow_set_init (&VTI (bb)->out);
8255 VTI (bb)->permp = NULL;
8258 if (MAY_HAVE_DEBUG_INSNS)
8260 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
8261 scratch_regs = BITMAP_ALLOC (NULL);
8262 valvar_pool = create_alloc_pool ("small variable_def pool",
8263 sizeof (struct variable_def), 256);
8264 preserved_values = VEC_alloc (rtx, heap, 256);
8268 scratch_regs = NULL;
8272 if (!frame_pointer_needed)
8276 if (!vt_stack_adjustments ())
8279 #ifdef FRAME_POINTER_CFA_OFFSET
8280 reg = frame_pointer_rtx;
8282 reg = arg_pointer_rtx;
8284 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
8287 if (GET_CODE (elim) == PLUS)
8288 elim = XEXP (elim, 0);
8289 if (elim == stack_pointer_rtx)
8290 vt_init_cfa_base ();
8293 else if (!crtl->stack_realign_tried)
8297 #ifdef FRAME_POINTER_CFA_OFFSET
8298 reg = frame_pointer_rtx;
8299 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
8301 reg = arg_pointer_rtx;
8302 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
8304 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
8307 if (GET_CODE (elim) == PLUS)
8309 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
8310 elim = XEXP (elim, 0);
8312 if (elim != hard_frame_pointer_rtx)
8315 prologue_bb = single_succ (ENTRY_BLOCK_PTR);
8319 hard_frame_pointer_adjustment = -1;
8324 HOST_WIDE_INT pre, post = 0;
8325 basic_block first_bb, last_bb;
8327 if (MAY_HAVE_DEBUG_INSNS)
8329 cselib_record_sets_hook = add_with_sets;
8330 if (dump_file && (dump_flags & TDF_DETAILS))
8331 fprintf (dump_file, "first value: %i\n",
8332 cselib_get_next_uid ());
8339 if (bb->next_bb == EXIT_BLOCK_PTR
8340 || ! single_pred_p (bb->next_bb))
8342 e = find_edge (bb, bb->next_bb);
8343 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
8349 /* Add the micro-operations to the vector. */
8350 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
8352 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
8353 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
8354 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
8355 insn = NEXT_INSN (insn))
8359 if (!frame_pointer_needed)
8361 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
8365 mo.type = MO_ADJUST;
8368 if (dump_file && (dump_flags & TDF_DETAILS))
8369 log_op_type (PATTERN (insn), bb, insn,
8370 MO_ADJUST, dump_file);
8371 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
8373 VTI (bb)->out.stack_adjust += pre;
8377 cselib_hook_called = false;
8378 adjust_insn (bb, insn);
8379 if (MAY_HAVE_DEBUG_INSNS)
8381 cselib_process_insn (insn);
8382 if (dump_file && (dump_flags & TDF_DETAILS))
8384 print_rtl_single (dump_file, insn);
8385 dump_cselib_table (dump_file);
8388 if (!cselib_hook_called)
8389 add_with_sets (insn, 0, 0);
8392 if (!frame_pointer_needed && post)
8395 mo.type = MO_ADJUST;
8398 if (dump_file && (dump_flags & TDF_DETAILS))
8399 log_op_type (PATTERN (insn), bb, insn,
8400 MO_ADJUST, dump_file);
8401 VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
8403 VTI (bb)->out.stack_adjust += post;
8406 if (bb == prologue_bb
8407 && hard_frame_pointer_adjustment == -1
8408 && RTX_FRAME_RELATED_P (insn)
8409 && fp_setter (insn))
8411 vt_init_cfa_base ();
8412 hard_frame_pointer_adjustment = fp_cfa_offset;
8416 gcc_assert (offset == VTI (bb)->out.stack_adjust);
8421 if (MAY_HAVE_DEBUG_INSNS)
8423 cselib_preserve_only_values ();
8424 cselib_reset_table (cselib_get_next_uid ());
8425 cselib_record_sets_hook = NULL;
8429 hard_frame_pointer_adjustment = -1;
8430 VTI (ENTRY_BLOCK_PTR)->flooded = true;
8431 vt_add_function_parameters ();
8432 cfa_base_rtx = NULL_RTX;
8436 /* Get rid of all debug insns from the insn stream. */
8439 delete_debug_insns (void)
8444 if (!MAY_HAVE_DEBUG_INSNS)
8449 FOR_BB_INSNS_SAFE (bb, insn, next)
8450 if (DEBUG_INSN_P (insn))
8455 /* Run a fast, BB-local only version of var tracking, to take care of
8456 information that we don't do global analysis on, such that not all
8457 information is lost. If SKIPPED holds, we're skipping the global
8458 pass entirely, so we should try to use information it would have
8459 handled as well.. */
8462 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
8464 /* ??? Just skip it all for now. */
8465 delete_debug_insns ();
8468 /* Free the data structures needed for variable tracking. */
8477 VEC_free (micro_operation, heap, VTI (bb)->mos);
8482 dataflow_set_destroy (&VTI (bb)->in);
8483 dataflow_set_destroy (&VTI (bb)->out);
8484 if (VTI (bb)->permp)
8486 dataflow_set_destroy (VTI (bb)->permp);
8487 XDELETE (VTI (bb)->permp);
8490 free_aux_for_blocks ();
8491 htab_delete (empty_shared_hash->htab);
8492 htab_delete (changed_variables);
8493 free_alloc_pool (attrs_pool);
8494 free_alloc_pool (var_pool);
8495 free_alloc_pool (loc_chain_pool);
8496 free_alloc_pool (shared_hash_pool);
8498 if (MAY_HAVE_DEBUG_INSNS)
8500 htab_delete (value_chains);
8501 free_alloc_pool (value_chain_pool);
8502 free_alloc_pool (valvar_pool);
8503 VEC_free (rtx, heap, preserved_values);
8505 BITMAP_FREE (scratch_regs);
8506 scratch_regs = NULL;
8510 XDELETEVEC (vui_vec);
8515 /* The entry point to variable tracking pass. */
8517 static inline unsigned int
8518 variable_tracking_main_1 (void)
8522 if (flag_var_tracking_assignments < 0)
8524 delete_debug_insns ();
8528 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
8530 vt_debug_insns_local (true);
8534 mark_dfs_back_edges ();
8535 if (!vt_initialize ())
8538 vt_debug_insns_local (true);
8542 success = vt_find_locations ();
8544 if (!success && flag_var_tracking_assignments > 0)
8548 delete_debug_insns ();
8550 /* This is later restored by our caller. */
8551 flag_var_tracking_assignments = 0;
8553 success = vt_initialize ();
8554 gcc_assert (success);
8556 success = vt_find_locations ();
8562 vt_debug_insns_local (false);
8566 if (dump_file && (dump_flags & TDF_DETAILS))
8568 dump_dataflow_sets ();
8569 dump_flow_info (dump_file, dump_flags);
8575 vt_debug_insns_local (false);
8580 variable_tracking_main (void)
8583 int save = flag_var_tracking_assignments;
8585 ret = variable_tracking_main_1 ();
8587 flag_var_tracking_assignments = save;
8593 gate_handle_var_tracking (void)
8595 return (flag_var_tracking);
8600 struct rtl_opt_pass pass_variable_tracking =
8604 "vartrack", /* name */
8605 gate_handle_var_tracking, /* gate */
8606 variable_tracking_main, /* execute */
8609 0, /* static_pass_number */
8610 TV_VAR_TRACKING, /* tv_id */
8611 0, /* properties_required */
8612 0, /* properties_provided */
8613 0, /* properties_destroyed */
8614 0, /* todo_flags_start */
8615 TODO_dump_func | TODO_verify_rtl_sharing/* todo_flags_finish */