1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 < set < clobber < post-modifying stack adjustment
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting offset of variables addressed using stack pointer),
48 the table of structures describing the locations of parts of a variable
49 and for each physical register a linked list for each physical register.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 effective deleting appropriate variable parts when we set or clobber the
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short so it is a good data structure here.
58 For example in the following code, register allocator may assign same
59 register to variables A and B, and both of them are stored in the same
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in RTL code. Each such a note describes
74 the location of one variable at the point in instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction, we only emit these notes where the location of variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for a large variables
85 which consist of several parts, for example long long).
91 #include "coretypes.h"
95 #include "hard-reg-set.h"
96 #include "basic-block.h"
99 #include "insn-config.h"
102 #include "alloc-pool.h"
108 #include "tree-pass.h"
112 /* Type of micro operation. */
113 enum micro_operation_type
115 MO_USE, /* Use location (REG or MEM). */
116 MO_USE_NO_VAR,/* Use location which is not associated with a variable
117 or the variable is not trackable. */
118 MO_VAL_USE, /* Use location which is associated with a value. */
119 MO_VAL_LOC, /* Use location which appears in a debug insn. */
120 MO_VAL_SET, /* Set location associated with a value. */
121 MO_SET, /* Set location. */
122 MO_COPY, /* Copy the same portion of a variable from one
123 location to another. */
124 MO_CLOBBER, /* Clobber location. */
125 MO_CALL, /* Call insn. */
126 MO_ADJUST /* Adjust stack pointer. */
130 static const char * const ATTRIBUTE_UNUSED
131 micro_operation_type_name[] = {
144 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
145 Notes emitted as AFTER_CALL are to take effect during the call,
146 rather than after the call. */
149 EMIT_NOTE_BEFORE_INSN,
150 EMIT_NOTE_AFTER_INSN,
151 EMIT_NOTE_AFTER_CALL_INSN
154 /* Structure holding information about micro operation. */
155 typedef struct micro_operation_def
157 /* Type of micro operation. */
158 enum micro_operation_type type;
161 /* Location. For MO_SET and MO_COPY, this is the SET that
162 performs the assignment, if known, otherwise it is the target
163 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
164 CONCAT of the VALUE and the LOC associated with it. For
165 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
166 associated with it. */
169 /* Stack adjustment. */
170 HOST_WIDE_INT adjust;
173 /* The instruction which the micro operation is in, for MO_USE,
174 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
175 instruction or note in the original flow (before any var-tracking
176 notes are inserted, to simplify emission of notes), for MO_SET
181 /* A declaration of a variable, or an RTL value being handled like a
183 typedef void *decl_or_value;
185 /* Structure for passing some other parameters to function
186 emit_note_insn_var_location. */
187 typedef struct emit_note_data_def
189 /* The instruction which the note will be emitted before/after. */
192 /* Where the note will be emitted (before/after insn)? */
193 enum emit_note_where where;
195 /* The variables and values active at this point. */
199 /* Description of location of a part of a variable. The content of a physical
200 register is described by a chain of these structures.
201 The chains are pretty short (usually 1 or 2 elements) and thus
202 chain is the best data structure. */
203 typedef struct attrs_def
205 /* Pointer to next member of the list. */
206 struct attrs_def *next;
208 /* The rtx of register. */
211 /* The declaration corresponding to LOC. */
214 /* Offset from start of DECL. */
215 HOST_WIDE_INT offset;
218 /* Structure holding a refcounted hash table. If refcount > 1,
219 it must be first unshared before modified. */
220 typedef struct shared_hash_def
222 /* Reference count. */
225 /* Actual hash table. */
229 /* Structure holding the IN or OUT set for a basic block. */
230 typedef struct dataflow_set_def
232 /* Adjustment of stack offset. */
233 HOST_WIDE_INT stack_adjust;
235 /* Attributes for registers (lists of attrs). */
236 attrs regs[FIRST_PSEUDO_REGISTER];
238 /* Variable locations. */
241 /* Vars that is being traversed. */
242 shared_hash traversed_vars;
245 /* The structure (one for each basic block) containing the information
246 needed for variable tracking. */
247 typedef struct variable_tracking_info_def
249 /* Number of micro operations stored in the MOS array. */
252 /* The array of micro operations. */
253 micro_operation *mos;
255 /* The IN and OUT set for dataflow analysis. */
259 /* The permanent-in dataflow set for this block. This is used to
260 hold values for which we had to compute entry values. ??? This
261 should probably be dynamically allocated, to avoid using more
262 memory in non-debug builds. */
265 /* Has the block been visited in DFS? */
268 /* Has the block been flooded in VTA? */
271 } *variable_tracking_info;
273 /* Structure for chaining the locations. */
274 typedef struct location_chain_def
276 /* Next element in the chain. */
277 struct location_chain_def *next;
279 /* The location (REG, MEM or VALUE). */
282 /* The "value" stored in this location. */
286 enum var_init_status init;
289 /* Structure describing one part of variable. */
290 typedef struct variable_part_def
292 /* Chain of locations of the part. */
293 location_chain loc_chain;
295 /* Location which was last emitted to location list. */
298 /* The offset in the variable. */
299 HOST_WIDE_INT offset;
302 /* Maximum number of location parts. */
303 #define MAX_VAR_PARTS 16
305 /* Structure describing where the variable is located. */
306 typedef struct variable_def
308 /* The declaration of the variable, or an RTL value being handled
309 like a declaration. */
312 /* Reference count. */
315 /* Number of variable parts. */
318 /* The variable parts. */
319 variable_part var_part[1];
321 typedef const struct variable_def *const_variable;
323 /* Structure for chaining backlinks from referenced VALUEs to
324 DVs that are referencing them. */
325 typedef struct value_chain_def
327 /* Next value_chain entry. */
328 struct value_chain_def *next;
330 /* The declaration of the variable, or an RTL value
331 being handled like a declaration, whose var_parts[0].loc_chain
332 references the VALUE owning this value_chain. */
335 /* Reference count. */
338 typedef const struct value_chain_def *const_value_chain;
340 /* Hash function for DECL for VARIABLE_HTAB. */
341 #define VARIABLE_HASH_VAL(decl) (DECL_UID (decl))
343 /* Pointer to the BB's information specific to variable tracking pass. */
344 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
346 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
347 #define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
349 /* Alloc pool for struct attrs_def. */
350 static alloc_pool attrs_pool;
352 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
353 static alloc_pool var_pool;
355 /* Alloc pool for struct variable_def with a single var_part entry. */
356 static alloc_pool valvar_pool;
358 /* Alloc pool for struct location_chain_def. */
359 static alloc_pool loc_chain_pool;
361 /* Alloc pool for struct shared_hash_def. */
362 static alloc_pool shared_hash_pool;
364 /* Alloc pool for struct value_chain_def. */
365 static alloc_pool value_chain_pool;
367 /* Changed variables, notes will be emitted for them. */
368 static htab_t changed_variables;
370 /* Links from VALUEs to DVs referencing them in their current loc_chains. */
371 static htab_t value_chains;
373 /* Shall notes be emitted? */
374 static bool emit_notes;
376 /* Empty shared hashtable. */
377 static shared_hash empty_shared_hash;
379 /* Scratch register bitmap used by cselib_expand_value_rtx. */
380 static bitmap scratch_regs = NULL;
382 /* Variable used to tell whether cselib_process_insn called our hook. */
383 static bool cselib_hook_called;
385 /* Local function prototypes. */
386 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
388 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
390 static void bb_stack_adjust_offset (basic_block);
391 static bool vt_stack_adjustments (void);
392 static rtx adjust_stack_reference (rtx, HOST_WIDE_INT);
393 static hashval_t variable_htab_hash (const void *);
394 static int variable_htab_eq (const void *, const void *);
395 static void variable_htab_free (void *);
397 static void init_attrs_list_set (attrs *);
398 static void attrs_list_clear (attrs *);
399 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
400 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
401 static void attrs_list_copy (attrs *, attrs);
402 static void attrs_list_union (attrs *, attrs);
404 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
405 enum var_init_status);
406 static int vars_copy_1 (void **, void *);
407 static void vars_copy (htab_t, htab_t);
408 static tree var_debug_decl (tree);
409 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
410 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
411 enum var_init_status, rtx);
412 static void var_reg_delete (dataflow_set *, rtx, bool);
413 static void var_regno_delete (dataflow_set *, int);
414 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
415 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
416 enum var_init_status, rtx);
417 static void var_mem_delete (dataflow_set *, rtx, bool);
419 static void dataflow_set_init (dataflow_set *);
420 static void dataflow_set_clear (dataflow_set *);
421 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
422 static int variable_union_info_cmp_pos (const void *, const void *);
423 static int variable_union (void **, void *);
424 static int variable_canonicalize (void **, void *);
425 static void dataflow_set_union (dataflow_set *, dataflow_set *);
426 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
427 static bool canon_value_cmp (rtx, rtx);
428 static int loc_cmp (rtx, rtx);
429 static bool variable_part_different_p (variable_part *, variable_part *);
430 static bool onepart_variable_different_p (variable, variable);
431 static bool variable_different_p (variable, variable, bool);
432 static int dataflow_set_different_1 (void **, void *);
433 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
434 static void dataflow_set_destroy (dataflow_set *);
436 static bool contains_symbol_ref (rtx);
437 static bool track_expr_p (tree, bool);
438 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
439 static int count_uses (rtx *, void *);
440 static void count_uses_1 (rtx *, void *);
441 static void count_stores (rtx, const_rtx, void *);
442 static int add_uses (rtx *, void *);
443 static void add_uses_1 (rtx *, void *);
444 static void add_stores (rtx, const_rtx, void *);
445 static bool compute_bb_dataflow (basic_block);
446 static void vt_find_locations (void);
448 static void dump_attrs_list (attrs);
449 static int dump_variable_slot (void **, void *);
450 static void dump_variable (variable);
451 static void dump_vars (htab_t);
452 static void dump_dataflow_set (dataflow_set *);
453 static void dump_dataflow_sets (void);
455 static void variable_was_changed (variable, dataflow_set *);
456 static void **set_slot_part (dataflow_set *, rtx, void **,
457 decl_or_value, HOST_WIDE_INT,
458 enum var_init_status, rtx);
459 static void set_variable_part (dataflow_set *, rtx,
460 decl_or_value, HOST_WIDE_INT,
461 enum var_init_status, rtx, enum insert_option);
462 static void **clobber_slot_part (dataflow_set *, rtx,
463 void **, HOST_WIDE_INT, rtx);
464 static void clobber_variable_part (dataflow_set *, rtx,
465 decl_or_value, HOST_WIDE_INT, rtx);
466 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
467 static void delete_variable_part (dataflow_set *, rtx,
468 decl_or_value, HOST_WIDE_INT);
469 static int emit_note_insn_var_location (void **, void *);
470 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
471 static int emit_notes_for_differences_1 (void **, void *);
472 static int emit_notes_for_differences_2 (void **, void *);
473 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
474 static void emit_notes_in_bb (basic_block, dataflow_set *);
475 static void vt_emit_notes (void);
477 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
478 static void vt_add_function_parameters (void);
479 static void vt_initialize (void);
480 static void vt_finalize (void);
482 /* Given a SET, calculate the amount of stack adjustment it contains
483 PRE- and POST-modifying stack pointer.
484 This function is similar to stack_adjust_offset. */
487 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
490 rtx src = SET_SRC (pattern);
491 rtx dest = SET_DEST (pattern);
494 if (dest == stack_pointer_rtx)
496 /* (set (reg sp) (plus (reg sp) (const_int))) */
497 code = GET_CODE (src);
498 if (! (code == PLUS || code == MINUS)
499 || XEXP (src, 0) != stack_pointer_rtx
500 || !CONST_INT_P (XEXP (src, 1)))
504 *post += INTVAL (XEXP (src, 1));
506 *post -= INTVAL (XEXP (src, 1));
508 else if (MEM_P (dest))
510 /* (set (mem (pre_dec (reg sp))) (foo)) */
511 src = XEXP (dest, 0);
512 code = GET_CODE (src);
518 if (XEXP (src, 0) == stack_pointer_rtx)
520 rtx val = XEXP (XEXP (src, 1), 1);
521 /* We handle only adjustments by constant amount. */
522 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
525 if (code == PRE_MODIFY)
526 *pre -= INTVAL (val);
528 *post -= INTVAL (val);
534 if (XEXP (src, 0) == stack_pointer_rtx)
536 *pre += GET_MODE_SIZE (GET_MODE (dest));
542 if (XEXP (src, 0) == stack_pointer_rtx)
544 *post += GET_MODE_SIZE (GET_MODE (dest));
550 if (XEXP (src, 0) == stack_pointer_rtx)
552 *pre -= GET_MODE_SIZE (GET_MODE (dest));
558 if (XEXP (src, 0) == stack_pointer_rtx)
560 *post -= GET_MODE_SIZE (GET_MODE (dest));
571 /* Given an INSN, calculate the amount of stack adjustment it contains
572 PRE- and POST-modifying stack pointer. */
575 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
583 pattern = PATTERN (insn);
584 if (RTX_FRAME_RELATED_P (insn))
586 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
588 pattern = XEXP (expr, 0);
591 if (GET_CODE (pattern) == SET)
592 stack_adjust_offset_pre_post (pattern, pre, post);
593 else if (GET_CODE (pattern) == PARALLEL
594 || GET_CODE (pattern) == SEQUENCE)
598 /* There may be stack adjustments inside compound insns. Search
600 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
601 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
602 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
606 /* Compute stack adjustment in basic block BB. */
609 bb_stack_adjust_offset (basic_block bb)
611 HOST_WIDE_INT offset;
614 offset = VTI (bb)->in.stack_adjust;
615 for (i = 0; i < VTI (bb)->n_mos; i++)
617 if (VTI (bb)->mos[i].type == MO_ADJUST)
618 offset += VTI (bb)->mos[i].u.adjust;
619 else if (VTI (bb)->mos[i].type != MO_CALL)
621 if (MEM_P (VTI (bb)->mos[i].u.loc))
623 VTI (bb)->mos[i].u.loc
624 = adjust_stack_reference (VTI (bb)->mos[i].u.loc, -offset);
628 VTI (bb)->out.stack_adjust = offset;
631 /* Compute stack adjustments for all blocks by traversing DFS tree.
632 Return true when the adjustments on all incoming edges are consistent.
633 Heavily borrowed from pre_and_rev_post_order_compute. */
636 vt_stack_adjustments (void)
638 edge_iterator *stack;
641 /* Initialize entry block. */
642 VTI (ENTRY_BLOCK_PTR)->visited = true;
643 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
645 /* Allocate stack for back-tracking up CFG. */
646 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
649 /* Push the first edge on to the stack. */
650 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
658 /* Look at the edge on the top of the stack. */
660 src = ei_edge (ei)->src;
661 dest = ei_edge (ei)->dest;
663 /* Check if the edge destination has been visited yet. */
664 if (!VTI (dest)->visited)
666 VTI (dest)->visited = true;
667 VTI (dest)->in.stack_adjust = VTI (src)->out.stack_adjust;
668 bb_stack_adjust_offset (dest);
670 if (EDGE_COUNT (dest->succs) > 0)
671 /* Since the DEST node has been visited for the first
672 time, check its successors. */
673 stack[sp++] = ei_start (dest->succs);
677 /* Check whether the adjustments on the edges are the same. */
678 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
684 if (! ei_one_before_end_p (ei))
685 /* Go to the next edge. */
686 ei_next (&stack[sp - 1]);
688 /* Return to previous level if there are no more edges. */
697 /* Adjust stack reference MEM by ADJUSTMENT bytes and make it relative
698 to the argument pointer. Return the new rtx. */
701 adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment)
705 #ifdef FRAME_POINTER_CFA_OFFSET
706 adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
707 cfa = plus_constant (frame_pointer_rtx, adjustment);
709 adjustment -= ARG_POINTER_CFA_OFFSET (current_function_decl);
710 cfa = plus_constant (arg_pointer_rtx, adjustment);
713 addr = replace_rtx (copy_rtx (XEXP (mem, 0)), stack_pointer_rtx, cfa);
714 tmp = simplify_rtx (addr);
718 return replace_equiv_address_nv (mem, addr);
721 /* Return true if a decl_or_value DV is a DECL or NULL. */
723 dv_is_decl_p (decl_or_value dv)
728 /* Make sure relevant codes don't overlap. */
729 switch ((int)TREE_CODE ((tree)dv))
733 case (int)RESULT_DECL:
734 case (int)FUNCTION_DECL:
735 case (int)DEBUG_EXPR_DECL:
736 case (int)COMPONENT_REF:
747 /* Return true if a decl_or_value is a VALUE rtl. */
749 dv_is_value_p (decl_or_value dv)
751 return dv && !dv_is_decl_p (dv);
754 /* Return the decl in the decl_or_value. */
756 dv_as_decl (decl_or_value dv)
758 gcc_assert (dv_is_decl_p (dv));
762 /* Return the value in the decl_or_value. */
764 dv_as_value (decl_or_value dv)
766 gcc_assert (dv_is_value_p (dv));
770 /* Return the opaque pointer in the decl_or_value. */
772 dv_as_opaque (decl_or_value dv)
777 /* Return true if a decl_or_value must not have more than one variable
780 dv_onepart_p (decl_or_value dv)
784 if (!MAY_HAVE_DEBUG_INSNS)
787 if (dv_is_value_p (dv))
790 decl = dv_as_decl (dv);
795 return (target_for_debug_bind (decl) != NULL_TREE);
798 /* Return the variable pool to be used for dv, depending on whether it
799 can have multiple parts or not. */
800 static inline alloc_pool
801 dv_pool (decl_or_value dv)
803 return dv_onepart_p (dv) ? valvar_pool : var_pool;
806 /* Build a decl_or_value out of a decl. */
807 static inline decl_or_value
808 dv_from_decl (tree decl)
812 gcc_assert (dv_is_decl_p (dv));
816 /* Build a decl_or_value out of a value. */
817 static inline decl_or_value
818 dv_from_value (rtx value)
822 gcc_assert (dv_is_value_p (dv));
826 static inline hashval_t
827 dv_htab_hash (decl_or_value dv)
829 if (dv_is_value_p (dv))
830 return -(hashval_t)(CSELIB_VAL_PTR (dv_as_value (dv))->value);
832 return (VARIABLE_HASH_VAL (dv_as_decl (dv)));
835 /* The hash function for variable_htab, computes the hash value
836 from the declaration of variable X. */
839 variable_htab_hash (const void *x)
841 const_variable const v = (const_variable) x;
843 return dv_htab_hash (v->dv);
846 /* Compare the declaration of variable X with declaration Y. */
849 variable_htab_eq (const void *x, const void *y)
851 const_variable const v = (const_variable) x;
852 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
854 if (dv_as_opaque (v->dv) == dv_as_opaque (dv))
861 visv = dv_is_value_p (v->dv);
862 dvisv = dv_is_value_p (dv);
868 gcc_assert (CSELIB_VAL_PTR (dv_as_value (v->dv))
869 != CSELIB_VAL_PTR (dv_as_value (dv)));
871 gcc_assert (VARIABLE_HASH_VAL (dv_as_decl (v->dv))
872 != VARIABLE_HASH_VAL (dv_as_decl (dv)));
879 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
882 variable_htab_free (void *elem)
885 variable var = (variable) elem;
886 location_chain node, next;
888 gcc_assert (var->refcount > 0);
891 if (var->refcount > 0)
894 for (i = 0; i < var->n_var_parts; i++)
896 for (node = var->var_part[i].loc_chain; node; node = next)
899 pool_free (loc_chain_pool, node);
901 var->var_part[i].loc_chain = NULL;
903 pool_free (dv_pool (var->dv), var);
906 /* The hash function for value_chains htab, computes the hash value
910 value_chain_htab_hash (const void *x)
912 const_value_chain const v = (const_value_chain) x;
914 return dv_htab_hash (v->dv);
917 /* Compare the VALUE X with VALUE Y. */
920 value_chain_htab_eq (const void *x, const void *y)
922 const_value_chain const v = (const_value_chain) x;
923 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
925 return dv_as_opaque (v->dv) == dv_as_opaque (dv);
928 /* Initialize the set (array) SET of attrs to empty lists. */
931 init_attrs_list_set (attrs *set)
935 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
939 /* Make the list *LISTP empty. */
942 attrs_list_clear (attrs *listp)
946 for (list = *listp; list; list = next)
949 pool_free (attrs_pool, list);
954 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
957 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
959 for (; list; list = list->next)
960 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
965 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
968 attrs_list_insert (attrs *listp, decl_or_value dv,
969 HOST_WIDE_INT offset, rtx loc)
973 list = (attrs) pool_alloc (attrs_pool);
976 list->offset = offset;
981 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
984 attrs_list_copy (attrs *dstp, attrs src)
988 attrs_list_clear (dstp);
989 for (; src; src = src->next)
991 n = (attrs) pool_alloc (attrs_pool);
994 n->offset = src->offset;
1000 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1003 attrs_list_union (attrs *dstp, attrs src)
1005 for (; src; src = src->next)
1007 if (!attrs_list_member (*dstp, src->dv, src->offset))
1008 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1012 /* Combine nodes that are not onepart nodes from SRC and SRC2 into
1016 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1018 gcc_assert (!*dstp);
1019 for (; src; src = src->next)
1021 if (!dv_onepart_p (src->dv))
1022 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1024 for (src = src2; src; src = src->next)
1026 if (!dv_onepart_p (src->dv)
1027 && !attrs_list_member (*dstp, src->dv, src->offset))
1028 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1032 /* Shared hashtable support. */
1034 /* Return true if VARS is shared. */
1037 shared_hash_shared (shared_hash vars)
1039 return vars->refcount > 1;
1042 /* Return the hash table for VARS. */
1044 static inline htab_t
1045 shared_hash_htab (shared_hash vars)
1050 /* Copy variables into a new hash table. */
1053 shared_hash_unshare (shared_hash vars)
1055 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1056 gcc_assert (vars->refcount > 1);
1057 new_vars->refcount = 1;
1059 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1060 variable_htab_eq, variable_htab_free);
1061 vars_copy (new_vars->htab, vars->htab);
1066 /* Increment reference counter on VARS and return it. */
1068 static inline shared_hash
1069 shared_hash_copy (shared_hash vars)
1075 /* Decrement reference counter and destroy hash table if not shared
1079 shared_hash_destroy (shared_hash vars)
1081 gcc_assert (vars->refcount > 0);
1082 if (--vars->refcount == 0)
1084 htab_delete (vars->htab);
1085 pool_free (shared_hash_pool, vars);
1089 /* Unshare *PVARS if shared and return slot for DV. If INS is
1090 INSERT, insert it if not already present. */
1092 static inline void **
1093 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1094 hashval_t dvhash, enum insert_option ins)
1096 if (shared_hash_shared (*pvars))
1097 *pvars = shared_hash_unshare (*pvars);
1098 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1101 static inline void **
1102 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1103 enum insert_option ins)
1105 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1108 /* Return slot for DV, if it is already present in the hash table.
1109 If it is not present, insert it only VARS is not shared, otherwise
1112 static inline void **
1113 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1115 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1116 shared_hash_shared (vars)
1117 ? NO_INSERT : INSERT);
1120 static inline void **
1121 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1123 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1126 /* Return slot for DV only if it is already present in the hash table. */
1128 static inline void **
1129 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1132 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1136 static inline void **
1137 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1139 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1142 /* Return variable for DV or NULL if not already present in the hash
1145 static inline variable
1146 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1148 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1151 static inline variable
1152 shared_hash_find (shared_hash vars, decl_or_value dv)
1154 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1157 /* Determine a total order between two distinct pointers. Compare the
1158 pointers as integral types if size_t is wide enough, otherwise
1159 resort to bitwise memory compare. The actual order does not
1160 matter, we just need to be consistent, so endianness is
1164 tie_break_pointers (const void *p1, const void *p2)
1166 gcc_assert (p1 != p2);
1168 if (sizeof (size_t) >= sizeof (void*))
1169 return (size_t)p1 < (size_t)p2 ? -1 : 1;
1171 return memcmp (&p1, &p2, sizeof (p1));
1174 /* Return true if TVAL is better than CVAL as a canonival value. We
1175 choose lowest-numbered VALUEs, using the RTX address as a
1176 tie-breaker. The idea is to arrange them into a star topology,
1177 such that all of them are at most one step away from the canonical
1178 value, and the canonical value has backlinks to all of them, in
1179 addition to all the actual locations. We don't enforce this
1180 topology throughout the entire dataflow analysis, though.
1184 canon_value_cmp (rtx tval, rtx cval)
1187 || CSELIB_VAL_PTR (tval)->value < CSELIB_VAL_PTR (cval)->value
1188 || (CSELIB_VAL_PTR (tval)->value == CSELIB_VAL_PTR (cval)->value
1189 && tie_break_pointers (tval, cval) < 0);
1192 static bool dst_can_be_shared;
1194 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1197 unshare_variable (dataflow_set *set, void **slot, variable var,
1198 enum var_init_status initialized)
1203 new_var = (variable) pool_alloc (dv_pool (var->dv));
1204 new_var->dv = var->dv;
1205 new_var->refcount = 1;
1207 new_var->n_var_parts = var->n_var_parts;
1209 if (! flag_var_tracking_uninit)
1210 initialized = VAR_INIT_STATUS_INITIALIZED;
1212 for (i = 0; i < var->n_var_parts; i++)
1214 location_chain node;
1215 location_chain *nextp;
1217 new_var->var_part[i].offset = var->var_part[i].offset;
1218 nextp = &new_var->var_part[i].loc_chain;
1219 for (node = var->var_part[i].loc_chain; node; node = node->next)
1221 location_chain new_lc;
1223 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1224 new_lc->next = NULL;
1225 if (node->init > initialized)
1226 new_lc->init = node->init;
1228 new_lc->init = initialized;
1229 if (node->set_src && !(MEM_P (node->set_src)))
1230 new_lc->set_src = node->set_src;
1232 new_lc->set_src = NULL;
1233 new_lc->loc = node->loc;
1236 nextp = &new_lc->next;
1239 /* We are at the basic block boundary when copying variable description
1240 so set the CUR_LOC to be the first element of the chain. */
1241 if (new_var->var_part[i].loc_chain)
1242 new_var->var_part[i].cur_loc = new_var->var_part[i].loc_chain->loc;
1244 new_var->var_part[i].cur_loc = NULL;
1247 dst_can_be_shared = false;
1248 if (shared_hash_shared (set->vars))
1249 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1250 else if (set->traversed_vars && set->vars != set->traversed_vars)
1251 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1256 /* Add a variable from *SLOT to hash table DATA and increase its reference
1260 vars_copy_1 (void **slot, void *data)
1262 htab_t dst = (htab_t) data;
1266 src = (variable) *slot;
1269 dstp = htab_find_slot_with_hash (dst, src->dv,
1270 dv_htab_hash (src->dv),
1274 /* Continue traversing the hash table. */
1278 /* Copy all variables from hash table SRC to hash table DST. */
1281 vars_copy (htab_t dst, htab_t src)
1283 htab_traverse_noresize (src, vars_copy_1, dst);
1286 /* Map a decl to its main debug decl. */
1289 var_debug_decl (tree decl)
1291 if (decl && DECL_P (decl)
1292 && DECL_DEBUG_EXPR_IS_FROM (decl) && DECL_DEBUG_EXPR (decl)
1293 && DECL_P (DECL_DEBUG_EXPR (decl)))
1294 decl = DECL_DEBUG_EXPR (decl);
1299 /* Set the register LOC to contain DV, OFFSET. */
1302 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1303 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1304 enum insert_option iopt)
1307 bool decl_p = dv_is_decl_p (dv);
1310 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1312 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1313 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1314 && node->offset == offset)
1317 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1318 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1321 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1324 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1327 tree decl = REG_EXPR (loc);
1328 HOST_WIDE_INT offset = REG_OFFSET (loc);
1330 var_reg_decl_set (set, loc, initialized,
1331 dv_from_decl (decl), offset, set_src, INSERT);
1334 static enum var_init_status
1335 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1339 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1341 if (! flag_var_tracking_uninit)
1342 return VAR_INIT_STATUS_INITIALIZED;
1344 var = shared_hash_find (set->vars, dv);
1347 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1349 location_chain nextp;
1350 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1351 if (rtx_equal_p (nextp->loc, loc))
1353 ret_val = nextp->init;
1362 /* Delete current content of register LOC in dataflow set SET and set
1363 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1364 MODIFY is true, any other live copies of the same variable part are
1365 also deleted from the dataflow set, otherwise the variable part is
1366 assumed to be copied from another location holding the same
1370 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1371 enum var_init_status initialized, rtx set_src)
1373 tree decl = REG_EXPR (loc);
1374 HOST_WIDE_INT offset = REG_OFFSET (loc);
1378 decl = var_debug_decl (decl);
1380 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1381 initialized = get_init_value (set, loc, dv_from_decl (decl));
1383 nextp = &set->regs[REGNO (loc)];
1384 for (node = *nextp; node; node = next)
1387 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1389 delete_variable_part (set, node->loc, node->dv, node->offset);
1390 pool_free (attrs_pool, node);
1396 nextp = &node->next;
1400 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1401 var_reg_set (set, loc, initialized, set_src);
1404 /* Delete the association of register LOC in dataflow set SET with any
1405 variables that aren't onepart. If CLOBBER is true, also delete any
1406 other live copies of the same variable part, and delete the
1407 association with onepart dvs too. */
1410 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1412 attrs *nextp = &set->regs[REGNO (loc)];
1417 tree decl = REG_EXPR (loc);
1418 HOST_WIDE_INT offset = REG_OFFSET (loc);
1420 decl = var_debug_decl (decl);
1422 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1425 for (node = *nextp; node; node = next)
1428 if (clobber || !dv_onepart_p (node->dv))
1430 delete_variable_part (set, node->loc, node->dv, node->offset);
1431 pool_free (attrs_pool, node);
1435 nextp = &node->next;
1439 /* Delete content of register with number REGNO in dataflow set SET. */
1442 var_regno_delete (dataflow_set *set, int regno)
1444 attrs *reg = &set->regs[regno];
1447 for (node = *reg; node; node = next)
1450 delete_variable_part (set, node->loc, node->dv, node->offset);
1451 pool_free (attrs_pool, node);
1456 /* Set the location of DV, OFFSET as the MEM LOC. */
1459 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1460 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1461 enum insert_option iopt)
1463 if (dv_is_decl_p (dv))
1464 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1466 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1469 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1471 Adjust the address first if it is stack pointer based. */
1474 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1477 tree decl = MEM_EXPR (loc);
1478 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1480 var_mem_decl_set (set, loc, initialized,
1481 dv_from_decl (decl), offset, set_src, INSERT);
1484 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1485 dataflow set SET to LOC. If MODIFY is true, any other live copies
1486 of the same variable part are also deleted from the dataflow set,
1487 otherwise the variable part is assumed to be copied from another
1488 location holding the same part.
1489 Adjust the address first if it is stack pointer based. */
1492 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1493 enum var_init_status initialized, rtx set_src)
1495 tree decl = MEM_EXPR (loc);
1496 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1498 decl = var_debug_decl (decl);
1500 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1501 initialized = get_init_value (set, loc, dv_from_decl (decl));
1504 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1505 var_mem_set (set, loc, initialized, set_src);
1508 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1509 true, also delete any other live copies of the same variable part.
1510 Adjust the address first if it is stack pointer based. */
1513 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1515 tree decl = MEM_EXPR (loc);
1516 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1518 decl = var_debug_decl (decl);
1520 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1521 delete_variable_part (set, loc, dv_from_decl (decl), offset);
1524 /* Map a value to a location it was just stored in. */
1527 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn)
1529 cselib_val *v = CSELIB_VAL_PTR (val);
1531 gcc_assert (cselib_preserved_value_p (v));
1535 fprintf (dump_file, "%i: ", INSN_UID (insn));
1536 print_inline_rtx (dump_file, val, 0);
1537 fprintf (dump_file, " stored in ");
1538 print_inline_rtx (dump_file, loc, 0);
1541 struct elt_loc_list *l;
1542 for (l = v->locs; l; l = l->next)
1544 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
1545 print_inline_rtx (dump_file, l->loc, 0);
1548 fprintf (dump_file, "\n");
1553 var_regno_delete (set, REGNO (loc));
1554 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1555 dv_from_value (val), 0, NULL_RTX, INSERT);
1557 else if (MEM_P (loc))
1558 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1559 dv_from_value (val), 0, NULL_RTX, INSERT);
1561 set_variable_part (set, loc, dv_from_value (val), 0,
1562 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1565 /* Reset this node, detaching all its equivalences. Return the slot
1566 in the variable hash table that holds dv, if there is one. */
1569 val_reset (dataflow_set *set, decl_or_value dv)
1571 variable var = shared_hash_find (set->vars, dv) ;
1572 location_chain node;
1575 if (!var || !var->n_var_parts)
1578 gcc_assert (var->n_var_parts == 1);
1581 for (node = var->var_part[0].loc_chain; node; node = node->next)
1582 if (GET_CODE (node->loc) == VALUE
1583 && canon_value_cmp (node->loc, cval))
1586 for (node = var->var_part[0].loc_chain; node; node = node->next)
1587 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
1589 /* Redirect the equivalence link to the new canonical
1590 value, or simply remove it if it would point at
1593 set_variable_part (set, cval, dv_from_value (node->loc),
1594 0, node->init, node->set_src, NO_INSERT);
1595 delete_variable_part (set, dv_as_value (dv),
1596 dv_from_value (node->loc), 0);
1601 decl_or_value cdv = dv_from_value (cval);
1603 /* Keep the remaining values connected, accummulating links
1604 in the canonical value. */
1605 for (node = var->var_part[0].loc_chain; node; node = node->next)
1607 if (node->loc == cval)
1609 else if (GET_CODE (node->loc) == REG)
1610 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
1611 node->set_src, NO_INSERT);
1612 else if (GET_CODE (node->loc) == MEM)
1613 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
1614 node->set_src, NO_INSERT);
1616 set_variable_part (set, node->loc, cdv, 0,
1617 node->init, node->set_src, NO_INSERT);
1621 /* We remove this last, to make sure that the canonical value is not
1622 removed to the point of requiring reinsertion. */
1624 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
1626 clobber_variable_part (set, NULL, dv, 0, NULL);
1628 /* ??? Should we make sure there aren't other available values or
1629 variables whose values involve this one other than by
1630 equivalence? E.g., at the very least we should reset MEMs, those
1631 shouldn't be too hard to find cselib-looking up the value as an
1632 address, then locating the resulting value in our own hash
1636 /* Find the values in a given location and map the val to another
1637 value, if it is unique, or add the location as one holding the
1641 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
1643 decl_or_value dv = dv_from_value (val);
1645 if (dump_file && (dump_flags & TDF_DETAILS))
1648 fprintf (dump_file, "%i: ", INSN_UID (insn));
1650 fprintf (dump_file, "head: ");
1651 print_inline_rtx (dump_file, val, 0);
1652 fputs (" is at ", dump_file);
1653 print_inline_rtx (dump_file, loc, 0);
1654 fputc ('\n', dump_file);
1657 val_reset (set, dv);
1661 attrs node, found = NULL;
1663 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1664 if (dv_is_value_p (node->dv)
1665 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
1669 /* Map incoming equivalences. ??? Wouldn't it be nice if
1670 we just started sharing the location lists? Maybe a
1671 circular list ending at the value itself or some
1673 set_variable_part (set, dv_as_value (node->dv),
1674 dv_from_value (val), node->offset,
1675 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1676 set_variable_part (set, val, node->dv, node->offset,
1677 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1680 /* If we didn't find any equivalence, we need to remember that
1681 this value is held in the named register. */
1683 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1684 dv_from_value (val), 0, NULL_RTX, INSERT);
1686 else if (MEM_P (loc))
1687 /* ??? Merge equivalent MEMs. */
1688 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1689 dv_from_value (val), 0, NULL_RTX, INSERT);
1691 /* ??? Merge equivalent expressions. */
1692 set_variable_part (set, loc, dv_from_value (val), 0,
1693 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1696 /* Initialize dataflow set SET to be empty.
1697 VARS_SIZE is the initial size of hash table VARS. */
1700 dataflow_set_init (dataflow_set *set)
1702 init_attrs_list_set (set->regs);
1703 set->vars = shared_hash_copy (empty_shared_hash);
1704 set->stack_adjust = 0;
1705 set->traversed_vars = NULL;
1708 /* Delete the contents of dataflow set SET. */
1711 dataflow_set_clear (dataflow_set *set)
1715 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1716 attrs_list_clear (&set->regs[i]);
1718 shared_hash_destroy (set->vars);
1719 set->vars = shared_hash_copy (empty_shared_hash);
1722 /* Copy the contents of dataflow set SRC to DST. */
1725 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
1729 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1730 attrs_list_copy (&dst->regs[i], src->regs[i]);
1732 shared_hash_destroy (dst->vars);
1733 dst->vars = shared_hash_copy (src->vars);
1734 dst->stack_adjust = src->stack_adjust;
1737 /* Information for merging lists of locations for a given offset of variable.
1739 struct variable_union_info
1741 /* Node of the location chain. */
1744 /* The sum of positions in the input chains. */
1747 /* The position in the chain of DST dataflow set. */
1751 /* Buffer for location list sorting and its allocated size. */
1752 static struct variable_union_info *vui_vec;
1753 static int vui_allocated;
1755 /* Compare function for qsort, order the structures by POS element. */
1758 variable_union_info_cmp_pos (const void *n1, const void *n2)
1760 const struct variable_union_info *const i1 =
1761 (const struct variable_union_info *) n1;
1762 const struct variable_union_info *const i2 =
1763 ( const struct variable_union_info *) n2;
1765 if (i1->pos != i2->pos)
1766 return i1->pos - i2->pos;
1768 return (i1->pos_dst - i2->pos_dst);
1771 /* Compute union of location parts of variable *SLOT and the same variable
1772 from hash table DATA. Compute "sorted" union of the location chains
1773 for common offsets, i.e. the locations of a variable part are sorted by
1774 a priority where the priority is the sum of the positions in the 2 chains
1775 (if a location is only in one list the position in the second list is
1776 defined to be larger than the length of the chains).
1777 When we are updating the location parts the newest location is in the
1778 beginning of the chain, so when we do the described "sorted" union
1779 we keep the newest locations in the beginning. */
1782 variable_union (void **slot, void *data)
1786 dataflow_set *set = (dataflow_set *) data;
1789 src = (variable) *slot;
1790 dstp = shared_hash_find_slot (set->vars, src->dv);
1791 if (!dstp || !*dstp)
1795 dst_can_be_shared = false;
1797 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
1801 /* If CUR_LOC of some variable part is not the first element of
1802 the location chain we are going to change it so we have to make
1803 a copy of the variable. */
1804 for (k = 0; k < src->n_var_parts; k++)
1806 gcc_assert (!src->var_part[k].loc_chain
1807 == !src->var_part[k].cur_loc);
1808 if (src->var_part[k].loc_chain)
1810 gcc_assert (src->var_part[k].cur_loc);
1811 if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
1815 if (k < src->n_var_parts)
1816 dstp = unshare_variable (set, dstp, src, VAR_INIT_STATUS_UNKNOWN);
1818 /* Continue traversing the hash table. */
1822 dst = (variable) *dstp;
1824 gcc_assert (src->n_var_parts);
1826 /* We can combine one-part variables very efficiently, because their
1827 entries are in canonical order. */
1828 if (dv_onepart_p (src->dv))
1830 location_chain *nodep, dnode, snode;
1832 gcc_assert (src->n_var_parts == 1);
1833 gcc_assert (dst->n_var_parts == 1);
1835 snode = src->var_part[0].loc_chain;
1838 restart_onepart_unshared:
1839 nodep = &dst->var_part[0].loc_chain;
1845 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
1849 location_chain nnode;
1851 if (dst->refcount != 1 || shared_hash_shared (set->vars))
1853 dstp = unshare_variable (set, dstp, dst,
1854 VAR_INIT_STATUS_INITIALIZED);
1855 dst = (variable)*dstp;
1856 goto restart_onepart_unshared;
1859 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
1860 nnode->loc = snode->loc;
1861 nnode->init = snode->init;
1862 if (!snode->set_src || MEM_P (snode->set_src))
1863 nnode->set_src = NULL;
1865 nnode->set_src = snode->set_src;
1866 nnode->next = dnode;
1869 #ifdef ENABLE_CHECKING
1871 gcc_assert (rtx_equal_p (dnode->loc, snode->loc));
1875 snode = snode->next;
1877 nodep = &dnode->next;
1881 dst->var_part[0].cur_loc = dst->var_part[0].loc_chain->loc;
1886 /* Count the number of location parts, result is K. */
1887 for (i = 0, j = 0, k = 0;
1888 i < src->n_var_parts && j < dst->n_var_parts; k++)
1890 if (src->var_part[i].offset == dst->var_part[j].offset)
1895 else if (src->var_part[i].offset < dst->var_part[j].offset)
1900 k += src->n_var_parts - i;
1901 k += dst->n_var_parts - j;
1903 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
1904 thus there are at most MAX_VAR_PARTS different offsets. */
1905 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
1907 if ((dst->refcount > 1 || shared_hash_shared (set->vars))
1908 && dst->n_var_parts != k)
1910 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
1911 dst = (variable)*dstp;
1914 i = src->n_var_parts - 1;
1915 j = dst->n_var_parts - 1;
1916 dst->n_var_parts = k;
1918 for (k--; k >= 0; k--)
1920 location_chain node, node2;
1922 if (i >= 0 && j >= 0
1923 && src->var_part[i].offset == dst->var_part[j].offset)
1925 /* Compute the "sorted" union of the chains, i.e. the locations which
1926 are in both chains go first, they are sorted by the sum of
1927 positions in the chains. */
1930 struct variable_union_info *vui;
1932 /* If DST is shared compare the location chains.
1933 If they are different we will modify the chain in DST with
1934 high probability so make a copy of DST. */
1935 if (dst->refcount > 1 || shared_hash_shared (set->vars))
1937 for (node = src->var_part[i].loc_chain,
1938 node2 = dst->var_part[j].loc_chain; node && node2;
1939 node = node->next, node2 = node2->next)
1941 if (!((REG_P (node2->loc)
1942 && REG_P (node->loc)
1943 && REGNO (node2->loc) == REGNO (node->loc))
1944 || rtx_equal_p (node2->loc, node->loc)))
1946 if (node2->init < node->init)
1947 node2->init = node->init;
1953 dstp = unshare_variable (set, dstp, dst,
1954 VAR_INIT_STATUS_UNKNOWN);
1955 dst = (variable)*dstp;
1960 for (node = src->var_part[i].loc_chain; node; node = node->next)
1963 for (node = dst->var_part[j].loc_chain; node; node = node->next)
1968 /* The most common case, much simpler, no qsort is needed. */
1969 location_chain dstnode = dst->var_part[j].loc_chain;
1970 dst->var_part[k].loc_chain = dstnode;
1971 dst->var_part[k].offset = dst->var_part[j].offset;
1973 for (node = src->var_part[i].loc_chain; node; node = node->next)
1974 if (!((REG_P (dstnode->loc)
1975 && REG_P (node->loc)
1976 && REGNO (dstnode->loc) == REGNO (node->loc))
1977 || rtx_equal_p (dstnode->loc, node->loc)))
1979 location_chain new_node;
1981 /* Copy the location from SRC. */
1982 new_node = (location_chain) pool_alloc (loc_chain_pool);
1983 new_node->loc = node->loc;
1984 new_node->init = node->init;
1985 if (!node->set_src || MEM_P (node->set_src))
1986 new_node->set_src = NULL;
1988 new_node->set_src = node->set_src;
1989 node2->next = new_node;
1996 if (src_l + dst_l > vui_allocated)
1998 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
1999 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2004 /* Fill in the locations from DST. */
2005 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2006 node = node->next, jj++)
2009 vui[jj].pos_dst = jj;
2011 /* Pos plus value larger than a sum of 2 valid positions. */
2012 vui[jj].pos = jj + src_l + dst_l;
2015 /* Fill in the locations from SRC. */
2017 for (node = src->var_part[i].loc_chain, ii = 0; node;
2018 node = node->next, ii++)
2020 /* Find location from NODE. */
2021 for (jj = 0; jj < dst_l; jj++)
2023 if ((REG_P (vui[jj].lc->loc)
2024 && REG_P (node->loc)
2025 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2026 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2028 vui[jj].pos = jj + ii;
2032 if (jj >= dst_l) /* The location has not been found. */
2034 location_chain new_node;
2036 /* Copy the location from SRC. */
2037 new_node = (location_chain) pool_alloc (loc_chain_pool);
2038 new_node->loc = node->loc;
2039 new_node->init = node->init;
2040 if (!node->set_src || MEM_P (node->set_src))
2041 new_node->set_src = NULL;
2043 new_node->set_src = node->set_src;
2044 vui[n].lc = new_node;
2045 vui[n].pos_dst = src_l + dst_l;
2046 vui[n].pos = ii + src_l + dst_l;
2053 /* Special case still very common case. For dst_l == 2
2054 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2055 vui[i].pos == i + src_l + dst_l. */
2056 if (vui[0].pos > vui[1].pos)
2058 /* Order should be 1, 0, 2... */
2059 dst->var_part[k].loc_chain = vui[1].lc;
2060 vui[1].lc->next = vui[0].lc;
2063 vui[0].lc->next = vui[2].lc;
2064 vui[n - 1].lc->next = NULL;
2067 vui[0].lc->next = NULL;
2072 dst->var_part[k].loc_chain = vui[0].lc;
2073 if (n >= 3 && vui[2].pos < vui[1].pos)
2075 /* Order should be 0, 2, 1, 3... */
2076 vui[0].lc->next = vui[2].lc;
2077 vui[2].lc->next = vui[1].lc;
2080 vui[1].lc->next = vui[3].lc;
2081 vui[n - 1].lc->next = NULL;
2084 vui[1].lc->next = NULL;
2089 /* Order should be 0, 1, 2... */
2091 vui[n - 1].lc->next = NULL;
2094 for (; ii < n; ii++)
2095 vui[ii - 1].lc->next = vui[ii].lc;
2099 qsort (vui, n, sizeof (struct variable_union_info),
2100 variable_union_info_cmp_pos);
2102 /* Reconnect the nodes in sorted order. */
2103 for (ii = 1; ii < n; ii++)
2104 vui[ii - 1].lc->next = vui[ii].lc;
2105 vui[n - 1].lc->next = NULL;
2106 dst->var_part[k].loc_chain = vui[0].lc;
2109 dst->var_part[k].offset = dst->var_part[j].offset;
2114 else if ((i >= 0 && j >= 0
2115 && src->var_part[i].offset < dst->var_part[j].offset)
2118 dst->var_part[k] = dst->var_part[j];
2121 else if ((i >= 0 && j >= 0
2122 && src->var_part[i].offset > dst->var_part[j].offset)
2125 location_chain *nextp;
2127 /* Copy the chain from SRC. */
2128 nextp = &dst->var_part[k].loc_chain;
2129 for (node = src->var_part[i].loc_chain; node; node = node->next)
2131 location_chain new_lc;
2133 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2134 new_lc->next = NULL;
2135 new_lc->init = node->init;
2136 if (!node->set_src || MEM_P (node->set_src))
2137 new_lc->set_src = NULL;
2139 new_lc->set_src = node->set_src;
2140 new_lc->loc = node->loc;
2143 nextp = &new_lc->next;
2146 dst->var_part[k].offset = src->var_part[i].offset;
2150 /* We are at the basic block boundary when computing union
2151 so set the CUR_LOC to be the first element of the chain. */
2152 if (dst->var_part[k].loc_chain)
2153 dst->var_part[k].cur_loc = dst->var_part[k].loc_chain->loc;
2155 dst->var_part[k].cur_loc = NULL;
2158 if (flag_var_tracking_uninit)
2159 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2161 location_chain node, node2;
2162 for (node = src->var_part[i].loc_chain; node; node = node->next)
2163 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2164 if (rtx_equal_p (node->loc, node2->loc))
2166 if (node->init > node2->init)
2167 node2->init = node->init;
2171 /* Continue traversing the hash table. */
2175 /* Like variable_union, but only used when doing dataflow_set_union
2176 into an empty hashtab. To allow sharing, dst is initially shared
2177 with src (so all variables are "copied" from src to dst hashtab),
2178 so only unshare_variable for variables that need canonicalization
2182 variable_canonicalize (void **slot, void *data)
2185 dataflow_set *set = (dataflow_set *) data;
2188 src = *(variable *) slot;
2190 /* If CUR_LOC of some variable part is not the first element of
2191 the location chain we are going to change it so we have to make
2192 a copy of the variable. */
2193 for (k = 0; k < src->n_var_parts; k++)
2195 gcc_assert (!src->var_part[k].loc_chain == !src->var_part[k].cur_loc);
2196 if (src->var_part[k].loc_chain)
2198 gcc_assert (src->var_part[k].cur_loc);
2199 if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
2203 if (k < src->n_var_parts)
2204 slot = unshare_variable (set, slot, src, VAR_INIT_STATUS_UNKNOWN);
2208 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2211 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2215 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2216 attrs_list_union (&dst->regs[i], src->regs[i]);
2218 if (dst->vars == empty_shared_hash)
2220 shared_hash_destroy (dst->vars);
2221 dst->vars = shared_hash_copy (src->vars);
2222 dst->traversed_vars = dst->vars;
2223 htab_traverse (shared_hash_htab (dst->vars), variable_canonicalize, dst);
2224 dst->traversed_vars = NULL;
2227 htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
2230 /* Whether the value is currently being expanded. */
2231 #define VALUE_RECURSED_INTO(x) \
2232 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2233 /* Whether the value is in changed_variables hash table. */
2234 #define VALUE_CHANGED(x) \
2235 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2236 /* Whether the decl is in changed_variables hash table. */
2237 #define DECL_CHANGED(x) TREE_VISITED (x)
2239 /* Record that DV has been added into resp. removed from changed_variables
2243 set_dv_changed (decl_or_value dv, bool newv)
2245 if (dv_is_value_p (dv))
2246 VALUE_CHANGED (dv_as_value (dv)) = newv;
2248 DECL_CHANGED (dv_as_decl (dv)) = newv;
2251 /* Return true if DV is present in changed_variables hash table. */
2254 dv_changed_p (decl_or_value dv)
2256 return (dv_is_value_p (dv)
2257 ? VALUE_CHANGED (dv_as_value (dv))
2258 : DECL_CHANGED (dv_as_decl (dv)));
2261 /* Vector of VALUEs that should have VALUE_RECURSED_INTO bit cleared
2262 at the end of find_loc_in_1pdv. Not a static variable in find_loc_in_1pdv
2263 to avoid constant allocation/freeing of it. */
2264 static VEC(rtx, heap) *values_to_unmark;
2266 /* Helper function for find_loc_in_1pdv.
2267 Return a location list node whose loc is rtx_equal to LOC, in the
2268 location list of a one-part variable or value VAR, or in that of
2269 any values recursively mentioned in the location lists. */
2271 static location_chain
2272 find_loc_in_1pdv_1 (rtx loc, variable var, htab_t vars)
2274 location_chain node;
2279 gcc_assert (dv_onepart_p (var->dv));
2281 if (!var->n_var_parts)
2284 gcc_assert (var->var_part[0].offset == 0);
2286 for (node = var->var_part[0].loc_chain; node; node = node->next)
2287 if (rtx_equal_p (loc, node->loc))
2289 else if (GET_CODE (node->loc) == VALUE
2290 && !VALUE_RECURSED_INTO (node->loc))
2292 decl_or_value dv = dv_from_value (node->loc);
2293 variable var = (variable)
2294 htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2298 location_chain where;
2299 VALUE_RECURSED_INTO (node->loc) = true;
2300 VEC_safe_push (rtx, heap, values_to_unmark, node->loc);
2301 if ((where = find_loc_in_1pdv_1 (loc, var, vars)))
2309 /* Return a location list node whose loc is rtx_equal to LOC, in the
2310 location list of a one-part variable or value VAR, or in that of
2311 any values recursively mentioned in the location lists. */
2313 static location_chain
2314 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2320 ret = find_loc_in_1pdv_1 (loc, var, vars);
2321 for (i = 0; VEC_iterate (rtx, values_to_unmark, i, value); i++)
2322 VALUE_RECURSED_INTO (value) = false;
2323 VEC_truncate (rtx, values_to_unmark, 0);
2327 /* Hash table iteration argument passed to variable_merge. */
2330 /* The set in which the merge is to be inserted. */
2332 /* The set that we're iterating in. */
2334 /* The set that may contain the other dv we are to merge with. */
2336 /* Number of onepart dvs in src. */
2337 int src_onepart_cnt;
2340 /* Insert LOC in *DNODE, if it's not there yet. The list must be in
2341 loc_cmp order, and it is maintained as such. */
2344 insert_into_intersection (location_chain *nodep, rtx loc,
2345 enum var_init_status status)
2347 location_chain node;
2350 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2351 if ((r = loc_cmp (node->loc, loc)) == 0)
2353 node->init = MIN (node->init, status);
2359 node = (location_chain) pool_alloc (loc_chain_pool);
2362 node->set_src = NULL;
2363 node->init = status;
2364 node->next = *nodep;
2368 /* Insert in DEST the intersection the locations present in both
2369 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2370 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
2374 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2375 location_chain s1node, variable s2var)
2377 dataflow_set *s1set = dsm->cur;
2378 dataflow_set *s2set = dsm->src;
2379 location_chain found;
2381 for (; s1node; s1node = s1node->next)
2383 if (s1node->loc == val)
2386 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2387 shared_hash_htab (s2set->vars))))
2389 insert_into_intersection (dest, s1node->loc,
2390 MIN (s1node->init, found->init));
2394 if (GET_CODE (s1node->loc) == VALUE
2395 && !VALUE_RECURSED_INTO (s1node->loc))
2397 decl_or_value dv = dv_from_value (s1node->loc);
2398 variable svar = shared_hash_find (s1set->vars, dv);
2401 if (svar->n_var_parts == 1)
2403 VALUE_RECURSED_INTO (s1node->loc) = true;
2404 intersect_loc_chains (val, dest, dsm,
2405 svar->var_part[0].loc_chain,
2407 VALUE_RECURSED_INTO (s1node->loc) = false;
2412 /* ??? if the location is equivalent to any location in src,
2413 searched recursively
2415 add to dst the values needed to represent the equivalence
2417 telling whether locations S is equivalent to another dv's
2420 for each location D in the list
2422 if S and D satisfy rtx_equal_p, then it is present
2424 else if D is a value, recurse without cycles
2426 else if S and D have the same CODE and MODE
2428 for each operand oS and the corresponding oD
2430 if oS and oD are not equivalent, then S an D are not equivalent
2432 else if they are RTX vectors
2434 if any vector oS element is not equivalent to its respective oD,
2435 then S and D are not equivalent
2443 /* Return -1 if X should be before Y in a location list for a 1-part
2444 variable, 1 if Y should be before X, and 0 if they're equivalent
2445 and should not appear in the list. */
2448 loc_cmp (rtx x, rtx y)
2451 RTX_CODE code = GET_CODE (x);
2461 gcc_assert (GET_MODE (x) == GET_MODE (y));
2462 if (REGNO (x) == REGNO (y))
2464 else if (REGNO (x) < REGNO (y))
2477 gcc_assert (GET_MODE (x) == GET_MODE (y));
2478 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
2484 if (GET_CODE (x) == VALUE)
2486 if (GET_CODE (y) != VALUE)
2488 gcc_assert (GET_MODE (x) == GET_MODE (y));
2489 if (canon_value_cmp (x, y))
2495 if (GET_CODE (y) == VALUE)
2498 if (GET_CODE (x) == GET_CODE (y))
2499 /* Compare operands below. */;
2500 else if (GET_CODE (x) < GET_CODE (y))
2505 gcc_assert (GET_MODE (x) == GET_MODE (y));
2507 fmt = GET_RTX_FORMAT (code);
2508 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2512 if (XWINT (x, i) == XWINT (y, i))
2514 else if (XWINT (x, i) < XWINT (y, i))
2521 if (XINT (x, i) == XINT (y, i))
2523 else if (XINT (x, i) < XINT (y, i))
2530 /* Compare the vector length first. */
2531 if (XVECLEN (x, i) == XVECLEN (y, i))
2532 /* Compare the vectors elements. */;
2533 else if (XVECLEN (x, i) < XVECLEN (y, i))
2538 for (j = 0; j < XVECLEN (x, i); j++)
2539 if ((r = loc_cmp (XVECEXP (x, i, j),
2540 XVECEXP (y, i, j))))
2545 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
2551 if (XSTR (x, i) == XSTR (y, i))
2557 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2565 /* These are just backpointers, so they don't matter. */
2572 /* It is believed that rtx's at this level will never
2573 contain anything but integers and other rtx's,
2574 except for within LABEL_REFs and SYMBOL_REFs. */
2582 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2583 from VALUE to DVP. */
2586 add_value_chain (rtx *loc, void *dvp)
2588 if (GET_CODE (*loc) == VALUE && (void *) *loc != dvp)
2590 decl_or_value dv = (decl_or_value) dvp;
2591 decl_or_value ldv = dv_from_value (*loc);
2592 value_chain vc, nvc;
2593 void **slot = htab_find_slot_with_hash (value_chains, ldv,
2594 dv_htab_hash (ldv), INSERT);
2597 vc = (value_chain) pool_alloc (value_chain_pool);
2601 *slot = (void *) vc;
2605 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2606 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
2614 vc = (value_chain) *slot;
2615 nvc = (value_chain) pool_alloc (value_chain_pool);
2617 nvc->next = vc->next;
2624 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2625 from those VALUEs to DVP. */
2628 add_value_chains (decl_or_value dv, rtx loc)
2630 if (GET_CODE (loc) == VALUE)
2632 add_value_chain (&loc, dv_as_opaque (dv));
2638 loc = XEXP (loc, 0);
2639 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
2642 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those
2646 add_cselib_value_chains (decl_or_value dv)
2648 struct elt_loc_list *l;
2650 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2651 for_each_rtx (&l->loc, add_value_chain, dv_as_opaque (dv));
2654 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
2655 from VALUE to DVP. */
2658 remove_value_chain (rtx *loc, void *dvp)
2660 if (GET_CODE (*loc) == VALUE && (void *) *loc != dvp)
2662 decl_or_value dv = (decl_or_value) dvp;
2663 decl_or_value ldv = dv_from_value (*loc);
2664 value_chain vc, dvc = NULL;
2665 void **slot = htab_find_slot_with_hash (value_chains, ldv,
2666 dv_htab_hash (ldv), NO_INSERT);
2667 for (vc = (value_chain) *slot; vc->next; vc = vc->next)
2668 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
2671 gcc_assert (dvc->refcount > 0);
2672 if (--dvc->refcount == 0)
2674 vc->next = dvc->next;
2675 pool_free (value_chain_pool, dvc);
2676 if (vc->next == NULL && vc == (value_chain) *slot)
2678 pool_free (value_chain_pool, vc);
2679 htab_clear_slot (value_chains, slot);
2689 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
2690 from those VALUEs to DVP. */
2693 remove_value_chains (decl_or_value dv, rtx loc)
2695 if (GET_CODE (loc) == VALUE)
2697 remove_value_chain (&loc, dv_as_opaque (dv));
2703 loc = XEXP (loc, 0);
2704 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
2707 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, remove backlinks from those
2711 remove_cselib_value_chains (decl_or_value dv)
2713 struct elt_loc_list *l;
2715 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2716 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
2720 /* Check the order of entries in one-part variables. */
2723 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
2725 variable var = (variable) *slot;
2726 decl_or_value dv = var->dv;
2727 location_chain node, next;
2729 if (!dv_onepart_p (dv))
2732 gcc_assert (var->n_var_parts == 1);
2733 node = var->var_part[0].loc_chain;
2736 while ((next = node->next))
2738 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
2746 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
2747 more likely to be chosen as canonical for an equivalence set.
2748 Ensure less likely values can reach more likely neighbors, making
2749 the connections bidirectional. */
2752 canonicalize_values_mark (void **slot, void *data)
2754 dataflow_set *set = (dataflow_set *)data;
2755 variable var = (variable) *slot;
2756 decl_or_value dv = var->dv;
2758 location_chain node;
2760 if (!dv_is_value_p (dv))
2763 gcc_assert (var->n_var_parts == 1);
2765 val = dv_as_value (dv);
2767 for (node = var->var_part[0].loc_chain; node; node = node->next)
2768 if (GET_CODE (node->loc) == VALUE)
2770 if (canon_value_cmp (node->loc, val))
2771 VALUE_RECURSED_INTO (val) = true;
2774 decl_or_value odv = dv_from_value (node->loc);
2775 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
2777 oslot = set_slot_part (set, val, oslot, odv, 0,
2778 node->init, NULL_RTX);
2780 VALUE_RECURSED_INTO (node->loc) = true;
2787 /* Remove redundant entries from equivalence lists in onepart
2788 variables, canonicalizing equivalence sets into star shapes. */
2791 canonicalize_values_star (void **slot, void *data)
2793 dataflow_set *set = (dataflow_set *)data;
2794 variable var = (variable) *slot;
2795 decl_or_value dv = var->dv;
2796 location_chain node;
2803 if (!dv_onepart_p (dv))
2806 gcc_assert (var->n_var_parts == 1);
2808 if (dv_is_value_p (dv))
2810 cval = dv_as_value (dv);
2811 if (!VALUE_RECURSED_INTO (cval))
2813 VALUE_RECURSED_INTO (cval) = false;
2823 gcc_assert (var->n_var_parts == 1);
2825 for (node = var->var_part[0].loc_chain; node; node = node->next)
2826 if (GET_CODE (node->loc) == VALUE)
2829 if (VALUE_RECURSED_INTO (node->loc))
2831 if (canon_value_cmp (node->loc, cval))
2840 if (!has_marks || dv_is_decl_p (dv))
2843 /* Keep it marked so that we revisit it, either after visiting a
2844 child node, or after visiting a new parent that might be
2846 VALUE_RECURSED_INTO (val) = true;
2848 for (node = var->var_part[0].loc_chain; node; node = node->next)
2849 if (GET_CODE (node->loc) == VALUE
2850 && VALUE_RECURSED_INTO (node->loc))
2854 VALUE_RECURSED_INTO (cval) = false;
2855 dv = dv_from_value (cval);
2856 slot = shared_hash_find_slot_noinsert (set->vars, dv);
2859 gcc_assert (dv_is_decl_p (var->dv));
2860 /* The canonical value was reset and dropped.
2862 clobber_variable_part (set, NULL, var->dv, 0, NULL);
2865 var = (variable)*slot;
2866 gcc_assert (dv_is_value_p (var->dv));
2867 if (var->n_var_parts == 0)
2869 gcc_assert (var->n_var_parts == 1);
2873 VALUE_RECURSED_INTO (val) = false;
2878 /* Push values to the canonical one. */
2879 cdv = dv_from_value (cval);
2880 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
2882 for (node = var->var_part[0].loc_chain; node; node = node->next)
2883 if (node->loc != cval)
2885 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
2886 node->init, NULL_RTX);
2887 if (GET_CODE (node->loc) == VALUE)
2889 decl_or_value ndv = dv_from_value (node->loc);
2891 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
2894 if (canon_value_cmp (node->loc, val))
2896 /* If it could have been a local minimum, it's not any more,
2897 since it's now neighbor to cval, so it may have to push
2898 to it. Conversely, if it wouldn't have prevailed over
2899 val, then whatever mark it has is fine: if it was to
2900 push, it will now push to a more canonical node, but if
2901 it wasn't, then it has already pushed any values it might
2903 VALUE_RECURSED_INTO (node->loc) = true;
2904 /* Make sure we visit node->loc by ensuring we cval is
2906 VALUE_RECURSED_INTO (cval) = true;
2908 else if (!VALUE_RECURSED_INTO (node->loc))
2909 /* If we have no need to "recurse" into this node, it's
2910 already "canonicalized", so drop the link to the old
2912 clobber_variable_part (set, cval, ndv, 0, NULL);
2914 else if (GET_CODE (node->loc) == REG)
2916 attrs list = set->regs[REGNO (node->loc)], *listp;
2918 /* Change an existing attribute referring to dv so that it
2919 refers to cdv, removing any duplicate this might
2920 introduce, and checking that no previous duplicates
2921 existed, all in a single pass. */
2925 if (list->offset == 0
2926 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2927 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
2934 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2937 for (listp = &list->next; (list = *listp); listp = &list->next)
2942 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2944 *listp = list->next;
2945 pool_free (attrs_pool, list);
2950 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
2953 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2955 for (listp = &list->next; (list = *listp); listp = &list->next)
2960 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2962 *listp = list->next;
2963 pool_free (attrs_pool, list);
2968 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
2977 if (list->offset == 0
2978 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2979 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
2989 cslot = set_slot_part (set, val, cslot, cdv, 0,
2990 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
2992 slot = clobber_slot_part (set, cval, slot, 0, NULL);
2994 /* Variable may have been unshared. */
2995 var = (variable)*slot;
2996 gcc_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
2997 && var->var_part[0].loc_chain->next == NULL);
2999 if (VALUE_RECURSED_INTO (cval))
3000 goto restart_with_cval;
3005 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3006 corresponding entry in DSM->src. Multi-part variables are combined
3007 with variable_union, whereas onepart dvs are combined with
3011 variable_merge_over_cur (void **s1slot, void *data)
3013 struct dfset_merge *dsm = (struct dfset_merge *)data;
3014 dataflow_set *dst = dsm->dst;
3016 variable s1var = (variable) *s1slot;
3017 variable s2var, dvar = NULL;
3018 decl_or_value dv = s1var->dv;
3019 bool onepart = dv_onepart_p (dv);
3022 location_chain node, *nodep;
3024 /* If the incoming onepart variable has an empty location list, then
3025 the intersection will be just as empty. For other variables,
3026 it's always union. */
3027 gcc_assert (s1var->n_var_parts);
3028 gcc_assert (s1var->var_part[0].loc_chain);
3031 return variable_union (s1slot, dst);
3033 gcc_assert (s1var->n_var_parts == 1);
3034 gcc_assert (s1var->var_part[0].offset == 0);
3036 dvhash = dv_htab_hash (dv);
3037 if (dv_is_value_p (dv))
3038 val = dv_as_value (dv);
3042 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3045 dst_can_be_shared = false;
3049 dsm->src_onepart_cnt--;
3050 gcc_assert (s2var->var_part[0].loc_chain);
3051 gcc_assert (s2var->n_var_parts == 1);
3052 gcc_assert (s2var->var_part[0].offset == 0);
3054 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3057 dvar = (variable)*dstslot;
3058 gcc_assert (dvar->refcount == 1);
3059 gcc_assert (dvar->n_var_parts == 1);
3060 gcc_assert (dvar->var_part[0].offset == 0);
3061 nodep = &dvar->var_part[0].loc_chain;
3069 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3071 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3073 *dstslot = dvar = s2var;
3078 dst_can_be_shared = false;
3080 intersect_loc_chains (val, nodep, dsm,
3081 s1var->var_part[0].loc_chain, s2var);
3087 dvar = (variable) pool_alloc (dv_pool (dv));
3090 dvar->n_var_parts = 1;
3091 dvar->var_part[0].offset = 0;
3092 dvar->var_part[0].loc_chain = node;
3093 dvar->var_part[0].cur_loc = node->loc;
3096 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3098 gcc_assert (!*dstslot);
3106 nodep = &dvar->var_part[0].loc_chain;
3107 while ((node = *nodep))
3109 location_chain *nextp = &node->next;
3111 if (GET_CODE (node->loc) == REG)
3115 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3116 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3117 && dv_is_value_p (list->dv))
3121 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3123 /* If this value became canonical for another value that had
3124 this register, we want to leave it alone. */
3125 else if (dv_as_value (list->dv) != val)
3127 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3129 node->init, NULL_RTX);
3130 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3132 /* Since nextp points into the removed node, we can't
3133 use it. The pointer to the next node moved to nodep.
3134 However, if the variable we're walking is unshared
3135 during our walk, we'll keep walking the location list
3136 of the previously-shared variable, in which case the
3137 node won't have been removed, and we'll want to skip
3138 it. That's why we test *nodep here. */
3144 /* Canonicalization puts registers first, so we don't have to
3150 if (dvar != (variable)*dstslot)
3151 dvar = (variable)*dstslot;
3152 nodep = &dvar->var_part[0].loc_chain;
3156 /* Mark all referenced nodes for canonicalization, and make sure
3157 we have mutual equivalence links. */
3158 VALUE_RECURSED_INTO (val) = true;
3159 for (node = *nodep; node; node = node->next)
3160 if (GET_CODE (node->loc) == VALUE)
3162 VALUE_RECURSED_INTO (node->loc) = true;
3163 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3164 node->init, NULL, INSERT);
3167 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3168 gcc_assert (*dstslot == dvar);
3169 canonicalize_values_star (dstslot, dst);
3170 #ifdef ENABLE_CHECKING
3172 == shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash));
3174 dvar = (variable)*dstslot;
3178 bool has_value = false, has_other = false;
3180 /* If we have one value and anything else, we're going to
3181 canonicalize this, so make sure all values have an entry in
3182 the table and are marked for canonicalization. */
3183 for (node = *nodep; node; node = node->next)
3185 if (GET_CODE (node->loc) == VALUE)
3187 /* If this was marked during register canonicalization,
3188 we know we have to canonicalize values. */
3203 if (has_value && has_other)
3205 for (node = *nodep; node; node = node->next)
3207 if (GET_CODE (node->loc) == VALUE)
3209 decl_or_value dv = dv_from_value (node->loc);
3212 if (shared_hash_shared (dst->vars))
3213 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3215 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3219 variable var = (variable) pool_alloc (dv_pool (dv));
3222 var->n_var_parts = 1;
3223 var->var_part[0].offset = 0;
3224 var->var_part[0].loc_chain = NULL;
3225 var->var_part[0].cur_loc = NULL;
3229 VALUE_RECURSED_INTO (node->loc) = true;
3233 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3234 gcc_assert (*dstslot == dvar);
3235 canonicalize_values_star (dstslot, dst);
3236 #ifdef ENABLE_CHECKING
3238 == shared_hash_find_slot_noinsert_1 (dst->vars,
3241 dvar = (variable)*dstslot;
3245 if (!onepart_variable_different_p (dvar, s2var))
3247 variable_htab_free (dvar);
3248 *dstslot = dvar = s2var;
3251 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3253 variable_htab_free (dvar);
3254 *dstslot = dvar = s1var;
3256 dst_can_be_shared = false;
3260 if (dvar->refcount == 1)
3261 dvar->var_part[0].cur_loc = dvar->var_part[0].loc_chain->loc;
3262 dst_can_be_shared = false;
3268 /* Combine variable in *S1SLOT (in DSM->src) with the corresponding
3269 entry in DSM->src. Only multi-part variables are combined, using
3270 variable_union. onepart dvs were already combined with
3271 intersection in variable_merge_over_cur(). */
3274 variable_merge_over_src (void **s2slot, void *data)
3276 struct dfset_merge *dsm = (struct dfset_merge *)data;
3277 dataflow_set *dst = dsm->dst;
3278 variable s2var = (variable) *s2slot;
3279 decl_or_value dv = s2var->dv;
3280 bool onepart = dv_onepart_p (dv);
3284 void **dstp = shared_hash_find_slot (dst->vars, dv);
3287 return variable_canonicalize (dstp, dst);
3290 dsm->src_onepart_cnt++;
3294 /* Combine dataflow set information from SRC into DST, using PDST
3295 to carry over information across passes. */
3298 dataflow_set_merge (dataflow_set *dst, dataflow_set *src)
3300 dataflow_set src2 = *dst;
3301 struct dfset_merge dsm;
3303 size_t src_elems, dst_elems;
3305 src_elems = htab_elements (shared_hash_htab (src->vars));
3306 dst_elems = htab_elements (shared_hash_htab (src2.vars));
3307 dataflow_set_init (dst);
3308 dst->stack_adjust = src2.stack_adjust;
3309 shared_hash_destroy (dst->vars);
3310 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3311 dst->vars->refcount = 1;
3313 = htab_create (MAX (src_elems, dst_elems), variable_htab_hash,
3314 variable_htab_eq, variable_htab_free);
3316 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3317 attrs_list_mpdv_union (&dst->regs[i], src->regs[i], src2.regs[i]);
3322 dsm.src_onepart_cnt = 0;
3324 htab_traverse (shared_hash_htab (dsm.src->vars), variable_merge_over_src,
3326 htab_traverse (shared_hash_htab (dsm.cur->vars), variable_merge_over_cur,
3329 if (dsm.src_onepart_cnt)
3330 dst_can_be_shared = false;
3332 dataflow_set_destroy (&src2);
3335 /* Mark register equivalences. */
3338 dataflow_set_equiv_regs (dataflow_set *set)
3343 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3345 rtx canon[NUM_MACHINE_MODES];
3347 memset (canon, 0, sizeof (canon));
3349 for (list = set->regs[i]; list; list = list->next)
3350 if (list->offset == 0 && dv_is_value_p (list->dv))
3352 rtx val = dv_as_value (list->dv);
3353 rtx *cvalp = &canon[(int)GET_MODE (val)];
3356 if (canon_value_cmp (val, cval))
3360 for (list = set->regs[i]; list; list = list->next)
3361 if (list->offset == 0 && dv_onepart_p (list->dv))
3363 rtx cval = canon[(int)GET_MODE (list->loc)];
3368 if (dv_is_value_p (list->dv))
3370 rtx val = dv_as_value (list->dv);
3375 VALUE_RECURSED_INTO (val) = true;
3376 set_variable_part (set, val, dv_from_value (cval), 0,
3377 VAR_INIT_STATUS_INITIALIZED,
3381 VALUE_RECURSED_INTO (cval) = true;
3382 set_variable_part (set, cval, list->dv, 0,
3383 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
3386 for (listp = &set->regs[i]; (list = *listp);
3387 listp = list ? &list->next : listp)
3388 if (list->offset == 0 && dv_onepart_p (list->dv))
3390 rtx cval = canon[(int)GET_MODE (list->loc)];
3396 if (dv_is_value_p (list->dv))
3398 rtx val = dv_as_value (list->dv);
3399 if (!VALUE_RECURSED_INTO (val))
3403 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3404 canonicalize_values_star (slot, set);
3411 /* Remove any redundant values in the location list of VAR, which must
3412 be unshared and 1-part. */
3415 remove_duplicate_values (variable var)
3417 location_chain node, *nodep;
3419 gcc_assert (dv_onepart_p (var->dv));
3420 gcc_assert (var->n_var_parts == 1);
3421 gcc_assert (var->refcount == 1);
3423 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3425 if (GET_CODE (node->loc) == VALUE)
3427 if (VALUE_RECURSED_INTO (node->loc))
3429 /* Remove duplicate value node. */
3430 *nodep = node->next;
3431 pool_free (loc_chain_pool, node);
3435 VALUE_RECURSED_INTO (node->loc) = true;
3437 nodep = &node->next;
3440 for (node = var->var_part[0].loc_chain; node; node = node->next)
3441 if (GET_CODE (node->loc) == VALUE)
3443 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3444 VALUE_RECURSED_INTO (node->loc) = false;
3449 /* Hash table iteration argument passed to variable_post_merge. */
3450 struct dfset_post_merge
3452 /* The new input set for the current block. */
3454 /* Pointer to the permanent input set for the current block, or
3456 dataflow_set **permp;
3459 /* Create values for incoming expressions associated with one-part
3460 variables that don't have value numbers for them. */
3463 variable_post_merge_new_vals (void **slot, void *info)
3465 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3466 dataflow_set *set = dfpm->set;
3467 variable var = (variable)*slot;
3468 location_chain node;
3470 if (!dv_onepart_p (var->dv) || !var->n_var_parts)
3473 gcc_assert (var->n_var_parts == 1);
3475 if (dv_is_decl_p (var->dv))
3477 bool check_dupes = false;
3480 for (node = var->var_part[0].loc_chain; node; node = node->next)
3482 if (GET_CODE (node->loc) == VALUE)
3483 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
3484 else if (GET_CODE (node->loc) == REG)
3486 attrs att, *attp, *curp = NULL;
3488 if (var->refcount != 1)
3490 slot = unshare_variable (set, slot, var,
3491 VAR_INIT_STATUS_INITIALIZED);
3492 var = (variable)*slot;
3496 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3498 if (att->offset == 0
3499 && GET_MODE (att->loc) == GET_MODE (node->loc))
3501 if (dv_is_value_p (att->dv))
3503 rtx cval = dv_as_value (att->dv);
3508 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
3516 if ((*curp)->offset == 0
3517 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
3518 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
3521 curp = &(*curp)->next;
3532 *dfpm->permp = XNEW (dataflow_set);
3533 dataflow_set_init (*dfpm->permp);
3536 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
3537 att; att = att->next)
3538 if (GET_MODE (att->loc) == GET_MODE (node->loc))
3540 gcc_assert (att->offset == 0);
3541 gcc_assert (dv_is_value_p (att->dv));
3542 val_reset (set, att->dv);
3549 cval = dv_as_value (cdv);
3553 /* Create a unique value to hold this register,
3554 that ought to be found and reused in
3555 subsequent rounds. */
3557 gcc_assert (!cselib_lookup (node->loc,
3558 GET_MODE (node->loc), 0));
3559 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
3560 cselib_preserve_value (v);
3561 cselib_invalidate_rtx (node->loc);
3563 cdv = dv_from_value (cval);
3566 "Created new value %i for reg %i\n",
3567 v->value, REGNO (node->loc));
3570 var_reg_decl_set (*dfpm->permp, node->loc,
3571 VAR_INIT_STATUS_INITIALIZED,
3572 cdv, 0, NULL, INSERT);
3578 /* Remove attribute referring to the decl, which now
3579 uses the value for the register, already existing or
3580 to be added when we bring perm in. */
3583 pool_free (attrs_pool, att);
3588 remove_duplicate_values (var);
3594 /* Reset values in the permanent set that are not associated with the
3595 chosen expression. */
3598 variable_post_merge_perm_vals (void **pslot, void *info)
3600 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3601 dataflow_set *set = dfpm->set;
3602 variable pvar = (variable)*pslot, var;
3603 location_chain pnode;
3607 gcc_assert (dv_is_value_p (pvar->dv));
3608 gcc_assert (pvar->n_var_parts == 1);
3609 pnode = pvar->var_part[0].loc_chain;
3611 gcc_assert (!pnode->next);
3612 gcc_assert (REG_P (pnode->loc));
3616 var = shared_hash_find (set->vars, dv);
3619 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
3621 val_reset (set, dv);
3624 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
3625 if (att->offset == 0
3626 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
3627 && dv_is_value_p (att->dv))
3630 /* If there is a value associated with this register already, create
3632 if (att && dv_as_value (att->dv) != dv_as_value (dv))
3634 rtx cval = dv_as_value (att->dv);
3635 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
3636 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
3641 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
3643 variable_union (pslot, set);
3649 /* Just checking stuff and registering register attributes for
3653 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
3655 struct dfset_post_merge dfpm;
3660 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
3663 htab_traverse (shared_hash_htab ((*permp)->vars),
3664 variable_post_merge_perm_vals, &dfpm);
3665 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
3668 /* Return a node whose loc is a MEM that refers to EXPR in the
3669 location list of a one-part variable or value VAR, or in that of
3670 any values recursively mentioned in the location lists. */
3672 static location_chain
3673 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
3675 location_chain node;
3678 location_chain where = NULL;
3683 gcc_assert (GET_CODE (val) == VALUE);
3685 gcc_assert (!VALUE_RECURSED_INTO (val));
3687 dv = dv_from_value (val);
3688 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
3693 gcc_assert (dv_onepart_p (var->dv));
3695 if (!var->n_var_parts)
3698 gcc_assert (var->var_part[0].offset == 0);
3700 VALUE_RECURSED_INTO (val) = true;
3702 for (node = var->var_part[0].loc_chain; node; node = node->next)
3703 if (MEM_P (node->loc) && MEM_EXPR (node->loc) == expr
3704 && MEM_OFFSET (node->loc) == 0)
3709 else if (GET_CODE (node->loc) == VALUE
3710 && !VALUE_RECURSED_INTO (node->loc)
3711 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
3714 VALUE_RECURSED_INTO (val) = false;
3719 /* Remove all MEMs from the location list of a hash table entry for a
3720 one-part variable, except those whose MEM attributes map back to
3721 the variable itself, directly or within a VALUE.
3723 ??? We could also preserve MEMs that reference stack slots that are
3724 annotated as not addressable. This is arguably even more reliable
3725 than the current heuristic. */
3728 dataflow_set_preserve_mem_locs (void **slot, void *data)
3730 dataflow_set *set = (dataflow_set *) data;
3731 variable var = (variable) *slot;
3733 if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
3735 tree decl = dv_as_decl (var->dv);
3736 location_chain loc, *locp;
3738 if (!var->n_var_parts)
3741 gcc_assert (var->n_var_parts == 1);
3743 if (var->refcount > 1 || shared_hash_shared (set->vars))
3745 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3747 /* We want to remove a MEM that doesn't refer to DECL. */
3748 if (GET_CODE (loc->loc) == MEM
3749 && (MEM_EXPR (loc->loc) != decl
3750 || MEM_OFFSET (loc->loc)))
3752 /* We want to move here a MEM that does refer to DECL. */
3753 else if (GET_CODE (loc->loc) == VALUE
3754 && find_mem_expr_in_1pdv (decl, loc->loc,
3755 shared_hash_htab (set->vars)))
3762 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3763 var = (variable)*slot;
3764 gcc_assert (var->n_var_parts == 1);
3767 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3770 rtx old_loc = loc->loc;
3771 if (GET_CODE (old_loc) == VALUE)
3773 location_chain mem_node
3774 = find_mem_expr_in_1pdv (decl, loc->loc,
3775 shared_hash_htab (set->vars));
3777 /* ??? This picks up only one out of multiple MEMs that
3778 refer to the same variable. Do we ever need to be
3779 concerned about dealing with more than one, or, given
3780 that they should all map to the same variable
3781 location, their addresses will have been merged and
3782 they will be regarded as equivalent? */
3785 loc->loc = mem_node->loc;
3786 loc->set_src = mem_node->set_src;
3787 loc->init = MIN (loc->init, mem_node->init);
3791 if (GET_CODE (loc->loc) != MEM
3792 || (MEM_EXPR (loc->loc) == decl
3793 && MEM_OFFSET (loc->loc) == 0))
3795 if (old_loc != loc->loc && emit_notes)
3797 add_value_chains (var->dv, loc->loc);
3798 remove_value_chains (var->dv, old_loc);
3805 remove_value_chains (var->dv, old_loc);
3807 pool_free (loc_chain_pool, loc);
3810 if (!var->var_part[0].loc_chain)
3813 if (emit_notes && dv_is_value_p (var->dv))
3814 remove_cselib_value_chains (var->dv);
3815 variable_was_changed (var, set);
3822 /* Remove all MEMs from the location list of a hash table entry for a
3826 dataflow_set_remove_mem_locs (void **slot, void *data)
3828 dataflow_set *set = (dataflow_set *) data;
3829 variable var = (variable) *slot;
3831 if (dv_is_value_p (var->dv))
3833 location_chain loc, *locp;
3834 bool changed = false;
3836 gcc_assert (var->n_var_parts == 1);
3838 if (var->refcount > 1 || shared_hash_shared (set->vars))
3840 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3841 if (GET_CODE (loc->loc) == MEM)
3847 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3848 var = (variable)*slot;
3849 gcc_assert (var->n_var_parts == 1);
3852 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3855 if (GET_CODE (loc->loc) != MEM)
3862 remove_value_chains (var->dv, loc->loc);
3864 /* If we have deleted the location which was last emitted
3865 we have to emit new location so add the variable to set
3866 of changed variables. */
3867 if (var->var_part[0].cur_loc
3868 && rtx_equal_p (loc->loc, var->var_part[0].cur_loc))
3870 pool_free (loc_chain_pool, loc);
3873 if (!var->var_part[0].loc_chain)
3876 if (emit_notes && dv_is_value_p (var->dv))
3877 remove_cselib_value_chains (var->dv);
3878 gcc_assert (changed);
3882 if (var->n_var_parts && var->var_part[0].loc_chain)
3883 var->var_part[0].cur_loc = var->var_part[0].loc_chain->loc;
3884 variable_was_changed (var, set);
3891 /* Remove all variable-location information about call-clobbered
3892 registers, as well as associations between MEMs and VALUEs. */
3895 dataflow_set_clear_at_call (dataflow_set *set)
3899 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
3900 if (TEST_HARD_REG_BIT (call_used_reg_set, r))
3901 var_regno_delete (set, r);
3903 if (MAY_HAVE_DEBUG_INSNS)
3905 set->traversed_vars = set->vars;
3906 htab_traverse (shared_hash_htab (set->vars),
3907 dataflow_set_preserve_mem_locs, set);
3908 set->traversed_vars = set->vars;
3909 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
3911 set->traversed_vars = NULL;
3915 /* Flag whether two dataflow sets being compared contain different data. */
3917 dataflow_set_different_value;
3920 variable_part_different_p (variable_part *vp1, variable_part *vp2)
3922 location_chain lc1, lc2;
3924 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
3926 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
3928 if (REG_P (lc1->loc) && REG_P (lc2->loc))
3930 if (REGNO (lc1->loc) == REGNO (lc2->loc))
3933 if (rtx_equal_p (lc1->loc, lc2->loc))
3942 /* Return true if one-part variables VAR1 and VAR2 are different.
3943 They must be in canonical order. */
3946 onepart_variable_different_p (variable var1, variable var2)
3948 location_chain lc1, lc2;
3953 gcc_assert (var1->n_var_parts == 1);
3954 gcc_assert (var2->n_var_parts == 1);
3956 lc1 = var1->var_part[0].loc_chain;
3957 lc2 = var2->var_part[0].loc_chain;
3964 if (loc_cmp (lc1->loc, lc2->loc))
3973 /* Return true if variables VAR1 and VAR2 are different.
3974 If COMPARE_CURRENT_LOCATION is true compare also the cur_loc of each
3978 variable_different_p (variable var1, variable var2,
3979 bool compare_current_location)
3986 if (var1->n_var_parts != var2->n_var_parts)
3989 for (i = 0; i < var1->n_var_parts; i++)
3991 if (var1->var_part[i].offset != var2->var_part[i].offset)
3993 if (compare_current_location)
3995 if (!((REG_P (var1->var_part[i].cur_loc)
3996 && REG_P (var2->var_part[i].cur_loc)
3997 && (REGNO (var1->var_part[i].cur_loc)
3998 == REGNO (var2->var_part[i].cur_loc)))
3999 || rtx_equal_p (var1->var_part[i].cur_loc,
4000 var2->var_part[i].cur_loc)))
4003 /* One-part values have locations in a canonical order. */
4004 if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
4006 gcc_assert (var1->n_var_parts == 1);
4007 gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4008 return onepart_variable_different_p (var1, var2);
4010 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4012 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4018 /* Compare variable *SLOT with the same variable in hash table DATA
4019 and set DATAFLOW_SET_DIFFERENT_VALUE if they are different. */
4022 dataflow_set_different_1 (void **slot, void *data)
4024 htab_t htab = (htab_t) data;
4025 variable var1, var2;
4027 var1 = (variable) *slot;
4028 var2 = (variable) htab_find_with_hash (htab, var1->dv,
4029 dv_htab_hash (var1->dv));
4032 dataflow_set_different_value = true;
4034 if (dump_file && (dump_flags & TDF_DETAILS))
4036 fprintf (dump_file, "dataflow difference found: removal of:\n");
4037 dump_variable (var1);
4040 /* Stop traversing the hash table. */
4044 if (variable_different_p (var1, var2, false))
4046 dataflow_set_different_value = true;
4048 if (dump_file && (dump_flags & TDF_DETAILS))
4050 fprintf (dump_file, "dataflow difference found: old and new follow:\n");
4051 dump_variable (var1);
4052 dump_variable (var2);
4055 /* Stop traversing the hash table. */
4059 /* Continue traversing the hash table. */
4063 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4066 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4068 if (old_set->vars == new_set->vars)
4071 if (htab_elements (shared_hash_htab (old_set->vars))
4072 != htab_elements (shared_hash_htab (new_set->vars)))
4075 dataflow_set_different_value = false;
4077 htab_traverse (shared_hash_htab (old_set->vars), dataflow_set_different_1,
4078 shared_hash_htab (new_set->vars));
4079 /* No need to traverse the second hashtab, if both have the same number
4080 of elements and the second one had all entries found in the first one,
4081 then it can't have any extra entries. */
4082 return dataflow_set_different_value;
4085 /* Free the contents of dataflow set SET. */
4088 dataflow_set_destroy (dataflow_set *set)
4092 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4093 attrs_list_clear (&set->regs[i]);
4095 shared_hash_destroy (set->vars);
4099 /* Return true if RTL X contains a SYMBOL_REF. */
4102 contains_symbol_ref (rtx x)
4111 code = GET_CODE (x);
4112 if (code == SYMBOL_REF)
4115 fmt = GET_RTX_FORMAT (code);
4116 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4120 if (contains_symbol_ref (XEXP (x, i)))
4123 else if (fmt[i] == 'E')
4126 for (j = 0; j < XVECLEN (x, i); j++)
4127 if (contains_symbol_ref (XVECEXP (x, i, j)))
4135 /* Shall EXPR be tracked? */
4138 track_expr_p (tree expr, bool need_rtl)
4143 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4144 return DECL_RTL_SET_P (expr);
4146 /* If EXPR is not a parameter or a variable do not track it. */
4147 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4150 /* It also must have a name... */
4151 if (!DECL_NAME (expr))
4154 /* ... and a RTL assigned to it. */
4155 decl_rtl = DECL_RTL_IF_SET (expr);
4156 if (!decl_rtl && need_rtl)
4159 /* If this expression is really a debug alias of some other declaration, we
4160 don't need to track this expression if the ultimate declaration is
4163 if (DECL_DEBUG_EXPR_IS_FROM (realdecl) && DECL_DEBUG_EXPR (realdecl))
4165 realdecl = DECL_DEBUG_EXPR (realdecl);
4166 /* ??? We don't yet know how to emit DW_OP_piece for variable
4167 that has been SRA'ed. */
4168 if (!DECL_P (realdecl))
4172 /* Do not track EXPR if REALDECL it should be ignored for debugging
4174 if (DECL_IGNORED_P (realdecl))
4177 /* Do not track global variables until we are able to emit correct location
4179 if (TREE_STATIC (realdecl))
4182 /* When the EXPR is a DECL for alias of some variable (see example)
4183 the TREE_STATIC flag is not used. Disable tracking all DECLs whose
4184 DECL_RTL contains SYMBOL_REF.
4187 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4190 if (decl_rtl && MEM_P (decl_rtl)
4191 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4194 /* If RTX is a memory it should not be very large (because it would be
4195 an array or struct). */
4196 if (decl_rtl && MEM_P (decl_rtl))
4198 /* Do not track structures and arrays. */
4199 if (GET_MODE (decl_rtl) == BLKmode
4200 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4202 if (MEM_SIZE (decl_rtl)
4203 && INTVAL (MEM_SIZE (decl_rtl)) > MAX_VAR_PARTS)
4207 DECL_CHANGED (expr) = 0;
4208 DECL_CHANGED (realdecl) = 0;
4212 /* Determine whether a given LOC refers to the same variable part as
4216 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4219 HOST_WIDE_INT offset2;
4221 if (! DECL_P (expr))
4226 expr2 = REG_EXPR (loc);
4227 offset2 = REG_OFFSET (loc);
4229 else if (MEM_P (loc))
4231 expr2 = MEM_EXPR (loc);
4232 offset2 = INT_MEM_OFFSET (loc);
4237 if (! expr2 || ! DECL_P (expr2))
4240 expr = var_debug_decl (expr);
4241 expr2 = var_debug_decl (expr2);
4243 return (expr == expr2 && offset == offset2);
4246 /* LOC is a REG or MEM that we would like to track if possible.
4247 If EXPR is null, we don't know what expression LOC refers to,
4248 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4249 LOC is an lvalue register.
4251 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4252 is something we can track. When returning true, store the mode of
4253 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4254 from EXPR in *OFFSET_OUT (if nonnull). */
4257 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4258 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4260 enum machine_mode mode;
4262 if (expr == NULL || !track_expr_p (expr, true))
4265 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4266 whole subreg, but only the old inner part is really relevant. */
4267 mode = GET_MODE (loc);
4268 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4270 enum machine_mode pseudo_mode;
4272 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4273 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
4275 offset += byte_lowpart_offset (pseudo_mode, mode);
4280 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4281 Do the same if we are storing to a register and EXPR occupies
4282 the whole of register LOC; in that case, the whole of EXPR is
4283 being changed. We exclude complex modes from the second case
4284 because the real and imaginary parts are represented as separate
4285 pseudo registers, even if the whole complex value fits into one
4287 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4289 && !COMPLEX_MODE_P (DECL_MODE (expr))
4290 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4291 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4293 mode = DECL_MODE (expr);
4297 if (offset < 0 || offset >= MAX_VAR_PARTS)
4303 *offset_out = offset;
4307 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4308 want to track. When returning nonnull, make sure that the attributes
4309 on the returned value are updated. */
4312 var_lowpart (enum machine_mode mode, rtx loc)
4314 unsigned int offset, reg_offset, regno;
4316 if (!REG_P (loc) && !MEM_P (loc))
4319 if (GET_MODE (loc) == mode)
4322 offset = byte_lowpart_offset (mode, GET_MODE (loc));
4325 return adjust_address_nv (loc, mode, offset);
4327 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4328 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4330 return gen_rtx_REG_offset (loc, mode, regno, offset);
4333 /* Carry information about uses and stores while walking rtx. */
4335 struct count_use_info
4337 /* The insn where the RTX is. */
4340 /* The basic block where insn is. */
4343 /* The array of n_sets sets in the insn, as determined by cselib. */
4344 struct cselib_set *sets;
4347 /* True if we're counting stores, false otherwise. */
4351 /* Find a VALUE corresponding to X. */
4353 static inline cselib_val *
4354 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4360 /* This is called after uses are set up and before stores are
4361 processed bycselib, so it's safe to look up srcs, but not
4362 dsts. So we look up expressions that appear in srcs or in
4363 dest expressions, but we search the sets array for dests of
4367 for (i = 0; i < cui->n_sets; i++)
4368 if (cui->sets[i].dest == x)
4369 return cui->sets[i].src_elt;
4372 return cselib_lookup (x, mode, 0);
4378 /* Replace all registers and addresses in an expression with VALUE
4379 expressions that map back to them, unless the expression is a
4380 register. If no mapping is or can be performed, returns NULL. */
4383 replace_expr_with_values (rtx loc)
4387 else if (MEM_P (loc))
4389 enum machine_mode address_mode
4390 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
4391 cselib_val *addr = cselib_lookup (XEXP (loc, 0), address_mode, 0);
4393 return replace_equiv_address_nv (loc, addr->val_rtx);
4398 return cselib_subst_to_values (loc);
4401 /* Determine what kind of micro operation to choose for a USE. Return
4402 MO_CLOBBER if no micro operation is to be generated. */
4404 static enum micro_operation_type
4405 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
4410 if (cui && cui->sets)
4412 if (GET_CODE (loc) == VAR_LOCATION)
4414 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
4416 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
4417 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
4419 /* ??? flag_float_store and volatile mems are never
4420 given values, but we could in theory use them for
4422 gcc_assert (val || 1);
4429 if ((REG_P (loc) || MEM_P (loc))
4430 && (val = find_use_val (loc, GET_MODE (loc), cui)))
4433 *modep = GET_MODE (loc);
4437 || cselib_lookup (XEXP (loc, 0), GET_MODE (loc), 0))
4440 else if (!cselib_preserved_value_p (val))
4447 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
4449 expr = REG_EXPR (loc);
4452 return MO_USE_NO_VAR;
4453 else if (target_for_debug_bind (var_debug_decl (expr)))
4455 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
4456 false, modep, NULL))
4459 return MO_USE_NO_VAR;
4461 else if (MEM_P (loc))
4463 expr = MEM_EXPR (loc);
4467 else if (target_for_debug_bind (var_debug_decl (expr)))
4469 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
4470 false, modep, NULL))
4479 /* Log to OUT information about micro-operation MOPT involving X in
4483 log_op_type (rtx x, basic_block bb, rtx insn,
4484 enum micro_operation_type mopt, FILE *out)
4486 fprintf (out, "bb %i op %i insn %i %s ",
4487 bb->index, VTI (bb)->n_mos - 1,
4488 INSN_UID (insn), micro_operation_type_name[mopt]);
4489 print_inline_rtx (out, x, 2);
4493 /* Count uses (register and memory references) LOC which will be tracked.
4494 INSN is instruction which the LOC is part of. */
4497 count_uses (rtx *ploc, void *cuip)
4500 struct count_use_info *cui = (struct count_use_info *) cuip;
4501 enum micro_operation_type mopt = use_type (loc, cui, NULL);
4503 if (mopt != MO_CLOBBER)
4506 enum machine_mode mode = GET_MODE (loc);
4508 VTI (cui->bb)->n_mos++;
4510 if (dump_file && (dump_flags & TDF_DETAILS))
4511 log_op_type (loc, cui->bb, cui->insn, mopt, dump_file);
4516 loc = PAT_VAR_LOCATION_LOC (loc);
4517 if (VAR_LOC_UNKNOWN_P (loc))
4524 && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
4526 enum machine_mode address_mode
4527 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
4528 val = cselib_lookup (XEXP (loc, 0), address_mode, false);
4530 if (val && !cselib_preserved_value_p (val))
4532 VTI (cui->bb)->n_mos++;
4533 cselib_preserve_value (val);
4537 val = find_use_val (loc, mode, cui);
4540 if (mopt == MO_VAL_SET
4541 && GET_CODE (PATTERN (cui->insn)) == COND_EXEC
4544 && (use_type (loc, NULL, NULL) == MO_USE
4547 cselib_val *oval = cselib_lookup (loc, GET_MODE (loc), 0);
4549 gcc_assert (oval != val);
4550 gcc_assert (REG_P (loc) || MEM_P (loc));
4552 if (!cselib_preserved_value_p (oval))
4554 VTI (cui->bb)->n_mos++;
4555 cselib_preserve_value (oval);
4559 cselib_preserve_value (val);
4562 gcc_assert (mopt == MO_VAL_LOC);
4574 /* Helper function for finding all uses of REG/MEM in X in CUI's
4578 count_uses_1 (rtx *x, void *cui)
4580 for_each_rtx (x, count_uses, cui);
4583 /* Count stores (register and memory references) LOC which will be
4584 tracked. CUI is a count_use_info object containing the instruction
4585 which the LOC is part of. */
4588 count_stores (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *cui)
4590 count_uses (&loc, cui);
4593 /* Callback for cselib_record_sets_hook, that counts how many micro
4594 operations it takes for uses and stores in an insn after
4595 cselib_record_sets has analyzed the sets in an insn, but before it
4596 modifies the stored values in the internal tables, unless
4597 cselib_record_sets doesn't call it directly (perhaps because we're
4598 not doing cselib in the first place, in which case sets and n_sets
4602 count_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
4604 basic_block bb = BLOCK_FOR_INSN (insn);
4605 struct count_use_info cui;
4607 cselib_hook_called = true;
4612 cui.n_sets = n_sets;
4614 cui.store_p = false;
4615 note_uses (&PATTERN (insn), count_uses_1, &cui);
4617 note_stores (PATTERN (insn), count_stores, &cui);
4620 /* Tell whether the CONCAT used to holds a VALUE and its location
4621 needs value resolution, i.e., an attempt of mapping the location
4622 back to other incoming values. */
4623 #define VAL_NEEDS_RESOLUTION(x) \
4624 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
4625 /* Whether the location in the CONCAT is a tracked expression, that
4626 should also be handled like a MO_USE. */
4627 #define VAL_HOLDS_TRACK_EXPR(x) \
4628 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
4629 /* Whether the location in the CONCAT should be handled like a MO_COPY
4631 #define VAL_EXPR_IS_COPIED(x) \
4632 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
4633 /* Whether the location in the CONCAT should be handled like a
4634 MO_CLOBBER as well. */
4635 #define VAL_EXPR_IS_CLOBBERED(x) \
4636 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
4638 /* Add uses (register and memory references) LOC which will be tracked
4639 to VTI (bb)->mos. INSN is instruction which the LOC is part of. */
4642 add_uses (rtx *ploc, void *data)
4645 enum machine_mode mode = VOIDmode;
4646 struct count_use_info *cui = (struct count_use_info *)data;
4647 enum micro_operation_type type = use_type (loc, cui, &mode);
4649 if (type != MO_CLOBBER)
4651 basic_block bb = cui->bb;
4652 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4655 mo->u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
4656 mo->insn = cui->insn;
4658 if (type == MO_VAL_LOC)
4661 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
4664 gcc_assert (cui->sets);
4667 && !REG_P (XEXP (vloc, 0)) && !MEM_P (XEXP (vloc, 0)))
4670 enum machine_mode address_mode
4671 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4673 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4675 if (val && !cselib_preserved_value_p (val))
4677 micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
4678 mon->type = mo->type;
4679 mon->u.loc = mo->u.loc;
4680 mon->insn = mo->insn;
4681 cselib_preserve_value (val);
4682 mo->type = MO_VAL_USE;
4683 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4684 mo->u.loc = gen_rtx_CONCAT (address_mode,
4685 val->val_rtx, mloc);
4686 if (dump_file && (dump_flags & TDF_DETAILS))
4687 log_op_type (mo->u.loc, cui->bb, cui->insn,
4688 mo->type, dump_file);
4693 if (!VAR_LOC_UNKNOWN_P (vloc)
4694 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
4696 enum machine_mode mode2;
4697 enum micro_operation_type type2;
4698 rtx nloc = replace_expr_with_values (vloc);
4702 oloc = shallow_copy_rtx (oloc);
4703 PAT_VAR_LOCATION_LOC (oloc) = nloc;
4706 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
4708 type2 = use_type (vloc, 0, &mode2);
4710 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4711 || type2 == MO_CLOBBER);
4713 if (type2 == MO_CLOBBER
4714 && !cselib_preserved_value_p (val))
4716 VAL_NEEDS_RESOLUTION (oloc) = 1;
4717 cselib_preserve_value (val);
4720 else if (!VAR_LOC_UNKNOWN_P (vloc))
4722 oloc = shallow_copy_rtx (oloc);
4723 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
4728 else if (type == MO_VAL_USE)
4730 enum machine_mode mode2 = VOIDmode;
4731 enum micro_operation_type type2;
4732 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4733 rtx vloc, oloc = loc, nloc;
4735 gcc_assert (cui->sets);
4738 && !REG_P (XEXP (oloc, 0)) && !MEM_P (XEXP (oloc, 0)))
4741 enum machine_mode address_mode
4742 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4744 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4746 if (val && !cselib_preserved_value_p (val))
4748 micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
4749 mon->type = mo->type;
4750 mon->u.loc = mo->u.loc;
4751 mon->insn = mo->insn;
4752 cselib_preserve_value (val);
4753 mo->type = MO_VAL_USE;
4754 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4755 mo->u.loc = gen_rtx_CONCAT (address_mode,
4756 val->val_rtx, mloc);
4757 mo->insn = cui->insn;
4758 if (dump_file && (dump_flags & TDF_DETAILS))
4759 log_op_type (mo->u.loc, cui->bb, cui->insn,
4760 mo->type, dump_file);
4765 type2 = use_type (loc, 0, &mode2);
4767 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4768 || type2 == MO_CLOBBER);
4770 if (type2 == MO_USE)
4771 vloc = var_lowpart (mode2, loc);
4775 /* The loc of a MO_VAL_USE may have two forms:
4777 (concat val src): val is at src, a value-based
4780 (concat (concat val use) src): same as above, with use as
4781 the MO_USE tracked value, if it differs from src.
4785 nloc = replace_expr_with_values (loc);
4790 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
4792 oloc = val->val_rtx;
4794 mo->u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
4796 if (type2 == MO_USE)
4797 VAL_HOLDS_TRACK_EXPR (mo->u.loc) = 1;
4798 if (!cselib_preserved_value_p (val))
4800 VAL_NEEDS_RESOLUTION (mo->u.loc) = 1;
4801 cselib_preserve_value (val);
4805 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
4807 if (dump_file && (dump_flags & TDF_DETAILS))
4808 log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
4814 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
4817 add_uses_1 (rtx *x, void *cui)
4819 for_each_rtx (x, add_uses, cui);
4822 /* Add stores (register and memory references) LOC which will be tracked
4823 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
4824 CUIP->insn is instruction which the LOC is part of. */
4827 add_stores (rtx loc, const_rtx expr, void *cuip)
4829 enum machine_mode mode = VOIDmode, mode2;
4830 struct count_use_info *cui = (struct count_use_info *)cuip;
4831 basic_block bb = cui->bb;
4832 micro_operation *mo;
4833 rtx oloc = loc, nloc, src = NULL;
4834 enum micro_operation_type type = use_type (loc, cui, &mode);
4835 bool track_p = false;
4837 bool resolve, preserve;
4839 if (type == MO_CLOBBER)
4846 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4848 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
4849 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
4850 || GET_CODE (expr) == CLOBBER)
4852 mo->type = MO_CLOBBER;
4857 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
4858 src = var_lowpart (mode2, SET_SRC (expr));
4859 loc = var_lowpart (mode2, loc);
4868 rtx xexpr = CONST_CAST_RTX (expr);
4870 if (SET_SRC (expr) != src)
4871 xexpr = gen_rtx_SET (VOIDmode, loc, src);
4872 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
4879 mo->insn = cui->insn;
4881 else if (MEM_P (loc)
4882 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
4885 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4887 if (MEM_P (loc) && type == MO_VAL_SET
4888 && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
4891 enum machine_mode address_mode
4892 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4893 cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4895 if (val && !cselib_preserved_value_p (val))
4897 cselib_preserve_value (val);
4898 mo->type = MO_VAL_USE;
4899 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4900 mo->u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
4901 mo->insn = cui->insn;
4902 if (dump_file && (dump_flags & TDF_DETAILS))
4903 log_op_type (mo->u.loc, cui->bb, cui->insn,
4904 mo->type, dump_file);
4905 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4909 if (GET_CODE (expr) == CLOBBER || !track_p)
4911 mo->type = MO_CLOBBER;
4912 mo->u.loc = track_p ? var_lowpart (mode2, loc) : loc;
4916 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
4917 src = var_lowpart (mode2, SET_SRC (expr));
4918 loc = var_lowpart (mode2, loc);
4927 rtx xexpr = CONST_CAST_RTX (expr);
4929 if (SET_SRC (expr) != src)
4930 xexpr = gen_rtx_SET (VOIDmode, loc, src);
4931 if (same_variable_part_p (SET_SRC (xexpr),
4933 INT_MEM_OFFSET (loc)))
4940 mo->insn = cui->insn;
4945 if (type != MO_VAL_SET)
4946 goto log_and_return;
4948 v = find_use_val (oloc, mode, cui);
4950 resolve = preserve = !cselib_preserved_value_p (v);
4952 nloc = replace_expr_with_values (oloc);
4956 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
4958 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0);
4960 gcc_assert (oval != v);
4961 gcc_assert (REG_P (oloc) || MEM_P (oloc));
4963 if (!cselib_preserved_value_p (oval))
4965 micro_operation *nmo = VTI (bb)->mos + VTI (bb)->n_mos++;
4967 cselib_preserve_value (oval);
4969 nmo->type = MO_VAL_USE;
4970 nmo->u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
4971 VAL_NEEDS_RESOLUTION (nmo->u.loc) = 1;
4972 nmo->insn = mo->insn;
4974 if (dump_file && (dump_flags & TDF_DETAILS))
4975 log_op_type (nmo->u.loc, cui->bb, cui->insn,
4976 nmo->type, dump_file);
4981 else if (resolve && GET_CODE (mo->u.loc) == SET)
4983 nloc = replace_expr_with_values (SET_SRC (expr));
4985 /* Avoid the mode mismatch between oexpr and expr. */
4986 if (!nloc && mode != mode2)
4988 nloc = SET_SRC (expr);
4989 gcc_assert (oloc == SET_DEST (expr));
4993 oloc = gen_rtx_SET (GET_MODE (mo->u.loc), oloc, nloc);
4996 if (oloc == SET_DEST (mo->u.loc))
4997 /* No point in duplicating. */
4999 if (!REG_P (SET_SRC (mo->u.loc)))
5005 if (GET_CODE (mo->u.loc) == SET
5006 && oloc == SET_DEST (mo->u.loc))
5007 /* No point in duplicating. */
5013 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5015 if (mo->u.loc != oloc)
5016 loc = gen_rtx_CONCAT (GET_MODE (mo->u.loc), loc, mo->u.loc);
5018 /* The loc of a MO_VAL_SET may have various forms:
5020 (concat val dst): dst now holds val
5022 (concat val (set dst src)): dst now holds val, copied from src
5024 (concat (concat val dstv) dst): dst now holds val; dstv is dst
5025 after replacing mems and non-top-level regs with values.
5027 (concat (concat val dstv) (set dst src)): dst now holds val,
5028 copied from src. dstv is a value-based representation of dst, if
5029 it differs from dst. If resolution is needed, src is a REG, and
5030 its mode is the same as that of val.
5032 (concat (concat val (set dstv srcv)) (set dst src)): src
5033 copied to dst, holding val. dstv and srcv are value-based
5034 representations of dst and src, respectively.
5041 VAL_HOLDS_TRACK_EXPR (loc) = 1;
5044 VAL_NEEDS_RESOLUTION (loc) = resolve;
5045 cselib_preserve_value (v);
5047 if (mo->type == MO_CLOBBER)
5048 VAL_EXPR_IS_CLOBBERED (loc) = 1;
5049 if (mo->type == MO_COPY)
5050 VAL_EXPR_IS_COPIED (loc) = 1;
5052 mo->type = MO_VAL_SET;
5055 if (dump_file && (dump_flags & TDF_DETAILS))
5056 log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
5059 /* Callback for cselib_record_sets_hook, that records as micro
5060 operations uses and stores in an insn after cselib_record_sets has
5061 analyzed the sets in an insn, but before it modifies the stored
5062 values in the internal tables, unless cselib_record_sets doesn't
5063 call it directly (perhaps because we're not doing cselib in the
5064 first place, in which case sets and n_sets will be 0). */
5067 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
5069 basic_block bb = BLOCK_FOR_INSN (insn);
5071 struct count_use_info cui;
5073 cselib_hook_called = true;
5078 cui.n_sets = n_sets;
5080 n1 = VTI (bb)->n_mos;
5081 cui.store_p = false;
5082 note_uses (&PATTERN (insn), add_uses_1, &cui);
5083 n2 = VTI (bb)->n_mos - 1;
5085 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
5089 while (n1 < n2 && VTI (bb)->mos[n1].type == MO_USE)
5091 while (n1 < n2 && VTI (bb)->mos[n2].type != MO_USE)
5097 sw = VTI (bb)->mos[n1];
5098 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5099 VTI (bb)->mos[n2] = sw;
5103 n2 = VTI (bb)->n_mos - 1;
5107 while (n1 < n2 && VTI (bb)->mos[n1].type != MO_VAL_LOC)
5109 while (n1 < n2 && VTI (bb)->mos[n2].type == MO_VAL_LOC)
5115 sw = VTI (bb)->mos[n1];
5116 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5117 VTI (bb)->mos[n2] = sw;
5123 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
5128 if (dump_file && (dump_flags & TDF_DETAILS))
5129 log_op_type (PATTERN (insn), bb, insn, mo->type, dump_file);
5132 n1 = VTI (bb)->n_mos;
5133 /* This will record NEXT_INSN (insn), such that we can
5134 insert notes before it without worrying about any
5135 notes that MO_USEs might emit after the insn. */
5137 note_stores (PATTERN (insn), add_stores, &cui);
5138 n2 = VTI (bb)->n_mos - 1;
5140 /* Order the MO_CLOBBERs to be before MO_SETs. */
5143 while (n1 < n2 && VTI (bb)->mos[n1].type == MO_CLOBBER)
5145 while (n1 < n2 && VTI (bb)->mos[n2].type != MO_CLOBBER)
5151 sw = VTI (bb)->mos[n1];
5152 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5153 VTI (bb)->mos[n2] = sw;
5158 static enum var_init_status
5159 find_src_status (dataflow_set *in, rtx src)
5161 tree decl = NULL_TREE;
5162 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
5164 if (! flag_var_tracking_uninit)
5165 status = VAR_INIT_STATUS_INITIALIZED;
5167 if (src && REG_P (src))
5168 decl = var_debug_decl (REG_EXPR (src));
5169 else if (src && MEM_P (src))
5170 decl = var_debug_decl (MEM_EXPR (src));
5173 status = get_init_value (in, src, dv_from_decl (decl));
5178 /* SRC is the source of an assignment. Use SET to try to find what
5179 was ultimately assigned to SRC. Return that value if known,
5180 otherwise return SRC itself. */
5183 find_src_set_src (dataflow_set *set, rtx src)
5185 tree decl = NULL_TREE; /* The variable being copied around. */
5186 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
5188 location_chain nextp;
5192 if (src && REG_P (src))
5193 decl = var_debug_decl (REG_EXPR (src));
5194 else if (src && MEM_P (src))
5195 decl = var_debug_decl (MEM_EXPR (src));
5199 decl_or_value dv = dv_from_decl (decl);
5201 var = shared_hash_find (set->vars, dv);
5205 for (i = 0; i < var->n_var_parts && !found; i++)
5206 for (nextp = var->var_part[i].loc_chain; nextp && !found;
5207 nextp = nextp->next)
5208 if (rtx_equal_p (nextp->loc, src))
5210 set_src = nextp->set_src;
5220 /* Compute the changes of variable locations in the basic block BB. */
5223 compute_bb_dataflow (basic_block bb)
5227 dataflow_set old_out;
5228 dataflow_set *in = &VTI (bb)->in;
5229 dataflow_set *out = &VTI (bb)->out;
5231 dataflow_set_init (&old_out);
5232 dataflow_set_copy (&old_out, out);
5233 dataflow_set_copy (out, in);
5235 n = VTI (bb)->n_mos;
5236 for (i = 0; i < n; i++)
5238 rtx insn = VTI (bb)->mos[i].insn;
5240 switch (VTI (bb)->mos[i].type)
5243 dataflow_set_clear_at_call (out);
5248 rtx loc = VTI (bb)->mos[i].u.loc;
5251 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5252 else if (MEM_P (loc))
5253 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5259 rtx loc = VTI (bb)->mos[i].u.loc;
5263 if (GET_CODE (loc) == CONCAT)
5265 val = XEXP (loc, 0);
5266 vloc = XEXP (loc, 1);
5274 var = PAT_VAR_LOCATION_DECL (vloc);
5276 clobber_variable_part (out, NULL_RTX,
5277 dv_from_decl (var), 0, NULL_RTX);
5280 if (VAL_NEEDS_RESOLUTION (loc))
5281 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
5282 set_variable_part (out, val, dv_from_decl (var), 0,
5283 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
5291 rtx loc = VTI (bb)->mos[i].u.loc;
5292 rtx val, vloc, uloc;
5294 vloc = uloc = XEXP (loc, 1);
5295 val = XEXP (loc, 0);
5297 if (GET_CODE (val) == CONCAT)
5299 uloc = XEXP (val, 1);
5300 val = XEXP (val, 0);
5303 if (VAL_NEEDS_RESOLUTION (loc))
5304 val_resolve (out, val, vloc, insn);
5306 if (VAL_HOLDS_TRACK_EXPR (loc))
5308 if (GET_CODE (uloc) == REG)
5309 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5311 else if (GET_CODE (uloc) == MEM)
5312 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5320 rtx loc = VTI (bb)->mos[i].u.loc;
5321 rtx val, vloc, uloc;
5323 vloc = uloc = XEXP (loc, 1);
5324 val = XEXP (loc, 0);
5326 if (GET_CODE (val) == CONCAT)
5328 vloc = XEXP (val, 1);
5329 val = XEXP (val, 0);
5332 if (GET_CODE (vloc) == SET)
5334 rtx vsrc = SET_SRC (vloc);
5336 gcc_assert (val != vsrc);
5337 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
5339 vloc = SET_DEST (vloc);
5341 if (VAL_NEEDS_RESOLUTION (loc))
5342 val_resolve (out, val, vsrc, insn);
5344 else if (VAL_NEEDS_RESOLUTION (loc))
5346 gcc_assert (GET_CODE (uloc) == SET
5347 && GET_CODE (SET_SRC (uloc)) == REG);
5348 val_resolve (out, val, SET_SRC (uloc), insn);
5351 if (VAL_HOLDS_TRACK_EXPR (loc))
5353 if (VAL_EXPR_IS_CLOBBERED (loc))
5356 var_reg_delete (out, uloc, true);
5357 else if (MEM_P (uloc))
5358 var_mem_delete (out, uloc, true);
5362 bool copied_p = VAL_EXPR_IS_COPIED (loc);
5364 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
5366 if (GET_CODE (uloc) == SET)
5368 set_src = SET_SRC (uloc);
5369 uloc = SET_DEST (uloc);
5374 if (flag_var_tracking_uninit)
5376 status = find_src_status (in, set_src);
5378 if (status == VAR_INIT_STATUS_UNKNOWN)
5379 status = find_src_status (out, set_src);
5382 set_src = find_src_set_src (in, set_src);
5386 var_reg_delete_and_set (out, uloc, !copied_p,
5388 else if (MEM_P (uloc))
5389 var_mem_delete_and_set (out, uloc, !copied_p,
5393 else if (REG_P (uloc))
5394 var_regno_delete (out, REGNO (uloc));
5396 val_store (out, val, vloc, insn);
5402 rtx loc = VTI (bb)->mos[i].u.loc;
5405 if (GET_CODE (loc) == SET)
5407 set_src = SET_SRC (loc);
5408 loc = SET_DEST (loc);
5412 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5414 else if (MEM_P (loc))
5415 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5422 rtx loc = VTI (bb)->mos[i].u.loc;
5423 enum var_init_status src_status;
5426 if (GET_CODE (loc) == SET)
5428 set_src = SET_SRC (loc);
5429 loc = SET_DEST (loc);
5432 if (! flag_var_tracking_uninit)
5433 src_status = VAR_INIT_STATUS_INITIALIZED;
5436 src_status = find_src_status (in, set_src);
5438 if (src_status == VAR_INIT_STATUS_UNKNOWN)
5439 src_status = find_src_status (out, set_src);
5442 set_src = find_src_set_src (in, set_src);
5445 var_reg_delete_and_set (out, loc, false, src_status, set_src);
5446 else if (MEM_P (loc))
5447 var_mem_delete_and_set (out, loc, false, src_status, set_src);
5453 rtx loc = VTI (bb)->mos[i].u.loc;
5456 var_reg_delete (out, loc, false);
5457 else if (MEM_P (loc))
5458 var_mem_delete (out, loc, false);
5464 rtx loc = VTI (bb)->mos[i].u.loc;
5467 var_reg_delete (out, loc, true);
5468 else if (MEM_P (loc))
5469 var_mem_delete (out, loc, true);
5474 out->stack_adjust += VTI (bb)->mos[i].u.adjust;
5479 if (MAY_HAVE_DEBUG_INSNS)
5481 dataflow_set_equiv_regs (out);
5482 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
5484 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
5487 htab_traverse (shared_hash_htab (out->vars),
5488 canonicalize_loc_order_check, out);
5491 changed = dataflow_set_different (&old_out, out);
5492 dataflow_set_destroy (&old_out);
5496 /* Find the locations of variables in the whole function. */
5499 vt_find_locations (void)
5501 fibheap_t worklist, pending, fibheap_swap;
5502 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
5510 /* Compute reverse completion order of depth first search of the CFG
5511 so that the data-flow runs faster. */
5512 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
5513 bb_order = XNEWVEC (int, last_basic_block);
5514 pre_and_rev_post_order_compute (NULL, rc_order, false);
5515 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
5516 bb_order[rc_order[i]] = i;
5519 worklist = fibheap_new ();
5520 pending = fibheap_new ();
5521 visited = sbitmap_alloc (last_basic_block);
5522 in_worklist = sbitmap_alloc (last_basic_block);
5523 in_pending = sbitmap_alloc (last_basic_block);
5524 sbitmap_zero (in_worklist);
5527 fibheap_insert (pending, bb_order[bb->index], bb);
5528 sbitmap_ones (in_pending);
5530 while (!fibheap_empty (pending))
5532 fibheap_swap = pending;
5534 worklist = fibheap_swap;
5535 sbitmap_swap = in_pending;
5536 in_pending = in_worklist;
5537 in_worklist = sbitmap_swap;
5539 sbitmap_zero (visited);
5541 while (!fibheap_empty (worklist))
5543 bb = (basic_block) fibheap_extract_min (worklist);
5544 RESET_BIT (in_worklist, bb->index);
5545 if (!TEST_BIT (visited, bb->index))
5549 int oldinsz, oldoutsz;
5551 SET_BIT (visited, bb->index);
5553 if (dump_file && VTI (bb)->in.vars)
5556 -= htab_size (shared_hash_htab (VTI (bb)->in.vars))
5557 + htab_size (shared_hash_htab (VTI (bb)->out.vars));
5559 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
5561 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
5564 oldinsz = oldoutsz = 0;
5566 if (MAY_HAVE_DEBUG_INSNS)
5568 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
5569 bool first = true, adjust = false;
5571 /* Calculate the IN set as the intersection of
5572 predecessor OUT sets. */
5574 dataflow_set_clear (in);
5575 dst_can_be_shared = true;
5577 FOR_EACH_EDGE (e, ei, bb->preds)
5578 if (!VTI (e->src)->flooded)
5579 gcc_assert (bb_order[bb->index]
5580 <= bb_order[e->src->index]);
5583 dataflow_set_copy (in, &VTI (e->src)->out);
5584 first_out = &VTI (e->src)->out;
5589 dataflow_set_merge (in, &VTI (e->src)->out);
5595 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
5597 /* Merge and merge_adjust should keep entries in
5599 htab_traverse (shared_hash_htab (in->vars),
5600 canonicalize_loc_order_check,
5603 if (dst_can_be_shared)
5605 shared_hash_destroy (in->vars);
5606 in->vars = shared_hash_copy (first_out->vars);
5610 VTI (bb)->flooded = true;
5614 /* Calculate the IN set as union of predecessor OUT sets. */
5615 dataflow_set_clear (&VTI (bb)->in);
5616 FOR_EACH_EDGE (e, ei, bb->preds)
5617 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
5620 changed = compute_bb_dataflow (bb);
5622 htabsz += htab_size (shared_hash_htab (VTI (bb)->in.vars))
5623 + htab_size (shared_hash_htab (VTI (bb)->out.vars));
5627 FOR_EACH_EDGE (e, ei, bb->succs)
5629 if (e->dest == EXIT_BLOCK_PTR)
5632 if (TEST_BIT (visited, e->dest->index))
5634 if (!TEST_BIT (in_pending, e->dest->index))
5636 /* Send E->DEST to next round. */
5637 SET_BIT (in_pending, e->dest->index);
5638 fibheap_insert (pending,
5639 bb_order[e->dest->index],
5643 else if (!TEST_BIT (in_worklist, e->dest->index))
5645 /* Add E->DEST to current round. */
5646 SET_BIT (in_worklist, e->dest->index);
5647 fibheap_insert (worklist, bb_order[e->dest->index],
5655 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
5657 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
5659 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
5661 (int)worklist->nodes, (int)pending->nodes, htabsz);
5663 if (dump_file && (dump_flags & TDF_DETAILS))
5665 fprintf (dump_file, "BB %i IN:\n", bb->index);
5666 dump_dataflow_set (&VTI (bb)->in);
5667 fprintf (dump_file, "BB %i OUT:\n", bb->index);
5668 dump_dataflow_set (&VTI (bb)->out);
5674 if (MAY_HAVE_DEBUG_INSNS)
5676 gcc_assert (VTI (bb)->flooded);
5678 VEC_free (rtx, heap, values_to_unmark);
5680 fibheap_delete (worklist);
5681 fibheap_delete (pending);
5682 sbitmap_free (visited);
5683 sbitmap_free (in_worklist);
5684 sbitmap_free (in_pending);
5687 /* Print the content of the LIST to dump file. */
5690 dump_attrs_list (attrs list)
5692 for (; list; list = list->next)
5694 if (dv_is_decl_p (list->dv))
5695 print_mem_expr (dump_file, dv_as_decl (list->dv));
5697 print_rtl_single (dump_file, dv_as_value (list->dv));
5698 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
5700 fprintf (dump_file, "\n");
5703 /* Print the information about variable *SLOT to dump file. */
5706 dump_variable_slot (void **slot, void *data ATTRIBUTE_UNUSED)
5708 variable var = (variable) *slot;
5710 dump_variable (var);
5712 /* Continue traversing the hash table. */
5716 /* Print the information about variable VAR to dump file. */
5719 dump_variable (variable var)
5722 location_chain node;
5724 if (dv_is_decl_p (var->dv))
5726 const_tree decl = dv_as_decl (var->dv);
5728 if (DECL_NAME (decl))
5729 fprintf (dump_file, " name: %s",
5730 IDENTIFIER_POINTER (DECL_NAME (decl)));
5732 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
5733 if (dump_flags & TDF_UID)
5734 fprintf (dump_file, " D.%u\n", DECL_UID (decl));
5736 fprintf (dump_file, "\n");
5740 fputc (' ', dump_file);
5741 print_rtl_single (dump_file, dv_as_value (var->dv));
5744 for (i = 0; i < var->n_var_parts; i++)
5746 fprintf (dump_file, " offset %ld\n",
5747 (long) var->var_part[i].offset);
5748 for (node = var->var_part[i].loc_chain; node; node = node->next)
5750 fprintf (dump_file, " ");
5751 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
5752 fprintf (dump_file, "[uninit]");
5753 print_rtl_single (dump_file, node->loc);
5758 /* Print the information about variables from hash table VARS to dump file. */
5761 dump_vars (htab_t vars)
5763 if (htab_elements (vars) > 0)
5765 fprintf (dump_file, "Variables:\n");
5766 htab_traverse (vars, dump_variable_slot, NULL);
5770 /* Print the dataflow set SET to dump file. */
5773 dump_dataflow_set (dataflow_set *set)
5777 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
5779 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5783 fprintf (dump_file, "Reg %d:", i);
5784 dump_attrs_list (set->regs[i]);
5787 dump_vars (shared_hash_htab (set->vars));
5788 fprintf (dump_file, "\n");
5791 /* Print the IN and OUT sets for each basic block to dump file. */
5794 dump_dataflow_sets (void)
5800 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
5801 fprintf (dump_file, "IN:\n");
5802 dump_dataflow_set (&VTI (bb)->in);
5803 fprintf (dump_file, "OUT:\n");
5804 dump_dataflow_set (&VTI (bb)->out);
5808 /* Add variable VAR to the hash table of changed variables and
5809 if it has no locations delete it from SET's hash table. */
5812 variable_was_changed (variable var, dataflow_set *set)
5814 hashval_t hash = dv_htab_hash (var->dv);
5820 /* Remember this decl or VALUE has been added to changed_variables. */
5821 set_dv_changed (var->dv, true);
5823 slot = htab_find_slot_with_hash (changed_variables,
5827 if (set && var->n_var_parts == 0)
5831 empty_var = (variable) pool_alloc (dv_pool (var->dv));
5832 empty_var->dv = var->dv;
5833 empty_var->refcount = 1;
5834 empty_var->n_var_parts = 0;
5847 if (var->n_var_parts == 0)
5852 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
5855 if (shared_hash_shared (set->vars))
5856 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
5858 htab_clear_slot (shared_hash_htab (set->vars), slot);
5864 /* Look for the index in VAR->var_part corresponding to OFFSET.
5865 Return -1 if not found. If INSERTION_POINT is non-NULL, the
5866 referenced int will be set to the index that the part has or should
5867 have, if it should be inserted. */
5870 find_variable_location_part (variable var, HOST_WIDE_INT offset,
5871 int *insertion_point)
5875 /* Find the location part. */
5877 high = var->n_var_parts;
5880 pos = (low + high) / 2;
5881 if (var->var_part[pos].offset < offset)
5888 if (insertion_point)
5889 *insertion_point = pos;
5891 if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
5898 set_slot_part (dataflow_set *set, rtx loc, void **slot,
5899 decl_or_value dv, HOST_WIDE_INT offset,
5900 enum var_init_status initialized, rtx set_src)
5903 location_chain node, next;
5904 location_chain *nextp;
5906 bool onepart = dv_onepart_p (dv);
5908 gcc_assert (offset == 0 || !onepart);
5909 gcc_assert (loc != dv_as_opaque (dv));
5911 var = (variable) *slot;
5913 if (! flag_var_tracking_uninit)
5914 initialized = VAR_INIT_STATUS_INITIALIZED;
5918 /* Create new variable information. */
5919 var = (variable) pool_alloc (dv_pool (dv));
5922 var->n_var_parts = 1;
5923 var->var_part[0].offset = offset;
5924 var->var_part[0].loc_chain = NULL;
5925 var->var_part[0].cur_loc = NULL;
5928 nextp = &var->var_part[0].loc_chain;
5929 if (emit_notes && dv_is_value_p (dv))
5930 add_cselib_value_chains (dv);
5936 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
5940 if (GET_CODE (loc) == VALUE)
5942 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5943 nextp = &node->next)
5944 if (GET_CODE (node->loc) == VALUE)
5946 if (node->loc == loc)
5951 if (canon_value_cmp (node->loc, loc))
5959 else if (REG_P (node->loc) || MEM_P (node->loc))
5967 else if (REG_P (loc))
5969 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5970 nextp = &node->next)
5971 if (REG_P (node->loc))
5973 if (REGNO (node->loc) < REGNO (loc))
5977 if (REGNO (node->loc) == REGNO (loc))
5990 else if (MEM_P (loc))
5992 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5993 nextp = &node->next)
5994 if (REG_P (node->loc))
5996 else if (MEM_P (node->loc))
5998 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
6010 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6011 nextp = &node->next)
6012 if ((r = loc_cmp (node->loc, loc)) >= 0)
6020 if (var->refcount > 1 || shared_hash_shared (set->vars))
6022 slot = unshare_variable (set, slot, var, initialized);
6023 var = (variable)*slot;
6024 for (nextp = &var->var_part[0].loc_chain; c;
6025 nextp = &(*nextp)->next)
6027 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
6034 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
6036 pos = find_variable_location_part (var, offset, &inspos);
6040 node = var->var_part[pos].loc_chain;
6043 && ((REG_P (node->loc) && REG_P (loc)
6044 && REGNO (node->loc) == REGNO (loc))
6045 || rtx_equal_p (node->loc, loc)))
6047 /* LOC is in the beginning of the chain so we have nothing
6049 if (node->init < initialized)
6050 node->init = initialized;
6051 if (set_src != NULL)
6052 node->set_src = set_src;
6058 /* We have to make a copy of a shared variable. */
6059 if (var->refcount > 1 || shared_hash_shared (set->vars))
6061 slot = unshare_variable (set, slot, var, initialized);
6062 var = (variable)*slot;
6068 /* We have not found the location part, new one will be created. */
6070 /* We have to make a copy of the shared variable. */
6071 if (var->refcount > 1 || shared_hash_shared (set->vars))
6073 slot = unshare_variable (set, slot, var, initialized);
6074 var = (variable)*slot;
6077 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
6078 thus there are at most MAX_VAR_PARTS different offsets. */
6079 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
6080 && (!var->n_var_parts || !dv_onepart_p (var->dv)));
6082 /* We have to move the elements of array starting at index
6083 inspos to the next position. */
6084 for (pos = var->n_var_parts; pos > inspos; pos--)
6085 var->var_part[pos] = var->var_part[pos - 1];
6088 var->var_part[pos].offset = offset;
6089 var->var_part[pos].loc_chain = NULL;
6090 var->var_part[pos].cur_loc = NULL;
6093 /* Delete the location from the list. */
6094 nextp = &var->var_part[pos].loc_chain;
6095 for (node = var->var_part[pos].loc_chain; node; node = next)
6098 if ((REG_P (node->loc) && REG_P (loc)
6099 && REGNO (node->loc) == REGNO (loc))
6100 || rtx_equal_p (node->loc, loc))
6102 /* Save these values, to assign to the new node, before
6103 deleting this one. */
6104 if (node->init > initialized)
6105 initialized = node->init;
6106 if (node->set_src != NULL && set_src == NULL)
6107 set_src = node->set_src;
6108 pool_free (loc_chain_pool, node);
6113 nextp = &node->next;
6116 nextp = &var->var_part[pos].loc_chain;
6119 /* Add the location to the beginning. */
6120 node = (location_chain) pool_alloc (loc_chain_pool);
6122 node->init = initialized;
6123 node->set_src = set_src;
6124 node->next = *nextp;
6127 if (onepart && emit_notes)
6128 add_value_chains (var->dv, loc);
6130 /* If no location was emitted do so. */
6131 if (var->var_part[pos].cur_loc == NULL)
6133 var->var_part[pos].cur_loc = loc;
6134 variable_was_changed (var, set);
6140 /* Set the part of variable's location in the dataflow set SET. The
6141 variable part is specified by variable's declaration in DV and
6142 offset OFFSET and the part's location by LOC. IOPT should be
6143 NO_INSERT if the variable is known to be in SET already and the
6144 variable hash table must not be resized, and INSERT otherwise. */
6147 set_variable_part (dataflow_set *set, rtx loc,
6148 decl_or_value dv, HOST_WIDE_INT offset,
6149 enum var_init_status initialized, rtx set_src,
6150 enum insert_option iopt)
6154 if (iopt == NO_INSERT)
6155 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6158 slot = shared_hash_find_slot (set->vars, dv);
6160 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
6162 slot = set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
6165 /* Remove all recorded register locations for the given variable part
6166 from dataflow set SET, except for those that are identical to loc.
6167 The variable part is specified by variable's declaration or value
6168 DV and offset OFFSET. */
6171 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
6172 HOST_WIDE_INT offset, rtx set_src)
6174 variable var = (variable) *slot;
6175 int pos = find_variable_location_part (var, offset, NULL);
6179 location_chain node, next;
6181 /* Remove the register locations from the dataflow set. */
6182 next = var->var_part[pos].loc_chain;
6183 for (node = next; node; node = next)
6186 if (node->loc != loc
6187 && (!flag_var_tracking_uninit
6190 || !rtx_equal_p (set_src, node->set_src)))
6192 if (REG_P (node->loc))
6197 /* Remove the variable part from the register's
6198 list, but preserve any other variable parts
6199 that might be regarded as live in that same
6201 anextp = &set->regs[REGNO (node->loc)];
6202 for (anode = *anextp; anode; anode = anext)
6204 anext = anode->next;
6205 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
6206 && anode->offset == offset)
6208 pool_free (attrs_pool, anode);
6212 anextp = &anode->next;
6216 slot = delete_slot_part (set, node->loc, slot, offset);
6224 /* Remove all recorded register locations for the given variable part
6225 from dataflow set SET, except for those that are identical to loc.
6226 The variable part is specified by variable's declaration or value
6227 DV and offset OFFSET. */
6230 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6231 HOST_WIDE_INT offset, rtx set_src)
6235 if (!dv_as_opaque (dv)
6236 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
6239 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6243 slot = clobber_slot_part (set, loc, slot, offset, set_src);
6246 /* Delete the part of variable's location from dataflow set SET. The
6247 variable part is specified by its SET->vars slot SLOT and offset
6248 OFFSET and the part's location by LOC. */
6251 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
6252 HOST_WIDE_INT offset)
6254 variable var = (variable) *slot;
6255 int pos = find_variable_location_part (var, offset, NULL);
6259 location_chain node, next;
6260 location_chain *nextp;
6263 if (var->refcount > 1 || shared_hash_shared (set->vars))
6265 /* If the variable contains the location part we have to
6266 make a copy of the variable. */
6267 for (node = var->var_part[pos].loc_chain; node;
6270 if ((REG_P (node->loc) && REG_P (loc)
6271 && REGNO (node->loc) == REGNO (loc))
6272 || rtx_equal_p (node->loc, loc))
6274 slot = unshare_variable (set, slot, var,
6275 VAR_INIT_STATUS_UNKNOWN);
6276 var = (variable)*slot;
6282 /* Delete the location part. */
6283 nextp = &var->var_part[pos].loc_chain;
6284 for (node = *nextp; node; node = next)
6287 if ((REG_P (node->loc) && REG_P (loc)
6288 && REGNO (node->loc) == REGNO (loc))
6289 || rtx_equal_p (node->loc, loc))
6291 if (emit_notes && pos == 0 && dv_onepart_p (var->dv))
6292 remove_value_chains (var->dv, node->loc);
6293 pool_free (loc_chain_pool, node);
6298 nextp = &node->next;
6301 /* If we have deleted the location which was last emitted
6302 we have to emit new location so add the variable to set
6303 of changed variables. */
6304 if (var->var_part[pos].cur_loc
6306 && REG_P (var->var_part[pos].cur_loc)
6307 && REGNO (loc) == REGNO (var->var_part[pos].cur_loc))
6308 || rtx_equal_p (loc, var->var_part[pos].cur_loc)))
6311 if (var->var_part[pos].loc_chain)
6312 var->var_part[pos].cur_loc = var->var_part[pos].loc_chain->loc;
6317 if (var->var_part[pos].loc_chain == NULL)
6319 gcc_assert (changed);
6321 if (emit_notes && var->n_var_parts == 0 && dv_is_value_p (var->dv))
6322 remove_cselib_value_chains (var->dv);
6323 while (pos < var->n_var_parts)
6325 var->var_part[pos] = var->var_part[pos + 1];
6330 variable_was_changed (var, set);
6336 /* Delete the part of variable's location from dataflow set SET. The
6337 variable part is specified by variable's declaration or value DV
6338 and offset OFFSET and the part's location by LOC. */
6341 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6342 HOST_WIDE_INT offset)
6344 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
6348 slot = delete_slot_part (set, loc, slot, offset);
6351 /* Callback for cselib_expand_value, that looks for expressions
6352 holding the value in the var-tracking hash tables. Return X for
6353 standard processing, anything else is to be used as-is. */
6356 vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
6358 htab_t vars = (htab_t)data;
6362 rtx result, subreg, xret;
6364 switch (GET_CODE (x))
6367 subreg = SUBREG_REG (x);
6369 if (GET_CODE (SUBREG_REG (x)) != VALUE)
6372 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
6374 vt_expand_loc_callback, data);
6379 result = simplify_gen_subreg (GET_MODE (x), subreg,
6380 GET_MODE (SUBREG_REG (x)),
6383 /* Invalid SUBREGs are ok in debug info. ??? We could try
6384 alternate expansions for the VALUE as well. */
6385 if (!result && (REG_P (subreg) || MEM_P (subreg)))
6386 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
6391 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
6396 dv = dv_from_value (x);
6404 if (VALUE_RECURSED_INTO (x))
6407 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
6412 if (var->n_var_parts == 0)
6415 gcc_assert (var->n_var_parts == 1);
6417 VALUE_RECURSED_INTO (x) = true;
6420 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
6422 result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
6423 vt_expand_loc_callback, vars);
6428 VALUE_RECURSED_INTO (x) = false;
6435 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
6439 vt_expand_loc (rtx loc, htab_t vars)
6441 if (!MAY_HAVE_DEBUG_INSNS)
6444 loc = cselib_expand_value_rtx_cb (loc, scratch_regs, 5,
6445 vt_expand_loc_callback, vars);
6447 if (loc && MEM_P (loc))
6448 loc = targetm.delegitimize_address (loc);
6453 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
6454 additional parameters: WHERE specifies whether the note shall be emitted
6455 before or after instruction INSN. */
6458 emit_note_insn_var_location (void **varp, void *data)
6460 variable var = (variable) *varp;
6461 rtx insn = ((emit_note_data *)data)->insn;
6462 enum emit_note_where where = ((emit_note_data *)data)->where;
6463 htab_t vars = ((emit_note_data *)data)->vars;
6465 int i, j, n_var_parts;
6467 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
6468 HOST_WIDE_INT last_limit;
6469 tree type_size_unit;
6470 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
6471 rtx loc[MAX_VAR_PARTS];
6474 if (dv_is_value_p (var->dv))
6477 decl = dv_as_decl (var->dv);
6479 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6487 for (i = 0; i < var->n_var_parts; i++)
6489 enum machine_mode mode, wider_mode;
6492 if (last_limit < var->var_part[i].offset)
6497 else if (last_limit > var->var_part[i].offset)
6499 offsets[n_var_parts] = var->var_part[i].offset;
6500 loc2 = vt_expand_loc (var->var_part[i].loc_chain->loc, vars);
6506 loc[n_var_parts] = loc2;
6507 mode = GET_MODE (var->var_part[i].loc_chain->loc);
6508 initialized = var->var_part[i].loc_chain->init;
6509 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
6511 /* Attempt to merge adjacent registers or memory. */
6512 wider_mode = GET_MODE_WIDER_MODE (mode);
6513 for (j = i + 1; j < var->n_var_parts; j++)
6514 if (last_limit <= var->var_part[j].offset)
6516 if (j < var->n_var_parts
6517 && wider_mode != VOIDmode
6518 && mode == GET_MODE (var->var_part[j].loc_chain->loc)
6519 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
6520 && (loc2 = vt_expand_loc (var->var_part[j].loc_chain->loc, vars))
6521 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2)
6522 && last_limit == var->var_part[j].offset)
6526 if (REG_P (loc[n_var_parts])
6527 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
6528 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
6529 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
6532 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
6533 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
6535 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
6536 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
6539 if (!REG_P (new_loc)
6540 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
6543 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
6546 else if (MEM_P (loc[n_var_parts])
6547 && GET_CODE (XEXP (loc2, 0)) == PLUS
6548 && REG_P (XEXP (XEXP (loc2, 0), 0))
6549 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
6551 if ((REG_P (XEXP (loc[n_var_parts], 0))
6552 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
6553 XEXP (XEXP (loc2, 0), 0))
6554 && INTVAL (XEXP (XEXP (loc2, 0), 1))
6555 == GET_MODE_SIZE (mode))
6556 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
6557 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
6558 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
6559 XEXP (XEXP (loc2, 0), 0))
6560 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
6561 + GET_MODE_SIZE (mode)
6562 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
6563 new_loc = adjust_address_nv (loc[n_var_parts],
6569 loc[n_var_parts] = new_loc;
6571 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
6577 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6578 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
6581 if (where != EMIT_NOTE_BEFORE_INSN)
6583 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
6584 if (where == EMIT_NOTE_AFTER_CALL_INSN)
6585 NOTE_DURING_CALL_P (note) = true;
6588 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
6590 if (! flag_var_tracking_uninit)
6591 initialized = VAR_INIT_STATUS_INITIALIZED;
6595 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6596 NULL_RTX, (int) initialized);
6598 else if (n_var_parts == 1)
6601 = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
6603 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6607 else if (n_var_parts)
6611 for (i = 0; i < n_var_parts; i++)
6613 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
6615 parallel = gen_rtx_PARALLEL (VOIDmode,
6616 gen_rtvec_v (n_var_parts, loc));
6617 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6623 set_dv_changed (var->dv, false);
6624 htab_clear_slot (changed_variables, varp);
6626 /* Continue traversing the hash table. */
6630 DEF_VEC_P (variable);
6631 DEF_VEC_ALLOC_P (variable, heap);
6633 /* Stack of variable_def pointers that need processing with
6634 check_changed_vars_2. */
6636 static VEC (variable, heap) *changed_variables_stack;
6638 /* Populate changed_variables_stack with variable_def pointers
6639 that need variable_was_changed called on them. */
6642 check_changed_vars_1 (void **slot, void *data)
6644 variable var = (variable) *slot;
6645 htab_t htab = (htab_t) data;
6647 if (dv_is_value_p (var->dv))
6650 = (value_chain) htab_find_with_hash (value_chains, var->dv,
6651 dv_htab_hash (var->dv));
6655 for (vc = vc->next; vc; vc = vc->next)
6656 if (!dv_changed_p (vc->dv))
6659 = (variable) htab_find_with_hash (htab, vc->dv,
6660 dv_htab_hash (vc->dv));
6662 VEC_safe_push (variable, heap, changed_variables_stack,
6669 /* Add VAR to changed_variables and also for VALUEs add recursively
6670 all DVs that aren't in changed_variables yet but reference the
6671 VALUE from its loc_chain. */
6674 check_changed_vars_2 (variable var, htab_t htab)
6676 variable_was_changed (var, NULL);
6677 if (dv_is_value_p (var->dv))
6680 = (value_chain) htab_find_with_hash (value_chains, var->dv,
6681 dv_htab_hash (var->dv));
6685 for (vc = vc->next; vc; vc = vc->next)
6686 if (!dv_changed_p (vc->dv))
6689 = (variable) htab_find_with_hash (htab, vc->dv,
6690 dv_htab_hash (vc->dv));
6692 check_changed_vars_2 (vcvar, htab);
6697 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
6698 CHANGED_VARIABLES and delete this chain. WHERE specifies whether the notes
6699 shall be emitted before of after instruction INSN. */
6702 emit_notes_for_changes (rtx insn, enum emit_note_where where,
6705 emit_note_data data;
6706 htab_t htab = shared_hash_htab (vars);
6708 if (!htab_elements (changed_variables))
6711 if (MAY_HAVE_DEBUG_INSNS)
6713 /* Unfortunately this has to be done in two steps, because
6714 we can't traverse a hashtab into which we are inserting
6715 through variable_was_changed. */
6716 htab_traverse (changed_variables, check_changed_vars_1, htab);
6717 while (VEC_length (variable, changed_variables_stack) > 0)
6718 check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
6726 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
6729 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
6730 same variable in hash table DATA or is not there at all. */
6733 emit_notes_for_differences_1 (void **slot, void *data)
6735 htab_t new_vars = (htab_t) data;
6736 variable old_var, new_var;
6738 old_var = (variable) *slot;
6739 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
6740 dv_htab_hash (old_var->dv));
6744 /* Variable has disappeared. */
6747 empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
6748 empty_var->dv = old_var->dv;
6749 empty_var->refcount = 0;
6750 empty_var->n_var_parts = 0;
6751 if (dv_onepart_p (old_var->dv))
6755 gcc_assert (old_var->n_var_parts == 1);
6756 for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
6757 remove_value_chains (old_var->dv, lc->loc);
6758 if (dv_is_value_p (old_var->dv))
6759 remove_cselib_value_chains (old_var->dv);
6761 variable_was_changed (empty_var, NULL);
6763 else if (variable_different_p (old_var, new_var, true))
6765 if (dv_onepart_p (old_var->dv))
6767 location_chain lc1, lc2;
6769 gcc_assert (old_var->n_var_parts == 1);
6770 gcc_assert (new_var->n_var_parts == 1);
6771 lc1 = old_var->var_part[0].loc_chain;
6772 lc2 = new_var->var_part[0].loc_chain;
6775 && ((REG_P (lc1->loc) && REG_P (lc2->loc))
6776 || rtx_equal_p (lc1->loc, lc2->loc)))
6781 for (; lc2; lc2 = lc2->next)
6782 add_value_chains (old_var->dv, lc2->loc);
6783 for (; lc1; lc1 = lc1->next)
6784 remove_value_chains (old_var->dv, lc1->loc);
6786 variable_was_changed (new_var, NULL);
6789 /* Continue traversing the hash table. */
6793 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
6797 emit_notes_for_differences_2 (void **slot, void *data)
6799 htab_t old_vars = (htab_t) data;
6800 variable old_var, new_var;
6802 new_var = (variable) *slot;
6803 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
6804 dv_htab_hash (new_var->dv));
6807 /* Variable has appeared. */
6808 if (dv_onepart_p (new_var->dv))
6812 gcc_assert (new_var->n_var_parts == 1);
6813 for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
6814 add_value_chains (new_var->dv, lc->loc);
6815 if (dv_is_value_p (new_var->dv))
6816 add_cselib_value_chains (new_var->dv);
6818 variable_was_changed (new_var, NULL);
6821 /* Continue traversing the hash table. */
6825 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
6829 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
6830 dataflow_set *new_set)
6832 htab_traverse (shared_hash_htab (old_set->vars),
6833 emit_notes_for_differences_1,
6834 shared_hash_htab (new_set->vars));
6835 htab_traverse (shared_hash_htab (new_set->vars),
6836 emit_notes_for_differences_2,
6837 shared_hash_htab (old_set->vars));
6838 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
6841 /* Emit the notes for changes of location parts in the basic block BB. */
6844 emit_notes_in_bb (basic_block bb, dataflow_set *set)
6848 dataflow_set_clear (set);
6849 dataflow_set_copy (set, &VTI (bb)->in);
6851 for (i = 0; i < VTI (bb)->n_mos; i++)
6853 rtx insn = VTI (bb)->mos[i].insn;
6855 switch (VTI (bb)->mos[i].type)
6858 dataflow_set_clear_at_call (set);
6859 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
6864 rtx loc = VTI (bb)->mos[i].u.loc;
6867 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6869 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6871 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
6877 rtx loc = VTI (bb)->mos[i].u.loc;
6881 if (GET_CODE (loc) == CONCAT)
6883 val = XEXP (loc, 0);
6884 vloc = XEXP (loc, 1);
6892 var = PAT_VAR_LOCATION_DECL (vloc);
6894 clobber_variable_part (set, NULL_RTX,
6895 dv_from_decl (var), 0, NULL_RTX);
6898 if (VAL_NEEDS_RESOLUTION (loc))
6899 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6900 set_variable_part (set, val, dv_from_decl (var), 0,
6901 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6905 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
6911 rtx loc = VTI (bb)->mos[i].u.loc;
6912 rtx val, vloc, uloc;
6914 vloc = uloc = XEXP (loc, 1);
6915 val = XEXP (loc, 0);
6917 if (GET_CODE (val) == CONCAT)
6919 uloc = XEXP (val, 1);
6920 val = XEXP (val, 0);
6923 if (VAL_NEEDS_RESOLUTION (loc))
6924 val_resolve (set, val, vloc, insn);
6926 if (VAL_HOLDS_TRACK_EXPR (loc))
6928 if (GET_CODE (uloc) == REG)
6929 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6931 else if (GET_CODE (uloc) == MEM)
6932 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6936 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
6942 rtx loc = VTI (bb)->mos[i].u.loc;
6943 rtx val, vloc, uloc;
6945 vloc = uloc = XEXP (loc, 1);
6946 val = XEXP (loc, 0);
6948 if (GET_CODE (val) == CONCAT)
6950 vloc = XEXP (val, 1);
6951 val = XEXP (val, 0);
6954 if (GET_CODE (vloc) == SET)
6956 rtx vsrc = SET_SRC (vloc);
6958 gcc_assert (val != vsrc);
6959 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6961 vloc = SET_DEST (vloc);
6963 if (VAL_NEEDS_RESOLUTION (loc))
6964 val_resolve (set, val, vsrc, insn);
6966 else if (VAL_NEEDS_RESOLUTION (loc))
6968 gcc_assert (GET_CODE (uloc) == SET
6969 && GET_CODE (SET_SRC (uloc)) == REG);
6970 val_resolve (set, val, SET_SRC (uloc), insn);
6973 if (VAL_HOLDS_TRACK_EXPR (loc))
6975 if (VAL_EXPR_IS_CLOBBERED (loc))
6978 var_reg_delete (set, uloc, true);
6979 else if (MEM_P (uloc))
6980 var_mem_delete (set, uloc, true);
6984 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6986 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6988 if (GET_CODE (uloc) == SET)
6990 set_src = SET_SRC (uloc);
6991 uloc = SET_DEST (uloc);
6996 status = find_src_status (set, set_src);
6998 set_src = find_src_set_src (set, set_src);
7002 var_reg_delete_and_set (set, uloc, !copied_p,
7004 else if (MEM_P (uloc))
7005 var_mem_delete_and_set (set, uloc, !copied_p,
7009 else if (REG_P (uloc))
7010 var_regno_delete (set, REGNO (uloc));
7012 val_store (set, val, vloc, insn);
7014 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7021 rtx loc = VTI (bb)->mos[i].u.loc;
7024 if (GET_CODE (loc) == SET)
7026 set_src = SET_SRC (loc);
7027 loc = SET_DEST (loc);
7031 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7034 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7037 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7044 rtx loc = VTI (bb)->mos[i].u.loc;
7045 enum var_init_status src_status;
7048 if (GET_CODE (loc) == SET)
7050 set_src = SET_SRC (loc);
7051 loc = SET_DEST (loc);
7054 src_status = find_src_status (set, set_src);
7055 set_src = find_src_set_src (set, set_src);
7058 var_reg_delete_and_set (set, loc, false, src_status, set_src);
7060 var_mem_delete_and_set (set, loc, false, src_status, set_src);
7062 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7069 rtx loc = VTI (bb)->mos[i].u.loc;
7072 var_reg_delete (set, loc, false);
7074 var_mem_delete (set, loc, false);
7076 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
7082 rtx loc = VTI (bb)->mos[i].u.loc;
7085 var_reg_delete (set, loc, true);
7087 var_mem_delete (set, loc, true);
7089 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7095 set->stack_adjust += VTI (bb)->mos[i].u.adjust;
7101 /* Emit notes for the whole function. */
7104 vt_emit_notes (void)
7109 gcc_assert (!htab_elements (changed_variables));
7111 /* Free memory occupied by the out hash tables, as they aren't used
7114 dataflow_set_clear (&VTI (bb)->out);
7116 /* Enable emitting notes by functions (mainly by set_variable_part and
7117 delete_variable_part). */
7120 if (MAY_HAVE_DEBUG_INSNS)
7121 changed_variables_stack = VEC_alloc (variable, heap, 40);
7123 dataflow_set_init (&cur);
7127 /* Emit the notes for changes of variable locations between two
7128 subsequent basic blocks. */
7129 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
7131 /* Emit the notes for the changes in the basic block itself. */
7132 emit_notes_in_bb (bb, &cur);
7134 /* Free memory occupied by the in hash table, we won't need it
7136 dataflow_set_clear (&VTI (bb)->in);
7138 #ifdef ENABLE_CHECKING
7139 htab_traverse (shared_hash_htab (cur.vars),
7140 emit_notes_for_differences_1,
7141 shared_hash_htab (empty_shared_hash));
7142 if (MAY_HAVE_DEBUG_INSNS)
7143 gcc_assert (htab_elements (value_chains) == 0);
7145 dataflow_set_destroy (&cur);
7147 if (MAY_HAVE_DEBUG_INSNS)
7148 VEC_free (variable, heap, changed_variables_stack);
7153 /* If there is a declaration and offset associated with register/memory RTL
7154 assign declaration to *DECLP and offset to *OFFSETP, and return true. */
7157 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
7161 if (REG_ATTRS (rtl))
7163 *declp = REG_EXPR (rtl);
7164 *offsetp = REG_OFFSET (rtl);
7168 else if (MEM_P (rtl))
7170 if (MEM_ATTRS (rtl))
7172 *declp = MEM_EXPR (rtl);
7173 *offsetp = INT_MEM_OFFSET (rtl);
7180 /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
7183 vt_add_function_parameters (void)
7187 for (parm = DECL_ARGUMENTS (current_function_decl);
7188 parm; parm = TREE_CHAIN (parm))
7190 rtx decl_rtl = DECL_RTL_IF_SET (parm);
7191 rtx incoming = DECL_INCOMING_RTL (parm);
7193 enum machine_mode mode;
7194 HOST_WIDE_INT offset;
7198 if (TREE_CODE (parm) != PARM_DECL)
7201 if (!DECL_NAME (parm))
7204 if (!decl_rtl || !incoming)
7207 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
7210 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
7212 if (REG_P (incoming) || MEM_P (incoming))
7214 /* This means argument is passed by invisible reference. */
7217 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
7221 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
7223 offset += byte_lowpart_offset (GET_MODE (incoming),
7224 GET_MODE (decl_rtl));
7233 /* Assume that DECL_RTL was a pseudo that got spilled to
7234 memory. The spill slot sharing code will force the
7235 memory to reference spill_slot_decl (%sfp), so we don't
7236 match above. That's ok, the pseudo must have referenced
7237 the entire parameter, so just reset OFFSET. */
7238 gcc_assert (decl == get_spill_slot_decl (false));
7242 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
7245 out = &VTI (ENTRY_BLOCK_PTR)->out;
7247 dv = dv_from_decl (parm);
7249 if (target_for_debug_bind (parm)
7250 /* We can't deal with these right now, because this kind of
7251 variable is single-part. ??? We could handle parallels
7252 that describe multiple locations for the same single
7253 value, but ATM we don't. */
7254 && GET_CODE (incoming) != PARALLEL)
7258 /* ??? We shouldn't ever hit this, but it may happen because
7259 arguments passed by invisible reference aren't dealt with
7260 above: incoming-rtl will have Pmode rather than the
7261 expected mode for the type. */
7265 val = cselib_lookup (var_lowpart (mode, incoming), mode, true);
7267 /* ??? Float-typed values in memory are not handled by
7271 cselib_preserve_value (val);
7272 set_variable_part (out, val->val_rtx, dv, offset,
7273 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7274 dv = dv_from_value (val->val_rtx);
7278 if (REG_P (incoming))
7280 incoming = var_lowpart (mode, incoming);
7281 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
7282 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
7284 set_variable_part (out, incoming, dv, offset,
7285 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7287 else if (MEM_P (incoming))
7289 incoming = var_lowpart (mode, incoming);
7290 set_variable_part (out, incoming, dv, offset,
7291 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7295 if (MAY_HAVE_DEBUG_INSNS)
7297 cselib_preserve_only_values (true);
7298 cselib_reset_table_with_next_value (cselib_get_next_unknown_value ());
7303 /* Allocate and initialize the data structures for variable tracking
7304 and parse the RTL to get the micro operations. */
7307 vt_initialize (void)
7311 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
7313 if (MAY_HAVE_DEBUG_INSNS)
7316 scratch_regs = BITMAP_ALLOC (NULL);
7317 valvar_pool = create_alloc_pool ("small variable_def pool",
7318 sizeof (struct variable_def), 256);
7322 scratch_regs = NULL;
7329 HOST_WIDE_INT pre, post = 0;
7331 unsigned int next_value_before = cselib_get_next_unknown_value ();
7332 unsigned int next_value_after = next_value_before;
7334 if (MAY_HAVE_DEBUG_INSNS)
7336 cselib_record_sets_hook = count_with_sets;
7337 if (dump_file && (dump_flags & TDF_DETAILS))
7338 fprintf (dump_file, "first value: %i\n",
7339 cselib_get_next_unknown_value ());
7342 /* Count the number of micro operations. */
7343 VTI (bb)->n_mos = 0;
7344 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
7345 insn = NEXT_INSN (insn))
7349 if (!frame_pointer_needed)
7351 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
7355 if (dump_file && (dump_flags & TDF_DETAILS))
7356 log_op_type (GEN_INT (pre), bb, insn,
7357 MO_ADJUST, dump_file);
7362 if (dump_file && (dump_flags & TDF_DETAILS))
7363 log_op_type (GEN_INT (post), bb, insn,
7364 MO_ADJUST, dump_file);
7367 cselib_hook_called = false;
7368 if (MAY_HAVE_DEBUG_INSNS)
7370 cselib_process_insn (insn);
7371 if (dump_file && (dump_flags & TDF_DETAILS))
7373 print_rtl_single (dump_file, insn);
7374 dump_cselib_table (dump_file);
7377 if (!cselib_hook_called)
7378 count_with_sets (insn, 0, 0);
7382 if (dump_file && (dump_flags & TDF_DETAILS))
7383 log_op_type (PATTERN (insn), bb, insn,
7384 MO_CALL, dump_file);
7389 count = VTI (bb)->n_mos;
7391 if (MAY_HAVE_DEBUG_INSNS)
7393 cselib_preserve_only_values (false);
7394 next_value_after = cselib_get_next_unknown_value ();
7395 cselib_reset_table_with_next_value (next_value_before);
7396 cselib_record_sets_hook = add_with_sets;
7397 if (dump_file && (dump_flags & TDF_DETAILS))
7398 fprintf (dump_file, "first value: %i\n",
7399 cselib_get_next_unknown_value ());
7402 /* Add the micro-operations to the array. */
7403 VTI (bb)->mos = XNEWVEC (micro_operation, VTI (bb)->n_mos);
7404 VTI (bb)->n_mos = 0;
7405 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
7406 insn = NEXT_INSN (insn))
7410 if (!frame_pointer_needed)
7412 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
7415 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
7417 mo->type = MO_ADJUST;
7421 if (dump_file && (dump_flags & TDF_DETAILS))
7422 log_op_type (PATTERN (insn), bb, insn,
7423 MO_ADJUST, dump_file);
7427 cselib_hook_called = false;
7428 if (MAY_HAVE_DEBUG_INSNS)
7430 cselib_process_insn (insn);
7431 if (dump_file && (dump_flags & TDF_DETAILS))
7433 print_rtl_single (dump_file, insn);
7434 dump_cselib_table (dump_file);
7437 if (!cselib_hook_called)
7438 add_with_sets (insn, 0, 0);
7440 if (!frame_pointer_needed && post)
7442 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
7444 mo->type = MO_ADJUST;
7445 mo->u.adjust = post;
7448 if (dump_file && (dump_flags & TDF_DETAILS))
7449 log_op_type (PATTERN (insn), bb, insn,
7450 MO_ADJUST, dump_file);
7454 gcc_assert (count == VTI (bb)->n_mos);
7455 if (MAY_HAVE_DEBUG_INSNS)
7457 cselib_preserve_only_values (true);
7458 gcc_assert (next_value_after == cselib_get_next_unknown_value ());
7459 cselib_reset_table_with_next_value (next_value_after);
7460 cselib_record_sets_hook = NULL;
7464 attrs_pool = create_alloc_pool ("attrs_def pool",
7465 sizeof (struct attrs_def), 1024);
7466 var_pool = create_alloc_pool ("variable_def pool",
7467 sizeof (struct variable_def)
7468 + (MAX_VAR_PARTS - 1)
7469 * sizeof (((variable)NULL)->var_part[0]), 64);
7470 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
7471 sizeof (struct location_chain_def),
7473 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
7474 sizeof (struct shared_hash_def), 256);
7475 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
7476 empty_shared_hash->refcount = 1;
7477 empty_shared_hash->htab
7478 = htab_create (1, variable_htab_hash, variable_htab_eq,
7479 variable_htab_free);
7480 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
7481 variable_htab_free);
7482 if (MAY_HAVE_DEBUG_INSNS)
7484 value_chain_pool = create_alloc_pool ("value_chain_def pool",
7485 sizeof (struct value_chain_def),
7487 value_chains = htab_create (32, value_chain_htab_hash,
7488 value_chain_htab_eq, NULL);
7491 /* Init the IN and OUT sets. */
7494 VTI (bb)->visited = false;
7495 VTI (bb)->flooded = false;
7496 dataflow_set_init (&VTI (bb)->in);
7497 dataflow_set_init (&VTI (bb)->out);
7498 VTI (bb)->permp = NULL;
7501 VTI (ENTRY_BLOCK_PTR)->flooded = true;
7502 vt_add_function_parameters ();
7505 /* Get rid of all debug insns from the insn stream. */
7508 delete_debug_insns (void)
7513 if (!MAY_HAVE_DEBUG_INSNS)
7518 FOR_BB_INSNS_SAFE (bb, insn, next)
7519 if (DEBUG_INSN_P (insn))
7524 /* Run a fast, BB-local only version of var tracking, to take care of
7525 information that we don't do global analysis on, such that not all
7526 information is lost. If SKIPPED holds, we're skipping the global
7527 pass entirely, so we should try to use information it would have
7528 handled as well.. */
7531 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
7533 /* ??? Just skip it all for now. */
7534 delete_debug_insns ();
7537 /* Free the data structures needed for variable tracking. */
7546 free (VTI (bb)->mos);
7551 dataflow_set_destroy (&VTI (bb)->in);
7552 dataflow_set_destroy (&VTI (bb)->out);
7553 if (VTI (bb)->permp)
7555 dataflow_set_destroy (VTI (bb)->permp);
7556 XDELETE (VTI (bb)->permp);
7559 free_aux_for_blocks ();
7560 htab_delete (empty_shared_hash->htab);
7561 htab_delete (changed_variables);
7562 free_alloc_pool (attrs_pool);
7563 free_alloc_pool (var_pool);
7564 free_alloc_pool (loc_chain_pool);
7565 free_alloc_pool (shared_hash_pool);
7567 if (MAY_HAVE_DEBUG_INSNS)
7569 htab_delete (value_chains);
7570 free_alloc_pool (value_chain_pool);
7571 free_alloc_pool (valvar_pool);
7573 BITMAP_FREE (scratch_regs);
7574 scratch_regs = NULL;
7578 XDELETEVEC (vui_vec);
7583 /* The entry point to variable tracking pass. */
7586 variable_tracking_main (void)
7588 if (flag_var_tracking_assignments < 0)
7590 delete_debug_insns ();
7594 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
7596 vt_debug_insns_local (true);
7600 mark_dfs_back_edges ();
7602 if (!frame_pointer_needed)
7604 if (!vt_stack_adjustments ())
7607 vt_debug_insns_local (true);
7612 vt_find_locations ();
7614 if (dump_file && (dump_flags & TDF_DETAILS))
7616 dump_dataflow_sets ();
7617 dump_flow_info (dump_file, dump_flags);
7623 vt_debug_insns_local (false);
7628 gate_handle_var_tracking (void)
7630 return (flag_var_tracking);
7635 struct rtl_opt_pass pass_variable_tracking =
7639 "vartrack", /* name */
7640 gate_handle_var_tracking, /* gate */
7641 variable_tracking_main, /* execute */
7644 0, /* static_pass_number */
7645 TV_VAR_TRACKING, /* tv_id */
7646 0, /* properties_required */
7647 0, /* properties_provided */
7648 0, /* properties_destroyed */
7649 0, /* todo_flags_start */
7650 TODO_dump_func | TODO_verify_rtl_sharing/* todo_flags_finish */