1 /* Variable tracking routines for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains the variable tracking pass. It computes where
22 variables are located (which registers or where in memory) at each position
23 in the instruction stream and emits notes describing the locations.
24 Debug information (DWARF2 location lists) is finally generated from these notes.
26 With this debug information, it is possible to show variables
27 even when debugging optimized code.
29 How does the variable tracking pass work?
31 First, it scans RTL code for uses, stores and clobbers (register/memory
32 references in instructions), for call insns and for stack adjustments
33 separately for each basic block and saves them to an array of micro operations.
35 The micro operations of one instruction are ordered so that
36 pre-modifying stack adjustment < use < use with no var < call insn <
37 set < clobber < post-modifying stack adjustment.
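(For instance, registers holding the arguments of a call insn are recorded
as uses before the call itself, and any registers the call clobbers are
recorded only after it.)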
39 Then, a forward dataflow analysis is performed to find out how locations
40 of variables change through code and to propagate the variable locations
41 along the control flow graph.
42 The IN set for basic block BB is computed as a union of OUT sets of BB's
43 predecessors, the OUT set for BB is copied from the IN set for BB and
44 is changed according to micro operations in BB.
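As a rough sketch (not literal code), each iteration of the analysis computes,
for every basic block BB,

  IN (BB)  = union of OUT (P) over all predecessors P of BB
  OUT (BB) = IN (BB) transformed by the micro operations recorded for BB

and the iteration stops once no OUT set changes any more.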
46 The IN and OUT sets for basic blocks consist of a current stack adjustment
47 (used for adjusting the offset of variables addressed using the stack pointer),
48 a table of structures describing the locations of the parts of each variable,
49 and a linked list for each physical register.
50 The linked list is a list of variable parts stored in the register,
51 i.e. it is a list of triplets (reg, decl, offset) where decl is
52 REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
53 efficiently deleting the appropriate variable parts when we set or clobber the register.
56 There may be more than one variable part in a register. The linked lists
57 should be pretty short, so a linked list is a good data structure here.
58 For example, in code like the sketch below, the register allocator may assign the same
59 register to variables A and B, and both of them are then stored in the same register:
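(An illustrative sketch; "set", "use" and CODE are placeholders rather than
real statements:)

  if (cond)
    set A;
  else
    set B;
  CODE;
  if (cond)
    use A;
  else
    use B;

During CODE the attrs list of that register then holds both variable parts,
i.e. the triplets (reg, A, 0) and (reg, B, 0).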
72 Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
73 are emitted to appropriate positions in RTL code. Each such note describes
74 the location of one variable at the point in the instruction stream where the
75 note is. There is no need to emit a note for each variable before each
76 instruction; we only emit these notes where the location of a variable changes
77 (this means that we also emit notes for changes between the OUT set of the
78 previous block and the IN set of the current block).
80 The notes consist of two parts:
81 1. the declaration (from REG_EXPR or MEM_EXPR)
82 2. the location of a variable - it is either a simple register/memory
83 reference (for simple variables, for example int),
84 or a parallel of register/memory references (for large variables
85 which consist of several parts, for example long long).
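As an illustration (the register name and mode here are made up, not taken
from any particular target), the location part of such a note might look like

  (var_location i (reg:SI ax))

for a simple int variable I living in a register, or be a VAR_LOCATION whose
location operand is a PARALLEL of such references, one per part, for something
like a long long.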
91 #include "coretypes.h"
95 #include "hard-reg-set.h"
96 #include "basic-block.h"
99 #include "insn-config.h"
102 #include "alloc-pool.h"
108 #include "tree-pass.h"
112 /* Type of micro operation. */
113 enum micro_operation_type
115 MO_USE, /* Use location (REG or MEM). */
116 MO_USE_NO_VAR,/* Use location which is not associated with a variable
117 or the variable is not trackable. */
118 MO_VAL_USE, /* Use location which is associated with a value. */
119 MO_VAL_LOC, /* Use location which appears in a debug insn. */
120 MO_VAL_SET, /* Set location associated with a value. */
121 MO_SET, /* Set location. */
122 MO_COPY, /* Copy the same portion of a variable from one
123 location to another. */
124 MO_CLOBBER, /* Clobber location. */
125 MO_CALL, /* Call insn. */
126 MO_ADJUST /* Adjust stack pointer. */
130 static const char * const ATTRIBUTE_UNUSED
131 micro_operation_type_name[] = {
144 /* Where shall the note be emitted? BEFORE or AFTER the instruction.
145 Notes emitted as AFTER_CALL are to take effect during the call,
146 rather than after the call. */
149 EMIT_NOTE_BEFORE_INSN,
150 EMIT_NOTE_AFTER_INSN,
151 EMIT_NOTE_AFTER_CALL_INSN
154 /* Structure holding information about micro operation. */
155 typedef struct micro_operation_def
157 /* Type of micro operation. */
158 enum micro_operation_type type;
161 /* Location. For MO_SET and MO_COPY, this is the SET that
162 performs the assignment, if known, otherwise it is the target
163 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
164 CONCAT of the VALUE and the LOC associated with it. For
165 MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
166 associated with it. */
169 /* Stack adjustment. */
170 HOST_WIDE_INT adjust;
173 /* The instruction which the micro operation is in, for MO_USE,
174 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
175 instruction or note in the original flow (before any var-tracking
176 notes are inserted, to simplify emission of notes), for MO_SET
181 /* A declaration of a variable, or an RTL value being handled like a declaration. */
183 typedef void *decl_or_value;
185 /* Structure for passing some other parameters to function
186 emit_note_insn_var_location. */
187 typedef struct emit_note_data_def
189 /* The instruction which the note will be emitted before/after. */
192 /* Where the note will be emitted (before/after insn)? */
193 enum emit_note_where where;
195 /* The variables and values active at this point. */
199 /* Description of location of a part of a variable. The content of a physical
200 register is described by a chain of these structures.
201 The chains are pretty short (usually 1 or 2 elements) and thus
202 a chain is the best data structure here. */
203 typedef struct attrs_def
205 /* Pointer to next member of the list. */
206 struct attrs_def *next;
208 /* The rtx of register. */
211 /* The declaration corresponding to LOC. */
214 /* Offset from start of DECL. */
215 HOST_WIDE_INT offset;
218 /* Structure holding a refcounted hash table. If refcount > 1,
219 it must first be unshared before being modified. */
220 typedef struct shared_hash_def
222 /* Reference count. */
225 /* Actual hash table. */
229 /* Structure holding the IN or OUT set for a basic block. */
230 typedef struct dataflow_set_def
232 /* Adjustment of stack offset. */
233 HOST_WIDE_INT stack_adjust;
235 /* Attributes for registers (lists of attrs). */
236 attrs regs[FIRST_PSEUDO_REGISTER];
238 /* Variable locations. */
241 /* Vars that are being traversed. */
242 shared_hash traversed_vars;
245 /* The structure (one for each basic block) containing the information
246 needed for variable tracking. */
247 typedef struct variable_tracking_info_def
249 /* Number of micro operations stored in the MOS array. */
252 /* The array of micro operations. */
253 micro_operation *mos;
255 /* The IN and OUT set for dataflow analysis. */
259 /* The permanent-in dataflow set for this block. This is used to
260 hold values for which we had to compute entry values. ??? This
261 should probably be dynamically allocated, to avoid using more
262 memory in non-debug builds. */
265 /* Has the block been visited in DFS? */
268 /* Has the block been flooded in VTA? */
271 } *variable_tracking_info;
273 /* Structure for chaining the locations. */
274 typedef struct location_chain_def
276 /* Next element in the chain. */
277 struct location_chain_def *next;
279 /* The location (REG, MEM or VALUE). */
282 /* The "value" stored in this location. */
286 enum var_init_status init;
289 /* Structure describing one part of variable. */
290 typedef struct variable_part_def
292 /* Chain of locations of the part. */
293 location_chain loc_chain;
295 /* Location which was last emitted to location list. */
298 /* The offset in the variable. */
299 HOST_WIDE_INT offset;
302 /* Maximum number of location parts. */
303 #define MAX_VAR_PARTS 16
305 /* Structure describing where the variable is located. */
306 typedef struct variable_def
308 /* The declaration of the variable, or an RTL value being handled
309 like a declaration. */
312 /* Reference count. */
315 /* Number of variable parts. */
318 /* The variable parts. */
319 variable_part var_part[1];
321 typedef const struct variable_def *const_variable;
323 /* Structure for chaining backlinks from referenced VALUEs to
324 DVs that are referencing them. */
325 typedef struct value_chain_def
327 /* Next value_chain entry. */
328 struct value_chain_def *next;
330 /* The declaration of the variable, or an RTL value
331 being handled like a declaration, whose var_parts[0].loc_chain
332 references the VALUE owning this value_chain. */
335 /* Reference count. */
338 typedef const struct value_chain_def *const_value_chain;
340 /* Hash function for DECL for VARIABLE_HTAB. */
341 #define VARIABLE_HASH_VAL(decl) (DECL_UID (decl))
343 /* Pointer to the BB's information specific to variable tracking pass. */
344 #define VTI(BB) ((variable_tracking_info) (BB)->aux)
346 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */
347 #define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
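/* For example, INT_MEM_OFFSET (x) is fine when X is a plain variable, but the
   argument must not have side effects, because it is expanded twice.  */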
349 /* Alloc pool for struct attrs_def. */
350 static alloc_pool attrs_pool;
352 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
353 static alloc_pool var_pool;
355 /* Alloc pool for struct variable_def with a single var_part entry. */
356 static alloc_pool valvar_pool;
358 /* Alloc pool for struct location_chain_def. */
359 static alloc_pool loc_chain_pool;
361 /* Alloc pool for struct shared_hash_def. */
362 static alloc_pool shared_hash_pool;
364 /* Alloc pool for struct value_chain_def. */
365 static alloc_pool value_chain_pool;
367 /* Changed variables, notes will be emitted for them. */
368 static htab_t changed_variables;
370 /* Links from VALUEs to DVs referencing them in their current loc_chains. */
371 static htab_t value_chains;
373 /* Shall notes be emitted? */
374 static bool emit_notes;
376 /* Empty shared hashtable. */
377 static shared_hash empty_shared_hash;
379 /* Scratch register bitmap used by cselib_expand_value_rtx. */
380 static bitmap scratch_regs = NULL;
382 /* Variable used to tell whether cselib_process_insn called our hook. */
383 static bool cselib_hook_called;
385 /* Local function prototypes. */
386 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
388 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
390 static void bb_stack_adjust_offset (basic_block);
391 static bool vt_stack_adjustments (void);
392 static rtx adjust_stack_reference (rtx, HOST_WIDE_INT);
393 static hashval_t variable_htab_hash (const void *);
394 static int variable_htab_eq (const void *, const void *);
395 static void variable_htab_free (void *);
397 static void init_attrs_list_set (attrs *);
398 static void attrs_list_clear (attrs *);
399 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
400 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
401 static void attrs_list_copy (attrs *, attrs);
402 static void attrs_list_union (attrs *, attrs);
404 static void **unshare_variable (dataflow_set *set, void **slot, variable var,
405 enum var_init_status);
406 static int vars_copy_1 (void **, void *);
407 static void vars_copy (htab_t, htab_t);
408 static tree var_debug_decl (tree);
409 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
410 static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
411 enum var_init_status, rtx);
412 static void var_reg_delete (dataflow_set *, rtx, bool);
413 static void var_regno_delete (dataflow_set *, int);
414 static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
415 static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
416 enum var_init_status, rtx);
417 static void var_mem_delete (dataflow_set *, rtx, bool);
419 static void dataflow_set_init (dataflow_set *);
420 static void dataflow_set_clear (dataflow_set *);
421 static void dataflow_set_copy (dataflow_set *, dataflow_set *);
422 static int variable_union_info_cmp_pos (const void *, const void *);
423 static int variable_union (void **, void *);
424 static int variable_canonicalize (void **, void *);
425 static void dataflow_set_union (dataflow_set *, dataflow_set *);
426 static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
427 static bool canon_value_cmp (rtx, rtx);
428 static int loc_cmp (rtx, rtx);
429 static bool variable_part_different_p (variable_part *, variable_part *);
430 static bool onepart_variable_different_p (variable, variable);
431 static bool variable_different_p (variable, variable, bool);
432 static int dataflow_set_different_1 (void **, void *);
433 static bool dataflow_set_different (dataflow_set *, dataflow_set *);
434 static void dataflow_set_destroy (dataflow_set *);
436 static bool contains_symbol_ref (rtx);
437 static bool track_expr_p (tree, bool);
438 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
439 static int count_uses (rtx *, void *);
440 static void count_uses_1 (rtx *, void *);
441 static void count_stores (rtx, const_rtx, void *);
442 static int add_uses (rtx *, void *);
443 static void add_uses_1 (rtx *, void *);
444 static void add_stores (rtx, const_rtx, void *);
445 static bool compute_bb_dataflow (basic_block);
446 static void vt_find_locations (void);
448 static void dump_attrs_list (attrs);
449 static int dump_variable_slot (void **, void *);
450 static void dump_variable (variable);
451 static void dump_vars (htab_t);
452 static void dump_dataflow_set (dataflow_set *);
453 static void dump_dataflow_sets (void);
455 static void variable_was_changed (variable, dataflow_set *);
456 static void **set_slot_part (dataflow_set *, rtx, void **,
457 decl_or_value, HOST_WIDE_INT,
458 enum var_init_status, rtx);
459 static void set_variable_part (dataflow_set *, rtx,
460 decl_or_value, HOST_WIDE_INT,
461 enum var_init_status, rtx, enum insert_option);
462 static void **clobber_slot_part (dataflow_set *, rtx,
463 void **, HOST_WIDE_INT, rtx);
464 static void clobber_variable_part (dataflow_set *, rtx,
465 decl_or_value, HOST_WIDE_INT, rtx);
466 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
467 static void delete_variable_part (dataflow_set *, rtx,
468 decl_or_value, HOST_WIDE_INT);
469 static int emit_note_insn_var_location (void **, void *);
470 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
471 static int emit_notes_for_differences_1 (void **, void *);
472 static int emit_notes_for_differences_2 (void **, void *);
473 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
474 static void emit_notes_in_bb (basic_block, dataflow_set *);
475 static void vt_emit_notes (void);
477 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
478 static void vt_add_function_parameters (void);
479 static void vt_initialize (void);
480 static void vt_finalize (void);
482 /* Given a SET, calculate the amount of stack adjustment it contains,
483 PRE- and POST-modifying the stack pointer.
484 This function is similar to stack_adjust_offset. */
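/* Illustrative examples (a sketch; the register name R0 is made up):
     (set (reg sp) (plus (reg sp) (const_int -16)))
   is accounted as a post-modifying adjustment of the stack pointer, while
     (set (mem:SI (pre_dec (reg sp))) (reg r0))
   is accounted as a pre-modifying adjustment by the size of the stored mode.  */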
487 stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
490 rtx src = SET_SRC (pattern);
491 rtx dest = SET_DEST (pattern);
494 if (dest == stack_pointer_rtx)
496 /* (set (reg sp) (plus (reg sp) (const_int))) */
497 code = GET_CODE (src);
498 if (! (code == PLUS || code == MINUS)
499 || XEXP (src, 0) != stack_pointer_rtx
500 || !CONST_INT_P (XEXP (src, 1)))
504 *post += INTVAL (XEXP (src, 1));
506 *post -= INTVAL (XEXP (src, 1));
508 else if (MEM_P (dest))
510 /* (set (mem (pre_dec (reg sp))) (foo)) */
511 src = XEXP (dest, 0);
512 code = GET_CODE (src);
518 if (XEXP (src, 0) == stack_pointer_rtx)
520 rtx val = XEXP (XEXP (src, 1), 1);
521 /* We handle only adjustments by constant amount. */
522 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
525 if (code == PRE_MODIFY)
526 *pre -= INTVAL (val);
528 *post -= INTVAL (val);
534 if (XEXP (src, 0) == stack_pointer_rtx)
536 *pre += GET_MODE_SIZE (GET_MODE (dest));
542 if (XEXP (src, 0) == stack_pointer_rtx)
544 *post += GET_MODE_SIZE (GET_MODE (dest));
550 if (XEXP (src, 0) == stack_pointer_rtx)
552 *pre -= GET_MODE_SIZE (GET_MODE (dest));
558 if (XEXP (src, 0) == stack_pointer_rtx)
560 *post -= GET_MODE_SIZE (GET_MODE (dest));
571 /* Given an INSN, calculate the amount of stack adjustment it contains
572 PRE- and POST-modifying the stack pointer. */
575 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
583 pattern = PATTERN (insn);
584 if (RTX_FRAME_RELATED_P (insn))
586 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
588 pattern = XEXP (expr, 0);
591 if (GET_CODE (pattern) == SET)
592 stack_adjust_offset_pre_post (pattern, pre, post);
593 else if (GET_CODE (pattern) == PARALLEL
594 || GET_CODE (pattern) == SEQUENCE)
598 /* There may be stack adjustments inside compound insns. Search for them. */
600 for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
601 if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
602 stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
606 /* Compute stack adjustment in basic block BB. */
609 bb_stack_adjust_offset (basic_block bb)
611 HOST_WIDE_INT offset;
614 offset = VTI (bb)->in.stack_adjust;
615 for (i = 0; i < VTI (bb)->n_mos; i++)
617 if (VTI (bb)->mos[i].type == MO_ADJUST)
618 offset += VTI (bb)->mos[i].u.adjust;
619 else if (VTI (bb)->mos[i].type != MO_CALL)
621 if (MEM_P (VTI (bb)->mos[i].u.loc))
623 VTI (bb)->mos[i].u.loc
624 = adjust_stack_reference (VTI (bb)->mos[i].u.loc, -offset);
628 VTI (bb)->out.stack_adjust = offset;
631 /* Compute stack adjustments for all blocks by traversing DFS tree.
632 Return true when the adjustments on all incoming edges are consistent.
633 Heavily borrowed from pre_and_rev_post_order_compute. */
636 vt_stack_adjustments (void)
638 edge_iterator *stack;
641 /* Initialize entry block. */
642 VTI (ENTRY_BLOCK_PTR)->visited = true;
643 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
645 /* Allocate stack for back-tracking up CFG. */
646 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
649 /* Push the first edge on to the stack. */
650 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
658 /* Look at the edge on the top of the stack. */
660 src = ei_edge (ei)->src;
661 dest = ei_edge (ei)->dest;
663 /* Check if the edge destination has been visited yet. */
664 if (!VTI (dest)->visited)
666 VTI (dest)->visited = true;
667 VTI (dest)->in.stack_adjust = VTI (src)->out.stack_adjust;
668 bb_stack_adjust_offset (dest);
670 if (EDGE_COUNT (dest->succs) > 0)
671 /* Since the DEST node has been visited for the first
672 time, check its successors. */
673 stack[sp++] = ei_start (dest->succs);
677 /* Check whether the adjustments on the edges are the same. */
678 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
684 if (! ei_one_before_end_p (ei))
685 /* Go to the next edge. */
686 ei_next (&stack[sp - 1]);
688 /* Return to previous level if there are no more edges. */
697 /* Adjust stack reference MEM by ADJUSTMENT bytes and make it relative
698 to the argument pointer. Return the new rtx. */
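/* For instance (illustrative only), a stack-based reference such as
     (mem:SI (plus (reg sp) (const_int 8)))
   has the stack pointer replaced by a frame- or argument-pointer based CFA
   expression, with the accumulated ADJUSTMENT folded into the constant.  */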
701 adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment)
705 #ifdef FRAME_POINTER_CFA_OFFSET
706 adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
707 cfa = plus_constant (frame_pointer_rtx, adjustment);
709 adjustment -= ARG_POINTER_CFA_OFFSET (current_function_decl);
710 cfa = plus_constant (arg_pointer_rtx, adjustment);
713 addr = replace_rtx (copy_rtx (XEXP (mem, 0)), stack_pointer_rtx, cfa);
714 tmp = simplify_rtx (addr);
718 return replace_equiv_address_nv (mem, addr);
721 /* Return true if a decl_or_value DV is a DECL or NULL. */
723 dv_is_decl_p (decl_or_value dv)
728 /* Make sure relevant codes don't overlap. */
729 switch ((int)TREE_CODE ((tree)dv))
733 case (int)RESULT_DECL:
734 case (int)FUNCTION_DECL:
735 case (int)DEBUG_EXPR_DECL:
736 case (int)COMPONENT_REF:
747 /* Return true if a decl_or_value is a VALUE rtl. */
749 dv_is_value_p (decl_or_value dv)
751 return dv && !dv_is_decl_p (dv);
754 /* Return the decl in the decl_or_value. */
756 dv_as_decl (decl_or_value dv)
758 gcc_assert (dv_is_decl_p (dv));
762 /* Return the value in the decl_or_value. */
764 dv_as_value (decl_or_value dv)
766 gcc_assert (dv_is_value_p (dv));
770 /* Return the opaque pointer in the decl_or_value. */
772 dv_as_opaque (decl_or_value dv)
777 /* Return true if a decl_or_value must not have more than one variable part. */
780 dv_onepart_p (decl_or_value dv)
784 if (!MAY_HAVE_DEBUG_INSNS)
787 if (dv_is_value_p (dv))
790 decl = dv_as_decl (dv);
795 return (target_for_debug_bind (decl) != NULL_TREE);
798 /* Return the variable pool to be used for dv, depending on whether it
799 can have multiple parts or not. */
800 static inline alloc_pool
801 dv_pool (decl_or_value dv)
803 return dv_onepart_p (dv) ? valvar_pool : var_pool;
806 /* Build a decl_or_value out of a decl. */
807 static inline decl_or_value
808 dv_from_decl (tree decl)
812 gcc_assert (dv_is_decl_p (dv));
816 /* Build a decl_or_value out of a value. */
817 static inline decl_or_value
818 dv_from_value (rtx value)
822 gcc_assert (dv_is_value_p (dv));
826 static inline hashval_t
827 dv_htab_hash (decl_or_value dv)
829 if (dv_is_value_p (dv))
830 return -(hashval_t)(CSELIB_VAL_PTR (dv_as_value (dv))->value);
832 return (VARIABLE_HASH_VAL (dv_as_decl (dv)));
835 /* The hash function for variable_htab, computes the hash value
836 from the declaration of variable X. */
839 variable_htab_hash (const void *x)
841 const_variable const v = (const_variable) x;
843 return dv_htab_hash (v->dv);
846 /* Compare the declaration of variable X with declaration Y. */
849 variable_htab_eq (const void *x, const void *y)
851 const_variable const v = (const_variable) x;
852 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
854 if (dv_as_opaque (v->dv) == dv_as_opaque (dv))
861 visv = dv_is_value_p (v->dv);
862 dvisv = dv_is_value_p (dv);
868 gcc_assert (CSELIB_VAL_PTR (dv_as_value (v->dv))
869 != CSELIB_VAL_PTR (dv_as_value (dv)));
871 gcc_assert (VARIABLE_HASH_VAL (dv_as_decl (v->dv))
872 != VARIABLE_HASH_VAL (dv_as_decl (dv)));
879 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */
882 variable_htab_free (void *elem)
885 variable var = (variable) elem;
886 location_chain node, next;
888 gcc_assert (var->refcount > 0);
891 if (var->refcount > 0)
894 for (i = 0; i < var->n_var_parts; i++)
896 for (node = var->var_part[i].loc_chain; node; node = next)
899 pool_free (loc_chain_pool, node);
901 var->var_part[i].loc_chain = NULL;
903 pool_free (dv_pool (var->dv), var);
906 /* The hash function for value_chains htab, computes the hash value from the dv. */
910 value_chain_htab_hash (const void *x)
912 const_value_chain const v = (const_value_chain) x;
914 return dv_htab_hash (v->dv);
917 /* Compare the VALUE X with VALUE Y. */
920 value_chain_htab_eq (const void *x, const void *y)
922 const_value_chain const v = (const_value_chain) x;
923 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
925 return dv_as_opaque (v->dv) == dv_as_opaque (dv);
928 /* Initialize the set (array) SET of attrs to empty lists. */
931 init_attrs_list_set (attrs *set)
935 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
939 /* Make the list *LISTP empty. */
942 attrs_list_clear (attrs *listp)
946 for (list = *listp; list; list = next)
949 pool_free (attrs_pool, list);
954 /* Return the entry of LIST matching DV and OFFSET, or NULL if there is none. */
957 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
959 for (; list; list = list->next)
960 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
965 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
968 attrs_list_insert (attrs *listp, decl_or_value dv,
969 HOST_WIDE_INT offset, rtx loc)
973 list = (attrs) pool_alloc (attrs_pool);
976 list->offset = offset;
981 /* Copy all nodes from SRC and create a list *DSTP of the copies. */
984 attrs_list_copy (attrs *dstp, attrs src)
988 attrs_list_clear (dstp);
989 for (; src; src = src->next)
991 n = (attrs) pool_alloc (attrs_pool);
994 n->offset = src->offset;
1000 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
1003 attrs_list_union (attrs *dstp, attrs src)
1005 for (; src; src = src->next)
1007 if (!attrs_list_member (*dstp, src->dv, src->offset))
1008 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1012 /* Combine nodes that are not onepart nodes from SRC and SRC2 into *DSTP. */
1016 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1018 gcc_assert (!*dstp);
1019 for (; src; src = src->next)
1021 if (!dv_onepart_p (src->dv))
1022 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1024 for (src = src2; src; src = src->next)
1026 if (!dv_onepart_p (src->dv)
1027 && !attrs_list_member (*dstp, src->dv, src->offset))
1028 attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1032 /* Shared hashtable support. */
1034 /* Return true if VARS is shared. */
1037 shared_hash_shared (shared_hash vars)
1039 return vars->refcount > 1;
1042 /* Return the hash table for VARS. */
1044 static inline htab_t
1045 shared_hash_htab (shared_hash vars)
1050 /* Copy variables into a new hash table. */
1053 shared_hash_unshare (shared_hash vars)
1055 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1056 gcc_assert (vars->refcount > 1);
1057 new_vars->refcount = 1;
1059 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1060 variable_htab_eq, variable_htab_free);
1061 vars_copy (new_vars->htab, vars->htab);
1066 /* Increment reference counter on VARS and return it. */
1068 static inline shared_hash
1069 shared_hash_copy (shared_hash vars)
1075 /* Decrement reference counter and destroy hash table if not shared anymore. */
1079 shared_hash_destroy (shared_hash vars)
1081 gcc_assert (vars->refcount > 0);
1082 if (--vars->refcount == 0)
1084 htab_delete (vars->htab);
1085 pool_free (shared_hash_pool, vars);
1089 /* Unshare *PVARS if shared and return slot for DV. If INS is
1090 INSERT, insert it if not already present. */
1092 static inline void **
1093 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1094 hashval_t dvhash, enum insert_option ins)
1096 if (shared_hash_shared (*pvars))
1097 *pvars = shared_hash_unshare (*pvars);
1098 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1101 static inline void **
1102 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1103 enum insert_option ins)
1105 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1108 /* Return slot for DV, if it is already present in the hash table.
1109 If it is not present, insert it only if VARS is not shared, otherwise return NULL. */
1112 static inline void **
1113 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1115 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1116 shared_hash_shared (vars)
1117 ? NO_INSERT : INSERT);
1120 static inline void **
1121 shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1123 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1126 /* Return slot for DV only if it is already present in the hash table. */
1128 static inline void **
1129 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1132 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1136 static inline void **
1137 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1139 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1142 /* Return variable for DV or NULL if not already present in the hash table. */
1145 static inline variable
1146 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1148 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1151 static inline variable
1152 shared_hash_find (shared_hash vars, decl_or_value dv)
1154 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1157 /* Determine a total order between two distinct pointers. Compare the
1158 pointers as integral types if size_t is wide enough, otherwise
1159 resort to bitwise memory compare. The actual order does not
1160 matter, we just need to be consistent, so endianness is irrelevant. */
1164 tie_break_pointers (const void *p1, const void *p2)
1166 gcc_assert (p1 != p2);
1168 if (sizeof (size_t) >= sizeof (void*))
1169 return (size_t)p1 < (size_t)p2 ? -1 : 1;
1171 return memcmp (&p1, &p2, sizeof (p1));
1174 /* Return true if TVAL is better than CVAL as a canonical value. We
1175 choose lowest-numbered VALUEs, using the RTX address as a
1176 tie-breaker. The idea is to arrange them into a star topology,
1177 such that all of them are at most one step away from the canonical
1178 value, and the canonical value has backlinks to all of them, in
1179 addition to all the actual locations. We don't enforce this
1180 topology throughout the entire dataflow analysis, though.
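For instance, between two VALUEs with cselib value numbers 7 and 12, the one
numbered 7 is the better canonical value; the RTX addresses are consulted only
to break ties between equal numbers (an illustrative reading of the comparison
below).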
1184 canon_value_cmp (rtx tval, rtx cval)
1187 || CSELIB_VAL_PTR (tval)->value < CSELIB_VAL_PTR (cval)->value
1188 || (CSELIB_VAL_PTR (tval)->value == CSELIB_VAL_PTR (cval)->value
1189 && tie_break_pointers (tval, cval) < 0);
1192 static bool dst_can_be_shared;
1194 /* Return a copy of a variable VAR and insert it to dataflow set SET. */
1197 unshare_variable (dataflow_set *set, void **slot, variable var,
1198 enum var_init_status initialized)
1203 new_var = (variable) pool_alloc (dv_pool (var->dv));
1204 new_var->dv = var->dv;
1205 new_var->refcount = 1;
1207 new_var->n_var_parts = var->n_var_parts;
1209 if (! flag_var_tracking_uninit)
1210 initialized = VAR_INIT_STATUS_INITIALIZED;
1212 for (i = 0; i < var->n_var_parts; i++)
1214 location_chain node;
1215 location_chain *nextp;
1217 new_var->var_part[i].offset = var->var_part[i].offset;
1218 nextp = &new_var->var_part[i].loc_chain;
1219 for (node = var->var_part[i].loc_chain; node; node = node->next)
1221 location_chain new_lc;
1223 new_lc = (location_chain) pool_alloc (loc_chain_pool);
1224 new_lc->next = NULL;
1225 if (node->init > initialized)
1226 new_lc->init = node->init;
1228 new_lc->init = initialized;
1229 if (node->set_src && !(MEM_P (node->set_src)))
1230 new_lc->set_src = node->set_src;
1232 new_lc->set_src = NULL;
1233 new_lc->loc = node->loc;
1236 nextp = &new_lc->next;
1239 /* We are at the basic block boundary when copying variable description
1240 so set the CUR_LOC to be the first element of the chain. */
1241 if (new_var->var_part[i].loc_chain)
1242 new_var->var_part[i].cur_loc = new_var->var_part[i].loc_chain->loc;
1244 new_var->var_part[i].cur_loc = NULL;
1247 dst_can_be_shared = false;
1248 if (shared_hash_shared (set->vars))
1249 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1250 else if (set->traversed_vars && set->vars != set->traversed_vars)
1251 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1256 /* Add a variable from *SLOT to hash table DATA and increase its reference count. */
1260 vars_copy_1 (void **slot, void *data)
1262 htab_t dst = (htab_t) data;
1266 src = (variable) *slot;
1269 dstp = htab_find_slot_with_hash (dst, src->dv,
1270 dv_htab_hash (src->dv),
1274 /* Continue traversing the hash table. */
1278 /* Copy all variables from hash table SRC to hash table DST. */
1281 vars_copy (htab_t dst, htab_t src)
1283 htab_traverse_noresize (src, vars_copy_1, dst);
1286 /* Map a decl to its main debug decl. */
1289 var_debug_decl (tree decl)
1291 if (decl && DECL_P (decl)
1292 && DECL_DEBUG_EXPR_IS_FROM (decl) && DECL_DEBUG_EXPR (decl)
1293 && DECL_P (DECL_DEBUG_EXPR (decl)))
1294 decl = DECL_DEBUG_EXPR (decl);
1299 /* Set the register LOC to contain DV, OFFSET. */
1302 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1303 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1304 enum insert_option iopt)
1307 bool decl_p = dv_is_decl_p (dv);
1310 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1312 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1313 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1314 && node->offset == offset)
1317 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1318 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1321 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1324 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1327 tree decl = REG_EXPR (loc);
1328 HOST_WIDE_INT offset = REG_OFFSET (loc);
1330 var_reg_decl_set (set, loc, initialized,
1331 dv_from_decl (decl), offset, set_src, INSERT);
1334 static enum var_init_status
1335 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1339 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1341 if (! flag_var_tracking_uninit)
1342 return VAR_INIT_STATUS_INITIALIZED;
1344 var = shared_hash_find (set->vars, dv);
1347 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1349 location_chain nextp;
1350 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1351 if (rtx_equal_p (nextp->loc, loc))
1353 ret_val = nextp->init;
1362 /* Delete current content of register LOC in dataflow set SET and set
1363 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1364 MODIFY is true, any other live copies of the same variable part are
1365 also deleted from the dataflow set, otherwise the variable part is
1366 assumed to be copied from another location holding the same
1370 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1371 enum var_init_status initialized, rtx set_src)
1373 tree decl = REG_EXPR (loc);
1374 HOST_WIDE_INT offset = REG_OFFSET (loc);
1378 decl = var_debug_decl (decl);
1380 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1381 initialized = get_init_value (set, loc, dv_from_decl (decl));
1383 nextp = &set->regs[REGNO (loc)];
1384 for (node = *nextp; node; node = next)
1387 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1389 delete_variable_part (set, node->loc, node->dv, node->offset);
1390 pool_free (attrs_pool, node);
1396 nextp = &node->next;
1400 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1401 var_reg_set (set, loc, initialized, set_src);
1404 /* Delete current content of register LOC in dataflow set SET. If
1405 CLOBBER is true, also delete any other live copies of the same variable part. */
1409 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1411 attrs *reg = &set->regs[REGNO (loc)];
1416 tree decl = REG_EXPR (loc);
1417 HOST_WIDE_INT offset = REG_OFFSET (loc);
1419 decl = var_debug_decl (decl);
1421 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1424 for (node = *reg; node; node = next)
1427 delete_variable_part (set, node->loc, node->dv, node->offset);
1428 pool_free (attrs_pool, node);
1433 /* Delete content of register with number REGNO in dataflow set SET. */
1436 var_regno_delete (dataflow_set *set, int regno)
1438 attrs *reg = &set->regs[regno];
1441 for (node = *reg; node; node = next)
1444 delete_variable_part (set, node->loc, node->dv, node->offset);
1445 pool_free (attrs_pool, node);
1450 /* Set the location of DV, OFFSET as the MEM LOC. */
1453 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1454 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1455 enum insert_option iopt)
1457 if (dv_is_decl_p (dv))
1458 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1460 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1463 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set SET to LOC.
1465 Adjust the address first if it is stack pointer based. */
1468 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1471 tree decl = MEM_EXPR (loc);
1472 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1474 var_mem_decl_set (set, loc, initialized,
1475 dv_from_decl (decl), offset, set_src, INSERT);
1478 /* Delete and set the location part of variable MEM_EXPR (LOC) in
1479 dataflow set SET to LOC. If MODIFY is true, any other live copies
1480 of the same variable part are also deleted from the dataflow set,
1481 otherwise the variable part is assumed to be copied from another
1482 location holding the same part.
1483 Adjust the address first if it is stack pointer based. */
1486 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1487 enum var_init_status initialized, rtx set_src)
1489 tree decl = MEM_EXPR (loc);
1490 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1492 decl = var_debug_decl (decl);
1494 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1495 initialized = get_init_value (set, loc, dv_from_decl (decl));
1498 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1499 var_mem_set (set, loc, initialized, set_src);
1502 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
1503 true, also delete any other live copies of the same variable part.
1504 Adjust the address first if it is stack pointer based. */
1507 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1509 tree decl = MEM_EXPR (loc);
1510 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1512 decl = var_debug_decl (decl);
1514 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1515 delete_variable_part (set, loc, dv_from_decl (decl), offset);
1518 /* Map a value to a location it was just stored in. */
1521 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn)
1523 cselib_val *v = CSELIB_VAL_PTR (val);
1525 gcc_assert (cselib_preserved_value_p (v));
1529 fprintf (dump_file, "%i: ", INSN_UID (insn));
1530 print_inline_rtx (dump_file, val, 0);
1531 fprintf (dump_file, " stored in ");
1532 print_inline_rtx (dump_file, loc, 0);
1535 struct elt_loc_list *l;
1536 for (l = v->locs; l; l = l->next)
1538 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
1539 print_inline_rtx (dump_file, l->loc, 0);
1542 fprintf (dump_file, "\n");
1547 var_regno_delete (set, REGNO (loc));
1548 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1549 dv_from_value (val), 0, NULL_RTX, INSERT);
1551 else if (MEM_P (loc))
1552 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1553 dv_from_value (val), 0, NULL_RTX, INSERT);
1555 set_variable_part (set, loc, dv_from_value (val), 0,
1556 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1559 /* Reset this node, detaching all its equivalences. Return the slot
1560 in the variable hash table that holds dv, if there is one. */
1563 val_reset (dataflow_set *set, decl_or_value dv)
1565 variable var = shared_hash_find (set->vars, dv) ;
1566 location_chain node;
1569 if (!var || !var->n_var_parts)
1572 gcc_assert (var->n_var_parts == 1);
1575 for (node = var->var_part[0].loc_chain; node; node = node->next)
1576 if (GET_CODE (node->loc) == VALUE
1577 && canon_value_cmp (node->loc, cval))
1580 for (node = var->var_part[0].loc_chain; node; node = node->next)
1581 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
1583 /* Redirect the equivalence link to the new canonical
1584 value, or simply remove it if it would point at itself. */
1587 set_variable_part (set, cval, dv_from_value (node->loc),
1588 0, node->init, node->set_src, NO_INSERT);
1589 delete_variable_part (set, dv_as_value (dv),
1590 dv_from_value (node->loc), 0);
1595 decl_or_value cdv = dv_from_value (cval);
1597 /* Keep the remaining values connected, accumulating links
1598 in the canonical value. */
1599 for (node = var->var_part[0].loc_chain; node; node = node->next)
1601 if (node->loc == cval)
1603 else if (GET_CODE (node->loc) == REG)
1604 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
1605 node->set_src, NO_INSERT);
1606 else if (GET_CODE (node->loc) == MEM)
1607 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
1608 node->set_src, NO_INSERT);
1610 set_variable_part (set, node->loc, cdv, 0,
1611 node->init, node->set_src, NO_INSERT);
1615 /* We remove this last, to make sure that the canonical value is not
1616 removed to the point of requiring reinsertion. */
1618 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
1620 clobber_variable_part (set, NULL, dv, 0, NULL);
1622 /* ??? Should we make sure there aren't other available values or
1623 variables whose values involve this one other than by
1624 equivalence? E.g., at the very least we should reset MEMs, those
1625 shouldn't be too hard to find by cselib-looking up the value as an
1626 address, then locating the resulting value in our own hash table. */
1630 /* Find the values in a given location and map the val to another
1631 value, if it is unique, or add the location as one holding the value. */
1635 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
1637 decl_or_value dv = dv_from_value (val);
1639 if (dump_file && (dump_flags & TDF_DETAILS))
1642 fprintf (dump_file, "%i: ", INSN_UID (insn));
1644 fprintf (dump_file, "head: ");
1645 print_inline_rtx (dump_file, val, 0);
1646 fputs (" is at ", dump_file);
1647 print_inline_rtx (dump_file, loc, 0);
1648 fputc ('\n', dump_file);
1651 val_reset (set, dv);
1655 attrs node, found = NULL;
1657 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1658 if (dv_is_value_p (node->dv)
1659 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
1663 /* Map incoming equivalences. ??? Wouldn't it be nice if
1664 we just started sharing the location lists? Maybe a
1665 circular list ending at the value itself or some such. */
1667 set_variable_part (set, dv_as_value (node->dv),
1668 dv_from_value (val), node->offset,
1669 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1670 set_variable_part (set, val, node->dv, node->offset,
1671 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1674 /* If we didn't find any equivalence, we need to remember that
1675 this value is held in the named register. */
1677 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1678 dv_from_value (val), 0, NULL_RTX, INSERT);
1680 else if (MEM_P (loc))
1681 /* ??? Merge equivalent MEMs. */
1682 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1683 dv_from_value (val), 0, NULL_RTX, INSERT);
1685 /* ??? Merge equivalent expressions. */
1686 set_variable_part (set, loc, dv_from_value (val), 0,
1687 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1690 /* Initialize dataflow set SET to be empty. */
1694 dataflow_set_init (dataflow_set *set)
1696 init_attrs_list_set (set->regs);
1697 set->vars = shared_hash_copy (empty_shared_hash);
1698 set->stack_adjust = 0;
1699 set->traversed_vars = NULL;
1702 /* Delete the contents of dataflow set SET. */
1705 dataflow_set_clear (dataflow_set *set)
1709 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1710 attrs_list_clear (&set->regs[i]);
1712 shared_hash_destroy (set->vars);
1713 set->vars = shared_hash_copy (empty_shared_hash);
1716 /* Copy the contents of dataflow set SRC to DST. */
1719 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
1723 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1724 attrs_list_copy (&dst->regs[i], src->regs[i]);
1726 shared_hash_destroy (dst->vars);
1727 dst->vars = shared_hash_copy (src->vars);
1728 dst->stack_adjust = src->stack_adjust;
1731 /* Information for merging lists of locations for a given offset of a variable. */
1733 struct variable_union_info
1735 /* Node of the location chain. */
1738 /* The sum of positions in the input chains. */
1741 /* The position in the chain of DST dataflow set. */
1745 /* Buffer for location list sorting and its allocated size. */
1746 static struct variable_union_info *vui_vec;
1747 static int vui_allocated;
1749 /* Compare function for qsort, order the structures by POS element. */
1752 variable_union_info_cmp_pos (const void *n1, const void *n2)
1754 const struct variable_union_info *const i1 =
1755 (const struct variable_union_info *) n1;
1756 const struct variable_union_info *const i2 =
1757 ( const struct variable_union_info *) n2;
1759 if (i1->pos != i2->pos)
1760 return i1->pos - i2->pos;
1762 return (i1->pos_dst - i2->pos_dst);
1765 /* Compute union of location parts of variable *SLOT and the same variable
1766 from hash table DATA. Compute "sorted" union of the location chains
1767 for common offsets, i.e. the locations of a variable part are sorted by
1768 a priority where the priority is the sum of the positions in the 2 chains
1769 (if a location is only in one list the position in the second list is
1770 defined to be larger than the length of the chains).
1771 When we are updating the location parts the newest location is in the
1772 beginning of the chain, so when we do the described "sorted" union
1773 we keep the newest locations in the beginning. */
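/* A small illustrative example (the location names are made up): if the DST
   chain is (A, B) and the SRC chain is (A, C), the common location A gets
   priority 0 + 0, while B and C, each present in only one chain, get
   priorities larger than the sum of the chain lengths, so the union comes out
   as (A, B, C) with the common (newest) location first.  */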
1776 variable_union (void **slot, void *data)
1780 dataflow_set *set = (dataflow_set *) data;
1783 src = (variable) *slot;
1784 dstp = shared_hash_find_slot (set->vars, src->dv);
1785 if (!dstp || !*dstp)
1789 dst_can_be_shared = false;
1791 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
1795 /* If CUR_LOC of some variable part is not the first element of
1796 the location chain we are going to change it so we have to make
1797 a copy of the variable. */
1798 for (k = 0; k < src->n_var_parts; k++)
1800 gcc_assert (!src->var_part[k].loc_chain
1801 == !src->var_part[k].cur_loc);
1802 if (src->var_part[k].loc_chain)
1804 gcc_assert (src->var_part[k].cur_loc);
1805 if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
1809 if (k < src->n_var_parts)
1810 dstp = unshare_variable (set, dstp, src, VAR_INIT_STATUS_UNKNOWN);
1812 /* Continue traversing the hash table. */
1816 dst = (variable) *dstp;
1818 gcc_assert (src->n_var_parts);
1820 /* We can combine one-part variables very efficiently, because their
1821 entries are in canonical order. */
1822 if (dv_onepart_p (src->dv))
1824 location_chain *nodep, dnode, snode;
1826 gcc_assert (src->n_var_parts == 1);
1827 gcc_assert (dst->n_var_parts == 1);
1829 snode = src->var_part[0].loc_chain;
1832 restart_onepart_unshared:
1833 nodep = &dst->var_part[0].loc_chain;
1839 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
1843 location_chain nnode;
1845 if (dst->refcount != 1 || shared_hash_shared (set->vars))
1847 dstp = unshare_variable (set, dstp, dst,
1848 VAR_INIT_STATUS_INITIALIZED);
1849 dst = (variable)*dstp;
1850 goto restart_onepart_unshared;
1853 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
1854 nnode->loc = snode->loc;
1855 nnode->init = snode->init;
1856 if (!snode->set_src || MEM_P (snode->set_src))
1857 nnode->set_src = NULL;
1859 nnode->set_src = snode->set_src;
1860 nnode->next = dnode;
1863 #ifdef ENABLE_CHECKING
1865 gcc_assert (rtx_equal_p (dnode->loc, snode->loc));
1869 snode = snode->next;
1871 nodep = &dnode->next;
1875 dst->var_part[0].cur_loc = dst->var_part[0].loc_chain->loc;
1880 /* Count the number of location parts, result is K. */
1881 for (i = 0, j = 0, k = 0;
1882 i < src->n_var_parts && j < dst->n_var_parts; k++)
1884 if (src->var_part[i].offset == dst->var_part[j].offset)
1889 else if (src->var_part[i].offset < dst->var_part[j].offset)
1894 k += src->n_var_parts - i;
1895 k += dst->n_var_parts - j;
1897 /* We track only variables whose size is <= MAX_VAR_PARTS bytes
1898 thus there are at most MAX_VAR_PARTS different offsets. */
1899 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
1901 if ((dst->refcount > 1 || shared_hash_shared (set->vars))
1902 && dst->n_var_parts != k)
1904 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
1905 dst = (variable)*dstp;
1908 i = src->n_var_parts - 1;
1909 j = dst->n_var_parts - 1;
1910 dst->n_var_parts = k;
1912 for (k--; k >= 0; k--)
1914 location_chain node, node2;
1916 if (i >= 0 && j >= 0
1917 && src->var_part[i].offset == dst->var_part[j].offset)
1919 /* Compute the "sorted" union of the chains, i.e. the locations which
1920 are in both chains go first, they are sorted by the sum of
1921 positions in the chains. */
1924 struct variable_union_info *vui;
1926 /* If DST is shared compare the location chains.
1927 If they are different we will modify the chain in DST with
1928 high probability so make a copy of DST. */
1929 if (dst->refcount > 1 || shared_hash_shared (set->vars))
1931 for (node = src->var_part[i].loc_chain,
1932 node2 = dst->var_part[j].loc_chain; node && node2;
1933 node = node->next, node2 = node2->next)
1935 if (!((REG_P (node2->loc)
1936 && REG_P (node->loc)
1937 && REGNO (node2->loc) == REGNO (node->loc))
1938 || rtx_equal_p (node2->loc, node->loc)))
1940 if (node2->init < node->init)
1941 node2->init = node->init;
1947 dstp = unshare_variable (set, dstp, dst,
1948 VAR_INIT_STATUS_UNKNOWN);
1949 dst = (variable)*dstp;
1954 for (node = src->var_part[i].loc_chain; node; node = node->next)
1957 for (node = dst->var_part[j].loc_chain; node; node = node->next)
1962 /* The most common case, much simpler, no qsort is needed. */
1963 location_chain dstnode = dst->var_part[j].loc_chain;
1964 dst->var_part[k].loc_chain = dstnode;
1965 dst->var_part[k].offset = dst->var_part[j].offset;
1967 for (node = src->var_part[i].loc_chain; node; node = node->next)
1968 if (!((REG_P (dstnode->loc)
1969 && REG_P (node->loc)
1970 && REGNO (dstnode->loc) == REGNO (node->loc))
1971 || rtx_equal_p (dstnode->loc, node->loc)))
1973 location_chain new_node;
1975 /* Copy the location from SRC. */
1976 new_node = (location_chain) pool_alloc (loc_chain_pool);
1977 new_node->loc = node->loc;
1978 new_node->init = node->init;
1979 if (!node->set_src || MEM_P (node->set_src))
1980 new_node->set_src = NULL;
1982 new_node->set_src = node->set_src;
1983 node2->next = new_node;
1990 if (src_l + dst_l > vui_allocated)
1992 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
1993 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
1998 /* Fill in the locations from DST. */
1999 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2000 node = node->next, jj++)
2003 vui[jj].pos_dst = jj;
2005 /* Pos plus value larger than a sum of 2 valid positions. */
2006 vui[jj].pos = jj + src_l + dst_l;
2009 /* Fill in the locations from SRC. */
2011 for (node = src->var_part[i].loc_chain, ii = 0; node;
2012 node = node->next, ii++)
2014 /* Find location from NODE. */
2015 for (jj = 0; jj < dst_l; jj++)
2017 if ((REG_P (vui[jj].lc->loc)
2018 && REG_P (node->loc)
2019 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2020 || rtx_equal_p (vui[jj].lc->loc, node->loc))
2022 vui[jj].pos = jj + ii;
2026 if (jj >= dst_l) /* The location has not been found. */
2028 location_chain new_node;
2030 /* Copy the location from SRC. */
2031 new_node = (location_chain) pool_alloc (loc_chain_pool);
2032 new_node->loc = node->loc;
2033 new_node->init = node->init;
2034 if (!node->set_src || MEM_P (node->set_src))
2035 new_node->set_src = NULL;
2037 new_node->set_src = node->set_src;
2038 vui[n].lc = new_node;
2039 vui[n].pos_dst = src_l + dst_l;
2040 vui[n].pos = ii + src_l + dst_l;
2047 /* A special case of a still very common case. For dst_l == 2
2048 all entries dst_l ... n-1 are sorted, with for i >= dst_l
2049 vui[i].pos == i + src_l + dst_l. */
2050 if (vui[0].pos > vui[1].pos)
2052 /* Order should be 1, 0, 2... */
2053 dst->var_part[k].loc_chain = vui[1].lc;
2054 vui[1].lc->next = vui[0].lc;
2057 vui[0].lc->next = vui[2].lc;
2058 vui[n - 1].lc->next = NULL;
2061 vui[0].lc->next = NULL;
2066 dst->var_part[k].loc_chain = vui[0].lc;
2067 if (n >= 3 && vui[2].pos < vui[1].pos)
2069 /* Order should be 0, 2, 1, 3... */
2070 vui[0].lc->next = vui[2].lc;
2071 vui[2].lc->next = vui[1].lc;
2074 vui[1].lc->next = vui[3].lc;
2075 vui[n - 1].lc->next = NULL;
2078 vui[1].lc->next = NULL;
2083 /* Order should be 0, 1, 2... */
2085 vui[n - 1].lc->next = NULL;
2088 for (; ii < n; ii++)
2089 vui[ii - 1].lc->next = vui[ii].lc;
2093 qsort (vui, n, sizeof (struct variable_union_info),
2094 variable_union_info_cmp_pos);
2096 /* Reconnect the nodes in sorted order. */
2097 for (ii = 1; ii < n; ii++)
2098 vui[ii - 1].lc->next = vui[ii].lc;
2099 vui[n - 1].lc->next = NULL;
2100 dst->var_part[k].loc_chain = vui[0].lc;
2103 dst->var_part[k].offset = dst->var_part[j].offset;
2108 else if ((i >= 0 && j >= 0
2109 && src->var_part[i].offset < dst->var_part[j].offset)
2112 dst->var_part[k] = dst->var_part[j];
2115 else if ((i >= 0 && j >= 0
2116 && src->var_part[i].offset > dst->var_part[j].offset)
2119 location_chain *nextp;
2121 /* Copy the chain from SRC. */
2122 nextp = &dst->var_part[k].loc_chain;
2123 for (node = src->var_part[i].loc_chain; node; node = node->next)
2125 location_chain new_lc;
2127 new_lc = (location_chain) pool_alloc (loc_chain_pool);
2128 new_lc->next = NULL;
2129 new_lc->init = node->init;
2130 if (!node->set_src || MEM_P (node->set_src))
2131 new_lc->set_src = NULL;
2133 new_lc->set_src = node->set_src;
2134 new_lc->loc = node->loc;
2137 nextp = &new_lc->next;
2140 dst->var_part[k].offset = src->var_part[i].offset;
2144 /* We are at the basic block boundary when computing union
2145 so set the CUR_LOC to be the first element of the chain. */
2146 if (dst->var_part[k].loc_chain)
2147 dst->var_part[k].cur_loc = dst->var_part[k].loc_chain->loc;
2149 dst->var_part[k].cur_loc = NULL;
2152 if (flag_var_tracking_uninit)
2153 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2155 location_chain node, node2;
2156 for (node = src->var_part[i].loc_chain; node; node = node->next)
2157 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2158 if (rtx_equal_p (node->loc, node2->loc))
2160 if (node->init > node2->init)
2161 node2->init = node->init;
2165 /* Continue traversing the hash table. */
2169 /* Like variable_union, but only used when doing dataflow_set_union
2170 into an empty hashtab. To allow sharing, dst is initially shared
2171 with src (so all variables are "copied" from src to dst hashtab),
2172 so only unshare_variable for variables that need canonicalization
2176 variable_canonicalize (void **slot, void *data)
2179 dataflow_set *set = (dataflow_set *) data;
2182 src = *(variable *) slot;
2184 /* If CUR_LOC of some variable part is not the first element of
2185 the location chain we are going to change it so we have to make
2186 a copy of the variable. */
2187 for (k = 0; k < src->n_var_parts; k++)
2189 gcc_assert (!src->var_part[k].loc_chain == !src->var_part[k].cur_loc);
2190 if (src->var_part[k].loc_chain)
2192 gcc_assert (src->var_part[k].cur_loc);
2193 if (src->var_part[k].cur_loc != src->var_part[k].loc_chain->loc)
2197 if (k < src->n_var_parts)
2198 slot = unshare_variable (set, slot, src, VAR_INIT_STATUS_UNKNOWN);
2202 /* Compute union of dataflow sets SRC and DST and store it to DST. */
2205 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2209 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2210 attrs_list_union (&dst->regs[i], src->regs[i]);
2212 if (dst->vars == empty_shared_hash)
2214 shared_hash_destroy (dst->vars);
2215 dst->vars = shared_hash_copy (src->vars);
2216 dst->traversed_vars = dst->vars;
2217 htab_traverse (shared_hash_htab (dst->vars), variable_canonicalize, dst);
2218 dst->traversed_vars = NULL;
2221 htab_traverse (shared_hash_htab (src->vars), variable_union, dst);
2224 /* Whether the value is currently being expanded. */
2225 #define VALUE_RECURSED_INTO(x) \
2226 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2227 /* Whether the value is in changed_variables hash table. */
2228 #define VALUE_CHANGED(x) \
2229 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2230 /* Whether the decl is in changed_variables hash table. */
2231 #define DECL_CHANGED(x) TREE_VISITED (x)
2233 /* Record that DV has been added to resp. removed from the changed_variables hash table. */
2237 set_dv_changed (decl_or_value dv, bool newv)
2239 if (dv_is_value_p (dv))
2240 VALUE_CHANGED (dv_as_value (dv)) = newv;
2242 DECL_CHANGED (dv_as_decl (dv)) = newv;
2245 /* Return true if DV is present in changed_variables hash table. */
2248 dv_changed_p (decl_or_value dv)
2250 return (dv_is_value_p (dv)
2251 ? VALUE_CHANGED (dv_as_value (dv))
2252 : DECL_CHANGED (dv_as_decl (dv)));
2255 /* Return a location list node whose loc is rtx_equal to LOC, in the
2256 location list of a one-part variable or value VAR, or in that of
2257 any values recursively mentioned in the location lists. */
2259 static location_chain
2260 find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2262 location_chain node;
2267 gcc_assert (dv_onepart_p (var->dv));
2269 if (!var->n_var_parts)
2272 gcc_assert (var->var_part[0].offset == 0);
2274 for (node = var->var_part[0].loc_chain; node; node = node->next)
2275 if (rtx_equal_p (loc, node->loc))
2277 else if (GET_CODE (node->loc) == VALUE
2278 && !VALUE_RECURSED_INTO (node->loc))
2280 decl_or_value dv = dv_from_value (node->loc);
2281 variable var = (variable)
2282 htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2286 location_chain where;
2287 VALUE_RECURSED_INTO (node->loc) = true;
2288 if ((where = find_loc_in_1pdv (loc, var, vars)))
2290 VALUE_RECURSED_INTO (node->loc) = false;
2293 VALUE_RECURSED_INTO (node->loc) = false;
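/* For illustration (hypothetical chains): if VAR's location list is
   (reg:SI 3), (value V1), and the one-part entry for V1 in VARS lists
   (mem:SI (reg:SI 6)), then looking that MEM up in VAR succeeds via the
   recursion into V1 above.  Setting VALUE_RECURSED_INTO around the
   recursive call is what keeps cycles among values (V1 listing V2 while
   V2 lists V1) from recursing forever.  */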
2300 /* Hash table iteration argument passed to variable_merge. */
2303 /* The set in which the merge is to be inserted. */
2305 /* The set that we're iterating in. */
2307 /* The set that may contain the other dv we are to merge with. */
2309 /* Number of onepart dvs in src. */
2310 int src_onepart_cnt;
2313 /* Insert LOC in *NODEP, if it's not there yet. The list must be in
2314 loc_cmp order, and it is maintained as such. */
2317 insert_into_intersection (location_chain *nodep, rtx loc,
2318 enum var_init_status status)
2320 location_chain node;
2323 for (node = *nodep; node; nodep = &node->next, node = *nodep)
2324 if ((r = loc_cmp (node->loc, loc)) == 0)
2326 node->init = MIN (node->init, status);
2332 node = (location_chain) pool_alloc (loc_chain_pool);
2335 node->set_src = NULL;
2336 node->init = status;
2337 node->next = *nodep;
2341 /* Insert in DEST the intersection of the locations present in both
2342 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
2343 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
2347 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2348 location_chain s1node, variable s2var)
2350 dataflow_set *s1set = dsm->cur;
2351 dataflow_set *s2set = dsm->src;
2352 location_chain found;
2354 for (; s1node; s1node = s1node->next)
2356 if (s1node->loc == val)
2359 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2360 shared_hash_htab (s2set->vars))))
2362 insert_into_intersection (dest, s1node->loc,
2363 MIN (s1node->init, found->init));
2367 if (GET_CODE (s1node->loc) == VALUE
2368 && !VALUE_RECURSED_INTO (s1node->loc))
2370 decl_or_value dv = dv_from_value (s1node->loc);
2371 variable svar = shared_hash_find (s1set->vars, dv);
2374 if (svar->n_var_parts == 1)
2376 VALUE_RECURSED_INTO (s1node->loc) = true;
2377 intersect_loc_chains (val, dest, dsm,
2378 svar->var_part[0].loc_chain,
2380 VALUE_RECURSED_INTO (s1node->loc) = false;
2385 /* ??? if the location is equivalent to any location in src,
2386 searched recursively
2388 add to dst the values needed to represent the equivalence
2390 telling whether location S is equivalent to another dv's
2393 for each location D in the list
2395 if S and D satisfy rtx_equal_p, then it is present
2397 else if D is a value, recurse without cycles
2399 else if S and D have the same CODE and MODE
2401 for each operand oS and the corresponding oD
2403 if oS and oD are not equivalent, then S and D are not equivalent
2405 else if they are RTX vectors
2407 if any vector oS element is not equivalent to its respective oD,
2408 then S and D are not equivalent
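/* An informal illustration of the scheme sketched above (this is a
   design note, not a transcript of the code that follows): deciding
   whether (plus:SI (value V1) (const_int 4)) from one set matches
   (plus:SI (value V2) (const_int 4)) from the other reduces to a
   pairwise comparison of operands; the CONST_INTs match directly, so
   everything hinges on whether V1 and V2 are equivalent, which would
   be answered by walking their location lists with the usual recursion
   guard.  */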
2416 /* Return -1 if X should be before Y in a location list for a 1-part
2417 variable, 1 if Y should be before X, and 0 if they're equivalent
2418 and should not appear in the list. */
2421 loc_cmp (rtx x, rtx y)
2424 RTX_CODE code = GET_CODE (x);
2434 gcc_assert (GET_MODE (x) == GET_MODE (y));
2435 if (REGNO (x) == REGNO (y))
2437 else if (REGNO (x) < REGNO (y))
2450 gcc_assert (GET_MODE (x) == GET_MODE (y));
2451 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
2457 if (GET_CODE (x) == VALUE)
2459 if (GET_CODE (y) != VALUE)
2461 gcc_assert (GET_MODE (x) == GET_MODE (y));
2462 if (canon_value_cmp (x, y))
2468 if (GET_CODE (y) == VALUE)
2471 if (GET_CODE (x) == GET_CODE (y))
2472 /* Compare operands below. */;
2473 else if (GET_CODE (x) < GET_CODE (y))
2478 gcc_assert (GET_MODE (x) == GET_MODE (y));
2480 fmt = GET_RTX_FORMAT (code);
2481 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2485 if (XWINT (x, i) == XWINT (y, i))
2487 else if (XWINT (x, i) < XWINT (y, i))
2494 if (XINT (x, i) == XINT (y, i))
2496 else if (XINT (x, i) < XINT (y, i))
2503 /* Compare the vector length first. */
2504 if (XVECLEN (x, i) == XVECLEN (y, i))
2505 /* Compare the vectors elements. */;
2506 else if (XVECLEN (x, i) < XVECLEN (y, i))
2511 for (j = 0; j < XVECLEN (x, i); j++)
2512 if ((r = loc_cmp (XVECEXP (x, i, j),
2513 XVECEXP (y, i, j))))
2518 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
2524 if (XSTR (x, i) == XSTR (y, i))
2530 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2538 /* These are just backpointers, so they don't matter. */
2545 /* It is believed that rtx's at this level will never
2546 contain anything but integers and other rtx's,
2547 except for within LABEL_REFs and SYMBOL_REFs. */
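/* For illustration: given the comparisons above, a one-part location
   list kept in loc_cmp order lists registers first (ordered by REGNO),
   then MEMs (ordered by their addresses), then VALUEs (ordered by
   canon_value_cmp), and finally other RTXs ordered by code, mode and
   operands.  This is the ordering that the "canonicalization puts
   registers first" remark in variable_merge_over_cur relies on.  */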
2555 /* If decl or value DVP refers to VALUE from *LOC, add backlinks
2556 from VALUE to DVP. */
2559 add_value_chain (rtx *loc, void *dvp)
2561 if (GET_CODE (*loc) == VALUE && (void *) *loc != dvp)
2563 decl_or_value dv = (decl_or_value) dvp;
2564 decl_or_value ldv = dv_from_value (*loc);
2565 value_chain vc, nvc;
2566 void **slot = htab_find_slot_with_hash (value_chains, ldv,
2567 dv_htab_hash (ldv), INSERT);
2570 vc = (value_chain) pool_alloc (value_chain_pool);
2574 *slot = (void *) vc;
2578 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2579 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
2587 vc = (value_chain) *slot;
2588 nvc = (value_chain) pool_alloc (value_chain_pool);
2590 nvc->next = vc->next;
2597 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2598 from those VALUEs to DVP. */
2601 add_value_chains (decl_or_value dv, rtx loc)
2603 if (GET_CODE (loc) == VALUE)
2605 add_value_chain (&loc, dv_as_opaque (dv));
2611 loc = XEXP (loc, 0);
2612 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
2615 /* If CSELIB_VAL_PTR of value DV refers to VALUEs, add backlinks from those
2619 add_cselib_value_chains (decl_or_value dv)
2621 struct elt_loc_list *l;
2623 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2624 for_each_rtx (&l->loc, add_value_chain, dv_as_opaque (dv));
2627 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks
2628 from VALUE to DVP. */
2631 remove_value_chain (rtx *loc, void *dvp)
2633 if (GET_CODE (*loc) == VALUE && (void *) *loc != dvp)
2635 decl_or_value dv = (decl_or_value) dvp;
2636 decl_or_value ldv = dv_from_value (*loc);
2637 value_chain vc, dvc = NULL;
2638 void **slot = htab_find_slot_with_hash (value_chains, ldv,
2639 dv_htab_hash (ldv), NO_INSERT);
2640 for (vc = (value_chain) *slot; vc->next; vc = vc->next)
2641 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
2644 gcc_assert (dvc->refcount > 0);
2645 if (--dvc->refcount == 0)
2647 vc->next = dvc->next;
2648 pool_free (value_chain_pool, dvc);
2649 if (vc->next == NULL && vc == (value_chain) *slot)
2651 pool_free (value_chain_pool, vc);
2652 htab_clear_slot (value_chains, slot);
2662 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
2663 from those VALUEs to DVP. */
2666 remove_value_chains (decl_or_value dv, rtx loc)
2668 if (GET_CODE (loc) == VALUE)
2670 remove_value_chain (&loc, dv_as_opaque (dv));
2676 loc = XEXP (loc, 0);
2677 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
2680 /* If CSELIB_VAL_PTR of value DV refers to VALUEs, remove backlinks from those
2684 remove_cselib_value_chains (decl_or_value dv)
2686 struct elt_loc_list *l;
2688 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
2689 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
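/* A small example of the bookkeeping above (hypothetical RTL): if the
   location list of variable X contains (plus:SI (value V) (const_int 8)),
   add_value_chains (X, loc) records a backlink from V to X in the
   value_chains table, bumping a reference count if the link already
   exists; remove_value_chains drops it again when the location goes
   away.  The backlinks give later processing a way to find every decl
   or value whose location mentions V whenever V itself changes.  */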
2693 /* Check the order of entries in one-part variables. */
2696 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
2698 variable var = (variable) *slot;
2699 decl_or_value dv = var->dv;
2700 location_chain node, next;
2702 if (!dv_onepart_p (dv))
2705 gcc_assert (var->n_var_parts == 1);
2706 node = var->var_part[0].loc_chain;
2709 while ((next = node->next))
2711 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
2719 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
2720 more likely to be chosen as canonical for an equivalence set.
2721 Ensure less likely values can reach more likely neighbors, making
2722 the connections bidirectional. */
2725 canonicalize_values_mark (void **slot, void *data)
2727 dataflow_set *set = (dataflow_set *)data;
2728 variable var = (variable) *slot;
2729 decl_or_value dv = var->dv;
2731 location_chain node;
2733 if (!dv_is_value_p (dv))
2736 gcc_assert (var->n_var_parts == 1);
2738 val = dv_as_value (dv);
2740 for (node = var->var_part[0].loc_chain; node; node = node->next)
2741 if (GET_CODE (node->loc) == VALUE)
2743 if (canon_value_cmp (node->loc, val))
2744 VALUE_RECURSED_INTO (val) = true;
2747 decl_or_value odv = dv_from_value (node->loc);
2748 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
2750 oslot = set_slot_part (set, val, oslot, odv, 0,
2751 node->init, NULL_RTX);
2753 VALUE_RECURSED_INTO (node->loc) = true;
2760 /* Remove redundant entries from equivalence lists in onepart
2761 variables, canonicalizing equivalence sets into star shapes. */
2764 canonicalize_values_star (void **slot, void *data)
2766 dataflow_set *set = (dataflow_set *)data;
2767 variable var = (variable) *slot;
2768 decl_or_value dv = var->dv;
2769 location_chain node;
2776 if (!dv_onepart_p (dv))
2779 gcc_assert (var->n_var_parts == 1);
2781 if (dv_is_value_p (dv))
2783 cval = dv_as_value (dv);
2784 if (!VALUE_RECURSED_INTO (cval))
2786 VALUE_RECURSED_INTO (cval) = false;
2796 gcc_assert (var->n_var_parts == 1);
2798 for (node = var->var_part[0].loc_chain; node; node = node->next)
2799 if (GET_CODE (node->loc) == VALUE)
2802 if (VALUE_RECURSED_INTO (node->loc))
2804 if (canon_value_cmp (node->loc, cval))
2813 if (!has_marks || dv_is_decl_p (dv))
2816 /* Keep it marked so that we revisit it, either after visiting a
2817 child node, or after visiting a new parent that might be
2819 VALUE_RECURSED_INTO (val) = true;
2821 for (node = var->var_part[0].loc_chain; node; node = node->next)
2822 if (GET_CODE (node->loc) == VALUE
2823 && VALUE_RECURSED_INTO (node->loc))
2827 VALUE_RECURSED_INTO (cval) = false;
2828 dv = dv_from_value (cval);
2829 slot = shared_hash_find_slot_noinsert (set->vars, dv);
2832 gcc_assert (dv_is_decl_p (var->dv));
2833 /* The canonical value was reset and dropped.
2835 clobber_variable_part (set, NULL, var->dv, 0, NULL);
2838 var = (variable)*slot;
2839 gcc_assert (dv_is_value_p (var->dv));
2840 if (var->n_var_parts == 0)
2842 gcc_assert (var->n_var_parts == 1);
2846 VALUE_RECURSED_INTO (val) = false;
2851 /* Push values to the canonical one. */
2852 cdv = dv_from_value (cval);
2853 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
2855 for (node = var->var_part[0].loc_chain; node; node = node->next)
2856 if (node->loc != cval)
2858 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
2859 node->init, NULL_RTX);
2860 if (GET_CODE (node->loc) == VALUE)
2862 decl_or_value ndv = dv_from_value (node->loc);
2864 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
2867 if (canon_value_cmp (node->loc, val))
2869 /* If it could have been a local minimum, it's not any more,
2870 since it's now neighbor to cval, so it may have to push
2871 to it. Conversely, if it wouldn't have prevailed over
2872 val, then whatever mark it has is fine: if it was to
2873 push, it will now push to a more canonical node, but if
2874 it wasn't, then it has already pushed any values it might
2876 VALUE_RECURSED_INTO (node->loc) = true;
2877 /* Make sure we visit node->loc by ensuring that cval is
2879 VALUE_RECURSED_INTO (cval) = true;
2881 else if (!VALUE_RECURSED_INTO (node->loc))
2882 /* If we have no need to "recurse" into this node, it's
2883 already "canonicalized", so drop the link to the old
2885 clobber_variable_part (set, cval, ndv, 0, NULL);
2887 else if (GET_CODE (node->loc) == REG)
2889 attrs list = set->regs[REGNO (node->loc)], *listp;
2891 /* Change an existing attribute referring to dv so that it
2892 refers to cdv, removing any duplicate this might
2893 introduce, and checking that no previous duplicates
2894 existed, all in a single pass. */
2898 if (list->offset == 0
2899 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2900 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
2907 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2910 for (listp = &list->next; (list = *listp); listp = &list->next)
2915 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2917 *listp = list->next;
2918 pool_free (attrs_pool, list);
2923 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
2926 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
2928 for (listp = &list->next; (list = *listp); listp = &list->next)
2933 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
2935 *listp = list->next;
2936 pool_free (attrs_pool, list);
2941 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
2950 if (list->offset == 0
2951 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
2952 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
2962 cslot = set_slot_part (set, val, cslot, cdv, 0,
2963 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
2965 slot = clobber_slot_part (set, cval, slot, 0, NULL);
2967 /* Variable may have been unshared. */
2968 var = (variable)*slot;
2969 gcc_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
2970 && var->var_part[0].loc_chain->next == NULL);
2972 if (VALUE_RECURSED_INTO (cval))
2973 goto restart_with_cval;
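/* For illustration (hypothetical values): suppose V1, V2 and V3 are
   equivalent and canon_value_cmp ranks V3 as the most canonical of the
   three.  After canonicalize_values_star has processed them, V3's
   location list carries the whole equivalence set (plus any registers
   or MEMs), while V1 and V2 are each left with a single link to V3 --
   the "star shape" mentioned above, with the canonical value at the
   center.  */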
2978 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
2979 corresponding entry in DSM->src. Multi-part variables are combined
2980 with variable_union, whereas onepart dvs are combined with
2984 variable_merge_over_cur (void **s1slot, void *data)
2986 struct dfset_merge *dsm = (struct dfset_merge *)data;
2987 dataflow_set *dst = dsm->dst;
2989 variable s1var = (variable) *s1slot;
2990 variable s2var, dvar = NULL;
2991 decl_or_value dv = s1var->dv;
2992 bool onepart = dv_onepart_p (dv);
2995 location_chain node, *nodep;
2997 /* If the incoming onepart variable has an empty location list, then
2998 the intersection will be just as empty. For other variables,
2999 it's always union. */
3000 gcc_assert (s1var->n_var_parts);
3001 gcc_assert (s1var->var_part[0].loc_chain);
3004 return variable_union (s1slot, dst);
3006 gcc_assert (s1var->n_var_parts == 1);
3007 gcc_assert (s1var->var_part[0].offset == 0);
3009 dvhash = dv_htab_hash (dv);
3010 if (dv_is_value_p (dv))
3011 val = dv_as_value (dv);
3015 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3018 dst_can_be_shared = false;
3022 dsm->src_onepart_cnt--;
3023 gcc_assert (s2var->var_part[0].loc_chain);
3024 gcc_assert (s2var->n_var_parts == 1);
3025 gcc_assert (s2var->var_part[0].offset == 0);
3027 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3030 dvar = (variable)*dstslot;
3031 gcc_assert (dvar->refcount == 1);
3032 gcc_assert (dvar->n_var_parts == 1);
3033 gcc_assert (dvar->var_part[0].offset == 0);
3034 nodep = &dvar->var_part[0].loc_chain;
3042 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3044 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3046 *dstslot = dvar = s2var;
3051 dst_can_be_shared = false;
3053 intersect_loc_chains (val, nodep, dsm,
3054 s1var->var_part[0].loc_chain, s2var);
3060 dvar = (variable) pool_alloc (dv_pool (dv));
3063 dvar->n_var_parts = 1;
3064 dvar->var_part[0].offset = 0;
3065 dvar->var_part[0].loc_chain = node;
3066 dvar->var_part[0].cur_loc = node->loc;
3069 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3071 gcc_assert (!*dstslot);
3079 nodep = &dvar->var_part[0].loc_chain;
3080 while ((node = *nodep))
3082 location_chain *nextp = &node->next;
3084 if (GET_CODE (node->loc) == REG)
3088 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3089 if (GET_MODE (node->loc) == GET_MODE (list->loc)
3090 && dv_is_value_p (list->dv))
3094 attrs_list_insert (&dst->regs[REGNO (node->loc)],
3096 /* If this value became canonical for another value that had
3097 this register, we want to leave it alone. */
3098 else if (dv_as_value (list->dv) != val)
3100 dstslot = set_slot_part (dst, dv_as_value (list->dv),
3102 node->init, NULL_RTX);
3103 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3105 /* Since nextp points into the removed node, we can't
3106 use it. The pointer to the next node moved to nodep.
3107 However, if the variable we're walking is unshared
3108 during our walk, we'll keep walking the location list
3109 of the previously-shared variable, in which case the
3110 node won't have been removed, and we'll want to skip
3111 it. That's why we test *nodep here. */
3117 /* Canonicalization puts registers first, so we don't have to
3123 if (dvar != (variable)*dstslot)
3124 dvar = (variable)*dstslot;
3125 nodep = &dvar->var_part[0].loc_chain;
3129 /* Mark all referenced nodes for canonicalization, and make sure
3130 we have mutual equivalence links. */
3131 VALUE_RECURSED_INTO (val) = true;
3132 for (node = *nodep; node; node = node->next)
3133 if (GET_CODE (node->loc) == VALUE)
3135 VALUE_RECURSED_INTO (node->loc) = true;
3136 set_variable_part (dst, val, dv_from_value (node->loc), 0,
3137 node->init, NULL, INSERT);
3140 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3141 gcc_assert (*dstslot == dvar);
3142 canonicalize_values_star (dstslot, dst);
3143 #ifdef ENABLE_CHECKING
3145 == shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash));
3147 dvar = (variable)*dstslot;
3151 bool has_value = false, has_other = false;
3153 /* If we have one value and anything else, we're going to
3154 canonicalize this, so make sure all values have an entry in
3155 the table and are marked for canonicalization. */
3156 for (node = *nodep; node; node = node->next)
3158 if (GET_CODE (node->loc) == VALUE)
3160 /* If this was marked during register canonicalization,
3161 we know we have to canonicalize values. */
3176 if (has_value && has_other)
3178 for (node = *nodep; node; node = node->next)
3180 if (GET_CODE (node->loc) == VALUE)
3182 decl_or_value dv = dv_from_value (node->loc);
3185 if (shared_hash_shared (dst->vars))
3186 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3188 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3192 variable var = (variable) pool_alloc (dv_pool (dv));
3195 var->n_var_parts = 1;
3196 var->var_part[0].offset = 0;
3197 var->var_part[0].loc_chain = NULL;
3198 var->var_part[0].cur_loc = NULL;
3202 VALUE_RECURSED_INTO (node->loc) = true;
3206 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3207 gcc_assert (*dstslot == dvar);
3208 canonicalize_values_star (dstslot, dst);
3209 #ifdef ENABLE_CHECKING
3211 == shared_hash_find_slot_noinsert_1 (dst->vars,
3214 dvar = (variable)*dstslot;
3218 if (!onepart_variable_different_p (dvar, s2var))
3220 variable_htab_free (dvar);
3221 *dstslot = dvar = s2var;
3224 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3226 variable_htab_free (dvar);
3227 *dstslot = dvar = s1var;
3229 dst_can_be_shared = false;
3233 if (dvar->refcount == 1)
3234 dvar->var_part[0].cur_loc = dvar->var_part[0].loc_chain->loc;
3235 dst_can_be_shared = false;
3241 /* Combine the variable in *S2SLOT (in DSM->src) with the corresponding
3242 entry in DSM->dst. Only multi-part variables are combined, using
3243 variable_union. onepart dvs were already combined with
3244 intersection in variable_merge_over_cur(). */
3247 variable_merge_over_src (void **s2slot, void *data)
3249 struct dfset_merge *dsm = (struct dfset_merge *)data;
3250 dataflow_set *dst = dsm->dst;
3251 variable s2var = (variable) *s2slot;
3252 decl_or_value dv = s2var->dv;
3253 bool onepart = dv_onepart_p (dv);
3257 void **dstp = shared_hash_find_slot (dst->vars, dv);
3260 return variable_canonicalize (dstp, dst);
3263 dsm->src_onepart_cnt++;
3267 /* Combine dataflow set information from SRC into DST, merging it with
3268 DST's previous contents (saved locally as SRC2). */
3271 dataflow_set_merge (dataflow_set *dst, dataflow_set *src)
3273 dataflow_set src2 = *dst;
3274 struct dfset_merge dsm;
3276 size_t src_elems, dst_elems;
3278 src_elems = htab_elements (shared_hash_htab (src->vars));
3279 dst_elems = htab_elements (shared_hash_htab (src2.vars));
3280 dataflow_set_init (dst);
3281 dst->stack_adjust = src2.stack_adjust;
3282 shared_hash_destroy (dst->vars);
3283 dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3284 dst->vars->refcount = 1;
3286 = htab_create (MAX (src_elems, dst_elems), variable_htab_hash,
3287 variable_htab_eq, variable_htab_free);
3289 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3290 attrs_list_mpdv_union (&dst->regs[i], src->regs[i], src2.regs[i]);
3295 dsm.src_onepart_cnt = 0;
3297 htab_traverse (shared_hash_htab (dsm.src->vars), variable_merge_over_src,
3299 htab_traverse (shared_hash_htab (dsm.cur->vars), variable_merge_over_cur,
3302 if (dsm.src_onepart_cnt)
3303 dst_can_be_shared = false;
3305 dataflow_set_destroy (&src2);
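/* A sketch of the overall effect, as implied by the helpers above:
   dataflow_set_merge (dst, src) rebuilds DST from its own previous
   contents (saved in the local copy SRC2) and from SRC.  Multi-part
   variables are merged with variable_union, i.e. their location lists
   are unioned, whereas one-part decls and values go through
   intersect_loc_chains, so DST keeps only locations that appear,
   directly or through value equivalences, in both incoming sets.  */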
3308 /* Mark register equivalences. */
3311 dataflow_set_equiv_regs (dataflow_set *set)
3316 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3318 rtx canon[NUM_MACHINE_MODES];
3320 memset (canon, 0, sizeof (canon));
3322 for (list = set->regs[i]; list; list = list->next)
3323 if (list->offset == 0 && dv_is_value_p (list->dv))
3325 rtx val = dv_as_value (list->dv);
3326 rtx *cvalp = &canon[(int)GET_MODE (val)];
3329 if (canon_value_cmp (val, cval))
3333 for (list = set->regs[i]; list; list = list->next)
3334 if (list->offset == 0 && dv_onepart_p (list->dv))
3336 rtx cval = canon[(int)GET_MODE (list->loc)];
3341 if (dv_is_value_p (list->dv))
3343 rtx val = dv_as_value (list->dv);
3348 VALUE_RECURSED_INTO (val) = true;
3349 set_variable_part (set, val, dv_from_value (cval), 0,
3350 VAR_INIT_STATUS_INITIALIZED,
3354 VALUE_RECURSED_INTO (cval) = true;
3355 set_variable_part (set, cval, list->dv, 0,
3356 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
3359 for (listp = &set->regs[i]; (list = *listp);
3360 listp = list ? &list->next : listp)
3361 if (list->offset == 0 && dv_onepart_p (list->dv))
3363 rtx cval = canon[(int)GET_MODE (list->loc)];
3369 if (dv_is_value_p (list->dv))
3371 rtx val = dv_as_value (list->dv);
3372 if (!VALUE_RECURSED_INTO (val))
3376 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3377 canonicalize_values_star (slot, set);
3384 /* Remove any redundant values in the location list of VAR, which must
3385 be unshared and 1-part. */
3388 remove_duplicate_values (variable var)
3390 location_chain node, *nodep;
3392 gcc_assert (dv_onepart_p (var->dv));
3393 gcc_assert (var->n_var_parts == 1);
3394 gcc_assert (var->refcount == 1);
3396 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3398 if (GET_CODE (node->loc) == VALUE)
3400 if (VALUE_RECURSED_INTO (node->loc))
3402 /* Remove duplicate value node. */
3403 *nodep = node->next;
3404 pool_free (loc_chain_pool, node);
3408 VALUE_RECURSED_INTO (node->loc) = true;
3410 nodep = &node->next;
3413 for (node = var->var_part[0].loc_chain; node; node = node->next)
3414 if (GET_CODE (node->loc) == VALUE)
3416 gcc_assert (VALUE_RECURSED_INTO (node->loc));
3417 VALUE_RECURSED_INTO (node->loc) = false;
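/* For illustration (hypothetical list): given the location list
   (value V1), (reg:SI 2), (value V1), the first walk above marks V1 the
   first time it is seen and frees the second, already-marked node,
   leaving (value V1), (reg:SI 2); the second walk then clears the marks
   so that VALUE_RECURSED_INTO is left clean for other users.  */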
3422 /* Hash table iteration argument passed to variable_post_merge. */
3423 struct dfset_post_merge
3425 /* The new input set for the current block. */
3427 /* Pointer to the permanent input set for the current block, or
3429 dataflow_set **permp;
3432 /* Create values for incoming expressions associated with one-part
3433 variables that don't have value numbers for them. */
3436 variable_post_merge_new_vals (void **slot, void *info)
3438 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3439 dataflow_set *set = dfpm->set;
3440 variable var = (variable)*slot;
3441 location_chain node;
3443 if (!dv_onepart_p (var->dv) || !var->n_var_parts)
3446 gcc_assert (var->n_var_parts == 1);
3448 if (dv_is_decl_p (var->dv))
3450 bool check_dupes = false;
3453 for (node = var->var_part[0].loc_chain; node; node = node->next)
3455 if (GET_CODE (node->loc) == VALUE)
3456 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
3457 else if (GET_CODE (node->loc) == REG)
3459 attrs att, *attp, *curp = NULL;
3461 if (var->refcount != 1)
3463 slot = unshare_variable (set, slot, var,
3464 VAR_INIT_STATUS_INITIALIZED);
3465 var = (variable)*slot;
3469 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3471 if (att->offset == 0
3472 && GET_MODE (att->loc) == GET_MODE (node->loc))
3474 if (dv_is_value_p (att->dv))
3476 rtx cval = dv_as_value (att->dv);
3481 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
3489 if ((*curp)->offset == 0
3490 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
3491 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
3494 curp = &(*curp)->next;
3505 *dfpm->permp = XNEW (dataflow_set);
3506 dataflow_set_init (*dfpm->permp);
3509 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
3510 att; att = att->next)
3511 if (GET_MODE (att->loc) == GET_MODE (node->loc))
3513 gcc_assert (att->offset == 0);
3514 gcc_assert (dv_is_value_p (att->dv));
3515 val_reset (set, att->dv);
3522 cval = dv_as_value (cdv);
3526 /* Create a unique value to hold this register,
3527 that ought to be found and reused in
3528 subsequent rounds. */
3530 gcc_assert (!cselib_lookup (node->loc,
3531 GET_MODE (node->loc), 0));
3532 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
3533 cselib_preserve_value (v);
3534 cselib_invalidate_rtx (node->loc);
3536 cdv = dv_from_value (cval);
3539 "Created new value %i for reg %i\n",
3540 v->value, REGNO (node->loc));
3543 var_reg_decl_set (*dfpm->permp, node->loc,
3544 VAR_INIT_STATUS_INITIALIZED,
3545 cdv, 0, NULL, INSERT);
3551 /* Remove attribute referring to the decl, which now
3552 uses the value for the register, already existing or
3553 to be added when we bring perm in. */
3556 pool_free (attrs_pool, att);
3561 remove_duplicate_values (var);
3567 /* Reset values in the permanent set that are not associated with the
3568 chosen expression. */
3571 variable_post_merge_perm_vals (void **pslot, void *info)
3573 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3574 dataflow_set *set = dfpm->set;
3575 variable pvar = (variable)*pslot, var;
3576 location_chain pnode;
3580 gcc_assert (dv_is_value_p (pvar->dv));
3581 gcc_assert (pvar->n_var_parts == 1);
3582 pnode = pvar->var_part[0].loc_chain;
3584 gcc_assert (!pnode->next);
3585 gcc_assert (REG_P (pnode->loc));
3589 var = shared_hash_find (set->vars, dv);
3592 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
3594 val_reset (set, dv);
3597 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
3598 if (att->offset == 0
3599 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
3600 && dv_is_value_p (att->dv))
3603 /* If there is a value associated with this register already, create
3605 if (att && dv_as_value (att->dv) != dv_as_value (dv))
3607 rtx cval = dv_as_value (att->dv);
3608 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
3609 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
3614 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
3616 variable_union (pslot, set);
3622 /* After a merge, create values for tracked one-part expressions that lack them, reconcile SET with the permanent set *PERMP, and re-canonicalize the value equivalences in SET. */
3626 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
3628 struct dfset_post_merge dfpm;
3633 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
3636 htab_traverse (shared_hash_htab ((*permp)->vars),
3637 variable_post_merge_perm_vals, &dfpm);
3638 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
3641 /* Return a node whose loc is a MEM that refers to EXPR in the
3642 location list of a one-part variable or value VAR, or in that of
3643 any values recursively mentioned in the location lists. */
3645 static location_chain
3646 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
3648 location_chain node;
3651 location_chain where = NULL;
3656 gcc_assert (GET_CODE (val) == VALUE);
3658 gcc_assert (!VALUE_RECURSED_INTO (val));
3660 dv = dv_from_value (val);
3661 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
3666 gcc_assert (dv_onepart_p (var->dv));
3668 if (!var->n_var_parts)
3671 gcc_assert (var->var_part[0].offset == 0);
3673 VALUE_RECURSED_INTO (val) = true;
3675 for (node = var->var_part[0].loc_chain; node; node = node->next)
3676 if (MEM_P (node->loc) && MEM_EXPR (node->loc) == expr
3677 && MEM_OFFSET (node->loc) == 0)
3682 else if (GET_CODE (node->loc) == VALUE
3683 && !VALUE_RECURSED_INTO (node->loc)
3684 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
3687 VALUE_RECURSED_INTO (val) = false;
3692 /* Remove all MEMs from the location list of a hash table entry for a
3693 one-part variable, except those whose MEM attributes map back to
3694 the variable itself, directly or within a VALUE.
3696 ??? We could also preserve MEMs that reference stack slots that are
3697 annotated as not addressable. This is arguably even more reliable
3698 than the current heuristic. */
3701 dataflow_set_preserve_mem_locs (void **slot, void *data)
3703 dataflow_set *set = (dataflow_set *) data;
3704 variable var = (variable) *slot;
3706 if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
3708 tree decl = dv_as_decl (var->dv);
3709 location_chain loc, *locp;
3711 if (!var->n_var_parts)
3714 gcc_assert (var->n_var_parts == 1);
3716 if (var->refcount > 1 || shared_hash_shared (set->vars))
3718 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3720 /* We want to remove a MEM that doesn't refer to DECL. */
3721 if (GET_CODE (loc->loc) == MEM
3722 && (MEM_EXPR (loc->loc) != decl
3723 || MEM_OFFSET (loc->loc)))
3725 /* We want to move here a MEM that does refer to DECL. */
3726 else if (GET_CODE (loc->loc) == VALUE
3727 && find_mem_expr_in_1pdv (decl, loc->loc,
3728 shared_hash_htab (set->vars)))
3735 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3736 var = (variable)*slot;
3737 gcc_assert (var->n_var_parts == 1);
3740 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3743 rtx old_loc = loc->loc;
3744 if (GET_CODE (old_loc) == VALUE)
3746 location_chain mem_node
3747 = find_mem_expr_in_1pdv (decl, loc->loc,
3748 shared_hash_htab (set->vars));
3750 /* ??? This picks up only one out of multiple MEMs that
3751 refer to the same variable. Do we ever need to be
3752 concerned about dealing with more than one, or, given
3753 that they should all map to the same variable
3754 location, their addresses will have been merged and
3755 they will be regarded as equivalent? */
3758 loc->loc = mem_node->loc;
3759 loc->set_src = mem_node->set_src;
3760 loc->init = MIN (loc->init, mem_node->init);
3764 if (GET_CODE (loc->loc) != MEM
3765 || (MEM_EXPR (loc->loc) == decl
3766 && MEM_OFFSET (loc->loc) == 0))
3768 if (old_loc != loc->loc && emit_notes)
3770 add_value_chains (var->dv, loc->loc);
3771 remove_value_chains (var->dv, old_loc);
3778 remove_value_chains (var->dv, old_loc);
3780 pool_free (loc_chain_pool, loc);
3783 if (!var->var_part[0].loc_chain)
3786 if (emit_notes && dv_is_value_p (var->dv))
3787 remove_cselib_value_chains (var->dv);
3788 variable_was_changed (var, set);
3795 /* Remove all MEMs from the location list of a hash table entry for a
3799 dataflow_set_remove_mem_locs (void **slot, void *data)
3801 dataflow_set *set = (dataflow_set *) data;
3802 variable var = (variable) *slot;
3804 if (dv_is_value_p (var->dv))
3806 location_chain loc, *locp;
3807 bool changed = false;
3809 gcc_assert (var->n_var_parts == 1);
3811 if (var->refcount > 1 || shared_hash_shared (set->vars))
3813 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
3814 if (GET_CODE (loc->loc) == MEM)
3820 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
3821 var = (variable)*slot;
3822 gcc_assert (var->n_var_parts == 1);
3825 for (locp = &var->var_part[0].loc_chain, loc = *locp;
3828 if (GET_CODE (loc->loc) != MEM)
3835 remove_value_chains (var->dv, loc->loc);
3837 /* If we have deleted the location which was last emitted,
3838 we have to emit a new location, so add the variable to the
3839 set of changed variables. */
3840 if (var->var_part[0].cur_loc
3841 && rtx_equal_p (loc->loc, var->var_part[0].cur_loc))
3843 pool_free (loc_chain_pool, loc);
3846 if (!var->var_part[0].loc_chain)
3849 if (emit_notes && dv_is_value_p (var->dv))
3850 remove_cselib_value_chains (var->dv);
3851 gcc_assert (changed);
3855 if (var->n_var_parts && var->var_part[0].loc_chain)
3856 var->var_part[0].cur_loc = var->var_part[0].loc_chain->loc;
3857 variable_was_changed (var, set);
3864 /* Remove all variable-location information about call-clobbered
3865 registers, as well as associations between MEMs and VALUEs. */
3868 dataflow_set_clear_at_call (dataflow_set *set)
3872 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
3873 if (TEST_HARD_REG_BIT (call_used_reg_set, r))
3874 var_regno_delete (set, r);
3876 if (MAY_HAVE_DEBUG_INSNS)
3878 set->traversed_vars = set->vars;
3879 htab_traverse (shared_hash_htab (set->vars),
3880 dataflow_set_preserve_mem_locs, set);
3881 set->traversed_vars = set->vars;
3882 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
3884 set->traversed_vars = NULL;
3888 /* Flag whether two dataflow sets being compared contain different data. */
3890 dataflow_set_different_value;
3893 variable_part_different_p (variable_part *vp1, variable_part *vp2)
3895 location_chain lc1, lc2;
3897 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
3899 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
3901 if (REG_P (lc1->loc) && REG_P (lc2->loc))
3903 if (REGNO (lc1->loc) == REGNO (lc2->loc))
3906 if (rtx_equal_p (lc1->loc, lc2->loc))
3915 /* Return true if one-part variables VAR1 and VAR2 are different.
3916 They must be in canonical order. */
3919 onepart_variable_different_p (variable var1, variable var2)
3921 location_chain lc1, lc2;
3926 gcc_assert (var1->n_var_parts == 1);
3927 gcc_assert (var2->n_var_parts == 1);
3929 lc1 = var1->var_part[0].loc_chain;
3930 lc2 = var2->var_part[0].loc_chain;
3937 if (loc_cmp (lc1->loc, lc2->loc))
3946 /* Return true if variables VAR1 and VAR2 are different.
3947 If COMPARE_CURRENT_LOCATION is true compare also the cur_loc of each
3951 variable_different_p (variable var1, variable var2,
3952 bool compare_current_location)
3959 if (var1->n_var_parts != var2->n_var_parts)
3962 for (i = 0; i < var1->n_var_parts; i++)
3964 if (var1->var_part[i].offset != var2->var_part[i].offset)
3966 if (compare_current_location)
3968 if (!((REG_P (var1->var_part[i].cur_loc)
3969 && REG_P (var2->var_part[i].cur_loc)
3970 && (REGNO (var1->var_part[i].cur_loc)
3971 == REGNO (var2->var_part[i].cur_loc)))
3972 || rtx_equal_p (var1->var_part[i].cur_loc,
3973 var2->var_part[i].cur_loc)))
3976 /* One-part values have locations in a canonical order. */
3977 if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
3979 gcc_assert (var1->n_var_parts == 1);
3980 gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
3981 return onepart_variable_different_p (var1, var2);
3983 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
3985 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
3991 /* Compare variable *SLOT with the same variable in hash table DATA
3992 and set DATAFLOW_SET_DIFFERENT_VALUE if they are different. */
3995 dataflow_set_different_1 (void **slot, void *data)
3997 htab_t htab = (htab_t) data;
3998 variable var1, var2;
4000 var1 = (variable) *slot;
4001 var2 = (variable) htab_find_with_hash (htab, var1->dv,
4002 dv_htab_hash (var1->dv));
4005 dataflow_set_different_value = true;
4007 if (dump_file && (dump_flags & TDF_DETAILS))
4009 fprintf (dump_file, "dataflow difference found: removal of:\n");
4010 dump_variable (var1);
4013 /* Stop traversing the hash table. */
4017 if (variable_different_p (var1, var2, false))
4019 dataflow_set_different_value = true;
4021 if (dump_file && (dump_flags & TDF_DETAILS))
4023 fprintf (dump_file, "dataflow difference found: old and new follow:\n");
4024 dump_variable (var1);
4025 dump_variable (var2);
4028 /* Stop traversing the hash table. */
4032 /* Continue traversing the hash table. */
4036 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
4039 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4041 if (old_set->vars == new_set->vars)
4044 if (htab_elements (shared_hash_htab (old_set->vars))
4045 != htab_elements (shared_hash_htab (new_set->vars)))
4048 dataflow_set_different_value = false;
4050 htab_traverse (shared_hash_htab (old_set->vars), dataflow_set_different_1,
4051 shared_hash_htab (new_set->vars));
4052 /* No need to traverse the second hashtab: if both have the same number
4053 of elements and every entry of the first one was also found in the
4054 second, then the second one cannot have any extra entries. */
4055 return dataflow_set_different_value;
4058 /* Free the contents of dataflow set SET. */
4061 dataflow_set_destroy (dataflow_set *set)
4065 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4066 attrs_list_clear (&set->regs[i]);
4068 shared_hash_destroy (set->vars);
4072 /* Return true if RTL X contains a SYMBOL_REF. */
4075 contains_symbol_ref (rtx x)
4084 code = GET_CODE (x);
4085 if (code == SYMBOL_REF)
4088 fmt = GET_RTX_FORMAT (code);
4089 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4093 if (contains_symbol_ref (XEXP (x, i)))
4096 else if (fmt[i] == 'E')
4099 for (j = 0; j < XVECLEN (x, i); j++)
4100 if (contains_symbol_ref (XVECEXP (x, i, j)))
4108 /* Shall EXPR be tracked? */
4111 track_expr_p (tree expr, bool need_rtl)
4116 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4117 return DECL_RTL_SET_P (expr);
4119 /* If EXPR is not a parameter or a variable do not track it. */
4120 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4123 /* It also must have a name... */
4124 if (!DECL_NAME (expr))
4127 /* ... and a RTL assigned to it. */
4128 decl_rtl = DECL_RTL_IF_SET (expr);
4129 if (!decl_rtl && need_rtl)
4132 /* If this expression is really a debug alias of some other declaration, we
4133 don't need to track this expression if the ultimate declaration is
4136 if (DECL_DEBUG_EXPR_IS_FROM (realdecl) && DECL_DEBUG_EXPR (realdecl))
4138 realdecl = DECL_DEBUG_EXPR (realdecl);
4139 /* ??? We don't yet know how to emit DW_OP_piece for a variable
4140 that has been SRA'ed. */
4141 if (!DECL_P (realdecl))
4145 /* Do not track EXPR if REALDECL should be ignored for debugging
4147 if (DECL_IGNORED_P (realdecl))
4150 /* Do not track global variables until we are able to emit correct location
4152 if (TREE_STATIC (realdecl))
4155 /* When EXPR is a DECL for an alias of some other variable (see the
4156 example below) the TREE_STATIC flag is not used. Disable tracking of
4157 all DECLs whose DECL_RTL contains a SYMBOL_REF.
4160 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4163 if (decl_rtl && MEM_P (decl_rtl)
4164 && contains_symbol_ref (XEXP (decl_rtl, 0)))
4167 /* If RTX is a memory it should not be very large (because it would be
4168 an array or struct). */
4169 if (decl_rtl && MEM_P (decl_rtl))
4171 /* Do not track structures and arrays. */
4172 if (GET_MODE (decl_rtl) == BLKmode
4173 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4175 if (MEM_SIZE (decl_rtl)
4176 && INTVAL (MEM_SIZE (decl_rtl)) > MAX_VAR_PARTS)
4180 DECL_CHANGED (expr) = 0;
4181 DECL_CHANGED (realdecl) = 0;
4185 /* Determine whether a given LOC refers to the same variable part as
4189 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4192 HOST_WIDE_INT offset2;
4194 if (! DECL_P (expr))
4199 expr2 = REG_EXPR (loc);
4200 offset2 = REG_OFFSET (loc);
4202 else if (MEM_P (loc))
4204 expr2 = MEM_EXPR (loc);
4205 offset2 = INT_MEM_OFFSET (loc);
4210 if (! expr2 || ! DECL_P (expr2))
4213 expr = var_debug_decl (expr);
4214 expr2 = var_debug_decl (expr2);
4216 return (expr == expr2 && offset == offset2);
4219 /* LOC is a REG or MEM that we would like to track if possible.
4220 If EXPR is null, we don't know what expression LOC refers to,
4221 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
4222 LOC is an lvalue register.
4224 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4225 is something we can track. When returning true, store the mode of
4226 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4227 from EXPR in *OFFSET_OUT (if nonnull). */
4230 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4231 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4233 enum machine_mode mode;
4235 if (expr == NULL || !track_expr_p (expr, true))
4238 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4239 whole subreg, but only the old inner part is really relevant. */
4240 mode = GET_MODE (loc);
4241 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4243 enum machine_mode pseudo_mode;
4245 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4246 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
4248 offset += byte_lowpart_offset (pseudo_mode, mode);
4253 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4254 Do the same if we are storing to a register and EXPR occupies
4255 the whole of register LOC; in that case, the whole of EXPR is
4256 being changed. We exclude complex modes from the second case
4257 because the real and imaginary parts are represented as separate
4258 pseudo registers, even if the whole complex value fits into one
4260 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4262 && !COMPLEX_MODE_P (DECL_MODE (expr))
4263 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4264 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4266 mode = DECL_MODE (expr);
4270 if (offset < 0 || offset >= MAX_VAR_PARTS)
4276 *offset_out = offset;
4280 /* Return the MODE lowpart of LOC, or null if LOC is not something we
4281 want to track. When returning nonnull, make sure that the attributes
4282 on the returned value are updated. */
4285 var_lowpart (enum machine_mode mode, rtx loc)
4287 unsigned int offset, reg_offset, regno;
4289 if (!REG_P (loc) && !MEM_P (loc))
4292 if (GET_MODE (loc) == mode)
4295 offset = byte_lowpart_offset (mode, GET_MODE (loc));
4298 return adjust_address_nv (loc, mode, offset);
4300 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4301 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4303 return gen_rtx_REG_offset (loc, mode, regno, offset);
4306 /* Carry information about uses and stores while walking rtx. */
4308 struct count_use_info
4310 /* The insn where the RTX is. */
4313 /* The basic block where insn is. */
4316 /* The array of n_sets sets in the insn, as determined by cselib. */
4317 struct cselib_set *sets;
4320 /* True if we're counting stores, false otherwise. */
4324 /* Find a VALUE corresponding to X. */
4326 static inline cselib_val *
4327 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4333 /* This is called after uses are set up and before stores are
4334 processed by cselib, so it's safe to look up srcs, but not
4335 dsts. So we look up expressions that appear in srcs or in
4336 dest expressions, but we search the sets array for dests of
4340 for (i = 0; i < cui->n_sets; i++)
4341 if (cui->sets[i].dest == x)
4342 return cui->sets[i].src_elt;
4345 return cselib_lookup (x, mode, 0);
4351 /* Replace all registers and addresses in an expression with VALUE
4352 expressions that map back to them, unless the expression is a
4353 register. If no mapping is or can be performed, returns NULL. */
4356 replace_expr_with_values (rtx loc)
4360 else if (MEM_P (loc))
4362 enum machine_mode address_mode
4363 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc));
4364 cselib_val *addr = cselib_lookup (XEXP (loc, 0), address_mode, 0);
4366 return replace_equiv_address_nv (loc, addr->val_rtx);
4371 return cselib_subst_to_values (loc);
4374 /* Determine what kind of micro operation to choose for a USE. Return
4375 MO_CLOBBER if no micro operation is to be generated. */
4377 static enum micro_operation_type
4378 use_type (rtx *loc, struct count_use_info *cui, enum machine_mode *modep)
4383 if (cui && cui->sets)
4385 if (GET_CODE (*loc) == VAR_LOCATION)
4387 if (track_expr_p (PAT_VAR_LOCATION_DECL (*loc), false))
4389 rtx ploc = PAT_VAR_LOCATION_LOC (*loc);
4390 cselib_val *val = cselib_lookup (ploc, GET_MODE (*loc), 1);
4392 /* ??? flag_float_store and volatile mems are never
4393 given values, but we could in theory use them for
4395 gcc_assert (val || 1);
4402 if ((REG_P (*loc) || MEM_P (*loc))
4403 && (val = find_use_val (*loc, GET_MODE (*loc), cui)))
4406 *modep = GET_MODE (*loc);
4410 || cselib_lookup (XEXP (*loc, 0), GET_MODE (*loc), 0))
4413 else if (!cselib_preserved_value_p (val))
4420 gcc_assert (REGNO (*loc) < FIRST_PSEUDO_REGISTER);
4422 expr = REG_EXPR (*loc);
4425 return MO_USE_NO_VAR;
4426 else if (target_for_debug_bind (var_debug_decl (expr)))
4428 else if (track_loc_p (*loc, expr, REG_OFFSET (*loc),
4429 false, modep, NULL))
4432 return MO_USE_NO_VAR;
4434 else if (MEM_P (*loc))
4436 expr = MEM_EXPR (*loc);
4440 else if (target_for_debug_bind (var_debug_decl (expr)))
4442 else if (track_loc_p (*loc, expr, INT_MEM_OFFSET (*loc),
4443 false, modep, NULL))
4452 /* Log to OUT information about micro-operation MOPT involving X in
4456 log_op_type (rtx x, basic_block bb, rtx insn,
4457 enum micro_operation_type mopt, FILE *out)
4459 fprintf (out, "bb %i op %i insn %i %s ",
4460 bb->index, VTI (bb)->n_mos - 1,
4461 INSN_UID (insn), micro_operation_type_name[mopt]);
4462 print_inline_rtx (out, x, 2);
4466 /* Count uses (register and memory references) LOC which will be tracked.
4467 INSN is the instruction which LOC is part of. */
4470 count_uses (rtx *loc, void *cuip)
4472 struct count_use_info *cui = (struct count_use_info *) cuip;
4473 enum micro_operation_type mopt = use_type (loc, cui, NULL);
4475 if (mopt != MO_CLOBBER)
4478 enum machine_mode mode = GET_MODE (*loc);
4480 VTI (cui->bb)->n_mos++;
4482 if (dump_file && (dump_flags & TDF_DETAILS))
4483 log_op_type (*loc, cui->bb, cui->insn, mopt, dump_file);
4488 loc = &PAT_VAR_LOCATION_LOC (*loc);
4489 if (VAR_LOC_UNKNOWN_P (*loc))
4496 && !REG_P (XEXP (*loc, 0)) && !MEM_P (XEXP (*loc, 0)))
4498 enum machine_mode address_mode
4499 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (*loc));
4500 val = cselib_lookup (XEXP (*loc, 0), address_mode, false);
4502 if (val && !cselib_preserved_value_p (val))
4504 VTI (cui->bb)->n_mos++;
4505 cselib_preserve_value (val);
4509 val = find_use_val (*loc, mode, cui);
4511 cselib_preserve_value (val);
4513 gcc_assert (mopt == MO_VAL_LOC);
4525 /* Helper function for finding all uses of REG/MEM in X in CUI's
4529 count_uses_1 (rtx *x, void *cui)
4531 for_each_rtx (x, count_uses, cui);
4534 /* Count stores (register and memory references) LOC which will be
4535 tracked. CUI is a count_use_info object containing the instruction
4536 which the LOC is part of. */
4539 count_stores (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *cui)
4541 count_uses (&loc, cui);
4544 /* Callback for cselib_record_sets_hook, that counts how many micro
4545 operations it takes for uses and stores in an insn after
4546 cselib_record_sets has analyzed the sets in an insn, but before it
4547 modifies the stored values in the internal tables, unless
4548 cselib_record_sets doesn't call it directly (perhaps because we're
4549 not doing cselib in the first place, in which case sets and n_sets
4553 count_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
4555 basic_block bb = BLOCK_FOR_INSN (insn);
4556 struct count_use_info cui;
4558 cselib_hook_called = true;
4563 cui.n_sets = n_sets;
4565 cui.store_p = false;
4566 note_uses (&PATTERN (insn), count_uses_1, &cui);
4568 note_stores (PATTERN (insn), count_stores, &cui);
4571 /* Tell whether the CONCAT used to hold a VALUE and its location
4572 needs value resolution, i.e., an attempt of mapping the location
4573 back to other incoming values. */
4574 #define VAL_NEEDS_RESOLUTION(x) \
4575 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
4576 /* Whether the location in the CONCAT is a tracked expression, that
4577 should also be handled like a MO_USE. */
4578 #define VAL_HOLDS_TRACK_EXPR(x) \
4579 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
4580 /* Whether the location in the CONCAT should be handled like a MO_COPY
4582 #define VAL_EXPR_IS_COPIED(x) \
4583 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
4584 /* Whether the location in the CONCAT should be handled like a
4585 MO_CLOBBER as well. */
4586 #define VAL_EXPR_IS_CLOBBERED(x) \
4587 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
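/* In other words, a value-carrying micro-operation smuggles four
   booleans in RTL flag bits of its CONCAT (volatil, used, jump and
   unchanging, per the checks above).  For a hypothetical store recorded
   as (concat VAL (set (reg 1) (reg 2))), VAL_HOLDS_TRACK_EXPR says the
   destination is itself a tracked expression, and VAL_NEEDS_RESOLUTION
   asks for an attempt to map the location back to other incoming values
   when the micro-operation is later processed.  */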
4589 /* Add uses (register and memory references) LOC which will be tracked
4590 to VTI (bb)->mos. INSN is the instruction which LOC is part of. */
4593 add_uses (rtx *loc, void *data)
4595 enum machine_mode mode = VOIDmode;
4596 struct count_use_info *cui = (struct count_use_info *)data;
4597 enum micro_operation_type type = use_type (loc, cui, &mode);
4599 if (type != MO_CLOBBER)
4601 basic_block bb = cui->bb;
4602 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4605 mo->u.loc = type == MO_USE ? var_lowpart (mode, *loc) : *loc;
4606 mo->insn = cui->insn;
4608 if (type == MO_VAL_LOC)
4611 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
4614 gcc_assert (cui->sets);
4617 && !REG_P (XEXP (vloc, 0)) && !MEM_P (XEXP (vloc, 0)))
4620 enum machine_mode address_mode
4621 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4623 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4625 if (val && !cselib_preserved_value_p (val))
4627 micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
4628 mon->type = mo->type;
4629 mon->u.loc = mo->u.loc;
4630 mon->insn = mo->insn;
4631 cselib_preserve_value (val);
4632 mo->type = MO_VAL_USE;
4633 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4634 mo->u.loc = gen_rtx_CONCAT (address_mode,
4635 val->val_rtx, mloc);
4636 if (dump_file && (dump_flags & TDF_DETAILS))
4637 log_op_type (mo->u.loc, cui->bb, cui->insn,
4638 mo->type, dump_file);
4643 if (!VAR_LOC_UNKNOWN_P (vloc)
4644 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
4646 enum machine_mode mode2;
4647 enum micro_operation_type type2;
4648 rtx nloc = replace_expr_with_values (vloc);
4652 oloc = shallow_copy_rtx (oloc);
4653 PAT_VAR_LOCATION_LOC (oloc) = nloc;
4656 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
4658 type2 = use_type (&vloc, 0, &mode2);
4660 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4661 || type2 == MO_CLOBBER);
4663 if (type2 == MO_CLOBBER
4664 && !cselib_preserved_value_p (val))
4666 VAL_NEEDS_RESOLUTION (oloc) = 1;
4667 cselib_preserve_value (val);
4670 else if (!VAR_LOC_UNKNOWN_P (vloc))
4672 oloc = shallow_copy_rtx (oloc);
4673 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
4678 else if (type == MO_VAL_USE)
4680 enum machine_mode mode2 = VOIDmode;
4681 enum micro_operation_type type2;
4682 cselib_val *val = find_use_val (*loc, GET_MODE (*loc), cui);
4683 rtx vloc, oloc = *loc, nloc;
4685 gcc_assert (cui->sets);
4688 && !REG_P (XEXP (oloc, 0)) && !MEM_P (XEXP (oloc, 0)))
4691 enum machine_mode address_mode
4692 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4694 = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4696 if (val && !cselib_preserved_value_p (val))
4698 micro_operation *mon = VTI (bb)->mos + VTI (bb)->n_mos++;
4699 mon->type = mo->type;
4700 mon->u.loc = mo->u.loc;
4701 mon->insn = mo->insn;
4702 cselib_preserve_value (val);
4703 mo->type = MO_VAL_USE;
4704 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4705 mo->u.loc = gen_rtx_CONCAT (address_mode,
4706 val->val_rtx, mloc);
4707 mo->insn = cui->insn;
4708 if (dump_file && (dump_flags & TDF_DETAILS))
4709 log_op_type (mo->u.loc, cui->bb, cui->insn,
4710 mo->type, dump_file);
4715 type2 = use_type (loc, 0, &mode2);
4717 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
4718 || type2 == MO_CLOBBER);
4720 if (type2 == MO_USE)
4721 vloc = var_lowpart (mode2, *loc);
4725 /* The loc of a MO_VAL_USE may have two forms:
4727 (concat val src): val is at src, a value-based
4730 (concat (concat val use) src): same as above, with use as
4731 the MO_USE tracked value, if it differs from src.
4735 nloc = replace_expr_with_values (*loc);
4740 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
4742 oloc = val->val_rtx;
4744 mo->u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
4746 if (type2 == MO_USE)
4747 VAL_HOLDS_TRACK_EXPR (mo->u.loc) = 1;
4748 if (!cselib_preserved_value_p (val))
4750 VAL_NEEDS_RESOLUTION (mo->u.loc) = 1;
4751 cselib_preserve_value (val);
4755 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
4757 if (dump_file && (dump_flags & TDF_DETAILS))
4758 log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
4764 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
4767 add_uses_1 (rtx *x, void *cui)
4769 for_each_rtx (x, add_uses, cui);
4772 /* Add stores (register and memory references) LOC which will be tracked
4773 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
4774 CUIP->insn is the instruction which LOC is part of. */
4777 add_stores (rtx loc, const_rtx expr, void *cuip)
4779 enum machine_mode mode = VOIDmode, mode2;
4780 struct count_use_info *cui = (struct count_use_info *)cuip;
4781 basic_block bb = cui->bb;
4782 micro_operation *mo;
4783 rtx oloc = loc, nloc, src = NULL;
4784 enum micro_operation_type type = use_type (&loc, cui, &mode);
4785 bool track_p = false;
4787 bool resolve, preserve;
4789 if (type == MO_CLOBBER)
4796 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4798 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
4799 || !(track_p = use_type (&loc, NULL, &mode2) == MO_USE)
4800 || GET_CODE (expr) == CLOBBER)
4802 mo->type = MO_CLOBBER;
4807 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
4808 src = var_lowpart (mode2, SET_SRC (expr));
4809 loc = var_lowpart (mode2, loc);
4818 if (SET_SRC (expr) != src)
4819 expr = gen_rtx_SET (VOIDmode, loc, src);
4820 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
4824 mo->u.loc = CONST_CAST_RTX (expr);
4827 mo->insn = cui->insn;
4829 else if (MEM_P (loc)
4830 && ((track_p = use_type (&loc, NULL, &mode2) == MO_USE)
4833 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4835 if (MEM_P (loc) && type == MO_VAL_SET
4836 && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0)))
4839 enum machine_mode address_mode
4840 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc));
4841 cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
4843 if (val && !cselib_preserved_value_p (val))
4845 cselib_preserve_value (val);
4846 mo->type = MO_VAL_USE;
4847 mloc = cselib_subst_to_values (XEXP (mloc, 0));
4848 mo->u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
4849 mo->insn = cui->insn;
4850 if (dump_file && (dump_flags & TDF_DETAILS))
4851 log_op_type (mo->u.loc, cui->bb, cui->insn,
4852 mo->type, dump_file);
4853 mo = VTI (bb)->mos + VTI (bb)->n_mos++;
4857 if (GET_CODE (expr) == CLOBBER || !track_p)
4859 mo->type = MO_CLOBBER;
4860 mo->u.loc = track_p ? var_lowpart (mode2, loc) : loc;
4864 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
4865 src = var_lowpart (mode2, SET_SRC (expr));
4866 loc = var_lowpart (mode2, loc);
4875 if (SET_SRC (expr) != src)
4876 expr = gen_rtx_SET (VOIDmode, loc, src);
4877 if (same_variable_part_p (SET_SRC (expr),
4879 INT_MEM_OFFSET (loc)))
4883 mo->u.loc = CONST_CAST_RTX (expr);
4886 mo->insn = cui->insn;
4891 if (type != MO_VAL_SET)
4892 goto log_and_return;
4894 v = find_use_val (oloc, mode, cui);
4896 resolve = preserve = !cselib_preserved_value_p (v);
4898 nloc = replace_expr_with_values (oloc);
4902 if (resolve && GET_CODE (mo->u.loc) == SET)
4904 nloc = replace_expr_with_values (SET_SRC (mo->u.loc));
4907 oloc = gen_rtx_SET (GET_MODE (mo->u.loc), oloc, nloc);
4910 if (oloc == SET_DEST (mo->u.loc))
4911 /* No point in duplicating. */
4913 if (!REG_P (SET_SRC (mo->u.loc)))
4919 if (GET_CODE (mo->u.loc) == SET
4920 && oloc == SET_DEST (mo->u.loc))
4921 /* No point in duplicating. */
4927 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
4929 if (mo->u.loc != oloc)
4930 loc = gen_rtx_CONCAT (GET_MODE (mo->u.loc), loc, mo->u.loc);
4932 /* The loc of a MO_VAL_SET may have various forms:
4934 (concat val dst): dst now holds val
4936 (concat val (set dst src)): dst now holds val, copied from src
4938 (concat (concat val dstv) dst): dst now holds val; dstv is dst
4939 after replacing mems and non-top-level regs with values.
4941 (concat (concat val dstv) (set dst src)): dst now holds val,
4942 copied from src. dstv is a value-based representation of dst, if
4943 it differs from dst. If resolution is needed, src is a REG.
4945 (concat (concat val (set dstv srcv)) (set dst src)): src
4946 copied to dst, holding val. dstv and srcv are value-based
4947 representations of dst and src, respectively.
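
   For illustration, this nesting is unpacked later (see the MO_VAL_SET
   cases in compute_bb_dataflow and emit_notes_in_bb below) roughly as in
   the following sketch, where LOC stands for mo->u.loc:

     vloc = uloc = XEXP (loc, 1);
     val = XEXP (loc, 0);
     if (GET_CODE (val) == CONCAT)
       {
         vloc = XEXP (val, 1);
         val = XEXP (val, 0);
       }
     if (GET_CODE (vloc) == SET)
       {
         vsrc = SET_SRC (vloc);
         vloc = SET_DEST (vloc);
       }

   so VAL always ends up being the VALUE, VLOC the value-based
   destination (if any), and ULOC the original destination or SET.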
4954 VAL_HOLDS_TRACK_EXPR (loc) = 1;
4957 VAL_NEEDS_RESOLUTION (loc) = resolve;
4958 cselib_preserve_value (v);
4960 if (mo->type == MO_CLOBBER)
4961 VAL_EXPR_IS_CLOBBERED (loc) = 1;
4962 if (mo->type == MO_COPY)
4963 VAL_EXPR_IS_COPIED (loc) = 1;
4965 mo->type = MO_VAL_SET;
4968 if (dump_file && (dump_flags & TDF_DETAILS))
4969 log_op_type (mo->u.loc, cui->bb, cui->insn, mo->type, dump_file);
4972 /* Callback for cselib_record_sets_hook, which records as micro
4973 operations the uses and stores in an insn after cselib_record_sets has
4974 analyzed the sets in that insn, but before it modifies the stored
4975 values in its internal tables.  It is also called directly when
4976 cselib_record_sets does not invoke it (e.g. because we're not doing
4977 cselib in the first place); in that case SETS is NULL and N_SETS is 0. */
4980 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
4982 basic_block bb = BLOCK_FOR_INSN (insn);
4984 struct count_use_info cui;
4986 cselib_hook_called = true;
4991 cui.n_sets = n_sets;
4993 n1 = VTI (bb)->n_mos;
4994 cui.store_p = false;
4995 note_uses (&PATTERN (insn), add_uses_1, &cui);
4996 n2 = VTI (bb)->n_mos - 1;
4998 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
5002 while (n1 < n2 && VTI (bb)->mos[n1].type == MO_USE)
5004 while (n1 < n2 && VTI (bb)->mos[n2].type != MO_USE)
5010 sw = VTI (bb)->mos[n1];
5011 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5012 VTI (bb)->mos[n2] = sw;
5016 n2 = VTI (bb)->n_mos - 1;
5020 while (n1 < n2 && VTI (bb)->mos[n1].type != MO_VAL_LOC)
5022 while (n1 < n2 && VTI (bb)->mos[n2].type == MO_VAL_LOC)
5028 sw = VTI (bb)->mos[n1];
5029 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5030 VTI (bb)->mos[n2] = sw;
5036 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
5041 if (dump_file && (dump_flags & TDF_DETAILS))
5042 log_op_type (PATTERN (insn), bb, insn, mo->type, dump_file);
5045 n1 = VTI (bb)->n_mos;
5046 /* This will record NEXT_INSN (insn), such that we can
5047 insert notes before it without worrying about any
5048 notes that MO_USEs might emit after the insn. */
5050 note_stores (PATTERN (insn), add_stores, &cui);
5051 n2 = VTI (bb)->n_mos - 1;
5053 /* Order the MO_CLOBBERs to be before MO_SETs. */
5056 while (n1 < n2 && VTI (bb)->mos[n1].type == MO_CLOBBER)
5058 while (n1 < n2 && VTI (bb)->mos[n2].type != MO_CLOBBER)
5064 sw = VTI (bb)->mos[n1];
5065 VTI (bb)->mos[n1] = VTI (bb)->mos[n2];
5066 VTI (bb)->mos[n2] = sw;
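
/* Illustration only: the three reordering loops above all use the same
   two-index swap partition.  A minimal standalone version of the idea,
   with hypothetical names and a plain int array standing in for the
   micro_operation array (this helper is not used anywhere in this
   file), could look like this.  */

static void
example_partition_front (int *a, int n, int (*belongs_in_front) (int))
{
  int lo = 0, hi = n - 1;

  while (lo < hi)
    {
      /* Skip elements that are already in the correct half.  */
      while (lo < hi && belongs_in_front (a[lo]))
        lo++;
      while (lo < hi && !belongs_in_front (a[hi]))
        hi--;
      /* Swap a misplaced pair, then continue scanning inwards.  */
      if (lo < hi)
        {
          int tmp = a[lo];
          a[lo] = a[hi];
          a[hi] = tmp;
        }
    }
}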
5071 static enum var_init_status
5072 find_src_status (dataflow_set *in, rtx src)
5074 tree decl = NULL_TREE;
5075 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
5077 if (! flag_var_tracking_uninit)
5078 status = VAR_INIT_STATUS_INITIALIZED;
5080 if (src && REG_P (src))
5081 decl = var_debug_decl (REG_EXPR (src));
5082 else if (src && MEM_P (src))
5083 decl = var_debug_decl (MEM_EXPR (src));
5086 status = get_init_value (in, src, dv_from_decl (decl));
5091 /* SRC is the source of an assignment. Use SET to try to find what
5092 was ultimately assigned to SRC. Return that value if known,
5093 otherwise return SRC itself. */
5096 find_src_set_src (dataflow_set *set, rtx src)
5098 tree decl = NULL_TREE; /* The variable being copied around. */
5099 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
5101 location_chain nextp;
5105 if (src && REG_P (src))
5106 decl = var_debug_decl (REG_EXPR (src));
5107 else if (src && MEM_P (src))
5108 decl = var_debug_decl (MEM_EXPR (src));
5112 decl_or_value dv = dv_from_decl (decl);
5114 var = shared_hash_find (set->vars, dv);
5118 for (i = 0; i < var->n_var_parts && !found; i++)
5119 for (nextp = var->var_part[i].loc_chain; nextp && !found;
5120 nextp = nextp->next)
5121 if (rtx_equal_p (nextp->loc, src))
5123 set_src = nextp->set_src;
5133 /* Compute the changes of variable locations in the basic block BB. */
5136 compute_bb_dataflow (basic_block bb)
5140 dataflow_set old_out;
5141 dataflow_set *in = &VTI (bb)->in;
5142 dataflow_set *out = &VTI (bb)->out;
5144 dataflow_set_init (&old_out);
5145 dataflow_set_copy (&old_out, out);
5146 dataflow_set_copy (out, in);
5148 n = VTI (bb)->n_mos;
5149 for (i = 0; i < n; i++)
5151 rtx insn = VTI (bb)->mos[i].insn;
5153 switch (VTI (bb)->mos[i].type)
5156 dataflow_set_clear_at_call (out);
5161 rtx loc = VTI (bb)->mos[i].u.loc;
5164 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5165 else if (MEM_P (loc))
5166 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5172 rtx loc = VTI (bb)->mos[i].u.loc;
5176 if (GET_CODE (loc) == CONCAT)
5178 val = XEXP (loc, 0);
5179 vloc = XEXP (loc, 1);
5187 var = PAT_VAR_LOCATION_DECL (vloc);
5189 clobber_variable_part (out, NULL_RTX,
5190 dv_from_decl (var), 0, NULL_RTX);
5193 if (VAL_NEEDS_RESOLUTION (loc))
5194 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
5195 set_variable_part (out, val, dv_from_decl (var), 0,
5196 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
5204 rtx loc = VTI (bb)->mos[i].u.loc;
5205 rtx val, vloc, uloc;
5207 vloc = uloc = XEXP (loc, 1);
5208 val = XEXP (loc, 0);
5210 if (GET_CODE (val) == CONCAT)
5212 uloc = XEXP (val, 1);
5213 val = XEXP (val, 0);
5216 if (VAL_NEEDS_RESOLUTION (loc))
5217 val_resolve (out, val, vloc, insn);
5219 if (VAL_HOLDS_TRACK_EXPR (loc))
5221 if (GET_CODE (uloc) == REG)
5222 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5224 else if (GET_CODE (uloc) == MEM)
5225 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5233 rtx loc = VTI (bb)->mos[i].u.loc;
5234 rtx val, vloc, uloc;
5236 vloc = uloc = XEXP (loc, 1);
5237 val = XEXP (loc, 0);
5239 if (GET_CODE (val) == CONCAT)
5241 vloc = XEXP (val, 1);
5242 val = XEXP (val, 0);
5245 if (GET_CODE (vloc) == SET)
5247 rtx vsrc = SET_SRC (vloc);
5249 gcc_assert (val != vsrc);
5250 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
5252 vloc = SET_DEST (vloc);
5254 if (VAL_NEEDS_RESOLUTION (loc))
5255 val_resolve (out, val, vsrc, insn);
5257 else if (VAL_NEEDS_RESOLUTION (loc))
5259 gcc_assert (GET_CODE (uloc) == SET
5260 && GET_CODE (SET_SRC (uloc)) == REG);
5261 val_resolve (out, val, SET_SRC (uloc), insn);
5264 if (VAL_HOLDS_TRACK_EXPR (loc))
5266 if (VAL_EXPR_IS_CLOBBERED (loc))
5269 var_reg_delete (out, uloc, true);
5270 else if (MEM_P (uloc))
5271 var_mem_delete (out, uloc, true);
5275 bool copied_p = VAL_EXPR_IS_COPIED (loc);
5277 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
5279 if (GET_CODE (uloc) == SET)
5281 set_src = SET_SRC (uloc);
5282 uloc = SET_DEST (uloc);
5287 if (flag_var_tracking_uninit)
5289 status = find_src_status (in, set_src);
5291 if (status == VAR_INIT_STATUS_UNKNOWN)
5292 status = find_src_status (out, set_src);
5295 set_src = find_src_set_src (in, set_src);
5299 var_reg_delete_and_set (out, uloc, !copied_p,
5301 else if (MEM_P (uloc))
5302 var_mem_delete_and_set (out, uloc, !copied_p,
5306 else if (REG_P (uloc))
5307 var_regno_delete (out, REGNO (uloc));
5309 val_store (out, val, vloc, insn);
5315 rtx loc = VTI (bb)->mos[i].u.loc;
5318 if (GET_CODE (loc) == SET)
5320 set_src = SET_SRC (loc);
5321 loc = SET_DEST (loc);
5325 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5327 else if (MEM_P (loc))
5328 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5335 rtx loc = VTI (bb)->mos[i].u.loc;
5336 enum var_init_status src_status;
5339 if (GET_CODE (loc) == SET)
5341 set_src = SET_SRC (loc);
5342 loc = SET_DEST (loc);
5345 if (! flag_var_tracking_uninit)
5346 src_status = VAR_INIT_STATUS_INITIALIZED;
5349 src_status = find_src_status (in, set_src);
5351 if (src_status == VAR_INIT_STATUS_UNKNOWN)
5352 src_status = find_src_status (out, set_src);
5355 set_src = find_src_set_src (in, set_src);
5358 var_reg_delete_and_set (out, loc, false, src_status, set_src);
5359 else if (MEM_P (loc))
5360 var_mem_delete_and_set (out, loc, false, src_status, set_src);
5366 rtx loc = VTI (bb)->mos[i].u.loc;
5369 var_reg_delete (out, loc, false);
5370 else if (MEM_P (loc))
5371 var_mem_delete (out, loc, false);
5377 rtx loc = VTI (bb)->mos[i].u.loc;
5380 var_reg_delete (out, loc, true);
5381 else if (MEM_P (loc))
5382 var_mem_delete (out, loc, true);
5387 out->stack_adjust += VTI (bb)->mos[i].u.adjust;
5392 if (MAY_HAVE_DEBUG_INSNS)
5394 dataflow_set_equiv_regs (out);
5395 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
5397 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
5400 htab_traverse (shared_hash_htab (out->vars),
5401 canonicalize_loc_order_check, out);
5404 changed = dataflow_set_different (&old_out, out);
5405 dataflow_set_destroy (&old_out);
5409 /* Find the locations of variables in the whole function. */
5412 vt_find_locations (void)
5414 fibheap_t worklist, pending, fibheap_swap;
5415 sbitmap visited, in_worklist, in_pending, sbitmap_swap;
5423 /* Compute the reverse completion order of a depth-first search of the CFG
5424 so that the dataflow analysis runs faster. */
5425 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
5426 bb_order = XNEWVEC (int, last_basic_block);
5427 pre_and_rev_post_order_compute (NULL, rc_order, false);
5428 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
5429 bb_order[rc_order[i]] = i;
5432 worklist = fibheap_new ();
5433 pending = fibheap_new ();
5434 visited = sbitmap_alloc (last_basic_block);
5435 in_worklist = sbitmap_alloc (last_basic_block);
5436 in_pending = sbitmap_alloc (last_basic_block);
5437 sbitmap_zero (in_worklist);
5440 fibheap_insert (pending, bb_order[bb->index], bb);
5441 sbitmap_ones (in_pending);
5443 while (!fibheap_empty (pending))
5445 fibheap_swap = pending;
5447 worklist = fibheap_swap;
5448 sbitmap_swap = in_pending;
5449 in_pending = in_worklist;
5450 in_worklist = sbitmap_swap;
5452 sbitmap_zero (visited);
5454 while (!fibheap_empty (worklist))
5456 bb = (basic_block) fibheap_extract_min (worklist);
5457 RESET_BIT (in_worklist, bb->index);
5458 if (!TEST_BIT (visited, bb->index))
5462 int oldinsz, oldoutsz;
5464 SET_BIT (visited, bb->index);
5466 if (dump_file && VTI (bb)->in.vars)
5469 -= htab_size (shared_hash_htab (VTI (bb)->in.vars))
5470 + htab_size (shared_hash_htab (VTI (bb)->out.vars));
5472 = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
5474 = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
5477 oldinsz = oldoutsz = 0;
5479 if (MAY_HAVE_DEBUG_INSNS)
5481 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
5482 bool first = true, adjust = false;
5484 /* Calculate the IN set as the intersection of
5485 predecessor OUT sets. */
5487 dataflow_set_clear (in);
5488 dst_can_be_shared = true;
5490 FOR_EACH_EDGE (e, ei, bb->preds)
5491 if (!VTI (e->src)->flooded)
5492 gcc_assert (bb_order[bb->index]
5493 <= bb_order[e->src->index]);
5496 dataflow_set_copy (in, &VTI (e->src)->out);
5497 first_out = &VTI (e->src)->out;
5502 dataflow_set_merge (in, &VTI (e->src)->out);
5508 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
5510 /* Merge and merge_adjust should keep entries in
5512 htab_traverse (shared_hash_htab (in->vars),
5513 canonicalize_loc_order_check,
5516 if (dst_can_be_shared)
5518 shared_hash_destroy (in->vars);
5519 in->vars = shared_hash_copy (first_out->vars);
5523 VTI (bb)->flooded = true;
5527 /* Calculate the IN set as union of predecessor OUT sets. */
5528 dataflow_set_clear (&VTI (bb)->in);
5529 FOR_EACH_EDGE (e, ei, bb->preds)
5530 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
5533 changed = compute_bb_dataflow (bb);
5535 htabsz += htab_size (shared_hash_htab (VTI (bb)->in.vars))
5536 + htab_size (shared_hash_htab (VTI (bb)->out.vars));
5540 FOR_EACH_EDGE (e, ei, bb->succs)
5542 if (e->dest == EXIT_BLOCK_PTR)
5545 if (TEST_BIT (visited, e->dest->index))
5547 if (!TEST_BIT (in_pending, e->dest->index))
5549 /* Send E->DEST to next round. */
5550 SET_BIT (in_pending, e->dest->index);
5551 fibheap_insert (pending,
5552 bb_order[e->dest->index],
5556 else if (!TEST_BIT (in_worklist, e->dest->index))
5558 /* Add E->DEST to current round. */
5559 SET_BIT (in_worklist, e->dest->index);
5560 fibheap_insert (worklist, bb_order[e->dest->index],
5568 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
5570 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
5572 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
5574 (int)worklist->nodes, (int)pending->nodes, htabsz);
5576 if (dump_file && (dump_flags & TDF_DETAILS))
5578 fprintf (dump_file, "BB %i IN:\n", bb->index);
5579 dump_dataflow_set (&VTI (bb)->in);
5580 fprintf (dump_file, "BB %i OUT:\n", bb->index);
5581 dump_dataflow_set (&VTI (bb)->out);
5587 if (MAY_HAVE_DEBUG_INSNS)
5589 gcc_assert (VTI (bb)->flooded);
5592 fibheap_delete (worklist);
5593 fibheap_delete (pending);
5594 sbitmap_free (visited);
5595 sbitmap_free (in_worklist);
5596 sbitmap_free (in_pending);
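
/* Illustration only: the loop above iterates to a fixed point using two
   queues keyed by reverse-postorder position -- WORKLIST for the current
   round and PENDING for the next one -- swapping them between rounds and
   using bitmaps to avoid duplicate entries.  Below is a minimal
   standalone sketch of that control structure with hypothetical names;
   the real code uses fibonacci heaps and sbitmaps, while the sketch
   simply sends blocks whose position was already passed to the next
   round.  SUCC_START/SUCC_LIST describe the successors of block B as
   SUCC_LIST[SUCC_START[B]] up to (but not including)
   SUCC_LIST[SUCC_START[B + 1]].  The helper assumes
   N_BLOCKS <= EXAMPLE_MAX_BLOCKS and is not used by this file.  */

static void
example_iterate_to_fixpoint (int n_blocks,
                             const int *succ_start, const int *succ_list,
                             int (*process_block) (int))
{
  enum { EXAMPLE_MAX_BLOCKS = 1024 };
  char in_worklist[EXAMPLE_MAX_BLOCKS], in_pending[EXAMPLE_MAX_BLOCKS];
  int bb, i, pending_left;

  /* Every block starts out in the next round, in index (RPO) order.  */
  for (bb = 0; bb < n_blocks; bb++)
    {
      in_worklist[bb] = 0;
      in_pending[bb] = 1;
    }
  pending_left = n_blocks;

  while (pending_left > 0)
    {
      /* Swap the rounds: what was pending becomes the current worklist.  */
      for (bb = 0; bb < n_blocks; bb++)
        {
          in_worklist[bb] = in_pending[bb];
          in_pending[bb] = 0;
        }
      pending_left = 0;

      /* Process the current round in increasing (reverse-postorder)
         block index.  */
      for (bb = 0; bb < n_blocks; bb++)
        if (in_worklist[bb])
          {
            in_worklist[bb] = 0;
            if (process_block (bb))
              for (i = succ_start[bb]; i < succ_start[bb + 1]; i++)
                {
                  int succ = succ_list[i];

                  if (succ > bb)
                    in_worklist[succ] = 1;    /* Later in this round.  */
                  else if (!in_pending[succ])
                    {
                      in_pending[succ] = 1;   /* Redo it next round.  */
                      pending_left++;
                    }
                }
          }
    }
}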
5599 /* Print the content of the LIST to dump file. */
5602 dump_attrs_list (attrs list)
5604 for (; list; list = list->next)
5606 if (dv_is_decl_p (list->dv))
5607 print_mem_expr (dump_file, dv_as_decl (list->dv));
5609 print_rtl_single (dump_file, dv_as_value (list->dv));
5610 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
5612 fprintf (dump_file, "\n");
5615 /* Print the information about variable *SLOT to dump file. */
5618 dump_variable_slot (void **slot, void *data ATTRIBUTE_UNUSED)
5620 variable var = (variable) *slot;
5622 dump_variable (var);
5624 /* Continue traversing the hash table. */
5628 /* Print the information about variable VAR to dump file. */
5631 dump_variable (variable var)
5634 location_chain node;
5636 if (dv_is_decl_p (var->dv))
5638 const_tree decl = dv_as_decl (var->dv);
5640 if (DECL_NAME (decl))
5641 fprintf (dump_file, " name: %s",
5642 IDENTIFIER_POINTER (DECL_NAME (decl)));
5644 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
5645 if (dump_flags & TDF_UID)
5646 fprintf (dump_file, " D.%u\n", DECL_UID (decl));
5648 fprintf (dump_file, "\n");
5652 fputc (' ', dump_file);
5653 print_rtl_single (dump_file, dv_as_value (var->dv));
5656 for (i = 0; i < var->n_var_parts; i++)
5658 fprintf (dump_file, " offset %ld\n",
5659 (long) var->var_part[i].offset);
5660 for (node = var->var_part[i].loc_chain; node; node = node->next)
5662 fprintf (dump_file, " ");
5663 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
5664 fprintf (dump_file, "[uninit]");
5665 print_rtl_single (dump_file, node->loc);
5670 /* Print the information about variables from hash table VARS to dump file. */
5673 dump_vars (htab_t vars)
5675 if (htab_elements (vars) > 0)
5677 fprintf (dump_file, "Variables:\n");
5678 htab_traverse (vars, dump_variable_slot, NULL);
5682 /* Print the dataflow set SET to dump file. */
5685 dump_dataflow_set (dataflow_set *set)
5689 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
5691 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5695 fprintf (dump_file, "Reg %d:", i);
5696 dump_attrs_list (set->regs[i]);
5699 dump_vars (shared_hash_htab (set->vars));
5700 fprintf (dump_file, "\n");
5703 /* Print the IN and OUT sets for each basic block to dump file. */
5706 dump_dataflow_sets (void)
5712 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
5713 fprintf (dump_file, "IN:\n");
5714 dump_dataflow_set (&VTI (bb)->in);
5715 fprintf (dump_file, "OUT:\n");
5716 dump_dataflow_set (&VTI (bb)->out);
5720 /* Add variable VAR to the hash table of changed variables and,
5721 if it has no locations, delete it from SET's hash table. */
5724 variable_was_changed (variable var, dataflow_set *set)
5726 hashval_t hash = dv_htab_hash (var->dv);
5732 /* Remember this decl or VALUE has been added to changed_variables. */
5733 set_dv_changed (var->dv, true);
5735 slot = htab_find_slot_with_hash (changed_variables,
5739 if (set && var->n_var_parts == 0)
5743 empty_var = (variable) pool_alloc (dv_pool (var->dv));
5744 empty_var->dv = var->dv;
5745 empty_var->refcount = 1;
5746 empty_var->n_var_parts = 0;
5759 if (var->n_var_parts == 0)
5764 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
5767 if (shared_hash_shared (set->vars))
5768 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
5770 htab_clear_slot (shared_hash_htab (set->vars), slot);
5776 /* Look for the index in VAR->var_part corresponding to OFFSET.
5777 Return -1 if not found.  If INSERTION_POINT is non-NULL, the
5778 referenced int will be set to the index that the part has, or to the
5779 index at which it should be inserted if it is not present. */
5782 find_variable_location_part (variable var, HOST_WIDE_INT offset,
5783 int *insertion_point)
5787 /* Find the location part. */
5789 high = var->n_var_parts;
5792 pos = (low + high) / 2;
5793 if (var->var_part[pos].offset < offset)
5800 if (insertion_point)
5801 *insertion_point = pos;
5803 if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
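
/* Illustration only: find_variable_location_part above is a standard
   binary search over the sorted var_part[] offsets that also reports
   where a missing offset would have to be inserted.  A self-contained
   version of the same idea over a plain sorted array, with hypothetical
   names (not used by this file), looks like this.  */

static int
example_find_or_insertion_point (const long *offsets, int n, long offset,
                                 int *insertion_point)
{
  int low = 0, high = n;

  while (low != high)
    {
      int pos = (low + high) / 2;

      if (offsets[pos] < offset)
        low = pos + 1;
      else
        high = pos;
    }

  if (insertion_point)
    *insertion_point = low;

  if (low < n && offsets[low] == offset)
    return low;   /* Found: index of the matching entry.  */
  return -1;      /* Not found: *INSERTION_POINT says where it belongs.  */
}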
5810 set_slot_part (dataflow_set *set, rtx loc, void **slot,
5811 decl_or_value dv, HOST_WIDE_INT offset,
5812 enum var_init_status initialized, rtx set_src)
5815 location_chain node, next;
5816 location_chain *nextp;
5818 bool onepart = dv_onepart_p (dv);
5820 gcc_assert (offset == 0 || !onepart);
5821 gcc_assert (loc != dv_as_opaque (dv));
5823 var = (variable) *slot;
5825 if (! flag_var_tracking_uninit)
5826 initialized = VAR_INIT_STATUS_INITIALIZED;
5830 /* Create new variable information. */
5831 var = (variable) pool_alloc (dv_pool (dv));
5834 var->n_var_parts = 1;
5835 var->var_part[0].offset = offset;
5836 var->var_part[0].loc_chain = NULL;
5837 var->var_part[0].cur_loc = NULL;
5840 nextp = &var->var_part[0].loc_chain;
5841 if (emit_notes && dv_is_value_p (dv))
5842 add_cselib_value_chains (dv);
5848 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
5852 if (GET_CODE (loc) == VALUE)
5854 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5855 nextp = &node->next)
5856 if (GET_CODE (node->loc) == VALUE)
5858 if (node->loc == loc)
5863 if (canon_value_cmp (node->loc, loc))
5871 else if (REG_P (node->loc) || MEM_P (node->loc))
5879 else if (REG_P (loc))
5881 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5882 nextp = &node->next)
5883 if (REG_P (node->loc))
5885 if (REGNO (node->loc) < REGNO (loc))
5889 if (REGNO (node->loc) == REGNO (loc))
5902 else if (MEM_P (loc))
5904 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5905 nextp = &node->next)
5906 if (REG_P (node->loc))
5908 else if (MEM_P (node->loc))
5910 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
5922 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
5923 nextp = &node->next)
5924 if ((r = loc_cmp (node->loc, loc)) >= 0)
5932 if (var->refcount > 1 || shared_hash_shared (set->vars))
5934 slot = unshare_variable (set, slot, var, initialized);
5935 var = (variable)*slot;
5936 for (nextp = &var->var_part[0].loc_chain; c;
5937 nextp = &(*nextp)->next)
5939 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
5946 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
5948 pos = find_variable_location_part (var, offset, &inspos);
5952 node = var->var_part[pos].loc_chain;
5955 && ((REG_P (node->loc) && REG_P (loc)
5956 && REGNO (node->loc) == REGNO (loc))
5957 || rtx_equal_p (node->loc, loc)))
5959 /* LOC is at the beginning of the chain, so we have nothing
5961 if (node->init < initialized)
5962 node->init = initialized;
5963 if (set_src != NULL)
5964 node->set_src = set_src;
5970 /* We have to make a copy of a shared variable. */
5971 if (var->refcount > 1 || shared_hash_shared (set->vars))
5973 slot = unshare_variable (set, slot, var, initialized);
5974 var = (variable)*slot;
5980 /* We have not found the location part, so a new one will be created. */
5982 /* We have to make a copy of the shared variable. */
5983 if (var->refcount > 1 || shared_hash_shared (set->vars))
5985 slot = unshare_variable (set, slot, var, initialized);
5986 var = (variable)*slot;
5989 /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
5990 thus there are at most MAX_VAR_PARTS different offsets. */
5991 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
5992 && (!var->n_var_parts || !dv_onepart_p (var->dv)));
5994 /* We have to move the elements of the array starting at index
5995 INSPOS up by one position to make room for the new part. */
5996 for (pos = var->n_var_parts; pos > inspos; pos--)
5997 var->var_part[pos] = var->var_part[pos - 1];
6000 var->var_part[pos].offset = offset;
6001 var->var_part[pos].loc_chain = NULL;
6002 var->var_part[pos].cur_loc = NULL;
6005 /* Delete the location from the list. */
6006 nextp = &var->var_part[pos].loc_chain;
6007 for (node = var->var_part[pos].loc_chain; node; node = next)
6010 if ((REG_P (node->loc) && REG_P (loc)
6011 && REGNO (node->loc) == REGNO (loc))
6012 || rtx_equal_p (node->loc, loc))
6014 /* Save these values, to assign to the new node, before
6015 deleting this one. */
6016 if (node->init > initialized)
6017 initialized = node->init;
6018 if (node->set_src != NULL && set_src == NULL)
6019 set_src = node->set_src;
6020 pool_free (loc_chain_pool, node);
6025 nextp = &node->next;
6028 nextp = &var->var_part[pos].loc_chain;
6031 /* Add the location to the beginning. */
6032 node = (location_chain) pool_alloc (loc_chain_pool);
6034 node->init = initialized;
6035 node->set_src = set_src;
6036 node->next = *nextp;
6039 if (onepart && emit_notes)
6040 add_value_chains (var->dv, loc);
6042 /* If no location has been emitted for this variable part yet, do so now. */
6043 if (var->var_part[pos].cur_loc == NULL)
6045 var->var_part[pos].cur_loc = loc;
6046 variable_was_changed (var, set);
6052 /* Set the part of the variable's location in the dataflow set SET.  The
6053 variable part is specified by variable's declaration in DV and
6054 offset OFFSET and the part's location by LOC. IOPT should be
6055 NO_INSERT if the variable is known to be in SET already and the
6056 variable hash table must not be resized, and INSERT otherwise. */
6059 set_variable_part (dataflow_set *set, rtx loc,
6060 decl_or_value dv, HOST_WIDE_INT offset,
6061 enum var_init_status initialized, rtx set_src,
6062 enum insert_option iopt)
6066 if (iopt == NO_INSERT)
6067 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6070 slot = shared_hash_find_slot (set->vars, dv);
6072 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
6074 slot = set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
6077 /* Remove all recorded register locations for the given variable part
6078 from dataflow set SET, except for those that are identical to LOC.
6079 The variable part is specified by variable's declaration or value
6080 DV and offset OFFSET. */
6083 clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
6084 HOST_WIDE_INT offset, rtx set_src)
6086 variable var = (variable) *slot;
6087 int pos = find_variable_location_part (var, offset, NULL);
6091 location_chain node, next;
6093 /* Remove the register locations from the dataflow set. */
6094 next = var->var_part[pos].loc_chain;
6095 for (node = next; node; node = next)
6098 if (node->loc != loc
6099 && (!flag_var_tracking_uninit
6102 || !rtx_equal_p (set_src, node->set_src)))
6104 if (REG_P (node->loc))
6109 /* Remove the variable part from the register's
6110 list, but preserve any other variable parts
6111 that might be regarded as live in that same
6113 anextp = &set->regs[REGNO (node->loc)];
6114 for (anode = *anextp; anode; anode = anext)
6116 anext = anode->next;
6117 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
6118 && anode->offset == offset)
6120 pool_free (attrs_pool, anode);
6124 anextp = &anode->next;
6128 slot = delete_slot_part (set, node->loc, slot, offset);
6136 /* Remove all recorded register locations for the given variable part
6137 from dataflow set SET, except for those that are identical to LOC.
6138 The variable part is specified by variable's declaration or value
6139 DV and offset OFFSET. */
6142 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6143 HOST_WIDE_INT offset, rtx set_src)
6147 if (!dv_as_opaque (dv)
6148 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
6151 slot = shared_hash_find_slot_noinsert (set->vars, dv);
6155 slot = clobber_slot_part (set, loc, slot, offset, set_src);
6158 /* Delete the part of the variable's location from dataflow set SET.  The
6159 variable part is specified by its SET->vars slot SLOT and offset
6160 OFFSET and the part's location by LOC. */
6163 delete_slot_part (dataflow_set *set, rtx loc, void **slot,
6164 HOST_WIDE_INT offset)
6166 variable var = (variable) *slot;
6167 int pos = find_variable_location_part (var, offset, NULL);
6171 location_chain node, next;
6172 location_chain *nextp;
6175 if (var->refcount > 1 || shared_hash_shared (set->vars))
6177 /* If the variable contains the location part we have to
6178 make a copy of the variable. */
6179 for (node = var->var_part[pos].loc_chain; node;
6182 if ((REG_P (node->loc) && REG_P (loc)
6183 && REGNO (node->loc) == REGNO (loc))
6184 || rtx_equal_p (node->loc, loc))
6186 slot = unshare_variable (set, slot, var,
6187 VAR_INIT_STATUS_UNKNOWN);
6188 var = (variable)*slot;
6194 /* Delete the location part. */
6195 nextp = &var->var_part[pos].loc_chain;
6196 for (node = *nextp; node; node = next)
6199 if ((REG_P (node->loc) && REG_P (loc)
6200 && REGNO (node->loc) == REGNO (loc))
6201 || rtx_equal_p (node->loc, loc))
6203 if (emit_notes && pos == 0 && dv_onepart_p (var->dv))
6204 remove_value_chains (var->dv, node->loc);
6205 pool_free (loc_chain_pool, node);
6210 nextp = &node->next;
6213 /* If we have deleted the location which was last emitted
6214 we have to emit a new location, so add the variable to the set
6215 of changed variables. */
6216 if (var->var_part[pos].cur_loc
6218 && REG_P (var->var_part[pos].cur_loc)
6219 && REGNO (loc) == REGNO (var->var_part[pos].cur_loc))
6220 || rtx_equal_p (loc, var->var_part[pos].cur_loc)))
6223 if (var->var_part[pos].loc_chain)
6224 var->var_part[pos].cur_loc = var->var_part[pos].loc_chain->loc;
6229 if (var->var_part[pos].loc_chain == NULL)
6231 gcc_assert (changed);
6233 if (emit_notes && var->n_var_parts == 0 && dv_is_value_p (var->dv))
6234 remove_cselib_value_chains (var->dv);
6235 while (pos < var->n_var_parts)
6237 var->var_part[pos] = var->var_part[pos + 1];
6242 variable_was_changed (var, set);
6248 /* Delete the part of the variable's location from dataflow set SET.  The
6249 variable part is specified by variable's declaration or value DV
6250 and offset OFFSET and the part's location by LOC. */
6253 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6254 HOST_WIDE_INT offset)
6256 void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
6260 slot = delete_slot_part (set, loc, slot, offset);
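
/* Illustration only: the loc_chain manipulation in set_slot_part and
   delete_slot_part above relies on keeping a pointer to the "next"
   field currently being patched (NEXTP), so that nodes can be unlinked
   or inserted without tracking a separate "previous" node.  A
   self-contained sketch of that technique on a minimal list type, with
   hypothetical names (not used by this file), follows.  */

struct example_node
{
  int key;
  struct example_node *next;
};

/* Unlink and return the node with KEY, or return 0 if it is absent.
   LISTP points to the head pointer of the chain.  */

static struct example_node *
example_unlink (struct example_node **listp, int key)
{
  struct example_node **nextp, *node;

  for (nextp = listp; (node = *nextp); nextp = &node->next)
    if (node->key == key)
      {
        *nextp = node->next;   /* Splice the node out of the chain.  */
        return node;
      }

  return 0;
}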
6263 /* Callback for cselib_expand_value that looks for expressions
6264 holding the value in the var-tracking hash tables.  Return X for
6265 standard processing; anything else is used as-is. */
6268 vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
6270 htab_t vars = (htab_t)data;
6274 rtx result, subreg, xret;
6276 switch (GET_CODE (x))
6279 subreg = SUBREG_REG (x);
6281 if (GET_CODE (SUBREG_REG (x)) != VALUE)
6284 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
6286 vt_expand_loc_callback, data);
6291 result = simplify_gen_subreg (GET_MODE (x), subreg,
6292 GET_MODE (SUBREG_REG (x)),
6295 /* Invalid SUBREGs are ok in debug info. ??? We could try
6296 alternate expansions for the VALUE as well. */
6297 if (!result && (REG_P (subreg) || MEM_P (subreg)))
6298 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
6303 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
6308 dv = dv_from_value (x);
6316 if (VALUE_RECURSED_INTO (x))
6319 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
6324 if (var->n_var_parts == 0)
6327 gcc_assert (var->n_var_parts == 1);
6329 VALUE_RECURSED_INTO (x) = true;
6332 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
6334 result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
6335 vt_expand_loc_callback, vars);
6340 VALUE_RECURSED_INTO (x) = false;
6347 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
6351 vt_expand_loc (rtx loc, htab_t vars)
6353 if (!MAY_HAVE_DEBUG_INSNS)
6356 loc = cselib_expand_value_rtx_cb (loc, scratch_regs, 5,
6357 vt_expand_loc_callback, vars);
6359 if (loc && MEM_P (loc))
6360 loc = targetm.delegitimize_address (loc);
6365 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
6366 additional parameters: WHERE specifies whether the note shall be emitted
6367 before or after instruction INSN. */
6370 emit_note_insn_var_location (void **varp, void *data)
6372 variable var = (variable) *varp;
6373 rtx insn = ((emit_note_data *)data)->insn;
6374 enum emit_note_where where = ((emit_note_data *)data)->where;
6375 htab_t vars = ((emit_note_data *)data)->vars;
6377 int i, j, n_var_parts;
6379 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
6380 HOST_WIDE_INT last_limit;
6381 tree type_size_unit;
6382 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
6383 rtx loc[MAX_VAR_PARTS];
6386 if (dv_is_value_p (var->dv))
6389 decl = dv_as_decl (var->dv);
6391 if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6399 for (i = 0; i < var->n_var_parts; i++)
6401 enum machine_mode mode, wider_mode;
6404 if (last_limit < var->var_part[i].offset)
6409 else if (last_limit > var->var_part[i].offset)
6411 offsets[n_var_parts] = var->var_part[i].offset;
6412 loc2 = vt_expand_loc (var->var_part[i].loc_chain->loc, vars);
6418 loc[n_var_parts] = loc2;
6419 mode = GET_MODE (var->var_part[i].loc_chain->loc);
6420 initialized = var->var_part[i].loc_chain->init;
6421 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
6423 /* Attempt to merge adjacent registers or memory. */
6424 wider_mode = GET_MODE_WIDER_MODE (mode);
6425 for (j = i + 1; j < var->n_var_parts; j++)
6426 if (last_limit <= var->var_part[j].offset)
6428 if (j < var->n_var_parts
6429 && wider_mode != VOIDmode
6430 && mode == GET_MODE (var->var_part[j].loc_chain->loc)
6431 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
6432 && (loc2 = vt_expand_loc (var->var_part[j].loc_chain->loc, vars))
6433 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2)
6434 && last_limit == var->var_part[j].offset)
6438 if (REG_P (loc[n_var_parts])
6439 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
6440 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
6441 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
6444 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
6445 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
6447 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
6448 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
6451 if (!REG_P (new_loc)
6452 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
6455 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
6458 else if (MEM_P (loc[n_var_parts])
6459 && GET_CODE (XEXP (loc2, 0)) == PLUS
6460 && REG_P (XEXP (XEXP (loc2, 0), 0))
6461 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
6463 if ((REG_P (XEXP (loc[n_var_parts], 0))
6464 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
6465 XEXP (XEXP (loc2, 0), 0))
6466 && INTVAL (XEXP (XEXP (loc2, 0), 1))
6467 == GET_MODE_SIZE (mode))
6468 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
6469 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
6470 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
6471 XEXP (XEXP (loc2, 0), 0))
6472 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
6473 + GET_MODE_SIZE (mode)
6474 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
6475 new_loc = adjust_address_nv (loc[n_var_parts],
6481 loc[n_var_parts] = new_loc;
6483 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
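
	  /* For illustration, the memory case above merges two pieces
	     only when the second one starts exactly where the first one
	     ends, relative to the same base register, i.e. roughly
	     (hypothetical helper, not used by this file):

	       static int
	       example_mem_pieces_adjacent_p (int same_base_p,
					      long off1, long size1, long off2)
	       {
		 return same_base_p && off2 == off1 + size1;
	       }

	     where OFF1/OFF2 are the constant displacements (0 for a bare
	     base register) and SIZE1 is GET_MODE_SIZE of the first piece.  */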
6489 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6490 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
6493 if (where != EMIT_NOTE_BEFORE_INSN)
6495 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
6496 if (where == EMIT_NOTE_AFTER_CALL_INSN)
6497 NOTE_DURING_CALL_P (note) = true;
6500 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
6502 if (! flag_var_tracking_uninit)
6503 initialized = VAR_INIT_STATUS_INITIALIZED;
6507 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6508 NULL_RTX, (int) initialized);
6510 else if (n_var_parts == 1)
6513 = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
6515 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6519 else if (n_var_parts)
6523 for (i = 0; i < n_var_parts; i++)
6525 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
6527 parallel = gen_rtx_PARALLEL (VOIDmode,
6528 gen_rtvec_v (n_var_parts, loc));
6529 NOTE_VAR_LOCATION (note) = gen_rtx_VAR_LOCATION (VOIDmode, decl,
6535 set_dv_changed (var->dv, false);
6536 htab_clear_slot (changed_variables, varp);
6538 /* Continue traversing the hash table. */
6542 DEF_VEC_P (variable);
6543 DEF_VEC_ALLOC_P (variable, heap);
6545 /* Stack of variable_def pointers that need processing with
6546 check_changed_vars_2. */
6548 static VEC (variable, heap) *changed_variables_stack;
6550 /* Populate changed_variables_stack with variable_def pointers
6551 that need variable_was_changed called on them. */
6554 check_changed_vars_1 (void **slot, void *data)
6556 variable var = (variable) *slot;
6557 htab_t htab = (htab_t) data;
6559 if (dv_is_value_p (var->dv))
6562 = (value_chain) htab_find_with_hash (value_chains, var->dv,
6563 dv_htab_hash (var->dv));
6567 for (vc = vc->next; vc; vc = vc->next)
6568 if (!dv_changed_p (vc->dv))
6571 = (variable) htab_find_with_hash (htab, vc->dv,
6572 dv_htab_hash (vc->dv));
6574 VEC_safe_push (variable, heap, changed_variables_stack,
6581 /* Add VAR to changed_variables; for VALUEs, also recursively add
6582 all DVs that aren't in changed_variables yet but reference the
6583 VALUE from their loc_chains. */
6586 check_changed_vars_2 (variable var, htab_t htab)
6588 variable_was_changed (var, NULL);
6589 if (dv_is_value_p (var->dv))
6592 = (value_chain) htab_find_with_hash (value_chains, var->dv,
6593 dv_htab_hash (var->dv));
6597 for (vc = vc->next; vc; vc = vc->next)
6598 if (!dv_changed_p (vc->dv))
6601 = (variable) htab_find_with_hash (htab, vc->dv,
6602 dv_htab_hash (vc->dv));
6604 check_changed_vars_2 (vcvar, htab);
6609 /* Emit a NOTE_INSN_VAR_LOCATION note for each variable from the chain
6610 CHANGED_VARIABLES and delete this chain.  WHERE specifies whether the notes
6611 shall be emitted before or after instruction INSN. */
6614 emit_notes_for_changes (rtx insn, enum emit_note_where where,
6617 emit_note_data data;
6618 htab_t htab = shared_hash_htab (vars);
6620 if (!htab_elements (changed_variables))
6623 if (MAY_HAVE_DEBUG_INSNS)
6625 /* Unfortunately this has to be done in two steps, because
6626 we can't traverse a hashtab into which we are inserting
6627 through variable_was_changed. */
6628 htab_traverse (changed_variables, check_changed_vars_1, htab);
6629 while (VEC_length (variable, changed_variables_stack) > 0)
6630 check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
6638 htab_traverse (changed_variables, emit_note_insn_var_location, &data);
6641 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
6642 same variable in hash table DATA or is not there at all. */
6645 emit_notes_for_differences_1 (void **slot, void *data)
6647 htab_t new_vars = (htab_t) data;
6648 variable old_var, new_var;
6650 old_var = (variable) *slot;
6651 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
6652 dv_htab_hash (old_var->dv));
6656 /* Variable has disappeared. */
6659 empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
6660 empty_var->dv = old_var->dv;
6661 empty_var->refcount = 0;
6662 empty_var->n_var_parts = 0;
6663 if (dv_onepart_p (old_var->dv))
6667 gcc_assert (old_var->n_var_parts == 1);
6668 for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
6669 remove_value_chains (old_var->dv, lc->loc);
6670 if (dv_is_value_p (old_var->dv))
6671 remove_cselib_value_chains (old_var->dv);
6673 variable_was_changed (empty_var, NULL);
6675 else if (variable_different_p (old_var, new_var, true))
6677 if (dv_onepart_p (old_var->dv))
6679 location_chain lc1, lc2;
6681 gcc_assert (old_var->n_var_parts == 1);
6682 gcc_assert (new_var->n_var_parts == 1);
6683 lc1 = old_var->var_part[0].loc_chain;
6684 lc2 = new_var->var_part[0].loc_chain;
6687 && ((REG_P (lc1->loc) && REG_P (lc2->loc))
6688 || rtx_equal_p (lc1->loc, lc2->loc)))
6693 for (; lc2; lc2 = lc2->next)
6694 add_value_chains (old_var->dv, lc2->loc);
6695 for (; lc1; lc1 = lc1->next)
6696 remove_value_chains (old_var->dv, lc1->loc);
6698 variable_was_changed (new_var, NULL);
6701 /* Continue traversing the hash table. */
6705 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
6709 emit_notes_for_differences_2 (void **slot, void *data)
6711 htab_t old_vars = (htab_t) data;
6712 variable old_var, new_var;
6714 new_var = (variable) *slot;
6715 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
6716 dv_htab_hash (new_var->dv));
6719 /* Variable has appeared. */
6720 if (dv_onepart_p (new_var->dv))
6724 gcc_assert (new_var->n_var_parts == 1);
6725 for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
6726 add_value_chains (new_var->dv, lc->loc);
6727 if (dv_is_value_p (new_var->dv))
6728 add_cselib_value_chains (new_var->dv);
6730 variable_was_changed (new_var, NULL);
6733 /* Continue traversing the hash table. */
6737 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
6741 emit_notes_for_differences (rtx insn, dataflow_set *old_set,
6742 dataflow_set *new_set)
6744 htab_traverse (shared_hash_htab (old_set->vars),
6745 emit_notes_for_differences_1,
6746 shared_hash_htab (new_set->vars));
6747 htab_traverse (shared_hash_htab (new_set->vars),
6748 emit_notes_for_differences_2,
6749 shared_hash_htab (old_set->vars));
6750 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
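
/* Illustration only: emit_notes_for_differences above computes a
   symmetric difference of two hash tables with two lookup passes --
   the old table against the new one to find disappeared or changed
   entries, then the new table against the old one to find appeared
   ones.  A self-contained sketch of the same scheme over plain integer
   arrays, with hypothetical names (not used by this file), follows.  */

static int
example_set_contains (const int *set, int n, int key)
{
  int i;

  for (i = 0; i < n; i++)
    if (set[i] == key)
      return 1;
  return 0;
}

static void
example_diff_sets (const int *old_set, int n_old,
                   const int *new_set, int n_new,
                   void (*note_disappeared) (int),
                   void (*note_appeared) (int))
{
  int i;

  /* Pass 1: anything in OLD_SET but not in NEW_SET has disappeared.  */
  for (i = 0; i < n_old; i++)
    if (!example_set_contains (new_set, n_new, old_set[i]))
      note_disappeared (old_set[i]);

  /* Pass 2: anything in NEW_SET but not in OLD_SET has appeared.  */
  for (i = 0; i < n_new; i++)
    if (!example_set_contains (old_set, n_old, new_set[i]))
      note_appeared (new_set[i]);
}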
6753 /* Emit the notes for changes of location parts in the basic block BB. */
6756 emit_notes_in_bb (basic_block bb, dataflow_set *set)
6760 dataflow_set_clear (set);
6761 dataflow_set_copy (set, &VTI (bb)->in);
6763 for (i = 0; i < VTI (bb)->n_mos; i++)
6765 rtx insn = VTI (bb)->mos[i].insn;
6767 switch (VTI (bb)->mos[i].type)
6770 dataflow_set_clear_at_call (set);
6771 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
6776 rtx loc = VTI (bb)->mos[i].u.loc;
6779 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6781 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6783 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
6789 rtx loc = VTI (bb)->mos[i].u.loc;
6793 if (GET_CODE (loc) == CONCAT)
6795 val = XEXP (loc, 0);
6796 vloc = XEXP (loc, 1);
6804 var = PAT_VAR_LOCATION_DECL (vloc);
6806 clobber_variable_part (set, NULL_RTX,
6807 dv_from_decl (var), 0, NULL_RTX);
6810 if (VAL_NEEDS_RESOLUTION (loc))
6811 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6812 set_variable_part (set, val, dv_from_decl (var), 0,
6813 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6817 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
6823 rtx loc = VTI (bb)->mos[i].u.loc;
6824 rtx val, vloc, uloc;
6826 vloc = uloc = XEXP (loc, 1);
6827 val = XEXP (loc, 0);
6829 if (GET_CODE (val) == CONCAT)
6831 uloc = XEXP (val, 1);
6832 val = XEXP (val, 0);
6835 if (VAL_NEEDS_RESOLUTION (loc))
6836 val_resolve (set, val, vloc, insn);
6838 if (VAL_HOLDS_TRACK_EXPR (loc))
6840 if (GET_CODE (uloc) == REG)
6841 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6843 else if (GET_CODE (uloc) == MEM)
6844 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6848 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
6854 rtx loc = VTI (bb)->mos[i].u.loc;
6855 rtx val, vloc, uloc;
6857 vloc = uloc = XEXP (loc, 1);
6858 val = XEXP (loc, 0);
6860 if (GET_CODE (val) == CONCAT)
6862 vloc = XEXP (val, 1);
6863 val = XEXP (val, 0);
6866 if (GET_CODE (vloc) == SET)
6868 rtx vsrc = SET_SRC (vloc);
6870 gcc_assert (val != vsrc);
6871 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6873 vloc = SET_DEST (vloc);
6875 if (VAL_NEEDS_RESOLUTION (loc))
6876 val_resolve (set, val, vsrc, insn);
6878 else if (VAL_NEEDS_RESOLUTION (loc))
6880 gcc_assert (GET_CODE (uloc) == SET
6881 && GET_CODE (SET_SRC (uloc)) == REG);
6882 val_resolve (set, val, SET_SRC (uloc), insn);
6885 if (VAL_HOLDS_TRACK_EXPR (loc))
6887 if (VAL_EXPR_IS_CLOBBERED (loc))
6890 var_reg_delete (set, uloc, true);
6891 else if (MEM_P (uloc))
6892 var_mem_delete (set, uloc, true);
6896 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6898 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6900 if (GET_CODE (uloc) == SET)
6902 set_src = SET_SRC (uloc);
6903 uloc = SET_DEST (uloc);
6908 status = find_src_status (set, set_src);
6910 set_src = find_src_set_src (set, set_src);
6914 var_reg_delete_and_set (set, uloc, !copied_p,
6916 else if (MEM_P (uloc))
6917 var_mem_delete_and_set (set, uloc, !copied_p,
6921 else if (REG_P (uloc))
6922 var_regno_delete (set, REGNO (uloc));
6924 val_store (set, val, vloc, insn);
6926 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
6933 rtx loc = VTI (bb)->mos[i].u.loc;
6936 if (GET_CODE (loc) == SET)
6938 set_src = SET_SRC (loc);
6939 loc = SET_DEST (loc);
6943 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
6946 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
6949 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
6956 rtx loc = VTI (bb)->mos[i].u.loc;
6957 enum var_init_status src_status;
6960 if (GET_CODE (loc) == SET)
6962 set_src = SET_SRC (loc);
6963 loc = SET_DEST (loc);
6966 src_status = find_src_status (set, set_src);
6967 set_src = find_src_set_src (set, set_src);
6970 var_reg_delete_and_set (set, loc, false, src_status, set_src);
6972 var_mem_delete_and_set (set, loc, false, src_status, set_src);
6974 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
6981 rtx loc = VTI (bb)->mos[i].u.loc;
6984 var_reg_delete (set, loc, false);
6986 var_mem_delete (set, loc, false);
6988 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
6994 rtx loc = VTI (bb)->mos[i].u.loc;
6997 var_reg_delete (set, loc, true);
6999 var_mem_delete (set, loc, true);
7001 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7007 set->stack_adjust += VTI (bb)->mos[i].u.adjust;
7013 /* Emit notes for the whole function. */
7016 vt_emit_notes (void)
7021 gcc_assert (!htab_elements (changed_variables));
7023 /* Free memory occupied by the out hash tables, as they aren't used
7026 dataflow_set_clear (&VTI (bb)->out);
7028 /* Enable emitting notes by functions (mainly by set_variable_part and
7029 delete_variable_part). */
7032 if (MAY_HAVE_DEBUG_INSNS)
7033 changed_variables_stack = VEC_alloc (variable, heap, 40);
7035 dataflow_set_init (&cur);
7039 /* Emit the notes for changes of variable locations between two
7040 subsequent basic blocks. */
7041 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
7043 /* Emit the notes for the changes in the basic block itself. */
7044 emit_notes_in_bb (bb, &cur);
7046 /* Free memory occupied by the in hash table; we won't need it
7048 dataflow_set_clear (&VTI (bb)->in);
7050 #ifdef ENABLE_CHECKING
7051 htab_traverse (shared_hash_htab (cur.vars),
7052 emit_notes_for_differences_1,
7053 shared_hash_htab (empty_shared_hash));
7054 if (MAY_HAVE_DEBUG_INSNS)
7055 gcc_assert (htab_elements (value_chains) == 0);
7057 dataflow_set_destroy (&cur);
7059 if (MAY_HAVE_DEBUG_INSNS)
7060 VEC_free (variable, heap, changed_variables_stack);
7065 /* If there is a declaration and offset associated with register/memory RTL,
7066 assign the declaration to *DECLP and the offset to *OFFSETP, and return true. */
7069 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
7073 if (REG_ATTRS (rtl))
7075 *declp = REG_EXPR (rtl);
7076 *offsetp = REG_OFFSET (rtl);
7080 else if (MEM_P (rtl))
7082 if (MEM_ATTRS (rtl))
7084 *declp = MEM_EXPR (rtl);
7085 *offsetp = INT_MEM_OFFSET (rtl);
7092 /* Insert the function's parameters into the IN and OUT sets of ENTRY_BLOCK. */
7095 vt_add_function_parameters (void)
7099 for (parm = DECL_ARGUMENTS (current_function_decl);
7100 parm; parm = TREE_CHAIN (parm))
7102 rtx decl_rtl = DECL_RTL_IF_SET (parm);
7103 rtx incoming = DECL_INCOMING_RTL (parm);
7105 enum machine_mode mode;
7106 HOST_WIDE_INT offset;
7110 if (TREE_CODE (parm) != PARM_DECL)
7113 if (!DECL_NAME (parm))
7116 if (!decl_rtl || !incoming)
7119 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
7122 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
7124 if (REG_P (incoming) || MEM_P (incoming))
7126 /* This means the argument is passed by invisible reference. */
7129 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
7133 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
7135 offset += byte_lowpart_offset (GET_MODE (incoming),
7136 GET_MODE (decl_rtl));
7145 /* Assume that DECL_RTL was a pseudo that got spilled to
7146 memory. The spill slot sharing code will force the
7147 memory to reference spill_slot_decl (%sfp), so we don't
7148 match above. That's ok, the pseudo must have referenced
7149 the entire parameter, so just reset OFFSET. */
7150 gcc_assert (decl == get_spill_slot_decl (false));
7154 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
7157 out = &VTI (ENTRY_BLOCK_PTR)->out;
7159 dv = dv_from_decl (parm);
7161 if (target_for_debug_bind (parm)
7162 /* We can't deal with these right now, because this kind of
7163 variable is single-part. ??? We could handle parallels
7164 that describe multiple locations for the same single
7165 value, but ATM we don't. */
7166 && GET_CODE (incoming) != PARALLEL)
7170 /* ??? We shouldn't ever hit this, but it may happen because
7171 arguments passed by invisible reference aren't dealt with
7172 above: incoming-rtl will have Pmode rather than the
7173 expected mode for the type. */
7177 val = cselib_lookup (var_lowpart (mode, incoming), mode, true);
7179 /* ??? Float-typed values in memory are not handled by
7183 cselib_preserve_value (val);
7184 set_variable_part (out, val->val_rtx, dv, offset,
7185 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7186 dv = dv_from_value (val->val_rtx);
7190 if (REG_P (incoming))
7192 incoming = var_lowpart (mode, incoming);
7193 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
7194 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
7196 set_variable_part (out, incoming, dv, offset,
7197 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7199 else if (MEM_P (incoming))
7201 incoming = var_lowpart (mode, incoming);
7202 set_variable_part (out, incoming, dv, offset,
7203 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
7207 if (MAY_HAVE_DEBUG_INSNS)
7209 cselib_preserve_only_values (true);
7210 cselib_reset_table_with_next_value (cselib_get_next_unknown_value ());
7215 /* Allocate and initialize the data structures for variable tracking
7216 and parse the RTL to get the micro operations. */
7219 vt_initialize (void)
7223 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
7225 if (MAY_HAVE_DEBUG_INSNS)
7228 scratch_regs = BITMAP_ALLOC (NULL);
7229 valvar_pool = create_alloc_pool ("small variable_def pool",
7230 sizeof (struct variable_def), 256);
7234 scratch_regs = NULL;
7241 HOST_WIDE_INT pre, post = 0;
7243 unsigned int next_value_before = cselib_get_next_unknown_value ();
7244 unsigned int next_value_after = next_value_before;
7246 if (MAY_HAVE_DEBUG_INSNS)
7248 cselib_record_sets_hook = count_with_sets;
7249 if (dump_file && (dump_flags & TDF_DETAILS))
7250 fprintf (dump_file, "first value: %i\n",
7251 cselib_get_next_unknown_value ());
7254 /* Count the number of micro operations. */
7255 VTI (bb)->n_mos = 0;
7256 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
7257 insn = NEXT_INSN (insn))
7261 if (!frame_pointer_needed)
7263 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
7267 if (dump_file && (dump_flags & TDF_DETAILS))
7268 log_op_type (GEN_INT (pre), bb, insn,
7269 MO_ADJUST, dump_file);
7274 if (dump_file && (dump_flags & TDF_DETAILS))
7275 log_op_type (GEN_INT (post), bb, insn,
7276 MO_ADJUST, dump_file);
7279 cselib_hook_called = false;
7280 if (MAY_HAVE_DEBUG_INSNS)
7282 cselib_process_insn (insn);
7283 if (dump_file && (dump_flags & TDF_DETAILS))
7285 print_rtl_single (dump_file, insn);
7286 dump_cselib_table (dump_file);
7289 if (!cselib_hook_called)
7290 count_with_sets (insn, 0, 0);
7294 if (dump_file && (dump_flags & TDF_DETAILS))
7295 log_op_type (PATTERN (insn), bb, insn,
7296 MO_CALL, dump_file);
7301 count = VTI (bb)->n_mos;
7303 if (MAY_HAVE_DEBUG_INSNS)
7305 cselib_preserve_only_values (false);
7306 next_value_after = cselib_get_next_unknown_value ();
7307 cselib_reset_table_with_next_value (next_value_before);
7308 cselib_record_sets_hook = add_with_sets;
7309 if (dump_file && (dump_flags & TDF_DETAILS))
7310 fprintf (dump_file, "first value: %i\n",
7311 cselib_get_next_unknown_value ());
7314 /* Add the micro-operations to the array. */
7315 VTI (bb)->mos = XNEWVEC (micro_operation, VTI (bb)->n_mos);
7316 VTI (bb)->n_mos = 0;
7317 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
7318 insn = NEXT_INSN (insn))
7322 if (!frame_pointer_needed)
7324 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
7327 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
7329 mo->type = MO_ADJUST;
7333 if (dump_file && (dump_flags & TDF_DETAILS))
7334 log_op_type (PATTERN (insn), bb, insn,
7335 MO_ADJUST, dump_file);
7339 cselib_hook_called = false;
7340 if (MAY_HAVE_DEBUG_INSNS)
7342 cselib_process_insn (insn);
7343 if (dump_file && (dump_flags & TDF_DETAILS))
7345 print_rtl_single (dump_file, insn);
7346 dump_cselib_table (dump_file);
7349 if (!cselib_hook_called)
7350 add_with_sets (insn, 0, 0);
7352 if (!frame_pointer_needed && post)
7354 micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;
7356 mo->type = MO_ADJUST;
7357 mo->u.adjust = post;
7360 if (dump_file && (dump_flags & TDF_DETAILS))
7361 log_op_type (PATTERN (insn), bb, insn,
7362 MO_ADJUST, dump_file);
7366 gcc_assert (count == VTI (bb)->n_mos);
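
      /* For illustration, the two passes above follow the common
	 count-then-fill pattern: the first scan only counts how many
	 micro operations the block needs, the array is then sized with
	 XNEWVEC, and the second scan resets n_mos to 0 and stores the
	 operations, with the assertion above checking that both passes
	 agreed.  A standalone sketch of the pattern (hypothetical
	 names, not used by this file):

	   n = 0;
	   for (i = 0; i < n_items; i++)
	     if (wanted_p (items[i]))
	       n++;
	   out = XNEWVEC (item_type, n);
	   n = 0;
	   for (i = 0; i < n_items; i++)
	     if (wanted_p (items[i]))
	       out[n++] = items[i];  */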
7367 if (MAY_HAVE_DEBUG_INSNS)
7369 cselib_preserve_only_values (true);
7370 gcc_assert (next_value_after == cselib_get_next_unknown_value ());
7371 cselib_reset_table_with_next_value (next_value_after);
7372 cselib_record_sets_hook = NULL;
7376 attrs_pool = create_alloc_pool ("attrs_def pool",
7377 sizeof (struct attrs_def), 1024);
7378 var_pool = create_alloc_pool ("variable_def pool",
7379 sizeof (struct variable_def)
7380 + (MAX_VAR_PARTS - 1)
7381 * sizeof (((variable)NULL)->var_part[0]), 64);
7382 loc_chain_pool = create_alloc_pool ("location_chain_def pool",
7383 sizeof (struct location_chain_def),
7385 shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
7386 sizeof (struct shared_hash_def), 256);
7387 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
7388 empty_shared_hash->refcount = 1;
7389 empty_shared_hash->htab
7390 = htab_create (1, variable_htab_hash, variable_htab_eq,
7391 variable_htab_free);
7392 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
7393 variable_htab_free);
7394 if (MAY_HAVE_DEBUG_INSNS)
7396 value_chain_pool = create_alloc_pool ("value_chain_def pool",
7397 sizeof (struct value_chain_def),
7399 value_chains = htab_create (32, value_chain_htab_hash,
7400 value_chain_htab_eq, NULL);
7403 /* Init the IN and OUT sets. */
7406 VTI (bb)->visited = false;
7407 VTI (bb)->flooded = false;
7408 dataflow_set_init (&VTI (bb)->in);
7409 dataflow_set_init (&VTI (bb)->out);
7410 VTI (bb)->permp = NULL;
7413 VTI (ENTRY_BLOCK_PTR)->flooded = true;
7414 vt_add_function_parameters ();
7417 /* Get rid of all debug insns from the insn stream. */
7420 delete_debug_insns (void)
7425 if (!MAY_HAVE_DEBUG_INSNS)
7430 FOR_BB_INSNS_SAFE (bb, insn, next)
7431 if (DEBUG_INSN_P (insn))
7436 /* Run a fast, BB-local-only version of var tracking, to take care of
7437 information that we don't do global analysis on, so that not all
7438 information is lost.  If SKIPPED holds, we're skipping the global
7439 pass entirely, so we should try to use the information it would have
7440 handled as well. */
7443 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
7445 /* ??? Just skip it all for now. */
7446 delete_debug_insns ();
7449 /* Free the data structures needed for variable tracking. */
7458 free (VTI (bb)->mos);
7463 dataflow_set_destroy (&VTI (bb)->in);
7464 dataflow_set_destroy (&VTI (bb)->out);
7465 if (VTI (bb)->permp)
7467 dataflow_set_destroy (VTI (bb)->permp);
7468 XDELETE (VTI (bb)->permp);
7471 free_aux_for_blocks ();
7472 htab_delete (empty_shared_hash->htab);
7473 htab_delete (changed_variables);
7474 free_alloc_pool (attrs_pool);
7475 free_alloc_pool (var_pool);
7476 free_alloc_pool (loc_chain_pool);
7477 free_alloc_pool (shared_hash_pool);
7479 if (MAY_HAVE_DEBUG_INSNS)
7481 htab_delete (value_chains);
7482 free_alloc_pool (value_chain_pool);
7483 free_alloc_pool (valvar_pool);
7485 BITMAP_FREE (scratch_regs);
7486 scratch_regs = NULL;
7490 XDELETEVEC (vui_vec);
7495 /* The entry point to the variable tracking pass. */
7498 variable_tracking_main (void)
7500 if (flag_var_tracking_assignments < 0)
7502 delete_debug_insns ();
7506 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
7508 vt_debug_insns_local (true);
7512 mark_dfs_back_edges ();
7514 if (!frame_pointer_needed)
7516 if (!vt_stack_adjustments ())
7519 vt_debug_insns_local (true);
7524 vt_find_locations ();
7526 if (dump_file && (dump_flags & TDF_DETAILS))
7528 dump_dataflow_sets ();
7529 dump_flow_info (dump_file, dump_flags);
7535 vt_debug_insns_local (false);
7540 gate_handle_var_tracking (void)
7542 return (flag_var_tracking);
7547 struct rtl_opt_pass pass_variable_tracking =
7551 "vartrack", /* name */
7552 gate_handle_var_tracking, /* gate */
7553 variable_tracking_main, /* execute */
7556 0, /* static_pass_number */
7557 TV_VAR_TRACKING, /* tv_id */
7558 0, /* properties_required */
7559 0, /* properties_provided */
7560 0, /* properties_destroyed */
7561 0, /* todo_flags_start */
7562 TODO_dump_func | TODO_verify_rtl_sharing/* todo_flags_finish */