1 /* Common subexpression elimination library for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "hard-reg-set.h"
32 #include "insn-config.h"
36 #include "diagnostic-core.h"
40 #include "tree-pass.h"
43 #include "alloc-pool.h"
47 /* A list of cselib_val structures. */
48 struct elt_list {
49 struct elt_list *next;
50 cselib_val *elt;
51 };
53 static bool cselib_record_memory;
54 static bool cselib_preserve_constants;
55 static int entry_and_rtx_equal_p (const void *, const void *);
56 static hashval_t get_value_hash (const void *);
57 static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
58 static void new_elt_loc_list (cselib_val *, rtx);
59 static void unchain_one_value (cselib_val *);
60 static void unchain_one_elt_list (struct elt_list **);
61 static void unchain_one_elt_loc_list (struct elt_loc_list **);
62 static int discard_useless_locs (void **, void *);
63 static int discard_useless_values (void **, void *);
64 static void remove_useless_values (void);
65 static int rtx_equal_for_cselib_1 (rtx, rtx, enum machine_mode);
66 static unsigned int cselib_hash_rtx (rtx, int, enum machine_mode);
67 static cselib_val *new_cselib_val (unsigned int, enum machine_mode, rtx);
68 static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
69 static cselib_val *cselib_lookup_mem (rtx, int);
70 static void cselib_invalidate_regno (unsigned int, enum machine_mode);
71 static void cselib_invalidate_mem (rtx);
72 static void cselib_record_set (rtx, cselib_val *, cselib_val *);
73 static void cselib_record_sets (rtx);
75 struct expand_value_data
78 cselib_expand_callback callback;
83 static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
85 /* There are three ways in which cselib can look up an rtx:
86 - for a REG, the reg_values table (which is indexed by regno) is used
87 - for a MEM, we recursively look up its address and then follow the
88 addr_list of that value
89 - for everything else, we compute a hash value and go through the hash
90 table. Since different rtx's can still have the same hash value,
91 this involves walking the table entries for a given value and comparing
92 the locations of the entries with the rtx we are looking up. */
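/* As an illustrative sketch (not lifted from any particular caller): a
   lookup of (reg:SI 1) walks REG_VALUES (1); a lookup of
   (mem:SI (reg:SI 1)) first looks up the address (reg:SI 1) and then
   scans that value's addr_list for an SImode value; and a lookup of
   (plus:SI (reg:SI 1) (const_int 4)) hashes the expression with
   cselib_hash_rtx and walks the matching hash chain, comparing each
   candidate's locations via rtx_equal_for_cselib_1. */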
94 /* A table that enables us to look up elts by their value. */
95 static htab_t cselib_hash_table;
97 /* This is a global so we don't have to pass this through every function.
98 It is used in new_elt_loc_list to set SETTING_INSN. */
99 static rtx cselib_current_insn;
101 /* The unique id that the next create value will take. */
102 static unsigned int next_uid;
104 /* The number of registers we had when the varrays were last resized. */
105 static unsigned int cselib_nregs;
107 /* Count values without known locations, or with only locations that
108 wouldn't have been known except for debug insns. Whenever this
109 grows too big, we remove these useless values from the table.
111 Counting values with only debug values is a bit tricky. We don't
112 want to increment n_useless_values when we create a value for a
113 debug insn, for this would get n_useless_values out of sync, but we
114 want to increment it if all locs in the list that were ever referenced
115 in nondebug insns are removed from the list.
117 In the general case, once we do that, we'd have to stop accepting
118 nondebug expressions in the loc list, to avoid having two values
119 equivalent that, without debug insns, would have been made into
120 separate values. However, because debug insns never introduce
121 equivalences themselves (no assignments), the only means for
122 growing loc lists is through nondebug assignments. If the locs
123 also happen to be referenced in debug insns, it will work just fine.
125 A consequence of this is that there's at most one debug-only loc in
126 each loc list. If we keep it in the first entry, testing whether
127 we have a debug-only loc list takes O(1).
129 Furthermore, since any additional entry in a loc list containing a
130 debug loc would have to come from an assignment (nondebug) that
131 references both the initial debug loc and the newly-equivalent loc,
132 the initial debug loc would be promoted to a nondebug loc, and the
133 loc list would not contain debug locs any more.
135 So the only case we have to be careful with in order to keep
136 n_useless_values in sync between debug and nondebug compilations is
137 to avoid incrementing n_useless_values when removing the single loc
138 from a value that turns out to not appear outside debug values. We
139 increment n_useless_debug_values instead, and leave such values
140 alone until, for other reasons, we garbage-collect useless values. */
142 static int n_useless_values;
143 static int n_useless_debug_values;
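/* A hypothetical example of the bookkeeping described above: if a VALUE's
   only location was recorded while processing a debug insn, removing that
   location bumps n_useless_debug_values rather than n_useless_values;
   remove_useless_values later folds n_useless_debug_values into
   n_useless_values (and subtracts it from n_debug_values) before such
   values are actually discarded. */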
145 /* Count values whose locs have been taken exclusively from debug
146 insns for the entire life of the value. */
147 static int n_debug_values;
149 /* Number of useless values before we remove them from the hash table. */
150 #define MAX_USELESS_VALUES 32
152 /* This table maps from register number to values. It does not
153 contain pointers to cselib_val structures, but rather elt_lists.
154 The purpose is to be able to refer to the same register in
155 different modes. The first element of the list defines the mode in
156 which the register was set; if the mode is unknown or the value is
157 no longer valid in that mode, ELT will be NULL for the first element. */
159 static struct elt_list **reg_values;
160 static unsigned int reg_values_size;
161 #define REG_VALUES(i) reg_values[i]
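/* A minimal sketch (mirroring the loops used by cselib_lookup_1 and
   cselib_subst_to_values below) of how the chain for register REGNO is
   walked to find the value recorded in a particular MODE; REGNO and MODE
   stand for the caller's own variables, and a leading entry with a NULL
   elt is skipped first:

       struct elt_list *l = REG_VALUES (regno);
       if (l && l->elt == NULL)
	 l = l->next;
       for (; l; l = l->next)
	 if (GET_MODE (l->elt->val_rtx) == mode)
	   break;

   On exit, l->elt (if L is non-NULL) is REGNO's value in MODE. */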
163 /* The largest number of hard regs used by any entry added to the
164 REG_VALUES table. Cleared on each cselib_clear_table() invocation. */
165 static unsigned int max_value_regs;
167 /* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used
168 in cselib_clear_table() for fast emptying. */
169 static unsigned int *used_regs;
170 static unsigned int n_used_regs;
172 /* We pass this to cselib_invalidate_mem to invalidate all of
173 memory for a non-const call instruction. */
174 static GTY(()) rtx callmem;
176 /* Set by discard_useless_locs if it deleted the last location of any value. */
178 static int values_became_useless;
180 /* Used as stop element of the containing_mem list so we can check
181 presence in the list by checking the next pointer. */
182 static cselib_val dummy_val;
184 /* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
185 that is constant through the whole function and should never be eliminated. */
187 static cselib_val *cfa_base_preserved_val;
188 static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;
190 /* Used to list all values that contain a memory reference.
191 May or may not contain the useless values - the list is compacted
192 each time memory is invalidated. */
193 static cselib_val *first_containing_mem = &dummy_val;
194 static alloc_pool elt_loc_list_pool, elt_list_pool, cselib_val_pool, value_pool;
196 /* If nonnull, cselib will call this function before freeing useless
197 VALUEs. A VALUE is deemed useless if its "locs" field is null. */
198 void (*cselib_discard_hook) (cselib_val *);
200 /* If nonnull, cselib will call this function before recording sets or
201 even clobbering outputs of INSN. All the recorded sets will be
202 represented in the array sets[n_sets]. new_val_min can be used to
203 tell whether values present in sets are introduced by this instruction. */
205 void (*cselib_record_sets_hook) (rtx insn, struct cselib_set *sets,
208 #define PRESERVED_VALUE_P(RTX) \
209 (RTL_FLAG_CHECK1("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
213 /* Allocate a struct elt_list and fill in its two elements with the arguments. */
216 static inline struct elt_list *
217 new_elt_list (struct elt_list *next, cselib_val *elt)
220 el = (struct elt_list *) pool_alloc (elt_list_pool);
226 /* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc list. */
230 new_elt_loc_list (cselib_val *val, rtx loc)
232 struct elt_loc_list *el, *next = val->locs;
234 gcc_checking_assert (!next || !next->setting_insn
235 || !DEBUG_INSN_P (next->setting_insn)
236 || cselib_current_insn == next->setting_insn);
238 /* If we're creating the first loc in a debug insn context, we've
239 just created a debug value. Count it. */
240 if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
243 val = canonical_cselib_val (val);
246 if (GET_CODE (loc) == VALUE)
248 loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;
250 gcc_checking_assert (PRESERVED_VALUE_P (loc)
251 == PRESERVED_VALUE_P (val->val_rtx));
253 if (val->val_rtx == loc)
255 else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
257 /* Reverse the insertion. */
258 new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
262 gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);
264 if (CSELIB_VAL_PTR (loc)->locs)
266 /* Bring all locs from LOC to VAL. */
267 for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
269 /* Adjust values that have LOC as canonical so that VAL
270 becomes their canonical. */
271 if (el->loc && GET_CODE (el->loc) == VALUE)
273 gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
275 CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
278 el->next = val->locs;
279 next = val->locs = CSELIB_VAL_PTR (loc)->locs;
280 if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
281 && val->next_containing_mem == NULL)
283 val->next_containing_mem = first_containing_mem;
284 first_containing_mem = val;
288 /* Chain LOC back to VAL. */
289 el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
290 el->loc = val->val_rtx;
291 el->setting_insn = cselib_current_insn;
293 CSELIB_VAL_PTR (loc)->locs = el;
296 el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
298 el->setting_insn = cselib_current_insn;
303 /* Promote loc L to a nondebug cselib_current_insn if L is marked as
304 originating from a debug insn, maintaining the debug values count. */
308 promote_debug_loc (struct elt_loc_list *l)
310 if (l->setting_insn && DEBUG_INSN_P (l->setting_insn)
311 && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
314 l->setting_insn = cselib_current_insn;
315 if (cselib_preserve_constants && l->next)
317 gcc_assert (l->next->setting_insn
318 && DEBUG_INSN_P (l->next->setting_insn)
320 l->next->setting_insn = cselib_current_insn;
323 gcc_assert (!l->next);
327 /* The elt_list at *PL is no longer needed. Unchain it and free its storage. */
331 unchain_one_elt_list (struct elt_list **pl)
333 struct elt_list *l = *pl;
336 pool_free (elt_list_pool, l);
339 /* Likewise for elt_loc_lists. */
342 unchain_one_elt_loc_list (struct elt_loc_list **pl)
344 struct elt_loc_list *l = *pl;
347 pool_free (elt_loc_list_pool, l);
350 /* Likewise for cselib_vals. This also frees the addr_list associated with the cselib_val. */
354 unchain_one_value (cselib_val *v)
357 unchain_one_elt_list (&v->addr_list);
359 pool_free (cselib_val_pool, v);
362 /* Remove all entries from the hash table. Also used during initialization. */
366 cselib_clear_table (void)
368 cselib_reset_table (1);
371 /* Remove from hash table all VALUEs except constants
372 and function invariants. */
375 preserve_only_constants (void **x, void *info ATTRIBUTE_UNUSED)
377 cselib_val *v = (cselib_val *)*x;
378 struct elt_loc_list *l;
381 && v->locs->next == NULL)
383 if (CONSTANT_P (v->locs->loc)
384 && (GET_CODE (v->locs->loc) != CONST
385 || !references_value_p (v->locs->loc, 0)))
387 /* Although a debug expr may be bound to different expressions,
388 we can preserve it as if it was constant, to get unification
389 and proper merging within var-tracking. */
390 if (GET_CODE (v->locs->loc) == DEBUG_EXPR
391 || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
392 || GET_CODE (v->locs->loc) == ENTRY_VALUE
393 || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
395 if (cfa_base_preserved_val)
397 if (v == cfa_base_preserved_val)
399 if (GET_CODE (v->locs->loc) == PLUS
400 && CONST_INT_P (XEXP (v->locs->loc, 1))
401 && XEXP (v->locs->loc, 0) == cfa_base_preserved_val->val_rtx)
406 /* Keep VALUE equivalences around. */
407 for (l = v->locs; l; l = l->next)
408 if (GET_CODE (l->loc) == VALUE)
411 htab_clear_slot (cselib_hash_table, x);
415 /* Remove all entries from the hash table, arranging for the next
416 value to be numbered NUM. */
419 cselib_reset_table (unsigned int num)
425 if (cfa_base_preserved_val)
427 unsigned int regno = cfa_base_preserved_regno;
428 unsigned int new_used_regs = 0;
429 for (i = 0; i < n_used_regs; i++)
430 if (used_regs[i] == regno)
436 REG_VALUES (used_regs[i]) = 0;
437 gcc_assert (new_used_regs == 1);
438 n_used_regs = new_used_regs;
439 used_regs[0] = regno;
441 max_value_regs = hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
445 for (i = 0; i < n_used_regs; i++)
446 REG_VALUES (used_regs[i]) = 0;
450 if (cselib_preserve_constants)
451 htab_traverse (cselib_hash_table, preserve_only_constants, NULL);
453 htab_empty (cselib_hash_table);
455 n_useless_values = 0;
456 n_useless_debug_values = 0;
461 first_containing_mem = &dummy_val;
464 /* Return the number of the next value that will be generated. */
467 cselib_get_next_uid (void)
472 /* See the documentation of cselib_find_slot below. */
473 static enum machine_mode find_slot_memmode;
475 /* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
476 INSERTing if requested. When X is part of the address of a MEM,
477 MEMMODE should specify the mode of the MEM. While searching the
478 table, MEMMODE is held in FIND_SLOT_MEMMODE, so that autoinc RTXs
479 in X can be resolved. */
482 cselib_find_slot (rtx x, hashval_t hash, enum insert_option insert,
483 enum machine_mode memmode)
486 find_slot_memmode = memmode;
487 slot = htab_find_slot_with_hash (cselib_hash_table, x, hash, insert);
488 find_slot_memmode = VOIDmode;
492 /* The equality test for our hash table. The first argument ENTRY is a table
493 element (i.e. a cselib_val), while the second arg X is an rtx. We know
494 that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
495 CONST of an appropriate mode. */
498 entry_and_rtx_equal_p (const void *entry, const void *x_arg)
500 struct elt_loc_list *l;
501 const cselib_val *const v = (const cselib_val *) entry;
502 rtx x = CONST_CAST_RTX ((const_rtx)x_arg);
503 enum machine_mode mode = GET_MODE (x);
505 gcc_assert (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
506 && (mode != VOIDmode || GET_CODE (x) != CONST_DOUBLE));
508 if (mode != GET_MODE (v->val_rtx))
511 /* Unwrap X if necessary. */
512 if (GET_CODE (x) == CONST
513 && (CONST_INT_P (XEXP (x, 0))
514 || GET_CODE (XEXP (x, 0)) == CONST_FIXED
515 || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
518 /* We don't guarantee that distinct rtx's have different hash values,
519 so we need to do a comparison. */
520 for (l = v->locs; l; l = l->next)
521 if (rtx_equal_for_cselib_1 (l->loc, x, find_slot_memmode))
523 promote_debug_loc (l);
530 /* The hash function for our hash table. The value is always computed with
531 cselib_hash_rtx when adding an element; this function just extracts the
532 hash value from a cselib_val structure. */
535 get_value_hash (const void *entry)
537 const cselib_val *const v = (const cselib_val *) entry;
541 /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
542 only return true for values which point to a cselib_val whose value
543 element has been set to zero, which implies the cselib_val will be removed. */
547 references_value_p (const_rtx x, int only_useless)
549 const enum rtx_code code = GET_CODE (x);
550 const char *fmt = GET_RTX_FORMAT (code);
553 if (GET_CODE (x) == VALUE
554 && (! only_useless ||
555 (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
558 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
560 if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
562 else if (fmt[i] == 'E')
563 for (j = 0; j < XVECLEN (x, i); j++)
564 if (references_value_p (XVECEXP (x, i, j), only_useless))
571 /* For all locations found in X, delete locations that reference useless
572 values (i.e. values without any location). Called through htab_traverse. */
576 discard_useless_locs (void **x, void *info ATTRIBUTE_UNUSED)
578 cselib_val *v = (cselib_val *)*x;
579 struct elt_loc_list **p = &v->locs;
580 bool had_locs = v->locs != NULL;
581 rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
585 if (references_value_p ((*p)->loc, 1))
586 unchain_one_elt_loc_list (p);
591 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
593 if (setting_insn && DEBUG_INSN_P (setting_insn))
594 n_useless_debug_values++;
597 values_became_useless = 1;
602 /* If X is a value with no locations, remove it from the hashtable. */
605 discard_useless_values (void **x, void *info ATTRIBUTE_UNUSED)
607 cselib_val *v = (cselib_val *)*x;
609 if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
611 if (cselib_discard_hook)
612 cselib_discard_hook (v);
614 CSELIB_VAL_PTR (v->val_rtx) = NULL;
615 htab_clear_slot (cselib_hash_table, x);
616 unchain_one_value (v);
623 /* Clean out useless values (i.e. those which no longer have locations
624 associated with them) from the hash table. */
627 remove_useless_values (void)
631 /* First pass: eliminate locations that reference the value. That in
632 turn can make more values useless. */
635 values_became_useless = 0;
636 htab_traverse (cselib_hash_table, discard_useless_locs, 0);
638 while (values_became_useless);
640 /* Second pass: actually remove the values. */
642 p = &first_containing_mem;
643 for (v = *p; v != &dummy_val; v = v->next_containing_mem)
647 p = &(*p)->next_containing_mem;
651 n_useless_values += n_useless_debug_values;
652 n_debug_values -= n_useless_debug_values;
653 n_useless_debug_values = 0;
655 htab_traverse (cselib_hash_table, discard_useless_values, 0);
657 gcc_assert (!n_useless_values);
660 /* Arrange for a value to not be removed from the hash table even if
661 it becomes useless. */
664 cselib_preserve_value (cselib_val *v)
666 PRESERVED_VALUE_P (v->val_rtx) = 1;
669 /* Test whether a value is preserved. */
672 cselib_preserved_value_p (cselib_val *v)
674 return PRESERVED_VALUE_P (v->val_rtx);
677 /* Arrange for a REG value to be assumed constant through the whole function,
678 never invalidated and preserved across cselib_reset_table calls. */
681 cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
683 if (cselib_preserve_constants
685 && REG_P (v->locs->loc))
687 cfa_base_preserved_val = v;
688 cfa_base_preserved_regno = regno;
692 /* Clean all non-constant expressions in the hash table, but retain their values. */
696 cselib_preserve_only_values (void)
700 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
701 cselib_invalidate_regno (i, reg_raw_mode[i]);
703 cselib_invalidate_mem (callmem);
705 remove_useless_values ();
707 gcc_assert (first_containing_mem == &dummy_val);
710 /* Return the mode in which a register was last set. If X is not a
711 register, return its mode. If the mode in which the register was
712 set is not known, or the value was already clobbered, return VOIDmode. */
716 cselib_reg_set_mode (const_rtx x)
721 if (REG_VALUES (REGNO (x)) == NULL
722 || REG_VALUES (REGNO (x))->elt == NULL)
725 return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
728 /* Return nonzero if we can prove that X and Y contain the same value, taking
729 our gathered information into account. */
732 rtx_equal_for_cselib_p (rtx x, rtx y)
734 return rtx_equal_for_cselib_1 (x, y, VOIDmode);
737 /* If x is a PLUS or an autoinc operation, expand the operation,
738 storing the offset, if any, in *OFF. */
741 autoinc_split (rtx x, rtx *off, enum machine_mode memmode)
743 switch (GET_CODE (x))
750 if (memmode == VOIDmode)
753 *off = GEN_INT (-GET_MODE_SIZE (memmode));
758 if (memmode == VOIDmode)
761 *off = GEN_INT (GET_MODE_SIZE (memmode));
777 /* Return nonzero if we can prove that X and Y contain the same value,
778 taking our gathered information into account. MEMMODE holds the
779 mode of the enclosing MEM, if any, as required to deal with autoinc
780 addressing modes. If X and Y are not (known to be) part of
781 addresses, MEMMODE should be VOIDmode. */
784 rtx_equal_for_cselib_1 (rtx x, rtx y, enum machine_mode memmode)
790 if (REG_P (x) || MEM_P (x))
792 cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);
798 if (REG_P (y) || MEM_P (y))
800 cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);
809 if (GET_CODE (x) == VALUE)
811 cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
812 struct elt_loc_list *l;
814 if (GET_CODE (y) == VALUE)
815 return e == canonical_cselib_val (CSELIB_VAL_PTR (y));
817 for (l = e->locs; l; l = l->next)
821 /* Avoid infinite recursion. We know we have the canonical
822 value, so we can just skip any values in the equivalence list. */
824 if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
826 else if (rtx_equal_for_cselib_1 (t, y, memmode))
832 else if (GET_CODE (y) == VALUE)
834 cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
835 struct elt_loc_list *l;
837 for (l = e->locs; l; l = l->next)
841 if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
843 else if (rtx_equal_for_cselib_1 (x, t, memmode))
850 if (GET_MODE (x) != GET_MODE (y))
853 if (GET_CODE (x) != GET_CODE (y))
855 rtx xorig = x, yorig = y;
856 rtx xoff = NULL, yoff = NULL;
858 x = autoinc_split (x, &xoff, memmode);
859 y = autoinc_split (y, &yoff, memmode);
864 if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode))
867 /* Don't recurse if nothing changed. */
868 if (x != xorig || y != yorig)
869 return rtx_equal_for_cselib_1 (x, y, memmode);
874 /* These won't be handled correctly by the code below. */
875 switch (GET_CODE (x))
882 case DEBUG_IMPLICIT_PTR:
883 return DEBUG_IMPLICIT_PTR_DECL (x)
884 == DEBUG_IMPLICIT_PTR_DECL (y);
886 case DEBUG_PARAMETER_REF:
887 return DEBUG_PARAMETER_REF_DECL (x)
888 == DEBUG_PARAMETER_REF_DECL (y);
891 /* ENTRY_VALUEs are function invariant, so it is undesirable to
892 use rtx_equal_for_cselib_1 to compare the operands. */
893 return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
896 return XEXP (x, 0) == XEXP (y, 0);
899 /* We have to compare any autoinc operations in the addresses
900 using this MEM's mode. */
901 return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x));
908 fmt = GET_RTX_FORMAT (code);
910 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
917 if (XWINT (x, i) != XWINT (y, i))
923 if (XINT (x, i) != XINT (y, i))
929 /* Two vectors must have the same length. */
930 if (XVECLEN (x, i) != XVECLEN (y, i))
933 /* And the corresponding elements must match. */
934 for (j = 0; j < XVECLEN (x, i); j++)
935 if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
936 XVECEXP (y, i, j), memmode))
942 && targetm.commutative_p (x, UNKNOWN)
943 && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode)
944 && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode))
946 if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode))
952 if (strcmp (XSTR (x, i), XSTR (y, i)))
957 /* These are just backpointers, so they don't matter. */
964 /* It is believed that rtx's at this level will never
965 contain anything but integers and other rtx's,
966 except for within LABEL_REFs and SYMBOL_REFs. */
974 /* We need to pass down the mode of constants through the hash table
975 functions. For that purpose, wrap them in a CONST of the appropriate mode. */
978 wrap_constant (enum machine_mode mode, rtx x)
980 if (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
981 && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
983 gcc_assert (mode != VOIDmode);
984 return gen_rtx_CONST (mode, x);
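/* For example, when (const_int 4) is looked up in SImode, wrap_constant
   presents it to the hash table as (const:SI (const_int 4));
   entry_and_rtx_equal_p, defined earlier, unwraps such CONSTs again
   before comparing. */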
987 /* Hash an rtx. Return 0 if we couldn't hash the rtx.
988 For registers and memory locations, we look up their cselib_val structure
989 and return its VALUE element.
990 Possible reasons for return 0 are: the object is volatile, or we couldn't
991 find a register or memory location in the table and CREATE is zero. If
992 CREATE is nonzero, table elts are created for regs and mem.
993 N.B. this hash function returns the same hash value for RTXes that
994 differ only in the order of operands, thus it is suitable for comparisons
995 that take commutativity into account.
996 If we wanted to also support associative rules, we'd have to use a different
997 strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
998 MEMMODE indicates the mode of an enclosing MEM, and it's only
999 used to compute autoinc values.
1000 We used to have a MODE argument for hashing for CONST_INTs, but that
1001 didn't make sense, since it caused spurious hash differences between
1002 (set (reg:SI 1) (const_int))
1003 (plus:SI (reg:SI 2) (reg:SI 1))
1005 (plus:SI (reg:SI 2) (const_int))
1006 If the mode is important in any context, it must be checked specifically
1007 in a comparison anyway, since relying on hash differences is unsafe. */
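/* For instance, (plus:SI (reg:SI 1) (reg:SI 2)) and
   (plus:SI (reg:SI 2) (reg:SI 1)) receive the same hash value here;
   rtx_equal_for_cselib_1 then recognizes the match through its
   targetm.commutative_p check. */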
1010 cselib_hash_rtx (rtx x, int create, enum machine_mode memmode)
1016 unsigned int hash = 0;
1018 code = GET_CODE (x);
1019 hash += (unsigned) code + (unsigned) GET_MODE (x);
1025 e = cselib_lookup (x, GET_MODE (x), create, memmode);
1032 hash += ((unsigned) DEBUG_EXPR << 7)
1033 + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
1034 return hash ? hash : (unsigned int) DEBUG_EXPR;
1036 case DEBUG_IMPLICIT_PTR:
1037 hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
1038 + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
1039 return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;
1041 case DEBUG_PARAMETER_REF:
1042 hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
1043 + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
1044 return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;
1047 /* ENTRY_VALUEs are function invariant, so try to avoid
1048 recursing on the argument if ENTRY_VALUE is one of the
1049 forms emitted by expand_debug_expr; otherwise the
1050 ENTRY_VALUE hash would depend on the current value
1051 in some register or memory. */
1052 if (REG_P (ENTRY_VALUE_EXP (x)))
1053 hash += (unsigned int) REG
1054 + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
1055 + (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
1056 else if (MEM_P (ENTRY_VALUE_EXP (x))
1057 && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
1058 hash += (unsigned int) MEM
1059 + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
1060 + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
1062 hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
1063 return hash ? hash : (unsigned int) ENTRY_VALUE;
1066 hash += ((unsigned) CONST_INT << 7) + INTVAL (x);
1067 return hash ? hash : (unsigned int) CONST_INT;
1070 /* This is like the general case, except that it only counts
1071 the integers representing the constant. */
1072 hash += (unsigned) code + (unsigned) GET_MODE (x);
1073 if (GET_MODE (x) != VOIDmode)
1074 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
1076 hash += ((unsigned) CONST_DOUBLE_LOW (x)
1077 + (unsigned) CONST_DOUBLE_HIGH (x));
1078 return hash ? hash : (unsigned int) CONST_DOUBLE;
1081 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1082 hash += fixed_hash (CONST_FIXED_VALUE (x));
1083 return hash ? hash : (unsigned int) CONST_FIXED;
1090 units = CONST_VECTOR_NUNITS (x);
1092 for (i = 0; i < units; ++i)
1094 elt = CONST_VECTOR_ELT (x, i);
1095 hash += cselib_hash_rtx (elt, 0, memmode);
1101 /* Assume there is only one rtx object for any given label. */
1103 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1104 differences and differences between each stage's debugging dumps. */
1105 hash += (((unsigned int) LABEL_REF << 7)
1106 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1107 return hash ? hash : (unsigned int) LABEL_REF;
1111 /* Don't hash on the symbol's address to avoid bootstrap differences.
1112 Different hash values may cause expressions to be recorded in
1113 different orders and thus different registers to be used in the
1114 final assembler. This also avoids differences in the dump files
1115 between various stages. */
1117 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1120 h += (h << 7) + *p++; /* ??? revisit */
1122 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1123 return hash ? hash : (unsigned int) SYMBOL_REF;
1128 /* We can't compute these without knowing the MEM mode. */
1129 gcc_assert (memmode != VOIDmode);
1130 i = GET_MODE_SIZE (memmode);
1131 if (code == PRE_DEC)
1133 /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
1134 like (mem:MEMMODE (plus (reg) (const_int I))). */
1135 hash += (unsigned) PLUS - (unsigned)code
1136 + cselib_hash_rtx (XEXP (x, 0), create, memmode)
1137 + cselib_hash_rtx (GEN_INT (i), create, memmode);
1138 return hash ? hash : 1 + (unsigned) PLUS;
1141 gcc_assert (memmode != VOIDmode);
1142 return cselib_hash_rtx (XEXP (x, 1), create, memmode);
1147 gcc_assert (memmode != VOIDmode);
1148 return cselib_hash_rtx (XEXP (x, 0), create, memmode);
1153 case UNSPEC_VOLATILE:
1157 if (MEM_VOLATILE_P (x))
1166 i = GET_RTX_LENGTH (code) - 1;
1167 fmt = GET_RTX_FORMAT (code);
1174 rtx tem = XEXP (x, i);
1175 unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);
1184 for (j = 0; j < XVECLEN (x, i); j++)
1186 unsigned int tem_hash
1187 = cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);
1198 const unsigned char *p = (const unsigned char *) XSTR (x, i);
1207 hash += XINT (x, i);
1220 return hash ? hash : 1 + (unsigned int) GET_CODE (x);
1223 /* Create a new value structure for VALUE and initialize it. The mode of the value is MODE. */
1226 static inline cselib_val *
1227 new_cselib_val (unsigned int hash, enum machine_mode mode, rtx x)
1229 cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);
1232 gcc_assert (next_uid);
1235 e->uid = next_uid++;
1236 /* We use an alloc pool to allocate this RTL construct because it
1237 accounts for about 8% of the overall memory usage. We know
1238 precisely when we can have VALUE RTXen (when cselib is active)
1239 so we don't need to put them in garbage collected memory.
1240 ??? Why should a VALUE be an RTX in the first place? */
1241 e->val_rtx = (rtx) pool_alloc (value_pool);
1242 memset (e->val_rtx, 0, RTX_HDR_SIZE);
1243 PUT_CODE (e->val_rtx, VALUE);
1244 PUT_MODE (e->val_rtx, mode);
1245 CSELIB_VAL_PTR (e->val_rtx) = e;
1248 e->next_containing_mem = 0;
1250 if (dump_file && (dump_flags & TDF_CSELIB))
1252 fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
1253 if (flag_dump_noaddr || flag_dump_unnumbered)
1254 fputs ("# ", dump_file);
1256 fprintf (dump_file, "%p ", (void*)e);
1257 print_rtl_single (dump_file, x);
1258 fputc ('\n', dump_file);
1264 /* ADDR_ELT is a value that is used as address. MEM_ELT is the value that
1265 contains the data at this address. X is a MEM that represents the
1266 value. Update the two value structures to represent this situation. */
1269 add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
1271 struct elt_loc_list *l;
1273 mem_elt = canonical_cselib_val (mem_elt);
1275 /* Avoid duplicates. */
1276 for (l = mem_elt->locs; l; l = l->next)
1278 && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
1280 promote_debug_loc (l);
1284 addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
1285 new_elt_loc_list (mem_elt,
1286 replace_equiv_address_nv (x, addr_elt->val_rtx));
1287 if (mem_elt->next_containing_mem == NULL)
1289 mem_elt->next_containing_mem = first_containing_mem;
1290 first_containing_mem = mem_elt;
1294 /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx.
1295 If CREATE, make a new one if we haven't seen it before. */
1298 cselib_lookup_mem (rtx x, int create)
1300 enum machine_mode mode = GET_MODE (x);
1301 enum machine_mode addr_mode;
1304 cselib_val *mem_elt;
1307 if (MEM_VOLATILE_P (x) || mode == BLKmode
1308 || !cselib_record_memory
1309 || (FLOAT_MODE_P (mode) && flag_float_store))
1312 addr_mode = GET_MODE (XEXP (x, 0));
1313 if (addr_mode == VOIDmode)
1316 /* Look up the value for the address. */
1317 addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
1321 /* Find a value that describes a value of our mode at that address. */
1322 for (l = addr->addr_list; l; l = l->next)
1323 if (GET_MODE (l->elt->val_rtx) == mode)
1325 promote_debug_loc (l->elt->locs);
1332 mem_elt = new_cselib_val (next_uid, mode, x);
1333 add_mem_for_addr (addr, mem_elt, x);
1334 slot = cselib_find_slot (wrap_constant (mode, x), mem_elt->hash,
1340 /* Search through the possible substitutions in P. We prefer a non-reg
1341 substitution because this allows us to expand the tree further. If
1342 we find just a reg, take the lowest regno. There may be several
1343 non-reg results; we just take the first one because they will all
1344 expand to the same place. */
1347 expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
1350 rtx reg_result = NULL;
1351 unsigned int regno = UINT_MAX;
1352 struct elt_loc_list *p_in = p;
1354 for (; p; p = p -> next)
1356 /* Avoid infinite recursion trying to expand a reg into a reg. */
1358 if ((REG_P (p->loc))
1359 && (REGNO (p->loc) < regno)
1360 && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
1362 reg_result = p->loc;
1363 regno = REGNO (p->loc);
1366 /* Avoid infinite recursion and do not try to expand the value. */
1367 else if (GET_CODE (p->loc) == VALUE
1368 && CSELIB_VAL_PTR (p->loc)->locs == p_in)
1370 else if (!REG_P (p->loc))
1373 if (dump_file && (dump_flags & TDF_CSELIB))
1375 print_inline_rtx (dump_file, p->loc, 0);
1376 fprintf (dump_file, "\n");
1378 if (GET_CODE (p->loc) == LO_SUM
1379 && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
1381 && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
1382 && XEXP (note, 0) == XEXP (p->loc, 1))
1383 return XEXP (p->loc, 1);
1384 result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
1391 if (regno != UINT_MAX)
1394 if (dump_file && (dump_flags & TDF_CSELIB))
1395 fprintf (dump_file, "r%d\n", regno);
1397 result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
1402 if (dump_file && (dump_flags & TDF_CSELIB))
1406 print_inline_rtx (dump_file, reg_result, 0);
1407 fprintf (dump_file, "\n");
1410 fprintf (dump_file, "NULL\n");
1416 /* Forward substitute and expand an expression out to its roots.
1417 This is the opposite of common subexpression elimination. Because local value
1418 numbering is such a weak optimization, the expanded expression is
1419 pretty much unique (not from a pointer-equality point of view but
1420 from a tree shape point of view).
1422 This function returns NULL if the expansion fails. The expansion
1423 will fail if there is no value number for one of the operands or if
1424 one of the operands has been overwritten between the current insn
1425 and the beginning of the basic block. For instance, X has no expansion if one of the registers it uses was overwritten earlier in the block.
1431 REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
1432 It is clear on return. */
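/* A hedged usage sketch, assuming the usual bitmap.h helpers; ADDR stands
   for whatever rtx the caller wants expanded, and the depth limit of 5 is
   the caller's own choice rather than a requirement of this interface:

       bitmap regs_active = BITMAP_ALLOC (NULL);
       rtx exp = cselib_expand_value_rtx (addr, regs_active, 5);
       ... exp is NULL if the expansion failed; regs_active is clear again ...
       BITMAP_FREE (regs_active);
*/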
1435 cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
1437 struct expand_value_data evd;
1439 evd.regs_active = regs_active;
1440 evd.callback = NULL;
1441 evd.callback_arg = NULL;
1444 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1447 /* Same as cselib_expand_value_rtx, but using a callback to try to
1448 resolve some expressions. The CB function should return ORIG if it
1449 can't or does not want to deal with a certain RTX. Any other
1450 return value, including NULL, will be used as the expansion for
1451 VALUE, without any further changes. */
1454 cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1455 cselib_expand_callback cb, void *data)
1457 struct expand_value_data evd;
1459 evd.regs_active = regs_active;
1461 evd.callback_arg = data;
1464 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1467 /* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
1468 or simplified. Useful to find out whether cselib_expand_value_rtx_cb
1469 would return NULL or non-NULL, without allocating new rtx. */
1472 cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1473 cselib_expand_callback cb, void *data)
1475 struct expand_value_data evd;
1477 evd.regs_active = regs_active;
1479 evd.callback_arg = data;
1482 return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
1485 /* Internal implementation of cselib_expand_value_rtx and
1486 cselib_expand_value_rtx_cb. */
1489 cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
1495 const char *format_ptr;
1496 enum machine_mode mode;
1498 code = GET_CODE (orig);
1500 /* For the context of dse, if we end up expanding into a huge tree, we
1501 will not have a useful address, so we might as well just give up quickly. */
1510 struct elt_list *l = REG_VALUES (REGNO (orig));
1512 if (l && l->elt == NULL)
1514 for (; l; l = l->next)
1515 if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
1518 unsigned regno = REGNO (orig);
1520 /* The only thing that we are not willing to do (this
1521 is a requirement of dse, and if other potential uses
1522 need this function we should add a parm to control
1523 it) is that we will not substitute the
1524 STACK_POINTER_REGNUM, FRAME_POINTER or the HARD_FRAME_POINTER.
1527 These expansions confuse the code that notices that
1528 stores into the frame go dead at the end of the
1529 function and that the frame is not affected by calls
1530 to subroutines. If you allow the
1531 STACK_POINTER_REGNUM substitution, then dse will
1532 think that parameter pushing also goes dead which is
1533 wrong. If you allow the FRAME_POINTER or the
1534 HARD_FRAME_POINTER then you lose the opportunity to
1535 make the frame assumptions. */
1536 if (regno == STACK_POINTER_REGNUM
1537 || regno == FRAME_POINTER_REGNUM
1538 || regno == HARD_FRAME_POINTER_REGNUM
1539 || regno == cfa_base_preserved_regno)
1542 bitmap_set_bit (evd->regs_active, regno);
1544 if (dump_file && (dump_flags & TDF_CSELIB))
1545 fprintf (dump_file, "expanding: r%d into: ", regno);
1547 result = expand_loc (l->elt->locs, evd, max_depth);
1548 bitmap_clear_bit (evd->regs_active, regno);
1565 /* A SCRATCH must be shared because each instance represents a distinct value. */
1568 if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
1573 if (shared_const_p (orig))
1583 subreg = evd->callback (orig, evd->regs_active, max_depth,
1589 subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
1593 scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
1594 GET_MODE (SUBREG_REG (orig)),
1595 SUBREG_BYTE (orig));
1597 || (GET_CODE (scopy) == SUBREG
1598 && !REG_P (SUBREG_REG (scopy))
1599 && !MEM_P (SUBREG_REG (scopy))))
1609 if (dump_file && (dump_flags & TDF_CSELIB))
1611 fputs ("\nexpanding ", dump_file);
1612 print_rtl_single (dump_file, orig);
1613 fputs (" into...", dump_file);
1618 result = evd->callback (orig, evd->regs_active, max_depth,
1625 result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
1631 return evd->callback (orig, evd->regs_active, max_depth,
1639 /* Copy the various flags, fields, and other information. We assume
1640 that all fields need copying, and then clear the fields that should
1641 not be copied. That is the sensible default behavior, and forces
1642 us to explicitly document why we are *not* copying a flag. */
1646 copy = shallow_copy_rtx (orig);
1648 format_ptr = GET_RTX_FORMAT (code);
1650 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1651 switch (*format_ptr++)
1654 if (XEXP (orig, i) != NULL)
1656 rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
1661 XEXP (copy, i) = result;
1667 if (XVEC (orig, i) != NULL)
1670 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
1671 for (j = 0; j < XVECLEN (orig, i); j++)
1673 rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
1674 evd, max_depth - 1);
1678 XVECEXP (copy, i, j) = result;
1692 /* These are left unchanged. */
1702 mode = GET_MODE (copy);
1703 /* If an operand has been simplified into CONST_INT, which doesn't
1704 have a mode and the mode isn't derivable from the whole rtx's mode,
1705 try simplify_*_operation first with the mode of the original operand,
1706 and as a fallback wrap the CONST_INT into gen_rtx_CONST. */
1708 switch (GET_RTX_CLASS (code))
1711 if (CONST_INT_P (XEXP (copy, 0))
1712 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1714 scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
1715 GET_MODE (XEXP (orig, 0)));
1720 case RTX_COMM_ARITH:
1722 /* These expressions can derive operand modes from the whole rtx's mode. */
1725 case RTX_BITFIELD_OPS:
1726 if (CONST_INT_P (XEXP (copy, 0))
1727 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1729 scopy = simplify_ternary_operation (code, mode,
1730 GET_MODE (XEXP (orig, 0)),
1731 XEXP (copy, 0), XEXP (copy, 1),
1738 case RTX_COMM_COMPARE:
1739 if (CONST_INT_P (XEXP (copy, 0))
1740 && GET_MODE (XEXP (copy, 1)) == VOIDmode
1741 && (GET_MODE (XEXP (orig, 0)) != VOIDmode
1742 || GET_MODE (XEXP (orig, 1)) != VOIDmode))
1744 scopy = simplify_relational_operation (code, mode,
1745 (GET_MODE (XEXP (orig, 0))
1747 ? GET_MODE (XEXP (orig, 0))
1748 : GET_MODE (XEXP (orig, 1)),
1758 scopy = simplify_rtx (copy);
1764 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
1765 with VALUE expressions. This way, it becomes independent of changes
1766 to registers and memory.
1767 X isn't actually modified; if modifications are needed, new rtl is
1768 allocated. However, the return value can share rtl with X.
1769 If X is within a MEM, MEMMODE must be the mode of the MEM. */
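/* For example, assuming (reg:SI 1) and (mem:SI (reg:SI 2)) already have
   VALUEs V1 and V2 recorded, substituting into
   (plus:SI (reg:SI 1) (mem:SI (reg:SI 2))) yields
   (plus:SI (value:SI V1) (value:SI V2)); the original rtx is left
   unmodified, and a shallow copy is created as needed. */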
1772 cselib_subst_to_values (rtx x, enum machine_mode memmode)
1774 enum rtx_code code = GET_CODE (x);
1775 const char *fmt = GET_RTX_FORMAT (code);
1784 l = REG_VALUES (REGNO (x));
1785 if (l && l->elt == NULL)
1787 for (; l; l = l->next)
1788 if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
1789 return l->elt->val_rtx;
1794 e = cselib_lookup_mem (x, 0);
1795 /* This used to happen for autoincrements, but we deal with them
1796 properly now. Remove the if stmt for the next release. */
1799 /* Assign a value that doesn't match any other. */
1800 e = new_cselib_val (next_uid, GET_MODE (x), x);
1805 e = cselib_lookup (x, GET_MODE (x), 0, memmode);
1818 gcc_assert (memmode != VOIDmode);
1819 i = GET_MODE_SIZE (memmode);
1820 if (code == PRE_DEC)
1822 return cselib_subst_to_values (plus_constant (XEXP (x, 0), i),
1826 gcc_assert (memmode != VOIDmode);
1827 return cselib_subst_to_values (XEXP (x, 1), memmode);
1832 gcc_assert (memmode != VOIDmode);
1833 return cselib_subst_to_values (XEXP (x, 0), memmode);
1839 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1843 rtx t = cselib_subst_to_values (XEXP (x, i), memmode);
1845 if (t != XEXP (x, i))
1848 copy = shallow_copy_rtx (x);
1852 else if (fmt[i] == 'E')
1856 for (j = 0; j < XVECLEN (x, i); j++)
1858 rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);
1860 if (t != XVECEXP (x, i, j))
1862 if (XVEC (x, i) == XVEC (copy, i))
1865 copy = shallow_copy_rtx (x);
1866 XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
1868 XVECEXP (copy, i, j) = t;
1877 /* Look up the rtl expression X in our tables and return the value it
1878 has. If CREATE is zero, we return NULL if we don't know the value.
1879 Otherwise, we create a new one if possible, using mode MODE if X
1880 doesn't have a mode (i.e. because it's a constant). When X is part
1881 of an address, MEMMODE should be the mode of the enclosing MEM if
1882 we're tracking autoinc expressions. */
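/* For instance, cselib_record_sets below calls
   cselib_lookup (src, GET_MODE (dest), 1, VOIDmode) to create a value for
   a SET_SRC; a mode-less constant such as (const_int 0) needs the MODE
   argument, e.g. (hypothetically) cselib_lookup (const0_rtx, word_mode, 1,
   VOIDmode), since the constant itself has VOIDmode. */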
1885 cselib_lookup_1 (rtx x, enum machine_mode mode,
1886 int create, enum machine_mode memmode)
1890 unsigned int hashval;
1892 if (GET_MODE (x) != VOIDmode)
1893 mode = GET_MODE (x);
1895 if (GET_CODE (x) == VALUE)
1896 return CSELIB_VAL_PTR (x);
1901 unsigned int i = REGNO (x);
1904 if (l && l->elt == NULL)
1906 for (; l; l = l->next)
1907 if (mode == GET_MODE (l->elt->val_rtx))
1909 promote_debug_loc (l->elt->locs);
1916 if (i < FIRST_PSEUDO_REGISTER)
1918 unsigned int n = hard_regno_nregs[i][mode];
1920 if (n > max_value_regs)
1924 e = new_cselib_val (next_uid, GET_MODE (x), x);
1925 new_elt_loc_list (e, x);
1926 if (REG_VALUES (i) == 0)
1928 /* Maintain the invariant that the first entry of
1929 REG_VALUES, if present, must be the value used to set the
1930 register, or NULL. */
1931 used_regs[n_used_regs++] = i;
1932 REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
1934 else if (cselib_preserve_constants
1935 && GET_MODE_CLASS (mode) == MODE_INT)
1937 /* During var-tracking, try harder to find equivalences
1938 for SUBREGs. If a setter sets say a DImode register
1939 and the user uses that register only in SImode, add a lowpart subreg of the wider value as a location of the new value. */
1941 struct elt_list *lwider = NULL;
1943 if (l && l->elt == NULL)
1945 for (; l; l = l->next)
1946 if (GET_MODE_CLASS (GET_MODE (l->elt->val_rtx)) == MODE_INT
1947 && GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
1948 > GET_MODE_SIZE (mode)
1950 || GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
1951 < GET_MODE_SIZE (GET_MODE (lwider->elt->val_rtx))))
1953 struct elt_loc_list *el;
1954 if (i < FIRST_PSEUDO_REGISTER
1955 && hard_regno_nregs[i][GET_MODE (l->elt->val_rtx)] != 1)
1957 for (el = l->elt->locs; el; el = el->next)
1958 if (!REG_P (el->loc))
1965 rtx sub = lowpart_subreg (mode, lwider->elt->val_rtx,
1966 GET_MODE (lwider->elt->val_rtx));
1968 new_elt_loc_list (e, sub);
1971 REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
1972 slot = cselib_find_slot (x, e->hash, INSERT, memmode);
1978 return cselib_lookup_mem (x, create);
1980 hashval = cselib_hash_rtx (x, create, memmode);
1981 /* Can't even create if hashing is not possible. */
1985 slot = cselib_find_slot (wrap_constant (mode, x), hashval,
1986 create ? INSERT : NO_INSERT, memmode);
1990 e = (cselib_val *) *slot;
1994 e = new_cselib_val (hashval, mode, x);
1996 /* We have to fill the slot before calling cselib_subst_to_values:
1997 the hash table is inconsistent until we do so, and
1998 cselib_subst_to_values will need to do lookups. */
2000 new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
2004 /* Wrapper for cselib_lookup, that indicates X is in INSN. */
2007 cselib_lookup_from_insn (rtx x, enum machine_mode mode,
2008 int create, enum machine_mode memmode, rtx insn)
2012 gcc_assert (!cselib_current_insn);
2013 cselib_current_insn = insn;
2015 ret = cselib_lookup (x, mode, create, memmode);
2017 cselib_current_insn = NULL;
2022 /* Wrapper for cselib_lookup_1, that logs the lookup result and
2023 maintains invariants related with debug insns. */
2026 cselib_lookup (rtx x, enum machine_mode mode,
2027 int create, enum machine_mode memmode)
2029 cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
2031 /* ??? Should we return NULL if we're not to create an entry, the
2032 found loc is a debug loc and cselib_current_insn is not DEBUG?
2033 If so, we should also avoid converting val to non-DEBUG; it is probably
2034 easiest to set cselib_current_insn to NULL before the call above. */
2037 if (dump_file && (dump_flags & TDF_CSELIB))
2039 fputs ("cselib lookup ", dump_file);
2040 print_inline_rtx (dump_file, x, 2);
2041 fprintf (dump_file, " => %u:%u\n",
2043 ret ? ret->hash : 0);
2049 /* Invalidate any entries in reg_values that overlap REGNO. This is called
2050 if REGNO is changing. MODE is the mode of the assignment to REGNO, which
2051 is used to determine how many hard registers are being changed. If MODE
2052 is VOIDmode, then only REGNO is being changed; this is used when
2053 invalidating call clobbered registers across a call. */
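/* For example, on a hypothetical target where DImode spans two hard
   registers, invalidating register 1 in SImode must also inspect register
   0, because a DImode value recorded under register 0 overlaps register 1;
   max_value_regs bounds how far back the scan below has to start. */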
2056 cselib_invalidate_regno (unsigned int regno, enum machine_mode mode)
2058 unsigned int endregno;
2061 /* If we see pseudos after reload, something is _wrong_. */
2062 gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
2063 || reg_renumber[regno] < 0);
2065 /* Determine the range of registers that must be invalidated. For
2066 pseudos, only REGNO is affected. For hard regs, we must take MODE
2067 into account, and we must also invalidate lower register numbers
2068 if they contain values that overlap REGNO. */
2069 if (regno < FIRST_PSEUDO_REGISTER)
2071 gcc_assert (mode != VOIDmode);
2073 if (regno < max_value_regs)
2076 i = regno - max_value_regs;
2078 endregno = end_hard_regno (mode, regno);
2083 endregno = regno + 1;
2086 for (; i < endregno; i++)
2088 struct elt_list **l = &REG_VALUES (i);
2090 /* Go through all known values for this reg; if it overlaps the range
2091 we're invalidating, remove the value. */
2094 cselib_val *v = (*l)->elt;
2097 struct elt_loc_list **p;
2098 unsigned int this_last = i;
2100 if (i < FIRST_PSEUDO_REGISTER && v != NULL)
2101 this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
2103 if (this_last < regno || v == NULL
2104 || (v == cfa_base_preserved_val
2105 && i == cfa_base_preserved_regno))
2111 /* We have an overlap. */
2112 if (*l == REG_VALUES (i))
2114 /* Maintain the invariant that the first entry of
2115 REG_VALUES, if present, must be the value used to set
2116 the register, or NULL. This is also nice because
2117 then we won't push the same regno onto used_regs multiple times. */
2123 unchain_one_elt_list (l);
2125 v = canonical_cselib_val (v);
2127 had_locs = v->locs != NULL;
2128 setting_insn = v->locs ? v->locs->setting_insn : NULL;
2130 /* Now, we clear the mapping from value to reg. It must exist, so
2131 this code will crash intentionally if it doesn't. */
2132 for (p = &v->locs; ; p = &(*p)->next)
2136 if (REG_P (x) && REGNO (x) == i)
2138 unchain_one_elt_loc_list (p);
2143 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2145 if (setting_insn && DEBUG_INSN_P (setting_insn))
2146 n_useless_debug_values++;
2154 /* Return 1 if X has a value that can vary even between two
2155 executions of the program. 0 means X can be compared reliably
2156 against certain constants or near-constants. */
2159 cselib_rtx_varies_p (const_rtx x ATTRIBUTE_UNUSED, bool from_alias ATTRIBUTE_UNUSED)
2161 /* We actually don't need to verify very hard. This is because
2162 if X has actually changed, we invalidate the memory anyway,
2163 so assume that all common memory addresses are
2168 /* Invalidate any locations in the table which are changed because of a
2169 store to MEM_RTX. If this is called because of a non-const call
2170 instruction, MEM_RTX is (mem:BLK const0_rtx). */
2173 cselib_invalidate_mem (rtx mem_rtx)
2175 cselib_val **vp, *v, *next;
2179 mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
2180 mem_rtx = canon_rtx (mem_rtx);
2182 vp = &first_containing_mem;
2183 for (v = *vp; v != &dummy_val; v = next)
2185 bool has_mem = false;
2186 struct elt_loc_list **p = &v->locs;
2187 bool had_locs = v->locs != NULL;
2188 rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
2194 struct elt_list **mem_chain;
2196 /* MEMs may occur in locations only at the top level; below
2197 that every MEM or REG is substituted by its VALUE. */
2203 if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
2204 && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx), mem_addr,
2205 x, NULL_RTX, cselib_rtx_varies_p))
2213 /* This one overlaps. */
2214 /* We must have a mapping from this MEM's address to the
2215 value (E). Remove that, too. */
2216 addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
2217 mem_chain = &addr->addr_list;
2220 if (canonical_cselib_val ((*mem_chain)->elt) == v)
2222 unchain_one_elt_list (mem_chain);
2226 mem_chain = &(*mem_chain)->next;
2229 unchain_one_elt_loc_list (p);
2232 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2234 if (setting_insn && DEBUG_INSN_P (setting_insn))
2235 n_useless_debug_values++;
2240 next = v->next_containing_mem;
2244 vp = &(*vp)->next_containing_mem;
2247 v->next_containing_mem = NULL;
2252 /* Invalidate DEST, which is being assigned to or clobbered. */
2255 cselib_invalidate_rtx (rtx dest)
2257 while (GET_CODE (dest) == SUBREG
2258 || GET_CODE (dest) == ZERO_EXTRACT
2259 || GET_CODE (dest) == STRICT_LOW_PART)
2260 dest = XEXP (dest, 0);
2263 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
2264 else if (MEM_P (dest))
2265 cselib_invalidate_mem (dest);
2268 /* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
2271 cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
2272 void *data ATTRIBUTE_UNUSED)
2274 cselib_invalidate_rtx (dest);
2277 /* Record the result of a SET instruction. DEST is being set; the source
2278 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
2279 describes its address. */
2282 cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
2284 int dreg = REG_P (dest) ? (int) REGNO (dest) : -1;
2286 if (src_elt == 0 || side_effects_p (dest))
2291 if (dreg < FIRST_PSEUDO_REGISTER)
2293 unsigned int n = hard_regno_nregs[dreg][GET_MODE (dest)];
2295 if (n > max_value_regs)
2299 if (REG_VALUES (dreg) == 0)
2301 used_regs[n_used_regs++] = dreg;
2302 REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
2306 /* The register should have been invalidated. */
2307 gcc_assert (REG_VALUES (dreg)->elt == 0);
2308 REG_VALUES (dreg)->elt = src_elt;
2311 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2313 new_elt_loc_list (src_elt, dest);
2315 else if (MEM_P (dest) && dest_addr_elt != 0
2316 && cselib_record_memory)
2318 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2320 add_mem_for_addr (dest_addr_elt, src_elt, dest);
2324 /* Make ELT and X's VALUE equivalent to each other at INSN. */
2327 cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx insn)
2330 rtx save_cselib_current_insn = cselib_current_insn;
2332 gcc_checking_assert (elt);
2333 gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
2334 gcc_checking_assert (!side_effects_p (x));
2336 cselib_current_insn = insn;
2338 nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);
2342 if (!PRESERVED_VALUE_P (nelt->val_rtx))
2343 cselib_preserve_value (nelt);
2345 new_elt_loc_list (nelt, elt->val_rtx);
2348 cselib_current_insn = save_cselib_current_insn;
2351 /* There is no good way to determine how many elements there can be
2352 in a PARALLEL. Since it's fairly cheap, use a really large number. */
2353 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
2355 struct cselib_record_autoinc_data
2357 struct cselib_set *sets;
2361 /* Callback for for_each_inc_dec. Records in ARG the SETs implied by
2362 autoinc RTXs: SRC plus SRCOFF if non-NULL is stored in DEST. */
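/* As an illustration (modes shown loosely), a
   (mem:SI (post_inc:SI (reg:SI 1))) inside INSN is reported by
   for_each_inc_dec with DEST and SRC both (reg:SI 1) and SRCOFF
   (const_int 4), so this callback records the implied
   (set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 4))). */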
2365 cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
2366 rtx dest, rtx src, rtx srcoff, void *arg)
2368 struct cselib_record_autoinc_data *data;
2369 data = (struct cselib_record_autoinc_data *)arg;
2371 data->sets[data->n_sets].dest = dest;
2374 data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
2376 data->sets[data->n_sets].src = src;
2383 /* Record the effects of any sets and autoincs in INSN. */
2385 cselib_record_sets (rtx insn)
2389 struct cselib_set sets[MAX_SETS];
2390 rtx body = PATTERN (insn);
2392 int n_sets_before_autoinc;
2393 struct cselib_record_autoinc_data data;
2395 body = PATTERN (insn);
2396 if (GET_CODE (body) == COND_EXEC)
2398 cond = COND_EXEC_TEST (body);
2399 body = COND_EXEC_CODE (body);
2402 /* Find all sets. */
2403 if (GET_CODE (body) == SET)
2405 sets[0].src = SET_SRC (body);
2406 sets[0].dest = SET_DEST (body);
2409 else if (GET_CODE (body) == PARALLEL)
2411 /* Look through the PARALLEL and record the values being
2412 set, if possible. Also handle any CLOBBERs. */
2413 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
2415 rtx x = XVECEXP (body, 0, i);
2417 if (GET_CODE (x) == SET)
2419 sets[n_sets].src = SET_SRC (x);
2420 sets[n_sets].dest = SET_DEST (x);
2427 && MEM_P (sets[0].src)
2428 && !cselib_record_memory
2429 && MEM_READONLY_P (sets[0].src))
2431 rtx note = find_reg_equal_equiv_note (insn);
2433 if (note && CONSTANT_P (XEXP (note, 0)))
2434 sets[0].src = XEXP (note, 0);
2438 data.n_sets = n_sets_before_autoinc = n_sets;
2439 for_each_inc_dec (&insn, cselib_record_autoinc_cb, &data);
2440 n_sets = data.n_sets;
2442 /* Look up the values that are read. Do this before invalidating the
2443 locations that are written. */
2444 for (i = 0; i < n_sets; i++)
2446 rtx dest = sets[i].dest;
2448 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
2449 the low part after invalidating any knowledge about larger modes. */
2450 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
2451 sets[i].dest = dest = XEXP (dest, 0);
2453 /* We don't know how to record anything but REG or MEM. */
2455 || (MEM_P (dest) && cselib_record_memory))
2457 rtx src = sets[i].src;
2459 src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
2460 sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
2463 enum machine_mode address_mode
2464 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (dest));
2466 sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
2471 sets[i].dest_addr_elt = 0;
2475 if (cselib_record_sets_hook)
2476 cselib_record_sets_hook (insn, sets, n_sets);
2478 /* Invalidate all locations written by this insn. Note that the elts we
2479 looked up in the previous loop aren't affected, just some of their
2480 locations may go away. */
2481 note_stores (body, cselib_invalidate_rtx_note_stores, NULL);
2483 for (i = n_sets_before_autoinc; i < n_sets; i++)
2484 cselib_invalidate_rtx (sets[i].dest);
2486 /* If this is an asm, look for duplicate sets. This can happen when the
2487 user uses the same value as an output multiple times. This is valid
2488 if the outputs are not actually used thereafter. Treat this case as
2489 if the value isn't actually set. We do this by smashing the destination
2490 to pc_rtx, so that we won't record the value later. */
2491 if (n_sets >= 2 && asm_noperands (body) >= 0)
2493 for (i = 0; i < n_sets; i++)
2495 rtx dest = sets[i].dest;
2496 if (REG_P (dest) || MEM_P (dest))
2499 for (j = i + 1; j < n_sets; j++)
2500 if (rtx_equal_p (dest, sets[j].dest))
2502 sets[i].dest = pc_rtx;
2503 sets[j].dest = pc_rtx;
2509 /* Now enter the equivalences in our tables. */
2510 for (i = 0; i < n_sets; i++)
2512 rtx dest = sets[i].dest;
2514 || (MEM_P (dest) && cselib_record_memory))
2515 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
2519 /* Record the effects of INSN. */
2522 cselib_process_insn (rtx insn)
2527 cselib_current_insn = insn;
2529 /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp. */
2532 && find_reg_note (insn, REG_SETJMP, NULL))
2533 || (NONJUMP_INSN_P (insn)
2534 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2535 && MEM_VOLATILE_P (PATTERN (insn))))
2537 cselib_reset_table (next_uid);
2538 cselib_current_insn = NULL_RTX;
2542 if (! INSN_P (insn))
2544 cselib_current_insn = NULL_RTX;
2548 /* If this is a call instruction, forget anything stored in a
2549 call clobbered register, or, if this is not a const call, in memory. */
2553 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2554 if (call_used_regs[i]
2555 || (REG_VALUES (i) && REG_VALUES (i)->elt
2556 && HARD_REGNO_CALL_PART_CLOBBERED (i,
2557 GET_MODE (REG_VALUES (i)->elt->val_rtx))))
2558 cselib_invalidate_regno (i, reg_raw_mode[i]);
2560 /* Since it is not clear how cselib is going to be used, be
2561 conservative here and treat looping pure or const functions
2562 as if they were regular functions. */
2563 if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
2564 || !(RTL_CONST_OR_PURE_CALL_P (insn)))
2565 cselib_invalidate_mem (callmem);
2568 cselib_record_sets (insn);
2570 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
2571 after we have processed the insn. */
2573 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
2574 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
2575 cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
2577 cselib_current_insn = NULL_RTX;
2579 if (n_useless_values > MAX_USELESS_VALUES
2580 /* remove_useless_values is linear in the hash table size. Avoid
2581 quadratic behavior for very large hashtables with very few
2582 useless elements. */
2583 && ((unsigned int)n_useless_values
2584 > (cselib_hash_table->n_elements
2585 - cselib_hash_table->n_deleted
2586 - n_debug_values) / 4))
2587 remove_useless_values ();
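/* A minimal sketch of how a client pass typically drives cselib; the
   iteration over insns is the caller's own and is only outlined here:

       cselib_init (CSELIB_RECORD_MEMORY);
       for each insn, in order:
	 cselib_process_insn (insn);
       cselib_finish ();

   cselib_record_sets_hook, if set, is invoked from cselib_record_sets
   with the sets found in each processed insn. */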
2590 /* Initialize cselib for one pass. The caller must also call
2591 init_alias_analysis. */
2594 cselib_init (int record_what)
2596 elt_list_pool = create_alloc_pool ("elt_list",
2597 sizeof (struct elt_list), 10);
2598 elt_loc_list_pool = create_alloc_pool ("elt_loc_list",
2599 sizeof (struct elt_loc_list), 10);
2600 cselib_val_pool = create_alloc_pool ("cselib_val_list",
2601 sizeof (cselib_val), 10);
2602 value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
2603 cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
2604 cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
2606 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
2607 see canon_true_dependence. This is only created once. */
2609 callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2611 cselib_nregs = max_reg_num ();
2613 /* We preserve reg_values across passes to avoid repeatedly allocating
2614 and clearing the whole array. Reallocate it, however, if it happens to be too large. */
2615 if (!reg_values || reg_values_size < cselib_nregs
2616 || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
2619 /* Some space for newly emitted instructions so we don't end up
2620 reallocating in between passes. */
2621 reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
2622 reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
2624 used_regs = XNEWVEC (unsigned int, cselib_nregs);
2626 cselib_hash_table = htab_create (31, get_value_hash,
2627 entry_and_rtx_equal_p, NULL);
2631 /* Called when the current user is done with cselib. */
2634 cselib_finish (void)
2636 cselib_discard_hook = NULL;
2637 cselib_preserve_constants = false;
2638 cfa_base_preserved_val = NULL;
2639 cfa_base_preserved_regno = INVALID_REGNUM;
2640 free_alloc_pool (elt_list_pool);
2641 free_alloc_pool (elt_loc_list_pool);
2642 free_alloc_pool (cselib_val_pool);
2643 free_alloc_pool (value_pool);
2644 cselib_clear_table ();
2645 htab_delete (cselib_hash_table);
2648 cselib_hash_table = 0;
2649 n_useless_values = 0;
2650 n_useless_debug_values = 0;
2655 /* Dump the cselib_val *X to FILE *info. */
2658 dump_cselib_val (void **x, void *info)
2660 cselib_val *v = (cselib_val *)*x;
2661 FILE *out = (FILE *)info;
2662 bool need_lf = true;
2664 print_inline_rtx (out, v->val_rtx, 0);
2668 struct elt_loc_list *l = v->locs;
2674 fputs (" locs:", out);
2677 fprintf (out, "\n from insn %i ",
2678 INSN_UID (l->setting_insn));
2679 print_inline_rtx (out, l->loc, 4);
2681 while ((l = l->next));
2686 fputs (" no locs", out);
2692 struct elt_list *e = v->addr_list;
2698 fputs (" addr list:", out);
2702 print_inline_rtx (out, e->elt->val_rtx, 2);
2704 while ((e = e->next));
2709 fputs (" no addrs", out);
2713 if (v->next_containing_mem == &dummy_val)
2714 fputs (" last mem\n", out);
2715 else if (v->next_containing_mem)
2717 fputs (" next mem ", out);
2718 print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
2727 /* Dump to OUT everything in the CSELIB table. */
2730 dump_cselib_table (FILE *out)
2732 fprintf (out, "cselib hash table:\n");
2733 htab_traverse (cselib_hash_table, dump_cselib_val, out);
2734 if (first_containing_mem != &dummy_val)
2736 fputs ("first mem ", out);
2737 print_inline_rtx (out, first_containing_mem->val_rtx, 2);
2740 fprintf (out, "next uid %i\n", next_uid);
2743 #include "gt-cselib.h"