1 /* Common subexpression elimination library for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
29 #include "hard-reg-set.h"
32 #include "insn-config.h"
/* NOTE(review): this file is a partial extract of GCC's cselib (common
   subexpression elimination library).  Many original lines are missing,
   so several PARAMS argument lists below are visibly truncated -- their
   continuation lines did not survive extraction.  Recover the complete
   declarations from the full source before editing.  */
43 static int entry_and_rtx_equal_p PARAMS ((const void *, const void *));
44 static unsigned int get_value_hash PARAMS ((const void *));
45 static struct elt_list *new_elt_list PARAMS ((struct elt_list *,
47 static struct elt_loc_list *new_elt_loc_list PARAMS ((struct elt_loc_list *,
49 static void unchain_one_value PARAMS ((cselib_val *));
50 static void unchain_one_elt_list PARAMS ((struct elt_list **));
51 static void unchain_one_elt_loc_list PARAMS ((struct elt_loc_list **));
52 static void clear_table PARAMS ((int));
53 static int discard_useless_locs PARAMS ((void **, void *));
54 static int discard_useless_values PARAMS ((void **, void *));
55 static void remove_useless_values PARAMS ((void));
56 static rtx wrap_constant PARAMS ((enum machine_mode, rtx));
57 static unsigned int hash_rtx PARAMS ((rtx, enum machine_mode, int));
58 static cselib_val *new_cselib_val PARAMS ((unsigned int,
60 static void add_mem_for_addr PARAMS ((cselib_val *, cselib_val *,
62 static cselib_val *cselib_lookup_mem PARAMS ((rtx, int));
63 static void cselib_invalidate_regno PARAMS ((unsigned int,
65 static int cselib_mem_conflict_p PARAMS ((rtx, rtx));
66 static int cselib_invalidate_mem_1 PARAMS ((void **, void *));
67 static void cselib_invalidate_mem PARAMS ((rtx));
68 static void cselib_invalidate_rtx PARAMS ((rtx, rtx, void *));
69 static void cselib_record_set PARAMS ((rtx, cselib_val *,
71 static void cselib_record_sets PARAMS ((rtx));
73 /* There are three ways in which cselib can look up an rtx:
74 - for a REG, the reg_values table (which is indexed by regno) is used
75 - for a MEM, we recursively look up its address and then follow the
76 addr_list of that value
77 - for everything else, we compute a hash value and go through the hash
78 table. Since different rtx's can still have the same hash value,
79 this involves walking the table entries for a given value and comparing
80 the locations of the entries with the rtx we are looking up. */
82 /* A table that enables us to look up elts by their value. */
83 static htab_t hash_table;
85 /* This is a global so we don't have to pass this through every function.
86 It is used in new_elt_loc_list to set SETTING_INSN. */
87 static rtx cselib_current_insn;
89 /* Every new unknown value gets a unique number. */
90 static unsigned int next_unknown_value;
92 /* The number of registers we had when the varrays were last resized. */
93 static unsigned int cselib_nregs;
95 /* Count values without known locations. Whenever this grows too big, we
96 remove these useless values from the table. */
97 static int n_useless_values;
99 /* Number of useless values before we remove them from the hash table. */
100 #define MAX_USELESS_VALUES 32
102 /* This table maps from register number to values. It does not contain
103 pointers to cselib_val structures, but rather elt_lists. The purpose is
104 to be able to refer to the same register in different modes. */
105 static varray_type reg_values;
106 #define REG_VALUES(I) VARRAY_ELT_LIST (reg_values, (I))
108 /* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used
109 in clear_table() for fast emptying. */
110 static varray_type used_regs;
112 /* We pass this to cselib_invalidate_mem to invalidate all of
113 memory for a non-const call instruction. */
/* NOTE(review): the variable this comment describes (presumably
   `static rtx callmem;` -- it is initialized in cselib_init below as a
   (mem:BLK const0_rtx)) is not visible in this extract.  */
116 /* Memory for our structures is allocated from this obstack. */
117 static struct obstack cselib_obstack;
119 /* Used to quickly free all memory. */
120 static char *cselib_startobj;
122 /* Caches for unused structures. */
123 static cselib_val *empty_vals;
124 static struct elt_list *empty_elt_lists;
125 static struct elt_loc_list *empty_elt_loc_lists;
127 /* Set by discard_useless_locs if it deleted the last location of any
129 static int values_became_useless;
132 /* Allocate a struct elt_list and fill in its two elements with the
/* NOTE(review): incomplete in this extract -- the ELT parameter
   declaration, braces, the test that decides between reusing a node
   from the EMPTY_ELT_LISTS free list and obstack-allocating a fresh
   one, and the final member assignments/return are missing.  */
135 static struct elt_list *
136 new_elt_list (next, elt)
137 struct elt_list *next;
140 struct elt_list *el = empty_elt_lists;
143 empty_elt_lists = el->next;
145 el = (struct elt_list *) obstack_alloc (&cselib_obstack,
146 sizeof (struct elt_list));
152 /* Allocate a struct elt_loc_list and fill in its two elements with the
/* NOTE(review): incomplete in this extract -- like new_elt_list, this
   appears to reuse a node from the EMPTY_ELT_LOC_LISTS free list or
   obstack-allocate one; the branch and the NEXT/LOC assignments are
   missing.  SETTING_INSN is recorded from the global
   cselib_current_insn, as documented at that variable.  */
155 static struct elt_loc_list *
156 new_elt_loc_list (next, loc)
157 struct elt_loc_list *next;
160 struct elt_loc_list *el = empty_elt_loc_lists;
163 empty_elt_loc_lists = el->next;
165 el = (struct elt_loc_list *) obstack_alloc (&cselib_obstack,
166 sizeof (struct elt_loc_list));
169 el->setting_insn = cselib_current_insn;
173 /* The elt_list at *PL is no longer needed. Unchain it and free its
/* NOTE(review): incomplete -- the visible lines detach the node and
   push it onto the EMPTY_ELT_LISTS free list for reuse; the line that
   relinks *PL past the removed node is missing from this extract.  */
177 unchain_one_elt_list (pl)
178 struct elt_list **pl;
180 struct elt_list *l = *pl;
183 l->next = empty_elt_lists;
187 /* Likewise for elt_loc_lists. */
/* NOTE(review): incomplete -- detaches the elt_loc_list node at *PL and
   caches it on EMPTY_ELT_LOC_LISTS; the *PL relink line is missing.  */
190 unchain_one_elt_loc_list (pl)
191 struct elt_loc_list **pl;
193 struct elt_loc_list *l = *pl;
196 l->next = empty_elt_loc_lists;
197 empty_elt_loc_lists = l;
200 /* Likewise for cselib_vals. This also frees the addr_list associated with
/* NOTE(review): incomplete -- visibly frees the value's addr_list via
   unchain_one_elt_list and threads the cselib_val onto the EMPTY_VALS
   free list through the u.next_free union member.  */
204 unchain_one_value (v)
208 unchain_one_elt_list (&v->addr_list);
210 v->u.next_free = empty_vals;
214 /* Remove all entries from the hash table. Also used during
215 initialization. If CLEAR_ALL isn't set, then only clear the entries
216 which are known to have been used. */
/* NOTE(review): incomplete -- the CLEAR_ALL test that selects between
   the full sweep over cselib_nregs and the fast sweep over USED_REGS is
   missing, as is the REG_VALUES clearing in the first loop.  Everything
   is reset: register values, the used_regs stack, the hash table, the
   obstack (back to cselib_startobj, invalidating the free-list caches),
   and the unknown-value counter.  */
219 clear_table (clear_all)
225 for (i = 0; i < cselib_nregs; i++)
228 for (i = 0; i < VARRAY_ACTIVE_SIZE (used_regs); i++)
229 REG_VALUES (VARRAY_UINT (used_regs, i)) = 0;
231 VARRAY_POP_ALL (used_regs);
233 htab_empty (hash_table);
234 obstack_free (&cselib_obstack, cselib_startobj);
238 empty_elt_loc_lists = 0;
239 n_useless_values = 0;
241 next_unknown_value = 0;
244 /* The equality test for our hash table. The first argument ENTRY is a table
245 element (i.e. a cselib_val), while the second arg X is an rtx. We know
246 that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
247 CONST of an appropriate mode. */
/* NOTE(review): incomplete -- several return statements, the CONST_INT
   abort branch, and the unwrap assignment are missing.  Visible logic:
   reject a bare CONST_INT/VOIDmode CONST_DOUBLE (callers must wrap
   them), reject a mode mismatch with the entry's VALUE rtx, strip a
   wrapping CONST, then compare X against each recorded location with
   rtx_equal_for_cselib_p since hash collisions are possible.  */
250 entry_and_rtx_equal_p (entry, x_arg)
251 const void *entry, *x_arg;
253 struct elt_loc_list *l;
254 const cselib_val *v = (const cselib_val *) entry;
256 enum machine_mode mode = GET_MODE (x);
258 if (GET_CODE (x) == CONST_INT
259 || (mode == VOIDmode && GET_CODE (x) == CONST_DOUBLE))
261 if (mode != GET_MODE (v->u.val_rtx))
264 /* Unwrap X if necessary. */
265 if (GET_CODE (x) == CONST
266 && (GET_CODE (XEXP (x, 0)) == CONST_INT
267 || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
270 /* We don't guarantee that distinct rtx's have different hash values,
271 so we need to do a comparison. */
272 for (l = v->locs; l; l = l->next)
273 if (rtx_equal_for_cselib_p (l->loc, x))
279 /* The hash function for our hash table. The value is always computed with
280 hash_rtx when adding an element; this function just extracts the hash
281 value from a cselib_val structure. */
/* NOTE(review): incomplete -- the `return v->value;' line is missing
   from this extract.  */
284 get_value_hash (entry)
287 const cselib_val *v = (const cselib_val *) entry;
291 /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
292 only return true for values which point to a cselib_val whose value
293 element has been set to zero, which implies the cselib_val will be
/* NOTE(review): incomplete -- the `return 1'/`return 0' statements are
   missing.  Visible logic: answer true for a (useless, if ONLY_USELESS)
   VALUE rtx itself, then recurse over the operands using the rtx format
   string, handling both 'e' (single rtx) and 'E' (rtx vector).  */
297 references_value_p (x, only_useless)
301 enum rtx_code code = GET_CODE (x);
302 const char *fmt = GET_RTX_FORMAT (code);
305 if (GET_CODE (x) == VALUE
306 && (! only_useless || CSELIB_VAL_PTR (x)->locs == 0))
309 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
311 if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
313 else if (fmt[i] == 'E')
314 for (j = 0; j < XVECLEN (x, i); j++)
315 if (references_value_p (XVECEXP (x, i, j), only_useless))
322 /* For all locations found in X, delete locations that reference useless
323 values (i.e. values without any location). Called through
/* NOTE(review): incomplete -- the loop header over the location list
   and the `p = &(*p)->next' advance are missing; so is the
   n_useless_values increment implied by the had_locs test below.
   A hash-table traversal callback: deletes every location that refers
   to a useless VALUE, and flags values_became_useless when that makes
   this value itself useless, so remove_useless_values can iterate to a
   fixed point.  */
327 discard_useless_locs (x, info)
329 void *info ATTRIBUTE_UNUSED;
331 cselib_val *v = (cselib_val *)*x;
332 struct elt_loc_list **p = &v->locs;
333 int had_locs = v->locs != 0;
337 if (references_value_p ((*p)->loc, 1))
338 unchain_one_elt_loc_list (p);
343 if (had_locs && v->locs == 0)
346 values_became_useless = 1;
351 /* If X is a value with no locations, remove it from the hashtable. */
/* NOTE(review): incomplete -- the `if (v->locs == 0)' guard and the
   n_useless_values bookkeeping around the removal are missing from this
   extract.  Removes the entry from the hash table and recycles the
   cselib_val via unchain_one_value.  */
354 discard_useless_values (x, info)
356 void *info ATTRIBUTE_UNUSED;
358 cselib_val *v = (cselib_val *)*x;
362 htab_clear_slot (hash_table, x);
363 unchain_one_value (v);
370 /* Clean out useless values (i.e. those which no longer have locations
371 associated with them) from the hash table. */
/* NOTE(review): incomplete -- the `do' keyword of the do/while loop and
   the abort() (or similar) consistency check after the second pass are
   missing.  Two phases: (1) repeatedly discard locations that reference
   useless values until no new value becomes useless, (2) delete the
   useless values themselves; afterwards n_useless_values should be 0.  */
374 remove_useless_values ()
376 /* First pass: eliminate locations that reference the value. That in
377 turn can make more values useless. */
380 values_became_useless = 0;
381 htab_traverse (hash_table, discard_useless_locs, 0);
383 while (values_became_useless);
385 /* Second pass: actually remove the values. */
386 htab_traverse (hash_table, discard_useless_values, 0);
388 if (n_useless_values != 0)
392 /* Return nonzero if we can prove that X and Y contain the same value, taking
393 our gathered information into account. */
/* NOTE(review): incomplete -- many lines are missing (the canonicalizing
   replacement of X/Y by their looked-up VALUEs, several return
   statements, the `code'/`fmt' declarations, and the switch case labels
   in the format-string walk).  Visible strategy:
   1. Map REG/MEM operands to their cselib VALUEs (non-creating lookup).
   2. VALUE vs VALUE: pointer-compare the cselib_val structures.
   3. VALUE vs other: walk the value's location list, skipping REG/MEM
      locations to avoid infinite recursion.
   4. Otherwise: require matching code and mode, special-case LABEL_REF,
      then compare operand-by-operand via GET_RTX_FORMAT.  */
396 rtx_equal_for_cselib_p (x, y)
403 if (GET_CODE (x) == REG || GET_CODE (x) == MEM)
405 cselib_val *e = cselib_lookup (x, GET_MODE (x), 0);
411 if (GET_CODE (y) == REG || GET_CODE (y) == MEM)
413 cselib_val *e = cselib_lookup (y, GET_MODE (y), 0);
422 if (GET_CODE (x) == VALUE && GET_CODE (y) == VALUE)
423 return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);
425 if (GET_CODE (x) == VALUE)
427 cselib_val *e = CSELIB_VAL_PTR (x);
428 struct elt_loc_list *l;
430 for (l = e->locs; l; l = l->next)
434 /* Avoid infinite recursion. */
435 if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
437 else if (rtx_equal_for_cselib_p (t, y))
444 if (GET_CODE (y) == VALUE)
446 cselib_val *e = CSELIB_VAL_PTR (y);
447 struct elt_loc_list *l;
449 for (l = e->locs; l; l = l->next)
453 if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
455 else if (rtx_equal_for_cselib_p (x, t))
462 if (GET_CODE (x) != GET_CODE (y) || GET_MODE (x) != GET_MODE (y))
465 /* This won't be handled correctly by the code below. */
466 if (GET_CODE (x) == LABEL_REF)
467 return XEXP (x, 0) == XEXP (y, 0);
470 fmt = GET_RTX_FORMAT (code);
472 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
479 if (XWINT (x, i) != XWINT (y, i))
485 if (XINT (x, i) != XINT (y, i))
491 /* Two vectors must have the same length. */
492 if (XVECLEN (x, i) != XVECLEN (y, i))
495 /* And the corresponding elements must match. */
496 for (j = 0; j < XVECLEN (x, i); j++)
497 if (! rtx_equal_for_cselib_p (XVECEXP (x, i, j),
503 if (! rtx_equal_for_cselib_p (XEXP (x, i), XEXP (y, i)))
509 if (strcmp (XSTR (x, i), XSTR (y, i)))
514 /* These are just backpointers, so they don't matter. */
521 /* It is believed that rtx's at this level will never
522 contain anything but integers and other rtx's,
523 except for within LABEL_REFs and SYMBOL_REFs. */
531 /* We need to pass down the mode of constants through the hash table
532 functions. For that purpose, wrap them in a CONST of the appropriate
/* NOTE(review): incomplete -- the early `return x' for non-constants
   and the abort() when MODE is VOIDmode appear to be missing.  Returns
   X unchanged unless it is a CONST_INT or VOIDmode CONST_DOUBLE, which
   get wrapped in a CONST carrying MODE so entry_and_rtx_equal_p can
   compare modes.  */
535 wrap_constant (mode, x)
536 enum machine_mode mode;
539 if (GET_CODE (x) != CONST_INT
540 && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
542 if (mode == VOIDmode)
544 return gen_rtx_CONST (mode, x);
547 /* Hash an rtx. Return 0 if we couldn't hash the rtx.
548 For registers and memory locations, we look up their cselib_val structure
549 and return its VALUE element.
550 Possible reasons for return 0 are: the object is volatile, or we couldn't
551 find a register or memory location in the table and CREATE is zero. If
552 CREATE is nonzero, table elts are created for regs and mem.
553 MODE is used in hashing for CONST_INTs only;
554 otherwise the mode of X is used. */
/* NOTE(review): incomplete -- the switch statement itself, most `case'
   labels (MEM, REG, CONST_INT, CONST_DOUBLE, LABEL_REF, SYMBOL_REF,
   PRE_DEC etc.), the `else' of the CONST_DOUBLE branch, and the hash
   accumulation inside the generic operand walk are all missing from
   this extract.  Visible structure: volatile constructs hash to 0;
   constants get code-specific hashes (never returning 0 -- note the
   `hash ? hash : code' idiom); everything else folds in the hashes of
   its operands per the rtx format string.  */
557 hash_rtx (x, mode, create)
559 enum machine_mode mode;
566 unsigned int hash = 0;
569 hash += (unsigned) code + (unsigned) GET_MODE (x);
575 e = cselib_lookup (x, GET_MODE (x), create);
582 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + INTVAL (x);
583 return hash ? hash : (unsigned int) CONST_INT;
586 /* This is like the general case, except that it only counts
587 the integers representing the constant. */
588 hash += (unsigned) code + (unsigned) GET_MODE (x);
589 if (GET_MODE (x) != VOIDmode)
590 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
591 hash += XWINT (x, i);
593 hash += ((unsigned) CONST_DOUBLE_LOW (x)
594 + (unsigned) CONST_DOUBLE_HIGH (x));
595 return hash ? hash : (unsigned int) CONST_DOUBLE;
597 /* Assume there is only one rtx object for any given label. */
600 += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
601 return hash ? hash : (unsigned int) LABEL_REF;
605 += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
606 return hash ? hash : (unsigned int) SYMBOL_REF;
617 case UNSPEC_VOLATILE:
621 if (MEM_VOLATILE_P (x))
630 i = GET_RTX_LENGTH (code) - 1;
631 fmt = GET_RTX_FORMAT (code);
636 rtx tem = XEXP (x, i);
637 unsigned int tem_hash = hash_rtx (tem, 0, create);
644 else if (fmt[i] == 'E')
645 for (j = 0; j < XVECLEN (x, i); j++)
647 unsigned int tem_hash = hash_rtx (XVECEXP (x, i, j), 0, create);
654 else if (fmt[i] == 's')
656 const unsigned char *p = (const unsigned char *) XSTR (x, i);
662 else if (fmt[i] == 'i')
664 else if (fmt[i] == '0' || fmt[i] == 't')
670 return hash ? hash : 1 + (unsigned int) GET_CODE (x);
673 /* Create a new value structure for VALUE and initialize it. The mode of the
/* NOTE(review): incomplete -- the if/else around the EMPTY_VALS free
   list reuse and the assignments of e->value, e->locs, e->addr_list
   (and the final return) are missing.  Allocates (or recycles) a
   cselib_val, creates its VALUE rtx in MODE, and links the two via
   CSELIB_VAL_PTR.  */
677 new_cselib_val (value, mode)
679 enum machine_mode mode;
681 cselib_val *e = empty_vals;
684 empty_vals = e->u.next_free;
686 e = (cselib_val *) obstack_alloc (&cselib_obstack, sizeof (cselib_val));
692 e->u.val_rtx = gen_rtx_VALUE (mode);
693 CSELIB_VAL_PTR (e->u.val_rtx) = e;
699 /* ADDR_ELT is a value that is used as address. MEM_ELT is the value that
700 contains the data at this address. X is a MEM that represents the
701 value. Update the two value structures to represent this situation. */
/* NOTE(review): incomplete -- the early `return' of the duplicate check
   and the `mem_elt->locs' left-hand side of the final assignment are
   missing.  Links the two values both ways: MEM_ELT goes onto
   ADDR_ELT's addr_list, and a MEM location (with the address replaced
   by ADDR_ELT's VALUE rtx) goes onto MEM_ELT's location list.  */
704 add_mem_for_addr (addr_elt, mem_elt, x)
705 cselib_val *addr_elt, *mem_elt;
708 struct elt_loc_list *l;
710 /* Avoid duplicates. */
711 for (l = mem_elt->locs; l; l = l->next)
712 if (GET_CODE (l->loc) == MEM
713 && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
716 addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
718 = new_elt_loc_list (mem_elt->locs,
719 replace_equiv_address_nv (x, addr_elt->u.val_rtx));
722 /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx.
723 If CREATE, make a new one if we haven't seen it before. */
/* NOTE(review): incomplete -- the NULL returns (unhashable MEM, failed
   address lookup, !CREATE), the `return l->elt' of the addr_list scan,
   and the final slot assignment/return are missing.  Flow: refuse
   volatile/BLKmode MEMs (and float MEMs under -ffloat-store), look up
   the address value, scan its addr_list for an existing value of the
   right mode, otherwise create a fresh value, cross-link it with
   add_mem_for_addr, and insert it into the hash table.  */
726 cselib_lookup_mem (x, create)
730 enum machine_mode mode = GET_MODE (x);
736 if (MEM_VOLATILE_P (x) || mode == BLKmode
737 || (FLOAT_MODE_P (mode) && flag_float_store))
740 /* Look up the value for the address. */
741 addr = cselib_lookup (XEXP (x, 0), mode, create);
745 /* Find a value that describes a value of our mode at that address. */
746 for (l = addr->addr_list; l; l = l->next)
747 if (GET_MODE (l->elt->u.val_rtx) == mode)
753 mem_elt = new_cselib_val (++next_unknown_value, mode);
754 add_mem_for_addr (addr, mem_elt, x);
755 slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
756 mem_elt->value, INSERT);
761 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
762 with VALUE expressions. This way, it becomes independent of changes
763 to registers and memory.
764 X isn't actually modified; if modifications are needed, new rtl is
765 allocated. However, the return value can share rtl with X. */
/* NOTE(review): incomplete -- the switch/case labels (REG, MEM,
   CONST_DOUBLE, the default), the `copy = x' initialization, the
   XEXP(copy,i)=t store in the 'e' arm, and the final `return copy' are
   missing.  Visible logic: a REG maps to the recorded VALUE of matching
   mode from REG_VALUES; a MEM maps through cselib_lookup_mem, with a
   fresh unknown value assigned when the lookup fails (autoincrement
   addresses); other operands are substituted recursively with
   copy-on-write (shallow_copy_rtx / rtvec_alloc only when a
   subexpression actually changed).  */
768 cselib_subst_to_values (x)
771 enum rtx_code code = GET_CODE (x);
772 const char *fmt = GET_RTX_FORMAT (code);
781 for (l = REG_VALUES (REGNO (x)); l; l = l->next)
782 if (GET_MODE (l->elt->u.val_rtx) == GET_MODE (x))
783 return l->elt->u.val_rtx;
788 e = cselib_lookup_mem (x, 0);
791 /* This happens for autoincrements. Assign a value that doesn't
793 e = new_cselib_val (++next_unknown_value, GET_MODE (x));
797 /* CONST_DOUBLEs must be special-cased here so that we won't try to
798 look up the CONST_DOUBLE_MEM inside. */
809 e = new_cselib_val (++next_unknown_value, GET_MODE (x));
816 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
820 rtx t = cselib_subst_to_values (XEXP (x, i));
822 if (t != XEXP (x, i) && x == copy)
823 copy = shallow_copy_rtx (x);
827 else if (fmt[i] == 'E')
831 for (j = 0; j < XVECLEN (x, i); j++)
833 rtx t = cselib_subst_to_values (XVECEXP (x, i, j));
835 if (t != XVECEXP (x, i, j) && XVEC (x, i) == XVEC (copy, i))
838 copy = shallow_copy_rtx (x);
840 XVEC (copy, i) = rtvec_alloc (XVECLEN (x, i));
841 for (k = 0; k < j; k++)
842 XVECEXP (copy, i, k) = XVECEXP (x, i, k);
845 XVECEXP (copy, i, j) = t;
853 /* Look up the rtl expression X in our tables and return the value it has.
854 If CREATE is zero, we return NULL if we don't know the value. Otherwise,
855 we create a new one if possible, using mode MODE if X doesn't have a mode
856 (i.e. because it's a constant). */
/* NOTE(review): incomplete -- the mode override when X has one, several
   returns (existing REG value, !CREATE bailouts, the hash-rtx 0 check,
   the existing-slot return and the final return of E), and the
   `*slot = (void *) e' store are missing.  Flow mirrors the dispatch
   comment near the top of the file: REGs through REG_VALUES (creating
   a value and marking the register used if needed), MEMs through
   cselib_lookup_mem, everything else hashed with hash_rtx; new entries
   fill the hash slot *before* cselib_subst_to_values runs, since that
   substitution re-enters the table.  */
859 cselib_lookup (x, mode, create)
861 enum machine_mode mode;
866 unsigned int hashval;
868 if (GET_MODE (x) != VOIDmode)
871 if (GET_CODE (x) == VALUE)
872 return CSELIB_VAL_PTR (x);
874 if (GET_CODE (x) == REG)
877 unsigned int i = REGNO (x);
879 for (l = REG_VALUES (i); l; l = l->next)
880 if (mode == GET_MODE (l->elt->u.val_rtx))
886 e = new_cselib_val (++next_unknown_value, GET_MODE (x));
887 e->locs = new_elt_loc_list (e->locs, x);
888 if (REG_VALUES (i) == 0)
889 VARRAY_PUSH_UINT (used_regs, i);
890 REG_VALUES (i) = new_elt_list (REG_VALUES (i), e);
891 slot = htab_find_slot_with_hash (hash_table, x, e->value, INSERT);
896 if (GET_CODE (x) == MEM)
897 return cselib_lookup_mem (x, create);
899 hashval = hash_rtx (x, mode, create);
900 /* Can't even create if hashing is not possible. */
904 slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
905 hashval, create ? INSERT : NO_INSERT);
909 e = (cselib_val *) *slot;
913 e = new_cselib_val (hashval, mode);
915 /* We have to fill the slot before calling cselib_subst_to_values:
916 the hash table is inconsistent until we do so, and
917 cselib_subst_to_values will need to do lookups. */
919 e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
923 /* Invalidate any entries in reg_values that overlap REGNO. This is called
924 if REGNO is changing. MODE is the mode of the assignment to REGNO, which
925 is used to determine how many hard registers are being changed. If MODE
926 is VOIDmode, then only REGNO is being changed; this is used when
927 invalidating call clobbered registers across a call. */
/* NOTE(review): incomplete -- the abort() under the pseudo-after-reload
   check, the hard-reg adjustment that widens the scan to lower register
   numbers, the inner `while (*l)' loop header, the n_useless_values
   bookkeeping, and the loop that advances L past non-overlapping
   entries are missing.  For each register in the affected range, every
   overlapping value is unlinked from REG_VALUES and its REG location is
   removed from the value's location list (that mapping must exist; the
   loop is written to crash if it does not).  */
930 cselib_invalidate_regno (regno, mode)
932 enum machine_mode mode;
934 unsigned int endregno;
937 /* If we see pseudos after reload, something is _wrong_. */
938 if (reload_completed && regno >= FIRST_PSEUDO_REGISTER
939 && reg_renumber[regno] >= 0)
942 /* Determine the range of registers that must be invalidated. For
943 pseudos, only REGNO is affected. For hard regs, we must take MODE
944 into account, and we must also invalidate lower register numbers
945 if they contain values that overlap REGNO. */
946 endregno = regno + 1;
947 if (regno < FIRST_PSEUDO_REGISTER && mode != VOIDmode)
948 endregno = regno + HARD_REGNO_NREGS (regno, mode);
950 for (i = 0; i < endregno; i++)
952 struct elt_list **l = &REG_VALUES (i);
954 /* Go through all known values for this reg; if it overlaps the range
955 we're invalidating, remove the value. */
958 cselib_val *v = (*l)->elt;
959 struct elt_loc_list **p;
960 unsigned int this_last = i;
962 if (i < FIRST_PSEUDO_REGISTER)
963 this_last += HARD_REGNO_NREGS (i, GET_MODE (v->u.val_rtx)) - 1;
965 if (this_last < regno)
971 /* We have an overlap. */
972 unchain_one_elt_list (l);
974 /* Now, we clear the mapping from value to reg. It must exist, so
975 this code will crash intentionally if it doesn't. */
976 for (p = &v->locs; ; p = &(*p)->next)
980 if (GET_CODE (x) == REG && REGNO (x) == i)
982 unchain_one_elt_loc_list (p);
992 /* The memory at address MEM_BASE is being changed.
993 Return whether this change will invalidate VAL. */
/* NOTE(review): incomplete -- the parameter declarations, the switch
   with its "simple cases" case labels (constants return 0), the MEM
   case label for the anti_dependence check, and the final `return 0'
   are missing.  A MEM inside VAL conflicts when either side is BLKmode
   or anti_dependence says the store can overlap it; otherwise the
   operands are scanned recursively via the rtx format string.  */
996 cselib_mem_conflict_p (mem_base, val)
1004 code = GET_CODE (val);
1007 /* Get rid of a few simple cases quickly. */
1020 if (GET_MODE (mem_base) == BLKmode
1021 || GET_MODE (val) == BLKmode
1022 || anti_dependence (val, mem_base))
1025 /* The address may contain nested MEMs. */
1032 fmt = GET_RTX_FORMAT (code);
1033 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1037 if (cselib_mem_conflict_p (mem_base, XEXP (val, i)))
1040 else if (fmt[i] == 'E')
1041 for (j = 0; j < XVECLEN (val, i); j++)
1042 if (cselib_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
1049 /* For the value found in SLOT, walk its locations to determine if any overlap
1050 INFO (which is a MEM rtx). */
/* NOTE(review): incomplete -- the `while (*p)' loop header, the
   `p = &(*p)->next; continue;' for non-conflicting locations, the inner
   addr_list scan loop header, and the n_useless_values increment are
   missing.  For every MEM location that conflicts with MEM_RTX, the
   reverse mapping (the value on its address's addr_list) is removed
   first, then the location itself is unchained; a value left with no
   locations is counted as useless.  */
1053 cselib_invalidate_mem_1 (slot, info)
1057 cselib_val *v = (cselib_val *) *slot;
1058 rtx mem_rtx = (rtx) info;
1059 struct elt_loc_list **p = &v->locs;
1060 int had_locs = v->locs != 0;
1066 struct elt_list **mem_chain;
1068 /* MEMs may occur in locations only at the top level; below
1069 that every MEM or REG is substituted by its VALUE. */
1070 if (GET_CODE (x) != MEM
1071 || ! cselib_mem_conflict_p (mem_rtx, x))
1077 /* This one overlaps. */
1078 /* We must have a mapping from this MEM's address to the
1079 value (E). Remove that, too. */
1080 addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0);
1081 mem_chain = &addr->addr_list;
1084 if ((*mem_chain)->elt == v)
1086 unchain_one_elt_list (mem_chain);
1090 mem_chain = &(*mem_chain)->next;
1093 unchain_one_elt_loc_list (p);
1096 if (had_locs && v->locs == 0)
1102 /* Invalidate any locations in the table which are changed because of a
1103 store to MEM_RTX. If this is called because of a non-const call
1104 instruction, MEM_RTX is (mem:BLK const0_rtx). */
/* NOTE(review): incomplete -- the parameter declaration and braces are
   missing; the body is a single hash-table traversal delegating the
   per-value work to cselib_invalidate_mem_1.  */
1107 cselib_invalidate_mem (mem_rtx)
1110 htab_traverse (hash_table, cselib_invalidate_mem_1, mem_rtx);
1113 /* Invalidate DEST, which is being assigned to or clobbered. The second and
1114 the third parameter exist so that this function can be passed to
1115 note_stores; they are ignored. */
/* NOTE(review): appears essentially complete apart from the DEST
   parameter declaration and braces.  Strips wrappers (STRICT_LOW_PART,
   SIGN/ZERO_EXTRACT, SUBREG) down to the underlying REG or MEM, then
   dispatches to the register or memory invalidator; push operands also
   invalidate the stack pointer, which DEST alone would not cover.  */
1118 cselib_invalidate_rtx (dest, ignore, data)
1120 rtx ignore ATTRIBUTE_UNUSED;
1121 void *data ATTRIBUTE_UNUSED;
1123 while (GET_CODE (dest) == STRICT_LOW_PART || GET_CODE (dest) == SIGN_EXTRACT
1124 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SUBREG)
1125 dest = XEXP (dest, 0);
1127 if (GET_CODE (dest) == REG)
1128 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
1129 else if (GET_CODE (dest) == MEM)
1130 cselib_invalidate_mem (dest);
1132 /* Some machines don't define AUTO_INC_DEC, but they still use push
1133 instructions. We need to catch that case here in order to
1134 invalidate the stack pointer correctly. Note that invalidating
1135 the stack pointer is different from invalidating DEST. */
1136 if (push_operand (dest, GET_MODE (dest)))
1137 cselib_invalidate_rtx (stack_pointer_rtx, NULL_RTX, NULL);
1140 /* Record the result of a SET instruction. DEST is being set; the source
1141 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
1142 describes its address. */
/* NOTE(review): incomplete -- the early `return' after the guard, the
   `if (dreg >= 0)' branch structure, and the n_useless_values
   decrements paired with the `src_elt->locs == 0' tests are missing.
   Visible logic: bail out when there is no source value or DEST has
   side effects; for a register destination, record SRC_ELT in
   REG_VALUES (marking the register used on first touch) and add DEST
   as a location of the value; for a MEM destination with a known
   address, link via add_mem_for_addr.  */
1145 cselib_record_set (dest, src_elt, dest_addr_elt)
1147 cselib_val *src_elt, *dest_addr_elt;
1149 int dreg = GET_CODE (dest) == REG ? (int) REGNO (dest) : -1;
1151 if (src_elt == 0 || side_effects_p (dest))
1156 if (REG_VALUES (dreg) == 0)
1157 VARRAY_PUSH_UINT (used_regs, dreg);
1159 REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
1160 if (src_elt->locs == 0)
1162 src_elt->locs = new_elt_loc_list (src_elt->locs, dest);
1164 else if (GET_CODE (dest) == MEM && dest_addr_elt != 0)
1166 if (src_elt->locs == 0)
1168 add_mem_for_addr (dest_addr_elt, src_elt, dest);
1172 /* Describe a single set that is part of an insn. */
/* NOTE(review): the struct's opening lines (`struct set {', and the
   `rtx src; rtx dest;' members referenced by cselib_record_sets below)
   are missing from this extract; only the two cselib_val members and
   the closing context survive.  */
1177 cselib_val *src_elt;
1178 cselib_val *dest_addr_elt;
1181 /* There is no good way to determine how many elements there can be
1182 in a PARALLEL. Since it's fairly cheap, use a really large number. */
1183 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
1185 /* Record the effects of any sets in INSN. */
/* NOTE(review): incomplete -- the `cond'/`n_sets' declarations, the
   `n_sets = 1' / `n_sets++' counters, and the COND_EXEC handling of
   PARALLEL elements are missing.  Three phases, in a deliberate order:
   (1) collect SET (or PARALLEL-of-SET) sources/destinations and look up
   their values *before* anything is invalidated -- conditional
   execution wraps the source in an IF_THEN_ELSE so the old value is
   preserved on the not-taken path; (2) invalidate everything the insn
   stores to via note_stores; (3) record the new equivalences.  */
1187 cselib_record_sets (insn)
1192 struct set sets[MAX_SETS];
1193 rtx body = PATTERN (insn);
1196 body = PATTERN (insn);
1197 if (GET_CODE (body) == COND_EXEC)
1199 cond = COND_EXEC_TEST (body);
1200 body = COND_EXEC_CODE (body);
1203 /* Find all sets. */
1204 if (GET_CODE (body) == SET)
1206 sets[0].src = SET_SRC (body);
1207 sets[0].dest = SET_DEST (body);
1210 else if (GET_CODE (body) == PARALLEL)
1212 /* Look through the PARALLEL and record the values being
1213 set, if possible. Also handle any CLOBBERs. */
1214 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
1216 rtx x = XVECEXP (body, 0, i);
1218 if (GET_CODE (x) == SET)
1220 sets[n_sets].src = SET_SRC (x);
1221 sets[n_sets].dest = SET_DEST (x);
1227 /* Look up the values that are read. Do this before invalidating the
1228 locations that are written. */
1229 for (i = 0; i < n_sets; i++)
1231 rtx dest = sets[i].dest;
1233 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
1234 the low part after invalidating any knowledge about larger modes. */
1235 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
1236 sets[i].dest = dest = XEXP (dest, 0);
1238 /* We don't know how to record anything but REG or MEM. */
1239 if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
1241 rtx src = sets[i].src;
1243 src = gen_rtx_IF_THEN_ELSE (GET_MODE (src), cond, src, dest);
1244 sets[i].src_elt = cselib_lookup (sets[i].src, GET_MODE (dest), 1);
1245 if (GET_CODE (dest) == MEM)
1246 sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0), Pmode, 1);
1248 sets[i].dest_addr_elt = 0;
1252 /* Invalidate all locations written by this insn. Note that the elts we
1253 looked up in the previous loop aren't affected, just some of their
1254 locations may go away. */
1255 note_stores (body, cselib_invalidate_rtx, NULL);
1257 /* Now enter the equivalences in our tables. */
1258 for (i = 0; i < n_sets; i++)
1260 rtx dest = sets[i].dest;
1261 if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
1262 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
1266 /* Record the effects of INSN. */
/* NOTE(review): incomplete -- the clear_table() call implied by the
   "Forget everything" guard, the `return' after the !INSN_P test, and
   assorted braces are missing.  Visible flow: wipe all state at a
   CODE_LABEL, setjmp call, or volatile asm; skip non-insns; for calls,
   invalidate call-used registers and (for non-const/pure calls) all of
   memory via callmem; record the insn's sets; then clobber REG_INC
   targets and, for calls, anything CLOBBERed in
   CALL_INSN_FUNCTION_USAGE; finally trigger garbage collection of
   useless values past the MAX_USELESS_VALUES threshold.  */
1269 cselib_process_insn (insn)
1275 cselib_current_insn = insn;
1277 /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp. */
1278 if (GET_CODE (insn) == CODE_LABEL
1279 || (GET_CODE (insn) == CALL_INSN
1280 && find_reg_note (insn, REG_SETJMP, NULL))
1281 || (GET_CODE (insn) == INSN
1282 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
1283 && MEM_VOLATILE_P (PATTERN (insn))))
1289 if (! INSN_P (insn))
1291 cselib_current_insn = 0;
1295 /* If this is a call instruction, forget anything stored in a
1296 call clobbered register, or, if this is not a const call, in
1298 if (GET_CODE (insn) == CALL_INSN)
1300 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1301 if (call_used_regs[i])
1302 cselib_invalidate_regno (i, VOIDmode);
1304 if (! CONST_OR_PURE_CALL_P (insn))
1305 cselib_invalidate_mem (callmem);
1308 cselib_record_sets (insn);
1311 /* Clobber any registers which appear in REG_INC notes. We
1312 could keep track of the changes to their values, but it is
1313 unlikely to help. */
1314 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
1315 if (REG_NOTE_KIND (x) == REG_INC)
1316 cselib_invalidate_rtx (XEXP (x, 0), NULL_RTX, NULL);
1319 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
1320 after we have processed the insn. */
1321 if (GET_CODE (insn) == CALL_INSN)
1322 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
1323 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
1324 cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX, NULL);
1326 cselib_current_insn = 0;
1328 if (n_useless_values > MAX_USELESS_VALUES)
1329 remove_useless_values ();
1332 /* Make sure our varrays are big enough. Not called from any cselib routines;
1333 it must be called by the user if it allocated new registers. */
/* NOTE(review): appears complete apart from braces and the `return'
   after the no-change test.  Grows both per-register varrays to the
   current max_reg_num() and remembers the new size.  */
1336 cselib_update_varray_sizes ()
1338 unsigned int nregs = max_reg_num ();
1340 if (nregs == cselib_nregs)
1343 cselib_nregs = nregs;
1344 VARRAY_GROW (reg_values, nregs);
1345 VARRAY_GROW (used_regs, nregs);
1348 /* Initialize cselib for one pass. The caller must also call
1349 init_alias_analysis. */
/* NOTE(review): incomplete -- the function header line (`cselib_init')
   and the guard around the once-only section (obstack + callmem are
   "only created once", presumably tested via cselib_startobj or
   similar) are missing.  Sets up the obstack, the callmem
   (mem:BLK const0_rtx) sentinel registered as a GC root, the two
   per-register varrays sized to max_reg_num(), and the value hash
   table keyed by get_value_hash/entry_and_rtx_equal_p.  */
1354 /* These are only created once. */
1357 gcc_obstack_init (&cselib_obstack);
1358 cselib_startobj = obstack_alloc (&cselib_obstack, 0);
1360 callmem = gen_rtx_MEM (BLKmode, const0_rtx);
1361 ggc_add_rtx_root (&callmem, 1);
1364 cselib_nregs = max_reg_num ();
1365 VARRAY_ELT_LIST_INIT (reg_values, cselib_nregs, "reg_values");
1366 VARRAY_UINT_INIT (used_regs, cselib_nregs, "used_regs");
1367 hash_table = htab_create (31, get_value_hash, entry_and_rtx_equal_p, NULL);
1371 /* Called when the current user is done with cselib. */
/* NOTE(review): the function header (`cselib_finish') and any
   clear_table/obstack teardown preceding these lines are missing from
   this extract.  Releases the per-pass data structures: both varrays
   and the value hash table.  */
1377 VARRAY_FREE (reg_values);
1378 VARRAY_FREE (used_regs);
1379 htab_delete (hash_table);