1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 2011 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "basic-block.h"
32 #include "insn-config.h"
36 #include "diagnostic-core.h"
44 #include "rtlhooks-def.h"
45 #include "tree-pass.h"
49 /* The basic idea of common subexpression elimination is to go
50 through the code, keeping a record of expressions that would
51 have the same value at the current scan point, and replacing
52 expressions encountered with the cheapest equivalent expression.
54 It is too complicated to keep track of the different possibilities
55 when control paths merge in this code; so, at each label, we forget all
56 that is known and start fresh. This can be described as processing each
57 extended basic block separately. We have a separate pass to perform global CSE.
60 Note CSE can turn a conditional or computed jump into a nop or
61 an unconditional jump. When this occurs we arrange to run the jump
62 optimizer after CSE to delete the unreachable code.
64 We use two data structures to record the equivalent expressions:
65 a hash table for most expressions, and a vector of "quantity
66 numbers" to record equivalent (pseudo) registers.
68 The use of the special data structure for registers is desirable
69 because it is faster. It is possible because register references
70 contain a fairly small number, the register number, taken from
71 a contiguously allocated series, and two register references are
72 identical if they have the same number. General expressions
73 do not have any such thing, so the only way to retrieve the
74 information recorded on an expression other than a register
75 is to keep it in a hash table.
77 Registers and "quantity numbers":
79 At the start of each basic block, all of the (hardware and pseudo)
80 registers used in the function are given distinct quantity
81 numbers to indicate their contents. During scan, when the code
82 copies one register into another, we copy the quantity number.
83 When a register is loaded in any other way, we allocate a new
84 quantity number to describe the value generated by this operation.
85 `REG_QTY (N)' records what quantity register N is currently thought of containing.
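   As an illustration of the scheme (the register numbers here are made
   up), consider scanning

	(set (reg:SI 101) (reg:SI 100))
	(set (reg:SI 102) (plus:SI (reg:SI 100) (const_int 4)))

   The first insn merely copies one register into another, so register
   101 receives the quantity number of register 100 and the two are
   known to hold the same value.  The second insn loads register 102 in
   some other way, so a fresh quantity number is allocated for it.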
88 All real quantity numbers are greater than or equal to zero.
89 If register N has not been assigned a quantity, `REG_QTY (N)' will
90 equal -N - 1, which is always negative.
92 Quantity numbers below zero do not exist and none of the `qty_table'
93 entries should be referenced with a negative index.
95 We also maintain a bidirectional chain of registers for each
96 quantity number. The `qty_table` members `first_reg' and `last_reg',
97 and `reg_eqv_table' members `next' and `prev' hold these chains.
99 The first register in a chain is the one whose lifespan is least local.
100 Among equals, it is the one that was seen first.
101 We replace any equivalent register with that one.
103 If two registers have the same quantity number, then REG expressions
104 with the qty_table `mode' must be in the hash table for both
105 registers and must be in the same class.
107 The converse is not true. Since hard registers may be referenced in
108 any mode, two REG expressions might be equivalent in the hash table
109 but not have the same quantity number if the quantity of one
110 of the registers does not have the same mode as those expressions.
112 Constants and quantity numbers
114 When a quantity has a known constant value, that value is stored
115 in the appropriate qty_table `const_rtx'. This is in addition to
116 putting the constant in the hash table as is usual for non-regs.
118 Whether a reg or a constant is preferred is determined by the configuration
119 macro CONST_COSTS and will often depend on the constant value. In any
120 event, expressions containing constants can be simplified, by fold_rtx.
122 When a quantity has a known nearly constant value (such as an address
123 of a stack slot), that value is stored in the appropriate qty_table `const_rtx'.
126 Integer constants don't have a machine mode. However, cse
127 determines the intended machine mode from the destination
128 of the instruction that moves the constant. The machine mode
129 is recorded in the hash table along with the actual RTL
130 constant expression so that different modes are kept separate.
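   For example (a sketch only), a (const_int 1) moved into an SImode
   register and a (const_int 1) moved into a DImode register produce
   separate table entries: the same rtx is recorded once with mode
   SImode and once with mode DImode, so equivalences found in one mode
   are never applied in the other.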
134 To record known equivalences among expressions in general
135 we use a hash table called `table'. It has a fixed number of buckets
136 that contain chains of `struct table_elt' elements for expressions.
137 These chains connect the elements whose expressions have the same hash codes.
140 Other chains through the same elements connect the elements which
141 currently have equivalent values.
143 Register references in an expression are canonicalized before hashing
144 the expression. This is done using `reg_qty' and qty_table `first_reg'.
145 The hash code of a register reference is computed using the quantity
146 number, not the register number.
148 When the value of an expression changes, it is necessary to remove from the
149 hash table not just that expression but all expressions whose values
150 could be different as a result.
152 1. If the value changing is in memory, except in special cases
153 ANYTHING referring to memory could be changed. That is because
154 nobody knows where a pointer does not point.
155 The function `invalidate_memory' removes what is necessary.
157 The special cases are when the address is constant or is
158 a constant plus a fixed register such as the frame pointer
159 or a static chain pointer. When such addresses are stored in,
160 we can tell exactly which other such addresses must be invalidated
161 due to overlap. `invalidate' does this.
162 All expressions that refer to non-constant
163 memory addresses are also invalidated. `invalidate_memory' does this.
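   To illustrate (the register numbers and offsets here are invented):
   a store through (mem:SI (plus:SI (reg:SI fp) (const_int -4))) need
   only invalidate expressions using overlapping frame addresses, which
   `invalidate' can work out exactly; a store through
   (mem:SI (reg:SI 70)), where nothing is known about register 70,
   forces every expression whose address is not known to be safe out of
   the table via `invalidate_memory'.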
165 2. If the value changing is a register, all expressions
166 containing references to that register, and only those, must be removed.
169 Because searching the entire hash table for expressions that contain
170 a register is very slow, we try to figure out when it isn't necessary.
171 Precisely, this is necessary only when expressions have been
172 entered in the hash table using this register, and then the value has
173 changed, and then another expression wants to be added to refer to
174 the register's new value. This sequence of circumstances is rare
175 within any one basic block.
177 `REG_TICK' and `REG_IN_TABLE', accessors for members of
178 cse_reg_info, are used to detect this case. REG_TICK (i) is
179 incremented whenever a value is stored in register i.
180 REG_IN_TABLE (i) holds -1 if no references to register i have been
181 entered in the table; otherwise, it contains the value REG_TICK (i)
182 had when the references were entered. If we want to enter a
183 reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
184 remove old references. Until we want to enter a new entry, the
185 mere fact that the two vectors don't match makes the entries be
186 ignored if anyone tries to match them.
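   A small made-up example of the mechanism: suppose REG_TICK (70) is 3
   when (plus (reg 70) (const_int 8)) is entered, so REG_IN_TABLE (70)
   becomes 3.  A later store into register 70 bumps REG_TICK (70) to 4;
   the stale entry simply stops matching.  Only when a new expression
   mentioning register 70 is about to be entered do we pay for the scan
   that removes the old references.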
188 Registers themselves are entered in the hash table as well as in
189 the equivalent-register chains. However, `REG_TICK' and
190 `REG_IN_TABLE' do not apply to expressions which are simple
191 register references. These expressions are removed from the table
192 immediately when they become invalid, and this can be done even if
193 we do not immediately search for all the expressions that refer to the register.
196 A CLOBBER rtx in an instruction invalidates its operand for further
197 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
198 invalidates everything that resides in memory.
202 Constant expressions that differ only by an additive integer
203 are called related. When a constant expression is put in
204 the table, the related expression with no constant term
205 is also entered. These are made to point at each other
206 so that it is possible to find out if there exists any
207 register equivalent to an expression related to a given expression. */
209 /* Length of qty_table vector. We know in advance we will not need
210 a quantity number this big. */
214 /* Next quantity number to be allocated.
215 This is 1 + the largest number needed so far. */
219 /* Per-qty information tracking.
221 `first_reg' and `last_reg' track the head and tail of the
222 chain of registers which currently contain this quantity.
224 `mode' contains the machine mode of this quantity.
226 `const_rtx' holds the rtx of the constant value of this
227 quantity, if known. A sum of the frame/arg pointer
228 and a constant can also be entered here. When this holds
229 a known value, `const_insn' is the insn which stored the constant value.
232 `comparison_{code,const,qty}' are used to track when a
233 comparison between a quantity and some constant or register has
234 been passed. In such a case, we know the results of the comparison
235 in case we see it again. These members record a comparison that
236 is known to be true. `comparison_code' holds the rtx code of such
237 a comparison, else it is set to UNKNOWN and the other two
238 comparison members are undefined. `comparison_const' holds
239 the constant being compared against, or zero if the comparison
240 is not against a constant. `comparison_qty' holds the quantity
241 being compared against when the result is known. If the comparison
242 is not with a register, `comparison_qty' is -1. */
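/* As a hedged example of the comparison members (the numbers are
   invented): after a conditional branch testing
   (gt:SI (reg:SI 70) (const_int 10)) is known to have been taken, the
   quantity of register 70 can record comparison_code GT,
   comparison_const (const_int 10) and comparison_qty -1, so a later
   occurrence of the same test can be treated as known true.  */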
244 struct qty_table_elem
248 rtx comparison_const;
250 unsigned int first_reg, last_reg;
251 /* The sizes of these fields should match the sizes of the
252 code and mode fields of struct rtx_def (see rtl.h). */
253 ENUM_BITFIELD(rtx_code) comparison_code : 16;
254 ENUM_BITFIELD(machine_mode) mode : 8;
257 /* The table of all qtys, indexed by qty number. */
258 static struct qty_table_elem *qty_table;
260 /* Structure used to pass arguments via for_each_rtx to function
261 cse_change_cc_mode. */
262 struct change_cc_mode_args
269 /* For machines that have a CC0, we do not record its value in the hash
270 table since its use is guaranteed to be the insn immediately following
271 its definition and any other insn is presumed to invalidate it.
273 Instead, we store below the current and last value assigned to CC0.
274 If it should happen to be a constant, it is stored in preference
275 to the actual assigned value. In case it is a constant, we store
276 the mode in which the constant should be interpreted. */
278 static rtx this_insn_cc0, prev_insn_cc0;
279 static enum machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
282 /* Insn being scanned. */
284 static rtx this_insn;
285 static bool optimize_this_for_speed_p;
287 /* Indexed by register number, this gives the number of the next (or
288 previous) register in the chain of registers sharing the same value.
291 Or -1 if this register is at the end of the chain.
293 If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined. */
295 /* Per-register equivalence chain. */
301 /* The table of all register equivalence chains. */
302 static struct reg_eqv_elem *reg_eqv_table;
306 /* The timestamp at which this register is initialized. */
307 unsigned int timestamp;
309 /* The quantity number of the register's current contents. */
312 /* The number of times the register has been altered in the current basic block. */
316 /* The REG_TICK value at which rtx's containing this register are
317 valid in the hash table. If this does not equal the current
318 reg_tick value, such expressions existing in the hash table are invalid. */
322 /* The SUBREG that was set when REG_TICK was last incremented. Set
323 to -1 if the last store was to the whole register, not a subreg. */
324 unsigned int subreg_ticked;
327 /* A table of cse_reg_info indexed by register numbers. */
328 static struct cse_reg_info *cse_reg_info_table;
330 /* The size of the above table. */
331 static unsigned int cse_reg_info_table_size;
333 /* The index of the first entry that has not been initialized. */
334 static unsigned int cse_reg_info_table_first_uninitialized;
336 /* The timestamp of the current run of cse_extended_basic_block;
337 we increment this variable at the beginning of each such run.
338 The timestamp field of a
339 cse_reg_info entry matches the value of this variable if and only
340 if the entry has been initialized during the current run of
341 cse_extended_basic_block. */
342 static unsigned int cse_reg_info_timestamp;
344 /* A HARD_REG_SET containing all the hard registers for which there is
345 currently a REG expression in the hash table. Note the difference
346 from the above variables, which indicate if the REG is mentioned in some
347 expression in the table. */
349 static HARD_REG_SET hard_regs_in_table;
351 /* True if CSE has altered the CFG. */
352 static bool cse_cfg_altered;
354 /* True if CSE has altered conditional jump insns in such a way
355 that jump optimization should be redone. */
356 static bool cse_jumps_altered;
358 /* True if we put a LABEL_REF into the hash table for an INSN
359 without a REG_LABEL_OPERAND; if so, we have to rerun jump after CSE
360 to put in the note. */
361 static bool recorded_label_ref;
363 /* canon_hash stores 1 in do_not_record
364 if it notices a reference to CC0, PC, or some other volatile subexpression. */
367 static int do_not_record;
369 /* canon_hash stores 1 in hash_arg_in_memory
370 if it notices a reference to memory within the expression being hashed. */
372 static int hash_arg_in_memory;
374 /* The hash table contains buckets which are chains of `struct table_elt's,
375 each recording one expression's information.
376 That expression is in the `exp' field.
378 The canon_exp field contains a canonical (from the point of view of
379 alias analysis) version of the `exp' field.
381 Those elements with the same hash code are chained in both directions
382 through the `next_same_hash' and `prev_same_hash' fields.
384 Each set of expressions with equivalent values
385 are on a two-way chain through the `next_same_value'
386 and `prev_same_value' fields, and all point with
387 the `first_same_value' field at the first element in
388 that chain. The chain is in order of increasing cost.
389 Each element's cost value is in its `cost' field.
391 The `in_memory' field is nonzero for elements that
392 involve any reference to memory. These elements are removed
393 whenever a write is done to an unidentified location in memory.
394 To be safe, we assume that a memory address is unidentified unless
395 the address is either a symbol constant or a constant plus
396 the frame pointer or argument pointer.
398 The `related_value' field is used to connect related expressions
399 (that differ by adding an integer).
400 The related expressions are chained in a circular fashion.
401 `related_value' is zero for expressions for which this chain is not useful.
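   For instance (purely illustrative), entering
   (const (plus (symbol_ref "x") (const_int 8))) also enters
   (symbol_ref "x"), and the two elements point at each other through
   `related_value'.  If some register is later known to hold the plain
   symbol_ref, the offsetted constant can be derived from that register
   plus 8 instead of being recomputed from scratch.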
404 The `cost' field stores the cost of this element's expression.
405 The `regcost' field stores the value returned by approx_reg_cost for
406 this element's expression.
408 The `is_const' flag is set if the element is a constant (including a CONST_INT).
411 The `flag' field is used as a temporary during some search routines.
413 The `mode' field is usually the same as GET_MODE (`exp'), but
414 if `exp' is a CONST_INT and has no machine mode then the `mode'
415 field is the mode it was being used as. Each constant is
416 recorded separately for each mode it is used with. */
422 struct table_elt *next_same_hash;
423 struct table_elt *prev_same_hash;
424 struct table_elt *next_same_value;
425 struct table_elt *prev_same_value;
426 struct table_elt *first_same_value;
427 struct table_elt *related_value;
430 /* The size of this field should match the size
431 of the mode field of struct rtx_def (see rtl.h). */
432 ENUM_BITFIELD(machine_mode) mode : 8;
438 /* We don't want a lot of buckets, because we rarely have very many
439 things stored in the hash table, and a lot of buckets slows
440 down a lot of loops that happen frequently. */
442 #define HASH_SIZE (1 << HASH_SHIFT)
443 #define HASH_MASK (HASH_SIZE - 1)
445 /* Compute hash code of X in mode M. Special-case the case where X is a pseudo
446 register (hard registers may require `do_not_record' to be set). */
449 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
450 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
451 : canon_hash (X, M)) & HASH_MASK)
453 /* Like HASH, but without side-effects. */
454 #define SAFE_HASH(X, M) \
455 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
456 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
457 : safe_hash (X, M)) & HASH_MASK)
459 /* Determine whether register number N is considered a fixed register for the
460 purpose of approximating register costs.
461 It is desirable to replace other regs with fixed regs, to reduce need for non-fixed hard regs.
463 A reg wins if it is either the frame pointer or designated as fixed. */
464 #define FIXED_REGNO_P(N) \
465 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
466 || fixed_regs[N] || global_regs[N])
468 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
469 hard registers and pointers into the frame are the cheapest with a cost
470 of 0. Next come pseudos with a cost of one and other hard registers with
471 a cost of 2. Aside from these special cases, call `rtx_cost'. */
473 #define CHEAP_REGNO(N) \
474 (REGNO_PTR_FRAME_P(N) \
475 || (HARD_REGISTER_NUM_P (N) \
476 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
478 #define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET, 1))
479 #define COST_IN(X, OUTER, OPNO) (REG_P (X) ? 0 : notreg_cost (X, OUTER, OPNO))
481 /* Get the number of times this register has been updated in this basic block. */
484 #define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
486 /* Get the point at which REG was recorded in the table. */
488 #define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
490 /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a subreg). */
493 #define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
495 /* Get the quantity number for REG. */
497 #define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
499 /* Determine if the quantity number for register X represents a valid index
500 into the qty_table. */
502 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
504 /* Compare table_elt X and Y and return true iff X is cheaper than Y. */
506 #define CHEAPER(X, Y) \
507 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
509 static struct table_elt *table[HASH_SIZE];
511 /* Chain of `struct table_elt's made so far for this function
512 but currently removed from the table. */
514 static struct table_elt *free_element_chain;
516 /* Set to the cost of a constant pool reference if one was found for a
517 symbolic constant. If this was found, it means we should try to
518 convert constants into constant pool entries if they don't fit in the insn. */
521 static int constant_pool_entries_cost;
522 static int constant_pool_entries_regcost;
524 /* Trace a path through the CFG. */
528 /* The basic block for this path entry. */
532 /* This data describes a block that will be processed by
533 cse_extended_basic_block. */
535 struct cse_basic_block_data
537 /* Total number of SETs in block. */
539 /* Size of current branch path, if any. */
541 /* Current path, indicating which basic_blocks will be processed. */
542 struct branch_path *path;
546 /* Pointers to the live in/live out bitmaps for the boundaries of the current EBB. */
548 static bitmap cse_ebb_live_in, cse_ebb_live_out;
550 /* A simple bitmap to track which basic blocks have been visited
551 already as part of an already processed extended basic block. */
552 static sbitmap cse_visited_basic_blocks;
554 static bool fixed_base_plus_p (rtx x);
555 static int notreg_cost (rtx, enum rtx_code, int);
556 static int approx_reg_cost_1 (rtx *, void *);
557 static int approx_reg_cost (rtx);
558 static int preferable (int, int, int, int);
559 static void new_basic_block (void);
560 static void make_new_qty (unsigned int, enum machine_mode);
561 static void make_regs_eqv (unsigned int, unsigned int);
562 static void delete_reg_equiv (unsigned int);
563 static int mention_regs (rtx);
564 static int insert_regs (rtx, struct table_elt *, int);
565 static void remove_from_table (struct table_elt *, unsigned);
566 static void remove_pseudo_from_table (rtx, unsigned);
567 static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
568 static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
569 static rtx lookup_as_function (rtx, enum rtx_code);
570 static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
571 enum machine_mode, int, int);
572 static struct table_elt *insert (rtx, struct table_elt *, unsigned,
574 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
575 static void invalidate (rtx, enum machine_mode);
576 static void remove_invalid_refs (unsigned int);
577 static void remove_invalid_subreg_refs (unsigned int, unsigned int,
579 static void rehash_using_reg (rtx);
580 static void invalidate_memory (void);
581 static void invalidate_for_call (void);
582 static rtx use_related_value (rtx, struct table_elt *);
584 static inline unsigned canon_hash (rtx, enum machine_mode);
585 static inline unsigned safe_hash (rtx, enum machine_mode);
586 static inline unsigned hash_rtx_string (const char *);
588 static rtx canon_reg (rtx, rtx);
589 static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
591 enum machine_mode *);
592 static rtx fold_rtx (rtx, rtx);
593 static rtx equiv_constant (rtx);
594 static void record_jump_equiv (rtx, bool);
595 static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
597 static void cse_insn (rtx);
598 static void cse_prescan_path (struct cse_basic_block_data *);
599 static void invalidate_from_clobbers (rtx);
600 static rtx cse_process_notes (rtx, rtx, bool *);
601 static void cse_extended_basic_block (struct cse_basic_block_data *);
602 static void count_reg_usage (rtx, int *, rtx, int);
603 static int check_for_label_ref (rtx *, void *);
604 extern void dump_class (struct table_elt*);
605 static void get_cse_reg_info_1 (unsigned int regno);
606 static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
607 static int check_dependence (rtx *, void *);
609 static void flush_hash_table (void);
610 static bool insn_live_p (rtx, int *);
611 static bool set_live_p (rtx, rtx, int *);
612 static int cse_change_cc_mode (rtx *, void *);
613 static void cse_change_cc_mode_insn (rtx, rtx);
614 static void cse_change_cc_mode_insns (rtx, rtx, rtx);
615 static enum machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
619 #undef RTL_HOOKS_GEN_LOWPART
620 #define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
622 static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
624 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
625 virtual regs here because the simplify_*_operation routines are called
626 by integrate.c, which is called before virtual register instantiation. */
629 fixed_base_plus_p (rtx x)
631 switch (GET_CODE (x))
634 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
636 if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
638 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
639 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
644 if (!CONST_INT_P (XEXP (x, 1)))
646 return fixed_base_plus_p (XEXP (x, 0));
653 /* Dump the expressions in the equivalence class indicated by CLASSP.
654 This function is used only for debugging. */
656 dump_class (struct table_elt *classp)
658 struct table_elt *elt;
660 fprintf (stderr, "Equivalence chain for ");
661 print_rtl (stderr, classp->exp);
662 fprintf (stderr, ": \n");
664 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
666 print_rtl (stderr, elt->exp);
667 fprintf (stderr, "\n");
671 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
674 approx_reg_cost_1 (rtx *xp, void *data)
677 int *cost_p = (int *) data;
681 unsigned int regno = REGNO (x);
683 if (! CHEAP_REGNO (regno))
685 if (regno < FIRST_PSEUDO_REGISTER)
687 if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
699 /* Return an estimate of the cost of the registers used in an rtx.
700 This is mostly the number of different REG expressions in the rtx;
701 however for some exceptions like fixed registers we use a cost of
702 0. If any other hard register reference occurs, return MAX_COST. */
705 approx_reg_cost (rtx x)
709 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
715 /* Return a negative value if an rtx A, whose costs are given by COST_A
716 and REGCOST_A, is more desirable than an rtx B.
717 Return a positive value if A is less desirable, or 0 if the two are equally good. */
720 preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
722 /* First, get rid of cases involving expressions that are entirely unwanted. */
724 if (cost_a != cost_b)
726 if (cost_a == MAX_COST)
728 if (cost_b == MAX_COST)
732 /* Avoid extending lifetimes of hardregs. */
733 if (regcost_a != regcost_b)
735 if (regcost_a == MAX_COST)
737 if (regcost_b == MAX_COST)
741 /* Normal operation costs take precedence. */
742 if (cost_a != cost_b)
743 return cost_a - cost_b;
744 /* Only if these are identical consider effects on register pressure. */
745 if (regcost_a != regcost_b)
746 return regcost_a - regcost_b;
750 /* Internal function, to compute cost when X is not a register; called
751 from COST macro to keep it simple. */
754 notreg_cost (rtx x, enum rtx_code outer, int opno)
756 return ((GET_CODE (x) == SUBREG
757 && REG_P (SUBREG_REG (x))
758 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
759 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
760 && (GET_MODE_SIZE (GET_MODE (x))
761 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
762 && subreg_lowpart_p (x)
763 && TRULY_NOOP_TRUNCATION_MODES_P (GET_MODE (x),
764 GET_MODE (SUBREG_REG (x))))
766 : rtx_cost (x, outer, opno, optimize_this_for_speed_p) * 2);
770 /* Initialize CSE_REG_INFO_TABLE. */
773 init_cse_reg_info (unsigned int nregs)
775 /* Do we need to grow the table? */
776 if (nregs > cse_reg_info_table_size)
778 unsigned int new_size;
780 if (cse_reg_info_table_size < 2048)
782 /* Compute a new size that is a power of 2 and no smaller
783 than the larger of NREGS and 64. */
784 new_size = (cse_reg_info_table_size
785 ? cse_reg_info_table_size : 64);
787 while (new_size < nregs)
792 /* If we need a big table, allocate just enough to hold NREGS registers. */
797 /* Reallocate the table with NEW_SIZE entries. */
798 free (cse_reg_info_table);
799 cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
800 cse_reg_info_table_size = new_size;
801 cse_reg_info_table_first_uninitialized = 0;
804 /* Do we have all of the first NREGS entries initialized? */
805 if (cse_reg_info_table_first_uninitialized < nregs)
807 unsigned int old_timestamp = cse_reg_info_timestamp - 1;
810 /* Put the old timestamp on newly allocated entries so that they
811 will all be considered out of date. We do not touch those
812 entries beyond the first NREGS entries to be nice to the virtual memory. */
814 for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
815 cse_reg_info_table[i].timestamp = old_timestamp;
817 cse_reg_info_table_first_uninitialized = nregs;
821 /* Given REGNO, initialize the cse_reg_info entry for REGNO. */
824 get_cse_reg_info_1 (unsigned int regno)
826 /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
827 entry will be considered to have been initialized. */
828 cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;
830 /* Initialize the rest of the entry. */
831 cse_reg_info_table[regno].reg_tick = 1;
832 cse_reg_info_table[regno].reg_in_table = -1;
833 cse_reg_info_table[regno].subreg_ticked = -1;
834 cse_reg_info_table[regno].reg_qty = -regno - 1;
837 /* Find a cse_reg_info entry for REGNO. */
839 static inline struct cse_reg_info *
840 get_cse_reg_info (unsigned int regno)
842 struct cse_reg_info *p = &cse_reg_info_table[regno];
844 /* If this entry has not been initialized, go ahead and initialize it. */
846 if (p->timestamp != cse_reg_info_timestamp)
847 get_cse_reg_info_1 (regno);
852 /* Clear the hash table and initialize each register with its own quantity,
853 for a new basic block. */
856 new_basic_block (void)
862 /* Invalidate cse_reg_info_table. */
863 cse_reg_info_timestamp++;
865 /* Clear out hash table state for this pass. */
866 CLEAR_HARD_REG_SET (hard_regs_in_table);
868 /* The per-quantity values used to be initialized here, but it is
869 much faster to initialize each as it is made in `make_new_qty'. */
871 for (i = 0; i < HASH_SIZE; i++)
873 struct table_elt *first;
878 struct table_elt *last = first;
882 while (last->next_same_hash != NULL)
883 last = last->next_same_hash;
885 /* Now relink this entire hash chain into
886 the free element list. */
888 last->next_same_hash = free_element_chain;
889 free_element_chain = first;
898 /* Say that register REG contains a quantity in mode MODE not in any
899 register before and initialize that quantity. */
902 make_new_qty (unsigned int reg, enum machine_mode mode)
905 struct qty_table_elem *ent;
906 struct reg_eqv_elem *eqv;
908 gcc_assert (next_qty < max_qty);
910 q = REG_QTY (reg) = next_qty++;
912 ent->first_reg = reg;
915 ent->const_rtx = ent->const_insn = NULL_RTX;
916 ent->comparison_code = UNKNOWN;
918 eqv = &reg_eqv_table[reg];
919 eqv->next = eqv->prev = -1;
922 /* Make reg NEW equivalent to reg OLD.
923 OLD is not changing; NEW is. */
926 make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
928 unsigned int lastr, firstr;
929 int q = REG_QTY (old_reg);
930 struct qty_table_elem *ent;
934 /* Nothing should become eqv until it has a "non-invalid" qty number. */
935 gcc_assert (REGNO_QTY_VALID_P (old_reg));
937 REG_QTY (new_reg) = q;
938 firstr = ent->first_reg;
939 lastr = ent->last_reg;
941 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
942 hard regs. Among pseudos, if NEW will live longer than any other reg
943 of the same qty, and that is beyond the current basic block,
944 make it the new canonical replacement for this qty. */
945 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
946 /* Certain fixed registers might be of the class NO_REGS. This means
947 that not only can they not be allocated by the compiler, but
948 they cannot be used in substitutions or canonicalizations either. */
950 && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
951 && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
952 || (new_reg >= FIRST_PSEUDO_REGISTER
953 && (firstr < FIRST_PSEUDO_REGISTER
954 || (bitmap_bit_p (cse_ebb_live_out, new_reg)
955 && !bitmap_bit_p (cse_ebb_live_out, firstr))
956 || (bitmap_bit_p (cse_ebb_live_in, new_reg)
957 && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
959 reg_eqv_table[firstr].prev = new_reg;
960 reg_eqv_table[new_reg].next = firstr;
961 reg_eqv_table[new_reg].prev = -1;
962 ent->first_reg = new_reg;
966 /* If NEW is a hard reg (known to be non-fixed), insert at end.
967 Otherwise, insert before any non-fixed hard regs that are at the
968 end. Registers of class NO_REGS cannot be used as an
969 equivalent for anything. */
970 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
971 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
972 && new_reg >= FIRST_PSEUDO_REGISTER)
973 lastr = reg_eqv_table[lastr].prev;
974 reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
975 if (reg_eqv_table[lastr].next >= 0)
976 reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
978 qty_table[q].last_reg = new_reg;
979 reg_eqv_table[lastr].next = new_reg;
980 reg_eqv_table[new_reg].prev = lastr;
984 /* Remove REG from its equivalence class. */
987 delete_reg_equiv (unsigned int reg)
989 struct qty_table_elem *ent;
990 int q = REG_QTY (reg);
993 /* If invalid, do nothing. */
994 if (! REGNO_QTY_VALID_P (reg))
999 p = reg_eqv_table[reg].prev;
1000 n = reg_eqv_table[reg].next;
1003 reg_eqv_table[n].prev = p;
1007 reg_eqv_table[p].next = n;
1011 REG_QTY (reg) = -reg - 1;
1014 /* Remove any invalid expressions from the hash table
1015 that refer to any of the registers contained in expression X.
1017 Make sure that newly inserted references to those registers
1018 as subexpressions will be considered valid.
1020 mention_regs is not called when a register itself
1021 is being stored in the table.
1023 Return 1 if we have done something that may have changed the hash code of X. */
1027 mention_regs (rtx x)
1037 code = GET_CODE (x);
1040 unsigned int regno = REGNO (x);
1041 unsigned int endregno = END_REGNO (x);
1044 for (i = regno; i < endregno; i++)
1046 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1047 remove_invalid_refs (i);
1049 REG_IN_TABLE (i) = REG_TICK (i);
1050 SUBREG_TICKED (i) = -1;
1056 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1057 pseudo if they don't use overlapping words. We handle only pseudos
1058 here for simplicity. */
1059 if (code == SUBREG && REG_P (SUBREG_REG (x))
1060 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1062 unsigned int i = REGNO (SUBREG_REG (x));
1064 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1066 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1067 the last store to this register really stored into this
1068 subreg, then remove the memory of this subreg.
1069 Otherwise, remove any memory of the entire register and
1070 all its subregs from the table. */
1071 if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1072 || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1073 remove_invalid_refs (i);
1075 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1078 REG_IN_TABLE (i) = REG_TICK (i);
1079 SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1083 /* If X is a comparison or a COMPARE and either operand is a register
1084 that does not have a quantity, give it one. This is so that a later
1085 call to record_jump_equiv won't cause X to be assigned a different
1086 hash code and not found in the table after that call.
1088 It is not necessary to do this here, since rehash_using_reg can
1089 fix up the table later, but doing this here eliminates the need to
1090 call that expensive function in the most common case where the only
1091 use of the register is in the comparison. */
1093 if (code == COMPARE || COMPARISON_P (x))
1095 if (REG_P (XEXP (x, 0))
1096 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1097 if (insert_regs (XEXP (x, 0), NULL, 0))
1099 rehash_using_reg (XEXP (x, 0));
1103 if (REG_P (XEXP (x, 1))
1104 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1105 if (insert_regs (XEXP (x, 1), NULL, 0))
1107 rehash_using_reg (XEXP (x, 1));
1112 fmt = GET_RTX_FORMAT (code);
1113 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1115 changed |= mention_regs (XEXP (x, i));
1116 else if (fmt[i] == 'E')
1117 for (j = 0; j < XVECLEN (x, i); j++)
1118 changed |= mention_regs (XVECEXP (x, i, j));
1123 /* Update the register quantities for inserting X into the hash table
1124 with a value equivalent to CLASSP.
1125 (If the class does not contain a REG, it is irrelevant.)
1126 If MODIFIED is nonzero, X is a destination; it is being modified.
1127 Note that delete_reg_equiv should be called on a register
1128 before insert_regs is done on that register with MODIFIED != 0.
1130 Nonzero value means that elements of reg_qty have changed
1131 so X's hash code may be different. */
1134 insert_regs (rtx x, struct table_elt *classp, int modified)
1138 unsigned int regno = REGNO (x);
1141 /* If REGNO is in the equivalence table already but is of the
1142 wrong mode for that equivalence, don't do anything here. */
1144 qty_valid = REGNO_QTY_VALID_P (regno);
1147 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1149 if (ent->mode != GET_MODE (x))
1153 if (modified || ! qty_valid)
1156 for (classp = classp->first_same_value;
1158 classp = classp->next_same_value)
1159 if (REG_P (classp->exp)
1160 && GET_MODE (classp->exp) == GET_MODE (x))
1162 unsigned c_regno = REGNO (classp->exp);
1164 gcc_assert (REGNO_QTY_VALID_P (c_regno));
1166 /* Suppose that 5 is a hard reg and 100 and 101 are pseudos. Consider
1169 (set (reg:si 100) (reg:si 5))
1170 (set (reg:si 5) (reg:si 100))
1171 (set (reg:di 101) (reg:di 5))
1173 We would now set REG_QTY (101) = REG_QTY (5), but the
1174 entry for 5 is in SImode. When we use this later in
1175 copy propagation, we get the register in wrong mode. */
1176 if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
1179 make_regs_eqv (regno, c_regno);
1183 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1184 than REG_IN_TABLE to find out if there was only a single preceding
1185 invalidation - for the SUBREG - or another one, which would be
1186 for the full register. However, if we find here that REG_TICK
1187 indicates that the register is invalid, it means that it has
1188 been invalidated in a separate operation. The SUBREG might be used
1189 now (then this is a recursive call), or we might use the full REG
1190 now and a SUBREG of it later. So bump up REG_TICK so that
1191 mention_regs will do the right thing. */
1193 && REG_IN_TABLE (regno) >= 0
1194 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1196 make_new_qty (regno, GET_MODE (x));
1203 /* If X is a SUBREG, we will likely be inserting the inner register in the
1204 table. If that register doesn't have an assigned quantity number at
1205 this point but does later, the insertion that we will be doing now will
1206 not be accessible because its hash code will have changed. So assign
1207 a quantity number now. */
1209 else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
1210 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1212 insert_regs (SUBREG_REG (x), NULL, 0);
1217 return mention_regs (x);
1221 /* Compute upper and lower anchors for CST. Also compute the offset of CST
1222 from these anchors/bases such that *_BASE + *_OFFS = CST. Return false iff
1223 CST is equal to an anchor. */
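/* A worked example of the anchor computation, assuming a hypothetical
   targetm.const_anchor of 256: for CST = 300 the lower anchor is
   300 & ~255 = 256 with offset 44, and the upper anchor is
   (300 + 255) & ~255 = 512 with offset -212, so in both cases
   *_BASE + *_OFFS = 300.  */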
1226 compute_const_anchors (rtx cst,
1227 HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
1228 HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
1230 HOST_WIDE_INT n = INTVAL (cst);
1232 *lower_base = n & ~(targetm.const_anchor - 1);
1233 if (*lower_base == n)
1237 (n + (targetm.const_anchor - 1)) & ~(targetm.const_anchor - 1);
1238 *upper_offs = n - *upper_base;
1239 *lower_offs = n - *lower_base;
1243 /* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE. */
1246 insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
1247 enum machine_mode mode)
1249 struct table_elt *elt;
1254 anchor_exp = GEN_INT (anchor);
1255 hash = HASH (anchor_exp, mode);
1256 elt = lookup (anchor_exp, hash, mode);
1258 elt = insert (anchor_exp, NULL, hash, mode);
1260 exp = plus_constant (reg, offs);
1261 /* REG has just been inserted and the hash codes recomputed. */
1263 hash = HASH (exp, mode);
1265 /* Use the cost of the register rather than the whole expression. When
1266 looking up constant anchors we will further offset the corresponding
1267 expression therefore it does not make sense to prefer REGs over
1268 reg-immediate additions. Prefer instead the oldest expression. Also
1269 don't prefer pseudos over hard regs so that we derive constants in
1270 argument registers from other argument registers rather than from the
1271 original pseudo that was used to synthesize the constant. */
1272 insert_with_costs (exp, elt, hash, mode, COST (reg), 1);
1275 /* The constant CST is equivalent to the register REG. Create
1276 equivalences between the two anchors of CST and the corresponding
1277 register-offset expressions using REG. */
1280 insert_const_anchors (rtx reg, rtx cst, enum machine_mode mode)
1282 HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
1284 if (!compute_const_anchors (cst, &lower_base, &lower_offs,
1285 &upper_base, &upper_offs))
1288 /* Ignore anchors of value 0. Constants accessible from zero are simple. */
1290 if (lower_base != 0)
1291 insert_const_anchor (lower_base, reg, -lower_offs, mode);
1293 if (upper_base != 0)
1294 insert_const_anchor (upper_base, reg, -upper_offs, mode);
1297 /* We need to express ANCHOR_ELT->exp + OFFS. Walk the equivalence list of
1298 ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
1299 valid expression. Return the cheapest and oldest of such expressions. In
1300 *OLD, return how old the resulting expression is compared to the other
1301 equivalent expressions. */
1304 find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
1307 struct table_elt *elt;
1309 struct table_elt *match_elt;
1312 /* Find the cheapest and *oldest* expression to maximize the chance of
1313 reusing the same pseudo. */
1317 for (elt = anchor_elt->first_same_value, idx = 0;
1319 elt = elt->next_same_value, idx++)
1321 if (match_elt && CHEAPER (match_elt, elt))
1324 if (REG_P (elt->exp)
1325 || (GET_CODE (elt->exp) == PLUS
1326 && REG_P (XEXP (elt->exp, 0))
1327 && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
1331 /* Ignore expressions that are no longer valid. */
1332 if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
1335 x = plus_constant (elt->exp, offs);
1337 || (GET_CODE (x) == PLUS
1338 && IN_RANGE (INTVAL (XEXP (x, 1)),
1339 -targetm.const_anchor,
1340 targetm.const_anchor - 1)))
1352 /* Try to express the constant SRC_CONST using a register+offset expression
1353 derived from a constant anchor. Return it if successful or NULL_RTX, otherwise. */
1357 try_const_anchors (rtx src_const, enum machine_mode mode)
1359 struct table_elt *lower_elt, *upper_elt;
1360 HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
1361 rtx lower_anchor_rtx, upper_anchor_rtx;
1362 rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
1363 unsigned lower_old, upper_old;
1365 if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
1366 &upper_base, &upper_offs))
1369 lower_anchor_rtx = GEN_INT (lower_base);
1370 upper_anchor_rtx = GEN_INT (upper_base);
1371 lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
1372 upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);
1375 lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
1377 upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);
1384 /* Return the older expression. */
1385 return (upper_old > lower_old ? upper_exp : lower_exp);
1388 /* Look in or update the hash table. */
1390 /* Remove table element ELT from use in the table.
1391 HASH is its hash code, made using the HASH macro.
1392 It's an argument because often that is known in advance
1393 and we save much time not recomputing it. */
1396 remove_from_table (struct table_elt *elt, unsigned int hash)
1401 /* Mark this element as removed. See cse_insn. */
1402 elt->first_same_value = 0;
1404 /* Remove the table element from its equivalence class. */
1407 struct table_elt *prev = elt->prev_same_value;
1408 struct table_elt *next = elt->next_same_value;
1411 next->prev_same_value = prev;
1414 prev->next_same_value = next;
1417 struct table_elt *newfirst = next;
1420 next->first_same_value = newfirst;
1421 next = next->next_same_value;
1426 /* Remove the table element from its hash bucket. */
1429 struct table_elt *prev = elt->prev_same_hash;
1430 struct table_elt *next = elt->next_same_hash;
1433 next->prev_same_hash = prev;
1436 prev->next_same_hash = next;
1437 else if (table[hash] == elt)
1441 /* This entry is not in the proper hash bucket. This can happen
1442 when two classes were merged by `merge_equiv_classes'. Search
1443 for the hash bucket that it heads. This happens only very
1444 rarely, so the cost is acceptable. */
1445 for (hash = 0; hash < HASH_SIZE; hash++)
1446 if (table[hash] == elt)
1451 /* Remove the table element from its related-value circular chain. */
1453 if (elt->related_value != 0 && elt->related_value != elt)
1455 struct table_elt *p = elt->related_value;
1457 while (p->related_value != elt)
1458 p = p->related_value;
1459 p->related_value = elt->related_value;
1460 if (p->related_value == p)
1461 p->related_value = 0;
1464 /* Now add it to the free element chain. */
1465 elt->next_same_hash = free_element_chain;
1466 free_element_chain = elt;
1469 /* Same as above, but X is a pseudo-register. */
1472 remove_pseudo_from_table (rtx x, unsigned int hash)
1474 struct table_elt *elt;
1476 /* Because a pseudo-register can be referenced in more than one
1477 mode, we might have to remove more than one table entry. */
1478 while ((elt = lookup_for_remove (x, hash, VOIDmode)))
1479 remove_from_table (elt, hash);
1482 /* Look up X in the hash table and return its table element,
1483 or 0 if X is not in the table.
1485 MODE is the machine-mode of X, or if X is an integer constant
1486 with VOIDmode then MODE is the mode with which X will be used.
1488 Here we are satisfied to find an expression whose tree structure looks like X. */
1491 static struct table_elt *
1492 lookup (rtx x, unsigned int hash, enum machine_mode mode)
1494 struct table_elt *p;
1496 for (p = table[hash]; p; p = p->next_same_hash)
1497 if (mode == p->mode && ((x == p->exp && REG_P (x))
1498 || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1504 /* Like `lookup' but don't care whether the table element uses invalid regs.
1505 Also ignore discrepancies in the machine mode of a register. */
1507 static struct table_elt *
1508 lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
1510 struct table_elt *p;
1514 unsigned int regno = REGNO (x);
1516 /* Don't check the machine mode when comparing registers;
1517 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1518 for (p = table[hash]; p; p = p->next_same_hash)
1520 && REGNO (p->exp) == regno)
1525 for (p = table[hash]; p; p = p->next_same_hash)
1527 && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1534 /* Look for an expression equivalent to X and with code CODE.
1535 If one is found, return that expression. */
1538 lookup_as_function (rtx x, enum rtx_code code)
1541 = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1546 for (p = p->first_same_value; p; p = p->next_same_value)
1547 if (GET_CODE (p->exp) == code
1548 /* Make sure this is a valid entry in the table. */
1549 && exp_equiv_p (p->exp, p->exp, 1, false))
1555 /* Insert X in the hash table, assuming HASH is its hash code and
1556 CLASSP is an element of the class it should go in (or 0 if a new
1557 class should be made). COST is the cost of X and REG_COST is the
1558 cost of registers in X. It is inserted at the proper position to
1559 keep the class in the order cheapest first.
1561 MODE is the machine-mode of X, or if X is an integer constant
1562 with VOIDmode then MODE is the mode with which X will be used.
1564 For elements of equal cheapness, the most recent one
1565 goes in front, except that the first element in the list
1566 remains first unless a cheaper element is added. The order of
1567 pseudo-registers does not matter, as canon_reg will be called to
1568 find the cheapest when a register is retrieved from the table.
1570 The in_memory field in the hash table element is set to 0.
1571 The caller must set it nonzero if appropriate.
1573 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1574 and if insert_regs returns a nonzero value
1575 you must then recompute its hash code before calling here.
1577 If necessary, update table showing constant values of quantities. */
1579 static struct table_elt *
1580 insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
1581 enum machine_mode mode, int cost, int reg_cost)
1583 struct table_elt *elt;
1585 /* If X is a register and we haven't made a quantity for it,
1586 something is wrong. */
1587 gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1589 /* If X is a hard register, show it is being put in the table. */
1590 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1591 add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));
1593 /* Put an element for X into the right hash bucket. */
1595 elt = free_element_chain;
1597 free_element_chain = elt->next_same_hash;
1599 elt = XNEW (struct table_elt);
1602 elt->canon_exp = NULL_RTX;
1604 elt->regcost = reg_cost;
1605 elt->next_same_value = 0;
1606 elt->prev_same_value = 0;
1607 elt->next_same_hash = table[hash];
1608 elt->prev_same_hash = 0;
1609 elt->related_value = 0;
1612 elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1615 table[hash]->prev_same_hash = elt;
1618 /* Put it into the proper value-class. */
1621 classp = classp->first_same_value;
1622 if (CHEAPER (elt, classp))
1623 /* Insert at the head of the class. */
1625 struct table_elt *p;
1626 elt->next_same_value = classp;
1627 classp->prev_same_value = elt;
1628 elt->first_same_value = elt;
1630 for (p = classp; p; p = p->next_same_value)
1631 p->first_same_value = elt;
1635 /* Insert not at head of the class. */
1636 /* Put it after the last element cheaper than X. */
1637 struct table_elt *p, *next;
1640 (next = p->next_same_value) && CHEAPER (next, elt);
1644 /* Put it after P and before NEXT. */
1645 elt->next_same_value = next;
1647 next->prev_same_value = elt;
1649 elt->prev_same_value = p;
1650 p->next_same_value = elt;
1651 elt->first_same_value = classp;
1655 elt->first_same_value = elt;
1657 /* If this is a constant being set equivalent to a register or a register
1658 being set equivalent to a constant, note the constant equivalence.
1660 If this is a constant, it cannot be equivalent to a different constant,
1661 and a constant is the only thing that can be cheaper than a register. So
1662 we know the register is the head of the class (before the constant was inserted).
1665 If this is a register that is not already known equivalent to a
1666 constant, we must check the entire class.
1668 If this is a register that is already known equivalent to a constant,
1669 update the qtys `const_insn' to show that `this_insn' is the latest
1670 insn making that quantity equivalent to the constant. */
1672 if (elt->is_const && classp && REG_P (classp->exp)
1675 int exp_q = REG_QTY (REGNO (classp->exp));
1676 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1678 exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1679 exp_ent->const_insn = this_insn;
1684 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1687 struct table_elt *p;
1689 for (p = classp; p != 0; p = p->next_same_value)
1691 if (p->is_const && !REG_P (p->exp))
1693 int x_q = REG_QTY (REGNO (x));
1694 struct qty_table_elem *x_ent = &qty_table[x_q];
1697 = gen_lowpart (GET_MODE (x), p->exp);
1698 x_ent->const_insn = this_insn;
1705 && qty_table[REG_QTY (REGNO (x))].const_rtx
1706 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1707 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1709 /* If this is a constant with symbolic value,
1710 and it has a term with an explicit integer value,
1711 link it up with related expressions. */
1712 if (GET_CODE (x) == CONST)
1714 rtx subexp = get_related_value (x);
1716 struct table_elt *subelt, *subelt_prev;
1720 /* Get the integer-free subexpression in the hash table. */
1721 subhash = SAFE_HASH (subexp, mode);
1722 subelt = lookup (subexp, subhash, mode);
1724 subelt = insert (subexp, NULL, subhash, mode);
1725 /* Initialize SUBELT's circular chain if it has none. */
1726 if (subelt->related_value == 0)
1727 subelt->related_value = subelt;
1728 /* Find the element in the circular chain that precedes SUBELT. */
1729 subelt_prev = subelt;
1730 while (subelt_prev->related_value != subelt)
1731 subelt_prev = subelt_prev->related_value;
1732 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1733 This way the element that follows SUBELT is the oldest one. */
1734 elt->related_value = subelt_prev->related_value;
1735 subelt_prev->related_value = elt;
1742 /* Wrap insert_with_costs by passing the default costs. */
1744 static struct table_elt *
1745 insert (rtx x, struct table_elt *classp, unsigned int hash,
1746 enum machine_mode mode)
1749 insert_with_costs (x, classp, hash, mode, COST (x), approx_reg_cost (x));
1753 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1754 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1755 the two classes equivalent.
1757 CLASS1 will be the surviving class; CLASS2 should not be used after this call.
1760 Any invalid entries in CLASS2 will not be copied. */
1763 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1765 struct table_elt *elt, *next, *new_elt;
1767 /* Ensure we start with the head of the classes. */
1768 class1 = class1->first_same_value;
1769 class2 = class2->first_same_value;
1771 /* If they were already equal, forget it. */
1772 if (class1 == class2)
1775 for (elt = class2; elt; elt = next)
1779 enum machine_mode mode = elt->mode;
1781 next = elt->next_same_value;
1783 /* Remove old entry, make a new one in CLASS1's class.
1784 Don't do this for invalid entries as we cannot find their
1785 hash code (it also isn't necessary). */
1786 if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1788 bool need_rehash = false;
1790 hash_arg_in_memory = 0;
1791 hash = HASH (exp, mode);
1795 need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1796 delete_reg_equiv (REGNO (exp));
1799 if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
1800 remove_pseudo_from_table (exp, hash);
1802 remove_from_table (elt, hash);
1804 if (insert_regs (exp, class1, 0) || need_rehash)
1806 rehash_using_reg (exp);
1807 hash = HASH (exp, mode);
1809 new_elt = insert (exp, class1, hash, mode);
1810 new_elt->in_memory = hash_arg_in_memory;
1815 /* Flush the entire hash table. */
1818 flush_hash_table (void)
1821 struct table_elt *p;
1823 for (i = 0; i < HASH_SIZE; i++)
1824 for (p = table[i]; p; p = table[i])
1826 /* Note that invalidate can remove elements
1827 after P in the current hash chain. */
1829 invalidate (p->exp, VOIDmode);
1831 remove_from_table (p, i);
1835 /* Function called for each rtx to check whether a true dependence exists. */
1836 struct check_dependence_data
1838 enum machine_mode mode;
1844 check_dependence (rtx *x, void *data)
1846 struct check_dependence_data *d = (struct check_dependence_data *) data;
1847 if (*x && MEM_P (*x))
1848 return canon_true_dependence (d->exp, d->mode, d->addr, *x, NULL_RTX);
1853 /* Remove from the hash table, or mark as invalid, all expressions whose
1854 values could be altered by storing in X. X is a register, a subreg, or
1855 a memory reference with nonvarying address (because, when a memory
1856 reference with a varying address is stored in, all memory references are
1857 removed by invalidate_memory so specific invalidation is superfluous).
1858 FULL_MODE, if not VOIDmode, indicates that this much should be
1859 invalidated instead of just the amount indicated by the mode of X. This
1860 is only used for bitfield stores into memory.
1862 A nonvarying address may be just a register or just a symbol reference,
1863 or it may be either of those plus a numeric offset. */
1866 invalidate (rtx x, enum machine_mode full_mode)
1869 struct table_elt *p;
1872 switch (GET_CODE (x))
1876 /* If X is a register, dependencies on its contents are recorded
1877 through the qty number mechanism. Just change the qty number of
1878 the register, mark it as invalid for expressions that refer to it,
1879 and remove it itself. */
1880 unsigned int regno = REGNO (x);
1881 unsigned int hash = HASH (x, GET_MODE (x));
1883 /* Remove REGNO from any quantity list it might be on and indicate
1884 that its value might have changed. If it is a pseudo, remove its
1885 entry from the hash table.
1887 For a hard register, we do the first two actions above for any
1888 additional hard registers corresponding to X. Then, if any of these
1889 registers are in the table, we must remove any REG entries that
1890 overlap these registers. */
1892 delete_reg_equiv (regno);
1894 SUBREG_TICKED (regno) = -1;
1896 if (regno >= FIRST_PSEUDO_REGISTER)
1897 remove_pseudo_from_table (x, hash);
1900 HOST_WIDE_INT in_table
1901 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1902 unsigned int endregno = END_HARD_REGNO (x);
1903 unsigned int tregno, tendregno, rn;
1904 struct table_elt *p, *next;
1906 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1908 for (rn = regno + 1; rn < endregno; rn++)
1910 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1911 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1912 delete_reg_equiv (rn);
1914 SUBREG_TICKED (rn) = -1;
1918 for (hash = 0; hash < HASH_SIZE; hash++)
1919 for (p = table[hash]; p; p = next)
1921 next = p->next_same_hash;
1924 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1927 tregno = REGNO (p->exp);
1928 tendregno = END_HARD_REGNO (p->exp);
1929 if (tendregno > regno && tregno < endregno)
1930 remove_from_table (p, hash);
1937 invalidate (SUBREG_REG (x), VOIDmode);
1941 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1942 invalidate (XVECEXP (x, 0, i), VOIDmode);
1946 /* This is part of a disjoint return value; extract the location in
1947 question ignoring the offset. */
1948 invalidate (XEXP (x, 0), VOIDmode);
1952 addr = canon_rtx (get_addr (XEXP (x, 0)));
1953 /* Calculate the canonical version of X here so that
1954 true_dependence doesn't generate new RTL for X on each call. */
1957 /* Remove all hash table elements that refer to overlapping pieces of
1959 if (full_mode == VOIDmode)
1960 full_mode = GET_MODE (x);
1962 for (i = 0; i < HASH_SIZE; i++)
1964 struct table_elt *next;
1966 for (p = table[i]; p; p = next)
1968 next = p->next_same_hash;
1971 struct check_dependence_data d;
1973 /* Just canonicalize the expression once;
1974 otherwise each time we call invalidate
1975 true_dependence will canonicalize the
1976 expression again. */
1978 p->canon_exp = canon_rtx (p->exp);
1982 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1983 remove_from_table (p, i);
1994 /* Remove all expressions that refer to register REGNO,
1995 since they are already invalid, and we are about to
1996 mark that register valid again and don't want the old
1997 expressions to reappear as valid. */
2000 remove_invalid_refs (unsigned int regno)
2003 struct table_elt *p, *next;
2005 for (i = 0; i < HASH_SIZE; i++)
2006 for (p = table[i]; p; p = next)
2008 next = p->next_same_hash;
2010 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2011 remove_from_table (p, i);
2015 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET, and mode MODE. */
2018 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
2019 enum machine_mode mode)
2022 struct table_elt *p, *next;
2023 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2025 for (i = 0; i < HASH_SIZE; i++)
2026 for (p = table[i]; p; p = next)
2029 next = p->next_same_hash;
2032 && (GET_CODE (exp) != SUBREG
2033 || !REG_P (SUBREG_REG (exp))
2034 || REGNO (SUBREG_REG (exp)) != regno
2035 || (((SUBREG_BYTE (exp)
2036 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2037 && SUBREG_BYTE (exp) <= end))
2038 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2039 remove_from_table (p, i);
2043 /* Recompute the hash codes of any valid entries in the hash table that
2044 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2046 This is called when we make a jump equivalence. */
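/* The rehash is needed because register hash codes are based on quantity
numbers; once X has been given a new quantity, valid entries that mention
X may be sitting in the wrong hash chain. */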
2049 rehash_using_reg (rtx x)
2052 struct table_elt *p, *next;
2055 if (GET_CODE (x) == SUBREG)
2058 /* If X is not a register or if the register is known not to be in any
2059 valid entries in the table, we have no work to do. */
2062 || REG_IN_TABLE (REGNO (x)) < 0
2063 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2066 /* Scan all hash chains looking for valid entries that mention X.
2067 If we find one and it is in the wrong hash chain, move it. */
2069 for (i = 0; i < HASH_SIZE; i++)
2070 for (p = table[i]; p; p = next)
2072 next = p->next_same_hash;
2073 if (reg_mentioned_p (x, p->exp)
2074 && exp_equiv_p (p->exp, p->exp, 1, false)
2075 && i != (hash = SAFE_HASH (p->exp, p->mode)))
2077 if (p->next_same_hash)
2078 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2080 if (p->prev_same_hash)
2081 p->prev_same_hash->next_same_hash = p->next_same_hash;
2083 table[i] = p->next_same_hash;
2085 p->next_same_hash = table[hash];
2086 p->prev_same_hash = 0;
2088 table[hash]->prev_same_hash = p;
2094 /* Remove from the hash table any expressions that are call-clobbered
2095 registers. Also update their TICK values. */
2098 invalidate_for_call (void)
2100 unsigned int regno, endregno;
2103 struct table_elt *p, *next;
2106 /* Go through all the hard registers. For each that is clobbered in
2107 a CALL_INSN, remove the register from quantity chains and update
2108 reg_tick if defined. Also see if any of these registers is currently
2111 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2112 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2114 delete_reg_equiv (regno);
2115 if (REG_TICK (regno) >= 0)
2118 SUBREG_TICKED (regno) = -1;
2121 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2124 /* In the case where we have no call-clobbered hard registers in the
2125 table, we are done. Otherwise, scan the table and remove any
2126 entry that overlaps a call-clobbered register. */
2129 for (hash = 0; hash < HASH_SIZE; hash++)
2130 for (p = table[hash]; p; p = next)
2132 next = p->next_same_hash;
2135 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2138 regno = REGNO (p->exp);
2139 endregno = END_HARD_REGNO (p->exp);
2141 for (i = regno; i < endregno; i++)
2142 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2144 remove_from_table (p, hash);
2150 /* Given an expression X of type CONST,
2151 and ELT which is its table entry (or 0 if it
2152 is not in the hash table),
2153 return an alternate expression for X as a register plus integer.
2154 If none can be found, return 0. */
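/* For example, if X is (const (plus (symbol_ref "s") (const_int 100)))
and some register REG is known to hold the related value
(const (plus (symbol_ref "s") (const_int 96))), the result is
(plus REG (const_int 4)). */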
2157 use_related_value (rtx x, struct table_elt *elt)
2159 struct table_elt *relt = 0;
2160 struct table_elt *p, *q;
2161 HOST_WIDE_INT offset;
2163 /* First, is there anything related known?
2164 If we have a table element, we can tell from that.
2165 Otherwise, must look it up. */
2167 if (elt != 0 && elt->related_value != 0)
2169 else if (elt == 0 && GET_CODE (x) == CONST)
2171 rtx subexp = get_related_value (x);
2173 relt = lookup (subexp,
2174 SAFE_HASH (subexp, GET_MODE (subexp)),
2181 /* Search all related table entries for one that has an
2182 equivalent register. */
2187 /* This loop is strange in that it is executed in two different cases.
2188 The first is when X is already in the table. Then it is searching
2189 the RELATED_VALUE list of X's class (RELT). The second case is when
2190 X is not in the table. Then RELT points to a class for the related value.
2193 Ensure that, whatever case we are in, we ignore classes that have
2194 the same value as X. */
2196 if (rtx_equal_p (x, p->exp))
2199 for (q = p->first_same_value; q; q = q->next_same_value)
2206 p = p->related_value;
2208 /* We went all the way around, so there is nothing to be found.
2209 Alternatively, perhaps RELT was in the table for some other reason
2210 and it has no related values recorded. */
2211 if (p == relt || p == 0)
2218 offset = (get_integer_term (x) - get_integer_term (p->exp));
2219 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2220 return plus_constant (q->exp, offset);
2224 /* Hash a string. Just add its bytes up. */
2225 static inline unsigned
2226 hash_rtx_string (const char *ps)
2229 const unsigned char *p = (const unsigned char *) ps;
2238 /* Same as hash_rtx, but call CB on each rtx if it is not NULL.
2239 When the callback returns true, we continue with the new rtx. */
2242 hash_rtx_cb (const_rtx x, enum machine_mode mode,
2243 int *do_not_record_p, int *hash_arg_in_memory_p,
2244 bool have_reg_qty, hash_rtx_callback_function cb)
2250 enum machine_mode newmode;
2253 /* Used to turn recursion into iteration. We can't rely on GCC's
2254 tail-recursion elimination since we need to keep accumulating values
2260 /* Invoke the callback first. */
2262 && ((*cb) (x, mode, &newx, &newmode)))
2264 hash += hash_rtx_cb (newx, newmode, do_not_record_p,
2265 hash_arg_in_memory_p, have_reg_qty, cb);
2269 code = GET_CODE (x);
2274 unsigned int regno = REGNO (x);
2276 if (do_not_record_p && !reload_completed)
2278 /* On some machines, we can't record any non-fixed hard register,
2279 because extending its life will cause reload problems. We
2280 consider ap, fp, sp, gp to be fixed for this purpose.
2282 We also consider CCmode registers to be fixed for this purpose;
2283 failure to do so leads to failure to simplify 0<100 type of
2286 On all machines, we can't record any global registers.
2287 Nor should we record any register that is in a small
2288 class, as defined by TARGET_CLASS_LIKELY_SPILLED_P. */
2291 if (regno >= FIRST_PSEUDO_REGISTER)
2293 else if (x == frame_pointer_rtx
2294 || x == hard_frame_pointer_rtx
2295 || x == arg_pointer_rtx
2296 || x == stack_pointer_rtx
2297 || x == pic_offset_table_rtx)
2299 else if (global_regs[regno])
2301 else if (fixed_regs[regno])
2303 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2305 else if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
2307 else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno)))
2314 *do_not_record_p = 1;
2319 hash += ((unsigned int) REG << 7);
2320 hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2324 /* We handle SUBREG of a REG specially because the underlying
2325 reg changes its hash value with every value change; we don't
2326 want to have to forget unrelated subregs when one subreg changes. */
2329 if (REG_P (SUBREG_REG (x)))
2331 hash += (((unsigned int) SUBREG << 7)
2332 + REGNO (SUBREG_REG (x))
2333 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2340 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2341 + (unsigned int) INTVAL (x));
2345 /* This is like the general case, except that it only counts
2346 the integers representing the constant. */
2347 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2348 if (GET_MODE (x) != VOIDmode)
2349 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2351 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2352 + (unsigned int) CONST_DOUBLE_HIGH (x));
2356 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2357 hash += fixed_hash (CONST_FIXED_VALUE (x));
2365 units = CONST_VECTOR_NUNITS (x);
2367 for (i = 0; i < units; ++i)
2369 elt = CONST_VECTOR_ELT (x, i);
2370 hash += hash_rtx_cb (elt, GET_MODE (elt),
2371 do_not_record_p, hash_arg_in_memory_p,
2378 /* Assume there is only one rtx object for any given label. */
2380 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2381 differences and differences between each stage's debugging dumps. */
2382 hash += (((unsigned int) LABEL_REF << 7)
2383 + CODE_LABEL_NUMBER (XEXP (x, 0)));
2388 /* Don't hash on the symbol's address to avoid bootstrap differences.
2389 Different hash values may cause expressions to be recorded in
2390 different orders and thus different registers to be used in the
2391 final assembler. This also avoids differences in the dump files
2392 between various stages. */
2394 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2397 h += (h << 7) + *p++; /* ??? revisit */
2399 hash += ((unsigned int) SYMBOL_REF << 7) + h;
2404 /* We don't record if marked volatile or if BLKmode since we don't
2405 know the size of the move. */
2406 if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2408 *do_not_record_p = 1;
2411 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2412 *hash_arg_in_memory_p = 1;
2414 /* Now that we have already found this special case,
2415 might as well speed it up as much as possible. */
2416 hash += (unsigned) MEM;
2421 /* A USE that mentions non-volatile memory needs special
2422 handling since the MEM may be BLKmode which normally
2423 prevents an entry from being made. Pure calls are
2424 marked by a USE which mentions BLKmode memory.
2425 See calls.c:emit_call_1. */
2426 if (MEM_P (XEXP (x, 0))
2427 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2429 hash += (unsigned) USE;
2432 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2433 *hash_arg_in_memory_p = 1;
2435 /* Now that we have already found this special case,
2436 might as well speed it up as much as possible. */
2437 hash += (unsigned) MEM;
2452 case UNSPEC_VOLATILE:
2453 if (do_not_record_p) {
2454 *do_not_record_p = 1;
2462 if (do_not_record_p && MEM_VOLATILE_P (x))
2464 *do_not_record_p = 1;
2469 /* We don't want to take the filename and line into account. */
2470 hash += (unsigned) code + (unsigned) GET_MODE (x)
2471 + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2472 + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2473 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2475 if (ASM_OPERANDS_INPUT_LENGTH (x))
2477 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2479 hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
2480 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2481 do_not_record_p, hash_arg_in_memory_p,
2484 (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2487 hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2488 x = ASM_OPERANDS_INPUT (x, 0);
2489 mode = GET_MODE (x);
2501 i = GET_RTX_LENGTH (code) - 1;
2502 hash += (unsigned) code + (unsigned) GET_MODE (x);
2503 fmt = GET_RTX_FORMAT (code);
2509 /* If we are about to do the last recursive call
2510 needed at this level, change it into iteration.
2511 This function is called enough to be worth it. */
2518 hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
2519 hash_arg_in_memory_p,
2524 for (j = 0; j < XVECLEN (x, i); j++)
2525 hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
2526 hash_arg_in_memory_p,
2531 hash += hash_rtx_string (XSTR (x, i));
2535 hash += (unsigned int) XINT (x, i);
2550 /* Hash an rtx. We are careful to make sure the value is never negative.
2551 Equivalent registers hash identically.
2552 MODE is used in hashing for CONST_INTs only;
2553 otherwise the mode of X is used.
2555 Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2557 If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2558 a MEM rtx which does not have the RTX_UNCHANGING_P bit set.
2560 Note that cse_insn knows that the hash code of a MEM expression
2561 is just (int) MEM plus the hash code of the address. */
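/* Note that when HAVE_REG_QTY is true a REG is hashed through its quantity
number rather than its register number, which is what makes equivalent
registers hash identically. */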
2564 hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
2565 int *hash_arg_in_memory_p, bool have_reg_qty)
2567 return hash_rtx_cb (x, mode, do_not_record_p,
2568 hash_arg_in_memory_p, have_reg_qty, NULL);
2571 /* Hash an rtx X for cse via hash_rtx.
2572 Stores 1 in do_not_record if any subexpression is volatile.
2573 Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2574 does not have the RTX_UNCHANGING_P bit set. */
2576 static inline unsigned
2577 canon_hash (rtx x, enum machine_mode mode)
2579 return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2582 /* Like canon_hash but with no side effects, i.e. do_not_record
2583 and hash_arg_in_memory are not changed. */
2585 static inline unsigned
2586 safe_hash (rtx x, enum machine_mode mode)
2588 int dummy_do_not_record;
2589 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2592 /* Return 1 iff X and Y would canonicalize into the same thing,
2593 without actually constructing the canonicalization of either one.
2594 If VALIDATE is nonzero,
2595 we assume X is an expression being processed from the rtl
2596 and Y was found in the hash table. We check register refs
2597 in Y for being marked as valid.
2599 If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
2602 exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
2608 /* Note: it is incorrect to assume an expression is equivalent to itself
2609 if VALIDATE is nonzero. */
2610 if (x == y && !validate)
2613 if (x == 0 || y == 0)
2616 code = GET_CODE (x);
2617 if (code != GET_CODE (y))
2620 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2621 if (GET_MODE (x) != GET_MODE (y))
2624 /* MEMs referring to different address spaces are not equivalent. */
2625 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2638 return XEXP (x, 0) == XEXP (y, 0);
2641 return XSTR (x, 0) == XSTR (y, 0);
2645 return REGNO (x) == REGNO (y);
2648 unsigned int regno = REGNO (y);
2650 unsigned int endregno = END_REGNO (y);
2652 /* If the quantities are not the same, the expressions are not
2653 equivalent. If they are and we are not to validate, they
2654 are equivalent. Otherwise, ensure all regs are up-to-date. */
2656 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2662 for (i = regno; i < endregno; i++)
2663 if (REG_IN_TABLE (i) != REG_TICK (i))
2672 /* A volatile mem should not be considered equivalent to any
2674 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2677 /* Can't merge two expressions in different alias sets, since we
2678 can decide that the expression is transparent in a block when
2679 it isn't, due to it being set with the different alias set.
2681 Also, can't merge two expressions with different MEM_ATTRS.
2682 They could e.g. be two different entities allocated into the
2683 same space on the stack (see e.g. PR25130). In that case, the
2684 MEM addresses can be the same, even though the two MEMs are
2685 absolutely not equivalent.
2687 But because really all MEM attributes should be the same for
2688 equivalent MEMs, we just use the invariant that MEMs that have
2689 the same attributes share the same mem_attrs data structure. */
2690 if (MEM_ATTRS (x) != MEM_ATTRS (y))
2695 /* For commutative operations, check both orders. */
2703 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2705 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2706 validate, for_gcse))
2707 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2709 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2710 validate, for_gcse)));
2713 /* We don't use the generic code below because we want to
2714 disregard filename and line numbers. */
2716 /* A volatile asm isn't equivalent to any other. */
2717 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2720 if (GET_MODE (x) != GET_MODE (y)
2721 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2722 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2723 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2724 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2725 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2728 if (ASM_OPERANDS_INPUT_LENGTH (x))
2730 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2731 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2732 ASM_OPERANDS_INPUT (y, i),
2734 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2735 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2745 /* Compare the elements. If any pair of corresponding elements
2746 fail to match, return 0 for the whole thing. */
2748 fmt = GET_RTX_FORMAT (code);
2749 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2754 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2755 validate, for_gcse))
2760 if (XVECLEN (x, i) != XVECLEN (y, i))
2762 for (j = 0; j < XVECLEN (x, i); j++)
2763 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2764 validate, for_gcse))
2769 if (strcmp (XSTR (x, i), XSTR (y, i)))
2774 if (XINT (x, i) != XINT (y, i))
2779 if (XWINT (x, i) != XWINT (y, i))
2795 /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2796 the result if necessary. INSN is as for canon_reg. */
2799 validate_canon_reg (rtx *xloc, rtx insn)
2803 rtx new_rtx = canon_reg (*xloc, insn);
2805 /* If replacing pseudo with hard reg or vice versa, ensure the
2806 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2807 gcc_assert (insn && new_rtx);
2808 validate_change (insn, xloc, new_rtx, 1);
2812 /* Canonicalize an expression:
2813 replace each register reference inside it
2814 with the "oldest" equivalent register.
2816 If INSN is nonzero validate_change is used to ensure that INSN remains valid
2817 after we make our substitution. The calls are made with IN_GROUP nonzero
2818 so apply_change_group must be called upon the outermost return from this
2819 function (unless INSN is zero). The result of apply_change_group can
2820 generally be discarded since the changes we are making are optional. */
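/* For example, after processing (set (reg 105) (reg 100)) the two registers
share a quantity, so a later use of (reg 105) is rewritten as the first
register in that quantity's chain, typically (reg 100). */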
2823 canon_reg (rtx x, rtx insn)
2832 code = GET_CODE (x);
2852 struct qty_table_elem *ent;
2854 /* Never replace a hard reg, because hard regs can appear
2855 in more than one machine mode, and we must preserve the mode
2856 of each occurrence. Also, some hard regs appear in
2857 MEMs that are shared and mustn't be altered. Don't try to
2858 replace any reg that maps to a reg of class NO_REGS. */
2859 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2860 || ! REGNO_QTY_VALID_P (REGNO (x)))
2863 q = REG_QTY (REGNO (x));
2864 ent = &qty_table[q];
2865 first = ent->first_reg;
2866 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2867 : REGNO_REG_CLASS (first) == NO_REGS ? x
2868 : gen_rtx_REG (ent->mode, first));
2875 fmt = GET_RTX_FORMAT (code);
2876 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2881 validate_canon_reg (&XEXP (x, i), insn);
2882 else if (fmt[i] == 'E')
2883 for (j = 0; j < XVECLEN (x, i); j++)
2884 validate_canon_reg (&XVECEXP (x, i, j), insn);
2890 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2891 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2892 find what values are being compared.
2894 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2895 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2896 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2897 compared to produce cc0.
2899 The return value is the comparison operator and is either the code of
2900 A or the code corresponding to the inverse of the comparison. */
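/* For example, if *PARG2 is (const_int 0) and *PARG1 is a register whose
equivalence class contains (compare (reg A) (reg B)), we return with
*PARG1 and *PARG2 set to (reg A) and (reg B). */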
2902 static enum rtx_code
2903 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
2904 enum machine_mode *pmode1, enum machine_mode *pmode2)
2908 static unsigned int nesting = 0;
2909 /* Prevent an infinite loop for RX and FRV. */
2910 if (++ nesting < 100)
2913 arg1 = *parg1, arg2 = *parg2;
2915 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2917 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2919 /* Set nonzero when we find something of interest. */
2921 int reverse_code = 0;
2922 struct table_elt *p = 0;
2924 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2925 On machines with CC0, this is the only case that can occur, since
2926 fold_rtx will return the COMPARE or item being compared with zero
2929 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2932 /* If ARG1 is a comparison operator and CODE is testing for
2933 STORE_FLAG_VALUE, get the inner arguments. */
2935 else if (COMPARISON_P (arg1))
2937 #ifdef FLOAT_STORE_FLAG_VALUE
2938 REAL_VALUE_TYPE fsfv;
2942 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2943 && code == LT && STORE_FLAG_VALUE == -1)
2944 #ifdef FLOAT_STORE_FLAG_VALUE
2945 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
2946 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
2947 REAL_VALUE_NEGATIVE (fsfv)))
2952 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2953 && code == GE && STORE_FLAG_VALUE == -1)
2954 #ifdef FLOAT_STORE_FLAG_VALUE
2955 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
2956 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
2957 REAL_VALUE_NEGATIVE (fsfv)))
2960 x = arg1, reverse_code = 1;
2963 /* ??? We could also check for
2965 (ne (and (eq (...) (const_int 1))) (const_int 0))
2967 and related forms, but let's wait until we see them occurring. */
2970 /* Look up ARG1 in the hash table and see if it has an equivalence
2971 that lets us see what is being compared. */
2972 p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
2975 p = p->first_same_value;
2977 /* If what we compare is already known to be constant, that is as good as it gets.
2979 We need to break the loop in this case, because otherwise we
2980 can have an infinite loop when looking at a reg that is known
2981 to be a constant which is the same as a comparison of a reg
2982 against zero which appears later in the insn stream, which in
2983 turn is constant and the same as the comparison of the first reg
2989 for (; p; p = p->next_same_value)
2991 enum machine_mode inner_mode = GET_MODE (p->exp);
2992 #ifdef FLOAT_STORE_FLAG_VALUE
2993 REAL_VALUE_TYPE fsfv;
2996 /* If the entry isn't valid, skip it. */
2997 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3000 /* If it's the same comparison we're already looking at, skip it. */
3001 if (COMPARISON_P (p->exp)
3002 && XEXP (p->exp, 0) == arg1
3003 && XEXP (p->exp, 1) == arg2)
3006 if (GET_CODE (p->exp) == COMPARE
3007 /* Another possibility is that this machine has a compare insn
3008 that includes the comparison code. In that case, ARG1 would
3009 be equivalent to a comparison operation that would set ARG1 to
3010 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3011 ORIG_CODE is the actual comparison being done; if it is an EQ,
3012 we must reverse ORIG_CODE. On machine with a negative value
3013 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3016 && val_signbit_known_set_p (inner_mode,
3018 #ifdef FLOAT_STORE_FLAG_VALUE
3020 && SCALAR_FLOAT_MODE_P (inner_mode)
3021 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3022 REAL_VALUE_NEGATIVE (fsfv)))
3025 && COMPARISON_P (p->exp)))
3030 else if ((code == EQ
3032 && val_signbit_known_set_p (inner_mode,
3034 #ifdef FLOAT_STORE_FLAG_VALUE
3036 && SCALAR_FLOAT_MODE_P (inner_mode)
3037 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3038 REAL_VALUE_NEGATIVE (fsfv)))
3041 && COMPARISON_P (p->exp))
3048 /* If this is a non-trapping address, e.g. fp + constant, the
3049 equivalent is a better operand since it may let us predict
3050 the value of the comparison. */
3051 else if (!rtx_addr_can_trap_p (p->exp))
3058 /* If we didn't find a useful equivalence for ARG1, we are done.
3059 Otherwise, set up for the next iteration. */
3063 /* If we need to reverse the comparison, make sure that that is
3064 possible -- we can't necessarily infer the value of GE from LT
3065 with floating-point operands. */
3068 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3069 if (reversed == UNKNOWN)
3074 else if (COMPARISON_P (x))
3075 code = GET_CODE (x);
3076 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3079 /* Return our results. Return the modes from before fold_rtx
3080 because fold_rtx might produce const_int, and then it's too late. */
3081 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3082 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3089 /* If X is a nontrivial arithmetic operation on an argument for which
3090 a constant value can be determined, return the result of operating
3091 on that value, as a constant. Otherwise, return X, possibly with
3092 one or more operands changed to a forward-propagated constant.
3094 If X is a register whose contents are known, we do NOT return
3095 those contents here; equiv_constant is called to perform that task.
3096 For SUBREGs and MEMs, we do that both here and in equiv_constant.
3098 INSN is the insn that we may be modifying. If it is 0, make a copy
3099 of X before modifying it. */
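/* For example, if (reg 100) is currently known to be equivalent to
(const_int 6), folding (plus:SI (reg 100) (const_int 4)) yields
(const_int 10). */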
3102 fold_rtx (rtx x, rtx insn)
3105 enum machine_mode mode;
3111 /* Operands of X. */
3115 /* Constant equivalents of first three operands of X;
3116 0 when no such equivalent is known. */
3121 /* The mode of the first operand of X. We need this for sign and zero extension. */
3123 enum machine_mode mode_arg0;
3128 /* Try to perform some initial simplifications on X. */
3129 code = GET_CODE (x);
3134 if ((new_rtx = equiv_constant (x)) != NULL_RTX)
3147 /* No use simplifying an EXPR_LIST
3148 since they are used only for lists of args
3149 in a function call's REG_EQUAL note. */
3155 return prev_insn_cc0;
3161 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3162 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3163 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3167 #ifdef NO_FUNCTION_CSE
3169 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3174 /* Anything else goes through the loop below. */
3179 mode = GET_MODE (x);
3183 mode_arg0 = VOIDmode;
3185 /* Try folding our operands.
3186 Then see which ones have constant values known. */
3188 fmt = GET_RTX_FORMAT (code);
3189 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3192 rtx folded_arg = XEXP (x, i), const_arg;
3193 enum machine_mode mode_arg = GET_MODE (folded_arg);
3195 switch (GET_CODE (folded_arg))
3200 const_arg = equiv_constant (folded_arg);
3210 const_arg = folded_arg;
3215 folded_arg = prev_insn_cc0;
3216 mode_arg = prev_insn_cc0_mode;
3217 const_arg = equiv_constant (folded_arg);
3222 folded_arg = fold_rtx (folded_arg, insn);
3223 const_arg = equiv_constant (folded_arg);
3227 /* For the first three operands, see if the operand
3228 is constant or equivalent to a constant. */
3232 folded_arg0 = folded_arg;
3233 const_arg0 = const_arg;
3234 mode_arg0 = mode_arg;
3237 folded_arg1 = folded_arg;
3238 const_arg1 = const_arg;
3241 const_arg2 = const_arg;
3245 /* Pick the least expensive of the argument and an equivalent constant
3248 && const_arg != folded_arg
3249 && COST_IN (const_arg, code, i) <= COST_IN (folded_arg, code, i)
3251 /* It's not safe to substitute the operand of a conversion
3252 operator with a constant, as the conversion's identity
3253 depends upon the mode of its operand. This optimization
3254 is handled by the call to simplify_unary_operation. */
3255 && (GET_RTX_CLASS (code) != RTX_UNARY
3256 || GET_MODE (const_arg) == mode_arg0
3257 || (code != ZERO_EXTEND
3258 && code != SIGN_EXTEND
3260 && code != FLOAT_TRUNCATE
3261 && code != FLOAT_EXTEND
3264 && code != UNSIGNED_FLOAT
3265 && code != UNSIGNED_FIX)))
3266 folded_arg = const_arg;
3268 if (folded_arg == XEXP (x, i))
3271 if (insn == NULL_RTX && !changed)
3274 validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
3279 /* Canonicalize X if necessary, and keep const_argN and folded_argN
3280 consistent with the order in X. */
3281 if (canonicalize_change_group (insn, x))
3284 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3285 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3288 apply_change_group ();
3291 /* If X is an arithmetic operation, see if we can simplify it. */
3293 switch (GET_RTX_CLASS (code))
3297 /* We can't simplify extension ops unless we know the
3299 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3300 && mode_arg0 == VOIDmode)
3303 new_rtx = simplify_unary_operation (code, mode,
3304 const_arg0 ? const_arg0 : folded_arg0,
3310 case RTX_COMM_COMPARE:
3311 /* See what items are actually being compared and set FOLDED_ARG[01]
3312 to those values and CODE to the actual comparison code. If any are
3313 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3314 do anything if both operands are already known to be constant. */
3316 /* ??? Vector mode comparisons are not supported yet. */
3317 if (VECTOR_MODE_P (mode))
3320 if (const_arg0 == 0 || const_arg1 == 0)
3322 struct table_elt *p0, *p1;
3323 rtx true_rtx, false_rtx;
3324 enum machine_mode mode_arg1;
3326 if (SCALAR_FLOAT_MODE_P (mode))
3328 #ifdef FLOAT_STORE_FLAG_VALUE
3329 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3330 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3332 true_rtx = NULL_RTX;
3334 false_rtx = CONST0_RTX (mode);
3338 true_rtx = const_true_rtx;
3339 false_rtx = const0_rtx;
3342 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3343 &mode_arg0, &mode_arg1);
3345 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3346 what kinds of things are being compared, so we can't do
3347 anything with this comparison. */
3349 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3352 const_arg0 = equiv_constant (folded_arg0);
3353 const_arg1 = equiv_constant (folded_arg1);
3355 /* If we do not now have two constants being compared, see
3356 if we can nevertheless deduce some things about the
3358 if (const_arg0 == 0 || const_arg1 == 0)
3360 if (const_arg1 != NULL)
3362 rtx cheapest_simplification;
3365 struct table_elt *p;
3367 /* See if we can find an equivalent of folded_arg0
3368 that gets us a cheaper expression, possibly a
3369 constant through simplifications. */
3370 p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
3375 cheapest_simplification = x;
3376 cheapest_cost = COST (x);
3378 for (p = p->first_same_value; p != NULL; p = p->next_same_value)
3382 /* If the entry isn't valid, skip it. */
3383 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3386 /* Try to simplify using this equivalence. */
3388 = simplify_relational_operation (code, mode,
3393 if (simp_result == NULL)
3396 cost = COST (simp_result);
3397 if (cost < cheapest_cost)
3399 cheapest_cost = cost;
3400 cheapest_simplification = simp_result;
3404 /* If we have a cheaper expression now, use that
3405 and try folding it further, from the top. */
3406 if (cheapest_simplification != x)
3407 return fold_rtx (copy_rtx (cheapest_simplification),
3412 /* See if the two operands are the same. */
3414 if ((REG_P (folded_arg0)
3415 && REG_P (folded_arg1)
3416 && (REG_QTY (REGNO (folded_arg0))
3417 == REG_QTY (REGNO (folded_arg1))))
3418 || ((p0 = lookup (folded_arg0,
3419 SAFE_HASH (folded_arg0, mode_arg0),
3421 && (p1 = lookup (folded_arg1,
3422 SAFE_HASH (folded_arg1, mode_arg0),
3424 && p0->first_same_value == p1->first_same_value))
3425 folded_arg1 = folded_arg0;
3427 /* If FOLDED_ARG0 is a register, see if the comparison we are
3428 doing now is either the same as we did before or the reverse
3429 (we only check the reverse if not floating-point). */
3430 else if (REG_P (folded_arg0))
3432 int qty = REG_QTY (REGNO (folded_arg0));
3434 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3436 struct qty_table_elem *ent = &qty_table[qty];
3438 if ((comparison_dominates_p (ent->comparison_code, code)
3439 || (! FLOAT_MODE_P (mode_arg0)
3440 && comparison_dominates_p (ent->comparison_code,
3441 reverse_condition (code))))
3442 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3444 && rtx_equal_p (ent->comparison_const,
3446 || (REG_P (folded_arg1)
3447 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3449 if (comparison_dominates_p (ent->comparison_code, code))
3464 /* If we are comparing against zero, see if the first operand is
3465 equivalent to an IOR with a constant. If so, we may be able to
3466 determine the result of this comparison. */
3467 if (const_arg1 == const0_rtx && !const_arg0)
3469 rtx y = lookup_as_function (folded_arg0, IOR);
3473 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3474 && CONST_INT_P (inner_const)
3475 && INTVAL (inner_const) != 0)
3476 folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
3480 rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
3481 rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
3482 new_rtx = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
3487 case RTX_COMM_ARITH:
3491 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3492 with that LABEL_REF as its second operand. If so, the result is
3493 the first operand of that MINUS. This handles switches with an
3494 ADDR_DIFF_VEC table. */
3495 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3498 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
3499 : lookup_as_function (folded_arg0, MINUS);
3501 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3502 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
3505 /* Now try for a CONST of a MINUS like the above. */
3506 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3507 : lookup_as_function (folded_arg0, CONST))) != 0
3508 && GET_CODE (XEXP (y, 0)) == MINUS
3509 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3510 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
3511 return XEXP (XEXP (y, 0), 0);
3514 /* Likewise if the operands are in the other order. */
3515 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3518 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
3519 : lookup_as_function (folded_arg1, MINUS);
3521 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3522 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
3525 /* Now try for a CONST of a MINUS like the above. */
3526 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3527 : lookup_as_function (folded_arg1, CONST))) != 0
3528 && GET_CODE (XEXP (y, 0)) == MINUS
3529 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3530 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
3531 return XEXP (XEXP (y, 0), 0);
3534 /* If second operand is a register equivalent to a negative
3535 CONST_INT, see if we can find a register equivalent to the
3536 positive constant. Make a MINUS if so. Don't do this for
3537 a non-negative constant since we might then alternate between
3538 choosing positive and negative constants. Having the positive
3539 constant previously-used is the more common case. Be sure
3540 the resulting constant is non-negative; if const_arg1 were
3541 the smallest negative number this would overflow: depending
3542 on the mode, this would either just be the same value (and
3543 hence not save anything) or be incorrect. */
3544 if (const_arg1 != 0 && CONST_INT_P (const_arg1)
3545 && INTVAL (const_arg1) < 0
3546 /* This used to test
3548 -INTVAL (const_arg1) >= 0
3550 But The Sun V5.0 compilers mis-compiled that test. So
3551 instead we test for the problematic value in a more direct
3552 manner and hope the Sun compilers get it correct. */
3553 && INTVAL (const_arg1) !=
3554 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
3555 && REG_P (folded_arg1))
3557 rtx new_const = GEN_INT (-INTVAL (const_arg1));
3559 = lookup (new_const, SAFE_HASH (new_const, mode), mode);
3562 for (p = p->first_same_value; p; p = p->next_same_value)
3564 return simplify_gen_binary (MINUS, mode, folded_arg0,
3565 canon_reg (p->exp, NULL_RTX));
3570 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3571 If so, produce (PLUS Z C2-C). */
3572 if (const_arg1 != 0 && CONST_INT_P (const_arg1))
3574 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
3575 if (y && CONST_INT_P (XEXP (y, 1)))
3576 return fold_rtx (plus_constant (copy_rtx (y),
3577 -INTVAL (const_arg1)),
3584 case SMIN: case SMAX: case UMIN: case UMAX:
3585 case IOR: case AND: case XOR:
3587 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3588 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3589 is known to be of similar form, we may be able to replace the
3590 operation with a combined operation. This may eliminate the
3591 intermediate operation if every use is simplified in this way.
3592 Note that the similar optimization done by combine.c only works
3593 if the intermediate operation's result has only one reference. */
3595 if (REG_P (folded_arg0)
3596 && const_arg1 && CONST_INT_P (const_arg1))
3599 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
3600 rtx y, inner_const, new_const;
3601 rtx canon_const_arg1 = const_arg1;
3602 enum rtx_code associate_code;
3605 && (INTVAL (const_arg1) >= GET_MODE_PRECISION (mode)
3606 || INTVAL (const_arg1) < 0))
3608 if (SHIFT_COUNT_TRUNCATED)
3609 canon_const_arg1 = GEN_INT (INTVAL (const_arg1)
3610 & (GET_MODE_BITSIZE (mode)
3616 y = lookup_as_function (folded_arg0, code);
3620 /* If we have compiled a statement like
3621 "if (x == (x & mask1))", and now are looking at
3622 "x & mask2", we will have a case where the first operand
3623 of Y is the same as our first operand. Unless we detect
3624 this case, an infinite loop will result. */
3625 if (XEXP (y, 0) == folded_arg0)
3628 inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
3629 if (!inner_const || !CONST_INT_P (inner_const))
3632 /* Don't associate these operations if they are a PLUS with the
3633 same constant and it is a power of two. These might be doable
3634 with a pre- or post-increment. Similarly for two subtracts of
3635 identical powers of two with post decrement. */
3637 if (code == PLUS && const_arg1 == inner_const
3638 && ((HAVE_PRE_INCREMENT
3639 && exact_log2 (INTVAL (const_arg1)) >= 0)
3640 || (HAVE_POST_INCREMENT
3641 && exact_log2 (INTVAL (const_arg1)) >= 0)
3642 || (HAVE_PRE_DECREMENT
3643 && exact_log2 (- INTVAL (const_arg1)) >= 0)
3644 || (HAVE_POST_DECREMENT
3645 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
3648 /* ??? Vector mode shifts by scalar
3649 shift operand are not supported yet. */
3650 if (is_shift && VECTOR_MODE_P (mode))
3654 && (INTVAL (inner_const) >= GET_MODE_PRECISION (mode)
3655 || INTVAL (inner_const) < 0))
3657 if (SHIFT_COUNT_TRUNCATED)
3658 inner_const = GEN_INT (INTVAL (inner_const)
3659 & (GET_MODE_BITSIZE (mode) - 1));
3664 /* Compute the code used to compose the constants. For example,
3665 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
3667 associate_code = (is_shift || code == MINUS ? PLUS : code);
3669 new_const = simplify_binary_operation (associate_code, mode,
3676 /* If we are associating shift operations, don't let this
3677 produce a shift of the size of the object or larger.
3678 This could occur when we follow a sign-extend by a right
3679 shift on a machine that does a sign-extend as a pair
3683 && CONST_INT_P (new_const)
3684 && INTVAL (new_const) >= GET_MODE_PRECISION (mode))
3686 /* As an exception, we can turn an ASHIFTRT of this
3687 form into a shift of the number of bits - 1. */
3688 if (code == ASHIFTRT)
3689 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
3690 else if (!side_effects_p (XEXP (y, 0)))
3691 return CONST0_RTX (mode);
3696 y = copy_rtx (XEXP (y, 0));
3698 /* If Y contains our first operand (the most common way this
3699 can happen is if Y is a MEM), we would go into an infinite
3700 loop if we tried to fold it. So don't in that case. */
3702 if (! reg_mentioned_p (folded_arg0, y))
3703 y = fold_rtx (y, insn);
3705 return simplify_gen_binary (code, mode, y, new_const);
3709 case DIV: case UDIV:
3710 /* ??? The associative optimization performed immediately above is
3711 also possible for DIV and UDIV using associate_code of MULT.
3712 However, we would need extra code to verify that the
3713 multiplication does not overflow, that is, there is no overflow
3714 in the calculation of new_const. */
3721 new_rtx = simplify_binary_operation (code, mode,
3722 const_arg0 ? const_arg0 : folded_arg0,
3723 const_arg1 ? const_arg1 : folded_arg1);
3727 /* (lo_sum (high X) X) is simply X. */
3728 if (code == LO_SUM && const_arg0 != 0
3729 && GET_CODE (const_arg0) == HIGH
3730 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
3735 case RTX_BITFIELD_OPS:
3736 new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
3737 const_arg0 ? const_arg0 : folded_arg0,
3738 const_arg1 ? const_arg1 : folded_arg1,
3739 const_arg2 ? const_arg2 : XEXP (x, 2));
3746 return new_rtx ? new_rtx : x;
3749 /* Return a constant value currently equivalent to X.
3750 Return 0 if we don't know one. */
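/* For example, if the quantity of (reg:SI 100) currently records
(const_int 5) as its constant equivalent, we return (const_int 5);
for a register with no known constant value we return 0. */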
3753 equiv_constant (rtx x)
3756 && REGNO_QTY_VALID_P (REGNO (x)))
3758 int x_q = REG_QTY (REGNO (x));
3759 struct qty_table_elem *x_ent = &qty_table[x_q];
3761 if (x_ent->const_rtx)
3762 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
3765 if (x == 0 || CONSTANT_P (x))
3768 if (GET_CODE (x) == SUBREG)
3770 enum machine_mode mode = GET_MODE (x);
3771 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3774 /* See if we previously assigned a constant value to this SUBREG. */
3775 if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
3776 || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
3777 || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
3780 /* If we didn't and if doing so makes sense, see if we previously
3781 assigned a constant value to the enclosing word mode SUBREG. */
3782 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode)
3783 && GET_MODE_SIZE (word_mode) < GET_MODE_SIZE (imode))
3785 int byte = SUBREG_BYTE (x) - subreg_lowpart_offset (mode, word_mode);
3786 if (byte >= 0 && (byte % UNITS_PER_WORD) == 0)
3788 rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
3789 new_rtx = lookup_as_function (y, CONST_INT);
3791 return gen_lowpart (mode, new_rtx);
3795 /* Otherwise see if we already have a constant for the inner REG. */
3796 if (REG_P (SUBREG_REG (x))
3797 && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
3798 return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));
3803 /* If X is a MEM, see if it is a constant-pool reference, or look it up in
3804 the hash table in case its value was seen before. */
3808 struct table_elt *elt;
3810 x = avoid_constant_pool_reference (x);
3814 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
3818 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3819 if (elt->is_const && CONSTANT_P (elt->exp))
3826 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken" branch.
3829 In certain cases, this can cause us to add an equivalence. For example,
3830 if we are following the taken case of `if (i == 2)'
3832 we can add the fact that `i' and '2' are now equivalent.
3834 In any case, we can record that this comparison was passed. If the same
3835 comparison is seen later, we will know its value. */
3838 record_jump_equiv (rtx insn, bool taken)
3840 int cond_known_true;
3843 enum machine_mode mode, mode0, mode1;
3844 int reversed_nonequality = 0;
3847 /* Ensure this is the right kind of insn. */
3848 gcc_assert (any_condjump_p (insn));
3850 set = pc_set (insn);
3852 /* See if this jump condition is known true or false. */
3854 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
3856 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
3858 /* Get the type of comparison being done and the operands being compared.
3859 If we had to reverse a non-equality condition, record that fact so we
3860 know that it isn't valid for floating-point. */
3861 code = GET_CODE (XEXP (SET_SRC (set), 0));
3862 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
3863 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
3865 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
3866 if (! cond_known_true)
3868 code = reversed_comparison_code_parts (code, op0, op1, insn);
3870 /* Don't remember if we can't find the inverse. */
3871 if (code == UNKNOWN)
3875 /* The mode is the mode of the non-constant. */
3877 if (mode1 != VOIDmode)
3880 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
3883 /* Yet another form of subreg creation. In this case, we want something in
3884 MODE, and we should assume OP has MODE iff it is naturally modeless. */
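/* E.g. a CONST_INT, which has VOIDmode, is returned unchanged, while a
REG in a wider mode becomes a lowpart SUBREG in MODE (or whatever
lowpart_subreg simplifies that to). */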
3887 record_jump_cond_subreg (enum machine_mode mode, rtx op)
3889 enum machine_mode op_mode = GET_MODE (op);
3890 if (op_mode == mode || op_mode == VOIDmode)
3892 return lowpart_subreg (mode, op, op_mode);
3895 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
3896 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
3897 Make any useful entries we can with that information. Called from
3898 above function and called recursively. */
3901 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
3902 rtx op1, int reversed_nonequality)
3904 unsigned op0_hash, op1_hash;
3905 int op0_in_memory, op1_in_memory;
3906 struct table_elt *op0_elt, *op1_elt;
3908 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
3909 we know that they are also equal in the smaller mode (this is also
3910 true for all smaller modes whether or not there is a SUBREG, but
3911 is not worth testing for with no SUBREG). */
3913 /* Note that GET_MODE (op0) may not equal MODE. */
3914 if (code == EQ && paradoxical_subreg_p (op0))
3916 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3917 rtx tem = record_jump_cond_subreg (inner_mode, op1);
3919 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3920 reversed_nonequality);
3923 if (code == EQ && paradoxical_subreg_p (op1))
3925 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3926 rtx tem = record_jump_cond_subreg (inner_mode, op0);
3928 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3929 reversed_nonequality);
3932 /* Similarly, if this is an NE comparison, and either is a SUBREG
3933 making a smaller mode, we know the whole thing is also NE. */
3935 /* Note that GET_MODE (op0) may not equal MODE;
3936 if we test MODE instead, we can get an infinite recursion
3937 alternating between two modes each wider than MODE. */
3939 if (code == NE && GET_CODE (op0) == SUBREG
3940 && subreg_lowpart_p (op0)
3941 && (GET_MODE_SIZE (GET_MODE (op0))
3942 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
3944 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3945 rtx tem = record_jump_cond_subreg (inner_mode, op1);
3947 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3948 reversed_nonequality);
3951 if (code == NE && GET_CODE (op1) == SUBREG
3952 && subreg_lowpart_p (op1)
3953 && (GET_MODE_SIZE (GET_MODE (op1))
3954 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
3956 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3957 rtx tem = record_jump_cond_subreg (inner_mode, op0);
3959 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3960 reversed_nonequality);
3963 /* Hash both operands. */
3966 hash_arg_in_memory = 0;
3967 op0_hash = HASH (op0, mode);
3968 op0_in_memory = hash_arg_in_memory;
3974 hash_arg_in_memory = 0;
3975 op1_hash = HASH (op1, mode);
3976 op1_in_memory = hash_arg_in_memory;
3981 /* Look up both operands. */
3982 op0_elt = lookup (op0, op0_hash, mode);
3983 op1_elt = lookup (op1, op1_hash, mode);
3985 /* If both operands are already equivalent or if they are not in the
3986 table but are identical, do nothing. */
3987 if ((op0_elt != 0 && op1_elt != 0
3988 && op0_elt->first_same_value == op1_elt->first_same_value)
3989 || op0 == op1 || rtx_equal_p (op0, op1))
3992 /* If we aren't setting two things equal all we can do is save this
3993 comparison. Similarly if this is floating-point. In the latter
3994 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
3995 If we record the equality, we might inadvertently delete code
3996 whose intent was to change -0 to +0. */
3998 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4000 struct qty_table_elem *ent;
4003 /* If we reversed a floating-point comparison, if OP0 is not a
4004 register, or if OP1 is neither a register nor a constant, we can't
4008 op1 = equiv_constant (op1);
4010 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4011 || !REG_P (op0) || op1 == 0)
4014 /* Put OP0 in the hash table if it isn't already. This gives it a
4015 new quantity number. */
4018 if (insert_regs (op0, NULL, 0))
4020 rehash_using_reg (op0);
4021 op0_hash = HASH (op0, mode);
4023 /* If OP0 is contained in OP1, this changes its hash code
4024 as well. Faster to rehash than to check, except
4025 for the simple case of a constant. */
4026 if (! CONSTANT_P (op1))
4027 op1_hash = HASH (op1,mode);
4030 op0_elt = insert (op0, NULL, op0_hash, mode);
4031 op0_elt->in_memory = op0_in_memory;
4034 qty = REG_QTY (REGNO (op0));
4035 ent = &qty_table[qty];
4037 ent->comparison_code = code;
4040 /* Look it up again--in case op0 and op1 are the same. */
4041 op1_elt = lookup (op1, op1_hash, mode);
4043 /* Put OP1 in the hash table so it gets a new quantity number. */
4046 if (insert_regs (op1, NULL, 0))
4048 rehash_using_reg (op1);
4049 op1_hash = HASH (op1, mode);
4052 op1_elt = insert (op1, NULL, op1_hash, mode);
4053 op1_elt->in_memory = op1_in_memory;
4056 ent->comparison_const = NULL_RTX;
4057 ent->comparison_qty = REG_QTY (REGNO (op1));
4061 ent->comparison_const = op1;
4062 ent->comparison_qty = -1;
4068 /* If either side is still missing an equivalence, make it now,
4069 then merge the equivalences. */
4073 if (insert_regs (op0, NULL, 0))
4075 rehash_using_reg (op0);
4076 op0_hash = HASH (op0, mode);
4079 op0_elt = insert (op0, NULL, op0_hash, mode);
4080 op0_elt->in_memory = op0_in_memory;
4085 if (insert_regs (op1, NULL, 0))
4087 rehash_using_reg (op1);
4088 op1_hash = HASH (op1, mode);
4091 op1_elt = insert (op1, NULL, op1_hash, mode);
4092 op1_elt->in_memory = op1_in_memory;
4095 merge_equiv_classes (op0_elt, op1_elt);
4098 /* CSE processing for one instruction.
4099 First simplify sources and addresses of all assignments
4100 in the instruction, using previously-computed equivalent values.
4101 Then install the new sources and destinations in the table
4102 of available values. */
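/* For example, given

(set (reg 101) (plus (reg 100) (const_int 4)))
...
(set (reg 102) (plus (reg 100) (const_int 4)))

within one extended basic block, and assuming neither reg 100 nor reg 101
is set in between, the second source is an available value and is
replaced by (reg 101). */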
4104 /* Data on one SET contained in the instruction. */
4108 /* The SET rtx itself. */
4110 /* The SET_SRC of the rtx (the original value, if it is changing). */
4112 /* The hash-table element for the SET_SRC of the SET. */
4113 struct table_elt *src_elt;
4114 /* Hash value for the SET_SRC. */
4116 /* Hash value for the SET_DEST. */
4118 /* The SET_DEST, with SUBREG, etc., stripped. */
4120 /* Nonzero if the SET_SRC is in memory. */
4122 /* Nonzero if the SET_SRC contains something
4123 whose value cannot be predicted and understood. */
4125 /* Original machine mode, in case it becomes a CONST_INT.
4126 The size of this field should match the size of the mode
4127 field of struct rtx_def (see rtl.h). */
4128 ENUM_BITFIELD(machine_mode) mode : 8;
4129 /* A constant equivalent for SET_SRC, if any. */
4131 /* Hash value of constant equivalent for SET_SRC. */
4132 unsigned src_const_hash;
4133 /* Table entry for constant equivalent for SET_SRC, if any. */
4134 struct table_elt *src_const_elt;
4135 /* Table entry for the destination address. */
4136 struct table_elt *dest_addr_elt;
4142 rtx x = PATTERN (insn);
4148 struct table_elt *src_eqv_elt = 0;
4149 int src_eqv_volatile = 0;
4150 int src_eqv_in_memory = 0;
4151 unsigned src_eqv_hash = 0;
4153 struct set *sets = (struct set *) 0;
4157 /* Records what this insn does to set CC0. */
4159 this_insn_cc0_mode = VOIDmode;
4162 /* Find all the SETs and CLOBBERs in this instruction.
4163 Record all the SETs in the array `set' and count them.
4164 Also determine whether there is a CLOBBER that invalidates
4165 all memory references, or all references at varying addresses. */
4169 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4171 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4172 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4173 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4177 if (GET_CODE (x) == SET)
4179 sets = XALLOCA (struct set);
4182 /* Ignore SETs that are unconditional jumps.
4183 They never need cse processing, so this does not hurt.
4184 The reason is not efficiency but rather
4185 so that we can test at the end for instructions
4186 that have been simplified to unconditional jumps
4187 and not be misled by unchanged instructions
4188 that were unconditional jumps to begin with. */
4189 if (SET_DEST (x) == pc_rtx
4190 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4193 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4194 The hard function value register is used only once, to copy to
4195 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4196 Ensure we invalidate the destination register. On the 80386 no
4197 other code would invalidate it since it is a fixed_reg.
4198 We need not check the return of apply_change_group; see canon_reg. */
4200 else if (GET_CODE (SET_SRC (x)) == CALL)
4202 canon_reg (SET_SRC (x), insn);
4203 apply_change_group ();
4204 fold_rtx (SET_SRC (x), insn);
4205 invalidate (SET_DEST (x), VOIDmode);
4210 else if (GET_CODE (x) == PARALLEL)
4212 int lim = XVECLEN (x, 0);
4214 sets = XALLOCAVEC (struct set, lim);
4216 /* Find all regs explicitly clobbered in this insn,
4217 and ensure they are not replaced with any other regs
4218 elsewhere in this insn.
4219 When a reg that is clobbered is also used for input,
4220 we should presume that that is for a reason,
4221 and we should not substitute some other register
4222 which is not supposed to be clobbered.
4223 Therefore, this loop cannot be merged into the one below
4224 because a CALL may precede a CLOBBER and refer to the
4225 value clobbered. We must not let a canonicalization do
4226 anything in that case. */
4227 for (i = 0; i < lim; i++)
4229 rtx y = XVECEXP (x, 0, i);
4230 if (GET_CODE (y) == CLOBBER)
4232 rtx clobbered = XEXP (y, 0);
4234 if (REG_P (clobbered)
4235 || GET_CODE (clobbered) == SUBREG)
4236 invalidate (clobbered, VOIDmode);
4237 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4238 || GET_CODE (clobbered) == ZERO_EXTRACT)
4239 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4243 for (i = 0; i < lim; i++)
4245 rtx y = XVECEXP (x, 0, i);
4246 if (GET_CODE (y) == SET)
4248 /* As above, we ignore unconditional jumps and call-insns and
4249 ignore the result of apply_change_group. */
4250 if (GET_CODE (SET_SRC (y)) == CALL)
4252 canon_reg (SET_SRC (y), insn);
4253 apply_change_group ();
4254 fold_rtx (SET_SRC (y), insn);
4255 invalidate (SET_DEST (y), VOIDmode);
4257 else if (SET_DEST (y) == pc_rtx
4258 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4261 sets[n_sets++].rtl = y;
4263 else if (GET_CODE (y) == CLOBBER)
4265 /* If we clobber memory, canon the address.
4266 This does nothing when a register is clobbered
4267 because we have already invalidated the reg. */
4268 if (MEM_P (XEXP (y, 0)))
4269 canon_reg (XEXP (y, 0), insn);
4271 else if (GET_CODE (y) == USE
4272 && ! (REG_P (XEXP (y, 0))
4273 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4274 canon_reg (y, insn);
4275 else if (GET_CODE (y) == CALL)
4277 /* The result of apply_change_group can be ignored; see canon_reg. */
4279 canon_reg (y, insn);
4280 apply_change_group ();
4285 else if (GET_CODE (x) == CLOBBER)
4287 if (MEM_P (XEXP (x, 0)))
4288 canon_reg (XEXP (x, 0), insn);
4290 /* Canonicalize a USE of a pseudo register or memory location. */
4291 else if (GET_CODE (x) == USE
4292 && ! (REG_P (XEXP (x, 0))
4293 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4294 canon_reg (x, insn);
4295 else if (GET_CODE (x) == ASM_OPERANDS)
4297 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
4299 rtx input = ASM_OPERANDS_INPUT (x, i);
4300 if (!(REG_P (input) && REGNO (input) < FIRST_PSEUDO_REGISTER))
4302 input = canon_reg (input, insn);
4303 validate_change (insn, &ASM_OPERANDS_INPUT (x, i), input, 1);
4307 else if (GET_CODE (x) == CALL)
4309 /* The result of apply_change_group can be ignored; see canon_reg. */
4310 canon_reg (x, insn);
4311 apply_change_group ();
4314 else if (DEBUG_INSN_P (insn))
4315 canon_reg (PATTERN (insn), insn);
4317 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4318 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4319 is handled specially for this case, and if it isn't set, then there will
4320 be no equivalence for the destination. */
4321 if (n_sets == 1 && REG_NOTES (insn) != 0
4322 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4323 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4324 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4326 /* The result of apply_change_group can be ignored; see canon_reg. */
4327 canon_reg (XEXP (tem, 0), insn);
4328 apply_change_group ();
4329 src_eqv = fold_rtx (XEXP (tem, 0), insn);
4330 XEXP (tem, 0) = copy_rtx (src_eqv);
4331 df_notes_rescan (insn);
4334 /* Canonicalize sources and addresses of destinations.
4335 We do this in a separate pass to avoid problems when a MATCH_DUP is
4336 present in the insn pattern. In that case, we want to ensure that
4337 we don't break the duplicate nature of the pattern. So we will replace
4338 both operands at the same time. Otherwise, we would fail to find an
4339 equivalent substitution in the loop calling validate_change below.
4341 We used to suppress canonicalization of DEST if it appears in SRC,
4342 but we don't do this any more. */
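/* Added illustration (register numbers are made up): if canon_reg would
   rewrite (reg 101) as (reg 99) and the pattern contains
   (plus (reg 101) (reg 101)) where the second operand comes from a
   MATCH_DUP, replacing only one copy would leave a pattern that no longer
   matches its template. Queueing both replacements and applying them in
   one change group keeps the duplicated operands identical. */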
4344 for (i = 0; i < n_sets; i++)
4346 rtx dest = SET_DEST (sets[i].rtl);
4347 rtx src = SET_SRC (sets[i].rtl);
4348 rtx new_rtx = canon_reg (src, insn);
4350 validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
4352 if (GET_CODE (dest) == ZERO_EXTRACT)
4354 validate_change (insn, &XEXP (dest, 1),
4355 canon_reg (XEXP (dest, 1), insn), 1);
4356 validate_change (insn, &XEXP (dest, 2),
4357 canon_reg (XEXP (dest, 2), insn), 1);
4360 while (GET_CODE (dest) == SUBREG
4361 || GET_CODE (dest) == ZERO_EXTRACT
4362 || GET_CODE (dest) == STRICT_LOW_PART)
4363 dest = XEXP (dest, 0);
4366 canon_reg (dest, insn);
4369 /* Now that we have done all the replacements, we can apply the change
4370 group and see if they all work. Note that this will cause some
4371 canonicalizations that would have worked individually not to be applied
4372 because some other canonicalization didn't work, but this should not
matter since the insn is valid anyway.
4375 The result of apply_change_group can be ignored; see canon_reg. */
4377 apply_change_group ();
4379 /* Set sets[i].src_elt to the class each source belongs to.
4380 Detect assignments from or to volatile things
4381 and set sets[i] to zero so they will be ignored
4382 in the rest of this function.
4384 Nothing in this loop changes the hash table or the register chains. */
4386 for (i = 0; i < n_sets; i++)
4388 bool repeat = false;
4391 struct table_elt *elt = 0, *p;
4392 enum machine_mode mode;
4395 rtx src_related = 0;
4396 bool src_related_is_const_anchor = false;
4397 struct table_elt *src_const_elt = 0;
4398 int src_cost = MAX_COST;
4399 int src_eqv_cost = MAX_COST;
4400 int src_folded_cost = MAX_COST;
4401 int src_related_cost = MAX_COST;
4402 int src_elt_cost = MAX_COST;
4403 int src_regcost = MAX_COST;
4404 int src_eqv_regcost = MAX_COST;
4405 int src_folded_regcost = MAX_COST;
4406 int src_related_regcost = MAX_COST;
4407 int src_elt_regcost = MAX_COST;
4408 /* Set nonzero if we need to call force_const_mem on the
4409 contents of src_folded before using it. */
4410 int src_folded_force_flag = 0;
4412 dest = SET_DEST (sets[i].rtl);
4413 src = SET_SRC (sets[i].rtl);
4415 /* If SRC is a constant that has no machine mode,
4416 hash it with the destination's machine mode.
4417 This way we can keep different modes separate. */
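/* Added illustration (made-up register number): in
   (set (reg:SI 100) (const_int 5)) the CONST_INT has VOIDmode, so it is
   hashed here in SImode, the destination's mode; the same constant stored
   into a DImode register would be hashed separately under DImode. */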
4419 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4420 sets[i].mode = mode;
4424 enum machine_mode eqvmode = mode;
4425 if (GET_CODE (dest) == STRICT_LOW_PART)
4426 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4428 hash_arg_in_memory = 0;
4429 src_eqv_hash = HASH (src_eqv, eqvmode);
4431 /* Find the equivalence class for the equivalent expression. */
4434 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4436 src_eqv_volatile = do_not_record;
4437 src_eqv_in_memory = hash_arg_in_memory;
4440 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4441 value of the INNER register, not the destination. So it is not
4442 a valid substitution for the source. But save it for later. */
4443 if (GET_CODE (dest) == STRICT_LOW_PART)
4446 src_eqv_here = src_eqv;
4448 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4449 simplified result, which may not necessarily be valid. */
4450 src_folded = fold_rtx (src, insn);
4453 /* ??? This caused bad code to be generated for the m68k port with -O2.
4454 Suppose src is (CONST_INT -1), and that after truncation src_folded
4455 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4456 At the end we will add src and src_const to the same equivalence
4457 class. We now have 3 and -1 on the same equivalence class. This
4458 causes later instructions to be mis-optimized. */
4459 /* If storing a constant in a bitfield, pre-truncate the constant
4460 so we will be able to record it later. */
4461 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
4463 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4465 if (CONST_INT_P (src)
4466 && CONST_INT_P (width)
4467 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4468 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4470 src_folded = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4471 << INTVAL (width)) - 1));
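/* Added numeric illustration: storing (const_int -1) into a 3-bit
   ZERO_EXTRACT pre-truncates the constant here to
   (-1) & ((1 << 3) - 1) == 7, so 7 rather than -1 is what later code may
   record as the field's value. */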
4475 /* Compute SRC's hash code, and also notice if it
4476 should not be recorded at all. In that case,
4477 prevent any further processing of this assignment. */
4479 hash_arg_in_memory = 0;
4482 sets[i].src_hash = HASH (src, mode);
4483 sets[i].src_volatile = do_not_record;
4484 sets[i].src_in_memory = hash_arg_in_memory;
4486 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4487 a pseudo, do not record SRC. Using SRC as a replacement for
4488 anything else will be incorrect in that situation. Note that
4489 this usually occurs only for stack slots, in which case all the
4490 RTL would be referring to SRC, so we don't lose any optimization
4491 opportunities by not having SRC in the hash table. */
4494 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
4496 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4497 sets[i].src_volatile = 1;
4499 #if 0
4500 /* It is no longer clear why we used to do this, but it doesn't
4501 appear to still be needed. So let's try without it since this
4502 code hurts cse'ing widened ops. */
4503 /* If source is a paradoxical subreg (such as QI treated as an SI),
4504 treat it as volatile. It may do the work of an SI in one context
4505 where the extra bits are not being used, but cannot replace an SI
4506 in general. */
4507 if (paradoxical_subreg_p (src))
4508 sets[i].src_volatile = 1;
4509 #endif
4511 /* Locate all possible equivalent forms for SRC. Try to replace
4512 SRC in the insn with each cheaper equivalent.
4514 We have the following types of equivalents: SRC itself, a folded
4515 version, a value given in a REG_EQUAL note, or a value related
4518 Each of these equivalents may be part of an additional class
4519 of equivalents (if more than one is in the table, they must be in
4520 the same class; we check for this).
4522 If the source is volatile, we don't do any table lookups.
4524 We note any constant equivalent for possible later use in a
4527 if (!sets[i].src_volatile)
4528 elt = lookup (src, sets[i].src_hash, mode);
4530 sets[i].src_elt = elt;
4532 if (elt && src_eqv_here && src_eqv_elt)
4534 if (elt->first_same_value != src_eqv_elt->first_same_value)
4536 /* The REG_EQUAL is indicating that two formerly distinct
4537 classes are now equivalent. So merge them. */
4538 merge_equiv_classes (elt, src_eqv_elt);
4539 src_eqv_hash = HASH (src_eqv, elt->mode);
4540 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4546 else if (src_eqv_elt)
4549 /* Try to find a constant somewhere and record it in `src_const'.
4550 Record its table element, if any, in `src_const_elt'. Look in
4551 any known equivalences first. (If the constant is not in the
4552 table, also set `sets[i].src_const_hash'). */
4554 for (p = elt->first_same_value; p; p = p->next_same_value)
4558 src_const_elt = elt;
4563 && (CONSTANT_P (src_folded)
4564 /* Consider (minus (label_ref L1) (label_ref L2)) as
4565 "constant" here so we will record it. This allows us
4566 to fold switch statements when an ADDR_DIFF_VEC is used. */
4567 || (GET_CODE (src_folded) == MINUS
4568 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4569 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4570 src_const = src_folded, src_const_elt = elt;
4571 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4572 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4574 /* If we don't know if the constant is in the table, get its
4575 hash code and look it up. */
4576 if (src_const && src_const_elt == 0)
4578 sets[i].src_const_hash = HASH (src_const, mode);
4579 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
4582 sets[i].src_const = src_const;
4583 sets[i].src_const_elt = src_const_elt;
4585 /* If the constant and our source are both in the table, mark them as
4586 equivalent. Otherwise, if a constant is in the table but the source
4587 isn't, set ELT to it. */
4588 if (src_const_elt && elt
4589 && src_const_elt->first_same_value != elt->first_same_value)
4590 merge_equiv_classes (elt, src_const_elt);
4591 else if (src_const_elt && elt == 0)
4592 elt = src_const_elt;
4594 /* See if there is a register linearly related to a constant
4595 equivalent of SRC. */
4597 && (GET_CODE (src_const) == CONST
4598 || (src_const_elt && src_const_elt->related_value != 0)))
4600 src_related = use_related_value (src_const, src_const_elt);
4603 struct table_elt *src_related_elt
4604 = lookup (src_related, HASH (src_related, mode), mode);
4605 if (src_related_elt && elt)
4607 if (elt->first_same_value
4608 != src_related_elt->first_same_value)
4609 /* This can occur when we previously saw a CONST
4610 involving a SYMBOL_REF and then see the SYMBOL_REF
4611 twice. Merge the involved classes. */
4612 merge_equiv_classes (elt, src_related_elt);
4615 src_related_elt = 0;
4617 else if (src_related_elt && elt == 0)
4618 elt = src_related_elt;
4622 /* See if we have a CONST_INT that is already in a register in a wider mode. */
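/* Added illustration (made-up register number): if (reg:DI 101) is known
   to hold (const_int 8) and this SET needs the same constant in SImode,
   the loop below can propose the SImode low part of that register
   (roughly (subreg:SI (reg:DI 101) 0) on a little-endian target) as a
   possibly cheaper equivalent. */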
4625 if (src_const && src_related == 0 && CONST_INT_P (src_const)
4626 && GET_MODE_CLASS (mode) == MODE_INT
4627 && GET_MODE_PRECISION (mode) < BITS_PER_WORD)
4629 enum machine_mode wider_mode;
4631 for (wider_mode = GET_MODE_WIDER_MODE (mode);
4632 wider_mode != VOIDmode
4633 && GET_MODE_PRECISION (wider_mode) <= BITS_PER_WORD
4634 && src_related == 0;
4635 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4637 struct table_elt *const_elt
4638 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4643 for (const_elt = const_elt->first_same_value;
4644 const_elt; const_elt = const_elt->next_same_value)
4645 if (REG_P (const_elt->exp))
4647 src_related = gen_lowpart (mode, const_elt->exp);
4653 /* Another possibility is that we have an AND with a constant in
4654 a mode narrower than a word. If so, it might have been generated
4655 as part of an "if" which would narrow the AND. If we already
4656 have done the AND in a wider mode, we can use a SUBREG of that value. */
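/* Added illustration (made-up operands): for src
   (and:HI (reg:HI 100) (const_int 255)), the loop below forms
   (and:SI (subreg:SI (reg:HI 100) 0) (const_int 255)) and looks it up; if
   some register is already known to hold that wider AND, its low part
   becomes a candidate replacement via gen_lowpart. */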
4659 if (flag_expensive_optimizations && ! src_related
4660 && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
4661 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4663 enum machine_mode tmode;
4664 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
4666 for (tmode = GET_MODE_WIDER_MODE (mode);
4667 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4668 tmode = GET_MODE_WIDER_MODE (tmode))
4670 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
4671 struct table_elt *larger_elt;
4675 PUT_MODE (new_and, tmode);
4676 XEXP (new_and, 0) = inner;
4677 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4678 if (larger_elt == 0)
4681 for (larger_elt = larger_elt->first_same_value;
4682 larger_elt; larger_elt = larger_elt->next_same_value)
4683 if (REG_P (larger_elt->exp))
4686 src_related = gen_lowpart (mode, larger_elt->exp);
4696 #ifdef LOAD_EXTEND_OP
4697 /* See if a MEM has already been loaded with a widening operation;
4698 if it has, we can use a subreg of that. Many CISC machines
4699 also have such operations, but this is only likely to be
4700 beneficial on these machines. */
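/* Added illustration, assuming a target whose LOAD_EXTEND_OP (QImode) is
   ZERO_EXTEND: if (zero_extend:SI (mem:QI ...)) is already available in a
   register, a QImode read of the same memory can instead use the low part
   of that register, which is what the lookup below tries to discover. */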
4702 if (flag_expensive_optimizations && src_related == 0
4703 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4704 && GET_MODE_CLASS (mode) == MODE_INT
4705 && MEM_P (src) && ! do_not_record
4706 && LOAD_EXTEND_OP (mode) != UNKNOWN)
4708 struct rtx_def memory_extend_buf;
4709 rtx memory_extend_rtx = &memory_extend_buf;
4710 enum machine_mode tmode;
4712 /* Set what we are trying to extend and the operation it might
4713 have been extended with. */
4714 memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
4715 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
4716 XEXP (memory_extend_rtx, 0) = src;
4718 for (tmode = GET_MODE_WIDER_MODE (mode);
4719 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4720 tmode = GET_MODE_WIDER_MODE (tmode))
4722 struct table_elt *larger_elt;
4724 PUT_MODE (memory_extend_rtx, tmode);
4725 larger_elt = lookup (memory_extend_rtx,
4726 HASH (memory_extend_rtx, tmode), tmode);
4727 if (larger_elt == 0)
4730 for (larger_elt = larger_elt->first_same_value;
4731 larger_elt; larger_elt = larger_elt->next_same_value)
4732 if (REG_P (larger_elt->exp))
4734 src_related = gen_lowpart (mode, larger_elt->exp);
4742 #endif /* LOAD_EXTEND_OP */
4744 /* Try to express the constant using a register+offset expression
4745 derived from a constant anchor. */
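/* Added illustration with hypothetical target values: if the target's
   const_anchor is 0x8000 and some register is already known to hold
   0x18000, then the constant 0x18004 can be synthesized as
   (plus (reg ...) (const_int 4)), which try_const_anchors may return
   here as src_related. */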
4747 if (targetm.const_anchor
4750 && GET_CODE (src_const) == CONST_INT)
4752 src_related = try_const_anchors (src_const, mode);
4753 src_related_is_const_anchor = src_related != NULL_RTX;
4757 if (src == src_folded)
4760 /* At this point, ELT, if nonzero, points to a class of expressions
4761 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
4762 and SRC_RELATED, if nonzero, each contain additional equivalent
4763 expressions. Prune these latter expressions by deleting expressions
4764 already in the equivalence class.
4766 Check for an equivalent identical to the destination. If found,
4767 this is the preferred equivalent since it will likely lead to
4768 elimination of the insn. Indicate this by placing it in `src_related'. */
4772 elt = elt->first_same_value;
4773 for (p = elt; p; p = p->next_same_value)
4775 enum rtx_code code = GET_CODE (p->exp);
4777 /* If the expression is not valid, ignore it. Then we do not
4778 have to check for validity below. In most cases, we can use
4779 `rtx_equal_p', since canonicalization has already been done. */
4780 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
4783 /* Also skip paradoxical subregs, unless that's what we're looking for. */
4785 if (paradoxical_subreg_p (p->exp)
4787 && GET_CODE (src) == SUBREG
4788 && GET_MODE (src) == GET_MODE (p->exp)
4789 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4790 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
4793 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
4795 else if (src_folded && GET_CODE (src_folded) == code
4796 && rtx_equal_p (src_folded, p->exp))
4798 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
4799 && rtx_equal_p (src_eqv_here, p->exp))
4801 else if (src_related && GET_CODE (src_related) == code
4802 && rtx_equal_p (src_related, p->exp))
4805 /* This is the same as the destination of the insn; we want
4806 to prefer it. Copy it to src_related. The code below will
4807 then give it a negative cost. */
4808 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
4812 /* Find the cheapest valid equivalent, trying all the available
4813 possibilities. Prefer items not in the hash table to ones
4814 that are when they are equal cost. Note that we can never
4815 worsen an insn as the current contents will also succeed.
4816 If we find an equivalent identical to the destination, use it as best,
4817 since this insn will probably be eliminated in that case. */
4820 if (rtx_equal_p (src, dest))
4821 src_cost = src_regcost = -1;
4824 src_cost = COST (src);
4825 src_regcost = approx_reg_cost (src);
4831 if (rtx_equal_p (src_eqv_here, dest))
4832 src_eqv_cost = src_eqv_regcost = -1;
4835 src_eqv_cost = COST (src_eqv_here);
4836 src_eqv_regcost = approx_reg_cost (src_eqv_here);
4842 if (rtx_equal_p (src_folded, dest))
4843 src_folded_cost = src_folded_regcost = -1;
4846 src_folded_cost = COST (src_folded);
4847 src_folded_regcost = approx_reg_cost (src_folded);
4853 if (rtx_equal_p (src_related, dest))
4854 src_related_cost = src_related_regcost = -1;
4857 src_related_cost = COST (src_related);
4858 src_related_regcost = approx_reg_cost (src_related);
4860 /* If a const-anchor is used to synthesize a constant that
4861 normally requires multiple instructions then slightly prefer
4862 it over the original sequence. These instructions are likely
4863 to become redundant now. We can't compare against the cost
4864 of src_eqv_here because, on MIPS for example, multi-insn
4865 constants have zero cost; they are assumed to be hoisted from loops. */
4867 if (src_related_is_const_anchor
4868 && src_related_cost == src_cost
4874 /* If this was an indirect jump insn, a known label will really be
4875 cheaper even though it looks more expensive. */
4876 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
4877 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
4879 /* Terminate loop when replacement made. This must terminate since
4880 the current contents will be tested and will always be valid. */
4885 /* Skip invalid entries. */
4886 while (elt && !REG_P (elt->exp)
4887 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
4888 elt = elt->next_same_value;
4890 /* A paradoxical subreg would be bad here: it'll be the right
4891 size, but later may be adjusted so that the upper bits aren't
4892 what we want. So reject it. */
4894 && paradoxical_subreg_p (elt->exp)
4895 /* It is okay, though, if the rtx we're trying to match
4896 will ignore any of the bits we can't predict. */
4898 && GET_CODE (src) == SUBREG
4899 && GET_MODE (src) == GET_MODE (elt->exp)
4900 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4901 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
4903 elt = elt->next_same_value;
4909 src_elt_cost = elt->cost;
4910 src_elt_regcost = elt->regcost;
4913 /* Find cheapest and skip it for the next time. For items
4914 of equal cost, use this order:
4915 src_folded, src, src_eqv, src_related and hash table entry. */
4917 && preferable (src_folded_cost, src_folded_regcost,
4918 src_cost, src_regcost) <= 0
4919 && preferable (src_folded_cost, src_folded_regcost,
4920 src_eqv_cost, src_eqv_regcost) <= 0
4921 && preferable (src_folded_cost, src_folded_regcost,
4922 src_related_cost, src_related_regcost) <= 0
4923 && preferable (src_folded_cost, src_folded_regcost,
4924 src_elt_cost, src_elt_regcost) <= 0)
4926 trial = src_folded, src_folded_cost = MAX_COST;
4927 if (src_folded_force_flag)
4929 rtx forced = force_const_mem (mode, trial);
4935 && preferable (src_cost, src_regcost,
4936 src_eqv_cost, src_eqv_regcost) <= 0
4937 && preferable (src_cost, src_regcost,
4938 src_related_cost, src_related_regcost) <= 0
4939 && preferable (src_cost, src_regcost,
4940 src_elt_cost, src_elt_regcost) <= 0)
4941 trial = src, src_cost = MAX_COST;
4942 else if (src_eqv_here
4943 && preferable (src_eqv_cost, src_eqv_regcost,
4944 src_related_cost, src_related_regcost) <= 0
4945 && preferable (src_eqv_cost, src_eqv_regcost,
4946 src_elt_cost, src_elt_regcost) <= 0)
4947 trial = src_eqv_here, src_eqv_cost = MAX_COST;
4948 else if (src_related
4949 && preferable (src_related_cost, src_related_regcost,
4950 src_elt_cost, src_elt_regcost) <= 0)
4951 trial = src_related, src_related_cost = MAX_COST;
4955 elt = elt->next_same_value;
4956 src_elt_cost = MAX_COST;
4959 /* Avoid creation of overlapping memory moves. */
4960 if (MEM_P (trial) && MEM_P (SET_DEST (sets[i].rtl)))
4964 /* BLKmode moves are not handled by cse anyway. */
4965 if (GET_MODE (trial) == BLKmode)
4968 src = canon_rtx (trial);
4969 dest = canon_rtx (SET_DEST (sets[i].rtl));
4971 if (!MEM_P (src) || !MEM_P (dest)
4972 || !nonoverlapping_memrefs_p (src, dest, false))
4977 (set (reg:M N) (const_int A))
4978 (set (reg:M2 O) (const_int B))
4979 (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
4981 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
4982 && CONST_INT_P (trial)
4983 && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
4984 && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
4985 && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
4986 && (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets[i].rtl)))
4987 >= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)))
4988 && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
4989 + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
4990 <= HOST_BITS_PER_WIDE_INT))
4992 rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0);
4993 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4994 rtx pos = XEXP (SET_DEST (sets[i].rtl), 2);
4995 unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg));
4996 struct table_elt *dest_elt
4997 = lookup (dest_reg, dest_hash, GET_MODE (dest_reg));
4998 rtx dest_cst = NULL;
5001 for (p = dest_elt->first_same_value; p; p = p->next_same_value)
5002 if (p->is_const && CONST_INT_P (p->exp))
5009 HOST_WIDE_INT val = INTVAL (dest_cst);
5012 if (BITS_BIG_ENDIAN)
5013 shift = GET_MODE_PRECISION (GET_MODE (dest_reg))
5014 - INTVAL (pos) - INTVAL (width);
5016 shift = INTVAL (pos);
5017 if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
5018 mask = ~(HOST_WIDE_INT) 0;
5020 mask = ((HOST_WIDE_INT) 1 << INTVAL (width)) - 1;
5021 val &= ~(mask << shift);
5022 val |= (INTVAL (trial) & mask) << shift;
5023 val = trunc_int_for_mode (val, GET_MODE (dest_reg));
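/* Added numeric illustration (pos 0, BITS_BIG_ENDIAN == 0): if the
   destination register is known to hold 0xf0 and trial is (const_int 5)
   going into a 4-bit field, then shift == 0, mask == 0xf and
   val == (0xf0 & ~0xf) | (5 & 0xf) == 0xf5, so the insn can be rewritten
   below as a plain store of that merged constant. */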
5024 validate_unshare_change (insn, &SET_DEST (sets[i].rtl), dest_reg, 1);
5026 validate_unshare_change (insn, &SET_SRC (sets[i].rtl), GEN_INT (val), 1);
5028 if (apply_change_group ())
5030 rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5033 remove_note (insn, note);
5034 df_notes_rescan (insn);
5038 src_eqv_volatile = 0;
5039 src_eqv_in_memory = 0;
5047 /* We don't normally have an insn matching (set (pc) (pc)), so
5048 check for this separately here. We will delete such an insn below.
5051 For other cases such as a table jump or conditional jump
5052 where we know the ultimate target, go ahead and replace the
5053 operand. While that may not make a valid insn, we will
5054 reemit the jump below (and also insert any necessary barriers). */
5056 if (n_sets == 1 && dest == pc_rtx
5058 || (GET_CODE (trial) == LABEL_REF
5059 && ! condjump_p (insn))))
5061 /* Don't substitute non-local labels, this confuses CFG. */
5062 if (GET_CODE (trial) == LABEL_REF
5063 && LABEL_REF_NONLOCAL_P (trial))
5066 SET_SRC (sets[i].rtl) = trial;
5067 cse_jumps_altered = true;
5071 /* Reject certain invalid forms of CONST that we create. */
5072 else if (CONSTANT_P (trial)
5073 && GET_CODE (trial) == CONST
5074 /* Reject cases that will cause decode_rtx_const to
5075 die. On the alpha when simplifying a switch, we
5076 get (const (truncate (minus (label_ref) (label_ref)))). */
5078 && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5079 /* Likewise on IA-64, except without the truncate. */
5081 || (GET_CODE (XEXP (trial, 0)) == MINUS
5082 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5083 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5084 /* Do nothing for this case. */
5087 /* Look for a substitution that makes a valid insn. */
5088 else if (validate_unshare_change
5089 (insn, &SET_SRC (sets[i].rtl), trial, 0))
5091 rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);
5093 /* The result of apply_change_group can be ignored; see
5096 validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
5097 apply_change_group ();
5102 /* If we previously found constant pool entries for
5103 constants and this is a constant, try making a
5104 pool entry. Put it in src_folded unless we already have done
5105 this since that is where it likely came from. */
5107 else if (constant_pool_entries_cost
5108 && CONSTANT_P (trial)
5110 || (!MEM_P (src_folded)
5111 && ! src_folded_force_flag))
5112 && GET_MODE_CLASS (mode) != MODE_CC
5113 && mode != VOIDmode)
5115 src_folded_force_flag = 1;
5117 src_folded_cost = constant_pool_entries_cost;
5118 src_folded_regcost = constant_pool_entries_regcost;
5122 /* If we changed the insn too much, handle this set from scratch. */
5129 src = SET_SRC (sets[i].rtl);
5131 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5132 However, there is an important exception: If both are registers
5133 that are not the head of their equivalence class, replace SET_SRC
5134 with the head of the class. If we do not do this, we will have
5135 both registers live over a portion of the basic block. This way,
5136 their lifetimes will likely abut instead of overlapping. */
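/* Added illustration (made-up register numbers): suppose substitution has
   turned this insn into (set (reg 105) (reg 105)) but the head of
   reg 105's equivalence class is reg 103; the code below rewrites the
   source as (reg 103), so the two pseudos' lifetimes tend to abut rather
   than overlap. */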
5138 && REGNO_QTY_VALID_P (REGNO (dest)))
5140 int dest_q = REG_QTY (REGNO (dest));
5141 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5143 if (dest_ent->mode == GET_MODE (dest)
5144 && dest_ent->first_reg != REGNO (dest)
5145 && REG_P (src) && REGNO (src) == REGNO (dest)
5146 /* Don't do this if the original insn had a hard reg as
5147 SET_SRC or SET_DEST. */
5148 && (!REG_P (sets[i].src)
5149 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5150 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5151 /* We can't call canon_reg here because it won't do anything if
5152 SRC is a hard register. */
5154 int src_q = REG_QTY (REGNO (src));
5155 struct qty_table_elem *src_ent = &qty_table[src_q];
5156 int first = src_ent->first_reg;
5158 rtx new_src = (first >= FIRST_PSEUDO_REGISTER
5159 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5161 /* We must use validate-change even for this, because this
5162 might be a special no-op instruction, suitable only to tag notes onto. */
5164 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5167 /* If we had a constant that is cheaper than what we are now
5168 setting SRC to, use that constant. We ignored it when we
5169 thought we could make this into a no-op. */
5170 if (src_const && COST (src_const) < COST (src)
5171 && validate_change (insn, &SET_SRC (sets[i].rtl),
5178 /* If we made a change, recompute SRC values. */
5179 if (src != sets[i].src)
5182 hash_arg_in_memory = 0;
5184 sets[i].src_hash = HASH (src, mode);
5185 sets[i].src_volatile = do_not_record;
5186 sets[i].src_in_memory = hash_arg_in_memory;
5187 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5190 /* If this is a single SET, we are setting a register, and we have an
5191 equivalent constant, we want to add a REG_NOTE. We don't want
5192 to write a REG_EQUAL note for a constant pseudo since verifying that
5193 that pseudo hasn't been eliminated is a pain. Such a note also
5194 won't help anything.
5196 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5197 which can be created for a reference to a compile time computable
5198 entry in a jump table. */
5200 if (n_sets == 1 && src_const && REG_P (dest)
5201 && !REG_P (src_const)
5202 && ! (GET_CODE (src_const) == CONST
5203 && GET_CODE (XEXP (src_const, 0)) == MINUS
5204 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5205 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5207 /* We only want a REG_EQUAL note if src_const != src. */
5208 if (! rtx_equal_p (src, src_const))
5210 /* Make sure that the rtx is not shared. */
5211 src_const = copy_rtx (src_const);
5213 /* Record the actual constant value in a REG_EQUAL note,
5214 making a new one if one does not already exist. */
5215 set_unique_reg_note (insn, REG_EQUAL, src_const);
5216 df_notes_rescan (insn);
5220 /* Now deal with the destination. */
5223 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5224 while (GET_CODE (dest) == SUBREG
5225 || GET_CODE (dest) == ZERO_EXTRACT
5226 || GET_CODE (dest) == STRICT_LOW_PART)
5227 dest = XEXP (dest, 0);
5229 sets[i].inner_dest = dest;
5233 #ifdef PUSH_ROUNDING
5234 /* Stack pushes invalidate the stack pointer. */
5235 rtx addr = XEXP (dest, 0);
5236 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5237 && XEXP (addr, 0) == stack_pointer_rtx)
5238 invalidate (stack_pointer_rtx, VOIDmode);
5240 dest = fold_rtx (dest, insn);
5243 /* Compute the hash code of the destination now,
5244 before the effects of this instruction are recorded,
5245 since the register values used in the address computation
5246 are those before this instruction. */
5247 sets[i].dest_hash = HASH (dest, mode);
5249 /* Don't enter a bit-field in the hash table
5250 because the value in it after the store
5251 may not equal what was stored, due to truncation. */
5253 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5255 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5257 if (src_const != 0 && CONST_INT_P (src_const)
5258 && CONST_INT_P (width)
5259 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5260 && ! (INTVAL (src_const)
5261 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5262 /* Exception: if the value is constant,
5263 and it won't be truncated, record it. */
5267 /* This is chosen so that the destination will be invalidated
5268 but no new value will be recorded.
5269 We must invalidate because sometimes constant
5270 values can be recorded for bitfields. */
5271 sets[i].src_elt = 0;
5272 sets[i].src_volatile = 1;
5278 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete the insn. */
5280 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5282 /* One less use of the label this insn used to jump to. */
5283 delete_insn_and_edges (insn);
5284 cse_jumps_altered = true;
5285 /* No more processing for this set. */
5289 /* If this SET is now setting PC to a label, we know it used to
5290 be a conditional or computed branch. */
5291 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5292 && !LABEL_REF_NONLOCAL_P (src))
5294 /* We reemit the jump in as many cases as possible just in
5295 case the form of an unconditional jump is significantly
5296 different than a computed jump or conditional jump.
5298 If this insn has multiple sets, then reemitting the
5299 jump is nontrivial. So instead we just force rerecognition
5300 and hope for the best. */
5305 new_rtx = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5306 JUMP_LABEL (new_rtx) = XEXP (src, 0);
5307 LABEL_NUSES (XEXP (src, 0))++;
5309 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5310 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5313 XEXP (note, 1) = NULL_RTX;
5314 REG_NOTES (new_rtx) = note;
5317 delete_insn_and_edges (insn);
5321 INSN_CODE (insn) = -1;
5323 /* Do not bother deleting any unreachable code, let jump do it. */
5324 cse_jumps_altered = true;
5328 /* If destination is volatile, invalidate it and then do no further
5329 processing for this assignment. */
5331 else if (do_not_record)
5333 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5334 invalidate (dest, VOIDmode);
5335 else if (MEM_P (dest))
5336 invalidate (dest, VOIDmode);
5337 else if (GET_CODE (dest) == STRICT_LOW_PART
5338 || GET_CODE (dest) == ZERO_EXTRACT)
5339 invalidate (XEXP (dest, 0), GET_MODE (dest));
5343 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5344 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5347 /* If setting CC0, record what it was set to, or a constant, if it
5348 is equivalent to a constant. If it is being set to a floating-point
5349 value, make a COMPARE with the appropriate constant of 0. If we
5350 don't do this, later code can interpret this as a test against
5351 const0_rtx, which can cause problems if we try to put it into an
5352 insn as a floating-point operand. */
5353 if (dest == cc0_rtx)
5355 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5356 this_insn_cc0_mode = mode;
5357 if (FLOAT_MODE_P (mode))
5358 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5364 /* Now enter all non-volatile source expressions in the hash table
5365 if they are not already present.
5366 Record their equivalence classes in src_elt.
5367 This way we can insert the corresponding destinations into
5368 the same classes even if the actual sources are no longer in them
5369 (having been invalidated). */
5371 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5372 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5374 struct table_elt *elt;
5375 struct table_elt *classp = sets[0].src_elt;
5376 rtx dest = SET_DEST (sets[0].rtl);
5377 enum machine_mode eqvmode = GET_MODE (dest);
5379 if (GET_CODE (dest) == STRICT_LOW_PART)
5381 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5384 if (insert_regs (src_eqv, classp, 0))
5386 rehash_using_reg (src_eqv);
5387 src_eqv_hash = HASH (src_eqv, eqvmode);
5389 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5390 elt->in_memory = src_eqv_in_memory;
5393 /* Check to see if src_eqv_elt is the same as a set source which
5394 does not yet have an elt, and if so set the elt of the set source to src_eqv_elt. */
5396 for (i = 0; i < n_sets; i++)
5397 if (sets[i].rtl && sets[i].src_elt == 0
5398 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5399 sets[i].src_elt = src_eqv_elt;
5402 for (i = 0; i < n_sets; i++)
5403 if (sets[i].rtl && ! sets[i].src_volatile
5404 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5406 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5408 /* REG_EQUAL in setting a STRICT_LOW_PART
5409 gives an equivalent for the entire destination register,
5410 not just for the subreg being stored in now.
5411 This is a more interesting equivalence, so we arrange later
5412 to treat the entire reg as the destination. */
5413 sets[i].src_elt = src_eqv_elt;
5414 sets[i].src_hash = src_eqv_hash;
5418 /* Insert source and constant equivalent into hash table, if not already present. */
5420 struct table_elt *classp = src_eqv_elt;
5421 rtx src = sets[i].src;
5422 rtx dest = SET_DEST (sets[i].rtl);
5423 enum machine_mode mode
5424 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5426 /* It's possible that we have a source value known to be
5427 constant but don't have a REG_EQUAL note on the insn.
5428 Lack of a note will mean src_eqv_elt will be NULL. This
5429 can happen where we've generated a SUBREG to access a
5430 CONST_INT that is already in a register in a wider mode.
5431 Ensure that the source expression is put in the proper constant class. */
5434 classp = sets[i].src_const_elt;
5436 if (sets[i].src_elt == 0)
5438 struct table_elt *elt;
5440 /* Note that these insert_regs calls cannot remove
5441 any of the src_elt's, because they would have failed to
5442 match if not still valid. */
5443 if (insert_regs (src, classp, 0))
5445 rehash_using_reg (src);
5446 sets[i].src_hash = HASH (src, mode);
5448 elt = insert (src, classp, sets[i].src_hash, mode);
5449 elt->in_memory = sets[i].src_in_memory;
5450 sets[i].src_elt = classp = elt;
5452 if (sets[i].src_const && sets[i].src_const_elt == 0
5453 && src != sets[i].src_const
5454 && ! rtx_equal_p (sets[i].src_const, src))
5455 sets[i].src_elt = insert (sets[i].src_const, classp,
5456 sets[i].src_const_hash, mode);
5459 else if (sets[i].src_elt == 0)
5460 /* If we did not insert the source into the hash table (e.g., it was
5461 volatile), note the equivalence class for the REG_EQUAL value, if any,
5462 so that the destination goes into that class. */
5463 sets[i].src_elt = src_eqv_elt;
5465 /* Record destination addresses in the hash table. This allows us to
5466 check if they are invalidated by other sets. */
5467 for (i = 0; i < n_sets; i++)
5471 rtx x = sets[i].inner_dest;
5472 struct table_elt *elt;
5473 enum machine_mode mode;
5479 mode = GET_MODE (x);
5480 hash = HASH (x, mode);
5481 elt = lookup (x, hash, mode);
5484 if (insert_regs (x, NULL, 0))
5486 rtx dest = SET_DEST (sets[i].rtl);
5488 rehash_using_reg (x);
5489 hash = HASH (x, mode);
5490 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5492 elt = insert (x, NULL, hash, mode);
5495 sets[i].dest_addr_elt = elt;
5498 sets[i].dest_addr_elt = NULL;
5502 invalidate_from_clobbers (x);
5504 /* Some registers are invalidated by subroutine calls. Memory is
5505 invalidated by non-constant calls. */
5509 if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
5510 invalidate_memory ();
5511 invalidate_for_call ();
5514 /* Now invalidate everything set by this instruction.
5515 If a SUBREG or other funny destination is being set,
5516 sets[i].rtl is still nonzero, so here we invalidate the reg
5517 a part of which is being set. */
5519 for (i = 0; i < n_sets; i++)
5522 /* We can't use the inner dest, because the mode associated with
5523 a ZERO_EXTRACT is significant. */
5524 rtx dest = SET_DEST (sets[i].rtl);
5526 /* Needed for registers to remove the register from its
5527 previous quantity's chain.
5528 Needed for memory if this is a nonvarying address, unless
5529 we have just done an invalidate_memory that covers even those. */
5530 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5531 invalidate (dest, VOIDmode);
5532 else if (MEM_P (dest))
5533 invalidate (dest, VOIDmode);
5534 else if (GET_CODE (dest) == STRICT_LOW_PART
5535 || GET_CODE (dest) == ZERO_EXTRACT)
5536 invalidate (XEXP (dest, 0), GET_MODE (dest));
5539 /* A volatile ASM invalidates everything. */
5540 if (NONJUMP_INSN_P (insn)
5541 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5542 && MEM_VOLATILE_P (PATTERN (insn)))
5543 flush_hash_table ();
5545 /* Don't cse over a call to setjmp; on some machines (eg VAX)
5546 the regs restored by the longjmp come from a later time than the setjmp. */
5548 if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
5550 flush_hash_table ();
5554 /* Make sure registers mentioned in destinations
5555 are safe for use in an expression to be inserted.
5556 This removes from the hash table
5557 any invalid entry that refers to one of these registers.
5559 We don't care about the return value from mention_regs because
5560 we are going to hash the SET_DEST values unconditionally. */
5562 for (i = 0; i < n_sets; i++)
5566 rtx x = SET_DEST (sets[i].rtl);
5572 /* We used to rely on all references to a register becoming
5573 inaccessible when a register changes to a new quantity,
5574 since that changes the hash code. However, that is not
5575 safe, since after HASH_SIZE new quantities we get a
5576 hash 'collision' of a register with its own invalid
5577 entries. And since SUBREGs have been changed not to
5578 change their hash code with the hash code of the register,
5579 it wouldn't work any longer at all. So we have to check
5580 for any invalid references lying around now.
5581 This code is similar to the REG case in mention_regs,
5582 but it knows that reg_tick has been incremented, and
5583 it leaves reg_in_table as -1 . */
5584 unsigned int regno = REGNO (x);
5585 unsigned int endregno = END_REGNO (x);
5588 for (i = regno; i < endregno; i++)
5590 if (REG_IN_TABLE (i) >= 0)
5592 remove_invalid_refs (i);
5593 REG_IN_TABLE (i) = -1;
5600 /* We may have just removed some of the src_elt's from the hash table.
5601 So replace each one with the current head of the same class.
5602 Also check if destination addresses have been removed. */
5604 for (i = 0; i < n_sets; i++)
5607 if (sets[i].dest_addr_elt
5608 && sets[i].dest_addr_elt->first_same_value == 0)
5610 /* The elt was removed, which means this destination is not
5611 valid after this instruction. */
5612 sets[i].rtl = NULL_RTX;
5614 else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5615 /* If elt was removed, find current head of same class,
5616 or 0 if nothing remains of that class. */
5618 struct table_elt *elt = sets[i].src_elt;
5620 while (elt && elt->prev_same_value)
5621 elt = elt->prev_same_value;
5623 while (elt && elt->first_same_value == 0)
5624 elt = elt->next_same_value;
5625 sets[i].src_elt = elt ? elt->first_same_value : 0;
5629 /* Now insert the destinations into their equivalence classes. */
5631 for (i = 0; i < n_sets; i++)
5634 rtx dest = SET_DEST (sets[i].rtl);
5635 struct table_elt *elt;
5637 /* Don't record value if we are not supposed to risk allocating
5638 floating-point values in registers that might be wider than memory. */
5640 if ((flag_float_store
5642 && FLOAT_MODE_P (GET_MODE (dest)))
5643 /* Don't record BLKmode values, because we don't know the
5644 size of it, and can't be sure that other BLKmode values
5645 have the same or smaller size. */
5646 || GET_MODE (dest) == BLKmode
5647 /* If we didn't put a REG_EQUAL value or a source into the hash
5648 table, there is no point in recording DEST. */
5649 || sets[i].src_elt == 0
5650 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5651 or SIGN_EXTEND, don't record DEST since it can cause
5652 some tracking to be wrong.
5654 ??? Think about this more later. */
5655 || (paradoxical_subreg_p (dest)
5656 && (GET_CODE (sets[i].src) == SIGN_EXTEND
5657 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
5660 /* STRICT_LOW_PART isn't part of the value BEING set,
5661 and neither is the SUBREG inside it.
5662 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5663 if (GET_CODE (dest) == STRICT_LOW_PART)
5664 dest = SUBREG_REG (XEXP (dest, 0));
5666 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5667 /* Registers must also be inserted into chains for quantities. */
5668 if (insert_regs (dest, sets[i].src_elt, 1))
5670 /* If `insert_regs' changes something, the hash code must be recalculated. */
5672 rehash_using_reg (dest);
5673 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5676 elt = insert (dest, sets[i].src_elt,
5677 sets[i].dest_hash, GET_MODE (dest));
5679 /* If this is a constant, insert the constant anchors with the
5680 equivalent register-offset expressions using register DEST. */
5681 if (targetm.const_anchor
5683 && SCALAR_INT_MODE_P (GET_MODE (dest))
5684 && GET_CODE (sets[i].src_elt->exp) == CONST_INT)
5685 insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest));
5687 elt->in_memory = (MEM_P (sets[i].inner_dest)
5688 && !MEM_READONLY_P (sets[i].inner_dest));
5690 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5691 narrower than M2, and both M1 and M2 are the same number of words,
5692 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5693 make that equivalence as well.
5695 However, BAR may have equivalences for which gen_lowpart
5696 will produce a simpler value than gen_lowpart applied to
5697 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
5698 BAR's equivalences. If we don't get a simplified form, make
5699 the SUBREG. It will not be used in an equivalence, but will
5700 cause two similar assignments to be detected.
5702 Note the loop below will find SUBREG_REG (DEST) since we have
5703 already entered SRC and DEST of the SET in the table. */
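/* Added illustration, assuming 32-bit words: given
   (set (subreg:SI (reg:HI 100) 0) (reg:SI 101)), SImode and HImode occupy
   the same single word, so the loop below in effect also records that
   (reg:HI 100) is equivalent to (subreg:HI (reg:SI 101) 0). */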
5705 if (GET_CODE (dest) == SUBREG
5706 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
5708 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
5709 && (GET_MODE_SIZE (GET_MODE (dest))
5710 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5711 && sets[i].src_elt != 0)
5713 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
5714 struct table_elt *elt, *classp = 0;
5716 for (elt = sets[i].src_elt->first_same_value; elt;
5717 elt = elt->next_same_value)
5721 struct table_elt *src_elt;
5724 /* Ignore invalid entries. */
5725 if (!REG_P (elt->exp)
5726 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5729 /* We may have already been playing subreg games. If the
5730 mode is already correct for the destination, use it. */
5731 if (GET_MODE (elt->exp) == new_mode)
5735 /* Calculate big endian correction for the SUBREG_BYTE.
5736 We have already checked that M1 (GET_MODE (dest))
5737 is not narrower than M2 (new_mode). */
5738 if (BYTES_BIG_ENDIAN)
5739 byte = (GET_MODE_SIZE (GET_MODE (dest))
5740 - GET_MODE_SIZE (new_mode));
5742 new_src = simplify_gen_subreg (new_mode, elt->exp,
5743 GET_MODE (dest), byte);
5746 /* The call to simplify_gen_subreg fails if the value
5747 is VOIDmode, yet we can't do any simplification, e.g.
5748 for EXPR_LISTs denoting function call results.
5749 It is invalid to construct a SUBREG with a VOIDmode
5750 SUBREG_REG, hence a zero new_src means we can't do
5751 this substitution. */
5755 src_hash = HASH (new_src, new_mode);
5756 src_elt = lookup (new_src, src_hash, new_mode);
5758 /* Put the new source in the hash table if it isn't already there. */
5762 if (insert_regs (new_src, classp, 0))
5764 rehash_using_reg (new_src);
5765 src_hash = HASH (new_src, new_mode);
5767 src_elt = insert (new_src, classp, src_hash, new_mode);
5768 src_elt->in_memory = elt->in_memory;
5770 else if (classp && classp != src_elt->first_same_value)
5771 /* Show that two things that we've seen before are
5772 actually the same. */
5773 merge_equiv_classes (src_elt, classp);
5775 classp = src_elt->first_same_value;
5776 /* Ignore invalid entries. */
5778 && !REG_P (classp->exp)
5779 && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
5780 classp = classp->next_same_value;
5785 /* Special handling for (set REG0 REG1) where REG0 is the
5786 "cheapest", cheaper than REG1. After cse, REG1 will probably not
5787 be used in the sequel, so (if easily done) change this insn to
5788 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
5789 that computed their value. Then REG1 will become a dead store
5790 and won't cloud the situation for later optimizations.
5792 Do not make this change if REG1 is a hard register, because it will
5793 then be used in the sequel and we may be changing a two-operand insn
5794 into a three-operand insn.
5796 Also do not do this if we are operating on a copy of INSN. */
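/* Added illustration (made-up register numbers): given
   (set (reg 101) (plus ...)) followed by (set (reg 100) (reg 101)) where
   reg 100 is the head of the equivalence class, the code below rewrites
   the pair as (set (reg 100) (plus ...)) and (set (reg 101) (reg 100)),
   so the copy into reg 101 is likely to become a dead store. */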
5798 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
5799 && NEXT_INSN (PREV_INSN (insn)) == insn
5800 && REG_P (SET_SRC (sets[0].rtl))
5801 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
5802 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
5804 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
5805 struct qty_table_elem *src_ent = &qty_table[src_q];
5807 if (src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
5809 /* Scan for the previous nonnote insn, but stop at a basic block boundary. */
5812 rtx bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
5815 prev = PREV_INSN (prev);
5817 while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));
5819 /* Do not swap the registers around if the previous instruction
5820 attaches a REG_EQUIV note to REG1.
5822 ??? It's not entirely clear whether we can transfer a REG_EQUIV
5823 from the pseudo that originally shadowed an incoming argument
5824 to another register. Some uses of REG_EQUIV might rely on it
5825 being attached to REG1 rather than REG2.
5827 This section previously turned the REG_EQUIV into a REG_EQUAL
5828 note. We cannot do that because REG_EQUIV may provide an
5829 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
5830 if (NONJUMP_INSN_P (prev)
5831 && GET_CODE (PATTERN (prev)) == SET
5832 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
5833 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
5835 rtx dest = SET_DEST (sets[0].rtl);
5836 rtx src = SET_SRC (sets[0].rtl);
5839 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
5840 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
5841 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
5842 apply_change_group ();
5844 /* If INSN has a REG_EQUAL note, and this note mentions
5845 REG0, then we must delete it, because the value in
5846 REG0 has changed. If the note's value is REG1, we must
5847 also delete it because that is now this insn's dest. */
5848 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5850 && (reg_mentioned_p (dest, XEXP (note, 0))
5851 || rtx_equal_p (src, XEXP (note, 0))))
5852 remove_note (insn, note);
5860 /* Remove from the hash table all expressions that reference memory. */
5863 invalidate_memory (void)
5866 struct table_elt *p, *next;
5868 for (i = 0; i < HASH_SIZE; i++)
5869 for (p = table[i]; p; p = next)
5871 next = p->next_same_hash;
5873 remove_from_table (p, i);
5877 /* Perform invalidation on the basis of everything about an insn
5878 except for invalidating the actual places that are SET in it.
5879 This includes the places CLOBBERed, and anything that might
5880 alias with something that is SET or CLOBBERed.
5882 X is the pattern of the insn. */
5885 invalidate_from_clobbers (rtx x)
5887 if (GET_CODE (x) == CLOBBER)
5889 rtx ref = XEXP (x, 0);
5892 if (REG_P (ref) || GET_CODE (ref) == SUBREG
5894 invalidate (ref, VOIDmode);
5895 else if (GET_CODE (ref) == STRICT_LOW_PART
5896 || GET_CODE (ref) == ZERO_EXTRACT)
5897 invalidate (XEXP (ref, 0), GET_MODE (ref));
5900 else if (GET_CODE (x) == PARALLEL)
5903 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5905 rtx y = XVECEXP (x, 0, i);
5906 if (GET_CODE (y) == CLOBBER)
5908 rtx ref = XEXP (y, 0);
5909 if (REG_P (ref) || GET_CODE (ref) == SUBREG
5911 invalidate (ref, VOIDmode);
5912 else if (GET_CODE (ref) == STRICT_LOW_PART
5913 || GET_CODE (ref) == ZERO_EXTRACT)
5914 invalidate (XEXP (ref, 0), GET_MODE (ref));
5920 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
5921 and replace any registers in them with either an equivalent constant
5922 or the canonical form of the register. If we are inside an address,
5923 only do this if the address remains valid.
5925 OBJECT is 0 except when within a MEM in which case it is the MEM.
5927 Return the replacement for X. */
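/* Added illustration (made-up operands): if (reg:SI 100) has a recorded
   constant equivalent of (const_int 12), a REG_EQUAL note of
   (plus:SI (reg:SI 100) (const_int 4)) may be rewritten here as
   (plus:SI (const_int 12) (const_int 4)); a register with no such
   equivalent is simply replaced by its canonical form. */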
5930 cse_process_notes_1 (rtx x, rtx object, bool *changed)
5932 enum rtx_code code = GET_CODE (x);
5933 const char *fmt = GET_RTX_FORMAT (code);
5951 validate_change (x, &XEXP (x, 0),
5952 cse_process_notes (XEXP (x, 0), x, changed), 0);
5957 if (REG_NOTE_KIND (x) == REG_EQUAL)
5958 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
5960 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
5967 rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
5968 /* We don't substitute VOIDmode constants into these rtx,
5969 since they would impede folding. */
5970 if (GET_MODE (new_rtx) != VOIDmode)
5971 validate_change (object, &XEXP (x, 0), new_rtx, 0);
5976 i = REG_QTY (REGNO (x));
5978 /* Return a constant or a constant register. */
5979 if (REGNO_QTY_VALID_P (REGNO (x)))
5981 struct qty_table_elem *ent = &qty_table[i];
5983 if (ent->const_rtx != NULL_RTX
5984 && (CONSTANT_P (ent->const_rtx)
5985 || REG_P (ent->const_rtx)))
5987 rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
5989 return copy_rtx (new_rtx);
5993 /* Otherwise, canonicalize this register. */
5994 return canon_reg (x, NULL_RTX);
6000 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6002 validate_change (object, &XEXP (x, i),
6003 cse_process_notes (XEXP (x, i), object, changed), 0);
6009 cse_process_notes (rtx x, rtx object, bool *changed)
6011 rtx new_rtx = cse_process_notes_1 (x, object, changed);
6018 /* Find a path in the CFG, starting with FIRST_BB to perform CSE on.
6020 DATA is a pointer to a struct cse_basic_block_data, that is used to describe the path.
6022 It is filled with a queue of basic blocks, starting with FIRST_BB
6023 and following a trace through the CFG.
6025 If all paths starting at FIRST_BB have been followed, or no new path
6026 starting at FIRST_BB can be constructed, this function returns FALSE.
6027 Otherwise, DATA->path is filled and the function returns TRUE indicating
6028 that a path to follow was found.
6030 If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
6031 block in the path will be FIRST_BB. */
6034 cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
6041 SET_BIT (cse_visited_basic_blocks, first_bb->index);
6043 /* See if there is a previous path. */
6044 path_size = data->path_size;
6046 /* There is a previous path. Make sure it started with FIRST_BB. */
6048 gcc_assert (data->path[0].bb == first_bb);
6050 /* There was only one basic block in the last path. Clear the path and
6051 return, so that paths starting at another basic block can be tried. */
6058 /* If the path was empty from the beginning, construct a new path. */
6060 data->path[path_size++].bb = first_bb;
6063 /* Otherwise, path_size must be equal to or greater than 2, because
6064 a previous path exists that is at least two basic blocks long.
6066 Update the previous branch path, if any. If the last branch was
6067 previously along the branch edge, take the fallthrough edge now. */
6068 while (path_size >= 2)
6070 basic_block last_bb_in_path, previous_bb_in_path;
6074 last_bb_in_path = data->path[path_size].bb;
6075 previous_bb_in_path = data->path[path_size - 1].bb;
6077 /* If we previously followed a path along the branch edge, try
6078 the fallthru edge now. */
6079 if (EDGE_COUNT (previous_bb_in_path->succs) == 2
6080 && any_condjump_p (BB_END (previous_bb_in_path))
6081 && (e = find_edge (previous_bb_in_path, last_bb_in_path))
6082 && e == BRANCH_EDGE (previous_bb_in_path))
6084 bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
6085 if (bb != EXIT_BLOCK_PTR
6086 && single_pred_p (bb)
6087 /* We used to assert here that we would only see blocks
6088 that we have not visited yet. But we may end up
6089 visiting basic blocks twice if the CFG has changed
6090 in this run of cse_main, because when the CFG changes
6091 the topological sort of the CFG also changes. A basic
6092 block that previously had more than two predecessors
6093 may now have a single predecessor, and become part of
6094 a path that starts at another basic block.
6096 We still want to visit each basic block only once, so
6097 halt the path here if we have already visited BB. */
6098 && !TEST_BIT (cse_visited_basic_blocks, bb->index))
6100 SET_BIT (cse_visited_basic_blocks, bb->index);
6101 data->path[path_size++].bb = bb;
6106 data->path[path_size].bb = NULL;
6109 /* If only one block remains in the path, bail. */
6117 /* Extend the path if possible. */
6120 bb = data->path[path_size - 1].bb;
6121 while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
6123 if (single_succ_p (bb))
6124 e = single_succ_edge (bb);
6125 else if (EDGE_COUNT (bb->succs) == 2
6126 && any_condjump_p (BB_END (bb)))
6128 /* First try to follow the branch. If that doesn't lead
6129 to a useful path, follow the fallthru edge. */
6130 e = BRANCH_EDGE (bb);
6131 if (!single_pred_p (e->dest))
6132 e = FALLTHRU_EDGE (bb);
6138 && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
6139 && e->dest != EXIT_BLOCK_PTR
6140 && single_pred_p (e->dest)
6141 /* Avoid visiting basic blocks twice. The large comment
6142 above explains why this can happen. */
6143 && !TEST_BIT (cse_visited_basic_blocks, e->dest->index))
6145 basic_block bb2 = e->dest;
6146 SET_BIT (cse_visited_basic_blocks, bb2->index);
6147 data->path[path_size++].bb = bb2;
6156 data->path_size = path_size;
6157 return path_size != 0;
6160 /* Dump the path in DATA to file F. NSETS is the number of sets in the path. */
6164 cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
6168 fprintf (f, ";; Following path with %d sets: ", nsets);
6169 for (path_entry = 0; path_entry < data->path_size; path_entry++)
6170 fprintf (f, "%d ", (data->path[path_entry].bb)->index);
6171 fputc ('\n', dump_file);
6176 /* Return true if BB has exception handling successor edges. */
6179 have_eh_succ_edges (basic_block bb)
6184 FOR_EACH_EDGE (e, ei, bb->succs)
6185 if (e->flags & EDGE_EH)
6192 /* Scan to the end of the path described by DATA. Return an estimate of
6193 the total number of SETs of all insns in the path. */
6196 cse_prescan_path (struct cse_basic_block_data *data)
6199 int path_size = data->path_size;
6202 /* Scan to end of each basic block in the path. */
6203 for (path_entry = 0; path_entry < path_size; path_entry++)
6208 bb = data->path[path_entry].bb;
6210 FOR_BB_INSNS (bb, insn)
6215 /* A PARALLEL can have lots of SETs in it,
6216 especially if it is really an ASM_OPERANDS. */
6217 if (GET_CODE (PATTERN (insn)) == PARALLEL)
6218 nsets += XVECLEN (PATTERN (insn), 0);
6224 data->nsets = nsets;
6227 /* Process a single extended basic block described by EBB_DATA. */
6230 cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
6232 int path_size = ebb_data->path_size;
6236 /* Allocate the space needed by qty_table. */
6237 qty_table = XNEWVEC (struct qty_table_elem, max_qty);
6240 cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
6241 cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
6242 for (path_entry = 0; path_entry < path_size; path_entry++)
6247 bb = ebb_data->path[path_entry].bb;
6249 /* Invalidate recorded information for eh regs if there is an EH
6250 edge pointing to that bb. */
6251 if (bb_has_eh_pred (bb))
6255 for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
6257 df_ref def = *def_rec;
6258 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
6259 invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
6263 optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
6264 FOR_BB_INSNS (bb, insn)
6266 /* If we have processed PARAM_MAX_CSE_INSNS insns (1,000 by default), flush the hash table to
6267 avoid extreme quadratic behavior. We must not include NOTEs
6268 in the count since there may be more of them when generating
6269 debugging information. If we clear the table at different
6270 times, code generated with -g -O might be different than code
6271 generated with -O but not -g.
6273 FIXME: This is a real kludge and needs to be done some other way. */
6275 if (NONDEBUG_INSN_P (insn)
6276 && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
6278 flush_hash_table ();
6284 /* Process notes first so we have all notes in canonical forms
6285 when looking for duplicate operations. */
6286 if (REG_NOTES (insn))
6288 bool changed = false;
6289 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
6290 NULL_RTX, &changed);
6292 df_notes_rescan (insn);
6297 /* If we haven't already found an insn where we added a LABEL_REF, check this one. */
6299 if (INSN_P (insn) && !recorded_label_ref
6300 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6302 recorded_label_ref = true;
6305 if (NONDEBUG_INSN_P (insn))
6307 /* If the previous insn sets CC0 and this insn no
6308 longer references CC0, delete the previous insn.
6309 Here we use the fact that nothing expects CC0 to be
6310 valid over an insn, which is true until the final pass. */
6314 prev_insn = prev_nonnote_nondebug_insn (insn);
6315 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6316 && (tem = single_set (prev_insn)) != NULL_RTX
6317 && SET_DEST (tem) == cc0_rtx
6318 && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
6319 delete_insn (prev_insn);
6321 /* If this insn is not the last insn in the basic
6322 block, it will be PREV_INSN(insn) in the next
6323 iteration. If we recorded any CC0-related
6324 information for this insn, remember it. */
6325 if (insn != BB_END (bb))
6327 prev_insn_cc0 = this_insn_cc0;
6328 prev_insn_cc0_mode = this_insn_cc0_mode;
6335 /* With non-call exceptions, we are not always able to update
6336 the CFG properly inside cse_insn. So clean up possibly
6337 redundant EH edges here. */
6338 if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
6339 cse_cfg_altered |= purge_dead_edges (bb);
6341 /* If we changed a conditional jump, we may have terminated
6342 the path we are following. Check that by verifying that
6343 the edge we would take still exists. If the edge does
6344 not exist anymore, purge the remainder of the path.
6345 Note that this will cause us to return to the caller. */
6346 if (path_entry < path_size - 1)
6348 basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6349 if (!find_edge (bb, next_bb))
6355 /* If we truncate the path, we must also reset the
6356 visited bit on the remaining blocks in the path,
6357 or we will never visit them at all. */
6358 RESET_BIT (cse_visited_basic_blocks,
6359 ebb_data->path[path_size].bb->index);
6360 ebb_data->path[path_size].bb = NULL;
6362 while (path_size - 1 != path_entry);
6363 ebb_data->path_size = path_size;
6367 /* If this is a conditional jump insn, record any known
6368 equivalences due to the condition being tested. */
6370 if (path_entry < path_size - 1
6372 && single_set (insn)
6373 && any_condjump_p (insn))
6375 basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6376 bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
6377 record_jump_equiv (insn, taken);
6381 /* Clear the CC0-tracking related insns, they can't provide
6382 useful information across basic block boundaries. */
6387 gcc_assert (next_qty <= max_qty);
6393 /* Perform cse on the instructions of a function.
6394 F is the first instruction.
6395 NREGS is one plus the highest pseudo-reg number used in the function.
6397 Return 2 if jump optimizations should be redone due to simplifications
6398 in conditional jump instructions.
6399 Return 1 if the CFG should be cleaned up because it has been modified.
6400 Return 0 otherwise. */
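/* As an illustration of how these return values are typically consumed
   (see rest_of_handle_cse below; the exact cleanup flags used there may
   differ slightly):

       int ret = cse_main (get_insns (), max_reg_num ());
       if (ret == 2)
         {
           rebuild_jump_labels (get_insns ());
           cleanup_cfg (CLEANUP_CFG_CHANGED);
         }
       else if (ret == 1)
         cleanup_cfg (0);
*/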
6403 cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
6405 struct cse_basic_block_data ebb_data;
6407 int *rc_order = XNEWVEC (int, last_basic_block);
6410 df_set_flags (DF_LR_RUN_DCE);
6412 df_set_flags (DF_DEFER_INSN_RESCAN);
6414 reg_scan (get_insns (), max_reg_num ());
6415 init_cse_reg_info (nregs);
6417 ebb_data.path = XNEWVEC (struct branch_path,
6418 PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6420 cse_cfg_altered = false;
6421 cse_jumps_altered = false;
6422 recorded_label_ref = false;
6423 constant_pool_entries_cost = 0;
6424 constant_pool_entries_regcost = 0;
6425 ebb_data.path_size = 0;
6427 rtl_hooks = cse_rtl_hooks;
6430 init_alias_analysis ();
6432 reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
6434 /* Set up the table of already visited basic blocks. */
6435 cse_visited_basic_blocks = sbitmap_alloc (last_basic_block);
6436 sbitmap_zero (cse_visited_basic_blocks);
6438 /* Loop over basic blocks in reverse completion order (RPO),
6439 excluding the ENTRY and EXIT blocks. */
6440 n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
6442 while (i < n_blocks)
6444 /* Find the first block in the RPO queue that we have not yet
6445 processed. */
6448 bb = BASIC_BLOCK (rc_order[i++]);
6450 while (TEST_BIT (cse_visited_basic_blocks, bb->index)
6453 /* Find all paths starting with BB, and process them. */
6454 while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
6456 /* Pre-scan the path. */
6457 cse_prescan_path (&ebb_data);
6459 /* If this basic block has no sets, skip it. */
6460 if (ebb_data.nsets == 0)
6463 /* Get a reasonable estimate for the maximum number of qty's
6464 needed for this path. For this, we take the number of sets
6465 and multiply that by MAX_RECOG_OPERANDS. */
6466 max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;
6468 /* Dump the path we're about to process. */
6470 cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);
6472 cse_extended_basic_block (&ebb_data);
6477 end_alias_analysis ();
6478 free (reg_eqv_table);
6479 free (ebb_data.path);
6480 sbitmap_free (cse_visited_basic_blocks);
6482 rtl_hooks = general_rtl_hooks;
6484 if (cse_jumps_altered || recorded_label_ref)
6486 else if (cse_cfg_altered)
6492 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for
6493 which there isn't a REG_LABEL_OPERAND note.
6494 Return one if so. DATA is the insn. */
6497 check_for_label_ref (rtx *rtl, void *data)
6499 rtx insn = (rtx) data;
6501 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
6502 note for it, we must rerun jump since it needs to place the note. If
6503 this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
6504 don't do this since no REG_LABEL_OPERAND will be added. */
6505 return (GET_CODE (*rtl) == LABEL_REF
6506 && ! LABEL_REF_NONLOCAL_P (*rtl)
6508 || !label_is_jump_target_p (XEXP (*rtl, 0), insn))
6509 && LABEL_P (XEXP (*rtl, 0))
6510 && INSN_UID (XEXP (*rtl, 0)) != 0
6511 && ! find_reg_note (insn, REG_LABEL_OPERAND, XEXP (*rtl, 0)));
6514 /* Count the number of times registers are used (not set) in X.
6515 COUNTS is an array in which we accumulate the count, INCR is how much
6516 we count each register usage.
6518 Don't count a usage of DEST, which is the SET_DEST of a SET which
6519 contains X in its SET_SRC. This is because such a SET does not
6520 modify the liveness of DEST.
6521 DEST is set to pc_rtx for a trapping insn, or for an insn with side effects.
6522 We must then count uses of a SET_DEST regardless, because the insn can't be deleted anyway. */
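/* Illustrative example (hypothetical pseudo numbers): when the SET_SRC of

       (set (reg 100) (plus (reg 100) (reg 101)))

   is scanned with DEST == (reg 100), only the use of (reg 101) is
   counted; the self-reference to (reg 100) is ignored, since it alone
   cannot keep the insn alive.  When the containing insn can trap or has
   side effects, DEST is pc_rtx instead and the use of (reg 100) is
   counted like any other.  */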
6526 count_reg_usage (rtx x, int *counts, rtx dest, int incr)
6536 switch (code = GET_CODE (x))
6540 counts[REGNO (x)] += incr;
6555 /* If we are clobbering a MEM, mark any registers inside the address as being used. */
6557 if (MEM_P (XEXP (x, 0)))
6558 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
6562 /* Unless we are setting a REG, count everything in SET_DEST. */
6563 if (!REG_P (SET_DEST (x)))
6564 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
6565 count_reg_usage (SET_SRC (x), counts,
6566 dest ? dest : SET_DEST (x),
6576 /* We expect dest to be NULL_RTX here. If the insn may trap,
6577 or if it cannot be deleted due to side-effects, mark this fact
6578 by setting DEST to pc_rtx. */
6579 if (insn_could_throw_p (x) || side_effects_p (PATTERN (x)))
6581 if (code == CALL_INSN)
6582 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
6583 count_reg_usage (PATTERN (x), counts, dest, incr);
6585 /* Things used in a REG_EQUAL note aren't dead since loop may try to use them. */
6588 note = find_reg_equal_equiv_note (x);
6591 rtx eqv = XEXP (note, 0);
6593 if (GET_CODE (eqv) == EXPR_LIST)
6594 /* This REG_EQUAL note describes the result of a function call.
6595 Process all the arguments. */
6598 count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
6599 eqv = XEXP (eqv, 1);
6601 while (eqv && GET_CODE (eqv) == EXPR_LIST);
6603 count_reg_usage (eqv, counts, dest, incr);
6608 if (REG_NOTE_KIND (x) == REG_EQUAL
6609 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
6610 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
6611 involving registers in the address. */
6612 || GET_CODE (XEXP (x, 0)) == CLOBBER)
6613 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
6615 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
6619 /* Iterate over just the inputs, not the constraints as well. */
6620 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
6621 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
6631 fmt = GET_RTX_FORMAT (code);
6632 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6635 count_reg_usage (XEXP (x, i), counts, dest, incr);
6636 else if (fmt[i] == 'E')
6637 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6638 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
6642 /* Return true if X is a dead register. */
6645 is_dead_reg (rtx x, int *counts)
6648 && REGNO (x) >= FIRST_PSEUDO_REGISTER
6649 && counts[REGNO (x)] == 0);
6652 /* Return true if SET is live. */
6654 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
6661 if (set_noop_p (set))
6665 else if (GET_CODE (SET_DEST (set)) == CC0
6666 && !side_effects_p (SET_SRC (set))
6667 && ((tem = next_nonnote_nondebug_insn (insn)) == NULL_RTX
6669 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
6672 else if (!is_dead_reg (SET_DEST (set), counts)
6673 || side_effects_p (SET_SRC (set)))
6678 /* Return true if INSN is live. */
6681 insn_live_p (rtx insn, int *counts)
6684 if (insn_could_throw_p (insn))
6686 else if (GET_CODE (PATTERN (insn)) == SET)
6687 return set_live_p (PATTERN (insn), insn, counts);
6688 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
6690 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6692 rtx elt = XVECEXP (PATTERN (insn), 0, i);
6694 if (GET_CODE (elt) == SET)
6696 if (set_live_p (elt, insn, counts))
6699 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
6704 else if (DEBUG_INSN_P (insn))
6708 for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
6711 else if (!DEBUG_INSN_P (next))
6713 else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
6722 /* Count the number of stores into each pseudo register. Callback for note_stores. */
6725 count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data)
6727 int *counts = (int *) data;
6728 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
6729 counts[REGNO (x)]++;
6732 struct dead_debug_insn_data
6739 /* Return nonzero if a DEBUG_INSN needs to be reset because some dead
6740 pseudo doesn't have a replacement. Callback for for_each_rtx. */
6743 is_dead_debug_insn (rtx *loc, void *data)
6746 struct dead_debug_insn_data *ddid = (struct dead_debug_insn_data *) data;
6748 if (is_dead_reg (x, ddid->counts))
6750 if (ddid->replacements && ddid->replacements[REGNO (x)] != NULL_RTX)
6751 ddid->seen_repl = true;
6758 /* Replace a dead pseudo in a DEBUG_INSN with replacement DEBUG_EXPR.
6759 Callback for simplify_replace_fn_rtx. */
6762 replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
6764 rtx *replacements = (rtx *) data;
6767 && REGNO (x) >= FIRST_PSEUDO_REGISTER
6768 && replacements[REGNO (x)] != NULL_RTX)
6770 if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)]))
6771 return replacements[REGNO (x)];
6772 return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)],
6773 GET_MODE (replacements[REGNO (x)]));
6778 /* Scan all the insns and delete any that are dead; i.e., they store a register
6779 that is never used or they copy a register to itself.
6781 This is used to remove insns made obviously dead by cse, loop or other
6782 optimizations. It improves the heuristics in loop since it won't try to
6783 move dead invariants out of loops or make givs for dead quantities. The
6784 remaining passes of the compilation are also sped up. */
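/* Illustrative examples of insns this function deletes (hypothetical RTL):

       (set (reg 105) (reg 105))          a no-op copy of a register to itself
       (set (reg 106) (const_int 42))     reg 106 has a zero use count

   Insns that can throw, have side effects, or set a register whose use
   count is still nonzero are kept.  */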
6787 delete_trivially_dead_insns (rtx insns, int nreg)
6791 rtx *replacements = NULL;
6794 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
6795 /* First count the number of times each register is used. */
6796 if (MAY_HAVE_DEBUG_INSNS)
6798 counts = XCNEWVEC (int, nreg * 3);
6799 for (insn = insns; insn; insn = NEXT_INSN (insn))
6800 if (DEBUG_INSN_P (insn))
6801 count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
6803 else if (INSN_P (insn))
6805 count_reg_usage (insn, counts, NULL_RTX, 1);
6806 note_stores (PATTERN (insn), count_stores, counts + nreg * 2);
6808 /* If there can be debug insns, COUNTS are 3 consecutive arrays.
6809 First one counts how many times each pseudo is used outside
6810 of debug insns, second counts how many times each pseudo is
6811 used in debug insns and third counts how many times a pseudo is set. */
6816 counts = XCNEWVEC (int, nreg);
6817 for (insn = insns; insn; insn = NEXT_INSN (insn))
6819 count_reg_usage (insn, counts, NULL_RTX, 1);
6820 /* If no debug insns can be present, COUNTS is just an array
6821 which counts how many times each pseudo is used. */
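/* Illustrative layout of COUNTS when debug insns may be present, for a
   pseudo register with number R:

       counts[R]             uses of R outside debug insns
       counts[R + nreg]      uses of R in debug insns
       counts[R + nreg * 2]  stores into R

   A register is considered dead when counts[R] is zero; the DEBUG_EXPR
   replacement below is only attempted when counts[R + nreg] > 0 and
   counts[R + nreg * 2] == 1.  */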
6823 /* Go from the last insn to the first and delete insns that only set unused
6824 registers or copy a register to itself. As we delete an insn, remove
6825 usage counts for registers it uses.
6827 The first jump optimization pass may leave a real insn as the last
6828 insn in the function. We must not skip that insn or we may end
6829 up deleting code that is not really dead.
6831 If some otherwise unused register is only used in DEBUG_INSNs,
6832 try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before
6833 the setter. Then go through DEBUG_INSNs and if a DEBUG_EXPR
6834 has been created for the unused register, replace it with
6835 the DEBUG_EXPR, otherwise reset the DEBUG_INSN. */
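/* Illustrative transformation (hypothetical insn and register numbers): if

       (insn 10 (set (reg 107) (plus (reg 103) (const_int 4))))

   is dead except for uses of (reg 107) in DEBUG_INSNs, a debug temporary
   D#1 is created, a DEBUG_INSN binding D#1 to (plus (reg 103) (const_int 4))
   is emitted before insn 10, insn 10 is deleted, and the later DEBUG_INSNs
   have (reg 107) rewritten to refer to D#1 (or are reset if no replacement
   exists).  */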
6836 for (insn = get_last_insn (); insn; insn = prev)
6840 prev = PREV_INSN (insn);
6844 live_insn = insn_live_p (insn, counts);
6846 /* If this is a dead insn, delete it and show registers in it aren't being used. */
6849 if (! live_insn && dbg_cnt (delete_trivial_dead))
6851 if (DEBUG_INSN_P (insn))
6852 count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
6857 if (MAY_HAVE_DEBUG_INSNS
6858 && (set = single_set (insn)) != NULL_RTX
6859 && is_dead_reg (SET_DEST (set), counts)
6860 /* Used at least once in some DEBUG_INSN. */
6861 && counts[REGNO (SET_DEST (set)) + nreg] > 0
6862 /* And set exactly once. */
6863 && counts[REGNO (SET_DEST (set)) + nreg * 2] == 1
6864 && !side_effects_p (SET_SRC (set))
6865 && asm_noperands (PATTERN (insn)) < 0)
6869 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
6870 dval = make_debug_expr_from_rtl (SET_DEST (set));
6872 /* Emit a debug bind insn before the insn in which reg dies. */
6874 bind = gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set)),
6875 DEBUG_EXPR_TREE_DECL (dval),
6877 VAR_INIT_STATUS_INITIALIZED);
6878 count_reg_usage (bind, counts + nreg, NULL_RTX, 1);
6880 bind = emit_debug_insn_before (bind, insn);
6881 df_insn_rescan (bind);
6883 if (replacements == NULL)
6884 replacements = XCNEWVEC (rtx, nreg);
6885 replacements[REGNO (SET_DEST (set))] = dval;
6888 count_reg_usage (insn, counts, NULL_RTX, -1);
6891 delete_insn_and_edges (insn);
6895 if (MAY_HAVE_DEBUG_INSNS)
6897 struct dead_debug_insn_data ddid;
6898 ddid.counts = counts;
6899 ddid.replacements = replacements;
6900 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
6901 if (DEBUG_INSN_P (insn))
6903 /* If this debug insn references a dead register that wasn't replaced
6904 with a DEBUG_EXPR, reset the DEBUG_INSN. */
6905 ddid.seen_repl = false;
6906 if (for_each_rtx (&INSN_VAR_LOCATION_LOC (insn),
6907 is_dead_debug_insn, &ddid))
6909 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
6910 df_insn_rescan (insn);
6912 else if (ddid.seen_repl)
6914 INSN_VAR_LOCATION_LOC (insn)
6915 = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
6916 NULL_RTX, replace_dead_reg,
6918 df_insn_rescan (insn);
6921 free (replacements);
6924 if (dump_file && ndead)
6925 fprintf (dump_file, "Deleted %i trivially dead insns\n",
6929 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
6933 /* This function is called via for_each_rtx. The argument, NEWREG, is
6934 a condition code register with the desired mode. If we are looking
6935 at the same register in a different mode, replace it with NEWREG. */
6939 cse_change_cc_mode (rtx *loc, void *data)
6941 struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
6945 && REGNO (*loc) == REGNO (args->newreg)
6946 && GET_MODE (*loc) != GET_MODE (args->newreg))
6948 validate_change (args->insn, loc, args->newreg, 1);
6955 /* Change the mode of any reference to the register REGNO (NEWREG) to
6956 GET_MODE (NEWREG) in INSN. */
6959 cse_change_cc_mode_insn (rtx insn, rtx newreg)
6961 struct change_cc_mode_args args;
6968 args.newreg = newreg;
6970 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
6971 for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
6973 /* If the following assertion was triggered, there is most probably
6974 something wrong with the cc_modes_compatible back end function.
6975 CC modes can only be considered compatible if the insn - with the mode
6976 replaced by any of the compatible modes - can still be recognized. */
6977 success = apply_change_group ();
6978 gcc_assert (success);
6981 /* Change the mode of any reference to the register REGNO (NEWREG) to
6982 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
6983 any instruction which modifies NEWREG. */
6986 cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
6990 for (insn = start; insn != end; insn = NEXT_INSN (insn))
6992 if (! INSN_P (insn))
6995 if (reg_set_p (newreg, insn))
6998 cse_change_cc_mode_insn (insn, newreg);
7002 /* BB is a basic block which finishes with CC_REG as a condition code
7003 register which is set to CC_SRC. Look through the successors of BB
7004 to find blocks which have a single predecessor (i.e., this one),
7005 and look through those blocks for an assignment to CC_REG which is
7006 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
7007 permitted to change the mode of CC_SRC to a compatible mode. This
7008 returns VOIDmode if no equivalent assignments were found.
7009 Otherwise it returns the mode which CC_SRC should wind up with.
7010 ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
7011 but is passed unmodified down to recursive calls in order to prevent endless recursion.
7014 The main complexity in this function is handling the mode issues.
7015 We may have more than one duplicate which we can eliminate, and we
7016 try to find a mode which will work for multiple duplicates. */
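/* Illustrative scenario (hypothetical registers and modes): BB ends with

       (set (reg:CC cc) (compare:CC (reg 100) (reg 101)))
       ... a conditional jump testing (reg:CC cc) ...

   and each successor block, having BB as its only predecessor, recomputes
   the same comparison before its own use of the flags.  Those recomputed
   sets are equivalent to CC_SRC and are deleted.  If one of them uses a
   different but compatible mode, targetm.cc_modes_compatible is asked for
   a mode that suits all of the duplicates, and that mode is returned so
   the caller can rewrite the original comparison in CC_SRC_INSN.  */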
7018 static enum machine_mode
7019 cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
7020 bool can_change_mode)
7023 enum machine_mode mode;
7024 unsigned int insn_count;
7027 enum machine_mode modes[2];
7033 /* We expect to have two successors. Look at both before picking
7034 the final mode for the comparison. If we have more successors
7035 (i.e., some sort of table jump, although that seems unlikely),
7036 then we require all beyond the first two to use the same mode. */
7039 found_equiv = false;
7040 mode = GET_MODE (cc_src);
7042 FOR_EACH_EDGE (e, ei, bb->succs)
7047 if (e->flags & EDGE_COMPLEX)
7050 if (EDGE_COUNT (e->dest->preds) != 1
7051 || e->dest == EXIT_BLOCK_PTR
7052 /* Avoid endless recursion on unreachable blocks. */
7053 || e->dest == orig_bb)
7056 end = NEXT_INSN (BB_END (e->dest));
7057 for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7061 if (! INSN_P (insn))
7064 /* If CC_SRC is modified, we have to stop looking for
7065 something which uses it. */
7066 if (modified_in_p (cc_src, insn))
7069 /* Check whether INSN sets CC_REG to CC_SRC. */
7070 set = single_set (insn);
7072 && REG_P (SET_DEST (set))
7073 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7076 enum machine_mode set_mode;
7077 enum machine_mode comp_mode;
7080 set_mode = GET_MODE (SET_SRC (set));
7081 comp_mode = set_mode;
7082 if (rtx_equal_p (cc_src, SET_SRC (set)))
7084 else if (GET_CODE (cc_src) == COMPARE
7085 && GET_CODE (SET_SRC (set)) == COMPARE
7087 && rtx_equal_p (XEXP (cc_src, 0),
7088 XEXP (SET_SRC (set), 0))
7089 && rtx_equal_p (XEXP (cc_src, 1),
7090 XEXP (SET_SRC (set), 1)))
7093 comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7094 if (comp_mode != VOIDmode
7095 && (can_change_mode || comp_mode == mode))
7102 if (insn_count < ARRAY_SIZE (insns))
7104 insns[insn_count] = insn;
7105 modes[insn_count] = set_mode;
7106 last_insns[insn_count] = end;
7109 if (mode != comp_mode)
7111 gcc_assert (can_change_mode);
7114 /* The modified insn will be re-recognized later. */
7115 PUT_MODE (cc_src, mode);
7120 if (set_mode != mode)
7122 /* We found a matching expression in the
7123 wrong mode, but we don't have room to
7124 store it in the array. Punt. This case should be rare. */
7128 /* INSN sets CC_REG to a value equal to CC_SRC
7129 with the right mode. We can simply delete it. */
7134 /* We found an instruction to delete. Keep looking,
7135 in the hopes of finding a three-way jump. */
7139 /* We found an instruction which sets the condition
7140 code, so don't look any farther. */
7144 /* If INSN sets CC_REG in some other way, don't look any farther. */
7146 if (reg_set_p (cc_reg, insn))
7150 /* If we fell off the bottom of the block, we can keep looking
7151 through successors. We pass CAN_CHANGE_MODE as false because
7152 we aren't prepared to handle compatibility between the
7153 further blocks and this block. */
7156 enum machine_mode submode;
7158 submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
7159 if (submode != VOIDmode)
7161 gcc_assert (submode == mode);
7163 can_change_mode = false;
7171 /* Now INSN_COUNT is the number of instructions we found which set
7172 CC_REG to a value equivalent to CC_SRC. The instructions are in
7173 INSNS. The modes used by those instructions are in MODES. */
7176 for (i = 0; i < insn_count; ++i)
7178 if (modes[i] != mode)
7180 /* We need to change the mode of CC_REG in INSNS[i] and
7181 subsequent instructions. */
7184 if (GET_MODE (cc_reg) == mode)
7187 newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7189 cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7193 delete_insn_and_edges (insns[i]);
7199 /* If we have a fixed condition code register (or two), walk through
7200 the instructions and try to eliminate duplicate assignments. */
7203 cse_condition_code_reg (void)
7205 unsigned int cc_regno_1;
7206 unsigned int cc_regno_2;
7211 if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7214 cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7215 if (cc_regno_2 != INVALID_REGNUM)
7216 cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7218 cc_reg_2 = NULL_RTX;
7227 enum machine_mode mode;
7228 enum machine_mode orig_mode;
7230 /* Look for blocks which end with a conditional jump based on a
7231 condition code register. Then look for the instruction which
7232 sets the condition code register. Then look through the
7233 successor blocks for instructions which set the condition
7234 code register to the same value. There are other possible
7235 uses of the condition code register, but these are by far the
7236 most common and the ones which we are most likely to be able
7239 last_insn = BB_END (bb);
7240 if (!JUMP_P (last_insn))
7243 if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7245 else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7250 cc_src_insn = NULL_RTX;
7252 for (insn = PREV_INSN (last_insn);
7253 insn && insn != PREV_INSN (BB_HEAD (bb));
7254 insn = PREV_INSN (insn))
7258 if (! INSN_P (insn))
7260 set = single_set (insn);
7262 && REG_P (SET_DEST (set))
7263 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7266 cc_src = SET_SRC (set);
7269 else if (reg_set_p (cc_reg, insn))
7276 if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7279 /* Now CC_REG is a condition code register used for a
7280 conditional jump at the end of the block, and CC_SRC, in
7281 CC_SRC_INSN, is the value to which that condition code
7282 register is set, and CC_SRC is still meaningful at the end of the basic block. */
7285 orig_mode = GET_MODE (cc_src);
7286 mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
7287 if (mode != VOIDmode)
7289 gcc_assert (mode == GET_MODE (cc_src));
7290 if (mode != orig_mode)
7292 rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7294 cse_change_cc_mode_insn (cc_src_insn, newreg);
7296 /* Do the same in the following insns that use the
7297 current value of CC_REG within BB. */
7298 cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7299 NEXT_INSN (last_insn),
7307 /* Perform common subexpression elimination. Nonzero value from
7308 `cse_main' means that jumps were simplified and some code may now
7309 be unreachable, so do jump optimization again. */
7311 gate_handle_cse (void)
7313 return optimize > 0;
7317 rest_of_handle_cse (void)
7322 dump_flow_info (dump_file, dump_flags);
7324 tem = cse_main (get_insns (), max_reg_num ());
7326 /* If we are not running more CSE passes, then we are no longer
7327 expecting CSE to be run. But always rerun it in a cheap mode. */
7328 cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
7332 timevar_push (TV_JUMP);
7333 rebuild_jump_labels (get_insns ());
7335 timevar_pop (TV_JUMP);
7337 else if (tem == 1 || optimize > 1)
7343 struct rtl_opt_pass pass_cse =
7348 gate_handle_cse, /* gate */
7349 rest_of_handle_cse, /* execute */
7352 0, /* static_pass_number */
7354 0, /* properties_required */
7355 0, /* properties_provided */
7356 0, /* properties_destroyed */
7357 0, /* todo_flags_start */
7358 TODO_df_finish | TODO_verify_rtl_sharing |
7360 TODO_verify_flow, /* todo_flags_finish */
7366 gate_handle_cse2 (void)
7368 return optimize > 0 && flag_rerun_cse_after_loop;
7371 /* Run second CSE pass after loop optimizations. */
7373 rest_of_handle_cse2 (void)
7378 dump_flow_info (dump_file, dump_flags);
7380 tem = cse_main (get_insns (), max_reg_num ());
7382 /* Run a pass to eliminate duplicated assignments to condition code
7383 registers. We have to run this after bypass_jumps, because it
7384 makes it harder for that pass to determine whether a jump can be bypassed safely. */
7386 cse_condition_code_reg ();
7388 delete_trivially_dead_insns (get_insns (), max_reg_num ());
7392 timevar_push (TV_JUMP);
7393 rebuild_jump_labels (get_insns ());
7395 timevar_pop (TV_JUMP);
7400 cse_not_expected = 1;
7405 struct rtl_opt_pass pass_cse2 =
7410 gate_handle_cse2, /* gate */
7411 rest_of_handle_cse2, /* execute */
7414 0, /* static_pass_number */
7415 TV_CSE2, /* tv_id */
7416 0, /* properties_required */
7417 0, /* properties_provided */
7418 0, /* properties_destroyed */
7419 0, /* todo_flags_start */
7420 TODO_df_finish | TODO_verify_rtl_sharing |
7422 TODO_verify_flow /* todo_flags_finish */
7427 gate_handle_cse_after_global_opts (void)
7429 return optimize > 0 && flag_rerun_cse_after_global_opts;
7432 /* Run a local CSE pass after global optimizations. */
7434 rest_of_handle_cse_after_global_opts (void)
7439 /* We only want to do local CSE, so don't follow jumps. */
7440 save_cfj = flag_cse_follow_jumps;
7441 flag_cse_follow_jumps = 0;
7443 rebuild_jump_labels (get_insns ());
7444 tem = cse_main (get_insns (), max_reg_num ());
7445 purge_all_dead_edges ();
7446 delete_trivially_dead_insns (get_insns (), max_reg_num ());
7448 cse_not_expected = !flag_rerun_cse_after_loop;
7450 /* If cse altered any jumps, rerun jump opts to clean things up. */
7453 timevar_push (TV_JUMP);
7454 rebuild_jump_labels (get_insns ());
7456 timevar_pop (TV_JUMP);
7461 flag_cse_follow_jumps = save_cfj;
7465 struct rtl_opt_pass pass_cse_after_global_opts =
7469 "cse_local", /* name */
7470 gate_handle_cse_after_global_opts, /* gate */
7471 rest_of_handle_cse_after_global_opts, /* execute */
7474 0, /* static_pass_number */
7476 0, /* properties_required */
7477 0, /* properties_provided */
7478 0, /* properties_destroyed */
7479 0, /* todo_flags_start */
7480 TODO_df_finish | TODO_verify_rtl_sharing |
7482 TODO_verify_flow /* todo_flags_finish */