/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* stdio.h must precede rtl.h for FFS.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "rtlhooks-def.h"
/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, reg_qty[N] will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.
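   As a hedged illustration (not part of the pass itself), this encoding
   makes "do these two registers hold the same value?" a pair of array
   lookups.  The helper below is hypothetical and uses the REG_QTY and
   REGNO_QTY_VALID_P accessors defined later in this file:

#if 0
static int
demo_regs_share_value_p (unsigned int a, unsigned int b)
{
  /* Unassigned registers have negative, mutually distinct quantity
     numbers (-N - 1), so they never compare equal here.  */
  return (REGNO_QTY_VALID_P (a) && REGNO_QTY_VALID_P (b)
          && REG_QTY (a) == REG_QTY (b));
}
#endif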
   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions with
   the quantity's `mode' must be in the hash table for both registers,
   and they must be in the same equivalence class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity number of one
   of the registers is not the same mode as those expressions.

Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.
Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed from the hash table.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match makes the entries be ignored if anyone tries to match them.
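   A minimal sketch, under the names above, of the staleness test this
   scheme enables; illustrative only, the real logic lives in
   `mention_regs' below:

#if 0
static void
demo_note_reg_reference (unsigned int i)
{
  /* Stale entries mentioning register I exist; purge them before
     recording a reference to I's new value.  */
  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
    remove_invalid_refs (i);

  /* Entries added from here on reflect the current value.  */
  REG_IN_TABLE (i) = REG_TICK (i);
}
#endif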
   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
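/* For example, (const (plus (symbol_ref "x") (const_int 8))) and
   (symbol_ref "x") are related, and each one's table element points at
   the other.  A hedged sketch of the relation test, using the helpers
   get_related_value and get_integer_term from rtlanal.c; illustrative
   only, not part of the pass:  */
#if 0
static int
demo_related_p (rtx x, rtx y)
{
  /* Two constant expressions are related iff they share the same
     integer-free term.  */
  rtx xb = get_related_value (x);
  rtx yb = get_related_value (y);
  return xb != 0 && yb != 0 && rtx_equal_p (xb, yb);
}
#endif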
/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;
/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A sum of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;
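/* A hedged example of reading the comparison fields: if we earlier
   recorded that (lt (reg X) (const_int 0)) is known true, a later
   identical test can be folded.  This illustrative helper is not part
   of the pass:  */
#if 0
static int
demo_known_negative_p (unsigned int regno)
{
  struct qty_table_elem *ent;

  if (!REGNO_QTY_VALID_P (regno))
    return 0;

  ent = &qty_table[REG_QTY (regno)];
  return (ent->comparison_code == LT
          && ent->comparison_const == const0_rtx);
}
#endif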
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;
/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;
struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* Search key.  */
  unsigned int regno;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A used list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_used_list;
static struct cse_reg_info *cse_reg_info_used_list_end;

/* A mapping from registers to cse_reg_info data structures.  */
#define REGHASH_SHIFT 7
#define REGHASH_SIZE (1 << REGHASH_SHIFT)
#define REGHASH_MASK (REGHASH_SIZE - 1)
static struct cse_reg_info *reg_hash[REGHASH_SIZE];
#define REGHASH_FN(REGNO) \
  (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
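/* REGHASH_FN folds the high bits into the low seven before masking; for
   instance, register 200 (0xC8) hashes to (0xC8 ^ 0x01) & 0x7F = 0x49.
   A hedged demo of a lookup over the resulting chains, illustrative
   only (the real lookup is `get_cse_reg_info' below):  */
#if 0
static struct cse_reg_info *
demo_find_reg_info (unsigned int regno)
{
  struct cse_reg_info *p;

  /* Walk the bucket's chain until the search key matches.  */
  for (p = reg_hash[REGHASH_FN (regno)]; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      return p;
  return NULL;
}
#endif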
/* The last lookup we did into the reg_hash table.  This allows us
   to cache repeated lookups.  */
static unsigned int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */

static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if this pass has made changes, and therefore it's
   worthwhile to run the garbage collector.  */

static int cse_altered;

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we have put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL note; if so, we must rerun jump after CSE to put
   in the note.  */
static int recorded_label_ref;
/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;
/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The `canon_exp' field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a CONST_INT).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};
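/* Because each value chain is kept cheapest-first, replacing an
   expression with its best equivalent is a walk from `first_same_value'.
   A hedged sketch, not the actual replacement logic (see cse_insn):  */
#if 0
static rtx
demo_cheapest_reg_equiv (struct table_elt *elt)
{
  struct table_elt *p;

  /* Scan the class from the cheapest element onward and return the
     first register equivalent, if any.  */
  for (p = elt->first_same_value; p; p = p->next_same_value)
    if (REG_P (p->exp))
      return p->exp;
  return NULL_RTX;
}
#endif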
/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT 5
#define HASH_SIZE (1 << HASH_SHIFT)
#define HASH_MASK (HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is
   a pseudo register (hard registers may require `do_not_record' to be
   set).  */
#define HASH(X, M) \
  ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
    ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
    : canon_hash (X, M)) & HASH_MASK)

/* Like HASH, but without side-effects.  */
#define SAFE_HASH(X, M) \
  ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
    ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
    : safe_hash (X, M)) & HASH_MASK)
/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of 1 and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
   || ((N) < FIRST_PSEUDO_REGISTER \
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
#define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N) \
  (((N) == cached_regno && cached_cse_reg_info) \
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   subreg).  */

#define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register N represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;
/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;

  /* Highest CUID value of insns in block.  */
  int high_cuid;

  /* Total number of SETs in block.  */
  int nsets;

  /* Last insn in the block.  */
  rtx last;

  /* Size of current branch path, if any.  */
  int path_size;

  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;

      /* Whether it should be taken or not.  AROUND is the same as taken
         except that it is used when the destination label is not preceded
         by a BARRIER.  */
      enum taken {PATH_TAKEN, PATH_NOT_TAKEN, PATH_AROUND} status;
    } *path;
};
static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, enum rtx_code);
static int approx_reg_cost_1 (rtx *, void *);
static int approx_reg_cost (rtx);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, enum machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
                                 enum machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, enum machine_mode);
static int cse_rtx_varies_p (rtx, int);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
                                        enum machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);

static inline unsigned canon_hash (rtx, enum machine_mode);
static inline unsigned safe_hash (rtx, enum machine_mode);
static unsigned hash_rtx_string (const char *);

static rtx canon_reg (rtx, rtx);
static void find_best_addr (rtx, rtx *, enum machine_mode);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
                                           enum machine_mode *,
                                           enum machine_mode *);
static rtx fold_rtx (rtx, rtx);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx, int);
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
                              int);
static void cse_insn (rtx, rtx);
static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
                                    int, int, int);
static int addr_affects_sp_p (rtx);
static void invalidate_from_clobbers (rtx);
static rtx cse_process_notes (rtx, rtx);
static void invalidate_skipped_set (rtx, rtx, void *);
static void invalidate_skipped_block (rtx);
static rtx cse_basic_block (rtx, rtx, struct branch_path *);
static void count_reg_usage (rtx, int *, int);
static int check_for_label_ref (rtx *, void *);
extern void dump_class (struct table_elt*);
static struct cse_reg_info * get_cse_reg_info (unsigned int);
static int check_dependence (rtx *, void *);

static void flush_hash_table (void);
static bool insn_live_p (rtx, int *);
static bool set_live_p (rtx, rtx, int *);
static bool dead_libcall_p (rtx, int *);
static int cse_change_cc_mode (rtx *, void *);
static void cse_change_cc_mode_insns (rtx, rtx, rtx);
static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);

#undef  RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
        return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
        return true;
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
        return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}
/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}
/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (rtx *xp, void *data)
{
  rtx x = *xp;
  int *cost_p = data;

  if (x && REG_P (x))
    {
      unsigned int regno = REGNO (x);

      if (! CHEAP_REGNO (regno))
        {
          if (regno < FIRST_PSEUDO_REGISTER)
            {
              if (SMALL_REGISTER_CLASSES)
                return 1;
              *cost_p += 2;
            }
          else
            *cost_p += 1;
        }
    }

  return 0;
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (rtx x)
{
  int cost = 0;

  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
    return MAX_COST;

  return cost;
}
/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally desirable.  */

static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
        return 1;
      if (cost_b == MAX_COST)
        return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
        return 1;
      if (regcost_b == MAX_COST)
        return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, enum rtx_code outer)
{
  return ((GET_CODE (x) == SUBREG
           && REG_P (SUBREG_REG (x))
           && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
           && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
           && (GET_MODE_SIZE (GET_MODE (x))
               < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
           && subreg_lowpart_p (x)
           && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
                                     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
          ? 0
          : rtx_cost (x, outer) * 2);
}
static struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
  struct cse_reg_info *p;

  for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;

  if (p == NULL)
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
        {
          p = cse_reg_info_free_list;
          cse_reg_info_free_list = p->next;
        }
      else
        p = xmalloc (sizeof (struct cse_reg_info));

      /* Insert into hash table.  */
      p->hash_next = *hash_head;
      *hash_head = p;

      /* Initialize it.  */
      p->reg_tick = 1;
      p->reg_in_table = -1;
      p->subreg_ticked = -1;
      p->reg_qty = -regno - 1;
      p->regno = regno;
      p->next = cse_reg_info_used_list;
      cse_reg_info_used_list = p;
      if (!cse_reg_info_used_list_end)
        cse_reg_info_used_list_end = p;
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = p;

  return p;
}
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Clear out hash table state for this pass.  */

  memset (reg_hash, 0, sizeof reg_hash);

  if (cse_reg_info_used_list)
    {
      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
      cse_reg_info_free_list = cse_reg_info_used_list;
      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
    }
  cached_cse_reg_info = 0;

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
        {
          struct table_elt *last = first;

          table[i] = NULL;

          while (last->next_same_hash != NULL)
            last = last->next_same_hash;

          /* Now relink this entire hash chain into
             the free element list.  */

          last->next_same_hash = free_element_chain;
          free_element_chain = first;
        }
    }

#ifdef HAVE_cc0
  prev_insn = 0;
  prev_insn_cc0 = 0;
#endif
}
/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new, unsigned int old)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old));

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
         that not only can they not be allocated by the compiler, but
         they cannot be used in substitutions or canonicalizations
         either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
          || (new >= FIRST_PSEUDO_REGISTER
              && (firstr < FIRST_PSEUDO_REGISTER
                  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
                       || (uid_cuid[REGNO_FIRST_UID (new)]
                           < cse_basic_block_start))
                      && (uid_cuid[REGNO_LAST_UID (new)]
                          > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
         Otherwise, insert before any non-fixed hard regs that are at the
         end.  Registers of class NO_REGS cannot be used as an
         equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
             && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
             && new >= FIRST_PSEUDO_REGISTER)
        lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
        reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
        qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}
/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno
        = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
                   : hard_regno_nregs[regno][GET_MODE (x)]);
      unsigned int i;

      for (i = regno; i < endregno; i++)
        {
          if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
            remove_invalid_refs (i);

          REG_IN_TABLE (i) = REG_TICK (i);
          SUBREG_TICKED (i) = -1;
        }

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
        {
          /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
             the last store to this register really stored into this
             subreg, then remove the memory of this subreg.
             Otherwise, remove any memory of the entire register and
             all its subregs from the table.  */
          if (REG_TICK (i) - REG_IN_TABLE (i) > 1
              || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
            remove_invalid_refs (i);
          else
            remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
        }

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
          && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
        if (insert_regs (XEXP (x, 0), NULL, 0))
          {
            rehash_using_reg (XEXP (x, 0));
            changed = 1;
          }

      if (REG_P (XEXP (x, 1))
          && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
        if (insert_regs (XEXP (x, 1), NULL, 0))
          {
            rehash_using_reg (XEXP (x, 1));
            changed = 1;
          }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
         wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
        {
          struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

          if (ent->mode != GET_MODE (x))
            return 0;
        }

      if (modified || ! qty_valid)
        {
          if (classp)
            for (classp = classp->first_same_value;
                 classp != 0;
                 classp = classp->next_same_value)
              if (REG_P (classp->exp)
                  && GET_MODE (classp->exp) == GET_MODE (x))
                {
                  make_regs_eqv (regno, REGNO (classp->exp));
                  return 1;
                }

          /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
             than REG_IN_TABLE to find out if there was only a single preceding
             invalidation - for the SUBREG - or another one, which would be
             for the full register.  However, if we find here that REG_TICK
             indicates that the register is invalid, it means that it has
             been invalidated in a separate operation.  The SUBREG might be used
             now (then this is a recursive call), or we might use the full REG
             now and a SUBREG of it later.  So bump up REG_TICK so that
             mention_regs will do the right thing.  */
          if (! modified
              && REG_IN_TABLE (regno) >= 0
              && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
            REG_TICK (regno)++;
          make_new_qty (regno, GET_MODE (x));
          return 1;
        }

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
           && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
        struct table_elt *newfirst = next;
        while (next)
          {
            next->first_same_value = newfirst;
            next = next->next_same_value;
          }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
        /* This entry is not in the proper hash bucket.  This can happen
           when two classes were merged by `merge_equiv_classes'.  Search
           for the hash bucket that it heads.  This happens only very
           rarely, so the cost is acceptable.  */
        for (hash = 0; hash < HASH_SIZE; hash++)
          if (table[hash] == elt)
            table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
        p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
        p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}
/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && REG_P (x))
                            || exp_equiv_p (x, p->exp, !REG_P (x), false)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
         invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
        if (REG_P (p->exp)
            && REGNO (p->exp) == regno)
          return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
        if (mode == p->mode
            && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
          return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));

  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
        /* Make sure this is a valid entry in the table.  */
        && exp_equiv_p (p->exp, p->exp, 1, false))
      return p->exp;

  return 0;
}
/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X, Y) \
  (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
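/* CHEAPER orders elements lexicographically: MAX_COST on either axis
   loses outright, then lower `cost' wins, and `regcost' only breaks
   ties.  A hedged illustration of its use, not part of the pass:  */
#if 0
static struct table_elt *
demo_pick_cheaper (struct table_elt *a, struct table_elt *b)
{
  /* Return the element that would sort first in a value chain.  */
  return CHEAPER (a, b) ? a : b;
}
#endif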
static struct table_elt *
insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));

  /* If X is a hard register, show it is being put in the table.  */
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
      unsigned int i;

      for (i = regno; i < endregno; i++)
        SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* Put an element for X into the right hash bucket.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    {
      n_elements_made++;
      elt = xmalloc (sizeof (struct table_elt));
    }

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = COST (x);
  elt->regcost = approx_reg_cost (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
        /* Insert at the head of the class.  */
        {
          struct table_elt *p;
          elt->next_same_value = classp;
          classp->prev_same_value = elt;
          elt->first_same_value = elt;

          for (p = classp; p; p = p->next_same_value)
            p->first_same_value = elt;
        }
      else
        {
          /* Insert not at head of the class.  */
          /* Put it after the last element cheaper than X.  */
          struct table_elt *p, *next;

          for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
               p = next);

          /* Put it after P and before NEXT.  */
          elt->next_same_value = next;
          if (next)
            next->prev_same_value = elt;

          elt->prev_same_value = p;
          p->next_same_value = elt;
          elt->first_same_value = classp;
        }
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && REG_P (classp->exp)
      && !REG_P (x))
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (REG_P (x)
           && classp
           && ! qty_table[REG_QTY (REGNO (x))].const_rtx
           && ! elt->is_const)
    {
      struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
        {
          if (p->is_const && !REG_P (p->exp))
            {
              int x_q = REG_QTY (REGNO (x));
              struct qty_table_elem *x_ent = &qty_table[x_q];

              x_ent->const_rtx
                = gen_lowpart (GET_MODE (x), p->exp);
              x_ent->const_insn = this_insn;
              break;
            }
        }
    }

  else if (REG_P (x)
           && qty_table[REG_QTY (REGNO (x))].const_rtx
           && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
        {
          /* Get the integer-free subexpression in the hash table.  */
          subhash = SAFE_HASH (subexp, mode);
          subelt = lookup (subexp, subhash, mode);
          if (subelt == 0)
            subelt = insert (subexp, NULL, subhash, mode);
          /* Initialize SUBELT's circular chain if it has none.  */
          if (subelt->related_value == 0)
            subelt->related_value = subelt;
          /* Find the element in the circular chain that precedes SUBELT.  */
          subelt_prev = subelt;
          while (subelt_prev->related_value != subelt)
            subelt_prev = subelt_prev->related_value;
          /* Put new ELT into SUBELT's circular chain just before SUBELT.
             This way the element that follows SUBELT is the oldest one.  */
          elt->related_value = subelt_prev->related_value;
          subelt_prev->related_value = elt;
        }
    }

  return elt;
}
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned int hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
         Don't do this for invalid entries as we cannot find their
         hash code (it also isn't necessary).  */
      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
        {
          bool need_rehash = false;

          hash_arg_in_memory = 0;
          hash = HASH (exp, mode);

          if (REG_P (exp))
            {
              need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
              delete_reg_equiv (REGNO (exp));
            }

          remove_from_table (elt, hash);

          if (insert_regs (exp, class1, 0) || need_rehash)
            {
              rehash_using_reg (exp);
              hash = HASH (exp, mode);
            }
          new = insert (exp, class1, hash, mode);
          new->in_memory = hash_arg_in_memory;
        }
    }
}
/* Flush the entire hash table.  */

static void
flush_hash_table (void)
{
  int i;
  struct table_elt *p;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = table[i])
      {
        /* Note that invalidate can remove elements
           after P in the current hash chain.  */
        if (REG_P (p->exp))
          invalidate (p->exp, p->mode);
        else
          remove_from_table (p, i);
      }
}
/* Function called for each rtx to check whether a true dependence exists.  */
struct check_dependence_data
{
  enum machine_mode mode;
  rtx exp;
  rtx addr;
};

static int
check_dependence (rtx *x, void *data)
{
  struct check_dependence_data *d = (struct check_dependence_data *) data;
  if (*x && MEM_P (*x))
    return canon_true_dependence (d->exp, d->mode, d->addr, *x,
                                  cse_rtx_varies_p);
  else
    return 0;
}
/* Remove from the hash table, or mark as invalid, all expressions whose
   values could be altered by storing in X.  X is a register, a subreg, or
   a memory reference with nonvarying address (because, when a memory
   reference with a varying address is stored in, all memory references are
   removed by invalidate_memory so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be
   invalidated instead of just the amount indicated by the mode of X.  This
   is only used for bitfield stores into memory.

   A nonvarying address may be just a register or just a symbol reference,
   or it may be either of those plus a numeric offset.  */

static void
invalidate (rtx x, enum machine_mode full_mode)
{
  int i;
  struct table_elt *p;
  rtx addr;

  switch (GET_CODE (x))
    {
    case REG:
      {
        /* If X is a register, dependencies on its contents are recorded
           through the qty number mechanism.  Just change the qty number of
           the register, mark it as invalid for expressions that refer to it,
           and remove it itself.  */
        unsigned int regno = REGNO (x);
        unsigned int hash = HASH (x, GET_MODE (x));

        /* Remove REGNO from any quantity list it might be on and indicate
           that its value might have changed.  If it is a pseudo, remove its
           entry from the hash table.

           For a hard register, we do the first two actions above for any
           additional hard registers corresponding to X.  Then, if any of these
           registers are in the table, we must remove any REG entries that
           overlap these registers.  */

        delete_reg_equiv (regno);
        REG_TICK (regno)++;
        SUBREG_TICKED (regno) = -1;

        if (regno >= FIRST_PSEUDO_REGISTER)
          {
            /* Because a register can be referenced in more than one mode,
               we might have to remove more than one table entry.  */
            struct table_elt *elt;

            while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
              remove_from_table (elt, hash);
          }
        else
          {
            HOST_WIDE_INT in_table
              = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
            unsigned int endregno
              = regno + hard_regno_nregs[regno][GET_MODE (x)];
            unsigned int tregno, tendregno, rn;
            struct table_elt *p, *next;

            CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

            for (rn = regno + 1; rn < endregno; rn++)
              {
                in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
                CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
                delete_reg_equiv (rn);
                REG_TICK (rn)++;
                SUBREG_TICKED (rn) = -1;
              }

            if (in_table)
              for (hash = 0; hash < HASH_SIZE; hash++)
                for (p = table[hash]; p; p = next)
                  {
                    next = p->next_same_hash;

                    if (!REG_P (p->exp)
                        || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
                      continue;

                    tregno = REGNO (p->exp);
                    tendregno
                      = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
                    if (tendregno > regno && tregno < endregno)
                      remove_from_table (p, hash);
                  }
          }
      }
      return;

    case SUBREG:
      invalidate (SUBREG_REG (x), VOIDmode);
      return;

    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
        invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;

    case EXPR_LIST:
      /* This is part of a disjoint return value; extract the location in
         question ignoring the offset.  */
      invalidate (XEXP (x, 0), VOIDmode);
      return;

    case MEM:
      addr = canon_rtx (get_addr (XEXP (x, 0)));
      /* Calculate the canonical version of X here so that
         true_dependence doesn't generate new RTL for X on each call.  */
      x = canon_rtx (x);

      /* Remove all hash table elements that refer to overlapping pieces of
         memory.  */
      if (full_mode == VOIDmode)
        full_mode = GET_MODE (x);

      for (i = 0; i < HASH_SIZE; i++)
        {
          struct table_elt *next;

          for (p = table[i]; p; p = next)
            {
              next = p->next_same_hash;
              if (p->in_memory)
                {
                  struct check_dependence_data d;

                  /* Just canonicalize the expression once;
                     otherwise each time we call invalidate
                     true_dependence will canonicalize the
                     expression again.  */
                  if (!p->canon_exp)
                    p->canon_exp = canon_rtx (p->exp);
                  d.exp = x;
                  d.addr = addr;
                  d.mode = full_mode;
                  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
                    remove_from_table (p, i);
                }
            }
        }
      return;

    default:
      gcc_unreachable ();
    }
}
/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (unsigned int regno)
{
  unsigned int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (!REG_P (p->exp)
            && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
          remove_from_table (p, i);
      }
}
/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
   and mode MODE.  */
static void
remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
                            enum machine_mode mode)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        rtx exp = p->exp;
        next = p->next_same_hash;

        if (!REG_P (exp)
            && (GET_CODE (exp) != SUBREG
                || !REG_P (SUBREG_REG (exp))
                || REGNO (SUBREG_REG (exp)) != regno
                || (((SUBREG_BYTE (exp)
                      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
                    && SUBREG_BYTE (exp) <= end))
            && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
          remove_from_table (p, i);
      }
}
/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */

static void
rehash_using_reg (rtx x)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (!REG_P (x)
      || REG_IN_TABLE (REGNO (x)) < 0
      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  */

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (reg_mentioned_p (x, p->exp)
            && exp_equiv_p (p->exp, p->exp, 1, false)
            && i != (hash = SAFE_HASH (p->exp, p->mode)))
          {
            if (p->next_same_hash)
              p->next_same_hash->prev_same_hash = p->prev_same_hash;

            if (p->prev_same_hash)
              p->prev_same_hash->next_same_hash = p->next_same_hash;
            else
              table[i] = p->next_same_hash;

            p->next_same_hash = table[hash];
            p->prev_same_hash = 0;
            if (table[hash])
              table[hash]->prev_same_hash = p;
            table[hash] = p;
          }
      }
}
1937 /* Remove from the hash table any expression that is a call-clobbered
1938 register. Also update their TICK values. */
1941 invalidate_for_call (void)
1943 unsigned int regno, endregno;
1946 struct table_elt *p, *next;
1949 /* Go through all the hard registers. For each that is clobbered in
1950 a CALL_INSN, remove the register from quantity chains and update
1951 reg_tick if defined. Also see if any of these registers is currently
1954 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1955 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1957 delete_reg_equiv (regno);
1958 if (REG_TICK (regno) >= 0)
1961 SUBREG_TICKED (regno) = -1;
1964 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1967 /* In the case where we have no call-clobbered hard registers in the
1968 table, we are done. Otherwise, scan the table and remove any
1969 entry that overlaps a call-clobbered register. */
1972 for (hash = 0; hash < HASH_SIZE; hash++)
1973 for (p = table[hash]; p; p = next)
1975 next = p->next_same_hash;
1977 if (!REG_P (p->exp)
1978 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1979 continue;
1981 regno = REGNO (p->exp);
1982 endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];
1984 for (i = regno; i < endregno; i++)
1985 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1987 remove_from_table (p, hash);
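/* Illustrative sketch (hypothetical types): a hard-register value may
   span several consecutive registers, so the loop above treats the
   entry as clobbered if any register in [REGNO, ENDREGNO) is in the
   call-clobbered set.  */
#if 0
static int
span_clobbered_p (unsigned int regno, unsigned int endregno,
                  const char *clobbered /* one flag per hard reg */)
{
  unsigned int i;

  for (i = regno; i < endregno; i++)
    if (clobbered[i])
      return 1;
  return 0;
}
#endif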
1993 /* Given an expression X of type CONST,
1994 and ELT which is its table entry (or 0 if it
1995 is not in the hash table),
1996 return an alternate expression for X as a register plus integer.
1997 If none can be found, return 0. */
2000 use_related_value (rtx x, struct table_elt *elt)
2002 struct table_elt *relt = 0;
2003 struct table_elt *p, *q;
2004 HOST_WIDE_INT offset;
2006 /* First, is there anything related known?
2007 If we have a table element, we can tell from that.
2008 Otherwise, must look it up. */
2010 if (elt != 0 && elt->related_value != 0)
2011 relt = elt;
2012 else if (elt == 0 && GET_CODE (x) == CONST)
2014 rtx subexp = get_related_value (x);
2016 relt = lookup (subexp,
2017 SAFE_HASH (subexp, GET_MODE (subexp)),
2024 /* Search all related table entries for one that has an
2025 equivalent register. */
2030 /* This loop is strange in that it is executed in two different cases.
2031 The first is when X is already in the table. Then it is searching
2032 the RELATED_VALUE list of X's class (RELT). The second case is when
2033 X is not in the table. Then RELT points to a class for the related
2034 value.
2036 Ensure that, whatever case we are in, we ignore classes that have
2037 the same value as X. */
2039 if (rtx_equal_p (x, p->exp))
2042 for (q = p->first_same_value; q; q = q->next_same_value)
2049 p = p->related_value;
2051 /* We went all the way around, so there is nothing to be found.
2052 Alternatively, perhaps RELT was in the table for some other reason
2053 and it has no related values recorded. */
2054 if (p == relt || p == 0)
2061 offset = (get_integer_term (x) - get_integer_term (p->exp));
2062 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2063 return plus_constant (q->exp, offset);
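/* Worked example (hypothetical rtl, not taken from a real dump): if X
   is (const (plus (symbol_ref "tbl") (const_int 64))) and the related
   chain holds (const (plus (symbol_ref "tbl") (const_int 60))) whose
   class contains register R, then OFFSET is 64 - 60 = 4 and the
   result is (plus R (const_int 4)).  */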
2066 /* Hash a string. Just add its bytes up. */
2067 static inline unsigned
2068 hash_rtx_string (const char *ps)
2070 unsigned hash = 0;
2071 const unsigned char *p = (const unsigned char *) ps;
2073 if (p)
2074 while (*p)
2075 hash += *p++;
2077 return hash;
2080 /* Hash an rtx. We are careful to make sure the value is never negative.
2081 Equivalent registers hash identically.
2082 MODE is used in hashing for CONST_INTs only;
2083 otherwise the mode of X is used.
2085 Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2087 If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2088 a MEM rtx which does not have the RTX_UNCHANGING_P bit set.
2090 Note that cse_insn knows that the hash code of a MEM expression
2091 is just (int) MEM plus the hash code of the address. */
2094 hash_rtx (rtx x, enum machine_mode mode, int *do_not_record_p,
2095 int *hash_arg_in_memory_p, bool have_reg_qty)
2102 /* Used to turn recursion into iteration. We can't rely on GCC's
2103 tail-recursion elimination since we need to keep accumulating values
2109 code = GET_CODE (x);
2114 unsigned int regno = REGNO (x);
2116 if (!reload_completed)
2118 /* On some machines, we can't record any non-fixed hard register,
2119 because extending its life will cause reload problems. We
2120 consider ap, fp, sp, gp to be fixed for this purpose.
2122 We also consider CCmode registers to be fixed for this purpose;
2123 failure to do so leads to failure to simplify 0<100 type of
2124 conditionals.
2126 On all machines, we can't record any global registers.
2127 Nor should we record any register that is in a small
2128 class, as defined by CLASS_LIKELY_SPILLED_P. */
2131 if (regno >= FIRST_PSEUDO_REGISTER)
2133 else if (x == frame_pointer_rtx
2134 || x == hard_frame_pointer_rtx
2135 || x == arg_pointer_rtx
2136 || x == stack_pointer_rtx
2137 || x == pic_offset_table_rtx)
2139 else if (global_regs[regno])
2141 else if (fixed_regs[regno])
2143 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2145 else if (SMALL_REGISTER_CLASSES)
2147 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2154 *do_not_record_p = 1;
2159 hash += ((unsigned int) REG << 7);
2160 hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2164 /* We handle SUBREG of a REG specially because the underlying
2165 reg changes its hash value with every value change; we don't
2166 want to have to forget unrelated subregs when one subreg changes. */
2169 if (REG_P (SUBREG_REG (x)))
2171 hash += (((unsigned int) SUBREG << 7)
2172 + REGNO (SUBREG_REG (x))
2173 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2180 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2181 + (unsigned int) INTVAL (x));
2185 /* This is like the general case, except that it only counts
2186 the integers representing the constant. */
2187 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2188 if (GET_MODE (x) != VOIDmode)
2189 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2191 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2192 + (unsigned int) CONST_DOUBLE_HIGH (x));
2200 units = CONST_VECTOR_NUNITS (x);
2202 for (i = 0; i < units; ++i)
2204 elt = CONST_VECTOR_ELT (x, i);
2205 hash += hash_rtx (elt, GET_MODE (elt), do_not_record_p,
2206 hash_arg_in_memory_p, have_reg_qty);
2212 /* Assume there is only one rtx object for any given label. */
2214 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2215 differences and differences between each stage's debugging dumps. */
2216 hash += (((unsigned int) LABEL_REF << 7)
2217 + CODE_LABEL_NUMBER (XEXP (x, 0)));
2222 /* Don't hash on the symbol's address to avoid bootstrap differences.
2223 Different hash values may cause expressions to be recorded in
2224 different orders and thus different registers to be used in the
2225 final assembler. This also avoids differences in the dump files
2226 between various stages. */
2228 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2231 h += (h << 7) + *p++; /* ??? revisit */
2233 hash += ((unsigned int) SYMBOL_REF << 7) + h;
2238 /* We don't record if marked volatile or if BLKmode since we don't
2239 know the size of the move. */
2240 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2242 *do_not_record_p = 1;
2245 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2246 *hash_arg_in_memory_p = 1;
2248 /* Now that we have already found this special case,
2249 might as well speed it up as much as possible. */
2250 hash += (unsigned) MEM;
2255 /* A USE that mentions non-volatile memory needs special
2256 handling since the MEM may be BLKmode which normally
2257 prevents an entry from being made. Pure calls are
2258 marked by a USE which mentions BLKmode memory.
2259 See calls.c:emit_call_1. */
2260 if (MEM_P (XEXP (x, 0))
2261 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2263 hash += (unsigned) USE;
2266 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2267 *hash_arg_in_memory_p = 1;
2269 /* Now that we have already found this special case,
2270 might as well speed it up as much as possible. */
2271 hash += (unsigned) MEM;
2286 case UNSPEC_VOLATILE:
2287 *do_not_record_p = 1;
2291 if (MEM_VOLATILE_P (x))
2293 *do_not_record_p = 1;
2298 /* We don't want to take the filename and line into account. */
2299 hash += (unsigned) code + (unsigned) GET_MODE (x)
2300 + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2301 + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2302 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2304 if (ASM_OPERANDS_INPUT_LENGTH (x))
2306 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2308 hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
2309 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2310 do_not_record_p, hash_arg_in_memory_p,
2313 (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2316 hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2317 x = ASM_OPERANDS_INPUT (x, 0);
2318 mode = GET_MODE (x);
2330 i = GET_RTX_LENGTH (code) - 1;
2331 hash += (unsigned) code + (unsigned) GET_MODE (x);
2332 fmt = GET_RTX_FORMAT (code);
2338 /* If we are about to do the last recursive call
2339 needed at this level, change it into iteration.
2340 This function is called enough to be worth it. */
2347 hash += hash_rtx (XEXP (x, i), 0, do_not_record_p,
2348 hash_arg_in_memory_p, have_reg_qty);
2352 for (j = 0; j < XVECLEN (x, i); j++)
2353 hash += hash_rtx (XVECEXP (x, i, j), 0, do_not_record_p,
2354 hash_arg_in_memory_p, have_reg_qty);
2358 hash += hash_rtx_string (XSTR (x, i));
2362 hash += (unsigned int) XINT (x, i);
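/* Illustrative sketch of the recursion-into-iteration trick above
   (hypothetical expression type, not rtx): the last operand is handled
   by overwriting X and restarting, so a long right-leaning chain costs
   constant stack while HASH keeps accumulating.  */
#if 0
struct expr { int n_ops; struct expr *op[3]; unsigned int val; };

static unsigned int
hash_expr (const struct expr *x)
{
  unsigned int hash = 0;
  int i;

 repeat:
  hash += x->val;
  for (i = x->n_ops - 1; i >= 0; i--)
    {
      if (i == 0)
	{
	  /* Turn the final recursive call into iteration.  */
	  x = x->op[0];
	  goto repeat;
	}
      hash += hash_expr (x->op[i]);
    }
  return hash;
}
#endif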
2377 /* Hash an rtx X for cse via hash_rtx.
2378 Stores 1 in do_not_record if any subexpression is volatile.
2379 Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2380 does not have the RTX_UNCHANGING_P bit set. */
2382 static inline unsigned
2383 canon_hash (rtx x, enum machine_mode mode)
2385 return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2388 /* Like canon_hash but with no side effects, i.e. do_not_record
2389 and hash_arg_in_memory are not changed. */
2391 static inline unsigned
2392 safe_hash (rtx x, enum machine_mode mode)
2394 int dummy_do_not_record;
2395 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
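/* Usage sketch (hypothetical helpers; the flag-resetting mirrors what
   callers such as cse_insn do around HASH): canon_hash feeds the
   recording path, where do_not_record and hash_arg_in_memory must be
   updated; safe_hash serves lookups that must leave them alone.  */
#if 0
static unsigned int
record_hash (rtx x, enum machine_mode mode)
{
  /* Recording path: reset, hash, then inspect the side-effect flags.  */
  do_not_record = 0;
  hash_arg_in_memory = 0;
  return canon_hash (x, mode);
}

static unsigned int
query_hash (rtx x, enum machine_mode mode)
{
  /* Lookup path: must not disturb the globals.  */
  return safe_hash (x, mode);
}
#endif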
2398 /* Return 1 iff X and Y would canonicalize into the same thing,
2399 without actually constructing the canonicalization of either one.
2400 If VALIDATE is nonzero,
2401 we assume X is an expression being processed from the rtl
2402 and Y was found in the hash table. We check register refs
2403 in Y for being marked as valid.
2405 If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
2408 exp_equiv_p (rtx x, rtx y, int validate, bool for_gcse)
2414 /* Note: it is incorrect to assume an expression is equivalent to itself
2415 if VALIDATE is nonzero. */
2416 if (x == y && !validate)
2419 if (x == 0 || y == 0)
2422 code = GET_CODE (x);
2423 if (code != GET_CODE (y))
2426 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2427 if (GET_MODE (x) != GET_MODE (y))
2438 return XEXP (x, 0) == XEXP (y, 0);
2441 return XSTR (x, 0) == XSTR (y, 0);
2445 return REGNO (x) == REGNO (y);
2448 unsigned int regno = REGNO (y);
2450 unsigned int endregno
2451 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2452 : hard_regno_nregs[regno][GET_MODE (y)]);
2454 /* If the quantities are not the same, the expressions are not
2455 equivalent. If they are and we are not to validate, they
2456 are equivalent. Otherwise, ensure all regs are up-to-date. */
2458 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2464 for (i = regno; i < endregno; i++)
2465 if (REG_IN_TABLE (i) != REG_TICK (i))
2474 /* Can't merge two expressions in different alias sets, since we
2475 can decide that the expression is transparent in a block when
2476 it isn't, due to it being set with a different alias set. */
2477 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
2480 /* A volatile mem should not be considered equivalent to any
2481 other. */
2482 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2487 /* For commutative operations, check both orders. */
2495 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2497 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2498 validate, for_gcse))
2499 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2501 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2502 validate, for_gcse)));
2505 /* We don't use the generic code below because we want to
2506 disregard filename and line numbers. */
2508 /* A volatile asm isn't equivalent to any other. */
2509 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2512 if (GET_MODE (x) != GET_MODE (y)
2513 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2514 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2515 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2516 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2517 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2520 if (ASM_OPERANDS_INPUT_LENGTH (x))
2522 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2523 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2524 ASM_OPERANDS_INPUT (y, i),
2526 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2527 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2537 /* Compare the elements. If any pair of corresponding elements
2538 fail to match, return 0 for the whole thing. */
2540 fmt = GET_RTX_FORMAT (code);
2541 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2546 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2547 validate, for_gcse))
2552 if (XVECLEN (x, i) != XVECLEN (y, i))
2554 for (j = 0; j < XVECLEN (x, i); j++)
2555 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2556 validate, for_gcse))
2561 if (strcmp (XSTR (x, i), XSTR (y, i)))
2566 if (XINT (x, i) != XINT (y, i))
2571 if (XWINT (x, i) != XWINT (y, i))
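/* Illustrative sketch (hypothetical node type) of the commutative case
   above: two applications of a commutative operator are equivalent
   when their operands match in either order.  */
#if 0
struct binop { int code; struct binop *a, *b; };

extern int equal_p (const struct binop *, const struct binop *);

static int
commutative_equal_p (const struct binop *x, const struct binop *y)
{
  return (equal_p (x->a, y->a) && equal_p (x->b, y->b))
	 || (equal_p (x->a, y->b) && equal_p (x->b, y->a));
}
#endif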
2587 /* Return 1 if X has a value that can vary even between two
2588 executions of the program. 0 means X can be compared reliably
2589 against certain constants or near-constants. */
2592 cse_rtx_varies_p (rtx x, int from_alias)
2594 /* We need not check for X and the equivalence class being of the same
2595 mode because if X is equivalent to a constant in some mode, it
2596 doesn't vary in any mode. */
2599 && REGNO_QTY_VALID_P (REGNO (x)))
2601 int x_q = REG_QTY (REGNO (x));
2602 struct qty_table_elem *x_ent = &qty_table[x_q];
2604 if (GET_MODE (x) == x_ent->mode
2605 && x_ent->const_rtx != NULL_RTX)
2609 if (GET_CODE (x) == PLUS
2610 && GET_CODE (XEXP (x, 1)) == CONST_INT
2611 && REG_P (XEXP (x, 0))
2612 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2614 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2615 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2617 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2618 && x0_ent->const_rtx != NULL_RTX)
2622 /* This can happen as the result of virtual register instantiation, if
2623 the initial constant is too large to be a valid address. This gives
2624 us a three instruction sequence, load large offset into a register,
2625 load fp minus a constant into a register, then a MEM which is the
2626 sum of the two `constant' registers. */
2627 if (GET_CODE (x) == PLUS
2628 && REG_P (XEXP (x, 0))
2629 && REG_P (XEXP (x, 1))
2630 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2631 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2633 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2634 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2635 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2636 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2638 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2639 && x0_ent->const_rtx != NULL_RTX
2640 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2641 && x1_ent->const_rtx != NULL_RTX)
2645 return rtx_varies_p (x, from_alias);
2648 /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2649 the result if necessary. INSN is as for canon_reg. */
2652 validate_canon_reg (rtx *xloc, rtx insn)
2654 rtx new = canon_reg (*xloc, insn);
2657 /* If replacing pseudo with hard reg or vice versa, ensure the
2658 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2659 if (insn != 0 && new != 0
2660 && REG_P (new) && REG_P (*xloc)
2661 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2662 != (REGNO (*xloc) < FIRST_PSEUDO_REGISTER))
2663 || GET_MODE (new) != GET_MODE (*xloc)
2664 || (insn_code = recog_memoized (insn)) < 0
2665 || insn_data[insn_code].n_dups > 0))
2666 validate_change (insn, xloc, new, 1);
2671 /* Canonicalize an expression:
2672 replace each register reference inside it
2673 with the "oldest" equivalent register.
2675 If INSN is nonzero and we are replacing a pseudo with a hard register
2676 or vice versa, validate_change is used to ensure that INSN remains valid
2677 after we make our substitution. The calls are made with IN_GROUP nonzero
2678 so apply_change_group must be called upon the outermost return from this
2679 function (unless INSN is zero). The result of apply_change_group can
2680 generally be discarded since the changes we are making are optional. */
2683 canon_reg (rtx x, rtx insn)
2692 code = GET_CODE (x);
2711 struct qty_table_elem *ent;
2713 /* Never replace a hard reg, because hard regs can appear
2714 in more than one machine mode, and we must preserve the mode
2715 of each occurrence. Also, some hard regs appear in
2716 MEMs that are shared and mustn't be altered. Don't try to
2717 replace any reg that maps to a reg of class NO_REGS. */
2718 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2719 || ! REGNO_QTY_VALID_P (REGNO (x)))
2722 q = REG_QTY (REGNO (x));
2723 ent = &qty_table[q];
2724 first = ent->first_reg;
2725 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2726 : REGNO_REG_CLASS (first) == NO_REGS ? x
2727 : gen_rtx_REG (ent->mode, first));
2734 fmt = GET_RTX_FORMAT (code);
2735 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2739 if (fmt[i] == 'e')
2740 validate_canon_reg (&XEXP (x, i), insn);
2741 else if (fmt[i] == 'E')
2742 for (j = 0; j < XVECLEN (x, i); j++)
2743 validate_canon_reg (&XVECEXP (x, i, j), insn);
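/* Usage sketch (assumed caller shape and locals; cse_insn is the real
   caller): because canon_reg queues its replacements with IN_GROUP
   nonzero, the outermost caller commits them in one batch.  The
   replacements are optional, so the group's result may be ignored.  */
#if 0
src = canon_reg (SET_SRC (set), insn);
if (insn)
  apply_change_group ();
#endif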
2749 /* LOC is a location within INSN that is an operand address (the contents of
2750 a MEM). Find the best equivalent address to use that is valid for this
2751 insn.
2753 On most CISC machines, complicated address modes are costly, and rtx_cost
2754 is a good approximation for that cost. However, most RISC machines have
2755 only a few (usually only one) memory reference formats. If an address is
2756 valid at all, it is often just as cheap as any other address. Hence, for
2757 RISC machines, we use `address_cost' to compare the costs of various
2758 addresses. For two addresses of equal cost, choose the one with the
2759 highest `rtx_cost' value as that has the potential of eliminating the
2760 most insns. For equal costs, we choose the first in the equivalence
2761 class. Note that we ignore the fact that pseudo registers are cheaper than
2762 hard registers here because we would also prefer the pseudo registers. */
2765 find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2767 struct table_elt *elt;
2769 struct table_elt *p;
2770 int found_better = 1;
2771 int save_do_not_record = do_not_record;
2772 int save_hash_arg_in_memory = hash_arg_in_memory;
2777 /* Do not try to replace constant addresses or addresses of local and
2778 argument slots. These MEM expressions are made only once and inserted
2779 in many instructions, as well as being used to control symbol table
2780 output. It is not safe to clobber them.
2782 There are some uncommon cases where the address is already in a register
2783 for some reason, but we cannot take advantage of that because we have
2784 no easy way to unshare the MEM. In addition, looking up all stack
2785 addresses is costly. */
2786 if ((GET_CODE (addr) == PLUS
2787 && REG_P (XEXP (addr, 0))
2788 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2789 && (regno = REGNO (XEXP (addr, 0)),
2790 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2791 || regno == ARG_POINTER_REGNUM))
2792 || (REG_P (addr)
2793 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2794 || regno == HARD_FRAME_POINTER_REGNUM
2795 || regno == ARG_POINTER_REGNUM))
2796 || CONSTANT_ADDRESS_P (addr))
2797 return;
2799 /* If this address is not simply a register, try to fold it. This will
2800 sometimes simplify the expression. Many simplifications
2801 will not be valid, but some, usually applying the associative rule, will
2802 be valid and produce better code. */
2805 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2806 int addr_folded_cost = address_cost (folded, mode);
2807 int addr_cost = address_cost (addr, mode);
2809 if ((addr_folded_cost < addr_cost
2810 || (addr_folded_cost == addr_cost
2811 /* ??? The rtx_cost comparison is left over from an older
2812 version of this code. It is probably no longer helpful. */
2813 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2814 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2815 && validate_change (insn, loc, folded, 0))
2819 /* If this address is not in the hash table, we can't look for equivalences
2820 of the whole address. Also, ignore if volatile. */
2823 hash = HASH (addr, Pmode);
2824 addr_volatile = do_not_record;
2825 do_not_record = save_do_not_record;
2826 hash_arg_in_memory = save_hash_arg_in_memory;
2831 elt = lookup (addr, hash, Pmode);
2835 /* We need to find the best (under the criteria documented above) entry
2836 in the class that is valid. We use the `flag' field to indicate
2837 choices that were invalid and iterate until we can't find a better
2838 one that hasn't already been tried. */
2840 for (p = elt->first_same_value; p; p = p->next_same_value)
2843 while (found_better)
2845 int best_addr_cost = address_cost (*loc, mode);
2846 int best_rtx_cost = (elt->cost + 1) >> 1;
2848 struct table_elt *best_elt = elt;
2851 for (p = elt->first_same_value; p; p = p->next_same_value)
2855 || exp_equiv_p (p->exp, p->exp, 1, false))
2856 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2857 || (exp_cost == best_addr_cost
2858 && ((p->cost + 1) >> 1) > best_rtx_cost)))
2861 best_addr_cost = exp_cost;
2862 best_rtx_cost = (p->cost + 1) >> 1;
2869 if (validate_change (insn, loc,
2870 canon_reg (copy_rtx (best_elt->exp),
2879 /* If the address is a binary operation with the first operand a register
2880 and the second a constant, do the same as above, but looking for
2881 equivalences of the register. Then try to simplify before checking for
2882 the best address to use. This catches a few cases: First is when we
2883 have REG+const and the register is another REG+const. We can often merge
2884 the constants and eliminate one insn and one register. It may also be
2885 that a machine has a cheap REG+REG+const. Finally, this improves the
2886 code on the Alpha for unaligned byte stores. */
2888 if (flag_expensive_optimizations
2889 && ARITHMETIC_P (*loc)
2890 && REG_P (XEXP (*loc, 0)))
2892 rtx op1 = XEXP (*loc, 1);
2895 hash = HASH (XEXP (*loc, 0), Pmode);
2896 do_not_record = save_do_not_record;
2897 hash_arg_in_memory = save_hash_arg_in_memory;
2899 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2903 /* We need to find the best (under the criteria documented above) entry
2904 in the class that is valid. We use the `flag' field to indicate
2905 choices that were invalid and iterate until we can't find a better
2906 one that hasn't already been tried. */
2908 for (p = elt->first_same_value; p; p = p->next_same_value)
2911 while (found_better)
2913 int best_addr_cost = address_cost (*loc, mode);
2914 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2915 struct table_elt *best_elt = elt;
2916 rtx best_rtx = *loc;
2919 /* This is at worst case an O(n^2) algorithm, so limit our search
2920 to the first 32 elements on the list. This avoids trouble
2921 compiling code with very long basic blocks that can easily
2922 call simplify_gen_binary so many times that we run out of
2923 memory. */
2926 for (p = elt->first_same_value, count = 0;
2928 p = p->next_same_value, count++)
2931 || exp_equiv_p (p->exp, p->exp, 1, false)))
2933 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
2936 new_cost = address_cost (new, mode);
2938 if (new_cost < best_addr_cost
2939 || (new_cost == best_addr_cost
2940 && (COST (new) + 1) >> 1 > best_rtx_cost))
2943 best_addr_cost = new_cost;
2944 best_rtx_cost = (COST (new) + 1) >> 1;
2952 if (validate_change (insn, loc,
2953 canon_reg (copy_rtx (best_rtx),
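/* Illustrative sketch of the selection rule used by both loops above
   (hypothetical cost pair): minimize address_cost, and break ties by
   preferring the larger rtx_cost, since folding a more complex
   expression stands to eliminate more insns.  */
#if 0
static int
better_addr_p (int addr_cost, int rtx_cost,
               int best_addr_cost, int best_rtx_cost)
{
  return addr_cost < best_addr_cost
	 || (addr_cost == best_addr_cost && rtx_cost > best_rtx_cost);
}
#endif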
2963 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2964 operation (EQ, NE, GT, etc.), follow it back through the hash table to
2965 find what values are being compared.
2967 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2968 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2969 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2970 compared to produce cc0.
2972 The return value is the comparison operator and is either the code of
2973 A or the code corresponding to the inverse of the comparison. */
2975 static enum rtx_code
2976 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
2977 enum machine_mode *pmode1, enum machine_mode *pmode2)
2981 arg1 = *parg1, arg2 = *parg2;
2983 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2985 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2987 /* Set nonzero when we find something of interest. */
2989 int reverse_code = 0;
2990 struct table_elt *p = 0;
2992 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2993 On machines with CC0, this is the only case that can occur, since
2994 fold_rtx will return the COMPARE or item being compared with zero
2995 when given CC0. */
2997 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3000 /* If ARG1 is a comparison operator and CODE is testing for
3001 STORE_FLAG_VALUE, get the inner arguments. */
3003 else if (COMPARISON_P (arg1))
3005 #ifdef FLOAT_STORE_FLAG_VALUE
3006 REAL_VALUE_TYPE fsfv;
3010 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3011 && code == LT && STORE_FLAG_VALUE == -1)
3012 #ifdef FLOAT_STORE_FLAG_VALUE
3013 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3014 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3015 REAL_VALUE_NEGATIVE (fsfv)))
3020 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3021 && code == GE && STORE_FLAG_VALUE == -1)
3022 #ifdef FLOAT_STORE_FLAG_VALUE
3023 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3024 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3025 REAL_VALUE_NEGATIVE (fsfv)))
3028 x = arg1, reverse_code = 1;
3031 /* ??? We could also check for
3033 (ne (and (eq (...) (const_int 1))) (const_int 0))
3035 and related forms, but let's wait until we see them occurring. */
3038 /* Look up ARG1 in the hash table and see if it has an equivalence
3039 that lets us see what is being compared. */
3040 p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
3043 p = p->first_same_value;
3045 /* If what we compare is already known to be constant, that is as
3046 good as it gets.
3047 We need to break the loop in this case, because otherwise we
3048 can have an infinite loop when looking at a reg that is known
3049 to be a constant which is the same as a comparison of a reg
3050 against zero which appears later in the insn stream, which in
3051 turn is constant and the same as the comparison of the first reg
3052 against zero... */
3057 for (; p; p = p->next_same_value)
3059 enum machine_mode inner_mode = GET_MODE (p->exp);
3060 #ifdef FLOAT_STORE_FLAG_VALUE
3061 REAL_VALUE_TYPE fsfv;
3064 /* If the entry isn't valid, skip it. */
3065 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3068 if (GET_CODE (p->exp) == COMPARE
3069 /* Another possibility is that this machine has a compare insn
3070 that includes the comparison code. In that case, ARG1 would
3071 be equivalent to a comparison operation that would set ARG1 to
3072 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3073 ORIG_CODE is the actual comparison being done; if it is an EQ,
3074 we must reverse ORIG_CODE. On machines with a negative value
3075 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3078 && GET_MODE_CLASS (inner_mode) == MODE_INT
3079 && (GET_MODE_BITSIZE (inner_mode)
3080 <= HOST_BITS_PER_WIDE_INT)
3081 && (STORE_FLAG_VALUE
3082 & ((HOST_WIDE_INT) 1
3083 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3084 #ifdef FLOAT_STORE_FLAG_VALUE
3086 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3087 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3088 REAL_VALUE_NEGATIVE (fsfv)))
3091 && COMPARISON_P (p->exp)))
3096 else if ((code == EQ
3098 && GET_MODE_CLASS (inner_mode) == MODE_INT
3099 && (GET_MODE_BITSIZE (inner_mode)
3100 <= HOST_BITS_PER_WIDE_INT)
3101 && (STORE_FLAG_VALUE
3102 & ((HOST_WIDE_INT) 1
3103 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3104 #ifdef FLOAT_STORE_FLAG_VALUE
3106 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3107 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3108 REAL_VALUE_NEGATIVE (fsfv)))
3111 && COMPARISON_P (p->exp))
3118 /* If this is a non-trapping address, e.g. fp + constant, the
3119 equivalent is a better operand since it may let us predict
3120 the value of the comparison. */
3121 else if (!rtx_addr_can_trap_p (p->exp))
3128 /* If we didn't find a useful equivalence for ARG1, we are done.
3129 Otherwise, set up for the next iteration. */
3133 /* If we need to reverse the comparison, make sure that that is
3134 possible -- we can't necessarily infer the value of GE from LT
3135 with floating-point operands. */
3138 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3139 if (reversed == UNKNOWN)
3144 else if (COMPARISON_P (x))
3145 code = GET_CODE (x);
3146 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3149 /* Return our results. Return the modes from before fold_rtx
3150 because fold_rtx might produce const_int, and then it's too late. */
3151 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3152 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3157 /* If X is a nontrivial arithmetic operation on an argument
3158 for which a constant value can be determined, return
3159 the result of operating on that value, as a constant.
3160 Otherwise, return X, possibly with one or more operands
3161 modified by recursive calls to this function.
3163 If X is a register whose contents are known, we do NOT
3164 return those contents here. equiv_constant is called to
3165 perform that task.
3167 INSN is the insn that we may be modifying. If it is 0, make a copy
3168 of X before modifying it. */
3171 fold_rtx (rtx x, rtx insn)
3174 enum machine_mode mode;
3181 /* Folded equivalents of first two operands of X. */
3185 /* Constant equivalents of first three operands of X;
3186 0 when no such equivalent is known. */
3191 /* The mode of the first operand of X. We need this for sign and zero
3192 extensions. */
3193 enum machine_mode mode_arg0;
3198 mode = GET_MODE (x);
3199 code = GET_CODE (x);
3209 /* No use simplifying an EXPR_LIST
3210 since they are used only for lists of args
3211 in a function call's REG_EQUAL note. */
3217 return prev_insn_cc0;
3221 /* If the next insn is a CODE_LABEL followed by a jump table,
3222 PC's value is a LABEL_REF pointing to that label. That
3223 lets us fold switch statements on the VAX. */
3226 if (insn && tablejump_p (insn, &next, NULL))
3227 return gen_rtx_LABEL_REF (Pmode, next);
3232 /* See if we previously assigned a constant value to this SUBREG. */
3233 if ((new = lookup_as_function (x, CONST_INT)) != 0
3234 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3237 /* If this is a paradoxical SUBREG, we have no idea what value the
3238 extra bits would have. However, if the operand is equivalent
3239 to a SUBREG whose operand is the same as our mode, and all the
3240 modes are within a word, we can just use the inner operand
3241 because these SUBREGs just say how to treat the register.
3243 Similarly if we find an integer constant. */
3245 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3247 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3248 struct table_elt *elt;
3250 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3251 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3252 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3254 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3256 if (CONSTANT_P (elt->exp)
3257 && GET_MODE (elt->exp) == VOIDmode)
3260 if (GET_CODE (elt->exp) == SUBREG
3261 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3262 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3263 return copy_rtx (SUBREG_REG (elt->exp));
3269 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3270 We might be able to if the SUBREG is extracting a single word in an
3271 integral mode or extracting the low part. */
3273 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3274 const_arg0 = equiv_constant (folded_arg0);
3276 folded_arg0 = const_arg0;
3278 if (folded_arg0 != SUBREG_REG (x))
3280 new = simplify_subreg (mode, folded_arg0,
3281 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3286 if (REG_P (folded_arg0)
3287 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
3289 struct table_elt *elt;
3291 elt = lookup (folded_arg0,
3292 HASH (folded_arg0, GET_MODE (folded_arg0)),
3293 GET_MODE (folded_arg0));
3296 elt = elt->first_same_value;
3298 if (subreg_lowpart_p (x))
3299 /* If this is a narrowing SUBREG and our operand is a REG, see
3300 if we can find an equivalence for REG that is an arithmetic
3301 operation in a wider mode where both operands are paradoxical
3302 SUBREGs from objects of our result mode. In that case, we
3303 couldn't report an equivalent value for that operation, since we
3304 don't know what the extra bits will be. But we can find an
3305 equivalence for this SUBREG by folding that operation in the
3306 narrow mode. This allows us to fold arithmetic in narrow modes
3307 when the machine only supports word-sized arithmetic.
3309 Also look for a case where we have a SUBREG whose operand
3310 is the same as our result. If both modes are smaller
3311 than a word, we are simply interpreting a register in
3312 different modes and we can use the inner value. */
3314 for (; elt; elt = elt->next_same_value)
3316 enum rtx_code eltcode = GET_CODE (elt->exp);
3318 /* Just check for unary and binary operations. */
3319 if (UNARY_P (elt->exp)
3320 && eltcode != SIGN_EXTEND
3321 && eltcode != ZERO_EXTEND
3322 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3323 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3324 && (GET_MODE_CLASS (mode)
3325 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3327 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3329 if (!REG_P (op0) && ! CONSTANT_P (op0))
3330 op0 = fold_rtx (op0, NULL_RTX);
3332 op0 = equiv_constant (op0);
3334 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3337 else if (ARITHMETIC_P (elt->exp)
3338 && eltcode != DIV && eltcode != MOD
3339 && eltcode != UDIV && eltcode != UMOD
3340 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3341 && eltcode != ROTATE && eltcode != ROTATERT
3342 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3343 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3345 || CONSTANT_P (XEXP (elt->exp, 0)))
3346 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3347 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3349 || CONSTANT_P (XEXP (elt->exp, 1))))
3351 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3352 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3354 if (op0 && !REG_P (op0) && ! CONSTANT_P (op0))
3355 op0 = fold_rtx (op0, NULL_RTX);
3358 op0 = equiv_constant (op0);
3360 if (op1 && !REG_P (op1) && ! CONSTANT_P (op1))
3361 op1 = fold_rtx (op1, NULL_RTX);
3364 op1 = equiv_constant (op1);
3366 /* If we are looking for the low SImode part of
3367 (ashift:DI c (const_int 32)), it doesn't work
3368 to compute that in SImode, because a 32-bit shift
3369 in SImode is unpredictable. We know the value is 0. */
3371 && GET_CODE (elt->exp) == ASHIFT
3372 && GET_CODE (op1) == CONST_INT
3373 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3376 < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3377 /* If the count fits in the inner mode's width,
3378 but exceeds the outer mode's width,
3379 the value will get truncated to 0
3380 by the subreg. */
3381 new = CONST0_RTX (mode);
3383 /* If the count exceeds even the inner mode's width,
3384 don't fold this expression. */
3387 else if (op0 && op1)
3388 new = simplify_binary_operation (GET_CODE (elt->exp), mode, op0, op1);
3391 else if (GET_CODE (elt->exp) == SUBREG
3392 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3393 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3395 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3396 new = copy_rtx (SUBREG_REG (elt->exp));
3402 /* A SUBREG resulting from a zero extension may fold to zero if
3403 it extracts higher bits than the ZERO_EXTEND's source bits.
3404 FIXME: if combine tried to, er, combine these instructions,
3405 this transformation may be moved to simplify_subreg. */
3406 for (; elt; elt = elt->next_same_value)
3408 if (GET_CODE (elt->exp) == ZERO_EXTEND
3410 >= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3411 return CONST0_RTX (mode);
3419 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3420 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3421 new = lookup_as_function (XEXP (x, 0), code);
3423 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3427 /* If we are not actually processing an insn, don't try to find the
3428 best address. Not only don't we care, but we could modify the
3429 MEM in an invalid way since we have no insn to validate against. */
3431 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3434 /* Even if we don't fold in the insn itself,
3435 we can safely do so here, in hopes of getting a constant. */
3436 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3438 HOST_WIDE_INT offset = 0;
3441 && REGNO_QTY_VALID_P (REGNO (addr)))
3443 int addr_q = REG_QTY (REGNO (addr));
3444 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3446 if (GET_MODE (addr) == addr_ent->mode
3447 && addr_ent->const_rtx != NULL_RTX)
3448 addr = addr_ent->const_rtx;
3451 /* If address is constant, split it into a base and integer offset. */
3452 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3454 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3455 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3457 base = XEXP (XEXP (addr, 0), 0);
3458 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3460 else if (GET_CODE (addr) == LO_SUM
3461 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3462 base = XEXP (addr, 1);
3464 /* If this is a constant pool reference, we can fold it into its
3465 constant to allow better value tracking. */
3466 if (base && GET_CODE (base) == SYMBOL_REF
3467 && CONSTANT_POOL_ADDRESS_P (base))
3469 rtx constant = get_pool_constant (base);
3470 enum machine_mode const_mode = get_pool_mode (base);
3473 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3475 constant_pool_entries_cost = COST (constant);
3476 constant_pool_entries_regcost = approx_reg_cost (constant);
3479 /* If we are loading the full constant, we have an equivalence. */
3480 if (offset == 0 && mode == const_mode)
3483 /* If this actually isn't a constant (weird!), we can't do
3484 anything. Otherwise, handle the two most common cases:
3485 extracting a word from a multi-word constant, and extracting
3486 the low-order bits. Other cases don't seem common enough to
3487 worry about. */
3488 if (! CONSTANT_P (constant))
3491 if (GET_MODE_CLASS (mode) == MODE_INT
3492 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3493 && offset % UNITS_PER_WORD == 0
3494 && (new = operand_subword (constant,
3495 offset / UNITS_PER_WORD,
3496 0, const_mode)) != 0)
3499 if (((BYTES_BIG_ENDIAN
3500 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3501 || (! BYTES_BIG_ENDIAN && offset == 0))
3502 && (new = gen_lowpart (mode, constant)) != 0)
3506 /* If this is a reference to a label at a known position in a jump
3507 table, we also know its value. */
3508 if (base && GET_CODE (base) == LABEL_REF)
3510 rtx label = XEXP (base, 0);
3511 rtx table_insn = NEXT_INSN (label);
3513 if (table_insn && JUMP_P (table_insn)
3514 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3516 rtx table = PATTERN (table_insn);
3519 && (offset / GET_MODE_SIZE (GET_MODE (table))
3520 < XVECLEN (table, 0)))
3521 return XVECEXP (table, 0,
3522 offset / GET_MODE_SIZE (GET_MODE (table)));
3524 if (table_insn && JUMP_P (table_insn)
3525 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3527 rtx table = PATTERN (table_insn);
3530 && (offset / GET_MODE_SIZE (GET_MODE (table))
3531 < XVECLEN (table, 1)))
3533 offset /= GET_MODE_SIZE (GET_MODE (table));
3534 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3537 if (GET_MODE (table) != Pmode)
3538 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3540 /* Indicate this is a constant. This isn't a
3541 valid form of CONST, but it will only be used
3542 to fold the next insns and then discarded, so
3543 it should be safe.
3545 Note this expression must be explicitly discarded,
3546 by cse_insn, else it may end up in a REG_EQUAL note
3547 and "escape" to cause problems elsewhere. */
3548 return gen_rtx_CONST (GET_MODE (new), new);
3556 #ifdef NO_FUNCTION_CSE
3558 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3564 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3565 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3566 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3576 mode_arg0 = VOIDmode;
3578 /* Try folding our operands.
3579 Then see which ones have constant values known. */
3581 fmt = GET_RTX_FORMAT (code);
3582 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3585 rtx arg = XEXP (x, i);
3586 rtx folded_arg = arg, const_arg = 0;
3587 enum machine_mode mode_arg = GET_MODE (arg);
3588 rtx cheap_arg, expensive_arg;
3589 rtx replacements[2];
3591 int old_cost = COST_IN (XEXP (x, i), code);
3593 /* Most arguments are cheap, so handle them specially. */
3594 switch (GET_CODE (arg))
3597 /* This is the same as calling equiv_constant; it is duplicated
3598 here for speed. */
3599 if (REGNO_QTY_VALID_P (REGNO (arg)))
3601 int arg_q = REG_QTY (REGNO (arg));
3602 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3604 if (arg_ent->const_rtx != NULL_RTX
3605 && !REG_P (arg_ent->const_rtx)
3606 && GET_CODE (arg_ent->const_rtx) != PLUS)
3608 = gen_lowpart (GET_MODE (arg),
3609 arg_ent->const_rtx);
3624 folded_arg = prev_insn_cc0;
3625 mode_arg = prev_insn_cc0_mode;
3626 const_arg = equiv_constant (folded_arg);
3631 folded_arg = fold_rtx (arg, insn);
3632 const_arg = equiv_constant (folded_arg);
3635 /* For the first three operands, see if the operand
3636 is constant or equivalent to a constant. */
3640 folded_arg0 = folded_arg;
3641 const_arg0 = const_arg;
3642 mode_arg0 = mode_arg;
3645 folded_arg1 = folded_arg;
3646 const_arg1 = const_arg;
3649 const_arg2 = const_arg;
3653 /* Pick the least expensive of the folded argument and an
3654 equivalent constant argument. */
3655 if (const_arg == 0 || const_arg == folded_arg
3656 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3657 cheap_arg = folded_arg, expensive_arg = const_arg;
3659 cheap_arg = const_arg, expensive_arg = folded_arg;
3661 /* Try to replace the operand with the cheapest of the two
3662 possibilities. If it doesn't work and this is either of the first
3663 two operands of a commutative operation, try swapping them.
3664 If THAT fails, try the more expensive, provided it is cheaper
3665 than what is already there. */
3667 if (cheap_arg == XEXP (x, i))
3670 if (insn == 0 && ! copied)
3676 /* Order the replacements from cheapest to most expensive. */
3677 replacements[0] = cheap_arg;
3678 replacements[1] = expensive_arg;
3680 for (j = 0; j < 2 && replacements[j]; j++)
3682 int new_cost = COST_IN (replacements[j], code);
3684 /* Stop if what existed before was cheaper. Prefer constants
3685 in the case of a tie. */
3686 if (new_cost > old_cost
3687 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3690 /* It's not safe to substitute the operand of a conversion
3691 operator with a constant, as the conversion's identity
3692 depends upon the mode of its operand. This optimization
3693 is handled by the call to simplify_unary_operation. */
3694 if (GET_RTX_CLASS (code) == RTX_UNARY
3695 && GET_MODE (replacements[j]) != mode_arg0
3696 && (code == ZERO_EXTEND
3697 || code == SIGN_EXTEND
3699 || code == FLOAT_TRUNCATE
3700 || code == FLOAT_EXTEND
3703 || code == UNSIGNED_FLOAT
3704 || code == UNSIGNED_FIX))
3707 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3710 if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3711 || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
3713 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3714 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3716 if (apply_change_group ())
3718 /* Swap them back to be invalid so that this loop can
3719 continue and flag them to be swapped back later. */
3722 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3734 /* Don't try to fold inside of a vector of expressions.
3735 Doing nothing is harmless. */
3739 /* If a commutative operation, place a constant integer as the second
3740 operand unless the first operand is also a constant integer. Otherwise,
3741 place any constant second unless the first operand is also a constant. */
3743 if (COMMUTATIVE_P (x))
3746 || swap_commutative_operands_p (const_arg0 ? const_arg0
3748 const_arg1 ? const_arg1
3751 rtx tem = XEXP (x, 0);
3753 if (insn == 0 && ! copied)
3759 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3760 validate_change (insn, &XEXP (x, 1), tem, 1);
3761 if (apply_change_group ())
3763 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3764 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3769 /* If X is an arithmetic operation, see if we can simplify it. */
3771 switch (GET_RTX_CLASS (code))
3777 /* We can't simplify extension ops unless we know the
3778 original mode. */
3779 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3780 && mode_arg0 == VOIDmode)
3783 /* If we had a CONST, strip it off and put it back later if we
3784 fold. */
3785 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3786 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3788 new = simplify_unary_operation (code, mode,
3789 const_arg0 ? const_arg0 : folded_arg0,
3791 /* NEG of PLUS could be converted into MINUS, but that causes
3792 expressions of the form
3793 (CONST (MINUS (CONST_INT) (SYMBOL_REF)))
3794 which many ports mistakenly treat as LEGITIMATE_CONSTANT_P.
3795 FIXME: those ports should be fixed. */
3796 if (new != 0 && is_const
3797 && GET_CODE (new) == PLUS
3798 && (GET_CODE (XEXP (new, 0)) == SYMBOL_REF
3799 || GET_CODE (XEXP (new, 0)) == LABEL_REF)
3800 && GET_CODE (XEXP (new, 1)) == CONST_INT)
3801 new = gen_rtx_CONST (mode, new);
3806 case RTX_COMM_COMPARE:
3807 /* See what items are actually being compared and set FOLDED_ARG[01]
3808 to those values and CODE to the actual comparison code. If any are
3809 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3810 do anything if both operands are already known to be constant. */
3812 if (const_arg0 == 0 || const_arg1 == 0)
3814 struct table_elt *p0, *p1;
3815 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3816 enum machine_mode mode_arg1;
3818 #ifdef FLOAT_STORE_FLAG_VALUE
3819 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3821 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3822 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3823 false_rtx = CONST0_RTX (mode);
3827 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3828 &mode_arg0, &mode_arg1);
3829 const_arg0 = equiv_constant (folded_arg0);
3830 const_arg1 = equiv_constant (folded_arg1);
3832 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3833 what kinds of things are being compared, so we can't do
3834 anything with this comparison. */
3836 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3839 /* If we do not now have two constants being compared, see
3840 if we can nevertheless deduce some things about the
3841 comparison. */
3842 if (const_arg0 == 0 || const_arg1 == 0)
3844 /* Some addresses are known to be nonzero. We don't know
3845 their sign, but equality comparisons are known. */
3846 if (const_arg1 == const0_rtx
3847 && nonzero_address_p (folded_arg0))
3851 else if (code == NE)
3855 /* See if the two operands are the same. */
3857 if (folded_arg0 == folded_arg1
3858 || (REG_P (folded_arg0)
3859 && REG_P (folded_arg1)
3860 && (REG_QTY (REGNO (folded_arg0))
3861 == REG_QTY (REGNO (folded_arg1))))
3862 || ((p0 = lookup (folded_arg0,
3863 SAFE_HASH (folded_arg0, mode_arg0),
3865 && (p1 = lookup (folded_arg1,
3866 SAFE_HASH (folded_arg1, mode_arg0),
3868 && p0->first_same_value == p1->first_same_value))
3870 /* Sadly two equal NaNs are not equivalent. */
3871 if (!HONOR_NANS (mode_arg0))
3872 return ((code == EQ || code == LE || code == GE
3873 || code == LEU || code == GEU || code == UNEQ
3874 || code == UNLE || code == UNGE
3876 ? true_rtx : false_rtx);
3877 /* Take care for the FP compares we can resolve. */
3878 if (code == UNEQ || code == UNLE || code == UNGE)
3880 if (code == LTGT || code == LT || code == GT)
3884 /* If FOLDED_ARG0 is a register, see if the comparison we are
3885 doing now is either the same as we did before or the reverse
3886 (we only check the reverse if not floating-point). */
3887 else if (REG_P (folded_arg0))
3889 int qty = REG_QTY (REGNO (folded_arg0));
3891 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3893 struct qty_table_elem *ent = &qty_table[qty];
3895 if ((comparison_dominates_p (ent->comparison_code, code)
3896 || (! FLOAT_MODE_P (mode_arg0)
3897 && comparison_dominates_p (ent->comparison_code,
3898 reverse_condition (code))))
3899 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3901 && rtx_equal_p (ent->comparison_const,
3903 || (REG_P (folded_arg1)
3904 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3905 return (comparison_dominates_p (ent->comparison_code, code)
3906 ? true_rtx : false_rtx);
3912 /* If we are comparing against zero, see if the first operand is
3913 equivalent to an IOR with a constant. If so, we may be able to
3914 determine the result of this comparison. */
3916 if (const_arg1 == const0_rtx)
3918 rtx y = lookup_as_function (folded_arg0, IOR);
3922 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3923 && GET_CODE (inner_const) == CONST_INT
3924 && INTVAL (inner_const) != 0)
3926 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3927 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3928 && (INTVAL (inner_const)
3929 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3930 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3932 #ifdef FLOAT_STORE_FLAG_VALUE
3933 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3935 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3936 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3937 false_rtx = CONST0_RTX (mode);
3962 rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
3963 rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
3964 new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
3969 case RTX_COMM_ARITH:
3973 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3974 with that LABEL_REF as its second operand. If so, the result is
3975 the first operand of that MINUS. This handles switches with an
3976 ADDR_DIFF_VEC table. */
3977 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3980 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
3981 : lookup_as_function (folded_arg0, MINUS);
3983 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3984 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
3987 /* Now try for a CONST of a MINUS like the above. */
3988 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3989 : lookup_as_function (folded_arg0, CONST))) != 0
3990 && GET_CODE (XEXP (y, 0)) == MINUS
3991 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3992 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
3993 return XEXP (XEXP (y, 0), 0);
3996 /* Likewise if the operands are in the other order. */
3997 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4000 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4001 : lookup_as_function (folded_arg1, MINUS);
4003 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4004 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4007 /* Now try for a CONST of a MINUS like the above. */
4008 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4009 : lookup_as_function (folded_arg1, CONST))) != 0
4010 && GET_CODE (XEXP (y, 0)) == MINUS
4011 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4012 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4013 return XEXP (XEXP (y, 0), 0);
4016 /* If second operand is a register equivalent to a negative
4017 CONST_INT, see if we can find a register equivalent to the
4018 positive constant. Make a MINUS if so. Don't do this for
4019 a non-negative constant since we might then alternate between
4020 choosing positive and negative constants. Having the positive
4021 constant previously-used is the more common case. Be sure
4022 the resulting constant is non-negative; if const_arg1 were
4023 the smallest negative number this would overflow: depending
4024 on the mode, this would either just be the same value (and
4025 hence not save anything) or be incorrect. */
4026 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4027 && INTVAL (const_arg1) < 0
4028 /* This used to test
4030 -INTVAL (const_arg1) >= 0
4032 But The Sun V5.0 compilers mis-compiled that test. So
4033 instead we test for the problematic value in a more direct
4034 manner and hope the Sun compilers get it correct. */
4035 && INTVAL (const_arg1) !=
4036 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4037 && REG_P (folded_arg1))
4039 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4041 = lookup (new_const, SAFE_HASH (new_const, mode), mode);
4044 for (p = p->first_same_value; p; p = p->next_same_value)
4046 return simplify_gen_binary (MINUS, mode, folded_arg0,
4047 canon_reg (p->exp, NULL_RTX));
4052 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4053 If so, produce (PLUS Z C2-C). */
4054 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4056 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4057 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4058 return fold_rtx (plus_constant (copy_rtx (y),
4059 -INTVAL (const_arg1)),
4066 case SMIN: case SMAX: case UMIN: case UMAX:
4067 case IOR: case AND: case XOR:
4069 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4070 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4071 is known to be of similar form, we may be able to replace the
4072 operation with a combined operation. This may eliminate the
4073 intermediate operation if every use is simplified in this way.
4074 Note that the similar optimization done by combine.c only works
4075 if the intermediate operation's result has only one reference. */
4077 if (REG_P (folded_arg0)
4078 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4081 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4082 rtx y = lookup_as_function (folded_arg0, code);
4084 enum rtx_code associate_code;
4088 || 0 == (inner_const
4089 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4090 || GET_CODE (inner_const) != CONST_INT
4091 /* If we have compiled a statement like
4092 "if (x == (x & mask1))", and now are looking at
4093 "x & mask2", we will have a case where the first operand
4094 of Y is the same as our first operand. Unless we detect
4095 this case, an infinite loop will result. */
4096 || XEXP (y, 0) == folded_arg0)
4099 /* Don't associate these operations if they are a PLUS with the
4100 same constant and it is a power of two. These might be doable
4101 with a pre- or post-increment. Similarly for two subtracts of
4102 identical powers of two with post decrement. */
4104 if (code == PLUS && const_arg1 == inner_const
4105 && ((HAVE_PRE_INCREMENT
4106 && exact_log2 (INTVAL (const_arg1)) >= 0)
4107 || (HAVE_POST_INCREMENT
4108 && exact_log2 (INTVAL (const_arg1)) >= 0)
4109 || (HAVE_PRE_DECREMENT
4110 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4111 || (HAVE_POST_DECREMENT
4112 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4115 /* Compute the code used to compose the constants. For example,
4116 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
4118 associate_code = (is_shift || code == MINUS ? PLUS : code);
4120 new_const = simplify_binary_operation (associate_code, mode,
4121 const_arg1, inner_const);
4126 /* If we are associating shift operations, don't let this
4127 produce a shift of the size of the object or larger.
4128 This could occur when we follow a sign-extend by a right
4129 shift on a machine that does a sign-extend as a pair
4130 of shifts. */
4132 if (is_shift && GET_CODE (new_const) == CONST_INT
4133 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4135 /* As an exception, we can turn an ASHIFTRT of this
4136 form into a shift of the number of bits - 1. */
4137 if (code == ASHIFTRT)
4138 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4143 y = copy_rtx (XEXP (y, 0));
4145 /* If Y contains our first operand (the most common way this
4146 can happen is if Y is a MEM), we would go into an infinite
4147 loop if we tried to fold it. So don't in that case. */
4149 if (! reg_mentioned_p (folded_arg0, y))
4150 y = fold_rtx (y, insn);
4152 return simplify_gen_binary (code, mode, y, new_const);
4156 case DIV: case UDIV:
4157 /* ??? The associative optimization performed immediately above is
4158 also possible for DIV and UDIV using associate_code of MULT.
4159 However, we would need extra code to verify that the
4160 multiplication does not overflow, that is, there is no overflow
4161 in the calculation of new_const. */
4168 new = simplify_binary_operation (code, mode,
4169 const_arg0 ? const_arg0 : folded_arg0,
4170 const_arg1 ? const_arg1 : folded_arg1);
4174 /* (lo_sum (high X) X) is simply X. */
4175 if (code == LO_SUM && const_arg0 != 0
4176 && GET_CODE (const_arg0) == HIGH
4177 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4182 case RTX_BITFIELD_OPS:
4183 new = simplify_ternary_operation (code, mode, mode_arg0,
4184 const_arg0 ? const_arg0 : folded_arg0,
4185 const_arg1 ? const_arg1 : folded_arg1,
4186 const_arg2 ? const_arg2 : XEXP (x, 2));
4193 return new ? new : x;
4196 /* Return a constant value currently equivalent to X.
4197 Return 0 if we don't know one. */
4200 equiv_constant (rtx x)
4203 && REGNO_QTY_VALID_P (REGNO (x)))
4205 int x_q = REG_QTY (REGNO (x));
4206 struct qty_table_elem *x_ent = &qty_table[x_q];
4208 if (x_ent->const_rtx)
4209 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
4212 if (x == 0 || CONSTANT_P (x))
4215 /* If X is a MEM, try to fold it outside the context of any insn to see if
4216 it might be equivalent to a constant. That handles the case where it
4217 is a constant-pool reference. Then try to look it up in the hash table
4218 in case it is something whose value we have seen before. */
4222 struct table_elt *elt;
4224 x = fold_rtx (x, NULL_RTX);
4228 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
4232 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4233 if (elt->is_const && CONSTANT_P (elt->exp))
4240 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4241 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4242 least-significant part of X.
4243 MODE specifies how big a part of X to return.
4245 If the requested operation cannot be done, 0 is returned.
4247 This is similar to gen_lowpart_general in emit-rtl.c. */
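/* For example, given X == (mem:SI addr) and MODE == QImode, we return
   a QImode MEM whose address is adjusted (on big-endian targets) so
   that it refers to the least-significant byte, or 0 if the adjusted
   address is not valid. */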
4250 gen_lowpart_if_possible (enum machine_mode mode, rtx x)
4252 rtx result = gen_lowpart_common (mode, x);
4258 /* This is the only other case we handle. */
4262 if (WORDS_BIG_ENDIAN)
4263 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4264 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4265 if (BYTES_BIG_ENDIAN)
4266 /* Adjust the address so that the address-after-the-data is
unchanged. */
4268 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4269 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4271 new = adjust_address_nv (x, mode, offset);
4272 if (! memory_address_p (mode, XEXP (new, 0)))
4281 /* Given INSN, a jump insn, PATH_TAKEN indicates if we are following the "taken"
4282 branch. It will be zero if not.
4284 In certain cases, this can cause us to add an equivalence. For example,
4285 if we are following the taken case of
if (i == 2)
4287 we can add the fact that `i' and '2' are now equivalent.
4289 In any case, we can record that this comparison was passed. If the same
4290 comparison is seen later, we will know its value. */
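/* For instance, after the taken branch of

     (set (pc) (if_then_else (eq (reg:SI 100) (const_int 2))
                             (label_ref L) (pc)))

   reg 100 and (const_int 2) are put in one equivalence class, so a
   later (eq (reg:SI 100) (const_int 2)) folds to true (register
   numbers are illustrative). */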
4293 record_jump_equiv (rtx insn, int taken)
4295 int cond_known_true;
4298 enum machine_mode mode, mode0, mode1;
4299 int reversed_nonequality = 0;
4302 /* Ensure this is the right kind of insn. */
4303 if (! any_condjump_p (insn))
4305 set = pc_set (insn);
4307 /* See if this jump condition is known true or false. */
if (taken)
4309 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
else
4311 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4313 /* Get the type of comparison being done and the operands being compared.
4314 If we had to reverse a non-equality condition, record that fact so we
4315 know that it isn't valid for floating-point. */
4316 code = GET_CODE (XEXP (SET_SRC (set), 0));
4317 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4318 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4320 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4321 if (! cond_known_true)
4323 code = reversed_comparison_code_parts (code, op0, op1, insn);
4325 /* Don't remember if we can't find the inverse. */
4326 if (code == UNKNOWN)
4330 /* The mode is the mode of the non-constant. */
mode = mode0;
4332 if (mode1 != VOIDmode)
mode = mode1;
4335 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4338 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4339 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4340 Make any useful entries we can with that information. Called from
4341 above function and called recursively. */
4344 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4345 rtx op1, int reversed_nonequality)
4347 unsigned op0_hash, op1_hash;
4348 int op0_in_memory, op1_in_memory;
4349 struct table_elt *op0_elt, *op1_elt;
4351 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4352 we know that they are also equal in the smaller mode (this is also
4353 true for all smaller modes whether or not there is a SUBREG, but
4354 is not worth testing for with no SUBREG). */
4356 /* Note that GET_MODE (op0) may not equal MODE. */
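/* Concretely: if (subreg:DI (reg:SI 100) 0) is paradoxical and known
   equal to (reg:DI 101), then (reg:SI 100) must equal the SImode
   lowpart of reg 101, so we record that narrower equivalence too
   (register numbers are illustrative). */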
4357 if (code == EQ && GET_CODE (op0) == SUBREG
4358 && (GET_MODE_SIZE (GET_MODE (op0))
4359 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4361 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4362 rtx tem = gen_lowpart (inner_mode, op1);
4364 record_jump_cond (code, mode, SUBREG_REG (op0),
4365 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4366 reversed_nonequality);
4369 if (code == EQ && GET_CODE (op1) == SUBREG
4370 && (GET_MODE_SIZE (GET_MODE (op1))
4371 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4373 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4374 rtx tem = gen_lowpart (inner_mode, op0);
4376 record_jump_cond (code, mode, SUBREG_REG (op1),
4377 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4378 reversed_nonequality);
4381 /* Similarly, if this is an NE comparison, and either is a SUBREG
4382 making a smaller mode, we know the whole thing is also NE. */
4384 /* Note that GET_MODE (op0) may not equal MODE;
4385 if we test MODE instead, we can get an infinite recursion
4386 alternating between two modes each wider than MODE. */
4388 if (code == NE && GET_CODE (op0) == SUBREG
4389 && subreg_lowpart_p (op0)
4390 && (GET_MODE_SIZE (GET_MODE (op0))
4391 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4393 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4394 rtx tem = gen_lowpart (inner_mode, op1);
4396 record_jump_cond (code, mode, SUBREG_REG (op0),
4397 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4398 reversed_nonequality);
4401 if (code == NE && GET_CODE (op1) == SUBREG
4402 && subreg_lowpart_p (op1)
4403 && (GET_MODE_SIZE (GET_MODE (op1))
4404 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4406 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4407 rtx tem = gen_lowpart (inner_mode, op0);
4409 record_jump_cond (code, mode, SUBREG_REG (op1),
4410 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4411 reversed_nonequality);
4414 /* Hash both operands. */
4417 hash_arg_in_memory = 0;
4418 op0_hash = HASH (op0, mode);
4419 op0_in_memory = hash_arg_in_memory;
4425 hash_arg_in_memory = 0;
4426 op1_hash = HASH (op1, mode);
4427 op1_in_memory = hash_arg_in_memory;
4432 /* Look up both operands. */
4433 op0_elt = lookup (op0, op0_hash, mode);
4434 op1_elt = lookup (op1, op1_hash, mode);
4436 /* If both operands are already equivalent or if they are not in the
4437 table but are identical, do nothing. */
4438 if ((op0_elt != 0 && op1_elt != 0
4439 && op0_elt->first_same_value == op1_elt->first_same_value)
4440 || op0 == op1 || rtx_equal_p (op0, op1))
4443 /* If we aren't setting two things equal all we can do is save this
4444 comparison. Similarly if this is floating-point. In the latter
4445 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4446 If we record the equality, we might inadvertently delete code
4447 whose intent was to change -0 to +0. */
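/* Example of the hazard:

     if (d == 0.0)
       d = -d;

   succeeds for d == -0.0 as well; recording d == 0.0 and substituting
   the constant into the negation would store -0.0 unconditionally and
   defeat the intended -0 to +0 conversion. */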
4449 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4451 struct qty_table_elem *ent;
4454 /* If we reversed a floating-point comparison, if OP0 is not a
4455 register, or if OP1 is neither a register nor a constant, we can't
do anything. */
4459 op1 = equiv_constant (op1);
4461 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4462 || !REG_P (op0) || op1 == 0)
4465 /* Put OP0 in the hash table if it isn't already. This gives it a
4466 new quantity number. */
4469 if (insert_regs (op0, NULL, 0))
4471 rehash_using_reg (op0);
4472 op0_hash = HASH (op0, mode);
4474 /* If OP0 is contained in OP1, this changes its hash code
4475 as well. Faster to rehash than to check, except
4476 for the simple case of a constant. */
4477 if (! CONSTANT_P (op1))
4478 op1_hash = HASH (op1, mode);
4481 op0_elt = insert (op0, NULL, op0_hash, mode);
4482 op0_elt->in_memory = op0_in_memory;
4485 qty = REG_QTY (REGNO (op0));
4486 ent = &qty_table[qty];
4488 ent->comparison_code = code;
4491 /* Look it up again--in case op0 and op1 are the same. */
4492 op1_elt = lookup (op1, op1_hash, mode);
4494 /* Put OP1 in the hash table so it gets a new quantity number. */
4497 if (insert_regs (op1, NULL, 0))
4499 rehash_using_reg (op1);
4500 op1_hash = HASH (op1, mode);
4503 op1_elt = insert (op1, NULL, op1_hash, mode);
4504 op1_elt->in_memory = op1_in_memory;
4507 ent->comparison_const = NULL_RTX;
4508 ent->comparison_qty = REG_QTY (REGNO (op1));
4512 ent->comparison_const = op1;
4513 ent->comparison_qty = -1;
4519 /* If either side is still missing an equivalence, make it now,
4520 then merge the equivalences. */
4524 if (insert_regs (op0, NULL, 0))
4526 rehash_using_reg (op0);
4527 op0_hash = HASH (op0, mode);
4530 op0_elt = insert (op0, NULL, op0_hash, mode);
4531 op0_elt->in_memory = op0_in_memory;
4536 if (insert_regs (op1, NULL, 0))
4538 rehash_using_reg (op1);
4539 op1_hash = HASH (op1, mode);
4542 op1_elt = insert (op1, NULL, op1_hash, mode);
4543 op1_elt->in_memory = op1_in_memory;
4546 merge_equiv_classes (op0_elt, op1_elt);
4549 /* CSE processing for one instruction.
4550 First simplify sources and addresses of all assignments
4551 in the instruction, using previously-computed equivalent values.
4552 Then install the new sources and destinations in the table
4553 of available values.
4555 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4556 the insn. It means that INSN is inside libcall block. In this
4557 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4559 /* Data on one SET contained in the instruction. */
4563 /* The SET rtx itself. */
4565 /* The SET_SRC of the rtx (the original value, if it is changing). */
4567 /* The hash-table element for the SET_SRC of the SET. */
4568 struct table_elt *src_elt;
4569 /* Hash value for the SET_SRC. */
4571 /* Hash value for the SET_DEST. */
4573 /* The SET_DEST, with SUBREG, etc., stripped. */
4575 /* Nonzero if the SET_SRC is in memory. */
4577 /* Nonzero if the SET_SRC contains something
4578 whose value cannot be predicted and understood. */
4580 /* Original machine mode, in case it becomes a CONST_INT.
4581 The size of this field should match the size of the mode
4582 field of struct rtx_def (see rtl.h). */
4583 ENUM_BITFIELD(machine_mode) mode : 8;
4584 /* A constant equivalent for SET_SRC, if any. */
4586 /* Original SET_SRC value used for libcall notes. */
4588 /* Hash value of constant equivalent for SET_SRC. */
4589 unsigned src_const_hash;
4590 /* Table entry for constant equivalent for SET_SRC, if any. */
4591 struct table_elt *src_const_elt;
4595 cse_insn (rtx insn, rtx libcall_insn)
4597 rtx x = PATTERN (insn);
4603 /* Records what this insn does to set CC0. */
4604 rtx this_insn_cc0 = 0;
4605 enum machine_mode this_insn_cc0_mode = VOIDmode;
4609 struct table_elt *src_eqv_elt = 0;
4610 int src_eqv_volatile = 0;
4611 int src_eqv_in_memory = 0;
4612 unsigned src_eqv_hash = 0;
4614 struct set *sets = (struct set *) 0;
4618 /* Find all the SETs and CLOBBERs in this instruction.
4619 Record all the SETs in the array `sets' and count them.
4620 Also determine whether there is a CLOBBER that invalidates
4621 all memory references, or all references at varying addresses. */
4625 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4627 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4628 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4629 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4633 if (GET_CODE (x) == SET)
4635 sets = alloca (sizeof (struct set));
4638 /* Ignore SETs that are unconditional jumps.
4639 They never need cse processing, so this does not hurt.
4640 The reason is not efficiency but rather
4641 so that we can test at the end for instructions
4642 that have been simplified to unconditional jumps
4643 and not be misled by unchanged instructions
4644 that were unconditional jumps to begin with. */
4645 if (SET_DEST (x) == pc_rtx
4646 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4649 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4650 The hard function value register is used only once, to copy to
4651 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4652 Ensure we invalidate the destination register. On the 80386 no
4653 other code would invalidate it since it is a fixed_reg.
4654 We need not check the return of apply_change_group; see canon_reg. */
4656 else if (GET_CODE (SET_SRC (x)) == CALL)
4658 canon_reg (SET_SRC (x), insn);
4659 apply_change_group ();
4660 fold_rtx (SET_SRC (x), insn);
4661 invalidate (SET_DEST (x), VOIDmode);
4666 else if (GET_CODE (x) == PARALLEL)
4668 int lim = XVECLEN (x, 0);
4670 sets = alloca (lim * sizeof (struct set));
4672 /* Find all regs explicitly clobbered in this insn,
4673 and ensure they are not replaced with any other regs
4674 elsewhere in this insn.
4675 When a reg that is clobbered is also used for input,
4676 we should presume that that is for a reason,
4677 and we should not substitute some other register
4678 which is not supposed to be clobbered.
4679 Therefore, this loop cannot be merged into the one below
4680 because a CALL may precede a CLOBBER and refer to the
4681 value clobbered. We must not let a canonicalization do
4682 anything in that case. */
4683 for (i = 0; i < lim; i++)
4685 rtx y = XVECEXP (x, 0, i);
4686 if (GET_CODE (y) == CLOBBER)
4688 rtx clobbered = XEXP (y, 0);
4690 if (REG_P (clobbered)
4691 || GET_CODE (clobbered) == SUBREG)
4692 invalidate (clobbered, VOIDmode);
4693 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4694 || GET_CODE (clobbered) == ZERO_EXTRACT)
4695 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4699 for (i = 0; i < lim; i++)
4701 rtx y = XVECEXP (x, 0, i);
4702 if (GET_CODE (y) == SET)
4704 /* As above, we ignore unconditional jumps and call-insns and
4705 ignore the result of apply_change_group. */
4706 if (GET_CODE (SET_SRC (y)) == CALL)
4708 canon_reg (SET_SRC (y), insn);
4709 apply_change_group ();
4710 fold_rtx (SET_SRC (y), insn);
4711 invalidate (SET_DEST (y), VOIDmode);
4713 else if (SET_DEST (y) == pc_rtx
4714 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4717 sets[n_sets++].rtl = y;
4719 else if (GET_CODE (y) == CLOBBER)
4721 /* If we clobber memory, canon the address.
4722 This does nothing when a register is clobbered
4723 because we have already invalidated the reg. */
4724 if (MEM_P (XEXP (y, 0)))
4725 canon_reg (XEXP (y, 0), NULL_RTX);
4727 else if (GET_CODE (y) == USE
4728 && ! (REG_P (XEXP (y, 0))
4729 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4730 canon_reg (y, NULL_RTX);
4731 else if (GET_CODE (y) == CALL)
4733 /* The result of apply_change_group can be ignored; see canon_reg. */
4735 canon_reg (y, insn);
4736 apply_change_group ();
4741 else if (GET_CODE (x) == CLOBBER)
4743 if (MEM_P (XEXP (x, 0)))
4744 canon_reg (XEXP (x, 0), NULL_RTX);
4747 /* Canonicalize a USE of a pseudo register or memory location. */
4748 else if (GET_CODE (x) == USE
4749 && ! (REG_P (XEXP (x, 0))
4750 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4751 canon_reg (XEXP (x, 0), NULL_RTX);
4752 else if (GET_CODE (x) == CALL)
4754 /* The result of apply_change_group can be ignored; see canon_reg. */
4755 canon_reg (x, insn);
4756 apply_change_group ();
4760 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4761 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4762 is handled specially for this case, and if it isn't set, then there will
4763 be no equivalence for the destination. */
4764 if (n_sets == 1 && REG_NOTES (insn) != 0
4765 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4766 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4767 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4769 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4770 XEXP (tem, 0) = src_eqv;
4773 /* Canonicalize sources and addresses of destinations.
4774 We do this in a separate pass to avoid problems when a MATCH_DUP is
4775 present in the insn pattern. In that case, we want to ensure that
4776 we don't break the duplicate nature of the pattern. So we will replace
4777 both operands at the same time. Otherwise, we would fail to find an
4778 equivalent substitution in the loop calling validate_change below.
4780 We used to suppress canonicalization of DEST if it appears in SRC,
4781 but we don't do this any more. */
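/* E.g. in a pattern of the shape

     (set (match_operand 0) (plus (match_dup 0) (match_operand 1)))

   both occurrences of operand 0 must stay identical, so all the
   replacements are queued with validate_change and applied as one
   group by apply_change_group below. */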
4783 for (i = 0; i < n_sets; i++)
4785 rtx dest = SET_DEST (sets[i].rtl);
4786 rtx src = SET_SRC (sets[i].rtl);
4787 rtx new = canon_reg (src, insn);
4790 sets[i].orig_src = src;
4791 if ((REG_P (new) && REG_P (src)
4792 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4793 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4794 || (insn_code = recog_memoized (insn)) < 0
4795 || insn_data[insn_code].n_dups > 0)
4796 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4798 SET_SRC (sets[i].rtl) = new;
4800 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4802 validate_change (insn, &XEXP (dest, 1),
4803 canon_reg (XEXP (dest, 1), insn), 1);
4804 validate_change (insn, &XEXP (dest, 2),
4805 canon_reg (XEXP (dest, 2), insn), 1);
4808 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4809 || GET_CODE (dest) == ZERO_EXTRACT
4810 || GET_CODE (dest) == SIGN_EXTRACT)
4811 dest = XEXP (dest, 0);
4814 canon_reg (dest, insn);
4817 /* Now that we have done all the replacements, we can apply the change
4818 group and see if they all work. Note that this will cause some
4819 canonicalizations that would have worked individually not to be applied
4820 because some other canonicalization didn't work, but this should not
occur often.
4823 The result of apply_change_group can be ignored; see canon_reg. */
4825 apply_change_group ();
4827 /* Set sets[i].src_elt to the class each source belongs to.
4828 Detect assignments from or to volatile things
4829 and set sets[i] to zero so they will be ignored
4830 in the rest of this function.
4832 Nothing in this loop changes the hash table or the register chains. */
4834 for (i = 0; i < n_sets; i++)
4838 struct table_elt *elt = 0, *p;
4839 enum machine_mode mode;
4842 rtx src_related = 0;
4843 struct table_elt *src_const_elt = 0;
4844 int src_cost = MAX_COST;
4845 int src_eqv_cost = MAX_COST;
4846 int src_folded_cost = MAX_COST;
4847 int src_related_cost = MAX_COST;
4848 int src_elt_cost = MAX_COST;
4849 int src_regcost = MAX_COST;
4850 int src_eqv_regcost = MAX_COST;
4851 int src_folded_regcost = MAX_COST;
4852 int src_related_regcost = MAX_COST;
4853 int src_elt_regcost = MAX_COST;
4854 /* Set nonzero if we need to call force_const_mem with the
4855 contents of src_folded before using it. */
4856 int src_folded_force_flag = 0;
4858 dest = SET_DEST (sets[i].rtl);
4859 src = SET_SRC (sets[i].rtl);
4861 /* If SRC is a constant that has no machine mode,
4862 hash it with the destination's machine mode.
4863 This way we can keep different modes separate. */
4865 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4866 sets[i].mode = mode;
4870 enum machine_mode eqvmode = mode;
4871 if (GET_CODE (dest) == STRICT_LOW_PART)
4872 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4874 hash_arg_in_memory = 0;
4875 src_eqv_hash = HASH (src_eqv, eqvmode);
4877 /* Find the equivalence class for the equivalent expression. */
4880 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4882 src_eqv_volatile = do_not_record;
4883 src_eqv_in_memory = hash_arg_in_memory;
4886 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4887 value of the INNER register, not the destination. So it is not
4888 a valid substitution for the source. But save it for later. */
4889 if (GET_CODE (dest) == STRICT_LOW_PART)
4892 src_eqv_here = src_eqv;
4894 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4895 simplified result, which may not necessarily be valid. */
4896 src_folded = fold_rtx (src, insn);
4899 /* ??? This caused bad code to be generated for the m68k port with -O2.
4900 Suppose src is (CONST_INT -1), and that after truncation src_folded
4901 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4902 At the end we will add src and src_const to the same equivalence
4903 class. We now have 3 and -1 on the same equivalence class. This
4904 causes later instructions to be mis-optimized. */
4905 /* If storing a constant in a bitfield, pre-truncate the constant
4906 so we will be able to record it later. */
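/* E.g. when storing (const_int -1) into a 3-bit ZERO_EXTRACT, the
   field will read back as -1 & ((1 << 3) - 1) == 7, so 7 is the value
   we pre-truncate to and record. */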
4907 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
4908 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
4910 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4912 if (GET_CODE (src) == CONST_INT
4913 && GET_CODE (width) == CONST_INT
4914 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4915 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4917 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4918 << INTVAL (width)) - 1));
4922 /* Compute SRC's hash code, and also notice if it
4923 should not be recorded at all. In that case,
4924 prevent any further processing of this assignment. */
4926 hash_arg_in_memory = 0;
4929 sets[i].src_hash = HASH (src, mode);
4930 sets[i].src_volatile = do_not_record;
4931 sets[i].src_in_memory = hash_arg_in_memory;
4933 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4934 a pseudo, do not record SRC. Using SRC as a replacement for
4935 anything else will be incorrect in that situation. Note that
4936 this usually occurs only for stack slots, in which case all the
4937 RTL would be referring to SRC, so we don't lose any optimization
4938 opportunities by not having SRC in the hash table. */
4941 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
4943 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4944 sets[i].src_volatile = 1;
4947 /* It is no longer clear why we used to do this, but it doesn't
4948 appear to still be needed. So let's try without it since this
4949 code hurts cse'ing widened ops. */
4950 /* If source is a paradoxical subreg (such as QI treated as an SI),
4951 treat it as volatile. It may do the work of an SI in one context
4952 where the extra bits are not being used, but cannot replace an SI
4954 if (GET_CODE (src) == SUBREG
4955 && (GET_MODE_SIZE (GET_MODE (src))
4956 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4957 sets[i].src_volatile = 1;
4960 /* Locate all possible equivalent forms for SRC. Try to replace
4961 SRC in the insn with each cheaper equivalent.
4963 We have the following types of equivalents: SRC itself, a folded
4964 version, a value given in a REG_EQUAL note, or a value related
to a constant.
4967 Each of these equivalents may be part of an additional class
4968 of equivalents (if more than one is in the table, they must be in
4969 the same class; we check for this).
4971 If the source is volatile, we don't do any table lookups.
4973 We note any constant equivalent for possible later use in a
REG_EQUAL note. */
4976 if (!sets[i].src_volatile)
4977 elt = lookup (src, sets[i].src_hash, mode);
4979 sets[i].src_elt = elt;
4981 if (elt && src_eqv_here && src_eqv_elt)
4983 if (elt->first_same_value != src_eqv_elt->first_same_value)
4985 /* The REG_EQUAL is indicating that two formerly distinct
4986 classes are now equivalent. So merge them. */
4987 merge_equiv_classes (elt, src_eqv_elt);
4988 src_eqv_hash = HASH (src_eqv, elt->mode);
4989 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4995 else if (src_eqv_elt)
4998 /* Try to find a constant somewhere and record it in `src_const'.
4999 Record its table element, if any, in `src_const_elt'. Look in
5000 any known equivalences first. (If the constant is not in the
5001 table, also set `sets[i].src_const_hash'). */
5003 for (p = elt->first_same_value; p; p = p->next_same_value)
5007 src_const_elt = elt;
5012 && (CONSTANT_P (src_folded)
5013 /* Consider (minus (label_ref L1) (label_ref L2)) as
5014 "constant" here so we will record it. This allows us
5015 to fold switch statements when an ADDR_DIFF_VEC is used. */
5016 || (GET_CODE (src_folded) == MINUS
5017 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5018 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5019 src_const = src_folded, src_const_elt = elt;
5020 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5021 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5023 /* If we don't know if the constant is in the table, get its
5024 hash code and look it up. */
5025 if (src_const && src_const_elt == 0)
5027 sets[i].src_const_hash = HASH (src_const, mode);
5028 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5031 sets[i].src_const = src_const;
5032 sets[i].src_const_elt = src_const_elt;
5034 /* If the constant and our source are both in the table, mark them as
5035 equivalent. Otherwise, if a constant is in the table but the source
5036 isn't, set ELT to it. */
5037 if (src_const_elt && elt
5038 && src_const_elt->first_same_value != elt->first_same_value)
5039 merge_equiv_classes (elt, src_const_elt);
5040 else if (src_const_elt && elt == 0)
5041 elt = src_const_elt;
5043 /* See if there is a register linearly related to a constant
5044 equivalent of SRC. */
5046 && (GET_CODE (src_const) == CONST
5047 || (src_const_elt && src_const_elt->related_value != 0)))
5049 src_related = use_related_value (src_const, src_const_elt);
5052 struct table_elt *src_related_elt
5053 = lookup (src_related, HASH (src_related, mode), mode);
5054 if (src_related_elt && elt)
5056 if (elt->first_same_value
5057 != src_related_elt->first_same_value)
5058 /* This can occur when we previously saw a CONST
5059 involving a SYMBOL_REF and then see the SYMBOL_REF
5060 twice. Merge the involved classes. */
5061 merge_equiv_classes (elt, src_related_elt);
5064 src_related_elt = 0;
5066 else if (src_related_elt && elt == 0)
5067 elt = src_related_elt;
5071 /* See if we have a CONST_INT that is already in a register in a
wider mode. */
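/* E.g. if we need (const_int 5) in QImode and (reg:SI 100) is already
   known to contain 5, (subreg:QI (reg:SI 100) 0) is available and may
   be cheaper than loading the constant again (register number
   illustrative). */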
5074 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5075 && GET_MODE_CLASS (mode) == MODE_INT
5076 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5078 enum machine_mode wider_mode;
5080 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5081 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5082 && src_related == 0;
5083 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5085 struct table_elt *const_elt
5086 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5091 for (const_elt = const_elt->first_same_value;
5092 const_elt; const_elt = const_elt->next_same_value)
5093 if (REG_P (const_elt->exp))
5095 src_related = gen_lowpart (mode,
5102 /* Another possibility is that we have an AND with a constant in
5103 a mode narrower than a word. If so, it might have been generated
5104 as part of an "if" which would narrow the AND. If we already
5105 have done the AND in a wider mode, we can use a SUBREG of that
register. */
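/* E.g. for (and:QI X (const_int 15)): if (and:SI X' (const_int 15))
   was already computed into a register, where X is the QImode lowpart
   of X', the QImode lowpart of that register has the same value. */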
5108 if (flag_expensive_optimizations && ! src_related
5109 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5110 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5112 enum machine_mode tmode;
5113 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5115 for (tmode = GET_MODE_WIDER_MODE (mode);
5116 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5117 tmode = GET_MODE_WIDER_MODE (tmode))
5119 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
5120 struct table_elt *larger_elt;
5124 PUT_MODE (new_and, tmode);
5125 XEXP (new_and, 0) = inner;
5126 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5127 if (larger_elt == 0)
5130 for (larger_elt = larger_elt->first_same_value;
5131 larger_elt; larger_elt = larger_elt->next_same_value)
5132 if (REG_P (larger_elt->exp))
5135 = gen_lowpart (mode, larger_elt->exp);
5145 #ifdef LOAD_EXTEND_OP
5146 /* See if a MEM has already been loaded with a widening operation;
5147 if it has, we can use a subreg of that. Many CISC machines
5148 also have such operations, but this is only likely to be
5149 beneficial on these machines. */
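/* E.g. if LOAD_EXTEND_OP (QImode) is ZERO_EXTEND and
   (zero_extend:SI (mem:QI addr)) was loaded earlier into (reg:SI 100),
   then (mem:QI addr) is available as (subreg:QI (reg:SI 100) 0)
   without another memory access (register number illustrative). */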
5151 if (flag_expensive_optimizations && src_related == 0
5152 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5153 && GET_MODE_CLASS (mode) == MODE_INT
5154 && MEM_P (src) && ! do_not_record
5155 && LOAD_EXTEND_OP (mode) != UNKNOWN)
5157 enum machine_mode tmode;
5159 /* Set what we are trying to extend and the operation it might
5160 have been extended with. */
5161 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5162 XEXP (memory_extend_rtx, 0) = src;
5164 for (tmode = GET_MODE_WIDER_MODE (mode);
5165 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5166 tmode = GET_MODE_WIDER_MODE (tmode))
5168 struct table_elt *larger_elt;
5170 PUT_MODE (memory_extend_rtx, tmode);
5171 larger_elt = lookup (memory_extend_rtx,
5172 HASH (memory_extend_rtx, tmode), tmode);
5173 if (larger_elt == 0)
5176 for (larger_elt = larger_elt->first_same_value;
5177 larger_elt; larger_elt = larger_elt->next_same_value)
5178 if (REG_P (larger_elt->exp))
5180 src_related = gen_lowpart (mode,
5189 #endif /* LOAD_EXTEND_OP */
5191 if (src == src_folded)
5194 /* At this point, ELT, if nonzero, points to a class of expressions
5195 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5196 and SRC_RELATED, if nonzero, each contain additional equivalent
5197 expressions. Prune these latter expressions by deleting expressions
5198 already in the equivalence class.
5200 Check for an equivalent identical to the destination. If found,
5201 this is the preferred equivalent since it will likely lead to
5202 elimination of the insn. Indicate this by placing it in
`src_related'. */
5206 elt = elt->first_same_value;
5207 for (p = elt; p; p = p->next_same_value)
5209 enum rtx_code code = GET_CODE (p->exp);
5211 /* If the expression is not valid, ignore it. Then we do not
5212 have to check for validity below. In most cases, we can use
5213 `rtx_equal_p', since canonicalization has already been done. */
5214 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
5217 /* Also skip paradoxical subregs, unless that's what we're
looking for. */
5220 && (GET_MODE_SIZE (GET_MODE (p->exp))
5221 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5223 && GET_CODE (src) == SUBREG
5224 && GET_MODE (src) == GET_MODE (p->exp)
5225 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5226 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5229 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5231 else if (src_folded && GET_CODE (src_folded) == code
5232 && rtx_equal_p (src_folded, p->exp))
5234 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5235 && rtx_equal_p (src_eqv_here, p->exp))
5237 else if (src_related && GET_CODE (src_related) == code
5238 && rtx_equal_p (src_related, p->exp))
5241 /* This is the same as the destination of the insn; we want
5242 to prefer it. Copy it to src_related. The code below will
5243 then give it a negative cost. */
5244 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5248 /* Find the cheapest valid equivalent, trying all the available
5249 possibilities. Prefer items not in the hash table to ones
5250 that are when they are equal cost. Note that we can never
5251 worsen an insn as the current contents will also succeed.
5252 If we find an equivalent identical to the destination, use it as best,
5253 since this insn will probably be eliminated in that case. */
5256 if (rtx_equal_p (src, dest))
5257 src_cost = src_regcost = -1;
5260 src_cost = COST (src);
5261 src_regcost = approx_reg_cost (src);
5267 if (rtx_equal_p (src_eqv_here, dest))
5268 src_eqv_cost = src_eqv_regcost = -1;
5271 src_eqv_cost = COST (src_eqv_here);
5272 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5278 if (rtx_equal_p (src_folded, dest))
5279 src_folded_cost = src_folded_regcost = -1;
5282 src_folded_cost = COST (src_folded);
5283 src_folded_regcost = approx_reg_cost (src_folded);
5289 if (rtx_equal_p (src_related, dest))
5290 src_related_cost = src_related_regcost = -1;
5293 src_related_cost = COST (src_related);
5294 src_related_regcost = approx_reg_cost (src_related);
5298 /* If this was an indirect jump insn, a known label will really be
5299 cheaper even though it looks more expensive. */
5300 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5301 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5303 /* Terminate loop when replacement made. This must terminate since
5304 the current contents will be tested and will always be valid. */
5309 /* Skip invalid entries. */
5310 while (elt && !REG_P (elt->exp)
5311 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5312 elt = elt->next_same_value;
5314 /* A paradoxical subreg would be bad here: it'll be the right
5315 size, but later may be adjusted so that the upper bits aren't
5316 what we want. So reject it. */
5318 && GET_CODE (elt->exp) == SUBREG
5319 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5320 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5321 /* It is okay, though, if the rtx we're trying to match
5322 will ignore any of the bits we can't predict. */
5324 && GET_CODE (src) == SUBREG
5325 && GET_MODE (src) == GET_MODE (elt->exp)
5326 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5327 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5329 elt = elt->next_same_value;
5335 src_elt_cost = elt->cost;
5336 src_elt_regcost = elt->regcost;
5339 /* Find cheapest and skip it for the next time. For items
5340 of equal cost, use this order:
5341 src_folded, src, src_eqv, src_related and hash table entry. */
5343 && preferable (src_folded_cost, src_folded_regcost,
5344 src_cost, src_regcost) <= 0
5345 && preferable (src_folded_cost, src_folded_regcost,
5346 src_eqv_cost, src_eqv_regcost) <= 0
5347 && preferable (src_folded_cost, src_folded_regcost,
5348 src_related_cost, src_related_regcost) <= 0
5349 && preferable (src_folded_cost, src_folded_regcost,
5350 src_elt_cost, src_elt_regcost) <= 0)
5352 trial = src_folded, src_folded_cost = MAX_COST;
5353 if (src_folded_force_flag)
5355 rtx forced = force_const_mem (mode, trial);
5361 && preferable (src_cost, src_regcost,
5362 src_eqv_cost, src_eqv_regcost) <= 0
5363 && preferable (src_cost, src_regcost,
5364 src_related_cost, src_related_regcost) <= 0
5365 && preferable (src_cost, src_regcost,
5366 src_elt_cost, src_elt_regcost) <= 0)
5367 trial = src, src_cost = MAX_COST;
5368 else if (src_eqv_here
5369 && preferable (src_eqv_cost, src_eqv_regcost,
5370 src_related_cost, src_related_regcost) <= 0
5371 && preferable (src_eqv_cost, src_eqv_regcost,
5372 src_elt_cost, src_elt_regcost) <= 0)
5373 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5374 else if (src_related
5375 && preferable (src_related_cost, src_related_regcost,
5376 src_elt_cost, src_elt_regcost) <= 0)
5377 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5380 trial = copy_rtx (elt->exp);
5381 elt = elt->next_same_value;
5382 src_elt_cost = MAX_COST;
5385 /* We don't normally have an insn matching (set (pc) (pc)), so
5386 check for this separately here. We will delete such an
insn below.
5389 For other cases such as a table jump or conditional jump
5390 where we know the ultimate target, go ahead and replace the
5391 operand. While that may not make a valid insn, we will
5392 reemit the jump below (and also insert any necessary
barriers). */
5394 if (n_sets == 1 && dest == pc_rtx
&& (trial == pc_rtx
5396 || (GET_CODE (trial) == LABEL_REF
5397 && ! condjump_p (insn))))
5399 /* Don't substitute non-local labels, this confuses CFG. */
5400 if (GET_CODE (trial) == LABEL_REF
5401 && LABEL_REF_NONLOCAL_P (trial))
5404 SET_SRC (sets[i].rtl) = trial;
5405 cse_jumps_altered = 1;
5409 /* Look for a substitution that makes a valid insn. */
5410 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5412 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5414 /* If we just made a substitution inside a libcall, then we
5415 need to make the same substitution in any notes attached
5416 to the RETVAL insn. */
5418 && (REG_P (sets[i].orig_src)
5419 || GET_CODE (sets[i].orig_src) == SUBREG
5420 || MEM_P (sets[i].orig_src)))
5422 rtx note = find_reg_equal_equiv_note (libcall_insn);
5424 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
5429 /* The result of apply_change_group can be ignored; see
5432 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5433 apply_change_group ();
5437 /* If we previously found constant pool entries for
5438 constants and this is a constant, try making a
5439 pool entry. Put it in src_folded unless we already have done
5440 this since that is where it likely came from. */
5442 else if (constant_pool_entries_cost
5443 && CONSTANT_P (trial)
5444 /* Reject cases that will abort in decode_rtx_const.
5445 On the alpha when simplifying a switch, we get
5446 (const (truncate (minus (label_ref) (label_ref)))). */
5447 && ! (GET_CODE (trial) == CONST
5448 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5449 /* Likewise on IA-64, except without the truncate. */
5450 && ! (GET_CODE (trial) == CONST
5451 && GET_CODE (XEXP (trial, 0)) == MINUS
5452 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5453 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5455 || (!MEM_P (src_folded)
5456 && ! src_folded_force_flag))
5457 && GET_MODE_CLASS (mode) != MODE_CC
5458 && mode != VOIDmode)
5460 src_folded_force_flag = 1;
5462 src_folded_cost = constant_pool_entries_cost;
5463 src_folded_regcost = constant_pool_entries_regcost;
5467 src = SET_SRC (sets[i].rtl);
5469 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5470 However, there is an important exception: If both are registers
5471 that are not the head of their equivalence class, replace SET_SRC
5472 with the head of the class. If we do not do this, we will have
5473 both registers live over a portion of the basic block. This way,
5474 their lifetimes will likely abut instead of overlapping. */
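/* E.g. after earlier substitutions the insn may read
   (set (reg 100) (reg 100)) while the class is headed by reg 99;
   rewriting it as (set (reg 100) (reg 99)) makes reg 100 a copy of
   the head, so the two lifetimes can abut instead of overlapping
   (register numbers are illustrative). */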
5476 && REGNO_QTY_VALID_P (REGNO (dest)))
5478 int dest_q = REG_QTY (REGNO (dest));
5479 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5481 if (dest_ent->mode == GET_MODE (dest)
5482 && dest_ent->first_reg != REGNO (dest)
5483 && REG_P (src) && REGNO (src) == REGNO (dest)
5484 /* Don't do this if the original insn had a hard reg as
5485 SET_SRC or SET_DEST. */
5486 && (!REG_P (sets[i].src)
5487 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5488 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5489 /* We can't call canon_reg here because it won't do anything if
5490 SRC is a hard register. */
5492 int src_q = REG_QTY (REGNO (src));
5493 struct qty_table_elem *src_ent = &qty_table[src_q];
5494 int first = src_ent->first_reg;
5496 = (first >= FIRST_PSEUDO_REGISTER
5497 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5499 /* We must use validate-change even for this, because this
5500 might be a special no-op instruction, suitable only to
tag notes onto the insn. */
5502 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5505 /* If we had a constant that is cheaper than what we are now
5506 setting SRC to, use that constant. We ignored it when we
5507 thought we could make this into a no-op. */
5508 if (src_const && COST (src_const) < COST (src)
5509 && validate_change (insn, &SET_SRC (sets[i].rtl),
5516 /* If we made a change, recompute SRC values. */
5517 if (src != sets[i].src)
5521 hash_arg_in_memory = 0;
5523 sets[i].src_hash = HASH (src, mode);
5524 sets[i].src_volatile = do_not_record;
5525 sets[i].src_in_memory = hash_arg_in_memory;
5526 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5529 /* If this is a single SET, we are setting a register, and we have an
5530 equivalent constant, we want to add a REG_NOTE. We don't want
5531 to write a REG_EQUAL note for a constant pseudo since verifying that
5532 that pseudo hasn't been eliminated is a pain. Such a note also
5533 won't help anything.
5535 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5536 which can be created for a reference to a compile time computable
5537 entry in a jump table. */
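/* E.g. for (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))
   where reg 101 is known to equal 8, we attach REG_EQUAL (const_int 12)
   so later passes can use the constant value even after reg 101 is
   gone (register numbers illustrative). */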
5539 if (n_sets == 1 && src_const && REG_P (dest)
5540 && !REG_P (src_const)
5541 && ! (GET_CODE (src_const) == CONST
5542 && GET_CODE (XEXP (src_const, 0)) == MINUS
5543 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5544 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5546 /* We only want a REG_EQUAL note if src_const != src. */
5547 if (! rtx_equal_p (src, src_const))
5549 /* Make sure that the rtx is not shared. */
5550 src_const = copy_rtx (src_const);
5552 /* Record the actual constant value in a REG_EQUAL note,
5553 making a new one if one does not already exist. */
5554 set_unique_reg_note (insn, REG_EQUAL, src_const);
5558 /* Now deal with the destination. */
5561 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5562 to the MEM or REG within it. */
5563 while (GET_CODE (dest) == SIGN_EXTRACT
5564 || GET_CODE (dest) == ZERO_EXTRACT
5565 || GET_CODE (dest) == SUBREG
5566 || GET_CODE (dest) == STRICT_LOW_PART)
5567 dest = XEXP (dest, 0);
5569 sets[i].inner_dest = dest;
5573 #ifdef PUSH_ROUNDING
5574 /* Stack pushes invalidate the stack pointer. */
5575 rtx addr = XEXP (dest, 0);
5576 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5577 && XEXP (addr, 0) == stack_pointer_rtx)
5578 invalidate (stack_pointer_rtx, Pmode);
5580 dest = fold_rtx (dest, insn);
5583 /* Compute the hash code of the destination now,
5584 before the effects of this instruction are recorded,
5585 since the register values used in the address computation
5586 are those before this instruction. */
5587 sets[i].dest_hash = HASH (dest, mode);
5589 /* Don't enter a bit-field in the hash table
5590 because the value in it after the store
5591 may not equal what was stored, due to truncation. */
5593 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5594 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5596 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5598 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5599 && GET_CODE (width) == CONST_INT
5600 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5601 && ! (INTVAL (src_const)
5602 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5603 /* Exception: if the value is constant,
5604 and it won't be truncated, record it. */
5608 /* This is chosen so that the destination will be invalidated
5609 but no new value will be recorded.
5610 We must invalidate because sometimes constant
5611 values can be recorded for bitfields. */
5612 sets[i].src_elt = 0;
5613 sets[i].src_volatile = 1;
5619 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5621 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5623 /* One less use of the label this insn used to jump to. */
5625 cse_jumps_altered = 1;
5626 /* No more processing for this set. */
5630 /* If this SET is now setting PC to a label, we know it used to
5631 be a conditional or computed branch. */
5632 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5633 && !LABEL_REF_NONLOCAL_P (src))
5635 /* Now emit a BARRIER after the unconditional jump. */
5636 if (NEXT_INSN (insn) == 0
5637 || !BARRIER_P (NEXT_INSN (insn)))
5638 emit_barrier_after (insn);
5640 /* We reemit the jump in as many cases as possible just in
5641 case the form of an unconditional jump is significantly
5642 different from a computed jump or conditional jump.
5644 If this insn has multiple sets, then reemitting the
5645 jump is nontrivial. So instead we just force rerecognition
5646 and hope for the best. */
5651 new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5652 JUMP_LABEL (new) = XEXP (src, 0);
5653 LABEL_NUSES (XEXP (src, 0))++;
5655 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5656 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5659 XEXP (note, 1) = NULL_RTX;
5660 REG_NOTES (new) = note;
5666 /* Now emit a BARRIER after the unconditional jump. */
5667 if (NEXT_INSN (insn) == 0
5668 || !BARRIER_P (NEXT_INSN (insn)))
5669 emit_barrier_after (insn);
5672 INSN_CODE (insn) = -1;
5674 /* Do not bother deleting any unreachable code,
5675 let jump/flow do that. */
5677 cse_jumps_altered = 1;
5681 /* If destination is volatile, invalidate it and then do no further
5682 processing for this assignment. */
5684 else if (do_not_record)
5686 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5687 invalidate (dest, VOIDmode);
5688 else if (MEM_P (dest))
5690 /* Outgoing arguments for a libcall don't
5691 affect any recorded expressions. */
5692 if (! libcall_insn || insn == libcall_insn)
5693 invalidate (dest, VOIDmode);
5695 else if (GET_CODE (dest) == STRICT_LOW_PART
5696 || GET_CODE (dest) == ZERO_EXTRACT)
5697 invalidate (XEXP (dest, 0), GET_MODE (dest));
5701 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5702 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5705 /* If setting CC0, record what it was set to, or a constant, if it
5706 is equivalent to a constant. If it is being set to a floating-point
5707 value, make a COMPARE with the appropriate constant of 0. If we
5708 don't do this, later code can interpret this as a test against
5709 const0_rtx, which can cause problems if we try to put it into an
5710 insn as a floating-point operand. */
5711 if (dest == cc0_rtx)
5713 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5714 this_insn_cc0_mode = mode;
5715 if (FLOAT_MODE_P (mode))
5716 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5722 /* Now enter all non-volatile source expressions in the hash table
5723 if they are not already present.
5724 Record their equivalence classes in src_elt.
5725 This way we can insert the corresponding destinations into
5726 the same classes even if the actual sources are no longer in them
5727 (having been invalidated). */
5729 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5730 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5732 struct table_elt *elt;
5733 struct table_elt *classp = sets[0].src_elt;
5734 rtx dest = SET_DEST (sets[0].rtl);
5735 enum machine_mode eqvmode = GET_MODE (dest);
5737 if (GET_CODE (dest) == STRICT_LOW_PART)
5739 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5742 if (insert_regs (src_eqv, classp, 0))
5744 rehash_using_reg (src_eqv);
5745 src_eqv_hash = HASH (src_eqv, eqvmode);
5747 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5748 elt->in_memory = src_eqv_in_memory;
5751 /* Check to see if src_eqv_elt is the same as a set source which
5752 does not yet have an elt, and if so set the elt of the set source
5754 for (i = 0; i < n_sets; i++)
5755 if (sets[i].rtl && sets[i].src_elt == 0
5756 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5757 sets[i].src_elt = src_eqv_elt;
5760 for (i = 0; i < n_sets; i++)
5761 if (sets[i].rtl && ! sets[i].src_volatile
5762 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5764 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5766 /* REG_EQUAL in setting a STRICT_LOW_PART
5767 gives an equivalent for the entire destination register,
5768 not just for the subreg being stored in now.
5769 This is a more interesting equivalence, so we arrange later
5770 to treat the entire reg as the destination. */
5771 sets[i].src_elt = src_eqv_elt;
5772 sets[i].src_hash = src_eqv_hash;
5776 /* Insert source and constant equivalent into hash table, if not
5778 struct table_elt *classp = src_eqv_elt;
5779 rtx src = sets[i].src;
5780 rtx dest = SET_DEST (sets[i].rtl);
5781 enum machine_mode mode
5782 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5784 /* It's possible that we have a source value known to be
5785 constant but don't have a REG_EQUAL note on the insn.
5786 Lack of a note will mean src_eqv_elt will be NULL. This
5787 can happen where we've generated a SUBREG to access a
5788 CONST_INT that is already in a register in a wider mode.
5789 Ensure that the source expression is put in the proper
constant class. */
if (!classp)
5792 classp = sets[i].src_const_elt;
5794 if (sets[i].src_elt == 0)
5796 /* Don't put a hard register source into the table if this is
5797 the last insn of a libcall. In this case, we only need
5798 to put src_eqv_elt in src_elt. */
5799 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5801 struct table_elt *elt;
5803 /* Note that these insert_regs calls cannot remove
5804 any of the src_elt's, because they would have failed to
5805 match if not still valid. */
5806 if (insert_regs (src, classp, 0))
5808 rehash_using_reg (src);
5809 sets[i].src_hash = HASH (src, mode);
5811 elt = insert (src, classp, sets[i].src_hash, mode);
5812 elt->in_memory = sets[i].src_in_memory;
5813 sets[i].src_elt = classp = elt;
5816 sets[i].src_elt = classp;
5818 if (sets[i].src_const && sets[i].src_const_elt == 0
5819 && src != sets[i].src_const
5820 && ! rtx_equal_p (sets[i].src_const, src))
5821 sets[i].src_elt = insert (sets[i].src_const, classp,
5822 sets[i].src_const_hash, mode);
5825 else if (sets[i].src_elt == 0)
5826 /* If we did not insert the source into the hash table (e.g., it was
5827 volatile), note the equivalence class for the REG_EQUAL value, if any,
5828 so that the destination goes into that class. */
5829 sets[i].src_elt = src_eqv_elt;
5831 invalidate_from_clobbers (x);
5833 /* Some registers are invalidated by subroutine calls. Memory is
5834 invalidated by non-constant calls. */
5838 if (! CONST_OR_PURE_CALL_P (insn))
5839 invalidate_memory ();
5840 invalidate_for_call ();
5843 /* Now invalidate everything set by this instruction.
5844 If a SUBREG or other funny destination is being set,
5845 sets[i].rtl is still nonzero, so here we invalidate the reg
5846 a part of which is being set. */
5848 for (i = 0; i < n_sets; i++)
5851 /* We can't use the inner dest, because the mode associated with
5852 a ZERO_EXTRACT is significant. */
5853 rtx dest = SET_DEST (sets[i].rtl);
5855 /* Needed for registers to remove the register from its
5856 previous quantity's chain.
5857 Needed for memory if this is a nonvarying address, unless
5858 we have just done an invalidate_memory that covers even those. */
5859 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5860 invalidate (dest, VOIDmode);
5861 else if (MEM_P (dest))
5863 /* Outgoing arguments for a libcall don't
5864 affect any recorded expressions. */
5865 if (! libcall_insn || insn == libcall_insn)
5866 invalidate (dest, VOIDmode);
5868 else if (GET_CODE (dest) == STRICT_LOW_PART
5869 || GET_CODE (dest) == ZERO_EXTRACT)
5870 invalidate (XEXP (dest, 0), GET_MODE (dest));
5873 /* A volatile ASM invalidates everything. */
5874 if (NONJUMP_INSN_P (insn)
5875 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5876 && MEM_VOLATILE_P (PATTERN (insn)))
5877 flush_hash_table ();
5879 /* Make sure registers mentioned in destinations
5880 are safe for use in an expression to be inserted.
5881 This removes from the hash table
5882 any invalid entry that refers to one of these registers.
5884 We don't care about the return value from mention_regs because
5885 we are going to hash the SET_DEST values unconditionally. */
5887 for (i = 0; i < n_sets; i++)
5891 rtx x = SET_DEST (sets[i].rtl);
5897 /* We used to rely on all references to a register becoming
5898 inaccessible when a register changes to a new quantity,
5899 since that changes the hash code. However, that is not
5900 safe, since after HASH_SIZE new quantities we get a
5901 hash 'collision' of a register with its own invalid
5902 entries. And since SUBREGs have been changed not to
5903 change their hash code with the hash code of the register,
5904 it wouldn't work any longer at all. So we have to check
5905 for any invalid references lying around now.
5906 This code is similar to the REG case in mention_regs,
5907 but it knows that reg_tick has been incremented, and
5908 it leaves reg_in_table as -1. */
5909 unsigned int regno = REGNO (x);
5910 unsigned int endregno
5911 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5912 : hard_regno_nregs[regno][GET_MODE (x)]);
5915 for (i = regno; i < endregno; i++)
5917 if (REG_IN_TABLE (i) >= 0)
5919 remove_invalid_refs (i);
5920 REG_IN_TABLE (i) = -1;
5927 /* We may have just removed some of the src_elt's from the hash table.
5928 So replace each one with the current head of the same class. */
5930 for (i = 0; i < n_sets; i++)
5933 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5934 /* If elt was removed, find current head of same class,
5935 or 0 if nothing remains of that class. */
5937 struct table_elt *elt = sets[i].src_elt;
5939 while (elt && elt->prev_same_value)
5940 elt = elt->prev_same_value;
5942 while (elt && elt->first_same_value == 0)
5943 elt = elt->next_same_value;
5944 sets[i].src_elt = elt ? elt->first_same_value : 0;
5948 /* Now insert the destinations into their equivalence classes. */
5950 for (i = 0; i < n_sets; i++)
5953 rtx dest = SET_DEST (sets[i].rtl);
5954 struct table_elt *elt;
5956 /* Don't record value if we are not supposed to risk allocating
5957 floating-point values in registers that might be wider than
memory. */
5959 if ((flag_float_store
5961 && FLOAT_MODE_P (GET_MODE (dest)))
5962 /* Don't record BLKmode values, because we don't know the
5963 size of it, and can't be sure that other BLKmode values
5964 have the same or smaller size. */
5965 || GET_MODE (dest) == BLKmode
5966 /* Don't record values of destinations set inside a libcall block
5967 since we might delete the libcall. Things should have been set
5968 up so we won't want to reuse such a value, but we play it safe
here. */
5971 /* If we didn't put a REG_EQUAL value or a source into the hash
5972 table, there is no point in recording DEST.
5973 || sets[i].src_elt == 0
5974 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5975 or SIGN_EXTEND, don't record DEST since it can cause
5976 some tracking to be wrong.
5978 ??? Think about this more later. */
5979 || (GET_CODE (dest) == SUBREG
5980 && (GET_MODE_SIZE (GET_MODE (dest))
5981 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5982 && (GET_CODE (sets[i].src) == SIGN_EXTEND
5983 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
5986 /* STRICT_LOW_PART isn't part of the value BEING set,
5987 and neither is the SUBREG inside it.
5988 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5989 if (GET_CODE (dest) == STRICT_LOW_PART)
5990 dest = SUBREG_REG (XEXP (dest, 0));
5992 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5993 /* Registers must also be inserted into chains for quantities. */
5994 if (insert_regs (dest, sets[i].src_elt, 1))
5996 /* If `insert_regs' changes something, the hash code must be
recalculated. */
5998 rehash_using_reg (dest);
5999 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6002 elt = insert (dest, sets[i].src_elt,
6003 sets[i].dest_hash, GET_MODE (dest));
6005 elt->in_memory = (MEM_P (sets[i].inner_dest)
6006 && !MEM_READONLY_P (sets[i].inner_dest));
6008 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6009 narrower than M2, and both M1 and M2 are the same number of words,
6010 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6011 make that equivalence as well.
6013 However, BAR may have equivalences for which gen_lowpart
6014 will produce a simpler value than gen_lowpart applied to
6015 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6016 BAR's equivalences. If we don't get a simplified form, make
6017 the SUBREG. It will not be used in an equivalence, but will
6018 cause two similar assignments to be detected.
6020 Note the loop below will find SUBREG_REG (DEST) since we have
6021 already entered SRC and DEST of the SET in the table. */
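/* E.g. on a 32-bit target, (set (subreg:SI (reg:HI 100) 0) (reg:SI 101))
   has M1 == SImode and M2 == HImode, one word each, so we also record
   (reg:HI 100) as equivalent to (subreg:HI (reg:SI 101) 0), or to any
   simpler form one of reg 101's equivalences yields. */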
6023 if (GET_CODE (dest) == SUBREG
6024 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6026 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6027 && (GET_MODE_SIZE (GET_MODE (dest))
6028 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6029 && sets[i].src_elt != 0)
6031 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6032 struct table_elt *elt, *classp = 0;
6034 for (elt = sets[i].src_elt->first_same_value; elt;
6035 elt = elt->next_same_value)
6039 struct table_elt *src_elt;
6042 /* Ignore invalid entries. */
6043 if (!REG_P (elt->exp)
6044 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
6047 /* We may have already been playing subreg games. If the
6048 mode is already correct for the destination, use it. */
6049 if (GET_MODE (elt->exp) == new_mode)
6053 /* Calculate big endian correction for the SUBREG_BYTE.
6054 We have already checked that M1 (GET_MODE (dest))
6055 is not narrower than M2 (new_mode). */
6056 if (BYTES_BIG_ENDIAN)
6057 byte = (GET_MODE_SIZE (GET_MODE (dest))
6058 - GET_MODE_SIZE (new_mode));
6060 new_src = simplify_gen_subreg (new_mode, elt->exp,
6061 GET_MODE (dest), byte);
6064 /* The call to simplify_gen_subreg fails if the value
6065 is VOIDmode, yet we can't do any simplification, e.g.
6066 for EXPR_LISTs denoting function call results.
6067 It is invalid to construct a SUBREG with a VOIDmode
6068 SUBREG_REG, hence a zero new_src means we can't do
this substitution.  */
if (new_src == 0)
  continue;
6073 src_hash = HASH (new_src, new_mode);
6074 src_elt = lookup (new_src, src_hash, new_mode);
/* Put the new source in the hash table if it isn't
   already there.  */
if (src_elt == 0)
  {
6080 if (insert_regs (new_src, classp, 0))
6082 rehash_using_reg (new_src);
6083 src_hash = HASH (new_src, new_mode);
6085 src_elt = insert (new_src, classp, src_hash, new_mode);
6086 src_elt->in_memory = elt->in_memory;
6088 else if (classp && classp != src_elt->first_same_value)
6089 /* Show that two things that we've seen before are
6090 actually the same. */
6091 merge_equiv_classes (src_elt, classp);
6093 classp = src_elt->first_same_value;
/* Ignore invalid entries.  */
while (classp
       && !REG_P (classp->exp)
       && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
  classp = classp->next_same_value;
6103 /* Special handling for (set REG0 REG1) where REG0 is the
6104 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6105 be used in the sequel, so (if easily done) change this insn to
6106 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6107 that computed their value. Then REG1 will become a dead store
6108 and won't cloud the situation for later optimizations.
6110 Do not make this change if REG1 is a hard register, because it will
6111 then be used in the sequel and we may be changing a two-operand insn
6112 into a three-operand insn.
6114 Also do not do this if we are operating on a copy of INSN.
6116 Also don't do this if INSN ends a libcall; this would cause an unrelated
6117 register to be set in the middle of a libcall, and we then get bad code
6118 if the libcall is deleted. */
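/* A sketch of the swap described above (hypothetical pseudo
   registers; REG0 = (reg 100) is the cheaper register):

       (set (reg 101) (plus (reg 102) (reg 103)))
       (set (reg 100) (reg 101))

   becomes

       (set (reg 100) (plus (reg 102) (reg 103)))
       (set (reg 101) (reg 100))

   leaving the copy into (reg 101) as a likely-dead store.  */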
6120 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
6121 && NEXT_INSN (PREV_INSN (insn)) == insn
6122 && REG_P (SET_SRC (sets[0].rtl))
6123 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6124 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6126 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6127 struct qty_table_elem *src_ent = &qty_table[src_q];
6129 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6130 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
rtx prev = insn;
/* Scan for the previous nonnote insn, but stop at a basic
   block boundary.  */
do
  prev = PREV_INSN (prev);
while (prev && NOTE_P (prev)
       && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6142 /* Do not swap the registers around if the previous instruction
6143 attaches a REG_EQUIV note to REG1.
6145 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6146 from the pseudo that originally shadowed an incoming argument
6147 to another register. Some uses of REG_EQUIV might rely on it
6148 being attached to REG1 rather than REG2.
6150 This section previously turned the REG_EQUIV into a REG_EQUAL
6151 note. We cannot do that because REG_EQUIV may provide an
6152 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
6154 if (prev != 0 && NONJUMP_INSN_P (prev)
6155 && GET_CODE (PATTERN (prev)) == SET
6156 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6157 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6159 rtx dest = SET_DEST (sets[0].rtl);
rtx src = SET_SRC (sets[0].rtl);
rtx note;
6163 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6164 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6165 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6166 apply_change_group ();
6168 /* If INSN has a REG_EQUAL note, and this note mentions
6169 REG0, then we must delete it, because the value in
6170 REG0 has changed. If the note's value is REG1, we must
6171 also delete it because that is now this insn's dest. */
note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
if (note != 0
    && (reg_mentioned_p (dest, XEXP (note, 0))
	|| rtx_equal_p (src, XEXP (note, 0))))
  remove_note (insn, note);
6181 /* If this is a conditional jump insn, record any known equivalences due to
6182 the condition being tested. */
if (JUMP_P (insn)
    && n_sets == 1 && GET_CODE (x) == SET
6186 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6187 record_jump_equiv (insn, 0);
#ifdef HAVE_cc0
/* If the previous insn set CC0 and this insn no longer references CC0,
6191 delete the previous insn. Here we use the fact that nothing expects CC0
6192 to be valid over an insn, which is true until the final pass. */
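/* For example (illustrative RTL only): if the previous insn was

       (set (cc0) (compare (reg 100) (const_int 0)))

   and the current pattern no longer mentions cc0, that comparison
   computed a value nothing will ever read, so it is deleted.  */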
6193 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6194 && (tem = single_set (prev_insn)) != 0
6195 && SET_DEST (tem) == cc0_rtx
6196 && ! reg_mentioned_p (cc0_rtx, x))
6197 delete_insn (prev_insn);
6199 prev_insn_cc0 = this_insn_cc0;
prev_insn_cc0_mode = this_insn_cc0_mode;
#endif

prev_insn = insn;
}
6205 /* Remove from the hash table all expressions that reference memory. */
6208 invalidate_memory (void)
int i;
struct table_elt *p, *next;
6213 for (i = 0; i < HASH_SIZE; i++)
6214 for (p = table[i]; p; p = next)
next = p->next_same_hash;
if (p->in_memory)
  remove_from_table (p, i);
6222 /* If ADDR is an address that implicitly affects the stack pointer, return
6223 1 and update the register tables to show the effect. Else, return 0. */
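/* For example (illustrative RTL), a push such as

       (mem:SI (pre_dec:SI (reg:SI sp)))

   implicitly decrements the stack pointer, so anything the tables
   recorded about SP-relative values must be marked stale.  */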
6226 addr_affects_sp_p (rtx addr)
6228 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
6229 && REG_P (XEXP (addr, 0))
6230 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6232 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6234 REG_TICK (STACK_POINTER_REGNUM)++;
6235 /* Is it possible to use a subreg of SP? */
6236 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6239 /* This should be *very* rare. */
6240 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
invalidate (stack_pointer_rtx, VOIDmode);

      return 1;
    }

  return 0;
6249 /* Perform invalidation on the basis of everything about an insn
6250 except for invalidating the actual places that are SET in it.
6251 This includes the places CLOBBERed, and anything that might
6252 alias with something that is SET or CLOBBERed.
6254 X is the pattern of the insn. */
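/* E.g., for a pattern such as (illustrative RTL only)

       (parallel [(set (reg 100) (reg 101))
                  (clobber (reg:CC flags))])

   this function invalidates whatever was known about the clobbered
   flags register; the SET destination itself is handled elsewhere.  */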
6257 invalidate_from_clobbers (rtx x)
6259 if (GET_CODE (x) == CLOBBER)
6261 rtx ref = XEXP (x, 0);
if (REG_P (ref) || GET_CODE (ref) == SUBREG
    || MEM_P (ref))
  invalidate (ref, VOIDmode);
6267 else if (GET_CODE (ref) == STRICT_LOW_PART
6268 || GET_CODE (ref) == ZERO_EXTRACT)
6269 invalidate (XEXP (ref, 0), GET_MODE (ref));
else if (GET_CODE (x) == PARALLEL)
  {
    int i;
6275 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6277 rtx y = XVECEXP (x, 0, i);
6278 if (GET_CODE (y) == CLOBBER)
6280 rtx ref = XEXP (y, 0);
if (REG_P (ref) || GET_CODE (ref) == SUBREG
    || MEM_P (ref))
  invalidate (ref, VOIDmode);
6284 else if (GET_CODE (ref) == STRICT_LOW_PART
6285 || GET_CODE (ref) == ZERO_EXTRACT)
6286 invalidate (XEXP (ref, 0), GET_MODE (ref));
6292 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6293 and replace any registers in them with either an equivalent constant
6294 or the canonical form of the register. If we are inside an address,
6295 only do this if the address remains valid.
6297 OBJECT is 0 except when within a MEM in which case it is the MEM.
6299 Return the replacement for X. */
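/* For example (hypothetical): if (reg 101) is currently known to
   hold (const_int 4), a note

       (expr_list:REG_EQUAL (plus (reg 100) (reg 101)) ...)

   can be rewritten so the PLUS uses (const_int 4) directly, provided
   the change validates.  */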
6302 cse_process_notes (rtx x, rtx object)
6304 enum rtx_code code = GET_CODE (x);
const char *fmt = GET_RTX_FORMAT (code);
int i;
6322 validate_change (x, &XEXP (x, 0),
6323 cse_process_notes (XEXP (x, 0), x), 0);
6328 if (REG_NOTE_KIND (x) == REG_EQUAL)
6329 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
if (XEXP (x, 1))
  XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
return x;
6338 rtx new = cse_process_notes (XEXP (x, 0), object);
6339 /* We don't substitute VOIDmode constants into these rtx,
6340 since they would impede folding. */
6341 if (GET_MODE (new) != VOIDmode)
6342 validate_change (object, &XEXP (x, 0), new, 0);
6347 i = REG_QTY (REGNO (x));
6349 /* Return a constant or a constant register. */
6350 if (REGNO_QTY_VALID_P (REGNO (x)))
6352 struct qty_table_elem *ent = &qty_table[i];
6354 if (ent->const_rtx != NULL_RTX
6355 && (CONSTANT_P (ent->const_rtx)
6356 || REG_P (ent->const_rtx)))
rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
if (new)
  return new;
6364 /* Otherwise, canonicalize this register. */
6365 return canon_reg (x, NULL_RTX);
for (i = 0; i < GET_RTX_LENGTH (code); i++)
  if (fmt[i] == 'e')
    validate_change (object, &XEXP (x, i),
6374 cse_process_notes (XEXP (x, i), object), 0);
6379 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6380 since they are done elsewhere. This function is called via note_stores. */
6383 invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6385 enum rtx_code code = GET_CODE (dest);
if (code == MEM
    && ! addr_affects_sp_p (dest) /* If this is not a stack push ...  */
6389 /* There are times when an address can appear varying and be a PLUS
6390 during this scan when it would be a fixed address were we to know
6391 the proper equivalences. So invalidate all memory if there is
6392 a BLKmode or nonscalar memory reference or a reference to a
6393 variable address. */
6394 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6395 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6397 invalidate_memory ();
if (GET_CODE (set) == CLOBBER
    || CC0_P (dest)
    || dest == pc_rtx)
  return;
6406 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6407 invalidate (XEXP (dest, 0), GET_MODE (dest));
6408 else if (code == REG || code == SUBREG || code == MEM)
6409 invalidate (dest, VOIDmode);
6412 /* Invalidate all insns from START up to the end of the function or the
next label.  This is called when we wish to CSE around a block that is
6414 conditionally executed. */
6417 invalidate_skipped_block (rtx start)
6421 for (insn = start; insn && !LABEL_P (insn);
6422 insn = NEXT_INSN (insn))
if (! INSN_P (insn))
  continue;

if (CALL_P (insn))
6429 if (! CONST_OR_PURE_CALL_P (insn))
6430 invalidate_memory ();
6431 invalidate_for_call ();
6434 invalidate_from_clobbers (PATTERN (insn));
6435 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6439 /* Find the end of INSN's basic block and return its range,
6440 the total number of SETs in all the insns of the block, the last insn of the
6441 block, and the branch path.
6443 The branch path indicates which branches should be followed. If a nonzero
6444 path size is specified, the block should be rescanned and a different set
6445 of branches will be taken. The branch path is only used if
6446 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6448 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6449 used to describe the block. It is filled in with the information about
6450 the current block. The incoming structure's branch path, if any, is used
6451 to construct the output branch path. */
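/* A sketch of a branch path (hypothetical): if this block ends in a
   conditional branch that is followed, the path records that branch
   with status PATH_TAKEN and the block is extended past the target
   label; on a later rescan the same entry can be flipped to
   PATH_NOT_TAKEN so the fall-through arm is examined instead.  */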
6454 cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6455 int follow_jumps, int skip_blocks)
rtx p = insn, q;
int nsets = 0;
int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6460 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
int path_size = data->path_size;
int path_entry = 0;
int i;
6465 /* Update the previous branch path, if any. If the last branch was
6466 previously PATH_TAKEN, mark it PATH_NOT_TAKEN.
6467 If it was previously PATH_NOT_TAKEN,
6468 shorten the path by one and look at the previous branch. We know that
6469 at least one branch must have been taken if PATH_SIZE is nonzero. */
while (path_size > 0)
  {
    if (data->path[path_size - 1].status != PATH_NOT_TAKEN)
      {
	data->path[path_size - 1].status = PATH_NOT_TAKEN;
	break;
      }
    else
      path_size--;
  }
6481 /* If the first instruction is marked with QImode, that means we've
6482 already processed this block. Our caller will look at DATA->LAST
6483 to figure out where to go next. We want to return the next block
6484 in the instruction stream, not some branched-to block somewhere
else.  We accomplish this by pretending our caller forbade us to
follow jumps or skip blocks.  */
6487 if (GET_MODE (insn) == QImode)
6488 follow_jumps = skip_blocks = 0;
6490 /* Scan to end of this basic block. */
6491 while (p && !LABEL_P (p))
/* Don't cse over a call to setjmp; on some machines (e.g., VAX)
6494 the regs restored by the longjmp come from
6495 a later time than the setjmp. */
6496 if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
&& find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
  break;
6500 /* A PARALLEL can have lots of SETs in it,
6501 especially if it is really an ASM_OPERANDS. */
6502 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6503 nsets += XVECLEN (PATTERN (p), 0);
else if (!NOTE_P (p))
  nsets += 1;
/* Ignore insns made by CSE; they cannot affect the boundaries of
   the basic block.  */
6510 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6511 high_cuid = INSN_CUID (p);
6512 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6513 low_cuid = INSN_CUID (p);
/* See if this insn is in our branch path.  If it is and we are to
   take it, do so.  */
6517 if (path_entry < path_size && data->path[path_entry].branch == p)
if (data->path[path_entry].status != PATH_NOT_TAKEN)
  p = JUMP_LABEL (p);

/* Point to next entry in path, if any.  */
path_entry++;
6526 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6527 was specified, we haven't reached our maximum path length, there are
6528 insns following the target of the jump, this is the only use of the
6529 jump label, and the target label is preceded by a BARRIER.
6531 Alternatively, we can follow the jump if it branches around a
6532 block of code and there are no other branches into the block.
6533 In this case invalidate_skipped_block will be called to invalidate any
6534 registers set in the block when following the jump. */
else if ((follow_jumps || skip_blocks)
	 && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
	 && JUMP_P (p)
6538 && GET_CODE (PATTERN (p)) == SET
6539 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6540 && JUMP_LABEL (p) != 0
6541 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6542 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6544 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
if ((!NOTE_P (q)
     || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
     || (PREV_INSN (q) && CALL_P (PREV_INSN (q))
	 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
    && (!LABEL_P (q) || LABEL_NUSES (q) != 0))
  break;
6552 /* If we ran into a BARRIER, this code is an extension of the
6553 basic block when the branch is taken. */
6554 if (follow_jumps && q != 0 && BARRIER_P (q))
/* Don't allow ourselves to keep walking around an
   always-executed loop.  */
if (next_real_insn (q) == next)
  {
    p = q;
    continue;
  }
6564 /* Similarly, don't put a branch in our path more than once. */
6565 for (i = 0; i < path_entry; i++)
if (data->path[i].branch == p)
  break;

if (i != path_entry)
  break;
6572 data->path[path_entry].branch = p;
6573 data->path[path_entry++].status = PATH_TAKEN;
6575 /* This branch now ends our path. It was possible that we
6576 didn't see this branch the last time around (when the
6577 insn in front of the target was a JUMP_INSN that was
6578 turned into a no-op). */
6579 path_size = path_entry;
6582 /* Mark block so we won't scan it again later. */
6583 PUT_MODE (NEXT_INSN (p), QImode);
6585 /* Detect a branch around a block of code. */
6586 else if (skip_blocks && q != 0 && !LABEL_P (q))
if (next_real_insn (q) == next)
  {
    p = q;
    continue;
  }
6596 for (i = 0; i < path_entry; i++)
if (data->path[i].branch == p)
  break;

if (i != path_entry)
  break;
6603 /* This is no_labels_between_p (p, q) with an added check for
6604 reaching the end of a function (in case Q precedes P). */
for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
  if (LABEL_P (tmp))
    break;

if (tmp == q)
6611 data->path[path_entry].branch = p;
6612 data->path[path_entry++].status = PATH_AROUND;
6614 path_size = path_entry;
6617 /* Mark block so we won't scan it again later. */
PUT_MODE (NEXT_INSN (p), QImode);
      }

    p = NEXT_INSN (p);
  }

data->last = p;
6625 data->low_cuid = low_cuid;
6626 data->high_cuid = high_cuid;
6627 data->nsets = nsets;
/* If none of the jumps in the path were taken, set our path length to zero
   so a rescan won't be done.  */
6632 for (i = path_size - 1; i >= 0; i--)
if (data->path[i].status != PATH_NOT_TAKEN)
  break;

if (i < 0)
  data->path_size = 0;
else
  data->path_size = path_size;
6641 /* End the current branch path. */
6642 data->path[path_size].branch = 0;
6645 /* Perform cse on the instructions of a function.
6646 F is the first instruction.
NREGS is one plus the highest pseudo-reg number used in the function.
6649 Returns 1 if jump_optimize should be redone due to simplifications
6650 in conditional jump instructions. */
6653 cse_main (rtx f, int nregs, FILE *file)
struct cse_basic_block_data val;
rtx insn = f;
int i;
6659 val.path = xmalloc (sizeof (struct branch_path)
6660 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6662 cse_jumps_altered = 0;
6663 recorded_label_ref = 0;
6664 constant_pool_entries_cost = 0;
6665 constant_pool_entries_regcost = 0;
6667 rtl_hooks = cse_rtl_hooks;
6670 init_alias_analysis ();
6674 max_insn_uid = get_max_uid ();
6676 reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
6678 #ifdef LOAD_EXTEND_OP
6680 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
6681 and change the code and mode as appropriate. */
memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
#endif
/* Reset the counter indicating how many elements have been made
   this time.  */
6687 n_elements_made = 0;
6689 /* Find the largest uid. */
6691 max_uid = get_max_uid ();
6692 uid_cuid = xcalloc (max_uid + 1, sizeof (int));
6694 /* Compute the mapping from uids to cuids.
6695 CUIDs are numbers assigned to insns, like uids,
6696 except that cuids increase monotonically through the code.
6697 Don't assign cuids to line-number NOTEs, so that the distance in cuids
6698 between two insns is not affected by -g. */
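/* For example (hypothetical stream): insn A, line-number note N,
   insn B.  The cuids assigned are A = 1, N = 1 (shared with A) and
   B = 2, so the cuid distance from A to B is the same whether or not
   the note exists.  */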
6700 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
if (!NOTE_P (insn)
    || NOTE_LINE_NUMBER (insn) < 0)
  INSN_CUID (insn) = ++i;
else
  /* Give a line number note the same cuid as preceding insn.  */
  INSN_CUID (insn) = i;
6710 /* Loop over basic blocks.
6711 Compute the maximum number of qty's needed for each basic block
6712 (which is 2 for each SET). */
6717 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps,
6718 flag_cse_skip_blocks);
6720 /* If this basic block was already processed or has no sets, skip it. */
6721 if (val.nsets == 0 || GET_MODE (insn) == QImode)
6723 PUT_MODE (insn, VOIDmode);
insn = (val.last ? NEXT_INSN (val.last) : 0);
continue;
6729 cse_basic_block_start = val.low_cuid;
6730 cse_basic_block_end = val.high_cuid;
6731 max_qty = val.nsets * 2;
if (file)
  fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
	   INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
	   val.nsets);
6738 /* Make MAX_QTY bigger to give us room to optimize
6739 past the end of this basic block, if that should prove useful. */
6743 /* If this basic block is being extended by following certain jumps,
6744 (see `cse_end_of_basic_block'), we reprocess the code from the start.
6745 Otherwise, we start after this basic block. */
6746 if (val.path_size > 0)
cse_basic_block (insn, val.last, val.path);
else
  {
    int old_cse_jumps_altered = cse_jumps_altered;
    rtx temp;
6753 /* When cse changes a conditional jump to an unconditional
6754 jump, we want to reprocess the block, since it will give
6755 us a new branch path to investigate. */
6756 cse_jumps_altered = 0;
6757 temp = cse_basic_block (insn, val.last, val.path);
6758 if (cse_jumps_altered == 0
|| (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
  insn = temp;
6762 cse_jumps_altered |= old_cse_jumps_altered;
6773 if (max_elements_made < n_elements_made)
6774 max_elements_made = n_elements_made;
6777 end_alias_analysis ();
6779 free (reg_eqv_table);
6781 rtl_hooks = general_rtl_hooks;
6783 return cse_jumps_altered || recorded_label_ref;
/* Process a single basic block.  FROM and TO are the limits of the basic
   block.  NEXT_BRANCH points to the branch path when following jumps or
   a null path when not following jumps.  */
6795 cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
rtx insn;
int to_usage = 0;
rtx libcall_insn = NULL_RTX;
int num_insns = 0;
int no_conflict = 0;
6803 /* Allocate the space needed by qty_table. */
6804 qty_table = xmalloc (max_qty * sizeof (struct qty_table_elem));
6808 /* TO might be a label. If so, protect it from being deleted. */
if (to != 0 && LABEL_P (to))
  ++LABEL_NUSES (to);
6812 for (insn = from; insn != to; insn = NEXT_INSN (insn))
6814 enum rtx_code code = GET_CODE (insn);
6816 /* If we have processed 1,000 insns, flush the hash table to
6817 avoid extreme quadratic behavior. We must not include NOTEs
6818 in the count since there may be more of them when generating
6819 debugging information. If we clear the table at different
6820 times, code generated with -g -O might be different than code
6821 generated with -O but not -g.
6823 ??? This is a real kludge and needs to be done some other way.
6825 if (code != NOTE && num_insns++ > 1000)
flush_hash_table ();
num_insns = 0;
6831 /* See if this is a branch that is part of the path. If so, and it is
6832 to be taken, do so. */
6833 if (next_branch->branch == insn)
6835 enum taken status = next_branch++->status;
6836 if (status != PATH_NOT_TAKEN)
6838 if (status == PATH_TAKEN)
record_jump_equiv (insn, 1);
else
  invalidate_skipped_block (NEXT_INSN (insn));
6843 /* Set the last insn as the jump insn; it doesn't affect cc0.
6844 Then follow this branch. */
6849 insn = JUMP_LABEL (insn);
6854 if (GET_MODE (insn) == QImode)
6855 PUT_MODE (insn, VOIDmode);
if (GET_RTX_CLASS (code) == RTX_INSN)
  {
    rtx p;
6861 /* Process notes first so we have all notes in canonical forms when
6862 looking for duplicate operations. */
6864 if (REG_NOTES (insn))
6865 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
/* Track when we are inside a LIBCALL block.  Inside such a block,
   we do not want to record destinations.  The last insn of a
   LIBCALL block is not considered to be part of the block, since
   its destination is the result of the block and hence should be
   recorded.  */
6873 if (REG_NOTES (insn) != 0)
6875 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
6876 libcall_insn = XEXP (p, 0);
6877 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
6879 /* Keep libcall_insn for the last SET insn of a no-conflict
6880 block to prevent changing the destination. */
6886 else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
6890 cse_insn (insn, libcall_insn);
if (no_conflict == -1)
  {
    libcall_insn = NULL_RTX;
    no_conflict = 0;
  }
/* If we haven't already found an insn where we added a LABEL_REF,
   check this one.  */
6900 if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
&& for_each_rtx (&PATTERN (insn), check_for_label_ref,
		 (void *) insn))
6903 recorded_label_ref = 1;
6906 /* If INSN is now an unconditional jump, skip to the end of our
6907 basic block by pretending that we just did the last insn in the
6908 basic block. If we are jumping to the end of our block, show
6909 that we can have one usage of TO. */
6911 if (any_uncondjump_p (insn))
if (JUMP_LABEL (insn) == to)
  to_usage = 1;
6922 /* Maybe TO was deleted because the jump is unconditional.
6923 If so, there is nothing left in this basic block. */
6924 /* ??? Perhaps it would be smarter to set TO
6925 to whatever follows this insn,
6926 and pretend the basic block had always ended here. */
6927 if (INSN_DELETED_P (to))
6930 insn = PREV_INSN (to);
6933 /* See if it is ok to keep on going past the label
6934 which used to end our basic block. Remember that we incremented
6935 the count of that label, so we decrement it here. If we made
6936 a jump unconditional, TO_USAGE will be one; in that case, we don't
6937 want to count the use in that jump. */
6939 if (to != 0 && NEXT_INSN (insn) == to
6940 && LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
6942 struct cse_basic_block_data val;
6945 insn = NEXT_INSN (to);
6947 /* If TO was the last insn in the function, we are done. */
6954 /* If TO was preceded by a BARRIER we are done with this block
6955 because it has no continuation. */
6956 prev = prev_nonnote_insn (to);
6957 if (prev && BARRIER_P (prev))
6963 /* Find the end of the following block. Note that we won't be
6964 following branches in this case. */
6967 val.path = xmalloc (sizeof (struct branch_path)
6968 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6969 cse_end_of_basic_block (insn, &val, 0, 0);
6972 /* If the tables we allocated have enough space left
6973 to handle all the SETs in the next basic block,
6974 continue through it. Otherwise, return,
6975 and that block will be scanned individually. */
if (val.nsets * 2 + next_qty > max_qty)
  break;
6979 cse_basic_block_start = val.low_cuid;
6980 cse_basic_block_end = val.high_cuid;
6983 /* Prevent TO from being deleted if it is a label. */
if (to != 0 && LABEL_P (to))
  ++LABEL_NUSES (to);
6987 /* Back up so we process the first insn in the extension. */
6988 insn = PREV_INSN (insn);
6992 gcc_assert (next_qty <= max_qty);
free (qty_table);

return to ? NEXT_INSN (to) : 0;
6999 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7000 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
7003 check_for_label_ref (rtx *rtl, void *data)
7005 rtx insn = (rtx) data;
7007 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7008 we must rerun jump since it needs to place the note. If this is a
7009 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7010 since no REG_LABEL will be added. */
7011 return (GET_CODE (*rtl) == LABEL_REF
7012 && ! LABEL_REF_NONLOCAL_P (*rtl)
7013 && LABEL_P (XEXP (*rtl, 0))
7014 && INSN_UID (XEXP (*rtl, 0)) != 0
7015 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7018 /* Count the number of times registers are used (not set) in X.
7019 COUNTS is an array in which we accumulate the count, INCR is how much
7020 we count each register usage. */
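/* A typical use, as in delete_trivially_dead_insns below
   (illustrative sketch only):

       counts = xcalloc (nreg, sizeof (int));
       for (insn = next_real_insn (insns); insn;
            insn = next_real_insn (insn))
         count_reg_usage (insn, counts, 1);

   after which counts[R] holds the number of uses of register R.  */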
count_reg_usage (rtx x, int *counts, int incr)
{
  enum rtx_code code;
  rtx note;
  const char *fmt;
  int i, j;
7033 switch (code = GET_CODE (x))
case REG:
  counts[REGNO (x)] += incr;
  return;
/* If we are clobbering a MEM, mark any registers inside the address
   as being used.  */
7052 if (MEM_P (XEXP (x, 0)))
7053 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, incr);
7057 /* Unless we are setting a REG, count everything in SET_DEST. */
7058 if (!REG_P (SET_DEST (x)))
7059 count_reg_usage (SET_DEST (x), counts, incr);
7060 count_reg_usage (SET_SRC (x), counts, incr);
7064 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, incr);
7069 count_reg_usage (PATTERN (x), counts, incr);
/* Things used in a REG_EQUAL note aren't dead since loop may try to
   use them.  */
7074 note = find_reg_equal_equiv_note (x);
if (note)
  {
    rtx eqv = XEXP (note, 0);
7079 if (GET_CODE (eqv) == EXPR_LIST)
7080 /* This REG_EQUAL note describes the result of a function call.
7081 Process all the arguments. */
do
  {
    count_reg_usage (XEXP (eqv, 0), counts, incr);
7085 eqv = XEXP (eqv, 1);
7087 while (eqv && GET_CODE (eqv) == EXPR_LIST);
else
  count_reg_usage (eqv, counts, incr);
7094 if (REG_NOTE_KIND (x) == REG_EQUAL
7095 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
7096 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7097 involving registers in the address. */
7098 || GET_CODE (XEXP (x, 0)) == CLOBBER)
7099 count_reg_usage (XEXP (x, 0), counts, incr);
7101 count_reg_usage (XEXP (x, 1), counts, incr);
7105 /* Iterate over just the inputs, not the constraints as well. */
7106 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7107 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, incr);
7117 fmt = GET_RTX_FORMAT (code);
7118 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
if (fmt[i] == 'e')
  count_reg_usage (XEXP (x, i), counts, incr);
7122 else if (fmt[i] == 'E')
7123 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7124 count_reg_usage (XVECEXP (x, i, j), counts, incr);
/* Return true if SET is live.  */
set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
	    int *counts)
{
  rtx tem;
if (set_noop_p (set))
  return false;
7141 else if (GET_CODE (SET_DEST (set)) == CC0
7142 && !side_effects_p (SET_SRC (set))
7143 && ((tem = next_nonnote_insn (insn)) == 0
|| !reg_referenced_p (cc0_rtx, PATTERN (tem))))
  return false;
7148 else if (!REG_P (SET_DEST (set))
7149 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7150 || counts[REGNO (SET_DEST (set))] != 0
|| side_effects_p (SET_SRC (set)))
  return true;
return false;
/* Return true if INSN is live.  */
insn_live_p (rtx insn, int *counts)
{
  int i;
if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
  return true;
7164 else if (GET_CODE (PATTERN (insn)) == SET)
7165 return set_live_p (PATTERN (insn), insn, counts);
7166 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7168 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7170 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7172 if (GET_CODE (elt) == SET)
if (set_live_p (elt, insn, counts))
  return true;
else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
  return true;
/* Return true if the libcall ending with INSN is dead as a whole.  */
dead_libcall_p (rtx insn, int *counts)
{
  rtx note, set, new;
7193 /* See if there's a REG_EQUAL note on this insn and try to
7194 replace the source with the REG_EQUAL expression.
7196 We assume that insns with REG_RETVALs can only be reg->reg
7197 copies at this point. */
note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
if (!note)
  return false;

set = single_set (insn);
if (!set)
  return false;
new = simplify_rtx (XEXP (note, 0));
if (!new)
  new = XEXP (note, 0);
7210 /* While changing insn, we must update the counts accordingly. */
7211 count_reg_usage (insn, counts, -1);
if (validate_change (insn, &SET_SRC (set), new, 0))
  {
    count_reg_usage (insn, counts, 1);
    remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
    remove_note (insn, note);
    return true;
  }
7221 if (CONSTANT_P (new))
7223 new = force_const_mem (GET_MODE (SET_DEST (set)), new);
if (new && validate_change (insn, &SET_SRC (set), new, 0))
  {
    count_reg_usage (insn, counts, 1);
    remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
    remove_note (insn, note);
    return true;
  }
count_reg_usage (insn, counts, 1);
return false;
7237 /* Scan all the insns and delete any that are dead; i.e., they store a register
7238 that is never used or they copy a register to itself.
7240 This is used to remove insns made obviously dead by cse, loop or other
7241 optimizations. It improves the heuristics in loop since it won't try to
7242 move dead invariants out of loops or make givs for dead quantities. The
7243 remaining passes of the compilation are also sped up. */
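/* For example (illustrative RTL), (set (reg 100) (reg 100)) is a
   no-op, and (set (reg 100) (reg 101)) is dead when (reg 100) is a
   pseudo whose use count is zero and the source has no side effects;
   both kinds of insn are deleted here.  */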
delete_trivially_dead_insns (rtx insns, int nreg)
{
  rtx insn, prev;
  int *counts;
7250 int in_libcall = 0, dead_libcall = 0;
7251 int ndead = 0, nlastdead, niterations = 0;
7253 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7254 /* First count the number of times each register is used. */
7255 counts = xcalloc (nreg, sizeof (int));
7256 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7257 count_reg_usage (insn, counts, 1);
do
  {
    nlastdead = ndead;
    niterations++;

/* Go from the last insn to the first and delete insns that only set unused
7264 registers or copy a register to itself. As we delete an insn, remove
7265 usage counts for registers it uses.
7267 The first jump optimization pass may leave a real insn as the last
7268 insn in the function. We must not skip that insn or we may end
7269 up deleting code that is not really dead. */
7270 insn = get_last_insn ();
7271 if (! INSN_P (insn))
7272 insn = prev_real_insn (insn);
7274 for (; insn; insn = prev)
int live_insn = 0;

prev = prev_real_insn (insn);
7280 /* Don't delete any insns that are part of a libcall block unless
7281 we can delete the whole libcall block.
7283 Flow or loop might get confused if we did that. Remember
7284 that we are scanning backwards. */
if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
  {
    in_libcall = 1;
    live_insn = 1;
    dead_libcall = dead_libcall_p (insn, counts);
  }
7291 else if (in_libcall)
7292 live_insn = ! dead_libcall;
else
  live_insn = insn_live_p (insn, counts);
/* If this is a dead insn, delete it and show registers in it aren't
   being used.  */
if (! live_insn)
  {
count_reg_usage (insn, counts, -1);
    delete_insn_and_edges (insn);
    ndead++;
  }
if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
  {
    in_libcall = 0;
    dead_libcall = 0;
  }
7313 while (ndead != nlastdead);
7315 if (dump_file && ndead)
7316 fprintf (dump_file, "Deleted %i trivially dead insns; %i iterations\n",
7317 ndead, niterations);
/* Clean up.  */
free (counts);
timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
return ndead;
}
7324 /* This function is called via for_each_rtx. The argument, NEWREG, is
7325 a condition code register with the desired mode. If we are looking
at the same register in a different mode, replace it with
NEWREG.  */
7330 cse_change_cc_mode (rtx *loc, void *data)
7332 rtx newreg = (rtx) data;
if (*loc
    && REG_P (*loc)
    && REGNO (*loc) == REGNO (newreg)
    && GET_MODE (*loc) != GET_MODE (newreg))
  {
    *loc = newreg;
    return -1;
  }

return 0;
7345 /* Change the mode of any reference to the register REGNO (NEWREG) to
7346 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
7347 any instruction which modifies NEWREG. */
cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
{
  rtx insn;
7354 for (insn = start; insn != end; insn = NEXT_INSN (insn))
if (! INSN_P (insn))
  continue;

if (reg_set_p (newreg, insn))
  return;
7362 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, newreg);
for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, newreg);
7367 /* BB is a basic block which finishes with CC_REG as a condition code
7368 register which is set to CC_SRC. Look through the successors of BB
7369 to find blocks which have a single predecessor (i.e., this one),
7370 and look through those blocks for an assignment to CC_REG which is
7371 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
7372 permitted to change the mode of CC_SRC to a compatible mode. This
7373 returns VOIDmode if no equivalent assignments were found.
7374 Otherwise it returns the mode which CC_SRC should wind up with.
7376 The main complexity in this function is handling the mode issues.
7377 We may have more than one duplicate which we can eliminate, and we
7378 try to find a mode which will work for multiple duplicates. */
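/* A sketch (hypothetical, x86-style CC modes): suppose BB ends with
   a jump on (reg:CCZ flags) set from

       (compare:CCZ (reg 100) (const_int 0))

   and a successor block computes (compare:CCGC (reg 100) (const_int 0))
   into the same flags register.  If targetm.cc_modes_compatible says
   the two modes are compatible, the duplicate assignment can be
   deleted once every use is rewritten to the common mode.  */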
7380 static enum machine_mode
7381 cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
bool found_equiv;
enum machine_mode mode;
unsigned int insn_count;
unsigned int i;
edge e;
edge_iterator ei;
rtx insns[2];
enum machine_mode modes[2];
rtx last_insns[2];
rtx newreg = NULL_RTX;
7394 /* We expect to have two successors. Look at both before picking
7395 the final mode for the comparison. If we have more successors
7396 (i.e., some sort of table jump, although that seems unlikely),
then we require all beyond the first two to use the same
mode.  */
7400 found_equiv = false;
7401 mode = GET_MODE (cc_src);
7403 FOR_EACH_EDGE (e, ei, bb->succs)
if (e->flags & EDGE_COMPLEX)
  continue;
7411 if (EDGE_COUNT (e->dest->preds) != 1
|| e->dest == EXIT_BLOCK_PTR)
  continue;
7415 end = NEXT_INSN (BB_END (e->dest));
7416 for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
rtx set;

if (! INSN_P (insn))
  continue;
7423 /* If CC_SRC is modified, we have to stop looking for
7424 something which uses it. */
if (modified_in_p (cc_src, insn))
  break;
7428 /* Check whether INSN sets CC_REG to CC_SRC. */
set = single_set (insn);
if (set
    && REG_P (SET_DEST (set))
    && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7435 enum machine_mode set_mode;
7436 enum machine_mode comp_mode;
7439 set_mode = GET_MODE (SET_SRC (set));
7440 comp_mode = set_mode;
7441 if (rtx_equal_p (cc_src, SET_SRC (set)))
7443 else if (GET_CODE (cc_src) == COMPARE
7444 && GET_CODE (SET_SRC (set)) == COMPARE
7446 && rtx_equal_p (XEXP (cc_src, 0),
7447 XEXP (SET_SRC (set), 0))
7448 && rtx_equal_p (XEXP (cc_src, 1),
7449 XEXP (SET_SRC (set), 1)))
7452 comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7453 if (comp_mode != VOIDmode
7454 && (can_change_mode || comp_mode == mode))
7461 if (insn_count < ARRAY_SIZE (insns))
7463 insns[insn_count] = insn;
7464 modes[insn_count] = set_mode;
last_insns[insn_count] = end;
++insn_count;
7468 if (mode != comp_mode)
gcc_assert (can_change_mode);
mode = comp_mode;
PUT_MODE (cc_src, mode);
7477 if (set_mode != mode)
7479 /* We found a matching expression in the
7480 wrong mode, but we don't have room to
store it in the array.  Punt.  This case
should be rare.  */
break;
7485 /* INSN sets CC_REG to a value equal to CC_SRC
with the right mode.  We can simply delete
it.  */
delete_insn (insn);
7491 /* We found an instruction to delete. Keep looking,
in the hopes of finding a three-way jump.  */
continue;
7496 /* We found an instruction which sets the condition
code, so don't look any farther.  */
break;
/* If INSN sets CC_REG in some other way, don't look any
   farther.  */
if (reg_set_p (cc_reg, insn))
  break;
7507 /* If we fell off the bottom of the block, we can keep looking
7508 through successors. We pass CAN_CHANGE_MODE as false because
7509 we aren't prepared to handle compatibility between the
further blocks and this block.  */
if (insn == end)
  {
7513 enum machine_mode submode;
7515 submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
7516 if (submode != VOIDmode)
gcc_assert (submode == mode);
found_equiv = true;
7520 can_change_mode = false;
7528 /* Now INSN_COUNT is the number of instructions we found which set
7529 CC_REG to a value equivalent to CC_SRC. The instructions are in
INSNS.  The modes used by those instructions are in MODES.  */

if (! found_equiv)
  return VOIDmode;
7533 for (i = 0; i < insn_count; ++i)
7535 if (modes[i] != mode)
7537 /* We need to change the mode of CC_REG in INSNS[i] and
7538 subsequent instructions. */
if (GET_MODE (cc_reg) == mode)
  newreg = cc_reg;
else
  newreg = gen_rtx_REG (mode, REGNO (cc_reg));
cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
			  newreg);
      }

    delete_insn (insns[i]);
  }

return mode;
}
7556 /* If we have a fixed condition code register (or two), walk through
7557 the instructions and try to eliminate duplicate assignments. */
7560 cse_condition_code_reg (void)
7562 unsigned int cc_regno_1;
unsigned int cc_regno_2;
rtx cc_reg_1;
rtx cc_reg_2;
basic_block bb;
if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
  return;
7571 cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7572 if (cc_regno_2 != INVALID_REGNUM)
7573 cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7575 cc_reg_2 = NULL_RTX;
FOR_EACH_BB (bb)
  {
    rtx last_insn;
    rtx cc_reg;
    rtx insn;
    rtx cc_src_insn;
    rtx cc_src;
    enum machine_mode mode;
7585 enum machine_mode orig_mode;
7587 /* Look for blocks which end with a conditional jump based on a
7588 condition code register. Then look for the instruction which
7589 sets the condition code register. Then look through the
7590 successor blocks for instructions which set the condition
7591 code register to the same value. There are other possible
7592 uses of the condition code register, but these are by far the
most common and the ones which we are most likely to be able
to optimize.  */
7596 last_insn = BB_END (bb);
if (!JUMP_P (last_insn))
  continue;
if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
  cc_reg = cc_reg_1;
else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
  cc_reg = cc_reg_2;
else
  continue;
cc_src_insn = NULL_RTX;
cc_src = NULL_RTX;
7609 for (insn = PREV_INSN (last_insn);
7610 insn && insn != PREV_INSN (BB_HEAD (bb));
7611 insn = PREV_INSN (insn))
rtx set;

if (! INSN_P (insn))
  continue;
set = single_set (insn);
if (set
    && REG_P (SET_DEST (set))
    && REGNO (SET_DEST (set)) == REGNO (cc_reg))
  {
    cc_src_insn = insn;
    cc_src = SET_SRC (set);
    break;
  }
else if (reg_set_p (cc_reg, insn))
  break;
if (! cc_src_insn)
  continue;

if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
  continue;
7636 /* Now CC_REG is a condition code register used for a
7637 conditional jump at the end of the block, and CC_SRC, in
7638 CC_SRC_INSN, is the value to which that condition code
register is set, and CC_SRC is still meaningful at the end of
the basic block.  */
7642 orig_mode = GET_MODE (cc_src);
7643 mode = cse_cc_succs (bb, cc_reg, cc_src, true);
7644 if (mode != VOIDmode)
7646 gcc_assert (mode == GET_MODE (cc_src));
7647 if (mode != orig_mode)
7649 rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7651 /* Change the mode of CC_REG in CC_SRC_INSN to
7652 GET_MODE (NEWREG). */
for_each_rtx (&PATTERN (cc_src_insn), cse_change_cc_mode,
	      newreg);
for_each_rtx (&REG_NOTES (cc_src_insn), cse_change_cc_mode,
	      newreg);
7658 /* Do the same in the following insns that use the
7659 current value of CC_REG within BB. */
7660 cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
NEXT_INSN (last_insn),
			  newreg);