1 /* Common subexpression elimination for GNU compiler.
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.  */
23 /* stdio.h must precede rtl.h for FFS. */
25 #include "coretypes.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
35 #include "insn-config.h"
46 #include "rtlhooks-def.h"
48 /* The basic idea of common subexpression elimination is to go
49 through the code, keeping a record of expressions that would
50 have the same value at the current scan point, and replacing
51 expressions encountered with the cheapest equivalent expression.
53 It is too complicated to keep track of the different possibilities
54 when control paths merge in this code; so, at each label, we forget all
55 that is known and start fresh. This can be described as processing each
extended basic block separately.  We have a separate pass to perform global CSE.
59 Note CSE can turn a conditional or computed jump into a nop or
60 an unconditional jump. When this occurs we arrange to run the jump
61 optimizer after CSE to delete the unreachable code.
63 We use two data structures to record the equivalent expressions:
64 a hash table for most expressions, and a vector of "quantity
65 numbers" to record equivalent (pseudo) registers.
67 The use of the special data structure for registers is desirable
because it is faster.  It is possible because register references
69 contain a fairly small number, the register number, taken from
70 a contiguously allocated series, and two register references are
71 identical if they have the same number. General expressions
72 do not have any such thing, so the only way to retrieve the
73 information recorded on an expression other than a register
74 is to keep it in a hash table.
76 Registers and "quantity numbers":
78 At the start of each basic block, all of the (hardware and pseudo)
79 registers used in the function are given distinct quantity
80 numbers to indicate their contents. During scan, when the code
81 copies one register into another, we copy the quantity number.
82 When a register is loaded in any other way, we allocate a new
83 quantity number to describe the value generated by this operation.
`reg_qty' records what quantity a register is currently thought of as containing.
87 All real quantity numbers are greater than or equal to `max_reg'.
88 If register N has not been assigned a quantity, reg_qty[N] will equal N.
90 Quantity numbers below `max_reg' do not exist and none of the `qty_table'
91 entries should be referenced with an index below `max_reg'.
93 We also maintain a bidirectional chain of registers for each
quantity number.  The `qty_table' members `first_reg' and `last_reg',
95 and `reg_eqv_table' members `next' and `prev' hold these chains.
97 The first register in a chain is the one whose lifespan is least local.
98 Among equals, it is the one that was seen first.
99 We replace any equivalent register with that one.
If two registers have the same quantity number, then REG expressions
with the quantity's `mode' must be in the hash table for both registers,
and both must be in the same class.
105 The converse is not true. Since hard registers may be referenced in
106 any mode, two REG expressions might be equivalent in the hash table
but not have the same quantity number if the quantity of one of the
registers does not have the same mode as those expressions.
110 Constants and quantity numbers
112 When a quantity has a known constant value, that value is stored
113 in the appropriate qty_table `const_rtx'. This is in addition to
114 putting the constant in the hash table as is usual for non-regs.
116 Whether a reg or a constant is preferred is determined by the configuration
117 macro CONST_COSTS and will often depend on the constant value. In any
event, expressions containing constants can be simplified by fold_rtx.
120 When a quantity has a known nearly constant value (such as an address
of a stack slot), that value is stored in the appropriate qty_table `const_rtx'.
124 Integer constants don't have a machine mode. However, cse
125 determines the intended machine mode from the destination
126 of the instruction that moves the constant. The machine mode
127 is recorded in the hash table along with the actual RTL
constant expression so that different modes are kept separate.

   Other expressions:
132 To record known equivalences among expressions in general
133 we use a hash table called `table'. It has a fixed number of buckets
134 that contain chains of `struct table_elt' elements for expressions.
These chains connect the elements whose expressions have the same hash codes.
138 Other chains through the same elements connect the elements which
139 currently have equivalent values.
141 Register references in an expression are canonicalized before hashing
142 the expression. This is done using `reg_qty' and qty_table `first_reg'.
143 The hash code of a register reference is computed using the quantity
144 number, not the register number.
146 When the value of an expression changes, it is necessary to remove from the
147 hash table not just that expression but all expressions whose values
148 could be different as a result.
150 1. If the value changing is in memory, except in special cases
151 ANYTHING referring to memory could be changed. That is because
152 nobody knows where a pointer does not point.
153 The function `invalidate_memory' removes what is necessary.
155 The special cases are when the address is constant or is
156 a constant plus a fixed register such as the frame pointer
157 or a static chain pointer. When such addresses are stored in,
158 we can tell exactly which other such addresses must be invalidated
159 due to overlap. `invalidate' does this.
160 All expressions that refer to non-constant
161 memory addresses are also invalidated. `invalidate_memory' does this.
163 2. If the value changing is a register, all expressions
containing references to that register, and only those, must be removed from the hash table.
167 Because searching the entire hash table for expressions that contain
168 a register is very slow, we try to figure out when it isn't necessary.
169 Precisely, this is necessary only when expressions have been
170 entered in the hash table using this register, and then the value has
171 changed, and then another expression wants to be added to refer to
172 the register's new value. This sequence of circumstances is rare
173 within any one basic block.
175 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
176 reg_tick[i] is incremented whenever a value is stored in register i.
177 reg_in_table[i] holds -1 if no references to register i have been
178 entered in the table; otherwise, it contains the value reg_tick[i] had
179 when the references were entered. If we want to enter a reference
180 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
Until we want to enter a new entry, the mere fact that the two vectors
don't match means the entries are simply ignored if anyone tries to match them.
184 Registers themselves are entered in the hash table as well as in
185 the equivalent-register chains. However, the vectors `reg_tick'
186 and `reg_in_table' do not apply to expressions which are simple
187 register references. These expressions are removed from the table
188 immediately when they become invalid, and this can be done even if
we do not immediately search for all the expressions that refer to the register.
192 A CLOBBER rtx in an instruction invalidates its operand for further
193 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
194 invalidates everything that resides in memory.
Related expressions:

   Constant expressions that differ only by an additive integer
199 are called related. When a constant expression is put in
200 the table, the related expression with no constant term
201 is also entered. These are made to point at each other
202 so that it is possible to find out if there exists any
203 register equivalent to an expression related to a given expression. */
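/* Illustrative aside (not part of the pass, all names hypothetical): a
   standalone toy model of the quantity-number scheme described above.
   Each register starts with a distinct quantity; a copy propagates the
   source's quantity; any other store allocates a fresh one.  Two
   registers are then known equal exactly when their quantities match.  */
#if 0
#define TOY_NREGS 16
static int toy_qty[TOY_NREGS];
static int toy_next_qty;

static void
toy_new_block (void)
{
  int r;
  for (r = 0; r < TOY_NREGS; r++)
    toy_qty[r] = r;		/* distinct, like reg_qty[N] == N */
  toy_next_qty = TOY_NREGS;	/* real quantities live above "max_reg" */
}

static void toy_copy (int dest, int src) { toy_qty[dest] = toy_qty[src]; }
static void toy_store (int dest) { toy_qty[dest] = toy_next_qty++; }
static int toy_equal_p (int a, int b) { return toy_qty[a] == toy_qty[b]; }
#endif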
/* One plus largest register number used in this function.  */
static int max_reg;
/* One plus largest instruction UID used in this function at time of
   starting this pass.  */
static int max_insn_uid;
/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */
static int max_qty;
/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */
static int next_qty;
224 /* Per-qty information tracking.
226 `first_reg' and `last_reg' track the head and tail of the
227 chain of registers which currently contain this quantity.
229 `mode' contains the machine mode of this quantity.
231 `const_rtx' holds the rtx of the constant value of this
quantity, if known.  A sum of the frame/arg pointer
233 and a constant can also be entered here. When this holds
a known value, `const_insn' is the insn which stored the constant value.
237 `comparison_{code,const,qty}' are used to track when a
238 comparison between a quantity and some constant or register has
239 been passed. In such a case, we know the results of the comparison
240 in case we see it again. These members record a comparison that
241 is known to be true. `comparison_code' holds the rtx code of such
242 a comparison, else it is set to UNKNOWN and the other two
243 comparison members are undefined. `comparison_const' holds
244 the constant being compared against, or zero if the comparison
245 is not against a constant. `comparison_qty' holds the quantity
246 being compared against when the result is known. If the comparison
247 is not with a register, `comparison_qty' is -1. */
struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};
262 /* The table of all qtys, indexed by qty number. */
263 static struct qty_table_elem *qty_table;
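/* Illustrative sketch (hypothetical helper, not part of the pass): given a
   register with a valid quantity, this is how its recorded constant
   equivalent would be fetched from the table above.  */
#if 0
static rtx
example_known_constant (unsigned int regno)
{
  if (REGNO_QTY_VALID_P (regno))
    {
      struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
      if (ent->const_rtx != NULL_RTX)
	return ent->const_rtx;	/* Constant (or frame+offset) value.  */
    }
  return NULL_RTX;
}
#endif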
266 /* For machines that have a CC0, we do not record its value in the hash
267 table since its use is guaranteed to be the insn immediately following
268 its definition and any other insn is presumed to invalidate it.
270 Instead, we store below the value last assigned to CC0. If it should
271 happen to be a constant, it is stored in preference to the actual
272 assigned value. In case it is a constant, we store the mode in which
273 the constant should be interpreted. */
275 static rtx prev_insn_cc0;
276 static enum machine_mode prev_insn_cc0_mode;
278 /* Previous actual insn. 0 if at first insn of basic block. */
280 static rtx prev_insn;
283 /* Insn being scanned. */
285 static rtx this_insn;
/* Index by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.
   Or -1 if this register is at the end of the chain.
   If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */
/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};
/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;
struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;
  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;
  /* Search key.  */
  unsigned int regno;
  /* The quantity number of the register's current contents.  */
  int reg_qty;
  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;
  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     not valid.  */
  int reg_in_table;
  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};
333 /* A free list of cse_reg_info entries. */
334 static struct cse_reg_info *cse_reg_info_free_list;
336 /* A used list of cse_reg_info entries. */
337 static struct cse_reg_info *cse_reg_info_used_list;
338 static struct cse_reg_info *cse_reg_info_used_list_end;
340 /* A mapping from registers to cse_reg_info data structures. */
341 #define REGHASH_SHIFT 7
342 #define REGHASH_SIZE (1 << REGHASH_SHIFT)
343 #define REGHASH_MASK (REGHASH_SIZE - 1)
344 static struct cse_reg_info *reg_hash[REGHASH_SIZE];
346 #define REGHASH_FN(REGNO) \
347 (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
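/* Illustrative check (not compiled): pseudo-register 1000 maps to bucket
   (1000 ^ (1000 >> 7)) & 127 == (1000 ^ 7) & 127 == 111; collisions are
   resolved by chaining through `hash_next' (see get_cse_reg_info below).  */
#if 0
static void
example_reghash (void)
{
  gcc_assert (REGHASH_FN (1000) == 111);
}
#endif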
349 /* The last lookup we did into the cse_reg_info_tree. This allows us
350 to cache repeated lookups. */
351 static unsigned int cached_regno;
352 static struct cse_reg_info *cached_cse_reg_info;
354 /* A HARD_REG_SET containing all the hard registers for which there is
355 currently a REG expression in the hash table. Note the difference
356 from the above variables, which indicate if the REG is mentioned in some
357 expression in the table. */
359 static HARD_REG_SET hard_regs_in_table;
361 /* CUID of insn that starts the basic block currently being cse-processed. */
363 static int cse_basic_block_start;
365 /* CUID of insn that ends the basic block currently being cse-processed. */
367 static int cse_basic_block_end;
369 /* Vector mapping INSN_UIDs to cuids.
370 The cuids are like uids but increase monotonically always.
371 We use them to see whether a reg is used outside a given basic block. */
373 static int *uid_cuid;
/* Highest UID in UID_CUID.  */
static int max_uid;
378 /* Get the cuid of an insn. */
380 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
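/* Illustrative sketch (hypothetical helper, not compiled): this is the kind
   of cuid comparison make_regs_eqv below uses to decide whether a pseudo's
   life extends beyond the basic block currently being scanned.  */
#if 0
static int
example_reg_used_outside_block_p (unsigned int regno)
{
  return (uid_cuid[REGNO_LAST_UID (regno)] > cse_basic_block_end
	  || uid_cuid[REGNO_FIRST_UID (regno)] < cse_basic_block_start);
}
#endif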
382 /* Nonzero if this pass has made changes, and therefore it's
383 worthwhile to run the garbage collector. */
385 static int cse_altered;
387 /* Nonzero if cse has altered conditional jump insns
388 in such a way that jump optimization should be redone. */
390 static int cse_jumps_altered;
/* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
   REG_LABEL note; we then have to rerun jump after CSE to put in the note.  */
394 static int recorded_label_ref;
396 /* canon_hash stores 1 in do_not_record
if it notices a reference to CC0, PC, or some other volatile subexpression.  */
400 static int do_not_record;
402 #ifdef LOAD_EXTEND_OP
404 /* Scratch rtl used when looking for load-extended copy of a MEM. */
static rtx memory_extend_rtx;
#endif
408 /* canon_hash stores 1 in hash_arg_in_memory
409 if it notices a reference to memory within the expression being hashed. */
411 static int hash_arg_in_memory;
413 /* The hash table contains buckets which are chains of `struct table_elt's,
414 each recording one expression's information.
415 That expression is in the `exp' field.
417 The canon_exp field contains a canonical (from the point of view of
418 alias analysis) version of the `exp' field.
420 Those elements with the same hash code are chained in both directions
421 through the `next_same_hash' and `prev_same_hash' fields.
423 Each set of expressions with equivalent values
424 are on a two-way chain through the `next_same_value'
425 and `prev_same_value' fields, and all point with
426 the `first_same_value' field at the first element in
427 that chain. The chain is in order of increasing cost.
428 Each element's cost value is in its `cost' field.
430 The `in_memory' field is nonzero for elements that
431 involve any reference to memory. These elements are removed
432 whenever a write is done to an unidentified location in memory.
433 To be safe, we assume that a memory address is unidentified unless
434 the address is either a symbol constant or a constant plus
435 the frame pointer or argument pointer.
437 The `related_value' field is used to connect related expressions
438 (that differ by adding an integer).
439 The related expressions are chained in a circular fashion.
`related_value' is zero for expressions for which this chain is not useful.
443 The `cost' field stores the cost of this element's expression.
444 The `regcost' field stores the value returned by approx_reg_cost for
445 this element's expression.
The `is_const' flag is set if the element is a constant (including
   a sum of a fixed base register and a constant, as tested by fixed_base_plus_p).
450 The `flag' field is used as a temporary during some search routines.
452 The `mode' field is usually the same as GET_MODE (`exp'), but
453 if `exp' is a CONST_INT and has no machine mode then the `mode'
454 field is the mode it was being used as. Each constant is
455 recorded separately for each mode it is used with. */
struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};
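/* Illustrative sketch (hypothetical helper, not compiled): because each
   value chain is kept sorted cheapest-first, the best replacement for the
   value of ELT is simply the expression at the head of its chain.  */
#if 0
static rtx
example_cheapest_equiv (struct table_elt *elt)
{
  return elt->first_same_value->exp;
}
#endif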
477 /* We don't want a lot of buckets, because we rarely have very many
478 things stored in the hash table, and a lot of buckets slows
479 down a lot of loops that happen frequently. */
#define HASH_SHIFT 5
#define HASH_SIZE (1 << HASH_SHIFT)
482 #define HASH_MASK (HASH_SIZE - 1)
/* Compute hash code of X in mode M.  Special-case the case where X is a pseudo
485 register (hard registers may require `do_not_record' to be set). */
#define HASH(X, M)	\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
   ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
   : canon_hash (X, M)) & HASH_MASK)
492 /* Like HASH, but without side-effects. */
493 #define SAFE_HASH(X, M) \
494 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
495 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
496 : safe_hash (X, M)) & HASH_MASK)
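/* Illustrative sketch (hypothetical helper, not compiled): the usual
   protocol for entering an expression X of mode MODE into the table,
   following the rules documented above insert () further down.  */
#if 0
static void
example_enter (rtx x, enum machine_mode mode)
{
  unsigned hash;
  struct table_elt *elt;

  do_not_record = 0;
  hash = HASH (x, mode);
  if (do_not_record)
    return;			/* Volatile subexpression; don't record.  */
  elt = lookup (x, hash, mode);
  if (elt == 0)
    {
      if (insert_regs (x, NULL, 0))
	hash = HASH (x, mode);	/* Quantities changed; recompute the hash.  */
      insert (x, NULL, hash, mode);
    }
}
#endif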
498 /* Determine whether register number N is considered a fixed register for the
499 purpose of approximating register costs.
It is desirable to replace other regs with fixed regs, to reduce need for non-fixed hard regs.
502 A reg wins if it is either the frame pointer or designated as fixed. */
503 #define FIXED_REGNO_P(N) \
504 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
505 || fixed_regs[N] || global_regs[N])
507 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
508 hard registers and pointers into the frame are the cheapest with a cost
509 of 0. Next come pseudos with a cost of one and other hard registers with
510 a cost of 2. Aside from these special cases, call `rtx_cost'. */
512 #define CHEAP_REGNO(N) \
513 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
514 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
515 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
516 || ((N) < FIRST_PSEUDO_REGISTER \
517 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
519 #define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
520 #define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
522 /* Get the info associated with register N. */
524 #define GET_CSE_REG_INFO(N) \
525 (((N) == cached_regno && cached_cse_reg_info) \
526 ? cached_cse_reg_info : get_cse_reg_info ((N)))
/* Get the number of times this register has been updated in this basic block.  */
531 #define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
533 /* Get the point at which REG was recorded in the table. */
535 #define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a subreg).  */
540 #define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)
542 /* Get the quantity number for REG. */
544 #define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
546 /* Determine if the quantity number for register X represents a valid index
547 into the qty_table. */
549 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
551 static struct table_elt *table[HASH_SIZE];
553 /* Chain of `struct table_elt's made so far for this function
554 but currently removed from the table. */
556 static struct table_elt *free_element_chain;
558 /* Number of `struct table_elt' structures made so far for this function. */
560 static int n_elements_made;
562 /* Maximum value `n_elements_made' has had so far in this compilation
563 for functions previously processed. */
565 static int max_elements_made;
567 /* Set to the cost of a constant pool reference if one was found for a
568 symbolic constant. If this was found, it means we should try to
convert constants into constant pool entries if they don't fit in the insn.  */
572 static int constant_pool_entries_cost;
573 static int constant_pool_entries_regcost;
575 /* This data describes a block that will be processed by cse_basic_block. */
struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {PATH_TAKEN, PATH_NOT_TAKEN, PATH_AROUND} status;
    } *path;
};
601 static bool fixed_base_plus_p (rtx x);
602 static int notreg_cost (rtx, enum rtx_code);
603 static int approx_reg_cost_1 (rtx *, void *);
604 static int approx_reg_cost (rtx);
605 static int preferable (int, int, int, int);
606 static void new_basic_block (void);
607 static void make_new_qty (unsigned int, enum machine_mode);
608 static void make_regs_eqv (unsigned int, unsigned int);
609 static void delete_reg_equiv (unsigned int);
610 static int mention_regs (rtx);
611 static int insert_regs (rtx, struct table_elt *, int);
612 static void remove_from_table (struct table_elt *, unsigned);
613 static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
614 static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
615 static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert (rtx, struct table_elt *, unsigned, enum machine_mode);
618 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
619 static void invalidate (rtx, enum machine_mode);
620 static int cse_rtx_varies_p (rtx, int);
621 static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int, enum machine_mode);
624 static void rehash_using_reg (rtx);
625 static void invalidate_memory (void);
626 static void invalidate_for_call (void);
627 static rtx use_related_value (rtx, struct table_elt *);
629 static inline unsigned canon_hash (rtx, enum machine_mode);
630 static inline unsigned safe_hash (rtx, enum machine_mode);
631 static unsigned hash_rtx_string (const char *);
633 static rtx canon_reg (rtx, rtx);
634 static void find_best_addr (rtx, rtx *, enum machine_mode);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   enum machine_mode *, enum machine_mode *);
638 static rtx fold_rtx (rtx, rtx);
639 static rtx equiv_constant (rtx);
640 static void record_jump_equiv (rtx, int);
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx, rtx, int);
643 static void cse_insn (rtx, rtx);
644 static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
646 static int addr_affects_sp_p (rtx);
647 static void invalidate_from_clobbers (rtx);
648 static rtx cse_process_notes (rtx, rtx);
649 static void invalidate_skipped_set (rtx, rtx, void *);
650 static void invalidate_skipped_block (rtx);
651 static rtx cse_basic_block (rtx, rtx, struct branch_path *);
652 static void count_reg_usage (rtx, int *, int);
653 static int check_for_label_ref (rtx *, void *);
654 extern void dump_class (struct table_elt*);
655 static struct cse_reg_info * get_cse_reg_info (unsigned int);
656 static int check_dependence (rtx *, void *);
658 static void flush_hash_table (void);
659 static bool insn_live_p (rtx, int *);
660 static bool set_live_p (rtx, rtx, int *);
661 static bool dead_libcall_p (rtx, int *);
662 static int cse_change_cc_mode (rtx *, void *);
663 static void cse_change_cc_mode_insns (rtx, rtx, rtx);
664 static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
667 #undef RTL_HOOKS_GEN_LOWPART
668 #define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
670 static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
672 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
673 virtual regs here because the simplify_*_operation routines are called
674 by integrate.c, which is called before virtual register instantiation. */
static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
	return true;
      return false;
    case PLUS:
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	return false;
      return fixed_base_plus_p (XEXP (x, 0));
    default:
      return false;
    }
}
701 /* Dump the expressions in the equivalence class indicated by CLASSP.
702 This function is used only for debugging. */
void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}
719 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
722 approx_reg_cost_1 (rtx *xp, void *data)
729 unsigned int regno = REGNO (x);
731 if (! CHEAP_REGNO (regno))
733 if (regno < FIRST_PSEUDO_REGISTER)
735 if (SMALL_REGISTER_CLASSES)
747 /* Return an estimate of the cost of the registers used in an rtx.
748 This is mostly the number of different REG expressions in the rtx;
749 however for some exceptions like fixed registers we use a cost of
750 0. If any other hard register reference occurs, return MAX_COST. */
static int
approx_reg_cost (rtx x)
{
  int cost = 0;

  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
    return MAX_COST;

  return cost;
}
763 /* Return a negative value if an rtx A, whose costs are given by COST_A
764 and REGCOST_A, is more desirable than an rtx B.
Return a positive value if A is less desirable, or 0 if the two are equally desirable.  */
static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
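/* Illustrative checks (not compiled): with equal rtx costs the entry that
   ties up fewer registers wins, but otherwise raw rtx cost is compared
   first; a negative result means the first expression is preferable.  */
#if 0
static void
example_preference (void)
{
  gcc_assert (preferable (1, 0, 1, 2) < 0);	/* same cost, lower regcost */
  gcc_assert (preferable (1, 5, 3, 0) < 0);	/* lower cost wins anyway */
}
#endif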
798 /* Internal function, to compute cost when X is not a register; called
799 from COST macro to keep it simple. */
static int
notreg_cost (rtx x, enum rtx_code outer)
{
804 return ((GET_CODE (x) == SUBREG
805 && REG_P (SUBREG_REG (x))
806 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
807 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
808 && (GET_MODE_SIZE (GET_MODE (x))
809 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
810 && subreg_lowpart_p (x)
811 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
812 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? 0
	  : rtx_cost (x, outer) * 2);
}
818 static struct cse_reg_info *
819 get_cse_reg_info (unsigned int regno)
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
822 struct cse_reg_info *p;
824 for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      return p;
830 /* Get a new cse_reg_info structure. */
831 if (cse_reg_info_free_list)
833 p = cse_reg_info_free_list;
834 cse_reg_info_free_list = p->next;
837 p = xmalloc (sizeof (struct cse_reg_info));
839 /* Insert into hash table. */
  p->hash_next = *hash_head;
  *hash_head = p;

  /* Initialize it.  */
  p->reg_tick = 1;
845 p->reg_in_table = -1;
  p->subreg_ticked = -1;
  p->reg_qty = regno;
  p->regno = regno;
849 p->next = cse_reg_info_used_list;
850 cse_reg_info_used_list = p;
851 if (!cse_reg_info_used_list_end)
852 cse_reg_info_used_list_end = p;
855 /* Cache this lookup; we tend to be looking up information about the
856 same register several times in a row. */
857 cached_regno = regno;
  cached_cse_reg_info = p;

  return p;
}
863 /* Clear the hash table and initialize each register with its own quantity,
864 for a new basic block. */
867 new_basic_block (void)
873 /* Clear out hash table state for this pass. */
875 memset (reg_hash, 0, sizeof reg_hash);
877 if (cse_reg_info_used_list)
879 cse_reg_info_used_list_end->next = cse_reg_info_free_list;
880 cse_reg_info_free_list = cse_reg_info_used_list;
881 cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
883 cached_cse_reg_info = 0;
885 CLEAR_HARD_REG_SET (hard_regs_in_table);
887 /* The per-quantity values used to be initialized here, but it is
888 much faster to initialize each as it is made in `make_new_qty'. */
890 for (i = 0; i < HASH_SIZE; i++)
892 struct table_elt *first;
897 struct table_elt *last = first;
901 while (last->next_same_hash != NULL)
902 last = last->next_same_hash;
904 /* Now relink this hash entire chain into
905 the free element list. */
907 last->next_same_hash = free_element_chain;
908 free_element_chain = first;
/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}
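/* Illustrative sketch (hypothetical helper, not compiled): the simplified
   bookkeeping for a register-to-register copy.  In the pass itself cse_insn
   drives this through insert_regs/make_regs_eqv, but the net effect is that
   DEST drops its old equivalence and joins SRC's quantity.  */
#if 0
static void
example_record_reg_copy (unsigned int dest, unsigned int src,
			 enum machine_mode mode)
{
  if (!REGNO_QTY_VALID_P (src))
    make_new_qty (src, mode);
  delete_reg_equiv (dest);
  make_regs_eqv (dest, src);
}
#endif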
942 /* Make reg NEW equivalent to reg OLD.
943 OLD is not changing; NEW is. */
946 make_regs_eqv (unsigned int new, unsigned int old)
948 unsigned int lastr, firstr;
949 int q = REG_QTY (old);
950 struct qty_table_elem *ent;
954 /* Nothing should become eqv until it has a "non-invalid" qty number. */
955 gcc_assert (REGNO_QTY_VALID_P (old));
958 firstr = ent->first_reg;
959 lastr = ent->last_reg;
961 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
962 hard regs. Among pseudos, if NEW will live longer than any other reg
963 of the same qty, and that is beyond the current basic block,
964 make it the new canonical replacement for this qty. */
965 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
966 /* Certain fixed registers might be of the class NO_REGS. This means
967 that not only can they not be allocated by the compiler, but
968 they cannot be used in substitutions or canonicalizations
970 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
971 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
972 || (new >= FIRST_PSEUDO_REGISTER
973 && (firstr < FIRST_PSEUDO_REGISTER
974 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
975 || (uid_cuid[REGNO_FIRST_UID (new)]
976 < cse_basic_block_start))
977 && (uid_cuid[REGNO_LAST_UID (new)]
978 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
980 reg_eqv_table[firstr].prev = new;
981 reg_eqv_table[new].next = firstr;
982 reg_eqv_table[new].prev = -1;
983 ent->first_reg = new;
987 /* If NEW is a hard reg (known to be non-fixed), insert at end.
988 Otherwise, insert before any non-fixed hard regs that are at the
989 end. Registers of class NO_REGS cannot be used as an
990 equivalent for anything. */
991 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
992 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
993 && new >= FIRST_PSEUDO_REGISTER)
994 lastr = reg_eqv_table[lastr].prev;
995 reg_eqv_table[new].next = reg_eqv_table[lastr].next;
996 if (reg_eqv_table[lastr].next >= 0)
997 reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
999 qty_table[q].last_reg = new;
1000 reg_eqv_table[lastr].next = new;
1001 reg_eqv_table[new].prev = lastr;
1005 /* Remove REG from its equivalence class. */
1008 delete_reg_equiv (unsigned int reg)
1010 struct qty_table_elem *ent;
1011 int q = REG_QTY (reg);
1014 /* If invalid, do nothing. */
1018 ent = &qty_table[q];
1020 p = reg_eqv_table[reg].prev;
1021 n = reg_eqv_table[reg].next;
1024 reg_eqv_table[n].prev = p;
1028 reg_eqv_table[p].next = n;
1032 REG_QTY (reg) = reg;
1035 /* Remove any invalid expressions from the hash table
1036 that refer to any of the registers contained in expression X.
1038 Make sure that newly inserted references to those registers
1039 as subexpressions will be considered valid.
1041 mention_regs is not called when a register itself
1042 is being stored in the table.
1044 Return 1 if we have done something that may have changed the hash code
1048 mention_regs (rtx x)
1058 code = GET_CODE (x);
1061 unsigned int regno = REGNO (x);
1062 unsigned int endregno
1063 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1064 : hard_regno_nregs[regno][GET_MODE (x)]);
1067 for (i = regno; i < endregno; i++)
1069 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1070 remove_invalid_refs (i);
1072 REG_IN_TABLE (i) = REG_TICK (i);
1073 SUBREG_TICKED (i) = -1;
1079 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1080 pseudo if they don't use overlapping words. We handle only pseudos
1081 here for simplicity. */
1082 if (code == SUBREG && REG_P (SUBREG_REG (x))
1083 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1085 unsigned int i = REGNO (SUBREG_REG (x));
1087 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1089 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1090 the last store to this register really stored into this
1091 subreg, then remove the memory of this subreg.
1092 Otherwise, remove any memory of the entire register and
1093 all its subregs from the table. */
1094 if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1095 || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1096 remove_invalid_refs (i);
1098 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1101 REG_IN_TABLE (i) = REG_TICK (i);
1102 SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1106 /* If X is a comparison or a COMPARE and either operand is a register
1107 that does not have a quantity, give it one. This is so that a later
1108 call to record_jump_equiv won't cause X to be assigned a different
1109 hash code and not found in the table after that call.
1111 It is not necessary to do this here, since rehash_using_reg can
1112 fix up the table later, but doing this here eliminates the need to
1113 call that expensive function in the most common case where the only
1114 use of the register is in the comparison. */
1116 if (code == COMPARE || COMPARISON_P (x))
1118 if (REG_P (XEXP (x, 0))
1119 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }
1126 if (REG_P (XEXP (x, 1))
1127 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
1135 fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
1139 else if (fmt[i] == 'E')
1140 for (j = 0; j < XVECLEN (x, i); j++)
      changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
1146 /* Update the register quantities for inserting X into the hash table
1147 with a value equivalent to CLASSP.
1148 (If the class does not contain a REG, it is irrelevant.)
1149 If MODIFIED is nonzero, X is a destination; it is being modified.
1150 Note that delete_reg_equiv should be called on a register
1151 before insert_regs is done on that register with MODIFIED != 0.
1153 Nonzero value means that elements of reg_qty have changed
1154 so X's hash code may be different. */
1157 insert_regs (rtx x, struct table_elt *classp, int modified)
1161 unsigned int regno = REGNO (x);
1164 /* If REGNO is in the equivalence table already but is of the
1165 wrong mode for that equivalence, don't do anything here. */
1167 qty_valid = REGNO_QTY_VALID_P (regno);
1170 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1172 if (ent->mode != GET_MODE (x))
1176 if (modified || ! qty_valid)
1179 for (classp = classp->first_same_value;
1181 classp = classp->next_same_value)
1182 if (REG_P (classp->exp)
1183 && GET_MODE (classp->exp) == GET_MODE (x))
1185 make_regs_eqv (regno, REGNO (classp->exp));
1189 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1190 than REG_IN_TABLE to find out if there was only a single preceding
1191 invalidation - for the SUBREG - or another one, which would be
1192 for the full register. However, if we find here that REG_TICK
1193 indicates that the register is invalid, it means that it has
1194 been invalidated in a separate operation. The SUBREG might be used
1195 now (then this is a recursive call), or we might use the full REG
1196 now and a SUBREG of it later. So bump up REG_TICK so that
1197 mention_regs will do the right thing. */
1199 && REG_IN_TABLE (regno) >= 0
1200 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1202 make_new_qty (regno, GET_MODE (x));
1209 /* If X is a SUBREG, we will likely be inserting the inner register in the
1210 table. If that register doesn't have an assigned quantity number at
1211 this point but does later, the insertion that we will be doing now will
1212 not be accessible because its hash code will have changed. So assign
1213 a quantity number now. */
1215 else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
1216 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1218 insert_regs (SUBREG_REG (x), NULL, 0);
  return mention_regs (x);
}
1226 /* Look in or update the hash table. */
1228 /* Remove table element ELT from use in the table.
1229 HASH is its hash code, made using the HASH macro.
1230 It's an argument because often that is known in advance
1231 and we save much time not recomputing it. */
1234 remove_from_table (struct table_elt *elt, unsigned int hash)
1239 /* Mark this element as removed. See cse_insn. */
1240 elt->first_same_value = 0;
1242 /* Remove the table element from its equivalence class. */
1245 struct table_elt *prev = elt->prev_same_value;
1246 struct table_elt *next = elt->next_same_value;
1249 next->prev_same_value = prev;
1252 prev->next_same_value = next;
1255 struct table_elt *newfirst = next;
1258 next->first_same_value = newfirst;
1259 next = next->next_same_value;
1264 /* Remove the table element from its hash bucket. */
1267 struct table_elt *prev = elt->prev_same_hash;
1268 struct table_elt *next = elt->next_same_hash;
1271 next->prev_same_hash = prev;
1274 prev->next_same_hash = next;
1275 else if (table[hash] == elt)
1279 /* This entry is not in the proper hash bucket. This can happen
1280 when two classes were merged by `merge_equiv_classes'. Search
1281 for the hash bucket that it heads. This happens only very
1282 rarely, so the cost is acceptable. */
1283 for (hash = 0; hash < HASH_SIZE; hash++)
1284 if (table[hash] == elt)
1289 /* Remove the table element from its related-value circular chain. */
1291 if (elt->related_value != 0 && elt->related_value != elt)
1293 struct table_elt *p = elt->related_value;
1295 while (p->related_value != elt)
1296 p = p->related_value;
1297 p->related_value = elt->related_value;
1298 if (p->related_value == p)
1299 p->related_value = 0;
1302 /* Now add it to the free element chain. */
1303 elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}
1307 /* Look up X in the hash table and return its table element,
1308 or 0 if X is not in the table.
1310 MODE is the machine-mode of X, or if X is an integer constant
1311 with VOIDmode then MODE is the mode with which X will be used.
Here we are satisfied to find an expression whose tree structure looks like X.  */
1316 static struct table_elt *
1317 lookup (rtx x, unsigned int hash, enum machine_mode mode)
1319 struct table_elt *p;
1321 for (p = table[hash]; p; p = p->next_same_hash)
1322 if (mode == p->mode && ((x == p->exp && REG_P (x))
			    || exp_equiv_p (x, p->exp, !REG_P (x), false)))
      return p;

  return 0;
}
1329 /* Like `lookup' but don't care whether the table element uses invalid regs.
1330 Also ignore discrepancies in the machine mode of a register. */
1332 static struct table_elt *
1333 lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
1335 struct table_elt *p;
1339 unsigned int regno = REGNO (x);
1341 /* Don't check the machine mode when comparing registers;
1342 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1343 for (p = table[hash]; p; p = p->next_same_hash)
1345 && REGNO (p->exp) == regno)
1350 for (p = table[hash]; p; p = p->next_same_hash)
1352 && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1359 /* Look for an expression equivalent to X and with code CODE.
1360 If one is found, return that expression. */
1363 lookup_as_function (rtx x, enum rtx_code code)
1366 = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1368 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1369 long as we are narrowing. So if we looked in vain for a mode narrower
1370 than word_mode before, look for word_mode now. */
1371 if (p == 0 && code == CONST_INT
1372 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1375 PUT_MODE (x, word_mode);
1376 p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
1382 for (p = p->first_same_value; p; p = p->next_same_value)
1383 if (GET_CODE (p->exp) == code
1384 /* Make sure this is a valid entry in the table. */
1385 && exp_equiv_p (p->exp, p->exp, 1, false))
1391 /* Insert X in the hash table, assuming HASH is its hash code
1392 and CLASSP is an element of the class it should go in
1393 (or 0 if a new class should be made).
1394 It is inserted at the proper position to keep the class in
1395 the order cheapest first.
1397 MODE is the machine-mode of X, or if X is an integer constant
1398 with VOIDmode then MODE is the mode with which X will be used.
1400 For elements of equal cheapness, the most recent one
1401 goes in front, except that the first element in the list
1402 remains first unless a cheaper element is added. The order of
1403 pseudo-registers does not matter, as canon_reg will be called to
1404 find the cheapest when a register is retrieved from the table.
1406 The in_memory field in the hash table element is set to 0.
1407 The caller must set it nonzero if appropriate.
1409 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1410 and if insert_regs returns a nonzero value
1411 you must then recompute its hash code before calling here.
1413 If necessary, update table showing constant values of quantities. */
1415 #define CHEAPER(X, Y) \
1416 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1418 static struct table_elt *
1419 insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
1421 struct table_elt *elt;
1423 /* If X is a register and we haven't made a quantity for it,
1424 something is wrong. */
1425 gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1427 /* If X is a hard register, show it is being put in the table. */
1428 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1430 unsigned int regno = REGNO (x);
1431 unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
1434 for (i = regno; i < endregno; i++)
1435 SET_HARD_REG_BIT (hard_regs_in_table, i);
1438 /* Put an element for X into the right hash bucket. */
1440 elt = free_element_chain;
1442 free_element_chain = elt->next_same_hash;
1446 elt = xmalloc (sizeof (struct table_elt));
1450 elt->canon_exp = NULL_RTX;
1451 elt->cost = COST (x);
1452 elt->regcost = approx_reg_cost (x);
1453 elt->next_same_value = 0;
1454 elt->prev_same_value = 0;
1455 elt->next_same_hash = table[hash];
1456 elt->prev_same_hash = 0;
1457 elt->related_value = 0;
1460 elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1463 table[hash]->prev_same_hash = elt;
1466 /* Put it into the proper value-class. */
1469 classp = classp->first_same_value;
1470 if (CHEAPER (elt, classp))
1471 /* Insert at the head of the class. */
1473 struct table_elt *p;
1474 elt->next_same_value = classp;
1475 classp->prev_same_value = elt;
1476 elt->first_same_value = elt;
1478 for (p = classp; p; p = p->next_same_value)
1479 p->first_same_value = elt;
1483 /* Insert not at head of the class. */
1484 /* Put it after the last element cheaper than X. */
1485 struct table_elt *p, *next;
1487 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1490 /* Put it after P and before NEXT. */
1491 elt->next_same_value = next;
1493 next->prev_same_value = elt;
1495 elt->prev_same_value = p;
1496 p->next_same_value = elt;
1497 elt->first_same_value = classp;
1501 elt->first_same_value = elt;
1503 /* If this is a constant being set equivalent to a register or a register
1504 being set equivalent to a constant, note the constant equivalence.
1506 If this is a constant, it cannot be equivalent to a different constant,
1507 and a constant is the only thing that can be cheaper than a register. So
1508 we know the register is the head of the class (before the constant was
1511 If this is a register that is not already known equivalent to a
1512 constant, we must check the entire class.
1514 If this is a register that is already known equivalent to an insn,
1515 update the qtys `const_insn' to show that `this_insn' is the latest
1516 insn making that quantity equivalent to the constant. */
1518 if (elt->is_const && classp && REG_P (classp->exp)
1521 int exp_q = REG_QTY (REGNO (classp->exp));
1522 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1524 exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1525 exp_ent->const_insn = this_insn;
1530 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1533 struct table_elt *p;
1535 for (p = classp; p != 0; p = p->next_same_value)
1537 if (p->is_const && !REG_P (p->exp))
1539 int x_q = REG_QTY (REGNO (x));
1540 struct qty_table_elem *x_ent = &qty_table[x_q];
1543 = gen_lowpart (GET_MODE (x), p->exp);
1544 x_ent->const_insn = this_insn;
1551 && qty_table[REG_QTY (REGNO (x))].const_rtx
1552 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1553 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1555 /* If this is a constant with symbolic value,
1556 and it has a term with an explicit integer value,
1557 link it up with related expressions. */
1558 if (GET_CODE (x) == CONST)
1560 rtx subexp = get_related_value (x);
1562 struct table_elt *subelt, *subelt_prev;
1566 /* Get the integer-free subexpression in the hash table. */
1567 subhash = SAFE_HASH (subexp, mode);
1568 subelt = lookup (subexp, subhash, mode);
1570 subelt = insert (subexp, NULL, subhash, mode);
1571 /* Initialize SUBELT's circular chain if it has none. */
1572 if (subelt->related_value == 0)
1573 subelt->related_value = subelt;
1574 /* Find the element in the circular chain that precedes SUBELT. */
1575 subelt_prev = subelt;
1576 while (subelt_prev->related_value != subelt)
1577 subelt_prev = subelt_prev->related_value;
1578 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1579 This way the element that follows SUBELT is the oldest one. */
1580 elt->related_value = subelt_prev->related_value;
1581 subelt_prev->related_value = elt;
1588 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1589 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1590 the two classes equivalent.
CLASS1 will be the surviving class; CLASS2 should not be used after this call.
1595 Any invalid entries in CLASS2 will not be copied. */
1598 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1600 struct table_elt *elt, *next, *new;
1602 /* Ensure we start with the head of the classes. */
1603 class1 = class1->first_same_value;
1604 class2 = class2->first_same_value;
1606 /* If they were already equal, forget it. */
1607 if (class1 == class2)
1610 for (elt = class2; elt; elt = next)
1614 enum machine_mode mode = elt->mode;
1616 next = elt->next_same_value;
1618 /* Remove old entry, make a new one in CLASS1's class.
1619 Don't do this for invalid entries as we cannot find their
1620 hash code (it also isn't necessary). */
1621 if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1623 bool need_rehash = false;
1625 hash_arg_in_memory = 0;
1626 hash = HASH (exp, mode);
1630 need_rehash = (unsigned) REG_QTY (REGNO (exp)) != REGNO (exp);
1631 delete_reg_equiv (REGNO (exp));
1634 remove_from_table (elt, hash);
1636 if (insert_regs (exp, class1, 0) || need_rehash)
1638 rehash_using_reg (exp);
1639 hash = HASH (exp, mode);
1641 new = insert (exp, class1, hash, mode);
1642 new->in_memory = hash_arg_in_memory;
1647 /* Flush the entire hash table. */
1650 flush_hash_table (void)
1653 struct table_elt *p;
1655 for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = table[i])
      {
	/* Note that invalidate can remove elements
	   after P in the current hash chain.  */
	if (REG_P (p->exp))
	  invalidate (p->exp, p->mode);
	else
	  remove_from_table (p, i);
      }
}
/* Function called for each rtx to check whether a true dependence exists.  */
struct check_dependence_data
{
  enum machine_mode mode;
  rtx exp;
  rtx addr;
};
static int
check_dependence (rtx *x, void *data)
{
1678 struct check_dependence_data *d = (struct check_dependence_data *) data;
1679 if (*x && MEM_P (*x))
    return canon_true_dependence (d->exp, d->mode, d->addr, *x,
				  cse_rtx_varies_p);
  else
    return 0;
}
1686 /* Remove from the hash table, or mark as invalid, all expressions whose
1687 values could be altered by storing in X. X is a register, a subreg, or
1688 a memory reference with nonvarying address (because, when a memory
1689 reference with a varying address is stored in, all memory references are
1690 removed by invalidate_memory so specific invalidation is superfluous).
1691 FULL_MODE, if not VOIDmode, indicates that this much should be
1692 invalidated instead of just the amount indicated by the mode of X. This
1693 is only used for bitfield stores into memory.
1695 A nonvarying address may be just a register or just a symbol reference,
1696 or it may be either of those plus a numeric offset. */
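/* For example (illustrative addresses), a store through
   (mem (plus (reg fp) (const_int -4))) only invalidates table entries that
   can overlap that stack slot, whereas a store through an arbitrary pointer
   is handled by invalidate_memory, which flushes every memory reference.  */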
1699 invalidate (rtx x, enum machine_mode full_mode)
1702 struct table_elt *p;
1705 switch (GET_CODE (x))
1709 /* If X is a register, dependencies on its contents are recorded
1710 through the qty number mechanism. Just change the qty number of
1711 the register, mark it as invalid for expressions that refer to it,
1712 and remove it itself. */
1713 unsigned int regno = REGNO (x);
1714 unsigned int hash = HASH (x, GET_MODE (x));
1716 /* Remove REGNO from any quantity list it might be on and indicate
1717 that its value might have changed. If it is a pseudo, remove its
1718 entry from the hash table.
1720 For a hard register, we do the first two actions above for any
1721 additional hard registers corresponding to X. Then, if any of these
1722 registers are in the table, we must remove any REG entries that
1723 overlap these registers. */
1725 delete_reg_equiv (regno);
1727 SUBREG_TICKED (regno) = -1;
1729 if (regno >= FIRST_PSEUDO_REGISTER)
1731 /* Because a register can be referenced in more than one mode,
1732 we might have to remove more than one table entry. */
1733 struct table_elt *elt;
1735 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1736 remove_from_table (elt, hash);
1740 HOST_WIDE_INT in_table
1741 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1742 unsigned int endregno
1743 = regno + hard_regno_nregs[regno][GET_MODE (x)];
1744 unsigned int tregno, tendregno, rn;
1745 struct table_elt *p, *next;
1747 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1749 for (rn = regno + 1; rn < endregno; rn++)
1751 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1752 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1753 delete_reg_equiv (rn);
1755 SUBREG_TICKED (rn) = -1;
1759 for (hash = 0; hash < HASH_SIZE; hash++)
1760 for (p = table[hash]; p; p = next)
1762 next = p->next_same_hash;
1765 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1768 tregno = REGNO (p->exp);
1770 = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
1771 if (tendregno > regno && tregno < endregno)
1772 remove_from_table (p, hash);
1779 invalidate (SUBREG_REG (x), VOIDmode);
1783 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1784 invalidate (XVECEXP (x, 0, i), VOIDmode);
1788 /* This is part of a disjoint return value; extract the location in
1789 question ignoring the offset. */
1790 invalidate (XEXP (x, 0), VOIDmode);
1794 addr = canon_rtx (get_addr (XEXP (x, 0)));
1795 /* Calculate the canonical version of X here so that
1796 true_dependence doesn't generate new RTL for X on each call. */
1799 /* Remove all hash table elements that refer to overlapping pieces of
1801 if (full_mode == VOIDmode)
1802 full_mode = GET_MODE (x);
1804 for (i = 0; i < HASH_SIZE; i++)
1806 struct table_elt *next;
1808 for (p = table[i]; p; p = next)
1810 next = p->next_same_hash;
1813 struct check_dependence_data d;
1815 /* Just canonicalize the expression once;
1816 otherwise each time we call invalidate
1817 true_dependence will canonicalize the
1818 expression again. */
1820 p->canon_exp = canon_rtx (p->exp);
1824 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1825 remove_from_table (p, i);
1836 /* Remove all expressions that refer to register REGNO,
1837 since they are already invalid, and we are about to
1838 mark that register valid again and don't want the old
1839 expressions to reappear as valid. */
1842 remove_invalid_refs (unsigned int regno)
1845 struct table_elt *p, *next;
1847 for (i = 0; i < HASH_SIZE; i++)
1848 for (p = table[i]; p; p = next)
1850 next = p->next_same_hash;
1852 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1853 remove_from_table (p, i);
/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET, and mode MODE.  */
1860 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1861 enum machine_mode mode)
1864 struct table_elt *p, *next;
1865 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1867 for (i = 0; i < HASH_SIZE; i++)
1868 for (p = table[i]; p; p = next)
1871 next = p->next_same_hash;
1874 && (GET_CODE (exp) != SUBREG
1875 || !REG_P (SUBREG_REG (exp))
1876 || REGNO (SUBREG_REG (exp)) != regno
1877 || (((SUBREG_BYTE (exp)
1878 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1879 && SUBREG_BYTE (exp) <= end))
1880 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1881 remove_from_table (p, i);
1885 /* Recompute the hash codes of any valid entries in the hash table that
1886 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1888 This is called when we make a jump equivalence. */
1891 rehash_using_reg (rtx x)
1894 struct table_elt *p, *next;
  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);
1900 /* If X is not a register or if the register is known not to be in any
1901 valid entries in the table, we have no work to do. */
  if (!REG_P (x)
      || REG_IN_TABLE (REGNO (x)) < 0
      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
    return;
1908 /* Scan all hash chains looking for valid entries that mention X.
1909 If we find one and it is in the wrong hash chain, move it. */
1911 for (i = 0; i < HASH_SIZE; i++)
1912 for (p = table[i]; p; p = next)
1914 next = p->next_same_hash;
1915 if (reg_mentioned_p (x, p->exp)
1916 && exp_equiv_p (p->exp, p->exp, 1, false)
1917 && i != (hash = SAFE_HASH (p->exp, p->mode)))
1919 if (p->next_same_hash)
1920 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1922 if (p->prev_same_hash)
1923 p->prev_same_hash->next_same_hash = p->next_same_hash;
1925 table[i] = p->next_same_hash;
	  p->next_same_hash = table[hash];
	  p->prev_same_hash = 0;
	  if (table[hash])
	    table[hash]->prev_same_hash = p;
	  table[hash] = p;
1936 /* Remove from the hash table any expression that is a call-clobbered
1937 register. Also update their TICK values. */
1940 invalidate_for_call (void)
1942 unsigned int regno, endregno;
1945 struct table_elt *p, *next;
1948 /* Go through all the hard registers. For each that is clobbered in
1949 a CALL_INSN, remove the register from quantity chains and update
1950 reg_tick if defined. Also see if any of these registers is currently
1953 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1954 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1956 delete_reg_equiv (regno);
	if (REG_TICK (regno) >= 0)
	  {
	    REG_TICK (regno)++;
	    SUBREG_TICKED (regno) = -1;
	  }
1963 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1966 /* In the case where we have no call-clobbered hard registers in the
1967 table, we are done. Otherwise, scan the table and remove any
1968 entry that overlaps a call-clobbered register. */
1971 for (hash = 0; hash < HASH_SIZE; hash++)
1972 for (p = table[hash]; p; p = next)
1974 next = p->next_same_hash;
1977 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1980 regno = REGNO (p->exp);
1981 endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];
1983 for (i = regno; i < endregno; i++)
1984 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1986 remove_from_table (p, hash);
1992 /* Given an expression X of type CONST,
1993 and ELT which is its table entry (or 0 if it
1994 is not in the hash table),
1995 return an alternate expression for X as a register plus integer.
1996 If none can be found, return 0. */
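/* An illustrative sketch (symbol name and register number invented):
   if X is (const (plus (symbol_ref "tbl") (const_int 16))) and the table
   already holds the related entry (const (plus (symbol_ref "tbl")
   (const_int 4))) with (reg:SI 77) recorded as an equivalent, this
   routine can return (plus (reg:SI 77) (const_int 12)) instead of
   forcing the whole constant to be rematerialized.  */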
1999 use_related_value (rtx x, struct table_elt *elt)
2001 struct table_elt *relt = 0;
2002 struct table_elt *p, *q;
2003 HOST_WIDE_INT offset;
2005 /* First, is there anything related known?
2006 If we have a table element, we can tell from that.
2007 Otherwise, must look it up. */
2009 if (elt != 0 && elt->related_value != 0)
2011 else if (elt == 0 && GET_CODE (x) == CONST)
2013 rtx subexp = get_related_value (x);
2015 relt = lookup (subexp,
2016 SAFE_HASH (subexp, GET_MODE (subexp)),
2023 /* Search all related table entries for one that has an
2024 equivalent register. */
2029 /* This loop is strange in that it is executed in two different cases.
2030 The first is when X is already in the table. Then it is searching
2031 the RELATED_VALUE list of X's class (RELT). The second case is when
2032 X is not in the table. Then RELT points to a class for the related
2035 Ensure that, whatever case we are in, we ignore classes that have
2036 the same value as X. */
2038 if (rtx_equal_p (x, p->exp))
2041 for (q = p->first_same_value; q; q = q->next_same_value)
2048 p = p->related_value;
2050 /* We went all the way around, so there is nothing to be found.
2051 Alternatively, perhaps RELT was in the table for some other reason
2052 and it has no related values recorded. */
2053 if (p == relt || p == 0)
2060 offset = (get_integer_term (x) - get_integer_term (p->exp));
2061 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2062 return plus_constant (q->exp, offset);
2065 /* Hash a string. Just add its bytes up. */
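/* For example (assuming ASCII), hash_rtx_string ("ab") simply sums the
   bytes of the string: 'a' + 'b' == 97 + 98 == 195.  */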
2066 static inline unsigned
2067 hash_rtx_string (const char *ps)
2070 const unsigned char *p = (const unsigned char *) ps;
2079 /* Hash an rtx. We are careful to make sure the value is never negative.
2080 Equivalent registers hash identically.
2081 MODE is used in hashing for CONST_INTs only;
2082 otherwise the mode of X is used.
2084 Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2086 If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2087 a MEM rtx which is not marked MEM_READONLY_P.
2089 Note that cse_insn knows that the hash code of a MEM expression
2090 is just (int) MEM plus the hash code of the address. */
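/* An illustrative sketch (register number invented): for
   (mem:SI (plus:SI (reg:SI 66) (const_int 4))) the MEM case below adds
   (unsigned) MEM and then continues with the address, so the result is
   the MEM code plus the hash of the PLUS expression; cse_insn relies on
   exactly this decomposition.  */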
2093 hash_rtx (rtx x, enum machine_mode mode, int *do_not_record_p,
2094 int *hash_arg_in_memory_p, bool have_reg_qty)
2101 /* Used to turn recursion into iteration. We can't rely on GCC's
2102 tail-recursion elimination since we need to keep accumulating values
2108 code = GET_CODE (x);
2113 unsigned int regno = REGNO (x);
2115 if (!reload_completed)
2117 /* On some machines, we can't record any non-fixed hard register,
2118 because extending its life will cause reload problems. We
2119 consider ap, fp, sp, gp to be fixed for this purpose.
2121 We also consider CCmode registers to be fixed for this purpose;
2122 failure to do so leads to failure to simplify 0<100 type of
2125 On all machines, we can't record any global registers.
2126 Nor should we record any register that is in a small
2127 class, as defined by CLASS_LIKELY_SPILLED_P. */
2130 if (regno >= FIRST_PSEUDO_REGISTER)
2132 else if (x == frame_pointer_rtx
2133 || x == hard_frame_pointer_rtx
2134 || x == arg_pointer_rtx
2135 || x == stack_pointer_rtx
2136 || x == pic_offset_table_rtx)
2138 else if (global_regs[regno])
2140 else if (fixed_regs[regno])
2142 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2144 else if (SMALL_REGISTER_CLASSES)
2146 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2153 *do_not_record_p = 1;
2158 hash += ((unsigned int) REG << 7);
2159 hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2163 /* We handle SUBREG of a REG specially because the underlying
2164 reg changes its hash value with every value change; we don't
2165 want to have to forget unrelated subregs when one subreg changes. */
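/* For example, (subreg:QI (reg:SI 100) 0) hashes on the SUBREG code,
   the register number 100 and the byte offset, not on reg 100's
   quantity number, so giving reg 100 a new value does not force every
   subreg entry to be rehashed.  (Register number invented.)  */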
2168 if (REG_P (SUBREG_REG (x)))
2170 hash += (((unsigned int) SUBREG << 7)
2171 + REGNO (SUBREG_REG (x))
2172 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2179 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2180 + (unsigned int) INTVAL (x));
2184 /* This is like the general case, except that it only counts
2185 the integers representing the constant. */
2186 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2187 if (GET_MODE (x) != VOIDmode)
2188 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2190 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2191 + (unsigned int) CONST_DOUBLE_HIGH (x));
2199 units = CONST_VECTOR_NUNITS (x);
2201 for (i = 0; i < units; ++i)
2203 elt = CONST_VECTOR_ELT (x, i);
2204 hash += hash_rtx (elt, GET_MODE (elt), do_not_record_p,
2205 hash_arg_in_memory_p, have_reg_qty);
2211 /* Assume there is only one rtx object for any given label. */
2213 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2214 differences and differences between each stage's debugging dumps. */
2215 hash += (((unsigned int) LABEL_REF << 7)
2216 + CODE_LABEL_NUMBER (XEXP (x, 0)));
2221 /* Don't hash on the symbol's address to avoid bootstrap differences.
2222 Different hash values may cause expressions to be recorded in
2223 different orders and thus different registers to be used in the
2224 final assembler. This also avoids differences in the dump files
2225 between various stages. */
2227 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2230 h += (h << 7) + *p++; /* ??? revisit */
2232 hash += ((unsigned int) SYMBOL_REF << 7) + h;
2237 /* We don't record if marked volatile or if BLKmode since we don't
2238 know the size of the move. */
2239 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2241 *do_not_record_p = 1;
2244 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2245 *hash_arg_in_memory_p = 1;
2247 /* Now that we have already found this special case,
2248 might as well speed it up as much as possible. */
2249 hash += (unsigned) MEM;
2254 /* A USE that mentions non-volatile memory needs special
2255 handling since the MEM may be BLKmode which normally
2256 prevents an entry from being made. Pure calls are
2257 marked by a USE which mentions BLKmode memory.
2258 See calls.c:emit_call_1. */
2259 if (MEM_P (XEXP (x, 0))
2260 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2262 hash += (unsigned) USE;
2265 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2266 *hash_arg_in_memory_p = 1;
2268 /* Now that we have already found this special case,
2269 might as well speed it up as much as possible. */
2270 hash += (unsigned) MEM;
2285 case UNSPEC_VOLATILE:
2286 *do_not_record_p = 1;
2290 if (MEM_VOLATILE_P (x))
2292 *do_not_record_p = 1;
2297 /* We don't want to take the filename and line into account. */
2298 hash += (unsigned) code + (unsigned) GET_MODE (x)
2299 + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2300 + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2301 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2303 if (ASM_OPERANDS_INPUT_LENGTH (x))
2305 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2307 hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
2308 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2309 do_not_record_p, hash_arg_in_memory_p,
2312 (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2315 hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2316 x = ASM_OPERANDS_INPUT (x, 0);
2317 mode = GET_MODE (x);
2329 i = GET_RTX_LENGTH (code) - 1;
2330 hash += (unsigned) code + (unsigned) GET_MODE (x);
2331 fmt = GET_RTX_FORMAT (code);
2337 /* If we are about to do the last recursive call
2338 needed at this level, change it into iteration.
2339 This function is called enough to be worth it. */
2346 hash += hash_rtx (XEXP (x, i), 0, do_not_record_p,
2347 hash_arg_in_memory_p, have_reg_qty);
2351 for (j = 0; j < XVECLEN (x, i); j++)
2352 hash += hash_rtx (XVECEXP (x, i, j), 0, do_not_record_p,
2353 hash_arg_in_memory_p, have_reg_qty);
2357 hash += hash_rtx_string (XSTR (x, i));
2361 hash += (unsigned int) XINT (x, i);
2376 /* Hash an rtx X for cse via hash_rtx.
2377 Stores 1 in do_not_record if any subexpression is volatile.
2378 Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2379 is not marked MEM_READONLY_P. */
2381 static inline unsigned
2382 canon_hash (rtx x, enum machine_mode mode)
2384 return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2387 /* Like canon_hash but with no side effects, i.e. do_not_record
2388 and hash_arg_in_memory are not changed. */
2390 static inline unsigned
2391 safe_hash (rtx x, enum machine_mode mode)
2393 int dummy_do_not_record;
2394 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2397 /* Return 1 iff X and Y would canonicalize into the same thing,
2398 without actually constructing the canonicalization of either one.
2399 If VALIDATE is nonzero,
2400 we assume X is an expression being processed from the rtl
2401 and Y was found in the hash table. We check register refs
2402 in Y for being marked as valid.
2404 If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
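/* An illustrative sketch (register numbers invented): when VALIDATE is
   nonzero, (plus:SI (reg:SI 66) (const_int 4)) taken from the insn
   stream matches a table entry (plus:SI (reg:SI 70) (const_int 4))
   only if regs 66 and 70 currently share a quantity number and the
   entry's registers are still valid, i.e. REG_IN_TABLE equals REG_TICK
   for each of them.  */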
2407 exp_equiv_p (rtx x, rtx y, int validate, bool for_gcse)
2413 /* Note: it is incorrect to assume an expression is equivalent to itself
2414 if VALIDATE is nonzero. */
2415 if (x == y && !validate)
2418 if (x == 0 || y == 0)
2421 code = GET_CODE (x);
2422 if (code != GET_CODE (y))
2425 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2426 if (GET_MODE (x) != GET_MODE (y))
2437 return XEXP (x, 0) == XEXP (y, 0);
2440 return XSTR (x, 0) == XSTR (y, 0);
2444 return REGNO (x) == REGNO (y);
2447 unsigned int regno = REGNO (y);
2449 unsigned int endregno
2450 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2451 : hard_regno_nregs[regno][GET_MODE (y)]);
2453 /* If the quantities are not the same, the expressions are not
2454 equivalent. If they are the same and we are not to validate, they
2455 are equivalent. Otherwise, ensure all regs are up-to-date. */
2457 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2463 for (i = regno; i < endregno; i++)
2464 if (REG_IN_TABLE (i) != REG_TICK (i))
2473 /* Can't merge two expressions in different alias sets, since we
2474 can decide that the expression is transparent in a block when
2475 it isn't, due to its being set with a different alias set. */
2476 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
2479 /* A volatile mem should not be considered equivalent to any other. */
2481 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2486 /* For commutative operations, check both orders. */
2494 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2496 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2497 validate, for_gcse))
2498 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2500 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2501 validate, for_gcse)));
2504 /* We don't use the generic code below because we want to
2505 disregard filename and line numbers. */
2507 /* A volatile asm isn't equivalent to any other. */
2508 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2511 if (GET_MODE (x) != GET_MODE (y)
2512 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2513 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2514 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2515 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2516 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2519 if (ASM_OPERANDS_INPUT_LENGTH (x))
2521 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2522 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2523 ASM_OPERANDS_INPUT (y, i),
2525 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2526 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2536 /* Compare the elements. If any pair of corresponding elements
2537 fail to match, return 0 for the whole thing. */
2539 fmt = GET_RTX_FORMAT (code);
2540 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2545 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2546 validate, for_gcse))
2551 if (XVECLEN (x, i) != XVECLEN (y, i))
2553 for (j = 0; j < XVECLEN (x, i); j++)
2554 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2555 validate, for_gcse))
2560 if (strcmp (XSTR (x, i), XSTR (y, i)))
2565 if (XINT (x, i) != XINT (y, i))
2570 if (XWINT (x, i) != XWINT (y, i))
2586 /* Return 1 if X has a value that can vary even between two
2587 executions of the program. 0 means X can be compared reliably
2588 against certain constants or near-constants. */
2591 cse_rtx_varies_p (rtx x, int from_alias)
2593 /* We need not check for X and the equivalence class being of the same
2594 mode because if X is equivalent to a constant in some mode, it
2595 doesn't vary in any mode. */
2598 && REGNO_QTY_VALID_P (REGNO (x)))
2600 int x_q = REG_QTY (REGNO (x));
2601 struct qty_table_elem *x_ent = &qty_table[x_q];
2603 if (GET_MODE (x) == x_ent->mode
2604 && x_ent->const_rtx != NULL_RTX)
2608 if (GET_CODE (x) == PLUS
2609 && GET_CODE (XEXP (x, 1)) == CONST_INT
2610 && REG_P (XEXP (x, 0))
2611 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2613 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2614 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2616 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2617 && x0_ent->const_rtx != NULL_RTX)
2621 /* This can happen as the result of virtual register instantiation, if
2622 the initial constant is too large to be a valid address. This gives
2623 us a three-instruction sequence: load the large offset into a register,
2624 load fp minus a constant into a register, then a MEM which is the
2625 sum of the two `constant' registers. */
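/* An illustrative sketch of such a sequence (register numbers and
   constants invented; the frame pointer reg is written loosely):
       (set (reg:SI 90) (const_int 70000))
       (set (reg:SI 91) (plus:SI (reg:SI fp) (const_int -8)))
       ... (mem:SI (plus:SI (reg:SI 90) (reg:SI 91))) ...
   Both addends are known constants in the table, so the sum does not
   really vary even though it is written as a register-register PLUS.  */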
2626 if (GET_CODE (x) == PLUS
2627 && REG_P (XEXP (x, 0))
2628 && REG_P (XEXP (x, 1))
2629 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2630 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2632 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2633 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2634 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2635 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2637 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2638 && x0_ent->const_rtx != NULL_RTX
2639 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2640 && x1_ent->const_rtx != NULL_RTX)
2644 return rtx_varies_p (x, from_alias);
2647 /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2648 the result if necessary. INSN is as for canon_reg. */
2651 validate_canon_reg (rtx *xloc, rtx insn)
2653 rtx new = canon_reg (*xloc, insn);
2656 /* If replacing pseudo with hard reg or vice versa, ensure the
2657 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2658 if (insn != 0 && new != 0
2659 && REG_P (new) && REG_P (*xloc)
2660 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2661 != (REGNO (*xloc) < FIRST_PSEUDO_REGISTER))
2662 || GET_MODE (new) != GET_MODE (*xloc)
2663 || (insn_code = recog_memoized (insn)) < 0
2664 || insn_data[insn_code].n_dups > 0))
2665 validate_change (insn, xloc, new, 1);
2670 /* Canonicalize an expression:
2671 replace each register reference inside it
2672 with the "oldest" equivalent register.
2674 If INSN is nonzero and we are replacing a pseudo with a hard register
2675 or vice versa, validate_change is used to ensure that INSN remains valid
2676 after we make our substitution. The calls are made with IN_GROUP nonzero
2677 so apply_change_group must be called upon the outermost return from this
2678 function (unless INSN is zero). The result of apply_change_group can
2679 generally be discarded since the changes we are making are optional. */
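/* An illustrative sketch (register numbers invented): if pseudo 101 was
   copied from pseudo 100 earlier in the block, both share a quantity
   whose first_reg is 100, so (plus:SI (reg:SI 101) (const_int 4)) is
   rewritten as (plus:SI (reg:SI 100) (const_int 4)); using the oldest
   register makes equivalent expressions look textually identical.  */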
2682 canon_reg (rtx x, rtx insn)
2691 code = GET_CODE (x);
2710 struct qty_table_elem *ent;
2712 /* Never replace a hard reg, because hard regs can appear
2713 in more than one machine mode, and we must preserve the mode
2714 of each occurrence. Also, some hard regs appear in
2715 MEMs that are shared and mustn't be altered. Don't try to
2716 replace any reg that maps to a reg of class NO_REGS. */
2717 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2718 || ! REGNO_QTY_VALID_P (REGNO (x)))
2721 q = REG_QTY (REGNO (x));
2722 ent = &qty_table[q];
2723 first = ent->first_reg;
2724 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2725 : REGNO_REG_CLASS (first) == NO_REGS ? x
2726 : gen_rtx_REG (ent->mode, first));
2733 fmt = GET_RTX_FORMAT (code);
2734 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2739 validate_canon_reg (&XEXP (x, i), insn);
2740 else if (fmt[i] == 'E')
2741 for (j = 0; j < XVECLEN (x, i); j++)
2742 validate_canon_reg (&XVECEXP (x, i, j), insn);
2748 /* LOC is a location within INSN that is an operand address (the contents of
2749 a MEM). Find the best equivalent address to use that is valid for this
2752 On most CISC machines, complicated address modes are costly, and rtx_cost
2753 is a good approximation for that cost. However, most RISC machines have
2754 only a few (usually only one) memory reference formats. If an address is
2755 valid at all, it is often just as cheap as any other address. Hence, for
2756 RISC machines, we use `address_cost' to compare the costs of various
2757 addresses. For two addresses of equal cost, choose the one with the
2758 highest `rtx_cost' value as that has the potential of eliminating the
2759 most insns. For equal costs, we choose the first in the equivalence
2760 class. Note that we ignore the fact that pseudo registers are cheaper than
2761 hard registers here because we would also prefer the pseudo registers. */
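/* An illustrative sketch (register numbers invented): if the address
   (plus:SI (reg:SI 102) (const_int 12)) is recorded as equivalent to
   (reg:SI 103) and address_cost reports the bare register as cheaper
   on this target, the MEM's address is replaced with (reg:SI 103);
   when both forms cost the same, the criteria above break the tie.  */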
2764 find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2766 struct table_elt *elt;
2768 struct table_elt *p;
2769 int found_better = 1;
2770 int save_do_not_record = do_not_record;
2771 int save_hash_arg_in_memory = hash_arg_in_memory;
2776 /* Do not try to replace constant addresses or addresses of local and
2777 argument slots. These MEM expressions are made only once and inserted
2778 in many instructions, as well as being used to control symbol table
2779 output. It is not safe to clobber them.
2781 There are some uncommon cases where the address is already in a register
2782 for some reason, but we cannot take advantage of that because we have
2783 no easy way to unshare the MEM. In addition, looking up all stack
2784 addresses is costly. */
2785 if ((GET_CODE (addr) == PLUS
2786 && REG_P (XEXP (addr, 0))
2787 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2788 && (regno = REGNO (XEXP (addr, 0)),
2789 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2790 || regno == ARG_POINTER_REGNUM))
2792 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2793 || regno == HARD_FRAME_POINTER_REGNUM
2794 || regno == ARG_POINTER_REGNUM))
2795 || CONSTANT_ADDRESS_P (addr))
2798 /* If this address is not simply a register, try to fold it. This will
2799 sometimes simplify the expression. Many simplifications
2800 will not be valid, but some, usually applying the associative rule, will
2801 be valid and produce better code. */
2804 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2805 int addr_folded_cost = address_cost (folded, mode);
2806 int addr_cost = address_cost (addr, mode);
2808 if ((addr_folded_cost < addr_cost
2809 || (addr_folded_cost == addr_cost
2810 /* ??? The rtx_cost comparison is left over from an older
2811 version of this code. It is probably no longer helpful. */
2812 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2813 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2814 && validate_change (insn, loc, folded, 0))
2818 /* If this address is not in the hash table, we can't look for equivalences
2819 of the whole address. Also, ignore if volatile. */
2822 hash = HASH (addr, Pmode);
2823 addr_volatile = do_not_record;
2824 do_not_record = save_do_not_record;
2825 hash_arg_in_memory = save_hash_arg_in_memory;
2830 elt = lookup (addr, hash, Pmode);
2834 /* We need to find the best (under the criteria documented above) entry
2835 in the class that is valid. We use the `flag' field to indicate
2836 choices that were invalid and iterate until we can't find a better
2837 one that hasn't already been tried. */
2839 for (p = elt->first_same_value; p; p = p->next_same_value)
2842 while (found_better)
2844 int best_addr_cost = address_cost (*loc, mode);
2845 int best_rtx_cost = (elt->cost + 1) >> 1;
2847 struct table_elt *best_elt = elt;
2850 for (p = elt->first_same_value; p; p = p->next_same_value)
2854 || exp_equiv_p (p->exp, p->exp, 1, false))
2855 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2856 || (exp_cost == best_addr_cost
2857 && ((p->cost + 1) >> 1) > best_rtx_cost)))
2860 best_addr_cost = exp_cost;
2861 best_rtx_cost = (p->cost + 1) >> 1;
2868 if (validate_change (insn, loc,
2869 canon_reg (copy_rtx (best_elt->exp),
2878 /* If the address is a binary operation with the first operand a register
2879 and the second a constant, do the same as above, but looking for
2880 equivalences of the register. Then try to simplify before checking for
2881 the best address to use. This catches a few cases: First is when we
2882 have REG+const and the register is another REG+const. We can often merge
2883 the constants and eliminate one insn and one register. It may also be
2884 that a machine has a cheap REG+REG+const. Finally, this improves the
2885 code on the Alpha for unaligned byte stores. */
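/* An illustrative sketch (register numbers invented): for the address
   (plus:SI (reg:SI 100) (const_int 4)) where reg 100 is recorded as
   equivalent to (plus:SI (reg:SI 99) (const_int 8)), the code below can
   rebuild the address as (plus:SI (reg:SI 99) (const_int 12)), merging
   the two constants and possibly freeing reg 100.  */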
2887 if (flag_expensive_optimizations
2888 && ARITHMETIC_P (*loc)
2889 && REG_P (XEXP (*loc, 0)))
2891 rtx op1 = XEXP (*loc, 1);
2894 hash = HASH (XEXP (*loc, 0), Pmode);
2895 do_not_record = save_do_not_record;
2896 hash_arg_in_memory = save_hash_arg_in_memory;
2898 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2902 /* We need to find the best (under the criteria documented above) entry
2903 in the class that is valid. We use the `flag' field to indicate
2904 choices that were invalid and iterate until we can't find a better
2905 one that hasn't already been tried. */
2907 for (p = elt->first_same_value; p; p = p->next_same_value)
2910 while (found_better)
2912 int best_addr_cost = address_cost (*loc, mode);
2913 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2914 struct table_elt *best_elt = elt;
2915 rtx best_rtx = *loc;
2918 /* This is, in the worst case, an O(n^2) algorithm, so limit our search
2919 to the first 32 elements on the list. This avoids trouble
2920 compiling code with very long basic blocks that can easily
2921 call simplify_gen_binary so many times that we run out of
2925 for (p = elt->first_same_value, count = 0;
2927 p = p->next_same_value, count++)
2930 || exp_equiv_p (p->exp, p->exp, 1, false)))
2932 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
2935 new_cost = address_cost (new, mode);
2937 if (new_cost < best_addr_cost
2938 || (new_cost == best_addr_cost
2939 && (COST (new) + 1) >> 1 > best_rtx_cost))
2942 best_addr_cost = new_cost;
2943 best_rtx_cost = (COST (new) + 1) >> 1;
2951 if (validate_change (insn, loc,
2952 canon_reg (copy_rtx (best_rtx),
2962 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2963 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2964 determine what values are being compared.
2966 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2967 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2968 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2969 compared to produce cc0.
2971 The return value is the comparison operator and is either CODE itself
2972 or the code corresponding to the inverse of the comparison. */
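/* An illustrative sketch (register numbers invented): if CODE is NE,
   *PARG2 is (const_int 0) and *PARG1 is a register known to be equal
   to (lt:SI (reg:SI 66) (reg:SI 70)) -- i.e. it holds STORE_FLAG_VALUE
   exactly when that comparison is true -- then *PARG1 and *PARG2 become
   the two registers and the returned code is LT; had CODE been EQ, the
   returned code would be the reverse, GE.  */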
2974 static enum rtx_code
2975 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
2976 enum machine_mode *pmode1, enum machine_mode *pmode2)
2980 arg1 = *parg1, arg2 = *parg2;
2982 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2984 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2986 /* Set nonzero when we find something of interest. */
2988 int reverse_code = 0;
2989 struct table_elt *p = 0;
2991 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2992 On machines with CC0, this is the only case that can occur, since
2993 fold_rtx will return the COMPARE or item being compared with zero
2996 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2999 /* If ARG1 is a comparison operator and CODE is testing for
3000 STORE_FLAG_VALUE, get the inner arguments. */
3002 else if (COMPARISON_P (arg1))
3004 #ifdef FLOAT_STORE_FLAG_VALUE
3005 REAL_VALUE_TYPE fsfv;
3009 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3010 && code == LT && STORE_FLAG_VALUE == -1)
3011 #ifdef FLOAT_STORE_FLAG_VALUE
3012 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3013 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3014 REAL_VALUE_NEGATIVE (fsfv)))
3019 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3020 && code == GE && STORE_FLAG_VALUE == -1)
3021 #ifdef FLOAT_STORE_FLAG_VALUE
3022 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3023 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3024 REAL_VALUE_NEGATIVE (fsfv)))
3027 x = arg1, reverse_code = 1;
3030 /* ??? We could also check for
3032 (ne (and (eq (...) (const_int 1))) (const_int 0))
3034 and related forms, but let's wait until we see them occurring. */
3037 /* Look up ARG1 in the hash table and see if it has an equivalence
3038 that lets us see what is being compared. */
3039 p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
3042 p = p->first_same_value;
3044 /* If what we compare is already known to be constant, that is as
3046 We need to break the loop in this case, because otherwise we
3047 can have an infinite loop when looking at a reg that is known
3048 to be a constant which is the same as a comparison of a reg
3049 against zero which appears later in the insn stream, which in
3050 turn is constant and the same as the comparison of the first reg
3056 for (; p; p = p->next_same_value)
3058 enum machine_mode inner_mode = GET_MODE (p->exp);
3059 #ifdef FLOAT_STORE_FLAG_VALUE
3060 REAL_VALUE_TYPE fsfv;
3063 /* If the entry isn't valid, skip it. */
3064 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3067 if (GET_CODE (p->exp) == COMPARE
3068 /* Another possibility is that this machine has a compare insn
3069 that includes the comparison code. In that case, ARG1 would
3070 be equivalent to a comparison operation that would set ARG1 to
3071 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3072 ORIG_CODE is the actual comparison being done; if it is an EQ,
3073 we must reverse ORIG_CODE. On machine with a negative value
3074 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3077 && GET_MODE_CLASS (inner_mode) == MODE_INT
3078 && (GET_MODE_BITSIZE (inner_mode)
3079 <= HOST_BITS_PER_WIDE_INT)
3080 && (STORE_FLAG_VALUE
3081 & ((HOST_WIDE_INT) 1
3082 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3083 #ifdef FLOAT_STORE_FLAG_VALUE
3085 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3086 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3087 REAL_VALUE_NEGATIVE (fsfv)))
3090 && COMPARISON_P (p->exp)))
3095 else if ((code == EQ
3097 && GET_MODE_CLASS (inner_mode) == MODE_INT
3098 && (GET_MODE_BITSIZE (inner_mode)
3099 <= HOST_BITS_PER_WIDE_INT)
3100 && (STORE_FLAG_VALUE
3101 & ((HOST_WIDE_INT) 1
3102 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3103 #ifdef FLOAT_STORE_FLAG_VALUE
3105 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3106 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3107 REAL_VALUE_NEGATIVE (fsfv)))
3110 && COMPARISON_P (p->exp))
3117 /* If this is a non-trapping address, e.g. fp + constant, the
3118 equivalent is a better operand since it may let us predict
3119 the value of the comparison. */
3120 else if (!rtx_addr_can_trap_p (p->exp))
3127 /* If we didn't find a useful equivalence for ARG1, we are done.
3128 Otherwise, set up for the next iteration. */
3132 /* If we need to reverse the comparison, make sure that that is
3133 possible -- we can't necessarily infer the value of GE from LT
3134 with floating-point operands. */
3137 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3138 if (reversed == UNKNOWN)
3143 else if (COMPARISON_P (x))
3144 code = GET_CODE (x);
3145 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3148 /* Return our results. Return the modes from before fold_rtx
3149 because fold_rtx might produce const_int, and then it's too late. */
3150 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3151 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3156 /* If X is a nontrivial arithmetic operation on an argument
3157 for which a constant value can be determined, return
3158 the result of operating on that value, as a constant.
3159 Otherwise, return X, possibly with one or more operands
3160 modified by recursive calls to this function.
3162 If X is a register whose contents are known, we do NOT
3163 return those contents here. equiv_constant is called to
3166 INSN is the insn that we may be modifying. If it is 0, make a copy
3167 of X before modifying it. */
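/* An illustrative sketch (register number invented): if reg 66 is known
   to hold (const_int 3), then (mult:SI (reg:SI 66) (const_int 4)) folds
   to (const_int 12), because the constant equivalent of the operand is
   substituted during simplification; a bare (reg:SI 66), however, is
   returned unchanged, as the comment above explains.  */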
3170 fold_rtx (rtx x, rtx insn)
3173 enum machine_mode mode;
3180 /* Folded equivalents of first two operands of X. */
3184 /* Constant equivalents of first three operands of X;
3185 0 when no such equivalent is known. */
3190 /* The mode of the first operand of X. We need this for sign and zero
3192 enum machine_mode mode_arg0;
3197 mode = GET_MODE (x);
3198 code = GET_CODE (x);
3208 /* No use simplifying an EXPR_LIST
3209 since they are used only for lists of args
3210 in a function call's REG_EQUAL note. */
3216 return prev_insn_cc0;
3220 /* If the next insn is a CODE_LABEL followed by a jump table,
3221 PC's value is a LABEL_REF pointing to that label. That
3222 lets us fold switch statements on the VAX. */
3225 if (insn && tablejump_p (insn, &next, NULL))
3226 return gen_rtx_LABEL_REF (Pmode, next);
3231 /* See if we previously assigned a constant value to this SUBREG. */
3232 if ((new = lookup_as_function (x, CONST_INT)) != 0
3233 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3236 /* If this is a paradoxical SUBREG, we have no idea what value the
3237 extra bits would have. However, if the operand is equivalent
3238 to a SUBREG whose operand is the same as our mode, and all the
3239 modes are within a word, we can just use the inner operand
3240 because these SUBREGs just say how to treat the register.
3242 Similarly if we find an integer constant. */
3244 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3246 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3247 struct table_elt *elt;
3249 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3250 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3251 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3253 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3255 if (CONSTANT_P (elt->exp)
3256 && GET_MODE (elt->exp) == VOIDmode)
3259 if (GET_CODE (elt->exp) == SUBREG
3260 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3261 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3262 return copy_rtx (SUBREG_REG (elt->exp));
3268 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3269 We might be able to if the SUBREG is extracting a single word in an
3270 integral mode or extracting the low part. */
3272 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3273 const_arg0 = equiv_constant (folded_arg0);
3275 folded_arg0 = const_arg0;
3277 if (folded_arg0 != SUBREG_REG (x))
3279 new = simplify_subreg (mode, folded_arg0,
3280 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3285 if (REG_P (folded_arg0)
3286 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
3288 struct table_elt *elt;
3290 elt = lookup (folded_arg0,
3291 HASH (folded_arg0, GET_MODE (folded_arg0)),
3292 GET_MODE (folded_arg0));
3295 elt = elt->first_same_value;
3297 if (subreg_lowpart_p (x))
3298 /* If this is a narrowing SUBREG and our operand is a REG, see
3299 if we can find an equivalence for REG that is an arithmetic
3300 operation in a wider mode where both operands are paradoxical
3301 SUBREGs from objects of our result mode. In that case, we
3302 couldn't report an equivalent value for that operation, since we
3303 don't know what the extra bits will be. But we can find an
3304 equivalence for this SUBREG by folding that operation in the
3305 narrow mode. This allows us to fold arithmetic in narrow modes
3306 when the machine only supports word-sized arithmetic.
3308 Also look for a case where we have a SUBREG whose operand
3309 is the same as our result. If both modes are smaller
3310 than a word, we are simply interpreting a register in
3311 different modes and we can use the inner value. */
3313 for (; elt; elt = elt->next_same_value)
3315 enum rtx_code eltcode = GET_CODE (elt->exp);
3317 /* Just check for unary and binary operations. */
3318 if (UNARY_P (elt->exp)
3319 && eltcode != SIGN_EXTEND
3320 && eltcode != ZERO_EXTEND
3321 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3322 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3323 && (GET_MODE_CLASS (mode)
3324 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3326 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3328 if (!REG_P (op0) && ! CONSTANT_P (op0))
3329 op0 = fold_rtx (op0, NULL_RTX);
3331 op0 = equiv_constant (op0);
3333 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3336 else if (ARITHMETIC_P (elt->exp)
3337 && eltcode != DIV && eltcode != MOD
3338 && eltcode != UDIV && eltcode != UMOD
3339 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3340 && eltcode != ROTATE && eltcode != ROTATERT
3341 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3342 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3344 || CONSTANT_P (XEXP (elt->exp, 0)))
3345 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3346 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3348 || CONSTANT_P (XEXP (elt->exp, 1))))
3350 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3351 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3353 if (op0 && !REG_P (op0) && ! CONSTANT_P (op0))
3354 op0 = fold_rtx (op0, NULL_RTX);
3357 op0 = equiv_constant (op0);
3359 if (op1 && !REG_P (op1) && ! CONSTANT_P (op1))
3360 op1 = fold_rtx (op1, NULL_RTX);
3363 op1 = equiv_constant (op1);
3365 /* If we are looking for the low SImode part of
3366 (ashift:DI c (const_int 32)), it doesn't work
3367 to compute that in SImode, because a 32-bit shift
3368 in SImode is unpredictable. We know the value is 0. */
3370 && GET_CODE (elt->exp) == ASHIFT
3371 && GET_CODE (op1) == CONST_INT
3372 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3375 < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3376 /* If the count fits in the inner mode's width,
3377 but exceeds the outer mode's width,
3378 the value will get truncated to 0
3380 new = CONST0_RTX (mode);
3382 /* If the count exceeds even the inner mode's width,
3383 don't fold this expression. */
3386 else if (op0 && op1)
3387 new = simplify_binary_operation (GET_CODE (elt->exp), mode, op0, op1);
3390 else if (GET_CODE (elt->exp) == SUBREG
3391 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3392 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3394 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3395 new = copy_rtx (SUBREG_REG (elt->exp));
3401 /* A SUBREG resulting from a zero extension may fold to zero if
3402 it extracts higher bits than the ZERO_EXTEND's source bits.
3403 FIXME: if combine tried to, er, combine these instructions,
3404 this transformation may be moved to simplify_subreg. */
3405 for (; elt; elt = elt->next_same_value)
3407 if (GET_CODE (elt->exp) == ZERO_EXTEND
3409 >= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3410 return CONST0_RTX (mode);
3418 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3419 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3420 new = lookup_as_function (XEXP (x, 0), code);
3422 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3426 /* If we are not actually processing an insn, don't try to find the
3427 best address. Not only don't we care, but we could modify the
3428 MEM in an invalid way since we have no insn to validate against. */
3430 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3433 /* Even if we don't fold in the insn itself,
3434 we can safely do so here, in hopes of getting a constant. */
3435 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3437 HOST_WIDE_INT offset = 0;
3440 && REGNO_QTY_VALID_P (REGNO (addr)))
3442 int addr_q = REG_QTY (REGNO (addr));
3443 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3445 if (GET_MODE (addr) == addr_ent->mode
3446 && addr_ent->const_rtx != NULL_RTX)
3447 addr = addr_ent->const_rtx;
3450 /* If address is constant, split it into a base and integer offset. */
3451 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3453 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3454 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3456 base = XEXP (XEXP (addr, 0), 0);
3457 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3459 else if (GET_CODE (addr) == LO_SUM
3460 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3461 base = XEXP (addr, 1);
3463 /* If this is a constant pool reference, we can fold it into its
3464 constant to allow better value tracking. */
3465 if (base && GET_CODE (base) == SYMBOL_REF
3466 && CONSTANT_POOL_ADDRESS_P (base))
3468 rtx constant = get_pool_constant (base);
3469 enum machine_mode const_mode = get_pool_mode (base);
3472 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3474 constant_pool_entries_cost = COST (constant);
3475 constant_pool_entries_regcost = approx_reg_cost (constant);
3478 /* If we are loading the full constant, we have an equivalence. */
3479 if (offset == 0 && mode == const_mode)
3482 /* If this actually isn't a constant (weird!), we can't do
3483 anything. Otherwise, handle the two most common cases:
3484 extracting a word from a multi-word constant, and extracting
3485 the low-order bits. Other cases don't seem common enough to
3487 if (! CONSTANT_P (constant))
3490 if (GET_MODE_CLASS (mode) == MODE_INT
3491 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3492 && offset % UNITS_PER_WORD == 0
3493 && (new = operand_subword (constant,
3494 offset / UNITS_PER_WORD,
3495 0, const_mode)) != 0)
3498 if (((BYTES_BIG_ENDIAN
3499 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3500 || (! BYTES_BIG_ENDIAN && offset == 0))
3501 && (new = gen_lowpart (mode, constant)) != 0)
3505 /* If this is a reference to a label at a known position in a jump
3506 table, we also know its value. */
3507 if (base && GET_CODE (base) == LABEL_REF)
3509 rtx label = XEXP (base, 0);
3510 rtx table_insn = NEXT_INSN (label);
3512 if (table_insn && JUMP_P (table_insn)
3513 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3515 rtx table = PATTERN (table_insn);
3518 && (offset / GET_MODE_SIZE (GET_MODE (table))
3519 < XVECLEN (table, 0)))
3520 return XVECEXP (table, 0,
3521 offset / GET_MODE_SIZE (GET_MODE (table)));
3523 if (table_insn && JUMP_P (table_insn)
3524 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3526 rtx table = PATTERN (table_insn);
3529 && (offset / GET_MODE_SIZE (GET_MODE (table))
3530 < XVECLEN (table, 1)))
3532 offset /= GET_MODE_SIZE (GET_MODE (table));
3533 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3536 if (GET_MODE (table) != Pmode)
3537 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3539 /* Indicate this is a constant. This isn't a
3540 valid form of CONST, but it will only be used
3541 to fold the next insns and then discarded, so
3544 Note this expression must be explicitly discarded,
3545 by cse_insn, else it may end up in a REG_EQUAL note
3546 and "escape" to cause problems elsewhere. */
3547 return gen_rtx_CONST (GET_MODE (new), new);
3555 #ifdef NO_FUNCTION_CSE
3557 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3563 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3564 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3565 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3575 mode_arg0 = VOIDmode;
3577 /* Try folding our operands.
3578 Then see which ones have constant values known. */
3580 fmt = GET_RTX_FORMAT (code);
3581 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3584 rtx arg = XEXP (x, i);
3585 rtx folded_arg = arg, const_arg = 0;
3586 enum machine_mode mode_arg = GET_MODE (arg);
3587 rtx cheap_arg, expensive_arg;
3588 rtx replacements[2];
3590 int old_cost = COST_IN (XEXP (x, i), code);
3592 /* Most arguments are cheap, so handle them specially. */
3593 switch (GET_CODE (arg))
3596 /* This is the same as calling equiv_constant; it is duplicated
3598 if (REGNO_QTY_VALID_P (REGNO (arg)))
3600 int arg_q = REG_QTY (REGNO (arg));
3601 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3603 if (arg_ent->const_rtx != NULL_RTX
3604 && !REG_P (arg_ent->const_rtx)
3605 && GET_CODE (arg_ent->const_rtx) != PLUS)
3607 = gen_lowpart (GET_MODE (arg),
3608 arg_ent->const_rtx);
3623 folded_arg = prev_insn_cc0;
3624 mode_arg = prev_insn_cc0_mode;
3625 const_arg = equiv_constant (folded_arg);
3630 folded_arg = fold_rtx (arg, insn);
3631 const_arg = equiv_constant (folded_arg);
3634 /* For the first three operands, see if the operand
3635 is constant or equivalent to a constant. */
3639 folded_arg0 = folded_arg;
3640 const_arg0 = const_arg;
3641 mode_arg0 = mode_arg;
3644 folded_arg1 = folded_arg;
3645 const_arg1 = const_arg;
3648 const_arg2 = const_arg;
3652 /* Pick the least expensive of the folded argument and an
3653 equivalent constant argument. */
3654 if (const_arg == 0 || const_arg == folded_arg
3655 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3656 cheap_arg = folded_arg, expensive_arg = const_arg;
3658 cheap_arg = const_arg, expensive_arg = folded_arg;
3660 /* Try to replace the operand with the cheapest of the two
3661 possibilities. If it doesn't work and this is either of the first
3662 two operands of a commutative operation, try swapping them.
3663 If THAT fails, try the more expensive, provided it is cheaper
3664 than what is already there. */
3666 if (cheap_arg == XEXP (x, i))
3669 if (insn == 0 && ! copied)
3675 /* Order the replacements from cheapest to most expensive. */
3676 replacements[0] = cheap_arg;
3677 replacements[1] = expensive_arg;
3679 for (j = 0; j < 2 && replacements[j]; j++)
3681 int new_cost = COST_IN (replacements[j], code);
3683 /* Stop if what existed before was cheaper. Prefer constants
3684 in the case of a tie. */
3685 if (new_cost > old_cost
3686 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3689 /* It's not safe to substitute the operand of a conversion
3690 operator with a constant, as the conversion's identity
3691 depends upon the mode of its operand. This optimization
3692 is handled by the call to simplify_unary_operation. */
3693 if (GET_RTX_CLASS (code) == RTX_UNARY
3694 && GET_MODE (replacements[j]) != mode_arg0
3695 && (code == ZERO_EXTEND
3696 || code == SIGN_EXTEND
3698 || code == FLOAT_TRUNCATE
3699 || code == FLOAT_EXTEND
3702 || code == UNSIGNED_FLOAT
3703 || code == UNSIGNED_FIX))
3706 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3709 if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3710 || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
3712 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3713 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3715 if (apply_change_group ())
3717 /* Swap them back to be invalid so that this loop can
3718 continue and flag them to be swapped back later. */
3721 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3733 /* Don't try to fold inside of a vector of expressions.
3734 Doing nothing is harmless. */
3738 /* If a commutative operation, place a constant integer as the second
3739 operand unless the first operand is also a constant integer. Otherwise,
3740 place any constant second unless the first operand is also a constant. */
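/* For example, (plus:SI (const_int 4) (reg:SI 66)) is rewritten as
   (plus:SI (reg:SI 66) (const_int 4)) so that equivalent expressions
   are built, hashed and compared in one canonical order.  (Register
   number invented.)  */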
3742 if (COMMUTATIVE_P (x))
3745 || swap_commutative_operands_p (const_arg0 ? const_arg0
3747 const_arg1 ? const_arg1
3750 rtx tem = XEXP (x, 0);
3752 if (insn == 0 && ! copied)
3758 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3759 validate_change (insn, &XEXP (x, 1), tem, 1);
3760 if (apply_change_group ())
3762 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3763 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3768 /* If X is an arithmetic operation, see if we can simplify it. */
3770 switch (GET_RTX_CLASS (code))
3776 /* We can't simplify extension ops unless we know the
3778 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3779 && mode_arg0 == VOIDmode)
3782 /* If we had a CONST, strip it off and put it back later if we
3784 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3785 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3787 new = simplify_unary_operation (code, mode,
3788 const_arg0 ? const_arg0 : folded_arg0,
3790 if (new != 0 && is_const)
3791 new = gen_rtx_CONST (mode, new);
3796 case RTX_COMM_COMPARE:
3797 /* See what items are actually being compared and set FOLDED_ARG[01]
3798 to those values and CODE to the actual comparison code. If any are
3799 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3800 do anything if both operands are already known to be constant. */
3802 if (const_arg0 == 0 || const_arg1 == 0)
3804 struct table_elt *p0, *p1;
3805 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3806 enum machine_mode mode_arg1;
3808 #ifdef FLOAT_STORE_FLAG_VALUE
3809 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3811 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3812 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3813 false_rtx = CONST0_RTX (mode);
3817 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3818 &mode_arg0, &mode_arg1);
3819 const_arg0 = equiv_constant (folded_arg0);
3820 const_arg1 = equiv_constant (folded_arg1);
3822 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3823 what kinds of things are being compared, so we can't do
3824 anything with this comparison. */
3826 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3829 /* If we do not now have two constants being compared, see
3830 if we can nevertheless deduce some things about the
3832 if (const_arg0 == 0 || const_arg1 == 0)
3834 /* Some addresses are known to be nonzero. We don't know
3835 their sign, but equality comparisons are known. */
3836 if (const_arg1 == const0_rtx
3837 && nonzero_address_p (folded_arg0))
3841 else if (code == NE)
3845 /* See if the two operands are the same. */
3847 if (folded_arg0 == folded_arg1
3848 || (REG_P (folded_arg0)
3849 && REG_P (folded_arg1)
3850 && (REG_QTY (REGNO (folded_arg0))
3851 == REG_QTY (REGNO (folded_arg1))))
3852 || ((p0 = lookup (folded_arg0,
3853 SAFE_HASH (folded_arg0, mode_arg0),
3855 && (p1 = lookup (folded_arg1,
3856 SAFE_HASH (folded_arg1, mode_arg0),
3858 && p0->first_same_value == p1->first_same_value))
3860 /* Sadly two equal NaNs are not equivalent. */
3861 if (!HONOR_NANS (mode_arg0))
3862 return ((code == EQ || code == LE || code == GE
3863 || code == LEU || code == GEU || code == UNEQ
3864 || code == UNLE || code == UNGE
3866 ? true_rtx : false_rtx);
3867 /* Take care for the FP compares we can resolve. */
3868 if (code == UNEQ || code == UNLE || code == UNGE)
3870 if (code == LTGT || code == LT || code == GT)
3874 /* If FOLDED_ARG0 is a register, see if the comparison we are
3875 doing now is either the same as we did before or the reverse
3876 (we only check the reverse if not floating-point). */
3877 else if (REG_P (folded_arg0))
3879 int qty = REG_QTY (REGNO (folded_arg0));
3881 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3883 struct qty_table_elem *ent = &qty_table[qty];
3885 if ((comparison_dominates_p (ent->comparison_code, code)
3886 || (! FLOAT_MODE_P (mode_arg0)
3887 && comparison_dominates_p (ent->comparison_code,
3888 reverse_condition (code))))
3889 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3891 && rtx_equal_p (ent->comparison_const,
3893 || (REG_P (folded_arg1)
3894 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3895 return (comparison_dominates_p (ent->comparison_code, code)
3896 ? true_rtx : false_rtx);
3902 /* If we are comparing against zero, see if the first operand is
3903 equivalent to an IOR with a constant. If so, we may be able to
3904 determine the result of this comparison. */
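/* An illustrative sketch (register number invented): if folded_arg0 is
   known to equal (ior:SI (reg:SI 66) (const_int 4)), its value always
   has bit 2 set and therefore cannot be zero, so an EQ test against
   zero folds to false and an NE test folds to true; when the constant
   also covers the sign bit, the signed LT/GE cases can be decided too.  */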
3906 if (const_arg1 == const0_rtx)
3908 rtx y = lookup_as_function (folded_arg0, IOR);
3912 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3913 && GET_CODE (inner_const) == CONST_INT
3914 && INTVAL (inner_const) != 0)
3916 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3917 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3918 && (INTVAL (inner_const)
3919 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3920 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3922 #ifdef FLOAT_STORE_FLAG_VALUE
3923 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3925 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3926 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3927 false_rtx = CONST0_RTX (mode);
3952 rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
3953 rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
3954 new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
3959 case RTX_COMM_ARITH:
3963 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3964 with that LABEL_REF as its second operand. If so, the result is
3965 the first operand of that MINUS. This handles switches with an
3966 ADDR_DIFF_VEC table. */
3967 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3970 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
3971 : lookup_as_function (folded_arg0, MINUS);
3973 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3974 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
3977 /* Now try for a CONST of a MINUS like the above. */
3978 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3979 : lookup_as_function (folded_arg0, CONST))) != 0
3980 && GET_CODE (XEXP (y, 0)) == MINUS
3981 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3982 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
3983 return XEXP (XEXP (y, 0), 0);
3986 /* Likewise if the operands are in the other order. */
3987 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3990 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
3991 : lookup_as_function (folded_arg1, MINUS);
3993 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3994 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
3997 /* Now try for a CONST of a MINUS like the above. */
3998 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3999 : lookup_as_function (folded_arg1, CONST))) != 0
4000 && GET_CODE (XEXP (y, 0)) == MINUS
4001 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4002 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4003 return XEXP (XEXP (y, 0), 0);
4006 /* If second operand is a register equivalent to a negative
4007 CONST_INT, see if we can find a register equivalent to the
4008 positive constant. Make a MINUS if so. Don't do this for
4009 a non-negative constant since we might then alternate between
4010 choosing positive and negative constants. Having the positive
4011 constant previously-used is the more common case. Be sure
4012 the resulting constant is non-negative; if const_arg1 were
4013 the smallest negative number this would overflow: depending
4014 on the mode, this would either just be the same value (and
4015 hence not save anything) or be incorrect. */
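/* An illustrative sketch (register numbers invented): for
   (plus:SI (reg:SI 66) (reg:SI 70)) where reg 70 is known to hold
   (const_int -4), if some register, say reg 71, is already known to
   hold (const_int 4), the sum is rewritten as
   (minus:SI (reg:SI 66) (reg:SI 71)), reusing the positive constant
   that is more likely to be needed again.  */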
4016 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4017 && INTVAL (const_arg1) < 0
4018 /* This used to test
4020 -INTVAL (const_arg1) >= 0
4022 But the Sun V5.0 compilers mis-compiled that test. So
4023 instead we test for the problematic value in a more direct
4024 manner and hope the Sun compilers get it correct. */
4025 && INTVAL (const_arg1) !=
4026 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4027 && REG_P (folded_arg1))
4029 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4031 = lookup (new_const, SAFE_HASH (new_const, mode), mode);
4034 for (p = p->first_same_value; p; p = p->next_same_value)
4036 return simplify_gen_binary (MINUS, mode, folded_arg0,
4037 canon_reg (p->exp, NULL_RTX));
4042 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4043 If so, produce (PLUS Z C2-C). */
4044 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4046 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4047 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4048 return fold_rtx (plus_constant (copy_rtx (y),
4049 -INTVAL (const_arg1)),
4056 case SMIN: case SMAX: case UMIN: case UMAX:
4057 case IOR: case AND: case XOR:
4059 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4060 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4061 is known to be of similar form, we may be able to replace the
4062 operation with a combined operation. This may eliminate the
4063 intermediate operation if every use is simplified in this way.
4064 Note that the similar optimization done by combine.c only works
4065 if the intermediate operation's result has only one reference. */
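/* An illustrative sketch (register numbers invented): if reg 66 is
   known to equal (plus:SI (reg:SI 60) (const_int 8)) and we are folding
   (plus:SI (reg:SI 66) (const_int 4)), the constants are combined and
   the result is (plus:SI (reg:SI 60) (const_int 12)); the intermediate
   addition can then die if all of its uses simplify this way.  */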
4067 if (REG_P (folded_arg0)
4068 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4071 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4072 rtx y = lookup_as_function (folded_arg0, code);
4074 enum rtx_code associate_code;
4078 || 0 == (inner_const
4079 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4080 || GET_CODE (inner_const) != CONST_INT
4081 /* If we have compiled a statement like
4082 "if (x == (x & mask1))", and now are looking at
4083 "x & mask2", we will have a case where the first operand
4084 of Y is the same as our first operand. Unless we detect
4085 this case, an infinite loop will result. */
4086 || XEXP (y, 0) == folded_arg0)
4089 /* Don't associate these operations if they are a PLUS with the
4090 same constant and it is a power of two. These might be doable
4091 with a pre- or post-increment. Similarly for two subtracts of
4092 identical powers of two with post decrement. */
4094 if (code == PLUS && const_arg1 == inner_const
4095 && ((HAVE_PRE_INCREMENT
4096 && exact_log2 (INTVAL (const_arg1)) >= 0)
4097 || (HAVE_POST_INCREMENT
4098 && exact_log2 (INTVAL (const_arg1)) >= 0)
4099 || (HAVE_PRE_DECREMENT
4100 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4101 || (HAVE_POST_DECREMENT
4102 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4105 /* Compute the code used to compose the constants. For example,
4106 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
4108 associate_code = (is_shift || code == MINUS ? PLUS : code);
4110 new_const = simplify_binary_operation (associate_code, mode,
4111 const_arg1, inner_const);
4116 /* If we are associating shift operations, don't let this
4117 produce a shift of the size of the object or larger.
4118 This could occur when we follow a sign-extend by a right
4119 shift on a machine that does a sign-extend as a pair
4122 if (is_shift && GET_CODE (new_const) == CONST_INT
4123 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4125 /* As an exception, we can turn an ASHIFTRT of this
4126 form into a shift of the number of bits - 1. */
4127 if (code == ASHIFTRT)
4128 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4133 y = copy_rtx (XEXP (y, 0));
4135 /* If Y contains our first operand (the most common way this
4136 can happen is if Y is a MEM), we would go into an infinite
4137 loop if we tried to fold it. So don't in that case. */
4139 if (! reg_mentioned_p (folded_arg0, y))
4140 y = fold_rtx (y, insn);
4142 return simplify_gen_binary (code, mode, y, new_const);
4146 case DIV: case UDIV:
4147 /* ??? The associative optimization performed immediately above is
4148 also possible for DIV and UDIV using associate_code of MULT.
4149 However, we would need extra code to verify that the
4150 multiplication does not overflow, that is, there is no overflow
4151 in the calculation of new_const. */
4158 new = simplify_binary_operation (code, mode,
4159 const_arg0 ? const_arg0 : folded_arg0,
4160 const_arg1 ? const_arg1 : folded_arg1);
4164 /* (lo_sum (high X) X) is simply X. */
4165 if (code == LO_SUM && const_arg0 != 0
4166 && GET_CODE (const_arg0) == HIGH
4167 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4172 case RTX_BITFIELD_OPS:
4173 new = simplify_ternary_operation (code, mode, mode_arg0,
4174 const_arg0 ? const_arg0 : folded_arg0,
4175 const_arg1 ? const_arg1 : folded_arg1,
4176 const_arg2 ? const_arg2 : XEXP (x, 2));
4183 return new ? new : x;
4186 /* Return a constant value currently equivalent to X.
4187 Return 0 if we don't know one. */
4190 equiv_constant (rtx x)
4193 && REGNO_QTY_VALID_P (REGNO (x)))
4195 int x_q = REG_QTY (REGNO (x));
4196 struct qty_table_elem *x_ent = &qty_table[x_q];
4198 if (x_ent->const_rtx)
4199 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
4202 if (x == 0 || CONSTANT_P (x))
4205 /* If X is a MEM, try to fold it outside the context of any insn to see if
4206 it might be equivalent to a constant. That handles the case where it
4207 is a constant-pool reference. Then try to look it up in the hash table
4208 in case it is something whose value we have seen before. */
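/* Added illustrative note (not in the original source; the pseudo number is
   made up): if (reg:SI 65) currently holds a known constant, the qty_table
   lookup above returns it directly; for a MEM such as a constant-pool
   reference, fold_rtx plus the hash-table scan below may recover e.g.
   (const_int 42) as the equivalent constant.  */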
4212 struct table_elt *elt;
4214 x = fold_rtx (x, NULL_RTX);
4218 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
4222 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4223 if (elt->is_const && CONSTANT_P (elt->exp))
4230 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4231 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4232 least-significant part of X.
4233 MODE specifies how big a part of X to return.
4235 If the requested operation cannot be done, 0 is returned.
4237 This is similar to gen_lowpart_general in emit-rtl.c. */
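/* Added illustrative note (not in the original source): on a 32-bit target
   with BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN set, asking for the SImode low
   part of an 8-byte DImode MEM yields the same MEM with its address offset
   by 4, since the least-significant word is the second one in memory; if
   the adjusted address is not valid, 0 is returned.  */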
4240 gen_lowpart_if_possible (enum machine_mode mode, rtx x)
4242 rtx result = gen_lowpart_common (mode, x);
4248 /* This is the only other case we handle. */
4252 if (WORDS_BIG_ENDIAN)
4253 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4254 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4255 if (BYTES_BIG_ENDIAN)
4256 /* Adjust the address so that the address-after-the-data is
4257 unchanged. */
4258 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4259 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4261 new = adjust_address_nv (x, mode, offset);
4262 if (! memory_address_p (mode, XEXP (new, 0)))
4271 /* Given INSN, a jump insn, PATH_TAKEN indicates if we are following the "taken"
4272 branch. It will be zero if not.
4274 In certain cases, this can cause us to add an equivalence. For example,
4275 if we are following the taken case of
4276 if (i == 2)
4277 we can add the fact that `i' and '2' are now equivalent.
4279 In any case, we can record that this comparison was passed. If the same
4280 comparison is seen later, we will know its value. */
4283 record_jump_equiv (rtx insn, int taken)
4285 int cond_known_true;
4288 enum machine_mode mode, mode0, mode1;
4289 int reversed_nonequality = 0;
4292 /* Ensure this is the right kind of insn. */
4293 if (! any_condjump_p (insn))
4295 set = pc_set (insn);
4297 /* See if this jump condition is known true or false. */
4299 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4301 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4303 /* Get the type of comparison being done and the operands being compared.
4304 If we had to reverse a non-equality condition, record that fact so we
4305 know that it isn't valid for floating-point. */
4306 code = GET_CODE (XEXP (SET_SRC (set), 0));
4307 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4308 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4310 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4311 if (! cond_known_true)
4313 code = reversed_comparison_code_parts (code, op0, op1, insn);
4315 /* Don't remember if we can't find the inverse. */
4316 if (code == UNKNOWN)
4320 /* The mode is the mode of the non-constant. */
4322 if (mode1 != VOIDmode)
4325 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4328 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4329 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4330 Make any useful entries we can with that information. Called from
4331 above function and called recursively. */
4334 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4335 rtx op1, int reversed_nonequality)
4337 unsigned op0_hash, op1_hash;
4338 int op0_in_memory, op1_in_memory;
4339 struct table_elt *op0_elt, *op1_elt;
4341 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4342 we know that they are also equal in the smaller mode (this is also
4343 true for all smaller modes whether or not there is a SUBREG, but
4344 is not worth testing for with no SUBREG). */
4346 /* Note that GET_MODE (op0) may not equal MODE. */
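/* Added illustrative sketch (not in the original source; register numbers
   are hypothetical): if the branch establishes
   (subreg:SI (reg:QI 70) 0) == (reg:SI 71), the recursive call below also
   records (reg:QI 70) == (subreg:QI (reg:SI 71) 0), i.e. equality in the
   narrower QImode.  */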
4347 if (code == EQ && GET_CODE (op0) == SUBREG
4348 && (GET_MODE_SIZE (GET_MODE (op0))
4349 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4351 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4352 rtx tem = gen_lowpart (inner_mode, op1);
4354 record_jump_cond (code, mode, SUBREG_REG (op0),
4355 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4356 reversed_nonequality);
4359 if (code == EQ && GET_CODE (op1) == SUBREG
4360 && (GET_MODE_SIZE (GET_MODE (op1))
4361 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4363 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4364 rtx tem = gen_lowpart (inner_mode, op0);
4366 record_jump_cond (code, mode, SUBREG_REG (op1),
4367 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4368 reversed_nonequality);
4371 /* Similarly, if this is an NE comparison, and either is a SUBREG
4372 making a smaller mode, we know the whole thing is also NE. */
4374 /* Note that GET_MODE (op0) may not equal MODE;
4375 if we test MODE instead, we can get an infinite recursion
4376 alternating between two modes each wider than MODE. */
4378 if (code == NE && GET_CODE (op0) == SUBREG
4379 && subreg_lowpart_p (op0)
4380 && (GET_MODE_SIZE (GET_MODE (op0))
4381 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4383 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4384 rtx tem = gen_lowpart (inner_mode, op1);
4386 record_jump_cond (code, mode, SUBREG_REG (op0),
4387 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4388 reversed_nonequality);
4391 if (code == NE && GET_CODE (op1) == SUBREG
4392 && subreg_lowpart_p (op1)
4393 && (GET_MODE_SIZE (GET_MODE (op1))
4394 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4396 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4397 rtx tem = gen_lowpart (inner_mode, op0);
4399 record_jump_cond (code, mode, SUBREG_REG (op1),
4400 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4401 reversed_nonequality);
4404 /* Hash both operands. */
4407 hash_arg_in_memory = 0;
4408 op0_hash = HASH (op0, mode);
4409 op0_in_memory = hash_arg_in_memory;
4415 hash_arg_in_memory = 0;
4416 op1_hash = HASH (op1, mode);
4417 op1_in_memory = hash_arg_in_memory;
4422 /* Look up both operands. */
4423 op0_elt = lookup (op0, op0_hash, mode);
4424 op1_elt = lookup (op1, op1_hash, mode);
4426 /* If both operands are already equivalent or if they are not in the
4427 table but are identical, do nothing. */
4428 if ((op0_elt != 0 && op1_elt != 0
4429 && op0_elt->first_same_value == op1_elt->first_same_value)
4430 || op0 == op1 || rtx_equal_p (op0, op1))
4433 /* If we aren't setting two things equal all we can do is save this
4434 comparison. Similarly if this is floating-point. In the latter
4435 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4436 If we record the equality, we might inadvertently delete code
4437 whose intent was to change -0 to +0. */
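/* Added illustrative note (not in the original source; names are made up):
   after following the taken arm of `if (a > b)', the quantity for `a'
   records comparison_code GT together with either `b's quantity or, when
   `b' folds to a constant, comparison_const, so a later identical test can
   be evaluated without redoing the comparison.  */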
4439 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4441 struct qty_table_elem *ent;
4444 /* If we reversed a floating-point comparison, if OP0 is not a
4445 register, or if OP1 is neither a register nor a constant, we can't
4446 do anything. */
4449 op1 = equiv_constant (op1);
4451 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4452 || !REG_P (op0) || op1 == 0)
4455 /* Put OP0 in the hash table if it isn't already. This gives it a
4456 new quantity number. */
4459 if (insert_regs (op0, NULL, 0))
4461 rehash_using_reg (op0);
4462 op0_hash = HASH (op0, mode);
4464 /* If OP0 is contained in OP1, this changes its hash code
4465 as well. Faster to rehash than to check, except
4466 for the simple case of a constant. */
4467 if (! CONSTANT_P (op1))
4468 op1_hash = HASH (op1,mode);
4471 op0_elt = insert (op0, NULL, op0_hash, mode);
4472 op0_elt->in_memory = op0_in_memory;
4475 qty = REG_QTY (REGNO (op0));
4476 ent = &qty_table[qty];
4478 ent->comparison_code = code;
4481 /* Look it up again--in case op0 and op1 are the same. */
4482 op1_elt = lookup (op1, op1_hash, mode);
4484 /* Put OP1 in the hash table so it gets a new quantity number. */
4487 if (insert_regs (op1, NULL, 0))
4489 rehash_using_reg (op1);
4490 op1_hash = HASH (op1, mode);
4493 op1_elt = insert (op1, NULL, op1_hash, mode);
4494 op1_elt->in_memory = op1_in_memory;
4497 ent->comparison_const = NULL_RTX;
4498 ent->comparison_qty = REG_QTY (REGNO (op1));
4502 ent->comparison_const = op1;
4503 ent->comparison_qty = -1;
4509 /* If either side is still missing an equivalence, make it now,
4510 then merge the equivalences. */
4514 if (insert_regs (op0, NULL, 0))
4516 rehash_using_reg (op0);
4517 op0_hash = HASH (op0, mode);
4520 op0_elt = insert (op0, NULL, op0_hash, mode);
4521 op0_elt->in_memory = op0_in_memory;
4526 if (insert_regs (op1, NULL, 0))
4528 rehash_using_reg (op1);
4529 op1_hash = HASH (op1, mode);
4532 op1_elt = insert (op1, NULL, op1_hash, mode);
4533 op1_elt->in_memory = op1_in_memory;
4536 merge_equiv_classes (op0_elt, op1_elt);
4539 /* CSE processing for one instruction.
4540 First simplify sources and addresses of all assignments
4541 in the instruction, using previously-computed equivalent values.
4542 Then install the new sources and destinations in the table
4543 of available values.
4545 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4546 the insn. It means that INSN is inside libcall block. In this
4547 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4549 /* Data on one SET contained in the instruction. */
4553 /* The SET rtx itself. */
4555 /* The SET_SRC of the rtx (the original value, if it is changing). */
4557 /* The hash-table element for the SET_SRC of the SET. */
4558 struct table_elt *src_elt;
4559 /* Hash value for the SET_SRC. */
4561 /* Hash value for the SET_DEST. */
4563 /* The SET_DEST, with SUBREG, etc., stripped. */
4565 /* Nonzero if the SET_SRC is in memory. */
4567 /* Nonzero if the SET_SRC contains something
4568 whose value cannot be predicted and understood. */
4570 /* Original machine mode, in case it becomes a CONST_INT.
4571 The size of this field should match the size of the mode
4572 field of struct rtx_def (see rtl.h). */
4573 ENUM_BITFIELD(machine_mode) mode : 8;
4574 /* A constant equivalent for SET_SRC, if any. */
4576 /* Original SET_SRC value used for libcall notes. */
4578 /* Hash value of constant equivalent for SET_SRC. */
4579 unsigned src_const_hash;
4580 /* Table entry for constant equivalent for SET_SRC, if any. */
4581 struct table_elt *src_const_elt;
4585 cse_insn (rtx insn, rtx libcall_insn)
4587 rtx x = PATTERN (insn);
4593 /* Records what this insn does to set CC0. */
4594 rtx this_insn_cc0 = 0;
4595 enum machine_mode this_insn_cc0_mode = VOIDmode;
4599 struct table_elt *src_eqv_elt = 0;
4600 int src_eqv_volatile = 0;
4601 int src_eqv_in_memory = 0;
4602 unsigned src_eqv_hash = 0;
4604 struct set *sets = (struct set *) 0;
4608 /* Find all the SETs and CLOBBERs in this instruction.
4609 Record all the SETs in the array `set' and count them.
4610 Also determine whether there is a CLOBBER that invalidates
4611 all memory references, or all references at varying addresses. */
4615 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4617 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4618 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4619 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4623 if (GET_CODE (x) == SET)
4625 sets = alloca (sizeof (struct set));
4628 /* Ignore SETs that are unconditional jumps.
4629 They never need cse processing, so this does not hurt.
4630 The reason is not efficiency but rather
4631 so that we can test at the end for instructions
4632 that have been simplified to unconditional jumps
4633 and not be misled by unchanged instructions
4634 that were unconditional jumps to begin with. */
4635 if (SET_DEST (x) == pc_rtx
4636 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4639 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4640 The hard function value register is used only once, to copy to
4641 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4642 Ensure we invalidate the destination register. On the 80386 no
4643 other code would invalidate it since it is a fixed_reg.
4644 We need not check the return of apply_change_group; see canon_reg. */
4646 else if (GET_CODE (SET_SRC (x)) == CALL)
4648 canon_reg (SET_SRC (x), insn);
4649 apply_change_group ();
4650 fold_rtx (SET_SRC (x), insn);
4651 invalidate (SET_DEST (x), VOIDmode);
4656 else if (GET_CODE (x) == PARALLEL)
4658 int lim = XVECLEN (x, 0);
4660 sets = alloca (lim * sizeof (struct set));
4662 /* Find all regs explicitly clobbered in this insn,
4663 and ensure they are not replaced with any other regs
4664 elsewhere in this insn.
4665 When a reg that is clobbered is also used for input,
4666 we should presume that that is for a reason,
4667 and we should not substitute some other register
4668 which is not supposed to be clobbered.
4669 Therefore, this loop cannot be merged into the one below
4670 because a CALL may precede a CLOBBER and refer to the
4671 value clobbered. We must not let a canonicalization do
4672 anything in that case. */
4673 for (i = 0; i < lim; i++)
4675 rtx y = XVECEXP (x, 0, i);
4676 if (GET_CODE (y) == CLOBBER)
4678 rtx clobbered = XEXP (y, 0);
4680 if (REG_P (clobbered)
4681 || GET_CODE (clobbered) == SUBREG)
4682 invalidate (clobbered, VOIDmode);
4683 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4684 || GET_CODE (clobbered) == ZERO_EXTRACT)
4685 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4689 for (i = 0; i < lim; i++)
4691 rtx y = XVECEXP (x, 0, i);
4692 if (GET_CODE (y) == SET)
4694 /* As above, we ignore unconditional jumps and call-insns and
4695 ignore the result of apply_change_group. */
4696 if (GET_CODE (SET_SRC (y)) == CALL)
4698 canon_reg (SET_SRC (y), insn);
4699 apply_change_group ();
4700 fold_rtx (SET_SRC (y), insn);
4701 invalidate (SET_DEST (y), VOIDmode);
4703 else if (SET_DEST (y) == pc_rtx
4704 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4707 sets[n_sets++].rtl = y;
4709 else if (GET_CODE (y) == CLOBBER)
4711 /* If we clobber memory, canon the address.
4712 This does nothing when a register is clobbered
4713 because we have already invalidated the reg. */
4714 if (MEM_P (XEXP (y, 0)))
4715 canon_reg (XEXP (y, 0), NULL_RTX);
4717 else if (GET_CODE (y) == USE
4718 && ! (REG_P (XEXP (y, 0))
4719 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4720 canon_reg (y, NULL_RTX);
4721 else if (GET_CODE (y) == CALL)
4723 /* The result of apply_change_group can be ignored; see
4725 canon_reg (y, insn);
4726 apply_change_group ();
4731 else if (GET_CODE (x) == CLOBBER)
4733 if (MEM_P (XEXP (x, 0)))
4734 canon_reg (XEXP (x, 0), NULL_RTX);
4737 /* Canonicalize a USE of a pseudo register or memory location. */
4738 else if (GET_CODE (x) == USE
4739 && ! (REG_P (XEXP (x, 0))
4740 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4741 canon_reg (XEXP (x, 0), NULL_RTX);
4742 else if (GET_CODE (x) == CALL)
4744 /* The result of apply_change_group can be ignored; see canon_reg. */
4745 canon_reg (x, insn);
4746 apply_change_group ();
4750 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4751 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4752 is handled specially for this case, and if it isn't set, then there will
4753 be no equivalence for the destination. */
4754 if (n_sets == 1 && REG_NOTES (insn) != 0
4755 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4756 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4757 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4759 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4760 XEXP (tem, 0) = src_eqv;
4763 /* Canonicalize sources and addresses of destinations.
4764 We do this in a separate pass to avoid problems when a MATCH_DUP is
4765 present in the insn pattern. In that case, we want to ensure that
4766 we don't break the duplicate nature of the pattern. So we will replace
4767 both operands at the same time. Otherwise, we would fail to find an
4768 equivalent substitution in the loop calling validate_change below.
4770 We used to suppress canonicalization of DEST if it appears in SRC,
4771 but we don't do this any more. */
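/* Added illustrative sketch (not in the original source; register numbers
   are hypothetical): if (reg:SI 103) currently shares a quantity with the
   earlier-seen (reg:SI 99), canon_reg rewrites a source such as
   (plus:SI (reg:SI 101) (reg:SI 103)) to use (reg:SI 99) instead, so that
   equivalent expressions elsewhere hash to the same table entry.  */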
4773 for (i = 0; i < n_sets; i++)
4775 rtx dest = SET_DEST (sets[i].rtl);
4776 rtx src = SET_SRC (sets[i].rtl);
4777 rtx new = canon_reg (src, insn);
4780 sets[i].orig_src = src;
4781 if ((REG_P (new) && REG_P (src)
4782 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4783 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4784 || (insn_code = recog_memoized (insn)) < 0
4785 || insn_data[insn_code].n_dups > 0)
4786 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4788 SET_SRC (sets[i].rtl) = new;
4790 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4792 validate_change (insn, &XEXP (dest, 1),
4793 canon_reg (XEXP (dest, 1), insn), 1);
4794 validate_change (insn, &XEXP (dest, 2),
4795 canon_reg (XEXP (dest, 2), insn), 1);
4798 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4799 || GET_CODE (dest) == ZERO_EXTRACT
4800 || GET_CODE (dest) == SIGN_EXTRACT)
4801 dest = XEXP (dest, 0);
4804 canon_reg (dest, insn);
4807 /* Now that we have done all the replacements, we can apply the change
4808 group and see if they all work. Note that this will cause some
4809 canonicalizations that would have worked individually not to be applied
4810 because some other canonicalization didn't work, but this should not
4813 The result of apply_change_group can be ignored; see canon_reg. */
4815 apply_change_group ();
4817 /* Set sets[i].src_elt to the class each source belongs to.
4818 Detect assignments from or to volatile things
4819 and set sets[i] to zero so they will be ignored
4820 in the rest of this function.
4822 Nothing in this loop changes the hash table or the register chains. */
4824 for (i = 0; i < n_sets; i++)
4828 struct table_elt *elt = 0, *p;
4829 enum machine_mode mode;
4832 rtx src_related = 0;
4833 struct table_elt *src_const_elt = 0;
4834 int src_cost = MAX_COST;
4835 int src_eqv_cost = MAX_COST;
4836 int src_folded_cost = MAX_COST;
4837 int src_related_cost = MAX_COST;
4838 int src_elt_cost = MAX_COST;
4839 int src_regcost = MAX_COST;
4840 int src_eqv_regcost = MAX_COST;
4841 int src_folded_regcost = MAX_COST;
4842 int src_related_regcost = MAX_COST;
4843 int src_elt_regcost = MAX_COST;
4844 /* Set nonzero if we need to call force_const_mem on the
4845 contents of src_folded before using it. */
4846 int src_folded_force_flag = 0;
4848 dest = SET_DEST (sets[i].rtl);
4849 src = SET_SRC (sets[i].rtl);
4851 /* If SRC is a constant that has no machine mode,
4852 hash it with the destination's machine mode.
4853 This way we can keep different modes separate. */
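/* Added illustrative note (not in the original source): (const_int 1)
   carries no mode of its own, so when it is stored to an SImode register it
   is hashed under SImode, and under DImode when stored to a DImode
   register, keeping the two uses in separate equivalence classes.  */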
4855 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4856 sets[i].mode = mode;
4860 enum machine_mode eqvmode = mode;
4861 if (GET_CODE (dest) == STRICT_LOW_PART)
4862 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4864 hash_arg_in_memory = 0;
4865 src_eqv_hash = HASH (src_eqv, eqvmode);
4867 /* Find the equivalence class for the equivalent expression. */
4870 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4872 src_eqv_volatile = do_not_record;
4873 src_eqv_in_memory = hash_arg_in_memory;
4876 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4877 value of the INNER register, not the destination. So it is not
4878 a valid substitution for the source. But save it for later. */
4879 if (GET_CODE (dest) == STRICT_LOW_PART)
4882 src_eqv_here = src_eqv;
4884 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4885 simplified result, which may not necessarily be valid. */
4886 src_folded = fold_rtx (src, insn);
4889 /* ??? This caused bad code to be generated for the m68k port with -O2.
4890 Suppose src is (CONST_INT -1), and that after truncation src_folded
4891 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4892 At the end we will add src and src_const to the same equivalence
4893 class. We now have 3 and -1 on the same equivalence class. This
4894 causes later instructions to be mis-optimized. */
4895 /* If storing a constant in a bitfield, pre-truncate the constant
4896 so we will be able to record it later. */
4897 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
4898 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
4900 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4902 if (GET_CODE (src) == CONST_INT
4903 && GET_CODE (width) == CONST_INT
4904 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4905 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4907 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4908 << INTVAL (width)) - 1));
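/* Added worked example (not in the original source): storing
   (const_int -1) into a 3-bit-wide field has excess bits set, so
   src_folded becomes (-1 & ((1 << 3) - 1)) = (const_int 7), the value
   actually left in the field.  */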
4912 /* Compute SRC's hash code, and also notice if it
4913 should not be recorded at all. In that case,
4914 prevent any further processing of this assignment. */
4916 hash_arg_in_memory = 0;
4919 sets[i].src_hash = HASH (src, mode);
4920 sets[i].src_volatile = do_not_record;
4921 sets[i].src_in_memory = hash_arg_in_memory;
4923 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4924 a pseudo, do not record SRC. Using SRC as a replacement for
4925 anything else will be incorrect in that situation. Note that
4926 this usually occurs only for stack slots, in which case all the
4927 RTL would be referring to SRC, so we don't lose any optimization
4928 opportunities by not having SRC in the hash table. */
4931 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
4933 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4934 sets[i].src_volatile = 1;
4937 /* It is no longer clear why we used to do this, but it doesn't
4938 appear to still be needed. So let's try without it since this
4939 code hurts cse'ing widened ops. */
4940 /* If source is a paradoxical subreg (such as QI treated as an SI),
4941 treat it as volatile. It may do the work of an SI in one context
4942 where the extra bits are not being used, but cannot replace an SI
4944 if (GET_CODE (src) == SUBREG
4945 && (GET_MODE_SIZE (GET_MODE (src))
4946 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4947 sets[i].src_volatile = 1;
4950 /* Locate all possible equivalent forms for SRC. Try to replace
4951 SRC in the insn with each cheaper equivalent.
4953 We have the following types of equivalents: SRC itself, a folded
4954 version, a value given in a REG_EQUAL note, or a value related
4957 Each of these equivalents may be part of an additional class
4958 of equivalents (if more than one is in the table, they must be in
4959 the same class; we check for this).
4961 If the source is volatile, we don't do any table lookups.
4963 We note any constant equivalent for possible later use in a
4966 if (!sets[i].src_volatile)
4967 elt = lookup (src, sets[i].src_hash, mode);
4969 sets[i].src_elt = elt;
4971 if (elt && src_eqv_here && src_eqv_elt)
4973 if (elt->first_same_value != src_eqv_elt->first_same_value)
4975 /* The REG_EQUAL is indicating that two formerly distinct
4976 classes are now equivalent. So merge them. */
4977 merge_equiv_classes (elt, src_eqv_elt);
4978 src_eqv_hash = HASH (src_eqv, elt->mode);
4979 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4985 else if (src_eqv_elt)
4988 /* Try to find a constant somewhere and record it in `src_const'.
4989 Record its table element, if any, in `src_const_elt'. Look in
4990 any known equivalences first. (If the constant is not in the
4991 table, also set `sets[i].src_const_hash'). */
4993 for (p = elt->first_same_value; p; p = p->next_same_value)
4997 src_const_elt = elt;
5002 && (CONSTANT_P (src_folded)
5003 /* Consider (minus (label_ref L1) (label_ref L2)) as
5004 "constant" here so we will record it. This allows us
5005 to fold switch statements when an ADDR_DIFF_VEC is used. */
5006 || (GET_CODE (src_folded) == MINUS
5007 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5008 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5009 src_const = src_folded, src_const_elt = elt;
5010 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5011 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5013 /* If we don't know if the constant is in the table, get its
5014 hash code and look it up. */
5015 if (src_const && src_const_elt == 0)
5017 sets[i].src_const_hash = HASH (src_const, mode);
5018 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5021 sets[i].src_const = src_const;
5022 sets[i].src_const_elt = src_const_elt;
5024 /* If the constant and our source are both in the table, mark them as
5025 equivalent. Otherwise, if a constant is in the table but the source
5026 isn't, set ELT to it. */
5027 if (src_const_elt && elt
5028 && src_const_elt->first_same_value != elt->first_same_value)
5029 merge_equiv_classes (elt, src_const_elt);
5030 else if (src_const_elt && elt == 0)
5031 elt = src_const_elt;
5033 /* See if there is a register linearly related to a constant
5034 equivalent of SRC. */
5036 && (GET_CODE (src_const) == CONST
5037 || (src_const_elt && src_const_elt->related_value != 0)))
5039 src_related = use_related_value (src_const, src_const_elt);
5042 struct table_elt *src_related_elt
5043 = lookup (src_related, HASH (src_related, mode), mode);
5044 if (src_related_elt && elt)
5046 if (elt->first_same_value
5047 != src_related_elt->first_same_value)
5048 /* This can occur when we previously saw a CONST
5049 involving a SYMBOL_REF and then see the SYMBOL_REF
5050 twice. Merge the involved classes. */
5051 merge_equiv_classes (elt, src_related_elt);
5054 src_related_elt = 0;
5056 else if (src_related_elt && elt == 0)
5057 elt = src_related_elt;
5061 /* See if we have a CONST_INT that is already in a register in a
5062 wider mode. */
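/* Added illustrative sketch (not in the original source; the pseudo number
   is hypothetical): needing (const_int 7) in QImode, if (reg:SI 90) is
   already known to hold 7, the loop below sets src_related to the QImode
   low part of that register, typically (subreg:QI (reg:SI 90) 0).  */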
5064 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5065 && GET_MODE_CLASS (mode) == MODE_INT
5066 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5068 enum machine_mode wider_mode;
5070 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5071 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5072 && src_related == 0;
5073 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5075 struct table_elt *const_elt
5076 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5081 for (const_elt = const_elt->first_same_value;
5082 const_elt; const_elt = const_elt->next_same_value)
5083 if (REG_P (const_elt->exp))
5085 src_related = gen_lowpart (mode,
5092 /* Another possibility is that we have an AND with a constant in
5093 a mode narrower than a word. If so, it might have been generated
5094 as part of an "if" which would narrow the AND. If we already
5095 have done the AND in a wider mode, we can use a SUBREG of that
5096 value. */
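/* Added illustrative sketch (not in the original source; register numbers
   are hypothetical): when the source is (and:QI (reg:QI 96) (const_int 15))
   and the table already knows (reg:SI 97) is equivalent to the same AND
   carried out in SImode, the loop below sets src_related to the QImode low
   part of (reg:SI 97).  */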
5098 if (flag_expensive_optimizations && ! src_related
5099 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5100 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5102 enum machine_mode tmode;
5103 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5105 for (tmode = GET_MODE_WIDER_MODE (mode);
5106 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5107 tmode = GET_MODE_WIDER_MODE (tmode))
5109 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
5110 struct table_elt *larger_elt;
5114 PUT_MODE (new_and, tmode);
5115 XEXP (new_and, 0) = inner;
5116 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5117 if (larger_elt == 0)
5120 for (larger_elt = larger_elt->first_same_value;
5121 larger_elt; larger_elt = larger_elt->next_same_value)
5122 if (REG_P (larger_elt->exp))
5125 = gen_lowpart (mode, larger_elt->exp);
5135 #ifdef LOAD_EXTEND_OP
5136 /* See if a MEM has already been loaded with a widening operation;
5137 if it has, we can use a subreg of that. Many CISC machines
5138 also have such operations, but this is only likely to be
5139 beneficial on these machines. */
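/* Added illustrative sketch (not in the original source): on a target where
   LOAD_EXTEND_OP (QImode) is ZERO_EXTEND, if (reg:SI 95) is known to hold
   (zero_extend:SI (mem:QI addr)), a later read of that same QImode MEM can
   use the SImode register's low part instead of reloading from memory.  */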
5141 if (flag_expensive_optimizations && src_related == 0
5142 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5143 && GET_MODE_CLASS (mode) == MODE_INT
5144 && MEM_P (src) && ! do_not_record
5145 && LOAD_EXTEND_OP (mode) != UNKNOWN)
5147 enum machine_mode tmode;
5149 /* Set what we are trying to extend and the operation it might
5150 have been extended with. */
5151 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5152 XEXP (memory_extend_rtx, 0) = src;
5154 for (tmode = GET_MODE_WIDER_MODE (mode);
5155 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5156 tmode = GET_MODE_WIDER_MODE (tmode))
5158 struct table_elt *larger_elt;
5160 PUT_MODE (memory_extend_rtx, tmode);
5161 larger_elt = lookup (memory_extend_rtx,
5162 HASH (memory_extend_rtx, tmode), tmode);
5163 if (larger_elt == 0)
5166 for (larger_elt = larger_elt->first_same_value;
5167 larger_elt; larger_elt = larger_elt->next_same_value)
5168 if (REG_P (larger_elt->exp))
5170 src_related = gen_lowpart (mode,
5179 #endif /* LOAD_EXTEND_OP */
5181 if (src == src_folded)
5184 /* At this point, ELT, if nonzero, points to a class of expressions
5185 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5186 and SRC_RELATED, if nonzero, each contain additional equivalent
5187 expressions. Prune these latter expressions by deleting expressions
5188 already in the equivalence class.
5190 Check for an equivalent identical to the destination. If found,
5191 this is the preferred equivalent since it will likely lead to
5192 elimination of the insn. Indicate this by placing it in
5196 elt = elt->first_same_value;
5197 for (p = elt; p; p = p->next_same_value)
5199 enum rtx_code code = GET_CODE (p->exp);
5201 /* If the expression is not valid, ignore it. Then we do not
5202 have to check for validity below. In most cases, we can use
5203 `rtx_equal_p', since canonicalization has already been done. */
5204 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
5207 /* Also skip paradoxical subregs, unless that's what we're
5210 && (GET_MODE_SIZE (GET_MODE (p->exp))
5211 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5213 && GET_CODE (src) == SUBREG
5214 && GET_MODE (src) == GET_MODE (p->exp)
5215 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5216 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5219 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5221 else if (src_folded && GET_CODE (src_folded) == code
5222 && rtx_equal_p (src_folded, p->exp))
5224 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5225 && rtx_equal_p (src_eqv_here, p->exp))
5227 else if (src_related && GET_CODE (src_related) == code
5228 && rtx_equal_p (src_related, p->exp))
5231 /* This is the same as the destination of the insn; we want
5232 to prefer it. Copy it to src_related. The code below will
5233 then give it a negative cost. */
5234 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5238 /* Find the cheapest valid equivalent, trying all the available
5239 possibilities. Prefer items not in the hash table to ones
5240 that are when they are equal cost. Note that we can never
5241 worsen an insn as the current contents will also succeed.
5242 If we find an equivalent identical to the destination, use it as best,
5243 since this insn will probably be eliminated in that case. */
5246 if (rtx_equal_p (src, dest))
5247 src_cost = src_regcost = -1;
5250 src_cost = COST (src);
5251 src_regcost = approx_reg_cost (src);
5257 if (rtx_equal_p (src_eqv_here, dest))
5258 src_eqv_cost = src_eqv_regcost = -1;
5261 src_eqv_cost = COST (src_eqv_here);
5262 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5268 if (rtx_equal_p (src_folded, dest))
5269 src_folded_cost = src_folded_regcost = -1;
5272 src_folded_cost = COST (src_folded);
5273 src_folded_regcost = approx_reg_cost (src_folded);
5279 if (rtx_equal_p (src_related, dest))
5280 src_related_cost = src_related_regcost = -1;
5283 src_related_cost = COST (src_related);
5284 src_related_regcost = approx_reg_cost (src_related);
5288 /* If this was an indirect jump insn, a known label will really be
5289 cheaper even though it looks more expensive. */
5290 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5291 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5293 /* Terminate loop when replacement made. This must terminate since
5294 the current contents will be tested and will always be valid. */
5299 /* Skip invalid entries. */
5300 while (elt && !REG_P (elt->exp)
5301 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5302 elt = elt->next_same_value;
5304 /* A paradoxical subreg would be bad here: it'll be the right
5305 size, but later may be adjusted so that the upper bits aren't
5306 what we want. So reject it. */
5308 && GET_CODE (elt->exp) == SUBREG
5309 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5310 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5311 /* It is okay, though, if the rtx we're trying to match
5312 will ignore any of the bits we can't predict. */
5314 && GET_CODE (src) == SUBREG
5315 && GET_MODE (src) == GET_MODE (elt->exp)
5316 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5317 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5319 elt = elt->next_same_value;
5325 src_elt_cost = elt->cost;
5326 src_elt_regcost = elt->regcost;
5329 /* Find cheapest and skip it for the next time. For items
5330 of equal cost, use this order:
5331 src_folded, src, src_eqv, src_related and hash table entry. */
5333 && preferable (src_folded_cost, src_folded_regcost,
5334 src_cost, src_regcost) <= 0
5335 && preferable (src_folded_cost, src_folded_regcost,
5336 src_eqv_cost, src_eqv_regcost) <= 0
5337 && preferable (src_folded_cost, src_folded_regcost,
5338 src_related_cost, src_related_regcost) <= 0
5339 && preferable (src_folded_cost, src_folded_regcost,
5340 src_elt_cost, src_elt_regcost) <= 0)
5342 trial = src_folded, src_folded_cost = MAX_COST;
5343 if (src_folded_force_flag)
5345 rtx forced = force_const_mem (mode, trial);
5351 && preferable (src_cost, src_regcost,
5352 src_eqv_cost, src_eqv_regcost) <= 0
5353 && preferable (src_cost, src_regcost,
5354 src_related_cost, src_related_regcost) <= 0
5355 && preferable (src_cost, src_regcost,
5356 src_elt_cost, src_elt_regcost) <= 0)
5357 trial = src, src_cost = MAX_COST;
5358 else if (src_eqv_here
5359 && preferable (src_eqv_cost, src_eqv_regcost,
5360 src_related_cost, src_related_regcost) <= 0
5361 && preferable (src_eqv_cost, src_eqv_regcost,
5362 src_elt_cost, src_elt_regcost) <= 0)
5363 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5364 else if (src_related
5365 && preferable (src_related_cost, src_related_regcost,
5366 src_elt_cost, src_elt_regcost) <= 0)
5367 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5370 trial = copy_rtx (elt->exp);
5371 elt = elt->next_same_value;
5372 src_elt_cost = MAX_COST;
5375 /* We don't normally have an insn matching (set (pc) (pc)), so
5376 check for this separately here. We will delete such an
5379 For other cases such as a table jump or conditional jump
5380 where we know the ultimate target, go ahead and replace the
5381 operand. While that may not make a valid insn, we will
5382 reemit the jump below (and also insert any necessary
5384 if (n_sets == 1 && dest == pc_rtx
5386 || (GET_CODE (trial) == LABEL_REF
5387 && ! condjump_p (insn))))
5389 SET_SRC (sets[i].rtl) = trial;
5390 cse_jumps_altered = 1;
5394 /* Look for a substitution that makes a valid insn. */
5395 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5397 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5399 /* If we just made a substitution inside a libcall, then we
5400 need to make the same substitution in any notes attached
5401 to the RETVAL insn. */
5403 && (REG_P (sets[i].orig_src)
5404 || GET_CODE (sets[i].orig_src) == SUBREG
5405 || MEM_P (sets[i].orig_src)))
5407 rtx note = find_reg_equal_equiv_note (libcall_insn);
5409 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
5414 /* The result of apply_change_group can be ignored; see
5417 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5418 apply_change_group ();
5422 /* If we previously found constant pool entries for
5423 constants and this is a constant, try making a
5424 pool entry. Put it in src_folded unless we already have done
5425 this since that is where it likely came from. */
5427 else if (constant_pool_entries_cost
5428 && CONSTANT_P (trial)
5429 /* Reject cases that will abort in decode_rtx_const.
5430 On the alpha when simplifying a switch, we get
5431 (const (truncate (minus (label_ref) (label_ref)))). */
5432 && ! (GET_CODE (trial) == CONST
5433 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5434 /* Likewise on IA-64, except without the truncate. */
5435 && ! (GET_CODE (trial) == CONST
5436 && GET_CODE (XEXP (trial, 0)) == MINUS
5437 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5438 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5440 || (!MEM_P (src_folded)
5441 && ! src_folded_force_flag))
5442 && GET_MODE_CLASS (mode) != MODE_CC
5443 && mode != VOIDmode)
5445 src_folded_force_flag = 1;
5447 src_folded_cost = constant_pool_entries_cost;
5448 src_folded_regcost = constant_pool_entries_regcost;
5452 src = SET_SRC (sets[i].rtl);
5454 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5455 However, there is an important exception: If both are registers
5456 that are not the head of their equivalence class, replace SET_SRC
5457 with the head of the class. If we do not do this, we will have
5458 both registers live over a portion of the basic block. This way,
5459 their lifetimes will likely abut instead of overlapping. */
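/* Added illustrative sketch (not in the original source; register numbers
   are hypothetical): if the insn has become (set (reg 105) (reg 105)) but
   the head of that value's class is (reg 100), the source is replaced so
   the insn reads (set (reg 105) (reg 100)), letting the two registers'
   lifetimes abut rather than overlap.  */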
5461 && REGNO_QTY_VALID_P (REGNO (dest)))
5463 int dest_q = REG_QTY (REGNO (dest));
5464 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5466 if (dest_ent->mode == GET_MODE (dest)
5467 && dest_ent->first_reg != REGNO (dest)
5468 && REG_P (src) && REGNO (src) == REGNO (dest)
5469 /* Don't do this if the original insn had a hard reg as
5470 SET_SRC or SET_DEST. */
5471 && (!REG_P (sets[i].src)
5472 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5473 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5474 /* We can't call canon_reg here because it won't do anything if
5475 SRC is a hard register. */
5477 int src_q = REG_QTY (REGNO (src));
5478 struct qty_table_elem *src_ent = &qty_table[src_q];
5479 int first = src_ent->first_reg;
5481 = (first >= FIRST_PSEUDO_REGISTER
5482 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5484 /* We must use validate-change even for this, because this
5485 might be a special no-op instruction, suitable only to
5487 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5490 /* If we had a constant that is cheaper than what we are now
5491 setting SRC to, use that constant. We ignored it when we
5492 thought we could make this into a no-op. */
5493 if (src_const && COST (src_const) < COST (src)
5494 && validate_change (insn, &SET_SRC (sets[i].rtl),
5501 /* If we made a change, recompute SRC values. */
5502 if (src != sets[i].src)
5506 hash_arg_in_memory = 0;
5508 sets[i].src_hash = HASH (src, mode);
5509 sets[i].src_volatile = do_not_record;
5510 sets[i].src_in_memory = hash_arg_in_memory;
5511 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5514 /* If this is a single SET, we are setting a register, and we have an
5515 equivalent constant, we want to add a REG_NOTE. We don't want
5516 to write a REG_EQUAL note for a constant pseudo since verifying that
5517 that pseudo hasn't been eliminated is a pain. Such a note also
5518 won't help anything.
5520 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5521 which can be created for a reference to a compile time computable
5522 entry in a jump table. */
5524 if (n_sets == 1 && src_const && REG_P (dest)
5525 && !REG_P (src_const)
5526 && ! (GET_CODE (src_const) == CONST
5527 && GET_CODE (XEXP (src_const, 0)) == MINUS
5528 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5529 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5531 /* We only want a REG_EQUAL note if src_const != src. */
5532 if (! rtx_equal_p (src, src_const))
5534 /* Make sure that the rtx is not shared. */
5535 src_const = copy_rtx (src_const);
5537 /* Record the actual constant value in a REG_EQUAL note,
5538 making a new one if one does not already exist. */
5539 set_unique_reg_note (insn, REG_EQUAL, src_const);
5543 /* Now deal with the destination. */
5546 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5547 to the MEM or REG within it. */
5548 while (GET_CODE (dest) == SIGN_EXTRACT
5549 || GET_CODE (dest) == ZERO_EXTRACT
5550 || GET_CODE (dest) == SUBREG
5551 || GET_CODE (dest) == STRICT_LOW_PART)
5552 dest = XEXP (dest, 0);
5554 sets[i].inner_dest = dest;
5558 #ifdef PUSH_ROUNDING
5559 /* Stack pushes invalidate the stack pointer. */
5560 rtx addr = XEXP (dest, 0);
5561 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5562 && XEXP (addr, 0) == stack_pointer_rtx)
5563 invalidate (stack_pointer_rtx, Pmode);
5565 dest = fold_rtx (dest, insn);
5568 /* Compute the hash code of the destination now,
5569 before the effects of this instruction are recorded,
5570 since the register values used in the address computation
5571 are those before this instruction. */
5572 sets[i].dest_hash = HASH (dest, mode);
5574 /* Don't enter a bit-field in the hash table
5575 because the value in it after the store
5576 may not equal what was stored, due to truncation. */
5578 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5579 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5581 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5583 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5584 && GET_CODE (width) == CONST_INT
5585 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5586 && ! (INTVAL (src_const)
5587 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5588 /* Exception: if the value is constant,
5589 and it won't be truncated, record it. */
5593 /* This is chosen so that the destination will be invalidated
5594 but no new value will be recorded.
5595 We must invalidate because sometimes constant
5596 values can be recorded for bitfields. */
5597 sets[i].src_elt = 0;
5598 sets[i].src_volatile = 1;
5604 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5606 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5608 /* One less use of the label this insn used to jump to. */
5610 cse_jumps_altered = 1;
5611 /* No more processing for this set. */
5615 /* If this SET is now setting PC to a label, we know it used to
5616 be a conditional or computed branch. */
5617 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5619 /* Now emit a BARRIER after the unconditional jump. */
5620 if (NEXT_INSN (insn) == 0
5621 || !BARRIER_P (NEXT_INSN (insn)))
5622 emit_barrier_after (insn);
5624 /* We reemit the jump in as many cases as possible just in
5625 case the form of an unconditional jump is significantly
5626 different from a computed jump or conditional jump.
5628 If this insn has multiple sets, then reemitting the
5629 jump is nontrivial. So instead we just force rerecognition
5630 and hope for the best. */
5635 new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5636 JUMP_LABEL (new) = XEXP (src, 0);
5637 LABEL_NUSES (XEXP (src, 0))++;
5639 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5640 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5643 XEXP (note, 1) = NULL_RTX;
5644 REG_NOTES (new) = note;
5650 /* Now emit a BARRIER after the unconditional jump. */
5651 if (NEXT_INSN (insn) == 0
5652 || !BARRIER_P (NEXT_INSN (insn)))
5653 emit_barrier_after (insn);
5656 INSN_CODE (insn) = -1;
5658 /* Do not bother deleting any unreachable code,
5659 let jump/flow do that. */
5661 cse_jumps_altered = 1;
5665 /* If destination is volatile, invalidate it and then do no further
5666 processing for this assignment. */
5668 else if (do_not_record)
5670 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5671 invalidate (dest, VOIDmode);
5672 else if (MEM_P (dest))
5674 /* Outgoing arguments for a libcall don't
5675 affect any recorded expressions. */
5676 if (! libcall_insn || insn == libcall_insn)
5677 invalidate (dest, VOIDmode);
5679 else if (GET_CODE (dest) == STRICT_LOW_PART
5680 || GET_CODE (dest) == ZERO_EXTRACT)
5681 invalidate (XEXP (dest, 0), GET_MODE (dest));
5685 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5686 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5689 /* If setting CC0, record what it was set to, or a constant, if it
5690 is equivalent to a constant. If it is being set to a floating-point
5691 value, make a COMPARE with the appropriate constant of 0. If we
5692 don't do this, later code can interpret this as a test against
5693 const0_rtx, which can cause problems if we try to put it into an
5694 insn as a floating-point operand. */
5695 if (dest == cc0_rtx)
5697 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5698 this_insn_cc0_mode = mode;
5699 if (FLOAT_MODE_P (mode))
5700 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5706 /* Now enter all non-volatile source expressions in the hash table
5707 if they are not already present.
5708 Record their equivalence classes in src_elt.
5709 This way we can insert the corresponding destinations into
5710 the same classes even if the actual sources are no longer in them
5711 (having been invalidated). */
5713 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5714 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5716 struct table_elt *elt;
5717 struct table_elt *classp = sets[0].src_elt;
5718 rtx dest = SET_DEST (sets[0].rtl);
5719 enum machine_mode eqvmode = GET_MODE (dest);
5721 if (GET_CODE (dest) == STRICT_LOW_PART)
5723 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5726 if (insert_regs (src_eqv, classp, 0))
5728 rehash_using_reg (src_eqv);
5729 src_eqv_hash = HASH (src_eqv, eqvmode);
5731 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5732 elt->in_memory = src_eqv_in_memory;
5735 /* Check to see if src_eqv_elt is the same as a set source which
5736 does not yet have an elt, and if so set the elt of the set source
5738 for (i = 0; i < n_sets; i++)
5739 if (sets[i].rtl && sets[i].src_elt == 0
5740 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5741 sets[i].src_elt = src_eqv_elt;
5744 for (i = 0; i < n_sets; i++)
5745 if (sets[i].rtl && ! sets[i].src_volatile
5746 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5748 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5750 /* REG_EQUAL in setting a STRICT_LOW_PART
5751 gives an equivalent for the entire destination register,
5752 not just for the subreg being stored in now.
5753 This is a more interesting equivalence, so we arrange later
5754 to treat the entire reg as the destination. */
5755 sets[i].src_elt = src_eqv_elt;
5756 sets[i].src_hash = src_eqv_hash;
5760 /* Insert source and constant equivalent into hash table, if not
5762 struct table_elt *classp = src_eqv_elt;
5763 rtx src = sets[i].src;
5764 rtx dest = SET_DEST (sets[i].rtl);
5765 enum machine_mode mode
5766 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5768 /* It's possible that we have a source value known to be
5769 constant but don't have a REG_EQUAL note on the insn.
5770 Lack of a note will mean src_eqv_elt will be NULL. This
5771 can happen where we've generated a SUBREG to access a
5772 CONST_INT that is already in a register in a wider mode.
5773 Ensure that the source expression is put in the proper
5776 classp = sets[i].src_const_elt;
5778 if (sets[i].src_elt == 0)
5780 /* Don't put a hard register source into the table if this is
5781 the last insn of a libcall. In this case, we only need
5782 to put src_eqv_elt in src_elt. */
5783 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5785 struct table_elt *elt;
5787 /* Note that these insert_regs calls cannot remove
5788 any of the src_elt's, because they would have failed to
5789 match if not still valid. */
5790 if (insert_regs (src, classp, 0))
5792 rehash_using_reg (src);
5793 sets[i].src_hash = HASH (src, mode);
5795 elt = insert (src, classp, sets[i].src_hash, mode);
5796 elt->in_memory = sets[i].src_in_memory;
5797 sets[i].src_elt = classp = elt;
5800 sets[i].src_elt = classp;
5802 if (sets[i].src_const && sets[i].src_const_elt == 0
5803 && src != sets[i].src_const
5804 && ! rtx_equal_p (sets[i].src_const, src))
5805 sets[i].src_elt = insert (sets[i].src_const, classp,
5806 sets[i].src_const_hash, mode);
5809 else if (sets[i].src_elt == 0)
5810 /* If we did not insert the source into the hash table (e.g., it was
5811 volatile), note the equivalence class for the REG_EQUAL value, if any,
5812 so that the destination goes into that class. */
5813 sets[i].src_elt = src_eqv_elt;
5815 invalidate_from_clobbers (x);
5817 /* Some registers are invalidated by subroutine calls. Memory is
5818 invalidated by non-constant calls. */
5822 if (! CONST_OR_PURE_CALL_P (insn))
5823 invalidate_memory ();
5824 invalidate_for_call ();
5827 /* Now invalidate everything set by this instruction.
5828 If a SUBREG or other funny destination is being set,
5829 sets[i].rtl is still nonzero, so here we invalidate the reg
5830 a part of which is being set. */
5832 for (i = 0; i < n_sets; i++)
5835 /* We can't use the inner dest, because the mode associated with
5836 a ZERO_EXTRACT is significant. */
5837 rtx dest = SET_DEST (sets[i].rtl);
5839 /* Needed for registers to remove the register from its
5840 previous quantity's chain.
5841 Needed for memory if this is a nonvarying address, unless
5842 we have just done an invalidate_memory that covers even those. */
5843 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5844 invalidate (dest, VOIDmode);
5845 else if (MEM_P (dest))
5847 /* Outgoing arguments for a libcall don't
5848 affect any recorded expressions. */
5849 if (! libcall_insn || insn == libcall_insn)
5850 invalidate (dest, VOIDmode);
5852 else if (GET_CODE (dest) == STRICT_LOW_PART
5853 || GET_CODE (dest) == ZERO_EXTRACT)
5854 invalidate (XEXP (dest, 0), GET_MODE (dest));
5857 /* A volatile ASM invalidates everything. */
5858 if (NONJUMP_INSN_P (insn)
5859 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5860 && MEM_VOLATILE_P (PATTERN (insn)))
5861 flush_hash_table ();
5863 /* Make sure registers mentioned in destinations
5864 are safe for use in an expression to be inserted.
5865 This removes from the hash table
5866 any invalid entry that refers to one of these registers.
5868 We don't care about the return value from mention_regs because
5869 we are going to hash the SET_DEST values unconditionally. */
5871 for (i = 0; i < n_sets; i++)
5875 rtx x = SET_DEST (sets[i].rtl);
5881 /* We used to rely on all references to a register becoming
5882 inaccessible when a register changes to a new quantity,
5883 since that changes the hash code. However, that is not
5884 safe, since after HASH_SIZE new quantities we get a
5885 hash 'collision' of a register with its own invalid
5886 entries. And since SUBREGs have been changed not to
5887 change their hash code with the hash code of the register,
5888 it wouldn't work any longer at all. So we have to check
5889 for any invalid references lying around now.
5890 This code is similar to the REG case in mention_regs,
5891 but it knows that reg_tick has been incremented, and
5892 it leaves reg_in_table as -1. */
5893 unsigned int regno = REGNO (x);
5894 unsigned int endregno
5895 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5896 : hard_regno_nregs[regno][GET_MODE (x)]);
5899 for (i = regno; i < endregno; i++)
5901 if (REG_IN_TABLE (i) >= 0)
5903 remove_invalid_refs (i);
5904 REG_IN_TABLE (i) = -1;
5911 /* We may have just removed some of the src_elt's from the hash table.
5912 So replace each one with the current head of the same class. */
5914 for (i = 0; i < n_sets; i++)
5917 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5918 /* If elt was removed, find current head of same class,
5919 or 0 if nothing remains of that class. */
5921 struct table_elt *elt = sets[i].src_elt;
5923 while (elt && elt->prev_same_value)
5924 elt = elt->prev_same_value;
5926 while (elt && elt->first_same_value == 0)
5927 elt = elt->next_same_value;
5928 sets[i].src_elt = elt ? elt->first_same_value : 0;
5932 /* Now insert the destinations into their equivalence classes. */
5934 for (i = 0; i < n_sets; i++)
5937 rtx dest = SET_DEST (sets[i].rtl);
5938 struct table_elt *elt;
5940 /* Don't record value if we are not supposed to risk allocating
5941 floating-point values in registers that might be wider than
5943 if ((flag_float_store
5945 && FLOAT_MODE_P (GET_MODE (dest)))
5946 /* Don't record BLKmode values, because we don't know the
5947 size of it, and can't be sure that other BLKmode values
5948 have the same or smaller size. */
5949 || GET_MODE (dest) == BLKmode
5950 /* Don't record values of destinations set inside a libcall block
5951 since we might delete the libcall. Things should have been set
5952 up so we won't want to reuse such a value, but we play it safe
5955 /* If we didn't put a REG_EQUAL value or a source into the hash
5956 table, there is no point in recording DEST.
5957 || sets[i].src_elt == 0
5958 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5959 or SIGN_EXTEND, don't record DEST since it can cause
5960 some tracking to be wrong.
5962 ??? Think about this more later. */
5963 || (GET_CODE (dest) == SUBREG
5964 && (GET_MODE_SIZE (GET_MODE (dest))
5965 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5966 && (GET_CODE (sets[i].src) == SIGN_EXTEND
5967 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
5970 /* STRICT_LOW_PART isn't part of the value BEING set,
5971 and neither is the SUBREG inside it.
5972 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5973 if (GET_CODE (dest) == STRICT_LOW_PART)
5974 dest = SUBREG_REG (XEXP (dest, 0));
5976 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5977 /* Registers must also be inserted into chains for quantities. */
5978 if (insert_regs (dest, sets[i].src_elt, 1))
5980 /* If `insert_regs' changes something, the hash code must be
5982 rehash_using_reg (dest);
5983 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5986 elt = insert (dest, sets[i].src_elt,
5987 sets[i].dest_hash, GET_MODE (dest));
5989 elt->in_memory = (MEM_P (sets[i].inner_dest)
5990 && !MEM_READONLY_P (sets[i].inner_dest));
5992 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5993 narrower than M2, and both M1 and M2 are the same number of words,
5994 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5995 make that equivalence as well.
5997 However, BAR may have equivalences for which gen_lowpart
5998 will produce a simpler value than gen_lowpart applied to
5999 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6000 BAR's equivalences. If we don't get a simplified form, make
6001 the SUBREG. It will not be used in an equivalence, but will
6002 cause two similar assignments to be detected.
6004 Note the loop below will find SUBREG_REG (DEST) since we have
6005 already entered SRC and DEST of the SET in the table. */
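/* Added illustrative sketch (not in the original source; register numbers
   are hypothetical): for (set (subreg:SI (reg:HI 110) 0) (reg:SI 111)),
   SImode is no narrower than HImode and both fit in one word, so the loop
   below also records (reg:HI 110) as equivalent to the HImode low part of
   (reg:SI 111).  */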
6007 if (GET_CODE (dest) == SUBREG
6008 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6010 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6011 && (GET_MODE_SIZE (GET_MODE (dest))
6012 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6013 && sets[i].src_elt != 0)
6015 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6016 struct table_elt *elt, *classp = 0;
6018 for (elt = sets[i].src_elt->first_same_value; elt;
6019 elt = elt->next_same_value)
6023 struct table_elt *src_elt;
6026 /* Ignore invalid entries. */
6027 if (!REG_P (elt->exp)
6028 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
6031 /* We may have already been playing subreg games. If the
6032 mode is already correct for the destination, use it. */
6033 if (GET_MODE (elt->exp) == new_mode)
6037 /* Calculate big endian correction for the SUBREG_BYTE.
6038 We have already checked that M1 (GET_MODE (dest))
6039 is not narrower than M2 (new_mode). */
6040 if (BYTES_BIG_ENDIAN)
6041 byte = (GET_MODE_SIZE (GET_MODE (dest))
6042 - GET_MODE_SIZE (new_mode));
6044 new_src = simplify_gen_subreg (new_mode, elt->exp,
6045 GET_MODE (dest), byte);
6048 /* The call to simplify_gen_subreg fails if the value
6049 is VOIDmode, yet we can't do any simplification, e.g.
6050 for EXPR_LISTs denoting function call results.
6051 It is invalid to construct a SUBREG with a VOIDmode
6052 SUBREG_REG, hence a zero new_src means we can't do
6053 this substitution. */
6057 src_hash = HASH (new_src, new_mode);
6058 src_elt = lookup (new_src, src_hash, new_mode);
6060 /* Put the new source in the hash table if it isn't
6061 already there. */
6064 if (insert_regs (new_src, classp, 0))
6066 rehash_using_reg (new_src);
6067 src_hash = HASH (new_src, new_mode);
6069 src_elt = insert (new_src, classp, src_hash, new_mode);
6070 src_elt->in_memory = elt->in_memory;
6072 else if (classp && classp != src_elt->first_same_value)
6073 /* Show that two things that we've seen before are
6074 actually the same. */
6075 merge_equiv_classes (src_elt, classp);
6077 classp = src_elt->first_same_value;
6078 /* Ignore invalid entries. */
6080 && !REG_P (classp->exp)
6081 && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
6082 classp = classp->next_same_value;
6087 /* Special handling for (set REG0 REG1) where REG0 is the
6088 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6089 be used in the sequel, so (if easily done) change this insn to
6090 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6091 that computed their value. Then REG1 will become a dead store
6092 and won't cloud the situation for later optimizations.
6094 Do not make this change if REG1 is a hard register, because it will
6095 then be used in the sequel and we may be changing a two-operand insn
6096 into a three-operand insn.
6098 Also do not do this if we are operating on a copy of INSN.
6100 Also don't do this if INSN ends a libcall; this would cause an unrelated
6101 register to be set in the middle of a libcall, and we then get bad code
6102 if the libcall is deleted. */
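/* A sketch of the transformation (register numbers are invented):

	(set (reg:SI 101) (plus:SI (reg:SI 102) (const_int 4)))
	(set (reg:SI 100) (reg:SI 101))

   becomes, when reg 100 is the cheaper register,

	(set (reg:SI 100) (plus:SI (reg:SI 102) (const_int 4)))
	(set (reg:SI 101) (reg:SI 100))

   so that the copy into reg 101 turns into a dead store if reg 101 is
   not used afterwards.  */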
6104 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
6105 && NEXT_INSN (PREV_INSN (insn)) == insn
6106 && REG_P (SET_SRC (sets[0].rtl))
6107 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6108 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6110 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6111 struct qty_table_elem *src_ent = &qty_table[src_q];
6113 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6114 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6117 /* Scan for the previous nonnote insn, but stop at a basic block boundary.  */
6121 prev = PREV_INSN (prev);
6123 while (prev && NOTE_P (prev)
6124 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6126 /* Do not swap the registers around if the previous instruction
6127 attaches a REG_EQUIV note to REG1.
6129 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6130 from the pseudo that originally shadowed an incoming argument
6131 to another register. Some uses of REG_EQUIV might rely on it
6132 being attached to REG1 rather than REG2.
6134 This section previously turned the REG_EQUIV into a REG_EQUAL
6135 note. We cannot do that because REG_EQUIV may provide an
6136 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
6138 if (prev != 0 && NONJUMP_INSN_P (prev)
6139 && GET_CODE (PATTERN (prev)) == SET
6140 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6141 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6143 rtx dest = SET_DEST (sets[0].rtl);
6144 rtx src = SET_SRC (sets[0].rtl);
6147 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6148 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6149 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6150 apply_change_group ();
6152 /* If INSN has a REG_EQUAL note, and this note mentions
6153 REG0, then we must delete it, because the value in
6154 REG0 has changed. If the note's value is REG1, we must
6155 also delete it because that is now this insn's dest. */
6156 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6158 && (reg_mentioned_p (dest, XEXP (note, 0))
6159 || rtx_equal_p (src, XEXP (note, 0))))
6160 remove_note (insn, note);
6165 /* If this is a conditional jump insn, record any known equivalences due to
6166 the condition being tested. */
6169 && n_sets == 1 && GET_CODE (x) == SET
6170 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6171 record_jump_equiv (insn, 0);
6174 /* If the previous insn set CC0 and this insn no longer references CC0,
6175 delete the previous insn. Here we use the fact that nothing expects CC0
6176 to be valid over an insn, which is true until the final pass. */
6177 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6178 && (tem = single_set (prev_insn)) != 0
6179 && SET_DEST (tem) == cc0_rtx
6180 && ! reg_mentioned_p (cc0_rtx, x))
6181 delete_insn (prev_insn);
6183 prev_insn_cc0 = this_insn_cc0;
6184 prev_insn_cc0_mode = this_insn_cc0_mode;
6189 /* Remove from the hash table all expressions that reference memory. */
6192 invalidate_memory (void)
6195 struct table_elt *p, *next;
6197 for (i = 0; i < HASH_SIZE; i++)
6198 for (p = table[i]; p; p = next)
6200 next = p->next_same_hash;
6202 remove_from_table (p, i);
6206 /* If ADDR is an address that implicitly affects the stack pointer, return
6207 1 and update the register tables to show the effect. Else, return 0. */
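/* For example, the address of a push such as

	(mem:SI (pre_dec:SI (reg:SI sp)))

   contains an RTX_AUTOINC operation on the stack pointer; we bump
   REG_TICK for SP so stale equivalences involving SP are noticed, and
   invalidate SP itself if it already appears in the hash table.  */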
6210 addr_affects_sp_p (rtx addr)
6212 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
6213 && REG_P (XEXP (addr, 0))
6214 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6216 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6218 REG_TICK (STACK_POINTER_REGNUM)++;
6219 /* Is it possible to use a subreg of SP? */
6220 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6223 /* This should be *very* rare. */
6224 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6225 invalidate (stack_pointer_rtx, VOIDmode);
6233 /* Perform invalidation on the basis of everything about an insn
6234 except for invalidating the actual places that are SET in it.
6235 This includes the places CLOBBERed, and anything that might
6236 alias with something that is SET or CLOBBERed.
6238 X is the pattern of the insn. */
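/* For example, given a pattern like

	(parallel [(set (reg:SI 100) (reg:SI 101))
		   (clobber (reg:CC flags))])

   the CLOBBER causes any hash table entries mentioning the clobbered
   register to be invalidated; the SETs themselves are handled elsewhere.
   (Register names here are illustrative only.)  */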
6241 invalidate_from_clobbers (rtx x)
6243 if (GET_CODE (x) == CLOBBER)
6245 rtx ref = XEXP (x, 0);
6248 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6250 invalidate (ref, VOIDmode);
6251 else if (GET_CODE (ref) == STRICT_LOW_PART
6252 || GET_CODE (ref) == ZERO_EXTRACT)
6253 invalidate (XEXP (ref, 0), GET_MODE (ref));
6256 else if (GET_CODE (x) == PARALLEL)
6259 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6261 rtx y = XVECEXP (x, 0, i);
6262 if (GET_CODE (y) == CLOBBER)
6264 rtx ref = XEXP (y, 0);
6265 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6267 invalidate (ref, VOIDmode);
6268 else if (GET_CODE (ref) == STRICT_LOW_PART
6269 || GET_CODE (ref) == ZERO_EXTRACT)
6270 invalidate (XEXP (ref, 0), GET_MODE (ref));
6276 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6277 and replace any registers in them with either an equivalent constant
6278 or the canonical form of the register. If we are inside an address,
6279 only do this if the address remains valid.
6281 OBJECT is 0 except when within a MEM in which case it is the MEM.
6283 Return the replacement for X. */
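/* As a rough example, if a REG_EQUAL note contains

	(plus:SI (reg:SI 100) (const_int 4))

   and reg 100 is currently known to hold (const_int 8), the register
   reference in the note is replaced by that constant; otherwise the
   register is replaced by its canonical equivalent register.  */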
6286 cse_process_notes (rtx x, rtx object)
6288 enum rtx_code code = GET_CODE (x);
6289 const char *fmt = GET_RTX_FORMAT (code);
6306 validate_change (x, &XEXP (x, 0),
6307 cse_process_notes (XEXP (x, 0), x), 0);
6312 if (REG_NOTE_KIND (x) == REG_EQUAL)
6313 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6315 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6322 rtx new = cse_process_notes (XEXP (x, 0), object);
6323 /* We don't substitute VOIDmode constants into these rtx,
6324 since they would impede folding. */
6325 if (GET_MODE (new) != VOIDmode)
6326 validate_change (object, &XEXP (x, 0), new, 0);
6331 i = REG_QTY (REGNO (x));
6333 /* Return a constant or a constant register. */
6334 if (REGNO_QTY_VALID_P (REGNO (x)))
6336 struct qty_table_elem *ent = &qty_table[i];
6338 if (ent->const_rtx != NULL_RTX
6339 && (CONSTANT_P (ent->const_rtx)
6340 || REG_P (ent->const_rtx)))
6342 rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
6348 /* Otherwise, canonicalize this register. */
6349 return canon_reg (x, NULL_RTX);
6355 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6357 validate_change (object, &XEXP (x, i),
6358 cse_process_notes (XEXP (x, i), object), 0);
6363 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6364 since they are done elsewhere. This function is called via note_stores. */
6367 invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6369 enum rtx_code code = GET_CODE (dest);
6372 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6373 /* There are times when an address can appear varying and be a PLUS
6374 during this scan when it would be a fixed address were we to know
6375 the proper equivalences. So invalidate all memory if there is
6376 a BLKmode or nonscalar memory reference or a reference to a
6377 variable address. */
6378 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6379 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6381 invalidate_memory ();
6385 if (GET_CODE (set) == CLOBBER
6390 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6391 invalidate (XEXP (dest, 0), GET_MODE (dest));
6392 else if (code == REG || code == SUBREG || code == MEM)
6393 invalidate (dest, VOIDmode);
6396 /* Invalidate all insns from START up to the end of the function or the
6397 next label.  This is called when we wish to CSE around a block that is
6398 conditionally executed. */
6401 invalidate_skipped_block (rtx start)
6405 for (insn = start; insn && !LABEL_P (insn);
6406 insn = NEXT_INSN (insn))
6408 if (! INSN_P (insn))
6413 if (! CONST_OR_PURE_CALL_P (insn))
6414 invalidate_memory ();
6415 invalidate_for_call ();
6418 invalidate_from_clobbers (PATTERN (insn));
6419 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6423 /* Find the end of INSN's basic block and return its range,
6424 the total number of SETs in all the insns of the block, the last insn of the
6425 block, and the branch path.
6427 The branch path indicates which branches should be followed. If a nonzero
6428 path size is specified, the block should be rescanned and a different set
6429 of branches will be taken. The branch path is only used if
6430 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6432 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6433 used to describe the block. It is filled in with the information about
6434 the current block. The incoming structure's branch path, if any, is used
6435 to construct the output branch path. */
6438 cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6439 int follow_jumps, int skip_blocks)
6443 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6444 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6445 int path_size = data->path_size;
6449 /* Update the previous branch path, if any. If the last branch was
6450 previously PATH_TAKEN, mark it PATH_NOT_TAKEN.
6451 If it was previously PATH_NOT_TAKEN,
6452 shorten the path by one and look at the previous branch. We know that
6453 at least one branch must have been taken if PATH_SIZE is nonzero. */
6454 while (path_size > 0)
6456 if (data->path[path_size - 1].status != PATH_NOT_TAKEN)
6458 data->path[path_size - 1].status = PATH_NOT_TAKEN;
6465 /* If the first instruction is marked with QImode, that means we've
6466 already processed this block. Our caller will look at DATA->LAST
6467 to figure out where to go next. We want to return the next block
6468 in the instruction stream, not some branched-to block somewhere
6469 else.  We accomplish this by pretending our caller forbade us to
6470 follow jumps or skip blocks.  */
6471 if (GET_MODE (insn) == QImode)
6472 follow_jumps = skip_blocks = 0;
6474 /* Scan to end of this basic block. */
6475 while (p && !LABEL_P (p))
6477 /* Don't cse over a call to setjmp; on some machines (e.g. VAX)
6478 the regs restored by the longjmp come from
6479 a later time than the setjmp. */
6480 if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
6481 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6484 /* A PARALLEL can have lots of SETs in it,
6485 especially if it is really an ASM_OPERANDS. */
6486 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6487 nsets += XVECLEN (PATTERN (p), 0);
6488 else if (!NOTE_P (p))
6491 /* Ignore insns made by CSE; they cannot affect the boundaries of the basic block.  */
6494 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6495 high_cuid = INSN_CUID (p);
6496 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6497 low_cuid = INSN_CUID (p);
6499 /* See if this insn is in our branch path.  If it is and we are to take it, do so.  */
6501 if (path_entry < path_size && data->path[path_entry].branch == p)
6503 if (data->path[path_entry].status != PATH_NOT_TAKEN)
6506 /* Point to next entry in path, if any. */
6510 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6511 was specified, we haven't reached our maximum path length, there are
6512 insns following the target of the jump, this is the only use of the
6513 jump label, and the target label is preceded by a BARRIER.
6515 Alternatively, we can follow the jump if it branches around a
6516 block of code and there are no other branches into the block.
6517 In this case invalidate_skipped_block will be called to invalidate any
6518 registers set in the block when following the jump. */
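/* Roughly: P is a conditional jump.  If its target label is preceded by
   a BARRIER (ignoring notes), as in

	(jump_insn ... to L1)
	...
	(barrier)
     L1: ...

   then following the branch (PATH_TAKEN) simply extends the basic block
   at L1.  If instead P branches forward around a block with no other
   entries, we may keep scanning past that block (PATH_AROUND) after using
   invalidate_skipped_block to forget anything it sets.  (Labels here are
   invented.)  */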
6520 else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6522 && GET_CODE (PATTERN (p)) == SET
6523 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6524 && JUMP_LABEL (p) != 0
6525 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6526 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6528 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6530 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6531 || (PREV_INSN (q) && CALL_P (PREV_INSN (q))
6532 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6533 && (!LABEL_P (q) || LABEL_NUSES (q) != 0))
6536 /* If we ran into a BARRIER, this code is an extension of the
6537 basic block when the branch is taken. */
6538 if (follow_jumps && q != 0 && BARRIER_P (q))
6540 /* Don't allow ourselves to keep walking around an
6541 always-executed loop. */
6542 if (next_real_insn (q) == next)
6548 /* Similarly, don't put a branch in our path more than once. */
6549 for (i = 0; i < path_entry; i++)
6550 if (data->path[i].branch == p)
6553 if (i != path_entry)
6556 data->path[path_entry].branch = p;
6557 data->path[path_entry++].status = PATH_TAKEN;
6559 /* This branch now ends our path. It was possible that we
6560 didn't see this branch the last time around (when the
6561 insn in front of the target was a JUMP_INSN that was
6562 turned into a no-op). */
6563 path_size = path_entry;
6566 /* Mark block so we won't scan it again later. */
6567 PUT_MODE (NEXT_INSN (p), QImode);
6569 /* Detect a branch around a block of code. */
6570 else if (skip_blocks && q != 0 && !LABEL_P (q))
6574 if (next_real_insn (q) == next)
6580 for (i = 0; i < path_entry; i++)
6581 if (data->path[i].branch == p)
6584 if (i != path_entry)
6587 /* This is no_labels_between_p (p, q) with an added check for
6588 reaching the end of a function (in case Q precedes P). */
6589 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6595 data->path[path_entry].branch = p;
6596 data->path[path_entry++].status = PATH_AROUND;
6598 path_size = path_entry;
6601 /* Mark block so we won't scan it again later. */
6602 PUT_MODE (NEXT_INSN (p), QImode);
6609 data->low_cuid = low_cuid;
6610 data->high_cuid = high_cuid;
6611 data->nsets = nsets;
6614 /* If none of the jumps in the path were taken, set our path length to zero
6615 so a rescan won't be done.  */
6616 for (i = path_size - 1; i >= 0; i--)
6617 if (data->path[i].status != PATH_NOT_TAKEN)
6621 data->path_size = 0;
6623 data->path_size = path_size;
6625 /* End the current branch path. */
6626 data->path[path_size].branch = 0;
6629 /* Perform cse on the instructions of a function.
6630 F is the first instruction.
6631 NREGS is one plus the highest pseudo-reg number used in the function.
6633 Returns 1 if jump_optimize should be redone due to simplifications
6634 in conditional jump instructions. */
6637 cse_main (rtx f, int nregs, FILE *file)
6639 struct cse_basic_block_data val;
6643 val.path = xmalloc (sizeof (struct branch_path)
6644 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6646 cse_jumps_altered = 0;
6647 recorded_label_ref = 0;
6648 constant_pool_entries_cost = 0;
6649 constant_pool_entries_regcost = 0;
6651 rtl_hooks = cse_rtl_hooks;
6654 init_alias_analysis ();
6658 max_insn_uid = get_max_uid ();
6660 reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
6662 #ifdef LOAD_EXTEND_OP
6664 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
6665 and change the code and mode as appropriate. */
6666 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
6669 /* Reset the counter indicating how many elements have been made so far.  */
6671 n_elements_made = 0;
6673 /* Find the largest uid. */
6675 max_uid = get_max_uid ();
6676 uid_cuid = xcalloc (max_uid + 1, sizeof (int));
6678 /* Compute the mapping from uids to cuids.
6679 CUIDs are numbers assigned to insns, like uids,
6680 except that cuids increase monotonically through the code.
6681 Don't assign cuids to line-number NOTEs, so that the distance in cuids
6682 between two insns is not affected by -g. */
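/* For instance, for an insn stream I1, N (a line-number note), I2, the
   cuids assigned are 1, 1, 2: the note simply inherits the cuid of the
   preceding insn, so -g does not perturb cuid distances.  */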
6684 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6687 || NOTE_LINE_NUMBER (insn) < 0)
6688 INSN_CUID (insn) = ++i;
6690 /* Give a line number note the same cuid as preceding insn. */
6691 INSN_CUID (insn) = i;
6694 ggc_push_context ();
6696 /* Loop over basic blocks.
6697 Compute the maximum number of qty's needed for each basic block
6698 (which is 2 for each SET). */
6703 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps,
6704 flag_cse_skip_blocks);
6706 /* If this basic block was already processed or has no sets, skip it. */
6707 if (val.nsets == 0 || GET_MODE (insn) == QImode)
6709 PUT_MODE (insn, VOIDmode);
6710 insn = (val.last ? NEXT_INSN (val.last) : 0);
6715 cse_basic_block_start = val.low_cuid;
6716 cse_basic_block_end = val.high_cuid;
6717 max_qty = val.nsets * 2;
6720 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
6721 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6724 /* Make MAX_QTY bigger to give us room to optimize
6725 past the end of this basic block, if that should prove useful. */
6731 /* If this basic block is being extended by following certain jumps,
6732 (see `cse_end_of_basic_block'), we reprocess the code from the start.
6733 Otherwise, we start after this basic block. */
6734 if (val.path_size > 0)
6735 cse_basic_block (insn, val.last, val.path);
6738 int old_cse_jumps_altered = cse_jumps_altered;
6741 /* When cse changes a conditional jump to an unconditional
6742 jump, we want to reprocess the block, since it will give
6743 us a new branch path to investigate. */
6744 cse_jumps_altered = 0;
6745 temp = cse_basic_block (insn, val.last, val.path);
6746 if (cse_jumps_altered == 0
6747 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
6750 cse_jumps_altered |= old_cse_jumps_altered;
6763 if (max_elements_made < n_elements_made)
6764 max_elements_made = n_elements_made;
6767 end_alias_analysis ();
6769 free (reg_eqv_table);
6771 rtl_hooks = general_rtl_hooks;
6773 return cse_jumps_altered || recorded_label_ref;
6776 /* Process a single basic block.  FROM and TO are the limits of the basic
6777 block.  NEXT_BRANCH points to the branch path when following jumps or
6778 a null path when not following jumps.  */
6785 cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
6789 rtx libcall_insn = NULL_RTX;
6791 int no_conflict = 0;
6793 /* This array is undefined before max_reg, so only allocate
6794 the space actually needed and adjust the start. */
6796 qty_table = xmalloc ((max_qty - max_reg) * sizeof (struct qty_table_elem));
6797 qty_table -= max_reg;
6801 /* TO might be a label. If so, protect it from being deleted. */
6802 if (to != 0 && LABEL_P (to))
6805 for (insn = from; insn != to; insn = NEXT_INSN (insn))
6807 enum rtx_code code = GET_CODE (insn);
6809 /* If we have processed 1,000 insns, flush the hash table to
6810 avoid extreme quadratic behavior. We must not include NOTEs
6811 in the count since there may be more of them when generating
6812 debugging information. If we clear the table at different
6813 times, code generated with -g -O might be different than code
6814 generated with -O but not -g.
6816 ??? This is a real kludge and needs to be done some other way.
6818 if (code != NOTE && num_insns++ > 1000)
6820 flush_hash_table ();
6824 /* See if this is a branch that is part of the path. If so, and it is
6825 to be taken, do so. */
6826 if (next_branch->branch == insn)
6828 enum taken status = next_branch++->status;
6829 if (status != PATH_NOT_TAKEN)
6831 if (status == PATH_TAKEN)
6832 record_jump_equiv (insn, 1);
6834 invalidate_skipped_block (NEXT_INSN (insn));
6836 /* Set the last insn as the jump insn; it doesn't affect cc0.
6837 Then follow this branch. */
6842 insn = JUMP_LABEL (insn);
6847 if (GET_MODE (insn) == QImode)
6848 PUT_MODE (insn, VOIDmode);
6850 if (GET_RTX_CLASS (code) == RTX_INSN)
6854 /* Process notes first so we have all notes in canonical forms when
6855 looking for duplicate operations. */
6857 if (REG_NOTES (insn))
6858 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
6860 /* Track when we are inside a LIBCALL block.  Inside such a block,
6861 we do not want to record destinations. The last insn of a
6862 LIBCALL block is not considered to be part of the block, since
6863 its destination is the result of the block and hence should be recorded.  */
6866 if (REG_NOTES (insn) != 0)
6868 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
6869 libcall_insn = XEXP (p, 0);
6870 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
6872 /* Keep libcall_insn for the last SET insn of a no-conflict
6873 block to prevent changing the destination. */
6879 else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
6883 cse_insn (insn, libcall_insn);
6885 if (no_conflict == -1)
6891 /* If we haven't already found an insn where we added a LABEL_REF, see if this insn does.  */
6893 if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
6894 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6896 recorded_label_ref = 1;
6899 /* If INSN is now an unconditional jump, skip to the end of our
6900 basic block by pretending that we just did the last insn in the
6901 basic block. If we are jumping to the end of our block, show
6902 that we can have one usage of TO. */
6904 if (any_uncondjump_p (insn))
6908 free (qty_table + max_reg);
6912 if (JUMP_LABEL (insn) == to)
6915 /* Maybe TO was deleted because the jump is unconditional.
6916 If so, there is nothing left in this basic block. */
6917 /* ??? Perhaps it would be smarter to set TO
6918 to whatever follows this insn,
6919 and pretend the basic block had always ended here. */
6920 if (INSN_DELETED_P (to))
6923 insn = PREV_INSN (to);
6926 /* See if it is ok to keep on going past the label
6927 which used to end our basic block. Remember that we incremented
6928 the count of that label, so we decrement it here. If we made
6929 a jump unconditional, TO_USAGE will be one; in that case, we don't
6930 want to count the use in that jump. */
6932 if (to != 0 && NEXT_INSN (insn) == to
6933 && LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
6935 struct cse_basic_block_data val;
6938 insn = NEXT_INSN (to);
6940 /* If TO was the last insn in the function, we are done. */
6943 free (qty_table + max_reg);
6947 /* If TO was preceded by a BARRIER we are done with this block
6948 because it has no continuation. */
6949 prev = prev_nonnote_insn (to);
6950 if (prev && BARRIER_P (prev))
6952 free (qty_table + max_reg);
6956 /* Find the end of the following block. Note that we won't be
6957 following branches in this case. */
6960 val.path = xmalloc (sizeof (struct branch_path)
6961 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6962 cse_end_of_basic_block (insn, &val, 0, 0);
6965 /* If the tables we allocated have enough space left
6966 to handle all the SETs in the next basic block,
6967 continue through it. Otherwise, return,
6968 and that block will be scanned individually. */
6969 if (val.nsets * 2 + next_qty > max_qty)
6972 cse_basic_block_start = val.low_cuid;
6973 cse_basic_block_end = val.high_cuid;
6976 /* Prevent TO from being deleted if it is a label. */
6977 if (to != 0 && LABEL_P (to))
6980 /* Back up so we process the first insn in the extension. */
6981 insn = PREV_INSN (insn);
6985 gcc_assert (next_qty <= max_qty);
6987 free (qty_table + max_reg);
6989 return to ? NEXT_INSN (to) : 0;
6992 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
6993 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
6996 check_for_label_ref (rtx *rtl, void *data)
6998 rtx insn = (rtx) data;
7000 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7001 we must rerun jump since it needs to place the note. If this is a
7002 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7003 since no REG_LABEL will be added. */
7004 return (GET_CODE (*rtl) == LABEL_REF
7005 && ! LABEL_REF_NONLOCAL_P (*rtl)
7006 && LABEL_P (XEXP (*rtl, 0))
7007 && INSN_UID (XEXP (*rtl, 0)) != 0
7008 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7011 /* Count the number of times registers are used (not set) in X.
7012 COUNTS is an array in which we accumulate the count, INCR is how much
7013 we count each register usage. */
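/* E.g., counting (set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 101)))
   with INCR == 1 adds 2 to counts[101] and nothing to counts[100],
   since a register SET_DEST is not a use.  (Register numbers invented.)  */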
7016 count_reg_usage (rtx x, int *counts, int incr)
7026 switch (code = GET_CODE (x))
7029 counts[REGNO (x)] += incr;
7043 /* If we are clobbering a MEM, mark any registers inside the address as being used.  */
7045 if (MEM_P (XEXP (x, 0)))
7046 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, incr);
7050 /* Unless we are setting a REG, count everything in SET_DEST. */
7051 if (!REG_P (SET_DEST (x)))
7052 count_reg_usage (SET_DEST (x), counts, incr);
7053 count_reg_usage (SET_SRC (x), counts, incr);
7057 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, incr);
7062 count_reg_usage (PATTERN (x), counts, incr);
7064 /* Things used in a REG_EQUAL note aren't dead since loop may try to use them.  */
7067 note = find_reg_equal_equiv_note (x);
7070 rtx eqv = XEXP (note, 0);
7072 if (GET_CODE (eqv) == EXPR_LIST)
7073 /* This REG_EQUAL note describes the result of a function call.
7074 Process all the arguments. */
7077 count_reg_usage (XEXP (eqv, 0), counts, incr);
7078 eqv = XEXP (eqv, 1);
7080 while (eqv && GET_CODE (eqv) == EXPR_LIST);
7082 count_reg_usage (eqv, counts, incr);
7087 if (REG_NOTE_KIND (x) == REG_EQUAL
7088 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
7089 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7090 involving registers in the address. */
7091 || GET_CODE (XEXP (x, 0)) == CLOBBER)
7092 count_reg_usage (XEXP (x, 0), counts, incr);
7094 count_reg_usage (XEXP (x, 1), counts, incr);
7098 /* Iterate over just the inputs, not the constraints as well. */
7099 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7100 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, incr);
7110 fmt = GET_RTX_FORMAT (code);
7111 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7114 count_reg_usage (XEXP (x, i), counts, incr);
7115 else if (fmt[i] == 'E')
7116 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7117 count_reg_usage (XVECEXP (x, i, j), counts, incr);
7121 /* Return true if set is live. */
7123 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
7130 if (set_noop_p (set))
7134 else if (GET_CODE (SET_DEST (set)) == CC0
7135 && !side_effects_p (SET_SRC (set))
7136 && ((tem = next_nonnote_insn (insn)) == 0
7138 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7141 else if (!REG_P (SET_DEST (set))
7142 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7143 || counts[REGNO (SET_DEST (set))] != 0
7144 || side_effects_p (SET_SRC (set)))
7149 /* Return true if insn is live. */
7152 insn_live_p (rtx insn, int *counts)
7155 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7157 else if (GET_CODE (PATTERN (insn)) == SET)
7158 return set_live_p (PATTERN (insn), insn, counts);
7159 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7161 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7163 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7165 if (GET_CODE (elt) == SET)
7167 if (set_live_p (elt, insn, counts))
7170 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7179 /* Return true if libcall is dead as a whole. */
7182 dead_libcall_p (rtx insn, int *counts)
7186 /* See if there's a REG_EQUAL note on this insn and try to
7187 replace the source with the REG_EQUAL expression.
7189 We assume that insns with REG_RETVALs can only be reg->reg
7190 copies at this point. */
7191 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7195 set = single_set (insn);
7199 new = simplify_rtx (XEXP (note, 0));
7201 new = XEXP (note, 0);
7203 /* While changing insn, we must update the counts accordingly. */
7204 count_reg_usage (insn, counts, -1);
7206 if (validate_change (insn, &SET_SRC (set), new, 0))
7208 count_reg_usage (insn, counts, 1);
7209 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7210 remove_note (insn, note);
7214 if (CONSTANT_P (new))
7216 new = force_const_mem (GET_MODE (SET_DEST (set)), new);
7217 if (new && validate_change (insn, &SET_SRC (set), new, 0))
7219 count_reg_usage (insn, counts, 1);
7220 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7221 remove_note (insn, note);
7226 count_reg_usage (insn, counts, 1);
7230 /* Scan all the insns and delete any that are dead; i.e., they store a register
7231 that is never used or they copy a register to itself.
7233 This is used to remove insns made obviously dead by cse, loop or other
7234 optimizations. It improves the heuristics in loop since it won't try to
7235 move dead invariants out of loops or make givs for dead quantities. The
7236 remaining passes of the compilation are also sped up. */
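/* Typical examples of insns deleted here (pseudo-register numbers are
   invented): a no-op self copy such as

	(set (reg:SI 100) (reg:SI 100))

   or a set of a pseudo whose use count is zero, e.g.

	(set (reg:SI 123) (const_int 0))

   provided the source has no side effects.  Stores to hard registers
   are always considered live.  */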
7239 delete_trivially_dead_insns (rtx insns, int nreg)
7243 int in_libcall = 0, dead_libcall = 0;
7244 int ndead = 0, nlastdead, niterations = 0;
7246 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7247 /* First count the number of times each register is used. */
7248 counts = xcalloc (nreg, sizeof (int));
7249 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7250 count_reg_usage (insn, counts, 1);
7256 /* Go from the last insn to the first and delete insns that only set unused
7257 registers or copy a register to itself. As we delete an insn, remove
7258 usage counts for registers it uses.
7260 The first jump optimization pass may leave a real insn as the last
7261 insn in the function. We must not skip that insn or we may end
7262 up deleting code that is not really dead. */
7263 insn = get_last_insn ();
7264 if (! INSN_P (insn))
7265 insn = prev_real_insn (insn);
7267 for (; insn; insn = prev)
7271 prev = prev_real_insn (insn);
7273 /* Don't delete any insns that are part of a libcall block unless
7274 we can delete the whole libcall block.
7276 Flow or loop might get confused if we did that. Remember
7277 that we are scanning backwards. */
7278 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7282 dead_libcall = dead_libcall_p (insn, counts);
7284 else if (in_libcall)
7285 live_insn = ! dead_libcall;
7287 live_insn = insn_live_p (insn, counts);
7289 /* If this is a dead insn, delete it and show registers in it aren't being used.  */
7294 count_reg_usage (insn, counts, -1);
7295 delete_insn_and_edges (insn);
7299 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7306 while (ndead != nlastdead);
7308 if (dump_file && ndead)
7309 fprintf (dump_file, "Deleted %i trivially dead insns; %i iterations\n",
7310 ndead, niterations);
7313 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7317 /* This function is called via for_each_rtx. The argument, NEWREG, is
7318 a condition code register with the desired mode. If we are looking
7319 at the same register in a different mode, replace it with NEWREG.  */
7323 cse_change_cc_mode (rtx *loc, void *data)
7325 rtx newreg = (rtx) data;
7329 && REGNO (*loc) == REGNO (newreg)
7330 && GET_MODE (*loc) != GET_MODE (newreg))
7338 /* Change the mode of any reference to the register REGNO (NEWREG) to
7339 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
7340 any instruction which modifies NEWREG. */
7343 cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7347 for (insn = start; insn != end; insn = NEXT_INSN (insn))
7349 if (! INSN_P (insn))
7352 if (reg_set_p (newreg, insn))
7355 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, newreg);
7356 for_each_rtx (®_NOTES (insn), cse_change_cc_mode, newreg);
7360 /* BB is a basic block which finishes with CC_REG as a condition code
7361 register which is set to CC_SRC. Look through the successors of BB
7362 to find blocks which have a single predecessor (i.e., this one),
7363 and look through those blocks for an assignment to CC_REG which is
7364 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
7365 permitted to change the mode of CC_SRC to a compatible mode. This
7366 returns VOIDmode if no equivalent assignments were found.
7367 Otherwise it returns the mode which CC_SRC should wind up with.
7369 The main complexity in this function is handling the mode issues.
7370 We may have more than one duplicate which we can eliminate, and we
7371 try to find a mode which will work for multiple duplicates. */
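/* A sketch of the redundancy this removes (the condition code register,
   its mode and the register numbers are all illustrative):

     BB ends with:	(set (reg:CCZ flags) (compare:CCZ (reg:SI 100) (const_int 0)))
			(conditional jump using the flags register)
     successor begins:	(set (reg:CCZ flags) (compare:CCZ (reg:SI 100) (const_int 0)))

   When the successor has BB as its only predecessor, the second
   comparison computes the same value and can be deleted, possibly after
   picking a common mode via targetm.cc_modes_compatible.  */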
7373 static enum machine_mode
7374 cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
7377 enum machine_mode mode;
7378 unsigned int insn_count;
7381 enum machine_mode modes[2];
7386 /* We expect to have two successors. Look at both before picking
7387 the final mode for the comparison. If we have more successors
7388 (i.e., some sort of table jump, although that seems unlikely),
7389 then we require all beyond the first two to use the same mode.  */
7392 found_equiv = false;
7393 mode = GET_MODE (cc_src);
7395 for (e = bb->succ; e; e = e->succ_next)
7400 if (e->flags & EDGE_COMPLEX)
7404 || e->dest->pred->pred_next
7405 || e->dest == EXIT_BLOCK_PTR)
7408 end = NEXT_INSN (BB_END (e->dest));
7409 for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7413 if (! INSN_P (insn))
7416 /* If CC_SRC is modified, we have to stop looking for
7417 something which uses it. */
7418 if (modified_in_p (cc_src, insn))
7421 /* Check whether INSN sets CC_REG to CC_SRC. */
7422 set = single_set (insn);
7424 && REG_P (SET_DEST (set))
7425 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7428 enum machine_mode set_mode;
7429 enum machine_mode comp_mode;
7432 set_mode = GET_MODE (SET_SRC (set));
7433 comp_mode = set_mode;
7434 if (rtx_equal_p (cc_src, SET_SRC (set)))
7436 else if (GET_CODE (cc_src) == COMPARE
7437 && GET_CODE (SET_SRC (set)) == COMPARE
7439 && rtx_equal_p (XEXP (cc_src, 0),
7440 XEXP (SET_SRC (set), 0))
7441 && rtx_equal_p (XEXP (cc_src, 1),
7442 XEXP (SET_SRC (set), 1)))
7445 comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7446 if (comp_mode != VOIDmode
7447 && (can_change_mode || comp_mode == mode))
7454 if (insn_count < ARRAY_SIZE (insns))
7456 insns[insn_count] = insn;
7457 modes[insn_count] = set_mode;
7458 last_insns[insn_count] = end;
7461 if (mode != comp_mode)
7463 gcc_assert (can_change_mode);
7465 PUT_MODE (cc_src, mode);
7470 if (set_mode != mode)
7472 /* We found a matching expression in the
7473 wrong mode, but we don't have room to
7474 store it in the array.  Punt.  This case should be rare.  */
7478 /* INSN sets CC_REG to a value equal to CC_SRC
7479 with the right mode.  We can simply delete it.  */
7484 /* We found an instruction to delete. Keep looking,
7485 in the hopes of finding a three-way jump. */
7489 /* We found an instruction which sets the condition
7490 code, so don't look any farther. */
7494 /* If INSN sets CC_REG in some other way, don't look any farther.  */
7496 if (reg_set_p (cc_reg, insn))
7500 /* If we fell off the bottom of the block, we can keep looking
7501 through successors. We pass CAN_CHANGE_MODE as false because
7502 we aren't prepared to handle compatibility between the
7503 further blocks and this block. */
7506 enum machine_mode submode;
7508 submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
7509 if (submode != VOIDmode)
7511 gcc_assert (submode == mode);
7513 can_change_mode = false;
7521 /* Now INSN_COUNT is the number of instructions we found which set
7522 CC_REG to a value equivalent to CC_SRC. The instructions are in
7523 INSNS. The modes used by those instructions are in MODES. */
7526 for (i = 0; i < insn_count; ++i)
7528 if (modes[i] != mode)
7530 /* We need to change the mode of CC_REG in INSNS[i] and
7531 subsequent instructions. */
7534 if (GET_MODE (cc_reg) == mode)
7537 newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7539 cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7543 delete_insn (insns[i]);
7549 /* If we have a fixed condition code register (or two), walk through
7550 the instructions and try to eliminate duplicate assignments. */
7553 cse_condition_code_reg (void)
7555 unsigned int cc_regno_1;
7556 unsigned int cc_regno_2;
7561 if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7564 cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7565 if (cc_regno_2 != INVALID_REGNUM)
7566 cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7568 cc_reg_2 = NULL_RTX;
7577 enum machine_mode mode;
7578 enum machine_mode orig_mode;
7580 /* Look for blocks which end with a conditional jump based on a
7581 condition code register. Then look for the instruction which
7582 sets the condition code register. Then look through the
7583 successor blocks for instructions which set the condition
7584 code register to the same value. There are other possible
7585 uses of the condition code register, but these are by far the
7586 most common and the ones which we are most likely to be able to optimize.
7589 last_insn = BB_END (bb);
7590 if (!JUMP_P (last_insn))
7593 if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7595 else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7600 cc_src_insn = NULL_RTX;
7602 for (insn = PREV_INSN (last_insn);
7603 insn && insn != PREV_INSN (BB_HEAD (bb));
7604 insn = PREV_INSN (insn))
7608 if (! INSN_P (insn))
7610 set = single_set (insn);
7612 && REG_P (SET_DEST (set))
7613 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7616 cc_src = SET_SRC (set);
7619 else if (reg_set_p (cc_reg, insn))
7626 if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7629 /* Now CC_REG is a condition code register used for a
7630 conditional jump at the end of the block, and CC_SRC, in
7631 CC_SRC_INSN, is the value to which that condition code
7632 register is set, and CC_SRC is still meaningful at the end of the block.  */
7635 orig_mode = GET_MODE (cc_src);
7636 mode = cse_cc_succs (bb, cc_reg, cc_src, true);
7637 if (mode != VOIDmode)
7639 gcc_assert (mode == GET_MODE (cc_src));
7640 if (mode != orig_mode)
7642 rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7644 /* Change the mode of CC_REG in CC_SRC_INSN to
7645 GET_MODE (NEWREG). */
7646 for_each_rtx (&PATTERN (cc_src_insn), cse_change_cc_mode,
7648 for_each_rtx (®_NOTES (cc_src_insn), cse_change_cc_mode,
7651 /* Do the same in the following insns that use the
7652 current value of CC_REG within BB. */
7653 cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7654 NEXT_INSN (last_insn),