1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 /* stdio.h must precede rtl.h for FFS. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "basic-block.h"
34 #include "insn-config.h"
45 #include "rtlhooks-def.h"
46 #include "tree-pass.h"
48 /* The basic idea of common subexpression elimination is to go
49 through the code, keeping a record of expressions that would
50 have the same value at the current scan point, and replacing
51 expressions encountered with the cheapest equivalent expression.
53 It is too complicated to keep track of the different possibilities
54 when control paths merge in this code; so, at each label, we forget all
55 that is known and start fresh. This can be described as processing each
56 extended basic block separately. We have a separate pass to perform
59 Note CSE can turn a conditional or computed jump into a nop or
60 an unconditional jump. When this occurs we arrange to run the jump
61 optimizer after CSE to delete the unreachable code.
63 We use two data structures to record the equivalent expressions:
64 a hash table for most expressions, and a vector of "quantity
65 numbers" to record equivalent (pseudo) registers.
67 The use of the special data structure for registers is desirable
68 because it is faster. It is possible because register references
69 contain a fairly small number, the register number, taken from
70 a contiguously allocated series, and two register references are
71 identical if they have the same number. General expressions
72 do not have any such thing, so the only way to retrieve the
73 information recorded on an expression other than a register
74 is to keep it in a hash table.
76 Registers and "quantity numbers":
78 At the start of each basic block, all of the (hardware and pseudo)
79 registers used in the function are given distinct quantity
80 numbers to indicate their contents. During scan, when the code
81 copies one register into another, we copy the quantity number.
82 When a register is loaded in any other way, we allocate a new
83 quantity number to describe the value generated by this operation.
84 `REG_QTY (N)' records what quantity register N is currently thought
87 All real quantity numbers are greater than or equal to zero.
88 If register N has not been assigned a quantity, `REG_QTY (N)' will
89 equal -N - 1, which is always negative.
91 Quantity numbers below zero do not exist and none of the `qty_table'
92 entries should be referenced with a negative index.
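As a purely illustrative example: if pseudo 100 has not yet been assigned a
quantity, REG_QTY (100) is -101 and REGNO_QTY_VALID_P (100) is false; the
first call to `make_new_qty' for it sets REG_QTY (100) to the next free
quantity number, which from then on indexes a valid `qty_table' entry.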
94 We also maintain a bidirectional chain of registers for each
95 quantity number. The `qty_table` members `first_reg' and `last_reg',
96 and `reg_eqv_table' members `next' and `prev' hold these chains.
98 The first register in a chain is the one whose lifespan is least local.
99 Among equals, it is the one that was seen first.
100 We replace any equivalent register with that one.
102 If two registers have the same quantity number, then REG expressions with
103 the qty_table `mode' must be in the hash table for both registers and must
104 be in the same class.
106 The converse is not true. Since hard registers may be referenced in
107 any mode, two REG expressions might be equivalent in the hash table
108 but not have the same quantity number if the quantity of one of the
109 registers does not have the same mode as those expressions.
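As a purely illustrative case: hard register 1 referenced as (reg:SI 1)
might be equivalent in the hash table to (reg:SI 2), yet if register 1's
quantity was created in DImode the two registers keep different quantity
numbers.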
111 Constants and quantity numbers
113 When a quantity has a known constant value, that value is stored
114 in the appropriate qty_table `const_rtx'. This is in addition to
115 putting the constant in the hash table as is usual for non-regs.
117 Whether a reg or a constant is preferred is determined by the configuration
118 macro CONST_COSTS and will often depend on the constant value. In any
119 event, expressions containing constants can be simplified, by fold_rtx.
121 When a quantity has a known nearly constant value (such as an address
122 of a stack slot), that value is stored in the appropriate qty_table
125 Integer constants don't have a machine mode. However, cse
126 determines the intended machine mode from the destination
127 of the instruction that moves the constant. The machine mode
128 is recorded in the hash table along with the actual RTL
129 constant expression so that different modes are kept separate.
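As a purely illustrative case: after (set (reg:SI 100) (const_int 4)) the
constant 4 is recorded with mode SImode, while a later
(set (reg:DI 101) (const_int 4)) produces a separate DImode entry for the
same (const_int 4) rtx.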
133 To record known equivalences among expressions in general
134 we use a hash table called `table'. It has a fixed number of buckets
135 that contain chains of `struct table_elt' elements for expressions.
136 These chains connect the elements whose expressions have the same
139 Other chains through the same elements connect the elements which
140 currently have equivalent values.
142 Register references in an expression are canonicalized before hashing
143 the expression. This is done using `reg_qty' and qty_table `first_reg'.
144 The hash code of a register reference is computed using the quantity
145 number, not the register number.
147 When the value of an expression changes, it is necessary to remove from the
148 hash table not just that expression but all expressions whose values
149 could be different as a result.
151 1. If the value changing is in memory, except in special cases
152 ANYTHING referring to memory could be changed. That is because
153 nobody knows where a pointer does not point.
154 The function `invalidate_memory' removes what is necessary.
156 The special cases are when the address is constant or is
157 a constant plus a fixed register such as the frame pointer
158 or a static chain pointer. When such addresses are stored in,
159 we can tell exactly which other such addresses must be invalidated
160 due to overlap. `invalidate' does this.
161 All expressions that refer to non-constant
162 memory addresses are also invalidated. `invalidate_memory' does this.
164 2. If the value changing is a register, all expressions
165 containing references to that register, and only those,
168 Because searching the entire hash table for expressions that contain
169 a register is very slow, we try to figure out when it isn't necessary.
170 Precisely, this is necessary only when expressions have been
171 entered in the hash table using this register, and then the value has
172 changed, and then another expression wants to be added to refer to
173 the register's new value. This sequence of circumstances is rare
174 within any one basic block.
176 `REG_TICK' and `REG_IN_TABLE', accessors for members of
177 cse_reg_info, are used to detect this case. REG_TICK (i) is
178 incremented whenever a value is stored in register i.
179 REG_IN_TABLE (i) holds -1 if no references to register i have been
180 entered in the table; otherwise, it contains the value REG_TICK (i)
181 had when the references were entered. If we want to enter a
182 reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
183 remove old references. Until we want to enter a new entry, the
184 mere fact that the two vectors don't match causes the entries to be
185 ignored if anyone tries to match them.
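A small illustrative example: suppose (plus (reg 100) (const_int 4)) is
entered while REG_TICK (100) is 3, so REG_IN_TABLE (100) becomes 3. A later
store into register 100 bumps REG_TICK (100) to 4; the stale entry is simply
ignored until some new expression mentioning register 100 is about to be
entered, at which point the mismatch makes `mention_regs' call
`remove_invalid_refs' to sweep the old references away.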
187 Registers themselves are entered in the hash table as well as in
188 the equivalent-register chains. However, `REG_TICK' and
189 `REG_IN_TABLE' do not apply to expressions which are simple
190 register references. These expressions are removed from the table
191 immediately when they become invalid, and this can be done even if
192 we do not immediately search for all the expressions that refer to
195 A CLOBBER rtx in an instruction invalidates its operand for further
196 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
197 invalidates everything that resides in memory.
201 Constant expressions that differ only by an additive integer
202 are called related. When a constant expression is put in
203 the table, the related expression with no constant term
204 is also entered. These are made to point at each other
205 so that it is possible to find out if there exists any
206 register equivalent to an expression related to a given expression. */
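/* A purely illustrative example of related expressions: when
   (const (plus (symbol_ref "x") (const_int 4))) is entered in the table, its
   integer-free base (symbol_ref "x") is entered as well and the two are
   linked through `related_value'. `use_related_value' below walks this chain
   so that such a constant can be rewritten as a register plus integer when
   some related expression is already held in a register. */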
208 /* Length of qty_table vector. We know in advance we will not need
209 a quantity number this big. */
213 /* Next quantity number to be allocated.
214 This is 1 + the largest number needed so far. */
218 /* Per-qty information tracking.
220 `first_reg' and `last_reg' track the head and tail of the
221 chain of registers which currently contain this quantity.
223 `mode' contains the machine mode of this quantity.
225 `const_rtx' holds the rtx of the constant value of this
226 quantity, if known. A sum of the frame/arg pointer
227 and a constant can also be entered here. When this holds
228 a known value, `const_insn' is the insn which stored the
231 `comparison_{code,const,qty}' are used to track when a
232 comparison between a quantity and some constant or register has
233 been passed. In such a case, we know the results of the comparison
234 in case we see it again. These members record a comparison that
235 is known to be true. `comparison_code' holds the rtx code of such
236 a comparison, else it is set to UNKNOWN and the other two
237 comparison members are undefined. `comparison_const' holds
238 the constant being compared against, or zero if the comparison
239 is not against a constant. `comparison_qty' holds the quantity
240 being compared against when the result is known. If the comparison
241 is not with a register, `comparison_qty' is -1. */
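/* An illustrative scenario: once the scan has passed a branch that is only
   reached when (gt (reg 100) (const_int 0)) is true, the quantity of
   register 100 can record `comparison_code' GT, `comparison_const'
   (const_int 0) and `comparison_qty' -1, so a later occurrence of the same
   comparison is recognized as already known to be true. */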
243 struct qty_table_elem
247 rtx comparison_const;
249 unsigned int first_reg, last_reg;
250 /* The sizes of these fields should match the sizes of the
251 code and mode fields of struct rtx_def (see rtl.h). */
252 ENUM_BITFIELD(rtx_code) comparison_code : 16;
253 ENUM_BITFIELD(machine_mode) mode : 8;
256 /* The table of all qtys, indexed by qty number. */
257 static struct qty_table_elem *qty_table;
259 /* Structure used to pass arguments via for_each_rtx to function
260 cse_change_cc_mode. */
261 struct change_cc_mode_args
268 /* For machines that have a CC0, we do not record its value in the hash
269 table since its use is guaranteed to be the insn immediately following
270 its definition and any other insn is presumed to invalidate it.
272 Instead, we store below the value last assigned to CC0. If it should
273 happen to be a constant, it is stored in preference to the actual
274 assigned value. In case it is a constant, we store the mode in which
275 the constant should be interpreted. */
277 static rtx prev_insn_cc0;
278 static enum machine_mode prev_insn_cc0_mode;
280 /* Previous actual insn. 0 if at first insn of basic block. */
282 static rtx prev_insn;
285 /* Insn being scanned. */
287 static rtx this_insn;
289 /* Index by register number, gives the number of the next (or
290 previous) register in the chain of registers sharing the same
293 Or -1 if this register is at the end of the chain.
295 If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined. */
297 /* Per-register equivalence chain. */
303 /* The table of all register equivalence chains. */
304 static struct reg_eqv_elem *reg_eqv_table;
308 /* The timestamp at which this register is initialized. */
309 unsigned int timestamp;
311 /* The quantity number of the register's current contents. */
314 /* The number of times the register has been altered in the current
318 /* The REG_TICK value at which rtx's containing this register are
319 valid in the hash table. If this does not equal the current
320 reg_tick value, such expressions existing in the hash table are
324 /* The SUBREG that was set when REG_TICK was last incremented. Set
325 to -1 if the last store was to the whole register, not a subreg. */
326 unsigned int subreg_ticked;
329 /* A table of cse_reg_info indexed by register numbers. */
330 static struct cse_reg_info *cse_reg_info_table;
332 /* The size of the above table. */
333 static unsigned int cse_reg_info_table_size;
335 /* The index of the first entry that has not been initialized. */
336 static unsigned int cse_reg_info_table_first_uninitialized;
338 /* The timestamp at the beginning of the current run of
339 cse_basic_block. We increment this variable at the beginning of
340 the current run of cse_basic_block. The timestamp field of a
341 cse_reg_info entry matches the value of this variable if and only
342 if the entry has been initialized during the current run of
344 static unsigned int cse_reg_info_timestamp;
346 /* A HARD_REG_SET containing all the hard registers for which there is
347 currently a REG expression in the hash table. Note the difference
348 from the above variables, which indicate if the REG is mentioned in some
349 expression in the table. */
351 static HARD_REG_SET hard_regs_in_table;
353 /* CUID of insn that starts the basic block currently being cse-processed. */
355 static int cse_basic_block_start;
357 /* CUID of insn that ends the basic block currently being cse-processed. */
359 static int cse_basic_block_end;
361 /* Vector mapping INSN_UIDs to cuids.
362 The cuids are like uids but increase monotonically always.
363 We use them to see whether a reg is used outside a given basic block. */
365 static int *uid_cuid;
367 /* Highest UID in UID_CUID. */
370 /* Get the cuid of an insn. */
372 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
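/* For example, `make_regs_eqv' below compares uid_cuid[REGNO_FIRST_UID (reg)]
   and uid_cuid[REGNO_LAST_UID (reg)] against `cse_basic_block_start' and
   `cse_basic_block_end' to decide whether a pseudo's lifetime extends beyond
   the block currently being processed. */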
374 /* Nonzero if this pass has made changes, and therefore it's
375 worthwhile to run the garbage collector. */
377 static int cse_altered;
379 /* Nonzero if cse has altered conditional jump insns
380 in such a way that jump optimization should be redone. */
382 static int cse_jumps_altered;
384 /* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
385 REG_LABEL note, in which case we must rerun jump after CSE to add the note. */
386 static int recorded_label_ref;
388 /* canon_hash stores 1 in do_not_record
389 if it notices a reference to CC0, PC, or some other volatile
392 static int do_not_record;
394 /* canon_hash stores 1 in hash_arg_in_memory
395 if it notices a reference to memory within the expression being hashed. */
397 static int hash_arg_in_memory;
399 /* The hash table contains buckets which are chains of `struct table_elt's,
400 each recording one expression's information.
401 That expression is in the `exp' field.
403 The canon_exp field contains a canonical (from the point of view of
404 alias analysis) version of the `exp' field.
406 Those elements with the same hash code are chained in both directions
407 through the `next_same_hash' and `prev_same_hash' fields.
409 Each set of expressions with equivalent values
410 are on a two-way chain through the `next_same_value'
411 and `prev_same_value' fields, and all point with
412 the `first_same_value' field at the first element in
413 that chain. The chain is in order of increasing cost.
414 Each element's cost value is in its `cost' field.
416 The `in_memory' field is nonzero for elements that
417 involve any reference to memory. These elements are removed
418 whenever a write is done to an unidentified location in memory.
419 To be safe, we assume that a memory address is unidentified unless
420 the address is either a symbol constant or a constant plus
421 the frame pointer or argument pointer.
423 The `related_value' field is used to connect related expressions
424 (that differ by adding an integer).
425 The related expressions are chained in a circular fashion.
426 `related_value' is zero for expressions for which this
429 The `cost' field stores the cost of this element's expression.
430 The `regcost' field stores the value returned by approx_reg_cost for
431 this element's expression.
433 The `is_const' flag is set if the element is a constant (including
436 The `flag' field is used as a temporary during some search routines.
438 The `mode' field is usually the same as GET_MODE (`exp'), but
439 if `exp' is a CONST_INT and has no machine mode then the `mode'
440 field is the mode it was being used as. Each constant is
441 recorded separately for each mode it is used with. */
447 struct table_elt *next_same_hash;
448 struct table_elt *prev_same_hash;
449 struct table_elt *next_same_value;
450 struct table_elt *prev_same_value;
451 struct table_elt *first_same_value;
452 struct table_elt *related_value;
455 /* The size of this field should match the size
456 of the mode field of struct rtx_def (see rtl.h). */
457 ENUM_BITFIELD(machine_mode) mode : 8;
463 /* We don't want a lot of buckets, because we rarely have very many
464 things stored in the hash table, and a lot of buckets slows
465 down a lot of loops that happen frequently. */
467 #define HASH_SIZE (1 << HASH_SHIFT)
468 #define HASH_MASK (HASH_SIZE - 1)
470 /* Compute hash code of X in mode M. Special-case the case where X is a pseudo
471 register (hard registers may require `do_not_record' to be set). */
474 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
475 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
476 : canon_hash (X, M)) & HASH_MASK)
478 /* Like HASH, but without side-effects. */
479 #define SAFE_HASH(X, M) \
480 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
481 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
482 : safe_hash (X, M)) & HASH_MASK)
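/* A purely illustrative note on usage: `lookup_as_function' below does

       lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));

   and, because a pseudo register is hashed through its quantity number, two
   pseudos currently known to hold the same quantity fall into the same
   bucket. */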
484 /* Determine whether register number N is considered a fixed register for the
485 purpose of approximating register costs.
486 It is desirable to replace other regs with fixed regs, to reduce need for
488 A reg wins if it is either the frame pointer or designated as fixed. */
489 #define FIXED_REGNO_P(N) \
490 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
491 || fixed_regs[N] || global_regs[N])
493 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
494 hard registers and pointers into the frame are the cheapest with a cost
495 of 0. Next come pseudos with a cost of one and other hard registers with
496 a cost of 2. Aside from these special cases, call `rtx_cost'. */
498 #define CHEAP_REGNO(N) \
499 (REGNO_PTR_FRAME_P(N) \
500 || (HARD_REGISTER_NUM_P (N) \
501 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
503 #define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
504 #define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
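/* For example, `insert' below records both measures for each table element:

       elt->cost = COST (x);
       elt->regcost = approx_reg_cost (x);

   and `preferable' compares elements by `cost' first (treating MAX_COST
   specially) and only then by `regcost'. */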
506 /* Get the number of times this register has been updated in this
509 #define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
511 /* Get the point at which REG was recorded in the table. */
513 #define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
515 /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
518 #define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
520 /* Get the quantity number for REG. */
522 #define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
524 /* Determine if the quantity number for register X represents a valid index
525 into the qty_table. */
527 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
529 static struct table_elt *table[HASH_SIZE];
531 /* Chain of `struct table_elt's made so far for this function
532 but currently removed from the table. */
534 static struct table_elt *free_element_chain;
536 /* Set to the cost of a constant pool reference if one was found for a
537 symbolic constant. If this was found, it means we should try to
538 convert constants into constant pool entries if they don't fit in
541 static int constant_pool_entries_cost;
542 static int constant_pool_entries_regcost;
544 /* This data describes a block that will be processed by cse_basic_block. */
546 struct cse_basic_block_data
548 /* Lowest CUID value of insns in block. */
550 /* Highest CUID value of insns in block. */
552 /* Total number of SETs in block. */
554 /* Last insn in the block. */
556 /* Size of current branch path, if any. */
558 /* Current branch path, indicating which branches will be taken. */
561 /* The branch insn. */
563 /* Whether it should be taken or not. AROUND is the same as taken
564 except that it is used when the destination label is not preceded
566 enum taken {PATH_TAKEN, PATH_NOT_TAKEN, PATH_AROUND} status;
570 static bool fixed_base_plus_p (rtx x);
571 static int notreg_cost (rtx, enum rtx_code);
572 static int approx_reg_cost_1 (rtx *, void *);
573 static int approx_reg_cost (rtx);
574 static int preferable (int, int, int, int);
575 static void new_basic_block (void);
576 static void make_new_qty (unsigned int, enum machine_mode);
577 static void make_regs_eqv (unsigned int, unsigned int);
578 static void delete_reg_equiv (unsigned int);
579 static int mention_regs (rtx);
580 static int insert_regs (rtx, struct table_elt *, int);
581 static void remove_from_table (struct table_elt *, unsigned);
582 static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
583 static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
584 static rtx lookup_as_function (rtx, enum rtx_code);
585 static struct table_elt *insert (rtx, struct table_elt *, unsigned,
587 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
588 static void invalidate (rtx, enum machine_mode);
589 static int cse_rtx_varies_p (rtx, int);
590 static void remove_invalid_refs (unsigned int);
591 static void remove_invalid_subreg_refs (unsigned int, unsigned int,
593 static void rehash_using_reg (rtx);
594 static void invalidate_memory (void);
595 static void invalidate_for_call (void);
596 static rtx use_related_value (rtx, struct table_elt *);
598 static inline unsigned canon_hash (rtx, enum machine_mode);
599 static inline unsigned safe_hash (rtx, enum machine_mode);
600 static unsigned hash_rtx_string (const char *);
602 static rtx canon_reg (rtx, rtx);
603 static void find_best_addr (rtx, rtx *, enum machine_mode);
604 static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
606 enum machine_mode *);
607 static rtx fold_rtx (rtx, rtx);
608 static rtx equiv_constant (rtx);
609 static void record_jump_equiv (rtx, int);
610 static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
612 static void cse_insn (rtx, rtx);
613 static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
615 static int addr_affects_sp_p (rtx);
616 static void invalidate_from_clobbers (rtx);
617 static rtx cse_process_notes (rtx, rtx);
618 static void invalidate_skipped_set (rtx, rtx, void *);
619 static void invalidate_skipped_block (rtx);
620 static rtx cse_basic_block (rtx, rtx, struct branch_path *);
621 static void count_reg_usage (rtx, int *, rtx, int);
622 static int check_for_label_ref (rtx *, void *);
623 extern void dump_class (struct table_elt*);
624 static void get_cse_reg_info_1 (unsigned int regno);
625 static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
626 static int check_dependence (rtx *, void *);
628 static void flush_hash_table (void);
629 static bool insn_live_p (rtx, int *);
630 static bool set_live_p (rtx, rtx, int *);
631 static bool dead_libcall_p (rtx, int *);
632 static int cse_change_cc_mode (rtx *, void *);
633 static void cse_change_cc_mode_insn (rtx, rtx);
634 static void cse_change_cc_mode_insns (rtx, rtx, rtx);
635 static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
638 #undef RTL_HOOKS_GEN_LOWPART
639 #define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
641 static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
643 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
644 virtual regs here because the simplify_*_operation routines are called
645 by integrate.c, which is called before virtual register instantiation. */
648 fixed_base_plus_p (rtx x)
650 switch (GET_CODE (x))
653 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
655 if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
657 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
658 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
663 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
665 return fixed_base_plus_p (XEXP (x, 0));
672 /* Dump the expressions in the equivalence class indicated by CLASSP.
673 This function is used only for debugging. */
675 dump_class (struct table_elt *classp)
677 struct table_elt *elt;
679 fprintf (stderr, "Equivalence chain for ");
680 print_rtl (stderr, classp->exp);
681 fprintf (stderr, ": \n");
683 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
685 print_rtl (stderr, elt->exp);
686 fprintf (stderr, "\n");
690 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
693 approx_reg_cost_1 (rtx *xp, void *data)
700 unsigned int regno = REGNO (x);
702 if (! CHEAP_REGNO (regno))
704 if (regno < FIRST_PSEUDO_REGISTER)
706 if (SMALL_REGISTER_CLASSES)
718 /* Return an estimate of the cost of the registers used in an rtx.
719 This is mostly the number of different REG expressions in the rtx;
720 however for some exceptions like fixed registers we use a cost of
721 0. If any other hard register reference occurs, return MAX_COST. */
724 approx_reg_cost (rtx x)
728 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
734 /* Returns a canonical version of X for the address, from the point of view
735 that all multiplications are represented as MULT instead of a multiply
736 by a power of 2 being represented as ASHIFT. */
739 canon_for_address (rtx x)
742 enum machine_mode mode;
756 if (GET_CODE (XEXP (x, 1)) == CONST_INT
757 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)
758 && INTVAL (XEXP (x, 1)) >= 0)
760 new = canon_for_address (XEXP (x, 0));
761 new = gen_rtx_MULT (mode, new,
762 gen_int_mode ((HOST_WIDE_INT) 1
763 << INTVAL (XEXP (x, 1)),
774 /* Now recursively process each operand of this operation. */
775 fmt = GET_RTX_FORMAT (code);
776 for (i = 0; i < GET_RTX_LENGTH (code); i++)
779 new = canon_for_address (XEXP (x, i));
785 /* Return a negative value if an rtx A, whose costs are given by COST_A
786 and REGCOST_A, is more desirable than an rtx B.
787 Return a positive value if A is less desirable, or 0 if the two are
790 preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
792 /* First, get rid of cases involving expressions that are entirely
794 if (cost_a != cost_b)
796 if (cost_a == MAX_COST)
798 if (cost_b == MAX_COST)
802 /* Avoid extending lifetimes of hardregs. */
803 if (regcost_a != regcost_b)
805 if (regcost_a == MAX_COST)
807 if (regcost_b == MAX_COST)
811 /* Normal operation costs take precedence. */
812 if (cost_a != cost_b)
813 return cost_a - cost_b;
814 /* Only if these are identical consider effects on register pressure. */
815 if (regcost_a != regcost_b)
816 return regcost_a - regcost_b;
820 /* Internal function, to compute cost when X is not a register; called
821 from COST macro to keep it simple. */
824 notreg_cost (rtx x, enum rtx_code outer)
826 return ((GET_CODE (x) == SUBREG
827 && REG_P (SUBREG_REG (x))
828 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
829 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
830 && (GET_MODE_SIZE (GET_MODE (x))
831 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
832 && subreg_lowpart_p (x)
833 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
834 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
836 : rtx_cost (x, outer) * 2);
840 /* Initialize CSE_REG_INFO_TABLE. */
843 init_cse_reg_info (unsigned int nregs)
845 /* Do we need to grow the table? */
846 if (nregs > cse_reg_info_table_size)
848 unsigned int new_size;
850 if (cse_reg_info_table_size < 2048)
852 /* Compute a new size that is a power of 2 and no smaller
853 than the larger of NREGS and 64. */
854 new_size = (cse_reg_info_table_size
855 ? cse_reg_info_table_size : 64);
857 while (new_size < nregs)
862 /* If we need a big table, allocate just enough to hold
867 /* Reallocate the table with NEW_SIZE entries. */
868 if (cse_reg_info_table)
869 free (cse_reg_info_table);
870 cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
871 cse_reg_info_table_size = new_size;
872 cse_reg_info_table_first_uninitialized = 0;
875 /* Do we have all of the first NREGS entries initialized? */
876 if (cse_reg_info_table_first_uninitialized < nregs)
878 unsigned int old_timestamp = cse_reg_info_timestamp - 1;
881 /* Put the old timestamp on newly allocated entries so that they
882 will all be considered out of date. We do not touch those
883 entries beyond the first NREGS entries to be nice to the
885 for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
886 cse_reg_info_table[i].timestamp = old_timestamp;
888 cse_reg_info_table_first_uninitialized = nregs;
892 /* Given REGNO, initialize the cse_reg_info entry for REGNO. */
895 get_cse_reg_info_1 (unsigned int regno)
897 /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
898 entry will be considered to have been initialized. */
899 cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;
901 /* Initialize the rest of the entry. */
902 cse_reg_info_table[regno].reg_tick = 1;
903 cse_reg_info_table[regno].reg_in_table = -1;
904 cse_reg_info_table[regno].subreg_ticked = -1;
905 cse_reg_info_table[regno].reg_qty = -regno - 1;
908 /* Find a cse_reg_info entry for REGNO. */
910 static inline struct cse_reg_info *
911 get_cse_reg_info (unsigned int regno)
913 struct cse_reg_info *p = &cse_reg_info_table[regno];
915 /* If this entry has not been initialized, go ahead and initialize
917 if (p->timestamp != cse_reg_info_timestamp)
918 get_cse_reg_info_1 (regno);
923 /* Clear the hash table and initialize each register with its own quantity,
924 for a new basic block. */
927 new_basic_block (void)
933 /* Invalidate cse_reg_info_table. */
934 cse_reg_info_timestamp++;
936 /* Clear out hash table state for this pass. */
937 CLEAR_HARD_REG_SET (hard_regs_in_table);
939 /* The per-quantity values used to be initialized here, but it is
940 much faster to initialize each as it is made in `make_new_qty'. */
942 for (i = 0; i < HASH_SIZE; i++)
944 struct table_elt *first;
949 struct table_elt *last = first;
953 while (last->next_same_hash != NULL)
954 last = last->next_same_hash;
956 /* Now relink this entire hash chain into
957 the free element list. */
959 last->next_same_hash = free_element_chain;
960 free_element_chain = first;
970 /* Say that register REG contains a quantity in mode MODE not in any
971 register before and initialize that quantity. */
974 make_new_qty (unsigned int reg, enum machine_mode mode)
977 struct qty_table_elem *ent;
978 struct reg_eqv_elem *eqv;
980 gcc_assert (next_qty < max_qty);
982 q = REG_QTY (reg) = next_qty++;
984 ent->first_reg = reg;
987 ent->const_rtx = ent->const_insn = NULL_RTX;
988 ent->comparison_code = UNKNOWN;
990 eqv = &reg_eqv_table[reg];
991 eqv->next = eqv->prev = -1;
994 /* Make reg NEW equivalent to reg OLD.
995 OLD is not changing; NEW is. */
998 make_regs_eqv (unsigned int new, unsigned int old)
1000 unsigned int lastr, firstr;
1001 int q = REG_QTY (old);
1002 struct qty_table_elem *ent;
1004 ent = &qty_table[q];
1006 /* Nothing should become eqv until it has a "non-invalid" qty number. */
1007 gcc_assert (REGNO_QTY_VALID_P (old));
1010 firstr = ent->first_reg;
1011 lastr = ent->last_reg;
1013 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
1014 hard regs. Among pseudos, if NEW will live longer than any other reg
1015 of the same qty, and that is beyond the current basic block,
1016 make it the new canonical replacement for this qty. */
1017 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1018 /* Certain fixed registers might be of the class NO_REGS. This means
1019 that not only can they not be allocated by the compiler, but
1020 they cannot be used in substitutions or canonicalizations
1022 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1023 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1024 || (new >= FIRST_PSEUDO_REGISTER
1025 && (firstr < FIRST_PSEUDO_REGISTER
1026 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1027 || (uid_cuid[REGNO_FIRST_UID (new)]
1028 < cse_basic_block_start))
1029 && (uid_cuid[REGNO_LAST_UID (new)]
1030 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1032 reg_eqv_table[firstr].prev = new;
1033 reg_eqv_table[new].next = firstr;
1034 reg_eqv_table[new].prev = -1;
1035 ent->first_reg = new;
1039 /* If NEW is a hard reg (known to be non-fixed), insert at end.
1040 Otherwise, insert before any non-fixed hard regs that are at the
1041 end. Registers of class NO_REGS cannot be used as an
1042 equivalent for anything. */
1043 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1044 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1045 && new >= FIRST_PSEUDO_REGISTER)
1046 lastr = reg_eqv_table[lastr].prev;
1047 reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1048 if (reg_eqv_table[lastr].next >= 0)
1049 reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1051 qty_table[q].last_reg = new;
1052 reg_eqv_table[lastr].next = new;
1053 reg_eqv_table[new].prev = lastr;
1057 /* Remove REG from its equivalence class. */
1060 delete_reg_equiv (unsigned int reg)
1062 struct qty_table_elem *ent;
1063 int q = REG_QTY (reg);
1066 /* If invalid, do nothing. */
1067 if (! REGNO_QTY_VALID_P (reg))
1070 ent = &qty_table[q];
1072 p = reg_eqv_table[reg].prev;
1073 n = reg_eqv_table[reg].next;
1076 reg_eqv_table[n].prev = p;
1080 reg_eqv_table[p].next = n;
1084 REG_QTY (reg) = -reg - 1;
1087 /* Remove any invalid expressions from the hash table
1088 that refer to any of the registers contained in expression X.
1090 Make sure that newly inserted references to those registers
1091 as subexpressions will be considered valid.
1093 mention_regs is not called when a register itself
1094 is being stored in the table.
1096 Return 1 if we have done something that may have changed the hash code
1100 mention_regs (rtx x)
1110 code = GET_CODE (x);
1113 unsigned int regno = REGNO (x);
1114 unsigned int endregno
1115 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1116 : hard_regno_nregs[regno][GET_MODE (x)]);
1119 for (i = regno; i < endregno; i++)
1121 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1122 remove_invalid_refs (i);
1124 REG_IN_TABLE (i) = REG_TICK (i);
1125 SUBREG_TICKED (i) = -1;
1131 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1132 pseudo if they don't use overlapping words. We handle only pseudos
1133 here for simplicity. */
1134 if (code == SUBREG && REG_P (SUBREG_REG (x))
1135 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1137 unsigned int i = REGNO (SUBREG_REG (x));
1139 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1141 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1142 the last store to this register really stored into this
1143 subreg, then remove the memory of this subreg.
1144 Otherwise, remove any memory of the entire register and
1145 all its subregs from the table. */
1146 if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1147 || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1148 remove_invalid_refs (i);
1150 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1153 REG_IN_TABLE (i) = REG_TICK (i);
1154 SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1158 /* If X is a comparison or a COMPARE and either operand is a register
1159 that does not have a quantity, give it one. This is so that a later
1160 call to record_jump_equiv won't cause X to be assigned a different
1161 hash code and not found in the table after that call.
1163 It is not necessary to do this here, since rehash_using_reg can
1164 fix up the table later, but doing this here eliminates the need to
1165 call that expensive function in the most common case where the only
1166 use of the register is in the comparison. */
1168 if (code == COMPARE || COMPARISON_P (x))
1170 if (REG_P (XEXP (x, 0))
1171 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1172 if (insert_regs (XEXP (x, 0), NULL, 0))
1174 rehash_using_reg (XEXP (x, 0));
1178 if (REG_P (XEXP (x, 1))
1179 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1180 if (insert_regs (XEXP (x, 1), NULL, 0))
1182 rehash_using_reg (XEXP (x, 1));
1187 fmt = GET_RTX_FORMAT (code);
1188 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1190 changed |= mention_regs (XEXP (x, i));
1191 else if (fmt[i] == 'E')
1192 for (j = 0; j < XVECLEN (x, i); j++)
1193 changed |= mention_regs (XVECEXP (x, i, j));
1198 /* Update the register quantities for inserting X into the hash table
1199 with a value equivalent to CLASSP.
1200 (If the class does not contain a REG, it is irrelevant.)
1201 If MODIFIED is nonzero, X is a destination; it is being modified.
1202 Note that delete_reg_equiv should be called on a register
1203 before insert_regs is done on that register with MODIFIED != 0.
1205 Nonzero value means that elements of reg_qty have changed
1206 so X's hash code may be different. */
1209 insert_regs (rtx x, struct table_elt *classp, int modified)
1213 unsigned int regno = REGNO (x);
1216 /* If REGNO is in the equivalence table already but is of the
1217 wrong mode for that equivalence, don't do anything here. */
1219 qty_valid = REGNO_QTY_VALID_P (regno);
1222 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1224 if (ent->mode != GET_MODE (x))
1228 if (modified || ! qty_valid)
1231 for (classp = classp->first_same_value;
1233 classp = classp->next_same_value)
1234 if (REG_P (classp->exp)
1235 && GET_MODE (classp->exp) == GET_MODE (x))
1237 unsigned c_regno = REGNO (classp->exp);
1239 gcc_assert (REGNO_QTY_VALID_P (c_regno));
1241 /* Suppose that 5 is hard reg and 100 and 101 are
1244 (set (reg:si 100) (reg:si 5))
1245 (set (reg:si 5) (reg:si 100))
1246 (set (reg:di 101) (reg:di 5))
1248 We would now set REG_QTY (101) = REG_QTY (5), but the
1249 entry for 5 is in SImode. When we use this later in
1250 copy propagation, we get the register in wrong mode. */
1251 if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
1254 make_regs_eqv (regno, c_regno);
1258 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1259 than REG_IN_TABLE to find out if there was only a single preceding
1260 invalidation - for the SUBREG - or another one, which would be
1261 for the full register. However, if we find here that REG_TICK
1262 indicates that the register is invalid, it means that it has
1263 been invalidated in a separate operation. The SUBREG might be used
1264 now (then this is a recursive call), or we might use the full REG
1265 now and a SUBREG of it later. So bump up REG_TICK so that
1266 mention_regs will do the right thing. */
1268 && REG_IN_TABLE (regno) >= 0
1269 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1271 make_new_qty (regno, GET_MODE (x));
1278 /* If X is a SUBREG, we will likely be inserting the inner register in the
1279 table. If that register doesn't have an assigned quantity number at
1280 this point but does later, the insertion that we will be doing now will
1281 not be accessible because its hash code will have changed. So assign
1282 a quantity number now. */
1284 else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
1285 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1287 insert_regs (SUBREG_REG (x), NULL, 0);
1292 return mention_regs (x);
1295 /* Look in or update the hash table. */
1297 /* Remove table element ELT from use in the table.
1298 HASH is its hash code, made using the HASH macro.
1299 It's an argument because often that is known in advance
1300 and we save much time not recomputing it. */
1303 remove_from_table (struct table_elt *elt, unsigned int hash)
1308 /* Mark this element as removed. See cse_insn. */
1309 elt->first_same_value = 0;
1311 /* Remove the table element from its equivalence class. */
1314 struct table_elt *prev = elt->prev_same_value;
1315 struct table_elt *next = elt->next_same_value;
1318 next->prev_same_value = prev;
1321 prev->next_same_value = next;
1324 struct table_elt *newfirst = next;
1327 next->first_same_value = newfirst;
1328 next = next->next_same_value;
1333 /* Remove the table element from its hash bucket. */
1336 struct table_elt *prev = elt->prev_same_hash;
1337 struct table_elt *next = elt->next_same_hash;
1340 next->prev_same_hash = prev;
1343 prev->next_same_hash = next;
1344 else if (table[hash] == elt)
1348 /* This entry is not in the proper hash bucket. This can happen
1349 when two classes were merged by `merge_equiv_classes'. Search
1350 for the hash bucket that it heads. This happens only very
1351 rarely, so the cost is acceptable. */
1352 for (hash = 0; hash < HASH_SIZE; hash++)
1353 if (table[hash] == elt)
1358 /* Remove the table element from its related-value circular chain. */
1360 if (elt->related_value != 0 && elt->related_value != elt)
1362 struct table_elt *p = elt->related_value;
1364 while (p->related_value != elt)
1365 p = p->related_value;
1366 p->related_value = elt->related_value;
1367 if (p->related_value == p)
1368 p->related_value = 0;
1371 /* Now add it to the free element chain. */
1372 elt->next_same_hash = free_element_chain;
1373 free_element_chain = elt;
1376 /* Look up X in the hash table and return its table element,
1377 or 0 if X is not in the table.
1379 MODE is the machine-mode of X, or if X is an integer constant
1380 with VOIDmode then MODE is the mode with which X will be used.
1382 Here we are satisfied to find an expression whose tree structure
1385 static struct table_elt *
1386 lookup (rtx x, unsigned int hash, enum machine_mode mode)
1388 struct table_elt *p;
1390 for (p = table[hash]; p; p = p->next_same_hash)
1391 if (mode == p->mode && ((x == p->exp && REG_P (x))
1392 || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1398 /* Like `lookup' but don't care whether the table element uses invalid regs.
1399 Also ignore discrepancies in the machine mode of a register. */
1401 static struct table_elt *
1402 lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
1404 struct table_elt *p;
1408 unsigned int regno = REGNO (x);
1410 /* Don't check the machine mode when comparing registers;
1411 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1412 for (p = table[hash]; p; p = p->next_same_hash)
1414 && REGNO (p->exp) == regno)
1419 for (p = table[hash]; p; p = p->next_same_hash)
1421 && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1428 /* Look for an expression equivalent to X and with code CODE.
1429 If one is found, return that expression. */
1432 lookup_as_function (rtx x, enum rtx_code code)
1435 = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1437 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1438 long as we are narrowing. So if we looked in vain for a mode narrower
1439 than word_mode before, look for word_mode now. */
1440 if (p == 0 && code == CONST_INT
1441 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1444 PUT_MODE (x, word_mode);
1445 p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
1451 for (p = p->first_same_value; p; p = p->next_same_value)
1452 if (GET_CODE (p->exp) == code
1453 /* Make sure this is a valid entry in the table. */
1454 && exp_equiv_p (p->exp, p->exp, 1, false))
1460 /* Insert X in the hash table, assuming HASH is its hash code
1461 and CLASSP is an element of the class it should go in
1462 (or 0 if a new class should be made).
1463 It is inserted at the proper position to keep the class in
1464 the order cheapest first.
1466 MODE is the machine-mode of X, or if X is an integer constant
1467 with VOIDmode then MODE is the mode with which X will be used.
1469 For elements of equal cheapness, the most recent one
1470 goes in front, except that the first element in the list
1471 remains first unless a cheaper element is added. The order of
1472 pseudo-registers does not matter, as canon_reg will be called to
1473 find the cheapest when a register is retrieved from the table.
1475 The in_memory field in the hash table element is set to 0.
1476 The caller must set it nonzero if appropriate.
1478 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1479 and if insert_regs returns a nonzero value
1480 you must then recompute its hash code before calling here.
1482 If necessary, update table showing constant values of quantities. */
1484 #define CHEAPER(X, Y) \
1485 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1487 static struct table_elt *
1488 insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
1490 struct table_elt *elt;
1492 /* If X is a register and we haven't made a quantity for it,
1493 something is wrong. */
1494 gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1496 /* If X is a hard register, show it is being put in the table. */
1497 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1499 unsigned int regno = REGNO (x);
1500 unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
1503 for (i = regno; i < endregno; i++)
1504 SET_HARD_REG_BIT (hard_regs_in_table, i);
1507 /* Put an element for X into the right hash bucket. */
1509 elt = free_element_chain;
1511 free_element_chain = elt->next_same_hash;
1513 elt = XNEW (struct table_elt);
1516 elt->canon_exp = NULL_RTX;
1517 elt->cost = COST (x);
1518 elt->regcost = approx_reg_cost (x);
1519 elt->next_same_value = 0;
1520 elt->prev_same_value = 0;
1521 elt->next_same_hash = table[hash];
1522 elt->prev_same_hash = 0;
1523 elt->related_value = 0;
1526 elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1529 table[hash]->prev_same_hash = elt;
1532 /* Put it into the proper value-class. */
1535 classp = classp->first_same_value;
1536 if (CHEAPER (elt, classp))
1537 /* Insert at the head of the class. */
1539 struct table_elt *p;
1540 elt->next_same_value = classp;
1541 classp->prev_same_value = elt;
1542 elt->first_same_value = elt;
1544 for (p = classp; p; p = p->next_same_value)
1545 p->first_same_value = elt;
1549 /* Insert not at head of the class. */
1550 /* Put it after the last element cheaper than X. */
1551 struct table_elt *p, *next;
1553 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1556 /* Put it after P and before NEXT. */
1557 elt->next_same_value = next;
1559 next->prev_same_value = elt;
1561 elt->prev_same_value = p;
1562 p->next_same_value = elt;
1563 elt->first_same_value = classp;
1567 elt->first_same_value = elt;
1569 /* If this is a constant being set equivalent to a register or a register
1570 being set equivalent to a constant, note the constant equivalence.
1572 If this is a constant, it cannot be equivalent to a different constant,
1573 and a constant is the only thing that can be cheaper than a register. So
1574 we know the register is the head of the class (before the constant was
1577 If this is a register that is not already known equivalent to a
1578 constant, we must check the entire class.
1580 If this is a register that is already known equivalent to an insn,
1581 update the qtys `const_insn' to show that `this_insn' is the latest
1582 insn making that quantity equivalent to the constant. */
1584 if (elt->is_const && classp && REG_P (classp->exp)
1587 int exp_q = REG_QTY (REGNO (classp->exp));
1588 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1590 exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1591 exp_ent->const_insn = this_insn;
1596 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1599 struct table_elt *p;
1601 for (p = classp; p != 0; p = p->next_same_value)
1603 if (p->is_const && !REG_P (p->exp))
1605 int x_q = REG_QTY (REGNO (x));
1606 struct qty_table_elem *x_ent = &qty_table[x_q];
1609 = gen_lowpart (GET_MODE (x), p->exp);
1610 x_ent->const_insn = this_insn;
1617 && qty_table[REG_QTY (REGNO (x))].const_rtx
1618 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1619 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1621 /* If this is a constant with symbolic value,
1622 and it has a term with an explicit integer value,
1623 link it up with related expressions. */
1624 if (GET_CODE (x) == CONST)
1626 rtx subexp = get_related_value (x);
1628 struct table_elt *subelt, *subelt_prev;
1632 /* Get the integer-free subexpression in the hash table. */
1633 subhash = SAFE_HASH (subexp, mode);
1634 subelt = lookup (subexp, subhash, mode);
1636 subelt = insert (subexp, NULL, subhash, mode);
1637 /* Initialize SUBELT's circular chain if it has none. */
1638 if (subelt->related_value == 0)
1639 subelt->related_value = subelt;
1640 /* Find the element in the circular chain that precedes SUBELT. */
1641 subelt_prev = subelt;
1642 while (subelt_prev->related_value != subelt)
1643 subelt_prev = subelt_prev->related_value;
1644 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1645 This way the element that follows SUBELT is the oldest one. */
1646 elt->related_value = subelt_prev->related_value;
1647 subelt_prev->related_value = elt;
1654 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1655 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1656 the two classes equivalent.
1658 CLASS1 will be the surviving class; CLASS2 should not be used after this
1661 Any invalid entries in CLASS2 will not be copied. */
1664 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1666 struct table_elt *elt, *next, *new;
1668 /* Ensure we start with the head of the classes. */
1669 class1 = class1->first_same_value;
1670 class2 = class2->first_same_value;
1672 /* If they were already equal, forget it. */
1673 if (class1 == class2)
1676 for (elt = class2; elt; elt = next)
1680 enum machine_mode mode = elt->mode;
1682 next = elt->next_same_value;
1684 /* Remove old entry, make a new one in CLASS1's class.
1685 Don't do this for invalid entries as we cannot find their
1686 hash code (it also isn't necessary). */
1687 if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1689 bool need_rehash = false;
1691 hash_arg_in_memory = 0;
1692 hash = HASH (exp, mode);
1696 need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1697 delete_reg_equiv (REGNO (exp));
1700 remove_from_table (elt, hash);
1702 if (insert_regs (exp, class1, 0) || need_rehash)
1704 rehash_using_reg (exp);
1705 hash = HASH (exp, mode);
1707 new = insert (exp, class1, hash, mode);
1708 new->in_memory = hash_arg_in_memory;
1713 /* Flush the entire hash table. */
1716 flush_hash_table (void)
1719 struct table_elt *p;
1721 for (i = 0; i < HASH_SIZE; i++)
1722 for (p = table[i]; p; p = table[i])
1724 /* Note that invalidate can remove elements
1725 after P in the current hash chain. */
1727 invalidate (p->exp, p->mode);
1729 remove_from_table (p, i);
1733 /* Function called for each rtx to check whether a true dependence exists. */
1734 struct check_dependence_data
1736 enum machine_mode mode;
1742 check_dependence (rtx *x, void *data)
1744 struct check_dependence_data *d = (struct check_dependence_data *) data;
1745 if (*x && MEM_P (*x))
1746 return canon_true_dependence (d->exp, d->mode, d->addr, *x,
1752 /* Remove from the hash table, or mark as invalid, all expressions whose
1753 values could be altered by storing in X. X is a register, a subreg, or
1754 a memory reference with nonvarying address (because, when a memory
1755 reference with a varying address is stored in, all memory references are
1756 removed by invalidate_memory so specific invalidation is superfluous).
1757 FULL_MODE, if not VOIDmode, indicates that this much should be
1758 invalidated instead of just the amount indicated by the mode of X. This
1759 is only used for bitfield stores into memory.
1761 A nonvarying address may be just a register or just a symbol reference,
1762 or it may be either of those plus a numeric offset. */
1765 invalidate (rtx x, enum machine_mode full_mode)
1768 struct table_elt *p;
1771 switch (GET_CODE (x))
1775 /* If X is a register, dependencies on its contents are recorded
1776 through the qty number mechanism. Just change the qty number of
1777 the register, mark it as invalid for expressions that refer to it,
1778 and remove it itself. */
1779 unsigned int regno = REGNO (x);
1780 unsigned int hash = HASH (x, GET_MODE (x));
1782 /* Remove REGNO from any quantity list it might be on and indicate
1783 that its value might have changed. If it is a pseudo, remove its
1784 entry from the hash table.
1786 For a hard register, we do the first two actions above for any
1787 additional hard registers corresponding to X. Then, if any of these
1788 registers are in the table, we must remove any REG entries that
1789 overlap these registers. */
1791 delete_reg_equiv (regno);
1793 SUBREG_TICKED (regno) = -1;
1795 if (regno >= FIRST_PSEUDO_REGISTER)
1797 /* Because a register can be referenced in more than one mode,
1798 we might have to remove more than one table entry. */
1799 struct table_elt *elt;
1801 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1802 remove_from_table (elt, hash);
1806 HOST_WIDE_INT in_table
1807 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1808 unsigned int endregno
1809 = regno + hard_regno_nregs[regno][GET_MODE (x)];
1810 unsigned int tregno, tendregno, rn;
1811 struct table_elt *p, *next;
1813 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1815 for (rn = regno + 1; rn < endregno; rn++)
1817 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1818 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1819 delete_reg_equiv (rn);
1821 SUBREG_TICKED (rn) = -1;
1825 for (hash = 0; hash < HASH_SIZE; hash++)
1826 for (p = table[hash]; p; p = next)
1828 next = p->next_same_hash;
1831 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1834 tregno = REGNO (p->exp);
1836 = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
1837 if (tendregno > regno && tregno < endregno)
1838 remove_from_table (p, hash);
1845 invalidate (SUBREG_REG (x), VOIDmode);
1849 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1850 invalidate (XVECEXP (x, 0, i), VOIDmode);
1854 /* This is part of a disjoint return value; extract the location in
1855 question ignoring the offset. */
1856 invalidate (XEXP (x, 0), VOIDmode);
1860 addr = canon_rtx (get_addr (XEXP (x, 0)));
1861 /* Calculate the canonical version of X here so that
1862 true_dependence doesn't generate new RTL for X on each call. */
1865 /* Remove all hash table elements that refer to overlapping pieces of
1867 if (full_mode == VOIDmode)
1868 full_mode = GET_MODE (x);
1870 for (i = 0; i < HASH_SIZE; i++)
1872 struct table_elt *next;
1874 for (p = table[i]; p; p = next)
1876 next = p->next_same_hash;
1879 struct check_dependence_data d;
1881 /* Just canonicalize the expression once;
1882 otherwise each time we call invalidate
1883 true_dependence will canonicalize the
1884 expression again. */
1886 p->canon_exp = canon_rtx (p->exp);
1890 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1891 remove_from_table (p, i);
1902 /* Remove all expressions that refer to register REGNO,
1903 since they are already invalid, and we are about to
1904 mark that register valid again and don't want the old
1905 expressions to reappear as valid. */
1908 remove_invalid_refs (unsigned int regno)
1911 struct table_elt *p, *next;
1913 for (i = 0; i < HASH_SIZE; i++)
1914 for (p = table[i]; p; p = next)
1916 next = p->next_same_hash;
1918 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1919 remove_from_table (p, i);
1923 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1926 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1927 enum machine_mode mode)
1930 struct table_elt *p, *next;
1931 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1933 for (i = 0; i < HASH_SIZE; i++)
1934 for (p = table[i]; p; p = next)
1937 next = p->next_same_hash;
1940 && (GET_CODE (exp) != SUBREG
1941 || !REG_P (SUBREG_REG (exp))
1942 || REGNO (SUBREG_REG (exp)) != regno
1943 || (((SUBREG_BYTE (exp)
1944 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1945 && SUBREG_BYTE (exp) <= end))
1946 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1947 remove_from_table (p, i);
1951 /* Recompute the hash codes of any valid entries in the hash table that
1952 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1954 This is called when we make a jump equivalence. */
1957 rehash_using_reg (rtx x)
1960 struct table_elt *p, *next;
1963 if (GET_CODE (x) == SUBREG)
1966 /* If X is not a register or if the register is known not to be in any
1967 valid entries in the table, we have no work to do. */
1970 || REG_IN_TABLE (REGNO (x)) < 0
1971 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1974 /* Scan all hash chains looking for valid entries that mention X.
1975 If we find one and it is in the wrong hash chain, move it. */
1977 for (i = 0; i < HASH_SIZE; i++)
1978 for (p = table[i]; p; p = next)
1980 next = p->next_same_hash;
1981 if (reg_mentioned_p (x, p->exp)
1982 && exp_equiv_p (p->exp, p->exp, 1, false)
1983 && i != (hash = SAFE_HASH (p->exp, p->mode)))
1985 if (p->next_same_hash)
1986 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1988 if (p->prev_same_hash)
1989 p->prev_same_hash->next_same_hash = p->next_same_hash;
1991 table[i] = p->next_same_hash;
1993 p->next_same_hash = table[hash];
1994 p->prev_same_hash = 0;
1996 table[hash]->prev_same_hash = p;
2002 /* Remove from the hash table any expression that is a call-clobbered
2003 register. Also update their TICK values. */
2006 invalidate_for_call (void)
2008 unsigned int regno, endregno;
2011 struct table_elt *p, *next;
2014 /* Go through all the hard registers. For each that is clobbered in
2015 a CALL_INSN, remove the register from quantity chains and update
2016 reg_tick if defined. Also see if any of these registers is currently
2019 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2020 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2022 delete_reg_equiv (regno);
2023 if (REG_TICK (regno) >= 0)
2026 SUBREG_TICKED (regno) = -1;
2029 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2032 /* In the case where we have no call-clobbered hard registers in the
2033 table, we are done. Otherwise, scan the table and remove any
2034 entry that overlaps a call-clobbered register. */
2037 for (hash = 0; hash < HASH_SIZE; hash++)
2038 for (p = table[hash]; p; p = next)
2040 next = p->next_same_hash;
2043 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2046 regno = REGNO (p->exp);
2047 endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];
2049 for (i = regno; i < endregno; i++)
2050 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2052 remove_from_table (p, hash);
2058 /* Given an expression X of type CONST,
2059 and ELT which is its table entry (or 0 if it
2060 is not in the hash table),
2061 return an alternate expression for X as a register plus integer.
2062 If none can be found, return 0. */
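/* A hedged illustration, with the register number invented: if X is

       (const (plus (symbol_ref "s") (const_int 12)))

   and the table already holds

       (const (plus (symbol_ref "s") (const_int 4)))

   in a class containing (reg 100), the result is

       (plus (reg 100) (const_int 8)).  */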
2065 use_related_value (rtx x, struct table_elt *elt)
2067 struct table_elt *relt = 0;
2068 struct table_elt *p, *q;
2069 HOST_WIDE_INT offset;
2071 /* First, is there anything related known?
2072 If we have a table element, we can tell from that.
2073 Otherwise, must look it up. */
2075 if (elt != 0 && elt->related_value != 0)
2077 else if (elt == 0 && GET_CODE (x) == CONST)
2079 rtx subexp = get_related_value (x);
2081 relt = lookup (subexp,
2082 SAFE_HASH (subexp, GET_MODE (subexp)),
2089 /* Search all related table entries for one that has an
2090 equivalent register. */
2095 /* This loop is strange in that it is executed in two different cases.
2096 The first is when X is already in the table. Then it is searching
2097 the RELATED_VALUE list of X's class (RELT). The second case is when
2098 X is not in the table. Then RELT points to a class for the related
2101 Ensure that, whatever case we are in, we ignore classes that have
2102 the same value as X. */
2104 if (rtx_equal_p (x, p->exp))
2107 for (q = p->first_same_value; q; q = q->next_same_value)
2114 p = p->related_value;
2116 /* We went all the way around, so there is nothing to be found.
2117 Alternatively, perhaps RELT was in the table for some other reason
2118 and it has no related values recorded. */
2119 if (p == relt || p == 0)
2126 offset = (get_integer_term (x) - get_integer_term (p->exp));
2127 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2128 return plus_constant (q->exp, offset);
2131 /* Hash a string. Just add its bytes up. */
2132 static inline unsigned
2133 hash_rtx_string (const char *ps)
2136 const unsigned char *p = (const unsigned char *) ps;
2145 /* Hash an rtx. We are careful to make sure the value is never negative.
2146 Equivalent registers hash identically.
2147 MODE is used in hashing for CONST_INTs only;
2148 otherwise the mode of X is used.
2150 Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2152 If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2153 a MEM rtx which is not marked MEM_READONLY_P.
2155 Note that cse_insn knows that the hash code of a MEM expression
2156 is just (int) MEM plus the hash code of the address. */
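/* An illustrative sketch of that last point (register number invented):
   the hash of (mem:SI (reg:SI 100)) works out to roughly

       (unsigned) MEM  +  hash of (reg:SI 100)

   because the MEM case below adds (unsigned) MEM and then hashes just
   the address.  */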
2159 hash_rtx (rtx x, enum machine_mode mode, int *do_not_record_p,
2160 int *hash_arg_in_memory_p, bool have_reg_qty)
2167 /* Used to turn recursion into iteration. We can't rely on GCC's
2168 tail-recursion elimination since we need to keep accumulating values
2174 code = GET_CODE (x);
2179 unsigned int regno = REGNO (x);
2181 if (!reload_completed)
2183 /* On some machines, we can't record any non-fixed hard register,
2184 because extending its life will cause reload problems. We
2185 consider ap, fp, sp, gp to be fixed for this purpose.
2187 We also consider CCmode registers to be fixed for this purpose;
2188 failure to do so leads to failure to simplify 0<100 type of
2191 On all machines, we can't record any global registers.
2192 Nor should we record any register that is in a small
2193 class, as defined by CLASS_LIKELY_SPILLED_P. */
2196 if (regno >= FIRST_PSEUDO_REGISTER)
2198 else if (x == frame_pointer_rtx
2199 || x == hard_frame_pointer_rtx
2200 || x == arg_pointer_rtx
2201 || x == stack_pointer_rtx
2202 || x == pic_offset_table_rtx)
2204 else if (global_regs[regno])
2206 else if (fixed_regs[regno])
2208 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2210 else if (SMALL_REGISTER_CLASSES)
2212 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2219 *do_not_record_p = 1;
2224 hash += ((unsigned int) REG << 7);
2225 hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2229 /* We handle SUBREG of a REG specially because the underlying
2230 reg changes its hash value with every value change; we don't
2231 want to have to forget unrelated subregs when one subreg changes. */
2234 if (REG_P (SUBREG_REG (x)))
2236 hash += (((unsigned int) SUBREG << 7)
2237 + REGNO (SUBREG_REG (x))
2238 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2245 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2246 + (unsigned int) INTVAL (x));
2250 /* This is like the general case, except that it only counts
2251 the integers representing the constant. */
2252 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2253 if (GET_MODE (x) != VOIDmode)
2254 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2256 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2257 + (unsigned int) CONST_DOUBLE_HIGH (x));
2265 units = CONST_VECTOR_NUNITS (x);
2267 for (i = 0; i < units; ++i)
2269 elt = CONST_VECTOR_ELT (x, i);
2270 hash += hash_rtx (elt, GET_MODE (elt), do_not_record_p,
2271 hash_arg_in_memory_p, have_reg_qty);
2277 /* Assume there is only one rtx object for any given label. */
2279 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2280 differences and differences between each stage's debugging dumps. */
2281 hash += (((unsigned int) LABEL_REF << 7)
2282 + CODE_LABEL_NUMBER (XEXP (x, 0)));
2287 /* Don't hash on the symbol's address to avoid bootstrap differences.
2288 Different hash values may cause expressions to be recorded in
2289 different orders and thus different registers to be used in the
2290 final assembler. This also avoids differences in the dump files
2291 between various stages. */
2293 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2296 h += (h << 7) + *p++; /* ??? revisit */
2298 hash += ((unsigned int) SYMBOL_REF << 7) + h;
2303 /* We don't record if marked volatile or if BLKmode since we don't
2304 know the size of the move. */
2305 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2307 *do_not_record_p = 1;
2310 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2311 *hash_arg_in_memory_p = 1;
2313 /* Now that we have already found this special case,
2314 might as well speed it up as much as possible. */
2315 hash += (unsigned) MEM;
2320 /* A USE that mentions non-volatile memory needs special
2321 handling since the MEM may be BLKmode which normally
2322 prevents an entry from being made. Pure calls are
2323 marked by a USE which mentions BLKmode memory.
2324 See calls.c:emit_call_1. */
2325 if (MEM_P (XEXP (x, 0))
2326 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2328 hash += (unsigned) USE;
2331 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2332 *hash_arg_in_memory_p = 1;
2334 /* Now that we have already found this special case,
2335 might as well speed it up as much as possible. */
2336 hash += (unsigned) MEM;
2351 case UNSPEC_VOLATILE:
2352 *do_not_record_p = 1;
2356 if (MEM_VOLATILE_P (x))
2358 *do_not_record_p = 1;
2363 /* We don't want to take the filename and line into account. */
2364 hash += (unsigned) code + (unsigned) GET_MODE (x)
2365 + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2366 + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2367 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2369 if (ASM_OPERANDS_INPUT_LENGTH (x))
2371 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2373 hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
2374 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2375 do_not_record_p, hash_arg_in_memory_p,
2378 (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2381 hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2382 x = ASM_OPERANDS_INPUT (x, 0);
2383 mode = GET_MODE (x);
2395 i = GET_RTX_LENGTH (code) - 1;
2396 hash += (unsigned) code + (unsigned) GET_MODE (x);
2397 fmt = GET_RTX_FORMAT (code);
2403 /* If we are about to do the last recursive call
2404 needed at this level, change it into iteration.
2405 This function is called enough to be worth it. */
2412 hash += hash_rtx (XEXP (x, i), 0, do_not_record_p,
2413 hash_arg_in_memory_p, have_reg_qty);
2417 for (j = 0; j < XVECLEN (x, i); j++)
2418 hash += hash_rtx (XVECEXP (x, i, j), 0, do_not_record_p,
2419 hash_arg_in_memory_p, have_reg_qty);
2423 hash += hash_rtx_string (XSTR (x, i));
2427 hash += (unsigned int) XINT (x, i);
2442 /* Hash an rtx X for cse via hash_rtx.
2443 Stores 1 in do_not_record if any subexpression is volatile.
2444 Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2445 is not marked MEM_READONLY_P. */
2447 static inline unsigned
2448 canon_hash (rtx x, enum machine_mode mode)
2450 return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2453 /* Like canon_hash but with no side effects, i.e. do_not_record
2454 and hash_arg_in_memory are not changed. */
2456 static inline unsigned
2457 safe_hash (rtx x, enum machine_mode mode)
2459 int dummy_do_not_record;
2460 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2463 /* Return 1 iff X and Y would canonicalize into the same thing,
2464 without actually constructing the canonicalization of either one.
2465 If VALIDATE is nonzero,
2466 we assume X is an expression being processed from the rtl
2467 and Y was found in the hash table. We check register refs
2468 in Y for being marked as valid.
2470 If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
2473 exp_equiv_p (rtx x, rtx y, int validate, bool for_gcse)
2479 /* Note: it is incorrect to assume an expression is equivalent to itself
2480 if VALIDATE is nonzero. */
2481 if (x == y && !validate)
2484 if (x == 0 || y == 0)
2487 code = GET_CODE (x);
2488 if (code != GET_CODE (y))
2491 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2492 if (GET_MODE (x) != GET_MODE (y))
2504 return XEXP (x, 0) == XEXP (y, 0);
2507 return XSTR (x, 0) == XSTR (y, 0);
2511 return REGNO (x) == REGNO (y);
2514 unsigned int regno = REGNO (y);
2516 unsigned int endregno
2517 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2518 : hard_regno_nregs[regno][GET_MODE (y)]);
2520 /* If the quantities are not the same, the expressions are not
2521 equivalent. If they are and we are not to validate, they
2522 are equivalent. Otherwise, ensure all regs are up-to-date. */
2524 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2530 for (i = regno; i < endregno; i++)
2531 if (REG_IN_TABLE (i) != REG_TICK (i))
2540 /* A volatile mem should not be considered equivalent to any
2542 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2545 /* Can't merge two expressions in different alias sets, since we
2546 can decide that the expression is transparent in a block when
2547 it isn't, due to it being set with the different alias set.
2549 Also, can't merge two expressions with different MEM_ATTRS.
2550 They could e.g. be two different entities allocated into the
2551 same space on the stack (see e.g. PR25130). In that case, the
2552 MEM addresses can be the same, even though the two MEMs are
2553 absolutely not equivalent.
2555 But because really all MEM attributes should be the same for
2556 equivalent MEMs, we just use the invariant that MEMs that have
2557 the same attributes share the same mem_attrs data structure. */
2558 if (MEM_ATTRS (x) != MEM_ATTRS (y))
2563 /* For commutative operations, check both orders. */
2571 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2573 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2574 validate, for_gcse))
2575 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2577 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2578 validate, for_gcse)));
2581 /* We don't use the generic code below because we want to
2582 disregard filename and line numbers. */
2584 /* A volatile asm isn't equivalent to any other. */
2585 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2588 if (GET_MODE (x) != GET_MODE (y)
2589 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2590 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2591 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2592 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2593 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2596 if (ASM_OPERANDS_INPUT_LENGTH (x))
2598 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2599 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2600 ASM_OPERANDS_INPUT (y, i),
2602 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2603 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2613 /* Compare the elements. If any pair of corresponding elements
2614 fail to match, return 0 for the whole thing. */
2616 fmt = GET_RTX_FORMAT (code);
2617 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2622 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2623 validate, for_gcse))
2628 if (XVECLEN (x, i) != XVECLEN (y, i))
2630 for (j = 0; j < XVECLEN (x, i); j++)
2631 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2632 validate, for_gcse))
2637 if (strcmp (XSTR (x, i), XSTR (y, i)))
2642 if (XINT (x, i) != XINT (y, i))
2647 if (XWINT (x, i) != XWINT (y, i))
2663 /* Return 1 if X has a value that can vary even between two
2664 executions of the program. 0 means X can be compared reliably
2665 against certain constants or near-constants. */
2668 cse_rtx_varies_p (rtx x, int from_alias)
2670 /* We need not check for X and the equivalence class being of the same
2671 mode because if X is equivalent to a constant in some mode, it
2672 doesn't vary in any mode. */
2675 && REGNO_QTY_VALID_P (REGNO (x)))
2677 int x_q = REG_QTY (REGNO (x));
2678 struct qty_table_elem *x_ent = &qty_table[x_q];
2680 if (GET_MODE (x) == x_ent->mode
2681 && x_ent->const_rtx != NULL_RTX)
2685 if (GET_CODE (x) == PLUS
2686 && GET_CODE (XEXP (x, 1)) == CONST_INT
2687 && REG_P (XEXP (x, 0))
2688 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2690 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2691 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2693 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2694 && x0_ent->const_rtx != NULL_RTX)
2698 /* This can happen as the result of virtual register instantiation, if
2699 the initial constant is too large to be a valid address. This gives
2700 us a three instruction sequence, load large offset into a register,
2701 load fp minus a constant into a register, then a MEM which is the
2702 sum of the two `constant' registers. */
2703 if (GET_CODE (x) == PLUS
2704 && REG_P (XEXP (x, 0))
2705 && REG_P (XEXP (x, 1))
2706 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2707 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2709 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2710 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2711 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2712 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2714 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2715 && x0_ent->const_rtx != NULL_RTX
2716 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2717 && x1_ent->const_rtx != NULL_RTX)
2721 return rtx_varies_p (x, from_alias);
2724 /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2725 the result if necessary. INSN is as for canon_reg. */
2728 validate_canon_reg (rtx *xloc, rtx insn)
2730 rtx new = canon_reg (*xloc, insn);
2733 /* If replacing pseudo with hard reg or vice versa, ensure the
2734 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2735 if (insn != 0 && new != 0
2736 && REG_P (new) && REG_P (*xloc)
2737 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2738 != (REGNO (*xloc) < FIRST_PSEUDO_REGISTER))
2739 || GET_MODE (new) != GET_MODE (*xloc)
2740 || (insn_code = recog_memoized (insn)) < 0
2741 || insn_data[insn_code].n_dups > 0))
2742 validate_change (insn, xloc, new, 1);
2747 /* Canonicalize an expression:
2748 replace each register reference inside it
2749 with the "oldest" equivalent register.
2751 If INSN is nonzero and we are replacing a pseudo with a hard register
2752 or vice versa, validate_change is used to ensure that INSN remains valid
2753 after we make our substitution. The calls are made with IN_GROUP nonzero
2754 so apply_change_group must be called upon the outermost return from this
2755 function (unless INSN is zero). The result of apply_change_group can
2756 generally be discarded since the changes we are making are optional. */
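/* A hedged example (register numbers invented): if pseudos 100 and 105
   currently share a quantity whose oldest member is 100, this function
   rewrites

       (plus:SI (reg:SI 105) (const_int 4))

   into

       (plus:SI (reg:SI 100) (const_int 4)).  */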
2759 canon_reg (rtx x, rtx insn)
2768 code = GET_CODE (x);
2787 struct qty_table_elem *ent;
2789 /* Never replace a hard reg, because hard regs can appear
2790 in more than one machine mode, and we must preserve the mode
2791 of each occurrence. Also, some hard regs appear in
2792 MEMs that are shared and mustn't be altered. Don't try to
2793 replace any reg that maps to a reg of class NO_REGS. */
2794 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2795 || ! REGNO_QTY_VALID_P (REGNO (x)))
2798 q = REG_QTY (REGNO (x));
2799 ent = &qty_table[q];
2800 first = ent->first_reg;
2801 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2802 : REGNO_REG_CLASS (first) == NO_REGS ? x
2803 : gen_rtx_REG (ent->mode, first));
2810 fmt = GET_RTX_FORMAT (code);
2811 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2816 validate_canon_reg (&XEXP (x, i), insn);
2817 else if (fmt[i] == 'E')
2818 for (j = 0; j < XVECLEN (x, i); j++)
2819 validate_canon_reg (&XVECEXP (x, i, j), insn);
2825 /* LOC is a location within INSN that is an operand address (the contents of
2826 a MEM). Find the best equivalent address to use that is valid for this
2829 On most CISC machines, complicated address modes are costly, and rtx_cost
2830 is a good approximation for that cost. However, most RISC machines have
2831 only a few (usually only one) memory reference formats. If an address is
2832 valid at all, it is often just as cheap as any other address. Hence, for
2833 RISC machines, we use `address_cost' to compare the costs of various
2834 addresses. For two addresses of equal cost, choose the one with the
2835 highest `rtx_cost' value as that has the potential of eliminating the
2836 most insns. For equal costs, we choose the first in the equivalence
2837 class. Note that we ignore the fact that pseudo registers are cheaper than
2838 hard registers here because we would also prefer the pseudo registers. */
2841 find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2843 struct table_elt *elt;
2845 struct table_elt *p;
2846 int found_better = 1;
2847 int save_do_not_record = do_not_record;
2848 int save_hash_arg_in_memory = hash_arg_in_memory;
2853 /* Do not try to replace constant addresses or addresses of local and
2854 argument slots. These MEM expressions are made only once and inserted
2855 in many instructions, as well as being used to control symbol table
2856 output. It is not safe to clobber them.
2858 There are some uncommon cases where the address is already in a register
2859 for some reason, but we cannot take advantage of that because we have
2860 no easy way to unshare the MEM. In addition, looking up all stack
2861 addresses is costly. */
2862 if ((GET_CODE (addr) == PLUS
2863 && REG_P (XEXP (addr, 0))
2864 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2865 && (regno = REGNO (XEXP (addr, 0)),
2866 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2867 || regno == ARG_POINTER_REGNUM))
2869 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2870 || regno == HARD_FRAME_POINTER_REGNUM
2871 || regno == ARG_POINTER_REGNUM))
2872 || CONSTANT_ADDRESS_P (addr))
2875 /* If this address is not simply a register, try to fold it. This will
2876 sometimes simplify the expression. Many simplifications
2877 will not be valid, but some, usually applying the associative rule, will
2878 be valid and produce better code. */
2881 rtx folded = canon_for_address (fold_rtx (addr, NULL_RTX));
2885 int addr_folded_cost = address_cost (folded, mode);
2886 int addr_cost = address_cost (addr, mode);
2888 if ((addr_folded_cost < addr_cost
2889 || (addr_folded_cost == addr_cost
2890 /* ??? The rtx_cost comparison is left over from an older
2891 version of this code. It is probably no longer helpful.  */
2892 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2893 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2894 && validate_change (insn, loc, folded, 0))
2899 /* If this address is not in the hash table, we can't look for equivalences
2900 of the whole address. Also, ignore if volatile. */
2903 hash = HASH (addr, Pmode);
2904 addr_volatile = do_not_record;
2905 do_not_record = save_do_not_record;
2906 hash_arg_in_memory = save_hash_arg_in_memory;
2911 elt = lookup (addr, hash, Pmode);
2915 /* We need to find the best (under the criteria documented above) entry
2916 in the class that is valid. We use the `flag' field to indicate
2917 choices that were invalid and iterate until we can't find a better
2918 one that hasn't already been tried. */
2920 for (p = elt->first_same_value; p; p = p->next_same_value)
2923 while (found_better)
2925 int best_addr_cost = address_cost (*loc, mode);
2926 int best_rtx_cost = (elt->cost + 1) >> 1;
2928 struct table_elt *best_elt = elt;
2931 for (p = elt->first_same_value; p; p = p->next_same_value)
2935 || exp_equiv_p (p->exp, p->exp, 1, false))
2936 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2937 || (exp_cost == best_addr_cost
2938 && ((p->cost + 1) >> 1) > best_rtx_cost)))
2941 best_addr_cost = exp_cost;
2942 best_rtx_cost = (p->cost + 1) >> 1;
2949 if (validate_change (insn, loc,
2950 canon_reg (copy_rtx (best_elt->exp),
2959 /* If the address is a binary operation with the first operand a register
2960 and the second a constant, do the same as above, but looking for
2961 equivalences of the register. Then try to simplify before checking for
2962 the best address to use. This catches a few cases: First is when we
2963 have REG+const and the register is another REG+const. We can often merge
2964 the constants and eliminate one insn and one register. It may also be
2965 that a machine has a cheap REG+REG+const. Finally, this improves the
2966 code on the Alpha for unaligned byte stores. */
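/* Illustration of the REG+const case (register numbers invented): if
   *LOC is

       (plus:SI (reg:SI 100) (const_int 8))

   and the table records that (reg:SI 100) is equivalent to
   (plus:SI (reg:SI 101) (const_int 16)), the simplify_gen_binary call
   below can produce

       (plus:SI (reg:SI 101) (const_int 24))

   which may be a cheaper address.  */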
2968 if (flag_expensive_optimizations
2969 && ARITHMETIC_P (*loc)
2970 && REG_P (XEXP (*loc, 0)))
2972 rtx op1 = XEXP (*loc, 1);
2975 hash = HASH (XEXP (*loc, 0), Pmode);
2976 do_not_record = save_do_not_record;
2977 hash_arg_in_memory = save_hash_arg_in_memory;
2979 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2983 /* We need to find the best (under the criteria documented above) entry
2984 in the class that is valid. We use the `flag' field to indicate
2985 choices that were invalid and iterate until we can't find a better
2986 one that hasn't already been tried. */
2988 for (p = elt->first_same_value; p; p = p->next_same_value)
2991 while (found_better)
2993 int best_addr_cost = address_cost (*loc, mode);
2994 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2995 struct table_elt *best_elt = elt;
2996 rtx best_rtx = *loc;
2999 /* This is at worst case an O(n^2) algorithm, so limit our search
3000 to the first 32 elements on the list. This avoids trouble
3001 compiling code with very long basic blocks that can easily
3002 call simplify_gen_binary so many times that we run out of
3006 for (p = elt->first_same_value, count = 0;
3008 p = p->next_same_value, count++)
3011 || (GET_CODE (p->exp) != EXPR_LIST
3012 && exp_equiv_p (p->exp, p->exp, 1, false))))
3015 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3019 /* Get the canonical version of the address so we can accept
3021 new = canon_for_address (new);
3023 new_cost = address_cost (new, mode);
3025 if (new_cost < best_addr_cost
3026 || (new_cost == best_addr_cost
3027 && (COST (new) + 1) >> 1 > best_rtx_cost))
3030 best_addr_cost = new_cost;
3031 best_rtx_cost = (COST (new) + 1) >> 1;
3039 if (validate_change (insn, loc,
3040 canon_reg (copy_rtx (best_rtx),
3050 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3051 operation (EQ, NE, GT, etc.), follow it back through the hash table and
3052 find what values are being compared.
3054 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3055 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3056 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3057 compared to produce cc0.
3059 The return value is the comparison code to apply to the updated operands:
3060 either CODE itself or the code of an equivalent, possibly inverted, comparison. */
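/* A hedged example (register numbers invented): with *PARG1 == (cc0)
   and *PARG2 == (const_int 0), if the table records that cc0 was set
   from

       (compare (reg:SI 100) (reg:SI 101))

   the loop below finds that COMPARE, returns CODE unchanged, and sets
   *PARG1 = (reg:SI 100), *PARG2 = (reg:SI 101).  */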
3062 static enum rtx_code
3063 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
3064 enum machine_mode *pmode1, enum machine_mode *pmode2)
3068 arg1 = *parg1, arg2 = *parg2;
3070 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3072 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3074 /* Set nonzero when we find something of interest. */
3076 int reverse_code = 0;
3077 struct table_elt *p = 0;
3079 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3080 On machines with CC0, this is the only case that can occur, since
3081 fold_rtx will return the COMPARE or item being compared with zero
3084 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3087 /* If ARG1 is a comparison operator and CODE is testing for
3088 STORE_FLAG_VALUE, get the inner arguments. */
3090 else if (COMPARISON_P (arg1))
3092 #ifdef FLOAT_STORE_FLAG_VALUE
3093 REAL_VALUE_TYPE fsfv;
3097 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3098 && code == LT && STORE_FLAG_VALUE == -1)
3099 #ifdef FLOAT_STORE_FLAG_VALUE
3100 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3101 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3102 REAL_VALUE_NEGATIVE (fsfv)))
3107 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3108 && code == GE && STORE_FLAG_VALUE == -1)
3109 #ifdef FLOAT_STORE_FLAG_VALUE
3110 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3111 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3112 REAL_VALUE_NEGATIVE (fsfv)))
3115 x = arg1, reverse_code = 1;
3118 /* ??? We could also check for
3120 (ne (and (eq (...) (const_int 1))) (const_int 0))
3122 and related forms, but let's wait until we see them occurring. */
3125 /* Look up ARG1 in the hash table and see if it has an equivalence
3126 that lets us see what is being compared. */
3127 p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
3130 p = p->first_same_value;
3132 /* If what we compare is already known to be constant, that is as
3134 We need to break the loop in this case, because otherwise we
3135 can have an infinite loop when looking at a reg that is known
3136 to be a constant which is the same as a comparison of a reg
3137 against zero which appears later in the insn stream, which in
3138 turn is constant and the same as the comparison of the first reg
3144 for (; p; p = p->next_same_value)
3146 enum machine_mode inner_mode = GET_MODE (p->exp);
3147 #ifdef FLOAT_STORE_FLAG_VALUE
3148 REAL_VALUE_TYPE fsfv;
3151 /* If the entry isn't valid, skip it. */
3152 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3155 if (GET_CODE (p->exp) == COMPARE
3156 /* Another possibility is that this machine has a compare insn
3157 that includes the comparison code. In that case, ARG1 would
3158 be equivalent to a comparison operation that would set ARG1 to
3159 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3160 ORIG_CODE is the actual comparison being done; if it is an EQ,
3161 we must reverse ORIG_CODE. On machine with a negative value
3162 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3165 && GET_MODE_CLASS (inner_mode) == MODE_INT
3166 && (GET_MODE_BITSIZE (inner_mode)
3167 <= HOST_BITS_PER_WIDE_INT)
3168 && (STORE_FLAG_VALUE
3169 & ((HOST_WIDE_INT) 1
3170 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3171 #ifdef FLOAT_STORE_FLAG_VALUE
3173 && SCALAR_FLOAT_MODE_P (inner_mode)
3174 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3175 REAL_VALUE_NEGATIVE (fsfv)))
3178 && COMPARISON_P (p->exp)))
3183 else if ((code == EQ
3185 && GET_MODE_CLASS (inner_mode) == MODE_INT
3186 && (GET_MODE_BITSIZE (inner_mode)
3187 <= HOST_BITS_PER_WIDE_INT)
3188 && (STORE_FLAG_VALUE
3189 & ((HOST_WIDE_INT) 1
3190 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3191 #ifdef FLOAT_STORE_FLAG_VALUE
3193 && SCALAR_FLOAT_MODE_P (inner_mode)
3194 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3195 REAL_VALUE_NEGATIVE (fsfv)))
3198 && COMPARISON_P (p->exp))
3205 /* If this is a non-trapping address, e.g. fp + constant, the
3206 equivalent is a better operand since it may let us predict
3207 the value of the comparison. */
3208 else if (!rtx_addr_can_trap_p (p->exp))
3215 /* If we didn't find a useful equivalence for ARG1, we are done.
3216 Otherwise, set up for the next iteration. */
3220 /* If we need to reverse the comparison, make sure that that is
3221 possible -- we can't necessarily infer the value of GE from LT
3222 with floating-point operands. */
3225 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3226 if (reversed == UNKNOWN)
3231 else if (COMPARISON_P (x))
3232 code = GET_CODE (x);
3233 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3236 /* Return our results. Return the modes from before fold_rtx
3237 because fold_rtx might produce const_int, and then it's too late. */
3238 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3239 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3247 fold_rtx_subreg (rtx x, rtx insn)
3249 enum machine_mode mode = GET_MODE (x);
3254 /* See if we previously assigned a constant value to this SUBREG. */
3255 if ((new = lookup_as_function (x, CONST_INT)) != 0
3256 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3259 /* If this is a paradoxical SUBREG, we have no idea what value the
3260 extra bits would have. However, if the operand is equivalent to
3261 a SUBREG whose operand is the same as our mode, and all the modes
3262 are within a word, we can just use the inner operand because
3263 these SUBREGs just say how to treat the register.
3265 Similarly if we find an integer constant. */
3267 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3269 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3270 struct table_elt *elt;
3272 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3273 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3274 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3276 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3278 if (CONSTANT_P (elt->exp)
3279 && GET_MODE (elt->exp) == VOIDmode)
3282 if (GET_CODE (elt->exp) == SUBREG
3283 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3284 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3285 return copy_rtx (SUBREG_REG (elt->exp));
3291 /* Fold SUBREG_REG. If it changed, see if we can simplify the
3292 SUBREG. We might be able to if the SUBREG is extracting a single
3293 word in an integral mode or extracting the low part. */
3295 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3296 const_arg0 = equiv_constant (folded_arg0);
3298 folded_arg0 = const_arg0;
3300 if (folded_arg0 != SUBREG_REG (x))
3302 new = simplify_subreg (mode, folded_arg0,
3303 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3308 if (REG_P (folded_arg0)
3309 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
3311 struct table_elt *elt;
3313 elt = lookup (folded_arg0,
3314 HASH (folded_arg0, GET_MODE (folded_arg0)),
3315 GET_MODE (folded_arg0));
3318 elt = elt->first_same_value;
3320 if (subreg_lowpart_p (x))
3321 /* If this is a narrowing SUBREG and our operand is a REG, see
3322 if we can find an equivalence for REG that is an arithmetic
3323 operation in a wider mode where both operands are
3324 paradoxical SUBREGs from objects of our result mode. In
3325 that case, we couldn't report an equivalent value for that
3326 operation, since we don't know what the extra bits will be.
3327 But we can find an equivalence for this SUBREG by folding
3328 that operation in the narrow mode. This allows us to fold
3329 arithmetic in narrow modes when the machine only supports
3330 word-sized arithmetic.
3332 Also look for a case where we have a SUBREG whose operand
3333 is the same as our result. If both modes are smaller than
3334 a word, we are simply interpreting a register in different
3335 modes and we can use the inner value. */
3337 for (; elt; elt = elt->next_same_value)
3339 enum rtx_code eltcode = GET_CODE (elt->exp);
3341 /* Just check for unary and binary operations. */
3342 if (UNARY_P (elt->exp)
3343 && eltcode != SIGN_EXTEND
3344 && eltcode != ZERO_EXTEND
3345 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3346 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3347 && (GET_MODE_CLASS (mode)
3348 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3350 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3352 if (!REG_P (op0) && ! CONSTANT_P (op0))
3353 op0 = fold_rtx (op0, NULL_RTX);
3355 op0 = equiv_constant (op0);
3357 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3360 else if (ARITHMETIC_P (elt->exp)
3361 && eltcode != DIV && eltcode != MOD
3362 && eltcode != UDIV && eltcode != UMOD
3363 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3364 && eltcode != ROTATE && eltcode != ROTATERT
3365 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3366 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3368 || CONSTANT_P (XEXP (elt->exp, 0)))
3369 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3370 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3372 || CONSTANT_P (XEXP (elt->exp, 1))))
3374 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3375 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3377 if (op0 && !REG_P (op0) && ! CONSTANT_P (op0))
3378 op0 = fold_rtx (op0, NULL_RTX);
3381 op0 = equiv_constant (op0);
3383 if (op1 && !REG_P (op1) && ! CONSTANT_P (op1))
3384 op1 = fold_rtx (op1, NULL_RTX);
3387 op1 = equiv_constant (op1);
3389 /* If we are looking for the low SImode part of
3390 (ashift:DI c (const_int 32)), it doesn't work to
3391 compute that in SImode, because a 32-bit shift in
3392 SImode is unpredictable. We know the value is
3395 && GET_CODE (elt->exp) == ASHIFT
3396 && GET_CODE (op1) == CONST_INT
3397 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3400 < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3401 /* If the count fits in the inner mode's width,
3402 but exceeds the outer mode's width, the value
3403 will get truncated to 0 by the subreg. */
3404 new = CONST0_RTX (mode);
3406 /* If the count exceeds even the inner mode's width,
3407 don't fold this expression. */
3410 else if (op0 && op1)
3411 new = simplify_binary_operation (GET_CODE (elt->exp),
3415 else if (GET_CODE (elt->exp) == SUBREG
3416 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3417 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3419 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3420 new = copy_rtx (SUBREG_REG (elt->exp));
3426 /* A SUBREG resulting from a zero extension may fold to zero
3427 if it extracts higher bits than the ZERO_EXTEND's source
3428 bits. FIXME: if combine tried to, er, combine these
3429 instructions, this transformation may be moved to
3431 for (; elt; elt = elt->next_same_value)
3433 if (GET_CODE (elt->exp) == ZERO_EXTEND
3435 >= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3436 return CONST0_RTX (mode);
3446 fold_rtx_mem (rtx x, rtx insn)
3448 enum machine_mode mode = GET_MODE (x);
3451 /* If we are not actually processing an insn, don't try to find the
3452 best address. Not only don't we care, but we could modify the
3453 MEM in an invalid way since we have no insn to validate
3456 find_best_addr (insn, &XEXP (x, 0), mode);
3459 /* Even if we don't fold in the insn itself, we can safely do so
3460 here, in hopes of getting a constant. */
3461 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3463 HOST_WIDE_INT offset = 0;
3466 && REGNO_QTY_VALID_P (REGNO (addr)))
3468 int addr_q = REG_QTY (REGNO (addr));
3469 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3471 if (GET_MODE (addr) == addr_ent->mode
3472 && addr_ent->const_rtx != NULL_RTX)
3473 addr = addr_ent->const_rtx;
3476 /* Call target hook to avoid the effects of -fpic etc.... */
3477 addr = targetm.delegitimize_address (addr);
3479 /* If address is constant, split it into a base and integer
3481 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3483 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3484 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3486 base = XEXP (XEXP (addr, 0), 0);
3487 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3489 else if (GET_CODE (addr) == LO_SUM
3490 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3491 base = XEXP (addr, 1);
3493 /* If this is a constant pool reference, we can fold it into its
3494 constant to allow better value tracking. */
3495 if (base && GET_CODE (base) == SYMBOL_REF
3496 && CONSTANT_POOL_ADDRESS_P (base))
3498 rtx constant = get_pool_constant (base);
3499 enum machine_mode const_mode = get_pool_mode (base);
3502 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3504 constant_pool_entries_cost = COST (constant);
3505 constant_pool_entries_regcost = approx_reg_cost (constant);
3508 /* If we are loading the full constant, we have an
3510 if (offset == 0 && mode == const_mode)
3513 /* If this actually isn't a constant (weird!), we can't do
3514 anything. Otherwise, handle the two most common cases:
3515 extracting a word from a multi-word constant, and
3516 extracting the low-order bits. Other cases don't seem
3517 common enough to worry about. */
3518 if (! CONSTANT_P (constant))
3521 if (GET_MODE_CLASS (mode) == MODE_INT
3522 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3523 && offset % UNITS_PER_WORD == 0
3524 && (new = operand_subword (constant,
3525 offset / UNITS_PER_WORD,
3526 0, const_mode)) != 0)
3529 if (((BYTES_BIG_ENDIAN
3530 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3531 || (! BYTES_BIG_ENDIAN && offset == 0))
3532 && (new = gen_lowpart (mode, constant)) != 0)
3536 /* If this is a reference to a label at a known position in a jump
3537 table, we also know its value. */
3538 if (base && GET_CODE (base) == LABEL_REF)
3540 rtx label = XEXP (base, 0);
3541 rtx table_insn = NEXT_INSN (label);
3543 if (table_insn && JUMP_P (table_insn)
3544 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3546 rtx table = PATTERN (table_insn);
3549 && (offset / GET_MODE_SIZE (GET_MODE (table))
3550 < XVECLEN (table, 0)))
3553 (table, 0, offset / GET_MODE_SIZE (GET_MODE (table)));
3556 /* If we have an insn that loads the label from the
3557 jumptable into a reg, we don't want to set the reg
3558 to the label, because this may cause a reference to
3559 the label to remain after the label is removed in
3560 some very obscure cases (PR middle-end/18628). */
3564 set = single_set (insn);
3566 if (! set || SET_SRC (set) != x)
3569 /* If it's a jump, it's safe to reference the label. */
3570 if (SET_DEST (set) == pc_rtx)
3576 if (table_insn && JUMP_P (table_insn)
3577 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3579 rtx table = PATTERN (table_insn);
3582 && (offset / GET_MODE_SIZE (GET_MODE (table))
3583 < XVECLEN (table, 1)))
3585 offset /= GET_MODE_SIZE (GET_MODE (table));
3586 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3589 if (GET_MODE (table) != Pmode)
3590 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3592 /* Indicate this is a constant. This isn't a valid
3593 form of CONST, but it will only be used to fold the
3594 next insns and then discarded, so it should be
3597 Note this expression must be explicitly discarded,
3598 by cse_insn, else it may end up in a REG_EQUAL note
3599 and "escape" to cause problems elsewhere. */
3600 return gen_rtx_CONST (GET_MODE (new), new);
3609 /* If X is a nontrivial arithmetic operation on an argument
3610 for which a constant value can be determined, return
3611 the result of operating on that value, as a constant.
3612 Otherwise, return X, possibly with one or more operands
3613 modified by recursive calls to this function.
3615 If X is a register whose contents are known, we do NOT
3616 return those contents here. equiv_constant is called to
3619 INSN is the insn that we may be modifying. If it is 0, make a copy
3620 of X before modifying it. */
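/* A hedged illustration (register number invented): if (reg:SI 100) is
   known to be equivalent to (const_int 6), then

       (plus:SI (reg:SI 100) (const_int 2))

   folds to (const_int 8) via simplify_binary_operation, while a lone
   (reg:SI 100) is returned unchanged -- equiv_constant, not fold_rtx,
   reports register contents.  */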
3623 fold_rtx (rtx x, rtx insn)
3626 enum machine_mode mode;
3633 /* Folded equivalents of first two operands of X. */
3637 /* Constant equivalents of first three operands of X;
3638 0 when no such equivalent is known. */
3643 /* The mode of the first operand of X. We need this for sign and zero
3645 enum machine_mode mode_arg0;
3650 mode = GET_MODE (x);
3651 code = GET_CODE (x);
3662 /* No use simplifying an EXPR_LIST
3663 since they are used only for lists of args
3664 in a function call's REG_EQUAL note. */
3670 return prev_insn_cc0;
3674 return fold_rtx_subreg (x, insn);
3678 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3679 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3680 new = lookup_as_function (XEXP (x, 0), code);
3682 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3686 return fold_rtx_mem (x, insn);
3688 #ifdef NO_FUNCTION_CSE
3690 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3698 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3699 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3700 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3711 mode_arg0 = VOIDmode;
3713 /* Try folding our operands.
3714 Then see which ones have constant values known. */
3716 fmt = GET_RTX_FORMAT (code);
3717 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3720 rtx arg = XEXP (x, i);
3721 rtx folded_arg = arg, const_arg = 0;
3722 enum machine_mode mode_arg = GET_MODE (arg);
3723 rtx cheap_arg, expensive_arg;
3724 rtx replacements[2];
3726 int old_cost = COST_IN (XEXP (x, i), code);
3728 /* Most arguments are cheap, so handle them specially. */
3729 switch (GET_CODE (arg))
3732 /* This is the same as calling equiv_constant; it is duplicated
3734 if (REGNO_QTY_VALID_P (REGNO (arg)))
3736 int arg_q = REG_QTY (REGNO (arg));
3737 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3739 if (arg_ent->const_rtx != NULL_RTX
3740 && !REG_P (arg_ent->const_rtx)
3741 && GET_CODE (arg_ent->const_rtx) != PLUS)
3743 = gen_lowpart (GET_MODE (arg),
3744 arg_ent->const_rtx);
3759 folded_arg = prev_insn_cc0;
3760 mode_arg = prev_insn_cc0_mode;
3761 const_arg = equiv_constant (folded_arg);
3766 folded_arg = fold_rtx (arg, insn);
3767 const_arg = equiv_constant (folded_arg);
3770 /* For the first three operands, see if the operand
3771 is constant or equivalent to a constant. */
3775 folded_arg0 = folded_arg;
3776 const_arg0 = const_arg;
3777 mode_arg0 = mode_arg;
3780 folded_arg1 = folded_arg;
3781 const_arg1 = const_arg;
3784 const_arg2 = const_arg;
3788 /* Pick the least expensive of the folded argument and an
3789 equivalent constant argument. */
3790 if (const_arg == 0 || const_arg == folded_arg
3791 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3792 cheap_arg = folded_arg, expensive_arg = const_arg;
3794 cheap_arg = const_arg, expensive_arg = folded_arg;
3796 /* Try to replace the operand with the cheapest of the two
3797 possibilities. If it doesn't work and this is either of the first
3798 two operands of a commutative operation, try swapping them.
3799 If THAT fails, try the more expensive, provided it is cheaper
3800 than what is already there. */
3802 if (cheap_arg == XEXP (x, i))
3805 if (insn == 0 && ! copied)
3811 /* Order the replacements from cheapest to most expensive. */
3812 replacements[0] = cheap_arg;
3813 replacements[1] = expensive_arg;
3815 for (j = 0; j < 2 && replacements[j]; j++)
3817 int new_cost = COST_IN (replacements[j], code);
3819 /* Stop if what existed before was cheaper. Prefer constants
3820 in the case of a tie. */
3821 if (new_cost > old_cost
3822 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3825 /* It's not safe to substitute the operand of a conversion
3826 operator with a constant, as the conversion's identity
3827 depends upon the mode of its operand. This optimization
3828 is handled by the call to simplify_unary_operation. */
3829 if (GET_RTX_CLASS (code) == RTX_UNARY
3830 && GET_MODE (replacements[j]) != mode_arg0
3831 && (code == ZERO_EXTEND
3832 || code == SIGN_EXTEND
3834 || code == FLOAT_TRUNCATE
3835 || code == FLOAT_EXTEND
3838 || code == UNSIGNED_FLOAT
3839 || code == UNSIGNED_FIX))
3842 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3845 if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3846 || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
3848 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3849 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3851 if (apply_change_group ())
3853 /* Swap them back to be invalid so that this loop can
3854 continue and flag them to be swapped back later. */
3857 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3869 /* Don't try to fold inside of a vector of expressions.
3870 Doing nothing is harmless. */
3874 /* If a commutative operation, place a constant integer as the second
3875 operand unless the first operand is also a constant integer. Otherwise,
3876 place any constant second unless the first operand is also a constant. */
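/* E.g. (plus:SI (const_int 4) (reg:SI 100)) -- register number invented --
   is rewritten here as (plus:SI (reg:SI 100) (const_int 4)), so that
   later code only has to look for a constant in operand 1.  */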
3878 if (COMMUTATIVE_P (x))
3881 || swap_commutative_operands_p (const_arg0 ? const_arg0
3883 const_arg1 ? const_arg1
3886 rtx tem = XEXP (x, 0);
3888 if (insn == 0 && ! copied)
3894 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3895 validate_change (insn, &XEXP (x, 1), tem, 1);
3896 if (apply_change_group ())
3898 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3899 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3904 /* If X is an arithmetic operation, see if we can simplify it. */
3906 switch (GET_RTX_CLASS (code))
3912 /* We can't simplify extension ops unless we know the
3914 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3915 && mode_arg0 == VOIDmode)
3918 /* If we had a CONST, strip it off and put it back later if we
3920 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3921 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3923 new = simplify_unary_operation (code, mode,
3924 const_arg0 ? const_arg0 : folded_arg0,
3926 /* NEG of PLUS could be converted into MINUS, but that causes
3927 expressions of the form
3928 (CONST (MINUS (CONST_INT) (SYMBOL_REF)))
3929 which many ports mistakenly treat as LEGITIMATE_CONSTANT_P.
3930 FIXME: those ports should be fixed. */
3931 if (new != 0 && is_const
3932 && GET_CODE (new) == PLUS
3933 && (GET_CODE (XEXP (new, 0)) == SYMBOL_REF
3934 || GET_CODE (XEXP (new, 0)) == LABEL_REF)
3935 && GET_CODE (XEXP (new, 1)) == CONST_INT)
3936 new = gen_rtx_CONST (mode, new);
3941 case RTX_COMM_COMPARE:
3942 /* See what items are actually being compared and set FOLDED_ARG[01]
3943 to those values and CODE to the actual comparison code. If any are
3944 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3945 do anything if both operands are already known to be constant. */
3947 /* ??? Vector mode comparisons are not supported yet. */
3948 if (VECTOR_MODE_P (mode))
3951 if (const_arg0 == 0 || const_arg1 == 0)
3953 struct table_elt *p0, *p1;
3954 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3955 enum machine_mode mode_arg1;
3957 #ifdef FLOAT_STORE_FLAG_VALUE
3958 if (SCALAR_FLOAT_MODE_P (mode))
3960 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3961 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3962 false_rtx = CONST0_RTX (mode);
3966 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3967 &mode_arg0, &mode_arg1);
3969 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3970 what kinds of things are being compared, so we can't do
3971 anything with this comparison. */
3973 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3976 const_arg0 = equiv_constant (folded_arg0);
3977 const_arg1 = equiv_constant (folded_arg1);
3979 /* If we do not now have two constants being compared, see
3980 if we can nevertheless deduce some things about the
3982 if (const_arg0 == 0 || const_arg1 == 0)
3984 /* Some addresses are known to be nonzero. We don't know
3985 their sign, but equality comparisons are known. */
3986 if (const_arg1 == const0_rtx
3987 && nonzero_address_p (folded_arg0))
3991 else if (code == NE)
3995 /* See if the two operands are the same. */
3997 if (folded_arg0 == folded_arg1
3998 || (REG_P (folded_arg0)
3999 && REG_P (folded_arg1)
4000 && (REG_QTY (REGNO (folded_arg0))
4001 == REG_QTY (REGNO (folded_arg1))))
4002 || ((p0 = lookup (folded_arg0,
4003 SAFE_HASH (folded_arg0, mode_arg0),
4005 && (p1 = lookup (folded_arg1,
4006 SAFE_HASH (folded_arg1, mode_arg0),
4008 && p0->first_same_value == p1->first_same_value))
4010 /* Sadly two equal NaNs are not equivalent. */
4011 if (!HONOR_NANS (mode_arg0))
4012 return ((code == EQ || code == LE || code == GE
4013 || code == LEU || code == GEU || code == UNEQ
4014 || code == UNLE || code == UNGE
4016 ? true_rtx : false_rtx);
4017 /* Take care for the FP compares we can resolve. */
4018 if (code == UNEQ || code == UNLE || code == UNGE)
4020 if (code == LTGT || code == LT || code == GT)
4024 /* If FOLDED_ARG0 is a register, see if the comparison we are
4025 doing now is either the same as we did before or the reverse
4026 (we only check the reverse if not floating-point). */
4027 else if (REG_P (folded_arg0))
4029 int qty = REG_QTY (REGNO (folded_arg0));
4031 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
4033 struct qty_table_elem *ent = &qty_table[qty];
4035 if ((comparison_dominates_p (ent->comparison_code, code)
4036 || (! FLOAT_MODE_P (mode_arg0)
4037 && comparison_dominates_p (ent->comparison_code,
4038 reverse_condition (code))))
4039 && (rtx_equal_p (ent->comparison_const, folded_arg1)
4041 && rtx_equal_p (ent->comparison_const,
4043 || (REG_P (folded_arg1)
4044 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4045 return (comparison_dominates_p (ent->comparison_code, code)
4046 ? true_rtx : false_rtx);
4052 /* If we are comparing against zero, see if the first operand is
4053 equivalent to an IOR with a constant. If so, we may be able to
4054 determine the result of this comparison. */
4056 if (const_arg1 == const0_rtx)
4058 rtx y = lookup_as_function (folded_arg0, IOR);
4062 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4063 && GET_CODE (inner_const) == CONST_INT
4064 && INTVAL (inner_const) != 0)
4066 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4067 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4068 && (INTVAL (inner_const)
4069 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4070 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4072 #ifdef FLOAT_STORE_FLAG_VALUE
4073 if (SCALAR_FLOAT_MODE_P (mode))
4075 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4076 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4077 false_rtx = CONST0_RTX (mode);
4102 rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
4103 rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
4104 new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
4109 case RTX_COMM_ARITH:
4113 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4114 with that LABEL_REF as its second operand. If so, the result is
4115 the first operand of that MINUS. This handles switches with an
4116 ADDR_DIFF_VEC table. */
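/* Illustration (label names invented): a dispatch address computed as

       (plus (minus (label_ref L2) (label_ref L1)) (label_ref L1))

   folds here to (label_ref L2), because the MINUS's second operand
   matches CONST_ARG1.  */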
4117 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4120 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4121 : lookup_as_function (folded_arg0, MINUS);
4123 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4124 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4127 /* Now try for a CONST of a MINUS like the above. */
4128 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4129 : lookup_as_function (folded_arg0, CONST))) != 0
4130 && GET_CODE (XEXP (y, 0)) == MINUS
4131 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4132 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4133 return XEXP (XEXP (y, 0), 0);
4136 /* Likewise if the operands are in the other order. */
4137 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4140 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4141 : lookup_as_function (folded_arg1, MINUS);
4143 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4144 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4147 /* Now try for a CONST of a MINUS like the above. */
4148 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4149 : lookup_as_function (folded_arg1, CONST))) != 0
4150 && GET_CODE (XEXP (y, 0)) == MINUS
4151 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4152 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4153 return XEXP (XEXP (y, 0), 0);
4156 /* If second operand is a register equivalent to a negative
4157 CONST_INT, see if we can find a register equivalent to the
4158 positive constant. Make a MINUS if so. Don't do this for
4159 a non-negative constant since we might then alternate between
4160 choosing positive and negative constants. Having the positive
4161 constant previously-used is the more common case. Be sure
4162 the resulting constant is non-negative; if const_arg1 were
4163 the smallest negative number this would overflow: depending
4164 on the mode, this would either just be the same value (and
4165 hence not save anything) or be incorrect. */
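/* Hedged example (register numbers invented): for

       (plus:SI (reg:SI 100) (reg:SI 102))

   where (reg:SI 102) is equivalent to (const_int -4), if some
   (reg:SI 101) is already known to hold (const_int 4), this case
   produces

       (minus:SI (reg:SI 100) (reg:SI 101))

   and reuses the existing positive constant.  */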
4166 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4167 && INTVAL (const_arg1) < 0
4168 /* This used to test
4170 -INTVAL (const_arg1) >= 0
4172 But the Sun V5.0 compilers mis-compiled that test. So
4173 instead we test for the problematic value in a more direct
4174 manner and hope the Sun compilers get it correct. */
4175 && INTVAL (const_arg1) !=
4176 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4177 && REG_P (folded_arg1))
4179 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4181 = lookup (new_const, SAFE_HASH (new_const, mode), mode);
4184 for (p = p->first_same_value; p; p = p->next_same_value)
4186 return simplify_gen_binary (MINUS, mode, folded_arg0,
4187 canon_reg (p->exp, NULL_RTX));
4192 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4193 If so, produce (PLUS Z C2-C). */
4194 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4196 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4197 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4198 return fold_rtx (plus_constant (copy_rtx (y),
4199 -INTVAL (const_arg1)),
4206 case SMIN: case SMAX: case UMIN: case UMAX:
4207 case IOR: case AND: case XOR:
4209 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4210 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4211 is known to be of similar form, we may be able to replace the
4212 operation with a combined operation. This may eliminate the
4213 intermediate operation if every use is simplified in this way.
4214 Note that the similar optimization done by combine.c only works
4215 if the intermediate operation's result has only one reference. */
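/* Hedged illustration (register numbers invented): if (reg:SI 100) is
   recorded as (ashift:SI (reg:SI 101) (const_int 2)), then

       (ashift:SI (reg:SI 100) (const_int 3))

   can be rewritten below as (ashift:SI (reg:SI 101) (const_int 5)),
   because for shifts the constant counts are composed with PLUS.  */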
4217 if (REG_P (folded_arg0)
4218 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4221 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4222 rtx y = lookup_as_function (folded_arg0, code);
4224 enum rtx_code associate_code;
4228 || 0 == (inner_const
4229 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4230 || GET_CODE (inner_const) != CONST_INT
4231 /* If we have compiled a statement like
4232 "if (x == (x & mask1))", and now are looking at
4233 "x & mask2", we will have a case where the first operand
4234 of Y is the same as our first operand. Unless we detect
4235 this case, an infinite loop will result. */
4236 || XEXP (y, 0) == folded_arg0)
4239 /* Don't associate these operations if they are a PLUS with the
4240 same constant and it is a power of two. These might be doable
4241 with a pre- or post-increment. Similarly for two subtracts of
4242 identical powers of two with post decrement. */
4244 if (code == PLUS && const_arg1 == inner_const
4245 && ((HAVE_PRE_INCREMENT
4246 && exact_log2 (INTVAL (const_arg1)) >= 0)
4247 || (HAVE_POST_INCREMENT
4248 && exact_log2 (INTVAL (const_arg1)) >= 0)
4249 || (HAVE_PRE_DECREMENT
4250 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4251 || (HAVE_POST_DECREMENT
4252 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4255 /* Compute the code used to compose the constants. For example,
4256 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
4258 associate_code = (is_shift || code == MINUS ? PLUS : code);
4260 new_const = simplify_binary_operation (associate_code, mode,
4261 const_arg1, inner_const);
4266 /* If we are associating shift operations, don't let this
4267 produce a shift of the size of the object or larger.
4268 This could occur when we follow a sign-extend by a right
4269 shift on a machine that does a sign-extend as a pair of shifts. */
4272 if (is_shift && GET_CODE (new_const) == CONST_INT
4273 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4275 /* As an exception, we can turn an ASHIFTRT of this
4276 form into a shift of the number of bits - 1. */
4277 if (code == ASHIFTRT)
4278 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4283 y = copy_rtx (XEXP (y, 0));
4285 /* If Y contains our first operand (the most common way this
4286 can happen is if Y is a MEM), we would go into an infinite
4287 loop if we tried to fold it. So don't in that case. */
4289 if (! reg_mentioned_p (folded_arg0, y))
4290 y = fold_rtx (y, insn);
4292 return simplify_gen_binary (code, mode, y, new_const);
4296 case DIV: case UDIV:
4297 /* ??? The associative optimization performed immediately above is
4298 also possible for DIV and UDIV using associate_code of MULT.
4299 However, we would need extra code to verify that the
4300 multiplication does not overflow, that is, there is no overflow
4301 in the calculation of new_const. */
4308 new = simplify_binary_operation (code, mode,
4309 const_arg0 ? const_arg0 : folded_arg0,
4310 const_arg1 ? const_arg1 : folded_arg1);
4314 /* (lo_sum (high X) X) is simply X. */
4315 if (code == LO_SUM && const_arg0 != 0
4316 && GET_CODE (const_arg0) == HIGH
4317 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4322 case RTX_BITFIELD_OPS:
4323 new = simplify_ternary_operation (code, mode, mode_arg0,
4324 const_arg0 ? const_arg0 : folded_arg0,
4325 const_arg1 ? const_arg1 : folded_arg1,
4326 const_arg2 ? const_arg2 : XEXP (x, 2));
4333 return new ? new : x;
4336 /* Return a constant value currently equivalent to X.
4337 Return 0 if we don't know one. */
4340 equiv_constant (rtx x)
4343 && REGNO_QTY_VALID_P (REGNO (x)))
4345 int x_q = REG_QTY (REGNO (x));
4346 struct qty_table_elem *x_ent = &qty_table[x_q];
4348 if (x_ent->const_rtx)
4349 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
4352 if (x == 0 || CONSTANT_P (x))
4355 /* If X is a MEM, try to fold it outside the context of any insn to see if
4356 it might be equivalent to a constant. That handles the case where it
4357 is a constant-pool reference. Then try to look it up in the hash table
4358 in case it is something whose value we have seen before. */
4362 struct table_elt *elt;
4364 x = fold_rtx (x, NULL_RTX);
4368 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
4372 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4373 if (elt->is_const && CONSTANT_P (elt->exp))
4380 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4381 branch. It will be zero if not.
4383 In certain cases, this can cause us to add an equivalence. For example,
4384 if we are following the taken case of
4385 if (i == 2)
4386 we can add the fact that `i' and '2' are now equivalent.
4388 In any case, we can record that this comparison was passed. If the same
4389 comparison is seen later, we will know its value. */
4392 record_jump_equiv (rtx insn, int taken)
4394 int cond_known_true;
4397 enum machine_mode mode, mode0, mode1;
4398 int reversed_nonequality = 0;
4401 /* Ensure this is the right kind of insn. */
4402 if (! any_condjump_p (insn))
4403 return;
4404 set = pc_set (insn);
4406 /* See if this jump condition is known true or false. */
4407 if (taken)
4408 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4409 else
4410 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4412 /* Get the type of comparison being done and the operands being compared.
4413 If we had to reverse a non-equality condition, record that fact so we
4414 know that it isn't valid for floating-point. */
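/* For example, if the branch for (lt X Y) was not taken, all we really
   know is that (lt X Y) is false; treating that as (ge X Y) would be
   wrong for IEEE floating point when either operand is a NaN, which is
   why such a reversal is remembered in REVERSED_NONEQUALITY.  */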
4415 code = GET_CODE (XEXP (SET_SRC (set), 0));
4416 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4417 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4419 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4420 if (! cond_known_true)
4422 code = reversed_comparison_code_parts (code, op0, op1, insn);
4424 /* Don't remember if we can't find the inverse. */
4425 if (code == UNKNOWN)
4426 return;
4428 reversed_nonequality = (code != EQ && code != NE);
4429 /* The mode is the mode of the non-constant. */
4430 mode = mode0;
4431 if (mode1 != VOIDmode)
4432 mode = mode1;
4434 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4437 /* Yet another form of subreg creation. In this case, we want something in
4438 MODE, and we should assume OP has MODE iff it is naturally modeless. */
4441 record_jump_cond_subreg (enum machine_mode mode, rtx op)
4443 enum machine_mode op_mode = GET_MODE (op);
4444 if (op_mode == mode || op_mode == VOIDmode)
4445 return op;
4446 return lowpart_subreg (mode, op, op_mode);
4449 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4450 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4451 Make any useful entries we can with that information. Called from
4452 above function and called recursively. */
4455 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4456 rtx op1, int reversed_nonequality)
4458 unsigned op0_hash, op1_hash;
4459 int op0_in_memory, op1_in_memory;
4460 struct table_elt *op0_elt, *op1_elt;
4462 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4463 we know that they are also equal in the smaller mode (this is also
4464 true for all smaller modes whether or not there is a SUBREG, but
4465 is not worth testing for with no SUBREG). */
4467 /* Note that GET_MODE (op0) may not equal MODE. */
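/* For example (modes chosen only for illustration): if
   (subreg:SI (reg:QI R) 0) is known equal to (reg:SI S), then (reg:QI R)
   is also equal to the QImode low part of (reg:SI S), and that narrower
   equivalence is what the recursive call below records.  */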
4468 if (code == EQ && GET_CODE (op0) == SUBREG
4469 && (GET_MODE_SIZE (GET_MODE (op0))
4470 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4472 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4473 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4475 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4476 reversed_nonequality);
4479 if (code == EQ && GET_CODE (op1) == SUBREG
4480 && (GET_MODE_SIZE (GET_MODE (op1))
4481 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4483 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4484 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4486 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4487 reversed_nonequality);
4490 /* Similarly, if this is an NE comparison, and either is a SUBREG
4491 making a smaller mode, we know the whole thing is also NE. */
4493 /* Note that GET_MODE (op0) may not equal MODE;
4494 if we test MODE instead, we can get an infinite recursion
4495 alternating between two modes each wider than MODE. */
4497 if (code == NE && GET_CODE (op0) == SUBREG
4498 && subreg_lowpart_p (op0)
4499 && (GET_MODE_SIZE (GET_MODE (op0))
4500 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4502 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4503 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4505 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4506 reversed_nonequality);
4509 if (code == NE && GET_CODE (op1) == SUBREG
4510 && subreg_lowpart_p (op1)
4511 && (GET_MODE_SIZE (GET_MODE (op1))
4512 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4514 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4515 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4517 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4518 reversed_nonequality);
4521 /* Hash both operands. */
4524 hash_arg_in_memory = 0;
4525 op0_hash = HASH (op0, mode);
4526 op0_in_memory = hash_arg_in_memory;
4532 hash_arg_in_memory = 0;
4533 op1_hash = HASH (op1, mode);
4534 op1_in_memory = hash_arg_in_memory;
4539 /* Look up both operands. */
4540 op0_elt = lookup (op0, op0_hash, mode);
4541 op1_elt = lookup (op1, op1_hash, mode);
4543 /* If both operands are already equivalent or if they are not in the
4544 table but are identical, do nothing. */
4545 if ((op0_elt != 0 && op1_elt != 0
4546 && op0_elt->first_same_value == op1_elt->first_same_value)
4547 || op0 == op1 || rtx_equal_p (op0, op1))
4550 /* If we aren't setting two things equal all we can do is save this
4551 comparison. Similarly if this is floating-point. In the latter
4552 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4553 If we record the equality, we might inadvertently delete code
4554 whose intent was to change -0 to +0. */
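/* For example, in
	if (d == 0.0)
	  d = 0.0;
   the store is not dead when D is -0.0, so D and 0.0 must not be treated
   as interchangeable after the comparison.  */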
4556 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4558 struct qty_table_elem *ent;
4561 /* If we reversed a floating-point comparison, if OP0 is not a
4562 register, or if OP1 is neither a register nor a constant, we can't do anything. */
4566 op1 = equiv_constant (op1);
4568 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4569 || !REG_P (op0) || op1 == 0)
4572 /* Put OP0 in the hash table if it isn't already. This gives it a
4573 new quantity number. */
4576 if (insert_regs (op0, NULL, 0))
4578 rehash_using_reg (op0);
4579 op0_hash = HASH (op0, mode);
4581 /* If OP0 is contained in OP1, this changes its hash code
4582 as well. Faster to rehash than to check, except
4583 for the simple case of a constant. */
4584 if (! CONSTANT_P (op1))
4585 op1_hash = HASH (op1, mode);
4588 op0_elt = insert (op0, NULL, op0_hash, mode);
4589 op0_elt->in_memory = op0_in_memory;
4592 qty = REG_QTY (REGNO (op0));
4593 ent = &qty_table[qty];
4595 ent->comparison_code = code;
4598 /* Look it up again--in case op0 and op1 are the same. */
4599 op1_elt = lookup (op1, op1_hash, mode);
4601 /* Put OP1 in the hash table so it gets a new quantity number. */
4604 if (insert_regs (op1, NULL, 0))
4606 rehash_using_reg (op1);
4607 op1_hash = HASH (op1, mode);
4610 op1_elt = insert (op1, NULL, op1_hash, mode);
4611 op1_elt->in_memory = op1_in_memory;
4614 ent->comparison_const = NULL_RTX;
4615 ent->comparison_qty = REG_QTY (REGNO (op1));
4619 ent->comparison_const = op1;
4620 ent->comparison_qty = -1;
4626 /* If either side is still missing an equivalence, make it now,
4627 then merge the equivalences. */
4631 if (insert_regs (op0, NULL, 0))
4633 rehash_using_reg (op0);
4634 op0_hash = HASH (op0, mode);
4637 op0_elt = insert (op0, NULL, op0_hash, mode);
4638 op0_elt->in_memory = op0_in_memory;
4643 if (insert_regs (op1, NULL, 0))
4645 rehash_using_reg (op1);
4646 op1_hash = HASH (op1, mode);
4649 op1_elt = insert (op1, NULL, op1_hash, mode);
4650 op1_elt->in_memory = op1_in_memory;
4653 merge_equiv_classes (op0_elt, op1_elt);
4656 /* CSE processing for one instruction.
4657 First simplify sources and addresses of all assignments
4658 in the instruction, using previously-computed equivalent values.
4659 Then install the new sources and destinations in the table
4660 of available values.
4662 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4663 the insn. It means that INSN is inside a libcall block. In this
4664 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4666 /* Data on one SET contained in the instruction. */
4670 /* The SET rtx itself. */
4672 /* The SET_SRC of the rtx (the original value, if it is changing). */
4674 /* The hash-table element for the SET_SRC of the SET. */
4675 struct table_elt *src_elt;
4676 /* Hash value for the SET_SRC. */
4678 /* Hash value for the SET_DEST. */
4680 /* The SET_DEST, with SUBREG, etc., stripped. */
4682 /* Nonzero if the SET_SRC is in memory. */
4684 /* Nonzero if the SET_SRC contains something
4685 whose value cannot be predicted and understood. */
4687 /* Original machine mode, in case it becomes a CONST_INT.
4688 The size of this field should match the size of the mode
4689 field of struct rtx_def (see rtl.h). */
4690 ENUM_BITFIELD(machine_mode) mode : 8;
4691 /* A constant equivalent for SET_SRC, if any. */
4693 /* Original SET_SRC value used for libcall notes. */
4695 /* Hash value of constant equivalent for SET_SRC. */
4696 unsigned src_const_hash;
4697 /* Table entry for constant equivalent for SET_SRC, if any. */
4698 struct table_elt *src_const_elt;
4702 cse_insn (rtx insn, rtx libcall_insn)
4704 rtx x = PATTERN (insn);
4710 /* Records what this insn does to set CC0. */
4711 rtx this_insn_cc0 = 0;
4712 enum machine_mode this_insn_cc0_mode = VOIDmode;
4716 struct table_elt *src_eqv_elt = 0;
4717 int src_eqv_volatile = 0;
4718 int src_eqv_in_memory = 0;
4719 unsigned src_eqv_hash = 0;
4721 struct set *sets = (struct set *) 0;
4725 /* Find all the SETs and CLOBBERs in this instruction.
4726 Record all the SETs in the array `set' and count them.
4727 Also determine whether there is a CLOBBER that invalidates
4728 all memory references, or all references at varying addresses. */
4732 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4734 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4735 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4736 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4740 if (GET_CODE (x) == SET)
4742 sets = alloca (sizeof (struct set));
4743 n_sets = 1;
4745 /* Ignore SETs that are unconditional jumps.
4746 They never need cse processing, so this does not hurt.
4747 The reason is not efficiency but rather
4748 so that we can test at the end for instructions
4749 that have been simplified to unconditional jumps
4750 and not be misled by unchanged instructions
4751 that were unconditional jumps to begin with. */
4752 if (SET_DEST (x) == pc_rtx
4753 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4754 ;
4756 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4757 The hard function value register is used only once, to copy to
4758 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4759 Ensure we invalidate the destination register. On the 80386 no
4760 other code would invalidate it since it is a fixed_reg.
4761 We need not check the return of apply_change_group; see canon_reg. */
4763 else if (GET_CODE (SET_SRC (x)) == CALL)
4765 canon_reg (SET_SRC (x), insn);
4766 apply_change_group ();
4767 fold_rtx (SET_SRC (x), insn);
4768 invalidate (SET_DEST (x), VOIDmode);
4773 else if (GET_CODE (x) == PARALLEL)
4775 int lim = XVECLEN (x, 0);
4777 sets = alloca (lim * sizeof (struct set));
4779 /* Find all regs explicitly clobbered in this insn,
4780 and ensure they are not replaced with any other regs
4781 elsewhere in this insn.
4782 When a reg that is clobbered is also used for input,
4783 we should presume that that is for a reason,
4784 and we should not substitute some other register
4785 which is not supposed to be clobbered.
4786 Therefore, this loop cannot be merged into the one below
4787 because a CALL may precede a CLOBBER and refer to the
4788 value clobbered. We must not let a canonicalization do
4789 anything in that case. */
4790 for (i = 0; i < lim; i++)
4792 rtx y = XVECEXP (x, 0, i);
4793 if (GET_CODE (y) == CLOBBER)
4795 rtx clobbered = XEXP (y, 0);
4797 if (REG_P (clobbered)
4798 || GET_CODE (clobbered) == SUBREG)
4799 invalidate (clobbered, VOIDmode);
4800 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4801 || GET_CODE (clobbered) == ZERO_EXTRACT)
4802 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4806 for (i = 0; i < lim; i++)
4808 rtx y = XVECEXP (x, 0, i);
4809 if (GET_CODE (y) == SET)
4811 /* As above, we ignore unconditional jumps and call-insns and
4812 ignore the result of apply_change_group. */
4813 if (GET_CODE (SET_SRC (y)) == CALL)
4815 canon_reg (SET_SRC (y), insn);
4816 apply_change_group ();
4817 fold_rtx (SET_SRC (y), insn);
4818 invalidate (SET_DEST (y), VOIDmode);
4820 else if (SET_DEST (y) == pc_rtx
4821 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4822 ;
4823 else
4824 sets[n_sets++].rtl = y;
4826 else if (GET_CODE (y) == CLOBBER)
4828 /* If we clobber memory, canon the address.
4829 This does nothing when a register is clobbered
4830 because we have already invalidated the reg. */
4831 if (MEM_P (XEXP (y, 0)))
4832 canon_reg (XEXP (y, 0), NULL_RTX);
4834 else if (GET_CODE (y) == USE
4835 && ! (REG_P (XEXP (y, 0))
4836 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4837 canon_reg (y, NULL_RTX);
4838 else if (GET_CODE (y) == CALL)
4840 /* The result of apply_change_group can be ignored; see
4842 canon_reg (y, insn);
4843 apply_change_group ();
4848 else if (GET_CODE (x) == CLOBBER)
4850 if (MEM_P (XEXP (x, 0)))
4851 canon_reg (XEXP (x, 0), NULL_RTX);
4854 /* Canonicalize a USE of a pseudo register or memory location. */
4855 else if (GET_CODE (x) == USE
4856 && ! (REG_P (XEXP (x, 0))
4857 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4858 canon_reg (XEXP (x, 0), NULL_RTX);
4859 else if (GET_CODE (x) == CALL)
4861 /* The result of apply_change_group can be ignored; see canon_reg. */
4862 canon_reg (x, insn);
4863 apply_change_group ();
4867 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4868 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4869 is handled specially for this case, and if it isn't set, then there will
4870 be no equivalence for the destination. */
4871 if (n_sets == 1 && REG_NOTES (insn) != 0
4872 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4873 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4874 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4876 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4877 XEXP (tem, 0) = src_eqv;
4880 /* Canonicalize sources and addresses of destinations.
4881 We do this in a separate pass to avoid problems when a MATCH_DUP is
4882 present in the insn pattern. In that case, we want to ensure that
4883 we don't break the duplicate nature of the pattern. So we will replace
4884 both operands at the same time. Otherwise, we would fail to find an
4885 equivalent substitution in the loop calling validate_change below.
4887 We used to suppress canonicalization of DEST if it appears in SRC,
4888 but we don't do this any more. */
4890 for (i = 0; i < n_sets; i++)
4892 rtx dest = SET_DEST (sets[i].rtl);
4893 rtx src = SET_SRC (sets[i].rtl);
4894 rtx new = canon_reg (src, insn);
4897 sets[i].orig_src = src;
4898 if ((REG_P (new) && REG_P (src)
4899 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4900 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4901 || (insn_code = recog_memoized (insn)) < 0
4902 || insn_data[insn_code].n_dups > 0)
4903 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4905 SET_SRC (sets[i].rtl) = new;
4907 if (GET_CODE (dest) == ZERO_EXTRACT)
4909 validate_change (insn, &XEXP (dest, 1),
4910 canon_reg (XEXP (dest, 1), insn), 1);
4911 validate_change (insn, &XEXP (dest, 2),
4912 canon_reg (XEXP (dest, 2), insn), 1);
4915 while (GET_CODE (dest) == SUBREG
4916 || GET_CODE (dest) == ZERO_EXTRACT
4917 || GET_CODE (dest) == STRICT_LOW_PART)
4918 dest = XEXP (dest, 0);
4921 canon_reg (dest, insn);
4924 /* Now that we have done all the replacements, we can apply the change
4925 group and see if they all work. Note that this will cause some
4926 canonicalizations that would have worked individually not to be applied
4927 because some other canonicalization didn't work, but this should not occur often.
4930 The result of apply_change_group can be ignored; see canon_reg. */
4932 apply_change_group ();
4934 /* Set sets[i].src_elt to the class each source belongs to.
4935 Detect assignments from or to volatile things
4936 and set set[i] to zero so they will be ignored
4937 in the rest of this function.
4939 Nothing in this loop changes the hash table or the register chains. */
4941 for (i = 0; i < n_sets; i++)
4945 struct table_elt *elt = 0, *p;
4946 enum machine_mode mode;
4949 rtx src_related = 0;
4950 struct table_elt *src_const_elt = 0;
4951 int src_cost = MAX_COST;
4952 int src_eqv_cost = MAX_COST;
4953 int src_folded_cost = MAX_COST;
4954 int src_related_cost = MAX_COST;
4955 int src_elt_cost = MAX_COST;
4956 int src_regcost = MAX_COST;
4957 int src_eqv_regcost = MAX_COST;
4958 int src_folded_regcost = MAX_COST;
4959 int src_related_regcost = MAX_COST;
4960 int src_elt_regcost = MAX_COST;
4961 /* Set nonzero if we need to call force_const_mem on the
4962 contents of src_folded before using it. */
4963 int src_folded_force_flag = 0;
4965 dest = SET_DEST (sets[i].rtl);
4966 src = SET_SRC (sets[i].rtl);
4968 /* If SRC is a constant that has no machine mode,
4969 hash it with the destination's machine mode.
4970 This way we can keep different modes separate. */
4972 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4973 sets[i].mode = mode;
4977 enum machine_mode eqvmode = mode;
4978 if (GET_CODE (dest) == STRICT_LOW_PART)
4979 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4981 hash_arg_in_memory = 0;
4982 src_eqv_hash = HASH (src_eqv, eqvmode);
4984 /* Find the equivalence class for the equivalent expression. */
4987 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4989 src_eqv_volatile = do_not_record;
4990 src_eqv_in_memory = hash_arg_in_memory;
4993 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4994 value of the INNER register, not the destination. So it is not
4995 a valid substitution for the source. But save it for later. */
4996 if (GET_CODE (dest) == STRICT_LOW_PART)
4999 src_eqv_here = src_eqv;
5001 /* Simplify any foldable subexpressions in SRC. Then get the fully-
5002 simplified result, which may not necessarily be valid. */
5003 src_folded = fold_rtx (src, insn);
5006 /* ??? This caused bad code to be generated for the m68k port with -O2.
5007 Suppose src is (CONST_INT -1), and that after truncation src_folded
5008 is (CONST_INT 3). Suppose src_folded is then used for src_const.
5009 At the end we will add src and src_const to the same equivalence
5010 class. We now have 3 and -1 in the same equivalence class. This
5011 causes later instructions to be mis-optimized. */
5012 /* If storing a constant in a bitfield, pre-truncate the constant
5013 so we will be able to record it later. */
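/* For example, storing (const_int 0x1ff) into a ZERO_EXTRACT of width 8
   leaves the field holding 0xff, so 0xff is the value worth recording
   here.  */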
5014 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5016 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5018 if (GET_CODE (src) == CONST_INT
5019 && GET_CODE (width) == CONST_INT
5020 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5021 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5023 src = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5024 << INTVAL (width)) - 1));
5028 /* Compute SRC's hash code, and also notice if it
5029 should not be recorded at all. In that case,
5030 prevent any further processing of this assignment. */
5032 hash_arg_in_memory = 0;
5035 sets[i].src_hash = HASH (src, mode);
5036 sets[i].src_volatile = do_not_record;
5037 sets[i].src_in_memory = hash_arg_in_memory;
5039 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5040 a pseudo, do not record SRC. Using SRC as a replacement for
5041 anything else will be incorrect in that situation. Note that
5042 this usually occurs only for stack slots, in which case all the
5043 RTL would be referring to SRC, so we don't lose any optimization
5044 opportunities by not having SRC in the hash table. */
5046 if (MEM_P (src)
5047 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5048 && REG_P (dest)
5049 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5050 sets[i].src_volatile = 1;
5053 /* It is no longer clear why we used to do this, but it doesn't
5054 appear to still be needed. So let's try without it since this
5055 code hurts cse'ing widened ops. */
5056 /* If source is a paradoxical subreg (such as QI treated as an SI),
5057 treat it as volatile. It may do the work of an SI in one context
5058 where the extra bits are not being used, but cannot replace an SI in general. */
5060 if (GET_CODE (src) == SUBREG
5061 && (GET_MODE_SIZE (GET_MODE (src))
5062 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5063 sets[i].src_volatile = 1;
5066 /* Locate all possible equivalent forms for SRC. Try to replace
5067 SRC in the insn with each cheaper equivalent.
5069 We have the following types of equivalents: SRC itself, a folded
5070 version, a value given in a REG_EQUAL note, or a value related
5073 Each of these equivalents may be part of an additional class
5074 of equivalents (if more than one is in the table, they must be in
5075 the same class; we check for this).
5077 If the source is volatile, we don't do any table lookups.
5079 We note any constant equivalent for possible later use in a
5082 if (!sets[i].src_volatile)
5083 elt = lookup (src, sets[i].src_hash, mode);
5085 sets[i].src_elt = elt;
5087 if (elt && src_eqv_here && src_eqv_elt)
5089 if (elt->first_same_value != src_eqv_elt->first_same_value)
5091 /* The REG_EQUAL is indicating that two formerly distinct
5092 classes are now equivalent. So merge them. */
5093 merge_equiv_classes (elt, src_eqv_elt);
5094 src_eqv_hash = HASH (src_eqv, elt->mode);
5095 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5101 else if (src_eqv_elt)
5104 /* Try to find a constant somewhere and record it in `src_const'.
5105 Record its table element, if any, in `src_const_elt'. Look in
5106 any known equivalences first. (If the constant is not in the
5107 table, also set `sets[i].src_const_hash'). */
5109 for (p = elt->first_same_value; p; p = p->next_same_value)
5113 src_const_elt = elt;
5118 && (CONSTANT_P (src_folded)
5119 /* Consider (minus (label_ref L1) (label_ref L2)) as
5120 "constant" here so we will record it. This allows us
5121 to fold switch statements when an ADDR_DIFF_VEC is used. */
5122 || (GET_CODE (src_folded) == MINUS
5123 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5124 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5125 src_const = src_folded, src_const_elt = elt;
5126 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5127 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5129 /* If we don't know if the constant is in the table, get its
5130 hash code and look it up. */
5131 if (src_const && src_const_elt == 0)
5133 sets[i].src_const_hash = HASH (src_const, mode);
5134 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5137 sets[i].src_const = src_const;
5138 sets[i].src_const_elt = src_const_elt;
5140 /* If the constant and our source are both in the table, mark them as
5141 equivalent. Otherwise, if a constant is in the table but the source
5142 isn't, set ELT to it. */
5143 if (src_const_elt && elt
5144 && src_const_elt->first_same_value != elt->first_same_value)
5145 merge_equiv_classes (elt, src_const_elt);
5146 else if (src_const_elt && elt == 0)
5147 elt = src_const_elt;
5149 /* See if there is a register linearly related to a constant
5150 equivalent of SRC. */
5152 && (GET_CODE (src_const) == CONST
5153 || (src_const_elt && src_const_elt->related_value != 0)))
5155 src_related = use_related_value (src_const, src_const_elt);
5158 struct table_elt *src_related_elt
5159 = lookup (src_related, HASH (src_related, mode), mode);
5160 if (src_related_elt && elt)
5162 if (elt->first_same_value
5163 != src_related_elt->first_same_value)
5164 /* This can occur when we previously saw a CONST
5165 involving a SYMBOL_REF and then see the SYMBOL_REF
5166 twice. Merge the involved classes. */
5167 merge_equiv_classes (elt, src_related_elt);
5170 src_related_elt = 0;
5172 else if (src_related_elt && elt == 0)
5173 elt = src_related_elt;
5177 /* See if we have a CONST_INT that is already in a register in a wider mode. */
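/* For example (mode and register chosen only for illustration): if
   (const_int 7) is already known to be in (reg:DI R) and we need it in
   SImode, the SImode low part of (reg:DI R) can be used instead of
   loading the constant again.  */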
5180 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5181 && GET_MODE_CLASS (mode) == MODE_INT
5182 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5184 enum machine_mode wider_mode;
5186 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5187 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5188 && src_related == 0;
5189 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5191 struct table_elt *const_elt
5192 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5197 for (const_elt = const_elt->first_same_value;
5198 const_elt; const_elt = const_elt->next_same_value)
5199 if (REG_P (const_elt->exp))
5201 src_related = gen_lowpart (mode, const_elt->exp);
5208 /* Another possibility is that we have an AND with a constant in
5209 a mode narrower than a word. If so, it might have been generated
5210 as part of an "if" which would narrow the AND. If we already
5211 have done the AND in a wider mode, we can use a SUBREG of that value. */
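/* For example, if we need (and:QI X (const_int 15)) and the corresponding
   SImode AND with 15 has already been computed into a register, the QImode
   low part of that register can be used instead.  */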
5214 if (flag_expensive_optimizations && ! src_related
5215 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5216 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5218 enum machine_mode tmode;
5219 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5221 for (tmode = GET_MODE_WIDER_MODE (mode);
5222 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5223 tmode = GET_MODE_WIDER_MODE (tmode))
5225 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
5226 struct table_elt *larger_elt;
5230 PUT_MODE (new_and, tmode);
5231 XEXP (new_and, 0) = inner;
5232 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5233 if (larger_elt == 0)
5236 for (larger_elt = larger_elt->first_same_value;
5237 larger_elt; larger_elt = larger_elt->next_same_value)
5238 if (REG_P (larger_elt->exp))
5241 src_related = gen_lowpart (mode, larger_elt->exp);
5251 #ifdef LOAD_EXTEND_OP
5252 /* See if a MEM has already been loaded with a widening operation;
5253 if it has, we can use a subreg of that. Many CISC machines
5254 also have such operations, but this is only likely to be
5255 beneficial on these machines. */
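/* For example, on a machine where QImode loads are always zero-extended,
   a previously recorded (zero_extend:SI (mem:QI A)) that lives in a
   register lets a later read of (mem:QI A) be replaced by the QImode low
   part of that register.  */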
5257 if (flag_expensive_optimizations && src_related == 0
5258 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5259 && GET_MODE_CLASS (mode) == MODE_INT
5260 && MEM_P (src) && ! do_not_record
5261 && LOAD_EXTEND_OP (mode) != UNKNOWN)
5263 struct rtx_def memory_extend_buf;
5264 rtx memory_extend_rtx = &memory_extend_buf;
5265 enum machine_mode tmode;
5267 /* Set what we are trying to extend and the operation it might
5268 have been extended with. */
5269 memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
5270 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5271 XEXP (memory_extend_rtx, 0) = src;
5273 for (tmode = GET_MODE_WIDER_MODE (mode);
5274 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5275 tmode = GET_MODE_WIDER_MODE (tmode))
5277 struct table_elt *larger_elt;
5279 PUT_MODE (memory_extend_rtx, tmode);
5280 larger_elt = lookup (memory_extend_rtx,
5281 HASH (memory_extend_rtx, tmode), tmode);
5282 if (larger_elt == 0)
5285 for (larger_elt = larger_elt->first_same_value;
5286 larger_elt; larger_elt = larger_elt->next_same_value)
5287 if (REG_P (larger_elt->exp))
5289 src_related = gen_lowpart (mode, larger_elt->exp);
5298 #endif /* LOAD_EXTEND_OP */
5300 if (src == src_folded)
5303 /* At this point, ELT, if nonzero, points to a class of expressions
5304 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5305 and SRC_RELATED, if nonzero, each contain additional equivalent
5306 expressions. Prune these latter expressions by deleting expressions
5307 already in the equivalence class.
5309 Check for an equivalent identical to the destination. If found,
5310 this is the preferred equivalent since it will likely lead to
5311 elimination of the insn. Indicate this by placing it in `src_related'. */
5315 elt = elt->first_same_value;
5316 for (p = elt; p; p = p->next_same_value)
5318 enum rtx_code code = GET_CODE (p->exp);
5320 /* If the expression is not valid, ignore it. Then we do not
5321 have to check for validity below. In most cases, we can use
5322 `rtx_equal_p', since canonicalization has already been done. */
5323 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
5326 /* Also skip paradoxical subregs, unless that's what we're looking for. */
5329 && (GET_MODE_SIZE (GET_MODE (p->exp))
5330 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5332 && GET_CODE (src) == SUBREG
5333 && GET_MODE (src) == GET_MODE (p->exp)
5334 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5335 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5338 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5340 else if (src_folded && GET_CODE (src_folded) == code
5341 && rtx_equal_p (src_folded, p->exp))
5343 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5344 && rtx_equal_p (src_eqv_here, p->exp))
5346 else if (src_related && GET_CODE (src_related) == code
5347 && rtx_equal_p (src_related, p->exp))
5350 /* This is the same as the destination of the insn; we want
5351 to prefer it. Copy it to src_related. The code below will
5352 then give it a negative cost. */
5353 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5357 /* Find the cheapest valid equivalent, trying all the available
5358 possibilities. Prefer items not in the hash table to ones
5359 that are when they are equal cost. Note that we can never
5360 worsen an insn as the current contents will also succeed.
5361 If we find an equivalent identical to the destination, use it as best,
5362 since this insn will probably be eliminated in that case. */
5365 if (rtx_equal_p (src, dest))
5366 src_cost = src_regcost = -1;
5369 src_cost = COST (src);
5370 src_regcost = approx_reg_cost (src);
5376 if (rtx_equal_p (src_eqv_here, dest))
5377 src_eqv_cost = src_eqv_regcost = -1;
5380 src_eqv_cost = COST (src_eqv_here);
5381 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5387 if (rtx_equal_p (src_folded, dest))
5388 src_folded_cost = src_folded_regcost = -1;
5391 src_folded_cost = COST (src_folded);
5392 src_folded_regcost = approx_reg_cost (src_folded);
5398 if (rtx_equal_p (src_related, dest))
5399 src_related_cost = src_related_regcost = -1;
5402 src_related_cost = COST (src_related);
5403 src_related_regcost = approx_reg_cost (src_related);
5407 /* If this was an indirect jump insn, a known label will really be
5408 cheaper even though it looks more expensive. */
5409 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5410 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5412 /* Terminate loop when replacement made. This must terminate since
5413 the current contents will be tested and will always be valid. */
5418 /* Skip invalid entries. */
5419 while (elt && !REG_P (elt->exp)
5420 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5421 elt = elt->next_same_value;
5423 /* A paradoxical subreg would be bad here: it'll be the right
5424 size, but later may be adjusted so that the upper bits aren't
5425 what we want. So reject it. */
5427 && GET_CODE (elt->exp) == SUBREG
5428 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5429 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5430 /* It is okay, though, if the rtx we're trying to match
5431 will ignore any of the bits we can't predict. */
5433 && GET_CODE (src) == SUBREG
5434 && GET_MODE (src) == GET_MODE (elt->exp)
5435 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5436 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5438 elt = elt->next_same_value;
5444 src_elt_cost = elt->cost;
5445 src_elt_regcost = elt->regcost;
5448 /* Find cheapest and skip it for the next time. For items
5449 of equal cost, use this order:
5450 src_folded, src, src_eqv, src_related and hash table entry. */
5452 && preferable (src_folded_cost, src_folded_regcost,
5453 src_cost, src_regcost) <= 0
5454 && preferable (src_folded_cost, src_folded_regcost,
5455 src_eqv_cost, src_eqv_regcost) <= 0
5456 && preferable (src_folded_cost, src_folded_regcost,
5457 src_related_cost, src_related_regcost) <= 0
5458 && preferable (src_folded_cost, src_folded_regcost,
5459 src_elt_cost, src_elt_regcost) <= 0)
5461 trial = src_folded, src_folded_cost = MAX_COST;
5462 if (src_folded_force_flag)
5464 rtx forced = force_const_mem (mode, trial);
5470 && preferable (src_cost, src_regcost,
5471 src_eqv_cost, src_eqv_regcost) <= 0
5472 && preferable (src_cost, src_regcost,
5473 src_related_cost, src_related_regcost) <= 0
5474 && preferable (src_cost, src_regcost,
5475 src_elt_cost, src_elt_regcost) <= 0)
5476 trial = src, src_cost = MAX_COST;
5477 else if (src_eqv_here
5478 && preferable (src_eqv_cost, src_eqv_regcost,
5479 src_related_cost, src_related_regcost) <= 0
5480 && preferable (src_eqv_cost, src_eqv_regcost,
5481 src_elt_cost, src_elt_regcost) <= 0)
5482 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5483 else if (src_related
5484 && preferable (src_related_cost, src_related_regcost,
5485 src_elt_cost, src_elt_regcost) <= 0)
5486 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5489 trial = copy_rtx (elt->exp);
5490 elt = elt->next_same_value;
5491 src_elt_cost = MAX_COST;
5494 /* We don't normally have an insn matching (set (pc) (pc)), so
5495 check for this separately here. We will delete such an insn below.
5498 For other cases such as a table jump or conditional jump
5499 where we know the ultimate target, go ahead and replace the
5500 operand. While that may not make a valid insn, we will
5501 reemit the jump below (and also insert any necessary barriers). */
5503 if (n_sets == 1 && dest == pc_rtx
5505 || (GET_CODE (trial) == LABEL_REF
5506 && ! condjump_p (insn))))
5508 /* Don't substitute non-local labels, this confuses CFG. */
5509 if (GET_CODE (trial) == LABEL_REF
5510 && LABEL_REF_NONLOCAL_P (trial))
5513 SET_SRC (sets[i].rtl) = trial;
5514 cse_jumps_altered = 1;
5518 /* Reject certain invalid forms of CONST that we create. */
5519 else if (CONSTANT_P (trial)
5520 && GET_CODE (trial) == CONST
5521 /* Reject cases that will cause decode_rtx_const to
5522 die. On the alpha when simplifying a switch, we
5523 get (const (truncate (minus (label_ref) (label_ref)))). */
5525 && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5526 /* Likewise on IA-64, except without the truncate. */
5528 || (GET_CODE (XEXP (trial, 0)) == MINUS
5529 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5530 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5531 /* Do nothing for this case. */
5534 /* Look for a substitution that makes a valid insn. */
5535 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5537 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5539 /* If we just made a substitution inside a libcall, then we
5540 need to make the same substitution in any notes attached
5541 to the RETVAL insn. */
5543 && (REG_P (sets[i].orig_src)
5544 || GET_CODE (sets[i].orig_src) == SUBREG
5545 || MEM_P (sets[i].orig_src)))
5547 rtx note = find_reg_equal_equiv_note (libcall_insn);
5549 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
5554 /* The result of apply_change_group can be ignored; see
5557 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5558 apply_change_group ();
5562 /* If we previously found constant pool entries for
5563 constants and this is a constant, try making a
5564 pool entry. Put it in src_folded unless we already have done
5565 this since that is where it likely came from. */
5567 else if (constant_pool_entries_cost
5568 && CONSTANT_P (trial)
5570 || (!MEM_P (src_folded)
5571 && ! src_folded_force_flag))
5572 && GET_MODE_CLASS (mode) != MODE_CC
5573 && mode != VOIDmode)
5575 src_folded_force_flag = 1;
5577 src_folded_cost = constant_pool_entries_cost;
5578 src_folded_regcost = constant_pool_entries_regcost;
5582 src = SET_SRC (sets[i].rtl);
5584 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5585 However, there is an important exception: If both are registers
5586 that are not the head of their equivalence class, replace SET_SRC
5587 with the head of the class. If we do not do this, we will have
5588 both registers live over a portion of the basic block. This way,
5589 their lifetimes will likely abut instead of overlapping. */
5591 && REGNO_QTY_VALID_P (REGNO (dest)))
5593 int dest_q = REG_QTY (REGNO (dest));
5594 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5596 if (dest_ent->mode == GET_MODE (dest)
5597 && dest_ent->first_reg != REGNO (dest)
5598 && REG_P (src) && REGNO (src) == REGNO (dest)
5599 /* Don't do this if the original insn had a hard reg as
5600 SET_SRC or SET_DEST. */
5601 && (!REG_P (sets[i].src)
5602 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5603 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5604 /* We can't call canon_reg here because it won't do anything if
5605 SRC is a hard register. */
5607 int src_q = REG_QTY (REGNO (src));
5608 struct qty_table_elem *src_ent = &qty_table[src_q];
5609 int first = src_ent->first_reg;
5611 rtx new_src = (first >= FIRST_PSEUDO_REGISTER
5612 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5614 /* We must use validate-change even for this, because this
5615 might be a special no-op instruction, suitable only to tag notes onto. */
5617 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5620 /* If we had a constant that is cheaper than what we are now
5621 setting SRC to, use that constant. We ignored it when we
5622 thought we could make this into a no-op. */
5623 if (src_const && COST (src_const) < COST (src)
5624 && validate_change (insn, &SET_SRC (sets[i].rtl),
5631 /* If we made a change, recompute SRC values. */
5632 if (src != sets[i].src)
5636 hash_arg_in_memory = 0;
5638 sets[i].src_hash = HASH (src, mode);
5639 sets[i].src_volatile = do_not_record;
5640 sets[i].src_in_memory = hash_arg_in_memory;
5641 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5644 /* If this is a single SET, we are setting a register, and we have an
5645 equivalent constant, we want to add a REG_NOTE. We don't want
5646 to write a REG_EQUAL note for a constant pseudo since verifying that
5647 that pseudo hasn't been eliminated is a pain. Such a note also
5648 won't help anything.
5650 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5651 which can be created for a reference to a compile time computable
5652 entry in a jump table. */
5654 if (n_sets == 1 && src_const && REG_P (dest)
5655 && !REG_P (src_const)
5656 && ! (GET_CODE (src_const) == CONST
5657 && GET_CODE (XEXP (src_const, 0)) == MINUS
5658 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5659 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5661 /* We only want a REG_EQUAL note if src_const != src. */
5662 if (! rtx_equal_p (src, src_const))
5664 /* Make sure that the rtx is not shared. */
5665 src_const = copy_rtx (src_const);
5667 /* Record the actual constant value in a REG_EQUAL note,
5668 making a new one if one does not already exist. */
5669 set_unique_reg_note (insn, REG_EQUAL, src_const);
5673 /* Now deal with the destination. */
5676 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5677 while (GET_CODE (dest) == SUBREG
5678 || GET_CODE (dest) == ZERO_EXTRACT
5679 || GET_CODE (dest) == STRICT_LOW_PART)
5680 dest = XEXP (dest, 0);
5682 sets[i].inner_dest = dest;
5686 #ifdef PUSH_ROUNDING
5687 /* Stack pushes invalidate the stack pointer. */
5688 rtx addr = XEXP (dest, 0);
5689 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5690 && XEXP (addr, 0) == stack_pointer_rtx)
5691 invalidate (stack_pointer_rtx, Pmode);
5693 dest = fold_rtx (dest, insn);
5696 /* Compute the hash code of the destination now,
5697 before the effects of this instruction are recorded,
5698 since the register values used in the address computation
5699 are those before this instruction. */
5700 sets[i].dest_hash = HASH (dest, mode);
5702 /* Don't enter a bit-field in the hash table
5703 because the value in it after the store
5704 may not equal what was stored, due to truncation. */
5706 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5708 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5710 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5711 && GET_CODE (width) == CONST_INT
5712 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5713 && ! (INTVAL (src_const)
5714 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5715 /* Exception: if the value is constant,
5716 and it won't be truncated, record it. */
5720 /* This is chosen so that the destination will be invalidated
5721 but no new value will be recorded.
5722 We must invalidate because sometimes constant
5723 values can be recorded for bitfields. */
5724 sets[i].src_elt = 0;
5725 sets[i].src_volatile = 1;
5731 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete the insn. */
5733 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5735 /* One less use of the label this insn used to jump to. */
5737 cse_jumps_altered = 1;
5738 /* No more processing for this set. */
5742 /* If this SET is now setting PC to a label, we know it used to
5743 be a conditional or computed branch. */
5744 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5745 && !LABEL_REF_NONLOCAL_P (src))
5747 /* Now emit a BARRIER after the unconditional jump. */
5748 if (NEXT_INSN (insn) == 0
5749 || !BARRIER_P (NEXT_INSN (insn)))
5750 emit_barrier_after (insn);
5752 /* We reemit the jump in as many cases as possible just in
5753 case the form of an unconditional jump is significantly
5754 different than a computed jump or conditional jump.
5756 If this insn has multiple sets, then reemitting the
5757 jump is nontrivial. So instead we just force rerecognition
5758 and hope for the best. */
5763 new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5764 JUMP_LABEL (new) = XEXP (src, 0);
5765 LABEL_NUSES (XEXP (src, 0))++;
5767 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5768 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5771 XEXP (note, 1) = NULL_RTX;
5772 REG_NOTES (new) = note;
5778 /* Now emit a BARRIER after the unconditional jump. */
5779 if (NEXT_INSN (insn) == 0
5780 || !BARRIER_P (NEXT_INSN (insn)))
5781 emit_barrier_after (insn);
5784 INSN_CODE (insn) = -1;
5786 /* Do not bother deleting any unreachable code,
5787 let jump/flow do that. */
5789 cse_jumps_altered = 1;
5793 /* If destination is volatile, invalidate it and then do no further
5794 processing for this assignment. */
5796 else if (do_not_record)
5798 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5799 invalidate (dest, VOIDmode);
5800 else if (MEM_P (dest))
5801 invalidate (dest, VOIDmode);
5802 else if (GET_CODE (dest) == STRICT_LOW_PART
5803 || GET_CODE (dest) == ZERO_EXTRACT)
5804 invalidate (XEXP (dest, 0), GET_MODE (dest));
5808 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5809 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5812 /* If setting CC0, record what it was set to, or a constant, if it
5813 is equivalent to a constant. If it is being set to a floating-point
5814 value, make a COMPARE with the appropriate constant of 0. If we
5815 don't do this, later code can interpret this as a test against
5816 const0_rtx, which can cause problems if we try to put it into an
5817 insn as a floating-point operand. */
5818 if (dest == cc0_rtx)
5820 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5821 this_insn_cc0_mode = mode;
5822 if (FLOAT_MODE_P (mode))
5823 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5829 /* Now enter all non-volatile source expressions in the hash table
5830 if they are not already present.
5831 Record their equivalence classes in src_elt.
5832 This way we can insert the corresponding destinations into
5833 the same classes even if the actual sources are no longer in them
5834 (having been invalidated). */
5836 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5837 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5839 struct table_elt *elt;
5840 struct table_elt *classp = sets[0].src_elt;
5841 rtx dest = SET_DEST (sets[0].rtl);
5842 enum machine_mode eqvmode = GET_MODE (dest);
5844 if (GET_CODE (dest) == STRICT_LOW_PART)
5846 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5849 if (insert_regs (src_eqv, classp, 0))
5851 rehash_using_reg (src_eqv);
5852 src_eqv_hash = HASH (src_eqv, eqvmode);
5854 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5855 elt->in_memory = src_eqv_in_memory;
5858 /* Check to see if src_eqv_elt is the same as a set source which
5859 does not yet have an elt, and if so set the elt of the set source
5861 for (i = 0; i < n_sets; i++)
5862 if (sets[i].rtl && sets[i].src_elt == 0
5863 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5864 sets[i].src_elt = src_eqv_elt;
5867 for (i = 0; i < n_sets; i++)
5868 if (sets[i].rtl && ! sets[i].src_volatile
5869 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5871 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5873 /* REG_EQUAL in setting a STRICT_LOW_PART
5874 gives an equivalent for the entire destination register,
5875 not just for the subreg being stored in now.
5876 This is a more interesting equivalence, so we arrange later
5877 to treat the entire reg as the destination. */
5878 sets[i].src_elt = src_eqv_elt;
5879 sets[i].src_hash = src_eqv_hash;
5883 /* Insert source and constant equivalent into hash table, if not already present. */
5885 struct table_elt *classp = src_eqv_elt;
5886 rtx src = sets[i].src;
5887 rtx dest = SET_DEST (sets[i].rtl);
5888 enum machine_mode mode
5889 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5891 /* It's possible that we have a source value known to be
5892 constant but don't have a REG_EQUAL note on the insn.
5893 Lack of a note will mean src_eqv_elt will be NULL. This
5894 can happen where we've generated a SUBREG to access a
5895 CONST_INT that is already in a register in a wider mode.
5896 Ensure that the source expression is put in the proper constant class. */
5899 classp = sets[i].src_const_elt;
5901 if (sets[i].src_elt == 0)
5903 /* Don't put a hard register source into the table if this is
5904 the last insn of a libcall. In this case, we only need
5905 to put src_eqv_elt in src_elt. */
5906 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5908 struct table_elt *elt;
5910 /* Note that these insert_regs calls cannot remove
5911 any of the src_elt's, because they would have failed to
5912 match if not still valid. */
5913 if (insert_regs (src, classp, 0))
5915 rehash_using_reg (src);
5916 sets[i].src_hash = HASH (src, mode);
5918 elt = insert (src, classp, sets[i].src_hash, mode);
5919 elt->in_memory = sets[i].src_in_memory;
5920 sets[i].src_elt = classp = elt;
5923 sets[i].src_elt = classp;
5925 if (sets[i].src_const && sets[i].src_const_elt == 0
5926 && src != sets[i].src_const
5927 && ! rtx_equal_p (sets[i].src_const, src))
5928 sets[i].src_elt = insert (sets[i].src_const, classp,
5929 sets[i].src_const_hash, mode);
5932 else if (sets[i].src_elt == 0)
5933 /* If we did not insert the source into the hash table (e.g., it was
5934 volatile), note the equivalence class for the REG_EQUAL value, if any,
5935 so that the destination goes into that class. */
5936 sets[i].src_elt = src_eqv_elt;
5938 invalidate_from_clobbers (x);
5940 /* Some registers are invalidated by subroutine calls. Memory is
5941 invalidated by non-constant calls. */
5945 if (! CONST_OR_PURE_CALL_P (insn))
5946 invalidate_memory ();
5947 invalidate_for_call ();
5950 /* Now invalidate everything set by this instruction.
5951 If a SUBREG or other funny destination is being set,
5952 sets[i].rtl is still nonzero, so here we invalidate the reg
5953 a part of which is being set. */
5955 for (i = 0; i < n_sets; i++)
5958 /* We can't use the inner dest, because the mode associated with
5959 a ZERO_EXTRACT is significant. */
5960 rtx dest = SET_DEST (sets[i].rtl);
5962 /* Needed for registers to remove the register from its
5963 previous quantity's chain.
5964 Needed for memory if this is a nonvarying address, unless
5965 we have just done an invalidate_memory that covers even those. */
5966 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5967 invalidate (dest, VOIDmode);
5968 else if (MEM_P (dest))
5969 invalidate (dest, VOIDmode);
5970 else if (GET_CODE (dest) == STRICT_LOW_PART
5971 || GET_CODE (dest) == ZERO_EXTRACT)
5972 invalidate (XEXP (dest, 0), GET_MODE (dest));
5975 /* A volatile ASM invalidates everything. */
5976 if (NONJUMP_INSN_P (insn)
5977 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5978 && MEM_VOLATILE_P (PATTERN (insn)))
5979 flush_hash_table ();
5981 /* Make sure registers mentioned in destinations
5982 are safe for use in an expression to be inserted.
5983 This removes from the hash table
5984 any invalid entry that refers to one of these registers.
5986 We don't care about the return value from mention_regs because
5987 we are going to hash the SET_DEST values unconditionally. */
5989 for (i = 0; i < n_sets; i++)
5993 rtx x = SET_DEST (sets[i].rtl);
5999 /* We used to rely on all references to a register becoming
6000 inaccessible when a register changes to a new quantity,
6001 since that changes the hash code. However, that is not
6002 safe, since after HASH_SIZE new quantities we get a
6003 hash 'collision' of a register with its own invalid
6004 entries. And since SUBREGs have been changed not to
6005 change their hash code with the hash code of the register,
6006 it wouldn't work any longer at all. So we have to check
6007 for any invalid references lying around now.
6008 This code is similar to the REG case in mention_regs,
6009 but it knows that reg_tick has been incremented, and
6010 it leaves reg_in_table as -1. */
6011 unsigned int regno = REGNO (x);
6012 unsigned int endregno
6013 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6014 : hard_regno_nregs[regno][GET_MODE (x)]);
6017 for (i = regno; i < endregno; i++)
6019 if (REG_IN_TABLE (i) >= 0)
6021 remove_invalid_refs (i);
6022 REG_IN_TABLE (i) = -1;
6029 /* We may have just removed some of the src_elt's from the hash table.
6030 So replace each one with the current head of the same class. */
6032 for (i = 0; i < n_sets; i++)
6035 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6036 /* If elt was removed, find current head of same class,
6037 or 0 if nothing remains of that class. */
6039 struct table_elt *elt = sets[i].src_elt;
6041 while (elt && elt->prev_same_value)
6042 elt = elt->prev_same_value;
6044 while (elt && elt->first_same_value == 0)
6045 elt = elt->next_same_value;
6046 sets[i].src_elt = elt ? elt->first_same_value : 0;
6050 /* Now insert the destinations into their equivalence classes. */
6052 for (i = 0; i < n_sets; i++)
6055 rtx dest = SET_DEST (sets[i].rtl);
6056 struct table_elt *elt;
6058 /* Don't record value if we are not supposed to risk allocating
6059 floating-point values in registers that might be wider than
6061 if ((flag_float_store
6063 && FLOAT_MODE_P (GET_MODE (dest)))
6064 /* Don't record BLKmode values, because we don't know the
6065 size of it, and can't be sure that other BLKmode values
6066 have the same or smaller size. */
6067 || GET_MODE (dest) == BLKmode
6068 /* Don't record values of destinations set inside a libcall block
6069 since we might delete the libcall. Things should have been set
6070 up so we won't want to reuse such a value, but we play it safe
6073 /* If we didn't put a REG_EQUAL value or a source into the hash
6074 table, there is no point in recording DEST. */
6075 || sets[i].src_elt == 0
6076 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6077 or SIGN_EXTEND, don't record DEST since it can cause
6078 some tracking to be wrong.
6080 ??? Think about this more later. */
6081 || (GET_CODE (dest) == SUBREG
6082 && (GET_MODE_SIZE (GET_MODE (dest))
6083 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6084 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6085 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6088 /* STRICT_LOW_PART isn't part of the value BEING set,
6089 and neither is the SUBREG inside it.
6090 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6091 if (GET_CODE (dest) == STRICT_LOW_PART)
6092 dest = SUBREG_REG (XEXP (dest, 0));
6094 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
6095 /* Registers must also be inserted into chains for quantities. */
6096 if (insert_regs (dest, sets[i].src_elt, 1))
6098 /* If `insert_regs' changes something, the hash code must be recalculated. */
6100 rehash_using_reg (dest);
6101 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6104 elt = insert (dest, sets[i].src_elt,
6105 sets[i].dest_hash, GET_MODE (dest));
6107 elt->in_memory = (MEM_P (sets[i].inner_dest)
6108 && !MEM_READONLY_P (sets[i].inner_dest));
6110 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6111 narrower than M2, and both M1 and M2 are the same number of words,
6112 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6113 make that equivalence as well.
6115 However, BAR may have equivalences for which gen_lowpart
6116 will produce a simpler value than gen_lowpart applied to
6117 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6118 BAR's equivalences. If we don't get a simplified form, make
6119 the SUBREG. It will not be used in an equivalence, but will
6120 cause two similar assignments to be detected.
6122 Note the loop below will find SUBREG_REG (DEST) since we have
6123 already entered SRC and DEST of the SET in the table. */
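/* A purely illustrative instance of the transformation described above
   (register numbers, modes and the 32-bit word size are assumptions made
   up for this example, and big-endian byte offsets are ignored): given

       (set (subreg:SI (reg:HI 100) 0) (reg:SI 101))

   the SUBREG destination and its inner register occupy the same single
   word, so we would also record

       (reg:HI 100)  equivalent to  (subreg:HI (reg:SI 101) 0)

   letting a later use of the low part of reg 101 be recognized as a
   duplicate of reg 100.  */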
6125 if (GET_CODE (dest) == SUBREG
6126 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6128 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6129 && (GET_MODE_SIZE (GET_MODE (dest))
6130 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6131 && sets[i].src_elt != 0)
6133 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6134 struct table_elt *elt, *classp = 0;
6136 for (elt = sets[i].src_elt->first_same_value; elt;
6137 elt = elt->next_same_value)
6141 struct table_elt *src_elt;
6144 /* Ignore invalid entries. */
6145 if (!REG_P (elt->exp)
6146 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
6149 /* We may have already been playing subreg games. If the
6150 mode is already correct for the destination, use it. */
6151 if (GET_MODE (elt->exp) == new_mode)
6155 /* Calculate big endian correction for the SUBREG_BYTE.
6156 We have already checked that M1 (GET_MODE (dest))
6157 is not narrower than M2 (new_mode). */
6158 if (BYTES_BIG_ENDIAN)
6159 byte = (GET_MODE_SIZE (GET_MODE (dest))
6160 - GET_MODE_SIZE (new_mode));
6162 new_src = simplify_gen_subreg (new_mode, elt->exp,
6163 GET_MODE (dest), byte);
6166 /* The call to simplify_gen_subreg fails if the value
6167 is VOIDmode, yet we can't do any simplification, e.g.
6168 for EXPR_LISTs denoting function call results.
6169 It is invalid to construct a SUBREG with a VOIDmode
6170 SUBREG_REG, hence a zero new_src means we can't do
6171 this substitution. */
6175 src_hash = HASH (new_src, new_mode);
6176 src_elt = lookup (new_src, src_hash, new_mode);
6178 /* Put the new source in the hash table if it isn't already there. */
6182 if (insert_regs (new_src, classp, 0))
6184 rehash_using_reg (new_src);
6185 src_hash = HASH (new_src, new_mode);
6187 src_elt = insert (new_src, classp, src_hash, new_mode);
6188 src_elt->in_memory = elt->in_memory;
6190 else if (classp && classp != src_elt->first_same_value)
6191 /* Show that two things that we've seen before are
6192 actually the same. */
6193 merge_equiv_classes (src_elt, classp);
6195 classp = src_elt->first_same_value;
6196 /* Ignore invalid entries. */
6198 && !REG_P (classp->exp)
6199 && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
6200 classp = classp->next_same_value;
6205 /* Special handling for (set REG0 REG1) where REG0 is the
6206 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6207 be used in the sequel, so (if easily done) change this insn to
6208 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6209 that computed their value. Then REG1 will become a dead store
6210 and won't cloud the situation for later optimizations.
6212 Do not make this change if REG1 is a hard register, because it will
6213 then be used in the sequel and we may be changing a two-operand insn
6214 into a three-operand insn.
6216 Also do not do this if we are operating on a copy of INSN.
6218 Also don't do this if INSN ends a libcall; this would cause an unrelated
6219 register to be set in the middle of a libcall, and we then get bad code
6220 if the libcall is deleted. */
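/* For illustration only (the pseudo register numbers are invented): if the
   previous insn is

       (set (reg 200) (plus:SI (reg 300) (const_int 4)))

   and this insn is

       (set (reg 100) (reg 200))

   with reg 100 the cheapest register in reg 200's class, the code below
   rewrites the pair as

       (set (reg 100) (plus:SI (reg 300) (const_int 4)))
       (set (reg 200) (reg 100))

   so the store into reg 200 is likely to become a dead store.  */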
6222 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
6223 && NEXT_INSN (PREV_INSN (insn)) == insn
6224 && REG_P (SET_SRC (sets[0].rtl))
6225 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6226 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6228 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6229 struct qty_table_elem *src_ent = &qty_table[src_q];
6231 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6232 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6235 /* Scan for the previous nonnote insn, but stop at a basic block boundary. */
6239 prev = PREV_INSN (prev);
6241 while (prev && NOTE_P (prev)
6242 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6244 /* Do not swap the registers around if the previous instruction
6245 attaches a REG_EQUIV note to REG1.
6247 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6248 from the pseudo that originally shadowed an incoming argument
6249 to another register. Some uses of REG_EQUIV might rely on it
6250 being attached to REG1 rather than REG2.
6252 This section previously turned the REG_EQUIV into a REG_EQUAL
6253 note. We cannot do that because REG_EQUIV may provide an
6254 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
6256 if (prev != 0 && NONJUMP_INSN_P (prev)
6257 && GET_CODE (PATTERN (prev)) == SET
6258 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6259 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6261 rtx dest = SET_DEST (sets[0].rtl);
6262 rtx src = SET_SRC (sets[0].rtl);
6265 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6266 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6267 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6268 apply_change_group ();
6270 /* If INSN has a REG_EQUAL note, and this note mentions
6271 REG0, then we must delete it, because the value in
6272 REG0 has changed. If the note's value is REG1, we must
6273 also delete it because that is now this insn's dest. */
6274 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6276 && (reg_mentioned_p (dest, XEXP (note, 0))
6277 || rtx_equal_p (src, XEXP (note, 0))))
6278 remove_note (insn, note);
6283 /* If this is a conditional jump insn, record any known equivalences due to
6284 the condition being tested. */
6287 && n_sets == 1 && GET_CODE (x) == SET
6288 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6289 record_jump_equiv (insn, 0);
6292 /* If the previous insn set CC0 and this insn no longer references CC0,
6293 delete the previous insn. Here we use the fact that nothing expects CC0
6294 to be valid over an insn, which is true until the final pass. */
6295 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6296 && (tem = single_set (prev_insn)) != 0
6297 && SET_DEST (tem) == cc0_rtx
6298 && ! reg_mentioned_p (cc0_rtx, x))
6299 delete_insn (prev_insn);
6301 prev_insn_cc0 = this_insn_cc0;
6302 prev_insn_cc0_mode = this_insn_cc0_mode;
6307 /* Remove from the hash table all expressions that reference memory. */
6310 invalidate_memory (void)
6313 struct table_elt *p, *next;
6315 for (i = 0; i < HASH_SIZE; i++)
6316 for (p = table[i]; p; p = next)
6318 next = p->next_same_hash;
6320 remove_from_table (p, i);
6324 /* If ADDR is an address that implicitly affects the stack pointer, return
6325 1 and update the register tables to show the effect. Else, return 0. */
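/* For example (written informally; the exact RTL spelling of the stack
   pointer is target-specific), the address of a push such as

       (set (mem:SI (pre_dec:SI (reg sp))) (reg 100))

   is (pre_dec:SI (reg sp)): an RTX_AUTOINC whose operand is the stack
   pointer, so this function bumps REG_TICK for the stack pointer and
   returns 1.  */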
6328 addr_affects_sp_p (rtx addr)
6330 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
6331 && REG_P (XEXP (addr, 0))
6332 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6334 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6336 REG_TICK (STACK_POINTER_REGNUM)++;
6337 /* Is it possible to use a subreg of SP? */
6338 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6341 /* This should be *very* rare. */
6342 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6343 invalidate (stack_pointer_rtx, VOIDmode);
6351 /* Perform invalidation on the basis of everything about an insn
6352 except for invalidating the actual places that are SET in it.
6353 This includes the places CLOBBERed, and anything that might
6354 alias with something that is SET or CLOBBERed.
6356 X is the pattern of the insn. */
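/* As an invented example, for a pattern such as

       (parallel [(set (reg:SI 100) (plus:SI (reg:SI 100) (reg:SI 101)))
                  (clobber (reg:CC 17))])

   only the CLOBBERed register 17 is invalidated here; the SET
   destination, reg 100, is handled by the normal SET processing.  */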
6359 invalidate_from_clobbers (rtx x)
6361 if (GET_CODE (x) == CLOBBER)
6363 rtx ref = XEXP (x, 0);
6366 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6368 invalidate (ref, VOIDmode);
6369 else if (GET_CODE (ref) == STRICT_LOW_PART
6370 || GET_CODE (ref) == ZERO_EXTRACT)
6371 invalidate (XEXP (ref, 0), GET_MODE (ref));
6374 else if (GET_CODE (x) == PARALLEL)
6377 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6379 rtx y = XVECEXP (x, 0, i);
6380 if (GET_CODE (y) == CLOBBER)
6382 rtx ref = XEXP (y, 0);
6383 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6385 invalidate (ref, VOIDmode);
6386 else if (GET_CODE (ref) == STRICT_LOW_PART
6387 || GET_CODE (ref) == ZERO_EXTRACT)
6388 invalidate (XEXP (ref, 0), GET_MODE (ref));
6394 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6395 and replace any registers in them with either an equivalent constant
6396 or the canonical form of the register. If we are inside an address,
6397 only do this if the address remains valid.
6399 OBJECT is 0 except when within a MEM in which case it is the MEM.
6401 Return the replacement for X. */
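/* A made-up example: if reg 100 is currently known to hold (const_int 8),
   a note value

       (plus:SI (reg:SI 100) (const_int 4))

   has the register replaced with its constant, giving

       (plus:SI (const_int 8) (const_int 4))

   which later folding can reduce to (const_int 12).  Registers with no
   known constant value are simply canonicalized via canon_reg.  */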
6404 cse_process_notes (rtx x, rtx object)
6406 enum rtx_code code = GET_CODE (x);
6407 const char *fmt = GET_RTX_FORMAT (code);
6424 validate_change (x, &XEXP (x, 0),
6425 cse_process_notes (XEXP (x, 0), x), 0);
6430 if (REG_NOTE_KIND (x) == REG_EQUAL)
6431 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6433 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6440 rtx new = cse_process_notes (XEXP (x, 0), object);
6441 /* We don't substitute VOIDmode constants into these rtx,
6442 since they would impede folding. */
6443 if (GET_MODE (new) != VOIDmode)
6444 validate_change (object, &XEXP (x, 0), new, 0);
6449 i = REG_QTY (REGNO (x));
6451 /* Return a constant or a constant register. */
6452 if (REGNO_QTY_VALID_P (REGNO (x)))
6454 struct qty_table_elem *ent = &qty_table[i];
6456 if (ent->const_rtx != NULL_RTX
6457 && (CONSTANT_P (ent->const_rtx)
6458 || REG_P (ent->const_rtx)))
6460 rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
6466 /* Otherwise, canonicalize this register. */
6467 return canon_reg (x, NULL_RTX);
6473 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6475 validate_change (object, &XEXP (x, i),
6476 cse_process_notes (XEXP (x, i), object), 0);
6481 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6482 since they are done elsewhere. This function is called via note_stores. */
6485 invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6487 enum rtx_code code = GET_CODE (dest);
6490 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6491 /* There are times when an address can appear varying and be a PLUS
6492 during this scan when it would be a fixed address were we to know
6493 the proper equivalences. So invalidate all memory if there is
6494 a BLKmode or nonscalar memory reference or a reference to a
6495 variable address. */
6496 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6497 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6499 invalidate_memory ();
6503 if (GET_CODE (set) == CLOBBER
6508 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6509 invalidate (XEXP (dest, 0), GET_MODE (dest));
6510 else if (code == REG || code == SUBREG || code == MEM)
6511 invalidate (dest, VOIDmode);
6514 /* Invalidate all insns from START up to the end of the function or the
6515 next label. This is called when we wish to CSE around a block that is
6516 conditionally executed. */
6519 invalidate_skipped_block (rtx start)
6523 for (insn = start; insn && !LABEL_P (insn);
6524 insn = NEXT_INSN (insn))
6526 if (! INSN_P (insn))
6531 if (! CONST_OR_PURE_CALL_P (insn))
6532 invalidate_memory ();
6533 invalidate_for_call ();
6536 invalidate_from_clobbers (PATTERN (insn));
6537 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6541 /* Find the end of INSN's basic block and return its range,
6542 the total number of SETs in all the insns of the block, the last insn of the
6543 block, and the branch path.
6545 The branch path indicates which branches should be followed. If a nonzero
6546 path size is specified, the block should be rescanned and a different set
6547 of branches will be taken. The branch path is only used if
6548 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6550 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6551 used to describe the block. It is filled in with the information about
6552 the current block. The incoming structure's branch path, if any, is used
6553 to construct the output branch path. */
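/* Informally, each branch path entry pairs a conditional jump in the
   extended basic block with how it is to be treated: PATH_TAKEN means we
   follow the jump and continue scanning at its target, PATH_AROUND means
   the jump skips a block that has no other entries (the skipped insns are
   invalidated and the scan resumes at the target), and PATH_NOT_TAKEN
   means the jump is treated as falling through.  */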
6556 cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6557 int follow_jumps, int skip_blocks)
6561 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6562 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6563 int path_size = data->path_size;
6567 /* Update the previous branch path, if any. If the last branch was
6568 previously PATH_TAKEN, mark it PATH_NOT_TAKEN.
6569 If it was previously PATH_NOT_TAKEN,
6570 shorten the path by one and look at the previous branch. We know that
6571 at least one branch must have been taken if PATH_SIZE is nonzero. */
6572 while (path_size > 0)
6574 if (data->path[path_size - 1].status != PATH_NOT_TAKEN)
6576 data->path[path_size - 1].status = PATH_NOT_TAKEN;
6583 /* If the first instruction is marked with QImode, that means we've
6584 already processed this block. Our caller will look at DATA->LAST
6585 to figure out where to go next. We want to return the next block
6586 in the instruction stream, not some branched-to block somewhere
6587 else. We accomplish this by pretending our caller forbade us to
6588 follow jumps or skip blocks. */
6589 if (GET_MODE (insn) == QImode)
6590 follow_jumps = skip_blocks = 0;
6592 /* Scan to end of this basic block. */
6593 while (p && !LABEL_P (p))
6595 /* Don't cse over a call to setjmp; on some machines (e.g. VAX)
6596 the regs restored by the longjmp come from
6597 a later time than the setjmp. */
6598 if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
6599 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6602 /* A PARALLEL can have lots of SETs in it,
6603 especially if it is really an ASM_OPERANDS. */
6604 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6605 nsets += XVECLEN (PATTERN (p), 0);
6606 else if (!NOTE_P (p))
6609 /* Ignore insns made by CSE; they cannot affect the boundaries of the basic block. */
6612 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6613 high_cuid = INSN_CUID (p);
6614 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6615 low_cuid = INSN_CUID (p);
6617 /* See if this insn is in our branch path. If it is and we are to take it, do so. */
6619 if (path_entry < path_size && data->path[path_entry].branch == p)
6621 if (data->path[path_entry].status != PATH_NOT_TAKEN)
6624 /* Point to next entry in path, if any. */
6628 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6629 was specified, we haven't reached our maximum path length, there are
6630 insns following the target of the jump, this is the only use of the
6631 jump label, and the target label is preceded by a BARRIER.
6633 Alternatively, we can follow the jump if it branches around a
6634 block of code and there are no other branches into the block.
6635 In this case invalidate_skipped_block will be called to invalidate any
6636 registers set in the block when following the jump. */
6638 else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6640 && GET_CODE (PATTERN (p)) == SET
6641 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6642 && JUMP_LABEL (p) != 0
6643 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6644 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6646 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6648 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6649 || (PREV_INSN (q) && CALL_P (PREV_INSN (q))
6650 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6651 && (!LABEL_P (q) || LABEL_NUSES (q) != 0))
6654 /* If we ran into a BARRIER, this code is an extension of the
6655 basic block when the branch is taken. */
6656 if (follow_jumps && q != 0 && BARRIER_P (q))
6658 /* Don't allow ourselves to keep walking around an
6659 always-executed loop. */
6660 if (next_real_insn (q) == next)
6666 /* Similarly, don't put a branch in our path more than once. */
6667 for (i = 0; i < path_entry; i++)
6668 if (data->path[i].branch == p)
6671 if (i != path_entry)
6674 data->path[path_entry].branch = p;
6675 data->path[path_entry++].status = PATH_TAKEN;
6677 /* This branch now ends our path. It was possible that we
6678 didn't see this branch the last time around (when the
6679 insn in front of the target was a JUMP_INSN that was
6680 turned into a no-op). */
6681 path_size = path_entry;
6684 /* Mark block so we won't scan it again later. */
6685 PUT_MODE (NEXT_INSN (p), QImode);
6687 /* Detect a branch around a block of code. */
6688 else if (skip_blocks && q != 0 && !LABEL_P (q))
6692 if (next_real_insn (q) == next)
6698 for (i = 0; i < path_entry; i++)
6699 if (data->path[i].branch == p)
6702 if (i != path_entry)
6705 /* This is no_labels_between_p (p, q) with an added check for
6706 reaching the end of a function (in case Q precedes P). */
6707 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6713 data->path[path_entry].branch = p;
6714 data->path[path_entry++].status = PATH_AROUND;
6716 path_size = path_entry;
6719 /* Mark block so we won't scan it again later. */
6720 PUT_MODE (NEXT_INSN (p), QImode);
6727 data->low_cuid = low_cuid;
6728 data->high_cuid = high_cuid;
6729 data->nsets = nsets;
6732 /* If no jump in the path is taken, set our path length to zero
6733 so a rescan won't be done. */
6734 for (i = path_size - 1; i >= 0; i--)
6735 if (data->path[i].status != PATH_NOT_TAKEN)
6739 data->path_size = 0;
6741 data->path_size = path_size;
6743 /* End the current branch path. */
6744 data->path[path_size].branch = 0;
6747 /* Perform cse on the instructions of a function.
6748 F is the first instruction.
6749 NREGS is one plus the highest pseudo-reg number used in the function.
6751 Returns 1 if jump_optimize should be redone due to simplifications
6752 in conditional jump instructions. */
6755 cse_main (rtx f, int nregs, FILE *file)
6757 struct cse_basic_block_data val;
6761 init_cse_reg_info (nregs);
6763 val.path = XNEWVEC (struct branch_path, PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6765 cse_jumps_altered = 0;
6766 recorded_label_ref = 0;
6767 constant_pool_entries_cost = 0;
6768 constant_pool_entries_regcost = 0;
6770 rtl_hooks = cse_rtl_hooks;
6773 init_alias_analysis ();
6775 reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
6777 /* Find the largest uid. */
6779 max_uid = get_max_uid ();
6780 uid_cuid = XCNEWVEC (int, max_uid + 1);
6782 /* Compute the mapping from uids to cuids.
6783 CUIDs are numbers assigned to insns, like uids,
6784 except that cuids increase monotonically through the code.
6785 Don't assign cuids to line-number NOTEs, so that the distance in cuids
6786 between two insns is not affected by -g. */
6788 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6791 || NOTE_LINE_NUMBER (insn) < 0)
6792 INSN_CUID (insn) = ++i;
6794 /* Give a line number note the same cuid as preceding insn. */
6795 INSN_CUID (insn) = i;
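  /* For example (the uids are invented): for the stream
         insn 12, line-number note 13, insn 17, basic-block note 18
     the cuids assigned are 1, 1, 2, 3; real insns and non-line-number
     notes get fresh, monotonically increasing cuids, while a line-number
     note reuses the cuid of the preceding insn, so cuid distances do not
     change under -g.  */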
6798 /* Loop over basic blocks.
6799 Compute the maximum number of qty's needed for each basic block
6800 (which is 2 for each SET). */
6805 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps,
6806 flag_cse_skip_blocks);
6808 /* If this basic block was already processed or has no sets, skip it. */
6809 if (val.nsets == 0 || GET_MODE (insn) == QImode)
6811 PUT_MODE (insn, VOIDmode);
6812 insn = (val.last ? NEXT_INSN (val.last) : 0);
6817 cse_basic_block_start = val.low_cuid;
6818 cse_basic_block_end = val.high_cuid;
6819 max_qty = val.nsets * 2;
6822 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
6823 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6826 /* Make MAX_QTY bigger to give us room to optimize
6827 past the end of this basic block, if that should prove useful. */
6831 /* If this basic block is being extended by following certain jumps,
6832 (see `cse_end_of_basic_block'), we reprocess the code from the start.
6833 Otherwise, we start after this basic block. */
6834 if (val.path_size > 0)
6835 cse_basic_block (insn, val.last, val.path);
6838 int old_cse_jumps_altered = cse_jumps_altered;
6841 /* When cse changes a conditional jump to an unconditional
6842 jump, we want to reprocess the block, since it will give
6843 us a new branch path to investigate. */
6844 cse_jumps_altered = 0;
6845 temp = cse_basic_block (insn, val.last, val.path);
6846 if (cse_jumps_altered == 0
6847 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
6850 cse_jumps_altered |= old_cse_jumps_altered;
6862 end_alias_analysis ();
6864 free (reg_eqv_table);
6866 rtl_hooks = general_rtl_hooks;
6868 return cse_jumps_altered || recorded_label_ref;
6871 /* Process a single basic block. FROM and TO are the limits of the basic
6872 block. NEXT_BRANCH points to the branch path when following jumps or
6873 a null path when not following jumps. */
6876 cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
6880 rtx libcall_insn = NULL_RTX;
6882 int no_conflict = 0;
6884 /* Allocate the space needed by qty_table. */
6885 qty_table = XNEWVEC (struct qty_table_elem, max_qty);
6889 /* TO might be a label. If so, protect it from being deleted. */
6890 if (to != 0 && LABEL_P (to))
6893 for (insn = from; insn != to; insn = NEXT_INSN (insn))
6895 enum rtx_code code = GET_CODE (insn);
6897 /* If we have processed PARAM_MAX_CSE_INSNS insns, flush the hash table to
6898 avoid extreme quadratic behavior. We must not include NOTEs
6899 in the count since there may be more of them when generating
6900 debugging information. If we clear the table at different
6901 times, code generated with -g -O might be different than code
6902 generated with -O but not -g.
6904 ??? This is a real kludge and needs to be done some other way.
6906 if (code != NOTE && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
6908 flush_hash_table ();
6912 /* See if this is a branch that is part of the path. If so, and it is
6913 to be taken, do so. */
6914 if (next_branch->branch == insn)
6916 enum taken status = next_branch++->status;
6917 if (status != PATH_NOT_TAKEN)
6919 if (status == PATH_TAKEN)
6920 record_jump_equiv (insn, 1);
6922 invalidate_skipped_block (NEXT_INSN (insn));
6924 /* Set the last insn as the jump insn; it doesn't affect cc0.
6925 Then follow this branch. */
6930 insn = JUMP_LABEL (insn);
6935 if (GET_MODE (insn) == QImode)
6936 PUT_MODE (insn, VOIDmode);
6938 if (GET_RTX_CLASS (code) == RTX_INSN)
6942 /* Process notes first so we have all notes in canonical forms when
6943 looking for duplicate operations. */
6945 if (REG_NOTES (insn))
6946 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
6948 /* Track when we are inside a LIBCALL block. Inside such a block,
6949 we do not want to record destinations. The last insn of a
6950 LIBCALL block is not considered to be part of the block, since
6951 its destination is the result of the block and hence should be recorded. */
6954 if (REG_NOTES (insn) != 0)
6956 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
6957 libcall_insn = XEXP (p, 0);
6958 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
6960 /* Keep libcall_insn for the last SET insn of a no-conflict
6961 block to prevent changing the destination. */
6967 else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
6971 cse_insn (insn, libcall_insn);
6973 if (no_conflict == -1)
6979 /* If we haven't already found an insn where we added a LABEL_REF, see if this insn does. */
6981 if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
6982 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6984 recorded_label_ref = 1;
6987 /* If INSN is now an unconditional jump, skip to the end of our
6988 basic block by pretending that we just did the last insn in the
6989 basic block. If we are jumping to the end of our block, show
6990 that we can have one usage of TO. */
6992 if (any_uncondjump_p (insn))
7000 if (JUMP_LABEL (insn) == to)
7003 /* Maybe TO was deleted because the jump is unconditional.
7004 If so, there is nothing left in this basic block. */
7005 /* ??? Perhaps it would be smarter to set TO
7006 to whatever follows this insn,
7007 and pretend the basic block had always ended here. */
7008 if (INSN_DELETED_P (to))
7011 insn = PREV_INSN (to);
7014 /* See if it is ok to keep on going past the label
7015 which used to end our basic block. Remember that we incremented
7016 the count of that label, so we decrement it here. If we made
7017 a jump unconditional, TO_USAGE will be one; in that case, we don't
7018 want to count the use in that jump. */
7020 if (to != 0 && NEXT_INSN (insn) == to
7021 && LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
7023 struct cse_basic_block_data val;
7026 insn = NEXT_INSN (to);
7028 /* If TO was the last insn in the function, we are done. */
7035 /* If TO was preceded by a BARRIER we are done with this block
7036 because it has no continuation. */
7037 prev = prev_nonnote_insn (to);
7038 if (prev && BARRIER_P (prev))
7044 /* Find the end of the following block. Note that we won't be
7045 following branches in this case. */
7048 val.path = XNEWVEC (struct branch_path, PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
7049 cse_end_of_basic_block (insn, &val, 0, 0);
7052 /* If the tables we allocated have enough space left
7053 to handle all the SETs in the next basic block,
7054 continue through it. Otherwise, return,
7055 and that block will be scanned individually. */
7056 if (val.nsets * 2 + next_qty > max_qty)
7059 cse_basic_block_start = val.low_cuid;
7060 cse_basic_block_end = val.high_cuid;
7063 /* Prevent TO from being deleted if it is a label. */
7064 if (to != 0 && LABEL_P (to))
7067 /* Back up so we process the first insn in the extension. */
7068 insn = PREV_INSN (insn);
7072 gcc_assert (next_qty <= max_qty);
7076 return to ? NEXT_INSN (to) : 0;
7079 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7080 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
7083 check_for_label_ref (rtx *rtl, void *data)
7085 rtx insn = (rtx) data;
7087 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7088 we must rerun jump since it needs to place the note. If this is a
7089 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7090 since no REG_LABEL will be added. */
7091 return (GET_CODE (*rtl) == LABEL_REF
7092 && ! LABEL_REF_NONLOCAL_P (*rtl)
7093 && LABEL_P (XEXP (*rtl, 0))
7094 && INSN_UID (XEXP (*rtl, 0)) != 0
7095 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7098 /* Count the number of times registers are used (not set) in X.
7099 COUNTS is an array in which we accumulate the count; INCR is how much
7100 we count each register usage.
7102 Don't count a usage of DEST, which is the SET_DEST of a SET which
7103 contains X in its SET_SRC. This is because such a SET does not
7104 modify the liveness of DEST.
7105 DEST is set to pc_rtx for a trapping insn, which means that we must count
7106 uses of a SET_DEST regardless because the insn can't be deleted here. */
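/* As a small made-up example, with INCR == 1 the insn

       (set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 101)))

   adds 2 to counts[101] and nothing to counts[100]; a plain REG used only
   as a SET_DEST is not counted as a use, so a register whose count stays
   at zero marks its defining insns as candidates for deletion in
   delete_trivially_dead_insns below.  */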
7109 count_reg_usage (rtx x, int *counts, rtx dest, int incr)
7119 switch (code = GET_CODE (x))
7123 counts[REGNO (x)] += incr;
7137 /* If we are clobbering a MEM, mark any registers inside the address as being used. */
7139 if (MEM_P (XEXP (x, 0)))
7140 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7144 /* Unless we are setting a REG, count everything in SET_DEST. */
7145 if (!REG_P (SET_DEST (x)))
7146 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7147 count_reg_usage (SET_SRC (x), counts,
7148 dest ? dest : SET_DEST (x),
7155 /* We expect dest to be NULL_RTX here. If the insn may trap, mark
7156 this fact by setting DEST to pc_rtx. */
7157 if (flag_non_call_exceptions && may_trap_p (PATTERN (x)))
7159 if (code == CALL_INSN)
7160 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
7161 count_reg_usage (PATTERN (x), counts, dest, incr);
7163 /* Things used in a REG_EQUAL note aren't dead since loop may try to use them. */
7166 note = find_reg_equal_equiv_note (x);
7169 rtx eqv = XEXP (note, 0);
7171 if (GET_CODE (eqv) == EXPR_LIST)
7172 /* This REG_EQUAL note describes the result of a function call.
7173 Process all the arguments. */
7176 count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
7177 eqv = XEXP (eqv, 1);
7179 while (eqv && GET_CODE (eqv) == EXPR_LIST);
7181 count_reg_usage (eqv, counts, dest, incr);
7186 if (REG_NOTE_KIND (x) == REG_EQUAL
7187 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
7188 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7189 involving registers in the address. */
7190 || GET_CODE (XEXP (x, 0)) == CLOBBER)
7191 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7193 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7197 /* If the asm is volatile, then this insn cannot be deleted,
7198 and so the inputs *must* be live. */
7199 if (MEM_VOLATILE_P (x))
7201 /* Iterate over just the inputs, not the constraints as well. */
7202 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7203 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
7213 fmt = GET_RTX_FORMAT (code);
7214 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7217 count_reg_usage (XEXP (x, i), counts, dest, incr);
7218 else if (fmt[i] == 'E')
7219 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7220 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7224 /* Return true if set is live. */
7226 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
7233 if (set_noop_p (set))
7237 else if (GET_CODE (SET_DEST (set)) == CC0
7238 && !side_effects_p (SET_SRC (set))
7239 && ((tem = next_nonnote_insn (insn)) == 0
7241 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7244 else if (!REG_P (SET_DEST (set))
7245 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7246 || counts[REGNO (SET_DEST (set))] != 0
7247 || side_effects_p (SET_SRC (set)))
7252 /* Return true if insn is live. */
7255 insn_live_p (rtx insn, int *counts)
7258 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7260 else if (GET_CODE (PATTERN (insn)) == SET)
7261 return set_live_p (PATTERN (insn), insn, counts);
7262 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7264 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7266 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7268 if (GET_CODE (elt) == SET)
7270 if (set_live_p (elt, insn, counts))
7273 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7282 /* Return true if libcall is dead as a whole. */
7285 dead_libcall_p (rtx insn, int *counts)
7289 /* See if there's a REG_EQUAL note on this insn and try to
7290 replace the source with the REG_EQUAL expression.
7292 We assume that insns with REG_RETVALs can only be reg->reg
7293 copies at this point. */
7294 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7298 set = single_set (insn);
7302 new = simplify_rtx (XEXP (note, 0));
7304 new = XEXP (note, 0);
7306 /* While changing insn, we must update the counts accordingly. */
7307 count_reg_usage (insn, counts, NULL_RTX, -1);
7309 if (validate_change (insn, &SET_SRC (set), new, 0))
7311 count_reg_usage (insn, counts, NULL_RTX, 1);
7312 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7313 remove_note (insn, note);
7317 if (CONSTANT_P (new))
7319 new = force_const_mem (GET_MODE (SET_DEST (set)), new);
7320 if (new && validate_change (insn, &SET_SRC (set), new, 0))
7322 count_reg_usage (insn, counts, NULL_RTX, 1);
7323 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7324 remove_note (insn, note);
7329 count_reg_usage (insn, counts, NULL_RTX, 1);
7333 /* Scan all the insns and delete any that are dead; i.e., they store a register
7334 that is never used or they copy a register to itself.
7336 This is used to remove insns made obviously dead by cse, loop or other
7337 optimizations. It improves the heuristics in loop since it won't try to
7338 move dead invariants out of loops or make givs for dead quantities. The
7339 remaining passes of the compilation are also sped up. */
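/* For instance (illustrative pseudo registers), once CSE has turned

       (set (reg 200) (plus:SI (reg 300) (const_int 4)))
       (set (reg 100) (reg 200))

   into the equivalent pair that computes into reg 100 first, reg 200 may
   end up with a usage count of zero, and the backward scan below deletes
   the now-dead copy into it.  */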
7342 delete_trivially_dead_insns (rtx insns, int nreg)
7346 int in_libcall = 0, dead_libcall = 0;
7349 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7350 /* First count the number of times each register is used. */
7351 counts = XCNEWVEC (int, nreg);
7352 for (insn = insns; insn; insn = NEXT_INSN (insn))
7354 count_reg_usage (insn, counts, NULL_RTX, 1);
7356 /* Go from the last insn to the first and delete insns that only set unused
7357 registers or copy a register to itself. As we delete an insn, remove
7358 usage counts for registers it uses.
7360 The first jump optimization pass may leave a real insn as the last
7361 insn in the function. We must not skip that insn or we may end
7362 up deleting code that is not really dead. */
7363 for (insn = get_last_insn (); insn; insn = prev)
7367 prev = PREV_INSN (insn);
7371 /* Don't delete any insns that are part of a libcall block unless
7372 we can delete the whole libcall block.
7374 Flow or loop might get confused if we did that. Remember
7375 that we are scanning backwards. */
7376 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7380 dead_libcall = dead_libcall_p (insn, counts);
7382 else if (in_libcall)
7383 live_insn = ! dead_libcall;
7385 live_insn = insn_live_p (insn, counts);
7387 /* If this is a dead insn, delete it and show registers in it aren't being used. */
7392 count_reg_usage (insn, counts, NULL_RTX, -1);
7393 delete_insn_and_edges (insn);
7397 if (in_libcall && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7404 if (dump_file && ndead)
7405 fprintf (dump_file, "Deleted %i trivially dead insns\n",
7409 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7413 /* This function is called via for_each_rtx. The argument, NEWREG, is
7414 a condition code register with the desired mode. If we are looking
7415 at the same register in a different mode, replace it with NEWREG. */
7419 cse_change_cc_mode (rtx *loc, void *data)
7421 struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
7425 && REGNO (*loc) == REGNO (args->newreg)
7426 && GET_MODE (*loc) != GET_MODE (args->newreg))
7428 validate_change (args->insn, loc, args->newreg, 1);
7435 /* Change the mode of any reference to the register REGNO (NEWREG) to
7436 GET_MODE (NEWREG) in INSN. */
7439 cse_change_cc_mode_insn (rtx insn, rtx newreg)
7441 struct change_cc_mode_args args;
7448 args.newreg = newreg;
7450 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
7451 for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
7453 /* If the following assertion was triggered, there is most probably
7454 something wrong with the cc_modes_compatible back end function.
7455 CC modes can only be considered compatible if the insn, with the mode
7456 replaced by any of the compatible modes, can still be recognized. */
7457 success = apply_change_group ();
7458 gcc_assert (success);
7461 /* Change the mode of any reference to the register REGNO (NEWREG) to
7462 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
7463 any instruction which modifies NEWREG. */
7466 cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7470 for (insn = start; insn != end; insn = NEXT_INSN (insn))
7472 if (! INSN_P (insn))
7475 if (reg_set_p (newreg, insn))
7478 cse_change_cc_mode_insn (insn, newreg);
7482 /* BB is a basic block which finishes with CC_REG as a condition code
7483 register which is set to CC_SRC. Look through the successors of BB
7484 to find blocks which have a single predecessor (i.e., this one),
7485 and look through those blocks for an assignment to CC_REG which is
7486 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
7487 permitted to change the mode of CC_SRC to a compatible mode. This
7488 returns VOIDmode if no equivalent assignments were found.
7489 Otherwise it returns the mode which CC_SRC should wind up with.
7491 The main complexity in this function is handling the mode issues.
7492 We may have more than one duplicate which we can eliminate, and we
7493 try to find a mode which will work for multiple duplicates. */
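/* An illustrative case (the register number and the CC modes are
   hypothetical): if BB ends with

       (set (reg:CCZ 17) (compare:CCZ (reg:SI 100) (const_int 0)))
       ... conditional jump testing reg 17 ...

   and a successor block whose only predecessor is BB begins with

       (set (reg:CCZ 17) (compare:CCZ (reg:SI 100) (const_int 0)))

   the second comparison is redundant and can be deleted, possibly after
   picking a compatible mode for the first one when the duplicates differ
   only in their CC mode.  */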
7495 static enum machine_mode
7496 cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
7499 enum machine_mode mode;
7500 unsigned int insn_count;
7503 enum machine_mode modes[2];
7509 /* We expect to have two successors. Look at both before picking
7510 the final mode for the comparison. If we have more successors
7511 (i.e., some sort of table jump, although that seems unlikely),
7512 then we require all beyond the first two to use the same mode. */
7515 found_equiv = false;
7516 mode = GET_MODE (cc_src);
7518 FOR_EACH_EDGE (e, ei, bb->succs)
7523 if (e->flags & EDGE_COMPLEX)
7526 if (EDGE_COUNT (e->dest->preds) != 1
7527 || e->dest == EXIT_BLOCK_PTR)
7530 end = NEXT_INSN (BB_END (e->dest));
7531 for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7535 if (! INSN_P (insn))
7538 /* If CC_SRC is modified, we have to stop looking for
7539 something which uses it. */
7540 if (modified_in_p (cc_src, insn))
7543 /* Check whether INSN sets CC_REG to CC_SRC. */
7544 set = single_set (insn);
7546 && REG_P (SET_DEST (set))
7547 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7550 enum machine_mode set_mode;
7551 enum machine_mode comp_mode;
7554 set_mode = GET_MODE (SET_SRC (set));
7555 comp_mode = set_mode;
7556 if (rtx_equal_p (cc_src, SET_SRC (set)))
7558 else if (GET_CODE (cc_src) == COMPARE
7559 && GET_CODE (SET_SRC (set)) == COMPARE
7561 && rtx_equal_p (XEXP (cc_src, 0),
7562 XEXP (SET_SRC (set), 0))
7563 && rtx_equal_p (XEXP (cc_src, 1),
7564 XEXP (SET_SRC (set), 1)))
7567 comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7568 if (comp_mode != VOIDmode
7569 && (can_change_mode || comp_mode == mode))
7576 if (insn_count < ARRAY_SIZE (insns))
7578 insns[insn_count] = insn;
7579 modes[insn_count] = set_mode;
7580 last_insns[insn_count] = end;
7583 if (mode != comp_mode)
7585 gcc_assert (can_change_mode);
7588 /* The modified insn will be re-recognized later. */
7589 PUT_MODE (cc_src, mode);
7594 if (set_mode != mode)
7596 /* We found a matching expression in the
7597 wrong mode, but we don't have room to
7598 store it in the array. Punt. This case should be rare. */
7602 /* INSN sets CC_REG to a value equal to CC_SRC
7603 with the right mode. We can simply delete it. */
7608 /* We found an instruction to delete. Keep looking,
7609 in the hopes of finding a three-way jump. */
7613 /* We found an instruction which sets the condition
7614 code, so don't look any farther. */
7618 /* If INSN sets CC_REG in some other way, don't look any further. */
7620 if (reg_set_p (cc_reg, insn))
7624 /* If we fell off the bottom of the block, we can keep looking
7625 through successors. We pass CAN_CHANGE_MODE as false because
7626 we aren't prepared to handle compatibility between the
7627 further blocks and this block. */
7630 enum machine_mode submode;
7632 submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
7633 if (submode != VOIDmode)
7635 gcc_assert (submode == mode);
7637 can_change_mode = false;
7645 /* Now INSN_COUNT is the number of instructions we found which set
7646 CC_REG to a value equivalent to CC_SRC. The instructions are in
7647 INSNS. The modes used by those instructions are in MODES. */
7650 for (i = 0; i < insn_count; ++i)
7652 if (modes[i] != mode)
7654 /* We need to change the mode of CC_REG in INSNS[i] and
7655 subsequent instructions. */
7658 if (GET_MODE (cc_reg) == mode)
7661 newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7663 cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7667 delete_insn (insns[i]);
7673 /* If we have a fixed condition code register (or two), walk through
7674 the instructions and try to eliminate duplicate assignments. */
7677 cse_condition_code_reg (void)
7679 unsigned int cc_regno_1;
7680 unsigned int cc_regno_2;
7685 if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7688 cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7689 if (cc_regno_2 != INVALID_REGNUM)
7690 cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7692 cc_reg_2 = NULL_RTX;
7701 enum machine_mode mode;
7702 enum machine_mode orig_mode;
7704 /* Look for blocks which end with a conditional jump based on a
7705 condition code register. Then look for the instruction which
7706 sets the condition code register. Then look through the
7707 successor blocks for instructions which set the condition
7708 code register to the same value. There are other possible
7709 uses of the condition code register, but these are by far the
7710 most common and the ones which we are most likely to be able to optimize. */
7713 last_insn = BB_END (bb);
7714 if (!JUMP_P (last_insn))
7717 if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7719 else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7724 cc_src_insn = NULL_RTX;
7726 for (insn = PREV_INSN (last_insn);
7727 insn && insn != PREV_INSN (BB_HEAD (bb));
7728 insn = PREV_INSN (insn))
7732 if (! INSN_P (insn))
7734 set = single_set (insn);
7736 && REG_P (SET_DEST (set))
7737 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7740 cc_src = SET_SRC (set);
7743 else if (reg_set_p (cc_reg, insn))
7750 if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7753 /* Now CC_REG is a condition code register used for a
7754 conditional jump at the end of the block, and CC_SRC, in
7755 CC_SRC_INSN, is the value to which that condition code
7756 register is set, and CC_SRC is still meaningful at the end of the basic block. */
7759 orig_mode = GET_MODE (cc_src);
7760 mode = cse_cc_succs (bb, cc_reg, cc_src, true);
7761 if (mode != VOIDmode)
7763 gcc_assert (mode == GET_MODE (cc_src));
7764 if (mode != orig_mode)
7766 rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7768 cse_change_cc_mode_insn (cc_src_insn, newreg);
7770 /* Do the same in the following insns that use the
7771 current value of CC_REG within BB. */
7772 cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7773 NEXT_INSN (last_insn),
7781 /* Perform common subexpression elimination. Nonzero value from
7782 `cse_main' means that jumps were simplified and some code may now
7783 be unreachable, so do jump optimization again. */
7785 gate_handle_cse (void)
7787 return optimize > 0;
7791 rest_of_handle_cse (void)
7796 dump_flow_info (dump_file);
7798 reg_scan (get_insns (), max_reg_num ());
7800 tem = cse_main (get_insns (), max_reg_num (), dump_file);
7802 rebuild_jump_labels (get_insns ());
7803 if (purge_all_dead_edges ())
7804 delete_unreachable_blocks ();
7806 delete_trivially_dead_insns (get_insns (), max_reg_num ());
7808 /* If we are not running more CSE passes, then we are no longer
7809 expecting CSE to be run. But always rerun it in a cheap mode. */
7810 cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
7813 delete_dead_jumptables ();
7815 if (tem || optimize > 1)
7816 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
7819 struct tree_opt_pass pass_cse =
7822 gate_handle_cse, /* gate */
7823 rest_of_handle_cse, /* execute */
7826 0, /* static_pass_number */
7828 0, /* properties_required */
7829 0, /* properties_provided */
7830 0, /* properties_destroyed */
7831 0, /* todo_flags_start */
7833 TODO_ggc_collect, /* todo_flags_finish */
7839 gate_handle_cse2 (void)
7841 return optimize > 0 && flag_rerun_cse_after_loop;
7844 /* Run second CSE pass after loop optimizations. */
7846 rest_of_handle_cse2 (void)
7851 dump_flow_info (dump_file);
7853 tem = cse_main (get_insns (), max_reg_num (), dump_file);
7855 /* Run a pass to eliminate duplicated assignments to condition code
7856 registers. We have to run this after bypass_jumps, because it
7857 makes it harder for that pass to determine whether a jump can be bypassed safely. */
7859 cse_condition_code_reg ();
7861 purge_all_dead_edges ();
7862 delete_trivially_dead_insns (get_insns (), max_reg_num ());
7866 timevar_push (TV_JUMP);
7867 rebuild_jump_labels (get_insns ());
7868 delete_dead_jumptables ();
7869 cleanup_cfg (CLEANUP_EXPENSIVE);
7870 timevar_pop (TV_JUMP);
7872 reg_scan (get_insns (), max_reg_num ());
7873 cse_not_expected = 1;
7877 struct tree_opt_pass pass_cse2 =
7880 gate_handle_cse2, /* gate */
7881 rest_of_handle_cse2, /* execute */
7884 0, /* static_pass_number */
7885 TV_CSE2, /* tv_id */
7886 0, /* properties_required */
7887 0, /* properties_provided */
7888 0, /* properties_destroyed */
7889 0, /* todo_flags_start */
7891 TODO_ggc_collect, /* todo_flags_finish */