1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 /* stdio.h must precede rtl.h for FFS. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "basic-block.h"
34 #include "insn-config.h"
45 #include "rtlhooks-def.h"
47 /* The basic idea of common subexpression elimination is to go
48 through the code, keeping a record of expressions that would
49 have the same value at the current scan point, and replacing
50 expressions encountered with the cheapest equivalent expression.
52 It is too complicated to keep track of the different possibilities
53 when control paths merge in this code; so, at each label, we forget all
54 that is known and start fresh. This can be described as processing each
55 extended basic block separately. We have a separate pass to perform
58 Note CSE can turn a conditional or computed jump into a nop or
59 an unconditional jump. When this occurs we arrange to run the jump
60 optimizer after CSE to delete the unreachable code.
62 We use two data structures to record the equivalent expressions:
63 a hash table for most expressions, and a vector of "quantity
64 numbers" to record equivalent (pseudo) registers.
66 The use of the special data structure for registers is desirable
67 because it is faster. It is possible because register references
68 contain a fairly small number, the register number, taken from
69 a contiguously allocated series, and two register references are
70 identical if they have the same number. General expressions
71 do not have any such thing, so the only way to retrieve the
72 information recorded on an expression other than a register
73 is to keep it in a hash table.
75 Registers and "quantity numbers":
77 At the start of each basic block, all of the (hardware and pseudo)
78 registers used in the function are given distinct quantity
79 numbers to indicate their contents. During scan, when the code
80 copies one register into another, we copy the quantity number.
81 When a register is loaded in any other way, we allocate a new
82 quantity number to describe the value generated by this operation.
83 `reg_qty' records what quantity a register is currently thought of containing.
86 All real quantity numbers are greater than or equal to zero.
87 If register N has not been assigned a quantity, reg_qty[N] will
88 equal -N - 1, which is always negative.
90 Quantity numbers below zero do not exist and none of the `qty_table'
91 entries should be referenced with a negative index.
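   As a purely illustrative example (a hypothetical insn sequence, not taken
   from any particular function), scanning

       (set (reg 100) (mem A))      ; reg 100 gets a fresh quantity, say Q
       (set (reg 101) (reg 100))    ; a register copy: reg 101 also gets Q
       (set (reg 100) (plus B C))   ; reg 100 gets a new quantity Q'

   leaves reg 101 with quantity Q and reg 100 with quantity Q', so reg 101
   still names the value loaded from A even though reg 100 no longer does.
   A register that was never assigned, say register 102, still has
   reg_qty[102] == -103.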
93 We also maintain a bidirectional chain of registers for each
94 quantity number. The `qty_table` members `first_reg' and `last_reg',
95 and `reg_eqv_table' members `next' and `prev' hold these chains.
97 The first register in a chain is the one whose lifespan is least local.
98 Among equals, it is the one that was seen first.
99 We replace any equivalent register with that one.
101 If two registers have the same quantity number, then REG expressions
102 with the qty_table `mode' must be in the hash table for both
103 registers and must be in the same class.
105 The converse is not true. Since hard registers may be referenced in
106 any mode, two REG expressions might be equivalent in the hash table
107 but not have the same quantity number if the quantity of one
108 of the registers does not have the same mode as those expressions.
110 Constants and quantity numbers
112 When a quantity has a known constant value, that value is stored
113 in the appropriate qty_table `const_rtx'. This is in addition to
114 putting the constant in the hash table as is usual for non-regs.
116 Whether a reg or a constant is preferred is determined by the configuration
117 macro CONST_COSTS and will often depend on the constant value. In any
118 event, expressions containing constants can be simplified by fold_rtx.
120 When a quantity has a known nearly constant value (such as an address
121 of a stack slot), that value is stored in the appropriate qty_table `const_rtx'.
124 Integer constants don't have a machine mode. However, cse
125 determines the intended machine mode from the destination
126 of the instruction that moves the constant. The machine mode
127 is recorded in the hash table along with the actual RTL
128 constant expression so that different modes are kept separate.
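   For instance (a hypothetical example), after

       (set (reg:SI 100) (const_int 64))

   the constant 64 is recorded in the hash table with mode SImode, taken from
   the destination of the move; a later DImode use of (const_int 64) gets a
   separate entry, so equivalences in the two modes are never mixed.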
132 To record known equivalences among expressions in general
133 we use a hash table called `table'. It has a fixed number of buckets
134 that contain chains of `struct table_elt' elements for expressions.
135 These chains connect the elements whose expressions have the same
138 Other chains through the same elements connect the elements which
139 currently have equivalent values.
141 Register references in an expression are canonicalized before hashing
142 the expression. This is done using `reg_qty' and qty_table `first_reg'.
143 The hash code of a register reference is computed using the quantity
144 number, not the register number.
146 When the value of an expression changes, it is necessary to remove from the
147 hash table not just that expression but all expressions whose values
148 could be different as a result.
150 1. If the value changing is in memory, except in special cases
151 ANYTHING referring to memory could be changed. That is because
152 nobody knows where a pointer does not point.
153 The function `invalidate_memory' removes what is necessary.
155 The special cases are when the address is constant or is
156 a constant plus a fixed register such as the frame pointer
157 or a static chain pointer. When such addresses are stored in,
158 we can tell exactly which other such addresses must be invalidated
159 due to overlap. `invalidate' does this.
160 All expressions that refer to non-constant
161 memory addresses are also invalidated. `invalidate_memory' does this.
163 2. If the value changing is a register, all expressions
164 containing references to that register, and only those, must be removed.
167 Because searching the entire hash table for expressions that contain
168 a register is very slow, we try to figure out when it isn't necessary.
169 Precisely, this is necessary only when expressions have been
170 entered in the hash table using this register, and then the value has
171 changed, and then another expression wants to be added to refer to
172 the register's new value. This sequence of circumstances is rare
173 within any one basic block.
175 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
176 reg_tick[i] is incremented whenever a value is stored in register i.
177 reg_in_table[i] holds -1 if no references to register i have been
178 entered in the table; otherwise, it contains the value reg_tick[i] had
179 when the references were entered. If we want to enter a reference
180 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
181 Until we want to enter a new entry, the mere fact that the two vectors
182 don't match causes the existing entries to be ignored if anyone tries to match them.
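   Concretely, the check below is the one made when a new reference to
   register I is about to be entered (see mention_regs):

       if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
         remove_invalid_refs (i);
       REG_IN_TABLE (i) = REG_TICK (i);

   Until such an entry is attempted, stale entries simply stay in the table;
   the mismatch between the two counters is what keeps them from matching.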
184 Registers themselves are entered in the hash table as well as in
185 the equivalent-register chains. However, the vectors `reg_tick'
186 and `reg_in_table' do not apply to expressions which are simple
187 register references. These expressions are removed from the table
188 immediately when they become invalid, and this can be done even if
189 we do not immediately search for all the expressions that refer to the register.
192 A CLOBBER rtx in an instruction invalidates its operand for further
193 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
194 invalidates everything that resides in memory.
198 Constant expressions that differ only by an additive integer
199 are called related. When a constant expression is put in
200 the table, the related expression with no constant term
201 is also entered. These are made to point at each other
202 so that it is possible to find out if there exists any
203 register equivalent to an expression related to a given expression. */
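/* As an illustration of related expressions (a hypothetical example): when
   (const (plus (symbol_ref "tbl") (const_int 8))) is entered in the table,
   the related expression (symbol_ref "tbl"), which has no constant term, is
   entered as well, and the two are linked through their `related_value'
   fields. If a register is later known to be equivalent to the latter,
   use_related_value can rewrite the former as that register plus 8. */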
205 /* Length of qty_table vector. We know in advance we will not need
206 a quantity number this big. */
210 /* Next quantity number to be allocated.
211 This is 1 + the largest number needed so far. */
215 /* Per-qty information tracking.
217 `first_reg' and `last_reg' track the head and tail of the
218 chain of registers which currently contain this quantity.
220 `mode' contains the machine mode of this quantity.
222 `const_rtx' holds the rtx of the constant value of this
223 quantity, if known. A sum of the frame/arg pointer
224 and a constant can also be entered here. When this holds
225 a known value, `const_insn' is the insn which stored the constant value.
228 `comparison_{code,const,qty}' are used to track when a
229 comparison between a quantity and some constant or register has
230 been passed. In such a case, we know the results of the comparison
231 in case we see it again. These members record a comparison that
232 is known to be true. `comparison_code' holds the rtx code of such
233 a comparison, else it is set to UNKNOWN and the other two
234 comparison members are undefined. `comparison_const' holds
235 the constant being compared against, or zero if the comparison
236 is not against a constant. `comparison_qty' holds the quantity
237 being compared against when the result is known. If the comparison
238 is not with a register, `comparison_qty' is -1. */
240 struct qty_table_elem
244 rtx comparison_const;
246 unsigned int first_reg, last_reg;
247 /* The sizes of these fields should match the sizes of the
248 code and mode fields of struct rtx_def (see rtl.h). */
249 ENUM_BITFIELD(rtx_code) comparison_code : 16;
250 ENUM_BITFIELD(machine_mode) mode : 8;
253 /* The table of all qtys, indexed by qty number. */
254 static struct qty_table_elem *qty_table;
256 /* Structure used to pass arguments via for_each_rtx to function
257 cse_change_cc_mode. */
258 struct change_cc_mode_args
265 /* For machines that have a CC0, we do not record its value in the hash
266 table since its use is guaranteed to be the insn immediately following
267 its definition and any other insn is presumed to invalidate it.
269 Instead, we store below the value last assigned to CC0. If it should
270 happen to be a constant, it is stored in preference to the actual
271 assigned value. In case it is a constant, we store the mode in which
272 the constant should be interpreted. */
274 static rtx prev_insn_cc0;
275 static enum machine_mode prev_insn_cc0_mode;
277 /* Previous actual insn. 0 if at first insn of basic block. */
279 static rtx prev_insn;
282 /* Insn being scanned. */
284 static rtx this_insn;
286 /* Indexed by register number; gives the number of the next (or
287 previous) register in the chain of registers sharing the same value.
290 Or -1 if this register is at the end of the chain.
292 If reg_qty[N] == N, reg_eqv_table[N].next is undefined. */
294 /* Per-register equivalence chain. */
300 /* The table of all register equivalence chains. */
301 static struct reg_eqv_elem *reg_eqv_table;
305 /* Next in hash chain. */
306 struct cse_reg_info *hash_next;
308 /* The next cse_reg_info structure in the free or used list. */
309 struct cse_reg_info *next;
314 /* The quantity number of the register's current contents. */
317 /* The number of times the register has been altered in the current
321 /* The REG_TICK value at which rtx's containing this register are
322 valid in the hash table. If this does not equal the current
323 reg_tick value, such expressions existing in the hash table are invalid. */
327 /* The SUBREG that was set when REG_TICK was last incremented. Set
328 to -1 if the last store was to the whole register, not a subreg. */
329 unsigned int subreg_ticked;
332 /* A free list of cse_reg_info entries. */
333 static struct cse_reg_info *cse_reg_info_free_list;
335 /* A used list of cse_reg_info entries. */
336 static struct cse_reg_info *cse_reg_info_used_list;
337 static struct cse_reg_info *cse_reg_info_used_list_end;
339 /* A mapping from registers to cse_reg_info data structures. */
340 #define REGHASH_SHIFT 7
341 #define REGHASH_SIZE (1 << REGHASH_SHIFT)
342 #define REGHASH_MASK (REGHASH_SIZE - 1)
343 static struct cse_reg_info *reg_hash[REGHASH_SIZE];
345 #define REGHASH_FN(REGNO) \
346 (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
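/* A minimal sketch of how this mapping is consulted (it mirrors what
   get_cse_reg_info does below): hash the register number into one of the
   REGHASH_SIZE buckets and walk that bucket's chain, e.g.

       struct cse_reg_info *p;
       for (p = reg_hash[REGHASH_FN (regno)]; p; p = p->hash_next)
         if (p->regno == regno)
           break;

   A miss means that no information has been recorded for the register yet. */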
348 /* The last lookup we did into the cse_reg_info_tree. This allows us
349 to cache repeated lookups. */
350 static unsigned int cached_regno;
351 static struct cse_reg_info *cached_cse_reg_info;
353 /* A HARD_REG_SET containing all the hard registers for which there is
354 currently a REG expression in the hash table. Note the difference
355 from the above variables, which indicate if the REG is mentioned in some
356 expression in the table. */
358 static HARD_REG_SET hard_regs_in_table;
360 /* CUID of insn that starts the basic block currently being cse-processed. */
362 static int cse_basic_block_start;
364 /* CUID of insn that ends the basic block currently being cse-processed. */
366 static int cse_basic_block_end;
368 /* Vector mapping INSN_UIDs to cuids.
369 The cuids are like uids but always increase monotonically.
370 We use them to see whether a reg is used outside a given basic block. */
372 static int *uid_cuid;
374 /* Highest UID in UID_CUID. */
377 /* Get the cuid of an insn. */
379 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
381 /* Nonzero if this pass has made changes, and therefore it's
382 worthwhile to run the garbage collector. */
384 static int cse_altered;
386 /* Nonzero if cse has altered conditional jump insns
387 in such a way that jump optimization should be redone. */
389 static int cse_jumps_altered;
391 /* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
392 REG_LABEL, we have to rerun jump after CSE to put in the note. */
393 static int recorded_label_ref;
395 /* canon_hash stores 1 in do_not_record
396 if it notices a reference to CC0, PC, or some other volatile subexpression. */
399 static int do_not_record;
401 /* canon_hash stores 1 in hash_arg_in_memory
402 if it notices a reference to memory within the expression being hashed. */
404 static int hash_arg_in_memory;
406 /* The hash table contains buckets which are chains of `struct table_elt's,
407 each recording one expression's information.
408 That expression is in the `exp' field.
410 The canon_exp field contains a canonical (from the point of view of
411 alias analysis) version of the `exp' field.
413 Those elements with the same hash code are chained in both directions
414 through the `next_same_hash' and `prev_same_hash' fields.
416 Each set of expressions with equivalent values
417 are on a two-way chain through the `next_same_value'
418 and `prev_same_value' fields, and all point with
419 the `first_same_value' field at the first element in
420 that chain. The chain is in order of increasing cost.
421 Each element's cost value is in its `cost' field.
423 The `in_memory' field is nonzero for elements that
424 involve any reference to memory. These elements are removed
425 whenever a write is done to an unidentified location in memory.
426 To be safe, we assume that a memory address is unidentified unless
427 the address is either a symbol constant or a constant plus
428 the frame pointer or argument pointer.
430 The `related_value' field is used to connect related expressions
431 (that differ by adding an integer).
432 The related expressions are chained in a circular fashion.
433 `related_value' is zero for expressions for which this
436 The `cost' field stores the cost of this element's expression.
437 The `regcost' field stores the value returned by approx_reg_cost for
438 this element's expression.
440 The `is_const' flag is set if the element is a constant (including
443 The `flag' field is used as a temporary during some search routines.
445 The `mode' field is usually the same as GET_MODE (`exp'), but
446 if `exp' is a CONST_INT and has no machine mode then the `mode'
447 field is the mode it was being used as. Each constant is
448 recorded separately for each mode it is used with. */
454 struct table_elt *next_same_hash;
455 struct table_elt *prev_same_hash;
456 struct table_elt *next_same_value;
457 struct table_elt *prev_same_value;
458 struct table_elt *first_same_value;
459 struct table_elt *related_value;
462 /* The size of this field should match the size
463 of the mode field of struct rtx_def (see rtl.h). */
464 ENUM_BITFIELD(machine_mode) mode : 8;
470 /* We don't want a lot of buckets, because we rarely have very many
471 things stored in the hash table, and a lot of buckets slows
472 down a lot of loops that happen frequently. */
474 #define HASH_SIZE (1 << HASH_SHIFT)
475 #define HASH_MASK (HASH_SIZE - 1)
477 /* Compute hash code of X in mode M. Special-case the case where X is a pseudo
478 register (hard registers may require `do_not_record' to be set). */
481 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
482 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
483 : canon_hash (X, M)) & HASH_MASK)
485 /* Like HASH, but without side-effects. */
486 #define SAFE_HASH(X, M) \
487 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
488 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
489 : safe_hash (X, M)) & HASH_MASK)
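/* An illustrative sketch (not a quotation from this file) of the usual
   pattern for HASH: clear the flags that canon_hash may set as side effects,
   compute the hash code, and then consult those flags.

       do_not_record = 0;
       hash_arg_in_memory = 0;
       hash = HASH (exp, mode);

   SAFE_HASH computes the same value but has no such side effects, which is
   why it is used in places like lookup_as_function and rehash_using_reg. */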
491 /* Determine whether register number N is considered a fixed register for the
492 purpose of approximating register costs.
493 It is desirable to replace other regs with fixed regs, to reduce need for non-fixed hard regs.
495 A reg wins if it is either the frame pointer or designated as fixed. */
496 #define FIXED_REGNO_P(N) \
497 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
498 || fixed_regs[N] || global_regs[N])
500 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
501 hard registers and pointers into the frame are the cheapest with a cost
502 of 0. Next come pseudos with a cost of one and other hard registers with
503 a cost of 2. Aside from these special cases, call `rtx_cost'. */
505 #define CHEAP_REGNO(N) \
506 (REGNO_PTR_FRAME_P(N) \
507 || (HARD_REGISTER_NUM_P (N) \
508 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
510 #define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
511 #define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
513 /* Get the info associated with register N. */
515 #define GET_CSE_REG_INFO(N) \
516 (((N) == cached_regno && cached_cse_reg_info) \
517 ? cached_cse_reg_info : get_cse_reg_info ((N)))
519 /* Get the number of times this register has been updated in this
522 #define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
524 /* Get the point at which REG was recorded in the table. */
526 #define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
528 /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a subreg). */
531 #define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)
533 /* Get the quantity number for REG. */
535 #define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
537 /* Determine if the quantity number for register X represents a valid index
538 into the qty_table. */
540 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
542 static struct table_elt *table[HASH_SIZE];
544 /* Chain of `struct table_elt's made so far for this function
545 but currently removed from the table. */
547 static struct table_elt *free_element_chain;
549 /* Number of `struct table_elt' structures made so far for this function. */
551 static int n_elements_made;
553 /* Maximum value `n_elements_made' has had so far in this compilation
554 for functions previously processed. */
556 static int max_elements_made;
558 /* Set to the cost of a constant pool reference if one was found for a
559 symbolic constant. If this was found, it means we should try to
560 convert constants into constant pool entries if they don't fit in the insn. */
563 static int constant_pool_entries_cost;
564 static int constant_pool_entries_regcost;
566 /* This data describes a block that will be processed by cse_basic_block. */
568 struct cse_basic_block_data
570 /* Lowest CUID value of insns in block. */
572 /* Highest CUID value of insns in block. */
574 /* Total number of SETs in block. */
576 /* Last insn in the block. */
578 /* Size of current branch path, if any. */
580 /* Current branch path, indicating which branches will be taken. */
583 /* The branch insn. */
585 /* Whether it should be taken or not. AROUND is the same as taken
586 except that it is used when the destination label is not preceded by a BARRIER. */
588 enum taken {PATH_TAKEN, PATH_NOT_TAKEN, PATH_AROUND} status;
592 static bool fixed_base_plus_p (rtx x);
593 static int notreg_cost (rtx, enum rtx_code);
594 static int approx_reg_cost_1 (rtx *, void *);
595 static int approx_reg_cost (rtx);
596 static int preferable (int, int, int, int);
597 static void new_basic_block (void);
598 static void make_new_qty (unsigned int, enum machine_mode);
599 static void make_regs_eqv (unsigned int, unsigned int);
600 static void delete_reg_equiv (unsigned int);
601 static int mention_regs (rtx);
602 static int insert_regs (rtx, struct table_elt *, int);
603 static void remove_from_table (struct table_elt *, unsigned);
604 static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
605 static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
606 static rtx lookup_as_function (rtx, enum rtx_code);
607 static struct table_elt *insert (rtx, struct table_elt *, unsigned,
609 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
610 static void invalidate (rtx, enum machine_mode);
611 static int cse_rtx_varies_p (rtx, int);
612 static void remove_invalid_refs (unsigned int);
613 static void remove_invalid_subreg_refs (unsigned int, unsigned int,
615 static void rehash_using_reg (rtx);
616 static void invalidate_memory (void);
617 static void invalidate_for_call (void);
618 static rtx use_related_value (rtx, struct table_elt *);
620 static inline unsigned canon_hash (rtx, enum machine_mode);
621 static inline unsigned safe_hash (rtx, enum machine_mode);
622 static unsigned hash_rtx_string (const char *);
624 static rtx canon_reg (rtx, rtx);
625 static void find_best_addr (rtx, rtx *, enum machine_mode);
626 static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
628 enum machine_mode *);
629 static rtx fold_rtx (rtx, rtx);
630 static rtx equiv_constant (rtx);
631 static void record_jump_equiv (rtx, int);
632 static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
634 static void cse_insn (rtx, rtx);
635 static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
637 static int addr_affects_sp_p (rtx);
638 static void invalidate_from_clobbers (rtx);
639 static rtx cse_process_notes (rtx, rtx);
640 static void invalidate_skipped_set (rtx, rtx, void *);
641 static void invalidate_skipped_block (rtx);
642 static rtx cse_basic_block (rtx, rtx, struct branch_path *);
643 static void count_reg_usage (rtx, int *, int);
644 static int check_for_label_ref (rtx *, void *);
645 extern void dump_class (struct table_elt*);
646 static struct cse_reg_info * get_cse_reg_info (unsigned int);
647 static int check_dependence (rtx *, void *);
649 static void flush_hash_table (void);
650 static bool insn_live_p (rtx, int *);
651 static bool set_live_p (rtx, rtx, int *);
652 static bool dead_libcall_p (rtx, int *);
653 static int cse_change_cc_mode (rtx *, void *);
654 static void cse_change_cc_mode_insn (rtx, rtx);
655 static void cse_change_cc_mode_insns (rtx, rtx, rtx);
656 static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
659 #undef RTL_HOOKS_GEN_LOWPART
660 #define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
662 static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
664 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
665 virtual regs here because the simplify_*_operation routines are called
666 by integrate.c, which is called before virtual register instantiation. */
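/* For example, frame_pointer_rtx and (plus frame_pointer_rtx (const_int 8))
   both satisfy this predicate, while (plus frame_pointer_rtx (reg 100)) does
   not, since the second operand of the PLUS must be a CONST_INT. */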
669 fixed_base_plus_p (rtx x)
671 switch (GET_CODE (x))
674 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
676 if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
678 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
679 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
684 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
686 return fixed_base_plus_p (XEXP (x, 0));
693 /* Dump the expressions in the equivalence class indicated by CLASSP.
694 This function is used only for debugging. */
696 dump_class (struct table_elt *classp)
698 struct table_elt *elt;
700 fprintf (stderr, "Equivalence chain for ");
701 print_rtl (stderr, classp->exp);
702 fprintf (stderr, ": \n");
704 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
706 print_rtl (stderr, elt->exp);
707 fprintf (stderr, "\n");
711 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
714 approx_reg_cost_1 (rtx *xp, void *data)
721 unsigned int regno = REGNO (x);
723 if (! CHEAP_REGNO (regno))
725 if (regno < FIRST_PSEUDO_REGISTER)
727 if (SMALL_REGISTER_CLASSES)
739 /* Return an estimate of the cost of the registers used in an rtx.
740 This is mostly the number of different REG expressions in the rtx;
741 however for some exceptions like fixed registers we use a cost of
742 0. If any other hard register reference occurs, return MAX_COST. */
745 approx_reg_cost (rtx x)
749 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
755 /* Return a canonical version of X for use in an address, in the sense that
756 all multiplications are represented as MULT instead of a multiplication
757 by a power of 2 being represented as ASHIFT. */
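/* For instance (a hypothetical address), (ashift (reg 100) (const_int 3)) is
   rewritten as (mult (reg 100) (const_int 8)), and the rewriting is applied
   recursively to the operands of the other operations making up the
   address. */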
760 canon_for_address (rtx x)
763 enum machine_mode mode;
777 if (GET_CODE (XEXP (x, 1)) == CONST_INT
778 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)
779 && INTVAL (XEXP (x, 1)) >= 0)
781 new = canon_for_address (XEXP (x, 0));
782 new = gen_rtx_MULT (mode, new,
783 gen_int_mode ((HOST_WIDE_INT) 1
784 << INTVAL (XEXP (x, 1)),
795 /* Now recursively process each operand of this operation. */
796 fmt = GET_RTX_FORMAT (code);
797 for (i = 0; i < GET_RTX_LENGTH (code); i++)
800 new = canon_for_address (XEXP (x, i));
806 /* Return a negative value if an rtx A, whose costs are given by COST_A
807 and REGCOST_A, is more desirable than an rtx B.
808 Return a positive value if A is less desirable, or 0 if the two are equally desirable. */
811 preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
813 /* First, get rid of cases involving expressions that are entirely
815 if (cost_a != cost_b)
817 if (cost_a == MAX_COST)
819 if (cost_b == MAX_COST)
823 /* Avoid extending lifetimes of hardregs. */
824 if (regcost_a != regcost_b)
826 if (regcost_a == MAX_COST)
828 if (regcost_b == MAX_COST)
832 /* Normal operation costs take precedence. */
833 if (cost_a != cost_b)
834 return cost_a - cost_b;
835 /* Only if these are identical consider effects on register pressure. */
836 if (regcost_a != regcost_b)
837 return regcost_a - regcost_b;
841 /* Internal function, to compute cost when X is not a register; called
842 from COST macro to keep it simple. */
845 notreg_cost (rtx x, enum rtx_code outer)
847 return ((GET_CODE (x) == SUBREG
848 && REG_P (SUBREG_REG (x))
849 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
850 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
851 && (GET_MODE_SIZE (GET_MODE (x))
852 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
853 && subreg_lowpart_p (x)
854 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
855 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
857 : rtx_cost (x, outer) * 2);
861 static struct cse_reg_info *
862 get_cse_reg_info (unsigned int regno)
864 struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
865 struct cse_reg_info *p;
867 for (p = *hash_head; p != NULL; p = p->hash_next)
868 if (p->regno == regno)
873 /* Get a new cse_reg_info structure. */
874 if (cse_reg_info_free_list)
876 p = cse_reg_info_free_list;
877 cse_reg_info_free_list = p->next;
880 p = xmalloc (sizeof (struct cse_reg_info));
882 /* Insert into hash table. */
883 p->hash_next = *hash_head;
888 p->reg_in_table = -1;
889 p->subreg_ticked = -1;
890 p->reg_qty = -regno - 1;
892 p->next = cse_reg_info_used_list;
893 cse_reg_info_used_list = p;
894 if (!cse_reg_info_used_list_end)
895 cse_reg_info_used_list_end = p;
898 /* Cache this lookup; we tend to be looking up information about the
899 same register several times in a row. */
900 cached_regno = regno;
901 cached_cse_reg_info = p;
906 /* Clear the hash table and initialize each register with its own quantity,
907 for a new basic block. */
910 new_basic_block (void)
916 /* Clear out hash table state for this pass. */
918 memset (reg_hash, 0, sizeof reg_hash);
920 if (cse_reg_info_used_list)
922 cse_reg_info_used_list_end->next = cse_reg_info_free_list;
923 cse_reg_info_free_list = cse_reg_info_used_list;
924 cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
926 cached_cse_reg_info = 0;
928 CLEAR_HARD_REG_SET (hard_regs_in_table);
930 /* The per-quantity values used to be initialized here, but it is
931 much faster to initialize each as it is made in `make_new_qty'. */
933 for (i = 0; i < HASH_SIZE; i++)
935 struct table_elt *first;
940 struct table_elt *last = first;
944 while (last->next_same_hash != NULL)
945 last = last->next_same_hash;
947 /* Now relink this entire hash chain into
948 the free element list. */
950 last->next_same_hash = free_element_chain;
951 free_element_chain = first;
961 /* Say that register REG contains a quantity in mode MODE that was not held
962 in any register before, and initialize that quantity. */
965 make_new_qty (unsigned int reg, enum machine_mode mode)
968 struct qty_table_elem *ent;
969 struct reg_eqv_elem *eqv;
971 gcc_assert (next_qty < max_qty);
973 q = REG_QTY (reg) = next_qty++;
975 ent->first_reg = reg;
978 ent->const_rtx = ent->const_insn = NULL_RTX;
979 ent->comparison_code = UNKNOWN;
981 eqv = &reg_eqv_table[reg];
982 eqv->next = eqv->prev = -1;
985 /* Make reg NEW equivalent to reg OLD.
986 OLD is not changing; NEW is. */
989 make_regs_eqv (unsigned int new, unsigned int old)
991 unsigned int lastr, firstr;
992 int q = REG_QTY (old);
993 struct qty_table_elem *ent;
997 /* Nothing should become eqv until it has a "non-invalid" qty number. */
998 gcc_assert (REGNO_QTY_VALID_P (old));
1001 firstr = ent->first_reg;
1002 lastr = ent->last_reg;
1004 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
1005 hard regs. Among pseudos, if NEW will live longer than any other reg
1006 of the same qty, and that is beyond the current basic block,
1007 make it the new canonical replacement for this qty. */
1008 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1009 /* Certain fixed registers might be of the class NO_REGS. This means
1010 that not only can they not be allocated by the compiler, but
1011 they cannot be used in substitutions or canonicalizations
1013 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1014 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1015 || (new >= FIRST_PSEUDO_REGISTER
1016 && (firstr < FIRST_PSEUDO_REGISTER
1017 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1018 || (uid_cuid[REGNO_FIRST_UID (new)]
1019 < cse_basic_block_start))
1020 && (uid_cuid[REGNO_LAST_UID (new)]
1021 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1023 reg_eqv_table[firstr].prev = new;
1024 reg_eqv_table[new].next = firstr;
1025 reg_eqv_table[new].prev = -1;
1026 ent->first_reg = new;
1030 /* If NEW is a hard reg (known to be non-fixed), insert at end.
1031 Otherwise, insert before any non-fixed hard regs that are at the
1032 end. Registers of class NO_REGS cannot be used as an
1033 equivalent for anything. */
1034 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1035 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1036 && new >= FIRST_PSEUDO_REGISTER)
1037 lastr = reg_eqv_table[lastr].prev;
1038 reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1039 if (reg_eqv_table[lastr].next >= 0)
1040 reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1042 qty_table[q].last_reg = new;
1043 reg_eqv_table[lastr].next = new;
1044 reg_eqv_table[new].prev = lastr;
1048 /* Remove REG from its equivalence class. */
1051 delete_reg_equiv (unsigned int reg)
1053 struct qty_table_elem *ent;
1054 int q = REG_QTY (reg);
1057 /* If invalid, do nothing. */
1058 if (! REGNO_QTY_VALID_P (reg))
1061 ent = &qty_table[q];
1063 p = reg_eqv_table[reg].prev;
1064 n = reg_eqv_table[reg].next;
1067 reg_eqv_table[n].prev = p;
1071 reg_eqv_table[p].next = n;
1075 REG_QTY (reg) = -reg - 1;
1078 /* Remove any invalid expressions from the hash table
1079 that refer to any of the registers contained in expression X.
1081 Make sure that newly inserted references to those registers
1082 as subexpressions will be considered valid.
1084 mention_regs is not called when a register itself
1085 is being stored in the table.
1087 Return 1 if we have done something that may have changed the hash code
1091 mention_regs (rtx x)
1101 code = GET_CODE (x);
1104 unsigned int regno = REGNO (x);
1105 unsigned int endregno
1106 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1107 : hard_regno_nregs[regno][GET_MODE (x)]);
1110 for (i = regno; i < endregno; i++)
1112 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1113 remove_invalid_refs (i);
1115 REG_IN_TABLE (i) = REG_TICK (i);
1116 SUBREG_TICKED (i) = -1;
1122 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1123 pseudo if they don't use overlapping words. We handle only pseudos
1124 here for simplicity. */
1125 if (code == SUBREG && REG_P (SUBREG_REG (x))
1126 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1128 unsigned int i = REGNO (SUBREG_REG (x));
1130 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1132 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1133 the last store to this register really stored into this
1134 subreg, then remove the memory of this subreg.
1135 Otherwise, remove any memory of the entire register and
1136 all its subregs from the table. */
1137 if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1138 || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1139 remove_invalid_refs (i);
1141 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1144 REG_IN_TABLE (i) = REG_TICK (i);
1145 SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1149 /* If X is a comparison or a COMPARE and either operand is a register
1150 that does not have a quantity, give it one. This is so that a later
1151 call to record_jump_equiv won't cause X to be assigned a different
1152 hash code and not found in the table after that call.
1154 It is not necessary to do this here, since rehash_using_reg can
1155 fix up the table later, but doing this here eliminates the need to
1156 call that expensive function in the most common case where the only
1157 use of the register is in the comparison. */
1159 if (code == COMPARE || COMPARISON_P (x))
1161 if (REG_P (XEXP (x, 0))
1162 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1163 if (insert_regs (XEXP (x, 0), NULL, 0))
1165 rehash_using_reg (XEXP (x, 0));
1169 if (REG_P (XEXP (x, 1))
1170 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1171 if (insert_regs (XEXP (x, 1), NULL, 0))
1173 rehash_using_reg (XEXP (x, 1));
1178 fmt = GET_RTX_FORMAT (code);
1179 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1181 changed |= mention_regs (XEXP (x, i));
1182 else if (fmt[i] == 'E')
1183 for (j = 0; j < XVECLEN (x, i); j++)
1184 changed |= mention_regs (XVECEXP (x, i, j));
1189 /* Update the register quantities for inserting X into the hash table
1190 with a value equivalent to CLASSP.
1191 (If the class does not contain a REG, it is irrelevant.)
1192 If MODIFIED is nonzero, X is a destination; it is being modified.
1193 Note that delete_reg_equiv should be called on a register
1194 before insert_regs is done on that register with MODIFIED != 0.
1196 Nonzero value means that elements of reg_qty have changed
1197 so X's hash code may be different. */
1200 insert_regs (rtx x, struct table_elt *classp, int modified)
1204 unsigned int regno = REGNO (x);
1207 /* If REGNO is in the equivalence table already but is of the
1208 wrong mode for that equivalence, don't do anything here. */
1210 qty_valid = REGNO_QTY_VALID_P (regno);
1213 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1215 if (ent->mode != GET_MODE (x))
1219 if (modified || ! qty_valid)
1222 for (classp = classp->first_same_value;
1224 classp = classp->next_same_value)
1225 if (REG_P (classp->exp)
1226 && GET_MODE (classp->exp) == GET_MODE (x))
1228 make_regs_eqv (regno, REGNO (classp->exp));
1232 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1233 than REG_IN_TABLE to find out if there was only a single preceding
1234 invalidation - for the SUBREG - or another one, which would be
1235 for the full register. However, if we find here that REG_TICK
1236 indicates that the register is invalid, it means that it has
1237 been invalidated in a separate operation. The SUBREG might be used
1238 now (then this is a recursive call), or we might use the full REG
1239 now and a SUBREG of it later. So bump up REG_TICK so that
1240 mention_regs will do the right thing. */
1242 && REG_IN_TABLE (regno) >= 0
1243 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1245 make_new_qty (regno, GET_MODE (x));
1252 /* If X is a SUBREG, we will likely be inserting the inner register in the
1253 table. If that register doesn't have an assigned quantity number at
1254 this point but does later, the insertion that we will be doing now will
1255 not be accessible because its hash code will have changed. So assign
1256 a quantity number now. */
1258 else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
1259 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1261 insert_regs (SUBREG_REG (x), NULL, 0);
1266 return mention_regs (x);
1269 /* Look in or update the hash table. */
1271 /* Remove table element ELT from use in the table.
1272 HASH is its hash code, made using the HASH macro.
1273 It's an argument because often that is known in advance
1274 and we save much time not recomputing it. */
1277 remove_from_table (struct table_elt *elt, unsigned int hash)
1282 /* Mark this element as removed. See cse_insn. */
1283 elt->first_same_value = 0;
1285 /* Remove the table element from its equivalence class. */
1288 struct table_elt *prev = elt->prev_same_value;
1289 struct table_elt *next = elt->next_same_value;
1292 next->prev_same_value = prev;
1295 prev->next_same_value = next;
1298 struct table_elt *newfirst = next;
1301 next->first_same_value = newfirst;
1302 next = next->next_same_value;
1307 /* Remove the table element from its hash bucket. */
1310 struct table_elt *prev = elt->prev_same_hash;
1311 struct table_elt *next = elt->next_same_hash;
1314 next->prev_same_hash = prev;
1317 prev->next_same_hash = next;
1318 else if (table[hash] == elt)
1322 /* This entry is not in the proper hash bucket. This can happen
1323 when two classes were merged by `merge_equiv_classes'. Search
1324 for the hash bucket that it heads. This happens only very
1325 rarely, so the cost is acceptable. */
1326 for (hash = 0; hash < HASH_SIZE; hash++)
1327 if (table[hash] == elt)
1332 /* Remove the table element from its related-value circular chain. */
1334 if (elt->related_value != 0 && elt->related_value != elt)
1336 struct table_elt *p = elt->related_value;
1338 while (p->related_value != elt)
1339 p = p->related_value;
1340 p->related_value = elt->related_value;
1341 if (p->related_value == p)
1342 p->related_value = 0;
1345 /* Now add it to the free element chain. */
1346 elt->next_same_hash = free_element_chain;
1347 free_element_chain = elt;
1350 /* Look up X in the hash table and return its table element,
1351 or 0 if X is not in the table.
1353 MODE is the machine-mode of X, or if X is an integer constant
1354 with VOIDmode then MODE is the mode with which X will be used.
1356 Here we are satisfied to find an expression whose tree structure looks like X. */
1359 static struct table_elt *
1360 lookup (rtx x, unsigned int hash, enum machine_mode mode)
1362 struct table_elt *p;
1364 for (p = table[hash]; p; p = p->next_same_hash)
1365 if (mode == p->mode && ((x == p->exp && REG_P (x))
1366 || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1372 /* Like `lookup' but don't care whether the table element uses invalid regs.
1373 Also ignore discrepancies in the machine mode of a register. */
1375 static struct table_elt *
1376 lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
1378 struct table_elt *p;
1382 unsigned int regno = REGNO (x);
1384 /* Don't check the machine mode when comparing registers;
1385 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1386 for (p = table[hash]; p; p = p->next_same_hash)
1388 && REGNO (p->exp) == regno)
1393 for (p = table[hash]; p; p = p->next_same_hash)
1395 && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1402 /* Look for an expression equivalent to X and with code CODE.
1403 If one is found, return that expression. */
1406 lookup_as_function (rtx x, enum rtx_code code)
1409 = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1411 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1412 long as we are narrowing. So if we looked in vain for a mode narrower
1413 than word_mode before, look for word_mode now. */
1414 if (p == 0 && code == CONST_INT
1415 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1418 PUT_MODE (x, word_mode);
1419 p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
1425 for (p = p->first_same_value; p; p = p->next_same_value)
1426 if (GET_CODE (p->exp) == code
1427 /* Make sure this is a valid entry in the table. */
1428 && exp_equiv_p (p->exp, p->exp, 1, false))
1434 /* Insert X in the hash table, assuming HASH is its hash code
1435 and CLASSP is an element of the class it should go in
1436 (or 0 if a new class should be made).
1437 It is inserted at the proper position to keep the class in
1438 the order cheapest first.
1440 MODE is the machine-mode of X, or if X is an integer constant
1441 with VOIDmode then MODE is the mode with which X will be used.
1443 For elements of equal cheapness, the most recent one
1444 goes in front, except that the first element in the list
1445 remains first unless a cheaper element is added. The order of
1446 pseudo-registers does not matter, as canon_reg will be called to
1447 find the cheapest when a register is retrieved from the table.
1449 The in_memory field in the hash table element is set to 0.
1450 The caller must set it nonzero if appropriate.
1452 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1453 and if insert_regs returns a nonzero value
1454 you must then recompute its hash code before calling here.
1456 If necessary, update table showing constant values of quantities. */
1458 #define CHEAPER(X, Y) \
1459 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1461 static struct table_elt *
1462 insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
1464 struct table_elt *elt;
1466 /* If X is a register and we haven't made a quantity for it,
1467 something is wrong. */
1468 gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1470 /* If X is a hard register, show it is being put in the table. */
1471 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1473 unsigned int regno = REGNO (x);
1474 unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
1477 for (i = regno; i < endregno; i++)
1478 SET_HARD_REG_BIT (hard_regs_in_table, i);
1481 /* Put an element for X into the right hash bucket. */
1483 elt = free_element_chain;
1485 free_element_chain = elt->next_same_hash;
1489 elt = xmalloc (sizeof (struct table_elt));
1493 elt->canon_exp = NULL_RTX;
1494 elt->cost = COST (x);
1495 elt->regcost = approx_reg_cost (x);
1496 elt->next_same_value = 0;
1497 elt->prev_same_value = 0;
1498 elt->next_same_hash = table[hash];
1499 elt->prev_same_hash = 0;
1500 elt->related_value = 0;
1503 elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1506 table[hash]->prev_same_hash = elt;
1509 /* Put it into the proper value-class. */
1512 classp = classp->first_same_value;
1513 if (CHEAPER (elt, classp))
1514 /* Insert at the head of the class. */
1516 struct table_elt *p;
1517 elt->next_same_value = classp;
1518 classp->prev_same_value = elt;
1519 elt->first_same_value = elt;
1521 for (p = classp; p; p = p->next_same_value)
1522 p->first_same_value = elt;
1526 /* Insert not at head of the class. */
1527 /* Put it after the last element cheaper than X. */
1528 struct table_elt *p, *next;
1530 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1533 /* Put it after P and before NEXT. */
1534 elt->next_same_value = next;
1536 next->prev_same_value = elt;
1538 elt->prev_same_value = p;
1539 p->next_same_value = elt;
1540 elt->first_same_value = classp;
1544 elt->first_same_value = elt;
1546 /* If this is a constant being set equivalent to a register or a register
1547 being set equivalent to a constant, note the constant equivalence.
1549 If this is a constant, it cannot be equivalent to a different constant,
1550 and a constant is the only thing that can be cheaper than a register. So
1551 we know the register is the head of the class (before the constant was
1554 If this is a register that is not already known equivalent to a
1555 constant, we must check the entire class.
1557 If this is a register that is already known equivalent to an insn,
1558 update the qtys `const_insn' to show that `this_insn' is the latest
1559 insn making that quantity equivalent to the constant. */
1561 if (elt->is_const && classp && REG_P (classp->exp)
1564 int exp_q = REG_QTY (REGNO (classp->exp));
1565 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1567 exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1568 exp_ent->const_insn = this_insn;
1573 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1576 struct table_elt *p;
1578 for (p = classp; p != 0; p = p->next_same_value)
1580 if (p->is_const && !REG_P (p->exp))
1582 int x_q = REG_QTY (REGNO (x));
1583 struct qty_table_elem *x_ent = &qty_table[x_q];
1586 = gen_lowpart (GET_MODE (x), p->exp);
1587 x_ent->const_insn = this_insn;
1594 && qty_table[REG_QTY (REGNO (x))].const_rtx
1595 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1596 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1598 /* If this is a constant with symbolic value,
1599 and it has a term with an explicit integer value,
1600 link it up with related expressions. */
1601 if (GET_CODE (x) == CONST)
1603 rtx subexp = get_related_value (x);
1605 struct table_elt *subelt, *subelt_prev;
1609 /* Get the integer-free subexpression in the hash table. */
1610 subhash = SAFE_HASH (subexp, mode);
1611 subelt = lookup (subexp, subhash, mode);
1613 subelt = insert (subexp, NULL, subhash, mode);
1614 /* Initialize SUBELT's circular chain if it has none. */
1615 if (subelt->related_value == 0)
1616 subelt->related_value = subelt;
1617 /* Find the element in the circular chain that precedes SUBELT. */
1618 subelt_prev = subelt;
1619 while (subelt_prev->related_value != subelt)
1620 subelt_prev = subelt_prev->related_value;
1621 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1622 This way the element that follows SUBELT is the oldest one. */
1623 elt->related_value = subelt_prev->related_value;
1624 subelt_prev->related_value = elt;
1631 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1632 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1633 the two classes equivalent.
1635 CLASS1 will be the surviving class; CLASS2 should not be used after this call.
1638 Any invalid entries in CLASS2 will not be copied. */
1641 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1643 struct table_elt *elt, *next, *new;
1645 /* Ensure we start with the head of the classes. */
1646 class1 = class1->first_same_value;
1647 class2 = class2->first_same_value;
1649 /* If they were already equal, forget it. */
1650 if (class1 == class2)
1653 for (elt = class2; elt; elt = next)
1657 enum machine_mode mode = elt->mode;
1659 next = elt->next_same_value;
1661 /* Remove old entry, make a new one in CLASS1's class.
1662 Don't do this for invalid entries as we cannot find their
1663 hash code (it also isn't necessary). */
1664 if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1666 bool need_rehash = false;
1668 hash_arg_in_memory = 0;
1669 hash = HASH (exp, mode);
1673 need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1674 delete_reg_equiv (REGNO (exp));
1677 remove_from_table (elt, hash);
1679 if (insert_regs (exp, class1, 0) || need_rehash)
1681 rehash_using_reg (exp);
1682 hash = HASH (exp, mode);
1684 new = insert (exp, class1, hash, mode);
1685 new->in_memory = hash_arg_in_memory;
1690 /* Flush the entire hash table. */
1693 flush_hash_table (void)
1696 struct table_elt *p;
1698 for (i = 0; i < HASH_SIZE; i++)
1699 for (p = table[i]; p; p = table[i])
1701 /* Note that invalidate can remove elements
1702 after P in the current hash chain. */
1704 invalidate (p->exp, p->mode);
1706 remove_from_table (p, i);
1710 /* Function called for each rtx to check whether true dependence exist. */
1711 struct check_dependence_data
1713 enum machine_mode mode;
1719 check_dependence (rtx *x, void *data)
1721 struct check_dependence_data *d = (struct check_dependence_data *) data;
1722 if (*x && MEM_P (*x))
1723 return canon_true_dependence (d->exp, d->mode, d->addr, *x,
1729 /* Remove from the hash table, or mark as invalid, all expressions whose
1730 values could be altered by storing in X. X is a register, a subreg, or
1731 a memory reference with nonvarying address (because, when a memory
1732 reference with a varying address is stored in, all memory references are
1733 removed by invalidate_memory so specific invalidation is superfluous).
1734 FULL_MODE, if not VOIDmode, indicates that this much should be
1735 invalidated instead of just the amount indicated by the mode of X. This
1736 is only used for bitfield stores into memory.
1738 A nonvarying address may be just a register or just a symbol reference,
1739 or it may be either of those plus a numeric offset. */
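/* For example (hypothetical stores): a store to
   (mem (plus frame_pointer_rtx (const_int 8))) need only invalidate table
   entries whose memory references can overlap that slot, whereas a store
   through (mem (reg 100)), whose address could point anywhere, causes every
   memory-referencing entry to be removed (see invalidate_memory). */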
1742 invalidate (rtx x, enum machine_mode full_mode)
1745 struct table_elt *p;
1748 switch (GET_CODE (x))
1752 /* If X is a register, dependencies on its contents are recorded
1753 through the qty number mechanism. Just change the qty number of
1754 the register, mark it as invalid for expressions that refer to it,
1755 and remove it itself. */
1756 unsigned int regno = REGNO (x);
1757 unsigned int hash = HASH (x, GET_MODE (x));
1759 /* Remove REGNO from any quantity list it might be on and indicate
1760 that its value might have changed. If it is a pseudo, remove its
1761 entry from the hash table.
1763 For a hard register, we do the first two actions above for any
1764 additional hard registers corresponding to X. Then, if any of these
1765 registers are in the table, we must remove any REG entries that
1766 overlap these registers. */
1768 delete_reg_equiv (regno);
1770 SUBREG_TICKED (regno) = -1;
1772 if (regno >= FIRST_PSEUDO_REGISTER)
1774 /* Because a register can be referenced in more than one mode,
1775 we might have to remove more than one table entry. */
1776 struct table_elt *elt;
1778 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1779 remove_from_table (elt, hash);
1783 HOST_WIDE_INT in_table
1784 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1785 unsigned int endregno
1786 = regno + hard_regno_nregs[regno][GET_MODE (x)];
1787 unsigned int tregno, tendregno, rn;
1788 struct table_elt *p, *next;
1790 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1792 for (rn = regno + 1; rn < endregno; rn++)
1794 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1795 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1796 delete_reg_equiv (rn);
1798 SUBREG_TICKED (rn) = -1;
1802 for (hash = 0; hash < HASH_SIZE; hash++)
1803 for (p = table[hash]; p; p = next)
1805 next = p->next_same_hash;
1808 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1811 tregno = REGNO (p->exp);
1813 = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
1814 if (tendregno > regno && tregno < endregno)
1815 remove_from_table (p, hash);
1822 invalidate (SUBREG_REG (x), VOIDmode);
1826 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1827 invalidate (XVECEXP (x, 0, i), VOIDmode);
1831 /* This is part of a disjoint return value; extract the location in
1832 question ignoring the offset. */
1833 invalidate (XEXP (x, 0), VOIDmode);
1837 addr = canon_rtx (get_addr (XEXP (x, 0)));
1838 /* Calculate the canonical version of X here so that
1839 true_dependence doesn't generate new RTL for X on each call. */
1842 /* Remove all hash table elements that refer to overlapping pieces of
1844 if (full_mode == VOIDmode)
1845 full_mode = GET_MODE (x);
1847 for (i = 0; i < HASH_SIZE; i++)
1849 struct table_elt *next;
1851 for (p = table[i]; p; p = next)
1853 next = p->next_same_hash;
1856 struct check_dependence_data d;
1858 /* Just canonicalize the expression once;
1859 otherwise each time we call invalidate
1860 true_dependence will canonicalize the
1861 expression again. */
1863 p->canon_exp = canon_rtx (p->exp);
1867 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1868 remove_from_table (p, i);
1879 /* Remove all expressions that refer to register REGNO,
1880 since they are already invalid, and we are about to
1881 mark that register valid again and don't want the old
1882 expressions to reappear as valid. */
1885 remove_invalid_refs (unsigned int regno)
1888 struct table_elt *p, *next;
1890 for (i = 0; i < HASH_SIZE; i++)
1891 for (p = table[i]; p; p = next)
1893 next = p->next_same_hash;
1895 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1896 remove_from_table (p, i);
1900 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET, and mode MODE. */
1903 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1904 enum machine_mode mode)
1907 struct table_elt *p, *next;
1908 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1910 for (i = 0; i < HASH_SIZE; i++)
1911 for (p = table[i]; p; p = next)
1914 next = p->next_same_hash;
1917 && (GET_CODE (exp) != SUBREG
1918 || !REG_P (SUBREG_REG (exp))
1919 || REGNO (SUBREG_REG (exp)) != regno
1920 || (((SUBREG_BYTE (exp)
1921 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1922 && SUBREG_BYTE (exp) <= end))
1923 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1924 remove_from_table (p, i);
1928 /* Recompute the hash codes of any valid entries in the hash table that
1929 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1931 This is called when we make a jump equivalence. */
1934 rehash_using_reg (rtx x)
1937 struct table_elt *p, *next;
1940 if (GET_CODE (x) == SUBREG)
1943 /* If X is not a register or if the register is known not to be in any
1944 valid entries in the table, we have no work to do. */
1947 || REG_IN_TABLE (REGNO (x)) < 0
1948 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1951 /* Scan all hash chains looking for valid entries that mention X.
1952 If we find one and it is in the wrong hash chain, move it. */
1954 for (i = 0; i < HASH_SIZE; i++)
1955 for (p = table[i]; p; p = next)
1957 next = p->next_same_hash;
1958 if (reg_mentioned_p (x, p->exp)
1959 && exp_equiv_p (p->exp, p->exp, 1, false)
1960 && i != (hash = SAFE_HASH (p->exp, p->mode)))
1962 if (p->next_same_hash)
1963 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1965 if (p->prev_same_hash)
1966 p->prev_same_hash->next_same_hash = p->next_same_hash;
1968 table[i] = p->next_same_hash;
1970 p->next_same_hash = table[hash];
1971 p->prev_same_hash = 0;
1973 table[hash]->prev_same_hash = p;
1979 /* Remove from the hash table any expression that is a call-clobbered
1980 register. Also update their TICK values. */
1983 invalidate_for_call (void)
1985 unsigned int regno, endregno;
1988 struct table_elt *p, *next;
1991 /* Go through all the hard registers. For each that is clobbered in
1992 a CALL_INSN, remove the register from quantity chains and update
1993 reg_tick if defined. Also see if any of these registers is currently
1996 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1997 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1999 delete_reg_equiv (regno);
2000 if (REG_TICK (regno) >= 0)
2003 SUBREG_TICKED (regno) = -1;
2006 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2009 /* In the case where we have no call-clobbered hard registers in the
2010 table, we are done. Otherwise, scan the table and remove any
2011 entry that overlaps a call-clobbered register. */
2014 for (hash = 0; hash < HASH_SIZE; hash++)
2015 for (p = table[hash]; p; p = next)
2017 next = p->next_same_hash;
2020 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2023 regno = REGNO (p->exp);
2024 endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];
2026 for (i = regno; i < endregno; i++)
2027 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2029 remove_from_table (p, hash);
2035 /* Given an expression X of type CONST,
2036 and ELT which is its table entry (or 0 if it
2037 is not in the hash table),
2038 return an alternate expression for X as a register plus integer.
2039 If none can be found, return 0. */
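/* Added illustration (hypothetical register numbers): if X is
   (const (plus (symbol_ref "tbl") (const_int 12))) and the table already
   knows that (reg 100) holds (const (plus (symbol_ref "tbl") (const_int 4))),
   the two constants are related, and this function can return
   (plus (reg 100) (const_int 8)): the known register plus the difference
   of the integer terms. */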
2042 use_related_value (rtx x, struct table_elt *elt)
2044 struct table_elt *relt = 0;
2045 struct table_elt *p, *q;
2046 HOST_WIDE_INT offset;
2048 /* First, is there anything related known?
2049 If we have a table element, we can tell from that.
2050 Otherwise, must look it up. */
2052 if (elt != 0 && elt->related_value != 0)
2054 else if (elt == 0 && GET_CODE (x) == CONST)
2056 rtx subexp = get_related_value (x);
2058 relt = lookup (subexp,
2059 SAFE_HASH (subexp, GET_MODE (subexp)),
2066 /* Search all related table entries for one that has an
2067 equivalent register. */
2072 /* This loop is strange in that it is executed in two different cases.
2073 The first is when X is already in the table. Then it is searching
2074 the RELATED_VALUE list of X's class (RELT). The second case is when
2075 X is not in the table. Then RELT points to a class for the related
2078 Ensure that, whatever case we are in, we ignore classes that have
2079 the same value as X. */
2081 if (rtx_equal_p (x, p->exp))
2084 for (q = p->first_same_value; q; q = q->next_same_value)
2091 p = p->related_value;
2093 /* We went all the way around, so there is nothing to be found.
2094 Alternatively, perhaps RELT was in the table for some other reason
2095 and it has no related values recorded. */
2096 if (p == relt || p == 0)
2103 offset = (get_integer_term (x) - get_integer_term (p->exp));
2104 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2105 return plus_constant (q->exp, offset);
2108 /* Hash a string. Just add its bytes up. */
2109 static inline unsigned
2110 hash_rtx_string (const char *ps)
2113 const unsigned char *p = (const unsigned char *) ps;
2122 /* Hash an rtx. We are careful to make sure the value is never negative.
2123 Equivalent registers hash identically.
2124 MODE is used in hashing for CONST_INTs only;
2125 otherwise the mode of X is used.
2127 Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2129 If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2130 a MEM rtx which does not have the RTX_UNCHANGING_P bit set.
2132 Note that cse_insn knows that the hash code of a MEM expression
2133 is just (int) MEM plus the hash code of the address. */
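/* Added note: because the REG case below folds in REG_QTY (REGNO (X))
   rather than the register number itself (when HAVE_REG_QTY), two pseudos
   currently recorded as holding the same value hash identically; e.g.
   (plus (reg 100) (const_int 4)) and (plus (reg 101) (const_int 4)) land
   in the same chain while regs 100 and 101 share a quantity. */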
2136 hash_rtx (rtx x, enum machine_mode mode, int *do_not_record_p,
2137 int *hash_arg_in_memory_p, bool have_reg_qty)
2144 /* Used to turn recursion into iteration. We can't rely on GCC's
2145 tail-recursion elimination since we need to keep accumulating values
2151 code = GET_CODE (x);
2156 unsigned int regno = REGNO (x);
2158 if (!reload_completed)
2160 /* On some machines, we can't record any non-fixed hard register,
2161 because extending its life will cause reload problems. We
2162 consider ap, fp, sp, gp to be fixed for this purpose.
2164 We also consider CCmode registers to be fixed for this purpose;
2165 failure to do so leads to failure to simplify 0<100 type of
2168 On all machines, we can't record any global registers.
2169 Nor should we record any register that is in a small
2170 class, as defined by CLASS_LIKELY_SPILLED_P. */
2173 if (regno >= FIRST_PSEUDO_REGISTER)
2175 else if (x == frame_pointer_rtx
2176 || x == hard_frame_pointer_rtx
2177 || x == arg_pointer_rtx
2178 || x == stack_pointer_rtx
2179 || x == pic_offset_table_rtx)
2181 else if (global_regs[regno])
2183 else if (fixed_regs[regno])
2185 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2187 else if (SMALL_REGISTER_CLASSES)
2189 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2196 *do_not_record_p = 1;
2201 hash += ((unsigned int) REG << 7);
2202 hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2206 /* We handle SUBREG of a REG specially because the underlying
2207 reg changes its hash value with every value change; we don't
2208 want to have to forget unrelated subregs when one subreg changes. */
2211 if (REG_P (SUBREG_REG (x)))
2213 hash += (((unsigned int) SUBREG << 7)
2214 + REGNO (SUBREG_REG (x))
2215 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2222 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2223 + (unsigned int) INTVAL (x));
2227 /* This is like the general case, except that it only counts
2228 the integers representing the constant. */
2229 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2230 if (GET_MODE (x) != VOIDmode)
2231 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2233 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2234 + (unsigned int) CONST_DOUBLE_HIGH (x));
2242 units = CONST_VECTOR_NUNITS (x);
2244 for (i = 0; i < units; ++i)
2246 elt = CONST_VECTOR_ELT (x, i);
2247 hash += hash_rtx (elt, GET_MODE (elt), do_not_record_p,
2248 hash_arg_in_memory_p, have_reg_qty);
2254 /* Assume there is only one rtx object for any given label. */
2256 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2257 differences and differences between each stage's debugging dumps. */
2258 hash += (((unsigned int) LABEL_REF << 7)
2259 + CODE_LABEL_NUMBER (XEXP (x, 0)));
2264 /* Don't hash on the symbol's address to avoid bootstrap differences.
2265 Different hash values may cause expressions to be recorded in
2266 different orders and thus different registers to be used in the
2267 final assembler. This also avoids differences in the dump files
2268 between various stages. */
2270 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2273 h += (h << 7) + *p++; /* ??? revisit */
2275 hash += ((unsigned int) SYMBOL_REF << 7) + h;
2280 /* We don't record if marked volatile or if BLKmode since we don't
2281 know the size of the move. */
2282 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2284 *do_not_record_p = 1;
2287 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2288 *hash_arg_in_memory_p = 1;
2290 /* Now that we have already found this special case,
2291 might as well speed it up as much as possible. */
2292 hash += (unsigned) MEM;
2297 /* A USE that mentions non-volatile memory needs special
2298 handling since the MEM may be BLKmode which normally
2299 prevents an entry from being made. Pure calls are
2300 marked by a USE which mentions BLKmode memory.
2301 See calls.c:emit_call_1. */
2302 if (MEM_P (XEXP (x, 0))
2303 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2305 hash += (unsigned) USE;
2308 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2309 *hash_arg_in_memory_p = 1;
2311 /* Now that we have already found this special case,
2312 might as well speed it up as much as possible. */
2313 hash += (unsigned) MEM;
2328 case UNSPEC_VOLATILE:
2329 *do_not_record_p = 1;
2333 if (MEM_VOLATILE_P (x))
2335 *do_not_record_p = 1;
2340 /* We don't want to take the filename and line into account. */
2341 hash += (unsigned) code + (unsigned) GET_MODE (x)
2342 + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2343 + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2344 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2346 if (ASM_OPERANDS_INPUT_LENGTH (x))
2348 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2350 hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
2351 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2352 do_not_record_p, hash_arg_in_memory_p,
2355 (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2358 hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2359 x = ASM_OPERANDS_INPUT (x, 0);
2360 mode = GET_MODE (x);
2372 i = GET_RTX_LENGTH (code) - 1;
2373 hash += (unsigned) code + (unsigned) GET_MODE (x);
2374 fmt = GET_RTX_FORMAT (code);
2380 /* If we are about to do the last recursive call
2381 needed at this level, change it into iteration.
2382 This function is called enough to be worth it. */
2389 hash += hash_rtx (XEXP (x, i), 0, do_not_record_p,
2390 hash_arg_in_memory_p, have_reg_qty);
2394 for (j = 0; j < XVECLEN (x, i); j++)
2395 hash += hash_rtx (XVECEXP (x, i, j), 0, do_not_record_p,
2396 hash_arg_in_memory_p, have_reg_qty);
2400 hash += hash_rtx_string (XSTR (x, i));
2404 hash += (unsigned int) XINT (x, i);
2419 /* Hash an rtx X for cse via hash_rtx.
2420 Stores 1 in do_not_record if any subexpression is volatile.
2421 Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2422 does not have the RTX_UNCHANGING_P bit set. */
2424 static inline unsigned
2425 canon_hash (rtx x, enum machine_mode mode)
2427 return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2430 /* Like canon_hash but with no side effects, i.e. do_not_record
2431 and hash_arg_in_memory are not changed. */
2433 static inline unsigned
2434 safe_hash (rtx x, enum machine_mode mode)
2436 int dummy_do_not_record;
2437 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2440 /* Return 1 iff X and Y would canonicalize into the same thing,
2441 without actually constructing the canonicalization of either one.
2442 If VALIDATE is nonzero,
2443 we assume X is an expression being processed from the rtl
2444 and Y was found in the hash table. We check register refs
2445 in Y for being marked as valid.
2447 If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
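/* Added illustration: with VALIDATE zero and FOR_GCSE false,
   (plus:SI (reg 100) (reg 101)) and (plus:SI (reg 101) (reg 100)) compare
   equal through the commutative case below, and two different REGs compare
   equal whenever REG_QTY says they currently hold the same value. */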
2450 exp_equiv_p (rtx x, rtx y, int validate, bool for_gcse)
2456 /* Note: it is incorrect to assume an expression is equivalent to itself
2457 if VALIDATE is nonzero. */
2458 if (x == y && !validate)
2461 if (x == 0 || y == 0)
2464 code = GET_CODE (x);
2465 if (code != GET_CODE (y))
2468 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2469 if (GET_MODE (x) != GET_MODE (y))
2480 return XEXP (x, 0) == XEXP (y, 0);
2483 return XSTR (x, 0) == XSTR (y, 0);
2487 return REGNO (x) == REGNO (y);
2490 unsigned int regno = REGNO (y);
2492 unsigned int endregno
2493 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2494 : hard_regno_nregs[regno][GET_MODE (y)]);
2496 /* If the quantities are not the same, the expressions are not
2497 equivalent. If they are and we are not to validate, they
2498 are equivalent. Otherwise, ensure all regs are up-to-date. */
2500 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2506 for (i = regno; i < endregno; i++)
2507 if (REG_IN_TABLE (i) != REG_TICK (i))
2516 /* Can't merge two expressions in different alias sets, since we
2517 can decide that the expression is transparent in a block when
2518 it isn't, due to it being set with a different alias set.
2519 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
2522 /* A volatile mem should not be considered equivalent to any
2524 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2529 /* For commutative operations, check both orders. */
2537 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2539 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2540 validate, for_gcse))
2541 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2543 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2544 validate, for_gcse)));
2547 /* We don't use the generic code below because we want to
2548 disregard filename and line numbers. */
2550 /* A volatile asm isn't equivalent to any other. */
2551 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2554 if (GET_MODE (x) != GET_MODE (y)
2555 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2556 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2557 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2558 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2559 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2562 if (ASM_OPERANDS_INPUT_LENGTH (x))
2564 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2565 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2566 ASM_OPERANDS_INPUT (y, i),
2568 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2569 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2579 /* Compare the elements. If any pair of corresponding elements
2580 fail to match, return 0 for the whole thing. */
2582 fmt = GET_RTX_FORMAT (code);
2583 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2588 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2589 validate, for_gcse))
2594 if (XVECLEN (x, i) != XVECLEN (y, i))
2596 for (j = 0; j < XVECLEN (x, i); j++)
2597 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2598 validate, for_gcse))
2603 if (strcmp (XSTR (x, i), XSTR (y, i)))
2608 if (XINT (x, i) != XINT (y, i))
2613 if (XWINT (x, i) != XWINT (y, i))
2629 /* Return 1 if X has a value that can vary even between two
2630 executions of the program. 0 means X can be compared reliably
2631 against certain constants or near-constants. */
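/* Added illustration: if (reg 100) is currently known to equal
   (const_int 64), then both (reg 100) and (plus (reg 100) (const_int 4))
   are reported here as non-varying, even though a pseudo register on its
   own would normally be treated as varying. */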
2634 cse_rtx_varies_p (rtx x, int from_alias)
2636 /* We need not check for X and the equivalence class being of the same
2637 mode because if X is equivalent to a constant in some mode, it
2638 doesn't vary in any mode. */
2641 && REGNO_QTY_VALID_P (REGNO (x)))
2643 int x_q = REG_QTY (REGNO (x));
2644 struct qty_table_elem *x_ent = &qty_table[x_q];
2646 if (GET_MODE (x) == x_ent->mode
2647 && x_ent->const_rtx != NULL_RTX)
2651 if (GET_CODE (x) == PLUS
2652 && GET_CODE (XEXP (x, 1)) == CONST_INT
2653 && REG_P (XEXP (x, 0))
2654 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2656 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2657 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2659 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2660 && x0_ent->const_rtx != NULL_RTX)
2664 /* This can happen as the result of virtual register instantiation, if
2665 the initial constant is too large to be a valid address. This gives
2666 us a three instruction sequence, load large offset into a register,
2667 load fp minus a constant into a register, then a MEM which is the
2668 sum of the two `constant' registers. */
2669 if (GET_CODE (x) == PLUS
2670 && REG_P (XEXP (x, 0))
2671 && REG_P (XEXP (x, 1))
2672 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2673 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2675 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2676 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2677 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2678 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2680 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2681 && x0_ent->const_rtx != NULL_RTX
2682 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2683 && x1_ent->const_rtx != NULL_RTX)
2687 return rtx_varies_p (x, from_alias);
2690 /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2691 the result if necessary. INSN is as for canon_reg. */
2694 validate_canon_reg (rtx *xloc, rtx insn)
2696 rtx new = canon_reg (*xloc, insn);
2699 /* If replacing pseudo with hard reg or vice versa, ensure the
2700 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2701 if (insn != 0 && new != 0
2702 && REG_P (new) && REG_P (*xloc)
2703 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2704 != (REGNO (*xloc) < FIRST_PSEUDO_REGISTER))
2705 || GET_MODE (new) != GET_MODE (*xloc)
2706 || (insn_code = recog_memoized (insn)) < 0
2707 || insn_data[insn_code].n_dups > 0))
2708 validate_change (insn, xloc, new, 1);
2713 /* Canonicalize an expression:
2714 replace each register reference inside it
2715 with the "oldest" equivalent register.
2717 If INSN is nonzero and we are replacing a pseudo with a hard register
2718 or vice versa, validate_change is used to ensure that INSN remains valid
2719 after we make our substitution. The calls are made with IN_GROUP nonzero
2720 so apply_change_group must be called upon the outermost return from this
2721 function (unless INSN is zero). The result of apply_change_group can
2722 generally be discarded since the changes we are making are optional. */
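/* Added illustration (hypothetical register numbers): if pseudos 104 and
   120 are in the same quantity and 104 is the first register in the
   quantity's chain, canonicalizing (plus:SI (reg 120) (const_int 4))
   yields (plus:SI (reg 104) (const_int 4)). Hard registers and registers
   whose class is NO_REGS are left untouched, as explained below. */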
2725 canon_reg (rtx x, rtx insn)
2734 code = GET_CODE (x);
2753 struct qty_table_elem *ent;
2755 /* Never replace a hard reg, because hard regs can appear
2756 in more than one machine mode, and we must preserve the mode
2757 of each occurrence. Also, some hard regs appear in
2758 MEMs that are shared and mustn't be altered. Don't try to
2759 replace any reg that maps to a reg of class NO_REGS. */
2760 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2761 || ! REGNO_QTY_VALID_P (REGNO (x)))
2764 q = REG_QTY (REGNO (x));
2765 ent = &qty_table[q];
2766 first = ent->first_reg;
2767 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2768 : REGNO_REG_CLASS (first) == NO_REGS ? x
2769 : gen_rtx_REG (ent->mode, first));
2776 fmt = GET_RTX_FORMAT (code);
2777 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2782 validate_canon_reg (&XEXP (x, i), insn);
2783 else if (fmt[i] == 'E')
2784 for (j = 0; j < XVECLEN (x, i); j++)
2785 validate_canon_reg (&XVECEXP (x, i, j), insn);
2791 /* LOC is a location within INSN that is an operand address (the contents of
2792 a MEM). Find the best equivalent address to use that is valid for this
2795 On most CISC machines, complicated address modes are costly, and rtx_cost
2796 is a good approximation for that cost. However, most RISC machines have
2797 only a few (usually only one) memory reference formats. If an address is
2798 valid at all, it is often just as cheap as any other address. Hence, for
2799 RISC machines, we use `address_cost' to compare the costs of various
2800 addresses. For two addresses of equal cost, choose the one with the
2801 highest `rtx_cost' value as that has the potential of eliminating the
2802 most insns. For equal costs, we choose the first in the equivalence
2803 class. Note that we ignore the fact that pseudo registers are cheaper than
2804 hard registers here because we would also prefer the pseudo registers. */
2807 find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2809 struct table_elt *elt;
2811 struct table_elt *p;
2812 int found_better = 1;
2813 int save_do_not_record = do_not_record;
2814 int save_hash_arg_in_memory = hash_arg_in_memory;
2819 /* Do not try to replace constant addresses or addresses of local and
2820 argument slots. These MEM expressions are made only once and inserted
2821 in many instructions, as well as being used to control symbol table
2822 output. It is not safe to clobber them.
2824 There are some uncommon cases where the address is already in a register
2825 for some reason, but we cannot take advantage of that because we have
2826 no easy way to unshare the MEM. In addition, looking up all stack
2827 addresses is costly. */
2828 if ((GET_CODE (addr) == PLUS
2829 && REG_P (XEXP (addr, 0))
2830 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2831 && (regno = REGNO (XEXP (addr, 0)),
2832 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2833 || regno == ARG_POINTER_REGNUM))
2835 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2836 || regno == HARD_FRAME_POINTER_REGNUM
2837 || regno == ARG_POINTER_REGNUM))
2838 || CONSTANT_ADDRESS_P (addr))
2841 /* If this address is not simply a register, try to fold it. This will
2842 sometimes simplify the expression. Many simplifications
2843 will not be valid, but some, usually applying the associative rule, will
2844 be valid and produce better code. */
2847 rtx folded = fold_rtx (addr, NULL_RTX);
2850 int addr_folded_cost = address_cost (folded, mode);
2851 int addr_cost = address_cost (addr, mode);
2853 if ((addr_folded_cost < addr_cost
2854 || (addr_folded_cost == addr_cost
2855 /* ??? The rtx_cost comparison is left over from an older
2856 version of this code. It is probably no longer helpful. */
2857 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2858 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2859 && validate_change (insn, loc, folded, 0))
2864 /* If this address is not in the hash table, we can't look for equivalences
2865 of the whole address. Also, ignore if volatile. */
2868 hash = HASH (addr, Pmode);
2869 addr_volatile = do_not_record;
2870 do_not_record = save_do_not_record;
2871 hash_arg_in_memory = save_hash_arg_in_memory;
2876 elt = lookup (addr, hash, Pmode);
2880 /* We need to find the best (under the criteria documented above) entry
2881 in the class that is valid. We use the `flag' field to indicate
2882 choices that were invalid and iterate until we can't find a better
2883 one that hasn't already been tried. */
2885 for (p = elt->first_same_value; p; p = p->next_same_value)
2888 while (found_better)
2890 int best_addr_cost = address_cost (*loc, mode);
2891 int best_rtx_cost = (elt->cost + 1) >> 1;
2893 struct table_elt *best_elt = elt;
2896 for (p = elt->first_same_value; p; p = p->next_same_value)
2900 || exp_equiv_p (p->exp, p->exp, 1, false))
2901 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2902 || (exp_cost == best_addr_cost
2903 && ((p->cost + 1) >> 1) > best_rtx_cost)))
2906 best_addr_cost = exp_cost;
2907 best_rtx_cost = (p->cost + 1) >> 1;
2914 if (validate_change (insn, loc,
2915 canon_reg (copy_rtx (best_elt->exp),
2924 /* If the address is a binary operation with the first operand a register
2925 and the second a constant, do the same as above, but looking for
2926 equivalences of the register. Then try to simplify before checking for
2927 the best address to use. This catches a few cases: First is when we
2928 have REG+const and the register is another REG+const. We can often merge
2929 the constants and eliminate one insn and one register. It may also be
2930 that a machine has a cheap REG+REG+const. Finally, this improves the
2931 code on the Alpha for unaligned byte stores. */
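/* Added illustration: if *LOC is (plus (reg 101) (const_int 4)) and the
   table knows that (reg 101) equals (plus (reg 100) (const_int 8)), the
   search below can rebuild the address as (plus (reg 100) (const_int 12)),
   which is often cheaper and may allow the insn that computed reg 101 to
   be deleted later. */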
2933 if (flag_expensive_optimizations
2934 && ARITHMETIC_P (*loc)
2935 && REG_P (XEXP (*loc, 0)))
2937 rtx op1 = XEXP (*loc, 1);
2940 hash = HASH (XEXP (*loc, 0), Pmode);
2941 do_not_record = save_do_not_record;
2942 hash_arg_in_memory = save_hash_arg_in_memory;
2944 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2948 /* We need to find the best (under the criteria documented above) entry
2949 in the class that is valid. We use the `flag' field to indicate
2950 choices that were invalid and iterate until we can't find a better
2951 one that hasn't already been tried. */
2953 for (p = elt->first_same_value; p; p = p->next_same_value)
2956 while (found_better)
2958 int best_addr_cost = address_cost (*loc, mode);
2959 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2960 struct table_elt *best_elt = elt;
2961 rtx best_rtx = *loc;
2964 /* This is at worst case an O(n^2) algorithm, so limit our search
2965 to the first 32 elements on the list. This avoids trouble
2966 compiling code with very long basic blocks that can easily
2967 call simplify_gen_binary so many times that we run out of
2971 for (p = elt->first_same_value, count = 0;
2973 p = p->next_same_value, count++)
2976 || exp_equiv_p (p->exp, p->exp, 1, false)))
2978 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
2982 /* Get the canonical version of the address so we can accept
2984 new = canon_for_address (new);
2986 new_cost = address_cost (new, mode);
2988 if (new_cost < best_addr_cost
2989 || (new_cost == best_addr_cost
2990 && (COST (new) + 1) >> 1 > best_rtx_cost))
2993 best_addr_cost = new_cost;
2994 best_rtx_cost = (COST (new) + 1) >> 1;
3002 if (validate_change (insn, loc,
3003 canon_reg (copy_rtx (best_rtx),
3013 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3014 operation (EQ, NE, GT, etc.), follow it back through the hash table and
3015 find what values are being compared.
3017 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3018 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3019 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3020 compared to produce cc0.
3022 The return value is the comparison operator and is either the code of
3023 A or the code corresponding to the inverse of the comparison. */
3025 static enum rtx_code
3026 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
3027 enum machine_mode *pmode1, enum machine_mode *pmode2)
3031 arg1 = *parg1, arg2 = *parg2;
3033 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3035 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3037 /* Set nonzero when we find something of interest. */
3039 int reverse_code = 0;
3040 struct table_elt *p = 0;
3042 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3043 On machines with CC0, this is the only case that can occur, since
3044 fold_rtx will return the COMPARE or item being compared with zero
3047 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3050 /* If ARG1 is a comparison operator and CODE is testing for
3051 STORE_FLAG_VALUE, get the inner arguments. */
3053 else if (COMPARISON_P (arg1))
3055 #ifdef FLOAT_STORE_FLAG_VALUE
3056 REAL_VALUE_TYPE fsfv;
3060 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3061 && code == LT && STORE_FLAG_VALUE == -1)
3062 #ifdef FLOAT_STORE_FLAG_VALUE
3063 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3064 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3065 REAL_VALUE_NEGATIVE (fsfv)))
3070 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3071 && code == GE && STORE_FLAG_VALUE == -1)
3072 #ifdef FLOAT_STORE_FLAG_VALUE
3073 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3074 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3075 REAL_VALUE_NEGATIVE (fsfv)))
3078 x = arg1, reverse_code = 1;
3081 /* ??? We could also check for
3083 (ne (and (eq (...) (const_int 1))) (const_int 0))
3085 and related forms, but let's wait until we see them occurring. */
3088 /* Look up ARG1 in the hash table and see if it has an equivalence
3089 that lets us see what is being compared. */
3090 p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
3093 p = p->first_same_value;
3095 /* If what we compare is already known to be constant, that is as
3097 We need to break the loop in this case, because otherwise we
3098 can have an infinite loop when looking at a reg that is known
3099 to be a constant which is the same as a comparison of a reg
3100 against zero which appears later in the insn stream, which in
3101 turn is constant and the same as the comparison of the first reg
3107 for (; p; p = p->next_same_value)
3109 enum machine_mode inner_mode = GET_MODE (p->exp);
3110 #ifdef FLOAT_STORE_FLAG_VALUE
3111 REAL_VALUE_TYPE fsfv;
3114 /* If the entry isn't valid, skip it. */
3115 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3118 if (GET_CODE (p->exp) == COMPARE
3119 /* Another possibility is that this machine has a compare insn
3120 that includes the comparison code. In that case, ARG1 would
3121 be equivalent to a comparison operation that would set ARG1 to
3122 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3123 ORIG_CODE is the actual comparison being done; if it is an EQ,
3124 we must reverse ORIG_CODE. On machine with a negative value
3125 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3128 && GET_MODE_CLASS (inner_mode) == MODE_INT
3129 && (GET_MODE_BITSIZE (inner_mode)
3130 <= HOST_BITS_PER_WIDE_INT)
3131 && (STORE_FLAG_VALUE
3132 & ((HOST_WIDE_INT) 1
3133 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3134 #ifdef FLOAT_STORE_FLAG_VALUE
3136 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3137 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3138 REAL_VALUE_NEGATIVE (fsfv)))
3141 && COMPARISON_P (p->exp)))
3146 else if ((code == EQ
3148 && GET_MODE_CLASS (inner_mode) == MODE_INT
3149 && (GET_MODE_BITSIZE (inner_mode)
3150 <= HOST_BITS_PER_WIDE_INT)
3151 && (STORE_FLAG_VALUE
3152 & ((HOST_WIDE_INT) 1
3153 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3154 #ifdef FLOAT_STORE_FLAG_VALUE
3156 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3157 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3158 REAL_VALUE_NEGATIVE (fsfv)))
3161 && COMPARISON_P (p->exp))
3168 /* If this is a non-trapping address, e.g. fp + constant, the
3169 equivalent is a better operand since it may let us predict
3170 the value of the comparison. */
3171 else if (!rtx_addr_can_trap_p (p->exp))
3178 /* If we didn't find a useful equivalence for ARG1, we are done.
3179 Otherwise, set up for the next iteration. */
3183 /* If we need to reverse the comparison, make sure that that is
3184 possible -- we can't necessarily infer the value of GE from LT
3185 with floating-point operands. */
3188 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3189 if (reversed == UNKNOWN)
3194 else if (COMPARISON_P (x))
3195 code = GET_CODE (x);
3196 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3199 /* Return our results. Return the modes from before fold_rtx
3200 because fold_rtx might produce const_int, and then it's too late. */
3201 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3202 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3207 /* If X is a nontrivial arithmetic operation on an argument
3208 for which a constant value can be determined, return
3209 the result of operating on that value, as a constant.
3210 Otherwise, return X, possibly with one or more operands
3211 modified by recursive calls to this function.
3213 If X is a register whose contents are known, we do NOT
3214 return those contents here. equiv_constant is called to
3217 INSN is the insn that we may be modifying. If it is 0, make a copy
3218 of X before modifying it. */
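/* Added illustration: if (reg 100) is currently equivalent to
   (const_int 8), folding (plus:SI (reg 100) (const_int 4)) returns
   (const_int 12). If no operand has a usable constant equivalent, the
   expression itself is returned, possibly with operands replaced by
   cheaper equivalents as described above. */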
3221 fold_rtx (rtx x, rtx insn)
3224 enum machine_mode mode;
3231 /* Folded equivalents of first two operands of X. */
3235 /* Constant equivalents of first three operands of X;
3236 0 when no such equivalent is known. */
3241 /* The mode of the first operand of X. We need this for sign and zero
3243 enum machine_mode mode_arg0;
3248 mode = GET_MODE (x);
3249 code = GET_CODE (x);
3259 /* No use simplifying an EXPR_LIST
3260 since they are used only for lists of args
3261 in a function call's REG_EQUAL note. */
3267 return prev_insn_cc0;
3271 /* If the next insn is a CODE_LABEL followed by a jump table,
3272 PC's value is a LABEL_REF pointing to that label. That
3273 lets us fold switch statements on the VAX. */
3276 if (insn && tablejump_p (insn, &next, NULL))
3277 return gen_rtx_LABEL_REF (Pmode, next);
3282 /* See if we previously assigned a constant value to this SUBREG. */
3283 if ((new = lookup_as_function (x, CONST_INT)) != 0
3284 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3287 /* If this is a paradoxical SUBREG, we have no idea what value the
3288 extra bits would have. However, if the operand is equivalent
3289 to a SUBREG whose operand is the same as our mode, and all the
3290 modes are within a word, we can just use the inner operand
3291 because these SUBREGs just say how to treat the register.
3293 Similarly if we find an integer constant. */
3295 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3297 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3298 struct table_elt *elt;
3300 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3301 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3302 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3304 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3306 if (CONSTANT_P (elt->exp)
3307 && GET_MODE (elt->exp) == VOIDmode)
3310 if (GET_CODE (elt->exp) == SUBREG
3311 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3312 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3313 return copy_rtx (SUBREG_REG (elt->exp));
3319 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3320 We might be able to if the SUBREG is extracting a single word in an
3321 integral mode or extracting the low part. */
3323 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3324 const_arg0 = equiv_constant (folded_arg0);
3326 folded_arg0 = const_arg0;
3328 if (folded_arg0 != SUBREG_REG (x))
3330 new = simplify_subreg (mode, folded_arg0,
3331 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3336 if (REG_P (folded_arg0)
3337 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
3339 struct table_elt *elt;
3341 elt = lookup (folded_arg0,
3342 HASH (folded_arg0, GET_MODE (folded_arg0)),
3343 GET_MODE (folded_arg0));
3346 elt = elt->first_same_value;
3348 if (subreg_lowpart_p (x))
3349 /* If this is a narrowing SUBREG and our operand is a REG, see
3350 if we can find an equivalence for REG that is an arithmetic
3351 operation in a wider mode where both operands are paradoxical
3352 SUBREGs from objects of our result mode. In that case, we
3353 couldn't report an equivalent value for that operation, since we
3354 don't know what the extra bits will be. But we can find an
3355 equivalence for this SUBREG by folding that operation in the
3356 narrow mode. This allows us to fold arithmetic in narrow modes
3357 when the machine only supports word-sized arithmetic.
3359 Also look for a case where we have a SUBREG whose operand
3360 is the same as our result. If both modes are smaller
3361 than a word, we are simply interpreting a register in
3362 different modes and we can use the inner value. */
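/* Added illustration: suppose the table records
   (reg:SI 100) == (plus:SI (subreg:SI (reg:QI 101) 0) (const_int 1))
   and (reg:QI 101) == (const_int 7). No SImode value could be recorded
   for that PLUS, since the upper bits of the paradoxical SUBREG are
   unknown, but when folding (subreg:QI (reg:SI 100) 0) the loop below
   can redo the addition in QImode and return (const_int 8). */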
3364 for (; elt; elt = elt->next_same_value)
3366 enum rtx_code eltcode = GET_CODE (elt->exp);
3368 /* Just check for unary and binary operations. */
3369 if (UNARY_P (elt->exp)
3370 && eltcode != SIGN_EXTEND
3371 && eltcode != ZERO_EXTEND
3372 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3373 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3374 && (GET_MODE_CLASS (mode)
3375 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3377 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3379 if (!REG_P (op0) && ! CONSTANT_P (op0))
3380 op0 = fold_rtx (op0, NULL_RTX);
3382 op0 = equiv_constant (op0);
3384 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3387 else if (ARITHMETIC_P (elt->exp)
3388 && eltcode != DIV && eltcode != MOD
3389 && eltcode != UDIV && eltcode != UMOD
3390 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3391 && eltcode != ROTATE && eltcode != ROTATERT
3392 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3393 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3395 || CONSTANT_P (XEXP (elt->exp, 0)))
3396 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3397 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3399 || CONSTANT_P (XEXP (elt->exp, 1))))
3401 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3402 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3404 if (op0 && !REG_P (op0) && ! CONSTANT_P (op0))
3405 op0 = fold_rtx (op0, NULL_RTX);
3408 op0 = equiv_constant (op0);
3410 if (op1 && !REG_P (op1) && ! CONSTANT_P (op1))
3411 op1 = fold_rtx (op1, NULL_RTX);
3414 op1 = equiv_constant (op1);
3416 /* If we are looking for the low SImode part of
3417 (ashift:DI c (const_int 32)), it doesn't work
3418 to compute that in SImode, because a 32-bit shift
3419 in SImode is unpredictable. We know the value is 0. */
3421 && GET_CODE (elt->exp) == ASHIFT
3422 && GET_CODE (op1) == CONST_INT
3423 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3426 < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3427 /* If the count fits in the inner mode's width,
3428 but exceeds the outer mode's width,
3429 the value will get truncated to 0
3431 new = CONST0_RTX (mode);
3433 /* If the count exceeds even the inner mode's width,
3434 don't fold this expression. */
3437 else if (op0 && op1)
3438 new = simplify_binary_operation (GET_CODE (elt->exp), mode, op0, op1);
3441 else if (GET_CODE (elt->exp) == SUBREG
3442 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3443 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3445 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3446 new = copy_rtx (SUBREG_REG (elt->exp));
3452 /* A SUBREG resulting from a zero extension may fold to zero if
3453 it extracts higher bits than the ZERO_EXTEND's source bits.
3454 FIXME: if combine tried to, er, combine these instructions,
3455 this transformation may be moved to simplify_subreg. */
3456 for (; elt; elt = elt->next_same_value)
3458 if (GET_CODE (elt->exp) == ZERO_EXTEND
3460 >= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3461 return CONST0_RTX (mode);
3469 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3470 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3471 new = lookup_as_function (XEXP (x, 0), code);
3473 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3477 /* If we are not actually processing an insn, don't try to find the
3478 best address. Not only don't we care, but we could modify the
3479 MEM in an invalid way since we have no insn to validate against. */
3481 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3484 /* Even if we don't fold in the insn itself,
3485 we can safely do so here, in hopes of getting a constant. */
3486 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3488 HOST_WIDE_INT offset = 0;
3491 && REGNO_QTY_VALID_P (REGNO (addr)))
3493 int addr_q = REG_QTY (REGNO (addr));
3494 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3496 if (GET_MODE (addr) == addr_ent->mode
3497 && addr_ent->const_rtx != NULL_RTX)
3498 addr = addr_ent->const_rtx;
3501 /* If address is constant, split it into a base and integer offset. */
3502 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3504 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3505 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3507 base = XEXP (XEXP (addr, 0), 0);
3508 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3510 else if (GET_CODE (addr) == LO_SUM
3511 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3512 base = XEXP (addr, 1);
3514 /* If this is a constant pool reference, we can fold it into its
3515 constant to allow better value tracking. */
3516 if (base && GET_CODE (base) == SYMBOL_REF
3517 && CONSTANT_POOL_ADDRESS_P (base))
3519 rtx constant = get_pool_constant (base);
3520 enum machine_mode const_mode = get_pool_mode (base);
3523 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3525 constant_pool_entries_cost = COST (constant);
3526 constant_pool_entries_regcost = approx_reg_cost (constant);
3529 /* If we are loading the full constant, we have an equivalence. */
3530 if (offset == 0 && mode == const_mode)
3533 /* If this actually isn't a constant (weird!), we can't do
3534 anything. Otherwise, handle the two most common cases:
3535 extracting a word from a multi-word constant, and extracting
3536 the low-order bits. Other cases don't seem common enough to
3538 if (! CONSTANT_P (constant))
3541 if (GET_MODE_CLASS (mode) == MODE_INT
3542 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3543 && offset % UNITS_PER_WORD == 0
3544 && (new = operand_subword (constant,
3545 offset / UNITS_PER_WORD,
3546 0, const_mode)) != 0)
3549 if (((BYTES_BIG_ENDIAN
3550 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3551 || (! BYTES_BIG_ENDIAN && offset == 0))
3552 && (new = gen_lowpart (mode, constant)) != 0)
3556 /* If this is a reference to a label at a known position in a jump
3557 table, we also know its value. */
3558 if (base && GET_CODE (base) == LABEL_REF)
3560 rtx label = XEXP (base, 0);
3561 rtx table_insn = NEXT_INSN (label);
3563 if (table_insn && JUMP_P (table_insn)
3564 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3566 rtx table = PATTERN (table_insn);
3569 && (offset / GET_MODE_SIZE (GET_MODE (table))
3570 < XVECLEN (table, 0)))
3571 return XVECEXP (table, 0,
3572 offset / GET_MODE_SIZE (GET_MODE (table)));
3574 if (table_insn && JUMP_P (table_insn)
3575 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3577 rtx table = PATTERN (table_insn);
3580 && (offset / GET_MODE_SIZE (GET_MODE (table))
3581 < XVECLEN (table, 1)))
3583 offset /= GET_MODE_SIZE (GET_MODE (table));
3584 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3587 if (GET_MODE (table) != Pmode)
3588 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3590 /* Indicate this is a constant. This isn't a
3591 valid form of CONST, but it will only be used
3592 to fold the next insns and then discarded, so
3595 Note this expression must be explicitly discarded,
3596 by cse_insn, else it may end up in a REG_EQUAL note
3597 and "escape" to cause problems elsewhere. */
3598 return gen_rtx_CONST (GET_MODE (new), new);
3606 #ifdef NO_FUNCTION_CSE
3608 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3616 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3617 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3618 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3629 mode_arg0 = VOIDmode;
3631 /* Try folding our operands.
3632 Then see which ones have constant values known. */
3634 fmt = GET_RTX_FORMAT (code);
3635 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3638 rtx arg = XEXP (x, i);
3639 rtx folded_arg = arg, const_arg = 0;
3640 enum machine_mode mode_arg = GET_MODE (arg);
3641 rtx cheap_arg, expensive_arg;
3642 rtx replacements[2];
3644 int old_cost = COST_IN (XEXP (x, i), code);
3646 /* Most arguments are cheap, so handle them specially. */
3647 switch (GET_CODE (arg))
3650 /* This is the same as calling equiv_constant; it is duplicated
3652 if (REGNO_QTY_VALID_P (REGNO (arg)))
3654 int arg_q = REG_QTY (REGNO (arg));
3655 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3657 if (arg_ent->const_rtx != NULL_RTX
3658 && !REG_P (arg_ent->const_rtx)
3659 && GET_CODE (arg_ent->const_rtx) != PLUS)
3661 = gen_lowpart (GET_MODE (arg),
3662 arg_ent->const_rtx);
3677 folded_arg = prev_insn_cc0;
3678 mode_arg = prev_insn_cc0_mode;
3679 const_arg = equiv_constant (folded_arg);
3684 folded_arg = fold_rtx (arg, insn);
3685 const_arg = equiv_constant (folded_arg);
3688 /* For the first three operands, see if the operand
3689 is constant or equivalent to a constant. */
3693 folded_arg0 = folded_arg;
3694 const_arg0 = const_arg;
3695 mode_arg0 = mode_arg;
3698 folded_arg1 = folded_arg;
3699 const_arg1 = const_arg;
3702 const_arg2 = const_arg;
3706 /* Pick the least expensive of the folded argument and an
3707 equivalent constant argument. */
3708 if (const_arg == 0 || const_arg == folded_arg
3709 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3710 cheap_arg = folded_arg, expensive_arg = const_arg;
3712 cheap_arg = const_arg, expensive_arg = folded_arg;
3714 /* Try to replace the operand with the cheapest of the two
3715 possibilities. If it doesn't work and this is either of the first
3716 two operands of a commutative operation, try swapping them.
3717 If THAT fails, try the more expensive, provided it is cheaper
3718 than what is already there. */
3720 if (cheap_arg == XEXP (x, i))
3723 if (insn == 0 && ! copied)
3729 /* Order the replacements from cheapest to most expensive. */
3730 replacements[0] = cheap_arg;
3731 replacements[1] = expensive_arg;
3733 for (j = 0; j < 2 && replacements[j]; j++)
3735 int new_cost = COST_IN (replacements[j], code);
3737 /* Stop if what existed before was cheaper. Prefer constants
3738 in the case of a tie. */
3739 if (new_cost > old_cost
3740 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3743 /* It's not safe to substitute the operand of a conversion
3744 operator with a constant, as the conversion's identity
3745 depends upon the mode of its operand. This optimization
3746 is handled by the call to simplify_unary_operation. */
3747 if (GET_RTX_CLASS (code) == RTX_UNARY
3748 && GET_MODE (replacements[j]) != mode_arg0
3749 && (code == ZERO_EXTEND
3750 || code == SIGN_EXTEND
3752 || code == FLOAT_TRUNCATE
3753 || code == FLOAT_EXTEND
3756 || code == UNSIGNED_FLOAT
3757 || code == UNSIGNED_FIX))
3760 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3763 if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3764 || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
3766 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3767 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3769 if (apply_change_group ())
3771 /* Swap them back to be invalid so that this loop can
3772 continue and flag them to be swapped back later. */
3775 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3787 /* Don't try to fold inside of a vector of expressions.
3788 Doing nothing is harmless. */
3792 /* If a commutative operation, place a constant integer as the second
3793 operand unless the first operand is also a constant integer. Otherwise,
3794 place any constant second unless the first operand is also a constant. */
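/* Added illustration: (plus:SI (const_int 4) (reg 100)) is rewritten here
   as (plus:SI (reg 100) (const_int 4)), so later lookups and
   simplifications only have to handle one operand order. */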
3796 if (COMMUTATIVE_P (x))
3799 || swap_commutative_operands_p (const_arg0 ? const_arg0
3801 const_arg1 ? const_arg1
3804 rtx tem = XEXP (x, 0);
3806 if (insn == 0 && ! copied)
3812 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3813 validate_change (insn, &XEXP (x, 1), tem, 1);
3814 if (apply_change_group ())
3816 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3817 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3822 /* If X is an arithmetic operation, see if we can simplify it. */
3824 switch (GET_RTX_CLASS (code))
3830 /* We can't simplify extension ops unless we know the
3832 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3833 && mode_arg0 == VOIDmode)
3836 /* If we had a CONST, strip it off and put it back later if we
3838 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3839 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3841 new = simplify_unary_operation (code, mode,
3842 const_arg0 ? const_arg0 : folded_arg0,
3844 /* NEG of PLUS could be converted into MINUS, but that causes
3845 expressions of the form
3846 (CONST (MINUS (CONST_INT) (SYMBOL_REF)))
3847 which many ports mistakenly treat as LEGITIMATE_CONSTANT_P.
3848 FIXME: those ports should be fixed. */
3849 if (new != 0 && is_const
3850 && GET_CODE (new) == PLUS
3851 && (GET_CODE (XEXP (new, 0)) == SYMBOL_REF
3852 || GET_CODE (XEXP (new, 0)) == LABEL_REF)
3853 && GET_CODE (XEXP (new, 1)) == CONST_INT)
3854 new = gen_rtx_CONST (mode, new);
3859 case RTX_COMM_COMPARE:
3860 /* See what items are actually being compared and set FOLDED_ARG[01]
3861 to those values and CODE to the actual comparison code. If any are
3862 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3863 do anything if both operands are already known to be constant. */
3865 /* ??? Vector mode comparisons are not supported yet. */
3866 if (VECTOR_MODE_P (mode))
3869 if (const_arg0 == 0 || const_arg1 == 0)
3871 struct table_elt *p0, *p1;
3872 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3873 enum machine_mode mode_arg1;
3875 #ifdef FLOAT_STORE_FLAG_VALUE
3876 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3878 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3879 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3880 false_rtx = CONST0_RTX (mode);
3884 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3885 &mode_arg0, &mode_arg1);
3887 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3888 what kinds of things are being compared, so we can't do
3889 anything with this comparison. */
3891 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3894 const_arg0 = equiv_constant (folded_arg0);
3895 const_arg1 = equiv_constant (folded_arg1);
3897 /* If we do not now have two constants being compared, see
3898 if we can nevertheless deduce some things about the
3900 if (const_arg0 == 0 || const_arg1 == 0)
3902 /* Some addresses are known to be nonzero. We don't know
3903 their sign, but equality comparisons are known. */
3904 if (const_arg1 == const0_rtx
3905 && nonzero_address_p (folded_arg0))
3909 else if (code == NE)
3913 /* See if the two operands are the same. */
3915 if (folded_arg0 == folded_arg1
3916 || (REG_P (folded_arg0)
3917 && REG_P (folded_arg1)
3918 && (REG_QTY (REGNO (folded_arg0))
3919 == REG_QTY (REGNO (folded_arg1))))
3920 || ((p0 = lookup (folded_arg0,
3921 SAFE_HASH (folded_arg0, mode_arg0),
3923 && (p1 = lookup (folded_arg1,
3924 SAFE_HASH (folded_arg1, mode_arg0),
3926 && p0->first_same_value == p1->first_same_value))
3928 /* Sadly two equal NaNs are not equivalent. */
3929 if (!HONOR_NANS (mode_arg0))
3930 return ((code == EQ || code == LE || code == GE
3931 || code == LEU || code == GEU || code == UNEQ
3932 || code == UNLE || code == UNGE
3934 ? true_rtx : false_rtx);
3935 /* Take care for the FP compares we can resolve. */
3936 if (code == UNEQ || code == UNLE || code == UNGE)
3938 if (code == LTGT || code == LT || code == GT)
3942 /* If FOLDED_ARG0 is a register, see if the comparison we are
3943 doing now is either the same as we did before or the reverse
3944 (we only check the reverse if not floating-point). */
3945 else if (REG_P (folded_arg0))
3947 int qty = REG_QTY (REGNO (folded_arg0));
3949 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3951 struct qty_table_elem *ent = &qty_table[qty];
3953 if ((comparison_dominates_p (ent->comparison_code, code)
3954 || (! FLOAT_MODE_P (mode_arg0)
3955 && comparison_dominates_p (ent->comparison_code,
3956 reverse_condition (code))))
3957 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3959 && rtx_equal_p (ent->comparison_const,
3961 || (REG_P (folded_arg1)
3962 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3963 return (comparison_dominates_p (ent->comparison_code, code)
3964 ? true_rtx : false_rtx);
3970 /* If we are comparing against zero, see if the first operand is
3971 equivalent to an IOR with a constant. If so, we may be able to
3972 determine the result of this comparison. */
3974 if (const_arg1 == const0_rtx)
3976 rtx y = lookup_as_function (folded_arg0, IOR);
3980 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3981 && GET_CODE (inner_const) == CONST_INT
3982 && INTVAL (inner_const) != 0)
3984 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3985 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3986 && (INTVAL (inner_const)
3987 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3988 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3990 #ifdef FLOAT_STORE_FLAG_VALUE
3991 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3993 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3994 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3995 false_rtx = CONST0_RTX (mode);
4020 rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
4021 rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
4022 new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
4027 case RTX_COMM_ARITH:
4031 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4032 with that LABEL_REF as its second operand. If so, the result is
4033 the first operand of that MINUS. This handles switches with an
4034 ADDR_DIFF_VEC table. */
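/* Added illustration: for a dispatch through an ADDR_DIFF_VEC,
   FOLDED_ARG0 may be (or be equivalent to)
   (minus (label_ref L_case) (label_ref L_base)) while CONST_ARG1 is
   (label_ref L_base); adding them back together yields just
   (label_ref L_case), which is what is returned below. */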
4035 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4038 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4039 : lookup_as_function (folded_arg0, MINUS);
4041 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4042 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4045 /* Now try for a CONST of a MINUS like the above. */
4046 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4047 : lookup_as_function (folded_arg0, CONST))) != 0
4048 && GET_CODE (XEXP (y, 0)) == MINUS
4049 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4050 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4051 return XEXP (XEXP (y, 0), 0);
4054 /* Likewise if the operands are in the other order. */
4055 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4058 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4059 : lookup_as_function (folded_arg1, MINUS);
4061 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4062 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4065 /* Now try for a CONST of a MINUS like the above. */
4066 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4067 : lookup_as_function (folded_arg1, CONST))) != 0
4068 && GET_CODE (XEXP (y, 0)) == MINUS
4069 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4070 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4071 return XEXP (XEXP (y, 0), 0);
4074 /* If second operand is a register equivalent to a negative
4075 CONST_INT, see if we can find a register equivalent to the
4076 positive constant. Make a MINUS if so. Don't do this for
4077 a non-negative constant since we might then alternate between
4078 choosing positive and negative constants. Having the positive
4079 constant previously-used is the more common case. Be sure
4080 the resulting constant is non-negative; if const_arg1 were
4081 the smallest negative number this would overflow: depending
4082 on the mode, this would either just be the same value (and
4083 hence not save anything) or be incorrect. */
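/* Added illustration: for (plus:SI (reg 100) (reg 101)) where (reg 101)
   is known to be (const_int -12), if some register is already known to
   hold (const_int 12) the code below rewrites the sum as
   (minus:SI (reg 100) (reg 102)), with reg 102 the canonical register for
   12, reusing the existing positive constant instead of -12. */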
4084 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4085 && INTVAL (const_arg1) < 0
4086 /* This used to test
4088 -INTVAL (const_arg1) >= 0
4090 But the Sun V5.0 compilers mis-compiled that test. So
4091 instead we test for the problematic value in a more direct
4092 manner and hope the Sun compilers get it correct. */
4093 && INTVAL (const_arg1) !=
4094 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4095 && REG_P (folded_arg1))
4097 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4099 = lookup (new_const, SAFE_HASH (new_const, mode), mode);
4102 for (p = p->first_same_value; p; p = p->next_same_value)
4104 return simplify_gen_binary (MINUS, mode, folded_arg0,
4105 canon_reg (p->exp, NULL_RTX));
4110 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4111 If so, produce (PLUS Z C2-C). */
4112 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4114 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4115 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4116 return fold_rtx (plus_constant (copy_rtx (y),
4117 -INTVAL (const_arg1)),
4124 case SMIN: case SMAX: case UMIN: case UMAX:
4125 case IOR: case AND: case XOR:
4127 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4128 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4129 is known to be of similar form, we may be able to replace the
4130 operation with a combined operation. This may eliminate the
4131 intermediate operation if every use is simplified in this way.
4132 Note that the similar optimization done by combine.c only works
4133 if the intermediate operation's result has only one reference. */
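/* Added illustration: if (reg 100) is known to be
   (and:SI (reg 99) (const_int 0xff00)), then folding
   (and:SI (reg 100) (const_int 0xf0f0)) below combines the two constants
   and returns (and:SI (reg 99) (const_int 0xf000)), removing the use of
   the intermediate register. */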
4135 if (REG_P (folded_arg0)
4136 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4139 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4140 rtx y = lookup_as_function (folded_arg0, code);
4142 enum rtx_code associate_code;
4146 || 0 == (inner_const
4147 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4148 || GET_CODE (inner_const) != CONST_INT
4149 /* If we have compiled a statement like
4150 "if (x == (x & mask1))", and now are looking at
4151 "x & mask2", we will have a case where the first operand
4152 of Y is the same as our first operand. Unless we detect
4153 this case, an infinite loop will result. */
4154 || XEXP (y, 0) == folded_arg0)
4157 /* Don't associate these operations if they are a PLUS with the
4158 same constant and it is a power of two. These might be doable
4159 with a pre- or post-increment. Similarly for two subtracts of
4160 identical powers of two with post decrement. */
4162 if (code == PLUS && const_arg1 == inner_const
4163 && ((HAVE_PRE_INCREMENT
4164 && exact_log2 (INTVAL (const_arg1)) >= 0)
4165 || (HAVE_POST_INCREMENT
4166 && exact_log2 (INTVAL (const_arg1)) >= 0)
4167 || (HAVE_PRE_DECREMENT
4168 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4169 || (HAVE_POST_DECREMENT
4170 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4173 /* Compute the code used to compose the constants. For example,
4174 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
4176 associate_code = (is_shift || code == MINUS ? PLUS : code);
4178 new_const = simplify_binary_operation (associate_code, mode,
4179 const_arg1, inner_const);
4184 /* If we are associating shift operations, don't let this
4185 produce a shift of the size of the object or larger.
4186 This could occur when we follow a sign-extend by a right
4187 shift on a machine that does a sign-extend as a pair
4190 if (is_shift && GET_CODE (new_const) == CONST_INT
4191 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4193 /* As an exception, we can turn an ASHIFTRT of this
4194 form into a shift of the number of bits - 1. */
4195 if (code == ASHIFTRT)
4196 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4201 y = copy_rtx (XEXP (y, 0));
4203 /* If Y contains our first operand (the most common way this
4204 can happen is if Y is a MEM), we would get into an infinite
4205 loop if we tried to fold it. So don't in that case. */
4207 if (! reg_mentioned_p (folded_arg0, y))
4208 y = fold_rtx (y, insn);
4210 return simplify_gen_binary (code, mode, y, new_const);
4214 case DIV: case UDIV:
4215 /* ??? The associative optimization performed immediately above is
4216 also possible for DIV and UDIV using associate_code of MULT.
4217 However, we would need extra code to verify that the
4218 multiplication does not overflow, that is, there is no overflow
4219 in the calculation of new_const. */
4226 new = simplify_binary_operation (code, mode,
4227 const_arg0 ? const_arg0 : folded_arg0,
4228 const_arg1 ? const_arg1 : folded_arg1);
4232 /* (lo_sum (high X) X) is simply X. */
4233 if (code == LO_SUM && const_arg0 != 0
4234 && GET_CODE (const_arg0) == HIGH
4235 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4240 case RTX_BITFIELD_OPS:
4241 new = simplify_ternary_operation (code, mode, mode_arg0,
4242 const_arg0 ? const_arg0 : folded_arg0,
4243 const_arg1 ? const_arg1 : folded_arg1,
4244 const_arg2 ? const_arg2 : XEXP (x, 2));
4251 return new ? new : x;
4254 /* Return a constant value currently equivalent to X.
4255 Return 0 if we don't know one. */
4258 equiv_constant (rtx x)
4261 && REGNO_QTY_VALID_P (REGNO (x)))
4263 int x_q = REG_QTY (REGNO (x));
4264 struct qty_table_elem *x_ent = &qty_table[x_q];
4266 if (x_ent->const_rtx)
4267 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
4270 if (x == 0 || CONSTANT_P (x))
4273 /* If X is a MEM, try to fold it outside the context of any insn to see if
4274 it might be equivalent to a constant. That handles the case where it
4275 is a constant-pool reference. Then try to look it up in the hash table
4276 in case it is something whose value we have seen before. */
4280 struct table_elt *elt;
4282 x = fold_rtx (x, NULL_RTX);
4286 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
4290 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4291 if (elt->is_const && CONSTANT_P (elt->exp))
4298 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4299 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4300 least-significant part of X.
4301 MODE specifies how big a part of X to return.
4303 If the requested operation cannot be done, 0 is returned.
4305 This is similar to gen_lowpart_general in emit-rtl.c. */
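/* For example, on a little-endian byte-addressed target, asking for the
   QImode low part of (mem:SI (reg A)) yields (mem:QI (reg A)); on a
   big-endian target the offset computed below adjusts the address so the
   same request yields (mem:QI (plus (reg A) (const_int 3))), provided the
   adjusted address is still valid.  (Register A is only illustrative.)  */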
4308 gen_lowpart_if_possible (enum machine_mode mode, rtx x)
4310 rtx result = gen_lowpart_common (mode, x);
4316 /* This is the only other case we handle. */
4320 if (WORDS_BIG_ENDIAN)
4321 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4322 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4323 if (BYTES_BIG_ENDIAN)
4324 /* Adjust the address so that the address-after-the-data is
4326 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4327 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4329 new = adjust_address_nv (x, mode, offset);
4330 if (! memory_address_p (mode, XEXP (new, 0)))
4339 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4340 branch. It will be zero if not.
4342 In certain cases, this can cause us to add an equivalence. For example,
4343 if we are following the taken case of `if (i == 2)',
4345 we can add the fact that `i' and '2' are now equivalent.
4347 In any case, we can record that this comparison was passed. If the same
4348 comparison is seen later, we will know its value. */
4351 record_jump_equiv (rtx insn, int taken)
4353 int cond_known_true;
4356 enum machine_mode mode, mode0, mode1;
4357 int reversed_nonequality = 0;
4360 /* Ensure this is the right kind of insn. */
4361 if (! any_condjump_p (insn))
4363 set = pc_set (insn);
4365 /* See if this jump condition is known true or false. */
4367 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4369 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4371 /* Get the type of comparison being done and the operands being compared.
4372 If we had to reverse a non-equality condition, record that fact so we
4373 know that it isn't valid for floating-point. */
4374 code = GET_CODE (XEXP (SET_SRC (set), 0));
4375 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4376 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4378 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4379 if (! cond_known_true)
4381 code = reversed_comparison_code_parts (code, op0, op1, insn);
4383 /* Don't remember if we can't find the inverse. */
4384 if (code == UNKNOWN)
4388 /* The mode is the mode of the non-constant. */
4390 if (mode1 != VOIDmode)
4393 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4396 /* Yet another form of subreg creation. In this case, we want something in
4397 MODE, and we should assume OP has MODE iff it is naturally modeless. */
4400 record_jump_cond_subreg (enum machine_mode mode, rtx op)
4402 enum machine_mode op_mode = GET_MODE (op);
4403 if (op_mode == mode || op_mode == VOIDmode)
4405 return lowpart_subreg (mode, op, op_mode);
4408 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4409 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4410 Make any useful entries we can with that information. Called from
4411 above function and called recursively. */
4414 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4415 rtx op1, int reversed_nonequality)
4417 unsigned op0_hash, op1_hash;
4418 int op0_in_memory, op1_in_memory;
4419 struct table_elt *op0_elt, *op1_elt;
4421 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4422 we know that they are also equal in the smaller mode (this is also
4423 true for all smaller modes whether or not there is a SUBREG, but
4424 is not worth testing for with no SUBREG). */
4426 /* Note that GET_MODE (op0) may not equal MODE. */
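/* For instance, if the known-true comparison is
   (eq (subreg:DI (reg:SI 100) 0) (reg:DI 101)) with a paradoxical SUBREG,
   then (reg:SI 100) must also equal the SImode low part of (reg:DI 101),
   and the recursive call below records that narrower equivalence as well.
   (The register numbers are only illustrative.)  */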
4427 if (code == EQ && GET_CODE (op0) == SUBREG
4428 && (GET_MODE_SIZE (GET_MODE (op0))
4429 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4431 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4432 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4434 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4435 reversed_nonequality);
4438 if (code == EQ && GET_CODE (op1) == SUBREG
4439 && (GET_MODE_SIZE (GET_MODE (op1))
4440 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4442 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4443 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4445 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4446 reversed_nonequality);
4449 /* Similarly, if this is an NE comparison, and either is a SUBREG
4450 making a smaller mode, we know the whole thing is also NE. */
4452 /* Note that GET_MODE (op0) may not equal MODE;
4453 if we test MODE instead, we can get an infinite recursion
4454 alternating between two modes each wider than MODE. */
4456 if (code == NE && GET_CODE (op0) == SUBREG
4457 && subreg_lowpart_p (op0)
4458 && (GET_MODE_SIZE (GET_MODE (op0))
4459 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4461 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4462 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4464 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4465 reversed_nonequality);
4468 if (code == NE && GET_CODE (op1) == SUBREG
4469 && subreg_lowpart_p (op1)
4470 && (GET_MODE_SIZE (GET_MODE (op1))
4471 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4473 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4474 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4476 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4477 reversed_nonequality);
4480 /* Hash both operands. */
4483 hash_arg_in_memory = 0;
4484 op0_hash = HASH (op0, mode);
4485 op0_in_memory = hash_arg_in_memory;
4491 hash_arg_in_memory = 0;
4492 op1_hash = HASH (op1, mode);
4493 op1_in_memory = hash_arg_in_memory;
4498 /* Look up both operands. */
4499 op0_elt = lookup (op0, op0_hash, mode);
4500 op1_elt = lookup (op1, op1_hash, mode);
4502 /* If both operands are already equivalent or if they are not in the
4503 table but are identical, do nothing. */
4504 if ((op0_elt != 0 && op1_elt != 0
4505 && op0_elt->first_same_value == op1_elt->first_same_value)
4506 || op0 == op1 || rtx_equal_p (op0, op1))
4509 /* If we aren't setting two things equal, all we can do is save this
4510 comparison. Similarly if this is floating-point. In the latter
4511 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4512 If we record the equality, we might inadvertently delete code
4513 whose intent was to change -0 to +0. */
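/* For example, given
     if (x == 0.0)
       x = 0.0;
   the branch is also taken when x is -0.0; recording x as equivalent to
   0.0 on that path could let later passes delete the store and lose the
   intended -0.0 to +0.0 conversion.  */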
4515 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4517 struct qty_table_elem *ent;
4520 /* If we reversed a floating-point comparison, if OP0 is not a
4521 register, or if OP1 is neither a register nor a constant, we can't do anything. */
4525 op1 = equiv_constant (op1);
4527 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4528 || !REG_P (op0) || op1 == 0)
4531 /* Put OP0 in the hash table if it isn't already. This gives it a
4532 new quantity number. */
4535 if (insert_regs (op0, NULL, 0))
4537 rehash_using_reg (op0);
4538 op0_hash = HASH (op0, mode);
4540 /* If OP0 is contained in OP1, this changes its hash code
4541 as well. Faster to rehash than to check, except
4542 for the simple case of a constant. */
4543 if (! CONSTANT_P (op1))
4544 op1_hash = HASH (op1, mode);
4547 op0_elt = insert (op0, NULL, op0_hash, mode);
4548 op0_elt->in_memory = op0_in_memory;
4551 qty = REG_QTY (REGNO (op0));
4552 ent = &qty_table[qty];
4554 ent->comparison_code = code;
4557 /* Look it up again--in case op0 and op1 are the same. */
4558 op1_elt = lookup (op1, op1_hash, mode);
4560 /* Put OP1 in the hash table so it gets a new quantity number. */
4563 if (insert_regs (op1, NULL, 0))
4565 rehash_using_reg (op1);
4566 op1_hash = HASH (op1, mode);
4569 op1_elt = insert (op1, NULL, op1_hash, mode);
4570 op1_elt->in_memory = op1_in_memory;
4573 ent->comparison_const = NULL_RTX;
4574 ent->comparison_qty = REG_QTY (REGNO (op1));
4578 ent->comparison_const = op1;
4579 ent->comparison_qty = -1;
4585 /* If either side is still missing an equivalence, make it now,
4586 then merge the equivalences. */
4590 if (insert_regs (op0, NULL, 0))
4592 rehash_using_reg (op0);
4593 op0_hash = HASH (op0, mode);
4596 op0_elt = insert (op0, NULL, op0_hash, mode);
4597 op0_elt->in_memory = op0_in_memory;
4602 if (insert_regs (op1, NULL, 0))
4604 rehash_using_reg (op1);
4605 op1_hash = HASH (op1, mode);
4608 op1_elt = insert (op1, NULL, op1_hash, mode);
4609 op1_elt->in_memory = op1_in_memory;
4612 merge_equiv_classes (op0_elt, op1_elt);
4615 /* CSE processing for one instruction.
4616 First simplify sources and addresses of all assignments
4617 in the instruction, using previously-computed equivalent values.
4618 Then install the new sources and destinations in the table
4619 of available values.
4621 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4622 the insn. It means that INSN is inside a libcall block. In this
4623 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4625 /* Data on one SET contained in the instruction. */
4629 /* The SET rtx itself. */
4631 /* The SET_SRC of the rtx (the original value, if it is changing). */
4633 /* The hash-table element for the SET_SRC of the SET. */
4634 struct table_elt *src_elt;
4635 /* Hash value for the SET_SRC. */
4637 /* Hash value for the SET_DEST. */
4639 /* The SET_DEST, with SUBREG, etc., stripped. */
4641 /* Nonzero if the SET_SRC is in memory. */
4643 /* Nonzero if the SET_SRC contains something
4644 whose value cannot be predicted and understood. */
4646 /* Original machine mode, in case it becomes a CONST_INT.
4647 The size of this field should match the size of the mode
4648 field of struct rtx_def (see rtl.h). */
4649 ENUM_BITFIELD(machine_mode) mode : 8;
4650 /* A constant equivalent for SET_SRC, if any. */
4652 /* Original SET_SRC value used for libcall notes. */
4654 /* Hash value of constant equivalent for SET_SRC. */
4655 unsigned src_const_hash;
4656 /* Table entry for constant equivalent for SET_SRC, if any. */
4657 struct table_elt *src_const_elt;
4661 cse_insn (rtx insn, rtx libcall_insn)
4663 rtx x = PATTERN (insn);
4669 /* Records what this insn does to set CC0. */
4670 rtx this_insn_cc0 = 0;
4671 enum machine_mode this_insn_cc0_mode = VOIDmode;
4675 struct table_elt *src_eqv_elt = 0;
4676 int src_eqv_volatile = 0;
4677 int src_eqv_in_memory = 0;
4678 unsigned src_eqv_hash = 0;
4680 struct set *sets = (struct set *) 0;
4684 /* Find all the SETs and CLOBBERs in this instruction.
4685 Record all the SETs in the array `sets' and count them.
4686 Also determine whether there is a CLOBBER that invalidates
4687 all memory references, or all references at varying addresses. */
4691 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4693 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4694 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4695 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4699 if (GET_CODE (x) == SET)
4701 sets = alloca (sizeof (struct set));
4704 /* Ignore SETs that are unconditional jumps.
4705 They never need cse processing, so this does not hurt.
4706 The reason is not efficiency but rather
4707 so that we can test at the end for instructions
4708 that have been simplified to unconditional jumps
4709 and not be misled by unchanged instructions
4710 that were unconditional jumps to begin with. */
4711 if (SET_DEST (x) == pc_rtx
4712 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4715 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4716 The hard function value register is used only once, to copy to
4717 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4718 Ensure we invalidate the destination register. On the 80386 no
4719 other code would invalidate it since it is a fixed_reg.
4720 We need not check the return of apply_change_group; see canon_reg. */
4722 else if (GET_CODE (SET_SRC (x)) == CALL)
4724 canon_reg (SET_SRC (x), insn);
4725 apply_change_group ();
4726 fold_rtx (SET_SRC (x), insn);
4727 invalidate (SET_DEST (x), VOIDmode);
4732 else if (GET_CODE (x) == PARALLEL)
4734 int lim = XVECLEN (x, 0);
4736 sets = alloca (lim * sizeof (struct set));
4738 /* Find all regs explicitly clobbered in this insn,
4739 and ensure they are not replaced with any other regs
4740 elsewhere in this insn.
4741 When a reg that is clobbered is also used for input,
4742 we should presume that that is for a reason,
4743 and we should not substitute some other register
4744 which is not supposed to be clobbered.
4745 Therefore, this loop cannot be merged into the one below
4746 because a CALL may precede a CLOBBER and refer to the
4747 value clobbered. We must not let a canonicalization do
4748 anything in that case. */
4749 for (i = 0; i < lim; i++)
4751 rtx y = XVECEXP (x, 0, i);
4752 if (GET_CODE (y) == CLOBBER)
4754 rtx clobbered = XEXP (y, 0);
4756 if (REG_P (clobbered)
4757 || GET_CODE (clobbered) == SUBREG)
4758 invalidate (clobbered, VOIDmode);
4759 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4760 || GET_CODE (clobbered) == ZERO_EXTRACT)
4761 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4765 for (i = 0; i < lim; i++)
4767 rtx y = XVECEXP (x, 0, i);
4768 if (GET_CODE (y) == SET)
4770 /* As above, we ignore unconditional jumps and call-insns and
4771 ignore the result of apply_change_group. */
4772 if (GET_CODE (SET_SRC (y)) == CALL)
4774 canon_reg (SET_SRC (y), insn);
4775 apply_change_group ();
4776 fold_rtx (SET_SRC (y), insn);
4777 invalidate (SET_DEST (y), VOIDmode);
4779 else if (SET_DEST (y) == pc_rtx
4780 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4783 sets[n_sets++].rtl = y;
4785 else if (GET_CODE (y) == CLOBBER)
4787 /* If we clobber memory, canon the address.
4788 This does nothing when a register is clobbered
4789 because we have already invalidated the reg. */
4790 if (MEM_P (XEXP (y, 0)))
4791 canon_reg (XEXP (y, 0), NULL_RTX);
4793 else if (GET_CODE (y) == USE
4794 && ! (REG_P (XEXP (y, 0))
4795 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4796 canon_reg (y, NULL_RTX);
4797 else if (GET_CODE (y) == CALL)
4799 /* The result of apply_change_group can be ignored; see
4801 canon_reg (y, insn);
4802 apply_change_group ();
4807 else if (GET_CODE (x) == CLOBBER)
4809 if (MEM_P (XEXP (x, 0)))
4810 canon_reg (XEXP (x, 0), NULL_RTX);
4813 /* Canonicalize a USE of a pseudo register or memory location. */
4814 else if (GET_CODE (x) == USE
4815 && ! (REG_P (XEXP (x, 0))
4816 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4817 canon_reg (XEXP (x, 0), NULL_RTX);
4818 else if (GET_CODE (x) == CALL)
4820 /* The result of apply_change_group can be ignored; see canon_reg. */
4821 canon_reg (x, insn);
4822 apply_change_group ();
4826 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4827 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4828 is handled specially for this case, and if it isn't set, then there will
4829 be no equivalence for the destination. */
4830 if (n_sets == 1 && REG_NOTES (insn) != 0
4831 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4832 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4833 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4835 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4836 XEXP (tem, 0) = src_eqv;
4839 /* Canonicalize sources and addresses of destinations.
4840 We do this in a separate pass to avoid problems when a MATCH_DUP is
4841 present in the insn pattern. In that case, we want to ensure that
4842 we don't break the duplicate nature of the pattern. So we will replace
4843 both operands at the same time. Otherwise, we would fail to find an
4844 equivalent substitution in the loop calling validate_change below.
4846 We used to suppress canonicalization of DEST if it appears in SRC,
4847 but we don't do this any more. */
4849 for (i = 0; i < n_sets; i++)
4851 rtx dest = SET_DEST (sets[i].rtl);
4852 rtx src = SET_SRC (sets[i].rtl);
4853 rtx new = canon_reg (src, insn);
4856 sets[i].orig_src = src;
4857 if ((REG_P (new) && REG_P (src)
4858 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4859 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4860 || (insn_code = recog_memoized (insn)) < 0
4861 || insn_data[insn_code].n_dups > 0)
4862 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4864 SET_SRC (sets[i].rtl) = new;
4866 if (GET_CODE (dest) == ZERO_EXTRACT)
4868 validate_change (insn, &XEXP (dest, 1),
4869 canon_reg (XEXP (dest, 1), insn), 1);
4870 validate_change (insn, &XEXP (dest, 2),
4871 canon_reg (XEXP (dest, 2), insn), 1);
4874 while (GET_CODE (dest) == SUBREG
4875 || GET_CODE (dest) == ZERO_EXTRACT
4876 || GET_CODE (dest) == STRICT_LOW_PART)
4877 dest = XEXP (dest, 0);
4880 canon_reg (dest, insn);
4883 /* Now that we have done all the replacements, we can apply the change
4884 group and see if they all work. Note that this will cause some
4885 canonicalizations that would have worked individually not to be applied
4886 because some other canonicalization didn't work, but this should not occur often.
4889 The result of apply_change_group can be ignored; see canon_reg. */
4891 apply_change_group ();
4893 /* Set sets[i].src_elt to the class each source belongs to.
4894 Detect assignments from or to volatile things
4895 and set sets[i] to zero so they will be ignored
4896 in the rest of this function.
4898 Nothing in this loop changes the hash table or the register chains. */
4900 for (i = 0; i < n_sets; i++)
4904 struct table_elt *elt = 0, *p;
4905 enum machine_mode mode;
4908 rtx src_related = 0;
4909 struct table_elt *src_const_elt = 0;
4910 int src_cost = MAX_COST;
4911 int src_eqv_cost = MAX_COST;
4912 int src_folded_cost = MAX_COST;
4913 int src_related_cost = MAX_COST;
4914 int src_elt_cost = MAX_COST;
4915 int src_regcost = MAX_COST;
4916 int src_eqv_regcost = MAX_COST;
4917 int src_folded_regcost = MAX_COST;
4918 int src_related_regcost = MAX_COST;
4919 int src_elt_regcost = MAX_COST;
4920 /* Set nonzero if we need to call force_const_mem on the
4921 contents of src_folded before using it. */
4922 int src_folded_force_flag = 0;
4924 dest = SET_DEST (sets[i].rtl);
4925 src = SET_SRC (sets[i].rtl);
4927 /* If SRC is a constant that has no machine mode,
4928 hash it with the destination's machine mode.
4929 This way we can keep different modes separate. */
4931 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4932 sets[i].mode = mode;
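/* Thus (set (reg:SI 100) (const_int 5)) hashes the constant in SImode
   while (set (reg:DI 101) (const_int 5)) hashes it in DImode, so the two
   uses of the mode-less (const_int 5) stay in separate equivalence
   classes.  (The register numbers are only illustrative.)  */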
4936 enum machine_mode eqvmode = mode;
4937 if (GET_CODE (dest) == STRICT_LOW_PART)
4938 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4940 hash_arg_in_memory = 0;
4941 src_eqv_hash = HASH (src_eqv, eqvmode);
4943 /* Find the equivalence class for the equivalent expression. */
4946 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4948 src_eqv_volatile = do_not_record;
4949 src_eqv_in_memory = hash_arg_in_memory;
4952 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4953 value of the INNER register, not the destination. So it is not
4954 a valid substitution for the source. But save it for later. */
4955 if (GET_CODE (dest) == STRICT_LOW_PART)
4958 src_eqv_here = src_eqv;
4960 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4961 simplified result, which may not necessarily be valid. */
4962 src_folded = fold_rtx (src, insn);
4965 /* ??? This caused bad code to be generated for the m68k port with -O2.
4966 Suppose src is (CONST_INT -1), and that after truncation src_folded
4967 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4968 At the end we will add src and src_const to the same equivalence
4969 class. We now have 3 and -1 on the same equivalence class. This
4970 causes later instructions to be mis-optimized. */
4971 /* If storing a constant in a bitfield, pre-truncate the constant
4972 so we will be able to record it later. */
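/* Worked example of the truncation below: storing (const_int 0x1f3) into
   a 4-bit ZERO_EXTRACT sets bits above the field, so the value recorded
   is 0x1f3 & ((1 << 4) - 1) == 0x3, which is what the field actually
   holds after the store.  */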
4973 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
4975 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4977 if (GET_CODE (src) == CONST_INT
4978 && GET_CODE (width) == CONST_INT
4979 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4980 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4982 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4983 << INTVAL (width)) - 1));
4987 /* Compute SRC's hash code, and also notice if it
4988 should not be recorded at all. In that case,
4989 prevent any further processing of this assignment. */
4991 hash_arg_in_memory = 0;
4994 sets[i].src_hash = HASH (src, mode);
4995 sets[i].src_volatile = do_not_record;
4996 sets[i].src_in_memory = hash_arg_in_memory;
4998 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4999 a pseudo, do not record SRC. Using SRC as a replacement for
5000 anything else will be incorrect in that situation. Note that
5001 this usually occurs only for stack slots, in which case all the
5002 RTL would be referring to SRC, so we don't lose any optimization
5003 opportunities by not having SRC in the hash table. */
5006 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5008 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5009 sets[i].src_volatile = 1;
5012 /* It is no longer clear why we used to do this, but it doesn't
5013 appear to still be needed. So let's try without it since this
5014 code hurts cse'ing widened ops. */
5015 /* If source is a paradoxical subreg (such as QI treated as an SI),
5016 treat it as volatile. It may do the work of an SI in one context
5017 where the extra bits are not being used, but cannot replace an SI in other contexts. */
5019 if (GET_CODE (src) == SUBREG
5020 && (GET_MODE_SIZE (GET_MODE (src))
5021 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5022 sets[i].src_volatile = 1;
5025 /* Locate all possible equivalent forms for SRC. Try to replace
5026 SRC in the insn with each cheaper equivalent.
5028 We have the following types of equivalents: SRC itself, a folded
5029 version, a value given in a REG_EQUAL note, or a value related to a constant.
5032 Each of these equivalents may be part of an additional class
5033 of equivalents (if more than one is in the table, they must be in
5034 the same class; we check for this).
5036 If the source is volatile, we don't do any table lookups.
5038 We note any constant equivalent for possible later use in a REG_NOTE. */
5041 if (!sets[i].src_volatile)
5042 elt = lookup (src, sets[i].src_hash, mode);
5044 sets[i].src_elt = elt;
5046 if (elt && src_eqv_here && src_eqv_elt)
5048 if (elt->first_same_value != src_eqv_elt->first_same_value)
5050 /* The REG_EQUAL is indicating that two formerly distinct
5051 classes are now equivalent. So merge them. */
5052 merge_equiv_classes (elt, src_eqv_elt);
5053 src_eqv_hash = HASH (src_eqv, elt->mode);
5054 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5060 else if (src_eqv_elt)
5063 /* Try to find a constant somewhere and record it in `src_const'.
5064 Record its table element, if any, in `src_const_elt'. Look in
5065 any known equivalences first. (If the constant is not in the
5066 table, also set `sets[i].src_const_hash'). */
5068 for (p = elt->first_same_value; p; p = p->next_same_value)
5072 src_const_elt = elt;
5077 && (CONSTANT_P (src_folded)
5078 /* Consider (minus (label_ref L1) (label_ref L2)) as
5079 "constant" here so we will record it. This allows us
5080 to fold switch statements when an ADDR_DIFF_VEC is used. */
5081 || (GET_CODE (src_folded) == MINUS
5082 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5083 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5084 src_const = src_folded, src_const_elt = elt;
5085 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5086 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5088 /* If we don't know if the constant is in the table, get its
5089 hash code and look it up. */
5090 if (src_const && src_const_elt == 0)
5092 sets[i].src_const_hash = HASH (src_const, mode);
5093 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5096 sets[i].src_const = src_const;
5097 sets[i].src_const_elt = src_const_elt;
5099 /* If the constant and our source are both in the table, mark them as
5100 equivalent. Otherwise, if a constant is in the table but the source
5101 isn't, set ELT to it. */
5102 if (src_const_elt && elt
5103 && src_const_elt->first_same_value != elt->first_same_value)
5104 merge_equiv_classes (elt, src_const_elt);
5105 else if (src_const_elt && elt == 0)
5106 elt = src_const_elt;
5108 /* See if there is a register linearly related to a constant
5109 equivalent of SRC. */
5111 && (GET_CODE (src_const) == CONST
5112 || (src_const_elt && src_const_elt->related_value != 0)))
5114 src_related = use_related_value (src_const, src_const_elt);
5117 struct table_elt *src_related_elt
5118 = lookup (src_related, HASH (src_related, mode), mode);
5119 if (src_related_elt && elt)
5121 if (elt->first_same_value
5122 != src_related_elt->first_same_value)
5123 /* This can occur when we previously saw a CONST
5124 involving a SYMBOL_REF and then see the SYMBOL_REF
5125 twice. Merge the involved classes. */
5126 merge_equiv_classes (elt, src_related_elt);
5129 src_related_elt = 0;
5131 else if (src_related_elt && elt == 0)
5132 elt = src_related_elt;
5136 /* See if we have a CONST_INT that is already in a register in a wider mode. */
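/* For example, if we need (const_int 7) in HImode and (reg:SI 100) is
   already known to hold (const_int 7), the loop below finds that register
   under the wider mode and gen_lowpart gives us its HImode low part as
   src_related.  (Register 100 is only illustrative.)  */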
5139 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5140 && GET_MODE_CLASS (mode) == MODE_INT
5141 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5143 enum machine_mode wider_mode;
5145 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5146 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5147 && src_related == 0;
5148 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5150 struct table_elt *const_elt
5151 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5156 for (const_elt = const_elt->first_same_value;
5157 const_elt; const_elt = const_elt->next_same_value)
5158 if (REG_P (const_elt->exp))
5160 src_related = gen_lowpart (mode,
5167 /* Another possibility is that we have an AND with a constant in
5168 a mode narrower than a word. If so, it might have been generated
5169 as part of an "if" which would narrow the AND. If we already
5170 have done the AND in a wider mode, we can use a SUBREG of that value. */
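/* For instance, if SRC is (and:QI (reg 100) (const_int 15)), the loop
   below forms the same AND in each wider mode (SImode, etc.) applied to
   the low part of the operand and looks it up; if an earlier insn already
   computed that wider AND into a register, src_related becomes the QImode
   low part of that register and the narrow AND need not be redone.
   (Register 100 is only illustrative.)  */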
5173 if (flag_expensive_optimizations && ! src_related
5174 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5175 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5177 enum machine_mode tmode;
5178 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5180 for (tmode = GET_MODE_WIDER_MODE (mode);
5181 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5182 tmode = GET_MODE_WIDER_MODE (tmode))
5184 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
5185 struct table_elt *larger_elt;
5189 PUT_MODE (new_and, tmode);
5190 XEXP (new_and, 0) = inner;
5191 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5192 if (larger_elt == 0)
5195 for (larger_elt = larger_elt->first_same_value;
5196 larger_elt; larger_elt = larger_elt->next_same_value)
5197 if (REG_P (larger_elt->exp))
5200 = gen_lowpart (mode, larger_elt->exp);
5210 #ifdef LOAD_EXTEND_OP
5211 /* See if a MEM has already been loaded with a widening operation;
5212 if it has, we can use a subreg of that. Many CISC machines
5213 also have such operations, but this is only likely to be
5214 beneficial on these machines. */
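/* For instance, on a target where LOAD_EXTEND_OP (QImode) is ZERO_EXTEND,
   if (zero_extend:SI (mem:QI addr)) has already been loaded into
   (reg:SI 100), the lookup below lets us use the QImode low part of that
   register in place of the QImode memory reference.  (The register number
   and address are only illustrative.)  */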
5216 if (flag_expensive_optimizations && src_related == 0
5217 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5218 && GET_MODE_CLASS (mode) == MODE_INT
5219 && MEM_P (src) && ! do_not_record
5220 && LOAD_EXTEND_OP (mode) != UNKNOWN)
5222 struct rtx_def memory_extend_buf;
5223 rtx memory_extend_rtx = &memory_extend_buf;
5224 enum machine_mode tmode;
5226 /* Set what we are trying to extend and the operation it might
5227 have been extended with. */
5228 memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
5229 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5230 XEXP (memory_extend_rtx, 0) = src;
5232 for (tmode = GET_MODE_WIDER_MODE (mode);
5233 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5234 tmode = GET_MODE_WIDER_MODE (tmode))
5236 struct table_elt *larger_elt;
5238 PUT_MODE (memory_extend_rtx, tmode);
5239 larger_elt = lookup (memory_extend_rtx,
5240 HASH (memory_extend_rtx, tmode), tmode);
5241 if (larger_elt == 0)
5244 for (larger_elt = larger_elt->first_same_value;
5245 larger_elt; larger_elt = larger_elt->next_same_value)
5246 if (REG_P (larger_elt->exp))
5248 src_related = gen_lowpart (mode,
5257 #endif /* LOAD_EXTEND_OP */
5259 if (src == src_folded)
5262 /* At this point, ELT, if nonzero, points to a class of expressions
5263 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5264 and SRC_RELATED, if nonzero, each contain additional equivalent
5265 expressions. Prune these latter expressions by deleting expressions
5266 already in the equivalence class.
5268 Check for an equivalent identical to the destination. If found,
5269 this is the preferred equivalent since it will likely lead to
5270 elimination of the insn. Indicate this by placing it in `src_related'. */
5274 elt = elt->first_same_value;
5275 for (p = elt; p; p = p->next_same_value)
5277 enum rtx_code code = GET_CODE (p->exp);
5279 /* If the expression is not valid, ignore it. Then we do not
5280 have to check for validity below. In most cases, we can use
5281 `rtx_equal_p', since canonicalization has already been done. */
5282 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
5285 /* Also skip paradoxical subregs, unless that's what we're looking for. */
5288 && (GET_MODE_SIZE (GET_MODE (p->exp))
5289 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5291 && GET_CODE (src) == SUBREG
5292 && GET_MODE (src) == GET_MODE (p->exp)
5293 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5294 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5297 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5299 else if (src_folded && GET_CODE (src_folded) == code
5300 && rtx_equal_p (src_folded, p->exp))
5302 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5303 && rtx_equal_p (src_eqv_here, p->exp))
5305 else if (src_related && GET_CODE (src_related) == code
5306 && rtx_equal_p (src_related, p->exp))
5309 /* This is the same as the destination of the insn; we want
5310 to prefer it. Copy it to src_related. The code below will
5311 then give it a negative cost. */
5312 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5316 /* Find the cheapest valid equivalent, trying all the available
5317 possibilities. Prefer items not in the hash table to ones
5318 that are when they are equal cost. Note that we can never
5319 worsen an insn as the current contents will also succeed.
5320 If we find an equivalent identical to the destination, use it as best,
5321 since this insn will probably be eliminated in that case. */
5324 if (rtx_equal_p (src, dest))
5325 src_cost = src_regcost = -1;
5328 src_cost = COST (src);
5329 src_regcost = approx_reg_cost (src);
5335 if (rtx_equal_p (src_eqv_here, dest))
5336 src_eqv_cost = src_eqv_regcost = -1;
5339 src_eqv_cost = COST (src_eqv_here);
5340 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5346 if (rtx_equal_p (src_folded, dest))
5347 src_folded_cost = src_folded_regcost = -1;
5350 src_folded_cost = COST (src_folded);
5351 src_folded_regcost = approx_reg_cost (src_folded);
5357 if (rtx_equal_p (src_related, dest))
5358 src_related_cost = src_related_regcost = -1;
5361 src_related_cost = COST (src_related);
5362 src_related_regcost = approx_reg_cost (src_related);
5366 /* If this was an indirect jump insn, a known label will really be
5367 cheaper even though it looks more expensive. */
5368 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5369 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5371 /* Terminate loop when replacement made. This must terminate since
5372 the current contents will be tested and will always be valid. */
5377 /* Skip invalid entries. */
5378 while (elt && !REG_P (elt->exp)
5379 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5380 elt = elt->next_same_value;
5382 /* A paradoxical subreg would be bad here: it'll be the right
5383 size, but later may be adjusted so that the upper bits aren't
5384 what we want. So reject it. */
5386 && GET_CODE (elt->exp) == SUBREG
5387 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5388 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5389 /* It is okay, though, if the rtx we're trying to match
5390 will ignore any of the bits we can't predict. */
5392 && GET_CODE (src) == SUBREG
5393 && GET_MODE (src) == GET_MODE (elt->exp)
5394 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5395 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5397 elt = elt->next_same_value;
5403 src_elt_cost = elt->cost;
5404 src_elt_regcost = elt->regcost;
5407 /* Find cheapest and skip it for the next time. For items
5408 of equal cost, use this order:
5409 src_folded, src, src_eqv, src_related and hash table entry. */
5411 && preferable (src_folded_cost, src_folded_regcost,
5412 src_cost, src_regcost) <= 0
5413 && preferable (src_folded_cost, src_folded_regcost,
5414 src_eqv_cost, src_eqv_regcost) <= 0
5415 && preferable (src_folded_cost, src_folded_regcost,
5416 src_related_cost, src_related_regcost) <= 0
5417 && preferable (src_folded_cost, src_folded_regcost,
5418 src_elt_cost, src_elt_regcost) <= 0)
5420 trial = src_folded, src_folded_cost = MAX_COST;
5421 if (src_folded_force_flag)
5423 rtx forced = force_const_mem (mode, trial);
5429 && preferable (src_cost, src_regcost,
5430 src_eqv_cost, src_eqv_regcost) <= 0
5431 && preferable (src_cost, src_regcost,
5432 src_related_cost, src_related_regcost) <= 0
5433 && preferable (src_cost, src_regcost,
5434 src_elt_cost, src_elt_regcost) <= 0)
5435 trial = src, src_cost = MAX_COST;
5436 else if (src_eqv_here
5437 && preferable (src_eqv_cost, src_eqv_regcost,
5438 src_related_cost, src_related_regcost) <= 0
5439 && preferable (src_eqv_cost, src_eqv_regcost,
5440 src_elt_cost, src_elt_regcost) <= 0)
5441 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5442 else if (src_related
5443 && preferable (src_related_cost, src_related_regcost,
5444 src_elt_cost, src_elt_regcost) <= 0)
5445 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5448 trial = copy_rtx (elt->exp);
5449 elt = elt->next_same_value;
5450 src_elt_cost = MAX_COST;
5453 /* We don't normally have an insn matching (set (pc) (pc)), so
5454 check for this separately here. We will delete such an insn below.
5457 For other cases such as a table jump or conditional jump
5458 where we know the ultimate target, go ahead and replace the
5459 operand. While that may not make a valid insn, we will
5460 reemit the jump below (and also insert any necessary barriers). */
5462 if (n_sets == 1 && dest == pc_rtx
5464 || (GET_CODE (trial) == LABEL_REF
5465 && ! condjump_p (insn))))
5467 /* Don't substitute non-local labels; this confuses the CFG. */
5468 if (GET_CODE (trial) == LABEL_REF
5469 && LABEL_REF_NONLOCAL_P (trial))
5472 SET_SRC (sets[i].rtl) = trial;
5473 cse_jumps_altered = 1;
5477 /* Look for a substitution that makes a valid insn. */
5478 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5480 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5482 /* If we just made a substitution inside a libcall, then we
5483 need to make the same substitution in any notes attached
5484 to the RETVAL insn. */
5486 && (REG_P (sets[i].orig_src)
5487 || GET_CODE (sets[i].orig_src) == SUBREG
5488 || MEM_P (sets[i].orig_src)))
5490 rtx note = find_reg_equal_equiv_note (libcall_insn);
5492 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
5497 /* The result of apply_change_group can be ignored; see canon_reg. */
5500 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5501 apply_change_group ();
5505 /* If we previously found constant pool entries for
5506 constants and this is a constant, try making a
5507 pool entry. Put it in src_folded unless we already have done
5508 this since that is where it likely came from. */
5510 else if (constant_pool_entries_cost
5511 && CONSTANT_P (trial)
5512 /* Reject cases that will abort in decode_rtx_const.
5513 On the alpha when simplifying a switch, we get
5514 (const (truncate (minus (label_ref) (label_ref)))). */
5515 && ! (GET_CODE (trial) == CONST
5516 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5517 /* Likewise on IA-64, except without the truncate. */
5518 && ! (GET_CODE (trial) == CONST
5519 && GET_CODE (XEXP (trial, 0)) == MINUS
5520 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5521 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5523 || (!MEM_P (src_folded)
5524 && ! src_folded_force_flag))
5525 && GET_MODE_CLASS (mode) != MODE_CC
5526 && mode != VOIDmode)
5528 src_folded_force_flag = 1;
5530 src_folded_cost = constant_pool_entries_cost;
5531 src_folded_regcost = constant_pool_entries_regcost;
5535 src = SET_SRC (sets[i].rtl);
5537 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5538 However, there is an important exception: If both are registers
5539 that are not the head of their equivalence class, replace SET_SRC
5540 with the head of the class. If we do not do this, we will have
5541 both registers live over a portion of the basic block. This way,
5542 their lifetimes will likely abut instead of overlapping. */
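/* Illustration (register numbers are made up): if (reg 101) and (reg 102)
   share a quantity whose class head (first_reg) is 101, and this insn has
   become (set (reg 102) (reg 102)), rewriting it as
   (set (reg 102) (reg 101)) sources the value from the class head, so the
   two registers' lifetimes tend to abut rather than overlap.  */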
5544 && REGNO_QTY_VALID_P (REGNO (dest)))
5546 int dest_q = REG_QTY (REGNO (dest));
5547 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5549 if (dest_ent->mode == GET_MODE (dest)
5550 && dest_ent->first_reg != REGNO (dest)
5551 && REG_P (src) && REGNO (src) == REGNO (dest)
5552 /* Don't do this if the original insn had a hard reg as
5553 SET_SRC or SET_DEST. */
5554 && (!REG_P (sets[i].src)
5555 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5556 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5557 /* We can't call canon_reg here because it won't do anything if
5558 SRC is a hard register. */
5560 int src_q = REG_QTY (REGNO (src));
5561 struct qty_table_elem *src_ent = &qty_table[src_q];
5562 int first = src_ent->first_reg;
5564 = (first >= FIRST_PSEUDO_REGISTER
5565 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5567 /* We must use validate_change even for this, because this
5568 might be a special no-op instruction, suitable only to tag notes onto. */
5570 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5573 /* If we had a constant that is cheaper than what we are now
5574 setting SRC to, use that constant. We ignored it when we
5575 thought we could make this into a no-op. */
5576 if (src_const && COST (src_const) < COST (src)
5577 && validate_change (insn, &SET_SRC (sets[i].rtl),
5584 /* If we made a change, recompute SRC values. */
5585 if (src != sets[i].src)
5589 hash_arg_in_memory = 0;
5591 sets[i].src_hash = HASH (src, mode);
5592 sets[i].src_volatile = do_not_record;
5593 sets[i].src_in_memory = hash_arg_in_memory;
5594 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5597 /* If this is a single SET, we are setting a register, and we have an
5598 equivalent constant, we want to add a REG_NOTE. We don't want
5599 to write a REG_EQUAL note for a constant pseudo since verifying that
5600 that pseudo hasn't been eliminated is a pain. Such a note also
5601 won't help anything.
5603 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5604 which can be created for a reference to a compile time computable
5605 entry in a jump table. */
5607 if (n_sets == 1 && src_const && REG_P (dest)
5608 && !REG_P (src_const)
5609 && ! (GET_CODE (src_const) == CONST
5610 && GET_CODE (XEXP (src_const, 0)) == MINUS
5611 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5612 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5614 /* We only want a REG_EQUAL note if src_const != src. */
5615 if (! rtx_equal_p (src, src_const))
5617 /* Make sure that the rtx is not shared. */
5618 src_const = copy_rtx (src_const);
5620 /* Record the actual constant value in a REG_EQUAL note,
5621 making a new one if one does not already exist. */
5622 set_unique_reg_note (insn, REG_EQUAL, src_const);
5626 /* Now deal with the destination. */
5629 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5630 while (GET_CODE (dest) == SUBREG
5631 || GET_CODE (dest) == ZERO_EXTRACT
5632 || GET_CODE (dest) == STRICT_LOW_PART)
5633 dest = XEXP (dest, 0);
5635 sets[i].inner_dest = dest;
5639 #ifdef PUSH_ROUNDING
5640 /* Stack pushes invalidate the stack pointer. */
5641 rtx addr = XEXP (dest, 0);
5642 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5643 && XEXP (addr, 0) == stack_pointer_rtx)
5644 invalidate (stack_pointer_rtx, Pmode);
5646 dest = fold_rtx (dest, insn);
5649 /* Compute the hash code of the destination now,
5650 before the effects of this instruction are recorded,
5651 since the register values used in the address computation
5652 are those before this instruction. */
5653 sets[i].dest_hash = HASH (dest, mode);
5655 /* Don't enter a bit-field in the hash table
5656 because the value in it after the store
5657 may not equal what was stored, due to truncation. */
5659 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5661 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5663 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5664 && GET_CODE (width) == CONST_INT
5665 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5666 && ! (INTVAL (src_const)
5667 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5668 /* Exception: if the value is constant,
5669 and it won't be truncated, record it. */
5673 /* This is chosen so that the destination will be invalidated
5674 but no new value will be recorded.
5675 We must invalidate because sometimes constant
5676 values can be recorded for bitfields. */
5677 sets[i].src_elt = 0;
5678 sets[i].src_volatile = 1;
5684 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete it. */
5686 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5688 /* One less use of the label this insn used to jump to. */
5690 cse_jumps_altered = 1;
5691 /* No more processing for this set. */
5695 /* If this SET is now setting PC to a label, we know it used to
5696 be a conditional or computed branch. */
5697 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5698 && !LABEL_REF_NONLOCAL_P (src))
5700 /* Now emit a BARRIER after the unconditional jump. */
5701 if (NEXT_INSN (insn) == 0
5702 || !BARRIER_P (NEXT_INSN (insn)))
5703 emit_barrier_after (insn);
5705 /* We reemit the jump in as many cases as possible just in
5706 case the form of an unconditional jump is significantly
5707 different from a computed jump or conditional jump.
5709 If this insn has multiple sets, then reemitting the
5710 jump is nontrivial. So instead we just force rerecognition
5711 and hope for the best. */
5716 new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5717 JUMP_LABEL (new) = XEXP (src, 0);
5718 LABEL_NUSES (XEXP (src, 0))++;
5720 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5721 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5724 XEXP (note, 1) = NULL_RTX;
5725 REG_NOTES (new) = note;
5731 /* Now emit a BARRIER after the unconditional jump. */
5732 if (NEXT_INSN (insn) == 0
5733 || !BARRIER_P (NEXT_INSN (insn)))
5734 emit_barrier_after (insn);
5737 INSN_CODE (insn) = -1;
5739 /* Do not bother deleting any unreachable code;
5740 let jump/flow do that. */
5742 cse_jumps_altered = 1;
5746 /* If destination is volatile, invalidate it and then do no further
5747 processing for this assignment. */
5749 else if (do_not_record)
5751 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5752 invalidate (dest, VOIDmode);
5753 else if (MEM_P (dest))
5754 invalidate (dest, VOIDmode);
5755 else if (GET_CODE (dest) == STRICT_LOW_PART
5756 || GET_CODE (dest) == ZERO_EXTRACT)
5757 invalidate (XEXP (dest, 0), GET_MODE (dest));
5761 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5762 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5765 /* If setting CC0, record what it was set to, or a constant, if it
5766 is equivalent to a constant. If it is being set to a floating-point
5767 value, make a COMPARE with the appropriate constant of 0. If we
5768 don't do this, later code can interpret this as a test against
5769 const0_rtx, which can cause problems if we try to put it into an
5770 insn as a floating-point operand. */
5771 if (dest == cc0_rtx)
5773 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5774 this_insn_cc0_mode = mode;
5775 if (FLOAT_MODE_P (mode))
5776 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5782 /* Now enter all non-volatile source expressions in the hash table
5783 if they are not already present.
5784 Record their equivalence classes in src_elt.
5785 This way we can insert the corresponding destinations into
5786 the same classes even if the actual sources are no longer in them
5787 (having been invalidated). */
5789 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5790 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5792 struct table_elt *elt;
5793 struct table_elt *classp = sets[0].src_elt;
5794 rtx dest = SET_DEST (sets[0].rtl);
5795 enum machine_mode eqvmode = GET_MODE (dest);
5797 if (GET_CODE (dest) == STRICT_LOW_PART)
5799 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5802 if (insert_regs (src_eqv, classp, 0))
5804 rehash_using_reg (src_eqv);
5805 src_eqv_hash = HASH (src_eqv, eqvmode);
5807 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5808 elt->in_memory = src_eqv_in_memory;
5811 /* Check to see if src_eqv_elt is the same as a set source which
5812 does not yet have an elt, and if so set the elt of the set source to src_eqv_elt. */
5814 for (i = 0; i < n_sets; i++)
5815 if (sets[i].rtl && sets[i].src_elt == 0
5816 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5817 sets[i].src_elt = src_eqv_elt;
5820 for (i = 0; i < n_sets; i++)
5821 if (sets[i].rtl && ! sets[i].src_volatile
5822 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5824 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5826 /* REG_EQUAL in setting a STRICT_LOW_PART
5827 gives an equivalent for the entire destination register,
5828 not just for the subreg being stored in now.
5829 This is a more interesting equivalence, so we arrange later
5830 to treat the entire reg as the destination. */
5831 sets[i].src_elt = src_eqv_elt;
5832 sets[i].src_hash = src_eqv_hash;
5836 /* Insert source and constant equivalent into hash table, if not already present. */
5838 struct table_elt *classp = src_eqv_elt;
5839 rtx src = sets[i].src;
5840 rtx dest = SET_DEST (sets[i].rtl);
5841 enum machine_mode mode
5842 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5844 /* It's possible that we have a source value known to be
5845 constant but don't have a REG_EQUAL note on the insn.
5846 Lack of a note will mean src_eqv_elt will be NULL. This
5847 can happen where we've generated a SUBREG to access a
5848 CONST_INT that is already in a register in a wider mode.
5849 Ensure that the source expression is put in the proper constant class. */
5852 classp = sets[i].src_const_elt;
5854 if (sets[i].src_elt == 0)
5856 /* Don't put a hard register source into the table if this is
5857 the last insn of a libcall. In this case, we only need
5858 to put src_eqv_elt in src_elt. */
5859 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5861 struct table_elt *elt;
5863 /* Note that these insert_regs calls cannot remove
5864 any of the src_elt's, because they would have failed to
5865 match if not still valid. */
5866 if (insert_regs (src, classp, 0))
5868 rehash_using_reg (src);
5869 sets[i].src_hash = HASH (src, mode);
5871 elt = insert (src, classp, sets[i].src_hash, mode);
5872 elt->in_memory = sets[i].src_in_memory;
5873 sets[i].src_elt = classp = elt;
5876 sets[i].src_elt = classp;
5878 if (sets[i].src_const && sets[i].src_const_elt == 0
5879 && src != sets[i].src_const
5880 && ! rtx_equal_p (sets[i].src_const, src))
5881 sets[i].src_elt = insert (sets[i].src_const, classp,
5882 sets[i].src_const_hash, mode);
5885 else if (sets[i].src_elt == 0)
5886 /* If we did not insert the source into the hash table (e.g., it was
5887 volatile), note the equivalence class for the REG_EQUAL value, if any,
5888 so that the destination goes into that class. */
5889 sets[i].src_elt = src_eqv_elt;
5891 invalidate_from_clobbers (x);
5893 /* Some registers are invalidated by subroutine calls. Memory is
5894 invalidated by non-constant calls. */
5898 if (! CONST_OR_PURE_CALL_P (insn))
5899 invalidate_memory ();
5900 invalidate_for_call ();
5903 /* Now invalidate everything set by this instruction.
5904 If a SUBREG or other funny destination is being set,
5905 sets[i].rtl is still nonzero, so here we invalidate the reg
5906 a part of which is being set. */
5908 for (i = 0; i < n_sets; i++)
5911 /* We can't use the inner dest, because the mode associated with
5912 a ZERO_EXTRACT is significant. */
5913 rtx dest = SET_DEST (sets[i].rtl);
5915 /* Needed for registers to remove the register from its
5916 previous quantity's chain.
5917 Needed for memory if this is a nonvarying address, unless
5918 we have just done an invalidate_memory that covers even those. */
5919 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5920 invalidate (dest, VOIDmode);
5921 else if (MEM_P (dest))
5922 invalidate (dest, VOIDmode);
5923 else if (GET_CODE (dest) == STRICT_LOW_PART
5924 || GET_CODE (dest) == ZERO_EXTRACT)
5925 invalidate (XEXP (dest, 0), GET_MODE (dest));
5928 /* A volatile ASM invalidates everything. */
5929 if (NONJUMP_INSN_P (insn)
5930 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5931 && MEM_VOLATILE_P (PATTERN (insn)))
5932 flush_hash_table ();
5934 /* Make sure registers mentioned in destinations
5935 are safe for use in an expression to be inserted.
5936 This removes from the hash table
5937 any invalid entry that refers to one of these registers.
5939 We don't care about the return value from mention_regs because
5940 we are going to hash the SET_DEST values unconditionally. */
5942 for (i = 0; i < n_sets; i++)
5946 rtx x = SET_DEST (sets[i].rtl);
5952 /* We used to rely on all references to a register becoming
5953 inaccessible when a register changes to a new quantity,
5954 since that changes the hash code. However, that is not
5955 safe, since after HASH_SIZE new quantities we get a
5956 hash 'collision' of a register with its own invalid
5957 entries. And since SUBREGs have been changed not to
5958 change their hash code with the hash code of the register,
5959 it wouldn't work any longer at all. So we have to check
5960 for any invalid references lying around now.
5961 This code is similar to the REG case in mention_regs,
5962 but it knows that reg_tick has been incremented, and
5963 it leaves reg_in_table as -1. */
5964 unsigned int regno = REGNO (x);
5965 unsigned int endregno
5966 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5967 : hard_regno_nregs[regno][GET_MODE (x)]);
5970 for (i = regno; i < endregno; i++)
5972 if (REG_IN_TABLE (i) >= 0)
5974 remove_invalid_refs (i);
5975 REG_IN_TABLE (i) = -1;
5982 /* We may have just removed some of the src_elt's from the hash table.
5983 So replace each one with the current head of the same class. */
5985 for (i = 0; i < n_sets; i++)
5988 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5989 /* If elt was removed, find current head of same class,
5990 or 0 if nothing remains of that class. */
5992 struct table_elt *elt = sets[i].src_elt;
5994 while (elt && elt->prev_same_value)
5995 elt = elt->prev_same_value;
5997 while (elt && elt->first_same_value == 0)
5998 elt = elt->next_same_value;
5999 sets[i].src_elt = elt ? elt->first_same_value : 0;
6003 /* Now insert the destinations into their equivalence classes. */
6005 for (i = 0; i < n_sets; i++)
6008 rtx dest = SET_DEST (sets[i].rtl);
6009 struct table_elt *elt;
6011 /* Don't record value if we are not supposed to risk allocating
6012 floating-point values in registers that might be wider than memory. */
6014 if ((flag_float_store
6016 && FLOAT_MODE_P (GET_MODE (dest)))
6017 /* Don't record BLKmode values, because we don't know the
6018 size of it, and can't be sure that other BLKmode values
6019 have the same or smaller size. */
6020 || GET_MODE (dest) == BLKmode
6021 /* Don't record values of destinations set inside a libcall block
6022 since we might delete the libcall. Things should have been set
6023 up so we won't want to reuse such a value, but we play it safe here. */
6026 /* If we didn't put a REG_EQUAL value or a source into the hash
6027 table, there is no point in recording DEST.
6028 || sets[i].src_elt == 0
6029 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6030 or SIGN_EXTEND, don't record DEST since it can cause
6031 some tracking to be wrong.
6033 ??? Think about this more later. */
6034 || (GET_CODE (dest) == SUBREG
6035 && (GET_MODE_SIZE (GET_MODE (dest))
6036 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6037 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6038 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6041 /* STRICT_LOW_PART isn't part of the value BEING set,
6042 and neither is the SUBREG inside it.
6043 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6044 if (GET_CODE (dest) == STRICT_LOW_PART)
6045 dest = SUBREG_REG (XEXP (dest, 0));
6047 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
6048 /* Registers must also be inserted into chains for quantities. */
6049 if (insert_regs (dest, sets[i].src_elt, 1))
6051 /* If `insert_regs' changes something, the hash code must be recomputed. */
6053 rehash_using_reg (dest);
6054 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6057 elt = insert (dest, sets[i].src_elt,
6058 sets[i].dest_hash, GET_MODE (dest));
6060 elt->in_memory = (MEM_P (sets[i].inner_dest)
6061 && !MEM_READONLY_P (sets[i].inner_dest));
6063 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6064 narrower than M2, and both M1 and M2 are the same number of words,
6065 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6066 make that equivalence as well.
6068 However, BAR may have equivalences for which gen_lowpart
6069 will produce a simpler value than gen_lowpart applied to
6070 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6071 BAR's equivalences. If we don't get a simplified form, make
6072 the SUBREG. It will not be used in an equivalence, but will
6073 cause two similar assignments to be detected.
6075 Note the loop below will find SUBREG_REG (DEST) since we have
6076 already entered SRC and DEST of the SET in the table. */
6078 if (GET_CODE (dest) == SUBREG
6079 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6081 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6082 && (GET_MODE_SIZE (GET_MODE (dest))
6083 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6084 && sets[i].src_elt != 0)
6086 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6087 struct table_elt *elt, *classp = 0;
6089 for (elt = sets[i].src_elt->first_same_value; elt;
6090 elt = elt->next_same_value)
6094 struct table_elt *src_elt;
6097 /* Ignore invalid entries. */
6098 if (!REG_P (elt->exp)
6099 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
6102 /* We may have already been playing subreg games. If the
6103 mode is already correct for the destination, use it. */
6104 if (GET_MODE (elt->exp) == new_mode)
6108 /* Calculate big endian correction for the SUBREG_BYTE.
6109 We have already checked that M1 (GET_MODE (dest))
6110 is not narrower than M2 (new_mode). */
6111 if (BYTES_BIG_ENDIAN)
6112 byte = (GET_MODE_SIZE (GET_MODE (dest))
6113 - GET_MODE_SIZE (new_mode));
6115 new_src = simplify_gen_subreg (new_mode, elt->exp,
6116 GET_MODE (dest), byte);
6119 /* The call to simplify_gen_subreg fails if the value
6120 is VOIDmode, yet we can't do any simplification, e.g.
6121 for EXPR_LISTs denoting function call results.
6122 It is invalid to construct a SUBREG with a VOIDmode
6123 SUBREG_REG, hence a zero new_src means we can't do
6124 this substitution. */
6128 src_hash = HASH (new_src, new_mode);
6129 src_elt = lookup (new_src, src_hash, new_mode);
6131 /* Put the new source in the hash table if it isn't already. */
6135 if (insert_regs (new_src, classp, 0))
6137 rehash_using_reg (new_src);
6138 src_hash = HASH (new_src, new_mode);
6140 src_elt = insert (new_src, classp, src_hash, new_mode);
6141 src_elt->in_memory = elt->in_memory;
6143 else if (classp && classp != src_elt->first_same_value)
6144 /* Show that two things that we've seen before are
6145 actually the same. */
6146 merge_equiv_classes (src_elt, classp);
6148 classp = src_elt->first_same_value;
6149 /* Ignore invalid entries. */
6151 && !REG_P (classp->exp)
6152 && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
6153 classp = classp->next_same_value;
6158 /* Special handling for (set REG0 REG1) where REG0 is the
6159 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6160 be used in the sequel, so (if easily done) change this insn to
6161 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6162 that computed their value. Then REG1 will become a dead store
6163 and won't cloud the situation for later optimizations.
6165 Do not make this change if REG1 is a hard register, because it will
6166 then be used in the sequel and we may be changing a two-operand insn
6167 into a three-operand insn.
6169 Also do not do this if we are operating on a copy of INSN.
6171 Also don't do this if INSN ends a libcall; this would cause an unrelated
6172 register to be set in the middle of a libcall, and we then get bad code
6173 if the libcall is deleted. */
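/* A hypothetical sketch of the transformation (register numbers are made up;
   reg 100 is the "cheapest" register of the equivalence class):

       before:  (set (reg:SI 101) (plus:SI (reg:SI 102) (const_int 4)))
                (set (reg:SI 100) (reg:SI 101))

       after:   (set (reg:SI 100) (plus:SI (reg:SI 102) (const_int 4)))
                (set (reg:SI 101) (reg:SI 100))

   If reg 101 is not used afterwards, the second insn is now a trivially dead
   copy and can be removed by delete_trivially_dead_insns.  */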
6175 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
6176 && NEXT_INSN (PREV_INSN (insn)) == insn
6177 && REG_P (SET_SRC (sets[0].rtl))
6178 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6179 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6181 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6182 struct qty_table_elem *src_ent = &qty_table[src_q];
6184 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6185 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6188 /* Scan for the previous nonnote insn, but stop at a basic block boundary. */
6192 prev = PREV_INSN (prev);
6194 while (prev && NOTE_P (prev)
6195 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6197 /* Do not swap the registers around if the previous instruction
6198 attaches a REG_EQUIV note to REG1.
6200 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6201 from the pseudo that originally shadowed an incoming argument
6202 to another register. Some uses of REG_EQUIV might rely on it
6203 being attached to REG1 rather than REG2.
6205 This section previously turned the REG_EQUIV into a REG_EQUAL
6206 note. We cannot do that because REG_EQUIV may provide an
6207 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
6209 if (prev != 0 && NONJUMP_INSN_P (prev)
6210 && GET_CODE (PATTERN (prev)) == SET
6211 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6212 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6214 rtx dest = SET_DEST (sets[0].rtl);
6215 rtx src = SET_SRC (sets[0].rtl);
6218 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6219 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6220 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6221 apply_change_group ();
6223 /* If INSN has a REG_EQUAL note, and this note mentions
6224 REG0, then we must delete it, because the value in
6225 REG0 has changed. If the note's value is REG1, we must
6226 also delete it because that is now this insn's dest. */
6227 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6229 && (reg_mentioned_p (dest, XEXP (note, 0))
6230 || rtx_equal_p (src, XEXP (note, 0))))
6231 remove_note (insn, note);
6236 /* If this is a conditional jump insn, record any known equivalences due to
6237 the condition being tested. */
6240 && n_sets == 1 && GET_CODE (x) == SET
6241 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6242 record_jump_equiv (insn, 0);
6245 /* If the previous insn set CC0 and this insn no longer references CC0,
6246 delete the previous insn. Here we use the fact that nothing expects CC0
6247 to be valid over an insn, which is true until the final pass. */
6248 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6249 && (tem = single_set (prev_insn)) != 0
6250 && SET_DEST (tem) == cc0_rtx
6251 && ! reg_mentioned_p (cc0_rtx, x))
6252 delete_insn (prev_insn);
6254 prev_insn_cc0 = this_insn_cc0;
6255 prev_insn_cc0_mode = this_insn_cc0_mode;
6260 /* Remove from the hash table all expressions that reference memory. */
6263 invalidate_memory (void)
6266 struct table_elt *p, *next;
6268 for (i = 0; i < HASH_SIZE; i++)
6269 for (p = table[i]; p; p = next)
6271 next = p->next_same_hash;
6273 remove_from_table (p, i);
6277 /* If ADDR is an address that implicitly affects the stack pointer, return
6278 1 and update the register tables to show the effect. Else, return 0. */
6281 addr_affects_sp_p (rtx addr)
6283 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
6284 && REG_P (XEXP (addr, 0))
6285 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6287 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6289 REG_TICK (STACK_POINTER_REGNUM)++;
6290 /* Is it possible to use a subreg of SP? */
6291 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6294 /* This should be *very* rare. */
6295 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6296 invalidate (stack_pointer_rtx, VOIDmode);
6304 /* Perform invalidation on the basis of everything about an insn
6305 except for invalidating the actual places that are SET in it.
6306 This includes the places CLOBBERed, and anything that might
6307 alias with something that is SET or CLOBBERed.
6309 X is the pattern of the insn. */
6312 invalidate_from_clobbers (rtx x)
6314 if (GET_CODE (x) == CLOBBER)
6316 rtx ref = XEXP (x, 0);
6319 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6321 invalidate (ref, VOIDmode);
6322 else if (GET_CODE (ref) == STRICT_LOW_PART
6323 || GET_CODE (ref) == ZERO_EXTRACT)
6324 invalidate (XEXP (ref, 0), GET_MODE (ref));
6327 else if (GET_CODE (x) == PARALLEL)
6330 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6332 rtx y = XVECEXP (x, 0, i);
6333 if (GET_CODE (y) == CLOBBER)
6335 rtx ref = XEXP (y, 0);
6336 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6338 invalidate (ref, VOIDmode);
6339 else if (GET_CODE (ref) == STRICT_LOW_PART
6340 || GET_CODE (ref) == ZERO_EXTRACT)
6341 invalidate (XEXP (ref, 0), GET_MODE (ref));
6347 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6348 and replace any registers in them with either an equivalent constant
6349 or the canonical form of the register. If we are inside an address,
6350 only do this if the address remains valid.
6352 OBJECT is 0 except when within a MEM in which case it is the MEM.
6354 Return the replacement for X. */
6357 cse_process_notes (rtx x, rtx object)
6359 enum rtx_code code = GET_CODE (x);
6360 const char *fmt = GET_RTX_FORMAT (code);
6377 validate_change (x, &XEXP (x, 0),
6378 cse_process_notes (XEXP (x, 0), x), 0);
6383 if (REG_NOTE_KIND (x) == REG_EQUAL)
6384 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6386 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6393 rtx new = cse_process_notes (XEXP (x, 0), object);
6394 /* We don't substitute VOIDmode constants into these rtx,
6395 since they would impede folding. */
6396 if (GET_MODE (new) != VOIDmode)
6397 validate_change (object, &XEXP (x, 0), new, 0);
6402 i = REG_QTY (REGNO (x));
6404 /* Return a constant or a constant register. */
6405 if (REGNO_QTY_VALID_P (REGNO (x)))
6407 struct qty_table_elem *ent = &qty_table[i];
6409 if (ent->const_rtx != NULL_RTX
6410 && (CONSTANT_P (ent->const_rtx)
6411 || REG_P (ent->const_rtx)))
6413 rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
6419 /* Otherwise, canonicalize this register. */
6420 return canon_reg (x, NULL_RTX);
6426 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6428 validate_change (object, &XEXP (x, i),
6429 cse_process_notes (XEXP (x, i), object), 0);
6434 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6435 since they are done elsewhere. This function is called via note_stores. */
6438 invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6440 enum rtx_code code = GET_CODE (dest);
6443 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6444 /* There are times when an address can appear varying and be a PLUS
6445 during this scan when it would be a fixed address were we to know
6446 the proper equivalences. So invalidate all memory if there is
6447 a BLKmode or nonscalar memory reference or a reference to a
6448 variable address. */
6449 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6450 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6452 invalidate_memory ();
6456 if (GET_CODE (set) == CLOBBER
6461 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6462 invalidate (XEXP (dest, 0), GET_MODE (dest));
6463 else if (code == REG || code == SUBREG || code == MEM)
6464 invalidate (dest, VOIDmode);
6467 /* Invalidate all insns from START up to the end of the function or the
6468 next label. This is called when we wish to CSE around a block that is
6469 conditionally executed. */
6472 invalidate_skipped_block (rtx start)
6476 for (insn = start; insn && !LABEL_P (insn);
6477 insn = NEXT_INSN (insn))
6479 if (! INSN_P (insn))
6484 if (! CONST_OR_PURE_CALL_P (insn))
6485 invalidate_memory ();
6486 invalidate_for_call ();
6489 invalidate_from_clobbers (PATTERN (insn));
6490 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6494 /* Find the end of INSN's basic block and return its range,
6495 the total number of SETs in all the insns of the block, the last insn of the
6496 block, and the branch path.
6498 The branch path indicates which branches should be followed. If a nonzero
6499 path size is specified, the block should be rescanned and a different set
6500 of branches will be taken. The branch path is only used if
6501 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6503 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6504 used to describe the block. It is filled in with the information about
6505 the current block. The incoming structure's branch path, if any, is used
6506 to construct the output branch path. */
6509 cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6510 int follow_jumps, int skip_blocks)
6514 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6515 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6516 int path_size = data->path_size;
6520 /* Update the previous branch path, if any. If the last branch was
6521 previously PATH_TAKEN, mark it PATH_NOT_TAKEN.
6522 If it was previously PATH_NOT_TAKEN,
6523 shorten the path by one and look at the previous branch. We know that
6524 at least one branch must have been taken if PATH_SIZE is nonzero. */
6525 while (path_size > 0)
6527 if (data->path[path_size - 1].status != PATH_NOT_TAKEN)
6529 data->path[path_size - 1].status = PATH_NOT_TAKEN;
6536 /* If the first instruction is marked with QImode, that means we've
6537 already processed this block. Our caller will look at DATA->LAST
6538 to figure out where to go next. We want to return the next block
6539 in the instruction stream, not some branched-to block somewhere
6540 else. We accomplish this by pretending our caller forbade us to
6541 follow jumps or skip blocks. */
6542 if (GET_MODE (insn) == QImode)
6543 follow_jumps = skip_blocks = 0;
6545 /* Scan to end of this basic block. */
6546 while (p && !LABEL_P (p))
6548 /* Don't cse over a call to setjmp; on some machines (e.g. VAX)
6549 the regs restored by the longjmp come from
6550 a later time than the setjmp. */
6551 if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
6552 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6555 /* A PARALLEL can have lots of SETs in it,
6556 especially if it is really an ASM_OPERANDS. */
6557 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6558 nsets += XVECLEN (PATTERN (p), 0);
6559 else if (!NOTE_P (p))
6562 /* Ignore insns made by CSE; they cannot affect the boundaries of the basic block. */
6565 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6566 high_cuid = INSN_CUID (p);
6567 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6568 low_cuid = INSN_CUID (p);
6570 /* See if this insn is in our branch path. If it is and we are to take it, do so. */
6572 if (path_entry < path_size && data->path[path_entry].branch == p)
6574 if (data->path[path_entry].status != PATH_NOT_TAKEN)
6577 /* Point to next entry in path, if any. */
6581 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6582 was specified, we haven't reached our maximum path length, there are
6583 insns following the target of the jump, this is the only use of the
6584 jump label, and the target label is preceded by a BARRIER.
6586 Alternatively, we can follow the jump if it branches around a
6587 block of code and there are no other branches into the block.
6588 In this case invalidate_skipped_block will be called to invalidate any
6589 registers set in the block when following the jump. */
6591 else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6593 && GET_CODE (PATTERN (p)) == SET
6594 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6595 && JUMP_LABEL (p) != 0
6596 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6597 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6599 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6601 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6602 || (PREV_INSN (q) && CALL_P (PREV_INSN (q))
6603 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6604 && (!LABEL_P (q) || LABEL_NUSES (q) != 0))
6607 /* If we ran into a BARRIER, this code is an extension of the
6608 basic block when the branch is taken. */
6609 if (follow_jumps && q != 0 && BARRIER_P (q))
6611 /* Don't allow ourselves to keep walking around an
6612 always-executed loop. */
6613 if (next_real_insn (q) == next)
6619 /* Similarly, don't put a branch in our path more than once. */
6620 for (i = 0; i < path_entry; i++)
6621 if (data->path[i].branch == p)
6624 if (i != path_entry)
6627 data->path[path_entry].branch = p;
6628 data->path[path_entry++].status = PATH_TAKEN;
6630 /* This branch now ends our path. It was possible that we
6631 didn't see this branch the last time around (when the
6632 insn in front of the target was a JUMP_INSN that was
6633 turned into a no-op). */
6634 path_size = path_entry;
6637 /* Mark block so we won't scan it again later. */
6638 PUT_MODE (NEXT_INSN (p), QImode);
6640 /* Detect a branch around a block of code. */
6641 else if (skip_blocks && q != 0 && !LABEL_P (q))
6645 if (next_real_insn (q) == next)
6651 for (i = 0; i < path_entry; i++)
6652 if (data->path[i].branch == p)
6655 if (i != path_entry)
6658 /* This is no_labels_between_p (p, q) with an added check for
6659 reaching the end of a function (in case Q precedes P). */
6660 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6666 data->path[path_entry].branch = p;
6667 data->path[path_entry++].status = PATH_AROUND;
6669 path_size = path_entry;
6672 /* Mark block so we won't scan it again later. */
6673 PUT_MODE (NEXT_INSN (p), QImode);
6680 data->low_cuid = low_cuid;
6681 data->high_cuid = high_cuid;
6682 data->nsets = nsets;
6685 /* If none of the jumps in the path were taken, set our path length to zero
6686 so a rescan won't be done. */
6687 for (i = path_size - 1; i >= 0; i--)
6688 if (data->path[i].status != PATH_NOT_TAKEN)
6692 data->path_size = 0;
6694 data->path_size = path_size;
6696 /* End the current branch path. */
6697 data->path[path_size].branch = 0;
6700 /* Perform cse on the instructions of a function.
6701 F is the first instruction.
6702 NREGS is one plus the highest pseudo-reg number used in the function.
6704 Returns 1 if jump_optimize should be redone due to simplifications
6705 in conditional jump instructions. */
6708 cse_main (rtx f, int nregs, FILE *file)
6710 struct cse_basic_block_data val;
6714 val.path = xmalloc (sizeof (struct branch_path)
6715 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6717 cse_jumps_altered = 0;
6718 recorded_label_ref = 0;
6719 constant_pool_entries_cost = 0;
6720 constant_pool_entries_regcost = 0;
6722 rtl_hooks = cse_rtl_hooks;
6725 init_alias_analysis ();
6727 reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
6729 /* Reset the counter indicating how many elements have been made this run. */
6731 n_elements_made = 0;
6733 /* Find the largest uid. */
6735 max_uid = get_max_uid ();
6736 uid_cuid = xcalloc (max_uid + 1, sizeof (int));
6738 /* Compute the mapping from uids to cuids.
6739 CUIDs are numbers assigned to insns, like uids,
6740 except that cuids increase monotonically through the code.
6741 Don't assign cuids to line-number NOTEs, so that the distance in cuids
6742 between two insns is not affected by -g. */
6744 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6747 || NOTE_LINE_NUMBER (insn) < 0)
6748 INSN_CUID (insn) = ++i;
6750 /* Give a line number note the same cuid as preceding insn. */
6751 INSN_CUID (insn) = i;
6754 /* Loop over basic blocks.
6755 Compute the maximum number of qty's needed for each basic block
6756 (which is 2 for each SET). */
6761 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps,
6762 flag_cse_skip_blocks);
6764 /* If this basic block was already processed or has no sets, skip it. */
6765 if (val.nsets == 0 || GET_MODE (insn) == QImode)
6767 PUT_MODE (insn, VOIDmode);
6768 insn = (val.last ? NEXT_INSN (val.last) : 0);
6773 cse_basic_block_start = val.low_cuid;
6774 cse_basic_block_end = val.high_cuid;
6775 max_qty = val.nsets * 2;
6778 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
6779 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6782 /* Make MAX_QTY bigger to give us room to optimize
6783 past the end of this basic block, if that should prove useful. */
6787 /* If this basic block is being extended by following certain jumps,
6788 (see `cse_end_of_basic_block'), we reprocess the code from the start.
6789 Otherwise, we start after this basic block. */
6790 if (val.path_size > 0)
6791 cse_basic_block (insn, val.last, val.path);
6794 int old_cse_jumps_altered = cse_jumps_altered;
6797 /* When cse changes a conditional jump to an unconditional
6798 jump, we want to reprocess the block, since it will give
6799 us a new branch path to investigate. */
6800 cse_jumps_altered = 0;
6801 temp = cse_basic_block (insn, val.last, val.path);
6802 if (cse_jumps_altered == 0
6803 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
6806 cse_jumps_altered |= old_cse_jumps_altered;
6817 if (max_elements_made < n_elements_made)
6818 max_elements_made = n_elements_made;
6821 end_alias_analysis ();
6823 free (reg_eqv_table);
6825 rtl_hooks = general_rtl_hooks;
6827 return cse_jumps_altered || recorded_label_ref;
6830 /* Process a single basic block. FROM and TO are the limits of the basic
6831 block. NEXT_BRANCH points to the branch path when following jumps or
6832 a null path when not following jumps. */
6835 cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
6839 rtx libcall_insn = NULL_RTX;
6841 int no_conflict = 0;
6843 /* Allocate the space needed by qty_table. */
6844 qty_table = xmalloc (max_qty * sizeof (struct qty_table_elem));
6848 /* TO might be a label. If so, protect it from being deleted. */
6849 if (to != 0 && LABEL_P (to))
6852 for (insn = from; insn != to; insn = NEXT_INSN (insn))
6854 enum rtx_code code = GET_CODE (insn);
6856 /* If we have processed 1,000 insns, flush the hash table to
6857 avoid extreme quadratic behavior. We must not include NOTEs
6858 in the count since there may be more of them when generating
6859 debugging information. If we clear the table at different
6860 times, code generated with -g -O might be different than code
6861 generated with -O but not -g.
6863 ??? This is a real kludge and needs to be done some other way.
6865 if (code != NOTE && num_insns++ > 1000)
6867 flush_hash_table ();
6871 /* See if this is a branch that is part of the path. If so, and it is
6872 to be taken, do so. */
6873 if (next_branch->branch == insn)
6875 enum taken status = next_branch++->status;
6876 if (status != PATH_NOT_TAKEN)
6878 if (status == PATH_TAKEN)
6879 record_jump_equiv (insn, 1);
6881 invalidate_skipped_block (NEXT_INSN (insn));
6883 /* Set the last insn as the jump insn; it doesn't affect cc0.
6884 Then follow this branch. */
6889 insn = JUMP_LABEL (insn);
6894 if (GET_MODE (insn) == QImode)
6895 PUT_MODE (insn, VOIDmode);
6897 if (GET_RTX_CLASS (code) == RTX_INSN)
6901 /* Process notes first so we have all notes in canonical forms when
6902 looking for duplicate operations. */
6904 if (REG_NOTES (insn))
6905 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
6907 /* Track when we are inside a LIBCALL block. Inside such a block,
6908 we do not want to record destinations. The last insn of a
6909 LIBCALL block is not considered to be part of the block, since
6910 its destination is the result of the block and hence should be recorded. */
6913 if (REG_NOTES (insn) != 0)
6915 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
6916 libcall_insn = XEXP (p, 0);
6917 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
6919 /* Keep libcall_insn for the last SET insn of a no-conflict
6920 block to prevent changing the destination. */
6926 else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
6930 cse_insn (insn, libcall_insn);
6932 if (no_conflict == -1)
6938 /* If we haven't already found an insn where we added a LABEL_REF, check this one. */
6940 if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
6941 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6943 recorded_label_ref = 1;
6946 /* If INSN is now an unconditional jump, skip to the end of our
6947 basic block by pretending that we just did the last insn in the
6948 basic block. If we are jumping to the end of our block, show
6949 that we can have one usage of TO. */
6951 if (any_uncondjump_p (insn))
6959 if (JUMP_LABEL (insn) == to)
6962 /* Maybe TO was deleted because the jump is unconditional.
6963 If so, there is nothing left in this basic block. */
6964 /* ??? Perhaps it would be smarter to set TO
6965 to whatever follows this insn,
6966 and pretend the basic block had always ended here. */
6967 if (INSN_DELETED_P (to))
6970 insn = PREV_INSN (to);
6973 /* See if it is ok to keep on going past the label
6974 which used to end our basic block. Remember that we incremented
6975 the count of that label, so we decrement it here. If we made
6976 a jump unconditional, TO_USAGE will be one; in that case, we don't
6977 want to count the use in that jump. */
6979 if (to != 0 && NEXT_INSN (insn) == to
6980 && LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
6982 struct cse_basic_block_data val;
6985 insn = NEXT_INSN (to);
6987 /* If TO was the last insn in the function, we are done. */
6994 /* If TO was preceded by a BARRIER we are done with this block
6995 because it has no continuation. */
6996 prev = prev_nonnote_insn (to);
6997 if (prev && BARRIER_P (prev))
7003 /* Find the end of the following block. Note that we won't be
7004 following branches in this case. */
7007 val.path = xmalloc (sizeof (struct branch_path)
7008 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
7009 cse_end_of_basic_block (insn, &val, 0, 0);
7012 /* If the tables we allocated have enough space left
7013 to handle all the SETs in the next basic block,
7014 continue through it. Otherwise, return,
7015 and that block will be scanned individually. */
7016 if (val.nsets * 2 + next_qty > max_qty)
7019 cse_basic_block_start = val.low_cuid;
7020 cse_basic_block_end = val.high_cuid;
7023 /* Prevent TO from being deleted if it is a label. */
7024 if (to != 0 && LABEL_P (to))
7027 /* Back up so we process the first insn in the extension. */
7028 insn = PREV_INSN (insn);
7032 gcc_assert (next_qty <= max_qty);
7036 return to ? NEXT_INSN (to) : 0;
7039 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7040 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
7043 check_for_label_ref (rtx *rtl, void *data)
7045 rtx insn = (rtx) data;
7047 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7048 we must rerun jump since it needs to place the note. If this is a
7049 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7050 since no REG_LABEL will be added. */
7051 return (GET_CODE (*rtl) == LABEL_REF
7052 && ! LABEL_REF_NONLOCAL_P (*rtl)
7053 && LABEL_P (XEXP (*rtl, 0))
7054 && INSN_UID (XEXP (*rtl, 0)) != 0
7055 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7058 /* Count the number of times registers are used (not set) in X.
7059 COUNTS is an array in which we accumulate the count, INCR is how much
7060 we count each register usage. */
7063 count_reg_usage (rtx x, int *counts, int incr)
7073 switch (code = GET_CODE (x))
7076 counts[REGNO (x)] += incr;
7090 /* If we are clobbering a MEM, mark any registers inside the address as being used. */
7092 if (MEM_P (XEXP (x, 0)))
7093 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, incr);
7097 /* Unless we are setting a REG, count everything in SET_DEST. */
7098 if (!REG_P (SET_DEST (x)))
7099 count_reg_usage (SET_DEST (x), counts, incr);
7100 count_reg_usage (SET_SRC (x), counts, incr);
7104 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, incr);
7109 count_reg_usage (PATTERN (x), counts, incr);
7111 /* Things used in a REG_EQUAL note aren't dead since loop may try to use them. */
7114 note = find_reg_equal_equiv_note (x);
7117 rtx eqv = XEXP (note, 0);
7119 if (GET_CODE (eqv) == EXPR_LIST)
7120 /* This REG_EQUAL note describes the result of a function call.
7121 Process all the arguments. */
7124 count_reg_usage (XEXP (eqv, 0), counts, incr);
7125 eqv = XEXP (eqv, 1);
7127 while (eqv && GET_CODE (eqv) == EXPR_LIST);
7129 count_reg_usage (eqv, counts, incr);
7134 if (REG_NOTE_KIND (x) == REG_EQUAL
7135 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
7136 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7137 involving registers in the address. */
7138 || GET_CODE (XEXP (x, 0)) == CLOBBER)
7139 count_reg_usage (XEXP (x, 0), counts, incr);
7141 count_reg_usage (XEXP (x, 1), counts, incr);
7145 /* Iterate over just the inputs, not the constraints as well. */
7146 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7147 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, incr);
7157 fmt = GET_RTX_FORMAT (code);
7158 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7161 count_reg_usage (XEXP (x, i), counts, incr);
7162 else if (fmt[i] == 'E')
7163 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7164 count_reg_usage (XVECEXP (x, i, j), counts, incr);
7168 /* Return true if set is live. */
7170 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
7177 if (set_noop_p (set))
7181 else if (GET_CODE (SET_DEST (set)) == CC0
7182 && !side_effects_p (SET_SRC (set))
7183 && ((tem = next_nonnote_insn (insn)) == 0
7185 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7188 else if (!REG_P (SET_DEST (set))
7189 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7190 || counts[REGNO (SET_DEST (set))] != 0
7191 || side_effects_p (SET_SRC (set)))
7196 /* Return true if insn is live. */
7199 insn_live_p (rtx insn, int *counts)
7202 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7204 else if (GET_CODE (PATTERN (insn)) == SET)
7205 return set_live_p (PATTERN (insn), insn, counts);
7206 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7208 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7210 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7212 if (GET_CODE (elt) == SET)
7214 if (set_live_p (elt, insn, counts))
7217 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7226 /* Return true if libcall is dead as a whole. */
7229 dead_libcall_p (rtx insn, int *counts)
7233 /* See if there's a REG_EQUAL note on this insn and try to
7234 replace the source with the REG_EQUAL expression.
7236 We assume that insns with REG_RETVALs can only be reg->reg
7237 copies at this point. */
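/* A hedged example (values invented): if the REG_RETVAL insn is

       (set (reg:SI 110) (reg:SI 111))

   with a REG_EQUAL note holding a plain constant, and that constant (or a
   constant-pool reference to it) is a valid source for the insn, we
   substitute it, drop the REG_RETVAL and REG_EQUAL notes, and report the
   libcall as dead; the insns that computed reg 111 then have no remaining
   uses and are removed by the caller's scan.  */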
7238 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7242 set = single_set (insn);
7246 new = simplify_rtx (XEXP (note, 0));
7248 new = XEXP (note, 0);
7250 /* While changing insn, we must update the counts accordingly. */
7251 count_reg_usage (insn, counts, -1);
7253 if (validate_change (insn, &SET_SRC (set), new, 0))
7255 count_reg_usage (insn, counts, 1);
7256 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7257 remove_note (insn, note);
7261 if (CONSTANT_P (new))
7263 new = force_const_mem (GET_MODE (SET_DEST (set)), new);
7264 if (new && validate_change (insn, &SET_SRC (set), new, 0))
7266 count_reg_usage (insn, counts, 1);
7267 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7268 remove_note (insn, note);
7273 count_reg_usage (insn, counts, 1);
7277 /* Scan all the insns and delete any that are dead; i.e., they store a register
7278 that is never used or they copy a register to itself.
7280 This is used to remove insns made obviously dead by cse, loop or other
7281 optimizations. It improves the heuristics in loop since it won't try to
7282 move dead invariants out of loops or make givs for dead quantities. The
7283 remaining passes of the compilation are also sped up. */
7286 delete_trivially_dead_insns (rtx insns, int nreg)
7290 int in_libcall = 0, dead_libcall = 0;
7291 int ndead = 0, nlastdead, niterations = 0;
7293 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7294 /* First count the number of times each register is used. */
7295 counts = xcalloc (nreg, sizeof (int));
7296 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7297 count_reg_usage (insn, counts, 1);
7303 /* Go from the last insn to the first and delete insns that only set unused
7304 registers or copy a register to itself. As we delete an insn, remove
7305 usage counts for registers it uses.
7307 The first jump optimization pass may leave a real insn as the last
7308 insn in the function. We must not skip that insn or we may end
7309 up deleting code that is not really dead. */
7310 insn = get_last_insn ();
7311 if (! INSN_P (insn))
7312 insn = prev_real_insn (insn);
7314 for (; insn; insn = prev)
7318 prev = prev_real_insn (insn);
7320 /* Don't delete any insns that are part of a libcall block unless
7321 we can delete the whole libcall block.
7323 Flow or loop might get confused if we did that. Remember
7324 that we are scanning backwards. */
7325 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7329 dead_libcall = dead_libcall_p (insn, counts);
7331 else if (in_libcall)
7332 live_insn = ! dead_libcall;
7334 live_insn = insn_live_p (insn, counts);
7336 /* If this is a dead insn, delete it and show registers in it aren't being used. */
7341 count_reg_usage (insn, counts, -1);
7342 delete_insn_and_edges (insn);
7346 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7353 while (ndead != nlastdead);
7355 if (dump_file && ndead)
7356 fprintf (dump_file, "Deleted %i trivially dead insns; %i iterations\n",
7357 ndead, niterations);
7360 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7364 /* This function is called via for_each_rtx. The argument, NEWREG, is
7365 a condition code register with the desired mode. If we are looking
7366 at the same register in a different mode, replace it with NEWREG. */
7370 cse_change_cc_mode (rtx *loc, void *data)
7372 struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
7376 && REGNO (*loc) == REGNO (args->newreg)
7377 && GET_MODE (*loc) != GET_MODE (args->newreg))
7379 validate_change (args->insn, loc, args->newreg, 1);
7386 /* Change the mode of any reference to the register REGNO (NEWREG) to
7387 GET_MODE (NEWREG) in INSN. */
7390 cse_change_cc_mode_insn (rtx insn, rtx newreg)
7392 struct change_cc_mode_args args;
7399 args.newreg = newreg;
7401 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
7402 for_each_rtx (®_NOTES (insn), cse_change_cc_mode, &args);
7404 /* If the following assertion was triggered, there is most probably
7405 something wrong with the cc_modes_compatible back end function.
7406 CC modes can only be considered compatible if the insn, with the mode
7407 replaced by any of the compatible modes, can still be recognized. */
7408 success = apply_change_group ();
7409 gcc_assert (success);
7412 /* Change the mode of any reference to the register REGNO (NEWREG) to
7413 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
7414 any instruction which modifies NEWREG. */
7417 cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7421 for (insn = start; insn != end; insn = NEXT_INSN (insn))
7423 if (! INSN_P (insn))
7426 if (reg_set_p (newreg, insn))
7429 cse_change_cc_mode_insn (insn, newreg);
7433 /* BB is a basic block which finishes with CC_REG as a condition code
7434 register which is set to CC_SRC. Look through the successors of BB
7435 to find blocks which have a single predecessor (i.e., this one),
7436 and look through those blocks for an assignment to CC_REG which is
7437 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
7438 permitted to change the mode of CC_SRC to a compatible mode. This
7439 returns VOIDmode if no equivalent assignments were found.
7440 Otherwise it returns the mode which CC_SRC should wind up with.
7442 The main complexity in this function is handling the mode issues.
7443 We may have more than one duplicate which we can eliminate, and we
7444 try to find a mode which will work for multiple duplicates. */
7446 static enum machine_mode
7447 cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
7450 enum machine_mode mode;
7451 unsigned int insn_count;
7454 enum machine_mode modes[2];
7460 /* We expect to have two successors. Look at both before picking
7461 the final mode for the comparison. If we have more successors
7462 (i.e., some sort of table jump, although that seems unlikely),
7463 then we require all beyond the first two to use the same mode. */
7466 found_equiv = false;
7467 mode = GET_MODE (cc_src);
7469 FOR_EACH_EDGE (e, ei, bb->succs)
7474 if (e->flags & EDGE_COMPLEX)
7477 if (EDGE_COUNT (e->dest->preds) != 1
7478 || e->dest == EXIT_BLOCK_PTR)
7481 end = NEXT_INSN (BB_END (e->dest));
7482 for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7486 if (! INSN_P (insn))
7489 /* If CC_SRC is modified, we have to stop looking for
7490 something which uses it. */
7491 if (modified_in_p (cc_src, insn))
7494 /* Check whether INSN sets CC_REG to CC_SRC. */
7495 set = single_set (insn);
7497 && REG_P (SET_DEST (set))
7498 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7501 enum machine_mode set_mode;
7502 enum machine_mode comp_mode;
7505 set_mode = GET_MODE (SET_SRC (set));
7506 comp_mode = set_mode;
7507 if (rtx_equal_p (cc_src, SET_SRC (set)))
7509 else if (GET_CODE (cc_src) == COMPARE
7510 && GET_CODE (SET_SRC (set)) == COMPARE
7512 && rtx_equal_p (XEXP (cc_src, 0),
7513 XEXP (SET_SRC (set), 0))
7514 && rtx_equal_p (XEXP (cc_src, 1),
7515 XEXP (SET_SRC (set), 1)))
7518 comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7519 if (comp_mode != VOIDmode
7520 && (can_change_mode || comp_mode == mode))
7527 if (insn_count < ARRAY_SIZE (insns))
7529 insns[insn_count] = insn;
7530 modes[insn_count] = set_mode;
7531 last_insns[insn_count] = end;
7534 if (mode != comp_mode)
7536 gcc_assert (can_change_mode);
7539 /* The modified insn will be re-recognized later. */
7540 PUT_MODE (cc_src, mode);
7545 if (set_mode != mode)
7547 /* We found a matching expression in the
7548 wrong mode, but we don't have room to
7549 store it in the array. Punt. This case should be rare. */
7553 /* INSN sets CC_REG to a value equal to CC_SRC
7554 with the right mode. We can simply delete
7559 /* We found an instruction to delete. Keep looking,
7560 in the hopes of finding a three-way jump. */
7564 /* We found an instruction which sets the condition
7565 code, so don't look any farther. */
7569 /* If INSN sets CC_REG in some other way, don't look any
7571 if (reg_set_p (cc_reg, insn))
7575 /* If we fell off the bottom of the block, we can keep looking
7576 through successors. We pass CAN_CHANGE_MODE as false because
7577 we aren't prepared to handle compatibility between the
7578 further blocks and this block. */
7581 enum machine_mode submode;
7583 submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
7584 if (submode != VOIDmode)
7586 gcc_assert (submode == mode);
7588 can_change_mode = false;
7596 /* Now INSN_COUNT is the number of instructions we found which set
7597 CC_REG to a value equivalent to CC_SRC. The instructions are in
7598 INSNS. The modes used by those instructions are in MODES. */
7601 for (i = 0; i < insn_count; ++i)
7603 if (modes[i] != mode)
7605 /* We need to change the mode of CC_REG in INSNS[i] and
7606 subsequent instructions. */
7609 if (GET_MODE (cc_reg) == mode)
7612 newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7614 cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7618 delete_insn (insns[i]);
7624 /* If we have a fixed condition code register (or two), walk through
7625 the instructions and try to eliminate duplicate assignments. */
7628 cse_condition_code_reg (void)
7630 unsigned int cc_regno_1;
7631 unsigned int cc_regno_2;
7636 if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7639 cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7640 if (cc_regno_2 != INVALID_REGNUM)
7641 cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7643 cc_reg_2 = NULL_RTX;
7652 enum machine_mode mode;
7653 enum machine_mode orig_mode;
7655 /* Look for blocks which end with a conditional jump based on a
7656 condition code register. Then look for the instruction which
7657 sets the condition code register. Then look through the
7658 successor blocks for instructions which set the condition
7659 code register to the same value. There are other possible
7660 uses of the condition code register, but these are by far the
7661 most common and the ones which we are most likely to be able to optimize. */
7664 last_insn = BB_END (bb);
7665 if (!JUMP_P (last_insn))
7668 if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7670 else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7675 cc_src_insn = NULL_RTX;
7677 for (insn = PREV_INSN (last_insn);
7678 insn && insn != PREV_INSN (BB_HEAD (bb));
7679 insn = PREV_INSN (insn))
7683 if (! INSN_P (insn))
7685 set = single_set (insn);
7687 && REG_P (SET_DEST (set))
7688 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7691 cc_src = SET_SRC (set);
7694 else if (reg_set_p (cc_reg, insn))
7701 if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7704 /* Now CC_REG is a condition code register used for a
7705 conditional jump at the end of the block, and CC_SRC, in
7706 CC_SRC_INSN, is the value to which that condition code
7707 register is set, and CC_SRC is still meaningful at the end of the block. */
7710 orig_mode = GET_MODE (cc_src);
7711 mode = cse_cc_succs (bb, cc_reg, cc_src, true);
7712 if (mode != VOIDmode)
7714 gcc_assert (mode == GET_MODE (cc_src));
7715 if (mode != orig_mode)
7717 rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7719 cse_change_cc_mode_insn (cc_src_insn, newreg);
7721 /* Do the same in the following insns that use the
7722 current value of CC_REG within BB. */
7723 cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7724 NEXT_INSN (last_insn),