1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
23 /* stdio.h must precede rtl.h for FFS. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "basic-block.h"
34 #include "insn-config.h"
45 #include "rtlhooks-def.h"
47 /* The basic idea of common subexpression elimination is to go
48 through the code, keeping a record of expressions that would
49 have the same value at the current scan point, and replacing
50 expressions encountered with the cheapest equivalent expression.
52 It is too complicated to keep track of the different possibilities
53 when control paths merge in this code; so, at each label, we forget all
54 that is known and start fresh. This can be described as processing each
55 extended basic block separately. We have a separate pass to perform global CSE.
58 Note CSE can turn a conditional or computed jump into a nop or
59 an unconditional jump. When this occurs we arrange to run the jump
60 optimizer after CSE to delete the unreachable code.
62 We use two data structures to record the equivalent expressions:
63 a hash table for most expressions, and a vector of "quantity
64 numbers" to record equivalent (pseudo) registers.
66 The use of the special data structure for registers is desirable
67 because it is faster. It is possible because register references
68 contain a fairly small number, the register number, taken from
69 a contiguously allocated series, and two register references are
70 identical if they have the same number. General expressions
71 do not have any such thing, so the only way to retrieve the
72 information recorded on an expression other than a register
73 is to keep it in a hash table.
75 Registers and "quantity numbers":
77 At the start of each basic block, all of the (hardware and pseudo)
78 registers used in the function are given distinct quantity
79 numbers to indicate their contents. During scan, when the code
80 copies one register into another, we copy the quantity number.
81 When a register is loaded in any other way, we allocate a new
82 quantity number to describe the value generated by this operation.
83 `reg_qty' records what quantity a register is currently thought of as containing.
86 All real quantity numbers are greater than or equal to zero.
87 If register N has not been assigned a quantity, reg_qty[N] will
88 equal -N - 1, which is always negative.
90 Quantity numbers below zero do not exist and none of the `qty_table'
91 entries should be referenced with a negative index.
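   As an illustrative sketch (register and quantity numbers made up):
   suppose reg 100 already holds quantity 5.  Scanning
       (set (reg 101) (reg 100))
   merely copies the quantity, so reg_qty[101] becomes 5 as well, whereas
       (set (reg 102) (mem ...))
   loads reg 102 some other way and therefore allocates a fresh quantity,
   say reg_qty[102] = 6.  A register that has never been assigned a
   quantity, say reg 103, keeps reg_qty[103] == -104.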
93 We also maintain a bidirectional chain of registers for each
94 quantity number. The `qty_table' members `first_reg' and `last_reg',
95 and `reg_eqv_table' members `next' and `prev' hold these chains.
97 The first register in a chain is the one whose lifespan is least local.
98 Among equals, it is the one that was seen first.
99 We replace any equivalent register with that one.
101 If two registers have the same quantity number, then REG expressions
102 with the quantity's `mode' must be in the hash table for both registers
103 and must be in the same equivalence class.
105 The converse is not true. Since hard registers may be referenced in
106 any mode, two REG expressions might be equivalent in the hash table
107 but not have the same quantity number if the quantity recorded for one
108 of the registers is not in the same mode as those expressions.
110 Constants and quantity numbers
112 When a quantity has a known constant value, that value is stored
113 in the appropriate qty_table `const_rtx'. This is in addition to
114 putting the constant in the hash table as is usual for non-regs.
116 Whether a reg or a constant is preferred is determined by the configuration
117 macro CONST_COSTS and will often depend on the constant value. In any
118 event, expressions containing constants can be simplified by fold_rtx.
120 When a quantity has a known nearly constant value (such as an address
121 of a stack slot), that value is stored in the appropriate qty_table `const_rtx'.
124 Integer constants don't have a machine mode. However, cse
125 determines the intended machine mode from the destination
126 of the instruction that moves the constant. The machine mode
127 is recorded in the hash table along with the actual RTL
128 constant expression so that different modes are kept separate.
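   For example, a (const_int 1) moved into an SImode register and a
   (const_int 1) moved into a DImode register get separate hash table
   entries, recorded with mode SImode and DImode respectively, even though
   the constant rtx itself is the same.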
132 To record known equivalences among expressions in general
133 we use a hash table called `table'. It has a fixed number of buckets
134 that contain chains of `struct table_elt' elements for expressions.
135 These chains connect the elements whose expressions have the same hash code.
138 Other chains through the same elements connect the elements which
139 currently have equivalent values.
141 Register references in an expression are canonicalized before hashing
142 the expression. This is done using `reg_qty' and qty_table `first_reg'.
143 The hash code of a register reference is computed using the quantity
144 number, not the register number.
146 When the value of an expression changes, it is necessary to remove from the
147 hash table not just that expression but all expressions whose values
148 could be different as a result.
150 1. If the value changing is in memory, except in special cases
151 ANYTHING referring to memory could be changed. That is because
152 nobody knows where a pointer does not point.
153 The function `invalidate_memory' removes what is necessary.
155 The special cases are when the address is constant or is
156 a constant plus a fixed register such as the frame pointer
157 or a static chain pointer. When such addresses are stored in,
158 we can tell exactly which other such addresses must be invalidated
159 due to overlap. `invalidate' does this.
160 All expressions that refer to non-constant
161 memory addresses are also invalidated. `invalidate_memory' does this.
163 2. If the value changing is a register, all expressions
164 containing references to that register, and only those, must be removed.
167 Because searching the entire hash table for expressions that contain
168 a register is very slow, we try to figure out when it isn't necessary.
169 Precisely, this is necessary only when expressions have been
170 entered in the hash table using this register, and then the value has
171 changed, and then another expression wants to be added to refer to
172 the register's new value. This sequence of circumstances is rare
173 within any one basic block.
175 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
176 reg_tick[i] is incremented whenever a value is stored in register i.
177 reg_in_table[i] holds -1 if no references to register i have been
178 entered in the table; otherwise, it contains the value reg_tick[i] had
179 when the references were entered. If we want to enter a reference
180 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
181 Until we want to enter a new entry, the mere fact that the two vectors
182 don't match causes the stale entries to be ignored if anyone tries to match them.
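   A sketch of the usual sequence (values made up): while
   reg_tick[i] == reg_in_table[i] == 3, entries mentioning register i are
   valid.  A store into register i bumps reg_tick[i] to 4; the existing
   entries are now silently ignored by lookups, and only when a new
   expression mentioning register i is about to be entered do we scan the
   table and remove the stale references.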
184 Registers themselves are entered in the hash table as well as in
185 the equivalent-register chains. However, the vectors `reg_tick'
186 and `reg_in_table' do not apply to expressions which are simple
187 register references. These expressions are removed from the table
188 immediately when they become invalid, and this can be done even if
189 we do not immediately search for all the expressions that refer to the register.
192 A CLOBBER rtx in an instruction invalidates its operand for further
193 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
194 invalidates everything that resides in memory.
198 Constant expressions that differ only by an additive integer
199 are called related. When a constant expression is put in
200 the table, the related expression with no constant term
201 is also entered. These are made to point at each other
202 so that it is possible to find out if there exists any
203 register equivalent to an expression related to a given expression. */
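/* For example (a sketch): when (const (plus (symbol_ref "x") (const_int 8)))
   is entered in the table, the related expression (symbol_ref "x") is
   entered as well and the two are linked through their `related_value'
   fields.  If some register is later known to hold (symbol_ref "x"),
   use_related_value can rewrite the first expression as that register
   plus 8.  */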
205 /* Length of qty_table vector. We know in advance we will not need
206 a quantity number this big. */
210 /* Next quantity number to be allocated.
211 This is 1 + the largest number needed so far. */
215 /* Per-qty information tracking.
217 `first_reg' and `last_reg' track the head and tail of the
218 chain of registers which currently contain this quantity.
220 `mode' contains the machine mode of this quantity.
222 `const_rtx' holds the rtx of the constant value of this
223 quantity, if known. A sum of the frame/arg pointer
224 and a constant can also be entered here. When this holds
225 a known value, `const_insn' is the insn which stored the constant value.
228 `comparison_{code,const,qty}' are used to track when a
229 comparison between a quantity and some constant or register has
230 been passed. In such a case, we know the results of the comparison
231 in case we see it again. These members record a comparison that
232 is known to be true. `comparison_code' holds the rtx code of such
233 a comparison, else it is set to UNKNOWN and the other two
234 comparison members are undefined. `comparison_const' holds
235 the constant being compared against, or zero if the comparison
236 is not against a constant. `comparison_qty' holds the quantity
237 being compared against when the result is known. If the comparison
238 is not with a register, `comparison_qty' is -1. */
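/* An illustrative sketch (register and constant values made up): on the arm
   of a branch where (gt (reg 100) (const_int 4)) is known to be true, the
   quantity for reg 100 gets comparison_code == GT, comparison_const ==
   (const_int 4) and comparison_qty == -1, so a later identical test can be
   resolved without re-evaluating it.  */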
240 struct qty_table_elem
244 rtx comparison_const;
246 unsigned int first_reg, last_reg;
247 /* The sizes of these fields should match the sizes of the
248 code and mode fields of struct rtx_def (see rtl.h). */
249 ENUM_BITFIELD(rtx_code) comparison_code : 16;
250 ENUM_BITFIELD(machine_mode) mode : 8;
253 /* The table of all qtys, indexed by qty number. */
254 static struct qty_table_elem *qty_table;
256 /* Structure used to pass arguments via for_each_rtx to function
257 cse_change_cc_mode. */
258 struct change_cc_mode_args
265 /* For machines that have a CC0, we do not record its value in the hash
266 table since its use is guaranteed to be the insn immediately following
267 its definition and any other insn is presumed to invalidate it.
269 Instead, we store below the value last assigned to CC0. If it should
270 happen to be a constant, it is stored in preference to the actual
271 assigned value. In case it is a constant, we store the mode in which
272 the constant should be interpreted. */
274 static rtx prev_insn_cc0;
275 static enum machine_mode prev_insn_cc0_mode;
277 /* Previous actual insn. 0 if at first insn of basic block. */
279 static rtx prev_insn;
282 /* Insn being scanned. */
284 static rtx this_insn;
286 /* Indexed by register number; gives the number of the next (or
287 previous) register in the chain of registers sharing the same
290 Or -1 if this register is at the end of the chain.
292 If reg_qty[N] == -N - 1, reg_eqv_table[N].next is undefined. */
294 /* Per-register equivalence chain. */
300 /* The table of all register equivalence chains. */
301 static struct reg_eqv_elem *reg_eqv_table;
305 /* Next in hash chain. */
306 struct cse_reg_info *hash_next;
308 /* The next cse_reg_info structure in the free or used list. */
309 struct cse_reg_info *next;
314 /* The quantity number of the register's current contents. */
317 /* The number of times the register has been altered in the current basic block. */
321 /* The REG_TICK value at which rtx's containing this register are
322 valid in the hash table. If this does not equal the current
323 reg_tick value, such expressions existing in the hash table are considered invalid. */
327 /* The SUBREG that was set when REG_TICK was last incremented. Set
328 to -1 if the last store was to the whole register, not a subreg. */
329 unsigned int subreg_ticked;
332 /* A free list of cse_reg_info entries. */
333 static struct cse_reg_info *cse_reg_info_free_list;
335 /* A used list of cse_reg_info entries. */
336 static struct cse_reg_info *cse_reg_info_used_list;
337 static struct cse_reg_info *cse_reg_info_used_list_end;
339 /* A mapping from registers to cse_reg_info data structures. */
340 #define REGHASH_SHIFT 7
341 #define REGHASH_SIZE (1 << REGHASH_SHIFT)
342 #define REGHASH_MASK (REGHASH_SIZE - 1)
343 static struct cse_reg_info *reg_hash[REGHASH_SIZE];
345 #define REGHASH_FN(REGNO) \
346 (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
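/* For example, REGHASH_FN (200) == ((200 ^ (200 >> 7)) & 127)
   == ((200 ^ 1) & 127) == 73, so the cse_reg_info entry for register 200
   lives on the chain headed by reg_hash[73].  */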
348 /* The last lookup we did into the cse_reg_info hash table. This allows us
349 to cache repeated lookups. */
350 static unsigned int cached_regno;
351 static struct cse_reg_info *cached_cse_reg_info;
353 /* A HARD_REG_SET containing all the hard registers for which there is
354 currently a REG expression in the hash table. Note the difference
355 from the above variables, which indicate if the REG is mentioned in some
356 expression in the table. */
358 static HARD_REG_SET hard_regs_in_table;
360 /* CUID of insn that starts the basic block currently being cse-processed. */
362 static int cse_basic_block_start;
364 /* CUID of insn that ends the basic block currently being cse-processed. */
366 static int cse_basic_block_end;
368 /* Vector mapping INSN_UIDs to cuids.
369 The cuids are like uids but always increase monotonically.
370 We use them to see whether a reg is used outside a given basic block. */
372 static int *uid_cuid;
374 /* Highest UID in UID_CUID. */
377 /* Get the cuid of an insn. */
379 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
381 /* Nonzero if this pass has made changes, and therefore it's
382 worthwhile to run the garbage collector. */
384 static int cse_altered;
386 /* Nonzero if cse has altered conditional jump insns
387 in such a way that jump optimization should be redone. */
389 static int cse_jumps_altered;
391 /* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
392 REG_LABEL; if so, we have to rerun jump after CSE to put in the note. */
393 static int recorded_label_ref;
395 /* canon_hash stores 1 in do_not_record
396 if it notices a reference to CC0, PC, or some other volatile subexpression. */
399 static int do_not_record;
401 /* canon_hash stores 1 in hash_arg_in_memory
402 if it notices a reference to memory within the expression being hashed. */
404 static int hash_arg_in_memory;
406 /* The hash table contains buckets which are chains of `struct table_elt's,
407 each recording one expression's information.
408 That expression is in the `exp' field.
410 The canon_exp field contains a canonical (from the point of view of
411 alias analysis) version of the `exp' field.
413 Those elements with the same hash code are chained in both directions
414 through the `next_same_hash' and `prev_same_hash' fields.
416 The expressions in each set of equivalent values
417 are on a two-way chain through the `next_same_value'
418 and `prev_same_value' fields, and all point with
419 the `first_same_value' field at the first element in
420 that chain. The chain is in order of increasing cost.
421 Each element's cost value is in its `cost' field.
423 The `in_memory' field is nonzero for elements that
424 involve any reference to memory. These elements are removed
425 whenever a write is done to an unidentified location in memory.
426 To be safe, we assume that a memory address is unidentified unless
427 the address is either a symbol constant or a constant plus
428 the frame pointer or argument pointer.
430 The `related_value' field is used to connect related expressions
431 (that differ by adding an integer).
432 The related expressions are chained in a circular fashion.
433 `related_value' is zero for expressions for which this chain is not useful.
436 The `cost' field stores the cost of this element's expression.
437 The `regcost' field stores the value returned by approx_reg_cost for
438 this element's expression.
440 The `is_const' flag is set if the element is a constant (including a fixed address).
443 The `flag' field is used as a temporary during some search routines.
445 The `mode' field is usually the same as GET_MODE (`exp'), but
446 if `exp' is a CONST_INT and has no machine mode then the `mode'
447 field is the mode it was being used as. Each constant is
448 recorded separately for each mode it is used with. */
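/* A sketch of one equivalence class with three elements A, B and C, kept in
   order of increasing cost:

       A <-> B <-> C        via `next_same_value' / `prev_same_value'

   where A, B and C all have `first_same_value' == A, while
   `next_same_hash' / `prev_same_hash' independently chain together the
   elements that merely share a hash bucket.  */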
454 struct table_elt *next_same_hash;
455 struct table_elt *prev_same_hash;
456 struct table_elt *next_same_value;
457 struct table_elt *prev_same_value;
458 struct table_elt *first_same_value;
459 struct table_elt *related_value;
462 /* The size of this field should match the size
463 of the mode field of struct rtx_def (see rtl.h). */
464 ENUM_BITFIELD(machine_mode) mode : 8;
470 /* We don't want a lot of buckets, because we rarely have very many
471 things stored in the hash table, and a lot of buckets slows
472 down a lot of loops that happen frequently. */
474 #define HASH_SIZE (1 << HASH_SHIFT)
475 #define HASH_MASK (HASH_SIZE - 1)
477 /* Compute hash code of X in mode M. Special-case the case where X is a pseudo
478 register (hard registers may require `do_not_record' to be set). */
480 #define HASH(X, M) \
481 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
482 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
483 : canon_hash (X, M)) & HASH_MASK)
485 /* Like HASH, but without side-effects. */
486 #define SAFE_HASH(X, M) \
487 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
488 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
489 : safe_hash (X, M)) & HASH_MASK)
491 /* Determine whether register number N is considered a fixed register for the
492 purpose of approximating register costs.
493 It is desirable to replace other regs with fixed regs, to reduce need for non-fixed hard regs.
495 A reg wins if it is either the frame pointer or designated as fixed. */
496 #define FIXED_REGNO_P(N) \
497 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
498 || fixed_regs[N] || global_regs[N])
500 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
501 hard registers and pointers into the frame are the cheapest with a cost
502 of 0. Next come pseudos with a cost of one and other hard registers with
503 a cost of 2. Aside from these special cases, call `rtx_cost'. */
505 #define CHEAP_REGNO(N) \
506 (REGNO_PTR_FRAME_P(N) \
507 || (HARD_REGISTER_NUM_P (N) \
508 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
510 #define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
511 #define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
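/* For instance, COST ((reg 100)) is 0, while the cost of a non-register
   expression such as (plus (reg 100) (const_int 4)) is whatever rtx_cost
   reports for it, doubled by notreg_cost; approx_reg_cost is then used to
   break ties between expressions of equal COST.  */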
513 /* Get the info associated with register N. */
515 #define GET_CSE_REG_INFO(N) \
516 (((N) == cached_regno && cached_cse_reg_info) \
517 ? cached_cse_reg_info : get_cse_reg_info ((N)))
519 /* Get the number of times this register has been updated in this basic block. */
522 #define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
524 /* Get the point at which REG was recorded in the table. */
526 #define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
528 /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a subreg). */
531 #define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)
533 /* Get the quantity number for REG. */
535 #define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
537 /* Determine if the quantity number for register X represents a valid index
538 into the qty_table. */
540 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
542 static struct table_elt *table[HASH_SIZE];
544 /* Chain of `struct table_elt's made so far for this function
545 but currently removed from the table. */
547 static struct table_elt *free_element_chain;
549 /* Number of `struct table_elt' structures made so far for this function. */
551 static int n_elements_made;
553 /* Maximum value `n_elements_made' has had so far in this compilation
554 for functions previously processed. */
556 static int max_elements_made;
558 /* Set to the cost of a constant pool reference if one was found for a
559 symbolic constant. If this was found, it means we should try to
560 convert constants into constant pool entries if they don't fit in the insn. */
563 static int constant_pool_entries_cost;
564 static int constant_pool_entries_regcost;
566 /* This data describes a block that will be processed by cse_basic_block. */
568 struct cse_basic_block_data
570 /* Lowest CUID value of insns in block. */
572 /* Highest CUID value of insns in block. */
574 /* Total number of SETs in block. */
576 /* Last insn in the block. */
578 /* Size of current branch path, if any. */
580 /* Current branch path, indicating which branches will be taken. */
583 /* The branch insn. */
585 /* Whether it should be taken or not. AROUND is the same as taken
586 except that it is used when the destination label is not preceded by a BARRIER. */
588 enum taken {PATH_TAKEN, PATH_NOT_TAKEN, PATH_AROUND} status;
592 static bool fixed_base_plus_p (rtx x);
593 static int notreg_cost (rtx, enum rtx_code);
594 static int approx_reg_cost_1 (rtx *, void *);
595 static int approx_reg_cost (rtx);
596 static int preferable (int, int, int, int);
597 static void new_basic_block (void);
598 static void make_new_qty (unsigned int, enum machine_mode);
599 static void make_regs_eqv (unsigned int, unsigned int);
600 static void delete_reg_equiv (unsigned int);
601 static int mention_regs (rtx);
602 static int insert_regs (rtx, struct table_elt *, int);
603 static void remove_from_table (struct table_elt *, unsigned);
604 static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
605 static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
606 static rtx lookup_as_function (rtx, enum rtx_code);
607 static struct table_elt *insert (rtx, struct table_elt *, unsigned,
609 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
610 static void invalidate (rtx, enum machine_mode);
611 static int cse_rtx_varies_p (rtx, int);
612 static void remove_invalid_refs (unsigned int);
613 static void remove_invalid_subreg_refs (unsigned int, unsigned int,
615 static void rehash_using_reg (rtx);
616 static void invalidate_memory (void);
617 static void invalidate_for_call (void);
618 static rtx use_related_value (rtx, struct table_elt *);
620 static inline unsigned canon_hash (rtx, enum machine_mode);
621 static inline unsigned safe_hash (rtx, enum machine_mode);
622 static unsigned hash_rtx_string (const char *);
624 static rtx canon_reg (rtx, rtx);
625 static void find_best_addr (rtx, rtx *, enum machine_mode);
626 static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
628 enum machine_mode *);
629 static rtx fold_rtx (rtx, rtx);
630 static rtx equiv_constant (rtx);
631 static void record_jump_equiv (rtx, int);
632 static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
634 static void cse_insn (rtx, rtx);
635 static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
637 static int addr_affects_sp_p (rtx);
638 static void invalidate_from_clobbers (rtx);
639 static rtx cse_process_notes (rtx, rtx);
640 static void invalidate_skipped_set (rtx, rtx, void *);
641 static void invalidate_skipped_block (rtx);
642 static rtx cse_basic_block (rtx, rtx, struct branch_path *);
643 static void count_reg_usage (rtx, int *, int);
644 static int check_for_label_ref (rtx *, void *);
645 extern void dump_class (struct table_elt*);
646 static struct cse_reg_info * get_cse_reg_info (unsigned int);
647 static int check_dependence (rtx *, void *);
649 static void flush_hash_table (void);
650 static bool insn_live_p (rtx, int *);
651 static bool set_live_p (rtx, rtx, int *);
652 static bool dead_libcall_p (rtx, int *);
653 static int cse_change_cc_mode (rtx *, void *);
654 static void cse_change_cc_mode_insn (rtx, rtx);
655 static void cse_change_cc_mode_insns (rtx, rtx, rtx);
656 static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
659 #undef RTL_HOOKS_GEN_LOWPART
660 #define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
662 static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
664 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
665 virtual regs here because the simplify_*_operation routines are called
666 by integrate.c, which is called before virtual register instantiation. */
669 fixed_base_plus_p (rtx x)
671 switch (GET_CODE (x))
674 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
676 if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
678 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
679 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
684 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
686 return fixed_base_plus_p (XEXP (x, 0));
693 /* Dump the expressions in the equivalence class indicated by CLASSP.
694 This function is used only for debugging. */
696 dump_class (struct table_elt *classp)
698 struct table_elt *elt;
700 fprintf (stderr, "Equivalence chain for ");
701 print_rtl (stderr, classp->exp);
702 fprintf (stderr, ": \n");
704 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
706 print_rtl (stderr, elt->exp);
707 fprintf (stderr, "\n");
711 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
714 approx_reg_cost_1 (rtx *xp, void *data)
721 unsigned int regno = REGNO (x);
723 if (! CHEAP_REGNO (regno))
725 if (regno < FIRST_PSEUDO_REGISTER)
727 if (SMALL_REGISTER_CLASSES)
739 /* Return an estimate of the cost of the registers used in an rtx.
740 This is mostly the number of different REG expressions in the rtx;
741 however for some exceptions like fixed registers we use a cost of
742 0. If any other hard register reference occurs, return MAX_COST. */
745 approx_reg_cost (rtx x)
749 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
755 /* Returns a canonical version of X for the address, from the point of view
756 that all multiplications are represented as MULT instead of the multiply
757 by a power of 2 being represented as ASHIFT. */
760 canon_for_address (rtx x)
763 enum machine_mode mode;
777 if (GET_CODE (XEXP (x, 1)) == CONST_INT
778 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)
779 && INTVAL (XEXP (x, 1)) >= 0)
781 new = canon_for_address (XEXP (x, 0));
782 new = gen_rtx_MULT (mode, new,
783 gen_int_mode ((HOST_WIDE_INT) 1
784 << INTVAL (XEXP (x, 1)),
795 /* Now recursively process each operand of this operation. */
796 fmt = GET_RTX_FORMAT (code);
797 for (i = 0; i < GET_RTX_LENGTH (code); i++)
800 new = canon_for_address (XEXP (x, i));
806 /* Return a negative value if an rtx A, whose costs are given by COST_A
807 and REGCOST_A, is more desirable than an rtx B.
808 Return a positive value if A is less desirable, or 0 if the two are equally good. */
811 preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
813 /* First, get rid of cases involving expressions that are entirely unwanted. */
815 if (cost_a != cost_b)
817 if (cost_a == MAX_COST)
819 if (cost_b == MAX_COST)
823 /* Avoid extending lifetimes of hardregs. */
824 if (regcost_a != regcost_b)
826 if (regcost_a == MAX_COST)
828 if (regcost_b == MAX_COST)
832 /* Normal operation costs take precedence. */
833 if (cost_a != cost_b)
834 return cost_a - cost_b;
835 /* Only if these are identical consider effects on register pressure. */
836 if (regcost_a != regcost_b)
837 return regcost_a - regcost_b;
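  /* Illustrative (made-up numbers): preferable (4, 1, 4, 2) is negative,
     i.e. the first rtx wins on register pressure when the rtx costs tie,
     whereas preferable (MAX_COST, 0, 4, 2) is positive because an
     expression of cost MAX_COST is always rejected first, regardless of
     its regcost.  */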
841 /* Internal function, to compute cost when X is not a register; called
842 from COST macro to keep it simple. */
845 notreg_cost (rtx x, enum rtx_code outer)
847 return ((GET_CODE (x) == SUBREG
848 && REG_P (SUBREG_REG (x))
849 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
850 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
851 && (GET_MODE_SIZE (GET_MODE (x))
852 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
853 && subreg_lowpart_p (x)
854 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
855 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
857 : rtx_cost (x, outer) * 2);
861 static struct cse_reg_info *
862 get_cse_reg_info (unsigned int regno)
864 struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
865 struct cse_reg_info *p;
867 for (p = *hash_head; p != NULL; p = p->hash_next)
868 if (p->regno == regno)
873 /* Get a new cse_reg_info structure. */
874 if (cse_reg_info_free_list)
876 p = cse_reg_info_free_list;
877 cse_reg_info_free_list = p->next;
880 p = xmalloc (sizeof (struct cse_reg_info));
882 /* Insert into hash table. */
883 p->hash_next = *hash_head;
888 p->reg_in_table = -1;
889 p->subreg_ticked = -1;
890 p->reg_qty = -regno - 1;
892 p->next = cse_reg_info_used_list;
893 cse_reg_info_used_list = p;
894 if (!cse_reg_info_used_list_end)
895 cse_reg_info_used_list_end = p;
898 /* Cache this lookup; we tend to be looking up information about the
899 same register several times in a row. */
900 cached_regno = regno;
901 cached_cse_reg_info = p;
906 /* Clear the hash table and initialize each register with its own quantity,
907 for a new basic block. */
910 new_basic_block (void)
916 /* Clear out hash table state for this pass. */
918 memset (reg_hash, 0, sizeof reg_hash);
920 if (cse_reg_info_used_list)
922 cse_reg_info_used_list_end->next = cse_reg_info_free_list;
923 cse_reg_info_free_list = cse_reg_info_used_list;
924 cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
926 cached_cse_reg_info = 0;
928 CLEAR_HARD_REG_SET (hard_regs_in_table);
930 /* The per-quantity values used to be initialized here, but it is
931 much faster to initialize each as it is made in `make_new_qty'. */
933 for (i = 0; i < HASH_SIZE; i++)
935 struct table_elt *first;
940 struct table_elt *last = first;
944 while (last->next_same_hash != NULL)
945 last = last->next_same_hash;
947 /* Now relink this entire hash chain into
948 the free element list. */
950 last->next_same_hash = free_element_chain;
951 free_element_chain = first;
961 /* Say that register REG contains a quantity in mode MODE that was not
962 contained in any register before, and initialize that quantity. */
965 make_new_qty (unsigned int reg, enum machine_mode mode)
968 struct qty_table_elem *ent;
969 struct reg_eqv_elem *eqv;
971 gcc_assert (next_qty < max_qty);
973 q = REG_QTY (reg) = next_qty++;
975 ent->first_reg = reg;
978 ent->const_rtx = ent->const_insn = NULL_RTX;
979 ent->comparison_code = UNKNOWN;
981 eqv = &reg_eqv_table[reg];
982 eqv->next = eqv->prev = -1;
985 /* Make reg NEW equivalent to reg OLD.
986 OLD is not changing; NEW is. */
989 make_regs_eqv (unsigned int new, unsigned int old)
991 unsigned int lastr, firstr;
992 int q = REG_QTY (old);
993 struct qty_table_elem *ent;
997 /* Nothing should become eqv until it has a "non-invalid" qty number. */
998 gcc_assert (REGNO_QTY_VALID_P (old));
1001 firstr = ent->first_reg;
1002 lastr = ent->last_reg;
1004 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
1005 hard regs. Among pseudos, if NEW will live longer than any other reg
1006 of the same qty, and that is beyond the current basic block,
1007 make it the new canonical replacement for this qty. */
1008 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1009 /* Certain fixed registers might be of the class NO_REGS. This means
1010 that not only can they not be allocated by the compiler, but
1011 they cannot be used in substitutions or canonicalizations
1013 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1014 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1015 || (new >= FIRST_PSEUDO_REGISTER
1016 && (firstr < FIRST_PSEUDO_REGISTER
1017 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1018 || (uid_cuid[REGNO_FIRST_UID (new)]
1019 < cse_basic_block_start))
1020 && (uid_cuid[REGNO_LAST_UID (new)]
1021 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1023 reg_eqv_table[firstr].prev = new;
1024 reg_eqv_table[new].next = firstr;
1025 reg_eqv_table[new].prev = -1;
1026 ent->first_reg = new;
1030 /* If NEW is a hard reg (known to be non-fixed), insert at end.
1031 Otherwise, insert before any non-fixed hard regs that are at the
1032 end. Registers of class NO_REGS cannot be used as an
1033 equivalent for anything. */
1034 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1035 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1036 && new >= FIRST_PSEUDO_REGISTER)
1037 lastr = reg_eqv_table[lastr].prev;
1038 reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1039 if (reg_eqv_table[lastr].next >= 0)
1040 reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1042 qty_table[q].last_reg = new;
1043 reg_eqv_table[lastr].next = new;
1044 reg_eqv_table[new].prev = lastr;
1048 /* Remove REG from its equivalence class. */
1051 delete_reg_equiv (unsigned int reg)
1053 struct qty_table_elem *ent;
1054 int q = REG_QTY (reg);
1057 /* If invalid, do nothing. */
1058 if (! REGNO_QTY_VALID_P (reg))
1061 ent = &qty_table[q];
1063 p = reg_eqv_table[reg].prev;
1064 n = reg_eqv_table[reg].next;
1067 reg_eqv_table[n].prev = p;
1071 reg_eqv_table[p].next = n;
1075 REG_QTY (reg) = -reg - 1;
1078 /* Remove any invalid expressions from the hash table
1079 that refer to any of the registers contained in expression X.
1081 Make sure that newly inserted references to those registers
1082 as subexpressions will be considered valid.
1084 mention_regs is not called when a register itself
1085 is being stored in the table.
1087 Return 1 if we have done something that may have changed the hash code of X. */
1091 mention_regs (rtx x)
1101 code = GET_CODE (x);
1104 unsigned int regno = REGNO (x);
1105 unsigned int endregno
1106 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1107 : hard_regno_nregs[regno][GET_MODE (x)]);
1110 for (i = regno; i < endregno; i++)
1112 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1113 remove_invalid_refs (i);
1115 REG_IN_TABLE (i) = REG_TICK (i);
1116 SUBREG_TICKED (i) = -1;
1122 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1123 pseudo if they don't use overlapping words. We handle only pseudos
1124 here for simplicity. */
1125 if (code == SUBREG && REG_P (SUBREG_REG (x))
1126 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1128 unsigned int i = REGNO (SUBREG_REG (x));
1130 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1132 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1133 the last store to this register really stored into this
1134 subreg, then remove the memory of this subreg.
1135 Otherwise, remove any memory of the entire register and
1136 all its subregs from the table. */
1137 if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1138 || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1139 remove_invalid_refs (i);
1141 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1144 REG_IN_TABLE (i) = REG_TICK (i);
1145 SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1149 /* If X is a comparison or a COMPARE and either operand is a register
1150 that does not have a quantity, give it one. This is so that a later
1151 call to record_jump_equiv won't cause X to be assigned a different
1152 hash code and not found in the table after that call.
1154 It is not necessary to do this here, since rehash_using_reg can
1155 fix up the table later, but doing this here eliminates the need to
1156 call that expensive function in the most common case where the only
1157 use of the register is in the comparison. */
1159 if (code == COMPARE || COMPARISON_P (x))
1161 if (REG_P (XEXP (x, 0))
1162 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1163 if (insert_regs (XEXP (x, 0), NULL, 0))
1165 rehash_using_reg (XEXP (x, 0));
1169 if (REG_P (XEXP (x, 1))
1170 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1171 if (insert_regs (XEXP (x, 1), NULL, 0))
1173 rehash_using_reg (XEXP (x, 1));
1178 fmt = GET_RTX_FORMAT (code);
1179 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1181 changed |= mention_regs (XEXP (x, i));
1182 else if (fmt[i] == 'E')
1183 for (j = 0; j < XVECLEN (x, i); j++)
1184 changed |= mention_regs (XVECEXP (x, i, j));
1189 /* Update the register quantities for inserting X into the hash table
1190 with a value equivalent to CLASSP.
1191 (If the class does not contain a REG, it is irrelevant.)
1192 If MODIFIED is nonzero, X is a destination; it is being modified.
1193 Note that delete_reg_equiv should be called on a register
1194 before insert_regs is done on that register with MODIFIED != 0.
1196 Nonzero value means that elements of reg_qty have changed
1197 so X's hash code may be different. */
1200 insert_regs (rtx x, struct table_elt *classp, int modified)
1204 unsigned int regno = REGNO (x);
1207 /* If REGNO is in the equivalence table already but is of the
1208 wrong mode for that equivalence, don't do anything here. */
1210 qty_valid = REGNO_QTY_VALID_P (regno);
1213 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1215 if (ent->mode != GET_MODE (x))
1219 if (modified || ! qty_valid)
1222 for (classp = classp->first_same_value;
1224 classp = classp->next_same_value)
1225 if (REG_P (classp->exp)
1226 && GET_MODE (classp->exp) == GET_MODE (x))
1228 make_regs_eqv (regno, REGNO (classp->exp));
1232 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1233 than REG_IN_TABLE to find out if there was only a single preceding
1234 invalidation - for the SUBREG - or another one, which would be
1235 for the full register. However, if we find here that REG_TICK
1236 indicates that the register is invalid, it means that it has
1237 been invalidated in a separate operation. The SUBREG might be used
1238 now (then this is a recursive call), or we might use the full REG
1239 now and a SUBREG of it later. So bump up REG_TICK so that
1240 mention_regs will do the right thing. */
1242 && REG_IN_TABLE (regno) >= 0
1243 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1245 make_new_qty (regno, GET_MODE (x));
1252 /* If X is a SUBREG, we will likely be inserting the inner register in the
1253 table. If that register doesn't have an assigned quantity number at
1254 this point but does later, the insertion that we will be doing now will
1255 not be accessible because its hash code will have changed. So assign
1256 a quantity number now. */
1258 else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
1259 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1261 insert_regs (SUBREG_REG (x), NULL, 0);
1266 return mention_regs (x);
1269 /* Look in or update the hash table. */
1271 /* Remove table element ELT from use in the table.
1272 HASH is its hash code, made using the HASH macro.
1273 It's an argument because often that is known in advance
1274 and we save much time not recomputing it. */
1277 remove_from_table (struct table_elt *elt, unsigned int hash)
1282 /* Mark this element as removed. See cse_insn. */
1283 elt->first_same_value = 0;
1285 /* Remove the table element from its equivalence class. */
1288 struct table_elt *prev = elt->prev_same_value;
1289 struct table_elt *next = elt->next_same_value;
1292 next->prev_same_value = prev;
1295 prev->next_same_value = next;
1298 struct table_elt *newfirst = next;
1301 next->first_same_value = newfirst;
1302 next = next->next_same_value;
1307 /* Remove the table element from its hash bucket. */
1310 struct table_elt *prev = elt->prev_same_hash;
1311 struct table_elt *next = elt->next_same_hash;
1314 next->prev_same_hash = prev;
1317 prev->next_same_hash = next;
1318 else if (table[hash] == elt)
1322 /* This entry is not in the proper hash bucket. This can happen
1323 when two classes were merged by `merge_equiv_classes'. Search
1324 for the hash bucket that it heads. This happens only very
1325 rarely, so the cost is acceptable. */
1326 for (hash = 0; hash < HASH_SIZE; hash++)
1327 if (table[hash] == elt)
1332 /* Remove the table element from its related-value circular chain. */
1334 if (elt->related_value != 0 && elt->related_value != elt)
1336 struct table_elt *p = elt->related_value;
1338 while (p->related_value != elt)
1339 p = p->related_value;
1340 p->related_value = elt->related_value;
1341 if (p->related_value == p)
1342 p->related_value = 0;
1345 /* Now add it to the free element chain. */
1346 elt->next_same_hash = free_element_chain;
1347 free_element_chain = elt;
1350 /* Look up X in the hash table and return its table element,
1351 or 0 if X is not in the table.
1353 MODE is the machine-mode of X, or if X is an integer constant
1354 with VOIDmode then MODE is the mode with which X will be used.
1356 Here we are satisfied to find an expression whose tree structure looks like X. */
1359 static struct table_elt *
1360 lookup (rtx x, unsigned int hash, enum machine_mode mode)
1362 struct table_elt *p;
1364 for (p = table[hash]; p; p = p->next_same_hash)
1365 if (mode == p->mode && ((x == p->exp && REG_P (x))
1366 || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1372 /* Like `lookup' but don't care whether the table element uses invalid regs.
1373 Also ignore discrepancies in the machine mode of a register. */
1375 static struct table_elt *
1376 lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
1378 struct table_elt *p;
1382 unsigned int regno = REGNO (x);
1384 /* Don't check the machine mode when comparing registers;
1385 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1386 for (p = table[hash]; p; p = p->next_same_hash)
1388 && REGNO (p->exp) == regno)
1393 for (p = table[hash]; p; p = p->next_same_hash)
1395 && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1402 /* Look for an expression equivalent to X and with code CODE.
1403 If one is found, return that expression. */
1406 lookup_as_function (rtx x, enum rtx_code code)
1409 = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1411 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1412 long as we are narrowing. So if we looked in vain for a mode narrower
1413 than word_mode before, look for word_mode now. */
1414 if (p == 0 && code == CONST_INT
1415 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1418 PUT_MODE (x, word_mode);
1419 p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
1425 for (p = p->first_same_value; p; p = p->next_same_value)
1426 if (GET_CODE (p->exp) == code
1427 /* Make sure this is a valid entry in the table. */
1428 && exp_equiv_p (p->exp, p->exp, 1, false))
1434 /* Insert X in the hash table, assuming HASH is its hash code
1435 and CLASSP is an element of the class it should go in
1436 (or 0 if a new class should be made).
1437 It is inserted at the proper position to keep the class in
1438 the order cheapest first.
1440 MODE is the machine-mode of X, or if X is an integer constant
1441 with VOIDmode then MODE is the mode with which X will be used.
1443 For elements of equal cheapness, the most recent one
1444 goes in front, except that the first element in the list
1445 remains first unless a cheaper element is added. The order of
1446 pseudo-registers does not matter, as canon_reg will be called to
1447 find the cheapest when a register is retrieved from the table.
1449 The in_memory field in the hash table element is set to 0.
1450 The caller must set it nonzero if appropriate.
1452 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1453 and if insert_regs returns a nonzero value
1454 you must then recompute its hash code before calling here.
1456 If necessary, update table showing constant values of quantities. */
1458 #define CHEAPER(X, Y) \
1459 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1461 static struct table_elt *
1462 insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
1464 struct table_elt *elt;
1466 /* If X is a register and we haven't made a quantity for it,
1467 something is wrong. */
1468 gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1470 /* If X is a hard register, show it is being put in the table. */
1471 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1473 unsigned int regno = REGNO (x);
1474 unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
1477 for (i = regno; i < endregno; i++)
1478 SET_HARD_REG_BIT (hard_regs_in_table, i);
1481 /* Put an element for X into the right hash bucket. */
1483 elt = free_element_chain;
1485 free_element_chain = elt->next_same_hash;
1489 elt = xmalloc (sizeof (struct table_elt));
1493 elt->canon_exp = NULL_RTX;
1494 elt->cost = COST (x);
1495 elt->regcost = approx_reg_cost (x);
1496 elt->next_same_value = 0;
1497 elt->prev_same_value = 0;
1498 elt->next_same_hash = table[hash];
1499 elt->prev_same_hash = 0;
1500 elt->related_value = 0;
1503 elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1506 table[hash]->prev_same_hash = elt;
1509 /* Put it into the proper value-class. */
1512 classp = classp->first_same_value;
1513 if (CHEAPER (elt, classp))
1514 /* Insert at the head of the class. */
1516 struct table_elt *p;
1517 elt->next_same_value = classp;
1518 classp->prev_same_value = elt;
1519 elt->first_same_value = elt;
1521 for (p = classp; p; p = p->next_same_value)
1522 p->first_same_value = elt;
1526 /* Insert not at head of the class. */
1527 /* Put it after the last element cheaper than X. */
1528 struct table_elt *p, *next;
1530 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1533 /* Put it after P and before NEXT. */
1534 elt->next_same_value = next;
1536 next->prev_same_value = elt;
1538 elt->prev_same_value = p;
1539 p->next_same_value = elt;
1540 elt->first_same_value = classp;
1544 elt->first_same_value = elt;
1546 /* If this is a constant being set equivalent to a register or a register
1547 being set equivalent to a constant, note the constant equivalence.
1549 If this is a constant, it cannot be equivalent to a different constant,
1550 and a constant is the only thing that can be cheaper than a register. So
1551 we know the register is the head of the class (before the constant was inserted).
1554 If this is a register that is not already known equivalent to a
1555 constant, we must check the entire class.
1557 If this is a register that is already known equivalent to an insn,
1558 update the qtys `const_insn' to show that `this_insn' is the latest
1559 insn making that quantity equivalent to the constant. */
1561 if (elt->is_const && classp && REG_P (classp->exp)
1564 int exp_q = REG_QTY (REGNO (classp->exp));
1565 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1567 exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1568 exp_ent->const_insn = this_insn;
1573 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1576 struct table_elt *p;
1578 for (p = classp; p != 0; p = p->next_same_value)
1580 if (p->is_const && !REG_P (p->exp))
1582 int x_q = REG_QTY (REGNO (x));
1583 struct qty_table_elem *x_ent = &qty_table[x_q];
1586 = gen_lowpart (GET_MODE (x), p->exp);
1587 x_ent->const_insn = this_insn;
1594 && qty_table[REG_QTY (REGNO (x))].const_rtx
1595 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1596 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1598 /* If this is a constant with symbolic value,
1599 and it has a term with an explicit integer value,
1600 link it up with related expressions. */
1601 if (GET_CODE (x) == CONST)
1603 rtx subexp = get_related_value (x);
1605 struct table_elt *subelt, *subelt_prev;
1609 /* Get the integer-free subexpression in the hash table. */
1610 subhash = SAFE_HASH (subexp, mode);
1611 subelt = lookup (subexp, subhash, mode);
1613 subelt = insert (subexp, NULL, subhash, mode);
1614 /* Initialize SUBELT's circular chain if it has none. */
1615 if (subelt->related_value == 0)
1616 subelt->related_value = subelt;
1617 /* Find the element in the circular chain that precedes SUBELT. */
1618 subelt_prev = subelt;
1619 while (subelt_prev->related_value != subelt)
1620 subelt_prev = subelt_prev->related_value;
1621 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1622 This way the element that follows SUBELT is the oldest one. */
1623 elt->related_value = subelt_prev->related_value;
1624 subelt_prev->related_value = elt;
1631 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1632 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1633 the two classes equivalent.
1635 CLASS1 will be the surviving class; CLASS2 should not be used after this call.
1638 Any invalid entries in CLASS2 will not be copied. */
1641 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1643 struct table_elt *elt, *next, *new;
1645 /* Ensure we start with the head of the classes. */
1646 class1 = class1->first_same_value;
1647 class2 = class2->first_same_value;
1649 /* If they were already equal, forget it. */
1650 if (class1 == class2)
1653 for (elt = class2; elt; elt = next)
1657 enum machine_mode mode = elt->mode;
1659 next = elt->next_same_value;
1661 /* Remove old entry, make a new one in CLASS1's class.
1662 Don't do this for invalid entries as we cannot find their
1663 hash code (it also isn't necessary). */
1664 if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1666 bool need_rehash = false;
1668 hash_arg_in_memory = 0;
1669 hash = HASH (exp, mode);
1673 need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1674 delete_reg_equiv (REGNO (exp));
1677 remove_from_table (elt, hash);
1679 if (insert_regs (exp, class1, 0) || need_rehash)
1681 rehash_using_reg (exp);
1682 hash = HASH (exp, mode);
1684 new = insert (exp, class1, hash, mode);
1685 new->in_memory = hash_arg_in_memory;
1690 /* Flush the entire hash table. */
1693 flush_hash_table (void)
1696 struct table_elt *p;
1698 for (i = 0; i < HASH_SIZE; i++)
1699 for (p = table[i]; p; p = table[i])
1701 /* Note that invalidate can remove elements
1702 after P in the current hash chain. */
1704 invalidate (p->exp, p->mode);
1706 remove_from_table (p, i);
1710 /* Function called for each rtx to check whether a true dependence exists. */
1711 struct check_dependence_data
1713 enum machine_mode mode;
1719 check_dependence (rtx *x, void *data)
1721 struct check_dependence_data *d = (struct check_dependence_data *) data;
1722 if (*x && MEM_P (*x))
1723 return canon_true_dependence (d->exp, d->mode, d->addr, *x,
1729 /* Remove from the hash table, or mark as invalid, all expressions whose
1730 values could be altered by storing in X. X is a register, a subreg, or
1731 a memory reference with nonvarying address (because, when a memory
1732 reference with a varying address is stored in, all memory references are
1733 removed by invalidate_memory so specific invalidation is superfluous).
1734 FULL_MODE, if not VOIDmode, indicates that this much should be
1735 invalidated instead of just the amount indicated by the mode of X. This
1736 is only used for bitfield stores into memory.
1738 A nonvarying address may be just a register or just a symbol reference,
1739 or it may be either of those plus a numeric offset. */
1742 invalidate (rtx x, enum machine_mode full_mode)
1745 struct table_elt *p;
1748 switch (GET_CODE (x))
1752 /* If X is a register, dependencies on its contents are recorded
1753 through the qty number mechanism. Just change the qty number of
1754 the register, mark it as invalid for expressions that refer to it,
1755 and remove it itself. */
1756 unsigned int regno = REGNO (x);
1757 unsigned int hash = HASH (x, GET_MODE (x));
1759 /* Remove REGNO from any quantity list it might be on and indicate
1760 that its value might have changed. If it is a pseudo, remove its
1761 entry from the hash table.
1763 For a hard register, we do the first two actions above for any
1764 additional hard registers corresponding to X. Then, if any of these
1765 registers are in the table, we must remove any REG entries that
1766 overlap these registers. */
1768 delete_reg_equiv (regno);
1770 SUBREG_TICKED (regno) = -1;
1772 if (regno >= FIRST_PSEUDO_REGISTER)
1774 /* Because a register can be referenced in more than one mode,
1775 we might have to remove more than one table entry. */
1776 struct table_elt *elt;
1778 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1779 remove_from_table (elt, hash);
1783 HOST_WIDE_INT in_table
1784 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1785 unsigned int endregno
1786 = regno + hard_regno_nregs[regno][GET_MODE (x)];
1787 unsigned int tregno, tendregno, rn;
1788 struct table_elt *p, *next;
1790 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1792 for (rn = regno + 1; rn < endregno; rn++)
1794 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1795 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1796 delete_reg_equiv (rn);
1798 SUBREG_TICKED (rn) = -1;
1802 for (hash = 0; hash < HASH_SIZE; hash++)
1803 for (p = table[hash]; p; p = next)
1805 next = p->next_same_hash;
1808 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1811 tregno = REGNO (p->exp);
1813 = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
1814 if (tendregno > regno && tregno < endregno)
1815 remove_from_table (p, hash);
1822 invalidate (SUBREG_REG (x), VOIDmode);
1826 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1827 invalidate (XVECEXP (x, 0, i), VOIDmode);
1831 /* This is part of a disjoint return value; extract the location in
1832 question ignoring the offset. */
1833 invalidate (XEXP (x, 0), VOIDmode);
1837 addr = canon_rtx (get_addr (XEXP (x, 0)));
1838 /* Calculate the canonical version of X here so that
1839 true_dependence doesn't generate new RTL for X on each call. */
1842 /* Remove all hash table elements that refer to overlapping pieces of memory. */
1844 if (full_mode == VOIDmode)
1845 full_mode = GET_MODE (x);
1847 for (i = 0; i < HASH_SIZE; i++)
1849 struct table_elt *next;
1851 for (p = table[i]; p; p = next)
1853 next = p->next_same_hash;
1856 struct check_dependence_data d;
1858 /* Just canonicalize the expression once;
1859 otherwise each time we call invalidate
1860 true_dependence will canonicalize the
1861 expression again. */
1863 p->canon_exp = canon_rtx (p->exp);
1867 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1868 remove_from_table (p, i);
1879 /* Remove all expressions that refer to register REGNO,
1880 since they are already invalid, and we are about to
1881 mark that register valid again and don't want the old
1882 expressions to reappear as valid. */
1885 remove_invalid_refs (unsigned int regno)
1888 struct table_elt *p, *next;
1890 for (i = 0; i < HASH_SIZE; i++)
1891 for (p = table[i]; p; p = next)
1893 next = p->next_same_hash;
1895 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1896 remove_from_table (p, i);
1900 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET, and mode MODE. */
1903 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1904 enum machine_mode mode)
1907 struct table_elt *p, *next;
1908 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1910 for (i = 0; i < HASH_SIZE; i++)
1911 for (p = table[i]; p; p = next)
1914 next = p->next_same_hash;
1917 && (GET_CODE (exp) != SUBREG
1918 || !REG_P (SUBREG_REG (exp))
1919 || REGNO (SUBREG_REG (exp)) != regno
1920 || (((SUBREG_BYTE (exp)
1921 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1922 && SUBREG_BYTE (exp) <= end))
1923 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1924 remove_from_table (p, i);
1928 /* Recompute the hash codes of any valid entries in the hash table that
1929 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1931 This is called when we make a jump equivalence. */
1934 rehash_using_reg (rtx x)
1937 struct table_elt *p, *next;
1940 if (GET_CODE (x) == SUBREG)
1943 /* If X is not a register or if the register is known not to be in any
1944 valid entries in the table, we have no work to do. */
1947 || REG_IN_TABLE (REGNO (x)) < 0
1948 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1951 /* Scan all hash chains looking for valid entries that mention X.
1952 If we find one and it is in the wrong hash chain, move it. */
1954 for (i = 0; i < HASH_SIZE; i++)
1955 for (p = table[i]; p; p = next)
1957 next = p->next_same_hash;
1958 if (reg_mentioned_p (x, p->exp)
1959 && exp_equiv_p (p->exp, p->exp, 1, false)
1960 && i != (hash = SAFE_HASH (p->exp, p->mode)))
1962 if (p->next_same_hash)
1963 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1965 if (p->prev_same_hash)
1966 p->prev_same_hash->next_same_hash = p->next_same_hash;
1968 table[i] = p->next_same_hash;
1970 p->next_same_hash = table[hash];
1971 p->prev_same_hash = 0;
1973 table[hash]->prev_same_hash = p;
1979 /* Remove from the hash table any expression that is a call-clobbered
1980 register. Also update their TICK values. */
1983 invalidate_for_call (void)
1985 unsigned int regno, endregno;
1988 struct table_elt *p, *next;
1991 /* Go through all the hard registers. For each that is clobbered in
1992 a CALL_INSN, remove the register from quantity chains and update
1993 reg_tick if defined. Also see if any of these registers is currently in the table. */
1996 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1997 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1999 delete_reg_equiv (regno);
2000 if (REG_TICK (regno) >= 0)
2003 SUBREG_TICKED (regno) = -1;
2006 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2009 /* In the case where we have no call-clobbered hard registers in the
2010 table, we are done. Otherwise, scan the table and remove any
2011 entry that overlaps a call-clobbered register. */
2014 for (hash = 0; hash < HASH_SIZE; hash++)
2015 for (p = table[hash]; p; p = next)
2017 next = p->next_same_hash;
2020 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2023 regno = REGNO (p->exp);
2024 endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];
2026 for (i = regno; i < endregno; i++)
2027 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2029 remove_from_table (p, hash);
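/* For illustration only: a small standalone sketch, not part of GCC, of the
   overlap test above -- a multi-word hard register occupies the range
   [REGNO, REGNO + NREGS), and the table entry must go if any register in
   that range is call-clobbered.  The 64-register bit mask is an assumption
   made for the example; #if 0 keeps it out of any build.  */
#if 0
#include <stdbool.h>
#include <stdint.h>

static bool
toy_overlaps_call_clobbered (unsigned regno, unsigned nregs,
                             uint64_t clobbered_mask)
{
  unsigned i;

  for (i = regno; i < regno + nregs; i++)
    if (clobbered_mask & ((uint64_t) 1 << i))
      return true;
  return false;
}
#endif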
2035 /* Given an expression X of type CONST,
2036 and ELT which is its table entry (or 0 if it
2037 is not in the hash table),
2038 return an alternate expression for X as a register plus integer.
2039 If none can be found, return 0. */
2042 use_related_value (rtx x, struct table_elt *elt)
2044 struct table_elt *relt = 0;
2045 struct table_elt *p, *q;
2046 HOST_WIDE_INT offset;
2048 /* First, is there anything related known?
2049 If we have a table element, we can tell from that.
2050 Otherwise, must look it up. */
2052 if (elt != 0 && elt->related_value != 0)
2054 else if (elt == 0 && GET_CODE (x) == CONST)
2056 rtx subexp = get_related_value (x);
2058 relt = lookup (subexp,
2059 SAFE_HASH (subexp, GET_MODE (subexp)),
2066 /* Search all related table entries for one that has an
2067 equivalent register. */
2072 /* This loop is strange in that it is executed in two different cases.
2073 The first is when X is already in the table. Then it is searching
2074 the RELATED_VALUE list of X's class (RELT). The second case is when
2075 X is not in the table. Then RELT points to a class for the related value.
2078 Ensure that, whatever case we are in, we ignore classes that have
2079 the same value as X. */
2081 if (rtx_equal_p (x, p->exp))
2084 for (q = p->first_same_value; q; q = q->next_same_value)
2091 p = p->related_value;
2093 /* We went all the way around, so there is nothing to be found.
2094 Alternatively, perhaps RELT was in the table for some other reason
2095 and it has no related values recorded. */
2096 if (p == relt || p == 0)
2103 offset = (get_integer_term (x) - get_integer_term (p->exp));
2104 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2105 return plus_constant (q->exp, offset);
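/* For illustration only: a standalone model, not part of GCC, of the
   related-value walk above -- classes whose values differ only by an integer
   term sit on a circular list, and we go around it looking for one that has
   a register, yielding "that register plus the difference of the integer
   terms".  All toy_* names are invented; #if 0 keeps this out of any build.  */
#if 0
struct toy_class
{
  struct toy_class *related;	/* circular list, like p->related_value */
  long term;			/* the class's integer term */
  int reg;			/* register known to hold the value, or -1 */
};

/* Given X's class ELT and X's integer term X_TERM, return a register R and
   set *OFFSET_OUT so that X is equivalent to "R + *OFFSET_OUT", or return
   -1 if no related class has a register.  */
static int
toy_use_related_value (struct toy_class *elt, long x_term, long *offset_out)
{
  struct toy_class *p;

  for (p = elt->related; p && p != elt; p = p->related)
    if (p->reg >= 0)
      {
	*offset_out = x_term - p->term;
	return p->reg;
      }
  return -1;
}
#endif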
2108 /* Hash a string. Just add its bytes up. */
2109 static inline unsigned
2110 hash_rtx_string (const char *ps)
2113 const unsigned char *p = (const unsigned char *) ps;
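/* For illustration only: a standalone restatement, not part of GCC, of the
   string hash described above -- just the sum of the bytes -- together with
   a tiny usage example.  #if 0 keeps it out of any build.  */
#if 0
#include <stdio.h>

static unsigned
toy_hash_string (const char *ps)
{
  const unsigned char *p = (const unsigned char *) ps;
  unsigned hash = 0;

  while (*p)
    hash += *p++;
  return hash;
}

int
main (void)
{
  /* 'r' + '0' + ',' + 'r' + '1' = 114 + 48 + 44 + 114 + 49 = 369.  */
  printf ("%u\n", toy_hash_string ("r0,r1"));
  return 0;
}
#endif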
2122 /* Hash an rtx. We are careful to make sure the value is never negative.
2123 Equivalent registers hash identically.
2124 MODE is used in hashing for CONST_INTs only;
2125 otherwise the mode of X is used.
2127 Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2129 If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2130 a MEM rtx which does not have the RTX_UNCHANGING_P bit set.
2132 Note that cse_insn knows that the hash code of a MEM expression
2133 is just (int) MEM plus the hash code of the address. */
2136 hash_rtx (rtx x, enum machine_mode mode, int *do_not_record_p,
2137 int *hash_arg_in_memory_p, bool have_reg_qty)
2144 /* Used to turn recursion into iteration. We can't rely on GCC's
2145 tail-recursion elimination since we need to keep accumulating values in HASH. */
2151 code = GET_CODE (x);
2156 unsigned int regno = REGNO (x);
2158 if (!reload_completed)
2160 /* On some machines, we can't record any non-fixed hard register,
2161 because extending its life will cause reload problems. We
2162 consider ap, fp, sp, gp to be fixed for this purpose.
2164 We also consider CCmode registers to be fixed for this purpose;
2165 failure to do so leads to failure to simplify 0<100 type of conditionals.
2168 On all machines, we can't record any global registers.
2169 Nor should we record any register that is in a small
2170 class, as defined by CLASS_LIKELY_SPILLED_P. */
2173 if (regno >= FIRST_PSEUDO_REGISTER)
2175 else if (x == frame_pointer_rtx
2176 || x == hard_frame_pointer_rtx
2177 || x == arg_pointer_rtx
2178 || x == stack_pointer_rtx
2179 || x == pic_offset_table_rtx)
2181 else if (global_regs[regno])
2183 else if (fixed_regs[regno])
2185 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2187 else if (SMALL_REGISTER_CLASSES)
2189 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2196 *do_not_record_p = 1;
2201 hash += ((unsigned int) REG << 7);
2202 hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2206 /* We handle SUBREG of a REG specially because the underlying
2207 reg changes its hash value with every value change; we don't
2208 want to have to forget unrelated subregs when one subreg changes. */
2211 if (REG_P (SUBREG_REG (x)))
2213 hash += (((unsigned int) SUBREG << 7)
2214 + REGNO (SUBREG_REG (x))
2215 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2222 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2223 + (unsigned int) INTVAL (x));
2227 /* This is like the general case, except that it only counts
2228 the integers representing the constant. */
2229 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2230 if (GET_MODE (x) != VOIDmode)
2231 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2233 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2234 + (unsigned int) CONST_DOUBLE_HIGH (x));
2242 units = CONST_VECTOR_NUNITS (x);
2244 for (i = 0; i < units; ++i)
2246 elt = CONST_VECTOR_ELT (x, i);
2247 hash += hash_rtx (elt, GET_MODE (elt), do_not_record_p,
2248 hash_arg_in_memory_p, have_reg_qty);
2254 /* Assume there is only one rtx object for any given label. */
2256 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2257 differences and differences between each stage's debugging dumps. */
2258 hash += (((unsigned int) LABEL_REF << 7)
2259 + CODE_LABEL_NUMBER (XEXP (x, 0)));
2264 /* Don't hash on the symbol's address to avoid bootstrap differences.
2265 Different hash values may cause expressions to be recorded in
2266 different orders and thus different registers to be used in the
2267 final assembler. This also avoids differences in the dump files
2268 between various stages. */
2270 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2273 h += (h << 7) + *p++; /* ??? revisit */
2275 hash += ((unsigned int) SYMBOL_REF << 7) + h;
2280 /* We don't record if marked volatile or if BLKmode since we don't
2281 know the size of the move. */
2282 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2284 *do_not_record_p = 1;
2287 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2288 *hash_arg_in_memory_p = 1;
2290 /* Now that we have already found this special case,
2291 might as well speed it up as much as possible. */
2292 hash += (unsigned) MEM;
2297 /* A USE that mentions non-volatile memory needs special
2298 handling since the MEM may be BLKmode which normally
2299 prevents an entry from being made. Pure calls are
2300 marked by a USE which mentions BLKmode memory.
2301 See calls.c:emit_call_1. */
2302 if (MEM_P (XEXP (x, 0))
2303 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2305 hash += (unsigned) USE;
2308 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2309 *hash_arg_in_memory_p = 1;
2311 /* Now that we have already found this special case,
2312 might as well speed it up as much as possible. */
2313 hash += (unsigned) MEM;
2328 case UNSPEC_VOLATILE:
2329 *do_not_record_p = 1;
2333 if (MEM_VOLATILE_P (x))
2335 *do_not_record_p = 1;
2340 /* We don't want to take the filename and line into account. */
2341 hash += (unsigned) code + (unsigned) GET_MODE (x)
2342 + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2343 + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2344 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2346 if (ASM_OPERANDS_INPUT_LENGTH (x))
2348 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2350 hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
2351 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2352 do_not_record_p, hash_arg_in_memory_p,
2355 (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2358 hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2359 x = ASM_OPERANDS_INPUT (x, 0);
2360 mode = GET_MODE (x);
2372 i = GET_RTX_LENGTH (code) - 1;
2373 hash += (unsigned) code + (unsigned) GET_MODE (x);
2374 fmt = GET_RTX_FORMAT (code);
2380 /* If we are about to do the last recursive call
2381 needed at this level, change it into iteration.
2382 This function is called enough to be worth it. */
2389 hash += hash_rtx (XEXP (x, i), 0, do_not_record_p,
2390 hash_arg_in_memory_p, have_reg_qty);
2394 for (j = 0; j < XVECLEN (x, i); j++)
2395 hash += hash_rtx (XVECEXP (x, i, j), 0, do_not_record_p,
2396 hash_arg_in_memory_p, have_reg_qty);
2400 hash += hash_rtx_string (XSTR (x, i));
2404 hash += (unsigned int) XINT (x, i);
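/* For illustration only: a self-contained sketch, not part of GCC, of the
   hashing scheme above applied to a toy expression tree -- fold the code and
   payload into the hash, recurse on all operands but the last, and handle
   the last operand by iterating so long operand chains do not deepen the C
   stack, the same trick the loop above uses for the final XEXP.  All toy_*
   names are invented; #if 0 keeps this out of any build.  */
#if 0
#include <stddef.h>

struct toy_expr
{
  int code;			/* stands in for the rtx code */
  int value;			/* payload for leaves (regno, constant, ...) */
  struct toy_expr *op[2];	/* operands, NULL when absent */
};

static unsigned
toy_hash (const struct toy_expr *e)
{
  unsigned hash = 0;

  while (e)
    {
      hash += ((unsigned) e->code << 7) + (unsigned) e->value;
      if (e->op[0] && e->op[1])
	hash += toy_hash (e->op[0]);		/* recurse on all but the last */
      e = e->op[1] ? e->op[1] : e->op[0];	/* iterate on the last one */
    }
  return hash;
}
#endif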
2419 /* Hash an rtx X for cse via hash_rtx.
2420 Stores 1 in do_not_record if any subexpression is volatile.
2421 Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2422 does not have the RTX_UNCHANGING_P bit set. */
2424 static inline unsigned
2425 canon_hash (rtx x, enum machine_mode mode)
2427 return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2430 /* Like canon_hash but with no side effects, i.e. do_not_record
2431 and hash_arg_in_memory are not changed. */
2433 static inline unsigned
2434 safe_hash (rtx x, enum machine_mode mode)
2436 int dummy_do_not_record;
2437 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2440 /* Return 1 iff X and Y would canonicalize into the same thing,
2441 without actually constructing the canonicalization of either one.
2442 If VALIDATE is nonzero,
2443 we assume X is an expression being processed from the rtl
2444 and Y was found in the hash table. We check register refs
2445 in Y for being marked as valid.
2447 If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
2450 exp_equiv_p (rtx x, rtx y, int validate, bool for_gcse)
2456 /* Note: it is incorrect to assume an expression is equivalent to itself
2457 if VALIDATE is nonzero. */
2458 if (x == y && !validate)
2461 if (x == 0 || y == 0)
2464 code = GET_CODE (x);
2465 if (code != GET_CODE (y))
2468 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2469 if (GET_MODE (x) != GET_MODE (y))
2480 return XEXP (x, 0) == XEXP (y, 0);
2483 return XSTR (x, 0) == XSTR (y, 0);
2487 return REGNO (x) == REGNO (y);
2490 unsigned int regno = REGNO (y);
2492 unsigned int endregno
2493 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2494 : hard_regno_nregs[regno][GET_MODE (y)]);
2496 /* If the quantities are not the same, the expressions are not
2497 equivalent. If they are and we are not to validate, they
2498 are equivalent. Otherwise, ensure all regs are up-to-date. */
2500 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2506 for (i = regno; i < endregno; i++)
2507 if (REG_IN_TABLE (i) != REG_TICK (i))
2516 /* Can't merge two expressions in different alias sets, since we
2517 can decide that the expression is transparent in a block when
2518 it isn't, due to it being set with the different alias set. */
2519 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
2522 /* A volatile mem should not be considered equivalent to any
2524 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2529 /* For commutative operations, check both orders. */
2537 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2539 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2540 validate, for_gcse))
2541 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2543 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2544 validate, for_gcse)));
2547 /* We don't use the generic code below because we want to
2548 disregard filename and line numbers. */
2550 /* A volatile asm isn't equivalent to any other. */
2551 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2554 if (GET_MODE (x) != GET_MODE (y)
2555 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2556 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2557 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2558 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2559 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2562 if (ASM_OPERANDS_INPUT_LENGTH (x))
2564 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2565 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2566 ASM_OPERANDS_INPUT (y, i),
2568 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2569 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2579 /* Compare the elements. If any pair of corresponding elements
2580 fail to match, return 0 for the whole thing. */
2582 fmt = GET_RTX_FORMAT (code);
2583 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2588 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2589 validate, for_gcse))
2594 if (XVECLEN (x, i) != XVECLEN (y, i))
2596 for (j = 0; j < XVECLEN (x, i); j++)
2597 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2598 validate, for_gcse))
2603 if (strcmp (XSTR (x, i), XSTR (y, i)))
2608 if (XINT (x, i) != XINT (y, i))
2613 if (XWINT (x, i) != XWINT (y, i))
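/* For illustration only: a standalone sketch, not part of GCC, of the
   structural equality above reduced to a toy expression type -- two nodes
   are equivalent if their codes and payloads match and their operands match
   either in order or, for commutative codes, in the swapped order.  The toy
   codes '+' and '*' and all toy_* names are invented; #if 0 keeps this out
   of any build.  */
#if 0
#include <stdbool.h>
#include <stddef.h>

struct toy_expr
{
  int code;			/* 0 for a leaf carrying VALUE */
  int value;
  struct toy_expr *op[2];	/* NULL when absent */
};

static bool
toy_commutative_p (int code)
{
  return code == '+' || code == '*';
}

static bool
toy_equiv_p (const struct toy_expr *x, const struct toy_expr *y)
{
  if (x == y)
    return true;
  if (!x || !y || x->code != y->code || x->value != y->value)
    return false;

  if (toy_equiv_p (x->op[0], y->op[0]) && toy_equiv_p (x->op[1], y->op[1]))
    return true;
  /* For commutative codes, also try the operands the other way round.  */
  return toy_commutative_p (x->code)
	 && toy_equiv_p (x->op[0], y->op[1])
	 && toy_equiv_p (x->op[1], y->op[0]);
}
#endif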
2629 /* Return 1 if X has a value that can vary even between two
2630 executions of the program. 0 means X can be compared reliably
2631 against certain constants or near-constants. */
2634 cse_rtx_varies_p (rtx x, int from_alias)
2636 /* We need not check for X and the equivalence class being of the same
2637 mode because if X is equivalent to a constant in some mode, it
2638 doesn't vary in any mode. */
2641 && REGNO_QTY_VALID_P (REGNO (x)))
2643 int x_q = REG_QTY (REGNO (x));
2644 struct qty_table_elem *x_ent = &qty_table[x_q];
2646 if (GET_MODE (x) == x_ent->mode
2647 && x_ent->const_rtx != NULL_RTX)
2651 if (GET_CODE (x) == PLUS
2652 && GET_CODE (XEXP (x, 1)) == CONST_INT
2653 && REG_P (XEXP (x, 0))
2654 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2656 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2657 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2659 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2660 && x0_ent->const_rtx != NULL_RTX)
2664 /* This can happen as the result of virtual register instantiation, if
2665 the initial constant is too large to be a valid address. This gives
2666 us a three instruction sequence, load large offset into a register,
2667 load fp minus a constant into a register, then a MEM which is the
2668 sum of the two `constant' registers. */
2669 if (GET_CODE (x) == PLUS
2670 && REG_P (XEXP (x, 0))
2671 && REG_P (XEXP (x, 1))
2672 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2673 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2675 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2676 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2677 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2678 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2680 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2681 && x0_ent->const_rtx != NULL_RTX
2682 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2683 && x1_ent->const_rtx != NULL_RTX)
2687 return rtx_varies_p (x, from_alias);
2690 /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2691 the result if necessary. INSN is as for canon_reg. */
2694 validate_canon_reg (rtx *xloc, rtx insn)
2696 rtx new = canon_reg (*xloc, insn);
2699 /* If replacing pseudo with hard reg or vice versa, ensure the
2700 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2701 if (insn != 0 && new != 0
2702 && REG_P (new) && REG_P (*xloc)
2703 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2704 != (REGNO (*xloc) < FIRST_PSEUDO_REGISTER))
2705 || GET_MODE (new) != GET_MODE (*xloc)
2706 || (insn_code = recog_memoized (insn)) < 0
2707 || insn_data[insn_code].n_dups > 0))
2708 validate_change (insn, xloc, new, 1);
2713 /* Canonicalize an expression:
2714 replace each register reference inside it
2715 with the "oldest" equivalent register.
2717 If INSN is nonzero and we are replacing a pseudo with a hard register
2718 or vice versa, validate_change is used to ensure that INSN remains valid
2719 after we make our substitution. The calls are made with IN_GROUP nonzero
2720 so apply_change_group must be called upon the outermost return from this
2721 function (unless INSN is zero). The result of apply_change_group can
2722 generally be discarded since the changes we are making are optional. */
2725 canon_reg (rtx x, rtx insn)
2734 code = GET_CODE (x);
2753 struct qty_table_elem *ent;
2755 /* Never replace a hard reg, because hard regs can appear
2756 in more than one machine mode, and we must preserve the mode
2757 of each occurrence. Also, some hard regs appear in
2758 MEMs that are shared and mustn't be altered. Don't try to
2759 replace any reg that maps to a reg of class NO_REGS. */
2760 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2761 || ! REGNO_QTY_VALID_P (REGNO (x)))
2764 q = REG_QTY (REGNO (x));
2765 ent = &qty_table[q];
2766 first = ent->first_reg;
2767 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2768 : REGNO_REG_CLASS (first) == NO_REGS ? x
2769 : gen_rtx_REG (ent->mode, first));
2776 fmt = GET_RTX_FORMAT (code);
2777 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2782 validate_canon_reg (&XEXP (x, i), insn);
2783 else if (fmt[i] == 'E')
2784 for (j = 0; j < XVECLEN (x, i); j++)
2785 validate_canon_reg (&XVECEXP (x, i, j), insn);
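/* For illustration only: a minimal standalone sketch, not part of GCC, of
   the register canonicalization above -- every register is mapped to the
   first ("oldest") register of the quantity it currently belongs to, so
   equivalent registers hash and compare identically.  The arrays and toy_*
   names are invented; #if 0 keeps the sketch out of any build.  */
#if 0
#define TOY_NREGS 128

static int toy_reg_qty[TOY_NREGS];	/* quantity each register holds, -1 if none */
static int toy_qty_first[TOY_NREGS];	/* oldest register of each quantity */

static int
toy_canon_reg (int regno)
{
  int q = toy_reg_qty[regno];

  if (q < 0)			/* no known equivalence for this register */
    return regno;
  return toy_qty_first[q];
}
#endif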
2791 /* LOC is a location within INSN that is an operand address (the contents of
2792 a MEM). Find the best equivalent address to use that is valid for this insn.
2795 On most CISC machines, complicated address modes are costly, and rtx_cost
2796 is a good approximation for that cost. However, most RISC machines have
2797 only a few (usually only one) memory reference formats. If an address is
2798 valid at all, it is often just as cheap as any other address. Hence, for
2799 RISC machines, we use `address_cost' to compare the costs of various
2800 addresses. For two addresses of equal cost, choose the one with the
2801 highest `rtx_cost' value as that has the potential of eliminating the
2802 most insns. For equal costs, we choose the first in the equivalence
2803 class. Note that we ignore the fact that pseudo registers are cheaper than
2804 hard registers here because we would also prefer the pseudo registers. */
2807 find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2809 struct table_elt *elt;
2811 struct table_elt *p;
2812 int found_better = 1;
2813 int save_do_not_record = do_not_record;
2814 int save_hash_arg_in_memory = hash_arg_in_memory;
2819 /* Do not try to replace constant addresses or addresses of local and
2820 argument slots. These MEM expressions are made only once and inserted
2821 in many instructions, as well as being used to control symbol table
2822 output. It is not safe to clobber them.
2824 There are some uncommon cases where the address is already in a register
2825 for some reason, but we cannot take advantage of that because we have
2826 no easy way to unshare the MEM. In addition, looking up all stack
2827 addresses is costly. */
2828 if ((GET_CODE (addr) == PLUS
2829 && REG_P (XEXP (addr, 0))
2830 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2831 && (regno = REGNO (XEXP (addr, 0)),
2832 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2833 || regno == ARG_POINTER_REGNUM))
2835 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2836 || regno == HARD_FRAME_POINTER_REGNUM
2837 || regno == ARG_POINTER_REGNUM))
2838 || CONSTANT_ADDRESS_P (addr))
2841 /* If this address is not simply a register, try to fold it. This will
2842 sometimes simplify the expression. Many simplifications
2843 will not be valid, but some, usually applying the associative rule, will
2844 be valid and produce better code. */
2847 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2848 int addr_folded_cost = address_cost (folded, mode);
2849 int addr_cost = address_cost (addr, mode);
2851 if ((addr_folded_cost < addr_cost
2852 || (addr_folded_cost == addr_cost
2853 /* ??? The rtx_cost comparison is left over from an older
2854 version of this code. It is probably no longer helpful. */
2855 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2856 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2857 && validate_change (insn, loc, folded, 0))
2861 /* If this address is not in the hash table, we can't look for equivalences
2862 of the whole address. Also, ignore if volatile. */
2865 hash = HASH (addr, Pmode);
2866 addr_volatile = do_not_record;
2867 do_not_record = save_do_not_record;
2868 hash_arg_in_memory = save_hash_arg_in_memory;
2873 elt = lookup (addr, hash, Pmode);
2877 /* We need to find the best (under the criteria documented above) entry
2878 in the class that is valid. We use the `flag' field to indicate
2879 choices that were invalid and iterate until we can't find a better
2880 one that hasn't already been tried. */
2882 for (p = elt->first_same_value; p; p = p->next_same_value)
2885 while (found_better)
2887 int best_addr_cost = address_cost (*loc, mode);
2888 int best_rtx_cost = (elt->cost + 1) >> 1;
2890 struct table_elt *best_elt = elt;
2893 for (p = elt->first_same_value; p; p = p->next_same_value)
2897 || exp_equiv_p (p->exp, p->exp, 1, false))
2898 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2899 || (exp_cost == best_addr_cost
2900 && ((p->cost + 1) >> 1) > best_rtx_cost)))
2903 best_addr_cost = exp_cost;
2904 best_rtx_cost = (p->cost + 1) >> 1;
2911 if (validate_change (insn, loc,
2912 canon_reg (copy_rtx (best_elt->exp),
2921 /* If the address is a binary operation with the first operand a register
2922 and the second a constant, do the same as above, but looking for
2923 equivalences of the register. Then try to simplify before checking for
2924 the best address to use. This catches a few cases: First is when we
2925 have REG+const and the register is another REG+const. We can often merge
2926 the constants and eliminate one insn and one register. It may also be
2927 that a machine has a cheap REG+REG+const. Finally, this improves the
2928 code on the Alpha for unaligned byte stores. */
2930 if (flag_expensive_optimizations
2931 && ARITHMETIC_P (*loc)
2932 && REG_P (XEXP (*loc, 0)))
2934 rtx op1 = XEXP (*loc, 1);
2937 hash = HASH (XEXP (*loc, 0), Pmode);
2938 do_not_record = save_do_not_record;
2939 hash_arg_in_memory = save_hash_arg_in_memory;
2941 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2945 /* We need to find the best (under the criteria documented above) entry
2946 in the class that is valid. We use the `flag' field to indicate
2947 choices that were invalid and iterate until we can't find a better
2948 one that hasn't already been tried. */
2950 for (p = elt->first_same_value; p; p = p->next_same_value)
2953 while (found_better)
2955 int best_addr_cost = address_cost (*loc, mode);
2956 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2957 struct table_elt *best_elt = elt;
2958 rtx best_rtx = *loc;
2961 /* This is at worst case an O(n^2) algorithm, so limit our search
2962 to the first 32 elements on the list. This avoids trouble
2963 compiling code with very long basic blocks that can easily
2964 call simplify_gen_binary so many times that we run out of memory. */
2968 for (p = elt->first_same_value, count = 0;
2970 p = p->next_same_value, count++)
2973 || exp_equiv_p (p->exp, p->exp, 1, false)))
2975 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
2979 /* Get the canonical version of the address so we can accept more. */
2981 new = canon_for_address (new);
2983 new_cost = address_cost (new, mode);
2985 if (new_cost < best_addr_cost
2986 || (new_cost == best_addr_cost
2987 && (COST (new) + 1) >> 1 > best_rtx_cost))
2990 best_addr_cost = new_cost;
2991 best_rtx_cost = (COST (new) + 1) >> 1;
2999 if (validate_change (insn, loc,
3000 canon_reg (copy_rtx (best_rtx),
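/* For illustration only: a standalone sketch, not part of GCC, of the
   selection policy above -- scan at most 32 members of an equivalence class
   and keep the one with the lowest address cost, breaking ties in favor of
   the higher rtx cost since that one can eliminate the most insns.  All
   toy_* names are invented; #if 0 keeps the sketch out of any build.  */
#if 0
#include <stddef.h>

struct toy_elt
{
  struct toy_elt *next_same_value;
  int addr_cost;		/* analogous to address_cost ()    */
  int rtx_cost;			/* analogous to (COST (x) + 1) >> 1 */
};

static struct toy_elt *
toy_find_best_addr (struct toy_elt *first, int best_addr_cost,
		    int best_rtx_cost)
{
  struct toy_elt *best = NULL;
  struct toy_elt *p;
  int count;

  for (p = first, count = 0; p && count < 32; p = p->next_same_value, count++)
    if (p->addr_cost < best_addr_cost
	|| (p->addr_cost == best_addr_cost && p->rtx_cost > best_rtx_cost))
      {
	best = p;
	best_addr_cost = p->addr_cost;
	best_rtx_cost = p->rtx_cost;
      }
  return best;
}
#endif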
3010 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3011 operation (EQ, NE, GT, etc.), follow it back through the hash table and
3012 see what values are being compared.
3014 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3015 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3016 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3017 compared to produce cc0.
3019 The return value is the comparison operator and is either the code of
3020 A or the code corresponding to the inverse of the comparison. */
3022 static enum rtx_code
3023 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
3024 enum machine_mode *pmode1, enum machine_mode *pmode2)
3028 arg1 = *parg1, arg2 = *parg2;
3030 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3032 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3034 /* Set nonzero when we find something of interest. */
3036 int reverse_code = 0;
3037 struct table_elt *p = 0;
3039 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3040 On machines with CC0, this is the only case that can occur, since
3041 fold_rtx will return the COMPARE or item being compared with zero when given CC0. */
3044 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3047 /* If ARG1 is a comparison operator and CODE is testing for
3048 STORE_FLAG_VALUE, get the inner arguments. */
3050 else if (COMPARISON_P (arg1))
3052 #ifdef FLOAT_STORE_FLAG_VALUE
3053 REAL_VALUE_TYPE fsfv;
3057 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3058 && code == LT && STORE_FLAG_VALUE == -1)
3059 #ifdef FLOAT_STORE_FLAG_VALUE
3060 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3061 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3062 REAL_VALUE_NEGATIVE (fsfv)))
3067 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3068 && code == GE && STORE_FLAG_VALUE == -1)
3069 #ifdef FLOAT_STORE_FLAG_VALUE
3070 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3071 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3072 REAL_VALUE_NEGATIVE (fsfv)))
3075 x = arg1, reverse_code = 1;
3078 /* ??? We could also check for
3080 (ne (and (eq (...) (const_int 1))) (const_int 0))
3082 and related forms, but let's wait until we see them occurring. */
3085 /* Look up ARG1 in the hash table and see if it has an equivalence
3086 that lets us see what is being compared. */
3087 p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
3090 p = p->first_same_value;
3092 /* If what we compare is already known to be constant, that is as good as it gets.
3094 We need to break the loop in this case, because otherwise we
3095 can have an infinite loop when looking at a reg that is known
3096 to be a constant which is the same as a comparison of a reg
3097 against zero which appears later in the insn stream, which in
3098 turn is constant and the same as the comparison of the first reg
3104 for (; p; p = p->next_same_value)
3106 enum machine_mode inner_mode = GET_MODE (p->exp);
3107 #ifdef FLOAT_STORE_FLAG_VALUE
3108 REAL_VALUE_TYPE fsfv;
3111 /* If the entry isn't valid, skip it. */
3112 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3115 if (GET_CODE (p->exp) == COMPARE
3116 /* Another possibility is that this machine has a compare insn
3117 that includes the comparison code. In that case, ARG1 would
3118 be equivalent to a comparison operation that would set ARG1 to
3119 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3120 ORIG_CODE is the actual comparison being done; if it is an EQ,
3121 we must reverse ORIG_CODE. On machines with a negative value
3122 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3125 && GET_MODE_CLASS (inner_mode) == MODE_INT
3126 && (GET_MODE_BITSIZE (inner_mode)
3127 <= HOST_BITS_PER_WIDE_INT)
3128 && (STORE_FLAG_VALUE
3129 & ((HOST_WIDE_INT) 1
3130 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3131 #ifdef FLOAT_STORE_FLAG_VALUE
3133 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3134 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3135 REAL_VALUE_NEGATIVE (fsfv)))
3138 && COMPARISON_P (p->exp)))
3143 else if ((code == EQ
3145 && GET_MODE_CLASS (inner_mode) == MODE_INT
3146 && (GET_MODE_BITSIZE (inner_mode)
3147 <= HOST_BITS_PER_WIDE_INT)
3148 && (STORE_FLAG_VALUE
3149 & ((HOST_WIDE_INT) 1
3150 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3151 #ifdef FLOAT_STORE_FLAG_VALUE
3153 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3154 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3155 REAL_VALUE_NEGATIVE (fsfv)))
3158 && COMPARISON_P (p->exp))
3165 /* If this is a non-trapping address, e.g. fp + constant, the
3166 equivalent is a better operand since it may let us predict
3167 the value of the comparison. */
3168 else if (!rtx_addr_can_trap_p (p->exp))
3175 /* If we didn't find a useful equivalence for ARG1, we are done.
3176 Otherwise, set up for the next iteration. */
3180 /* If we need to reverse the comparison, make sure that that is
3181 possible -- we can't necessarily infer the value of GE from LT
3182 with floating-point operands. */
3185 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3186 if (reversed == UNKNOWN)
3191 else if (COMPARISON_P (x))
3192 code = GET_CODE (x);
3193 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3196 /* Return our results. Return the modes from before fold_rtx
3197 because fold_rtx might produce const_int, and then it's too late. */
3198 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3199 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
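/* For illustration only: a simplified standalone model, not part of GCC, of
   what the loop above does when ARG2 is zero -- if ARG1 is known to carry
   the result of a comparison, "(ARG1 != 0)" is really that comparison and
   "(ARG1 == 0)" is its reverse, so we step back to the original operands.
   The toy codes and names are invented, and the real code also stops at
   constants and checks STORE_FLAG_VALUE.  #if 0 keeps this out of any build.  */
#if 0
#include <stdbool.h>

enum toy_code { TOY_EQ, TOY_NE, TOY_LT, TOY_GE };

struct toy_val
{
  bool is_compare;		/* value known to be (CMP_CODE CMP_A CMP_B) */
  enum toy_code cmp_code;
  struct toy_val *cmp_a, *cmp_b;
};

static enum toy_code
toy_reverse (enum toy_code code)
{
  switch (code)
    {
    case TOY_EQ: return TOY_NE;
    case TOY_NE: return TOY_EQ;
    case TOY_LT: return TOY_GE;
    default:     return TOY_LT;
    }
}

static enum toy_code
toy_find_comparison_args (enum toy_code code, struct toy_val **parg1,
			  struct toy_val **parg2, struct toy_val *zero)
{
  while (*parg2 == zero && *parg1 && (*parg1)->is_compare
	 && (code == TOY_NE || code == TOY_EQ))
    {
      /* An EQ against zero inverts the inner comparison.  */
      code = (code == TOY_EQ
	      ? toy_reverse ((*parg1)->cmp_code) : (*parg1)->cmp_code);
      *parg2 = (*parg1)->cmp_b;
      *parg1 = (*parg1)->cmp_a;
    }
  return code;
}
#endif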
3204 /* If X is a nontrivial arithmetic operation on an argument
3205 for which a constant value can be determined, return
3206 the result of operating on that value, as a constant.
3207 Otherwise, return X, possibly with one or more operands
3208 modified by recursive calls to this function.
3210 If X is a register whose contents are known, we do NOT
3211 return those contents here. equiv_constant is called to perform that task.
3214 INSN is the insn that we may be modifying. If it is 0, make a copy
3215 of X before modifying it. */
3218 fold_rtx (rtx x, rtx insn)
3221 enum machine_mode mode;
3228 /* Folded equivalents of first two operands of X. */
3232 /* Constant equivalents of first three operands of X;
3233 0 when no such equivalent is known. */
3238 /* The mode of the first operand of X. We need this for sign and zero extends. */
3240 enum machine_mode mode_arg0;
3245 mode = GET_MODE (x);
3246 code = GET_CODE (x);
3256 /* No use simplifying an EXPR_LIST
3257 since they are used only for lists of args
3258 in a function call's REG_EQUAL note. */
3264 return prev_insn_cc0;
3268 /* If the next insn is a CODE_LABEL followed by a jump table,
3269 PC's value is a LABEL_REF pointing to that label. That
3270 lets us fold switch statements on the VAX. */
3273 if (insn && tablejump_p (insn, &next, NULL))
3274 return gen_rtx_LABEL_REF (Pmode, next);
3279 /* See if we previously assigned a constant value to this SUBREG. */
3280 if ((new = lookup_as_function (x, CONST_INT)) != 0
3281 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3284 /* If this is a paradoxical SUBREG, we have no idea what value the
3285 extra bits would have. However, if the operand is equivalent
3286 to a SUBREG whose operand is the same as our mode, and all the
3287 modes are within a word, we can just use the inner operand
3288 because these SUBREGs just say how to treat the register.
3290 Similarly if we find an integer constant. */
3292 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3294 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3295 struct table_elt *elt;
3297 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3298 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3299 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3301 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3303 if (CONSTANT_P (elt->exp)
3304 && GET_MODE (elt->exp) == VOIDmode)
3307 if (GET_CODE (elt->exp) == SUBREG
3308 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3309 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3310 return copy_rtx (SUBREG_REG (elt->exp));
3316 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3317 We might be able to if the SUBREG is extracting a single word in an
3318 integral mode or extracting the low part. */
3320 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3321 const_arg0 = equiv_constant (folded_arg0);
3323 folded_arg0 = const_arg0;
3325 if (folded_arg0 != SUBREG_REG (x))
3327 new = simplify_subreg (mode, folded_arg0,
3328 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3333 if (REG_P (folded_arg0)
3334 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
3336 struct table_elt *elt;
3338 elt = lookup (folded_arg0,
3339 HASH (folded_arg0, GET_MODE (folded_arg0)),
3340 GET_MODE (folded_arg0));
3343 elt = elt->first_same_value;
3345 if (subreg_lowpart_p (x))
3346 /* If this is a narrowing SUBREG and our operand is a REG, see
3347 if we can find an equivalence for REG that is an arithmetic
3348 operation in a wider mode where both operands are paradoxical
3349 SUBREGs from objects of our result mode. In that case, we
3350 couldn't report an equivalent value for that operation, since we
3351 don't know what the extra bits will be. But we can find an
3352 equivalence for this SUBREG by folding that operation in the
3353 narrow mode. This allows us to fold arithmetic in narrow modes
3354 when the machine only supports word-sized arithmetic.
3356 Also look for a case where we have a SUBREG whose operand
3357 is the same as our result. If both modes are smaller
3358 than a word, we are simply interpreting a register in
3359 different modes and we can use the inner value. */
3361 for (; elt; elt = elt->next_same_value)
3363 enum rtx_code eltcode = GET_CODE (elt->exp);
3365 /* Just check for unary and binary operations. */
3366 if (UNARY_P (elt->exp)
3367 && eltcode != SIGN_EXTEND
3368 && eltcode != ZERO_EXTEND
3369 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3370 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3371 && (GET_MODE_CLASS (mode)
3372 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3374 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3376 if (!REG_P (op0) && ! CONSTANT_P (op0))
3377 op0 = fold_rtx (op0, NULL_RTX);
3379 op0 = equiv_constant (op0);
3381 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3384 else if (ARITHMETIC_P (elt->exp)
3385 && eltcode != DIV && eltcode != MOD
3386 && eltcode != UDIV && eltcode != UMOD
3387 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3388 && eltcode != ROTATE && eltcode != ROTATERT
3389 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3390 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3392 || CONSTANT_P (XEXP (elt->exp, 0)))
3393 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3394 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3396 || CONSTANT_P (XEXP (elt->exp, 1))))
3398 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3399 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3401 if (op0 && !REG_P (op0) && ! CONSTANT_P (op0))
3402 op0 = fold_rtx (op0, NULL_RTX);
3405 op0 = equiv_constant (op0);
3407 if (op1 && !REG_P (op1) && ! CONSTANT_P (op1))
3408 op1 = fold_rtx (op1, NULL_RTX);
3411 op1 = equiv_constant (op1);
3413 /* If we are looking for the low SImode part of
3414 (ashift:DI c (const_int 32)), it doesn't work
3415 to compute that in SImode, because a 32-bit shift
3416 in SImode is unpredictable. We know the value is 0. */
3418 && GET_CODE (elt->exp) == ASHIFT
3419 && GET_CODE (op1) == CONST_INT
3420 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3423 < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3424 /* If the count fits in the inner mode's width,
3425 but exceeds the outer mode's width,
3426 the value will get truncated to 0 by the subreg. */
3428 new = CONST0_RTX (mode);
3430 /* If the count exceeds even the inner mode's width,
3431 don't fold this expression. */
3434 else if (op0 && op1)
3435 new = simplify_binary_operation (GET_CODE (elt->exp), mode, op0, op1);
3438 else if (GET_CODE (elt->exp) == SUBREG
3439 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3440 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3442 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3443 new = copy_rtx (SUBREG_REG (elt->exp));
3449 /* A SUBREG resulting from a zero extension may fold to zero if
3450 it extracts higher bits than the ZERO_EXTEND's source bits.
3451 FIXME: if combine tried to, er, combine these instructions,
3452 this transformation may be moved to simplify_subreg. */
3453 for (; elt; elt = elt->next_same_value)
3455 if (GET_CODE (elt->exp) == ZERO_EXTEND
3457 >= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3458 return CONST0_RTX (mode);
3466 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3467 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3468 new = lookup_as_function (XEXP (x, 0), code);
3470 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3474 /* If we are not actually processing an insn, don't try to find the
3475 best address. Not only don't we care, but we could modify the
3476 MEM in an invalid way since we have no insn to validate against. */
3478 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3481 /* Even if we don't fold in the insn itself,
3482 we can safely do so here, in hopes of getting a constant. */
3483 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3485 HOST_WIDE_INT offset = 0;
3488 && REGNO_QTY_VALID_P (REGNO (addr)))
3490 int addr_q = REG_QTY (REGNO (addr));
3491 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3493 if (GET_MODE (addr) == addr_ent->mode
3494 && addr_ent->const_rtx != NULL_RTX)
3495 addr = addr_ent->const_rtx;
3498 /* If address is constant, split it into a base and integer offset. */
3499 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3501 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3502 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3504 base = XEXP (XEXP (addr, 0), 0);
3505 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3507 else if (GET_CODE (addr) == LO_SUM
3508 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3509 base = XEXP (addr, 1);
3511 /* If this is a constant pool reference, we can fold it into its
3512 constant to allow better value tracking. */
3513 if (base && GET_CODE (base) == SYMBOL_REF
3514 && CONSTANT_POOL_ADDRESS_P (base))
3516 rtx constant = get_pool_constant (base);
3517 enum machine_mode const_mode = get_pool_mode (base);
3520 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3522 constant_pool_entries_cost = COST (constant);
3523 constant_pool_entries_regcost = approx_reg_cost (constant);
3526 /* If we are loading the full constant, we have an equivalence. */
3527 if (offset == 0 && mode == const_mode)
3530 /* If this actually isn't a constant (weird!), we can't do
3531 anything. Otherwise, handle the two most common cases:
3532 extracting a word from a multi-word constant, and extracting
3533 the low-order bits. Other cases don't seem common enough to worry about. */
3535 if (! CONSTANT_P (constant))
3538 if (GET_MODE_CLASS (mode) == MODE_INT
3539 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3540 && offset % UNITS_PER_WORD == 0
3541 && (new = operand_subword (constant,
3542 offset / UNITS_PER_WORD,
3543 0, const_mode)) != 0)
3546 if (((BYTES_BIG_ENDIAN
3547 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3548 || (! BYTES_BIG_ENDIAN && offset == 0))
3549 && (new = gen_lowpart (mode, constant)) != 0)
3553 /* If this is a reference to a label at a known position in a jump
3554 table, we also know its value. */
3555 if (base && GET_CODE (base) == LABEL_REF)
3557 rtx label = XEXP (base, 0);
3558 rtx table_insn = NEXT_INSN (label);
3560 if (table_insn && JUMP_P (table_insn)
3561 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3563 rtx table = PATTERN (table_insn);
3566 && (offset / GET_MODE_SIZE (GET_MODE (table))
3567 < XVECLEN (table, 0)))
3568 return XVECEXP (table, 0,
3569 offset / GET_MODE_SIZE (GET_MODE (table)));
3571 if (table_insn && JUMP_P (table_insn)
3572 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3574 rtx table = PATTERN (table_insn);
3577 && (offset / GET_MODE_SIZE (GET_MODE (table))
3578 < XVECLEN (table, 1)))
3580 offset /= GET_MODE_SIZE (GET_MODE (table));
3581 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3584 if (GET_MODE (table) != Pmode)
3585 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3587 /* Indicate this is a constant. This isn't a
3588 valid form of CONST, but it will only be used
3589 to fold the next insns and then discarded, so it should be safe.
3592 Note this expression must be explicitly discarded,
3593 by cse_insn, else it may end up in a REG_EQUAL note
3594 and "escape" to cause problems elsewhere. */
3595 return gen_rtx_CONST (GET_MODE (new), new);
3603 #ifdef NO_FUNCTION_CSE
3605 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3611 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3612 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3613 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3623 mode_arg0 = VOIDmode;
3625 /* Try folding our operands.
3626 Then see which ones have constant values known. */
3628 fmt = GET_RTX_FORMAT (code);
3629 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3632 rtx arg = XEXP (x, i);
3633 rtx folded_arg = arg, const_arg = 0;
3634 enum machine_mode mode_arg = GET_MODE (arg);
3635 rtx cheap_arg, expensive_arg;
3636 rtx replacements[2];
3638 int old_cost = COST_IN (XEXP (x, i), code);
3640 /* Most arguments are cheap, so handle them specially. */
3641 switch (GET_CODE (arg))
3644 /* This is the same as calling equiv_constant; it is duplicated here for speed. */
3646 if (REGNO_QTY_VALID_P (REGNO (arg)))
3648 int arg_q = REG_QTY (REGNO (arg));
3649 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3651 if (arg_ent->const_rtx != NULL_RTX
3652 && !REG_P (arg_ent->const_rtx)
3653 && GET_CODE (arg_ent->const_rtx) != PLUS)
3655 const_arg = gen_lowpart (GET_MODE (arg),
3656 arg_ent->const_rtx);
3671 folded_arg = prev_insn_cc0;
3672 mode_arg = prev_insn_cc0_mode;
3673 const_arg = equiv_constant (folded_arg);
3678 folded_arg = fold_rtx (arg, insn);
3679 const_arg = equiv_constant (folded_arg);
3682 /* For the first three operands, see if the operand
3683 is constant or equivalent to a constant. */
3687 folded_arg0 = folded_arg;
3688 const_arg0 = const_arg;
3689 mode_arg0 = mode_arg;
3692 folded_arg1 = folded_arg;
3693 const_arg1 = const_arg;
3696 const_arg2 = const_arg;
3700 /* Pick the least expensive of the folded argument and an
3701 equivalent constant argument. */
3702 if (const_arg == 0 || const_arg == folded_arg
3703 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3704 cheap_arg = folded_arg, expensive_arg = const_arg;
3706 else cheap_arg = const_arg, expensive_arg = folded_arg;
3708 /* Try to replace the operand with the cheapest of the two
3709 possibilities. If it doesn't work and this is either of the first
3710 two operands of a commutative operation, try swapping them.
3711 If THAT fails, try the more expensive, provided it is cheaper
3712 than what is already there. */
3714 if (cheap_arg == XEXP (x, i))
3717 if (insn == 0 && ! copied)
3723 /* Order the replacements from cheapest to most expensive. */
3724 replacements[0] = cheap_arg;
3725 replacements[1] = expensive_arg;
3727 for (j = 0; j < 2 && replacements[j]; j++)
3729 int new_cost = COST_IN (replacements[j], code);
3731 /* Stop if what existed before was cheaper. Prefer constants
3732 in the case of a tie. */
3733 if (new_cost > old_cost
3734 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3737 /* It's not safe to substitute the operand of a conversion
3738 operator with a constant, as the conversion's identity
3739 depends upon the mode of its operand. This optimization
3740 is handled by the call to simplify_unary_operation. */
3741 if (GET_RTX_CLASS (code) == RTX_UNARY
3742 && GET_MODE (replacements[j]) != mode_arg0
3743 && (code == ZERO_EXTEND
3744 || code == SIGN_EXTEND
3746 || code == FLOAT_TRUNCATE
3747 || code == FLOAT_EXTEND
3750 || code == UNSIGNED_FLOAT
3751 || code == UNSIGNED_FIX))
3754 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3757 if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3758 || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
3760 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3761 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3763 if (apply_change_group ())
3765 /* Swap them back to be invalid so that this loop can
3766 continue and flag them to be swapped back later. */
3769 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3781 /* Don't try to fold inside of a vector of expressions.
3782 Doing nothing is harmless. */
3786 /* If a commutative operation, place a constant integer as the second
3787 operand unless the first operand is also a constant integer. Otherwise,
3788 place any constant second unless the first operand is also a constant. */
3790 if (COMMUTATIVE_P (x))
3793 || swap_commutative_operands_p (const_arg0 ? const_arg0
3795 const_arg1 ? const_arg1
3798 rtx tem = XEXP (x, 0);
3800 if (insn == 0 && ! copied)
3806 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3807 validate_change (insn, &XEXP (x, 1), tem, 1);
3808 if (apply_change_group ())
3810 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3811 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3816 /* If X is an arithmetic operation, see if we can simplify it. */
3818 switch (GET_RTX_CLASS (code))
3824 /* We can't simplify extension ops unless we know the original mode. */
3826 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3827 && mode_arg0 == VOIDmode)
3830 /* If we had a CONST, strip it off and put it back later if we fold. */
3832 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3833 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3835 new = simplify_unary_operation (code, mode,
3836 const_arg0 ? const_arg0 : folded_arg0,
3838 /* NEG of PLUS could be converted into MINUS, but that causes
3839 expressions of the form
3840 (CONST (MINUS (CONST_INT) (SYMBOL_REF)))
3841 which many ports mistakenly treat as LEGITIMATE_CONSTANT_P.
3842 FIXME: those ports should be fixed. */
3843 if (new != 0 && is_const
3844 && GET_CODE (new) == PLUS
3845 && (GET_CODE (XEXP (new, 0)) == SYMBOL_REF
3846 || GET_CODE (XEXP (new, 0)) == LABEL_REF)
3847 && GET_CODE (XEXP (new, 1)) == CONST_INT)
3848 new = gen_rtx_CONST (mode, new);
3853 case RTX_COMM_COMPARE:
3854 /* See what items are actually being compared and set FOLDED_ARG[01]
3855 to those values and CODE to the actual comparison code. If any are
3856 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3857 do anything if both operands are already known to be constant. */
3859 /* ??? Vector mode comparisons are not supported yet. */
3860 if (VECTOR_MODE_P (mode))
3863 if (const_arg0 == 0 || const_arg1 == 0)
3865 struct table_elt *p0, *p1;
3866 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3867 enum machine_mode mode_arg1;
3869 #ifdef FLOAT_STORE_FLAG_VALUE
3870 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3872 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3873 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3874 false_rtx = CONST0_RTX (mode);
3878 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3879 &mode_arg0, &mode_arg1);
3880 const_arg0 = equiv_constant (folded_arg0);
3881 const_arg1 = equiv_constant (folded_arg1);
3883 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3884 what kinds of things are being compared, so we can't do
3885 anything with this comparison. */
3887 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3890 /* If we do not now have two constants being compared, see
3891 if we can nevertheless deduce some things about the comparison. */
3893 if (const_arg0 == 0 || const_arg1 == 0)
3895 /* Some addresses are known to be nonzero. We don't know
3896 their sign, but equality comparisons are known. */
3897 if (const_arg1 == const0_rtx
3898 && nonzero_address_p (folded_arg0))
3902 else if (code == NE)
3906 /* See if the two operands are the same. */
3908 if (folded_arg0 == folded_arg1
3909 || (REG_P (folded_arg0)
3910 && REG_P (folded_arg1)
3911 && (REG_QTY (REGNO (folded_arg0))
3912 == REG_QTY (REGNO (folded_arg1))))
3913 || ((p0 = lookup (folded_arg0,
3914 SAFE_HASH (folded_arg0, mode_arg0),
3916 && (p1 = lookup (folded_arg1,
3917 SAFE_HASH (folded_arg1, mode_arg0),
3919 && p0->first_same_value == p1->first_same_value))
3921 /* Sadly two equal NaNs are not equivalent. */
3922 if (!HONOR_NANS (mode_arg0))
3923 return ((code == EQ || code == LE || code == GE
3924 || code == LEU || code == GEU || code == UNEQ
3925 || code == UNLE || code == UNGE
3927 ? true_rtx : false_rtx);
3928 /* Take care for the FP compares we can resolve. */
3929 if (code == UNEQ || code == UNLE || code == UNGE)
3931 if (code == LTGT || code == LT || code == GT)
3935 /* If FOLDED_ARG0 is a register, see if the comparison we are
3936 doing now is either the same as we did before or the reverse
3937 (we only check the reverse if not floating-point). */
3938 else if (REG_P (folded_arg0))
3940 int qty = REG_QTY (REGNO (folded_arg0));
3942 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3944 struct qty_table_elem *ent = &qty_table[qty];
3946 if ((comparison_dominates_p (ent->comparison_code, code)
3947 || (! FLOAT_MODE_P (mode_arg0)
3948 && comparison_dominates_p (ent->comparison_code,
3949 reverse_condition (code))))
3950 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3952 && rtx_equal_p (ent->comparison_const,
3954 || (REG_P (folded_arg1)
3955 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3956 return (comparison_dominates_p (ent->comparison_code, code)
3957 ? true_rtx : false_rtx);
3963 /* If we are comparing against zero, see if the first operand is
3964 equivalent to an IOR with a constant. If so, we may be able to
3965 determine the result of this comparison. */
3967 if (const_arg1 == const0_rtx)
3969 rtx y = lookup_as_function (folded_arg0, IOR);
3973 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3974 && GET_CODE (inner_const) == CONST_INT
3975 && INTVAL (inner_const) != 0)
3977 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3978 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3979 && (INTVAL (inner_const)
3980 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3981 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3983 #ifdef FLOAT_STORE_FLAG_VALUE
3984 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3986 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3987 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3988 false_rtx = CONST0_RTX (mode);
4013 rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
4014 rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
4015 new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
4020 case RTX_COMM_ARITH:
4024 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4025 with that LABEL_REF as its second operand. If so, the result is
4026 the first operand of that MINUS. This handles switches with an
4027 ADDR_DIFF_VEC table. */
4028 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4031 rtx y = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4032 : lookup_as_function (folded_arg0, MINUS);
4034 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4035 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4038 /* Now try for a CONST of a MINUS like the above. */
4039 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4040 : lookup_as_function (folded_arg0, CONST))) != 0
4041 && GET_CODE (XEXP (y, 0)) == MINUS
4042 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4043 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4044 return XEXP (XEXP (y, 0), 0);
4047 /* Likewise if the operands are in the other order. */
4048 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4051 rtx y = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4052 : lookup_as_function (folded_arg1, MINUS);
4054 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4055 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4058 /* Now try for a CONST of a MINUS like the above. */
4059 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4060 : lookup_as_function (folded_arg1, CONST))) != 0
4061 && GET_CODE (XEXP (y, 0)) == MINUS
4062 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4063 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4064 return XEXP (XEXP (y, 0), 0);
4067 /* If second operand is a register equivalent to a negative
4068 CONST_INT, see if we can find a register equivalent to the
4069 positive constant. Make a MINUS if so. Don't do this for
4070 a non-negative constant since we might then alternate between
4071 choosing positive and negative constants. Having the positive
4072 constant previously-used is the more common case. Be sure
4073 the resulting constant is non-negative; if const_arg1 were
4074 the smallest negative number this would overflow: depending
4075 on the mode, this would either just be the same value (and
4076 hence not save anything) or be incorrect. */
4077 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4078 && INTVAL (const_arg1) < 0
4079 /* This used to test
4081 -INTVAL (const_arg1) >= 0
4083 But The Sun V5.0 compilers mis-compiled that test. So
4084 instead we test for the problematic value in a more direct
4085 manner and hope the Sun compilers get it correct. */
4086 && INTVAL (const_arg1) !=
4087 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4088 && REG_P (folded_arg1))
4090 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4092 struct table_elt *p = lookup (new_const, SAFE_HASH (new_const, mode), mode);
4095 for (p = p->first_same_value; p; p = p->next_same_value)
4097 return simplify_gen_binary (MINUS, mode, folded_arg0,
4098 canon_reg (p->exp, NULL_RTX));
4103 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4104 If so, produce (PLUS Z C2-C). */
4105 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4107 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4108 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4109 return fold_rtx (plus_constant (copy_rtx (y),
4110 -INTVAL (const_arg1)),
4117 case SMIN: case SMAX: case UMIN: case UMAX:
4118 case IOR: case AND: case XOR:
4120 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4121 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4122 is known to be of similar form, we may be able to replace the
4123 operation with a combined operation. This may eliminate the
4124 intermediate operation if every use is simplified in this way.
4125 Note that the similar optimization done by combine.c only works
4126 if the intermediate operation's result has only one reference. */
4128 if (REG_P (folded_arg0)
4129 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4132 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4133 rtx y = lookup_as_function (folded_arg0, code);
4135 enum rtx_code associate_code;
4139 || 0 == (inner_const
4140 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4141 || GET_CODE (inner_const) != CONST_INT
4142 /* If we have compiled a statement like
4143 "if (x == (x & mask1))", and now are looking at
4144 "x & mask2", we will have a case where the first operand
4145 of Y is the same as our first operand. Unless we detect
4146 this case, an infinite loop will result. */
4147 || XEXP (y, 0) == folded_arg0)
4150 /* Don't associate these operations if they are a PLUS with the
4151 same constant and it is a power of two. These might be doable
4152 with a pre- or post-increment. Similarly for two subtracts of
4153 identical powers of two with post decrement. */
4155 if (code == PLUS && const_arg1 == inner_const
4156 && ((HAVE_PRE_INCREMENT
4157 && exact_log2 (INTVAL (const_arg1)) >= 0)
4158 || (HAVE_POST_INCREMENT
4159 && exact_log2 (INTVAL (const_arg1)) >= 0)
4160 || (HAVE_PRE_DECREMENT
4161 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4162 || (HAVE_POST_DECREMENT
4163 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4166 /* Compute the code used to compose the constants. For example,
4167 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
4169 associate_code = (is_shift || code == MINUS ? PLUS : code);
4171 new_const = simplify_binary_operation (associate_code, mode,
4172 const_arg1, inner_const);
4177 /* If we are associating shift operations, don't let this
4178 produce a shift of the size of the object or larger.
4179 This could occur when we follow a sign-extend by a right
4180 shift on a machine that does a sign-extend as a pair of shifts. */
4183 if (is_shift && GET_CODE (new_const) == CONST_INT
4184 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4186 /* As an exception, we can turn an ASHIFTRT of this
4187 form into a shift of the number of bits - 1. */
4188 if (code == ASHIFTRT)
4189 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4194 y = copy_rtx (XEXP (y, 0));
4196 /* If Y contains our first operand (the most common way this
4197 can happen is if Y is a MEM), we would go into an infinite
4198 loop if we tried to fold it. So don't in that case. */
4200 if (! reg_mentioned_p (folded_arg0, y))
4201 y = fold_rtx (y, insn);
4203 return simplify_gen_binary (code, mode, y, new_const);
4207 case DIV: case UDIV:
4208 /* ??? The associative optimization performed immediately above is
4209 also possible for DIV and UDIV using associate_code of MULT.
4210 However, we would need extra code to verify that the
4211 multiplication does not overflow, that is, there is no overflow
4212 in the calculation of new_const. */
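/* Editor's note: an illustrative sketch (not part of GCC; names are
   hypothetical, and `unsigned int' is assumed to be 32 bits) of the
   overflow hazard described above.  (x / c1) / c2 equals x / (c1 * c2)
   only while c1 * c2 does not wrap around; here 6 * 0x30000000 wraps to
   0x20000000 and the two answers differ.  */

static int
cse_demo_udiv_association_unsafe (void)
{
  unsigned int x = 0xF0000000u, c1 = 6u, c2 = 0x30000000u;
  unsigned int two_divs = (x / c1) / c2;	/* 0 */
  unsigned int one_div = x / (c1 * c2);		/* c1 * c2 wrapped: 7 */
  return two_divs != one_div;			/* 1: the transform would be wrong */
}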
4219 new = simplify_binary_operation (code, mode,
4220 const_arg0 ? const_arg0 : folded_arg0,
4221 const_arg1 ? const_arg1 : folded_arg1);
4225 /* (lo_sum (high X) X) is simply X. */
4226 if (code == LO_SUM && const_arg0 != 0
4227 && GET_CODE (const_arg0) == HIGH
4228 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4233 case RTX_BITFIELD_OPS:
4234 new = simplify_ternary_operation (code, mode, mode_arg0,
4235 const_arg0 ? const_arg0 : folded_arg0,
4236 const_arg1 ? const_arg1 : folded_arg1,
4237 const_arg2 ? const_arg2 : XEXP (x, 2));
4244 return new ? new : x;
4247 /* Return a constant value currently equivalent to X.
4248 Return 0 if we don't know one. */
4251 equiv_constant (rtx x)
4254 && REGNO_QTY_VALID_P (REGNO (x)))
4256 int x_q = REG_QTY (REGNO (x));
4257 struct qty_table_elem *x_ent = &qty_table[x_q];
4259 if (x_ent->const_rtx)
4260 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
4263 if (x == 0 || CONSTANT_P (x))
4266 /* If X is a MEM, try to fold it outside the context of any insn to see if
4267 it might be equivalent to a constant. That handles the case where it
4268 is a constant-pool reference. Then try to look it up in the hash table
4269 in case it is something whose value we have seen before. */
4273 struct table_elt *elt;
4275 x = fold_rtx (x, NULL_RTX);
4279 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
4283 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4284 if (elt->is_const && CONSTANT_P (elt->exp))
4291 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4292 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4293 least-significant part of X.
4294 MODE specifies how big a part of X to return.
4296 If the requested operation cannot be done, 0 is returned.
4298 This is similar to gen_lowpart_general in emit-rtl.c. */
4301 gen_lowpart_if_possible (enum machine_mode mode, rtx x)
4303 rtx result = gen_lowpart_common (mode, x);
4309 /* This is the only other case we handle. */
4313 if (WORDS_BIG_ENDIAN)
4314 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4315 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4316 if (BYTES_BIG_ENDIAN)
4317 /* Adjust the address so that the address-after-the-data is unchanged. */
4319 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4320 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
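/* Editor's note: an illustrative sketch (not part of GCC; names are
   hypothetical) evaluating the offset computation above for one concrete
   case: a big-endian target with 4-byte words (UNITS_PER_WORD == 4),
   taking the 4-byte (SImode) low part of an 8-byte (DImode) MEM.  Both
   adjustments together give offset 4, the address of the least
   significant word.  */

static int
cse_demo_lowpart_offset (void)
{
  const int units_per_word = 4, orig_size = 8, req_size = 4;
  const int words_big_endian = 1, bytes_big_endian = 1;
  int offset = 0;

  if (words_big_endian)
    offset = ((orig_size > units_per_word ? orig_size : units_per_word)
	      - (req_size > units_per_word ? req_size : units_per_word));
  if (bytes_big_endian)
    offset -= ((req_size < units_per_word ? req_size : units_per_word)
	       - (orig_size < units_per_word ? orig_size : units_per_word));

  return offset;	/* 4 for this example */
}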
4322 new = adjust_address_nv (x, mode, offset);
4323 if (! memory_address_p (mode, XEXP (new, 0)))
4332 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4333 branch. It will be zero if not.
4335 In certain cases, this can cause us to add an equivalence. For example,
4336 if we are following the taken case of `if (i == 2)',
4338 we can add the fact that `i' and `2' are now equivalent.
4340 In any case, we can record that this comparison was passed. If the same
4341 comparison is seen later, we will know its value. */
4344 record_jump_equiv (rtx insn, int taken)
4346 int cond_known_true;
4349 enum machine_mode mode, mode0, mode1;
4350 int reversed_nonequality = 0;
4353 /* Ensure this is the right kind of insn. */
4354 if (! any_condjump_p (insn))
4356 set = pc_set (insn);
4358 /* See if this jump condition is known true or false. */
4360 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4362 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4364 /* Get the type of comparison being done and the operands being compared.
4365 If we had to reverse a non-equality condition, record that fact so we
4366 know that it isn't valid for floating-point. */
4367 code = GET_CODE (XEXP (SET_SRC (set), 0));
4368 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4369 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4371 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4372 if (! cond_known_true)
4374 code = reversed_comparison_code_parts (code, op0, op1, insn);
4376 /* Don't remember if we can't find the inverse. */
4377 if (code == UNKNOWN)
4381 /* The mode is the mode of the non-constant. */
4383 if (mode1 != VOIDmode)
4386 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
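/* Editor's note: an illustrative source-level sketch (not part of GCC;
   the function is hypothetical) of what recording a jump equivalence
   buys.  On the path where the branch condition "i == 2" is known to
   have been taken, `i' may be treated as the constant 2, so the multiply
   below can be folded to 20.  */

static int
cse_demo_jump_equiv (int i)
{
  if (i == 2)
    return i * 10;	/* equivalent to returning 20 on this path */
  return 0;
}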
4389 /* Yet another form of subreg creation. In this case, we want something in
4390 MODE, and we should assume OP has MODE iff it is naturally modeless. */
4393 record_jump_cond_subreg (enum machine_mode mode, rtx op)
4395 enum machine_mode op_mode = GET_MODE (op);
4396 if (op_mode == mode || op_mode == VOIDmode)
4398 return lowpart_subreg (mode, op, op_mode);
4401 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4402 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4403 Make any useful entries we can with that information. Called from
4404 above function and called recursively. */
4407 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4408 rtx op1, int reversed_nonequality)
4410 unsigned op0_hash, op1_hash;
4411 int op0_in_memory, op1_in_memory;
4412 struct table_elt *op0_elt, *op1_elt;
4414 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4415 we know that they are also equal in the smaller mode (this is also
4416 true for all smaller modes whether or not there is a SUBREG, but
4417 is not worth testing for with no SUBREG). */
4419 /* Note that GET_MODE (op0) may not equal MODE. */
4420 if (code == EQ && GET_CODE (op0) == SUBREG
4421 && (GET_MODE_SIZE (GET_MODE (op0))
4422 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4424 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4425 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4427 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4428 reversed_nonequality);
4431 if (code == EQ && GET_CODE (op1) == SUBREG
4432 && (GET_MODE_SIZE (GET_MODE (op1))
4433 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4435 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4436 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4438 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4439 reversed_nonequality);
4442 /* Similarly, if this is an NE comparison, and either is a SUBREG
4443 making a smaller mode, we know the whole thing is also NE. */
4445 /* Note that GET_MODE (op0) may not equal MODE;
4446 if we test MODE instead, we can get an infinite recursion
4447 alternating between two modes each wider than MODE. */
4449 if (code == NE && GET_CODE (op0) == SUBREG
4450 && subreg_lowpart_p (op0)
4451 && (GET_MODE_SIZE (GET_MODE (op0))
4452 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4454 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4455 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4457 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4458 reversed_nonequality);
4461 if (code == NE && GET_CODE (op1) == SUBREG
4462 && subreg_lowpart_p (op1)
4463 && (GET_MODE_SIZE (GET_MODE (op1))
4464 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4466 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4467 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4469 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4470 reversed_nonequality);
4473 /* Hash both operands. */
4476 hash_arg_in_memory = 0;
4477 op0_hash = HASH (op0, mode);
4478 op0_in_memory = hash_arg_in_memory;
4484 hash_arg_in_memory = 0;
4485 op1_hash = HASH (op1, mode);
4486 op1_in_memory = hash_arg_in_memory;
4491 /* Look up both operands. */
4492 op0_elt = lookup (op0, op0_hash, mode);
4493 op1_elt = lookup (op1, op1_hash, mode);
4495 /* If both operands are already equivalent or if they are not in the
4496 table but are identical, do nothing. */
4497 if ((op0_elt != 0 && op1_elt != 0
4498 && op0_elt->first_same_value == op1_elt->first_same_value)
4499 || op0 == op1 || rtx_equal_p (op0, op1))
4502 /* If we aren't setting two things equal all we can do is save this
4503 comparison. Similarly if this is floating-point. In the latter
4504 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4505 If we record the equality, we might inadvertently delete code
4506 whose intent was to change -0 to +0. */
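/* Editor's note: an illustrative sketch (not part of GCC; the helper is
   hypothetical and IEEE semantics are assumed) of the signed-zero hazard
   described above.  -0.0 and +0.0 compare equal, yet they are
   distinguishable values, so recording the EQ as a substitution could
   delete code whose whole purpose is turning -0.0 into +0.0.  */

static int
cse_demo_signed_zero (void)
{
  double neg_zero = -0.0, pos_zero = 0.0;
  int compare_equal = (neg_zero == pos_zero);		/* 1 */
  int same_value = (1.0 / neg_zero == 1.0 / pos_zero);	/* 0: -inf vs +inf */
  return compare_equal && !same_value;			/* 1 */
}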
4508 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4510 struct qty_table_elem *ent;
4513 /* If we reversed a floating-point comparison, if OP0 is not a
4514 register, or if OP1 is neither a register nor a constant, we can't do anything. */
4518 op1 = equiv_constant (op1);
4520 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4521 || !REG_P (op0) || op1 == 0)
4524 /* Put OP0 in the hash table if it isn't already. This gives it a
4525 new quantity number. */
4528 if (insert_regs (op0, NULL, 0))
4530 rehash_using_reg (op0);
4531 op0_hash = HASH (op0, mode);
4533 /* If OP0 is contained in OP1, this changes its hash code
4534 as well. Faster to rehash than to check, except
4535 for the simple case of a constant. */
4536 if (! CONSTANT_P (op1))
4537 op1_hash = HASH (op1, mode);
4540 op0_elt = insert (op0, NULL, op0_hash, mode);
4541 op0_elt->in_memory = op0_in_memory;
4544 qty = REG_QTY (REGNO (op0));
4545 ent = &qty_table[qty];
4547 ent->comparison_code = code;
4550 /* Look it up again--in case op0 and op1 are the same. */
4551 op1_elt = lookup (op1, op1_hash, mode);
4553 /* Put OP1 in the hash table so it gets a new quantity number. */
4556 if (insert_regs (op1, NULL, 0))
4558 rehash_using_reg (op1);
4559 op1_hash = HASH (op1, mode);
4562 op1_elt = insert (op1, NULL, op1_hash, mode);
4563 op1_elt->in_memory = op1_in_memory;
4566 ent->comparison_const = NULL_RTX;
4567 ent->comparison_qty = REG_QTY (REGNO (op1));
4571 ent->comparison_const = op1;
4572 ent->comparison_qty = -1;
4578 /* If either side is still missing an equivalence, make it now,
4579 then merge the equivalences. */
4583 if (insert_regs (op0, NULL, 0))
4585 rehash_using_reg (op0);
4586 op0_hash = HASH (op0, mode);
4589 op0_elt = insert (op0, NULL, op0_hash, mode);
4590 op0_elt->in_memory = op0_in_memory;
4595 if (insert_regs (op1, NULL, 0))
4597 rehash_using_reg (op1);
4598 op1_hash = HASH (op1, mode);
4601 op1_elt = insert (op1, NULL, op1_hash, mode);
4602 op1_elt->in_memory = op1_in_memory;
4605 merge_equiv_classes (op0_elt, op1_elt);
4608 /* CSE processing for one instruction.
4609 First simplify sources and addresses of all assignments
4610 in the instruction, using previously-computed equivalent values.
4611 Then install the new sources and destinations in the table
4612 of available values.
4614 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4615 the insn. It means that INSN is inside a libcall block. In this
4616 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4618 /* Data on one SET contained in the instruction. */
4622 /* The SET rtx itself. */
4624 /* The SET_SRC of the rtx (the original value, if it is changing). */
4626 /* The hash-table element for the SET_SRC of the SET. */
4627 struct table_elt *src_elt;
4628 /* Hash value for the SET_SRC. */
4630 /* Hash value for the SET_DEST. */
4632 /* The SET_DEST, with SUBREG, etc., stripped. */
4634 /* Nonzero if the SET_SRC is in memory. */
4636 /* Nonzero if the SET_SRC contains something
4637 whose value cannot be predicted and understood. */
4639 /* Original machine mode, in case it becomes a CONST_INT.
4640 The size of this field should match the size of the mode
4641 field of struct rtx_def (see rtl.h). */
4642 ENUM_BITFIELD(machine_mode) mode : 8;
4643 /* A constant equivalent for SET_SRC, if any. */
4645 /* Original SET_SRC value used for libcall notes. */
4647 /* Hash value of constant equivalent for SET_SRC. */
4648 unsigned src_const_hash;
4649 /* Table entry for constant equivalent for SET_SRC, if any. */
4650 struct table_elt *src_const_elt;
4654 cse_insn (rtx insn, rtx libcall_insn)
4656 rtx x = PATTERN (insn);
4662 /* Records what this insn does to set CC0. */
4663 rtx this_insn_cc0 = 0;
4664 enum machine_mode this_insn_cc0_mode = VOIDmode;
4668 struct table_elt *src_eqv_elt = 0;
4669 int src_eqv_volatile = 0;
4670 int src_eqv_in_memory = 0;
4671 unsigned src_eqv_hash = 0;
4673 struct set *sets = (struct set *) 0;
4677 /* Find all the SETs and CLOBBERs in this instruction.
4678 Record all the SETs in the array `set' and count them.
4679 Also determine whether there is a CLOBBER that invalidates
4680 all memory references, or all references at varying addresses. */
4684 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4686 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4687 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4688 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4692 if (GET_CODE (x) == SET)
4694 sets = alloca (sizeof (struct set));
4697 /* Ignore SETs that are unconditional jumps.
4698 They never need cse processing, so this does not hurt.
4699 The reason is not efficiency but rather
4700 so that we can test at the end for instructions
4701 that have been simplified to unconditional jumps
4702 and not be misled by unchanged instructions
4703 that were unconditional jumps to begin with. */
4704 if (SET_DEST (x) == pc_rtx
4705 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4708 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4709 The hard function value register is used only once, to copy to
4710 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4711 Ensure we invalidate the destination register. On the 80386 no
4712 other code would invalidate it since it is a fixed_reg.
4713 We need not check the return of apply_change_group; see canon_reg. */
4715 else if (GET_CODE (SET_SRC (x)) == CALL)
4717 canon_reg (SET_SRC (x), insn);
4718 apply_change_group ();
4719 fold_rtx (SET_SRC (x), insn);
4720 invalidate (SET_DEST (x), VOIDmode);
4725 else if (GET_CODE (x) == PARALLEL)
4727 int lim = XVECLEN (x, 0);
4729 sets = alloca (lim * sizeof (struct set));
4731 /* Find all regs explicitly clobbered in this insn,
4732 and ensure they are not replaced with any other regs
4733 elsewhere in this insn.
4734 When a reg that is clobbered is also used for input,
4735 we should presume that that is for a reason,
4736 and we should not substitute some other register
4737 which is not supposed to be clobbered.
4738 Therefore, this loop cannot be merged into the one below
4739 because a CALL may precede a CLOBBER and refer to the
4740 value clobbered. We must not let a canonicalization do
4741 anything in that case. */
4742 for (i = 0; i < lim; i++)
4744 rtx y = XVECEXP (x, 0, i);
4745 if (GET_CODE (y) == CLOBBER)
4747 rtx clobbered = XEXP (y, 0);
4749 if (REG_P (clobbered)
4750 || GET_CODE (clobbered) == SUBREG)
4751 invalidate (clobbered, VOIDmode);
4752 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4753 || GET_CODE (clobbered) == ZERO_EXTRACT)
4754 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4758 for (i = 0; i < lim; i++)
4760 rtx y = XVECEXP (x, 0, i);
4761 if (GET_CODE (y) == SET)
4763 /* As above, we ignore unconditional jumps and call-insns and
4764 ignore the result of apply_change_group. */
4765 if (GET_CODE (SET_SRC (y)) == CALL)
4767 canon_reg (SET_SRC (y), insn);
4768 apply_change_group ();
4769 fold_rtx (SET_SRC (y), insn);
4770 invalidate (SET_DEST (y), VOIDmode);
4772 else if (SET_DEST (y) == pc_rtx
4773 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4776 sets[n_sets++].rtl = y;
4778 else if (GET_CODE (y) == CLOBBER)
4780 /* If we clobber memory, canon the address.
4781 This does nothing when a register is clobbered
4782 because we have already invalidated the reg. */
4783 if (MEM_P (XEXP (y, 0)))
4784 canon_reg (XEXP (y, 0), NULL_RTX);
4786 else if (GET_CODE (y) == USE
4787 && ! (REG_P (XEXP (y, 0))
4788 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4789 canon_reg (y, NULL_RTX);
4790 else if (GET_CODE (y) == CALL)
4792 /* The result of apply_change_group can be ignored; see canon_reg. */
4794 canon_reg (y, insn);
4795 apply_change_group ();
4800 else if (GET_CODE (x) == CLOBBER)
4802 if (MEM_P (XEXP (x, 0)))
4803 canon_reg (XEXP (x, 0), NULL_RTX);
4806 /* Canonicalize a USE of a pseudo register or memory location. */
4807 else if (GET_CODE (x) == USE
4808 && ! (REG_P (XEXP (x, 0))
4809 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4810 canon_reg (XEXP (x, 0), NULL_RTX);
4811 else if (GET_CODE (x) == CALL)
4813 /* The result of apply_change_group can be ignored; see canon_reg. */
4814 canon_reg (x, insn);
4815 apply_change_group ();
4819 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4820 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4821 is handled specially for this case, and if it isn't set, then there will
4822 be no equivalence for the destination. */
4823 if (n_sets == 1 && REG_NOTES (insn) != 0
4824 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4825 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4826 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4828 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4829 XEXP (tem, 0) = src_eqv;
4832 /* Canonicalize sources and addresses of destinations.
4833 We do this in a separate pass to avoid problems when a MATCH_DUP is
4834 present in the insn pattern. In that case, we want to ensure that
4835 we don't break the duplicate nature of the pattern. So we will replace
4836 both operands at the same time. Otherwise, we would fail to find an
4837 equivalent substitution in the loop calling validate_change below.
4839 We used to suppress canonicalization of DEST if it appears in SRC,
4840 but we don't do this any more. */
4842 for (i = 0; i < n_sets; i++)
4844 rtx dest = SET_DEST (sets[i].rtl);
4845 rtx src = SET_SRC (sets[i].rtl);
4846 rtx new = canon_reg (src, insn);
4849 sets[i].orig_src = src;
4850 if ((REG_P (new) && REG_P (src)
4851 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4852 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4853 || (insn_code = recog_memoized (insn)) < 0
4854 || insn_data[insn_code].n_dups > 0)
4855 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4857 SET_SRC (sets[i].rtl) = new;
4859 if (GET_CODE (dest) == ZERO_EXTRACT)
4861 validate_change (insn, &XEXP (dest, 1),
4862 canon_reg (XEXP (dest, 1), insn), 1);
4863 validate_change (insn, &XEXP (dest, 2),
4864 canon_reg (XEXP (dest, 2), insn), 1);
4867 while (GET_CODE (dest) == SUBREG
4868 || GET_CODE (dest) == ZERO_EXTRACT
4869 || GET_CODE (dest) == STRICT_LOW_PART)
4870 dest = XEXP (dest, 0);
4873 canon_reg (dest, insn);
4876 /* Now that we have done all the replacements, we can apply the change
4877 group and see if they all work. Note that this will cause some
4878 canonicalizations that would have worked individually not to be applied
4879 because some other canonicalization didn't work, but this should not occur often.
4882 The result of apply_change_group can be ignored; see canon_reg. */
4884 apply_change_group ();
4886 /* Set sets[i].src_elt to the class each source belongs to.
4887 Detect assignments from or to volatile things
4888 and set sets[i] to zero so they will be ignored
4889 in the rest of this function.
4891 Nothing in this loop changes the hash table or the register chains. */
4893 for (i = 0; i < n_sets; i++)
4897 struct table_elt *elt = 0, *p;
4898 enum machine_mode mode;
4901 rtx src_related = 0;
4902 struct table_elt *src_const_elt = 0;
4903 int src_cost = MAX_COST;
4904 int src_eqv_cost = MAX_COST;
4905 int src_folded_cost = MAX_COST;
4906 int src_related_cost = MAX_COST;
4907 int src_elt_cost = MAX_COST;
4908 int src_regcost = MAX_COST;
4909 int src_eqv_regcost = MAX_COST;
4910 int src_folded_regcost = MAX_COST;
4911 int src_related_regcost = MAX_COST;
4912 int src_elt_regcost = MAX_COST;
4913 /* Set nonzero if we need to call force_const_mem on the
4914 contents of src_folded before using it. */
4915 int src_folded_force_flag = 0;
4917 dest = SET_DEST (sets[i].rtl);
4918 src = SET_SRC (sets[i].rtl);
4920 /* If SRC is a constant that has no machine mode,
4921 hash it with the destination's machine mode.
4922 This way we can keep different modes separate. */
4924 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4925 sets[i].mode = mode;
4929 enum machine_mode eqvmode = mode;
4930 if (GET_CODE (dest) == STRICT_LOW_PART)
4931 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4933 hash_arg_in_memory = 0;
4934 src_eqv_hash = HASH (src_eqv, eqvmode);
4936 /* Find the equivalence class for the equivalent expression. */
4939 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4941 src_eqv_volatile = do_not_record;
4942 src_eqv_in_memory = hash_arg_in_memory;
4945 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4946 value of the INNER register, not the destination. So it is not
4947 a valid substitution for the source. But save it for later. */
4948 if (GET_CODE (dest) == STRICT_LOW_PART)
4951 src_eqv_here = src_eqv;
4953 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4954 simplified result, which may not necessarily be valid. */
4955 src_folded = fold_rtx (src, insn);
4958 /* ??? This caused bad code to be generated for the m68k port with -O2.
4959 Suppose src is (CONST_INT -1), and that after truncation src_folded
4960 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4961 At the end we will add src and src_const to the same equivalence
4962 class. We now have 3 and -1 on the same equivalence class. This
4963 causes later instructions to be mis-optimized. */
4964 /* If storing a constant in a bitfield, pre-truncate the constant
4965 so we will be able to record it later. */
4966 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
4968 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4970 if (GET_CODE (src) == CONST_INT
4971 && GET_CODE (width) == CONST_INT
4972 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4973 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4975 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4976 << INTVAL (width)) - 1));
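/* Editor's note: an illustrative sketch (not part of GCC; the helper is
   hypothetical) of the pre-truncation above: only the low WIDTH bits of
   the constant survive a store into a WIDTH-bit bit-field, so that is
   the value worth recording.  Assumes 0 < width < 64.  */

static unsigned long long
cse_demo_bitfield_truncate (unsigned long long value, int width)
{
  unsigned long long high_mask = ~0ULL << width;  /* bits the field cannot hold */
  if (value & high_mask)
    value &= ~high_mask;			  /* keep only the low WIDTH bits */
  return value;		/* e.g. cse_demo_bitfield_truncate (~0ULL, 3) == 7 */
}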
4980 /* Compute SRC's hash code, and also notice if it
4981 should not be recorded at all. In that case,
4982 prevent any further processing of this assignment. */
4984 hash_arg_in_memory = 0;
4987 sets[i].src_hash = HASH (src, mode);
4988 sets[i].src_volatile = do_not_record;
4989 sets[i].src_in_memory = hash_arg_in_memory;
4991 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4992 a pseudo, do not record SRC. Using SRC as a replacement for
4993 anything else will be incorrect in that situation. Note that
4994 this usually occurs only for stack slots, in which case all the
4995 RTL would be referring to SRC, so we don't lose any optimization
4996 opportunities by not having SRC in the hash table. */
4999 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5001 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5002 sets[i].src_volatile = 1;
5005 /* It is no longer clear why we used to do this, but it doesn't
5006 appear to still be needed. So let's try without it since this
5007 code hurts cse'ing widened ops. */
5008 /* If source is a paradoxical subreg (such as QI treated as an SI),
5009 treat it as volatile. It may do the work of an SI in one context
5010 where the extra bits are not being used, but cannot replace an SI in general. */
5012 if (GET_CODE (src) == SUBREG
5013 && (GET_MODE_SIZE (GET_MODE (src))
5014 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5015 sets[i].src_volatile = 1;
5018 /* Locate all possible equivalent forms for SRC. Try to replace
5019 SRC in the insn with each cheaper equivalent.
5021 We have the following types of equivalents: SRC itself, a folded
5022 version, a value given in a REG_EQUAL note, or a value related
5025 Each of these equivalents may be part of an additional class
5026 of equivalents (if more than one is in the table, they must be in
5027 the same class; we check for this).
5029 If the source is volatile, we don't do any table lookups.
5031 We note any constant equivalent for possible later use in a REG_NOTE. */
5034 if (!sets[i].src_volatile)
5035 elt = lookup (src, sets[i].src_hash, mode);
5037 sets[i].src_elt = elt;
5039 if (elt && src_eqv_here && src_eqv_elt)
5041 if (elt->first_same_value != src_eqv_elt->first_same_value)
5043 /* The REG_EQUAL is indicating that two formerly distinct
5044 classes are now equivalent. So merge them. */
5045 merge_equiv_classes (elt, src_eqv_elt);
5046 src_eqv_hash = HASH (src_eqv, elt->mode);
5047 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5053 else if (src_eqv_elt)
5056 /* Try to find a constant somewhere and record it in `src_const'.
5057 Record its table element, if any, in `src_const_elt'. Look in
5058 any known equivalences first. (If the constant is not in the
5059 table, also set `sets[i].src_const_hash'). */
5061 for (p = elt->first_same_value; p; p = p->next_same_value)
5065 src_const_elt = elt;
5070 && (CONSTANT_P (src_folded)
5071 /* Consider (minus (label_ref L1) (label_ref L2)) as
5072 "constant" here so we will record it. This allows us
5073 to fold switch statements when an ADDR_DIFF_VEC is used. */
5074 || (GET_CODE (src_folded) == MINUS
5075 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5076 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5077 src_const = src_folded, src_const_elt = elt;
5078 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5079 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5081 /* If we don't know if the constant is in the table, get its
5082 hash code and look it up. */
5083 if (src_const && src_const_elt == 0)
5085 sets[i].src_const_hash = HASH (src_const, mode);
5086 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5089 sets[i].src_const = src_const;
5090 sets[i].src_const_elt = src_const_elt;
5092 /* If the constant and our source are both in the table, mark them as
5093 equivalent. Otherwise, if a constant is in the table but the source
5094 isn't, set ELT to it. */
5095 if (src_const_elt && elt
5096 && src_const_elt->first_same_value != elt->first_same_value)
5097 merge_equiv_classes (elt, src_const_elt);
5098 else if (src_const_elt && elt == 0)
5099 elt = src_const_elt;
5101 /* See if there is a register linearly related to a constant
5102 equivalent of SRC. */
5104 && (GET_CODE (src_const) == CONST
5105 || (src_const_elt && src_const_elt->related_value != 0)))
5107 src_related = use_related_value (src_const, src_const_elt);
5110 struct table_elt *src_related_elt
5111 = lookup (src_related, HASH (src_related, mode), mode);
5112 if (src_related_elt && elt)
5114 if (elt->first_same_value
5115 != src_related_elt->first_same_value)
5116 /* This can occur when we previously saw a CONST
5117 involving a SYMBOL_REF and then see the SYMBOL_REF
5118 twice. Merge the involved classes. */
5119 merge_equiv_classes (elt, src_related_elt);
5122 src_related_elt = 0;
5124 else if (src_related_elt && elt == 0)
5125 elt = src_related_elt;
5129 /* See if we have a CONST_INT that is already in a register in a wider mode. */
5132 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5133 && GET_MODE_CLASS (mode) == MODE_INT
5134 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5136 enum machine_mode wider_mode;
5138 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5139 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5140 && src_related == 0;
5141 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5143 struct table_elt *const_elt
5144 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5149 for (const_elt = const_elt->first_same_value;
5150 const_elt; const_elt = const_elt->next_same_value)
5151 if (REG_P (const_elt->exp))
5153 src_related = gen_lowpart (mode,
5160 /* Another possibility is that we have an AND with a constant in
5161 a mode narrower than a word. If so, it might have been generated
5162 as part of an "if" which would narrow the AND. If we already
5163 have done the AND in a wider mode, we can use a SUBREG of that value. */
5166 if (flag_expensive_optimizations && ! src_related
5167 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5168 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5170 enum machine_mode tmode;
5171 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5173 for (tmode = GET_MODE_WIDER_MODE (mode);
5174 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5175 tmode = GET_MODE_WIDER_MODE (tmode))
5177 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
5178 struct table_elt *larger_elt;
5182 PUT_MODE (new_and, tmode);
5183 XEXP (new_and, 0) = inner;
5184 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5185 if (larger_elt == 0)
5188 for (larger_elt = larger_elt->first_same_value;
5189 larger_elt; larger_elt = larger_elt->next_same_value)
5190 if (REG_P (larger_elt->exp))
5193 = gen_lowpart (mode, larger_elt->exp);
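/* Editor's note: an illustrative sketch (not part of GCC; the helper is
   hypothetical) of why a SUBREG of the wider AND can stand in for the
   narrow one: truncating after the wider AND gives the same bits as
   doing the AND in the narrow mode, provided the mask fits.  */

static int
cse_demo_narrow_and (unsigned int x)
{
  unsigned int wide = x & 0x00f0u;	/* AND done in the wider mode */
  unsigned short narrow
    = (unsigned short) ((unsigned short) x & 0x00f0u);	/* AND done narrow */
  return (unsigned short) wide == narrow;		/* always 1 */
}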
5203 #ifdef LOAD_EXTEND_OP
5204 /* See if a MEM has already been loaded with a widening operation;
5205 if it has, we can use a subreg of that. Many CISC machines
5206 also have such operations, but this is only likely to be
5207 beneficial on these machines. */
5209 if (flag_expensive_optimizations && src_related == 0
5210 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5211 && GET_MODE_CLASS (mode) == MODE_INT
5212 && MEM_P (src) && ! do_not_record
5213 && LOAD_EXTEND_OP (mode) != UNKNOWN)
5215 struct rtx_def memory_extend_buf;
5216 rtx memory_extend_rtx = &memory_extend_buf;
5217 enum machine_mode tmode;
5219 /* Set what we are trying to extend and the operation it might
5220 have been extended with. */
5221 memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
5222 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5223 XEXP (memory_extend_rtx, 0) = src;
5225 for (tmode = GET_MODE_WIDER_MODE (mode);
5226 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5227 tmode = GET_MODE_WIDER_MODE (tmode))
5229 struct table_elt *larger_elt;
5231 PUT_MODE (memory_extend_rtx, tmode);
5232 larger_elt = lookup (memory_extend_rtx,
5233 HASH (memory_extend_rtx, tmode), tmode);
5234 if (larger_elt == 0)
5237 for (larger_elt = larger_elt->first_same_value;
5238 larger_elt; larger_elt = larger_elt->next_same_value)
5239 if (REG_P (larger_elt->exp))
5241 src_related = gen_lowpart (mode,
5250 #endif /* LOAD_EXTEND_OP */
5252 if (src == src_folded)
5255 /* At this point, ELT, if nonzero, points to a class of expressions
5256 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5257 and SRC_RELATED, if nonzero, each contain additional equivalent
5258 expressions. Prune these latter expressions by deleting expressions
5259 already in the equivalence class.
5261 Check for an equivalent identical to the destination. If found,
5262 this is the preferred equivalent since it will likely lead to
5263 elimination of the insn. Indicate this by placing it in `src_related'. */
5267 elt = elt->first_same_value;
5268 for (p = elt; p; p = p->next_same_value)
5270 enum rtx_code code = GET_CODE (p->exp);
5272 /* If the expression is not valid, ignore it. Then we do not
5273 have to check for validity below. In most cases, we can use
5274 `rtx_equal_p', since canonicalization has already been done. */
5275 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
5278 /* Also skip paradoxical subregs, unless that's what we're looking for. */
5281 && (GET_MODE_SIZE (GET_MODE (p->exp))
5282 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5284 && GET_CODE (src) == SUBREG
5285 && GET_MODE (src) == GET_MODE (p->exp)
5286 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5287 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5290 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5292 else if (src_folded && GET_CODE (src_folded) == code
5293 && rtx_equal_p (src_folded, p->exp))
5295 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5296 && rtx_equal_p (src_eqv_here, p->exp))
5298 else if (src_related && GET_CODE (src_related) == code
5299 && rtx_equal_p (src_related, p->exp))
5302 /* This is the same as the destination of the insn; we want
5303 to prefer it. Copy it to src_related. The code below will
5304 then give it a negative cost. */
5305 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5309 /* Find the cheapest valid equivalent, trying all the available
5310 possibilities. Prefer items not in the hash table to ones
5311 that are when they are equal cost. Note that we can never
5312 worsen an insn as the current contents will also succeed.
5313 If we find an equivalent identical to the destination, use it as best,
5314 since this insn will probably be eliminated in that case. */
5317 if (rtx_equal_p (src, dest))
5318 src_cost = src_regcost = -1;
5321 src_cost = COST (src);
5322 src_regcost = approx_reg_cost (src);
5328 if (rtx_equal_p (src_eqv_here, dest))
5329 src_eqv_cost = src_eqv_regcost = -1;
5332 src_eqv_cost = COST (src_eqv_here);
5333 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5339 if (rtx_equal_p (src_folded, dest))
5340 src_folded_cost = src_folded_regcost = -1;
5343 src_folded_cost = COST (src_folded);
5344 src_folded_regcost = approx_reg_cost (src_folded);
5350 if (rtx_equal_p (src_related, dest))
5351 src_related_cost = src_related_regcost = -1;
5354 src_related_cost = COST (src_related);
5355 src_related_regcost = approx_reg_cost (src_related);
5359 /* If this was an indirect jump insn, a known label will really be
5360 cheaper even though it looks more expensive. */
5361 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5362 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5364 /* Terminate loop when replacement made. This must terminate since
5365 the current contents will be tested and will always be valid. */
5370 /* Skip invalid entries. */
5371 while (elt && !REG_P (elt->exp)
5372 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5373 elt = elt->next_same_value;
5375 /* A paradoxical subreg would be bad here: it'll be the right
5376 size, but later may be adjusted so that the upper bits aren't
5377 what we want. So reject it. */
5379 && GET_CODE (elt->exp) == SUBREG
5380 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5381 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5382 /* It is okay, though, if the rtx we're trying to match
5383 will ignore any of the bits we can't predict. */
5385 && GET_CODE (src) == SUBREG
5386 && GET_MODE (src) == GET_MODE (elt->exp)
5387 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5388 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5390 elt = elt->next_same_value;
5396 src_elt_cost = elt->cost;
5397 src_elt_regcost = elt->regcost;
5400 /* Find cheapest and skip it for the next time. For items
5401 of equal cost, use this order:
5402 src_folded, src, src_eqv, src_related and hash table entry. */
5404 && preferable (src_folded_cost, src_folded_regcost,
5405 src_cost, src_regcost) <= 0
5406 && preferable (src_folded_cost, src_folded_regcost,
5407 src_eqv_cost, src_eqv_regcost) <= 0
5408 && preferable (src_folded_cost, src_folded_regcost,
5409 src_related_cost, src_related_regcost) <= 0
5410 && preferable (src_folded_cost, src_folded_regcost,
5411 src_elt_cost, src_elt_regcost) <= 0)
5413 trial = src_folded, src_folded_cost = MAX_COST;
5414 if (src_folded_force_flag)
5416 rtx forced = force_const_mem (mode, trial);
5422 && preferable (src_cost, src_regcost,
5423 src_eqv_cost, src_eqv_regcost) <= 0
5424 && preferable (src_cost, src_regcost,
5425 src_related_cost, src_related_regcost) <= 0
5426 && preferable (src_cost, src_regcost,
5427 src_elt_cost, src_elt_regcost) <= 0)
5428 trial = src, src_cost = MAX_COST;
5429 else if (src_eqv_here
5430 && preferable (src_eqv_cost, src_eqv_regcost,
5431 src_related_cost, src_related_regcost) <= 0
5432 && preferable (src_eqv_cost, src_eqv_regcost,
5433 src_elt_cost, src_elt_regcost) <= 0)
5434 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5435 else if (src_related
5436 && preferable (src_related_cost, src_related_regcost,
5437 src_elt_cost, src_elt_regcost) <= 0)
5438 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5441 trial = copy_rtx (elt->exp);
5442 elt = elt->next_same_value;
5443 src_elt_cost = MAX_COST;
5446 /* We don't normally have an insn matching (set (pc) (pc)), so
5447 check for this separately here. We will delete such an insn below.
5450 For other cases such as a table jump or conditional jump
5451 where we know the ultimate target, go ahead and replace the
5452 operand. While that may not make a valid insn, we will
5453 reemit the jump below (and also insert any necessary barriers). */
5455 if (n_sets == 1 && dest == pc_rtx
5457 || (GET_CODE (trial) == LABEL_REF
5458 && ! condjump_p (insn))))
5460 /* Don't substitute non-local labels; this confuses the CFG. */
5461 if (GET_CODE (trial) == LABEL_REF
5462 && LABEL_REF_NONLOCAL_P (trial))
5465 SET_SRC (sets[i].rtl) = trial;
5466 cse_jumps_altered = 1;
5470 /* Look for a substitution that makes a valid insn. */
5471 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5473 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5475 /* If we just made a substitution inside a libcall, then we
5476 need to make the same substitution in any notes attached
5477 to the RETVAL insn. */
5479 && (REG_P (sets[i].orig_src)
5480 || GET_CODE (sets[i].orig_src) == SUBREG
5481 || MEM_P (sets[i].orig_src)))
5483 rtx note = find_reg_equal_equiv_note (libcall_insn);
5485 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
5490 /* The result of apply_change_group can be ignored; see
5493 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5494 apply_change_group ();
5498 /* If we previously found constant pool entries for
5499 constants and this is a constant, try making a
5500 pool entry. Put it in src_folded unless we already have done
5501 this since that is where it likely came from. */
5503 else if (constant_pool_entries_cost
5504 && CONSTANT_P (trial)
5505 /* Reject cases that will abort in decode_rtx_const.
5506 On the alpha when simplifying a switch, we get
5507 (const (truncate (minus (label_ref) (label_ref)))). */
5508 && ! (GET_CODE (trial) == CONST
5509 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5510 /* Likewise on IA-64, except without the truncate. */
5511 && ! (GET_CODE (trial) == CONST
5512 && GET_CODE (XEXP (trial, 0)) == MINUS
5513 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5514 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5516 || (!MEM_P (src_folded)
5517 && ! src_folded_force_flag))
5518 && GET_MODE_CLASS (mode) != MODE_CC
5519 && mode != VOIDmode)
5521 src_folded_force_flag = 1;
5523 src_folded_cost = constant_pool_entries_cost;
5524 src_folded_regcost = constant_pool_entries_regcost;
5528 src = SET_SRC (sets[i].rtl);
5530 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5531 However, there is an important exception: If both are registers
5532 that are not the head of their equivalence class, replace SET_SRC
5533 with the head of the class. If we do not do this, we will have
5534 both registers live over a portion of the basic block. This way,
5535 their lifetimes will likely abut instead of overlapping. */
5537 && REGNO_QTY_VALID_P (REGNO (dest)))
5539 int dest_q = REG_QTY (REGNO (dest));
5540 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5542 if (dest_ent->mode == GET_MODE (dest)
5543 && dest_ent->first_reg != REGNO (dest)
5544 && REG_P (src) && REGNO (src) == REGNO (dest)
5545 /* Don't do this if the original insn had a hard reg as
5546 SET_SRC or SET_DEST. */
5547 && (!REG_P (sets[i].src)
5548 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5549 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5550 /* We can't call canon_reg here because it won't do anything if
5551 SRC is a hard register. */
5553 int src_q = REG_QTY (REGNO (src));
5554 struct qty_table_elem *src_ent = &qty_table[src_q];
5555 int first = src_ent->first_reg;
5557 = (first >= FIRST_PSEUDO_REGISTER
5558 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5560 /* We must use validate-change even for this, because this
5561 might be a special no-op instruction, suitable only to
5563 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5566 /* If we had a constant that is cheaper than what we are now
5567 setting SRC to, use that constant. We ignored it when we
5568 thought we could make this into a no-op. */
5569 if (src_const && COST (src_const) < COST (src)
5570 && validate_change (insn, &SET_SRC (sets[i].rtl),
5577 /* If we made a change, recompute SRC values. */
5578 if (src != sets[i].src)
5582 hash_arg_in_memory = 0;
5584 sets[i].src_hash = HASH (src, mode);
5585 sets[i].src_volatile = do_not_record;
5586 sets[i].src_in_memory = hash_arg_in_memory;
5587 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5590 /* If this is a single SET, we are setting a register, and we have an
5591 equivalent constant, we want to add a REG_NOTE. We don't want
5592 to write a REG_EQUAL note for a constant pseudo since verifying that
5593 that pseudo hasn't been eliminated is a pain. Such a note also
5594 won't help anything.
5596 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5597 which can be created for a reference to a compile time computable
5598 entry in a jump table. */
5600 if (n_sets == 1 && src_const && REG_P (dest)
5601 && !REG_P (src_const)
5602 && ! (GET_CODE (src_const) == CONST
5603 && GET_CODE (XEXP (src_const, 0)) == MINUS
5604 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5605 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5607 /* We only want a REG_EQUAL note if src_const != src. */
5608 if (! rtx_equal_p (src, src_const))
5610 /* Make sure that the rtx is not shared. */
5611 src_const = copy_rtx (src_const);
5613 /* Record the actual constant value in a REG_EQUAL note,
5614 making a new one if one does not already exist. */
5615 set_unique_reg_note (insn, REG_EQUAL, src_const);
5619 /* Now deal with the destination. */
5622 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5623 while (GET_CODE (dest) == SUBREG
5624 || GET_CODE (dest) == ZERO_EXTRACT
5625 || GET_CODE (dest) == STRICT_LOW_PART)
5626 dest = XEXP (dest, 0);
5628 sets[i].inner_dest = dest;
5632 #ifdef PUSH_ROUNDING
5633 /* Stack pushes invalidate the stack pointer. */
5634 rtx addr = XEXP (dest, 0);
5635 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5636 && XEXP (addr, 0) == stack_pointer_rtx)
5637 invalidate (stack_pointer_rtx, Pmode);
5639 dest = fold_rtx (dest, insn);
5642 /* Compute the hash code of the destination now,
5643 before the effects of this instruction are recorded,
5644 since the register values used in the address computation
5645 are those before this instruction. */
5646 sets[i].dest_hash = HASH (dest, mode);
5648 /* Don't enter a bit-field in the hash table
5649 because the value in it after the store
5650 may not equal what was stored, due to truncation. */
5652 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5654 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5656 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5657 && GET_CODE (width) == CONST_INT
5658 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5659 && ! (INTVAL (src_const)
5660 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5661 /* Exception: if the value is constant,
5662 and it won't be truncated, record it. */
5666 /* This is chosen so that the destination will be invalidated
5667 but no new value will be recorded.
5668 We must invalidate because sometimes constant
5669 values can be recorded for bitfields. */
5670 sets[i].src_elt = 0;
5671 sets[i].src_volatile = 1;
5677 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete the insn. */
5679 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5681 /* One less use of the label this insn used to jump to. */
5683 cse_jumps_altered = 1;
5684 /* No more processing for this set. */
5688 /* If this SET is now setting PC to a label, we know it used to
5689 be a conditional or computed branch. */
5690 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5691 && !LABEL_REF_NONLOCAL_P (src))
5693 /* Now emit a BARRIER after the unconditional jump. */
5694 if (NEXT_INSN (insn) == 0
5695 || !BARRIER_P (NEXT_INSN (insn)))
5696 emit_barrier_after (insn);
5698 /* We reemit the jump in as many cases as possible just in
5699 case the form of an unconditional jump is significantly
5700 different from a computed jump or conditional jump.
5702 If this insn has multiple sets, then reemitting the
5703 jump is nontrivial. So instead we just force rerecognition
5704 and hope for the best. */
5709 new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5710 JUMP_LABEL (new) = XEXP (src, 0);
5711 LABEL_NUSES (XEXP (src, 0))++;
5713 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5714 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5717 XEXP (note, 1) = NULL_RTX;
5718 REG_NOTES (new) = note;
5724 /* Now emit a BARRIER after the unconditional jump. */
5725 if (NEXT_INSN (insn) == 0
5726 || !BARRIER_P (NEXT_INSN (insn)))
5727 emit_barrier_after (insn);
5730 INSN_CODE (insn) = -1;
5732 /* Do not bother deleting any unreachable code;
5733 let jump/flow do that. */
5735 cse_jumps_altered = 1;
5739 /* If destination is volatile, invalidate it and then do no further
5740 processing for this assignment. */
5742 else if (do_not_record)
5744 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5745 invalidate (dest, VOIDmode);
5746 else if (MEM_P (dest))
5747 invalidate (dest, VOIDmode);
5748 else if (GET_CODE (dest) == STRICT_LOW_PART
5749 || GET_CODE (dest) == ZERO_EXTRACT)
5750 invalidate (XEXP (dest, 0), GET_MODE (dest));
5754 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5755 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5758 /* If setting CC0, record what it was set to, or a constant, if it
5759 is equivalent to a constant. If it is being set to a floating-point
5760 value, make a COMPARE with the appropriate constant of 0. If we
5761 don't do this, later code can interpret this as a test against
5762 const0_rtx, which can cause problems if we try to put it into an
5763 insn as a floating-point operand. */
5764 if (dest == cc0_rtx)
5766 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5767 this_insn_cc0_mode = mode;
5768 if (FLOAT_MODE_P (mode))
5769 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5775 /* Now enter all non-volatile source expressions in the hash table
5776 if they are not already present.
5777 Record their equivalence classes in src_elt.
5778 This way we can insert the corresponding destinations into
5779 the same classes even if the actual sources are no longer in them
5780 (having been invalidated). */
5782 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5783 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5785 struct table_elt *elt;
5786 struct table_elt *classp = sets[0].src_elt;
5787 rtx dest = SET_DEST (sets[0].rtl);
5788 enum machine_mode eqvmode = GET_MODE (dest);
5790 if (GET_CODE (dest) == STRICT_LOW_PART)
5792 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5795 if (insert_regs (src_eqv, classp, 0))
5797 rehash_using_reg (src_eqv);
5798 src_eqv_hash = HASH (src_eqv, eqvmode);
5800 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5801 elt->in_memory = src_eqv_in_memory;
5804 /* Check to see if src_eqv_elt is the same as a set source which
5805 does not yet have an elt, and if so set the elt of the set source to src_eqv_elt. */
5807 for (i = 0; i < n_sets; i++)
5808 if (sets[i].rtl && sets[i].src_elt == 0
5809 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5810 sets[i].src_elt = src_eqv_elt;
5813 for (i = 0; i < n_sets; i++)
5814 if (sets[i].rtl && ! sets[i].src_volatile
5815 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5817 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5819 /* REG_EQUAL in setting a STRICT_LOW_PART
5820 gives an equivalent for the entire destination register,
5821 not just for the subreg being stored in now.
5822 This is a more interesting equivalence, so we arrange later
5823 to treat the entire reg as the destination. */
5824 sets[i].src_elt = src_eqv_elt;
5825 sets[i].src_hash = src_eqv_hash;
5829 /* Insert source and constant equivalent into hash table, if not already present. */
5831 struct table_elt *classp = src_eqv_elt;
5832 rtx src = sets[i].src;
5833 rtx dest = SET_DEST (sets[i].rtl);
5834 enum machine_mode mode
5835 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5837 /* It's possible that we have a source value known to be
5838 constant but don't have a REG_EQUAL note on the insn.
5839 Lack of a note will mean src_eqv_elt will be NULL. This
5840 can happen where we've generated a SUBREG to access a
5841 CONST_INT that is already in a register in a wider mode.
5842 Ensure that the source expression is put in the proper constant class. */
5845 classp = sets[i].src_const_elt;
5847 if (sets[i].src_elt == 0)
5849 /* Don't put a hard register source into the table if this is
5850 the last insn of a libcall. In this case, we only need
5851 to put src_eqv_elt in src_elt. */
5852 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5854 struct table_elt *elt;
5856 /* Note that these insert_regs calls cannot remove
5857 any of the src_elt's, because they would have failed to
5858 match if not still valid. */
5859 if (insert_regs (src, classp, 0))
5861 rehash_using_reg (src);
5862 sets[i].src_hash = HASH (src, mode);
5864 elt = insert (src, classp, sets[i].src_hash, mode);
5865 elt->in_memory = sets[i].src_in_memory;
5866 sets[i].src_elt = classp = elt;
5869 sets[i].src_elt = classp;
5871 if (sets[i].src_const && sets[i].src_const_elt == 0
5872 && src != sets[i].src_const
5873 && ! rtx_equal_p (sets[i].src_const, src))
5874 sets[i].src_elt = insert (sets[i].src_const, classp,
5875 sets[i].src_const_hash, mode);
5878 else if (sets[i].src_elt == 0)
5879 /* If we did not insert the source into the hash table (e.g., it was
5880 volatile), note the equivalence class for the REG_EQUAL value, if any,
5881 so that the destination goes into that class. */
5882 sets[i].src_elt = src_eqv_elt;
5884 invalidate_from_clobbers (x);
5886 /* Some registers are invalidated by subroutine calls. Memory is
5887 invalidated by non-constant calls. */
5891 if (! CONST_OR_PURE_CALL_P (insn))
5892 invalidate_memory ();
5893 invalidate_for_call ();
5896 /* Now invalidate everything set by this instruction.
5897 If a SUBREG or other funny destination is being set,
5898 sets[i].rtl is still nonzero, so here we invalidate the reg
5899 a part of which is being set. */
5901 for (i = 0; i < n_sets; i++)
5904 /* We can't use the inner dest, because the mode associated with
5905 a ZERO_EXTRACT is significant. */
5906 rtx dest = SET_DEST (sets[i].rtl);
5908 /* Needed for registers to remove the register from its
5909 previous quantity's chain.
5910 Needed for memory if this is a nonvarying address, unless
5911 we have just done an invalidate_memory that covers even those. */
5912 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5913 invalidate (dest, VOIDmode);
5914 else if (MEM_P (dest))
5915 invalidate (dest, VOIDmode);
5916 else if (GET_CODE (dest) == STRICT_LOW_PART
5917 || GET_CODE (dest) == ZERO_EXTRACT)
5918 invalidate (XEXP (dest, 0), GET_MODE (dest));
5921 /* A volatile ASM invalidates everything. */
5922 if (NONJUMP_INSN_P (insn)
5923 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5924 && MEM_VOLATILE_P (PATTERN (insn)))
5925 flush_hash_table ();
5927 /* Make sure registers mentioned in destinations
5928 are safe for use in an expression to be inserted.
5929 This removes from the hash table
5930 any invalid entry that refers to one of these registers.
5932 We don't care about the return value from mention_regs because
5933 we are going to hash the SET_DEST values unconditionally. */
5935 for (i = 0; i < n_sets; i++)
5939 rtx x = SET_DEST (sets[i].rtl);
5945 /* We used to rely on all references to a register becoming
5946 inaccessible when a register changes to a new quantity,
5947 since that changes the hash code. However, that is not
5948 safe, since after HASH_SIZE new quantities we get a
5949 hash 'collision' of a register with its own invalid
5950 entries. And since SUBREGs have been changed not to
5951 change their hash code with the hash code of the register,
5952 it wouldn't work any longer at all. So we have to check
5953 for any invalid references lying around now.
5954 This code is similar to the REG case in mention_regs,
5955 but it knows that reg_tick has been incremented, and
5956 it leaves reg_in_table as -1. */
5957 unsigned int regno = REGNO (x);
5958 unsigned int endregno
5959 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5960 : hard_regno_nregs[regno][GET_MODE (x)]);
5963 for (i = regno; i < endregno; i++)
5965 if (REG_IN_TABLE (i) >= 0)
5967 remove_invalid_refs (i);
5968 REG_IN_TABLE (i) = -1;
5975 /* We may have just removed some of the src_elt's from the hash table.
5976 So replace each one with the current head of the same class. */
5978 for (i = 0; i < n_sets; i++)
5981 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5982 /* If elt was removed, find current head of same class,
5983 or 0 if nothing remains of that class. */
5985 struct table_elt *elt = sets[i].src_elt;
5987 while (elt && elt->prev_same_value)
5988 elt = elt->prev_same_value;
5990 while (elt && elt->first_same_value == 0)
5991 elt = elt->next_same_value;
5992 sets[i].src_elt = elt ? elt->first_same_value : 0;
5996 /* Now insert the destinations into their equivalence classes. */
5998 for (i = 0; i < n_sets; i++)
6001 rtx dest = SET_DEST (sets[i].rtl);
6002 struct table_elt *elt;
6004 /* Don't record value if we are not supposed to risk allocating
6005 floating-point values in registers that might be wider than memory. */
6007 if ((flag_float_store
6009 && FLOAT_MODE_P (GET_MODE (dest)))
6010 /* Don't record BLKmode values, because we don't know the
6011 size of it, and can't be sure that other BLKmode values
6012 have the same or smaller size. */
6013 || GET_MODE (dest) == BLKmode
6014 /* Don't record values of destinations set inside a libcall block
6015 since we might delete the libcall. Things should have been set
6016 up so we won't want to reuse such a value, but we play it safe here. */
6019 /* If we didn't put a REG_EQUAL value or a source into the hash
6020 table, there is no point in recording DEST. */
6021 || sets[i].src_elt == 0
6022 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6023 or SIGN_EXTEND, don't record DEST since it can cause
6024 some tracking to be wrong.
6026 ??? Think about this more later. */
6027 || (GET_CODE (dest) == SUBREG
6028 && (GET_MODE_SIZE (GET_MODE (dest))
6029 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6030 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6031 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6034 /* STRICT_LOW_PART isn't part of the value BEING set,
6035 and neither is the SUBREG inside it.
6036 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6037 if (GET_CODE (dest) == STRICT_LOW_PART)
6038 dest = SUBREG_REG (XEXP (dest, 0));
6040 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
6041 /* Registers must also be inserted into chains for quantities. */
6042 if (insert_regs (dest, sets[i].src_elt, 1))
6044 /* If `insert_regs' changes something, the hash code must be recalculated. */
6046 rehash_using_reg (dest);
6047 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6050 elt = insert (dest, sets[i].src_elt,
6051 sets[i].dest_hash, GET_MODE (dest));
6053 elt->in_memory = (MEM_P (sets[i].inner_dest)
6054 && !MEM_READONLY_P (sets[i].inner_dest));
6056 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6057 narrower than M2, and both M1 and M2 are the same number of words,
6058 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6059 make that equivalence as well.
6061 However, BAR may have equivalences for which gen_lowpart
6062 will produce a simpler value than gen_lowpart applied to
6063 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6064 BAR's equivalences. If we don't get a simplified form, make
6065 the SUBREG. It will not be used in an equivalence, but will
6066 cause two similar assignments to be detected.
6068 Note the loop below will find SUBREG_REG (DEST) since we have
6069 already entered SRC and DEST of the SET in the table. */
6071 if (GET_CODE (dest) == SUBREG
6072 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6074 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6075 && (GET_MODE_SIZE (GET_MODE (dest))
6076 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6077 && sets[i].src_elt != 0)
6079 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6080 struct table_elt *elt, *classp = 0;
6082 for (elt = sets[i].src_elt->first_same_value; elt;
6083 elt = elt->next_same_value)
6087 struct table_elt *src_elt;
6090 /* Ignore invalid entries. */
6091 if (!REG_P (elt->exp)
6092 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
6095 /* We may have already been playing subreg games. If the
6096 mode is already correct for the destination, use it. */
6097 if (GET_MODE (elt->exp) == new_mode)
6101 /* Calculate big endian correction for the SUBREG_BYTE.
6102 We have already checked that M1 (GET_MODE (dest))
6103 is not narrower than M2 (new_mode). */
6104 if (BYTES_BIG_ENDIAN)
6105 byte = (GET_MODE_SIZE (GET_MODE (dest))
6106 - GET_MODE_SIZE (new_mode));
6108 new_src = simplify_gen_subreg (new_mode, elt->exp,
6109 GET_MODE (dest), byte);
6112 /* The call to simplify_gen_subreg fails if the value
6113 is VOIDmode, yet we can't do any simplification, e.g.
6114 for EXPR_LISTs denoting function call results.
6115 It is invalid to construct a SUBREG with a VOIDmode
6116 SUBREG_REG, hence a zero new_src means we can't do
6117 this substitution. */
6121 src_hash = HASH (new_src, new_mode);
6122 src_elt = lookup (new_src, src_hash, new_mode);
6124 /* Put the new source in the hash table if it isn't already there. */
6128 if (insert_regs (new_src, classp, 0))
6130 rehash_using_reg (new_src);
6131 src_hash = HASH (new_src, new_mode);
6133 src_elt = insert (new_src, classp, src_hash, new_mode);
6134 src_elt->in_memory = elt->in_memory;
6136 else if (classp && classp != src_elt->first_same_value)
6137 /* Show that two things that we've seen before are
6138 actually the same. */
6139 merge_equiv_classes (src_elt, classp);
6141 classp = src_elt->first_same_value;
6142 /* Ignore invalid entries. */
6144 && !REG_P (classp->exp)
6145 && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
6146 classp = classp->next_same_value;
6151 /* Special handling for (set REG0 REG1) where REG0 is the
6152 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6153 be used in the sequel, so (if easily done) change this insn to
6154 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6155 that computed their value. Then REG1 will become a dead store
6156 and won't cloud the situation for later optimizations.
6158 Do not make this change if REG1 is a hard register, because it will
6159 then be used in the sequel and we may be changing a two-operand insn
6160 into a three-operand insn.
6162 Also do not do this if we are operating on a copy of INSN.
6164 Also don't do this if INSN ends a libcall; this would cause an unrelated
6165 register to be set in the middle of a libcall, and we then get bad code
6166 if the libcall is deleted. */
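/* An illustrative sketch (not from the original code): suppose reg 100 is the
   cheapest member of its class and the stream contains

	(insn 10 (set (reg:SI 101) (plus:SI (reg:SI 102) (const_int 4))))
	(insn 11 (set (reg:SI 100) (reg:SI 101)))

   The change group below rewrites this to

	(insn 10 (set (reg:SI 100) (plus:SI (reg:SI 102) (const_int 4))))
	(insn 11 (set (reg:SI 101) (reg:SI 100)))

   so that, if reg 101 is never used afterwards, insn 11 becomes a trivially
   dead store.  */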
6168 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
6169 && NEXT_INSN (PREV_INSN (insn)) == insn
6170 && REG_P (SET_SRC (sets[0].rtl))
6171 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6172 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6174 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6175 struct qty_table_elem *src_ent = &qty_table[src_q];
6177 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6178 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6181 /* Scan for the previous nonnote insn, but stop at a basic block boundary. */
6185 prev = PREV_INSN (prev);
6187 while (prev && NOTE_P (prev)
6188 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6190 /* Do not swap the registers around if the previous instruction
6191 attaches a REG_EQUIV note to REG1.
6193 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6194 from the pseudo that originally shadowed an incoming argument
6195 to another register. Some uses of REG_EQUIV might rely on it
6196 being attached to REG1 rather than REG2.
6198 This section previously turned the REG_EQUIV into a REG_EQUAL
6199 note. We cannot do that because REG_EQUIV may provide an
6200 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
6202 if (prev != 0 && NONJUMP_INSN_P (prev)
6203 && GET_CODE (PATTERN (prev)) == SET
6204 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6205 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6207 rtx dest = SET_DEST (sets[0].rtl);
6208 rtx src = SET_SRC (sets[0].rtl);
6211 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6212 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6213 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6214 apply_change_group ();
6216 /* If INSN has a REG_EQUAL note, and this note mentions
6217 REG0, then we must delete it, because the value in
6218 REG0 has changed. If the note's value is REG1, we must
6219 also delete it because that is now this insn's dest. */
6220 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6222 && (reg_mentioned_p (dest, XEXP (note, 0))
6223 || rtx_equal_p (src, XEXP (note, 0))))
6224 remove_note (insn, note);
6229 /* If this is a conditional jump insn, record any known equivalences due to
6230 the condition being tested. */
6233 && n_sets == 1 && GET_CODE (x) == SET
6234 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6235 record_jump_equiv (insn, 0);
6238 /* If the previous insn set CC0 and this insn no longer references CC0,
6239 delete the previous insn. Here we use the fact that nothing expects CC0
6240 to be valid over an insn, which is true until the final pass. */
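/* An illustrative sketch (not in the original code): if the previous insn was

	(set (cc0) (compare (reg:SI 100) (const_int 0)))

   and CSE has just turned the conditional jump that consumed it into an
   unconditional jump (or a nop), the pattern of INSN no longer mentions cc0,
   so the now-useless comparison is deleted here.  */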
6241 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6242 && (tem = single_set (prev_insn)) != 0
6243 && SET_DEST (tem) == cc0_rtx
6244 && ! reg_mentioned_p (cc0_rtx, x))
6245 delete_insn (prev_insn);
6247 prev_insn_cc0 = this_insn_cc0;
6248 prev_insn_cc0_mode = this_insn_cc0_mode;
6253 /* Remove from the hash table all expressions that reference memory. */
6256 invalidate_memory (void)
6259 struct table_elt *p, *next;
6261 for (i = 0; i < HASH_SIZE; i++)
6262 for (p = table[i]; p; p = next)
6264 next = p->next_same_hash;
6266 remove_from_table (p, i);
6270 /* If ADDR is an address that implicitly affects the stack pointer, return
6271 1 and update the register tables to show the effect. Else, return 0. */
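/* For example (illustrative only), a push address such as

	(mem:SI (pre_dec:SI (reg:SI sp)))

   passes ADDR = (pre_dec:SI (reg:SI sp)) through here; the RTX_AUTOINC test
   below matches, REG_TICK for the stack pointer is bumped so stale table
   entries no longer look valid, and 1 is returned.  */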
6274 addr_affects_sp_p (rtx addr)
6276 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
6277 && REG_P (XEXP (addr, 0))
6278 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6280 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6282 REG_TICK (STACK_POINTER_REGNUM)++;
6283 /* Is it possible to use a subreg of SP? */
6284 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6287 /* This should be *very* rare. */
6288 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6289 invalidate (stack_pointer_rtx, VOIDmode);
6297 /* Perform invalidation on the basis of everything about an insn
6298 except for invalidating the actual places that are SET in it.
6299 This includes the places CLOBBERed, and anything that might
6300 alias with something that is SET or CLOBBERed.
6302 X is the pattern of the insn. */
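/* An illustrative sketch (not from the original sources; the register number
   is made up): given a pattern such as

	(parallel [(set (reg:SI 100) (reg:SI 101))
		   (clobber (reg:CC 17))])

   the PARALLEL arm below finds the CLOBBER and invalidates reg 17, while the
   SET itself is left to the normal set-processing code.  */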
6305 invalidate_from_clobbers (rtx x)
6307 if (GET_CODE (x) == CLOBBER)
6309 rtx ref = XEXP (x, 0);
6312 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6314 invalidate (ref, VOIDmode);
6315 else if (GET_CODE (ref) == STRICT_LOW_PART
6316 || GET_CODE (ref) == ZERO_EXTRACT)
6317 invalidate (XEXP (ref, 0), GET_MODE (ref));
6320 else if (GET_CODE (x) == PARALLEL)
6323 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6325 rtx y = XVECEXP (x, 0, i);
6326 if (GET_CODE (y) == CLOBBER)
6328 rtx ref = XEXP (y, 0);
6329 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6331 invalidate (ref, VOIDmode);
6332 else if (GET_CODE (ref) == STRICT_LOW_PART
6333 || GET_CODE (ref) == ZERO_EXTRACT)
6334 invalidate (XEXP (ref, 0), GET_MODE (ref));
6340 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6341 and replace any registers in them with either an equivalent constant
6342 or the canonical form of the register. If we are inside an address,
6343 only do this if the address remains valid.
6345 OBJECT is 0 except when within a MEM in which case it is the MEM.
6347 Return the replacement for X. */
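/* For example (illustrative only): if a REG_EQUAL note contains
   (plus:SI (reg:SI 100) (const_int 8)) and reg 100 is currently known to hold
   (symbol_ref "base"), the REG case below substitutes the constant, leaving
   (plus:SI (symbol_ref "base") (const_int 8)) in the note; registers with no
   constant equivalence are merely canonicalized via canon_reg.  */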
6350 cse_process_notes (rtx x, rtx object)
6352 enum rtx_code code = GET_CODE (x);
6353 const char *fmt = GET_RTX_FORMAT (code);
6370 validate_change (x, &XEXP (x, 0),
6371 cse_process_notes (XEXP (x, 0), x), 0);
6376 if (REG_NOTE_KIND (x) == REG_EQUAL)
6377 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6379 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6386 rtx new = cse_process_notes (XEXP (x, 0), object);
6387 /* We don't substitute VOIDmode constants into these rtx,
6388 since they would impede folding. */
6389 if (GET_MODE (new) != VOIDmode)
6390 validate_change (object, &XEXP (x, 0), new, 0);
6395 i = REG_QTY (REGNO (x));
6397 /* Return a constant or a constant register. */
6398 if (REGNO_QTY_VALID_P (REGNO (x)))
6400 struct qty_table_elem *ent = &qty_table[i];
6402 if (ent->const_rtx != NULL_RTX
6403 && (CONSTANT_P (ent->const_rtx)
6404 || REG_P (ent->const_rtx)))
6406 rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
6412 /* Otherwise, canonicalize this register. */
6413 return canon_reg (x, NULL_RTX);
6419 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6421 validate_change (object, &XEXP (x, i),
6422 cse_process_notes (XEXP (x, i), object), 0);
6427 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6428 since they are done elsewhere. This function is called via note_stores. */
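/* An illustrative sketch (not part of the original code):
   invalidate_skipped_block below hands each skipped insn's pattern to

	note_stores (PATTERN (insn), invalidate_skipped_set, NULL);

   so a skipped (set (mem:BLK ...) ...) flushes all memory equivalences, while
   a skipped (set (reg:SI 100) ...) just invalidates reg 100.  */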
6431 invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6433 enum rtx_code code = GET_CODE (dest);
6436 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6437 /* There are times when an address can appear varying and be a PLUS
6438 during this scan when it would be a fixed address were we to know
6439 the proper equivalences. So invalidate all memory if there is
6440 a BLKmode or nonscalar memory reference or a reference to a
6441 variable address. */
6442 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6443 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6445 invalidate_memory ();
6449 if (GET_CODE (set) == CLOBBER
6454 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6455 invalidate (XEXP (dest, 0), GET_MODE (dest));
6456 else if (code == REG || code == SUBREG || code == MEM)
6457 invalidate (dest, VOIDmode);
6460 /* Invalidate all insns from START up to the end of the function or the
6461 next label. This is called when we wish to CSE around a block that is
6462 conditionally executed. */
6465 invalidate_skipped_block (rtx start)
6469 for (insn = start; insn && !LABEL_P (insn);
6470 insn = NEXT_INSN (insn))
6472 if (! INSN_P (insn))
6477 if (! CONST_OR_PURE_CALL_P (insn))
6478 invalidate_memory ();
6479 invalidate_for_call ();
6482 invalidate_from_clobbers (PATTERN (insn));
6483 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6487 /* Find the end of INSN's basic block and return its range,
6488 the total number of SETs in all the insns of the block, the last insn of the
6489 block, and the branch path.
6491 The branch path indicates which branches should be followed. If a nonzero
6492 path size is specified, the block should be rescanned and a different set
6493 of branches will be taken. The branch path is only used if
6494 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6496 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6497 used to describe the block. It is filled in with the information about
6498 the current block. The incoming structure's branch path, if any, is used
6499 to construct the output branch path. */
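/* An illustrative sketch (not from the original sources) of the two path
   shapes recorded below.  Following a jump (PATH_TAKEN):

	insn:	(jump_insn ... -> L1)	; only use of L1, preceded by a BARRIER
	...
	L1:	...			; scanning continues here

   Branching around a block (PATH_AROUND):

	insn:	(jump_insn ... -> L2)
	...				; no other branches into this code
	L2:	...			; block is skipped and invalidated

   Each taken entry stores the branch insn and its status in DATA->PATH.  */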
6502 cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6503 int follow_jumps, int skip_blocks)
6507 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6508 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6509 int path_size = data->path_size;
6513 /* Update the previous branch path, if any. If the last branch was
6514 previously PATH_TAKEN, mark it PATH_NOT_TAKEN.
6515 If it was previously PATH_NOT_TAKEN,
6516 shorten the path by one and look at the previous branch. We know that
6517 at least one branch must have been taken if PATH_SIZE is nonzero. */
6518 while (path_size > 0)
6520 if (data->path[path_size - 1].status != PATH_NOT_TAKEN)
6522 data->path[path_size - 1].status = PATH_NOT_TAKEN;
6529 /* If the first instruction is marked with QImode, that means we've
6530 already processed this block. Our caller will look at DATA->LAST
6531 to figure out where to go next. We want to return the next block
6532 in the instruction stream, not some branched-to block somewhere
6533 else. We accomplish this by pretending our caller forbade us to
6534 follow jumps or skip blocks.
6535 if (GET_MODE (insn) == QImode)
6536 follow_jumps = skip_blocks = 0;
6538 /* Scan to end of this basic block. */
6539 while (p && !LABEL_P (p))
6541 /* Don't cse over a call to setjmp; on some machines (e.g., VAX)
6542 the regs restored by the longjmp come from
6543 a later time than the setjmp. */
6544 if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
6545 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6548 /* A PARALLEL can have lots of SETs in it,
6549 especially if it is really an ASM_OPERANDS. */
6550 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6551 nsets += XVECLEN (PATTERN (p), 0);
6552 else if (!NOTE_P (p))
6555 /* Ignore insns made by CSE; they cannot affect the boundaries of the basic block. */
6558 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6559 high_cuid = INSN_CUID (p);
6560 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6561 low_cuid = INSN_CUID (p);
6563 /* See if this insn is in our branch path. If it is and we are to take it, do so. */
6565 if (path_entry < path_size && data->path[path_entry].branch == p)
6567 if (data->path[path_entry].status != PATH_NOT_TAKEN)
6570 /* Point to next entry in path, if any. */
6574 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6575 was specified, we haven't reached our maximum path length, there are
6576 insns following the target of the jump, this is the only use of the
6577 jump label, and the target label is preceded by a BARRIER.
6579 Alternatively, we can follow the jump if it branches around a
6580 block of code and there are no other branches into the block.
6581 In this case invalidate_skipped_block will be called to invalidate any
6582 registers set in the block when following the jump. */
6584 else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6586 && GET_CODE (PATTERN (p)) == SET
6587 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6588 && JUMP_LABEL (p) != 0
6589 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6590 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6592 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6594 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6595 || (PREV_INSN (q) && CALL_P (PREV_INSN (q))
6596 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6597 && (!LABEL_P (q) || LABEL_NUSES (q) != 0))
6600 /* If we ran into a BARRIER, this code is an extension of the
6601 basic block when the branch is taken. */
6602 if (follow_jumps && q != 0 && BARRIER_P (q))
6604 /* Don't allow ourselves to keep walking around an
6605 always-executed loop. */
6606 if (next_real_insn (q) == next)
6612 /* Similarly, don't put a branch in our path more than once. */
6613 for (i = 0; i < path_entry; i++)
6614 if (data->path[i].branch == p)
6617 if (i != path_entry)
6620 data->path[path_entry].branch = p;
6621 data->path[path_entry++].status = PATH_TAKEN;
6623 /* This branch now ends our path. It was possible that we
6624 didn't see this branch the last time around (when the
6625 insn in front of the target was a JUMP_INSN that was
6626 turned into a no-op). */
6627 path_size = path_entry;
6630 /* Mark block so we won't scan it again later. */
6631 PUT_MODE (NEXT_INSN (p), QImode);
6633 /* Detect a branch around a block of code. */
6634 else if (skip_blocks && q != 0 && !LABEL_P (q))
6638 if (next_real_insn (q) == next)
6644 for (i = 0; i < path_entry; i++)
6645 if (data->path[i].branch == p)
6648 if (i != path_entry)
6651 /* This is no_labels_between_p (p, q) with an added check for
6652 reaching the end of a function (in case Q precedes P). */
6653 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6659 data->path[path_entry].branch = p;
6660 data->path[path_entry++].status = PATH_AROUND;
6662 path_size = path_entry;
6665 /* Mark block so we won't scan it again later. */
6666 PUT_MODE (NEXT_INSN (p), QImode);
6673 data->low_cuid = low_cuid;
6674 data->high_cuid = high_cuid;
6675 data->nsets = nsets;
6678 /* If all jumps in the path are not taken, set our path length to zero
6679 so a rescan won't be done. */
6680 for (i = path_size - 1; i >= 0; i--)
6681 if (data->path[i].status != PATH_NOT_TAKEN)
6685 data->path_size = 0;
6687 data->path_size = path_size;
6689 /* End the current branch path. */
6690 data->path[path_size].branch = 0;
6693 /* Perform cse on the instructions of a function.
6694 F is the first instruction.
6695 NREGS is one plus the highest pseudo-reg number used in the instruction.
6697 Returns 1 if jump_optimize should be redone due to simplifications
6698 in conditional jump instructions. */
6701 cse_main (rtx f, int nregs, FILE *file)
6703 struct cse_basic_block_data val;
6707 val.path = xmalloc (sizeof (struct branch_path)
6708 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6710 cse_jumps_altered = 0;
6711 recorded_label_ref = 0;
6712 constant_pool_entries_cost = 0;
6713 constant_pool_entries_regcost = 0;
6715 rtl_hooks = cse_rtl_hooks;
6718 init_alias_analysis ();
6720 reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
6722 /* Reset the counter indicating how many elements have been made
6724 n_elements_made = 0;
6726 /* Find the largest uid. */
6728 max_uid = get_max_uid ();
6729 uid_cuid = xcalloc (max_uid + 1, sizeof (int));
6731 /* Compute the mapping from uids to cuids.
6732 CUIDs are numbers assigned to insns, like uids,
6733 except that cuids increase monotonically through the code.
6734 Don't assign cuids to line-number NOTEs, so that the distance in cuids
6735 between two insns is not affected by -g. */
6737 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6740 || NOTE_LINE_NUMBER (insn) < 0)
6741 INSN_CUID (insn) = ++i;
6743 /* Give a line number note the same cuid as the preceding insn. */
6744 INSN_CUID (insn) = i;
6747 /* Loop over basic blocks.
6748 Compute the maximum number of qty's needed for each basic block
6749 (which is 2 for each SET). */
6754 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps,
6755 flag_cse_skip_blocks);
6757 /* If this basic block was already processed or has no sets, skip it. */
6758 if (val.nsets == 0 || GET_MODE (insn) == QImode)
6760 PUT_MODE (insn, VOIDmode);
6761 insn = (val.last ? NEXT_INSN (val.last) : 0);
6766 cse_basic_block_start = val.low_cuid;
6767 cse_basic_block_end = val.high_cuid;
6768 max_qty = val.nsets * 2;
6771 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
6772 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6775 /* Make MAX_QTY bigger to give us room to optimize
6776 past the end of this basic block, if that should prove useful. */
6780 /* If this basic block is being extended by following certain jumps,
6781 (see `cse_end_of_basic_block'), we reprocess the code from the start.
6782 Otherwise, we start after this basic block. */
6783 if (val.path_size > 0)
6784 cse_basic_block (insn, val.last, val.path);
6787 int old_cse_jumps_altered = cse_jumps_altered;
6790 /* When cse changes a conditional jump to an unconditional
6791 jump, we want to reprocess the block, since it will give
6792 us a new branch path to investigate. */
6793 cse_jumps_altered = 0;
6794 temp = cse_basic_block (insn, val.last, val.path);
6795 if (cse_jumps_altered == 0
6796 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
6799 cse_jumps_altered |= old_cse_jumps_altered;
6810 if (max_elements_made < n_elements_made)
6811 max_elements_made = n_elements_made;
6814 end_alias_analysis ();
6816 free (reg_eqv_table);
6818 rtl_hooks = general_rtl_hooks;
6820 return cse_jumps_altered || recorded_label_ref;
6823 /* Process a single basic block. FROM and TO are the limits of the basic
6824 block. NEXT_BRANCH points to the branch path when following jumps or
6825 a null path when not following jumps. */
6828 cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
6832 rtx libcall_insn = NULL_RTX;
6834 int no_conflict = 0;
6836 /* Allocate the space needed by qty_table. */
6837 qty_table = xmalloc (max_qty * sizeof (struct qty_table_elem));
6841 /* TO might be a label. If so, protect it from being deleted. */
6842 if (to != 0 && LABEL_P (to))
6845 for (insn = from; insn != to; insn = NEXT_INSN (insn))
6847 enum rtx_code code = GET_CODE (insn);
6849 /* If we have processed 1,000 insns, flush the hash table to
6850 avoid extreme quadratic behavior. We must not include NOTEs
6851 in the count since there may be more of them when generating
6852 debugging information. If we clear the table at different
6853 times, code generated with -g -O might be different than code
6854 generated with -O but not -g.
6856 ??? This is a real kludge and needs to be done some other way.
6858 if (code != NOTE && num_insns++ > 1000)
6860 flush_hash_table ();
6864 /* See if this is a branch that is part of the path. If so, and it is
6865 to be taken, do so. */
6866 if (next_branch->branch == insn)
6868 enum taken status = next_branch++->status;
6869 if (status != PATH_NOT_TAKEN)
6871 if (status == PATH_TAKEN)
6872 record_jump_equiv (insn, 1);
6874 invalidate_skipped_block (NEXT_INSN (insn));
6876 /* Set the last insn as the jump insn; it doesn't affect cc0.
6877 Then follow this branch. */
6882 insn = JUMP_LABEL (insn);
6887 if (GET_MODE (insn) == QImode)
6888 PUT_MODE (insn, VOIDmode);
6890 if (GET_RTX_CLASS (code) == RTX_INSN)
6894 /* Process notes first so we have all notes in canonical forms when
6895 looking for duplicate operations. */
6897 if (REG_NOTES (insn))
6898 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
6900 /* Track when we are inside a LIBCALL block. Inside such a block,
6901 we do not want to record destinations. The last insn of a
6902 LIBCALL block is not considered to be part of the block, since
6903 its destination is the result of the block and hence should be recorded. */
6906 if (REG_NOTES (insn) != 0)
6908 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
6909 libcall_insn = XEXP (p, 0);
6910 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
6912 /* Keep libcall_insn for the last SET insn of a no-conflict
6913 block to prevent changing the destination. */
6919 else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
6923 cse_insn (insn, libcall_insn);
6925 if (no_conflict == -1)
6931 /* If we haven't already found an insn where we added a LABEL_REF, check this one. */
6933 if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
6934 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6936 recorded_label_ref = 1;
6939 /* If INSN is now an unconditional jump, skip to the end of our
6940 basic block by pretending that we just did the last insn in the
6941 basic block. If we are jumping to the end of our block, show
6942 that we can have one usage of TO. */
6944 if (any_uncondjump_p (insn))
6952 if (JUMP_LABEL (insn) == to)
6955 /* Maybe TO was deleted because the jump is unconditional.
6956 If so, there is nothing left in this basic block. */
6957 /* ??? Perhaps it would be smarter to set TO
6958 to whatever follows this insn,
6959 and pretend the basic block had always ended here. */
6960 if (INSN_DELETED_P (to))
6963 insn = PREV_INSN (to);
6966 /* See if it is ok to keep on going past the label
6967 which used to end our basic block. Remember that we incremented
6968 the count of that label, so we decrement it here. If we made
6969 a jump unconditional, TO_USAGE will be one; in that case, we don't
6970 want to count the use in that jump. */
6972 if (to != 0 && NEXT_INSN (insn) == to
6973 && LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
6975 struct cse_basic_block_data val;
6978 insn = NEXT_INSN (to);
6980 /* If TO was the last insn in the function, we are done. */
6987 /* If TO was preceded by a BARRIER we are done with this block
6988 because it has no continuation. */
6989 prev = prev_nonnote_insn (to);
6990 if (prev && BARRIER_P (prev))
6996 /* Find the end of the following block. Note that we won't be
6997 following branches in this case. */
7000 val.path = xmalloc (sizeof (struct branch_path)
7001 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
7002 cse_end_of_basic_block (insn, &val, 0, 0);
7005 /* If the tables we allocated have enough space left
7006 to handle all the SETs in the next basic block,
7007 continue through it. Otherwise, return,
7008 and that block will be scanned individually. */
7009 if (val.nsets * 2 + next_qty > max_qty)
7012 cse_basic_block_start = val.low_cuid;
7013 cse_basic_block_end = val.high_cuid;
7016 /* Prevent TO from being deleted if it is a label. */
7017 if (to != 0 && LABEL_P (to))
7020 /* Back up so we process the first insn in the extension. */
7021 insn = PREV_INSN (insn);
7025 gcc_assert (next_qty <= max_qty);
7029 return to ? NEXT_INSN (to) : 0;
7032 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7033 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
7036 check_for_label_ref (rtx *rtl, void *data)
7038 rtx insn = (rtx) data;
7040 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7041 we must rerun jump since it needs to place the note. If this is a
7042 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7043 since no REG_LABEL will be added. */
7044 return (GET_CODE (*rtl) == LABEL_REF
7045 && ! LABEL_REF_NONLOCAL_P (*rtl)
7046 && LABEL_P (XEXP (*rtl, 0))
7047 && INSN_UID (XEXP (*rtl, 0)) != 0
7048 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7051 /* Count the number of times registers are used (not set) in X.
7052 COUNTS is an array in which we accumulate the count, INCR is how much
7053 we count each register usage. */
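/* A minimal usage sketch (mirroring delete_trivially_dead_insns below, not
   additional functionality):

	int *counts = xcalloc (nreg, sizeof (int));
	for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
	  count_reg_usage (insn, counts, 1);

   Deleting an insn is then paired with count_reg_usage (insn, counts, -1) so
   the array stays consistent with the remaining code.  */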
7056 count_reg_usage (rtx x, int *counts, int incr)
7066 switch (code = GET_CODE (x))
7069 counts[REGNO (x)] += incr;
7083 /* If we are clobbering a MEM, mark any registers inside the address as being used. */
7085 if (MEM_P (XEXP (x, 0)))
7086 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, incr);
7090 /* Unless we are setting a REG, count everything in SET_DEST. */
7091 if (!REG_P (SET_DEST (x)))
7092 count_reg_usage (SET_DEST (x), counts, incr);
7093 count_reg_usage (SET_SRC (x), counts, incr);
7097 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, incr);
7102 count_reg_usage (PATTERN (x), counts, incr);
7104 /* Things used in a REG_EQUAL note aren't dead since loop may try to use them. */
7107 note = find_reg_equal_equiv_note (x);
7110 rtx eqv = XEXP (note, 0);
7112 if (GET_CODE (eqv) == EXPR_LIST)
7113 /* This REG_EQUAL note describes the result of a function call.
7114 Process all the arguments. */
7117 count_reg_usage (XEXP (eqv, 0), counts, incr);
7118 eqv = XEXP (eqv, 1);
7120 while (eqv && GET_CODE (eqv) == EXPR_LIST);
7122 count_reg_usage (eqv, counts, incr);
7127 if (REG_NOTE_KIND (x) == REG_EQUAL
7128 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
7129 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7130 involving registers in the address. */
7131 || GET_CODE (XEXP (x, 0)) == CLOBBER)
7132 count_reg_usage (XEXP (x, 0), counts, incr);
7134 count_reg_usage (XEXP (x, 1), counts, incr);
7138 /* Iterate over just the inputs, not the constraints as well. */
7139 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7140 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, incr);
7150 fmt = GET_RTX_FORMAT (code);
7151 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7154 count_reg_usage (XEXP (x, i), counts, incr);
7155 else if (fmt[i] == 'E')
7156 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7157 count_reg_usage (XVECEXP (x, i, j), counts, incr);
7161 /* Return true if set is live. */
7163 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
7170 if (set_noop_p (set))
7174 else if (GET_CODE (SET_DEST (set)) == CC0
7175 && !side_effects_p (SET_SRC (set))
7176 && ((tem = next_nonnote_insn (insn)) == 0
7178 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7181 else if (!REG_P (SET_DEST (set))
7182 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7183 || counts[REGNO (SET_DEST (set))] != 0
7184 || side_effects_p (SET_SRC (set)))
7189 /* Return true if insn is live. */
7192 insn_live_p (rtx insn, int *counts)
7195 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7197 else if (GET_CODE (PATTERN (insn)) == SET)
7198 return set_live_p (PATTERN (insn), insn, counts);
7199 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7201 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7203 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7205 if (GET_CODE (elt) == SET)
7207 if (set_live_p (elt, insn, counts))
7210 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7219 /* Return true if libcall is dead as a whole. */
7222 dead_libcall_p (rtx insn, int *counts)
7226 /* See if there's a REG_EQUAL note on this insn and try to
7227 replace the source with the REG_EQUAL expression.
7229 We assume that insns with REG_RETVALs can only be reg->reg
7230 copies at this point. */
7231 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7235 set = single_set (insn);
7239 new = simplify_rtx (XEXP (note, 0));
7241 new = XEXP (note, 0);
7243 /* While changing insn, we must update the counts accordingly. */
7244 count_reg_usage (insn, counts, -1);
7246 if (validate_change (insn, &SET_SRC (set), new, 0))
7248 count_reg_usage (insn, counts, 1);
7249 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7250 remove_note (insn, note);
7254 if (CONSTANT_P (new))
7256 new = force_const_mem (GET_MODE (SET_DEST (set)), new);
7257 if (new && validate_change (insn, &SET_SRC (set), new, 0))
7259 count_reg_usage (insn, counts, 1);
7260 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7261 remove_note (insn, note);
7266 count_reg_usage (insn, counts, 1);
7270 /* Scan all the insns and delete any that are dead; i.e., they store a register
7271 that is never used or they copy a register to itself.
7273 This is used to remove insns made obviously dead by cse, loop or other
7274 optimizations. It improves the heuristics in loop since it won't try to
7275 move dead invariants out of loops or make givs for dead quantities. The
7276 remaining passes of the compilation are also sped up. */
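/* Two illustrative examples (not from the original sources) of insns this
   pass removes: a self-copy such as

	(set (reg:SI 120) (reg:SI 120))

   which set_noop_p recognizes, and

	(set (reg:SI 130) (plus:SI (reg:SI 131) (const_int 1)))

   when reg 130 is a pseudo, counts[130] is zero and the source has no side
   effects.  */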
7279 delete_trivially_dead_insns (rtx insns, int nreg)
7283 int in_libcall = 0, dead_libcall = 0;
7284 int ndead = 0, nlastdead, niterations = 0;
7286 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7287 /* First count the number of times each register is used. */
7288 counts = xcalloc (nreg, sizeof (int));
7289 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7290 count_reg_usage (insn, counts, 1);
7296 /* Go from the last insn to the first and delete insns that only set unused
7297 registers or copy a register to itself. As we delete an insn, remove
7298 usage counts for registers it uses.
7300 The first jump optimization pass may leave a real insn as the last
7301 insn in the function. We must not skip that insn or we may end
7302 up deleting code that is not really dead. */
7303 insn = get_last_insn ();
7304 if (! INSN_P (insn))
7305 insn = prev_real_insn (insn);
7307 for (; insn; insn = prev)
7311 prev = prev_real_insn (insn);
7313 /* Don't delete any insns that are part of a libcall block unless
7314 we can delete the whole libcall block.
7316 Flow or loop might get confused if we did that. Remember
7317 that we are scanning backwards. */
7318 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7322 dead_libcall = dead_libcall_p (insn, counts);
7324 else if (in_libcall)
7325 live_insn = ! dead_libcall;
7327 live_insn = insn_live_p (insn, counts);
7329 /* If this is a dead insn, delete it and show registers in it aren't being used. */
7334 count_reg_usage (insn, counts, -1);
7335 delete_insn_and_edges (insn);
7339 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7346 while (ndead != nlastdead);
7348 if (dump_file && ndead)
7349 fprintf (dump_file, "Deleted %i trivially dead insns; %i iterations\n",
7350 ndead, niterations);
7353 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7357 /* This function is called via for_each_rtx. The argument, NEWREG, is
7358 a condition code register with the desired mode. If we are looking
7359 at the same register in a different mode, replace it with NEWREG. */
7363 cse_change_cc_mode (rtx *loc, void *data)
7365 struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
7369 && REGNO (*loc) == REGNO (args->newreg)
7370 && GET_MODE (*loc) != GET_MODE (args->newreg))
7372 validate_change (args->insn, loc, args->newreg, 1);
7379 /* Change the mode of any reference to the register REGNO (NEWREG) to
7380 GET_MODE (NEWREG) in INSN. */
7383 cse_change_cc_mode_insn (rtx insn, rtx newreg)
7385 struct change_cc_mode_args args;
7392 args.newreg = newreg;
7394 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
7395 for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
7397 /* If the following assertion was triggered, there is most probably
7398 something wrong with the cc_modes_compatible back end function.
7399 CC modes only can be considered compatible if the insn - with the mode
7400 replaced by any of the compatible modes - can still be recognized. */
7401 success = apply_change_group ();
7402 gcc_assert (success);
7405 /* Change the mode of any reference to the register REGNO (NEWREG) to
7406 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
7407 any instruction which modifies NEWREG. */
7410 cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7414 for (insn = start; insn != end; insn = NEXT_INSN (insn))
7416 if (! INSN_P (insn))
7419 if (reg_set_p (newreg, insn))
7422 cse_change_cc_mode_insn (insn, newreg);
7426 /* BB is a basic block which finishes with CC_REG as a condition code
7427 register which is set to CC_SRC. Look through the successors of BB
7428 to find blocks which have a single predecessor (i.e., this one),
7429 and look through those blocks for an assignment to CC_REG which is
7430 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
7431 permitted to change the mode of CC_SRC to a compatible mode. This
7432 returns VOIDmode if no equivalent assignments were found.
7433 Otherwise it returns the mode which CC_SRC should wind up with.
7435 The main complexity in this function is handling the mode issues.
7436 We may have more than one duplicate which we can eliminate, and we
7437 try to find a mode which will work for multiple duplicates. */
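/* An illustrative scenario (not from the original sources): block B ends in

	(set (reg:CC cc) (compare:CC (reg:SI 100) (reg:SI 101)))
	(jump_insn ... based on cc)

   and each of its two successors begins by recomputing the same comparison
   into the same register.  Both duplicates can be deleted; if one of them
   used a different but compatible CC mode, cse_cc_succs returns the mode
   chosen by targetm.cc_modes_compatible and the caller rewrites the original
   comparison to use it.  */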
7439 static enum machine_mode
7440 cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
7443 enum machine_mode mode;
7444 unsigned int insn_count;
7447 enum machine_mode modes[2];
7453 /* We expect to have two successors. Look at both before picking
7454 the final mode for the comparison. If we have more successors
7455 (i.e., some sort of table jump, although that seems unlikely),
7456 then we require all beyond the first two to use the same mode. */
7459 found_equiv = false;
7460 mode = GET_MODE (cc_src);
7462 FOR_EACH_EDGE (e, ei, bb->succs)
7467 if (e->flags & EDGE_COMPLEX)
7470 if (EDGE_COUNT (e->dest->preds) != 1
7471 || e->dest == EXIT_BLOCK_PTR)
7474 end = NEXT_INSN (BB_END (e->dest));
7475 for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7479 if (! INSN_P (insn))
7482 /* If CC_SRC is modified, we have to stop looking for
7483 something which uses it. */
7484 if (modified_in_p (cc_src, insn))
7487 /* Check whether INSN sets CC_REG to CC_SRC. */
7488 set = single_set (insn);
7490 && REG_P (SET_DEST (set))
7491 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7494 enum machine_mode set_mode;
7495 enum machine_mode comp_mode;
7498 set_mode = GET_MODE (SET_SRC (set));
7499 comp_mode = set_mode;
7500 if (rtx_equal_p (cc_src, SET_SRC (set)))
7502 else if (GET_CODE (cc_src) == COMPARE
7503 && GET_CODE (SET_SRC (set)) == COMPARE
7505 && rtx_equal_p (XEXP (cc_src, 0),
7506 XEXP (SET_SRC (set), 0))
7507 && rtx_equal_p (XEXP (cc_src, 1),
7508 XEXP (SET_SRC (set), 1)))
7511 comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7512 if (comp_mode != VOIDmode
7513 && (can_change_mode || comp_mode == mode))
7520 if (insn_count < ARRAY_SIZE (insns))
7522 insns[insn_count] = insn;
7523 modes[insn_count] = set_mode;
7524 last_insns[insn_count] = end;
7527 if (mode != comp_mode)
7529 gcc_assert (can_change_mode);
7532 /* The modified insn will be re-recognized later. */
7533 PUT_MODE (cc_src, mode);
7538 if (set_mode != mode)
7540 /* We found a matching expression in the
7541 wrong mode, but we don't have room to
7542 store it in the array. Punt. This case should be rare. */
7546 /* INSN sets CC_REG to a value equal to CC_SRC
7547 with the right mode. We can simply delete it. */
7552 /* We found an instruction to delete. Keep looking,
7553 in the hopes of finding a three-way jump. */
7557 /* We found an instruction which sets the condition
7558 code, so don't look any farther. */
7562 /* If INSN sets CC_REG in some other way, don't look any farther. */
7564 if (reg_set_p (cc_reg, insn))
7568 /* If we fell off the bottom of the block, we can keep looking
7569 through successors. We pass CAN_CHANGE_MODE as false because
7570 we aren't prepared to handle compatibility between the
7571 further blocks and this block. */
7574 enum machine_mode submode;
7576 submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
7577 if (submode != VOIDmode)
7579 gcc_assert (submode == mode);
7581 can_change_mode = false;
7589 /* Now INSN_COUNT is the number of instructions we found which set
7590 CC_REG to a value equivalent to CC_SRC. The instructions are in
7591 INSNS. The modes used by those instructions are in MODES. */
7594 for (i = 0; i < insn_count; ++i)
7596 if (modes[i] != mode)
7598 /* We need to change the mode of CC_REG in INSNS[i] and
7599 subsequent instructions. */
7602 if (GET_MODE (cc_reg) == mode)
7605 newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7607 cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7611 delete_insn (insns[i]);
7617 /* If we have a fixed condition code register (or two), walk through
7618 the instructions and try to eliminate duplicate assignments. */
7621 cse_condition_code_reg (void)
7623 unsigned int cc_regno_1;
7624 unsigned int cc_regno_2;
7629 if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7632 cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7633 if (cc_regno_2 != INVALID_REGNUM)
7634 cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7636 cc_reg_2 = NULL_RTX;
7645 enum machine_mode mode;
7646 enum machine_mode orig_mode;
7648 /* Look for blocks which end with a conditional jump based on a
7649 condition code register. Then look for the instruction which
7650 sets the condition code register. Then look through the
7651 successor blocks for instructions which set the condition
7652 code register to the same value. There are other possible
7653 uses of the condition code register, but these are by far the
7654 most common and the ones which we are most likely to be able to optimize. */
7657 last_insn = BB_END (bb);
7658 if (!JUMP_P (last_insn))
7661 if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7663 else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7668 cc_src_insn = NULL_RTX;
7670 for (insn = PREV_INSN (last_insn);
7671 insn && insn != PREV_INSN (BB_HEAD (bb));
7672 insn = PREV_INSN (insn))
7676 if (! INSN_P (insn))
7678 set = single_set (insn);
7680 && REG_P (SET_DEST (set))
7681 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7684 cc_src = SET_SRC (set);
7687 else if (reg_set_p (cc_reg, insn))
7694 if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7697 /* Now CC_REG is a condition code register used for a
7698 conditional jump at the end of the block, and CC_SRC, in
7699 CC_SRC_INSN, is the value to which that condition code
7700 register is set, and CC_SRC is still meaningful at the end of the block. */
7703 orig_mode = GET_MODE (cc_src);
7704 mode = cse_cc_succs (bb, cc_reg, cc_src, true);
7705 if (mode != VOIDmode)
7707 gcc_assert (mode == GET_MODE (cc_src));
7708 if (mode != orig_mode)
7710 rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7712 cse_change_cc_mode_insn (cc_src_insn, newreg);
7714 /* Do the same in the following insns that use the
7715 current value of CC_REG within BB. */
7716 cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7717 NEXT_INSN (last_insn),