1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 /* stdio.h must precede rtl.h for FFS. */
25 #include "coretypes.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
35 #include "insn-config.h"
47 /* The basic idea of common subexpression elimination is to go
48 through the code, keeping a record of expressions that would
49 have the same value at the current scan point, and replacing
50 expressions encountered with the cheapest equivalent expression.
52 It is too complicated to keep track of the different possibilities
53 when control paths merge in this code; so, at each label, we forget all
54 that is known and start fresh. This can be described as processing each
55 extended basic block separately. We have a separate pass to perform global CSE.
58 Note CSE can turn a conditional or computed jump into a nop or
59 an unconditional jump. When this occurs we arrange to run the jump
60 optimizer after CSE to delete the unreachable code.
62 We use two data structures to record the equivalent expressions:
63 a hash table for most expressions, and a vector of "quantity
64 numbers" to record equivalent (pseudo) registers.
66 The use of the special data structure for registers is desirable
67 because it is faster. It is possible because register references
68 contain a fairly small number, the register number, taken from
69 a contiguously allocated series, and two register references are
70 identical if they have the same number. General expressions
71 do not have any such thing, so the only way to retrieve the
72 information recorded on an expression other than a register
73 is to keep it in a hash table.
75 Registers and "quantity numbers":
77 At the start of each basic block, all of the (hardware and pseudo)
78 registers used in the function are given distinct quantity
79 numbers to indicate their contents. During scan, when the code
80 copies one register into another, we copy the quantity number.
81 When a register is loaded in any other way, we allocate a new
82 quantity number to describe the value generated by this operation.
83 `reg_qty' records what quantity a register is currently thought
86 All real quantity numbers are greater than or equal to `max_reg'.
87 If register N has not been assigned a quantity, reg_qty[N] will equal N.
89 Quantity numbers below `max_reg' do not exist and none of the `qty_table'
90 entries should be referenced with an index below `max_reg'.
92 We also maintain a bidirectional chain of registers for each
93 quantity number. The `qty_table' members `first_reg' and `last_reg',
94 and `reg_eqv_table' members `next' and `prev' hold these chains.
96 The first register in a chain is the one whose lifespan is least local.
97 Among equals, it is the one that was seen first.
98 We replace any equivalent register with that one.
100 If two registers have the same quantity number, it must be true that
101 REG expressions with the qty_table `mode' are in the hash table for both
102 registers and are in the same class.
104 The converse is not true. Since hard registers may be referenced in
105 any mode, two REG expressions might be equivalent in the hash table
106 but not have the same quantity number if the quantity of one
107 of the registers does not have the same mode as those expressions.
109 Constants and quantity numbers
111 When a quantity has a known constant value, that value is stored
112 in the appropriate qty_table `const_rtx'. This is in addition to
113 putting the constant in the hash table as is usual for non-regs.
115 Whether a reg or a constant is preferred is determined by the configuration
116 macro CONST_COSTS and will often depend on the constant value. In any
117 event, expressions containing constants can be simplified, by fold_rtx.
119 When a quantity has a known nearly constant value (such as an address
120 of a stack slot), that value is stored in the appropriate qty_table `const_rtx'.
123 Integer constants don't have a machine mode. However, cse
124 determines the intended machine mode from the destination
125 of the instruction that moves the constant. The machine mode
126 is recorded in the hash table along with the actual RTL
127 constant expression so that different modes are kept separate.
131 To record known equivalences among expressions in general
132 we use a hash table called `table'. It has a fixed number of buckets
133 that contain chains of `struct table_elt' elements for expressions.
134 These chains connect the elements whose expressions have the same hash codes.
137 Other chains through the same elements connect the elements which
138 currently have equivalent values.
140 Register references in an expression are canonicalized before hashing
141 the expression. This is done using `reg_qty' and qty_table `first_reg'.
142 The hash code of a register reference is computed using the quantity
143 number, not the register number.
145 When the value of an expression changes, it is necessary to remove from the
146 hash table not just that expression but all expressions whose values
147 could be different as a result.
149 1. If the value changing is in memory, except in special cases
150 ANYTHING referring to memory could be changed. That is because
151 nobody knows where a pointer does not point.
152 The function `invalidate_memory' removes what is necessary.
154 The special cases are when the address is constant or is
155 a constant plus a fixed register such as the frame pointer
156 or a static chain pointer. When such addresses are stored in,
157 we can tell exactly which other such addresses must be invalidated
158 due to overlap. `invalidate' does this.
159 All expressions that refer to non-constant
160 memory addresses are also invalidated. `invalidate_memory' does this.
162 2. If the value changing is a register, all expressions
163 containing references to that register, and only those, must be removed.
166 Because searching the entire hash table for expressions that contain
167 a register is very slow, we try to figure out when it isn't necessary.
168 Precisely, this is necessary only when expressions have been
169 entered in the hash table using this register, and then the value has
170 changed, and then another expression wants to be added to refer to
171 the register's new value. This sequence of circumstances is rare
172 within any one basic block.
174 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
175 reg_tick[i] is incremented whenever a value is stored in register i.
176 reg_in_table[i] holds -1 if no references to register i have been
177 entered in the table; otherwise, it contains the value reg_tick[i] had
178 when the references were entered. If we want to enter a reference
179 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
180 Until we want to enter a new entry, the mere fact that the two vectors
181 don't match causes those entries to be ignored if anyone tries to match them.
183 Registers themselves are entered in the hash table as well as in
184 the equivalent-register chains. However, the vectors `reg_tick'
185 and `reg_in_table' do not apply to expressions which are simple
186 register references. These expressions are removed from the table
187 immediately when they become invalid, and this can be done even if
188 we do not immediately search for all the expressions that refer to the register.
191 A CLOBBER rtx in an instruction invalidates its operand for further
192 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
193 invalidates everything that resides in memory.
197 Constant expressions that differ only by an additive integer
198 are called related. When a constant expression is put in
199 the table, the related expression with no constant term
200 is also entered. These are made to point at each other
201 so that it is possible to find out if there exists any
202 register equivalent to an expression related to a given expression. */
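/* A minimal illustrative sketch, kept out of the build, of the
   quantity-number scheme described above.  The toy_* names and the
   fixed-size arrays are made up for the example; the real mechanism is
   REG_QTY together with make_new_qty and make_regs_eqv below.  */
#if 0
#define TOY_MAX_REG 8
static int toy_reg_qty[TOY_MAX_REG];    /* quantity currently held by each reg */
static int toy_next_qty = TOY_MAX_REG;  /* real quantities start at max_reg */

/* Any store other than a register copy gives the destination a fresh
   quantity.  */
static void
toy_set_reg (int reg)
{
  toy_reg_qty[reg] = toy_next_qty++;
}

/* Copying SRC into DST makes them share a quantity, i.e. records that
   they are equivalent.  */
static void
toy_copy_reg (int dst, int src)
{
  toy_reg_qty[dst] = toy_reg_qty[src];
}

static void
toy_example (void)
{
  int i;
  for (i = 0; i < TOY_MAX_REG; i++)
    toy_reg_qty[i] = i;         /* reg_qty[N] == N: no quantity assigned yet */

  toy_set_reg (3);              /* r3 = <expr>  -> new quantity */
  toy_copy_reg (5, 3);          /* r5 = r3      -> same quantity as r3 */
  toy_set_reg (3);              /* r3 clobbered -> r5 still holds the old quantity */
}
#endif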
204 /* One plus largest register number used in this function. */
static int max_reg;
208 /* One plus largest instruction UID used in this function at time of cse_main call. */
211 static int max_insn_uid;
213 /* Length of qty_table vector. We know in advance we will not need
214 a quantity number this big. */
static int max_qty;
218 /* Next quantity number to be allocated.
219 This is 1 + the largest number needed so far. */
static int next_qty;
223 /* Per-qty information tracking.
225 `first_reg' and `last_reg' track the head and tail of the
226 chain of registers which currently contain this quantity.
228 `mode' contains the machine mode of this quantity.
230 `const_rtx' holds the rtx of the constant value of this
231 quantity, if known. A sum of the frame/arg pointer
232 and a constant can also be entered here. When this holds
233 a known value, `const_insn' is the insn which stored the constant value.
236 `comparison_{code,const,qty}' are used to track when a
237 comparison between a quantity and some constant or register has
238 been passed. In such a case, we know the results of the comparison
239 in case we see it again. These members record a comparison that
240 is known to be true. `comparison_code' holds the rtx code of such
241 a comparison, else it is set to UNKNOWN and the other two
242 comparison members are undefined. `comparison_const' holds
243 the constant being compared against, or zero if the comparison
244 is not against a constant. `comparison_qty' holds the quantity
245 being compared against when the result is known. If the comparison
246 is not with a register, `comparison_qty' is -1. */
struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h). */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};
261 /* The table of all qtys, indexed by qty number. */
262 static struct qty_table_elem *qty_table;
265 /* For machines that have a CC0, we do not record its value in the hash
266 table since its use is guaranteed to be the insn immediately following
267 its definition and any other insn is presumed to invalidate it.
269 Instead, we store below the value last assigned to CC0. If it should
270 happen to be a constant, it is stored in preference to the actual
271 assigned value. In case it is a constant, we store the mode in which
272 the constant should be interpreted. */
274 static rtx prev_insn_cc0;
275 static enum machine_mode prev_insn_cc0_mode;
277 /* Previous actual insn. 0 if at first insn of basic block. */
279 static rtx prev_insn;
282 /* Insn being scanned. */
284 static rtx this_insn;
286 /* Indexed by register number, gives the number of the next (or
287 previous) register in the chain of registers sharing the same value.
290 Or -1 if this register is at the end of the chain.
292 If reg_qty[N] == N, reg_eqv_table[N].next is undefined. */
294 /* Per-register equivalence chain. */
struct reg_eqv_elem { int next, prev; };
300 /* The table of all register equivalence chains. */
301 static struct reg_eqv_elem *reg_eqv_table;
struct cse_reg_info
{
  /* Next in hash chain. */
  struct cse_reg_info *hash_next;
  /* The next cse_reg_info structure in the free or used list. */
  struct cse_reg_info *next;
  /* Search key. */
  unsigned int regno;
  /* The quantity number of the register's current contents. */
  int reg_qty;
  /* The number of times the register has been altered in the current
     basic block. */
  int reg_tick;
  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table. If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid. */
  int reg_in_table;
  /* The SUBREG that was set when REG_TICK was last incremented. Set
     to -1 if the last store was to the whole register, not a subreg. */
  unsigned int subreg_ticked;
};
332 /* A free list of cse_reg_info entries. */
333 static struct cse_reg_info *cse_reg_info_free_list;
335 /* A used list of cse_reg_info entries. */
336 static struct cse_reg_info *cse_reg_info_used_list;
337 static struct cse_reg_info *cse_reg_info_used_list_end;
339 /* A mapping from registers to cse_reg_info data structures. */
340 #define REGHASH_SHIFT 7
341 #define REGHASH_SIZE (1 << REGHASH_SHIFT)
342 #define REGHASH_MASK (REGHASH_SIZE - 1)
343 static struct cse_reg_info *reg_hash[REGHASH_SIZE];
345 #define REGHASH_FN(REGNO) \
346 (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
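/* A few worked examples of the bucket computation above, using the
   REGHASH_SHIFT/REGHASH_MASK values defined in this file; the register
   numbers themselves are arbitrary.  */
#if 0
static void
reghash_fn_examples (void)
{
  int b1 = REGHASH_FN (5);    /* (5 ^ 0) & 127 == 5 */
  int b2 = REGHASH_FN (133);  /* (133 ^ 1) & 127 == 4 */
  int b3 = REGHASH_FN (261);  /* (261 ^ 2) & 127 == 7 */
  (void) b1; (void) b2; (void) b3;
}
#endif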
348 /* The last lookup we did into the cse_reg_info hash table. This allows us
349 to cache repeated lookups. */
350 static unsigned int cached_regno;
351 static struct cse_reg_info *cached_cse_reg_info;
353 /* A HARD_REG_SET containing all the hard registers for which there is
354 currently a REG expression in the hash table. Note the difference
355 from the above variables, which indicate if the REG is mentioned in some
356 expression in the table. */
358 static HARD_REG_SET hard_regs_in_table;
360 /* CUID of insn that starts the basic block currently being cse-processed. */
362 static int cse_basic_block_start;
364 /* CUID of insn that ends the basic block currently being cse-processed. */
366 static int cse_basic_block_end;
368 /* Vector mapping INSN_UIDs to cuids.
369 The cuids are like uids but always increase monotonically.
370 We use them to see whether a reg is used outside a given basic block. */
372 static int *uid_cuid;
374 /* Highest UID in UID_CUID. */
377 /* Get the cuid of an insn. */
379 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
381 /* Nonzero if this pass has made changes, and therefore it's
382 worthwhile to run the garbage collector. */
384 static int cse_altered;
386 /* Nonzero if cse has altered conditional jump insns
387 in such a way that jump optimization should be redone. */
389 static int cse_jumps_altered;
391 /* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
392 REG_LABEL; if so, we have to rerun jump after CSE to put in the note. */
393 static int recorded_label_ref;
395 /* canon_hash stores 1 in do_not_record
396 if it notices a reference to CC0, PC, or some other volatile subexpression. */
399 static int do_not_record;
401 #ifdef LOAD_EXTEND_OP
403 /* Scratch rtl used when looking for load-extended copy of a MEM. */
404 static rtx memory_extend_rtx;
407 /* canon_hash stores 1 in hash_arg_in_memory
408 if it notices a reference to memory within the expression being hashed. */
410 static int hash_arg_in_memory;
412 /* The hash table contains buckets which are chains of `struct table_elt's,
413 each recording one expression's information.
414 That expression is in the `exp' field.
416 The canon_exp field contains a canonical (from the point of view of
417 alias analysis) version of the `exp' field.
419 Those elements with the same hash code are chained in both directions
420 through the `next_same_hash' and `prev_same_hash' fields.
422 Each set of expressions with equivalent values
423 are on a two-way chain through the `next_same_value'
424 and `prev_same_value' fields, and all point with
425 the `first_same_value' field at the first element in
426 that chain. The chain is in order of increasing cost.
427 Each element's cost value is in its `cost' field.
429 The `in_memory' field is nonzero for elements that
430 involve any reference to memory. These elements are removed
431 whenever a write is done to an unidentified location in memory.
432 To be safe, we assume that a memory address is unidentified unless
433 the address is either a symbol constant or a constant plus
434 the frame pointer or argument pointer.
436 The `related_value' field is used to connect related expressions
437 (that differ by adding an integer).
438 The related expressions are chained in a circular fashion.
439 `related_value' is zero for expressions for which this chain is not useful.
442 The `cost' field stores the cost of this element's expression.
443 The `regcost' field stores the value returned by approx_reg_cost for
444 this element's expression.
446 The `is_const' flag is set if the element is a constant (including a CONST_INT).
449 The `flag' field is used as a temporary during some search routines.
451 The `mode' field is usually the same as GET_MODE (`exp'), but
452 if `exp' is a CONST_INT and has no machine mode then the `mode'
453 field is the mode it was being used as. Each constant is
454 recorded separately for each mode it is used with. */
struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size of the mode field of struct rtx_def (see rtl.h). */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};
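/* A small sketch, not part of the pass, of how the two chains through
   `struct table_elt' are walked.  Both helpers are hypothetical; real
   lookups go through `lookup' and `insert' below.  */
#if 0
/* The value class is kept in order of increasing cost, and every member
   points at its head, so the cheapest equivalent is one dereference away.  */
static struct table_elt *
cheapest_equiv (struct table_elt *elt)
{
  return elt ? elt->first_same_value : 0;
}

/* Elements that merely share a hash code are chained separately through
   next_same_hash; this is what a bucket walk looks like.  */
static int
bucket_length (struct table_elt *bucket_head)
{
  int n = 0;
  struct table_elt *p;

  for (p = bucket_head; p; p = p->next_same_hash)
    n++;
  return n;
}
#endif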
476 /* We don't want a lot of buckets, because we rarely have very many
477 things stored in the hash table, and a lot of buckets slows
478 down a lot of loops that happen frequently. */
480 #define HASH_SIZE (1 << HASH_SHIFT)
481 #define HASH_MASK (HASH_SIZE - 1)
483 /* Compute hash code of X in mode M. Special-case case where X is a pseudo
484 register (hard registers may require `do_not_record' to be set). */
#define HASH(X, M) \
487 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
488 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
489 : canon_hash (X, M)) & HASH_MASK)
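/* A sketch of what the pseudo-register fast path above buys: two pseudos
   already known to hold the same value (same quantity) hash to the same
   bucket without going through canon_hash.  The two rtx arguments are
   hypothetical pseudo registers assumed to have equal REG_QTY.  */
#if 0
static void
hash_fast_path_example (rtx reg_a, rtx reg_b)
{
  unsigned h_a = HASH (reg_a, SImode);
  unsigned h_b = HASH (reg_b, SImode);

  /* h_a == h_b: for pseudos the bucket is keyed by the quantity number,
     not the register number.  */
  (void) h_a; (void) h_b;
}
#endif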
491 /* Determine whether register number N is considered a fixed register for the
492 purpose of approximating register costs.
493 It is desirable to replace other regs with fixed regs, to reduce need for non-fixed hard regs.
495 A reg wins if it is either the frame pointer or designated as fixed. */
496 #define FIXED_REGNO_P(N) \
497 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
498 || fixed_regs[N] || global_regs[N])
500 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
501 hard registers and pointers into the frame are the cheapest with a cost
502 of 0. Next come pseudos with a cost of one and other hard registers with
503 a cost of 2. Aside from these special cases, call `rtx_cost'. */
505 #define CHEAP_REGNO(N) \
506 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
507 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
508 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
509 || ((N) < FIRST_PSEUDO_REGISTER \
510 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
512 #define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
513 #define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))
515 /* Get the info associated with register N. */
517 #define GET_CSE_REG_INFO(N) \
518 (((N) == cached_regno && cached_cse_reg_info) \
519 ? cached_cse_reg_info : get_cse_reg_info ((N)))
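/* A minimal usage sketch of the one-entry cache behind the macro above;
   the wrapper function is hypothetical.  Consecutive queries about the
   same register avoid rewalking the reg_hash chain.  */
#if 0
static void
query_reg_info_twice (unsigned int regno)
{
  struct cse_reg_info *a = GET_CSE_REG_INFO (regno); /* may call get_cse_reg_info */
  struct cse_reg_info *b = GET_CSE_REG_INFO (regno); /* served from cached_cse_reg_info */

  /* a == b; the first call set cached_regno and cached_cse_reg_info.  */
  (void) a; (void) b;
}
#endif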
521 /* Get the number of times this register has been updated in this
524 #define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
526 /* Get the point at which REG was recorded in the table. */
528 #define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
530 /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
533 #define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)
535 /* Get the quantity number for REG. */
537 #define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
539 /* Determine if the quantity number for register X represents a valid index
540 into the qty_table. */
542 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
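/* A sketch of how the validity test above is meant to be read; the
   register number is made up for the example.  */
#if 0
static void
qty_valid_example (void)
{
  unsigned int regno = 42;

  /* Before make_new_qty is called for REGNO, REG_QTY (regno) == regno and
     the test below is false, so qty_table must not be indexed with it.
     After make_new_qty the quantity is >= max_reg and the test holds.  */
  if (REGNO_QTY_VALID_P (regno))
    {
      struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
      (void) ent;
    }
}
#endif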
544 static struct table_elt *table[HASH_SIZE];
546 /* Chain of `struct table_elt's made so far for this function
547 but currently removed from the table. */
549 static struct table_elt *free_element_chain;
551 /* Number of `struct table_elt' structures made so far for this function. */
553 static int n_elements_made;
555 /* Maximum value `n_elements_made' has had so far in this compilation
556 for functions previously processed. */
558 static int max_elements_made;
560 /* Surviving equivalence class when two equivalence classes are merged
561 by recording the effects of a jump in the last insn. Zero if the
562 last insn was not a conditional jump. */
564 static struct table_elt *last_jump_equiv_class;
566 /* Set to the cost of a constant pool reference if one was found for a
567 symbolic constant. If this was found, it means we should try to
568 convert constants into constant pool entries if they don't fit in the insn. */
571 static int constant_pool_entries_cost;
572 static int constant_pool_entries_regcost;
574 /* This data describes a block that will be processed by cse_basic_block. */
576 struct cse_basic_block_data
578 /* Lowest CUID value of insns in block. */
580 /* Highest CUID value of insns in block. */
582 /* Total number of SETs in block. */
584 /* Last insn in the block. */
586 /* Size of current branch path, if any. */
588 /* Current branch path, indicating which branches will be taken. */
591 /* The branch insn. */
593 /* Whether it should be taken or not. AROUND is the same as taken
594 except that it is used when the destination label is not preceded by a BARRIER. */
596 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
600 static bool fixed_base_plus_p (rtx x);
601 static int notreg_cost (rtx, enum rtx_code);
602 static int approx_reg_cost_1 (rtx *, void *);
603 static int approx_reg_cost (rtx);
604 static int preferable (int, int, int, int);
605 static void new_basic_block (void);
606 static void make_new_qty (unsigned int, enum machine_mode);
607 static void make_regs_eqv (unsigned int, unsigned int);
608 static void delete_reg_equiv (unsigned int);
609 static int mention_regs (rtx);
610 static int insert_regs (rtx, struct table_elt *, int);
611 static void remove_from_table (struct table_elt *, unsigned);
612 static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
613 static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
614 static rtx lookup_as_function (rtx, enum rtx_code);
615 static struct table_elt *insert (rtx, struct table_elt *, unsigned,
617 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
618 static void invalidate (rtx, enum machine_mode);
619 static int cse_rtx_varies_p (rtx, int);
620 static void remove_invalid_refs (unsigned int);
621 static void remove_invalid_subreg_refs (unsigned int, unsigned int,
623 static void rehash_using_reg (rtx);
624 static void invalidate_memory (void);
625 static void invalidate_for_call (void);
626 static rtx use_related_value (rtx, struct table_elt *);
627 static unsigned canon_hash (rtx, enum machine_mode);
628 static unsigned canon_hash_string (const char *);
629 static unsigned safe_hash (rtx, enum machine_mode);
630 static int exp_equiv_p (rtx, rtx, int, int);
631 static rtx canon_reg (rtx, rtx);
632 static void find_best_addr (rtx, rtx *, enum machine_mode);
633 static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
635 enum machine_mode *);
636 static rtx fold_rtx (rtx, rtx);
637 static rtx equiv_constant (rtx);
638 static void record_jump_equiv (rtx, int);
639 static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
641 static void cse_insn (rtx, rtx);
642 static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
644 static int addr_affects_sp_p (rtx);
645 static void invalidate_from_clobbers (rtx);
646 static rtx cse_process_notes (rtx, rtx);
647 static void cse_around_loop (rtx);
648 static void invalidate_skipped_set (rtx, rtx, void *);
649 static void invalidate_skipped_block (rtx);
650 static void cse_check_loop_start (rtx, rtx, void *);
651 static void cse_set_around_loop (rtx, rtx, rtx);
652 static rtx cse_basic_block (rtx, rtx, struct branch_path *, int);
653 static void count_reg_usage (rtx, int *, int);
654 static int check_for_label_ref (rtx *, void *);
655 extern void dump_class (struct table_elt*);
656 static struct cse_reg_info * get_cse_reg_info (unsigned int);
657 static int check_dependence (rtx *, void *);
659 static void flush_hash_table (void);
660 static bool insn_live_p (rtx, int *);
661 static bool set_live_p (rtx, rtx, int *);
662 static bool dead_libcall_p (rtx, int *);
663 static int cse_change_cc_mode (rtx *, void *);
664 static void cse_change_cc_mode_insns (rtx, rtx, rtx);
665 static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
667 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
668 virtual regs here because the simplify_*_operation routines are called
669 by integrate.c, which is called before virtual register instantiation. */
672 fixed_base_plus_p (rtx x)
674 switch (GET_CODE (x))
677 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
679 if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
681 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
682 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
687 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
689 return fixed_base_plus_p (XEXP (x, 0));
699 /* Dump the expressions in the equivalence class indicated by CLASSP.
700 This function is used only for debugging. */
702 dump_class (struct table_elt *classp)
704 struct table_elt *elt;
706 fprintf (stderr, "Equivalence chain for ");
707 print_rtl (stderr, classp->exp);
708 fprintf (stderr, ": \n");
710 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
712 print_rtl (stderr, elt->exp);
713 fprintf (stderr, "\n");
717 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
720 approx_reg_cost_1 (rtx *xp, void *data)
725 if (x && GET_CODE (x) == REG)
727 unsigned int regno = REGNO (x);
729 if (! CHEAP_REGNO (regno))
731 if (regno < FIRST_PSEUDO_REGISTER)
733 if (SMALL_REGISTER_CLASSES)
745 /* Return an estimate of the cost of the registers used in an rtx.
746 This is mostly the number of different REG expressions in the rtx;
747 however for some exceptions like fixed registers we use a cost of
748 0. If any other hard register reference occurs, return MAX_COST. */
751 approx_reg_cost (rtx x)
755 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
761 /* Return a negative value if an rtx A, whose costs are given by COST_A
762 and REGCOST_A, is more desirable than an rtx B.
763 Return a positive value if A is less desirable, or 0 if the two are
766 preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
768 /* First, get rid of cases involving expressions that are entirely
770 if (cost_a != cost_b)
772 if (cost_a == MAX_COST)
774 if (cost_b == MAX_COST)
778 /* Avoid extending lifetimes of hardregs. */
779 if (regcost_a != regcost_b)
781 if (regcost_a == MAX_COST)
783 if (regcost_b == MAX_COST)
787 /* Normal operation costs take precedence. */
788 if (cost_a != cost_b)
789 return cost_a - cost_b;
790 /* Only if these are identical consider effects on register pressure. */
791 if (regcost_a != regcost_b)
792 return regcost_a - regcost_b;
796 /* Internal function, to compute cost when X is not a register; called
797 from COST macro to keep it simple. */
800 notreg_cost (rtx x, enum rtx_code outer)
802 return ((GET_CODE (x) == SUBREG
803 && GET_CODE (SUBREG_REG (x)) == REG
804 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
805 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
806 && (GET_MODE_SIZE (GET_MODE (x))
807 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
808 && subreg_lowpart_p (x)
809 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
810 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
812 : rtx_cost (x, outer) * 2);
815 /* Return an estimate of the cost of computing rtx X.
816 One use is in cse, to decide which expression to keep in the hash table.
817 Another is in rtl generation, to pick the cheapest way to multiply.
818 Other uses like the latter are expected in the future. */
821 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
831 /* Compute the default costs of certain things.
832 Note that targetm.rtx_costs can override the defaults. */
838 total = COSTS_N_INSNS (5);
844 total = COSTS_N_INSNS (7);
847 /* Used in loop.c and combine.c as a marker. */
851 total = COSTS_N_INSNS (1);
860 /* If we can't tie these modes, make this expensive. The larger
861 the mode, the more expensive it is. */
862 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
863 return COSTS_N_INSNS (2
864 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
868 if ((*targetm.rtx_costs) (x, code, outer_code, &total))
873 /* Sum the costs of the sub-rtx's, plus cost of this operation,
874 which is already in total. */
876 fmt = GET_RTX_FORMAT (code);
877 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
879 total += rtx_cost (XEXP (x, i), code);
880 else if (fmt[i] == 'E')
881 for (j = 0; j < XVECLEN (x, i); j++)
882 total += rtx_cost (XVECEXP (x, i, j), code);
887 /* Return cost of address expression X.
888 Expect that X is a properly formed address reference. */
891 address_cost (rtx x, enum machine_mode mode)
893 /* The address_cost target hook does not deal with ADDRESSOF nodes. But,
894 during CSE, such nodes are present. Using an ADDRESSOF node which
895 refers to the address of a REG is a good thing because we can then
896 turn (MEM (ADDRESSOF (REG))) into just plain REG. */
898 if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
901 /* We may be asked for cost of various unusual addresses, such as operands
902 of push instruction. It is not worthwhile to complicate writing
903 of the target hook by such cases. */
905 if (!memory_address_p (mode, x))
908 return (*targetm.address_cost) (x);
911 /* If the target doesn't override, compute the cost as with arithmetic. */
914 default_address_cost (rtx x)
916 return rtx_cost (x, MEM);
919 static struct cse_reg_info *
920 get_cse_reg_info (unsigned int regno)
922 struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
923 struct cse_reg_info *p;
925 for (p = *hash_head; p != NULL; p = p->hash_next)
926 if (p->regno == regno)
931 /* Get a new cse_reg_info structure. */
932 if (cse_reg_info_free_list)
934 p = cse_reg_info_free_list;
935 cse_reg_info_free_list = p->next;
938 p = xmalloc (sizeof (struct cse_reg_info));
940 /* Insert into hash table. */
941 p->hash_next = *hash_head;
946 p->reg_in_table = -1;
947 p->subreg_ticked = -1;
950 p->next = cse_reg_info_used_list;
951 cse_reg_info_used_list = p;
952 if (!cse_reg_info_used_list_end)
953 cse_reg_info_used_list_end = p;
956 /* Cache this lookup; we tend to be looking up information about the
957 same register several times in a row. */
958 cached_regno = regno;
959 cached_cse_reg_info = p;
964 /* Clear the hash table and initialize each register with its own quantity,
965 for a new basic block. */
968 new_basic_block (void)
974 /* Clear out hash table state for this pass. */
976 memset (reg_hash, 0, sizeof reg_hash);
978 if (cse_reg_info_used_list)
980 cse_reg_info_used_list_end->next = cse_reg_info_free_list;
981 cse_reg_info_free_list = cse_reg_info_used_list;
982 cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
984 cached_cse_reg_info = 0;
986 CLEAR_HARD_REG_SET (hard_regs_in_table);
988 /* The per-quantity values used to be initialized here, but it is
989 much faster to initialize each as it is made in `make_new_qty'. */
991 for (i = 0; i < HASH_SIZE; i++)
993 struct table_elt *first;
998 struct table_elt *last = first;
1002 while (last->next_same_hash != NULL)
1003 last = last->next_same_hash;
1005 /* Now relink this entire hash chain into
1006 the free element list. */
1008 last->next_same_hash = free_element_chain;
1009 free_element_chain = first;
1019 /* Say that register REG contains a quantity in mode MODE not in any
1020 register before and initialize that quantity. */
1023 make_new_qty (unsigned int reg, enum machine_mode mode)
1026 struct qty_table_elem *ent;
1027 struct reg_eqv_elem *eqv;
1029 if (next_qty >= max_qty)
1032 q = REG_QTY (reg) = next_qty++;
1033 ent = &qty_table[q];
1034 ent->first_reg = reg;
1035 ent->last_reg = reg;
1037 ent->const_rtx = ent->const_insn = NULL_RTX;
1038 ent->comparison_code = UNKNOWN;
1040 eqv = &reg_eqv_table[reg];
1041 eqv->next = eqv->prev = -1;
1044 /* Make reg NEW equivalent to reg OLD.
1045 OLD is not changing; NEW is. */
1048 make_regs_eqv (unsigned int new, unsigned int old)
1050 unsigned int lastr, firstr;
1051 int q = REG_QTY (old);
1052 struct qty_table_elem *ent;
1054 ent = &qty_table[q];
1056 /* Nothing should become eqv until it has a "non-invalid" qty number. */
1057 if (! REGNO_QTY_VALID_P (old))
1061 firstr = ent->first_reg;
1062 lastr = ent->last_reg;
1064 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
1065 hard regs. Among pseudos, if NEW will live longer than any other reg
1066 of the same qty, and that is beyond the current basic block,
1067 make it the new canonical replacement for this qty. */
1068 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1069 /* Certain fixed registers might be of the class NO_REGS. This means
1070 that not only can they not be allocated by the compiler, but
1071 they cannot be used in substitutions or canonicalizations
1073 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1074 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1075 || (new >= FIRST_PSEUDO_REGISTER
1076 && (firstr < FIRST_PSEUDO_REGISTER
1077 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1078 || (uid_cuid[REGNO_FIRST_UID (new)]
1079 < cse_basic_block_start))
1080 && (uid_cuid[REGNO_LAST_UID (new)]
1081 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1083 reg_eqv_table[firstr].prev = new;
1084 reg_eqv_table[new].next = firstr;
1085 reg_eqv_table[new].prev = -1;
1086 ent->first_reg = new;
1090 /* If NEW is a hard reg (known to be non-fixed), insert at end.
1091 Otherwise, insert before any non-fixed hard regs that are at the
1092 end. Registers of class NO_REGS cannot be used as an
1093 equivalent for anything. */
1094 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1095 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1096 && new >= FIRST_PSEUDO_REGISTER)
1097 lastr = reg_eqv_table[lastr].prev;
1098 reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1099 if (reg_eqv_table[lastr].next >= 0)
1100 reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1102 qty_table[q].last_reg = new;
1103 reg_eqv_table[lastr].next = new;
1104 reg_eqv_table[new].prev = lastr;
1108 /* Remove REG from its equivalence class. */
1111 delete_reg_equiv (unsigned int reg)
1113 struct qty_table_elem *ent;
1114 int q = REG_QTY (reg);
1117 /* If invalid, do nothing. */
1121 ent = &qty_table[q];
1123 p = reg_eqv_table[reg].prev;
1124 n = reg_eqv_table[reg].next;
1127 reg_eqv_table[n].prev = p;
1131 reg_eqv_table[p].next = n;
1135 REG_QTY (reg) = reg;
1138 /* Remove any invalid expressions from the hash table
1139 that refer to any of the registers contained in expression X.
1141 Make sure that newly inserted references to those registers
1142 as subexpressions will be considered valid.
1144 mention_regs is not called when a register itself
1145 is being stored in the table.
1147 Return 1 if we have done something that may have changed the hash code
1151 mention_regs (rtx x)
1161 code = GET_CODE (x);
1164 unsigned int regno = REGNO (x);
1165 unsigned int endregno
1166 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1167 : hard_regno_nregs[regno][GET_MODE (x)]);
1170 for (i = regno; i < endregno; i++)
1172 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1173 remove_invalid_refs (i);
1175 REG_IN_TABLE (i) = REG_TICK (i);
1176 SUBREG_TICKED (i) = -1;
1182 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1183 pseudo if they don't use overlapping words. We handle only pseudos
1184 here for simplicity. */
1185 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1186 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1188 unsigned int i = REGNO (SUBREG_REG (x));
1190 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1192 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1193 the last store to this register really stored into this
1194 subreg, then remove the memory of this subreg.
1195 Otherwise, remove any memory of the entire register and
1196 all its subregs from the table. */
1197 if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1198 || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1199 remove_invalid_refs (i);
1201 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1204 REG_IN_TABLE (i) = REG_TICK (i);
1205 SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1209 /* If X is a comparison or a COMPARE and either operand is a register
1210 that does not have a quantity, give it one. This is so that a later
1211 call to record_jump_equiv won't cause X to be assigned a different
1212 hash code and not found in the table after that call.
1214 It is not necessary to do this here, since rehash_using_reg can
1215 fix up the table later, but doing this here eliminates the need to
1216 call that expensive function in the most common case where the only
1217 use of the register is in the comparison. */
1219 if (code == COMPARE || COMPARISON_P (x))
1221 if (GET_CODE (XEXP (x, 0)) == REG
1222 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1223 if (insert_regs (XEXP (x, 0), NULL, 0))
1225 rehash_using_reg (XEXP (x, 0));
1229 if (GET_CODE (XEXP (x, 1)) == REG
1230 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1231 if (insert_regs (XEXP (x, 1), NULL, 0))
1233 rehash_using_reg (XEXP (x, 1));
1238 fmt = GET_RTX_FORMAT (code);
1239 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1241 changed |= mention_regs (XEXP (x, i));
1242 else if (fmt[i] == 'E')
1243 for (j = 0; j < XVECLEN (x, i); j++)
1244 changed |= mention_regs (XVECEXP (x, i, j));
1249 /* Update the register quantities for inserting X into the hash table
1250 with a value equivalent to CLASSP.
1251 (If the class does not contain a REG, it is irrelevant.)
1252 If MODIFIED is nonzero, X is a destination; it is being modified.
1253 Note that delete_reg_equiv should be called on a register
1254 before insert_regs is done on that register with MODIFIED != 0.
1256 Nonzero value means that elements of reg_qty have changed
1257 so X's hash code may be different. */
1260 insert_regs (rtx x, struct table_elt *classp, int modified)
1262 if (GET_CODE (x) == REG)
1264 unsigned int regno = REGNO (x);
1267 /* If REGNO is in the equivalence table already but is of the
1268 wrong mode for that equivalence, don't do anything here. */
1270 qty_valid = REGNO_QTY_VALID_P (regno);
1273 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1275 if (ent->mode != GET_MODE (x))
1279 if (modified || ! qty_valid)
1282 for (classp = classp->first_same_value;
1284 classp = classp->next_same_value)
1285 if (GET_CODE (classp->exp) == REG
1286 && GET_MODE (classp->exp) == GET_MODE (x))
1288 make_regs_eqv (regno, REGNO (classp->exp));
1292 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1293 than REG_IN_TABLE to find out if there was only a single preceding
1294 invalidation - for the SUBREG - or another one, which would be
1295 for the full register. However, if we find here that REG_TICK
1296 indicates that the register is invalid, it means that it has
1297 been invalidated in a separate operation. The SUBREG might be used
1298 now (then this is a recursive call), or we might use the full REG
1299 now and a SUBREG of it later. So bump up REG_TICK so that
1300 mention_regs will do the right thing. */
1302 && REG_IN_TABLE (regno) >= 0
1303 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1305 make_new_qty (regno, GET_MODE (x));
1312 /* If X is a SUBREG, we will likely be inserting the inner register in the
1313 table. If that register doesn't have an assigned quantity number at
1314 this point but does later, the insertion that we will be doing now will
1315 not be accessible because its hash code will have changed. So assign
1316 a quantity number now. */
1318 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1319 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1321 insert_regs (SUBREG_REG (x), NULL, 0);
1326 return mention_regs (x);
1329 /* Look in or update the hash table. */
1331 /* Remove table element ELT from use in the table.
1332 HASH is its hash code, made using the HASH macro.
1333 It's an argument because often that is known in advance
1334 and we save much time not recomputing it. */
1337 remove_from_table (struct table_elt *elt, unsigned int hash)
1342 /* Mark this element as removed. See cse_insn. */
1343 elt->first_same_value = 0;
1345 /* Remove the table element from its equivalence class. */
1348 struct table_elt *prev = elt->prev_same_value;
1349 struct table_elt *next = elt->next_same_value;
1352 next->prev_same_value = prev;
1355 prev->next_same_value = next;
1358 struct table_elt *newfirst = next;
1361 next->first_same_value = newfirst;
1362 next = next->next_same_value;
1367 /* Remove the table element from its hash bucket. */
1370 struct table_elt *prev = elt->prev_same_hash;
1371 struct table_elt *next = elt->next_same_hash;
1374 next->prev_same_hash = prev;
1377 prev->next_same_hash = next;
1378 else if (table[hash] == elt)
1382 /* This entry is not in the proper hash bucket. This can happen
1383 when two classes were merged by `merge_equiv_classes'. Search
1384 for the hash bucket that it heads. This happens only very
1385 rarely, so the cost is acceptable. */
1386 for (hash = 0; hash < HASH_SIZE; hash++)
1387 if (table[hash] == elt)
1392 /* Remove the table element from its related-value circular chain. */
1394 if (elt->related_value != 0 && elt->related_value != elt)
1396 struct table_elt *p = elt->related_value;
1398 while (p->related_value != elt)
1399 p = p->related_value;
1400 p->related_value = elt->related_value;
1401 if (p->related_value == p)
1402 p->related_value = 0;
1405 /* Now add it to the free element chain. */
1406 elt->next_same_hash = free_element_chain;
1407 free_element_chain = elt;
1410 /* Look up X in the hash table and return its table element,
1411 or 0 if X is not in the table.
1413 MODE is the machine-mode of X, or if X is an integer constant
1414 with VOIDmode then MODE is the mode with which X will be used.
1416 Here we are satisfied to find an expression whose tree structure looks like X. */
1419 static struct table_elt *
1420 lookup (rtx x, unsigned int hash, enum machine_mode mode)
1422 struct table_elt *p;
1424 for (p = table[hash]; p; p = p->next_same_hash)
1425 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1426 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1432 /* Like `lookup' but don't care whether the table element uses invalid regs.
1433 Also ignore discrepancies in the machine mode of a register. */
1435 static struct table_elt *
1436 lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
1438 struct table_elt *p;
1440 if (GET_CODE (x) == REG)
1442 unsigned int regno = REGNO (x);
1444 /* Don't check the machine mode when comparing registers;
1445 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1446 for (p = table[hash]; p; p = p->next_same_hash)
1447 if (GET_CODE (p->exp) == REG
1448 && REGNO (p->exp) == regno)
1453 for (p = table[hash]; p; p = p->next_same_hash)
1454 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1461 /* Look for an expression equivalent to X and with code CODE.
1462 If one is found, return that expression. */
1465 lookup_as_function (rtx x, enum rtx_code code)
1468 = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));
1470 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1471 long as we are narrowing. So if we looked in vain for a mode narrower
1472 than word_mode before, look for word_mode now. */
1473 if (p == 0 && code == CONST_INT
1474 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1477 PUT_MODE (x, word_mode);
1478 p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
1484 for (p = p->first_same_value; p; p = p->next_same_value)
1485 if (GET_CODE (p->exp) == code
1486 /* Make sure this is a valid entry in the table. */
1487 && exp_equiv_p (p->exp, p->exp, 1, 0))
1493 /* Insert X in the hash table, assuming HASH is its hash code
1494 and CLASSP is an element of the class it should go in
1495 (or 0 if a new class should be made).
1496 It is inserted at the proper position to keep the class in
1497 the order cheapest first.
1499 MODE is the machine-mode of X, or if X is an integer constant
1500 with VOIDmode then MODE is the mode with which X will be used.
1502 For elements of equal cheapness, the most recent one
1503 goes in front, except that the first element in the list
1504 remains first unless a cheaper element is added. The order of
1505 pseudo-registers does not matter, as canon_reg will be called to
1506 find the cheapest when a register is retrieved from the table.
1508 The in_memory field in the hash table element is set to 0.
1509 The caller must set it nonzero if appropriate.
1511 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1512 and if insert_regs returns a nonzero value
1513 you must then recompute its hash code before calling here.
1515 If necessary, update table showing constant values of quantities. */
1517 #define CHEAPER(X, Y) \
1518 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
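/* A few hypothetical cost pairs run through the ordering that
   `preferable' (and hence CHEAPER) implements: plain cost decides first,
   register cost only breaks ties, and MAX_COST loses to anything
   finite.  */
#if 0
static void
preferable_examples (void)
{
  int r1 = preferable (2, 5, 3, 0);         /* < 0: lower cost wins despite regcost */
  int r2 = preferable (3, 1, 3, 4);         /* < 0: equal cost, lower regcost wins */
  int r3 = preferable (MAX_COST, 0, 7, 9);  /* > 0: MAX_COST is always worse */
  (void) r1; (void) r2; (void) r3;
}
#endif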
1520 static struct table_elt *
1521 insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
1523 struct table_elt *elt;
1525 /* If X is a register and we haven't made a quantity for it,
1526 something is wrong. */
1527 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1530 /* If X is a hard register, show it is being put in the table. */
1531 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1533 unsigned int regno = REGNO (x);
1534 unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
1537 for (i = regno; i < endregno; i++)
1538 SET_HARD_REG_BIT (hard_regs_in_table, i);
1541 /* Put an element for X into the right hash bucket. */
1543 elt = free_element_chain;
1545 free_element_chain = elt->next_same_hash;
1549 elt = xmalloc (sizeof (struct table_elt));
1553 elt->canon_exp = NULL_RTX;
1554 elt->cost = COST (x);
1555 elt->regcost = approx_reg_cost (x);
1556 elt->next_same_value = 0;
1557 elt->prev_same_value = 0;
1558 elt->next_same_hash = table[hash];
1559 elt->prev_same_hash = 0;
1560 elt->related_value = 0;
1563 elt->is_const = (CONSTANT_P (x)
1564 /* GNU C++ takes advantage of this for `this'
1565 (and other const values). */
1566 || (GET_CODE (x) == REG
1567 && RTX_UNCHANGING_P (x)
1568 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1569 || fixed_base_plus_p (x));
1572 table[hash]->prev_same_hash = elt;
1575 /* Put it into the proper value-class. */
1578 classp = classp->first_same_value;
1579 if (CHEAPER (elt, classp))
1580 /* Insert at the head of the class. */
1582 struct table_elt *p;
1583 elt->next_same_value = classp;
1584 classp->prev_same_value = elt;
1585 elt->first_same_value = elt;
1587 for (p = classp; p; p = p->next_same_value)
1588 p->first_same_value = elt;
1592 /* Insert not at head of the class. */
1593 /* Put it after the last element cheaper than X. */
1594 struct table_elt *p, *next;
1596 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1599 /* Put it after P and before NEXT. */
1600 elt->next_same_value = next;
1602 next->prev_same_value = elt;
1604 elt->prev_same_value = p;
1605 p->next_same_value = elt;
1606 elt->first_same_value = classp;
1610 elt->first_same_value = elt;
1612 /* If this is a constant being set equivalent to a register or a register
1613 being set equivalent to a constant, note the constant equivalence.
1615 If this is a constant, it cannot be equivalent to a different constant,
1616 and a constant is the only thing that can be cheaper than a register. So
1617 we know the register is the head of the class (before the constant was
1620 If this is a register that is not already known equivalent to a
1621 constant, we must check the entire class.
1623 If this is a register that is already known equivalent to an insn,
1624 update the qtys `const_insn' to show that `this_insn' is the latest
1625 insn making that quantity equivalent to the constant. */
1627 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1628 && GET_CODE (x) != REG)
1630 int exp_q = REG_QTY (REGNO (classp->exp));
1631 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1633 exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1634 exp_ent->const_insn = this_insn;
1637 else if (GET_CODE (x) == REG
1639 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1642 struct table_elt *p;
1644 for (p = classp; p != 0; p = p->next_same_value)
1646 if (p->is_const && GET_CODE (p->exp) != REG)
1648 int x_q = REG_QTY (REGNO (x));
1649 struct qty_table_elem *x_ent = &qty_table[x_q];
1652 = gen_lowpart (GET_MODE (x), p->exp);
1653 x_ent->const_insn = this_insn;
1659 else if (GET_CODE (x) == REG
1660 && qty_table[REG_QTY (REGNO (x))].const_rtx
1661 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1662 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1664 /* If this is a constant with symbolic value,
1665 and it has a term with an explicit integer value,
1666 link it up with related expressions. */
1667 if (GET_CODE (x) == CONST)
1669 rtx subexp = get_related_value (x);
1671 struct table_elt *subelt, *subelt_prev;
1675 /* Get the integer-free subexpression in the hash table. */
1676 subhash = safe_hash (subexp, mode) & HASH_MASK;
1677 subelt = lookup (subexp, subhash, mode);
1679 subelt = insert (subexp, NULL, subhash, mode);
1680 /* Initialize SUBELT's circular chain if it has none. */
1681 if (subelt->related_value == 0)
1682 subelt->related_value = subelt;
1683 /* Find the element in the circular chain that precedes SUBELT. */
1684 subelt_prev = subelt;
1685 while (subelt_prev->related_value != subelt)
1686 subelt_prev = subelt_prev->related_value;
1687 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1688 This way the element that follows SUBELT is the oldest one. */
1689 elt->related_value = subelt_prev->related_value;
1690 subelt_prev->related_value = elt;
1697 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1698 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1699 the two classes equivalent.
1701 CLASS1 will be the surviving class; CLASS2 should not be used after this
1704 Any invalid entries in CLASS2 will not be copied. */
1707 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1709 struct table_elt *elt, *next, *new;
1711 /* Ensure we start with the head of the classes. */
1712 class1 = class1->first_same_value;
1713 class2 = class2->first_same_value;
1715 /* If they were already equal, forget it. */
1716 if (class1 == class2)
1719 for (elt = class2; elt; elt = next)
1723 enum machine_mode mode = elt->mode;
1725 next = elt->next_same_value;
1727 /* Remove old entry, make a new one in CLASS1's class.
1728 Don't do this for invalid entries as we cannot find their
1729 hash code (it also isn't necessary). */
1730 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1732 hash_arg_in_memory = 0;
1733 hash = HASH (exp, mode);
1735 if (GET_CODE (exp) == REG)
1736 delete_reg_equiv (REGNO (exp));
1738 remove_from_table (elt, hash);
1740 if (insert_regs (exp, class1, 0))
1742 rehash_using_reg (exp);
1743 hash = HASH (exp, mode);
1745 new = insert (exp, class1, hash, mode);
1746 new->in_memory = hash_arg_in_memory;
1751 /* Flush the entire hash table. */
1754 flush_hash_table (void)
1757 struct table_elt *p;
1759 for (i = 0; i < HASH_SIZE; i++)
1760 for (p = table[i]; p; p = table[i])
1762 /* Note that invalidate can remove elements
1763 after P in the current hash chain. */
1764 if (GET_CODE (p->exp) == REG)
1765 invalidate (p->exp, p->mode);
1767 remove_from_table (p, i);
1771 /* Function called for each rtx to check whether true dependence exist. */
1772 struct check_dependence_data
1774 enum machine_mode mode;
1780 check_dependence (rtx *x, void *data)
1782 struct check_dependence_data *d = (struct check_dependence_data *) data;
1783 if (*x && GET_CODE (*x) == MEM)
1784 return canon_true_dependence (d->exp, d->mode, d->addr, *x,
1790 /* Remove from the hash table, or mark as invalid, all expressions whose
1791 values could be altered by storing in X. X is a register, a subreg, or
1792 a memory reference with nonvarying address (because, when a memory
1793 reference with a varying address is stored in, all memory references are
1794 removed by invalidate_memory so specific invalidation is superfluous).
1795 FULL_MODE, if not VOIDmode, indicates that this much should be
1796 invalidated instead of just the amount indicated by the mode of X. This
1797 is only used for bitfield stores into memory.
1799 A nonvarying address may be just a register or just a symbol reference,
1800 or it may be either of those plus a numeric offset. */
1803 invalidate (rtx x, enum machine_mode full_mode)
1806 struct table_elt *p;
1809 switch (GET_CODE (x))
1813 /* If X is a register, dependencies on its contents are recorded
1814 through the qty number mechanism. Just change the qty number of
1815 the register, mark it as invalid for expressions that refer to it,
1816 and remove it itself. */
1817 unsigned int regno = REGNO (x);
1818 unsigned int hash = HASH (x, GET_MODE (x));
1820 /* Remove REGNO from any quantity list it might be on and indicate
1821 that its value might have changed. If it is a pseudo, remove its
1822 entry from the hash table.
1824 For a hard register, we do the first two actions above for any
1825 additional hard registers corresponding to X. Then, if any of these
1826 registers are in the table, we must remove any REG entries that
1827 overlap these registers. */
1829 delete_reg_equiv (regno);
1831 SUBREG_TICKED (regno) = -1;
1833 if (regno >= FIRST_PSEUDO_REGISTER)
1835 /* Because a register can be referenced in more than one mode,
1836 we might have to remove more than one table entry. */
1837 struct table_elt *elt;
1839 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1840 remove_from_table (elt, hash);
1844 HOST_WIDE_INT in_table
1845 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1846 unsigned int endregno
1847 = regno + hard_regno_nregs[regno][GET_MODE (x)];
1848 unsigned int tregno, tendregno, rn;
1849 struct table_elt *p, *next;
1851 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1853 for (rn = regno + 1; rn < endregno; rn++)
1855 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1856 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1857 delete_reg_equiv (rn);
1859 SUBREG_TICKED (rn) = -1;
1863 for (hash = 0; hash < HASH_SIZE; hash++)
1864 for (p = table[hash]; p; p = next)
1866 next = p->next_same_hash;
1868 if (GET_CODE (p->exp) != REG
1869 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1872 tregno = REGNO (p->exp);
1874 = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
1875 if (tendregno > regno && tregno < endregno)
1876 remove_from_table (p, hash);
1883 invalidate (SUBREG_REG (x), VOIDmode);
1887 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1888 invalidate (XVECEXP (x, 0, i), VOIDmode);
1892 /* This is part of a disjoint return value; extract the location in
1893 question ignoring the offset. */
1894 invalidate (XEXP (x, 0), VOIDmode);
1898 addr = canon_rtx (get_addr (XEXP (x, 0)));
1899 /* Calculate the canonical version of X here so that
1900 true_dependence doesn't generate new RTL for X on each call. */
1903 /* Remove all hash table elements that refer to overlapping pieces of memory. */
1905 if (full_mode == VOIDmode)
1906 full_mode = GET_MODE (x);
1908 for (i = 0; i < HASH_SIZE; i++)
1910 struct table_elt *next;
1912 for (p = table[i]; p; p = next)
1914 next = p->next_same_hash;
1917 struct check_dependence_data d;
1919 /* Just canonicalize the expression once;
1920 otherwise each time we call invalidate
1921 true_dependence will canonicalize the
1922 expression again. */
1924 p->canon_exp = canon_rtx (p->exp);
1928 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1929 remove_from_table (p, i);
1940 /* Remove all expressions that refer to register REGNO,
1941 since they are already invalid, and we are about to
1942 mark that register valid again and don't want the old
1943 expressions to reappear as valid. */
1946 remove_invalid_refs (unsigned int regno)
1949 struct table_elt *p, *next;
1951 for (i = 0; i < HASH_SIZE; i++)
1952 for (p = table[i]; p; p = next)
1954 next = p->next_same_hash;
1955 if (GET_CODE (p->exp) != REG
1956 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1957 remove_from_table (p, i);
1961 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1964 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1965 enum machine_mode mode)
1968 struct table_elt *p, *next;
1969 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1971 for (i = 0; i < HASH_SIZE; i++)
1972 for (p = table[i]; p; p = next)
1975 next = p->next_same_hash;
1977 if (GET_CODE (exp) != REG
1978 && (GET_CODE (exp) != SUBREG
1979 || GET_CODE (SUBREG_REG (exp)) != REG
1980 || REGNO (SUBREG_REG (exp)) != regno
1981 || (((SUBREG_BYTE (exp)
1982 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1983 && SUBREG_BYTE (exp) <= end))
1984 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1985 remove_from_table (p, i);
1989 /* Recompute the hash codes of any valid entries in the hash table that
1990 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1992 This is called when we make a jump equivalence. */
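/* Roughly why this is needed: canon_hash folds a register's quantity
   number into the hash (see the REG case in canon_hash below), so once a
   jump equivalence changes REG_QTY for X, a non-REG entry that mentions X
   may belong in a different chain; the loop below recomputes the hash and
   relinks such entries.  */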
1995 rehash_using_reg (rtx x)
1998 struct table_elt *p, *next;
2001 if (GET_CODE (x) == SUBREG)
2004 /* If X is not a register or if the register is known not to be in any
2005 valid entries in the table, we have no work to do. */
2007 if (GET_CODE (x) != REG
2008 || REG_IN_TABLE (REGNO (x)) < 0
2009 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2012 /* Scan all hash chains looking for valid entries that mention X.
2013 If we find one and it is in the wrong hash chain, move it. We can skip
2014 objects that are registers, since they are handled specially. */
2016 for (i = 0; i < HASH_SIZE; i++)
2017 for (p = table[i]; p; p = next)
2019 next = p->next_same_hash;
2020 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
2021 && exp_equiv_p (p->exp, p->exp, 1, 0)
2022 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2024 if (p->next_same_hash)
2025 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2027 if (p->prev_same_hash)
2028 p->prev_same_hash->next_same_hash = p->next_same_hash;
2030 table[i] = p->next_same_hash;
2032 p->next_same_hash = table[hash];
2033 p->prev_same_hash = 0;
2035 table[hash]->prev_same_hash = p;
2041 /* Remove from the hash table any expression that is a call-clobbered
2042 register. Also update their TICK values. */
2045 invalidate_for_call (void)
2047 unsigned int regno, endregno;
2050 struct table_elt *p, *next;
2053 /* Go through all the hard registers. For each that is clobbered in
2054 a CALL_INSN, remove the register from quantity chains and update
2055 reg_tick if defined. Also see if any of these registers is currently in the table. */
2058 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2059 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2061 delete_reg_equiv (regno);
2062 if (REG_TICK (regno) >= 0)
2065 SUBREG_TICKED (regno) = -1;
2068 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2071 /* In the case where we have no call-clobbered hard registers in the
2072 table, we are done. Otherwise, scan the table and remove any
2073 entry that overlaps a call-clobbered register. */
2076 for (hash = 0; hash < HASH_SIZE; hash++)
2077 for (p = table[hash]; p; p = next)
2079 next = p->next_same_hash;
2081 if (GET_CODE (p->exp) != REG
2082 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2085 regno = REGNO (p->exp);
2086 endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];
2088 for (i = regno; i < endregno; i++)
2089 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2091 remove_from_table (p, hash);
2097 /* Given an expression X of type CONST,
2098 and ELT which is its table entry (or 0 if it
2099 is not in the hash table),
2100 return an alternate expression for X as a register plus integer.
2101 If none can be found, return 0. */
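/* For example (values hypothetical): if X is
   (const (plus (symbol_ref "tbl") (const_int 12))) and some register R is
   known to hold (const (plus (symbol_ref "tbl") (const_int 4))), walking
   the related-value chain lets us return (plus R (const_int 8)) instead of
   rematerializing the whole constant.  */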
2104 use_related_value (rtx x, struct table_elt *elt)
2106 struct table_elt *relt = 0;
2107 struct table_elt *p, *q;
2108 HOST_WIDE_INT offset;
2110 /* First, is there anything related known?
2111 If we have a table element, we can tell from that.
2112 Otherwise, must look it up. */
2114 if (elt != 0 && elt->related_value != 0)
2116 else if (elt == 0 && GET_CODE (x) == CONST)
2118 rtx subexp = get_related_value (x);
2120 relt = lookup (subexp,
2121 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2128 /* Search all related table entries for one that has an
2129 equivalent register. */
2134 /* This loop is strange in that it is executed in two different cases.
2135 The first is when X is already in the table. Then it is searching
2136 the RELATED_VALUE list of X's class (RELT). The second case is when
2137 X is not in the table. Then RELT points to a class for the related value.
2140 Ensure that, whatever case we are in, we ignore classes that have
2141 the same value as X. */
2143 if (rtx_equal_p (x, p->exp))
2146 for (q = p->first_same_value; q; q = q->next_same_value)
2147 if (GET_CODE (q->exp) == REG)
2153 p = p->related_value;
2155 /* We went all the way around, so there is nothing to be found.
2156 Alternatively, perhaps RELT was in the table for some other reason
2157 and it has no related values recorded. */
2158 if (p == relt || p == 0)
2165 offset = (get_integer_term (x) - get_integer_term (p->exp));
2166 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2167 return plus_constant (q->exp, offset);
2170 /* Hash a string. Just add its bytes up. */
2171 static inline unsigned
2172 canon_hash_string (const char *ps)
2175 const unsigned char *p = (const unsigned char *) ps;
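/* In effect this is just the byte sum, roughly

       unsigned hash = 0;
       if (p)
         while (*p)
           hash += *p++;
       return hash;

   so strings that are byte permutations of one another collide, which is
   harmless for a hash table.  */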
2184 /* Hash an rtx. We are careful to make sure the value is never negative.
2185 Equivalent registers hash identically.
2186 MODE is used in hashing for CONST_INTs only;
2187 otherwise the mode of X is used.
2189 Store 1 in do_not_record if any subexpression is volatile.
2191 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2192 which does not have the RTX_UNCHANGING_P bit set.
2194 Note that cse_insn knows that the hash code of a MEM expression
2195 is just (int) MEM plus the hash code of the address. */
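/* Note that for a REG the hash is derived from its quantity number rather
   than its register number (see the REG case below); that is what makes
   "equivalent registers hash identically": two pseudos currently in the
   same quantity contribute the same value to any enclosing hash.  */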
2198 canon_hash (rtx x, enum machine_mode mode)
2205 /* repeat is used to turn tail-recursion into iteration. */
2210 code = GET_CODE (x);
2215 unsigned int regno = REGNO (x);
2218 /* On some machines, we can't record any non-fixed hard register,
2219 because extending its life will cause reload problems. We
2220 consider ap, fp, sp, gp to be fixed for this purpose.
2222 We also consider CCmode registers to be fixed for this purpose;
2223 failure to do so leads to failure to simplify 0<100 type of conditionals.
2226 On all machines, we can't record any global registers.
2227 Nor should we record any register that is in a small
2228 class, as defined by CLASS_LIKELY_SPILLED_P. */
2230 if (regno >= FIRST_PSEUDO_REGISTER)
2232 else if (x == frame_pointer_rtx
2233 || x == hard_frame_pointer_rtx
2234 || x == arg_pointer_rtx
2235 || x == stack_pointer_rtx
2236 || x == pic_offset_table_rtx)
2238 else if (global_regs[regno])
2240 else if (fixed_regs[regno])
2242 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2244 else if (SMALL_REGISTER_CLASSES)
2246 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2257 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2261 /* We handle SUBREG of a REG specially because the underlying
2262 reg changes its hash value with every value change; we don't
2263 want to have to forget unrelated subregs when one subreg changes. */
2266 if (GET_CODE (SUBREG_REG (x)) == REG)
2268 hash += (((unsigned) SUBREG << 7)
2269 + REGNO (SUBREG_REG (x))
2270 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2278 unsigned HOST_WIDE_INT tem = INTVAL (x);
2279 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2284 /* This is like the general case, except that it only counts
2285 the integers representing the constant. */
2286 hash += (unsigned) code + (unsigned) GET_MODE (x);
2287 if (GET_MODE (x) != VOIDmode)
2288 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2290 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2291 + (unsigned) CONST_DOUBLE_HIGH (x));
2299 units = CONST_VECTOR_NUNITS (x);
2301 for (i = 0; i < units; ++i)
2303 elt = CONST_VECTOR_ELT (x, i);
2304 hash += canon_hash (elt, GET_MODE (elt));
2310 /* Assume there is only one rtx object for any given label. */
2312 hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2316 hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2320 /* We don't record if marked volatile or if BLKmode since we don't
2321 know the size of the move. */
2322 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2327 if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
2328 hash_arg_in_memory = 1;
2330 /* Now that we have already found this special case,
2331 might as well speed it up as much as possible. */
2332 hash += (unsigned) MEM;
2337 /* A USE that mentions non-volatile memory needs special
2338 handling since the MEM may be BLKmode which normally
2339 prevents an entry from being made. Pure calls are
2340 marked by a USE which mentions BLKmode memory. */
2341 if (GET_CODE (XEXP (x, 0)) == MEM
2342 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2344 hash += (unsigned) USE;
2347 if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
2348 hash_arg_in_memory = 1;
2350 /* Now that we have already found this special case,
2351 might as well speed it up as much as possible. */
2352 hash += (unsigned) MEM;
2367 case UNSPEC_VOLATILE:
2372 if (MEM_VOLATILE_P (x))
2379 /* We don't want to take the filename and line into account. */
2380 hash += (unsigned) code + (unsigned) GET_MODE (x)
2381 + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2382 + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2383 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2385 if (ASM_OPERANDS_INPUT_LENGTH (x))
2387 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2389 hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2390 GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2391 + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2395 hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2396 x = ASM_OPERANDS_INPUT (x, 0);
2397 mode = GET_MODE (x);
2409 i = GET_RTX_LENGTH (code) - 1;
2410 hash += (unsigned) code + (unsigned) GET_MODE (x);
2411 fmt = GET_RTX_FORMAT (code);
2416 rtx tem = XEXP (x, i);
2418 /* If we are about to do the last recursive call
2419 needed at this level, change it into iteration.
2420 This function is called enough to be worth it. */
2426 hash += canon_hash (tem, 0);
2428 else if (fmt[i] == 'E')
2429 for (j = 0; j < XVECLEN (x, i); j++)
2430 hash += canon_hash (XVECEXP (x, i, j), 0);
2431 else if (fmt[i] == 's')
2432 hash += canon_hash_string (XSTR (x, i));
2433 else if (fmt[i] == 'i')
2435 unsigned tem = XINT (x, i);
2438 else if (fmt[i] == '0' || fmt[i] == 't')
2447 /* Like canon_hash but with no side effects. */
2450 safe_hash (rtx x, enum machine_mode mode)
2452 int save_do_not_record = do_not_record;
2453 int save_hash_arg_in_memory = hash_arg_in_memory;
2454 unsigned hash = canon_hash (x, mode);
2455 hash_arg_in_memory = save_hash_arg_in_memory;
2456 do_not_record = save_do_not_record;
2460 /* Return 1 iff X and Y would canonicalize into the same thing,
2461 without actually constructing the canonicalization of either one.
2462 If VALIDATE is nonzero,
2463 we assume X is an expression being processed from the rtl
2464 and Y was found in the hash table. We check register refs
2465 in Y for being marked as valid.
2467 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2468 that is known to be in the register. Ordinarily, we don't allow them
2469 to match, because letting them match would cause unpredictable results
2470 in all the places that search a hash table chain for an equivalent
2471 for a given value. A possible equivalent that has different structure
2472 has its hash code computed from different data. Whether the hash code
2473 is the same as that of the given value is pure luck. */
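/* As a concrete case of EQUAL_VALUES: if register R is known to hold
   (const_int 4), then exp_equiv_p ((const_int 4), R, 1, 1) can succeed via
   the qty_table lookup below even though the two rtx's have different
   structure and different hash codes, provided R's table entry is still
   valid.  */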
2476 exp_equiv_p (rtx x, rtx y, int validate, int equal_values)
2482 /* Note: it is incorrect to assume an expression is equivalent to itself
2483 if VALIDATE is nonzero. */
2484 if (x == y && !validate)
2486 if (x == 0 || y == 0)
2489 code = GET_CODE (x);
2490 if (code != GET_CODE (y))
2495 /* If X is a constant and Y is a register or vice versa, they may be
2496 equivalent. We only have to validate if Y is a register. */
2497 if (CONSTANT_P (x) && GET_CODE (y) == REG
2498 && REGNO_QTY_VALID_P (REGNO (y)))
2500 int y_q = REG_QTY (REGNO (y));
2501 struct qty_table_elem *y_ent = &qty_table[y_q];
2503 if (GET_MODE (y) == y_ent->mode
2504 && rtx_equal_p (x, y_ent->const_rtx)
2505 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2509 if (CONSTANT_P (y) && code == REG
2510 && REGNO_QTY_VALID_P (REGNO (x)))
2512 int x_q = REG_QTY (REGNO (x));
2513 struct qty_table_elem *x_ent = &qty_table[x_q];
2515 if (GET_MODE (x) == x_ent->mode
2516 && rtx_equal_p (y, x_ent->const_rtx))
2523 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2524 if (GET_MODE (x) != GET_MODE (y))
2535 return XEXP (x, 0) == XEXP (y, 0);
2538 return XSTR (x, 0) == XSTR (y, 0);
2542 unsigned int regno = REGNO (y);
2543 unsigned int endregno
2544 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2545 : hard_regno_nregs[regno][GET_MODE (y)]);
2548 /* If the quantities are not the same, the expressions are not
2549 equivalent. If they are and we are not to validate, they
2550 are equivalent. Otherwise, ensure all regs are up-to-date. */
2552 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2558 for (i = regno; i < endregno; i++)
2559 if (REG_IN_TABLE (i) != REG_TICK (i))
2565 /* For commutative operations, check both orders. */
2573 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2574 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2575 validate, equal_values))
2576 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2577 validate, equal_values)
2578 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2579 validate, equal_values)));
2582 /* We don't use the generic code below because we want to
2583 disregard filename and line numbers. */
2585 /* A volatile asm isn't equivalent to any other. */
2586 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2589 if (GET_MODE (x) != GET_MODE (y)
2590 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2591 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2592 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2593 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2594 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2597 if (ASM_OPERANDS_INPUT_LENGTH (x))
2599 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2600 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2601 ASM_OPERANDS_INPUT (y, i),
2602 validate, equal_values)
2603 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2604 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2614 /* Compare the elements. If any pair of corresponding elements
2615 fail to match, return 0 for the whole thing.
2617 fmt = GET_RTX_FORMAT (code);
2618 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2623 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2628 if (XVECLEN (x, i) != XVECLEN (y, i))
2630 for (j = 0; j < XVECLEN (x, i); j++)
2631 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2632 validate, equal_values))
2637 if (strcmp (XSTR (x, i), XSTR (y, i)))
2642 if (XINT (x, i) != XINT (y, i))
2647 if (XWINT (x, i) != XWINT (y, i))
2663 /* Return 1 if X has a value that can vary even between two
2664 executions of the program. 0 means X can be compared reliably
2665 against certain constants or near-constants. */
2668 cse_rtx_varies_p (rtx x, int from_alias)
2670 /* We need not check for X and the equivalence class being of the same
2671 mode because if X is equivalent to a constant in some mode, it
2672 doesn't vary in any mode. */
2674 if (GET_CODE (x) == REG
2675 && REGNO_QTY_VALID_P (REGNO (x)))
2677 int x_q = REG_QTY (REGNO (x));
2678 struct qty_table_elem *x_ent = &qty_table[x_q];
2680 if (GET_MODE (x) == x_ent->mode
2681 && x_ent->const_rtx != NULL_RTX)
2685 if (GET_CODE (x) == PLUS
2686 && GET_CODE (XEXP (x, 1)) == CONST_INT
2687 && GET_CODE (XEXP (x, 0)) == REG
2688 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2690 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2691 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2693 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2694 && x0_ent->const_rtx != NULL_RTX)
2698 /* This can happen as the result of virtual register instantiation, if
2699 the initial constant is too large to be a valid address. This gives
2700 us a three instruction sequence, load large offset into a register,
2701 load fp minus a constant into a register, then a MEM which is the
2702 sum of the two `constant' registers. */
2703 if (GET_CODE (x) == PLUS
2704 && GET_CODE (XEXP (x, 0)) == REG
2705 && GET_CODE (XEXP (x, 1)) == REG
2706 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2707 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2709 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2710 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2711 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2712 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2714 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2715 && x0_ent->const_rtx != NULL_RTX
2716 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2717 && x1_ent->const_rtx != NULL_RTX)
2721 return rtx_varies_p (x, from_alias);
2724 /* Canonicalize an expression:
2725 replace each register reference inside it
2726 with the "oldest" equivalent register.
2728 If INSN is nonzero and we are replacing a pseudo with a hard register
2729 or vice versa, validate_change is used to ensure that INSN remains valid
2730 after we make our substitution. The calls are made with IN_GROUP nonzero
2731 so apply_change_group must be called upon the outermost return from this
2732 function (unless INSN is zero). The result of apply_change_group can
2733 generally be discarded since the changes we are making are optional. */
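/* For example (register numbers hypothetical): if pseudos 101 and 105 are
   in the same quantity and 101 is the quantity's first_reg, a use of
   (reg 105) is rewritten as (reg 101).  A hard register X, or a pseudo
   whose preferred class is NO_REGS, is left alone, as the code below
   explains.  */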
2736 canon_reg (rtx x, rtx insn)
2745 code = GET_CODE (x);
2764 struct qty_table_elem *ent;
2766 /* Never replace a hard reg, because hard regs can appear
2767 in more than one machine mode, and we must preserve the mode
2768 of each occurrence. Also, some hard regs appear in
2769 MEMs that are shared and mustn't be altered. Don't try to
2770 replace any reg that maps to a reg of class NO_REGS. */
2771 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2772 || ! REGNO_QTY_VALID_P (REGNO (x)))
2775 q = REG_QTY (REGNO (x));
2776 ent = &qty_table[q];
2777 first = ent->first_reg;
2778 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2779 : REGNO_REG_CLASS (first) == NO_REGS ? x
2780 : gen_rtx_REG (ent->mode, first));
2787 fmt = GET_RTX_FORMAT (code);
2788 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2794 rtx new = canon_reg (XEXP (x, i), insn);
2797 /* If replacing pseudo with hard reg or vice versa, ensure the
2798 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2799 if (insn != 0 && new != 0
2800 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2801 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2802 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2803 || (insn_code = recog_memoized (insn)) < 0
2804 || insn_data[insn_code].n_dups > 0))
2805 validate_change (insn, &XEXP (x, i), new, 1);
2809 else if (fmt[i] == 'E')
2810 for (j = 0; j < XVECLEN (x, i); j++)
2811 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2817 /* LOC is a location within INSN that is an operand address (the contents of
2818 a MEM). Find the best equivalent address to use that is valid for this
2821 On most CISC machines, complicated address modes are costly, and rtx_cost
2822 is a good approximation for that cost. However, most RISC machines have
2823 only a few (usually only one) memory reference formats. If an address is
2824 valid at all, it is often just as cheap as any other address. Hence, for
2825 RISC machines, we use `address_cost' to compare the costs of various
2826 addresses. For two addresses of equal cost, choose the one with the
2827 highest `rtx_cost' value as that has the potential of eliminating the
2828 most insns. For equal costs, we choose the first in the equivalence
2829 class. Note that we ignore the fact that pseudo registers are cheaper than
2830 hard registers here because we would also prefer the pseudo registers. */
2833 find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2835 struct table_elt *elt;
2837 struct table_elt *p;
2838 int found_better = 1;
2839 int save_do_not_record = do_not_record;
2840 int save_hash_arg_in_memory = hash_arg_in_memory;
2845 /* Do not try to replace constant addresses or addresses of local and
2846 argument slots. These MEM expressions are made only once and inserted
2847 in many instructions, as well as being used to control symbol table
2848 output. It is not safe to clobber them.
2850 There are some uncommon cases where the address is already in a register
2851 for some reason, but we cannot take advantage of that because we have
2852 no easy way to unshare the MEM. In addition, looking up all stack
2853 addresses is costly. */
2854 if ((GET_CODE (addr) == PLUS
2855 && GET_CODE (XEXP (addr, 0)) == REG
2856 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2857 && (regno = REGNO (XEXP (addr, 0)),
2858 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2859 || regno == ARG_POINTER_REGNUM))
2860 || (GET_CODE (addr) == REG
2861 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2862 || regno == HARD_FRAME_POINTER_REGNUM
2863 || regno == ARG_POINTER_REGNUM))
2864 || GET_CODE (addr) == ADDRESSOF
2865 || CONSTANT_ADDRESS_P (addr))
2868 /* If this address is not simply a register, try to fold it. This will
2869 sometimes simplify the expression. Many simplifications
2870 will not be valid, but some, usually applying the associative rule, will
2871 be valid and produce better code. */
2872 if (GET_CODE (addr) != REG)
2874 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2875 int addr_folded_cost = address_cost (folded, mode);
2876 int addr_cost = address_cost (addr, mode);
2878 if ((addr_folded_cost < addr_cost
2879 || (addr_folded_cost == addr_cost
2880 /* ??? The rtx_cost comparison is left over from an older
2881 version of this code. It is probably no longer helpful. */
2882 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2883 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2884 && validate_change (insn, loc, folded, 0))
2888 /* If this address is not in the hash table, we can't look for equivalences
2889 of the whole address. Also, ignore if volatile. */
2892 hash = HASH (addr, Pmode);
2893 addr_volatile = do_not_record;
2894 do_not_record = save_do_not_record;
2895 hash_arg_in_memory = save_hash_arg_in_memory;
2900 elt = lookup (addr, hash, Pmode);
2904 /* We need to find the best (under the criteria documented above) entry
2905 in the class that is valid. We use the `flag' field to indicate
2906 choices that were invalid and iterate until we can't find a better
2907 one that hasn't already been tried. */
2909 for (p = elt->first_same_value; p; p = p->next_same_value)
2912 while (found_better)
2914 int best_addr_cost = address_cost (*loc, mode);
2915 int best_rtx_cost = (elt->cost + 1) >> 1;
2917 struct table_elt *best_elt = elt;
2920 for (p = elt->first_same_value; p; p = p->next_same_value)
2923 if ((GET_CODE (p->exp) == REG
2924 || exp_equiv_p (p->exp, p->exp, 1, 0))
2925 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2926 || (exp_cost == best_addr_cost
2927 && ((p->cost + 1) >> 1) > best_rtx_cost)))
2930 best_addr_cost = exp_cost;
2931 best_rtx_cost = (p->cost + 1) >> 1;
2938 if (validate_change (insn, loc,
2939 canon_reg (copy_rtx (best_elt->exp),
2948 /* If the address is a binary operation with the first operand a register
2949 and the second a constant, do the same as above, but looking for
2950 equivalences of the register. Then try to simplify before checking for
2951 the best address to use. This catches a few cases: First is when we
2952 have REG+const and the register is another REG+const. We can often merge
2953 the constants and eliminate one insn and one register. It may also be
2954 that a machine has a cheap REG+REG+const. Finally, this improves the
2955 code on the Alpha for unaligned byte stores. */
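  /* For example (register numbers hypothetical): if *LOC is
     (plus (reg 70) (const_int 4)) and (reg 70) is known equivalent to
     (plus (reg 69) (const_int 8)), simplify_gen_binary below can produce
     (plus (reg 69) (const_int 12)), which may be a cheaper address.  */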
2957 if (flag_expensive_optimizations
2958 && ARITHMETIC_P (*loc)
2959 && GET_CODE (XEXP (*loc, 0)) == REG)
2961 rtx op1 = XEXP (*loc, 1);
2964 hash = HASH (XEXP (*loc, 0), Pmode);
2965 do_not_record = save_do_not_record;
2966 hash_arg_in_memory = save_hash_arg_in_memory;
2968 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2972 /* We need to find the best (under the criteria documented above) entry
2973 in the class that is valid. We use the `flag' field to indicate
2974 choices that were invalid and iterate until we can't find a better
2975 one that hasn't already been tried. */
2977 for (p = elt->first_same_value; p; p = p->next_same_value)
2980 while (found_better)
2982 int best_addr_cost = address_cost (*loc, mode);
2983 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2984 struct table_elt *best_elt = elt;
2985 rtx best_rtx = *loc;
2988 /* This is at worst case an O(n^2) algorithm, so limit our search
2989 to the first 32 elements on the list. This avoids trouble
2990 compiling code with very long basic blocks that can easily
2991 call simplify_gen_binary so many times that we run out of
2995 for (p = elt->first_same_value, count = 0;
2997 p = p->next_same_value, count++)
2999 && (GET_CODE (p->exp) == REG
3000 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3002 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3005 new_cost = address_cost (new, mode);
3007 if (new_cost < best_addr_cost
3008 || (new_cost == best_addr_cost
3009 && (COST (new) + 1) >> 1 > best_rtx_cost))
3012 best_addr_cost = new_cost;
3013 best_rtx_cost = (COST (new) + 1) >> 1;
3021 if (validate_change (insn, loc,
3022 canon_reg (copy_rtx (best_rtx),
3032 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3033 operation (EQ, NE, GT, etc.), follow it back through the hash table and
3034 find what values are actually being compared.
3036 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3037 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3038 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3039 compared to produce cc0.
3041 The return value is the comparison operator and is either the code of
3042 A or the code corresponding to the inverse of the comparison. */
3044 static enum rtx_code
3045 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
3046 enum machine_mode *pmode1, enum machine_mode *pmode2)
3050 arg1 = *parg1, arg2 = *parg2;
3052 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3054 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3056 /* Set nonzero when we find something of interest. */
3058 int reverse_code = 0;
3059 struct table_elt *p = 0;
3061 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3062 On machines with CC0, this is the only case that can occur, since
3063 fold_rtx will return the COMPARE or item being compared with zero
3066 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3069 /* If ARG1 is a comparison operator and CODE is testing for
3070 STORE_FLAG_VALUE, get the inner arguments. */
3072 else if (COMPARISON_P (arg1))
3074 #ifdef FLOAT_STORE_FLAG_VALUE
3075 REAL_VALUE_TYPE fsfv;
3079 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3080 && code == LT && STORE_FLAG_VALUE == -1)
3081 #ifdef FLOAT_STORE_FLAG_VALUE
3082 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3083 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3084 REAL_VALUE_NEGATIVE (fsfv)))
3089 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3090 && code == GE && STORE_FLAG_VALUE == -1)
3091 #ifdef FLOAT_STORE_FLAG_VALUE
3092 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3093 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3094 REAL_VALUE_NEGATIVE (fsfv)))
3097 x = arg1, reverse_code = 1;
3100 /* ??? We could also check for
3102 (ne (and (eq (...) (const_int 1))) (const_int 0))
3104 and related forms, but let's wait until we see them occurring. */
3107 /* Look up ARG1 in the hash table and see if it has an equivalence
3108 that lets us see what is being compared. */
3109 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3113 p = p->first_same_value;
3115 /* If what we compare is already known to be constant, that is as good as it gets.
3117 We need to break the loop in this case, because otherwise we
3118 can have an infinite loop when looking at a reg that is known
3119 to be a constant which is the same as a comparison of a reg
3120 against zero which appears later in the insn stream, which in
3121 turn is constant and the same as the comparison of the first reg
3127 for (; p; p = p->next_same_value)
3129 enum machine_mode inner_mode = GET_MODE (p->exp);
3130 #ifdef FLOAT_STORE_FLAG_VALUE
3131 REAL_VALUE_TYPE fsfv;
3134 /* If the entry isn't valid, skip it. */
3135 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3138 if (GET_CODE (p->exp) == COMPARE
3139 /* Another possibility is that this machine has a compare insn
3140 that includes the comparison code. In that case, ARG1 would
3141 be equivalent to a comparison operation that would set ARG1 to
3142 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3143 ORIG_CODE is the actual comparison being done; if it is an EQ,
3144 we must reverse ORIG_CODE. On machine with a negative value
3145 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3148 && GET_MODE_CLASS (inner_mode) == MODE_INT
3149 && (GET_MODE_BITSIZE (inner_mode)
3150 <= HOST_BITS_PER_WIDE_INT)
3151 && (STORE_FLAG_VALUE
3152 & ((HOST_WIDE_INT) 1
3153 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3154 #ifdef FLOAT_STORE_FLAG_VALUE
3156 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3157 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3158 REAL_VALUE_NEGATIVE (fsfv)))
3161 && COMPARISON_P (p->exp)))
3166 else if ((code == EQ
3168 && GET_MODE_CLASS (inner_mode) == MODE_INT
3169 && (GET_MODE_BITSIZE (inner_mode)
3170 <= HOST_BITS_PER_WIDE_INT)
3171 && (STORE_FLAG_VALUE
3172 & ((HOST_WIDE_INT) 1
3173 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3174 #ifdef FLOAT_STORE_FLAG_VALUE
3176 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3177 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3178 REAL_VALUE_NEGATIVE (fsfv)))
3181 && COMPARISON_P (p->exp))
3188 /* If this is a non-trapping address, e.g. fp + constant, the
3189 equivalent is a better operand since it may let us predict
3190 the value of the comparison. */
3191 else if (!rtx_addr_can_trap_p (p->exp))
3198 /* If we didn't find a useful equivalence for ARG1, we are done.
3199 Otherwise, set up for the next iteration. */
3203 /* If we need to reverse the comparison, make sure that that is
3204 possible -- we can't necessarily infer the value of GE from LT
3205 with floating-point operands. */
3208 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3209 if (reversed == UNKNOWN)
3214 else if (COMPARISON_P (x))
3215 code = GET_CODE (x);
3216 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3219 /* Return our results. Return the modes from before fold_rtx
3220 because fold_rtx might produce const_int, and then it's too late. */
3221 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3222 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3227 /* If X is a nontrivial arithmetic operation on an argument
3228 for which a constant value can be determined, return
3229 the result of operating on that value, as a constant.
3230 Otherwise, return X, possibly with one or more operands
3231 modified by recursive calls to this function.
3233 If X is a register whose contents are known, we do NOT
3234 return those contents here. equiv_constant is called to
3237 INSN is the insn that we may be modifying. If it is 0, make a copy
3238 of X before modifying it. */
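/* So, for instance, when both operands of a PLUS fold to known constants,
   the PLUS itself is folded to a constant by simplify_binary_operation
   further below; but fold_rtx of a lone register whose value is a known
   constant still returns the register, and callers must use
   equiv_constant to obtain the constant itself.  */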
3241 fold_rtx (rtx x, rtx insn)
3244 enum machine_mode mode;
3251 /* Folded equivalents of first two operands of X. */
3255 /* Constant equivalents of first three operands of X;
3256 0 when no such equivalent is known. */
3261 /* The mode of the first operand of X. We need this for sign and zero
3263 enum machine_mode mode_arg0;
3268 mode = GET_MODE (x);
3269 code = GET_CODE (x);
3279 /* No use simplifying an EXPR_LIST
3280 since they are used only for lists of args
3281 in a function call's REG_EQUAL note. */
3283 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3284 want to (e.g.,) make (addressof (const_int 0)) just because
3285 the location is known to be zero. */
3291 return prev_insn_cc0;
3295 /* If the next insn is a CODE_LABEL followed by a jump table,
3296 PC's value is a LABEL_REF pointing to that label. That
3297 lets us fold switch statements on the VAX. */
3300 if (insn && tablejump_p (insn, &next, NULL))
3301 return gen_rtx_LABEL_REF (Pmode, next);
3306 /* See if we previously assigned a constant value to this SUBREG. */
3307 if ((new = lookup_as_function (x, CONST_INT)) != 0
3308 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3311 /* If this is a paradoxical SUBREG, we have no idea what value the
3312 extra bits would have. However, if the operand is equivalent
3313 to a SUBREG whose operand is the same as our mode, and all the
3314 modes are within a word, we can just use the inner operand
3315 because these SUBREGs just say how to treat the register.
3317 Similarly if we find an integer constant. */
3319 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3321 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3322 struct table_elt *elt;
3324 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3325 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3326 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3328 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3330 if (CONSTANT_P (elt->exp)
3331 && GET_MODE (elt->exp) == VOIDmode)
3334 if (GET_CODE (elt->exp) == SUBREG
3335 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3336 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3337 return copy_rtx (SUBREG_REG (elt->exp));
3343 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3344 We might be able to if the SUBREG is extracting a single word in an
3345 integral mode or extracting the low part. */
3347 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3348 const_arg0 = equiv_constant (folded_arg0);
3350 folded_arg0 = const_arg0;
3352 if (folded_arg0 != SUBREG_REG (x))
3354 new = simplify_subreg (mode, folded_arg0,
3355 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3360 if (GET_CODE (folded_arg0) == REG
3361 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
3363 struct table_elt *elt;
3365 /* We can use HASH here since we know that canon_hash won't be
3367 elt = lookup (folded_arg0,
3368 HASH (folded_arg0, GET_MODE (folded_arg0)),
3369 GET_MODE (folded_arg0));
3372 elt = elt->first_same_value;
3374 if (subreg_lowpart_p (x))
3375 /* If this is a narrowing SUBREG and our operand is a REG, see
3376 if we can find an equivalence for REG that is an arithmetic
3377 operation in a wider mode where both operands are paradoxical
3378 SUBREGs from objects of our result mode. In that case, we
3379 couldn't report an equivalent value for that operation, since we
3380 don't know what the extra bits will be. But we can find an
3381 equivalence for this SUBREG by folding that operation in the
3382 narrow mode. This allows us to fold arithmetic in narrow modes
3383 when the machine only supports word-sized arithmetic.
3385 Also look for a case where we have a SUBREG whose operand
3386 is the same as our result. If both modes are smaller
3387 than a word, we are simply interpreting a register in
3388 different modes and we can use the inner value. */
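	      /* Illustration (register numbers hypothetical): for
		 (subreg:QI (reg:SI 80) 0), if (reg:SI 80) is equivalent to
		 (plus:SI (subreg:SI (reg:QI 81) 0) (const_int 4)) and
		 (reg:QI 81) is known to be (const_int 3), the addition is
		 redone in QImode and the SUBREG folds to (const_int 7).  */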
3390 for (; elt; elt = elt->next_same_value)
3392 enum rtx_code eltcode = GET_CODE (elt->exp);
3394 /* Just check for unary and binary operations. */
3395 if (UNARY_P (elt->exp)
3396 && eltcode != SIGN_EXTEND
3397 && eltcode != ZERO_EXTEND
3398 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3399 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3400 && (GET_MODE_CLASS (mode)
3401 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3403 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3405 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3406 op0 = fold_rtx (op0, NULL_RTX);
3408 op0 = equiv_constant (op0);
3410 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3413 else if (ARITHMETIC_P (elt->exp)
3414 && eltcode != DIV && eltcode != MOD
3415 && eltcode != UDIV && eltcode != UMOD
3416 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3417 && eltcode != ROTATE && eltcode != ROTATERT
3418 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3419 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3421 || CONSTANT_P (XEXP (elt->exp, 0)))
3422 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3423 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3425 || CONSTANT_P (XEXP (elt->exp, 1))))
3427 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3428 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3430 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3431 op0 = fold_rtx (op0, NULL_RTX);
3434 op0 = equiv_constant (op0);
3436 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3437 op1 = fold_rtx (op1, NULL_RTX);
3440 op1 = equiv_constant (op1);
3442 /* If we are looking for the low SImode part of
3443 (ashift:DI c (const_int 32)), it doesn't work
3444 to compute that in SImode, because a 32-bit shift
3445 in SImode is unpredictable. We know the value is 0. */
3447 && GET_CODE (elt->exp) == ASHIFT
3448 && GET_CODE (op1) == CONST_INT
3449 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3452 < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3453 /* If the count fits in the inner mode's width,
3454 but exceeds the outer mode's width,
3455 the value will get truncated to 0
3457 new = CONST0_RTX (mode);
3459 /* If the count exceeds even the inner mode's width,
3460 don't fold this expression. */
3463 else if (op0 && op1)
3464 new = simplify_binary_operation (GET_CODE (elt->exp), mode, op0, op1);
3467 else if (GET_CODE (elt->exp) == SUBREG
3468 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3469 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3471 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3472 new = copy_rtx (SUBREG_REG (elt->exp));
3478 /* A SUBREG resulting from a zero extension may fold to zero if
3479 it extracts higher bits than the ZERO_EXTEND's source bits.
3480 FIXME: if combine tried to, er, combine these instructions,
3481 this transformation may be moved to simplify_subreg. */
3482 for (; elt; elt = elt->next_same_value)
3484 if (GET_CODE (elt->exp) == ZERO_EXTEND
3486 >= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3487 return CONST0_RTX (mode);
3495 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3496 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3497 new = lookup_as_function (XEXP (x, 0), code);
3499 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3503 /* If we are not actually processing an insn, don't try to find the
3504 best address. Not only don't we care, but we could modify the
3505 MEM in an invalid way since we have no insn to validate against. */
3507 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3510 /* Even if we don't fold in the insn itself,
3511 we can safely do so here, in hopes of getting a constant. */
3512 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3514 HOST_WIDE_INT offset = 0;
3516 if (GET_CODE (addr) == REG
3517 && REGNO_QTY_VALID_P (REGNO (addr)))
3519 int addr_q = REG_QTY (REGNO (addr));
3520 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3522 if (GET_MODE (addr) == addr_ent->mode
3523 && addr_ent->const_rtx != NULL_RTX)
3524 addr = addr_ent->const_rtx;
3527 /* If address is constant, split it into a base and integer offset. */
3528 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3530 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3531 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3533 base = XEXP (XEXP (addr, 0), 0);
3534 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3536 else if (GET_CODE (addr) == LO_SUM
3537 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3538 base = XEXP (addr, 1);
3539 else if (GET_CODE (addr) == ADDRESSOF)
3540 return change_address (x, VOIDmode, addr);
3542 /* If this is a constant pool reference, we can fold it into its
3543 constant to allow better value tracking. */
3544 if (base && GET_CODE (base) == SYMBOL_REF
3545 && CONSTANT_POOL_ADDRESS_P (base))
3547 rtx constant = get_pool_constant (base);
3548 enum machine_mode const_mode = get_pool_mode (base);
3551 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3553 constant_pool_entries_cost = COST (constant);
3554 constant_pool_entries_regcost = approx_reg_cost (constant);
3557 /* If we are loading the full constant, we have an equivalence. */
3558 if (offset == 0 && mode == const_mode)
3561 /* If this actually isn't a constant (weird!), we can't do
3562 anything. Otherwise, handle the two most common cases:
3563 extracting a word from a multi-word constant, and extracting
3564 the low-order bits. Other cases don't seem common enough to
3566 if (! CONSTANT_P (constant))
3569 if (GET_MODE_CLASS (mode) == MODE_INT
3570 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3571 && offset % UNITS_PER_WORD == 0
3572 && (new = operand_subword (constant,
3573 offset / UNITS_PER_WORD,
3574 0, const_mode)) != 0)
3577 if (((BYTES_BIG_ENDIAN
3578 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3579 || (! BYTES_BIG_ENDIAN && offset == 0))
3580 && (new = gen_lowpart (mode, constant)) != 0)
3584 /* If this is a reference to a label at a known position in a jump
3585 table, we also know its value. */
3586 if (base && GET_CODE (base) == LABEL_REF)
3588 rtx label = XEXP (base, 0);
3589 rtx table_insn = NEXT_INSN (label);
3591 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3592 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3594 rtx table = PATTERN (table_insn);
3597 && (offset / GET_MODE_SIZE (GET_MODE (table))
3598 < XVECLEN (table, 0)))
3599 return XVECEXP (table, 0,
3600 offset / GET_MODE_SIZE (GET_MODE (table)));
3602 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3603 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3605 rtx table = PATTERN (table_insn);
3608 && (offset / GET_MODE_SIZE (GET_MODE (table))
3609 < XVECLEN (table, 1)))
3611 offset /= GET_MODE_SIZE (GET_MODE (table));
3612 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3615 if (GET_MODE (table) != Pmode)
3616 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3618 /* Indicate this is a constant. This isn't a
3619 valid form of CONST, but it will only be used
3620 to fold the next insns and then discarded, so
3623 Note this expression must be explicitly discarded,
3624 by cse_insn, else it may end up in a REG_EQUAL note
3625 and "escape" to cause problems elsewhere. */
3626 return gen_rtx_CONST (GET_MODE (new), new);
3634 #ifdef NO_FUNCTION_CSE
3636 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3642 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3643 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3644 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3654 mode_arg0 = VOIDmode;
3656 /* Try folding our operands.
3657 Then see which ones have constant values known. */
3659 fmt = GET_RTX_FORMAT (code);
3660 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3663 rtx arg = XEXP (x, i);
3664 rtx folded_arg = arg, const_arg = 0;
3665 enum machine_mode mode_arg = GET_MODE (arg);
3666 rtx cheap_arg, expensive_arg;
3667 rtx replacements[2];
3669 int old_cost = COST_IN (XEXP (x, i), code);
3671 /* Most arguments are cheap, so handle them specially. */
3672 switch (GET_CODE (arg))
3675 /* This is the same as calling equiv_constant; it is duplicated
3677 if (REGNO_QTY_VALID_P (REGNO (arg)))
3679 int arg_q = REG_QTY (REGNO (arg));
3680 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3682 if (arg_ent->const_rtx != NULL_RTX
3683 && GET_CODE (arg_ent->const_rtx) != REG
3684 && GET_CODE (arg_ent->const_rtx) != PLUS)
3686 = gen_lowpart (GET_MODE (arg),
3687 arg_ent->const_rtx);
3702 folded_arg = prev_insn_cc0;
3703 mode_arg = prev_insn_cc0_mode;
3704 const_arg = equiv_constant (folded_arg);
3709 folded_arg = fold_rtx (arg, insn);
3710 const_arg = equiv_constant (folded_arg);
3713 /* For the first three operands, see if the operand
3714 is constant or equivalent to a constant. */
3718 folded_arg0 = folded_arg;
3719 const_arg0 = const_arg;
3720 mode_arg0 = mode_arg;
3723 folded_arg1 = folded_arg;
3724 const_arg1 = const_arg;
3727 const_arg2 = const_arg;
3731 /* Pick the least expensive of the folded argument and an
3732 equivalent constant argument. */
3733 if (const_arg == 0 || const_arg == folded_arg
3734 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3735 cheap_arg = folded_arg, expensive_arg = const_arg;
3737 cheap_arg = const_arg, expensive_arg = folded_arg;
3739 /* Try to replace the operand with the cheapest of the two
3740 possibilities. If it doesn't work and this is either of the first
3741 two operands of a commutative operation, try swapping them.
3742 If THAT fails, try the more expensive, provided it is cheaper
3743 than what is already there. */
3745 if (cheap_arg == XEXP (x, i))
3748 if (insn == 0 && ! copied)
3754 /* Order the replacements from cheapest to most expensive. */
3755 replacements[0] = cheap_arg;
3756 replacements[1] = expensive_arg;
3758 for (j = 0; j < 2 && replacements[j]; j++)
3760 int new_cost = COST_IN (replacements[j], code);
3762 /* Stop if what existed before was cheaper. Prefer constants
3763 in the case of a tie. */
3764 if (new_cost > old_cost
3765 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3768 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3771 if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3772 || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
3774 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3775 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3777 if (apply_change_group ())
3779 /* Swap them back to be invalid so that this loop can
3780 continue and flag them to be swapped back later. */
3783 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3795 /* Don't try to fold inside of a vector of expressions.
3796 Doing nothing is harmless. */
3800 /* If a commutative operation, place a constant integer as the second
3801 operand unless the first operand is also a constant integer. Otherwise,
3802 place any constant second unless the first operand is also a constant. */
3804 if (COMMUTATIVE_P (x))
3807 || swap_commutative_operands_p (const_arg0 ? const_arg0
3809 const_arg1 ? const_arg1
3812 rtx tem = XEXP (x, 0);
3814 if (insn == 0 && ! copied)
3820 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3821 validate_change (insn, &XEXP (x, 1), tem, 1);
3822 if (apply_change_group ())
3824 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3825 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3830 /* If X is an arithmetic operation, see if we can simplify it. */
3832 switch (GET_RTX_CLASS (code))
3838 /* We can't simplify extension ops unless we know the
3840 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3841 && mode_arg0 == VOIDmode)
3844 /* If we had a CONST, strip it off and put it back later if we
3846 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3847 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3849 new = simplify_unary_operation (code, mode,
3850 const_arg0 ? const_arg0 : folded_arg0,
3852 if (new != 0 && is_const)
3853 new = gen_rtx_CONST (mode, new);
3858 case RTX_COMM_COMPARE:
3859 /* See what items are actually being compared and set FOLDED_ARG[01]
3860 to those values and CODE to the actual comparison code. If any are
3861 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3862 do anything if both operands are already known to be constant. */
3864 if (const_arg0 == 0 || const_arg1 == 0)
3866 struct table_elt *p0, *p1;
3867 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3868 enum machine_mode mode_arg1;
3870 #ifdef FLOAT_STORE_FLAG_VALUE
3871 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3873 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3874 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3875 false_rtx = CONST0_RTX (mode);
3879 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3880 &mode_arg0, &mode_arg1);
3881 const_arg0 = equiv_constant (folded_arg0);
3882 const_arg1 = equiv_constant (folded_arg1);
3884 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3885 what kinds of things are being compared, so we can't do
3886 anything with this comparison. */
3888 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3891 /* If we do not now have two constants being compared, see
3892 if we can nevertheless deduce some things about the
3894 if (const_arg0 == 0 || const_arg1 == 0)
3896 /* Some addresses are known to be nonzero. We don't know
3897 their sign, but equality comparisons are known. */
3898 if (const_arg1 == const0_rtx
3899 && nonzero_address_p (folded_arg0))
3903 else if (code == NE)
3907 /* See if the two operands are the same. */
3909 if (folded_arg0 == folded_arg1
3910 || (GET_CODE (folded_arg0) == REG
3911 && GET_CODE (folded_arg1) == REG
3912 && (REG_QTY (REGNO (folded_arg0))
3913 == REG_QTY (REGNO (folded_arg1))))
3914 || ((p0 = lookup (folded_arg0,
3915 (safe_hash (folded_arg0, mode_arg0)
3916 & HASH_MASK), mode_arg0))
3917 && (p1 = lookup (folded_arg1,
3918 (safe_hash (folded_arg1, mode_arg0)
3919 & HASH_MASK), mode_arg0))
3920 && p0->first_same_value == p1->first_same_value))
3922 /* Sadly two equal NaNs are not equivalent. */
3923 if (!HONOR_NANS (mode_arg0))
3924 return ((code == EQ || code == LE || code == GE
3925 || code == LEU || code == GEU || code == UNEQ
3926 || code == UNLE || code == UNGE
3928 ? true_rtx : false_rtx);
3929 /* Take care for the FP compares we can resolve. */
3930 if (code == UNEQ || code == UNLE || code == UNGE)
3932 if (code == LTGT || code == LT || code == GT)
3936 /* If FOLDED_ARG0 is a register, see if the comparison we are
3937 doing now is either the same as we did before or the reverse
3938 (we only check the reverse if not floating-point). */
3939 else if (GET_CODE (folded_arg0) == REG)
3941 int qty = REG_QTY (REGNO (folded_arg0));
3943 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3945 struct qty_table_elem *ent = &qty_table[qty];
3947 if ((comparison_dominates_p (ent->comparison_code, code)
3948 || (! FLOAT_MODE_P (mode_arg0)
3949 && comparison_dominates_p (ent->comparison_code,
3950 reverse_condition (code))))
3951 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3953 && rtx_equal_p (ent->comparison_const,
3955 || (GET_CODE (folded_arg1) == REG
3956 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3957 return (comparison_dominates_p (ent->comparison_code, code)
3958 ? true_rtx : false_rtx);
3964 /* If we are comparing against zero, see if the first operand is
3965 equivalent to an IOR with a constant. If so, we may be able to
3966 determine the result of this comparison. */
3968 if (const_arg1 == const0_rtx)
3970 rtx y = lookup_as_function (folded_arg0, IOR);
3974 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3975 && GET_CODE (inner_const) == CONST_INT
3976 && INTVAL (inner_const) != 0)
3978 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3979 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3980 && (INTVAL (inner_const)
3981 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3982 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3984 #ifdef FLOAT_STORE_FLAG_VALUE
3985 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3987 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3988 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3989 false_rtx = CONST0_RTX (mode);
4013 new = simplify_relational_operation (code,
4014 (mode_arg0 != VOIDmode
4016 : (GET_MODE (const_arg0
4020 ? GET_MODE (const_arg0
4023 : GET_MODE (const_arg1
4026 const_arg0 ? const_arg0 : folded_arg0,
4027 const_arg1 ? const_arg1 : folded_arg1);
4028 #ifdef FLOAT_STORE_FLAG_VALUE
4029 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4031 if (new == const0_rtx)
4032 new = CONST0_RTX (mode);
4034 new = (CONST_DOUBLE_FROM_REAL_VALUE
4035 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4041 case RTX_COMM_ARITH:
4045 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4046 with that LABEL_REF as its second operand. If so, the result is
4047 the first operand of that MINUS. This handles switches with an
4048 ADDR_DIFF_VEC table. */
4049 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4052 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4053 : lookup_as_function (folded_arg0, MINUS);
4055 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4056 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4059 /* Now try for a CONST of a MINUS like the above. */
4060 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4061 : lookup_as_function (folded_arg0, CONST))) != 0
4062 && GET_CODE (XEXP (y, 0)) == MINUS
4063 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4064 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4065 return XEXP (XEXP (y, 0), 0);
4068 /* Likewise if the operands are in the other order. */
4069 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4072 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4073 : lookup_as_function (folded_arg1, MINUS);
4075 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4076 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4079 /* Now try for a CONST of a MINUS like the above. */
4080 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4081 : lookup_as_function (folded_arg1, CONST))) != 0
4082 && GET_CODE (XEXP (y, 0)) == MINUS
4083 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4084 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4085 return XEXP (XEXP (y, 0), 0);
4088 /* If second operand is a register equivalent to a negative
4089 CONST_INT, see if we can find a register equivalent to the
4090 positive constant. Make a MINUS if so. Don't do this for
4091 a non-negative constant since we might then alternate between
4092 choosing positive and negative constants. Having the positive
4093 constant previously-used is the more common case. Be sure
4094 the resulting constant is non-negative; if const_arg1 were
4095 the smallest negative number this would overflow: depending
4096 on the mode, this would either just be the same value (and
4097 hence not save anything) or be incorrect. */
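	  /* Illustration (register numbers hypothetical): for
	     (plus (reg 73) (const_int -4)), if some register, say (reg 77),
	     is already known to hold (const_int 4), we return
	     (minus (reg 73) (reg 77)) so that the existing positive constant
	     is reused.  */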
4098 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4099 && INTVAL (const_arg1) < 0
4100 /* This used to test
4102 -INTVAL (const_arg1) >= 0
4104 But The Sun V5.0 compilers mis-compiled that test. So
4105 instead we test for the problematic value in a more direct
4106 manner and hope the Sun compilers get it correct. */
4107 && INTVAL (const_arg1) !=
4108 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4109 && GET_CODE (folded_arg1) == REG)
4111 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4113 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4117 for (p = p->first_same_value; p; p = p->next_same_value)
4118 if (GET_CODE (p->exp) == REG)
4119 return simplify_gen_binary (MINUS, mode, folded_arg0,
4120 canon_reg (p->exp, NULL_RTX));
4125 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4126 If so, produce (PLUS Z C2-C). */
4127 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4129 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4130 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4131 return fold_rtx (plus_constant (copy_rtx (y),
4132 -INTVAL (const_arg1)),
4139 case SMIN: case SMAX: case UMIN: case UMAX:
4140 case IOR: case AND: case XOR:
4142 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4143 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4144 is known to be of similar form, we may be able to replace the
4145 operation with a combined operation. This may eliminate the
4146 intermediate operation if every use is simplified in this way.
4147 Note that the similar optimization done by combine.c only works
4148 if the intermediate operation's result has only one reference. */
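	/* For instance (register numbers hypothetical): if (reg 90) is known
	   to be (plus (reg 91) (const_int 4)) and we are folding
	   (plus (reg 90) (const_int 8)), the constants are combined with
	   ASSOCIATE_CODE to give (plus (reg 91) (const_int 12)); for shifts
	   the two counts are likewise added.  */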
4150 if (GET_CODE (folded_arg0) == REG
4151 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4154 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4155 rtx y = lookup_as_function (folded_arg0, code);
4157 enum rtx_code associate_code;
4161 || 0 == (inner_const
4162 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4163 || GET_CODE (inner_const) != CONST_INT
4164 /* If we have compiled a statement like
4165 "if (x == (x & mask1))", and now are looking at
4166 "x & mask2", we will have a case where the first operand
4167 of Y is the same as our first operand. Unless we detect
4168 this case, an infinite loop will result. */
4169 || XEXP (y, 0) == folded_arg0)
4172 /* Don't associate these operations if they are a PLUS with the
4173 same constant and it is a power of two. These might be doable
4174 with a pre- or post-increment. Similarly for two subtracts of
4175 identical powers of two with post decrement. */
4177 if (code == PLUS && const_arg1 == inner_const
4178 && ((HAVE_PRE_INCREMENT
4179 && exact_log2 (INTVAL (const_arg1)) >= 0)
4180 || (HAVE_POST_INCREMENT
4181 && exact_log2 (INTVAL (const_arg1)) >= 0)
4182 || (HAVE_PRE_DECREMENT
4183 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4184 || (HAVE_POST_DECREMENT
4185 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4188 /* Compute the code used to compose the constants. For example,
4189 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
4191 associate_code = (is_shift || code == MINUS ? PLUS : code);
4193 new_const = simplify_binary_operation (associate_code, mode,
4194 const_arg1, inner_const);
4199 /* If we are associating shift operations, don't let this
4200 produce a shift of the size of the object or larger.
4201 This could occur when we follow a sign-extend by a right
4202 shift on a machine that does a sign-extend as a pair
4205 if (is_shift && GET_CODE (new_const) == CONST_INT
4206 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4208 /* As an exception, we can turn an ASHIFTRT of this
4209 form into a shift of the number of bits - 1. */
4210 if (code == ASHIFTRT)
4211 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4216 y = copy_rtx (XEXP (y, 0));
4218 /* If Y contains our first operand (the most common way this
4219 can happen is if Y is a MEM), we would go into an infinite
4220 loop if we tried to fold it. So don't in that case. */
4222 if (! reg_mentioned_p (folded_arg0, y))
4223 y = fold_rtx (y, insn);
4225 return simplify_gen_binary (code, mode, y, new_const);
4229 case DIV: case UDIV:
4230 /* ??? The associative optimization performed immediately above is
4231 also possible for DIV and UDIV using associate_code of MULT.
4232 However, we would need extra code to verify that the
4233 multiplication does not overflow, that is, there is no overflow
4234 in the calculation of new_const. */
4241 new = simplify_binary_operation (code, mode,
4242 const_arg0 ? const_arg0 : folded_arg0,
4243 const_arg1 ? const_arg1 : folded_arg1);
4247 /* (lo_sum (high X) X) is simply X. */
4248 if (code == LO_SUM && const_arg0 != 0
4249 && GET_CODE (const_arg0) == HIGH
4250 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4255 case RTX_BITFIELD_OPS:
4256 new = simplify_ternary_operation (code, mode, mode_arg0,
4257 const_arg0 ? const_arg0 : folded_arg0,
4258 const_arg1 ? const_arg1 : folded_arg1,
4259 const_arg2 ? const_arg2 : XEXP (x, 2));
4263 /* Eliminate CONSTANT_P_RTX if it's constant. */
4264 if (code == CONSTANT_P_RTX)
4268 if (optimize == 0 || !flag_gcse)
4277 return new ? new : x;
4280 /* Return a constant value currently equivalent to X.
4281 Return 0 if we don't know one. */
4284 equiv_constant (rtx x)
4286 if (GET_CODE (x) == REG
4287 && REGNO_QTY_VALID_P (REGNO (x)))
4289 int x_q = REG_QTY (REGNO (x));
4290 struct qty_table_elem *x_ent = &qty_table[x_q];
4292 if (x_ent->const_rtx)
4293 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
4296 if (x == 0 || CONSTANT_P (x))
4299 /* If X is a MEM, try to fold it outside the context of any insn to see if
4300 it might be equivalent to a constant. That handles the case where it
4301 is a constant-pool reference. Then try to look it up in the hash table
4302 in case it is something whose value we have seen before. */
4304 if (GET_CODE (x) == MEM)
4306 struct table_elt *elt;
4308 x = fold_rtx (x, NULL_RTX);
4312 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4316 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4317 if (elt->is_const && CONSTANT_P (elt->exp))
4324 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4325 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4326 least-significant part of X.
4327 MODE specifies how big a part of X to return.
4329 If the requested operation cannot be done, 0 is returned.
4331 This is similar to gen_lowpart_general in emit-rtl.c. */
4334 gen_lowpart_if_possible (enum machine_mode mode, rtx x)
4336 rtx result = gen_lowpart_common (mode, x);
4340 else if (GET_CODE (x) == MEM)
4342 /* This is the only other case we handle. */
4346 if (WORDS_BIG_ENDIAN)
4347 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4348 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4349 if (BYTES_BIG_ENDIAN)
4350 /* Adjust the address so that the address-after-the-data is
unchanged. */
4352 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4353 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
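/* A worked example (standard C, hypothetical numbers) of the offset
   computation just above, assuming a big-endian target with 4-byte words,
   an 8-byte source mode and a 2-byte target mode: the word adjustment gives
   MAX (8, 4) - MAX (2, 4) = 4 and the byte adjustment subtracts
   MIN (4, 2) - MIN (4, 8) = -2, so the low part lives at byte offset 6,
   which is where the least-significant two bytes of an 8-byte big-endian
   value are found.  */
#if 0
static int
lowpart_offset_example (void)
{
  const int units_per_word = 4, src_size = 8, dst_size = 2;
  int offset;

  offset = (src_size > units_per_word ? src_size : units_per_word)
	   - (dst_size > units_per_word ? dst_size : units_per_word);
  offset -= (units_per_word < dst_size ? units_per_word : dst_size)
	    - (units_per_word < src_size ? units_per_word : src_size);
  return offset;   /* 6 */
}
#endif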
4355 new = adjust_address_nv (x, mode, offset);
4356 if (! memory_address_p (mode, XEXP (new, 0)))
4365 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4366 branch. It will be zero if not.
4368 In certain cases, this can cause us to add an equivalence. For example,
4369 if we are following the taken case of
	if (i == 2)
4371 we can add the fact that `i' and '2' are now equivalent.
4373 In any case, we can record that this comparison was passed. If the same
4374 comparison is seen later, we will know its value. */
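/* A minimal illustrative sketch (standard C, hypothetical name) of the
   effect described above: once the taken arm of `if (i == 2)' has been
   entered, `i' and 2 are equivalent, so a later occurrence of the same
   comparison can be folded to a known value.  */
#if 0
static int
second_test_is_known (int i)
{
  if (i == 2)
    return (i == 2);   /* foldable to 1 on this path */
  return 0;
}
#endif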
4377 record_jump_equiv (rtx insn, int taken)
4379 int cond_known_true;
4382 enum machine_mode mode, mode0, mode1;
4383 int reversed_nonequality = 0;
4386 /* Ensure this is the right kind of insn. */
4387 if (! any_condjump_p (insn))
4389 set = pc_set (insn);
4391 /* See if this jump condition is known true or false. */
4393 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4395 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4397 /* Get the type of comparison being done and the operands being compared.
4398 If we had to reverse a non-equality condition, record that fact so we
4399 know that it isn't valid for floating-point. */
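/* A minimal illustrative sketch (standard C) of why a reversed non-equality
   condition is not trusted for floating point: with a NaN operand, the
   negation of `a < b' does not imply `a >= b'.  */
#if 0
static int
reversal_holds (double a, double b)
{
  /* Returns 0 whenever either operand is a NaN, 1 otherwise.  */
  return (!(a < b)) == (a >= b);
}
#endif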
4400 code = GET_CODE (XEXP (SET_SRC (set), 0));
4401 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4402 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4404 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4405 if (! cond_known_true)
4407 code = reversed_comparison_code_parts (code, op0, op1, insn);
4409 /* Don't remember if we can't find the inverse. */
4410 if (code == UNKNOWN)
4414 /* The mode is the mode of the non-constant. */
4416 if (mode1 != VOIDmode)
4419 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4422 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4423 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4424 Make any useful entries we can with that information. Called from
4425 above function and called recursively. */
4428 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4429 rtx op1, int reversed_nonequality)
4431 unsigned op0_hash, op1_hash;
4432 int op0_in_memory, op1_in_memory;
4433 struct table_elt *op0_elt, *op1_elt;
4435 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4436 we know that they are also equal in the smaller mode (this is also
4437 true for all smaller modes whether or not there is a SUBREG, but
4438 is not worth testing for with no SUBREG). */
4440 /* Note that GET_MODE (op0) may not equal MODE. */
4441 if (code == EQ && GET_CODE (op0) == SUBREG
4442 && (GET_MODE_SIZE (GET_MODE (op0))
4443 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4445 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4446 rtx tem = gen_lowpart (inner_mode, op1);
4448 record_jump_cond (code, mode, SUBREG_REG (op0),
4449 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4450 reversed_nonequality);
4453 if (code == EQ && GET_CODE (op1) == SUBREG
4454 && (GET_MODE_SIZE (GET_MODE (op1))
4455 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4457 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4458 rtx tem = gen_lowpart (inner_mode, op0);
4460 record_jump_cond (code, mode, SUBREG_REG (op1),
4461 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4462 reversed_nonequality);
4465 /* Similarly, if this is an NE comparison, and either is a SUBREG
4466 making a smaller mode, we know the whole thing is also NE. */
4468 /* Note that GET_MODE (op0) may not equal MODE;
4469 if we test MODE instead, we can get an infinite recursion
4470 alternating between two modes each wider than MODE. */
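/* A minimal illustrative sketch (standard C) of the reasoning above: if the
   narrow (low) parts of two values are unequal, the full values are unequal
   as well, so an NE fact proved on a lowpart SUBREG can be recorded for the
   wider register too.  The converse does not hold.  */
#if 0
#include <stdint.h>

static int
ne_carries_to_wider (uint32_t a, uint32_t b)
{
  if ((uint8_t) a != (uint8_t) b)
    return a != b;   /* necessarily 1 here */
  return 1;
}
#endif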
4472 if (code == NE && GET_CODE (op0) == SUBREG
4473 && subreg_lowpart_p (op0)
4474 && (GET_MODE_SIZE (GET_MODE (op0))
4475 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4477 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4478 rtx tem = gen_lowpart (inner_mode, op1);
4480 record_jump_cond (code, mode, SUBREG_REG (op0),
4481 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4482 reversed_nonequality);
4485 if (code == NE && GET_CODE (op1) == SUBREG
4486 && subreg_lowpart_p (op1)
4487 && (GET_MODE_SIZE (GET_MODE (op1))
4488 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4490 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4491 rtx tem = gen_lowpart (inner_mode, op0);
4493 record_jump_cond (code, mode, SUBREG_REG (op1),
4494 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4495 reversed_nonequality);
4498 /* Hash both operands. */
4501 hash_arg_in_memory = 0;
4502 op0_hash = HASH (op0, mode);
4503 op0_in_memory = hash_arg_in_memory;
4509 hash_arg_in_memory = 0;
4510 op1_hash = HASH (op1, mode);
4511 op1_in_memory = hash_arg_in_memory;
4516 /* Look up both operands. */
4517 op0_elt = lookup (op0, op0_hash, mode);
4518 op1_elt = lookup (op1, op1_hash, mode);
4520 /* If both operands are already equivalent or if they are not in the
4521 table but are identical, do nothing. */
4522 if ((op0_elt != 0 && op1_elt != 0
4523 && op0_elt->first_same_value == op1_elt->first_same_value)
4524 || op0 == op1 || rtx_equal_p (op0, op1))
4527 /* If we aren't setting two things equal, all we can do is save this
4528 comparison. Similarly if this is floating-point. In the latter
4529 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4530 If we record the equality, we might inadvertently delete code
4531 whose intent was to change -0 to +0. */
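/* A minimal illustrative sketch (standard C) of the signed-zero hazard
   mentioned above: -0.0 and +0.0 compare equal, yet they are distinct
   values, so recording an equality from such a comparison could let CSE
   remove a store whose whole purpose is to turn -0 into +0.  */
#if 0
#include <math.h>

static int
equal_zeros_still_differ (void)
{
  double neg = -0.0, pos = 0.0;
  return (neg == pos) && (signbit (neg) != signbit (pos));   /* 1 */
}
#endif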
4533 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4535 struct qty_table_elem *ent;
4538 /* If we reversed a floating-point comparison, if OP0 is not a
4539 register, or if OP1 is neither a register nor a constant, we can't
do anything. */
4542 if (GET_CODE (op1) != REG)
4543 op1 = equiv_constant (op1);
4545 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4546 || GET_CODE (op0) != REG || op1 == 0)
4549 /* Put OP0 in the hash table if it isn't already. This gives it a
4550 new quantity number. */
4553 if (insert_regs (op0, NULL, 0))
4555 rehash_using_reg (op0);
4556 op0_hash = HASH (op0, mode);
4558 /* If OP0 is contained in OP1, this changes its hash code
4559 as well. Faster to rehash than to check, except
4560 for the simple case of a constant. */
4561 if (! CONSTANT_P (op1))
4562 op1_hash = HASH (op1, mode);
4565 op0_elt = insert (op0, NULL, op0_hash, mode);
4566 op0_elt->in_memory = op0_in_memory;
4569 qty = REG_QTY (REGNO (op0));
4570 ent = &qty_table[qty];
4572 ent->comparison_code = code;
4573 if (GET_CODE (op1) == REG)
4575 /* Look it up again--in case op0 and op1 are the same. */
4576 op1_elt = lookup (op1, op1_hash, mode);
4578 /* Put OP1 in the hash table so it gets a new quantity number. */
4581 if (insert_regs (op1, NULL, 0))
4583 rehash_using_reg (op1);
4584 op1_hash = HASH (op1, mode);
4587 op1_elt = insert (op1, NULL, op1_hash, mode);
4588 op1_elt->in_memory = op1_in_memory;
4591 ent->comparison_const = NULL_RTX;
4592 ent->comparison_qty = REG_QTY (REGNO (op1));
4596 ent->comparison_const = op1;
4597 ent->comparison_qty = -1;
4603 /* If either side is still missing an equivalence, make it now,
4604 then merge the equivalences. */
4608 if (insert_regs (op0, NULL, 0))
4610 rehash_using_reg (op0);
4611 op0_hash = HASH (op0, mode);
4614 op0_elt = insert (op0, NULL, op0_hash, mode);
4615 op0_elt->in_memory = op0_in_memory;
4620 if (insert_regs (op1, NULL, 0))
4622 rehash_using_reg (op1);
4623 op1_hash = HASH (op1, mode);
4626 op1_elt = insert (op1, NULL, op1_hash, mode);
4627 op1_elt->in_memory = op1_in_memory;
4630 merge_equiv_classes (op0_elt, op1_elt);
4631 last_jump_equiv_class = op0_elt;
4634 /* CSE processing for one instruction.
4635 First simplify sources and addresses of all assignments
4636 in the instruction, using previously-computed equivalent values.
4637 Then install the new sources and destinations in the table
4638 of available values.
4640 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4641 the insn. It means that INSN is inside a libcall block. In this
4642 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4644 /* Data on one SET contained in the instruction. */
4648 /* The SET rtx itself. */
4650 /* The SET_SRC of the rtx (the original value, if it is changing). */
4652 /* The hash-table element for the SET_SRC of the SET. */
4653 struct table_elt *src_elt;
4654 /* Hash value for the SET_SRC. */
4656 /* Hash value for the SET_DEST. */
4658 /* The SET_DEST, with SUBREG, etc., stripped. */
4660 /* Nonzero if the SET_SRC is in memory. */
4662 /* Nonzero if the SET_SRC contains something
4663 whose value cannot be predicted and understood. */
4665 /* Original machine mode, in case it becomes a CONST_INT.
4666 The size of this field should match the size of the mode
4667 field of struct rtx_def (see rtl.h). */
4668 ENUM_BITFIELD(machine_mode) mode : 8;
4669 /* A constant equivalent for SET_SRC, if any. */
4671 /* Original SET_SRC value used for libcall notes. */
4673 /* Hash value of constant equivalent for SET_SRC. */
4674 unsigned src_const_hash;
4675 /* Table entry for constant equivalent for SET_SRC, if any. */
4676 struct table_elt *src_const_elt;
4680 cse_insn (rtx insn, rtx libcall_insn)
4682 rtx x = PATTERN (insn);
4688 /* Records what this insn does to set CC0. */
4689 rtx this_insn_cc0 = 0;
4690 enum machine_mode this_insn_cc0_mode = VOIDmode;
4694 struct table_elt *src_eqv_elt = 0;
4695 int src_eqv_volatile = 0;
4696 int src_eqv_in_memory = 0;
4697 unsigned src_eqv_hash = 0;
4699 struct set *sets = (struct set *) 0;
4703 /* Find all the SETs and CLOBBERs in this instruction.
4704 Record all the SETs in the array `set' and count them.
4705 Also determine whether there is a CLOBBER that invalidates
4706 all memory references, or all references at varying addresses. */
4708 if (GET_CODE (insn) == CALL_INSN)
4710 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4712 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4713 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4714 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4718 if (GET_CODE (x) == SET)
4720 sets = alloca (sizeof (struct set));
4723 /* Ignore SETs that are unconditional jumps.
4724 They never need cse processing, so this does not hurt.
4725 The reason is not efficiency but rather
4726 so that we can test at the end for instructions
4727 that have been simplified to unconditional jumps
4728 and not be misled by unchanged instructions
4729 that were unconditional jumps to begin with. */
4730 if (SET_DEST (x) == pc_rtx
4731 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4734 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4735 The hard function value register is used only once, to copy to
4736 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4737 Ensure we invalidate the destination register. On the 80386 no
4738 other code would invalidate it since it is a fixed_reg.
4739 We need not check the return of apply_change_group; see canon_reg. */
4741 else if (GET_CODE (SET_SRC (x)) == CALL)
4743 canon_reg (SET_SRC (x), insn);
4744 apply_change_group ();
4745 fold_rtx (SET_SRC (x), insn);
4746 invalidate (SET_DEST (x), VOIDmode);
4751 else if (GET_CODE (x) == PARALLEL)
4753 int lim = XVECLEN (x, 0);
4755 sets = alloca (lim * sizeof (struct set));
4757 /* Find all regs explicitly clobbered in this insn,
4758 and ensure they are not replaced with any other regs
4759 elsewhere in this insn.
4760 When a reg that is clobbered is also used for input,
4761 we should presume that that is for a reason,
4762 and we should not substitute some other register
4763 which is not supposed to be clobbered.
4764 Therefore, this loop cannot be merged into the one below
4765 because a CALL may precede a CLOBBER and refer to the
4766 value clobbered. We must not let a canonicalization do
4767 anything in that case. */
4768 for (i = 0; i < lim; i++)
4770 rtx y = XVECEXP (x, 0, i);
4771 if (GET_CODE (y) == CLOBBER)
4773 rtx clobbered = XEXP (y, 0);
4775 if (GET_CODE (clobbered) == REG
4776 || GET_CODE (clobbered) == SUBREG)
4777 invalidate (clobbered, VOIDmode);
4778 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4779 || GET_CODE (clobbered) == ZERO_EXTRACT)
4780 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4784 for (i = 0; i < lim; i++)
4786 rtx y = XVECEXP (x, 0, i);
4787 if (GET_CODE (y) == SET)
4789 /* As above, we ignore unconditional jumps and call-insns and
4790 ignore the result of apply_change_group. */
4791 if (GET_CODE (SET_SRC (y)) == CALL)
4793 canon_reg (SET_SRC (y), insn);
4794 apply_change_group ();
4795 fold_rtx (SET_SRC (y), insn);
4796 invalidate (SET_DEST (y), VOIDmode);
4798 else if (SET_DEST (y) == pc_rtx
4799 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4802 sets[n_sets++].rtl = y;
4804 else if (GET_CODE (y) == CLOBBER)
4806 /* If we clobber memory, canon the address.
4807 This does nothing when a register is clobbered
4808 because we have already invalidated the reg. */
4809 if (GET_CODE (XEXP (y, 0)) == MEM)
4810 canon_reg (XEXP (y, 0), NULL_RTX);
4812 else if (GET_CODE (y) == USE
4813 && ! (GET_CODE (XEXP (y, 0)) == REG
4814 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4815 canon_reg (y, NULL_RTX);
4816 else if (GET_CODE (y) == CALL)
4818 /* The result of apply_change_group can be ignored; see
4820 canon_reg (y, insn);
4821 apply_change_group ();
4826 else if (GET_CODE (x) == CLOBBER)
4828 if (GET_CODE (XEXP (x, 0)) == MEM)
4829 canon_reg (XEXP (x, 0), NULL_RTX);
4832 /* Canonicalize a USE of a pseudo register or memory location. */
4833 else if (GET_CODE (x) == USE
4834 && ! (GET_CODE (XEXP (x, 0)) == REG
4835 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4836 canon_reg (XEXP (x, 0), NULL_RTX);
4837 else if (GET_CODE (x) == CALL)
4839 /* The result of apply_change_group can be ignored; see canon_reg. */
4840 canon_reg (x, insn);
4841 apply_change_group ();
4845 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4846 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4847 is handled specially for this case, and if it isn't set, then there will
4848 be no equivalence for the destination. */
4849 if (n_sets == 1 && REG_NOTES (insn) != 0
4850 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4851 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4852 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4854 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4855 XEXP (tem, 0) = src_eqv;
4858 /* Canonicalize sources and addresses of destinations.
4859 We do this in a separate pass to avoid problems when a MATCH_DUP is
4860 present in the insn pattern. In that case, we want to ensure that
4861 we don't break the duplicate nature of the pattern. So we will replace
4862 both operands at the same time. Otherwise, we would fail to find an
4863 equivalent substitution in the loop calling validate_change below.
4865 We used to suppress canonicalization of DEST if it appears in SRC,
4866 but we don't do this any more. */
4868 for (i = 0; i < n_sets; i++)
4870 rtx dest = SET_DEST (sets[i].rtl);
4871 rtx src = SET_SRC (sets[i].rtl);
4872 rtx new = canon_reg (src, insn);
4875 sets[i].orig_src = src;
4876 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4877 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4878 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4879 || (insn_code = recog_memoized (insn)) < 0
4880 || insn_data[insn_code].n_dups > 0)
4881 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4883 SET_SRC (sets[i].rtl) = new;
4885 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4887 validate_change (insn, &XEXP (dest, 1),
4888 canon_reg (XEXP (dest, 1), insn), 1);
4889 validate_change (insn, &XEXP (dest, 2),
4890 canon_reg (XEXP (dest, 2), insn), 1);
4893 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4894 || GET_CODE (dest) == ZERO_EXTRACT
4895 || GET_CODE (dest) == SIGN_EXTRACT)
4896 dest = XEXP (dest, 0);
4898 if (GET_CODE (dest) == MEM)
4899 canon_reg (dest, insn);
4902 /* Now that we have done all the replacements, we can apply the change
4903 group and see if they all work. Note that this will cause some
4904 canonicalizations that would have worked individually not to be applied
4905 because some other canonicalization didn't work, but this should not
occur very often.
4908 The result of apply_change_group can be ignored; see canon_reg. */
4910 apply_change_group ();
4912 /* Set sets[i].src_elt to the class each source belongs to.
4913 Detect assignments from or to volatile things
4914 and set sets[i] to zero so they will be ignored
4915 in the rest of this function.
4917 Nothing in this loop changes the hash table or the register chains. */
4919 for (i = 0; i < n_sets; i++)
4923 struct table_elt *elt = 0, *p;
4924 enum machine_mode mode;
4927 rtx src_related = 0;
4928 struct table_elt *src_const_elt = 0;
4929 int src_cost = MAX_COST;
4930 int src_eqv_cost = MAX_COST;
4931 int src_folded_cost = MAX_COST;
4932 int src_related_cost = MAX_COST;
4933 int src_elt_cost = MAX_COST;
4934 int src_regcost = MAX_COST;
4935 int src_eqv_regcost = MAX_COST;
4936 int src_folded_regcost = MAX_COST;
4937 int src_related_regcost = MAX_COST;
4938 int src_elt_regcost = MAX_COST;
4939 /* Set nonzero if we need to call force_const_mem on the
4940 contents of src_folded before using it. */
4941 int src_folded_force_flag = 0;
4943 dest = SET_DEST (sets[i].rtl);
4944 src = SET_SRC (sets[i].rtl);
4946 /* If SRC is a constant that has no machine mode,
4947 hash it with the destination's machine mode.
4948 This way we can keep different modes separate. */
4950 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4951 sets[i].mode = mode;
4955 enum machine_mode eqvmode = mode;
4956 if (GET_CODE (dest) == STRICT_LOW_PART)
4957 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4959 hash_arg_in_memory = 0;
4960 src_eqv_hash = HASH (src_eqv, eqvmode);
4962 /* Find the equivalence class for the equivalent expression. */
4965 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4967 src_eqv_volatile = do_not_record;
4968 src_eqv_in_memory = hash_arg_in_memory;
4971 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4972 value of the INNER register, not the destination. So it is not
4973 a valid substitution for the source. But save it for later. */
4974 if (GET_CODE (dest) == STRICT_LOW_PART)
4977 src_eqv_here = src_eqv;
4979 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4980 simplified result, which may not necessarily be valid. */
4981 src_folded = fold_rtx (src, insn);
4984 /* ??? This caused bad code to be generated for the m68k port with -O2.
4985 Suppose src is (CONST_INT -1), and that after truncation src_folded
4986 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4987 At the end we will add src and src_const to the same equivalence
4988 class. We now have 3 and -1 on the same equivalence class. This
4989 causes later instructions to be mis-optimized. */
4990 /* If storing a constant in a bitfield, pre-truncate the constant
4991 so we will be able to record it later. */
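/* A minimal illustrative sketch (standard C, hypothetical name) of the
   pre-truncation described above and performed just below: storing -1 into
   a 3-bit field leaves 7 in the field, so 7 rather than -1 is the constant
   worth recording for it.  */
#if 0
static long long
truncate_to_field (long long value, int width)
{
  return value & ((1LL << width) - 1);   /* value -1, width 3  =>  7 */
}
#endif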
4992 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
4993 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
4995 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4997 if (GET_CODE (src) == CONST_INT
4998 && GET_CODE (width) == CONST_INT
4999 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5000 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5002 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5003 << INTVAL (width)) - 1));
5007 /* Compute SRC's hash code, and also notice if it
5008 should not be recorded at all. In that case,
5009 prevent any further processing of this assignment. */
5011 hash_arg_in_memory = 0;
5014 sets[i].src_hash = HASH (src, mode);
5015 sets[i].src_volatile = do_not_record;
5016 sets[i].src_in_memory = hash_arg_in_memory;
5018 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5019 a pseudo, do not record SRC. Using SRC as a replacement for
5020 anything else will be incorrect in that situation. Note that
5021 this usually occurs only for stack slots, in which case all the
5022 RTL would be referring to SRC, so we don't lose any optimization
5023 opportunities by not having SRC in the hash table. */
5025 if (GET_CODE (src) == MEM
5026 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5027 && GET_CODE (dest) == REG
5028 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5029 sets[i].src_volatile = 1;
5032 /* It is no longer clear why we used to do this, but it doesn't
5033 appear to still be needed. So let's try without it since this
5034 code hurts cse'ing widened ops. */
5035 /* If source is a paradoxical subreg (such as QI treated as an SI),
5036 treat it as volatile. It may do the work of an SI in one context
5037 where the extra bits are not being used, but cannot replace an SI
in other contexts. */
5039 if (GET_CODE (src) == SUBREG
5040 && (GET_MODE_SIZE (GET_MODE (src))
5041 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5042 sets[i].src_volatile = 1;
5045 /* Locate all possible equivalent forms for SRC. Try to replace
5046 SRC in the insn with each cheaper equivalent.
5048 We have the following types of equivalents: SRC itself, a folded
5049 version, a value given in a REG_EQUAL note, or a value related
5052 Each of these equivalents may be part of an additional class
5053 of equivalents (if more than one is in the table, they must be in
5054 the same class; we check for this).
5056 If the source is volatile, we don't do any table lookups.
5058 We note any constant equivalent for possible later use in a
REG_NOTE. */
5061 if (!sets[i].src_volatile)
5062 elt = lookup (src, sets[i].src_hash, mode);
5064 sets[i].src_elt = elt;
5066 if (elt && src_eqv_here && src_eqv_elt)
5068 if (elt->first_same_value != src_eqv_elt->first_same_value)
5070 /* The REG_EQUAL is indicating that two formerly distinct
5071 classes are now equivalent. So merge them. */
5072 merge_equiv_classes (elt, src_eqv_elt);
5073 src_eqv_hash = HASH (src_eqv, elt->mode);
5074 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5080 else if (src_eqv_elt)
5083 /* Try to find a constant somewhere and record it in `src_const'.
5084 Record its table element, if any, in `src_const_elt'. Look in
5085 any known equivalences first. (If the constant is not in the
5086 table, also set `sets[i].src_const_hash'). */
5088 for (p = elt->first_same_value; p; p = p->next_same_value)
5092 src_const_elt = elt;
5097 && (CONSTANT_P (src_folded)
5098 /* Consider (minus (label_ref L1) (label_ref L2)) as
5099 "constant" here so we will record it. This allows us
5100 to fold switch statements when an ADDR_DIFF_VEC is used. */
5101 || (GET_CODE (src_folded) == MINUS
5102 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5103 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5104 src_const = src_folded, src_const_elt = elt;
5105 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5106 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5108 /* If we don't know if the constant is in the table, get its
5109 hash code and look it up. */
5110 if (src_const && src_const_elt == 0)
5112 sets[i].src_const_hash = HASH (src_const, mode);
5113 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5116 sets[i].src_const = src_const;
5117 sets[i].src_const_elt = src_const_elt;
5119 /* If the constant and our source are both in the table, mark them as
5120 equivalent. Otherwise, if a constant is in the table but the source
5121 isn't, set ELT to it. */
5122 if (src_const_elt && elt
5123 && src_const_elt->first_same_value != elt->first_same_value)
5124 merge_equiv_classes (elt, src_const_elt);
5125 else if (src_const_elt && elt == 0)
5126 elt = src_const_elt;
5128 /* See if there is a register linearly related to a constant
5129 equivalent of SRC. */
5131 && (GET_CODE (src_const) == CONST
5132 || (src_const_elt && src_const_elt->related_value != 0)))
5134 src_related = use_related_value (src_const, src_const_elt);
5137 struct table_elt *src_related_elt
5138 = lookup (src_related, HASH (src_related, mode), mode);
5139 if (src_related_elt && elt)
5141 if (elt->first_same_value
5142 != src_related_elt->first_same_value)
5143 /* This can occur when we previously saw a CONST
5144 involving a SYMBOL_REF and then see the SYMBOL_REF
5145 twice. Merge the involved classes. */
5146 merge_equiv_classes (elt, src_related_elt);
5149 src_related_elt = 0;
5151 else if (src_related_elt && elt == 0)
5152 elt = src_related_elt;
5156 /* See if we have a CONST_INT that is already in a register in a
wider mode. */
5159 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5160 && GET_MODE_CLASS (mode) == MODE_INT
5161 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5163 enum machine_mode wider_mode;
5165 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5166 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5167 && src_related == 0;
5168 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5170 struct table_elt *const_elt
5171 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5176 for (const_elt = const_elt->first_same_value;
5177 const_elt; const_elt = const_elt->next_same_value)
5178 if (GET_CODE (const_elt->exp) == REG)
5180 src_related = gen_lowpart (mode,
5187 /* Another possibility is that we have an AND with a constant in
5188 a mode narrower than a word. If so, it might have been generated
5189 as part of an "if" which would narrow the AND. If we already
5190 have done the AND in a wider mode, we can use a SUBREG of that
value. */
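/* A minimal illustrative sketch (standard C, hypothetical name) of the
   reuse described above: if `x & 0x0f' has already been computed in a wider
   type, the narrow result is just the low part of that value, so no second
   AND is needed.  */
#if 0
#include <stdint.h>

static uint8_t
reuse_wider_and (uint32_t wide_and)
{
  /* wide_and is assumed to hold a previously computed `x & 0x0f'.  */
  return (uint8_t) wide_and;   /* equals (uint8_t) (x & 0x0f) */
}
#endif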
5193 if (flag_expensive_optimizations && ! src_related
5194 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5195 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5197 enum machine_mode tmode;
5198 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5200 for (tmode = GET_MODE_WIDER_MODE (mode);
5201 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5202 tmode = GET_MODE_WIDER_MODE (tmode))
5204 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
5205 struct table_elt *larger_elt;
5209 PUT_MODE (new_and, tmode);
5210 XEXP (new_and, 0) = inner;
5211 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5212 if (larger_elt == 0)
5215 for (larger_elt = larger_elt->first_same_value;
5216 larger_elt; larger_elt = larger_elt->next_same_value)
5217 if (GET_CODE (larger_elt->exp) == REG)
5220 = gen_lowpart (mode, larger_elt->exp);
5230 #ifdef LOAD_EXTEND_OP
5231 /* See if a MEM has already been loaded with a widening operation;
5232 if it has, we can use a subreg of that. Many CISC machines
5233 also have such operations, but this is only likely to be
5234 beneficial on these machines. */
5236 if (flag_expensive_optimizations && src_related == 0
5237 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5238 && GET_MODE_CLASS (mode) == MODE_INT
5239 && GET_CODE (src) == MEM && ! do_not_record
5240 && LOAD_EXTEND_OP (mode) != NIL)
5242 enum machine_mode tmode;
5244 /* Set what we are trying to extend and the operation it might
5245 have been extended with. */
5246 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5247 XEXP (memory_extend_rtx, 0) = src;
5249 for (tmode = GET_MODE_WIDER_MODE (mode);
5250 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5251 tmode = GET_MODE_WIDER_MODE (tmode))
5253 struct table_elt *larger_elt;
5255 PUT_MODE (memory_extend_rtx, tmode);
5256 larger_elt = lookup (memory_extend_rtx,
5257 HASH (memory_extend_rtx, tmode), tmode);
5258 if (larger_elt == 0)
5261 for (larger_elt = larger_elt->first_same_value;
5262 larger_elt; larger_elt = larger_elt->next_same_value)
5263 if (GET_CODE (larger_elt->exp) == REG)
5265 src_related = gen_lowpart (mode,
5274 #endif /* LOAD_EXTEND_OP */
5276 if (src == src_folded)
5279 /* At this point, ELT, if nonzero, points to a class of expressions
5280 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5281 and SRC_RELATED, if nonzero, each contain additional equivalent
5282 expressions. Prune these latter expressions by deleting expressions
5283 already in the equivalence class.
5285 Check for an equivalent identical to the destination. If found,
5286 this is the preferred equivalent since it will likely lead to
5287 elimination of the insn. Indicate this by placing it in
`src_related'. */
5291 elt = elt->first_same_value;
5292 for (p = elt; p; p = p->next_same_value)
5294 enum rtx_code code = GET_CODE (p->exp);
5296 /* If the expression is not valid, ignore it. Then we do not
5297 have to check for validity below. In most cases, we can use
5298 `rtx_equal_p', since canonicalization has already been done. */
5299 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5302 /* Also skip paradoxical subregs, unless that's what we're
looking for. */
if (code == SUBREG
5305 && (GET_MODE_SIZE (GET_MODE (p->exp))
5306 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5308 && GET_CODE (src) == SUBREG
5309 && GET_MODE (src) == GET_MODE (p->exp)
5310 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5311 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5314 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5316 else if (src_folded && GET_CODE (src_folded) == code
5317 && rtx_equal_p (src_folded, p->exp))
5319 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5320 && rtx_equal_p (src_eqv_here, p->exp))
5322 else if (src_related && GET_CODE (src_related) == code
5323 && rtx_equal_p (src_related, p->exp))
5326 /* If this is the same as the destination of the insn, we want
5327 to prefer it. Copy it to src_related. The code below will
5328 then give it a negative cost. */
5329 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5333 /* Find the cheapest valid equivalent, trying all the available
5334 possibilities. Prefer items not in the hash table to ones
5335 that are when they are equal cost. Note that we can never
5336 worsen an insn as the current contents will also succeed.
5337 If we find an equivalent identical to the destination, use it as best,
5338 since this insn will probably be eliminated in that case. */
5341 if (rtx_equal_p (src, dest))
5342 src_cost = src_regcost = -1;
5345 src_cost = COST (src);
5346 src_regcost = approx_reg_cost (src);
5352 if (rtx_equal_p (src_eqv_here, dest))
5353 src_eqv_cost = src_eqv_regcost = -1;
5356 src_eqv_cost = COST (src_eqv_here);
5357 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5363 if (rtx_equal_p (src_folded, dest))
5364 src_folded_cost = src_folded_regcost = -1;
5367 src_folded_cost = COST (src_folded);
5368 src_folded_regcost = approx_reg_cost (src_folded);
5374 if (rtx_equal_p (src_related, dest))
5375 src_related_cost = src_related_regcost = -1;
5378 src_related_cost = COST (src_related);
5379 src_related_regcost = approx_reg_cost (src_related);
5383 /* If this was an indirect jump insn, a known label will really be
5384 cheaper even though it looks more expensive. */
5385 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5386 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5388 /* Terminate loop when replacement made. This must terminate since
5389 the current contents will be tested and will always be valid. */
5394 /* Skip invalid entries. */
5395 while (elt && GET_CODE (elt->exp) != REG
5396 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5397 elt = elt->next_same_value;
5399 /* A paradoxical subreg would be bad here: it'll be the right
5400 size, but later may be adjusted so that the upper bits aren't
5401 what we want. So reject it. */
5403 && GET_CODE (elt->exp) == SUBREG
5404 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5405 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5406 /* It is okay, though, if the rtx we're trying to match
5407 will ignore any of the bits we can't predict. */
5409 && GET_CODE (src) == SUBREG
5410 && GET_MODE (src) == GET_MODE (elt->exp)
5411 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5412 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5414 elt = elt->next_same_value;
5420 src_elt_cost = elt->cost;
5421 src_elt_regcost = elt->regcost;
5424 /* Find cheapest and skip it for the next time. For items
5425 of equal cost, use this order:
5426 src_folded, src, src_eqv, src_related and hash table entry. */
5428 && preferable (src_folded_cost, src_folded_regcost,
5429 src_cost, src_regcost) <= 0
5430 && preferable (src_folded_cost, src_folded_regcost,
5431 src_eqv_cost, src_eqv_regcost) <= 0
5432 && preferable (src_folded_cost, src_folded_regcost,
5433 src_related_cost, src_related_regcost) <= 0
5434 && preferable (src_folded_cost, src_folded_regcost,
5435 src_elt_cost, src_elt_regcost) <= 0)
5437 trial = src_folded, src_folded_cost = MAX_COST;
5438 if (src_folded_force_flag)
5440 rtx forced = force_const_mem (mode, trial);
5446 && preferable (src_cost, src_regcost,
5447 src_eqv_cost, src_eqv_regcost) <= 0
5448 && preferable (src_cost, src_regcost,
5449 src_related_cost, src_related_regcost) <= 0
5450 && preferable (src_cost, src_regcost,
5451 src_elt_cost, src_elt_regcost) <= 0)
5452 trial = src, src_cost = MAX_COST;
5453 else if (src_eqv_here
5454 && preferable (src_eqv_cost, src_eqv_regcost,
5455 src_related_cost, src_related_regcost) <= 0
5456 && preferable (src_eqv_cost, src_eqv_regcost,
5457 src_elt_cost, src_elt_regcost) <= 0)
5458 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5459 else if (src_related
5460 && preferable (src_related_cost, src_related_regcost,
5461 src_elt_cost, src_elt_regcost) <= 0)
5462 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5465 trial = copy_rtx (elt->exp);
5466 elt = elt->next_same_value;
5467 src_elt_cost = MAX_COST;
5470 /* We don't normally have an insn matching (set (pc) (pc)), so
5471 check for this separately here. We will delete such an
insn below.
5474 For other cases such as a table jump or conditional jump
5475 where we know the ultimate target, go ahead and replace the
5476 operand. While that may not make a valid insn, we will
5477 reemit the jump below (and also insert any necessary
barriers). */
5479 if (n_sets == 1 && dest == pc_rtx
5481 || (GET_CODE (trial) == LABEL_REF
5482 && ! condjump_p (insn))))
5484 SET_SRC (sets[i].rtl) = trial;
5485 cse_jumps_altered = 1;
5489 /* Look for a substitution that makes a valid insn. */
5490 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5492 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5494 /* If we just made a substitution inside a libcall, then we
5495 need to make the same substitution in any notes attached
5496 to the RETVAL insn. */
5498 && (GET_CODE (sets[i].orig_src) == REG
5499 || GET_CODE (sets[i].orig_src) == SUBREG
5500 || GET_CODE (sets[i].orig_src) == MEM))
5501 simplify_replace_rtx (REG_NOTES (libcall_insn),
5502 sets[i].orig_src, copy_rtx (new));
5504 /* The result of apply_change_group can be ignored; see
5507 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5508 apply_change_group ();
5512 /* If we previously found constant pool entries for
5513 constants and this is a constant, try making a
5514 pool entry. Put it in src_folded unless we already have done
5515 this since that is where it likely came from. */
5517 else if (constant_pool_entries_cost
5518 && CONSTANT_P (trial)
5519 /* Reject cases that will abort in decode_rtx_const.
5520 On the alpha when simplifying a switch, we get
5521 (const (truncate (minus (label_ref) (label_ref)))). */
5522 && ! (GET_CODE (trial) == CONST
5523 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5524 /* Likewise on IA-64, except without the truncate. */
5525 && ! (GET_CODE (trial) == CONST
5526 && GET_CODE (XEXP (trial, 0)) == MINUS
5527 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5528 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5530 || (GET_CODE (src_folded) != MEM
5531 && ! src_folded_force_flag))
5532 && GET_MODE_CLASS (mode) != MODE_CC
5533 && mode != VOIDmode)
5535 src_folded_force_flag = 1;
5537 src_folded_cost = constant_pool_entries_cost;
5538 src_folded_regcost = constant_pool_entries_regcost;
5542 src = SET_SRC (sets[i].rtl);
5544 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5545 However, there is an important exception: If both are registers
5546 that are not the head of their equivalence class, replace SET_SRC
5547 with the head of the class. If we do not do this, we will have
5548 both registers live over a portion of the basic block. This way,
5549 their lifetimes will likely abut instead of overlapping. */
5550 if (GET_CODE (dest) == REG
5551 && REGNO_QTY_VALID_P (REGNO (dest)))
5553 int dest_q = REG_QTY (REGNO (dest));
5554 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5556 if (dest_ent->mode == GET_MODE (dest)
5557 && dest_ent->first_reg != REGNO (dest)
5558 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5559 /* Don't do this if the original insn had a hard reg as
5560 SET_SRC or SET_DEST. */
5561 && (GET_CODE (sets[i].src) != REG
5562 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5563 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5564 /* We can't call canon_reg here because it won't do anything if
5565 SRC is a hard register. */
5567 int src_q = REG_QTY (REGNO (src));
5568 struct qty_table_elem *src_ent = &qty_table[src_q];
5569 int first = src_ent->first_reg;
5571 = (first >= FIRST_PSEUDO_REGISTER
5572 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5574 /* We must use validate-change even for this, because this
5575 might be a special no-op instruction, suitable only to
tag notes onto the insn. */
5577 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5580 /* If we had a constant that is cheaper than what we are now
5581 setting SRC to, use that constant. We ignored it when we
5582 thought we could make this into a no-op. */
5583 if (src_const && COST (src_const) < COST (src)
5584 && validate_change (insn, &SET_SRC (sets[i].rtl),
5591 /* If we made a change, recompute SRC values. */
5592 if (src != sets[i].src)
5596 hash_arg_in_memory = 0;
5598 sets[i].src_hash = HASH (src, mode);
5599 sets[i].src_volatile = do_not_record;
5600 sets[i].src_in_memory = hash_arg_in_memory;
5601 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5604 /* If this is a single SET, we are setting a register, and we have an
5605 equivalent constant, we want to add a REG_NOTE. We don't want
5606 to write a REG_EQUAL note for a constant pseudo since verifying that
5607 that pseudo hasn't been eliminated is a pain. Such a note also
5608 won't help anything.
5610 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5611 which can be created for a reference to a compile time computable
5612 entry in a jump table. */
5614 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5615 && GET_CODE (src_const) != REG
5616 && ! (GET_CODE (src_const) == CONST
5617 && GET_CODE (XEXP (src_const, 0)) == MINUS
5618 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5619 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5621 /* We only want a REG_EQUAL note if src_const != src. */
5622 if (! rtx_equal_p (src, src_const))
5624 /* Make sure that the rtx is not shared. */
5625 src_const = copy_rtx (src_const);
5627 /* Record the actual constant value in a REG_EQUAL note,
5628 making a new one if one does not already exist. */
5629 set_unique_reg_note (insn, REG_EQUAL, src_const);
5633 /* Now deal with the destination. */
5636 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5637 to the MEM or REG within it. */
5638 while (GET_CODE (dest) == SIGN_EXTRACT
5639 || GET_CODE (dest) == ZERO_EXTRACT
5640 || GET_CODE (dest) == SUBREG
5641 || GET_CODE (dest) == STRICT_LOW_PART)
5642 dest = XEXP (dest, 0);
5644 sets[i].inner_dest = dest;
5646 if (GET_CODE (dest) == MEM)
5648 #ifdef PUSH_ROUNDING
5649 /* Stack pushes invalidate the stack pointer. */
5650 rtx addr = XEXP (dest, 0);
5651 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5652 && XEXP (addr, 0) == stack_pointer_rtx)
5653 invalidate (stack_pointer_rtx, Pmode);
5655 dest = fold_rtx (dest, insn);
5658 /* Compute the hash code of the destination now,
5659 before the effects of this instruction are recorded,
5660 since the register values used in the address computation
5661 are those before this instruction. */
5662 sets[i].dest_hash = HASH (dest, mode);
5664 /* Don't enter a bit-field in the hash table
5665 because the value in it after the store
5666 may not equal what was stored, due to truncation. */
5668 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5669 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5671 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5673 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5674 && GET_CODE (width) == CONST_INT
5675 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5676 && ! (INTVAL (src_const)
5677 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5678 /* Exception: if the value is constant,
5679 and it won't be truncated, record it. */
5683 /* This is chosen so that the destination will be invalidated
5684 but no new value will be recorded.
5685 We must invalidate because sometimes constant
5686 values can be recorded for bitfields. */
5687 sets[i].src_elt = 0;
5688 sets[i].src_volatile = 1;
5694 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5696 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5698 /* One less use of the label this insn used to jump to. */
5700 cse_jumps_altered = 1;
5701 /* No more processing for this set. */
5705 /* If this SET is now setting PC to a label, we know it used to
5706 be a conditional or computed branch. */
5707 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5709 /* Now emit a BARRIER after the unconditional jump. */
5710 if (NEXT_INSN (insn) == 0
5711 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5712 emit_barrier_after (insn);
5714 /* We reemit the jump in as many cases as possible just in
5715 case the form of an unconditional jump is significantly
5716 different than a computed jump or conditional jump.
5718 If this insn has multiple sets, then reemitting the
5719 jump is nontrivial. So instead we just force rerecognition
5720 and hope for the best. */
5725 new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5726 JUMP_LABEL (new) = XEXP (src, 0);
5727 LABEL_NUSES (XEXP (src, 0))++;
5729 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5730 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5733 XEXP (note, 1) = NULL_RTX;
5734 REG_NOTES (new) = note;
5740 /* Now emit a BARRIER after the unconditional jump. */
5741 if (NEXT_INSN (insn) == 0
5742 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5743 emit_barrier_after (insn);
5746 INSN_CODE (insn) = -1;
5748 never_reached_warning (insn, NULL);
5750 /* Do not bother deleting any unreachable code,
5751 let jump/flow do that. */
5753 cse_jumps_altered = 1;
5757 /* If destination is volatile, invalidate it and then do no further
5758 processing for this assignment. */
5760 else if (do_not_record)
5762 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5763 invalidate (dest, VOIDmode);
5764 else if (GET_CODE (dest) == MEM)
5766 /* Outgoing arguments for a libcall don't
5767 affect any recorded expressions. */
5768 if (! libcall_insn || insn == libcall_insn)
5769 invalidate (dest, VOIDmode);
5771 else if (GET_CODE (dest) == STRICT_LOW_PART
5772 || GET_CODE (dest) == ZERO_EXTRACT)
5773 invalidate (XEXP (dest, 0), GET_MODE (dest));
5777 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5778 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5781 /* If setting CC0, record what it was set to, or a constant, if it
5782 is equivalent to a constant. If it is being set to a floating-point
5783 value, make a COMPARE with the appropriate constant of 0. If we
5784 don't do this, later code can interpret this as a test against
5785 const0_rtx, which can cause problems if we try to put it into an
5786 insn as a floating-point operand. */
5787 if (dest == cc0_rtx)
5789 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5790 this_insn_cc0_mode = mode;
5791 if (FLOAT_MODE_P (mode))
5792 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5798 /* Now enter all non-volatile source expressions in the hash table
5799 if they are not already present.
5800 Record their equivalence classes in src_elt.
5801 This way we can insert the corresponding destinations into
5802 the same classes even if the actual sources are no longer in them
5803 (having been invalidated). */
5805 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5806 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5808 struct table_elt *elt;
5809 struct table_elt *classp = sets[0].src_elt;
5810 rtx dest = SET_DEST (sets[0].rtl);
5811 enum machine_mode eqvmode = GET_MODE (dest);
5813 if (GET_CODE (dest) == STRICT_LOW_PART)
5815 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5818 if (insert_regs (src_eqv, classp, 0))
5820 rehash_using_reg (src_eqv);
5821 src_eqv_hash = HASH (src_eqv, eqvmode);
5823 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5824 elt->in_memory = src_eqv_in_memory;
5827 /* Check to see if src_eqv_elt is the same as a set source which
5828 does not yet have an elt, and if so set the elt of the set source
to src_eqv_elt. */
5830 for (i = 0; i < n_sets; i++)
5831 if (sets[i].rtl && sets[i].src_elt == 0
5832 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5833 sets[i].src_elt = src_eqv_elt;
5836 for (i = 0; i < n_sets; i++)
5837 if (sets[i].rtl && ! sets[i].src_volatile
5838 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5840 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5842 /* REG_EQUAL in setting a STRICT_LOW_PART
5843 gives an equivalent for the entire destination register,
5844 not just for the subreg being stored in now.
5845 This is a more interesting equivalence, so we arrange later
5846 to treat the entire reg as the destination. */
5847 sets[i].src_elt = src_eqv_elt;
5848 sets[i].src_hash = src_eqv_hash;
5852 /* Insert source and constant equivalent into hash table, if not
already present. */
5854 struct table_elt *classp = src_eqv_elt;
5855 rtx src = sets[i].src;
5856 rtx dest = SET_DEST (sets[i].rtl);
5857 enum machine_mode mode
5858 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5860 /* It's possible that we have a source value known to be
5861 constant but don't have a REG_EQUAL note on the insn.
5862 Lack of a note will mean src_eqv_elt will be NULL. This
5863 can happen where we've generated a SUBREG to access a
5864 CONST_INT that is already in a register in a wider mode.
5865 Ensure that the source expression is put in the proper
5868 classp = sets[i].src_const_elt;
5870 if (sets[i].src_elt == 0)
5872 /* Don't put a hard register source into the table if this is
5873 the last insn of a libcall. In this case, we only need
5874 to put src_eqv_elt in src_elt. */
5875 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5877 struct table_elt *elt;
5879 /* Note that these insert_regs calls cannot remove
5880 any of the src_elt's, because they would have failed to
5881 match if not still valid. */
5882 if (insert_regs (src, classp, 0))
5884 rehash_using_reg (src);
5885 sets[i].src_hash = HASH (src, mode);
5887 elt = insert (src, classp, sets[i].src_hash, mode);
5888 elt->in_memory = sets[i].src_in_memory;
5889 sets[i].src_elt = classp = elt;
5892 sets[i].src_elt = classp;
5894 if (sets[i].src_const && sets[i].src_const_elt == 0
5895 && src != sets[i].src_const
5896 && ! rtx_equal_p (sets[i].src_const, src))
5897 sets[i].src_elt = insert (sets[i].src_const, classp,
5898 sets[i].src_const_hash, mode);
5901 else if (sets[i].src_elt == 0)
5902 /* If we did not insert the source into the hash table (e.g., it was
5903 volatile), note the equivalence class for the REG_EQUAL value, if any,
5904 so that the destination goes into that class. */
5905 sets[i].src_elt = src_eqv_elt;
5907 invalidate_from_clobbers (x);
5909 /* Some registers are invalidated by subroutine calls. Memory is
5910 invalidated by non-constant calls. */
5912 if (GET_CODE (insn) == CALL_INSN)
5914 if (! CONST_OR_PURE_CALL_P (insn))
5915 invalidate_memory ();
5916 invalidate_for_call ();
5919 /* Now invalidate everything set by this instruction.
5920 If a SUBREG or other funny destination is being set,
5921 sets[i].rtl is still nonzero, so here we invalidate the reg
5922 a part of which is being set. */
5924 for (i = 0; i < n_sets; i++)
5927 /* We can't use the inner dest, because the mode associated with
5928 a ZERO_EXTRACT is significant. */
5929 rtx dest = SET_DEST (sets[i].rtl);
5931 /* Needed for registers to remove the register from its
5932 previous quantity's chain.
5933 Needed for memory if this is a nonvarying address, unless
5934 we have just done an invalidate_memory that covers even those. */
5935 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5936 invalidate (dest, VOIDmode);
5937 else if (GET_CODE (dest) == MEM)
5939 /* Outgoing arguments for a libcall don't
5940 affect any recorded expressions. */
5941 if (! libcall_insn || insn == libcall_insn)
5942 invalidate (dest, VOIDmode);
5944 else if (GET_CODE (dest) == STRICT_LOW_PART
5945 || GET_CODE (dest) == ZERO_EXTRACT)
5946 invalidate (XEXP (dest, 0), GET_MODE (dest));
5949 /* A volatile ASM invalidates everything. */
5950 if (GET_CODE (insn) == INSN
5951 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5952 && MEM_VOLATILE_P (PATTERN (insn)))
5953 flush_hash_table ();
5955 /* Make sure registers mentioned in destinations
5956 are safe for use in an expression to be inserted.
5957 This removes from the hash table
5958 any invalid entry that refers to one of these registers.
5960 We don't care about the return value from mention_regs because
5961 we are going to hash the SET_DEST values unconditionally. */
5963 for (i = 0; i < n_sets; i++)
5967 rtx x = SET_DEST (sets[i].rtl);
5969 if (GET_CODE (x) != REG)
5973 /* We used to rely on all references to a register becoming
5974 inaccessible when a register changes to a new quantity,
5975 since that changes the hash code. However, that is not
5976 safe, since after HASH_SIZE new quantities we get a
5977 hash 'collision' of a register with its own invalid
5978 entries. And since SUBREGs have been changed not to
5979 change their hash code with the hash code of the register,
5980 it wouldn't work any longer at all. So we have to check
5981 for any invalid references lying around now.
5982 This code is similar to the REG case in mention_regs,
5983 but it knows that reg_tick has been incremented, and
5984 it leaves reg_in_table as -1. */
5985 unsigned int regno = REGNO (x);
5986 unsigned int endregno
5987 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5988 : hard_regno_nregs[regno][GET_MODE (x)]);
5991 for (i = regno; i < endregno; i++)
5993 if (REG_IN_TABLE (i) >= 0)
5995 remove_invalid_refs (i);
5996 REG_IN_TABLE (i) = -1;
6003 /* We may have just removed some of the src_elt's from the hash table.
6004 So replace each one with the current head of the same class. */
6006 for (i = 0; i < n_sets; i++)
6009 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6010 /* If elt was removed, find current head of same class,
6011 or 0 if nothing remains of that class. */
6013 struct table_elt *elt = sets[i].src_elt;
6015 while (elt && elt->prev_same_value)
6016 elt = elt->prev_same_value;
6018 while (elt && elt->first_same_value == 0)
6019 elt = elt->next_same_value;
6020 sets[i].src_elt = elt ? elt->first_same_value : 0;
6024 /* Now insert the destinations into their equivalence classes. */
6026 for (i = 0; i < n_sets; i++)
6029 rtx dest = SET_DEST (sets[i].rtl);
6030 rtx inner_dest = sets[i].inner_dest;
6031 struct table_elt *elt;
6033 /* Don't record value if we are not supposed to risk allocating
6034 floating-point values in registers that might be wider than
6036 if ((flag_float_store
6037 && GET_CODE (dest) == MEM
6038 && FLOAT_MODE_P (GET_MODE (dest)))
6039 /* Don't record BLKmode values, because we don't know the
6040 size of it, and can't be sure that other BLKmode values
6041 have the same or smaller size. */
6042 || GET_MODE (dest) == BLKmode
6043 /* Don't record values of destinations set inside a libcall block
6044 since we might delete the libcall. Things should have been set
6045 up so we won't want to reuse such a value, but we play it safe
here. */
6048 /* If we didn't put a REG_EQUAL value or a source into the hash
6049 table, there is no point in recording DEST. */
6050 || sets[i].src_elt == 0
6051 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6052 or SIGN_EXTEND, don't record DEST since it can cause
6053 some tracking to be wrong.
6055 ??? Think about this more later. */
6056 || (GET_CODE (dest) == SUBREG
6057 && (GET_MODE_SIZE (GET_MODE (dest))
6058 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6059 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6060 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6063 /* STRICT_LOW_PART isn't part of the value BEING set,
6064 and neither is the SUBREG inside it.
6065 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6066 if (GET_CODE (dest) == STRICT_LOW_PART)
6067 dest = SUBREG_REG (XEXP (dest, 0));
6069 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6070 /* Registers must also be inserted into chains for quantities. */
6071 if (insert_regs (dest, sets[i].src_elt, 1))
6073 /* If `insert_regs' changes something, the hash code must be
6074 recalculated. */
6075 rehash_using_reg (dest);
6076 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6079 if (GET_CODE (inner_dest) == MEM
6080 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6081 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6082 that (MEM (ADDRESSOF (X))) is equivalent to Y.
6083 Consider the case in which the address of the MEM is
6084 passed to a function, which alters the MEM. Then, if we
6085 later use Y instead of the MEM we'll miss the update. */
6086 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6087 else
6088 elt = insert (dest, sets[i].src_elt,
6089 sets[i].dest_hash, GET_MODE (dest));
6091 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6092 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6093 || fixed_base_plus_p (XEXP (sets[i].inner_dest,
6096 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6097 narrower than M2, and both M1 and M2 are the same number of words,
6098 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6099 make that equivalence as well.
6101 However, BAR may have equivalences for which gen_lowpart
6102 will produce a simpler value than gen_lowpart applied to
6103 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6104 BAR's equivalences. If we don't get a simplified form, make
6105 the SUBREG. It will not be used in an equivalence, but will
6106 cause two similar assignments to be detected.
6108 Note the loop below will find SUBREG_REG (DEST) since we have
6109 already entered SRC and DEST of the SET in the table. */
6111 if (GET_CODE (dest) == SUBREG
6112 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6114 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6115 && (GET_MODE_SIZE (GET_MODE (dest))
6116 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6117 && sets[i].src_elt != 0)
6119 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6120 struct table_elt *elt, *classp = 0;
6122 for (elt = sets[i].src_elt->first_same_value; elt;
6123 elt = elt->next_same_value)
6127 struct table_elt *src_elt;
6130 /* Ignore invalid entries. */
6131 if (GET_CODE (elt->exp) != REG
6132 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6135 /* We may have already been playing subreg games. If the
6136 mode is already correct for the destination, use it. */
6137 if (GET_MODE (elt->exp) == new_mode)
6141 /* Calculate big endian correction for the SUBREG_BYTE.
6142 We have already checked that M1 (GET_MODE (dest))
6143 is not narrower than M2 (new_mode). */
6144 if (BYTES_BIG_ENDIAN)
6145 byte = (GET_MODE_SIZE (GET_MODE (dest))
6146 - GET_MODE_SIZE (new_mode));
6148 new_src = simplify_gen_subreg (new_mode, elt->exp,
6149 GET_MODE (dest), byte);
6152 /* The call to simplify_gen_subreg fails if the value
6153 is VOIDmode, yet we can't do any simplification, e.g.
6154 for EXPR_LISTs denoting function call results.
6155 It is invalid to construct a SUBREG with a VOIDmode
6156 SUBREG_REG, hence a zero new_src means we can't do
6157 this substitution. */
6161 src_hash = HASH (new_src, new_mode);
6162 src_elt = lookup (new_src, src_hash, new_mode);
6164 /* Put the new source in the hash table if it isn't
6165 already there. */
6166 if (src_elt == 0)
6167 {
6168 if (insert_regs (new_src, classp, 0))
6170 rehash_using_reg (new_src);
6171 src_hash = HASH (new_src, new_mode);
6173 src_elt = insert (new_src, classp, src_hash, new_mode);
6174 src_elt->in_memory = elt->in_memory;
6176 else if (classp && classp != src_elt->first_same_value)
6177 /* Show that two things that we've seen before are
6178 actually the same. */
6179 merge_equiv_classes (src_elt, classp);
6181 classp = src_elt->first_same_value;
6182 /* Ignore invalid entries. */
6183 while (classp
6184 && GET_CODE (classp->exp) != REG
6185 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6186 classp = classp->next_same_value;
6191 /* Special handling for (set REG0 REG1) where REG0 is the
6192 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6193 be used in the sequel, so (if easily done) change this insn to
6194 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6195 that computed their value. Then REG1 will become a dead store
6196 and won't cloud the situation for later optimizations.
6198 Do not make this change if REG1 is a hard register, because it will
6199 then be used in the sequel and we may be changing a two-operand insn
6200 into a three-operand insn.
6202 Also do not do this if we are operating on a copy of INSN.
6204 Also don't do this if INSN ends a libcall; this would cause an unrelated
6205 register to be set in the middle of a libcall, and we then get bad code
6206 if the libcall is deleted. */
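/* As a concrete illustration (register numbers are hypothetical): given

       (set (reg:SI 100) (plus:SI (reg:SI 90) (reg:SI 91)))
       (set (reg:SI 80) (reg:SI 100))

   where (reg:SI 80) is the cheaper register, the two insns become

       (set (reg:SI 80) (plus:SI (reg:SI 90) (reg:SI 91)))
       (set (reg:SI 100) (reg:SI 80))

   and the second insn is then a likely dead store that
   delete_trivially_dead_insns can remove.  */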
6208 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6209 && NEXT_INSN (PREV_INSN (insn)) == insn
6210 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6211 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6212 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6214 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6215 struct qty_table_elem *src_ent = &qty_table[src_q];
6217 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6218 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6221 /* Scan for the previous nonnote insn, but stop at a basic
6222 block boundary. */
6223 do
6224 {
6225 prev = PREV_INSN (prev);
6226 }
6227 while (prev && GET_CODE (prev) == NOTE
6228 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6230 /* Do not swap the registers around if the previous instruction
6231 attaches a REG_EQUIV note to REG1.
6233 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6234 from the pseudo that originally shadowed an incoming argument
6235 to another register. Some uses of REG_EQUIV might rely on it
6236 being attached to REG1 rather than REG2.
6238 This section previously turned the REG_EQUIV into a REG_EQUAL
6239 note. We cannot do that because REG_EQUIV may provide an
6240 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
6242 if (prev != 0 && GET_CODE (prev) == INSN
6243 && GET_CODE (PATTERN (prev)) == SET
6244 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6245 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6247 rtx dest = SET_DEST (sets[0].rtl);
6248 rtx src = SET_SRC (sets[0].rtl);
6251 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6252 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6253 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6254 apply_change_group ();
6256 /* If INSN has a REG_EQUAL note, and this note mentions
6257 REG0, then we must delete it, because the value in
6258 REG0 has changed. If the note's value is REG1, we must
6259 also delete it because that is now this insn's dest. */
6260 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6261 if (note != 0
6262 && (reg_mentioned_p (dest, XEXP (note, 0))
6263 || rtx_equal_p (src, XEXP (note, 0))))
6264 remove_note (insn, note);
6269 /* If this is a conditional jump insn, record any known equivalences due to
6270 the condition being tested. */
6272 last_jump_equiv_class = 0;
6273 if (GET_CODE (insn) == JUMP_INSN
6274 && n_sets == 1 && GET_CODE (x) == SET
6275 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6276 record_jump_equiv (insn, 0);
6279 /* If the previous insn set CC0 and this insn no longer references CC0,
6280 delete the previous insn. Here we use the fact that nothing expects CC0
6281 to be valid over an insn, which is true until the final pass. */
6282 if (prev_insn && GET_CODE (prev_insn) == INSN
6283 && (tem = single_set (prev_insn)) != 0
6284 && SET_DEST (tem) == cc0_rtx
6285 && ! reg_mentioned_p (cc0_rtx, x))
6286 delete_insn (prev_insn);
6288 prev_insn_cc0 = this_insn_cc0;
6289 prev_insn_cc0_mode = this_insn_cc0_mode;
6294 /* Remove from the hash table all expressions that reference memory. */
6297 invalidate_memory (void)
6300 struct table_elt *p, *next;
6302 for (i = 0; i < HASH_SIZE; i++)
6303 for (p = table[i]; p; p = next)
6305 next = p->next_same_hash;
6307 remove_from_table (p, i);
6311 /* If ADDR is an address that implicitly affects the stack pointer, return
6312 1 and update the register tables to show the effect. Else, return 0. */
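/* For example, on a target where a push is represented as

       (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 100))

   the address (pre_dec:SI (reg:SI sp)) implicitly decrements the stack
   pointer, so this function bumps REG_TICK for STACK_POINTER_REGNUM and
   returns 1.  (The RTL above is only illustrative; the exact form is
   target dependent.)  */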
6315 addr_affects_sp_p (rtx addr)
6317 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
6318 && GET_CODE (XEXP (addr, 0)) == REG
6319 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6321 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6323 REG_TICK (STACK_POINTER_REGNUM)++;
6324 /* Is it possible to use a subreg of SP? */
6325 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6328 /* This should be *very* rare. */
6329 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6330 invalidate (stack_pointer_rtx, VOIDmode);
6338 /* Perform invalidation on the basis of everything about an insn
6339 except for invalidating the actual places that are SET in it.
6340 This includes the places CLOBBERed, and anything that might
6341 alias with something that is SET or CLOBBERed.
6343 X is the pattern of the insn. */
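/* For example, for a pattern such as

       (parallel [(set (reg:SI 100) ...)
                  (clobber (reg:SI 1))])

   only (reg:SI 1) is invalidated here; the SET destination is handled
   separately by cse_insn.  (The register numbers are illustrative.)  */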
6346 invalidate_from_clobbers (rtx x)
6348 if (GET_CODE (x) == CLOBBER)
6350 rtx ref = XEXP (x, 0);
6353 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6354 || GET_CODE (ref) == MEM)
6355 invalidate (ref, VOIDmode);
6356 else if (GET_CODE (ref) == STRICT_LOW_PART
6357 || GET_CODE (ref) == ZERO_EXTRACT)
6358 invalidate (XEXP (ref, 0), GET_MODE (ref));
6361 else if (GET_CODE (x) == PARALLEL)
6364 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6366 rtx y = XVECEXP (x, 0, i);
6367 if (GET_CODE (y) == CLOBBER)
6369 rtx ref = XEXP (y, 0);
6370 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6371 || GET_CODE (ref) == MEM)
6372 invalidate (ref, VOIDmode);
6373 else if (GET_CODE (ref) == STRICT_LOW_PART
6374 || GET_CODE (ref) == ZERO_EXTRACT)
6375 invalidate (XEXP (ref, 0), GET_MODE (ref));
6381 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6382 and replace any registers in them with either an equivalent constant
6383 or the canonical form of the register. If we are inside an address,
6384 only do this if the address remains valid.
6386 OBJECT is 0 except when within a MEM in which case it is the MEM.
6388 Return the replacement for X. */
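/* For instance, if (reg:SI 120) currently has the known constant value
   (const_int 42), a note such as

       (expr_list:REG_EQUAL (plus:SI (reg:SI 120) (reg:SI 121)) ...)

   has (reg:SI 120) replaced by (const_int 42), and (reg:SI 121) replaced
   by the canonical register of its quantity.  (Register numbers and the
   constant are purely illustrative.)  */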
6391 cse_process_notes (rtx x, rtx object)
6393 enum rtx_code code = GET_CODE (x);
6394 const char *fmt = GET_RTX_FORMAT (code);
6411 validate_change (x, &XEXP (x, 0),
6412 cse_process_notes (XEXP (x, 0), x), 0);
6417 if (REG_NOTE_KIND (x) == REG_EQUAL)
6418 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6420 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6427 rtx new = cse_process_notes (XEXP (x, 0), object);
6428 /* We don't substitute VOIDmode constants into these rtx,
6429 since they would impede folding. */
6430 if (GET_MODE (new) != VOIDmode)
6431 validate_change (object, &XEXP (x, 0), new, 0);
6436 i = REG_QTY (REGNO (x));
6438 /* Return a constant or a constant register. */
6439 if (REGNO_QTY_VALID_P (REGNO (x)))
6441 struct qty_table_elem *ent = &qty_table[i];
6443 if (ent->const_rtx != NULL_RTX
6444 && (CONSTANT_P (ent->const_rtx)
6445 || GET_CODE (ent->const_rtx) == REG))
6447 rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
6453 /* Otherwise, canonicalize this register. */
6454 return canon_reg (x, NULL_RTX);
6460 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6462 validate_change (object, &XEXP (x, i),
6463 cse_process_notes (XEXP (x, i), object), 0);
6468 /* Find common subexpressions between the end test of a loop and the beginning
6469 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6471 Often we have a loop where an expression in the exit test is used
6472 in the body of the loop. For example "while (*p) *q++ = *p++;".
6473 Because of the way we duplicate the loop exit test in front of the loop,
6474 however, we don't detect that common subexpression. This will be caught
6475 when global cse is implemented, but this is a quite common case.
6477 This function handles the most common cases of these common expressions.
6478 It is called after we have processed the basic block ending with the
6479 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6480 jumps to a label used only once. */
6483 cse_around_loop (rtx loop_start)
6487 struct table_elt *p;
6489 /* If the jump at the end of the loop doesn't go to the start, we don't
6490 do anything. */
6491 for (insn = PREV_INSN (loop_start);
6492 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6493 insn = PREV_INSN (insn))
6497 || GET_CODE (insn) != NOTE
6498 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6501 /* If the last insn of the loop (the end test) was an NE comparison,
6502 we will interpret it as an EQ comparison, since we fell through
6503 the loop. Any equivalences resulting from that comparison are
6504 therefore not valid and must be invalidated. */
6505 if (last_jump_equiv_class)
6506 for (p = last_jump_equiv_class->first_same_value; p;
6507 p = p->next_same_value)
6509 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6510 || (GET_CODE (p->exp) == SUBREG
6511 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6512 invalidate (p->exp, VOIDmode);
6513 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6514 || GET_CODE (p->exp) == ZERO_EXTRACT)
6515 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6518 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6519 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6521 The only thing we do with SET_DEST is invalidate entries, so we
6522 can safely process each SET in order. It is slightly less efficient
6523 to do so, but we only want to handle the most common cases.
6525 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6526 These pseudos won't have valid entries in any of the tables indexed
6527 by register number, such as reg_qty. We avoid out-of-range array
6528 accesses by not processing any instructions created after cse started. */
6530 for (insn = NEXT_INSN (loop_start);
6531 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6532 && INSN_UID (insn) < max_insn_uid
6533 && ! (GET_CODE (insn) == NOTE
6534 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6535 insn = NEXT_INSN (insn))
6538 && (GET_CODE (PATTERN (insn)) == SET
6539 || GET_CODE (PATTERN (insn)) == CLOBBER))
6540 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6541 else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6542 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6543 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6544 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6545 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6550 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6551 since they are done elsewhere. This function is called via note_stores. */
6554 invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6556 enum rtx_code code = GET_CODE (dest);
6559 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6560 /* There are times when an address can appear varying and be a PLUS
6561 during this scan when it would be a fixed address were we to know
6562 the proper equivalences. So invalidate all memory if there is
6563 a BLKmode or nonscalar memory reference or a reference to a
6564 variable address. */
6565 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6566 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6568 invalidate_memory ();
6572 if (GET_CODE (set) == CLOBBER
6577 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6578 invalidate (XEXP (dest, 0), GET_MODE (dest));
6579 else if (code == REG || code == SUBREG || code == MEM)
6580 invalidate (dest, VOIDmode);
6583 /* Invalidate all insns from START up to the end of the function or the
6584 next label. This is called when we wish to CSE around a block that is
6585 conditionally executed. */
6588 invalidate_skipped_block (rtx start)
6592 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6593 insn = NEXT_INSN (insn))
6595 if (! INSN_P (insn))
6598 if (GET_CODE (insn) == CALL_INSN)
6600 if (! CONST_OR_PURE_CALL_P (insn))
6601 invalidate_memory ();
6602 invalidate_for_call ();
6605 invalidate_from_clobbers (PATTERN (insn));
6606 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6610 /* If modifying X will modify the value in *DATA (which is really an
6611 `rtx *'), indicate that fact by setting the pointed to value to
6612 NULL_RTX. */
6615 cse_check_loop_start (rtx x, rtx set ATTRIBUTE_UNUSED, void *data)
6617 rtx *cse_check_loop_start_value = (rtx *) data;
6619 if (*cse_check_loop_start_value == NULL_RTX
6620 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6623 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6624 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6625 *cse_check_loop_start_value = NULL_RTX;
6628 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6629 a loop that starts with the label at LOOP_START.
6631 If X is a SET, we see if its SET_SRC is currently in our hash table.
6632 If so, we see if it has a value equal to some register used only in the
6633 loop exit code (as marked by jump.c).
6635 If those two conditions are true, we search backwards from the start of
6636 the loop to see if that same value was loaded into a register that still
6637 retains its value at the start of the loop.
6639 If so, we insert an insn after the load to copy the destination of that
6640 load into the equivalent register and (try to) replace our SET_SRC with that
6641 register.
6643 In any event, we invalidate whatever this SET or CLOBBER modifies. */
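/* A hypothetical illustration: if the loop exit test uses (reg:SI 130),
   the SET near the loop head computes the same expression that an insn
   before LOOP_START already computed into (reg:SI 90), and nothing in
   between clobbers it, we emit

       (set (reg:SI 130) (reg:SI 90))

   right after that earlier insn and replace our SET_SRC with
   (reg:SI 130).  (All register numbers are illustrative.)  */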
6646 cse_set_around_loop (rtx x, rtx insn, rtx loop_start)
6648 struct table_elt *src_elt;
6650 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6651 are setting PC or CC0 or whose SET_SRC is already a register. */
6652 if (GET_CODE (x) == SET
6653 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6654 && GET_CODE (SET_SRC (x)) != REG)
6656 src_elt = lookup (SET_SRC (x),
6657 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6658 GET_MODE (SET_DEST (x)));
6661 for (src_elt = src_elt->first_same_value; src_elt;
6662 src_elt = src_elt->next_same_value)
6663 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6664 && COST (src_elt->exp) < COST (SET_SRC (x)))
6668 /* Look for an insn in front of LOOP_START that sets
6669 something in the desired mode to SET_SRC (x) before we hit
6670 a label or CALL_INSN. */
6672 for (p = prev_nonnote_insn (loop_start);
6673 p && GET_CODE (p) != CALL_INSN
6674 && GET_CODE (p) != CODE_LABEL;
6675 p = prev_nonnote_insn (p))
6676 if ((set = single_set (p)) != 0
6677 && GET_CODE (SET_DEST (set)) == REG
6678 && GET_MODE (SET_DEST (set)) == src_elt->mode
6679 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6681 /* We now have to ensure that nothing between P
6682 and LOOP_START modified anything referenced in
6683 SET_SRC (x). We know that nothing within the loop
6684 can modify it, or we would have invalidated it in
6685 the hash table. */
6687 rtx cse_check_loop_start_value = SET_SRC (x);
6688 for (q = p; q != loop_start; q = NEXT_INSN (q))
6690 note_stores (PATTERN (q),
6691 cse_check_loop_start,
6692 &cse_check_loop_start_value);
6694 /* If nothing was changed and we can replace our
6695 SET_SRC, add an insn after P to copy its destination
6696 to what we will be replacing SET_SRC with. */
6697 if (cse_check_loop_start_value
6699 && !can_throw_internal (insn)
6700 && validate_change (insn, &SET_SRC (x),
6703 /* If this creates new pseudos, this is unsafe,
6704 because the regno of new pseudo is unsuitable
6705 to index into reg_qty when cse_insn processes
6706 the new insn. Therefore, if a new pseudo was
6707 created, discard this optimization. */
6708 int nregs = max_reg_num ();
6710 = gen_move_insn (src_elt->exp, SET_DEST (set));
6711 if (nregs != max_reg_num ())
6713 if (! validate_change (insn, &SET_SRC (x),
6719 if (CONSTANT_P (SET_SRC (set))
6720 && ! find_reg_equal_equiv_note (insn))
6721 set_unique_reg_note (insn, REG_EQUAL,
6723 if (control_flow_insn_p (p))
6724 /* p can cause a control flow transfer so it
6725 is the last insn of a basic block. We can't
6726 therefore use emit_insn_after. */
6727 emit_insn_before (move, next_nonnote_insn (p));
6729 emit_insn_after (move, p);
6737 /* Deal with the destination of X affecting the stack pointer. */
6738 addr_affects_sp_p (SET_DEST (x));
6740 /* See comment on similar code in cse_insn for explanation of these
6741 tests. */
6742 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6743 || GET_CODE (SET_DEST (x)) == MEM)
6744 invalidate (SET_DEST (x), VOIDmode);
6745 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6746 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6747 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6750 /* Find the end of INSN's basic block and return its range,
6751 the total number of SETs in all the insns of the block, the last insn of the
6752 block, and the branch path.
6754 The branch path indicates which branches should be followed. If a nonzero
6755 path size is specified, the block should be rescanned and a different set
6756 of branches will be taken. The branch path is only used if
6757 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6759 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6760 used to describe the block. It is filled in with the information about
6761 the current block. The incoming structure's branch path, if any, is used
6762 to construct the output branch path. */
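/* For example, with -fcse-follow-jumps the recorded path might contain two
   entries such as { {branch = insn 30, TAKEN}, {branch = insn 57, AROUND} },
   meaning the extended block follows the conditional jump at insn 30 and
   skips over the block that insn 57 branches around.  On a rescan the last
   entry is first changed to NOT_TAKEN, or dropped if it already was
   NOT_TAKEN.  (The insn numbers are illustrative.)  */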
6765 cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6766 int follow_jumps, int after_loop, int skip_blocks)
6770 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6771 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6772 int path_size = data->path_size;
6776 /* Update the previous branch path, if any. If the last branch was
6777 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6778 shorten the path by one and look at the previous branch. We know that
6779 at least one branch must have been taken if PATH_SIZE is nonzero. */
6780 while (path_size > 0)
6782 if (data->path[path_size - 1].status != NOT_TAKEN)
6784 data->path[path_size - 1].status = NOT_TAKEN;
6791 /* If the first instruction is marked with QImode, that means we've
6792 already processed this block. Our caller will look at DATA->LAST
6793 to figure out where to go next. We want to return the next block
6794 in the instruction stream, not some branched-to block somewhere
6795 else. We accomplish this by pretending our caller forbade us to
6796 follow jumps, or skip blocks. */
6797 if (GET_MODE (insn) == QImode)
6798 follow_jumps = skip_blocks = 0;
6800 /* Scan to end of this basic block. */
6801 while (p && GET_CODE (p) != CODE_LABEL)
6803 /* Don't cse out the end of a loop. This makes a difference
6804 only for the unusual loops that always execute at least once;
6805 all other loops have labels there so we will stop in any case.
6806 Cse'ing out the end of the loop is dangerous because it
6807 might cause an invariant expression inside the loop
6808 to be reused after the end of the loop. This would make it
6809 hard to move the expression out of the loop in loop.c,
6810 especially if it is one of several equivalent expressions
6811 and loop.c would like to eliminate it.
6813 If we are running after loop.c has finished, we can ignore
6814 the NOTE_INSN_LOOP_END. */
6816 if (! after_loop && GET_CODE (p) == NOTE
6817 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6820 /* Don't cse over a call to setjmp; on some machines (eg VAX)
6821 the regs restored by the longjmp come from
6822 a later time than the setjmp. */
6823 if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6824 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6827 /* A PARALLEL can have lots of SETs in it,
6828 especially if it is really an ASM_OPERANDS. */
6829 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6830 nsets += XVECLEN (PATTERN (p), 0);
6831 else if (GET_CODE (p) != NOTE)
6834 /* Ignore insns made by CSE; they cannot affect the boundaries of
6835 the basic block. */
6837 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6838 high_cuid = INSN_CUID (p);
6839 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6840 low_cuid = INSN_CUID (p);
6842 /* See if this insn is in our branch path. If it is and we are to
6843 take it, do so. */
6844 if (path_entry < path_size && data->path[path_entry].branch == p)
6846 if (data->path[path_entry].status != NOT_TAKEN)
6849 /* Point to next entry in path, if any. */
6853 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6854 was specified, we haven't reached our maximum path length, there are
6855 insns following the target of the jump, this is the only use of the
6856 jump label, and the target label is preceded by a BARRIER.
6858 Alternatively, we can follow the jump if it branches around a
6859 block of code and there are no other branches into the block.
6860 In this case invalidate_skipped_block will be called to invalidate any
6861 registers set in the block when following the jump. */
6863 else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6864 && GET_CODE (p) == JUMP_INSN
6865 && GET_CODE (PATTERN (p)) == SET
6866 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6867 && JUMP_LABEL (p) != 0
6868 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6869 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6871 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6872 if ((GET_CODE (q) != NOTE
6873 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6874 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
6875 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6876 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6879 /* If we ran into a BARRIER, this code is an extension of the
6880 basic block when the branch is taken. */
6881 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6883 /* Don't allow ourselves to keep walking around an
6884 always-executed loop. */
6885 if (next_real_insn (q) == next)
6891 /* Similarly, don't put a branch in our path more than once. */
6892 for (i = 0; i < path_entry; i++)
6893 if (data->path[i].branch == p)
6896 if (i != path_entry)
6899 data->path[path_entry].branch = p;
6900 data->path[path_entry++].status = TAKEN;
6902 /* This branch now ends our path. It was possible that we
6903 didn't see this branch the last time around (when the
6904 insn in front of the target was a JUMP_INSN that was
6905 turned into a no-op). */
6906 path_size = path_entry;
6909 /* Mark block so we won't scan it again later. */
6910 PUT_MODE (NEXT_INSN (p), QImode);
6912 /* Detect a branch around a block of code. */
6913 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6917 if (next_real_insn (q) == next)
6923 for (i = 0; i < path_entry; i++)
6924 if (data->path[i].branch == p)
6927 if (i != path_entry)
6930 /* This is no_labels_between_p (p, q) with an added check for
6931 reaching the end of a function (in case Q precedes P). */
6932 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6933 if (GET_CODE (tmp) == CODE_LABEL)
6938 data->path[path_entry].branch = p;
6939 data->path[path_entry++].status = AROUND;
6941 path_size = path_entry;
6944 /* Mark block so we won't scan it again later. */
6945 PUT_MODE (NEXT_INSN (p), QImode);
6952 data->low_cuid = low_cuid;
6953 data->high_cuid = high_cuid;
6954 data->nsets = nsets;
6957 /* If all jumps in the path are not taken, set our path length to zero
6958 so a rescan won't be done. */
6959 for (i = path_size - 1; i >= 0; i--)
6960 if (data->path[i].status != NOT_TAKEN)
6964 data->path_size = 0;
6966 data->path_size = path_size;
6968 /* End the current branch path. */
6969 data->path[path_size].branch = 0;
6972 /* Perform cse on the instructions of a function.
6973 F is the first instruction.
6974 NREGS is one plus the highest pseudo-reg number used in the instruction.
6976 AFTER_LOOP is 1 if this is the cse call done after loop optimization
6977 (only if -frerun-cse-after-loop).
6979 Returns 1 if jump_optimize should be redone due to simplifications
6980 in conditional jump instructions. */
6983 cse_main (rtx f, int nregs, int after_loop, FILE *file)
6985 struct cse_basic_block_data val;
6989 val.path = xmalloc (sizeof (struct branch_path)
6990 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6992 cse_jumps_altered = 0;
6993 recorded_label_ref = 0;
6994 constant_pool_entries_cost = 0;
6995 constant_pool_entries_regcost = 0;
6997 gen_lowpart = gen_lowpart_if_possible;
7000 init_alias_analysis ();
7004 max_insn_uid = get_max_uid ();
7006 reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
7008 #ifdef LOAD_EXTEND_OP
7010 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
7011 and change the code and mode as appropriate. */
7012 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7015 /* Reset the counter indicating how many elements have been made
7016 so far. */
7017 n_elements_made = 0;
7019 /* Find the largest uid. */
7021 max_uid = get_max_uid ();
7022 uid_cuid = xcalloc (max_uid + 1, sizeof (int));
7024 /* Compute the mapping from uids to cuids.
7025 CUIDs are numbers assigned to insns, like uids,
7026 except that cuids increase monotonically through the code.
7027 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7028 between two insns is not affected by -g. */
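/* For example, given the insn stream "insn 12, line-number note, insn 14",
   the cuids assigned are 1, 1 and 2: the note shares the cuid of the insn
   before it, so cuid distances between real insns are the same with and
   without -g.  (The uids are illustrative.)  */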
7030 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7032 if (GET_CODE (insn) != NOTE
7033 || NOTE_LINE_NUMBER (insn) < 0)
7034 INSN_CUID (insn) = ++i;
7035 else
7036 /* Give a line number note the same cuid as preceding insn. */
7037 INSN_CUID (insn) = i;
7040 ggc_push_context ();
7042 /* Loop over basic blocks.
7043 Compute the maximum number of qty's needed for each basic block
7044 (which is 2 for each SET). */
7049 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7050 flag_cse_skip_blocks);
7052 /* If this basic block was already processed or has no sets, skip it. */
7053 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7055 PUT_MODE (insn, VOIDmode);
7056 insn = (val.last ? NEXT_INSN (val.last) : 0);
7061 cse_basic_block_start = val.low_cuid;
7062 cse_basic_block_end = val.high_cuid;
7063 max_qty = val.nsets * 2;
7066 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7067 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7070 /* Make MAX_QTY bigger to give us room to optimize
7071 past the end of this basic block, if that should prove useful. */
7077 /* If this basic block is being extended by following certain jumps,
7078 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7079 Otherwise, we start after this basic block. */
7080 if (val.path_size > 0)
7081 cse_basic_block (insn, val.last, val.path, 0);
7084 int old_cse_jumps_altered = cse_jumps_altered;
7087 /* When cse changes a conditional jump to an unconditional
7088 jump, we want to reprocess the block, since it will give
7089 us a new branch path to investigate. */
7090 cse_jumps_altered = 0;
7091 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7092 if (cse_jumps_altered == 0
7093 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7096 cse_jumps_altered |= old_cse_jumps_altered;
7109 if (max_elements_made < n_elements_made)
7110 max_elements_made = n_elements_made;
7113 end_alias_analysis ();
7115 free (reg_eqv_table);
7117 gen_lowpart = gen_lowpart_general;
7119 return cse_jumps_altered || recorded_label_ref;
7122 /* Process a single basic block. FROM and TO are the limits of the basic
7123 block. NEXT_BRANCH points to the branch path when following jumps or
7124 a null path when not following jumps.
7126 AROUND_LOOP is nonzero if we are to try to cse around to the start of a
7127 loop. This is true when we are being called for the last time on a
7128 block and this CSE pass is before loop.c. */
7131 cse_basic_block (rtx from, rtx to, struct branch_path *next_branch,
7136 rtx libcall_insn = NULL_RTX;
7138 int no_conflict = 0;
7140 /* This array is undefined before max_reg, so only allocate
7141 the space actually needed and adjust the start. */
7143 qty_table = xmalloc ((max_qty - max_reg) * sizeof (struct qty_table_elem));
7144 qty_table -= max_reg;
7148 /* TO might be a label. If so, protect it from being deleted. */
7149 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7152 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7154 enum rtx_code code = GET_CODE (insn);
7156 /* If we have processed 1,000 insns, flush the hash table to
7157 avoid extreme quadratic behavior. We must not include NOTEs
7158 in the count since there may be more of them when generating
7159 debugging information. If we clear the table at different
7160 times, code generated with -g -O might be different than code
7161 generated with -O but not -g.
7163 ??? This is a real kludge and needs to be done some other way.
7165 if (code != NOTE && num_insns++ > 1000)
7167 flush_hash_table ();
7171 /* See if this is a branch that is part of the path. If so, and it is
7172 to be taken, do so. */
7173 if (next_branch->branch == insn)
7175 enum taken status = next_branch++->status;
7176 if (status != NOT_TAKEN)
7178 if (status == TAKEN)
7179 record_jump_equiv (insn, 1);
7181 invalidate_skipped_block (NEXT_INSN (insn));
7183 /* Set the last insn as the jump insn; it doesn't affect cc0.
7184 Then follow this branch. */
7189 insn = JUMP_LABEL (insn);
7194 if (GET_MODE (insn) == QImode)
7195 PUT_MODE (insn, VOIDmode);
7197 if (GET_RTX_CLASS (code) == RTX_INSN)
7201 /* Process notes first so we have all notes in canonical forms when
7202 looking for duplicate operations. */
7204 if (REG_NOTES (insn))
7205 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7207 /* Track when we are inside a LIBCALL block. Inside such a block,
7208 we do not want to record destinations. The last insn of a
7209 LIBCALL block is not considered to be part of the block, since
7210 its destination is the result of the block and hence should be
7211 recorded. */
7213 if (REG_NOTES (insn) != 0)
7215 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7216 libcall_insn = XEXP (p, 0);
7217 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7219 /* Keep libcall_insn for the last SET insn of a no-conflict
7220 block to prevent changing the destination. */
7226 else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
7230 cse_insn (insn, libcall_insn);
7232 if (no_conflict == -1)
7238 /* If we haven't already found an insn where we added a LABEL_REF,
7239 check this one. */
7240 if (GET_CODE (insn) == INSN && ! recorded_label_ref
7241 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7243 recorded_label_ref = 1;
7246 /* If INSN is now an unconditional jump, skip to the end of our
7247 basic block by pretending that we just did the last insn in the
7248 basic block. If we are jumping to the end of our block, show
7249 that we can have one usage of TO. */
7251 if (any_uncondjump_p (insn))
7255 free (qty_table + max_reg);
7259 if (JUMP_LABEL (insn) == to)
7262 /* Maybe TO was deleted because the jump is unconditional.
7263 If so, there is nothing left in this basic block. */
7264 /* ??? Perhaps it would be smarter to set TO
7265 to whatever follows this insn,
7266 and pretend the basic block had always ended here. */
7267 if (INSN_DELETED_P (to))
7270 insn = PREV_INSN (to);
7273 /* See if it is ok to keep on going past the label
7274 which used to end our basic block. Remember that we incremented
7275 the count of that label, so we decrement it here. If we made
7276 a jump unconditional, TO_USAGE will be one; in that case, we don't
7277 want to count the use in that jump. */
7279 if (to != 0 && NEXT_INSN (insn) == to
7280 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7282 struct cse_basic_block_data val;
7285 insn = NEXT_INSN (to);
7287 /* If TO was the last insn in the function, we are done. */
7290 free (qty_table + max_reg);
7294 /* If TO was preceded by a BARRIER we are done with this block
7295 because it has no continuation. */
7296 prev = prev_nonnote_insn (to);
7297 if (prev && GET_CODE (prev) == BARRIER)
7299 free (qty_table + max_reg);
7303 /* Find the end of the following block. Note that we won't be
7304 following branches in this case. */
7307 val.path = xmalloc (sizeof (struct branch_path)
7308 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
7309 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7312 /* If the tables we allocated have enough space left
7313 to handle all the SETs in the next basic block,
7314 continue through it. Otherwise, return,
7315 and that block will be scanned individually. */
7316 if (val.nsets * 2 + next_qty > max_qty)
7319 cse_basic_block_start = val.low_cuid;
7320 cse_basic_block_end = val.high_cuid;
7323 /* Prevent TO from being deleted if it is a label. */
7324 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7327 /* Back up so we process the first insn in the extension. */
7328 insn = PREV_INSN (insn);
7332 if (next_qty > max_qty)
7335 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7336 the previous insn is the only insn that branches to the head of a loop,
7337 we can cse into the loop. Don't do this if we changed the jump
7338 structure of a loop unless we aren't going to be following jumps. */
7340 insn = prev_nonnote_insn (to);
7341 if ((cse_jumps_altered == 0
7342 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7343 && around_loop && to != 0
7344 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7345 && GET_CODE (insn) == JUMP_INSN
7346 && JUMP_LABEL (insn) != 0
7347 && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
7348 cse_around_loop (JUMP_LABEL (insn));
7350 free (qty_table + max_reg);
7352 return to ? NEXT_INSN (to) : 0;
7355 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7356 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
7359 check_for_label_ref (rtx *rtl, void *data)
7361 rtx insn = (rtx) data;
7363 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7364 we must rerun jump since it needs to place the note. If this is a
7365 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7366 since no REG_LABEL will be added. */
7367 return (GET_CODE (*rtl) == LABEL_REF
7368 && ! LABEL_REF_NONLOCAL_P (*rtl)
7369 && LABEL_P (XEXP (*rtl, 0))
7370 && INSN_UID (XEXP (*rtl, 0)) != 0
7371 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7374 /* Count the number of times registers are used (not set) in X.
7375 COUNTS is an array in which we accumulate the count, INCR is how much
7376 we count each register usage. */
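/* For instance, counting uses in

       (set (reg:SI 65) (plus:SI (reg:SI 66) (reg:SI 66)))

   with INCR == 1 leaves counts[65] alone, since a SET of a register is not
   a use, and adds 2 to counts[66].  (Register numbers are illustrative.)  */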
7379 count_reg_usage (rtx x, int *counts, int incr)
7389 switch (code = GET_CODE (x))
7392 counts[REGNO (x)] += incr;
7406 /* If we are clobbering a MEM, mark any registers inside the address
7407 as being used. */
7408 if (GET_CODE (XEXP (x, 0)) == MEM)
7409 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, incr);
7413 /* Unless we are setting a REG, count everything in SET_DEST. */
7414 if (GET_CODE (SET_DEST (x)) != REG)
7415 count_reg_usage (SET_DEST (x), counts, incr);
7416 count_reg_usage (SET_SRC (x), counts, incr);
7420 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, incr);
7425 count_reg_usage (PATTERN (x), counts, incr);
7427 /* Things used in a REG_EQUAL note aren't dead since loop may try to
7428 use them. */
7430 note = find_reg_equal_equiv_note (x);
7433 rtx eqv = XEXP (note, 0);
7435 if (GET_CODE (eqv) == EXPR_LIST)
7436 /* This REG_EQUAL note describes the result of a function call.
7437 Process all the arguments. */
7440 count_reg_usage (XEXP (eqv, 0), counts, incr);
7441 eqv = XEXP (eqv, 1);
7443 while (eqv && GET_CODE (eqv) == EXPR_LIST);
7445 count_reg_usage (eqv, counts, incr);
7450 if (REG_NOTE_KIND (x) == REG_EQUAL
7451 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
7452 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7453 involving registers in the address. */
7454 || GET_CODE (XEXP (x, 0)) == CLOBBER)
7455 count_reg_usage (XEXP (x, 0), counts, incr);
7457 count_reg_usage (XEXP (x, 1), counts, incr);
7461 /* Iterate over just the inputs, not the constraints as well. */
7462 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7463 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, incr);
7473 fmt = GET_RTX_FORMAT (code);
7474 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7477 count_reg_usage (XEXP (x, i), counts, incr);
7478 else if (fmt[i] == 'E')
7479 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7480 count_reg_usage (XVECEXP (x, i, j), counts, incr);
7484 /* Return true if set is live. */
7486 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
7493 if (set_noop_p (set))
7497 else if (GET_CODE (SET_DEST (set)) == CC0
7498 && !side_effects_p (SET_SRC (set))
7499 && ((tem = next_nonnote_insn (insn)) == 0
7501 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7504 else if (GET_CODE (SET_DEST (set)) != REG
7505 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7506 || counts[REGNO (SET_DEST (set))] != 0
7507 || side_effects_p (SET_SRC (set))
7508 /* An ADDRESSOF expression can turn into a use of the
7509 internal arg pointer, so always consider the
7510 internal arg pointer live. If it is truly dead,
7511 flow will delete the initializing insn. */
7512 || (SET_DEST (set) == current_function_internal_arg_pointer))
7517 /* Return true if insn is live. */
7520 insn_live_p (rtx insn, int *counts)
7523 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7525 else if (GET_CODE (PATTERN (insn)) == SET)
7526 return set_live_p (PATTERN (insn), insn, counts);
7527 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7529 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7531 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7533 if (GET_CODE (elt) == SET)
7535 if (set_live_p (elt, insn, counts))
7538 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7547 /* Return true if libcall is dead as a whole. */
7550 dead_libcall_p (rtx insn, int *counts)
7554 /* See if there's a REG_EQUAL note on this insn and try to
7555 replace the source with the REG_EQUAL expression.
7557 We assume that insns with REG_RETVALs can only be reg->reg
7558 copies at this point. */
7559 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7563 set = single_set (insn);
7567 new = simplify_rtx (XEXP (note, 0));
7568 if (!new)
7569 new = XEXP (note, 0);
7571 /* While changing insn, we must update the counts accordingly. */
7572 count_reg_usage (insn, counts, -1);
7574 if (validate_change (insn, &SET_SRC (set), new, 0))
7576 count_reg_usage (insn, counts, 1);
7577 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7578 remove_note (insn, note);
7582 if (CONSTANT_P (new))
7584 new = force_const_mem (GET_MODE (SET_DEST (set)), new);
7585 if (new && validate_change (insn, &SET_SRC (set), new, 0))
7587 count_reg_usage (insn, counts, 1);
7588 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7589 remove_note (insn, note);
7594 count_reg_usage (insn, counts, 1);
7598 /* Scan all the insns and delete any that are dead; i.e., they store a register
7599 that is never used or they copy a register to itself.
7601 This is used to remove insns made obviously dead by cse, loop or other
7602 optimizations. It improves the heuristics in loop since it won't try to
7603 move dead invariants out of loops or make givs for dead quantities. The
7604 remaining passes of the compilation are also sped up. */
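/* For example, an insn like (set (reg:SI 70) (reg:SI 70)), or a SET of a
   pseudo whose entry in the use counts has dropped to zero, is deleted
   here; the counts of the registers it used are then decremented, which
   may expose more dead insns on the next iteration of the outer loop.
   (The register number is illustrative.)  */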
7607 delete_trivially_dead_insns (rtx insns, int nreg)
7611 int in_libcall = 0, dead_libcall = 0;
7612 int ndead = 0, nlastdead, niterations = 0;
7614 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7615 /* First count the number of times each register is used. */
7616 counts = xcalloc (nreg, sizeof (int));
7617 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7618 count_reg_usage (insn, counts, 1);
7624 /* Go from the last insn to the first and delete insns that only set unused
7625 registers or copy a register to itself. As we delete an insn, remove
7626 usage counts for registers it uses.
7628 The first jump optimization pass may leave a real insn as the last
7629 insn in the function. We must not skip that insn or we may end
7630 up deleting code that is not really dead. */
7631 insn = get_last_insn ();
7632 if (! INSN_P (insn))
7633 insn = prev_real_insn (insn);
7635 for (; insn; insn = prev)
7639 prev = prev_real_insn (insn);
7641 /* Don't delete any insns that are part of a libcall block unless
7642 we can delete the whole libcall block.
7644 Flow or loop might get confused if we did that. Remember
7645 that we are scanning backwards. */
7646 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7650 dead_libcall = dead_libcall_p (insn, counts);
7652 else if (in_libcall)
7653 live_insn = ! dead_libcall;
7655 live_insn = insn_live_p (insn, counts);
7657 /* If this is a dead insn, delete it and show registers in it aren't
7658 being used. */
7662 count_reg_usage (insn, counts, -1);
7663 delete_insn_and_edges (insn);
7667 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7674 while (ndead != nlastdead);
7676 if (dump_file && ndead)
7677 fprintf (dump_file, "Deleted %i trivially dead insns; %i iterations\n",
7678 ndead, niterations);
7681 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7685 /* This function is called via for_each_rtx. The argument, NEWREG, is
7686 a condition code register with the desired mode. If we are looking
7687 at the same register in a different mode, replace it with
7688 NEWREG. */
7691 cse_change_cc_mode (rtx *loc, void *data)
7693 rtx newreg = (rtx) data;
7696 && GET_CODE (*loc) == REG
7697 && REGNO (*loc) == REGNO (newreg)
7698 && GET_MODE (*loc) != GET_MODE (newreg))
7706 /* Change the mode of any reference to the register REGNO (NEWREG) to
7707 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
7708 any instruction which modifies NEWREG. */
7711 cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7715 for (insn = start; insn != end; insn = NEXT_INSN (insn))
7717 if (! INSN_P (insn))
7720 if (reg_set_p (newreg, insn))
7723 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, newreg);
7724 for_each_rtx (®_NOTES (insn), cse_change_cc_mode, newreg);
7728 /* BB is a basic block which finishes with CC_REG as a condition code
7729 register which is set to CC_SRC. Look through the successors of BB
7730 to find blocks which have a single predecessor (i.e., this one),
7731 and look through those blocks for an assignment to CC_REG which is
7732 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
7733 permitted to change the mode of CC_SRC to a compatible mode. This
7734 returns VOIDmode if no equivalent assignments were found.
7735 Otherwise it returns the mode which CC_SRC should wind up with.
7737 The main complexity in this function is handling the mode issues.
7738 We may have more than one duplicate which we can eliminate, and we
7739 try to find a mode which will work for multiple duplicates. */
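/* A hypothetical example: BB ends with a conditional jump on CC_REG, which
   was set by

       (set (reg cc) (compare (reg:SI 70) (const_int 0)))

   and each of the two successor blocks begins by computing that same
   comparison into CC_REG again.  Both of those later sets can then be
   deleted; if one of them used a different but compatible mode (according
   to targetm.cc_modes_compatible), the mode we return tells the caller
   what mode CC_SRC should end up with.  (The RTL is illustrative.)  */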
7741 static enum machine_mode
7742 cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
7745 enum machine_mode mode;
7746 unsigned int insn_count;
7749 enum machine_mode modes[2];
7754 /* We expect to have two successors. Look at both before picking
7755 the final mode for the comparison. If we have more successors
7756 (i.e., some sort of table jump, although that seems unlikely),
7757 then we require all beyond the first two to use the same
7758 mode. */
7760 found_equiv = false;
7761 mode = GET_MODE (cc_src);
7763 for (e = bb->succ; e; e = e->succ_next)
7768 if (e->flags & EDGE_COMPLEX)
7772 || e->dest->pred->pred_next
7773 || e->dest == EXIT_BLOCK_PTR)
7776 end = NEXT_INSN (BB_END (e->dest));
7777 for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7781 if (! INSN_P (insn))
7784 /* If CC_SRC is modified, we have to stop looking for
7785 something which uses it. */
7786 if (modified_in_p (cc_src, insn))
7789 /* Check whether INSN sets CC_REG to CC_SRC. */
7790 set = single_set (insn);
7792 && GET_CODE (SET_DEST (set)) == REG
7793 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7796 enum machine_mode set_mode;
7797 enum machine_mode comp_mode;
7800 set_mode = GET_MODE (SET_SRC (set));
7801 comp_mode = set_mode;
7802 if (rtx_equal_p (cc_src, SET_SRC (set)))
7804 else if (GET_CODE (cc_src) == COMPARE
7805 && GET_CODE (SET_SRC (set)) == COMPARE
7807 && rtx_equal_p (XEXP (cc_src, 0),
7808 XEXP (SET_SRC (set), 0))
7809 && rtx_equal_p (XEXP (cc_src, 1),
7810 XEXP (SET_SRC (set), 1)))
7813 comp_mode = (*targetm.cc_modes_compatible) (mode, set_mode);
7814 if (comp_mode != VOIDmode
7815 && (can_change_mode || comp_mode == mode))
7822 if (insn_count < ARRAY_SIZE (insns))
7824 insns[insn_count] = insn;
7825 modes[insn_count] = set_mode;
7826 last_insns[insn_count] = end;
7829 if (mode != comp_mode)
7831 if (! can_change_mode)
7834 PUT_MODE (cc_src, mode);
7839 if (set_mode != mode)
7841 /* We found a matching expression in the
7842 wrong mode, but we don't have room to
7843 store it in the array. Punt. This case
7844 should be rare. */
7847 /* INSN sets CC_REG to a value equal to CC_SRC
7848 with the right mode. We can simply delete
7849 it. */
7853 /* We found an instruction to delete. Keep looking,
7854 in the hopes of finding a three-way jump. */
7858 /* We found an instruction which sets the condition
7859 code, so don't look any farther. */
7863 /* If INSN sets CC_REG in some other way, don't look any
7864 farther. */
7865 if (reg_set_p (cc_reg, insn))
7869 /* If we fell off the bottom of the block, we can keep looking
7870 through successors. We pass CAN_CHANGE_MODE as false because
7871 we aren't prepared to handle compatibility between the
7872 further blocks and this block. */
7875 enum machine_mode submode;
7877 submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
7878 if (submode != VOIDmode)
7880 if (submode != mode)
7883 can_change_mode = false;
7891 /* Now INSN_COUNT is the number of instructions we found which set
7892 CC_REG to a value equivalent to CC_SRC. The instructions are in
7893 INSNS. The modes used by those instructions are in MODES. */
7896 for (i = 0; i < insn_count; ++i)
7898 if (modes[i] != mode)
7900 /* We need to change the mode of CC_REG in INSNS[i] and
7901 subsequent instructions. */
7904 if (GET_MODE (cc_reg) == mode)
7907 newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7909 cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7913 delete_insn (insns[i]);
7919 /* If we have a fixed condition code register (or two), walk through
7920 the instructions and try to eliminate duplicate assignments. */
7923 cse_condition_code_reg (void)
7925 unsigned int cc_regno_1;
7926 unsigned int cc_regno_2;
7931 if (! (*targetm.fixed_condition_code_regs) (&cc_regno_1, &cc_regno_2))
7934 cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7935 if (cc_regno_2 != INVALID_REGNUM)
7936 cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7938 cc_reg_2 = NULL_RTX;
7947 enum machine_mode mode;
7948 enum machine_mode orig_mode;
7950 /* Look for blocks which end with a conditional jump based on a
7951 condition code register. Then look for the instruction which
7952 sets the condition code register. Then look through the
7953 successor blocks for instructions which set the condition
7954 code register to the same value. There are other possible
7955 uses of the condition code register, but these are by far the
7956 most common and the ones which we are most likely to be able
7957 to handle. */
7959 last_insn = BB_END (bb);
7960 if (GET_CODE (last_insn) != JUMP_INSN)
7963 if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7965 else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7970 cc_src_insn = NULL_RTX;
7972 for (insn = PREV_INSN (last_insn);
7973 insn && insn != PREV_INSN (BB_HEAD (bb));
7974 insn = PREV_INSN (insn))
7978 if (! INSN_P (insn))
7980 set = single_set (insn);
7982 && GET_CODE (SET_DEST (set)) == REG
7983 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7986 cc_src = SET_SRC (set);
7989 else if (reg_set_p (cc_reg, insn))
7996 if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7999 /* Now CC_REG is a condition code register used for a
8000 conditional jump at the end of the block, and CC_SRC, in
8001 CC_SRC_INSN, is the value to which that condition code
8002 register is set, and CC_SRC is still meaningful at the end of
8003 the block. */
8005 orig_mode = GET_MODE (cc_src);
8006 mode = cse_cc_succs (bb, cc_reg, cc_src, true);
8007 if (mode != VOIDmode)
8009 if (mode != GET_MODE (cc_src))
8011 if (mode != orig_mode)
8013 rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
8015 /* Change the mode of CC_REG in CC_SRC_INSN to
8016 GET_MODE (NEWREG). */
8017 for_each_rtx (&PATTERN (cc_src_insn), cse_change_cc_mode,
8019 for_each_rtx (®_NOTES (cc_src_insn), cse_change_cc_mode,
8022 /* Do the same in the following insns that use the
8023 current value of CC_REG within BB. */
8024 cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
8025 NEXT_INSN (last_insn),