1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 /* stdio.h must precede rtl.h for FFS. */
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
34 #include "insn-config.h"
42 /* The basic idea of common subexpression elimination is to go
43 through the code, keeping a record of expressions that would
44 have the same value at the current scan point, and replacing
45 expressions encountered with the cheapest equivalent expression.
47 It is too complicated to keep track of the different possibilities
48 when control paths merge in this code; so, at each label, we forget all
49 that is known and start fresh. This can be described as processing each
50 extended basic block separately. We have a separate pass to perform
53 Note CSE can turn a conditional or computed jump into a nop or
54 an unconditional jump. When this occurs we arrange to run the jump
55 optimizer after CSE to delete the unreachable code.
57 We use two data structures to record the equivalent expressions:
58 a hash table for most expressions, and a vector of "quantity
59 numbers" to record equivalent (pseudo) registers.
61 The use of the special data structure for registers is desirable
62 because it is faster. It is possible because register references
63 contain a fairly small number, the register number, taken from
64 a contiguously allocated series, and two register references are
65 identical if they have the same number. General expressions
66 do not have any such thing, so the only way to retrieve the
67 information recorded on an expression other than a register
68 is to keep it in a hash table.
70 Registers and "quantity numbers":
72 At the start of each basic block, all of the (hardware and pseudo)
73 registers used in the function are given distinct quantity
74 numbers to indicate their contents. During scan, when the code
75 copies one register into another, we copy the quantity number.
76 When a register is loaded in any other way, we allocate a new
77 quantity number to describe the value generated by this operation.
78 `reg_qty' records what quantity a register is currently thought
81 All real quantity numbers are greater than or equal to `max_reg'.
82 If register N has not been assigned a quantity, reg_qty[N] will equal N.
84 Quantity numbers below `max_reg' do not exist and none of the `qty_table'
85 entries should be referenced with an index below `max_reg'.
87 We also maintain a bidirectional chain of registers for each
88 quantity number. The `qty_table` members `first_reg' and `last_reg',
89 and `reg_eqv_table' members `next' and `prev' hold these chains.
91 The first register in a chain is the one whose lifespan is least local.
92 Among equals, it is the one that was seen first.
93 We replace any equivalent register with that one.
95 If two registers have the same quantity number, REG expressions with
96 that quantity's `mode' must be in the hash table for both registers
97 and must be in the same class.
99 The converse is not true. Since hard registers may be referenced in
100 any mode, two REG expressions might be equivalent in the hash table
101 but not have the same quantity number if the quantity number of one
102 of the registers does not have the same mode as those expressions.
104 Constants and quantity numbers:
106 When a quantity has a known constant value, that value is stored
107 in the appropriate qty_table `const_rtx'. This is in addition to
108 putting the constant in the hash table as is usual for non-regs.
110 Whether a reg or a constant is preferred is determined by the configuration
111 macro CONST_COSTS and will often depend on the constant value. In any
112 event, expressions containing constants can be simplified by fold_rtx.
114 When a quantity has a known nearly constant value (such as an address
115 of a stack slot), that value is stored in the appropriate qty_table
118 Integer constants don't have a machine mode. However, cse
119 determines the intended machine mode from the destination
120 of the instruction that moves the constant. The machine mode
121 is recorded in the hash table along with the actual RTL
122 constant expression so that different modes are kept separate.
126 To record known equivalences among expressions in general
127 we use a hash table called `table'. It has a fixed number of buckets
128 that contain chains of `struct table_elt' elements for expressions.
129 These chains connect the elements whose expressions have the same
132 Other chains through the same elements connect the elements which
133 currently have equivalent values.
135 Register references in an expression are canonicalized before hashing
136 the expression. This is done using `reg_qty' and qty_table `first_reg'.
137 The hash code of a register reference is computed using the quantity
138 number, not the register number.
140 When the value of an expression changes, it is necessary to remove from the
141 hash table not just that expression but all expressions whose values
142 could be different as a result.
144 1. If the value changing is in memory, except in special cases
145 ANYTHING referring to memory could be changed. That is because
146 nobody knows where a pointer does not point.
147 The function `invalidate_memory' removes what is necessary.
149 The special cases are when the address is constant or is
150 a constant plus a fixed register such as the frame pointer
151 or a static chain pointer. When such addresses are stored in,
152 we can tell exactly which other such addresses must be invalidated
153 due to overlap. `invalidate' does this.
154 All expressions that refer to non-constant
155 memory addresses are also invalidated. `invalidate_memory' does this.
157 2. If the value changing is a register, all expressions
158 containing references to that register, and only those, must be removed.
161 Because searching the entire hash table for expressions that contain
162 a register is very slow, we try to figure out when it isn't necessary.
163 Precisely, this is necessary only when expressions have been
164 entered in the hash table using this register, and then the value has
165 changed, and then another expression wants to be added to refer to
166 the register's new value. This sequence of circumstances is rare
167 within any one basic block.
169 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
170 reg_tick[i] is incremented whenever a value is stored in register i.
171 reg_in_table[i] holds -1 if no references to register i have been
172 entered in the table; otherwise, it contains the value reg_tick[i] had
173 when the references were entered. If we want to enter a reference
174 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
175 Until we want to enter a new entry, the mere fact that the two vectors
176 don't match causes the entries to be ignored if anyone tries to match them.
178 Registers themselves are entered in the hash table as well as in
179 the equivalent-register chains. However, the vectors `reg_tick'
180 and `reg_in_table' do not apply to expressions which are simple
181 register references. These expressions are removed from the table
182 immediately when they become invalid, and this can be done even if
183 we do not immediately search for all the expressions that refer to the register.
186 A CLOBBER rtx in an instruction invalidates its operand for further
187 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
188 invalidates everything that resides in memory.
192 Constant expressions that differ only by an additive integer
193 are called related. When a constant expression is put in
194 the table, the related expression with no constant term
195 is also entered. These are made to point at each other
196 so that it is possible to find out if there exists any
197 register equivalent to an expression related to a given expression. */
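/* Illustrative sketch (kept under "#if 0", so it is not compiled): a
   simplified model of the quantity-number bookkeeping described above.
   The names MAX_REGS_SKETCH, qty_of, sketch_new_block, sketch_set_reg and
   sketch_copy_reg are hypothetical; the real pass uses REG_QTY,
   new_basic_block, make_new_qty and make_regs_eqv defined below.  */
#if 0
#define MAX_REGS_SKETCH 1024

static int qty_of[MAX_REGS_SKETCH];	/* Simplified stand-in for reg_qty.  */
static int sketch_next_qty;		/* Next unused quantity number.  */

/* At the start of each extended basic block every register is its own
   "quantity": qty_of[N] == N means no equivalence is known for N.  */
static void
sketch_new_block ()
{
  int i;

  for (i = 0; i < MAX_REGS_SKETCH; i++)
    qty_of[i] = i;
  sketch_next_qty = MAX_REGS_SKETCH;	/* Real quantities are >= max_reg.  */
}

/* A register loaded in an arbitrary way gets a fresh quantity number.  */
static void
sketch_set_reg (regno)
     int regno;
{
  qty_of[regno] = sketch_next_qty++;
}

/* A register-to-register copy propagates the quantity number, which is
   what later allows one register to be replaced by the other.  */
static void
sketch_copy_reg (dest, src)
     int dest, src;
{
  if (qty_of[src] == src)		/* SRC had no quantity yet.  */
    sketch_set_reg (src);
  qty_of[dest] = qty_of[src];
}
#endif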
199 /* One plus largest register number used in this function. */
203 /* One plus largest instruction UID used in this function at time of
206 static int max_insn_uid;
208 /* Length of qty_table vector. We know in advance we will not need
209 a quantity number this big. */
213 /* Next quantity number to be allocated.
214 This is 1 + the largest number needed so far. */
218 /* Per-qty information tracking.
220 `first_reg' and `last_reg' track the head and tail of the
221 chain of registers which currently contain this quantity.
223 `mode' contains the machine mode of this quantity.
225 `const_rtx' holds the rtx of the constant value of this
226 quantity, if known. A summation of the frame/arg pointer
227 and a constant can also be entered here. When this holds
228 a known value, `const_insn' is the insn which stored the constant value.
231 `comparison_{code,const,qty}' are used to track when a
232 comparison between a quantity and some constant or register has
233 been passed. In such a case, we know the results of the comparison
234 in case we see it again. These members record a comparison that
235 is known to be true. `comparison_code' holds the rtx code of such
236 a comparison, else it is set to UNKNOWN and the other two
237 comparison members are undefined. `comparison_const' holds
238 the constant being compared against, or zero if the comparison
239 is not against a constant. `comparison_qty' holds the quantity
240 being compared against when the result is known. If the comparison
241 is not with a register, `comparison_qty' is -1. */
243 struct qty_table_elem
247 rtx comparison_const;
249 unsigned int first_reg, last_reg;
250 enum machine_mode mode;
251 enum rtx_code comparison_code;
254 /* The table of all qtys, indexed by qty number. */
255 static struct qty_table_elem *qty_table;
258 /* For machines that have a CC0, we do not record its value in the hash
259 table since its use is guaranteed to be the insn immediately following
260 its definition and any other insn is presumed to invalidate it.
262 Instead, we store below the value last assigned to CC0. If it should
263 happen to be a constant, it is stored in preference to the actual
264 assigned value. In case it is a constant, we store the mode in which
265 the constant should be interpreted. */
267 static rtx prev_insn_cc0;
268 static enum machine_mode prev_insn_cc0_mode;
271 /* Previous actual insn. 0 if at first insn of basic block. */
273 static rtx prev_insn;
275 /* Insn being scanned. */
277 static rtx this_insn;
279 /* Indexed by register number, gives the number of the next (or
280 previous) register in the chain of registers sharing the same value.
283 Or -1 if this register is at the end of the chain.
285 If reg_qty[N] == N, reg_eqv_table[N].next is undefined. */
287 /* Per-register equivalence chain. */
293 /* The table of all register equivalence chains. */
294 static struct reg_eqv_elem *reg_eqv_table;
298 /* Next in hash chain. */
299 struct cse_reg_info *hash_next;
301 /* The next cse_reg_info structure in the free or used list. */
302 struct cse_reg_info *next;
307 /* The quantity number of the register's current contents. */
310 /* The number of times the register has been altered in the current basic block. */
314 /* The REG_TICK value at which rtx's containing this register are
315 valid in the hash table. If this does not equal the current
316 reg_tick value, such expressions existing in the hash table are invalid. */
321 /* A free list of cse_reg_info entries. */
322 static struct cse_reg_info *cse_reg_info_free_list;
324 /* A used list of cse_reg_info entries. */
325 static struct cse_reg_info *cse_reg_info_used_list;
326 static struct cse_reg_info *cse_reg_info_used_list_end;
328 /* A mapping from registers to cse_reg_info data structures. */
329 #define REGHASH_SHIFT 7
330 #define REGHASH_SIZE (1 << REGHASH_SHIFT)
331 #define REGHASH_MASK (REGHASH_SIZE - 1)
332 static struct cse_reg_info *reg_hash[REGHASH_SIZE];
334 #define REGHASH_FN(REGNO) \
335 (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
337 /* The last lookup we did into the cse_reg_info_tree. This allows us
338 to cache repeated lookups. */
339 static unsigned int cached_regno;
340 static struct cse_reg_info *cached_cse_reg_info;
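/* Illustrative sketch (kept under "#if 0", not compiled): the lookup path
   implied by REGHASH_FN and the one-entry cache above.  The function name
   sketch_lookup_reg_info is hypothetical; the real code is
   get_cse_reg_info further down, which also allocates an entry on a miss.  */
#if 0
static struct cse_reg_info *
sketch_lookup_reg_info (regno)
     unsigned int regno;
{
  struct cse_reg_info *p;

  /* Repeated lookups of the same register hit the one-entry cache.  */
  if (regno == cached_regno && cached_cse_reg_info)
    return cached_cse_reg_info;

  /* Otherwise hash the register number into one of REGHASH_SIZE buckets
     and walk that bucket's chain.  */
  for (p = reg_hash[REGHASH_FN (regno)]; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;

  /* Remember the result; get_cse_reg_info would allocate here on a miss.  */
  cached_regno = regno;
  cached_cse_reg_info = p;
  return p;
}
#endif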
342 /* A HARD_REG_SET containing all the hard registers for which there is
343 currently a REG expression in the hash table. Note the difference
344 from the above variables, which indicate if the REG is mentioned in some
345 expression in the table. */
347 static HARD_REG_SET hard_regs_in_table;
349 /* CUID of insn that starts the basic block currently being cse-processed. */
351 static int cse_basic_block_start;
353 /* CUID of insn that ends the basic block currently being cse-processed. */
355 static int cse_basic_block_end;
357 /* Vector mapping INSN_UIDs to cuids.
358 The cuids are like uids but increase monotonically always.
359 We use them to see whether a reg is used outside a given basic block. */
361 static int *uid_cuid;
363 /* Highest UID in UID_CUID. */
366 /* Get the cuid of an insn. */
368 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
370 /* Nonzero if this pass has made changes, and therefore it's
371 worthwhile to run the garbage collector. */
373 static int cse_altered;
375 /* Nonzero if cse has altered conditional jump insns
376 in such a way that jump optimization should be redone. */
378 static int cse_jumps_altered;
380 /* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
381 REG_LABEL, we have to rerun jump after CSE to put in the note. */
382 static int recorded_label_ref;
384 /* canon_hash stores 1 in do_not_record
385 if it notices a reference to CC0, PC, or some other volatile subexpression. */
388 static int do_not_record;
390 #ifdef LOAD_EXTEND_OP
392 /* Scratch rtl used when looking for load-extended copy of a MEM. */
393 static rtx memory_extend_rtx;
396 /* canon_hash stores 1 in hash_arg_in_memory
397 if it notices a reference to memory within the expression being hashed. */
399 static int hash_arg_in_memory;
401 /* The hash table contains buckets which are chains of `struct table_elt's,
402 each recording one expression's information.
403 That expression is in the `exp' field.
405 The canon_exp field contains a canonical (from the point of view of
406 alias analysis) version of the `exp' field.
408 Those elements with the same hash code are chained in both directions
409 through the `next_same_hash' and `prev_same_hash' fields.
411 Each set of expressions with equivalent values
412 are on a two-way chain through the `next_same_value'
413 and `prev_same_value' fields, and all point with
414 the `first_same_value' field at the first element in
415 that chain. The chain is in order of increasing cost.
416 Each element's cost value is in its `cost' field.
418 The `in_memory' field is nonzero for elements that
419 involve any reference to memory. These elements are removed
420 whenever a write is done to an unidentified location in memory.
421 To be safe, we assume that a memory address is unidentified unless
422 the address is either a symbol constant or a constant plus
423 the frame pointer or argument pointer.
425 The `related_value' field is used to connect related expressions
426 (that differ by adding an integer).
427 The related expressions are chained in a circular fashion.
428 `related_value' is zero for expressions for which this
431 The `cost' field stores the cost of this element's expression.
432 The `regcost' field stores the value returned by approx_reg_cost for
433 this element's expression.
435 The `is_const' flag is set if the element is a constant (including
438 The `flag' field is used as a temporary during some search routines.
440 The `mode' field is usually the same as GET_MODE (`exp'), but
441 if `exp' is a CONST_INT and has no machine mode then the `mode'
442 field is the mode it was being used as. Each constant is
443 recorded separately for each mode it is used with. */
449 struct table_elt *next_same_hash;
450 struct table_elt *prev_same_hash;
451 struct table_elt *next_same_value;
452 struct table_elt *prev_same_value;
453 struct table_elt *first_same_value;
454 struct table_elt *related_value;
457 enum machine_mode mode;
463 /* We don't want a lot of buckets, because we rarely have very many
464 things stored in the hash table, and a lot of buckets slows
465 down a lot of loops that happen frequently. */
467 #define HASH_SIZE (1 << HASH_SHIFT)
468 #define HASH_MASK (HASH_SIZE - 1)
470 /* Compute hash code of X in mode M. Special-case the case where X is a pseudo
471 register (hard registers may require `do_not_record' to be set). */
474 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
475 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
476 : canon_hash (X, M)) & HASH_MASK)
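/* Illustrative sketch (kept under "#if 0", not compiled): because the hash
   of a pseudo register uses its quantity number rather than its register
   number, two pseudos currently known to hold the same value hash to the
   same bucket.  R1 and R2 are hypothetical pseudo-register rtxes.  */
#if 0
static void
sketch_hash_by_quantity (r1, r2)
     rtx r1, r2;
{
  /* Assume a copy insn has made R2 equivalent to R1, so that
     REG_QTY (REGNO (r1)) == REG_QTY (REGNO (r2)).  */
  unsigned int h1 = HASH (r1, GET_MODE (r1));
  unsigned int h2 = HASH (r2, GET_MODE (r2));

  /* Under that assumption h1 == h2, so the two registers share a bucket
     and can be found through the same hash chain.  */
}
#endif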
478 /* Determine whether register number N is considered a fixed register for the
479 purpose of approximating register costs.
480 It is desirable to replace other regs with fixed regs, to reduce the need for non-fixed hard regs.
482 A reg wins if it is either the frame pointer or designated as fixed. */
483 #define FIXED_REGNO_P(N) \
484 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
485 || fixed_regs[N] || global_regs[N])
487 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
488 hard registers and pointers into the frame are the cheapest with a cost
489 of 0. Next come pseudos with a cost of one and other hard registers with
490 a cost of 2. Aside from these special cases, call `rtx_cost'. */
492 #define CHEAP_REGNO(N) \
493 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
494 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
495 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
496 || ((N) < FIRST_PSEUDO_REGISTER \
497 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
499 #define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
500 #define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))
502 /* Get the info associated with register N. */
504 #define GET_CSE_REG_INFO(N) \
505 (((N) == cached_regno && cached_cse_reg_info) \
506 ? cached_cse_reg_info : get_cse_reg_info ((N)))
508 /* Get the number of times this register has been updated in this basic block. */
511 #define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
513 /* Get the point at which REG was recorded in the table. */
515 #define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
517 /* Get the quantity number for REG. */
519 #define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
521 /* Determine if the quantity number for register X represents a valid index
522 into the qty_table. */
524 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
526 static struct table_elt *table[HASH_SIZE];
528 /* Chain of `struct table_elt's made so far for this function
529 but currently removed from the table. */
531 static struct table_elt *free_element_chain;
533 /* Number of `struct table_elt' structures made so far for this function. */
535 static int n_elements_made;
537 /* Maximum value `n_elements_made' has had so far in this compilation
538 for functions previously processed. */
540 static int max_elements_made;
542 /* Surviving equivalence class when two equivalence classes are merged
543 by recording the effects of a jump in the last insn. Zero if the
544 last insn was not a conditional jump. */
546 static struct table_elt *last_jump_equiv_class;
548 /* Set to the cost of a constant pool reference if one was found for a
549 symbolic constant. If this was found, it means we should try to
550 convert constants into constant pool entries if they don't fit in the insn. */
553 static int constant_pool_entries_cost;
555 /* Define maximum length of a branch path. */
557 #define PATHLENGTH 10
559 /* This data describes a block that will be processed by cse_basic_block. */
561 struct cse_basic_block_data
563 /* Lowest CUID value of insns in block. */
565 /* Highest CUID value of insns in block. */
567 /* Total number of SETs in block. */
569 /* Last insn in the block. */
571 /* Size of current branch path, if any. */
573 /* Current branch path, indicating which branches will be taken. */
576 /* The branch insn. */
578 /* Whether it should be taken or not. AROUND is the same as taken
579 except that it is used when the destination label is not preceded by a BARRIER. */
581 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
585 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
586 virtual regs here because the simplify_*_operation routines are called
587 by integrate.c, which is called before virtual register instantiation.
589 ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
590 a header file so that their definitions can be shared with the
591 simplification routines in simplify-rtx.c. Until then, do not
592 change these macros without also changing the copy in simplify-rtx.c. */
594 #define FIXED_BASE_PLUS_P(X) \
595 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
596 || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])\
597 || (X) == virtual_stack_vars_rtx \
598 || (X) == virtual_incoming_args_rtx \
599 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
600 && (XEXP (X, 0) == frame_pointer_rtx \
601 || XEXP (X, 0) == hard_frame_pointer_rtx \
602 || ((X) == arg_pointer_rtx \
603 && fixed_regs[ARG_POINTER_REGNUM]) \
604 || XEXP (X, 0) == virtual_stack_vars_rtx \
605 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
606 || GET_CODE (X) == ADDRESSOF)
608 /* Similar, but also allows reference to the stack pointer.
610 This used to include FIXED_BASE_PLUS_P, however, we can't assume that
611 arg_pointer_rtx by itself is nonzero, because on at least one machine,
612 the i960, the arg pointer is zero when it is unused. */
614 #define NONZERO_BASE_PLUS_P(X) \
615 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
616 || (X) == virtual_stack_vars_rtx \
617 || (X) == virtual_incoming_args_rtx \
618 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
619 && (XEXP (X, 0) == frame_pointer_rtx \
620 || XEXP (X, 0) == hard_frame_pointer_rtx \
621 || ((X) == arg_pointer_rtx \
622 && fixed_regs[ARG_POINTER_REGNUM]) \
623 || XEXP (X, 0) == virtual_stack_vars_rtx \
624 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
625 || (X) == stack_pointer_rtx \
626 || (X) == virtual_stack_dynamic_rtx \
627 || (X) == virtual_outgoing_args_rtx \
628 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
629 && (XEXP (X, 0) == stack_pointer_rtx \
630 || XEXP (X, 0) == virtual_stack_dynamic_rtx \
631 || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
632 || GET_CODE (X) == ADDRESSOF)
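/* Illustrative sketch (kept under "#if 0", not compiled): the kind of
   address the macros above recognize.  A constant offset from the frame
   pointer, e.g. the address of a stack slot, satisfies FIXED_BASE_PLUS_P,
   so a store through it only needs to invalidate overlapping fixed-base
   addresses rather than all of memory.  The offset value 8 is arbitrary.  */
#if 0
static void
sketch_fixed_base ()
{
  rtx addr = gen_rtx_PLUS (Pmode, frame_pointer_rtx, GEN_INT (8));

  if (FIXED_BASE_PLUS_P (addr))
    ;					/* True: frame pointer plus a constant.  */
}
#endif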
634 static int notreg_cost PARAMS ((rtx, enum rtx_code));
635 static int approx_reg_cost_1 PARAMS ((rtx *, void *));
636 static int approx_reg_cost PARAMS ((rtx));
637 static int preferrable PARAMS ((int, int, int, int));
638 static void new_basic_block PARAMS ((void));
639 static void make_new_qty PARAMS ((unsigned int, enum machine_mode));
640 static void make_regs_eqv PARAMS ((unsigned int, unsigned int));
641 static void delete_reg_equiv PARAMS ((unsigned int));
642 static int mention_regs PARAMS ((rtx));
643 static int insert_regs PARAMS ((rtx, struct table_elt *, int));
644 static void remove_from_table PARAMS ((struct table_elt *, unsigned));
645 static struct table_elt *lookup PARAMS ((rtx, unsigned, enum machine_mode)),
646 *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
647 static rtx lookup_as_function PARAMS ((rtx, enum rtx_code));
648 static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
650 static void merge_equiv_classes PARAMS ((struct table_elt *,
651 struct table_elt *));
652 static void invalidate PARAMS ((rtx, enum machine_mode));
653 static int cse_rtx_varies_p PARAMS ((rtx, int));
654 static void remove_invalid_refs PARAMS ((unsigned int));
655 static void remove_invalid_subreg_refs PARAMS ((unsigned int, unsigned int,
657 static void rehash_using_reg PARAMS ((rtx));
658 static void invalidate_memory PARAMS ((void));
659 static void invalidate_for_call PARAMS ((void));
660 static rtx use_related_value PARAMS ((rtx, struct table_elt *));
661 static unsigned canon_hash PARAMS ((rtx, enum machine_mode));
662 static unsigned canon_hash_string PARAMS ((const char *));
663 static unsigned safe_hash PARAMS ((rtx, enum machine_mode));
664 static int exp_equiv_p PARAMS ((rtx, rtx, int, int));
665 static rtx canon_reg PARAMS ((rtx, rtx));
666 static void find_best_addr PARAMS ((rtx, rtx *, enum machine_mode));
667 static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
669 enum machine_mode *));
670 static rtx fold_rtx PARAMS ((rtx, rtx));
671 static rtx equiv_constant PARAMS ((rtx));
672 static void record_jump_equiv PARAMS ((rtx, int));
673 static void record_jump_cond PARAMS ((enum rtx_code, enum machine_mode,
675 static void cse_insn PARAMS ((rtx, rtx));
676 static int addr_affects_sp_p PARAMS ((rtx));
677 static void invalidate_from_clobbers PARAMS ((rtx));
678 static rtx cse_process_notes PARAMS ((rtx, rtx));
679 static void cse_around_loop PARAMS ((rtx));
680 static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
681 static void invalidate_skipped_block PARAMS ((rtx));
682 static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
683 static void cse_set_around_loop PARAMS ((rtx, rtx, rtx));
684 static rtx cse_basic_block PARAMS ((rtx, rtx, struct branch_path *, int));
685 static void count_reg_usage PARAMS ((rtx, int *, rtx, int));
686 static int check_for_label_ref PARAMS ((rtx *, void *));
687 extern void dump_class PARAMS ((struct table_elt*));
688 static struct cse_reg_info * get_cse_reg_info PARAMS ((unsigned int));
689 static int check_dependence PARAMS ((rtx *, void *));
691 static void flush_hash_table PARAMS ((void));
692 static bool insn_live_p PARAMS ((rtx, int *));
693 static bool set_live_p PARAMS ((rtx, rtx, int *));
694 static bool dead_libcall_p PARAMS ((rtx));
696 /* Dump the expressions in the equivalence class indicated by CLASSP.
697 This function is used only for debugging. */
700 struct table_elt *classp;
702 struct table_elt *elt;
704 fprintf (stderr, "Equivalence chain for ");
705 print_rtl (stderr, classp->exp);
706 fprintf (stderr, ": \n");
708 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
710 print_rtl (stderr, elt->exp);
711 fprintf (stderr, "\n");
715 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
718 approx_reg_cost_1 (xp, data)
723 regset set = (regset) data;
725 if (x && GET_CODE (x) == REG)
726 SET_REGNO_REG_SET (set, REGNO (x));
730 /* Return an estimate of the cost of the registers used in an rtx.
731 This is mostly the number of different REG expressions in the rtx;
732 however for some exceptions like fixed registers we use a cost of
733 0. If any other hard register reference occurs, return MAX_COST. */
745 for_each_rtx (&x, approx_reg_cost_1, (void *)&set);
747 EXECUTE_IF_SET_IN_REG_SET
750 if (! CHEAP_REGNO (i))
752 if (i < FIRST_PSEUDO_REGISTER)
755 cost += i < FIRST_PSEUDO_REGISTER ? 2 : 1;
759 CLEAR_REG_SET (&set);
760 return hardregs && SMALL_REGISTER_CLASSES ? MAX_COST : cost;
763 /* Return a negative value if an rtx A, whose costs are given by COST_A
764 and REGCOST_A, is more desirable than an rtx B.
765 Return a positive value if A is less desirable, or 0 if the two are equally good. */
768 preferrable (cost_a, regcost_a, cost_b, regcost_b)
769 int cost_a, regcost_a, cost_b, regcost_b;
771 /* First, get rid of cases involving expressions that are entirely unwanted. */
773 if (cost_a != cost_b)
775 if (cost_a == MAX_COST)
777 if (cost_b == MAX_COST)
781 /* Avoid extending lifetimes of hardregs. */
782 if (regcost_a != regcost_b)
784 if (regcost_a == MAX_COST)
786 if (regcost_b == MAX_COST)
790 /* Normal operation costs take precedence. */
791 if (cost_a != cost_b)
792 return cost_a - cost_b;
793 /* Only if these are identical consider effects on register pressure. */
794 if (regcost_a != regcost_b)
795 return regcost_a - regcost_b;
799 /* Internal function, to compute cost when X is not a register; called
800 from COST macro to keep it simple. */
803 notreg_cost (x, outer)
807 return ((GET_CODE (x) == SUBREG
808 && GET_CODE (SUBREG_REG (x)) == REG
809 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
810 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
811 && (GET_MODE_SIZE (GET_MODE (x))
812 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
813 && subreg_lowpart_p (x)
814 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
815 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
817 : rtx_cost (x, outer) * 2);
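/* Illustrative sketch (kept under "#if 0", not compiled): the special case
   notreg_cost handles above.  A lowpart SUBREG that merely narrows an
   integer register, e.g. (subreg:QI (reg:SI N) 0) on a target where such
   truncation is a no-op, is treated as free; any other non-register
   expression is costed as 2 * rtx_cost.  NARROWING_SUBREG and OTHER_EXPR
   are hypothetical rtxes assumed to have those shapes.  */
#if 0
static void
sketch_cost_of_narrowing_subreg (narrowing_subreg, other_expr)
     rtx narrowing_subreg, other_expr;
{
  int narrow_cost = COST (narrowing_subreg);	/* 0 for the no-op narrowing.  */
  int other_cost = COST (other_expr);		/* 2 * rtx_cost (...) otherwise.  */
}
#endif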
820 /* Return an estimate of the cost of computing rtx X.
821 One use is in cse, to decide which expression to keep in the hash table.
822 Another is in rtl generation, to pick the cheapest way to multiply.
823 Other uses like the latter are expected in the future. */
826 rtx_cost (x, outer_code)
828 enum rtx_code outer_code ATTRIBUTE_UNUSED;
831 register enum rtx_code code;
832 register const char *fmt;
838 /* Compute the default costs of certain things.
839 Note that RTX_COSTS can override the defaults. */
845 /* Count multiplication by 2**n as a shift,
846 because if we are considering it, we would output it as a shift. */
847 if (GET_CODE (XEXP (x, 1)) == CONST_INT
848 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
851 total = COSTS_N_INSNS (5);
857 total = COSTS_N_INSNS (7);
860 /* Used in loop.c and combine.c as a marker. */
864 total = COSTS_N_INSNS (1);
873 /* If we can't tie these modes, make this expensive. The larger
874 the mode, the more expensive it is. */
875 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
876 return COSTS_N_INSNS (2
877 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
881 RTX_COSTS (x, code, outer_code);
884 CONST_COSTS (x, code, outer_code);
888 #ifdef DEFAULT_RTX_COSTS
889 DEFAULT_RTX_COSTS (x, code, outer_code);
894 /* Sum the costs of the sub-rtx's, plus cost of this operation,
895 which is already in total. */
897 fmt = GET_RTX_FORMAT (code);
898 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
900 total += rtx_cost (XEXP (x, i), code);
901 else if (fmt[i] == 'E')
902 for (j = 0; j < XVECLEN (x, i); j++)
903 total += rtx_cost (XVECEXP (x, i, j), code);
908 /* Return cost of address expression X.
909 Expect that X is a properly formed address reference. */
912 address_cost (x, mode)
914 enum machine_mode mode;
916 /* The ADDRESS_COST macro does not deal with ADDRESSOF nodes. But,
917 during CSE, such nodes are present. Using an ADDRESSOF node which
918 refers to the address of a REG is a good thing because we can then
919 turn (MEM (ADDRESSOF (REG))) into just plain REG. */
921 if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
924 /* We may be asked for the cost of various unusual addresses, such as the
925 operands of push instructions. It is not worthwhile to complicate the
926 ADDRESS_COST macro to handle such cases. */
928 if (!memory_address_p (mode, x))
931 return ADDRESS_COST (x);
933 return rtx_cost (x, MEM);
938 static struct cse_reg_info *
939 get_cse_reg_info (regno)
942 struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
943 struct cse_reg_info *p;
945 for (p = *hash_head; p != NULL; p = p->hash_next)
946 if (p->regno == regno)
951 /* Get a new cse_reg_info structure. */
952 if (cse_reg_info_free_list)
954 p = cse_reg_info_free_list;
955 cse_reg_info_free_list = p->next;
958 p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));
960 /* Insert into hash table. */
961 p->hash_next = *hash_head;
966 p->reg_in_table = -1;
969 p->next = cse_reg_info_used_list;
970 cse_reg_info_used_list = p;
971 if (!cse_reg_info_used_list_end)
972 cse_reg_info_used_list_end = p;
975 /* Cache this lookup; we tend to be looking up information about the
976 same register several times in a row. */
977 cached_regno = regno;
978 cached_cse_reg_info = p;
983 /* Clear the hash table and initialize each register with its own quantity,
984 for a new basic block. */
993 /* Clear out hash table state for this pass. */
995 memset ((char *) reg_hash, 0, sizeof reg_hash);
997 if (cse_reg_info_used_list)
999 cse_reg_info_used_list_end->next = cse_reg_info_free_list;
1000 cse_reg_info_free_list = cse_reg_info_used_list;
1001 cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
1003 cached_cse_reg_info = 0;
1005 CLEAR_HARD_REG_SET (hard_regs_in_table);
1007 /* The per-quantity values used to be initialized here, but it is
1008 much faster to initialize each as it is made in `make_new_qty'. */
1010 for (i = 0; i < HASH_SIZE; i++)
1012 struct table_elt *first;
1017 struct table_elt *last = first;
1021 while (last->next_same_hash != NULL)
1022 last = last->next_same_hash;
1024 /* Now relink this entire hash chain into
1025 the free element list. */
1027 last->next_same_hash = free_element_chain;
1028 free_element_chain = first;
1039 /* Say that register REG contains a quantity in mode MODE not in any
1040 register before and initialize that quantity. */
1043 make_new_qty (reg, mode)
1045 enum machine_mode mode;
1048 register struct qty_table_elem *ent;
1049 register struct reg_eqv_elem *eqv;
1051 if (next_qty >= max_qty)
1054 q = REG_QTY (reg) = next_qty++;
1055 ent = &qty_table[q];
1056 ent->first_reg = reg;
1057 ent->last_reg = reg;
1059 ent->const_rtx = ent->const_insn = NULL_RTX;
1060 ent->comparison_code = UNKNOWN;
1062 eqv = &reg_eqv_table[reg];
1063 eqv->next = eqv->prev = -1;
1066 /* Make reg NEW equivalent to reg OLD.
1067 OLD is not changing; NEW is. */
1070 make_regs_eqv (new, old)
1071 unsigned int new, old;
1073 unsigned int lastr, firstr;
1074 int q = REG_QTY (old);
1075 struct qty_table_elem *ent;
1077 ent = &qty_table[q];
1079 /* Nothing should become eqv until it has a "non-invalid" qty number. */
1080 if (! REGNO_QTY_VALID_P (old))
1084 firstr = ent->first_reg;
1085 lastr = ent->last_reg;
1087 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
1088 hard regs. Among pseudos, if NEW will live longer than any other reg
1089 of the same qty, and that is beyond the current basic block,
1090 make it the new canonical replacement for this qty. */
1091 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1092 /* Certain fixed registers might be of the class NO_REGS. This means
1093 that not only can they not be allocated by the compiler, but
1094 they cannot be used in substitutions or canonicalizations either. */
1096 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1097 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1098 || (new >= FIRST_PSEUDO_REGISTER
1099 && (firstr < FIRST_PSEUDO_REGISTER
1100 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1101 || (uid_cuid[REGNO_FIRST_UID (new)]
1102 < cse_basic_block_start))
1103 && (uid_cuid[REGNO_LAST_UID (new)]
1104 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1106 reg_eqv_table[firstr].prev = new;
1107 reg_eqv_table[new].next = firstr;
1108 reg_eqv_table[new].prev = -1;
1109 ent->first_reg = new;
1113 /* If NEW is a hard reg (known to be non-fixed), insert at end.
1114 Otherwise, insert before any non-fixed hard regs that are at the
1115 end. Registers of class NO_REGS cannot be used as an
1116 equivalent for anything. */
1117 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1118 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1119 && new >= FIRST_PSEUDO_REGISTER)
1120 lastr = reg_eqv_table[lastr].prev;
1121 reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1122 if (reg_eqv_table[lastr].next >= 0)
1123 reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1125 qty_table[q].last_reg = new;
1126 reg_eqv_table[lastr].next = new;
1127 reg_eqv_table[new].prev = lastr;
1131 /* Remove REG from its equivalence class. */
1134 delete_reg_equiv (reg)
1137 register struct qty_table_elem *ent;
1138 register int q = REG_QTY (reg);
1141 /* If invalid, do nothing. */
1145 ent = &qty_table[q];
1147 p = reg_eqv_table[reg].prev;
1148 n = reg_eqv_table[reg].next;
1151 reg_eqv_table[n].prev = p;
1155 reg_eqv_table[p].next = n;
1159 REG_QTY (reg) = reg;
1162 /* Remove any invalid expressions from the hash table
1163 that refer to any of the registers contained in expression X.
1165 Make sure that newly inserted references to those registers
1166 as subexpressions will be considered valid.
1168 mention_regs is not called when a register itself
1169 is being stored in the table.
1171 Return 1 if we have done something that may have changed the hash code of X. */
1178 register enum rtx_code code;
1180 register const char *fmt;
1181 register int changed = 0;
1186 code = GET_CODE (x);
1189 unsigned int regno = REGNO (x);
1190 unsigned int endregno
1191 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1192 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
1195 for (i = regno; i < endregno; i++)
1197 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1198 remove_invalid_refs (i);
1200 REG_IN_TABLE (i) = REG_TICK (i);
1206 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1207 pseudo if they don't use overlapping words. We handle only pseudos
1208 here for simplicity. */
1209 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1210 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1212 unsigned int i = REGNO (SUBREG_REG (x));
1214 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1216 /* If reg_tick has been incremented more than once since
1217 reg_in_table was last set, that means that the entire
1218 register has been set before, so discard anything memorized
1219 for the entire register, including all SUBREG expressions. */
1220 if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
1221 remove_invalid_refs (i);
1223 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1226 REG_IN_TABLE (i) = REG_TICK (i);
1230 /* If X is a comparison or a COMPARE and either operand is a register
1231 that does not have a quantity, give it one. This is so that a later
1232 call to record_jump_equiv won't cause X to be assigned a different
1233 hash code and not found in the table after that call.
1235 It is not necessary to do this here, since rehash_using_reg can
1236 fix up the table later, but doing this here eliminates the need to
1237 call that expensive function in the most common case where the only
1238 use of the register is in the comparison. */
1240 if (code == COMPARE || GET_RTX_CLASS (code) == '<')
1242 if (GET_CODE (XEXP (x, 0)) == REG
1243 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1244 if (insert_regs (XEXP (x, 0), NULL, 0))
1246 rehash_using_reg (XEXP (x, 0));
1250 if (GET_CODE (XEXP (x, 1)) == REG
1251 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1252 if (insert_regs (XEXP (x, 1), NULL, 0))
1254 rehash_using_reg (XEXP (x, 1));
1259 fmt = GET_RTX_FORMAT (code);
1260 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1262 changed |= mention_regs (XEXP (x, i));
1263 else if (fmt[i] == 'E')
1264 for (j = 0; j < XVECLEN (x, i); j++)
1265 changed |= mention_regs (XVECEXP (x, i, j));
1270 /* Update the register quantities for inserting X into the hash table
1271 with a value equivalent to CLASSP.
1272 (If the class does not contain a REG, it is irrelevant.)
1273 If MODIFIED is nonzero, X is a destination; it is being modified.
1274 Note that delete_reg_equiv should be called on a register
1275 before insert_regs is done on that register with MODIFIED != 0.
1277 Nonzero value means that elements of reg_qty have changed
1278 so X's hash code may be different. */
1281 insert_regs (x, classp, modified)
1283 struct table_elt *classp;
1286 if (GET_CODE (x) == REG)
1288 unsigned int regno = REGNO (x);
1291 /* If REGNO is in the equivalence table already but is of the
1292 wrong mode for that equivalence, don't do anything here. */
1294 qty_valid = REGNO_QTY_VALID_P (regno);
1297 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1299 if (ent->mode != GET_MODE (x))
1303 if (modified || ! qty_valid)
1306 for (classp = classp->first_same_value;
1308 classp = classp->next_same_value)
1309 if (GET_CODE (classp->exp) == REG
1310 && GET_MODE (classp->exp) == GET_MODE (x))
1312 make_regs_eqv (regno, REGNO (classp->exp));
1316 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1317 than REG_IN_TABLE to find out if there was only a single preceding
1318 invalidation - for the SUBREG - or another one, which would be
1319 for the full register. However, if we find here that REG_TICK
1320 indicates that the register is invalid, it means that it has
1321 been invalidated in a separate operation. The SUBREG might be used
1322 now (then this is a recursive call), or we might use the full REG
1323 now and a SUBREG of it later. So bump up REG_TICK so that
1324 mention_regs will do the right thing. */
1326 && REG_IN_TABLE (regno) >= 0
1327 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1329 make_new_qty (regno, GET_MODE (x));
1336 /* If X is a SUBREG, we will likely be inserting the inner register in the
1337 table. If that register doesn't have an assigned quantity number at
1338 this point but does later, the insertion that we will be doing now will
1339 not be accessible because its hash code will have changed. So assign
1340 a quantity number now. */
1342 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1343 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1345 insert_regs (SUBREG_REG (x), NULL, 0);
1350 return mention_regs (x);
1353 /* Look in or update the hash table. */
1355 /* Remove table element ELT from use in the table.
1356 HASH is its hash code, made using the HASH macro.
1357 It's an argument because often that is known in advance
1358 and we save much time not recomputing it. */
1361 remove_from_table (elt, hash)
1362 register struct table_elt *elt;
1368 /* Mark this element as removed. See cse_insn. */
1369 elt->first_same_value = 0;
1371 /* Remove the table element from its equivalence class. */
1374 register struct table_elt *prev = elt->prev_same_value;
1375 register struct table_elt *next = elt->next_same_value;
1378 next->prev_same_value = prev;
1381 prev->next_same_value = next;
1384 register struct table_elt *newfirst = next;
1387 next->first_same_value = newfirst;
1388 next = next->next_same_value;
1393 /* Remove the table element from its hash bucket. */
1396 register struct table_elt *prev = elt->prev_same_hash;
1397 register struct table_elt *next = elt->next_same_hash;
1400 next->prev_same_hash = prev;
1403 prev->next_same_hash = next;
1404 else if (table[hash] == elt)
1408 /* This entry is not in the proper hash bucket. This can happen
1409 when two classes were merged by `merge_equiv_classes'. Search
1410 for the hash bucket that it heads. This happens only very
1411 rarely, so the cost is acceptable. */
1412 for (hash = 0; hash < HASH_SIZE; hash++)
1413 if (table[hash] == elt)
1418 /* Remove the table element from its related-value circular chain. */
1420 if (elt->related_value != 0 && elt->related_value != elt)
1422 register struct table_elt *p = elt->related_value;
1424 while (p->related_value != elt)
1425 p = p->related_value;
1426 p->related_value = elt->related_value;
1427 if (p->related_value == p)
1428 p->related_value = 0;
1431 /* Now add it to the free element chain. */
1432 elt->next_same_hash = free_element_chain;
1433 free_element_chain = elt;
1436 /* Look up X in the hash table and return its table element,
1437 or 0 if X is not in the table.
1439 MODE is the machine-mode of X, or if X is an integer constant
1440 with VOIDmode then MODE is the mode with which X will be used.
1442 Here we are satisfied to find an expression whose tree structure looks like X. */
1445 static struct table_elt *
1446 lookup (x, hash, mode)
1449 enum machine_mode mode;
1451 register struct table_elt *p;
1453 for (p = table[hash]; p; p = p->next_same_hash)
1454 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1455 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1461 /* Like `lookup' but don't care whether the table element uses invalid regs.
1462 Also ignore discrepancies in the machine mode of a register. */
1464 static struct table_elt *
1465 lookup_for_remove (x, hash, mode)
1468 enum machine_mode mode;
1470 register struct table_elt *p;
1472 if (GET_CODE (x) == REG)
1474 unsigned int regno = REGNO (x);
1476 /* Don't check the machine mode when comparing registers;
1477 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1478 for (p = table[hash]; p; p = p->next_same_hash)
1479 if (GET_CODE (p->exp) == REG
1480 && REGNO (p->exp) == regno)
1485 for (p = table[hash]; p; p = p->next_same_hash)
1486 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1493 /* Look for an expression equivalent to X and with code CODE.
1494 If one is found, return that expression. */
1497 lookup_as_function (x, code)
1501 register struct table_elt *p
1502 = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));
1504 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1505 long as we are narrowing. So if we looked in vain for a mode narrower
1506 than word_mode before, look for word_mode now. */
1507 if (p == 0 && code == CONST_INT
1508 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1511 PUT_MODE (x, word_mode);
1512 p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
1518 for (p = p->first_same_value; p; p = p->next_same_value)
1519 if (GET_CODE (p->exp) == code
1520 /* Make sure this is a valid entry in the table. */
1521 && exp_equiv_p (p->exp, p->exp, 1, 0))
1527 /* Insert X in the hash table, assuming HASH is its hash code
1528 and CLASSP is an element of the class it should go in
1529 (or 0 if a new class should be made).
1530 It is inserted at the proper position to keep the class in
1531 the order cheapest first.
1533 MODE is the machine-mode of X, or if X is an integer constant
1534 with VOIDmode then MODE is the mode with which X will be used.
1536 For elements of equal cheapness, the most recent one
1537 goes in front, except that the first element in the list
1538 remains first unless a cheaper element is added. The order of
1539 pseudo-registers does not matter, as canon_reg will be called to
1540 find the cheapest when a register is retrieved from the table.
1542 The in_memory field in the hash table element is set to 0.
1543 The caller must set it nonzero if appropriate.
1545 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1546 and if insert_regs returns a nonzero value
1547 you must then recompute its hash code before calling here.
1549 If necessary, update table showing constant values of quantities. */
1551 #define CHEAPER(X, Y) \
1552 (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1554 static struct table_elt *
1555 insert (x, classp, hash, mode)
1557 register struct table_elt *classp;
1559 enum machine_mode mode;
1561 register struct table_elt *elt;
1563 /* If X is a register and we haven't made a quantity for it,
1564 something is wrong. */
1565 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1568 /* If X is a hard register, show it is being put in the table. */
1569 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1571 unsigned int regno = REGNO (x);
1572 unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1575 for (i = regno; i < endregno; i++)
1576 SET_HARD_REG_BIT (hard_regs_in_table, i);
1579 /* Put an element for X into the right hash bucket. */
1581 elt = free_element_chain;
1583 free_element_chain = elt->next_same_hash;
1587 elt = (struct table_elt *) xmalloc (sizeof (struct table_elt));
1591 elt->canon_exp = NULL_RTX;
1592 elt->cost = COST (x);
1593 elt->regcost = approx_reg_cost (x);
1594 elt->next_same_value = 0;
1595 elt->prev_same_value = 0;
1596 elt->next_same_hash = table[hash];
1597 elt->prev_same_hash = 0;
1598 elt->related_value = 0;
1601 elt->is_const = (CONSTANT_P (x)
1602 /* GNU C++ takes advantage of this for `this'
1603 (and other const values). */
1604 || (RTX_UNCHANGING_P (x)
1605 && GET_CODE (x) == REG
1606 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1607 || FIXED_BASE_PLUS_P (x));
1610 table[hash]->prev_same_hash = elt;
1613 /* Put it into the proper value-class. */
1616 classp = classp->first_same_value;
1617 if (CHEAPER (elt, classp))
1618 /* Insert at the head of the class */
1620 register struct table_elt *p;
1621 elt->next_same_value = classp;
1622 classp->prev_same_value = elt;
1623 elt->first_same_value = elt;
1625 for (p = classp; p; p = p->next_same_value)
1626 p->first_same_value = elt;
1630 /* Insert not at head of the class. */
1631 /* Put it after the last element cheaper than X. */
1632 register struct table_elt *p, *next;
1634 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1637 /* Put it after P and before NEXT. */
1638 elt->next_same_value = next;
1640 next->prev_same_value = elt;
1642 elt->prev_same_value = p;
1643 p->next_same_value = elt;
1644 elt->first_same_value = classp;
1648 elt->first_same_value = elt;
1650 /* If this is a constant being set equivalent to a register or a register
1651 being set equivalent to a constant, note the constant equivalence.
1653 If this is a constant, it cannot be equivalent to a different constant,
1654 and a constant is the only thing that can be cheaper than a register. So
1655 we know the register is the head of the class (before the constant was
1658 If this is a register that is not already known equivalent to a
1659 constant, we must check the entire class.
1661 If this is a register that is already known equivalent to an insn,
1662 update the qtys `const_insn' to show that `this_insn' is the latest
1663 insn making that quantity equivalent to the constant. */
1665 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1666 && GET_CODE (x) != REG)
1668 int exp_q = REG_QTY (REGNO (classp->exp));
1669 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1671 exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1672 exp_ent->const_insn = this_insn;
1675 else if (GET_CODE (x) == REG
1677 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1680 register struct table_elt *p;
1682 for (p = classp; p != 0; p = p->next_same_value)
1684 if (p->is_const && GET_CODE (p->exp) != REG)
1686 int x_q = REG_QTY (REGNO (x));
1687 struct qty_table_elem *x_ent = &qty_table[x_q];
1690 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1691 x_ent->const_insn = this_insn;
1697 else if (GET_CODE (x) == REG
1698 && qty_table[REG_QTY (REGNO (x))].const_rtx
1699 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1700 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1702 /* If this is a constant with symbolic value,
1703 and it has a term with an explicit integer value,
1704 link it up with related expressions. */
1705 if (GET_CODE (x) == CONST)
1707 rtx subexp = get_related_value (x);
1709 struct table_elt *subelt, *subelt_prev;
1713 /* Get the integer-free subexpression in the hash table. */
1714 subhash = safe_hash (subexp, mode) & HASH_MASK;
1715 subelt = lookup (subexp, subhash, mode);
1717 subelt = insert (subexp, NULL, subhash, mode);
1718 /* Initialize SUBELT's circular chain if it has none. */
1719 if (subelt->related_value == 0)
1720 subelt->related_value = subelt;
1721 /* Find the element in the circular chain that precedes SUBELT. */
1722 subelt_prev = subelt;
1723 while (subelt_prev->related_value != subelt)
1724 subelt_prev = subelt_prev->related_value;
1725 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1726 This way the element that follows SUBELT is the oldest one. */
1727 elt->related_value = subelt_prev->related_value;
1728 subelt_prev->related_value = elt;
1735 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1736 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1737 the two classes equivalent.
1739 CLASS1 will be the surviving class; CLASS2 should not be used after this call.
1742 Any invalid entries in CLASS2 will not be copied. */
1745 merge_equiv_classes (class1, class2)
1746 struct table_elt *class1, *class2;
1748 struct table_elt *elt, *next, *new;
1750 /* Ensure we start with the head of the classes. */
1751 class1 = class1->first_same_value;
1752 class2 = class2->first_same_value;
1754 /* If they were already equal, forget it. */
1755 if (class1 == class2)
1758 for (elt = class2; elt; elt = next)
1762 enum machine_mode mode = elt->mode;
1764 next = elt->next_same_value;
1766 /* Remove old entry, make a new one in CLASS1's class.
1767 Don't do this for invalid entries as we cannot find their
1768 hash code (it also isn't necessary). */
1769 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1771 hash_arg_in_memory = 0;
1772 hash = HASH (exp, mode);
1774 if (GET_CODE (exp) == REG)
1775 delete_reg_equiv (REGNO (exp));
1777 remove_from_table (elt, hash);
1779 if (insert_regs (exp, class1, 0))
1781 rehash_using_reg (exp);
1782 hash = HASH (exp, mode);
1784 new = insert (exp, class1, hash, mode);
1785 new->in_memory = hash_arg_in_memory;
1790 /* Flush the entire hash table. */
1796 struct table_elt *p;
1798 for (i = 0; i < HASH_SIZE; i++)
1799 for (p = table[i]; p; p = table[i])
1801 /* Note that invalidate can remove elements
1802 after P in the current hash chain. */
1803 if (GET_CODE (p->exp) == REG)
1804 invalidate (p->exp, p->mode);
1806 remove_from_table (p, i);
1810 /* Function called for each rtx to check whether a true dependence exists. */
1811 struct check_dependence_data
1813 enum machine_mode mode;
1818 check_dependence (x, data)
1822 struct check_dependence_data *d = (struct check_dependence_data *) data;
1823 if (*x && GET_CODE (*x) == MEM)
1824 return true_dependence (d->exp, d->mode, *x, cse_rtx_varies_p);
1829 /* Remove from the hash table, or mark as invalid, all expressions whose
1830 values could be altered by storing in X. X is a register, a subreg, or
1831 a memory reference with nonvarying address (because, when a memory
1832 reference with a varying address is stored in, all memory references are
1833 removed by invalidate_memory so specific invalidation is superfluous).
1834 FULL_MODE, if not VOIDmode, indicates that this much should be
1835 invalidated instead of just the amount indicated by the mode of X. This
1836 is only used for bitfield stores into memory.
1838 A nonvarying address may be just a register or just a symbol reference,
1839 or it may be either of those plus a numeric offset. */
1842 invalidate (x, full_mode)
1844 enum machine_mode full_mode;
1847 register struct table_elt *p;
1849 switch (GET_CODE (x))
1853 /* If X is a register, dependencies on its contents are recorded
1854 through the qty number mechanism. Just change the qty number of
1855 the register, mark it as invalid for expressions that refer to it,
1856 and remove it itself. */
1857 unsigned int regno = REGNO (x);
1858 unsigned int hash = HASH (x, GET_MODE (x));
1860 /* Remove REGNO from any quantity list it might be on and indicate
1861 that its value might have changed. If it is a pseudo, remove its
1862 entry from the hash table.
1864 For a hard register, we do the first two actions above for any
1865 additional hard registers corresponding to X. Then, if any of these
1866 registers are in the table, we must remove any REG entries that
1867 overlap these registers. */
1869 delete_reg_equiv (regno);
1872 if (regno >= FIRST_PSEUDO_REGISTER)
1874 /* Because a register can be referenced in more than one mode,
1875 we might have to remove more than one table entry. */
1876 struct table_elt *elt;
1878 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1879 remove_from_table (elt, hash);
1883 HOST_WIDE_INT in_table
1884 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1885 unsigned int endregno
1886 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1887 unsigned int tregno, tendregno, rn;
1888 register struct table_elt *p, *next;
1890 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1892 for (rn = regno + 1; rn < endregno; rn++)
1894 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1895 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1896 delete_reg_equiv (rn);
1901 for (hash = 0; hash < HASH_SIZE; hash++)
1902 for (p = table[hash]; p; p = next)
1904 next = p->next_same_hash;
1906 if (GET_CODE (p->exp) != REG
1907 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1910 tregno = REGNO (p->exp);
1912 tendregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1913 if (tendregno > regno && tregno < endregno)
1914 remove_from_table (p, hash);
1921 invalidate (SUBREG_REG (x), VOIDmode);
1925 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1926 invalidate (XVECEXP (x, 0, i), VOIDmode);
1930 /* This is part of a disjoint return value; extract the location in
1931 question ignoring the offset. */
1932 invalidate (XEXP (x, 0), VOIDmode);
1936 /* Calculate the canonical version of X here so that
1937 true_dependence doesn't generate new RTL for X on each call. */
1940 /* Remove all hash table elements that refer to overlapping pieces of memory. */
1942 if (full_mode == VOIDmode)
1943 full_mode = GET_MODE (x);
1945 for (i = 0; i < HASH_SIZE; i++)
1947 register struct table_elt *next;
1949 for (p = table[i]; p; p = next)
1951 next = p->next_same_hash;
1954 struct check_dependence_data d;
1956 /* Just canonicalize the expression once;
1957 otherwise each time we call invalidate
1958 true_dependence will canonicalize the
1959 expression again. */
1961 p->canon_exp = canon_rtx (p->exp);
1964 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1965 remove_from_table (p, i);
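/* A minimal sketch of the overlap test used above for hard registers
   (hypothetical helper, kept under #if 0): the ranges [regno, endregno)
   and [tregno, tendregno) are half-open, and they overlap exactly when
   each range starts before the other one ends.  For example, [4, 6)
   overlaps [5, 7) but not [6, 8).  */
#if 0
static int
hard_reg_ranges_overlap (unsigned int regno, unsigned int endregno,
			 unsigned int tregno, unsigned int tendregno)
{
  return tendregno > regno && tregno < endregno;
}
#endif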
1976 /* Remove all expressions that refer to register REGNO,
1977 since they are already invalid, and we are about to
1978 mark that register valid again and don't want the old
1979 expressions to reappear as valid. */
1982 remove_invalid_refs (regno)
1986 struct table_elt *p, *next;
1988 for (i = 0; i < HASH_SIZE; i++)
1989 for (p = table[i]; p; p = next)
1991 next = p->next_same_hash;
1992 if (GET_CODE (p->exp) != REG
1993 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx*)0))
1994 remove_from_table (p, i);
1998 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
2001 remove_invalid_subreg_refs (regno, offset, mode)
2003 unsigned int offset;
2004 enum machine_mode mode;
2007 struct table_elt *p, *next;
2008 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2010 for (i = 0; i < HASH_SIZE; i++)
2011 for (p = table[i]; p; p = next)
2014 next = p->next_same_hash;
2016 if (GET_CODE (exp) != REG
2017 && (GET_CODE (exp) != SUBREG
2018 || GET_CODE (SUBREG_REG (exp)) != REG
2019 || REGNO (SUBREG_REG (exp)) != regno
2020 || (((SUBREG_BYTE (exp)
2021 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2022 && SUBREG_BYTE (exp) <= end))
2023 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx*)0))
2024 remove_from_table (p, i);
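/* A minimal sketch of the byte-range test above (hypothetical helper,
   kept under #if 0): a SUBREG covering bytes [sub_byte, sub_byte +
   sub_size - 1] of the register overlaps the stored bytes
   [offset, offset + size - 1] exactly when neither closed interval
   ends before the other begins; only overlapping SUBREGs are removed.  */
#if 0
static int
subreg_bytes_overlap (unsigned int sub_byte, unsigned int sub_size,
		      unsigned int offset, unsigned int size)
{
  unsigned int sub_end = sub_byte + sub_size - 1;
  unsigned int end = offset + size - 1;

  return sub_end >= offset && sub_byte <= end;
}
#endif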
2028 /* Recompute the hash codes of any valid entries in the hash table that
2029 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2031 This is called when we make a jump equivalence. */
2034 rehash_using_reg (x)
2038 struct table_elt *p, *next;
2041 if (GET_CODE (x) == SUBREG)
2044 /* If X is not a register or if the register is known not to be in any
2045 valid entries in the table, we have no work to do. */
2047 if (GET_CODE (x) != REG
2048 || REG_IN_TABLE (REGNO (x)) < 0
2049 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2052 /* Scan all hash chains looking for valid entries that mention X.
2053 If we find one and it is in the wrong hash chain, move it. We can skip
2054 objects that are registers, since they are handled specially. */
2056 for (i = 0; i < HASH_SIZE; i++)
2057 for (p = table[i]; p; p = next)
2059 next = p->next_same_hash;
2060 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
2061 && exp_equiv_p (p->exp, p->exp, 1, 0)
2062 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2064 if (p->next_same_hash)
2065 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2067 if (p->prev_same_hash)
2068 p->prev_same_hash->next_same_hash = p->next_same_hash;
2070 table[i] = p->next_same_hash;
2072 p->next_same_hash = table[hash];
2073 p->prev_same_hash = 0;
2074 if (table[hash])
2075 table[hash]->prev_same_hash = p;
2076 table[hash] = p;
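/* A minimal sketch of the relinking done above (hypothetical types, kept
   under #if 0): when an entry's hash changes, unlink it from its old
   doubly-linked chain and push it onto the head of the new chain.  */
#if 0
struct elt { struct elt *next_same_hash, *prev_same_hash; };

static void
move_to_chain (struct elt **table, struct elt *p,
	       unsigned int old_hash, unsigned int new_hash)
{
  /* Unlink P from its old chain.  */
  if (p->next_same_hash)
    p->next_same_hash->prev_same_hash = p->prev_same_hash;
  if (p->prev_same_hash)
    p->prev_same_hash->next_same_hash = p->next_same_hash;
  else
    table[old_hash] = p->next_same_hash;

  /* Link P at the head of its new chain.  */
  p->next_same_hash = table[new_hash];
  p->prev_same_hash = 0;
  if (table[new_hash])
    table[new_hash]->prev_same_hash = p;
  table[new_hash] = p;
}
#endif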
2081 /* Remove from the hash table any expression that is a call-clobbered
2082 register. Also update their TICK values. */
2085 invalidate_for_call ()
2087 unsigned int regno, endregno;
2090 struct table_elt *p, *next;
2093 /* Go through all the hard registers. For each that is clobbered in
2094 a CALL_INSN, remove the register from quantity chains and update
2095 reg_tick if defined. Also see if any of these registers is currently in the table.
2098 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2099 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2101 delete_reg_equiv (regno);
2102 if (REG_TICK (regno) >= 0)
2105 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2108 /* In the case where we have no call-clobbered hard registers in the
2109 table, we are done. Otherwise, scan the table and remove any
2110 entry that overlaps a call-clobbered register. */
2113 for (hash = 0; hash < HASH_SIZE; hash++)
2114 for (p = table[hash]; p; p = next)
2116 next = p->next_same_hash;
2118 if (GET_CODE (p->exp) != REG
2119 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2122 regno = REGNO (p->exp);
2123 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2125 for (i = regno; i < endregno; i++)
2126 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2128 remove_from_table (p, hash);
2134 /* Given an expression X of type CONST,
2135 and ELT which is its table entry (or 0 if it
2136 is not in the hash table),
2137 return an alternate expression for X as a register plus integer.
2138 If none can be found, return 0. */
2141 use_related_value (x, elt)
2143 struct table_elt *elt;
2145 register struct table_elt *relt = 0;
2146 register struct table_elt *p, *q;
2147 HOST_WIDE_INT offset;
2149 /* First, is there anything related known?
2150 If we have a table element, we can tell from that.
2151 Otherwise, must look it up. */
2153 if (elt != 0 && elt->related_value != 0)
2155 else if (elt == 0 && GET_CODE (x) == CONST)
2157 rtx subexp = get_related_value (x);
2159 relt = lookup (subexp,
2160 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2167 /* Search all related table entries for one that has an
2168 equivalent register. */
2173 /* This loop is strange in that it is executed in two different cases.
2174 The first is when X is already in the table. Then it is searching
2175 the RELATED_VALUE list of X's class (RELT). The second case is when
2176 X is not in the table. Then RELT points to a class for the related
2179 Ensure that, whatever case we are in, we ignore classes that have
2180 the same value as X. */
2182 if (rtx_equal_p (x, p->exp))
2185 for (q = p->first_same_value; q; q = q->next_same_value)
2186 if (GET_CODE (q->exp) == REG)
2192 p = p->related_value;
2194 /* We went all the way around, so there is nothing to be found.
2195 Alternatively, perhaps RELT was in the table for some other reason
2196 and it has no related values recorded. */
2197 if (p == relt || p == 0)
2204 offset = (get_integer_term (x) - get_integer_term (p->exp));
2205 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2206 return plus_constant (q->exp, offset);
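/* A minimal sketch of the walk above (hypothetical types, kept under
   #if 0): the related_value field links a group of related constants
   into a circular list, so the search stops either at a member with a
   register equivalent or when it comes back to where it started.  */
#if 0
struct relval { struct relval *related_value; int has_reg_equiv; };

static struct relval *
find_related_with_reg (struct relval *relt)
{
  struct relval *p = relt;

  do
    {
      if (p->has_reg_equiv)
	return p;
      p = p->related_value;
    }
  while (p && p != relt);

  return 0;			/* went all the way around */
}
#endif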
2209 /* Hash a string. Just add its bytes up. */
2210 static inline unsigned
2211 canon_hash_string (ps)
2215 const unsigned char *p = (const unsigned char *)ps;
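/* A minimal, self-contained version of the byte-summing hash above
   (kept under #if 0): the hash of a string is simply the sum of its
   bytes taken as unsigned values.  */
#if 0
static unsigned int
sum_string_bytes (const char *s)
{
  const unsigned char *p = (const unsigned char *) s;
  unsigned int hash = 0;

  while (*p)
    hash += *p++;

  return hash;
}
#endif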
2224 /* Hash an rtx. We are careful to make sure the value is never negative.
2225 Equivalent registers hash identically.
2226 MODE is used in hashing for CONST_INTs only;
2227 otherwise the mode of X is used.
2229 Store 1 in do_not_record if any subexpression is volatile.
2231 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2232 which does not have the RTX_UNCHANGING_P bit set.
2234 Note that cse_insn knows that the hash code of a MEM expression
2235 is just (int) MEM plus the hash code of the address. */
2238 canon_hash (x, mode)
2240 enum machine_mode mode;
2243 register unsigned hash = 0;
2244 register enum rtx_code code;
2245 register const char *fmt;
2247 /* repeat is used to turn tail-recursion into iteration. */
2252 code = GET_CODE (x);
2257 unsigned int regno = REGNO (x);
2259 /* On some machines, we can't record any non-fixed hard register,
2260 because extending its life will cause reload problems. We
2261 consider ap, fp, and sp to be fixed for this purpose.
2263 We also consider CCmode registers to be fixed for this purpose;
2264 failure to do so leads to failure to simplify 0<100 type of conditionals.
2267 On all machines, we can't record any global registers.
2268 Nor should we record any register that is in a small
2269 class, as defined by CLASS_LIKELY_SPILLED_P. */
2271 if (regno < FIRST_PSEUDO_REGISTER
2272 && (global_regs[regno]
2273 || CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno))
2274 || (SMALL_REGISTER_CLASSES
2275 && ! fixed_regs[regno]
2276 && regno != FRAME_POINTER_REGNUM
2277 && regno != HARD_FRAME_POINTER_REGNUM
2278 && regno != ARG_POINTER_REGNUM
2279 && regno != STACK_POINTER_REGNUM
2280 && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
2286 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2290 /* We handle SUBREG of a REG specially because the underlying
2291 reg changes its hash value with every value change; we don't
2292 want to have to forget unrelated subregs when one subreg changes. */
2295 if (GET_CODE (SUBREG_REG (x)) == REG)
2297 hash += (((unsigned) SUBREG << 7)
2298 + REGNO (SUBREG_REG (x))
2299 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2307 unsigned HOST_WIDE_INT tem = INTVAL (x);
2308 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2313 /* This is like the general case, except that it only counts
2314 the integers representing the constant. */
2315 hash += (unsigned) code + (unsigned) GET_MODE (x);
2316 if (GET_MODE (x) != VOIDmode)
2317 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2319 unsigned HOST_WIDE_INT tem = XWINT (x, i);
2323 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2324 + (unsigned) CONST_DOUBLE_HIGH (x));
2327 /* Assume there is only one rtx object for any given label. */
2329 hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2333 hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2337 /* We don't record if marked volatile or if BLKmode since we don't
2338 know the size of the move. */
2339 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2344 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2346 hash_arg_in_memory = 1;
2348 /* Now that we have already found this special case,
2349 might as well speed it up as much as possible. */
2350 hash += (unsigned) MEM;
2355 /* A USE that mentions non-volatile memory needs special
2356 handling since the MEM may be BLKmode which normally
2357 prevents an entry from being made. Pure calls are
2358 marked by a USE which mentions BLKmode memory. */
2359 if (GET_CODE (XEXP (x, 0)) == MEM
2360 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2362 hash += (unsigned)USE;
2365 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2366 hash_arg_in_memory = 1;
2368 /* Now that we have already found this special case,
2369 might as well speed it up as much as possible. */
2370 hash += (unsigned) MEM;
2385 case UNSPEC_VOLATILE:
2390 if (MEM_VOLATILE_P (x))
2397 /* We don't want to take the filename and line into account. */
2398 hash += (unsigned) code + (unsigned) GET_MODE (x)
2399 + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2400 + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2401 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2403 if (ASM_OPERANDS_INPUT_LENGTH (x))
2405 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2407 hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2408 GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2409 + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2413 hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2414 x = ASM_OPERANDS_INPUT (x, 0);
2415 mode = GET_MODE (x);
2427 i = GET_RTX_LENGTH (code) - 1;
2428 hash += (unsigned) code + (unsigned) GET_MODE (x);
2429 fmt = GET_RTX_FORMAT (code);
2434 rtx tem = XEXP (x, i);
2436 /* If we are about to do the last recursive call
2437 needed at this level, change it into iteration.
2438 This function is called enough to be worth it. */
2444 hash += canon_hash (tem, 0);
2446 else if (fmt[i] == 'E')
2447 for (j = 0; j < XVECLEN (x, i); j++)
2448 hash += canon_hash (XVECEXP (x, i, j), 0);
2449 else if (fmt[i] == 's')
2450 hash += canon_hash_string (XSTR (x, i));
2451 else if (fmt[i] == 'i')
2453 register unsigned tem = XINT (x, i);
2456 else if (fmt[i] == '0' || fmt[i] == 't')
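/* A minimal sketch of the recursion scheme used by canon_hash above
   (hypothetical tree type, kept under #if 0): accumulate the node's own
   code plus the hashes of its children, and turn the recursive call on
   the last child into iteration by jumping back to `repeat'.  */
#if 0
struct node { int code; int n_kids; struct node **kid; };

static unsigned int
tree_hash (const struct node *x)
{
  unsigned int hash = 0;
  int i;

 repeat:
  hash += (unsigned int) x->code;
  for (i = 0; i < x->n_kids; i++)
    {
      if (i == x->n_kids - 1)
	{
	  /* Last child: iterate instead of recursing.  */
	  x = x->kid[i];
	  goto repeat;
	}
      hash += tree_hash (x->kid[i]);
    }

  return hash;
}
#endif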
2465 /* Like canon_hash but with no side effects. */
2470 enum machine_mode mode;
2472 int save_do_not_record = do_not_record;
2473 int save_hash_arg_in_memory = hash_arg_in_memory;
2474 unsigned hash = canon_hash (x, mode);
2475 hash_arg_in_memory = save_hash_arg_in_memory;
2476 do_not_record = save_do_not_record;
2480 /* Return 1 iff X and Y would canonicalize into the same thing,
2481 without actually constructing the canonicalization of either one.
2482 If VALIDATE is nonzero,
2483 we assume X is an expression being processed from the rtl
2484 and Y was found in the hash table. We check register refs
2485 in Y for being marked as valid.
2487 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2488 that is known to be in the register. Ordinarily, we don't allow them
2489 to match, because letting them match would cause unpredictable results
2490 in all the places that search a hash table chain for an equivalent
2491 for a given value. A possible equivalent that has different structure
2492 has its hash code computed from different data. Whether the hash code
2493 is the same as that of the given value is pure luck. */
2496 exp_equiv_p (x, y, validate, equal_values)
2502 register enum rtx_code code;
2503 register const char *fmt;
2505 /* Note: it is incorrect to assume an expression is equivalent to itself
2506 if VALIDATE is nonzero. */
2507 if (x == y && !validate)
2509 if (x == 0 || y == 0)
2512 code = GET_CODE (x);
2513 if (code != GET_CODE (y))
2518 /* If X is a constant and Y is a register or vice versa, they may be
2519 equivalent. We only have to validate if Y is a register. */
2520 if (CONSTANT_P (x) && GET_CODE (y) == REG
2521 && REGNO_QTY_VALID_P (REGNO (y)))
2523 int y_q = REG_QTY (REGNO (y));
2524 struct qty_table_elem *y_ent = &qty_table[y_q];
2526 if (GET_MODE (y) == y_ent->mode
2527 && rtx_equal_p (x, y_ent->const_rtx)
2528 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2532 if (CONSTANT_P (y) && code == REG
2533 && REGNO_QTY_VALID_P (REGNO (x)))
2535 int x_q = REG_QTY (REGNO (x));
2536 struct qty_table_elem *x_ent = &qty_table[x_q];
2538 if (GET_MODE (x) == x_ent->mode
2539 && rtx_equal_p (y, x_ent->const_rtx))
2546 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2547 if (GET_MODE (x) != GET_MODE (y))
2558 return XEXP (x, 0) == XEXP (y, 0);
2561 return XSTR (x, 0) == XSTR (y, 0);
2565 unsigned int regno = REGNO (y);
2566 unsigned int endregno
2567 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2568 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2571 /* If the quantities are not the same, the expressions are not
2572 equivalent. If they are and we are not to validate, they
2573 are equivalent. Otherwise, ensure all regs are up-to-date. */
2575 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2581 for (i = regno; i < endregno; i++)
2582 if (REG_IN_TABLE (i) != REG_TICK (i))
2588 /* For commutative operations, check both orders. */
2596 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2597 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2598 validate, equal_values))
2599 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2600 validate, equal_values)
2601 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2602 validate, equal_values)));
2605 /* We don't use the generic code below because we want to
2606 disregard filename and line numbers. */
2608 /* A volatile asm isn't equivalent to any other. */
2609 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2612 if (GET_MODE (x) != GET_MODE (y)
2613 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2614 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2615 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2616 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2617 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2620 if (ASM_OPERANDS_INPUT_LENGTH (x))
2622 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2623 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2624 ASM_OPERANDS_INPUT (y, i),
2625 validate, equal_values)
2626 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2627 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2637 /* Compare the elements. If any pair of corresponding elements
2638 fails to match, return 0 for the whole thing. */
2640 fmt = GET_RTX_FORMAT (code);
2641 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2646 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2651 if (XVECLEN (x, i) != XVECLEN (y, i))
2653 for (j = 0; j < XVECLEN (x, i); j++)
2654 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2655 validate, equal_values))
2660 if (strcmp (XSTR (x, i), XSTR (y, i)))
2665 if (XINT (x, i) != XINT (y, i))
2670 if (XWINT (x, i) != XWINT (y, i))
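/* A minimal sketch of the commutativity handling above (hypothetical
   expression type, kept under #if 0): two expressions match if their
   operands match either in the given order or swapped.  For brevity this
   sketch treats every binary operator as commutative.  */
#if 0
struct bin_expr { int code; const struct bin_expr *op0, *op1; };

static int
bin_expr_equal (const struct bin_expr *x, const struct bin_expr *y)
{
  if (x == y)
    return 1;
  if (x == 0 || y == 0 || x->code != y->code)
    return 0;

  return ((bin_expr_equal (x->op0, y->op0) && bin_expr_equal (x->op1, y->op1))
	  || (bin_expr_equal (x->op0, y->op1)
	      && bin_expr_equal (x->op1, y->op0)));
}
#endif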
2686 /* Return 1 if X has a value that can vary even between two
2687 executions of the program. 0 means X can be compared reliably
2688 against certain constants or near-constants. */
2691 cse_rtx_varies_p (x, from_alias)
2695 /* We need not check for X and the equivalence class being of the same
2696 mode because if X is equivalent to a constant in some mode, it
2697 doesn't vary in any mode. */
2699 if (GET_CODE (x) == REG
2700 && REGNO_QTY_VALID_P (REGNO (x)))
2702 int x_q = REG_QTY (REGNO (x));
2703 struct qty_table_elem *x_ent = &qty_table[x_q];
2705 if (GET_MODE (x) == x_ent->mode
2706 && x_ent->const_rtx != NULL_RTX)
2710 if (GET_CODE (x) == PLUS
2711 && GET_CODE (XEXP (x, 1)) == CONST_INT
2712 && GET_CODE (XEXP (x, 0)) == REG
2713 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2715 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2716 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2718 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2719 && x0_ent->const_rtx != NULL_RTX)
2723 /* This can happen as the result of virtual register instantiation, if
2724 the initial constant is too large to be a valid address. This gives
2725 us a three instruction sequence, load large offset into a register,
2726 load fp minus a constant into a register, then a MEM which is the
2727 sum of the two `constant' registers. */
2728 if (GET_CODE (x) == PLUS
2729 && GET_CODE (XEXP (x, 0)) == REG
2730 && GET_CODE (XEXP (x, 1)) == REG
2731 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2732 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2734 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2735 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2736 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2737 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2739 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2740 && x0_ent->const_rtx != NULL_RTX
2741 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2742 && x1_ent->const_rtx != NULL_RTX)
2746 return rtx_varies_p (x, from_alias);
2749 /* Canonicalize an expression:
2750 replace each register reference inside it
2751 with the "oldest" equivalent register.
2753 If INSN is non-zero and we are replacing a pseudo with a hard register
2754 or vice versa, validate_change is used to ensure that INSN remains valid
2755 after we make our substitution. The calls are made with IN_GROUP non-zero
2756 so apply_change_group must be called upon the outermost return from this
2757 function (unless INSN is zero). The result of apply_change_group can
2758 generally be discarded since the changes we are making are optional. */
2766 register enum rtx_code code;
2767 register const char *fmt;
2772 code = GET_CODE (x);
2790 register struct qty_table_elem *ent;
2792 /* Never replace a hard reg, because hard regs can appear
2793 in more than one machine mode, and we must preserve the mode
2794 of each occurrence. Also, some hard regs appear in
2795 MEMs that are shared and mustn't be altered. Don't try to
2796 replace any reg that maps to a reg of class NO_REGS. */
2797 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2798 || ! REGNO_QTY_VALID_P (REGNO (x)))
2801 q = REG_QTY (REGNO (x));
2802 ent = &qty_table[q];
2803 first = ent->first_reg;
2804 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2805 : REGNO_REG_CLASS (first) == NO_REGS ? x
2806 : gen_rtx_REG (ent->mode, first));
2813 fmt = GET_RTX_FORMAT (code);
2814 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2820 rtx new = canon_reg (XEXP (x, i), insn);
2823 /* If replacing pseudo with hard reg or vice versa, ensure the
2824 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2825 if (insn != 0 && new != 0
2826 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2827 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2828 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2829 || (insn_code = recog_memoized (insn)) < 0
2830 || insn_data[insn_code].n_dups > 0))
2831 validate_change (insn, &XEXP (x, i), new, 1);
2835 else if (fmt[i] == 'E')
2836 for (j = 0; j < XVECLEN (x, i); j++)
2837 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
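/* A minimal sketch of the mapping performed above (hypothetical flat
   tables, kept under #if 0): each register maps to a quantity number,
   and each quantity remembers the oldest register holding that value,
   so equal values end up spelled with the same register.  */
#if 0
#define N_REGS 1024
static int reg_to_qty[N_REGS];		/* register number -> quantity number */
static int qty_oldest_reg[N_REGS];	/* quantity number -> oldest register */

static int
canonical_regno (int regno)
{
  return qty_oldest_reg[reg_to_qty[regno]];
}
#endif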
2843 /* LOC is a location within INSN that is an operand address (the contents of
2844 a MEM). Find the best equivalent address to use that is valid for this insn.
2847 On most CISC machines, complicated address modes are costly, and rtx_cost
2848 is a good approximation for that cost. However, most RISC machines have
2849 only a few (usually only one) memory reference formats. If an address is
2850 valid at all, it is often just as cheap as any other address. Hence, for
2851 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2852 costs of various addresses. For two addresses of equal cost, choose the one
2853 with the highest `rtx_cost' value as that has the potential of eliminating
2854 the most insns. For equal costs, we choose the first in the equivalence
2855 class. Note that we ignore the fact that pseudo registers are cheaper
2856 than hard registers here because we would also prefer the pseudo registers.
2860 find_best_addr (insn, loc, mode)
2863 enum machine_mode mode;
2865 struct table_elt *elt;
2868 struct table_elt *p;
2869 int found_better = 1;
2871 int save_do_not_record = do_not_record;
2872 int save_hash_arg_in_memory = hash_arg_in_memory;
2877 /* Do not try to replace constant addresses or addresses of local and
2878 argument slots. These MEM expressions are made only once and inserted
2879 in many instructions, as well as being used to control symbol table
2880 output. It is not safe to clobber them.
2882 There are some uncommon cases where the address is already in a register
2883 for some reason, but we cannot take advantage of that because we have
2884 no easy way to unshare the MEM. In addition, looking up all stack
2885 addresses is costly. */
2886 if ((GET_CODE (addr) == PLUS
2887 && GET_CODE (XEXP (addr, 0)) == REG
2888 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2889 && (regno = REGNO (XEXP (addr, 0)),
2890 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2891 || regno == ARG_POINTER_REGNUM))
2892 || (GET_CODE (addr) == REG
2893 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2894 || regno == HARD_FRAME_POINTER_REGNUM
2895 || regno == ARG_POINTER_REGNUM))
2896 || GET_CODE (addr) == ADDRESSOF
2897 || CONSTANT_ADDRESS_P (addr))
2900 /* If this address is not simply a register, try to fold it. This will
2901 sometimes simplify the expression. Many simplifications
2902 will not be valid, but some, usually applying the associative rule, will
2903 be valid and produce better code. */
2904 if (GET_CODE (addr) != REG)
2906 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2907 int addr_folded_cost = address_cost (folded, mode);
2908 int addr_cost = address_cost (addr, mode);
2910 if ((addr_folded_cost < addr_cost
2911 || (addr_folded_cost == addr_cost
2912 /* ??? The rtx_cost comparison is left over from an older
2913 version of this code. It is probably no longer helpful. */
2914 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2915 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2916 && validate_change (insn, loc, folded, 0))
2920 /* If this address is not in the hash table, we can't look for equivalences
2921 of the whole address. Also, ignore if volatile. */
2924 hash = HASH (addr, Pmode);
2925 addr_volatile = do_not_record;
2926 do_not_record = save_do_not_record;
2927 hash_arg_in_memory = save_hash_arg_in_memory;
2932 elt = lookup (addr, hash, Pmode);
2934 #ifndef ADDRESS_COST
2937 int our_cost = elt->cost;
2939 /* Find the lowest cost below ours that works. */
2940 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2941 if (elt->cost < our_cost
2942 && (GET_CODE (elt->exp) == REG
2943 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2944 && validate_change (insn, loc,
2945 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2952 /* We need to find the best (under the criteria documented above) entry
2953 in the class that is valid. We use the `flag' field to indicate
2954 choices that were invalid and iterate until we can't find a better
2955 one that hasn't already been tried. */
2957 for (p = elt->first_same_value; p; p = p->next_same_value)
2960 while (found_better)
2962 int best_addr_cost = address_cost (*loc, mode);
2963 int best_rtx_cost = (elt->cost + 1) >> 1;
2965 struct table_elt *best_elt = elt;
2968 for (p = elt->first_same_value; p; p = p->next_same_value)
2971 if ((GET_CODE (p->exp) == REG
2972 || exp_equiv_p (p->exp, p->exp, 1, 0))
2973 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2974 || (exp_cost == best_addr_cost
2975 && ((p->cost + 1) >> 1) > best_rtx_cost)))
2978 best_addr_cost = exp_cost;
2979 best_rtx_cost = (p->cost + 1) >> 1;
2986 if (validate_change (insn, loc,
2987 canon_reg (copy_rtx (best_elt->exp),
2996 /* If the address is a binary operation with the first operand a register
2997 and the second a constant, do the same as above, but looking for
2998 equivalences of the register. Then try to simplify before checking for
2999 the best address to use. This catches a few cases: First is when we
3000 have REG+const and the register is another REG+const. We can often merge
3001 the constants and eliminate one insn and one register. It may also be
3002 that a machine has a cheap REG+REG+const. Finally, this improves the
3003 code on the Alpha for unaligned byte stores. */
3005 if (flag_expensive_optimizations
3006 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
3007 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
3008 && GET_CODE (XEXP (*loc, 0)) == REG
3009 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
3011 rtx c = XEXP (*loc, 1);
3014 hash = HASH (XEXP (*loc, 0), Pmode);
3015 do_not_record = save_do_not_record;
3016 hash_arg_in_memory = save_hash_arg_in_memory;
3018 elt = lookup (XEXP (*loc, 0), hash, Pmode);
3022 /* We need to find the best (under the criteria documented above) entry
3023 in the class that is valid. We use the `flag' field to indicate
3024 choices that were invalid and iterate until we can't find a better
3025 one that hasn't already been tried. */
3027 for (p = elt->first_same_value; p; p = p->next_same_value)
3030 while (found_better)
3032 int best_addr_cost = address_cost (*loc, mode);
3033 int best_rtx_cost = (COST (*loc) + 1) >> 1;
3034 struct table_elt *best_elt = elt;
3035 rtx best_rtx = *loc;
3038 /* In the worst case this is an O(n^2) algorithm, so limit our search
3039 to the first 32 elements on the list. This avoids trouble
3040 compiling code with very long basic blocks that can easily
3041 call simplify_gen_binary so many times that we run out of memory. */
3045 for (p = elt->first_same_value, count = 0;
3047 p = p->next_same_value, count++)
3049 && (GET_CODE (p->exp) == REG
3050 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3052 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3055 new_cost = address_cost (new, mode);
3057 if (new_cost < best_addr_cost
3058 || (new_cost == best_addr_cost
3059 && (COST (new) + 1) >> 1 > best_rtx_cost))
3062 best_addr_cost = new_cost;
3063 best_rtx_cost = (COST (new) + 1) >> 1;
3071 if (validate_change (insn, loc,
3072 canon_reg (copy_rtx (best_rtx),
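/* A minimal sketch of the selection criterion used above (hypothetical
   cost fields, kept under #if 0): among the valid members of an
   equivalence class, prefer the lowest address cost and, on a tie, the
   highest rtx cost, since the more complex expression may eliminate the
   most insns.  A null result means nothing beats the current address.  */
#if 0
struct addr_cand { int addr_cost, rtx_cost, valid; struct addr_cand *next; };

static struct addr_cand *
pick_best_address (struct addr_cand *class_head,
		   int best_addr_cost, int best_rtx_cost)
{
  struct addr_cand *best = 0, *p;

  for (p = class_head; p; p = p->next)
    if (p->valid
	&& (p->addr_cost < best_addr_cost
	    || (p->addr_cost == best_addr_cost
		&& p->rtx_cost > best_rtx_cost)))
      {
	best = p;
	best_addr_cost = p->addr_cost;
	best_rtx_cost = p->rtx_cost;
      }

  return best;
}
#endif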
3083 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3084 operation (EQ, NE, GT, etc.), follow it back through the hash table and
3085 find what values are actually being compared.
3087 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3088 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3089 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3090 compared to produce cc0.
3092 The return value is the comparison operator and is either the code of
3093 A or the code corresponding to the inverse of the comparison. */
3095 static enum rtx_code
3096 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
3099 enum machine_mode *pmode1, *pmode2;
3103 arg1 = *parg1, arg2 = *parg2;
3105 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3107 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3109 /* Set non-zero when we find something of interest. */
3111 int reverse_code = 0;
3112 struct table_elt *p = 0;
3114 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3115 On machines with CC0, this is the only case that can occur, since
3116 fold_rtx will return the COMPARE or item being compared with zero when given CC0. */
3119 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3122 /* If ARG1 is a comparison operator and CODE is testing for
3123 STORE_FLAG_VALUE, get the inner arguments. */
3125 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3128 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3129 && code == LT && STORE_FLAG_VALUE == -1)
3130 #ifdef FLOAT_STORE_FLAG_VALUE
3131 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3132 && (REAL_VALUE_NEGATIVE
3133 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3138 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3139 && code == GE && STORE_FLAG_VALUE == -1)
3140 #ifdef FLOAT_STORE_FLAG_VALUE
3141 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3142 && (REAL_VALUE_NEGATIVE
3143 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3146 x = arg1, reverse_code = 1;
3149 /* ??? We could also check for
3151 (ne (and (eq (...) (const_int 1))) (const_int 0))
3153 and related forms, but let's wait until we see them occurring. */
3156 /* Look up ARG1 in the hash table and see if it has an equivalence
3157 that lets us see what is being compared. */
3158 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3162 p = p->first_same_value;
3164 /* If what we compare is already known to be constant, that is as good as anything.
3166 We need to break the loop in this case, because otherwise we
3167 can have an infinite loop when looking at a reg that is known
3168 to be a constant which is the same as a comparison of a reg
3169 against zero which appears later in the insn stream, which in
3170 turn is constant and the same as the comparison of the first reg against zero... */
3176 for (; p; p = p->next_same_value)
3178 enum machine_mode inner_mode = GET_MODE (p->exp);
3180 /* If the entry isn't valid, skip it. */
3181 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3184 if (GET_CODE (p->exp) == COMPARE
3185 /* Another possibility is that this machine has a compare insn
3186 that includes the comparison code. In that case, ARG1 would
3187 be equivalent to a comparison operation that would set ARG1 to
3188 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3189 ORIG_CODE is the actual comparison being done; if it is an EQ,
3190 we must reverse ORIG_CODE. On machines with a negative value
3191 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3194 && GET_MODE_CLASS (inner_mode) == MODE_INT
3195 && (GET_MODE_BITSIZE (inner_mode)
3196 <= HOST_BITS_PER_WIDE_INT)
3197 && (STORE_FLAG_VALUE
3198 & ((HOST_WIDE_INT) 1
3199 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3200 #ifdef FLOAT_STORE_FLAG_VALUE
3202 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3203 && (REAL_VALUE_NEGATIVE
3204 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3207 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3212 else if ((code == EQ
3214 && GET_MODE_CLASS (inner_mode) == MODE_INT
3215 && (GET_MODE_BITSIZE (inner_mode)
3216 <= HOST_BITS_PER_WIDE_INT)
3217 && (STORE_FLAG_VALUE
3218 & ((HOST_WIDE_INT) 1
3219 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3220 #ifdef FLOAT_STORE_FLAG_VALUE
3222 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3223 && (REAL_VALUE_NEGATIVE
3224 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3227 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3234 /* If this is fp + constant, the equivalent is a better operand since
3235 it may let us predict the value of the comparison. */
3236 else if (NONZERO_BASE_PLUS_P (p->exp))
3243 /* If we didn't find a useful equivalence for ARG1, we are done.
3244 Otherwise, set up for the next iteration. */
3248 /* If we need to reverse the comparison, make sure that that is
3249 possible -- we can't necessarily infer the value of GE from LT
3250 with floating-point operands. */
3253 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3254 if (reversed == UNKNOWN)
3256 else code = reversed;
3258 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3259 code = GET_CODE (x);
3260 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3263 /* Return our results. Return the modes from before fold_rtx
3264 because fold_rtx might produce const_int, and then it's too late. */
3265 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3266 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3271 /* If X is a nontrivial arithmetic operation on an argument
3272 for which a constant value can be determined, return
3273 the result of operating on that value, as a constant.
3274 Otherwise, return X, possibly with one or more operands
3275 modified by recursive calls to this function.
3277 If X is a register whose contents are known, we do NOT
3278 return those contents here. equiv_constant is called to perform that task.
3281 INSN is the insn that we may be modifying. If it is 0, make a copy
3282 of X before modifying it. */
3289 register enum rtx_code code;
3290 register enum machine_mode mode;
3291 register const char *fmt;
3297 /* Folded equivalents of first two operands of X. */
3301 /* Constant equivalents of first three operands of X;
3302 0 when no such equivalent is known. */
3307 /* The mode of the first operand of X. We need this for sign and zero extends. */
3309 enum machine_mode mode_arg0;
3314 mode = GET_MODE (x);
3315 code = GET_CODE (x);
3324 /* No use simplifying an EXPR_LIST
3325 since they are used only for lists of args
3326 in a function call's REG_EQUAL note. */
3328 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3329 want to (e.g.,) make (addressof (const_int 0)) just because
3330 the location is known to be zero. */
3336 return prev_insn_cc0;
3340 /* If the next insn is a CODE_LABEL followed by a jump table,
3341 PC's value is a LABEL_REF pointing to that label. That
3342 lets us fold switch statements on the Vax. */
3343 if (insn && GET_CODE (insn) == JUMP_INSN)
3345 rtx next = next_nonnote_insn (insn);
3347 if (next && GET_CODE (next) == CODE_LABEL
3348 && NEXT_INSN (next) != 0
3349 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3350 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3351 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3352 return gen_rtx_LABEL_REF (Pmode, next);
3357 /* See if we previously assigned a constant value to this SUBREG. */
3358 if ((new = lookup_as_function (x, CONST_INT)) != 0
3359 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3362 /* If this is a paradoxical SUBREG, we have no idea what value the
3363 extra bits would have. However, if the operand is equivalent
3364 to a SUBREG whose operand is the same as our mode, and all the
3365 modes are within a word, we can just use the inner operand
3366 because these SUBREGs just say how to treat the register.
3368 Similarly if we find an integer constant. */
3370 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3372 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3373 struct table_elt *elt;
3375 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3376 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3377 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3379 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3381 if (CONSTANT_P (elt->exp)
3382 && GET_MODE (elt->exp) == VOIDmode)
3385 if (GET_CODE (elt->exp) == SUBREG
3386 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3387 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3388 return copy_rtx (SUBREG_REG (elt->exp));
3394 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3395 We might be able to if the SUBREG is extracting a single word in an
3396 integral mode or extracting the low part. */
3398 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3399 const_arg0 = equiv_constant (folded_arg0);
3401 folded_arg0 = const_arg0;
3403 if (folded_arg0 != SUBREG_REG (x))
3405 new = simplify_subreg (mode, folded_arg0,
3406 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3411 /* If this is a narrowing SUBREG and our operand is a REG, see if
3412 we can find an equivalence for REG that is an arithmetic operation
3413 in a wider mode where both operands are paradoxical SUBREGs
3414 from objects of our result mode. In that case, we couldn't report
3415 an equivalent value for that operation, since we don't know what the
3416 extra bits will be. But we can find an equivalence for this SUBREG
3417 by folding that operation in the narrow mode. This allows us to
3418 fold arithmetic in narrow modes when the machine only supports
3419 word-sized arithmetic.
3421 Also look for a case where we have a SUBREG whose operand is the
3422 same as our result. If both modes are smaller than a word, we
3423 are simply interpreting a register in different modes and we
3424 can use the inner value. */
3426 if (GET_CODE (folded_arg0) == REG
3427 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3428 && subreg_lowpart_p (x))
3430 struct table_elt *elt;
3432 /* We can use HASH here since we know that canon_hash won't be called. */
3434 elt = lookup (folded_arg0,
3435 HASH (folded_arg0, GET_MODE (folded_arg0)),
3436 GET_MODE (folded_arg0));
3439 elt = elt->first_same_value;
3441 for (; elt; elt = elt->next_same_value)
3443 enum rtx_code eltcode = GET_CODE (elt->exp);
3445 /* Just check for unary and binary operations. */
3446 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3447 && GET_CODE (elt->exp) != SIGN_EXTEND
3448 && GET_CODE (elt->exp) != ZERO_EXTEND
3449 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3450 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
3452 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3454 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3455 op0 = fold_rtx (op0, NULL_RTX);
3457 op0 = equiv_constant (op0);
3459 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3462 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3463 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3464 && eltcode != DIV && eltcode != MOD
3465 && eltcode != UDIV && eltcode != UMOD
3466 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3467 && eltcode != ROTATE && eltcode != ROTATERT
3468 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3469 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3471 || CONSTANT_P (XEXP (elt->exp, 0)))
3472 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3473 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3475 || CONSTANT_P (XEXP (elt->exp, 1))))
3477 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3478 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3480 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3481 op0 = fold_rtx (op0, NULL_RTX);
3484 op0 = equiv_constant (op0);
3486 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3487 op1 = fold_rtx (op1, NULL_RTX);
3490 op1 = equiv_constant (op1);
3492 /* If we are looking for the low SImode part of
3493 (ashift:DI c (const_int 32)), it doesn't work
3494 to compute that in SImode, because a 32-bit shift
3495 in SImode is unpredictable. We know the value is 0. */
3497 && GET_CODE (elt->exp) == ASHIFT
3498 && GET_CODE (op1) == CONST_INT
3499 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3501 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3503 /* If the count fits in the inner mode's width,
3504 but exceeds the outer mode's width,
3505 the value will get truncated to 0 by the subreg. */
3509 /* If the count exceeds even the inner mode's width,
3510 don't fold this expression. */
3513 else if (op0 && op1)
3514 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3518 else if (GET_CODE (elt->exp) == SUBREG
3519 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3520 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3522 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3523 new = copy_rtx (SUBREG_REG (elt->exp));
3534 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3535 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3536 new = lookup_as_function (XEXP (x, 0), code);
3538 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3542 /* If we are not actually processing an insn, don't try to find the
3543 best address. Not only don't we care, but we could modify the
3544 MEM in an invalid way since we have no insn to validate against. */
3546 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3549 /* Even if we don't fold in the insn itself,
3550 we can safely do so here, in hopes of getting a constant. */
3551 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3553 HOST_WIDE_INT offset = 0;
3555 if (GET_CODE (addr) == REG
3556 && REGNO_QTY_VALID_P (REGNO (addr)))
3558 int addr_q = REG_QTY (REGNO (addr));
3559 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3561 if (GET_MODE (addr) == addr_ent->mode
3562 && addr_ent->const_rtx != NULL_RTX)
3563 addr = addr_ent->const_rtx;
3566 /* If address is constant, split it into a base and integer offset. */
3567 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3569 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3570 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3572 base = XEXP (XEXP (addr, 0), 0);
3573 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3575 else if (GET_CODE (addr) == LO_SUM
3576 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3577 base = XEXP (addr, 1);
3578 else if (GET_CODE (addr) == ADDRESSOF)
3579 return change_address (x, VOIDmode, addr);
3581 /* If this is a constant pool reference, we can fold it into its
3582 constant to allow better value tracking. */
3583 if (base && GET_CODE (base) == SYMBOL_REF
3584 && CONSTANT_POOL_ADDRESS_P (base))
3586 rtx constant = get_pool_constant (base);
3587 enum machine_mode const_mode = get_pool_mode (base);
3590 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3591 constant_pool_entries_cost = COST (constant);
3593 /* If we are loading the full constant, we have an equivalence. */
3594 if (offset == 0 && mode == const_mode)
3597 /* If this actually isn't a constant (weird!), we can't do
3598 anything. Otherwise, handle the two most common cases:
3599 extracting a word from a multi-word constant, and extracting
3600 the low-order bits. Other cases don't seem common enough to worry about. */
3602 if (! CONSTANT_P (constant))
3605 if (GET_MODE_CLASS (mode) == MODE_INT
3606 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3607 && offset % UNITS_PER_WORD == 0
3608 && (new = operand_subword (constant,
3609 offset / UNITS_PER_WORD,
3610 0, const_mode)) != 0)
3613 if (((BYTES_BIG_ENDIAN
3614 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3615 || (! BYTES_BIG_ENDIAN && offset == 0))
3616 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
3620 /* If this is a reference to a label at a known position in a jump
3621 table, we also know its value. */
3622 if (base && GET_CODE (base) == LABEL_REF)
3624 rtx label = XEXP (base, 0);
3625 rtx table_insn = NEXT_INSN (label);
3627 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3628 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3630 rtx table = PATTERN (table_insn);
3633 && (offset / GET_MODE_SIZE (GET_MODE (table))
3634 < XVECLEN (table, 0)))
3635 return XVECEXP (table, 0,
3636 offset / GET_MODE_SIZE (GET_MODE (table)));
3638 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3639 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3641 rtx table = PATTERN (table_insn);
3644 && (offset / GET_MODE_SIZE (GET_MODE (table))
3645 < XVECLEN (table, 1)))
3647 offset /= GET_MODE_SIZE (GET_MODE (table));
3648 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3651 if (GET_MODE (table) != Pmode)
3652 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3654 /* Indicate this is a constant. This isn't a
3655 valid form of CONST, but it will only be used
3656 to fold the next insns and then discarded, so it should be safe.
3659 Note this expression must be explicitly discarded,
3660 by cse_insn, else it may end up in a REG_EQUAL note
3661 and "escape" to cause problems elsewhere. */
3662 return gen_rtx_CONST (GET_MODE (new), new);
3670 #ifdef NO_FUNCTION_CSE
3672 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3678 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3679 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3680 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3690 mode_arg0 = VOIDmode;
3692 /* Try folding our operands.
3693 Then see which ones have constant values known. */
3695 fmt = GET_RTX_FORMAT (code);
3696 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3699 rtx arg = XEXP (x, i);
3700 rtx folded_arg = arg, const_arg = 0;
3701 enum machine_mode mode_arg = GET_MODE (arg);
3702 rtx cheap_arg, expensive_arg;
3703 rtx replacements[2];
3706 /* Most arguments are cheap, so handle them specially. */
3707 switch (GET_CODE (arg))
3710 /* This is the same as calling equiv_constant; it is duplicated here for speed. */
3712 if (REGNO_QTY_VALID_P (REGNO (arg)))
3714 int arg_q = REG_QTY (REGNO (arg));
3715 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3717 if (arg_ent->const_rtx != NULL_RTX
3718 && GET_CODE (arg_ent->const_rtx) != REG
3719 && GET_CODE (arg_ent->const_rtx) != PLUS)
3721 const_arg = gen_lowpart_if_possible (GET_MODE (arg),
3722 arg_ent->const_rtx);
3736 folded_arg = prev_insn_cc0;
3737 mode_arg = prev_insn_cc0_mode;
3738 const_arg = equiv_constant (folded_arg);
3743 folded_arg = fold_rtx (arg, insn);
3744 const_arg = equiv_constant (folded_arg);
3747 /* For the first three operands, see if the operand
3748 is constant or equivalent to a constant. */
3752 folded_arg0 = folded_arg;
3753 const_arg0 = const_arg;
3754 mode_arg0 = mode_arg;
3757 folded_arg1 = folded_arg;
3758 const_arg1 = const_arg;
3761 const_arg2 = const_arg;
3765 /* Pick the least expensive of the folded argument and an
3766 equivalent constant argument. */
3767 if (const_arg == 0 || const_arg == folded_arg
3768 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3769 cheap_arg = folded_arg, expensive_arg = const_arg;
3771 cheap_arg = const_arg, expensive_arg = folded_arg;
3773 /* Try to replace the operand with the cheapest of the two
3774 possibilities. If it doesn't work and this is either of the first
3775 two operands of a commutative operation, try swapping them.
3776 If THAT fails, try the more expensive, provided it is cheaper
3777 than what is already there. */
3779 if (cheap_arg == XEXP (x, i))
3782 if (insn == 0 && ! copied)
3788 /* Order the replacements from cheapest to most expensive. */
3789 replacements[0] = cheap_arg;
3790 replacements[1] = expensive_arg;
3792 for (j = 0; j < 2 && replacements[j]; j++)
3794 int old_cost = COST_IN (XEXP (x, i), code);
3795 int new_cost = COST_IN (replacements[j], code);
3797 /* Stop if what existed before was cheaper. Prefer constants
3798 in the case of a tie. */
3799 if (new_cost > old_cost
3800 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3803 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3806 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3807 || code == LTGT || code == UNEQ || code == ORDERED
3808 || code == UNORDERED)
3810 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3811 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3813 if (apply_change_group ())
3815 /* Swap them back to be invalid so that this loop can
3816 continue and flag them to be swapped back later. */
3819 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3831 /* Don't try to fold inside of a vector of expressions.
3832 Doing nothing is harmless. */
3836 /* If a commutative operation, place a constant integer as the second
3837 operand unless the first operand is also a constant integer. Otherwise,
3838 place any constant second unless the first operand is also a constant. */
3840 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3841 || code == LTGT || code == UNEQ || code == ORDERED
3842 || code == UNORDERED)
3844 if (must_swap || (const_arg0
3846 || (GET_CODE (const_arg0) == CONST_INT
3847 && GET_CODE (const_arg1) != CONST_INT))))
3849 register rtx tem = XEXP (x, 0);
3851 if (insn == 0 && ! copied)
3857 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3858 validate_change (insn, &XEXP (x, 1), tem, 1);
3859 if (apply_change_group ())
3861 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3862 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
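/* A minimal sketch of the canonical operand order chosen above
   (hypothetical expression type, kept under #if 0): for a commutative
   operator, a constant operand is moved into the second position, so
   for example (plus (const_int 4) (reg)) is rewritten as
   (plus (reg) (const_int 4)) and both spellings hash alike.  */
#if 0
struct commutative_expr { int op0_is_const, op1_is_const; void *op0, *op1; };

static void
constant_operand_second (struct commutative_expr *x)
{
  if (x->op0_is_const && ! x->op1_is_const)
    {
      void *tem = x->op0;

      x->op0 = x->op1;
      x->op1 = tem;
      x->op0_is_const = 0;
      x->op1_is_const = 1;
    }
}
#endif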
3867 /* If X is an arithmetic operation, see if we can simplify it. */
3869 switch (GET_RTX_CLASS (code))
3875 /* We can't simplify extension ops unless we know the original mode. */
3877 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3878 && mode_arg0 == VOIDmode)
3881 /* If we had a CONST, strip it off and put it back later if we fold. */
3883 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3883 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3884 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3886 new = simplify_unary_operation (code, mode,
3887 const_arg0 ? const_arg0 : folded_arg0,
3889 if (new != 0 && is_const)
3890 new = gen_rtx_CONST (mode, new);
3895 /* See what items are actually being compared and set FOLDED_ARG[01]
3896 to those values and CODE to the actual comparison code. If any are
3897 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3898 do anything if both operands are already known to be constant. */
3900 if (const_arg0 == 0 || const_arg1 == 0)
3902 struct table_elt *p0, *p1;
3903 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3904 enum machine_mode mode_arg1;
3906 #ifdef FLOAT_STORE_FLAG_VALUE
3907 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3909 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3910 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3911 false_rtx = CONST0_RTX (mode);
3915 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3916 &mode_arg0, &mode_arg1);
3917 const_arg0 = equiv_constant (folded_arg0);
3918 const_arg1 = equiv_constant (folded_arg1);
3920 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3921 what kinds of things are being compared, so we can't do
3922 anything with this comparison. */
3924 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3927 /* If we do not now have two constants being compared, see
3928 if we can nevertheless deduce some things about the comparison. */
3930 if (const_arg0 == 0 || const_arg1 == 0)
3932 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
3933 non-explicit constant? These aren't zero, but we
3934 don't know their sign. */
3935 if (const_arg1 == const0_rtx
3936 && (NONZERO_BASE_PLUS_P (folded_arg0)
3937 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address also be zero. */
3939 || GET_CODE (folded_arg0) == SYMBOL_REF
3941 || GET_CODE (folded_arg0) == LABEL_REF
3942 || GET_CODE (folded_arg0) == CONST))
3946 else if (code == NE)
3950 /* See if the two operands are the same. */
3952 if (folded_arg0 == folded_arg1
3953 || (GET_CODE (folded_arg0) == REG
3954 && GET_CODE (folded_arg1) == REG
3955 && (REG_QTY (REGNO (folded_arg0))
3956 == REG_QTY (REGNO (folded_arg1))))
3957 || ((p0 = lookup (folded_arg0,
3958 (safe_hash (folded_arg0, mode_arg0)
3959 & HASH_MASK), mode_arg0))
3960 && (p1 = lookup (folded_arg1,
3961 (safe_hash (folded_arg1, mode_arg0)
3962 & HASH_MASK), mode_arg0))
3963 && p0->first_same_value == p1->first_same_value))
3965 /* Sadly two equal NaNs are not equivalent. */
3966 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3967 || ! FLOAT_MODE_P (mode_arg0)
3968 || flag_unsafe_math_optimizations)
3969 return ((code == EQ || code == LE || code == GE
3970 || code == LEU || code == GEU || code == UNEQ
3971 || code == UNLE || code == UNGE || code == ORDERED)
3972 ? true_rtx : false_rtx);
3973 /* Take care for the FP compares we can resolve. */
3974 if (code == UNEQ || code == UNLE || code == UNGE)
3976 if (code == LTGT || code == LT || code == GT)
3980 /* If FOLDED_ARG0 is a register, see if the comparison we are
3981 doing now is either the same as we did before or the reverse
3982 (we only check the reverse if not floating-point). */
3983 else if (GET_CODE (folded_arg0) == REG)
3985 int qty = REG_QTY (REGNO (folded_arg0));
3987 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3989 struct qty_table_elem *ent = &qty_table[qty];
3991 if ((comparison_dominates_p (ent->comparison_code, code)
3992 || (! FLOAT_MODE_P (mode_arg0)
3993 && comparison_dominates_p (ent->comparison_code,
3994 reverse_condition (code))))
3995 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3997 && rtx_equal_p (ent->comparison_const,
3999 || (GET_CODE (folded_arg1) == REG
4000 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4001 return (comparison_dominates_p (ent->comparison_code, code)
4002 ? true_rtx : false_rtx);
4008 /* If we are comparing against zero, see if the first operand is
4009 equivalent to an IOR with a constant. If so, we may be able to
4010 determine the result of this comparison. */
4012 if (const_arg1 == const0_rtx)
4014 rtx y = lookup_as_function (folded_arg0, IOR);
4018 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4019 && GET_CODE (inner_const) == CONST_INT
4020 && INTVAL (inner_const) != 0)
4022 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4023 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4024 && (INTVAL (inner_const)
4025 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4026 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4028 #ifdef FLOAT_STORE_FLAG_VALUE
4029 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4031 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4032 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4033 false_rtx = CONST0_RTX (mode);
4057 new = simplify_relational_operation (code,
4058   (mode_arg0 != VOIDmode ? mode_arg0
4060   : GET_MODE (const_arg0 ? const_arg0 : folded_arg0) != VOIDmode
4064   ? GET_MODE (const_arg0 ? const_arg0 : folded_arg0)
4067   : GET_MODE (const_arg1 ? const_arg1 : folded_arg1)),
4070   const_arg0 ? const_arg0 : folded_arg0,
4071   const_arg1 ? const_arg1 : folded_arg1);
4072 #ifdef FLOAT_STORE_FLAG_VALUE
4073 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4075 if (new == const0_rtx)
4076 new = CONST0_RTX (mode);
4078 new = (CONST_DOUBLE_FROM_REAL_VALUE
4079 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4089 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4090 with that LABEL_REF as its second operand. If so, the result is
4091 the first operand of that MINUS. This handles switches with an
4092 ADDR_DIFF_VEC table. */
4093 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4096 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4097 : lookup_as_function (folded_arg0, MINUS);
4099 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4100 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4103 /* Now try for a CONST of a MINUS like the above. */
4104 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4105 : lookup_as_function (folded_arg0, CONST))) != 0
4106 && GET_CODE (XEXP (y, 0)) == MINUS
4107 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4108 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4109 return XEXP (XEXP (y, 0), 0);
4112 /* Likewise if the operands are in the other order. */
4113 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4116 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4117 : lookup_as_function (folded_arg1, MINUS);
4119 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4120 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4123 /* Now try for a CONST of a MINUS like the above. */
4124 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4125 : lookup_as_function (folded_arg1, CONST))) != 0
4126 && GET_CODE (XEXP (y, 0)) == MINUS
4127 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4128 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4129 return XEXP (XEXP (y, 0), 0);
4132 /* If second operand is a register equivalent to a negative
4133 CONST_INT, see if we can find a register equivalent to the
4134 positive constant. Make a MINUS if so. Don't do this for
4135 a non-negative constant since we might then alternate between
4136 choosing positive and negative constants. Having the positive
4137 constant previously-used is the more common case. Be sure
4138 the resulting constant is non-negative; if const_arg1 were
4139 the smallest negative number this would overflow: depending
4140 on the mode, this would either just be the same value (and
4141 hence not save anything) or be incorrect. */
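/* Example of the intended rewrite (hypothetical pseudos): for
   (plus (reg 100) (reg 101)) where reg 101 is known to hold
   (const_int -4) and some reg 102 is known to hold (const_int 4),
   we return (minus (reg 100) (reg 102)), reusing the register that
   already holds the positive constant.  */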
4142 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4143 && INTVAL (const_arg1) < 0
4144 /* This used to test
4146 -INTVAL (const_arg1) >= 0
4148 But the Sun V5.0 compilers mis-compiled that test. So
4149 instead we test for the problematic value in a more direct
4150 manner and hope the Sun compilers get it correct. */
4151 && INTVAL (const_arg1) !=
4152 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4153 && GET_CODE (folded_arg1) == REG)
4155 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4157 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4161 for (p = p->first_same_value; p; p = p->next_same_value)
4162 if (GET_CODE (p->exp) == REG)
4163 return simplify_gen_binary (MINUS, mode, folded_arg0,
4164 canon_reg (p->exp, NULL_RTX));
4169 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4170 If so, produce (PLUS Z C2-C). */
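/* For instance (register numbers are illustrative only),
   (minus (reg 100) (const_int 3)) where reg 100 is known to be
   (plus (reg 101) (const_int 8)) folds to
   (plus (reg 101) (const_int 5)).  */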
4171 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4173 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4174 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4175 return fold_rtx (plus_constant (copy_rtx (y),
4176 -INTVAL (const_arg1)),
4183 case SMIN: case SMAX: case UMIN: case UMAX:
4184 case IOR: case AND: case XOR:
4185 case MULT: case DIV: case UDIV:
4186 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4187 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4188 is known to be of similar form, we may be able to replace the
4189 operation with a combined operation. This may eliminate the
4190 intermediate operation if every use is simplified in this way.
4191 Note that the similar optimization done by combine.c only works
4192 if the intermediate operation's result has only one reference. */
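/* A concrete instance (hypothetical pseudos): if reg 100 was set from
   (ashift (reg 101) (const_int 3)) and we are now folding
   (ashift (reg 100) (const_int 2)), the two shift counts are composed
   with PLUS, giving (ashift (reg 101) (const_int 5)).  For a pair of
   ANDs the two constants are simply ANDed together by
   simplify_binary_operation below.  */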
4194 if (GET_CODE (folded_arg0) == REG
4195 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4198 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4199 rtx y = lookup_as_function (folded_arg0, code);
4201 enum rtx_code associate_code;
4205 || 0 == (inner_const
4206 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4207 || GET_CODE (inner_const) != CONST_INT
4208 /* If we have compiled a statement like
4209 "if (x == (x & mask1))", and now are looking at
4210 "x & mask2", we will have a case where the first operand
4211 of Y is the same as our first operand. Unless we detect
4212 this case, an infinite loop will result. */
4213 || XEXP (y, 0) == folded_arg0)
4216 /* Don't associate these operations if they are a PLUS with the
4217 same constant and it is a power of two. These might be doable
4218 with a pre- or post-increment. Similarly for two subtracts of
4219 identical powers of two with post decrement. */
4221 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
4222 && ((HAVE_PRE_INCREMENT
4223 && exact_log2 (INTVAL (const_arg1)) >= 0)
4224 || (HAVE_POST_INCREMENT
4225 && exact_log2 (INTVAL (const_arg1)) >= 0)
4226 || (HAVE_PRE_DECREMENT
4227 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4228 || (HAVE_POST_DECREMENT
4229 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4232 /* Compute the code used to compose the constants. For example,
4233 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
4236 = (code == MULT || code == DIV || code == UDIV ? MULT
4237 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
4239 new_const = simplify_binary_operation (associate_code, mode,
4240 const_arg1, inner_const);
4245 /* If we are associating shift operations, don't let this
4246 produce a shift of the size of the object or larger.
4247 This could occur when we follow a sign-extend by a right
4248 shift on a machine that does a sign-extend as a pair of shifts. */
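/* For example, on a 32-bit target a sign extension done as
   (ashiftrt (ashift X 24) 24) followed by a further right shift by 8
   would, if blindly composed, ask for a shift count of 32; the
   ASHIFTRT exception below clamps such a count to
   GET_MODE_BITSIZE (mode) - 1 instead.  */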
4251 if (is_shift && GET_CODE (new_const) == CONST_INT
4252 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4254 /* As an exception, we can turn an ASHIFTRT of this
4255 form into a shift of the number of bits - 1. */
4256 if (code == ASHIFTRT)
4257 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4262 y = copy_rtx (XEXP (y, 0));
4264 /* If Y contains our first operand (the most common way this
4265 can happen is if Y is a MEM), we could go into an infinite
4266 loop if we tried to fold it. So don't in that case. */
4268 if (! reg_mentioned_p (folded_arg0, y))
4269 y = fold_rtx (y, insn);
4271 return simplify_gen_binary (code, mode, y, new_const);
4279 new = simplify_binary_operation (code, mode,
4280 const_arg0 ? const_arg0 : folded_arg0,
4281 const_arg1 ? const_arg1 : folded_arg1);
4285 /* (lo_sum (high X) X) is simply X. */
4286 if (code == LO_SUM && const_arg0 != 0
4287 && GET_CODE (const_arg0) == HIGH
4288 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4294 new = simplify_ternary_operation (code, mode, mode_arg0,
4295 const_arg0 ? const_arg0 : folded_arg0,
4296 const_arg1 ? const_arg1 : folded_arg1,
4297 const_arg2 ? const_arg2 : XEXP (x, 2));
4301 /* Always eliminate CONSTANT_P_RTX at this stage. */
4302 if (code == CONSTANT_P_RTX)
4303 return (const_arg0 ? const1_rtx : const0_rtx);
4307 return new ? new : x;
4310 /* Return a constant value currently equivalent to X.
4311 Return 0 if we don't know one. */
4317 if (GET_CODE (x) == REG
4318 && REGNO_QTY_VALID_P (REGNO (x)))
4320 int x_q = REG_QTY (REGNO (x));
4321 struct qty_table_elem *x_ent = &qty_table[x_q];
4323 if (x_ent->const_rtx)
4324 x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4327 if (x == 0 || CONSTANT_P (x))
4330 /* If X is a MEM, try to fold it outside the context of any insn to see if
4331 it might be equivalent to a constant. That handles the case where it
4332 is a constant-pool reference. Then try to look it up in the hash table
4333 in case it is something whose value we have seen before. */
4335 if (GET_CODE (x) == MEM)
4337 struct table_elt *elt;
4339 x = fold_rtx (x, NULL_RTX);
4343 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4347 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4348 if (elt->is_const && CONSTANT_P (elt->exp))
4355 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4356 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4357 least-significant part of X.
4358 MODE specifies how big a part of X to return.
4360 If the requested operation cannot be done, 0 is returned.
4362 This is similar to gen_lowpart in emit-rtl.c. */
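/* For example (purely illustrative), asking for the QImode low part of
   a 4-byte SImode MEM yields a QImode MEM at the same address on a
   little-endian target, or at the address plus 3 on a big-endian one,
   provided the adjusted address is still valid.  */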
4365 gen_lowpart_if_possible (mode, x)
4366 enum machine_mode mode;
4369 rtx result = gen_lowpart_common (mode, x);
4373 else if (GET_CODE (x) == MEM)
4375 /* This is the only other case we handle. */
4376 register int offset = 0;
4379 if (WORDS_BIG_ENDIAN)
4380 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4381 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4382 if (BYTES_BIG_ENDIAN)
4383 /* Adjust the address so that the address-after-the-data is
4385 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4386 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4388 new = adjust_address_nv (x, mode, offset);
4389 if (! memory_address_p (mode, XEXP (new, 0)))
4398 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4399 branch. It will be zero if not.
4401 In certain cases, this can cause us to add an equivalence. For example,
4402 if we are following the taken case of
4403 if (i == 2)
4404 we can add the fact that `i' and '2' are now equivalent.
4406 In any case, we can record that this comparison was passed. If the same
4407 comparison is seen later, we will know its value. */
4410 record_jump_equiv (insn, taken)
4414 int cond_known_true;
4417 enum machine_mode mode, mode0, mode1;
4418 int reversed_nonequality = 0;
4421 /* Ensure this is the right kind of insn. */
4422 if (! any_condjump_p (insn))
4424 set = pc_set (insn);
4426 /* See if this jump condition is known true or false. */
4428 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4430 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4432 /* Get the type of comparison being done and the operands being compared.
4433 If we had to reverse a non-equality condition, record that fact so we
4434 know that it isn't valid for floating-point. */
4435 code = GET_CODE (XEXP (SET_SRC (set), 0));
4436 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4437 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4439 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4440 if (! cond_known_true)
4442 code = reversed_comparison_code_parts (code, op0, op1, insn);
4444 /* Don't remember if we can't find the inverse. */
4445 if (code == UNKNOWN)
4449 /* The mode is the mode of the non-constant. */
4451 if (mode1 != VOIDmode)
4454 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4457 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4458 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4459 Make any useful entries we can with that information. Called from
4460 above function and called recursively. */
4463 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4465 enum machine_mode mode;
4467 int reversed_nonequality;
4469 unsigned op0_hash, op1_hash;
4470 int op0_in_memory, op1_in_memory;
4471 struct table_elt *op0_elt, *op1_elt;
4473 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4474 we know that they are also equal in the smaller mode (this is also
4475 true for all smaller modes whether or not there is a SUBREG, but
4476 is not worth testing for with no SUBREG). */
4478 /* Note that GET_MODE (op0) may not equal MODE. */
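/* Illustrative case (made-up pseudos): on a 32-bit target, learning
   that (subreg:DI (reg:SI 100) 0) == (reg:DI 101) lets the recursive
   call below also record that (reg:SI 100) equals the SImode low part
   of reg 101.  */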
4479 if (code == EQ && GET_CODE (op0) == SUBREG
4480 && (GET_MODE_SIZE (GET_MODE (op0))
4481 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4483 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4484 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4486 record_jump_cond (code, mode, SUBREG_REG (op0),
4487 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4488 reversed_nonequality);
4491 if (code == EQ && GET_CODE (op1) == SUBREG
4492 && (GET_MODE_SIZE (GET_MODE (op1))
4493 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4495 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4496 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4498 record_jump_cond (code, mode, SUBREG_REG (op1),
4499 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4500 reversed_nonequality);
4503 /* Similarly, if this is an NE comparison, and either is a SUBREG
4504 making a smaller mode, we know the whole thing is also NE. */
4506 /* Note that GET_MODE (op0) may not equal MODE;
4507 if we test MODE instead, we can get an infinite recursion
4508 alternating between two modes each wider than MODE. */
4510 if (code == NE && GET_CODE (op0) == SUBREG
4511 && subreg_lowpart_p (op0)
4512 && (GET_MODE_SIZE (GET_MODE (op0))
4513 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4515 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4516 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4518 record_jump_cond (code, mode, SUBREG_REG (op0),
4519 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4520 reversed_nonequality);
4523 if (code == NE && GET_CODE (op1) == SUBREG
4524 && subreg_lowpart_p (op1)
4525 && (GET_MODE_SIZE (GET_MODE (op1))
4526 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4528 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4529 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4531 record_jump_cond (code, mode, SUBREG_REG (op1),
4532 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4533 reversed_nonequality);
4536 /* Hash both operands. */
4539 hash_arg_in_memory = 0;
4540 op0_hash = HASH (op0, mode);
4541 op0_in_memory = hash_arg_in_memory;
4547 hash_arg_in_memory = 0;
4548 op1_hash = HASH (op1, mode);
4549 op1_in_memory = hash_arg_in_memory;
4554 /* Look up both operands. */
4555 op0_elt = lookup (op0, op0_hash, mode);
4556 op1_elt = lookup (op1, op1_hash, mode);
4558 /* If both operands are already equivalent or if they are not in the
4559 table but are identical, do nothing. */
4560 if ((op0_elt != 0 && op1_elt != 0
4561 && op0_elt->first_same_value == op1_elt->first_same_value)
4562 || op0 == op1 || rtx_equal_p (op0, op1))
4565 /* If we aren't setting two things equal all we can do is save this
4566 comparison. Similarly if this is floating-point. In the latter
4567 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4568 If we record the equality, we might inadvertently delete code
4569 whose intent was to change -0 to +0. */
4571 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4573 struct qty_table_elem *ent;
4576 /* If we reversed a floating-point comparison, if OP0 is not a
4577 register, or if OP1 is neither a register nor a constant, we can't do anything. */
4580 if (GET_CODE (op1) != REG)
4581 op1 = equiv_constant (op1);
4583 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4584 || GET_CODE (op0) != REG || op1 == 0)
4587 /* Put OP0 in the hash table if it isn't already. This gives it a
4588 new quantity number. */
4591 if (insert_regs (op0, NULL, 0))
4593 rehash_using_reg (op0);
4594 op0_hash = HASH (op0, mode);
4596 /* If OP0 is contained in OP1, this changes its hash code
4597 as well. Faster to rehash than to check, except
4598 for the simple case of a constant. */
4599 if (! CONSTANT_P (op1))
4600 op1_hash = HASH (op1, mode);
4603 op0_elt = insert (op0, NULL, op0_hash, mode);
4604 op0_elt->in_memory = op0_in_memory;
4607 qty = REG_QTY (REGNO (op0));
4608 ent = &qty_table[qty];
4610 ent->comparison_code = code;
4611 if (GET_CODE (op1) == REG)
4613 /* Look it up again--in case op0 and op1 are the same. */
4614 op1_elt = lookup (op1, op1_hash, mode);
4616 /* Put OP1 in the hash table so it gets a new quantity number. */
4619 if (insert_regs (op1, NULL, 0))
4621 rehash_using_reg (op1);
4622 op1_hash = HASH (op1, mode);
4625 op1_elt = insert (op1, NULL, op1_hash, mode);
4626 op1_elt->in_memory = op1_in_memory;
4629 ent->comparison_const = NULL_RTX;
4630 ent->comparison_qty = REG_QTY (REGNO (op1));
4634 ent->comparison_const = op1;
4635 ent->comparison_qty = -1;
4641 /* If either side is still missing an equivalence, make it now,
4642 then merge the equivalences. */
4646 if (insert_regs (op0, NULL, 0))
4648 rehash_using_reg (op0);
4649 op0_hash = HASH (op0, mode);
4652 op0_elt = insert (op0, NULL, op0_hash, mode);
4653 op0_elt->in_memory = op0_in_memory;
4658 if (insert_regs (op1, NULL, 0))
4660 rehash_using_reg (op1);
4661 op1_hash = HASH (op1, mode);
4664 op1_elt = insert (op1, NULL, op1_hash, mode);
4665 op1_elt->in_memory = op1_in_memory;
4668 merge_equiv_classes (op0_elt, op1_elt);
4669 last_jump_equiv_class = op0_elt;
4672 /* CSE processing for one instruction.
4673 First simplify sources and addresses of all assignments
4674 in the instruction, using previously-computed equivalent values.
4675 Then install the new sources and destinations in the table
4676 of available values.
4678 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4679 the insn. It means that INSN is inside a libcall block. In this
4680 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4682 /* Data on one SET contained in the instruction. */
4686 /* The SET rtx itself. */
4688 /* The SET_SRC of the rtx (the original value, if it is changing). */
4690 /* The hash-table element for the SET_SRC of the SET. */
4691 struct table_elt *src_elt;
4692 /* Hash value for the SET_SRC. */
4694 /* Hash value for the SET_DEST. */
4696 /* The SET_DEST, with SUBREG, etc., stripped. */
4698 /* Nonzero if the SET_SRC is in memory. */
4700 /* Nonzero if the SET_SRC contains something
4701 whose value cannot be predicted and understood. */
4703 /* Original machine mode, in case it becomes a CONST_INT. */
4704 enum machine_mode mode;
4705 /* A constant equivalent for SET_SRC, if any. */
4707 /* Original SET_SRC value used for libcall notes. */
4709 /* Hash value of constant equivalent for SET_SRC. */
4710 unsigned src_const_hash;
4711 /* Table entry for constant equivalent for SET_SRC, if any. */
4712 struct table_elt *src_const_elt;
4716 cse_insn (insn, libcall_insn)
4720 register rtx x = PATTERN (insn);
4723 register int n_sets = 0;
4726 /* Records what this insn does to set CC0. */
4727 rtx this_insn_cc0 = 0;
4728 enum machine_mode this_insn_cc0_mode = VOIDmode;
4732 struct table_elt *src_eqv_elt = 0;
4733 int src_eqv_volatile = 0;
4734 int src_eqv_in_memory = 0;
4735 unsigned src_eqv_hash = 0;
4737 struct set *sets = (struct set *) 0;
4741 /* Find all the SETs and CLOBBERs in this instruction.
4742 Record all the SETs in the array `set' and count them.
4743 Also determine whether there is a CLOBBER that invalidates
4744 all memory references, or all references at varying addresses. */
4746 if (GET_CODE (insn) == CALL_INSN)
4748 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4750 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4751 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4752 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4756 if (GET_CODE (x) == SET)
4758 sets = (struct set *) alloca (sizeof (struct set));
4761 /* Ignore SETs that are unconditional jumps.
4762 They never need cse processing, so this does not hurt.
4763 The reason is not efficiency but rather
4764 so that we can test at the end for instructions
4765 that have been simplified to unconditional jumps
4766 and not be misled by unchanged instructions
4767 that were unconditional jumps to begin with. */
4768 if (SET_DEST (x) == pc_rtx
4769 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4772 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4773 The hard function value register is used only once, to copy to
4774 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4775 Ensure we invalidate the destination register. On the 80386 no
4776 other code would invalidate it since it is a fixed_reg.
4777 We need not check the return of apply_change_group; see canon_reg. */
4779 else if (GET_CODE (SET_SRC (x)) == CALL)
4781 canon_reg (SET_SRC (x), insn);
4782 apply_change_group ();
4783 fold_rtx (SET_SRC (x), insn);
4784 invalidate (SET_DEST (x), VOIDmode);
4789 else if (GET_CODE (x) == PARALLEL)
4791 register int lim = XVECLEN (x, 0);
4793 sets = (struct set *) alloca (lim * sizeof (struct set));
4795 /* Find all regs explicitly clobbered in this insn,
4796 and ensure they are not replaced with any other regs
4797 elsewhere in this insn.
4798 When a reg that is clobbered is also used for input,
4799 we should presume that that is for a reason,
4800 and we should not substitute some other register
4801 which is not supposed to be clobbered.
4802 Therefore, this loop cannot be merged into the one below
4803 because a CALL may precede a CLOBBER and refer to the
4804 value clobbered. We must not let a canonicalization do
4805 anything in that case. */
4806 for (i = 0; i < lim; i++)
4808 register rtx y = XVECEXP (x, 0, i);
4809 if (GET_CODE (y) == CLOBBER)
4811 rtx clobbered = XEXP (y, 0);
4813 if (GET_CODE (clobbered) == REG
4814 || GET_CODE (clobbered) == SUBREG)
4815 invalidate (clobbered, VOIDmode);
4816 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4817 || GET_CODE (clobbered) == ZERO_EXTRACT)
4818 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4822 for (i = 0; i < lim; i++)
4824 register rtx y = XVECEXP (x, 0, i);
4825 if (GET_CODE (y) == SET)
4827 /* As above, we ignore unconditional jumps and call-insns and
4828 ignore the result of apply_change_group. */
4829 if (GET_CODE (SET_SRC (y)) == CALL)
4831 canon_reg (SET_SRC (y), insn);
4832 apply_change_group ();
4833 fold_rtx (SET_SRC (y), insn);
4834 invalidate (SET_DEST (y), VOIDmode);
4836 else if (SET_DEST (y) == pc_rtx
4837 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4840 sets[n_sets++].rtl = y;
4842 else if (GET_CODE (y) == CLOBBER)
4844 /* If we clobber memory, canon the address.
4845 This does nothing when a register is clobbered
4846 because we have already invalidated the reg. */
4847 if (GET_CODE (XEXP (y, 0)) == MEM)
4848 canon_reg (XEXP (y, 0), NULL_RTX);
4850 else if (GET_CODE (y) == USE
4851 && ! (GET_CODE (XEXP (y, 0)) == REG
4852 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4853 canon_reg (y, NULL_RTX);
4854 else if (GET_CODE (y) == CALL)
4856 /* The result of apply_change_group can be ignored; see
4858 canon_reg (y, insn);
4859 apply_change_group ();
4864 else if (GET_CODE (x) == CLOBBER)
4866 if (GET_CODE (XEXP (x, 0)) == MEM)
4867 canon_reg (XEXP (x, 0), NULL_RTX);
4870 /* Canonicalize a USE of a pseudo register or memory location. */
4871 else if (GET_CODE (x) == USE
4872 && ! (GET_CODE (XEXP (x, 0)) == REG
4873 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4874 canon_reg (XEXP (x, 0), NULL_RTX);
4875 else if (GET_CODE (x) == CALL)
4877 /* The result of apply_change_group can be ignored; see canon_reg. */
4878 canon_reg (x, insn);
4879 apply_change_group ();
4883 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4884 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4885 is handled specially for this case, and if it isn't set, then there will
4886 be no equivalence for the destination. */
4887 if (n_sets == 1 && REG_NOTES (insn) != 0
4888 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4889 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4890 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4891 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
4893 /* Canonicalize sources and addresses of destinations.
4894 We do this in a separate pass to avoid problems when a MATCH_DUP is
4895 present in the insn pattern. In that case, we want to ensure that
4896 we don't break the duplicate nature of the pattern. So we will replace
4897 both operands at the same time. Otherwise, we would fail to find an
4898 equivalent substitution in the loop calling validate_change below.
4900 We used to suppress canonicalization of DEST if it appears in SRC,
4901 but we don't do this any more. */
4903 for (i = 0; i < n_sets; i++)
4905 rtx dest = SET_DEST (sets[i].rtl);
4906 rtx src = SET_SRC (sets[i].rtl);
4907 rtx new = canon_reg (src, insn);
4910 sets[i].orig_src = src;
4911 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4912 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4913 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4914 || (insn_code = recog_memoized (insn)) < 0
4915 || insn_data[insn_code].n_dups > 0)
4916 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4918 SET_SRC (sets[i].rtl) = new;
4920 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4922 validate_change (insn, &XEXP (dest, 1),
4923 canon_reg (XEXP (dest, 1), insn), 1);
4924 validate_change (insn, &XEXP (dest, 2),
4925 canon_reg (XEXP (dest, 2), insn), 1);
4928 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4929 || GET_CODE (dest) == ZERO_EXTRACT
4930 || GET_CODE (dest) == SIGN_EXTRACT)
4931 dest = XEXP (dest, 0);
4933 if (GET_CODE (dest) == MEM)
4934 canon_reg (dest, insn);
4937 /* Now that we have done all the replacements, we can apply the change
4938 group and see if they all work. Note that this will cause some
4939 canonicalizations that would have worked individually not to be applied
4940 because some other canonicalization didn't work, but this should not
4943 The result of apply_change_group can be ignored; see canon_reg. */
4945 apply_change_group ();
4947 /* Set sets[i].src_elt to the class each source belongs to.
4948 Detect assignments from or to volatile things
4949 and set set[i] to zero so they will be ignored
4950 in the rest of this function.
4952 Nothing in this loop changes the hash table or the register chains. */
4954 for (i = 0; i < n_sets; i++)
4956 register rtx src, dest;
4957 register rtx src_folded;
4958 register struct table_elt *elt = 0, *p;
4959 enum machine_mode mode;
4962 rtx src_related = 0;
4963 struct table_elt *src_const_elt = 0;
4964 int src_cost = MAX_COST;
4965 int src_eqv_cost = MAX_COST;
4966 int src_folded_cost = MAX_COST;
4967 int src_related_cost = MAX_COST;
4968 int src_elt_cost = MAX_COST;
4969 int src_regcost = MAX_COST;
4970 int src_eqv_regcost = MAX_COST;
4971 int src_folded_regcost = MAX_COST;
4972 int src_related_regcost = MAX_COST;
4973 int src_elt_regcost = MAX_COST;
4974 /* Set non-zero if we need to call force_const_mem with the
4975 contents of src_folded before using it. */
4976 int src_folded_force_flag = 0;
4978 dest = SET_DEST (sets[i].rtl);
4979 src = SET_SRC (sets[i].rtl);
4981 /* If SRC is a constant that has no machine mode,
4982 hash it with the destination's machine mode.
4983 This way we can keep different modes separate. */
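/* For instance, (const_int 5) has VOIDmode, so for
   (set (reg:HI 100) (const_int 5)) we hash the constant in HImode,
   while the same constant stored into an SImode register hashes
   separately in SImode.  (Register numbers are illustrative.)  */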
4985 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4986 sets[i].mode = mode;
4990 enum machine_mode eqvmode = mode;
4991 if (GET_CODE (dest) == STRICT_LOW_PART)
4992 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4994 hash_arg_in_memory = 0;
4995 src_eqv = fold_rtx (src_eqv, insn);
4996 src_eqv_hash = HASH (src_eqv, eqvmode);
4998 /* Find the equivalence class for the equivalent expression. */
5001 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5003 src_eqv_volatile = do_not_record;
5004 src_eqv_in_memory = hash_arg_in_memory;
5007 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5008 value of the INNER register, not the destination. So it is not
5009 a valid substitution for the source. But save it for later. */
5010 if (GET_CODE (dest) == STRICT_LOW_PART)
5013 src_eqv_here = src_eqv;
5015 /* Simplify any foldable subexpressions in SRC. Then get the fully-
5016 simplified result, which may not necessarily be valid. */
5017 src_folded = fold_rtx (src, insn);
5020 /* ??? This caused bad code to be generated for the m68k port with -O2.
5021 Suppose src is (CONST_INT -1), and that after truncation src_folded
5022 is (CONST_INT 3). Suppose src_folded is then used for src_const.
5023 At the end we will add src and src_const to the same equivalence
5024 class. We now have 3 and -1 on the same equivalence class. This
5025 causes later instructions to be mis-optimized. */
5026 /* If storing a constant in a bitfield, pre-truncate the constant
5027 so we will be able to record it later. */
5028 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5029 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5031 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5033 if (GET_CODE (src) == CONST_INT
5034 && GET_CODE (width) == CONST_INT
5035 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5036 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5038 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5039 << INTVAL (width)) - 1));
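/* For example, storing (const_int -1) into a 3-bit ZERO_EXTRACT really
   leaves the value 7 in the field, so we record 7 rather than -1
   (subject to the ??? caveat above).  */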
5043 /* Compute SRC's hash code, and also notice if it
5044 should not be recorded at all. In that case,
5045 prevent any further processing of this assignment. */
5047 hash_arg_in_memory = 0;
5050 sets[i].src_hash = HASH (src, mode);
5051 sets[i].src_volatile = do_not_record;
5052 sets[i].src_in_memory = hash_arg_in_memory;
5054 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5055 a pseudo, do not record SRC. Using SRC as a replacement for
5056 anything else will be incorrect in that situation. Note that
5057 this usually occurs only for stack slots, in which case all the
5058 RTL would be referring to SRC, so we don't lose any optimization
5059 opportunities by not having SRC in the hash table. */
5061 if (GET_CODE (src) == MEM
5062 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5063 && GET_CODE (dest) == REG
5064 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5065 sets[i].src_volatile = 1;
5068 /* It is no longer clear why we used to do this, but it doesn't
5069 appear to still be needed. So let's try without it since this
5070 code hurts cse'ing widened ops. */
5071 /* If source is a perverse subreg (such as QI treated as an SI),
5072 treat it as volatile. It may do the work of an SI in one context
5073 where the extra bits are not being used, but cannot replace an SI in general. */
5075 if (GET_CODE (src) == SUBREG
5076 && (GET_MODE_SIZE (GET_MODE (src))
5077 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5078 sets[i].src_volatile = 1;
5081 /* Locate all possible equivalent forms for SRC. Try to replace
5082 SRC in the insn with each cheaper equivalent.
5084 We have the following types of equivalents: SRC itself, a folded
5085 version, a value given in a REG_EQUAL note, or a value related
5088 Each of these equivalents may be part of an additional class
5089 of equivalents (if more than one is in the table, they must be in
5090 the same class; we check for this).
5092 If the source is volatile, we don't do any table lookups.
5094 We note any constant equivalent for possible later use in a REG_NOTE. */
5097 if (!sets[i].src_volatile)
5098 elt = lookup (src, sets[i].src_hash, mode);
5100 sets[i].src_elt = elt;
5102 if (elt && src_eqv_here && src_eqv_elt)
5104 if (elt->first_same_value != src_eqv_elt->first_same_value)
5106 /* The REG_EQUAL is indicating that two formerly distinct
5107 classes are now equivalent. So merge them. */
5108 merge_equiv_classes (elt, src_eqv_elt);
5109 src_eqv_hash = HASH (src_eqv, elt->mode);
5110 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5116 else if (src_eqv_elt)
5119 /* Try to find a constant somewhere and record it in `src_const'.
5120 Record its table element, if any, in `src_const_elt'. Look in
5121 any known equivalences first. (If the constant is not in the
5122 table, also set `sets[i].src_const_hash'). */
5124 for (p = elt->first_same_value; p; p = p->next_same_value)
5128 src_const_elt = elt;
5133 && (CONSTANT_P (src_folded)
5134 /* Consider (minus (label_ref L1) (label_ref L2)) as
5135 "constant" here so we will record it. This allows us
5136 to fold switch statements when an ADDR_DIFF_VEC is used. */
5137 || (GET_CODE (src_folded) == MINUS
5138 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5139 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5140 src_const = src_folded, src_const_elt = elt;
5141 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5142 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5144 /* If we don't know if the constant is in the table, get its
5145 hash code and look it up. */
5146 if (src_const && src_const_elt == 0)
5148 sets[i].src_const_hash = HASH (src_const, mode);
5149 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5152 sets[i].src_const = src_const;
5153 sets[i].src_const_elt = src_const_elt;
5155 /* If the constant and our source are both in the table, mark them as
5156 equivalent. Otherwise, if a constant is in the table but the source
5157 isn't, set ELT to it. */
5158 if (src_const_elt && elt
5159 && src_const_elt->first_same_value != elt->first_same_value)
5160 merge_equiv_classes (elt, src_const_elt);
5161 else if (src_const_elt && elt == 0)
5162 elt = src_const_elt;
5164 /* See if there is a register linearly related to a constant
5165 equivalent of SRC. */
5167 && (GET_CODE (src_const) == CONST
5168 || (src_const_elt && src_const_elt->related_value != 0)))
5170 src_related = use_related_value (src_const, src_const_elt);
5173 struct table_elt *src_related_elt
5174 = lookup (src_related, HASH (src_related, mode), mode);
5175 if (src_related_elt && elt)
5177 if (elt->first_same_value
5178 != src_related_elt->first_same_value)
5179 /* This can occur when we previously saw a CONST
5180 involving a SYMBOL_REF and then see the SYMBOL_REF
5181 twice. Merge the involved classes. */
5182 merge_equiv_classes (elt, src_related_elt);
5185 src_related_elt = 0;
5187 else if (src_related_elt && elt == 0)
5188 elt = src_related_elt;
5192 /* See if we have a CONST_INT that is already in a register in a wider mode. */
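/* E.g. if (const_int 3) is needed in HImode but some SImode register is
   already known to hold 3, we can use that register's HImode low part
   instead of materializing the constant again.  */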
5195 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5196 && GET_MODE_CLASS (mode) == MODE_INT
5197 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5199 enum machine_mode wider_mode;
5201 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5202 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5203 && src_related == 0;
5204 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5206 struct table_elt *const_elt
5207 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5212 for (const_elt = const_elt->first_same_value;
5213 const_elt; const_elt = const_elt->next_same_value)
5214 if (GET_CODE (const_elt->exp) == REG)
5216 src_related = gen_lowpart_if_possible (mode,
5223 /* Another possibility is that we have an AND with a constant in
5224 a mode narrower than a word. If so, it might have been generated
5225 as part of an "if" which would narrow the AND. If we already
5226 have done the AND in a wider mode, we can use a SUBREG of that value. */
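/* Sketch (hypothetical pseudos): for
	(and:QI (subreg:QI (reg:SI 100) 0) (const_int 15))
   we look up (and:SI (reg:SI 100) (const_int 15)); if some register is
   already known to hold that wider AND, its QImode low part is used as
   src_related.  */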
5229 if (flag_expensive_optimizations && ! src_related
5230 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5231 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5233 enum machine_mode tmode;
5234 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5236 for (tmode = GET_MODE_WIDER_MODE (mode);
5237 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5238 tmode = GET_MODE_WIDER_MODE (tmode))
5240 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5241 struct table_elt *larger_elt;
5245 PUT_MODE (new_and, tmode);
5246 XEXP (new_and, 0) = inner;
5247 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5248 if (larger_elt == 0)
5251 for (larger_elt = larger_elt->first_same_value;
5252 larger_elt; larger_elt = larger_elt->next_same_value)
5253 if (GET_CODE (larger_elt->exp) == REG)
5256 = gen_lowpart_if_possible (mode, larger_elt->exp);
5266 #ifdef LOAD_EXTEND_OP
5267 /* See if a MEM has already been loaded with a widening operation;
5268 if it has, we can use a subreg of that. Many CISC machines
5269 also have such operations, but this is only likely to be
5270 beneficial on these machines. */
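/* For instance, on a target whose LOAD_EXTEND_OP is ZERO_EXTEND, if
   (zero_extend:SI (mem:QI addr)) has already been loaded into a
   register, a later QImode read of the same MEM can use the QImode low
   part of that register instead of reloading memory.  */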
5272 if (flag_expensive_optimizations && src_related == 0
5273 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5274 && GET_MODE_CLASS (mode) == MODE_INT
5275 && GET_CODE (src) == MEM && ! do_not_record
5276 && LOAD_EXTEND_OP (mode) != NIL)
5278 enum machine_mode tmode;
5280 /* Set what we are trying to extend and the operation it might
5281 have been extended with. */
5282 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5283 XEXP (memory_extend_rtx, 0) = src;
5285 for (tmode = GET_MODE_WIDER_MODE (mode);
5286 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5287 tmode = GET_MODE_WIDER_MODE (tmode))
5289 struct table_elt *larger_elt;
5291 PUT_MODE (memory_extend_rtx, tmode);
5292 larger_elt = lookup (memory_extend_rtx,
5293 HASH (memory_extend_rtx, tmode), tmode);
5294 if (larger_elt == 0)
5297 for (larger_elt = larger_elt->first_same_value;
5298 larger_elt; larger_elt = larger_elt->next_same_value)
5299 if (GET_CODE (larger_elt->exp) == REG)
5301 src_related = gen_lowpart_if_possible (mode,
5310 #endif /* LOAD_EXTEND_OP */
5312 if (src == src_folded)
5315 /* At this point, ELT, if non-zero, points to a class of expressions
5316 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5317 and SRC_RELATED, if non-zero, each contain additional equivalent
5318 expressions. Prune these latter expressions by deleting expressions
5319 already in the equivalence class.
5321 Check for an equivalent identical to the destination. If found,
5322 this is the preferred equivalent since it will likely lead to
5323 elimination of the insn. Indicate this by placing it in
5327 elt = elt->first_same_value;
5328 for (p = elt; p; p = p->next_same_value)
5330 enum rtx_code code = GET_CODE (p->exp);
5332 /* If the expression is not valid, ignore it. Then we do not
5333 have to check for validity below. In most cases, we can use
5334 `rtx_equal_p', since canonicalization has already been done. */
5335 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5338 /* Also skip paradoxical subregs, unless that's what we're
5341 && (GET_MODE_SIZE (GET_MODE (p->exp))
5342 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5344 && GET_CODE (src) == SUBREG
5345 && GET_MODE (src) == GET_MODE (p->exp)
5346 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5347 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5350 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5352 else if (src_folded && GET_CODE (src_folded) == code
5353 && rtx_equal_p (src_folded, p->exp))
5355 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5356 && rtx_equal_p (src_eqv_here, p->exp))
5358 else if (src_related && GET_CODE (src_related) == code
5359 && rtx_equal_p (src_related, p->exp))
5362 /* This is the same as the destination of the insn; we want
5363 to prefer it. Copy it to src_related. The code below will
5364 then give it a negative cost. */
5365 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5369 /* Find the cheapest valid equivalent, trying all the available
5370 possibilities. Prefer items not in the hash table to ones
5371 that are, when the costs are equal. Note that we can never
5372 worsen an insn as the current contents will also succeed.
5373 If we find an equivalent identical to the destination, use it as best,
5374 since this insn will probably be eliminated in that case. */
5377 if (rtx_equal_p (src, dest))
5378 src_cost = src_regcost = -1;
5381 src_cost = COST (src);
5382 src_regcost = approx_reg_cost (src);
5388 if (rtx_equal_p (src_eqv_here, dest))
5389 src_eqv_cost = src_eqv_regcost = -1;
5392 src_eqv_cost = COST (src_eqv_here);
5393 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5399 if (rtx_equal_p (src_folded, dest))
5400 src_folded_cost = src_folded_regcost = -1;
5403 src_folded_cost = COST (src_folded);
5404 src_folded_regcost = approx_reg_cost (src_folded);
5410 if (rtx_equal_p (src_related, dest))
5411 src_related_cost = src_related_regcost = -1;
5414 src_related_cost = COST (src_related);
5415 src_related_regcost = approx_reg_cost (src_related);
5419 /* If this was an indirect jump insn, a known label will really be
5420 cheaper even though it looks more expensive. */
5421 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5422 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5424 /* Terminate loop when replacement made. This must terminate since
5425 the current contents will be tested and will always be valid. */
5430 /* Skip invalid entries. */
5431 while (elt && GET_CODE (elt->exp) != REG
5432 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5433 elt = elt->next_same_value;
5435 /* A paradoxical subreg would be bad here: it'll be the right
5436 size, but later may be adjusted so that the upper bits aren't
5437 what we want. So reject it. */
5439 && GET_CODE (elt->exp) == SUBREG
5440 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5441 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5442 /* It is okay, though, if the rtx we're trying to match
5443 will ignore any of the bits we can't predict. */
5445 && GET_CODE (src) == SUBREG
5446 && GET_MODE (src) == GET_MODE (elt->exp)
5447 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5448 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5450 elt = elt->next_same_value;
5456 src_elt_cost = elt->cost;
5457 src_elt_regcost = elt->regcost;
5460 /* Find cheapest and skip it for the next time. For items
5461 of equal cost, use this order:
5462 src_folded, src, src_eqv, src_related and hash table entry. */
5464 && preferrable (src_folded_cost, src_folded_regcost,
5465 src_cost, src_regcost) <= 0
5466 && preferrable (src_folded_cost, src_folded_regcost,
5467 src_eqv_cost, src_eqv_regcost) <= 0
5468 && preferrable (src_folded_cost, src_folded_regcost,
5469 src_related_cost, src_related_regcost) <= 0
5470 && preferrable (src_folded_cost, src_folded_regcost,
5471 src_elt_cost, src_elt_regcost) <= 0)
5473 trial = src_folded, src_folded_cost = MAX_COST;
5474 if (src_folded_force_flag)
5475 trial = force_const_mem (mode, trial);
5478 && preferrable (src_cost, src_regcost,
5479 src_eqv_cost, src_eqv_regcost) <= 0
5480 && preferrable (src_cost, src_regcost,
5481 src_related_cost, src_related_regcost) <= 0
5482 && preferrable (src_cost, src_regcost,
5483 src_elt_cost, src_elt_regcost) <= 0)
5484 trial = src, src_cost = MAX_COST;
5485 else if (src_eqv_here
5486 && preferrable (src_eqv_cost, src_eqv_regcost,
5487 src_related_cost, src_related_regcost) <= 0
5488 && preferrable (src_eqv_cost, src_eqv_regcost,
5489 src_elt_cost, src_elt_regcost) <= 0)
5490 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5491 else if (src_related
5492 && preferrable (src_related_cost, src_related_regcost,
5493 src_elt_cost, src_elt_regcost) <= 0)
5494 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5497 trial = copy_rtx (elt->exp);
5498 elt = elt->next_same_value;
5499 src_elt_cost = MAX_COST;
5502 /* We don't normally have an insn matching (set (pc) (pc)), so
5503 check for this separately here. We will delete such an insn below.
5506 For other cases such as a table jump or conditional jump
5507 where we know the ultimate target, go ahead and replace the
5508 operand. While that may not make a valid insn, we will
5509 reemit the jump below (and also insert any necessary barriers). */
5511 if (n_sets == 1 && dest == pc_rtx
5513 || (GET_CODE (trial) == LABEL_REF
5514 && ! condjump_p (insn))))
5516 SET_SRC (sets[i].rtl) = trial;
5517 cse_jumps_altered = 1;
5521 /* Look for a substitution that makes a valid insn. */
5522 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5524 /* If we just made a substitution inside a libcall, then we
5525 need to make the same substitution in any notes attached
5526 to the RETVAL insn. */
5528 && (GET_CODE (sets[i].orig_src) == REG
5529 || GET_CODE (sets[i].orig_src) == SUBREG
5530 || GET_CODE (sets[i].orig_src) == MEM))
5531 replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5532 canon_reg (SET_SRC (sets[i].rtl), insn));
5534 /* The result of apply_change_group can be ignored; see
5537 validate_change (insn, &SET_SRC (sets[i].rtl),
5538 canon_reg (SET_SRC (sets[i].rtl), insn),
5540 apply_change_group ();
5544 /* If we previously found constant pool entries for
5545 constants and this is a constant, try making a
5546 pool entry. Put it in src_folded unless we already have done
5547 this, since that is where it likely came from.
5549 else if (constant_pool_entries_cost
5550 && CONSTANT_P (trial)
5551 /* Reject cases that will abort in decode_rtx_const.
5552 On the alpha when simplifying a switch, we get
5553 (const (truncate (minus (label_ref) (label_ref)))). */
5554 && ! (GET_CODE (trial) == CONST
5555 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5556 /* Likewise on IA-64, except without the truncate. */
5557 && ! (GET_CODE (trial) == CONST
5558 && GET_CODE (XEXP (trial, 0)) == MINUS
5559 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5560 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5562 || (GET_CODE (src_folded) != MEM
5563 && ! src_folded_force_flag))
5564 && GET_MODE_CLASS (mode) != MODE_CC
5565 && mode != VOIDmode)
5567 src_folded_force_flag = 1;
5569 src_folded_cost = constant_pool_entries_cost;
5573 src = SET_SRC (sets[i].rtl);
5575 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5576 However, there is an important exception: If both are registers
5577 that are not the head of their equivalence class, replace SET_SRC
5578 with the head of the class. If we do not do this, we will have
5579 both registers live over a portion of the basic block. This way,
5580 their lifetimes will likely abut instead of overlapping. */
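/* Concretely (made-up pseudos): if the insn is
   (set (reg 105) (reg 105)) and reg 103 is the first register of the
   same quantity, we rewrite the source to reg 103.  The set is no
   longer a no-op, but reg 105's lifetime now tends to abut reg 103's
   rather than overlap it.  */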
5581 if (GET_CODE (dest) == REG
5582 && REGNO_QTY_VALID_P (REGNO (dest)))
5584 int dest_q = REG_QTY (REGNO (dest));
5585 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5587 if (dest_ent->mode == GET_MODE (dest)
5588 && dest_ent->first_reg != REGNO (dest)
5589 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5590 /* Don't do this if the original insn had a hard reg as
5591 SET_SRC or SET_DEST. */
5592 && (GET_CODE (sets[i].src) != REG
5593 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5594 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5595 /* We can't call canon_reg here because it won't do anything if
5596 SRC is a hard register. */
5598 int src_q = REG_QTY (REGNO (src));
5599 struct qty_table_elem *src_ent = &qty_table[src_q];
5600 int first = src_ent->first_reg;
5602 = (first >= FIRST_PSEUDO_REGISTER
5603 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5605 /* We must use validate-change even for this, because this
5606 might be a special no-op instruction, suitable only to tag notes onto. */
5608 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5611 /* If we had a constant that is cheaper than what we are now
5612 setting SRC to, use that constant. We ignored it when we
5613 thought we could make this into a no-op. */
5614 if (src_const && COST (src_const) < COST (src)
5615 && validate_change (insn, &SET_SRC (sets[i].rtl),
5622 /* If we made a change, recompute SRC values. */
5623 if (src != sets[i].src)
5627 hash_arg_in_memory = 0;
5629 sets[i].src_hash = HASH (src, mode);
5630 sets[i].src_volatile = do_not_record;
5631 sets[i].src_in_memory = hash_arg_in_memory;
5632 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5635 /* If this is a single SET, we are setting a register, and we have an
5636 equivalent constant, we want to add a REG_NOTE. We don't want
5637 to write a REG_EQUAL note for a constant pseudo since verifying that
5638 that pseudo hasn't been eliminated is a pain. Such a note also
5639 won't help anything.
5641 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5642 which can be created for a reference to a compile time computable
5643 entry in a jump table. */
5645 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5646 && GET_CODE (src_const) != REG
5647 && ! (GET_CODE (src_const) == CONST
5648 && GET_CODE (XEXP (src_const, 0)) == MINUS
5649 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5650 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5652 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5654 /* Make sure that the rtx is not shared with any other insn. */
5655 src_const = copy_rtx (src_const);
5657 /* Record the actual constant value in a REG_EQUAL note, making
5658 a new one if one does not already exist. */
5660 XEXP (tem, 0) = src_const;
5662 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
5663 src_const, REG_NOTES (insn));
5665 /* If storing a constant value in a register that
5666 previously held the constant value 0,
5667 record this fact with a REG_WAS_0 note on this insn.
5669 Note that the *register* is required to have previously held 0,
5670 not just any register in the quantity and we must point to the
5671 insn that set that register to zero.
5673 Rather than track each register individually, we just see if
5674 the last set for this quantity was for this register. */
5676 if (REGNO_QTY_VALID_P (REGNO (dest)))
5678 int dest_q = REG_QTY (REGNO (dest));
5679 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5681 if (dest_ent->const_rtx == const0_rtx)
5683 /* See if we previously had a REG_WAS_0 note. */
5684 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5685 rtx const_insn = dest_ent->const_insn;
5687 if ((tem = single_set (const_insn)) != 0
5688 && rtx_equal_p (SET_DEST (tem), dest))
5691 XEXP (note, 0) = const_insn;
5694 = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5701 /* Now deal with the destination. */
5704 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5705 to the MEM or REG within it. */
5706 while (GET_CODE (dest) == SIGN_EXTRACT
5707 || GET_CODE (dest) == ZERO_EXTRACT
5708 || GET_CODE (dest) == SUBREG
5709 || GET_CODE (dest) == STRICT_LOW_PART)
5710 dest = XEXP (dest, 0);
5712 sets[i].inner_dest = dest;
5714 if (GET_CODE (dest) == MEM)
5716 #ifdef PUSH_ROUNDING
5717 /* Stack pushes invalidate the stack pointer. */
5718 rtx addr = XEXP (dest, 0);
5719 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5720 && XEXP (addr, 0) == stack_pointer_rtx)
5721 invalidate (stack_pointer_rtx, Pmode);
5723 dest = fold_rtx (dest, insn);
5726 /* Compute the hash code of the destination now,
5727 before the effects of this instruction are recorded,
5728 since the register values used in the address computation
5729 are those before this instruction. */
5730 sets[i].dest_hash = HASH (dest, mode);
5732 /* Don't enter a bit-field in the hash table
5733 because the value in it after the store
5734 may not equal what was stored, due to truncation. */
5736 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5737 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5739 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5741 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5742 && GET_CODE (width) == CONST_INT
5743 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5744 && ! (INTVAL (src_const)
5745 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5746 /* Exception: if the value is constant,
5747 and it won't be truncated, record it. */
5751 /* This is chosen so that the destination will be invalidated
5752 but no new value will be recorded.
5753 We must invalidate because sometimes constant
5754 values can be recorded for bitfields. */
5755 sets[i].src_elt = 0;
5756 sets[i].src_volatile = 1;
5762 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete the insn.
5764 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5766 /* One less use of the label this insn used to jump to. */
5767 if (JUMP_LABEL (insn) != 0)
5768 --LABEL_NUSES (JUMP_LABEL (insn));
5769 PUT_CODE (insn, NOTE);
5770 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5771 NOTE_SOURCE_FILE (insn) = 0;
5772 cse_jumps_altered = 1;
5773 /* No more processing for this set. */
5777 /* If this SET is now setting PC to a label, we know it used to
5778 be a conditional or computed branch. */
5779 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5781 /* We reemit the jump in as many cases as possible just in
5782 case the form of an unconditional jump is significantly
5783 different from a computed jump or conditional jump.
5785 If this insn has multiple sets, then reemitting the
5786 jump is nontrivial. So instead we just force rerecognition
5787 and hope for the best. */
5790 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5791 JUMP_LABEL (new) = XEXP (src, 0);
5792 LABEL_NUSES (XEXP (src, 0))++;
5796 INSN_CODE (insn) = -1;
5798 never_reached_warning (insn);
5800 /* Now emit a BARRIER after the unconditional jump. Do not bother
5801 deleting any unreachable code, let jump/flow do that. */
5802 if (NEXT_INSN (insn) != 0
5803 && GET_CODE (NEXT_INSN (insn)) != BARRIER)
5804 emit_barrier_after (insn);
5806 cse_jumps_altered = 1;
5810 /* If destination is volatile, invalidate it and then do no further
5811 processing for this assignment. */
5813 else if (do_not_record)
5815 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5816 invalidate (dest, VOIDmode);
5817 else if (GET_CODE (dest) == MEM)
5819 /* Outgoing arguments for a libcall don't
5820 affect any recorded expressions. */
5821 if (! libcall_insn || insn == libcall_insn)
5822 invalidate (dest, VOIDmode);
5824 else if (GET_CODE (dest) == STRICT_LOW_PART
5825 || GET_CODE (dest) == ZERO_EXTRACT)
5826 invalidate (XEXP (dest, 0), GET_MODE (dest));
5830 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5831 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5834 /* If setting CC0, record what it was set to, or a constant, if it
5835 is equivalent to a constant. If it is being set to a floating-point
5836 value, make a COMPARE with the appropriate constant of 0. If we
5837 don't do this, later code can interpret this as a test against
5838 const0_rtx, which can cause problems if we try to put it into an
5839 insn as a floating-point operand. */
5840 if (dest == cc0_rtx)
5842 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5843 this_insn_cc0_mode = mode;
5844 if (FLOAT_MODE_P (mode))
5845 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5851 /* Now enter all non-volatile source expressions in the hash table
5852 if they are not already present.
5853 Record their equivalence classes in src_elt.
5854 This way we can insert the corresponding destinations into
5855 the same classes even if the actual sources are no longer in them
5856 (having been invalidated). */
5858 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5859 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5861 register struct table_elt *elt;
5862 register struct table_elt *classp = sets[0].src_elt;
5863 rtx dest = SET_DEST (sets[0].rtl);
5864 enum machine_mode eqvmode = GET_MODE (dest);
5866 if (GET_CODE (dest) == STRICT_LOW_PART)
5868 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5871 if (insert_regs (src_eqv, classp, 0))
5873 rehash_using_reg (src_eqv);
5874 src_eqv_hash = HASH (src_eqv, eqvmode);
5876 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5877 elt->in_memory = src_eqv_in_memory;
5880 /* Check to see if src_eqv_elt is the same as a set source which
5881 does not yet have an elt, and if so set the elt of the set source to src_eqv_elt. */
5883 for (i = 0; i < n_sets; i++)
5884 if (sets[i].rtl && sets[i].src_elt == 0
5885 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5886 sets[i].src_elt = src_eqv_elt;
5889 for (i = 0; i < n_sets; i++)
5890 if (sets[i].rtl && ! sets[i].src_volatile
5891 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5893 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5895 /* REG_EQUAL in setting a STRICT_LOW_PART
5896 gives an equivalent for the entire destination register,
5897 not just for the subreg being stored in now.
5898 This is a more interesting equivalence, so we arrange later
5899 to treat the entire reg as the destination. */
5900 sets[i].src_elt = src_eqv_elt;
5901 sets[i].src_hash = src_eqv_hash;
5905 /* Insert source and constant equivalent into hash table, if not already present. */
5907 register struct table_elt *classp = src_eqv_elt;
5908 register rtx src = sets[i].src;
5909 register rtx dest = SET_DEST (sets[i].rtl);
5910 enum machine_mode mode
5911 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5913 if (sets[i].src_elt == 0)
5915 /* Don't put a hard register source into the table if this is
5916 the last insn of a libcall. In this case, we only need
5917 to put src_eqv_elt in src_elt. */
5918 if (GET_CODE (src) != REG
5919 || REGNO (src) >= FIRST_PSEUDO_REGISTER
5920 || ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5922 register struct table_elt *elt;
5924 /* Note that these insert_regs calls cannot remove
5925 any of the src_elt's, because they would have failed to
5926 match if not still valid. */
5927 if (insert_regs (src, classp, 0))
5929 rehash_using_reg (src);
5930 sets[i].src_hash = HASH (src, mode);
5932 elt = insert (src, classp, sets[i].src_hash, mode);
5933 elt->in_memory = sets[i].src_in_memory;
5934 sets[i].src_elt = classp = elt;
5937 sets[i].src_elt = classp;
5939 if (sets[i].src_const && sets[i].src_const_elt == 0
5940 && src != sets[i].src_const
5941 && ! rtx_equal_p (sets[i].src_const, src))
5942 sets[i].src_elt = insert (sets[i].src_const, classp,
5943 sets[i].src_const_hash, mode);
5946 else if (sets[i].src_elt == 0)
5947 /* If we did not insert the source into the hash table (e.g., it was
5948 volatile), note the equivalence class for the REG_EQUAL value, if any,
5949 so that the destination goes into that class. */
5950 sets[i].src_elt = src_eqv_elt;
5952 invalidate_from_clobbers (x);
5954 /* Some registers are invalidated by subroutine calls. Memory is
5955 invalidated by non-constant calls. */
5957 if (GET_CODE (insn) == CALL_INSN)
5959 if (! CONST_CALL_P (insn))
5960 invalidate_memory ();
5961 invalidate_for_call ();
5964 /* Now invalidate everything set by this instruction.
5965 If a SUBREG or other funny destination is being set,
5966 sets[i].rtl is still nonzero, so here we invalidate the reg
5967 a part of which is being set. */
5969 for (i = 0; i < n_sets; i++)
5972 /* We can't use the inner dest, because the mode associated with
5973 a ZERO_EXTRACT is significant. */
5974 register rtx dest = SET_DEST (sets[i].rtl);
5976 /* Needed for registers to remove the register from its
5977 previous quantity's chain.
5978 Needed for memory if this is a nonvarying address, unless
5979 we have just done an invalidate_memory that covers even those. */
5980 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5981 invalidate (dest, VOIDmode);
5982 else if (GET_CODE (dest) == MEM)
5984 /* Outgoing arguments for a libcall don't
5985 affect any recorded expressions. */
5986 if (! libcall_insn || insn == libcall_insn)
5987 invalidate (dest, VOIDmode);
5989 else if (GET_CODE (dest) == STRICT_LOW_PART
5990 || GET_CODE (dest) == ZERO_EXTRACT)
5991 invalidate (XEXP (dest, 0), GET_MODE (dest));
5994 /* A volatile ASM invalidates everything. */
5995 if (GET_CODE (insn) == INSN
5996 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5997 && MEM_VOLATILE_P (PATTERN (insn)))
5998 flush_hash_table ();
6000 /* Make sure registers mentioned in destinations
6001 are safe for use in an expression to be inserted.
6002 This removes from the hash table
6003 any invalid entry that refers to one of these registers.
6005 We don't care about the return value from mention_regs because
6006 we are going to hash the SET_DEST values unconditionally. */
6008 for (i = 0; i < n_sets; i++)
6012 rtx x = SET_DEST (sets[i].rtl);
6014 if (GET_CODE (x) != REG)
6018 /* We used to rely on all references to a register becoming
6019 inaccessible when a register changes to a new quantity,
6020 since that changes the hash code. However, that is not
6021 safe, since after HASH_SIZE new quantities we get a
6022 hash 'collision' of a register with its own invalid
6023 entries. And since SUBREGs have been changed not to
6024 change their hash code with the hash code of the register,
6025 it wouldn't work any longer at all. So we have to check
6026 for any invalid references lying around now.
6027 This code is similar to the REG case in mention_regs,
6028 but it knows that reg_tick has been incremented, and
6029 it leaves reg_in_table as -1.  */
6030 unsigned int regno = REGNO (x);
6031 unsigned int endregno
6032 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6033 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
6036 for (i = regno; i < endregno; i++)
6038 if (REG_IN_TABLE (i) >= 0)
6040 remove_invalid_refs (i);
6041 REG_IN_TABLE (i) = -1;
6048 /* We may have just removed some of the src_elt's from the hash table.
6049 So replace each one with the current head of the same class. */
6051 for (i = 0; i < n_sets; i++)
6054 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6055 /* If elt was removed, find current head of same class,
6056 or 0 if nothing remains of that class. */
6058 register struct table_elt *elt = sets[i].src_elt;
6060 while (elt && elt->prev_same_value)
6061 elt = elt->prev_same_value;
6063 while (elt && elt->first_same_value == 0)
6064 elt = elt->next_same_value;
6065 sets[i].src_elt = elt ? elt->first_same_value : 0;
6069 /* Now insert the destinations into their equivalence classes. */
6071 for (i = 0; i < n_sets; i++)
6074 register rtx dest = SET_DEST (sets[i].rtl);
6075 rtx inner_dest = sets[i].inner_dest;
6076 register struct table_elt *elt;
6078 /* Don't record value if we are not supposed to risk allocating
6079 floating-point values in registers that might be wider than memory.  */
6081 if ((flag_float_store
6082 && GET_CODE (dest) == MEM
6083 && FLOAT_MODE_P (GET_MODE (dest)))
6084 /* Don't record BLKmode values, because we don't know the
6085 size of it, and can't be sure that other BLKmode values
6086 have the same or smaller size. */
6087 || GET_MODE (dest) == BLKmode
6088 /* Don't record values of destinations set inside a libcall block
6089 since we might delete the libcall. Things should have been set
6090 up so we won't want to reuse such a value, but we play it safe here.  */
6093 /* If we didn't put a REG_EQUAL value or a source into the hash
6094 table, there is no point in recording DEST. */
6095 || sets[i].src_elt == 0
6096 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6097 or SIGN_EXTEND, don't record DEST since it can cause
6098 some tracking to be wrong.
6100 ??? Think about this more later. */
6101 || (GET_CODE (dest) == SUBREG
6102 && (GET_MODE_SIZE (GET_MODE (dest))
6103 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6104 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6105 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6108 /* STRICT_LOW_PART isn't part of the value BEING set,
6109 and neither is the SUBREG inside it.
6110 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6111 if (GET_CODE (dest) == STRICT_LOW_PART)
6112 dest = SUBREG_REG (XEXP (dest, 0));
6114 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6115 /* Registers must also be inserted into chains for quantities. */
6116 if (insert_regs (dest, sets[i].src_elt, 1))
6118 /* If `insert_regs' changes something, the hash code must be recalculated.  */
6120 rehash_using_reg (dest);
6121 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6124 if (GET_CODE (inner_dest) == MEM
6125 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6126 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6127 that (MEM (ADDRESSOF (X))) is equivalent to Y.
6128 Consider the case in which the address of the MEM is
6129 passed to a function, which alters the MEM. Then, if we
6130 later use Y instead of the MEM we'll miss the update. */
6131 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6133 elt = insert (dest, sets[i].src_elt,
6134 sets[i].dest_hash, GET_MODE (dest));
6136 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6137 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6138 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
6141 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6142 narrower than M2, and both M1 and M2 are the same number of words,
6143 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6144 make that equivalence as well.
6146 However, BAR may have equivalences for which gen_lowpart_if_possible
6147 will produce a simpler value than it produces when applied directly to
6148 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all of
6149 BAR's equivalences. If we don't get a simplified form, make
6150 the SUBREG. It will not be used in an equivalence, but will
6151 cause two similar assignments to be detected.
6153 Note the loop below will find SUBREG_REG (DEST) since we have
6154 already entered SRC and DEST of the SET in the table. */
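/* A concrete, hypothetical instance on a 32-bit target: from
   (set (subreg:SI (reg:HI 100) 0) (reg:SI 101))
   we also want (reg:HI 100) recorded as equivalent to
   (subreg:HI (reg:SI 101) 0), or to a simpler form such as (reg:HI 102)
   if (reg:SI 101) is known to equal (zero_extend:SI (reg:HI 102)).  */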
6156 if (GET_CODE (dest) == SUBREG
6157 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6159 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6160 && (GET_MODE_SIZE (GET_MODE (dest))
6161 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6162 && sets[i].src_elt != 0)
6164 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6165 struct table_elt *elt, *classp = 0;
6167 for (elt = sets[i].src_elt->first_same_value; elt;
6168 elt = elt->next_same_value)
6172 struct table_elt *src_elt;
6174 /* Ignore invalid entries. */
6175 if (GET_CODE (elt->exp) != REG
6176 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6179 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
6181 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
6183 src_hash = HASH (new_src, new_mode);
6184 src_elt = lookup (new_src, src_hash, new_mode);
6186 /* Put the new source in the hash table if it isn't already there.  */
6190 if (insert_regs (new_src, classp, 0))
6192 rehash_using_reg (new_src);
6193 src_hash = HASH (new_src, new_mode);
6195 src_elt = insert (new_src, classp, src_hash, new_mode);
6196 src_elt->in_memory = elt->in_memory;
6198 else if (classp && classp != src_elt->first_same_value)
6199 /* Show that two things that we've seen before are
6200 actually the same. */
6201 merge_equiv_classes (src_elt, classp);
6203 classp = src_elt->first_same_value;
6204 /* Ignore invalid entries. */
6206 && GET_CODE (classp->exp) != REG
6207 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6208 classp = classp->next_same_value;
6213 /* Special handling for (set REG0 REG1) where REG0 is the
6214 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6215 be used in the sequel, so (if easily done) change this insn to
6216 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6217 that computed their value. Then REG1 will become a dead store
6218 and won't cloud the situation for later optimizations.
6220 Do not make this change if REG1 is a hard register, because it will
6221 then be used in the sequel and we may be changing a two-operand insn
6222 into a three-operand insn.
6224 Also do not do this if we are operating on a copy of INSN.
6226 Also don't do this if INSN ends a libcall; this would cause an unrelated
6227 register to be set in the middle of a libcall, and we then get bad code
6228 if the libcall is deleted. */
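/* An illustration with made-up pseudo numbers: given
     (set (reg:SI 200) (plus:SI (reg:SI 101) (const_int 4)))
     (set (reg:SI 100) (reg:SI 200))
   where (reg 100) is the cheaper, least-local register of the class, we
   rewrite the pair as
     (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))
     (set (reg:SI 200) (reg:SI 100))
   so that the second insn is likely to become a dead store.  */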
6230 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6231 && NEXT_INSN (PREV_INSN (insn)) == insn
6232 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6233 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6234 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6236 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6237 struct qty_table_elem *src_ent = &qty_table[src_q];
6239 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6240 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6242 rtx prev = prev_nonnote_insn (insn);
6244 /* Do not swap the registers around if the previous instruction
6245 attaches a REG_EQUIV note to REG1.
6247 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6248 from the pseudo that originally shadowed an incoming argument
6249 to another register. Some uses of REG_EQUIV might rely on it
6250 being attached to REG1 rather than REG2.
6252 This section previously turned the REG_EQUIV into a REG_EQUAL
6253 note. We cannot do that because REG_EQUIV may provide an
6254 uninitialised stack slot when REG_PARM_STACK_SPACE is used. */
6256 if (prev != 0 && GET_CODE (prev) == INSN
6257 && GET_CODE (PATTERN (prev)) == SET
6258 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6259 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6261 rtx dest = SET_DEST (sets[0].rtl);
6262 rtx src = SET_SRC (sets[0].rtl);
6265 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6266 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6267 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6268 apply_change_group ();
6270 /* If there was a REG_WAS_0 note on PREV, remove it. Move
6271 any REG_WAS_0 note on INSN to PREV. */
6272 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6274 remove_note (prev, note);
6276 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6279 remove_note (insn, note);
6280 XEXP (note, 1) = REG_NOTES (prev);
6281 REG_NOTES (prev) = note;
6284 /* If INSN has a REG_EQUAL note, and this note mentions
6285 REG0, then we must delete it, because the value in
6286 REG0 has changed. If the note's value is REG1, we must
6287 also delete it because that is now this insn's dest. */
6288 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6290 && (reg_mentioned_p (dest, XEXP (note, 0))
6291 || rtx_equal_p (src, XEXP (note, 0))))
6292 remove_note (insn, note);
6297 /* If this is a conditional jump insn, record any known equivalences due to
6298 the condition being tested. */
6300 last_jump_equiv_class = 0;
6301 if (GET_CODE (insn) == JUMP_INSN
6302 && n_sets == 1 && GET_CODE (x) == SET
6303 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6304 record_jump_equiv (insn, 0);
6307 /* If the previous insn set CC0 and this insn no longer references CC0,
6308 delete the previous insn. Here we use the fact that nothing expects CC0
6309 to be valid over an insn, which is true until the final pass. */
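/* For example (hypothetical pseudo number): if the previous insn was
   (set (cc0) (compare (reg:SI 100) (const_int 0)))
   and cse turned this insn into code that no longer mentions cc0, the
   comparison is useless and is turned into a deleted-insn note here.  */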
6310 if (prev_insn && GET_CODE (prev_insn) == INSN
6311 && (tem = single_set (prev_insn)) != 0
6312 && SET_DEST (tem) == cc0_rtx
6313 && ! reg_mentioned_p (cc0_rtx, x))
6315 PUT_CODE (prev_insn, NOTE);
6316 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
6317 NOTE_SOURCE_FILE (prev_insn) = 0;
6320 prev_insn_cc0 = this_insn_cc0;
6321 prev_insn_cc0_mode = this_insn_cc0_mode;
6327 /* Remove from the hash table all expressions that reference memory. */
6330 invalidate_memory ()
6333 register struct table_elt *p, *next;
6335 for (i = 0; i < HASH_SIZE; i++)
6336 for (p = table[i]; p; p = next)
6338 next = p->next_same_hash;
6340 remove_from_table (p, i);
6344 /* If ADDR is an address that implicitly affects the stack pointer, return
6345 1 and update the register tables to show the effect. Else, return 0. */
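/* A typical example (on a 32-bit target) is the address of a push,
   (pre_dec:SI (reg:SI sp)) inside a MEM: the autoincrement side effect
   changes the stack pointer, so anything recorded about its old contents
   must be invalidated.  */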
6348 addr_affects_sp_p (addr)
6351 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6352 && GET_CODE (XEXP (addr, 0)) == REG
6353 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6355 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6356 REG_TICK (STACK_POINTER_REGNUM)++;
6358 /* This should be *very* rare. */
6359 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6360 invalidate (stack_pointer_rtx, VOIDmode);
6368 /* Perform invalidation on the basis of everything about an insn
6369 except for invalidating the actual places that are SET in it.
6370 This includes the places CLOBBERed, and anything that might
6371 alias with something that is SET or CLOBBERed.
6373 X is the pattern of the insn. */
6376 invalidate_from_clobbers (x)
6379 if (GET_CODE (x) == CLOBBER)
6381 rtx ref = XEXP (x, 0);
6384 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6385 || GET_CODE (ref) == MEM)
6386 invalidate (ref, VOIDmode);
6387 else if (GET_CODE (ref) == STRICT_LOW_PART
6388 || GET_CODE (ref) == ZERO_EXTRACT)
6389 invalidate (XEXP (ref, 0), GET_MODE (ref));
6392 else if (GET_CODE (x) == PARALLEL)
6395 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6397 register rtx y = XVECEXP (x, 0, i);
6398 if (GET_CODE (y) == CLOBBER)
6400 rtx ref = XEXP (y, 0);
6401 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6402 || GET_CODE (ref) == MEM)
6403 invalidate (ref, VOIDmode);
6404 else if (GET_CODE (ref) == STRICT_LOW_PART
6405 || GET_CODE (ref) == ZERO_EXTRACT)
6406 invalidate (XEXP (ref, 0), GET_MODE (ref));
6412 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6413 and replace any registers in them with either an equivalent constant
6414 or the canonical form of the register. If we are inside an address,
6415 only do this if the address remains valid.
6417 OBJECT is 0 except when within a MEM in which case it is the MEM.
6419 Return the replacement for X. */
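/* For instance (hypothetical registers): a REG_EQUAL note whose value is
   (plus:SI (reg:SI 105) (const_int 8)) might have (reg 105) replaced by an
   older equivalent register, or by a constant if one is recorded for its
   quantity, provided the result is still valid where it appears.  */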
6422 cse_process_notes (x, object)
6426 enum rtx_code code = GET_CODE (x);
6427 const char *fmt = GET_RTX_FORMAT (code);
6443 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
6448 if (REG_NOTE_KIND (x) == REG_EQUAL)
6449 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6451 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6458 rtx new = cse_process_notes (XEXP (x, 0), object);
6459 /* We don't substitute VOIDmode constants into these rtx,
6460 since they would impede folding. */
6461 if (GET_MODE (new) != VOIDmode)
6462 validate_change (object, &XEXP (x, 0), new, 0);
6467 i = REG_QTY (REGNO (x));
6469 /* Return a constant or a constant register. */
6470 if (REGNO_QTY_VALID_P (REGNO (x)))
6472 struct qty_table_elem *ent = &qty_table[i];
6474 if (ent->const_rtx != NULL_RTX
6475 && (CONSTANT_P (ent->const_rtx)
6476 || GET_CODE (ent->const_rtx) == REG))
6478 rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6484 /* Otherwise, canonicalize this register. */
6485 return canon_reg (x, NULL_RTX);
6491 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6493 validate_change (object, &XEXP (x, i),
6494 cse_process_notes (XEXP (x, i), object), 0);
6499 /* Find common subexpressions between the end test of a loop and the beginning
6500 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6502 Often we have a loop where an expression in the exit test is used
6503 in the body of the loop. For example "while (*p) *q++ = *p++;".
6504 Because of the way we duplicate the loop exit test in front of the loop,
6505 however, we don't detect that common subexpression. This will be caught
6506 when global cse is implemented, but this is quite a common case.
6508 This function handles the most common cases of these common expressions.
6509 It is called after we have processed the basic block ending with the
6510 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6511 jumps to a label used only once. */
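/* In the `while (*p) *q++ = *p++;' example, the copied exit test loads *p
   before the loop and the body loads *p again inside it; the label at the
   loop start makes ordinary scanning forget the first load, and this
   function tries to recover that kind of equivalence.  */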
6514 cse_around_loop (loop_start)
6519 struct table_elt *p;
6521 /* If the jump at the end of the loop doesn't go to the start, we don't do anything.  */
6523 for (insn = PREV_INSN (loop_start);
6524 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6525 insn = PREV_INSN (insn))
6529 || GET_CODE (insn) != NOTE
6530 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6533 /* If the last insn of the loop (the end test) was an NE comparison,
6534 we will interpret it as an EQ comparison, since we fell through
6535 the loop. Any equivalences resulting from that comparison are
6536 therefore not valid and must be invalidated. */
6537 if (last_jump_equiv_class)
6538 for (p = last_jump_equiv_class->first_same_value; p;
6539 p = p->next_same_value)
6541 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6542 || (GET_CODE (p->exp) == SUBREG
6543 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6544 invalidate (p->exp, VOIDmode);
6545 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6546 || GET_CODE (p->exp) == ZERO_EXTRACT)
6547 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6550 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6551 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6553 The only thing we do with SET_DEST is invalidate entries, so we
6554 can safely process each SET in order. It is slightly less efficient
6555 to do so, but we only want to handle the most common cases.
6557 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6558 These pseudos won't have valid entries in any of the tables indexed
6559 by register number, such as reg_qty. We avoid out-of-range array
6560 accesses by not processing any instructions created after cse started. */
6562 for (insn = NEXT_INSN (loop_start);
6563 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6564 && INSN_UID (insn) < max_insn_uid
6565 && ! (GET_CODE (insn) == NOTE
6566 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6567 insn = NEXT_INSN (insn))
6570 && (GET_CODE (PATTERN (insn)) == SET
6571 || GET_CODE (PATTERN (insn)) == CLOBBER))
6572 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6573 else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6574 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6575 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6576 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6577 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6582 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6583 since they are done elsewhere. This function is called via note_stores. */
6586 invalidate_skipped_set (dest, set, data)
6589 void *data ATTRIBUTE_UNUSED;
6591 enum rtx_code code = GET_CODE (dest);
6594 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6595 /* There are times when an address can appear varying and be a PLUS
6596 during this scan when it would be a fixed address were we to know
6597 the proper equivalences. So invalidate all memory if there is
6598 a BLKmode or nonscalar memory reference or a reference to a
6599 variable address. */
6600 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6601 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6603 invalidate_memory ();
6607 if (GET_CODE (set) == CLOBBER
6614 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6615 invalidate (XEXP (dest, 0), GET_MODE (dest));
6616 else if (code == REG || code == SUBREG || code == MEM)
6617 invalidate (dest, VOIDmode);
6620 /* Invalidate all insns from START up to the end of the function or the
6621 next label. This is called when we wish to CSE around a block that is
6622 conditionally executed. */
6625 invalidate_skipped_block (start)
6630 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6631 insn = NEXT_INSN (insn))
6633 if (! INSN_P (insn))
6636 if (GET_CODE (insn) == CALL_INSN)
6638 if (! CONST_CALL_P (insn))
6639 invalidate_memory ();
6640 invalidate_for_call ();
6643 invalidate_from_clobbers (PATTERN (insn));
6644 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6648 /* If modifying X will modify the value in *DATA (which is really an
6649 `rtx *'), indicate that fact by setting the pointed to value to NULL_RTX.  */
6653 cse_check_loop_start (x, set, data)
6655 rtx set ATTRIBUTE_UNUSED;
6658 rtx *cse_check_loop_start_value = (rtx *) data;
6660 if (*cse_check_loop_start_value == NULL_RTX
6661 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6664 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6665 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6666 *cse_check_loop_start_value = NULL_RTX;
6669 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6670 a loop that starts with the label at LOOP_START.
6672 If X is a SET, we see if its SET_SRC is currently in our hash table.
6673 If so, we see if it has a value equal to some register used only in the
6674 loop exit code (as marked by jump.c).
6676 If those two conditions are true, we search backwards from the start of
6677 the loop to see if that same value was loaded into a register that still
6678 retains its value at the start of the loop.
6680 If so, we insert an insn after the load to copy the destination of that
6681 load into the equivalent register and (try to) replace our SET_SRC with that register.
6684 In any event, we invalidate whatever this SET or CLOBBER modifies. */
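/* A hedged sketch with made-up register numbers: X is
     (set (reg:SI 105) (mem:SI (reg:SI 99)))
   near the loop start, (reg:SI 103) is a cheaper register used only in the
   loop exit test and known equal to that MEM, and some insn P before
   LOOP_START did (set (reg:SI 104) (mem:SI (reg:SI 99))).  If nothing
   between P and LOOP_START stores into anything SET_SRC references, we
   emit (set (reg:SI 103) (reg:SI 104)) after P and rewrite X as
   (set (reg:SI 105) (reg:SI 103)).  */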
6687 cse_set_around_loop (x, insn, loop_start)
6692 struct table_elt *src_elt;
6694 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6695 are setting PC or CC0 or whose SET_SRC is already a register. */
6696 if (GET_CODE (x) == SET
6697 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6698 && GET_CODE (SET_SRC (x)) != REG)
6700 src_elt = lookup (SET_SRC (x),
6701 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6702 GET_MODE (SET_DEST (x)));
6705 for (src_elt = src_elt->first_same_value; src_elt;
6706 src_elt = src_elt->next_same_value)
6707 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6708 && COST (src_elt->exp) < COST (SET_SRC (x)))
6712 /* Look for an insn in front of LOOP_START that sets
6713 something in the desired mode to SET_SRC (x) before we hit
6714 a label or CALL_INSN. */
6716 for (p = prev_nonnote_insn (loop_start);
6717 p && GET_CODE (p) != CALL_INSN
6718 && GET_CODE (p) != CODE_LABEL;
6719 p = prev_nonnote_insn (p))
6720 if ((set = single_set (p)) != 0
6721 && GET_CODE (SET_DEST (set)) == REG
6722 && GET_MODE (SET_DEST (set)) == src_elt->mode
6723 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6725 /* We now have to ensure that nothing between P
6726 and LOOP_START modified anything referenced in
6727 SET_SRC (x). We know that nothing within the loop
6728 can modify it, or we would have invalidated it in
6731 rtx cse_check_loop_start_value = SET_SRC (x);
6732 for (q = p; q != loop_start; q = NEXT_INSN (q))
6734 note_stores (PATTERN (q),
6735 cse_check_loop_start,
6736 &cse_check_loop_start_value);
6738 /* If nothing was changed and we can replace our
6739 SET_SRC, add an insn after P to copy its destination
6740 to what we will be replacing SET_SRC with. */
6741 if (cse_check_loop_start_value
6742 && validate_change (insn, &SET_SRC (x),
6745 /* If this creates new pseudos, this is unsafe,
6746 because the regno of the new pseudo is unsuitable
6747 to index into reg_qty when cse_insn processes
6748 the new insn. Therefore, if a new pseudo was
6749 created, discard this optimization. */
6750 int nregs = max_reg_num ();
6752 = gen_move_insn (src_elt->exp, SET_DEST (set));
6753 if (nregs != max_reg_num ())
6755 if (! validate_change (insn, &SET_SRC (x),
6760 emit_insn_after (move, p);
6767 /* Deal with the destination of X affecting the stack pointer. */
6768 addr_affects_sp_p (SET_DEST (x));
6770 /* See comment on similar code in cse_insn for explanation of these tests.  */
6772 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6773 || GET_CODE (SET_DEST (x)) == MEM)
6774 invalidate (SET_DEST (x), VOIDmode);
6775 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6776 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6777 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6780 /* Find the end of INSN's basic block and return its range,
6781 the total number of SETs in all the insns of the block, the last insn of the
6782 block, and the branch path.
6784 The branch path indicates which branches should be followed. If a non-zero
6785 path size is specified, the block should be rescanned and a different set
6786 of branches will be taken. The branch path is only used if
6787 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6789 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6790 used to describe the block. It is filled in with the information about
6791 the current block. The incoming structure's branch path, if any, is used
6792 to construct the output branch path. */
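/* For instance, when a conditional jump skips a small block that nothing
   else branches into, the path records that branch as AROUND so the
   skipped insns can be invalidated and scanning can continue past them;
   on a rescan the same entry may be marked NOT_TAKEN to explore the other
   possibility.  */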
6795 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6797 struct cse_basic_block_data *data;
6804 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6805 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6806 int path_size = data->path_size;
6810 /* Update the previous branch path, if any. If the last branch was
6811 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6812 shorten the path by one and look at the previous branch. We know that
6813 at least one branch must have been taken if PATH_SIZE is non-zero. */
6814 while (path_size > 0)
6816 if (data->path[path_size - 1].status != NOT_TAKEN)
6818 data->path[path_size - 1].status = NOT_TAKEN;
6825 /* If the first instruction is marked with QImode, that means we've
6826 already processed this block. Our caller will look at DATA->LAST
6827 to figure out where to go next. We want to return the next block
6828 in the instruction stream, not some branched-to block somewhere
6829 else. We accomplish this by pretending our caller forbade us to
6830 follow jumps or skip blocks.
6831 if (GET_MODE (insn) == QImode)
6832 follow_jumps = skip_blocks = 0;
6834 /* Scan to end of this basic block. */
6835 while (p && GET_CODE (p) != CODE_LABEL)
6837 /* Don't cse out the end of a loop. This makes a difference
6838 only for the unusual loops that always execute at least once;
6839 all other loops have labels there so we will stop in any case.
6840 Cse'ing out the end of the loop is dangerous because it
6841 might cause an invariant expression inside the loop
6842 to be reused after the end of the loop. This would make it
6843 hard to move the expression out of the loop in loop.c,
6844 especially if it is one of several equivalent expressions
6845 and loop.c would like to eliminate it.
6847 If we are running after loop.c has finished, we can ignore
6848 the NOTE_INSN_LOOP_END. */
6850 if (! after_loop && GET_CODE (p) == NOTE
6851 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6854 /* Don't cse over a call to setjmp; on some machines (e.g., the VAX)
6855 the regs restored by the longjmp come from
6856 a later time than the setjmp. */
6857 if (GET_CODE (p) == NOTE
6858 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
6861 /* A PARALLEL can have lots of SETs in it,
6862 especially if it is really an ASM_OPERANDS. */
6863 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6864 nsets += XVECLEN (PATTERN (p), 0);
6865 else if (GET_CODE (p) != NOTE)
6868 /* Ignore insns made by CSE; they cannot affect the boundaries of the basic block.  */
6871 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6872 high_cuid = INSN_CUID (p);
6873 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6874 low_cuid = INSN_CUID (p);
6876 /* See if this insn is in our branch path. If it is and we are to take it, do so.  */
6878 if (path_entry < path_size && data->path[path_entry].branch == p)
6880 if (data->path[path_entry].status != NOT_TAKEN)
6883 /* Point to next entry in path, if any. */
6887 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6888 was specified, we haven't reached our maximum path length, there are
6889 insns following the target of the jump, this is the only use of the
6890 jump label, and the target label is preceded by a BARRIER.
6892 Alternatively, we can follow the jump if it branches around a
6893 block of code and there are no other branches into the block.
6894 In this case invalidate_skipped_block will be called to invalidate any
6895 registers set in the block when following the jump. */
6897 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6898 && GET_CODE (p) == JUMP_INSN
6899 && GET_CODE (PATTERN (p)) == SET
6900 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6901 && JUMP_LABEL (p) != 0
6902 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6903 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6905 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6906 if ((GET_CODE (q) != NOTE
6907 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6908 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
6909 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6912 /* If we ran into a BARRIER, this code is an extension of the
6913 basic block when the branch is taken. */
6914 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6916 /* Don't allow ourselves to keep walking around an
6917 always-executed loop. */
6918 if (next_real_insn (q) == next)
6924 /* Similarly, don't put a branch in our path more than once. */
6925 for (i = 0; i < path_entry; i++)
6926 if (data->path[i].branch == p)
6929 if (i != path_entry)
6932 data->path[path_entry].branch = p;
6933 data->path[path_entry++].status = TAKEN;
6935 /* This branch now ends our path. It was possible that we
6936 didn't see this branch the last time around (when the
6937 insn in front of the target was a JUMP_INSN that was
6938 turned into a no-op). */
6939 path_size = path_entry;
6942 /* Mark block so we won't scan it again later. */
6943 PUT_MODE (NEXT_INSN (p), QImode);
6945 /* Detect a branch around a block of code. */
6946 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6950 if (next_real_insn (q) == next)
6956 for (i = 0; i < path_entry; i++)
6957 if (data->path[i].branch == p)
6960 if (i != path_entry)
6963 /* This is no_labels_between_p (p, q) with an added check for
6964 reaching the end of a function (in case Q precedes P). */
6965 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6966 if (GET_CODE (tmp) == CODE_LABEL)
6971 data->path[path_entry].branch = p;
6972 data->path[path_entry++].status = AROUND;
6974 path_size = path_entry;
6977 /* Mark block so we won't scan it again later. */
6978 PUT_MODE (NEXT_INSN (p), QImode);
6985 data->low_cuid = low_cuid;
6986 data->high_cuid = high_cuid;
6987 data->nsets = nsets;
6990 /* If none of the jumps in the path were taken, set our path length to zero
6991 so a rescan won't be done. */
6992 for (i = path_size - 1; i >= 0; i--)
6993 if (data->path[i].status != NOT_TAKEN)
6997 data->path_size = 0;
6999 data->path_size = path_size;
7001 /* End the current branch path. */
7002 data->path[path_size].branch = 0;
7005 /* Perform cse on the instructions of a function.
7006 F is the first instruction.
7007 NREGS is one plus the highest pseudo-reg number used in the function.
7009 AFTER_LOOP is 1 if this is the cse call done after loop optimization
7010 (only if -frerun-cse-after-loop).
7012 Returns 1 if jump_optimize should be redone due to simplifications
7013 in conditional jump instructions. */
7016 cse_main (f, nregs, after_loop, file)
7022 struct cse_basic_block_data val;
7023 register rtx insn = f;
7026 cse_jumps_altered = 0;
7027 recorded_label_ref = 0;
7028 constant_pool_entries_cost = 0;
7032 init_alias_analysis ();
7036 max_insn_uid = get_max_uid ();
7038 reg_eqv_table = (struct reg_eqv_elem *)
7039 xmalloc (nregs * sizeof (struct reg_eqv_elem));
7041 #ifdef LOAD_EXTEND_OP
7043 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
7044 and change the code and mode as appropriate. */
7045 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7048 /* Reset the counter indicating how many elements have been made so far this run.  */
7050 n_elements_made = 0;
7052 /* Find the largest uid. */
7054 max_uid = get_max_uid ();
7055 uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
7057 /* Compute the mapping from uids to cuids.
7058 CUIDs are numbers assigned to insns, like uids,
7059 except that cuids increase monotonically through the code.
7060 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7061 between two insns is not affected by -g. */
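/* E.g., for the sequence INSN (uid 5), line-number NOTE, INSN (uid 9),
   the cuids assigned are 1, 1 and 2, so the distance between the two
   insns is the same whether or not the note is present.  */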
7063 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7065 if (GET_CODE (insn) != NOTE
7066 || NOTE_LINE_NUMBER (insn) < 0)
7067 INSN_CUID (insn) = ++i;
7069 /* Give a line number note the same cuid as preceding insn. */
7070 INSN_CUID (insn) = i;
7073 ggc_push_context ();
7075 /* Loop over basic blocks.
7076 Compute the maximum number of qty's needed for each basic block
7077 (which is 2 for each SET). */
7082 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7083 flag_cse_skip_blocks);
7085 /* If this basic block was already processed or has no sets, skip it. */
7086 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7088 PUT_MODE (insn, VOIDmode);
7089 insn = (val.last ? NEXT_INSN (val.last) : 0);
7094 cse_basic_block_start = val.low_cuid;
7095 cse_basic_block_end = val.high_cuid;
7096 max_qty = val.nsets * 2;
7099 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7100 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7103 /* Make MAX_QTY bigger to give us room to optimize
7104 past the end of this basic block, if that should prove useful. */
7110 /* If this basic block is being extended by following certain jumps,
7111 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7112 Otherwise, we start after this basic block. */
7113 if (val.path_size > 0)
7114 cse_basic_block (insn, val.last, val.path, 0);
7117 int old_cse_jumps_altered = cse_jumps_altered;
7120 /* When cse changes a conditional jump to an unconditional
7121 jump, we want to reprocess the block, since it will give
7122 us a new branch path to investigate. */
7123 cse_jumps_altered = 0;
7124 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7125 if (cse_jumps_altered == 0
7126 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7129 cse_jumps_altered |= old_cse_jumps_altered;
7142 if (max_elements_made < n_elements_made)
7143 max_elements_made = n_elements_made;
7146 end_alias_analysis ();
7148 free (reg_eqv_table);
7150 return cse_jumps_altered || recorded_label_ref;
7153 /* Process a single basic block. FROM and TO are the limits of the basic
7154 block. NEXT_BRANCH points to the branch path when following jumps or
7155 a null path when not following jumps.
7157 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
7158 loop. This is true when we are being called for the last time on a
7159 block and this CSE pass is before loop.c. */
7162 cse_basic_block (from, to, next_branch, around_loop)
7163 register rtx from, to;
7164 struct branch_path *next_branch;
7169 rtx libcall_insn = NULL_RTX;
7172 /* This array is undefined before max_reg, so only allocate
7173 the space actually needed and adjust the start. */
7176 = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
7177 * sizeof (struct qty_table_elem));
7178 qty_table -= max_reg;
7182 /* TO might be a label. If so, protect it from being deleted. */
7183 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7186 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7188 register enum rtx_code code = GET_CODE (insn);
7190 /* If we have processed 1,000 insns, flush the hash table to
7191 avoid extreme quadratic behavior. We must not include NOTEs
7192 in the count since there may be more of them when generating
7193 debugging information. If we clear the table at different
7194 times, code generated with -g -O might be different than code
7195 generated with -O but not -g.
7197 ??? This is a real kludge and needs to be done some other way.
7199 if (code != NOTE && num_insns++ > 1000)
7201 flush_hash_table ();
7205 /* See if this is a branch that is part of the path. If so, and it is
7206 to be taken, do so. */
7207 if (next_branch->branch == insn)
7209 enum taken status = next_branch++->status;
7210 if (status != NOT_TAKEN)
7212 if (status == TAKEN)
7213 record_jump_equiv (insn, 1);
7215 invalidate_skipped_block (NEXT_INSN (insn));
7217 /* Set the last insn as the jump insn; it doesn't affect cc0.
7218 Then follow this branch. */
7223 insn = JUMP_LABEL (insn);
7228 if (GET_MODE (insn) == QImode)
7229 PUT_MODE (insn, VOIDmode);
7231 if (GET_RTX_CLASS (code) == 'i')
7235 /* Process notes first so we have all notes in canonical forms when
7236 looking for duplicate operations. */
7238 if (REG_NOTES (insn))
7239 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7241 /* Track when we are inside a LIBCALL block. Inside such a block,
7242 we do not want to record destinations. The last insn of a
7243 LIBCALL block is not considered to be part of the block, since
7244 its destination is the result of the block and hence should be recorded.  */
7247 if (REG_NOTES (insn) != 0)
7249 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7250 libcall_insn = XEXP (p, 0);
7251 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7255 cse_insn (insn, libcall_insn);
7257 /* If we haven't already found an insn where we added a LABEL_REF, check this one.  */
7259 if (GET_CODE (insn) == INSN && ! recorded_label_ref
7260 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7262 recorded_label_ref = 1;
7265 /* If INSN is now an unconditional jump, skip to the end of our
7266 basic block by pretending that we just did the last insn in the
7267 basic block. If we are jumping to the end of our block, show
7268 that we can have one usage of TO. */
7270 if (any_uncondjump_p (insn))
7274 free (qty_table + max_reg);
7278 if (JUMP_LABEL (insn) == to)
7281 /* Maybe TO was deleted because the jump is unconditional.
7282 If so, there is nothing left in this basic block. */
7283 /* ??? Perhaps it would be smarter to set TO
7284 to whatever follows this insn,
7285 and pretend the basic block had always ended here. */
7286 if (INSN_DELETED_P (to))
7289 insn = PREV_INSN (to);
7292 /* See if it is ok to keep on going past the label
7293 which used to end our basic block. Remember that we incremented
7294 the count of that label, so we decrement it here. If we made
7295 a jump unconditional, TO_USAGE will be one; in that case, we don't
7296 want to count the use in that jump. */
7298 if (to != 0 && NEXT_INSN (insn) == to
7299 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7301 struct cse_basic_block_data val;
7304 insn = NEXT_INSN (to);
7306 /* If TO was the last insn in the function, we are done. */
7309 free (qty_table + max_reg);
7313 /* If TO was preceded by a BARRIER we are done with this block
7314 because it has no continuation. */
7315 prev = prev_nonnote_insn (to);
7316 if (prev && GET_CODE (prev) == BARRIER)
7318 free (qty_table + max_reg);
7322 /* Find the end of the following block. Note that we won't be
7323 following branches in this case. */
7326 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7328 /* If the tables we allocated have enough space left
7329 to handle all the SETs in the next basic block,
7330 continue through it. Otherwise, return,
7331 and that block will be scanned individually. */
7332 if (val.nsets * 2 + next_qty > max_qty)
7335 cse_basic_block_start = val.low_cuid;
7336 cse_basic_block_end = val.high_cuid;
7339 /* Prevent TO from being deleted if it is a label. */
7340 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7343 /* Back up so we process the first insn in the extension. */
7344 insn = PREV_INSN (insn);
7348 if (next_qty > max_qty)
7351 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7352 the previous insn is the only insn that branches to the head of a loop,
7353 we can cse into the loop. Don't do this if we changed the jump
7354 structure of a loop unless we aren't going to be following jumps. */
7356 if ((cse_jumps_altered == 0
7357 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7358 && around_loop && to != 0
7359 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7360 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
7361 && JUMP_LABEL (PREV_INSN (to)) != 0
7362 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
7363 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
7365 free (qty_table + max_reg);
7367 return to ? NEXT_INSN (to) : 0;
7370 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7371 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
7374 check_for_label_ref (rtl, data)
7378 rtx insn = (rtx) data;
7380 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7381 we must rerun jump since it needs to place the note. If this is a
7382 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7383 since no REG_LABEL will be added. */
7384 return (GET_CODE (*rtl) == LABEL_REF
7385 && INSN_UID (XEXP (*rtl, 0)) != 0
7386 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7389 /* Count the number of times registers are used (not set) in X.
7390 COUNTS is an array in which we accumulate the count, INCR is how much
7391 we count each register usage.
7393 Don't count a usage of DEST, which is the SET_DEST of a SET which
7394 contains X in its SET_SRC. This is because such a SET does not
7395 modify the liveness of DEST. */
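/* For example, in (set (reg:SI 117) (plus:SI (reg:SI 117) (const_int 1)))
   the use of (reg 117) inside the source is not counted when DEST is
   (reg 117); otherwise a register used only to increment itself would
   always look live.  */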
7398 count_reg_usage (x, counts, dest, incr)
7411 switch (code = GET_CODE (x))
7415 counts[REGNO (x)] += incr;
7428 /* If we are clobbering a MEM, mark any registers inside the address as being used.  */
7430 if (GET_CODE (XEXP (x, 0)) == MEM)
7431 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7435 /* Unless we are setting a REG, count everything in SET_DEST. */
7436 if (GET_CODE (SET_DEST (x)) != REG)
7437 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7439 /* If SRC has side-effects, then we can't delete this insn, so the
7440 usage of SET_DEST inside SRC counts.
7442 ??? Strictly-speaking, we might be preserving this insn
7443 because some other SET has side-effects, but that's hard
7444 to do and can't happen now. */
7445 count_reg_usage (SET_SRC (x), counts,
7446 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
7451 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
7456 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7458 /* Things used in a REG_EQUAL note aren't dead since loop may try to use them.  */
7461 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
7466 if (REG_NOTE_KIND (x) == REG_EQUAL
7467 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE))
7468 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7469 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7476 fmt = GET_RTX_FORMAT (code);
7477 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7480 count_reg_usage (XEXP (x, i), counts, dest, incr);
7481 else if (fmt[i] == 'E')
7482 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7483 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7487 /* Return true if set is live. */
7489 set_live_p (set, insn, counts)
7491 rtx insn ATTRIBUTE_UNUSED;
7498 if (set_noop_p (set))
7502 else if (GET_CODE (SET_DEST (set)) == CC0
7503 && !side_effects_p (SET_SRC (set))
7504 && ((tem = next_nonnote_insn (insn)) == 0
7506 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7509 else if (GET_CODE (SET_DEST (set)) != REG
7510 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7511 || counts[REGNO (SET_DEST (set))] != 0
7512 || side_effects_p (SET_SRC (set))
7513 /* An ADDRESSOF expression can turn into a use of the
7514 internal arg pointer, so always consider the
7515 internal arg pointer live. If it is truly dead,
7516 flow will delete the initializing insn. */
7517 || (SET_DEST (set) == current_function_internal_arg_pointer))
7522 /* Return true if insn is live. */
7525 insn_live_p (insn, counts)
7530 if (GET_CODE (PATTERN (insn)) == SET)
7531 return set_live_p (PATTERN (insn), insn, counts);
7532 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7534 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7536 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7538 if (GET_CODE (elt) == SET)
7540 if (set_live_p (elt, insn, counts))
7543 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7552 /* Return true if libcall is dead as a whole. */
7555 dead_libcall_p (insn)
7559 /* See if there's a REG_EQUAL note on this insn and try to
7560 replace the source with the REG_EQUAL expression.
7562 We assume that insns with REG_RETVALs can only be reg->reg
7563 copies at this point. */
7564 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7567 rtx set = single_set (insn);
7568 rtx new = simplify_rtx (XEXP (note, 0));
7571 new = XEXP (note, 0);
7573 if (set && validate_change (insn, &SET_SRC (set), new, 0))
7575 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7582 /* Scan all the insns and delete any that are dead; i.e., they store a register
7583 that is never used or they copy a register to itself.
7585 This is used to remove insns made obviously dead by cse, loop or other
7586 optimizations. It improves the heuristics in loop since it won't try to
7587 move dead invariants out of loops or make givs for dead quantities. The
7588 remaining passes of the compilation are also sped up. */
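/* For instance, once cse has replaced every use of (reg 120) with an
   equivalent (reg 118), the copy (set (reg 120) (reg 118)) leaves
   (reg 120) with a zero use count and is deleted here; a self-copy such
   as (set (reg 118) (reg 118)) is likewise removed as a no-op set.  */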
7591 delete_trivially_dead_insns (insns, nreg, preserve_basic_blocks)
7594 int preserve_basic_blocks;
7599 int in_libcall = 0, dead_libcall = 0;
7602 /* First count the number of times each register is used. */
7603 counts = (int *) xcalloc (nreg, sizeof (int));
7604 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7605 count_reg_usage (insn, counts, NULL_RTX, 1);
7607 /* Go from the last insn to the first and delete insns that only set unused
7608 registers or copy a register to itself. As we delete an insn, remove
7609 usage counts for registers it uses.
7611 The first jump optimization pass may leave a real insn as the last
7612 insn in the function. We must not skip that insn or we may end
7613 up deleting code that is not really dead. */
7614 insn = get_last_insn ();
7615 if (! INSN_P (insn))
7616 insn = prev_real_insn (insn);
7618 if (!preserve_basic_blocks)
7619 for (; insn; insn = prev)
7623 prev = prev_real_insn (insn);
7625 /* Don't delete any insns that are part of a libcall block unless
7626 we can delete the whole libcall block.
7628 Flow or loop might get confused if we did that. Remember
7629 that we are scanning backwards. */
7630 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7633 /* If the insn storing the return value is dead, the whole libcall is dead.
7634 Otherwise attempt to eliminate the libcall by doing a direct operation.  */
7636 if (!insn_live_p (insn, counts))
7644 dead_libcall = dead_libcall_p (insn);
7647 else if (in_libcall)
7648 live_insn = ! dead_libcall;
7650 live_insn = insn_live_p (insn, counts);
7652 /* If this is a dead insn, delete it and show registers in it aren't being used.  */
7657 count_reg_usage (insn, counts, NULL_RTX, -1);
7661 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7668 for (i = 0; i < n_basic_blocks; i++)
7669 for (bb = BASIC_BLOCK (i), insn = bb->end; insn != bb->head; insn = prev)
7673 prev = PREV_INSN (insn);
7677 /* Don't delete any insns that are part of a libcall block unless
7678 we can delete the whole libcall block.
7680 Flow or loop might get confused if we did that. Remember
7681 that we are scanning backwards. */
7682 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7685 /* If the insn storing the return value is dead, the whole libcall is dead.
7686 Otherwise attempt to eliminate the libcall by doing a direct operation.  */
7688 if (!insn_live_p (insn, counts))
7696 dead_libcall = dead_libcall_p (insn);
7699 else if (in_libcall)
7700 live_insn = ! dead_libcall;
7702 live_insn = insn_live_p (insn, counts);
7704 /* If this is a dead insn, delete it and show registers in it aren't being used.  */
7709 count_reg_usage (insn, counts, NULL_RTX, -1);
7710 if (insn == bb->end)
7711 bb->end = PREV_INSN (insn);
7712 flow_delete_insn (insn);
7715 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))