1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 /* stdio.h must precede rtl.h for FFS. */
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
33 #include "insn-config.h"
42 /* The basic idea of common subexpression elimination is to go
43 through the code, keeping a record of expressions that would
44 have the same value at the current scan point, and replacing
45 expressions encountered with the cheapest equivalent expression.
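As a purely illustrative example (not taken from any particular program),
consider source code such as

     x = (a + b) * c;
     y = (a + b) * d;

Within one extended basic block the second occurrence of a + b is replaced
by the pseudo register that already holds the first one, leaving only the
two multiplications to be computed.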
47 It is too complicated to keep track of the different possibilities
48 when control paths merge in this code; so, at each label, we forget all
49 that is known and start fresh. This can be described as processing each
50 extended basic block separately. We have a separate pass to perform
53 Note CSE can turn a conditional or computed jump into a nop or
54 an unconditional jump. When this occurs we arrange to run the jump
55 optimizer after CSE to delete the unreachable code.
57 We use two data structures to record the equivalent expressions:
58 a hash table for most expressions, and a vector of "quantity
59 numbers" to record equivalent (pseudo) registers.
61 The use of the special data structure for registers is desirable
62 because it is faster. It is possible because register references
63 contain a fairly small number, the register number, taken from
64 a contiguously allocated series, and two register references are
65 identical if they have the same number. General expressions
66 do not have any such thing, so the only way to retrieve the
67 information recorded on an expression other than a register
68 is to keep it in a hash table.
70 Registers and "quantity numbers":
72 At the start of each basic block, all of the (hardware and pseudo)
73 registers used in the function are given distinct quantity
74 numbers to indicate their contents. During scan, when the code
75 copies one register into another, we copy the quantity number.
76 When a register is loaded in any other way, we allocate a new
77 quantity number to describe the value generated by this operation.
78 `reg_qty' records what quantity a register is currently thought of containing.
81 All real quantity numbers are greater than or equal to `max_reg'.
82 If register N has not been assigned a quantity, reg_qty[N] will equal N.
84 Quantity numbers below `max_reg' do not exist and none of the `qty_table'
85 entries should be referenced with an index below `max_reg'.
87 We also maintain a bidirectional chain of registers for each
88 quantity number. The `qty_table` members `first_reg' and `last_reg',
89 and `reg_eqv_table' members `next' and `prev' hold these chains.
91 The first register in a chain is the one whose lifespan is least local.
92 Among equals, it is the one that was seen first.
93 We replace any equivalent register with that one.
95 If two registers have the same quantity number, it must be true that
96 REG expressions with qty_table `mode' are in the hash table for both
97 registers and are in the same class.
99 The converse is not true. Since hard registers may be referenced in
100 any mode, two REG expressions might be equivalent in the hash table
101 but not have the same quantity number if the quantity number of one
102 of the registers does not have the same mode as those expressions.
104 Constants and quantity numbers
106 When a quantity has a known constant value, that value is stored
107 in the appropriate qty_table `const_rtx'. This is in addition to
108 putting the constant in the hash table as is usual for non-regs.
110 Whether a reg or a constant is preferred is determined by the configuration
111 macro CONST_COSTS and will often depend on the constant value. In any
112 event, expressions containing constants can be simplified by fold_rtx.
114 When a quantity has a known nearly constant value (such as an address
115 of a stack slot), that value is stored in the appropriate qty_table `const_rtx'.
118 Integer constants don't have a machine mode. However, cse
119 determines the intended machine mode from the destination
120 of the instruction that moves the constant. The machine mode
121 is recorded in the hash table along with the actual RTL
122 constant expression so that different modes are kept separate.
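For instance (an illustrative case, not from any particular port),
(const_int 1) moved into an SImode register and (const_int 1) moved into
a DImode register give rise to two distinct table entries, one recorded
with SImode and one with DImode, even though the RTL constant itself is
the same object.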
126 To record known equivalences among expressions in general
127 we use a hash table called `table'. It has a fixed number of buckets
128 that contain chains of `struct table_elt' elements for expressions.
129 These chains connect the elements whose expressions have the same hash codes.
132 Other chains through the same elements connect the elements which
133 currently have equivalent values.
135 Register references in an expression are canonicalized before hashing
136 the expression. This is done using `reg_qty' and qty_table `first_reg'.
137 The hash code of a register reference is computed using the quantity
138 number, not the register number.
140 When the value of an expression changes, it is necessary to remove from the
141 hash table not just that expression but all expressions whose values
142 could be different as a result.
144 1. If the value changing is in memory, except in special cases
145 ANYTHING referring to memory could be changed. That is because
146 nobody knows where a pointer does not point.
147 The function `invalidate_memory' removes what is necessary.
149 The special cases are when the address is constant or is
150 a constant plus a fixed register such as the frame pointer
151 or a static chain pointer. When such addresses are stored in,
152 we can tell exactly which other such addresses must be invalidated
153 due to overlap. `invalidate' does this.
154 All expressions that refer to non-constant
155 memory addresses are also invalidated. `invalidate_memory' does this.
157 2. If the value changing is a register, all expressions
158 containing references to that register, and only those, must be removed from the hash table.
161 Because searching the entire hash table for expressions that contain
162 a register is very slow, we try to figure out when it isn't necessary.
163 Precisely, this is necessary only when expressions have been
164 entered in the hash table using this register, and then the value has
165 changed, and then another expression wants to be added to refer to
166 the register's new value. This sequence of circumstances is rare
167 within any one basic block.
169 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
170 reg_tick[i] is incremented whenever a value is stored in register i.
171 reg_in_table[i] holds -1 if no references to register i have been
172 entered in the table; otherwise, it contains the value reg_tick[i] had
173 when the references were entered. If we want to enter a reference
174 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
175 Until we want to enter a new entry, the mere fact that the two vectors
176 don't match means the entries are ignored if anyone tries to match them.
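A small illustrative trace (the numbers are made up): suppose
reg_tick[6] == 3 when (plus (reg 6) (const_int 4)) is entered, so
reg_in_table[6] is set to 3. A later store into register 6 bumps
reg_tick[6] to 4 and nothing is scanned. Only when another expression
mentioning register 6 is about to be entered do we notice that
reg_in_table[6] != reg_tick[6] and sweep the stale references out of
the table.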
178 Registers themselves are entered in the hash table as well as in
179 the equivalent-register chains. However, the vectors `reg_tick'
180 and `reg_in_table' do not apply to expressions which are simple
181 register references. These expressions are removed from the table
182 immediately when they become invalid, and this can be done even if
183 we do not immediately search for all the expressions that refer to the register.
186 A CLOBBER rtx in an instruction invalidates its operand for further
187 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
188 invalidates everything that resides in memory.
192 Constant expressions that differ only by an additive integer
193 are called related. When a constant expression is put in
194 the table, the related expression with no constant term
195 is also entered. These are made to point at each other
196 so that it is possible to find out if there exists any
197 register equivalent to an expression related to a given expression. */
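/* The following is a minimal, self-contained sketch (not part of the pass;
   all `sketch_' names are made up) of the quantity-number scheme described
   above: a register-to-register copy shares the source's quantity, any other
   store gets a fresh quantity, so two registers compare equal exactly while
   one still holds a copy of the other.  */
#if 0
#include <stdio.h>

#define SKETCH_MAX_REG 8		/* stands in for max_reg */

static int sketch_reg_qty[SKETCH_MAX_REG + 16];
static int sketch_next_qty;

static void
sketch_new_block (void)
{
  int i;

  /* Each register starts out "invalid": its quantity is its own number,
     which is below SKETCH_MAX_REG and therefore not a real quantity.  */
  for (i = 0; i < SKETCH_MAX_REG; i++)
    sketch_reg_qty[i] = i;
  sketch_next_qty = SKETCH_MAX_REG;
}

static void
sketch_store (int reg)
{
  /* A store that is not a register copy gets a fresh quantity.  */
  sketch_reg_qty[reg] = sketch_next_qty++;
}

static void
sketch_copy (int dst, int src)
{
  /* A register-to-register copy shares the source's quantity.  */
  if (sketch_reg_qty[src] < SKETCH_MAX_REG)
    sketch_store (src);
  sketch_reg_qty[dst] = sketch_reg_qty[src];
}

int
main (void)
{
  sketch_new_block ();
  sketch_store (3);		/* r3 = a + b : new quantity */
  sketch_copy (5, 3);		/* r5 = r3    : same quantity */
  printf ("%d\n", sketch_reg_qty[3] == sketch_reg_qty[5]);	/* prints 1 */
  sketch_store (3);		/* r3 clobbered: r5 keeps the old quantity */
  printf ("%d\n", sketch_reg_qty[3] == sketch_reg_qty[5]);	/* prints 0 */
  return 0;
}
#endif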
199 /* One plus largest register number used in this function. */
203 /* One plus largest instruction UID used in this function at time of cse_main call. */
206 static int max_insn_uid;
208 /* Length of qty_table vector. We know in advance we will not need
209 a quantity number this big. */
213 /* Next quantity number to be allocated.
214 This is 1 + the largest number needed so far. */
218 /* Per-qty information tracking.
220 `first_reg' and `last_reg' track the head and tail of the
221 chain of registers which currently contain this quantity.
223 `mode' contains the machine mode of this quantity.
225 `const_rtx' holds the rtx of the constant value of this
226 quantity, if known. A summation of the frame/arg pointer
227 and a constant can also be entered here. When this holds
228 a known value, `const_insn' is the insn which stored the constant value.
231 `comparison_{code,const,qty}' are used to track when a
232 comparison between a quantity and some constant or register has
233 been passed. In such a case, we know the results of the comparison
234 in case we see it again. These members record a comparison that
235 is known to be true. `comparison_code' holds the rtx code of such
236 a comparison, else it is set to UNKNOWN and the other two
237 comparison members are undefined. `comparison_const' holds
238 the constant being compared against, or zero if the comparison
239 is not against a constant. `comparison_qty' holds the quantity
240 being compared against when the result is known. If the comparison
241 is not with a register, `comparison_qty' is -1. */
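/* As an illustrative example (made-up registers and values): on the arm of
   a conditional jump where (lt (reg 4) (const_int 10)) is known to be true,
   the quantity of register 4 can record comparison_code == LT,
   comparison_const == (const_int 10) and comparison_qty == -1, so a later
   occurrence of the same test can be treated as known.  */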
243 struct qty_table_elem
247 rtx comparison_const;
249 unsigned int first_reg, last_reg;
250 enum machine_mode mode;
251 enum rtx_code comparison_code;
254 /* The table of all qtys, indexed by qty number. */
255 static struct qty_table_elem *qty_table;
258 /* For machines that have a CC0, we do not record its value in the hash
259 table since its use is guaranteed to be the insn immediately following
260 its definition and any other insn is presumed to invalidate it.
262 Instead, we store below the value last assigned to CC0. If it should
263 happen to be a constant, it is stored in preference to the actual
264 assigned value. In case it is a constant, we store the mode in which
265 the constant should be interpreted. */
267 static rtx prev_insn_cc0;
268 static enum machine_mode prev_insn_cc0_mode;
271 /* Previous actual insn. 0 if at first insn of basic block. */
273 static rtx prev_insn;
275 /* Insn being scanned. */
277 static rtx this_insn;
279 /* Indexed by register number; gives the number of the next (or
280 previous) register in the chain of registers sharing the same value.
283 Or -1 if this register is at the end of the chain.
285 If reg_qty[N] == N, reg_eqv_table[N].next is undefined. */
287 /* Per-register equivalence chain. */
293 /* The table of all register equivalence chains. */
294 static struct reg_eqv_elem *reg_eqv_table;
298 /* Next in hash chain. */
299 struct cse_reg_info *hash_next;
301 /* The next cse_reg_info structure in the free or used list. */
302 struct cse_reg_info *next;
307 /* The quantity number of the register's current contents. */
310 /* The number of times the register has been altered in the current basic block. */
314 /* The REG_TICK value at which rtx's containing this register are
315 valid in the hash table. If this does not equal the current
316 reg_tick value, such expressions existing in the hash table are invalid. */
321 /* A free list of cse_reg_info entries. */
322 static struct cse_reg_info *cse_reg_info_free_list;
324 /* A used list of cse_reg_info entries. */
325 static struct cse_reg_info *cse_reg_info_used_list;
326 static struct cse_reg_info *cse_reg_info_used_list_end;
328 /* A mapping from registers to cse_reg_info data structures. */
329 #define REGHASH_SHIFT 7
330 #define REGHASH_SIZE (1 << REGHASH_SHIFT)
331 #define REGHASH_MASK (REGHASH_SIZE - 1)
332 static struct cse_reg_info *reg_hash[REGHASH_SIZE];
334 #define REGHASH_FN(REGNO) \
335 (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
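/* As a worked example, pseudo register number 200 hashes to bucket
   (200 ^ (200 >> 7)) & 127 == (200 ^ 1) & 127 == 73.  */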
337 /* The last lookup we did into the cse_reg_info_tree. This allows us
338 to cache repeated lookups. */
339 static unsigned int cached_regno;
340 static struct cse_reg_info *cached_cse_reg_info;
342 /* A HARD_REG_SET containing all the hard registers for which there is
343 currently a REG expression in the hash table. Note the difference
344 from the above variables, which indicate if the REG is mentioned in some
345 expression in the table. */
347 static HARD_REG_SET hard_regs_in_table;
349 /* CUID of insn that starts the basic block currently being cse-processed. */
351 static int cse_basic_block_start;
353 /* CUID of insn that ends the basic block currently being cse-processed. */
355 static int cse_basic_block_end;
357 /* Vector mapping INSN_UIDs to cuids.
358 The cuids are like uids but always increase monotonically.
359 We use them to see whether a reg is used outside a given basic block. */
361 static int *uid_cuid;
363 /* Highest UID in UID_CUID. */
366 /* Get the cuid of an insn. */
368 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
370 /* Nonzero if this pass has made changes, and therefore it's
371 worthwhile to run the garbage collector. */
373 static int cse_altered;
375 /* Nonzero if cse has altered conditional jump insns
376 in such a way that jump optimization should be redone. */
378 static int cse_jumps_altered;
380 /* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
381 REG_LABEL; if so, we have to rerun jump after CSE to put in the note. */
382 static int recorded_label_ref;
384 /* canon_hash stores 1 in do_not_record
385 if it notices a reference to CC0, PC, or some other volatile subexpression. */
388 static int do_not_record;
390 #ifdef LOAD_EXTEND_OP
392 /* Scratch rtl used when looking for load-extended copy of a MEM. */
393 static rtx memory_extend_rtx;
396 /* canon_hash stores 1 in hash_arg_in_memory
397 if it notices a reference to memory within the expression being hashed. */
399 static int hash_arg_in_memory;
401 /* The hash table contains buckets which are chains of `struct table_elt's,
402 each recording one expression's information.
403 That expression is in the `exp' field.
405 The canon_exp field contains a canonical (from the point of view of
406 alias analysis) version of the `exp' field.
408 Those elements with the same hash code are chained in both directions
409 through the `next_same_hash' and `prev_same_hash' fields.
411 Each set of expressions with equivalent values
412 are on a two-way chain through the `next_same_value'
413 and `prev_same_value' fields, and all point with
414 the `first_same_value' field at the first element in
415 that chain. The chain is in order of increasing cost.
416 Each element's cost value is in its `cost' field.
418 The `in_memory' field is nonzero for elements that
419 involve any reference to memory. These elements are removed
420 whenever a write is done to an unidentified location in memory.
421 To be safe, we assume that a memory address is unidentified unless
422 the address is either a symbol constant or a constant plus
423 the frame pointer or argument pointer.
425 The `related_value' field is used to connect related expressions
426 (that differ by adding an integer).
427 The related expressions are chained in a circular fashion.
428 `related_value' is zero for expressions for which this
431 The `cost' field stores the cost of this element's expression.
432 The `regcost' field stores the value returned by approx_reg_cost for
433 this element's expression.
435 The `is_const' flag is set if the element is a constant (including
438 The `flag' field is used as a temporary during some search routines.
440 The `mode' field is usually the same as GET_MODE (`exp'), but
441 if `exp' is a CONST_INT and has no machine mode then the `mode'
442 field is the mode it was being used as. Each constant is
443 recorded separately for each mode it is used with. */
449 struct table_elt *next_same_hash;
450 struct table_elt *prev_same_hash;
451 struct table_elt *next_same_value;
452 struct table_elt *prev_same_value;
453 struct table_elt *first_same_value;
454 struct table_elt *related_value;
457 enum machine_mode mode;
463 /* We don't want a lot of buckets, because we rarely have very many
464 things stored in the hash table, and a lot of buckets slows
465 down a lot of loops that happen frequently. */
467 #define HASH_SIZE (1 << HASH_SHIFT)
468 #define HASH_MASK (HASH_SIZE - 1)
470 /* Compute hash code of X in mode M. Special-case the case where X is a pseudo
471 register (hard registers may require `do_not_record' to be set). */
474 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
475 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
476 : canon_hash (X, M)) & HASH_MASK)
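/* For instance, a pseudo register whose current quantity number is 42 hashes
   to (((unsigned) REG << 7) + 42) & HASH_MASK, where REG is the rtx code;
   no rtl walk is needed and `do_not_record' cannot be set.  Anything else
   goes through canon_hash.  */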
478 /* Determine whether register number N is considered a fixed register for the
479 purpose of approximating register costs.
480 It is desirable to replace other regs with fixed regs, to reduce need for non-fixed hard regs.
482 A reg wins if it is either the frame pointer or designated as fixed. */
483 #define FIXED_REGNO_P(N) \
484 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
485 || fixed_regs[N] || global_regs[N])
487 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
488 hard registers and pointers into the frame are the cheapest with a cost
489 of 0. Next come pseudos with a cost of one and other hard registers with
490 a cost of 2. Aside from these special cases, call `rtx_cost'. */
492 #define CHEAP_REGNO(N) \
493 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
494 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
495 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
496 || ((N) < FIRST_PSEUDO_REGISTER \
497 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
499 #define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
500 #define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))
502 /* Get the info associated with register N. */
504 #define GET_CSE_REG_INFO(N) \
505 (((N) == cached_regno && cached_cse_reg_info) \
506 ? cached_cse_reg_info : get_cse_reg_info ((N)))
508 /* Get the number of times this register has been updated in this
511 #define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
513 /* Get the point at which REG was recorded in the table. */
515 #define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
517 /* Get the quantity number for REG. */
519 #define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
521 /* Determine if the quantity number for register X represents a valid index
522 into the qty_table. */
524 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
526 static struct table_elt *table[HASH_SIZE];
528 /* Chain of `struct table_elt's made so far for this function
529 but currently removed from the table. */
531 static struct table_elt *free_element_chain;
533 /* Number of `struct table_elt' structures made so far for this function. */
535 static int n_elements_made;
537 /* Maximum value `n_elements_made' has had so far in this compilation
538 for functions previously processed. */
540 static int max_elements_made;
542 /* Surviving equivalence class when two equivalence classes are merged
543 by recording the effects of a jump in the last insn. Zero if the
544 last insn was not a conditional jump. */
546 static struct table_elt *last_jump_equiv_class;
548 /* Set to the cost of a constant pool reference if one was found for a
549 symbolic constant. If this was found, it means we should try to
550 convert constants into constant pool entries if they don't fit in the insn. */
553 static int constant_pool_entries_cost;
555 /* Define maximum length of a branch path. */
557 #define PATHLENGTH 10
559 /* This data describes a block that will be processed by cse_basic_block. */
561 struct cse_basic_block_data
563 /* Lowest CUID value of insns in block. */
565 /* Highest CUID value of insns in block. */
567 /* Total number of SETs in block. */
569 /* Last insn in the block. */
571 /* Size of current branch path, if any. */
573 /* Current branch path, indicating which branches will be taken. */
576 /* The branch insn. */
578 /* Whether it should be taken or not. AROUND is the same as taken
579 except that it is used when the destination label is not preceded by a BARRIER. */
581 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
585 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
586 virtual regs here because the simplify_*_operation routines are called
587 by integrate.c, which is called before virtual register instantiation.
589 ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
590 a header file so that their definitions can be shared with the
591 simplification routines in simplify-rtx.c. Until then, do not
592 change these macros without also changing the copy in simplify-rtx.c. */
594 #define FIXED_BASE_PLUS_P(X) \
595 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
596 || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])\
597 || (X) == virtual_stack_vars_rtx \
598 || (X) == virtual_incoming_args_rtx \
599 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
600 && (XEXP (X, 0) == frame_pointer_rtx \
601 || XEXP (X, 0) == hard_frame_pointer_rtx \
602 || ((X) == arg_pointer_rtx \
603 && fixed_regs[ARG_POINTER_REGNUM]) \
604 || XEXP (X, 0) == virtual_stack_vars_rtx \
605 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
606 || GET_CODE (X) == ADDRESSOF)
608 /* Similar, but also allows reference to the stack pointer.
610 This used to include FIXED_BASE_PLUS_P; however, we can't assume that
611 arg_pointer_rtx by itself is nonzero, because on at least one machine,
612 the i960, the arg pointer is zero when it is unused. */
614 #define NONZERO_BASE_PLUS_P(X) \
615 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
616 || (X) == virtual_stack_vars_rtx \
617 || (X) == virtual_incoming_args_rtx \
618 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
619 && (XEXP (X, 0) == frame_pointer_rtx \
620 || XEXP (X, 0) == hard_frame_pointer_rtx \
621 || ((X) == arg_pointer_rtx \
622 && fixed_regs[ARG_POINTER_REGNUM]) \
623 || XEXP (X, 0) == virtual_stack_vars_rtx \
624 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
625 || (X) == stack_pointer_rtx \
626 || (X) == virtual_stack_dynamic_rtx \
627 || (X) == virtual_outgoing_args_rtx \
628 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
629 && (XEXP (X, 0) == stack_pointer_rtx \
630 || XEXP (X, 0) == virtual_stack_dynamic_rtx \
631 || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
632 || GET_CODE (X) == ADDRESSOF)
634 static int notreg_cost PARAMS ((rtx, enum rtx_code));
635 static int approx_reg_cost_1 PARAMS ((rtx *, void *));
636 static int approx_reg_cost PARAMS ((rtx));
637 static int preferrable PARAMS ((int, int, int, int));
638 static void new_basic_block PARAMS ((void));
639 static void make_new_qty PARAMS ((unsigned int, enum machine_mode));
640 static void make_regs_eqv PARAMS ((unsigned int, unsigned int));
641 static void delete_reg_equiv PARAMS ((unsigned int));
642 static int mention_regs PARAMS ((rtx));
643 static int insert_regs PARAMS ((rtx, struct table_elt *, int));
644 static void remove_from_table PARAMS ((struct table_elt *, unsigned));
645 static struct table_elt *lookup PARAMS ((rtx, unsigned, enum machine_mode)),
646 *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
647 static rtx lookup_as_function PARAMS ((rtx, enum rtx_code));
648 static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
650 static void merge_equiv_classes PARAMS ((struct table_elt *,
651 struct table_elt *));
652 static void invalidate PARAMS ((rtx, enum machine_mode));
653 static int cse_rtx_varies_p PARAMS ((rtx, int));
654 static void remove_invalid_refs PARAMS ((unsigned int));
655 static void remove_invalid_subreg_refs PARAMS ((unsigned int, unsigned int,
657 static void rehash_using_reg PARAMS ((rtx));
658 static void invalidate_memory PARAMS ((void));
659 static void invalidate_for_call PARAMS ((void));
660 static rtx use_related_value PARAMS ((rtx, struct table_elt *));
661 static unsigned canon_hash PARAMS ((rtx, enum machine_mode));
662 static unsigned canon_hash_string PARAMS ((const char *));
663 static unsigned safe_hash PARAMS ((rtx, enum machine_mode));
664 static int exp_equiv_p PARAMS ((rtx, rtx, int, int));
665 static rtx canon_reg PARAMS ((rtx, rtx));
666 static void find_best_addr PARAMS ((rtx, rtx *, enum machine_mode));
667 static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
669 enum machine_mode *));
670 static rtx fold_rtx PARAMS ((rtx, rtx));
671 static rtx equiv_constant PARAMS ((rtx));
672 static void record_jump_equiv PARAMS ((rtx, int));
673 static void record_jump_cond PARAMS ((enum rtx_code, enum machine_mode,
675 static void cse_insn PARAMS ((rtx, rtx));
676 static int addr_affects_sp_p PARAMS ((rtx));
677 static void invalidate_from_clobbers PARAMS ((rtx));
678 static rtx cse_process_notes PARAMS ((rtx, rtx));
679 static void cse_around_loop PARAMS ((rtx));
680 static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
681 static void invalidate_skipped_block PARAMS ((rtx));
682 static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
683 static void cse_set_around_loop PARAMS ((rtx, rtx, rtx));
684 static rtx cse_basic_block PARAMS ((rtx, rtx, struct branch_path *, int));
685 static void count_reg_usage PARAMS ((rtx, int *, rtx, int));
686 static int check_for_label_ref PARAMS ((rtx *, void *));
687 extern void dump_class PARAMS ((struct table_elt*));
688 static struct cse_reg_info * get_cse_reg_info PARAMS ((unsigned int));
689 static int check_dependence PARAMS ((rtx *, void *));
691 static void flush_hash_table PARAMS ((void));
692 static bool insn_live_p PARAMS ((rtx, int *));
693 static bool set_live_p PARAMS ((rtx, rtx, int *));
694 static bool dead_libcall_p PARAMS ((rtx, int *));
696 /* Dump the expressions in the equivalence class indicated by CLASSP.
697 This function is used only for debugging. */
700 struct table_elt *classp;
702 struct table_elt *elt;
704 fprintf (stderr, "Equivalence chain for ");
705 print_rtl (stderr, classp->exp);
706 fprintf (stderr, ": \n");
708 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
710 print_rtl (stderr, elt->exp);
711 fprintf (stderr, "\n");
715 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
718 approx_reg_cost_1 (xp, data)
723 regset set = (regset) data;
725 if (x && GET_CODE (x) == REG)
726 SET_REGNO_REG_SET (set, REGNO (x));
730 /* Return an estimate of the cost of the registers used in an rtx.
731 This is mostly the number of different REG expressions in the rtx;
732 however, for some exceptions such as fixed registers, we use a cost of
733 0. If any other hard register reference occurs, return MAX_COST. */
745 for_each_rtx (&x, approx_reg_cost_1, (void *) &set);
747 EXECUTE_IF_SET_IN_REG_SET
750 if (! CHEAP_REGNO (i))
752 if (i < FIRST_PSEUDO_REGISTER)
755 cost += i < FIRST_PSEUDO_REGISTER ? 2 : 1;
759 CLEAR_REG_SET (&set);
760 return hardregs && SMALL_REGISTER_CLASSES ? MAX_COST : cost;
763 /* Return a negative value if an rtx A, whose costs are given by COST_A
764 and REGCOST_A, is more desirable than an rtx B.
765 Return a positive value if A is less desirable, or 0 if the two are equally desirable. */
768 preferrable (cost_a, regcost_a, cost_b, regcost_b)
769 int cost_a, regcost_a, cost_b, regcost_b;
771 /* First, get rid of cases involving expressions that are entirely unwanted. */
773 if (cost_a != cost_b)
775 if (cost_a == MAX_COST)
777 if (cost_b == MAX_COST)
781 /* Avoid extending lifetimes of hardregs. */
782 if (regcost_a != regcost_b)
784 if (regcost_a == MAX_COST)
786 if (regcost_b == MAX_COST)
790 /* Normal operation costs take precedence. */
791 if (cost_a != cost_b)
792 return cost_a - cost_b;
793 /* Only if these are identical consider effects on register pressure. */
794 if (regcost_a != regcost_b)
795 return regcost_a - regcost_b;
799 /* Internal function, to compute cost when X is not a register; called
800 from COST macro to keep it simple. */
803 notreg_cost (x, outer)
807 return ((GET_CODE (x) == SUBREG
808 && GET_CODE (SUBREG_REG (x)) == REG
809 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
810 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
811 && (GET_MODE_SIZE (GET_MODE (x))
812 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
813 && subreg_lowpart_p (x)
814 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
815 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
817 : rtx_cost (x, outer) * 2);
820 /* Return an estimate of the cost of computing rtx X.
821 One use is in cse, to decide which expression to keep in the hash table.
822 Another is in rtl generation, to pick the cheapest way to multiply.
823 Other uses like the latter are expected in the future. */
826 rtx_cost (x, outer_code)
828 enum rtx_code outer_code ATTRIBUTE_UNUSED;
838 /* Compute the default costs of certain things.
839 Note that RTX_COSTS can override the defaults. */
845 total = COSTS_N_INSNS (5);
851 total = COSTS_N_INSNS (7);
854 /* Used in loop.c and combine.c as a marker. */
858 total = COSTS_N_INSNS (1);
867 /* If we can't tie these modes, make this expensive. The larger
868 the mode, the more expensive it is. */
869 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
870 return COSTS_N_INSNS (2
871 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
875 RTX_COSTS (x, code, outer_code);
878 CONST_COSTS (x, code, outer_code);
882 #ifdef DEFAULT_RTX_COSTS
883 DEFAULT_RTX_COSTS (x, code, outer_code);
888 /* Sum the costs of the sub-rtx's, plus cost of this operation,
889 which is already in total. */
891 fmt = GET_RTX_FORMAT (code);
892 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
894 total += rtx_cost (XEXP (x, i), code);
895 else if (fmt[i] == 'E')
896 for (j = 0; j < XVECLEN (x, i); j++)
897 total += rtx_cost (XVECEXP (x, i, j), code);
902 /* Return cost of address expression X.
903 Expect that X is properly formed address reference. */
906 address_cost (x, mode)
908 enum machine_mode mode;
910 /* The ADDRESS_COST macro does not deal with ADDRESSOF nodes. But,
911 during CSE, such nodes are present. Using an ADDRESSOF node which
912 refers to the address of a REG is a good thing because we can then
913 turn (MEM (ADDRESSOF (REG))) into just plain REG. */
915 if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
918 /* We may be asked for cost of various unusual addresses, such as operands
919 of push instruction. It is not worthwhile to complicate writing
920 of ADDRESS_COST macro by such cases. */
922 if (!memory_address_p (mode, x))
925 return ADDRESS_COST (x);
927 return rtx_cost (x, MEM);
932 static struct cse_reg_info *
933 get_cse_reg_info (regno)
936 struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
937 struct cse_reg_info *p;
939 for (p = *hash_head; p != NULL; p = p->hash_next)
940 if (p->regno == regno)
945 /* Get a new cse_reg_info structure. */
946 if (cse_reg_info_free_list)
948 p = cse_reg_info_free_list;
949 cse_reg_info_free_list = p->next;
952 p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));
954 /* Insert into hash table. */
955 p->hash_next = *hash_head;
960 p->reg_in_table = -1;
963 p->next = cse_reg_info_used_list;
964 cse_reg_info_used_list = p;
965 if (!cse_reg_info_used_list_end)
966 cse_reg_info_used_list_end = p;
969 /* Cache this lookup; we tend to be looking up information about the
970 same register several times in a row. */
971 cached_regno = regno;
972 cached_cse_reg_info = p;
977 /* Clear the hash table and initialize each register with its own quantity,
978 for a new basic block. */
987 /* Clear out hash table state for this pass. */
989 memset ((char *) reg_hash, 0, sizeof reg_hash);
991 if (cse_reg_info_used_list)
993 cse_reg_info_used_list_end->next = cse_reg_info_free_list;
994 cse_reg_info_free_list = cse_reg_info_used_list;
995 cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
997 cached_cse_reg_info = 0;
999 CLEAR_HARD_REG_SET (hard_regs_in_table);
1001 /* The per-quantity values used to be initialized here, but it is
1002 much faster to initialize each as it is made in `make_new_qty'. */
1004 for (i = 0; i < HASH_SIZE; i++)
1006 struct table_elt *first;
1011 struct table_elt *last = first;
1015 while (last->next_same_hash != NULL)
1016 last = last->next_same_hash;
1018 /* Now relink this entire hash chain into
1019 the free element list. */
1021 last->next_same_hash = free_element_chain;
1022 free_element_chain = first;
1033 /* Say that register REG contains a quantity in mode MODE that was not held
1034 in any register before, and initialize that quantity. */
1037 make_new_qty (reg, mode)
1039 enum machine_mode mode;
1042 struct qty_table_elem *ent;
1043 struct reg_eqv_elem *eqv;
1045 if (next_qty >= max_qty)
1048 q = REG_QTY (reg) = next_qty++;
1049 ent = &qty_table[q];
1050 ent->first_reg = reg;
1051 ent->last_reg = reg;
1053 ent->const_rtx = ent->const_insn = NULL_RTX;
1054 ent->comparison_code = UNKNOWN;
1056 eqv = &reg_eqv_table[reg];
1057 eqv->next = eqv->prev = -1;
1060 /* Make reg NEW equivalent to reg OLD.
1061 OLD is not changing; NEW is. */
1064 make_regs_eqv (new, old)
1065 unsigned int new, old;
1067 unsigned int lastr, firstr;
1068 int q = REG_QTY (old);
1069 struct qty_table_elem *ent;
1071 ent = &qty_table[q];
1073 /* Nothing should become eqv until it has a "non-invalid" qty number. */
1074 if (! REGNO_QTY_VALID_P (old))
1078 firstr = ent->first_reg;
1079 lastr = ent->last_reg;
1081 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
1082 hard regs. Among pseudos, if NEW will live longer than any other reg
1083 of the same qty, and that is beyond the current basic block,
1084 make it the new canonical replacement for this qty. */
1085 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1086 /* Certain fixed registers might be of the class NO_REGS. This means
1087 that not only can they not be allocated by the compiler, but
1088 they cannot be used in substitutions or canonicalizations either. */
1090 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1091 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1092 || (new >= FIRST_PSEUDO_REGISTER
1093 && (firstr < FIRST_PSEUDO_REGISTER
1094 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1095 || (uid_cuid[REGNO_FIRST_UID (new)]
1096 < cse_basic_block_start))
1097 && (uid_cuid[REGNO_LAST_UID (new)]
1098 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1100 reg_eqv_table[firstr].prev = new;
1101 reg_eqv_table[new].next = firstr;
1102 reg_eqv_table[new].prev = -1;
1103 ent->first_reg = new;
1107 /* If NEW is a hard reg (known to be non-fixed), insert at end.
1108 Otherwise, insert before any non-fixed hard regs that are at the
1109 end. Registers of class NO_REGS cannot be used as an
1110 equivalent for anything. */
1111 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1112 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1113 && new >= FIRST_PSEUDO_REGISTER)
1114 lastr = reg_eqv_table[lastr].prev;
1115 reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1116 if (reg_eqv_table[lastr].next >= 0)
1117 reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1119 qty_table[q].last_reg = new;
1120 reg_eqv_table[lastr].next = new;
1121 reg_eqv_table[new].prev = lastr;
1125 /* Remove REG from its equivalence class. */
1128 delete_reg_equiv (reg)
1131 struct qty_table_elem *ent;
1132 int q = REG_QTY (reg);
1135 /* If invalid, do nothing. */
1139 ent = &qty_table[q];
1141 p = reg_eqv_table[reg].prev;
1142 n = reg_eqv_table[reg].next;
1145 reg_eqv_table[n].prev = p;
1149 reg_eqv_table[p].next = n;
1153 REG_QTY (reg) = reg;
1156 /* Remove any invalid expressions from the hash table
1157 that refer to any of the registers contained in expression X.
1159 Make sure that newly inserted references to those registers
1160 as subexpressions will be considered valid.
1162 mention_regs is not called when a register itself
1163 is being stored in the table.
1165 Return 1 if we have done something that may have changed the hash code of X. */
1180 code = GET_CODE (x);
1183 unsigned int regno = REGNO (x);
1184 unsigned int endregno
1185 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1186 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
1189 for (i = regno; i < endregno; i++)
1191 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1192 remove_invalid_refs (i);
1194 REG_IN_TABLE (i) = REG_TICK (i);
1200 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1201 pseudo if they don't use overlapping words. We handle only pseudos
1202 here for simplicity. */
1203 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1204 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1206 unsigned int i = REGNO (SUBREG_REG (x));
1208 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1210 /* If reg_tick has been incremented more than once since
1211 reg_in_table was last set, that means that the entire
1212 register has been set before, so discard anything memorized
1213 for the entire register, including all SUBREG expressions. */
1214 if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
1215 remove_invalid_refs (i);
1217 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1220 REG_IN_TABLE (i) = REG_TICK (i);
1224 /* If X is a comparison or a COMPARE and either operand is a register
1225 that does not have a quantity, give it one. This is so that a later
1226 call to record_jump_equiv won't cause X to be assigned a different
1227 hash code and not found in the table after that call.
1229 It is not necessary to do this here, since rehash_using_reg can
1230 fix up the table later, but doing this here eliminates the need to
1231 call that expensive function in the most common case where the only
1232 use of the register is in the comparison. */
1234 if (code == COMPARE || GET_RTX_CLASS (code) == '<')
1236 if (GET_CODE (XEXP (x, 0)) == REG
1237 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1238 if (insert_regs (XEXP (x, 0), NULL, 0))
1240 rehash_using_reg (XEXP (x, 0));
1244 if (GET_CODE (XEXP (x, 1)) == REG
1245 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1246 if (insert_regs (XEXP (x, 1), NULL, 0))
1248 rehash_using_reg (XEXP (x, 1));
1253 fmt = GET_RTX_FORMAT (code);
1254 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1256 changed |= mention_regs (XEXP (x, i));
1257 else if (fmt[i] == 'E')
1258 for (j = 0; j < XVECLEN (x, i); j++)
1259 changed |= mention_regs (XVECEXP (x, i, j));
1264 /* Update the register quantities for inserting X into the hash table
1265 with a value equivalent to CLASSP.
1266 (If the class does not contain a REG, it is irrelevant.)
1267 If MODIFIED is nonzero, X is a destination; it is being modified.
1268 Note that delete_reg_equiv should be called on a register
1269 before insert_regs is done on that register with MODIFIED != 0.
1271 Nonzero value means that elements of reg_qty have changed
1272 so X's hash code may be different. */
1275 insert_regs (x, classp, modified)
1277 struct table_elt *classp;
1280 if (GET_CODE (x) == REG)
1282 unsigned int regno = REGNO (x);
1285 /* If REGNO is in the equivalence table already but is of the
1286 wrong mode for that equivalence, don't do anything here. */
1288 qty_valid = REGNO_QTY_VALID_P (regno);
1291 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1293 if (ent->mode != GET_MODE (x))
1297 if (modified || ! qty_valid)
1300 for (classp = classp->first_same_value;
1302 classp = classp->next_same_value)
1303 if (GET_CODE (classp->exp) == REG
1304 && GET_MODE (classp->exp) == GET_MODE (x))
1306 make_regs_eqv (regno, REGNO (classp->exp));
1310 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1311 than REG_IN_TABLE to find out if there was only a single preceding
1312 invalidation - for the SUBREG - or another one, which would be
1313 for the full register. However, if we find here that REG_TICK
1314 indicates that the register is invalid, it means that it has
1315 been invalidated in a separate operation. The SUBREG might be used
1316 now (then this is a recursive call), or we might use the full REG
1317 now and a SUBREG of it later. So bump up REG_TICK so that
1318 mention_regs will do the right thing. */
1320 && REG_IN_TABLE (regno) >= 0
1321 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1323 make_new_qty (regno, GET_MODE (x));
1330 /* If X is a SUBREG, we will likely be inserting the inner register in the
1331 table. If that register doesn't have an assigned quantity number at
1332 this point but does later, the insertion that we will be doing now will
1333 not be accessible because its hash code will have changed. So assign
1334 a quantity number now. */
1336 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1337 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1339 insert_regs (SUBREG_REG (x), NULL, 0);
1344 return mention_regs (x);
1347 /* Look in or update the hash table. */
1349 /* Remove table element ELT from use in the table.
1350 HASH is its hash code, made using the HASH macro.
1351 It's an argument because often that is known in advance
1352 and we save much time not recomputing it. */
1355 remove_from_table (elt, hash)
1356 struct table_elt *elt;
1362 /* Mark this element as removed. See cse_insn. */
1363 elt->first_same_value = 0;
1365 /* Remove the table element from its equivalence class. */
1368 struct table_elt *prev = elt->prev_same_value;
1369 struct table_elt *next = elt->next_same_value;
1372 next->prev_same_value = prev;
1375 prev->next_same_value = next;
1378 struct table_elt *newfirst = next;
1381 next->first_same_value = newfirst;
1382 next = next->next_same_value;
1387 /* Remove the table element from its hash bucket. */
1390 struct table_elt *prev = elt->prev_same_hash;
1391 struct table_elt *next = elt->next_same_hash;
1394 next->prev_same_hash = prev;
1397 prev->next_same_hash = next;
1398 else if (table[hash] == elt)
1402 /* This entry is not in the proper hash bucket. This can happen
1403 when two classes were merged by `merge_equiv_classes'. Search
1404 for the hash bucket that it heads. This happens only very
1405 rarely, so the cost is acceptable. */
1406 for (hash = 0; hash < HASH_SIZE; hash++)
1407 if (table[hash] == elt)
1412 /* Remove the table element from its related-value circular chain. */
1414 if (elt->related_value != 0 && elt->related_value != elt)
1416 struct table_elt *p = elt->related_value;
1418 while (p->related_value != elt)
1419 p = p->related_value;
1420 p->related_value = elt->related_value;
1421 if (p->related_value == p)
1422 p->related_value = 0;
1425 /* Now add it to the free element chain. */
1426 elt->next_same_hash = free_element_chain;
1427 free_element_chain = elt;
1430 /* Look up X in the hash table and return its table element,
1431 or 0 if X is not in the table.
1433 MODE is the machine-mode of X, or if X is an integer constant
1434 with VOIDmode then MODE is the mode with which X will be used.
1436 Here we are satisfied to find an expression whose tree structure looks like X. */
1439 static struct table_elt *
1440 lookup (x, hash, mode)
1443 enum machine_mode mode;
1445 struct table_elt *p;
1447 for (p = table[hash]; p; p = p->next_same_hash)
1448 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1449 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1455 /* Like `lookup' but don't care whether the table element uses invalid regs.
1456 Also ignore discrepancies in the machine mode of a register. */
1458 static struct table_elt *
1459 lookup_for_remove (x, hash, mode)
1462 enum machine_mode mode;
1464 struct table_elt *p;
1466 if (GET_CODE (x) == REG)
1468 unsigned int regno = REGNO (x);
1470 /* Don't check the machine mode when comparing registers;
1471 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1472 for (p = table[hash]; p; p = p->next_same_hash)
1473 if (GET_CODE (p->exp) == REG
1474 && REGNO (p->exp) == regno)
1479 for (p = table[hash]; p; p = p->next_same_hash)
1480 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1487 /* Look for an expression equivalent to X and with code CODE.
1488 If one is found, return that expression. */
1491 lookup_as_function (x, code)
1496 = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));
1498 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1499 long as we are narrowing. So if we looked in vain for a mode narrower
1500 than word_mode before, look for word_mode now. */
1501 if (p == 0 && code == CONST_INT
1502 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1505 PUT_MODE (x, word_mode);
1506 p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
1512 for (p = p->first_same_value; p; p = p->next_same_value)
1513 if (GET_CODE (p->exp) == code
1514 /* Make sure this is a valid entry in the table. */
1515 && exp_equiv_p (p->exp, p->exp, 1, 0))
1521 /* Insert X in the hash table, assuming HASH is its hash code
1522 and CLASSP is an element of the class it should go in
1523 (or 0 if a new class should be made).
1524 It is inserted at the proper position to keep the class in
1525 the order cheapest first.
1527 MODE is the machine-mode of X, or if X is an integer constant
1528 with VOIDmode then MODE is the mode with which X will be used.
1530 For elements of equal cheapness, the most recent one
1531 goes in front, except that the first element in the list
1532 remains first unless a cheaper element is added. The order of
1533 pseudo-registers does not matter, as canon_reg will be called to
1534 find the cheapest when a register is retrieved from the table.
1536 The in_memory field in the hash table element is set to 0.
1537 The caller must set it nonzero if appropriate.
1539 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1540 and if insert_regs returns a nonzero value
1541 you must then recompute its hash code before calling here.
1543 If necessary, update table showing constant values of quantities. */
1545 #define CHEAPER(X, Y) \
1546 (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
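/* A made-up example of the ordering: two expressions that both cost
   COSTS_N_INSNS (1) but use 0 and 2 registers respectively give
   preferrable (COSTS_N_INSNS (1), 0, COSTS_N_INSNS (1), 2) < 0, so the one
   with the smaller register cost is CHEAPER.  */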
1548 static struct table_elt *
1549 insert (x, classp, hash, mode)
1551 struct table_elt *classp;
1553 enum machine_mode mode;
1555 struct table_elt *elt;
1557 /* If X is a register and we haven't made a quantity for it,
1558 something is wrong. */
1559 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1562 /* If X is a hard register, show it is being put in the table. */
1563 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1565 unsigned int regno = REGNO (x);
1566 unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1569 for (i = regno; i < endregno; i++)
1570 SET_HARD_REG_BIT (hard_regs_in_table, i);
1573 /* Put an element for X into the right hash bucket. */
1575 elt = free_element_chain;
1577 free_element_chain = elt->next_same_hash;
1581 elt = (struct table_elt *) xmalloc (sizeof (struct table_elt));
1585 elt->canon_exp = NULL_RTX;
1586 elt->cost = COST (x);
1587 elt->regcost = approx_reg_cost (x);
1588 elt->next_same_value = 0;
1589 elt->prev_same_value = 0;
1590 elt->next_same_hash = table[hash];
1591 elt->prev_same_hash = 0;
1592 elt->related_value = 0;
1595 elt->is_const = (CONSTANT_P (x)
1596 /* GNU C++ takes advantage of this for `this'
1597 (and other const values). */
1598 || (GET_CODE (x) == REG
1599 && RTX_UNCHANGING_P (x)
1600 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1601 || FIXED_BASE_PLUS_P (x));
1604 table[hash]->prev_same_hash = elt;
1607 /* Put it into the proper value-class. */
1610 classp = classp->first_same_value;
1611 if (CHEAPER (elt, classp))
1612 /* Insert at the head of the class */
1614 struct table_elt *p;
1615 elt->next_same_value = classp;
1616 classp->prev_same_value = elt;
1617 elt->first_same_value = elt;
1619 for (p = classp; p; p = p->next_same_value)
1620 p->first_same_value = elt;
1624 /* Insert not at head of the class. */
1625 /* Put it after the last element cheaper than X. */
1626 struct table_elt *p, *next;
1628 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1631 /* Put it after P and before NEXT. */
1632 elt->next_same_value = next;
1634 next->prev_same_value = elt;
1636 elt->prev_same_value = p;
1637 p->next_same_value = elt;
1638 elt->first_same_value = classp;
1642 elt->first_same_value = elt;
1644 /* If this is a constant being set equivalent to a register or a register
1645 being set equivalent to a constant, note the constant equivalence.
1647 If this is a constant, it cannot be equivalent to a different constant,
1648 and a constant is the only thing that can be cheaper than a register. So
1649 we know the register is the head of the class (before the constant was
1652 If this is a register that is not already known equivalent to a
1653 constant, we must check the entire class.
1655 If this is a register that is already known equivalent to an insn,
1656 update the qtys `const_insn' to show that `this_insn' is the latest
1657 insn making that quantity equivalent to the constant. */
1659 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1660 && GET_CODE (x) != REG)
1662 int exp_q = REG_QTY (REGNO (classp->exp));
1663 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1665 exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1666 exp_ent->const_insn = this_insn;
1669 else if (GET_CODE (x) == REG
1671 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1674 struct table_elt *p;
1676 for (p = classp; p != 0; p = p->next_same_value)
1678 if (p->is_const && GET_CODE (p->exp) != REG)
1680 int x_q = REG_QTY (REGNO (x));
1681 struct qty_table_elem *x_ent = &qty_table[x_q];
1684 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1685 x_ent->const_insn = this_insn;
1691 else if (GET_CODE (x) == REG
1692 && qty_table[REG_QTY (REGNO (x))].const_rtx
1693 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1694 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1696 /* If this is a constant with symbolic value,
1697 and it has a term with an explicit integer value,
1698 link it up with related expressions. */
1699 if (GET_CODE (x) == CONST)
1701 rtx subexp = get_related_value (x);
1703 struct table_elt *subelt, *subelt_prev;
1707 /* Get the integer-free subexpression in the hash table. */
1708 subhash = safe_hash (subexp, mode) & HASH_MASK;
1709 subelt = lookup (subexp, subhash, mode);
1711 subelt = insert (subexp, NULL, subhash, mode);
1712 /* Initialize SUBELT's circular chain if it has none. */
1713 if (subelt->related_value == 0)
1714 subelt->related_value = subelt;
1715 /* Find the element in the circular chain that precedes SUBELT. */
1716 subelt_prev = subelt;
1717 while (subelt_prev->related_value != subelt)
1718 subelt_prev = subelt_prev->related_value;
1719 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1720 This way the element that follows SUBELT is the oldest one. */
1721 elt->related_value = subelt_prev->related_value;
1722 subelt_prev->related_value = elt;
1729 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1730 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1731 the two classes equivalent.
1733 CLASS1 will be the surviving class; CLASS2 should not be used after this call.
1736 Any invalid entries in CLASS2 will not be copied. */
1739 merge_equiv_classes (class1, class2)
1740 struct table_elt *class1, *class2;
1742 struct table_elt *elt, *next, *new;
1744 /* Ensure we start with the head of the classes. */
1745 class1 = class1->first_same_value;
1746 class2 = class2->first_same_value;
1748 /* If they were already equal, forget it. */
1749 if (class1 == class2)
1752 for (elt = class2; elt; elt = next)
1756 enum machine_mode mode = elt->mode;
1758 next = elt->next_same_value;
1760 /* Remove old entry, make a new one in CLASS1's class.
1761 Don't do this for invalid entries as we cannot find their
1762 hash code (it also isn't necessary). */
1763 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1765 hash_arg_in_memory = 0;
1766 hash = HASH (exp, mode);
1768 if (GET_CODE (exp) == REG)
1769 delete_reg_equiv (REGNO (exp));
1771 remove_from_table (elt, hash);
1773 if (insert_regs (exp, class1, 0))
1775 rehash_using_reg (exp);
1776 hash = HASH (exp, mode);
1778 new = insert (exp, class1, hash, mode);
1779 new->in_memory = hash_arg_in_memory;
1784 /* Flush the entire hash table. */
1790 struct table_elt *p;
1792 for (i = 0; i < HASH_SIZE; i++)
1793 for (p = table[i]; p; p = table[i])
1795 /* Note that invalidate can remove elements
1796 after P in the current hash chain. */
1797 if (GET_CODE (p->exp) == REG)
1798 invalidate (p->exp, p->mode);
1800 remove_from_table (p, i);
1804 /* Function called for each rtx to check whether a true dependence exists. */
1805 struct check_dependence_data
1807 enum machine_mode mode;
1812 check_dependence (x, data)
1816 struct check_dependence_data *d = (struct check_dependence_data *) data;
1817 if (*x && GET_CODE (*x) == MEM)
1818 return true_dependence (d->exp, d->mode, *x, cse_rtx_varies_p);
1823 /* Remove from the hash table, or mark as invalid, all expressions whose
1824 values could be altered by storing in X. X is a register, a subreg, or
1825 a memory reference with nonvarying address (because, when a memory
1826 reference with a varying address is stored in, all memory references are
1827 removed by invalidate_memory so specific invalidation is superfluous).
1828 FULL_MODE, if not VOIDmode, indicates that this much should be
1829 invalidated instead of just the amount indicated by the mode of X. This
1830 is only used for bitfield stores into memory.
1832 A nonvarying address may be just a register or just a symbol reference,
1833 or it may be either of those plus a numeric offset. */
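/* For instance (illustrative), a store through
   (plus (reg FRAME_POINTER_REGNUM) (const_int 8)) only has to invalidate
   entries that can overlap that stack slot, whereas a store through an
   address held in an arbitrary pseudo register is handled by
   invalidate_memory, which flushes every `in_memory' entry instead.  */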
1836 invalidate (x, full_mode)
1838 enum machine_mode full_mode;
1841 struct table_elt *p;
1843 switch (GET_CODE (x))
1847 /* If X is a register, dependencies on its contents are recorded
1848 through the qty number mechanism. Just change the qty number of
1849 the register, mark it as invalid for expressions that refer to it,
1850 and remove it itself. */
1851 unsigned int regno = REGNO (x);
1852 unsigned int hash = HASH (x, GET_MODE (x));
1854 /* Remove REGNO from any quantity list it might be on and indicate
1855 that its value might have changed. If it is a pseudo, remove its
1856 entry from the hash table.
1858 For a hard register, we do the first two actions above for any
1859 additional hard registers corresponding to X. Then, if any of these
1860 registers are in the table, we must remove any REG entries that
1861 overlap these registers. */
1863 delete_reg_equiv (regno);
1866 if (regno >= FIRST_PSEUDO_REGISTER)
1868 /* Because a register can be referenced in more than one mode,
1869 we might have to remove more than one table entry. */
1870 struct table_elt *elt;
1872 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1873 remove_from_table (elt, hash);
1877 HOST_WIDE_INT in_table
1878 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1879 unsigned int endregno
1880 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1881 unsigned int tregno, tendregno, rn;
1882 struct table_elt *p, *next;
1884 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1886 for (rn = regno + 1; rn < endregno; rn++)
1888 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1889 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1890 delete_reg_equiv (rn);
1895 for (hash = 0; hash < HASH_SIZE; hash++)
1896 for (p = table[hash]; p; p = next)
1898 next = p->next_same_hash;
1900 if (GET_CODE (p->exp) != REG
1901 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1904 tregno = REGNO (p->exp);
1906 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1907 if (tendregno > regno && tregno < endregno)
1908 remove_from_table (p, hash);
1915 invalidate (SUBREG_REG (x), VOIDmode);
1919 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1920 invalidate (XVECEXP (x, 0, i), VOIDmode);
1924 /* This is part of a disjoint return value; extract the location in
1925 question ignoring the offset. */
1926 invalidate (XEXP (x, 0), VOIDmode);
1930 /* Calculate the canonical version of X here so that
1931 true_dependence doesn't generate new RTL for X on each call. */
1934 /* Remove all hash table elements that refer to overlapping pieces of
1936 if (full_mode == VOIDmode)
1937 full_mode = GET_MODE (x);
1939 for (i = 0; i < HASH_SIZE; i++)
1941 struct table_elt *next;
1943 for (p = table[i]; p; p = next)
1945 next = p->next_same_hash;
1948 struct check_dependence_data d;
1950 /* Just canonicalize the expression once;
1951 otherwise each time we call invalidate
1952 true_dependence will canonicalize the
1953 expression again. */
1955 p->canon_exp = canon_rtx (p->exp);
1958 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1959 remove_from_table (p, i);
1970 /* Remove all expressions that refer to register REGNO,
1971 since they are already invalid, and we are about to
1972 mark that register valid again and don't want the old
1973 expressions to reappear as valid. */
1976 remove_invalid_refs (regno)
1980 struct table_elt *p, *next;
1982 for (i = 0; i < HASH_SIZE; i++)
1983 for (p = table[i]; p; p = next)
1985 next = p->next_same_hash;
1986 if (GET_CODE (p->exp) != REG
1987 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1988 remove_from_table (p, i);
1992 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1995 remove_invalid_subreg_refs (regno, offset, mode)
1997 unsigned int offset;
1998 enum machine_mode mode;
2001 struct table_elt *p, *next;
2002 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2004 for (i = 0; i < HASH_SIZE; i++)
2005 for (p = table[i]; p; p = next)
2008 next = p->next_same_hash;
2010 if (GET_CODE (exp) != REG
2011 && (GET_CODE (exp) != SUBREG
2012 || GET_CODE (SUBREG_REG (exp)) != REG
2013 || REGNO (SUBREG_REG (exp)) != regno
2014 || (((SUBREG_BYTE (exp)
2015 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2016 && SUBREG_BYTE (exp) <= end))
2017 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2018 remove_from_table (p, i);
2022 /* Recompute the hash codes of any valid entries in the hash table that
2023 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2025 This is called when we make a jump equivalence. */
2028 rehash_using_reg (x)
2032 struct table_elt *p, *next;
2035 if (GET_CODE (x) == SUBREG)
2038 /* If X is not a register or if the register is known not to be in any
2039 valid entries in the table, we have no work to do. */
2041 if (GET_CODE (x) != REG
2042 || REG_IN_TABLE (REGNO (x)) < 0
2043 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2046 /* Scan all hash chains looking for valid entries that mention X.
2047 If we find one and it is in the wrong hash chain, move it. We can skip
2048 objects that are registers, since they are handled specially. */
2050 for (i = 0; i < HASH_SIZE; i++)
2051 for (p = table[i]; p; p = next)
2053 next = p->next_same_hash;
2054 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
2055 && exp_equiv_p (p->exp, p->exp, 1, 0)
2056 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2058 if (p->next_same_hash)
2059 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2061 if (p->prev_same_hash)
2062 p->prev_same_hash->next_same_hash = p->next_same_hash;
2064 table[i] = p->next_same_hash;
2066 p->next_same_hash = table[hash];
2067 p->prev_same_hash = 0;
2069 table[hash]->prev_same_hash = p;
2075 /* Remove from the hash table any expression that is a call-clobbered
2076 register. Also update their TICK values. */
2079 invalidate_for_call ()
2081 unsigned int regno, endregno;
2084 struct table_elt *p, *next;
2087 /* Go through all the hard registers. For each that is clobbered in
2088 a CALL_INSN, remove the register from quantity chains and update
2089 reg_tick if defined. Also see if any of these registers is currently
2092 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2093 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2095 delete_reg_equiv (regno);
2096 if (REG_TICK (regno) >= 0)
2099 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2102 /* In the case where we have no call-clobbered hard registers in the
2103 table, we are done. Otherwise, scan the table and remove any
2104 entry that overlaps a call-clobbered register. */
2107 for (hash = 0; hash < HASH_SIZE; hash++)
2108 for (p = table[hash]; p; p = next)
2110 next = p->next_same_hash;
2112 if (GET_CODE (p->exp) != REG
2113 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2116 regno = REGNO (p->exp);
2117 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2119 for (i = regno; i < endregno; i++)
2120 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2122 remove_from_table (p, hash);
2128 /* Given an expression X of type CONST,
2129 and ELT which is its table entry (or 0 if it
2130 is not in the hash table),
2131 return an alternate expression for X as a register plus integer.
2132 If none can be found, return 0. */
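/* Illustrative example (symbol and offsets are hypothetical): if X is
   (const (plus (symbol_ref "table") (const_int 12))) and some register is
   already known to hold (const (plus (symbol_ref "table") (const_int 4))),
   this function can return (plus <that register> (const_int 8)), which is
   usually cheaper to materialize than the full constant.  */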
2135 use_related_value (x, elt)
2137 struct table_elt *elt;
2139 struct table_elt *relt = 0;
2140 struct table_elt *p, *q;
2141 HOST_WIDE_INT offset;
2143 /* First, is there anything related known?
2144 If we have a table element, we can tell from that.
2145 Otherwise, we must look it up. */
2147 if (elt != 0 && elt->related_value != 0)
2149 else if (elt == 0 && GET_CODE (x) == CONST)
2151 rtx subexp = get_related_value (x);
2153 relt = lookup (subexp,
2154 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2161 /* Search all related table entries for one that has an
2162 equivalent register. */
2167 /* This loop is strange in that it is executed in two different cases.
2168 The first is when X is already in the table. Then it is searching
2169 the RELATED_VALUE list of X's class (RELT). The second case is when
2170 X is not in the table. Then RELT points to a class for the related
2173 Ensure that, whatever case we are in, we ignore classes that have
2174 the same value as X. */
2176 if (rtx_equal_p (x, p->exp))
2179 for (q = p->first_same_value; q; q = q->next_same_value)
2180 if (GET_CODE (q->exp) == REG)
2186 p = p->related_value;
2188 /* We went all the way around, so there is nothing to be found.
2189 Alternatively, perhaps RELT was in the table for some other reason
2190 and it has no related values recorded. */
2191 if (p == relt || p == 0)
2198 offset = (get_integer_term (x) - get_integer_term (p->exp));
2199 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2200 return plus_constant (q->exp, offset);
2203 /* Hash a string. Just add its bytes up. */
2204 static inline unsigned
2205 canon_hash_string (ps)
2209 const unsigned char *p = (const unsigned char *) ps;
2218 /* Hash an rtx. We are careful to make sure the value is never negative.
2219 Equivalent registers hash identically.
2220 MODE is used in hashing for CONST_INTs only;
2221 otherwise the mode of X is used.
2223 Store 1 in do_not_record if any subexpression is volatile.
2225 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2226 which does not have the RTX_UNCHANGING_P bit set.
2228 Note that cse_insn knows that the hash code of a MEM expression
2229 is just (int) MEM plus the hash code of the address. */
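/* Rough illustration: registers are hashed through their quantity number, so
   if (reg 100) and (reg 101) are currently known to hold the same value,
   (plus (reg 100) (const_int 4)) and (plus (reg 101) (const_int 4)) receive
   the same hash code and land in the same chain, where exp_equiv_p can then
   recognize them as equivalent.  */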
2232 canon_hash (x, mode)
2234 enum machine_mode mode;
2241 /* repeat is used to turn tail-recursion into iteration. */
2246 code = GET_CODE (x);
2251 unsigned int regno = REGNO (x);
2254 /* On some machines, we can't record any non-fixed hard register,
2255 because extending its life will cause reload problems. We
2256 consider ap, fp, sp, gp to be fixed for this purpose.
2258 We also consider CCmode registers to be fixed for this purpose;
2259 failure to do so leads to failure to simplify 0<100 type of
2262 On all machines, we can't record any global registers.
2263 Nor should we record any register that is in a small
2264 class, as defined by CLASS_LIKELY_SPILLED_P. */
2266 if (regno >= FIRST_PSEUDO_REGISTER)
2268 else if (x == frame_pointer_rtx
2269 || x == hard_frame_pointer_rtx
2270 || x == arg_pointer_rtx
2271 || x == stack_pointer_rtx
2272 || x == pic_offset_table_rtx)
2274 else if (global_regs[regno])
2276 else if (fixed_regs[regno])
2278 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2280 else if (SMALL_REGISTER_CLASSES)
2282 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2293 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2297 /* We handle SUBREG of a REG specially because the underlying
2298 reg changes its hash value with every value change; we don't
2299 want to have to forget unrelated subregs when one subreg changes. */
2302 if (GET_CODE (SUBREG_REG (x)) == REG)
2304 hash += (((unsigned) SUBREG << 7)
2305 + REGNO (SUBREG_REG (x))
2306 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2314 unsigned HOST_WIDE_INT tem = INTVAL (x);
2315 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2320 /* This is like the general case, except that it only counts
2321 the integers representing the constant. */
2322 hash += (unsigned) code + (unsigned) GET_MODE (x);
2323 if (GET_MODE (x) != VOIDmode)
2324 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2326 unsigned HOST_WIDE_INT tem = XWINT (x, i);
2330 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2331 + (unsigned) CONST_DOUBLE_HIGH (x));
2339 units = CONST_VECTOR_NUNITS (x);
2341 for (i = 0; i < units; ++i)
2343 elt = CONST_VECTOR_ELT (x, i);
2344 hash += canon_hash (elt, GET_MODE (elt));
2350 /* Assume there is only one rtx object for any given label. */
2352 hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2356 hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2360 /* We don't record if marked volatile or if BLKmode since we don't
2361 know the size of the move. */
2362 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2367 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2369 hash_arg_in_memory = 1;
2371 /* Now that we have already found this special case,
2372 might as well speed it up as much as possible. */
2373 hash += (unsigned) MEM;
2378 /* A USE that mentions non-volatile memory needs special
2379 handling since the MEM may be BLKmode which normally
2380 prevents an entry from being made. Pure calls are
2381 marked by a USE which mentions BLKmode memory. */
2382 if (GET_CODE (XEXP (x, 0)) == MEM
2383 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2385 hash += (unsigned) USE;
2388 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2389 hash_arg_in_memory = 1;
2391 /* Now that we have already found this special case,
2392 might as well speed it up as much as possible. */
2393 hash += (unsigned) MEM;
2408 case UNSPEC_VOLATILE:
2413 if (MEM_VOLATILE_P (x))
2420 /* We don't want to take the filename and line into account. */
2421 hash += (unsigned) code + (unsigned) GET_MODE (x)
2422 + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2423 + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2424 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2426 if (ASM_OPERANDS_INPUT_LENGTH (x))
2428 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2430 hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2431 GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2432 + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2436 hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2437 x = ASM_OPERANDS_INPUT (x, 0);
2438 mode = GET_MODE (x);
2450 i = GET_RTX_LENGTH (code) - 1;
2451 hash += (unsigned) code + (unsigned) GET_MODE (x);
2452 fmt = GET_RTX_FORMAT (code);
2457 rtx tem = XEXP (x, i);
2459 /* If we are about to do the last recursive call
2460 needed at this level, change it into iteration.
2461 This function is called enough to be worth it. */
2467 hash += canon_hash (tem, 0);
2469 else if (fmt[i] == 'E')
2470 for (j = 0; j < XVECLEN (x, i); j++)
2471 hash += canon_hash (XVECEXP (x, i, j), 0);
2472 else if (fmt[i] == 's')
2473 hash += canon_hash_string (XSTR (x, i));
2474 else if (fmt[i] == 'i')
2476 unsigned tem = XINT (x, i);
2479 else if (fmt[i] == '0' || fmt[i] == 't')
2488 /* Like canon_hash but with no side effects. */
2493 enum machine_mode mode;
2495 int save_do_not_record = do_not_record;
2496 int save_hash_arg_in_memory = hash_arg_in_memory;
2497 unsigned hash = canon_hash (x, mode);
2498 hash_arg_in_memory = save_hash_arg_in_memory;
2499 do_not_record = save_do_not_record;
2503 /* Return 1 iff X and Y would canonicalize into the same thing,
2504 without actually constructing the canonicalization of either one.
2505 If VALIDATE is nonzero,
2506 we assume X is an expression being processed from the rtl
2507 and Y was found in the hash table. We check register refs
2508 in Y for being marked as valid.
2510 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2511 that is known to be in the register. Ordinarily, we don't allow them
2512 to match, because letting them match would cause unpredictable results
2513 in all the places that search a hash table chain for an equivalent
2514 for a given value. A possible equivalent that has different structure
2515 has its hash code computed from different data. Whether the hash code
2516 is the same as that of the given value is pure luck. */
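/* For instance (hypothetical numbers): with EQUAL_VALUES nonzero, (reg 105)
   is allowed to match (const_int 3) when the quantity tables record that
   reg 105 currently holds 3; with EQUAL_VALUES zero the two never match,
   since their hash codes are computed from different data anyway.  */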
2519 exp_equiv_p (x, y, validate, equal_values)
2528 /* Note: it is incorrect to assume an expression is equivalent to itself
2529 if VALIDATE is nonzero. */
2530 if (x == y && !validate)
2532 if (x == 0 || y == 0)
2535 code = GET_CODE (x);
2536 if (code != GET_CODE (y))
2541 /* If X is a constant and Y is a register or vice versa, they may be
2542 equivalent. We only have to validate if Y is a register. */
2543 if (CONSTANT_P (x) && GET_CODE (y) == REG
2544 && REGNO_QTY_VALID_P (REGNO (y)))
2546 int y_q = REG_QTY (REGNO (y));
2547 struct qty_table_elem *y_ent = &qty_table[y_q];
2549 if (GET_MODE (y) == y_ent->mode
2550 && rtx_equal_p (x, y_ent->const_rtx)
2551 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2555 if (CONSTANT_P (y) && code == REG
2556 && REGNO_QTY_VALID_P (REGNO (x)))
2558 int x_q = REG_QTY (REGNO (x));
2559 struct qty_table_elem *x_ent = &qty_table[x_q];
2561 if (GET_MODE (x) == x_ent->mode
2562 && rtx_equal_p (y, x_ent->const_rtx))
2569 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2570 if (GET_MODE (x) != GET_MODE (y))
2581 return XEXP (x, 0) == XEXP (y, 0);
2584 return XSTR (x, 0) == XSTR (y, 0);
2588 unsigned int regno = REGNO (y);
2589 unsigned int endregno
2590 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2591 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2594 /* If the quantities are not the same, the expressions are not
2595 equivalent. If they are and we are not to validate, they
2596 are equivalent. Otherwise, ensure all regs are up-to-date. */
2598 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2604 for (i = regno; i < endregno; i++)
2605 if (REG_IN_TABLE (i) != REG_TICK (i))
2611 /* For commutative operations, check both orders. */
2619 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2620 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2621 validate, equal_values))
2622 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2623 validate, equal_values)
2624 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2625 validate, equal_values)));
2628 /* We don't use the generic code below because we want to
2629 disregard filename and line numbers. */
2631 /* A volatile asm isn't equivalent to any other. */
2632 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2635 if (GET_MODE (x) != GET_MODE (y)
2636 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2637 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2638 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2639 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2640 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2643 if (ASM_OPERANDS_INPUT_LENGTH (x))
2645 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2646 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2647 ASM_OPERANDS_INPUT (y, i),
2648 validate, equal_values)
2649 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2650 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2660 /* Compare the elements. If any pair of corresponding elements
2661 fail to match, return 0 for the whole thing. */
2663 fmt = GET_RTX_FORMAT (code);
2664 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2669 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2674 if (XVECLEN (x, i) != XVECLEN (y, i))
2676 for (j = 0; j < XVECLEN (x, i); j++)
2677 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2678 validate, equal_values))
2683 if (strcmp (XSTR (x, i), XSTR (y, i)))
2688 if (XINT (x, i) != XINT (y, i))
2693 if (XWINT (x, i) != XWINT (y, i))
2709 /* Return 1 if X has a value that can vary even between two
2710 executions of the program. 0 means X can be compared reliably
2711 against certain constants or near-constants. */
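/* Concretely, a register whose quantity entry records a constant value is
   treated as not varying here, as is such a register plus a constant or the
   sum of two such registers; anything not matched below is handed off to
   rtx_varies_p for the general answer.  */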
2714 cse_rtx_varies_p (x, from_alias)
2718 /* We need not check for X and the equivalence class being of the same
2719 mode because if X is equivalent to a constant in some mode, it
2720 doesn't vary in any mode. */
2722 if (GET_CODE (x) == REG
2723 && REGNO_QTY_VALID_P (REGNO (x)))
2725 int x_q = REG_QTY (REGNO (x));
2726 struct qty_table_elem *x_ent = &qty_table[x_q];
2728 if (GET_MODE (x) == x_ent->mode
2729 && x_ent->const_rtx != NULL_RTX)
2733 if (GET_CODE (x) == PLUS
2734 && GET_CODE (XEXP (x, 1)) == CONST_INT
2735 && GET_CODE (XEXP (x, 0)) == REG
2736 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2738 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2739 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2741 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2742 && x0_ent->const_rtx != NULL_RTX)
2746 /* This can happen as the result of virtual register instantiation, if
2747 the initial constant is too large to be a valid address. This gives
2748 us a three-instruction sequence: load the large offset into a register,
2749 load fp minus a constant into a register, then a MEM which is the
2750 sum of the two `constant' registers. */
2751 if (GET_CODE (x) == PLUS
2752 && GET_CODE (XEXP (x, 0)) == REG
2753 && GET_CODE (XEXP (x, 1)) == REG
2754 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2755 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2757 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2758 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2759 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2760 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2762 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2763 && x0_ent->const_rtx != NULL_RTX
2764 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2765 && x1_ent->const_rtx != NULL_RTX)
2769 return rtx_varies_p (x, from_alias);
2772 /* Canonicalize an expression:
2773 replace each register reference inside it
2774 with the "oldest" equivalent register.
2776 If INSN is non-zero and we are replacing a pseudo with a hard register
2777 or vice versa, validate_change is used to ensure that INSN remains valid
2778 after we make our substitution. The calls are made with IN_GROUP non-zero
2779 so apply_change_group must be called upon the outermost return from this
2780 function (unless INSN is zero). The result of apply_change_group can
2781 generally be discarded since the changes we are making are optional. */
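/* Small illustration (register numbers hypothetical): if (reg 131) and
   (reg 68) share a quantity and reg 68 is the first register on that
   quantity's chain, an occurrence of (reg 131) inside X is rewritten as
   (reg 68), so later lookups see one canonical register for the value.  */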
2795 code = GET_CODE (x);
2814 struct qty_table_elem *ent;
2816 /* Never replace a hard reg, because hard regs can appear
2817 in more than one machine mode, and we must preserve the mode
2818 of each occurrence. Also, some hard regs appear in
2819 MEMs that are shared and mustn't be altered. Don't try to
2820 replace any reg that maps to a reg of class NO_REGS. */
2821 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2822 || ! REGNO_QTY_VALID_P (REGNO (x)))
2825 q = REG_QTY (REGNO (x));
2826 ent = &qty_table[q];
2827 first = ent->first_reg;
2828 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2829 : REGNO_REG_CLASS (first) == NO_REGS ? x
2830 : gen_rtx_REG (ent->mode, first));
2837 fmt = GET_RTX_FORMAT (code);
2838 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2844 rtx new = canon_reg (XEXP (x, i), insn);
2847 /* If replacing pseudo with hard reg or vice versa, ensure the
2848 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2849 if (insn != 0 && new != 0
2850 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2851 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2852 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2853 || (insn_code = recog_memoized (insn)) < 0
2854 || insn_data[insn_code].n_dups > 0))
2855 validate_change (insn, &XEXP (x, i), new, 1);
2859 else if (fmt[i] == 'E')
2860 for (j = 0; j < XVECLEN (x, i); j++)
2861 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2867 /* LOC is a location within INSN that is an operand address (the contents of
2868 a MEM). Find the best equivalent address to use that is valid for this
2871 On most CISC machines, complicated address modes are costly, and rtx_cost
2872 is a good approximation for that cost. However, most RISC machines have
2873 only a few (usually only one) memory reference formats. If an address is
2874 valid at all, it is often just as cheap as any other address. Hence, for
2875 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2876 costs of various addresses. For two addresses of equal cost, choose the one
2877 with the highest `rtx_cost' value as that has the potential of eliminating
2878 the most insns. For equal costs, we choose the first in the equivalence
2879 class. Note that we ignore the fact that pseudo registers are cheaper
2880 than hard registers here because we would also prefer the pseudo registers.
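/* Illustrative scenario: if *LOC is (plus (reg 200) (const_int 4)) and the
   table records (reg 210) as an equivalent address, validate_change is used
   to try the alternative that ADDRESS_COST rates cheaper; when ADDRESS_COST
   ties, the alternative with the larger rtx_cost is preferred, since
   substituting it has more potential to eliminate insns.  */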
2884 find_best_addr (insn, loc, mode)
2887 enum machine_mode mode;
2889 struct table_elt *elt;
2892 struct table_elt *p;
2893 int found_better = 1;
2895 int save_do_not_record = do_not_record;
2896 int save_hash_arg_in_memory = hash_arg_in_memory;
2901 /* Do not try to replace constant addresses or addresses of local and
2902 argument slots. These MEM expressions are made only once and inserted
2903 in many instructions, as well as being used to control symbol table
2904 output. It is not safe to clobber them.
2906 There are some uncommon cases where the address is already in a register
2907 for some reason, but we cannot take advantage of that because we have
2908 no easy way to unshare the MEM. In addition, looking up all stack
2909 addresses is costly. */
2910 if ((GET_CODE (addr) == PLUS
2911 && GET_CODE (XEXP (addr, 0)) == REG
2912 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2913 && (regno = REGNO (XEXP (addr, 0)),
2914 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2915 || regno == ARG_POINTER_REGNUM))
2916 || (GET_CODE (addr) == REG
2917 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2918 || regno == HARD_FRAME_POINTER_REGNUM
2919 || regno == ARG_POINTER_REGNUM))
2920 || GET_CODE (addr) == ADDRESSOF
2921 || CONSTANT_ADDRESS_P (addr))
2924 /* If this address is not simply a register, try to fold it. This will
2925 sometimes simplify the expression. Many simplifications
2926 will not be valid, but some, usually applying the associative rule, will
2927 be valid and produce better code. */
2928 if (GET_CODE (addr) != REG)
2930 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2931 int addr_folded_cost = address_cost (folded, mode);
2932 int addr_cost = address_cost (addr, mode);
2934 if ((addr_folded_cost < addr_cost
2935 || (addr_folded_cost == addr_cost
2936 /* ??? The rtx_cost comparison is left over from an older
2937 version of this code. It is probably no longer helpful. */
2938 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2939 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2940 && validate_change (insn, loc, folded, 0))
2944 /* If this address is not in the hash table, we can't look for equivalences
2945 of the whole address. Also, ignore if volatile. */
2948 hash = HASH (addr, Pmode);
2949 addr_volatile = do_not_record;
2950 do_not_record = save_do_not_record;
2951 hash_arg_in_memory = save_hash_arg_in_memory;
2956 elt = lookup (addr, hash, Pmode);
2958 #ifndef ADDRESS_COST
2961 int our_cost = elt->cost;
2963 /* Find the lowest cost below ours that works. */
2964 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2965 if (elt->cost < our_cost
2966 && (GET_CODE (elt->exp) == REG
2967 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2968 && validate_change (insn, loc,
2969 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2976 /* We need to find the best (under the criteria documented above) entry
2977 in the class that is valid. We use the `flag' field to indicate
2978 choices that were invalid and iterate until we can't find a better
2979 one that hasn't already been tried. */
2981 for (p = elt->first_same_value; p; p = p->next_same_value)
2984 while (found_better)
2986 int best_addr_cost = address_cost (*loc, mode);
2987 int best_rtx_cost = (elt->cost + 1) >> 1;
2989 struct table_elt *best_elt = elt;
2992 for (p = elt->first_same_value; p; p = p->next_same_value)
2995 if ((GET_CODE (p->exp) == REG
2996 || exp_equiv_p (p->exp, p->exp, 1, 0))
2997 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2998 || (exp_cost == best_addr_cost
2999 && ((p->cost + 1) >> 1) > best_rtx_cost)))
3002 best_addr_cost = exp_cost;
3003 best_rtx_cost = (p->cost + 1) >> 1;
3010 if (validate_change (insn, loc,
3011 canon_reg (copy_rtx (best_elt->exp),
3020 /* If the address is a binary operation with the first operand a register
3021 and the second a constant, do the same as above, but looking for
3022 equivalences of the register. Then try to simplify before checking for
3023 the best address to use. This catches a few cases: First is when we
3024 have REG+const and the register is equivalent to another REG+const. We can often merge
3025 the constants and eliminate one insn and one register. It may also be
3026 that a machine has a cheap REG+REG+const. Finally, this improves the
3027 code on the Alpha for unaligned byte stores. */
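/* Worked example (register numbers hypothetical): if *LOC is
   (plus (reg 120) (const_int 8)) and the table knows reg 120 is equivalent
   to (plus (reg 115) (const_int 16)), simplify_gen_binary can fold the pair
   to (plus (reg 115) (const_int 24)), which may be a cheaper address and can
   make reg 120 dead.  */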
3029 if (flag_expensive_optimizations
3030 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
3031 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
3032 && GET_CODE (XEXP (*loc, 0)) == REG
3033 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
3035 rtx c = XEXP (*loc, 1);
3038 hash = HASH (XEXP (*loc, 0), Pmode);
3039 do_not_record = save_do_not_record;
3040 hash_arg_in_memory = save_hash_arg_in_memory;
3042 elt = lookup (XEXP (*loc, 0), hash, Pmode);
3046 /* We need to find the best (under the criteria documented above) entry
3047 in the class that is valid. We use the `flag' field to indicate
3048 choices that were invalid and iterate until we can't find a better
3049 one that hasn't already been tried. */
3051 for (p = elt->first_same_value; p; p = p->next_same_value)
3054 while (found_better)
3056 int best_addr_cost = address_cost (*loc, mode);
3057 int best_rtx_cost = (COST (*loc) + 1) >> 1;
3058 struct table_elt *best_elt = elt;
3059 rtx best_rtx = *loc;
3062 /* This is at worst an O(n^2) algorithm, so limit our search
3063 to the first 32 elements on the list. This avoids trouble
3064 compiling code with very long basic blocks that can easily
3065 call simplify_gen_binary so many times that we run out of
3069 for (p = elt->first_same_value, count = 0;
3071 p = p->next_same_value, count++)
3073 && (GET_CODE (p->exp) == REG
3074 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3076 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3079 new_cost = address_cost (new, mode);
3081 if (new_cost < best_addr_cost
3082 || (new_cost == best_addr_cost
3083 && (COST (new) + 1) >> 1 > best_rtx_cost))
3086 best_addr_cost = new_cost;
3087 best_rtx_cost = (COST (new) + 1) >> 1;
3095 if (validate_change (insn, loc,
3096 canon_reg (copy_rtx (best_rtx),
3107 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3108 operation (EQ, NE, GT, etc.), follow it back through the hash table and
3109 find what values are being compared.
3111 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3112 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3113 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3114 compared to produce cc0.
3116 The return value is the comparison operator and is either the code of
3117 A or the code corresponding to the inverse of the comparison. */
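/* Sketch of the common cc0 case: for (gt (cc0) (const_int 0)), if the table
   records that cc0 was set from (compare (reg 100) (reg 101)), *PARG1 and
   *PARG2 become (reg 100) and (reg 101) and GT is returned, letting the
   caller reason about the real operands instead of cc0.  */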
3119 static enum rtx_code
3120 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
3123 enum machine_mode *pmode1, *pmode2;
3127 arg1 = *parg1, arg2 = *parg2;
3129 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3131 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3133 /* Set non-zero when we find something of interest. */
3135 int reverse_code = 0;
3136 struct table_elt *p = 0;
3138 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3139 On machines with CC0, this is the only case that can occur, since
3140 fold_rtx will return the COMPARE or item being compared with zero
3143 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3146 /* If ARG1 is a comparison operator and CODE is testing for
3147 STORE_FLAG_VALUE, get the inner arguments. */
3149 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3152 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3153 && code == LT && STORE_FLAG_VALUE == -1)
3154 #ifdef FLOAT_STORE_FLAG_VALUE
3155 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3156 && (REAL_VALUE_NEGATIVE
3157 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3162 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3163 && code == GE && STORE_FLAG_VALUE == -1)
3164 #ifdef FLOAT_STORE_FLAG_VALUE
3165 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3166 && (REAL_VALUE_NEGATIVE
3167 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3170 x = arg1, reverse_code = 1;
3173 /* ??? We could also check for
3175 (ne (and (eq (...) (const_int 1))) (const_int 0))
3177 and related forms, but let's wait until we see them occurring. */
3180 /* Look up ARG1 in the hash table and see if it has an equivalence
3181 that lets us see what is being compared. */
3182 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3186 p = p->first_same_value;
3188 /* If what we compare is already known to be constant, that is as
3190 We need to break the loop in this case, because otherwise we
3191 can have an infinite loop when looking at a reg that is known
3192 to be a constant which is the same as a comparison of a reg
3193 against zero which appears later in the insn stream, which in
3194 turn is constant and the same as the comparison of the first reg
3200 for (; p; p = p->next_same_value)
3202 enum machine_mode inner_mode = GET_MODE (p->exp);
3204 /* If the entry isn't valid, skip it. */
3205 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3208 if (GET_CODE (p->exp) == COMPARE
3209 /* Another possibility is that this machine has a compare insn
3210 that includes the comparison code. In that case, ARG1 would
3211 be equivalent to a comparison operation that would set ARG1 to
3212 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3213 ORIG_CODE is the actual comparison being done; if it is an EQ,
3214 we must reverse ORIG_CODE. On machine with a negative value
3215 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3218 && GET_MODE_CLASS (inner_mode) == MODE_INT
3219 && (GET_MODE_BITSIZE (inner_mode)
3220 <= HOST_BITS_PER_WIDE_INT)
3221 && (STORE_FLAG_VALUE
3222 & ((HOST_WIDE_INT) 1
3223 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3224 #ifdef FLOAT_STORE_FLAG_VALUE
3226 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3227 && (REAL_VALUE_NEGATIVE
3228 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3231 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3236 else if ((code == EQ
3238 && GET_MODE_CLASS (inner_mode) == MODE_INT
3239 && (GET_MODE_BITSIZE (inner_mode)
3240 <= HOST_BITS_PER_WIDE_INT)
3241 && (STORE_FLAG_VALUE
3242 & ((HOST_WIDE_INT) 1
3243 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3244 #ifdef FLOAT_STORE_FLAG_VALUE
3246 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3247 && (REAL_VALUE_NEGATIVE
3248 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3251 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3258 /* If this is fp + constant, the equivalent is a better operand since
3259 it may let us predict the value of the comparison. */
3260 else if (NONZERO_BASE_PLUS_P (p->exp))
3267 /* If we didn't find a useful equivalence for ARG1, we are done.
3268 Otherwise, set up for the next iteration. */
3272 /* If we need to reverse the comparison, make sure that that is
3273 possible -- we can't necessarily infer the value of GE from LT
3274 with floating-point operands. */
3277 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3278 if (reversed == UNKNOWN)
3283 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3284 code = GET_CODE (x);
3285 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3288 /* Return our results. Return the modes from before fold_rtx
3289 because fold_rtx might produce const_int, and then it's too late. */
3290 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3291 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3296 /* If X is a nontrivial arithmetic operation on an argument
3297 for which a constant value can be determined, return
3298 the result of operating on that value, as a constant.
3299 Otherwise, return X, possibly with one or more operands
3300 modified by recursive calls to this function.
3302 If X is a register whose contents are known, we do NOT
3303 return those contents here. equiv_constant is called to
3306 INSN is the insn that we may be modifying. If it is 0, make a copy
3307 of X before modifying it. */
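/* Two hedged examples of what the cases below can do: folding
   (plus:SI (reg 90) (const_int 4)) when reg 90 is known to hold (const_int 8)
   can yield (const_int 12); folding a MEM whose address points into the
   constant pool can yield the pooled constant itself, enabling further
   simplification in the callers.  */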
3315 enum machine_mode mode;
3322 /* Folded equivalents of first two operands of X. */
3326 /* Constant equivalents of first three operands of X;
3327 0 when no such equivalent is known. */
3332 /* The mode of the first operand of X. We need this for sign and zero
3334 enum machine_mode mode_arg0;
3339 mode = GET_MODE (x);
3340 code = GET_CODE (x);
3350 /* No use simplifying an EXPR_LIST
3351 since they are used only for lists of args
3352 in a function call's REG_EQUAL note. */
3354 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3355 want to (e.g.,) make (addressof (const_int 0)) just because
3356 the location is known to be zero. */
3362 return prev_insn_cc0;
3366 /* If the next insn is a CODE_LABEL followed by a jump table,
3367 PC's value is a LABEL_REF pointing to that label. That
3368 lets us fold switch statements on the VAX. */
3369 if (insn && GET_CODE (insn) == JUMP_INSN)
3371 rtx next = next_nonnote_insn (insn);
3373 if (next && GET_CODE (next) == CODE_LABEL
3374 && NEXT_INSN (next) != 0
3375 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3376 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3377 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3378 return gen_rtx_LABEL_REF (Pmode, next);
3383 /* See if we previously assigned a constant value to this SUBREG. */
3384 if ((new = lookup_as_function (x, CONST_INT)) != 0
3385 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3388 /* If this is a paradoxical SUBREG, we have no idea what value the
3389 extra bits would have. However, if the operand is equivalent
3390 to a SUBREG whose operand is the same as our mode, and all the
3391 modes are within a word, we can just use the inner operand
3392 because these SUBREGs just say how to treat the register.
3394 Similarly if we find an integer constant. */
3396 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3398 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3399 struct table_elt *elt;
3401 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3402 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3403 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3405 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3407 if (CONSTANT_P (elt->exp)
3408 && GET_MODE (elt->exp) == VOIDmode)
3411 if (GET_CODE (elt->exp) == SUBREG
3412 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3413 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3414 return copy_rtx (SUBREG_REG (elt->exp));
3420 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3421 We might be able to if the SUBREG is extracting a single word in an
3422 integral mode or extracting the low part. */
3424 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3425 const_arg0 = equiv_constant (folded_arg0);
3427 folded_arg0 = const_arg0;
3429 if (folded_arg0 != SUBREG_REG (x))
3431 new = simplify_subreg (mode, folded_arg0,
3432 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3437 /* If this is a narrowing SUBREG and our operand is a REG, see if
3438 we can find an equivalence for REG that is an arithmetic operation
3439 in a wider mode where both operands are paradoxical SUBREGs
3440 from objects of our result mode. In that case, we couldn't report
3441 an equivalent value for that operation, since we don't know what the
3442 extra bits will be. But we can find an equivalence for this SUBREG
3443 by folding that operation in the narrow mode. This allows us to
3444 fold arithmetic in narrow modes when the machine only supports
3445 word-sized arithmetic.
3447 Also look for a case where we have a SUBREG whose operand is the
3448 same as our result. If both modes are smaller than a word, we
3449 are simply interpreting a register in different modes and we
3450 can use the inner value. */
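/* Illustrative case: for (subreg:HI (reg:SI 100) 0), if reg 100 is recorded
   as (plus:SI (subreg:SI (reg:HI 101) 0) (const_int 5)) and reg 101 is known
   to hold (const_int 7), refolding the PLUS in HImode gives (const_int 12),
   even though no SImode equivalence could be recorded for the wider
   operation.  */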
3452 if (GET_CODE (folded_arg0) == REG
3453 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3454 && subreg_lowpart_p (x))
3456 struct table_elt *elt;
3458 /* We can use HASH here since we know that canon_hash won't be
3460 elt = lookup (folded_arg0,
3461 HASH (folded_arg0, GET_MODE (folded_arg0)),
3462 GET_MODE (folded_arg0));
3465 elt = elt->first_same_value;
3467 for (; elt; elt = elt->next_same_value)
3469 enum rtx_code eltcode = GET_CODE (elt->exp);
3471 /* Just check for unary and binary operations. */
3472 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3473 && GET_CODE (elt->exp) != SIGN_EXTEND
3474 && GET_CODE (elt->exp) != ZERO_EXTEND
3475 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3476 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
3478 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3480 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3481 op0 = fold_rtx (op0, NULL_RTX);
3483 op0 = equiv_constant (op0);
3485 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3488 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3489 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3490 && eltcode != DIV && eltcode != MOD
3491 && eltcode != UDIV && eltcode != UMOD
3492 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3493 && eltcode != ROTATE && eltcode != ROTATERT
3494 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3495 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3497 || CONSTANT_P (XEXP (elt->exp, 0)))
3498 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3499 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3501 || CONSTANT_P (XEXP (elt->exp, 1))))
3503 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3504 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3506 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3507 op0 = fold_rtx (op0, NULL_RTX);
3510 op0 = equiv_constant (op0);
3512 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3513 op1 = fold_rtx (op1, NULL_RTX);
3516 op1 = equiv_constant (op1);
3518 /* If we are looking for the low SImode part of
3519 (ashift:DI c (const_int 32)), it doesn't work
3520 to compute that in SImode, because a 32-bit shift
3521 in SImode is unpredictable. We know the value is 0. */
3523 && GET_CODE (elt->exp) == ASHIFT
3524 && GET_CODE (op1) == CONST_INT
3525 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3527 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3529 /* If the count fits in the inner mode's width,
3530 but exceeds the outer mode's width,
3531 the value will get truncated to 0
3535 /* If the count exceeds even the inner mode's width,
3536 don't fold this expression. */
3539 else if (op0 && op1)
3540 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3544 else if (GET_CODE (elt->exp) == SUBREG
3545 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3546 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3548 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3549 new = copy_rtx (SUBREG_REG (elt->exp));
3560 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3561 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3562 new = lookup_as_function (XEXP (x, 0), code);
3564 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3568 /* If we are not actually processing an insn, don't try to find the
3569 best address. Not only don't we care, but we could modify the
3570 MEM in an invalid way since we have no insn to validate against. */
3572 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3575 /* Even if we don't fold in the insn itself,
3576 we can safely do so here, in hopes of getting a constant. */
3577 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3579 HOST_WIDE_INT offset = 0;
3581 if (GET_CODE (addr) == REG
3582 && REGNO_QTY_VALID_P (REGNO (addr)))
3584 int addr_q = REG_QTY (REGNO (addr));
3585 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3587 if (GET_MODE (addr) == addr_ent->mode
3588 && addr_ent->const_rtx != NULL_RTX)
3589 addr = addr_ent->const_rtx;
3592 /* If address is constant, split it into a base and integer offset. */
3593 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3595 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3596 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3598 base = XEXP (XEXP (addr, 0), 0);
3599 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3601 else if (GET_CODE (addr) == LO_SUM
3602 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3603 base = XEXP (addr, 1);
3604 else if (GET_CODE (addr) == ADDRESSOF)
3605 return change_address (x, VOIDmode, addr);
3607 /* If this is a constant pool reference, we can fold it into its
3608 constant to allow better value tracking. */
3609 if (base && GET_CODE (base) == SYMBOL_REF
3610 && CONSTANT_POOL_ADDRESS_P (base))
3612 rtx constant = get_pool_constant (base);
3613 enum machine_mode const_mode = get_pool_mode (base);
3616 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3617 constant_pool_entries_cost = COST (constant);
3619 /* If we are loading the full constant, we have an equivalence. */
3620 if (offset == 0 && mode == const_mode)
3623 /* If this actually isn't a constant (weird!), we can't do
3624 anything. Otherwise, handle the two most common cases:
3625 extracting a word from a multi-word constant, and extracting
3626 the low-order bits. Other cases don't seem common enough to
3628 if (! CONSTANT_P (constant))
3631 if (GET_MODE_CLASS (mode) == MODE_INT
3632 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3633 && offset % UNITS_PER_WORD == 0
3634 && (new = operand_subword (constant,
3635 offset / UNITS_PER_WORD,
3636 0, const_mode)) != 0)
3639 if (((BYTES_BIG_ENDIAN
3640 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3641 || (! BYTES_BIG_ENDIAN && offset == 0))
3642 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
3646 /* If this is a reference to a label at a known position in a jump
3647 table, we also know its value. */
3648 if (base && GET_CODE (base) == LABEL_REF)
3650 rtx label = XEXP (base, 0);
3651 rtx table_insn = NEXT_INSN (label);
3653 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3654 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3656 rtx table = PATTERN (table_insn);
3659 && (offset / GET_MODE_SIZE (GET_MODE (table))
3660 < XVECLEN (table, 0)))
3661 return XVECEXP (table, 0,
3662 offset / GET_MODE_SIZE (GET_MODE (table)));
3664 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3665 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3667 rtx table = PATTERN (table_insn);
3670 && (offset / GET_MODE_SIZE (GET_MODE (table))
3671 < XVECLEN (table, 1)))
3673 offset /= GET_MODE_SIZE (GET_MODE (table));
3674 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3677 if (GET_MODE (table) != Pmode)
3678 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3680 /* Indicate this is a constant. This isn't a
3681 valid form of CONST, but it will only be used
3682 to fold the next insns and then discarded, so
3685 Note this expression must be explicitly discarded,
3686 by cse_insn, else it may end up in a REG_EQUAL note
3687 and "escape" to cause problems elsewhere. */
3688 return gen_rtx_CONST (GET_MODE (new), new);
3696 #ifdef NO_FUNCTION_CSE
3698 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3704 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3705 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3706 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3716 mode_arg0 = VOIDmode;
3718 /* Try folding our operands.
3719 Then see which ones have constant values known. */
3721 fmt = GET_RTX_FORMAT (code);
3722 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3725 rtx arg = XEXP (x, i);
3726 rtx folded_arg = arg, const_arg = 0;
3727 enum machine_mode mode_arg = GET_MODE (arg);
3728 rtx cheap_arg, expensive_arg;
3729 rtx replacements[2];
3732 /* Most arguments are cheap, so handle them specially. */
3733 switch (GET_CODE (arg))
3736 /* This is the same as calling equiv_constant; it is duplicated
3738 if (REGNO_QTY_VALID_P (REGNO (arg)))
3740 int arg_q = REG_QTY (REGNO (arg));
3741 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3743 if (arg_ent->const_rtx != NULL_RTX
3744 && GET_CODE (arg_ent->const_rtx) != REG
3745 && GET_CODE (arg_ent->const_rtx) != PLUS)
3747 = gen_lowpart_if_possible (GET_MODE (arg),
3748 arg_ent->const_rtx);
3763 folded_arg = prev_insn_cc0;
3764 mode_arg = prev_insn_cc0_mode;
3765 const_arg = equiv_constant (folded_arg);
3770 folded_arg = fold_rtx (arg, insn);
3771 const_arg = equiv_constant (folded_arg);
3774 /* For the first three operands, see if the operand
3775 is constant or equivalent to a constant. */
3779 folded_arg0 = folded_arg;
3780 const_arg0 = const_arg;
3781 mode_arg0 = mode_arg;
3784 folded_arg1 = folded_arg;
3785 const_arg1 = const_arg;
3788 const_arg2 = const_arg;
3792 /* Pick the least expensive of the folded argument and an
3793 equivalent constant argument. */
3794 if (const_arg == 0 || const_arg == folded_arg
3795 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3796 cheap_arg = folded_arg, expensive_arg = const_arg;
3798 cheap_arg = const_arg, expensive_arg = folded_arg;
3800 /* Try to replace the operand with the cheapest of the two
3801 possibilities. If it doesn't work and this is either of the first
3802 two operands of a commutative operation, try swapping them.
3803 If THAT fails, try the more expensive, provided it is cheaper
3804 than what is already there. */
3806 if (cheap_arg == XEXP (x, i))
3809 if (insn == 0 && ! copied)
3815 /* Order the replacements from cheapest to most expensive. */
3816 replacements[0] = cheap_arg;
3817 replacements[1] = expensive_arg;
3819 for (j = 0; j < 2 && replacements[j]; j++)
3821 int old_cost = COST_IN (XEXP (x, i), code);
3822 int new_cost = COST_IN (replacements[j], code);
3824 /* Stop if what existed before was cheaper. Prefer constants
3825 in the case of a tie. */
3826 if (new_cost > old_cost
3827 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3830 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3833 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3834 || code == LTGT || code == UNEQ || code == ORDERED
3835 || code == UNORDERED)
3837 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3838 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3840 if (apply_change_group ())
3842 /* Swap them back to be invalid so that this loop can
3843 continue and flag them to be swapped back later. */
3846 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3858 /* Don't try to fold inside of a vector of expressions.
3859 Doing nothing is harmless. */
3863 /* If a commutative operation, place a constant integer as the second
3864 operand unless the first operand is also a constant integer. Otherwise,
3865 place any constant second unless the first operand is also a constant. */
3867 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3868 || code == LTGT || code == UNEQ || code == ORDERED
3869 || code == UNORDERED)
3871 if (must_swap || (const_arg0
3873 || (GET_CODE (const_arg0) == CONST_INT
3874 && GET_CODE (const_arg1) != CONST_INT))))
3876 rtx tem = XEXP (x, 0);
3878 if (insn == 0 && ! copied)
3884 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3885 validate_change (insn, &XEXP (x, 1), tem, 1);
3886 if (apply_change_group ())
3888 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3889 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3894 /* If X is an arithmetic operation, see if we can simplify it. */
3896 switch (GET_RTX_CLASS (code))
3902 /* We can't simplify extension ops unless we know the
3904 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3905 && mode_arg0 == VOIDmode)
3908 /* If we had a CONST, strip it off and put it back later if we
3910 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3911 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3913 new = simplify_unary_operation (code, mode,
3914 const_arg0 ? const_arg0 : folded_arg0,
3916 if (new != 0 && is_const)
3917 new = gen_rtx_CONST (mode, new);
3922 /* See what items are actually being compared and set FOLDED_ARG[01]
3923 to those values and CODE to the actual comparison code. If any are
3924 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3925 do anything if both operands are already known to be constant. */
3927 if (const_arg0 == 0 || const_arg1 == 0)
3929 struct table_elt *p0, *p1;
3930 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3931 enum machine_mode mode_arg1;
3933 #ifdef FLOAT_STORE_FLAG_VALUE
3934 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3936 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3937 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3938 false_rtx = CONST0_RTX (mode);
3942 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3943 &mode_arg0, &mode_arg1);
3944 const_arg0 = equiv_constant (folded_arg0);
3945 const_arg1 = equiv_constant (folded_arg1);
3947 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3948 what kinds of things are being compared, so we can't do
3949 anything with this comparison. */
3951 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3954 /* If we do not now have two constants being compared, see
3955 if we can nevertheless deduce some things about the
3957 if (const_arg0 == 0 || const_arg1 == 0)
3959 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
3960 non-explicit constant? These aren't zero, but we
3961 don't know their sign. */
3962 if (const_arg1 == const0_rtx
3963 && (NONZERO_BASE_PLUS_P (folded_arg0)
3964 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
3966 || GET_CODE (folded_arg0) == SYMBOL_REF
3968 || GET_CODE (folded_arg0) == LABEL_REF
3969 || GET_CODE (folded_arg0) == CONST))
3973 else if (code == NE)
3977 /* See if the two operands are the same. */
3979 if (folded_arg0 == folded_arg1
3980 || (GET_CODE (folded_arg0) == REG
3981 && GET_CODE (folded_arg1) == REG
3982 && (REG_QTY (REGNO (folded_arg0))
3983 == REG_QTY (REGNO (folded_arg1))))
3984 || ((p0 = lookup (folded_arg0,
3985 (safe_hash (folded_arg0, mode_arg0)
3986 & HASH_MASK), mode_arg0))
3987 && (p1 = lookup (folded_arg1,
3988 (safe_hash (folded_arg1, mode_arg0)
3989 & HASH_MASK), mode_arg0))
3990 && p0->first_same_value == p1->first_same_value))
3992 /* Sadly two equal NaNs are not equivalent. */
3993 if (!HONOR_NANS (mode_arg0))
3994 return ((code == EQ || code == LE || code == GE
3995 || code == LEU || code == GEU || code == UNEQ
3996 || code == UNLE || code == UNGE
3998 ? true_rtx : false_rtx);
3999 /* Take care for the FP compares we can resolve. */
4000 if (code == UNEQ || code == UNLE || code == UNGE)
4002 if (code == LTGT || code == LT || code == GT)
4006 /* If FOLDED_ARG0 is a register, see if the comparison we are
4007 doing now is either the same as we did before or the reverse
4008 (we only check the reverse if not floating-point). */
4009 else if (GET_CODE (folded_arg0) == REG)
4011 int qty = REG_QTY (REGNO (folded_arg0));
4013 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
4015 struct qty_table_elem *ent = &qty_table[qty];
4017 if ((comparison_dominates_p (ent->comparison_code, code)
4018 || (! FLOAT_MODE_P (mode_arg0)
4019 && comparison_dominates_p (ent->comparison_code,
4020 reverse_condition (code))))
4021 && (rtx_equal_p (ent->comparison_const, folded_arg1)
4023 && rtx_equal_p (ent->comparison_const,
4025 || (GET_CODE (folded_arg1) == REG
4026 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4027 return (comparison_dominates_p (ent->comparison_code, code)
4028 ? true_rtx : false_rtx);
4034 /* If we are comparing against zero, see if the first operand is
4035 equivalent to an IOR with a constant. If so, we may be able to
4036 determine the result of this comparison. */
4038 if (const_arg1 == const0_rtx)
4040 rtx y = lookup_as_function (folded_arg0, IOR);
4044 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4045 && GET_CODE (inner_const) == CONST_INT
4046 && INTVAL (inner_const) != 0)
4048 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4049 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4050 && (INTVAL (inner_const)
4051 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4052 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4054 #ifdef FLOAT_STORE_FLAG_VALUE
4055 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4057 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4058 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4059 false_rtx = CONST0_RTX (mode);
4083 new = simplify_relational_operation (code,
4084 (mode_arg0 != VOIDmode
4086 : (GET_MODE (const_arg0
4090 ? GET_MODE (const_arg0
4093 : GET_MODE (const_arg1
4096 const_arg0 ? const_arg0 : folded_arg0,
4097 const_arg1 ? const_arg1 : folded_arg1);
4098 #ifdef FLOAT_STORE_FLAG_VALUE
4099 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4101 if (new == const0_rtx)
4102 new = CONST0_RTX (mode);
4104 new = (CONST_DOUBLE_FROM_REAL_VALUE
4105 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4115 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4116 with that LABEL_REF as its second operand. If so, the result is
4117 the first operand of that MINUS. This handles switches with an
4118 ADDR_DIFF_VEC table. */
4119 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4122 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4123 : lookup_as_function (folded_arg0, MINUS);
4125 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4126 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4129 /* Now try for a CONST of a MINUS like the above. */
4130 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4131 : lookup_as_function (folded_arg0, CONST))) != 0
4132 && GET_CODE (XEXP (y, 0)) == MINUS
4133 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4134 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4135 return XEXP (XEXP (y, 0), 0);
4138 /* Likewise if the operands are in the other order. */
4139 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4142 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4143 : lookup_as_function (folded_arg1, MINUS);
4145 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4146 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4149 /* Now try for a CONST of a MINUS like the above. */
4150 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4151 : lookup_as_function (folded_arg1, CONST))) != 0
4152 && GET_CODE (XEXP (y, 0)) == MINUS
4153 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4154 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4155 return XEXP (XEXP (y, 0), 0);
4158 /* If second operand is a register equivalent to a negative
4159 CONST_INT, see if we can find a register equivalent to the
4160 positive constant. Make a MINUS if so. Don't do this for
4161 a non-negative constant since we might then alternate between
4162 choosing positive and negative constants. Having the positive
4163 constant previously-used is the more common case. Be sure
4164 the resulting constant is non-negative; if const_arg1 were
4165 the smallest negative number this would overflow: depending
4166 on the mode, this would either just be the same value (and
4167 hence not save anything) or be incorrect. */
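/* For example (hypothetical numbers): folding (plus:SI (reg 80) (const_int -12))
   when another register is already known to hold (const_int 12) yields
   (minus:SI (reg 80) <that register>), reusing the previously seen positive
   constant instead of materializing -12.  */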
4168 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4169 && INTVAL (const_arg1) < 0
4170 /* This used to test
4172 -INTVAL (const_arg1) >= 0
4174 But the Sun V5.0 compilers mis-compiled that test. So
4175 instead we test for the problematic value in a more direct
4176 manner and hope the Sun compilers get it correct. */
4177 && INTVAL (const_arg1) !=
4178 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4179 && GET_CODE (folded_arg1) == REG)
4181 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4183 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4187 for (p = p->first_same_value; p; p = p->next_same_value)
4188 if (GET_CODE (p->exp) == REG)
4189 return simplify_gen_binary (MINUS, mode, folded_arg0,
4190 canon_reg (p->exp, NULL_RTX));
4195 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4196 If so, produce (PLUS Z C2-C). */
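/* Illustrative example: if Y is known to be (plus (reg Z) (const_int 10))
   and const_arg1 is (const_int 4), the expression (minus Y (const_int 4))
   folds to (plus (reg Z) (const_int 6)).  */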
4197 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4199 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4200 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4201 return fold_rtx (plus_constant (copy_rtx (y),
4202 -INTVAL (const_arg1)),
4209 case SMIN: case SMAX: case UMIN: case UMAX:
4210 case IOR: case AND: case XOR:
4211 case MULT: case DIV: case UDIV:
4212 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4213 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4214 is known to be of similar form, we may be able to replace the
4215 operation with a combined operation. This may eliminate the
4216 intermediate operation if every use is simplified in this way.
4217 Note that the similar optimization done by combine.c only works
4218 if the intermediate operation's result has only one reference. */
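/* Illustrative example: if folded_arg0 is a register known to hold
   (ashift (reg Y) (const_int 3)) and we are folding
   (ashift (reg X) (const_int 2)), the shift counts are combined and the
   result becomes (ashift (reg Y) (const_int 5)); likewise two added
   constants are summed and two multiplied constants are multiplied.  */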
4220 if (GET_CODE (folded_arg0) == REG
4221 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4224 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4225 rtx y = lookup_as_function (folded_arg0, code);
4227 enum rtx_code associate_code;
4231 || 0 == (inner_const
4232 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4233 || GET_CODE (inner_const) != CONST_INT
4234 /* If we have compiled a statement like
4235 "if (x == (x & mask1))", and now are looking at
4236 "x & mask2", we will have a case where the first operand
4237 of Y is the same as our first operand. Unless we detect
4238 this case, an infinite loop will result. */
4239 || XEXP (y, 0) == folded_arg0)
4242 /* Don't associate these operations if they are a PLUS with the
4243 same constant and it is a power of two. These might be doable
4244 with a pre- or post-increment. Similarly for two subtracts of
4245 identical powers of two with post decrement. */
4247 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
4248 && ((HAVE_PRE_INCREMENT
4249 && exact_log2 (INTVAL (const_arg1)) >= 0)
4250 || (HAVE_POST_INCREMENT
4251 && exact_log2 (INTVAL (const_arg1)) >= 0)
4252 || (HAVE_PRE_DECREMENT
4253 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4254 || (HAVE_POST_DECREMENT
4255 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4258 /* Compute the code used to compose the constants. For example,
4259 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
4262 = (code == MULT || code == DIV || code == UDIV ? MULT
4263 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
4265 new_const = simplify_binary_operation (associate_code, mode,
4266 const_arg1, inner_const);
4271 /* If we are associating shift operations, don't let this
4272 produce a shift of the size of the object or larger.
4273 This could occur when we follow a sign-extend by a right
4274 shift on a machine that does a sign-extend as a pair of shifts. */
4277 if (is_shift && GET_CODE (new_const) == CONST_INT
4278 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4280 /* As an exception, we can turn an ASHIFTRT of this
4281 form into a shift of the number of bits - 1. */
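/* Illustrative example: in SImode, combining two ASHIFTRT operations
   with counts 20 and 20 would give a count of 40 >= 32; for ASHIFTRT
   the count can be clamped to 31 (bits - 1), which still yields the
   all-sign-bits result, so the association remains correct.  */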
4282 if (code == ASHIFTRT)
4283 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4288 y = copy_rtx (XEXP (y, 0));
4290 /* If Y contains our first operand (the most common way this
4291 can happen is if Y is a MEM), we would go into an infinite
4292 loop if we tried to fold it. So don't in that case. */
4294 if (! reg_mentioned_p (folded_arg0, y))
4295 y = fold_rtx (y, insn);
4297 return simplify_gen_binary (code, mode, y, new_const);
4305 new = simplify_binary_operation (code, mode,
4306 const_arg0 ? const_arg0 : folded_arg0,
4307 const_arg1 ? const_arg1 : folded_arg1);
4311 /* (lo_sum (high X) X) is simply X. */
4312 if (code == LO_SUM && const_arg0 != 0
4313 && GET_CODE (const_arg0) == HIGH
4314 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4320 new = simplify_ternary_operation (code, mode, mode_arg0,
4321 const_arg0 ? const_arg0 : folded_arg0,
4322 const_arg1 ? const_arg1 : folded_arg1,
4323 const_arg2 ? const_arg2 : XEXP (x, 2));
4327 /* Always eliminate CONSTANT_P_RTX at this stage. */
4328 if (code == CONSTANT_P_RTX)
4329 return (const_arg0 ? const1_rtx : const0_rtx);
4333 return new ? new : x;
4336 /* Return a constant value currently equivalent to X.
4337 Return 0 if we don't know one. */
4343 if (GET_CODE (x) == REG
4344 && REGNO_QTY_VALID_P (REGNO (x)))
4346 int x_q = REG_QTY (REGNO (x));
4347 struct qty_table_elem *x_ent = &qty_table[x_q];
4349 if (x_ent->const_rtx)
4350 x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4353 if (x == 0 || CONSTANT_P (x))
4356 /* If X is a MEM, try to fold it outside the context of any insn to see if
4357 it might be equivalent to a constant. That handles the case where it
4358 is a constant-pool reference. Then try to look it up in the hash table
4359 in case it is something whose value we have seen before. */
4361 if (GET_CODE (x) == MEM)
4363 struct table_elt *elt;
4365 x = fold_rtx (x, NULL_RTX);
4369 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4373 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4374 if (elt->is_const && CONSTANT_P (elt->exp))
4381 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4382 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4383 least-significant part of X.
4384 MODE specifies how big a part of X to return.
4386 If the requested operation cannot be done, 0 is returned.
4388 This is similar to gen_lowpart in emit-rtl.c. */
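/* Illustrative example (assuming 4-byte words): asking for the SImode
   low part of a DImode MEM at address A yields a SImode MEM at A on a
   little-endian target and at A+4 on a big-endian target, provided the
   adjusted address is still valid; otherwise 0 is returned.  */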
4391 gen_lowpart_if_possible (mode, x)
4392 enum machine_mode mode;
4395 rtx result = gen_lowpart_common (mode, x);
4399 else if (GET_CODE (x) == MEM)
4401 /* This is the only other case we handle. */
4405 if (WORDS_BIG_ENDIAN)
4406 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4407 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4408 if (BYTES_BIG_ENDIAN)
4409 /* Adjust the address so that the address-after-the-data is unchanged. */
4411 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4412 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4414 new = adjust_address_nv (x, mode, offset);
4415 if (! memory_address_p (mode, XEXP (new, 0)))
4424 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4425 branch. It will be zero if not.
4427 In certain cases, this can cause us to add an equivalence. For example,
4428 if we are following the taken case of a comparison such as `if (i == 2)',
4430 we can add the fact that `i' and `2' are now equivalent.
4432 In any case, we can record that this comparison was passed. If the same
4433 comparison is seen later, we will know its value. */
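/* Illustrative example: after following the taken arm of `if (i == 2)',
   a later `if (i == 2)' within the same extended basic block is known
   to be true, and uses of `i' may be replaced by the constant 2 for as
   long as the recorded equivalence remains valid.  */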
4436 record_jump_equiv (insn, taken)
4440 int cond_known_true;
4443 enum machine_mode mode, mode0, mode1;
4444 int reversed_nonequality = 0;
4447 /* Ensure this is the right kind of insn. */
4448 if (! any_condjump_p (insn))
4450 set = pc_set (insn);
4452 /* See if this jump condition is known true or false. */
4454 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4456 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4458 /* Get the type of comparison being done and the operands being compared.
4459 If we had to reverse a non-equality condition, record that fact so we
4460 know that it isn't valid for floating-point. */
4461 code = GET_CODE (XEXP (SET_SRC (set), 0));
4462 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4463 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4465 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4466 if (! cond_known_true)
4468 code = reversed_comparison_code_parts (code, op0, op1, insn);
4470 /* Don't remember if we can't find the inverse. */
4471 if (code == UNKNOWN)
4475 /* The mode is the mode of the non-constant. */
4477 if (mode1 != VOIDmode)
4480 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4483 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4484 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4485 Make any useful entries we can with that information. Called from
4486 above function and called recursively. */
4489 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4491 enum machine_mode mode;
4493 int reversed_nonequality;
4495 unsigned op0_hash, op1_hash;
4496 int op0_in_memory, op1_in_memory;
4497 struct table_elt *op0_elt, *op1_elt;
4499 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4500 we know that they are also equal in the smaller mode (this is also
4501 true for all smaller modes whether or not there is a SUBREG, but
4502 is not worth testing for with no SUBREG). */
4504 /* Note that GET_MODE (op0) may not equal MODE. */
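/* Illustrative example: if (subreg:DI (reg:SI x) 0) is known EQ to
   (reg:DI y), then (reg:SI x) also equals the SImode low part of y, so
   the equivalence is recorded for the narrower inner register too.  */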
4505 if (code == EQ && GET_CODE (op0) == SUBREG
4506 && (GET_MODE_SIZE (GET_MODE (op0))
4507 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4509 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4510 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4512 record_jump_cond (code, mode, SUBREG_REG (op0),
4513 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4514 reversed_nonequality);
4517 if (code == EQ && GET_CODE (op1) == SUBREG
4518 && (GET_MODE_SIZE (GET_MODE (op1))
4519 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4521 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4522 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4524 record_jump_cond (code, mode, SUBREG_REG (op1),
4525 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4526 reversed_nonequality);
4529 /* Similarly, if this is an NE comparison, and either is a SUBREG
4530 making a smaller mode, we know the whole thing is also NE. */
4532 /* Note that GET_MODE (op0) may not equal MODE;
4533 if we test MODE instead, we can get an infinite recursion
4534 alternating between two modes each wider than MODE. */
4536 if (code == NE && GET_CODE (op0) == SUBREG
4537 && subreg_lowpart_p (op0)
4538 && (GET_MODE_SIZE (GET_MODE (op0))
4539 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4541 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4542 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4544 record_jump_cond (code, mode, SUBREG_REG (op0),
4545 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4546 reversed_nonequality);
4549 if (code == NE && GET_CODE (op1) == SUBREG
4550 && subreg_lowpart_p (op1)
4551 && (GET_MODE_SIZE (GET_MODE (op1))
4552 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4554 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4555 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4557 record_jump_cond (code, mode, SUBREG_REG (op1),
4558 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4559 reversed_nonequality);
4562 /* Hash both operands. */
4565 hash_arg_in_memory = 0;
4566 op0_hash = HASH (op0, mode);
4567 op0_in_memory = hash_arg_in_memory;
4573 hash_arg_in_memory = 0;
4574 op1_hash = HASH (op1, mode);
4575 op1_in_memory = hash_arg_in_memory;
4580 /* Look up both operands. */
4581 op0_elt = lookup (op0, op0_hash, mode);
4582 op1_elt = lookup (op1, op1_hash, mode);
4584 /* If both operands are already equivalent or if they are not in the
4585 table but are identical, do nothing. */
4586 if ((op0_elt != 0 && op1_elt != 0
4587 && op0_elt->first_same_value == op1_elt->first_same_value)
4588 || op0 == op1 || rtx_equal_p (op0, op1))
4591 /* If we aren't setting two things equal all we can do is save this
4592 comparison. Similarly if this is floating-point. In the latter
4593 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4594 If we record the equality, we might inadvertently delete code
4595 whose intent was to change -0 to +0. */
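/* Illustrative example: IEEE comparison treats 0.0 and -0.0 as equal,
   so recording "x equals 0.0" after `if (x == 0.0)' could later let
   +0.0 be substituted for a value that is really -0.0, changing the
   sign of subsequent results; hence for floating point only the
   comparison itself is remembered.  */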
4597 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4599 struct qty_table_elem *ent;
4602 /* If we reversed a floating-point comparison, if OP0 is not a
4603 register, or if OP1 is neither a register nor a constant, we can't do anything. */
4606 if (GET_CODE (op1) != REG)
4607 op1 = equiv_constant (op1);
4609 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4610 || GET_CODE (op0) != REG || op1 == 0)
4613 /* Put OP0 in the hash table if it isn't already. This gives it a
4614 new quantity number. */
4617 if (insert_regs (op0, NULL, 0))
4619 rehash_using_reg (op0);
4620 op0_hash = HASH (op0, mode);
4622 /* If OP0 is contained in OP1, this changes its hash code
4623 as well. Faster to rehash than to check, except
4624 for the simple case of a constant. */
4625 if (! CONSTANT_P (op1))
4626 op1_hash = HASH (op1, mode);
4629 op0_elt = insert (op0, NULL, op0_hash, mode);
4630 op0_elt->in_memory = op0_in_memory;
4633 qty = REG_QTY (REGNO (op0));
4634 ent = &qty_table[qty];
4636 ent->comparison_code = code;
4637 if (GET_CODE (op1) == REG)
4639 /* Look it up again--in case op0 and op1 are the same. */
4640 op1_elt = lookup (op1, op1_hash, mode);
4642 /* Put OP1 in the hash table so it gets a new quantity number. */
4645 if (insert_regs (op1, NULL, 0))
4647 rehash_using_reg (op1);
4648 op1_hash = HASH (op1, mode);
4651 op1_elt = insert (op1, NULL, op1_hash, mode);
4652 op1_elt->in_memory = op1_in_memory;
4655 ent->comparison_const = NULL_RTX;
4656 ent->comparison_qty = REG_QTY (REGNO (op1));
4660 ent->comparison_const = op1;
4661 ent->comparison_qty = -1;
4667 /* If either side is still missing an equivalence, make it now,
4668 then merge the equivalences. */
4672 if (insert_regs (op0, NULL, 0))
4674 rehash_using_reg (op0);
4675 op0_hash = HASH (op0, mode);
4678 op0_elt = insert (op0, NULL, op0_hash, mode);
4679 op0_elt->in_memory = op0_in_memory;
4684 if (insert_regs (op1, NULL, 0))
4686 rehash_using_reg (op1);
4687 op1_hash = HASH (op1, mode);
4690 op1_elt = insert (op1, NULL, op1_hash, mode);
4691 op1_elt->in_memory = op1_in_memory;
4694 merge_equiv_classes (op0_elt, op1_elt);
4695 last_jump_equiv_class = op0_elt;
4698 /* CSE processing for one instruction.
4699 First simplify sources and addresses of all assignments
4700 in the instruction, using previously-computed equivalent values.
4701 Then install the new sources and destinations in the table
4702 of available values.
4704 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4705 the insn. It means that INSN is inside libcall block. In this
4706 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4708 /* Data on one SET contained in the instruction. */
4712 /* The SET rtx itself. */
4714 /* The SET_SRC of the rtx (the original value, if it is changing). */
4716 /* The hash-table element for the SET_SRC of the SET. */
4717 struct table_elt *src_elt;
4718 /* Hash value for the SET_SRC. */
4720 /* Hash value for the SET_DEST. */
4722 /* The SET_DEST, with SUBREG, etc., stripped. */
4724 /* Nonzero if the SET_SRC is in memory. */
4726 /* Nonzero if the SET_SRC contains something
4727 whose value cannot be predicted and understood. */
4729 /* Original machine mode, in case it becomes a CONST_INT. */
4730 enum machine_mode mode;
4731 /* A constant equivalent for SET_SRC, if any. */
4733 /* Original SET_SRC value used for libcall notes. */
4735 /* Hash value of constant equivalent for SET_SRC. */
4736 unsigned src_const_hash;
4737 /* Table entry for constant equivalent for SET_SRC, if any. */
4738 struct table_elt *src_const_elt;
4742 cse_insn (insn, libcall_insn)
4746 rtx x = PATTERN (insn);
4752 /* Records what this insn does to set CC0. */
4753 rtx this_insn_cc0 = 0;
4754 enum machine_mode this_insn_cc0_mode = VOIDmode;
4758 struct table_elt *src_eqv_elt = 0;
4759 int src_eqv_volatile = 0;
4760 int src_eqv_in_memory = 0;
4761 unsigned src_eqv_hash = 0;
4763 struct set *sets = (struct set *) 0;
4767 /* Find all the SETs and CLOBBERs in this instruction.
4768 Record all the SETs in the array `set' and count them.
4769 Also determine whether there is a CLOBBER that invalidates
4770 all memory references, or all references at varying addresses. */
4772 if (GET_CODE (insn) == CALL_INSN)
4774 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4776 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4777 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4778 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4782 if (GET_CODE (x) == SET)
4784 sets = (struct set *) alloca (sizeof (struct set));
4787 /* Ignore SETs that are unconditional jumps.
4788 They never need cse processing, so this does not hurt.
4789 The reason is not efficiency but rather
4790 so that we can test at the end for instructions
4791 that have been simplified to unconditional jumps
4792 and not be misled by unchanged instructions
4793 that were unconditional jumps to begin with. */
4794 if (SET_DEST (x) == pc_rtx
4795 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4798 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4799 The hard function value register is used only once, to copy to
4800 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4801 Ensure we invalidate the destination register. On the 80386 no
4802 other code would invalidate it since it is a fixed_reg.
4803 We need not check the return of apply_change_group; see canon_reg. */
4805 else if (GET_CODE (SET_SRC (x)) == CALL)
4807 canon_reg (SET_SRC (x), insn);
4808 apply_change_group ();
4809 fold_rtx (SET_SRC (x), insn);
4810 invalidate (SET_DEST (x), VOIDmode);
4815 else if (GET_CODE (x) == PARALLEL)
4817 int lim = XVECLEN (x, 0);
4819 sets = (struct set *) alloca (lim * sizeof (struct set));
4821 /* Find all regs explicitly clobbered in this insn,
4822 and ensure they are not replaced with any other regs
4823 elsewhere in this insn.
4824 When a reg that is clobbered is also used for input,
4825 we should presume that that is for a reason,
4826 and we should not substitute some other register
4827 which is not supposed to be clobbered.
4828 Therefore, this loop cannot be merged into the one below
4829 because a CALL may precede a CLOBBER and refer to the
4830 value clobbered. We must not let a canonicalization do
4831 anything in that case. */
4832 for (i = 0; i < lim; i++)
4834 rtx y = XVECEXP (x, 0, i);
4835 if (GET_CODE (y) == CLOBBER)
4837 rtx clobbered = XEXP (y, 0);
4839 if (GET_CODE (clobbered) == REG
4840 || GET_CODE (clobbered) == SUBREG)
4841 invalidate (clobbered, VOIDmode);
4842 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4843 || GET_CODE (clobbered) == ZERO_EXTRACT)
4844 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4848 for (i = 0; i < lim; i++)
4850 rtx y = XVECEXP (x, 0, i);
4851 if (GET_CODE (y) == SET)
4853 /* As above, we ignore unconditional jumps and call-insns and
4854 ignore the result of apply_change_group. */
4855 if (GET_CODE (SET_SRC (y)) == CALL)
4857 canon_reg (SET_SRC (y), insn);
4858 apply_change_group ();
4859 fold_rtx (SET_SRC (y), insn);
4860 invalidate (SET_DEST (y), VOIDmode);
4862 else if (SET_DEST (y) == pc_rtx
4863 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4866 sets[n_sets++].rtl = y;
4868 else if (GET_CODE (y) == CLOBBER)
4870 /* If we clobber memory, canon the address.
4871 This does nothing when a register is clobbered
4872 because we have already invalidated the reg. */
4873 if (GET_CODE (XEXP (y, 0)) == MEM)
4874 canon_reg (XEXP (y, 0), NULL_RTX);
4876 else if (GET_CODE (y) == USE
4877 && ! (GET_CODE (XEXP (y, 0)) == REG
4878 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4879 canon_reg (y, NULL_RTX);
4880 else if (GET_CODE (y) == CALL)
4882 /* The result of apply_change_group can be ignored; see canon_reg. */
4884 canon_reg (y, insn);
4885 apply_change_group ();
4890 else if (GET_CODE (x) == CLOBBER)
4892 if (GET_CODE (XEXP (x, 0)) == MEM)
4893 canon_reg (XEXP (x, 0), NULL_RTX);
4896 /* Canonicalize a USE of a pseudo register or memory location. */
4897 else if (GET_CODE (x) == USE
4898 && ! (GET_CODE (XEXP (x, 0)) == REG
4899 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4900 canon_reg (XEXP (x, 0), NULL_RTX);
4901 else if (GET_CODE (x) == CALL)
4903 /* The result of apply_change_group can be ignored; see canon_reg. */
4904 canon_reg (x, insn);
4905 apply_change_group ();
4909 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4910 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4911 is handled specially for this case, and if it isn't set, then there will
4912 be no equivalence for the destination. */
4913 if (n_sets == 1 && REG_NOTES (insn) != 0
4914 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4915 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4916 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4917 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
4919 /* Canonicalize sources and addresses of destinations.
4920 We do this in a separate pass to avoid problems when a MATCH_DUP is
4921 present in the insn pattern. In that case, we want to ensure that
4922 we don't break the duplicate nature of the pattern. So we will replace
4923 both operands at the same time. Otherwise, we would fail to find an
4924 equivalent substitution in the loop calling validate_change below.
4926 We used to suppress canonicalization of DEST if it appears in SRC,
4927 but we don't do this any more. */
4929 for (i = 0; i < n_sets; i++)
4931 rtx dest = SET_DEST (sets[i].rtl);
4932 rtx src = SET_SRC (sets[i].rtl);
4933 rtx new = canon_reg (src, insn);
4936 sets[i].orig_src = src;
4937 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4938 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4939 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4940 || (insn_code = recog_memoized (insn)) < 0
4941 || insn_data[insn_code].n_dups > 0)
4942 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4944 SET_SRC (sets[i].rtl) = new;
4946 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4948 validate_change (insn, &XEXP (dest, 1),
4949 canon_reg (XEXP (dest, 1), insn), 1);
4950 validate_change (insn, &XEXP (dest, 2),
4951 canon_reg (XEXP (dest, 2), insn), 1);
4954 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4955 || GET_CODE (dest) == ZERO_EXTRACT
4956 || GET_CODE (dest) == SIGN_EXTRACT)
4957 dest = XEXP (dest, 0);
4959 if (GET_CODE (dest) == MEM)
4960 canon_reg (dest, insn);
4963 /* Now that we have done all the replacements, we can apply the change
4964 group and see if they all work. Note that this will cause some
4965 canonicalizations that would have worked individually not to be applied
4966 because some other canonicalization didn't work, but this should not matter.
4969 The result of apply_change_group can be ignored; see canon_reg. */
4971 apply_change_group ();
4973 /* Set sets[i].src_elt to the class each source belongs to.
4974 Detect assignments from or to volatile things
4975 and set sets[i] to zero so they will be ignored
4976 in the rest of this function.
4978 Nothing in this loop changes the hash table or the register chains. */
4980 for (i = 0; i < n_sets; i++)
4984 struct table_elt *elt = 0, *p;
4985 enum machine_mode mode;
4988 rtx src_related = 0;
4989 struct table_elt *src_const_elt = 0;
4990 int src_cost = MAX_COST;
4991 int src_eqv_cost = MAX_COST;
4992 int src_folded_cost = MAX_COST;
4993 int src_related_cost = MAX_COST;
4994 int src_elt_cost = MAX_COST;
4995 int src_regcost = MAX_COST;
4996 int src_eqv_regcost = MAX_COST;
4997 int src_folded_regcost = MAX_COST;
4998 int src_related_regcost = MAX_COST;
4999 int src_elt_regcost = MAX_COST;
5000 /* Set non-zero if we need to call force_const_mem on the
5001 contents of src_folded before using it. */
5002 int src_folded_force_flag = 0;
5004 dest = SET_DEST (sets[i].rtl);
5005 src = SET_SRC (sets[i].rtl);
5007 /* If SRC is a constant that has no machine mode,
5008 hash it with the destination's machine mode.
5009 This way we can keep different modes separate. */
5011 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5012 sets[i].mode = mode;
5016 enum machine_mode eqvmode = mode;
5017 if (GET_CODE (dest) == STRICT_LOW_PART)
5018 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5020 hash_arg_in_memory = 0;
5021 src_eqv = fold_rtx (src_eqv, insn);
5022 src_eqv_hash = HASH (src_eqv, eqvmode);
5024 /* Find the equivalence class for the equivalent expression. */
5027 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5029 src_eqv_volatile = do_not_record;
5030 src_eqv_in_memory = hash_arg_in_memory;
5033 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5034 value of the INNER register, not the destination. So it is not
5035 a valid substitution for the source. But save it for later. */
5036 if (GET_CODE (dest) == STRICT_LOW_PART)
5039 src_eqv_here = src_eqv;
5041 /* Simplify any foldable subexpressions in SRC. Then get the fully-
5042 simplified result, which may not necessarily be valid. */
5043 src_folded = fold_rtx (src, insn);
5046 /* ??? This caused bad code to be generated for the m68k port with -O2.
5047 Suppose src is (CONST_INT -1), and that after truncation src_folded
5048 is (CONST_INT 3). Suppose src_folded is then used for src_const.
5049 At the end we will add src and src_const to the same equivalence
5050 class. We now have 3 and -1 on the same equivalence class. This
5051 causes later instructions to be mis-optimized. */
5052 /* If storing a constant in a bitfield, pre-truncate the constant
5053 so we will be able to record it later. */
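/* Illustrative example: storing (const_int 0x13) into a 4-bit
   ZERO_EXTRACT keeps only the low four bits, so the constant is
   pre-truncated to (const_int 3) before being recorded; otherwise the
   table would wrongly claim the field holds 0x13.  */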
5054 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5055 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5057 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5059 if (GET_CODE (src) == CONST_INT
5060 && GET_CODE (width) == CONST_INT
5061 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5062 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5064 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5065 << INTVAL (width)) - 1));
5069 /* Compute SRC's hash code, and also notice if it
5070 should not be recorded at all. In that case,
5071 prevent any further processing of this assignment. */
5073 hash_arg_in_memory = 0;
5076 sets[i].src_hash = HASH (src, mode);
5077 sets[i].src_volatile = do_not_record;
5078 sets[i].src_in_memory = hash_arg_in_memory;
5080 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5081 a pseudo, do not record SRC. Using SRC as a replacement for
5082 anything else will be incorrect in that situation. Note that
5083 this usually occurs only for stack slots, in which case all the
5084 RTL would be referring to SRC, so we don't lose any optimization
5085 opportunities by not having SRC in the hash table. */
5087 if (GET_CODE (src) == MEM
5088 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5089 && GET_CODE (dest) == REG
5090 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5091 sets[i].src_volatile = 1;
5094 /* It is no longer clear why we used to do this, but it doesn't
5095 appear to still be needed. So let's try without it since this
5096 code hurts cse'ing widened ops. */
5097 /* If source is a perverse subreg (such as QI treated as an SI),
5098 treat it as volatile. It may do the work of an SI in one context
5099 where the extra bits are not being used, but cannot replace an SI in general. */
5101 if (GET_CODE (src) == SUBREG
5102 && (GET_MODE_SIZE (GET_MODE (src))
5103 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5104 sets[i].src_volatile = 1;
5107 /* Locate all possible equivalent forms for SRC. Try to replace
5108 SRC in the insn with each cheaper equivalent.
5110 We have the following types of equivalents: SRC itself, a folded
5111 version, a value given in a REG_EQUAL note, or a value related
5114 Each of these equivalents may be part of an additional class
5115 of equivalents (if more than one is in the table, they must be in
5116 the same class; we check for this).
5118 If the source is volatile, we don't do any table lookups.
5120 We note any constant equivalent for possible later use in a REG_NOTE. */
5123 if (!sets[i].src_volatile)
5124 elt = lookup (src, sets[i].src_hash, mode);
5126 sets[i].src_elt = elt;
5128 if (elt && src_eqv_here && src_eqv_elt)
5130 if (elt->first_same_value != src_eqv_elt->first_same_value)
5132 /* The REG_EQUAL is indicating that two formerly distinct
5133 classes are now equivalent. So merge them. */
5134 merge_equiv_classes (elt, src_eqv_elt);
5135 src_eqv_hash = HASH (src_eqv, elt->mode);
5136 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5142 else if (src_eqv_elt)
5145 /* Try to find a constant somewhere and record it in `src_const'.
5146 Record its table element, if any, in `src_const_elt'. Look in
5147 any known equivalences first. (If the constant is not in the
5148 table, also set `sets[i].src_const_hash'). */
5150 for (p = elt->first_same_value; p; p = p->next_same_value)
5154 src_const_elt = elt;
5159 && (CONSTANT_P (src_folded)
5160 /* Consider (minus (label_ref L1) (label_ref L2)) as
5161 "constant" here so we will record it. This allows us
5162 to fold switch statements when an ADDR_DIFF_VEC is used. */
5163 || (GET_CODE (src_folded) == MINUS
5164 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5165 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5166 src_const = src_folded, src_const_elt = elt;
5167 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5168 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5170 /* If we don't know if the constant is in the table, get its
5171 hash code and look it up. */
5172 if (src_const && src_const_elt == 0)
5174 sets[i].src_const_hash = HASH (src_const, mode);
5175 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5178 sets[i].src_const = src_const;
5179 sets[i].src_const_elt = src_const_elt;
5181 /* If the constant and our source are both in the table, mark them as
5182 equivalent. Otherwise, if a constant is in the table but the source
5183 isn't, set ELT to it. */
5184 if (src_const_elt && elt
5185 && src_const_elt->first_same_value != elt->first_same_value)
5186 merge_equiv_classes (elt, src_const_elt);
5187 else if (src_const_elt && elt == 0)
5188 elt = src_const_elt;
5190 /* See if there is a register linearly related to a constant
5191 equivalent of SRC. */
5193 && (GET_CODE (src_const) == CONST
5194 || (src_const_elt && src_const_elt->related_value != 0)))
5196 src_related = use_related_value (src_const, src_const_elt);
5199 struct table_elt *src_related_elt
5200 = lookup (src_related, HASH (src_related, mode), mode);
5201 if (src_related_elt && elt)
5203 if (elt->first_same_value
5204 != src_related_elt->first_same_value)
5205 /* This can occur when we previously saw a CONST
5206 involving a SYMBOL_REF and then see the SYMBOL_REF
5207 twice. Merge the involved classes. */
5208 merge_equiv_classes (elt, src_related_elt);
5211 src_related_elt = 0;
5213 else if (src_related_elt && elt == 0)
5214 elt = src_related_elt;
5218 /* See if we have a CONST_INT that is already in a register in a wider mode. */
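/* Illustrative example: if (const_int 3) is already held in a DImode
   register, a lowpart SUBREG of that register can supply the SImode
   constant without a fresh constant load.  */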
5221 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5222 && GET_MODE_CLASS (mode) == MODE_INT
5223 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5225 enum machine_mode wider_mode;
5227 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5228 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5229 && src_related == 0;
5230 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5232 struct table_elt *const_elt
5233 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5238 for (const_elt = const_elt->first_same_value;
5239 const_elt; const_elt = const_elt->next_same_value)
5240 if (GET_CODE (const_elt->exp) == REG)
5242 src_related = gen_lowpart_if_possible (mode,
5249 /* Another possibility is that we have an AND with a constant in
5250 a mode narrower than a word. If so, it might have been generated
5251 as part of an "if" which would narrow the AND. If we already
5252 have done the AND in a wider mode, we can use a SUBREG of that register. */
5255 if (flag_expensive_optimizations && ! src_related
5256 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5257 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5259 enum machine_mode tmode;
5260 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5262 for (tmode = GET_MODE_WIDER_MODE (mode);
5263 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5264 tmode = GET_MODE_WIDER_MODE (tmode))
5266 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5267 struct table_elt *larger_elt;
5271 PUT_MODE (new_and, tmode);
5272 XEXP (new_and, 0) = inner;
5273 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5274 if (larger_elt == 0)
5277 for (larger_elt = larger_elt->first_same_value;
5278 larger_elt; larger_elt = larger_elt->next_same_value)
5279 if (GET_CODE (larger_elt->exp) == REG)
5282 = gen_lowpart_if_possible (mode, larger_elt->exp);
5292 #ifdef LOAD_EXTEND_OP
5293 /* See if a MEM has already been loaded with a widening operation;
5294 if it has, we can use a subreg of that. Many CISC machines
5295 also have such operations, but this is only likely to be
5296 beneficial on these machines. */
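/* Illustrative example (target dependent): on a machine whose QImode
   loads are defined to zero-extend, a previous
   (set (reg:SI r) (zero_extend:SI (mem:QI a))) makes the QImode value
   available as the low part of r, so a lowpart SUBREG of r can stand in
   for reloading (mem:QI a).  */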
5298 if (flag_expensive_optimizations && src_related == 0
5299 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5300 && GET_MODE_CLASS (mode) == MODE_INT
5301 && GET_CODE (src) == MEM && ! do_not_record
5302 && LOAD_EXTEND_OP (mode) != NIL)
5304 enum machine_mode tmode;
5306 /* Set what we are trying to extend and the operation it might
5307 have been extended with. */
5308 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5309 XEXP (memory_extend_rtx, 0) = src;
5311 for (tmode = GET_MODE_WIDER_MODE (mode);
5312 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5313 tmode = GET_MODE_WIDER_MODE (tmode))
5315 struct table_elt *larger_elt;
5317 PUT_MODE (memory_extend_rtx, tmode);
5318 larger_elt = lookup (memory_extend_rtx,
5319 HASH (memory_extend_rtx, tmode), tmode);
5320 if (larger_elt == 0)
5323 for (larger_elt = larger_elt->first_same_value;
5324 larger_elt; larger_elt = larger_elt->next_same_value)
5325 if (GET_CODE (larger_elt->exp) == REG)
5327 src_related = gen_lowpart_if_possible (mode,
5336 #endif /* LOAD_EXTEND_OP */
5338 if (src == src_folded)
5341 /* At this point, ELT, if non-zero, points to a class of expressions
5342 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5343 and SRC_RELATED, if non-zero, each contain additional equivalent
5344 expressions. Prune these latter expressions by deleting expressions
5345 already in the equivalence class.
5347 Check for an equivalent identical to the destination. If found,
5348 this is the preferred equivalent since it will likely lead to
5349 elimination of the insn. Indicate this by placing it in `src_related'. */
5353 elt = elt->first_same_value;
5354 for (p = elt; p; p = p->next_same_value)
5356 enum rtx_code code = GET_CODE (p->exp);
5358 /* If the expression is not valid, ignore it. Then we do not
5359 have to check for validity below. In most cases, we can use
5360 `rtx_equal_p', since canonicalization has already been done. */
5361 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5364 /* Also skip paradoxical subregs, unless that's what we're looking for. */
5367 && (GET_MODE_SIZE (GET_MODE (p->exp))
5368 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5370 && GET_CODE (src) == SUBREG
5371 && GET_MODE (src) == GET_MODE (p->exp)
5372 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5373 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5376 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5378 else if (src_folded && GET_CODE (src_folded) == code
5379 && rtx_equal_p (src_folded, p->exp))
5381 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5382 && rtx_equal_p (src_eqv_here, p->exp))
5384 else if (src_related && GET_CODE (src_related) == code
5385 && rtx_equal_p (src_related, p->exp))
5388 /* If this is the same as the destination of the insn, we want
5389 to prefer it. Copy it to src_related. The code below will
5390 then give it a negative cost. */
5391 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5395 /* Find the cheapest valid equivalent, trying all the available
5396 possibilities. Prefer items not in the hash table to ones
5397 that are when they are equal cost. Note that we can never
5398 worsen an insn as the current contents will also succeed.
5399 If we find an equivalent identical to the destination, use it as best,
5400 since this insn will probably be eliminated in that case. */
5403 if (rtx_equal_p (src, dest))
5404 src_cost = src_regcost = -1;
5407 src_cost = COST (src);
5408 src_regcost = approx_reg_cost (src);
5414 if (rtx_equal_p (src_eqv_here, dest))
5415 src_eqv_cost = src_eqv_regcost = -1;
5418 src_eqv_cost = COST (src_eqv_here);
5419 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5425 if (rtx_equal_p (src_folded, dest))
5426 src_folded_cost = src_folded_regcost = -1;
5429 src_folded_cost = COST (src_folded);
5430 src_folded_regcost = approx_reg_cost (src_folded);
5436 if (rtx_equal_p (src_related, dest))
5437 src_related_cost = src_related_regcost = -1;
5440 src_related_cost = COST (src_related);
5441 src_related_regcost = approx_reg_cost (src_related);
5445 /* If this was an indirect jump insn, a known label will really be
5446 cheaper even though it looks more expensive. */
5447 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5448 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5450 /* Terminate loop when replacement made. This must terminate since
5451 the current contents will be tested and will always be valid. */
5456 /* Skip invalid entries. */
5457 while (elt && GET_CODE (elt->exp) != REG
5458 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5459 elt = elt->next_same_value;
5461 /* A paradoxical subreg would be bad here: it'll be the right
5462 size, but later may be adjusted so that the upper bits aren't
5463 what we want. So reject it. */
5465 && GET_CODE (elt->exp) == SUBREG
5466 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5467 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5468 /* It is okay, though, if the rtx we're trying to match
5469 will ignore any of the bits we can't predict. */
5471 && GET_CODE (src) == SUBREG
5472 && GET_MODE (src) == GET_MODE (elt->exp)
5473 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5474 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5476 elt = elt->next_same_value;
5482 src_elt_cost = elt->cost;
5483 src_elt_regcost = elt->regcost;
5486 /* Find cheapest and skip it for the next time. For items
5487 of equal cost, use this order:
5488 src_folded, src, src_eqv, src_related and hash table entry. */
5490 && preferrable (src_folded_cost, src_folded_regcost,
5491 src_cost, src_regcost) <= 0
5492 && preferrable (src_folded_cost, src_folded_regcost,
5493 src_eqv_cost, src_eqv_regcost) <= 0
5494 && preferrable (src_folded_cost, src_folded_regcost,
5495 src_related_cost, src_related_regcost) <= 0
5496 && preferrable (src_folded_cost, src_folded_regcost,
5497 src_elt_cost, src_elt_regcost) <= 0)
5499 trial = src_folded, src_folded_cost = MAX_COST;
5500 if (src_folded_force_flag)
5501 trial = force_const_mem (mode, trial);
5504 && preferrable (src_cost, src_regcost,
5505 src_eqv_cost, src_eqv_regcost) <= 0
5506 && preferrable (src_cost, src_regcost,
5507 src_related_cost, src_related_regcost) <= 0
5508 && preferrable (src_cost, src_regcost,
5509 src_elt_cost, src_elt_regcost) <= 0)
5510 trial = src, src_cost = MAX_COST;
5511 else if (src_eqv_here
5512 && preferrable (src_eqv_cost, src_eqv_regcost,
5513 src_related_cost, src_related_regcost) <= 0
5514 && preferrable (src_eqv_cost, src_eqv_regcost,
5515 src_elt_cost, src_elt_regcost) <= 0)
5516 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5517 else if (src_related
5518 && preferrable (src_related_cost, src_related_regcost,
5519 src_elt_cost, src_elt_regcost) <= 0)
5520 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5523 trial = copy_rtx (elt->exp);
5524 elt = elt->next_same_value;
5525 src_elt_cost = MAX_COST;
5528 /* We don't normally have an insn matching (set (pc) (pc)), so
5529 check for this separately here. We will delete such an insn below.
5532 For other cases such as a table jump or conditional jump
5533 where we know the ultimate target, go ahead and replace the
5534 operand. While that may not make a valid insn, we will
5535 reemit the jump below (and also insert any necessary barriers). */
5537 if (n_sets == 1 && dest == pc_rtx
5539 || (GET_CODE (trial) == LABEL_REF
5540 && ! condjump_p (insn))))
5542 SET_SRC (sets[i].rtl) = trial;
5543 cse_jumps_altered = 1;
5547 /* Look for a substitution that makes a valid insn. */
5548 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5550 /* If we just made a substitution inside a libcall, then we
5551 need to make the same substitution in any notes attached
5552 to the RETVAL insn. */
5554 && (GET_CODE (sets[i].orig_src) == REG
5555 || GET_CODE (sets[i].orig_src) == SUBREG
5556 || GET_CODE (sets[i].orig_src) == MEM))
5557 replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5558 canon_reg (SET_SRC (sets[i].rtl), insn));
5560 /* The result of apply_change_group can be ignored; see canon_reg. */
5563 validate_change (insn, &SET_SRC (sets[i].rtl),
5564 canon_reg (SET_SRC (sets[i].rtl), insn),
5566 apply_change_group ();
5570 /* If we previously found constant pool entries for
5571 constants and this is a constant, try making a
5572 pool entry. Put it in src_folded unless we already have done
5573 this since that is where it likely came from. */
5575 else if (constant_pool_entries_cost
5576 && CONSTANT_P (trial)
5577 /* Reject cases that will abort in decode_rtx_const.
5578 On the alpha when simplifying a switch, we get
5579 (const (truncate (minus (label_ref) (label_ref)))). */
5580 && ! (GET_CODE (trial) == CONST
5581 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5582 /* Likewise on IA-64, except without the truncate. */
5583 && ! (GET_CODE (trial) == CONST
5584 && GET_CODE (XEXP (trial, 0)) == MINUS
5585 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5586 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5588 || (GET_CODE (src_folded) != MEM
5589 && ! src_folded_force_flag))
5590 && GET_MODE_CLASS (mode) != MODE_CC
5591 && mode != VOIDmode)
5593 src_folded_force_flag = 1;
5595 src_folded_cost = constant_pool_entries_cost;
5599 src = SET_SRC (sets[i].rtl);
5601 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5602 However, there is an important exception: If both are registers
5603 that are not the head of their equivalence class, replace SET_SRC
5604 with the head of the class. If we do not do this, we will have
5605 both registers live over a portion of the basic block. This way,
5606 their lifetimes will likely abut instead of overlapping. */
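/* Illustrative sketch (hypothetical pseudo registers): if (reg 105) and
   (reg 108) are in one class headed by (reg 105), an insn that has
   become (set (reg 108) (reg 108)) is rewritten as
   (set (reg 108) (reg 105)); the copy remains, but the two lifetimes
   then meet at this insn instead of overlapping.  */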
5607 if (GET_CODE (dest) == REG
5608 && REGNO_QTY_VALID_P (REGNO (dest)))
5610 int dest_q = REG_QTY (REGNO (dest));
5611 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5613 if (dest_ent->mode == GET_MODE (dest)
5614 && dest_ent->first_reg != REGNO (dest)
5615 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5616 /* Don't do this if the original insn had a hard reg as
5617 SET_SRC or SET_DEST. */
5618 && (GET_CODE (sets[i].src) != REG
5619 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5620 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5621 /* We can't call canon_reg here because it won't do anything if
5622 SRC is a hard register. */
5624 int src_q = REG_QTY (REGNO (src));
5625 struct qty_table_elem *src_ent = &qty_table[src_q];
5626 int first = src_ent->first_reg;
5628 = (first >= FIRST_PSEUDO_REGISTER
5629 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5631 /* We must use validate_change even for this, because this
5632 might be a special no-op instruction, suitable only to have this source. */
5634 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5637 /* If we had a constant that is cheaper than what we are now
5638 setting SRC to, use that constant. We ignored it when we
5639 thought we could make this into a no-op. */
5640 if (src_const && COST (src_const) < COST (src)
5641 && validate_change (insn, &SET_SRC (sets[i].rtl),
5648 /* If we made a change, recompute SRC values. */
5649 if (src != sets[i].src)
5653 hash_arg_in_memory = 0;
5655 sets[i].src_hash = HASH (src, mode);
5656 sets[i].src_volatile = do_not_record;
5657 sets[i].src_in_memory = hash_arg_in_memory;
5658 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5661 /* If this is a single SET, we are setting a register, and we have an
5662 equivalent constant, we want to add a REG_NOTE. We don't want
5663 to write a REG_EQUAL note for a constant pseudo since verifying that
5664 that pseudo hasn't been eliminated is a pain. Such a note also
5665 won't help anything.
5667 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5668 which can be created for a reference to a compile time computable
5669 entry in a jump table. */
5671 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5672 && GET_CODE (src_const) != REG
5673 && ! (GET_CODE (src_const) == CONST
5674 && GET_CODE (XEXP (src_const, 0)) == MINUS
5675 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5676 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5678 /* Make sure that the rtx is not shared with any other insn. */
5679 src_const = copy_rtx (src_const);
5681 /* Record the actual constant value in a REG_EQUAL note, making
5682 a new one if one does not already exist. */
5683 set_unique_reg_note (insn, REG_EQUAL, src_const);
5685 /* If storing a constant value in a register that
5686 previously held the constant value 0,
5687 record this fact with a REG_WAS_0 note on this insn.
5689 Note that the *register* is required to have previously held 0,
5690 not just any register in the quantity and we must point to the
5691 insn that set that register to zero.
5693 Rather than track each register individually, we just see if
5694 the last set for this quantity was for this register. */
5696 if (REGNO_QTY_VALID_P (REGNO (dest)))
5698 int dest_q = REG_QTY (REGNO (dest));
5699 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5701 if (dest_ent->const_rtx == const0_rtx)
5703 /* See if we previously had a REG_WAS_0 note. */
5704 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5705 rtx const_insn = dest_ent->const_insn;
5707 if ((tem = single_set (const_insn)) != 0
5708 && rtx_equal_p (SET_DEST (tem), dest))
5711 XEXP (note, 0) = const_insn;
5714 = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5721 /* Now deal with the destination. */
5724 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5725 to the MEM or REG within it. */
5726 while (GET_CODE (dest) == SIGN_EXTRACT
5727 || GET_CODE (dest) == ZERO_EXTRACT
5728 || GET_CODE (dest) == SUBREG
5729 || GET_CODE (dest) == STRICT_LOW_PART)
5730 dest = XEXP (dest, 0);
5732 sets[i].inner_dest = dest;
5734 if (GET_CODE (dest) == MEM)
5736 #ifdef PUSH_ROUNDING
5737 /* Stack pushes invalidate the stack pointer. */
5738 rtx addr = XEXP (dest, 0);
5739 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5740 && XEXP (addr, 0) == stack_pointer_rtx)
5741 invalidate (stack_pointer_rtx, Pmode);
5743 dest = fold_rtx (dest, insn);
5746 /* Compute the hash code of the destination now,
5747 before the effects of this instruction are recorded,
5748 since the register values used in the address computation
5749 are those before this instruction. */
5750 sets[i].dest_hash = HASH (dest, mode);
5752 /* Don't enter a bit-field in the hash table
5753 because the value in it after the store
5754 may not equal what was stored, due to truncation. */
5756 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5757 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5759 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5761 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5762 && GET_CODE (width) == CONST_INT
5763 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5764 && ! (INTVAL (src_const)
5765 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5766 /* Exception: if the value is constant,
5767 and it won't be truncated, record it. */
5771 /* This is chosen so that the destination will be invalidated
5772 but no new value will be recorded.
5773 We must invalidate because sometimes constant
5774 values can be recorded for bitfields. */
5775 sets[i].src_elt = 0;
5776 sets[i].src_volatile = 1;
5782 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete this insn. */
5784 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5786 /* One less use of the label this insn used to jump to. */
5788 cse_jumps_altered = 1;
5789 /* No more processing for this set. */
5793 /* If this SET is now setting PC to a label, we know it used to
5794 be a conditional or computed branch. */
5795 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5797 /* Now emit a BARRIER after the unconditional jump. */
5798 if (NEXT_INSN (insn) == 0
5799 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5800 emit_barrier_after (insn);
5802 /* We reemit the jump in as many cases as possible just in
5803 case the form of an unconditional jump is significantly
5804 different from a computed jump or conditional jump.
5806 If this insn has multiple sets, then reemitting the
5807 jump is nontrivial. So instead we just force rerecognition
5808 and hope for the best. */
5811 rtx new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5813 JUMP_LABEL (new) = XEXP (src, 0);
5814 LABEL_NUSES (XEXP (src, 0))++;
5818 /* Now emit a BARRIER after the unconditional jump. */
5819 if (NEXT_INSN (insn) == 0
5820 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5821 emit_barrier_after (insn);
5824 INSN_CODE (insn) = -1;
5826 never_reached_warning (insn, NULL);
5828 /* Do not bother deleting any unreachable code,
5829 let jump/flow do that. */
5831 cse_jumps_altered = 1;
5835 /* If destination is volatile, invalidate it and then do no further
5836 processing for this assignment. */
5838 else if (do_not_record)
5840 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5841 invalidate (dest, VOIDmode);
5842 else if (GET_CODE (dest) == MEM)
5844 /* Outgoing arguments for a libcall don't
5845 affect any recorded expressions. */
5846 if (! libcall_insn || insn == libcall_insn)
5847 invalidate (dest, VOIDmode);
5849 else if (GET_CODE (dest) == STRICT_LOW_PART
5850 || GET_CODE (dest) == ZERO_EXTRACT)
5851 invalidate (XEXP (dest, 0), GET_MODE (dest));
5855 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5856 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5859 /* If setting CC0, record what it was set to, or a constant, if it
5860 is equivalent to a constant. If it is being set to a floating-point
5861 value, make a COMPARE with the appropriate constant of 0. If we
5862 don't do this, later code can interpret this as a test against
5863 const0_rtx, which can cause problems if we try to put it into an
5864 insn as a floating-point operand. */
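/* Illustrative example: for (set (cc0) (reg:SF x)) we remember
   (compare (reg:SF x) (const_double 0.0)) rather than the bare register,
   so a later branch is folded as a comparison against floating-point
   zero and never against const0_rtx.  */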
5865 if (dest == cc0_rtx)
5867 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5868 this_insn_cc0_mode = mode;
5869 if (FLOAT_MODE_P (mode))
5870 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5876 /* Now enter all non-volatile source expressions in the hash table
5877 if they are not already present.
5878 Record their equivalence classes in src_elt.
5879 This way we can insert the corresponding destinations into
5880 the same classes even if the actual sources are no longer in them
5881 (having been invalidated). */
5883 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5884 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5886 struct table_elt *elt;
5887 struct table_elt *classp = sets[0].src_elt;
5888 rtx dest = SET_DEST (sets[0].rtl);
5889 enum machine_mode eqvmode = GET_MODE (dest);
5891 if (GET_CODE (dest) == STRICT_LOW_PART)
5893 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5896 if (insert_regs (src_eqv, classp, 0))
5898 rehash_using_reg (src_eqv);
5899 src_eqv_hash = HASH (src_eqv, eqvmode);
5901 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5902 elt->in_memory = src_eqv_in_memory;
5905 /* Check to see if src_eqv_elt is the same as a set source which
5906 does not yet have an elt, and if so set the elt of the set source to src_eqv_elt. */
5908 for (i = 0; i < n_sets; i++)
5909 if (sets[i].rtl && sets[i].src_elt == 0
5910 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5911 sets[i].src_elt = src_eqv_elt;
5914 for (i = 0; i < n_sets; i++)
5915 if (sets[i].rtl && ! sets[i].src_volatile
5916 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5918 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5920 /* REG_EQUAL in setting a STRICT_LOW_PART
5921 gives an equivalent for the entire destination register,
5922 not just for the subreg being stored in now.
5923 This is a more interesting equivalence, so we arrange later
5924 to treat the entire reg as the destination. */
5925 sets[i].src_elt = src_eqv_elt;
5926 sets[i].src_hash = src_eqv_hash;
5930 /* Insert source and constant equivalent into hash table, if not already present. */
5932 struct table_elt *classp = src_eqv_elt;
5933 rtx src = sets[i].src;
5934 rtx dest = SET_DEST (sets[i].rtl);
5935 enum machine_mode mode
5936 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5938 if (sets[i].src_elt == 0)
5940 /* Don't put a hard register source into the table if this is
5941 the last insn of a libcall. In this case, we only need
5942 to put src_eqv_elt in src_elt. */
5943 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5945 struct table_elt *elt;
5947 /* Note that these insert_regs calls cannot remove
5948 any of the src_elt's, because they would have failed to
5949 match if not still valid. */
5950 if (insert_regs (src, classp, 0))
5952 rehash_using_reg (src);
5953 sets[i].src_hash = HASH (src, mode);
5955 elt = insert (src, classp, sets[i].src_hash, mode);
5956 elt->in_memory = sets[i].src_in_memory;
5957 sets[i].src_elt = classp = elt;
5960 sets[i].src_elt = classp;
5962 if (sets[i].src_const && sets[i].src_const_elt == 0
5963 && src != sets[i].src_const
5964 && ! rtx_equal_p (sets[i].src_const, src))
5965 sets[i].src_elt = insert (sets[i].src_const, classp,
5966 sets[i].src_const_hash, mode);
5969 else if (sets[i].src_elt == 0)
5970 /* If we did not insert the source into the hash table (e.g., it was
5971 volatile), note the equivalence class for the REG_EQUAL value, if any,
5972 so that the destination goes into that class. */
5973 sets[i].src_elt = src_eqv_elt;
5975 invalidate_from_clobbers (x);
5977 /* Some registers are invalidated by subroutine calls. Memory is
5978 invalidated by non-constant calls. */
5980 if (GET_CODE (insn) == CALL_INSN)
5982 if (! CONST_OR_PURE_CALL_P (insn))
5983 invalidate_memory ();
5984 invalidate_for_call ();
5987 /* Now invalidate everything set by this instruction.
5988 If a SUBREG or other funny destination is being set,
5989 sets[i].rtl is still nonzero, so here we invalidate the reg
5990 a part of which is being set. */
5992 for (i = 0; i < n_sets; i++)
5995 /* We can't use the inner dest, because the mode associated with
5996 a ZERO_EXTRACT is significant. */
5997 rtx dest = SET_DEST (sets[i].rtl);
5999 /* Needed for registers to remove the register from its
6000 previous quantity's chain.
6001 Needed for memory if this is a nonvarying address, unless
6002 we have just done an invalidate_memory that covers even those. */
6003 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6004 invalidate (dest, VOIDmode);
6005 else if (GET_CODE (dest) == MEM)
6007 /* Outgoing arguments for a libcall don't
6008 affect any recorded expressions. */
6009 if (! libcall_insn || insn == libcall_insn)
6010 invalidate (dest, VOIDmode);
6012 else if (GET_CODE (dest) == STRICT_LOW_PART
6013 || GET_CODE (dest) == ZERO_EXTRACT)
6014 invalidate (XEXP (dest, 0), GET_MODE (dest));
6017 /* A volatile ASM invalidates everything. */
6018 if (GET_CODE (insn) == INSN
6019 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
6020 && MEM_VOLATILE_P (PATTERN (insn)))
6021 flush_hash_table ();
6023 /* Make sure registers mentioned in destinations
6024 are safe for use in an expression to be inserted.
6025 This removes from the hash table
6026 any invalid entry that refers to one of these registers.
6028 We don't care about the return value from mention_regs because
6029 we are going to hash the SET_DEST values unconditionally. */
6031 for (i = 0; i < n_sets; i++)
6035 rtx x = SET_DEST (sets[i].rtl);
6037 if (GET_CODE (x) != REG)
6041 /* We used to rely on all references to a register becoming
6042 inaccessible when a register changes to a new quantity,
6043 since that changes the hash code. However, that is not
6044 safe, since after HASH_SIZE new quantities we get a
6045 hash 'collision' of a register with its own invalid
6046 entries. And since SUBREGs have been changed not to
6047 change their hash code with the hash code of the register,
6048 it wouldn't work any longer at all. So we have to check
6049 for any invalid references lying around now.
6050 This code is similar to the REG case in mention_regs,
6051 but it knows that reg_tick has been incremented, and
6052 it leaves reg_in_table as -1. */
6053 unsigned int regno = REGNO (x);
6054 unsigned int endregno
6055 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6056 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
6059 for (i = regno; i < endregno; i++)
6061 if (REG_IN_TABLE (i) >= 0)
6063 remove_invalid_refs (i);
6064 REG_IN_TABLE (i) = -1;
6071 /* We may have just removed some of the src_elt's from the hash table.
6072 So replace each one with the current head of the same class. */
6074 for (i = 0; i < n_sets; i++)
6077 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6078 /* If elt was removed, find current head of same class,
6079 or 0 if nothing remains of that class. */
6081 struct table_elt *elt = sets[i].src_elt;
6083 while (elt && elt->prev_same_value)
6084 elt = elt->prev_same_value;
6086 while (elt && elt->first_same_value == 0)
6087 elt = elt->next_same_value;
6088 sets[i].src_elt = elt ? elt->first_same_value : 0;
6092 /* Now insert the destinations into their equivalence classes. */
6094 for (i = 0; i < n_sets; i++)
6097 rtx dest = SET_DEST (sets[i].rtl);
6098 rtx inner_dest = sets[i].inner_dest;
6099 struct table_elt *elt;
6101 /* Don't record value if we are not supposed to risk allocating
6102 floating-point values in registers that might be wider than memory. */
6104 if ((flag_float_store
6105 && GET_CODE (dest) == MEM
6106 && FLOAT_MODE_P (GET_MODE (dest)))
6107 /* Don't record BLKmode values, because we don't know the
6108 size of it, and can't be sure that other BLKmode values
6109 have the same or smaller size. */
6110 || GET_MODE (dest) == BLKmode
6111 /* Don't record values of destinations set inside a libcall block
6112 since we might delete the libcall. Things should have been set
6113 up so we won't want to reuse such a value, but we play it safe here. */
|| libcall_insn != 0
6116 /* If we didn't put a REG_EQUAL value or a source into the hash
6117 table, there is no point in recording DEST. */
6118 || sets[i].src_elt == 0
6119 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6120 or SIGN_EXTEND, don't record DEST since it can cause
6121 some tracking to be wrong.
6123 ??? Think about this more later. */
6124 || (GET_CODE (dest) == SUBREG
6125 && (GET_MODE_SIZE (GET_MODE (dest))
6126 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6127 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6128 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6131 /* STRICT_LOW_PART isn't part of the value BEING set,
6132 and neither is the SUBREG inside it.
6133 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6134 if (GET_CODE (dest) == STRICT_LOW_PART)
6135 dest = SUBREG_REG (XEXP (dest, 0));
6137 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6138 /* Registers must also be inserted into chains for quantities. */
6139 if (insert_regs (dest, sets[i].src_elt, 1))
6141 /* If `insert_regs' changes something, the hash code must be recalculated. */
6143 rehash_using_reg (dest);
6144 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6147 if (GET_CODE (inner_dest) == MEM
6148 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6149 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6150 that (MEM (ADDRESSOF (X))) is equivalent to Y.
6151 Consider the case in which the address of the MEM is
6152 passed to a function, which alters the MEM. Then, if we
6153 later use Y instead of the MEM we'll miss the update. */
6154 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6156 elt = insert (dest, sets[i].src_elt,
6157 sets[i].dest_hash, GET_MODE (dest));
6159 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6160 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6161 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
6164 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6165 narrower than M2, and both M1 and M2 are the same number of words,
6166 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6167 make that equivalence as well.
6169 However, BAR may have equivalences for which gen_lowpart_if_possible
6170 will produce a simpler value than gen_lowpart_if_possible applied to
6171 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6172 BAR's equivalences. If we don't get a simplified form, make
6173 the SUBREG. It will not be used in an equivalence, but will
6174 cause two similar assignments to be detected.
6176 Note the loop below will find SUBREG_REG (DEST) since we have
6177 already entered SRC and DEST of the SET in the table. */
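/* An illustrative example of the equivalence described above, using made-up
   pseudo register numbers on a hypothetical target whose word is SImode
   (so SImode and HImode each occupy one word): recording

      (set (subreg:SI (reg:HI 100) 0) (reg:SI 101))

   also makes (reg:HI 100) equivalent to (subreg:HI (reg:SI 101) 0), so a
   later assignment of that same lowpart value to another HImode register
   can be recognized as a common subexpression. */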
6179 if (GET_CODE (dest) == SUBREG
6180 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6182 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6183 && (GET_MODE_SIZE (GET_MODE (dest))
6184 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6185 && sets[i].src_elt != 0)
6187 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6188 struct table_elt *elt, *classp = 0;
6190 for (elt = sets[i].src_elt->first_same_value; elt;
6191 elt = elt->next_same_value)
6195 struct table_elt *src_elt;
6197 /* Ignore invalid entries. */
6198 if (GET_CODE (elt->exp) != REG
6199 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6202 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
6204 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
6206 src_hash = HASH (new_src, new_mode);
6207 src_elt = lookup (new_src, src_hash, new_mode);
6209 /* Put the new source in the hash table if it isn't already. */
if (src_elt == 0)
6213 if (insert_regs (new_src, classp, 0))
6215 rehash_using_reg (new_src);
6216 src_hash = HASH (new_src, new_mode);
6218 src_elt = insert (new_src, classp, src_hash, new_mode);
6219 src_elt->in_memory = elt->in_memory;
6221 else if (classp && classp != src_elt->first_same_value)
6222 /* Show that two things that we've seen before are
6223 actually the same. */
6224 merge_equiv_classes (src_elt, classp);
6226 classp = src_elt->first_same_value;
6227 /* Ignore invalid entries. */
6229 && GET_CODE (classp->exp) != REG
6230 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6231 classp = classp->next_same_value;
6236 /* Special handling for (set REG0 REG1) where REG0 is the
6237 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6238 be used in the sequel, so (if easily done) change this insn to
6239 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6240 that computed their value. Then REG1 will become a dead store
6241 and won't cloud the situation for later optimizations.
6243 Do not make this change if REG1 is a hard register, because it will
6244 then be used in the sequel and we may be changing a two-operand insn
6245 into a three-operand insn.
6247 Also do not do this if we are operating on a copy of INSN.
6249 Also don't do this if INSN ends a libcall; this would cause an unrelated
6250 register to be set in the middle of a libcall, and we then get bad code
6251 if the libcall is deleted. */
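/* An illustrative example of the swap described above, with made-up pseudo
   register numbers and (reg 100) being the "cheapest":

      before:  (set (reg 105) (plus:SI (reg 103) (reg 104)))
               (set (reg 100) (reg 105))

      after:   (set (reg 100) (plus:SI (reg 103) (reg 104)))
               (set (reg 105) (reg 100))

   If (reg 105) is not used afterwards, the second insn is now a dead store
   that delete_trivially_dead_insns can remove. */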
6253 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6254 && NEXT_INSN (PREV_INSN (insn)) == insn
6255 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6256 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6257 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6259 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6260 struct qty_table_elem *src_ent = &qty_table[src_q];
6262 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6263 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6265 rtx prev = prev_nonnote_insn (insn);
6267 /* Do not swap the registers around if the previous instruction
6268 attaches a REG_EQUIV note to REG1.
6270 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6271 from the pseudo that originally shadowed an incoming argument
6272 to another register. Some uses of REG_EQUIV might rely on it
6273 being attached to REG1 rather than REG2.
6275 This section previously turned the REG_EQUIV into a REG_EQUAL
6276 note. We cannot do that because REG_EQUIV may provide an
6277 uninitialised stack slot when REG_PARM_STACK_SPACE is used. */
6279 if (prev != 0 && GET_CODE (prev) == INSN
6280 && GET_CODE (PATTERN (prev)) == SET
6281 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6282 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6284 rtx dest = SET_DEST (sets[0].rtl);
6285 rtx src = SET_SRC (sets[0].rtl);
6288 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6289 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6290 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6291 apply_change_group ();
6293 /* If there was a REG_WAS_0 note on PREV, remove it. Move
6294 any REG_WAS_0 note on INSN to PREV. */
6295 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6297 remove_note (prev, note);
6299 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6302 remove_note (insn, note);
6303 XEXP (note, 1) = REG_NOTES (prev);
6304 REG_NOTES (prev) = note;
6307 /* If INSN has a REG_EQUAL note, and this note mentions
6308 REG0, then we must delete it, because the value in
6309 REG0 has changed. If the note's value is REG1, we must
6310 also delete it because that is now this insn's dest. */
6311 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6313 && (reg_mentioned_p (dest, XEXP (note, 0))
6314 || rtx_equal_p (src, XEXP (note, 0))))
6315 remove_note (insn, note);
6320 /* If this is a conditional jump insn, record any known equivalences due to
6321 the condition being tested. */
6323 last_jump_equiv_class = 0;
6324 if (GET_CODE (insn) == JUMP_INSN
6325 && n_sets == 1 && GET_CODE (x) == SET
6326 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6327 record_jump_equiv (insn, 0);
6330 /* If the previous insn set CC0 and this insn no longer references CC0,
6331 delete the previous insn. Here we use the fact that nothing expects CC0
6332 to be valid over an insn, which is true until the final pass. */
6333 if (prev_insn && GET_CODE (prev_insn) == INSN
6334 && (tem = single_set (prev_insn)) != 0
6335 && SET_DEST (tem) == cc0_rtx
6336 && ! reg_mentioned_p (cc0_rtx, x))
6337 delete_insn (prev_insn);
6339 prev_insn_cc0 = this_insn_cc0;
6340 prev_insn_cc0_mode = this_insn_cc0_mode;
6346 /* Remove from the hash table all expressions that reference memory. */
6349 invalidate_memory ()
6352 struct table_elt *p, *next;
6354 for (i = 0; i < HASH_SIZE; i++)
6355 for (p = table[i]; p; p = next)
6357 next = p->next_same_hash;
6359 remove_from_table (p, i);
6363 /* If ADDR is an address that implicitly affects the stack pointer, return
6364 1 and update the register tables to show the effect. Else, return 0. */
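/* For instance (hypothetical mode and target), a push address such as

      (pre_dec:SI (reg:SI sp))

   is in the autoincrement class and modifies the stack pointer, so REG_TICK
   for STACK_POINTER_REGNUM is bumped and any hash table entry mentioning
   the stack pointer becomes invalid. */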
6367 addr_affects_sp_p (addr)
6370 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6371 && GET_CODE (XEXP (addr, 0)) == REG
6372 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6374 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6375 REG_TICK (STACK_POINTER_REGNUM)++;
6377 /* This should be *very* rare. */
6378 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6379 invalidate (stack_pointer_rtx, VOIDmode);
6387 /* Perform invalidation on the basis of everything about an insn
6388 except for invalidating the actual places that are SET in it.
6389 This includes the places CLOBBERed, and anything that might
6390 alias with something that is SET or CLOBBERed.
6392 X is the pattern of the insn. */
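/* For example (hypothetical pattern), given

      (parallel [(set (reg 100) (reg 101))
                 (clobber (reg 102))])

   only (reg 102), the CLOBBERed register, is invalidated here; the SET
   destination is invalidated separately by the caller. */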
6395 invalidate_from_clobbers (x)
6398 if (GET_CODE (x) == CLOBBER)
6400 rtx ref = XEXP (x, 0);
6403 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6404 || GET_CODE (ref) == MEM)
6405 invalidate (ref, VOIDmode);
6406 else if (GET_CODE (ref) == STRICT_LOW_PART
6407 || GET_CODE (ref) == ZERO_EXTRACT)
6408 invalidate (XEXP (ref, 0), GET_MODE (ref));
6411 else if (GET_CODE (x) == PARALLEL)
6414 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6416 rtx y = XVECEXP (x, 0, i);
6417 if (GET_CODE (y) == CLOBBER)
6419 rtx ref = XEXP (y, 0);
6420 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6421 || GET_CODE (ref) == MEM)
6422 invalidate (ref, VOIDmode);
6423 else if (GET_CODE (ref) == STRICT_LOW_PART
6424 || GET_CODE (ref) == ZERO_EXTRACT)
6425 invalidate (XEXP (ref, 0), GET_MODE (ref));
6431 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6432 and replace any registers in them with either an equivalent constant
6433 or the canonical form of the register. If we are inside an address,
6434 only do this if the address remains valid.
6436 OBJECT is 0 except when within a MEM in which case it is the MEM.
6438 Return the replacement for X. */
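/* For example (hypothetical registers), if (reg 120) is currently known to
   hold (const_int 8), then in a note

      (expr_list:REG_EQUAL (plus:SI (reg 120) (reg 121)) ...)

   the (reg 120) is replaced by (const_int 8), while a register with no
   known constant is simply replaced by its canonical equivalent register. */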
6441 cse_process_notes (x, object)
6445 enum rtx_code code = GET_CODE (x);
6446 const char *fmt = GET_RTX_FORMAT (code);
6463 validate_change (x, &XEXP (x, 0),
6464 cse_process_notes (XEXP (x, 0), x), 0);
6469 if (REG_NOTE_KIND (x) == REG_EQUAL)
6470 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6472 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6479 rtx new = cse_process_notes (XEXP (x, 0), object);
6480 /* We don't substitute VOIDmode constants into these rtx,
6481 since they would impede folding. */
6482 if (GET_MODE (new) != VOIDmode)
6483 validate_change (object, &XEXP (x, 0), new, 0);
6488 i = REG_QTY (REGNO (x));
6490 /* Return a constant or a constant register. */
6491 if (REGNO_QTY_VALID_P (REGNO (x)))
6493 struct qty_table_elem *ent = &qty_table[i];
6495 if (ent->const_rtx != NULL_RTX
6496 && (CONSTANT_P (ent->const_rtx)
6497 || GET_CODE (ent->const_rtx) == REG))
6499 rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6505 /* Otherwise, canonicalize this register. */
6506 return canon_reg (x, NULL_RTX);
6512 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6514 validate_change (object, &XEXP (x, i),
6515 cse_process_notes (XEXP (x, i), object), 0);
6520 /* Find common subexpressions between the end test of a loop and the beginning
6521 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6523 Often we have a loop where an expression in the exit test is used
6524 in the body of the loop. For example "while (*p) *q++ = *p++;".
6525 Because of the way we duplicate the loop exit test in front of the loop,
6526 however, we don't detect that common subexpression. This will be caught
6527 when global cse is implemented, but this is quite a common case.
6529 This function handles the most common cases of these common expressions.
6530 It is called after we have processed the basic block ending with the
6531 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6532 jumps to a label used only once. */
6535 cse_around_loop (loop_start)
6540 struct table_elt *p;
6542 /* If the jump at the end of the loop doesn't go to the start, we don't do anything. */
6544 for (insn = PREV_INSN (loop_start);
6545 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6546 insn = PREV_INSN (insn))
6550 || GET_CODE (insn) != NOTE
6551 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6554 /* If the last insn of the loop (the end test) was an NE comparison,
6555 we will interpret it as an EQ comparison, since we fell through
6556 the loop. Any equivalences resulting from that comparison are
6557 therefore not valid and must be invalidated. */
6558 if (last_jump_equiv_class)
6559 for (p = last_jump_equiv_class->first_same_value; p;
6560 p = p->next_same_value)
6562 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6563 || (GET_CODE (p->exp) == SUBREG
6564 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6565 invalidate (p->exp, VOIDmode);
6566 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6567 || GET_CODE (p->exp) == ZERO_EXTRACT)
6568 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6571 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6572 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6574 The only thing we do with SET_DEST is invalidate entries, so we
6575 can safely process each SET in order. It is slightly less efficient
6576 to do so, but we only want to handle the most common cases.
6578 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6579 These pseudos won't have valid entries in any of the tables indexed
6580 by register number, such as reg_qty. We avoid out-of-range array
6581 accesses by not processing any instructions created after cse started. */
6583 for (insn = NEXT_INSN (loop_start);
6584 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6585 && INSN_UID (insn) < max_insn_uid
6586 && ! (GET_CODE (insn) == NOTE
6587 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6588 insn = NEXT_INSN (insn))
6591 && (GET_CODE (PATTERN (insn)) == SET
6592 || GET_CODE (PATTERN (insn)) == CLOBBER))
6593 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6594 else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6595 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6596 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6597 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6598 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6603 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6604 since they are done elsewhere. This function is called via note_stores. */
6607 invalidate_skipped_set (dest, set, data)
6610 void *data ATTRIBUTE_UNUSED;
6612 enum rtx_code code = GET_CODE (dest);
6615 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6616 /* There are times when an address can appear varying and be a PLUS
6617 during this scan when it would be a fixed address were we to know
6618 the proper equivalences. So invalidate all memory if there is
6619 a BLKmode or nonscalar memory reference or a reference to a
6620 variable address. */
6621 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6622 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6624 invalidate_memory ();
6628 if (GET_CODE (set) == CLOBBER
6635 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6636 invalidate (XEXP (dest, 0), GET_MODE (dest));
6637 else if (code == REG || code == SUBREG || code == MEM)
6638 invalidate (dest, VOIDmode);
6641 /* Invalidate all insns from START up to the end of the function or the
6642 next label. This is called when we wish to CSE around a block that is
6643 conditionally executed. */
6646 invalidate_skipped_block (start)
6651 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6652 insn = NEXT_INSN (insn))
6654 if (! INSN_P (insn))
6657 if (GET_CODE (insn) == CALL_INSN)
6659 if (! CONST_OR_PURE_CALL_P (insn))
6660 invalidate_memory ();
6661 invalidate_for_call ();
6664 invalidate_from_clobbers (PATTERN (insn));
6665 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6669 /* If modifying X will modify the value in *DATA (which is really an
6670 `rtx *'), indicate that fact by setting the pointed-to value to NULL_RTX. */
6674 cse_check_loop_start (x, set, data)
6676 rtx set ATTRIBUTE_UNUSED;
6679 rtx *cse_check_loop_start_value = (rtx *) data;
6681 if (*cse_check_loop_start_value == NULL_RTX
6682 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6685 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6686 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6687 *cse_check_loop_start_value = NULL_RTX;
6690 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6691 a loop that starts with the label at LOOP_START.
6693 If X is a SET, we see if its SET_SRC is currently in our hash table.
6694 If so, we see if it has a value equal to some register used only in the
6695 loop exit code (as marked by jump.c).
6697 If those two conditions are true, we search backwards from the start of
6698 the loop to see if that same value was loaded into a register that still
6699 retains its value at the start of the loop.
6701 If so, we insert an insn after the load to copy the destination of that
6702 load into the equivalent register and (try to) replace our SET_SRC with that register.
6705 In any event, we invalidate whatever this SET or CLOBBER modifies. */
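/* An illustrative sketch of the replacement, with made-up pseudo register
   numbers; (reg 115) is a register used only in the loop exit code:

      before:  (set (reg 110) EXPR)        ; insn found before the loop
               loop_start:
               ...
               (set (reg 120) EXPR)        ; INSN, inside the loop

      after:   (set (reg 110) EXPR)
               (set (reg 115) (reg 110))   ; copy emitted after the load
               loop_start:
               ...
               (set (reg 120) (reg 115))

   provided that nothing between the copy and LOOP_START modifies anything
   referenced in EXPR. */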
6708 cse_set_around_loop (x, insn, loop_start)
6713 struct table_elt *src_elt;
6715 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6716 are setting PC or CC0 or whose SET_SRC is already a register. */
6717 if (GET_CODE (x) == SET
6718 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6719 && GET_CODE (SET_SRC (x)) != REG)
6721 src_elt = lookup (SET_SRC (x),
6722 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6723 GET_MODE (SET_DEST (x)));
6726 for (src_elt = src_elt->first_same_value; src_elt;
6727 src_elt = src_elt->next_same_value)
6728 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6729 && COST (src_elt->exp) < COST (SET_SRC (x)))
6733 /* Look for an insn in front of LOOP_START that sets
6734 something in the desired mode to SET_SRC (x) before we hit
6735 a label or CALL_INSN. */
6737 for (p = prev_nonnote_insn (loop_start);
6738 p && GET_CODE (p) != CALL_INSN
6739 && GET_CODE (p) != CODE_LABEL;
6740 p = prev_nonnote_insn (p))
6741 if ((set = single_set (p)) != 0
6742 && GET_CODE (SET_DEST (set)) == REG
6743 && GET_MODE (SET_DEST (set)) == src_elt->mode
6744 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6746 /* We now have to ensure that nothing between P
6747 and LOOP_START modified anything referenced in
6748 SET_SRC (x). We know that nothing within the loop
6749 can modify it, or we would have invalidated it in the hash table. */
6752 rtx cse_check_loop_start_value = SET_SRC (x);
6753 for (q = p; q != loop_start; q = NEXT_INSN (q))
6755 note_stores (PATTERN (q),
6756 cse_check_loop_start,
6757 &cse_check_loop_start_value);
6759 /* If nothing was changed and we can replace our
6760 SET_SRC, add an insn after P to copy its destination
6761 to what we will be replacing SET_SRC with. */
6762 if (cse_check_loop_start_value
6763 && validate_change (insn, &SET_SRC (x),
6766 /* If this creates new pseudos, this is unsafe,
6767 because the regno of a new pseudo is unsuitable
6768 to index into reg_qty when cse_insn processes
6769 the new insn. Therefore, if a new pseudo was
6770 created, discard this optimization. */
6771 int nregs = max_reg_num ();
6773 = gen_move_insn (src_elt->exp, SET_DEST (set));
6774 if (nregs != max_reg_num ())
6776 if (! validate_change (insn, &SET_SRC (x),
6781 emit_insn_after (move, p);
6788 /* Deal with the destination of X affecting the stack pointer. */
6789 addr_affects_sp_p (SET_DEST (x));
6791 /* See comment on similar code in cse_insn for explanation of these tests. */
6793 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6794 || GET_CODE (SET_DEST (x)) == MEM)
6795 invalidate (SET_DEST (x), VOIDmode);
6796 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6797 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6798 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6801 /* Find the end of INSN's basic block and return its range,
6802 the total number of SETs in all the insns of the block, the last insn of the
6803 block, and the branch path.
6805 The branch path indicates which branches should be followed. If a non-zero
6806 path size is specified, the block should be rescanned and a different set
6807 of branches will be taken. The branch path is only used if
6808 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6810 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6811 used to describe the block. It is filled in with the information about
6812 the current block. The incoming structure's branch path, if any, is used
6813 to construct the output branch path. */
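/* For example (hypothetical insns), if the scan reaches

      (jump_insn 30 ... (set (pc) (if_then_else (eq ...) (label_ref 40) (pc))))

   where code_label 40 is used only by this jump and is preceded by a
   BARRIER, the jump is added to the path with status TAKEN and scanning
   continues at label 40, extending the block across the branch. A
   conditional jump around a block with no other entries is instead
   recorded with status AROUND, and the skipped insns are invalidated when
   the path is followed. */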
6816 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6818 struct cse_basic_block_data *data;
6825 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6826 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6827 int path_size = data->path_size;
6831 /* Update the previous branch path, if any. If the last branch was
6832 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6833 shorten the path by one and look at the previous branch. We know that
6834 at least one branch must have been taken if PATH_SIZE is non-zero. */
6835 while (path_size > 0)
6837 if (data->path[path_size - 1].status != NOT_TAKEN)
6839 data->path[path_size - 1].status = NOT_TAKEN;
6846 /* If the first instruction is marked with QImode, that means we've
6847 already processed this block. Our caller will look at DATA->LAST
6848 to figure out where to go next. We want to return the next block
6849 in the instruction stream, not some branched-to block somewhere
6850 else. We accomplish this by pretending our caller forbade us to
6851 follow jumps or skip blocks. */
6852 if (GET_MODE (insn) == QImode)
6853 follow_jumps = skip_blocks = 0;
6855 /* Scan to end of this basic block. */
6856 while (p && GET_CODE (p) != CODE_LABEL)
6858 /* Don't cse out the end of a loop. This makes a difference
6859 only for the unusual loops that always execute at least once;
6860 all other loops have labels there so we will stop in any case.
6861 Cse'ing out the end of the loop is dangerous because it
6862 might cause an invariant expression inside the loop
6863 to be reused after the end of the loop. This would make it
6864 hard to move the expression out of the loop in loop.c,
6865 especially if it is one of several equivalent expressions
6866 and loop.c would like to eliminate it.
6868 If we are running after loop.c has finished, we can ignore
6869 the NOTE_INSN_LOOP_END. */
6871 if (! after_loop && GET_CODE (p) == NOTE
6872 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6875 /* Don't cse over a call to setjmp; on some machines (eg VAX)
6876 the regs restored by the longjmp come from
6877 a later time than the setjmp. */
6878 if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6879 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6882 /* A PARALLEL can have lots of SETs in it,
6883 especially if it is really an ASM_OPERANDS. */
6884 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6885 nsets += XVECLEN (PATTERN (p), 0);
6886 else if (GET_CODE (p) != NOTE)
6889 /* Ignore insns made by CSE; they cannot affect the boundaries of the basic block. */
6892 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6893 high_cuid = INSN_CUID (p);
6894 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6895 low_cuid = INSN_CUID (p);
6897 /* See if this insn is in our branch path. If it is and we are to take it, do so. */
6899 if (path_entry < path_size && data->path[path_entry].branch == p)
6901 if (data->path[path_entry].status != NOT_TAKEN)
6904 /* Point to next entry in path, if any. */
6908 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6909 was specified, we haven't reached our maximum path length, there are
6910 insns following the target of the jump, this is the only use of the
6911 jump label, and the target label is preceded by a BARRIER.
6913 Alternatively, we can follow the jump if it branches around a
6914 block of code and there are no other branches into the block.
6915 In this case invalidate_skipped_block will be called to invalidate any
6916 registers set in the block when following the jump. */
6918 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6919 && GET_CODE (p) == JUMP_INSN
6920 && GET_CODE (PATTERN (p)) == SET
6921 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6922 && JUMP_LABEL (p) != 0
6923 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6924 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6926 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6927 if ((GET_CODE (q) != NOTE
6928 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6929 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
6930 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6931 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6934 /* If we ran into a BARRIER, this code is an extension of the
6935 basic block when the branch is taken. */
6936 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6938 /* Don't allow ourselves to keep walking around an
6939 always-executed loop. */
6940 if (next_real_insn (q) == next)
6946 /* Similarly, don't put a branch in our path more than once. */
6947 for (i = 0; i < path_entry; i++)
6948 if (data->path[i].branch == p)
6951 if (i != path_entry)
6954 data->path[path_entry].branch = p;
6955 data->path[path_entry++].status = TAKEN;
6957 /* This branch now ends our path. It was possible that we
6958 didn't see this branch the last time around (when the
6959 insn in front of the target was a JUMP_INSN that was
6960 turned into a no-op). */
6961 path_size = path_entry;
6964 /* Mark block so we won't scan it again later. */
6965 PUT_MODE (NEXT_INSN (p), QImode);
6967 /* Detect a branch around a block of code. */
6968 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6972 if (next_real_insn (q) == next)
6978 for (i = 0; i < path_entry; i++)
6979 if (data->path[i].branch == p)
6982 if (i != path_entry)
6985 /* This is no_labels_between_p (p, q) with an added check for
6986 reaching the end of a function (in case Q precedes P). */
6987 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6988 if (GET_CODE (tmp) == CODE_LABEL)
6993 data->path[path_entry].branch = p;
6994 data->path[path_entry++].status = AROUND;
6996 path_size = path_entry;
6999 /* Mark block so we won't scan it again later. */
7000 PUT_MODE (NEXT_INSN (p), QImode);
7007 data->low_cuid = low_cuid;
7008 data->high_cuid = high_cuid;
7009 data->nsets = nsets;
7012 /* If none of the jumps in the path were taken, set our path length to zero
7013 so a rescan won't be done. */
7014 for (i = path_size - 1; i >= 0; i--)
7015 if (data->path[i].status != NOT_TAKEN)
7019 data->path_size = 0;
7021 data->path_size = path_size;
7023 /* End the current branch path. */
7024 data->path[path_size].branch = 0;
7027 /* Perform cse on the instructions of a function.
7028 F is the first instruction.
7029 NREGS is one plus the highest pseudo-reg number used in the function.
7031 AFTER_LOOP is 1 if this is the cse call done after loop optimization
7032 (only if -frerun-cse-after-loop).
7034 Returns 1 if jump_optimize should be redone due to simplifications
7035 in conditional jump instructions. */
7038 cse_main (f, nregs, after_loop, file)
7044 struct cse_basic_block_data val;
7048 cse_jumps_altered = 0;
7049 recorded_label_ref = 0;
7050 constant_pool_entries_cost = 0;
7054 init_alias_analysis ();
7058 max_insn_uid = get_max_uid ();
7060 reg_eqv_table = (struct reg_eqv_elem *)
7061 xmalloc (nregs * sizeof (struct reg_eqv_elem));
7063 #ifdef LOAD_EXTEND_OP
7065 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
7066 and change the code and mode as appropriate. */
7067 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7070 /* Reset the counter indicating how many elements have been made so far. */
7072 n_elements_made = 0;
7074 /* Find the largest uid. */
7076 max_uid = get_max_uid ();
7077 uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
7079 /* Compute the mapping from uids to cuids.
7080 CUIDs are numbers assigned to insns, like uids,
7081 except that cuids increase monotonically through the code.
7082 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7083 between two insns is not affected by -g. */
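/* For example (hypothetical uids), the sequence

      insn 12, line-number note 13, insn 14

   receives cuids 1, 1, 2: the note reuses the cuid of the preceding insn,
   so distances measured in cuids are unaffected by the extra notes emitted
   for -g. */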
7085 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7087 if (GET_CODE (insn) != NOTE
7088 || NOTE_LINE_NUMBER (insn) < 0)
7089 INSN_CUID (insn) = ++i;
7091 /* Give a line number note the same cuid as preceding insn. */
7092 INSN_CUID (insn) = i;
7095 ggc_push_context ();
7097 /* Loop over basic blocks.
7098 Compute the maximum number of qty's needed for each basic block
7099 (which is 2 for each SET). */
7104 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7105 flag_cse_skip_blocks);
7107 /* If this basic block was already processed or has no sets, skip it. */
7108 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7110 PUT_MODE (insn, VOIDmode);
7111 insn = (val.last ? NEXT_INSN (val.last) : 0);
7116 cse_basic_block_start = val.low_cuid;
7117 cse_basic_block_end = val.high_cuid;
7118 max_qty = val.nsets * 2;
7121 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7122 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7125 /* Make MAX_QTY bigger to give us room to optimize
7126 past the end of this basic block, if that should prove useful. */
7132 /* If this basic block is being extended by following certain jumps,
7133 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7134 Otherwise, we start after this basic block. */
7135 if (val.path_size > 0)
7136 cse_basic_block (insn, val.last, val.path, 0);
7139 int old_cse_jumps_altered = cse_jumps_altered;
7142 /* When cse changes a conditional jump to an unconditional
7143 jump, we want to reprocess the block, since it will give
7144 us a new branch path to investigate. */
7145 cse_jumps_altered = 0;
7146 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7147 if (cse_jumps_altered == 0
7148 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7151 cse_jumps_altered |= old_cse_jumps_altered;
7164 if (max_elements_made < n_elements_made)
7165 max_elements_made = n_elements_made;
7168 end_alias_analysis ();
7170 free (reg_eqv_table);
7172 return cse_jumps_altered || recorded_label_ref;
7175 /* Process a single basic block. FROM and TO are the limits of the basic
7176 block. NEXT_BRANCH points to the branch path when following jumps or
7177 a null path when not following jumps.
7179 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
7180 loop. This is true when we are being called for the last time on a
7181 block and this CSE pass is before loop.c. */
7184 cse_basic_block (from, to, next_branch, around_loop)
7186 struct branch_path *next_branch;
7191 rtx libcall_insn = NULL_RTX;
7194 /* This array is undefined before max_reg, so only allocate
7195 the space actually needed and adjust the start. */
7198 = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
7199 * sizeof (struct qty_table_elem));
7200 qty_table -= max_reg;
7204 /* TO might be a label. If so, protect it from being deleted. */
7205 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7208 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7210 enum rtx_code code = GET_CODE (insn);
7212 /* If we have processed 1,000 insns, flush the hash table to
7213 avoid extreme quadratic behavior. We must not include NOTEs
7214 in the count since there may be more of them when generating
7215 debugging information. If we clear the table at different
7216 times, code generated with -g -O might be different from code
7217 generated with -O but not -g.
7219 ??? This is a real kludge and needs to be done some other way.
7221 if (code != NOTE && num_insns++ > 1000)
7223 flush_hash_table ();
7227 /* See if this is a branch that is part of the path. If so, and it is
7228 to be taken, do so. */
7229 if (next_branch->branch == insn)
7231 enum taken status = next_branch++->status;
7232 if (status != NOT_TAKEN)
7234 if (status == TAKEN)
7235 record_jump_equiv (insn, 1);
7237 invalidate_skipped_block (NEXT_INSN (insn));
7239 /* Set the last insn as the jump insn; it doesn't affect cc0.
7240 Then follow this branch. */
7245 insn = JUMP_LABEL (insn);
7250 if (GET_MODE (insn) == QImode)
7251 PUT_MODE (insn, VOIDmode);
7253 if (GET_RTX_CLASS (code) == 'i')
7257 /* Process notes first so we have all notes in canonical forms when
7258 looking for duplicate operations. */
7260 if (REG_NOTES (insn))
7261 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7263 /* Track when we are inside a LIBCALL block. Inside such a block,
7264 we do not want to record destinations. The last insn of a
7265 LIBCALL block is not considered to be part of the block, since
7266 its destination is the result of the block and hence should be recorded. */
7269 if (REG_NOTES (insn) != 0)
7271 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7272 libcall_insn = XEXP (p, 0);
7273 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7277 cse_insn (insn, libcall_insn);
7279 /* If we haven't already found an insn where we added a LABEL_REF, check this one. */
7281 if (GET_CODE (insn) == INSN && ! recorded_label_ref
7282 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7284 recorded_label_ref = 1;
7287 /* If INSN is now an unconditional jump, skip to the end of our
7288 basic block by pretending that we just did the last insn in the
7289 basic block. If we are jumping to the end of our block, show
7290 that we can have one usage of TO. */
7292 if (any_uncondjump_p (insn))
7296 free (qty_table + max_reg);
7300 if (JUMP_LABEL (insn) == to)
7303 /* Maybe TO was deleted because the jump is unconditional.
7304 If so, there is nothing left in this basic block. */
7305 /* ??? Perhaps it would be smarter to set TO
7306 to whatever follows this insn,
7307 and pretend the basic block had always ended here. */
7308 if (INSN_DELETED_P (to))
7311 insn = PREV_INSN (to);
7314 /* See if it is ok to keep on going past the label
7315 which used to end our basic block. Remember that we incremented
7316 the count of that label, so we decrement it here. If we made
7317 a jump unconditional, TO_USAGE will be one; in that case, we don't
7318 want to count the use in that jump. */
7320 if (to != 0 && NEXT_INSN (insn) == to
7321 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7323 struct cse_basic_block_data val;
7326 insn = NEXT_INSN (to);
7328 /* If TO was the last insn in the function, we are done. */
7331 free (qty_table + max_reg);
7335 /* If TO was preceded by a BARRIER we are done with this block
7336 because it has no continuation. */
7337 prev = prev_nonnote_insn (to);
7338 if (prev && GET_CODE (prev) == BARRIER)
7340 free (qty_table + max_reg);
7344 /* Find the end of the following block. Note that we won't be
7345 following branches in this case. */
7348 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7350 /* If the tables we allocated have enough space left
7351 to handle all the SETs in the next basic block,
7352 continue through it. Otherwise, return,
7353 and that block will be scanned individually. */
7354 if (val.nsets * 2 + next_qty > max_qty)
7357 cse_basic_block_start = val.low_cuid;
7358 cse_basic_block_end = val.high_cuid;
7361 /* Prevent TO from being deleted if it is a label. */
7362 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7365 /* Back up so we process the first insn in the extension. */
7366 insn = PREV_INSN (insn);
7370 if (next_qty > max_qty)
7373 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7374 the previous insn is the only insn that branches to the head of a loop,
7375 we can cse into the loop. Don't do this if we changed the jump
7376 structure of a loop unless we aren't going to be following jumps. */
7378 insn = prev_nonnote_insn (to);
7379 if ((cse_jumps_altered == 0
7380 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7381 && around_loop && to != 0
7382 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7383 && GET_CODE (insn) == JUMP_INSN
7384 && JUMP_LABEL (insn) != 0
7385 && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
7386 cse_around_loop (JUMP_LABEL (insn));
7388 free (qty_table + max_reg);
7390 return to ? NEXT_INSN (to) : 0;
7393 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7394 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
7397 check_for_label_ref (rtl, data)
7401 rtx insn = (rtx) data;
7403 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7404 we must rerun jump since it needs to place the note. If this is a
7405 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7406 since no REG_LABEL will be added. */
7407 return (GET_CODE (*rtl) == LABEL_REF
7408 && ! LABEL_REF_NONLOCAL_P (*rtl)
7409 && LABEL_P (XEXP (*rtl, 0))
7410 && INSN_UID (XEXP (*rtl, 0)) != 0
7411 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7414 /* Count the number of times registers are used (not set) in X.
7415 COUNTS is an array in which we accumulate the count, INCR is how much
7416 we count each register usage.
7418 Don't count a usage of DEST, which is the SET_DEST of a SET which
7419 contains X in its SET_SRC. This is because such a SET does not
7420 modify the liveness of DEST. */
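/* For example (hypothetical registers), for

      (set (reg 100) (plus:SI (reg 100) (reg 101)))

   COUNTS[101] is incremented by INCR, but the use of (reg 100) inside the
   PLUS is not counted, since a SET does not change the liveness of its own
   destination. */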
7423 count_reg_usage (x, counts, dest, incr)
7436 switch (code = GET_CODE (x))
7440 counts[REGNO (x)] += incr;
7454 /* If we are clobbering a MEM, mark any registers inside the address as being used. */
7456 if (GET_CODE (XEXP (x, 0)) == MEM)
7457 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7461 /* Unless we are setting a REG, count everything in SET_DEST. */
7462 if (GET_CODE (SET_DEST (x)) != REG)
7463 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7465 /* If SRC has side-effects, then we can't delete this insn, so the
7466 usage of SET_DEST inside SRC counts.
7468 ??? Strictly-speaking, we might be preserving this insn
7469 because some other SET has side-effects, but that's hard
7470 to do and can't happen now. */
7471 count_reg_usage (SET_SRC (x), counts,
7472 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
7477 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
7482 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7484 /* Things used in a REG_EQUAL note aren't dead since loop may try to use them. */
7487 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
7492 if (REG_NOTE_KIND (x) == REG_EQUAL
7493 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE))
7494 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7495 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7502 fmt = GET_RTX_FORMAT (code);
7503 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7506 count_reg_usage (XEXP (x, i), counts, dest, incr);
7507 else if (fmt[i] == 'E')
7508 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7509 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7513 /* Return true if set is live. */
7515 set_live_p (set, insn, counts)
7517 rtx insn ATTRIBUTE_UNUSED; /* Only used with HAVE_cc0. */
7524 if (set_noop_p (set))
7528 else if (GET_CODE (SET_DEST (set)) == CC0
7529 && !side_effects_p (SET_SRC (set))
7530 && ((tem = next_nonnote_insn (insn)) == 0
7532 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7535 else if (GET_CODE (SET_DEST (set)) != REG
7536 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7537 || counts[REGNO (SET_DEST (set))] != 0
7538 || side_effects_p (SET_SRC (set))
7539 /* An ADDRESSOF expression can turn into a use of the
7540 internal arg pointer, so always consider the
7541 internal arg pointer live. If it is truly dead,
7542 flow will delete the initializing insn. */
7543 || (SET_DEST (set) == current_function_internal_arg_pointer))
7548 /* Return true if insn is live. */
7551 insn_live_p (insn, counts)
7556 if (GET_CODE (PATTERN (insn)) == SET)
7557 return set_live_p (PATTERN (insn), insn, counts);
7558 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7560 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7562 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7564 if (GET_CODE (elt) == SET)
7566 if (set_live_p (elt, insn, counts))
7569 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7578 /* Return true if libcall is dead as a whole. */
7581 dead_libcall_p (insn, counts)
7586 /* See if there's a REG_EQUAL note on this insn and try to
7587 replace the source with the REG_EQUAL expression.
7589 We assume that insns with REG_RETVALs can only be reg->reg
7590 copies at this point. */
7591 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7594 rtx set = single_set (insn);
7595 rtx new = simplify_rtx (XEXP (note, 0));
7598 new = XEXP (note, 0);
7600 /* While changing insn, we must update the counts accordingly. */
7601 count_reg_usage (insn, counts, NULL_RTX, -1);
7603 if (set && validate_change (insn, &SET_SRC (set), new, 0))
7605 count_reg_usage (insn, counts, NULL_RTX, 1);
7606 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7607 remove_note (insn, note);
7610 count_reg_usage (insn, counts, NULL_RTX, 1);
7615 /* Scan all the insns and delete any that are dead; i.e., they store a register
7616 that is never used or they copy a register to itself.
7618 This is used to remove insns made obviously dead by cse, loop or other
7619 optimizations. It improves the heuristics in loop since it won't try to
7620 move dead invariants out of loops or make givs for dead quantities. The
7621 remaining passes of the compilation are also sped up. */
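/* For example (hypothetical registers), if (reg 117) is never used, then

      (set (reg 117) (plus:SI (reg 118) (const_int 4)))

   is deleted and the usage count of (reg 118) is decremented, which may in
   turn expose further dead insns on a later iteration of the scan. */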
7624 delete_trivially_dead_insns (insns, nreg)
7630 int in_libcall = 0, dead_libcall = 0;
7631 int ndead = 0, nlastdead, niterations = 0;
7633 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7634 /* First count the number of times each register is used. */
7635 counts = (int *) xcalloc (nreg, sizeof (int));
7636 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7637 count_reg_usage (insn, counts, NULL_RTX, 1);
7643 /* Go from the last insn to the first and delete insns that only set unused
7644 registers or copy a register to itself. As we delete an insn, remove
7645 usage counts for registers it uses.
7647 The first jump optimization pass may leave a real insn as the last
7648 insn in the function. We must not skip that insn or we may end
7649 up deleting code that is not really dead. */
7650 insn = get_last_insn ();
7651 if (! INSN_P (insn))
7652 insn = prev_real_insn (insn);
7654 for (; insn; insn = prev)
7658 prev = prev_real_insn (insn);
7660 /* Don't delete any insns that are part of a libcall block unless
7661 we can delete the whole libcall block.
7663 Flow or loop might get confused if we did that. Remember
7664 that we are scanning backwards. */
7665 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7669 dead_libcall = dead_libcall_p (insn, counts);
7671 else if (in_libcall)
7672 live_insn = ! dead_libcall;
7674 live_insn = insn_live_p (insn, counts);
7676 /* If this is a dead insn, delete it and show registers in it aren't being used. */
if (! live_insn)
7681 count_reg_usage (insn, counts, NULL_RTX, -1);
7682 delete_insn_and_edges (insn);
7686 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7693 while (ndead != nlastdead);
7695 if (rtl_dump_file && ndead)
7696 fprintf (rtl_dump_file, "Deleted %i trivially dead insns; %i iterations\n",
7697 ndead, niterations);
7700 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);