1 /* Optimize by combining instructions for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
21 /* This module is essentially the "combiner" phase of the U. of Arizona
22 Portable Optimizer, but redone to work on our list-structured
23 representation for RTL instead of their string representation.
25 The LOG_LINKS of each insn identify the most recent assignment
26 to each REG used in the insn. It is a list of previous insns,
27 each of which contains a SET for a REG that is used in this insn
28 and not used or set in between. LOG_LINKs never cross basic blocks.
29 They were set up by the preceding pass (lifetime analysis).
31 We try to combine each pair of insns joined by a logical link.
32 We also try to combine triples of insns A, B and C when
33 C has a link back to B and B has a link back to A.
35 LOG_LINKS does not have links for use of the CC0. They don't
36 need to, because the insn that sets the CC0 is always immediately
37 before the insn that tests it. So we always regard a branch
38 insn as having a logical link to the preceding insn. The same is true
39 for an insn explicitly using CC0.
41 We check (with use_crosses_set_p) to avoid combining in such a way
42 as to move a computation to a place where its value would be different.
44 Combination is done by mathematically substituting the previous
45 insn(s) values for the regs they set into the expressions in
46 the later insns that refer to these regs. If the result is a valid insn
47 for our target machine, according to the machine description,
48 we install it, delete the earlier insns, and update the data flow
49 information (LOG_LINKS and REG_NOTES) for what we did.
51 There are a few exceptions where the dataflow information created by
52 flow.c aren't completely updated:
54 - reg_live_length is not updated
55 - reg_n_refs is not adjusted in the rare case when a register is
56 no longer required in a computation
57 - there are extremely rare cases (see distribute_regnotes) when a
59 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
60 removed because there is no way to know which register it was
63 To simplify substitution, we combine only when the earlier insn(s)
64 consist of only a single assignment. To simplify updating afterward,
65 we never combine when a subroutine call appears in the middle.
67 Since we do not represent assignments to CC0 explicitly except when that
68 is all an insn does, there is no LOG_LINKS entry in an insn that uses
69 the condition code for the insn that set the condition code.
70 Fortunately, these two insns must be consecutive.
71 Therefore, every JUMP_INSN is taken to have an implicit logical link
72 to the preceding insn. This is not quite right, since non-jumps can
73 also use the condition code; but in practice such insns would not
83 /* Must precede rtl.h for FFS. */
89 #include "hard-reg-set.h"
91 #include "basic-block.h"
92 #include "insn-config.h"
93 #include "insn-flags.h"
94 #include "insn-codes.h"
95 #include "insn-attr.h"
99 /* It is not safe to use ordinary gen_lowpart in combine.
100 Use gen_lowpart_for_combine instead. See comments there. */
101 #define gen_lowpart dont_use_gen_lowpart_you_dummy
103 /* Number of attempts to combine instructions in this function. */
105 static int combine_attempts;
107 /* Number of attempts that got as far as substitution in this function. */
109 static int combine_merges;
111 /* Number of instructions combined with added SETs in this function. */
113 static int combine_extras;
115 /* Number of instructions combined in this function. */
117 static int combine_successes;
119 /* Totals over entire compilation. */
121 static int total_attempts, total_merges, total_extras, total_successes;
123 /* Define a defulat value for REVERSIBLE_CC_MODE.
124 We can never assume that a condition code mode is safe to reverse unless
125 the md tells us so. */
126 #ifndef REVERSIBLE_CC_MODE
127 #define REVERSIBLE_CC_MODE(MODE) 0
130 /* Vector mapping INSN_UIDs to cuids.
131 The cuids are like uids but increase monotonically always.
132 Combine always uses cuids so that it can compare them.
133 But actually renumbering the uids, which we used to do,
134 proves to be a bad idea because it makes it hard to compare
135 the dumps produced by earlier passes with those from later passes. */
137 static int *uid_cuid;
139 /* Get the cuid of an insn. */
141 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
143 /* Maximum register number, which is the size of the tables below. */
145 static int combine_max_regno;
147 /* Record last point of death of (hard or pseudo) register n. */
149 static rtx *reg_last_death;
151 /* Record last point of modification of (hard or pseudo) register n. */
153 static rtx *reg_last_set;
155 /* Record the cuid of the last insn that invalidated memory
156 (anything that writes memory, and subroutine calls, but not pushes). */
158 static int mem_last_set;
160 /* Record the cuid of the last CALL_INSN
161 so we can tell whether a potential combination crosses any calls. */
163 static int last_call_cuid;
165 /* When `subst' is called, this is the insn that is being modified
166 (by combining in a previous insn). The PATTERN of this insn
167 is still the old pattern partially modified and it should not be
168 looked at, but this may be used to examine the successors of the insn
169 to judge whether a simplification is valid. */
171 static rtx subst_insn;
173 /* This is the lowest CUID that `subst' is currently dealing with.
174 get_last_value will not return a value if the register was set at or
175 after this CUID. If not for this mechanism, we could get confused if
176 I2 or I1 in try_combine were an insn that used the old value of a register
177 to obtain a new value. In that case, we might erroneously get the
178 new value of the register when we wanted the old one. */
180 static int subst_low_cuid;
182 /* This contains any hard registers that are used in newpat; reg_dead_at_p
183 must consider all these registers to be always live. */
185 static HARD_REG_SET newpat_used_regs;
187 /* This is an insn to which a LOG_LINKS entry has been added. If this
188 insn is the earlier than I2 or I3, combine should rescan starting at
191 static rtx added_links_insn;
193 /* This is the value of undobuf.num_undo when we started processing this
194 substitution. This will prevent gen_rtx_combine from re-used a piece
195 from the previous expression. Doing so can produce circular rtl
198 static int previous_num_undos;
200 /* Basic block number of the block in which we are performing combines. */
201 static int this_basic_block;
203 /* The next group of arrays allows the recording of the last value assigned
204 to (hard or pseudo) register n. We use this information to see if a
205 operation being processed is redundant given a prior operation performed
206 on the register. For example, an `and' with a constant is redundant if
207 all the zero bits are already known to be turned off.
209 We use an approach similar to that used by cse, but change it in the
212 (1) We do not want to reinitialize at each label.
213 (2) It is useful, but not critical, to know the actual value assigned
214 to a register. Often just its form is helpful.
216 Therefore, we maintain the following arrays:
218 reg_last_set_value the last value assigned
219 reg_last_set_label records the value of label_tick when the
220 register was assigned
221 reg_last_set_table_tick records the value of label_tick when a
222 value using the register is assigned
223 reg_last_set_invalid set to non-zero when it is not valid
224 to use the value of this register in some
227 To understand the usage of these tables, it is important to understand
228 the distinction between the value in reg_last_set_value being valid
229 and the register being validly contained in some other expression in the
232 Entry I in reg_last_set_value is valid if it is non-zero, and either
233 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
235 Register I may validly appear in any expression returned for the value
236 of another register if reg_n_sets[i] is 1. It may also appear in the
237 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
238 reg_last_set_invalid[j] is zero.
240 If an expression is found in the table containing a register which may
241 not validly appear in an expression, the register is replaced by
242 something that won't match, (clobber (const_int 0)).
244 reg_last_set_invalid[i] is set non-zero when register I is being assigned
245 to and reg_last_set_table_tick[i] == label_tick. */
247 /* Record last value assigned to (hard or pseudo) register n. */
249 static rtx *reg_last_set_value;
251 /* Record the value of label_tick when the value for register n is placed in
252 reg_last_set_value[n]. */
254 static int *reg_last_set_label;
256 /* Record the value of label_tick when an expression involving register n
257 is placed in reg_last_set_value. */
259 static int *reg_last_set_table_tick;
261 /* Set non-zero if references to register n in expressions should not be
264 static char *reg_last_set_invalid;
266 /* Incremented for each label. */
268 static int label_tick;
270 /* Some registers that are set more than once and used in more than one
271 basic block are nevertheless always set in similar ways. For example,
272 a QImode register may be loaded from memory in two places on a machine
273 where byte loads zero extend.
275 We record in the following array what we know about the nonzero
276 bits of a register, specifically which bits are known to be zero.
278 If an entry is zero, it means that we don't know anything special. */
280 static unsigned HOST_WIDE_INT *reg_nonzero_bits;
282 /* Mode used to compute significance in reg_nonzero_bits. It is the largest
283 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
285 static enum machine_mode nonzero_bits_mode;
287 /* Nonzero if we know that a register has some leading bits that are always
288 equal to the sign bit. */
290 static char *reg_sign_bit_copies;
292 /* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
293 It is zero while computing them and after combine has completed. This
294 former test prevents propagating values based on previously set values,
295 which can be incorrect if a variable is modified in a loop. */
297 static int nonzero_sign_valid;
299 /* These arrays are maintained in parallel with reg_last_set_value
300 and are used to store the mode in which the register was last set,
301 the bits that were known to be zero when it was last set, and the
302 number of sign bits copies it was known to have when it was last set. */
304 static enum machine_mode *reg_last_set_mode;
305 static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
306 static char *reg_last_set_sign_bit_copies;
308 /* Record one modification to rtl structure
309 to be undone by storing old_contents into *where.
310 is_int is 1 if the contents are an int. */
315 union {rtx r; int i;} old_contents;
316 union {rtx *r; int *i;} where;
319 /* Record a bunch of changes to be undone, up to MAX_UNDO of them.
320 num_undo says how many are currently recorded.
322 storage is nonzero if we must undo the allocation of new storage.
323 The value of storage is what to pass to obfree.
325 other_insn is nonzero if we have modified some other insn in the process
326 of working on subst_insn. It must be verified too. */
334 struct undo undo[MAX_UNDO];
338 static struct undobuf undobuf;
340 /* Substitute NEWVAL, an rtx expression, into INTO, a place in some
341 insn. The substitution can be undone by undo_all. If INTO is already
342 set to NEWVAL, do not record this change. Because computing NEWVAL might
343 also call SUBST, we have to compute it before we put anything into
346 #define SUBST(INTO, NEWVAL) \
347 do { rtx _new = (NEWVAL); \
348 if (undobuf.num_undo < MAX_UNDO) \
350 undobuf.undo[undobuf.num_undo].is_int = 0; \
351 undobuf.undo[undobuf.num_undo].where.r = &INTO; \
352 undobuf.undo[undobuf.num_undo].old_contents.r = INTO; \
354 if (undobuf.undo[undobuf.num_undo].old_contents.r != INTO) \
355 undobuf.num_undo++; \
359 /* Similar to SUBST, but NEWVAL is an int. INTO will normally be an XINT
361 Note that substitution for the value of a CONST_INT is not safe. */
363 #define SUBST_INT(INTO, NEWVAL) \
364 do { if (undobuf.num_undo < MAX_UNDO) \
366 undobuf.undo[undobuf.num_undo].is_int = 1; \
367 undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO; \
368 undobuf.undo[undobuf.num_undo].old_contents.i = INTO; \
370 if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO) \
371 undobuf.num_undo++; \
375 /* Number of times the pseudo being substituted for
376 was found and replaced. */
378 static int n_occurrences;
380 static void init_reg_last_arrays PROTO(());
381 static void setup_incoming_promotions PROTO(());
382 static void set_nonzero_bits_and_sign_copies PROTO((rtx, rtx));
383 static int can_combine_p PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
384 static int combinable_i3pat PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
385 static rtx try_combine PROTO((rtx, rtx, rtx));
386 static void undo_all PROTO((void));
387 static rtx *find_split_point PROTO((rtx *, rtx));
388 static rtx subst PROTO((rtx, rtx, rtx, int, int));
389 static rtx simplify_rtx PROTO((rtx, enum machine_mode, int, int));
390 static rtx simplify_if_then_else PROTO((rtx));
391 static rtx simplify_set PROTO((rtx));
392 static rtx simplify_logical PROTO((rtx, int));
393 static rtx expand_compound_operation PROTO((rtx));
394 static rtx expand_field_assignment PROTO((rtx));
395 static rtx make_extraction PROTO((enum machine_mode, rtx, int, rtx, int,
397 static rtx extract_left_shift PROTO((rtx, int));
398 static rtx make_compound_operation PROTO((rtx, enum rtx_code));
399 static int get_pos_from_mask PROTO((unsigned HOST_WIDE_INT, int *));
400 static rtx force_to_mode PROTO((rtx, enum machine_mode,
401 unsigned HOST_WIDE_INT, rtx, int));
402 static rtx if_then_else_cond PROTO((rtx, rtx *, rtx *));
403 static rtx known_cond PROTO((rtx, enum rtx_code, rtx, rtx));
404 static rtx make_field_assignment PROTO((rtx));
405 static rtx apply_distributive_law PROTO((rtx));
406 static rtx simplify_and_const_int PROTO((rtx, enum machine_mode, rtx,
407 unsigned HOST_WIDE_INT));
408 static unsigned HOST_WIDE_INT nonzero_bits PROTO((rtx, enum machine_mode));
409 static int num_sign_bit_copies PROTO((rtx, enum machine_mode));
410 static int merge_outer_ops PROTO((enum rtx_code *, HOST_WIDE_INT *,
411 enum rtx_code, HOST_WIDE_INT,
412 enum machine_mode, int *));
413 static rtx simplify_shift_const PROTO((rtx, enum rtx_code, enum machine_mode,
415 static int recog_for_combine PROTO((rtx *, rtx, rtx *));
416 static rtx gen_lowpart_for_combine PROTO((enum machine_mode, rtx));
417 static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
419 static rtx gen_binary PROTO((enum rtx_code, enum machine_mode,
421 static rtx gen_unary PROTO((enum rtx_code, enum machine_mode,
422 enum machine_mode, rtx));
423 static enum rtx_code simplify_comparison PROTO((enum rtx_code, rtx *, rtx *));
424 static int reversible_comparison_p PROTO((rtx));
425 static void update_table_tick PROTO((rtx));
426 static void record_value_for_reg PROTO((rtx, rtx, rtx));
427 static void record_dead_and_set_regs_1 PROTO((rtx, rtx));
428 static void record_dead_and_set_regs PROTO((rtx));
429 static int get_last_value_validate PROTO((rtx *, int, int));
430 static rtx get_last_value PROTO((rtx));
431 static int use_crosses_set_p PROTO((rtx, int));
432 static void reg_dead_at_p_1 PROTO((rtx, rtx));
433 static int reg_dead_at_p PROTO((rtx, rtx));
434 static void move_deaths PROTO((rtx, int, rtx, rtx *));
435 static int reg_bitfield_target_p PROTO((rtx, rtx));
436 static void distribute_notes PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
437 static void distribute_links PROTO((rtx));
438 static void mark_used_regs_combine PROTO((rtx));
440 /* Main entry point for combiner. F is the first insn of the function.
441 NREGS is the first unused pseudo-reg number. */
444 combine_instructions (f, nregs)
448 register rtx insn, next, prev;
450 register rtx links, nextlinks;
452 combine_attempts = 0;
455 combine_successes = 0;
456 undobuf.num_undo = previous_num_undos = 0;
458 combine_max_regno = nregs;
461 = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
462 reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));
464 bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
465 bzero (reg_sign_bit_copies, nregs * sizeof (char));
467 reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
468 reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
469 reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
470 reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
471 reg_last_set_label = (int *) alloca (nregs * sizeof (int));
472 reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
474 = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
475 reg_last_set_nonzero_bits
476 = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
477 reg_last_set_sign_bit_copies
478 = (char *) alloca (nregs * sizeof (char));
480 init_reg_last_arrays ();
482 init_recog_no_volatile ();
484 /* Compute maximum uid value so uid_cuid can be allocated. */
486 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
487 if (INSN_UID (insn) > i)
490 uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
492 nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
494 /* Don't use reg_nonzero_bits when computing it. This can cause problems
495 when, for example, we have j <<= 1 in a loop. */
497 nonzero_sign_valid = 0;
499 /* Compute the mapping from uids to cuids.
500 Cuids are numbers assigned to insns, like uids,
501 except that cuids increase monotonically through the code.
503 Scan all SETs and see if we can deduce anything about what
504 bits are known to be zero for some registers and how many copies
505 of the sign bit are known to exist for those registers.
507 Also set any known values so that we can use it while searching
508 for what bits are known to be set. */
512 setup_incoming_promotions ();
514 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
516 INSN_CUID (insn) = ++i;
520 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
522 note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
523 record_dead_and_set_regs (insn);
526 if (GET_CODE (insn) == CODE_LABEL)
530 nonzero_sign_valid = 1;
532 /* Now scan all the insns in forward order. */
534 this_basic_block = -1;
538 init_reg_last_arrays ();
539 setup_incoming_promotions ();
541 for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
545 /* If INSN starts a new basic block, update our basic block number. */
546 if (this_basic_block + 1 < n_basic_blocks
547 && basic_block_head[this_basic_block + 1] == insn)
550 if (GET_CODE (insn) == CODE_LABEL)
553 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
555 /* Try this insn with each insn it links back to. */
557 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
558 if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
561 /* Try each sequence of three linked insns ending with this one. */
563 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
564 for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
565 nextlinks = XEXP (nextlinks, 1))
566 if ((next = try_combine (insn, XEXP (links, 0),
567 XEXP (nextlinks, 0))) != 0)
571 /* Try to combine a jump insn that uses CC0
572 with a preceding insn that sets CC0, and maybe with its
573 logical predecessor as well.
574 This is how we make decrement-and-branch insns.
575 We need this special code because data flow connections
576 via CC0 do not get entered in LOG_LINKS. */
578 if (GET_CODE (insn) == JUMP_INSN
579 && (prev = prev_nonnote_insn (insn)) != 0
580 && GET_CODE (prev) == INSN
581 && sets_cc0_p (PATTERN (prev)))
583 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
586 for (nextlinks = LOG_LINKS (prev); nextlinks;
587 nextlinks = XEXP (nextlinks, 1))
588 if ((next = try_combine (insn, prev,
589 XEXP (nextlinks, 0))) != 0)
593 /* Do the same for an insn that explicitly references CC0. */
594 if (GET_CODE (insn) == INSN
595 && (prev = prev_nonnote_insn (insn)) != 0
596 && GET_CODE (prev) == INSN
597 && sets_cc0_p (PATTERN (prev))
598 && GET_CODE (PATTERN (insn)) == SET
599 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
601 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
604 for (nextlinks = LOG_LINKS (prev); nextlinks;
605 nextlinks = XEXP (nextlinks, 1))
606 if ((next = try_combine (insn, prev,
607 XEXP (nextlinks, 0))) != 0)
611 /* Finally, see if any of the insns that this insn links to
612 explicitly references CC0. If so, try this insn, that insn,
613 and its predecessor if it sets CC0. */
614 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
615 if (GET_CODE (XEXP (links, 0)) == INSN
616 && GET_CODE (PATTERN (XEXP (links, 0))) == SET
617 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
618 && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
619 && GET_CODE (prev) == INSN
620 && sets_cc0_p (PATTERN (prev))
621 && (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
625 /* Try combining an insn with two different insns whose results it
627 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
628 for (nextlinks = XEXP (links, 1); nextlinks;
629 nextlinks = XEXP (nextlinks, 1))
630 if ((next = try_combine (insn, XEXP (links, 0),
631 XEXP (nextlinks, 0))) != 0)
634 if (GET_CODE (insn) != NOTE)
635 record_dead_and_set_regs (insn);
642 total_attempts += combine_attempts;
643 total_merges += combine_merges;
644 total_extras += combine_extras;
645 total_successes += combine_successes;
647 nonzero_sign_valid = 0;
650 /* Wipe the reg_last_xxx arrays in preparation for another pass. */
653 init_reg_last_arrays ()
655 int nregs = combine_max_regno;
657 bzero ((char *) reg_last_death, nregs * sizeof (rtx));
658 bzero ((char *) reg_last_set, nregs * sizeof (rtx));
659 bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
660 bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
661 bzero ((char *) reg_last_set_label, nregs * sizeof (int));
662 bzero (reg_last_set_invalid, nregs * sizeof (char));
663 bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
664 bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
665 bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
668 /* Set up any promoted values for incoming argument registers. */
671 setup_incoming_promotions ()
673 #ifdef PROMOTE_FUNCTION_ARGS
676 enum machine_mode mode;
678 rtx first = get_insns ();
680 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
681 if (FUNCTION_ARG_REGNO_P (regno)
682 && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
683 record_value_for_reg (reg, first,
684 gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
686 gen_rtx (CLOBBER, mode, const0_rtx)));
690 /* Called via note_stores. If X is a pseudo that is used in more than
691 one basic block, is narrower that HOST_BITS_PER_WIDE_INT, and is being
692 set, record what bits are known zero. If we are clobbering X,
693 ignore this "set" because the clobbered value won't be used.
695 If we are setting only a portion of X and we can't figure out what
696 portion, assume all bits will be used since we don't know what will
699 Similarly, set how many bits of X are known to be copies of the sign bit
700 at all locations in the function. This is the smallest number implied
704 set_nonzero_bits_and_sign_copies (x, set)
710 if (GET_CODE (x) == REG
711 && REGNO (x) >= FIRST_PSEUDO_REGISTER
712 && reg_n_sets[REGNO (x)] > 1
713 && reg_basic_block[REGNO (x)] < 0
714 /* If this register is undefined at the start of the file, we can't
715 say what its contents were. */
716 && ! (basic_block_live_at_start[0][REGNO (x) / REGSET_ELT_BITS]
717 & ((REGSET_ELT_TYPE) 1 << (REGNO (x) % REGSET_ELT_BITS)))
718 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
720 if (GET_CODE (set) == CLOBBER)
722 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
723 reg_sign_bit_copies[REGNO (x)] = 0;
727 /* If this is a complex assignment, see if we can convert it into a
728 simple assignment. */
729 set = expand_field_assignment (set);
731 /* If this is a simple assignment, or we have a paradoxical SUBREG,
732 set what we know about X. */
734 if (SET_DEST (set) == x
735 || (GET_CODE (SET_DEST (set)) == SUBREG
736 && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
737 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
738 && SUBREG_REG (SET_DEST (set)) == x))
740 rtx src = SET_SRC (set);
742 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
743 /* If X is narrower than a word and SRC is a non-negative
744 constant that would appear negative in the mode of X,
745 sign-extend it for use in reg_nonzero_bits because some
746 machines (maybe most) will actually do the sign-extension
747 and this is the conservative approach.
749 ??? For 2.5, try to tighten up the MD files in this regard
750 instead of this kludge. */
752 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
753 && GET_CODE (src) == CONST_INT
755 && 0 != (INTVAL (src)
757 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
758 src = GEN_INT (INTVAL (src)
759 | ((HOST_WIDE_INT) (-1)
760 << GET_MODE_BITSIZE (GET_MODE (x))));
763 reg_nonzero_bits[REGNO (x)]
764 |= nonzero_bits (src, nonzero_bits_mode);
765 num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
766 if (reg_sign_bit_copies[REGNO (x)] == 0
767 || reg_sign_bit_copies[REGNO (x)] > num)
768 reg_sign_bit_copies[REGNO (x)] = num;
772 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
773 reg_sign_bit_copies[REGNO (x)] = 0;
778 /* See if INSN can be combined into I3. PRED and SUCC are optionally
779 insns that were previously combined into I3 or that will be combined
780 into the merger of INSN and I3.
782 Return 0 if the combination is not allowed for any reason.
784 If the combination is allowed, *PDEST will be set to the single
785 destination of INSN and *PSRC to the single source, and this function
789 can_combine_p (insn, i3, pred, succ, pdest, psrc)
796 rtx set = 0, src, dest;
798 int all_adjacent = (succ ? (next_active_insn (insn) == succ
799 && next_active_insn (succ) == i3)
800 : next_active_insn (insn) == i3);
802 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
803 or a PARALLEL consisting of such a SET and CLOBBERs.
805 If INSN has CLOBBER parallel parts, ignore them for our processing.
806 By definition, these happen during the execution of the insn. When it
807 is merged with another insn, all bets are off. If they are, in fact,
808 needed and aren't also supplied in I3, they may be added by
809 recog_for_combine. Otherwise, it won't match.
811 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
814 Get the source and destination of INSN. If more than one, can't
817 if (GET_CODE (PATTERN (insn)) == SET)
818 set = PATTERN (insn);
819 else if (GET_CODE (PATTERN (insn)) == PARALLEL
820 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
822 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
824 rtx elt = XVECEXP (PATTERN (insn), 0, i);
826 switch (GET_CODE (elt))
828 /* We can ignore CLOBBERs. */
833 /* Ignore SETs whose result isn't used but not those that
834 have side-effects. */
835 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
836 && ! side_effects_p (elt))
839 /* If we have already found a SET, this is a second one and
840 so we cannot combine with this insn. */
848 /* Anything else means we can't combine. */
854 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
855 so don't do anything with it. */
856 || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
865 set = expand_field_assignment (set);
866 src = SET_SRC (set), dest = SET_DEST (set);
868 /* Don't eliminate a store in the stack pointer. */
869 if (dest == stack_pointer_rtx
870 /* If we couldn't eliminate a field assignment, we can't combine. */
871 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
872 /* Don't combine with an insn that sets a register to itself if it has
873 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
874 || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
875 /* Can't merge a function call. */
876 || GET_CODE (src) == CALL
877 /* Don't eliminate a function call argument. */
878 || (GET_CODE (i3) == CALL_INSN && find_reg_fusage (i3, USE, dest))
879 /* Don't substitute into an incremented register. */
880 || FIND_REG_INC_NOTE (i3, dest)
881 || (succ && FIND_REG_INC_NOTE (succ, dest))
882 /* Don't combine the end of a libcall into anything. */
883 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
884 /* Make sure that DEST is not used after SUCC but before I3. */
885 || (succ && ! all_adjacent
886 && reg_used_between_p (dest, succ, i3))
887 /* Make sure that the value that is to be substituted for the register
888 does not use any registers whose values alter in between. However,
889 If the insns are adjacent, a use can't cross a set even though we
890 think it might (this can happen for a sequence of insns each setting
891 the same destination; reg_last_set of that register might point to
892 a NOTE). If INSN has a REG_EQUIV note, the register is always
893 equivalent to the memory so the substitution is valid even if there
894 are intervening stores. Also, don't move a volatile asm or
895 UNSPEC_VOLATILE across any other insns. */
897 && (((GET_CODE (src) != MEM
898 || ! find_reg_note (insn, REG_EQUIV, src))
899 && use_crosses_set_p (src, INSN_CUID (insn)))
900 || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
901 || GET_CODE (src) == UNSPEC_VOLATILE))
902 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
903 better register allocation by not doing the combine. */
904 || find_reg_note (i3, REG_NO_CONFLICT, dest)
905 || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
906 /* Don't combine across a CALL_INSN, because that would possibly
907 change whether the life span of some REGs crosses calls or not,
908 and it is a pain to update that information.
909 Exception: if source is a constant, moving it later can't hurt.
910 Accept that special case, because it helps -fforce-addr a lot. */
911 || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
914 /* DEST must either be a REG or CC0. */
915 if (GET_CODE (dest) == REG)
917 /* If register alignment is being enforced for multi-word items in all
918 cases except for parameters, it is possible to have a register copy
919 insn referencing a hard register that is not allowed to contain the
920 mode being copied and which would not be valid as an operand of most
921 insns. Eliminate this problem by not combining with such an insn.
923 Also, on some machines we don't want to extend the life of a hard
926 if (GET_CODE (src) == REG
927 && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
928 && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
929 /* Don't extend the life of a hard register unless it is
930 user variable (if we have few registers) or it can't
931 fit into the desired register (meaning something special
933 || (REGNO (src) < FIRST_PSEUDO_REGISTER
934 && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
935 #ifdef SMALL_REGISTER_CLASSES
936 || ! REG_USERVAR_P (src)
941 else if (GET_CODE (dest) != CC0)
944 /* Don't substitute for a register intended as a clobberable operand.
945 Similarly, don't substitute an expression containing a register that
946 will be clobbered in I3. */
947 if (GET_CODE (PATTERN (i3)) == PARALLEL)
948 for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
949 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
950 && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
952 || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
955 /* If INSN contains anything volatile, or is an `asm' (whether volatile
956 or not), reject, unless nothing volatile comes between it and I3,
957 with the exception of SUCC. */
959 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
960 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
961 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
962 && p != succ && volatile_refs_p (PATTERN (p)))
965 /* If there are any volatile insns between INSN and I3, reject, because
966 they might affect machine state. */
968 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
969 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
970 && p != succ && volatile_insn_p (PATTERN (p)))
973 /* If INSN or I2 contains an autoincrement or autodecrement,
974 make sure that register is not used between there and I3,
975 and not already used in I3 either.
976 Also insist that I3 not be a jump; if it were one
977 and the incremented register were spilled, we would lose. */
980 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
981 if (REG_NOTE_KIND (link) == REG_INC
982 && (GET_CODE (i3) == JUMP_INSN
983 || reg_used_between_p (XEXP (link, 0), insn, i3)
984 || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
989 /* Don't combine an insn that follows a CC0-setting insn.
990 An insn that uses CC0 must not be separated from the one that sets it.
991 We do, however, allow I2 to follow a CC0-setting insn if that insn
992 is passed as I1; in that case it will be deleted also.
993 We also allow combining in this case if all the insns are adjacent
994 because that would leave the two CC0 insns adjacent as well.
995 It would be more logical to test whether CC0 occurs inside I1 or I2,
996 but that would be much slower, and this ought to be equivalent. */
998 p = prev_nonnote_insn (insn);
999 if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
1004 /* If we get here, we have passed all the tests and the combination is
1013 /* LOC is the location within I3 that contains its pattern or the component
1014 of a PARALLEL of the pattern. We validate that it is valid for combining.
1016 One problem is if I3 modifies its output, as opposed to replacing it
1017 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1018 so would produce an insn that is not equivalent to the original insns.
1022 (set (reg:DI 101) (reg:DI 100))
1023 (set (subreg:SI (reg:DI 101) 0) <foo>)
1025 This is NOT equivalent to:
1027 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1028 (set (reg:DI 101) (reg:DI 100))])
1030 Not only does this modify 100 (in which case it might still be valid
1031 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1033 We can also run into a problem if I2 sets a register that I1
1034 uses and I1 gets directly substituted into I3 (not via I2). In that
1035 case, we would be getting the wrong value of I2DEST into I3, so we
1036 must reject the combination. This case occurs when I2 and I1 both
1037 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1038 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
1039 of a SET must prevent combination from occurring.
1041 On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
1042 if the destination of a SET is a hard register that isn't a user
1045 Before doing the above check, we first try to expand a field assignment
1046 into a set of logical operations.
1048 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
1049 we place a register that is both set and used within I3. If more than one
1050 such register is detected, we fail.
1052 Return 1 if the combination is valid, zero otherwise. */
1055 combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
1061 rtx *pi3dest_killed;
1065 if (GET_CODE (x) == SET)
1067 rtx set = expand_field_assignment (x);
1068 rtx dest = SET_DEST (set);
1069 rtx src = SET_SRC (set);
1070 rtx inner_dest = dest, inner_src = src;
1074 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1075 || GET_CODE (inner_dest) == SUBREG
1076 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1077 inner_dest = XEXP (inner_dest, 0);
1079 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
1082 while (GET_CODE (inner_src) == STRICT_LOW_PART
1083 || GET_CODE (inner_src) == SUBREG
1084 || GET_CODE (inner_src) == ZERO_EXTRACT)
1085 inner_src = XEXP (inner_src, 0);
1087 /* If it is better that two different modes keep two different pseudos,
1088 avoid combining them. This avoids producing the following pattern
1090 (set (subreg:SI (reg/v:QI 21) 0)
1091 (lshiftrt:SI (reg/v:SI 20)
1093 If that were made, reload could not handle the pair of
1094 reg 20/21, since it would try to get any GENERAL_REGS
1095 but some of them don't handle QImode. */
1097 if (rtx_equal_p (inner_src, i2dest)
1098 && GET_CODE (inner_dest) == REG
1099 && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
1103 /* Check for the case where I3 modifies its output, as
1105 if ((inner_dest != dest
1106 && (reg_overlap_mentioned_p (i2dest, inner_dest)
1107 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
1108 /* This is the same test done in can_combine_p except that we
1109 allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
1111 || (GET_CODE (inner_dest) == REG
1112 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1113 && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1114 GET_MODE (inner_dest))
1115 #ifdef SMALL_REGISTER_CLASSES
1116 || (GET_CODE (src) != CALL && ! REG_USERVAR_P (inner_dest))
1119 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1122 /* If DEST is used in I3, it is being killed in this insn,
1123 so record that for later.
1124 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1125 STACK_POINTER_REGNUM, since these are always considered to be
1126 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
1127 if (pi3dest_killed && GET_CODE (dest) == REG
1128 && reg_referenced_p (dest, PATTERN (i3))
1129 && REGNO (dest) != FRAME_POINTER_REGNUM
1130 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1131 && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
1133 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1134 && (REGNO (dest) != ARG_POINTER_REGNUM
1135 || ! fixed_regs [REGNO (dest)])
1137 && REGNO (dest) != STACK_POINTER_REGNUM)
1139 if (*pi3dest_killed)
1142 *pi3dest_killed = dest;
1146 else if (GET_CODE (x) == PARALLEL)
1150 for (i = 0; i < XVECLEN (x, 0); i++)
1151 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1152 i1_not_in_src, pi3dest_killed))
1159 /* Try to combine the insns I1 and I2 into I3.
1160 Here I1 and I2 appear earlier than I3.
1161 I1 can be zero; then we combine just I2 into I3.
1163 It we are combining three insns and the resulting insn is not recognized,
1164 try splitting it into two insns. If that happens, I2 and I3 are retained
1165 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1168 Return 0 if the combination does not work. Then nothing is changed.
1169 If we did the combination, return the insn at which combine should
1173 try_combine (i3, i2, i1)
1174 register rtx i3, i2, i1;
1176 /* New patterns for I3 and I3, respectively. */
1177 rtx newpat, newi2pat = 0;
1178 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1179 int added_sets_1, added_sets_2;
1180 /* Total number of SETs to put into I3. */
1182 /* Nonzero is I2's body now appears in I3. */
1184 /* INSN_CODEs for new I3, new I2, and user of condition code. */
1185 int insn_code_number, i2_code_number, other_code_number;
1186 /* Contains I3 if the destination of I3 is used in its source, which means
1187 that the old life of I3 is being killed. If that usage is placed into
1188 I2 and not in I3, a REG_DEAD note must be made. */
1189 rtx i3dest_killed = 0;
1190 /* SET_DEST and SET_SRC of I2 and I1. */
1191 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1192 /* PATTERN (I2), or a copy of it in certain cases. */
1194 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
1195 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1196 int i1_feeds_i3 = 0;
1197 /* Notes that must be added to REG_NOTES in I3 and I2. */
1198 rtx new_i3_notes, new_i2_notes;
1199 /* Notes that we substituted I3 into I2 instead of the normal case. */
1200 int i3_subst_into_i2 = 0;
1201 /* Notes that I1, I2 or I3 is a MULT operation. */
1209 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1210 This can occur when flow deletes an insn that it has merged into an
1211 auto-increment address. We also can't do anything if I3 has a
1212 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1215 if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
1216 || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
1217 || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
1218 || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
1223 undobuf.num_undo = previous_num_undos = 0;
1224 undobuf.other_insn = 0;
1226 /* Save the current high-water-mark so we can free storage if we didn't
1227 accept this combination. */
1228 undobuf.storage = (char *) oballoc (0);
1230 /* Reset the hard register usage information. */
1231 CLEAR_HARD_REG_SET (newpat_used_regs);
1233 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1234 code below, set I1 to be the earlier of the two insns. */
1235 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1236 temp = i1, i1 = i2, i2 = temp;
1238 added_links_insn = 0;
1240 /* First check for one important special-case that the code below will
1241 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1242 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1243 we may be able to replace that destination with the destination of I3.
1244 This occurs in the common code where we compute both a quotient and
1245 remainder into a structure, in which case we want to do the computation
1246 directly into the structure to avoid register-register copies.
1248 We make very conservative checks below and only try to handle the
1249 most common cases of this. For example, we only handle the case
1250 where I2 and I3 are adjacent to avoid making difficult register
1253 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1254 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1255 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1256 #ifdef SMALL_REGISTER_CLASSES
1257 && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1258 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1259 || REG_USERVAR_P (SET_DEST (PATTERN (i3))))
1261 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1262 && GET_CODE (PATTERN (i2)) == PARALLEL
1263 && ! side_effects_p (SET_DEST (PATTERN (i3)))
1264 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1265 below would need to check what is inside (and reg_overlap_mentioned_p
1266 doesn't support those codes anyway). Don't allow those destinations;
1267 the resulting insn isn't likely to be recognized anyway. */
1268 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1269 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1270 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1271 SET_DEST (PATTERN (i3)))
1272 && next_real_insn (i2) == i3)
1274 rtx p2 = PATTERN (i2);
1276 /* Make sure that the destination of I3,
1277 which we are going to substitute into one output of I2,
1278 is not used within another output of I2. We must avoid making this:
1279 (parallel [(set (mem (reg 69)) ...)
1280 (set (reg 69) ...)])
1281 which is not well-defined as to order of actions.
1282 (Besides, reload can't handle output reloads for this.)
1284 The problem can also happen if the dest of I3 is a memory ref,
1285 if another dest in I2 is an indirect memory ref. */
1286 for (i = 0; i < XVECLEN (p2, 0); i++)
1287 if (GET_CODE (XVECEXP (p2, 0, i)) == SET
1288 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1289 SET_DEST (XVECEXP (p2, 0, i))))
1292 if (i == XVECLEN (p2, 0))
1293 for (i = 0; i < XVECLEN (p2, 0); i++)
1294 if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1299 subst_low_cuid = INSN_CUID (i2);
1301 added_sets_2 = added_sets_1 = 0;
1302 i2dest = SET_SRC (PATTERN (i3));
1304 /* Replace the dest in I2 with our dest and make the resulting
1305 insn the new pattern for I3. Then skip to where we
1306 validate the pattern. Everything was set up above. */
1307 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1308 SET_DEST (PATTERN (i3)));
1311 i3_subst_into_i2 = 1;
1312 goto validate_replacement;
1317 /* If we have no I1 and I2 looks like:
1318 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1320 make up a dummy I1 that is
1323 (set (reg:CC X) (compare:CC Y (const_int 0)))
1325 (We can ignore any trailing CLOBBERs.)
1327 This undoes a previous combination and allows us to match a branch-and-
1330 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1331 && XVECLEN (PATTERN (i2), 0) >= 2
1332 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1333 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1335 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1336 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1337 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1338 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1339 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1340 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1342 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1343 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1348 /* We make I1 with the same INSN_UID as I2. This gives it
1349 the same INSN_CUID for value tracking. Our fake I1 will
1350 never appear in the insn stream so giving it the same INSN_UID
1351 as I2 will not cause a problem. */
1353 i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
1354 XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);
1356 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1357 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1358 SET_DEST (PATTERN (i1)));
1363 /* Verify that I2 and I1 are valid for combining. */
1364 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1365 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1371 /* Record whether I2DEST is used in I2SRC and similarly for the other
1372 cases. Knowing this will help in register status updating below. */
1373 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1374 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1375 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1377 /* See if I1 directly feeds into I3. It does if I1DEST is not used
1379 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1381 /* Ensure that I3's pattern can be the destination of combines. */
1382 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1383 i1 && i2dest_in_i1src && i1_feeds_i3,
1390 /* See if any of the insns is a MULT operation. Unless one is, we will
1391 reject a combination that is, since it must be slower. Be conservative
1393 if (GET_CODE (i2src) == MULT
1394 || (i1 != 0 && GET_CODE (i1src) == MULT)
1395 || (GET_CODE (PATTERN (i3)) == SET
1396 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1399 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1400 We used to do this EXCEPT in one case: I3 has a post-inc in an
1401 output operand. However, that exception can give rise to insns like
1403 which is a famous insn on the PDP-11 where the value of r3 used as the
1404 source was model-dependent. Avoid this sort of thing. */
1407 if (!(GET_CODE (PATTERN (i3)) == SET
1408 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1409 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1410 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1411 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1412 /* It's not the exception. */
1415 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1416 if (REG_NOTE_KIND (link) == REG_INC
1417 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1419 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1426 /* See if the SETs in I1 or I2 need to be kept around in the merged
1427 instruction: whenever the value set there is still needed past I3.
1428 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1430 For the SET in I1, we have two cases: If I1 and I2 independently
1431 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1432 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1433 in I1 needs to be kept around unless I1DEST dies or is set in either
1434 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1435 I1DEST. If so, we know I1 feeds into I2. */
1437 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1440 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1441 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1443 /* If the set in I2 needs to be kept around, we must make a copy of
1444 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1445 PATTERN (I2), we are only substituting for the original I1DEST, not into
1446 an already-substituted copy. This also prevents making self-referential
1447 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1450 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1451 ? gen_rtx (SET, VOIDmode, i2dest, i2src)
1455 i2pat = copy_rtx (i2pat);
1459 /* Substitute in the latest insn for the regs set by the earlier ones. */
1461 maxreg = max_reg_num ();
1465 /* It is possible that the source of I2 or I1 may be performing an
1466 unneeded operation, such as a ZERO_EXTEND of something that is known
1467 to have the high part zero. Handle that case by letting subst look at
1468 the innermost one of them.
1470 Another way to do this would be to have a function that tries to
1471 simplify a single insn instead of merging two or more insns. We don't
1472 do this because of the potential of infinite loops and because
1473 of the potential extra memory required. However, doing it the way
1474 we are is a bit of a kludge and doesn't catch all cases.
1476 But only do this if -fexpensive-optimizations since it slows things down
1477 and doesn't usually win. */
1479 if (flag_expensive_optimizations)
1481 /* Pass pc_rtx so no substitutions are done, just simplifications.
1482 The cases that we are interested in here do not involve the few
1483 cases were is_replaced is checked. */
1486 subst_low_cuid = INSN_CUID (i1);
1487 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1491 subst_low_cuid = INSN_CUID (i2);
1492 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1495 previous_num_undos = undobuf.num_undo;
1499 /* Many machines that don't use CC0 have insns that can both perform an
1500 arithmetic operation and set the condition code. These operations will
1501 be represented as a PARALLEL with the first element of the vector
1502 being a COMPARE of an arithmetic operation with the constant zero.
1503 The second element of the vector will set some pseudo to the result
1504 of the same arithmetic operation. If we simplify the COMPARE, we won't
1505 match such a pattern and so will generate an extra insn. Here we test
1506 for this case, where both the comparison and the operation result are
1507 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1508 I2SRC. Later we will make the PARALLEL that contains I2. */
1510 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1511 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1512 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1513 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1516 enum machine_mode compare_mode;
1518 newpat = PATTERN (i3);
1519 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1523 #ifdef EXTRA_CC_MODES
1524 /* See if a COMPARE with the operand we substituted in should be done
1525 with the mode that is currently being used. If not, do the same
1526 processing we do in `subst' for a SET; namely, if the destination
1527 is used only once, try to replace it with a register of the proper
1528 mode and also replace the COMPARE. */
1529 if (undobuf.other_insn == 0
1530 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1531 &undobuf.other_insn))
1532 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1534 != GET_MODE (SET_DEST (newpat))))
1536 int regno = REGNO (SET_DEST (newpat));
1537 rtx new_dest = gen_rtx (REG, compare_mode, regno);
1539 if (regno < FIRST_PSEUDO_REGISTER
1540 || (reg_n_sets[regno] == 1 && ! added_sets_2
1541 && ! REG_USERVAR_P (SET_DEST (newpat))))
1543 if (regno >= FIRST_PSEUDO_REGISTER)
1544 SUBST (regno_reg_rtx[regno], new_dest);
1546 SUBST (SET_DEST (newpat), new_dest);
1547 SUBST (XEXP (*cc_use, 0), new_dest);
1548 SUBST (SET_SRC (newpat),
1549 gen_rtx_combine (COMPARE, compare_mode,
1550 i2src, const0_rtx));
1553 undobuf.other_insn = 0;
1560 n_occurrences = 0; /* `subst' counts here */
1562 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1563 need to make a unique copy of I2SRC each time we substitute it
1564 to avoid self-referential rtl. */
1566 subst_low_cuid = INSN_CUID (i2);
1567 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1568 ! i1_feeds_i3 && i1dest_in_i1src);
1569 previous_num_undos = undobuf.num_undo;
1571 /* Record whether i2's body now appears within i3's body. */
1572 i2_is_used = n_occurrences;
1575 /* If we already got a failure, don't try to do more. Otherwise,
1576 try to substitute in I1 if we have it. */
1578 if (i1 && GET_CODE (newpat) != CLOBBER)
1580 /* Before we can do this substitution, we must redo the test done
1581 above (see detailed comments there) that ensures that I1DEST
1582 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1584 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1592 subst_low_cuid = INSN_CUID (i1);
1593 newpat = subst (newpat, i1dest, i1src, 0, 0);
1594 previous_num_undos = undobuf.num_undo;
1597 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1598 to count all the ways that I2SRC and I1SRC can be used. */
1599 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1600 && i2_is_used + added_sets_2 > 1)
1601 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1602 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1604 /* Fail if we tried to make a new register (we used to abort, but there's
1605 really no reason to). */
1606 || max_reg_num () != maxreg
1607 /* Fail if we couldn't do something and have a CLOBBER. */
1608 || GET_CODE (newpat) == CLOBBER
1609 /* Fail if this new pattern is a MULT and we didn't have one before
1610 at the outer level. */
1611 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1618 /* If the actions of the earlier insns must be kept
1619 in addition to substituting them into the latest one,
1620 we must make a new PARALLEL for the latest insn
1621 to hold additional the SETs. */
1623 if (added_sets_1 || added_sets_2)
1627 if (GET_CODE (newpat) == PARALLEL)
1629 rtvec old = XVEC (newpat, 0);
1630 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1631 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1632 bcopy ((char *) &old->elem[0], (char *) &XVECEXP (newpat, 0, 0),
1633 sizeof (old->elem[0]) * old->num_elem);
1638 total_sets = 1 + added_sets_1 + added_sets_2;
1639 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1640 XVECEXP (newpat, 0, 0) = old;
1644 XVECEXP (newpat, 0, --total_sets)
1645 = (GET_CODE (PATTERN (i1)) == PARALLEL
1646 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1650 /* If there is no I1, use I2's body as is. We used to also not do
1651 the subst call below if I2 was substituted into I3,
1652 but that could lose a simplification. */
1654 XVECEXP (newpat, 0, --total_sets) = i2pat;
1656 /* See comment where i2pat is assigned. */
1657 XVECEXP (newpat, 0, --total_sets)
1658 = subst (i2pat, i1dest, i1src, 0, 0);
1662 /* We come here when we are replacing a destination in I2 with the
1663 destination of I3. */
1664 validate_replacement:
1666 /* Note which hard regs this insn has as inputs. */
1667 mark_used_regs_combine (newpat);
1669 /* Is the result of combination a valid instruction? */
1670 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1672 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1673 the second SET's destination is a register that is unused. In that case,
1674 we just need the first SET. This can occur when simplifying a divmod
1675 insn. We *must* test for this case here because the code below that
1676 splits two independent SETs doesn't handle this case correctly when it
1677 updates the register status. Also check the case where the first
1678 SET's destination is unused. That would not cause incorrect code, but
1679 does cause an unneeded insn to remain. */
1681 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1682 && XVECLEN (newpat, 0) == 2
1683 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1684 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1685 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1686 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1687 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1688 && asm_noperands (newpat) < 0)
1690 newpat = XVECEXP (newpat, 0, 0);
1691 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1694 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1695 && XVECLEN (newpat, 0) == 2
1696 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1697 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1698 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1699 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1700 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1701 && asm_noperands (newpat) < 0)
1703 newpat = XVECEXP (newpat, 0, 1);
1704 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1707 /* If we were combining three insns and the result is a simple SET
1708 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1709 insns. There are two ways to do this. It can be split using a
1710 machine-specific method (like when you have an addition of a large
1711 constant) or by combine in the function find_split_point. */
1713 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1714 && asm_noperands (newpat) < 0)
1716 rtx m_split, *split;
1717 rtx ni2dest = i2dest;
1719 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1720 use I2DEST as a scratch register will help. In the latter case,
1721 convert I2DEST to the mode of the source of NEWPAT if we can. */
1723 m_split = split_insns (newpat, i3);
1725 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1726 inputs of NEWPAT. */
1728 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1729 possible to try that as a scratch reg. This would require adding
1730 more code to make it work though. */
1732 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
1734 /* If I2DEST is a hard register or the only use of a pseudo,
1735 we can change its mode. */
1736 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1737 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1738 && GET_CODE (i2dest) == REG
1739 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1740 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1741 && ! REG_USERVAR_P (i2dest))))
1742 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1745 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1746 gen_rtvec (2, newpat,
1753 if (m_split && GET_CODE (m_split) == SEQUENCE
1754 && XVECLEN (m_split, 0) == 2
1755 && (next_real_insn (i2) == i3
1756 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1760 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1761 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1763 i3set = single_set (XVECEXP (m_split, 0, 1));
1764 i2set = single_set (XVECEXP (m_split, 0, 0));
1766 /* In case we changed the mode of I2DEST, replace it in the
1767 pseudo-register table here. We can't do it above in case this
1768 code doesn't get executed and we do a split the other way. */
1770 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1771 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1773 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1775 /* If I2 or I3 has multiple SETs, we won't know how to track
1776 register status, so don't use these insns. */
1778 if (i2_code_number >= 0 && i2set && i3set)
1779 insn_code_number = recog_for_combine (&newi3pat, i3,
1782 if (insn_code_number >= 0)
1785 /* It is possible that both insns now set the destination of I3.
1786 If so, we must show an extra use of it. */
1788 if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
1789 && GET_CODE (SET_DEST (i2set)) == REG
1790 && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
1791 reg_n_sets[REGNO (SET_DEST (i2set))]++;
1794 /* If we can split it and use I2DEST, go ahead and see if that
1795 helps things be recognized. Verify that none of the registers
1796 are set between I2 and I3. */
1797 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1799 && GET_CODE (i2dest) == REG
1801 /* We need I2DEST in the proper mode. If it is a hard register
1802 or the only use of a pseudo, we can change its mode. */
1803 && (GET_MODE (*split) == GET_MODE (i2dest)
1804 || GET_MODE (*split) == VOIDmode
1805 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1806 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1807 && ! REG_USERVAR_P (i2dest)))
1808 && (next_real_insn (i2) == i3
1809 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1810 /* We can't overwrite I2DEST if its value is still used by
1812 && ! reg_referenced_p (i2dest, newpat))
1814 rtx newdest = i2dest;
1815 enum rtx_code split_code = GET_CODE (*split);
1816 enum machine_mode split_mode = GET_MODE (*split);
1818 /* Get NEWDEST as a register in the proper mode. We have already
1819 validated that we can do this. */
1820 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
1822 newdest = gen_rtx (REG, split_mode, REGNO (i2dest));
1824 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1825 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1828 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1829 an ASHIFT. This can occur if it was inside a PLUS and hence
1830 appeared to be a memory address. This is a kludge. */
1831 if (split_code == MULT
1832 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1833 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1834 SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
1835 XEXP (*split, 0), GEN_INT (i)));
1837 #ifdef INSN_SCHEDULING
1838 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1839 be written as a ZERO_EXTEND. */
1840 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
1841 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
1845 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1846 SUBST (*split, newdest);
1847 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1849 /* If the split point was a MULT and we didn't have one before,
1850 don't use one now. */
1851 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
1852 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1856 /* Check for a case where we loaded from memory in a narrow mode and
1857 then sign extended it, but we need both registers. In that case,
1858 we have a PARALLEL with both loads from the same memory location.
1859 We can split this into a load from memory followed by a register-register
1860 copy. This saves at least one insn, more if register allocation can
1863 We cannot do this if the destination of the second assignment is
1864 a register that we have already assumed is zero-extended. Similarly
1865 for a SUBREG of such a register. */
1867 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1868 && GET_CODE (newpat) == PARALLEL
1869 && XVECLEN (newpat, 0) == 2
1870 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1871 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1872 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1873 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1874 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1875 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1877 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1878 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1879 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
1880 (GET_CODE (temp) == REG
1881 && reg_nonzero_bits[REGNO (temp)] != 0
1882 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1883 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1884 && (reg_nonzero_bits[REGNO (temp)]
1885 != GET_MODE_MASK (word_mode))))
1886 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
1887 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
1888 (GET_CODE (temp) == REG
1889 && reg_nonzero_bits[REGNO (temp)] != 0
1890 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1891 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1892 && (reg_nonzero_bits[REGNO (temp)]
1893 != GET_MODE_MASK (word_mode)))))
1894 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1895 SET_SRC (XVECEXP (newpat, 0, 1)))
1896 && ! find_reg_note (i3, REG_UNUSED,
1897 SET_DEST (XVECEXP (newpat, 0, 0))))
1901 newi2pat = XVECEXP (newpat, 0, 0);
1902 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
1903 newpat = XVECEXP (newpat, 0, 1);
1904 SUBST (SET_SRC (newpat),
1905 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
1906 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1907 if (i2_code_number >= 0)
1908 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1910 if (insn_code_number >= 0)
1915 /* If we will be able to accept this, we have made a change to the
1916 destination of I3. This can invalidate a LOG_LINKS pointing
1917 to I3. No other part of combine.c makes such a transformation.
1919 The new I3 will have a destination that was previously the
1920 destination of I1 or I2 and which was used in i2 or I3. Call
1921 distribute_links to make a LOG_LINK from the next use of
1922 that destination. */
1924 PATTERN (i3) = newpat;
1925 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
1927 /* I3 now uses what used to be its destination and which is
1928 now I2's destination. That means we need a LOG_LINK from
1929 I3 to I2. But we used to have one, so we still will.
1931 However, some later insn might be using I2's dest and have
1932 a LOG_LINK pointing at I3. We must remove this link.
1933 The simplest way to remove the link is to point it at I1,
1934 which we know will be a NOTE. */
1936 for (insn = NEXT_INSN (i3);
1937 insn && (this_basic_block == n_basic_blocks - 1
1938 || insn != basic_block_head[this_basic_block + 1]);
1939 insn = NEXT_INSN (insn))
1941 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1942 && reg_referenced_p (ni2dest, PATTERN (insn)))
1944 for (link = LOG_LINKS (insn); link;
1945 link = XEXP (link, 1))
1946 if (XEXP (link, 0) == i3)
1947 XEXP (link, 0) = i1;
1955 /* Similarly, check for a case where we have a PARALLEL of two independent
1956 SETs but we started with three insns. In this case, we can do the sets
1957 as two separate insns. This case occurs when some SET allows two
1958 other insns to combine, but the destination of that SET is still live. */
1960 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1961 && GET_CODE (newpat) == PARALLEL
1962 && XVECLEN (newpat, 0) == 2
1963 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1964 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1965 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1966 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1967 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1968 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1969 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1971 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1972 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1973 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1974 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1975 XVECEXP (newpat, 0, 0))
1976 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1977 XVECEXP (newpat, 0, 1)))
1979 newi2pat = XVECEXP (newpat, 0, 1);
1980 newpat = XVECEXP (newpat, 0, 0);
1982 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1983 if (i2_code_number >= 0)
1984 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1987 /* If it still isn't recognized, fail and change things back the way they
1989 if ((insn_code_number < 0
1990 /* Is the result a reasonable ASM_OPERANDS? */
1991 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
1997 /* If we had to change another insn, make sure it is valid also. */
1998 if (undobuf.other_insn)
2000 rtx other_pat = PATTERN (undobuf.other_insn);
2001 rtx new_other_notes;
2004 CLEAR_HARD_REG_SET (newpat_used_regs);
2006 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2009 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2015 PATTERN (undobuf.other_insn) = other_pat;
2017 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2018 are still valid. Then add any non-duplicate notes added by
2019 recog_for_combine. */
2020 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2022 next = XEXP (note, 1);
2024 if (REG_NOTE_KIND (note) == REG_UNUSED
2025 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2027 if (GET_CODE (XEXP (note, 0)) == REG)
2028 reg_n_deaths[REGNO (XEXP (note, 0))]--;
2030 remove_note (undobuf.other_insn, note);
2034 for (note = new_other_notes; note; note = XEXP (note, 1))
2035 if (GET_CODE (XEXP (note, 0)) == REG)
2036 reg_n_deaths[REGNO (XEXP (note, 0))]++;
2038 distribute_notes (new_other_notes, undobuf.other_insn,
2039 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2042 /* We now know that we can do this combination. Merge the insns and
2043 update the status of registers and LOG_LINKS. */
2046 rtx i3notes, i2notes, i1notes = 0;
2047 rtx i3links, i2links, i1links = 0;
2050 /* Compute which registers we expect to eliminate. */
2051 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
2053 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
2055 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2057 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2058 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2060 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2062 /* Ensure that we do not have something that should not be shared but
2063 occurs multiple times in the new insns. Check this by first
2064 resetting all the `used' flags and then copying anything is shared. */
2066 reset_used_flags (i3notes);
2067 reset_used_flags (i2notes);
2068 reset_used_flags (i1notes);
2069 reset_used_flags (newpat);
2070 reset_used_flags (newi2pat);
2071 if (undobuf.other_insn)
2072 reset_used_flags (PATTERN (undobuf.other_insn));
2074 i3notes = copy_rtx_if_shared (i3notes);
2075 i2notes = copy_rtx_if_shared (i2notes);
2076 i1notes = copy_rtx_if_shared (i1notes);
2077 newpat = copy_rtx_if_shared (newpat);
2078 newi2pat = copy_rtx_if_shared (newi2pat);
2079 if (undobuf.other_insn)
2080 reset_used_flags (PATTERN (undobuf.other_insn));
2082 INSN_CODE (i3) = insn_code_number;
2083 PATTERN (i3) = newpat;
2084 if (undobuf.other_insn)
2085 INSN_CODE (undobuf.other_insn) = other_code_number;
2087 /* We had one special case above where I2 had more than one set and
2088 we replaced a destination of one of those sets with the destination
2089 of I3. In that case, we have to update LOG_LINKS of insns later
2090 in this basic block. Note that this (expensive) case is rare.
2092 Also, in this case, we must pretend that all REG_NOTEs for I2
2093 actually came from I3, so that REG_UNUSED notes from I2 will be
2094 properly handled. */
2096 if (i3_subst_into_i2)
2098 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2099 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2100 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2101 && ! find_reg_note (i2, REG_UNUSED,
2102 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2103 for (temp = NEXT_INSN (i2);
2104 temp && (this_basic_block == n_basic_blocks - 1
2105 || basic_block_head[this_basic_block] != temp);
2106 temp = NEXT_INSN (temp))
2107 if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
2108 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2109 if (XEXP (link, 0) == i2)
2110 XEXP (link, 0) = i3;
2115 while (XEXP (link, 1))
2116 link = XEXP (link, 1);
2117 XEXP (link, 1) = i2notes;
2131 INSN_CODE (i2) = i2_code_number;
2132 PATTERN (i2) = newi2pat;
2136 PUT_CODE (i2, NOTE);
2137 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2138 NOTE_SOURCE_FILE (i2) = 0;
2145 PUT_CODE (i1, NOTE);
2146 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2147 NOTE_SOURCE_FILE (i1) = 0;
2150 /* Get death notes for everything that is now used in either I3 or
2151 I2 and used to die in a previous insn. */
2153 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
2155 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
2157 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2159 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2162 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2165 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2168 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2171 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2172 know these are REG_UNUSED and want them to go to the desired insn,
2173 so we always pass it as i3. We have not counted the notes in
2174 reg_n_deaths yet, so we need to do so now. */
2176 if (newi2pat && new_i2_notes)
2178 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2179 if (GET_CODE (XEXP (temp, 0)) == REG)
2180 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2182 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2187 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2188 if (GET_CODE (XEXP (temp, 0)) == REG)
2189 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2191 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2194 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
2195 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
2196 Show an additional death due to the REG_DEAD note we make here. If
2197 we discard it in distribute_notes, we will decrement it again. */
2201 if (GET_CODE (i3dest_killed) == REG)
2202 reg_n_deaths[REGNO (i3dest_killed)]++;
2204 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
2206 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2207 NULL_RTX, NULL_RTX);
2210 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
2211 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
2212 we passed I3 in that case, it might delete I2. */
2214 if (i2dest_in_i2src)
2216 if (GET_CODE (i2dest) == REG)
2217 reg_n_deaths[REGNO (i2dest)]++;
2219 if (newi2pat && reg_set_p (i2dest, newi2pat))
2220 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2221 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2223 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2224 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2225 NULL_RTX, NULL_RTX);
2228 if (i1dest_in_i1src)
2230 if (GET_CODE (i1dest) == REG)
2231 reg_n_deaths[REGNO (i1dest)]++;
2233 if (newi2pat && reg_set_p (i1dest, newi2pat))
2234 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2235 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2237 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2238 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2239 NULL_RTX, NULL_RTX);
2242 distribute_links (i3links);
2243 distribute_links (i2links);
2244 distribute_links (i1links);
2246 if (GET_CODE (i2dest) == REG)
2249 rtx i2_insn = 0, i2_val = 0, set;
2251 /* The insn that used to set this register doesn't exist, and
2252 this life of the register may not exist either. See if one of
2253 I3's links points to an insn that sets I2DEST. If it does,
2254 that is now the last known value for I2DEST. If we don't update
2255 this and I2 set the register to a value that depended on its old
2256 contents, we will get confused. If this insn is used, thing
2257 will be set correctly in combine_instructions. */
2259 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2260 if ((set = single_set (XEXP (link, 0))) != 0
2261 && rtx_equal_p (i2dest, SET_DEST (set)))
2262 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2264 record_value_for_reg (i2dest, i2_insn, i2_val);
2266 /* If the reg formerly set in I2 died only once and that was in I3,
2267 zero its use count so it won't make `reload' do any work. */
2268 if (! added_sets_2 && newi2pat == 0 && ! i2dest_in_i2src)
2270 regno = REGNO (i2dest);
2271 reg_n_sets[regno]--;
2272 if (reg_n_sets[regno] == 0
2273 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2274 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2275 reg_n_refs[regno] = 0;
2279 if (i1 && GET_CODE (i1dest) == REG)
2282 rtx i1_insn = 0, i1_val = 0, set;
2284 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2285 if ((set = single_set (XEXP (link, 0))) != 0
2286 && rtx_equal_p (i1dest, SET_DEST (set)))
2287 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2289 record_value_for_reg (i1dest, i1_insn, i1_val);
2291 regno = REGNO (i1dest);
2292 if (! added_sets_1 && ! i1dest_in_i1src)
2294 reg_n_sets[regno]--;
2295 if (reg_n_sets[regno] == 0
2296 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2297 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2298 reg_n_refs[regno] = 0;
2302 /* Update reg_nonzero_bits et al for any changes that may have been made
2305 note_stores (newpat, set_nonzero_bits_and_sign_copies);
2307 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
2309 /* If I3 is now an unconditional jump, ensure that it has a
2310 BARRIER following it since it may have initially been a
2311 conditional jump. It may also be the last nonnote insn. */
2313 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2314 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2315 || GET_CODE (temp) != BARRIER))
2316 emit_barrier_after (i3);
2319 combine_successes++;
2321 if (added_links_insn
2322 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2323 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2324 return added_links_insn;
2326 return newi2pat ? i2 : i3;
2329 /* Undo all the modifications recorded in undobuf. */
2335 if (undobuf.num_undo > MAX_UNDO)
2336 undobuf.num_undo = MAX_UNDO;
2337 for (i = undobuf.num_undo - 1; i >= 0; i--)
2339 if (undobuf.undo[i].is_int)
2340 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2342 *undobuf.undo[i].where.r = undobuf.undo[i].old_contents.r;
2346 obfree (undobuf.storage);
2347 undobuf.num_undo = 0;
2350 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
2351 where we have an arithmetic expression and return that point. LOC will
2354 try_combine will call this function to see if an insn can be split into
2358 find_split_point (loc, insn)
2363 enum rtx_code code = GET_CODE (x);
2365 int len = 0, pos, unsignedp;
2368 /* First special-case some codes. */
2372 #ifdef INSN_SCHEDULING
2373 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2375 if (GET_CODE (SUBREG_REG (x)) == MEM)
2378 return find_split_point (&SUBREG_REG (x), insn);
2382 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2383 using LO_SUM and HIGH. */
2384 if (GET_CODE (XEXP (x, 0)) == CONST
2385 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2388 gen_rtx_combine (LO_SUM, Pmode,
2389 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2391 return &XEXP (XEXP (x, 0), 0);
2395 /* If we have a PLUS whose second operand is a constant and the
2396 address is not valid, perhaps will can split it up using
2397 the machine-specific way to split large constants. We use
2398 the first psuedo-reg (one of the virtual regs) as a placeholder;
2399 it will not remain in the result. */
2400 if (GET_CODE (XEXP (x, 0)) == PLUS
2401 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2402 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2404 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2405 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2408 /* This should have produced two insns, each of which sets our
2409 placeholder. If the source of the second is a valid address,
2410 we can make put both sources together and make a split point
2413 if (seq && XVECLEN (seq, 0) == 2
2414 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2415 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2416 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2417 && ! reg_mentioned_p (reg,
2418 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2419 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2420 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2421 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2422 && memory_address_p (GET_MODE (x),
2423 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2425 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2426 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2428 /* Replace the placeholder in SRC2 with SRC1. If we can
2429 find where in SRC2 it was placed, that can become our
2430 split point and we can replace this address with SRC2.
2431 Just try two obvious places. */
2433 src2 = replace_rtx (src2, reg, src1);
2435 if (XEXP (src2, 0) == src1)
2436 split = &XEXP (src2, 0);
2437 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2438 && XEXP (XEXP (src2, 0), 0) == src1)
2439 split = &XEXP (XEXP (src2, 0), 0);
2443 SUBST (XEXP (x, 0), src2);
2448 /* If that didn't work, perhaps the first operand is complex and
2449 needs to be computed separately, so make a split point there.
2450 This will occur on machines that just support REG + CONST
2451 and have a constant moved through some previous computation. */
2453 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2454 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2455 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2457 return &XEXP (XEXP (x, 0), 0);
2463 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2464 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2465 we need to put the operand into a register. So split at that
2468 if (SET_DEST (x) == cc0_rtx
2469 && GET_CODE (SET_SRC (x)) != COMPARE
2470 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2471 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2472 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2473 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2474 return &SET_SRC (x);
2477 /* See if we can split SET_SRC as it stands. */
2478 split = find_split_point (&SET_SRC (x), insn);
2479 if (split && split != &SET_SRC (x))
2482 /* See if this is a bitfield assignment with everything constant. If
2483 so, this is an IOR of an AND, so split it into that. */
2484 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2485 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2486 <= HOST_BITS_PER_WIDE_INT)
2487 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2488 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2489 && GET_CODE (SET_SRC (x)) == CONST_INT
2490 && ((INTVAL (XEXP (SET_DEST (x), 1))
2491 + INTVAL (XEXP (SET_DEST (x), 2)))
2492 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2493 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2495 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2496 int len = INTVAL (XEXP (SET_DEST (x), 1));
2497 int src = INTVAL (SET_SRC (x));
2498 rtx dest = XEXP (SET_DEST (x), 0);
2499 enum machine_mode mode = GET_MODE (dest);
2500 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2503 pos = GET_MODE_BITSIZE (mode) - len - pos;
2508 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2511 gen_binary (IOR, mode,
2512 gen_binary (AND, mode, dest,
2513 GEN_INT (~ (mask << pos)
2514 & GET_MODE_MASK (mode))),
2515 GEN_INT (src << pos)));
2517 SUBST (SET_DEST (x), dest);
2519 split = find_split_point (&SET_SRC (x), insn);
2520 if (split && split != &SET_SRC (x))
2524 /* Otherwise, see if this is an operation that we can split into two.
2525 If so, try to split that. */
2526 code = GET_CODE (SET_SRC (x));
2531 /* If we are AND'ing with a large constant that is only a single
2532 bit and the result is only being used in a context where we
2533 need to know if it is zero or non-zero, replace it with a bit
2534 extraction. This will avoid the large constant, which might
2535 have taken more than one insn to make. If the constant were
2536 not a valid argument to the AND but took only one insn to make,
2537 this is no worse, but if it took more than one insn, it will
2540 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2541 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2542 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2543 && GET_CODE (SET_DEST (x)) == REG
2544 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2545 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2546 && XEXP (*split, 0) == SET_DEST (x)
2547 && XEXP (*split, 1) == const0_rtx)
2550 make_extraction (GET_MODE (SET_DEST (x)),
2551 XEXP (SET_SRC (x), 0),
2552 pos, NULL_RTX, 1, 1, 0, 0));
2553 return find_split_point (loc, insn);
2558 inner = XEXP (SET_SRC (x), 0);
2560 len = GET_MODE_BITSIZE (GET_MODE (inner));
2566 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2567 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2569 inner = XEXP (SET_SRC (x), 0);
2570 len = INTVAL (XEXP (SET_SRC (x), 1));
2571 pos = INTVAL (XEXP (SET_SRC (x), 2));
2574 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2576 unsignedp = (code == ZERO_EXTRACT);
2581 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2583 enum machine_mode mode = GET_MODE (SET_SRC (x));
2585 /* For unsigned, we have a choice of a shift followed by an
2586 AND or two shifts. Use two shifts for field sizes where the
2587 constant might be too large. We assume here that we can
2588 always at least get 8-bit constants in an AND insn, which is
2589 true for every current RISC. */
2591 if (unsignedp && len <= 8)
2596 gen_rtx_combine (LSHIFTRT, mode,
2597 gen_lowpart_for_combine (mode, inner),
2599 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2601 split = find_split_point (&SET_SRC (x), insn);
2602 if (split && split != &SET_SRC (x))
2609 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2610 gen_rtx_combine (ASHIFT, mode,
2611 gen_lowpart_for_combine (mode, inner),
2612 GEN_INT (GET_MODE_BITSIZE (mode)
2614 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2616 split = find_split_point (&SET_SRC (x), insn);
2617 if (split && split != &SET_SRC (x))
2622 /* See if this is a simple operation with a constant as the second
2623 operand. It might be that this constant is out of range and hence
2624 could be used as a split point. */
2625 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2626 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2627 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2628 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2629 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2630 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2631 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2633 return &XEXP (SET_SRC (x), 1);
2635 /* Finally, see if this is a simple operation with its first operand
2636 not in a register. The operation might require this operand in a
2637 register, so return it as a split point. We can always do this
2638 because if the first operand were another operation, we would have
2639 already found it as a split point. */
2640 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2641 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2642 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2643 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2644 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2645 return &XEXP (SET_SRC (x), 0);
2651 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2652 it is better to write this as (not (ior A B)) so we can split it.
2653 Similarly for IOR. */
2654 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2657 gen_rtx_combine (NOT, GET_MODE (x),
2658 gen_rtx_combine (code == IOR ? AND : IOR,
2660 XEXP (XEXP (x, 0), 0),
2661 XEXP (XEXP (x, 1), 0))));
2662 return find_split_point (loc, insn);
2665 /* Many RISC machines have a large set of logical insns. If the
2666 second operand is a NOT, put it first so we will try to split the
2667 other operand first. */
2668 if (GET_CODE (XEXP (x, 1)) == NOT)
2670 rtx tem = XEXP (x, 0);
2671 SUBST (XEXP (x, 0), XEXP (x, 1));
2672 SUBST (XEXP (x, 1), tem);
2677 /* Otherwise, select our actions depending on our rtx class. */
2678 switch (GET_RTX_CLASS (code))
2680 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2682 split = find_split_point (&XEXP (x, 2), insn);
2685 /* ... fall through ... */
2689 split = find_split_point (&XEXP (x, 1), insn);
2692 /* ... fall through ... */
2694 /* Some machines have (and (shift ...) ...) insns. If X is not
2695 an AND, but XEXP (X, 0) is, use it as our split point. */
2696 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2697 return &XEXP (x, 0);
2699 split = find_split_point (&XEXP (x, 0), insn);
2705 /* Otherwise, we don't have a split point. */
2709 /* Throughout X, replace FROM with TO, and return the result.
2710 The result is TO if X is FROM;
2711 otherwise the result is X, but its contents may have been modified.
2712 If they were modified, a record was made in undobuf so that
2713 undo_all will (among other things) return X to its original state.
2715 If the number of changes necessary is too much to record to undo,
2716 the excess changes are not made, so the result is invalid.
2717 The changes already made can still be undone.
2718 undobuf.num_undo is incremented for such changes, so by testing that
2719 the caller can tell whether the result is valid.
2721 `n_occurrences' is incremented each time FROM is replaced.
2723 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2725 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2726 by copying if `n_occurrences' is non-zero. */
2729 subst (x, from, to, in_dest, unique_copy)
2730 register rtx x, from, to;
2734 register enum rtx_code code = GET_CODE (x);
2735 enum machine_mode op0_mode = VOIDmode;
2737 register int len, i;
2740 /* Two expressions are equal if they are identical copies of a shared
2741 RTX or if they are both registers with the same register number
2744 #define COMBINE_RTX_EQUAL_P(X,Y) \
2746 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2747 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2749 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2752 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2755 /* If X and FROM are the same register but different modes, they will
2756 not have been seen as equal above. However, flow.c will make a
2757 LOG_LINKS entry for that case. If we do nothing, we will try to
2758 rerecognize our original insn and, when it succeeds, we will
2759 delete the feeding insn, which is incorrect.
2761 So force this insn not to match in this (rare) case. */
2762 if (! in_dest && code == REG && GET_CODE (from) == REG
2763 && REGNO (x) == REGNO (from))
2764 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2766 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2767 of which may contain things that can be combined. */
2768 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2771 /* It is possible to have a subexpression appear twice in the insn.
2772 Suppose that FROM is a register that appears within TO.
2773 Then, after that subexpression has been scanned once by `subst',
2774 the second time it is scanned, TO may be found. If we were
2775 to scan TO here, we would find FROM within it and create a
2776 self-referent rtl structure which is completely wrong. */
2777 if (COMBINE_RTX_EQUAL_P (x, to))
2780 len = GET_RTX_LENGTH (code);
2781 fmt = GET_RTX_FORMAT (code);
2783 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2784 set up to skip this common case. All other cases where we want to
2785 suppress replacing something inside a SET_SRC are handled via the
2788 && (GET_CODE (SET_DEST (x)) == REG
2789 || GET_CODE (SET_DEST (x)) == CC0
2790 || GET_CODE (SET_DEST (x)) == PC))
2793 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2795 op0_mode = GET_MODE (XEXP (x, 0));
2797 for (i = 0; i < len; i++)
2802 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2804 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2806 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2811 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2813 /* If this substitution failed, this whole thing fails. */
2814 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2818 SUBST (XVECEXP (x, i, j), new);
2821 else if (fmt[i] == 'e')
2823 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2825 /* In general, don't install a subreg involving two modes not
2826 tieable. It can worsen register allocation, and can even
2827 make invalid reload insns, since the reg inside may need to
2828 be copied from in the outside mode, and that may be invalid
2829 if it is an fp reg copied in integer mode.
2831 We allow two exceptions to this: It is valid if it is inside
2832 another SUBREG and the mode of that SUBREG and the mode of
2833 the inside of TO is tieable and it is valid if X is a SET
2834 that copies FROM to CC0. */
2835 if (GET_CODE (to) == SUBREG
2836 && ! MODES_TIEABLE_P (GET_MODE (to),
2837 GET_MODE (SUBREG_REG (to)))
2838 && ! (code == SUBREG
2839 && MODES_TIEABLE_P (GET_MODE (x),
2840 GET_MODE (SUBREG_REG (to))))
2842 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
2845 return gen_rtx (CLOBBER, VOIDmode, const0_rtx);
2847 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2851 /* If we are in a SET_DEST, suppress most cases unless we
2852 have gone inside a MEM, in which case we want to
2853 simplify the address. We assume here that things that
2854 are actually part of the destination have their inner
2855 parts in the first expression. This is true for SUBREG,
2856 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2857 things aside from REG and MEM that should appear in a
2859 new = subst (XEXP (x, i), from, to,
2861 && (code == SUBREG || code == STRICT_LOW_PART
2862 || code == ZERO_EXTRACT))
2864 && i == 0), unique_copy);
2866 /* If we found that we will have to reject this combination,
2867 indicate that by returning the CLOBBER ourselves, rather than
2868 an expression containing it. This will speed things up as
2869 well as prevent accidents where two CLOBBERs are considered
2870 to be equal, thus producing an incorrect simplification. */
2872 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2875 SUBST (XEXP (x, i), new);
2879 /* Try to simplify X. If the simplification changed the code, it is likely
2880 that further simplification will help, so loop, but limit the number
2881 of repetitions that will be performed. */
2883 for (i = 0; i < 4; i++)
2885 /* If X is sufficiently simple, don't bother trying to do anything
2887 if (code != CONST_INT && code != REG && code != CLOBBER)
2888 x = simplify_rtx (x, op0_mode, i == 3, in_dest);
2890 if (GET_CODE (x) == code)
2893 code = GET_CODE (x);
2895 /* We no longer know the original mode of operand 0 since we
2896 have changed the form of X) */
2897 op0_mode = VOIDmode;
2903 /* Simplify X, a piece of RTL. We just operate on the expression at the
2904 outer level; call `subst' to simplify recursively. Return the new
2907 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
2908 will be the iteration even if an expression with a code different from
2909 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
2912 simplify_rtx (x, op0_mode, last, in_dest)
2914 enum machine_mode op0_mode;
2918 enum rtx_code code = GET_CODE (x);
2919 enum machine_mode mode = GET_MODE (x);
2923 /* If this is a commutative operation, put a constant last and a complex
2924 expression first. We don't need to do this for comparisons here. */
2925 if (GET_RTX_CLASS (code) == 'c'
2926 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2927 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2928 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2929 || (GET_CODE (XEXP (x, 0)) == SUBREG
2930 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2931 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2934 SUBST (XEXP (x, 0), XEXP (x, 1));
2935 SUBST (XEXP (x, 1), temp);
2938 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
2939 sign extension of a PLUS with a constant, reverse the order of the sign
2940 extension and the addition. Note that this not the same as the original
2941 code, but overflow is undefined for signed values. Also note that the
2942 PLUS will have been partially moved "inside" the sign-extension, so that
2943 the first operand of X will really look like:
2944 (ashiftrt (plus (ashift A C4) C5) C4).
2946 (plus (ashiftrt (ashift A C4) C2) C4)
2947 and replace the first operand of X with that expression. Later parts
2948 of this function may simplify the expression further.
2950 For example, if we start with (mult (sign_extend (plus A C1)) C2),
2951 we swap the SIGN_EXTEND and PLUS. Later code will apply the
2952 distributive law to produce (plus (mult (sign_extend X) C1) C3).
2954 We do this to simplify address expressions. */
2956 if ((code == PLUS || code == MINUS || code == MULT)
2957 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
2958 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
2959 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
2960 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
2961 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2962 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
2963 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
2964 && (temp = simplify_binary_operation (ASHIFTRT, mode,
2965 XEXP (XEXP (XEXP (x, 0), 0), 1),
2966 XEXP (XEXP (x, 0), 1))) != 0)
2969 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
2970 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
2971 INTVAL (XEXP (XEXP (x, 0), 1)));
2973 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
2974 INTVAL (XEXP (XEXP (x, 0), 1)));
2976 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
2979 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2980 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2981 things. Check for cases where both arms are testing the same
2984 Don't do anything if all operands are very simple. */
2986 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
2987 || GET_RTX_CLASS (code) == '<')
2988 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
2989 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
2990 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
2992 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
2993 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
2994 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
2996 || (GET_RTX_CLASS (code) == '1'
2997 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
2998 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
2999 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3002 rtx cond, true, false;
3004 cond = if_then_else_cond (x, &true, &false);
3007 rtx cop1 = const0_rtx;
3008 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3010 /* Simplify the alternative arms; this may collapse the true and
3011 false arms to store-flag values. */
3012 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3013 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3015 /* Restarting if we generate a store-flag expression will cause
3016 us to loop. Just drop through in this case. */
3018 /* If the result values are STORE_FLAG_VALUE and zero, we can
3019 just make the comparison operation. */
3020 if (true == const_true_rtx && false == const0_rtx)
3021 x = gen_binary (cond_code, mode, cond, cop1);
3022 else if (true == const0_rtx && false == const_true_rtx)
3023 x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3025 /* Likewise, we can make the negate of a comparison operation
3026 if the result values are - STORE_FLAG_VALUE and zero. */
3027 else if (GET_CODE (true) == CONST_INT
3028 && INTVAL (true) == - STORE_FLAG_VALUE
3029 && false == const0_rtx)
3030 x = gen_unary (NEG, mode, mode,
3031 gen_binary (cond_code, mode, cond, cop1));
3032 else if (GET_CODE (false) == CONST_INT
3033 && INTVAL (false) == - STORE_FLAG_VALUE
3034 && true == const0_rtx)
3035 x = gen_unary (NEG, mode, mode,
3036 gen_binary (reverse_condition (cond_code),
3039 return gen_rtx (IF_THEN_ELSE, mode,
3040 gen_binary (cond_code, VOIDmode, cond, cop1),
3043 code = GET_CODE (x);
3044 op0_mode = VOIDmode;
3048 /* Try to fold this expression in case we have constants that weren't
3051 switch (GET_RTX_CLASS (code))
3054 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3057 temp = simplify_relational_operation (code, op0_mode,
3058 XEXP (x, 0), XEXP (x, 1));
3059 #ifdef FLOAT_STORE_FLAG_VALUE
3060 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3061 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3062 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3067 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3071 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3072 XEXP (x, 1), XEXP (x, 2));
3077 x = temp, code = GET_CODE (temp);
3079 /* First see if we can apply the inverse distributive law. */
3080 if (code == PLUS || code == MINUS
3081 || code == AND || code == IOR || code == XOR)
3083 x = apply_distributive_law (x);
3084 code = GET_CODE (x);
3087 /* If CODE is an associative operation not otherwise handled, see if we
3088 can associate some operands. This can win if they are constants or
3089 if they are logically related (i.e. (a & b) & a. */
3090 if ((code == PLUS || code == MINUS
3091 || code == MULT || code == AND || code == IOR || code == XOR
3092 || code == DIV || code == UDIV
3093 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3094 && INTEGRAL_MODE_P (mode))
3096 if (GET_CODE (XEXP (x, 0)) == code)
3098 rtx other = XEXP (XEXP (x, 0), 0);
3099 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3100 rtx inner_op1 = XEXP (x, 1);
3103 /* Make sure we pass the constant operand if any as the second
3104 one if this is a commutative operation. */
3105 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3107 rtx tem = inner_op0;
3108 inner_op0 = inner_op1;
3111 inner = simplify_binary_operation (code == MINUS ? PLUS
3112 : code == DIV ? MULT
3113 : code == UDIV ? MULT
3115 mode, inner_op0, inner_op1);
3117 /* For commutative operations, try the other pair if that one
3119 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3121 other = XEXP (XEXP (x, 0), 1);
3122 inner = simplify_binary_operation (code, mode,
3123 XEXP (XEXP (x, 0), 0),
3128 return gen_binary (code, mode, other, inner);
3132 /* A little bit of algebraic simplification here. */
3136 /* Ensure that our address has any ASHIFTs converted to MULT in case
3137 address-recognizing predicates are called later. */
3138 temp = make_compound_operation (XEXP (x, 0), MEM);
3139 SUBST (XEXP (x, 0), temp);
3143 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3144 is paradoxical. If we can't do that safely, then it becomes
3145 something nonsensical so that this combination won't take place. */
3147 if (GET_CODE (SUBREG_REG (x)) == MEM
3148 && (GET_MODE_SIZE (mode)
3149 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3151 rtx inner = SUBREG_REG (x);
3152 int endian_offset = 0;
3153 /* Don't change the mode of the MEM
3154 if that would change the meaning of the address. */
3155 if (MEM_VOLATILE_P (SUBREG_REG (x))
3156 || mode_dependent_address_p (XEXP (inner, 0)))
3157 return gen_rtx (CLOBBER, mode, const0_rtx);
3159 #if BYTES_BIG_ENDIAN
3160 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3161 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3162 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3163 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
3165 /* Note if the plus_constant doesn't make a valid address
3166 then this combination won't be accepted. */
3167 x = gen_rtx (MEM, mode,
3168 plus_constant (XEXP (inner, 0),
3169 (SUBREG_WORD (x) * UNITS_PER_WORD
3171 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3172 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3173 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3177 /* If we are in a SET_DEST, these other cases can't apply. */
3181 /* Changing mode twice with SUBREG => just change it once,
3182 or not at all if changing back to starting mode. */
3183 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3185 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3186 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3187 return SUBREG_REG (SUBREG_REG (x));
3189 SUBST_INT (SUBREG_WORD (x),
3190 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3191 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3194 /* SUBREG of a hard register => just change the register number
3195 and/or mode. If the hard register is not valid in that mode,
3196 suppress this combination. If the hard register is the stack,
3197 frame, or argument pointer, leave this as a SUBREG. */
3199 if (GET_CODE (SUBREG_REG (x)) == REG
3200 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3201 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3202 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3203 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3205 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3206 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3208 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3210 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3212 return gen_rtx (REG, mode,
3213 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3215 return gen_rtx (CLOBBER, mode, const0_rtx);
3218 /* For a constant, try to pick up the part we want. Handle a full
3219 word and low-order part. Only do this if we are narrowing
3220 the constant; if it is being widened, we have no idea what
3221 the extra bits will have been set to. */
3223 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3224 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3225 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
3226 && GET_MODE_CLASS (mode) == MODE_INT)
3228 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3234 /* If we want a subreg of a constant, at offset 0,
3235 take the low bits. On a little-endian machine, that's
3236 always valid. On a big-endian machine, it's valid
3237 only if the constant's mode fits in one word. */
3238 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
3239 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode)
3240 #if WORDS_BIG_ENDIAN
3241 && GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD
3244 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3246 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3247 since we are saying that the high bits don't matter. */
3248 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3249 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3250 return SUBREG_REG (x);
3252 /* Note that we cannot do any narrowing for non-constants since
3253 we might have been counting on using the fact that some bits were
3254 zero. We now do this in the SET. */
3259 /* (not (plus X -1)) can become (neg X). */
3260 if (GET_CODE (XEXP (x, 0)) == PLUS
3261 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3262 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3264 /* Similarly, (not (neg X)) is (plus X -1). */
3265 if (GET_CODE (XEXP (x, 0)) == NEG)
3266 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3269 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3270 if (GET_CODE (XEXP (x, 0)) == XOR
3271 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3272 && (temp = simplify_unary_operation (NOT, mode,
3273 XEXP (XEXP (x, 0), 1),
3275 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3277 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3278 other than 1, but that is not valid. We could do a similar
3279 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3280 but this doesn't seem common enough to bother with. */
3281 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3282 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3283 return gen_rtx (ROTATE, mode, gen_unary (NOT, mode, mode, const1_rtx),
3284 XEXP (XEXP (x, 0), 1));
3286 if (GET_CODE (XEXP (x, 0)) == SUBREG
3287 && subreg_lowpart_p (XEXP (x, 0))
3288 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3289 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3290 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3291 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3293 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3295 x = gen_rtx (ROTATE, inner_mode,
3296 gen_unary (NOT, inner_mode, inner_mode, const1_rtx),
3297 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3298 return gen_lowpart_for_combine (mode, x);
3301 #if STORE_FLAG_VALUE == -1
3302 /* (not (comparison foo bar)) can be done by reversing the comparison
3304 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3305 && reversible_comparison_p (XEXP (x, 0)))
3306 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3307 mode, XEXP (XEXP (x, 0), 0),
3308 XEXP (XEXP (x, 0), 1));
3310 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3311 is (lt foo (const_int 0)), so we can perform the above
3314 if (XEXP (x, 1) == const1_rtx
3315 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3316 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3317 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3318 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3321 /* Apply De Morgan's laws to reduce number of patterns for machines
3322 with negating logical insns (and-not, nand, etc.). If result has
3323 only one NOT, put it first, since that is how the patterns are
3326 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3328 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3330 if (GET_CODE (in1) == NOT)
3331 in1 = XEXP (in1, 0);
3333 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3335 if (GET_CODE (in2) == NOT)
3336 in2 = XEXP (in2, 0);
3337 else if (GET_CODE (in2) == CONST_INT
3338 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3339 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3341 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3343 if (GET_CODE (in2) == NOT)
3346 in2 = in1; in1 = tem;
3349 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3355 /* (neg (plus X 1)) can become (not X). */
3356 if (GET_CODE (XEXP (x, 0)) == PLUS
3357 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3358 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3360 /* Similarly, (neg (not X)) is (plus X 1). */
3361 if (GET_CODE (XEXP (x, 0)) == NOT)
3362 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
3364 /* (neg (minus X Y)) can become (minus Y X). */
3365 if (GET_CODE (XEXP (x, 0)) == MINUS
3366 && (! FLOAT_MODE_P (mode)
3367 /* x-y != -(y-x) with IEEE floating point. */
3368 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3370 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3371 XEXP (XEXP (x, 0), 0));
3373 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3374 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3375 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3376 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3378 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3379 if we can then eliminate the NEG (e.g.,
3380 if the operand is a constant). */
3382 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3384 temp = simplify_unary_operation (NEG, mode,
3385 XEXP (XEXP (x, 0), 0), mode);
3388 SUBST (XEXP (XEXP (x, 0), 0), temp);
3393 temp = expand_compound_operation (XEXP (x, 0));
3395 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3396 replaced by (lshiftrt X C). This will convert
3397 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3399 if (GET_CODE (temp) == ASHIFTRT
3400 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3401 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3402 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3403 INTVAL (XEXP (temp, 1)));
3405 /* If X has only a single bit that might be nonzero, say, bit I, convert
3406 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3407 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3408 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3409 or a SUBREG of one since we'd be making the expression more
3410 complex if it was just a register. */
3412 if (GET_CODE (temp) != REG
3413 && ! (GET_CODE (temp) == SUBREG
3414 && GET_CODE (SUBREG_REG (temp)) == REG)
3415 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3417 rtx temp1 = simplify_shift_const
3418 (NULL_RTX, ASHIFTRT, mode,
3419 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3420 GET_MODE_BITSIZE (mode) - 1 - i),
3421 GET_MODE_BITSIZE (mode) - 1 - i);
3423 /* If all we did was surround TEMP with the two shifts, we
3424 haven't improved anything, so don't use it. Otherwise,
3425 we are better off with TEMP1. */
3426 if (GET_CODE (temp1) != ASHIFTRT
3427 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3428 || XEXP (XEXP (temp1, 0), 0) != temp)
3433 case FLOAT_TRUNCATE:
3434 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3435 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3436 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3437 return XEXP (XEXP (x, 0), 0);
3439 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
3440 (OP:SF foo:SF) if OP is NEG or ABS. */
3441 if ((GET_CODE (XEXP (x, 0)) == ABS
3442 || GET_CODE (XEXP (x, 0)) == NEG)
3443 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3444 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3445 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3446 XEXP (XEXP (XEXP (x, 0), 0), 0));
3448 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3449 is (float_truncate:SF x). */
3450 if (GET_CODE (XEXP (x, 0)) == SUBREG
3451 && subreg_lowpart_p (XEXP (x, 0))
3452 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3453 return SUBREG_REG (XEXP (x, 0));
3458 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3459 using cc0, in which case we want to leave it as a COMPARE
3460 so we can distinguish it from a register-register-copy. */
3461 if (XEXP (x, 1) == const0_rtx)
3464 /* In IEEE floating point, x-0 is not the same as x. */
3465 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3466 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3468 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3474 /* (const (const X)) can become (const X). Do it this way rather than
3475 returning the inner CONST since CONST can be shared with a
3477 if (GET_CODE (XEXP (x, 0)) == CONST)
3478 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3483 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3484 can add in an offset. find_split_point will split this address up
3485 again if it doesn't match. */
3486 if (GET_CODE (XEXP (x, 0)) == HIGH
3487 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3493 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3494 outermost. That's because that's the way indexed addresses are
3495 supposed to appear. This code used to check many more cases, but
3496 they are now checked elsewhere. */
3497 if (GET_CODE (XEXP (x, 0)) == PLUS
3498 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3499 return gen_binary (PLUS, mode,
3500 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3502 XEXP (XEXP (x, 0), 1));
3504 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3505 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3506 bit-field and can be replaced by either a sign_extend or a
3507 sign_extract. The `and' may be a zero_extend. */
3508 if (GET_CODE (XEXP (x, 0)) == XOR
3509 && GET_CODE (XEXP (x, 1)) == CONST_INT
3510 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3511 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3512 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3513 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3514 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3515 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3516 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3517 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3518 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3519 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3521 return simplify_shift_const
3522 (NULL_RTX, ASHIFTRT, mode,
3523 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3524 XEXP (XEXP (XEXP (x, 0), 0), 0),
3525 GET_MODE_BITSIZE (mode) - (i + 1)),
3526 GET_MODE_BITSIZE (mode) - (i + 1));
3528 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3529 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3530 is 1. This produces better code than the alternative immediately
3532 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3533 && reversible_comparison_p (XEXP (x, 0))
3534 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3535 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
3537 gen_unary (NEG, mode, mode,
3538 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3539 mode, XEXP (XEXP (x, 0), 0),
3540 XEXP (XEXP (x, 0), 1)));
3542 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3543 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3544 the bitsize of the mode - 1. This allows simplification of
3545 "a = (b & 8) == 0;" */
3546 if (XEXP (x, 1) == constm1_rtx
3547 && GET_CODE (XEXP (x, 0)) != REG
3548 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3549 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3550 && nonzero_bits (XEXP (x, 0), mode) == 1)
3551 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
3552 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3553 gen_rtx_combine (XOR, mode,
3554 XEXP (x, 0), const1_rtx),
3555 GET_MODE_BITSIZE (mode) - 1),
3556 GET_MODE_BITSIZE (mode) - 1);
3558 /* If we are adding two things that have no bits in common, convert
3559 the addition into an IOR. This will often be further simplified,
3560 for example in cases like ((a & 1) + (a & 2)), which can
3563 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3564 && (nonzero_bits (XEXP (x, 0), mode)
3565 & nonzero_bits (XEXP (x, 1), mode)) == 0)
3566 return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3570 #if STORE_FLAG_VALUE == 1
3571 /* (minus 1 (comparison foo bar)) can be done by reversing the comparison
3573 if (XEXP (x, 0) == const1_rtx
3574 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3575 && reversible_comparison_p (XEXP (x, 1)))
3576 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3577 mode, XEXP (XEXP (x, 1), 0),
3578 XEXP (XEXP (x, 1), 1));
3581 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3582 (and <foo> (const_int pow2-1)) */
3583 if (GET_CODE (XEXP (x, 1)) == AND
3584 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3585 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3586 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3587 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3588 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3590 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
3592 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
3593 return gen_binary (MINUS, mode,
3594 gen_binary (MINUS, mode, XEXP (x, 0),
3595 XEXP (XEXP (x, 1), 0)),
3596 XEXP (XEXP (x, 1), 1));
3600 /* If we have (mult (plus A B) C), apply the distributive law and then
3601 the inverse distributive law to see if things simplify. This
3602 occurs mostly in addresses, often when unrolling loops. */
3604 if (GET_CODE (XEXP (x, 0)) == PLUS)
3606 x = apply_distributive_law
3607 (gen_binary (PLUS, mode,
3608 gen_binary (MULT, mode,
3609 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3610 gen_binary (MULT, mode,
3611 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3613 if (GET_CODE (x) != MULT)
3619 /* If this is a divide by a power of two, treat it as a shift if
3620 its first operand is a shift. */
3621 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3622 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3623 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3624 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3625 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3626 || GET_CODE (XEXP (x, 0)) == ROTATE
3627 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3628 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3632 case GT: case GTU: case GE: case GEU:
3633 case LT: case LTU: case LE: case LEU:
3634 /* If the first operand is a condition code, we can't do anything
3636 if (GET_CODE (XEXP (x, 0)) == COMPARE
3637 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3639 && XEXP (x, 0) != cc0_rtx
3643 rtx op0 = XEXP (x, 0);
3644 rtx op1 = XEXP (x, 1);
3645 enum rtx_code new_code;
3647 if (GET_CODE (op0) == COMPARE)
3648 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3650 /* Simplify our comparison, if possible. */
3651 new_code = simplify_comparison (code, &op0, &op1);
3653 #if STORE_FLAG_VALUE == 1
3654 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3655 if only the low-order bit is possibly nonzero in X (such as when
3656 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
3657 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
3658 known to be either 0 or -1, NE becomes a NEG and EQ becomes
3661 Remove any ZERO_EXTRACT we made when thinking this was a
3662 comparison. It may now be simpler to use, e.g., an AND. If a
3663 ZERO_EXTRACT is indeed appropriate, it will be placed back by
3664 the call to make_compound_operation in the SET case. */
3666 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3667 && op1 == const0_rtx
3668 && nonzero_bits (op0, mode) == 1)
3669 return gen_lowpart_for_combine (mode,
3670 expand_compound_operation (op0));
3672 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3673 && op1 == const0_rtx
3674 && (num_sign_bit_copies (op0, mode)
3675 == GET_MODE_BITSIZE (mode)))
3677 op0 = expand_compound_operation (op0);
3678 return gen_unary (NEG, mode, mode,
3679 gen_lowpart_for_combine (mode, op0));
3682 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3683 && op1 == const0_rtx
3684 && nonzero_bits (op0, mode) == 1)
3686 op0 = expand_compound_operation (op0);
3687 return gen_binary (XOR, mode,
3688 gen_lowpart_for_combine (mode, op0),
3692 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3693 && op1 == const0_rtx
3694 && (num_sign_bit_copies (op0, mode)
3695 == GET_MODE_BITSIZE (mode)))
3697 op0 = expand_compound_operation (op0);
3698 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
3702 #if STORE_FLAG_VALUE == -1
3703 /* If STORE_FLAG_VALUE is -1, we have cases similar to
3705 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3706 && op1 == const0_rtx
3707 && (num_sign_bit_copies (op0, mode)
3708 == GET_MODE_BITSIZE (mode)))
3709 return gen_lowpart_for_combine (mode,
3710 expand_compound_operation (op0));
3712 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3713 && op1 == const0_rtx
3714 && nonzero_bits (op0, mode) == 1)
3716 op0 = expand_compound_operation (op0);
3717 return gen_unary (NEG, mode, mode,
3718 gen_lowpart_for_combine (mode, op0));
3721 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3722 && op1 == const0_rtx
3723 && (num_sign_bit_copies (op0, mode)
3724 == GET_MODE_BITSIZE (mode)))
3726 op0 = expand_compound_operation (op0);
3727 return gen_unary (NOT, mode, mode,
3728 gen_lowpart_for_combine (mode, op0));
3731 /* If X is 0/1, (eq X 0) is X-1. */
3732 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3733 && op1 == const0_rtx
3734 && nonzero_bits (op0, mode) == 1)
3736 op0 = expand_compound_operation (op0);
3737 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
3741 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3742 one bit that might be nonzero, we can convert (ne x 0) to
3743 (ashift x c) where C puts the bit in the sign bit. Remove any
3744 AND with STORE_FLAG_VALUE when we are done, since we are only
3745 going to test the sign bit. */
3746 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3747 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3748 && (STORE_FLAG_VALUE
3749 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3750 && op1 == const0_rtx
3751 && mode == GET_MODE (op0)
3752 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
3754 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3755 expand_compound_operation (op0),
3756 GET_MODE_BITSIZE (mode) - 1 - i);
3757 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3763 /* If the code changed, return a whole new comparison. */
3764 if (new_code != code)
3765 return gen_rtx_combine (new_code, mode, op0, op1);
3767 /* Otherwise, keep this operation, but maybe change its operands.
3768 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3769 SUBST (XEXP (x, 0), op0);
3770 SUBST (XEXP (x, 1), op1);
3775 return simplify_if_then_else (x);
3781 /* If we are processing SET_DEST, we are done. */
3785 return expand_compound_operation (x);
3788 return simplify_set (x);
3793 return simplify_logical (x, last);
3796 /* (abs (neg <foo>)) -> (abs <foo>) */
3797 if (GET_CODE (XEXP (x, 0)) == NEG)
3798 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3800 /* If operand is something known to be positive, ignore the ABS. */
3801 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
3802 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
3803 <= HOST_BITS_PER_WIDE_INT)
3804 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3805 & ((HOST_WIDE_INT) 1
3806 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
3811 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
3812 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
3813 return gen_rtx_combine (NEG, mode, XEXP (x, 0));
3818 /* (ffs (*_extend <X>)) = (ffs <X>) */
3819 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3820 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3821 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3825 /* (float (sign_extend <X>)) = (float <X>). */
3826 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
3827 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3835 /* If this is a shift by a constant amount, simplify it. */
3836 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3837 return simplify_shift_const (x, code, mode, XEXP (x, 0),
3838 INTVAL (XEXP (x, 1)));
3840 #ifdef SHIFT_COUNT_TRUNCATED
3841 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
3843 force_to_mode (XEXP (x, 1), GET_MODE (x),
3845 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
3856 /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
3859 simplify_if_then_else (x)
3862 enum machine_mode mode = GET_MODE (x);
3863 rtx cond = XEXP (x, 0);
3864 rtx true = XEXP (x, 1);
3865 rtx false = XEXP (x, 2);
3866 enum rtx_code true_code = GET_CODE (cond);
3867 int comparison_p = GET_RTX_CLASS (true_code) == '<';
3871 /* Simplify storing of the truth value. */
3872 if (comparison_p && true == const_true_rtx && false == const0_rtx)
3873 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
3875 /* Also when the truth value has to be reversed. */
3876 if (comparison_p && reversible_comparison_p (cond)
3877 && true == const0_rtx && false == const_true_rtx)
3878 return gen_binary (reverse_condition (true_code),
3879 mode, XEXP (cond, 0), XEXP (cond, 1));
3881 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
3882 in it is being compared against certain values. Get the true and false
3883 comparisons and see if that says anything about the value of each arm. */
3885 if (comparison_p && reversible_comparison_p (cond)
3886 && GET_CODE (XEXP (cond, 0)) == REG)
3889 rtx from = XEXP (cond, 0);
3890 enum rtx_code false_code = reverse_condition (true_code);
3891 rtx true_val = XEXP (cond, 1);
3892 rtx false_val = true_val;
3895 /* If FALSE_CODE is EQ, swap the codes and arms. */
3897 if (false_code == EQ)
3899 swapped = 1, true_code = EQ, false_code = NE;
3900 temp = true, true = false, false = temp;
3903 /* If we are comparing against zero and the expression being tested has
3904 only a single bit that might be nonzero, that is its value when it is
3905 not equal to zero. Similarly if it is known to be -1 or 0. */
3907 if (true_code == EQ && true_val == const0_rtx
3908 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
3909 false_code = EQ, false_val = GEN_INT (nzb);
3910 else if (true_code == EQ && true_val == const0_rtx
3911 && (num_sign_bit_copies (from, GET_MODE (from))
3912 == GET_MODE_BITSIZE (GET_MODE (from))))
3913 false_code = EQ, false_val = constm1_rtx;
3915 /* Now simplify an arm if we know the value of the register in the
3916 branch and it is used in the arm. Be careful due to the potential
3917 of locally-shared RTL. */
3919 if (reg_mentioned_p (from, true))
3920 true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
3921 pc_rtx, pc_rtx, 0, 0);
3922 if (reg_mentioned_p (from, false))
3923 false = subst (known_cond (copy_rtx (false), false_code,
3925 pc_rtx, pc_rtx, 0, 0);
3927 SUBST (XEXP (x, 1), swapped ? false : true);
3928 SUBST (XEXP (x, 2), swapped ? true : false);
3930 true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
3933 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3934 reversed, do so to avoid needing two sets of patterns for
3935 subtract-and-branch insns. Similarly if we have a constant in the true
3936 arm, the false arm is the same as the first operand of the comparison, or
3937 the false arm is more complicated than the true arm. */
3939 if (comparison_p && reversible_comparison_p (cond)
3941 || (CONSTANT_P (true)
3942 && GET_CODE (false) != CONST_INT && false != pc_rtx)
3943 || true == const0_rtx
3944 || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
3945 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
3946 || (GET_CODE (true) == SUBREG
3947 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
3948 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
3949 || reg_mentioned_p (true, false)
3950 || rtx_equal_p (false, XEXP (cond, 0))))
3952 true_code = reverse_condition (true_code);
3954 gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
3957 SUBST (XEXP (x, 1), false);
3958 SUBST (XEXP (x, 2), true);
3960 temp = true, true = false, false = temp, cond = XEXP (x, 0);
3963 /* If the two arms are identical, we don't need the comparison. */
3965 if (rtx_equal_p (true, false) && ! side_effects_p (cond))
3968 /* Look for cases where we have (abs x) or (neg (abs X)). */
3970 if (GET_MODE_CLASS (mode) == MODE_INT
3971 && GET_CODE (false) == NEG
3972 && rtx_equal_p (true, XEXP (false, 0))
3974 && rtx_equal_p (true, XEXP (cond, 0))
3975 && ! side_effects_p (true))
3980 return gen_unary (ABS, mode, mode, true);
3983 return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
3986 /* Look for MIN or MAX. */
3988 if ((! FLOAT_MODE_P (mode) | flag_fast_math)
3990 && rtx_equal_p (XEXP (cond, 0), true)
3991 && rtx_equal_p (XEXP (cond, 1), false)
3992 && ! side_effects_p (cond))
3997 return gen_binary (SMAX, mode, true, false);
4000 return gen_binary (SMIN, mode, true, false);
4003 return gen_binary (UMAX, mode, true, false);
4006 return gen_binary (UMIN, mode, true, false);
4009 #if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
4011 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4012 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4013 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4014 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4015 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4016 neither of the above, but it isn't worth checking for. */
4018 if (comparison_p && mode != VOIDmode && ! side_effects_p (x))
4020 rtx t = make_compound_operation (true, SET);
4021 rtx f = make_compound_operation (false, SET);
4022 rtx cond_op0 = XEXP (cond, 0);
4023 rtx cond_op1 = XEXP (cond, 1);
4024 enum rtx_code op, extend_op = NIL;
4025 enum machine_mode m = mode;
4028 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4029 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4030 || GET_CODE (t) == ASHIFT
4031 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4032 && rtx_equal_p (XEXP (t, 0), f))
4033 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4035 /* If an identity-zero op is commutative, check whether there
4036 would be a match if we swapped the operands. */
4037 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4038 || GET_CODE (t) == XOR)
4039 && rtx_equal_p (XEXP (t, 1), f))
4040 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4041 else if (GET_CODE (t) == SIGN_EXTEND
4042 && (GET_CODE (XEXP (t, 0)) == PLUS
4043 || GET_CODE (XEXP (t, 0)) == MINUS
4044 || GET_CODE (XEXP (t, 0)) == IOR
4045 || GET_CODE (XEXP (t, 0)) == XOR
4046 || GET_CODE (XEXP (t, 0)) == ASHIFT
4047 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4048 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4049 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4050 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4051 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4052 && (num_sign_bit_copies (f, GET_MODE (f))
4053 > (GET_MODE_BITSIZE (mode)
4054 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4056 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4057 extend_op = SIGN_EXTEND;
4058 m = GET_MODE (XEXP (t, 0));
4060 else if (GET_CODE (t) == SIGN_EXTEND
4061 && (GET_CODE (XEXP (t, 0)) == PLUS
4062 || GET_CODE (XEXP (t, 0)) == IOR
4063 || GET_CODE (XEXP (t, 0)) == XOR)
4064 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4065 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4066 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4067 && (num_sign_bit_copies (f, GET_MODE (f))
4068 > (GET_MODE_BITSIZE (mode)
4069 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4071 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4072 extend_op = SIGN_EXTEND;
4073 m = GET_MODE (XEXP (t, 0));
4075 else if (GET_CODE (t) == ZERO_EXTEND
4076 && (GET_CODE (XEXP (t, 0)) == PLUS
4077 || GET_CODE (XEXP (t, 0)) == MINUS
4078 || GET_CODE (XEXP (t, 0)) == IOR
4079 || GET_CODE (XEXP (t, 0)) == XOR
4080 || GET_CODE (XEXP (t, 0)) == ASHIFT
4081 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4082 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4083 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4084 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4085 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4086 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4087 && ((nonzero_bits (f, GET_MODE (f))
4088 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4091 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4092 extend_op = ZERO_EXTEND;
4093 m = GET_MODE (XEXP (t, 0));
4095 else if (GET_CODE (t) == ZERO_EXTEND
4096 && (GET_CODE (XEXP (t, 0)) == PLUS
4097 || GET_CODE (XEXP (t, 0)) == IOR
4098 || GET_CODE (XEXP (t, 0)) == XOR)
4099 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4100 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4101 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4102 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4103 && ((nonzero_bits (f, GET_MODE (f))
4104 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4107 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4108 extend_op = ZERO_EXTEND;
4109 m = GET_MODE (XEXP (t, 0));
4114 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4115 pc_rtx, pc_rtx, 0, 0);
4116 temp = gen_binary (MULT, m, temp,
4117 gen_binary (MULT, m, c1, const_true_rtx));
4118 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4119 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4121 if (extend_op != NIL)
4122 temp = gen_unary (extend_op, mode, m, temp);
4129 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4130 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4131 negation of a single bit, we can convert this operation to a shift. We
4132 can actually do this more generally, but it doesn't seem worth it. */
4134 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4135 && false == const0_rtx && GET_CODE (true) == CONST_INT
4136 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4137 && (i = exact_log2 (INTVAL (true))) >= 0)
4138 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4139 == GET_MODE_BITSIZE (mode))
4140 && (i = exact_log2 (- INTVAL (true))) >= 0)))
4142 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4143 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
4148 /* Simplify X, a SET expression. Return the new expression. */
4154 rtx src = SET_SRC (x);
4155 rtx dest = SET_DEST (x);
4156 enum machine_mode mode
4157 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4161 /* (set (pc) (return)) gets written as (return). */
4162 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4165 /* Now that we know for sure which bits of SRC we are using, see if we can
4166 simplify the expression for the object knowing that we only need the
4169 if (GET_MODE_CLASS (mode) == MODE_INT)
4170 src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
4172 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4173 the comparison result and try to simplify it unless we already have used
4174 undobuf.other_insn. */
4175 if ((GET_CODE (src) == COMPARE
4180 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4181 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4182 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4183 && rtx_equal_p (XEXP (*cc_use, 0), dest))
4185 enum rtx_code old_code = GET_CODE (*cc_use);
4186 enum rtx_code new_code;
4188 int other_changed = 0;
4189 enum machine_mode compare_mode = GET_MODE (dest);
4191 if (GET_CODE (src) == COMPARE)
4192 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4194 op0 = src, op1 = const0_rtx;
4196 /* Simplify our comparison, if possible. */
4197 new_code = simplify_comparison (old_code, &op0, &op1);
4199 #ifdef EXTRA_CC_MODES
4200 /* If this machine has CC modes other than CCmode, check to see if we
4201 need to use a different CC mode here. */
4202 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
4203 #endif /* EXTRA_CC_MODES */
4205 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
4206 /* If the mode changed, we have to change SET_DEST, the mode in the
4207 compare, and the mode in the place SET_DEST is used. If SET_DEST is
4208 a hard register, just build new versions with the proper mode. If it
4209 is a pseudo, we lose unless it is only time we set the pseudo, in
4210 which case we can safely change its mode. */
4211 if (compare_mode != GET_MODE (dest))
4213 int regno = REGNO (dest);
4214 rtx new_dest = gen_rtx (REG, compare_mode, regno);
4216 if (regno < FIRST_PSEUDO_REGISTER
4217 || (reg_n_sets[regno] == 1 && ! REG_USERVAR_P (dest)))
4219 if (regno >= FIRST_PSEUDO_REGISTER)
4220 SUBST (regno_reg_rtx[regno], new_dest);
4222 SUBST (SET_DEST (x), new_dest);
4223 SUBST (XEXP (*cc_use, 0), new_dest);
4231 /* If the code changed, we have to build a new comparison in
4232 undobuf.other_insn. */
4233 if (new_code != old_code)
4235 unsigned HOST_WIDE_INT mask;
4237 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4240 /* If the only change we made was to change an EQ into an NE or
4241 vice versa, OP0 has only one bit that might be nonzero, and OP1
4242 is zero, check if changing the user of the condition code will
4243 produce a valid insn. If it won't, we can keep the original code
4244 in that insn by surrounding our operation with an XOR. */
4246 if (((old_code == NE && new_code == EQ)
4247 || (old_code == EQ && new_code == NE))
4248 && ! other_changed && op1 == const0_rtx
4249 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
4250 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
4252 rtx pat = PATTERN (other_insn), note = 0;
4254 if ((recog_for_combine (&pat, other_insn, ¬e) < 0
4255 && ! check_asm_operands (pat)))
4257 PUT_CODE (*cc_use, old_code);
4260 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
4268 undobuf.other_insn = other_insn;
4271 /* If we are now comparing against zero, change our source if
4272 needed. If we do not use cc0, we always have a COMPARE. */
4273 if (op1 == const0_rtx && dest == cc0_rtx)
4275 SUBST (SET_SRC (x), op0);
4281 /* Otherwise, if we didn't previously have a COMPARE in the
4282 correct mode, we need one. */
4283 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
4286 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
4291 /* Otherwise, update the COMPARE if needed. */
4292 SUBST (XEXP (src, 0), op0);
4293 SUBST (XEXP (src, 1), op1);
4298 /* Get SET_SRC in a form where we have placed back any
4299 compound expressions. Then do the checks below. */
4300 src = make_compound_operation (src, SET);
4301 SUBST (SET_SRC (x), src);
4304 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
4305 and X being a REG or (subreg (reg)), we may be able to convert this to
4306 (set (subreg:m2 x) (op)).
4308 We can always do this if M1 is narrower than M2 because that means that
4309 we only care about the low bits of the result.
4311 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
4312 perform a narrower operation that requested since the high-order bits will
4313 be undefined. On machine where it is defined, this transformation is safe
4314 as long as M1 and M2 have the same number of words. */
4316 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4317 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
4318 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
4320 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4321 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
4322 #ifndef WORD_REGISTER_OPERATIONS
4323 && (GET_MODE_SIZE (GET_MODE (src))
4324 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4326 && (GET_CODE (dest) == REG
4327 || (GET_CODE (dest) == SUBREG
4328 && GET_CODE (SUBREG_REG (dest)) == REG)))
4330 SUBST (SET_DEST (x),
4331 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
4333 SUBST (SET_SRC (x), SUBREG_REG (src));
4335 src = SET_SRC (x), dest = SET_DEST (x);
4338 #ifdef LOAD_EXTEND_OP
4339 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
4340 would require a paradoxical subreg. Replace the subreg with a
4341 zero_extend to avoid the reload that would otherwise be required. */
4343 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4344 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
4345 && SUBREG_WORD (src) == 0
4346 && (GET_MODE_SIZE (GET_MODE (src))
4347 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4348 && GET_CODE (SUBREG_REG (src)) == MEM)
4351 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
4352 GET_MODE (src), XEXP (src, 0)));
4358 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
4359 are comparing an item known to be 0 or -1 against 0, use a logical
4360 operation instead. Check for one of the arms being an IOR of the other
4361 arm with some value. We compute three terms to be IOR'ed together. In
4362 practice, at most two will be nonzero. Then we do the IOR's. */
4364 if (GET_CODE (dest) != PC
4365 && GET_CODE (src) == IF_THEN_ELSE
4366 #ifdef HAVE_conditional_move
4367 && ! HAVE_conditional_move
4369 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
4370 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
4371 && XEXP (XEXP (src, 0), 1) == const0_rtx
4372 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
4373 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
4374 GET_MODE (XEXP (XEXP (src, 0), 0)))
4375 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
4376 && ! side_effects_p (src))
4378 rtx true = (GET_CODE (XEXP (src, 0)) == NE
4379 ? XEXP (src, 1) : XEXP (src, 2));
4380 rtx false = (GET_CODE (XEXP (src, 0)) == NE
4381 ? XEXP (src, 2) : XEXP (src, 1));
4382 rtx term1 = const0_rtx, term2, term3;
4384 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4385 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4386 else if (GET_CODE (true) == IOR
4387 && rtx_equal_p (XEXP (true, 1), false))
4388 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4389 else if (GET_CODE (false) == IOR
4390 && rtx_equal_p (XEXP (false, 0), true))
4391 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4392 else if (GET_CODE (false) == IOR
4393 && rtx_equal_p (XEXP (false, 1), true))
4394 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4396 term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
4397 term3 = gen_binary (AND, GET_MODE (src),
4398 gen_unary (NOT, GET_MODE (src), GET_MODE (src),
4399 XEXP (XEXP (src, 0), 0)),
4403 gen_binary (IOR, GET_MODE (src),
4404 gen_binary (IOR, GET_MODE (src), term1, term2),
4410 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
4411 whole thing fail. */
4412 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
4414 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
4417 /* Convert this into a field assignment operation, if possible. */
4418 return make_field_assignment (x);
4421 /* Simplify, X, and AND, IOR, or XOR operation, and return the simplified
4422 result. LAST is nonzero if this is the last retry. */
4425 simplify_logical (x, last)
4429 enum machine_mode mode = GET_MODE (x);
4430 rtx op0 = XEXP (x, 0);
4431 rtx op1 = XEXP (x, 1);
4433 switch (GET_CODE (x))
4436 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4437 insn (and may simplify more). */
4438 if (GET_CODE (op0) == XOR
4439 && rtx_equal_p (XEXP (op0, 0), op1)
4440 && ! side_effects_p (op1))
4441 x = gen_binary (AND, mode,
4442 gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
4444 if (GET_CODE (op0) == XOR
4445 && rtx_equal_p (XEXP (op0, 1), op1)
4446 && ! side_effects_p (op1))
4447 x = gen_binary (AND, mode,
4448 gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
4450 /* Similarly for (~ (A ^ B)) & A. */
4451 if (GET_CODE (op0) == NOT
4452 && GET_CODE (XEXP (op0, 0)) == XOR
4453 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
4454 && ! side_effects_p (op1))
4455 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
4457 if (GET_CODE (op0) == NOT
4458 && GET_CODE (XEXP (op0, 0)) == XOR
4459 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
4460 && ! side_effects_p (op1))
4461 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
4463 if (GET_CODE (op1) == CONST_INT)
4465 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
4467 /* If we have (ior (and (X C1) C2)) and the next restart would be
4468 the last, simplify this by making C1 as small as possible
4471 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
4472 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4473 && GET_CODE (op1) == CONST_INT)
4474 return gen_binary (IOR, mode,
4475 gen_binary (AND, mode, XEXP (op0, 0),
4476 GEN_INT (INTVAL (XEXP (op0, 1))
4477 & ~ INTVAL (op1))), op1);
4479 if (GET_CODE (x) != AND)
4483 /* Convert (A | B) & A to A. */
4484 if (GET_CODE (op0) == IOR
4485 && (rtx_equal_p (XEXP (op0, 0), op1)
4486 || rtx_equal_p (XEXP (op0, 1), op1))
4487 && ! side_effects_p (XEXP (op0, 0))
4488 && ! side_effects_p (XEXP (op0, 1)))
4491 /* In the following group of tests (and those in case IOR below),
4492 we start with some combination of logical operations and apply
4493 the distributive law followed by the inverse distributive law.
4494 Most of the time, this results in no change. However, if some of
4495 the operands are the same or inverses of each other, simplifications
4498 For example, (and (ior A B) (not B)) can occur as the result of
4499 expanding a bit field assignment. When we apply the distributive
4500 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
4501 which then simplifies to (and (A (not B))).
4503 If we have (and (ior A B) C), apply the distributive law and then
4504 the inverse distributive law to see if things simplify. */
4506 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
4508 x = apply_distributive_law
4509 (gen_binary (GET_CODE (op0), mode,
4510 gen_binary (AND, mode, XEXP (op0, 0), op1),
4511 gen_binary (AND, mode, XEXP (op0, 1), op1)));
4512 if (GET_CODE (x) != AND)
4516 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
4517 return apply_distributive_law
4518 (gen_binary (GET_CODE (op1), mode,
4519 gen_binary (AND, mode, XEXP (op1, 0), op0),
4520 gen_binary (AND, mode, XEXP (op1, 1), op0)));
4522 /* Similarly, taking advantage of the fact that
4523 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4525 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
4526 return apply_distributive_law
4527 (gen_binary (XOR, mode,
4528 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
4529 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1))));
4531 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
4532 return apply_distributive_law
4533 (gen_binary (XOR, mode,
4534 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
4535 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1))));
4539 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
4540 if (GET_CODE (op1) == CONST_INT
4541 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4542 && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
4545 /* Convert (A & B) | A to A. */
4546 if (GET_CODE (op0) == AND
4547 && (rtx_equal_p (XEXP (op0, 0), op1)
4548 || rtx_equal_p (XEXP (op0, 1), op1))
4549 && ! side_effects_p (XEXP (op0, 0))
4550 && ! side_effects_p (XEXP (op0, 1)))
4553 /* If we have (ior (and A B) C), apply the distributive law and then
4554 the inverse distributive law to see if things simplify. */
4556 if (GET_CODE (op0) == AND)
4558 x = apply_distributive_law
4559 (gen_binary (AND, mode,
4560 gen_binary (IOR, mode, XEXP (op0, 0), op1),
4561 gen_binary (IOR, mode, XEXP (op0, 1), op1)));
4563 if (GET_CODE (x) != IOR)
4567 if (GET_CODE (op1) == AND)
4569 x = apply_distributive_law
4570 (gen_binary (AND, mode,
4571 gen_binary (IOR, mode, XEXP (op1, 0), op0),
4572 gen_binary (IOR, mode, XEXP (op1, 1), op0)));
4574 if (GET_CODE (x) != IOR)
4578 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4579 mode size to (rotate A CX). */
4581 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
4582 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
4583 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
4584 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4585 && GET_CODE (XEXP (op1, 1)) == CONST_INT
4586 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
4587 == GET_MODE_BITSIZE (mode)))
4588 return gen_rtx (ROTATE, mode, XEXP (op0, 0),
4589 (GET_CODE (op0) == ASHIFT
4590 ? XEXP (op0, 1) : XEXP (op1, 1)));
4592 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
4593 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
4594 does not affect any of the bits in OP1, it can really be done
4595 as a PLUS and we can associate. We do this by seeing if OP1
4596 can be safely shifted left C bits. */
4597 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
4598 && GET_CODE (XEXP (op0, 0)) == PLUS
4599 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
4600 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4601 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
4603 int count = INTVAL (XEXP (op0, 1));
4604 HOST_WIDE_INT mask = INTVAL (op1) << count;
4606 if (mask >> count == INTVAL (op1)
4607 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
4609 SUBST (XEXP (XEXP (op0, 0), 1),
4610 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
4617 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4618 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4621 int num_negated = 0;
4623 if (GET_CODE (op0) == NOT)
4624 num_negated++, op0 = XEXP (op0, 0);
4625 if (GET_CODE (op1) == NOT)
4626 num_negated++, op1 = XEXP (op1, 0);
4628 if (num_negated == 2)
4630 SUBST (XEXP (x, 0), op0);
4631 SUBST (XEXP (x, 1), op1);
4633 else if (num_negated == 1)
4634 return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
4637 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4638 correspond to a machine insn or result in further simplifications
4639 if B is a constant. */
4641 if (GET_CODE (op0) == AND
4642 && rtx_equal_p (XEXP (op0, 1), op1)
4643 && ! side_effects_p (op1))
4644 return gen_binary (AND, mode,
4645 gen_unary (NOT, mode, mode, XEXP (op0, 0)),
4648 else if (GET_CODE (op0) == AND
4649 && rtx_equal_p (XEXP (op0, 0), op1)
4650 && ! side_effects_p (op1))
4651 return gen_binary (AND, mode,
4652 gen_unary (NOT, mode, mode, XEXP (op0, 1)),
4655 #if STORE_FLAG_VALUE == 1
4656 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4658 if (op1 == const1_rtx
4659 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
4660 && reversible_comparison_p (op0))
4661 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
4662 mode, XEXP (op0, 0), XEXP (op0, 1));
4664 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
4665 is (lt foo (const_int 0)), so we can perform the above
4668 if (op1 == const1_rtx
4669 && GET_CODE (op0) == LSHIFTRT
4670 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4671 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
4672 return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
4675 /* (xor (comparison foo bar) (const_int sign-bit))
4676 when STORE_FLAG_VALUE is the sign bit. */
4677 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4678 && (STORE_FLAG_VALUE
4679 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4680 && op1 == const_true_rtx
4681 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
4682 && reversible_comparison_p (op0))
4683 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
4684 mode, XEXP (op0, 0), XEXP (op0, 1));
4691 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4692 operations" because they can be replaced with two more basic operations.
4693 ZERO_EXTEND is also considered "compound" because it can be replaced with
4694 an AND operation, which is simpler, though only one operation.
4696 The function expand_compound_operation is called with an rtx expression
4697 and will convert it to the appropriate shifts and AND operations,
4698 simplifying at each stage.
4700 The function make_compound_operation is called to convert an expression
4701 consisting of shifts and ANDs into the equivalent compound expression.
4702 It is the inverse of this function, loosely speaking. */
4705 expand_compound_operation (x)
4713 switch (GET_CODE (x))
4718 /* We can't necessarily use a const_int for a multiword mode;
4719 it depends on implicitly extending the value.
4720 Since we don't know the right way to extend it,
4721 we can't tell whether the implicit way is right.
4723 Even for a mode that is no wider than a const_int,
4724 we can't win, because we need to sign extend one of its bits through
4725 the rest of it, and we don't know which bit. */
4726 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
4729 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
4730 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
4731 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
4732 reloaded. If not for that, MEM's would very rarely be safe.
4734 Reject MODEs bigger than a word, because we might not be able
4735 to reference a two-register group starting with an arbitrary register
4736 (and currently gen_lowpart might crash for a SUBREG). */
4738 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
4741 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4742 /* If the inner object has VOIDmode (the only way this can happen
4743 is if it is a ASM_OPERANDS), we can't do anything since we don't
4744 know how much masking to do. */
4753 /* If the operand is a CLOBBER, just return it. */
4754 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4757 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4758 || GET_CODE (XEXP (x, 2)) != CONST_INT
4759 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4762 len = INTVAL (XEXP (x, 1));
4763 pos = INTVAL (XEXP (x, 2));
4765 /* If this goes outside the object being extracted, replace the object
4766 with a (use (mem ...)) construct that only combine understands
4767 and is used only for this purpose. */
4768 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4769 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4772 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4780 /* If we reach here, we want to return a pair of shifts. The inner
4781 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4782 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4783 logical depending on the value of UNSIGNEDP.
4785 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4786 converted into an AND of a shift.
4788 We must check for the case where the left shift would have a negative
4789 count. This can happen in a case like (x >> 31) & 255 on machines
4790 that can't shift by a constant. On those machines, we would first
4791 combine the shift with the AND to produce a variable-position
4792 extraction. Then the constant of 31 would be substituted in to produce
4793 a such a position. */
4795 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4796 if (modewidth >= pos - len)
4797 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
4799 simplify_shift_const (NULL_RTX, ASHIFT,
4802 modewidth - pos - len),
4805 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4806 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4807 simplify_shift_const (NULL_RTX, LSHIFTRT,
4810 ((HOST_WIDE_INT) 1 << len) - 1);
4812 /* Any other cases we can't handle. */
4816 /* If we couldn't do this for some reason, return the original
4818 if (GET_CODE (tem) == CLOBBER)
4824 /* X is a SET which contains an assignment of one object into
4825 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4826 or certain SUBREGS). If possible, convert it into a series of
4829 We half-heartedly support variable positions, but do not at all
4830 support variable lengths. */
4833 expand_field_assignment (x)
4837 rtx pos; /* Always counts from low bit. */
4840 enum machine_mode compute_mode;
4842 /* Loop until we find something we can't simplify. */
4845 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4846 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4848 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4849 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4852 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4853 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4855 inner = XEXP (SET_DEST (x), 0);
4856 len = INTVAL (XEXP (SET_DEST (x), 1));
4857 pos = XEXP (SET_DEST (x), 2);
4859 /* If the position is constant and spans the width of INNER,
4860 surround INNER with a USE to indicate this. */
4861 if (GET_CODE (pos) == CONST_INT
4862 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4863 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4866 if (GET_CODE (pos) == CONST_INT)
4867 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4869 else if (GET_CODE (pos) == MINUS
4870 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4871 && (INTVAL (XEXP (pos, 1))
4872 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4873 /* If position is ADJUST - X, new position is X. */
4874 pos = XEXP (pos, 0);
4876 pos = gen_binary (MINUS, GET_MODE (pos),
4877 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4883 /* A SUBREG between two modes that occupy the same numbers of words
4884 can be done by moving the SUBREG to the source. */
4885 else if (GET_CODE (SET_DEST (x)) == SUBREG
4886 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4887 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4888 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4889 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4891 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4892 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4899 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4900 inner = SUBREG_REG (inner);
4902 compute_mode = GET_MODE (inner);
4904 /* Compute a mask of LEN bits, if we can do this on the host machine. */
4905 if (len < HOST_BITS_PER_WIDE_INT)
4906 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
4910 /* Now compute the equivalent expression. Make a copy of INNER
4911 for the SET_DEST in case it is a MEM into which we will substitute;
4912 we don't want shared RTL in that case. */
4913 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4914 gen_binary (IOR, compute_mode,
4915 gen_binary (AND, compute_mode,
4916 gen_unary (NOT, compute_mode,
4922 gen_binary (ASHIFT, compute_mode,
4923 gen_binary (AND, compute_mode,
4924 gen_lowpart_for_combine
4934 /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
4935 it is an RTX that represents a variable starting position; otherwise,
4936 POS is the (constant) starting bit position (counted from the LSB).
4938 INNER may be a USE. This will occur when we started with a bitfield
4939 that went outside the boundary of the object in memory, which is
4940 allowed on most machines. To isolate this case, we produce a USE
4941 whose mode is wide enough and surround the MEM with it. The only
4942 code that understands the USE is this routine. If it is not removed,
4943 it will cause the resulting insn not to match.
4945 UNSIGNEDP is non-zero for an unsigned reference and zero for a
4948 IN_DEST is non-zero if this is a reference in the destination of a
4949 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4950 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
4953 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4954 ZERO_EXTRACT should be built even for bits starting at bit 0.
4956 MODE is the desired mode of the result (if IN_DEST == 0). */
4959 make_extraction (mode, inner, pos, pos_rtx, len,
4960 unsignedp, in_dest, in_compare)
4961 enum machine_mode mode;
4967 int in_dest, in_compare;
4969 /* This mode describes the size of the storage area
4970 to fetch the overall value from. Within that, we
4971 ignore the POS lowest bits, etc. */
4972 enum machine_mode is_mode = GET_MODE (inner);
4973 enum machine_mode inner_mode;
4974 enum machine_mode wanted_mem_mode = byte_mode;
4975 enum machine_mode pos_mode = word_mode;
4976 enum machine_mode extraction_mode = word_mode;
4977 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4980 rtx orig_pos_rtx = pos_rtx;
4983 /* Get some information about INNER and get the innermost object. */
4984 if (GET_CODE (inner) == USE)
4985 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
4986 /* We don't need to adjust the position because we set up the USE
4987 to pretend that it was a full-word object. */
4988 spans_byte = 1, inner = XEXP (inner, 0);
4989 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4991 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
4992 consider just the QI as the memory to extract from.
4993 The subreg adds or removes high bits; its mode is
4994 irrelevant to the meaning of this extraction,
4995 since POS and LEN count from the lsb. */
4996 if (GET_CODE (SUBREG_REG (inner)) == MEM)
4997 is_mode = GET_MODE (SUBREG_REG (inner));
4998 inner = SUBREG_REG (inner);
5001 inner_mode = GET_MODE (inner);
5003 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
5004 pos = INTVAL (pos_rtx), pos_rtx = 0;
5006 /* See if this can be done without an extraction. We never can if the
5007 width of the field is not the same as that of some integer mode. For
5008 registers, we can only avoid the extraction if the position is at the
5009 low-order bit and this is either not in the destination or we have the
5010 appropriate STRICT_LOW_PART operation available.
5012 For MEM, we can avoid an extract if the field starts on an appropriate
5013 boundary and we can change the mode of the memory reference. However,
5014 we cannot directly access the MEM if we have a USE and the underlying
5015 MEM is not TMODE. This combination means that MEM was being used in a
5016 context where bits outside its mode were being referenced; that is only
5017 valid in bit-field insns. */
5019 if (tmode != BLKmode
5020 && ! (spans_byte && inner_mode != tmode)
5021 && ((pos_rtx == 0 && pos == 0 && GET_CODE (inner) != MEM
5023 || (GET_CODE (inner) == REG
5024 && (movstrict_optab->handlers[(int) tmode].insn_code
5025 != CODE_FOR_nothing))))
5026 || (GET_CODE (inner) == MEM && pos_rtx == 0
5028 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5029 : BITS_PER_UNIT)) == 0
5030 /* We can't do this if we are widening INNER_MODE (it
5031 may not be aligned, for one thing). */
5032 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5033 && (inner_mode == tmode
5034 || (! mode_dependent_address_p (XEXP (inner, 0))
5035 && ! MEM_VOLATILE_P (inner))))))
5037 /* If INNER is a MEM, make a new MEM that encompasses just the desired
5038 field. If the original and current mode are the same, we need not
5039 adjust the offset. Otherwise, we do if bytes big endian.
5041 If INNER is not a MEM, get a piece consisting of the just the field
5042 of interest (in this case POS must be 0). */
5044 if (GET_CODE (inner) == MEM)
5047 /* POS counts from lsb, but make OFFSET count in memory order. */
5048 if (BYTES_BIG_ENDIAN)
5049 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5051 offset = pos / BITS_PER_UNIT;
5053 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
5054 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
5055 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
5056 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
5058 else if (GET_CODE (inner) == REG)
5060 /* We can't call gen_lowpart_for_combine here since we always want
5061 a SUBREG and it would sometimes return a new hard register. */
5062 if (tmode != inner_mode)
5063 new = gen_rtx (SUBREG, tmode, inner,
5065 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
5066 ? ((GET_MODE_SIZE (inner_mode)
5067 - GET_MODE_SIZE (tmode))
5074 new = force_to_mode (inner, tmode,
5075 len >= HOST_BITS_PER_WIDE_INT
5076 ? GET_MODE_MASK (tmode)
5077 : ((HOST_WIDE_INT) 1 << len) - 1,
5080 /* If this extraction is going into the destination of a SET,
5081 make a STRICT_LOW_PART unless we made a MEM. */
5084 return (GET_CODE (new) == MEM ? new
5085 : (GET_CODE (new) != SUBREG
5086 ? gen_rtx (CLOBBER, tmode, const0_rtx)
5087 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
5089 /* Otherwise, sign- or zero-extend unless we already are in the
5092 return (mode == tmode ? new
5093 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5097 /* Unless this is a COMPARE or we have a funny memory reference,
5098 don't do anything with zero-extending field extracts starting at
5099 the low-order bit since they are simple AND operations. */
5100 if (pos_rtx == 0 && pos == 0 && ! in_dest
5101 && ! in_compare && ! spans_byte && unsignedp)
5104 /* Unless we are allowed to span bytes, reject this if we would be
5105 spanning bytes or if the position is not a constant and the length
5106 is not 1. In all other cases, we would only be going outside
5107 out object in cases when an original shift would have been
5110 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5111 || (pos_rtx != 0 && len != 1)))
5114 /* Get the mode to use should INNER be a MEM, the mode for the position,
5115 and the mode for the result. */
5119 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
5120 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
5121 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
5126 if (! in_dest && unsignedp)
5128 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
5129 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
5130 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
5135 if (! in_dest && ! unsignedp)
5137 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
5138 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
5139 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
5143 /* Never narrow an object, since that might not be safe. */
5145 if (mode != VOIDmode
5146 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5147 extraction_mode = mode;
5149 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5150 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5151 pos_mode = GET_MODE (pos_rtx);
5153 /* If this is not from memory or we have to change the mode of memory and
5154 cannot, the desired mode is EXTRACTION_MODE. */
5155 if (GET_CODE (inner) != MEM
5156 || (inner_mode != wanted_mem_mode
5157 && (mode_dependent_address_p (XEXP (inner, 0))
5158 || MEM_VOLATILE_P (inner))))
5159 wanted_mem_mode = extraction_mode;
5164 /* If position is constant, compute new position. Otherwise, build
5167 pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
5171 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
5172 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
5173 GET_MODE_BITSIZE (wanted_mem_mode))
5178 /* If INNER has a wider mode, make it smaller. If this is a constant
5179 extract, try to adjust the byte to point to the byte containing
5181 if (wanted_mem_mode != VOIDmode
5182 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
5183 && ((GET_CODE (inner) == MEM
5184 && (inner_mode == wanted_mem_mode
5185 || (! mode_dependent_address_p (XEXP (inner, 0))
5186 && ! MEM_VOLATILE_P (inner))))))
5190 /* The computations below will be correct if the machine is big
5191 endian in both bits and bytes or little endian in bits and bytes.
5192 If it is mixed, we must adjust. */
5194 /* If bytes are big endian and we had a paradoxical SUBREG, we must
5195 adjust OFFSET to compensate. */
5196 #if BYTES_BIG_ENDIAN
5198 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5199 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
5202 /* If this is a constant position, we can move to the desired byte. */
5205 offset += pos / BITS_PER_UNIT;
5206 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
5209 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5210 if (! spans_byte && is_mode != wanted_mem_mode)
5211 offset = (GET_MODE_SIZE (is_mode)
5212 - GET_MODE_SIZE (wanted_mem_mode) - offset);
5215 if (offset != 0 || inner_mode != wanted_mem_mode)
5217 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
5218 plus_constant (XEXP (inner, 0), offset));
5219 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
5220 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
5221 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
5226 /* If INNER is not memory, we can always get it into the proper mode. */
5227 else if (GET_CODE (inner) != MEM)
5228 inner = force_to_mode (inner, extraction_mode,
5229 pos_rtx || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5230 ? GET_MODE_MASK (extraction_mode)
5231 : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
5234 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
5235 have to zero extend. Otherwise, we can just use a SUBREG. */
5237 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5238 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
5239 else if (pos_rtx != 0
5240 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5241 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5243 /* Make POS_RTX unless we already have it and it is correct. If we don't
5244 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
5246 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5247 pos_rtx = orig_pos_rtx;
5249 else if (pos_rtx == 0)
5250 pos_rtx = GEN_INT (pos);
5252 /* Make the required operation. See if we can use existing rtx. */
5253 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5254 extraction_mode, inner, GEN_INT (len), pos_rtx);
5256 new = gen_lowpart_for_combine (mode, new);
5261 /* See if X contains an ASHIFT of COUNT or more bits that can be commuted
5262 with any other operations in X. Return X without that shift if so. */
5265 extract_left_shift (x, count)
5269 enum rtx_code code = GET_CODE (x);
5270 enum machine_mode mode = GET_MODE (x);
5276 /* This is the shift itself. If it is wide enough, we will return
5277 either the value being shifted if the shift count is equal to
5278 COUNT or a shift for the difference. */
5279 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5280 && INTVAL (XEXP (x, 1)) >= count)
5281 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
5282 INTVAL (XEXP (x, 1)) - count);
5286 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5287 return gen_unary (code, mode, mode, tem);
5291 case PLUS: case IOR: case XOR: case AND:
5292 /* If we can safely shift this constant and we find the inner shift,
5293 make a new operation. */
5294 if (GET_CODE (XEXP (x,1)) == CONST_INT
5295 && (INTVAL (XEXP (x, 1)) & (((HOST_WIDE_INT) 1 << count)) - 1) == 0
5296 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5297 return gen_binary (code, mode, tem,
5298 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
5306 /* Look at the expression rooted at X. Look for expressions
5307 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5308 Form these expressions.
5310 Return the new rtx, usually just X.
5312 Also, for machines like the Vax that don't have logical shift insns,
5313 try to convert logical to arithmetic shift operations in cases where
5314 they are equivalent. This undoes the canonicalizations to logical
5315 shifts done elsewhere.
5317 We try, as much as possible, to re-use rtl expressions to save memory.
5319 IN_CODE says what kind of expression we are processing. Normally, it is
5320 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
5321 being kludges), it is MEM. When processing the arguments of a comparison
5322 or a COMPARE against zero, it is COMPARE. */
5325 make_compound_operation (x, in_code)
5327 enum rtx_code in_code;
5329 enum rtx_code code = GET_CODE (x);
5330 enum machine_mode mode = GET_MODE (x);
5331 int mode_width = GET_MODE_BITSIZE (mode);
5333 enum rtx_code next_code;
5339 /* Select the code to be used in recursive calls. Once we are inside an
5340 address, we stay there. If we have a comparison, set to COMPARE,
5341 but once inside, go back to our default of SET. */
5343 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
5344 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5345 && XEXP (x, 1) == const0_rtx) ? COMPARE
5346 : in_code == COMPARE ? SET : in_code);
5348 /* Process depending on the code of this operation. If NEW is set
5349 non-zero, it will be returned. */
5354 /* Convert shifts by constants into multiplications if inside
5356 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5357 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5358 && INTVAL (XEXP (x, 1)) >= 0)
5360 new = make_compound_operation (XEXP (x, 0), next_code);
5361 new = gen_rtx_combine (MULT, mode, new,
5362 GEN_INT ((HOST_WIDE_INT) 1
5363 << INTVAL (XEXP (x, 1))));
5368 /* If the second operand is not a constant, we can't do anything
5370 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5373 /* If the constant is a power of two minus one and the first operand
5374 is a logical right shift, make an extraction. */
5375 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5376 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5378 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5379 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5380 0, in_code == COMPARE);
5383 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
5384 else if (GET_CODE (XEXP (x, 0)) == SUBREG
5385 && subreg_lowpart_p (XEXP (x, 0))
5386 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5387 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5389 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5391 new = make_extraction (mode, new, 0,
5392 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5393 0, in_code == COMPARE);
5395 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
5396 else if ((GET_CODE (XEXP (x, 0)) == XOR
5397 || GET_CODE (XEXP (x, 0)) == IOR)
5398 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
5399 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
5400 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5402 /* Apply the distributive law, and then try to make extractions. */
5403 new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
5404 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 0),
5406 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 1),
5408 new = make_compound_operation (new, in_code);
5411 /* If we are have (and (rotate X C) M) and C is larger than the number
5412 of bits in M, this is an extraction. */
5414 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5415 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5416 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5417 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
5419 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5420 new = make_extraction (mode, new,
5421 (GET_MODE_BITSIZE (mode)
5422 - INTVAL (XEXP (XEXP (x, 0), 1))),
5423 NULL_RTX, i, 1, 0, in_code == COMPARE);
5426 /* On machines without logical shifts, if the operand of the AND is
5427 a logical shift and our mask turns off all the propagated sign
5428 bits, we can replace the logical shift with an arithmetic shift. */
5429 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5430 && (lshr_optab->handlers[(int) mode].insn_code
5431 == CODE_FOR_nothing)
5432 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5433 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5434 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5435 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5436 && mode_width <= HOST_BITS_PER_WIDE_INT)
5438 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
5440 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
5441 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
5443 gen_rtx_combine (ASHIFTRT, mode,
5444 make_compound_operation (XEXP (XEXP (x, 0), 0),
5446 XEXP (XEXP (x, 0), 1)));
5449 /* If the constant is one less than a power of two, this might be
5450 representable by an extraction even if no shift is present.
5451 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
5452 we are in a COMPARE. */
5453 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5454 new = make_extraction (mode,
5455 make_compound_operation (XEXP (x, 0),
5457 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
5459 /* If we are in a comparison and this is an AND with a power of two,
5460 convert this into the appropriate bit extract. */
5461 else if (in_code == COMPARE
5462 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5463 new = make_extraction (mode,
5464 make_compound_operation (XEXP (x, 0),
5466 i, NULL_RTX, 1, 1, 0, 1);
5471 /* If the sign bit is known to be zero, replace this with an
5472 arithmetic shift. */
5473 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
5474 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5475 && mode_width <= HOST_BITS_PER_WIDE_INT
5476 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
5478 new = gen_rtx_combine (ASHIFTRT, mode,
5479 make_compound_operation (XEXP (x, 0),
5485 /* ... fall through ... */
5491 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
5492 this is a SIGN_EXTRACT. */
5493 if (GET_CODE (rhs) == CONST_INT
5494 && GET_CODE (lhs) == ASHIFT
5495 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
5496 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
5498 new = make_compound_operation (XEXP (lhs, 0), next_code);
5499 new = make_extraction (mode, new,
5500 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
5501 NULL_RTX, mode_width - INTVAL (rhs),
5502 code == LSHIFTRT, 0, in_code == COMPARE);
5505 /* See if we have operations between an ASHIFTRT and an ASHIFT.
5506 If so, try to merge the shifts into a SIGN_EXTEND. We could
5507 also do this for some cases of SIGN_EXTRACT, but it doesn't
5508 seem worth the effort; the case checked for occurs on Alpha. */
5510 if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
5511 && ! (GET_CODE (lhs) == SUBREG
5512 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
5513 && GET_CODE (rhs) == CONST_INT
5514 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
5515 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
5516 new = make_extraction (mode, make_compound_operation (new, next_code),
5517 0, NULL_RTX, mode_width - INTVAL (rhs),
5518 code == LSHIFTRT, 0, in_code == COMPARE);
5523 /* Call ourselves recursively on the inner expression. If we are
5524 narrowing the object and it has a different RTL code from
5525 what it originally did, do this SUBREG as a force_to_mode. */
5527 tem = make_compound_operation (SUBREG_REG (x), in_code);
5528 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
5529 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
5530 && subreg_lowpart_p (x))
5532 rtx newer = force_to_mode (tem, mode,
5533 GET_MODE_MASK (mode), NULL_RTX, 0);
5535 /* If we have something other than a SUBREG, we might have
5536 done an expansion, so rerun outselves. */
5537 if (GET_CODE (newer) != SUBREG)
5538 newer = make_compound_operation (newer, in_code);
5546 x = gen_lowpart_for_combine (mode, new);
5547 code = GET_CODE (x);
5550 /* Now recursively process each operand of this operation. */
5551 fmt = GET_RTX_FORMAT (code);
5552 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5555 new = make_compound_operation (XEXP (x, i), next_code);
5556 SUBST (XEXP (x, i), new);
5562 /* Given M see if it is a value that would select a field of bits
5563 within an item, but not the entire word. Return -1 if not.
5564 Otherwise, return the starting position of the field, where 0 is the
5567 *PLEN is set to the length of the field. */
5570 get_pos_from_mask (m, plen)
5571 unsigned HOST_WIDE_INT m;
5574 /* Get the bit number of the first 1 bit from the right, -1 if none. */
5575 int pos = exact_log2 (m & - m);
5580 /* Now shift off the low-order zero bits and see if we have a power of
5582 *plen = exact_log2 ((m >> pos) + 1);
5590 /* See if X can be simplified knowing that we will only refer to it in
5591 MODE and will only refer to those bits that are nonzero in MASK.
5592 If other bits are being computed or if masking operations are done
5593 that select a superset of the bits in MASK, they can sometimes be
5596 Return a possibly simplified expression, but always convert X to
5597 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
5599 Also, if REG is non-zero and X is a register equal in value to REG,
5602 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
5603 are all off in X. This is used when X will be complemented, by either
5604 NOT, NEG, or XOR. */
5607 force_to_mode (x, mode, mask, reg, just_select)
5609 enum machine_mode mode;
5610 unsigned HOST_WIDE_INT mask;
5614 enum rtx_code code = GET_CODE (x);
5615 int next_select = just_select || code == XOR || code == NOT || code == NEG;
5616 enum machine_mode op_mode;
5617 unsigned HOST_WIDE_INT fuller_mask, nonzero;
5620 /* If this is a CALL, don't do anything. Some of the code below
5621 will do the wrong thing since the mode of a CALL is VOIDmode. */
5625 /* We want to perform the operation is its present mode unless we know
5626 that the operation is valid in MODE, in which case we do the operation
5628 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
5629 && code_to_optab[(int) code] != 0
5630 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
5631 != CODE_FOR_nothing))
5632 ? mode : GET_MODE (x));
5634 /* It is not valid to do a right-shift in a narrower mode
5635 than the one it came in with. */
5636 if ((code == LSHIFTRT || code == ASHIFTRT)
5637 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
5638 op_mode = GET_MODE (x);
5640 /* Truncate MASK to fit OP_MODE. */
5642 mask &= GET_MODE_MASK (op_mode);
5644 /* When we have an arithmetic operation, or a shift whose count we
5645 do not know, we need to assume that all bit the up to the highest-order
5646 bit in MASK will be needed. This is how we form such a mask. */
5648 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
5649 ? GET_MODE_MASK (op_mode)
5650 : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
5652 fuller_mask = ~ (HOST_WIDE_INT) 0;
5654 /* Determine what bits of X are guaranteed to be (non)zero. */
5655 nonzero = nonzero_bits (x, mode);
5657 /* If none of the bits in X are needed, return a zero. */
5658 if (! just_select && (nonzero & mask) == 0)
5661 /* If X is a CONST_INT, return a new one. Do this here since the
5662 test below will fail. */
5663 if (GET_CODE (x) == CONST_INT)
5665 HOST_WIDE_INT cval = INTVAL (x) & mask;
5666 int width = GET_MODE_BITSIZE (mode);
5668 /* If MODE is narrower that HOST_WIDE_INT and CVAL is a negative
5669 number, sign extend it. */
5670 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
5671 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
5672 cval |= (HOST_WIDE_INT) -1 << width;
5674 return GEN_INT (cval);
5677 /* If X is narrower than MODE and we want all the bits in X's mode, just
5678 get X in the proper mode. */
5679 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
5680 && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
5681 return gen_lowpart_for_combine (mode, x);
5683 /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
5684 MASK are already known to be zero in X, we need not do anything. */
5685 if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
5691 /* If X is a (clobber (const_int)), return it since we know we are
5692 generating something that won't match. */
5695 #if ! BITS_BIG_ENDIAN
5697 /* X is a (use (mem ..)) that was made from a bit-field extraction that
5698 spanned the boundary of the MEM. If we are now masking so it is
5699 within that boundary, we don't need the USE any more. */
5700 if ((mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5701 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
5708 x = expand_compound_operation (x);
5709 if (GET_CODE (x) != code)
5710 return force_to_mode (x, mode, mask, reg, next_select);
5714 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
5715 || rtx_equal_p (reg, get_last_value (x))))
5720 if (subreg_lowpart_p (x)
5721 /* We can ignore the effect of this SUBREG if it narrows the mode or
5722 if the constant masks to zero all the bits the mode doesn't
5724 && ((GET_MODE_SIZE (GET_MODE (x))
5725 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
5727 & GET_MODE_MASK (GET_MODE (x))
5728 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
5729 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
5733 /* If this is an AND with a constant, convert it into an AND
5734 whose constant is the AND of that constant with MASK. If it
5735 remains an AND of MASK, delete it since it is redundant. */
5737 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5738 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
5740 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
5741 mask & INTVAL (XEXP (x, 1)));
5743 /* If X is still an AND, see if it is an AND with a mask that
5744 is just some low-order bits. If so, and it is MASK, we don't
5747 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5748 && INTVAL (XEXP (x, 1)) == mask)
5751 /* If it remains an AND, try making another AND with the bits
5752 in the mode mask that aren't in MASK turned on. If the
5753 constant in the AND is wide enough, this might make a
5754 cheaper constant. */
5756 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5757 && GET_MODE_MASK (GET_MODE (x)) != mask)
5759 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
5760 | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
5761 int width = GET_MODE_BITSIZE (GET_MODE (x));
5764 /* If MODE is narrower that HOST_WIDE_INT and CVAL is a negative
5765 number, sign extend it. */
5766 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
5767 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
5768 cval |= (HOST_WIDE_INT) -1 << width;
5770 y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
5771 if (rtx_cost (y, SET) < rtx_cost (x, SET))
5781 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5782 low-order bits (as in an alignment operation) and FOO is already
5783 aligned to that boundary, mask C1 to that boundary as well.
5784 This may eliminate that PLUS and, later, the AND. */
5785 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5786 && exact_log2 (- mask) >= 0
5787 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
5788 && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
5789 return force_to_mode (plus_constant (XEXP (x, 0),
5790 INTVAL (XEXP (x, 1)) & mask),
5791 mode, mask, reg, next_select);
5793 /* ... fall through ... */
5797 /* For PLUS, MINUS and MULT, we need any bits less significant than the
5798 most significant bit in MASK since carries from those bits will
5799 affect the bits we are interested in. */
5805 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5806 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5807 operation which may be a bitfield extraction. Ensure that the
5808 constant we form is not wider than the mode of X. */
5810 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5811 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5812 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5813 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5814 && GET_CODE (XEXP (x, 1)) == CONST_INT
5815 && ((INTVAL (XEXP (XEXP (x, 0), 1))
5816 + floor_log2 (INTVAL (XEXP (x, 1))))
5817 < GET_MODE_BITSIZE (GET_MODE (x)))
5818 && (INTVAL (XEXP (x, 1))
5819 & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x)) == 0))
5821 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
5822 << INTVAL (XEXP (XEXP (x, 0), 1)));
5823 temp = gen_binary (GET_CODE (x), GET_MODE (x),
5824 XEXP (XEXP (x, 0), 0), temp);
5825 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (x, 1));
5826 return force_to_mode (x, mode, mask, reg, next_select);
5830 /* For most binary operations, just propagate into the operation and
5831 change the mode if we have an operation of that mode. */
5833 op0 = gen_lowpart_for_combine (op_mode,
5834 force_to_mode (XEXP (x, 0), mode, mask,
5836 op1 = gen_lowpart_for_combine (op_mode,
5837 force_to_mode (XEXP (x, 1), mode, mask,
5840 /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
5841 MASK since OP1 might have been sign-extended but we never want
5842 to turn on extra bits, since combine might have previously relied
5843 on them being off. */
5844 if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
5845 && (INTVAL (op1) & mask) != 0)
5846 op1 = GEN_INT (INTVAL (op1) & mask);
5848 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
5849 x = gen_binary (code, op_mode, op0, op1);
5853 /* For left shifts, do the same, but just for the first operand.
5854 However, we cannot do anything with shifts where we cannot
5855 guarantee that the counts are smaller than the size of the mode
5856 because such a count will have a different meaning in a
5859 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
5860 && INTVAL (XEXP (x, 1)) >= 0
5861 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
5862 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
5863 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
5864 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
5867 /* If the shift count is a constant and we can do arithmetic in
5868 the mode of the shift, refine which bits we need. Otherwise, use the
5869 conservative form of the mask. */
5870 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5871 && INTVAL (XEXP (x, 1)) >= 0
5872 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
5873 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
5874 mask >>= INTVAL (XEXP (x, 1));
5878 op0 = gen_lowpart_for_combine (op_mode,
5879 force_to_mode (XEXP (x, 0), op_mode,
5880 mask, reg, next_select));
5882 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
5883 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
5887 /* Here we can only do something if the shift count is a constant,
5888 this shift constant is valid for the host, and we can do arithmetic
5891 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5892 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5893 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
5895 rtx inner = XEXP (x, 0);
5897 /* Select the mask of the bits we need for the shift operand. */
5898 mask <<= INTVAL (XEXP (x, 1));
5900 /* We can only change the mode of the shift if we can do arithmetic
5901 in the mode of the shift and MASK is no wider than the width of
5903 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
5904 || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
5905 op_mode = GET_MODE (x);
5907 inner = force_to_mode (inner, op_mode, mask, reg, next_select);
5909 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
5910 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
5913 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
5914 shift and AND produces only copies of the sign bit (C2 is one less
5915 than a power of two), we can do this with just a shift. */
5917 if (GET_CODE (x) == LSHIFTRT
5918 && GET_CODE (XEXP (x, 1)) == CONST_INT
5919 && ((INTVAL (XEXP (x, 1))
5920 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
5921 >= GET_MODE_BITSIZE (GET_MODE (x)))
5922 && exact_log2 (mask + 1) >= 0
5923 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5924 >= exact_log2 (mask + 1)))
5925 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
5926 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
5927 - exact_log2 (mask + 1)));
5931 /* If we are just looking for the sign bit, we don't need this shift at
5932 all, even if it has a variable count. */
5933 if (mask == ((HOST_WIDE_INT) 1
5934 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))
5935 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
5937 /* If this is a shift by a constant, get a mask that contains those bits
5938 that are not copies of the sign bit. We then have two cases: If
5939 MASK only includes those bits, this can be a logical shift, which may
5940 allow simplifications. If MASK is a single-bit field not within
5941 those bits, we are requesting a copy of the sign bit and hence can
5942 shift the sign bit to the appropriate location. */
5944 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
5945 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
5949 nonzero = GET_MODE_MASK (GET_MODE (x));
5950 nonzero >>= INTVAL (XEXP (x, 1));
5952 if ((mask & ~ nonzero) == 0
5953 || (i = exact_log2 (mask)) >= 0)
5955 x = simplify_shift_const
5956 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
5957 i < 0 ? INTVAL (XEXP (x, 1))
5958 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
5960 if (GET_CODE (x) != ASHIFTRT)
5961 return force_to_mode (x, mode, mask, reg, next_select);
5965 /* If MASK is 1, convert this to a LSHIFTRT. This can be done
5966 even if the shift count isn't a constant. */
5968 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
5970 /* If this is a sign-extension operation that just affects bits
5971 we don't care about, remove it. Be sure the call above returned
5972 something that is still a shift. */
5974 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
5975 && GET_CODE (XEXP (x, 1)) == CONST_INT
5976 && INTVAL (XEXP (x, 1)) >= 0
5977 && (INTVAL (XEXP (x, 1))
5978 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
5979 && GET_CODE (XEXP (x, 0)) == ASHIFT
5980 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5981 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
5982 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
5989 /* If the shift count is constant and we can do computations
5990 in the mode of X, compute where the bits we care about are.
5991 Otherwise, we can't do anything. Don't change the mode of
5992 the shift or propagate MODE into the shift, though. */
5993 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5994 && INTVAL (XEXP (x, 1)) >= 0)
5996 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
5997 GET_MODE (x), GEN_INT (mask),
5999 if (temp && GET_CODE(temp) == CONST_INT)
6001 force_to_mode (XEXP (x, 0), GET_MODE (x),
6002 INTVAL (temp), reg, next_select));
6007 /* If we just want the low-order bit, the NEG isn't needed since it
6008 won't change the low-order bit. */
6010 return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
6012 /* We need any bits less significant than the most significant bit in
6013 MASK since carries from those bits will affect the bits we are
6019 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
6020 same as the XOR case above. Ensure that the constant we form is not
6021 wider than the mode of X. */
6023 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6024 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6025 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6026 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
6027 < GET_MODE_BITSIZE (GET_MODE (x)))
6028 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
6030 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
6031 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
6032 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
6034 return force_to_mode (x, mode, mask, reg, next_select);
6038 op0 = gen_lowpart_for_combine (op_mode,
6039 force_to_mode (XEXP (x, 0), mode, mask,
6041 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6042 x = gen_unary (code, op_mode, op_mode, op0);
6046 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
6047 in STORE_FLAG_VALUE and FOO has no bits that might be nonzero not
6049 if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 0) == const0_rtx
6050 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0)
6051 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6056 /* We have no way of knowing if the IF_THEN_ELSE can itself be
6057 written in a narrower mode. We play it safe and do not do so. */
6060 gen_lowpart_for_combine (GET_MODE (x),
6061 force_to_mode (XEXP (x, 1), mode,
6062 mask, reg, next_select)));
6064 gen_lowpart_for_combine (GET_MODE (x),
6065 force_to_mode (XEXP (x, 2), mode,
6066 mask, reg,next_select)));
6070 /* Ensure we return a value of the proper mode. */
6071 return gen_lowpart_for_combine (mode, x);
6074 /* Return nonzero if X is an expression that has one of two values depending on
6075 whether some other value is zero or nonzero. In that case, we return the
6076 value that is being tested, *PTRUE is set to the value if the rtx being
6077 returned has a nonzero value, and *PFALSE is set to the other alternative.
6079 If we return zero, we set *PTRUE and *PFALSE to X. */
6082 if_then_else_cond (x, ptrue, pfalse)
6084 rtx *ptrue, *pfalse;
6086 enum machine_mode mode = GET_MODE (x);
6087 enum rtx_code code = GET_CODE (x);
6088 int size = GET_MODE_BITSIZE (mode);
6089 rtx cond0, cond1, true0, true1, false0, false1;
6090 unsigned HOST_WIDE_INT nz;
6092 /* If this is a unary operation whose operand has one of two values, apply
6093 our opcode to compute those values. */
6094 if (GET_RTX_CLASS (code) == '1'
6095 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
6097 *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
6098 *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
6102 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
6103 make can't possibly match and would supress other optimizations. */
6104 else if (code == COMPARE)
6107 /* If this is a binary operation, see if either side has only one of two
6108 values. If either one does or if both do and they are conditional on
6109 the same value, compute the new true and false values. */
6110 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
6111 || GET_RTX_CLASS (code) == '<')
6113 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
6114 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
6116 if ((cond0 != 0 || cond1 != 0)
6117 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
6119 *ptrue = gen_binary (code, mode, true0, true1);
6120 *pfalse = gen_binary (code, mode, false0, false1);
6121 return cond0 ? cond0 : cond1;
6124 #if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
6126 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
6127 operands is zero when the other is non-zero, and vice-versa. */
6129 if ((code == PLUS || code == IOR || code == XOR || code == MINUS
6131 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6133 rtx op0 = XEXP (XEXP (x, 0), 1);
6134 rtx op1 = XEXP (XEXP (x, 1), 1);
6136 cond0 = XEXP (XEXP (x, 0), 0);
6137 cond1 = XEXP (XEXP (x, 1), 0);
6139 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6140 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6141 && reversible_comparison_p (cond1)
6142 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6143 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6144 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6145 || ((swap_condition (GET_CODE (cond0))
6146 == reverse_condition (GET_CODE (cond1)))
6147 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6148 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6149 && ! side_effects_p (x))
6151 *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
6152 *pfalse = gen_binary (MULT, mode,
6154 ? gen_unary (NEG, mode, mode, op1) : op1),
6160 /* Similarly for MULT, AND and UMIN, execpt that for these the result
6162 if ((code == MULT || code == AND || code == UMIN)
6163 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6165 cond0 = XEXP (XEXP (x, 0), 0);
6166 cond1 = XEXP (XEXP (x, 1), 0);
6168 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6169 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6170 && reversible_comparison_p (cond1)
6171 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6172 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6173 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6174 || ((swap_condition (GET_CODE (cond0))
6175 == reverse_condition (GET_CODE (cond1)))
6176 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6177 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6178 && ! side_effects_p (x))
6180 *ptrue = *pfalse = const0_rtx;
6187 else if (code == IF_THEN_ELSE)
6189 /* If we have IF_THEN_ELSE already, extract the condition and
6190 canonicalize it if it is NE or EQ. */
6191 cond0 = XEXP (x, 0);
6192 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
6193 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
6194 return XEXP (cond0, 0);
6195 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
6197 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
6198 return XEXP (cond0, 0);
6204 /* If X is a normal SUBREG with both inner and outer modes integral,
6205 we can narrow both the true and false values of the inner expression,
6206 if there is a condition. */
6207 else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
6208 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
6209 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
6210 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
6213 *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6215 = force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6220 /* If X is a constant, this isn't special and will cause confusions
6221 if we treat it as such. Likewise if it is equivalent to a constant. */
6222 else if (CONSTANT_P (x)
6223 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
6226 /* If X is known to be either 0 or -1, those are the true and
6227 false values when testing X. */
6228 else if (num_sign_bit_copies (x, mode) == size)
6230 *ptrue = constm1_rtx, *pfalse = const0_rtx;
6234 /* Likewise for 0 or a single bit. */
6235 else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
6237 *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
6241 /* Otherwise fail; show no condition with true and false values the same. */
6242 *ptrue = *pfalse = x;
6246 /* Return the value of expression X given the fact that condition COND
6247 is known to be true when applied to REG as its first operand and VAL
6248 as its second. X is known to not be shared and so can be modified in
6251 We only handle the simplest cases, and specifically those cases that
6252 arise with IF_THEN_ELSE expressions. */
6255 known_cond (x, cond, reg, val)
6260 enum rtx_code code = GET_CODE (x);
6265 if (side_effects_p (x))
6268 if (cond == EQ && rtx_equal_p (x, reg))
6271 /* If X is (abs REG) and we know something about REG's relationship
6272 with zero, we may be able to simplify this. */
6274 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
6277 case GE: case GT: case EQ:
6280 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
6284 /* The only other cases we handle are MIN, MAX, and comparisons if the
6285 operands are the same as REG and VAL. */
6287 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
6289 if (rtx_equal_p (XEXP (x, 0), val))
6290 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
6292 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
6294 if (GET_RTX_CLASS (code) == '<')
6295 return (comparison_dominates_p (cond, code) ? const_true_rtx
6296 : (comparison_dominates_p (cond,
6297 reverse_condition (code))
6300 else if (code == SMAX || code == SMIN
6301 || code == UMIN || code == UMAX)
6303 int unsignedp = (code == UMIN || code == UMAX);
6305 if (code == SMAX || code == UMAX)
6306 cond = reverse_condition (cond);
6311 return unsignedp ? x : XEXP (x, 1);
6313 return unsignedp ? x : XEXP (x, 0);
6315 return unsignedp ? XEXP (x, 1) : x;
6317 return unsignedp ? XEXP (x, 0) : x;
6323 fmt = GET_RTX_FORMAT (code);
6324 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6327 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
6328 else if (fmt[i] == 'E')
6329 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6330 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
6337 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
6338 Return that assignment if so.
6340 We only handle the most common cases. */
6343 make_field_assignment (x)
6346 rtx dest = SET_DEST (x);
6347 rtx src = SET_SRC (x);
6352 enum machine_mode mode;
6354 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
6355 a clear of a one-bit field. We will have changed it to
6356 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
6359 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
6360 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
6361 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
6362 && (rtx_equal_p (dest, XEXP (src, 1))
6363 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6364 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6366 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
6368 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6371 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
6372 && subreg_lowpart_p (XEXP (src, 0))
6373 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
6374 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
6375 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
6376 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
6377 && (rtx_equal_p (dest, XEXP (src, 1))
6378 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6379 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6381 assign = make_extraction (VOIDmode, dest, 0,
6382 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
6384 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6387 /* If SRC is (ior (ashift (const_int 1) POS DEST)), this is a set of a
6389 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
6390 && XEXP (XEXP (src, 0), 0) == const1_rtx
6391 && (rtx_equal_p (dest, XEXP (src, 1))
6392 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6393 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6395 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
6397 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
6400 /* The other case we handle is assignments into a constant-position
6401 field. They look like (ior (and DEST C1) OTHER). If C1 represents
6402 a mask that has all one bits except for a group of zero bits and
6403 OTHER is known to have zeros where C1 has ones, this is such an
6404 assignment. Compute the position and length from C1. Shift OTHER
6405 to the appropriate position, force it to the required mode, and
6406 make the extraction. Check for the AND in both operands. */
6408 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
6409 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
6410 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
6411 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
6412 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 1)), dest)))
6413 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
6414 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
6415 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
6416 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
6417 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
6418 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
6420 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
6424 pos = get_pos_from_mask (c1 ^ GET_MODE_MASK (GET_MODE (dest)), &len);
6425 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
6426 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
6427 && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
6430 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
6432 /* The mode to use for the source is the mode of the assignment, or of
6433 what is inside a possible STRICT_LOW_PART. */
6434 mode = (GET_CODE (assign) == STRICT_LOW_PART
6435 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
6437 /* Shift OTHER right POS places and make it the source, restricting it
6438 to the proper length and mode. */
6440 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
6441 GET_MODE (src), other, pos),
6443 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
6444 ? GET_MODE_MASK (mode)
6445 : ((HOST_WIDE_INT) 1 << len) - 1,
6448 return gen_rtx_combine (SET, VOIDmode, assign, src);
6451 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
6455 apply_distributive_law (x)
6458 enum rtx_code code = GET_CODE (x);
6459 rtx lhs, rhs, other;
6461 enum rtx_code inner_code;
6463 /* Distributivity is not true for floating point.
6464 It can change the value. So don't do it.
6465 -- rms and moshier@world.std.com. */
6466 if (FLOAT_MODE_P (GET_MODE (x)))
6469 /* The outer operation can only be one of the following: */
6470 if (code != IOR && code != AND && code != XOR
6471 && code != PLUS && code != MINUS)
6474 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
6476 /* If either operand is a primitive we can't do anything, so get out fast. */
6477 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
6478 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
6481 lhs = expand_compound_operation (lhs);
6482 rhs = expand_compound_operation (rhs);
6483 inner_code = GET_CODE (lhs);
6484 if (inner_code != GET_CODE (rhs))
6487 /* See if the inner and outer operations distribute. */
6494 /* These all distribute except over PLUS. */
6495 if (code == PLUS || code == MINUS)
6500 if (code != PLUS && code != MINUS)
6505 /* This is also a multiply, so it distributes over everything. */
6509 /* Non-paradoxical SUBREGs distributes over all operations, provided
6510 the inner modes and word numbers are the same, this is an extraction
6511 of a low-order part, we don't convert an fp operation to int or
6512 vice versa, and we would not be converting a single-word
6513 operation into a multi-word operation. The latter test is not
6514 required, but it prevents generating unneeded multi-word operations.
6515 Some of the previous tests are redundant given the latter test, but
6516 are retained because they are required for correctness.
6518 We produce the result slightly differently in this case. */
6520 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
6521 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
6522 || ! subreg_lowpart_p (lhs)
6523 || (GET_MODE_CLASS (GET_MODE (lhs))
6524 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
6525 || (GET_MODE_SIZE (GET_MODE (lhs))
6526 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
6527 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
6530 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
6531 SUBREG_REG (lhs), SUBREG_REG (rhs));
6532 return gen_lowpart_for_combine (GET_MODE (x), tem);
6538 /* Set LHS and RHS to the inner operands (A and B in the example
6539 above) and set OTHER to the common operand (C in the example).
6540 These is only one way to do this unless the inner operation is
6542 if (GET_RTX_CLASS (inner_code) == 'c'
6543 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
6544 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
6545 else if (GET_RTX_CLASS (inner_code) == 'c'
6546 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
6547 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
6548 else if (GET_RTX_CLASS (inner_code) == 'c'
6549 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
6550 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
6551 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
6552 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
6556 /* Form the new inner operation, seeing if it simplifies first. */
6557 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
6559 /* There is one exception to the general way of distributing:
6560 (a ^ b) | (a ^ c) -> (~a) & (b ^ c) */
6561 if (code == XOR && inner_code == IOR)
6564 other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
6567 /* We may be able to continuing distributing the result, so call
6568 ourselves recursively on the inner operation before forming the
6569 outer operation, which we return. */
6570 return gen_binary (inner_code, GET_MODE (x),
6571 apply_distributive_law (tem), other);
6574 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
6577 Return an equivalent form, if different from X. Otherwise, return X. If
6578 X is zero, we are to always construct the equivalent form. */
6581 simplify_and_const_int (x, mode, varop, constop)
6583 enum machine_mode mode;
6585 unsigned HOST_WIDE_INT constop;
6587 unsigned HOST_WIDE_INT nonzero;
6590 /* Simplify VAROP knowing that we will be only looking at some of the
6592 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
6594 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
6595 CONST_INT, we are done. */
6596 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
6599 /* See what bits may be nonzero in VAROP. Unlike the general case of
6600 a call to nonzero_bits, here we don't care about bits outside
6603 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
6605 /* Turn off all bits in the constant that are known to already be zero.
6606 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
6607 which is tested below. */
6611 /* If we don't have any bits left, return zero. */
6615 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
6616 a power of two, we can replace this with a ASHIFT. */
6617 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
6618 && (i = exact_log2 (constop)) >= 0)
6619 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
6621 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
6622 or XOR, then try to apply the distributive law. This may eliminate
6623 operations if either branch can be simplified because of the AND.
6624 It may also make some cases more complex, but those cases probably
6625 won't match a pattern either with or without this. */
6627 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
6629 gen_lowpart_for_combine
6631 apply_distributive_law
6632 (gen_binary (GET_CODE (varop), GET_MODE (varop),
6633 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6634 XEXP (varop, 0), constop),
6635 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6636 XEXP (varop, 1), constop))));
6638 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
6639 if we already had one (just check for the simplest cases). */
6640 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6641 && GET_MODE (XEXP (x, 0)) == mode
6642 && SUBREG_REG (XEXP (x, 0)) == varop)
6643 varop = XEXP (x, 0);
6645 varop = gen_lowpart_for_combine (mode, varop);
6647 /* If we can't make the SUBREG, try to return what we were given. */
6648 if (GET_CODE (varop) == CLOBBER)
6649 return x ? x : varop;
6651 /* If we are only masking insignificant bits, return VAROP. */
6652 if (constop == nonzero)
6655 /* Otherwise, return an AND. See how much, if any, of X we can use. */
6656 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6657 x = gen_binary (AND, mode, varop, GEN_INT (constop));
6661 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6662 || INTVAL (XEXP (x, 1)) != constop)
6663 SUBST (XEXP (x, 1), GEN_INT (constop));
6665 SUBST (XEXP (x, 0), varop);
6671 /* Given an expression, X, compute which bits in X can be non-zero.
6672 We don't care about bits outside of those defined in MODE.
6674 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
6675 a shift, AND, or zero_extract, we can do better. */
6677 static unsigned HOST_WIDE_INT
6678 nonzero_bits (x, mode)
6680 enum machine_mode mode;
6682 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
6683 unsigned HOST_WIDE_INT inner_nz;
6685 int mode_width = GET_MODE_BITSIZE (mode);
6688 /* For floating-point values, assume all bits are needed. */
6689 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
6692 /* If X is wider than MODE, use its mode instead. */
6693 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
6695 mode = GET_MODE (x);
6696 nonzero = GET_MODE_MASK (mode);
6697 mode_width = GET_MODE_BITSIZE (mode);
6700 if (mode_width > HOST_BITS_PER_WIDE_INT)
6701 /* Our only callers in this case look for single bit values. So
6702 just return the mode mask. Those tests will then be false. */
6705 #ifndef WORD_REGISTER_OPERATIONS
6706 /* If MODE is wider than X, but both are a single word for both the host
6707 and target machines, we can compute this from which bits of the
6708 object might be nonzero in its own mode, taking into account the fact
6709 that on many CISC machines, accessing an object in a wider mode
6710 causes the high-order bits to become undefined. So they are
6711 not known to be zero. */
6713 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
6714 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
6715 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6716 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
6718 nonzero &= nonzero_bits (x, GET_MODE (x));
6719 nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
6724 code = GET_CODE (x);
6728 #ifdef STACK_BOUNDARY
6729 /* If this is the stack pointer, we may know something about its
6730 alignment. If PUSH_ROUNDING is defined, it is possible for the
6731 stack to be momentarily aligned only to that amount, so we pick
6732 the least alignment. */
6734 if (x == stack_pointer_rtx)
6736 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6738 #ifdef PUSH_ROUNDING
6739 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
6742 return nonzero & ~ (sp_alignment - 1);
6746 /* If X is a register whose nonzero bits value is current, use it.
6747 Otherwise, if X is a register whose value we can find, use that
6748 value. Otherwise, use the previously-computed global nonzero bits
6749 for this register. */
6751 if (reg_last_set_value[REGNO (x)] != 0
6752 && reg_last_set_mode[REGNO (x)] == mode
6753 && (reg_n_sets[REGNO (x)] == 1
6754 || reg_last_set_label[REGNO (x)] == label_tick)
6755 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6756 return reg_last_set_nonzero_bits[REGNO (x)];
6758 tem = get_last_value (x);
6762 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6763 /* If X is narrower than MODE and TEM is a non-negative
6764 constant that would appear negative in the mode of X,
6765 sign-extend it for use in reg_nonzero_bits because some
6766 machines (maybe most) will actually do the sign-extension
6767 and this is the conservative approach.
6769 ??? For 2.5, try to tighten up the MD files in this regard
6770 instead of this kludge. */
6772 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
6773 && GET_CODE (tem) == CONST_INT
6775 && 0 != (INTVAL (tem)
6776 & ((HOST_WIDE_INT) 1
6777 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
6778 tem = GEN_INT (INTVAL (tem)
6779 | ((HOST_WIDE_INT) (-1)
6780 << GET_MODE_BITSIZE (GET_MODE (x))));
6782 return nonzero_bits (tem, mode);
6784 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
6785 return reg_nonzero_bits[REGNO (x)] & nonzero;
6790 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6791 /* If X is negative in MODE, sign-extend the value. */
6792 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
6793 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
6794 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
6800 #ifdef LOAD_EXTEND_OP
6801 /* In many, if not most, RISC machines, reading a byte from memory
6802 zeros the rest of the register. Noticing that fact saves a lot
6803 of extra zero-extends. */
6804 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
6805 nonzero &= GET_MODE_MASK (GET_MODE (x));
6815 /* If this produces an integer result, we know which bits are set.
6816 Code here used to clear bits outside the mode of X, but that is
6819 if (GET_MODE_CLASS (mode) == MODE_INT
6820 && mode_width <= HOST_BITS_PER_WIDE_INT)
6821 nonzero = STORE_FLAG_VALUE;
6825 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6826 == GET_MODE_BITSIZE (GET_MODE (x)))
6829 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
6830 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
6834 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6835 == GET_MODE_BITSIZE (GET_MODE (x)))
6840 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
6844 nonzero &= nonzero_bits (XEXP (x, 0), mode);
6845 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6846 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6850 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6851 Otherwise, show all the bits in the outer mode but not the inner
6853 inner_nz = nonzero_bits (XEXP (x, 0), mode);
6854 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6856 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6859 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
6860 inner_nz |= (GET_MODE_MASK (mode)
6861 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6864 nonzero &= inner_nz;
6868 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6869 & nonzero_bits (XEXP (x, 1), mode));
6873 case UMIN: case UMAX: case SMIN: case SMAX:
6874 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6875 | nonzero_bits (XEXP (x, 1), mode));
6878 case PLUS: case MINUS:
6880 case DIV: case UDIV:
6881 case MOD: case UMOD:
6882 /* We can apply the rules of arithmetic to compute the number of
6883 high- and low-order zero bits of these operations. We start by
6884 computing the width (position of the highest-order non-zero bit)
6885 and the number of low-order zero bits for each value. */
6887 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
6888 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
6889 int width0 = floor_log2 (nz0) + 1;
6890 int width1 = floor_log2 (nz1) + 1;
6891 int low0 = floor_log2 (nz0 & -nz0);
6892 int low1 = floor_log2 (nz1 & -nz1);
6893 HOST_WIDE_INT op0_maybe_minusp
6894 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6895 HOST_WIDE_INT op1_maybe_minusp
6896 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6897 int result_width = mode_width;
6903 result_width = MAX (width0, width1) + 1;
6904 result_low = MIN (low0, low1);
6907 result_low = MIN (low0, low1);
6910 result_width = width0 + width1;
6911 result_low = low0 + low1;
6914 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6915 result_width = width0;
6918 result_width = width0;
6921 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6922 result_width = MIN (width0, width1);
6923 result_low = MIN (low0, low1);
6926 result_width = MIN (width0, width1);
6927 result_low = MIN (low0, low1);
6931 if (result_width < mode_width)
6932 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
6935 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
6940 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6941 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6942 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
6946 /* If this is a SUBREG formed for a promoted variable that has
6947 been zero-extended, we know that at least the high-order bits
6948 are zero, though others might be too. */
6950 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
6951 nonzero = (GET_MODE_MASK (GET_MODE (x))
6952 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
6954 /* If the inner mode is a single word for both the host and target
6955 machines, we can compute this from which bits of the inner
6956 object might be nonzero. */
6957 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
6958 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6959 <= HOST_BITS_PER_WIDE_INT))
6961 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
6963 #ifndef WORD_REGISTER_OPERATIONS
6964 /* On many CISC machines, accessing an object in a wider mode
6965 causes the high-order bits to become undefined. So they are
6966 not known to be zero. */
6967 if (GET_MODE_SIZE (GET_MODE (x))
6968 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6969 nonzero |= (GET_MODE_MASK (GET_MODE (x))
6970 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
6979 /* The nonzero bits are in two classes: any bits within MODE
6980 that aren't in GET_MODE (x) are always significant. The rest of the
6981 nonzero bits are those that are significant in the operand of
6982 the shift when shifted the appropriate number of bits. This
6983 shows that high-order bits are cleared by the right shift and
6984 low-order bits by left shifts. */
6985 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6986 && INTVAL (XEXP (x, 1)) >= 0
6987 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6989 enum machine_mode inner_mode = GET_MODE (x);
6990 int width = GET_MODE_BITSIZE (inner_mode);
6991 int count = INTVAL (XEXP (x, 1));
6992 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
6993 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
6994 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
6995 unsigned HOST_WIDE_INT outer = 0;
6997 if (mode_width > width)
6998 outer = (op_nonzero & nonzero & ~ mode_mask);
7000 if (code == LSHIFTRT)
7002 else if (code == ASHIFTRT)
7006 /* If the sign bit may have been nonzero before the shift, we
7007 need to mark all the places it could have been copied to
7008 by the shift as possibly nonzero. */
7009 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
7010 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
7012 else if (code == ASHIFT)
7015 inner = ((inner << (count % width)
7016 | (inner >> (width - (count % width)))) & mode_mask);
7018 nonzero &= (outer | inner);
7023 /* This is at most the number of bits in the mode. */
7024 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
7028 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
7029 | nonzero_bits (XEXP (x, 2), mode));
7036 /* Return the number of bits at the high-order end of X that are known to
7037 be equal to the sign bit. X will be used in mode MODE; if MODE is
7038 VOIDmode, X will be used in its own mode. The returned value will always
7039 be between 1 and the number of bits in MODE. */
7042 num_sign_bit_copies (x, mode)
7044 enum machine_mode mode;
7046 enum rtx_code code = GET_CODE (x);
7048 int num0, num1, result;
7049 unsigned HOST_WIDE_INT nonzero;
7052 /* If we weren't given a mode, use the mode of X. If the mode is still
7053 VOIDmode, we don't know anything. Likewise if one of the modes is
7056 if (mode == VOIDmode)
7057 mode = GET_MODE (x);
7059 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
7062 bitwidth = GET_MODE_BITSIZE (mode);
7064 /* For a smaller object, just ignore the high bits. */
7065 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
7066 return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
7067 - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
7069 #ifndef WORD_REGISTER_OPERATIONS
7070 /* If this machine does not do all register operations on the entire
7071 register and MODE is wider than the mode of X, we can say nothing
7072 at all about the high-order bits. */
7073 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
7081 if (reg_last_set_value[REGNO (x)] != 0
7082 && reg_last_set_mode[REGNO (x)] == mode
7083 && (reg_n_sets[REGNO (x)] == 1
7084 || reg_last_set_label[REGNO (x)] == label_tick)
7085 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7086 return reg_last_set_sign_bit_copies[REGNO (x)];
7088 tem = get_last_value (x);
7090 return num_sign_bit_copies (tem, mode);
7092 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
7093 return reg_sign_bit_copies[REGNO (x)];
7097 #ifdef LOAD_EXTEND_OP
7098 /* Some RISC machines sign-extend all loads of smaller than a word. */
7099 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
7100 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
7105 /* If the constant is negative, take its 1's complement and remask.
7106 Then see how many zero bits we have. */
7107 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
7108 if (bitwidth <= HOST_BITS_PER_WIDE_INT
7109 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7110 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
7112 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
7115 /* If this is a SUBREG for a promoted object that is sign-extended
7116 and we are looking at it in a wider mode, we know that at least the
7117 high-order bits are known to be sign bit copies. */
7119 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
7120 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
7121 num_sign_bit_copies (SUBREG_REG (x), mode));
7123 /* For a smaller object, just ignore the high bits. */
7124 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
7126 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
7127 return MAX (1, (num0
7128 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7132 #ifdef WORD_REGISTER_OPERATIONS
7133 /* For paradoxical SUBREGs on machines where all register operations
7134 affect the entire register, just look inside. Note that we are
7135 passing MODE to the recursive call, so the number of sign bit copies
7136 will remain relative to that mode, not the inner mode. */
7138 if (GET_MODE_SIZE (GET_MODE (x))
7139 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7140 return num_sign_bit_copies (SUBREG_REG (x), mode);
7145 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
7146 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
7150 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7151 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
7154 /* For a smaller object, just ignore the high bits. */
7155 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
7156 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7160 return num_sign_bit_copies (XEXP (x, 0), mode);
7162 case ROTATE: case ROTATERT:
7163 /* If we are rotating left by a number of bits less than the number
7164 of sign bit copies, we can just subtract that amount from the
7166 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7167 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
7169 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7170 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
7171 : bitwidth - INTVAL (XEXP (x, 1))));
7176 /* In general, this subtracts one sign bit copy. But if the value
7177 is known to be positive, the number of sign bit copies is the
7178 same as that of the input. Finally, if the input has just one bit
7179 that might be nonzero, all the bits are copies of the sign bit. */
7180 nonzero = nonzero_bits (XEXP (x, 0), mode);
7184 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7186 && bitwidth <= HOST_BITS_PER_WIDE_INT
7187 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
7192 case IOR: case AND: case XOR:
7193 case SMIN: case SMAX: case UMIN: case UMAX:
7194 /* Logical operations will preserve the number of sign-bit copies.
7195 MIN and MAX operations always return one of the operands. */
7196 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7197 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7198 return MIN (num0, num1);
7200 case PLUS: case MINUS:
7201 /* For addition and subtraction, we can have a 1-bit carry. However,
7202 if we are subtracting 1 from a positive number, there will not
7203 be such a carry. Furthermore, if the positive number is known to
7204 be 0 or 1, we know the result is either -1 or 0. */
7206 if (code == PLUS && XEXP (x, 1) == constm1_rtx
7207 && bitwidth <= HOST_BITS_PER_WIDE_INT)
7209 nonzero = nonzero_bits (XEXP (x, 0), mode);
7210 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
7211 return (nonzero == 1 || nonzero == 0 ? bitwidth
7212 : bitwidth - floor_log2 (nonzero) - 1);
7215 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7216 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7217 return MAX (1, MIN (num0, num1) - 1);
7220 /* The number of bits of the product is the sum of the number of
7221 bits of both terms. However, unless one of the terms if known
7222 to be positive, we must allow for an additional bit since negating
7223 a negative number can remove one sign bit copy. */
7225 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7226 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7228 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
7230 && bitwidth <= HOST_BITS_PER_WIDE_INT
7231 && ((nonzero_bits (XEXP (x, 0), mode)
7232 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7233 && (nonzero_bits (XEXP (x, 1), mode)
7234 & ((HOST_WIDE_INT) 1 << (bitwidth - 1)) != 0))
7237 return MAX (1, result);
7240 /* The result must be <= the first operand. */
7241 return num_sign_bit_copies (XEXP (x, 0), mode);
7244 /* The result must be <= the scond operand. */
7245 return num_sign_bit_copies (XEXP (x, 1), mode);
7248 /* Similar to unsigned division, except that we have to worry about
7249 the case where the divisor is negative, in which case we have
7251 result = num_sign_bit_copies (XEXP (x, 0), mode);
7253 && bitwidth <= HOST_BITS_PER_WIDE_INT
7254 && (nonzero_bits (XEXP (x, 1), mode)
7255 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7261 result = num_sign_bit_copies (XEXP (x, 1), mode);
7263 && bitwidth <= HOST_BITS_PER_WIDE_INT
7264 && (nonzero_bits (XEXP (x, 1), mode)
7265 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7271 /* Shifts by a constant add to the number of bits equal to the
7273 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7274 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7275 && INTVAL (XEXP (x, 1)) > 0)
7276 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
7281 /* Left shifts destroy copies. */
7282 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7283 || INTVAL (XEXP (x, 1)) < 0
7284 || INTVAL (XEXP (x, 1)) >= bitwidth)
7287 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7288 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
7291 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
7292 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
7293 return MIN (num0, num1);
7295 #if STORE_FLAG_VALUE == -1
7296 case EQ: case NE: case GE: case GT: case LE: case LT:
7297 case GEU: case GTU: case LEU: case LTU:
7302 /* If we haven't been able to figure it out by one of the above rules,
7303 see if some of the high-order bits are known to be zero. If so,
7304 count those bits and return one less than that amount. If we can't
7305 safely compute the mask for this mode, always return BITWIDTH. */
7307 if (bitwidth > HOST_BITS_PER_WIDE_INT)
7310 nonzero = nonzero_bits (x, mode);
7311 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
7312 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
7315 /* Return the number of "extended" bits there are in X, when interpreted
7316 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
7317 unsigned quantities, this is the number of high-order zero bits.
7318 For signed quantities, this is the number of copies of the sign bit
7319 minus 1. In both case, this function returns the number of "spare"
7320 bits. For example, if two quantities for which this function returns
7321 at least 1 are added, the addition is known not to overflow.
7323 This function will always return 0 unless called during combine, which
7324 implies that it must be called from a define_split. */
7327 extended_count (x, mode, unsignedp)
7329 enum machine_mode mode;
7332 if (nonzero_sign_valid == 0)
7336 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7337 && (GET_MODE_BITSIZE (mode) - 1
7338 - floor_log2 (nonzero_bits (x, mode))))
7339 : num_sign_bit_copies (x, mode) - 1);
7342 /* This function is called from `simplify_shift_const' to merge two
7343 outer operations. Specifically, we have already found that we need
7344 to perform operation *POP0 with constant *PCONST0 at the outermost
7345 position. We would now like to also perform OP1 with constant CONST1
7346 (with *POP0 being done last).
7348 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
7349 the resulting operation. *PCOMP_P is set to 1 if we would need to
7350 complement the innermost operand, otherwise it is unchanged.
7352 MODE is the mode in which the operation will be done. No bits outside
7353 the width of this mode matter. It is assumed that the width of this mode
7354 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
7356 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
7357 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
7358 result is simply *PCONST0.
7360 If the resulting operation cannot be expressed as one operation, we
7361 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
7364 merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
7365 enum rtx_code *pop0;
7366 HOST_WIDE_INT *pconst0;
7368 HOST_WIDE_INT const1;
7369 enum machine_mode mode;
7372 enum rtx_code op0 = *pop0;
7373 HOST_WIDE_INT const0 = *pconst0;
7375 const0 &= GET_MODE_MASK (mode);
7376 const1 &= GET_MODE_MASK (mode);
7378 /* If OP0 is an AND, clear unimportant bits in CONST1. */
7382 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
7385 if (op1 == NIL || op0 == SET)
7388 else if (op0 == NIL)
7389 op0 = op1, const0 = const1;
7391 else if (op0 == op1)
7413 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
7414 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
7417 /* If the two constants aren't the same, we can't do anything. The
7418 remaining six cases can all be done. */
7419 else if (const0 != const1)
7427 /* (a & b) | b == b */
7429 else /* op1 == XOR */
7430 /* (a ^ b) | b == a | b */
7436 /* (a & b) ^ b == (~a) & b */
7437 op0 = AND, *pcomp_p = 1;
7438 else /* op1 == IOR */
7439 /* (a | b) ^ b == a & ~b */
7440 op0 = AND, *pconst0 = ~ const0;
7445 /* (a | b) & b == b */
7447 else /* op1 == XOR */
7448 /* (a ^ b) & b) == (~a) & b */
7453 /* Check for NO-OP cases. */
7454 const0 &= GET_MODE_MASK (mode);
7456 && (op0 == IOR || op0 == XOR || op0 == PLUS))
7458 else if (const0 == 0 && op0 == AND)
7460 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
7469 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
7470 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
7471 that we started with.
7473 The shift is normally computed in the widest mode we find in VAROP, as
7474 long as it isn't a different number of words than RESULT_MODE. Exceptions
7475 are ASHIFTRT and ROTATE, which are always done in their original mode, */
7478 simplify_shift_const (x, code, result_mode, varop, count)
7481 enum machine_mode result_mode;
7485 enum rtx_code orig_code = code;
7486 int orig_count = count;
7487 enum machine_mode mode = result_mode;
7488 enum machine_mode shift_mode, tmode;
7490 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
7491 /* We form (outer_op (code varop count) (outer_const)). */
7492 enum rtx_code outer_op = NIL;
7493 HOST_WIDE_INT outer_const = 0;
7495 int complement_p = 0;
7498 /* If we were given an invalid count, don't do anything except exactly
7499 what was requested. */
7501 if (count < 0 || count > GET_MODE_BITSIZE (mode))
7506 return gen_rtx (code, mode, varop, GEN_INT (count));
7509 /* Unless one of the branches of the `if' in this loop does a `continue',
7510 we will `break' the loop after the `if'. */
7514 /* If we have an operand of (clobber (const_int 0)), just return that
7516 if (GET_CODE (varop) == CLOBBER)
7519 /* If we discovered we had to complement VAROP, leave. Making a NOT
7520 here would cause an infinite loop. */
7524 /* Convert ROTATETRT to ROTATE. */
7525 if (code == ROTATERT)
7526 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
7528 /* We need to determine what mode we will do the shift in. If the
7529 shift is a ASHIFTRT or ROTATE, we must always do it in the mode it
7530 was originally done in. Otherwise, we can do it in MODE, the widest
7531 mode encountered. */
7532 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7534 /* Handle cases where the count is greater than the size of the mode
7535 minus 1. For ASHIFT, use the size minus one as the count (this can
7536 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
7537 take the count modulo the size. For other shifts, the result is
7540 Since these shifts are being produced by the compiler by combining
7541 multiple operations, each of which are defined, we know what the
7542 result is supposed to be. */
7544 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
7546 if (code == ASHIFTRT)
7547 count = GET_MODE_BITSIZE (shift_mode) - 1;
7548 else if (code == ROTATE || code == ROTATERT)
7549 count %= GET_MODE_BITSIZE (shift_mode);
7552 /* We can't simply return zero because there may be an
7560 /* Negative counts are invalid and should not have been made (a
7561 programmer-specified negative count should have been handled
7566 /* An arithmetic right shift of a quantity known to be -1 or 0
7568 if (code == ASHIFTRT
7569 && (num_sign_bit_copies (varop, shift_mode)
7570 == GET_MODE_BITSIZE (shift_mode)))
7576 /* If we are doing an arithmetic right shift and discarding all but
7577 the sign bit copies, this is equivalent to doing a shift by the
7578 bitsize minus one. Convert it into that shift because it will often
7579 allow other simplifications. */
7581 if (code == ASHIFTRT
7582 && (count + num_sign_bit_copies (varop, shift_mode)
7583 >= GET_MODE_BITSIZE (shift_mode)))
7584 count = GET_MODE_BITSIZE (shift_mode) - 1;
7586 /* We simplify the tests below and elsewhere by converting
7587 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
7588 `make_compound_operation' will convert it to a ASHIFTRT for
7589 those machines (such as Vax) that don't have a LSHIFTRT. */
7590 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
7592 && ((nonzero_bits (varop, shift_mode)
7593 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
7597 switch (GET_CODE (varop))
7603 new = expand_compound_operation (varop);
7612 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
7613 minus the width of a smaller mode, we can do this with a
7614 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
7615 if ((code == ASHIFTRT || code == LSHIFTRT)
7616 && ! mode_dependent_address_p (XEXP (varop, 0))
7617 && ! MEM_VOLATILE_P (varop)
7618 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7619 MODE_INT, 1)) != BLKmode)
7621 #if BYTES_BIG_ENDIAN
7622 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
7624 new = gen_rtx (MEM, tmode,
7625 plus_constant (XEXP (varop, 0),
7626 count / BITS_PER_UNIT));
7627 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
7628 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
7629 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
7631 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7632 : ZERO_EXTEND, mode, new);
7639 /* Similar to the case above, except that we can only do this if
7640 the resulting mode is the same as that of the underlying
7641 MEM and adjust the address depending on the *bits* endianness
7642 because of the way that bit-field extract insns are defined. */
7643 if ((code == ASHIFTRT || code == LSHIFTRT)
7644 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7645 MODE_INT, 1)) != BLKmode
7646 && tmode == GET_MODE (XEXP (varop, 0)))
7649 new = XEXP (varop, 0);
7651 new = copy_rtx (XEXP (varop, 0));
7652 SUBST (XEXP (new, 0),
7653 plus_constant (XEXP (new, 0),
7654 count / BITS_PER_UNIT));
7657 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7658 : ZERO_EXTEND, mode, new);
7665 /* If VAROP is a SUBREG, strip it as long as the inner operand has
7666 the same number of words as what we've seen so far. Then store
7667 the widest mode in MODE. */
7668 if (subreg_lowpart_p (varop)
7669 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7670 > GET_MODE_SIZE (GET_MODE (varop)))
7671 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7672 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
7675 varop = SUBREG_REG (varop);
7676 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
7677 mode = GET_MODE (varop);
7683 /* Some machines use MULT instead of ASHIFT because MULT
7684 is cheaper. But it is still better on those machines to
7685 merge two shifts into one. */
7686 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7687 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7689 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
7690 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));;
7696 /* Similar, for when divides are cheaper. */
7697 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7698 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7700 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
7701 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7707 /* If we are extracting just the sign bit of an arithmetic right
7708 shift, that shift is not needed. */
7709 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
7711 varop = XEXP (varop, 0);
7715 /* ... fall through ... */
7720 /* Here we have two nested shifts. The result is usually the
7721 AND of a new shift with a mask. We compute the result below. */
7722 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7723 && INTVAL (XEXP (varop, 1)) >= 0
7724 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
7725 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7726 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7728 enum rtx_code first_code = GET_CODE (varop);
7729 int first_count = INTVAL (XEXP (varop, 1));
7730 unsigned HOST_WIDE_INT mask;
7733 /* We have one common special case. We can't do any merging if
7734 the inner code is an ASHIFTRT of a smaller mode. However, if
7735 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
7736 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
7737 we can convert it to
7738 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
7739 This simplifies certain SIGN_EXTEND operations. */
7740 if (code == ASHIFT && first_code == ASHIFTRT
7741 && (GET_MODE_BITSIZE (result_mode)
7742 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
7744 /* C3 has the low-order C1 bits zero. */
7746 mask = (GET_MODE_MASK (mode)
7747 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
7749 varop = simplify_and_const_int (NULL_RTX, result_mode,
7750 XEXP (varop, 0), mask);
7751 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
7753 count = first_count;
7758 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
7759 than C1 high-order bits equal to the sign bit, we can convert
7760 this to either an ASHIFT or a ASHIFTRT depending on the
7763 We cannot do this if VAROP's mode is not SHIFT_MODE. */
7765 if (code == ASHIFTRT && first_code == ASHIFT
7766 && GET_MODE (varop) == shift_mode
7767 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
7770 count -= first_count;
7772 count = - count, code = ASHIFT;
7773 varop = XEXP (varop, 0);
7777 /* There are some cases we can't do. If CODE is ASHIFTRT,
7778 we can only do this if FIRST_CODE is also ASHIFTRT.
7780 We can't do the case when CODE is ROTATE and FIRST_CODE is
7783 If the mode of this shift is not the mode of the outer shift,
7784 we can't do this if either shift is ASHIFTRT or ROTATE.
7786 Finally, we can't do any of these if the mode is too wide
7787 unless the codes are the same.
7789 Handle the case where the shift codes are the same
7792 if (code == first_code)
7794 if (GET_MODE (varop) != result_mode
7795 && (code == ASHIFTRT || code == ROTATE))
7798 count += first_count;
7799 varop = XEXP (varop, 0);
7803 if (code == ASHIFTRT
7804 || (code == ROTATE && first_code == ASHIFTRT)
7805 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
7806 || (GET_MODE (varop) != result_mode
7807 && (first_code == ASHIFTRT || first_code == ROTATE
7808 || code == ROTATE)))
7811 /* To compute the mask to apply after the shift, shift the
7812 nonzero bits of the inner shift the same way the
7813 outer shift will. */
7815 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
7818 = simplify_binary_operation (code, result_mode, mask_rtx,
7821 /* Give up if we can't compute an outer operation to use. */
7823 || GET_CODE (mask_rtx) != CONST_INT
7824 || ! merge_outer_ops (&outer_op, &outer_const, AND,
7826 result_mode, &complement_p))
7829 /* If the shifts are in the same direction, we add the
7830 counts. Otherwise, we subtract them. */
7831 if ((code == ASHIFTRT || code == LSHIFTRT)
7832 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
7833 count += first_count;
7835 count -= first_count;
7837 /* If COUNT is positive, the new shift is usually CODE,
7838 except for the two exceptions below, in which case it is
7839 FIRST_CODE. If the count is negative, FIRST_CODE should
7842 && ((first_code == ROTATE && code == ASHIFT)
7843 || (first_code == ASHIFTRT && code == LSHIFTRT)))
7846 code = first_code, count = - count;
7848 varop = XEXP (varop, 0);
7852 /* If we have (A << B << C) for any shift, we can convert this to
7853 (A << C << B). This wins if A is a constant. Only try this if
7854 B is not a constant. */
7856 else if (GET_CODE (varop) == code
7857 && GET_CODE (XEXP (varop, 1)) != CONST_INT
7859 = simplify_binary_operation (code, mode,
7863 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
7870 /* Make this fit the case below. */
7871 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
7872 GEN_INT (GET_MODE_MASK (mode)));
7878 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
7879 with C the size of VAROP - 1 and the shift is logical if
7880 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7881 we have an (le X 0) operation. If we have an arithmetic shift
7882 and STORE_FLAG_VALUE is 1 or we have a logical shift with
7883 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
7885 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
7886 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
7887 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7888 && (code == LSHIFTRT || code == ASHIFTRT)
7889 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7890 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7893 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
7896 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7897 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7902 /* If we have (shift (logical)), move the logical to the outside
7903 to allow it to possibly combine with another logical and the
7904 shift to combine with another shift. This also canonicalizes to
7905 what a ZERO_EXTRACT looks like. Also, some machines have
7906 (and (shift)) insns. */
7908 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7909 && (new = simplify_binary_operation (code, result_mode,
7911 GEN_INT (count))) != 0
7912 && GET_CODE(new) == CONST_INT
7913 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
7914 INTVAL (new), result_mode, &complement_p))
7916 varop = XEXP (varop, 0);
7920 /* If we can't do that, try to simplify the shift in each arm of the
7921 logical expression, make a new logical expression, and apply
7922 the inverse distributive law. */
7924 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
7925 XEXP (varop, 0), count);
7926 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
7927 XEXP (varop, 1), count);
7929 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
7930 varop = apply_distributive_law (varop);
7937 /* convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
7938 says that the sign bit can be tested, FOO has mode MODE, C is
7939 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
7940 that may be nonzero. */
7941 if (code == LSHIFTRT
7942 && XEXP (varop, 1) == const0_rtx
7943 && GET_MODE (XEXP (varop, 0)) == result_mode
7944 && count == GET_MODE_BITSIZE (result_mode) - 1
7945 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7946 && ((STORE_FLAG_VALUE
7947 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
7948 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
7949 && merge_outer_ops (&outer_op, &outer_const, XOR,
7950 (HOST_WIDE_INT) 1, result_mode,
7953 varop = XEXP (varop, 0);
7960 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
7961 than the number of bits in the mode is equivalent to A. */
7962 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7963 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
7965 varop = XEXP (varop, 0);
7970 /* NEG commutes with ASHIFT since it is multiplication. Move the
7971 NEG outside to allow shifts to combine. */
7973 && merge_outer_ops (&outer_op, &outer_const, NEG,
7974 (HOST_WIDE_INT) 0, result_mode,
7977 varop = XEXP (varop, 0);
7983 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
7984 is one less than the number of bits in the mode is
7985 equivalent to (xor A 1). */
7986 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7987 && XEXP (varop, 1) == constm1_rtx
7988 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
7989 && merge_outer_ops (&outer_op, &outer_const, XOR,
7990 (HOST_WIDE_INT) 1, result_mode,
7994 varop = XEXP (varop, 0);
7998 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
7999 that might be nonzero in BAR are those being shifted out and those
8000 bits are known zero in FOO, we can replace the PLUS with FOO.
8001 Similarly in the other operand order. This code occurs when
8002 we are computing the size of a variable-size array. */
8004 if ((code == ASHIFTRT || code == LSHIFTRT)
8005 && count < HOST_BITS_PER_WIDE_INT
8006 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
8007 && (nonzero_bits (XEXP (varop, 1), result_mode)
8008 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
8010 varop = XEXP (varop, 0);
8013 else if ((code == ASHIFTRT || code == LSHIFTRT)
8014 && count < HOST_BITS_PER_WIDE_INT
8015 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8016 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8018 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8019 & nonzero_bits (XEXP (varop, 1),
8022 varop = XEXP (varop, 1);
8026 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
8028 && GET_CODE (XEXP (varop, 1)) == CONST_INT
8029 && (new = simplify_binary_operation (ASHIFT, result_mode,
8031 GEN_INT (count))) != 0
8032 && GET_CODE(new) == CONST_INT
8033 && merge_outer_ops (&outer_op, &outer_const, PLUS,
8034 INTVAL (new), result_mode, &complement_p))
8036 varop = XEXP (varop, 0);
8042 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
8043 with C the size of VAROP - 1 and the shift is logical if
8044 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8045 we have a (gt X 0) operation. If the shift is arithmetic with
8046 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
8047 we have a (neg (gt X 0)) operation. */
8049 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
8050 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8051 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8052 && (code == LSHIFTRT || code == ASHIFTRT)
8053 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8054 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
8055 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8058 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
8061 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8062 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8072 /* We need to determine what mode to do the shift in. If the shift is
8073 a ASHIFTRT or ROTATE, we must always do it in the mode it was originally
8074 done in. Otherwise, we can do it in MODE, the widest mode encountered.
8075 The code we care about is that of the shift that will actually be done,
8076 not the shift that was originally requested. */
8077 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
8079 /* We have now finished analyzing the shift. The result should be
8080 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
8081 OUTER_OP is non-NIL, it is an operation that needs to be applied
8082 to the result of the shift. OUTER_CONST is the relevant constant,
8083 but we must turn off all bits turned off in the shift.
8085 If we were passed a value for X, see if we can use any pieces of
8086 it. If not, make new rtx. */
8088 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
8089 && GET_CODE (XEXP (x, 1)) == CONST_INT
8090 && INTVAL (XEXP (x, 1)) == count)
8091 const_rtx = XEXP (x, 1);
8093 const_rtx = GEN_INT (count);
8095 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8096 && GET_MODE (XEXP (x, 0)) == shift_mode
8097 && SUBREG_REG (XEXP (x, 0)) == varop)
8098 varop = XEXP (x, 0);
8099 else if (GET_MODE (varop) != shift_mode)
8100 varop = gen_lowpart_for_combine (shift_mode, varop);
8102 /* If we can't make the SUBREG, try to return what we were given. */
8103 if (GET_CODE (varop) == CLOBBER)
8104 return x ? x : varop;
8106 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
8111 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
8112 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
8114 SUBST (XEXP (x, 0), varop);
8115 SUBST (XEXP (x, 1), const_rtx);
8118 /* If we have an outer operation and we just made a shift, it is
8119 possible that we could have simplified the shift were it not
8120 for the outer operation. So try to do the simplification
8123 if (outer_op != NIL && GET_CODE (x) == code
8124 && GET_CODE (XEXP (x, 1)) == CONST_INT)
8125 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
8126 INTVAL (XEXP (x, 1)));
8128 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
8129 turn off all the bits that the shift would have turned off. */
8130 if (orig_code == LSHIFTRT && result_mode != shift_mode)
8131 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
8132 GET_MODE_MASK (result_mode) >> orig_count);
8134 /* Do the remainder of the processing in RESULT_MODE. */
8135 x = gen_lowpart_for_combine (result_mode, x);
8137 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
8140 x = gen_unary (NOT, result_mode, result_mode, x);
8142 if (outer_op != NIL)
8144 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
8145 outer_const &= GET_MODE_MASK (result_mode);
8147 if (outer_op == AND)
8148 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
8149 else if (outer_op == SET)
8150 /* This means that we have determined that the result is
8151 equivalent to a constant. This should be rare. */
8152 x = GEN_INT (outer_const);
8153 else if (GET_RTX_CLASS (outer_op) == '1')
8154 x = gen_unary (outer_op, result_mode, result_mode, x);
8156 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
8162 /* Like recog, but we receive the address of a pointer to a new pattern.
8163 We try to match the rtx that the pointer points to.
8164 If that fails, we may try to modify or replace the pattern,
8165 storing the replacement into the same pointer object.
8167 Modifications include deletion or addition of CLOBBERs.
8169 PNOTES is a pointer to a location where any REG_UNUSED notes added for
8170 the CLOBBERs are placed.
8172 The value is the final insn code from the pattern ultimately matched,
8176 recog_for_combine (pnewpat, insn, pnotes)
8181 register rtx pat = *pnewpat;
8182 int insn_code_number;
8183 int num_clobbers_to_add = 0;
8187 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
8188 we use to indicate that something didn't match. If we find such a
8189 thing, force rejection. */
8190 if (GET_CODE (pat) == PARALLEL)
8191 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
8192 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
8193 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
8196 /* Is the result of combination a valid instruction? */
8197 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8199 /* If it isn't, there is the possibility that we previously had an insn
8200 that clobbered some register as a side effect, but the combined
8201 insn doesn't need to do that. So try once more without the clobbers
8202 unless this represents an ASM insn. */
8204 if (insn_code_number < 0 && ! check_asm_operands (pat)
8205 && GET_CODE (pat) == PARALLEL)
8209 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
8210 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
8213 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
8217 SUBST_INT (XVECLEN (pat, 0), pos);
8220 pat = XVECEXP (pat, 0, 0);
8222 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8225 /* If we had any clobbers to add, make a new pattern than contains
8226 them. Then check to make sure that all of them are dead. */
8227 if (num_clobbers_to_add)
8229 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
8230 gen_rtvec (GET_CODE (pat) == PARALLEL
8231 ? XVECLEN (pat, 0) + num_clobbers_to_add
8232 : num_clobbers_to_add + 1));
8234 if (GET_CODE (pat) == PARALLEL)
8235 for (i = 0; i < XVECLEN (pat, 0); i++)
8236 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
8238 XVECEXP (newpat, 0, 0) = pat;
8240 add_clobbers (newpat, insn_code_number);
8242 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
8243 i < XVECLEN (newpat, 0); i++)
8245 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
8246 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
8248 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
8249 XEXP (XVECEXP (newpat, 0, i), 0), notes);
8257 return insn_code_number;
8260 /* Like gen_lowpart but for use by combine. In combine it is not possible
8261 to create any new pseudoregs. However, it is safe to create
8262 invalid memory addresses, because combine will try to recognize
8263 them and all they will do is make the combine attempt fail.
8265 If for some reason this cannot do its job, an rtx
8266 (clobber (const_int 0)) is returned.
8267 An insn containing that will not be recognized. */
8272 gen_lowpart_for_combine (mode, x)
8273 enum machine_mode mode;
8278 if (GET_MODE (x) == mode)
8281 /* We can only support MODE being wider than a word if X is a
8282 constant integer or has a mode the same size. */
8284 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
8285 && ! ((GET_MODE (x) == VOIDmode
8286 && (GET_CODE (x) == CONST_INT
8287 || GET_CODE (x) == CONST_DOUBLE))
8288 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
8289 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8291 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
8292 won't know what to do. So we will strip off the SUBREG here and
8293 process normally. */
8294 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
8297 if (GET_MODE (x) == mode)
8301 result = gen_lowpart_common (mode, x);
8305 if (GET_CODE (x) == MEM)
8307 register int offset = 0;
8310 /* Refuse to work on a volatile memory ref or one with a mode-dependent
8312 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
8313 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8315 /* If we want to refer to something bigger than the original memref,
8316 generate a perverse subreg instead. That will force a reload
8317 of the original memref X. */
8318 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
8319 return gen_rtx (SUBREG, mode, x, 0);
8321 #if WORDS_BIG_ENDIAN
8322 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
8323 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
8325 #if BYTES_BIG_ENDIAN
8326 /* Adjust the address so that the address-after-the-data
8328 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
8329 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
8331 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
8332 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
8333 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
8334 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
8338 /* If X is a comparison operator, rewrite it in a new mode. This
8339 probably won't match, but may allow further simplifications. */
8340 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
8341 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
8343 /* If we couldn't simplify X any other way, just enclose it in a
8344 SUBREG. Normally, this SUBREG won't match, but some patterns may
8345 include an explicit SUBREG or we may simplify it further in combine. */
8350 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
8351 word = ((GET_MODE_SIZE (GET_MODE (x))
8352 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
8354 return gen_rtx (SUBREG, mode, x, word);
8358 /* Make an rtx expression. This is a subset of gen_rtx and only supports
8359 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
8361 If the identical expression was previously in the insn (in the undobuf),
8362 it will be returned. Only if it is not found will a new expression
8367 gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
8371 enum machine_mode mode;
8383 code = va_arg (p, enum rtx_code);
8384 mode = va_arg (p, enum machine_mode);
8387 n_args = GET_RTX_LENGTH (code);
8388 fmt = GET_RTX_FORMAT (code);
8390 if (n_args == 0 || n_args > 3)
8393 /* Get each arg and verify that it is supposed to be an expression. */
8394 for (j = 0; j < n_args; j++)
8399 args[j] = va_arg (p, rtx);
8402 /* See if this is in undobuf. Be sure we don't use objects that came
8403 from another insn; this could produce circular rtl structures. */
8405 for (i = previous_num_undos; i < undobuf.num_undo; i++)
8406 if (!undobuf.undo[i].is_int
8407 && GET_CODE (undobuf.undo[i].old_contents.r) == code
8408 && GET_MODE (undobuf.undo[i].old_contents.r) == mode)
8410 for (j = 0; j < n_args; j++)
8411 if (XEXP (undobuf.undo[i].old_contents.r, j) != args[j])
8415 return undobuf.undo[i].old_contents.r;
8418 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
8419 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
8420 rt = rtx_alloc (code);
8421 PUT_MODE (rt, mode);
8422 XEXP (rt, 0) = args[0];
8425 XEXP (rt, 1) = args[1];
8427 XEXP (rt, 2) = args[2];
8432 /* These routines make binary and unary operations by first seeing if they
8433 fold; if not, a new expression is allocated. */
8436 gen_binary (code, mode, op0, op1)
8438 enum machine_mode mode;
8444 if (GET_RTX_CLASS (code) == 'c'
8445 && (GET_CODE (op0) == CONST_INT
8446 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
8447 tem = op0, op0 = op1, op1 = tem;
8449 if (GET_RTX_CLASS (code) == '<')
8451 enum machine_mode op_mode = GET_MODE (op0);
8453 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
8454 just (REL_OP X Y). */
8455 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
8457 op1 = XEXP (op0, 1);
8458 op0 = XEXP (op0, 0);
8459 op_mode = GET_MODE (op0);
8462 if (op_mode == VOIDmode)
8463 op_mode = GET_MODE (op1);
8464 result = simplify_relational_operation (code, op_mode, op0, op1);
8467 result = simplify_binary_operation (code, mode, op0, op1);
8472 /* Put complex operands first and constants second. */
8473 if (GET_RTX_CLASS (code) == 'c'
8474 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
8475 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
8476 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
8477 || (GET_CODE (op0) == SUBREG
8478 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
8479 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
8480 return gen_rtx_combine (code, mode, op1, op0);
8482 return gen_rtx_combine (code, mode, op0, op1);
8486 gen_unary (code, mode, op0_mode, op0)
8488 enum machine_mode mode, op0_mode;
8491 rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
8496 return gen_rtx_combine (code, mode, op0);
8499 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
8500 comparison code that will be tested.
8502 The result is a possibly different comparison code to use. *POP0 and
8503 *POP1 may be updated.
8505 It is possible that we might detect that a comparison is either always
8506 true or always false. However, we do not perform general constant
8507 folding in combine, so this knowledge isn't useful. Such tautologies
8508 should have been detected earlier. Hence we ignore all such cases. */
8510 static enum rtx_code
8511 simplify_comparison (code, pop0, pop1)
8520 enum machine_mode mode, tmode;
8522 /* Try a few ways of applying the same transformation to both operands. */
8525 #ifndef WORD_REGISTER_OPERATIONS
8526 /* The test below this one won't handle SIGN_EXTENDs on these machines,
8527 so check specially. */
8528 if (code != GTU && code != GEU && code != LTU && code != LEU
8529 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
8530 && GET_CODE (XEXP (op0, 0)) == ASHIFT
8531 && GET_CODE (XEXP (op1, 0)) == ASHIFT
8532 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
8533 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
8534 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
8535 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
8536 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8537 && GET_CODE (XEXP (op1, 1)) == CONST_INT
8538 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8539 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
8540 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
8541 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
8542 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
8543 && (INTVAL (XEXP (op0, 1))
8544 == (GET_MODE_BITSIZE (GET_MODE (op0))
8546 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
8548 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
8549 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
8553 /* If both operands are the same constant shift, see if we can ignore the
8554 shift. We can if the shift is a rotate or if the bits shifted out of
8555 this shift are known to be zero for both inputs and if the type of
8556 comparison is compatible with the shift. */
8557 if (GET_CODE (op0) == GET_CODE (op1)
8558 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8559 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
8560 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
8561 && (code != GT && code != LT && code != GE && code != LE))
8562 || (GET_CODE (op0) == ASHIFTRT
8563 && (code != GTU && code != LTU
8564 && code != GEU && code != GEU)))
8565 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8566 && INTVAL (XEXP (op0, 1)) >= 0
8567 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8568 && XEXP (op0, 1) == XEXP (op1, 1))
8570 enum machine_mode mode = GET_MODE (op0);
8571 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8572 int shift_count = INTVAL (XEXP (op0, 1));
8574 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
8575 mask &= (mask >> shift_count) << shift_count;
8576 else if (GET_CODE (op0) == ASHIFT)
8577 mask = (mask & (mask << shift_count)) >> shift_count;
8579 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
8580 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
8581 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
8586 /* If both operands are AND's of a paradoxical SUBREG by constant, the
8587 SUBREGs are of the same mode, and, in both cases, the AND would
8588 be redundant if the comparison was done in the narrower mode,
8589 do the comparison in the narrower mode (e.g., we are AND'ing with 1
8590 and the operand's possibly nonzero bits are 0xffffff01; in that case
8591 if we only care about QImode, we don't need the AND). This case
8592 occurs if the output mode of an scc insn is not SImode and
8593 STORE_FLAG_VALUE == 1 (e.g., the 386).
8595 Similarly, check for a case where the AND's are ZERO_EXTEND
8596 operations from some narrower mode even though a SUBREG is not
8599 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
8600 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8601 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
8603 rtx inner_op0 = XEXP (op0, 0);
8604 rtx inner_op1 = XEXP (op1, 0);
8605 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
8606 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
8609 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
8610 && (GET_MODE_SIZE (GET_MODE (inner_op0))
8611 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
8612 && (GET_MODE (SUBREG_REG (inner_op0))
8613 == GET_MODE (SUBREG_REG (inner_op1)))
8614 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8615 <= HOST_BITS_PER_WIDE_INT)
8616 && (0 == (~c0) & nonzero_bits (SUBREG_REG (inner_op0),
8617 GET_MODE (SUBREG_REG (op0))))
8618 && (0 == (~c1) & nonzero_bits (SUBREG_REG (inner_op1),
8619 GET_MODE (SUBREG_REG (inner_op1)))))
8621 op0 = SUBREG_REG (inner_op0);
8622 op1 = SUBREG_REG (inner_op1);
8624 /* The resulting comparison is always unsigned since we masked
8625 off the original sign bit. */
8626 code = unsigned_condition (code);
8632 for (tmode = GET_CLASS_NARROWEST_MODE
8633 (GET_MODE_CLASS (GET_MODE (op0)));
8634 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
8635 if (c0 == GET_MODE_MASK (tmode))
8637 op0 = gen_lowpart_for_combine (tmode, inner_op0);
8638 op1 = gen_lowpart_for_combine (tmode, inner_op1);
8639 code = unsigned_condition (code);
8648 /* If both operands are NOT, we can strip off the outer operation
8649 and adjust the comparison code for swapped operands; similarly for
8650 NEG, except that this must be an equality comparison. */
8651 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
8652 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
8653 && (code == EQ || code == NE)))
8654 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
8660 /* If the first operand is a constant, swap the operands and adjust the
8661 comparison code appropriately. */
8662 if (CONSTANT_P (op0))
8664 tem = op0, op0 = op1, op1 = tem;
8665 code = swap_condition (code);
8668 /* We now enter a loop during which we will try to simplify the comparison.
8669 For the most part, we only are concerned with comparisons with zero,
8670 but some things may really be comparisons with zero but not start
8671 out looking that way. */
8673 while (GET_CODE (op1) == CONST_INT)
8675 enum machine_mode mode = GET_MODE (op0);
8676 int mode_width = GET_MODE_BITSIZE (mode);
8677 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8678 int equality_comparison_p;
8679 int sign_bit_comparison_p;
8680 int unsigned_comparison_p;
8681 HOST_WIDE_INT const_op;
8683 /* We only want to handle integral modes. This catches VOIDmode,
8684 CCmode, and the floating-point modes. An exception is that we
8685 can handle VOIDmode if OP0 is a COMPARE or a comparison
8688 if (GET_MODE_CLASS (mode) != MODE_INT
8689 && ! (mode == VOIDmode
8690 && (GET_CODE (op0) == COMPARE
8691 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
8694 /* Get the constant we are comparing against and turn off all bits
8695 not on in our mode. */
8696 const_op = INTVAL (op1);
8697 if (mode_width <= HOST_BITS_PER_WIDE_INT)
8700 /* If we are comparing against a constant power of two and the value
8701 being compared can only have that single bit nonzero (e.g., it was
8702 `and'ed with that bit), we can replace this with a comparison
8705 && (code == EQ || code == NE || code == GE || code == GEU
8706 || code == LT || code == LTU)
8707 && mode_width <= HOST_BITS_PER_WIDE_INT
8708 && exact_log2 (const_op) >= 0
8709 && nonzero_bits (op0, mode) == const_op)
8711 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
8712 op1 = const0_rtx, const_op = 0;
8715 /* Similarly, if we are comparing a value known to be either -1 or
8716 0 with -1, change it to the opposite comparison against zero. */
8719 && (code == EQ || code == NE || code == GT || code == LE
8720 || code == GEU || code == LTU)
8721 && num_sign_bit_copies (op0, mode) == mode_width)
8723 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
8724 op1 = const0_rtx, const_op = 0;
8727 /* Do some canonicalizations based on the comparison code. We prefer
8728 comparisons against zero and then prefer equality comparisons.
8729 If we can reduce the size of a constant, we will do that too. */
8734 /* < C is equivalent to <= (C - 1) */
8738 op1 = GEN_INT (const_op);
8740 /* ... fall through to LE case below. */
8746 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
8750 op1 = GEN_INT (const_op);
8754 /* If we are doing a <= 0 comparison on a value known to have
8755 a zero sign bit, we can replace this with == 0. */
8756 else if (const_op == 0
8757 && mode_width <= HOST_BITS_PER_WIDE_INT
8758 && (nonzero_bits (op0, mode)
8759 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8764 /* >= C is equivalent to > (C - 1). */
8768 op1 = GEN_INT (const_op);
8770 /* ... fall through to GT below. */
8776 /* > C is equivalent to >= (C + 1); we do this for C < 0*/
8780 op1 = GEN_INT (const_op);
8784 /* If we are doing a > 0 comparison on a value known to have
8785 a zero sign bit, we can replace this with != 0. */
8786 else if (const_op == 0
8787 && mode_width <= HOST_BITS_PER_WIDE_INT
8788 && (nonzero_bits (op0, mode)
8789 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8794 /* < C is equivalent to <= (C - 1). */
8798 op1 = GEN_INT (const_op);
8800 /* ... fall through ... */
8803 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
8804 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8806 const_op = 0, op1 = const0_rtx;
8814 /* unsigned <= 0 is equivalent to == 0 */
8818 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
8819 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8821 const_op = 0, op1 = const0_rtx;
8827 /* >= C is equivalent to < (C - 1). */
8831 op1 = GEN_INT (const_op);
8833 /* ... fall through ... */
8836 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
8837 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8839 const_op = 0, op1 = const0_rtx;
8847 /* unsigned > 0 is equivalent to != 0 */
8851 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
8852 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8854 const_op = 0, op1 = const0_rtx;
8860 /* Compute some predicates to simplify code below. */
8862 equality_comparison_p = (code == EQ || code == NE);
8863 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
8864 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
8867 /* If this is a sign bit comparison and we can do arithmetic in
8868 MODE, say that we will only be needing the sign bit of OP0. */
8869 if (sign_bit_comparison_p
8870 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8871 op0 = force_to_mode (op0, mode,
8873 << (GET_MODE_BITSIZE (mode) - 1)),
8876 /* Now try cases based on the opcode of OP0. If none of the cases
8877 does a "continue", we exit this loop immediately after the
8880 switch (GET_CODE (op0))
8883 /* If we are extracting a single bit from a variable position in
8884 a constant that has only a single bit set and are comparing it
8885 with zero, we can convert this into an equality comparison
8886 between the position and the location of the single bit. We can't
8887 do this if bit endian and we don't have an extzv since we then
8888 can't know what mode to use for the endianness adjustment. */
8890 #if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
8891 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
8892 && XEXP (op0, 1) == const1_rtx
8893 && equality_comparison_p && const_op == 0
8894 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
8897 i = (GET_MODE_BITSIZE
8898 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
8901 op0 = XEXP (op0, 2);
8905 /* Result is nonzero iff shift count is equal to I. */
8906 code = reverse_condition (code);
8911 /* ... fall through ... */
8914 tem = expand_compound_operation (op0);
8923 /* If testing for equality, we can take the NOT of the constant. */
8924 if (equality_comparison_p
8925 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
8927 op0 = XEXP (op0, 0);
8932 /* If just looking at the sign bit, reverse the sense of the
8934 if (sign_bit_comparison_p)
8936 op0 = XEXP (op0, 0);
8937 code = (code == GE ? LT : GE);
8943 /* If testing for equality, we can take the NEG of the constant. */
8944 if (equality_comparison_p
8945 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
8947 op0 = XEXP (op0, 0);
8952 /* The remaining cases only apply to comparisons with zero. */
8956 /* When X is ABS or is known positive,
8957 (neg X) is < 0 if and only if X != 0. */
8959 if (sign_bit_comparison_p
8960 && (GET_CODE (XEXP (op0, 0)) == ABS
8961 || (mode_width <= HOST_BITS_PER_WIDE_INT
8962 && (nonzero_bits (XEXP (op0, 0), mode)
8963 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
8965 op0 = XEXP (op0, 0);
8966 code = (code == LT ? NE : EQ);
8970 /* If we have NEG of something whose two high-order bits are the
8971 same, we know that "(-a) < 0" is equivalent to "a > 0". */
8972 if (num_sign_bit_copies (op0, mode) >= 2)
8974 op0 = XEXP (op0, 0);
8975 code = swap_condition (code);
8981 /* If we are testing equality and our count is a constant, we
8982 can perform the inverse operation on our RHS. */
8983 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8984 && (tem = simplify_binary_operation (ROTATERT, mode,
8985 op1, XEXP (op0, 1))) != 0)
8987 op0 = XEXP (op0, 0);
8992 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
8993 a particular bit. Convert it to an AND of a constant of that
8994 bit. This will be converted into a ZERO_EXTRACT. */
8995 if (const_op == 0 && sign_bit_comparison_p
8996 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8997 && mode_width <= HOST_BITS_PER_WIDE_INT)
8999 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9002 - INTVAL (XEXP (op0, 1)))));
9003 code = (code == LT ? NE : EQ);
9007 /* ... fall through ... */
9010 /* ABS is ignorable inside an equality comparison with zero. */
9011 if (const_op == 0 && equality_comparison_p)
9013 op0 = XEXP (op0, 0);
9020 /* Can simplify (compare (zero/sign_extend FOO) CONST)
9021 to (compare FOO CONST) if CONST fits in FOO's mode and we
9022 are either testing inequality or have an unsigned comparison
9023 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
9024 if (! unsigned_comparison_p
9025 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9026 <= HOST_BITS_PER_WIDE_INT)
9027 && ((unsigned HOST_WIDE_INT) const_op
9028 < (((HOST_WIDE_INT) 1
9029 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
9031 op0 = XEXP (op0, 0);
9037 /* Check for the case where we are comparing A - C1 with C2,
9038 both constants are smaller than 1/2 the maxium positive
9039 value in MODE, and the comparison is equality or unsigned.
9040 In that case, if A is either zero-extended to MODE or has
9041 sufficient sign bits so that the high-order bit in MODE
9042 is a copy of the sign in the inner mode, we can prove that it is
9043 safe to do the operation in the wider mode. This simplifies
9044 many range checks. */
9046 if (mode_width <= HOST_BITS_PER_WIDE_INT
9047 && subreg_lowpart_p (op0)
9048 && GET_CODE (SUBREG_REG (op0)) == PLUS
9049 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
9050 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
9051 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
9052 < GET_MODE_MASK (mode) / 2)
9053 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
9054 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
9055 GET_MODE (SUBREG_REG (op0)))
9056 & ~ GET_MODE_MASK (mode))
9057 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
9058 GET_MODE (SUBREG_REG (op0)))
9059 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9060 - GET_MODE_BITSIZE (mode)))))
9062 op0 = SUBREG_REG (op0);
9066 /* If the inner mode is narrower and we are extracting the low part,
9067 we can treat the SUBREG as if it were a ZERO_EXTEND. */
9068 if (subreg_lowpart_p (op0)
9069 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
9070 /* Fall through */ ;
9074 /* ... fall through ... */
9077 if ((unsigned_comparison_p || equality_comparison_p)
9078 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9079 <= HOST_BITS_PER_WIDE_INT)
9080 && ((unsigned HOST_WIDE_INT) const_op
9081 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
9083 op0 = XEXP (op0, 0);
9089 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
9090 this for equality comparisons due to pathological cases involving
9092 if (equality_comparison_p
9093 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9094 op1, XEXP (op0, 1))))
9096 op0 = XEXP (op0, 0);
9101 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
9102 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
9103 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
9105 op0 = XEXP (XEXP (op0, 0), 0);
9106 code = (code == LT ? EQ : NE);
9112 /* (eq (minus A B) C) -> (eq A (plus B C)) or
9113 (eq B (minus A C)), whichever simplifies. We can only do
9114 this for equality comparisons due to pathological cases involving
9116 if (equality_comparison_p
9117 && 0 != (tem = simplify_binary_operation (PLUS, mode,
9118 XEXP (op0, 1), op1)))
9120 op0 = XEXP (op0, 0);
9125 if (equality_comparison_p
9126 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9127 XEXP (op0, 0), op1)))
9129 op0 = XEXP (op0, 1);
9134 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
9135 of bits in X minus 1, is one iff X > 0. */
9136 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
9137 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9138 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
9139 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9141 op0 = XEXP (op0, 1);
9142 code = (code == GE ? LE : GT);
9148 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
9149 if C is zero or B is a constant. */
9150 if (equality_comparison_p
9151 && 0 != (tem = simplify_binary_operation (XOR, mode,
9152 XEXP (op0, 1), op1)))
9154 op0 = XEXP (op0, 0);
9161 case LT: case LTU: case LE: case LEU:
9162 case GT: case GTU: case GE: case GEU:
9163 /* We can't do anything if OP0 is a condition code value, rather
9164 than an actual data value. */
9167 || XEXP (op0, 0) == cc0_rtx
9169 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
9172 /* Get the two operands being compared. */
9173 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
9174 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
9176 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
9178 /* Check for the cases where we simply want the result of the
9179 earlier test or the opposite of that result. */
9181 || (code == EQ && reversible_comparison_p (op0))
9182 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9183 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9184 && (STORE_FLAG_VALUE
9185 & (((HOST_WIDE_INT) 1
9186 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
9188 || (code == GE && reversible_comparison_p (op0)))))
9190 code = (code == LT || code == NE
9191 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
9192 op0 = tem, op1 = tem1;
9198 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
9200 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
9201 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
9202 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9204 op0 = XEXP (op0, 1);
9205 code = (code == GE ? GT : LE);
9211 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
9212 will be converted to a ZERO_EXTRACT later. */
9213 if (const_op == 0 && equality_comparison_p
9214 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9215 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
9217 op0 = simplify_and_const_int
9218 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
9220 XEXP (XEXP (op0, 0), 1)),
9225 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
9226 zero and X is a comparison and C1 and C2 describe only bits set
9227 in STORE_FLAG_VALUE, we can compare with X. */
9228 if (const_op == 0 && equality_comparison_p
9229 && mode_width <= HOST_BITS_PER_WIDE_INT
9230 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9231 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
9232 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9233 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
9234 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
9236 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
9237 << INTVAL (XEXP (XEXP (op0, 0), 1)));
9238 if ((~ STORE_FLAG_VALUE & mask) == 0
9239 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
9240 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
9241 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
9243 op0 = XEXP (XEXP (op0, 0), 0);
9248 /* If we are doing an equality comparison of an AND of a bit equal
9249 to the sign bit, replace this with a LT or GE comparison of
9250 the underlying value. */
9251 if (equality_comparison_p
9253 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9254 && mode_width <= HOST_BITS_PER_WIDE_INT
9255 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
9256 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
9258 op0 = XEXP (op0, 0);
9259 code = (code == EQ ? GE : LT);
9263 /* If this AND operation is really a ZERO_EXTEND from a narrower
9264 mode, the constant fits within that mode, and this is either an
9265 equality or unsigned comparison, try to do this comparison in
9266 the narrower mode. */
9267 if ((equality_comparison_p || unsigned_comparison_p)
9268 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9269 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
9270 & GET_MODE_MASK (mode))
9272 && const_op >> i == 0
9273 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
9275 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
9281 /* If we have (compare (ashift FOO N) (const_int C)) and
9282 the high order N bits of FOO (N+1 if an inequality comparison)
9283 are known to be zero, we can do this by comparing FOO with C
9284 shifted right N bits so long as the low-order N bits of C are
9286 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
9287 && INTVAL (XEXP (op0, 1)) >= 0
9288 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
9289 < HOST_BITS_PER_WIDE_INT)
9291 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
9292 && mode_width <= HOST_BITS_PER_WIDE_INT
9293 && (nonzero_bits (XEXP (op0, 0), mode)
9294 & ~ (mask >> (INTVAL (XEXP (op0, 1))
9295 + ! equality_comparison_p))) == 0)
9297 const_op >>= INTVAL (XEXP (op0, 1));
9298 op1 = GEN_INT (const_op);
9299 op0 = XEXP (op0, 0);
9303 /* If we are doing a sign bit comparison, it means we are testing
9304 a particular bit. Convert it to the appropriate AND. */
9305 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
9306 && mode_width <= HOST_BITS_PER_WIDE_INT)
9308 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9311 - INTVAL (XEXP (op0, 1)))));
9312 code = (code == LT ? NE : EQ);
9316 /* If this an equality comparison with zero and we are shifting
9317 the low bit to the sign bit, we can convert this to an AND of the
9319 if (const_op == 0 && equality_comparison_p
9320 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9321 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9323 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9330 /* If this is an equality comparison with zero, we can do this
9331 as a logical shift, which might be much simpler. */
9332 if (equality_comparison_p && const_op == 0
9333 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
9335 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
9337 INTVAL (XEXP (op0, 1)));
9341 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
9342 do the comparison in a narrower mode. */
9343 if (! unsigned_comparison_p
9344 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9345 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9346 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
9347 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
9348 MODE_INT, 1)) != BLKmode
9349 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
9350 || ((unsigned HOST_WIDE_INT) - const_op
9351 <= GET_MODE_MASK (tmode))))
9353 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
9357 /* ... fall through ... */
9359 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
9360 the low order N bits of FOO are known to be zero, we can do this
9361 by comparing FOO with C shifted left N bits so long as no
9363 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
9364 && INTVAL (XEXP (op0, 1)) >= 0
9365 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9366 && mode_width <= HOST_BITS_PER_WIDE_INT
9367 && (nonzero_bits (XEXP (op0, 0), mode)
9368 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
9370 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
9373 const_op <<= INTVAL (XEXP (op0, 1));
9374 op1 = GEN_INT (const_op);
9375 op0 = XEXP (op0, 0);
9379 /* If we are using this shift to extract just the sign bit, we
9380 can replace this with an LT or GE comparison. */
9382 && (equality_comparison_p || sign_bit_comparison_p)
9383 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9384 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9386 op0 = XEXP (op0, 0);
9387 code = (code == NE || code == GT ? LT : GE);
9396 /* Now make any compound operations involved in this comparison. Then,
9397 check for an outmost SUBREG on OP0 that isn't doing anything or is
9398 paradoxical. The latter case can only occur when it is known that the
9399 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
9400 We can never remove a SUBREG for a non-equality comparison because the
9401 sign bit is in a different place in the underlying object. */
9403 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
9404 op1 = make_compound_operation (op1, SET);
9406 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9407 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9408 && (code == NE || code == EQ)
9409 && ((GET_MODE_SIZE (GET_MODE (op0))
9410 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
9412 op0 = SUBREG_REG (op0);
9413 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
9416 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9417 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9418 && (code == NE || code == EQ)
9419 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9420 <= HOST_BITS_PER_WIDE_INT)
9421 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
9422 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
9423 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
9425 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
9426 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
9427 op0 = SUBREG_REG (op0), op1 = tem;
9429 /* We now do the opposite procedure: Some machines don't have compare
9430 insns in all modes. If OP0's mode is an integer mode smaller than a
9431 word and we can't do a compare in that mode, see if there is a larger
9432 mode for which we can do the compare. There are a number of cases in
9433 which we can use the wider mode. */
9435 mode = GET_MODE (op0);
9436 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
9437 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
9438 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
9439 for (tmode = GET_MODE_WIDER_MODE (mode);
9441 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
9442 tmode = GET_MODE_WIDER_MODE (tmode))
9443 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
9445 /* If the only nonzero bits in OP0 and OP1 are those in the
9446 narrower mode and this is an equality or unsigned comparison,
9447 we can use the wider mode. Similarly for sign-extended
9448 values, in which case it is true for all comparisons. */
9449 if (((code == EQ || code == NE
9450 || code == GEU || code == GTU || code == LEU || code == LTU)
9451 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
9452 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
9453 || ((num_sign_bit_copies (op0, tmode)
9454 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
9455 && (num_sign_bit_copies (op1, tmode)
9456 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
9458 op0 = gen_lowpart_for_combine (tmode, op0);
9459 op1 = gen_lowpart_for_combine (tmode, op1);
9463 /* If this is a test for negative, we can make an explicit
9464 test of the sign bit. */
9466 if (op1 == const0_rtx && (code == LT || code == GE)
9467 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9469 op0 = gen_binary (AND, tmode,
9470 gen_lowpart_for_combine (tmode, op0),
9471 GEN_INT ((HOST_WIDE_INT) 1
9472 << (GET_MODE_BITSIZE (mode) - 1)));
9473 code = (code == LT) ? NE : EQ;
9478 #ifdef CANONICALIZE_COMPARISON
9479 /* If this machine only supports a subset of valid comparisons, see if we
9480 can convert an unsupported one into a supported one. */
9481 CANONICALIZE_COMPARISON (code, op0, op1);
9490 /* Return 1 if we know that X, a comparison operation, is not operating
9491 on a floating-point value or is EQ or NE, meaning that we can safely
9495 reversible_comparison_p (x)
9498 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
9500 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
9503 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
9506 case MODE_PARTIAL_INT:
9507 case MODE_COMPLEX_INT:
9511 /* If the mode of the condition codes tells us that this is safe,
9512 we need look no further. */
9513 if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
9516 /* Otherwise try and find where the condition codes were last set and
9518 x = get_last_value (XEXP (x, 0));
9519 return (x && GET_CODE (x) == COMPARE
9520 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
9526 /* Utility function for following routine. Called when X is part of a value
9527 being stored into reg_last_set_value. Sets reg_last_set_table_tick
9528 for each register mentioned. Similar to mention_regs in cse.c */
9531 update_table_tick (x)
9534 register enum rtx_code code = GET_CODE (x);
9535 register char *fmt = GET_RTX_FORMAT (code);
9540 int regno = REGNO (x);
9541 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9542 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9544 for (i = regno; i < endregno; i++)
9545 reg_last_set_table_tick[i] = label_tick;
9550 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9551 /* Note that we can't have an "E" in values stored; see
9552 get_last_value_validate. */
9554 update_table_tick (XEXP (x, i));
9557 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
9558 are saying that the register is clobbered and we no longer know its
9559 value. If INSN is zero, don't update reg_last_set; this is only permitted
9560 with VALUE also zero and is used to invalidate the register. */
9563 record_value_for_reg (reg, insn, value)
9568 int regno = REGNO (reg);
9569 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9570 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
9573 /* If VALUE contains REG and we have a previous value for REG, substitute
9574 the previous value. */
9575 if (value && insn && reg_overlap_mentioned_p (reg, value))
9579 /* Set things up so get_last_value is allowed to see anything set up to
9581 subst_low_cuid = INSN_CUID (insn);
9582 tem = get_last_value (reg);
9585 value = replace_rtx (copy_rtx (value), reg, tem);
9588 /* For each register modified, show we don't know its value, that
9589 we don't know about its bitwise content, that its value has been
9590 updated, and that we don't know the location of the death of the
9592 for (i = regno; i < endregno; i ++)
9595 reg_last_set[i] = insn;
9596 reg_last_set_value[i] = 0;
9597 reg_last_set_mode[i] = 0;
9598 reg_last_set_nonzero_bits[i] = 0;
9599 reg_last_set_sign_bit_copies[i] = 0;
9600 reg_last_death[i] = 0;
9603 /* Mark registers that are being referenced in this value. */
9605 update_table_tick (value);
9607 /* Now update the status of each register being set.
9608 If someone is using this register in this block, set this register
9609 to invalid since we will get confused between the two lives in this
9610 basic block. This makes using this register always invalid. In cse, we
9611 scan the table to invalidate all entries using this register, but this
9612 is too much work for us. */
9614 for (i = regno; i < endregno; i++)
9616 reg_last_set_label[i] = label_tick;
9617 if (value && reg_last_set_table_tick[i] == label_tick)
9618 reg_last_set_invalid[i] = 1;
9620 reg_last_set_invalid[i] = 0;
9623 /* The value being assigned might refer to X (like in "x++;"). In that
9624 case, we must replace it with (clobber (const_int 0)) to prevent
9626 if (value && ! get_last_value_validate (&value,
9627 reg_last_set_label[regno], 0))
9629 value = copy_rtx (value);
9630 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
9634 /* For the main register being modified, update the value, the mode, the
9635 nonzero bits, and the number of sign bit copies. */
9637 reg_last_set_value[regno] = value;
9641 subst_low_cuid = INSN_CUID (insn);
9642 reg_last_set_mode[regno] = GET_MODE (reg);
9643 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
9644 reg_last_set_sign_bit_copies[regno]
9645 = num_sign_bit_copies (value, GET_MODE (reg));
9649 /* Used for communication between the following two routines. */
9650 static rtx record_dead_insn;
9652 /* Called via note_stores from record_dead_and_set_regs to handle one
9653 SET or CLOBBER in an insn. */
9656 record_dead_and_set_regs_1 (dest, setter)
9659 if (GET_CODE (dest) == REG)
9661 /* If we are setting the whole register, we know its value. Otherwise
9662 show that we don't know the value. We can handle SUBREG in
9664 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
9665 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
9666 else if (GET_CODE (setter) == SET
9667 && GET_CODE (SET_DEST (setter)) == SUBREG
9668 && SUBREG_REG (SET_DEST (setter)) == dest
9669 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
9670 && subreg_lowpart_p (SET_DEST (setter)))
9671 record_value_for_reg (dest, record_dead_insn,
9672 gen_lowpart_for_combine (GET_MODE (dest),
9675 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
9677 else if (GET_CODE (dest) == MEM
9678 /* Ignore pushes, they clobber nothing. */
9679 && ! push_operand (dest, GET_MODE (dest)))
9680 mem_last_set = INSN_CUID (record_dead_insn);
9683 /* Update the records of when each REG was most recently set or killed
9684 for the things done by INSN. This is the last thing done in processing
9685 INSN in the combiner loop.
9687 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
9688 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
9689 and also the similar information mem_last_set (which insn most recently
9690 modified memory) and last_call_cuid (which insn was the most recent
9691 subroutine call). */
9694 record_dead_and_set_regs (insn)
9700 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
9702 if (REG_NOTE_KIND (link) == REG_DEAD
9703 && GET_CODE (XEXP (link, 0)) == REG)
9705 int regno = REGNO (XEXP (link, 0));
9707 = regno + (regno < FIRST_PSEUDO_REGISTER
9708 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
9711 for (i = regno; i < endregno; i++)
9712 reg_last_death[i] = insn;
9714 else if (REG_NOTE_KIND (link) == REG_INC)
9715 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
9718 if (GET_CODE (insn) == CALL_INSN)
9720 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
9721 if (call_used_regs[i])
9723 reg_last_set_value[i] = 0;
9724 reg_last_set_mode[i] = 0;
9725 reg_last_set_nonzero_bits[i] = 0;
9726 reg_last_set_sign_bit_copies[i] = 0;
9727 reg_last_death[i] = 0;
9730 last_call_cuid = mem_last_set = INSN_CUID (insn);
9733 record_dead_insn = insn;
9734 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
9737 /* Utility routine for the following function. Verify that all the registers
9738 mentioned in *LOC are valid when *LOC was part of a value set when
9739 label_tick == TICK. Return 0 if some are not.
9741 If REPLACE is non-zero, replace the invalid reference with
9742 (clobber (const_int 0)) and return 1. This replacement is useful because
9743 we often can get useful information about the form of a value (e.g., if
9744 it was produced by a shift that always produces -1 or 0) even though
9745 we don't know exactly what registers it was produced from. */
9748 get_last_value_validate (loc, tick, replace)
9754 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
9755 int len = GET_RTX_LENGTH (GET_CODE (x));
9758 if (GET_CODE (x) == REG)
9760 int regno = REGNO (x);
9761 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9762 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9765 for (j = regno; j < endregno; j++)
9766 if (reg_last_set_invalid[j]
9767 /* If this is a pseudo-register that was only set once, it is
9769 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
9770 && reg_last_set_label[j] > tick))
9773 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
9780 for (i = 0; i < len; i++)
9782 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
9783 /* Don't bother with these. They shouldn't occur anyway. */
9787 /* If we haven't found a reason for it to be invalid, it is valid. */
9791 /* Get the last value assigned to X, if known. Some registers
9792 in the value may be replaced with (clobber (const_int 0)) if their value
9793 is known longer known reliably. */
9802 /* If this is a non-paradoxical SUBREG, get the value of its operand and
9803 then convert it to the desired mode. If this is a paradoxical SUBREG,
9804 we cannot predict what values the "extra" bits might have. */
9805 if (GET_CODE (x) == SUBREG
9806 && subreg_lowpart_p (x)
9807 && (GET_MODE_SIZE (GET_MODE (x))
9808 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
9809 && (value = get_last_value (SUBREG_REG (x))) != 0)
9810 return gen_lowpart_for_combine (GET_MODE (x), value);
9812 if (GET_CODE (x) != REG)
9816 value = reg_last_set_value[regno];
9818 /* If we don't have a value or if it isn't for this basic block, return 0. */
9821 || (reg_n_sets[regno] != 1
9822 && reg_last_set_label[regno] != label_tick))
9825 /* If the value was set in a later insn that the ones we are processing,
9826 we can't use it even if the register was only set once, but make a quick
9827 check to see if the previous insn set it to something. This is commonly
9828 the case when the same pseudo is used by repeated insns. */
9830 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
9834 for (insn = prev_nonnote_insn (subst_insn);
9835 insn && INSN_CUID (insn) >= subst_low_cuid;
9836 insn = prev_nonnote_insn (insn))
9840 && (set = single_set (insn)) != 0
9841 && rtx_equal_p (SET_DEST (set), x))
9843 value = SET_SRC (set);
9845 /* Make sure that VALUE doesn't reference X. Replace any
9846 expliit references with a CLOBBER. If there are any remaining
9847 references (rare), don't use the value. */
9849 if (reg_mentioned_p (x, value))
9850 value = replace_rtx (copy_rtx (value), x,
9851 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
9853 if (reg_overlap_mentioned_p (x, value))
9860 /* If the value has all its registers valid, return it. */
9861 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
9864 /* Otherwise, make a copy and replace any invalid register with
9865 (clobber (const_int 0)). If that fails for some reason, return 0. */
9867 value = copy_rtx (value);
9868 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
9874 /* Return nonzero if expression X refers to a REG or to memory
9875 that is set in an instruction more recent than FROM_CUID. */
9878 use_crosses_set_p (x, from_cuid)
9884 register enum rtx_code code = GET_CODE (x);
9888 register int regno = REGNO (x);
9889 int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
9890 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9892 #ifdef PUSH_ROUNDING
9893 /* Don't allow uses of the stack pointer to be moved,
9894 because we don't know whether the move crosses a push insn. */
9895 if (regno == STACK_POINTER_REGNUM)
9898 for (;regno < endreg; regno++)
9899 if (reg_last_set[regno]
9900 && INSN_CUID (reg_last_set[regno]) > from_cuid)
9905 if (code == MEM && mem_last_set > from_cuid)
9908 fmt = GET_RTX_FORMAT (code);
9910 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9915 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9916 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
9919 else if (fmt[i] == 'e'
9920 && use_crosses_set_p (XEXP (x, i), from_cuid))
9926 /* Define three variables used for communication between the following
9929 static int reg_dead_regno, reg_dead_endregno;
9930 static int reg_dead_flag;
9932 /* Function called via note_stores from reg_dead_at_p.
9934 If DEST is within [reg_dead_rengno, reg_dead_endregno), set
9935 reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
9938 reg_dead_at_p_1 (dest, x)
9942 int regno, endregno;
9944 if (GET_CODE (dest) != REG)
9947 regno = REGNO (dest);
9948 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9949 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
9951 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
9952 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
9955 /* Return non-zero if REG is known to be dead at INSN.
9957 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
9958 referencing REG, it is dead. If we hit a SET referencing REG, it is
9959 live. Otherwise, see if it is live or dead at the start of the basic
9960 block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
9961 must be assumed to be always live. */
9964 reg_dead_at_p (reg, insn)
9970 /* Set variables for reg_dead_at_p_1. */
9971 reg_dead_regno = REGNO (reg);
9972 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
9973 ? HARD_REGNO_NREGS (reg_dead_regno,
9979 /* Check that reg isn't mentioned in NEWPAT_USED_REGS. */
9980 if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
9982 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
9983 if (TEST_HARD_REG_BIT (newpat_used_regs, i))
9987 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
9988 beginning of function. */
9989 for (; insn && GET_CODE (insn) != CODE_LABEL;
9990 insn = prev_nonnote_insn (insn))
9992 note_stores (PATTERN (insn), reg_dead_at_p_1);
9994 return reg_dead_flag == 1 ? 1 : 0;
9996 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
10000 /* Get the basic block number that we were in. */
10005 for (block = 0; block < n_basic_blocks; block++)
10006 if (insn == basic_block_head[block])
10009 if (block == n_basic_blocks)
10013 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
10014 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
10015 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
10021 /* Note hard registers in X that are used. This code is similar to
10022 that in flow.c, but much simpler since we don't care about pseudos. */
10025 mark_used_regs_combine (x)
10028 register RTX_CODE code = GET_CODE (x);
10029 register int regno;
10041 case ADDR_DIFF_VEC:
10044 /* CC0 must die in the insn after it is set, so we don't need to take
10045 special note of it here. */
10051 /* If we are clobbering a MEM, mark any hard registers inside the
10052 address as used. */
10053 if (GET_CODE (XEXP (x, 0)) == MEM)
10054 mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
10059 /* A hard reg in a wide mode may really be multiple registers.
10060 If so, mark all of them just like the first. */
10061 if (regno < FIRST_PSEUDO_REGISTER)
10063 /* None of this applies to the stack, frame or arg pointers */
10064 if (regno == STACK_POINTER_REGNUM
10065 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
10066 || regno == HARD_FRAME_POINTER_REGNUM
10068 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
10069 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
10071 || regno == FRAME_POINTER_REGNUM)
10074 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
10076 SET_HARD_REG_BIT (newpat_used_regs, regno + i);
10082 /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
10084 register rtx testreg = SET_DEST (x);
10086 while (GET_CODE (testreg) == SUBREG
10087 || GET_CODE (testreg) == ZERO_EXTRACT
10088 || GET_CODE (testreg) == SIGN_EXTRACT
10089 || GET_CODE (testreg) == STRICT_LOW_PART)
10090 testreg = XEXP (testreg, 0);
10092 if (GET_CODE (testreg) == MEM)
10093 mark_used_regs_combine (XEXP (testreg, 0));
10095 mark_used_regs_combine (SET_SRC (x));
10100 /* Recursively scan the operands of this expression. */
10103 register char *fmt = GET_RTX_FORMAT (code);
10105 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10108 mark_used_regs_combine (XEXP (x, i));
10109 else if (fmt[i] == 'E')
10113 for (j = 0; j < XVECLEN (x, i); j++)
10114 mark_used_regs_combine (XVECEXP (x, i, j));
10121 /* Remove register number REGNO from the dead registers list of INSN.
10123 Return the note used to record the death, if there was one. */
10126 remove_death (regno, insn)
10130 register rtx note = find_regno_note (insn, REG_DEAD, regno);
10134 reg_n_deaths[regno]--;
10135 remove_note (insn, note);
10141 /* For each register (hardware or pseudo) used within expression X, if its
10142 death is in an instruction with cuid between FROM_CUID (inclusive) and
10143 TO_INSN (exclusive), put a REG_DEAD note for that register in the
10144 list headed by PNOTES.
10146 This is done when X is being merged by combination into TO_INSN. These
10147 notes will then be distributed as needed. */
10150 move_deaths (x, from_cuid, to_insn, pnotes)
10156 register char *fmt;
10157 register int len, i;
10158 register enum rtx_code code = GET_CODE (x);
10162 register int regno = REGNO (x);
10163 register rtx where_dead = reg_last_death[regno];
10165 if (where_dead && INSN_CUID (where_dead) >= from_cuid
10166 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
10168 rtx note = remove_death (regno, where_dead);
10170 /* It is possible for the call above to return 0. This can occur
10171 when reg_last_death points to I2 or I1 that we combined with.
10172 In that case make a new note.
10174 We must also check for the case where X is a hard register
10175 and NOTE is a death note for a range of hard registers
10176 including X. In that case, we must put REG_DEAD notes for
10177 the remaining registers in place of NOTE. */
10179 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
10180 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
10181 != GET_MODE_SIZE (GET_MODE (x))))
10183 int deadregno = REGNO (XEXP (note, 0));
10185 = (deadregno + HARD_REGNO_NREGS (deadregno,
10186 GET_MODE (XEXP (note, 0))));
10187 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
10190 for (i = deadregno; i < deadend; i++)
10191 if (i < regno || i >= ourend)
10192 REG_NOTES (where_dead)
10193 = gen_rtx (EXPR_LIST, REG_DEAD,
10194 gen_rtx (REG, reg_raw_mode[i], i),
10195 REG_NOTES (where_dead));
10198 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
10200 XEXP (note, 1) = *pnotes;
10204 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
10206 reg_n_deaths[regno]++;
10212 else if (GET_CODE (x) == SET)
10214 rtx dest = SET_DEST (x);
10216 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
10218 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
10219 that accesses one word of a multi-word item, some
10220 piece of everything register in the expression is used by
10221 this insn, so remove any old death. */
10223 if (GET_CODE (dest) == ZERO_EXTRACT
10224 || GET_CODE (dest) == STRICT_LOW_PART
10225 || (GET_CODE (dest) == SUBREG
10226 && (((GET_MODE_SIZE (GET_MODE (dest))
10227 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
10228 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
10229 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
10231 move_deaths (dest, from_cuid, to_insn, pnotes);
10235 /* If this is some other SUBREG, we know it replaces the entire
10236 value, so use that as the destination. */
10237 if (GET_CODE (dest) == SUBREG)
10238 dest = SUBREG_REG (dest);
10240 /* If this is a MEM, adjust deaths of anything used in the address.
10241 For a REG (the only other possibility), the entire value is
10242 being replaced so the old value is not used in this insn. */
10244 if (GET_CODE (dest) == MEM)
10245 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
10249 else if (GET_CODE (x) == CLOBBER)
10252 len = GET_RTX_LENGTH (code);
10253 fmt = GET_RTX_FORMAT (code);
10255 for (i = 0; i < len; i++)
10260 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10261 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
10263 else if (fmt[i] == 'e')
10264 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
10268 /* Return 1 if X is the target of a bit-field assignment in BODY, the
10269 pattern of an insn. X must be a REG. */
10272 reg_bitfield_target_p (x, body)
10278 if (GET_CODE (body) == SET)
10280 rtx dest = SET_DEST (body);
10282 int regno, tregno, endregno, endtregno;
10284 if (GET_CODE (dest) == ZERO_EXTRACT)
10285 target = XEXP (dest, 0);
10286 else if (GET_CODE (dest) == STRICT_LOW_PART)
10287 target = SUBREG_REG (XEXP (dest, 0));
10291 if (GET_CODE (target) == SUBREG)
10292 target = SUBREG_REG (target);
10294 if (GET_CODE (target) != REG)
10297 tregno = REGNO (target), regno = REGNO (x);
10298 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
10299 return target == x;
10301 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
10302 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
10304 return endregno > tregno && regno < endtregno;
10307 else if (GET_CODE (body) == PARALLEL)
10308 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
10309 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
10315 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
10316 as appropriate. I3 and I2 are the insns resulting from the combination
10317 insns including FROM (I2 may be zero).
10319 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
10320 not need REG_DEAD notes because they are being substituted for. This
10321 saves searching in the most common cases.
10323 Each note in the list is either ignored or placed on some insns, depending
10324 on the type of note. */
10327 distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
10331 rtx elim_i2, elim_i1;
10333 rtx note, next_note;
10336 for (note = notes; note; note = next_note)
10338 rtx place = 0, place2 = 0;
10340 /* If this NOTE references a pseudo register, ensure it references
10341 the latest copy of that register. */
10342 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
10343 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
10344 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
10346 next_note = XEXP (note, 1);
10347 switch (REG_NOTE_KIND (note))
10350 /* Any clobbers for i3 may still exist, and so we must process
10351 REG_UNUSED notes from that insn.
10353 Any clobbers from i2 or i1 can only exist if they were added by
10354 recog_for_combine. In that case, recog_for_combine created the
10355 necessary REG_UNUSED notes. Trying to keep any original
10356 REG_UNUSED notes from these insns can cause incorrect output
10357 if it is for the same register as the original i3 dest.
10358 In that case, we will notice that the register is set in i3,
10359 and then add a REG_UNUSED note for the destination of i3, which
10360 is wrong. However, it is possible to have REG_UNUSED notes from
10361 i2 or i1 for register which were both used and clobbered, so
10362 we keep notes from i2 or i1 if they will turn into REG_DEAD
10365 /* If this register is set or clobbered in I3, put the note there
10366 unless there is one already. */
10367 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
10369 if (from_insn != i3)
10372 if (! (GET_CODE (XEXP (note, 0)) == REG
10373 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
10374 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
10377 /* Otherwise, if this register is used by I3, then this register
10378 now dies here, so we must put a REG_DEAD note here unless there
10380 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
10381 && ! (GET_CODE (XEXP (note, 0)) == REG
10382 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
10383 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
10385 PUT_REG_NOTE_KIND (note, REG_DEAD);
10393 /* These notes say something about results of an insn. We can
10394 only support them if they used to be on I3 in which case they
10395 remain on I3. Otherwise they are ignored.
10397 If the note refers to an expression that is not a constant, we
10398 must also ignore the note since we cannot tell whether the
10399 equivalence is still true. It might be possible to do
10400 slightly better than this (we only have a problem if I2DEST
10401 or I1DEST is present in the expression), but it doesn't
10402 seem worth the trouble. */
10404 if (from_insn == i3
10405 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
10410 case REG_NO_CONFLICT:
10412 /* These notes say something about how a register is used. They must
10413 be present on any use of the register in I2 or I3. */
10414 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
10417 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
10427 /* It is too much trouble to try to see if this note is still
10428 correct in all situations. It is better to simply delete it. */
10432 /* If the insn previously containing this note still exists,
10433 put it back where it was. Otherwise move it to the previous
10434 insn. Adjust the corresponding REG_LIBCALL note. */
10435 if (GET_CODE (from_insn) != NOTE)
10439 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
10440 place = prev_real_insn (from_insn);
10442 XEXP (tem, 0) = place;
10447 /* This is handled similarly to REG_RETVAL. */
10448 if (GET_CODE (from_insn) != NOTE)
10452 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
10453 place = next_real_insn (from_insn);
10455 XEXP (tem, 0) = place;
10460 /* If the register is used as an input in I3, it dies there.
10461 Similarly for I2, if it is non-zero and adjacent to I3.
10463 If the register is not used as an input in either I3 or I2
10464 and it is not one of the registers we were supposed to eliminate,
10465 there are two possibilities. We might have a non-adjacent I2
10466 or we might have somehow eliminated an additional register
10467 from a computation. For example, we might have had A & B where
10468 we discover that B will always be zero. In this case we will
10469 eliminate the reference to A.
10471 In both cases, we must search to see if we can find a previous
10472 use of A and put the death note there. */
10475 && GET_CODE (from_insn) == CALL_INSN
10476 && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
10478 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
10480 else if (i2 != 0 && next_nonnote_insn (i2) == i3
10481 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
10484 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
10487 /* If the register is used in both I2 and I3 and it dies in I3,
10488 we might have added another reference to it. If reg_n_refs
10489 was 2, bump it to 3. This has to be correct since the
10490 register must have been set somewhere. The reason this is
10491 done is because local-alloc.c treats 2 references as a
10494 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
10495 && reg_n_refs[REGNO (XEXP (note, 0))]== 2
10496 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
10497 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
10500 for (tem = prev_nonnote_insn (i3);
10501 tem && (GET_CODE (tem) == INSN
10502 || GET_CODE (tem) == CALL_INSN);
10503 tem = prev_nonnote_insn (tem))
10505 /* If the register is being set at TEM, see if that is all
10506 TEM is doing. If so, delete TEM. Otherwise, make this
10507 into a REG_UNUSED note instead. */
10508 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
10510 rtx set = single_set (tem);
10512 /* Verify that it was the set, and not a clobber that
10513 modified the register. */
10515 if (set != 0 && ! side_effects_p (SET_SRC (set))
10516 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
10518 /* Move the notes and links of TEM elsewhere.
10519 This might delete other dead insns recursively.
10520 First set the pattern to something that won't use
10523 PATTERN (tem) = pc_rtx;
10525 distribute_notes (REG_NOTES (tem), tem, tem,
10526 NULL_RTX, NULL_RTX, NULL_RTX);
10527 distribute_links (LOG_LINKS (tem));
10529 PUT_CODE (tem, NOTE);
10530 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
10531 NOTE_SOURCE_FILE (tem) = 0;
10535 PUT_REG_NOTE_KIND (note, REG_UNUSED);
10537 /* If there isn't already a REG_UNUSED note, put one
10539 if (! find_regno_note (tem, REG_UNUSED,
10540 REGNO (XEXP (note, 0))))
10545 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
10546 || (GET_CODE (tem) == CALL_INSN
10547 && find_reg_fusage (tem, USE, XEXP (note, 0))))
10554 /* If the register is set or already dead at PLACE, we needn't do
10555 anything with this note if it is still a REG_DEAD note.
10557 Note that we cannot use just `dead_or_set_p' here since we can
10558 convert an assignment to a register into a bit-field assignment.
10559 Therefore, we must also omit the note if the register is the
10560 target of a bitfield assignment. */
10562 if (place && REG_NOTE_KIND (note) == REG_DEAD)
10564 int regno = REGNO (XEXP (note, 0));
10566 if (dead_or_set_p (place, XEXP (note, 0))
10567 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
10569 /* Unless the register previously died in PLACE, clear
10570 reg_last_death. [I no longer understand why this is
10572 if (reg_last_death[regno] != place)
10573 reg_last_death[regno] = 0;
10577 reg_last_death[regno] = place;
10579 /* If this is a death note for a hard reg that is occupying
10580 multiple registers, ensure that we are still using all
10581 parts of the object. If we find a piece of the object
10582 that is unused, we must add a USE for that piece before
10583 PLACE and put the appropriate REG_DEAD note on it.
10585 An alternative would be to put a REG_UNUSED for the pieces
10586 on the insn that set the register, but that can't be done if
10587 it is not in the same block. It is simpler, though less
10588 efficient, to add the USE insns. */
10590 if (place && regno < FIRST_PSEUDO_REGISTER
10591 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
10594 = regno + HARD_REGNO_NREGS (regno,
10595 GET_MODE (XEXP (note, 0)));
10599 for (i = regno; i < endregno; i++)
10600 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
10601 && ! find_regno_fusage (place, USE, i))
10603 rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
10606 /* See if we already placed a USE note for this
10607 register in front of PLACE. */
10609 GET_CODE (PREV_INSN (p)) == INSN
10610 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
10612 if (rtx_equal_p (piece,
10613 XEXP (PATTERN (PREV_INSN (p)), 0)))
10622 = emit_insn_before (gen_rtx (USE, VOIDmode,
10625 REG_NOTES (use_insn)
10626 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
10627 REG_NOTES (use_insn));
10633 /* Check for the case where the register dying partially
10634 overlaps the register set by this insn. */
10636 for (i = regno; i < endregno; i++)
10637 if (dead_or_set_regno_p (place, i))
10645 /* Put only REG_DEAD notes for pieces that are
10646 still used and that are not already dead or set. */
10648 for (i = regno; i < endregno; i++)
10650 rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
10652 if ((reg_referenced_p (piece, PATTERN (place))
10653 || (GET_CODE (place) == CALL_INSN
10654 && find_reg_fusage (place, USE, piece)))
10655 && ! dead_or_set_p (place, piece)
10656 && ! reg_bitfield_target_p (piece,
10658 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
10660 REG_NOTES (place));
10670 /* Any other notes should not be present at this point in the
10677 XEXP (note, 1) = REG_NOTES (place);
10678 REG_NOTES (place) = note;
10680 else if ((REG_NOTE_KIND (note) == REG_DEAD
10681 || REG_NOTE_KIND (note) == REG_UNUSED)
10682 && GET_CODE (XEXP (note, 0)) == REG)
10683 reg_n_deaths[REGNO (XEXP (note, 0))]--;
10687 if ((REG_NOTE_KIND (note) == REG_DEAD
10688 || REG_NOTE_KIND (note) == REG_UNUSED)
10689 && GET_CODE (XEXP (note, 0)) == REG)
10690 reg_n_deaths[REGNO (XEXP (note, 0))]++;
10692 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
10693 XEXP (note, 0), REG_NOTES (place2));
10698 /* Similarly to above, distribute the LOG_LINKS that used to be present on
10699 I3, I2, and I1 to new locations. This is also called in one case to
10700 add a link pointing at I3 when I3's destination is changed. */
10703 distribute_links (links)
10706 rtx link, next_link;
10708 for (link = links; link; link = next_link)
10714 next_link = XEXP (link, 1);
10716 /* If the insn that this link points to is a NOTE or isn't a single
10717 set, ignore it. In the latter case, it isn't clear what we
10718 can do other than ignore the link, since we can't tell which
10719 register it was for. Such links wouldn't be used by combine
10722 It is not possible for the destination of the target of the link to
10723 have been changed by combine. The only potential of this is if we
10724 replace I3, I2, and I1 by I3 and I2. But in that case the
10725 destination of I2 also remains unchanged. */
10727 if (GET_CODE (XEXP (link, 0)) == NOTE
10728 || (set = single_set (XEXP (link, 0))) == 0)
10731 reg = SET_DEST (set);
10732 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
10733 || GET_CODE (reg) == SIGN_EXTRACT
10734 || GET_CODE (reg) == STRICT_LOW_PART)
10735 reg = XEXP (reg, 0);
10737 /* A LOG_LINK is defined as being placed on the first insn that uses
10738 a register and points to the insn that sets the register. Start
10739 searching at the next insn after the target of the link and stop
10740 when we reach a set of the register or the end of the basic block.
10742 Note that this correctly handles the link that used to point from
10743 I3 to I2. Also note that not much searching is typically done here
10744 since most links don't point very far away. */
10746 for (insn = NEXT_INSN (XEXP (link, 0));
10747 (insn && (this_basic_block == n_basic_blocks - 1
10748 || basic_block_head[this_basic_block + 1] != insn));
10749 insn = NEXT_INSN (insn))
10750 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
10751 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
10753 if (reg_referenced_p (reg, PATTERN (insn)))
10757 else if (GET_CODE (insn) == CALL_INSN
10758 && find_reg_fusage (insn, USE, reg))
10764 /* If we found a place to put the link, place it there unless there
10765 is already a link to the same insn as LINK at that point. */
10771 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
10772 if (XEXP (link2, 0) == XEXP (link, 0))
10777 XEXP (link, 1) = LOG_LINKS (place);
10778 LOG_LINKS (place) = link;
10780 /* Set added_links_insn to the earliest insn we added a
10782 if (added_links_insn == 0
10783 || INSN_CUID (added_links_insn) > INSN_CUID (place))
10784 added_links_insn = place;
10791 dump_combine_stats (file)
10796 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
10797 combine_attempts, combine_merges, combine_extras, combine_successes);
10801 dump_combine_total_stats (file)
10806 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
10807 total_attempts, total_merges, total_extras, total_successes);