/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 88, 92, 93, 94, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.
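
   For illustration, here is a hypothetical chain (the insns and register
   numbers are made up, not taken from any real dump):

	A: (set (reg 99) (mem (reg 98)))
	B: (set (reg 100) (plus (reg 99) (const_int 1)))
	C: (set (mem (reg 98)) (reg 100))

   C has a LOG_LINKS entry for B (the most recent SET of reg 100) and B
   has one for A, so we first try the pair (B, C) and, if that fails,
   the triple (A, B, C).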
   LOG_LINKS does not have links for use of the CC0.  They don't
   need to, because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.
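
   As a made-up example of such a substitution, combining

	(set (reg 99) (plus (reg 98) (const_int 4)))

   into

	(set (reg 100) (mult (reg 99) (reg 97)))

   produces the candidate

	(set (reg 100) (mult (plus (reg 98) (const_int 4)) (reg 97)))

   which is kept only if it matches some pattern in the machine
   description.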
   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.
   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
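
/* For illustration, a schematic CC0 set/use pair of the kind described
   above (not taken from any particular machine description):

	(set (cc0) (compare (reg 99) (const_int 0)))
	(set (pc) (if_then_else (lt (cc0) (const_int 0))
				(label_ref 23) (pc)))

   The jump is treated as if it had a LOG_LINKS entry for the compare.  */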
#include "config.h"
#ifdef __STDC__
#include <stdarg.h>
#else
#include <varargs.h>
#endif

/* Must precede rtl.h for FFS.  */
#include <stdio.h>

#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Define a default value for REVERSIBLE_CC_MODE.
   We can never assume that a condition code mode is safe to reverse unless
   the md tells us so.  */
#ifndef REVERSIBLE_CC_MODE
#define REVERSIBLE_CC_MODE(MODE) 0
#endif
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid_cuid	\
			 ? (abort (), 0)		\
			 : uid_cuid[INSN_UID (INSN)])
/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* This is the value of undobuf.num_undo when we started processing this
   substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

static int previous_num_undos;

/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.
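
   For instance (a made-up case), if we know that only the low 8 bits of
   (reg 99) can be nonzero, then

	(and:SI (reg 99) (const_int 255))

   is redundant and can be replaced by (reg 99) itself.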
   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value
   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
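
/* For illustration (a hypothetical scenario): if (reg 99) is set exactly
   once in the whole function (reg_n_sets[99] == 1), its entry in
   reg_last_set_value stays valid across labels, and (reg 99) may appear
   inside the recorded value of any other register.  A register that is
   set in several places, by contrast, is only usable until the next
   CODE_LABEL increments label_tick.  */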
/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;
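
/* For example (an illustrative case): if every assignment to (reg:SI 99)
   has the form (sign_extend:SI (... :QI ...)), the top 25 bits of the
   register always equal its sign bit, so reg_sign_bit_copies[99] would be
   25 on a machine with 8-bit QImode and 32-bit SImode.  */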
/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

#define MAX_UNDO 50

struct undobuf
{
  int num_undo;
  char *storage;
  struct undo undo[MAX_UNDO];
  rtx other_insn;
};

static struct undobuf undobuf;
/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL);					\
      if (undobuf.num_undo < MAX_UNDO)				\
	{							\
	  undobuf.undo[undobuf.num_undo].is_int = 0;		\
	  undobuf.undo[undobuf.num_undo].where.r = &INTO;	\
	  undobuf.undo[undobuf.num_undo].old_contents.r = INTO;	\
	  INTO = _new;						\
	  if (undobuf.undo[undobuf.num_undo].old_contents.r != INTO) \
	    undobuf.num_undo++;					\
	}							\
    } while (0)

/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.

   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { if (undobuf.num_undo < MAX_UNDO)				\
	{							\
	  undobuf.undo[undobuf.num_undo].is_int = 1;		\
	  undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO; \
	  undobuf.undo[undobuf.num_undo].old_contents.i = INTO;	\
	  INTO = NEWVAL;					\
	  if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO) \
	    undobuf.num_undo++;					\
	}							\
    } while (0)
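
/* A typical use (an illustrative call, not a quote from elsewhere in this
   file) is

	SUBST (XEXP (x, 0), new_rtx);

   which records the old XEXP (x, 0) in undobuf before overwriting it, so
   that undo_all can restore the original rtl if the combination fails.  */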
/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void init_reg_last_arrays PROTO(());
static void setup_incoming_promotions PROTO(());
static void set_nonzero_bits_and_sign_copies PROTO((rtx, rtx));
static int can_combine_p PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int combinable_i3pat PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
static rtx try_combine PROTO((rtx, rtx, rtx));
static void undo_all PROTO((void));
static rtx *find_split_point PROTO((rtx *, rtx));
static rtx subst PROTO((rtx, rtx, rtx, int, int));
static rtx simplify_rtx PROTO((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else PROTO((rtx));
static rtx simplify_set PROTO((rtx));
static rtx simplify_logical PROTO((rtx, int));
static rtx expand_compound_operation PROTO((rtx));
static rtx expand_field_assignment PROTO((rtx));
static rtx make_extraction PROTO((enum machine_mode, rtx, int, rtx, int,
				  int, int, int));
static rtx extract_left_shift PROTO((rtx, int));
static rtx make_compound_operation PROTO((rtx, enum rtx_code));
static int get_pos_from_mask PROTO((unsigned HOST_WIDE_INT, int *));
static rtx force_to_mode PROTO((rtx, enum machine_mode,
				unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond PROTO((rtx, rtx *, rtx *));
static rtx known_cond PROTO((rtx, enum rtx_code, rtx, rtx));
static rtx make_field_assignment PROTO((rtx));
static rtx apply_distributive_law PROTO((rtx));
static rtx simplify_and_const_int PROTO((rtx, enum machine_mode, rtx,
					 unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits PROTO((rtx, enum machine_mode));
static int num_sign_bit_copies PROTO((rtx, enum machine_mode));
static int merge_outer_ops PROTO((enum rtx_code *, HOST_WIDE_INT *,
				  enum rtx_code, HOST_WIDE_INT,
				  enum machine_mode, int *));
static rtx simplify_shift_const PROTO((rtx, enum rtx_code, enum machine_mode,
				       rtx, int));
static int recog_for_combine PROTO((rtx *, rtx, rtx *));
static rtx gen_lowpart_for_combine PROTO((enum machine_mode, rtx));
static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
				    ...));
static rtx gen_binary PROTO((enum rtx_code, enum machine_mode,
			     rtx, rtx));
static rtx gen_unary PROTO((enum rtx_code, enum machine_mode,
			    enum machine_mode, rtx));
static enum rtx_code simplify_comparison PROTO((enum rtx_code, rtx *, rtx *));
static int reversible_comparison_p PROTO((rtx));
static void update_table_tick PROTO((rtx));
static void record_value_for_reg PROTO((rtx, rtx, rtx));
static void record_dead_and_set_regs_1 PROTO((rtx, rtx));
static void record_dead_and_set_regs PROTO((rtx));
static int get_last_value_validate PROTO((rtx *, int, int));
static rtx get_last_value PROTO((rtx));
static int use_crosses_set_p PROTO((rtx, int));
static void reg_dead_at_p_1 PROTO((rtx, rtx));
static int reg_dead_at_p PROTO((rtx, rtx));
static void move_deaths PROTO((rtx, int, rtx, rtx *));
static int reg_bitfield_target_p PROTO((rtx, rtx));
static void distribute_notes PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links PROTO((rtx));
static void mark_used_regs_combine PROTO((rtx));
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.num_undo = previous_num_undos = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
  reg_last_set_label = (int *) alloca (nregs * sizeof (int));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) alloca (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  label_tick = 1;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && basic_block_head[this_basic_block + 1] == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
}
/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  int nregs = combine_max_regno;

  bzero ((char *) reg_last_death, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
  bzero ((char *) reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
  bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
}

/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      record_value_for_reg (reg, first,
			    gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				     GET_MODE (reg),
				     gen_rtx (CLOBBER, mode, const0_rtx)));
#endif
}
/* Called via note_stores.  If X is a pseudo that is used in more than
   one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
   set, record what bits are known zero.  If we are clobbering X,
   ignore this "set" because the clobbered value won't be used.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_n_sets[REGNO (x)] > 1
      && reg_basic_block[REGNO (x)] < 0
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! (basic_block_live_at_start[0][REGNO (x) / REGSET_ELT_BITS]
	    & ((REGSET_ELT_TYPE) 1 << (REGNO (x) % REGSET_ELT_BITS)))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	}
    }
}
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p, link;
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	      /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);
  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;
  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
#ifdef SMALL_REGISTER_CLASSES
		      || ! REG_USERVAR_P (src)
#endif
		      ))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && p != succ && volatile_refs_p (PATTERN (p)))
	return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	&& p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register that isn't a user
   variable.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */
static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */

      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  */
	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))
#ifdef SMALL_REGISTER_CLASSES
		  || (GET_CODE (src) != CALL && ! REG_USERVAR_P (inner_dest))
#endif
		  ))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Notes that we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Notes that I1, I2 or I3 is a MULT operation.  */
  int have_mult = 0;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;
  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.num_undo = previous_num_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  added_links_insn = 0;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
      && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
	  || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
	  || REG_USERVAR_P (SET_DEST (PATTERN (i3))))
#endif
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	 (parallel [(set (mem (reg 69)) ...)
		    (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
	if (GET_CODE (XVECEXP (p2, 0, i)) == SET
	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					SET_DEST (XVECEXP (p2, 0, i))))
	  break;

      if (i == XVECLEN (p2, 0))
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_cuid = INSN_CUID (i2);

	      added_sets_2 = added_sets_1 = 0;
	      i2dest = SET_SRC (PATTERN (i3));

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we
		 validate the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
		     SET_DEST (PATTERN (i3)));

	      newpat = p2;
	      i3_subst_into_i2 = 1;
	      goto validate_replacement;
	    }
    }
#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
	(set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
	  == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
	  break;

      if (i == 1)
	{
	  /* We make I1 with the same INSN_UID as I2.  This gives it
	     the same INSN_CUID for value tracking.  Our fake I1 will
	     never appear in the insn stream so giving it the same INSN_UID
	     as I2 will not cause a problem.  */

	  i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
			XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);

	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
		 SET_DEST (PATTERN (i1)));
	}
    }
#endif
  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
			  i1 && i2dest_in_i1src && i1_feeds_i3,
			  &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* See if any of the insns is a MULT operation.  Unless one is, we will
     reject a combination that is, since it must be slower.  Be conservative
     here.  */
  if (GET_CODE (i2src) == MULT
      || (i1 != 0 && GET_CODE (i1src) == MULT)
      || (GET_CODE (PATTERN (i3)) == SET
	  && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
    have_mult = 1;
  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
	mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
      if (REG_NOTE_KIND (link) == REG_INC
	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
	      || (i1 != 0
		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
	{
	  undo_all ();
	  return 0;
	}
#endif
  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
	   ? gen_rtx (SET, VOIDmode, i2dest, i2src)
	   : PATTERN (i2));

  if (added_sets_2)
    i2pat = copy_rtx (i2pat);
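
  /* To illustrate the self-reference hazard with a made-up case: if I2 is
     (set (reg 99) (reg 98)) and I1 is (set (reg 98) (plus (reg 99)
     (const_int 1))), substituting I1SRC into a shared I2PAT and then
     substituting again could build an rtx that contains itself; copying
     I2PAT first keeps each substitution independent.  */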
  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* It is possible that the source of I2 or I1 may be performing an
     unneeded operation, such as a ZERO_EXTEND of something that is known
     to have the high part zero.  Handle that case by letting subst look at
     the innermost one of them.

     Another way to do this would be to have a function that tries to
     simplify a single insn instead of merging two or more insns.  We don't
     do this because of the potential of infinite loops and because
     of the potential extra memory required.  However, doing it the way
     we are is a bit of a kludge and doesn't catch all cases.

     But only do this if -fexpensive-optimizations since it slows things down
     and doesn't usually win.  */

  if (flag_expensive_optimizations)
    {
      /* Pass pc_rtx so no substitutions are done, just simplifications.
	 The cases that we are interested in here do not involve the few
	 cases where is_replaced is checked.  */
      if (i1)
	{
	  subst_low_cuid = INSN_CUID (i1);
	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
	}
      else
	{
	  subst_low_cuid = INSN_CUID (i2);
	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
	}

      previous_num_undos = undobuf.num_undo;
    }
#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
      rtx *cc_use;
      enum machine_mode compare_mode;

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef EXTRA_CC_MODES
      /* See if a COMPARE with the operand we substituted in should be done
	 with the mode that is currently being used.  If not, do the same
	 processing we do in `subst' for a SET; namely, if the destination
	 is used only once, try to replace it with a register of the proper
	 mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
					&undobuf.other_insn))
	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
					      i2src, const0_rtx))
	      != GET_MODE (SET_DEST (newpat))))
	{
	  int regno = REGNO (SET_DEST (newpat));
	  rtx new_dest = gen_rtx (REG, compare_mode, regno);

	  if (regno < FIRST_PSEUDO_REGISTER
	      || (reg_n_sets[regno] == 1 && ! added_sets_2
		  && ! REG_USERVAR_P (SET_DEST (newpat))))
	    {
	      if (regno >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[regno], new_dest);

	      SUBST (SET_DEST (newpat), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      SUBST (SET_SRC (newpat),
		     gen_rtx_combine (COMPARE, compare_mode,
				      i2src, const0_rtx));
	    }
	  else
	    undobuf.other_insn = 0;
	}
#endif
    }
  else
#endif
    {
      n_occurrences = 0;		/* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
	 need to make a unique copy of I2SRC each time we substitute it
	 to avoid self-referential rtl.  */

      subst_low_cuid = INSN_CUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
		      ! i1_feeds_i3 && i1dest_in_i1src);
      previous_num_undos = undobuf.num_undo;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Before we can do this substitution, we must redo the test done
	 above (see detailed comments there) that ensures that I1DEST
	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */

      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
			      0, NULL_PTR))
	{
	  undo_all ();
	  return 0;
	}

      n_occurrences = 0;
      subst_low_cuid = INSN_CUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      previous_num_undos = undobuf.num_undo;
    }

  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
	      > 1))
      /* Fail if we tried to make a new register (we used to abort, but there's
	 really no reason to).  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER
      /* Fail if this new pattern is a MULT and we didn't have one before
	 at the outer level.  */
      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
	  && ! have_mult))
    {
      undo_all ();
      return 0;
    }
  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */

  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;

      if (GET_CODE (newpat) == PARALLEL)
	{
	  rtvec old = XVEC (newpat, 0);
	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
	  newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
	  bcopy ((char *) &old->elem[0], (char *) &XVECEXP (newpat, 0, 0),
		 sizeof (old->elem[0]) * old->num_elem);
	}
      else
	{
	  rtx old = newpat;
	  total_sets = 1 + added_sets_1 + added_sets_2;
	  newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
	  XVECEXP (newpat, 0, 0) = old;
	}

      if (added_sets_1)
	XVECEXP (newpat, 0, --total_sets)
	  = (GET_CODE (PATTERN (i1)) == PARALLEL
	     ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));

      if (added_sets_2)
	{
	  /* If there is no I1, use I2's body as is.  We used to also not do
	     the subst call below if I2 was substituted into I3,
	     but that could lose a simplification.  */
	  if (i1 == 0)
	    XVECEXP (newpat, 0, --total_sets) = i2pat;
	  else
	    /* See comment where i2pat is assigned.  */
	    XVECEXP (newpat, 0, --total_sets)
	      = subst (i2pat, i1dest, i1src, 0, 0);
	}
    }
1670 /* We come here when we are replacing a destination in I2 with the
1671 destination of I3. */
1672 validate_replacement:
1674 /* Note which hard regs this insn has as inputs. */
1675 mark_used_regs_combine (newpat);
1677 /* Is the result of combination a valid instruction? */
1678 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1680 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1681 the second SET's destination is a register that is unused. In that case,
1682 we just need the first SET. This can occur when simplifying a divmod
1683 insn. We *must* test for this case here because the code below that
1684 splits two independent SETs doesn't handle this case correctly when it
1685 updates the register status. Also check the case where the first
1686 SET's destination is unused. That would not cause incorrect code, but
1687 does cause an unneeded insn to remain. */
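/* Editor's illustration (assumed divmod shape, not from the sources):
   (parallel [(set (reg:SI 70) (div:SI (reg:SI 65) (reg:SI 66)))
              (set (reg:SI 71) (mod:SI (reg:SI 65) (reg:SI 66)))])
   where I3 has a REG_UNUSED note for (reg:SI 71) is reduced to the first
   SET alone, which a plain division pattern can then recognize.  */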
1689 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1690 && XVECLEN (newpat, 0) == 2
1691 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1692 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1693 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1694 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1695 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1696 && asm_noperands (newpat) < 0)
1698 newpat = XVECEXP (newpat, 0, 0);
1699 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1702 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1703 && XVECLEN (newpat, 0) == 2
1704 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1705 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1706 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1707 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1708 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1709 && asm_noperands (newpat) < 0)
1711 newpat = XVECEXP (newpat, 0, 1);
1712 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1715 /* If we were combining three insns and the result is a simple SET
1716 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1717 insns. There are two ways to do this. It can be split using a
1718 machine-specific method (like when you have an addition of a large
1719 constant) or by combine in the function find_split_point. */
1721 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1722 && asm_noperands (newpat) < 0)
1724 rtx m_split, *split;
1725 rtx ni2dest = i2dest;
1727 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1728 use I2DEST as a scratch register will help. In the latter case,
1729 convert I2DEST to the mode of the source of NEWPAT if we can. */
1731 m_split = split_insns (newpat, i3);
1733 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1734 inputs of NEWPAT. */
1736 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1737 possible to try that as a scratch reg. This would require adding
1738 more code to make it work though. */
1740 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
1742 /* If I2DEST is a hard register or the only use of a pseudo,
1743 we can change its mode. */
1744 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1745 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1746 && GET_CODE (i2dest) == REG
1747 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1748 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1749 && ! REG_USERVAR_P (i2dest))))
1750 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1753 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1754 gen_rtvec (2, newpat,
1761 if (m_split && GET_CODE (m_split) == SEQUENCE
1762 && XVECLEN (m_split, 0) == 2
1763 && (next_real_insn (i2) == i3
1764 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1768 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1769 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1771 i3set = single_set (XVECEXP (m_split, 0, 1));
1772 i2set = single_set (XVECEXP (m_split, 0, 0));
1774 /* In case we changed the mode of I2DEST, replace it in the
1775 pseudo-register table here. We can't do it above in case this
1776 code doesn't get executed and we do a split the other way. */
1778 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1779 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1781 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1783 /* If I2 or I3 has multiple SETs, we won't know how to track
1784 register status, so don't use these insns. */
1786 if (i2_code_number >= 0 && i2set && i3set)
1787 insn_code_number = recog_for_combine (&newi3pat, i3,
1790 if (insn_code_number >= 0)
1793 /* It is possible that both insns now set the destination of I3.
1794 If so, we must show an extra use of it. */
1796 if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
1797 && GET_CODE (SET_DEST (i2set)) == REG
1798 && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
1799 reg_n_sets[REGNO (SET_DEST (i2set))]++;
1802 /* If we can split it and use I2DEST, go ahead and see if that
1803 helps things be recognized. Verify that none of the registers
1804 are set between I2 and I3. */
1805 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1807 && GET_CODE (i2dest) == REG
1809 /* We need I2DEST in the proper mode. If it is a hard register
1810 or the only use of a pseudo, we can change its mode. */
1811 && (GET_MODE (*split) == GET_MODE (i2dest)
1812 || GET_MODE (*split) == VOIDmode
1813 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1814 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1815 && ! REG_USERVAR_P (i2dest)))
1816 && (next_real_insn (i2) == i3
1817 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1818 /* We can't overwrite I2DEST if its value is still used by
1819 NEWPAT.  */
1820 && ! reg_referenced_p (i2dest, newpat))
1822 rtx newdest = i2dest;
1823 enum rtx_code split_code = GET_CODE (*split);
1824 enum machine_mode split_mode = GET_MODE (*split);
1826 /* Get NEWDEST as a register in the proper mode. We have already
1827 validated that we can do this. */
1828 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
1830 newdest = gen_rtx (REG, split_mode, REGNO (i2dest));
1832 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1833 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1836 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1837 an ASHIFT. This can occur if it was inside a PLUS and hence
1838 appeared to be a memory address. This is a kludge. */
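/* Worked example (editor's note): since exact_log2 (8) == 3,
   (mult:SI (reg:SI 65) (const_int 8)) is rewritten here as
   (ashift:SI (reg:SI 65) (const_int 3)).  */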
1839 if (split_code == MULT
1840 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1841 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1843 SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
1844 XEXP (*split, 0), GEN_INT (i)));
1845 /* Update split_code because we may not have a multiply
1847 split_code = GET_CODE (*split);
1850 #ifdef INSN_SCHEDULING
1851 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1852 be written as a ZERO_EXTEND. */
1853 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
1854 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
1858 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1859 SUBST (*split, newdest);
1860 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1862 /* If the split point was a MULT and we didn't have one before,
1863 don't use one now. */
1864 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
1865 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1869 /* Check for a case where we loaded from memory in a narrow mode and
1870 then sign extended it, but we need both registers. In that case,
1871 we have a PARALLEL with both loads from the same memory location.
1872 We can split this into a load from memory followed by a register-register
1873 copy.  This saves at least one insn, more if register allocation can
1874 eliminate the copy.
1876 We cannot do this if the destination of the second assignment is
1877 a register that we have already assumed is zero-extended. Similarly
1878 for a SUBREG of such a register. */
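/* Editor's illustration (assumed shapes, not from the sources): the
   PARALLEL handled below looks like
      (parallel [(set (reg:SI 70) (sign_extend:SI (mem:HI A)))
                 (set (reg:HI 71) (mem:HI A))])
   and is rewritten as the first load placed in I2 followed by
      (set (reg:HI 71) (subreg:HI (reg:SI 70) 0))
   (or whatever lowpart form the target uses) in I3, so memory is read
   only once.  */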
1880 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1881 && GET_CODE (newpat) == PARALLEL
1882 && XVECLEN (newpat, 0) == 2
1883 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1884 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1885 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1886 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1887 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1888 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1890 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1891 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1892 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
1893 (GET_CODE (temp) == REG
1894 && reg_nonzero_bits[REGNO (temp)] != 0
1895 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1896 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1897 && (reg_nonzero_bits[REGNO (temp)]
1898 != GET_MODE_MASK (word_mode))))
1899 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
1900 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
1901 (GET_CODE (temp) == REG
1902 && reg_nonzero_bits[REGNO (temp)] != 0
1903 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1904 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1905 && (reg_nonzero_bits[REGNO (temp)]
1906 != GET_MODE_MASK (word_mode)))))
1907 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1908 SET_SRC (XVECEXP (newpat, 0, 1)))
1909 && ! find_reg_note (i3, REG_UNUSED,
1910 SET_DEST (XVECEXP (newpat, 0, 0))))
1914 newi2pat = XVECEXP (newpat, 0, 0);
1915 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
1916 newpat = XVECEXP (newpat, 0, 1);
1917 SUBST (SET_SRC (newpat),
1918 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
1919 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1920 if (i2_code_number >= 0)
1921 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1923 if (insn_code_number >= 0)
1928 /* If we will be able to accept this, we have made a change to the
1929 destination of I3.  This can invalidate a LOG_LINKS entry pointing
1930 to I3. No other part of combine.c makes such a transformation.
1932 The new I3 will have a destination that was previously the
1933 destination of I1 or I2 and which was used in I2 or I3.  Call
1934 distribute_links to make a LOG_LINK from the next use of
1935 that destination. */
1937 PATTERN (i3) = newpat;
1938 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
1940 /* I3 now uses what used to be its destination and which is
1941 now I2's destination. That means we need a LOG_LINK from
1942 I3 to I2. But we used to have one, so we still will.
1944 However, some later insn might be using I2's dest and have
1945 a LOG_LINK pointing at I3. We must remove this link.
1946 The simplest way to remove the link is to point it at I1,
1947 which we know will be a NOTE. */
1949 for (insn = NEXT_INSN (i3);
1950 insn && (this_basic_block == n_basic_blocks - 1
1951 || insn != basic_block_head[this_basic_block + 1]);
1952 insn = NEXT_INSN (insn))
1954 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1955 && reg_referenced_p (ni2dest, PATTERN (insn)))
1957 for (link = LOG_LINKS (insn); link;
1958 link = XEXP (link, 1))
1959 if (XEXP (link, 0) == i3)
1960 XEXP (link, 0) = i1;
1968 /* Similarly, check for a case where we have a PARALLEL of two independent
1969 SETs but we started with three insns. In this case, we can do the sets
1970 as two separate insns. This case occurs when some SET allows two
1971 other insns to combine, but the destination of that SET is still live. */
1973 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1974 && GET_CODE (newpat) == PARALLEL
1975 && XVECLEN (newpat, 0) == 2
1976 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1977 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1978 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1979 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1980 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1981 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1982 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1984 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1985 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1986 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1987 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1988 XVECEXP (newpat, 0, 0))
1989 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1990 XVECEXP (newpat, 0, 1)))
1992 newi2pat = XVECEXP (newpat, 0, 1);
1993 newpat = XVECEXP (newpat, 0, 0);
1995 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1996 if (i2_code_number >= 0)
1997 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2000 /* If it still isn't recognized, fail and change things back the way they
2001 were.  */
2002 if ((insn_code_number < 0
2003 /* Is the result a reasonable ASM_OPERANDS? */
2004 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2010 /* If we had to change another insn, make sure it is valid also. */
2011 if (undobuf.other_insn)
2013 rtx other_pat = PATTERN (undobuf.other_insn);
2014 rtx new_other_notes;
2017 CLEAR_HARD_REG_SET (newpat_used_regs);
2019 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2022 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2028 PATTERN (undobuf.other_insn) = other_pat;
2030 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2031 are still valid. Then add any non-duplicate notes added by
2032 recog_for_combine. */
2033 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2035 next = XEXP (note, 1);
2037 if (REG_NOTE_KIND (note) == REG_UNUSED
2038 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2040 if (GET_CODE (XEXP (note, 0)) == REG)
2041 reg_n_deaths[REGNO (XEXP (note, 0))]--;
2043 remove_note (undobuf.other_insn, note);
2047 for (note = new_other_notes; note; note = XEXP (note, 1))
2048 if (GET_CODE (XEXP (note, 0)) == REG)
2049 reg_n_deaths[REGNO (XEXP (note, 0))]++;
2051 distribute_notes (new_other_notes, undobuf.other_insn,
2052 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2055 /* We now know that we can do this combination. Merge the insns and
2056 update the status of registers and LOG_LINKS. */
2059 rtx i3notes, i2notes, i1notes = 0;
2060 rtx i3links, i2links, i1links = 0;
2063 /* Compute which registers we expect to eliminate. */
2064 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
2065 ? 0 : i2dest);
2066 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
2068 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2069 clear them.  */
2070 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2071 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2072 if (i1)
2073 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2075 /* Ensure that we do not have something that should not be shared but
2076 occurs multiple times in the new insns. Check this by first
2077 resetting all the `used' flags and then copying anything that is shared.
2079 reset_used_flags (i3notes);
2080 reset_used_flags (i2notes);
2081 reset_used_flags (i1notes);
2082 reset_used_flags (newpat);
2083 reset_used_flags (newi2pat);
2084 if (undobuf.other_insn)
2085 reset_used_flags (PATTERN (undobuf.other_insn));
2087 i3notes = copy_rtx_if_shared (i3notes);
2088 i2notes = copy_rtx_if_shared (i2notes);
2089 i1notes = copy_rtx_if_shared (i1notes);
2090 newpat = copy_rtx_if_shared (newpat);
2091 newi2pat = copy_rtx_if_shared (newi2pat);
2092 if (undobuf.other_insn)
2093 PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn));
2095 INSN_CODE (i3) = insn_code_number;
2096 PATTERN (i3) = newpat;
2097 if (undobuf.other_insn)
2098 INSN_CODE (undobuf.other_insn) = other_code_number;
2100 /* We had one special case above where I2 had more than one set and
2101 we replaced a destination of one of those sets with the destination
2102 of I3. In that case, we have to update LOG_LINKS of insns later
2103 in this basic block. Note that this (expensive) case is rare.
2105 Also, in this case, we must pretend that all REG_NOTEs for I2
2106 actually came from I3, so that REG_UNUSED notes from I2 will be
2107 properly handled. */
2109 if (i3_subst_into_i2)
2111 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2112 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2113 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2114 && ! find_reg_note (i2, REG_UNUSED,
2115 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2116 for (temp = NEXT_INSN (i2);
2117 temp && (this_basic_block == n_basic_blocks - 1
2118 || basic_block_head[this_basic_block + 1] != temp);
2119 temp = NEXT_INSN (temp))
2120 if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
2121 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2122 if (XEXP (link, 0) == i2)
2123 XEXP (link, 0) = i3;
2128 while (XEXP (link, 1))
2129 link = XEXP (link, 1);
2130 XEXP (link, 1) = i2notes;
2144 INSN_CODE (i2) = i2_code_number;
2145 PATTERN (i2) = newi2pat;
2149 PUT_CODE (i2, NOTE);
2150 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2151 NOTE_SOURCE_FILE (i2) = 0;
2158 PUT_CODE (i1, NOTE);
2159 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2160 NOTE_SOURCE_FILE (i1) = 0;
2163 /* Get death notes for everything that is now used in either I3 or
2164 I2 and used to die in a previous insn. */
2166 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
2168 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
2170 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2172 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2175 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2178 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2181 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2184 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2185 know these are REG_UNUSED and want them to go to the desired insn,
2186 so we always pass it as i3. We have not counted the notes in
2187 reg_n_deaths yet, so we need to do so now. */
2189 if (newi2pat && new_i2_notes)
2191 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2192 if (GET_CODE (XEXP (temp, 0)) == REG)
2193 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2195 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2200 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2201 if (GET_CODE (XEXP (temp, 0)) == REG)
2202 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2204 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2207 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
2208 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
2209 Show an additional death due to the REG_DEAD note we make here. If
2210 we discard it in distribute_notes, we will decrement it again. */
2214 if (GET_CODE (i3dest_killed) == REG)
2215 reg_n_deaths[REGNO (i3dest_killed)]++;
2217 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
2219 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2220 NULL_RTX, NULL_RTX);
2223 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
2224 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
2225 we passed I3 in that case, it might delete I2. */
2227 if (i2dest_in_i2src)
2229 if (GET_CODE (i2dest) == REG)
2230 reg_n_deaths[REGNO (i2dest)]++;
2232 if (newi2pat && reg_set_p (i2dest, newi2pat))
2233 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2234 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2236 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2237 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2238 NULL_RTX, NULL_RTX);
2241 if (i1dest_in_i1src)
2243 if (GET_CODE (i1dest) == REG)
2244 reg_n_deaths[REGNO (i1dest)]++;
2246 if (newi2pat && reg_set_p (i1dest, newi2pat))
2247 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2248 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2250 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2251 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2252 NULL_RTX, NULL_RTX);
2255 distribute_links (i3links);
2256 distribute_links (i2links);
2257 distribute_links (i1links);
2259 if (GET_CODE (i2dest) == REG)
2262 rtx i2_insn = 0, i2_val = 0, set;
2264 /* The insn that used to set this register doesn't exist, and
2265 this life of the register may not exist either. See if one of
2266 I3's links points to an insn that sets I2DEST. If it does,
2267 that is now the last known value for I2DEST. If we don't update
2268 this and I2 set the register to a value that depended on its old
2269 contents, we will get confused.  If this insn is used, things
2270 will be set correctly in combine_instructions. */
2272 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2273 if ((set = single_set (XEXP (link, 0))) != 0
2274 && rtx_equal_p (i2dest, SET_DEST (set)))
2275 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2277 record_value_for_reg (i2dest, i2_insn, i2_val);
2279 /* If the reg formerly set in I2 died only once and that was in I3,
2280 zero its use count so it won't make `reload' do any work. */
2281 if (! added_sets_2 && newi2pat == 0 && ! i2dest_in_i2src)
2283 regno = REGNO (i2dest);
2284 reg_n_sets[regno]--;
2285 if (reg_n_sets[regno] == 0
2286 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2287 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2288 reg_n_refs[regno] = 0;
2292 if (i1 && GET_CODE (i1dest) == REG)
2295 rtx i1_insn = 0, i1_val = 0, set;
2297 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2298 if ((set = single_set (XEXP (link, 0))) != 0
2299 && rtx_equal_p (i1dest, SET_DEST (set)))
2300 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2302 record_value_for_reg (i1dest, i1_insn, i1_val);
2304 regno = REGNO (i1dest);
2305 if (! added_sets_1 && ! i1dest_in_i1src)
2307 reg_n_sets[regno]--;
2308 if (reg_n_sets[regno] == 0
2309 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2310 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2311 reg_n_refs[regno] = 0;
2315 /* Update reg_nonzero_bits et al for any changes that may have been made
2316 to this insn.  */
2318 note_stores (newpat, set_nonzero_bits_and_sign_copies);
2319 if (newi2pat)
2320 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
2322 /* If I3 is now an unconditional jump, ensure that it has a
2323 BARRIER following it since it may have initially been a
2324 conditional jump. It may also be the last nonnote insn. */
2326 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2327 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2328 || GET_CODE (temp) != BARRIER))
2329 emit_barrier_after (i3);
2332 combine_successes++;
2334 if (added_links_insn
2335 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2336 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2337 return added_links_insn;
2339 return newi2pat ? i2 : i3;
2342 /* Undo all the modifications recorded in undobuf. */
2348 if (undobuf.num_undo > MAX_UNDO)
2349 undobuf.num_undo = MAX_UNDO;
2350 for (i = undobuf.num_undo - 1; i >= 0; i--)
2352 if (undobuf.undo[i].is_int)
2353 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2355 *undobuf.undo[i].where.r = undobuf.undo[i].old_contents.r;
2359 obfree (undobuf.storage);
2360 undobuf.num_undo = 0;
2363 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
2364 where we have an arithmetic expression and return that point.  LOC will
2365 be inside INSN.
2367 try_combine will call this function to see if an insn can be split into
2368 two insns.  */
2371 find_split_point (loc, insn)
2376 enum rtx_code code = GET_CODE (x);
2378 int len = 0, pos, unsignedp;
2381 /* First special-case some codes. */
2385 #ifdef INSN_SCHEDULING
2386 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2387 point.  */
2388 if (GET_CODE (SUBREG_REG (x)) == MEM)
2391 return find_split_point (&SUBREG_REG (x), insn);
2395 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2396 using LO_SUM and HIGH. */
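/* Editor's illustration: (mem:SI (symbol_ref "x")) becomes
      (mem:SI (lo_sum (high (symbol_ref "x")) (symbol_ref "x")))
   and the HIGH part is returned as the split point, so it can be loaded
   by a separate insn on RISC-style targets.  */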
2397 if (GET_CODE (XEXP (x, 0)) == CONST
2398 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2401 gen_rtx_combine (LO_SUM, Pmode,
2402 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2404 return &XEXP (XEXP (x, 0), 0);
2408 /* If we have a PLUS whose second operand is a constant and the
2409 address is not valid, perhaps we can split it up using
2410 the machine-specific way to split large constants. We use
2411 the first pseudo-reg (one of the virtual regs) as a placeholder;
2412 it will not remain in the result. */
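/* Editor's sketch (hypothetical target with 12-bit offsets): for the
   invalid address (plus (reg 65) (const_int 4100)), the MD splitter might
   produce two insns setting the placeholder, e.g. first to
   (plus (reg 65) (const_int 4096)) and then adding (const_int 4) to the
   placeholder; the code below then rejoins the two sources and splits at
   the inner PLUS, leaving a valid reg+4 address.  */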
2413 if (GET_CODE (XEXP (x, 0)) == PLUS
2414 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2415 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2417 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2418 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2421 /* This should have produced two insns, each of which sets our
2422 placeholder. If the source of the second is a valid address,
2423 we can put both sources together and make a split point
2424 in the middle.  */
2426 if (seq && XVECLEN (seq, 0) == 2
2427 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2428 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2429 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2430 && ! reg_mentioned_p (reg,
2431 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2432 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2433 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2434 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2435 && memory_address_p (GET_MODE (x),
2436 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2438 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2439 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2441 /* Replace the placeholder in SRC2 with SRC1. If we can
2442 find where in SRC2 it was placed, that can become our
2443 split point and we can replace this address with SRC2.
2444 Just try two obvious places. */
2446 src2 = replace_rtx (src2, reg, src1);
2448 if (XEXP (src2, 0) == src1)
2449 split = &XEXP (src2, 0);
2450 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2451 && XEXP (XEXP (src2, 0), 0) == src1)
2452 split = &XEXP (XEXP (src2, 0), 0);
2456 SUBST (XEXP (x, 0), src2);
2461 /* If that didn't work, perhaps the first operand is complex and
2462 needs to be computed separately, so make a split point there.
2463 This will occur on machines that just support REG + CONST
2464 and have a constant moved through some previous computation. */
2466 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2467 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2468 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2470 return &XEXP (XEXP (x, 0), 0);
2476 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2477 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2478 we need to put the operand into a register.  So split at that
2479 point.  */
2481 if (SET_DEST (x) == cc0_rtx
2482 && GET_CODE (SET_SRC (x)) != COMPARE
2483 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2484 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2485 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2486 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2487 return &SET_SRC (x);
2490 /* See if we can split SET_SRC as it stands. */
2491 split = find_split_point (&SET_SRC (x), insn);
2492 if (split && split != &SET_SRC (x))
2495 /* See if this is a bitfield assignment with everything constant. If
2496 so, this is an IOR of an AND, so split it into that. */
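/* Worked example (editor's note, assuming little-endian bit numbering):
   storing the constant 5 into an 8-bit field at bit 4 of (reg:SI 65),
      (set (zero_extract:SI (reg:SI 65) (const_int 8) (const_int 4))
           (const_int 5))
   has mask == 0xff and becomes
      (set (reg:SI 65)
           (ior:SI (and:SI (reg:SI 65) (const_int 0xfffff00f))
                   (const_int 0x50)))
   since ~(0xff << 4) masks the field out and 5 << 4 == 0x50.  */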
2497 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2498 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2499 <= HOST_BITS_PER_WIDE_INT)
2500 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2501 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2502 && GET_CODE (SET_SRC (x)) == CONST_INT
2503 && ((INTVAL (XEXP (SET_DEST (x), 1))
2504 + INTVAL (XEXP (SET_DEST (x), 2)))
2505 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2506 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2508 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2509 int len = INTVAL (XEXP (SET_DEST (x), 1));
2510 int src = INTVAL (SET_SRC (x));
2511 rtx dest = XEXP (SET_DEST (x), 0);
2512 enum machine_mode mode = GET_MODE (dest);
2513 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2515 if (BITS_BIG_ENDIAN)
2516 pos = GET_MODE_BITSIZE (mode) - len - pos;
2520 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2523 gen_binary (IOR, mode,
2524 gen_binary (AND, mode, dest,
2525 GEN_INT (~ (mask << pos)
2526 & GET_MODE_MASK (mode))),
2527 GEN_INT (src << pos)));
2529 SUBST (SET_DEST (x), dest);
2531 split = find_split_point (&SET_SRC (x), insn);
2532 if (split && split != &SET_SRC (x))
2536 /* Otherwise, see if this is an operation that we can split into two.
2537 If so, try to split that. */
2538 code = GET_CODE (SET_SRC (x));
2543 /* If we are AND'ing with a large constant that is only a single
2544 bit and the result is only being used in a context where we
2545 need to know if it is zero or non-zero, replace it with a bit
2546 extraction. This will avoid the large constant, which might
2547 have taken more than one insn to make. If the constant were
2548 not a valid argument to the AND but took only one insn to make,
2549 this is no worse, but if it took more than one insn, it will
2550 be better.  */
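/* Editor's illustration: if (reg 70) is set to
   (and:SI (reg:SI 65) (const_int 4096)) and its only use is the test
   (ne (reg:SI 70) (const_int 0)), then since exact_log2 (4096) == 12 the
   source is replaced by roughly
   (zero_extract:SI (reg:SI 65) (const_int 1) (const_int 12)),
   avoiding the large constant.  */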
2552 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2553 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2554 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2555 && GET_CODE (SET_DEST (x)) == REG
2556 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2557 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2558 && XEXP (*split, 0) == SET_DEST (x)
2559 && XEXP (*split, 1) == const0_rtx)
2562 make_extraction (GET_MODE (SET_DEST (x)),
2563 XEXP (SET_SRC (x), 0),
2564 pos, NULL_RTX, 1, 1, 0, 0));
2565 return find_split_point (loc, insn);
2570 inner = XEXP (SET_SRC (x), 0);
2572 len = GET_MODE_BITSIZE (GET_MODE (inner));
2578 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2579 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2581 inner = XEXP (SET_SRC (x), 0);
2582 len = INTVAL (XEXP (SET_SRC (x), 1));
2583 pos = INTVAL (XEXP (SET_SRC (x), 2));
2585 if (BITS_BIG_ENDIAN)
2586 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2587 unsignedp = (code == ZERO_EXTRACT);
2592 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2594 enum machine_mode mode = GET_MODE (SET_SRC (x));
2596 /* For unsigned, we have a choice of a shift followed by an
2597 AND or two shifts. Use two shifts for field sizes where the
2598 constant might be too large. We assume here that we can
2599 always at least get 8-bit constants in an AND insn, which is
2600 true for every current RISC. */
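/* Worked example (editor's note): extracting an unsigned 16-bit field at
   bit 8 of SImode X uses the two-shift form
      (lshiftrt:SI (ashift:SI X (const_int 8)) (const_int 16))
   (shift counts 32-16-8 and 32-16), while an 8-bit field at bit 4 can use
      (and:SI (lshiftrt:SI X (const_int 4)) (const_int 255)).  */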
2602 if (unsignedp && len <= 8)
2607 gen_rtx_combine (LSHIFTRT, mode,
2608 gen_lowpart_for_combine (mode, inner),
2610 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2612 split = find_split_point (&SET_SRC (x), insn);
2613 if (split && split != &SET_SRC (x))
2620 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2621 gen_rtx_combine (ASHIFT, mode,
2622 gen_lowpart_for_combine (mode, inner),
2623 GEN_INT (GET_MODE_BITSIZE (mode)
2625 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2627 split = find_split_point (&SET_SRC (x), insn);
2628 if (split && split != &SET_SRC (x))
2633 /* See if this is a simple operation with a constant as the second
2634 operand. It might be that this constant is out of range and hence
2635 could be used as a split point. */
2636 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2637 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2638 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2639 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2640 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2641 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2642 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2644 return &XEXP (SET_SRC (x), 1);
2646 /* Finally, see if this is a simple operation with its first operand
2647 not in a register. The operation might require this operand in a
2648 register, so return it as a split point. We can always do this
2649 because if the first operand were another operation, we would have
2650 already found it as a split point. */
2651 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2652 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2653 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2654 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2655 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2656 return &XEXP (SET_SRC (x), 0);
2662 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2663 it is better to write this as (not (ior A B)) so we can split it.
2664 Similarly for IOR. */
2665 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2668 gen_rtx_combine (NOT, GET_MODE (x),
2669 gen_rtx_combine (code == IOR ? AND : IOR,
2671 XEXP (XEXP (x, 0), 0),
2672 XEXP (XEXP (x, 1), 0))));
2673 return find_split_point (loc, insn);
2676 /* Many RISC machines have a large set of logical insns. If the
2677 second operand is a NOT, put it first so we will try to split the
2678 other operand first. */
2679 if (GET_CODE (XEXP (x, 1)) == NOT)
2681 rtx tem = XEXP (x, 0);
2682 SUBST (XEXP (x, 0), XEXP (x, 1));
2683 SUBST (XEXP (x, 1), tem);
2688 /* Otherwise, select our actions depending on our rtx class. */
2689 switch (GET_RTX_CLASS (code))
2691 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2693 split = find_split_point (&XEXP (x, 2), insn);
2696 /* ... fall through ... */
2700 split = find_split_point (&XEXP (x, 1), insn);
2703 /* ... fall through ... */
2705 /* Some machines have (and (shift ...) ...) insns. If X is not
2706 an AND, but XEXP (X, 0) is, use it as our split point. */
2707 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2708 return &XEXP (x, 0);
2710 split = find_split_point (&XEXP (x, 0), insn);
2716 /* Otherwise, we don't have a split point. */
2720 /* Throughout X, replace FROM with TO, and return the result.
2721 The result is TO if X is FROM;
2722 otherwise the result is X, but its contents may have been modified.
2723 If they were modified, a record was made in undobuf so that
2724 undo_all will (among other things) return X to its original state.
2726 If the number of changes necessary is too much to record to undo,
2727 the excess changes are not made, so the result is invalid.
2728 The changes already made can still be undone.
2729 undobuf.num_undo is incremented for such changes, so by testing that
2730 the caller can tell whether the result is valid.
2732 `n_occurrences' is incremented each time FROM is replaced.
2734 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2736 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2737 by copying if `n_occurrences' is non-zero. */
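/* For example (editor's note), try_combine above substitutes I2 into I3
   with
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
                      ! i1_feeds_i3 && i1dest_in_i1src);
   replacing every use of I2's destination by I2's source expression.  */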
2740 subst (x, from, to, in_dest, unique_copy)
2741 register rtx x, from, to;
2745 register enum rtx_code code = GET_CODE (x);
2746 enum machine_mode op0_mode = VOIDmode;
2748 register int len, i;
2751 /* Two expressions are equal if they are identical copies of a shared
2752 RTX or if they are both registers with the same register number
2753 and mode.  */
2755 #define COMBINE_RTX_EQUAL_P(X,Y) \
2757 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2758 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2760 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2763 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2766 /* If X and FROM are the same register but different modes, they will
2767 not have been seen as equal above. However, flow.c will make a
2768 LOG_LINKS entry for that case. If we do nothing, we will try to
2769 rerecognize our original insn and, when it succeeds, we will
2770 delete the feeding insn, which is incorrect.
2772 So force this insn not to match in this (rare) case. */
2773 if (! in_dest && code == REG && GET_CODE (from) == REG
2774 && REGNO (x) == REGNO (from))
2775 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2777 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2778 of which may contain things that can be combined. */
2779 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2782 /* It is possible to have a subexpression appear twice in the insn.
2783 Suppose that FROM is a register that appears within TO.
2784 Then, after that subexpression has been scanned once by `subst',
2785 the second time it is scanned, TO may be found. If we were
2786 to scan TO here, we would find FROM within it and create a
2787 self-referent rtl structure which is completely wrong. */
2788 if (COMBINE_RTX_EQUAL_P (x, to))
2791 len = GET_RTX_LENGTH (code);
2792 fmt = GET_RTX_FORMAT (code);
2794 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2795 set up to skip this common case. All other cases where we want to
2796 suppress replacing something inside a SET_SRC are handled via the
2797 IN_DEST operand.  */
2798 if (code == SET
2799 && (GET_CODE (SET_DEST (x)) == REG
2800 || GET_CODE (SET_DEST (x)) == CC0
2801 || GET_CODE (SET_DEST (x)) == PC))
2804 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2806 op0_mode = GET_MODE (XEXP (x, 0));
2808 for (i = 0; i < len; i++)
2813 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2815 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2817 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2822 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2824 /* If this substitution failed, this whole thing fails. */
2825 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2829 SUBST (XVECEXP (x, i, j), new);
2832 else if (fmt[i] == 'e')
2834 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2836 /* In general, don't install a subreg involving two modes not
2837 tieable. It can worsen register allocation, and can even
2838 make invalid reload insns, since the reg inside may need to
2839 be copied from in the outside mode, and that may be invalid
2840 if it is an fp reg copied in integer mode.
2842 We allow two exceptions to this: It is valid if it is inside
2843 another SUBREG and the mode of that SUBREG and the mode of
2844 the inside of TO is tieable and it is valid if X is a SET
2845 that copies FROM to CC0. */
2846 if (GET_CODE (to) == SUBREG
2847 && ! MODES_TIEABLE_P (GET_MODE (to),
2848 GET_MODE (SUBREG_REG (to)))
2849 && ! (code == SUBREG
2850 && MODES_TIEABLE_P (GET_MODE (x),
2851 GET_MODE (SUBREG_REG (to))))
2853 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
2856 return gen_rtx (CLOBBER, VOIDmode, const0_rtx);
2858 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2862 /* If we are in a SET_DEST, suppress most cases unless we
2863 have gone inside a MEM, in which case we want to
2864 simplify the address. We assume here that things that
2865 are actually part of the destination have their inner
2866 parts in the first expression. This is true for SUBREG,
2867 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2868 things aside from REG and MEM that should appear in a
2870 new = subst (XEXP (x, i), from, to,
2872 && (code == SUBREG || code == STRICT_LOW_PART
2873 || code == ZERO_EXTRACT))
2875 && i == 0), unique_copy);
2877 /* If we found that we will have to reject this combination,
2878 indicate that by returning the CLOBBER ourselves, rather than
2879 an expression containing it. This will speed things up as
2880 well as prevent accidents where two CLOBBERs are considered
2881 to be equal, thus producing an incorrect simplification. */
2883 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2886 SUBST (XEXP (x, i), new);
2890 /* Try to simplify X. If the simplification changed the code, it is likely
2891 that further simplification will help, so loop, but limit the number
2892 of repetitions that will be performed. */
2894 for (i = 0; i < 4; i++)
2896 /* If X is sufficiently simple, don't bother trying to do anything
2897 with it.  */
2898 if (code != CONST_INT && code != REG && code != CLOBBER)
2899 x = simplify_rtx (x, op0_mode, i == 3, in_dest);
2901 if (GET_CODE (x) == code)
2904 code = GET_CODE (x);
2906 /* We no longer know the original mode of operand 0 since we
2907 have changed the form of X.  */
2908 op0_mode = VOIDmode;
2914 /* Simplify X, a piece of RTL. We just operate on the expression at the
2915 outer level; call `subst' to simplify recursively. Return the new
2918 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
2919 will be the last iteration even if an expression with a code different from
2920 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
2923 simplify_rtx (x, op0_mode, last, in_dest)
2925 enum machine_mode op0_mode;
2929 enum rtx_code code = GET_CODE (x);
2930 enum machine_mode mode = GET_MODE (x);
2934 /* If this is a commutative operation, put a constant last and a complex
2935 expression first. We don't need to do this for comparisons here. */
2936 if (GET_RTX_CLASS (code) == 'c'
2937 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2938 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2939 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2940 || (GET_CODE (XEXP (x, 0)) == SUBREG
2941 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2942 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2945 SUBST (XEXP (x, 0), XEXP (x, 1));
2946 SUBST (XEXP (x, 1), temp);
2949 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
2950 sign extension of a PLUS with a constant, reverse the order of the sign
2951 extension and the addition.  Note that this is not the same as the original
2952 code, but overflow is undefined for signed values. Also note that the
2953 PLUS will have been partially moved "inside" the sign-extension, so that
2954 the first operand of X will really look like:
2955 (ashiftrt (plus (ashift A C4) C5) C4).
2956 We convert this to
2957 (plus (ashiftrt (ashift A C4) C2) C4)
2958 and replace the first operand of X with that expression. Later parts
2959 of this function may simplify the expression further.
2961 For example, if we start with (mult (sign_extend (plus A C1)) C2),
2962 we swap the SIGN_EXTEND and PLUS. Later code will apply the
2963 distributive law to produce (plus (mult (sign_extend X) C1) C3).
2965 We do this to simplify address expressions. */
2967 if ((code == PLUS || code == MINUS || code == MULT)
2968 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
2969 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
2970 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
2971 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
2972 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2973 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
2974 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
2975 && (temp = simplify_binary_operation (ASHIFTRT, mode,
2976 XEXP (XEXP (XEXP (x, 0), 0), 1),
2977 XEXP (XEXP (x, 0), 1))) != 0)
2980 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
2981 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
2982 INTVAL (XEXP (XEXP (x, 0), 1)));
2984 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
2985 INTVAL (XEXP (XEXP (x, 0), 1)));
2987 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
2990 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2991 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2992 things. Check for cases where both arms are testing the same
2995 Don't do anything if all operands are very simple. */
2997 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
2998 || GET_RTX_CLASS (code) == '<')
2999 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3000 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3001 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3003 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3004 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3005 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3007 || (GET_RTX_CLASS (code) == '1'
3008 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3009 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3010 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3013 rtx cond, true, false;
3015 cond = if_then_else_cond (x, &true, &false);
3018 rtx cop1 = const0_rtx;
3019 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3021 /* Simplify the alternative arms; this may collapse the true and
3022 false arms to store-flag values. */
3023 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3024 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3026 /* Restarting if we generate a store-flag expression will cause
3027 us to loop. Just drop through in this case. */
3029 /* If the result values are STORE_FLAG_VALUE and zero, we can
3030 just make the comparison operation. */
3031 if (true == const_true_rtx && false == const0_rtx)
3032 x = gen_binary (cond_code, mode, cond, cop1);
3033 else if (true == const0_rtx && false == const_true_rtx)
3034 x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3036 /* Likewise, we can make the negate of a comparison operation
3037 if the result values are - STORE_FLAG_VALUE and zero. */
3038 else if (GET_CODE (true) == CONST_INT
3039 && INTVAL (true) == - STORE_FLAG_VALUE
3040 && false == const0_rtx)
3041 x = gen_unary (NEG, mode, mode,
3042 gen_binary (cond_code, mode, cond, cop1));
3043 else if (GET_CODE (false) == CONST_INT
3044 && INTVAL (false) == - STORE_FLAG_VALUE
3045 && true == const0_rtx)
3046 x = gen_unary (NEG, mode, mode,
3047 gen_binary (reverse_condition (cond_code),
3050 return gen_rtx (IF_THEN_ELSE, mode,
3051 gen_binary (cond_code, VOIDmode, cond, cop1),
3054 code = GET_CODE (x);
3055 op0_mode = VOIDmode;
3059 /* Try to fold this expression in case we have constants that weren't
3060 present before.  */
3062 switch (GET_RTX_CLASS (code))
3065 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3068 temp = simplify_relational_operation (code, op0_mode,
3069 XEXP (x, 0), XEXP (x, 1));
3070 #ifdef FLOAT_STORE_FLAG_VALUE
3071 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3072 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3073 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3078 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3082 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3083 XEXP (x, 1), XEXP (x, 2));
3088 x = temp, code = GET_CODE (temp);
3090 /* First see if we can apply the inverse distributive law. */
3091 if (code == PLUS || code == MINUS
3092 || code == AND || code == IOR || code == XOR)
3094 x = apply_distributive_law (x);
3095 code = GET_CODE (x);
3098 /* If CODE is an associative operation not otherwise handled, see if we
3099 can associate some operands. This can win if they are constants or
3100 if they are logically related (i.e. (a & b) & a).  */
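/* Worked example (editor's note): for (plus (plus (reg 65) (const_int 3))
   (const_int 4)), the inner operands 3 and 4 fold to 7 via
   simplify_binary_operation, and the result below is
   (plus (reg 65) (const_int 7)).  */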
3101 if ((code == PLUS || code == MINUS
3102 || code == MULT || code == AND || code == IOR || code == XOR
3103 || code == DIV || code == UDIV
3104 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3105 && INTEGRAL_MODE_P (mode))
3107 if (GET_CODE (XEXP (x, 0)) == code)
3109 rtx other = XEXP (XEXP (x, 0), 0);
3110 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3111 rtx inner_op1 = XEXP (x, 1);
3114 /* Make sure we pass the constant operand if any as the second
3115 one if this is a commutative operation. */
3116 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3118 rtx tem = inner_op0;
3119 inner_op0 = inner_op1;
3122 inner = simplify_binary_operation (code == MINUS ? PLUS
3123 : code == DIV ? MULT
3124 : code == UDIV ? MULT
3125 : code,
3126 mode, inner_op0, inner_op1);
3128 /* For commutative operations, try the other pair if that one
3129 didn't simplify.  */
3130 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3132 other = XEXP (XEXP (x, 0), 1);
3133 inner = simplify_binary_operation (code, mode,
3134 XEXP (XEXP (x, 0), 0),
3139 return gen_binary (code, mode, other, inner);
3143 /* A little bit of algebraic simplification here. */
3147 /* Ensure that our address has any ASHIFTs converted to MULT in case
3148 address-recognizing predicates are called later. */
3149 temp = make_compound_operation (XEXP (x, 0), MEM);
3150 SUBST (XEXP (x, 0), temp);
3154 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3155 is paradoxical. If we can't do that safely, then it becomes
3156 something nonsensical so that this combination won't take place. */
3158 if (GET_CODE (SUBREG_REG (x)) == MEM
3159 && (GET_MODE_SIZE (mode)
3160 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3162 rtx inner = SUBREG_REG (x);
3163 int endian_offset = 0;
3164 /* Don't change the mode of the MEM
3165 if that would change the meaning of the address. */
3166 if (MEM_VOLATILE_P (SUBREG_REG (x))
3167 || mode_dependent_address_p (XEXP (inner, 0)))
3168 return gen_rtx (CLOBBER, mode, const0_rtx);
3170 if (BYTES_BIG_ENDIAN)
3172 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3173 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3174 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3175 endian_offset -= (UNITS_PER_WORD
3176 - GET_MODE_SIZE (GET_MODE (inner)));
3178 /* Note if the plus_constant doesn't make a valid address
3179 then this combination won't be accepted. */
3180 x = gen_rtx (MEM, mode,
3181 plus_constant (XEXP (inner, 0),
3182 (SUBREG_WORD (x) * UNITS_PER_WORD
3184 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3185 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3186 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3190 /* If we are in a SET_DEST, these other cases can't apply. */
3194 /* Changing mode twice with SUBREG => just change it once,
3195 or not at all if changing back to starting mode. */
3196 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3198 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3199 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3200 return SUBREG_REG (SUBREG_REG (x));
3202 SUBST_INT (SUBREG_WORD (x),
3203 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3204 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3207 /* SUBREG of a hard register => just change the register number
3208 and/or mode. If the hard register is not valid in that mode,
3209 suppress this combination. If the hard register is the stack,
3210 frame, or argument pointer, leave this as a SUBREG. */
3212 if (GET_CODE (SUBREG_REG (x)) == REG
3213 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3214 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3215 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3216 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3218 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3219 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3221 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3223 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3225 return gen_rtx (REG, mode,
3226 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3228 return gen_rtx (CLOBBER, mode, const0_rtx);
3231 /* For a constant, try to pick up the part we want. Handle a full
3232 word and low-order part. Only do this if we are narrowing
3233 the constant; if it is being widened, we have no idea what
3234 the extra bits will have been set to. */
3236 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3237 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3238 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
3239 && GET_MODE_CLASS (mode) == MODE_INT)
3241 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3247 /* If we want a subreg of a constant, at offset 0,
3248 take the low bits. On a little-endian machine, that's
3249 always valid. On a big-endian machine, it's valid
3250 only if the constant's mode fits in one word. */
3251 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
3252 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode)
3253 && (! WORDS_BIG_ENDIAN
3254 || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
3255 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3257 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3258 since we are saying that the high bits don't matter. */
3259 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3260 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3261 return SUBREG_REG (x);
3263 /* Note that we cannot do any narrowing for non-constants since
3264 we might have been counting on using the fact that some bits were
3265 zero. We now do this in the SET. */
3270 /* (not (plus X -1)) can become (neg X). */
3271 if (GET_CODE (XEXP (x, 0)) == PLUS
3272 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3273 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3275 /* Similarly, (not (neg X)) is (plus X -1). */
3276 if (GET_CODE (XEXP (x, 0)) == NEG)
3277 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3280 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3281 if (GET_CODE (XEXP (x, 0)) == XOR
3282 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3283 && (temp = simplify_unary_operation (NOT, mode,
3284 XEXP (XEXP (x, 0), 1),
3286 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3288 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3289 other than 1, but that is not valid. We could do a similar
3290 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3291 but this doesn't seem common enough to bother with. */
3292 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3293 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3294 return gen_rtx (ROTATE, mode, gen_unary (NOT, mode, mode, const1_rtx),
3295 XEXP (XEXP (x, 0), 1));
3297 if (GET_CODE (XEXP (x, 0)) == SUBREG
3298 && subreg_lowpart_p (XEXP (x, 0))
3299 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3300 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3301 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3302 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3304 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3306 x = gen_rtx (ROTATE, inner_mode,
3307 gen_unary (NOT, inner_mode, inner_mode, const1_rtx),
3308 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3309 return gen_lowpart_for_combine (mode, x);
3312 #if STORE_FLAG_VALUE == -1
3313 /* (not (comparison foo bar)) can be done by reversing the comparison
3315 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3316 && reversible_comparison_p (XEXP (x, 0)))
3317 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3318 mode, XEXP (XEXP (x, 0), 0),
3319 XEXP (XEXP (x, 0), 1));
3321 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3322 is (lt foo (const_int 0)), so we can perform the above
3323 simplification.  */
3325 if (XEXP (x, 1) == const1_rtx
3326 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3327 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3328 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3329 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3332 /* Apply De Morgan's laws to reduce number of patterns for machines
3333 with negating logical insns (and-not, nand, etc.). If result has
3334 only one NOT, put it first, since that is how the patterns are
3335 coded.  */
3337 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3339 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3341 if (GET_CODE (in1) == NOT)
3342 in1 = XEXP (in1, 0);
3344 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3346 if (GET_CODE (in2) == NOT)
3347 in2 = XEXP (in2, 0);
3348 else if (GET_CODE (in2) == CONST_INT
3349 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3350 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3352 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3354 if (GET_CODE (in2) == NOT)
3355 {
3356 rtx tem = in2;
3357 in2 = in1; in1 = tem;
3360 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3366 /* (neg (plus X 1)) can become (not X). */
3367 if (GET_CODE (XEXP (x, 0)) == PLUS
3368 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3369 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3371 /* Similarly, (neg (not X)) is (plus X 1). */
3372 if (GET_CODE (XEXP (x, 0)) == NOT)
3373 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
3375 /* (neg (minus X Y)) can become (minus Y X). */
3376 if (GET_CODE (XEXP (x, 0)) == MINUS
3377 && (! FLOAT_MODE_P (mode)
3378 /* x-y != -(y-x) with IEEE floating point. */
3379 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3381 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3382 XEXP (XEXP (x, 0), 0));
3384 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
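/* Editor's check: with A restricted to 0 or 1, A == 0 gives
   (neg 1) == -1 == 0 + -1, and A == 1 gives (neg 0) == 0 == 1 + -1,
   so the rewrite is exact.  */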
3385 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3386 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3387 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3389 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3390 if we can then eliminate the NEG (e.g.,
3391 if the operand is a constant). */
3393 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3395 temp = simplify_unary_operation (NEG, mode,
3396 XEXP (XEXP (x, 0), 0), mode);
3399 SUBST (XEXP (XEXP (x, 0), 0), temp);
3404 temp = expand_compound_operation (XEXP (x, 0));
3406 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3407 replaced by (lshiftrt X C). This will convert
3408 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
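/* The ASHIFTRT by the width minus 1 yields only 0 or -1, so its
   negation is 0 or 1, exactly what the logical shift produces; e.g.
   in SImode, (neg (ashiftrt X 31)) == (lshiftrt X 31). */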
3410 if (GET_CODE (temp) == ASHIFTRT
3411 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3412 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3413 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3414 INTVAL (XEXP (temp, 1)));
3416 /* If X has only a single bit that might be nonzero, say, bit I, convert
3417 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3418 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3419 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3420 or a SUBREG of one since we'd be making the expression more
3421 complex if it was just a register. */
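/* Illustration: if only bit 3 of TEMP can be nonzero, TEMP is 0 or 8
   in SImode, and (ashiftrt (ashift TEMP 28) 28) carries bit 3 into
   the sign bit and back, giving 0 or -8, i.e. (neg TEMP). */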
3423 if (GET_CODE (temp) != REG
3424 && ! (GET_CODE (temp) == SUBREG
3425 && GET_CODE (SUBREG_REG (temp)) == REG)
3426 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3428 rtx temp1 = simplify_shift_const
3429 (NULL_RTX, ASHIFTRT, mode,
3430 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3431 GET_MODE_BITSIZE (mode) - 1 - i),
3432 GET_MODE_BITSIZE (mode) - 1 - i);
3434 /* If all we did was surround TEMP with the two shifts, we
3435 haven't improved anything, so don't use it. Otherwise,
3436 we are better off with TEMP1. */
3437 if (GET_CODE (temp1) != ASHIFTRT
3438 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3439 || XEXP (XEXP (temp1, 0), 0) != temp)
3444 case FLOAT_TRUNCATE:
3445 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3446 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3447 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3448 return XEXP (XEXP (x, 0), 0);
3450 /* (float_truncate:SF (OP:DF (float_extend:DF foo:SF))) is
3451 (OP:SF foo:SF) if OP is NEG or ABS. */
3452 if ((GET_CODE (XEXP (x, 0)) == ABS
3453 || GET_CODE (XEXP (x, 0)) == NEG)
3454 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3455 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3456 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3457 XEXP (XEXP (XEXP (x, 0), 0), 0));
3459 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3460 is (float_truncate:SF X). */
3461 if (GET_CODE (XEXP (x, 0)) == SUBREG
3462 && subreg_lowpart_p (XEXP (x, 0))
3463 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3464 return SUBREG_REG (XEXP (x, 0));
3469 /* Convert (compare FOO (const_int 0)) to FOO unless this machine
3470 does not use cc0, in which case we want to leave it as a COMPARE
3471 so we can distinguish it from a register-register copy. */
3472 if (XEXP (x, 1) == const0_rtx)
3475 /* In IEEE floating point, x-0 is not the same as x. */
3476 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3477 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3479 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3485 /* (const (const X)) can become (const X). Do it this way rather than
3486 returning the inner CONST since CONST can be shared with a REG_EQUAL note. */
3488 if (GET_CODE (XEXP (x, 0)) == CONST)
3489 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3494 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3495 can add in an offset. find_split_point will split this address up
3496 again if it doesn't match. */
3497 if (GET_CODE (XEXP (x, 0)) == HIGH
3498 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3504 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3505 outermost. That's because that's the way indexed addresses are
3506 supposed to appear. This code used to check many more cases, but
3507 they are now checked elsewhere. */
3508 if (GET_CODE (XEXP (x, 0)) == PLUS
3509 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3510 return gen_binary (PLUS, mode,
3511 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3513 XEXP (XEXP (x, 0), 1));
3515 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3516 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3517 bit-field and can be replaced by either a sign_extend or a
3518 sign_extract. The `and' may be a zero_extend. */
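/* A concrete instance in SImode: ((X & 255) ^ 128) - 128 sign-extends
   the low byte of X (here c == 128 and the AND mask is 255); the
   shifts built below compute the equivalent
   (ashiftrt (ashift X 24) 24). */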
3519 if (GET_CODE (XEXP (x, 0)) == XOR
3520 && GET_CODE (XEXP (x, 1)) == CONST_INT
3521 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3522 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3523 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3524 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3525 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3526 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3527 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3528 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3529 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3530 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3532 return simplify_shift_const
3533 (NULL_RTX, ASHIFTRT, mode,
3534 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3535 XEXP (XEXP (XEXP (x, 0), 0), 0),
3536 GET_MODE_BITSIZE (mode) - (i + 1)),
3537 GET_MODE_BITSIZE (mode) - (i + 1));
3539 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3540 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3541 is 1. This produces better code than the alternative immediately below. */
3543 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3544 && reversible_comparison_p (XEXP (x, 0))
3545 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3546 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
3548 gen_unary (NEG, mode, mode,
3549 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3550 mode, XEXP (XEXP (x, 0), 0),
3551 XEXP (XEXP (x, 0), 1)));
3553 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3554 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3555 the bitsize of the mode - 1. This allows simplification of
3556 "a = (b & 8) == 0;" */
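/* With X known to be 0 or 1 in SImode, X - 1 is -1 or 0; (xor X 1)
   inverts the low bit, and the shift pair smears it through the word
   to give that same -1 or 0. */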
3557 if (XEXP (x, 1) == constm1_rtx
3558 && GET_CODE (XEXP (x, 0)) != REG
3559 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3560 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3561 && nonzero_bits (XEXP (x, 0), mode) == 1)
3562 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
3563 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3564 gen_rtx_combine (XOR, mode,
3565 XEXP (x, 0), const1_rtx),
3566 GET_MODE_BITSIZE (mode) - 1),
3567 GET_MODE_BITSIZE (mode) - 1);
3569 /* If we are adding two things that have no bits in common, convert
3570 the addition into an IOR. This will often be further simplified,
3571 for example in cases like ((a & 1) + (a & 2)), which can become a & 3. */
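/* Addition and inclusive-or agree exactly when no carry can be
   generated, i.e. when the operands have no possibly-nonzero bits in
   common. */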
3574 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3575 && (nonzero_bits (XEXP (x, 0), mode)
3576 & nonzero_bits (XEXP (x, 1), mode)) == 0)
3577 return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3581 #if STORE_FLAG_VALUE == 1
3582 /* (minus 1 (comparison foo bar)) can be done by reversing the comparison code if valid. */
3584 if (XEXP (x, 0) == const1_rtx
3585 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3586 && reversible_comparison_p (XEXP (x, 1)))
3587 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3588 mode, XEXP (XEXP (x, 1), 0),
3589 XEXP (XEXP (x, 1), 1));
3592 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3593 (and <foo> (const_int pow2-1)) */
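/* For instance, X - (X & -8) == X & 7: subtracting away everything
   above the low-order three bits leaves the remainder of X modulo 8. */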
3594 if (GET_CODE (XEXP (x, 1)) == AND
3595 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3596 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3597 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3598 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3599 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3601 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for integral modes. */
3603 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
3604 return gen_binary (MINUS, mode,
3605 gen_binary (MINUS, mode, XEXP (x, 0),
3606 XEXP (XEXP (x, 1), 0)),
3607 XEXP (XEXP (x, 1), 1));
3611 /* If we have (mult (plus A B) C), apply the distributive law and then
3612 the inverse distributive law to see if things simplify. This
3613 occurs mostly in addresses, often when unrolling loops. */
3615 if (GET_CODE (XEXP (x, 0)) == PLUS)
3617 x = apply_distributive_law
3618 (gen_binary (PLUS, mode,
3619 gen_binary (MULT, mode,
3620 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3621 gen_binary (MULT, mode,
3622 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3624 if (GET_CODE (x) != MULT)
3630 /* If this is a divide by a power of two, treat it as a shift if
3631 its first operand is a shift. */
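/* E.g., an unsigned divide (udiv (lshiftrt X 2) (const_int 4)) becomes
   (lshiftrt X 4); simplify_shift_const merges the two shift counts. */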
3632 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3633 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3634 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3635 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3636 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3637 || GET_CODE (XEXP (x, 0)) == ROTATE
3638 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3639 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3643 case GT: case GTU: case GE: case GEU:
3644 case LT: case LTU: case LE: case LEU:
3645 /* If the first operand is a condition code, we can't do anything with it. */
3647 if (GET_CODE (XEXP (x, 0)) == COMPARE
3648 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3650 && XEXP (x, 0) != cc0_rtx
3654 rtx op0 = XEXP (x, 0);
3655 rtx op1 = XEXP (x, 1);
3656 enum rtx_code new_code;
3658 if (GET_CODE (op0) == COMPARE)
3659 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3661 /* Simplify our comparison, if possible. */
3662 new_code = simplify_comparison (code, &op0, &op1);
3664 #if STORE_FLAG_VALUE == 1
3665 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3666 if only the low-order bit is possibly nonzero in X (such as when
3667 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
3668 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
3669 known to be either 0 or -1, NE becomes a NEG and EQ becomes (plus X 1).
3672 Remove any ZERO_EXTRACT we made when thinking this was a
3673 comparison. It may now be simpler to use, e.g., an AND. If a
3674 ZERO_EXTRACT is indeed appropriate, it will be placed back by
3675 the call to make_compound_operation in the SET case. */
3677 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3678 && op1 == const0_rtx
3679 && nonzero_bits (op0, mode) == 1)
3680 return gen_lowpart_for_combine (mode,
3681 expand_compound_operation (op0));
3683 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3684 && op1 == const0_rtx
3685 && (num_sign_bit_copies (op0, mode)
3686 == GET_MODE_BITSIZE (mode)))
3688 op0 = expand_compound_operation (op0);
3689 return gen_unary (NEG, mode, mode,
3690 gen_lowpart_for_combine (mode, op0));
3693 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3694 && op1 == const0_rtx
3695 && nonzero_bits (op0, mode) == 1)
3697 op0 = expand_compound_operation (op0);
3698 return gen_binary (XOR, mode,
3699 gen_lowpart_for_combine (mode, op0),
3703 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3704 && op1 == const0_rtx
3705 && (num_sign_bit_copies (op0, mode)
3706 == GET_MODE_BITSIZE (mode)))
3708 op0 = expand_compound_operation (op0);
3709 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
3713 #if STORE_FLAG_VALUE == -1
3714 /* If STORE_FLAG_VALUE is -1, we have cases similar to those above. */
3716 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3717 && op1 == const0_rtx
3718 && (num_sign_bit_copies (op0, mode)
3719 == GET_MODE_BITSIZE (mode)))
3720 return gen_lowpart_for_combine (mode,
3721 expand_compound_operation (op0));
3723 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3724 && op1 == const0_rtx
3725 && nonzero_bits (op0, mode) == 1)
3727 op0 = expand_compound_operation (op0);
3728 return gen_unary (NEG, mode, mode,
3729 gen_lowpart_for_combine (mode, op0));
3732 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3733 && op1 == const0_rtx
3734 && (num_sign_bit_copies (op0, mode)
3735 == GET_MODE_BITSIZE (mode)))
3737 op0 = expand_compound_operation (op0);
3738 return gen_unary (NOT, mode, mode,
3739 gen_lowpart_for_combine (mode, op0));
3742 /* If X is 0/1, (eq X 0) is X-1. */
3743 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3744 && op1 == const0_rtx
3745 && nonzero_bits (op0, mode) == 1)
3747 op0 = expand_compound_operation (op0);
3748 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
3752 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3753 one bit that might be nonzero, we can convert (ne x 0) to
3754 (ashift x c) where C puts the bit in the sign bit. Remove any
3755 AND with STORE_FLAG_VALUE when we are done, since we are only
3756 going to test the sign bit. */
3757 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3758 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3759 && (STORE_FLAG_VALUE
3760 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3761 && op1 == const0_rtx
3762 && mode == GET_MODE (op0)
3763 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
3765 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3766 expand_compound_operation (op0),
3767 GET_MODE_BITSIZE (mode) - 1 - i);
3768 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3774 /* If the code changed, return a whole new comparison. */
3775 if (new_code != code)
3776 return gen_rtx_combine (new_code, mode, op0, op1);
3778 /* Otherwise, keep this operation, but maybe change its operands.
3779 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3780 SUBST (XEXP (x, 0), op0);
3781 SUBST (XEXP (x, 1), op1);
3786 return simplify_if_then_else (x);
3792 /* If we are processing SET_DEST, we are done. */
3796 return expand_compound_operation (x);
3799 return simplify_set (x);
3804 return simplify_logical (x, last);
3807 /* (abs (neg <foo>)) -> (abs <foo>) */
3808 if (GET_CODE (XEXP (x, 0)) == NEG)
3809 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3811 /* If operand is something known to be positive, ignore the ABS. */
3812 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
3813 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
3814 <= HOST_BITS_PER_WIDE_INT)
3815 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3816 & ((HOST_WIDE_INT) 1
3817 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
3822 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
3823 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
3824 return gen_rtx_combine (NEG, mode, XEXP (x, 0));
3829 /* (ffs (*_extend <X>)) = (ffs <X>) */
3830 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3831 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3832 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3836 /* (float (sign_extend <X>)) = (float <X>). */
3837 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
3838 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3846 /* If this is a shift by a constant amount, simplify it. */
3847 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3848 return simplify_shift_const (x, code, mode, XEXP (x, 0),
3849 INTVAL (XEXP (x, 1)));
3851 #ifdef SHIFT_COUNT_TRUNCATED
3852 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
3854 force_to_mode (XEXP (x, 1), GET_MODE (x),
3856 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
3867 /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
3870 simplify_if_then_else (x)
3873 enum machine_mode mode = GET_MODE (x);
3874 rtx cond = XEXP (x, 0);
3875 rtx true = XEXP (x, 1);
3876 rtx false = XEXP (x, 2);
3877 enum rtx_code true_code = GET_CODE (cond);
3878 int comparison_p = GET_RTX_CLASS (true_code) == '<';
3882 /* Simplify storing of the truth value. */
3883 if (comparison_p && true == const_true_rtx && false == const0_rtx)
3884 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
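/* E.g., (if_then_else (lt A B) (const_int STORE_FLAG_VALUE)
   (const_int 0)) is just the comparison (lt A B) itself, since a
   comparison yields exactly STORE_FLAG_VALUE or zero. */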
3886 /* Also when the truth value has to be reversed. */
3887 if (comparison_p && reversible_comparison_p (cond)
3888 && true == const0_rtx && false == const_true_rtx)
3889 return gen_binary (reverse_condition (true_code),
3890 mode, XEXP (cond, 0), XEXP (cond, 1));
3892 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
3893 in it is being compared against certain values. Get the true and false
3894 comparisons and see if that says anything about the value of each arm. */
3896 if (comparison_p && reversible_comparison_p (cond)
3897 && GET_CODE (XEXP (cond, 0)) == REG)
3900 rtx from = XEXP (cond, 0);
3901 enum rtx_code false_code = reverse_condition (true_code);
3902 rtx true_val = XEXP (cond, 1);
3903 rtx false_val = true_val;
3906 /* If FALSE_CODE is EQ, swap the codes and arms. */
3908 if (false_code == EQ)
3910 swapped = 1, true_code = EQ, false_code = NE;
3911 temp = true, true = false, false = temp;
3914 /* If we are comparing against zero and the expression being tested has
3915 only a single bit that might be nonzero, that is its value when it is
3916 not equal to zero. Similarly if it is known to be -1 or 0. */
3918 if (true_code == EQ && true_val == const0_rtx
3919 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
3920 false_code = EQ, false_val = GEN_INT (nzb);
3921 else if (true_code == EQ && true_val == const0_rtx
3922 && (num_sign_bit_copies (from, GET_MODE (from))
3923 == GET_MODE_BITSIZE (GET_MODE (from))))
3924 false_code = EQ, false_val = constm1_rtx;
3926 /* Now simplify an arm if we know the value of the register in the
3927 branch and it is used in the arm. Be careful due to the potential
3928 of locally-shared RTL. */
3930 if (reg_mentioned_p (from, true))
3931 true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
3932 pc_rtx, pc_rtx, 0, 0);
3933 if (reg_mentioned_p (from, false))
3934 false = subst (known_cond (copy_rtx (false), false_code,
3936 pc_rtx, pc_rtx, 0, 0);
3938 SUBST (XEXP (x, 1), swapped ? false : true);
3939 SUBST (XEXP (x, 2), swapped ? true : false);
3941 true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
3944 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3945 reversed, do so to avoid needing two sets of patterns for
3946 subtract-and-branch insns. Similarly if we have a constant in the true
3947 arm, if the false arm is the same as the first operand of the comparison, or
3948 if the false arm is more complicated than the true arm.
3950 if (comparison_p && reversible_comparison_p (cond)
3952 || (CONSTANT_P (true)
3953 && GET_CODE (false) != CONST_INT && false != pc_rtx)
3954 || true == const0_rtx
3955 || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
3956 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
3957 || (GET_CODE (true) == SUBREG
3958 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
3959 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
3960 || reg_mentioned_p (true, false)
3961 || rtx_equal_p (false, XEXP (cond, 0))))
3963 true_code = reverse_condition (true_code);
3965 gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
3968 SUBST (XEXP (x, 1), false);
3969 SUBST (XEXP (x, 2), true);
3971 temp = true, true = false, false = temp, cond = XEXP (x, 0);
3974 /* If the two arms are identical, we don't need the comparison. */
3976 if (rtx_equal_p (true, false) && ! side_effects_p (cond))
3979 /* Look for cases where we have (abs x) or (neg (abs X)). */
3981 if (GET_MODE_CLASS (mode) == MODE_INT
3982 && GET_CODE (false) == NEG
3983 && rtx_equal_p (true, XEXP (false, 0))
3985 && rtx_equal_p (true, XEXP (cond, 0))
3986 && ! side_effects_p (true))
3991 return gen_unary (ABS, mode, mode, true);
3994 return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
3997 /* Look for MIN or MAX. */
3999 if ((! FLOAT_MODE_P (mode) || flag_fast_math)
4001 && rtx_equal_p (XEXP (cond, 0), true)
4002 && rtx_equal_p (XEXP (cond, 1), false)
4003 && ! side_effects_p (cond))
4008 return gen_binary (SMAX, mode, true, false);
4011 return gen_binary (SMIN, mode, true, false);
4014 return gen_binary (UMAX, mode, true, false);
4017 return gen_binary (UMIN, mode, true, false);
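/* E.g., (if_then_else (ge A B) A B) picks the larger value and so
   becomes (smax A B); the unsigned comparisons likewise yield UMAX
   and UMIN. */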
4020 #if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
4022 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4023 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4024 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4025 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4026 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4027 neither of the above, but it isn't worth checking for. */
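/* E.g., with STORE_FLAG_VALUE of 1, (if_then_else COND (plus Z 4) Z)
   becomes (plus Z (mult COND 4)): the comparison contributes 1 or 0,
   so the multiply yields 4 or 0. Multiplying below by both C1 and
   const_true_rtx also cancels out a STORE_FLAG_VALUE of -1. */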
4029 if (comparison_p && mode != VOIDmode && ! side_effects_p (x))
4031 rtx t = make_compound_operation (true, SET);
4032 rtx f = make_compound_operation (false, SET);
4033 rtx cond_op0 = XEXP (cond, 0);
4034 rtx cond_op1 = XEXP (cond, 1);
4035 enum rtx_code op, extend_op = NIL;
4036 enum machine_mode m = mode;
4039 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4040 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4041 || GET_CODE (t) == ASHIFT
4042 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4043 && rtx_equal_p (XEXP (t, 0), f))
4044 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4046 /* If an identity-zero op is commutative, check whether there
4047 would be a match if we swapped the operands. */
4048 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4049 || GET_CODE (t) == XOR)
4050 && rtx_equal_p (XEXP (t, 1), f))
4051 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4052 else if (GET_CODE (t) == SIGN_EXTEND
4053 && (GET_CODE (XEXP (t, 0)) == PLUS
4054 || GET_CODE (XEXP (t, 0)) == MINUS
4055 || GET_CODE (XEXP (t, 0)) == IOR
4056 || GET_CODE (XEXP (t, 0)) == XOR
4057 || GET_CODE (XEXP (t, 0)) == ASHIFT
4058 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4059 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4060 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4061 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4062 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4063 && (num_sign_bit_copies (f, GET_MODE (f))
4064 > (GET_MODE_BITSIZE (mode)
4065 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4067 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4068 extend_op = SIGN_EXTEND;
4069 m = GET_MODE (XEXP (t, 0));
4071 else if (GET_CODE (t) == SIGN_EXTEND
4072 && (GET_CODE (XEXP (t, 0)) == PLUS
4073 || GET_CODE (XEXP (t, 0)) == IOR
4074 || GET_CODE (XEXP (t, 0)) == XOR)
4075 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4076 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4077 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4078 && (num_sign_bit_copies (f, GET_MODE (f))
4079 > (GET_MODE_BITSIZE (mode)
4080 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4082 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4083 extend_op = SIGN_EXTEND;
4084 m = GET_MODE (XEXP (t, 0));
4086 else if (GET_CODE (t) == ZERO_EXTEND
4087 && (GET_CODE (XEXP (t, 0)) == PLUS
4088 || GET_CODE (XEXP (t, 0)) == MINUS
4089 || GET_CODE (XEXP (t, 0)) == IOR
4090 || GET_CODE (XEXP (t, 0)) == XOR
4091 || GET_CODE (XEXP (t, 0)) == ASHIFT
4092 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4093 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4094 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4095 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4096 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4097 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4098 && ((nonzero_bits (f, GET_MODE (f))
4099 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4102 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4103 extend_op = ZERO_EXTEND;
4104 m = GET_MODE (XEXP (t, 0));
4106 else if (GET_CODE (t) == ZERO_EXTEND
4107 && (GET_CODE (XEXP (t, 0)) == PLUS
4108 || GET_CODE (XEXP (t, 0)) == IOR
4109 || GET_CODE (XEXP (t, 0)) == XOR)
4110 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4111 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4112 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4113 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4114 && ((nonzero_bits (f, GET_MODE (f))
4115 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4118 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4119 extend_op = ZERO_EXTEND;
4120 m = GET_MODE (XEXP (t, 0));
4125 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4126 pc_rtx, pc_rtx, 0, 0);
4127 temp = gen_binary (MULT, m, temp,
4128 gen_binary (MULT, m, c1, const_true_rtx));
4129 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4130 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4132 if (extend_op != NIL)
4133 temp = gen_unary (extend_op, mode, m, temp);
4140 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4141 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4142 negation of a single bit, we can convert this operation to a shift. We
4143 can actually do this more generally, but it doesn't seem worth it. */
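/* E.g., if A is known to be 0 or 1,
   (if_then_else (ne A 0) (const_int 8) (const_int 0)) is just
   (ashift A 3). */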
4145 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4146 && false == const0_rtx && GET_CODE (true) == CONST_INT
4147 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4148 && (i = exact_log2 (INTVAL (true))) >= 0)
4149 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4150 == GET_MODE_BITSIZE (mode))
4151 && (i = exact_log2 (- INTVAL (true))) >= 0)))
4153 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4154 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
4159 /* Simplify X, a SET expression. Return the new expression. */
4165 rtx src = SET_SRC (x);
4166 rtx dest = SET_DEST (x);
4167 enum machine_mode mode
4168 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4172 /* (set (pc) (return)) gets written as (return). */
4173 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4176 /* Now that we know for sure which bits of SRC we are using, see if we can
4177 simplify the expression for the object knowing that we only need the low-order bits. */
4180 if (GET_MODE_CLASS (mode) == MODE_INT)
4181 src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
4183 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4184 the comparison result and try to simplify it unless we already have used
4185 undobuf.other_insn. */
4186 if ((GET_CODE (src) == COMPARE
4191 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4192 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4193 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4194 && rtx_equal_p (XEXP (*cc_use, 0), dest))
4196 enum rtx_code old_code = GET_CODE (*cc_use);
4197 enum rtx_code new_code;
4199 int other_changed = 0;
4200 enum machine_mode compare_mode = GET_MODE (dest);
4202 if (GET_CODE (src) == COMPARE)
4203 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4205 op0 = src, op1 = const0_rtx;
4207 /* Simplify our comparison, if possible. */
4208 new_code = simplify_comparison (old_code, &op0, &op1);
4210 #ifdef EXTRA_CC_MODES
4211 /* If this machine has CC modes other than CCmode, check to see if we
4212 need to use a different CC mode here. */
4213 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
4214 #endif /* EXTRA_CC_MODES */
4216 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
4217 /* If the mode changed, we have to change SET_DEST, the mode in the
4218 compare, and the mode in the place SET_DEST is used. If SET_DEST is
4219 a hard register, just build new versions with the proper mode. If it
4220 is a pseudo, we lose unless it is the only time we set the pseudo, in
4221 which case we can safely change its mode. */
4222 if (compare_mode != GET_MODE (dest))
4224 int regno = REGNO (dest);
4225 rtx new_dest = gen_rtx (REG, compare_mode, regno);
4227 if (regno < FIRST_PSEUDO_REGISTER
4228 || (reg_n_sets[regno] == 1 && ! REG_USERVAR_P (dest)))
4230 if (regno >= FIRST_PSEUDO_REGISTER)
4231 SUBST (regno_reg_rtx[regno], new_dest);
4233 SUBST (SET_DEST (x), new_dest);
4234 SUBST (XEXP (*cc_use, 0), new_dest);
4242 /* If the code changed, we have to build a new comparison in
4243 undobuf.other_insn. */
4244 if (new_code != old_code)
4246 unsigned HOST_WIDE_INT mask;
4248 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4251 /* If the only change we made was to change an EQ into an NE or
4252 vice versa, OP0 has only one bit that might be nonzero, and OP1
4253 is zero, check if changing the user of the condition code will
4254 produce a valid insn. If it won't, we can keep the original code
4255 in that insn by surrounding our operation with an XOR. */
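/* The XOR is safe because OP0 takes only the values 0 and MASK, so
   (eq OP0 0) and (ne (xor OP0 MASK) 0) test the same condition. */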
4257 if (((old_code == NE && new_code == EQ)
4258 || (old_code == EQ && new_code == NE))
4259 && ! other_changed && op1 == const0_rtx
4260 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
4261 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
4263 rtx pat = PATTERN (other_insn), note = 0;
4265 if ((recog_for_combine (&pat, other_insn, &note) < 0
4266 && ! check_asm_operands (pat)))
4268 PUT_CODE (*cc_use, old_code);
4271 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
4279 undobuf.other_insn = other_insn;
4282 /* If we are now comparing against zero, change our source if
4283 needed. If we do not use cc0, we always have a COMPARE. */
4284 if (op1 == const0_rtx && dest == cc0_rtx)
4286 SUBST (SET_SRC (x), op0);
4292 /* Otherwise, if we didn't previously have a COMPARE in the
4293 correct mode, we need one. */
4294 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
4297 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
4302 /* Otherwise, update the COMPARE if needed. */
4303 SUBST (XEXP (src, 0), op0);
4304 SUBST (XEXP (src, 1), op1);
4309 /* Get SET_SRC in a form where we have placed back any
4310 compound expressions. Then do the checks below. */
4311 src = make_compound_operation (src, SET);
4312 SUBST (SET_SRC (x), src);
4315 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
4316 and X being a REG or (subreg (reg)), we may be able to convert this to
4317 (set (subreg:m2 x) (op)).
4319 We can always do this if M1 is narrower than M2 because that means that
4320 we only care about the low bits of the result.
4322 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
4323 perform a narrower operation than requested since the high-order bits will
4324 be undefined. On machines where it is defined, this transformation is safe
4325 as long as M1 and M2 have the same number of words. */
4327 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4328 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
4329 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
4331 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4332 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
4333 #ifndef WORD_REGISTER_OPERATIONS
4334 && (GET_MODE_SIZE (GET_MODE (src))
4335 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4337 && (GET_CODE (dest) == REG
4338 || (GET_CODE (dest) == SUBREG
4339 && GET_CODE (SUBREG_REG (dest)) == REG)))
4341 SUBST (SET_DEST (x),
4342 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
4344 SUBST (SET_SRC (x), SUBREG_REG (src));
4346 src = SET_SRC (x), dest = SET_DEST (x);
4349 #ifdef LOAD_EXTEND_OP
4350 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
4351 would require a paradoxical subreg. Replace the subreg with a
4352 zero_extend to avoid the reload that would otherwise be required. */
4354 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4355 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
4356 && SUBREG_WORD (src) == 0
4357 && (GET_MODE_SIZE (GET_MODE (src))
4358 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4359 && GET_CODE (SUBREG_REG (src)) == MEM)
4362 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
4363 GET_MODE (src), XEXP (src, 0)));
4369 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
4370 are comparing an item known to be 0 or -1 against 0, use a logical
4371 operation instead. Check for one of the arms being an IOR of the other
4372 arm with some value. We compute three terms to be IOR'ed together. In
4373 practice, at most two will be nonzero. Then we do the IOR's. */
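/* The key fact is that a value known to be 0 or -1 acts as a bit mask:
   (if_then_else (ne A 0) T F) with such an A is
   (ior (and A T) (and (not A) F)); TERM1 picks up the arm that an IOR
   has in common with the other arm. */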
4375 if (GET_CODE (dest) != PC
4376 && GET_CODE (src) == IF_THEN_ELSE
4377 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
4378 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
4379 && XEXP (XEXP (src, 0), 1) == const0_rtx
4380 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
4381 #ifdef HAVE_conditional_move
4382 && ! can_conditionally_move_p (GET_MODE (src))
4384 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
4385 GET_MODE (XEXP (XEXP (src, 0), 0)))
4386 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
4387 && ! side_effects_p (src))
4389 rtx true = (GET_CODE (XEXP (src, 0)) == NE
4390 ? XEXP (src, 1) : XEXP (src, 2));
4391 rtx false = (GET_CODE (XEXP (src, 0)) == NE
4392 ? XEXP (src, 2) : XEXP (src, 1));
4393 rtx term1 = const0_rtx, term2, term3;
4395 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4396 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4397 else if (GET_CODE (true) == IOR
4398 && rtx_equal_p (XEXP (true, 1), false))
4399 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4400 else if (GET_CODE (false) == IOR
4401 && rtx_equal_p (XEXP (false, 0), true))
4402 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4403 else if (GET_CODE (false) == IOR
4404 && rtx_equal_p (XEXP (false, 1), true))
4405 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4407 term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
4408 term3 = gen_binary (AND, GET_MODE (src),
4409 gen_unary (NOT, GET_MODE (src), GET_MODE (src),
4410 XEXP (XEXP (src, 0), 0)),
4414 gen_binary (IOR, GET_MODE (src),
4415 gen_binary (IOR, GET_MODE (src), term1, term2),
4421 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
4422 whole thing fail. */
4423 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
4425 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
4428 /* Convert this into a field assignment operation, if possible. */
4429 return make_field_assignment (x);
4432 /* Simplify X, an AND, IOR, or XOR operation, and return the simplified
4433 result. LAST is nonzero if this is the last retry. */
4436 simplify_logical (x, last)
4440 enum machine_mode mode = GET_MODE (x);
4441 rtx op0 = XEXP (x, 0);
4442 rtx op1 = XEXP (x, 1);
4444 switch (GET_CODE (x))
4447 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4448 insn (and may simplify more). */
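/* E.g., with A == 1100 and B == 1010 in binary, (A ^ B) & A == 0100,
   and A & ~B == 1100 & 0101 == 0100 as well. */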
4449 if (GET_CODE (op0) == XOR
4450 && rtx_equal_p (XEXP (op0, 0), op1)
4451 && ! side_effects_p (op1))
4452 x = gen_binary (AND, mode,
4453 gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
4455 if (GET_CODE (op0) == XOR
4456 && rtx_equal_p (XEXP (op0, 1), op1)
4457 && ! side_effects_p (op1))
4458 x = gen_binary (AND, mode,
4459 gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
4461 /* Similarly for (~ (A ^ B)) & A. */
4462 if (GET_CODE (op0) == NOT
4463 && GET_CODE (XEXP (op0, 0)) == XOR
4464 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
4465 && ! side_effects_p (op1))
4466 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
4468 if (GET_CODE (op0) == NOT
4469 && GET_CODE (XEXP (op0, 0)) == XOR
4470 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
4471 && ! side_effects_p (op1))
4472 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
4474 if (GET_CODE (op1) == CONST_INT)
4476 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
4478 /* If we have (ior (and X C1) C2) and the next restart would be
4479 the last, simplify this by making C1 as small as possible
4480 and then exit. */
4481 if (last
4482 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
4483 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4484 && GET_CODE (op1) == CONST_INT)
4485 return gen_binary (IOR, mode,
4486 gen_binary (AND, mode, XEXP (op0, 0),
4487 GEN_INT (INTVAL (XEXP (op0, 1))
4488 & ~ INTVAL (op1))), op1);
4490 if (GET_CODE (x) != AND)
4494 /* Convert (A | B) & A to A. */
4495 if (GET_CODE (op0) == IOR
4496 && (rtx_equal_p (XEXP (op0, 0), op1)
4497 || rtx_equal_p (XEXP (op0, 1), op1))
4498 && ! side_effects_p (XEXP (op0, 0))
4499 && ! side_effects_p (XEXP (op0, 1)))
4502 /* In the following group of tests (and those in case IOR below),
4503 we start with some combination of logical operations and apply
4504 the distributive law followed by the inverse distributive law.
4505 Most of the time, this results in no change. However, if some of
4506 the operands are the same or inverses of each other, simplifications
4509 For example, (and (ior A B) (not B)) can occur as the result of
4510 expanding a bit field assignment. When we apply the distributive
4511 law to this, we get (ior (and A (not B)) (and B (not B))),
4512 which then simplifies to (and A (not B)).
4514 If we have (and (ior A B) C), apply the distributive law and then
4515 the inverse distributive law to see if things simplify. */
4517 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
4519 x = apply_distributive_law
4520 (gen_binary (GET_CODE (op0), mode,
4521 gen_binary (AND, mode, XEXP (op0, 0), op1),
4522 gen_binary (AND, mode, XEXP (op0, 1), op1)));
4523 if (GET_CODE (x) != AND)
4527 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
4528 return apply_distributive_law
4529 (gen_binary (GET_CODE (op1), mode,
4530 gen_binary (AND, mode, XEXP (op1, 0), op0),
4531 gen_binary (AND, mode, XEXP (op1, 1), op0)));
4533 /* Similarly, taking advantage of the fact that
4534 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4536 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
4537 return apply_distributive_law
4538 (gen_binary (XOR, mode,
4539 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
4540 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1))));
4542 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
4543 return apply_distributive_law
4544 (gen_binary (XOR, mode,
4545 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
4546 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1))));
4550 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
4551 if (GET_CODE (op1) == CONST_INT
4552 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4553 && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
4556 /* Convert (A & B) | A to A. */
4557 if (GET_CODE (op0) == AND
4558 && (rtx_equal_p (XEXP (op0, 0), op1)
4559 || rtx_equal_p (XEXP (op0, 1), op1))
4560 && ! side_effects_p (XEXP (op0, 0))
4561 && ! side_effects_p (XEXP (op0, 1)))
4564 /* If we have (ior (and A B) C), apply the distributive law and then
4565 the inverse distributive law to see if things simplify. */
4567 if (GET_CODE (op0) == AND)
4569 x = apply_distributive_law
4570 (gen_binary (AND, mode,
4571 gen_binary (IOR, mode, XEXP (op0, 0), op1),
4572 gen_binary (IOR, mode, XEXP (op0, 1), op1)));
4574 if (GET_CODE (x) != IOR)
4578 if (GET_CODE (op1) == AND)
4580 x = apply_distributive_law
4581 (gen_binary (AND, mode,
4582 gen_binary (IOR, mode, XEXP (op1, 0), op0),
4583 gen_binary (IOR, mode, XEXP (op1, 1), op0)));
4585 if (GET_CODE (x) != IOR)
4589 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4590 mode size to (rotate A CX). */
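/* E.g., in SImode, (ior (ashift A 8) (lshiftrt A 24)) shifts the low
   24 bits up and wraps the high 8 bits around to the bottom:
   (rotate A 8). */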
4592 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
4593 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
4594 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
4595 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4596 && GET_CODE (XEXP (op1, 1)) == CONST_INT
4597 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
4598 == GET_MODE_BITSIZE (mode)))
4599 return gen_rtx (ROTATE, mode, XEXP (op0, 0),
4600 (GET_CODE (op0) == ASHIFT
4601 ? XEXP (op0, 1) : XEXP (op1, 1)));
4603 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
4604 a (sign_extend (plus ...)). If so, and if OP1 is a CONST_INT and the PLUS
4605 does not affect any of the bits in OP1, the IOR can really be done
4606 as a PLUS and we can associate. We do this by seeing if OP1
4607 can be safely shifted left C bits. */
4608 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
4609 && GET_CODE (XEXP (op0, 0)) == PLUS
4610 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
4611 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4612 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
4614 int count = INTVAL (XEXP (op0, 1));
4615 HOST_WIDE_INT mask = INTVAL (op1) << count;
4617 if (mask >> count == INTVAL (op1)
4618 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
4620 SUBST (XEXP (XEXP (op0, 0), 1),
4621 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
4628 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4629 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for (NOT y). */
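/* Complements cancel in pairs under XOR: (~x) ^ (~y) == x ^ y, while
   (~x) ^ y == ~(x ^ y), so at most one NOT survives and it is hoisted
   outside. */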
4632 int num_negated = 0;
4634 if (GET_CODE (op0) == NOT)
4635 num_negated++, op0 = XEXP (op0, 0);
4636 if (GET_CODE (op1) == NOT)
4637 num_negated++, op1 = XEXP (op1, 0);
4639 if (num_negated == 2)
4641 SUBST (XEXP (x, 0), op0);
4642 SUBST (XEXP (x, 1), op1);
4644 else if (num_negated == 1)
4645 return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
4648 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4649 correspond to a machine insn or result in further simplifications
4650 if B is a constant. */
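/* Bit by bit: where B is 0, both sides are 0; where B is 1, the XOR
   flips the corresponding bit of A, giving (not A)'s bit, just as the
   AND with (not A) does. */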
4652 if (GET_CODE (op0) == AND
4653 && rtx_equal_p (XEXP (op0, 1), op1)
4654 && ! side_effects_p (op1))
4655 return gen_binary (AND, mode,
4656 gen_unary (NOT, mode, mode, XEXP (op0, 0)),
4659 else if (GET_CODE (op0) == AND
4660 && rtx_equal_p (XEXP (op0, 0), op1)
4661 && ! side_effects_p (op1))
4662 return gen_binary (AND, mode,
4663 gen_unary (NOT, mode, mode, XEXP (op0, 1)),
4666 #if STORE_FLAG_VALUE == 1
4667 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4669 if (op1 == const1_rtx
4670 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
4671 && reversible_comparison_p (op0))
4672 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
4673 mode, XEXP (op0, 0), XEXP (op0, 1));
4675 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
4676 is (lt foo (const_int 0)), so we can perform the above
4679 if (op1 == const1_rtx
4680 && GET_CODE (op0) == LSHIFTRT
4681 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4682 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
4683 return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
4686 /* (xor (comparison foo bar) (const_int sign-bit))
4687 when STORE_FLAG_VALUE is the sign bit. */
4688 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4689 && (STORE_FLAG_VALUE
4690 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4691 && op1 == const_true_rtx
4692 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
4693 && reversible_comparison_p (op0))
4694 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
4695 mode, XEXP (op0, 0), XEXP (op0, 1));
4702 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4703 operations" because they can be replaced with two more basic operations.
4704 ZERO_EXTEND is also considered "compound" because it can be replaced with
4705 an AND operation, which is simpler, though only one operation.
4707 The function expand_compound_operation is called with an rtx expression
4708 and will convert it to the appropriate shifts and AND operations,
4709 simplifying at each stage.
4711 The function make_compound_operation is called to convert an expression
4712 consisting of shifts and ANDs into the equivalent compound expression.
4713 It is the inverse of this function, loosely speaking. */
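/* For example, on a 32-bit target, (zero_extend:SI X:QI) expands to
   roughly (and:SI (subreg:SI X 0) 255), while (sign_extract:SI X 8 0)
   expands to the pair (ashiftrt:SI (ashift:SI X 24) 24). */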
4716 expand_compound_operation (x)
4724 switch (GET_CODE (x))
4729 /* We can't necessarily use a const_int for a multiword mode;
4730 it depends on implicitly extending the value.
4731 Since we don't know the right way to extend it,
4732 we can't tell whether the implicit way is right.
4734 Even for a mode that is no wider than a const_int,
4735 we can't win, because we need to sign extend one of its bits through
4736 the rest of it, and we don't know which bit. */
4737 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
4740 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
4741 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
4742 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
4743 reloaded. If not for that, MEM's would very rarely be safe.
4745 Reject MODEs bigger than a word, because we might not be able
4746 to reference a two-register group starting with an arbitrary register
4747 (and currently gen_lowpart might crash for a SUBREG). */
4749 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
4752 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4753 /* If the inner object has VOIDmode (the only way this can happen
4754 is if it is an ASM_OPERANDS), we can't do anything since we don't
4755 know how much masking to do. */
4764 /* If the operand is a CLOBBER, just return it. */
4765 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4768 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4769 || GET_CODE (XEXP (x, 2)) != CONST_INT
4770 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4773 len = INTVAL (XEXP (x, 1));
4774 pos = INTVAL (XEXP (x, 2));
4776 /* If this goes outside the object being extracted, replace the object
4777 with a (use (mem ...)) construct that only combine understands
4778 and is used only for this purpose. */
4779 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4780 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4782 if (BITS_BIG_ENDIAN)
4783 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4791 /* If we reach here, we want to return a pair of shifts. The inner
4792 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4793 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4794 logical depending on the value of UNSIGNEDP.
4796 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4797 converted into an AND of a shift.
4799 We must check for the case where the left shift would have a negative
4800 count. This can happen in a case like (x >> 31) & 255 on machines
4801 that can't shift by a constant. On those machines, we would first
4802 combine the shift with the AND to produce a variable-position
4803 extraction. Then the constant of 31 would be substituted in to produce
4804 such a position. */
4806 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4807 if (modewidth >= pos - len)
4808 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
4810 simplify_shift_const (NULL_RTX, ASHIFT,
4813 modewidth - pos - len),
4816 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4817 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4818 simplify_shift_const (NULL_RTX, LSHIFTRT,
4821 ((HOST_WIDE_INT) 1 << len) - 1);
4823 /* Any other cases we can't handle. */
4827 /* If we couldn't do this for some reason, return the original expression. */
4829 if (GET_CODE (tem) == CLOBBER)
4835 /* X is a SET which contains an assignment of one object into
4836 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4837 or certain SUBREGS). If possible, convert it into a series of logical operations.
4840 We half-heartedly support variable positions, but do not at all
4841 support variable lengths. */
4844 expand_field_assignment (x)
4848 rtx pos; /* Always counts from low bit. */
4851 enum machine_mode compute_mode;
4853 /* Loop until we find something we can't simplify. */
4856 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4857 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4859 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4860 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4863 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4864 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4866 inner = XEXP (SET_DEST (x), 0);
4867 len = INTVAL (XEXP (SET_DEST (x), 1));
4868 pos = XEXP (SET_DEST (x), 2);
4870 /* If the position is constant and the field extends beyond the
4871 width of INNER, surround INNER with a USE to indicate this. */
4872 if (GET_CODE (pos) == CONST_INT
4873 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4874 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4876 if (BITS_BIG_ENDIAN)
4878 if (GET_CODE (pos) == CONST_INT)
4879 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4881 else if (GET_CODE (pos) == MINUS
4882 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4883 && (INTVAL (XEXP (pos, 1))
4884 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4885 /* If position is ADJUST - X, new position is X. */
4886 pos = XEXP (pos, 0);
4888 pos = gen_binary (MINUS, GET_MODE (pos),
4889 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4895 /* A SUBREG between two modes that occupy the same numbers of words
4896 can be done by moving the SUBREG to the source. */
4897 else if (GET_CODE (SET_DEST (x)) == SUBREG
4898 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4899 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4900 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4901 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4903 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4904 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4911 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4912 inner = SUBREG_REG (inner);
4914 compute_mode = GET_MODE (inner);
4916 /* Compute a mask of LEN bits, if we can do this on the host machine. */
4917 if (len < HOST_BITS_PER_WIDE_INT)
4918 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
4922 /* Now compute the equivalent expression. Make a copy of INNER
4923 for the SET_DEST in case it is a MEM into which we will substitute;
4924 we don't want shared RTL in that case. */
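/* The expression built below is the usual masked store: to put SRC
   into an 8-bit field at bit 8 of INNER, say, compute
   (INNER & ~(255 << 8)) | ((SRC & 255) << 8); the first AND clears
   the field, the second masks SRC to its width, and the ASHIFT
   aligns it. */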
4925 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4926 gen_binary (IOR, compute_mode,
4927 gen_binary (AND, compute_mode,
4928 gen_unary (NOT, compute_mode,
4934 gen_binary (ASHIFT, compute_mode,
4935 gen_binary (AND, compute_mode,
4936 gen_lowpart_for_combine
4946 /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
4947 it is an RTX that represents a variable starting position; otherwise,
4948 POS is the (constant) starting bit position (counted from the LSB).
4950 INNER may be a USE. This will occur when we started with a bitfield
4951 that went outside the boundary of the object in memory, which is
4952 allowed on most machines. To isolate this case, we produce a USE
4953 whose mode is wide enough and surround the MEM with it. The only
4954 code that understands the USE is this routine. If it is not removed,
4955 it will cause the resulting insn not to match.
4957 UNSIGNEDP is non-zero for an unsigned reference and zero for a signed one.
4960 IN_DEST is non-zero if this is a reference in the destination of a
4961 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4962 a STRICT_LOW_PART will be used; if zero, ZERO_EXTEND or SIGN_EXTEND will be used.
4965 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4966 ZERO_EXTRACT should be built even for bits starting at bit 0.
4968 MODE is the desired mode of the result (if IN_DEST == 0). */
4971 make_extraction (mode, inner, pos, pos_rtx, len,
4972 unsignedp, in_dest, in_compare)
4973 enum machine_mode mode;
4979 int in_dest, in_compare;
4981 /* This mode describes the size of the storage area
4982 to fetch the overall value from. Within that, we
4983 ignore the POS lowest bits, etc. */
4984 enum machine_mode is_mode = GET_MODE (inner);
4985 enum machine_mode inner_mode;
4986 enum machine_mode wanted_mem_mode = byte_mode;
4987 enum machine_mode pos_mode = word_mode;
4988 enum machine_mode extraction_mode = word_mode;
4989 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4992 rtx orig_pos_rtx = pos_rtx;
4995 /* Get some information about INNER and get the innermost object. */
4996 if (GET_CODE (inner) == USE)
4997 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
4998 /* We don't need to adjust the position because we set up the USE
4999 to pretend that it was a full-word object. */
5000 spans_byte = 1, inner = XEXP (inner, 0);
5001 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5003 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5004 consider just the QI as the memory to extract from.
5005 The subreg adds or removes high bits; its mode is
5006 irrelevant to the meaning of this extraction,
5007 since POS and LEN count from the lsb. */
5008 if (GET_CODE (SUBREG_REG (inner)) == MEM)
5009 is_mode = GET_MODE (SUBREG_REG (inner));
5010 inner = SUBREG_REG (inner);
5013 inner_mode = GET_MODE (inner);
5015 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
5016 pos = INTVAL (pos_rtx), pos_rtx = 0;
5018 /* See if this can be done without an extraction. We never can if the
5019 width of the field is not the same as that of some integer mode. For
5020 registers, we can only avoid the extraction if the position is at the
5021 low-order bit and this is either not in the destination or we have the
5022 appropriate STRICT_LOW_PART operation available.
5024 For MEM, we can avoid an extract if the field starts on an appropriate
5025 boundary and we can change the mode of the memory reference. However,
5026 we cannot directly access the MEM if we have a USE and the underlying
5027 MEM is not TMODE. This combination means that MEM was being used in a
5028 context where bits outside its mode were being referenced; that is only
5029 valid in bit-field insns. */
5031 if (tmode != BLKmode
5032 && ! (spans_byte && inner_mode != tmode)
5033 && ((pos_rtx == 0 && pos == 0 && GET_CODE (inner) != MEM
5035 || (GET_CODE (inner) == REG
5036 && (movstrict_optab->handlers[(int) tmode].insn_code
5037 != CODE_FOR_nothing))))
5038 || (GET_CODE (inner) == MEM && pos_rtx == 0
5040 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5041 : BITS_PER_UNIT)) == 0
5042 /* We can't do this if we are widening INNER_MODE (it
5043 may not be aligned, for one thing). */
5044 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5045 && (inner_mode == tmode
5046 || (! mode_dependent_address_p (XEXP (inner, 0))
5047 && ! MEM_VOLATILE_P (inner))))))
5049 /* If INNER is a MEM, make a new MEM that encompasses just the desired
5050 field. If the original and current mode are the same, we need not
5051 adjust the offset. Otherwise, we do if bytes are big-endian.
5053 If INNER is not a MEM, get a piece consisting of just the field
5054 of interest (in this case POS must be 0). */
5056 if (GET_CODE (inner) == MEM)
5059 /* POS counts from lsb, but make OFFSET count in memory order. */
5060 if (BYTES_BIG_ENDIAN)
5061 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5063 offset = pos / BITS_PER_UNIT;
5065 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
5066 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
5067 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
5068 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
5070 else if (GET_CODE (inner) == REG)
5072 /* We can't call gen_lowpart_for_combine here since we always want
5073 a SUBREG and it would sometimes return a new hard register. */
5074 if (tmode != inner_mode)
5075 new = gen_rtx (SUBREG, tmode, inner,
5077 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
5078 ? ((GET_MODE_SIZE (inner_mode)
5079 - GET_MODE_SIZE (tmode))
5086 new = force_to_mode (inner, tmode,
5087 len >= HOST_BITS_PER_WIDE_INT
5088 ? GET_MODE_MASK (tmode)
5089 : ((HOST_WIDE_INT) 1 << len) - 1,
5092 /* If this extraction is going into the destination of a SET,
5093 make a STRICT_LOW_PART unless we made a MEM. */
5096 return (GET_CODE (new) == MEM ? new
5097 : (GET_CODE (new) != SUBREG
5098 ? gen_rtx (CLOBBER, tmode, const0_rtx)
5099 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
5101 /* Otherwise, sign- or zero-extend unless we already are in the
5104 return (mode == tmode ? new
5105 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5109 /* Unless this is a COMPARE or we have a funny memory reference,
5110 don't do anything with zero-extending field extracts starting at
5111 the low-order bit since they are simple AND operations. */
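/* E.g., (zero_extract:SI X (const_int 3) (const_int 0)) is simply
   (and:SI X (const_int 7)). */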
5112 if (pos_rtx == 0 && pos == 0 && ! in_dest
5113 && ! in_compare && ! spans_byte && unsignedp)
5116 /* Unless we are allowed to span bytes, reject this if we would be
5117 spanning bytes or if the position is not a constant and the length
5118 is not 1. In all other cases, we would only be going outside
5119 our object in cases when an original shift would have been undefined. */
5121 if (! spans_byte
5122 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5123 || (pos_rtx != 0 && len != 1)))
5126 /* Get the mode to use should INNER be a MEM, the mode for the position,
5127 and the mode for the result. */
5131 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
5132 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
5133 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
5138 if (! in_dest && unsignedp)
5140 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
5141 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
5142 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
5147 if (! in_dest && ! unsignedp)
5149 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
5150 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
5151 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
5155 /* Never narrow an object, since that might not be safe. */
5157 if (mode != VOIDmode
5158 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5159 extraction_mode = mode;
5161 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5162 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5163 pos_mode = GET_MODE (pos_rtx);
5165 /* If this is not from memory or we have to change the mode of memory and
5166 cannot, the desired mode is EXTRACTION_MODE. */
5167 if (GET_CODE (inner) != MEM
5168 || (inner_mode != wanted_mem_mode
5169 && (mode_dependent_address_p (XEXP (inner, 0))
5170 || MEM_VOLATILE_P (inner))))
5171 wanted_mem_mode = extraction_mode;
5175 if (BITS_BIG_ENDIAN)
5177 /* If position is constant, compute new position. Otherwise,
5178 build subtraction. */
5179 if (pos_rtx == 0)
5180 pos = (MAX (GET_MODE_BITSIZE (is_mode),
5181 GET_MODE_BITSIZE (wanted_mem_mode))
5182 - len - pos);
5183 else
5184 pos_rtx
5185 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
5186 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
5187 GET_MODE_BITSIZE (wanted_mem_mode))
5188 - len),
5189 pos_rtx);
5192 /* If INNER has a wider mode, make it smaller. If this is a constant
5193 extract, try to adjust the byte to point to the byte containing
5194 the value. */
5195 if (wanted_mem_mode != VOIDmode
5196 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
5197 && ((GET_CODE (inner) == MEM
5198 && (inner_mode == wanted_mem_mode
5199 || (! mode_dependent_address_p (XEXP (inner, 0))
5200 && ! MEM_VOLATILE_P (inner))))))
5204 /* The computations below will be correct if the machine is big
5205 endian in both bits and bytes or little endian in bits and bytes.
5206 If it is mixed, we must adjust. */
5208 /* If bytes are big endian and we had a paradoxical SUBREG, we must
5209 adjust OFFSET to compensate. */
5210 if (BYTES_BIG_ENDIAN
5212 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5213 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
5215 /* If this is a constant position, we can move to the desired byte. */
5218 offset += pos / BITS_PER_UNIT;
5219 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
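/* EDITOR'S EXAMPLE (assumed values, not in the original source): with a
   constant POS of 19 and QImode as WANTED_MEM_MODE, OFFSET advances by
   19 / 8 = 2 bytes and POS becomes 19 % 8 = 3, i.e. bit 3 of the third
   byte of the object. */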
5222 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5224 && is_mode != wanted_mem_mode)
5225 offset = (GET_MODE_SIZE (is_mode)
5226 - GET_MODE_SIZE (wanted_mem_mode) - offset);
5228 if (offset != 0 || inner_mode != wanted_mem_mode)
5230 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
5231 plus_constant (XEXP (inner, 0), offset));
5232 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
5233 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
5234 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
5239 /* If INNER is not memory, we can always get it into the proper mode. */
5240 else if (GET_CODE (inner) != MEM)
5241 inner = force_to_mode (inner, extraction_mode,
5242 pos_rtx || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5243 ? GET_MODE_MASK (extraction_mode)
5244 : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
5247 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
5248 have to zero extend. Otherwise, we can just use a SUBREG. */
5249 if (pos_rtx != 0 && GET_MODE (pos_rtx) != VOIDmode
5250 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5251 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
5252 else if (pos_rtx != 0
5253 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5254 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5256 /* Make POS_RTX unless we already have it and it is correct. If we don't
5257 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
5258 be a CONST_INT. */
5259 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5260 pos_rtx = orig_pos_rtx;
5262 else if (pos_rtx == 0)
5263 pos_rtx = GEN_INT (pos);
5265 /* Make the required operation. See if we can use existing rtx. */
5266 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5267 extraction_mode, inner, GEN_INT (len), pos_rtx);
5269 new = gen_lowpart_for_combine (mode, new);
5274 /* See if X contains an ASHIFT of COUNT or more bits that can be commuted
5275 with any other operations in X. Return X without that shift if so. */
5278 extract_left_shift (x, count)
5282 enum rtx_code code = GET_CODE (x);
5283 enum machine_mode mode = GET_MODE (x);
5288 case ASHIFT:
5289 /* This is the shift itself. If it is wide enough, we will return
5290 either the value being shifted if the shift count is equal to
5291 COUNT or a shift for the difference. */
5292 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5293 && INTVAL (XEXP (x, 1)) >= count)
5294 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
5295 INTVAL (XEXP (x, 1)) - count);
5298 case NEG: case NOT:
5299 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5300 return gen_unary (code, mode, mode, tem);
5304 case PLUS: case IOR: case XOR: case AND:
5305 /* If we can safely shift this constant and we find the inner shift,
5306 make a new operation. */
5307 if (GET_CODE (XEXP (x,1)) == CONST_INT
5308 && (INTVAL (XEXP (x, 1)) & (((HOST_WIDE_INT) 1 << count) - 1)) == 0
5309 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5310 return gen_binary (code, mode, tem,
5311 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
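/* EDITOR'S EXAMPLE (not in the original source): with COUNT 3,
   (plus (ashift X 3) (const_int 24)) yields (plus X (const_int 3)),
   since 24 has its low 3 bits clear; shifting that result left by 3
   recreates the original value. */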
5319 /* Look at the expression rooted at X. Look for expressions
5320 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5321 Form these expressions.
5323 Return the new rtx, usually just X.
5325 Also, for machines like the Vax that don't have logical shift insns,
5326 try to convert logical to arithmetic shift operations in cases where
5327 they are equivalent. This undoes the canonicalizations to logical
5328 shifts done elsewhere.
5330 We try, as much as possible, to re-use rtl expressions to save memory.
5332 IN_CODE says what kind of expression we are processing. Normally, it is
5333 SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two
5334 being kludges), it is MEM. When processing the arguments of a comparison
5335 or a COMPARE against zero, it is COMPARE. */
5338 make_compound_operation (x, in_code)
5340 enum rtx_code in_code;
5342 enum rtx_code code = GET_CODE (x);
5343 enum machine_mode mode = GET_MODE (x);
5344 int mode_width = GET_MODE_BITSIZE (mode);
5346 enum rtx_code next_code;
5352 /* Select the code to be used in recursive calls. Once we are inside an
5353 address, we stay there. If we have a comparison, set to COMPARE,
5354 but once inside, go back to our default of SET. */
5356 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
5357 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5358 && XEXP (x, 1) == const0_rtx) ? COMPARE
5359 : in_code == COMPARE ? SET : in_code);
5361 /* Process depending on the code of this operation. If NEW is set
5362 non-zero, it will be returned. */
5366 case ASHIFT:
5367 /* Convert shifts by constants into multiplications if inside
5368 an address. */
5369 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5370 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5371 && INTVAL (XEXP (x, 1)) >= 0)
5373 new = make_compound_operation (XEXP (x, 0), next_code);
5374 new = gen_rtx_combine (MULT, mode, new,
5375 GEN_INT ((HOST_WIDE_INT) 1
5376 << INTVAL (XEXP (x, 1))));
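/* EDITOR'S EXAMPLE (not in the original source): inside a MEM,
   (ashift X (const_int 2)) becomes (mult X (const_int 4)), the
   canonical form for address arithmetic. */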
5380 case AND:
5381 /* If the second operand is not a constant, we can't do anything
5382 with it. */
5383 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5386 /* If the constant is a power of two minus one and the first operand
5387 is a logical right shift, make an extraction. */
5388 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5389 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5391 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5392 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5393 0, in_code == COMPARE);
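/* EDITOR'S EXAMPLE (not in the original source): for
   (and (lshiftrt X (const_int 8)) (const_int 255)), 255 is 2**8 - 1,
   so I = 8 and the result is equivalent to
   (zero_extract X (const_int 8) (const_int 8)). */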
5396 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
5397 else if (GET_CODE (XEXP (x, 0)) == SUBREG
5398 && subreg_lowpart_p (XEXP (x, 0))
5399 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5400 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5402 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5404 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
5405 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5406 0, in_code == COMPARE);
5408 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
5409 else if ((GET_CODE (XEXP (x, 0)) == XOR
5410 || GET_CODE (XEXP (x, 0)) == IOR)
5411 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
5412 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
5413 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5415 /* Apply the distributive law, and then try to make extractions. */
5416 new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
5417 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 0),
5419 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 1),
5421 new = make_compound_operation (new, in_code);
5424 /* If we have (and (rotate X C) M) and C is larger than the number
5425 of bits in M, this is an extraction. */
5427 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5428 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5429 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5430 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
5432 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5433 new = make_extraction (mode, new,
5434 (GET_MODE_BITSIZE (mode)
5435 - INTVAL (XEXP (XEXP (x, 0), 1))),
5436 NULL_RTX, i, 1, 0, in_code == COMPARE);
5439 /* On machines without logical shifts, if the operand of the AND is
5440 a logical shift and our mask turns off all the propagated sign
5441 bits, we can replace the logical shift with an arithmetic shift. */
5442 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5443 && (lshr_optab->handlers[(int) mode].insn_code
5444 == CODE_FOR_nothing)
5445 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5446 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5447 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5448 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5449 && mode_width <= HOST_BITS_PER_WIDE_INT)
5451 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
5453 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
5454 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
5456 gen_rtx_combine (ASHIFTRT, mode,
5457 make_compound_operation (XEXP (XEXP (x, 0), 0),
5459 XEXP (XEXP (x, 0), 1)));
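/* EDITOR'S EXAMPLE (assumed machine, not in the original source): in
   QImode with only arithmetic right shifts available,
   (and (lshiftrt X (const_int 4)) (const_int 15)) masks off every bit
   the sign copies could reach (0xff >> 4 == 15), so the LSHIFTRT can
   safely become an ASHIFTRT. */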
5462 /* If the constant is one less than a power of two, this might be
5463 representable by an extraction even if no shift is present.
5464 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
5465 we are in a COMPARE. */
5466 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5467 new = make_extraction (mode,
5468 make_compound_operation (XEXP (x, 0),
5470 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
5472 /* If we are in a comparison and this is an AND with a power of two,
5473 convert this into the appropriate bit extract. */
5474 else if (in_code == COMPARE
5475 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5476 new = make_extraction (mode,
5477 make_compound_operation (XEXP (x, 0),
5479 i, NULL_RTX, 1, 1, 0, 1);
5483 case LSHIFTRT:
5484 /* If the sign bit is known to be zero, replace this with an
5485 arithmetic shift. */
5486 if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5487 && lshr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
5488 && mode_width <= HOST_BITS_PER_WIDE_INT
5489 && (nonzero_bits (XEXP (x, 0), mode) & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
5491 new = gen_rtx_combine (ASHIFTRT, mode,
5492 make_compound_operation (XEXP (x, 0),
5498 /* ... fall through ... */
5500 case ASHIFTRT:
5501 lhs = XEXP (x, 0);
5502 rhs = XEXP (x, 1);
5504 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
5505 this is a SIGN_EXTRACT. */
5506 if (GET_CODE (rhs) == CONST_INT
5507 && GET_CODE (lhs) == ASHIFT
5508 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
5509 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
5511 new = make_compound_operation (XEXP (lhs, 0), next_code);
5512 new = make_extraction (mode, new,
5513 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
5514 NULL_RTX, mode_width - INTVAL (rhs),
5515 code == LSHIFTRT, 0, in_code == COMPARE);
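/* EDITOR'S EXAMPLE (not in the original source): in SImode,
   (ashiftrt (ashift X (const_int 24)) (const_int 24)) is
   (sign_extract X (const_int 8) (const_int 0)): POS = 24 - 24 = 0 and
   LEN = 32 - 24 = 8, i.e. a sign extension of the low byte. */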
5518 /* See if we have operations between an ASHIFTRT and an ASHIFT.
5519 If so, try to merge the shifts into a SIGN_EXTEND. We could
5520 also do this for some cases of SIGN_EXTRACT, but it doesn't
5521 seem worth the effort; the case checked for occurs on Alpha. */
5523 if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
5524 && ! (GET_CODE (lhs) == SUBREG
5525 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
5526 && GET_CODE (rhs) == CONST_INT
5527 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
5528 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
5529 new = make_extraction (mode, make_compound_operation (new, next_code),
5530 0, NULL_RTX, mode_width - INTVAL (rhs),
5531 code == LSHIFTRT, 0, in_code == COMPARE);
5535 case SUBREG:
5536 /* Call ourselves recursively on the inner expression. If we are
5537 narrowing the object and it has a different RTL code from
5538 what it originally did, do this SUBREG as a force_to_mode. */
5540 tem = make_compound_operation (SUBREG_REG (x), in_code);
5541 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
5542 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
5543 && subreg_lowpart_p (x))
5545 rtx newer = force_to_mode (tem, mode,
5546 GET_MODE_MASK (mode), NULL_RTX, 0);
5548 /* If we have something other than a SUBREG, we might have
5549 done an expansion, so rerun ourselves. */
5550 if (GET_CODE (newer) != SUBREG)
5551 newer = make_compound_operation (newer, in_code);
5553 return newer;
5557 if (new)
5559 x = gen_lowpart_for_combine (mode, new);
5560 code = GET_CODE (x);
5563 /* Now recursively process each operand of this operation. */
5564 fmt = GET_RTX_FORMAT (code);
5565 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5566 if (fmt[i] == 'e')
5568 new = make_compound_operation (XEXP (x, i), next_code);
5569 SUBST (XEXP (x, i), new);
5572 return x;
5575 /* Given M see if it is a value that would select a field of bits
5576 within an item, but not the entire word. Return -1 if not.
5577 Otherwise, return the starting position of the field, where 0 is the
5578 low-order bit.
5580 *PLEN is set to the length of the field. */
5583 get_pos_from_mask (m, plen)
5584 unsigned HOST_WIDE_INT m;
5587 /* Get the bit number of the first 1 bit from the right, -1 if none. */
5588 int pos = exact_log2 (m & - m);
5590 if (pos < 0)
5591 return -1;
5593 /* Now shift off the low-order zero bits and see if we have a power of
5594 two minus 1. */
5595 *plen = exact_log2 ((m >> pos) + 1);
5597 if (*plen <= 0)
5598 return -1;
5600 return pos;
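/* EDITOR'S EXAMPLE (not in the original source): M = 0x0ff0 gives
   POS = 4 and (M >> 4) + 1 = 0x100 = 2**8, so *PLEN = 8: an eight-bit
   field starting at bit 4. M = 0x0a fails the power-of-two test
   (0x0a >> 1 is 5) and the function returns -1. */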
5603 /* See if X can be simplified knowing that we will only refer to it in
5604 MODE and will only refer to those bits that are nonzero in MASK.
5605 If other bits are being computed or if masking operations are done
5606 that select a superset of the bits in MASK, they can sometimes be
5607 ignored.
5609 Return a possibly simplified expression, but always convert X to
5610 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
5612 Also, if REG is non-zero and X is a register equal in value to REG,
5613 replace X with REG.
5615 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
5616 are all off in X. This is used when X will be complemented, by either
5617 NOT, NEG, or XOR. */
5620 force_to_mode (x, mode, mask, reg, just_select)
5622 enum machine_mode mode;
5623 unsigned HOST_WIDE_INT mask;
5627 enum rtx_code code = GET_CODE (x);
5628 int next_select = just_select || code == XOR || code == NOT || code == NEG;
5629 enum machine_mode op_mode;
5630 unsigned HOST_WIDE_INT fuller_mask, nonzero;
5633 /* If this is a CALL, don't do anything. Some of the code below
5634 will do the wrong thing since the mode of a CALL is VOIDmode. */
5635 if (code == CALL)
5636 return x;
5638 /* We want to perform the operation in its present mode unless we know
5639 that the operation is valid in MODE, in which case we do the operation
5640 in MODE. */
5641 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
5642 && code_to_optab[(int) code] != 0
5643 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
5644 != CODE_FOR_nothing))
5645 ? mode : GET_MODE (x));
5647 /* It is not valid to do a right-shift in a narrower mode
5648 than the one it came in with. */
5649 if ((code == LSHIFTRT || code == ASHIFTRT)
5650 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
5651 op_mode = GET_MODE (x);
5653 /* Truncate MASK to fit OP_MODE. */
5655 mask &= GET_MODE_MASK (op_mode);
5657 /* When we have an arithmetic operation, or a shift whose count we
5658 do not know, we need to assume that all bits up to the highest-order
5659 bit in MASK will be needed. This is how we form such a mask. */
5660 if (op_mode)
5661 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
5662 ? GET_MODE_MASK (op_mode)
5663 : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
5664 else
5665 fuller_mask = ~ (HOST_WIDE_INT) 0;
5667 /* Determine what bits of X are guaranteed to be (non)zero. */
5668 nonzero = nonzero_bits (x, mode);
5670 /* If none of the bits in X are needed, return a zero. */
5671 if (! just_select && (nonzero & mask) == 0)
5672 x = const0_rtx;
5674 /* If X is a CONST_INT, return a new one. Do this here since the
5675 test below will fail. */
5676 if (GET_CODE (x) == CONST_INT)
5678 HOST_WIDE_INT cval = INTVAL (x) & mask;
5679 int width = GET_MODE_BITSIZE (mode);
5681 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
5682 number, sign extend it. */
5683 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
5684 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
5685 cval |= (HOST_WIDE_INT) -1 << width;
5687 return GEN_INT (cval);
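/* EDITOR'S EXAMPLE (not in the original source): forcing
   (const_int 255) to QImode with MASK 0xff gives CVAL = 0xff; bit 7 is
   set and QImode is narrower than HOST_WIDE_INT, so CVAL is
   sign-extended to -1, the host form of the all-ones QImode value. */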
5690 /* If X is narrower than MODE and we want all the bits in X's mode, just
5691 get X in the proper mode. */
5692 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
5693 && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
5694 return gen_lowpart_for_combine (mode, x);
5696 /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
5697 MASK are already known to be zero in X, we need not do anything. */
5698 if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
5699 return x;
5701 switch (code)
5703 case CLOBBER:
5704 /* If X is a (clobber (const_int)), return it since we know we are
5705 generating something that won't match. */
5706 return x;
5708 case USE:
5709 /* X is a (use (mem ..)) that was made from a bit-field extraction that
5710 spanned the boundary of the MEM. If we are now masking so it is
5711 within that boundary, we don't need the USE any more. */
5712 if (! BITS_BIG_ENDIAN
5713 && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5714 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
5717 case SIGN_EXTEND: case ZERO_EXTEND:
5718 case ZERO_EXTRACT: case SIGN_EXTRACT:
5721 x = expand_compound_operation (x);
5722 if (GET_CODE (x) != code)
5723 return force_to_mode (x, mode, mask, reg, next_select);
5726 case REG:
5727 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
5728 || rtx_equal_p (reg, get_last_value (x))))
5729 x = reg;
5732 case SUBREG:
5733 if (subreg_lowpart_p (x)
5734 /* We can ignore the effect of this SUBREG if it narrows the mode or
5735 if the constant masks to zero all the bits the mode doesn't
5736 have. */
5737 && ((GET_MODE_SIZE (GET_MODE (x))
5738 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
5740 & GET_MODE_MASK (GET_MODE (x))
5741 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
5742 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
5745 case AND:
5746 /* If this is an AND with a constant, convert it into an AND
5747 whose constant is the AND of that constant with MASK. If it
5748 remains an AND of MASK, delete it since it is redundant. */
5750 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5751 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
5753 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
5754 mask & INTVAL (XEXP (x, 1)));
5756 /* If X is still an AND, see if it is an AND with a mask that
5757 is just some low-order bits. If so, and it is MASK, we don't
5758 need it. */
5760 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5761 && INTVAL (XEXP (x, 1)) == mask)
5762 x = XEXP (x, 0);
5764 /* If it remains an AND, try making another AND with the bits
5765 in the mode mask that aren't in MASK turned on. If the
5766 constant in the AND is wide enough, this might make a
5767 cheaper constant. */
5769 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5770 && GET_MODE_MASK (GET_MODE (x)) != mask)
5772 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
5773 | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
5774 int width = GET_MODE_BITSIZE (GET_MODE (x));
5777 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
5778 number, sign extend it. */
5779 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
5780 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
5781 cval |= (HOST_WIDE_INT) -1 << width;
5783 y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
5784 if (rtx_cost (y, SET) < rtx_cost (x, SET))
5785 x = y;
5793 case PLUS:
5794 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5795 low-order bits (as in an alignment operation) and FOO is already
5796 aligned to that boundary, mask C1 to that boundary as well.
5797 This may eliminate that PLUS and, later, the AND. */
5800 int width = GET_MODE_BITSIZE (mode);
5801 unsigned HOST_WIDE_INT smask = mask;
5803 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
5804 number, sign extend it. */
5806 if (width < HOST_BITS_PER_WIDE_INT
5807 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
5808 smask |= (HOST_WIDE_INT) -1 << width;
5810 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5811 && exact_log2 (- smask) >= 0
5812 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
5813 && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
5814 return force_to_mode (plus_constant (XEXP (x, 0),
5815 INTVAL (XEXP (x, 1)) & mask),
5816 mode, mask, reg, next_select);
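/* EDITOR'S EXAMPLE (not in the original source): in
   (and (plus FOO (const_int 7)) (const_int -8)) with FOO known to be
   8-byte aligned, 7 & -8 == 0, so the PLUS contributes nothing inside
   the mask and the expression reduces to FOO. */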
5819 /* ... fall through ... */
5821 case MINUS:
5822 case MULT:
5823 /* For PLUS, MINUS and MULT, we need any bits less significant than the
5824 most significant bit in MASK since carries from those bits will
5825 affect the bits we are interested in. */
5826 mask = fuller_mask;
5827 goto binop;
5829 case IOR:
5830 case XOR:
5831 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5832 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5833 operation which may be a bitfield extraction. Ensure that the
5834 constant we form is not wider than the mode of X. */
5836 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5837 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5838 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5839 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5840 && GET_CODE (XEXP (x, 1)) == CONST_INT
5841 && ((INTVAL (XEXP (XEXP (x, 0), 1))
5842 + floor_log2 (INTVAL (XEXP (x, 1))))
5843 < GET_MODE_BITSIZE (GET_MODE (x)))
5844 && ((INTVAL (XEXP (x, 1))
5845 & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0))
5847 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
5848 << INTVAL (XEXP (XEXP (x, 0), 1)));
5849 temp = gen_binary (GET_CODE (x), GET_MODE (x),
5850 XEXP (XEXP (x, 0), 0), temp);
5851 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (x, 1));
5852 return force_to_mode (x, mode, mask, reg, next_select);
5855 binop:
5856 /* For most binary operations, just propagate into the operation and
5857 change the mode if we have an operation of that mode. */
5859 op0 = gen_lowpart_for_combine (op_mode,
5860 force_to_mode (XEXP (x, 0), mode, mask,
5862 op1 = gen_lowpart_for_combine (op_mode,
5863 force_to_mode (XEXP (x, 1), mode, mask,
5866 /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
5867 MASK since OP1 might have been sign-extended but we never want
5868 to turn on extra bits, since combine might have previously relied
5869 on them being off. */
5870 if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
5871 && (INTVAL (op1) & mask) != 0)
5872 op1 = GEN_INT (INTVAL (op1) & mask);
5874 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
5875 x = gen_binary (code, op_mode, op0, op1);
5878 case ASHIFT:
5879 /* For left shifts, do the same, but just for the first operand.
5880 However, we cannot do anything with shifts where we cannot
5881 guarantee that the counts are smaller than the size of the mode
5882 because such a count will have a different meaning in a
5883 wider mode. */
5885 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
5886 && INTVAL (XEXP (x, 1)) >= 0
5887 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
5888 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
5889 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
5890 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
5891 break;
5893 /* If the shift count is a constant and we can do arithmetic in
5894 the mode of the shift, refine which bits we need. Otherwise, use the
5895 conservative form of the mask. */
5896 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5897 && INTVAL (XEXP (x, 1)) >= 0
5898 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
5899 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
5900 mask >>= INTVAL (XEXP (x, 1));
5901 else
5902 mask = fuller_mask;
5904 op0 = gen_lowpart_for_combine (op_mode,
5905 force_to_mode (XEXP (x, 0), op_mode,
5906 mask, reg, next_select));
5908 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
5909 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
5912 case LSHIFTRT:
5913 /* Here we can only do something if the shift count is a constant,
5914 this shift constant is valid for the host, and we can do arithmetic
5915 in OP_MODE. */
5917 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5918 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5919 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
5921 rtx inner = XEXP (x, 0);
5923 /* Select the mask of the bits we need for the shift operand. */
5924 mask <<= INTVAL (XEXP (x, 1));
5926 /* We can only change the mode of the shift if we can do arithmetic
5927 in the mode of the shift and MASK is no wider than the width of
5928 OP_MODE. */
5929 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
5930 || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
5931 op_mode = GET_MODE (x);
5933 inner = force_to_mode (inner, op_mode, mask, reg, next_select);
5935 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
5936 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
5939 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
5940 shift and AND produces only copies of the sign bit (C2 is one less
5941 than a power of two), we can do this with just a shift. */
5943 if (GET_CODE (x) == LSHIFTRT
5944 && GET_CODE (XEXP (x, 1)) == CONST_INT
5945 && ((INTVAL (XEXP (x, 1))
5946 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
5947 >= GET_MODE_BITSIZE (GET_MODE (x)))
5948 && exact_log2 (mask + 1) >= 0
5949 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5950 >= exact_log2 (mask + 1)))
5951 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
5952 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
5953 - exact_log2 (mask + 1)));
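/* EDITOR'S EXAMPLE (not in the original source): in SImode with
   MASK 255, (lshiftrt X (const_int 24)) needs no AND when X has at
   least 8 sign-bit copies: only sign copies survive the shift, and the
   computed count 32 - exact_log2 (256) = 24 reproduces the shift. */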
5956 case ASHIFTRT:
5957 /* If we are just looking for the sign bit, we don't need this shift at
5958 all, even if it has a variable count. */
5959 if (mask == ((HOST_WIDE_INT) 1
5960 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))
5961 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
5963 /* If this is a shift by a constant, get a mask that contains those bits
5964 that are not copies of the sign bit. We then have two cases: If
5965 MASK only includes those bits, this can be a logical shift, which may
5966 allow simplifications. If MASK is a single-bit field not within
5967 those bits, we are requesting a copy of the sign bit and hence can
5968 shift the sign bit to the appropriate location. */
5970 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
5971 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
5975 nonzero = GET_MODE_MASK (GET_MODE (x));
5976 nonzero >>= INTVAL (XEXP (x, 1));
5978 if ((mask & ~ nonzero) == 0
5979 || (i = exact_log2 (mask)) >= 0)
5981 x = simplify_shift_const
5982 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
5983 i < 0 ? INTVAL (XEXP (x, 1))
5984 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
5986 if (GET_CODE (x) != ASHIFTRT)
5987 return force_to_mode (x, mode, mask, reg, next_select);
5991 /* If MASK is 1, convert this to a LSHIFTRT. This can be done
5992 even if the shift count isn't a constant. */
5993 if (mask == 1)
5994 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
5996 /* If this is a sign-extension operation that just affects bits
5997 we don't care about, remove it. Be sure the call above returned
5998 something that is still a shift. */
6000 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
6001 && GET_CODE (XEXP (x, 1)) == CONST_INT
6002 && INTVAL (XEXP (x, 1)) >= 0
6003 && (INTVAL (XEXP (x, 1))
6004 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
6005 && GET_CODE (XEXP (x, 0)) == ASHIFT
6006 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6007 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
6008 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
6009 reg, next_select);
6012 case ROTATE:
6013 case ROTATERT:
6015 /* If the shift count is constant and we can do computations
6016 in the mode of X, compute where the bits we care about are.
6017 Otherwise, we can't do anything. Don't change the mode of
6018 the shift or propagate MODE into the shift, though. */
6019 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6020 && INTVAL (XEXP (x, 1)) >= 0)
6022 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
6023 GET_MODE (x), GEN_INT (mask),
6024 XEXP (x, 1));
6025 if (temp && GET_CODE (temp) == CONST_INT)
6026 SUBST (XEXP (x, 0),
6027 force_to_mode (XEXP (x, 0), GET_MODE (x),
6028 INTVAL (temp), reg, next_select));
6032 case NEG:
6033 /* If we just want the low-order bit, the NEG isn't needed since it
6034 won't change the low-order bit. */
6035 if (mask == 1)
6036 return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
6038 /* We need any bits less significant than the most significant bit in
6039 MASK since carries from those bits will affect the bits we are
6040 interested in. */
6041 mask = fuller_mask;
6042 goto unop;
6044 case NOT:
6045 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
6046 same as the XOR case above. Ensure that the constant we form is not
6047 wider than the mode of X. */
6049 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6050 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6051 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6052 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
6053 < GET_MODE_BITSIZE (GET_MODE (x)))
6054 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
6056 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
6057 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
6058 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
6060 return force_to_mode (x, mode, mask, reg, next_select);
6063 unop:
6064 op0 = gen_lowpart_for_combine (op_mode,
6065 force_to_mode (XEXP (x, 0), mode, mask,
6067 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6068 x = gen_unary (code, op_mode, op_mode, op0);
6071 case NE:
6072 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
6073 in STORE_FLAG_VALUE and FOO has no bits that might be nonzero not
6074 in CONST. */
6075 if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
6076 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0)
6077 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6081 case IF_THEN_ELSE:
6082 /* We have no way of knowing if the IF_THEN_ELSE can itself be
6083 written in a narrower mode. We play it safe and do not do so. */
6085 SUBST (XEXP (x, 1),
6086 gen_lowpart_for_combine (GET_MODE (x),
6087 force_to_mode (XEXP (x, 1), mode,
6088 mask, reg, next_select)));
6089 SUBST (XEXP (x, 2),
6090 gen_lowpart_for_combine (GET_MODE (x),
6091 force_to_mode (XEXP (x, 2), mode,
6092 mask, reg, next_select)));
6096 /* Ensure we return a value of the proper mode. */
6097 return gen_lowpart_for_combine (mode, x);
6100 /* Return nonzero if X is an expression that has one of two values depending on
6101 whether some other value is zero or nonzero. In that case, we return the
6102 value that is being tested, *PTRUE is set to the value if the rtx being
6103 returned has a nonzero value, and *PFALSE is set to the other alternative.
6105 If we return zero, we set *PTRUE and *PFALSE to X. */
6108 if_then_else_cond (x, ptrue, pfalse)
6110 rtx *ptrue, *pfalse;
6112 enum machine_mode mode = GET_MODE (x);
6113 enum rtx_code code = GET_CODE (x);
6114 int size = GET_MODE_BITSIZE (mode);
6115 rtx cond0, cond1, true0, true1, false0, false1;
6116 unsigned HOST_WIDE_INT nz;
6118 /* If this is a unary operation whose operand has one of two values, apply
6119 our opcode to compute those values. */
6120 if (GET_RTX_CLASS (code) == '1'
6121 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
6123 *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
6124 *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
6128 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
6129 make can't possibly match and would suppress other optimizations. */
6130 else if (code == COMPARE)
6133 /* If this is a binary operation, see if either side has only one of two
6134 values. If either one does or if both do and they are conditional on
6135 the same value, compute the new true and false values. */
6136 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
6137 || GET_RTX_CLASS (code) == '<')
6139 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
6140 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
6142 if ((cond0 != 0 || cond1 != 0)
6143 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
6145 *ptrue = gen_binary (code, mode, true0, true1);
6146 *pfalse = gen_binary (code, mode, false0, false1);
6147 return cond0 ? cond0 : cond1;
6150 #if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
6152 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
6153 operands is zero when the other is non-zero, and vice-versa. */
6155 if ((code == PLUS || code == IOR || code == XOR || code == MINUS
6156 || code == UMAX)
6157 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6159 rtx op0 = XEXP (XEXP (x, 0), 1);
6160 rtx op1 = XEXP (XEXP (x, 1), 1);
6162 cond0 = XEXP (XEXP (x, 0), 0);
6163 cond1 = XEXP (XEXP (x, 1), 0);
6165 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6166 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6167 && reversible_comparison_p (cond1)
6168 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6169 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6170 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6171 || ((swap_condition (GET_CODE (cond0))
6172 == reverse_condition (GET_CODE (cond1)))
6173 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6174 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6175 && ! side_effects_p (x))
6177 *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
6178 *pfalse = gen_binary (MULT, mode,
6179 (code == MINUS
6180 ? gen_unary (NEG, mode, mode, op1) : op1),
6181 const_true_rtx);
6186 /* Similarly for MULT, AND and UMIN, except that for these the result
6187 is always zero. */
6188 if ((code == MULT || code == AND || code == UMIN)
6189 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6191 cond0 = XEXP (XEXP (x, 0), 0);
6192 cond1 = XEXP (XEXP (x, 1), 0);
6194 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6195 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6196 && reversible_comparison_p (cond1)
6197 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6198 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6199 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6200 || ((swap_condition (GET_CODE (cond0))
6201 == reverse_condition (GET_CODE (cond1)))
6202 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6203 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6204 && ! side_effects_p (x))
6206 *ptrue = *pfalse = const0_rtx;
6207 return cond0;
6210 #endif
6213 else if (code == IF_THEN_ELSE)
6215 /* If we have IF_THEN_ELSE already, extract the condition and
6216 canonicalize it if it is NE or EQ. */
6217 cond0 = XEXP (x, 0);
6218 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
6219 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
6220 return XEXP (cond0, 0);
6221 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
6223 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
6224 return XEXP (cond0, 0);
6230 /* If X is a normal SUBREG with both inner and outer modes integral,
6231 we can narrow both the true and false values of the inner expression,
6232 if there is a condition. */
6233 else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
6234 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
6235 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
6236 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
6237 &true0, &false0)))
6239 *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6240 *pfalse
6241 = force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6243 return cond0;
6246 /* If X is a constant, this isn't special and will cause confusion
6247 if we treat it as such. Likewise if it is equivalent to a constant. */
6248 else if (CONSTANT_P (x)
6249 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
6252 /* If X is known to be either 0 or -1, those are the true and
6253 false values when testing X. */
6254 else if (num_sign_bit_copies (x, mode) == size)
6256 *ptrue = constm1_rtx, *pfalse = const0_rtx;
6257 return x;
6260 /* Likewise for 0 or a single bit. */
6261 else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
6263 *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
6264 return x;
6267 /* Otherwise fail; show no condition with true and false values the same. */
6268 *ptrue = *pfalse = x;
6269 return 0;
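/* EDITOR'S EXAMPLE (not in the original source): for
   X = (if_then_else (ne R (const_int 0)) A B) this returns R with
   *PTRUE = A and *PFALSE = B; for an X whose only possibly nonzero bit
   is bit 4, it returns X itself with *PTRUE = (const_int 16) and
   *PFALSE = (const_int 0). */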
6272 /* Return the value of expression X given the fact that condition COND
6273 is known to be true when applied to REG as its first operand and VAL
6274 as its second. X is known to not be shared and so can be modified in
6275 place.
6277 We only handle the simplest cases, and specifically those cases that
6278 arise with IF_THEN_ELSE expressions. */
6281 known_cond (x, cond, reg, val)
6286 enum rtx_code code = GET_CODE (x);
6291 if (side_effects_p (x))
6292 return x;
6294 if (cond == EQ && rtx_equal_p (x, reg))
6295 return val;
6297 /* If X is (abs REG) and we know something about REG's relationship
6298 with zero, we may be able to simplify this. */
6300 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
6301 switch (cond)
6303 case GE: case GT: case EQ:
6304 return XEXP (x, 0);
6305 case LT: case LE:
6306 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
6310 /* The only other cases we handle are MIN, MAX, and comparisons if the
6311 operands are the same as REG and VAL. */
6313 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
6315 if (rtx_equal_p (XEXP (x, 0), val))
6316 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
6318 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
6320 if (GET_RTX_CLASS (code) == '<')
6321 return (comparison_dominates_p (cond, code) ? const_true_rtx
6322 : (comparison_dominates_p (cond,
6323 reverse_condition (code))
6324 ? const0_rtx : x));
6326 else if (code == SMAX || code == SMIN
6327 || code == UMIN || code == UMAX)
6329 int unsignedp = (code == UMIN || code == UMAX);
6331 if (code == SMAX || code == UMAX)
6332 cond = reverse_condition (cond);
6334 switch (cond)
6336 case GE: case GT:
6337 return unsignedp ? x : XEXP (x, 1);
6338 case LE: case LT:
6339 return unsignedp ? x : XEXP (x, 0);
6340 case GEU: case GTU:
6341 return unsignedp ? XEXP (x, 1) : x;
6342 case LEU: case LTU:
6343 return unsignedp ? XEXP (x, 0) : x;
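/* EDITOR'S EXAMPLE (not in the original source): if COND says
   REG >= VAL, then (smin REG VAL) is VAL and (smax REG VAL) is REG;
   the reverse_condition call above maps the SMAX/UMAX cases onto the
   same table. */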
6349 fmt = GET_RTX_FORMAT (code);
6350 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6352 if (fmt[i] == 'e')
6353 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
6354 else if (fmt[i] == 'E')
6355 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6356 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
6357 cond, reg, val));
6359 return x;
6363 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
6364 Return that assignment if so.
6366 We only handle the most common cases. */
6369 make_field_assignment (x)
6372 rtx dest = SET_DEST (x);
6373 rtx src = SET_SRC (x);
6378 enum machine_mode mode;
6380 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
6381 a clear of a one-bit field. We will have changed it to
6382 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
6383 for a SUBREG. */
6385 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
6386 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
6387 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
6388 && (rtx_equal_p (dest, XEXP (src, 1))
6389 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6390 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6392 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
6393 1, 1, 1, 0);
6394 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6397 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
6398 && subreg_lowpart_p (XEXP (src, 0))
6399 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
6400 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
6401 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
6402 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
6403 && (rtx_equal_p (dest, XEXP (src, 1))
6404 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6405 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6407 assign = make_extraction (VOIDmode, dest, 0,
6408 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
6409 1, 1, 1, 0);
6410 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6413 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
6414 one-bit field. */
6415 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
6416 && XEXP (XEXP (src, 0), 0) == const1_rtx
6417 && (rtx_equal_p (dest, XEXP (src, 1))
6418 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6419 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6421 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
6422 1, 1, 1, 0);
6423 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
6426 /* The other case we handle is assignments into a constant-position
6427 field. They look like (ior (and DEST C1) OTHER). If C1 represents
6428 a mask that has all one bits except for a group of zero bits and
6429 OTHER is known to have zeros where C1 has ones, this is such an
6430 assignment. Compute the position and length from C1. Shift OTHER
6431 to the appropriate position, force it to the required mode, and
6432 make the extraction. Check for the AND in both operands. */
6434 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
6435 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
6436 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
6437 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
6438 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 0)), dest)))
6439 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
6440 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
6441 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
6442 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
6443 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
6444 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
6446 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
6447 else
6448 return x;
6450 pos = get_pos_from_mask (c1 ^ GET_MODE_MASK (GET_MODE (dest)), &len);
6451 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
6452 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
6453 && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
6454 return x;
6456 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
6458 /* The mode to use for the source is the mode of the assignment, or of
6459 what is inside a possible STRICT_LOW_PART. */
6460 mode = (GET_CODE (assign) == STRICT_LOW_PART
6461 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
6463 /* Shift OTHER right POS places and make it the source, restricting it
6464 to the proper length and mode. */
6466 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
6467 GET_MODE (src), other, pos),
6468 mode,
6469 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
6470 ? GET_MODE_MASK (mode)
6471 : ((HOST_WIDE_INT) 1 << len) - 1,
6472 dest, 0);
6474 return gen_rtx_combine (SET, VOIDmode, assign, src);
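/* EDITOR'S EXAMPLE (not in the original source): for
   (set DEST (ior (and DEST (const_int 0xffffff00)) OTHER)) in SImode,
   C1 selects POS = 0 and LEN = 8, so the result is roughly
   (set (zero_extract DEST (const_int 8) (const_int 0)) OTHER'), where
   OTHER' is OTHER shifted and masked to eight bits. */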
6477 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
6478 if so. */
6481 apply_distributive_law (x)
6484 enum rtx_code code = GET_CODE (x);
6485 rtx lhs, rhs, other;
6487 enum rtx_code inner_code;
6489 /* Distributivity is not true for floating point.
6490 It can change the value. So don't do it.
6491 -- rms and moshier@world.std.com. */
6492 if (FLOAT_MODE_P (GET_MODE (x)))
6495 /* The outer operation can only be one of the following: */
6496 if (code != IOR && code != AND && code != XOR
6497 && code != PLUS && code != MINUS)
6500 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
6502 /* If either operand is a primitive we can't do anything, so get out fast. */
6503 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
6504 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
6507 lhs = expand_compound_operation (lhs);
6508 rhs = expand_compound_operation (rhs);
6509 inner_code = GET_CODE (lhs);
6510 if (inner_code != GET_CODE (rhs))
6513 /* See if the inner and outer operations distribute. */
6514 switch (inner_code)
6516 case LSHIFTRT:
6517 case ASHIFTRT:
6520 /* These all distribute except over PLUS. */
6521 if (code == PLUS || code == MINUS)
6522 return x;
6525 case MULT:
6526 if (code != PLUS && code != MINUS)
6527 return x;
6530 case ASHIFT:
6531 /* This is also a multiply, so it distributes over everything. */
6534 case SUBREG:
6535 /* Non-paradoxical SUBREGs distribute over all operations, provided
6536 the inner modes and word numbers are the same, this is an extraction
6537 of a low-order part, we don't convert an fp operation to int or
6538 vice versa, and we would not be converting a single-word
6539 operation into a multi-word operation. The latter test is not
6540 required, but it prevents generating unneeded multi-word operations.
6541 Some of the previous tests are redundant given the latter test, but
6542 are retained because they are required for correctness.
6544 We produce the result slightly differently in this case. */
6546 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
6547 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
6548 || ! subreg_lowpart_p (lhs)
6549 || (GET_MODE_CLASS (GET_MODE (lhs))
6550 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
6551 || (GET_MODE_SIZE (GET_MODE (lhs))
6552 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
6553 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
6556 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
6557 SUBREG_REG (lhs), SUBREG_REG (rhs));
6558 return gen_lowpart_for_combine (GET_MODE (x), tem);
6564 /* Set LHS and RHS to the inner operands (A and B in the example
6565 above) and set OTHER to the common operand (C in the example).
6566 There is only one way to do this unless the inner operation is
6567 commutative. */
6568 if (GET_RTX_CLASS (inner_code) == 'c'
6569 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
6570 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
6571 else if (GET_RTX_CLASS (inner_code) == 'c'
6572 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
6573 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
6574 else if (GET_RTX_CLASS (inner_code) == 'c'
6575 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
6576 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
6577 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
6578 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
6582 /* Form the new inner operation, seeing if it simplifies first. */
6583 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
6585 /* There is one exception to the general way of distributing:
6586 (a | b) ^ (a | c) -> (~a) & (b ^ c) */
6587 if (code == XOR && inner_code == IOR)
6589 inner_code = AND;
6590 other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
6593 /* We may be able to continue distributing the result, so call
6594 ourselves recursively on the inner operation before forming the
6595 outer operation, which we return. */
6596 return gen_binary (inner_code, GET_MODE (x),
6597 apply_distributive_law (tem), other);
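/* EDITOR'S EXAMPLE (not in the original source):
   (ior (and A C) (and B C)) becomes (and (ior A B) C); the recursive
   call above lets doubly-distributed forms keep collapsing. */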
6600 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
6601 in MODE.
6603 Return an equivalent form, if different from X. Otherwise, return X. If
6604 X is zero, we are to always construct the equivalent form. */
6607 simplify_and_const_int (x, mode, varop, constop)
6609 enum machine_mode mode;
6611 unsigned HOST_WIDE_INT constop;
6613 unsigned HOST_WIDE_INT nonzero;
6614 int width = GET_MODE_BITSIZE (mode);
6617 /* Simplify VAROP knowing that we will only be looking at some of the
6618 bits of it. */
6619 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
6621 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
6622 CONST_INT, we are done. */
6623 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
6626 /* See what bits may be nonzero in VAROP. Unlike the general case of
6627 a call to nonzero_bits, here we don't care about bits outside
6628 MODE. */
6630 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
6632 /* If this would be an entire word for the target, but is not for
6633 the host, then sign-extend on the host so that the number will look
6634 the same way on the host that it would on the target.
6636 For example, when building a 64 bit alpha hosted 32 bit sparc
6637 targeted compiler, then we want the 32 bit unsigned value -1 to be
6638 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
6639 The latter confuses the sparc backend. */
6641 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
6642 && (nonzero & ((HOST_WIDE_INT) 1 << (width - 1))))
6643 nonzero |= ((HOST_WIDE_INT) (-1) << width);
6645 /* Turn off all bits in the constant that are known to already be zero.
6646 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
6647 which is tested below. */
6649 constop &= nonzero;
6651 /* If we don't have any bits left, return zero. */
6653 if (constop == 0)
6654 return const0_rtx;
6655 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
6656 a power of two, we can replace this with an ASHIFT. */
6657 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
6658 && (i = exact_log2 (constop)) >= 0)
6659 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
6661 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
6662 or XOR, then try to apply the distributive law. This may eliminate
6663 operations if either branch can be simplified because of the AND.
6664 It may also make some cases more complex, but those cases probably
6665 won't match a pattern either with or without this. */
6667 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
6668 return
6669 gen_lowpart_for_combine
6671 apply_distributive_law
6672 (gen_binary (GET_CODE (varop), GET_MODE (varop),
6673 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6674 XEXP (varop, 0), constop),
6675 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6676 XEXP (varop, 1), constop))));
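/* EDITOR'S EXAMPLE (not in the original source): simplifying
   (ior X (const_int 12)) with CONSTOP 10 distributes into
   (ior (and X (const_int 10)) (const_int 8)), the constant arm having
   folded to 12 & 10 = 8. */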
6678 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
6679 if we already had one (just check for the simplest cases). */
6680 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6681 && GET_MODE (XEXP (x, 0)) == mode
6682 && SUBREG_REG (XEXP (x, 0)) == varop)
6683 varop = XEXP (x, 0);
6684 else
6685 varop = gen_lowpart_for_combine (mode, varop);
6687 /* If we can't make the SUBREG, try to return what we were given. */
6688 if (GET_CODE (varop) == CLOBBER)
6689 return x ? x : varop;
6691 /* If we are only masking insignificant bits, return VAROP. */
6692 if (constop == nonzero)
6693 x = varop;
6695 /* Otherwise, return an AND. See how much, if any, of X we can use. */
6696 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6697 x = gen_binary (AND, mode, varop, GEN_INT (constop));
6701 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6702 || INTVAL (XEXP (x, 1)) != constop)
6703 SUBST (XEXP (x, 1), GEN_INT (constop));
6705 SUBST (XEXP (x, 0), varop);
6711 /* Given an expression, X, compute which bits in X can be non-zero.
6712 We don't care about bits outside of those defined in MODE.
6714 For most X this is simply GET_MODE_MASK (mode), but if X is
6715 a shift, AND, or zero_extract, we can do better. */
6717 static unsigned HOST_WIDE_INT
6718 nonzero_bits (x, mode)
6720 enum machine_mode mode;
6722 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
6723 unsigned HOST_WIDE_INT inner_nz;
6725 int mode_width = GET_MODE_BITSIZE (mode);
6728 /* For floating-point values, assume all bits are needed. */
6729 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
6730 return nonzero;
6732 /* If X is wider than MODE, use its mode instead. */
6733 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
6735 mode = GET_MODE (x);
6736 nonzero = GET_MODE_MASK (mode);
6737 mode_width = GET_MODE_BITSIZE (mode);
6740 if (mode_width > HOST_BITS_PER_WIDE_INT)
6741 /* Our only callers in this case look for single bit values. So
6742 just return the mode mask. Those tests will then be false. */
6744 return nonzero;
6745 #ifndef WORD_REGISTER_OPERATIONS
6746 /* If MODE is wider than X, but both are a single word for both the host
6747 and target machines, we can compute this from which bits of the
6748 object might be nonzero in its own mode, taking into account the fact
6749 that on many CISC machines, accessing an object in a wider mode
6750 causes the high-order bits to become undefined. So they are
6751 not known to be zero. */
6753 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
6754 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
6755 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6756 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
6758 nonzero &= nonzero_bits (x, GET_MODE (x));
6759 nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
6762 #endif
6764 code = GET_CODE (x);
6765 switch (code)
6766 case REG:
6768 #ifdef STACK_BOUNDARY
6769 /* If this is the stack pointer, we may know something about its
6770 alignment. If PUSH_ROUNDING is defined, it is possible for the
6771 stack to be momentarily aligned only to that amount, so we pick
6772 the least alignment. */
6774 if (x == stack_pointer_rtx)
6776 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6778 #ifdef PUSH_ROUNDING
6779 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
6780 #endif
6782 nonzero &= ~ (sp_alignment - 1);
6784 #endif
6786 #ifdef POINTERS_EXTEND_UNSIGNED
6787 /* If pointers extend unsigned and this is a pointer in Pmode, say that
6788 all the bits above ptr_mode are known to be zero. */
6789 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
6790 && REGNO_POINTER_FLAG (REGNO (x)))
6791 nonzero &= GET_MODE_MASK (ptr_mode);
6792 #endif
6794 /* If X is a register whose nonzero bits value is current, use it.
6795 Otherwise, if X is a register whose value we can find, use that
6796 value. Otherwise, use the previously-computed global nonzero bits
6797 for this register. */
6799 if (reg_last_set_value[REGNO (x)] != 0
6800 && reg_last_set_mode[REGNO (x)] == mode
6801 && (reg_n_sets[REGNO (x)] == 1
6802 || reg_last_set_label[REGNO (x)] == label_tick)
6803 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6804 return reg_last_set_nonzero_bits[REGNO (x)];
6806 tem = get_last_value (x);
6807 if (tem)
6810 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6811 /* If X is narrower than MODE and TEM is a non-negative
6812 constant that would appear negative in the mode of X,
6813 sign-extend it for use in reg_nonzero_bits because some
6814 machines (maybe most) will actually do the sign-extension
6815 and this is the conservative approach.
6817 ??? For 2.5, try to tighten up the MD files in this regard
6818 instead of this kludge. */
6820 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
6821 && GET_CODE (tem) == CONST_INT
6823 && 0 != (INTVAL (tem)
6824 & ((HOST_WIDE_INT) 1
6825 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
6826 tem = GEN_INT (INTVAL (tem)
6827 | ((HOST_WIDE_INT) (-1)
6828 << GET_MODE_BITSIZE (GET_MODE (x))));
6829 #endif
6830 return nonzero_bits (tem, mode);
6832 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
6833 return reg_nonzero_bits[REGNO (x)] & nonzero;
6836 case CONST_INT:
6838 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6839 /* If X is negative in MODE, sign-extend the value. */
6840 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
6841 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
6842 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
6843 #endif
6844 return INTVAL (x);
6846 case MEM:
6848 #ifdef LOAD_EXTEND_OP
6849 /* In many, if not most, RISC machines, reading a byte from memory
6850 zeros the rest of the register. Noticing that fact saves a lot
6851 of extra zero-extends. */
6852 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
6853 nonzero &= GET_MODE_MASK (GET_MODE (x));
6854 #endif
6857 case EQ: case NE:
6858 case GT: case GTU:
6859 case LT: case LTU:
6860 case GE: case GEU:
6861 case LE: case LEU:
6863 /* If this produces an integer result, we know which bits are set.
6864 Code here used to clear bits outside the mode of X, but that is
6865 wrong. */
6867 if (GET_MODE_CLASS (mode) == MODE_INT
6868 && mode_width <= HOST_BITS_PER_WIDE_INT)
6869 nonzero = STORE_FLAG_VALUE;
6872 case NEG:
6873 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6874 == GET_MODE_BITSIZE (GET_MODE (x)))
6875 nonzero = 1;
6877 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
6878 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
6881 case ABS:
6882 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6883 == GET_MODE_BITSIZE (GET_MODE (x)))
6884 nonzero = 1;
6887 case TRUNCATE:
6888 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
6891 case ZERO_EXTEND:
6892 nonzero &= nonzero_bits (XEXP (x, 0), mode);
6893 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6894 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6897 case SIGN_EXTEND:
6898 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6899 Otherwise, show all the bits in the outer mode but not the inner
6900 mode. */
6901 inner_nz = nonzero_bits (XEXP (x, 0), mode);
6902 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6904 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6905 if (inner_nz &
6906 (((HOST_WIDE_INT) 1
6907 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
6908 inner_nz |= (GET_MODE_MASK (mode)
6909 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6912 nonzero &= inner_nz;
6915 case AND:
6916 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6917 & nonzero_bits (XEXP (x, 1), mode));
6920 case XOR: case IOR:
6921 case UMIN: case UMAX: case SMIN: case SMAX:
6922 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6923 | nonzero_bits (XEXP (x, 1), mode));
6926 case PLUS: case MINUS:
6927 case MULT:
6928 case DIV: case UDIV:
6929 case MOD: case UMOD:
6930 /* We can apply the rules of arithmetic to compute the number of
6931 high- and low-order zero bits of these operations. We start by
6932 computing the width (position of the highest-order non-zero bit)
6933 and the number of low-order zero bits for each value. */
6935 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
6936 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
6937 int width0 = floor_log2 (nz0) + 1;
6938 int width1 = floor_log2 (nz1) + 1;
6939 int low0 = floor_log2 (nz0 & -nz0);
6940 int low1 = floor_log2 (nz1 & -nz1);
6941 HOST_WIDE_INT op0_maybe_minusp
6942 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6943 HOST_WIDE_INT op1_maybe_minusp
6944 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6945 int result_width = mode_width;
6951 result_width = MAX (width0, width1) + 1;
6952 result_low = MIN (low0, low1);
6955 result_low = MIN (low0, low1);
6958 result_width = width0 + width1;
6959 result_low = low0 + low1;
6962 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6963 result_width = width0;
6966 result_width = width0;
6969 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6970 result_width = MIN (width0, width1);
6971 result_low = MIN (low0, low1);
6974 result_width = MIN (width0, width1);
6975 result_low = MIN (low0, low1);
6979 if (result_width < mode_width)
6980 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
6983 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
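/* A worked example of the width arithmetic above for PLUS, with
   illustrative values: if nz0 == 0x0c and nz1 == 0x30, then
   width0 == 4, width1 == 6, low0 == 2 and low1 == 4.  The sum fits
   in MAX (4, 6) + 1 == 7 bits and is a multiple of 1 << MIN (2, 4),
   so NONZERO is narrowed to 0x7f & ~3 == 0x7c.  */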
6988 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6989 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6990 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
6994 /* If this is a SUBREG formed for a promoted variable that has
6995 been zero-extended, we know that at least the high-order bits
6996 are zero, though others might be too. */
6998 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
6999 nonzero = (GET_MODE_MASK (GET_MODE (x))
7000 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
7002 /* If the inner mode is a single word for both the host and target
7003 machines, we can compute this from which bits of the inner
7004 object might be nonzero. */
7005 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
7006 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7007 <= HOST_BITS_PER_WIDE_INT))
7009 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
7011 #ifndef WORD_REGISTER_OPERATIONS
7012 /* On many CISC machines, accessing an object in a wider mode
7013 causes the high-order bits to become undefined. So they are
7014 not known to be zero. */
7015 if (GET_MODE_SIZE (GET_MODE (x))
7016 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7017 nonzero |= (GET_MODE_MASK (GET_MODE (x))
7018 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
7027 /* The nonzero bits are in two classes: any bits within MODE
7028 that aren't in GET_MODE (x) are always significant. The rest of the
7029 nonzero bits are those that are significant in the operand of
7030 the shift when shifted the appropriate number of bits. This
7031 shows that high-order bits are cleared by the right shift and
7032 low-order bits by left shifts. */
7033 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7034 && INTVAL (XEXP (x, 1)) >= 0
7035 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7037 enum machine_mode inner_mode = GET_MODE (x);
7038 int width = GET_MODE_BITSIZE (inner_mode);
7039 int count = INTVAL (XEXP (x, 1));
7040 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
7041 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
7042 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
7043 unsigned HOST_WIDE_INT outer = 0;
7045 if (mode_width > width)
7046 outer = (op_nonzero & nonzero & ~ mode_mask);
7048 if (code == LSHIFTRT)
7050 else if (code == ASHIFTRT)
7054 /* If the sign bit may have been nonzero before the shift, we
7055 need to mark all the places it could have been copied to
7056 by the shift as possibly nonzero. */
7057 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
7058 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
7060 else if (code == ASHIFT)
7063 inner = ((inner << (count % width)
7064 | (inner >> (width - (count % width)))) & mode_mask);
7066 nonzero &= (outer | inner);
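/* Sketch of the ASHIFTRT path above with assumed numbers: width == 8,
   count == 2 and op_nonzero == 0xf0 give inner == 0x3c after the
   shift; bit 8 - 1 - 2 == 5 is still set, so the sign bit may have
   been copied into the two vacated positions and inner becomes
   0x3c | (3 << 6) == 0xfc.  */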
7071 /* This is at most the number of bits in the mode. */
7072 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
7076 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
7077 | nonzero_bits (XEXP (x, 2), mode));
7084 /* Return the number of bits at the high-order end of X that are known to
7085 be equal to the sign bit. X will be used in mode MODE; if MODE is
7086 VOIDmode, X will be used in its own mode. The returned value will always
7087 be between 1 and the number of bits in MODE. */
7090 num_sign_bit_copies (x, mode)
7092 enum machine_mode mode;
7094 enum rtx_code code = GET_CODE (x);
7096 int num0, num1, result;
7097 unsigned HOST_WIDE_INT nonzero;
7100 /* If we weren't given a mode, use the mode of X. If the mode is still
7101 VOIDmode, we don't know anything. Likewise if one of the modes is floating-point. */
7104 if (mode == VOIDmode)
7105 mode = GET_MODE (x);
7107 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
7110 bitwidth = GET_MODE_BITSIZE (mode);
7112 /* For a smaller object, just ignore the high bits. */
7113 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
7114 return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
7115 - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
7117 #ifndef WORD_REGISTER_OPERATIONS
7118 /* If this machine does not do all register operations on the entire
7119 register and MODE is wider than the mode of X, we can say nothing
7120 at all about the high-order bits. */
7121 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
7129 #ifdef POINTERS_EXTEND_UNSIGNED
7130 /* If pointers extend signed and this is a pointer in Pmode, say that
7131 all the bits above ptr_mode are known to be sign bit copies. */
7132 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
7133 && REGNO_POINTER_FLAG (REGNO (x)))
7134 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
7137 if (reg_last_set_value[REGNO (x)] != 0
7138 && reg_last_set_mode[REGNO (x)] == mode
7139 && (reg_n_sets[REGNO (x)] == 1
7140 || reg_last_set_label[REGNO (x)] == label_tick)
7141 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7142 return reg_last_set_sign_bit_copies[REGNO (x)];
7144 tem = get_last_value (x);
7146 return num_sign_bit_copies (tem, mode);
7148 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
7149 return reg_sign_bit_copies[REGNO (x)];
7153 #ifdef LOAD_EXTEND_OP
7154 /* Some RISC machines sign-extend all loads smaller than a word. */
7155 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
7156 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
7161 /* If the constant is negative, take its 1's complement and remask.
7162 Then see how many zero bits we have. */
7163 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
7164 if (bitwidth <= HOST_BITS_PER_WIDE_INT
7165 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7166 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
7168 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
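/* For instance (an illustrative value, not from the source): with
   bitwidth == 8 and INTVAL (x) == -2, nonzero starts as 0xfe; the
   sign bit is set, so we complement to 0x01 and return
   8 - floor_log2 (1) - 1 == 7, matching the seven leading one bits
   of 0b11111110.  */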
7171 /* If this is a SUBREG for a promoted object that is sign-extended
7172 and we are looking at it in a wider mode, we know that at least the
7173 high-order bits are known to be sign bit copies. */
7175 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
7176 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
7177 num_sign_bit_copies (SUBREG_REG (x), mode));
7179 /* For a smaller object, just ignore the high bits. */
7180 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
7182 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
7183 return MAX (1, (num0
7184 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7188 #ifdef WORD_REGISTER_OPERATIONS
7189 #ifdef LOAD_EXTEND_OP
7190 /* For paradoxical SUBREGs on machines where all register operations
7191 affect the entire register, just look inside. Note that we are
7192 passing MODE to the recursive call, so the number of sign bit copies
7193 will remain relative to that mode, not the inner mode. */
7195 /* This works only if loads sign extend. Otherwise, if we get a
7196 reload for the inner part, it may be loaded from the stack, and
7197 then we lose all sign bit copies that existed before the store to the stack. */
7200 if ((GET_MODE_SIZE (GET_MODE (x))
7201 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7202 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
7203 return num_sign_bit_copies (SUBREG_REG (x), mode);
7209 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
7210 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
7214 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7215 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
7218 /* For a smaller object, just ignore the high bits. */
7219 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
7220 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7224 return num_sign_bit_copies (XEXP (x, 0), mode);
7226 case ROTATE: case ROTATERT:
7227 /* If we are rotating left by a number of bits less than the number
7228 of sign bit copies, we can just subtract that amount from the
7230 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7231 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
7233 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7234 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
7235 : bitwidth - INTVAL (XEXP (x, 1))));
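/* E.g. (illustrative): rotating left by 2 a value with five sign bit
   copies leaves MAX (1, 5 - 2) == 3 copies; a ROTATERT by R is
   handled as a left rotate by bitwidth - R.  */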
7240 /* In general, this subtracts one sign bit copy. But if the value
7241 is known to be positive, the number of sign bit copies is the
7242 same as that of the input. Finally, if the input has just one bit
7243 that might be nonzero, all the bits are copies of the sign bit. */
7244 nonzero = nonzero_bits (XEXP (x, 0), mode);
7248 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7250 && bitwidth <= HOST_BITS_PER_WIDE_INT
7251 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
7256 case IOR: case AND: case XOR:
7257 case SMIN: case SMAX: case UMIN: case UMAX:
7258 /* Logical operations will preserve the number of sign-bit copies.
7259 MIN and MAX operations always return one of the operands. */
7260 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7261 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7262 return MIN (num0, num1);
7264 case PLUS: case MINUS:
7265 /* For addition and subtraction, we can have a 1-bit carry. However,
7266 if we are subtracting 1 from a positive number, there will not
7267 be such a carry. Furthermore, if the positive number is known to
7268 be 0 or 1, we know the result is either -1 or 0. */
7270 if (code == PLUS && XEXP (x, 1) == constm1_rtx
7271 && bitwidth <= HOST_BITS_PER_WIDE_INT)
7273 nonzero = nonzero_bits (XEXP (x, 0), mode);
7274 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
7275 return (nonzero == 1 || nonzero == 0 ? bitwidth
7276 : bitwidth - floor_log2 (nonzero) - 1);
7279 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7280 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7281 return MAX (1, MIN (num0, num1) - 1);
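/* A worked instance with assumed numbers: in an 8-bit mode, if each
   operand has at least 3 sign bit copies (so each fits in 6 signed
   bits, i.e. [-32, 31]), their sum fits in 7 signed bits and still
   has MIN (3, 3) - 1 == 2 copies of the sign bit.  */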
7284 /* The number of bits of the product is the sum of the number of
7285 bits of both terms. However, unless one of the terms is known
7286 to be positive, we must allow for an additional bit since negating
7287 a negative number can remove one sign bit copy. */
7289 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7290 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7292 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
7294 && bitwidth <= HOST_BITS_PER_WIDE_INT
7295 && ((nonzero_bits (XEXP (x, 0), mode)
7296 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7297 && ((nonzero_bits (XEXP (x, 1), mode)
7298 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
7301 return MAX (1, result);
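/* Worked example with assumed values: bitwidth == 8, num0 == 4 and
   num1 == 6, so the operands fit in 5 and 3 signed bits.  RESULT
   starts as 4 + 6 - 8 == 2, but (-16) * (-4) == 64 needs 7 value
   bits, so when both operands may be negative the test above drops
   one more copy and we return 1.  */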
7304 /* The result must be <= the first operand. */
7305 return num_sign_bit_copies (XEXP (x, 0), mode);
7308 /* The result must be <= the second operand. */
7309 return num_sign_bit_copies (XEXP (x, 1), mode);
7312 /* Similar to unsigned division, except that we have to worry about
7313 the case where the divisor is negative, in which case we have to add 1. */
7315 result = num_sign_bit_copies (XEXP (x, 0), mode);
7317 && bitwidth <= HOST_BITS_PER_WIDE_INT
7318 && (nonzero_bits (XEXP (x, 1), mode)
7319 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7325 result = num_sign_bit_copies (XEXP (x, 1), mode);
7327 && bitwidth <= HOST_BITS_PER_WIDE_INT
7328 && (nonzero_bits (XEXP (x, 1), mode)
7329 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7335 /* Shifts by a constant add to the number of bits equal to the sign bit. */
7337 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7338 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7339 && INTVAL (XEXP (x, 1)) > 0)
7340 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
7345 /* Left shifts destroy copies. */
7346 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7347 || INTVAL (XEXP (x, 1)) < 0
7348 || INTVAL (XEXP (x, 1)) >= bitwidth)
7351 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7352 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
7355 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
7356 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
7357 return MIN (num0, num1);
7359 #if STORE_FLAG_VALUE == -1
7360 case EQ: case NE: case GE: case GT: case LE: case LT:
7361 case GEU: case GTU: case LEU: case LTU:
7366 /* If we haven't been able to figure it out by one of the above rules,
7367 see if some of the high-order bits are known to be zero. If so,
7368 count those bits and return one less than that amount. If we can't
7369 safely compute the mask for this mode, always return BITWIDTH. */
7371 if (bitwidth > HOST_BITS_PER_WIDE_INT)
7374 nonzero = nonzero_bits (x, mode);
7375 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
7376 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
7379 /* Return the number of "extended" bits there are in X, when interpreted
7380 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
7381 unsigned quantities, this is the number of high-order zero bits.
7382 For signed quantities, this is the number of copies of the sign bit
7383 minus 1. In both cases, this function returns the number of "spare"
7384 bits. For example, if two quantities for which this function returns
7385 at least 1 are added, the addition is known not to overflow.
7387 This function will always return 0 unless called during combine, which
7388 implies that it must be called from a define_split. */
7391 extended_count (x, mode, unsignedp)
7393 enum machine_mode mode;
7396 if (nonzero_sign_valid == 0)
7400 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7401 && (GET_MODE_BITSIZE (mode) - 1
7402 - floor_log2 (nonzero_bits (x, mode))))
7403 : num_sign_bit_copies (x, mode) - 1);
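/* A quick sketch with assumed values: for an unsigned quantity in an
   8-bit mode whose nonzero_bits are 0x1f, this returns
   8 - 1 - floor_log2 (0x1f) == 3 spare bits; two such quantities
   (each at most 31) can then be added without overflow.  */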
7406 /* This function is called from `simplify_shift_const' to merge two
7407 outer operations. Specifically, we have already found that we need
7408 to perform operation *POP0 with constant *PCONST0 at the outermost
7409 position. We would now like to also perform OP1 with constant CONST1
7410 (with *POP0 being done last).
7412 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
7413 the resulting operation. *PCOMP_P is set to 1 if we would need to
7414 complement the innermost operand, otherwise it is unchanged.
7416 MODE is the mode in which the operation will be done. No bits outside
7417 the width of this mode matter. It is assumed that the width of this mode
7418 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
7420 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
7421 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
7422 result is simply *PCONST0.
7424 If the resulting operation cannot be expressed as one operation, we
7425 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
7428 merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
7429 enum rtx_code *pop0;
7430 HOST_WIDE_INT *pconst0;
7432 HOST_WIDE_INT const1;
7433 enum machine_mode mode;
7436 enum rtx_code op0 = *pop0;
7437 HOST_WIDE_INT const0 = *pconst0;
7438 int width = GET_MODE_BITSIZE (mode);
7440 const0 &= GET_MODE_MASK (mode);
7441 const1 &= GET_MODE_MASK (mode);
7443 /* If OP0 is an AND, clear unimportant bits in CONST1. */
7447 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or if OP0 is SET. */
7450 if (op1 == NIL || op0 == SET)
7453 else if (op0 == NIL)
7454 op0 = op1, const0 = const1;
7456 else if (op0 == op1)
7478 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
7479 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
7482 /* If the two constants aren't the same, we can't do anything. The
7483 remaining six cases can all be done. */
7484 else if (const0 != const1)
7492 /* (a & b) | b == b */
7494 else /* op1 == XOR */
7495 /* (a ^ b) | b == a | b */
7501 /* (a & b) ^ b == (~a) & b */
7502 op0 = AND, *pcomp_p = 1;
7503 else /* op1 == IOR */
7504 /* (a | b) ^ b == a & ~b */
7505 op0 = AND, *pconst0 = ~ const0;
7510 /* (a | b) & b == b */
7512 else /* op1 == XOR */
7513 /* (a ^ b) & b == (~a) & b */
7518 /* Check for NO-OP cases. */
7519 const0 &= GET_MODE_MASK (mode);
7521 && (op0 == IOR || op0 == XOR || op0 == PLUS))
7523 else if (const0 == 0 && op0 == AND)
7525 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
7528 /* If this would be an entire word for the target, but is not for
7529 the host, then sign-extend on the host so that the number will look
7530 the same way on the host that it would on the target.
7532 For example, when building a 64 bit alpha hosted 32 bit sparc
7533 targeted compiler, then we want the 32 bit unsigned value -1 to be
7534 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
7535 The latter confuses the sparc backend. */
7537 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
7538 && (const0 & ((HOST_WIDE_INT) 1 << (width - 1))))
7539 const0 |= ((HOST_WIDE_INT) (-1) << width);
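/* A worked example of the identities above (constants illustrative):
   merging OP1 == IOR, CONST1 == 0x0f into *POP0 == XOR,
   *PCONST0 == 0x0f uses (a | b) ^ b == a & ~b, so OP0 becomes AND
   and CONST0 becomes ~0x0f, with no complement of the inner
   operand needed.  */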
7547 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
7548 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
7549 that we started with.
7551 The shift is normally computed in the widest mode we find in VAROP, as
7552 long as it isn't a different number of words than RESULT_MODE. Exceptions
7553 are ASHIFTRT and ROTATE, which are always done in their original mode. */
7556 simplify_shift_const (x, code, result_mode, varop, count)
7559 enum machine_mode result_mode;
7563 enum rtx_code orig_code = code;
7564 int orig_count = count;
7565 enum machine_mode mode = result_mode;
7566 enum machine_mode shift_mode, tmode;
7568 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
7569 /* We form (outer_op (code varop count) (outer_const)). */
7570 enum rtx_code outer_op = NIL;
7571 HOST_WIDE_INT outer_const = 0;
7573 int complement_p = 0;
7576 /* If we were given an invalid count, don't do anything except exactly
7577 what was requested. */
7579 if (count < 0 || count > GET_MODE_BITSIZE (mode))
7584 return gen_rtx (code, mode, varop, GEN_INT (count));
7587 /* Unless one of the branches of the `if' in this loop does a `continue',
7588 we will `break' the loop after the `if'. */
7592 /* If we have an operand of (clobber (const_int 0)), just return that value. */
7594 if (GET_CODE (varop) == CLOBBER)
7597 /* If we discovered we had to complement VAROP, leave. Making a NOT
7598 here would cause an infinite loop. */
7602 /* Convert ROTATERT to ROTATE. */
7603 if (code == ROTATERT)
7604 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
7606 /* We need to determine what mode we will do the shift in. If the
7607 shift is an ASHIFTRT or ROTATE, we must always do it in the mode it
7608 was originally done in. Otherwise, we can do it in MODE, the widest
7609 mode encountered. */
7610 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7612 /* Handle cases where the count is greater than the size of the mode
7613 minus 1. For ASHIFT, use the size minus one as the count (this can
7614 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
7615 take the count modulo the size. For other shifts, the result is zero.
7618 Since these shifts are being produced by the compiler by combining
7619 multiple operations, each of which are defined, we know what the
7620 result is supposed to be. */
7622 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
7624 if (code == ASHIFTRT)
7625 count = GET_MODE_BITSIZE (shift_mode) - 1;
7626 else if (code == ROTATE || code == ROTATERT)
7627 count %= GET_MODE_BITSIZE (shift_mode);
7630 /* We can't simply return zero because there may be an outer op. */
7638 /* Negative counts are invalid and should not have been made (a
7639 programmer-specified negative count should have been handled above). */
7644 /* An arithmetic right shift of a quantity known to be -1 or 0 is a no-op. */
7646 if (code == ASHIFTRT
7647 && (num_sign_bit_copies (varop, shift_mode)
7648 == GET_MODE_BITSIZE (shift_mode)))
7654 /* If we are doing an arithmetic right shift and discarding all but
7655 the sign bit copies, this is equivalent to doing a shift by the
7656 bitsize minus one. Convert it into that shift because it will often
7657 allow other simplifications. */
7659 if (code == ASHIFTRT
7660 && (count + num_sign_bit_copies (varop, shift_mode)
7661 >= GET_MODE_BITSIZE (shift_mode)))
7662 count = GET_MODE_BITSIZE (shift_mode) - 1;
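/* E.g., assuming a 32-bit shift_mode: if VAROP has 20 sign bit
   copies and COUNT is 16, then 16 + 20 >= 32, so every surviving
   bit equals the sign bit and the shift is rewritten as a shift
   by 31.  */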
7664 /* We simplify the tests below and elsewhere by converting
7665 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
7666 `make_compound_operation' will convert it to an ASHIFTRT for
7667 those machines (such as Vax) that don't have a LSHIFTRT. */
7668 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
7670 && ((nonzero_bits (varop, shift_mode)
7671 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
7675 switch (GET_CODE (varop))
7681 new = expand_compound_operation (varop);
7690 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
7691 minus the width of a smaller mode, we can do this with a
7692 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
7693 if ((code == ASHIFTRT || code == LSHIFTRT)
7694 && ! mode_dependent_address_p (XEXP (varop, 0))
7695 && ! MEM_VOLATILE_P (varop)
7696 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7697 MODE_INT, 1)) != BLKmode)
7699 if (BYTES_BIG_ENDIAN)
7700 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
7702 new = gen_rtx (MEM, tmode,
7703 plus_constant (XEXP (varop, 0),
7704 count / BITS_PER_UNIT));
7705 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
7706 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
7707 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
7708 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7709 : ZERO_EXTEND, mode, new);
7716 /* Similar to the case above, except that we can only do this if
7717 the resulting mode is the same as that of the underlying
7718 MEM and adjust the address depending on the *bits* endianness
7719 because of the way that bit-field extract insns are defined. */
7720 if ((code == ASHIFTRT || code == LSHIFTRT)
7721 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7722 MODE_INT, 1)) != BLKmode
7723 && tmode == GET_MODE (XEXP (varop, 0)))
7725 if (BITS_BIG_ENDIAN)
7726 new = XEXP (varop, 0);
7729 new = copy_rtx (XEXP (varop, 0));
7730 SUBST (XEXP (new, 0),
7731 plus_constant (XEXP (new, 0),
7732 count / BITS_PER_UNIT));
7735 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7736 : ZERO_EXTEND, mode, new);
7743 /* If VAROP is a SUBREG, strip it as long as the inner operand has
7744 the same number of words as what we've seen so far. Then store
7745 the widest mode in MODE. */
7746 if (subreg_lowpart_p (varop)
7747 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7748 > GET_MODE_SIZE (GET_MODE (varop)))
7749 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7750 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
7753 varop = SUBREG_REG (varop);
7754 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
7755 mode = GET_MODE (varop);
7761 /* Some machines use MULT instead of ASHIFT because MULT
7762 is cheaper. But it is still better on those machines to
7763 merge two shifts into one. */
7764 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7765 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7767 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
7768 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7774 /* Similar, for when divides are cheaper. */
7775 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7776 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7778 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
7779 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7785 /* If we are extracting just the sign bit of an arithmetic right
7786 shift, that shift is not needed. */
7787 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
7789 varop = XEXP (varop, 0);
7793 /* ... fall through ... */
7798 /* Here we have two nested shifts. The result is usually the
7799 AND of a new shift with a mask. We compute the result below. */
7800 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7801 && INTVAL (XEXP (varop, 1)) >= 0
7802 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
7803 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7804 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7806 enum rtx_code first_code = GET_CODE (varop);
7807 int first_count = INTVAL (XEXP (varop, 1));
7808 unsigned HOST_WIDE_INT mask;
7811 /* We have one common special case. We can't do any merging if
7812 the inner code is an ASHIFTRT of a smaller mode. However, if
7813 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
7814 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
7815 we can convert it to
7816 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
7817 This simplifies certain SIGN_EXTEND operations. */
7818 if (code == ASHIFT && first_code == ASHIFTRT
7819 && (GET_MODE_BITSIZE (result_mode)
7820 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
7822 /* C3 has the low-order C1 bits zero. */
7824 mask = (GET_MODE_MASK (mode)
7825 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
7827 varop = simplify_and_const_int (NULL_RTX, result_mode,
7828 XEXP (varop, 0), mask);
7829 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
7831 count = first_count;
7836 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
7837 than C1 high-order bits equal to the sign bit, we can convert
7838 this to either an ASHIFT or an ASHIFTRT depending on the two counts.
7841 We cannot do this if VAROP's mode is not SHIFT_MODE. */
7843 if (code == ASHIFTRT && first_code == ASHIFT
7844 && GET_MODE (varop) == shift_mode
7845 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
7848 count -= first_count;
7850 count = - count, code = ASHIFT;
7851 varop = XEXP (varop, 0);
7855 /* There are some cases we can't do. If CODE is ASHIFTRT,
7856 we can only do this if FIRST_CODE is also ASHIFTRT.
7858 We can't do the case when CODE is ROTATE and FIRST_CODE is ASHIFTRT.
7861 If the mode of this shift is not the mode of the outer shift,
7862 we can't do this if either shift is ASHIFTRT or ROTATE.
7864 Finally, we can't do any of these if the mode is too wide
7865 unless the codes are the same.
7867 Handle the case where the shift codes are the same first. */
7870 if (code == first_code)
7872 if (GET_MODE (varop) != result_mode
7873 && (code == ASHIFTRT || code == ROTATE))
7876 count += first_count;
7877 varop = XEXP (varop, 0);
7881 if (code == ASHIFTRT
7882 || (code == ROTATE && first_code == ASHIFTRT)
7883 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
7884 || (GET_MODE (varop) != result_mode
7885 && (first_code == ASHIFTRT || first_code == ROTATE
7886 || code == ROTATE)))
7889 /* To compute the mask to apply after the shift, shift the
7890 nonzero bits of the inner shift the same way the
7891 outer shift will. */
7893 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
7896 = simplify_binary_operation (code, result_mode, mask_rtx,
7899 /* Give up if we can't compute an outer operation to use. */
7901 || GET_CODE (mask_rtx) != CONST_INT
7902 || ! merge_outer_ops (&outer_op, &outer_const, AND,
7904 result_mode, &complement_p))
7907 /* If the shifts are in the same direction, we add the
7908 counts. Otherwise, we subtract them. */
7909 if ((code == ASHIFTRT || code == LSHIFTRT)
7910 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
7911 count += first_count;
7913 count -= first_count;
7915 /* If COUNT is positive, the new shift is usually CODE,
7916 except for the two exceptions below, in which case it is
7917 FIRST_CODE. If the count is negative, FIRST_CODE should always be used. */
7920 && ((first_code == ROTATE && code == ASHIFT)
7921 || (first_code == ASHIFTRT && code == LSHIFTRT)))
7924 code = first_code, count = - count;
7926 varop = XEXP (varop, 0);
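/* A small worked instance of the merge above (illustrative RTL):
   (lshiftrt:QI (lshiftrt:QI X 2) 3) shifts in the same direction,
   so the counts add and we continue with (lshiftrt:QI X 5); the
   mask computed from the inner shift's nonzero bits is applied as
   an outer AND through merge_outer_ops.  */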
7930 /* If we have (A << B << C) for any shift, we can convert this to
7931 (A << C << B). This wins if A is a constant. Only try this if
7932 B is not a constant. */
7934 else if (GET_CODE (varop) == code
7935 && GET_CODE (XEXP (varop, 1)) != CONST_INT
7937 = simplify_binary_operation (code, mode,
7941 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
7948 /* Make this fit the case below. */
7949 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
7950 GEN_INT (GET_MODE_MASK (mode)));
7956 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
7957 with C the size of VAROP - 1 and the shift is logical if
7958 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7959 we have an (le X 0) operation. If we have an arithmetic shift
7960 and STORE_FLAG_VALUE is 1 or we have a logical shift with
7961 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
7963 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
7964 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
7965 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7966 && (code == LSHIFTRT || code == ASHIFTRT)
7967 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7968 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7971 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
7974 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7975 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7980 /* If we have (shift (logical)), move the logical to the outside
7981 to allow it to possibly combine with another logical and the
7982 shift to combine with another shift. This also canonicalizes to
7983 what a ZERO_EXTRACT looks like. Also, some machines have
7984 (and (shift)) insns. */
7986 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7987 && (new = simplify_binary_operation (code, result_mode,
7989 GEN_INT (count))) != 0
7990 && GET_CODE (new) == CONST_INT
7991 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
7992 INTVAL (new), result_mode, &complement_p))
7994 varop = XEXP (varop, 0);
7998 /* If we can't do that, try to simplify the shift in each arm of the
7999 logical expression, make a new logical expression, and apply
8000 the inverse distributive law. */
8002 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8003 XEXP (varop, 0), count);
8004 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8005 XEXP (varop, 1), count);
8007 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
8008 varop = apply_distributive_law (varop);
8015 /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
8016 says that the sign bit can be tested, FOO has mode MODE, C is
8017 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
8018 that may be nonzero. */
8019 if (code == LSHIFTRT
8020 && XEXP (varop, 1) == const0_rtx
8021 && GET_MODE (XEXP (varop, 0)) == result_mode
8022 && count == GET_MODE_BITSIZE (result_mode) - 1
8023 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8024 && ((STORE_FLAG_VALUE
8025 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
8026 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
8027 && merge_outer_ops (&outer_op, &outer_const, XOR,
8028 (HOST_WIDE_INT) 1, result_mode,
8031 varop = XEXP (varop, 0);
8038 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
8039 than the number of bits in the mode is equivalent to A. */
8040 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8041 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
8043 varop = XEXP (varop, 0);
8048 /* NEG commutes with ASHIFT since it is multiplication. Move the
8049 NEG outside to allow shifts to combine. */
8051 && merge_outer_ops (&outer_op, &outer_const, NEG,
8052 (HOST_WIDE_INT) 0, result_mode,
8055 varop = XEXP (varop, 0);
8061 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
8062 is one less than the number of bits in the mode is
8063 equivalent to (xor A 1). */
8064 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8065 && XEXP (varop, 1) == constm1_rtx
8066 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
8067 && merge_outer_ops (&outer_op, &outer_const, XOR,
8068 (HOST_WIDE_INT) 1, result_mode,
8072 varop = XEXP (varop, 0);
8076 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
8077 that might be nonzero in BAR are those being shifted out and those
8078 bits are known zero in FOO, we can replace the PLUS with FOO.
8079 Similarly in the other operand order. This code occurs when
8080 we are computing the size of a variable-size array. */
8082 if ((code == ASHIFTRT || code == LSHIFTRT)
8083 && count < HOST_BITS_PER_WIDE_INT
8084 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
8085 && (nonzero_bits (XEXP (varop, 1), result_mode)
8086 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
8088 varop = XEXP (varop, 0);
8091 else if ((code == ASHIFTRT || code == LSHIFTRT)
8092 && count < HOST_BITS_PER_WIDE_INT
8093 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8094 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8096 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8097 & nonzero_bits (XEXP (varop, 1),
8100 varop = XEXP (varop, 1);
8104 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
8106 && GET_CODE (XEXP (varop, 1)) == CONST_INT
8107 && (new = simplify_binary_operation (ASHIFT, result_mode,
8109 GEN_INT (count))) != 0
8110 && GET_CODE (new) == CONST_INT
8111 && merge_outer_ops (&outer_op, &outer_const, PLUS,
8112 INTVAL (new), result_mode, &complement_p))
8114 varop = XEXP (varop, 0);
8120 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
8121 with C the size of VAROP - 1 and the shift is logical if
8122 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8123 we have a (gt X 0) operation. If the shift is arithmetic with
8124 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
8125 we have a (neg (gt X 0)) operation. */
8127 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
8128 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8129 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8130 && (code == LSHIFTRT || code == ASHIFTRT)
8131 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8132 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
8133 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8136 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
8139 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8140 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8150 /* We need to determine what mode to do the shift in. If the shift is
8151 an ASHIFTRT or ROTATE, we must always do it in the mode it was originally
8152 done in. Otherwise, we can do it in MODE, the widest mode encountered.
8153 The code we care about is that of the shift that will actually be done,
8154 not the shift that was originally requested. */
8155 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
8157 /* We have now finished analyzing the shift. The result should be
8158 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
8159 OUTER_OP is non-NIL, it is an operation that needs to be applied
8160 to the result of the shift. OUTER_CONST is the relevant constant,
8161 but we must turn off all bits turned off in the shift.
8163 If we were passed a value for X, see if we can use any pieces of
8164 it. If not, make new rtx. */
8166 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
8167 && GET_CODE (XEXP (x, 1)) == CONST_INT
8168 && INTVAL (XEXP (x, 1)) == count)
8169 const_rtx = XEXP (x, 1);
8171 const_rtx = GEN_INT (count);
8173 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8174 && GET_MODE (XEXP (x, 0)) == shift_mode
8175 && SUBREG_REG (XEXP (x, 0)) == varop)
8176 varop = XEXP (x, 0);
8177 else if (GET_MODE (varop) != shift_mode)
8178 varop = gen_lowpart_for_combine (shift_mode, varop);
8180 /* If we can't make the SUBREG, try to return what we were given. */
8181 if (GET_CODE (varop) == CLOBBER)
8182 return x ? x : varop;
8184 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
8189 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
8190 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
8192 SUBST (XEXP (x, 0), varop);
8193 SUBST (XEXP (x, 1), const_rtx);
8196 /* If we have an outer operation and we just made a shift, it is
8197 possible that we could have simplified the shift were it not
8198 for the outer operation. So try to do the simplification recursively. */
8201 if (outer_op != NIL && GET_CODE (x) == code
8202 && GET_CODE (XEXP (x, 1)) == CONST_INT)
8203 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
8204 INTVAL (XEXP (x, 1)));
8206 /* If we were doing an LSHIFTRT in a wider mode than it was originally,
8207 turn off all the bits that the shift would have turned off. */
8208 if (orig_code == LSHIFTRT && result_mode != shift_mode)
8209 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
8210 GET_MODE_MASK (result_mode) >> orig_count);
8212 /* Do the remainder of the processing in RESULT_MODE. */
8213 x = gen_lowpart_for_combine (result_mode, x);
8215 /* If COMPLEMENT_P is set, we have to complement X before doing the outer operation. */
8218 x = gen_unary (NOT, result_mode, result_mode, x);
8220 if (outer_op != NIL)
8222 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
8224 int width = GET_MODE_BITSIZE (result_mode);
8226 outer_const &= GET_MODE_MASK (result_mode);
8228 /* If this would be an entire word for the target, but is not for
8229 the host, then sign-extend on the host so that the number will
8230 look the same way on the host that it would on the target.
8232 For example, when building a 64 bit alpha hosted 32 bit sparc
8233 targeted compiler, then we want the 32 bit unsigned value -1 to be
8234 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
8235 The latter confuses the sparc backend. */
8237 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
8238 && (outer_const & ((HOST_WIDE_INT) 1 << (width - 1))))
8239 outer_const |= ((HOST_WIDE_INT) (-1) << width);
8242 if (outer_op == AND)
8243 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
8244 else if (outer_op == SET)
8245 /* This means that we have determined that the result is
8246 equivalent to a constant. This should be rare. */
8247 x = GEN_INT (outer_const);
8248 else if (GET_RTX_CLASS (outer_op) == '1')
8249 x = gen_unary (outer_op, result_mode, result_mode, x);
8251 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
8257 /* Like recog, but we receive the address of a pointer to a new pattern.
8258 We try to match the rtx that the pointer points to.
8259 If that fails, we may try to modify or replace the pattern,
8260 storing the replacement into the same pointer object.
8262 Modifications include deletion or addition of CLOBBERs.
8264 PNOTES is a pointer to a location where any REG_UNUSED notes added for
8265 the CLOBBERs are placed.
8267 The value is the final insn code from the pattern ultimately matched, or -1. */
8271 recog_for_combine (pnewpat, insn, pnotes)
8276 register rtx pat = *pnewpat;
8277 int insn_code_number;
8278 int num_clobbers_to_add = 0;
8282 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
8283 we use to indicate that something didn't match. If we find such a
8284 thing, force rejection. */
8285 if (GET_CODE (pat) == PARALLEL)
8286 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
8287 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
8288 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
8291 /* Is the result of combination a valid instruction? */
8292 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8294 /* If it isn't, there is the possibility that we previously had an insn
8295 that clobbered some register as a side effect, but the combined
8296 insn doesn't need to do that. So try once more without the clobbers
8297 unless this represents an ASM insn. */
8299 if (insn_code_number < 0 && ! check_asm_operands (pat)
8300 && GET_CODE (pat) == PARALLEL)
8304 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
8305 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
8308 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
8312 SUBST_INT (XVECLEN (pat, 0), pos);
8315 pat = XVECEXP (pat, 0, 0);
8317 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8320 /* If we had any clobbers to add, make a new pattern that contains
8321 them. Then check to make sure that all of them are dead. */
8322 if (num_clobbers_to_add)
8324 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
8325 gen_rtvec (GET_CODE (pat) == PARALLEL
8326 ? XVECLEN (pat, 0) + num_clobbers_to_add
8327 : num_clobbers_to_add + 1));
8329 if (GET_CODE (pat) == PARALLEL)
8330 for (i = 0; i < XVECLEN (pat, 0); i++)
8331 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
8333 XVECEXP (newpat, 0, 0) = pat;
8335 add_clobbers (newpat, insn_code_number);
8337 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
8338 i < XVECLEN (newpat, 0); i++)
8340 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
8341 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
8343 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
8344 XEXP (XVECEXP (newpat, 0, i), 0), notes);
8352 return insn_code_number;
8355 /* Like gen_lowpart but for use by combine. In combine it is not possible
8356 to create any new pseudoregs. However, it is safe to create
8357 invalid memory addresses, because combine will try to recognize
8358 them and all they will do is make the combine attempt fail.
8360 If for some reason this cannot do its job, an rtx
8361 (clobber (const_int 0)) is returned.
8362 An insn containing that will not be recognized. */
8367 gen_lowpart_for_combine (mode, x)
8368 enum machine_mode mode;
8373 if (GET_MODE (x) == mode)
8376 /* We can only support MODE being wider than a word if X is a
8377 constant integer or has a mode the same size. */
8379 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
8380 && ! ((GET_MODE (x) == VOIDmode
8381 && (GET_CODE (x) == CONST_INT
8382 || GET_CODE (x) == CONST_DOUBLE))
8383 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
8384 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8386 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
8387 won't know what to do. So we will strip off the SUBREG here and
8388 process normally. */
8389 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
8392 if (GET_MODE (x) == mode)
8396 result = gen_lowpart_common (mode, x);
8398 && GET_CODE (result) == SUBREG
8399 && GET_CODE (SUBREG_REG (result)) == REG
8400 && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
8401 && (GET_MODE_SIZE (GET_MODE (result))
8402 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result)))))
8403 reg_changes_size[REGNO (SUBREG_REG (result))] = 1;
8408 if (GET_CODE (x) == MEM)
8410 register int offset = 0;
8413 /* Refuse to work on a volatile memory ref or one with a mode-dependent address. */
8415 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
8416 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8418 /* If we want to refer to something bigger than the original memref,
8419 generate a perverse subreg instead. That will force a reload
8420 of the original memref X. */
8421 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
8422 return gen_rtx (SUBREG, mode, x, 0);
8424 if (WORDS_BIG_ENDIAN)
8425 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
8426 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
8427 if (BYTES_BIG_ENDIAN)
8429 /* Adjust the address so that the address-after-the-data is unchanged. */
8431 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
8432 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
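/* Offset sketch, assuming 4-byte words with big-endian words and
   bytes: taking the QImode lowpart of an SImode MEM gives
   offset = (MAX (4, 4) - MAX (1, 4)) - (MIN (4, 1) - MIN (4, 4))
   == 0 - (1 - 4) == 3, addressing the low-order byte three bytes
   past the SImode address.  */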
8434 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
8435 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
8436 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
8437 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
8441 /* If X is a comparison operator, rewrite it in a new mode. This
8442 probably won't match, but may allow further simplifications. */
8443 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
8444 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
8446 /* If we couldn't simplify X any other way, just enclose it in a
8447 SUBREG. Normally, this SUBREG won't match, but some patterns may
8448 include an explicit SUBREG or we may simplify it further in combine. */
8453 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
8454 word = ((GET_MODE_SIZE (GET_MODE (x))
8455 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
8457 return gen_rtx (SUBREG, mode, x, word);
8461 /* Make an rtx expression. This is a subset of gen_rtx and only supports
8462 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
8464 If the identical expression was previously in the insn (in the undobuf),
8465 it will be returned. Only if it is not found will a new expression be made. */
8470 gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
8474 enum machine_mode mode;
8486 code = va_arg (p, enum rtx_code);
8487 mode = va_arg (p, enum machine_mode);
8490 n_args = GET_RTX_LENGTH (code);
8491 fmt = GET_RTX_FORMAT (code);
8493 if (n_args == 0 || n_args > 3)
8496 /* Get each arg and verify that it is supposed to be an expression. */
8497 for (j = 0; j < n_args; j++)
8502 args[j] = va_arg (p, rtx);
8505 /* See if this is in undobuf. Be sure we don't use objects that came
8506 from another insn; this could produce circular rtl structures. */
8508 for (i = previous_num_undos; i < undobuf.num_undo; i++)
8509 if (!undobuf.undo[i].is_int
8510 && GET_CODE (undobuf.undo[i].old_contents.r) == code
8511 && GET_MODE (undobuf.undo[i].old_contents.r) == mode)
8513 for (j = 0; j < n_args; j++)
8514 if (XEXP (undobuf.undo[i].old_contents.r, j) != args[j])
8518 return undobuf.undo[i].old_contents.r;
8521 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
8522 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
8523 rt = rtx_alloc (code);
8524 PUT_MODE (rt, mode);
8525 XEXP (rt, 0) = args[0];
8528 XEXP (rt, 1) = args[1];
8530 XEXP (rt, 2) = args[2];
8535 /* These routines make binary and unary operations by first seeing if they
8536 fold; if not, a new expression is allocated. */
8539 gen_binary (code, mode, op0, op1)
8541 enum machine_mode mode;
8547 if (GET_RTX_CLASS (code) == 'c'
8548 && (GET_CODE (op0) == CONST_INT
8549 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
8550 tem = op0, op0 = op1, op1 = tem;
8552 if (GET_RTX_CLASS (code) == '<')
8554 enum machine_mode op_mode = GET_MODE (op0);
8556 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
8557 just (REL_OP X Y). */
8558 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
8560 op1 = XEXP (op0, 1);
8561 op0 = XEXP (op0, 0);
8562 op_mode = GET_MODE (op0);
8565 if (op_mode == VOIDmode)
8566 op_mode = GET_MODE (op1);
8567 result = simplify_relational_operation (code, op_mode, op0, op1);
8570 result = simplify_binary_operation (code, mode, op0, op1);
8575 /* Put complex operands first and constants second. */
8576 if (GET_RTX_CLASS (code) == 'c'
8577 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
8578 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
8579 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
8580 || (GET_CODE (op0) == SUBREG
8581 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
8582 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
8583 return gen_rtx_combine (code, mode, op1, op0);
8585 return gen_rtx_combine (code, mode, op0, op1);
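/* Usage sketch with illustrative operands: gen_binary (PLUS, SImode,
   <const_int 4>, <reg>) does not fold, so the commutative-operand
   swap above applies and the result is (plus <reg> (const_int 4)),
   the canonical order with the constant second.  */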
8589 gen_unary (code, mode, op0_mode, op0)
8591 enum machine_mode mode, op0_mode;
8594 rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
8599 return gen_rtx_combine (code, mode, op0);
8602 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
8603 comparison code that will be tested.
8605 The result is a possibly different comparison code to use. *POP0 and
8606 *POP1 may be updated.
8608 It is possible that we might detect that a comparison is either always
8609 true or always false. However, we do not perform general constant
8610 folding in combine, so this knowledge isn't useful. Such tautologies
8611 should have been detected earlier. Hence we ignore all such cases. */
8613 static enum rtx_code
8614 simplify_comparison (code, pop0, pop1)
8623 enum machine_mode mode, tmode;
8625 /* Try a few ways of applying the same transformation to both operands. */
8628 #ifndef WORD_REGISTER_OPERATIONS
8629 /* The test below this one won't handle SIGN_EXTENDs on these machines,
8630 so check specially. */
8631 if (code != GTU && code != GEU && code != LTU && code != LEU
8632 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
8633 && GET_CODE (XEXP (op0, 0)) == ASHIFT
8634 && GET_CODE (XEXP (op1, 0)) == ASHIFT
8635 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
8636 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
8637 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
8638 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
8639 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8640 && GET_CODE (XEXP (op1, 1)) == CONST_INT
8641 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8642 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
8643 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
8644 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
8645 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
8646 && (INTVAL (XEXP (op0, 1))
8647 == (GET_MODE_BITSIZE (GET_MODE (op0))
8649 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
8651 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
8652 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
8656 /* If both operands are the same constant shift, see if we can ignore the
8657 shift. We can if the shift is a rotate or if the bits shifted out of
8658 this shift are known to be zero for both inputs and if the type of
8659 comparison is compatible with the shift. */
8660 if (GET_CODE (op0) == GET_CODE (op1)
8661 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8662 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
8663 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
8664 && (code != GT && code != LT && code != GE && code != LE))
8665 || (GET_CODE (op0) == ASHIFTRT
8666 && (code != GTU && code != LTU
8667 && code != GEU && code != LEU)))
8668 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8669 && INTVAL (XEXP (op0, 1)) >= 0
8670 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8671 && XEXP (op0, 1) == XEXP (op1, 1))
8673 enum machine_mode mode = GET_MODE (op0);
8674 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8675 int shift_count = INTVAL (XEXP (op0, 1));
8677 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
8678 mask &= (mask >> shift_count) << shift_count;
8679 else if (GET_CODE (op0) == ASHIFT)
8680 mask = (mask & (mask << shift_count)) >> shift_count;
8682 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
8683 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
8684 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
8689 /* If both operands are AND's of a paradoxical SUBREG by constant, the
8690 SUBREGs are of the same mode, and, in both cases, the AND would
8691 be redundant if the comparison was done in the narrower mode,
8692 do the comparison in the narrower mode (e.g., we are AND'ing with 1
8693 and the operand's possibly nonzero bits are 0xffffff01; in that case
8694 if we only care about QImode, we don't need the AND). This case
8695 occurs if the output mode of an scc insn is not SImode and
8696 STORE_FLAG_VALUE == 1 (e.g., the 386).
8698 Similarly, check for a case where the AND's are ZERO_EXTEND
8699 operations from some narrower mode even though a SUBREG is not
8702 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
8703 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8704 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
8706 rtx inner_op0 = XEXP (op0, 0);
8707 rtx inner_op1 = XEXP (op1, 0);
8708 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
8709 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
8712 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
8713 && (GET_MODE_SIZE (GET_MODE (inner_op0))
8714 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
8715 && (GET_MODE (SUBREG_REG (inner_op0))
8716 == GET_MODE (SUBREG_REG (inner_op1)))
8717 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8718 <= HOST_BITS_PER_WIDE_INT)
8719 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
8720 GET_MODE (SUBREG_REG (op0)))))
8721 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
8722 GET_MODE (SUBREG_REG (inner_op1))))))
8724 op0 = SUBREG_REG (inner_op0);
8725 op1 = SUBREG_REG (inner_op1);
8727 /* The resulting comparison is always unsigned since we masked
8728 off the original sign bit. */
8729 code = unsigned_condition (code);
8735 for (tmode = GET_CLASS_NARROWEST_MODE
8736 (GET_MODE_CLASS (GET_MODE (op0)));
8737 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
8738 if (c0 == GET_MODE_MASK (tmode))
8740 op0 = gen_lowpart_for_combine (tmode, inner_op0);
8741 op1 = gen_lowpart_for_combine (tmode, inner_op1);
8742 code = unsigned_condition (code);
8751 /* If both operands are NOT, we can strip off the outer operation
8752 and adjust the comparison code for swapped operands; similarly for
8753 NEG, except that this must be an equality comparison. */
8754 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
8755 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
8756 && (code == EQ || code == NE)))
8757 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
8763 /* If the first operand is a constant, swap the operands and adjust the
8764 comparison code appropriately. */
8765 if (CONSTANT_P (op0))
8767 tem = op0, op0 = op1, op1 = tem;
8768 code = swap_condition (code);
8771 /* We now enter a loop during which we will try to simplify the comparison.
8772 For the most part, we only are concerned with comparisons with zero,
8773 but some things may really be comparisons with zero but not start
8774 out looking that way. */
8776 while (GET_CODE (op1) == CONST_INT)
8778 enum machine_mode mode = GET_MODE (op0);
8779 int mode_width = GET_MODE_BITSIZE (mode);
8780 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8781 int equality_comparison_p;
8782 int sign_bit_comparison_p;
8783 int unsigned_comparison_p;
8784 HOST_WIDE_INT const_op;
8786 /* We only want to handle integral modes. This catches VOIDmode,
8787 CCmode, and the floating-point modes. An exception is that we
8788 can handle VOIDmode if OP0 is a COMPARE or a comparison operation. */
8791 if (GET_MODE_CLASS (mode) != MODE_INT
8792 && ! (mode == VOIDmode
8793 && (GET_CODE (op0) == COMPARE
8794 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
8797 /* Get the constant we are comparing against and turn off all bits
8798 not on in our mode. */
8799 const_op = INTVAL (op1);
8800 if (mode_width <= HOST_BITS_PER_WIDE_INT)
8803 /* If we are comparing against a constant power of two and the value
8804 being compared can only have that single bit nonzero (e.g., it was
8805 `and'ed with that bit), we can replace this with a comparison with zero. */
8808 && (code == EQ || code == NE || code == GE || code == GEU
8809 || code == LT || code == LTU)
8810 && mode_width <= HOST_BITS_PER_WIDE_INT
8811 && exact_log2 (const_op) >= 0
8812 && nonzero_bits (op0, mode) == const_op)
8814 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
8815 op1 = const0_rtx, const_op = 0;
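/* A concrete instance (illustrative): comparing (and X 4) against 4
   with EQ, the nonzero bits of the AND are exactly 4, so the test
   collapses to (ne (and X 4) 0).  */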
8818 /* Similarly, if we are comparing a value known to be either -1 or
8819 0 with -1, change it to the opposite comparison against zero. */
8822 && (code == EQ || code == NE || code == GT || code == LE
8823 || code == GEU || code == LTU)
8824 && num_sign_bit_copies (op0, mode) == mode_width)
8826 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
8827 op1 = const0_rtx, const_op = 0;
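/* Sketch: if OP0 is known to be -1 or 0, e.g. an ASHIFTRT of some
   value by mode_width - 1, then (eq OP0 -1) is rewritten as the
   cheaper (ne OP0 0).  */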
8830 /* Do some canonicalizations based on the comparison code. We prefer
8831 comparisons against zero and then prefer equality comparisons.
8832 If we can reduce the size of a constant, we will do that too. */
8837 /* < C is equivalent to <= (C - 1). */
8841 op1 = GEN_INT (const_op);
8843 /* ... fall through to LE case below. */
8849 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
8853 op1 = GEN_INT (const_op);
8857 /* If we are doing a <= 0 comparison on a value known to have
8858 a zero sign bit, we can replace this with == 0. */
8859 else if (const_op == 0
8860 && mode_width <= HOST_BITS_PER_WIDE_INT
8861 && (nonzero_bits (op0, mode)
8862 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8867 /* >= C is equivalent to > (C - 1). */
8871 op1 = GEN_INT (const_op);
8873 /* ... fall through to GT below. */
8879 /* > C is equivalent to >= (C + 1); we do this for C < 0 */
8883 op1 = GEN_INT (const_op);
8887 /* If we are doing a > 0 comparison on a value known to have
8888 a zero sign bit, we can replace this with != 0. */
8889 else if (const_op == 0
8890 && mode_width <= HOST_BITS_PER_WIDE_INT
8891 && (nonzero_bits (op0, mode)
8892 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8897 /* < C is equivalent to <= (C - 1). */
8901 op1 = GEN_INT (const_op);
8903 /* ... fall through ... */
8906 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
8907 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8909 const_op = 0, op1 = const0_rtx;
8917 /* unsigned <= 0 is equivalent to == 0 */
8921 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
8922 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8924 const_op = 0, op1 = const0_rtx;
8930 /* >= C is equivalent to > (C - 1). */
8934 op1 = GEN_INT (const_op);
8936 /* ... fall through ... */
8939 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
8940 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8942 const_op = 0, op1 = const0_rtx;
8950 /* unsigned > 0 is equivalent to != 0 */
8954 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
8955 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8957 const_op = 0, op1 = const0_rtx;
8963 /* Compute some predicates to simplify code below. */
8965 equality_comparison_p = (code == EQ || code == NE);
8966 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
8967 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
8970 /* If this is a sign bit comparison and we can do arithmetic in
8971 MODE, say that we will only be needing the sign bit of OP0. */
8972 if (sign_bit_comparison_p
8973 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8974 op0 = force_to_mode (op0, mode,
8976 << (GET_MODE_BITSIZE (mode) - 1)),
8979 /* Now try cases based on the opcode of OP0. If none of the cases
8980 does a "continue", we exit this loop immediately after the switch. */
8983 switch (GET_CODE (op0))
8986 /* If we are extracting a single bit from a variable position in
8987 a constant that has only a single bit set and are comparing it
8988 with zero, we can convert this into an equality comparison
8989 between the position and the location of the single bit. We can't
8990 do this if the bits are big-endian and we don't have an extzv, since
8991 we then can't know what mode to use for the endianness adjustment. */
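/* For example: (eq (zero_extract (const_int 8) (const_int 1) POS)
   (const_int 0)) extracts bit POS of the constant 8, which is nonzero
   only when POS selects bit 3, so the test becomes
   (ne POS (const_int 3)), modulo the endianness adjustment below.  */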
8993 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
8994 && XEXP (op0, 1) == const1_rtx
8995 && equality_comparison_p && const_op == 0
8996 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0
8997 && (! BITS_BIG_ENDIAN
9004 if (BITS_BIG_ENDIAN)
9005 i = (GET_MODE_BITSIZE
9006 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
9009 op0 = XEXP (op0, 2);
9013 /* Result is nonzero iff shift count is equal to I. */
9014 code = reverse_condition (code);
9018 /* ... fall through ... */
9021 tem = expand_compound_operation (op0);
9030 /* If testing for equality, we can take the NOT of the constant. */
9031 if (equality_comparison_p
9032 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
9034 op0 = XEXP (op0, 0);
9039 /* If just looking at the sign bit, reverse the sense of the comparison. */
9041 if (sign_bit_comparison_p)
9043 op0 = XEXP (op0, 0);
9044 code = (code == GE ? LT : GE);
9050 /* If testing for equality, we can take the NEG of the constant. */
9051 if (equality_comparison_p
9052 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
9054 op0 = XEXP (op0, 0);
9059 /* The remaining cases only apply to comparisons with zero. */
9063 /* When X is ABS or is known positive,
9064 (neg X) is < 0 if and only if X != 0. */
9066 if (sign_bit_comparison_p
9067 && (GET_CODE (XEXP (op0, 0)) == ABS
9068 || (mode_width <= HOST_BITS_PER_WIDE_INT
9069 && (nonzero_bits (XEXP (op0, 0), mode)
9070 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
9072 op0 = XEXP (op0, 0);
9073 code = (code == LT ? NE : EQ);
9077 /* If we have NEG of something whose two high-order bits are the
9078 same, we know that "(-a) < 0" is equivalent to "a > 0". */
9079 if (num_sign_bit_copies (op0, mode) >= 2)
9081 op0 = XEXP (op0, 0);
9082 code = swap_condition (code);
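/* For example, in an 8-bit mode: two sign-bit copies mean (neg A) lies
   in [-64, 63], so A lies in [-63, 64] and re-negating cannot overflow;
   hence (lt (neg A) (const_int 0)) can become (gt A (const_int 0)).  */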
9088 /* If we are testing equality and our count is a constant, we
9089 can perform the inverse operation on our RHS. */
9090 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
9091 && (tem = simplify_binary_operation (ROTATERT, mode,
9092 op1, XEXP (op0, 1))) != 0)
9094 op0 = XEXP (op0, 0);
9099 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
9100 a particular bit. Convert it to an AND of a constant of that
9101 bit. This will be converted into a ZERO_EXTRACT. */
9102 if (const_op == 0 && sign_bit_comparison_p
9103 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9104 && mode_width <= HOST_BITS_PER_WIDE_INT)
9106 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9109 - INTVAL (XEXP (op0, 1)))));
9110 code = (code == LT ? NE : EQ);
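/* For example, in an 8-bit mode: the sign bit of (rotate X (const_int 3))
   is bit 4 of X, so (lt (rotate X (const_int 3)) (const_int 0)) becomes
   (ne (and X (const_int 16)) (const_int 0)).  */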
9114 /* ... fall through ... */
9117 /* ABS is ignorable inside an equality comparison with zero. */
9118 if (const_op == 0 && equality_comparison_p)
9120 op0 = XEXP (op0, 0);
9127 /* Can simplify (compare (zero/sign_extend FOO) CONST)
9128 to (compare FOO CONST) if CONST fits in FOO's mode and we
9129 are either testing inequality or have an unsigned comparison
9130 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
9131 if (! unsigned_comparison_p
9132 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9133 <= HOST_BITS_PER_WIDE_INT)
9134 && ((unsigned HOST_WIDE_INT) const_op
9135 < (((HOST_WIDE_INT) 1
9136 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
9138 op0 = XEXP (op0, 0);
9144 /* Check for the case where we are comparing A - C1 with C2,
9145 both constants are smaller than 1/2 the maximum positive
9146 value in MODE, and the comparison is equality or unsigned.
9147 In that case, if A is either zero-extended to MODE or has
9148 sufficient sign bits so that the high-order bit in MODE
9149 is a copy of the sign in the inner mode, we can prove that it is
9150 safe to do the operation in the wider mode. This simplifies
9151 many range checks. */
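/* For example: a range check like (unsigned char) (c - 'a') <= 25,
   where C reaches here zero-extended in a wider register, fits this
   pattern; both constants are well below half the narrow mode's mask,
   so the subtraction and the unsigned comparison give the same result
   when performed directly in the wider mode.  */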
9153 if (mode_width <= HOST_BITS_PER_WIDE_INT
9154 && subreg_lowpart_p (op0)
9155 && GET_CODE (SUBREG_REG (op0)) == PLUS
9156 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
9157 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
9158 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
9159 < GET_MODE_MASK (mode) / 2)
9160 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
9161 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
9162 GET_MODE (SUBREG_REG (op0)))
9163 & ~ GET_MODE_MASK (mode))
9164 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
9165 GET_MODE (SUBREG_REG (op0)))
9166 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9167 - GET_MODE_BITSIZE (mode)))))
9169 op0 = SUBREG_REG (op0);
9173 /* If the inner mode is narrower and we are extracting the low part,
9174 we can treat the SUBREG as if it were a ZERO_EXTEND. */
9175 if (subreg_lowpart_p (op0)
9176 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
9177 /* Fall through */ ;
9181 /* ... fall through ... */
9184 if ((unsigned_comparison_p || equality_comparison_p)
9185 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9186 <= HOST_BITS_PER_WIDE_INT)
9187 && ((unsigned HOST_WIDE_INT) const_op
9188 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
9190 op0 = XEXP (op0, 0);
9196 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
9197 this for equality comparisons due to pathological cases involving overflows. */
9199 if (equality_comparison_p
9200 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9201 op1, XEXP (op0, 1))))
9203 op0 = XEXP (op0, 0);
9208 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
9209 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
9210 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
9212 op0 = XEXP (XEXP (op0, 0), 0);
9213 code = (code == LT ? EQ : NE);
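/* For example: (abs X) - 1 is negative exactly when (abs X) is zero,
   i.e. when X itself is zero; so (lt (plus (abs X) (const_int -1))
   (const_int 0)) becomes (eq X (const_int 0)).  */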
9219 /* (eq (minus A B) C) -> (eq A (plus B C)) or
9220 (eq B (minus A C)), whichever simplifies. We can only do
9221 this for equality comparisons due to pathological cases involving overflows. */
9223 if (equality_comparison_p
9224 && 0 != (tem = simplify_binary_operation (PLUS, mode,
9225 XEXP (op0, 1), op1)))
9227 op0 = XEXP (op0, 0);
9232 if (equality_comparison_p
9233 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9234 XEXP (op0, 0), op1)))
9236 op0 = XEXP (op0, 1);
9241 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
9242 of bits in X minus 1, is one iff X > 0. */
9243 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
9244 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9245 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
9246 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9248 op0 = XEXP (op0, 1);
9249 code = (code == GE ? LE : GT);
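/* For example, with 32-bit X: (ashiftrt X (const_int 31)) is 0 when
   X >= 0 and -1 when X < 0, so the MINUS computes -X or -(X + 1)
   respectively; its sign bit is set exactly when X > 0, which is why
   (lt ... (const_int 0)) turns into (gt X (const_int 0)).  */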
9255 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
9256 if C is zero or B is a constant. */
9257 if (equality_comparison_p
9258 && 0 != (tem = simplify_binary_operation (XOR, mode,
9259 XEXP (op0, 1), op1)))
9261 op0 = XEXP (op0, 0);
9268 case LT: case LTU: case LE: case LEU:
9269 case GT: case GTU: case GE: case GEU:
9270 /* We can't do anything if OP0 is a condition code value, rather
9271 than an actual data value. */
9274 || XEXP (op0, 0) == cc0_rtx
9276 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
9279 /* Get the two operands being compared. */
9280 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
9281 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
9283 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
9285 /* Check for the cases where we simply want the result of the
9286 earlier test or the opposite of that result. */
9288 || (code == EQ && reversible_comparison_p (op0))
9289 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9290 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9291 && (STORE_FLAG_VALUE
9292 & (((HOST_WIDE_INT) 1
9293 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
9295 || (code == GE && reversible_comparison_p (op0)))))
9297 code = (code == LT || code == NE
9298 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
9299 op0 = tem, op1 = tem1;
9305 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero iff X <= 0. */
9307 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
9308 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
9309 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9311 op0 = XEXP (op0, 1);
9312 code = (code == GE ? GT : LE);
9318 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
9319 will be converted to a ZERO_EXTRACT later. */
9320 if (const_op == 0 && equality_comparison_p
9321 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9322 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
9324 op0 = simplify_and_const_int
9325 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
9327 XEXP (XEXP (op0, 0), 1)),
9332 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
9333 zero and X is a comparison and C1 and C2 describe only bits set
9334 in STORE_FLAG_VALUE, we can compare with X. */
9335 if (const_op == 0 && equality_comparison_p
9336 && mode_width <= HOST_BITS_PER_WIDE_INT
9337 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9338 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
9339 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9340 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
9341 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
9343 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
9344 << INTVAL (XEXP (XEXP (op0, 0), 1)));
9345 if ((~ STORE_FLAG_VALUE & mask) == 0
9346 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
9347 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
9348 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
9350 op0 = XEXP (XEXP (op0, 0), 0);
9355 /* If we are doing an equality comparison of an AND of a bit equal
9356 to the sign bit, replace this with a LT or GE comparison of
9357 the underlying value. */
9358 if (equality_comparison_p
9360 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9361 && mode_width <= HOST_BITS_PER_WIDE_INT
9362 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
9363 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
9365 op0 = XEXP (op0, 0);
9366 code = (code == EQ ? GE : LT);
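/* For example, on a 32-bit target:
   (eq (and X (const_int 0x80000000)) (const_int 0)) tests only the
   sign bit of X, so it becomes (ge X (const_int 0)); the NE form
   becomes (lt X (const_int 0)).  */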
9370 /* If this AND operation is really a ZERO_EXTEND from a narrower
9371 mode, the constant fits within that mode, and this is either an
9372 equality or unsigned comparison, try to do this comparison in
9373 the narrower mode. */
9374 if ((equality_comparison_p || unsigned_comparison_p)
9375 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9376 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
9377 & GET_MODE_MASK (mode))
9379 && const_op >> i == 0
9380 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
9382 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
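/* For example: (eq (and:SI X (const_int 255)) (const_int 5)) involves
   only the low byte of X, so when the machine handles QImode
   comparisons it can be done on the QImode lowpart of X directly.  */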
9388 /* If we have (compare (ashift FOO N) (const_int C)) and
9389 the high order N bits of FOO (N+1 if an inequality comparison)
9390 are known to be zero, we can do this by comparing FOO with C
9391 shifted right N bits so long as the low-order N bits of C are zero. */
9393 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
9394 && INTVAL (XEXP (op0, 1)) >= 0
9395 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
9396 < HOST_BITS_PER_WIDE_INT)
9398 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
9399 && mode_width <= HOST_BITS_PER_WIDE_INT
9400 && (nonzero_bits (XEXP (op0, 0), mode)
9401 & ~ (mask >> (INTVAL (XEXP (op0, 1))
9402 + ! equality_comparison_p))) == 0)
9404 const_op >>= INTVAL (XEXP (op0, 1));
9405 op1 = GEN_INT (const_op);
9406 op0 = XEXP (op0, 0);
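/* For example: (eq (ashift X (const_int 2)) (const_int 20)) can become
   (eq X (const_int 5)) when the two high-order bits of X are known to
   be zero (nothing is shifted out) and the two low-order bits of the
   constant are zero, as they are here.  */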
9410 /* If we are doing a sign bit comparison, it means we are testing
9411 a particular bit. Convert it to the appropriate AND. */
9412 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
9413 && mode_width <= HOST_BITS_PER_WIDE_INT)
9415 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9418 - INTVAL (XEXP (op0, 1)))));
9419 code = (code == LT ? NE : EQ);
9423 /* If this is an equality comparison with zero and we are shifting
9424 the low bit to the sign bit, we can convert this to an AND of the low-order bit. */
9426 if (const_op == 0 && equality_comparison_p
9427 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9428 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9430 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9437 /* If this is an equality comparison with zero, we can do this
9438 as a logical shift, which might be much simpler. */
9439 if (equality_comparison_p && const_op == 0
9440 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
9442 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
9444 INTVAL (XEXP (op0, 1)));
9448 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
9449 do the comparison in a narrower mode. */
9450 if (! unsigned_comparison_p
9451 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9452 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9453 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
9454 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
9455 MODE_INT, 1)) != BLKmode
9456 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
9457 || ((unsigned HOST_WIDE_INT) - const_op
9458 <= GET_MODE_MASK (tmode))))
9460 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
9464 /* ... fall through ... */
9466 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
9467 the low order N bits of FOO are known to be zero, we can do this
9468 by comparing FOO with C shifted left N bits so long as no overflow occurs. */
9470 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
9471 && INTVAL (XEXP (op0, 1)) >= 0
9472 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9473 && mode_width <= HOST_BITS_PER_WIDE_INT
9474 && (nonzero_bits (XEXP (op0, 0), mode)
9475 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
9477 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
9480 const_op <<= INTVAL (XEXP (op0, 1));
9481 op1 = GEN_INT (const_op);
9482 op0 = XEXP (op0, 0);
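/* For example: (eq (lshiftrt X (const_int 2)) (const_int 5)) can
   become (eq X (const_int 20)) when the two low-order bits of X are
   known to be zero, since shifting the constant back left then loses
   no information.  */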
9486 /* If we are using this shift to extract just the sign bit, we
9487 can replace this with an LT or GE comparison. */
9489 && (equality_comparison_p || sign_bit_comparison_p)
9490 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9491 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9493 op0 = XEXP (op0, 0);
9494 code = (code == NE || code == GT ? LT : GE);
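/* For example, with 32-bit X: (ne (lshiftrt X (const_int 31))
   (const_int 0)) is true exactly when the sign bit of X is set, so it
   becomes (lt X (const_int 0)).  */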
9503 /* Now make any compound operations involved in this comparison. Then,
9504 check for an outermost SUBREG on OP0 that isn't doing anything or is
9505 paradoxical. The latter case can only occur when it is known that the
9506 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
9507 We can never remove a SUBREG for a non-equality comparison because the
9508 sign bit is in a different place in the underlying object. */
9510 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
9511 op1 = make_compound_operation (op1, SET);
9513 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9514 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9515 && (code == NE || code == EQ)
9516 && ((GET_MODE_SIZE (GET_MODE (op0))
9517 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
9519 op0 = SUBREG_REG (op0);
9520 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
9523 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9524 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9525 && (code == NE || code == EQ)
9526 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9527 <= HOST_BITS_PER_WIDE_INT)
9528 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
9529 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
9530 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
9532 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
9533 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
9534 op0 = SUBREG_REG (op0), op1 = tem;
9536 /* We now do the opposite procedure: Some machines don't have compare
9537 insns in all modes. If OP0's mode is an integer mode smaller than a
9538 word and we can't do a compare in that mode, see if there is a larger
9539 mode for which we can do the compare. There are a number of cases in
9540 which we can use the wider mode. */
9542 mode = GET_MODE (op0);
9543 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
9544 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
9545 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
9546 for (tmode = GET_MODE_WIDER_MODE (mode);
9548 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
9549 tmode = GET_MODE_WIDER_MODE (tmode))
9550 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
9552 /* If the only nonzero bits in OP0 and OP1 are those in the
9553 narrower mode and this is an equality or unsigned comparison,
9554 we can use the wider mode. Similarly for sign-extended
9555 values, in which case it is true for all comparisons. */
9556 if (((code == EQ || code == NE
9557 || code == GEU || code == GTU || code == LEU || code == LTU)
9558 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
9559 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
9560 || ((num_sign_bit_copies (op0, tmode)
9561 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
9562 && (num_sign_bit_copies (op1, tmode)
9563 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
9565 op0 = gen_lowpart_for_combine (tmode, op0);
9566 op1 = gen_lowpart_for_combine (tmode, op1);
9570 /* If this is a test for negative, we can make an explicit
9571 test of the sign bit. */
9573 if (op1 == const0_rtx && (code == LT || code == GE)
9574 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9576 op0 = gen_binary (AND, tmode,
9577 gen_lowpart_for_combine (tmode, op0),
9578 GEN_INT ((HOST_WIDE_INT) 1
9579 << (GET_MODE_BITSIZE (mode) - 1)));
9580 code = (code == LT) ? NE : EQ;
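/* For example: if the machine lacks a HImode compare,
   (lt X:HI (const_int 0)) can be tested in SImode as
   (ne (and (subreg:SI X) (const_int 0x8000)) (const_int 0)).  */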
9585 #ifdef CANONICALIZE_COMPARISON
9586 /* If this machine only supports a subset of valid comparisons, see if we
9587 can convert an unsupported one into a supported one. */
9588 CANONICALIZE_COMPARISON (code, op0, op1);
9597 /* Return 1 if we know that X, a comparison operation, is not operating
9598 on a floating-point value or is EQ or NE, meaning that we can safely reverse its sense. */
9602 reversible_comparison_p (x)
9605 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
9607 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
9610 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
9613 case MODE_PARTIAL_INT:
9614 case MODE_COMPLEX_INT:
9618 /* If the mode of the condition codes tells us that this is safe,
9619 we need look no further. */
9620 if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
9624 /* Otherwise try to find where the condition codes were last set and use that. */
9625 x = get_last_value (XEXP (x, 0));
9626 return (x && GET_CODE (x) == COMPARE
9627 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
9633 /* Utility function for the following routine. Called when X is part of a value
9634 being stored into reg_last_set_value. Sets reg_last_set_table_tick
9635 for each register mentioned. Similar to mention_regs in cse.c. */
9638 update_table_tick (x)
9641 register enum rtx_code code = GET_CODE (x);
9642 register char *fmt = GET_RTX_FORMAT (code);
9647 int regno = REGNO (x);
9648 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9649 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9651 for (i = regno; i < endregno; i++)
9652 reg_last_set_table_tick[i] = label_tick;
9657 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9658 /* Note that we can't have an "E" in values stored; see
9659 get_last_value_validate. */
9661 update_table_tick (XEXP (x, i));
9664 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
9665 are saying that the register is clobbered and we no longer know its
9666 value. If INSN is zero, don't update reg_last_set; this is only permitted
9667 with VALUE also zero and is used to invalidate the register. */
9670 record_value_for_reg (reg, insn, value)
9675 int regno = REGNO (reg);
9676 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9677 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
9680 /* If VALUE contains REG and we have a previous value for REG, substitute
9681 the previous value. */
9682 if (value && insn && reg_overlap_mentioned_p (reg, value))
9686 /* Set things up so get_last_value is allowed to see anything set up to our insn. */
9688 subst_low_cuid = INSN_CUID (insn);
9689 tem = get_last_value (reg);
9692 value = replace_rtx (copy_rtx (value), reg, tem);
9695 /* For each register modified, show we don't know its value, that
9696 we don't know about its bitwise content, that its value has been
9697 updated, and that we don't know the location of the death of the register. */
9699 for (i = regno; i < endregno; i ++)
9702 reg_last_set[i] = insn;
9703 reg_last_set_value[i] = 0;
9704 reg_last_set_mode[i] = 0;
9705 reg_last_set_nonzero_bits[i] = 0;
9706 reg_last_set_sign_bit_copies[i] = 0;
9707 reg_last_death[i] = 0;
9710 /* Mark registers that are being referenced in this value. */
9712 update_table_tick (value);
9714 /* Now update the status of each register being set.
9715 If someone is using this register in this block, set this register
9716 to invalid since we will get confused between the two lives in this
9717 basic block. This makes using this register always invalid. In cse, we
9718 scan the table to invalidate all entries using this register, but this
9719 is too much work for us. */
9721 for (i = regno; i < endregno; i++)
9723 reg_last_set_label[i] = label_tick;
9724 if (value && reg_last_set_table_tick[i] == label_tick)
9725 reg_last_set_invalid[i] = 1;
9727 reg_last_set_invalid[i] = 0;
9730 /* The value being assigned might refer to X (like in "x++;"). In that
9731 case, we must replace it with (clobber (const_int 0)) to prevent infinite loops. */
9733 if (value && ! get_last_value_validate (&value,
9734 reg_last_set_label[regno], 0))
9736 value = copy_rtx (value);
9737 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
9741 /* For the main register being modified, update the value, the mode, the
9742 nonzero bits, and the number of sign bit copies. */
9744 reg_last_set_value[regno] = value;
9748 subst_low_cuid = INSN_CUID (insn);
9749 reg_last_set_mode[regno] = GET_MODE (reg);
9750 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
9751 reg_last_set_sign_bit_copies[regno]
9752 = num_sign_bit_copies (value, GET_MODE (reg));
9756 /* Used for communication between the following two routines. */
9757 static rtx record_dead_insn;
9759 /* Called via note_stores from record_dead_and_set_regs to handle one
9760 SET or CLOBBER in an insn. */
9763 record_dead_and_set_regs_1 (dest, setter)
9766 if (GET_CODE (dest) == SUBREG)
9767 dest = SUBREG_REG (dest);
9769 if (GET_CODE (dest) == REG)
9771 /* If we are setting the whole register, we know its value. Otherwise
9772 show that we don't know the value. We can handle SUBREG in some cases. */
9774 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
9775 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
9776 else if (GET_CODE (setter) == SET
9777 && GET_CODE (SET_DEST (setter)) == SUBREG
9778 && SUBREG_REG (SET_DEST (setter)) == dest
9779 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
9780 && subreg_lowpart_p (SET_DEST (setter)))
9781 record_value_for_reg (dest, record_dead_insn,
9782 gen_lowpart_for_combine (GET_MODE (dest),
9785 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
9787 else if (GET_CODE (dest) == MEM
9788 /* Ignore pushes, they clobber nothing. */
9789 && ! push_operand (dest, GET_MODE (dest)))
9790 mem_last_set = INSN_CUID (record_dead_insn);
9793 /* Update the records of when each REG was most recently set or killed
9794 for the things done by INSN. This is the last thing done in processing
9795 INSN in the combiner loop.
9797 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
9798 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
9799 and also the similar information mem_last_set (which insn most recently
9800 modified memory) and last_call_cuid (which insn was the most recent
9801 subroutine call). */
9804 record_dead_and_set_regs (insn)
9810 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
9812 if (REG_NOTE_KIND (link) == REG_DEAD
9813 && GET_CODE (XEXP (link, 0)) == REG)
9815 int regno = REGNO (XEXP (link, 0));
9817 = regno + (regno < FIRST_PSEUDO_REGISTER
9818 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
9821 for (i = regno; i < endregno; i++)
9822 reg_last_death[i] = insn;
9824 else if (REG_NOTE_KIND (link) == REG_INC)
9825 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
9828 if (GET_CODE (insn) == CALL_INSN)
9830 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
9831 if (call_used_regs[i])
9833 reg_last_set_value[i] = 0;
9834 reg_last_set_mode[i] = 0;
9835 reg_last_set_nonzero_bits[i] = 0;
9836 reg_last_set_sign_bit_copies[i] = 0;
9837 reg_last_death[i] = 0;
9840 last_call_cuid = mem_last_set = INSN_CUID (insn);
9843 record_dead_insn = insn;
9844 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
9847 /* Utility routine for the following function. Verify that all the registers
9848 mentioned in *LOC are valid when *LOC was part of a value set when
9849 label_tick == TICK. Return 0 if some are not.
9851 If REPLACE is non-zero, replace the invalid reference with
9852 (clobber (const_int 0)) and return 1. This replacement is useful because
9853 we often can get useful information about the form of a value (e.g., if
9854 it was produced by a shift that always produces -1 or 0) even though
9855 we don't know exactly what registers it was produced from. */
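/* For example: if the recorded value was, say,
   (ashiftrt (reg 65) (const_int 31)) and reg 65 has since been reused,
   replacement yields (ashiftrt (clobber (const_int 0)) (const_int 31));
   the exact value is gone, but num_sign_bit_copies can still see that
   every bit of the result is a copy of its sign bit.  */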
9858 get_last_value_validate (loc, tick, replace)
9864 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
9865 int len = GET_RTX_LENGTH (GET_CODE (x));
9868 if (GET_CODE (x) == REG)
9870 int regno = REGNO (x);
9871 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9872 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9875 for (j = regno; j < endregno; j++)
9876 if (reg_last_set_invalid[j]
9877 /* If this is a pseudo-register that was only set once, it is always valid. */
9879 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
9880 && reg_last_set_label[j] > tick))
9883 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
9890 for (i = 0; i < len; i++)
9892 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
9893 /* Don't bother with these. They shouldn't occur anyway. */
9897 /* If we haven't found a reason for it to be invalid, it is valid. */
9901 /* Get the last value assigned to X, if known. Some registers
9902 in the value may be replaced with (clobber (const_int 0)) if their value
9903 is no longer known reliably. */
9912 /* If this is a non-paradoxical SUBREG, get the value of its operand and
9913 then convert it to the desired mode. If this is a paradoxical SUBREG,
9914 we cannot predict what values the "extra" bits might have. */
9915 if (GET_CODE (x) == SUBREG
9916 && subreg_lowpart_p (x)
9917 && (GET_MODE_SIZE (GET_MODE (x))
9918 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
9919 && (value = get_last_value (SUBREG_REG (x))) != 0)
9920 return gen_lowpart_for_combine (GET_MODE (x), value);
9922 if (GET_CODE (x) != REG)
9926 value = reg_last_set_value[regno];
9928 /* If we don't have a value or if it isn't for this basic block, return 0. */
9931 || (reg_n_sets[regno] != 1
9932 && reg_last_set_label[regno] != label_tick))
9935 /* If the value was set in a later insn than the ones we are processing,
9936 we can't use it even if the register was only set once, but make a quick
9937 check to see if the previous insn set it to something. This is commonly
9938 the case when the same pseudo is used by repeated insns. */
9940 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
9944 /* Skip over USE insns. They are not useful here, and they may have
9945 been made by combine, in which case they do not have an INSN_CUID
9946 value. We can't use prev_real_insn, because that would incorrectly
9947 take us backwards across labels. */
9948 for (insn = prev_nonnote_insn (subst_insn);
9949 insn && ((GET_CODE (insn) == INSN
9950 && GET_CODE (PATTERN (insn)) == USE)
9951 || INSN_CUID (insn) >= subst_low_cuid);
9952 insn = prev_nonnote_insn (insn))
9956 && (set = single_set (insn)) != 0
9957 && rtx_equal_p (SET_DEST (set), x))
9959 value = SET_SRC (set);
9961 /* Make sure that VALUE doesn't reference X. Replace any
9962 explicit references with a CLOBBER. If there are any remaining
9963 references (rare), don't use the value. */
9965 if (reg_mentioned_p (x, value))
9966 value = replace_rtx (copy_rtx (value), x,
9967 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
9969 if (reg_overlap_mentioned_p (x, value))
9976 /* If the value has all its registers valid, return it. */
9977 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
9980 /* Otherwise, make a copy and replace any invalid register with
9981 (clobber (const_int 0)). If that fails for some reason, return 0. */
9983 value = copy_rtx (value);
9984 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
9990 /* Return nonzero if expression X refers to a REG or to memory
9991 that is set in an instruction more recent than FROM_CUID. */
9994 use_crosses_set_p (x, from_cuid)
10000 register enum rtx_code code = GET_CODE (x);
10004 register int regno = REGNO (x);
10005 int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
10006 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10008 #ifdef PUSH_ROUNDING
10009 /* Don't allow uses of the stack pointer to be moved,
10010 because we don't know whether the move crosses a push insn. */
10011 if (regno == STACK_POINTER_REGNUM)
10014 for (;regno < endreg; regno++)
10015 if (reg_last_set[regno]
10016 && INSN_CUID (reg_last_set[regno]) > from_cuid)
10021 if (code == MEM && mem_last_set > from_cuid)
10024 fmt = GET_RTX_FORMAT (code);
10026 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10031 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10032 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
10035 else if (fmt[i] == 'e'
10036 && use_crosses_set_p (XEXP (x, i), from_cuid))
10042 /* Define three variables used for communication between the following routines. */
10045 static int reg_dead_regno, reg_dead_endregno;
10046 static int reg_dead_flag;
10048 /* Function called via note_stores from reg_dead_at_p.
10050 If DEST is within [reg_dead_regno, reg_dead_endregno), set
10051 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
10054 reg_dead_at_p_1 (dest, x)
10058 int regno, endregno;
10060 if (GET_CODE (dest) != REG)
10063 regno = REGNO (dest);
10064 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10065 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
10067 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
10068 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
10071 /* Return non-zero if REG is known to be dead at INSN.
10073 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
10074 referencing REG, it is dead. If we hit a SET referencing REG, it is
10075 live. Otherwise, see if it is live or dead at the start of the basic
10076 block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
10077 must be assumed to be always live. */
10080 reg_dead_at_p (reg, insn)
10086 /* Set variables for reg_dead_at_p_1. */
10087 reg_dead_regno = REGNO (reg);
10088 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
10089 ? HARD_REGNO_NREGS (reg_dead_regno,
10095 /* Check that reg isn't mentioned in NEWPAT_USED_REGS. */
10096 if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
10098 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
10099 if (TEST_HARD_REG_BIT (newpat_used_regs, i))
10103 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
10104 beginning of function. */
10105 for (; insn && GET_CODE (insn) != CODE_LABEL;
10106 insn = prev_nonnote_insn (insn))
10108 note_stores (PATTERN (insn), reg_dead_at_p_1);
10110 return reg_dead_flag == 1 ? 1 : 0;
10112 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
10116 /* Get the basic block number that we were in. */
10121 for (block = 0; block < n_basic_blocks; block++)
10122 if (insn == basic_block_head[block])
10125 if (block == n_basic_blocks)
10129 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
10130 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
10131 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
10137 /* Note hard registers in X that are used. This code is similar to
10138 that in flow.c, but much simpler since we don't care about pseudos. */
10141 mark_used_regs_combine (x)
10144 register RTX_CODE code = GET_CODE (x);
10145 register int regno;
10157 case ADDR_DIFF_VEC:
10160 /* CC0 must die in the insn after it is set, so we don't need to take
10161 special note of it here. */
10167 /* If we are clobbering a MEM, mark any hard registers inside the
10168 address as used. */
10169 if (GET_CODE (XEXP (x, 0)) == MEM)
10170 mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
10175 /* A hard reg in a wide mode may really be multiple registers.
10176 If so, mark all of them just like the first. */
10177 if (regno < FIRST_PSEUDO_REGISTER)
10179 /* None of this applies to the stack, frame, or arg pointers. */
10180 if (regno == STACK_POINTER_REGNUM
10181 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
10182 || regno == HARD_FRAME_POINTER_REGNUM
10184 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
10185 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
10187 || regno == FRAME_POINTER_REGNUM)
10190 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
10192 SET_HARD_REG_BIT (newpat_used_regs, regno + i);
10198 /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in the address. */
10200 register rtx testreg = SET_DEST (x);
10202 while (GET_CODE (testreg) == SUBREG
10203 || GET_CODE (testreg) == ZERO_EXTRACT
10204 || GET_CODE (testreg) == SIGN_EXTRACT
10205 || GET_CODE (testreg) == STRICT_LOW_PART)
10206 testreg = XEXP (testreg, 0);
10208 if (GET_CODE (testreg) == MEM)
10209 mark_used_regs_combine (XEXP (testreg, 0));
10211 mark_used_regs_combine (SET_SRC (x));
10216 /* Recursively scan the operands of this expression. */
10219 register char *fmt = GET_RTX_FORMAT (code);
10221 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10224 mark_used_regs_combine (XEXP (x, i));
10225 else if (fmt[i] == 'E')
10229 for (j = 0; j < XVECLEN (x, i); j++)
10230 mark_used_regs_combine (XVECEXP (x, i, j));
10237 /* Remove register number REGNO from the dead registers list of INSN.
10239 Return the note used to record the death, if there was one. */
10242 remove_death (regno, insn)
10246 register rtx note = find_regno_note (insn, REG_DEAD, regno);
10250 reg_n_deaths[regno]--;
10251 remove_note (insn, note);
10257 /* For each register (hardware or pseudo) used within expression X, if its
10258 death is in an instruction with cuid between FROM_CUID (inclusive) and
10259 TO_INSN (exclusive), put a REG_DEAD note for that register in the
10260 list headed by PNOTES.
10262 This is done when X is being merged by combination into TO_INSN. These
10263 notes will then be distributed as needed. */
10266 move_deaths (x, from_cuid, to_insn, pnotes)
10272 register char *fmt;
10273 register int len, i;
10274 register enum rtx_code code = GET_CODE (x);
10278 register int regno = REGNO (x);
10279 register rtx where_dead = reg_last_death[regno];
10281 if (where_dead && INSN_CUID (where_dead) >= from_cuid
10282 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
10284 rtx note = remove_death (regno, where_dead);
10286 /* It is possible for the call above to return 0. This can occur
10287 when reg_last_death points to I2 or I1 that we combined with.
10288 In that case make a new note.
10290 We must also check for the case where X is a hard register
10291 and NOTE is a death note for a range of hard registers
10292 including X. In that case, we must put REG_DEAD notes for
10293 the remaining registers in place of NOTE. */
10295 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
10296 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
10297 != GET_MODE_SIZE (GET_MODE (x))))
10299 int deadregno = REGNO (XEXP (note, 0));
10301 = (deadregno + HARD_REGNO_NREGS (deadregno,
10302 GET_MODE (XEXP (note, 0))));
10303 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
10306 for (i = deadregno; i < deadend; i++)
10307 if (i < regno || i >= ourend)
10308 REG_NOTES (where_dead)
10309 = gen_rtx (EXPR_LIST, REG_DEAD,
10310 gen_rtx (REG, reg_raw_mode[i], i),
10311 REG_NOTES (where_dead));
10314 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
10316 XEXP (note, 1) = *pnotes;
10320 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
10322 reg_n_deaths[regno]++;
10328 else if (GET_CODE (x) == SET)
10330 rtx dest = SET_DEST (x);
10332 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
10334 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
10335 that accesses one word of a multi-word item, some
10336 piece of every register in the expression is used by
10337 this insn, so remove any old death. */
10339 if (GET_CODE (dest) == ZERO_EXTRACT
10340 || GET_CODE (dest) == STRICT_LOW_PART
10341 || (GET_CODE (dest) == SUBREG
10342 && (((GET_MODE_SIZE (GET_MODE (dest))
10343 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
10344 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
10345 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
10347 move_deaths (dest, from_cuid, to_insn, pnotes);
10351 /* If this is some other SUBREG, we know it replaces the entire
10352 value, so use that as the destination. */
10353 if (GET_CODE (dest) == SUBREG)
10354 dest = SUBREG_REG (dest);
10356 /* If this is a MEM, adjust deaths of anything used in the address.
10357 For a REG (the only other possibility), the entire value is
10358 being replaced so the old value is not used in this insn. */
10360 if (GET_CODE (dest) == MEM)
10361 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
10365 else if (GET_CODE (x) == CLOBBER)
10368 len = GET_RTX_LENGTH (code);
10369 fmt = GET_RTX_FORMAT (code);
10371 for (i = 0; i < len; i++)
10376 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10377 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
10379 else if (fmt[i] == 'e')
10380 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
10384 /* Return 1 if X is the target of a bit-field assignment in BODY, the
10385 pattern of an insn. X must be a REG. */
10388 reg_bitfield_target_p (x, body)
10394 if (GET_CODE (body) == SET)
10396 rtx dest = SET_DEST (body);
10398 int regno, tregno, endregno, endtregno;
10400 if (GET_CODE (dest) == ZERO_EXTRACT)
10401 target = XEXP (dest, 0);
10402 else if (GET_CODE (dest) == STRICT_LOW_PART)
10403 target = SUBREG_REG (XEXP (dest, 0));
10407 if (GET_CODE (target) == SUBREG)
10408 target = SUBREG_REG (target);
10410 if (GET_CODE (target) != REG)
10413 tregno = REGNO (target), regno = REGNO (x);
10414 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
10415 return target == x;
10417 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
10418 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
10420 return endregno > tregno && regno < endtregno;
10423 else if (GET_CODE (body) == PARALLEL)
10424 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
10425 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
10431 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
10432 as appropriate. I3 and I2 are the insns resulting from the combination
10433 insns including FROM (I2 may be zero).
10435 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
10436 not need REG_DEAD notes because they are being substituted for. This
10437 saves searching in the most common cases.
10439 Each note in the list is either ignored or placed on some insns, depending
10440 on the type of note. */
10443 distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
10447 rtx elim_i2, elim_i1;
10449 rtx note, next_note;
10452 for (note = notes; note; note = next_note)
10454 rtx place = 0, place2 = 0;
10456 /* If this NOTE references a pseudo register, ensure it references
10457 the latest copy of that register. */
10458 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
10459 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
10460 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
10462 next_note = XEXP (note, 1);
10463 switch (REG_NOTE_KIND (note))
10466 /* Any clobbers for i3 may still exist, and so we must process
10467 REG_UNUSED notes from that insn.
10469 Any clobbers from i2 or i1 can only exist if they were added by
10470 recog_for_combine. In that case, recog_for_combine created the
10471 necessary REG_UNUSED notes. Trying to keep any original
10472 REG_UNUSED notes from these insns can cause incorrect output
10473 if it is for the same register as the original i3 dest.
10474 In that case, we will notice that the register is set in i3,
10475 and then add a REG_UNUSED note for the destination of i3, which
10476 is wrong. However, it is possible to have REG_UNUSED notes from
10477 i2 or i1 for registers which were both used and clobbered, so
10478 we keep notes from i2 or i1 if they will turn into REG_DEAD notes. */
10481 /* If this register is set or clobbered in I3, put the note there
10482 unless there is one already. */
10483 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
10485 if (from_insn != i3)
10488 if (! (GET_CODE (XEXP (note, 0)) == REG
10489 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
10490 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
10493 /* Otherwise, if this register is used by I3, then this register
10495 now dies here, so we must put a REG_DEAD note here unless there is one already. */
10496 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
10497 && ! (GET_CODE (XEXP (note, 0)) == REG
10498 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
10499 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
10501 PUT_REG_NOTE_KIND (note, REG_DEAD);
10509 /* These notes say something about results of an insn. We can
10510 only support them if they used to be on I3 in which case they
10511 remain on I3. Otherwise they are ignored.
10513 If the note refers to an expression that is not a constant, we
10514 must also ignore the note since we cannot tell whether the
10515 equivalence is still true. It might be possible to do
10516 slightly better than this (we only have a problem if I2DEST
10517 or I1DEST is present in the expression), but it doesn't
10518 seem worth the trouble. */
10520 if (from_insn == i3
10521 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
10526 case REG_NO_CONFLICT:
10528 /* These notes say something about how a register is used. They must
10529 be present on any use of the register in I2 or I3. */
10530 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
10533 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
10543 /* It is too much trouble to try to see if this note is still
10544 correct in all situations. It is better to simply delete it. */
10548 /* If the insn previously containing this note still exists,
10549 put it back where it was. Otherwise move it to the previous
10550 insn. Adjust the corresponding REG_LIBCALL note. */
10551 if (GET_CODE (from_insn) != NOTE)
10555 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
10556 place = prev_real_insn (from_insn);
10558 XEXP (tem, 0) = place;
10563 /* This is handled similarly to REG_RETVAL. */
10564 if (GET_CODE (from_insn) != NOTE)
10568 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
10569 place = next_real_insn (from_insn);
10571 XEXP (tem, 0) = place;
10576 /* If the register is used as an input in I3, it dies there.
10577 Similarly for I2, if it is non-zero and adjacent to I3.
10579 If the register is not used as an input in either I3 or I2
10580 and it is not one of the registers we were supposed to eliminate,
10581 there are two possibilities. We might have a non-adjacent I2
10582 or we might have somehow eliminated an additional register
10583 from a computation. For example, we might have had A & B where
10584 we discover that B will always be zero. In this case we will
10585 eliminate the reference to A.
10587 In both cases, we must search to see if we can find a previous
10588 use of A and put the death note there. */
10591 && GET_CODE (from_insn) == CALL_INSN
10592 && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
10594 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
10596 else if (i2 != 0 && next_nonnote_insn (i2) == i3
10597 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
10600 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
10603 /* If the register is used in both I2 and I3 and it dies in I3,
10604 we might have added another reference to it. If reg_n_refs
10605 was 2, bump it to 3. This has to be correct since the
10606 register must have been set somewhere. The reason this is
10607 done is because local-alloc.c treats 2 references as a special case. */
10610 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
10611 && reg_n_refs[REGNO (XEXP (note, 0))]== 2
10612 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
10613 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
10617 for (tem = prev_nonnote_insn (i3);
10619 && (GET_CODE (tem) == INSN || GET_CODE (tem) == CALL_INSN);
10620 tem = prev_nonnote_insn (tem))
10622 /* If the register is being set at TEM, see if that is all
10623 TEM is doing. If so, delete TEM. Otherwise, make this
10624 into a REG_UNUSED note instead. */
10625 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
10627 rtx set = single_set (tem);
10629 /* Verify that it was the set, and not a clobber that
10630 modified the register. */
10632 if (set != 0 && ! side_effects_p (SET_SRC (set))
10633 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
10635 /* Move the notes and links of TEM elsewhere.
10636 This might delete other dead insns recursively.
10637 First set the pattern to something that won't use any register. */
10640 PATTERN (tem) = pc_rtx;
10642 distribute_notes (REG_NOTES (tem), tem, tem,
10643 NULL_RTX, NULL_RTX, NULL_RTX);
10644 distribute_links (LOG_LINKS (tem));
10646 PUT_CODE (tem, NOTE);
10647 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
10648 NOTE_SOURCE_FILE (tem) = 0;
10652 PUT_REG_NOTE_KIND (note, REG_UNUSED);
10654 /* If there isn't already a REG_UNUSED note, put one here. */
10656 if (! find_regno_note (tem, REG_UNUSED,
10657 REGNO (XEXP (note, 0))))
10662 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
10663 || (GET_CODE (tem) == CALL_INSN
10664 && find_reg_fusage (tem, USE, XEXP (note, 0))))
10671 /* If we haven't found an insn for the death note and it
10672 is still a REG_DEAD note, but we have hit a CODE_LABEL,
10673 insert a USE insn for the register at that label and
10674 put the death note there. This prevents problems with
10675 call-state tracking in caller-save.c. */
10676 if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 && tem != 0)
10679 = emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (note, 0)),
10682 /* If this insn was emitted between blocks, then update
10683 basic_block_head of the current block to include it. */
10684 if (basic_block_end[this_basic_block - 1] == tem)
10685 basic_block_head[this_basic_block] = place;
10689 /* If the register is set or already dead at PLACE, we needn't do
10690 anything with this note if it is still a REG_DEAD note.
10692 Note that we cannot use just `dead_or_set_p' here since we can
10693 convert an assignment to a register into a bit-field assignment.
10694 Therefore, we must also omit the note if the register is the
10695 target of a bitfield assignment. */
10697 if (place && REG_NOTE_KIND (note) == REG_DEAD)
10699 int regno = REGNO (XEXP (note, 0));
10701 if (dead_or_set_p (place, XEXP (note, 0))
10702 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
10704 /* Unless the register previously died in PLACE, clear
10705 reg_last_death. [I no longer understand why this is being done.] */
10707 if (reg_last_death[regno] != place)
10708 reg_last_death[regno] = 0;
10712 reg_last_death[regno] = place;
10714 /* If this is a death note for a hard reg that is occupying
10715 multiple registers, ensure that we are still using all
10716 parts of the object. If we find a piece of the object
10717 that is unused, we must add a USE for that piece before
10718 PLACE and put the appropriate REG_DEAD note on it.
10720 An alternative would be to put a REG_UNUSED for the pieces
10721 on the insn that set the register, but that can't be done if
10722 it is not in the same block. It is simpler, though less
10723 efficient, to add the USE insns. */
10725 if (place && regno < FIRST_PSEUDO_REGISTER
10726 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
10729 = regno + HARD_REGNO_NREGS (regno,
10730 GET_MODE (XEXP (note, 0)));
10734 for (i = regno; i < endregno; i++)
10735 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
10736 && ! find_regno_fusage (place, USE, i))
10738 rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
10741 /* See if we already placed a USE note for this
10742 register in front of PLACE. */
10744 GET_CODE (PREV_INSN (p)) == INSN
10745 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
10747 if (rtx_equal_p (piece,
10748 XEXP (PATTERN (PREV_INSN (p)), 0)))
10757 = emit_insn_before (gen_rtx (USE, VOIDmode,
10760 REG_NOTES (use_insn)
10761 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
10762 REG_NOTES (use_insn));
10768 /* Check for the case where the register dying partially
10769 overlaps the register set by this insn. */
10771 for (i = regno; i < endregno; i++)
10772 if (dead_or_set_regno_p (place, i))
10780 /* Put only REG_DEAD notes for pieces that are
10781 still used and that are not already dead or set. */
10783 for (i = regno; i < endregno; i++)
10785 rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
10787 if ((reg_referenced_p (piece, PATTERN (place))
10788 || (GET_CODE (place) == CALL_INSN
10789 && find_reg_fusage (place, USE, piece)))
10790 && ! dead_or_set_p (place, piece)
10791 && ! reg_bitfield_target_p (piece,
10793 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
10795 REG_NOTES (place));
10805 /* Any other notes should not be present at this point in the code. */
10812 XEXP (note, 1) = REG_NOTES (place);
10813 REG_NOTES (place) = note;
10815 else if ((REG_NOTE_KIND (note) == REG_DEAD
10816 || REG_NOTE_KIND (note) == REG_UNUSED)
10817 && GET_CODE (XEXP (note, 0)) == REG)
10818 reg_n_deaths[REGNO (XEXP (note, 0))]--;
10822 if ((REG_NOTE_KIND (note) == REG_DEAD
10823 || REG_NOTE_KIND (note) == REG_UNUSED)
10824 && GET_CODE (XEXP (note, 0)) == REG)
10825 reg_n_deaths[REGNO (XEXP (note, 0))]++;
10827 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
10828 XEXP (note, 0), REG_NOTES (place2));
10833 /* Similarly to above, distribute the LOG_LINKS that used to be present on
10834 I3, I2, and I1 to new locations. This is also called in one case to
10835 add a link pointing at I3 when I3's destination is changed. */
10838 distribute_links (links)
10841 rtx link, next_link;
10843 for (link = links; link; link = next_link)
10849 next_link = XEXP (link, 1);
10851 /* If the insn that this link points to is a NOTE or isn't a single
10852 set, ignore it. In the latter case, it isn't clear what we
10853 can do other than ignore the link, since we can't tell which
10854 register it was for. Such links wouldn't be used by combine anyway.
10857 It is not possible for the destination of the target of the link to
10858 have been changed by combine. The only way this could happen is if we
10859 replace I3, I2, and I1 by I3 and I2. But in that case the
10860 destination of I2 also remains unchanged. */
10862 if (GET_CODE (XEXP (link, 0)) == NOTE
10863 || (set = single_set (XEXP (link, 0))) == 0)
10866 reg = SET_DEST (set);
10867 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
10868 || GET_CODE (reg) == SIGN_EXTRACT
10869 || GET_CODE (reg) == STRICT_LOW_PART)
10870 reg = XEXP (reg, 0);
10872 /* A LOG_LINK is defined as being placed on the first insn that uses
10873 a register and points to the insn that sets the register. Start
10874 searching at the next insn after the target of the link and stop
10875 when we reach a set of the register or the end of the basic block.
10877 Note that this correctly handles the link that used to point from
10878 I3 to I2. Also note that not much searching is typically done here
10879 since most links don't point very far away. */
10881 for (insn = NEXT_INSN (XEXP (link, 0));
10882 (insn && (this_basic_block == n_basic_blocks - 1
10883 || basic_block_head[this_basic_block + 1] != insn));
10884 insn = NEXT_INSN (insn))
10885 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
10886 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
10888 if (reg_referenced_p (reg, PATTERN (insn)))
10892 else if (GET_CODE (insn) == CALL_INSN
10893 && find_reg_fusage (insn, USE, reg))
10899 /* If we found a place to put the link, place it there unless there
10900 is already a link to the same insn as LINK at that point. */
10906 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
10907 if (XEXP (link2, 0) == XEXP (link, 0))
10912 XEXP (link, 1) = LOG_LINKS (place);
10913 LOG_LINKS (place) = link;
10915 /* Set added_links_insn to the earliest insn we added a link to. */
10917 if (added_links_insn == 0
10918 || INSN_CUID (added_links_insn) > INSN_CUID (place))
10919 added_links_insn = place;
10926 dump_combine_stats (file)
10931 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
10932 combine_attempts, combine_merges, combine_extras, combine_successes);
10936 dump_combine_total_stats (file)
10941 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
10942 total_attempts, total_merges, total_extras, total_successes);