1 /* Optimize by combining instructions for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This module is essentially the "combiner" phase of the U. of Arizona
23 Portable Optimizer, but redone to work on our list-structured
24 representation for RTL instead of their string representation.
26 The LOG_LINKS of each insn identify the most recent assignment
27 to each REG used in the insn. It is a list of previous insns,
28 each of which contains a SET for a REG that is used in this insn
29 and not used or set in between. LOG_LINKs never cross basic blocks.
30 They were set up by the preceding pass (lifetime analysis).
32 We try to combine each pair of insns joined by a logical link.
33 We also try to combine triples of insns A, B and C when
34 C has a link back to B and B has a link back to A.
36 LOG_LINKS does not have links for use of the CC0. They don't
37 need to, because the insn that sets the CC0 is always immediately
38 before the insn that tests it. So we always regard a branch
39 insn as having a logical link to the preceding insn. The same is true
40 for an insn explicitly using CC0.
42 We check (with use_crosses_set_p) to avoid combining in such a way
43 as to move a computation to a place where its value would be different.
45 Combination is done by mathematically substituting the previous
46 insn(s) values for the regs they set into the expressions in
47 the later insns that refer to these regs. If the result is a valid insn
48 for our target machine, according to the machine description,
49 we install it, delete the earlier insns, and update the data flow
50 information (LOG_LINKS and REG_NOTES) for what we did.
52 There are a few exceptions where the dataflow information created by
53 flow.c isn't completely updated:
55 - reg_live_length is not updated
56 - reg_n_refs is not adjusted in the rare case when a register is
57 no longer required in a computation
58 - there are extremely rare cases (see distribute_regnotes) when a
60 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
61 removed because there is no way to know which register it was
64 To simplify substitution, we combine only when the earlier insn(s)
65 consist of only a single assignment. To simplify updating afterward,
66 we never combine when a subroutine call appears in the middle.
68 Since we do not represent assignments to CC0 explicitly except when that
69 is all an insn does, there is no LOG_LINKS entry in an insn that uses
70 the condition code for the insn that set the condition code.
71 Fortunately, these two insns must be consecutive.
72 Therefore, every JUMP_INSN is taken to have an implicit logical link
73 to the preceding insn. This is not quite right, since non-jumps can
74 also use the condition code; but in practice such insns would not
83 #include "hard-reg-set.h"
84 #include "basic-block.h"
85 #include "insn-config.h"
87 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
89 #include "insn-attr.h"
94 /* It is not safe to use ordinary gen_lowpart in combine.
95 Use gen_lowpart_for_combine instead. See comments there. */
96 #define gen_lowpart dont_use_gen_lowpart_you_dummy
98 /* Number of attempts to combine instructions in this function. */
100 static int combine_attempts;
102 /* Number of attempts that got as far as substitution in this function. */
104 static int combine_merges;
106 /* Number of instructions combined with added SETs in this function. */
108 static int combine_extras;
110 /* Number of instructions combined in this function. */
112 static int combine_successes;
114 /* Totals over entire compilation. */
116 static int total_attempts, total_merges, total_extras, total_successes;
119 /* Vector mapping INSN_UIDs to cuids.
120 The cuids are like uids but increase monotonically always.
121 Combine always uses cuids so that it can compare them.
122 But actually renumbering the uids, which we used to do,
123 proves to be a bad idea because it makes it hard to compare
124 the dumps produced by earlier passes with those from later passes. */
126 static int *uid_cuid;
127 static int max_uid_cuid;
129 /* Get the cuid of an insn. */
131 #define INSN_CUID(INSN) \
132 (INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
134 /* In case BITS_PER_WORD == HOST_BITS_PER_WIDE_INT, shifting by
135 BITS_PER_WORD would invoke undefined behavior. Work around it. */
137 #define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \
138 (((unsigned HOST_WIDE_INT)(val) << (BITS_PER_WORD - 1)) << 1)
140 /* Maximum register number, which is the size of the tables below. */
142 static unsigned int combine_max_regno;
144 /* Record last point of death of (hard or pseudo) register n. */
146 static rtx *reg_last_death;
148 /* Record last point of modification of (hard or pseudo) register n. */
150 static rtx *reg_last_set;
152 /* Record the cuid of the last insn that invalidated memory
153 (anything that writes memory, and subroutine calls, but not pushes). */
155 static int mem_last_set;
157 /* Record the cuid of the last CALL_INSN
158 so we can tell whether a potential combination crosses any calls. */
160 static int last_call_cuid;
162 /* When `subst' is called, this is the insn that is being modified
163 (by combining in a previous insn). The PATTERN of this insn
164 is still the old pattern partially modified and it should not be
165 looked at, but this may be used to examine the successors of the insn
166 to judge whether a simplification is valid. */
168 static rtx subst_insn;
170 /* This is an insn that belongs before subst_insn, but is not currently
171 on the insn chain. */
173 static rtx subst_prev_insn;
175 /* This is the lowest CUID that `subst' is currently dealing with.
176 get_last_value will not return a value if the register was set at or
177 after this CUID. If not for this mechanism, we could get confused if
178 I2 or I1 in try_combine were an insn that used the old value of a register
179 to obtain a new value. In that case, we might erroneously get the
180 new value of the register when we wanted the old one. */
182 static int subst_low_cuid;
184 /* This contains any hard registers that are used in newpat; reg_dead_at_p
185 must consider all these registers to be always live. */
187 static HARD_REG_SET newpat_used_regs;
189 /* This is an insn to which a LOG_LINKS entry has been added. If this
190 insn is the earlier than I2 or I3, combine should rescan starting at
193 static rtx added_links_insn;
195 /* Basic block number of the block in which we are performing combines. */
196 static int this_basic_block;
198 /* A bitmap indicating which blocks had registers go dead at entry.
199 After combine, we'll need to re-do global life analysis with
200 those blocks as starting points. */
201 static sbitmap refresh_blocks;
202 static int need_refresh;
204 /* The next group of arrays allows the recording of the last value assigned
205 to (hard or pseudo) register n. We use this information to see if a
206 operation being processed is redundant given a prior operation performed
207 on the register. For example, an `and' with a constant is redundant if
208 all the zero bits are already known to be turned off.
210 We use an approach similar to that used by cse, but change it in the
213 (1) We do not want to reinitialize at each label.
214 (2) It is useful, but not critical, to know the actual value assigned
215 to a register. Often just its form is helpful.
217 Therefore, we maintain the following arrays:
219 reg_last_set_value the last value assigned
220 reg_last_set_label records the value of label_tick when the
221 register was assigned
222 reg_last_set_table_tick records the value of label_tick when a
223 value using the register is assigned
224 reg_last_set_invalid set to non-zero when it is not valid
225 to use the value of this register in some
228 To understand the usage of these tables, it is important to understand
229 the distinction between the value in reg_last_set_value being valid
230 and the register being validly contained in some other expression in the
233 Entry I in reg_last_set_value is valid if it is non-zero, and either
234 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
236 Register I may validly appear in any expression returned for the value
237 of another register if reg_n_sets[i] is 1. It may also appear in the
238 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
239 reg_last_set_invalid[j] is zero.
241 If an expression is found in the table containing a register which may
242 not validly appear in an expression, the register is replaced by
243 something that won't match, (clobber (const_int 0)).
245 reg_last_set_invalid[i] is set non-zero when register I is being assigned
246 to and reg_last_set_table_tick[i] == label_tick. */
248 /* Record last value assigned to (hard or pseudo) register n. */
250 static rtx *reg_last_set_value;
252 /* Record the value of label_tick when the value for register n is placed in
253 reg_last_set_value[n]. */
255 static int *reg_last_set_label;
257 /* Record the value of label_tick when an expression involving register n
258 is placed in reg_last_set_value. */
260 static int *reg_last_set_table_tick;
262 /* Set non-zero if references to register n in expressions should not be
265 static char *reg_last_set_invalid;
267 /* Incremented for each label. */
269 static int label_tick;
271 /* Some registers that are set more than once and used in more than one
272 basic block are nevertheless always set in similar ways. For example,
273 a QImode register may be loaded from memory in two places on a machine
274 where byte loads zero extend.
276 We record in the following array what we know about the nonzero
277 bits of a register, specifically which bits are known to be zero.
279 If an entry is zero, it means that we don't know anything special. */
281 static unsigned HOST_WIDE_INT *reg_nonzero_bits;
283 /* Mode used to compute significance in reg_nonzero_bits. It is the largest
284 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
286 static enum machine_mode nonzero_bits_mode;
288 /* Nonzero if we know that a register has some leading bits that are always
289 equal to the sign bit. */
291 static unsigned char *reg_sign_bit_copies;
293 /* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
294 It is zero while computing them and after combine has completed. This
295 former test prevents propagating values based on previously set values,
296 which can be incorrect if a variable is modified in a loop. */
298 static int nonzero_sign_valid;
300 /* These arrays are maintained in parallel with reg_last_set_value
301 and are used to store the mode in which the register was last set,
302 the bits that were known to be zero when it was last set, and the
303 number of sign bits copies it was known to have when it was last set. */
305 static enum machine_mode *reg_last_set_mode;
306 static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
307 static char *reg_last_set_sign_bit_copies;
309 /* Record one modification to rtl structure
310 to be undone by storing old_contents into *where.
311 is_int is 1 if the contents are an int. */
317 union {rtx r; unsigned int i;} old_contents;
318 union {rtx *r; unsigned int *i;} where;
321 /* Record a bunch of changes to be undone, up to MAX_UNDO of them.
322 num_undo says how many are currently recorded.
324 other_insn is nonzero if we have modified some other insn in the process
325 of working on subst_insn. It must be verified too. */
334 static struct undobuf undobuf;
336 /* Number of times the pseudo being substituted for
337 was found and replaced. */
339 static int n_occurrences;
341 static void do_SUBST PARAMS ((rtx *, rtx));
342 static void do_SUBST_INT PARAMS ((unsigned int *,
344 static void init_reg_last_arrays PARAMS ((void));
345 static void setup_incoming_promotions PARAMS ((void));
346 static void set_nonzero_bits_and_sign_copies PARAMS ((rtx, rtx, void *));
347 static int cant_combine_insn_p PARAMS ((rtx));
348 static int can_combine_p PARAMS ((rtx, rtx, rtx, rtx, rtx *, rtx *));
349 static int sets_function_arg_p PARAMS ((rtx));
350 static int combinable_i3pat PARAMS ((rtx, rtx *, rtx, rtx, int, rtx *));
351 static int contains_muldiv PARAMS ((rtx));
352 static rtx try_combine PARAMS ((rtx, rtx, rtx, int *));
353 static void undo_all PARAMS ((void));
354 static void undo_commit PARAMS ((void));
355 static rtx *find_split_point PARAMS ((rtx *, rtx));
356 static rtx subst PARAMS ((rtx, rtx, rtx, int, int));
357 static rtx combine_simplify_rtx PARAMS ((rtx, enum machine_mode, int, int));
358 static rtx simplify_if_then_else PARAMS ((rtx));
359 static rtx simplify_set PARAMS ((rtx));
360 static rtx simplify_logical PARAMS ((rtx, int));
361 static rtx expand_compound_operation PARAMS ((rtx));
362 static rtx expand_field_assignment PARAMS ((rtx));
363 static rtx make_extraction PARAMS ((enum machine_mode, rtx, HOST_WIDE_INT,
364 rtx, unsigned HOST_WIDE_INT, int,
366 static rtx extract_left_shift PARAMS ((rtx, int));
367 static rtx make_compound_operation PARAMS ((rtx, enum rtx_code));
368 static int get_pos_from_mask PARAMS ((unsigned HOST_WIDE_INT,
369 unsigned HOST_WIDE_INT *));
370 static rtx force_to_mode PARAMS ((rtx, enum machine_mode,
371 unsigned HOST_WIDE_INT, rtx, int));
372 static rtx if_then_else_cond PARAMS ((rtx, rtx *, rtx *));
373 static rtx known_cond PARAMS ((rtx, enum rtx_code, rtx, rtx));
374 static int rtx_equal_for_field_assignment_p PARAMS ((rtx, rtx));
375 static rtx make_field_assignment PARAMS ((rtx));
376 static rtx apply_distributive_law PARAMS ((rtx));
377 static rtx simplify_and_const_int PARAMS ((rtx, enum machine_mode, rtx,
378 unsigned HOST_WIDE_INT));
379 static unsigned HOST_WIDE_INT nonzero_bits PARAMS ((rtx, enum machine_mode));
380 static unsigned int num_sign_bit_copies PARAMS ((rtx, enum machine_mode));
381 static int merge_outer_ops PARAMS ((enum rtx_code *, HOST_WIDE_INT *,
382 enum rtx_code, HOST_WIDE_INT,
383 enum machine_mode, int *));
384 static rtx simplify_shift_const PARAMS ((rtx, enum rtx_code, enum machine_mode,
386 static int recog_for_combine PARAMS ((rtx *, rtx, rtx *));
387 static rtx gen_lowpart_for_combine PARAMS ((enum machine_mode, rtx));
388 static rtx gen_binary PARAMS ((enum rtx_code, enum machine_mode,
390 static enum rtx_code simplify_comparison PARAMS ((enum rtx_code, rtx *, rtx *));
391 static void update_table_tick PARAMS ((rtx));
392 static void record_value_for_reg PARAMS ((rtx, rtx, rtx));
393 static void check_promoted_subreg PARAMS ((rtx, rtx));
394 static void record_dead_and_set_regs_1 PARAMS ((rtx, rtx, void *));
395 static void record_dead_and_set_regs PARAMS ((rtx));
396 static int get_last_value_validate PARAMS ((rtx *, rtx, int, int));
397 static rtx get_last_value PARAMS ((rtx));
398 static int use_crosses_set_p PARAMS ((rtx, int));
399 static void reg_dead_at_p_1 PARAMS ((rtx, rtx, void *));
400 static int reg_dead_at_p PARAMS ((rtx, rtx));
401 static void move_deaths PARAMS ((rtx, rtx, int, rtx, rtx *));
402 static int reg_bitfield_target_p PARAMS ((rtx, rtx));
403 static void distribute_notes PARAMS ((rtx, rtx, rtx, rtx, rtx, rtx));
404 static void distribute_links PARAMS ((rtx));
405 static void mark_used_regs_combine PARAMS ((rtx));
406 static int insn_cuid PARAMS ((rtx));
407 static void record_promoted_value PARAMS ((rtx, rtx));
408 static rtx reversed_comparison PARAMS ((rtx, enum machine_mode, rtx, rtx));
409 static enum rtx_code combine_reversed_comparison_code PARAMS ((rtx));
411 /* Substitute NEWVAL, an rtx expression, into INTO, a place in some
412 insn. The substitution can be undone by undo_all. If INTO is already
413 set to NEWVAL, do not record this change. Because computing NEWVAL might
414 also call SUBST, we have to compute it before we put anything into
/* NOTE(review): this listing is incomplete -- the embedded source line
   numbers skip values, so the declarations of BUF and OLDVAL, the
   function braces, and some statements are missing here.  The surviving
   fragments record an "undo" entry so undo_all can later revert the
   store of NEWVAL into *INTO.  Consult the full combine.c for the
   elided lines.  */
418 do_SUBST (into, newval)
/* Do nothing if the replacement would not change anything.  */
424 if (oldval == newval)
/* Reuse an undo record from the free list when one is available ...  */
428 buf = undobuf.frees, undobuf.frees = buf->next;
/* ... otherwise allocate a fresh one.  */
430 buf = (struct undo *) xmalloc (sizeof (struct undo));
/* Remember the old rtx contents so they can be restored.  */
434 buf->old_contents.r = oldval;
/* Chain the record onto the list of pending undos.  */
437 buf->next = undobuf.undos, undobuf.undos = buf;
440 #define SUBST(INTO, NEWVAL) do_SUBST(&(INTO), (NEWVAL))
442 /* Similar to SUBST, but NEWVAL is an int expression. Note that substitution
443 for the value of a HOST_WIDE_INT value (including CONST_INT) is
/* NOTE(review): as elsewhere in this listing, some lines (the BUF
   declaration, braces, early return) are elided; the fragments below
   mirror do_SUBST but store an int rather than an rtx.  */
447 do_SUBST_INT (into, newval)
448 unsigned int *into, newval;
451 unsigned int oldval = *into;
/* Do nothing if the replacement would not change anything.  */
453 if (oldval == newval)
/* Reuse a free undo record when available, else allocate a new one.  */
457 buf = undobuf.frees, undobuf.frees = buf->next;
459 buf = (struct undo *) xmalloc (sizeof (struct undo));
/* Here the old contents are an int, not an rtx (see the old_contents
   union in struct undo above).  */
463 buf->old_contents.i = oldval;
466 buf->next = undobuf.undos, undobuf.undos = buf;
469 #define SUBST_INT(INTO, NEWVAL) do_SUBST_INT(&(INTO), (NEWVAL))
471 /* Main entry point for combiner. F is the first insn of the function.
472 NREGS is the first unused pseudo-reg number.
474 Return non-zero if the combiner has turned an indirect jump
475 instruction into a direct jump. */
/* NOTE(review): this listing is incomplete -- the embedded source line
   numbers skip values, so declarations, braces and some statements are
   missing below.  Do not treat this fragment as compilable; consult the
   full combine.c for the elided lines.  */
477 combine_instructions (f, nregs)
481 register rtx insn, next;
486 register rtx links, nextlinks;
488 int new_direct_jump_p = 0;
/* Reset the per-function statistics counters.  */
490 combine_attempts = 0;
493 combine_successes = 0;
495 combine_max_regno = nregs;
/* Allocate the per-register tracking tables; each is sized by NREGS,
   the number of (hard and pseudo) registers in the function.  */
497 reg_nonzero_bits = ((unsigned HOST_WIDE_INT *)
498 xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT)));
500 = (unsigned char *) xcalloc (nregs, sizeof (unsigned char));
502 reg_last_death = (rtx *) xmalloc (nregs * sizeof (rtx));
503 reg_last_set = (rtx *) xmalloc (nregs * sizeof (rtx));
504 reg_last_set_value = (rtx *) xmalloc (nregs * sizeof (rtx));
505 reg_last_set_table_tick = (int *) xmalloc (nregs * sizeof (int));
506 reg_last_set_label = (int *) xmalloc (nregs * sizeof (int));
507 reg_last_set_invalid = (char *) xmalloc (nregs * sizeof (char));
509 = (enum machine_mode *) xmalloc (nregs * sizeof (enum machine_mode));
510 reg_last_set_nonzero_bits
511 = (unsigned HOST_WIDE_INT *) xmalloc (nregs * sizeof (HOST_WIDE_INT));
512 reg_last_set_sign_bit_copies
513 = (char *) xmalloc (nregs * sizeof (char));
515 init_reg_last_arrays ();
/* Forbid volatile MEMs in recog while combining; re-enabled at exit
   (see the "allow volatile MEMs again" comment near the return).  */
517 init_recog_no_volatile ();
519 /* Compute maximum uid value so uid_cuid can be allocated. */
521 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
522 if (INSN_UID (insn) > i)
525 uid_cuid = (int *) xmalloc ((i + 1) * sizeof (int));
528 nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
530 /* Don't use reg_nonzero_bits when computing it. This can cause problems
531 when, for example, we have j <<= 1 in a loop. */
533 nonzero_sign_valid = 0;
535 /* Compute the mapping from uids to cuids.
536 Cuids are numbers assigned to insns, like uids,
537 except that cuids increase monotonically through the code.
539 Scan all SETs and see if we can deduce anything about what
540 bits are known to be zero for some registers and how many copies
541 of the sign bit are known to exist for those registers.
543 Also set any known values so that we can use it while searching
544 for what bits are known to be set. */
548 /* We need to initialize it here, because record_dead_and_set_regs may call
550 subst_prev_insn = NULL_RTX;
552 setup_incoming_promotions ();
554 refresh_blocks = sbitmap_alloc (n_basic_blocks);
555 sbitmap_zero (refresh_blocks);
/* First pass over the insn stream: assign cuids and gather
   nonzero-bits / sign-bit-copies data from every store and REG_INC
   note.  */
558 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
560 uid_cuid[INSN_UID (insn)] = ++i;
566 note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
568 record_dead_and_set_regs (insn);
571 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
572 if (REG_NOTE_KIND (links) == REG_INC)
573 set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
578 if (GET_CODE (insn) == CODE_LABEL)
/* From here on the tables computed above may be consulted.  */
582 nonzero_sign_valid = 1;
584 /* Now scan all the insns in forward order. */
586 this_basic_block = -1;
590 init_reg_last_arrays ();
591 setup_incoming_promotions ();
593 for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
597 /* If INSN starts a new basic block, update our basic block number. */
598 if (this_basic_block + 1 < n_basic_blocks
599 && BLOCK_HEAD (this_basic_block + 1) == insn)
602 if (GET_CODE (insn) == CODE_LABEL)
605 else if (INSN_P (insn))
607 /* See if we know about function return values before this
608 insn based upon SUBREG flags. */
609 check_promoted_subreg (insn, PATTERN (insn));
611 /* Try this insn with each insn it links back to. */
613 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
614 if ((next = try_combine (insn, XEXP (links, 0),
615 NULL_RTX, &new_direct_jump_p)) != 0)
618 /* Try each sequence of three linked insns ending with this one. */
620 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
622 rtx link = XEXP (links, 0);
624 /* If the linked insn has been replaced by a note, then there
625 is no point in pursuing this chain any further. */
626 if (GET_CODE (link) == NOTE)
629 for (nextlinks = LOG_LINKS (link);
631 nextlinks = XEXP (nextlinks, 1))
632 if ((next = try_combine (insn, XEXP (links, 0),
634 &new_direct_jump_p)) != 0)
639 /* Try to combine a jump insn that uses CC0
640 with a preceding insn that sets CC0, and maybe with its
641 logical predecessor as well.
642 This is how we make decrement-and-branch insns.
643 We need this special code because data flow connections
644 via CC0 do not get entered in LOG_LINKS. */
646 if (GET_CODE (insn) == JUMP_INSN
647 && (prev = prev_nonnote_insn (insn)) != 0
648 && GET_CODE (prev) == INSN
649 && sets_cc0_p (PATTERN (prev)))
651 if ((next = try_combine (insn, prev,
652 NULL_RTX, &new_direct_jump_p)) != 0)
655 for (nextlinks = LOG_LINKS (prev); nextlinks;
656 nextlinks = XEXP (nextlinks, 1))
657 if ((next = try_combine (insn, prev,
659 &new_direct_jump_p)) != 0)
663 /* Do the same for an insn that explicitly references CC0. */
664 if (GET_CODE (insn) == INSN
665 && (prev = prev_nonnote_insn (insn)) != 0
666 && GET_CODE (prev) == INSN
667 && sets_cc0_p (PATTERN (prev))
668 && GET_CODE (PATTERN (insn)) == SET
669 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
671 if ((next = try_combine (insn, prev,
672 NULL_RTX, &new_direct_jump_p)) != 0)
675 for (nextlinks = LOG_LINKS (prev); nextlinks;
676 nextlinks = XEXP (nextlinks, 1))
677 if ((next = try_combine (insn, prev,
679 &new_direct_jump_p)) != 0)
683 /* Finally, see if any of the insns that this insn links to
684 explicitly references CC0. If so, try this insn, that insn,
685 and its predecessor if it sets CC0. */
686 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
687 if (GET_CODE (XEXP (links, 0)) == INSN
688 && GET_CODE (PATTERN (XEXP (links, 0))) == SET
689 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
690 && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
691 && GET_CODE (prev) == INSN
692 && sets_cc0_p (PATTERN (prev))
693 && (next = try_combine (insn, XEXP (links, 0),
694 prev, &new_direct_jump_p)) != 0)
698 /* Try combining an insn with two different insns whose results it
700 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
701 for (nextlinks = XEXP (links, 1); nextlinks;
702 nextlinks = XEXP (nextlinks, 1))
703 if ((next = try_combine (insn, XEXP (links, 0),
705 &new_direct_jump_p)) != 0)
708 if (GET_CODE (insn) != NOTE)
709 record_dead_and_set_regs (insn);
716 delete_noop_moves (f);
/* Re-do life analysis for the blocks recorded in refresh_blocks
   (see its declaration comment above).  */
720 compute_bb_for_insn (get_max_uid ());
721 update_life_info (refresh_blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
/* Release every table allocated at the top of this function.  */
726 sbitmap_free (refresh_blocks);
727 free (reg_nonzero_bits);
728 free (reg_sign_bit_copies);
729 free (reg_last_death);
731 free (reg_last_set_value);
732 free (reg_last_set_table_tick);
733 free (reg_last_set_label);
734 free (reg_last_set_invalid);
735 free (reg_last_set_mode);
736 free (reg_last_set_nonzero_bits);
737 free (reg_last_set_sign_bit_copies);
/* Release the undo records accumulated on the free list.  */
741 struct undo *undo, *next;
742 for (undo = undobuf.frees; undo; undo = next)
/* Accumulate this function's statistics into the compilation totals.  */
750 total_attempts += combine_attempts;
751 total_merges += combine_merges;
752 total_extras += combine_extras;
753 total_successes += combine_successes;
755 nonzero_sign_valid = 0;
757 /* Make recognizer allow volatile MEMs again. */
760 return new_direct_jump_p;
763 /* Wipe the reg_last_xxx arrays in preparation for another pass.
   Zeroes all of the per-register tables allocated in
   combine_instructions; the element sizes used here must stay in sync
   with the xmalloc calls there.  */
766 init_reg_last_arrays ()
768 unsigned int nregs = combine_max_regno;
770 memset ((char *) reg_last_death, 0, nregs * sizeof (rtx));
771 memset ((char *) reg_last_set, 0, nregs * sizeof (rtx));
772 memset ((char *) reg_last_set_value, 0, nregs * sizeof (rtx));
773 memset ((char *) reg_last_set_table_tick, 0, nregs * sizeof (int));
774 memset ((char *) reg_last_set_label, 0, nregs * sizeof (int));
775 memset (reg_last_set_invalid, 0, nregs * sizeof (char));
776 memset ((char *) reg_last_set_mode, 0, nregs * sizeof (enum machine_mode));
777 memset ((char *) reg_last_set_nonzero_bits, 0, nregs * sizeof (HOST_WIDE_INT));
778 memset (reg_last_set_sign_bit_copies, 0, nregs * sizeof (char));
781 /* Set up any promoted values for incoming argument registers. */
/* NOTE(review): lines are elided from this listing; in particular the
   name of the function called with (reg, first, ...) below is missing
   -- presumably a record-value helper such as record_value_for_reg;
   confirm against the full source.  The whole body is compiled only
   when the target defines PROMOTE_FUNCTION_ARGS.  */
784 setup_incoming_promotions ()
786 #ifdef PROMOTE_FUNCTION_ARGS
789 enum machine_mode mode;
791 rtx first = get_insns ();
793 #ifndef OUTGOING_REGNO
794 #define OUTGOING_REGNO(N) N
/* Walk every hard register that could carry an incoming argument.  */
796 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
797 /* Check whether this register can hold an incoming pointer
798 argument. FUNCTION_ARG_REGNO_P tests outgoing register
799 numbers, so translate if necessary due to register windows. */
800 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
801 && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
804 (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
807 gen_rtx_CLOBBER (mode, const0_rtx)));
812 /* Called via note_stores. If X is a pseudo that is narrower than
813 HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.
815 If we are setting only a portion of X and we can't figure out what
816 portion, assume all bits will be used since we don't know what will
819 Similarly, set how many bits of X are known to be copies of the sign bit
820 at all locations in the function. This is the smallest number implied
/* NOTE(review): lines are elided from this listing (declarations such
   as NUM, braces, and some else-branches); the fragments below update
   reg_nonzero_bits[] and reg_sign_bit_copies[] for pseudo register X
   being stored by SET.  */
824 set_nonzero_bits_and_sign_copies (x, set, data)
827 void *data ATTRIBUTE_UNUSED;
/* Only track pseudos narrow enough for a HOST_WIDE_INT mask.  */
831 if (GET_CODE (x) == REG
832 && REGNO (x) >= FIRST_PSEUDO_REGISTER
833 /* If this register is undefined at the start of the file, we can't
834 say what its contents were. */
835 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, REGNO (x))
836 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
/* A clobber (or an unknown store) invalidates everything we know:
   all bits possibly nonzero, only the trivial one sign-bit copy.  */
838 if (set == 0 || GET_CODE (set) == CLOBBER)
840 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
841 reg_sign_bit_copies[REGNO (x)] = 1;
845 /* If this is a complex assignment, see if we can convert it into a
846 simple assignment. */
847 set = expand_field_assignment (set);
849 /* If this is a simple assignment, or we have a paradoxical SUBREG,
850 set what we know about X. */
852 if (SET_DEST (set) == x
853 || (GET_CODE (SET_DEST (set)) == SUBREG
854 && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
855 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
856 && SUBREG_REG (SET_DEST (set)) == x))
858 rtx src = SET_SRC (set);
860 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
861 /* If X is narrower than a word and SRC is a non-negative
862 constant that would appear negative in the mode of X,
863 sign-extend it for use in reg_nonzero_bits because some
864 machines (maybe most) will actually do the sign-extension
865 and this is the conservative approach.
867 ??? For 2.5, try to tighten up the MD files in this regard
868 instead of this kludge. */
870 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
871 && GET_CODE (src) == CONST_INT
873 && 0 != (INTVAL (src)
875 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
876 src = GEN_INT (INTVAL (src)
877 | ((HOST_WIDE_INT) (-1)
878 << GET_MODE_BITSIZE (GET_MODE (x))));
/* Merge in the new information: OR in possibly-nonzero bits, and
   keep the smaller of the old and new sign-bit-copy counts.  */
881 reg_nonzero_bits[REGNO (x)]
882 |= nonzero_bits (src, nonzero_bits_mode);
883 num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
884 if (reg_sign_bit_copies[REGNO (x)] == 0
885 || reg_sign_bit_copies[REGNO (x)] > num)
886 reg_sign_bit_copies[REGNO (x)] = num;
/* Partial store we can't analyze: assume the worst case.  */
890 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
891 reg_sign_bit_copies[REGNO (x)] = 1;
896 /* See if INSN can be combined into I3. PRED and SUCC are optionally
897 insns that were previously combined into I3 or that will be combined
898 into the merger of INSN and I3.
900 Return 0 if the combination is not allowed for any reason.
902 If the combination is allowed, *PDEST will be set to the single
903 destination of INSN and *PSRC to the single source, and this function
907 can_combine_p (insn, i3, pred, succ, pdest, psrc)
910 rtx pred ATTRIBUTE_UNUSED;
915 rtx set = 0, src, dest;
920 int all_adjacent = (succ ? (next_active_insn (insn) == succ
921 && next_active_insn (succ) == i3)
922 : next_active_insn (insn) == i3);
924 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
925 or a PARALLEL consisting of such a SET and CLOBBERs.
927 If INSN has CLOBBER parallel parts, ignore them for our processing.
928 By definition, these happen during the execution of the insn. When it
929 is merged with another insn, all bets are off. If they are, in fact,
930 needed and aren't also supplied in I3, they may be added by
931 recog_for_combine. Otherwise, it won't match.
933 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
936 Get the source and destination of INSN. If more than one, can't
939 if (GET_CODE (PATTERN (insn)) == SET)
940 set = PATTERN (insn);
941 else if (GET_CODE (PATTERN (insn)) == PARALLEL
942 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
944 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
946 rtx elt = XVECEXP (PATTERN (insn), 0, i);
948 switch (GET_CODE (elt))
950 /* This is important to combine floating point insns
953 /* Combining an isolated USE doesn't make sense.
954 We depend here on combinable_i3_pat to reject them. */
955 /* The code below this loop only verifies that the inputs of
956 the SET in INSN do not change. We call reg_set_between_p
957 to verify that the REG in the USE does not change between
959 If the USE in INSN was for a pseudo register, the matching
960 insn pattern will likely match any register; combining this
961 with any other USE would only be safe if we knew that the
962 used registers have identical values, or if there was
963 something to tell them apart, e.g. different modes. For
964 now, we forgo such complicated tests and simply disallow
965 combining of USES of pseudo registers with any other USE. */
966 if (GET_CODE (XEXP (elt, 0)) == REG
967 && GET_CODE (PATTERN (i3)) == PARALLEL)
969 rtx i3pat = PATTERN (i3);
970 int i = XVECLEN (i3pat, 0) - 1;
971 unsigned int regno = REGNO (XEXP (elt, 0));
975 rtx i3elt = XVECEXP (i3pat, 0, i);
977 if (GET_CODE (i3elt) == USE
978 && GET_CODE (XEXP (i3elt, 0)) == REG
979 && (REGNO (XEXP (i3elt, 0)) == regno
980 ? reg_set_between_p (XEXP (elt, 0),
981 PREV_INSN (insn), i3)
982 : regno >= FIRST_PSEUDO_REGISTER))
989 /* We can ignore CLOBBERs. */
994 /* Ignore SETs whose result isn't used but not those that
995 have side-effects. */
996 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
997 && ! side_effects_p (elt))
1000 /* If we have already found a SET, this is a second one and
1001 so we cannot combine with this insn. */
1009 /* Anything else means we can't combine. */
1015 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
1016 so don't do anything with it. */
1017 || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
1026 set = expand_field_assignment (set);
1027 src = SET_SRC (set), dest = SET_DEST (set);
1029 /* Don't eliminate a store in the stack pointer. */
1030 if (dest == stack_pointer_rtx
1031 /* If we couldn't eliminate a field assignment, we can't combine. */
1032 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
1033 /* Don't combine with an insn that sets a register to itself if it has
1034 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
1035 || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1036 /* Can't merge an ASM_OPERANDS. */
1037 || GET_CODE (src) == ASM_OPERANDS
1038 /* Can't merge a function call. */
1039 || GET_CODE (src) == CALL
1040 /* Don't eliminate a function call argument. */
1041 || (GET_CODE (i3) == CALL_INSN
1042 && (find_reg_fusage (i3, USE, dest)
1043 || (GET_CODE (dest) == REG
1044 && REGNO (dest) < FIRST_PSEUDO_REGISTER
1045 && global_regs[REGNO (dest)])))
1046 /* Don't substitute into an incremented register. */
1047 || FIND_REG_INC_NOTE (i3, dest)
1048 || (succ && FIND_REG_INC_NOTE (succ, dest))
1050 /* Don't combine the end of a libcall into anything. */
1051 /* ??? This gives worse code, and appears to be unnecessary, since no
1052 pass after flow uses REG_LIBCALL/REG_RETVAL notes. Local-alloc does
1053 use REG_RETVAL notes for noconflict blocks, but other code here
1054 makes sure that those insns don't disappear. */
1055 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
1057 /* Make sure that DEST is not used after SUCC but before I3. */
1058 || (succ && ! all_adjacent
1059 && reg_used_between_p (dest, succ, i3))
1060 /* Make sure that the value that is to be substituted for the register
1061 does not use any registers whose values alter in between. However,
1062 If the insns are adjacent, a use can't cross a set even though we
1063 think it might (this can happen for a sequence of insns each setting
1064 the same destination; reg_last_set of that register might point to
1065 a NOTE). If INSN has a REG_EQUIV note, the register is always
1066 equivalent to the memory so the substitution is valid even if there
1067 are intervening stores. Also, don't move a volatile asm or
1068 UNSPEC_VOLATILE across any other insns. */
1070 && (((GET_CODE (src) != MEM
1071 || ! find_reg_note (insn, REG_EQUIV, src))
1072 && use_crosses_set_p (src, INSN_CUID (insn)))
1073 || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
1074 || GET_CODE (src) == UNSPEC_VOLATILE))
1075 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
1076 better register allocation by not doing the combine. */
1077 || find_reg_note (i3, REG_NO_CONFLICT, dest)
1078 || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
1079 /* Don't combine across a CALL_INSN, because that would possibly
1080 change whether the life span of some REGs crosses calls or not,
1081 and it is a pain to update that information.
1082 Exception: if source is a constant, moving it later can't hurt.
1083 Accept that special case, because it helps -fforce-addr a lot. */
1084 || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
1087 /* DEST must either be a REG or CC0. */
1088 if (GET_CODE (dest) == REG)
1090 /* If register alignment is being enforced for multi-word items in all
1091 cases except for parameters, it is possible to have a register copy
1092 insn referencing a hard register that is not allowed to contain the
1093 mode being copied and which would not be valid as an operand of most
1094 insns. Eliminate this problem by not combining with such an insn.
1096 Also, on some machines we don't want to extend the life of a hard
1099 if (GET_CODE (src) == REG
1100 && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
1101 && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
1102 /* Don't extend the life of a hard register unless it is
1103 user variable (if we have few registers) or it can't
1104 fit into the desired register (meaning something special
1106 Also avoid substituting a return register into I3, because
1107 reload can't handle a conflict with constraints of other
1109 || (REGNO (src) < FIRST_PSEUDO_REGISTER
1110 && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
1113 else if (GET_CODE (dest) != CC0)
1116 /* Don't substitute for a register intended as a clobberable operand.
1117 Similarly, don't substitute an expression containing a register that
1118 will be clobbered in I3. */
1119 if (GET_CODE (PATTERN (i3)) == PARALLEL)
1120 for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
1121 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
1122 && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
1124 || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
1127 /* If INSN contains anything volatile, or is an `asm' (whether volatile
1128 or not), reject, unless nothing volatile comes between it and I3 */
1130 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
1132 /* Make sure succ doesn't contain a volatile reference. */
1133 if (succ != 0 && volatile_refs_p (PATTERN (succ)))
1136 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1137 if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
1141 /* If INSN is an asm, and DEST is a hard register, reject, since it has
1142 to be an explicit register variable, and was chosen for a reason. */
1144 if (GET_CODE (src) == ASM_OPERANDS
1145 && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
1148 /* If there are any volatile insns between INSN and I3, reject, because
1149 they might affect machine state. */
1151 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1152 if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
1155 /* If INSN or I2 contains an autoincrement or autodecrement,
1156 make sure that register is not used between there and I3,
1157 and not already used in I3 either.
1158 Also insist that I3 not be a jump; if it were one
1159 and the incremented register were spilled, we would lose. */
1162 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1163 if (REG_NOTE_KIND (link) == REG_INC
1164 && (GET_CODE (i3) == JUMP_INSN
1165 || reg_used_between_p (XEXP (link, 0), insn, i3)
1166 || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
1171 /* Don't combine an insn that follows a CC0-setting insn.
1172 An insn that uses CC0 must not be separated from the one that sets it.
1173 We do, however, allow I2 to follow a CC0-setting insn if that insn
1174 is passed as I1; in that case it will be deleted also.
1175 We also allow combining in this case if all the insns are adjacent
1176 because that would leave the two CC0 insns adjacent as well.
1177 It would be more logical to test whether CC0 occurs inside I1 or I2,
1178 but that would be much slower, and this ought to be equivalent. */
1180 p = prev_nonnote_insn (insn);
1181 if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
1186 /* If we get here, we have passed all the tests and the combination is
1195 /* Check if PAT is an insn - or a part of it - used to set up an
1196 argument for a function in a hard register. */
/* Returns nonzero when PAT ultimately SETs a hard register that
   FUNCTION_ARG_REGNO_P accepts, i.e. a register used to pass a
   function argument on this target.
   NOTE(review): this listing elides the switch's case labels; the
   recursion below implies cases for INSN, PARALLEL, and SET —
   confirm against the full source. */
1199 sets_function_arg_p (pat)
1205 switch (GET_CODE (pat))
/* Whole insn: recurse on its pattern. */
1208 return sets_function_arg_p (PATTERN (pat));
/* PARALLEL: true if any element of the vector qualifies. */
1211 for (i = XVECLEN (pat, 0); --i >= 0;)
1212 if (sets_function_arg_p (XVECEXP (pat, 0, i)))
/* SET: peel wrappers off the destination to expose the real
   register being assigned. */
1218 inner_dest = SET_DEST (pat);
1219 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1220 || GET_CODE (inner_dest) == SUBREG
1221 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1222 inner_dest = XEXP (inner_dest, 0);
/* Qualifies only if the stripped destination is a hard register
   (REGNO below FIRST_PSEUDO_REGISTER) that can carry an argument. */
1224 return (GET_CODE (inner_dest) == REG
1225 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1226 && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));
1235 /* LOC is the location within I3 that contains its pattern or the component
1236 of a PARALLEL of the pattern. We validate that it is valid for combining.
1238 One problem is if I3 modifies its output, as opposed to replacing it
1239 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1240 so would produce an insn that is not equivalent to the original insns.
1244 (set (reg:DI 101) (reg:DI 100))
1245 (set (subreg:SI (reg:DI 101) 0) <foo>)
1247 This is NOT equivalent to:
1249 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1250 (set (reg:DI 101) (reg:DI 100))])
1252 Not only does this modify 100 (in which case it might still be valid
1253 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1255 We can also run into a problem if I2 sets a register that I1
1256 uses and I1 gets directly substituted into I3 (not via I2). In that
1257 case, we would be getting the wrong value of I2DEST into I3, so we
1258 must reject the combination. This case occurs when I2 and I1 both
1259 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1260 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
1261 of a SET must prevent combination from occurring.
1263 Before doing the above check, we first try to expand a field assignment
1264 into a set of logical operations.
1266 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
1267 we place a register that is both set and used within I3. If more than one
1268 such register is detected, we fail.
1270 Return 1 if the combination is valid, zero otherwise. */
/* NOTE(review): this listing elides the parameter declarations other
   than pi3dest_killed, and the `rtx x = *loc;` that the body's `x`
   presumably comes from — confirm against the full source. */
1273 combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
1279 rtx *pi3dest_killed;
/* A single SET: apply all the rejection tests described above. */
1283 if (GET_CODE (x) == SET)
/* Canonicalize a field assignment into plain logical ops first. */
1285 rtx set = expand_field_assignment (x);
1286 rtx dest = SET_DEST (set);
1287 rtx src = SET_SRC (set);
1288 rtx inner_dest = dest;
1291 rtx inner_src = src;
/* Strip wrappers so we test the underlying destination register. */
1296 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1297 || GET_CODE (inner_dest) == SUBREG
1298 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1299 inner_dest = XEXP (inner_dest, 0);
1301 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
1304 while (GET_CODE (inner_src) == STRICT_LOW_PART
1305 || GET_CODE (inner_src) == SUBREG
1306 || GET_CODE (inner_src) == ZERO_EXTRACT)
1307 inner_src = XEXP (inner_src, 0);
1309 /* If it is better that two different modes keep two different pseudos,
1310 avoid combining them. This avoids producing the following pattern
1312 (set (subreg:SI (reg/v:QI 21) 0)
1313 (lshiftrt:SI (reg/v:SI 20)
1315 If that were made, reload could not handle the pair of
1316 reg 20/21, since it would try to get any GENERAL_REGS
1317 but some of them don't handle QImode. */
1319 if (rtx_equal_p (inner_src, i2dest)
1320 && GET_CODE (inner_dest) == REG
1321 && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
1325 /* Check for the case where I3 modifies its output, as
/* Reject: I3 only partially overwrites a destination that mentions
   I2DEST or I1DEST (see the (subreg ...) example in the header). */
1327 if ((inner_dest != dest
1328 && (reg_overlap_mentioned_p (i2dest, inner_dest)
1329 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
1331 /* This is the same test done in can_combine_p except we can't test
1332 all_adjacent; we don't have to, since this instruction will stay
1333 in place, thus we are not considering increasing the lifetime of
1336 Also, if this insn sets a function argument, combining it with
1337 something that might need a spill could clobber a previous
1338 function argument; the all_adjacent test in can_combine_p also
1339 checks this; here, we do a more specific test for this case. */
/* Reject: destination is a hard register that cannot hold this mode. */
1341 || (GET_CODE (inner_dest) == REG
1342 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1343 && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1344 GET_MODE (inner_dest))))
/* Reject: caller said finding I1DEST in a SET source must block
   the combination (I1 and I2 both feed I3 directly). */
1345 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1348 /* If DEST is used in I3, it is being killed in this insn,
1349 so record that for later.
1350 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1351 STACK_POINTER_REGNUM, since these are always considered to be
1352 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
1353 if (pi3dest_killed && GET_CODE (dest) == REG
1354 && reg_referenced_p (dest, PATTERN (i3))
1355 && REGNO (dest) != FRAME_POINTER_REGNUM
1356 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1357 && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
1359 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1360 && (REGNO (dest) != ARG_POINTER_REGNUM
1361 || ! fixed_regs [REGNO (dest)])
1363 && REGNO (dest) != STACK_POINTER_REGNUM)
/* Only one such set-and-used register is allowed; a second one
   makes the combination fail (per the header comment). */
1365 if (*pi3dest_killed)
1368 *pi3dest_killed = dest;
/* A PARALLEL: every element must itself be combinable. */
1372 else if (GET_CODE (x) == PARALLEL)
1376 for (i = 0; i < XVECLEN (x, 0); i++)
1377 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1378 i1_not_in_src, pi3dest_killed))
1385 /* Return 1 if X is an arithmetic expression that contains a multiplication
1386 and division. We don't count multiplications by powers of two here. */
/* NOTE(review): the function header, `return 1;` after the
   MOD/DIV cases, and the MULT/'1'/default case labels are elided
   from this listing — confirm against the full source. */
1392 switch (GET_CODE (x))
/* Any division or modulus qualifies outright. */
1394 case MOD: case DIV: case UMOD: case UDIV:
/* A multiply counts only when its second operand is NOT a
   power-of-two constant (exact_log2 >= 0 means a power of two,
   which targets typically implement as a cheap shift). */
1398 return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
1399 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
/* Otherwise recurse structurally by rtx class: commutative,
   comparison, and two-operand codes check both operands ... */
1401 switch (GET_RTX_CLASS (GET_CODE (x)))
1403 case 'c': case '<': case '2':
1404 return contains_muldiv (XEXP (x, 0))
1405 || contains_muldiv (XEXP (x, 1));
/* ... one-operand codes check the single operand. */
1408 return contains_muldiv (XEXP (x, 0));
1416 /* Determine whether INSN can be used in a combination. Return nonzero if
1417 not. This is used in try_combine to detect early some cases where we
1418 can't perform combinations. */
1421 cant_combine_insn_p (insn)
1427 /* If this isn't really an insn, we can't do anything.
1428 This can occur when flow deletes an insn that it has merged into an
1429 auto-increment address. */
1430 if (! INSN_P (insn))
1433 /* Never combine loads and stores involving hard regs. The register
1434 allocator can usually handle such reg-reg moves by tying. If we allow
1435 the combiner to make substitutions of hard regs, we risk aborting in
1436 reload on machines that have SMALL_REGISTER_CLASSES.
1437 As an exception, we allow combinations involving fixed regs; these are
1438 not available to the register allocator so there's no risk involved. */
/* NOTE(review): the null-check on `set` between lines 1440 and 1443
   is elided from this listing — confirm against the full source. */
1440 set = single_set (insn);
1443 src = SET_SRC (set);
1444 dest = SET_DEST (set);
/* Look through SUBREGs so the hard-reg test sees the real registers. */
1445 if (GET_CODE (src) == SUBREG)
1446 src = SUBREG_REG (src);
1447 if (GET_CODE (dest) == SUBREG)
1448 dest = SUBREG_REG (dest);
/* Reject a reg-reg move where either side is a non-fixed hard
   register (fixed regs are exempt, per the comment above). */
1449 if (REG_P (src) && REG_P (dest)
1450 && ((REGNO (src) < FIRST_PSEUDO_REGISTER
1451 && ! fixed_regs[REGNO (src)])
1452 || (REGNO (dest) < FIRST_PSEUDO_REGISTER
1453 && ! fixed_regs[REGNO (dest)])))
1459 /* Try to combine the insns I1 and I2 into I3.
1460 Here I1 and I2 appear earlier than I3.
1461 I1 can be zero; then we combine just I2 into I3.
1463 If we are combining three insns and the resulting insn is not recognized,
1464 try splitting it into two insns. If that happens, I2 and I3 are retained
1465 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1468 Return 0 if the combination does not work. Then nothing is changed.
1469 If we did the combination, return the insn at which combine should
1472 Set NEW_DIRECT_JUMP_P to a non-zero value if try_combine creates a
1473 new direct jump instruction. */
1476 try_combine (i3, i2, i1, new_direct_jump_p)
1477 register rtx i3, i2, i1;
1478 register int *new_direct_jump_p;
1480 /* New patterns for I3 and I2, respectively. */
1481 rtx newpat, newi2pat = 0;
1482 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1483 int added_sets_1, added_sets_2;
1484 /* Total number of SETs to put into I3. */
1486 /* Nonzero if I2's body now appears in I3. */
1488 /* INSN_CODEs for new I3, new I2, and user of condition code. */
1489 int insn_code_number, i2_code_number = 0, other_code_number = 0;
1490 /* Contains I3 if the destination of I3 is used in its source, which means
1491 that the old life of I3 is being killed. If that usage is placed into
1492 I2 and not in I3, a REG_DEAD note must be made. */
1493 rtx i3dest_killed = 0;
1494 /* SET_DEST and SET_SRC of I2 and I1. */
1495 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1496 /* PATTERN (I2), or a copy of it in certain cases. */
1498 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
1499 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1500 int i1_feeds_i3 = 0;
1501 /* Notes that must be added to REG_NOTES in I3 and I2. */
1502 rtx new_i3_notes, new_i2_notes;
1503 /* Notes that we substituted I3 into I2 instead of the normal case. */
1504 int i3_subst_into_i2 = 0;
1505 /* Notes that I1, I2 or I3 is a MULT operation. */
1513 /* Exit early if one of the insns involved can't be used for
1515 if (cant_combine_insn_p (i3)
1516 || cant_combine_insn_p (i2)
1517 || (i1 && cant_combine_insn_p (i1))
1518 /* We also can't do anything if I3 has a
1519 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1522 /* ??? This gives worse code, and appears to be unnecessary, since no
1523 pass after flow uses REG_LIBCALL/REG_RETVAL notes. */
1524 || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1530 undobuf.other_insn = 0;
1532 /* Reset the hard register usage information. */
1533 CLEAR_HARD_REG_SET (newpat_used_regs);
1535 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1536 code below, set I1 to be the earlier of the two insns. */
1537 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1538 temp = i1, i1 = i2, i2 = temp;
1540 added_links_insn = 0;
1542 /* First check for one important special-case that the code below will
1543 not handle. Namely, the case where I1 is zero, I2 is a PARALLEL
1544 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1545 we may be able to replace that destination with the destination of I3.
1546 This occurs in the common code where we compute both a quotient and
1547 remainder into a structure, in which case we want to do the computation
1548 directly into the structure to avoid register-register copies.
1550 Note that this case handles both multiple sets in I2 and also
1551 cases where I2 has a number of CLOBBER or PARALLELs.
1553 We make very conservative checks below and only try to handle the
1554 most common cases of this. For example, we only handle the case
1555 where I2 and I3 are adjacent to avoid making difficult register
1558 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1559 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1560 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1561 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1562 && GET_CODE (PATTERN (i2)) == PARALLEL
1563 && ! side_effects_p (SET_DEST (PATTERN (i3)))
1564 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1565 below would need to check what is inside (and reg_overlap_mentioned_p
1566 doesn't support those codes anyway). Don't allow those destinations;
1567 the resulting insn isn't likely to be recognized anyway. */
1568 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1569 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1570 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1571 SET_DEST (PATTERN (i3)))
1572 && next_real_insn (i2) == i3)
1574 rtx p2 = PATTERN (i2);
1576 /* Make sure that the destination of I3,
1577 which we are going to substitute into one output of I2,
1578 is not used within another output of I2. We must avoid making this:
1579 (parallel [(set (mem (reg 69)) ...)
1580 (set (reg 69) ...)])
1581 which is not well-defined as to order of actions.
1582 (Besides, reload can't handle output reloads for this.)
1584 The problem can also happen if the dest of I3 is a memory ref,
1585 if another dest in I2 is an indirect memory ref. */
1586 for (i = 0; i < XVECLEN (p2, 0); i++)
1587 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1588 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1589 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1590 SET_DEST (XVECEXP (p2, 0, i))))
1593 if (i == XVECLEN (p2, 0))
1594 for (i = 0; i < XVECLEN (p2, 0); i++)
1595 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1596 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1597 && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1602 subst_low_cuid = INSN_CUID (i2);
1604 added_sets_2 = added_sets_1 = 0;
1605 i2dest = SET_SRC (PATTERN (i3));
1607 /* Replace the dest in I2 with our dest and make the resulting
1608 insn the new pattern for I3. Then skip to where we
1609 validate the pattern. Everything was set up above. */
1610 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1611 SET_DEST (PATTERN (i3)));
1614 i3_subst_into_i2 = 1;
1615 goto validate_replacement;
1619 /* If I2 is setting a double-word pseudo to a constant and I3 is setting
1620 one of those words to another constant, merge them by making a new
1623 && (temp = single_set (i2)) != 0
1624 && (GET_CODE (SET_SRC (temp)) == CONST_INT
1625 || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
1626 && GET_CODE (SET_DEST (temp)) == REG
1627 && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
1628 && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
1629 && GET_CODE (PATTERN (i3)) == SET
1630 && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
1631 && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
1632 && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
1633 && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
1634 && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
1636 HOST_WIDE_INT lo, hi;
1638 if (GET_CODE (SET_SRC (temp)) == CONST_INT)
1639 lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
1642 lo = CONST_DOUBLE_LOW (SET_SRC (temp));
1643 hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
1646 if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
1648 /* We don't handle the case of the target word being wider
1649 than a host wide int. */
1650 if (HOST_BITS_PER_WIDE_INT < BITS_PER_WORD)
1653 lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
1654 lo |= INTVAL (SET_SRC (PATTERN (i3)));
1656 else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1657 hi = INTVAL (SET_SRC (PATTERN (i3)));
1658 else if (HOST_BITS_PER_WIDE_INT >= 2 * BITS_PER_WORD)
1660 int sign = -(int) ((unsigned HOST_WIDE_INT) lo
1661 >> (HOST_BITS_PER_WIDE_INT - 1));
1663 lo &= ~ (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1664 (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1665 lo |= (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1666 (INTVAL (SET_SRC (PATTERN (i3)))));
1668 hi = lo < 0 ? -1 : 0;
1671 /* We don't handle the case of the higher word not fitting
1672 entirely in either hi or lo. */
1677 subst_low_cuid = INSN_CUID (i2);
1678 added_sets_2 = added_sets_1 = 0;
1679 i2dest = SET_DEST (temp);
1681 SUBST (SET_SRC (temp),
1682 immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));
1684 newpat = PATTERN (i2);
1685 goto validate_replacement;
1689 /* If we have no I1 and I2 looks like:
1690 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1692 make up a dummy I1 that is
1695 (set (reg:CC X) (compare:CC Y (const_int 0)))
1697 (We can ignore any trailing CLOBBERs.)
1699 This undoes a previous combination and allows us to match a branch-and-
1702 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1703 && XVECLEN (PATTERN (i2), 0) >= 2
1704 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1705 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1707 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1708 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1709 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1710 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1711 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1712 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1714 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1715 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1720 /* We make I1 with the same INSN_UID as I2. This gives it
1721 the same INSN_CUID for value tracking. Our fake I1 will
1722 never appear in the insn stream so giving it the same INSN_UID
1723 as I2 will not cause a problem. */
1725 subst_prev_insn = i1
1726 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1727 XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1730 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1731 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1732 SET_DEST (PATTERN (i1)));
1737 /* Verify that I2 and I1 are valid for combining. */
1738 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1739 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1745 /* Record whether I2DEST is used in I2SRC and similarly for the other
1746 cases. Knowing this will help in register status updating below. */
1747 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1748 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1749 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1751 /* See if I1 directly feeds into I3. It does if I1DEST is not used
1753 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1755 /* Ensure that I3's pattern can be the destination of combines. */
1756 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1757 i1 && i2dest_in_i1src && i1_feeds_i3,
1764 /* See if any of the insns is a MULT operation. Unless one is, we will
1765 reject a combination that is, since it must be slower. Be conservative
1767 if (GET_CODE (i2src) == MULT
1768 || (i1 != 0 && GET_CODE (i1src) == MULT)
1769 || (GET_CODE (PATTERN (i3)) == SET
1770 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1773 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1774 We used to do this EXCEPT in one case: I3 has a post-inc in an
1775 output operand. However, that exception can give rise to insns like
1777 which is a famous insn on the PDP-11 where the value of r3 used as the
1778 source was model-dependent. Avoid this sort of thing. */
1781 if (!(GET_CODE (PATTERN (i3)) == SET
1782 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1783 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1784 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1785 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1786 /* It's not the exception. */
1789 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1790 if (REG_NOTE_KIND (link) == REG_INC
1791 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1793 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1800 /* See if the SETs in I1 or I2 need to be kept around in the merged
1801 instruction: whenever the value set there is still needed past I3.
1802 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1804 For the SET in I1, we have two cases: If I1 and I2 independently
1805 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1806 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1807 in I1 needs to be kept around unless I1DEST dies or is set in either
1808 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1809 I1DEST. If so, we know I1 feeds into I2. */
1811 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1814 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1815 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1817 /* If the set in I2 needs to be kept around, we must make a copy of
1818 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1819 PATTERN (I2), we are only substituting for the original I1DEST, not into
1820 an already-substituted copy. This also prevents making self-referential
1821 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1824 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1825 ? gen_rtx_SET (VOIDmode, i2dest, i2src)
1829 i2pat = copy_rtx (i2pat);
1833 /* Substitute in the latest insn for the regs set by the earlier ones. */
1835 maxreg = max_reg_num ();
1839 /* It is possible that the source of I2 or I1 may be performing an
1840 unneeded operation, such as a ZERO_EXTEND of something that is known
1841 to have the high part zero. Handle that case by letting subst look at
1842 the innermost one of them.
1844 Another way to do this would be to have a function that tries to
1845 simplify a single insn instead of merging two or more insns. We don't
1846 do this because of the potential of infinite loops and because
1847 of the potential extra memory required. However, doing it the way
1848 we are is a bit of a kludge and doesn't catch all cases.
1850 But only do this if -fexpensive-optimizations since it slows things down
1851 and doesn't usually win. */
1853 if (flag_expensive_optimizations)
1855 /* Pass pc_rtx so no substitutions are done, just simplifications.
1856 The cases that we are interested in here do not involve the few
1857 cases were is_replaced is checked. */
1860 subst_low_cuid = INSN_CUID (i1);
1861 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1865 subst_low_cuid = INSN_CUID (i2);
1866 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1871 /* Many machines that don't use CC0 have insns that can both perform an
1872 arithmetic operation and set the condition code. These operations will
1873 be represented as a PARALLEL with the first element of the vector
1874 being a COMPARE of an arithmetic operation with the constant zero.
1875 The second element of the vector will set some pseudo to the result
1876 of the same arithmetic operation. If we simplify the COMPARE, we won't
1877 match such a pattern and so will generate an extra insn. Here we test
1878 for this case, where both the comparison and the operation result are
1879 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1880 I2SRC. Later we will make the PARALLEL that contains I2. */
1882 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1883 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1884 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1885 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1887 #ifdef EXTRA_CC_MODES
1889 enum machine_mode compare_mode;
1892 newpat = PATTERN (i3);
1893 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1897 #ifdef EXTRA_CC_MODES
1898 /* See if a COMPARE with the operand we substituted in should be done
1899 with the mode that is currently being used. If not, do the same
1900 processing we do in `subst' for a SET; namely, if the destination
1901 is used only once, try to replace it with a register of the proper
1902 mode and also replace the COMPARE. */
1903 if (undobuf.other_insn == 0
1904 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1905 &undobuf.other_insn))
1906 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1908 != GET_MODE (SET_DEST (newpat))))
1910 unsigned int regno = REGNO (SET_DEST (newpat));
1911 rtx new_dest = gen_rtx_REG (compare_mode, regno);
1913 if (regno < FIRST_PSEUDO_REGISTER
1914 || (REG_N_SETS (regno) == 1 && ! added_sets_2
1915 && ! REG_USERVAR_P (SET_DEST (newpat))))
1917 if (regno >= FIRST_PSEUDO_REGISTER)
1918 SUBST (regno_reg_rtx[regno], new_dest);
1920 SUBST (SET_DEST (newpat), new_dest);
1921 SUBST (XEXP (*cc_use, 0), new_dest);
1922 SUBST (SET_SRC (newpat),
1923 gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
1926 undobuf.other_insn = 0;
1933 n_occurrences = 0; /* `subst' counts here */
1935 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1936 need to make a unique copy of I2SRC each time we substitute it
1937 to avoid self-referential rtl. */
1939 subst_low_cuid = INSN_CUID (i2);
1940 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1941 ! i1_feeds_i3 && i1dest_in_i1src);
1943 /* Record whether i2's body now appears within i3's body. */
1944 i2_is_used = n_occurrences;
1947 /* If we already got a failure, don't try to do more. Otherwise,
1948 try to substitute in I1 if we have it. */
1950 if (i1 && GET_CODE (newpat) != CLOBBER)
1952 /* Before we can do this substitution, we must redo the test done
1953 above (see detailed comments there) that ensures that I1DEST
1954 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1956 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1964 subst_low_cuid = INSN_CUID (i1);
1965 newpat = subst (newpat, i1dest, i1src, 0, 0);
1968 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1969 to count all the ways that I2SRC and I1SRC can be used. */
1970 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1971 && i2_is_used + added_sets_2 > 1)
1972 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1973 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1975 /* Fail if we tried to make a new register (we used to abort, but there's
1976 really no reason to). */
1977 || max_reg_num () != maxreg
1978 /* Fail if we couldn't do something and have a CLOBBER. */
1979 || GET_CODE (newpat) == CLOBBER
1980 /* Fail if this new pattern is a MULT and we didn't have one before
1981 at the outer level. */
1982 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1989 /* If the actions of the earlier insns must be kept
1990 in addition to substituting them into the latest one,
1991 we must make a new PARALLEL for the latest insn
1992 to hold the additional SETs. */
1994 if (added_sets_1 || added_sets_2)
1998 if (GET_CODE (newpat) == PARALLEL)
2000 rtvec old = XVEC (newpat, 0);
2001 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
2002 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2003 memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
2004 sizeof (old->elem[0]) * old->num_elem);
2009 total_sets = 1 + added_sets_1 + added_sets_2;
2010 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2011 XVECEXP (newpat, 0, 0) = old;
2015 XVECEXP (newpat, 0, --total_sets)
2016 = (GET_CODE (PATTERN (i1)) == PARALLEL
2017 ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
2021 /* If there is no I1, use I2's body as is. We used to also not do
2022 the subst call below if I2 was substituted into I3,
2023 but that could lose a simplification. */
2025 XVECEXP (newpat, 0, --total_sets) = i2pat;
2027 /* See comment where i2pat is assigned. */
2028 XVECEXP (newpat, 0, --total_sets)
2029 = subst (i2pat, i1dest, i1src, 0, 0);
2033 /* We come here when we are replacing a destination in I2 with the
2034 destination of I3. */
2035 validate_replacement:
2037 /* Note which hard regs this insn has as inputs. */
2038 mark_used_regs_combine (newpat);
2040 /* Is the result of combination a valid instruction? */
2041 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2043 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
2044 the second SET's destination is a register that is unused. In that case,
2045 we just need the first SET. This can occur when simplifying a divmod
2046 insn. We *must* test for this case here because the code below that
2047 splits two independent SETs doesn't handle this case correctly when it
2048 updates the register status. Also check the case where the first
2049 SET's destination is unused. That would not cause incorrect code, but
2050 does cause an unneeded insn to remain. */
2052 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2053 && XVECLEN (newpat, 0) == 2
2054 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2055 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2056 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
2057 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
2058 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
2059 && asm_noperands (newpat) < 0)
2061 newpat = XVECEXP (newpat, 0, 0);
2062 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2065 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2066 && XVECLEN (newpat, 0) == 2
2067 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2068 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2069 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
2070 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
2071 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
2072 && asm_noperands (newpat) < 0)
2074 newpat = XVECEXP (newpat, 0, 1);
2075 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2078 /* If we were combining three insns and the result is a simple SET
2079 with no ASM_OPERANDS that wasn't recognized, try to split it into two
2080 insns. There are two ways to do this. It can be split using a
2081 machine-specific method (like when you have an addition of a large
2082 constant) or by combine in the function find_split_point. */
2084 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
2085 && asm_noperands (newpat) < 0)
2087 rtx m_split, *split;
2088 rtx ni2dest = i2dest;
2090 /* See if the MD file can split NEWPAT. If it can't, see if letting it
2091 use I2DEST as a scratch register will help. In the latter case,
2092 convert I2DEST to the mode of the source of NEWPAT if we can. */
2094 m_split = split_insns (newpat, i3);
2096 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2097 inputs of NEWPAT. */
2099 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2100 possible to try that as a scratch reg. This would require adding
2101 more code to make it work though. */
2103 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
2105 /* If I2DEST is a hard register or the only use of a pseudo,
2106 we can change its mode. */
2107 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
2108 && GET_MODE (SET_DEST (newpat)) != VOIDmode
2109 && GET_CODE (i2dest) == REG
2110 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2111 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2112 && ! REG_USERVAR_P (i2dest))))
2113 ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
2116 m_split = split_insns (gen_rtx_PARALLEL
2118 gen_rtvec (2, newpat,
2119 gen_rtx_CLOBBER (VOIDmode,
2122 /* If the split with the mode-changed register didn't work, try
2123 the original register. */
2124 if (! m_split && ni2dest != i2dest)
2127 m_split = split_insns (gen_rtx_PARALLEL
2129 gen_rtvec (2, newpat,
2130 gen_rtx_CLOBBER (VOIDmode,
2136 if (m_split && GET_CODE (m_split) != SEQUENCE)
2138 insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
2139 if (insn_code_number >= 0)
2142 else if (m_split && GET_CODE (m_split) == SEQUENCE
2143 && XVECLEN (m_split, 0) == 2
2144 && (next_real_insn (i2) == i3
2145 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
2149 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
2150 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
2152 i3set = single_set (XVECEXP (m_split, 0, 1));
2153 i2set = single_set (XVECEXP (m_split, 0, 0));
2155 /* In case we changed the mode of I2DEST, replace it in the
2156 pseudo-register table here. We can't do it above in case this
2157 code doesn't get executed and we do a split the other way. */
2159 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2160 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
2162 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2164 /* If I2 or I3 has multiple SETs, we won't know how to track
2165 register status, so don't use these insns. If I2's destination
2166 is used between I2 and I3, we also can't use these insns. */
2168 if (i2_code_number >= 0 && i2set && i3set
2169 && (next_real_insn (i2) == i3
2170 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
2171 insn_code_number = recog_for_combine (&newi3pat, i3,
2173 if (insn_code_number >= 0)
2176 /* It is possible that both insns now set the destination of I3.
2177 If so, we must show an extra use of it. */
2179 if (insn_code_number >= 0)
2181 rtx new_i3_dest = SET_DEST (i3set);
2182 rtx new_i2_dest = SET_DEST (i2set);
2184 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
2185 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
2186 || GET_CODE (new_i3_dest) == SUBREG)
2187 new_i3_dest = XEXP (new_i3_dest, 0);
2189 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
2190 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
2191 || GET_CODE (new_i2_dest) == SUBREG)
2192 new_i2_dest = XEXP (new_i2_dest, 0);
2194 if (GET_CODE (new_i3_dest) == REG
2195 && GET_CODE (new_i2_dest) == REG
2196 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
2197 REG_N_SETS (REGNO (new_i2_dest))++;
2201 /* If we can split it and use I2DEST, go ahead and see if that
2202 helps things be recognized. Verify that none of the registers
2203 are set between I2 and I3. */
2204 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
2206 && GET_CODE (i2dest) == REG
2208 /* We need I2DEST in the proper mode. If it is a hard register
2209 or the only use of a pseudo, we can change its mode. */
2210 && (GET_MODE (*split) == GET_MODE (i2dest)
2211 || GET_MODE (*split) == VOIDmode
2212 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2213 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2214 && ! REG_USERVAR_P (i2dest)))
2215 && (next_real_insn (i2) == i3
2216 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
2217 /* We can't overwrite I2DEST if its value is still used by
2219 && ! reg_referenced_p (i2dest, newpat))
2221 rtx newdest = i2dest;
2222 enum rtx_code split_code = GET_CODE (*split);
2223 enum machine_mode split_mode = GET_MODE (*split);
2225 /* Get NEWDEST as a register in the proper mode. We have already
2226 validated that we can do this. */
2227 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
2229 newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
2231 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2232 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2235 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2236 an ASHIFT. This can occur if it was inside a PLUS and hence
2237 appeared to be a memory address. This is a kludge. */
2238 if (split_code == MULT
2239 && GET_CODE (XEXP (*split, 1)) == CONST_INT
2240 && INTVAL (XEXP (*split, 1)) > 0
2241 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
2243 SUBST (*split, gen_rtx_ASHIFT (split_mode,
2244 XEXP (*split, 0), GEN_INT (i)));
2245 /* Update split_code because we may not have a multiply
2247 split_code = GET_CODE (*split);
2250 #ifdef INSN_SCHEDULING
2251 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2252 be written as a ZERO_EXTEND. */
2253 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
2254 SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
2255 SUBREG_REG (*split)));
2258 newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
2259 SUBST (*split, newdest);
2260 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2262 /* If the split point was a MULT and we didn't have one before,
2263 don't use one now. */
2264 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
2265 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2269 /* Check for a case where we loaded from memory in a narrow mode and
2270 then sign extended it, but we need both registers. In that case,
2271 we have a PARALLEL with both loads from the same memory location.
2272 We can split this into a load from memory followed by a register-register
2273 copy. This saves at least one insn, more if register allocation can
2276 We cannot do this if the destination of the second assignment is
2277 a register that we have already assumed is zero-extended. Similarly
2278 for a SUBREG of such a register. */
2280 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2281 && GET_CODE (newpat) == PARALLEL
2282 && XVECLEN (newpat, 0) == 2
2283 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2284 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2285 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2286 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2287 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2288 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2290 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2291 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2292 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2293 (GET_CODE (temp) == REG
2294 && reg_nonzero_bits[REGNO (temp)] != 0
2295 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2296 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2297 && (reg_nonzero_bits[REGNO (temp)]
2298 != GET_MODE_MASK (word_mode))))
2299 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2300 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2301 (GET_CODE (temp) == REG
2302 && reg_nonzero_bits[REGNO (temp)] != 0
2303 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2304 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2305 && (reg_nonzero_bits[REGNO (temp)]
2306 != GET_MODE_MASK (word_mode)))))
2307 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2308 SET_SRC (XVECEXP (newpat, 0, 1)))
2309 && ! find_reg_note (i3, REG_UNUSED,
2310 SET_DEST (XVECEXP (newpat, 0, 0))))
2314 newi2pat = XVECEXP (newpat, 0, 0);
2315 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
2316 newpat = XVECEXP (newpat, 0, 1);
2317 SUBST (SET_SRC (newpat),
2318 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
2319 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2321 if (i2_code_number >= 0)
2322 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2324 if (insn_code_number >= 0)
2329 /* If we will be able to accept this, we have made a change to the
2330 destination of I3. This can invalidate a LOG_LINKS pointing
2331 to I3. No other part of combine.c makes such a transformation.
2333 The new I3 will have a destination that was previously the
2334 destination of I1 or I2 and which was used in i2 or I3. Call
2335 distribute_links to make a LOG_LINK from the next use of
2336 that destination. */
2338 PATTERN (i3) = newpat;
2339 distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));
2341 /* I3 now uses what used to be its destination and which is
2342 now I2's destination. That means we need a LOG_LINK from
2343 I3 to I2. But we used to have one, so we still will.
2345 However, some later insn might be using I2's dest and have
2346 a LOG_LINK pointing at I3. We must remove this link.
2347 The simplest way to remove the link is to point it at I1,
2348 which we know will be a NOTE. */
2350 for (insn = NEXT_INSN (i3);
2351 insn && (this_basic_block == n_basic_blocks - 1
2352 || insn != BLOCK_HEAD (this_basic_block + 1));
2353 insn = NEXT_INSN (insn))
2355 if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
2357 for (link = LOG_LINKS (insn); link;
2358 link = XEXP (link, 1))
2359 if (XEXP (link, 0) == i3)
2360 XEXP (link, 0) = i1;
2368 /* Similarly, check for a case where we have a PARALLEL of two independent
2369 SETs but we started with three insns. In this case, we can do the sets
2370 as two separate insns. This case occurs when some SET allows two
2371 other insns to combine, but the destination of that SET is still live. */
2373 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2374 && GET_CODE (newpat) == PARALLEL
2375 && XVECLEN (newpat, 0) == 2
2376 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2377 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2378 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2379 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2380 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2381 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2382 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2384 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2385 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2386 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2387 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2388 XVECEXP (newpat, 0, 0))
2389 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2390 XVECEXP (newpat, 0, 1))
2391 && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
2392 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
2394 /* Normally, it doesn't matter which of the two is done first,
2395 but it does if one references cc0. In that case, it has to
2398 if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2400 newi2pat = XVECEXP (newpat, 0, 0);
2401 newpat = XVECEXP (newpat, 0, 1);
2406 newi2pat = XVECEXP (newpat, 0, 1);
2407 newpat = XVECEXP (newpat, 0, 0);
2410 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2412 if (i2_code_number >= 0)
2413 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2416 /* If it still isn't recognized, fail and change things back the way they
2418 if ((insn_code_number < 0
2419 /* Is the result a reasonable ASM_OPERANDS? */
2420 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2426 /* If we had to change another insn, make sure it is valid also. */
2427 if (undobuf.other_insn)
2429 rtx other_pat = PATTERN (undobuf.other_insn);
2430 rtx new_other_notes;
2433 CLEAR_HARD_REG_SET (newpat_used_regs);
2435 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2438 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2444 PATTERN (undobuf.other_insn) = other_pat;
2446 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2447 are still valid. Then add any non-duplicate notes added by
2448 recog_for_combine. */
2449 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2451 next = XEXP (note, 1);
2453 if (REG_NOTE_KIND (note) == REG_UNUSED
2454 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2456 if (GET_CODE (XEXP (note, 0)) == REG)
2457 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
2459 remove_note (undobuf.other_insn, note);
2463 for (note = new_other_notes; note; note = XEXP (note, 1))
2464 if (GET_CODE (XEXP (note, 0)) == REG)
2465 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
2467 distribute_notes (new_other_notes, undobuf.other_insn,
2468 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2471 /* If I2 is the setter CC0 and I3 is the user CC0 then check whether
2472 they are adjacent to each other or not. */
2474 rtx p = prev_nonnote_insn (i3);
2475 if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
2476 && sets_cc0_p (newi2pat))
2484 /* We now know that we can do this combination. Merge the insns and
2485 update the status of registers and LOG_LINKS. */
2488 rtx i3notes, i2notes, i1notes = 0;
2489 rtx i3links, i2links, i1links = 0;
2492 /* Compute which registers we expect to eliminate. newi2pat may be setting
2493 either i3dest or i2dest, so we must check it. Also, i1dest may be the
2494 same as i3dest, in which case newi2pat may be setting i1dest. */
2495 rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2496 || i2dest_in_i2src || i2dest_in_i1src
2498 rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2499 || (newi2pat && reg_set_p (i1dest, newi2pat))
2502 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2504 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2505 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2507 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2509 /* Ensure that we do not have something that should not be shared but
2510 occurs multiple times in the new insns. Check this by first
2511 resetting all the `used' flags and then copying anything that is shared. */
2513 reset_used_flags (i3notes);
2514 reset_used_flags (i2notes);
2515 reset_used_flags (i1notes);
2516 reset_used_flags (newpat);
2517 reset_used_flags (newi2pat);
2518 if (undobuf.other_insn)
2519 reset_used_flags (PATTERN (undobuf.other_insn));
2521 i3notes = copy_rtx_if_shared (i3notes);
2522 i2notes = copy_rtx_if_shared (i2notes);
2523 i1notes = copy_rtx_if_shared (i1notes);
2524 newpat = copy_rtx_if_shared (newpat);
2525 newi2pat = copy_rtx_if_shared (newi2pat);
2526 if (undobuf.other_insn)
2527 reset_used_flags (PATTERN (undobuf.other_insn));
2529 INSN_CODE (i3) = insn_code_number;
2530 PATTERN (i3) = newpat;
2531 if (undobuf.other_insn)
2532 INSN_CODE (undobuf.other_insn) = other_code_number;
2534 /* We had one special case above where I2 had more than one set and
2535 we replaced a destination of one of those sets with the destination
2536 of I3. In that case, we have to update LOG_LINKS of insns later
2537 in this basic block. Note that this (expensive) case is rare.
2539 Also, in this case, we must pretend that all REG_NOTEs for I2
2540 actually came from I3, so that REG_UNUSED notes from I2 will be
2541 properly handled. */
2543 if (i3_subst_into_i2)
2545 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2546 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != USE
2547 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2548 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2549 && ! find_reg_note (i2, REG_UNUSED,
2550 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2551 for (temp = NEXT_INSN (i2);
2552 temp && (this_basic_block == n_basic_blocks - 1
2553 || BLOCK_HEAD (this_basic_block) != temp);
2554 temp = NEXT_INSN (temp))
2555 if (temp != i3 && INSN_P (temp))
2556 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2557 if (XEXP (link, 0) == i2)
2558 XEXP (link, 0) = i3;
2563 while (XEXP (link, 1))
2564 link = XEXP (link, 1);
2565 XEXP (link, 1) = i2notes;
2579 INSN_CODE (i2) = i2_code_number;
2580 PATTERN (i2) = newi2pat;
2584 PUT_CODE (i2, NOTE);
2585 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2586 NOTE_SOURCE_FILE (i2) = 0;
2593 PUT_CODE (i1, NOTE);
2594 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2595 NOTE_SOURCE_FILE (i1) = 0;
2598 /* Get death notes for everything that is now used in either I3 or
2599 I2 and used to die in a previous insn. If we built two new
2600 patterns, move from I1 to I2 then I2 to I3 so that we get the
2601 proper movement on registers that I2 modifies. */
2605 move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2606 move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2609 move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2612 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2614 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2617 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2620 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2623 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2626 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2627 know these are REG_UNUSED and want them to go to the desired insn,
2628 so we always pass it as i3. We have not counted the notes in
2629 reg_n_deaths yet, so we need to do so now. */
2631 if (newi2pat && new_i2_notes)
2633 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2634 if (GET_CODE (XEXP (temp, 0)) == REG)
2635 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2637 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2642 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2643 if (GET_CODE (XEXP (temp, 0)) == REG)
2644 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2646 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2649 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
2650 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
2651 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
2652 in that case, it might delete I2. Similarly for I2 and I1.
2653 Show an additional death due to the REG_DEAD note we make here. If
2654 we discard it in distribute_notes, we will decrement it again. */
2658 if (GET_CODE (i3dest_killed) == REG)
2659 REG_N_DEATHS (REGNO (i3dest_killed))++;
2661 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
2662 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2664 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
2666 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2668 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2672 if (i2dest_in_i2src)
2674 if (GET_CODE (i2dest) == REG)
2675 REG_N_DEATHS (REGNO (i2dest))++;
2677 if (newi2pat && reg_set_p (i2dest, newi2pat))
2678 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2679 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2681 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2682 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2683 NULL_RTX, NULL_RTX);
2686 if (i1dest_in_i1src)
2688 if (GET_CODE (i1dest) == REG)
2689 REG_N_DEATHS (REGNO (i1dest))++;
2691 if (newi2pat && reg_set_p (i1dest, newi2pat))
2692 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2693 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2695 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2696 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2697 NULL_RTX, NULL_RTX);
2700 distribute_links (i3links);
2701 distribute_links (i2links);
2702 distribute_links (i1links);
2704 if (GET_CODE (i2dest) == REG)
2707 rtx i2_insn = 0, i2_val = 0, set;
2709 /* The insn that used to set this register doesn't exist, and
2710 this life of the register may not exist either. See if one of
2711 I3's links points to an insn that sets I2DEST. If it does,
2712 that is now the last known value for I2DEST. If we don't update
2713 this and I2 set the register to a value that depended on its old
2714 contents, we will get confused. If this insn is used, things
2715 will be set correctly in combine_instructions. */
2717 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2718 if ((set = single_set (XEXP (link, 0))) != 0
2719 && rtx_equal_p (i2dest, SET_DEST (set)))
2720 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2722 record_value_for_reg (i2dest, i2_insn, i2_val);
2724 /* If the reg formerly set in I2 died only once and that was in I3,
2725 zero its use count so it won't make `reload' do any work. */
2727 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2728 && ! i2dest_in_i2src)
2730 regno = REGNO (i2dest);
2731 REG_N_SETS (regno)--;
2735 if (i1 && GET_CODE (i1dest) == REG)
2738 rtx i1_insn = 0, i1_val = 0, set;
2740 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2741 if ((set = single_set (XEXP (link, 0))) != 0
2742 && rtx_equal_p (i1dest, SET_DEST (set)))
2743 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2745 record_value_for_reg (i1dest, i1_insn, i1_val);
2747 regno = REGNO (i1dest);
2748 if (! added_sets_1 && ! i1dest_in_i1src)
2749 REG_N_SETS (regno)--;
2752 /* Update reg_nonzero_bits et al for any changes that may have been made
2753 to this insn. The order of set_nonzero_bits_and_sign_copies() is
2754 important. Because newi2pat can affect nonzero_bits of newpat */
2756 note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
2757 note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
2759 /* Set new_direct_jump_p if a new return or simple jump instruction
2762 If I3 is now an unconditional jump, ensure that it has a
2763 BARRIER following it since it may have initially been a
2764 conditional jump. It may also be the last nonnote insn. */
2766 if (GET_CODE (newpat) == RETURN || any_uncondjump_p (i3))
2768 *new_direct_jump_p = 1;
2770 if ((temp = next_nonnote_insn (i3)) == NULL_RTX
2771 || GET_CODE (temp) != BARRIER)
2772 emit_barrier_after (i3);
2774 /* A NOOP jump does not need a barrier, but it does need cleaning up
2776 if (GET_CODE (newpat) == SET
2777 && SET_SRC (newpat) == pc_rtx
2778 && SET_DEST (newpat) == pc_rtx)
2779 *new_direct_jump_p = 1;
2782 combine_successes++;
2785 /* Clear this here, so that subsequent get_last_value calls are not
2787 subst_prev_insn = NULL_RTX;
2789 if (added_links_insn
2790 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2791 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2792 return added_links_insn;
2794 return newi2pat ? i2 : i3;
2797 /* Undo all the modifications recorded in undobuf. */
2802 struct undo *undo, *next;
2804 for (undo = undobuf.undos; undo; undo = next)
2808 *undo->where.i = undo->old_contents.i;
2810 *undo->where.r = undo->old_contents.r;
2812 undo->next = undobuf.frees;
2813 undobuf.frees = undo;
2818 /* Clear this here, so that subsequent get_last_value calls are not
2820 subst_prev_insn = NULL_RTX;
2823 /* We've committed to accepting the changes we made. Move all
2824 of the undos to the free list. */
2829 struct undo *undo, *next;
2831 for (undo = undobuf.undos; undo; undo = next)
2834 undo->next = undobuf.frees;
2835 undobuf.frees = undo;
2841 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
2842 where we have an arithmetic expression and return that point. LOC will
2845 try_combine will call this function to see if an insn can be split into
2849 find_split_point (loc, insn)
2854 enum rtx_code code = GET_CODE (x);
2856 unsigned HOST_WIDE_INT len = 0;
2857 HOST_WIDE_INT pos = 0;
2859 rtx inner = NULL_RTX;
2861 /* First special-case some codes. */
2865 #ifdef INSN_SCHEDULING
2866 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2868 if (GET_CODE (SUBREG_REG (x)) == MEM)
2871 return find_split_point (&SUBREG_REG (x), insn);
2875 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2876 using LO_SUM and HIGH. */
2877 if (GET_CODE (XEXP (x, 0)) == CONST
2878 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2881 gen_rtx_LO_SUM (Pmode,
2882 gen_rtx_HIGH (Pmode, XEXP (x, 0)),
2884 return &XEXP (XEXP (x, 0), 0);
2888 /* If we have a PLUS whose second operand is a constant and the
2889 address is not valid, perhaps we can split it up using
2890 the machine-specific way to split large constants. We use
2891 the first pseudo-reg (one of the virtual regs) as a placeholder;
2892 it will not remain in the result. */
2893 if (GET_CODE (XEXP (x, 0)) == PLUS
2894 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2895 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2897 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2898 rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
2901 /* This should have produced two insns, each of which sets our
2902 placeholder. If the source of the second is a valid address,
2903 we can put both sources together and make a split point
2906 if (seq && XVECLEN (seq, 0) == 2
2907 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2908 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2909 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2910 && ! reg_mentioned_p (reg,
2911 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2912 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2913 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2914 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2915 && memory_address_p (GET_MODE (x),
2916 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2918 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2919 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2921 /* Replace the placeholder in SRC2 with SRC1. If we can
2922 find where in SRC2 it was placed, that can become our
2923 split point and we can replace this address with SRC2.
2924 Just try two obvious places. */
2926 src2 = replace_rtx (src2, reg, src1);
2928 if (XEXP (src2, 0) == src1)
2929 split = &XEXP (src2, 0);
2930 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2931 && XEXP (XEXP (src2, 0), 0) == src1)
2932 split = &XEXP (XEXP (src2, 0), 0);
2936 SUBST (XEXP (x, 0), src2);
2941 /* If that didn't work, perhaps the first operand is complex and
2942 needs to be computed separately, so make a split point there.
2943 This will occur on machines that just support REG + CONST
2944 and have a constant moved through some previous computation. */
2946 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2947 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2948 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2950 return &XEXP (XEXP (x, 0), 0);
2956 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2957 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2958 we need to put the operand into a register. So split at that
2961 if (SET_DEST (x) == cc0_rtx
2962 && GET_CODE (SET_SRC (x)) != COMPARE
2963 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2964 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2965 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2966 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2967 return &SET_SRC (x);
2970 /* See if we can split SET_SRC as it stands. */
2971 split = find_split_point (&SET_SRC (x), insn);
2972 if (split && split != &SET_SRC (x))
2975 /* See if we can split SET_DEST as it stands. */
2976 split = find_split_point (&SET_DEST (x), insn);
2977 if (split && split != &SET_DEST (x))
2980 /* See if this is a bitfield assignment with everything constant. If
2981 so, this is an IOR of an AND, so split it into that. */
2982 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2983 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2984 <= HOST_BITS_PER_WIDE_INT)
2985 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2986 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2987 && GET_CODE (SET_SRC (x)) == CONST_INT
2988 && ((INTVAL (XEXP (SET_DEST (x), 1))
2989 + INTVAL (XEXP (SET_DEST (x), 2)))
2990 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2991 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2993 HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
2994 unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
2995 unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
2996 rtx dest = XEXP (SET_DEST (x), 0);
2997 enum machine_mode mode = GET_MODE (dest);
2998 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
3000 if (BITS_BIG_ENDIAN)
3001 pos = GET_MODE_BITSIZE (mode) - len - pos;
3005 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
3008 gen_binary (IOR, mode,
3009 gen_binary (AND, mode, dest,
3010 GEN_INT (~(mask << pos)
3011 & GET_MODE_MASK (mode))),
3012 GEN_INT (src << pos)));
3014 SUBST (SET_DEST (x), dest);
3016 split = find_split_point (&SET_SRC (x), insn);
3017 if (split && split != &SET_SRC (x))
3021 /* Otherwise, see if this is an operation that we can split into two.
3022 If so, try to split that. */
3023 code = GET_CODE (SET_SRC (x));
3028 /* If we are AND'ing with a large constant that is only a single
3029 bit and the result is only being used in a context where we
3030 need to know if it is zero or non-zero, replace it with a bit
3031 extraction. This will avoid the large constant, which might
3032 have taken more than one insn to make. If the constant were
3033 not a valid argument to the AND but took only one insn to make,
3034 this is no worse, but if it took more than one insn, it will
3037 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3038 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
3039 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
3040 && GET_CODE (SET_DEST (x)) == REG
3041 && (split = find_single_use (SET_DEST (x), insn, (rtx*)0)) != 0
3042 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
3043 && XEXP (*split, 0) == SET_DEST (x)
3044 && XEXP (*split, 1) == const0_rtx)
3046 rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
3047 XEXP (SET_SRC (x), 0),
3048 pos, NULL_RTX, 1, 1, 0, 0);
3049 if (extraction != 0)
3051 SUBST (SET_SRC (x), extraction);
3052 return find_split_point (loc, insn);
3058 /* if STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
3059 is known to be on, this can be converted into a NEG of a shift. */
3060 if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
3061 && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
3062 && 1 <= (pos = exact_log2
3063 (nonzero_bits (XEXP (SET_SRC (x), 0),
3064 GET_MODE (XEXP (SET_SRC (x), 0))))))
3066 enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
3070 gen_rtx_LSHIFTRT (mode,
3071 XEXP (SET_SRC (x), 0),
3074 split = find_split_point (&SET_SRC (x), insn);
3075 if (split && split != &SET_SRC (x))
3081 inner = XEXP (SET_SRC (x), 0);
3083 /* We can't optimize if either mode is a partial integer
3084 mode as we don't know how many bits are significant
3086 if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
3087 || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
3091 len = GET_MODE_BITSIZE (GET_MODE (inner));
3097 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3098 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
3100 inner = XEXP (SET_SRC (x), 0);
3101 len = INTVAL (XEXP (SET_SRC (x), 1));
3102 pos = INTVAL (XEXP (SET_SRC (x), 2));
3104 if (BITS_BIG_ENDIAN)
3105 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
3106 unsignedp = (code == ZERO_EXTRACT);
3114 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
3116 enum machine_mode mode = GET_MODE (SET_SRC (x));
3118 /* For unsigned, we have a choice of a shift followed by an
3119 AND or two shifts. Use two shifts for field sizes where the
3120 constant might be too large. We assume here that we can
3121 always at least get 8-bit constants in an AND insn, which is
3122 true for every current RISC. */
3124 if (unsignedp && len <= 8)
3129 (mode, gen_lowpart_for_combine (mode, inner),
3131 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
3133 split = find_split_point (&SET_SRC (x), insn);
3134 if (split && split != &SET_SRC (x))
3141 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
3142 gen_rtx_ASHIFT (mode,
3143 gen_lowpart_for_combine (mode, inner),
3144 GEN_INT (GET_MODE_BITSIZE (mode)
3146 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
3148 split = find_split_point (&SET_SRC (x), insn);
3149 if (split && split != &SET_SRC (x))
3154 /* See if this is a simple operation with a constant as the second
3155 operand. It might be that this constant is out of range and hence
3156 could be used as a split point. */
3157 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3158 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3159 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
3160 && CONSTANT_P (XEXP (SET_SRC (x), 1))
3161 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
3162 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
3163 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
3165 return &XEXP (SET_SRC (x), 1);
3167 /* Finally, see if this is a simple operation with its first operand
3168 not in a register. The operation might require this operand in a
3169 register, so return it as a split point. We can always do this
3170 because if the first operand were another operation, we would have
3171 already found it as a split point. */
3172 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3173 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3174 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
3175 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
3176 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
3177 return &XEXP (SET_SRC (x), 0);
3183 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
3184 it is better to write this as (not (ior A B)) so we can split it.
3185 Similarly for IOR. */
3186 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
3189 gen_rtx_NOT (GET_MODE (x),
3190 gen_rtx_fmt_ee (code == IOR ? AND : IOR,
3192 XEXP (XEXP (x, 0), 0),
3193 XEXP (XEXP (x, 1), 0))));
3194 return find_split_point (loc, insn);
3197 /* Many RISC machines have a large set of logical insns. If the
3198 second operand is a NOT, put it first so we will try to split the
3199 other operand first. */
3200 if (GET_CODE (XEXP (x, 1)) == NOT)
3202 rtx tem = XEXP (x, 0);
3203 SUBST (XEXP (x, 0), XEXP (x, 1));
3204 SUBST (XEXP (x, 1), tem);
3212 /* Otherwise, select our actions depending on our rtx class. */
3213 switch (GET_RTX_CLASS (code))
3215 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
3217 split = find_split_point (&XEXP (x, 2), insn);
3220 /* ... fall through ... */
3224 split = find_split_point (&XEXP (x, 1), insn);
3227 /* ... fall through ... */
3229 /* Some machines have (and (shift ...) ...) insns. If X is not
3230 an AND, but XEXP (X, 0) is, use it as our split point. */
3231 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
3232 return &XEXP (x, 0);
3234 split = find_split_point (&XEXP (x, 0), insn);
3240 /* Otherwise, we don't have a split point. */
3244 /* Throughout X, replace FROM with TO, and return the result.
3245 The result is TO if X is FROM;
3246 otherwise the result is X, but its contents may have been modified.
3247 If they were modified, a record was made in undobuf so that
3248 undo_all will (among other things) return X to its original state.
3250 If the number of changes necessary is too much to record to undo,
3251 the excess changes are not made, so the result is invalid.
3252 The changes already made can still be undone.
3253 undobuf.num_undo is incremented for such changes, so by testing that
3254 the caller can tell whether the result is valid.
3256 `n_occurrences' is incremented each time FROM is replaced.
3258 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
3260 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
3261 by copying if `n_occurrences' is non-zero. */
3264 subst (x, from, to, in_dest, unique_copy)
3265 register rtx x, from, to;
3269 register enum rtx_code code = GET_CODE (x);
3270 enum machine_mode op0_mode = VOIDmode;
3271 register const char *fmt;
3272 register int len, i;
3275 /* Two expressions are equal if they are identical copies of a shared
3276 RTX or if they are both registers with the same register number
3279 #define COMBINE_RTX_EQUAL_P(X,Y) \
3281 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
3282 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
 /* If X itself matches FROM (and we are not inside a SET_DEST), the
    whole result is TO.  Copy TO when each occurrence must be unique
    and this is not the first replacement.  */
3284 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3287 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3290 /* If X and FROM are the same register but different modes, they will
3291 not have been seen as equal above. However, flow.c will make a
3292 LOG_LINKS entry for that case. If we do nothing, we will try to
3293 rerecognize our original insn and, when it succeeds, we will
3294 delete the feeding insn, which is incorrect.
3296 So force this insn not to match in this (rare) case. */
3297 if (! in_dest && code == REG && GET_CODE (from) == REG
3298 && REGNO (x) == REGNO (from))
3299 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
3301 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3302 of which may contain things that can be combined. */
3303 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
3306 /* It is possible to have a subexpression appear twice in the insn.
3307 Suppose that FROM is a register that appears within TO.
3308 Then, after that subexpression has been scanned once by `subst',
3309 the second time it is scanned, TO may be found. If we were
3310 to scan TO here, we would find FROM within it and create a
3311 self-referent rtl structure which is completely wrong. */
3312 if (COMBINE_RTX_EQUAL_P (x, to))
3315 /* Parallel asm_operands need special attention because all of the
3316 inputs are shared across the arms. Furthermore, unsharing the
3317 rtl results in recognition failures. Failure to handle this case
3318 specially can result in circular rtl.
3320 Solve this by doing a normal pass across the first entry of the
3321 parallel, and only processing the SET_DESTs of the subsequent
3324 if (code == PARALLEL
3325 && GET_CODE (XVECEXP (x, 0, 0)) == SET
3326 && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
3328 new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3330 /* If this substitution failed, this whole thing fails.
    (A CLOBBER of const0_rtx is the conventional "reject this
    combination" marker throughout this pass.)  */
3331 if (GET_CODE (new) == CLOBBER
3332 && XEXP (new, 0) == const0_rtx)
3335 SUBST (XVECEXP (x, 0, 0), new);
 /* For the remaining arms of the asm PARALLEL, substitute only in the
    SET_DESTs; the inputs are shared with arm 0 and were handled above.  */
3337 for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
3339 rtx dest = SET_DEST (XVECEXP (x, 0, i));
3341 if (GET_CODE (dest) != REG
3342 && GET_CODE (dest) != CC0
3343 && GET_CODE (dest) != PC)
3345 new = subst (dest, from, to, 0, unique_copy);
3347 /* If this substitution failed, this whole thing fails. */
3348 if (GET_CODE (new) == CLOBBER
3349 && XEXP (new, 0) == const0_rtx)
3352 SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
 /* General case: recurse over the operands of X as described by its
    rtx format string ('e' = expression, 'E' = vector of expressions).  */
3358 len = GET_RTX_LENGTH (code);
3359 fmt = GET_RTX_FORMAT (code);
3361 /* We don't need to process a SET_DEST that is a register, CC0,
3362 or PC, so set up to skip this common case. All other cases
3363 where we want to suppress replacing something inside a
3364 SET_SRC are handled via the IN_DEST operand. */
3366 && (GET_CODE (SET_DEST (x)) == REG
3367 || GET_CODE (SET_DEST (x)) == CC0
3368 || GET_CODE (SET_DEST (x)) == PC))
3371 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3374 op0_mode = GET_MODE (XEXP (x, 0));
3376 for (i = 0; i < len; i++)
 /* 'E' operand: a vector; substitute in each element.  */
3381 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3383 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3385 new = (unique_copy && n_occurrences
3386 ? copy_rtx (to) : to);
3391 new = subst (XVECEXP (x, i, j), from, to, 0,
3394 /* If this substitution failed, this whole thing
3396 if (GET_CODE (new) == CLOBBER
3397 && XEXP (new, 0) == const0_rtx)
3401 SUBST (XVECEXP (x, i, j), new);
3404 else if (fmt[i] == 'e')
3406 /* If this is a register being set, ignore it. */
3409 && (code == SUBREG || code == STRICT_LOW_PART
3410 || code == ZERO_EXTRACT)
3412 && GET_CODE (new) == REG)
3415 else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3417 /* In general, don't install a subreg involving two
3418 modes not tieable. It can worsen register
3419 allocation, and can even make invalid reload
3420 insns, since the reg inside may need to be copied
3421 from in the outside mode, and that may be invalid
3422 if it is an fp reg copied in integer mode.
3424 We allow two exceptions to this: It is valid if
3425 it is inside another SUBREG and the mode of that
3426 SUBREG and the mode of the inside of TO is
3427 tieable and it is valid if X is a SET that copies
3430 if (GET_CODE (to) == SUBREG
3431 && ! MODES_TIEABLE_P (GET_MODE (to),
3432 GET_MODE (SUBREG_REG (to)))
3433 && ! (code == SUBREG
3434 && MODES_TIEABLE_P (GET_MODE (x),
3435 GET_MODE (SUBREG_REG (to))))
3437 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3440 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
 /* On targets where some hard registers cannot change mode,
    reject a substitution that would need such a change.  */
3442 #ifdef CLASS_CANNOT_CHANGE_MODE
3444 && GET_CODE (to) == REG
3445 && REGNO (to) < FIRST_PSEUDO_REGISTER
3446 && (TEST_HARD_REG_BIT
3447 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
3449 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (to),
3451 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3454 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3458 /* If we are in a SET_DEST, suppress most cases unless we
3459 have gone inside a MEM, in which case we want to
3460 simplify the address. We assume here that things that
3461 are actually part of the destination have their inner
3462 parts in the first expression. This is true for SUBREG,
3463 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3464 things aside from REG and MEM that should appear in a
3466 new = subst (XEXP (x, i), from, to,
3468 && (code == SUBREG || code == STRICT_LOW_PART
3469 || code == ZERO_EXTRACT))
3471 && i == 0), unique_copy);
3473 /* If we found that we will have to reject this combination,
3474 indicate that by returning the CLOBBER ourselves, rather than
3475 an expression containing it. This will speed things up as
3476 well as prevent accidents where two CLOBBERs are considered
3477 to be equal, thus producing an incorrect simplification. */
3479 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3482 SUBST (XEXP (x, i), new);
3487 /* Try to simplify X. If the simplification changed the code, it is likely
3488 that further simplification will help, so loop, but limit the number
3489 of repetitions that will be performed. */
3491 for (i = 0; i < 4; i++)
3493 /* If X is sufficiently simple, don't bother trying to do anything
3495 if (code != CONST_INT && code != REG && code != CLOBBER)
3496 x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest);
3498 if (GET_CODE (x) == code)
3501 code = GET_CODE (x);
3503 /* We no longer know the original mode of operand 0 since we
3504 have changed the form of X.  */
3505 op0_mode = VOIDmode;
3511 /* Simplify X, a piece of RTL. We just operate on the expression at the
3512 outer level; call `subst' to simplify recursively. Return the new
3515 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3516 will be the iteration even if an expression with a code different from
3517 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
3520 combine_simplify_rtx (x, op0_mode, last, in_dest)
3522 enum machine_mode op0_mode;
3526 enum rtx_code code = GET_CODE (x);
3527 enum machine_mode mode = GET_MODE (x);
3532 /* If this is a commutative operation, put a constant last and a complex
3533 expression first. We don't need to do this for comparisons here. */
3534 if (GET_RTX_CLASS (code) == 'c'
3535 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
3538 SUBST (XEXP (x, 0), XEXP (x, 1));
3539 SUBST (XEXP (x, 1), temp);
3542 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3543 sign extension of a PLUS with a constant, reverse the order of the sign
3544 extension and the addition. Note that this not the same as the original
3545 code, but overflow is undefined for signed values. Also note that the
3546 PLUS will have been partially moved "inside" the sign-extension, so that
3547 the first operand of X will really look like:
3548 (ashiftrt (plus (ashift A C4) C5) C4).
3550 (plus (ashiftrt (ashift A C4) C2) C4)
3551 and replace the first operand of X with that expression. Later parts
3552 of this function may simplify the expression further.
3554 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3555 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3556 distributive law to produce (plus (mult (sign_extend X) C1) C3).
3558 We do this to simplify address expressions. */
3560 if ((code == PLUS || code == MINUS || code == MULT)
3561 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3562 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3563 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3564 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3565 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3566 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3567 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3568 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3569 XEXP (XEXP (XEXP (x, 0), 0), 1),
3570 XEXP (XEXP (x, 0), 1))) != 0)
3573 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3574 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3575 INTVAL (XEXP (XEXP (x, 0), 1)));
3577 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3578 INTVAL (XEXP (XEXP (x, 0), 1)));
3580 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3583 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3584 applying it to the arms of the IF_THEN_ELSE. This often simplifies
3585 things. Check for cases where both arms are testing the same
3588 Don't do anything if all operands are very simple. */
3590 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3591 || GET_RTX_CLASS (code) == '<')
3592 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3593 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3594 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3596 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3597 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3598 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3600 || (GET_RTX_CLASS (code) == '1'
3601 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3602 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3603 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3606 rtx cond, true_rtx, false_rtx;
3608 cond = if_then_else_cond (x, &true_rtx, &false_rtx);
3610 /* If everything is a comparison, what we have is highly unlikely
3611 to be simpler, so don't use it. */
3612 && ! (GET_RTX_CLASS (code) == '<'
3613 && (GET_RTX_CLASS (GET_CODE (true_rtx)) == '<'
3614 || GET_RTX_CLASS (GET_CODE (false_rtx)) == '<')))
3616 rtx cop1 = const0_rtx;
3617 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3619 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3622 /* Simplify the alternative arms; this may collapse the true and
3623 false arms to store-flag values. */
3624 true_rtx = subst (true_rtx, pc_rtx, pc_rtx, 0, 0);
3625 false_rtx = subst (false_rtx, pc_rtx, pc_rtx, 0, 0);
3627 /* If true_rtx and false_rtx are not general_operands, an if_then_else
3628 is unlikely to be simpler. */
3629 if (general_operand (true_rtx, VOIDmode)
3630 && general_operand (false_rtx, VOIDmode))
3632 /* Restarting if we generate a store-flag expression will cause
3633 us to loop. Just drop through in this case. */
3635 /* If the result values are STORE_FLAG_VALUE and zero, we can
3636 just make the comparison operation. */
3637 if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
3638 x = gen_binary (cond_code, mode, cond, cop1);
3639 else if (true_rtx == const0_rtx && false_rtx == const_true_rtx)
3640 x = gen_binary (reverse_condition (cond_code),
3643 /* Likewise, we can make the negate of a comparison operation
3644 if the result values are - STORE_FLAG_VALUE and zero. */
3645 else if (GET_CODE (true_rtx) == CONST_INT
3646 && INTVAL (true_rtx) == - STORE_FLAG_VALUE
3647 && false_rtx == const0_rtx)
3648 x = simplify_gen_unary (NEG, mode,
3649 gen_binary (cond_code, mode, cond,
3652 else if (GET_CODE (false_rtx) == CONST_INT
3653 && INTVAL (false_rtx) == - STORE_FLAG_VALUE
3654 && true_rtx == const0_rtx)
3655 x = simplify_gen_unary (NEG, mode,
3656 gen_binary (reverse_condition
3661 return gen_rtx_IF_THEN_ELSE (mode,
3662 gen_binary (cond_code, VOIDmode,
3664 true_rtx, false_rtx);
3666 code = GET_CODE (x);
3667 op0_mode = VOIDmode;
3672 /* Try to fold this expression in case we have constants that weren't
3675 switch (GET_RTX_CLASS (code))
3678 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3682 enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
3683 if (cmp_mode == VOIDmode)
3685 cmp_mode = GET_MODE (XEXP (x, 1));
3686 if (cmp_mode == VOIDmode)
3687 cmp_mode = op0_mode;
3689 temp = simplify_relational_operation (code, cmp_mode,
3690 XEXP (x, 0), XEXP (x, 1));
3692 #ifdef FLOAT_STORE_FLAG_VALUE
3693 if (temp != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3695 if (temp == const0_rtx)
3696 temp = CONST0_RTX (mode);
3698 temp = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE (mode), mode);
3704 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3708 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3709 XEXP (x, 1), XEXP (x, 2));
3716 code = GET_CODE (temp);
3717 op0_mode = VOIDmode;
3718 mode = GET_MODE (temp);
3721 /* First see if we can apply the inverse distributive law. */
3722 if (code == PLUS || code == MINUS
3723 || code == AND || code == IOR || code == XOR)
3725 x = apply_distributive_law (x);
3726 code = GET_CODE (x);
3727 op0_mode = VOIDmode;
3730 /* If CODE is an associative operation not otherwise handled, see if we
3731 can associate some operands. This can win if they are constants or
3732 if they are logically related (i.e. (a & b) & a). */
3733 if ((code == PLUS || code == MINUS || code == MULT || code == DIV
3734 || code == AND || code == IOR || code == XOR
3735 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3736 && ((INTEGRAL_MODE_P (mode) && code != DIV)
3737 || (flag_unsafe_math_optimizations && FLOAT_MODE_P (mode))))
3739 if (GET_CODE (XEXP (x, 0)) == code)
3741 rtx other = XEXP (XEXP (x, 0), 0);
3742 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3743 rtx inner_op1 = XEXP (x, 1);
3746 /* Make sure we pass the constant operand if any as the second
3747 one if this is a commutative operation. */
3748 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3750 rtx tem = inner_op0;
3751 inner_op0 = inner_op1;
3754 inner = simplify_binary_operation (code == MINUS ? PLUS
3755 : code == DIV ? MULT
3757 mode, inner_op0, inner_op1);
3759 /* For commutative operations, try the other pair if that one
3761 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3763 other = XEXP (XEXP (x, 0), 1);
3764 inner = simplify_binary_operation (code, mode,
3765 XEXP (XEXP (x, 0), 0),
3770 return gen_binary (code, mode, other, inner);
3774 /* A little bit of algebraic simplification here. */
3778 /* Ensure that our address has any ASHIFTs converted to MULT in case
3779 address-recognizing predicates are called later. */
3780 temp = make_compound_operation (XEXP (x, 0), MEM);
3781 SUBST (XEXP (x, 0), temp);
3785 if (op0_mode == VOIDmode)
3786 op0_mode = GET_MODE (SUBREG_REG (x));
3788 /* simplify_subreg can't use gen_lowpart_for_combine. */
3789 if (CONSTANT_P (SUBREG_REG (x))
3790 && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x))
3791 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3795 temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
3801 /* Note that we cannot do any narrowing for non-constants since
3802 we might have been counting on using the fact that some bits were
3803 zero. We now do this in the SET. */
3808 /* (not (plus X -1)) can become (neg X). */
3809 if (GET_CODE (XEXP (x, 0)) == PLUS
3810 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3811 return gen_rtx_NEG (mode, XEXP (XEXP (x, 0), 0));
3813 /* Similarly, (not (neg X)) is (plus X -1). */
3814 if (GET_CODE (XEXP (x, 0)) == NEG)
3815 return gen_rtx_PLUS (mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3817 /* (not (xor X C)) for C constant is (xor X D) with D = ~C. */
3818 if (GET_CODE (XEXP (x, 0)) == XOR
3819 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3820 && (temp = simplify_unary_operation (NOT, mode,
3821 XEXP (XEXP (x, 0), 1),
3823 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3825 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3826 other than 1, but that is not valid. We could do a similar
3827 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3828 but this doesn't seem common enough to bother with. */
3829 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3830 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3831 return gen_rtx_ROTATE (mode, simplify_gen_unary (NOT, mode,
3833 XEXP (XEXP (x, 0), 1));
3835 if (GET_CODE (XEXP (x, 0)) == SUBREG
3836 && subreg_lowpart_p (XEXP (x, 0))
3837 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3838 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3839 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3840 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3842 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3844 x = gen_rtx_ROTATE (inner_mode,
3845 simplify_gen_unary (NOT, inner_mode, const1_rtx,
3847 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3848 return gen_lowpart_for_combine (mode, x);
3851 /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3852 reversing the comparison code if valid. */
3853 if (STORE_FLAG_VALUE == -1
3854 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3855 && (reversed = reversed_comparison (x, mode, XEXP (XEXP (x, 0), 0),
3856 XEXP (XEXP (x, 0), 1))))
3859 /* (not (ashiftrt foo C)) where C is the number of bits in FOO minus 1
3860 is (ge foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3861 perform the above simplification. */
3863 if (STORE_FLAG_VALUE == -1
3864 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3865 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3866 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3867 return gen_rtx_GE (mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3869 /* Apply De Morgan's laws to reduce number of patterns for machines
3870 with negating logical insns (and-not, nand, etc.). If result has
3871 only one NOT, put it first, since that is how the patterns are
3874 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3876 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3877 enum machine_mode op_mode;
3879 op_mode = GET_MODE (in1);
3880 in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);
3882 op_mode = GET_MODE (in2);
3883 if (op_mode == VOIDmode)
3885 in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);
3887 if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
3890 in2 = in1; in1 = tem;
3893 return gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3899 /* (neg (plus X 1)) can become (not X). */
3900 if (GET_CODE (XEXP (x, 0)) == PLUS
3901 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3902 return gen_rtx_NOT (mode, XEXP (XEXP (x, 0), 0));
3904 /* Similarly, (neg (not X)) is (plus X 1). */
3905 if (GET_CODE (XEXP (x, 0)) == NOT)
3906 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
3908 /* (neg (minus X Y)) can become (minus Y X). */
3909 if (GET_CODE (XEXP (x, 0)) == MINUS
3910 && (! FLOAT_MODE_P (mode)
3911 /* x-y != -(y-x) with IEEE floating point. */
3912 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3913 || flag_unsafe_math_optimizations))
3914 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3915 XEXP (XEXP (x, 0), 0));
3917 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3918 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3919 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3920 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3922 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3923 if we can then eliminate the NEG (e.g.,
3924 if the operand is a constant). */
3926 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3928 temp = simplify_unary_operation (NEG, mode,
3929 XEXP (XEXP (x, 0), 0), mode);
3931 return gen_binary (ASHIFT, mode, temp, XEXP (XEXP (x, 0), 1));
3934 temp = expand_compound_operation (XEXP (x, 0));
3936 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3937 replaced by (lshiftrt X C). This will convert
3938 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3940 if (GET_CODE (temp) == ASHIFTRT
3941 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3942 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3943 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3944 INTVAL (XEXP (temp, 1)));
3946 /* If X has only a single bit that might be nonzero, say, bit I, convert
3947 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3948 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3949 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3950 or a SUBREG of one since we'd be making the expression more
3951 complex if it was just a register. */
3953 if (GET_CODE (temp) != REG
3954 && ! (GET_CODE (temp) == SUBREG
3955 && GET_CODE (SUBREG_REG (temp)) == REG)
3956 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3958 rtx temp1 = simplify_shift_const
3959 (NULL_RTX, ASHIFTRT, mode,
3960 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3961 GET_MODE_BITSIZE (mode) - 1 - i),
3962 GET_MODE_BITSIZE (mode) - 1 - i);
3964 /* If all we did was surround TEMP with the two shifts, we
3965 haven't improved anything, so don't use it. Otherwise,
3966 we are better off with TEMP1. */
3967 if (GET_CODE (temp1) != ASHIFTRT
3968 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3969 || XEXP (XEXP (temp1, 0), 0) != temp)
3975 /* We can't handle truncation to a partial integer mode here
3976 because we don't know the real bitsize of the partial
3978 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3981 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3982 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3983 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
3985 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3986 GET_MODE_MASK (mode), NULL_RTX, 0));
3988 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
3989 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3990 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3991 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3992 return XEXP (XEXP (x, 0), 0);
3994 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
3995 (OP:SI foo:SI) if OP is NEG or ABS. */
3996 if ((GET_CODE (XEXP (x, 0)) == ABS
3997 || GET_CODE (XEXP (x, 0)) == NEG)
3998 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
3999 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
4000 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4001 return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4002 XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
4004 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
4006 if (GET_CODE (XEXP (x, 0)) == SUBREG
4007 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
4008 && subreg_lowpart_p (XEXP (x, 0)))
4009 return SUBREG_REG (XEXP (x, 0));
4011 /* If we know that the value is already truncated, we can
4012 replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
4013 is nonzero for the corresponding modes. But don't do this
4014 for an (LSHIFTRT (MULT ...)) since this will cause problems
4015 with the umulXi3_highpart patterns. */
4016 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4017 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4018 && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4019 >= GET_MODE_BITSIZE (mode) + 1
4020 && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4021 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
4022 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4024 /* A truncate of a comparison can be replaced with a subreg if
4025 STORE_FLAG_VALUE permits. This is like the previous test,
4026 but it works even if the comparison is done in a mode larger
4027 than HOST_BITS_PER_WIDE_INT. */
4028 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4029 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4030 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
4031 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4033 /* Similarly, a truncate of a register whose value is a
4034 comparison can be replaced with a subreg if STORE_FLAG_VALUE
4036 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4037 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
4038 && (temp = get_last_value (XEXP (x, 0)))
4039 && GET_RTX_CLASS (GET_CODE (temp)) == '<')
4040 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4044 case FLOAT_TRUNCATE:
4045 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
4046 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4047 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4048 return XEXP (XEXP (x, 0), 0);
4050 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
4051 (OP:SF foo:SF) if OP is NEG or ABS. */
4052 if ((GET_CODE (XEXP (x, 0)) == ABS
4053 || GET_CODE (XEXP (x, 0)) == NEG)
4054 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
4055 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4056 return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4057 XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
4059 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
4060 is (float_truncate:SF x). */
4061 if (GET_CODE (XEXP (x, 0)) == SUBREG
4062 && subreg_lowpart_p (XEXP (x, 0))
4063 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
4064 return SUBREG_REG (XEXP (x, 0));
4069 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
4070 using cc0, in which case we want to leave it as a COMPARE
4071 so we can distinguish it from a register-register-copy. */
4072 if (XEXP (x, 1) == const0_rtx)
4075 /* In IEEE floating point, x-0 is not the same as x. */
4076 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4077 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
4078 || flag_unsafe_math_optimizations)
4079 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
4085 /* (const (const X)) can become (const X). Do it this way rather than
4086 returning the inner CONST since CONST can be shared with a
4088 if (GET_CODE (XEXP (x, 0)) == CONST)
4089 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4094 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
4095 can add in an offset. find_split_point will split this address up
4096 again if it doesn't match. */
4097 if (GET_CODE (XEXP (x, 0)) == HIGH
4098 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
4104 /* If we have (plus (plus (A const) B)), associate it so that CONST is
4105 outermost. That's because that's the way indexed addresses are
4106 supposed to appear. This code used to check many more cases, but
4107 they are now checked elsewhere. */
4108 if (GET_CODE (XEXP (x, 0)) == PLUS
4109 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
4110 return gen_binary (PLUS, mode,
4111 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
4113 XEXP (XEXP (x, 0), 1));
4115 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
4116 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
4117 bit-field and can be replaced by either a sign_extend or a
4118 sign_extract. The `and' may be a zero_extend and the two
4119 <c>, -<c> constants may be reversed. */
4120 if (GET_CODE (XEXP (x, 0)) == XOR
4121 && GET_CODE (XEXP (x, 1)) == CONST_INT
4122 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4123 && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
4124 && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
4125 || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4126 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4127 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
4128 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4129 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4130 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
4131 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
4132 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
4133 == (unsigned int) i + 1))))
4134 return simplify_shift_const
4135 (NULL_RTX, ASHIFTRT, mode,
4136 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4137 XEXP (XEXP (XEXP (x, 0), 0), 0),
4138 GET_MODE_BITSIZE (mode) - (i + 1)),
4139 GET_MODE_BITSIZE (mode) - (i + 1));
4141 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
4142 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
4143 is 1. This produces better code than the alternative immediately
4145 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4146 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
4147 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))
4148 && (reversed = reversed_comparison (XEXP (x, 0), mode,
4149 XEXP (XEXP (x, 0), 0),
4150 XEXP (XEXP (x, 0), 1))))
4152 simplify_gen_unary (NEG, mode, reversed, mode);
4154 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
4155 can become (ashiftrt (ashift (xor x 1) C) C) where C is
4156 the bitsize of the mode - 1. This allows simplification of
4157 "a = (b & 8) == 0;" */
4158 if (XEXP (x, 1) == constm1_rtx
4159 && GET_CODE (XEXP (x, 0)) != REG
4160 && ! (GET_CODE (XEXP (x,0)) == SUBREG
4161 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
4162 && nonzero_bits (XEXP (x, 0), mode) == 1)
4163 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
4164 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4165 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
4166 GET_MODE_BITSIZE (mode) - 1),
4167 GET_MODE_BITSIZE (mode) - 1);
4169 /* If we are adding two things that have no bits in common, convert
4170 the addition into an IOR. This will often be further simplified,
4171 for example in cases like ((a & 1) + (a & 2)), which can
4174 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4175 && (nonzero_bits (XEXP (x, 0), mode)
4176 & nonzero_bits (XEXP (x, 1), mode)) == 0)
4178 /* Try to simplify the expression further. */
4179 rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
4180 temp = combine_simplify_rtx (tor, mode, last, in_dest);
4182 /* If we could, great. If not, do not go ahead with the IOR
4183 replacement, since PLUS appears in many special purpose
4184 address arithmetic instructions. */
4185 if (GET_CODE (temp) != CLOBBER && temp != tor)
4191 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
4192 by reversing the comparison code if valid. */
4193 if (STORE_FLAG_VALUE == 1
4194 && XEXP (x, 0) == const1_rtx
4195 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
4196 && (reversed = reversed_comparison (XEXP (x, 1), mode,
4197 XEXP (XEXP (x, 1), 0),
4198 XEXP (XEXP (x, 1), 1))))
4201 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4202 (and <foo> (const_int pow2-1)) */
4203 if (GET_CODE (XEXP (x, 1)) == AND
4204 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4205 && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
4206 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4207 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
4208 -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
4210 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
4212 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
4213 return gen_binary (MINUS, mode,
4214 gen_binary (MINUS, mode, XEXP (x, 0),
4215 XEXP (XEXP (x, 1), 0)),
4216 XEXP (XEXP (x, 1), 1));
4220 /* If we have (mult (plus A B) C), apply the distributive law and then
4221 the inverse distributive law to see if things simplify. This
4222 occurs mostly in addresses, often when unrolling loops. */
4224 if (GET_CODE (XEXP (x, 0)) == PLUS)
4226 x = apply_distributive_law
4227 (gen_binary (PLUS, mode,
4228 gen_binary (MULT, mode,
4229 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4230 gen_binary (MULT, mode,
4231 XEXP (XEXP (x, 0), 1),
4232 copy_rtx (XEXP (x, 1)))));
4234 if (GET_CODE (x) != MULT)
4237 /* Try simplify a*(b/c) as (a*b)/c. */
4238 if (FLOAT_MODE_P (mode) && flag_unsafe_math_optimizations
4239 && GET_CODE (XEXP (x, 0)) == DIV)
4241 rtx tem = simplify_binary_operation (MULT, mode,
4242 XEXP (XEXP (x, 0), 0),
4245 return gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
4250 /* If this is a divide by a power of two, treat it as a shift if
4251 its first operand is a shift. */
4252 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4253 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4254 && (GET_CODE (XEXP (x, 0)) == ASHIFT
4255 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4256 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4257 || GET_CODE (XEXP (x, 0)) == ROTATE
4258 || GET_CODE (XEXP (x, 0)) == ROTATERT))
4259 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
4263 case GT: case GTU: case GE: case GEU:
4264 case LT: case LTU: case LE: case LEU:
4265 case UNEQ: case LTGT:
4266 case UNGT: case UNGE:
4267 case UNLT: case UNLE:
4268 case UNORDERED: case ORDERED:
4269 /* If the first operand is a condition code, we can't do anything
4271 if (GET_CODE (XEXP (x, 0)) == COMPARE
4272 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4274 && XEXP (x, 0) != cc0_rtx
4278 rtx op0 = XEXP (x, 0);
4279 rtx op1 = XEXP (x, 1);
4280 enum rtx_code new_code;
4282 if (GET_CODE (op0) == COMPARE)
4283 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4285 /* Simplify our comparison, if possible. */
4286 new_code = simplify_comparison (code, &op0, &op1);
4288 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
4289 if only the low-order bit is possibly nonzero in X (such as when
4290 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
4291 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
4292 known to be either 0 or -1, NE becomes a NEG and EQ becomes
4295 Remove any ZERO_EXTRACT we made when thinking this was a
4296 comparison. It may now be simpler to use, e.g., an AND. If a
4297 ZERO_EXTRACT is indeed appropriate, it will be placed back by
4298 the call to make_compound_operation in the SET case. */
4300 if (STORE_FLAG_VALUE == 1
4301 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4302 && op1 == const0_rtx
4303 && mode == GET_MODE (op0)
4304 && nonzero_bits (op0, mode) == 1)
4305 return gen_lowpart_for_combine (mode,
4306 expand_compound_operation (op0));
4308 else if (STORE_FLAG_VALUE == 1
4309 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4310 && op1 == const0_rtx
4311 && mode == GET_MODE (op0)
4312 && (num_sign_bit_copies (op0, mode)
4313 == GET_MODE_BITSIZE (mode)))
4315 op0 = expand_compound_operation (op0);
4316 return simplify_gen_unary (NEG, mode,
4317 gen_lowpart_for_combine (mode, op0),
4321 else if (STORE_FLAG_VALUE == 1
4322 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4323 && op1 == const0_rtx
4324 && mode == GET_MODE (op0)
4325 && nonzero_bits (op0, mode) == 1)
4327 op0 = expand_compound_operation (op0);
4328 return gen_binary (XOR, mode,
4329 gen_lowpart_for_combine (mode, op0),
4333 else if (STORE_FLAG_VALUE == 1
4334 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4335 && op1 == const0_rtx
4336 && mode == GET_MODE (op0)
4337 && (num_sign_bit_copies (op0, mode)
4338 == GET_MODE_BITSIZE (mode)))
4340 op0 = expand_compound_operation (op0);
4341 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
4344 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4346 if (STORE_FLAG_VALUE == -1
4347 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4348 && op1 == const0_rtx
4349 && (num_sign_bit_copies (op0, mode)
4350 == GET_MODE_BITSIZE (mode)))
4351 return gen_lowpart_for_combine (mode,
4352 expand_compound_operation (op0));
4354 else if (STORE_FLAG_VALUE == -1
4355 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4356 && op1 == const0_rtx
4357 && mode == GET_MODE (op0)
4358 && nonzero_bits (op0, mode) == 1)
4360 op0 = expand_compound_operation (op0);
4361 return simplify_gen_unary (NEG, mode,
4362 gen_lowpart_for_combine (mode, op0),
4366 else if (STORE_FLAG_VALUE == -1
4367 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4368 && op1 == const0_rtx
4369 && mode == GET_MODE (op0)
4370 && (num_sign_bit_copies (op0, mode)
4371 == GET_MODE_BITSIZE (mode)))
4373 op0 = expand_compound_operation (op0);
4374 return simplify_gen_unary (NOT, mode,
4375 gen_lowpart_for_combine (mode, op0),
4379 /* If X is 0/1, (eq X 0) is X-1. */
4380 else if (STORE_FLAG_VALUE == -1
4381 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4382 && op1 == const0_rtx
4383 && mode == GET_MODE (op0)
4384 && nonzero_bits (op0, mode) == 1)
4386 op0 = expand_compound_operation (op0);
4387 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
4390 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4391 one bit that might be nonzero, we can convert (ne x 0) to
4392 (ashift x c) where C puts the bit in the sign bit. Remove any
4393 AND with STORE_FLAG_VALUE when we are done, since we are only
4394 going to test the sign bit. */
4395 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4396 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4397 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
4398 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE(mode)-1))
4399 && op1 == const0_rtx
4400 && mode == GET_MODE (op0)
4401 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
4403 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4404 expand_compound_operation (op0),
4405 GET_MODE_BITSIZE (mode) - 1 - i);
4406 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4412 /* If the code changed, return a whole new comparison. */
4413 if (new_code != code)
4414 return gen_rtx_fmt_ee (new_code, mode, op0, op1);
4416 /* Otherwise, keep this operation, but maybe change its operands.
4417 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4418 SUBST (XEXP (x, 0), op0);
4419 SUBST (XEXP (x, 1), op1);
4424 return simplify_if_then_else (x);
4430 /* If we are processing SET_DEST, we are done. */
4434 return expand_compound_operation (x);
4437 return simplify_set (x);
4442 return simplify_logical (x, last);
4445 /* (abs (neg <foo>)) -> (abs <foo>) */
4446 if (GET_CODE (XEXP (x, 0)) == NEG)
4447 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4449 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4451 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4454 /* If operand is something known to be positive, ignore the ABS. */
4455 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4456 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4457 <= HOST_BITS_PER_WIDE_INT)
4458 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4459 & ((HOST_WIDE_INT) 1
4460 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4464 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4465 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4466 return gen_rtx_NEG (mode, XEXP (x, 0));
4471 /* (ffs (*_extend <X>)) = (ffs <X>) */
4472 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4473 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4474 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4478 /* (float (sign_extend <X>)) = (float <X>). */
4479 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4480 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4488 /* If this is a shift by a constant amount, simplify it. */
4489 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4490 return simplify_shift_const (x, code, mode, XEXP (x, 0),
4491 INTVAL (XEXP (x, 1)));
4493 #ifdef SHIFT_COUNT_TRUNCATED
4494 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4496 force_to_mode (XEXP (x, 1), GET_MODE (x),
4498 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4507 rtx op0 = XEXP (x, 0);
4508 rtx op1 = XEXP (x, 1);
4511 if (GET_CODE (op1) != PARALLEL)
4513 len = XVECLEN (op1, 0);
4515 && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
4516 && GET_CODE (op0) == VEC_CONCAT)
4518 int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));
4520 /* Try to find the element in the VEC_CONCAT. */
4523 if (GET_MODE (op0) == GET_MODE (x))
4525 if (GET_CODE (op0) == VEC_CONCAT)
4527 HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
4528 if (op0_size < offset)
4529 op0 = XEXP (op0, 0);
4533 op0 = XEXP (op0, 1);
4551 /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
4554 simplify_if_then_else (x)
4557 enum machine_mode mode = GET_MODE (x);
4558 rtx cond = XEXP (x, 0);
4559 rtx true_rtx = XEXP (x, 1);
4560 rtx false_rtx = XEXP (x, 2);
4561 enum rtx_code true_code = GET_CODE (cond);
4562 int comparison_p = GET_RTX_CLASS (true_code) == '<';
4565 enum rtx_code false_code;
4568 /* Simplify storing of the truth value. */
4569 if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
4570 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
4572 /* Also when the truth value has to be reversed. */
4574 && true_rtx == const0_rtx && false_rtx == const_true_rtx
4575 && (reversed = reversed_comparison (cond, mode, XEXP (cond, 0),
4579 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4580 in it is being compared against certain values. Get the true and false
4581 comparisons and see if that says anything about the value of each arm. */
4584 && ((false_code = combine_reversed_comparison_code (cond))
4586 && GET_CODE (XEXP (cond, 0)) == REG)
4589 rtx from = XEXP (cond, 0);
4590 rtx true_val = XEXP (cond, 1);
4591 rtx false_val = true_val;
4594 /* If FALSE_CODE is EQ, swap the codes and arms. */
4596 if (false_code == EQ)
4598 swapped = 1, true_code = EQ, false_code = NE;
4599 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4602 /* If we are comparing against zero and the expression being tested has
4603 only a single bit that might be nonzero, that is its value when it is
4604 not equal to zero. Similarly if it is known to be -1 or 0. */
4606 if (true_code == EQ && true_val == const0_rtx
4607 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4608 false_code = EQ, false_val = GEN_INT (nzb);
4609 else if (true_code == EQ && true_val == const0_rtx
4610 && (num_sign_bit_copies (from, GET_MODE (from))
4611 == GET_MODE_BITSIZE (GET_MODE (from))))
4612 false_code = EQ, false_val = constm1_rtx;
4614 /* Now simplify an arm if we know the value of the register in the
4615 branch and it is used in the arm. Be careful due to the potential
4616 of locally-shared RTL. */
4618 if (reg_mentioned_p (from, true_rtx))
4619 true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
4621 pc_rtx, pc_rtx, 0, 0);
4622 if (reg_mentioned_p (from, false_rtx))
4623 false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
4625 pc_rtx, pc_rtx, 0, 0);
4627 SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
4628 SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
4630 true_rtx = XEXP (x, 1);
4631 false_rtx = XEXP (x, 2);
4632 true_code = GET_CODE (cond);
4635 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4636 reversed, do so to avoid needing two sets of patterns for
4637 subtract-and-branch insns. Similarly if we have a constant in the true
4638 arm, the false arm is the same as the first operand of the comparison, or
4639 the false arm is more complicated than the true arm. */
4642 && combine_reversed_comparison_code (cond) != UNKNOWN
4643 && (true_rtx == pc_rtx
4644 || (CONSTANT_P (true_rtx)
4645 && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
4646 || true_rtx == const0_rtx
4647 || (GET_RTX_CLASS (GET_CODE (true_rtx)) == 'o'
4648 && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4649 || (GET_CODE (true_rtx) == SUBREG
4650 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true_rtx))) == 'o'
4651 && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4652 || reg_mentioned_p (true_rtx, false_rtx)
4653 || rtx_equal_p (false_rtx, XEXP (cond, 0))))
4655 true_code = reversed_comparison_code (cond, NULL);
4657 reversed_comparison (cond, GET_MODE (cond), XEXP (cond, 0),
4660 SUBST (XEXP (x, 1), false_rtx);
4661 SUBST (XEXP (x, 2), true_rtx);
4663 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4666 /* It is possible that the conditional has been simplified out. */
4667 true_code = GET_CODE (cond);
4668 comparison_p = GET_RTX_CLASS (true_code) == '<';
4671 /* If the two arms are identical, we don't need the comparison. */
4673 if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
4676 /* Convert a == b ? b : a to "a". */
4677 if (true_code == EQ && ! side_effects_p (cond)
4678 && (! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
4679 && rtx_equal_p (XEXP (cond, 0), false_rtx)
4680 && rtx_equal_p (XEXP (cond, 1), true_rtx))
4682 else if (true_code == NE && ! side_effects_p (cond)
4683 && (! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
4684 && rtx_equal_p (XEXP (cond, 0), true_rtx)
4685 && rtx_equal_p (XEXP (cond, 1), false_rtx))
4688 /* Look for cases where we have (abs x) or (neg (abs X)). */
4690 if (GET_MODE_CLASS (mode) == MODE_INT
4691 && GET_CODE (false_rtx) == NEG
4692 && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
4694 && rtx_equal_p (true_rtx, XEXP (cond, 0))
4695 && ! side_effects_p (true_rtx))
4700 return simplify_gen_unary (ABS, mode, true_rtx, mode);
4704 simplify_gen_unary (NEG, mode,
4705 simplify_gen_unary (ABS, mode, true_rtx, mode),
4711 /* Look for MIN or MAX. */
4713 if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
4715 && rtx_equal_p (XEXP (cond, 0), true_rtx)
4716 && rtx_equal_p (XEXP (cond, 1), false_rtx)
4717 && ! side_effects_p (cond))
4722 return gen_binary (SMAX, mode, true_rtx, false_rtx);
4725 return gen_binary (SMIN, mode, true_rtx, false_rtx);
4728 return gen_binary (UMAX, mode, true_rtx, false_rtx);
4731 return gen_binary (UMIN, mode, true_rtx, false_rtx);
4736 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4737 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4738 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4739 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4740 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4741 neither 1 or -1, but it isn't worth checking for. */
4743 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4744 && comparison_p && mode != VOIDmode && ! side_effects_p (x))
4746 rtx t = make_compound_operation (true_rtx, SET);
4747 rtx f = make_compound_operation (false_rtx, SET);
4748 rtx cond_op0 = XEXP (cond, 0);
4749 rtx cond_op1 = XEXP (cond, 1);
4750 enum rtx_code op = NIL, extend_op = NIL;
4751 enum machine_mode m = mode;
4752 rtx z = 0, c1 = NULL_RTX;
4754 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4755 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4756 || GET_CODE (t) == ASHIFT
4757 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4758 && rtx_equal_p (XEXP (t, 0), f))
4759 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4761 /* If an identity-zero op is commutative, check whether there
4762 would be a match if we swapped the operands. */
4763 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4764 || GET_CODE (t) == XOR)
4765 && rtx_equal_p (XEXP (t, 1), f))
4766 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4767 else if (GET_CODE (t) == SIGN_EXTEND
4768 && (GET_CODE (XEXP (t, 0)) == PLUS
4769 || GET_CODE (XEXP (t, 0)) == MINUS
4770 || GET_CODE (XEXP (t, 0)) == IOR
4771 || GET_CODE (XEXP (t, 0)) == XOR
4772 || GET_CODE (XEXP (t, 0)) == ASHIFT
4773 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4774 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4775 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4776 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4777 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4778 && (num_sign_bit_copies (f, GET_MODE (f))
4779 > (GET_MODE_BITSIZE (mode)
4780 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4782 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4783 extend_op = SIGN_EXTEND;
4784 m = GET_MODE (XEXP (t, 0));
4786 else if (GET_CODE (t) == SIGN_EXTEND
4787 && (GET_CODE (XEXP (t, 0)) == PLUS
4788 || GET_CODE (XEXP (t, 0)) == IOR
4789 || GET_CODE (XEXP (t, 0)) == XOR)
4790 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4791 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4792 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4793 && (num_sign_bit_copies (f, GET_MODE (f))
4794 > (GET_MODE_BITSIZE (mode)
4795 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4797 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4798 extend_op = SIGN_EXTEND;
4799 m = GET_MODE (XEXP (t, 0));
4801 else if (GET_CODE (t) == ZERO_EXTEND
4802 && (GET_CODE (XEXP (t, 0)) == PLUS
4803 || GET_CODE (XEXP (t, 0)) == MINUS
4804 || GET_CODE (XEXP (t, 0)) == IOR
4805 || GET_CODE (XEXP (t, 0)) == XOR
4806 || GET_CODE (XEXP (t, 0)) == ASHIFT
4807 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4808 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4809 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4810 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4811 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4812 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4813 && ((nonzero_bits (f, GET_MODE (f))
4814 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4817 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4818 extend_op = ZERO_EXTEND;
4819 m = GET_MODE (XEXP (t, 0));
4821 else if (GET_CODE (t) == ZERO_EXTEND
4822 && (GET_CODE (XEXP (t, 0)) == PLUS
4823 || GET_CODE (XEXP (t, 0)) == IOR
4824 || GET_CODE (XEXP (t, 0)) == XOR)
4825 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4826 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4827 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4828 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4829 && ((nonzero_bits (f, GET_MODE (f))
4830 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4833 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4834 extend_op = ZERO_EXTEND;
4835 m = GET_MODE (XEXP (t, 0));
4840 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4841 pc_rtx, pc_rtx, 0, 0);
4842 temp = gen_binary (MULT, m, temp,
4843 gen_binary (MULT, m, c1, const_true_rtx));
4844 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4845 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4847 if (extend_op != NIL)
4848 temp = simplify_gen_unary (extend_op, mode, temp, m);
4854 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4855 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4856 negation of a single bit, we can convert this operation to a shift. We
4857 can actually do this more generally, but it doesn't seem worth it. */
4859 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4860 && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
4861 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4862 && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
4863 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4864 == GET_MODE_BITSIZE (mode))
4865 && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
4867 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4868 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
4873 /* Simplify X, a SET expression. Return the new expression. */
4879 rtx src = SET_SRC (x);
4880 rtx dest = SET_DEST (x);
4881 enum machine_mode mode
4882 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4886 /* (set (pc) (return)) gets written as (return). */
4887 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4890 /* Now that we know for sure which bits of SRC we are using, see if we can
4891 simplify the expression for the object knowing that we only need the
4894 if (GET_MODE_CLASS (mode) == MODE_INT)
4896 src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
4897 SUBST (SET_SRC (x), src);
4900 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4901 the comparison result and try to simplify it unless we already have used
4902 undobuf.other_insn. */
4903 if ((GET_CODE (src) == COMPARE
4908 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4909 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4910 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4911 && rtx_equal_p (XEXP (*cc_use, 0), dest))
4913 enum rtx_code old_code = GET_CODE (*cc_use);
4914 enum rtx_code new_code;
4916 int other_changed = 0;
4917 enum machine_mode compare_mode = GET_MODE (dest);
4919 if (GET_CODE (src) == COMPARE)
4920 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4922 op0 = src, op1 = const0_rtx;
4924 /* Simplify our comparison, if possible. */
4925 new_code = simplify_comparison (old_code, &op0, &op1);
4927 #ifdef EXTRA_CC_MODES
4928 /* If this machine has CC modes other than CCmode, check to see if we
4929 need to use a different CC mode here. */
4930 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
4931 #endif /* EXTRA_CC_MODES */
4933 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
4934 /* If the mode changed, we have to change SET_DEST, the mode in the
4935 compare, and the mode in the place SET_DEST is used. If SET_DEST is
4936 a hard register, just build new versions with the proper mode. If it
4937 is a pseudo, we lose unless it is only time we set the pseudo, in
4938 which case we can safely change its mode. */
4939 if (compare_mode != GET_MODE (dest))
4941 unsigned int regno = REGNO (dest);
4942 rtx new_dest = gen_rtx_REG (compare_mode, regno);
4944 if (regno < FIRST_PSEUDO_REGISTER
4945 || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
4947 if (regno >= FIRST_PSEUDO_REGISTER)
4948 SUBST (regno_reg_rtx[regno], new_dest);
4950 SUBST (SET_DEST (x), new_dest);
4951 SUBST (XEXP (*cc_use, 0), new_dest);
4959 /* If the code changed, we have to build a new comparison in
4960 undobuf.other_insn. */
4961 if (new_code != old_code)
4963 unsigned HOST_WIDE_INT mask;
4965 SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
4968 /* If the only change we made was to change an EQ into an NE or
4969 vice versa, OP0 has only one bit that might be nonzero, and OP1
4970 is zero, check if changing the user of the condition code will
4971 produce a valid insn. If it won't, we can keep the original code
4972 in that insn by surrounding our operation with an XOR. */
4974 if (((old_code == NE && new_code == EQ)
4975 || (old_code == EQ && new_code == NE))
4976 && ! other_changed && op1 == const0_rtx
4977 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
4978 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
4980 rtx pat = PATTERN (other_insn), note = 0;
4982 if ((recog_for_combine (&pat, other_insn, ¬e) < 0
4983 && ! check_asm_operands (pat)))
4985 PUT_CODE (*cc_use, old_code);
4988 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
4996 undobuf.other_insn = other_insn;
4999 /* If we are now comparing against zero, change our source if
5000 needed. If we do not use cc0, we always have a COMPARE. */
5001 if (op1 == const0_rtx && dest == cc0_rtx)
5003 SUBST (SET_SRC (x), op0);
5009 /* Otherwise, if we didn't previously have a COMPARE in the
5010 correct mode, we need one. */
5011 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
5013 SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
5018 /* Otherwise, update the COMPARE if needed. */
5019 SUBST (XEXP (src, 0), op0);
5020 SUBST (XEXP (src, 1), op1);
5025 /* Get SET_SRC in a form where we have placed back any
5026 compound expressions. Then do the checks below. */
5027 src = make_compound_operation (src, SET);
5028 SUBST (SET_SRC (x), src);
5031 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
5032 and X being a REG or (subreg (reg)), we may be able to convert this to
5033 (set (subreg:m2 x) (op)).
5035 We can always do this if M1 is narrower than M2 because that means that
5036 we only care about the low bits of the result.
5038 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
5039 perform a narrower operation than requested since the high-order bits will
5040 be undefined. On machine where it is defined, this transformation is safe
5041 as long as M1 and M2 have the same number of words. */
5043 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5044 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
5045 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
5047 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5048 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
5049 #ifndef WORD_REGISTER_OPERATIONS
5050 && (GET_MODE_SIZE (GET_MODE (src))
5051 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5053 #ifdef CLASS_CANNOT_CHANGE_MODE
5054 && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
5055 && (TEST_HARD_REG_BIT
5056 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
5058 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (src),
5059 GET_MODE (SUBREG_REG (src))))
5061 && (GET_CODE (dest) == REG
5062 || (GET_CODE (dest) == SUBREG
5063 && GET_CODE (SUBREG_REG (dest)) == REG)))
5065 SUBST (SET_DEST (x),
5066 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
5068 SUBST (SET_SRC (x), SUBREG_REG (src));
5070 src = SET_SRC (x), dest = SET_DEST (x);
5073 #ifdef LOAD_EXTEND_OP
5074 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
5075 would require a paradoxical subreg. Replace the subreg with a
5076 zero_extend to avoid the reload that would otherwise be required. */
5078 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5079 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
5080 && SUBREG_BYTE (src) == 0
5081 && (GET_MODE_SIZE (GET_MODE (src))
5082 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5083 && GET_CODE (SUBREG_REG (src)) == MEM)
5086 gen_rtx (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
5087 GET_MODE (src), SUBREG_REG (src)));
5093 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
5094 are comparing an item known to be 0 or -1 against 0, use a logical
5095 operation instead. Check for one of the arms being an IOR of the other
5096 arm with some value. We compute three terms to be IOR'ed together. In
5097 practice, at most two will be nonzero. Then we do the IOR's. */
5099 if (GET_CODE (dest) != PC
5100 && GET_CODE (src) == IF_THEN_ELSE
5101 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
5102 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
5103 && XEXP (XEXP (src, 0), 1) == const0_rtx
5104 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
5105 #ifdef HAVE_conditional_move
5106 && ! can_conditionally_move_p (GET_MODE (src))
5108 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
5109 GET_MODE (XEXP (XEXP (src, 0), 0)))
5110 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
5111 && ! side_effects_p (src))
5113 rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
5114 ? XEXP (src, 1) : XEXP (src, 2));
5115 rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
5116 ? XEXP (src, 2) : XEXP (src, 1));
5117 rtx term1 = const0_rtx, term2, term3;
5119 if (GET_CODE (true_rtx) == IOR
5120 && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
5121 term1 = false_rtx, true_rtx = XEXP(true_rtx, 1), false_rtx = const0_rtx;
5122 else if (GET_CODE (true_rtx) == IOR
5123 && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
5124 term1 = false_rtx, true_rtx = XEXP(true_rtx, 0), false_rtx = const0_rtx;
5125 else if (GET_CODE (false_rtx) == IOR
5126 && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
5127 term1 = true_rtx, false_rtx = XEXP(false_rtx, 1), true_rtx = const0_rtx;
5128 else if (GET_CODE (false_rtx) == IOR
5129 && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
5130 term1 = true_rtx, false_rtx = XEXP(false_rtx, 0), true_rtx = const0_rtx;
5132 term2 = gen_binary (AND, GET_MODE (src),
5133 XEXP (XEXP (src, 0), 0), true_rtx);
5134 term3 = gen_binary (AND, GET_MODE (src),
5135 simplify_gen_unary (NOT, GET_MODE (src),
5136 XEXP (XEXP (src, 0), 0),
5141 gen_binary (IOR, GET_MODE (src),
5142 gen_binary (IOR, GET_MODE (src), term1, term2),
5148 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
5149 whole thing fail. */
5150 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
5152 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
5155 /* Convert this into a field assignment operation, if possible. */
5156 return make_field_assignment (x);
5159 /* Simplify, X, and AND, IOR, or XOR operation, and return the simplified
5160 result. LAST is nonzero if this is the last retry. */
5163 simplify_logical (x, last)
5167 enum machine_mode mode = GET_MODE (x);
5168 rtx op0 = XEXP (x, 0);
5169 rtx op1 = XEXP (x, 1);
5172 switch (GET_CODE (x))
5175 /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
5176 insn (and may simplify more). */
5177 if (GET_CODE (op0) == XOR
5178 && rtx_equal_p (XEXP (op0, 0), op1)
5179 && ! side_effects_p (op1))
5180 x = gen_binary (AND, mode,
5181 simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
5184 if (GET_CODE (op0) == XOR
5185 && rtx_equal_p (XEXP (op0, 1), op1)
5186 && ! side_effects_p (op1))
5187 x = gen_binary (AND, mode,
5188 simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
5191 /* Similarly for (~(A ^ B)) & A. */
5192 if (GET_CODE (op0) == NOT
5193 && GET_CODE (XEXP (op0, 0)) == XOR
5194 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
5195 && ! side_effects_p (op1))
5196 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
5198 if (GET_CODE (op0) == NOT
5199 && GET_CODE (XEXP (op0, 0)) == XOR
5200 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
5201 && ! side_effects_p (op1))
5202 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
5204 /* We can call simplify_and_const_int only if we don't lose
5205 any (sign) bits when converting INTVAL (op1) to
5206 "unsigned HOST_WIDE_INT". */
5207 if (GET_CODE (op1) == CONST_INT
5208 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5209 || INTVAL (op1) > 0))
5211 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
5213 /* If we have (ior (and (X C1) C2)) and the next restart would be
5214 the last, simplify this by making C1 as small as possible
5217 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
5218 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5219 && GET_CODE (op1) == CONST_INT)
5220 return gen_binary (IOR, mode,
5221 gen_binary (AND, mode, XEXP (op0, 0),
5222 GEN_INT (INTVAL (XEXP (op0, 1))
5223 & ~INTVAL (op1))), op1);
5225 if (GET_CODE (x) != AND)
5228 if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
5229 || GET_RTX_CLASS (GET_CODE (x)) == '2')
5230 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5233 /* Convert (A | B) & A to A. */
5234 if (GET_CODE (op0) == IOR
5235 && (rtx_equal_p (XEXP (op0, 0), op1)
5236 || rtx_equal_p (XEXP (op0, 1), op1))
5237 && ! side_effects_p (XEXP (op0, 0))
5238 && ! side_effects_p (XEXP (op0, 1)))
5241 /* In the following group of tests (and those in case IOR below),
5242 we start with some combination of logical operations and apply
5243 the distributive law followed by the inverse distributive law.
5244 Most of the time, this results in no change. However, if some of
5245 the operands are the same or inverses of each other, simplifications
5248 For example, (and (ior A B) (not B)) can occur as the result of
5249 expanding a bit field assignment. When we apply the distributive
5250 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
5251 which then simplifies to (and (A (not B))).
5253 If we have (and (ior A B) C), apply the distributive law and then
5254 the inverse distributive law to see if things simplify. */
5256 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
5258 x = apply_distributive_law
5259 (gen_binary (GET_CODE (op0), mode,
5260 gen_binary (AND, mode, XEXP (op0, 0), op1),
5261 gen_binary (AND, mode, XEXP (op0, 1),
5263 if (GET_CODE (x) != AND)
5267 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
5268 return apply_distributive_law
5269 (gen_binary (GET_CODE (op1), mode,
5270 gen_binary (AND, mode, XEXP (op1, 0), op0),
5271 gen_binary (AND, mode, XEXP (op1, 1),
5274 /* Similarly, taking advantage of the fact that
5275 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
5277 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
5278 return apply_distributive_law
5279 (gen_binary (XOR, mode,
5280 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
5281 gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)),
5284 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
5285 return apply_distributive_law
5286 (gen_binary (XOR, mode,
5287 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
5288 gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)), XEXP (op0, 1))));
5292 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
5293 if (GET_CODE (op1) == CONST_INT
5294 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5295 && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
5298 /* Convert (A & B) | A to A. */
5299 if (GET_CODE (op0) == AND
5300 && (rtx_equal_p (XEXP (op0, 0), op1)
5301 || rtx_equal_p (XEXP (op0, 1), op1))
5302 && ! side_effects_p (XEXP (op0, 0))
5303 && ! side_effects_p (XEXP (op0, 1)))
5306 /* If we have (ior (and A B) C), apply the distributive law and then
5307 the inverse distributive law to see if things simplify. */
5309 if (GET_CODE (op0) == AND)
5311 x = apply_distributive_law
5312 (gen_binary (AND, mode,
5313 gen_binary (IOR, mode, XEXP (op0, 0), op1),
5314 gen_binary (IOR, mode, XEXP (op0, 1),
5317 if (GET_CODE (x) != IOR)
5321 if (GET_CODE (op1) == AND)
5323 x = apply_distributive_law
5324 (gen_binary (AND, mode,
5325 gen_binary (IOR, mode, XEXP (op1, 0), op0),
5326 gen_binary (IOR, mode, XEXP (op1, 1),
5329 if (GET_CODE (x) != IOR)
5333 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5334 mode size to (rotate A CX). */
5336 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
5337 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
5338 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5339 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5340 && GET_CODE (XEXP (op1, 1)) == CONST_INT
5341 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
5342 == GET_MODE_BITSIZE (mode)))
5343 return gen_rtx_ROTATE (mode, XEXP (op0, 0),
5344 (GET_CODE (op0) == ASHIFT
5345 ? XEXP (op0, 1) : XEXP (op1, 1)));
5347 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5348 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
5349 does not affect any of the bits in OP1, it can really be done
5350 as a PLUS and we can associate. We do this by seeing if OP1
5351 can be safely shifted left C bits. */
5352 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5353 && GET_CODE (XEXP (op0, 0)) == PLUS
5354 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5355 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5356 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5358 int count = INTVAL (XEXP (op0, 1));
5359 HOST_WIDE_INT mask = INTVAL (op1) << count;
5361 if (mask >> count == INTVAL (op1)
5362 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5364 SUBST (XEXP (XEXP (op0, 0), 1),
5365 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5372 /* If we are XORing two things that have no bits in common,
5373 convert them into an IOR. This helps to detect rotation encoded
5374 using those methods and possibly other simplifications. */
5376 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5377 && (nonzero_bits (op0, mode)
5378 & nonzero_bits (op1, mode)) == 0)
5379 return (gen_binary (IOR, mode, op0, op1));
5381 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5382 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5385 int num_negated = 0;
5387 if (GET_CODE (op0) == NOT)
5388 num_negated++, op0 = XEXP (op0, 0);
5389 if (GET_CODE (op1) == NOT)
5390 num_negated++, op1 = XEXP (op1, 0);
5392 if (num_negated == 2)
5394 SUBST (XEXP (x, 0), op0);
5395 SUBST (XEXP (x, 1), op1);
5397 else if (num_negated == 1)
5399 simplify_gen_unary (NOT, mode, gen_binary (XOR, mode, op0, op1),
5403 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
5404 correspond to a machine insn or result in further simplifications
5405 if B is a constant. */
5407 if (GET_CODE (op0) == AND
5408 && rtx_equal_p (XEXP (op0, 1), op1)
5409 && ! side_effects_p (op1))
5410 return gen_binary (AND, mode,
5411 simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
5414 else if (GET_CODE (op0) == AND
5415 && rtx_equal_p (XEXP (op0, 0), op1)
5416 && ! side_effects_p (op1))
5417 return gen_binary (AND, mode,
5418 simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
5421 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
5422 comparison if STORE_FLAG_VALUE is 1. */
5423 if (STORE_FLAG_VALUE == 1
5424 && op1 == const1_rtx
5425 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5426 && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5430 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5431 is (lt foo (const_int 0)), so we can perform the above
5432 simplification if STORE_FLAG_VALUE is 1. */
5434 if (STORE_FLAG_VALUE == 1
5435 && op1 == const1_rtx
5436 && GET_CODE (op0) == LSHIFTRT
5437 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5438 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5439 return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
5441 /* (xor (comparison foo bar) (const_int sign-bit))
5442 when STORE_FLAG_VALUE is the sign bit. */
5443 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5444 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5445 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5446 && op1 == const_true_rtx
5447 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5448 && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5461 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5462    operations" because they can be replaced with two more basic operations.
5463    ZERO_EXTEND is also considered "compound" because it can be replaced with
5464    an AND operation, which is simpler, though only one operation.
5466    The function expand_compound_operation is called with an rtx expression
5467    and will convert it to the appropriate shifts and AND operations,
5468    simplifying at each stage.
5470    The function make_compound_operation is called to convert an expression
5471    consisting of shifts and ANDs into the equivalent compound expression.
5472    It is the inverse of this function, loosely speaking. */
5475 expand_compound_operation (x)
/* NOTE(review): the return type, the declaration of parameter X, and the
   declarations of the locals TEM and UNSIGNEDP are elided from this
   excerpt; several case labels and braces in the switch are missing as
   well.  */
5478   unsigned HOST_WIDE_INT pos = 0, len;
5480   unsigned int modewidth;
5483   switch (GET_CODE (x))
5488       /* We can't necessarily use a const_int for a multiword mode;
5489 	 it depends on implicitly extending the value.
5490 	 Since we don't know the right way to extend it,
5491 	 we can't tell whether the implicit way is right.
5493 	 Even for a mode that is no wider than a const_int,
5494 	 we can't win, because we need to sign extend one of its bits through
5495 	 the rest of it, and we don't know which bit. */
5496       if (GET_CODE (XEXP (x, 0)) == CONST_INT)
5499       /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5500 	 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
5501 	 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5502 	 reloaded. If not for that, MEM's would very rarely be safe.
5504 	 Reject MODEs bigger than a word, because we might not be able
5505 	 to reference a two-register group starting with an arbitrary register
5506 	 (and currently gen_lowpart might crash for a SUBREG). */
5508       if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
5511       len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5512       /* If the inner object has VOIDmode (the only way this can happen
5513 	 is if it is an ASM_OPERANDS), we can't do anything since we don't
5514 	 know how much masking to do. */
5523       /* If the operand is a CLOBBER, just return it. */
5524       if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5527       if (GET_CODE (XEXP (x, 1)) != CONST_INT
5528 	  || GET_CODE (XEXP (x, 2)) != CONST_INT
5529 	  || GET_MODE (XEXP (x, 0)) == VOIDmode)
5532       len = INTVAL (XEXP (x, 1));
5533       pos = INTVAL (XEXP (x, 2));
5535       /* If this goes outside the object being extracted, replace the object
5536 	 with a (use (mem ...)) construct that only combine understands
5537 	 and is used only for this purpose. */
5538       if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
5539 	SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
      /* POS is canonicalized to count from the least significant bit.  */
5541       if (BITS_BIG_ENDIAN)
5542 	pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5549   /* Convert sign extension to zero extension, if we know that the high
5550      bit is not set, as this is easier to optimize. It will be converted
5551      back to cheaper alternative in make_extraction. */
5552   if (GET_CODE (x) == SIGN_EXTEND
5553       && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5554 	  && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5555 	       & ~(((unsigned HOST_WIDE_INT)
5556 		    GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5560       rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
5561       return expand_compound_operation (temp);
5564   /* We can optimize some special cases of ZERO_EXTEND. */
5565   if (GET_CODE (x) == ZERO_EXTEND)
5567       /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5568 	 know that the last value didn't have any inappropriate bits
5570       if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5571 	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5572 	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5573 	  && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5574 	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5575 	return XEXP (XEXP (x, 0), 0);
5577       /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5578       if (GET_CODE (XEXP (x, 0)) == SUBREG
5579 	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5580 	  && subreg_lowpart_p (XEXP (x, 0))
5581 	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5582 	  && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
5583 	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5584 	return SUBREG_REG (XEXP (x, 0));
5586       /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5587 	 is a comparison and STORE_FLAG_VALUE permits. This is like
5588 	 the first case, but it works even when GET_MODE (x) is larger
5589 	 than HOST_WIDE_INT. */
5590       if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5591 	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5592 	  && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5593 	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5594 	      <= HOST_BITS_PER_WIDE_INT)
5595 	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5596 	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5597 	return XEXP (XEXP (x, 0), 0);
5599       /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5600       if (GET_CODE (XEXP (x, 0)) == SUBREG
5601 	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5602 	  && subreg_lowpart_p (XEXP (x, 0))
5603 	  && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5604 	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5605 	      <= HOST_BITS_PER_WIDE_INT)
5606 	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5607 	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5608 	return SUBREG_REG (XEXP (x, 0));
5612   /* If we reach here, we want to return a pair of shifts. The inner
5613      shift is a left shift of BITSIZE - POS - LEN bits. The outer
5614      shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
5615      logical depending on the value of UNSIGNEDP.
5617      If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5618      converted into an AND of a shift.
5620      We must check for the case where the left shift would have a negative
5621      count. This can happen in a case like (x >> 31) & 255 on machines
5622      that can't shift by a constant. On those machines, we would first
5623      combine the shift with the AND to produce a variable-position
5624      extraction. Then the constant of 31 would be substituted in to produce
5625      such a position. */
5627   modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5628   if (modewidth + len >= pos)
5629     tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
5631 				simplify_shift_const (NULL_RTX, ASHIFT,
5634 						      modewidth - pos - len),
5637   else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5638     tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5639 				  simplify_shift_const (NULL_RTX, LSHIFTRT,
5642 				  ((HOST_WIDE_INT) 1 << len) - 1);
5644     /* Any other cases we can't handle. */
5647   /* If we couldn't do this for some reason, return the original
     expression (simplify_shift_const signals failure with a CLOBBER).  */
5649   if (GET_CODE (tem) == CLOBBER)
5655 /* X is a SET which contains an assignment of one object into
5656    a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5657    or certain SUBREGS). If possible, convert it into a series of
5660    We half-heartedly support variable positions, but do not at all
5661    support variable lengths. */
5664 expand_field_assignment (x)
/* NOTE(review): the return type, the declaration of parameter X, the
   locals INNER, LEN, and MASK, and the enclosing `while' loop's braces
   are elided from this sampled excerpt.  */
5668   rtx pos;			/* Always counts from low bit. */
5671   enum machine_mode compute_mode;
5673   /* Loop until we find something we can't simplify. */
      /* Case 1: (strict_low_part (subreg ...)) — the field is the whole
	 low part of the inner register.  */
5676       if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5677 	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5679 	  int byte_offset = SUBREG_BYTE (XEXP (SET_DEST (x), 0));
5681 	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5682 	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
5683 	  pos = GEN_INT (BITS_PER_WORD * (byte_offset / UNITS_PER_WORD));
      /* Case 2: (zero_extract dest len pos) with a constant length.  */
5685       else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5686 	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5688 	  inner = XEXP (SET_DEST (x), 0);
5689 	  len = INTVAL (XEXP (SET_DEST (x), 1));
5690 	  pos = XEXP (SET_DEST (x), 2);
5692 	  /* If the position is constant and spans the width of INNER,
5693 	     surround INNER with a USE to indicate this. */
5694 	  if (GET_CODE (pos) == CONST_INT
5695 	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
5696 	    inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
	  /* Canonicalize POS to count from the least significant bit.  */
5698 	  if (BITS_BIG_ENDIAN)
5700 	      if (GET_CODE (pos) == CONST_INT)
5701 		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5703 	      else if (GET_CODE (pos) == MINUS
5704 		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
5705 		       && (INTVAL (XEXP (pos, 1))
5706 			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5707 		/* If position is ADJUST - X, new position is X. */
5708 		pos = XEXP (pos, 0);
5710 		pos = gen_binary (MINUS, GET_MODE (pos),
5711 				  GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5717       /* A SUBREG between two modes that occupy the same numbers of words
5718 	 can be done by moving the SUBREG to the source. */
5719       else if (GET_CODE (SET_DEST (x)) == SUBREG
5720 	       /* We need SUBREGs to compute nonzero_bits properly. */
5721 	       && nonzero_sign_valid
5722 	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
5723 		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5724 		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
5725 			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
5727 	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
5728 			   gen_lowpart_for_combine
5729 			   (GET_MODE (SUBREG_REG (SET_DEST (x))),
      /* Strip lowpart SUBREGs so the arithmetic below is done in the
	 innermost object's mode.  */
5736       while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5737 	inner = SUBREG_REG (inner);
5739       compute_mode = GET_MODE (inner);
5741       /* Don't attempt bitwise arithmetic on non-integral modes. */
5742       if (! INTEGRAL_MODE_P (compute_mode))
5744 	  enum machine_mode imode;
5746 	  /* Something is probably seriously wrong if this matches. */
5747 	  if (! FLOAT_MODE_P (compute_mode))
5750 	  /* Try to find an integral mode to pun with. */
5751 	  imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
5752 	  if (imode == BLKmode)
5755 	  compute_mode = imode;
5756 	  inner = gen_lowpart_for_combine (imode, inner);
5759       /* Compute a mask of LEN bits, if we can do this on the host machine.
	 NOTE(review): the else arm for len >= HOST_BITS_PER_WIDE_INT is
	 elided from this excerpt.  */
5760       if (len < HOST_BITS_PER_WIDE_INT)
5761 	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
5765       /* Now compute the equivalent expression. Make a copy of INNER
5766 	 for the SET_DEST in case it is a MEM into which we will substitute;
5767 	 we don't want shared RTL in that case.
	 The rewritten SET is (inner & ~(mask << pos)) | ((src & mask) << pos),
	 i.e. clear the field, then OR in the shifted source bits.  */
5769 	(VOIDmode, copy_rtx (inner),
5770 	 gen_binary (IOR, compute_mode,
5771 		     gen_binary (AND, compute_mode,
5772 				 simplify_gen_unary (NOT, compute_mode,
5778 		     gen_binary (ASHIFT, compute_mode,
5779 				 gen_binary (AND, compute_mode,
5780 					     gen_lowpart_for_combine
5781 					     (compute_mode, SET_SRC (x)),
5789 /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
5790 it is an RTX that represents a variable starting position; otherwise,
5791 POS is the (constant) starting bit position (counted from the LSB).
5793 INNER may be a USE. This will occur when we started with a bitfield
5794 that went outside the boundary of the object in memory, which is
5795 allowed on most machines. To isolate this case, we produce a USE
5796 whose mode is wide enough and surround the MEM with it. The only
5797 code that understands the USE is this routine. If it is not removed,
5798 it will cause the resulting insn not to match.
5800 UNSIGNEDP is non-zero for an unsigned reference and zero for a
5803 IN_DEST is non-zero if this is a reference in the destination of a
5804 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
5805 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5808 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
5809 ZERO_EXTRACT should be built even for bits starting at bit 0.
5811 MODE is the desired mode of the result (if IN_DEST == 0).
5813 The result is an RTX for the extraction or NULL_RTX if the target
5817 make_extraction (mode, inner, pos, pos_rtx, len,
5818 unsignedp, in_dest, in_compare)
5819 enum machine_mode mode;
5823 unsigned HOST_WIDE_INT len;
5825 int in_dest, in_compare;
5827 /* This mode describes the size of the storage area
5828 to fetch the overall value from. Within that, we
5829 ignore the POS lowest bits, etc. */
5830 enum machine_mode is_mode = GET_MODE (inner);
5831 enum machine_mode inner_mode;
5832 enum machine_mode wanted_inner_mode = byte_mode;
5833 enum machine_mode wanted_inner_reg_mode = word_mode;
5834 enum machine_mode pos_mode = word_mode;
5835 enum machine_mode extraction_mode = word_mode;
5836 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5839 rtx orig_pos_rtx = pos_rtx;
5840 HOST_WIDE_INT orig_pos;
5842 /* Get some information about INNER and get the innermost object. */
5843 if (GET_CODE (inner) == USE)
5844 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
5845 /* We don't need to adjust the position because we set up the USE
5846 to pretend that it was a full-word object. */
5847 spans_byte = 1, inner = XEXP (inner, 0);
5848 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5850 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5851 consider just the QI as the memory to extract from.
5852 The subreg adds or removes high bits; its mode is
5853 irrelevant to the meaning of this extraction,
5854 since POS and LEN count from the lsb. */
5855 if (GET_CODE (SUBREG_REG (inner)) == MEM)
5856 is_mode = GET_MODE (SUBREG_REG (inner));
5857 inner = SUBREG_REG (inner);
5860 inner_mode = GET_MODE (inner);
5862 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
5863 pos = INTVAL (pos_rtx), pos_rtx = 0;
5865 /* See if this can be done without an extraction. We never can if the
5866 width of the field is not the same as that of some integer mode. For
5867 registers, we can only avoid the extraction if the position is at the
5868 low-order bit and this is either not in the destination or we have the
5869 appropriate STRICT_LOW_PART operation available.
5871 For MEM, we can avoid an extract if the field starts on an appropriate
5872 boundary and we can change the mode of the memory reference. However,
5873 we cannot directly access the MEM if we have a USE and the underlying
5874 MEM is not TMODE. This combination means that MEM was being used in a
5875 context where bits outside its mode were being referenced; that is only
5876 valid in bit-field insns. */
5878 if (tmode != BLKmode
5879 && ! (spans_byte && inner_mode != tmode)
5880 && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
5881 && GET_CODE (inner) != MEM
5883 || (GET_CODE (inner) == REG
5884 && (movstrict_optab->handlers[(int) tmode].insn_code
5885 != CODE_FOR_nothing))))
5886 || (GET_CODE (inner) == MEM && pos_rtx == 0
5888 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5889 : BITS_PER_UNIT)) == 0
5890 /* We can't do this if we are widening INNER_MODE (it
5891 may not be aligned, for one thing). */
5892 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5893 && (inner_mode == tmode
5894 || (! mode_dependent_address_p (XEXP (inner, 0))
5895 && ! MEM_VOLATILE_P (inner))))))
5897 /* If INNER is a MEM, make a new MEM that encompasses just the desired
5898 field. If the original and current mode are the same, we need not
5899 adjust the offset. Otherwise, we do if bytes big endian.
5901 If INNER is not a MEM, get a piece consisting of just the field
5902 of interest (in this case POS % BITS_PER_WORD must be 0). */
5904 if (GET_CODE (inner) == MEM)
5906 HOST_WIDE_INT offset;
5908 /* POS counts from lsb, but make OFFSET count in memory order. */
5909 if (BYTES_BIG_ENDIAN)
5910 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5912 offset = pos / BITS_PER_UNIT;
5914 new = adjust_address_nv (inner, tmode, offset);
5916 else if (GET_CODE (inner) == REG)
5918 /* We can't call gen_lowpart_for_combine here since we always want
5919 a SUBREG and it would sometimes return a new hard register. */
5920 if (tmode != inner_mode)
5922 HOST_WIDE_INT final_word = pos / BITS_PER_WORD;
5924 if (WORDS_BIG_ENDIAN
5925 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
5926 final_word = ((GET_MODE_SIZE (inner_mode)
5927 - GET_MODE_SIZE (tmode))
5928 / UNITS_PER_WORD) - final_word;
5930 final_word *= UNITS_PER_WORD;
5931 if (BYTES_BIG_ENDIAN &&
5932 GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
5933 final_word += (GET_MODE_SIZE (inner_mode)
5934 - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;
5936 new = gen_rtx_SUBREG (tmode, inner, final_word);
5942 new = force_to_mode (inner, tmode,
5943 len >= HOST_BITS_PER_WIDE_INT
5944 ? ~(unsigned HOST_WIDE_INT) 0
5945 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
5948 /* If this extraction is going into the destination of a SET,
5949 make a STRICT_LOW_PART unless we made a MEM. */
5952 return (GET_CODE (new) == MEM ? new
5953 : (GET_CODE (new) != SUBREG
5954 ? gen_rtx_CLOBBER (tmode, const0_rtx)
5955 : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));
5960 /* If we know that no extraneous bits are set, and that the high
5961 bit is not set, convert the extraction to the cheaper of
5962 sign and zero extension, that are equivalent in these cases. */
5963 if (flag_expensive_optimizations
5964 && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
5965 && ((nonzero_bits (new, tmode)
5966 & ~(((unsigned HOST_WIDE_INT)
5967 GET_MODE_MASK (tmode))
5971 rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
5972 rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);
5974 /* Prefer ZERO_EXTENSION, since it gives more information to
5976 if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
5981 /* Otherwise, sign- or zero-extend unless we already are in the
5984 return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5988 /* Unless this is a COMPARE or we have a funny memory reference,
5989 don't do anything with zero-extending field extracts starting at
5990 the low-order bit since they are simple AND operations. */
5991 if (pos_rtx == 0 && pos == 0 && ! in_dest
5992 && ! in_compare && ! spans_byte && unsignedp)
5995 /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
5996 we would be spanning bytes or if the position is not a constant and the
5997 length is not 1. In all other cases, we would only be going outside
5998 our object in cases when an original shift would have been
6000 if (! spans_byte && GET_CODE (inner) == MEM
6001 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
6002 || (pos_rtx != 0 && len != 1)))
6005 /* Get the mode to use should INNER not be a MEM, the mode for the position,
6006 and the mode for the result. */
6010 wanted_inner_reg_mode
6011 = insn_data[(int) CODE_FOR_insv].operand[0].mode;
6012 if (wanted_inner_reg_mode == VOIDmode)
6013 wanted_inner_reg_mode = word_mode;
6015 pos_mode = insn_data[(int) CODE_FOR_insv].operand[2].mode;
6016 if (pos_mode == VOIDmode)
6017 pos_mode = word_mode;
6019 extraction_mode = insn_data[(int) CODE_FOR_insv].operand[3].mode;
6020 if (extraction_mode == VOIDmode)
6021 extraction_mode = word_mode;
6026 if (! in_dest && unsignedp)
6028 wanted_inner_reg_mode
6029 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
6030 if (wanted_inner_reg_mode == VOIDmode)
6031 wanted_inner_reg_mode = word_mode;
6033 pos_mode = insn_data[(int) CODE_FOR_extzv].operand[3].mode;
6034 if (pos_mode == VOIDmode)
6035 pos_mode = word_mode;
6037 extraction_mode = insn_data[(int) CODE_FOR_extzv].operand[0].mode;
6038 if (extraction_mode == VOIDmode)
6039 extraction_mode = word_mode;
6044 if (! in_dest && ! unsignedp)
6046 wanted_inner_reg_mode
6047 = insn_data[(int) CODE_FOR_extv].operand[1].mode;
6048 if (wanted_inner_reg_mode == VOIDmode)
6049 wanted_inner_reg_mode = word_mode;
6051 pos_mode = insn_data[(int) CODE_FOR_extv].operand[3].mode;
6052 if (pos_mode == VOIDmode)
6053 pos_mode = word_mode;
6055 extraction_mode = insn_data[(int) CODE_FOR_extv].operand[0].mode;
6056 if (extraction_mode == VOIDmode)
6057 extraction_mode = word_mode;
6061 /* Never narrow an object, since that might not be safe. */
6063 if (mode != VOIDmode
6064 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
6065 extraction_mode = mode;
6067 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
6068 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6069 pos_mode = GET_MODE (pos_rtx);
6071 /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
6072 if we have to change the mode of memory and cannot, the desired mode is
6074 if (GET_CODE (inner) != MEM)
6075 wanted_inner_mode = wanted_inner_reg_mode;
6076 else if (inner_mode != wanted_inner_mode
6077 && (mode_dependent_address_p (XEXP (inner, 0))
6078 || MEM_VOLATILE_P (inner)))
6079 wanted_inner_mode = extraction_mode;
6083 if (BITS_BIG_ENDIAN)
6085 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6086 BITS_BIG_ENDIAN style. If position is constant, compute new
6087 position. Otherwise, build subtraction.
6088 Note that POS is relative to the mode of the original argument.
6089 If it's a MEM we need to recompute POS relative to that.
6090 However, if we're extracting from (or inserting into) a register,
6091 we want to recompute POS relative to wanted_inner_mode. */
6092 int width = (GET_CODE (inner) == MEM
6093 ? GET_MODE_BITSIZE (is_mode)
6094 : GET_MODE_BITSIZE (wanted_inner_mode));
6097 pos = width - len - pos;
6100 = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
6101 /* POS may be less than 0 now, but we check for that below.
6102 Note that it can only be less than 0 if GET_CODE (inner) != MEM. */
6105 /* If INNER has a wider mode, make it smaller. If this is a constant
6106 extract, try to adjust the byte to point to the byte containing
6108 if (wanted_inner_mode != VOIDmode
6109 && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
6110 && ((GET_CODE (inner) == MEM
6111 && (inner_mode == wanted_inner_mode
6112 || (! mode_dependent_address_p (XEXP (inner, 0))
6113 && ! MEM_VOLATILE_P (inner))))))
6117 /* The computations below will be correct if the machine is big
6118 endian in both bits and bytes or little endian in bits and bytes.
6119 If it is mixed, we must adjust. */
6121 /* If bytes are big endian and we had a paradoxical SUBREG, we must
6122 adjust OFFSET to compensate. */
6123 if (BYTES_BIG_ENDIAN
6125 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
6126 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
6128 /* If this is a constant position, we can move to the desired byte. */
6131 offset += pos / BITS_PER_UNIT;
6132 pos %= GET_MODE_BITSIZE (wanted_inner_mode);
6135 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
6137 && is_mode != wanted_inner_mode)
6138 offset = (GET_MODE_SIZE (is_mode)
6139 - GET_MODE_SIZE (wanted_inner_mode) - offset);
6141 if (offset != 0 || inner_mode != wanted_inner_mode)
6142 inner = adjust_address_nv (inner, wanted_inner_mode, offset);
6145 /* If INNER is not memory, we can always get it into the proper mode. If we
6146 are changing its mode, POS must be a constant and smaller than the size
6148 else if (GET_CODE (inner) != MEM)
6150 if (GET_MODE (inner) != wanted_inner_mode
6152 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
6155 inner = force_to_mode (inner, wanted_inner_mode,
6157 || len + orig_pos >= HOST_BITS_PER_WIDE_INT
6158 ? ~(unsigned HOST_WIDE_INT) 0
6159 : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
6164 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
6165 have to zero extend. Otherwise, we can just use a SUBREG. */
6167 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
6169 rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);
6171 /* If we know that no extraneous bits are set, and that the high
 6172 bit is not set, convert extraction to cheaper one - either
6173 SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
6175 if (flag_expensive_optimizations
6176 && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
6177 && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
6178 & ~(((unsigned HOST_WIDE_INT)
6179 GET_MODE_MASK (GET_MODE (pos_rtx)))
6183 rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
6185 /* Prefer ZERO_EXTENSION, since it gives more information to
6187 if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
6192 else if (pos_rtx != 0
6193 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6194 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
6196 /* Make POS_RTX unless we already have it and it is correct. If we don't
6197 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
6199 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
6200 pos_rtx = orig_pos_rtx;
6202 else if (pos_rtx == 0)
6203 pos_rtx = GEN_INT (pos);
6205 /* Make the required operation. See if we can use existing rtx. */
6206 new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
6207 extraction_mode, inner, GEN_INT (len), pos_rtx);
6209 new = gen_lowpart_for_combine (mode, new);
6214 /* See if X contains an ASHIFT of COUNT or more bits that can be commuted
6215 with any other operations in X. Return X without that shift if so. */
6218 extract_left_shift (x, count)
6222 enum rtx_code code = GET_CODE (x);
6223 enum machine_mode mode = GET_MODE (x);
6229 /* This is the shift itself. If it is wide enough, we will return
6230 either the value being shifted if the shift count is equal to
6231 COUNT or a shift for the difference. */
6232 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6233 && INTVAL (XEXP (x, 1)) >= count)
6234 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
6235 INTVAL (XEXP (x, 1)) - count);
6239 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6240 return simplify_gen_unary (code, mode, tem, mode);
6244 case PLUS: case IOR: case XOR: case AND:
6245 /* If we can safely shift this constant and we find the inner shift,
6246 make a new operation. */
6247 if (GET_CODE (XEXP (x,1)) == CONST_INT
6248 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
6249 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6250 return gen_binary (code, mode, tem,
6251 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
6262 /* Look at the expression rooted at X. Look for expressions
6263 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
6264 Form these expressions.
6266 Return the new rtx, usually just X.
6268 Also, for machines like the VAX that don't have logical shift insns,
6269 try to convert logical to arithmetic shift operations in cases where
6270 they are equivalent. This undoes the canonicalizations to logical
6271 shifts done elsewhere.
6273 We try, as much as possible, to re-use rtl expressions to save memory.
6275 IN_CODE says what kind of expression we are processing. Normally, it is
6276 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
6277 being kludges), it is MEM. When processing the arguments of a comparison
6278 or a COMPARE against zero, it is COMPARE. */
6281 make_compound_operation (x, in_code)
6283 enum rtx_code in_code;
6285 enum rtx_code code = GET_CODE (x);
6286 enum machine_mode mode = GET_MODE (x);
6287 int mode_width = GET_MODE_BITSIZE (mode);
6289 enum rtx_code next_code;
6295 /* Select the code to be used in recursive calls. Once we are inside an
6296 address, we stay there. If we have a comparison, set to COMPARE,
6297 but once inside, go back to our default of SET. */
6299 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
6300 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
6301 && XEXP (x, 1) == const0_rtx) ? COMPARE
6302 : in_code == COMPARE ? SET : in_code);
6304 /* Process depending on the code of this operation. If NEW is set
6305 non-zero, it will be returned. */
6310 /* Convert shifts by constants into multiplications if inside
6312 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
6313 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6314 && INTVAL (XEXP (x, 1)) >= 0)
6316 new = make_compound_operation (XEXP (x, 0), next_code);
6317 new = gen_rtx_MULT (mode, new,
6318 GEN_INT ((HOST_WIDE_INT) 1
6319 << INTVAL (XEXP (x, 1))));
6324 /* If the second operand is not a constant, we can't do anything
6326 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6329 /* If the constant is a power of two minus one and the first operand
6330 is a logical right shift, make an extraction. */
6331 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6332 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6334 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6335 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
6336 0, in_code == COMPARE);
6339 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
6340 else if (GET_CODE (XEXP (x, 0)) == SUBREG
6341 && subreg_lowpart_p (XEXP (x, 0))
6342 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
6343 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6345 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
6347 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
6348 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
6349 0, in_code == COMPARE);
6351 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
6352 else if ((GET_CODE (XEXP (x, 0)) == XOR
6353 || GET_CODE (XEXP (x, 0)) == IOR)
6354 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
6355 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
6356 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6358 /* Apply the distributive law, and then try to make extractions. */
6359 new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
6360 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
6362 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
6364 new = make_compound_operation (new, in_code);
 6367 /* If we have (and (rotate X C) M) and C is larger than the number
6368 of bits in M, this is an extraction. */
6370 else if (GET_CODE (XEXP (x, 0)) == ROTATE
6371 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6372 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
6373 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
6375 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6376 new = make_extraction (mode, new,
6377 (GET_MODE_BITSIZE (mode)
6378 - INTVAL (XEXP (XEXP (x, 0), 1))),
6379 NULL_RTX, i, 1, 0, in_code == COMPARE);
6382 /* On machines without logical shifts, if the operand of the AND is
6383 a logical shift and our mask turns off all the propagated sign
6384 bits, we can replace the logical shift with an arithmetic shift. */
6385 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6386 && (lshr_optab->handlers[(int) mode].insn_code
6387 == CODE_FOR_nothing)
6388 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
6389 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6390 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6391 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6392 && mode_width <= HOST_BITS_PER_WIDE_INT)
6394 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
6396 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6397 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6399 gen_rtx_ASHIFTRT (mode,
6400 make_compound_operation
6401 (XEXP (XEXP (x, 0), 0), next_code),
6402 XEXP (XEXP (x, 0), 1)));
6405 /* If the constant is one less than a power of two, this might be
6406 representable by an extraction even if no shift is present.
6407 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6408 we are in a COMPARE. */
6409 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6410 new = make_extraction (mode,
6411 make_compound_operation (XEXP (x, 0),
6413 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
6415 /* If we are in a comparison and this is an AND with a power of two,
6416 convert this into the appropriate bit extract. */
6417 else if (in_code == COMPARE
6418 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
6419 new = make_extraction (mode,
6420 make_compound_operation (XEXP (x, 0),
6422 i, NULL_RTX, 1, 1, 0, 1);
6427 /* If the sign bit is known to be zero, replace this with an
6428 arithmetic shift. */
6429 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
6430 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6431 && mode_width <= HOST_BITS_PER_WIDE_INT
6432 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
6434 new = gen_rtx_ASHIFTRT (mode,
6435 make_compound_operation (XEXP (x, 0),
6441 /* ... fall through ... */
6447 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6448 this is a SIGN_EXTRACT. */
6449 if (GET_CODE (rhs) == CONST_INT
6450 && GET_CODE (lhs) == ASHIFT
6451 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6452 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
6454 new = make_compound_operation (XEXP (lhs, 0), next_code);
6455 new = make_extraction (mode, new,
6456 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6457 NULL_RTX, mode_width - INTVAL (rhs),
6458 code == LSHIFTRT, 0, in_code == COMPARE);
6462 /* See if we have operations between an ASHIFTRT and an ASHIFT.
6463 If so, try to merge the shifts into a SIGN_EXTEND. We could
6464 also do this for some cases of SIGN_EXTRACT, but it doesn't
6465 seem worth the effort; the case checked for occurs on Alpha. */
6467 if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
6468 && ! (GET_CODE (lhs) == SUBREG
6469 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
6470 && GET_CODE (rhs) == CONST_INT
6471 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6472 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6473 new = make_extraction (mode, make_compound_operation (new, next_code),
6474 0, NULL_RTX, mode_width - INTVAL (rhs),
6475 code == LSHIFTRT, 0, in_code == COMPARE);
6480 /* Call ourselves recursively on the inner expression. If we are
6481 narrowing the object and it has a different RTL code from
6482 what it originally did, do this SUBREG as a force_to_mode. */
6484 tem = make_compound_operation (SUBREG_REG (x), in_code);
6485 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6486 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6487 && subreg_lowpart_p (x))
6489 rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
6492 /* If we have something other than a SUBREG, we might have
 6493 done an expansion, so rerun ourselves. */
6494 if (GET_CODE (newer) != SUBREG)
6495 newer = make_compound_operation (newer, in_code);
6500 /* If this is a paradoxical subreg, and the new code is a sign or
6501 zero extension, omit the subreg and widen the extension. If it
6502 is a regular subreg, we can still get rid of the subreg by not
6503 widening so much, or in fact removing the extension entirely. */
6504 if ((GET_CODE (tem) == SIGN_EXTEND
6505 || GET_CODE (tem) == ZERO_EXTEND)
6506 && subreg_lowpart_p (x))
6508 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
6509 || (GET_MODE_SIZE (mode) >
6510 GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
6511 tem = gen_rtx_fmt_e (GET_CODE (tem), mode, XEXP (tem, 0));
6513 tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
6524 x = gen_lowpart_for_combine (mode, new);
6525 code = GET_CODE (x);
6528 /* Now recursively process each operand of this operation. */
6529 fmt = GET_RTX_FORMAT (code);
6530 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6533 new = make_compound_operation (XEXP (x, i), next_code);
6534 SUBST (XEXP (x, i), new);
6540 /* Given M see if it is a value that would select a field of bits
6541 within an item, but not the entire word. Return -1 if not.
6542 Otherwise, return the starting position of the field, where 0 is the
6545 *PLEN is set to the length of the field. */
6548 get_pos_from_mask (m, plen)
6549 unsigned HOST_WIDE_INT m;
6550 unsigned HOST_WIDE_INT *plen;
6552 /* Get the bit number of the first 1 bit from the right, -1 if none. */
6553 int pos = exact_log2 (m & -m);
6559 /* Now shift off the low-order zero bits and see if we have a power of
6561 len = exact_log2 ((m >> pos) + 1);
6570 /* See if X can be simplified knowing that we will only refer to it in
6571 MODE and will only refer to those bits that are nonzero in MASK.
6572 If other bits are being computed or if masking operations are done
6573 that select a superset of the bits in MASK, they can sometimes be
6576 Return a possibly simplified expression, but always convert X to
6577 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
6579 Also, if REG is non-zero and X is a register equal in value to REG,
6582 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6583 are all off in X. This is used when X will be complemented, by either
6584 NOT, NEG, or XOR. */
6587 force_to_mode (x, mode, mask, reg, just_select)
6589 enum machine_mode mode;
6590 unsigned HOST_WIDE_INT mask;
6594 enum rtx_code code = GET_CODE (x);
6595 int next_select = just_select || code == XOR || code == NOT || code == NEG;
6596 enum machine_mode op_mode;
6597 unsigned HOST_WIDE_INT fuller_mask, nonzero;
6600 /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
6601 code below will do the wrong thing since the mode of such an
6602 expression is VOIDmode.
6604 Also do nothing if X is a CLOBBER; this can happen if X was
6605 the return value from a call to gen_lowpart_for_combine. */
6606 if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
 6609 /* We want to perform the operation in its present mode unless we know
6610 that the operation is valid in MODE, in which case we do the operation
6612 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6613 && code_to_optab[(int) code] != 0
6614 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
6615 != CODE_FOR_nothing))
6616 ? mode : GET_MODE (x));
6618 /* It is not valid to do a right-shift in a narrower mode
6619 than the one it came in with. */
6620 if ((code == LSHIFTRT || code == ASHIFTRT)
6621 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6622 op_mode = GET_MODE (x);
6624 /* Truncate MASK to fit OP_MODE. */
6626 mask &= GET_MODE_MASK (op_mode);
6628 /* When we have an arithmetic operation, or a shift whose count we
 6629 do not know, we need to assume that all bits up to the highest-order
6630 bit in MASK will be needed. This is how we form such a mask. */
6632 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
6633 ? GET_MODE_MASK (op_mode)
6634 : (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
6637 fuller_mask = ~(HOST_WIDE_INT) 0;
6639 /* Determine what bits of X are guaranteed to be (non)zero. */
6640 nonzero = nonzero_bits (x, mode);
6642 /* If none of the bits in X are needed, return a zero. */
6643 if (! just_select && (nonzero & mask) == 0)
6646 /* If X is a CONST_INT, return a new one. Do this here since the
6647 test below will fail. */
6648 if (GET_CODE (x) == CONST_INT)
6650 HOST_WIDE_INT cval = INTVAL (x) & mask;
6651 int width = GET_MODE_BITSIZE (mode);
 6653 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
6654 number, sign extend it. */
6655 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6656 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6657 cval |= (HOST_WIDE_INT) -1 << width;
6659 return GEN_INT (cval);
6662 /* If X is narrower than MODE and we want all the bits in X's mode, just
6663 get X in the proper mode. */
6664 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
6665 && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
6666 return gen_lowpart_for_combine (mode, x);
6668 /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6669 MASK are already known to be zero in X, we need not do anything. */
6670 if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0)
6676 /* If X is a (clobber (const_int)), return it since we know we are
6677 generating something that won't match. */
6681 /* X is a (use (mem ..)) that was made from a bit-field extraction that
6682 spanned the boundary of the MEM. If we are now masking so it is
6683 within that boundary, we don't need the USE any more. */
6684 if (! BITS_BIG_ENDIAN
6685 && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6686 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6693 x = expand_compound_operation (x);
6694 if (GET_CODE (x) != code)
6695 return force_to_mode (x, mode, mask, reg, next_select);
6699 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6700 || rtx_equal_p (reg, get_last_value (x))))
6705 if (subreg_lowpart_p (x)
6706 /* We can ignore the effect of this SUBREG if it narrows the mode or
6707 if the constant masks to zero all the bits the mode doesn't
6709 && ((GET_MODE_SIZE (GET_MODE (x))
6710 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6712 & GET_MODE_MASK (GET_MODE (x))
6713 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
6714 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
6718 /* If this is an AND with a constant, convert it into an AND
6719 whose constant is the AND of that constant with MASK. If it
6720 remains an AND of MASK, delete it since it is redundant. */
6722 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6724 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6725 mask & INTVAL (XEXP (x, 1)));
6727 /* If X is still an AND, see if it is an AND with a mask that
6728 is just some low-order bits. If so, and it is MASK, we don't
6731 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6732 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == mask)
6735 /* If it remains an AND, try making another AND with the bits
6736 in the mode mask that aren't in MASK turned on. If the
6737 constant in the AND is wide enough, this might make a
6738 cheaper constant. */
6740 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6741 && GET_MODE_MASK (GET_MODE (x)) != mask
6742 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
6744 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
6745 | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
6746 int width = GET_MODE_BITSIZE (GET_MODE (x));
 6749 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
6750 number, sign extend it. */
6751 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6752 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6753 cval |= (HOST_WIDE_INT) -1 << width;
6755 y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
6756 if (rtx_cost (y, SET) < rtx_cost (x, SET))
6766 /* In (and (plus FOO C1) M), if M is a mask that just turns off
6767 low-order bits (as in an alignment operation) and FOO is already
6768 aligned to that boundary, mask C1 to that boundary as well.
6769 This may eliminate that PLUS and, later, the AND. */
6772 unsigned int width = GET_MODE_BITSIZE (mode);
6773 unsigned HOST_WIDE_INT smask = mask;
6775 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6776 number, sign extend it. */
6778 if (width < HOST_BITS_PER_WIDE_INT
6779 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6780 smask |= (HOST_WIDE_INT) -1 << width;
6782 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6783 && exact_log2 (- smask) >= 0)
6787 && (XEXP (x, 0) == stack_pointer_rtx
6788 || XEXP (x, 0) == frame_pointer_rtx))
6790 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6791 unsigned HOST_WIDE_INT sp_mask = GET_MODE_MASK (mode);
6793 sp_mask &= ~(sp_alignment - 1);
6794 if ((sp_mask & ~smask) == 0
6795 && ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~smask) != 0)
6796 return force_to_mode (plus_constant (XEXP (x, 0),
6797 ((INTVAL (XEXP (x, 1)) -
6798 STACK_BIAS) & smask)
6800 mode, smask, reg, next_select);
6803 if ((nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
6804 && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
6805 return force_to_mode (plus_constant (XEXP (x, 0),
6806 (INTVAL (XEXP (x, 1))
6808 mode, smask, reg, next_select);
6812 /* ... fall through ... */
6815 /* For PLUS, MINUS and MULT, we need any bits less significant than the
6816 most significant bit in MASK since carries from those bits will
6817 affect the bits we are interested in. */
6822 /* If X is (minus C Y) where C's least set bit is larger than any bit
6823 in the mask, then we may replace with (neg Y). */
6824 if (GET_CODE (XEXP (x, 0)) == CONST_INT
6825 && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
6826 & -INTVAL (XEXP (x, 0))))
6829 x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
6831 return force_to_mode (x, mode, mask, reg, next_select);
6834 /* Similarly, if C contains every bit in the mask, then we may
6835 replace with (not Y). */
6836 if (GET_CODE (XEXP (x, 0)) == CONST_INT
6837 && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) mask)
6838 == INTVAL (XEXP (x, 0))))
6840 x = simplify_gen_unary (NOT, GET_MODE (x),
6841 XEXP (x, 1), GET_MODE (x));
6842 return force_to_mode (x, mode, mask, reg, next_select);
6850 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
6851 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
6852 operation which may be a bitfield extraction. Ensure that the
6853 constant we form is not wider than the mode of X. */
6855 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6856 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6857 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6858 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6859 && GET_CODE (XEXP (x, 1)) == CONST_INT
6860 && ((INTVAL (XEXP (XEXP (x, 0), 1))
6861 + floor_log2 (INTVAL (XEXP (x, 1))))
6862 < GET_MODE_BITSIZE (GET_MODE (x)))
6863 && (INTVAL (XEXP (x, 1))
6864 & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
6866 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
6867 << INTVAL (XEXP (XEXP (x, 0), 1)));
6868 temp = gen_binary (GET_CODE (x), GET_MODE (x),
6869 XEXP (XEXP (x, 0), 0), temp);
6870 x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
6871 XEXP (XEXP (x, 0), 1));
6872 return force_to_mode (x, mode, mask, reg, next_select);
6876 /* For most binary operations, just propagate into the operation and
6877 change the mode if we have an operation of that mode. */
6879 op0 = gen_lowpart_for_combine (op_mode,
6880 force_to_mode (XEXP (x, 0), mode, mask,
6882 op1 = gen_lowpart_for_combine (op_mode,
6883 force_to_mode (XEXP (x, 1), mode, mask,
6886 /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
6887 MASK since OP1 might have been sign-extended but we never want
6888 to turn on extra bits, since combine might have previously relied
6889 on them being off. */
6890 if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
6891 && (INTVAL (op1) & mask) != 0)
6892 op1 = GEN_INT (INTVAL (op1) & mask);
6894 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
6895 x = gen_binary (code, op_mode, op0, op1);
6899 /* For left shifts, do the same, but just for the first operand.
6900 However, we cannot do anything with shifts where we cannot
6901 guarantee that the counts are smaller than the size of the mode
6902 because such a count will have a different meaning in a
6905 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6906 && INTVAL (XEXP (x, 1)) >= 0
6907 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
6908 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
6909 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
6910 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
6913 /* If the shift count is a constant and we can do arithmetic in
6914 the mode of the shift, refine which bits we need. Otherwise, use the
6915 conservative form of the mask. */
6916 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6917 && INTVAL (XEXP (x, 1)) >= 0
6918 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
6919 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
6920 mask >>= INTVAL (XEXP (x, 1));
6924 op0 = gen_lowpart_for_combine (op_mode,
6925 force_to_mode (XEXP (x, 0), op_mode,
6926 mask, reg, next_select));
6928 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6929 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
6933 /* Here we can only do something if the shift count is a constant,
6934 this shift constant is valid for the host, and we can do arithmetic
6937 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6938 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6939 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
6941 rtx inner = XEXP (x, 0);
6942 unsigned HOST_WIDE_INT inner_mask;
6944 /* Select the mask of the bits we need for the shift operand. */
6945 inner_mask = mask << INTVAL (XEXP (x, 1));
6947 /* We can only change the mode of the shift if we can do arithmetic
6948 in the mode of the shift and INNER_MASK is no wider than the
6949 width of OP_MODE. */
6950 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
6951 || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0)
6952 op_mode = GET_MODE (x);
6954 inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);
6956 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
6957 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
6960 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
6961 shift and AND produces only copies of the sign bit (C2 is one less
6962 than a power of two), we can do this with just a shift. */
6964 if (GET_CODE (x) == LSHIFTRT
6965 && GET_CODE (XEXP (x, 1)) == CONST_INT
6966 /* The shift puts one of the sign bit copies in the least significant
6968 && ((INTVAL (XEXP (x, 1))
6969 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
6970 >= GET_MODE_BITSIZE (GET_MODE (x)))
6971 && exact_log2 (mask + 1) >= 0
6972 /* Number of bits left after the shift must be more than the mask
6974 && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
6975 <= GET_MODE_BITSIZE (GET_MODE (x)))
6976 /* Must be more sign bit copies than the mask needs. */
6977 && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
6978 >= exact_log2 (mask + 1)))
6979 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6980 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
6981 - exact_log2 (mask + 1)));
6986 /* If we are just looking for the sign bit, we don't need this shift at
6987 all, even if it has a variable count. */
6988 if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6989 && (mask == ((unsigned HOST_WIDE_INT) 1
6990 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
6991 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6993 /* If this is a shift by a constant, get a mask that contains those bits
6994 that are not copies of the sign bit. We then have two cases: If
6995 MASK only includes those bits, this can be a logical shift, which may
6996 allow simplifications. If MASK is a single-bit field not within
6997 those bits, we are requesting a copy of the sign bit and hence can
6998 shift the sign bit to the appropriate location. */
7000 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
7001 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7005 /* If the considered data is wider then HOST_WIDE_INT, we can't
7006 represent a mask for all its bits in a single scalar.
7007 But we only care about the lower bits, so calculate these. */
7009 if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
7011 nonzero = ~(HOST_WIDE_INT) 0;
7013 /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7014 is the number of bits a full-width mask would have set.
7015 We need only shift if these are fewer than nonzero can
7016 hold. If not, we must keep all bits set in nonzero. */
7018 if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7019 < HOST_BITS_PER_WIDE_INT)
7020 nonzero >>= INTVAL (XEXP (x, 1))
7021 + HOST_BITS_PER_WIDE_INT
7022 - GET_MODE_BITSIZE (GET_MODE (x)) ;
7026 nonzero = GET_MODE_MASK (GET_MODE (x));
7027 nonzero >>= INTVAL (XEXP (x, 1));
7030 if ((mask & ~nonzero) == 0
7031 || (i = exact_log2 (mask)) >= 0)
7033 x = simplify_shift_const
7034 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7035 i < 0 ? INTVAL (XEXP (x, 1))
7036 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
7038 if (GET_CODE (x) != ASHIFTRT)
7039 return force_to_mode (x, mode, mask, reg, next_select);
7043 /* If MASK is 1, convert this to a LSHIFTRT. This can be done
7044 even if the shift count isn't a constant. */
7046 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
7050 /* If this is a zero- or sign-extension operation that just affects bits
7051 we don't care about, remove it. Be sure the call above returned
7052 something that is still a shift. */
7054 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
7055 && GET_CODE (XEXP (x, 1)) == CONST_INT
7056 && INTVAL (XEXP (x, 1)) >= 0
7057 && (INTVAL (XEXP (x, 1))
7058 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
7059 && GET_CODE (XEXP (x, 0)) == ASHIFT
7060 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7061 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
7062 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
7069 /* If the shift count is constant and we can do computations
7070 in the mode of X, compute where the bits we care about are.
7071 Otherwise, we can't do anything. Don't change the mode of
7072 the shift or propagate MODE into the shift, though. */
7073 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7074 && INTVAL (XEXP (x, 1)) >= 0)
7076 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
7077 GET_MODE (x), GEN_INT (mask),
7079 if (temp && GET_CODE(temp) == CONST_INT)
7081 force_to_mode (XEXP (x, 0), GET_MODE (x),
7082 INTVAL (temp), reg, next_select));
7087 /* If we just want the low-order bit, the NEG isn't needed since it
7088 won't change the low-order bit. */
7090 return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
7092 /* We need any bits less significant than the most significant bit in
7093 MASK since carries from those bits will affect the bits we are
7099 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
7100 same as the XOR case above. Ensure that the constant we form is not
7101 wider than the mode of X. */
7103 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7104 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7105 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7106 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
7107 < GET_MODE_BITSIZE (GET_MODE (x)))
7108 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7110 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
7111 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
7112 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
7114 return force_to_mode (x, mode, mask, reg, next_select);
7117 /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
7118 use the full mask inside the NOT. */
7122 op0 = gen_lowpart_for_combine (op_mode,
7123 force_to_mode (XEXP (x, 0), mode, mask,
7125 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7126 x = simplify_gen_unary (code, op_mode, op0, op_mode);
7130 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
7131 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
7132 which is equal to STORE_FLAG_VALUE. */
7133 if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
7134 && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
7135 && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
7136 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7141 /* We have no way of knowing if the IF_THEN_ELSE can itself be
7142 written in a narrower mode. We play it safe and do not do so. */
7145 gen_lowpart_for_combine (GET_MODE (x),
7146 force_to_mode (XEXP (x, 1), mode,
7147 mask, reg, next_select)));
7149 gen_lowpart_for_combine (GET_MODE (x),
7150 force_to_mode (XEXP (x, 2), mode,
7151 mask, reg,next_select)));
7158 /* Ensure we return a value of the proper mode. */
7159 return gen_lowpart_for_combine (mode, x);
7162 /* Return nonzero if X is an expression that has one of two values depending on
7163 whether some other value is zero or nonzero. In that case, we return the
7164 value that is being tested, *PTRUE is set to the value if the rtx being
7165 returned has a nonzero value, and *PFALSE is set to the other alternative.
7167 If we return zero, we set *PTRUE and *PFALSE to X. */
7170 if_then_else_cond (x, ptrue, pfalse)
7172 rtx *ptrue, *pfalse;
7174 enum machine_mode mode = GET_MODE (x);
7175 enum rtx_code code = GET_CODE (x);
7176 rtx cond0, cond1, true0, true1, false0, false1;
7177 unsigned HOST_WIDE_INT nz;
7179 /* If we are comparing a value against zero, we are done. */
7180 if ((code == NE || code == EQ)
7181 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
7183 *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
7184 *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
7188 /* If this is a unary operation whose operand has one of two values, apply
7189 our opcode to compute those values. */
7190 else if (GET_RTX_CLASS (code) == '1'
7191 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
7193 *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
7194 *pfalse = simplify_gen_unary (code, mode, false0,
7195 GET_MODE (XEXP (x, 0)));
7199 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
7200 make can't possibly match and would suppress other optimizations. */
7201 else if (code == COMPARE)
7204 /* If this is a binary operation, see if either side has only one of two
7205 values. If either one does or if both do and they are conditional on
7206 the same value, compute the new true and false values. */
7207 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
7208 || GET_RTX_CLASS (code) == '<')
7210 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
7211 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
7213 if ((cond0 != 0 || cond1 != 0)
7214 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
7216 /* If if_then_else_cond returned zero, then true/false are the
7217 same rtl. We must copy one of them to prevent invalid rtl
7220 true0 = copy_rtx (true0);
7221 else if (cond1 == 0)
7222 true1 = copy_rtx (true1);
7224 *ptrue = gen_binary (code, mode, true0, true1);
7225 *pfalse = gen_binary (code, mode, false0, false1);
     /* At least one of cond0/cond1 is nonzero here, and when both are
        they were verified equal above, so either one is the condition.  */
7226 return cond0 ? cond0 : cond1;
7229 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
7230 operands is zero when the other is non-zero, and vice-versa,
7231 and STORE_FLAG_VALUE is 1 or -1. */
7233 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7234 && (code == PLUS || code == IOR || code == XOR || code == MINUS
7236 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7238 rtx op0 = XEXP (XEXP (x, 0), 1);
7239 rtx op1 = XEXP (XEXP (x, 1), 1);
7241 cond0 = XEXP (XEXP (x, 0), 0);
7242 cond1 = XEXP (XEXP (x, 1), 0);
7244 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7245 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7246 && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
7247 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7248 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7249 || ((swap_condition (GET_CODE (cond0))
7250 == combine_reversed_comparison_code (cond1))
7251 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7252 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7253 && ! side_effects_p (x))
7255 *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
7256 *pfalse = gen_binary (MULT, mode,
7258 ? simplify_gen_unary (NEG, mode, op1,
7266 /* Similarly for MULT, AND and UMIN, except that for these the result
7268 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7269 && (code == MULT || code == AND || code == UMIN)
7270 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7272 cond0 = XEXP (XEXP (x, 0), 0);
7273 cond1 = XEXP (XEXP (x, 1), 0);
7275 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7276 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7277 && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
7278 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7279 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7280 || ((swap_condition (GET_CODE (cond0))
7281 == combine_reversed_comparison_code (cond1))
7282 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7283 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7284 && ! side_effects_p (x))
7286 *ptrue = *pfalse = const0_rtx;
7292 else if (code == IF_THEN_ELSE)
7294 /* If we have IF_THEN_ELSE already, extract the condition and
7295 canonicalize it if it is NE or EQ. */
7296 cond0 = XEXP (x, 0);
7297 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
7298 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
7299 return XEXP (cond0, 0);
7300 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
7302 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
7303 return XEXP (cond0, 0);
7309 /* If X is a SUBREG, we can narrow both the true and false values
7310 of the inner expression, if there is a condition. */
7311 else if (code == SUBREG
7312 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
7315 *ptrue = simplify_gen_subreg (mode, true0,
7316 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7317 *pfalse = simplify_gen_subreg (mode, false0,
7318 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7323 /* If X is a constant, this isn't special and will cause confusions
7324 if we treat it as such. Likewise if it is equivalent to a constant. */
7325 else if (CONSTANT_P (x)
7326 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
7329 /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
7330 will be least confusing to the rest of the compiler. */
7331 else if (mode == BImode)
7333 *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
7337 /* If X is known to be either 0 or -1, those are the true and
7338 false values when testing X. */
7339 else if (x == constm1_rtx || x == const0_rtx
7340 || (mode != VOIDmode
7341 && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
7343 *ptrue = constm1_rtx, *pfalse = const0_rtx;
7347 /* Likewise for 0 or a single bit. */
7348 else if (mode != VOIDmode
7349 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7350 && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
7352 *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
7356 /* Otherwise fail; show no condition with true and false values the same. */
7357 *ptrue = *pfalse = x;
7361 /* Return the value of expression X given the fact that condition COND
7362 is known to be true when applied to REG as its first operand and VAL
7363 as its second. X is known to not be shared and so can be modified in
7366 We only handle the simplest cases, and specifically those cases that
7367 arise with IF_THEN_ELSE expressions. */
7370 known_cond (x, cond, reg, val)
7375 enum rtx_code code = GET_CODE (x);
7380 if (side_effects_p (x))
7383 if (cond == EQ && rtx_equal_p (x, reg) && !FLOAT_MODE_P (cond))
     /* NOTE(review): FLOAT_MODE_P is applied to COND, which is an
        enum rtx_code, not a machine mode -- this looks like it was
        meant to be FLOAT_MODE_P (GET_MODE (x)).  Verify against a
        current combine.c before relying on this test.  */
7385 if (cond == UNEQ && rtx_equal_p (x, reg))
7388 /* If X is (abs REG) and we know something about REG's relationship
7389 with zero, we may be able to simplify this. */
7391 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7394 case GE: case GT: case EQ:
7397 return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
7399 GET_MODE (XEXP (x, 0)));
7404 /* The only other cases we handle are MIN, MAX, and comparisons if the
7405 operands are the same as REG and VAL. */
7407 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
     /* Canonicalize so REG is the first operand of X, swapping the
        sense of COND to match.  */
7409 if (rtx_equal_p (XEXP (x, 0), val))
7410 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7412 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7414 if (GET_RTX_CLASS (code) == '<')
7416 if (comparison_dominates_p (cond, code))
7417 return const_true_rtx;
7419 code = combine_reversed_comparison_code (x);
7421 && comparison_dominates_p (cond, code))
7426 else if (code == SMAX || code == SMIN
7427 || code == UMIN || code == UMAX)
7429 int unsignedp = (code == UMIN || code == UMAX);
7431 /* Do not reverse the condition when it is NE or EQ.
7432 This is because we cannot conclude anything about
7433 the value of 'SMAX (x, y)' when x is not equal to y,
7434 but we can when x equals y. */
7435 if ((code == SMAX || code == UMAX)
7436 && ! (cond == EQ || cond == NE))
7437 cond = reverse_condition (cond);
7442 return unsignedp ? x : XEXP (x, 1);
7444 return unsignedp ? x : XEXP (x, 0);
7446 return unsignedp ? XEXP (x, 1) : x;
7448 return unsignedp ? XEXP (x, 0) : x;
     /* Otherwise, recursively apply the known condition to every
        rtx operand and vector element of X, substituting in place.  */
7456 fmt = GET_RTX_FORMAT (code);
7457 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7460 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7461 else if (fmt[i] == 'E')
7462 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7463 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7470 /* See if X and Y are equal for the purposes of seeing if we can rewrite an
7471 assignment as a field assignment. */
7474 rtx_equal_for_field_assignment_p (x, y)
7478 if (x == y || rtx_equal_p (x, y))
7481 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7484 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7485 Note that all SUBREGs of MEM are paradoxical; otherwise they
7486 would have been rewritten. */
7487 if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7488 && GET_CODE (SUBREG_REG (y)) == MEM
7489 && rtx_equal_p (SUBREG_REG (y),
7490 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
     /* Same check with the roles of X and Y reversed.  */
7493 if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7494 && GET_CODE (SUBREG_REG (x)) == MEM
7495 && rtx_equal_p (SUBREG_REG (x),
7496 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
7499 /* We used to see if get_last_value of X and Y were the same but that's
7500 not correct. In one direction, we'll cause the assignment to have
7501 the wrong destination and in the other case, we'll import a register into this
7502 insn that might already have been dead. So fail if none of the
7503 above cases are true. */
7507 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
7508 Return that assignment if so.
7510 We only handle the most common cases. */
7513 make_field_assignment (x)
7516 rtx dest = SET_DEST (x);
7517 rtx src = SET_SRC (x);
7522 unsigned HOST_WIDE_INT len;
7524 enum machine_mode mode;
7526 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7527 a clear of a one-bit field. We will have changed it to
7528 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
7531 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7532 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7533 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
7534 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7536 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7539 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
     /* Same one-bit-clear pattern, but with the ROTATE hidden inside a
        low-part SUBREG of a wider mode.
        NOTE(review): unlike the arm above, this does not verify that
        the ROTATE's first operand is a CONST_INT before calling INTVAL
        on it -- confirm whether a GET_CODE check is missing here.  */
7543 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7544 && subreg_lowpart_p (XEXP (src, 0))
7545 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7546 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7547 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7548 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7549 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7551 assign = make_extraction (VOIDmode, dest, 0,
7552 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7555 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7559 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
7561 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7562 && XEXP (XEXP (src, 0), 0) == const1_rtx
7563 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7565 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7568 return gen_rtx_SET (VOIDmode, assign, const1_rtx);
7572 /* The other case we handle is assignments into a constant-position
7573 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
7574 a mask that has all one bits except for a group of zero bits and
7575 OTHER is known to have zeros where C1 has ones, this is such an
7576 assignment. Compute the position and length from C1. Shift OTHER
7577 to the appropriate position, force it to the required mode, and
7578 make the extraction. Check for the AND in both operands. */
7580 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
7583 rhs = expand_compound_operation (XEXP (src, 0));
7584 lhs = expand_compound_operation (XEXP (src, 1));
7586 if (GET_CODE (rhs) == AND
7587 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7588 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7589 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7590 else if (GET_CODE (lhs) == AND
7591 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7592 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7593 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
     /* Bail out if the complement of C1 is not a single contiguous
        field inside DEST's mode, or if OTHER can have nonzero bits
        overlapping the kept part of the mask.  */
7597 pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
7598 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
7599 || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7600 || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
7603 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
7607 /* The mode to use for the source is the mode of the assignment, or of
7608 what is inside a possible STRICT_LOW_PART. */
7609 mode = (GET_CODE (assign) == STRICT_LOW_PART
7610 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
7612 /* Shift OTHER right POS places and make it the source, restricting it
7613 to the proper length and mode. */
7615 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7616 GET_MODE (src), other, pos),
7618 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7619 ? ~(unsigned HOST_WIDE_INT) 0
7620 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
7623 return gen_rtx_SET (VOIDmode, assign, src);
7626 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7630 apply_distributive_law (x)
7633 enum rtx_code code = GET_CODE (x);
7634 rtx lhs, rhs, other;
7636 enum rtx_code inner_code;
7638 /* Distributivity is not true for floating point.
7639 It can change the value. So don't do it.
7640 -- rms and moshier@world.std.com. */
7641 if (FLOAT_MODE_P (GET_MODE (x)))
7644 /* The outer operation can only be one of the following: */
7645 if (code != IOR && code != AND && code != XOR
7646 && code != PLUS && code != MINUS)
7649 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
7651 /* If either operand is a primitive we can't do anything, so get out
7653 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
7654 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o'
7657 lhs = expand_compound_operation (lhs);
7658 rhs = expand_compound_operation (rhs);
7659 inner_code = GET_CODE (lhs);
7660 if (inner_code != GET_CODE (rhs))
7663 /* See if the inner and outer operations distribute. */
7670 /* These all distribute except over PLUS. */
7671 if (code == PLUS || code == MINUS)
7676 if (code != PLUS && code != MINUS)
7681 /* This is also a multiply, so it distributes over everything. */
7685 /* Non-paradoxical SUBREGs distribute over all operations, provided
7686 the inner modes and byte offsets are the same, this is an extraction
7687 of a low-order part, we don't convert an fp operation to int or
7688 vice versa, and we would not be converting a single-word
7689 operation into a multi-word operation. The latter test is not
7690 required, but it prevents generating unneeded multi-word operations.
7691 Some of the previous tests are redundant given the latter test, but
7692 are retained because they are required for correctness.
7694 We produce the result slightly differently in this case. */
7696 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7697 || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
7698 || ! subreg_lowpart_p (lhs)
7699 || (GET_MODE_CLASS (GET_MODE (lhs))
7700 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
7701 || (GET_MODE_SIZE (GET_MODE (lhs))
7702 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
7703 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
7706 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7707 SUBREG_REG (lhs), SUBREG_REG (rhs));
7708 return gen_lowpart_for_combine (GET_MODE (x), tem);
7714 /* Set LHS and RHS to the inner operands (A and B in the example
7715 above) and set OTHER to the common operand (C in the example).
7716 There is only one way to do this unless the inner operation is
7718 if (GET_RTX_CLASS (inner_code) == 'c'
7719 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7720 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7721 else if (GET_RTX_CLASS (inner_code) == 'c'
7722 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7723 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7724 else if (GET_RTX_CLASS (inner_code) == 'c'
7725 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7726 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7727 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7728 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7732 /* Form the new inner operation, seeing if it simplifies first. */
7733 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7735 /* There is one exception to the general way of distributing:
7736 (a | b) ^ (a | c) -> (~a) & (b ^ c).  (The guard below matches an
7737 outer XOR of inner IORs; the previously-written identity
7738 "(a ^ b) | (a ^ c)" does not hold -- try a = 0.)  */
7737 if (code == XOR && inner_code == IOR)
7740 other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
7743 /* We may be able to continue distributing the result, so call
7744 ourselves recursively on the inner operation before forming the
7745 outer operation, which we return. */
7746 return gen_binary (inner_code, GET_MODE (x),
7747 apply_distributive_law (tem), other);
7750 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7753 Return an equivalent form, if different from X. Otherwise, return X. If
7754 X is zero, we are to always construct the equivalent form. */
7757 simplify_and_const_int (x, mode, varop, constop)
7759 enum machine_mode mode;
7761 unsigned HOST_WIDE_INT constop;
7763 unsigned HOST_WIDE_INT nonzero;
7766 /* Simplify VAROP knowing that we will be only looking at some of the
7768 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
7770 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
7771 CONST_INT, we are done. */
7772 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
7775 /* See what bits may be nonzero in VAROP. Unlike the general case of
7776 a call to nonzero_bits, here we don't care about bits outside
7779 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
7780 nonzero = trunc_int_for_mode (nonzero, mode);
7782 /* Turn off all bits in the constant that are known to already be zero.
7783 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
7784 which is tested below. */
7788 /* If we don't have any bits left, return zero. */
7792 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7793 a power of two, we can replace this with an ASHIFT. */
7794 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7795 && (i = exact_log2 (constop)) >= 0)
7796 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
7798 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7799 or XOR, then try to apply the distributive law. This may eliminate
7800 operations if either branch can be simplified because of the AND.
7801 It may also make some cases more complex, but those cases probably
7802 won't match a pattern either with or without this. */
7804 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7806 gen_lowpart_for_combine
7808 apply_distributive_law
7809 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7810 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7811 XEXP (varop, 0), constop),
7812 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7813 XEXP (varop, 1), constop))));
7815 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
7816 if we already had one (just check for the simplest cases). */
7817 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7818 && GET_MODE (XEXP (x, 0)) == mode
7819 && SUBREG_REG (XEXP (x, 0)) == varop)
7820 varop = XEXP (x, 0);
7822 varop = gen_lowpart_for_combine (mode, varop);
7824 /* If we can't make the SUBREG, try to return what we were given. */
7825 if (GET_CODE (varop) == CLOBBER)
7826 return x ? x : varop;
7828 /* If we are only masking insignificant bits, return VAROP. */
7829 if (constop == nonzero)
7832 /* Otherwise, return an AND. See how much, if any, of X we can use. */
7833 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
7834 x = gen_binary (AND, mode, varop, GEN_INT (constop));
     /* X is already an AND in the right mode; reuse it, substituting
        new operands in place only where they differ.  */
7838 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7839 || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
7840 SUBST (XEXP (x, 1), GEN_INT (constop));
7842 SUBST (XEXP (x, 0), varop);
7848 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
7849 We don't let nonzero_bits recur into num_sign_bit_copies, because that
7850 is less useful. We can't allow both, because that results in exponential
7851 run time recursion. There is a nullstone testcase that triggered
7852 this. This macro avoids accidental uses of num_sign_bit_copies. */
/* "Poison" the name: the macro takes zero arguments, so any textual
   call of num_sign_bit_copies with arguments below this point fails
   at preprocessing time instead of silently recursing.  */
7853 #define num_sign_bit_copies()
7855 /* Given an expression, X, compute which bits in X can be non-zero.
7856 We don't care about bits outside of those defined in MODE.
7858 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
7859 a shift, AND, or zero_extract, we can do better. */
7861 static unsigned HOST_WIDE_INT
7862 nonzero_bits (x, mode)
7864 enum machine_mode mode;
7866 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
7867 unsigned HOST_WIDE_INT inner_nz;
7869 unsigned int mode_width = GET_MODE_BITSIZE (mode);
7872 /* For floating-point values, assume all bits are needed. */
7873 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
7876 /* If X is wider than MODE, use its mode instead. */
7877 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
7879 mode = GET_MODE (x);
7880 nonzero = GET_MODE_MASK (mode);
7881 mode_width = GET_MODE_BITSIZE (mode);
7884 if (mode_width > HOST_BITS_PER_WIDE_INT)
7885 /* Our only callers in this case look for single bit values. So
7886 just return the mode mask. Those tests will then be false. */
7889 #ifndef WORD_REGISTER_OPERATIONS
7890 /* If MODE is wider than X, but both are a single word for both the host
7891 and target machines, we can compute this from which bits of the
7892 object might be nonzero in its own mode, taking into account the fact
7893 that on many CISC machines, accessing an object in a wider mode
7894 causes the high-order bits to become undefined. So they are
7895 not known to be zero. */
7897 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
7898 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
7899 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7900 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
7902 nonzero &= nonzero_bits (x, GET_MODE (x));
7903 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
7908 code = GET_CODE (x);
7912 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
7913 /* If pointers extend unsigned and this is a pointer in Pmode, say that
7914 all the bits above ptr_mode are known to be zero. */
7915 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
7917 nonzero &= GET_MODE_MASK (ptr_mode);
7920 #ifdef STACK_BOUNDARY
7921 /* If this is the stack pointer, we may know something about its
7922 alignment. If PUSH_ROUNDING is defined, it is possible for the
7923 stack to be momentarily aligned only to that amount, so we pick
7924 the least alignment. */
7926 /* We can't check for arg_pointer_rtx here, because it is not
7927 guaranteed to have as much alignment as the stack pointer.
7928 In particular, in the Irix6 n64 ABI, the stack has 128 bit
7929 alignment but the argument pointer has only 64 bit alignment. */
7931 if ((x == frame_pointer_rtx
7932 || x == stack_pointer_rtx
7933 || x == hard_frame_pointer_rtx
7934 || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
7935 && REGNO (x) <= LAST_VIRTUAL_REGISTER))
7941 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
7943 #ifdef PUSH_ROUNDING
7944 if (REGNO (x) == STACK_POINTER_REGNUM && PUSH_ARGS)
7945 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
7948 /* We must return here, otherwise we may get a worse result from
7949 one of the choices below. There is nothing useful below as
7950 far as the stack pointer is concerned. */
7951 return nonzero &= ~(sp_alignment - 1);
7955 /* If X is a register whose nonzero bits value is current, use it.
7956 Otherwise, if X is a register whose value we can find, use that
7957 value. Otherwise, use the previously-computed global nonzero bits
7958 for this register. */
7960 if (reg_last_set_value[REGNO (x)] != 0
7961 && reg_last_set_mode[REGNO (x)] == mode
7962 && (reg_last_set_label[REGNO (x)] == label_tick
7963 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
7964 && REG_N_SETS (REGNO (x)) == 1
7965 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
7967 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7968 return reg_last_set_nonzero_bits[REGNO (x)];
7970 tem = get_last_value (x);
7974 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7975 /* If X is narrower than MODE and TEM is a non-negative
7976 constant that would appear negative in the mode of X,
7977 sign-extend it for use in reg_nonzero_bits because some
7978 machines (maybe most) will actually do the sign-extension
7979 and this is the conservative approach.
7981 ??? For 2.5, try to tighten up the MD files in this regard
7982 instead of this kludge. */
7984 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
7985 && GET_CODE (tem) == CONST_INT
7987 && 0 != (INTVAL (tem)
7988 & ((HOST_WIDE_INT) 1
7989 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7990 tem = GEN_INT (INTVAL (tem)
7991 | ((HOST_WIDE_INT) (-1)
7992 << GET_MODE_BITSIZE (GET_MODE (x))));
7994 return nonzero_bits (tem, mode);
7996 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
7997 return reg_nonzero_bits[REGNO (x)] & nonzero;
8002 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8003 /* If X is negative in MODE, sign-extend the value. */
8004 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
8005 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
8006 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
8012 #ifdef LOAD_EXTEND_OP
8013 /* In many, if not most, RISC machines, reading a byte from memory
8014 zeros the rest of the register. Noticing that fact saves a lot
8015 of extra zero-extends. */
8016 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
8017 nonzero &= GET_MODE_MASK (GET_MODE (x));
8022 case UNEQ: case LTGT:
8023 case GT: case GTU: case UNGT:
8024 case LT: case LTU: case UNLT:
8025 case GE: case GEU: case UNGE:
8026 case LE: case LEU: case UNLE:
8027 case UNORDERED: case ORDERED:
8029 /* If this produces an integer result, we know which bits are set.
8030 Code here used to clear bits outside the mode of X, but that is
8033 if (GET_MODE_CLASS (mode) == MODE_INT
8034 && mode_width <= HOST_BITS_PER_WIDE_INT)
8035 nonzero = STORE_FLAG_VALUE;
8040 /* Disabled to avoid exponential mutual recursion between nonzero_bits
8041 and num_sign_bit_copies. */
8042 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8043 == GET_MODE_BITSIZE (GET_MODE (x)))
8047 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
8048 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
8053 /* Disabled to avoid exponential mutual recursion between nonzero_bits
8054 and num_sign_bit_copies. */
8055 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8056 == GET_MODE_BITSIZE (GET_MODE (x)))
8062 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
8066 nonzero &= nonzero_bits (XEXP (x, 0), mode);
8067 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8068 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8072 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
8073 Otherwise, show all the bits in the outer mode but not the inner
8075 inner_nz = nonzero_bits (XEXP (x, 0), mode);
8076 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8078 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8080 & (((HOST_WIDE_INT) 1
8081 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
8082 inner_nz |= (GET_MODE_MASK (mode)
8083 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
8086 nonzero &= inner_nz;
8090 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
8091 & nonzero_bits (XEXP (x, 1), mode));
8095 case UMIN: case UMAX: case SMIN: case SMAX:
8096 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
8097 | nonzero_bits (XEXP (x, 1), mode));
8100 case PLUS: case MINUS:
8102 case DIV: case UDIV:
8103 case MOD: case UMOD:
8104 /* We can apply the rules of arithmetic to compute the number of
8105 high- and low-order zero bits of these operations. We start by
8106 computing the width (position of the highest-order non-zero bit)
8107 and the number of low-order zero bits for each value. */
8109 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
8110 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
8111 int width0 = floor_log2 (nz0) + 1;
8112 int width1 = floor_log2 (nz1) + 1;
8113 int low0 = floor_log2 (nz0 & -nz0);
8114 int low1 = floor_log2 (nz1 & -nz1);
8115 HOST_WIDE_INT op0_maybe_minusp
8116 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8117 HOST_WIDE_INT op1_maybe_minusp
8118 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8119 unsigned int result_width = mode_width;
8127 && (XEXP (x, 0) == stack_pointer_rtx
8128 || XEXP (x, 0) == frame_pointer_rtx)
8129 && GET_CODE (XEXP (x, 1)) == CONST_INT)
8131 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
8133 nz0 = (GET_MODE_MASK (mode) & ~(sp_alignment - 1));
8134 nz1 = INTVAL (XEXP (x, 1)) - STACK_BIAS;
8135 width0 = floor_log2 (nz0) + 1;
8136 width1 = floor_log2 (nz1) + 1;
8137 low0 = floor_log2 (nz0 & -nz0);
8138 low1 = floor_log2 (nz1 & -nz1);
8141 result_width = MAX (width0, width1) + 1;
8142 result_low = MIN (low0, low1);
8145 result_low = MIN (low0, low1);
8148 result_width = width0 + width1;
8149 result_low = low0 + low1;
8154 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8155 result_width = width0;
8160 result_width = width0;
8165 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8166 result_width = MIN (width0, width1);
8167 result_low = MIN (low0, low1);
8172 result_width = MIN (width0, width1);
8173 result_low = MIN (low0, low1);
8179 if (result_width < mode_width)
8180 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
8183 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
8185 #ifdef POINTERS_EXTEND_UNSIGNED
8186 /* If pointers extend unsigned and this is an addition or subtraction
8187 to a pointer in Pmode, all the bits above ptr_mode are known to be
8189 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
8190 && (code == PLUS || code == MINUS)
8191 && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8192 nonzero &= GET_MODE_MASK (ptr_mode);
8198 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8199 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8200 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
8204 /* If this is a SUBREG formed for a promoted variable that has
8205 been zero-extended, we know that at least the high-order bits
8206 are zero, though others might be too. */
8208 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
8209 nonzero = (GET_MODE_MASK (GET_MODE (x))
8210 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
8212 /* If the inner mode is a single word for both the host and target
8213 machines, we can compute this from which bits of the inner
8214 object might be nonzero. */
8215 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
8216 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8217 <= HOST_BITS_PER_WIDE_INT))
8219 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
8221 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
8222 /* If this is a typical RISC machine, we only have to worry
8223 about the way loads are extended. */
8224 if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
8226 & (((unsigned HOST_WIDE_INT) 1
8227 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
8229 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
8232 /* On many CISC machines, accessing an object in a wider mode
8233 causes the high-order bits to become undefined. So they are
8234 not known to be zero. */
8235 if (GET_MODE_SIZE (GET_MODE (x))
8236 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8237 nonzero |= (GET_MODE_MASK (GET_MODE (x))
8238 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
8247 /* The nonzero bits are in two classes: any bits within MODE
8248 that aren't in GET_MODE (x) are always significant. The rest of the
8249 nonzero bits are those that are significant in the operand of
8250 the shift when shifted the appropriate number of bits. This
8251 shows that high-order bits are cleared by the right shift and
8252 low-order bits by left shifts. */
8253 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8254 && INTVAL (XEXP (x, 1)) >= 0
8255 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8257 enum machine_mode inner_mode = GET_MODE (x);
8258 unsigned int width = GET_MODE_BITSIZE (inner_mode);
8259 int count = INTVAL (XEXP (x, 1));
8260 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
8261 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
8262 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
8263 unsigned HOST_WIDE_INT outer = 0;
8265 if (mode_width > width)
8266 outer = (op_nonzero & nonzero & ~mode_mask);
8268 if (code == LSHIFTRT)
8270 else if (code == ASHIFTRT)
8274 /* If the sign bit may have been nonzero before the shift, we
8275 need to mark all the places it could have been copied to
8276 by the shift as possibly nonzero. */
8277 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
8278 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
8280 else if (code == ASHIFT)
8283 inner = ((inner << (count % width)
8284 | (inner >> (width - (count % width)))) & mode_mask);
8286 nonzero &= (outer | inner);
8291 /* This is at most the number of bits in the mode. */
8292 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
8296 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
8297 | nonzero_bits (XEXP (x, 2), mode));
8307 /* See the macro definition above. */
8308 #undef num_sign_bit_copies
8310 /* Return the number of bits at the high-order end of X that are known to
8311 be equal to the sign bit. X will be used in mode MODE; if MODE is
8312 VOIDmode, X will be used in its own mode. The returned value will always
8313 be between 1 and the number of bits in MODE. */
8316 num_sign_bit_copies (x, mode)
8318 enum machine_mode mode;
8320 enum rtx_code code = GET_CODE (x);
8321 unsigned int bitwidth;
8322 int num0, num1, result;
8323 unsigned HOST_WIDE_INT nonzero;
8326 /* If we weren't given a mode, use the mode of X. If the mode is still
8327 VOIDmode, we don't know anything. Likewise if one of the modes is
8330 if (mode == VOIDmode)
8331 mode = GET_MODE (x);
8333 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
8336 bitwidth = GET_MODE_BITSIZE (mode);
8338 /* For a smaller object, just ignore the high bits. */
8339 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
8341 num0 = num_sign_bit_copies (x, GET_MODE (x));
8343 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
8346 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
8348 #ifndef WORD_REGISTER_OPERATIONS
8349 /* If this machine does not do all register operations on the entire
8350 register and MODE is wider than the mode of X, we can say nothing
8351 at all about the high-order bits. */
8354 /* Likewise on machines that do, if the mode of the object is smaller
8355 than a word and loads of that size don't sign extend, we can say
8356 nothing about the high order bits. */
8357 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
8358 #ifdef LOAD_EXTEND_OP
8359 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
8370 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
8371 /* If pointers extend signed and this is a pointer in Pmode, say that
8372 all the bits above ptr_mode are known to be sign bit copies. */
8373 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
8375 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
8378 if (reg_last_set_value[REGNO (x)] != 0
8379 && reg_last_set_mode[REGNO (x)] == mode
8380 && (reg_last_set_label[REGNO (x)] == label_tick
8381 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8382 && REG_N_SETS (REGNO (x)) == 1
8383 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
8385 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8386 return reg_last_set_sign_bit_copies[REGNO (x)];
8388 tem = get_last_value (x);
8390 return num_sign_bit_copies (tem, mode);
8392 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
8393 return reg_sign_bit_copies[REGNO (x)];
8397 #ifdef LOAD_EXTEND_OP
8398 /* Some RISC machines sign-extend all loads of smaller than a word. */
8399 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
8400 return MAX (1, ((int) bitwidth
8401 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
8406 /* If the constant is negative, take its 1's complement and remask.
8407 Then see how many zero bits we have. */
8408 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
8409 if (bitwidth <= HOST_BITS_PER_WIDE_INT
8410 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8411 nonzero = (~nonzero) & GET_MODE_MASK (mode);
8413 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8416 /* If this is a SUBREG for a promoted object that is sign-extended
8417 and we are looking at it in a wider mode, we know that at least the
8418 high-order bits are known to be sign bit copies. */
8420 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
8422 num0 = num_sign_bit_copies (SUBREG_REG (x), mode);
8423 return MAX ((int) bitwidth
8424 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
8428 /* For a smaller object, just ignore the high bits. */
8429 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
8431 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
8432 return MAX (1, (num0
8433 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8437 #ifdef WORD_REGISTER_OPERATIONS
8438 #ifdef LOAD_EXTEND_OP
8439 /* For paradoxical SUBREGs on machines where all register operations
8440 affect the entire register, just look inside. Note that we are
8441 passing MODE to the recursive call, so the number of sign bit copies
8442 will remain relative to that mode, not the inner mode. */
8444 /* This works only if loads sign extend. Otherwise, if we get a
8445 reload for the inner part, it may be loaded from the stack, and
8446 then we lose all sign bit copies that existed before the store
8449 if ((GET_MODE_SIZE (GET_MODE (x))
8450 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8451 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
8452 return num_sign_bit_copies (SUBREG_REG (x), mode);
8458 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
8459 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
8463 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8464 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
8467 /* For a smaller object, just ignore the high bits. */
8468 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
8469 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8473 return num_sign_bit_copies (XEXP (x, 0), mode);
8475 case ROTATE: case ROTATERT:
8476 /* If we are rotating left by a number of bits less than the number
8477 of sign bit copies, we can just subtract that amount from the
8479 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8480 && INTVAL (XEXP (x, 1)) >= 0
8481 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
8483 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8484 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
8485 : (int) bitwidth - INTVAL (XEXP (x, 1))));
8490 /* In general, this subtracts one sign bit copy. But if the value
8491 is known to be positive, the number of sign bit copies is the
8492 same as that of the input. Finally, if the input has just one bit
8493 that might be nonzero, all the bits are copies of the sign bit. */
8494 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8495 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8496 return num0 > 1 ? num0 - 1 : 1;
8498 nonzero = nonzero_bits (XEXP (x, 0), mode);
8503 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
8508 case IOR: case AND: case XOR:
8509 case SMIN: case SMAX: case UMIN: case UMAX:
8510 /* Logical operations will preserve the number of sign-bit copies.
8511 MIN and MAX operations always return one of the operands. */
8512 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8513 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8514 return MIN (num0, num1);
8516 case PLUS: case MINUS:
8517 /* For addition and subtraction, we can have a 1-bit carry. However,
8518 if we are subtracting 1 from a positive number, there will not
8519 be such a carry. Furthermore, if the positive number is known to
8520 be 0 or 1, we know the result is either -1 or 0. */
8522 if (code == PLUS && XEXP (x, 1) == constm1_rtx
8523 && bitwidth <= HOST_BITS_PER_WIDE_INT)
8525 nonzero = nonzero_bits (XEXP (x, 0), mode);
8526 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
8527 return (nonzero == 1 || nonzero == 0 ? bitwidth
8528 : bitwidth - floor_log2 (nonzero) - 1);
8531 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8532 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8533 result = MAX (1, MIN (num0, num1) - 1);
8535 #ifdef POINTERS_EXTEND_UNSIGNED
8536 /* If pointers extend signed and this is an addition or subtraction
8537 to a pointer in Pmode, all the bits above ptr_mode are known to be
8539 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8540 && (code == PLUS || code == MINUS)
8541 && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8542 result = MAX ((GET_MODE_BITSIZE (Pmode)
8543 - GET_MODE_BITSIZE (ptr_mode) + 1),
8549 /* The number of bits of the product is the sum of the number of
8550 bits of both terms. However, unless one of the terms if known
8551 to be positive, we must allow for an additional bit since negating
8552 a negative number can remove one sign bit copy. */
8554 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8555 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8557 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8559 && (bitwidth > HOST_BITS_PER_WIDE_INT
8560 || (((nonzero_bits (XEXP (x, 0), mode)
8561 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8562 && ((nonzero_bits (XEXP (x, 1), mode)
8563 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
8566 return MAX (1, result);
8569 /* The result must be <= the first operand. If the first operand
8570 has the high bit set, we know nothing about the number of sign
8572 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8574 else if ((nonzero_bits (XEXP (x, 0), mode)
8575 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8578 return num_sign_bit_copies (XEXP (x, 0), mode);
8581 /* The result must be <= the scond operand. */
8582 return num_sign_bit_copies (XEXP (x, 1), mode);
8585 /* Similar to unsigned division, except that we have to worry about
8586 the case where the divisor is negative, in which case we have
8588 result = num_sign_bit_copies (XEXP (x, 0), mode);
8590 && (bitwidth > HOST_BITS_PER_WIDE_INT
8591 || (nonzero_bits (XEXP (x, 1), mode)
8592 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8598 result = num_sign_bit_copies (XEXP (x, 1), mode);
8600 && (bitwidth > HOST_BITS_PER_WIDE_INT
8601 || (nonzero_bits (XEXP (x, 1), mode)
8602 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8608 /* Shifts by a constant add to the number of bits equal to the
8610 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8611 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8612 && INTVAL (XEXP (x, 1)) > 0)
8613 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
8618 /* Left shifts destroy copies. */
8619 if (GET_CODE (XEXP (x, 1)) != CONST_INT
8620 || INTVAL (XEXP (x, 1)) < 0
8621 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
8624 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8625 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8628 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
8629 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
8630 return MIN (num0, num1);
8632 case EQ: case NE: case GE: case GT: case LE: case LT:
8633 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
8634 case GEU: case GTU: case LEU: case LTU:
8635 case UNORDERED: case ORDERED:
8636 /* If the constant is negative, take its 1's complement and remask.
8637 Then see how many zero bits we have. */
8638 nonzero = STORE_FLAG_VALUE;
8639 if (bitwidth <= HOST_BITS_PER_WIDE_INT
8640 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8641 nonzero = (~nonzero) & GET_MODE_MASK (mode);
8643 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8650 /* If we haven't been able to figure it out by one of the above rules,
8651 see if some of the high-order bits are known to be zero. If so,
8652 count those bits and return one less than that amount. If we can't
8653 safely compute the mask for this mode, always return BITWIDTH. */
8655 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8658 nonzero = nonzero_bits (x, mode);
8659 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
8660 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
8663 /* Return the number of "extended" bits there are in X, when interpreted
8664 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
8665 unsigned quantities, this is the number of high-order zero bits.
8666 For signed quantities, this is the number of copies of the sign bit
8667 minus 1. In both case, this function returns the number of "spare"
8668 bits. For example, if two quantities for which this function returns
8669 at least 1 are added, the addition is known not to overflow.
8671 This function will always return 0 unless called during combine, which
8672 implies that it must be called from a define_split. */
8675 extended_count (x, mode, unsignedp)
8677 enum machine_mode mode;
8680 if (nonzero_sign_valid == 0)
8684 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8685 ? (GET_MODE_BITSIZE (mode) - 1
8686 - floor_log2 (nonzero_bits (x, mode)))
8688 : num_sign_bit_copies (x, mode) - 1);
8691 /* This function is called from `simplify_shift_const' to merge two
8692 outer operations. Specifically, we have already found that we need
8693 to perform operation *POP0 with constant *PCONST0 at the outermost
8694 position. We would now like to also perform OP1 with constant CONST1
8695 (with *POP0 being done last).
8697 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8698 the resulting operation. *PCOMP_P is set to 1 if we would need to
8699 complement the innermost operand, otherwise it is unchanged.
8701 MODE is the mode in which the operation will be done. No bits outside
8702 the width of this mode matter. It is assumed that the width of this mode
8703 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8705 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
8706 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
8707 result is simply *PCONST0.
8709 If the resulting operation cannot be expressed as one operation, we
8710 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
8713 merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
8714 enum rtx_code *pop0;
8715 HOST_WIDE_INT *pconst0;
8717 HOST_WIDE_INT const1;
8718 enum machine_mode mode;
8721 enum rtx_code op0 = *pop0;
8722 HOST_WIDE_INT const0 = *pconst0;
8724 const0 &= GET_MODE_MASK (mode);
8725 const1 &= GET_MODE_MASK (mode);
8727 /* If OP0 is an AND, clear unimportant bits in CONST1. */
8731 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
8734 if (op1 == NIL || op0 == SET)
8737 else if (op0 == NIL)
8738 op0 = op1, const0 = const1;
8740 else if (op0 == op1)
8764 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
8765 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8768 /* If the two constants aren't the same, we can't do anything. The
8769 remaining six cases can all be done. */
8770 else if (const0 != const1)
8778 /* (a & b) | b == b */
8780 else /* op1 == XOR */
8781 /* (a ^ b) | b == a | b */
8787 /* (a & b) ^ b == (~a) & b */
8788 op0 = AND, *pcomp_p = 1;
8789 else /* op1 == IOR */
8790 /* (a | b) ^ b == a & ~b */
8791 op0 = AND, *pconst0 = ~const0;
8796 /* (a | b) & b == b */
8798 else /* op1 == XOR */
8799 /* (a ^ b) & b) == (~a) & b */
8806 /* Check for NO-OP cases. */
8807 const0 &= GET_MODE_MASK (mode);
8809 && (op0 == IOR || op0 == XOR || op0 == PLUS))
8811 else if (const0 == 0 && op0 == AND)
8813 else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
8817 /* ??? Slightly redundant with the above mask, but not entirely.
8818 Moving this above means we'd have to sign-extend the mode mask
8819 for the final test. */
8820 const0 = trunc_int_for_mode (const0, mode);
8828 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
8829 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
8830 that we started with.
8832 The shift is normally computed in the widest mode we find in VAROP, as
8833 long as it isn't a different number of words than RESULT_MODE. Exceptions
8834 are ASHIFTRT and ROTATE, which are always done in their original mode, */
8837 simplify_shift_const (x, code, result_mode, varop, input_count)
8840 enum machine_mode result_mode;
8844 enum rtx_code orig_code = code;
8845 int orig_count = input_count;
8848 enum machine_mode mode = result_mode;
8849 enum machine_mode shift_mode, tmode;
8850 unsigned int mode_words
8851 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8852 /* We form (outer_op (code varop count) (outer_const)). */
8853 enum rtx_code outer_op = NIL;
8854 HOST_WIDE_INT outer_const = 0;
8856 int complement_p = 0;
8859 /* If we were given an invalid count, don't do anything except exactly
8860 what was requested. */
8862 if (input_count < 0 || input_count > (int) GET_MODE_BITSIZE (mode))
8867 return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (input_count));
8870 count = input_count;
8872 /* Make sure and truncate the "natural" shift on the way in. We don't
8873 want to do this inside the loop as it makes it more difficult to
8875 #ifdef SHIFT_COUNT_TRUNCATED
8876 if (SHIFT_COUNT_TRUNCATED)
8877 count %= GET_MODE_BITSIZE (mode);
8880 /* Unless one of the branches of the `if' in this loop does a `continue',
8881 we will `break' the loop after the `if'. */
8885 /* If we have an operand of (clobber (const_int 0)), just return that
8887 if (GET_CODE (varop) == CLOBBER)
8890 /* If we discovered we had to complement VAROP, leave. Making a NOT
8891 here would cause an infinite loop. */
8895 /* Convert ROTATERT to ROTATE. */
8896 if (code == ROTATERT)
8897 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8899 /* We need to determine what mode we will do the shift in. If the
8900 shift is a right shift or a ROTATE, we must always do it in the mode
8901 it was originally done in. Otherwise, we can do it in MODE, the
8902 widest mode encountered. */
8904 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8905 ? result_mode : mode);
8907 /* Handle cases where the count is greater than the size of the mode
8908 minus 1. For ASHIFT, use the size minus one as the count (this can
8909 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
8910 take the count modulo the size. For other shifts, the result is
8913 Since these shifts are being produced by the compiler by combining
8914 multiple operations, each of which are defined, we know what the
8915 result is supposed to be. */
8917 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
8919 if (code == ASHIFTRT)
8920 count = GET_MODE_BITSIZE (shift_mode) - 1;
8921 else if (code == ROTATE || code == ROTATERT)
8922 count %= GET_MODE_BITSIZE (shift_mode);
8925 /* We can't simply return zero because there may be an
8933 /* An arithmetic right shift of a quantity known to be -1 or 0
8935 if (code == ASHIFTRT
8936 && (num_sign_bit_copies (varop, shift_mode)
8937 == GET_MODE_BITSIZE (shift_mode)))
8943 /* If we are doing an arithmetic right shift and discarding all but
8944 the sign bit copies, this is equivalent to doing a shift by the
8945 bitsize minus one. Convert it into that shift because it will often
8946 allow other simplifications. */
8948 if (code == ASHIFTRT
8949 && (count + num_sign_bit_copies (varop, shift_mode)
8950 >= GET_MODE_BITSIZE (shift_mode)))
8951 count = GET_MODE_BITSIZE (shift_mode) - 1;
8953 /* We simplify the tests below and elsewhere by converting
8954 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8955 `make_compound_operation' will convert it to a ASHIFTRT for
8956 those machines (such as VAX) that don't have a LSHIFTRT. */
8957 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8959 && ((nonzero_bits (varop, shift_mode)
8960 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
8964 switch (GET_CODE (varop))
8970 new = expand_compound_operation (varop);
8979 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8980 minus the width of a smaller mode, we can do this with a
8981 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
8982 if ((code == ASHIFTRT || code == LSHIFTRT)
8983 && ! mode_dependent_address_p (XEXP (varop, 0))
8984 && ! MEM_VOLATILE_P (varop)
8985 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8986 MODE_INT, 1)) != BLKmode)
8988 new = adjust_address_nv (varop, tmode,
8989 BYTES_BIG_ENDIAN ? 0
8990 : count / BITS_PER_UNIT);
8992 varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
8993 : ZERO_EXTEND, mode, new);
9000 /* Similar to the case above, except that we can only do this if
9001 the resulting mode is the same as that of the underlying
9002 MEM and adjust the address depending on the *bits* endianness
9003 because of the way that bit-field extract insns are defined. */
9004 if ((code == ASHIFTRT || code == LSHIFTRT)
9005 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9006 MODE_INT, 1)) != BLKmode
9007 && tmode == GET_MODE (XEXP (varop, 0)))
9009 if (BITS_BIG_ENDIAN)
9010 new = XEXP (varop, 0);
9013 new = copy_rtx (XEXP (varop, 0));
9014 SUBST (XEXP (new, 0),
9015 plus_constant (XEXP (new, 0),
9016 count / BITS_PER_UNIT));
9019 varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9020 : ZERO_EXTEND, mode, new);
9027 /* If VAROP is a SUBREG, strip it as long as the inner operand has
9028 the same number of words as what we've seen so far. Then store
9029 the widest mode in MODE. */
9030 if (subreg_lowpart_p (varop)
9031 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9032 > GET_MODE_SIZE (GET_MODE (varop)))
9033 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9034 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9037 varop = SUBREG_REG (varop);
9038 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9039 mode = GET_MODE (varop);
9045 /* Some machines use MULT instead of ASHIFT because MULT
9046 is cheaper. But it is still better on those machines to
9047 merge two shifts into one. */
9048 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9049 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9052 = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
9053 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9059 /* Similar, for when divides are cheaper. */
9060 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9061 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9064 = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
9065 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9071 /* If we are extracting just the sign bit of an arithmetic
9072 right shift, that shift is not needed. However, the sign
9073 bit of a wider mode may be different from what would be
9074 interpreted as the sign bit in a narrower mode, so, if
9075 the result is narrower, don't discard the shift. */
9076 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9077 && (GET_MODE_BITSIZE (result_mode)
9078 >= GET_MODE_BITSIZE (GET_MODE (varop))))
9080 varop = XEXP (varop, 0);
9084 /* ... fall through ... */
9089 /* Here we have two nested shifts. The result is usually the
9090 AND of a new shift with a mask. We compute the result below. */
9091 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9092 && INTVAL (XEXP (varop, 1)) >= 0
9093 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
9094 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9095 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9097 enum rtx_code first_code = GET_CODE (varop);
9098 unsigned int first_count = INTVAL (XEXP (varop, 1));
9099 unsigned HOST_WIDE_INT mask;
9102 /* We have one common special case. We can't do any merging if
9103 the inner code is an ASHIFTRT of a smaller mode. However, if
9104 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9105 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9106 we can convert it to
9107 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
9108 This simplifies certain SIGN_EXTEND operations. */
9109 if (code == ASHIFT && first_code == ASHIFTRT
9110 && (GET_MODE_BITSIZE (result_mode)
9111 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
9113 /* C3 has the low-order C1 bits zero. */
9115 mask = (GET_MODE_MASK (mode)
9116 & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
9118 varop = simplify_and_const_int (NULL_RTX, result_mode,
9119 XEXP (varop, 0), mask);
9120 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
9122 count = first_count;
9127 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9128 than C1 high-order bits equal to the sign bit, we can convert
9129 this to either an ASHIFT or a ASHIFTRT depending on the
9132 We cannot do this if VAROP's mode is not SHIFT_MODE. */
9134 if (code == ASHIFTRT && first_code == ASHIFT
9135 && GET_MODE (varop) == shift_mode
9136 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9139 varop = XEXP (varop, 0);
9141 signed_count = count - first_count;
9142 if (signed_count < 0)
9143 count = -signed_count, code = ASHIFT;
9145 count = signed_count;
9150 /* There are some cases we can't do. If CODE is ASHIFTRT,
9151 we can only do this if FIRST_CODE is also ASHIFTRT.
9153 We can't do the case when CODE is ROTATE and FIRST_CODE is
9156 If the mode of this shift is not the mode of the outer shift,
9157 we can't do this if either shift is a right shift or ROTATE.
9159 Finally, we can't do any of these if the mode is too wide
9160 unless the codes are the same.
9162 Handle the case where the shift codes are the same
9165 if (code == first_code)
9167 if (GET_MODE (varop) != result_mode
9168 && (code == ASHIFTRT || code == LSHIFTRT
9172 count += first_count;
9173 varop = XEXP (varop, 0);
9177 if (code == ASHIFTRT
9178 || (code == ROTATE && first_code == ASHIFTRT)
9179 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
9180 || (GET_MODE (varop) != result_mode
9181 && (first_code == ASHIFTRT || first_code == LSHIFTRT
9182 || first_code == ROTATE
9183 || code == ROTATE)))
9186 /* To compute the mask to apply after the shift, shift the
9187 nonzero bits of the inner shift the same way the
9188 outer shift will. */
9190 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
9193 = simplify_binary_operation (code, result_mode, mask_rtx,
9196 /* Give up if we can't compute an outer operation to use. */
9198 || GET_CODE (mask_rtx) != CONST_INT
9199 || ! merge_outer_ops (&outer_op, &outer_const, AND,
9201 result_mode, &complement_p))
9204 /* If the shifts are in the same direction, we add the
9205 counts. Otherwise, we subtract them. */
9206 signed_count = count;
9207 if ((code == ASHIFTRT || code == LSHIFTRT)
9208 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
9209 signed_count += first_count;
9211 signed_count -= first_count;
9213 /* If COUNT is positive, the new shift is usually CODE,
9214 except for the two exceptions below, in which case it is
9215 FIRST_CODE. If the count is negative, FIRST_CODE should
9217 if (signed_count > 0
9218 && ((first_code == ROTATE && code == ASHIFT)
9219 || (first_code == ASHIFTRT && code == LSHIFTRT)))
9220 code = first_code, count = signed_count;
9221 else if (signed_count < 0)
9222 code = first_code, count = -signed_count;
9224 count = signed_count;
9226 varop = XEXP (varop, 0);
9230 /* If we have (A << B << C) for any shift, we can convert this to
9231 (A << C << B). This wins if A is a constant. Only try this if
9232 B is not a constant. */
9234 else if (GET_CODE (varop) == code
9235 && GET_CODE (XEXP (varop, 1)) != CONST_INT
9237 = simplify_binary_operation (code, mode,
9241 varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
9248 /* Make this fit the case below. */
9249 varop = gen_rtx_XOR (mode, XEXP (varop, 0),
9250 GEN_INT (GET_MODE_MASK (mode)));
9256 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9257 with C the size of VAROP - 1 and the shift is logical if
9258 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9259 we have an (le X 0) operation. If we have an arithmetic shift
9260 and STORE_FLAG_VALUE is 1 or we have a logical shift with
9261 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
9263 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9264 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9265 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9266 && (code == LSHIFTRT || code == ASHIFTRT)
9267 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
9268 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9271 varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
9274 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9275 varop = gen_rtx_NEG (GET_MODE (varop), varop);
9280 /* If we have (shift (logical)), move the logical to the outside
9281 to allow it to possibly combine with another logical and the
9282 shift to combine with another shift. This also canonicalizes to
9283 what a ZERO_EXTRACT looks like. Also, some machines have
9284 (and (shift)) insns. */
9286 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9287 && (new = simplify_binary_operation (code, result_mode,
9289 GEN_INT (count))) != 0
9290 && GET_CODE (new) == CONST_INT
9291 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9292 INTVAL (new), result_mode, &complement_p))
9294 varop = XEXP (varop, 0);
9298 /* If we can't do that, try to simplify the shift in each arm of the
9299 logical expression, make a new logical expression, and apply
9300 the inverse distributive law. */
9302 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9303 XEXP (varop, 0), count);
9304 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9305 XEXP (varop, 1), count);
9307 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
9308 varop = apply_distributive_law (varop);
9315 /* convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9316 says that the sign bit can be tested, FOO has mode MODE, C is
9317 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9318 that may be nonzero. */
9319 if (code == LSHIFTRT
9320 && XEXP (varop, 1) == const0_rtx
9321 && GET_MODE (XEXP (varop, 0)) == result_mode
9322 && count == GET_MODE_BITSIZE (result_mode) - 1
9323 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9324 && ((STORE_FLAG_VALUE
9325 & ((HOST_WIDE_INT) 1
9326 < (GET_MODE_BITSIZE (result_mode) - 1))))
9327 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9328 && merge_outer_ops (&outer_op, &outer_const, XOR,
9329 (HOST_WIDE_INT) 1, result_mode,
9332 varop = XEXP (varop, 0);
9339 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9340 than the number of bits in the mode is equivalent to A. */
9341 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9342 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9344 varop = XEXP (varop, 0);
9349 /* NEG commutes with ASHIFT since it is multiplication. Move the
9350 NEG outside to allow shifts to combine. */
9352 && merge_outer_ops (&outer_op, &outer_const, NEG,
9353 (HOST_WIDE_INT) 0, result_mode,
9356 varop = XEXP (varop, 0);
9362 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9363 is one less than the number of bits in the mode is
9364 equivalent to (xor A 1). */
9365 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9366 && XEXP (varop, 1) == constm1_rtx
9367 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9368 && merge_outer_ops (&outer_op, &outer_const, XOR,
9369 (HOST_WIDE_INT) 1, result_mode,
9373 varop = XEXP (varop, 0);
9377 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9378 that might be nonzero in BAR are those being shifted out and those
9379 bits are known zero in FOO, we can replace the PLUS with FOO.
9380 Similarly in the other operand order. This code occurs when
9381 we are computing the size of a variable-size array. */
9383 if ((code == ASHIFTRT || code == LSHIFTRT)
9384 && count < HOST_BITS_PER_WIDE_INT
9385 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9386 && (nonzero_bits (XEXP (varop, 1), result_mode)
9387 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9389 varop = XEXP (varop, 0);
9392 else if ((code == ASHIFTRT || code == LSHIFTRT)
9393 && count < HOST_BITS_PER_WIDE_INT
9394 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9395 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9397 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9398 & nonzero_bits (XEXP (varop, 1),
9401 varop = XEXP (varop, 1);
9405 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
9407 && GET_CODE (XEXP (varop, 1)) == CONST_INT
9408 && (new = simplify_binary_operation (ASHIFT, result_mode,
9410 GEN_INT (count))) != 0
9411 && GET_CODE (new) == CONST_INT
9412 && merge_outer_ops (&outer_op, &outer_const, PLUS,
9413 INTVAL (new), result_mode, &complement_p))
9415 varop = XEXP (varop, 0);
9421 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
9422 with C the size of VAROP - 1 and the shift is logical if
9423 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9424 we have a (gt X 0) operation. If the shift is arithmetic with
9425 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9426 we have a (neg (gt X 0)) operation. */
9428 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9429 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
9430 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
9431 && (code == LSHIFTRT || code == ASHIFTRT)
9432 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9433 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
9434 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9437 varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9440 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9441 varop = gen_rtx_NEG (GET_MODE (varop), varop);
9448 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9449 if the truncate does not affect the value. */
9450 if (code == LSHIFTRT
9451 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9452 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9453 && (INTVAL (XEXP (XEXP (varop, 0), 1))
9454 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9455 - GET_MODE_BITSIZE (GET_MODE (varop)))))
9457 rtx varop_inner = XEXP (varop, 0);
9460 = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
9461 XEXP (varop_inner, 0),
9463 (count + INTVAL (XEXP (varop_inner, 1))));
9464 varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
9477 /* We need to determine what mode to do the shift in. If the shift is
9478 a right shift or ROTATE, we must always do it in the mode it was
9479 originally done in. Otherwise, we can do it in MODE, the widest mode
9480 encountered. The code we care about is that of the shift that will
9481 actually be done, not the shift that was originally requested. */
9483 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9484 ? result_mode : mode);
9486 /* We have now finished analyzing the shift. The result should be
9487 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
9488 OUTER_OP is non-NIL, it is an operation that needs to be applied
9489 to the result of the shift. OUTER_CONST is the relevant constant,
9490 but we must turn off all bits turned off in the shift.
9492 If we were passed a value for X, see if we can use any pieces of
9493 it. If not, make new rtx. */
9495 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
9496 && GET_CODE (XEXP (x, 1)) == CONST_INT
9497 && INTVAL (XEXP (x, 1)) == count)
9498 const_rtx = XEXP (x, 1);
9500 const_rtx = GEN_INT (count);
9502 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9503 && GET_MODE (XEXP (x, 0)) == shift_mode
9504 && SUBREG_REG (XEXP (x, 0)) == varop)
9505 varop = XEXP (x, 0);
9506 else if (GET_MODE (varop) != shift_mode)
9507 varop = gen_lowpart_for_combine (shift_mode, varop);
9509 /* If we can't make the SUBREG, try to return what we were given. */
9510 if (GET_CODE (varop) == CLOBBER)
9511 return x ? x : varop;
9513 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9518 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
9519 x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx);
9521 SUBST (XEXP (x, 0), varop);
9522 SUBST (XEXP (x, 1), const_rtx);
9525 /* If we have an outer operation and we just made a shift, it is
9526 possible that we could have simplified the shift were it not
9527 for the outer operation. So try to do the simplification
9530 if (outer_op != NIL && GET_CODE (x) == code
9531 && GET_CODE (XEXP (x, 1)) == CONST_INT)
9532 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9533 INTVAL (XEXP (x, 1)));
9535 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
9536 turn off all the bits that the shift would have turned off. */
9537 if (orig_code == LSHIFTRT && result_mode != shift_mode)
9538 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
9539 GET_MODE_MASK (result_mode) >> orig_count);
9541 /* Do the remainder of the processing in RESULT_MODE. */
9542 x = gen_lowpart_for_combine (result_mode, x);
9544 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9547 x =simplify_gen_unary (NOT, result_mode, x, result_mode);
9549 if (outer_op != NIL)
9551 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9552 outer_const = trunc_int_for_mode (outer_const, result_mode);
9554 if (outer_op == AND)
9555 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
9556 else if (outer_op == SET)
9557 /* This means that we have determined that the result is
9558 equivalent to a constant. This should be rare. */
9559 x = GEN_INT (outer_const);
9560 else if (GET_RTX_CLASS (outer_op) == '1')
9561 x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
9563 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
9569 /* Like recog, but we receive the address of a pointer to a new pattern.
9570 We try to match the rtx that the pointer points to.
9571 If that fails, we may try to modify or replace the pattern,
9572 storing the replacement into the same pointer object.
9574 Modifications include deletion or addition of CLOBBERs.
9576 PNOTES is a pointer to a location where any REG_UNUSED notes added for
9577 the CLOBBERs are placed.
9579 The value is the final insn code from the pattern ultimately matched,
/* NOTE(review): this chunk is a numbered-listing extraction; the leading
   integer on each line is the original file's line number, and jumps in
   that numbering (e.g. 9579 -> 9583) mark elided lines such as the return
   type, parameter declarations, and braces.  Confirm against the complete
   combine.c before relying on the exact control structure.  */
9583 recog_for_combine (pnewpat, insn, pnotes)
9588 register rtx pat = *pnewpat;
9589 int insn_code_number;
9590 int num_clobbers_to_add = 0;
/* Step 1: combine marks a failed substitution by leaving a
   (clobber (const_int 0)) inside a PARALLEL; if we see one, force
   recognition to fail for this pattern.  */
9595 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9596 we use to indicate that something didn't match. If we find such a
9597 thing, force rejection. */
9598 if (GET_CODE (pat) == PARALLEL)
9599 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9600 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9601 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
/* Step 2: strip the insn's notes before recognition and restore them
   below, so recog sees the bare pattern.  */
9604 /* Remove the old notes prior to trying to recognize the new pattern. */
9605 old_notes = REG_NOTES (insn);
9606 REG_NOTES (insn) = 0;
9608 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
/* Step 3: if recognition failed, retry with the CLOBBER elements of the
   PARALLEL compacted away (the combined insn may no longer need a side-
   effect clobber) -- but never for asm patterns.  */
9610 /* If it isn't, there is the possibility that we previously had an insn
9611 that clobbered some register as a side effect, but the combined
9612 insn doesn't need to do that. So try once more without the clobbers
9613 unless this represents an ASM insn. */
9615 if (insn_code_number < 0 && ! check_asm_operands (pat)
9616 && GET_CODE (pat) == PARALLEL)
9620 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9621 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9624 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9628 SUBST_INT (XVECLEN (pat, 0), pos);
/* If only one element survived, drop the PARALLEL wrapper and try to
   recognize the single element by itself.  */
9631 pat = XVECEXP (pat, 0, 0);
9633 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
/* Step 4: a no-op SET is acceptable; tag it with the special noop-move
   code so a later pass can delete it.  */
9636 /* Recognize all noop sets, these will be killed by followup pass. */
9637 if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
9638 insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
9640 REG_NOTES (insn) = old_notes;
/* Step 5: if recog asked for extra CLOBBERs, rebuild the pattern as a
   PARALLEL containing the old elements plus room for the new clobbers,
   then verify each clobbered hard reg is dead here (recording REG_UNUSED
   notes for the caller via *PNOTES).  */
9642 /* If we had any clobbers to add, make a new pattern than contains
9643 them. Then check to make sure that all of them are dead. */
9644 if (num_clobbers_to_add)
9646 rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9647 rtvec_alloc (GET_CODE (pat) == PARALLEL
9649 + num_clobbers_to_add)
9650 : num_clobbers_to_add + 1));
9652 if (GET_CODE (pat) == PARALLEL)
9653 for (i = 0; i < XVECLEN (pat, 0); i++)
9654 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9656 XVECEXP (newpat, 0, 0) = pat;
9658 add_clobbers (newpat, insn_code_number);
/* Inspect only the freshly-added trailing clobbers.  */
9660 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9661 i < XVECLEN (newpat, 0); i++)
9663 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
9664 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9666 notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9667 XEXP (XVECEXP (newpat, 0, i), 0), notes);
9675 return insn_code_number;
9678 /* Like gen_lowpart but for use by combine. In combine it is not possible
9679 to create any new pseudoregs. However, it is safe to create
9680 invalid memory addresses, because combine will try to recognize
9681 them and all they will do is make the combine attempt fail.
9683 If for some reason this cannot do its job, an rtx
9684 (clobber (const_int 0)) is returned.
9685 An insn containing that will not be recognized. */
/* NOTE(review): listing extraction -- the numeric prefix on each line is
   the original file's line number; numbering gaps (9691 -> 9696, etc.)
   indicate elided lines (return type, second parameter declaration,
   braces, an #endif).  Verify against the full combine.c.  */
9690 gen_lowpart_for_combine (mode, x)
9691 enum machine_mode mode;
/* Trivial case: X already has the requested mode.  */
9696 if (GET_MODE (x) == mode)
/* Multiword target modes are only handled for mode-less constants or
   when the source is already the same size; otherwise bail out with the
   canonical failure rtx, (clobber (const_int 0)).  */
9699 /* We can only support MODE being wider than a word if X is a
9700 constant integer or has a mode the same size. */
9702 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
9703 && ! ((GET_MODE (x) == VOIDmode
9704 && (GET_CODE (x) == CONST_INT
9705 || GET_CODE (x) == CONST_DOUBLE))
9706 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
9707 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9709 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
9710 won't know what to do. So we will strip off the SUBREG here and
9711 process normally. */
9712 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
9715 if (GET_MODE (x) == mode)
/* First try the generic lowpart routine; on success, note any pseudo
   whose class forbids mode changes so reload can handle it.  */
9719 result = gen_lowpart_common (mode, x);
9720 #ifdef CLASS_CANNOT_CHANGE_MODE
9722 && GET_CODE (result) == SUBREG
9723 && GET_CODE (SUBREG_REG (result)) == REG
9724 && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
9725 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (result),
9726 GET_MODE (SUBREG_REG (result))))
9727 REG_CHANGES_MODE (REGNO (SUBREG_REG (result))) = 1;
/* gen_lowpart_common failed; handle a MEM by adjusting its address.  */
9733 if (GET_CODE (x) == MEM)
9735 register int offset = 0;
9737 /* Refuse to work on a volatile memory ref or one with a mode-dependent
9739 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
9740 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9742 /* If we want to refer to something bigger than the original memref,
9743 generate a perverse subreg instead. That will force a reload
9744 of the original memref X. */
9745 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
9746 return gen_rtx_SUBREG (mode, x, 0);
/* Compute the byte offset of the low part, accounting separately for
   word endianness and byte-within-word endianness.  */
9748 if (WORDS_BIG_ENDIAN)
9749 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
9750 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
9752 if (BYTES_BIG_ENDIAN)
9754 /* Adjust the address so that the address-after-the-data is
9756 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
9757 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
9760 return adjust_address_nv (x, mode, offset);
9763 /* If X is a comparison operator, rewrite it in a new mode. This
9764 probably won't match, but may allow further simplifications. */
9765 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
9766 return gen_rtx_fmt_ee (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
9768 /* If we couldn't simplify X any other way, just enclose it in a
9769 SUBREG. Normally, this SUBREG won't match, but some patterns may
9770 include an explicit SUBREG or we may simplify it further in combine. */
9776 offset = subreg_lowpart_offset (mode, GET_MODE (x));
9777 res = simplify_gen_subreg (mode, x, GET_MODE (x), offset);
/* Last resort: simplify_gen_subreg failed too, so return the failure
   marker so the enclosing combine attempt is abandoned.  */
9780 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9784 /* These routines make binary and unary operations by first seeing if they
9785 fold; if not, a new expression is allocated. */
/* NOTE(review): listing extraction -- the leading integer on each line is
   the original file's line number; numbering gaps mark elided lines (the
   return type, parameter declarations, braces).  Verify against the full
   combine.c.  */
9788 gen_binary (code, mode, op0, op1)
9790 enum machine_mode mode;
/* Canonicalize commutative operations so the "more complex" operand
   comes first before attempting to fold.  */
9796 if (GET_RTX_CLASS (code) == 'c'
9797 && swap_commutative_operands_p (op0, op1))
9798 tem = op0, op0 = op1, op1 = tem;
/* Comparison codes fold via simplify_relational_operation, using the
   operands' mode rather than MODE (the result mode).  */
9800 if (GET_RTX_CLASS (code) == '<')
9802 enum machine_mode op_mode = GET_MODE (op0);
9804 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
9805 just (REL_OP X Y). */
9806 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
9808 op1 = XEXP (op0, 1);
9809 op0 = XEXP (op0, 0);
9810 op_mode = GET_MODE (op0);
/* If OP0's mode is unknown (e.g. a constant), fall back to OP1's.  */
9813 if (op_mode == VOIDmode)
9814 op_mode = GET_MODE (op1);
9815 result = simplify_relational_operation (code, op_mode, op0, op1);
9818 result = simplify_binary_operation (code, mode, op0, op1);
/* Folding failed; build the rtx by hand, keeping the canonical
   complex-operand-first / constant-second order.  */
9823 /* Put complex operands first and constants second. */
9824 if (GET_RTX_CLASS (code) == 'c'
9825 && swap_commutative_operands_p (op0, op1))
9826 return gen_rtx_fmt_ee (code, mode, op1, op0);
/* Special case: an AND with a mask that clears no bit of OP0 (per
   nonzero_bits) is a no-op, so skip building it.  */
9828 /* If we are turning off bits already known off in OP0, we need not do
9830 else if (code == AND && GET_CODE (op1) == CONST_INT
9831 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9832 && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
9835 return gen_rtx_fmt_ee (code, mode, op0, op1);
9838 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
9839 comparison code that will be tested.
9841 The result is a possibly different comparison code to use. *POP0 and
9842 *POP1 may be updated.
9844 It is possible that we might detect that a comparison is either always
9845 true or always false. However, we do not perform general constant
9846 folding in combine, so this knowledge isn't useful. Such tautologies
9847 should have been detected earlier. Hence we ignore all such cases. */
9849 static enum rtx_code
9850 simplify_comparison (code, pop0, pop1)
9859 enum machine_mode mode, tmode;
9861 /* Try a few ways of applying the same transformation to both operands. */
9864 #ifndef WORD_REGISTER_OPERATIONS
9865 /* The test below this one won't handle SIGN_EXTENDs on these machines,
9866 so check specially. */
9867 if (code != GTU && code != GEU && code != LTU && code != LEU
9868 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
9869 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9870 && GET_CODE (XEXP (op1, 0)) == ASHIFT
9871 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
9872 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
9873 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
9874 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
9875 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9876 && GET_CODE (XEXP (op1, 1)) == CONST_INT
9877 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9878 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
9879 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
9880 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
9881 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
9882 && (INTVAL (XEXP (op0, 1))
9883 == (GET_MODE_BITSIZE (GET_MODE (op0))
9885 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
9887 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
9888 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
9892 /* If both operands are the same constant shift, see if we can ignore the
9893 shift. We can if the shift is a rotate or if the bits shifted out of
9894 this shift are known to be zero for both inputs and if the type of
9895 comparison is compatible with the shift. */
9896 if (GET_CODE (op0) == GET_CODE (op1)
9897 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9898 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
9899 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
9900 && (code != GT && code != LT && code != GE && code != LE))
9901 || (GET_CODE (op0) == ASHIFTRT
9902 && (code != GTU && code != LTU
9903 && code != GEU && code != LEU)))
9904 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9905 && INTVAL (XEXP (op0, 1)) >= 0
9906 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9907 && XEXP (op0, 1) == XEXP (op1, 1))
9909 enum machine_mode mode = GET_MODE (op0);
9910 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
9911 int shift_count = INTVAL (XEXP (op0, 1));
9913 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
9914 mask &= (mask >> shift_count) << shift_count;
9915 else if (GET_CODE (op0) == ASHIFT)
9916 mask = (mask & (mask << shift_count)) >> shift_count;
9918 if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
9919 && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
9920 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
9925 /* If both operands are AND's of a paradoxical SUBREG by constant, the
9926 SUBREGs are of the same mode, and, in both cases, the AND would
9927 be redundant if the comparison was done in the narrower mode,
9928 do the comparison in the narrower mode (e.g., we are AND'ing with 1
9929 and the operand's possibly nonzero bits are 0xffffff01; in that case
9930 if we only care about QImode, we don't need the AND). This case
9931 occurs if the output mode of an scc insn is not SImode and
9932 STORE_FLAG_VALUE == 1 (e.g., the 386).
9934 Similarly, check for a case where the AND's are ZERO_EXTEND
9935 operations from some narrower mode even though a SUBREG is not
9938 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
9939 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9940 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
9942 rtx inner_op0 = XEXP (op0, 0);
9943 rtx inner_op1 = XEXP (op1, 0);
9944 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
9945 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
9948 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
9949 && (GET_MODE_SIZE (GET_MODE (inner_op0))
9950 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
9951 && (GET_MODE (SUBREG_REG (inner_op0))
9952 == GET_MODE (SUBREG_REG (inner_op1)))
9953 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
9954 <= HOST_BITS_PER_WIDE_INT)
9955 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
9956 GET_MODE (SUBREG_REG (inner_op0)))))
9957 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
9958 GET_MODE (SUBREG_REG (inner_op1))))))
9960 op0 = SUBREG_REG (inner_op0);
9961 op1 = SUBREG_REG (inner_op1);
9963 /* The resulting comparison is always unsigned since we masked
9964 off the original sign bit. */
9965 code = unsigned_condition (code);
9971 for (tmode = GET_CLASS_NARROWEST_MODE
9972 (GET_MODE_CLASS (GET_MODE (op0)));
9973 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
9974 if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
9976 op0 = gen_lowpart_for_combine (tmode, inner_op0);
9977 op1 = gen_lowpart_for_combine (tmode, inner_op1);
9978 code = unsigned_condition (code);
9987 /* If both operands are NOT, we can strip off the outer operation
9988 and adjust the comparison code for swapped operands; similarly for
9989 NEG, except that this must be an equality comparison. */
9990 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
9991 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
9992 && (code == EQ || code == NE)))
9993 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
9999 /* If the first operand is a constant, swap the operands and adjust the
10000 comparison code appropriately, but don't do this if the second operand
10001 is already a constant integer. */
10002 if (swap_commutative_operands_p (op0, op1))
10004 tem = op0, op0 = op1, op1 = tem;
10005 code = swap_condition (code);
10008 /* We now enter a loop during which we will try to simplify the comparison.
10009 For the most part, we only are concerned with comparisons with zero,
10010 but some things may really be comparisons with zero but not start
10011 out looking that way. */
10013 while (GET_CODE (op1) == CONST_INT)
10015 enum machine_mode mode = GET_MODE (op0);
10016 unsigned int mode_width = GET_MODE_BITSIZE (mode);
10017 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10018 int equality_comparison_p;
10019 int sign_bit_comparison_p;
10020 int unsigned_comparison_p;
10021 HOST_WIDE_INT const_op;
10023 /* We only want to handle integral modes. This catches VOIDmode,
10024 CCmode, and the floating-point modes. An exception is that we
10025 can handle VOIDmode if OP0 is a COMPARE or a comparison
10028 if (GET_MODE_CLASS (mode) != MODE_INT
10029 && ! (mode == VOIDmode
10030 && (GET_CODE (op0) == COMPARE
10031 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
10034 /* Get the constant we are comparing against and turn off all bits
10035 not on in our mode. */
10036 const_op = trunc_int_for_mode (INTVAL (op1), mode);
10037 op1 = GEN_INT (const_op);
10039 /* If we are comparing against a constant power of two and the value
10040 being compared can only have that single bit nonzero (e.g., it was
10041 `and'ed with that bit), we can replace this with a comparison
10044 && (code == EQ || code == NE || code == GE || code == GEU
10045 || code == LT || code == LTU)
10046 && mode_width <= HOST_BITS_PER_WIDE_INT
10047 && exact_log2 (const_op) >= 0
10048 && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
10050 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10051 op1 = const0_rtx, const_op = 0;
10054 /* Similarly, if we are comparing a value known to be either -1 or
10055 0 with -1, change it to the opposite comparison against zero. */
10058 && (code == EQ || code == NE || code == GT || code == LE
10059 || code == GEU || code == LTU)
10060 && num_sign_bit_copies (op0, mode) == mode_width)
10062 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10063 op1 = const0_rtx, const_op = 0;
10066 /* Do some canonicalizations based on the comparison code. We prefer
10067 comparisons against zero and then prefer equality comparisons.
10068 If we can reduce the size of a constant, we will do that too. */
10073 /* < C is equivalent to <= (C - 1) */
10077 op1 = GEN_INT (const_op);
10079 /* ... fall through to LE case below. */
10085 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
10089 op1 = GEN_INT (const_op);
10093 /* If we are doing a <= 0 comparison on a value known to have
10094 a zero sign bit, we can replace this with == 0. */
10095 else if (const_op == 0
10096 && mode_width <= HOST_BITS_PER_WIDE_INT
10097 && (nonzero_bits (op0, mode)
10098 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10103 /* >= C is equivalent to > (C - 1). */
10107 op1 = GEN_INT (const_op);
10109 /* ... fall through to GT below. */
10115 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
10119 op1 = GEN_INT (const_op);
10123 /* If we are doing a > 0 comparison on a value known to have
10124 a zero sign bit, we can replace this with != 0. */
10125 else if (const_op == 0
10126 && mode_width <= HOST_BITS_PER_WIDE_INT
10127 && (nonzero_bits (op0, mode)
10128 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10133 /* < C is equivalent to <= (C - 1). */
10137 op1 = GEN_INT (const_op);
10139 /* ... fall through ... */
10142 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
10143 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10144 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10146 const_op = 0, op1 = const0_rtx;
10154 /* unsigned <= 0 is equivalent to == 0 */
10158 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
10159 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10160 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10162 const_op = 0, op1 = const0_rtx;
10168 /* >= C is equivalent to < (C - 1). */
10172 op1 = GEN_INT (const_op);
10174 /* ... fall through ... */
10177 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
10178 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10179 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10181 const_op = 0, op1 = const0_rtx;
10189 /* unsigned > 0 is equivalent to != 0 */
10193 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
10194 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10195 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10197 const_op = 0, op1 = const0_rtx;
10206 /* Compute some predicates to simplify code below. */
10208 equality_comparison_p = (code == EQ || code == NE);
10209 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10210 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
10213 /* If this is a sign bit comparison and we can do arithmetic in
10214 MODE, say that we will only be needing the sign bit of OP0. */
10215 if (sign_bit_comparison_p
10216 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10217 op0 = force_to_mode (op0, mode,
10219 << (GET_MODE_BITSIZE (mode) - 1)),
10222 /* Now try cases based on the opcode of OP0. If none of the cases
10223 does a "continue", we exit this loop immediately after the
10226 switch (GET_CODE (op0))
10229 /* If we are extracting a single bit from a variable position in
10230 a constant that has only a single bit set and are comparing it
10231 with zero, we can convert this into an equality comparison
10232 between the position and the location of the single bit. */
10234 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
10235 && XEXP (op0, 1) == const1_rtx
10236 && equality_comparison_p && const_op == 0
10237 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
10239 if (BITS_BIG_ENDIAN)
10242 mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
10243 if (mode == VOIDmode)
10245 i = (GET_MODE_BITSIZE (mode) - 1 - i);
10247 i = BITS_PER_WORD - 1 - i;
10251 op0 = XEXP (op0, 2);
10255 /* Result is nonzero iff shift count is equal to I. */
10256 code = reverse_condition (code);
10260 /* ... fall through ... */
10263 tem = expand_compound_operation (op0);
10272 /* If testing for equality, we can take the NOT of the constant. */
10273 if (equality_comparison_p
10274 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10276 op0 = XEXP (op0, 0);
10281 /* If just looking at the sign bit, reverse the sense of the
10283 if (sign_bit_comparison_p)
10285 op0 = XEXP (op0, 0);
10286 code = (code == GE ? LT : GE);
10292 /* If testing for equality, we can take the NEG of the constant. */
10293 if (equality_comparison_p
10294 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10296 op0 = XEXP (op0, 0);
10301 /* The remaining cases only apply to comparisons with zero. */
10305 /* When X is ABS or is known positive,
10306 (neg X) is < 0 if and only if X != 0. */
10308 if (sign_bit_comparison_p
10309 && (GET_CODE (XEXP (op0, 0)) == ABS
10310 || (mode_width <= HOST_BITS_PER_WIDE_INT
10311 && (nonzero_bits (XEXP (op0, 0), mode)
10312 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
10314 op0 = XEXP (op0, 0);
10315 code = (code == LT ? NE : EQ);
10319 /* If we have NEG of something whose two high-order bits are the
10320 same, we know that "(-a) < 0" is equivalent to "a > 0". */
10321 if (num_sign_bit_copies (op0, mode) >= 2)
10323 op0 = XEXP (op0, 0);
10324 code = swap_condition (code);
10330 /* If we are testing equality and our count is a constant, we
10331 can perform the inverse operation on our RHS. */
10332 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10333 && (tem = simplify_binary_operation (ROTATERT, mode,
10334 op1, XEXP (op0, 1))) != 0)
10336 op0 = XEXP (op0, 0);
10341 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10342 a particular bit. Convert it to an AND of a constant of that
10343 bit. This will be converted into a ZERO_EXTRACT. */
10344 if (const_op == 0 && sign_bit_comparison_p
10345 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10346 && mode_width <= HOST_BITS_PER_WIDE_INT)
10348 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10351 - INTVAL (XEXP (op0, 1)))));
10352 code = (code == LT ? NE : EQ);
10356 /* Fall through. */
10359 /* ABS is ignorable inside an equality comparison with zero. */
10360 if (const_op == 0 && equality_comparison_p)
10362 op0 = XEXP (op0, 0);
10368 /* Can simplify (compare (zero/sign_extend FOO) CONST)
10369 to (compare FOO CONST) if CONST fits in FOO's mode and we
10370 are either testing inequality or have an unsigned comparison
10371 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
10372 if (! unsigned_comparison_p
10373 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10374 <= HOST_BITS_PER_WIDE_INT)
10375 && ((unsigned HOST_WIDE_INT) const_op
10376 < (((unsigned HOST_WIDE_INT) 1
10377 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
10379 op0 = XEXP (op0, 0);
10385 /* Check for the case where we are comparing A - C1 with C2,
10386 both constants are smaller than 1/2 the maximum positive
10387 value in MODE, and the comparison is equality or unsigned.
10388 In that case, if A is either zero-extended to MODE or has
10389 sufficient sign bits so that the high-order bit in MODE
10390 is a copy of the sign in the inner mode, we can prove that it is
10391 safe to do the operation in the wider mode. This simplifies
10392 many range checks. */
10394 if (mode_width <= HOST_BITS_PER_WIDE_INT
10395 && subreg_lowpart_p (op0)
10396 && GET_CODE (SUBREG_REG (op0)) == PLUS
10397 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
10398 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
10399 && (-INTVAL (XEXP (SUBREG_REG (op0), 1))
10400 < (HOST_WIDE_INT) (GET_MODE_MASK (mode) / 2))
10401 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
10402 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
10403 GET_MODE (SUBREG_REG (op0)))
10404 & ~GET_MODE_MASK (mode))
10405 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
10406 GET_MODE (SUBREG_REG (op0)))
10407 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10408 - GET_MODE_BITSIZE (mode)))))
10410 op0 = SUBREG_REG (op0);
10414 /* If the inner mode is narrower and we are extracting the low part,
10415 we can treat the SUBREG as if it were a ZERO_EXTEND. */
10416 if (subreg_lowpart_p (op0)
10417 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10418 /* Fall through */ ;
10422 /* ... fall through ... */
10425 if ((unsigned_comparison_p || equality_comparison_p)
10426 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10427 <= HOST_BITS_PER_WIDE_INT)
10428 && ((unsigned HOST_WIDE_INT) const_op
10429 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10431 op0 = XEXP (op0, 0);
10437 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
10438 this for equality comparisons due to pathological cases involving
10440 if (equality_comparison_p
10441 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10442 op1, XEXP (op0, 1))))
10444 op0 = XEXP (op0, 0);
10449 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
10450 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10451 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10453 op0 = XEXP (XEXP (op0, 0), 0);
10454 code = (code == LT ? EQ : NE);
10460 /* We used to optimize signed comparisons against zero, but that
10461 was incorrect. Unsigned comparisons against zero (GTU, LEU)
10462 arrive here as equality comparisons, or (GEU, LTU) are
10463 optimized away. No need to special-case them. */
10465 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10466 (eq B (minus A C)), whichever simplifies. We can only do
10467 this for equality comparisons due to pathological cases involving
10469 if (equality_comparison_p
10470 && 0 != (tem = simplify_binary_operation (PLUS, mode,
10471 XEXP (op0, 1), op1)))
10473 op0 = XEXP (op0, 0);
10478 if (equality_comparison_p
10479 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10480 XEXP (op0, 0), op1)))
10482 op0 = XEXP (op0, 1);
10487 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10488 of bits in X minus 1, is one iff X > 0. */
10489 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10490 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10491 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
10492 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10494 op0 = XEXP (op0, 1);
10495 code = (code == GE ? LE : GT);
10501 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
10502 if C is zero or B is a constant. */
10503 if (equality_comparison_p
10504 && 0 != (tem = simplify_binary_operation (XOR, mode,
10505 XEXP (op0, 1), op1)))
10507 op0 = XEXP (op0, 0);
10514 case UNEQ: case LTGT:
10515 case LT: case LTU: case UNLT: case LE: case LEU: case UNLE:
10516 case GT: case GTU: case UNGT: case GE: case GEU: case UNGE:
10517 case UNORDERED: case ORDERED:
10518 /* We can't do anything if OP0 is a condition code value, rather
10519 than an actual data value. */
10522 || XEXP (op0, 0) == cc0_rtx
10524 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10527 /* Get the two operands being compared. */
10528 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10529 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10531 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10533 /* Check for the cases where we simply want the result of the
10534 earlier test or the opposite of that result. */
10535 if (code == NE || code == EQ
10536 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10537 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10538 && (STORE_FLAG_VALUE
10539 & (((HOST_WIDE_INT) 1
10540 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
10541 && (code == LT || code == GE)))
10543 enum rtx_code new_code;
10544 if (code == LT || code == NE)
10545 new_code = GET_CODE (op0);
10547 new_code = combine_reversed_comparison_code (op0);
10549 if (new_code != UNKNOWN)
10560 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
10562 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10563 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10564 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10566 op0 = XEXP (op0, 1);
10567 code = (code == GE ? GT : LE);
10573 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
10574 will be converted to a ZERO_EXTRACT later. */
10575 if (const_op == 0 && equality_comparison_p
10576 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10577 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10579 op0 = simplify_and_const_int
10580 (op0, mode, gen_rtx_LSHIFTRT (mode,
10582 XEXP (XEXP (op0, 0), 1)),
10583 (HOST_WIDE_INT) 1);
10587 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10588 zero and X is a comparison and C1 and C2 describe only bits set
10589 in STORE_FLAG_VALUE, we can compare with X. */
10590 if (const_op == 0 && equality_comparison_p
10591 && mode_width <= HOST_BITS_PER_WIDE_INT
10592 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10593 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10594 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10595 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
10596 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
10598 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10599 << INTVAL (XEXP (XEXP (op0, 0), 1)));
10600 if ((~STORE_FLAG_VALUE & mask) == 0
10601 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10602 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10603 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10605 op0 = XEXP (XEXP (op0, 0), 0);
10610 /* If we are doing an equality comparison of an AND of a bit equal
10611 to the sign bit, replace this with a LT or GE comparison of
10612 the underlying value. */
10613 if (equality_comparison_p
10615 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10616 && mode_width <= HOST_BITS_PER_WIDE_INT
10617 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10618 == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
10620 op0 = XEXP (op0, 0);
10621 code = (code == EQ ? GE : LT);
10625 /* If this AND operation is really a ZERO_EXTEND from a narrower
10626 mode, the constant fits within that mode, and this is either an
10627 equality or unsigned comparison, try to do this comparison in
10628 the narrower mode. */
10629 if ((equality_comparison_p || unsigned_comparison_p)
10630 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10631 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10632 & GET_MODE_MASK (mode))
10634 && const_op >> i == 0
10635 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10637 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10641 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
10642 in both M1 and M2 and the SUBREG is either paradoxical or
10643 represents the low part, permute the SUBREG and the AND and
10645 if (GET_CODE (XEXP (op0, 0)) == SUBREG
10647 #ifdef WORD_REGISTER_OPERATIONS
10649 > (GET_MODE_BITSIZE
10650 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10651 && mode_width <= BITS_PER_WORD)
10654 <= (GET_MODE_BITSIZE
10655 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10656 && subreg_lowpart_p (XEXP (op0, 0))))
10657 #ifndef WORD_REGISTER_OPERATIONS
10658 /* It is unsafe to commute the AND into the SUBREG if the SUBREG
10659 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
10660 As originally written the upper bits have a defined value
10661 due to the AND operation. However, if we commute the AND
10662 inside the SUBREG then they no longer have defined values
10663 and the meaning of the code has been changed. */
10664 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
10665 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10667 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10668 && mode_width <= HOST_BITS_PER_WIDE_INT
10669 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10670 <= HOST_BITS_PER_WIDE_INT)
10671 && (INTVAL (XEXP (op0, 1)) & ~mask) == 0
10672 && 0 == (~GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10673 & INTVAL (XEXP (op0, 1)))
10674 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) != mask
10675 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
10676 != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10680 = gen_lowpart_for_combine
10682 gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
10683 SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
10687 /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
10688 (eq (and (lshiftrt X) 1) 0). */
10689 if (const_op == 0 && equality_comparison_p
10690 && XEXP (op0, 1) == const1_rtx
10691 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10692 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == NOT)
10694 op0 = simplify_and_const_int
10696 gen_rtx_LSHIFTRT (mode, XEXP (XEXP (XEXP (op0, 0), 0), 0),
10697 XEXP (XEXP (op0, 0), 1)),
10698 (HOST_WIDE_INT) 1);
10699 code = (code == NE ? EQ : NE);
10705 /* If we have (compare (ashift FOO N) (const_int C)) and
10706 the high order N bits of FOO (N+1 if an inequality comparison)
10707 are known to be zero, we can do this by comparing FOO with C
10708 shifted right N bits so long as the low-order N bits of C are
10710 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10711 && INTVAL (XEXP (op0, 1)) >= 0
10712 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
10713 < HOST_BITS_PER_WIDE_INT)
10715 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
10716 && mode_width <= HOST_BITS_PER_WIDE_INT
10717 && (nonzero_bits (XEXP (op0, 0), mode)
10718 & ~(mask >> (INTVAL (XEXP (op0, 1))
10719 + ! equality_comparison_p))) == 0)
10721 /* We must perform a logical shift, not an arithmetic one,
10722 as we want the top N bits of C to be zero. */
10723 unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
10725 temp >>= INTVAL (XEXP (op0, 1));
10726 op1 = GEN_INT (trunc_int_for_mode (temp, mode));
10727 op0 = XEXP (op0, 0);
10731 /* If we are doing a sign bit comparison, it means we are testing
10732 a particular bit. Convert it to the appropriate AND. */
10733 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10734 && mode_width <= HOST_BITS_PER_WIDE_INT)
10736 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10739 - INTVAL (XEXP (op0, 1)))));
10740 code = (code == LT ? NE : EQ);
10744 /* If this an equality comparison with zero and we are shifting
10745 the low bit to the sign bit, we can convert this to an AND of the
10747 if (const_op == 0 && equality_comparison_p
10748 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10749 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10751 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10752 (HOST_WIDE_INT) 1);
10758 /* If this is an equality comparison with zero, we can do this
10759 as a logical shift, which might be much simpler. */
10760 if (equality_comparison_p && const_op == 0
10761 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10763 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10765 INTVAL (XEXP (op0, 1)));
10769 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10770 do the comparison in a narrower mode. */
10771 if (! unsigned_comparison_p
10772 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10773 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10774 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10775 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10776 MODE_INT, 1)) != BLKmode
10777 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
10778 || ((unsigned HOST_WIDE_INT) -const_op
10779 <= GET_MODE_MASK (tmode))))
10781 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10785 /* Likewise if OP0 is a PLUS of a sign extension with a
10786 constant, which is usually represented with the PLUS
10787 between the shifts. */
10788 if (! unsigned_comparison_p
10789 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10790 && GET_CODE (XEXP (op0, 0)) == PLUS
10791 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10792 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
10793 && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
10794 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10795 MODE_INT, 1)) != BLKmode
10796 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
10797 || ((unsigned HOST_WIDE_INT) -const_op
10798 <= GET_MODE_MASK (tmode))))
10800 rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
10801 rtx add_const = XEXP (XEXP (op0, 0), 1);
10802 rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const,
10805 op0 = gen_binary (PLUS, tmode,
10806 gen_lowpart_for_combine (tmode, inner),
10811 /* ... fall through ... */
10813 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
10814 the low order N bits of FOO are known to be zero, we can do this
10815 by comparing FOO with C shifted left N bits so long as no
10816 overflow occurs. */
10817 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10818 && INTVAL (XEXP (op0, 1)) >= 0
10819 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10820 && mode_width <= HOST_BITS_PER_WIDE_INT
10821 && (nonzero_bits (XEXP (op0, 0), mode)
10822 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
10824 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
10827 const_op <<= INTVAL (XEXP (op0, 1));
10828 op1 = GEN_INT (const_op);
10829 op0 = XEXP (op0, 0);
10833 /* If we are using this shift to extract just the sign bit, we
10834 can replace this with an LT or GE comparison. */
10836 && (equality_comparison_p || sign_bit_comparison_p)
10837 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10838 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10840 op0 = XEXP (op0, 0);
10841 code = (code == NE || code == GT ? LT : GE);
10853 /* Now make any compound operations involved in this comparison. Then,
10854 check for an outmost SUBREG on OP0 that is not doing anything or is
10855 paradoxical. The latter case can only occur when it is known that the
10856 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
10857 We can never remove a SUBREG for a non-equality comparison because the
10858 sign bit is in a different place in the underlying object. */
10860 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10861 op1 = make_compound_operation (op1, SET);
10863 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10864 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10865 && (code == NE || code == EQ)
10866 && ((GET_MODE_SIZE (GET_MODE (op0))
10867 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
10869 op0 = SUBREG_REG (op0);
10870 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
10873 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10874 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10875 && (code == NE || code == EQ)
10876 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10877 <= HOST_BITS_PER_WIDE_INT)
10878 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
10879 & ~GET_MODE_MASK (GET_MODE (op0))) == 0
10880 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
10882 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
10883 & ~GET_MODE_MASK (GET_MODE (op0))) == 0))
10884 op0 = SUBREG_REG (op0), op1 = tem;
10886 /* We now do the opposite procedure: Some machines don't have compare
10887 insns in all modes. If OP0's mode is an integer mode smaller than a
10888 word and we can't do a compare in that mode, see if there is a larger
10889 mode for which we can do the compare. There are a number of cases in
10890 which we can use the wider mode. */
10892 mode = GET_MODE (op0);
10893 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10894 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
10895 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
10896 for (tmode = GET_MODE_WIDER_MODE (mode);
10898 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
10899 tmode = GET_MODE_WIDER_MODE (tmode))
10900 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
10902 /* If the only nonzero bits in OP0 and OP1 are those in the
10903 narrower mode and this is an equality or unsigned comparison,
10904 we can use the wider mode. Similarly for sign-extended
10905 values, in which case it is true for all comparisons. */
10906 if (((code == EQ || code == NE
10907 || code == GEU || code == GTU || code == LEU || code == LTU)
10908 && (nonzero_bits (op0, tmode) & ~GET_MODE_MASK (mode)) == 0
10909 && (nonzero_bits (op1, tmode) & ~GET_MODE_MASK (mode)) == 0)
10910 || ((num_sign_bit_copies (op0, tmode)
10911 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
10912 && (num_sign_bit_copies (op1, tmode)
10913 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
10915 /* If OP0 is an AND and we don't have an AND in MODE either,
10916 make a new AND in the proper mode. */
10917 if (GET_CODE (op0) == AND
10918 && (add_optab->handlers[(int) mode].insn_code
10919 == CODE_FOR_nothing))
10920 op0 = gen_binary (AND, tmode,
10921 gen_lowpart_for_combine (tmode,
10923 gen_lowpart_for_combine (tmode,
10926 op0 = gen_lowpart_for_combine (tmode, op0);
10927 op1 = gen_lowpart_for_combine (tmode, op1);
10931 /* If this is a test for negative, we can make an explicit
10932 test of the sign bit. */
10934 if (op1 == const0_rtx && (code == LT || code == GE)
10935 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10937 op0 = gen_binary (AND, tmode,
10938 gen_lowpart_for_combine (tmode, op0),
10939 GEN_INT ((HOST_WIDE_INT) 1
10940 << (GET_MODE_BITSIZE (mode) - 1)));
10941 code = (code == LT) ? NE : EQ;
10946 #ifdef CANONICALIZE_COMPARISON
10947 /* If this machine only supports a subset of valid comparisons, see if we
10948 can convert an unsupported one into a supported one. */
10949 CANONICALIZE_COMPARISON (code, op0, op1);
10958 /* Like jump.c' reversed_comparison_code, but use combine infrastructure for
10959    searching backward.  */
10960 static enum rtx_code
10961 combine_reversed_comparison_code (exp)
/* EXP is a comparison rtx.  Returns the reversed comparison code, or
   UNKNOWN when the reversal cannot be proven safe.  */
10964   enum rtx_code code1 = reversed_comparison_code (exp, NULL);
/* If the generic reversal succeeded, or the operand is not a CC value
   (so no hidden COMPARE to chase), we are done with that answer.  */
10967   if (code1 != UNKNOWN
10968       || GET_MODE_CLASS (GET_MODE (XEXP (exp, 0))) != MODE_CC)
10970   /* Otherwise try and find where the condition codes were last set and
10972   x = get_last_value (XEXP (exp, 0));
/* Only a recorded COMPARE gives us concrete operands to reverse with.  */
10973   if (!x || GET_CODE (x) != COMPARE)
10975   return reversed_comparison_code_parts (GET_CODE (exp),
10976 					 XEXP (x, 0), XEXP (x, 1), NULL);
10978 /* Return comparison with reversed code of EXP and operands OP0 and OP1.
10979    Return NULL_RTX in case we fail to do the reversal.  */
10981 reversed_comparison (exp, mode, op0, op1)
10983      enum machine_mode mode;
/* Delegate the legality check to combine_reversed_comparison_code;
   only build the new rtx when a valid reversed code exists.  */
10985   enum rtx_code reversed_code = combine_reversed_comparison_code (exp);
10986   if (reversed_code == UNKNOWN)
10989   return gen_binary (reversed_code, mode, op0, op1);
10992 /* Utility function for following routine.  Called when X is part of a value
10993    being stored into reg_last_set_value.  Sets reg_last_set_table_tick
10994    for each register mentioned.  Similar to mention_regs in cse.c */
10997 update_table_tick (x)
11000   register enum rtx_code code = GET_CODE (x);
11001   register const char *fmt = GET_RTX_FORMAT (code);
/* For a REG: stamp every hard register it occupies.  A hard reg may span
   HARD_REGNO_NREGS registers; a pseudo always occupies exactly one.  */
11006       unsigned int regno = REGNO (x);
11007       unsigned int endregno
11008 	= regno + (regno < FIRST_PSEUDO_REGISTER
11009 		   ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11012       for (r = regno; r < endregno; r++)
11013 	reg_last_set_table_tick[r] = label_tick;
/* Otherwise recurse into each rtx operand of X.  */
11018   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11019     /* Note that we can't have an "E" in values stored; see
11020        get_last_value_validate.  */
11022       update_table_tick (XEXP (x, i));
11025 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
11026 are saying that the register is clobbered and we no longer know its
11027 value. If INSN is zero, don't update reg_last_set; this is only permitted
11028 with VALUE also zero and is used to invalidate the register. */
11031 record_value_for_reg (reg, insn, value)
/* Compute the range of hard registers REG occupies (one reg for pseudos).  */
11036   unsigned int regno = REGNO (reg);
11037   unsigned int endregno
11038     = regno + (regno < FIRST_PSEUDO_REGISTER
11039 	       ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
11042   /* If VALUE contains REG and we have a previous value for REG, substitute
11043      the previous value.  */
11044   if (value && insn && reg_overlap_mentioned_p (reg, value))
11048       /* Set things up so get_last_value is allowed to see anything set up to
11050       subst_low_cuid = INSN_CUID (insn);
11051       tem = get_last_value (reg);
11053       /* If TEM is simply a binary operation with two CLOBBERs as operands,
11054 	 it isn't going to be useful and will take a lot of time to process,
11055 	 so just use the CLOBBER.  */
11059 	  if ((GET_RTX_CLASS (GET_CODE (tem)) == '2'
11060 	       || GET_RTX_CLASS (GET_CODE (tem)) == 'c')
11061 	      && GET_CODE (XEXP (tem, 0)) == CLOBBER
11062 	      && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11063 	    tem = XEXP (tem, 0);
/* copy_rtx first so the replacement cannot corrupt a shared VALUE.  */
11065 	  value = replace_rtx (copy_rtx (value), reg, tem);
11069   /* For each register modified, show we don't know its value, that
11070      we don't know about its bitwise content, that its value has been
11071      updated, and that we don't know the location of the death of the
11073   for (i = regno; i < endregno; i++)
11076 	reg_last_set[i] = insn;
11078       reg_last_set_value[i] = 0;
11079       reg_last_set_mode[i] = 0;
11080       reg_last_set_nonzero_bits[i] = 0;
11081       reg_last_set_sign_bit_copies[i] = 0;
11082       reg_last_death[i] = 0;
11085   /* Mark registers that are being referenced in this value.  */
11087     update_table_tick (value);
11089   /* Now update the status of each register being set.
11090      If someone is using this register in this block, set this register
11091      to invalid since we will get confused between the two lives in this
11092      basic block. This makes using this register always invalid. In cse, we
11093      scan the table to invalidate all entries using this register, but this
11094      is too much work for us. */
11096   for (i = regno; i < endregno; i++)
11098       reg_last_set_label[i] = label_tick;
11099       if (value && reg_last_set_table_tick[i] == label_tick)
11100 	reg_last_set_invalid[i] = 1;
11102 	reg_last_set_invalid[i] = 0;
11105   /* The value being assigned might refer to X (like in "x++;").  In that
11106      case, we must replace it with (clobber (const_int 0)) to prevent
/* First pass only detects the problem; the second pass (replace = 1) is
   run on a fresh copy so shared rtl is never mutated in place.  */
11108   if (value && ! get_last_value_validate (&value, insn,
11109 					  reg_last_set_label[regno], 0))
11111       value = copy_rtx (value);
11112       if (! get_last_value_validate (&value, insn,
11113 				     reg_last_set_label[regno], 1))
11117   /* For the main register being modified, update the value, the mode, the
11118      nonzero bits, and the number of sign bit copies.  */
11120   reg_last_set_value[regno] = value;
11124       subst_low_cuid = INSN_CUID (insn);
11125       reg_last_set_mode[regno] = GET_MODE (reg);
11126       reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
11127       reg_last_set_sign_bit_copies[regno]
11128 	= num_sign_bit_copies (value, GET_MODE (reg));
11132 /* Called via note_stores from record_dead_and_set_regs to handle one
11133    SET or CLOBBER in an insn.  DATA is the instruction in which the
11134    set is occurring.  */
11137 record_dead_and_set_regs_1 (dest, setter, data)
11141   rtx record_dead_insn = (rtx) data;
/* Look through a SUBREG to the register actually being stored into.  */
11143   if (GET_CODE (dest) == SUBREG)
11144     dest = SUBREG_REG (dest);
11146   if (GET_CODE (dest) == REG)
11148       /* If we are setting the whole register, we know its value.  Otherwise
11149 	 show that we don't know the value.  We can handle SUBREG in
11151       if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11152 	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
/* A lowpart SUBREG store of at most a word still defines the value we
   care about: record the source narrowed/extended to DEST's mode.  */
11153       else if (GET_CODE (setter) == SET
11154 	       && GET_CODE (SET_DEST (setter)) == SUBREG
11155 	       && SUBREG_REG (SET_DEST (setter)) == dest
11156 	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
11157 	       && subreg_lowpart_p (SET_DEST (setter)))
11158 	record_value_for_reg (dest, record_dead_insn,
11159 			      gen_lowpart_for_combine (GET_MODE (dest),
11160 						       SET_SRC (setter)));
/* Any other kind of store (e.g. CLOBBER or partial set) invalidates.  */
11162 	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
11164   else if (GET_CODE (dest) == MEM
11165 	   /* Ignore pushes, they clobber nothing.  */
11166 	   && ! push_operand (dest, GET_MODE (dest)))
11167     mem_last_set = INSN_CUID (record_dead_insn);
11170 /* Update the records of when each REG was most recently set or killed
11171    for the things done by INSN.  This is the last thing done in processing
11172    INSN in the combiner loop.
11174    We update reg_last_set, reg_last_set_value, reg_last_set_mode,
11175    reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
11176    and also the similar information mem_last_set (which insn most recently
11177    modified memory) and last_call_cuid (which insn was the most recent
11178    subroutine call).  */
11181 record_dead_and_set_regs (insn)
/* First scan the notes: REG_DEAD marks where each (possibly multi-word
   hard) register dies; REG_INC means an auto-increment clobbered it.  */
11187   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11189       if (REG_NOTE_KIND (link) == REG_DEAD
11190 	  && GET_CODE (XEXP (link, 0)) == REG)
11192 	  unsigned int regno = REGNO (XEXP (link, 0));
11193 	  unsigned int endregno
11194 	    = regno + (regno < FIRST_PSEUDO_REGISTER
11195 		       ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
11198 	  for (i = regno; i < endregno; i++)
11199 	    reg_last_death[i] = insn;
11201       else if (REG_NOTE_KIND (link) == REG_INC)
11202 	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
/* A call clobbers all call-used hard registers and (conservatively)
   all of memory, so forget everything known about them.  */
11205   if (GET_CODE (insn) == CALL_INSN)
11207       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11208 	if (call_used_regs[i])
11210 	    reg_last_set_value[i] = 0;
11211 	    reg_last_set_mode[i] = 0;
11212 	    reg_last_set_nonzero_bits[i] = 0;
11213 	    reg_last_set_sign_bit_copies[i] = 0;
11214 	    reg_last_death[i] = 0;
11217       last_call_cuid = mem_last_set = INSN_CUID (insn);
/* Finally walk the pattern itself; each SET/CLOBBER is handled by
   record_dead_and_set_regs_1.  */
11220   note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
11223 /* If a SUBREG has the promoted bit set, it is in fact a property of the
11224    register present in the SUBREG, so for each such SUBREG go back and
11225    adjust nonzero and sign bit information of the registers that are
11226    known to have some zero/sign bits set.
11228    This is needed because when combine blows the SUBREGs away, the
11229    information on zero/sign bits is lost and further combines can be
11230    missed because of that.  */
11233 record_promoted_value (insn, subreg)
11238   unsigned int regno = REGNO (SUBREG_REG (subreg));
11239   enum machine_mode mode = GET_MODE (subreg);
/* Wider than a HOST_WIDE_INT: we cannot represent the mask, give up.  */
11241   if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
/* Walk the LOG_LINKS chain looking for the insn that set REGNO in the
   SUBREG's inner mode; skip links that set something else.  */
11244   for (links = LOG_LINKS (insn); links;)
11246       insn = XEXP (links, 0);
11247       set = single_set (insn);
11249       if (! set || GET_CODE (SET_DEST (set)) != REG
11250 	  || REGNO (SET_DEST (set)) != regno
11251 	  || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11253 	  links = XEXP (links, 1);
/* Only trust the recorded bits if this insn is the last setter.  */
11257       if (reg_last_set[regno] == insn)
11259 	  if (SUBREG_PROMOTED_UNSIGNED_P (subreg))
/* Unsigned promotion guarantees the high bits are zero, so the
   nonzero-bits mask can be clipped to the SUBREG's mode.  */
11260 	    reg_last_set_nonzero_bits[regno] &= GET_MODE_MASK (mode);
/* If the value was copied from another register, follow the copy and
   keep searching from that register's links.  */
11263       if (GET_CODE (SET_SRC (set)) == REG)
11265 	  regno = REGNO (SET_SRC (set));
11266 	  links = LOG_LINKS (insn);
11273 /* Scan X for promoted SUBREGs.  For each one found,
11274    note what it implies to the registers used in it.  */
11277 check_promoted_subreg (insn, x)
/* A promoted SUBREG of a REG is exactly what record_promoted_value
   knows how to exploit.  */
11281   if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11282       && GET_CODE (SUBREG_REG (x)) == REG)
11283     record_promoted_value (insn, x);
/* Otherwise recurse over X's operands, including rtx vectors.  */
11286       const char *format = GET_RTX_FORMAT (GET_CODE (x));
11289       for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
11293 	    check_promoted_subreg (insn, XEXP (x, i));
11297 	    if (XVEC (x, i) != 0)
11298 	      for (j = 0; j < XVECLEN (x, i); j++)
11299 		check_promoted_subreg (insn, XVECEXP (x, i, j));
11305 /* Utility routine for the following function.  Verify that all the registers
11306    mentioned in *LOC are valid when *LOC was part of a value set when
11307    label_tick == TICK.  Return 0 if some are not.
11309    If REPLACE is non-zero, replace the invalid reference with
11310    (clobber (const_int 0)) and return 1.  This replacement is useful because
11311    we often can get useful information about the form of a value (e.g., if
11312    it was produced by a shift that always produces -1 or 0) even though
11313    we don't know exactly what registers it was produced from.  */
11316 get_last_value_validate (loc, insn, tick, replace)
11323   const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
11324   int len = GET_RTX_LENGTH (GET_CODE (x));
11327   if (GET_CODE (x) == REG)
11329       unsigned int regno = REGNO (x);
11330       unsigned int endregno
11331 	= regno + (regno < FIRST_PSEUDO_REGISTER
11332 		   ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
/* Every hard register X occupies must still carry a valid recording.  */
11335       for (j = regno; j < endregno; j++)
11336 	if (reg_last_set_invalid[j]
11337 	    /* If this is a pseudo-register that was only set once and not
11338 	       live at the beginning of the function, it is always valid.  */
11339 	    || (! (regno >= FIRST_PSEUDO_REGISTER
11340 		   && REG_N_SETS (regno) == 1
11341 		   && (! REGNO_REG_SET_P
11342 		       (BASIC_BLOCK (0)->global_live_at_start, regno)))
11343 		&& reg_last_set_label[j] > tick))
/* In replace mode, substitute a CLOBBER so the caller can still use
   the surrounding expression's shape.  */
11346 	      *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11352   /* If this is a memory reference, make sure that there were
11353      no stores after it that might have clobbered the value.  We don't
11354      have alias info, so we assume any store invalidates it.  */
11355   else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
11356 	   && INSN_CUID (insn) <= mem_last_set)
11359       *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
/* Recurse into each operand; a single invalid sub-expression makes
   the whole value invalid (unless it was replaced above).  */
11363   for (i = 0; i < len; i++)
11365 	&& get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
11366     /* Don't bother with these.  They shouldn't occur anyway.  */
11370   /* If we haven't found a reason for it to be invalid, it is valid.  */
11374 /* Get the last value assigned to X, if known.  Some registers
11375    in the value may be replaced with (clobber (const_int 0)) if their value
11376    is no longer known reliably.  */
11382   unsigned int regno;
11385   /* If this is a non-paradoxical SUBREG, get the value of its operand and
11386      then convert it to the desired mode.  If this is a paradoxical SUBREG,
11387      we cannot predict what values the "extra" bits might have.  */
11388   if (GET_CODE (x) == SUBREG
11389       && subreg_lowpart_p (x)
11390       && (GET_MODE_SIZE (GET_MODE (x))
11391 	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11392       && (value = get_last_value (SUBREG_REG (x))) != 0)
11393     return gen_lowpart_for_combine (GET_MODE (x), value);
  /* Only a REG can have a recorded last value.  */
11395   if (GET_CODE (x) != REG)
11399   value = reg_last_set_value[regno];
11401   /* If we don't have a value, or if it isn't for this basic block and
11402      it's either a hard register, set more than once, or it's live
11403      at the beginning of the function, return 0.
11405      Because if it's not live at the beginning of the function then the reg
11406      is always set before being used (is never used without being set).
11407      And, if it's set only once, and it's always set before use, then all
11408      uses must have the same last value, even if it's not from this basic
11412       || (reg_last_set_label[regno] != label_tick
11413 	  && (regno < FIRST_PSEUDO_REGISTER
11414 	      || REG_N_SETS (regno) != 1
11415 	      || (REGNO_REG_SET_P
11416 		  (BASIC_BLOCK (0)->global_live_at_start, regno)))))
11419   /* If the value was set in a later insn than the ones we are processing,
11420      we can't use it even if the register was only set once.  */
11421   if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
11424   /* If the value has all its registers valid, return it.  */
11425   if (get_last_value_validate (&value, reg_last_set[regno],
11426 			       reg_last_set_label[regno], 0))
11429   /* Otherwise, make a copy and replace any invalid register with
11430      (clobber (const_int 0)).  If that fails for some reason, return 0.  */
11432   value = copy_rtx (value);
11433   if (get_last_value_validate (&value, reg_last_set[regno],
11434 			       reg_last_set_label[regno], 1))
11440 /* Return nonzero if expression X refers to a REG or to memory
11441    that is set in an instruction more recent than FROM_CUID.  */
11444 use_crosses_set_p (x, from_cuid)
11448   register const char *fmt;
11450   register enum rtx_code code = GET_CODE (x);
11454       unsigned int regno = REGNO (x);
      /* A wide hard reg spans several consecutive registers; check all
	 of them.  A pseudo occupies a single slot.  */
11455       unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
11456 				 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11458 #ifdef PUSH_ROUNDING
11459       /* Don't allow uses of the stack pointer to be moved,
11460 	 because we don't know whether the move crosses a push insn.  */
11461       if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
11464       for (; regno < endreg; regno++)
11465 	if (reg_last_set[regno]
11466 	    && INSN_CUID (reg_last_set[regno]) > from_cuid)
  /* Likewise for memory: any later store may have clobbered it
     (we have no alias information here).  */
11471   if (code == MEM && mem_last_set > from_cuid)
11474   fmt = GET_RTX_FORMAT (code);
  /* Recursively scan the operands and vector elements of X.  */
11476   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11481 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11482 	    if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
11485       else if (fmt[i] == 'e'
11486 	       && use_crosses_set_p (XEXP (x, i), from_cuid))
11492 /* Define three variables used for communication between the following
   routines (reg_dead_at_p_1 and reg_dead_at_p).  */
/* Register range [reg_dead_regno, reg_dead_endregno) being queried.  */
11495 static unsigned int reg_dead_regno, reg_dead_endregno;
/* 1 if the register was seen clobbered, -1 if seen set, per
   reg_dead_at_p_1 below.  */
11496 static int reg_dead_flag;
11498 /* Function called via note_stores from reg_dead_at_p.
11500    If DEST is within [reg_dead_regno, reg_dead_endregno), set
11501    reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */
11504 reg_dead_at_p_1 (dest, x, data)
11507      void *data ATTRIBUTE_UNUSED;
11509   unsigned int regno, endregno;
  /* Only REG destinations are of interest here.  */
11511   if (GET_CODE (dest) != REG)
11514   regno = REGNO (dest);
11515   endregno = regno + (regno < FIRST_PSEUDO_REGISTER
11516 		      ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
  /* Record the store only if DEST's register range overlaps the
     queried range.  */
11518   if (reg_dead_endregno > regno && reg_dead_regno < endregno)
11519     reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
11522 /* Return non-zero if REG is known to be dead at INSN.
11524    We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
11525    referencing REG, it is dead.  If we hit a SET referencing REG, it is
11526    live.  Otherwise, see if it is live or dead at the start of the basic
11527    block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
11528    must be assumed to be always live.  */
11531 reg_dead_at_p (reg, insn)
11538   /* Set variables for reg_dead_at_p_1.  */
11539   reg_dead_regno = REGNO (reg);
11540   reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
11541 					? HARD_REGNO_NREGS (reg_dead_regno,
11547   /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  */
11548   if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
11550       for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11551 	if (TEST_HARD_REG_BIT (newpat_used_regs, i))
11555   /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
11556      beginning of function.  */
11557   for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
11558        insn = prev_nonnote_insn (insn))
11560       note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
      /* reg_dead_flag is 1 for a CLOBBER (dead) and -1 for a SET (live),
	 as set by reg_dead_at_p_1 above.  */
11562 	return reg_dead_flag == 1 ? 1 : 0;
11564       if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
11568   /* Get the basic block number that we were in.  */
11573       for (block = 0; block < n_basic_blocks; block++)
11574 	if (insn == BLOCK_HEAD (block))
11577       if (block == n_basic_blocks)
  /* No SET/CLOBBER/REG_DEAD found: consult the life information at the
     start of the basic block instead.  */
11581   for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11582     if (REGNO_REG_SET_P (BASIC_BLOCK (block)->global_live_at_start, i))
11588 /* Note hard registers in X that are used.  This code is similar to
11589    that in flow.c, but much simpler since we don't care about pseudos.  */
11592 mark_used_regs_combine (x)
11595   RTX_CODE code = GET_CODE (x);
11596   unsigned int regno;
11608     case ADDR_DIFF_VEC:
11611       /* CC0 must die in the insn after it is set, so we don't need to take
11612 	 special note of it here.  */
11618       /* If we are clobbering a MEM, mark any hard registers inside the
11619 	 address as used.  */
11620       if (GET_CODE (XEXP (x, 0)) == MEM)
11621 	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
11626       /* A hard reg in a wide mode may really be multiple registers.
11627 	 If so, mark all of them just like the first.  */
11628       if (regno < FIRST_PSEUDO_REGISTER)
11630 	  unsigned int endregno, r;
11632 	  /* None of this applies to the stack, frame or arg pointers */
11633 	  if (regno == STACK_POINTER_REGNUM
11634 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
11635 	      || regno == HARD_FRAME_POINTER_REGNUM
11637 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
11638 	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
11640 	      || regno == FRAME_POINTER_REGNUM)
11643 	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11644 	  for (r = regno; r < endregno; r++)
11645 	    SET_HARD_REG_BIT (newpat_used_regs, r);
11651       /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
	 the address.  */
11653 	register rtx testreg = SET_DEST (x);
	/* Strip wrappers to find the real destination.  */
11655 	while (GET_CODE (testreg) == SUBREG
11656 	       || GET_CODE (testreg) == ZERO_EXTRACT
11657 	       || GET_CODE (testreg) == SIGN_EXTRACT
11658 	       || GET_CODE (testreg) == STRICT_LOW_PART)
11659 	  testreg = XEXP (testreg, 0);
11661 	if (GET_CODE (testreg) == MEM)
11662 	  mark_used_regs_combine (XEXP (testreg, 0));
11664 	mark_used_regs_combine (SET_SRC (x));
11672   /* Recursively scan the operands of this expression.  */
11675     register const char *fmt = GET_RTX_FORMAT (code);
11677     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11680 	  mark_used_regs_combine (XEXP (x, i));
11681 	else if (fmt[i] == 'E')
11685 	    for (j = 0; j < XVECLEN (x, i); j++)
11686 	      mark_used_regs_combine (XVECEXP (x, i, j));
11692 /* Remove register number REGNO from the dead registers list of INSN.
11694    Return the note used to record the death, if there was one.  */
11697 remove_death (regno, insn)
11698      unsigned int regno;
11701   register rtx note = find_regno_note (insn, REG_DEAD, regno);
  /* When a REG_DEAD note exists, keep the death count in sync and
     detach the note from INSN.  */
11705       REG_N_DEATHS (regno)--;
11706       remove_note (insn, note);
11712 /* For each register (hardware or pseudo) used within expression X, if its
11713    death is in an instruction with cuid between FROM_CUID (inclusive) and
11714    TO_INSN (exclusive), put a REG_DEAD note for that register in the
11715    list headed by PNOTES.
11717    That said, don't move registers killed by maybe_kill_insn.
11719    This is done when X is being merged by combination into TO_INSN.  These
11720    notes will then be distributed as needed.  */
11723 move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
11725      rtx maybe_kill_insn;
11730   register const char *fmt;
11731   register int len, i;
11732   register enum rtx_code code = GET_CODE (x);
11736       unsigned int regno = REGNO (x);
11737       register rtx where_dead = reg_last_death[regno];
11738       register rtx before_dead, after_dead;
11740       /* Don't move the register if it gets killed in between from and to */
11741       if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
11742 	  && ! reg_referenced_p (x, maybe_kill_insn))
11745       /* WHERE_DEAD could be a USE insn made by combine, so first we
11746 	 make sure that we have insns with valid INSN_CUID values.  */
11747       before_dead = where_dead;
11748       while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
11749 	before_dead = PREV_INSN (before_dead);
11751       after_dead = where_dead;
11752       while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
11753 	after_dead = NEXT_INSN (after_dead);
      /* Move the death note only if the death falls inside
	 [FROM_CUID, TO_INSN).  */
11755       if (before_dead && after_dead
11756 	  && INSN_CUID (before_dead) >= from_cuid
11757 	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
11758 	      || (where_dead != after_dead
11759 		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
11761 	  rtx note = remove_death (regno, where_dead);
11763 	  /* It is possible for the call above to return 0.  This can occur
11764 	     when reg_last_death points to I2 or I1 that we combined with.
11765 	     In that case make a new note.
11767 	     We must also check for the case where X is a hard register
11768 	     and NOTE is a death note for a range of hard registers
11769 	     including X.  In that case, we must put REG_DEAD notes for
11770 	     the remaining registers in place of NOTE.  */
11772 	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
11773 	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11774 		  > GET_MODE_SIZE (GET_MODE (x))))
11776 	      unsigned int deadregno = REGNO (XEXP (note, 0));
11777 	      unsigned int deadend
11778 		= (deadregno + HARD_REGNO_NREGS (deadregno,
11779 						 GET_MODE (XEXP (note, 0))));
11780 	      unsigned int ourend
11781 		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      /* Re-add REG_DEAD notes for the pieces of the old range
		 that X does not cover.  */
11784 	      for (i = deadregno; i < deadend; i++)
11785 		if (i < regno || i >= ourend)
11786 		  REG_NOTES (where_dead)
11787 		    = gen_rtx_EXPR_LIST (REG_DEAD,
11788 					 gen_rtx_REG (reg_raw_mode[i], i),
11789 					 REG_NOTES (where_dead));
11792 	  /* If we didn't find any note, or if we found a REG_DEAD note that
11793 	     covers only part of the given reg, and we have a multi-reg hard
11794 	     register, then to be safe we must check for REG_DEAD notes
11795 	     for each register other than the first.  They could have
11796 	     their own REG_DEAD notes lying around.  */
11797 	  else if ((note == 0
11799 		    && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11800 			< GET_MODE_SIZE (GET_MODE (x)))))
11801 		   && regno < FIRST_PSEUDO_REGISTER
11802 		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
11804 	      unsigned int ourend
11805 		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11806 	      unsigned int i, offset;
11810 		offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
	      /* Recurse on each remaining piece so its own death note,
		 if any, is moved as well.  */
11814 	      for (i = regno + offset; i < ourend; i++)
11815 		move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
11816 			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
	  /* Reuse the old note when it matches X's mode exactly;
	     otherwise build a fresh REG_DEAD note for X.  */
11819 	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
11821 	      XEXP (note, 1) = *pnotes;
11825 	      *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
11827 	      REG_N_DEATHS (regno)++;
11833   else if (GET_CODE (x) == SET)
11835       rtx dest = SET_DEST (x);
11837       move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);
11839       /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
11840 	 that accesses one word of a multi-word item, some
11841 	 piece of every register in the expression is used by
11842 	 this insn, so remove any old death.  */
11843       /* ??? So why do we test for equality of the sizes?  */
11845       if (GET_CODE (dest) == ZERO_EXTRACT
11846 	  || GET_CODE (dest) == STRICT_LOW_PART
11847 	  || (GET_CODE (dest) == SUBREG
11848 	      && (((GET_MODE_SIZE (GET_MODE (dest))
11849 		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
11850 		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
11851 		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
11853 	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
11857       /* If this is some other SUBREG, we know it replaces the entire
11858 	 value, so use that as the destination.  */
11859       if (GET_CODE (dest) == SUBREG)
11860 	dest = SUBREG_REG (dest);
11862       /* If this is a MEM, adjust deaths of anything used in the address.
11863 	 For a REG (the only other possibility), the entire value is
11864 	 being replaced so the old value is not used in this insn.  */
11866       if (GET_CODE (dest) == MEM)
11867 	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
  /* CLOBBERs kill their operand outright; nothing dies here.  */
11872   else if (GET_CODE (x) == CLOBBER)
  /* For all other codes, recursively process each operand and
     each vector element of X.  */
11875   len = GET_RTX_LENGTH (code);
11876   fmt = GET_RTX_FORMAT (code);
11878   for (i = 0; i < len; i++)
11883 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11884 	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
11887       else if (fmt[i] == 'e')
11888 	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
11892 /* Return 1 if X is the target of a bit-field assignment in BODY, the
11893    pattern of an insn.  X must be a REG.  */
11896 reg_bitfield_target_p (x, body)
11902   if (GET_CODE (body) == SET)
11904       rtx dest = SET_DEST (body);
11906       unsigned int regno, tregno, endregno, endtregno;
      /* Only ZERO_EXTRACT and STRICT_LOW_PART destinations count as
	 bit-field assignments.  */
11908       if (GET_CODE (dest) == ZERO_EXTRACT)
11909 	target = XEXP (dest, 0);
11910       else if (GET_CODE (dest) == STRICT_LOW_PART)
11911 	target = SUBREG_REG (XEXP (dest, 0));
11915       if (GET_CODE (target) == SUBREG)
11916 	target = SUBREG_REG (target);
11918       if (GET_CODE (target) != REG)
11921       tregno = REGNO (target), regno = REGNO (x);
      /* For pseudos, only an exact match counts; for hard regs, any
	 overlap of the two register ranges does.  */
11922       if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
11923 	return target == x;
11925       endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
11926       endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11928       return endregno > tregno && regno < endtregno;
  /* For a PARALLEL, check each element in turn.  */
11931   else if (GET_CODE (body) == PARALLEL)
11932     for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
11933       if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
11939 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
11940    as appropriate.  I3 and I2 are the insns resulting from the combination
11941    insns including FROM (I2 may be zero).
11943    ELIM_I2 and ELIM_I1 are either zero or registers that we know will
11944    not need REG_DEAD notes because they are being substituted for.  This
11945    saves searching in the most common cases.
11947    Each note in the list is either ignored or placed on some insns, depending
11948    on the type of note.  */
11951 distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
11955      rtx elim_i2, elim_i1;
11957   rtx note, next_note;
11960   for (note = notes; note; note = next_note)
11962       rtx place = 0, place2 = 0;
11964       /* If this NOTE references a pseudo register, ensure it references
11965 	 the latest copy of that register.  */
11966       if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
11967 	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
11968 	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
11970       next_note = XEXP (note, 1);
      /* Dispatch on the kind of note to decide where (or whether)
	 it should be placed.  */
11971       switch (REG_NOTE_KIND (note))
11975 	case REG_EXEC_COUNT:
11976 	  /* Doesn't matter much where we put this, as long as it's somewhere.
11977 	     It is preferable to keep these notes on branches, which is most
11978 	     likely to be i3.  */
11982 	case REG_NON_LOCAL_GOTO:
11983 	  if (GET_CODE (i3) == JUMP_INSN)
11985 	  else if (i2 && GET_CODE (i2) == JUMP_INSN)
11991 	case REG_EH_REGION:
11992 	  /* These notes must remain with the call or trapping instruction.  */
11993 	  if (GET_CODE (i3) == CALL_INSN)
11995 	  else if (i2 && GET_CODE (i2) == CALL_INSN)
11997 	  else if (flag_non_call_exceptions)
11999 	      if (may_trap_p (i3))
12001 	      else if (i2 && may_trap_p (i2))
12003 	      /* ??? Otherwise assume we've combined things such that we
12004 		 can now prove that the instructions can't trap.  Drop the
12005 		 note in this case.  */
12011 	case REG_EH_RETHROW:
12013 	  /* These notes must remain with the call.  It should not be
12014 	     possible for both I2 and I3 to be a call.  */
12015 	  if (GET_CODE (i3) == CALL_INSN)
12017 	  else if (i2 && GET_CODE (i2) == CALL_INSN)
12024 	  /* Any clobbers for i3 may still exist, and so we must process
12025 	     REG_UNUSED notes from that insn.
12027 	     Any clobbers from i2 or i1 can only exist if they were added by
12028 	     recog_for_combine.  In that case, recog_for_combine created the
12029 	     necessary REG_UNUSED notes.  Trying to keep any original
12030 	     REG_UNUSED notes from these insns can cause incorrect output
12031 	     if it is for the same register as the original i3 dest.
12032 	     In that case, we will notice that the register is set in i3,
12033 	     and then add a REG_UNUSED note for the destination of i3, which
12034 	     is wrong.  However, it is possible to have REG_UNUSED notes from
12035 	     i2 or i1 for register which were both used and clobbered, so
12036 	     we keep notes from i2 or i1 if they will turn into REG_DEAD
12039 	  /* If this register is set or clobbered in I3, put the note there
12040 	     unless there is one already.  */
12041 	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
12043 	      if (from_insn != i3)
12046 	      if (! (GET_CODE (XEXP (note, 0)) == REG
12047 		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
12048 		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
12051 	  /* Otherwise, if this register is used by I3, then this register
12052 	     now dies here, so we must put a REG_DEAD note here unless there
12054 	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
12055 		   && ! (GET_CODE (XEXP (note, 0)) == REG
12056 			 ? find_regno_note (i3, REG_DEAD,
12057 					    REGNO (XEXP (note, 0)))
12058 			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
12060 	      PUT_REG_NOTE_KIND (note, REG_DEAD);
12068 	  /* These notes say something about results of an insn.  We can
12069 	     only support them if they used to be on I3 in which case they
12070 	     remain on I3.  Otherwise they are ignored.
12072 	     If the note refers to an expression that is not a constant, we
12073 	     must also ignore the note since we cannot tell whether the
12074 	     equivalence is still true.  It might be possible to do
12075 	     slightly better than this (we only have a problem if I2DEST
12076 	     or I1DEST is present in the expression), but it doesn't
12077 	     seem worth the trouble.  */
12079 	  if (from_insn == i3
12080 	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
12085 	case REG_NO_CONFLICT:
12086 	  /* These notes say something about how a register is used.  They must
12087 	     be present on any use of the register in I2 or I3.  */
12088 	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
12091 	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
12101 	  /* This can show up in several ways -- either directly in the
12102 	     pattern, or hidden off in the constant pool with (or without?)
12103 	     a REG_EQUAL note.  */
12104 	  /* ??? Ignore the without-reg_equal-note problem for now.  */
12105 	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
12106 	      || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
12107 		  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12108 		  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
12112 	      && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
12113 		  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
12114 		      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12115 		      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
12126 	  /* These notes say something about the value of a register prior
12127 	     to the execution of an insn.  It is too much trouble to see
12128 	     if the note is still correct in all situations.  It is better
12129 	     to simply delete it.  */
12133 	  /* If the insn previously containing this note still exists,
12134 	     put it back where it was.  Otherwise move it to the previous
12135 	     insn.  Adjust the corresponding REG_LIBCALL note.  */
12136 	  if (GET_CODE (from_insn) != NOTE)
12140 	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
12141 	      place = prev_real_insn (from_insn);
12143 		XEXP (tem, 0) = place;
12144 	  /* If we're deleting the last remaining instruction of a
12145 	     libcall sequence, don't add the notes.  */
12146 	  else if (XEXP (note, 0) == from_insn)
12152 	  /* This is handled similarly to REG_RETVAL.  */
12153 	  if (GET_CODE (from_insn) != NOTE)
12157 	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
12158 	      place = next_real_insn (from_insn);
12160 		XEXP (tem, 0) = place;
12161 	  /* If we're deleting the last remaining instruction of a
12162 	     libcall sequence, don't add the notes.  */
12163 	  else if (XEXP (note, 0) == from_insn)
12169 	  /* If the register is used as an input in I3, it dies there.
12170 	     Similarly for I2, if it is non-zero and adjacent to I3.
12172 	     If the register is not used as an input in either I3 or I2
12173 	     and it is not one of the registers we were supposed to eliminate,
12174 	     there are two possibilities.  We might have a non-adjacent I2
12175 	     or we might have somehow eliminated an additional register
12176 	     from a computation.  For example, we might have had A & B where
12177 	     we discover that B will always be zero.  In this case we will
12178 	     eliminate the reference to A.
12180 	     In both cases, we must search to see if we can find a previous
12181 	     use of A and put the death note there.  */
12184 	      && GET_CODE (from_insn) == CALL_INSN
12185 	      && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
12187 	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
12189 	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
12190 		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12193 	  if (rtx_equal_p (XEXP (note, 0), elim_i2)
12194 	      || rtx_equal_p (XEXP (note, 0), elim_i1))
12199 	      basic_block bb = BASIC_BLOCK (this_basic_block);
	      /* Walk backwards from I3 looking for an insn that uses or
		 sets the register, to hang the death note on.  */
12201 	      for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
12203 		  if (! INSN_P (tem))
12205 		      if (tem == bb->head)
12210 		  /* If the register is being set at TEM, see if that is all
12211 		     TEM is doing.  If so, delete TEM.  Otherwise, make this
12212 		     into a REG_UNUSED note instead.  */
12213 		  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
12215 		      rtx set = single_set (tem);
12216 		      rtx inner_dest = 0;
12218 		      rtx cc0_setter = NULL_RTX;
12222 			for (inner_dest = SET_DEST (set);
12223 			     (GET_CODE (inner_dest) == STRICT_LOW_PART
12224 			      || GET_CODE (inner_dest) == SUBREG
12225 			      || GET_CODE (inner_dest) == ZERO_EXTRACT);
12226 			     inner_dest = XEXP (inner_dest, 0))
12229 		      /* Verify that it was the set, and not a clobber that
12230 			 modified the register.
12232 			 CC0 targets must be careful to maintain setter/user
12233 			 pairs.  If we cannot delete the setter due to side
12234 			 effects, mark the user with an UNUSED note instead
12237 		      if (set != 0 && ! side_effects_p (SET_SRC (set))
12238 			  && rtx_equal_p (XEXP (note, 0), inner_dest)
12240 			  && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
12241 			      || ((cc0_setter = prev_cc0_setter (tem)) != NULL
12242 				  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
12246 			  /* Move the notes and links of TEM elsewhere.
12247 			     This might delete other dead insns recursively.
12248 			     First set the pattern to something that won't use
12251 			  PATTERN (tem) = pc_rtx;
12253 			  distribute_notes (REG_NOTES (tem), tem, tem,
12254 					    NULL_RTX, NULL_RTX, NULL_RTX);
12255 			  distribute_links (LOG_LINKS (tem));
			  /* Turn TEM into a deleted-insn note.  */
12257 			  PUT_CODE (tem, NOTE);
12258 			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
12259 			  NOTE_SOURCE_FILE (tem) = 0;
12262 			  /* Delete the setter too.  */
12265 			      PATTERN (cc0_setter) = pc_rtx;
12267 			      distribute_notes (REG_NOTES (cc0_setter),
12268 						cc0_setter, cc0_setter,
12269 						NULL_RTX, NULL_RTX, NULL_RTX);
12270 			      distribute_links (LOG_LINKS (cc0_setter));
12272 			      PUT_CODE (cc0_setter, NOTE);
12273 			      NOTE_LINE_NUMBER (cc0_setter)
12274 				= NOTE_INSN_DELETED;
12275 			      NOTE_SOURCE_FILE (cc0_setter) = 0;
12279 		      /* If the register is both set and used here, put the
12280 			 REG_DEAD note here, but place a REG_UNUSED note
12281 			 here too unless there already is one.  */
12282 		      else if (reg_referenced_p (XEXP (note, 0),
12287 			  if (! find_regno_note (tem, REG_UNUSED,
12288 						 REGNO (XEXP (note, 0))))
12290 			      = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0),
12295 			  PUT_REG_NOTE_KIND (note, REG_UNUSED);
12297 			  /* If there isn't already a REG_UNUSED note, put one
12299 			  if (! find_regno_note (tem, REG_UNUSED,
12300 						 REGNO (XEXP (note, 0))))
12305 		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
12306 			   || (GET_CODE (tem) == CALL_INSN
12307 			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
12311 		      /* If we are doing a 3->2 combination, and we have a
12312 			 register which formerly died in i3 and was not used
12313 			 by i2, which now no longer dies in i3 and is used in
12314 			 i2 but does not die in i2, and place is between i2
12315 			 and i3, then we may need to move a link from place to
12317 		      if (i2 && INSN_UID (place) <= max_uid_cuid
12318 			  && INSN_CUID (place) > INSN_CUID (i2)
12320 			  && INSN_CUID (from_insn) > INSN_CUID (i2)
12321 			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12323 			  rtx links = LOG_LINKS (place);
12324 			  LOG_LINKS (place) = 0;
12325 			  distribute_links (links);
12330 		  if (tem == bb->head)
12334 	      /* We haven't found an insn for the death note and it
12335 		 is still a REG_DEAD note, but we have hit the beginning
12336 		 of the block.  If the existing life info says the reg
12337 		 was dead, there's nothing left to do.  Otherwise, we'll
12338 		 need to do a global life update after combine.  */
12339 	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
12340 		  && REGNO_REG_SET_P (bb->global_live_at_start,
12341 				      REGNO (XEXP (note, 0))))
12343 		  SET_BIT (refresh_blocks, this_basic_block);
12348 	  /* If the register is set or already dead at PLACE, we needn't do
12349 	     anything with this note if it is still a REG_DEAD note.
12350 	     We do this if it is set at all, not if it is totally replaced,
12351 	     which is what `dead_or_set_p' checks, so also check for it being
12354 	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
12356 	      unsigned int regno = REGNO (XEXP (note, 0));
12358 	      /* Similarly, if the instruction on which we want to place
12359 		 the note is a noop, we'll need do a global live update
12360 		 after we remove them in delete_noop_moves.  */
12361 	      if (noop_move_p (place))
12363 		  SET_BIT (refresh_blocks, this_basic_block);
12367 	      if (dead_or_set_p (place, XEXP (note, 0))
12368 		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
12370 		  /* Unless the register previously died in PLACE, clear
12371 		     reg_last_death.  [I no longer understand why this is
12373 		  if (reg_last_death[regno] != place)
12374 		    reg_last_death[regno] = 0;
12378 		reg_last_death[regno] = place;
12380 	      /* If this is a death note for a hard reg that is occupying
12381 		 multiple registers, ensure that we are still using all
12382 		 parts of the object.  If we find a piece of the object
12383 		 that is unused, we must arrange for an appropriate REG_DEAD
12384 		 note to be added for it.  However, we can't just emit a USE
12385 		 and tag the note to it, since the register might actually
12386 		 be dead; so we recurse, and the recursive call then finds
12387 		 the previous insn that used this register.  */
12389 	      if (place && regno < FIRST_PSEUDO_REGISTER
12390 		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
12392 		  unsigned int endregno
12393 		    = regno + HARD_REGNO_NREGS (regno,
12394 						GET_MODE (XEXP (note, 0)));
12398 		  for (i = regno; i < endregno; i++)
12399 		    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
12400 			 && ! find_regno_fusage (place, USE, i))
12401 			|| dead_or_set_regno_p (place, i))
12406 		      /* Put only REG_DEAD notes for pieces that are
12407 			 not already dead or set.  */
12409 		      for (i = regno; i < endregno;
12410 			   i += HARD_REGNO_NREGS (i, reg_raw_mode[i]))
12412 			  rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
12413 			  basic_block bb = BASIC_BLOCK (this_basic_block);
12415 			  if (! dead_or_set_p (place, piece)
12416 			      && ! reg_bitfield_target_p (piece,
12420 			        = gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);
12422 			      distribute_notes (new_note, place, place,
12423 						NULL_RTX, NULL_RTX, NULL_RTX);
12425 			  else if (! refers_to_regno_p (i, i + 1,
12426 							PATTERN (place), 0)
12427 				   && ! find_regno_fusage (place, USE, i))
12428 			    for (tem = PREV_INSN (place); ;
12429 				 tem = PREV_INSN (tem))
12431 				if (! INSN_P (tem))
12433 				    if (tem == bb->head)
12435 					SET_BIT (refresh_blocks,
12442 				if (dead_or_set_p (tem, piece)
12443 				    || reg_bitfield_target_p (piece,
12447 				      = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
12462 	  /* Any other notes should not be present at this point in the
	     compilation.  */
      /* Finally, attach the note to the chosen insn(s), keeping the
	 per-register death counts consistent.  */
12469 	  XEXP (note, 1) = REG_NOTES (place);
12470 	  REG_NOTES (place) = note;
12472       else if ((REG_NOTE_KIND (note) == REG_DEAD
12473 		|| REG_NOTE_KIND (note) == REG_UNUSED)
12474 	       && GET_CODE (XEXP (note, 0)) == REG)
12475 	REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
12479 	  if ((REG_NOTE_KIND (note) == REG_DEAD
12480 	       || REG_NOTE_KIND (note) == REG_UNUSED)
12481 	      && GET_CODE (XEXP (note, 0)) == REG)
12482 	    REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
12484 	  REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
12485 					       REG_NOTE_KIND (note),
12487 					       REG_NOTES (place2));
12492 /* Similarly to above, distribute the LOG_LINKS that used to be present on
12493    I3, I2, and I1 to new locations.  This is also called in one case to
12494    add a link pointing at I3 when I3's destination is changed.  */
12497 distribute_links (links)
12500   rtx link, next_link;
12502   for (link = links; link; link = next_link)
12508       next_link = XEXP (link, 1);
12510       /* If the insn that this link points to is a NOTE or isn't a single
12511 	 set, ignore it.  In the latter case, it isn't clear what we
12512 	 can do other than ignore the link, since we can't tell which
12513 	 register it was for.  Such links wouldn't be used by combine
12516 	 It is not possible for the destination of the target of the link to
12517 	 have been changed by combine.  The only potential of this is if we
12518 	 replace I3, I2, and I1 by I3 and I2.  But in that case the
12519 	 destination of I2 also remains unchanged.  */
12521       if (GET_CODE (XEXP (link, 0)) == NOTE
12522 	  || (set = single_set (XEXP (link, 0))) == 0)
      /* Strip wrappers to find the register the link is for.  */
12525       reg = SET_DEST (set);
12526       while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
12527 	     || GET_CODE (reg) == SIGN_EXTRACT
12528 	     || GET_CODE (reg) == STRICT_LOW_PART)
12529 	reg = XEXP (reg, 0);
12531       /* A LOG_LINK is defined as being placed on the first insn that uses
12532 	 a register and points to the insn that sets the register.  Start
12533 	 searching at the next insn after the target of the link and stop
12534 	 when we reach a set of the register or the end of the basic block.
12536 	 Note that this correctly handles the link that used to point from
12537 	 I3 to I2.  Also note that not much searching is typically done here
12538 	 since most links don't point very far away.  */
12540       for (insn = NEXT_INSN (XEXP (link, 0));
12541 	   (insn && (this_basic_block == n_basic_blocks - 1
12542 		     || BLOCK_HEAD (this_basic_block + 1) != insn));
12543 	   insn = NEXT_INSN (insn))
12544 	if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
12546 	    if (reg_referenced_p (reg, PATTERN (insn)))
12550 	else if (GET_CODE (insn) == CALL_INSN
12551 		 && find_reg_fusage (insn, USE, reg))
12557       /* If we found a place to put the link, place it there unless there
12558 	 is already a link to the same insn as LINK at that point.  */
12564 	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
12565 	    if (XEXP (link2, 0) == XEXP (link, 0))
12570 	      XEXP (link, 1) = LOG_LINKS (place);
12571 	      LOG_LINKS (place) = link;
12573 	      /* Set added_links_insn to the earliest insn we added a
		 link to.  */
12575 	      if (added_links_insn == 0
12576 		  || INSN_CUID (added_links_insn) > INSN_CUID (place))
12577 		added_links_insn = place;
12583 /* Compute INSN_CUID for INSN, which is an insn made by combine.  */
  /* Skip forward over USE insns made by combine (they have UIDs above
     max_uid_cuid and so have no recorded cuid of their own).  */
12589   while (insn != 0 && INSN_UID (insn) > max_uid_cuid
12590 	 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
12591     insn = NEXT_INSN (insn);
  /* At this point a combine-made insn with no valid cuid is an
     internal error.  */
12593   if (INSN_UID (insn) > max_uid_cuid)
12596   return INSN_CUID (insn);
/* Print the per-function combiner statistics counters to FILE.  */
12600 dump_combine_stats (file)
12605 	   ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
12606 	   combine_attempts, combine_merges, combine_extras, combine_successes);
12610 dump_combine_total_stats (file)
12615 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
12616 total_attempts, total_merges, total_extras, total_successes);