/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
25 #include "coretypes.h"
29 #include "insn-config.h"
30 #include "insn-attr.h"
31 #include "hard-reg-set.h"
34 #include "addresses.h"
40 #include "basic-block.h"
44 #include "tree-pass.h"
/* Default push/pop addressing codes for the target, chosen by stack
   growth direction.  A target may override either macro in its own
   headers; the `#ifndef` guards make these only fallbacks.  The listing
   had lost the `#else`/`#endif` lines, which are restored here.  */

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
62 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
63 static rtx *find_single_use_1 (rtx, rtx *);
64 static void validate_replace_src_1 (rtx *, void *);
65 static rtx split_insn (rtx);
67 /* Nonzero means allow operands to be volatile.
68 This should be 0 if you are generating rtl, such as if you are calling
69 the functions in optabs.c and expmed.c (most of the time).
70 This should be 1 if all valid insns need to be recognized,
71 such as in regclass.c and final.c and reload.c.
73 init_recog and init_recog_no_volatile are responsible for setting this. */
77 struct recog_data recog_data;
79 /* Contains a vector of operand_alternative structures for every operand.
80 Set up by preprocess_constraints. */
81 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
83 /* On return from `constrain_operands', indicate which alternative
86 int which_alternative;
88 /* Nonzero after end of reload pass.
89 Set to 1 or 0 by toplev.c.
90 Controls the significance of (SUBREG (MEM)). */
94 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
95 int epilogue_completed;
97 /* Initialize data used by the function `recog'.
98 This must be called once in the compilation of a function
99 before any insn recognition may be done in the function. */
102 init_recog_no_volatile (void)
114 /* Check that X is an insn-body for an `asm' with operands
115 and that the operands mentioned in it are legitimate. */
118 check_asm_operands (rtx x)
122 const char **constraints;
125 /* Post-reload, be more strict with things. */
126 if (reload_completed)
128 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
129 extract_insn (make_insn_raw (x));
130 constrain_operands (1);
131 return which_alternative >= 0;
134 noperands = asm_noperands (x);
140 operands = alloca (noperands * sizeof (rtx));
141 constraints = alloca (noperands * sizeof (char *));
143 decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
145 for (i = 0; i < noperands; i++)
147 const char *c = constraints[i];
150 if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
151 c = constraints[c[0] - '0'];
153 if (! asm_operand_ok (operands[i], c))
160 /* Static data for the next two routines. */
162 typedef struct change_t
170 static change_t *changes;
171 static int changes_allocated;
173 static int num_changes = 0;
175 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
176 at which NEW will be placed. If OBJECT is zero, no validation is done,
177 the change is simply made.
179 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
180 will be called with the address and mode as parameters. If OBJECT is
181 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
184 IN_GROUP is nonzero if this is part of a group of changes that must be
185 performed as a group. In that case, the changes will be stored. The
186 function `apply_change_group' will validate and apply the changes.
188 If IN_GROUP is zero, this is a single change. Try to recognize the insn
189 or validate the memory reference with the change applied. If the result
190 is not valid for the machine, suppress the change and return zero.
191 Otherwise, perform the change and return 1. */
194 validate_change (rtx object, rtx *loc, rtx new, int in_group)
198 if (old == new || rtx_equal_p (old, new))
201 gcc_assert (in_group != 0 || num_changes == 0);
205 /* Save the information describing this change. */
206 if (num_changes >= changes_allocated)
208 if (changes_allocated == 0)
209 /* This value allows for repeated substitutions inside complex
210 indexed addresses, or changes in up to 5 insns. */
211 changes_allocated = MAX_RECOG_OPERANDS * 5;
213 changes_allocated *= 2;
215 changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
218 changes[num_changes].object = object;
219 changes[num_changes].loc = loc;
220 changes[num_changes].old = old;
222 if (object && !MEM_P (object))
224 /* Set INSN_CODE to force rerecognition of insn. Save old code in
226 changes[num_changes].old_code = INSN_CODE (object);
227 INSN_CODE (object) = -1;
232 /* If we are making a group of changes, return 1. Otherwise, validate the
233 change group we made. */
238 return apply_change_group ();
241 /* Keep X canonicalized if some changes have made it non-canonical; only
242 modifies the operands of X, not (for example) its code. Simplifications
243 are not the job of this routine.
245 Return true if anything was changed. */
247 canonicalize_change_group (rtx insn, rtx x)
249 if (COMMUTATIVE_P (x)
250 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
252 /* Oops, the caller has made X no longer canonical.
253 Let's redo the changes in the correct order. */
254 rtx tem = XEXP (x, 0);
255 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
256 validate_change (insn, &XEXP (x, 1), tem, 1);
264 /* This subroutine of apply_change_group verifies whether the changes to INSN
265 were valid; i.e. whether INSN can still be recognized. */
268 insn_invalid_p (rtx insn)
270 rtx pat = PATTERN (insn);
271 int num_clobbers = 0;
272 /* If we are before reload and the pattern is a SET, see if we can add
274 int icode = recog (pat, insn,
275 (GET_CODE (pat) == SET
276 && ! reload_completed && ! reload_in_progress)
277 ? &num_clobbers : 0);
278 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
281 /* If this is an asm and the operand aren't legal, then fail. Likewise if
282 this is not an asm and the insn wasn't recognized. */
283 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
284 || (!is_asm && icode < 0))
287 /* If we have to add CLOBBERs, fail if we have to add ones that reference
288 hard registers since our callers can't know if they are live or not.
289 Otherwise, add them. */
290 if (num_clobbers > 0)
294 if (added_clobbers_hard_reg_p (icode))
297 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
298 XVECEXP (newpat, 0, 0) = pat;
299 add_clobbers (newpat, icode);
300 PATTERN (insn) = pat = newpat;
303 /* After reload, verify that all constraints are satisfied. */
304 if (reload_completed)
308 if (! constrain_operands (1))
312 INSN_CODE (insn) = icode;
316 /* Return number of changes made and not validated yet. */
318 num_changes_pending (void)
323 /* Tentatively apply the changes numbered NUM and up.
324 Return 1 if all changes are valid, zero otherwise. */
327 verify_changes (int num)
330 rtx last_validated = NULL_RTX;
332 /* The changes have been applied and all INSN_CODEs have been reset to force
335 The changes are valid if we aren't given an object, or if we are
336 given a MEM and it still is a valid address, or if this is in insn
337 and it is recognized. In the latter case, if reload has completed,
338 we also require that the operands meet the constraints for
341 for (i = num; i < num_changes; i++)
343 rtx object = changes[i].object;
345 /* If there is no object to test or if it is the same as the one we
346 already tested, ignore it. */
347 if (object == 0 || object == last_validated)
352 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
355 else if (insn_invalid_p (object))
357 rtx pat = PATTERN (object);
359 /* Perhaps we couldn't recognize the insn because there were
360 extra CLOBBERs at the end. If so, try to re-recognize
361 without the last CLOBBER (later iterations will cause each of
362 them to be eliminated, in turn). But don't do this if we
363 have an ASM_OPERAND. */
364 if (GET_CODE (pat) == PARALLEL
365 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
366 && asm_noperands (PATTERN (object)) < 0)
370 if (XVECLEN (pat, 0) == 2)
371 newpat = XVECEXP (pat, 0, 0);
377 = gen_rtx_PARALLEL (VOIDmode,
378 rtvec_alloc (XVECLEN (pat, 0) - 1));
379 for (j = 0; j < XVECLEN (newpat, 0); j++)
380 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
383 /* Add a new change to this group to replace the pattern
384 with this new pattern. Then consider this change
385 as having succeeded. The change we added will
386 cause the entire call to fail if things remain invalid.
388 Note that this can lose if a later change than the one
389 we are processing specified &XVECEXP (PATTERN (object), 0, X)
390 but this shouldn't occur. */
392 validate_change (object, &PATTERN (object), newpat, 1);
395 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
396 /* If this insn is a CLOBBER or USE, it is always valid, but is
402 last_validated = object;
405 return (i == num_changes);
408 /* A group of changes has previously been issued with validate_change and
409 verified with verify_changes. Update the BB_DIRTY flags of the affected
410 blocks, and clear num_changes. */
413 confirm_change_group (void)
418 for (i = 0; i < num_changes; i++)
419 if (changes[i].object
420 && INSN_P (changes[i].object)
421 && (bb = BLOCK_FOR_INSN (changes[i].object)))
422 bb->flags |= BB_DIRTY;
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
447 /* Return the number of changes so far in the current group. */
450 num_validated_changes (void)
455 /* Retract the changes numbered NUM and up. */
458 cancel_changes (int num)
462 /* Back out all the changes. Do this in the opposite order in which
464 for (i = num_changes - 1; i >= num; i--)
466 *changes[i].loc = changes[i].old;
467 if (changes[i].object && !MEM_P (changes[i].object))
468 INSN_CODE (changes[i].object) = changes[i].old_code;
473 /* Replace every occurrence of FROM in X with TO. Mark each change with
474 validate_change passing OBJECT. */
477 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
483 enum machine_mode op0_mode = VOIDmode;
484 int prev_changes = num_changes;
491 fmt = GET_RTX_FORMAT (code);
493 op0_mode = GET_MODE (XEXP (x, 0));
495 /* X matches FROM if it is the same rtx or they are both referring to the
496 same register in the same mode. Avoid calling rtx_equal_p unless the
497 operands look similar. */
500 || (REG_P (x) && REG_P (from)
501 && GET_MODE (x) == GET_MODE (from)
502 && REGNO (x) == REGNO (from))
503 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
504 && rtx_equal_p (x, from)))
506 validate_change (object, loc, to, 1);
510 /* Call ourself recursively to perform the replacements.
511 We must not replace inside already replaced expression, otherwise we
512 get infinite recursion for replacements like (reg X)->(subreg (reg X))
513 done by regmove, so we must special case shared ASM_OPERANDS. */
515 if (GET_CODE (x) == PARALLEL)
517 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
519 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
520 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
522 /* Verify that operands are really shared. */
523 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
524 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
526 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
530 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
534 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
537 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
538 else if (fmt[i] == 'E')
539 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
540 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
543 /* If we didn't substitute, there is nothing more to do. */
544 if (num_changes == prev_changes)
547 /* Allow substituted expression to have different mode. This is used by
548 regmove to change mode of pseudo register. */
549 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
550 op0_mode = GET_MODE (XEXP (x, 0));
552 /* Do changes needed to keep rtx consistent. Don't do any other
553 simplifications, as it is not our job. */
555 if (SWAPPABLE_OPERANDS_P (x)
556 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
558 validate_change (object, loc,
559 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
560 : swap_condition (code),
561 GET_MODE (x), XEXP (x, 1),
570 /* If we have a PLUS whose second operand is now a CONST_INT, use
571 simplify_gen_binary to try to simplify it.
572 ??? We may want later to remove this, once simplification is
573 separated from this function. */
574 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
575 validate_change (object, loc,
577 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
580 if (GET_CODE (XEXP (x, 1)) == CONST_INT
581 || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
582 validate_change (object, loc,
584 (PLUS, GET_MODE (x), XEXP (x, 0),
585 simplify_gen_unary (NEG,
586 GET_MODE (x), XEXP (x, 1),
591 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
593 new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
595 /* If any of the above failed, substitute in something that
596 we know won't be recognized. */
598 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
599 validate_change (object, loc, new, 1);
603 /* All subregs possible to simplify should be simplified. */
604 new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
607 /* Subregs of VOIDmode operands are incorrect. */
608 if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
609 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
611 validate_change (object, loc, new, 1);
615 /* If we are replacing a register with memory, try to change the memory
616 to be the mode required for memory in extract operations (this isn't
617 likely to be an insertion operation; if it was, nothing bad will
618 happen, we might just fail in some cases). */
620 if (MEM_P (XEXP (x, 0))
621 && GET_CODE (XEXP (x, 1)) == CONST_INT
622 && GET_CODE (XEXP (x, 2)) == CONST_INT
623 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
624 && !MEM_VOLATILE_P (XEXP (x, 0)))
626 enum machine_mode wanted_mode = VOIDmode;
627 enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
628 int pos = INTVAL (XEXP (x, 2));
630 if (GET_CODE (x) == ZERO_EXTRACT)
632 enum machine_mode new_mode
633 = mode_for_extraction (EP_extzv, 1);
634 if (new_mode != MAX_MACHINE_MODE)
635 wanted_mode = new_mode;
637 else if (GET_CODE (x) == SIGN_EXTRACT)
639 enum machine_mode new_mode
640 = mode_for_extraction (EP_extv, 1);
641 if (new_mode != MAX_MACHINE_MODE)
642 wanted_mode = new_mode;
645 /* If we have a narrower mode, we can do something. */
646 if (wanted_mode != VOIDmode
647 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
649 int offset = pos / BITS_PER_UNIT;
652 /* If the bytes and bits are counted differently, we
653 must adjust the offset. */
654 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
656 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
659 pos %= GET_MODE_BITSIZE (wanted_mode);
661 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
663 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
664 validate_change (object, &XEXP (x, 0), newmem, 1);
675 /* Try replacing every occurrence of FROM in INSN with TO. After all
676 changes have been made, validate by seeing if INSN is still valid. */
679 validate_replace_rtx (rtx from, rtx to, rtx insn)
681 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
682 return apply_change_group ();
685 /* Try replacing every occurrence of FROM in INSN with TO. */
688 validate_replace_rtx_group (rtx from, rtx to, rtx insn)
690 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
693 /* Function called by note_uses to replace used subexpressions. */
694 struct validate_replace_src_data
696 rtx from; /* Old RTX */
697 rtx to; /* New RTX */
698 rtx insn; /* Insn in which substitution is occurring. */
702 validate_replace_src_1 (rtx *x, void *data)
704 struct validate_replace_src_data *d
705 = (struct validate_replace_src_data *) data;
707 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
710 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
714 validate_replace_src_group (rtx from, rtx to, rtx insn)
716 struct validate_replace_src_data d;
721 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
724 /* Try simplify INSN.
725 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
726 pattern and return true if something was simplified. */
729 validate_simplify_insn (rtx insn)
735 pat = PATTERN (insn);
737 if (GET_CODE (pat) == SET)
739 newpat = simplify_rtx (SET_SRC (pat));
740 if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
741 validate_change (insn, &SET_SRC (pat), newpat, 1);
742 newpat = simplify_rtx (SET_DEST (pat));
743 if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
744 validate_change (insn, &SET_DEST (pat), newpat, 1);
746 else if (GET_CODE (pat) == PARALLEL)
747 for (i = 0; i < XVECLEN (pat, 0); i++)
749 rtx s = XVECEXP (pat, 0, i);
751 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
753 newpat = simplify_rtx (SET_SRC (s));
754 if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
755 validate_change (insn, &SET_SRC (s), newpat, 1);
756 newpat = simplify_rtx (SET_DEST (s));
757 if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
758 validate_change (insn, &SET_DEST (s), newpat, 1);
761 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
765 /* Return 1 if the insn using CC0 set by INSN does not contain
766 any ordered tests applied to the condition codes.
767 EQ and NE tests do not count. */
770 next_insn_tests_no_inequality (rtx insn)
772 rtx next = next_cc0_user (insn);
774 /* If there is no next insn, we have to take the conservative choice. */
778 return (INSN_P (next)
779 && ! inequality_comparisons_p (PATTERN (next)));
783 /* This is used by find_single_use to locate an rtx that contains exactly one
784 use of DEST, which is typically either a REG or CC0. It returns a
785 pointer to the innermost rtx expression containing DEST. Appearances of
786 DEST that are being used to totally replace it are not counted. */
789 find_single_use_1 (rtx dest, rtx *loc)
792 enum rtx_code code = GET_CODE (x);
810 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
811 of a REG that occupies all of the REG, the insn uses DEST if
812 it is mentioned in the destination or the source. Otherwise, we
813 need just check the source. */
814 if (GET_CODE (SET_DEST (x)) != CC0
815 && GET_CODE (SET_DEST (x)) != PC
816 && !REG_P (SET_DEST (x))
817 && ! (GET_CODE (SET_DEST (x)) == SUBREG
818 && REG_P (SUBREG_REG (SET_DEST (x)))
819 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
820 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
821 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
822 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
825 return find_single_use_1 (dest, &SET_SRC (x));
829 return find_single_use_1 (dest, &XEXP (x, 0));
835 /* If it wasn't one of the common cases above, check each expression and
836 vector of this code. Look for a unique usage of DEST. */
838 fmt = GET_RTX_FORMAT (code);
839 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
843 if (dest == XEXP (x, i)
844 || (REG_P (dest) && REG_P (XEXP (x, i))
845 && REGNO (dest) == REGNO (XEXP (x, i))))
848 this_result = find_single_use_1 (dest, &XEXP (x, i));
851 result = this_result;
852 else if (this_result)
853 /* Duplicate usage. */
856 else if (fmt[i] == 'E')
860 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
862 if (XVECEXP (x, i, j) == dest
864 && REG_P (XVECEXP (x, i, j))
865 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
868 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
871 result = this_result;
872 else if (this_result)
881 /* See if DEST, produced in INSN, is used only a single time in the
882 sequel. If so, return a pointer to the innermost rtx expression in which
885 If PLOC is nonzero, *PLOC is set to the insn containing the single use.
887 This routine will return usually zero either before flow is called (because
888 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
889 note can't be trusted).
891 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
892 care about REG_DEAD notes or LOG_LINKS.
894 Otherwise, we find the single use by finding an insn that has a
895 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
896 only referenced once in that insn, we know that it must be the first
897 and last insn referencing DEST. */
900 find_single_use (rtx dest, rtx insn, rtx *ploc)
909 next = NEXT_INSN (insn);
911 || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
914 result = find_single_use_1 (dest, &PATTERN (next));
921 if (reload_completed || reload_in_progress || !REG_P (dest))
924 for (next = next_nonnote_insn (insn);
925 next != 0 && !LABEL_P (next);
926 next = next_nonnote_insn (next))
927 if (INSN_P (next) && dead_or_set_p (next, dest))
929 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
930 if (XEXP (link, 0) == insn)
935 result = find_single_use_1 (dest, &PATTERN (next));
945 /* Return 1 if OP is a valid general operand for machine mode MODE.
946 This is either a register reference, a memory reference,
947 or a constant. In the case of a memory reference, the address
948 is checked for general validity for the target machine.
950 Register and memory references must have mode MODE in order to be valid,
951 but some constants have no machine mode and are valid for any mode.
953 If MODE is VOIDmode, OP is checked for validity for whatever mode
956 The main use of this function is as a predicate in match_operand
957 expressions in the machine description.
959 For an explanation of this function's behavior for registers of
960 class NO_REGS, see the comment for `register_operand'. */
963 general_operand (rtx op, enum machine_mode mode)
965 enum rtx_code code = GET_CODE (op);
967 if (mode == VOIDmode)
968 mode = GET_MODE (op);
970 /* Don't accept CONST_INT or anything similar
971 if the caller wants something floating. */
972 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
973 && GET_MODE_CLASS (mode) != MODE_INT
974 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
977 if (GET_CODE (op) == CONST_INT
979 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
983 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
985 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
986 && LEGITIMATE_CONSTANT_P (op));
988 /* Except for certain constants with VOIDmode, already checked for,
989 OP's mode must match MODE if MODE specifies a mode. */
991 if (GET_MODE (op) != mode)
996 rtx sub = SUBREG_REG (op);
998 #ifdef INSN_SCHEDULING
999 /* On machines that have insn scheduling, we want all memory
1000 reference to be explicit, so outlaw paradoxical SUBREGs.
1001 However, we must allow them after reload so that they can
1002 get cleaned up by cleanup_subreg_operands. */
1003 if (!reload_completed && MEM_P (sub)
1004 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
1007 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
1008 may result in incorrect reference. We should simplify all valid
1009 subregs of MEM anyway. But allow this after reload because we
1010 might be called from cleanup_subreg_operands.
1012 ??? This is a kludge. */
1013 if (!reload_completed && SUBREG_BYTE (op) != 0
1017 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1018 create such rtl, and we must reject it. */
1019 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1020 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1024 code = GET_CODE (op);
1028 /* A register whose class is NO_REGS is not a general operand. */
1029 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1030 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
1034 rtx y = XEXP (op, 0);
1036 if (! volatile_ok && MEM_VOLATILE_P (op))
1039 /* Use the mem's mode, since it will be reloaded thus. */
1040 if (memory_address_p (GET_MODE (op), y))
1047 /* Return 1 if OP is a valid memory address for a memory reference
1050 The main use of this function is as a predicate in match_operand
1051 expressions in the machine description. */
1054 address_operand (rtx op, enum machine_mode mode)
1056 return memory_address_p (mode, op);
1059 /* Return 1 if OP is a register reference of mode MODE.
1060 If MODE is VOIDmode, accept a register in any mode.
1062 The main use of this function is as a predicate in match_operand
1063 expressions in the machine description.
1065 As a special exception, registers whose class is NO_REGS are
1066 not accepted by `register_operand'. The reason for this change
1067 is to allow the representation of special architecture artifacts
1068 (such as a condition code register) without extending the rtl
1069 definitions. Since registers of class NO_REGS cannot be used
1070 as registers in any case where register classes are examined,
1071 it is most consistent to keep this function from accepting them. */
1074 register_operand (rtx op, enum machine_mode mode)
1076 if (GET_MODE (op) != mode && mode != VOIDmode)
1079 if (GET_CODE (op) == SUBREG)
1081 rtx sub = SUBREG_REG (op);
1083 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1084 because it is guaranteed to be reloaded into one.
1085 Just make sure the MEM is valid in itself.
1086 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1087 but currently it does result from (SUBREG (REG)...) where the
1088 reg went on the stack.) */
1089 if (! reload_completed && MEM_P (sub))
1090 return general_operand (op, mode);
1092 #ifdef CANNOT_CHANGE_MODE_CLASS
1094 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1095 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1096 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1097 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
1101 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1102 create such rtl, and we must reject it. */
1103 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1104 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1110 /* We don't consider registers whose class is NO_REGS
1111 to be a register operand. */
1113 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1114 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1117 /* Return 1 for a register in Pmode; ignore the tested mode. */
1120 pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1122 return register_operand (op, Pmode);
1125 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1126 or a hard register. */
1129 scratch_operand (rtx op, enum machine_mode mode)
1131 if (GET_MODE (op) != mode && mode != VOIDmode)
1134 return (GET_CODE (op) == SCRATCH
1136 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1139 /* Return 1 if OP is a valid immediate operand for mode MODE.
1141 The main use of this function is as a predicate in match_operand
1142 expressions in the machine description. */
1145 immediate_operand (rtx op, enum machine_mode mode)
1147 /* Don't accept CONST_INT or anything similar
1148 if the caller wants something floating. */
1149 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1150 && GET_MODE_CLASS (mode) != MODE_INT
1151 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1154 if (GET_CODE (op) == CONST_INT
1156 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1159 return (CONSTANT_P (op)
1160 && (GET_MODE (op) == mode || mode == VOIDmode
1161 || GET_MODE (op) == VOIDmode)
1162 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1163 && LEGITIMATE_CONSTANT_P (op));
1166 /* Returns 1 if OP is an operand that is a CONST_INT. */
1169 const_int_operand (rtx op, enum machine_mode mode)
1171 if (GET_CODE (op) != CONST_INT)
1174 if (mode != VOIDmode
1175 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1181 /* Returns 1 if OP is an operand that is a constant integer or constant
1182 floating-point number. */
1185 const_double_operand (rtx op, enum machine_mode mode)
1187 /* Don't accept CONST_INT or anything similar
1188 if the caller wants something floating. */
1189 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1190 && GET_MODE_CLASS (mode) != MODE_INT
1191 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1194 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1195 && (mode == VOIDmode || GET_MODE (op) == mode
1196 || GET_MODE (op) == VOIDmode));
1199 /* Return 1 if OP is a general operand that is not an immediate operand. */
1202 nonimmediate_operand (rtx op, enum machine_mode mode)
1204 return (general_operand (op, mode) && ! CONSTANT_P (op));
1207 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1210 nonmemory_operand (rtx op, enum machine_mode mode)
1212 if (CONSTANT_P (op))
1214 /* Don't accept CONST_INT or anything similar
1215 if the caller wants something floating. */
1216 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1217 && GET_MODE_CLASS (mode) != MODE_INT
1218 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1221 if (GET_CODE (op) == CONST_INT
1223 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1226 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1227 || mode == VOIDmode)
1228 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1229 && LEGITIMATE_CONSTANT_P (op));
1232 if (GET_MODE (op) != mode && mode != VOIDmode)
1235 if (GET_CODE (op) == SUBREG)
1237 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1238 because it is guaranteed to be reloaded into one.
1239 Just make sure the MEM is valid in itself.
1240 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1241 but currently it does result from (SUBREG (REG)...) where the
1242 reg went on the stack.) */
1243 if (! reload_completed && MEM_P (SUBREG_REG (op)))
1244 return general_operand (op, mode);
1245 op = SUBREG_REG (op);
1248 /* We don't consider registers whose class is NO_REGS
1249 to be a register operand. */
1251 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1252 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1255 /* Return 1 if OP is a valid operand that stands for pushing a
1256 value of mode MODE onto the stack.
1258 The main use of this function is as a predicate in match_operand
1259 expressions in the machine description. */
1262 push_operand (rtx op, enum machine_mode mode)
1264 unsigned int rounded_size = GET_MODE_SIZE (mode);
1266 #ifdef PUSH_ROUNDING
1267 rounded_size = PUSH_ROUNDING (rounded_size);
1273 if (mode != VOIDmode && GET_MODE (op) != mode)
1278 if (rounded_size == GET_MODE_SIZE (mode))
1280 if (GET_CODE (op) != STACK_PUSH_CODE)
1285 if (GET_CODE (op) != PRE_MODIFY
1286 || GET_CODE (XEXP (op, 1)) != PLUS
1287 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1288 || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
1289 #ifdef STACK_GROWS_DOWNWARD
1290 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1292 || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
1298 return XEXP (op, 0) == stack_pointer_rtx;
1301 /* Return 1 if OP is a valid operand that stands for popping a
1302 value of mode MODE off the stack.
1304 The main use of this function is as a predicate in match_operand
1305 expressions in the machine description. */
1308 pop_operand (rtx op, enum machine_mode mode)
/* Mode must match exactly unless the caller passed VOIDmode.  */
1313 if (mode != VOIDmode && GET_MODE (op) != mode)
/* The address must use the target's stack-pop autoincrement code.  */
1318 if (GET_CODE (op) != STACK_POP_CODE)
/* And that autoincrement must apply to the stack pointer itself.  */
1321 return XEXP (op, 0) == stack_pointer_rtx;
1324 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1327 memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
/* GO_IF_LEGITIMATE_ADDRESS branches to a "win" label (elided in this
   extract) when ADDR is legitimate; falling through means invalid.  */
1329 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1336 /* Return 1 if OP is a valid memory reference with mode MODE,
1337 including a valid address.
1339 The main use of this function is as a predicate in match_operand
1340 expressions in the machine description. */
1343 memory_operand (rtx op, enum machine_mode mode)
1347 if (! reload_completed)
1348 /* Note that no SUBREG is a memory operand before end of reload pass,
1349 because (SUBREG (MEM...)) forces reloading into a register. */
1350 return MEM_P (op) && general_operand (op, mode)
1352 if (mode != VOIDmode && GET_MODE (op) != mode)
/* After reload, look through a SUBREG wrapper to find the MEM.  */
1356 if (GET_CODE (inner) == SUBREG)
1357 inner = SUBREG_REG (inner);
/* The whole operand (not just INNER) must still satisfy
   general_operand so the address itself is validated.  */
1359 return (MEM_P (inner) && general_operand (op, mode));
1362 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1363 that is, a memory reference whose address is a general_operand. */
1366 indirect_operand (rtx op, enum machine_mode mode)
1368 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1369 if (! reload_completed
1370 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1372 int offset = SUBREG_BYTE (op);
1373 rtx inner = SUBREG_REG (op);
1375 if (mode != VOIDmode && GET_MODE (op) != mode)
1378 /* The only way that we can have a general_operand as the resulting
1379 address is if OFFSET is zero and the address already is an operand
1380 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1383 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1384 || (GET_CODE (XEXP (inner, 0)) == PLUS
1385 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1386 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1387 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
/* Ordinary case: a MEM whose address is itself a general_operand.  */
1391 && memory_operand (op, mode)
1392 && general_operand (XEXP (op, 0), Pmode));
1395 /* Return 1 if this is a comparison operator. This allows the use of
1396 MATCH_OPERATOR to recognize all the branch insns. */
1399 comparison_operator (rtx op, enum machine_mode mode)
/* VOIDmode acts as a wildcard; otherwise modes must match exactly.  */
1401 return ((mode == VOIDmode || GET_MODE (op) == mode)
1402 && COMPARISON_P (op));
1405 /* If BODY is an insn body that uses ASM_OPERANDS,
1406 return the number of operands (both input and output) in the insn.
1407 Otherwise return -1. */
1410 asm_noperands (rtx body)
1412 switch (GET_CODE (body))
/* Case: bare (asm_operands ...) — no output operands at all.  */
1415 /* No output operands: return number of input operands. */
1416 return ASM_OPERANDS_INPUT_LENGTH (body);
1418 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1419 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1420 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1424 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1425 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1427 /* Multiple output operands, or 1 output plus some clobbers:
1428 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1432 /* Count backwards through CLOBBERs to determine number of SETs. */
1433 for (i = XVECLEN (body, 0); i > 0; i--)
1435 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
/* Anything that is neither SET nor CLOBBER means not an asm body.  */
1437 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1441 /* N_SETS is now number of output operands. */
1444 /* Verify that all the SETs we have
1445 came from a single original asm_operands insn
1446 (so that invalid combinations are blocked). */
1447 for (i = 0; i < n_sets; i++)
1449 rtx elt = XVECEXP (body, 0, i);
1450 if (GET_CODE (elt) != SET)
1452 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1454 /* If these ASM_OPERANDS rtx's came from different original insns
1455 then they aren't allowed together. */
1456 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1457 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
/* Total operand count = shared ASM_OPERANDS inputs + one per SET.  */
1460 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1463 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1465 /* 0 outputs, but some clobbers:
1466 body is [(asm_operands ...) (clobber (reg ...))...]. */
1469 /* Make sure all the other parallel things really are clobbers. */
1470 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1471 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1474 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1483 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1484 copy its operands (both input and output) into the vector OPERANDS,
1485 the locations of the operands within the insn into the vector OPERAND_LOCS,
1486 and the constraints for the operands into CONSTRAINTS.
1487 Write the modes of the operands into MODES.
1488 Return the assembler-template.
1490 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1491 we don't store that info. */
1494 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1495 const char **constraints, enum machine_mode *modes,
/* Shape 1: (set OUTPUT (asm_operands ...)) — exactly one output.  */
1502 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1504 asmop = SET_SRC (body);
1505 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
/* Operand 0 is the output, so inputs start at index 1.  */
1507 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1509 for (i = 1; i < noperands; i++)
1512 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1514 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1516 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1518 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1521 /* The output is in the SET.
1522 Its constraint is in the ASM_OPERANDS itself. */
1524 operands[0] = SET_DEST (body);
1526 operand_locs[0] = &SET_DEST (body);
1528 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1530 modes[0] = GET_MODE (SET_DEST (body));
/* Shape 2: bare (asm_operands ...) — inputs only, no outputs.  */
1532 else if (GET_CODE (body) == ASM_OPERANDS)
1535 /* No output operands: BODY is (asm_operands ....). */
1537 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1539 /* The input operands are found in the 1st element vector. */
1540 /* Constraints for inputs are in the 2nd element vector. */
1541 for (i = 0; i < noperands; i++)
1544 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1546 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1548 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1550 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
/* Shape 3: PARALLEL of SETs (outputs) followed by CLOBBERs.  */
1553 else if (GET_CODE (body) == PARALLEL
1554 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1555 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1557 asmop = SET_SRC (XVECEXP (body, 0, 0));
1558 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1559 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1560 int nout = 0; /* Does not include CLOBBERs. */
1562 /* At least one output, plus some CLOBBERs. */
1564 /* The outputs are in the SETs.
1565 Their constraints are in the ASM_OPERANDS itself. */
1566 for (i = 0; i < nparallel; i++)
1568 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1569 break; /* Past last SET */
1572 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1574 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1576 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1578 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
/* Inputs follow the outputs in the caller's vectors.  */
1582 for (i = 0; i < nin; i++)
1585 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1587 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1589 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1591 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
/* Shape 4: PARALLEL of (asm_operands ...) plus CLOBBERs — no outputs.  */
1594 else if (GET_CODE (body) == PARALLEL
1595 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1597 /* No outputs, but some CLOBBERs. */
1599 asmop = XVECEXP (body, 0, 0);
1600 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1602 for (i = 0; i < nin; i++)
1605 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1607 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1609 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1611 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
/* Report the source location of the asm in whichever representation
   this build uses (mapped location vs. file/line pair).  */
1618 #ifdef USE_MAPPED_LOCATION
1619 *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1621 loc->file = ASM_OPERANDS_SOURCE_FILE (asmop);
1622 loc->line = ASM_OPERANDS_SOURCE_LINE (asmop);
1626 return ASM_OPERANDS_TEMPLATE (asmop);
1629 /* Check if an asm_operand matches its constraints.
1630 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1633 asm_operand_ok (rtx op, const char *constraint)
1637 /* Use constrain_operands after reload. */
1638 gcc_assert (!reload_completed);
/* Walk the constraint string one letter at a time; any letter that
   accepts OP makes the whole constraint succeed.  */
1642 char c = *constraint;
1659 case '0': case '1': case '2': case '3': case '4':
1660 case '5': case '6': case '7': case '8': case '9':
1661 /* For best results, our caller should have given us the
1662 proper matching constraint, but we can't actually fail
1663 the check if they didn't. Indicate that results are
/* Skip the rest of the (possibly multi-digit) operand number.  */
1667 while (ISDIGIT (*constraint));
1673 if (address_operand (op, VOIDmode))
1678 case 'V': /* non-offsettable */
1679 if (memory_operand (op, VOIDmode))
1683 case 'o': /* offsettable */
1684 if (offsettable_nonstrict_memref_p (op))
1689 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1690 excepting those that expand_call created. Further, on some
1691 machines which do not have generalized auto inc/dec, an inc/dec
1692 is not a memory_operand.
1694 Match any memory and hope things are resolved after reload. */
1698 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1699 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1706 || GET_CODE (XEXP (op, 0)) == PRE_INC
1707 || GET_CODE (XEXP (op, 0)) == POST_INC))
/* Floating constants ('E'/'F' family).  */
1713 if (GET_CODE (op) == CONST_DOUBLE
1714 || (GET_CODE (op) == CONST_VECTOR
1715 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1720 if (GET_CODE (op) == CONST_DOUBLE
1721 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
1725 if (GET_CODE (op) == CONST_DOUBLE
1726 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
/* Integer constants, including VOIDmode CONST_DOUBLEs (wide ints).  */
1731 if (GET_CODE (op) == CONST_INT
1732 || (GET_CODE (op) == CONST_DOUBLE
1733 && GET_MODE (op) == VOIDmode))
1738 if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
1743 if (GET_CODE (op) == CONST_INT
1744 || (GET_CODE (op) == CONST_DOUBLE
1745 && GET_MODE (op) == VOIDmode))
/* Target-defined integer-range constraints 'I' through 'P'.  */
1750 if (GET_CODE (op) == CONST_INT
1751 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
1755 if (GET_CODE (op) == CONST_INT
1756 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
1760 if (GET_CODE (op) == CONST_INT
1761 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
1765 if (GET_CODE (op) == CONST_INT
1766 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
1770 if (GET_CODE (op) == CONST_INT
1771 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
1775 if (GET_CODE (op) == CONST_INT
1776 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
1780 if (GET_CODE (op) == CONST_INT
1781 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
1785 if (GET_CODE (op) == CONST_INT
1786 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
1795 if (general_operand (op, VOIDmode))
1800 /* For all other letters, we first check for a register class,
1801 otherwise it is an EXTRA_CONSTRAINT. */
1802 if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1805 if (GET_MODE (op) == BLKmode)
1807 if (register_operand (op, VOIDmode))
1810 #ifdef EXTRA_CONSTRAINT_STR
1811 else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
1813 else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
1814 /* Every memory operand can be reloaded to fit. */
1815 && memory_operand (op, VOIDmode))
1817 else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
1818 /* Every address operand can be reloaded to fit. */
1819 && address_operand (op, VOIDmode))
/* Advance past multi-character constraints to the next letter.  */
1824 len = CONSTRAINT_LEN (c, constraint);
1827 while (--len && *constraint);
1835 /* Given an rtx *P, if it is a sum containing an integer constant term,
1836 return the location (type rtx *) of the pointer to that constant term.
1837 Otherwise, return a null pointer. */
1840 find_constant_term_loc (rtx *p)
1843 enum rtx_code code = GET_CODE (*p);
1845 /* If *P IS such a constant term, P is its location. */
1847 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1851 /* Otherwise, if not a sum, it has no constant term. */
1853 if (GET_CODE (*p) != PLUS)
1856 /* If one of the summands is constant, return its location. */
1858 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1859 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1862 /* Otherwise, check each summand for containing a constant term. */
/* Recurse into the left summand first, then the right.  */
1864 if (XEXP (*p, 0) != 0)
1866 tem = find_constant_term_loc (&XEXP (*p, 0));
1871 if (XEXP (*p, 1) != 0)
1873 tem = find_constant_term_loc (&XEXP (*p, 1));
1881 /* Return 1 if OP is a memory reference
1882 whose address contains no side effects
1883 and remains valid after the addition
1884 of a positive integer less than the
1885 size of the object being referenced.
1887 We assume that the original address is valid and do not check it.
1889 This uses strict_memory_address_p as a subroutine, so
1890 don't use it before reload. */
1893 offsettable_memref_p (rtx op)
/* Strict variant: hard-register validity required (strictp == 1).  */
1895 return ((MEM_P (op))
1896 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1899 /* Similar, but don't require a strictly valid mem ref:
1900 consider pseudo-regs valid as index or base regs. */
1903 offsettable_nonstrict_memref_p (rtx op)
/* Non-strict variant: pseudos allowed as base/index (strictp == 0).  */
1905 return ((MEM_P (op))
1906 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1909 /* Return 1 if Y is a memory address which contains no side effects
1910 and would remain valid after the addition of a positive integer
1911 less than the size of that mode.
1913 We assume that the original address is valid and do not check it.
1914 We do check that it is valid for narrower modes.
1916 If STRICTP is nonzero, we require a strictly valid address,
1917 for the sake of use in reload.c. */
1920 offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
1922 enum rtx_code ycode = GET_CODE (y);
/* Pick the strict or non-strict address validator up front.  */
1926 int (*addressp) (enum machine_mode, rtx) =
1927 (strictp ? strict_memory_address_p : memory_address_p);
1928 unsigned int mode_sz = GET_MODE_SIZE (mode);
1930 if (CONSTANT_ADDRESS_P (y))
1933 /* Adjusting an offsettable address involves changing to a narrower mode.
1934 Make sure that's OK. */
1936 if (mode_dependent_address_p (y))
1939 /* ??? How much offset does an offsettable BLKmode reference need?
1940 Clearly that depends on the situation in which it's being used.
1941 However, the current situation in which we test 0xffffffff is
1942 less than ideal. Caveat user. */
1944 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1946 /* If the expression contains a constant term,
1947 see if it remains valid when max possible offset is added. */
1949 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
/* Temporarily bump the constant term in place, test, then restore.  */
1954 *y2 = plus_constant (*y2, mode_sz - 1);
1955 /* Use QImode because an odd displacement may be automatically invalid
1956 for any wider mode. But it should be valid for a single byte. */
1957 good = (*addressp) (QImode, y);
1959 /* In any case, restore old contents of memory. */
/* Autoincrement addresses can never be offset.  */
1964 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1967 /* The offset added here is chosen as the maximum offset that
1968 any instruction could need to add when operating on something
1969 of the specified mode. We assume that if Y and Y+c are
1970 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1971 go inside a LO_SUM here, so we do so as well. */
1972 if (GET_CODE (y) == LO_SUM
1974 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1975 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1976 plus_constant (XEXP (y, 1), mode_sz - 1));
1978 z = plus_constant (y, mode_sz - 1);
1980 /* Use QImode because an odd displacement may be automatically invalid
1981 for any wider mode. But it should be valid for a single byte. */
1982 return (*addressp) (QImode, z);
1985 /* Return 1 if ADDR is an address-expression whose effect depends
1986 on the mode of the memory reference it is used in.
1988 Autoincrement addressing is a typical example of mode-dependence
1989 because the amount of the increment depends on the mode. */
1992 mode_dependent_address_p (rtx addr)
1994 /* Auto-increment addressing with anything other than post_modify
1995 or pre_modify always introduces a mode dependency. Catch such
1996 cases now instead of deferring to the target. */
1997 if (GET_CODE (addr) == PRE_INC
1998 || GET_CODE (addr) == POST_INC
1999 || GET_CODE (addr) == PRE_DEC
2000 || GET_CODE (addr) == POST_DEC)
/* Delegate remaining cases to the target macro, which jumps to
   "win" (below) when the address IS mode-dependent.  */
2003 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2005 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2006 win: ATTRIBUTE_UNUSED_LABEL
2010 /* Like extract_insn, but save insn extracted and don't extract again, when
2011 called again for the same insn expecting that recog_data still contain the
2012 valid information. This is used primary by gen_attr infrastructure that
2013 often does extract insn again and again. */
2015 extract_insn_cached (rtx insn)
/* Fast path: recog_data already describes this recognized insn.  */
2017 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2019 extract_insn (insn);
/* Record which insn the cached data belongs to.  */
2020 recog_data.insn = insn;
2023 /* Do cached extract_insn, constrain_operands and complain about failures.
2024 Used by insn_attrtab. */
2026 extract_constrain_insn_cached (rtx insn)
2028 extract_insn_cached (insn);
/* which_alternative == -1 means constraints haven't been matched yet.  */
2029 if (which_alternative == -1
2030 && !constrain_operands (reload_completed))
2031 fatal_insn_not_found (insn);
2034 /* Do cached constrain_operands and complain about failures. */
2036 constrain_operands_cached (int strict)
/* Only recompute when no alternative has been matched yet.  */
2038 if (which_alternative == -1)
2039 return constrain_operands (strict);
2044 /* Analyze INSN and fill in recog_data. */
2047 extract_insn (rtx insn)
2052 rtx body = PATTERN (insn);
/* Reset the global recog_data before refilling it.  */
2054 recog_data.insn = NULL;
2055 recog_data.n_operands = 0;
2056 recog_data.n_alternatives = 0;
2057 recog_data.n_dups = 0;
2058 which_alternative = -1;
2060 switch (GET_CODE (body))
/* Recognize the asm shapes handled by asm_noperands/decode_asm_operands.  */
2070 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2075 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2076 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2077 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2083 recog_data.n_operands = noperands = asm_noperands (body);
2086 /* This insn is an `asm' with operands. */
2088 /* expand_asm_operands makes sure there aren't too many operands. */
2089 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2091 /* Now get the operand values and constraints out of the insn. */
2092 decode_asm_operands (body, recog_data.operand,
2093 recog_data.operand_loc,
2094 recog_data.constraints,
2095 recog_data.operand_mode, NULL);
/* For an asm, count alternatives by counting commas in the first
   operand's constraint string.  */
2098 const char *p = recog_data.constraints[0];
2099 recog_data.n_alternatives = 1;
2101 recog_data.n_alternatives += (*p++ == ',');
2105 fatal_insn_not_found (insn);
2109 /* Ordinary insn: recognize it, get the operands via insn_extract
2110 and get the constraints. */
2112 icode = recog_memoized (insn);
2114 fatal_insn_not_found (insn);
/* Pull operand counts and constraints from the generated insn table.  */
2116 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2117 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2118 recog_data.n_dups = insn_data[icode].n_dups;
2120 insn_extract (insn);
2122 for (i = 0; i < noperands; i++)
2124 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2125 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2126 /* VOIDmode match_operands gets mode from their real operand. */
2127 if (recog_data.operand_mode[i] == VOIDmode)
2128 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
/* Classify each operand as OP_OUT / OP_INOUT / OP_IN from the
   leading '=' or '+' of its constraint.  */
2131 for (i = 0; i < noperands; i++)
2132 recog_data.operand_type[i]
2133 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2134 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2137 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2140 /* After calling extract_insn, you can use this function to extract some
2141 information from the constraint strings into a more usable form.
2142 The collected data is stored in recog_op_alt. */
2144 preprocess_constraints (void)
/* Clear one operand_alternative record per (operand, alternative) pair.  */
2148 for (i = 0; i < recog_data.n_operands; i++)
2149 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2150 * sizeof (struct operand_alternative)));
2152 for (i = 0; i < recog_data.n_operands; i++)
2155 struct operand_alternative *op_alt;
2156 const char *p = recog_data.constraints[i];
2158 op_alt = recog_op_alt[i];
2160 for (j = 0; j < recog_data.n_alternatives; j++)
2162 op_alt[j].cl = NO_REGS;
2163 op_alt[j].constraint = p;
2164 op_alt[j].matches = -1;
2165 op_alt[j].matched = -1;
/* An empty constraint for this alternative accepts anything.  */
2167 if (*p == '\0' || *p == ',')
2169 op_alt[j].anything_ok = 1;
/* Skip to the next alternative (',' delimited).  */
2179 while (c != ',' && c != '\0');
2180 if (c == ',' || c == '\0')
2188 case '=': case '+': case '*': case '%':
2189 case 'E': case 'F': case 'G': case 'H':
2190 case 's': case 'i': case 'n':
2191 case 'I': case 'J': case 'K': case 'L':
2192 case 'M': case 'N': case 'O': case 'P':
2193 /* These don't say anything we care about. */
/* '?' and '!' discourage an alternative with different weights.  */
2197 op_alt[j].reject += 6;
2200 op_alt[j].reject += 600;
2203 op_alt[j].earlyclobber = 1;
2206 case '0': case '1': case '2': case '3': case '4':
2207 case '5': case '6': case '7': case '8': case '9':
/* Record the two ends of a matching-operand pair.  */
2210 op_alt[j].matches = strtoul (p, &end, 10);
2211 recog_op_alt[op_alt[j].matches][j].matched = i;
2217 op_alt[j].memory_ok = 1;
2220 op_alt[j].decmem_ok = 1;
2223 op_alt[j].incmem_ok = 1;
2226 op_alt[j].nonoffmem_ok = 1;
2229 op_alt[j].offmem_ok = 1;
2232 op_alt[j].anything_ok = 1;
/* 'p': operand is an address; allow base registers.  */
2236 op_alt[j].is_address = 1;
2237 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2238 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
2244 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
/* Target-defined extra constraints: memory, address, or reg class.  */
2248 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2250 op_alt[j].memory_ok = 1;
2253 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2255 op_alt[j].is_address = 1;
2257 = (reg_class_subunion
2258 [(int) op_alt[j].cl]
2259 [(int) base_reg_class (VOIDmode, ADDRESS,
2265 = (reg_class_subunion
2266 [(int) op_alt[j].cl]
2267 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
/* Advance past multi-character constraint names.  */
2270 p += CONSTRAINT_LEN (c, p);
2276 /* Check the operands of an insn against the insn's operand constraints
2277 and return 1 if they are valid.
2278 The information about the insn's operands, constraints, operand modes
2279 etc. is obtained from the global variables set up by extract_insn.
2281 WHICH_ALTERNATIVE is set to a number which indicates which
2282 alternative of constraints was matched: 0 for the first alternative,
2283 1 for the next, etc.
2285 In addition, when two operands are required to match
2286 and it happens that the output operand is (reg) while the
2287 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2288 make the output operand look like the input.
2289 This is because the output operand is the one the template will print.
2291 This is used in final, just before printing the assembler code and by
2292 the routines that determine an insn's attribute.
2294 If STRICT is a positive nonzero value, it means that we have been
2295 called after reload has been completed. In that case, we must
2296 do all checks strictly. If it is zero, it means that we have been called
2297 before reload has completed. In that case, we first try to see if we can
2298 find an alternative that matches strictly. If not, we try again, this
2299 time assuming that reload will fix up the insn. This provides a "best
2300 guess" for the alternative and is used to compute attributes of insns prior
2301 to reload. A negative value of STRICT is used for this internal call. */
2309 constrain_operands (int strict)
2311 const char *constraints[MAX_RECOG_OPERANDS]
2312 int matching_operands[MAX_RECOG_OPERANDS];
2313 int earlyclobber[MAX_RECOG_OPERANDS];
2316 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2317 int funny_match_index;
2319 which_alternative = 0;
/* With no operands or no alternatives there is nothing to check.  */
2320 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2323 for (c = 0; c < recog_data.n_operands; c++)
2325 constraints[c] = recog_data.constraints[c];
2326 matching_operands[c] = -1;
/* Outer loop: try each alternative in turn.  */
2331 int seen_earlyclobber_at = -1;
2334 funny_match_index = 0;
2336 for (opno = 0; opno < recog_data.n_operands; opno++)
2338 rtx op = recog_data.operand[opno];
2339 enum machine_mode mode = GET_MODE (op);
2340 const char *p = constraints[opno];
2346 earlyclobber[opno] = 0;
2348 /* A unary operator may be accepted by the predicate, but it
2349 is irrelevant for matching constraints. */
/* Look through a SUBREG; for hard regs compute the byte offset
   so reg_fits_class_p can check the right hard register.  */
2353 if (GET_CODE (op) == SUBREG)
2355 if (REG_P (SUBREG_REG (op))
2356 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2357 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2358 GET_MODE (SUBREG_REG (op)),
2361 op = SUBREG_REG (op);
2364 /* An empty constraint or empty alternative
2365 allows anything which matched the pattern. */
2366 if (*p == 0 || *p == ',')
/* Dispatch on the current constraint character.  */
2370 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2379 case '?': case '!': case '*': case '%':
2384 /* Ignore rest of this alternative as far as
2385 constraint checking is concerned. */
2388 while (*p && *p != ',');
2393 earlyclobber[opno] = 1;
2394 if (seen_earlyclobber_at < 0)
2395 seen_earlyclobber_at = opno;
2398 case '0': case '1': case '2': case '3': case '4':
2399 case '5': case '6': case '7': case '8': case '9':
2401 /* This operand must be the same as a previous one.
2402 This kind of constraint is used for instructions such
2403 as add when they take only two operands.
2405 Note that the lower-numbered operand is passed first.
2407 If we are not testing strictly, assume that this
2408 constraint will be satisfied. */
2413 match = strtoul (p, &end, 10);
2420 rtx op1 = recog_data.operand[match];
2421 rtx op2 = recog_data.operand[opno];
2423 /* A unary operator may be accepted by the predicate,
2424 but it is irrelevant for matching constraints. */
2426 op1 = XEXP (op1, 0);
2428 op2 = XEXP (op2, 0);
2430 val = operands_match_p (op1, op2);
2433 matching_operands[opno] = match;
2434 matching_operands[match] = opno;
2439 /* If output is *x and input is *--x, arrange later
2440 to change the output to *--x as well, since the
2441 output op is the one that will be printed. */
2442 if (val == 2 && strict > 0)
2444 funny_match[funny_match_index].this = opno;
2445 funny_match[funny_match_index++].other = match;
2452 /* p is used for address_operands. When we are called by
2453 gen_reload, no one will have checked that the address is
2454 strictly valid, i.e., that all pseudos requiring hard regs
2455 have gotten them. */
2457 || (strict_memory_address_p (recog_data.operand_mode[opno],
2462 /* No need to check general_operand again;
2463 it was done in insn-recog.c. Well, except that reload
2464 doesn't check the validity of its replacements, but
2465 that should only matter when there's a bug. */
2467 /* Anything goes unless it is a REG and really has a hard reg
2468 but the hard reg is not in the class GENERAL_REGS. */
2472 || GENERAL_REGS == ALL_REGS
2473 || (reload_in_progress
2474 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2475 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2478 else if (strict < 0 || general_operand (op, mode))
2483 /* This is used for a MATCH_SCRATCH in the cases when
2484 we don't actually need anything. So anything goes
2490 /* Memory operands must be valid, to the extent
2491 required by STRICT. */
2495 && !strict_memory_address_p (GET_MODE (op),
2499 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2503 /* Before reload, accept what reload can turn into mem. */
2504 else if (strict < 0 && CONSTANT_P (op))
2506 /* During reload, accept a pseudo */
2507 else if (reload_in_progress && REG_P (op)
2508 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
/* '<' and '>': autodecrement / autoincrement memory operands.  */
2514 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2515 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2521 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2522 || GET_CODE (XEXP (op, 0)) == POST_INC))
2528 if (GET_CODE (op) == CONST_DOUBLE
2529 || (GET_CODE (op) == CONST_VECTOR
2530 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2536 if (GET_CODE (op) == CONST_DOUBLE
2537 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
/* 's'/'i'/'n': symbolic / immediate integer constants.  */
2542 if (GET_CODE (op) == CONST_INT
2543 || (GET_CODE (op) == CONST_DOUBLE
2544 && GET_MODE (op) == VOIDmode))
2547 if (CONSTANT_P (op))
2552 if (GET_CODE (op) == CONST_INT
2553 || (GET_CODE (op) == CONST_DOUBLE
2554 && GET_MODE (op) == VOIDmode))
2566 if (GET_CODE (op) == CONST_INT
2567 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
/* 'V': non-offsettable memory; 'o': offsettable memory.  */
2573 && ((strict > 0 && ! offsettable_memref_p (op))
2575 && !(CONSTANT_P (op) || MEM_P (op)))
2576 || (reload_in_progress
2578 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2583 if ((strict > 0 && offsettable_memref_p (op))
2584 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2585 /* Before reload, accept what reload can handle. */
2587 && (CONSTANT_P (op) || MEM_P (op)))
2588 /* During reload, accept a pseudo */
2589 || (reload_in_progress && REG_P (op)
2590 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* Default: register-class letters and target extras.  */
2599 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2605 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2606 || (strict == 0 && GET_CODE (op) == SCRATCH)
2608 && reg_fits_class_p (op, cl, offset, mode)))
2611 #ifdef EXTRA_CONSTRAINT_STR
2612 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2615 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2616 /* Every memory operand can be reloaded to fit. */
2617 && ((strict < 0 && MEM_P (op))
2618 /* Before reload, accept what reload can turn
2620 || (strict < 0 && CONSTANT_P (op))
2621 /* During reload, accept a pseudo */
2622 || (reload_in_progress && REG_P (op)
2623 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2625 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2626 /* Every address operand can be reloaded to fit. */
2633 while (p += len, c);
2635 constraints[opno] = p;
2636 /* If this operand did not win somehow,
2637 this alternative loses. */
2641 /* This alternative won; the operands are ok.
2642 Change whichever operands this alternative says to change. */
2647 /* See if any earlyclobber operand conflicts with some other
2650 if (strict > 0 && seen_earlyclobber_at >= 0)
2651 for (eopno = seen_earlyclobber_at;
2652 eopno < recog_data.n_operands;
2654 /* Ignore earlyclobber operands now in memory,
2655 because we would often report failure when we have
2656 two memory operands, one of which was formerly a REG. */
2657 if (earlyclobber[eopno]
2658 && REG_P (recog_data.operand[eopno]))
2659 for (opno = 0; opno < recog_data.n_operands; opno++)
2660 if ((MEM_P (recog_data.operand[opno])
2661 || recog_data.operand_type[opno] != OP_OUT)
2663 /* Ignore things like match_operator operands. */
2664 && *recog_data.constraints[opno] != 0
2665 && ! (matching_operands[opno] == eopno
2666 && operands_match_p (recog_data.operand[opno],
2667 recog_data.operand[eopno]))
2668 && ! safe_from_earlyclobber (recog_data.operand[opno],
2669 recog_data.operand[eopno]))
/* Apply the deferred output := input substitutions recorded above.  */
2674 while (--funny_match_index >= 0)
2676 recog_data.operand[funny_match[funny_match_index].other]
2677 = recog_data.operand[funny_match[funny_match_index].this];
2684 which_alternative++;
2686 while (which_alternative < recog_data.n_alternatives);
2688 which_alternative = -1;
2689 /* If we are about to reject this, but we are not to test strictly,
2690 try a very loose test. Only return failure if it fails also. */
2692 return constrain_operands (-1);
2697 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2698 is a hard reg in class CLASS when its regno is offset by OFFSET
2699 and changed to mode MODE.
2700 If REG occupies multiple hard regs, all of them must be in CLASS. */
2703 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2704 enum machine_mode mode)
2706 int regno = REGNO (operand);
/* Only hard registers can be tested against a register class.  */
2711 if (regno < FIRST_PSEUDO_REGISTER
2712 && TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
/* Check every hard register the (possibly multi-reg) value spans.  */
2717 for (sr = hard_regno_nregs[regno][mode] - 1;
2719 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2728 /* Split single instruction. Helper function for split_all_insns and
2729 split_all_insns_noflow. Return last insn in the sequence if successful,
2730 or NULL if unsuccessful. */
2733 split_insn (rtx insn)
2735 /* Split insns here to get max fine-grain parallelism. */
/* FIRST marks the insn before the split so we can walk the new
   sequence afterwards.  */
2736 rtx first = PREV_INSN (insn);
2737 rtx last = try_split (PATTERN (insn), insn, 1);
2742 /* try_split returns the NOTE that INSN became. */
2743 SET_INSN_DELETED (insn);
2745 /* ??? Coddle to md files that generate subregs in post-reload
2746 splitters instead of computing the proper hard register. */
2747 if (reload_completed && first != last)
2749 first = NEXT_INSN (first);
/* Clean up operands on every insn the splitter produced.  */
2753 cleanup_subreg_operands (first);
2756 first = NEXT_INSN (first);
2762 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2765 split_all_insns (int upd_life)
/* Bitmap of basic blocks whose liveness info must be recomputed.  */
2771 blocks = sbitmap_alloc (last_basic_block);
2772 sbitmap_zero (blocks);
2775 FOR_EACH_BB_REVERSE (bb)
2778 bool finish = false;
/* Walk the insns of BB by hand; FINISH is set when BB_END is seen so
   the loop processes the last insn before stopping.  */
2780 for (insn = BB_HEAD (bb); !finish ; insn = next)
2782 /* Can't use `next_real_insn' because that might go across
2783 CODE_LABELS and short-out basic blocks. */
2784 next = NEXT_INSN (insn);
2785 finish = (insn == BB_END (bb));
2788 rtx set = single_set (insn);
2790 /* Don't split no-op move insns. These should silently
2791 disappear later in final. Splitting such insns would
2792 break the code that handles REG_NO_CONFLICT blocks. */
2793 if (set && set_noop_p (set))
2795 /* Nops get in the way while scheduling, so delete them
2796 now if register allocation has already been done. It
2797 is too risky to try to do this before register
2798 allocation, and there are unlikely to be very many
2799 nops then anyways. */
2800 if (reload_completed)
2802 /* If the no-op set has a REG_UNUSED note, we need
2803 to update liveness information. */
2804 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2806 SET_BIT (blocks, bb->index)
2809 /* ??? Is life info affected by deleting edges? */
2810 delete_insn_and_edges (insn);
/* Not a no-op move: actually try to split it.  */
2815 rtx last = split_insn (insn);
2818 /* The split sequence may include barrier, but the
2819 BB boundary we are interested in will be set to
/* Step back over any barrier so we mark the block that
   actually contains the split insns.  */
2822 while (BARRIER_P (last))
2823 last = PREV_INSN (last);
2824 SET_BIT (blocks, bb->index);
/* Splitting may have created new basic blocks; discover them and,
   if the block count changed, grow the bitmap before updating life.  */
2834 int old_last_basic_block = last_basic_block;
2836 find_many_sub_basic_blocks (blocks);
2838 if (old_last_basic_block != last_basic_block && upd_life)
2839 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2842 if (changed && upd_life)
2843 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2846 #ifdef ENABLE_CHECKING
2847 verify_flow_info ();
2850 sbitmap_free (blocks);
2853 /* Same as split_all_insns, but do not expect CFG to be available.
2854 Used by machine dependent reorg passes. */
2857 split_all_insns_noflow (void)
/* Simple linear walk over the whole insn chain -- no basic-block
   structure is consulted, unlike split_all_insns above.  */
2861 for (insn = get_insns (); insn; insn = next)
2863 next = NEXT_INSN (insn);
2866 /* Don't split no-op move insns. These should silently
2867 disappear later in final. Splitting such insns would
2868 break the code that handles REG_NO_CONFLICT blocks. */
2869 rtx set = single_set (insn);
2870 if (set && set_noop_p (set))
2872 /* Nops get in the way while scheduling, so delete them
2873 now if register allocation has already been done. It
2874 is too risky to try to do this before register
2875 allocation, and there are unlikely to be very many
2878 ??? Should we use delete_insn when the CFG isn't valid? */
2879 if (reload_completed)
2880 delete_insn_and_edges (insn);
2889 #ifdef HAVE_peephole2
/* Per-slot state for the peephole2 matcher: one recorded insn plus
   the liveness information valid just before it.
   NOTE(review): the struct members are on lines elided from this
   extraction -- presumably an insn and a live_before regset, given
   the accesses below; confirm against the full source.  */
2890 struct peep2_insn_data
/* Circular buffer of the last MAX_INSNS_PER_PEEP2 insns seen, plus one
   extra slot for the end-of-block marker.  PEEP2_CURRENT indexes the
   most recently recorded insn.  */
2896 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2897 static int peep2_current;
2898 /* The number of instructions available to match a peep2. */
2899 int peep2_current_count;
2901 /* A non-insn marker indicating the last insn of the block.
2902 The live_before regset for this element is correct, indicating
2903 global_live_at_end for the block. */
2904 #define PEEP2_EOB pc_rtx
2906 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2907 does not exist. Used by the recognizer to find the next insn to match
2908 in a multi-insn pattern. */
2911 peep2_next_insn (int n)
2913 gcc_assert (n <= peep2_current_count);
/* Index into the circular buffer, wrapping around its
   MAX_INSNS_PER_PEEP2 + 1 slots.  */
2916 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2917 n -= MAX_INSNS_PER_PEEP2 + 1;
2919 return peep2_insn_data[n].insn;
2922 /* Return true if REGNO is dead before the Nth non-note insn
/* (comment continuation elided in this extraction)  */
2926 peep2_regno_dead_p (int ofs, int regno)
2928 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
/* Translate the caller-relative offset into a circular-buffer index.  */
2930 ofs += peep2_current;
2931 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2932 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2934 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
/* Dead means: not live immediately before the selected insn.  */
2936 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2939 /* Similarly for a REG. */
2942 peep2_reg_dead_p (int ofs, rtx reg)
2946 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
/* Translate the caller-relative offset into a circular-buffer index.  */
2948 ofs += peep2_current;
2949 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2950 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2952 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
/* A multi-register REG is dead only if every hard register it
   occupies is dead; N counts those registers.
   NOTE(review): the loop decrementing N is on lines elided here.  */
2954 regno = REGNO (reg);
2955 n = hard_regno_nregs[regno][GET_MODE (reg)];
2957 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2962 /* Try to find a hard register of mode MODE, matching the register class in
2963 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2964 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2965 in which case the only condition is that the register must be available
2966 before CURRENT_INSN.
2967 Registers that already have bits set in REG_SET will not be considered.
2969 If an appropriate register is available, it will be returned and the
2970 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2974 peep2_find_free_register (int from, int to, const char *class_str,
2975 enum machine_mode mode, HARD_REG_SET *reg_set)
/* Persists across calls so successive requests rotate through the
   register file instead of always picking the same register.  */
2977 static int search_ofs;
2982 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2983 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
/* Convert FROM/TO from caller-relative offsets into circular-buffer
   indices.  */
2985 from += peep2_current;
2986 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2987 from -= MAX_INSNS_PER_PEEP2 + 1;
2988 to += peep2_current;
2989 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2990 to -= MAX_INSNS_PER_PEEP2 + 1;
2992 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2993 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
/* Accumulate liveness over every insn in [FROM, TO]: the register must
   be free across the entire span.  */
2997 HARD_REG_SET this_live;
2999 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
3001 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3002 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3003 IOR_HARD_REG_SET (live, this_live);
/* 'r' means GENERAL_REGS; otherwise decode the constraint letter.  */
3006 cl = (class_str[0] == 'r' ? GENERAL_REGS
3007 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3009 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3011 int raw_regno, regno, success, j;
3013 /* Distribute the free registers as much as possible. */
3014 raw_regno = search_ofs + i;
3015 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3016 raw_regno -= FIRST_PSEUDO_REGISTER;
3017 #ifdef REG_ALLOC_ORDER
3018 regno = reg_alloc_order[raw_regno];
3023 /* Don't allocate fixed registers. */
3024 if (fixed_regs[regno])
3026 /* Make sure the register is of the right class. */
3027 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
3029 /* And can support the mode we need. */
3030 if (! HARD_REGNO_MODE_OK (regno, mode))
3032 /* And that we don't create an extra save/restore. */
3033 if (! call_used_regs[regno] && ! regs_ever_live[regno])
3035 /* And we don't clobber traceback for noreturn functions. */
3036 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3037 && (! reload_completed || frame_pointer_needed))
/* Every hard register of the multi-register candidate must be neither
   already claimed (REG_SET) nor live across the span.  */
3041 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3043 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3044 || TEST_HARD_REG_BIT (live, regno + j))
/* Success: claim all of the candidate's hard registers for the caller.  */
3052 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3053 SET_HARD_REG_BIT (*reg_set, regno + j);
3055 /* Start the next search with the next register. */
3056 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3058 search_ofs = raw_regno;
3060 return gen_rtx_REG (mode, regno);
3068 /* Perform the peephole2 optimization pass. */
/* Walks each basic block backwards, maintaining a circular window of
   the last few insns plus liveness before each one, and repeatedly
   asks the generated peephole2_insns matcher for a replacement
   sequence.  On a match it splices in the new sequence, re-creates EH
   notes/edges as needed, and backs up the liveness window.
   NOTE(review): many interior lines of this function are elided in
   this extraction.  */
3071 peephole2_optimize (void)
3077 #ifdef HAVE_conditional_execution
3081 bool do_cleanup_cfg = false;
3082 bool do_global_life_update = false;
3083 bool do_rebuild_jump_labels = false;
3085 /* Initialize the regsets we're going to use. */
/* NOTE(review): "®_obstack" below looks like mojibake for
   "&reg_obstack" (an '&'+"reg" mangled by an HTML-entity pass);
   confirm against the pristine source.  */
3086 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3087 peep2_insn_data[i].live_before = ALLOC_REG_SET (®_obstack);
3088 live = ALLOC_REG_SET (®_obstack);
3090 #ifdef HAVE_conditional_execution
3091 blocks = sbitmap_alloc (last_basic_block);
3092 sbitmap_zero (blocks);
3095 count_or_remove_death_notes (NULL, 1);
3098 FOR_EACH_BB_REVERSE (bb)
3100 struct propagate_block_info *pbi;
3101 reg_set_iterator rsi;
3104 /* Indicate that all slots except the last holds invalid data. */
3105 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3106 peep2_insn_data[i].insn = NULL_RTX;
3107 peep2_current_count = 0;
3109 /* Indicate that the last slot contains live_after data. */
3110 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3111 peep2_current = MAX_INSNS_PER_PEEP2;
3113 /* Start up propagation. */
3114 COPY_REG_SET (live, bb->il.rtl->global_live_at_end);
3115 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3117 #ifdef HAVE_conditional_execution
3118 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3120 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
/* Scan the block backwards, insn by insn.  */
3123 for (insn = BB_END (bb); ; insn = prev)
3125 prev = PREV_INSN (insn);
3128 rtx try, before_try, x;
3131 bool was_call = false;
3133 /* Record this insn. */
3134 if (--peep2_current < 0)
3135 peep2_current = MAX_INSNS_PER_PEEP2;
3136 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3137 && peep2_insn_data[peep2_current].insn == NULL_RTX)
3138 peep2_current_count++;
3139 peep2_insn_data[peep2_current].insn = insn;
3140 propagate_one_insn (pbi, insn);
3141 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3143 if (RTX_FRAME_RELATED_P (insn))
3145 /* If an insn has RTX_FRAME_RELATED_P set, peephole
3146 substitution would lose the
3147 REG_FRAME_RELATED_EXPR that is attached. */
/* Flush the window so no pattern can match across this insn.  */
3148 peep2_current_count = 0;
3152 /* Match the peephole. */
3153 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3157 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3158 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3159 cfg-related call notes. */
3160 for (i = 0; i <= match_len; ++i)
3163 rtx old_insn, new_insn, note;
3165 j = i + peep2_current;
3166 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3167 j -= MAX_INSNS_PER_PEEP2 + 1;
3168 old_insn = peep2_insn_data[j].insn;
3169 if (!CALL_P (old_insn))
/* Find the (single) call in the replacement sequence.  */
3174 while (new_insn != NULL_RTX)
3176 if (CALL_P (new_insn))
3178 new_insn = NEXT_INSN (new_insn);
3181 gcc_assert (new_insn != NULL_RTX);
3183 CALL_INSN_FUNCTION_USAGE (new_insn)
3184 = CALL_INSN_FUNCTION_USAGE (old_insn);
/* Transfer the call-related reg notes to the new call.  */
3186 for (note = REG_NOTES (old_insn);
3188 note = XEXP (note, 1))
3189 switch (REG_NOTE_KIND (note))
3193 REG_NOTES (new_insn)
3194 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3196 REG_NOTES (new_insn));
3198 /* Discard all other reg notes. */
3202 /* Croak if there is another call in the sequence. */
3203 while (++i <= match_len)
3205 j = i + peep2_current;
3206 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3207 j -= MAX_INSNS_PER_PEEP2 + 1;
3208 old_insn = peep2_insn_data[j].insn;
3209 gcc_assert (!CALL_P (old_insn));
/* I now indexes the last insn of the matched window.  */
3214 i = match_len + peep2_current;
3215 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3216 i -= MAX_INSNS_PER_PEEP2 + 1;
/* Remember any EH region note so it can be re-applied to the
   replacement sequence below.  */
3218 note = find_reg_note (peep2_insn_data[i].insn,
3219 REG_EH_REGION, NULL_RTX);
3221 /* Replace the old sequence with the new. */
3222 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3223 INSN_LOCATOR (peep2_insn_data[i].insn));
3224 before_try = PREV_INSN (insn);
3225 delete_insn_chain (insn, peep2_insn_data[i].insn);
3227 /* Re-insert the EH_REGION notes. */
3228 if (note || (was_call && nonlocal_goto_handler_labels))
3233 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3234 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3237 for (x = try ; x != before_try ; x = PREV_INSN (x))
3239 || (flag_non_call_exceptions
3240 && may_trap_p (PATTERN (x))
3241 && !find_reg_note (x, REG_EH_REGION, NULL)))
3245 = gen_rtx_EXPR_LIST (REG_EH_REGION,
/* A throwing insn in the middle of the block requires the
   block to be split and a fresh EH edge created.  */
3249 if (x != BB_END (bb) && eh_edge)
3254 nfte = split_block (bb, x);
3255 flags = (eh_edge->flags
3256 & (EDGE_EH | EDGE_ABNORMAL));
3258 flags |= EDGE_ABNORMAL_CALL;
3259 nehe = make_edge (nfte->src, eh_edge->dest,
3262 nehe->probability = eh_edge->probability;
3264 = REG_BR_PROB_BASE - nehe->probability;
3266 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3267 #ifdef HAVE_conditional_execution
3268 SET_BIT (blocks, nfte->dest->index);
3276 /* Converting possibly trapping insn to non-trapping is
3277 possible. Zap dummy outgoing edges. */
3278 do_cleanup_cfg |= purge_dead_edges (bb);
3281 #ifdef HAVE_conditional_execution
3282 /* With conditional execution, we cannot back up the
3283 live information so easily, since the conditional
3284 death data structures are not so self-contained.
3285 So record that we've made a modification to this
3286 block and update life information at the end. */
3287 SET_BIT (blocks, bb->index);
/* Invalidate the whole window after a substitution.  */
3290 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3291 peep2_insn_data[i].insn = NULL_RTX;
3292 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3293 peep2_current_count = 0;
3295 /* Back up lifetime information past the end of the
3296 newly created sequence. */
3297 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3299 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3301 /* Update life information for the new sequence. */
/* Record each new insn in the end-of-window slot and
   propagate liveness across it.  */
3308 i = MAX_INSNS_PER_PEEP2;
3309 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3310 && peep2_insn_data[i].insn == NULL_RTX)
3311 peep2_current_count++;
3312 peep2_insn_data[i].insn = x;
3313 propagate_one_insn (pbi, x);
3314 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3320 /* ??? Should verify that LIVE now matches what we
3321 had before the new sequence. */
3326 /* If we generated a jump instruction, it won't have
3327 JUMP_LABEL set. Recompute after we're done. */
3328 for (x = try; x != before_try; x = PREV_INSN (x))
3331 do_rebuild_jump_labels = true;
3337 if (insn == BB_HEAD (bb))
3341 /* Some peepholes can decide the don't need one or more of their
3342 inputs. If this happens, local life update is not enough. */
3343 EXECUTE_IF_AND_COMPL_IN_BITMAP (bb->il.rtl->global_live_at_start, live,
3346 do_global_life_update = true;
3350 free_propagate_block_info (pbi);
/* Release per-pass resources and perform the deferred fix-ups.  */
3353 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3354 FREE_REG_SET (peep2_insn_data[i].live_before);
3355 FREE_REG_SET (live);
3357 if (do_rebuild_jump_labels)
3358 rebuild_jump_labels (get_insns ());
3360 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3361 we've changed global life since exception handlers are no longer
3366 do_global_life_update = true;
3368 if (do_global_life_update)
3369 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3370 #ifdef HAVE_conditional_execution
3373 count_or_remove_death_notes (blocks, 1);
3374 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3376 sbitmap_free (blocks);
3379 #endif /* HAVE_peephole2 */
3381 /* Common predicates for use with define_bypass. */
3383 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3384 data not the address operand(s) of the store. IN_INSN and OUT_INSN
3385 must be either a single_set or a PARALLEL with SETs inside. */
3388 store_data_bypass_p (rtx out_insn, rtx in_insn)
3390 rtx out_set, in_set;
3391 rtx out_pat, in_pat;
3392 rtx out_exp, in_exp;
/* Case 1: IN_INSN is a single_set store.  The bypass applies only if
   no destination of OUT_INSN is mentioned in the store ADDRESS
   (SET_DEST of the store, i.e. the MEM).  */
3395 in_set = single_set (in_insn);
3398 if (!MEM_P (SET_DEST (in_set)))
3401 out_set = single_set (out_insn);
3404 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
/* OUT_INSN is a PARALLEL: check every SET it contains against the
   store address; CLOBBERs are irrelevant.  */
3409 out_pat = PATTERN (out_insn);
3411 if (GET_CODE (out_pat) != PARALLEL)
3414 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3416 out_exp = XVECEXP (out_pat, 0, i);
3418 if (GET_CODE (out_exp) == CLOBBER)
3421 gcc_assert (GET_CODE (out_exp) == SET);
3423 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
/* Case 2: IN_INSN is a PARALLEL.  Apply the same address check to
   each store (MEM destination) inside it.  */
3430 in_pat = PATTERN (in_insn);
3431 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3433 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3435 in_exp = XVECEXP (in_pat, 0, i);
3437 if (GET_CODE (in_exp) == CLOBBER)
3440 gcc_assert (GET_CODE (in_exp) == SET);
3442 if (!MEM_P (SET_DEST (in_exp)))
3445 out_set = single_set (out_insn);
3448 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3453 out_pat = PATTERN (out_insn);
3454 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3456 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3458 out_exp = XVECEXP (out_pat, 0, j);
3460 if (GET_CODE (out_exp) == CLOBBER)
3463 gcc_assert (GET_CODE (out_exp) == SET);
3465 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3475 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3476 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3477 or multiple set; IN_INSN should be single_set for truth, but for convenience
3478 of insn categorization may be any JUMP or CALL insn. */
3481 if_test_bypass_p (rtx out_insn, rtx in_insn)
3483 rtx out_set, in_set;
3485 in_set = single_set (in_insn);
3488 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3492 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
/* From here on IN_SET is the IF_THEN_ELSE; operands 1 and 2 are the
   THEN and ELSE arms.  */
3494 in_set = SET_SRC (in_set);
3496 out_set = single_set (out_insn);
3499 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3500 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
/* OUT_INSN is a PARALLEL: each SET inside must avoid the THEN/ELSE
   arms.  */
3508 out_pat = PATTERN (out_insn);
3509 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3511 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3513 rtx exp = XVECEXP (out_pat, 0, i);
3515 if (GET_CODE (exp) == CLOBBER)
3518 gcc_assert (GET_CODE (exp) == SET);
/* NOTE(review): this loop extracts EXP and asserts it is a SET, but
   then tests SET_DEST (out_set) -- which is the single_set result
   from the non-PARALLEL path -- instead of SET_DEST (exp).  Looks
   like a copy/paste defect (compare the analogous loops in
   store_data_bypass_p above); confirm against the full source.  */
3520 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3521 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
/* Pass gate: run peephole2 only when optimizing and the user has not
   disabled it (-fpeephole2 / default at -O2).  */
3530 gate_handle_peephole2 (void)
3532 return (optimize > 0 && flag_peephole2);
/* Pass execute hook: invoke the peephole2 optimizer, which exists only
   when the target defines peephole2 patterns (HAVE_peephole2).  */
3536 rest_of_handle_peephole2 (void)
3538 #ifdef HAVE_peephole2
3539 peephole2_optimize ();
/* Pass descriptor for the peephole2 pass; wires the gate and execute
   hooks above into the pass manager.  */
3544 struct tree_opt_pass pass_peephole2 =
3546 "peephole2", /* name */
3547 gate_handle_peephole2, /* gate */
3548 rest_of_handle_peephole2, /* execute */
3551 0, /* static_pass_number */
3552 TV_PEEPHOLE2, /* tv_id */
3553 0, /* properties_required */
3554 0, /* properties_provided */
3555 0, /* properties_destroyed */
3556 0, /* todo_flags_start */
3557 TODO_dump_func, /* todo_flags_finish */
/* Pass execute hook: split all insns, requesting a life-info update
   (the argument 1 is UPD_LIFE).  Shared by the split1/split2 passes.  */
3562 rest_of_handle_split_all_insns (void)
3564 split_all_insns (1);
/* Pass descriptor for the first full insn-splitting pass ("split1").
   No gate field is visible here -- the pass apparently always runs.  */
3568 struct tree_opt_pass pass_split_all_insns =
3570 "split1", /* name */
3572 rest_of_handle_split_all_insns, /* execute */
3575 0, /* static_pass_number */
3577 0, /* properties_required */
3578 0, /* properties_provided */
3579 0, /* properties_destroyed */
3580 0, /* todo_flags_start */
3581 TODO_dump_func, /* todo_flags_finish */
3585 /* The placement of the splitting that we do for shorten_branches
3586 depends on whether regstack is used by the target or not. */
/* Gate for the final ("split3") splitting pass: only relevant when the
   target has insn-length attributes and does NOT use the reg-stack
   (reg-stack targets split earlier -- see the split2 gate below).  */
3588 gate_do_final_split (void)
3590 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
/* Pass descriptor for the late split pass run for shorten_branches
   ("split3"); uses the no-CFG splitter since it runs very late.  */
3597 struct tree_opt_pass pass_split_for_shorten_branches =
3599 "split3", /* name */
3600 gate_do_final_split, /* gate */
3601 split_all_insns_noflow, /* execute */
3604 0, /* static_pass_number */
3605 TV_SHORTEN_BRANCH, /* tv_id */
3606 0, /* properties_required */
3607 0, /* properties_provided */
3608 0, /* properties_destroyed */
3609 0, /* todo_flags_start */
3610 TODO_dump_func, /* todo_flags_finish */
/* Gate for the "split2" pass run before reg-stack conversion: needed
   only on reg-stack targets with insn-length attributes.  */
3616 gate_handle_split_before_regstack (void)
3618 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3619 /* If flow2 creates new instructions which need splitting
3620 and scheduling after reload is not done, they might not be
3621 split until final which doesn't allow splitting
3622 if HAVE_ATTR_length. */
3623 # ifdef INSN_SCHEDULING
3624 return (optimize && !flag_schedule_insns_after_reload);
3633 struct tree_opt_pass pass_split_before_regstack =
3635 "split2", /* name */
3636 gate_handle_split_before_regstack, /* gate */
3637 rest_of_handle_split_all_insns, /* execute */
3640 0, /* static_pass_number */
3641 TV_SHORTEN_BRANCH, /* tv_id */
3642 0, /* properties_required */
3643 0, /* properties_provided */
3644 0, /* properties_destroyed */
3645 0, /* todo_flags_start */
3646 TODO_dump_func, /* todo_flags_finish */