1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "insn-config.h"
30 #include "insn-attr.h"
31 #include "hard-reg-set.h"
39 #include "basic-block.h"
43 #ifndef STACK_PUSH_CODE
44 #ifdef STACK_GROWS_DOWNWARD
45 #define STACK_PUSH_CODE PRE_DEC
47 #define STACK_PUSH_CODE PRE_INC
51 #ifndef STACK_POP_CODE
52 #ifdef STACK_GROWS_DOWNWARD
53 #define STACK_POP_CODE POST_INC
55 #define STACK_POP_CODE POST_DEC
59 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
60 static rtx *find_single_use_1 (rtx, rtx *);
61 static void validate_replace_src_1 (rtx *, void *);
62 static rtx split_insn (rtx);
64 /* Nonzero means allow operands to be volatile.
65 This should be 0 if you are generating rtl, such as if you are calling
66 the functions in optabs.c and expmed.c (most of the time).
67 This should be 1 if all valid insns need to be recognized,
68 such as in regclass.c and final.c and reload.c.
70 init_recog and init_recog_no_volatile are responsible for setting this. */
72 int volatile_ok;
74 struct recog_data recog_data;
76 /* Contains a vector of operand_alternative structures for every operand.
77 Set up by preprocess_constraints. */
78 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
80 /* On return from `constrain_operands', indicate which alternative
81 was satisfied. */
83 int which_alternative;
85 /* Nonzero after end of reload pass.
86 Set to 1 or 0 by toplev.c.
87 Controls the significance of (SUBREG (MEM)). */
89 int reload_completed;
91 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
92 int epilogue_completed;
94 /* Initialize data used by the function `recog'.
95 This must be called once in the compilation of a function
96 before any insn recognition may be done in the function. */
99 init_recog_no_volatile (void)
110 /* Try recognizing the instruction INSN,
111 and return the code number that results.
112 Remember the code so that repeated calls do not
113 need to spend the time for actual rerecognition.
115 This function is the normal interface to instruction recognition.
116 The automatically-generated function `recog' is normally called
117 through this one. (The only exception is in combine.c.) */
120 recog_memoized_1 (rtx insn)
122 if (INSN_CODE (insn) < 0)
123 INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
124 return INSN_CODE (insn);
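/* Illustrative usage sketch, not part of the original file: passes normally
   go through the memoizing wrapper recog_memoized (assumed here to be the
   usual recog.h wrapper around recog_memoized_1), so repeated queries reuse
   the cached INSN_CODE.  */

static int
insn_is_recognized_sketch (rtx insn)
{
  /* Negative means the pattern matches nothing in the machine description;
     otherwise INSN_CODE (insn) now caches the matching insn code.  */
  return recog_memoized (insn) >= 0;
}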
127 /* Check that X is an insn-body for an `asm' with operands
128 and that the operands mentioned in it are legitimate. */
131 check_asm_operands (rtx x)
135 const char **constraints;
138 /* Post-reload, be more strict with things. */
139 if (reload_completed)
141 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
142 extract_insn (make_insn_raw (x));
143 constrain_operands (1);
144 return which_alternative >= 0;
147 noperands = asm_noperands (x);
153 operands = alloca (noperands * sizeof (rtx));
154 constraints = alloca (noperands * sizeof (char *));
156 decode_asm_operands (x, operands, NULL, constraints, NULL);
158 for (i = 0; i < noperands; i++)
160 const char *c = constraints[i];
163 if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
164 c = constraints[c[0] - '0'];
166 if (! asm_operand_ok (operands[i], c))
173 /* Static data for the next two routines. */
175 typedef struct change_t
183 static change_t *changes;
184 static int changes_allocated;
186 static int num_changes = 0;
188 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
189 at which NEW will be placed. If OBJECT is zero, no validation is done,
190 the change is simply made.
192 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
193 will be called with the address and mode as parameters. If OBJECT is
194 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
195 the change in place.
197 IN_GROUP is nonzero if this is part of a group of changes that must be
198 performed as a group. In that case, the changes will be stored. The
199 function `apply_change_group' will validate and apply the changes.
201 If IN_GROUP is zero, this is a single change. Try to recognize the insn
202 or validate the memory reference with the change applied. If the result
203 is not valid for the machine, suppress the change and return zero.
204 Otherwise, perform the change and return 1. */
207 validate_change (rtx object, rtx *loc, rtx new, int in_group)
211 if (old == new || rtx_equal_p (old, new))
214 if (in_group == 0 && num_changes != 0)
219 /* Save the information describing this change. */
220 if (num_changes >= changes_allocated)
222 if (changes_allocated == 0)
223 /* This value allows for repeated substitutions inside complex
224 indexed addresses, or changes in up to 5 insns. */
225 changes_allocated = MAX_RECOG_OPERANDS * 5;
227 changes_allocated *= 2;
229 changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
232 changes[num_changes].object = object;
233 changes[num_changes].loc = loc;
234 changes[num_changes].old = old;
236 if (object && !MEM_P (object))
238 /* Set INSN_CODE to force rerecognition of insn. Save old code in
239 case invalid. */
240 changes[num_changes].old_code = INSN_CODE (object);
241 INSN_CODE (object) = -1;
246 /* If we are making a group of changes, return 1. Otherwise, validate the
247 change group we made. */
252 return apply_change_group ();
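/* Illustrative usage sketch, not part of the original file: two operand
   replacements queued as a group either both survive or are both backed
   out.  LOC0/NEW0 and LOC1/NEW1 are hypothetical.  */

static int
replace_pair_sketch (rtx insn, rtx *loc0, rtx new0, rtx *loc1, rtx new1)
{
  validate_change (insn, loc0, new0, 1);	/* queued, not yet validated */
  validate_change (insn, loc1, new1, 1);	/* queued, not yet validated */
  return apply_change_group ();			/* all-or-nothing validation */
}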
255 /* This subroutine of apply_change_group verifies whether the changes to INSN
256 were valid; i.e. whether INSN can still be recognized. */
259 insn_invalid_p (rtx insn)
261 rtx pat = PATTERN (insn);
262 int num_clobbers = 0;
263 /* If we are before reload and the pattern is a SET, see if we can add
264 clobbers. */
265 int icode = recog (pat, insn,
266 (GET_CODE (pat) == SET
267 && ! reload_completed && ! reload_in_progress)
268 ? &num_clobbers : 0);
269 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
272 /* If this is an asm and the operands aren't legal, then fail. Likewise if
273 this is not an asm and the insn wasn't recognized. */
274 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
275 || (!is_asm && icode < 0))
278 /* If we have to add CLOBBERs, fail if we have to add ones that reference
279 hard registers since our callers can't know if they are live or not.
280 Otherwise, add them. */
281 if (num_clobbers > 0)
285 if (added_clobbers_hard_reg_p (icode))
288 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
289 XVECEXP (newpat, 0, 0) = pat;
290 add_clobbers (newpat, icode);
291 PATTERN (insn) = pat = newpat;
294 /* After reload, verify that all constraints are satisfied. */
295 if (reload_completed)
299 if (! constrain_operands (1))
303 INSN_CODE (insn) = icode;
307 /* Return number of changes made and not validated yet. */
309 num_changes_pending (void)
314 /* Apply a group of changes previously issued with `validate_change'.
315 Return 1 if all changes are valid, zero otherwise. */
318 apply_change_group (void)
321 rtx last_validated = NULL_RTX;
323 /* The changes have been applied and all INSN_CODEs have been reset to force
324 rerecognition.
326 The changes are valid if we aren't given an object, or if we are
327 given a MEM and it still is a valid address, or if this is an insn
328 and it is recognized. In the latter case, if reload has completed,
329 we also require that the operands meet the constraints for
330 the insn. */
332 for (i = 0; i < num_changes; i++)
334 rtx object = changes[i].object;
336 /* If there is no object to test or if it is the same as the one we
337 already tested, ignore it. */
338 if (object == 0 || object == last_validated)
343 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
346 else if (insn_invalid_p (object))
348 rtx pat = PATTERN (object);
350 /* Perhaps we couldn't recognize the insn because there were
351 extra CLOBBERs at the end. If so, try to re-recognize
352 without the last CLOBBER (later iterations will cause each of
353 them to be eliminated, in turn). But don't do this if we
354 have an ASM_OPERAND. */
355 if (GET_CODE (pat) == PARALLEL
356 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
357 && asm_noperands (PATTERN (object)) < 0)
361 if (XVECLEN (pat, 0) == 2)
362 newpat = XVECEXP (pat, 0, 0);
368 = gen_rtx_PARALLEL (VOIDmode,
369 rtvec_alloc (XVECLEN (pat, 0) - 1));
370 for (j = 0; j < XVECLEN (newpat, 0); j++)
371 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
374 /* Add a new change to this group to replace the pattern
375 with this new pattern. Then consider this change
376 as having succeeded. The change we added will
377 cause the entire call to fail if things remain invalid.
379 Note that this can lose if a later change than the one
380 we are processing specified &XVECEXP (PATTERN (object), 0, X)
381 but this shouldn't occur. */
383 validate_change (object, &PATTERN (object), newpat, 1);
386 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
387 /* If this insn is a CLOBBER or USE, it is always valid, but is
388 never recognized. */
389 continue;
393 last_validated = object;
396 if (i == num_changes)
400 for (i = 0; i < num_changes; i++)
401 if (changes[i].object
402 && INSN_P (changes[i].object)
403 && (bb = BLOCK_FOR_INSN (changes[i].object)))
404 bb->flags |= BB_DIRTY;
416 /* Return the number of changes so far in the current group. */
419 num_validated_changes (void)
424 /* Retract the changes numbered NUM and up. */
427 cancel_changes (int num)
431 /* Back out all the changes. Do this in the opposite order in which
432 they were made. */
433 for (i = num_changes - 1; i >= num; i--)
435 *changes[i].loc = changes[i].old;
436 if (changes[i].object && !MEM_P (changes[i].object))
437 INSN_CODE (changes[i].object) = changes[i].old_code;
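/* Illustrative usage sketch, not part of the original file: a caller that
   may abandon its own tentative changes records the high-water mark with
   num_validated_changes and cancels back to it.  INSN, LOC, X and WANT_IT
   are hypothetical.  */

static int
grouped_change_sketch (rtx insn, rtx *loc, rtx x, int want_it)
{
  int start = num_validated_changes ();

  validate_change (insn, loc, x, 1);	/* queue as part of a group */

  if (! want_it)
    {
      cancel_changes (start);		/* retract only the changes queued above */
      return 0;
    }
  return apply_change_group ();		/* validate; backs everything out on failure */
}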
442 /* Replace every occurrence of FROM in X with TO. Mark each change with
443 validate_change passing OBJECT. */
446 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
452 enum machine_mode op0_mode = VOIDmode;
453 int prev_changes = num_changes;
460 fmt = GET_RTX_FORMAT (code);
462 op0_mode = GET_MODE (XEXP (x, 0));
464 /* X matches FROM if it is the same rtx or they are both referring to the
465 same register in the same mode. Avoid calling rtx_equal_p unless the
466 operands look similar. */
469 || (REG_P (x) && REG_P (from)
470 && GET_MODE (x) == GET_MODE (from)
471 && REGNO (x) == REGNO (from))
472 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
473 && rtx_equal_p (x, from)))
475 validate_change (object, loc, to, 1);
479 /* Call ourselves recursively to perform the replacements.
480 We must not replace inside an already-replaced expression, otherwise we
481 get infinite recursion for replacements like (reg X)->(subreg (reg X))
482 done by regmove, so we must special-case shared ASM_OPERANDS. */
484 if (GET_CODE (x) == PARALLEL)
486 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
488 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
489 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
491 /* Verify that operands are really shared. */
492 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0))) !=
493 ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, j))))
495 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
499 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
503 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
506 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
507 else if (fmt[i] == 'E')
508 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
509 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
512 /* If we didn't substitute, there is nothing more to do. */
513 if (num_changes == prev_changes)
516 /* Allow substituted expression to have different mode. This is used by
517 regmove to change mode of pseudo register. */
518 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
519 op0_mode = GET_MODE (XEXP (x, 0));
521 /* Do changes needed to keep rtx consistent. Don't do any other
522 simplifications, as it is not our job. */
524 if (SWAPPABLE_OPERANDS_P (x)
525 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
527 validate_change (object, loc,
528 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
529 : swap_condition (code),
530 GET_MODE (x), XEXP (x, 1),
539 /* If we have a PLUS whose second operand is now a CONST_INT, use
540 simplify_gen_binary to try to simplify it.
541 ??? We may want later to remove this, once simplification is
542 separated from this function. */
543 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
544 validate_change (object, loc,
546 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
549 if (GET_CODE (XEXP (x, 1)) == CONST_INT
550 || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
551 validate_change (object, loc,
553 (PLUS, GET_MODE (x), XEXP (x, 0),
554 simplify_gen_unary (NEG,
555 GET_MODE (x), XEXP (x, 1),
560 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
562 new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
564 /* If any of the above failed, substitute in something that
565 we know won't be recognized. */
567 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
568 validate_change (object, loc, new, 1);
572 /* All subregs possible to simplify should be simplified. */
573 new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
576 /* Subregs of VOIDmode operands are incorrect. */
577 if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
578 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
580 validate_change (object, loc, new, 1);
584 /* If we are replacing a register with memory, try to change the memory
585 to be the mode required for memory in extract operations (this isn't
586 likely to be an insertion operation; if it was, nothing bad will
587 happen, we might just fail in some cases). */
589 if (MEM_P (XEXP (x, 0))
590 && GET_CODE (XEXP (x, 1)) == CONST_INT
591 && GET_CODE (XEXP (x, 2)) == CONST_INT
592 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
593 && !MEM_VOLATILE_P (XEXP (x, 0)))
595 enum machine_mode wanted_mode = VOIDmode;
596 enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
597 int pos = INTVAL (XEXP (x, 2));
599 if (GET_CODE (x) == ZERO_EXTRACT)
601 enum machine_mode new_mode
602 = mode_for_extraction (EP_extzv, 1);
603 if (new_mode != MAX_MACHINE_MODE)
604 wanted_mode = new_mode;
606 else if (GET_CODE (x) == SIGN_EXTRACT)
608 enum machine_mode new_mode
609 = mode_for_extraction (EP_extv, 1);
610 if (new_mode != MAX_MACHINE_MODE)
611 wanted_mode = new_mode;
614 /* If we have a narrower mode, we can do something. */
615 if (wanted_mode != VOIDmode
616 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
618 int offset = pos / BITS_PER_UNIT;
621 /* If the bytes and bits are counted differently, we
622 must adjust the offset. */
623 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
625 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
628 pos %= GET_MODE_BITSIZE (wanted_mode);
630 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
632 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
633 validate_change (object, &XEXP (x, 0), newmem, 1);
644 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
645 with TO. After all changes have been made, validate by seeing
646 if INSN is still valid. */
649 validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
651 validate_replace_rtx_1 (loc, from, to, insn);
652 return apply_change_group ();
655 /* Try replacing every occurrence of FROM in INSN with TO. After all
656 changes have been made, validate by seeing if INSN is still valid. */
659 validate_replace_rtx (rtx from, rtx to, rtx insn)
661 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
662 return apply_change_group ();
665 /* Try replacing every occurrence of FROM in INSN with TO. */
668 validate_replace_rtx_group (rtx from, rtx to, rtx insn)
670 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
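/* Illustrative usage sketch, not part of the original file: a wholesale
   replacement can be grouped with additional changes before validation.
   FROM_REG, TO_REG, LOC and X are hypothetical.  */

static int
substitute_and_more_sketch (rtx from_reg, rtx to_reg, rtx insn, rtx *loc, rtx x)
{
  validate_replace_rtx_group (from_reg, to_reg, insn);	/* queue replacements */
  validate_change (insn, loc, x, 1);			/* queue one more change */
  return apply_change_group ();				/* all-or-nothing */
}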
673 /* Function called by note_uses to replace used subexpressions. */
674 struct validate_replace_src_data
676 rtx from; /* Old RTX */
677 rtx to; /* New RTX */
678 rtx insn; /* Insn in which substitution is occurring. */
682 validate_replace_src_1 (rtx *x, void *data)
684 struct validate_replace_src_data *d
685 = (struct validate_replace_src_data *) data;
687 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
690 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
694 validate_replace_src_group (rtx from, rtx to, rtx insn)
696 struct validate_replace_src_data d;
701 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
705 /* Return 1 if the insn using CC0 set by INSN does not contain
706 any ordered tests applied to the condition codes.
707 EQ and NE tests do not count. */
710 next_insn_tests_no_inequality (rtx insn)
712 rtx next = next_cc0_user (insn);
714 /* If there is no next insn, we have to take the conservative choice. */
718 return ((GET_CODE (next) == JUMP_INSN
719 || GET_CODE (next) == INSN
720 || GET_CODE (next) == CALL_INSN)
721 && ! inequality_comparisons_p (PATTERN (next)));
725 /* This is used by find_single_use to locate an rtx that contains exactly one
726 use of DEST, which is typically either a REG or CC0. It returns a
727 pointer to the innermost rtx expression containing DEST. Appearances of
728 DEST that are being used to totally replace it are not counted. */
731 find_single_use_1 (rtx dest, rtx *loc)
734 enum rtx_code code = GET_CODE (x);
752 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
753 of a REG that occupies all of the REG, the insn uses DEST if
754 it is mentioned in the destination or the source. Otherwise, we
755 need only check the source. */
756 if (GET_CODE (SET_DEST (x)) != CC0
757 && GET_CODE (SET_DEST (x)) != PC
758 && !REG_P (SET_DEST (x))
759 && ! (GET_CODE (SET_DEST (x)) == SUBREG
760 && REG_P (SUBREG_REG (SET_DEST (x)))
761 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
762 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
763 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
764 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
767 return find_single_use_1 (dest, &SET_SRC (x));
771 return find_single_use_1 (dest, &XEXP (x, 0));
777 /* If it wasn't one of the common cases above, check each expression and
778 vector of this code. Look for a unique usage of DEST. */
780 fmt = GET_RTX_FORMAT (code);
781 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
785 if (dest == XEXP (x, i)
786 || (REG_P (dest) && REG_P (XEXP (x, i))
787 && REGNO (dest) == REGNO (XEXP (x, i))))
790 this_result = find_single_use_1 (dest, &XEXP (x, i));
793 result = this_result;
794 else if (this_result)
795 /* Duplicate usage. */
798 else if (fmt[i] == 'E')
802 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
804 if (XVECEXP (x, i, j) == dest
806 && REG_P (XVECEXP (x, i, j))
807 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
810 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
813 result = this_result;
814 else if (this_result)
823 /* See if DEST, produced in INSN, is used only a single time in the
824 sequel. If so, return a pointer to the innermost rtx expression in which
825 it is used.
827 If PLOC is nonzero, *PLOC is set to the insn containing the single use.
829 This routine will usually return zero either before flow is called (because
830 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
831 note can't be trusted).
833 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
834 care about REG_DEAD notes or LOG_LINKS.
836 Otherwise, we find the single use by finding an insn that has a
837 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
838 only referenced once in that insn, we know that it must be the first
839 and last insn referencing DEST. */
842 find_single_use (rtx dest, rtx insn, rtx *ploc)
851 next = NEXT_INSN (insn);
853 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
856 result = find_single_use_1 (dest, &PATTERN (next));
863 if (reload_completed || reload_in_progress || !REG_P (dest))
866 for (next = next_nonnote_insn (insn);
867 next != 0 && GET_CODE (next) != CODE_LABEL;
868 next = next_nonnote_insn (next))
869 if (INSN_P (next) && dead_or_set_p (next, dest))
871 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
872 if (XEXP (link, 0) == insn)
877 result = find_single_use_1 (dest, &PATTERN (next));
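/* Illustrative usage sketch, not part of the original file: a combine-like
   pass asks whether the value set by INSN is consumed exactly once, and if
   so where:

     rtx use_insn;
     rtx *usep = find_single_use (dest, insn, &use_insn);
     if (usep)
       ... *USEP is the unique reference to DEST, found inside USE_INSN ...

   DEST, INSN and USE_INSN are hypothetical names.  */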
887 /* Return 1 if OP is a valid general operand for machine mode MODE.
888 This is either a register reference, a memory reference,
889 or a constant. In the case of a memory reference, the address
890 is checked for general validity for the target machine.
892 Register and memory references must have mode MODE in order to be valid,
893 but some constants have no machine mode and are valid for any mode.
895 If MODE is VOIDmode, OP is checked for validity for whatever mode
896 it has.
898 The main use of this function is as a predicate in match_operand
899 expressions in the machine description.
901 For an explanation of this function's behavior for registers of
902 class NO_REGS, see the comment for `register_operand'. */
905 general_operand (rtx op, enum machine_mode mode)
907 enum rtx_code code = GET_CODE (op);
909 if (mode == VOIDmode)
910 mode = GET_MODE (op);
912 /* Don't accept CONST_INT or anything similar
913 if the caller wants something floating. */
914 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
915 && GET_MODE_CLASS (mode) != MODE_INT
916 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
919 if (GET_CODE (op) == CONST_INT
921 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
925 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
927 #ifdef LEGITIMATE_PIC_OPERAND_P
928 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
930 && LEGITIMATE_CONSTANT_P (op));
932 /* Except for certain constants with VOIDmode, already checked for,
933 OP's mode must match MODE if MODE specifies a mode. */
935 if (GET_MODE (op) != mode)
940 rtx sub = SUBREG_REG (op);
942 #ifdef INSN_SCHEDULING
943 /* On machines that have insn scheduling, we want all memory
944 references to be explicit, so outlaw paradoxical SUBREGs. */
946 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
949 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
950 may result in an incorrect reference. We should simplify all valid
951 subregs of MEM anyway. But allow this after reload because we
952 might be called from cleanup_subreg_operands.
954 ??? This is a kludge. */
955 if (!reload_completed && SUBREG_BYTE (op) != 0
959 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
960 create such rtl, and we must reject it. */
961 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
962 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
966 code = GET_CODE (op);
970 /* A register whose class is NO_REGS is not a general operand. */
971 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
972 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
976 rtx y = XEXP (op, 0);
978 if (! volatile_ok && MEM_VOLATILE_P (op))
981 /* Use the mem's mode, since it will be reloaded thus. */
982 mode = GET_MODE (op);
983 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
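/* Illustrative usage sketch, not part of the original file: the predicates
   above are normally referenced from match_operand in the machine
   description, but they can also be called directly from C code.  */

static int
operand_is_general_simode_sketch (rtx x)
{
  /* Nonzero when X is a register, a valid SImode memory reference, or a
     constant acceptable in SImode.  */
  return general_operand (x, SImode);
}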
992 /* Return 1 if OP is a valid memory address for a memory reference
993 in mode MODE.
995 The main use of this function is as a predicate in match_operand
996 expressions in the machine description. */
999 address_operand (rtx op, enum machine_mode mode)
1001 return memory_address_p (mode, op);
1004 /* Return 1 if OP is a register reference of mode MODE.
1005 If MODE is VOIDmode, accept a register in any mode.
1007 The main use of this function is as a predicate in match_operand
1008 expressions in the machine description.
1010 As a special exception, registers whose class is NO_REGS are
1011 not accepted by `register_operand'. The reason for this change
1012 is to allow the representation of special architecture artifacts
1013 (such as a condition code register) without extending the rtl
1014 definitions. Since registers of class NO_REGS cannot be used
1015 as registers in any case where register classes are examined,
1016 it is most consistent to keep this function from accepting them. */
1019 register_operand (rtx op, enum machine_mode mode)
1021 if (GET_MODE (op) != mode && mode != VOIDmode)
1024 if (GET_CODE (op) == SUBREG)
1026 rtx sub = SUBREG_REG (op);
1028 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1029 because it is guaranteed to be reloaded into one.
1030 Just make sure the MEM is valid in itself.
1031 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1032 but currently it does result from (SUBREG (REG)...) where the
1033 reg went on the stack.) */
1034 if (! reload_completed && MEM_P (sub))
1035 return general_operand (op, mode);
1037 #ifdef CANNOT_CHANGE_MODE_CLASS
1039 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1040 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1041 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1042 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
1046 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1047 create such rtl, and we must reject it. */
1048 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1049 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1055 /* We don't consider registers whose class is NO_REGS
1056 to be a register operand. */
1058 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1059 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1062 /* Return 1 for a register in Pmode; ignore the tested mode. */
1065 pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1067 return register_operand (op, Pmode);
1070 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1071 or a hard register. */
1074 scratch_operand (rtx op, enum machine_mode mode)
1076 if (GET_MODE (op) != mode && mode != VOIDmode)
1079 return (GET_CODE (op) == SCRATCH
1081 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1084 /* Return 1 if OP is a valid immediate operand for mode MODE.
1086 The main use of this function is as a predicate in match_operand
1087 expressions in the machine description. */
1090 immediate_operand (rtx op, enum machine_mode mode)
1092 /* Don't accept CONST_INT or anything similar
1093 if the caller wants something floating. */
1094 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1095 && GET_MODE_CLASS (mode) != MODE_INT
1096 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1099 if (GET_CODE (op) == CONST_INT
1101 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1104 return (CONSTANT_P (op)
1105 && (GET_MODE (op) == mode || mode == VOIDmode
1106 || GET_MODE (op) == VOIDmode)
1107 #ifdef LEGITIMATE_PIC_OPERAND_P
1108 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1110 && LEGITIMATE_CONSTANT_P (op));
1113 /* Returns 1 if OP is an operand that is a CONST_INT. */
1116 const_int_operand (rtx op, enum machine_mode mode)
1118 if (GET_CODE (op) != CONST_INT)
1121 if (mode != VOIDmode
1122 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1128 /* Returns 1 if OP is an operand that is a constant integer or constant
1129 floating-point number. */
1132 const_double_operand (rtx op, enum machine_mode mode)
1134 /* Don't accept CONST_INT or anything similar
1135 if the caller wants something floating. */
1136 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1137 && GET_MODE_CLASS (mode) != MODE_INT
1138 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1141 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1142 && (mode == VOIDmode || GET_MODE (op) == mode
1143 || GET_MODE (op) == VOIDmode));
1146 /* Return 1 if OP is a general operand that is not an immediate operand. */
1149 nonimmediate_operand (rtx op, enum machine_mode mode)
1151 return (general_operand (op, mode) && ! CONSTANT_P (op));
1154 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1157 nonmemory_operand (rtx op, enum machine_mode mode)
1159 if (CONSTANT_P (op))
1161 /* Don't accept CONST_INT or anything similar
1162 if the caller wants something floating. */
1163 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1164 && GET_MODE_CLASS (mode) != MODE_INT
1165 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1168 if (GET_CODE (op) == CONST_INT
1170 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1173 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1174 || mode == VOIDmode)
1175 #ifdef LEGITIMATE_PIC_OPERAND_P
1176 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1178 && LEGITIMATE_CONSTANT_P (op));
1181 if (GET_MODE (op) != mode && mode != VOIDmode)
1184 if (GET_CODE (op) == SUBREG)
1186 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1187 because it is guaranteed to be reloaded into one.
1188 Just make sure the MEM is valid in itself.
1189 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1190 but currently it does result from (SUBREG (REG)...) where the
1191 reg went on the stack.) */
1192 if (! reload_completed && MEM_P (SUBREG_REG (op)))
1193 return general_operand (op, mode);
1194 op = SUBREG_REG (op);
1197 /* We don't consider registers whose class is NO_REGS
1198 to be a register operand. */
1200 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1201 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1204 /* Return 1 if OP is a valid operand that stands for pushing a
1205 value of mode MODE onto the stack.
1207 The main use of this function is as a predicate in match_operand
1208 expressions in the machine description. */
1211 push_operand (rtx op, enum machine_mode mode)
1213 unsigned int rounded_size = GET_MODE_SIZE (mode);
1215 #ifdef PUSH_ROUNDING
1216 rounded_size = PUSH_ROUNDING (rounded_size);
1222 if (mode != VOIDmode && GET_MODE (op) != mode)
1227 if (rounded_size == GET_MODE_SIZE (mode))
1229 if (GET_CODE (op) != STACK_PUSH_CODE)
1234 if (GET_CODE (op) != PRE_MODIFY
1235 || GET_CODE (XEXP (op, 1)) != PLUS
1236 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1237 || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
1238 #ifdef STACK_GROWS_DOWNWARD
1239 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1241 || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
1247 return XEXP (op, 0) == stack_pointer_rtx;
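/* Illustrative note, not part of the original file: with the default
   STACK_PUSH_CODE of PRE_DEC (stack growing downward), push_operand accepts
   a MEM of the form

     (mem:M (pre_dec (reg sp)))

   and, when padding makes the rounded size differ from GET_MODE_SIZE, the
   PRE_MODIFY form checked above.  */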
1250 /* Return 1 if OP is a valid operand that stands for popping a
1251 value of mode MODE off the stack.
1253 The main use of this function is as a predicate in match_operand
1254 expressions in the machine description. */
1257 pop_operand (rtx op, enum machine_mode mode)
1262 if (mode != VOIDmode && GET_MODE (op) != mode)
1267 if (GET_CODE (op) != STACK_POP_CODE)
1270 return XEXP (op, 0) == stack_pointer_rtx;
1273 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1276 memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
1278 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1285 /* Return 1 if OP is a valid memory reference with mode MODE,
1286 including a valid address.
1288 The main use of this function is as a predicate in match_operand
1289 expressions in the machine description. */
1292 memory_operand (rtx op, enum machine_mode mode)
1296 if (! reload_completed)
1297 /* Note that no SUBREG is a memory operand before end of reload pass,
1298 because (SUBREG (MEM...)) forces reloading into a register. */
1299 return MEM_P (op) && general_operand (op, mode);
1301 if (mode != VOIDmode && GET_MODE (op) != mode)
1305 if (GET_CODE (inner) == SUBREG)
1306 inner = SUBREG_REG (inner);
1308 return (MEM_P (inner) && general_operand (op, mode));
1311 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1312 that is, a memory reference whose address is a general_operand. */
1315 indirect_operand (rtx op, enum machine_mode mode)
1317 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1318 if (! reload_completed
1319 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1321 int offset = SUBREG_BYTE (op);
1322 rtx inner = SUBREG_REG (op);
1324 if (mode != VOIDmode && GET_MODE (op) != mode)
1327 /* The only way that we can have a general_operand as the resulting
1328 address is if OFFSET is zero and the address already is an operand
1329 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1330 operand. */
1332 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1333 || (GET_CODE (XEXP (inner, 0)) == PLUS
1334 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1335 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1336 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1340 && memory_operand (op, mode)
1341 && general_operand (XEXP (op, 0), Pmode));
1344 /* Return 1 if this is a comparison operator. This allows the use of
1345 MATCH_OPERATOR to recognize all the branch insns. */
1348 comparison_operator (rtx op, enum machine_mode mode)
1350 return ((mode == VOIDmode || GET_MODE (op) == mode)
1351 && COMPARISON_P (op));
1354 /* If BODY is an insn body that uses ASM_OPERANDS,
1355 return the number of operands (both input and output) in the insn.
1356 Otherwise return -1. */
1359 asm_noperands (rtx body)
1361 switch (GET_CODE (body))
1364 /* No output operands: return number of input operands. */
1365 return ASM_OPERANDS_INPUT_LENGTH (body);
1367 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1368 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1369 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1373 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1374 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1376 /* Multiple output operands, or 1 output plus some clobbers:
1377 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1381 /* Count backwards through CLOBBERs to determine number of SETs. */
1382 for (i = XVECLEN (body, 0); i > 0; i--)
1384 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1386 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1390 /* N_SETS is now number of output operands. */
1393 /* Verify that all the SETs we have
1394 came from a single original asm_operands insn
1395 (so that invalid combinations are blocked). */
1396 for (i = 0; i < n_sets; i++)
1398 rtx elt = XVECEXP (body, 0, i);
1399 if (GET_CODE (elt) != SET)
1401 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1403 /* If these ASM_OPERANDS rtx's came from different original insns
1404 then they aren't allowed together. */
1405 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1406 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1409 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1412 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1414 /* 0 outputs, but some clobbers:
1415 body is [(asm_operands ...) (clobber (reg ...))...]. */
1418 /* Make sure all the other parallel things really are clobbers. */
1419 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1420 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1423 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1432 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1433 copy its operands (both input and output) into the vector OPERANDS,
1434 the locations of the operands within the insn into the vector OPERAND_LOCS,
1435 and the constraints for the operands into CONSTRAINTS.
1436 Write the modes of the operands into MODES.
1437 Return the assembler-template.
1439 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1440 we don't store that info. */
1443 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1444 const char **constraints, enum machine_mode *modes)
1448 const char *template = 0;
1450 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1452 rtx asmop = SET_SRC (body);
1453 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1455 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1457 for (i = 1; i < noperands; i++)
1460 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1462 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1464 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1466 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1469 /* The output is in the SET.
1470 Its constraint is in the ASM_OPERANDS itself. */
1472 operands[0] = SET_DEST (body);
1474 operand_locs[0] = &SET_DEST (body);
1476 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1478 modes[0] = GET_MODE (SET_DEST (body));
1479 template = ASM_OPERANDS_TEMPLATE (asmop);
1481 else if (GET_CODE (body) == ASM_OPERANDS)
1484 /* No output operands: BODY is (asm_operands ....). */
1486 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1488 /* The input operands are found in the 1st element vector. */
1489 /* Constraints for inputs are in the 2nd element vector. */
1490 for (i = 0; i < noperands; i++)
1493 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1495 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1497 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1499 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1501 template = ASM_OPERANDS_TEMPLATE (asmop);
1503 else if (GET_CODE (body) == PARALLEL
1504 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1505 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1507 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1508 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1509 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1510 int nout = 0; /* Does not include CLOBBERs. */
1512 /* At least one output, plus some CLOBBERs. */
1514 /* The outputs are in the SETs.
1515 Their constraints are in the ASM_OPERANDS itself. */
1516 for (i = 0; i < nparallel; i++)
1518 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1519 break; /* Past last SET */
1522 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1524 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1526 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1528 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1532 for (i = 0; i < nin; i++)
1535 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1537 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1539 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1541 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1544 template = ASM_OPERANDS_TEMPLATE (asmop);
1546 else if (GET_CODE (body) == PARALLEL
1547 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1549 /* No outputs, but some CLOBBERs. */
1551 rtx asmop = XVECEXP (body, 0, 0);
1552 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1554 for (i = 0; i < nin; i++)
1557 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1559 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1561 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1563 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1566 template = ASM_OPERANDS_TEMPLATE (asmop);
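/* Illustrative usage sketch, not part of the original file: the usual
   pairing, as in check_asm_operands above, is to size the arrays with
   asm_noperands and then pull the constraints out with decode_asm_operands.  */

static int
count_asm_output_operands_sketch (rtx body)
{
  int n, i, nout = 0;
  const char **cons;

  n = asm_noperands (body);
  if (n < 0)
    return -1;				/* BODY is not an asm with operands */

  cons = alloca (n * sizeof (char *));
  decode_asm_operands (body, NULL, NULL, cons, NULL);

  /* Output operands come first; their constraints start with `=' or `+'.  */
  for (i = 0; i < n; i++)
    if (cons[i][0] == '=' || cons[i][0] == '+')
      nout++;
  return nout;
}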
1572 /* Check if an asm_operand matches its constraints.
1573 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1576 asm_operand_ok (rtx op, const char *constraint)
1580 /* Use constrain_operands after reload. */
1581 if (reload_completed)
1586 char c = *constraint;
1603 case '0': case '1': case '2': case '3': case '4':
1604 case '5': case '6': case '7': case '8': case '9':
1605 /* For best results, our caller should have given us the
1606 proper matching constraint, but we can't actually fail
1607 the check if they didn't. Indicate that results are
1608 inconclusive. */
1611 while (ISDIGIT (*constraint));
1617 if (address_operand (op, VOIDmode))
1622 case 'V': /* non-offsettable */
1623 if (memory_operand (op, VOIDmode))
1627 case 'o': /* offsettable */
1628 if (offsettable_nonstrict_memref_p (op))
1633 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1634 excepting those that expand_call created. Further, on some
1635 machines which do not have generalized auto inc/dec, an inc/dec
1636 is not a memory_operand.
1638 Match any memory and hope things are resolved after reload. */
1642 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1643 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1650 || GET_CODE (XEXP (op, 0)) == PRE_INC
1651 || GET_CODE (XEXP (op, 0)) == POST_INC))
1657 if (GET_CODE (op) == CONST_DOUBLE
1658 || (GET_CODE (op) == CONST_VECTOR
1659 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1664 if (GET_CODE (op) == CONST_DOUBLE
1665 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
1669 if (GET_CODE (op) == CONST_DOUBLE
1670 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
1675 if (GET_CODE (op) == CONST_INT
1676 || (GET_CODE (op) == CONST_DOUBLE
1677 && GET_MODE (op) == VOIDmode))
1683 #ifdef LEGITIMATE_PIC_OPERAND_P
1684 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1691 if (GET_CODE (op) == CONST_INT
1692 || (GET_CODE (op) == CONST_DOUBLE
1693 && GET_MODE (op) == VOIDmode))
1698 if (GET_CODE (op) == CONST_INT
1699 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
1703 if (GET_CODE (op) == CONST_INT
1704 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
1708 if (GET_CODE (op) == CONST_INT
1709 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
1713 if (GET_CODE (op) == CONST_INT
1714 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
1718 if (GET_CODE (op) == CONST_INT
1719 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
1723 if (GET_CODE (op) == CONST_INT
1724 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
1728 if (GET_CODE (op) == CONST_INT
1729 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
1733 if (GET_CODE (op) == CONST_INT
1734 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
1743 if (general_operand (op, VOIDmode))
1748 /* For all other letters, we first check for a register class,
1749 otherwise it is an EXTRA_CONSTRAINT. */
1750 if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1753 if (GET_MODE (op) == BLKmode)
1755 if (register_operand (op, VOIDmode))
1758 #ifdef EXTRA_CONSTRAINT_STR
1759 else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
1761 else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
1762 /* Every memory operand can be reloaded to fit. */
1763 && memory_operand (op, VOIDmode))
1765 else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
1766 /* Every address operand can be reloaded to fit. */
1767 && address_operand (op, VOIDmode))
1772 len = CONSTRAINT_LEN (c, constraint);
1775 while (--len && *constraint);
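/* Illustrative usage sketch, not part of the original file: this mirrors the
   per-operand check done by check_asm_operands earlier in this file.  */

static int
asm_operands_all_ok_sketch (int noperands, rtx *operands, const char **constraints)
{
  int i;

  for (i = 0; i < noperands; i++)
    if (! asm_operand_ok (operands[i], constraints[i]))
      return 0;		/* operand I cannot satisfy its constraint */
  return 1;
}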
1783 /* Given an rtx *P, if it is a sum containing an integer constant term,
1784 return the location (type rtx *) of the pointer to that constant term.
1785 Otherwise, return a null pointer. */
1788 find_constant_term_loc (rtx *p)
1791 enum rtx_code code = GET_CODE (*p);
1793 /* If *P IS such a constant term, P is its location. */
1795 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1799 /* Otherwise, if not a sum, it has no constant term. */
1801 if (GET_CODE (*p) != PLUS)
1804 /* If one of the summands is constant, return its location. */
1806 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1807 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1810 /* Otherwise, check each summand for containing a constant term. */
1812 if (XEXP (*p, 0) != 0)
1814 tem = find_constant_term_loc (&XEXP (*p, 0));
1819 if (XEXP (*p, 1) != 0)
1821 tem = find_constant_term_loc (&XEXP (*p, 1));
1829 /* Return 1 if OP is a memory reference
1830 whose address contains no side effects
1831 and remains valid after the addition
1832 of a positive integer less than the
1833 size of the object being referenced.
1835 We assume that the original address is valid and do not check it.
1837 This uses strict_memory_address_p as a subroutine, so
1838 don't use it before reload. */
1841 offsettable_memref_p (rtx op)
1843 return ((MEM_P (op))
1844 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1847 /* Similar, but don't require a strictly valid mem ref:
1848 consider pseudo-regs valid as index or base regs. */
1851 offsettable_nonstrict_memref_p (rtx op)
1853 return ((MEM_P (op))
1854 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1857 /* Return 1 if Y is a memory address which contains no side effects
1858 and would remain valid after the addition of a positive integer
1859 less than the size of that mode.
1861 We assume that the original address is valid and do not check it.
1862 We do check that it is valid for narrower modes.
1864 If STRICTP is nonzero, we require a strictly valid address,
1865 for the sake of use in reload.c. */
1868 offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
1870 enum rtx_code ycode = GET_CODE (y);
1874 int (*addressp) (enum machine_mode, rtx) =
1875 (strictp ? strict_memory_address_p : memory_address_p);
1876 unsigned int mode_sz = GET_MODE_SIZE (mode);
1878 if (CONSTANT_ADDRESS_P (y))
1881 /* Adjusting an offsettable address involves changing to a narrower mode.
1882 Make sure that's OK. */
1884 if (mode_dependent_address_p (y))
1887 /* ??? How much offset does an offsettable BLKmode reference need?
1888 Clearly that depends on the situation in which it's being used.
1889 However, the current situation in which we test 0xffffffff is
1890 less than ideal. Caveat user. */
1892 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1894 /* If the expression contains a constant term,
1895 see if it remains valid when max possible offset is added. */
1897 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1902 *y2 = plus_constant (*y2, mode_sz - 1);
1903 /* Use QImode because an odd displacement may be automatically invalid
1904 for any wider mode. But it should be valid for a single byte. */
1905 good = (*addressp) (QImode, y);
1907 /* In any case, restore old contents of memory. */
1912 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1915 /* The offset added here is chosen as the maximum offset that
1916 any instruction could need to add when operating on something
1917 of the specified mode. We assume that if Y and Y+c are
1918 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1919 go inside a LO_SUM here, so we do so as well. */
1920 if (GET_CODE (y) == LO_SUM
1922 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1923 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1924 plus_constant (XEXP (y, 1), mode_sz - 1));
1926 z = plus_constant (y, mode_sz - 1);
1928 /* Use QImode because an odd displacement may be automatically invalid
1929 for any wider mode. But it should be valid for a single byte. */
1930 return (*addressp) (QImode, z);
1933 /* Return 1 if ADDR is an address-expression whose effect depends
1934 on the mode of the memory reference it is used in.
1936 Autoincrement addressing is a typical example of mode-dependence
1937 because the amount of the increment depends on the mode. */
1940 mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */)
1942 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1944 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1945 win: ATTRIBUTE_UNUSED_LABEL
1949 /* Like extract_insn, but save the insn extracted and don't extract again
1950 when called again for the same insn, expecting that recog_data still
1951 contains the valid information. This is used primarily by the gen_attr
1952 infrastructure, which often extracts the insn again and again. */
1954 extract_insn_cached (rtx insn)
1956 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
1958 extract_insn (insn);
1959 recog_data.insn = insn;
1961 /* Do cached extract_insn, constrain_operands and complain about failures.
1962 Used by insn_attrtab. */
1964 extract_constrain_insn_cached (rtx insn)
1966 extract_insn_cached (insn);
1967 if (which_alternative == -1
1968 && !constrain_operands (reload_completed))
1969 fatal_insn_not_found (insn);
1971 /* Do cached constrain_operands and complain about failures. */
1973 constrain_operands_cached (int strict)
1975 if (which_alternative == -1)
1976 return constrain_operands (strict);
1981 /* Analyze INSN and fill in recog_data. */
1984 extract_insn (rtx insn)
1989 rtx body = PATTERN (insn);
1991 recog_data.insn = NULL;
1992 recog_data.n_operands = 0;
1993 recog_data.n_alternatives = 0;
1994 recog_data.n_dups = 0;
1995 which_alternative = -1;
1997 switch (GET_CODE (body))
2007 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2012 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2013 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2014 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2020 recog_data.n_operands = noperands = asm_noperands (body);
2023 /* This insn is an `asm' with operands. */
2025 /* expand_asm_operands makes sure there aren't too many operands. */
2026 if (noperands > MAX_RECOG_OPERANDS)
2029 /* Now get the operand values and constraints out of the insn. */
2030 decode_asm_operands (body, recog_data.operand,
2031 recog_data.operand_loc,
2032 recog_data.constraints,
2033 recog_data.operand_mode);
2036 const char *p = recog_data.constraints[0];
2037 recog_data.n_alternatives = 1;
2039 recog_data.n_alternatives += (*p++ == ',');
2043 fatal_insn_not_found (insn);
2047 /* Ordinary insn: recognize it, get the operands via insn_extract
2048 and get the constraints. */
2050 icode = recog_memoized (insn);
2052 fatal_insn_not_found (insn);
2054 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2055 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2056 recog_data.n_dups = insn_data[icode].n_dups;
2058 insn_extract (insn);
2060 for (i = 0; i < noperands; i++)
2062 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2063 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2064 /* A VOIDmode match_operand gets its mode from the real operand. */
2065 if (recog_data.operand_mode[i] == VOIDmode)
2066 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2069 for (i = 0; i < noperands; i++)
2070 recog_data.operand_type[i]
2071 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2072 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2075 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
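/* Illustrative usage sketch, not part of the original file: after
   extract_insn fills recog_data, operands can be inspected uniformly whether
   INSN is an asm or an ordinary recognized insn.  */

static void
walk_operands_sketch (rtx insn)
{
  int i;

  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      rtx op = recog_data.operand[i];
      enum machine_mode mode = recog_data.operand_mode[i];

      /* ... use OP and MODE; recog_data.constraints[i] holds the constraint
	 string for operand I ...  */
      (void) op;
      (void) mode;
    }
}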
2079 /* After calling extract_insn, you can use this function to extract some
2080 information from the constraint strings into a more usable form.
2081 The collected data is stored in recog_op_alt. */
2083 preprocess_constraints (void)
2087 for (i = 0; i < recog_data.n_operands; i++)
2088 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2089 * sizeof (struct operand_alternative)));
2091 for (i = 0; i < recog_data.n_operands; i++)
2094 struct operand_alternative *op_alt;
2095 const char *p = recog_data.constraints[i];
2097 op_alt = recog_op_alt[i];
2099 for (j = 0; j < recog_data.n_alternatives; j++)
2101 op_alt[j].class = NO_REGS;
2102 op_alt[j].constraint = p;
2103 op_alt[j].matches = -1;
2104 op_alt[j].matched = -1;
2106 if (*p == '\0' || *p == ',')
2108 op_alt[j].anything_ok = 1;
2118 while (c != ',' && c != '\0');
2119 if (c == ',' || c == '\0')
2127 case '=': case '+': case '*': case '%':
2128 case 'E': case 'F': case 'G': case 'H':
2129 case 's': case 'i': case 'n':
2130 case 'I': case 'J': case 'K': case 'L':
2131 case 'M': case 'N': case 'O': case 'P':
2132 /* These don't say anything we care about. */
2136 op_alt[j].reject += 6;
2139 op_alt[j].reject += 600;
2142 op_alt[j].earlyclobber = 1;
2145 case '0': case '1': case '2': case '3': case '4':
2146 case '5': case '6': case '7': case '8': case '9':
2149 op_alt[j].matches = strtoul (p, &end, 10);
2150 recog_op_alt[op_alt[j].matches][j].matched = i;
2156 op_alt[j].memory_ok = 1;
2159 op_alt[j].decmem_ok = 1;
2162 op_alt[j].incmem_ok = 1;
2165 op_alt[j].nonoffmem_ok = 1;
2168 op_alt[j].offmem_ok = 1;
2171 op_alt[j].anything_ok = 1;
2175 op_alt[j].is_address = 1;
2176 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2177 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2181 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2185 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2187 op_alt[j].memory_ok = 1;
2190 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2192 op_alt[j].is_address = 1;
2194 = (reg_class_subunion
2195 [(int) op_alt[j].class]
2196 [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
2201 = (reg_class_subunion
2202 [(int) op_alt[j].class]
2203 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2206 p += CONSTRAINT_LEN (c, p);
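/* Illustrative usage sketch, not part of the original file: after
   extract_insn, preprocess_constraints makes per-alternative information
   available in recog_op_alt.  */

static void
scan_operand_alternatives_sketch (rtx insn)
{
  int op, alt;

  extract_insn (insn);
  preprocess_constraints ();

  for (op = 0; op < recog_data.n_operands; op++)
    for (alt = 0; alt < recog_data.n_alternatives; alt++)
      if (recog_op_alt[op][alt].memory_ok)
	{
	  /* Operand OP may be a MEM in alternative ALT.  */
	}
}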
2212 /* Check the operands of an insn against the insn's operand constraints
2213 and return 1 if they are valid.
2214 The information about the insn's operands, constraints, operand modes
2215 etc. is obtained from the global variables set up by extract_insn.
2217 WHICH_ALTERNATIVE is set to a number which indicates which
2218 alternative of constraints was matched: 0 for the first alternative,
2219 1 for the next, etc.
2221 In addition, when two operands are required to match
2222 and it happens that the output operand is (reg) while the
2223 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2224 make the output operand look like the input.
2225 This is because the output operand is the one the template will print.
2227 This is used in final, just before printing the assembler code and by
2228 the routines that determine an insn's attribute.
2230 If STRICT is a positive nonzero value, it means that we have been
2231 called after reload has been completed. In that case, we must
2232 do all checks strictly. If it is zero, it means that we have been called
2233 before reload has completed. In that case, we first try to see if we can
2234 find an alternative that matches strictly. If not, we try again, this
2235 time assuming that reload will fix up the insn. This provides a "best
2236 guess" for the alternative and is used to compute attributes of insns prior
2237 to reload. A negative value of STRICT is used for this internal call. */
2245 constrain_operands (int strict)
2247 const char *constraints[MAX_RECOG_OPERANDS];
2248 int matching_operands[MAX_RECOG_OPERANDS];
2249 int earlyclobber[MAX_RECOG_OPERANDS];
2252 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2253 int funny_match_index;
2255 which_alternative = 0;
2256 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2259 for (c = 0; c < recog_data.n_operands; c++)
2261 constraints[c] = recog_data.constraints[c];
2262 matching_operands[c] = -1;
2269 funny_match_index = 0;
2271 for (opno = 0; opno < recog_data.n_operands; opno++)
2273 rtx op = recog_data.operand[opno];
2274 enum machine_mode mode = GET_MODE (op);
2275 const char *p = constraints[opno];
2281 earlyclobber[opno] = 0;
2283 /* A unary operator may be accepted by the predicate, but it
2284 is irrelevant for matching constraints. */
2288 if (GET_CODE (op) == SUBREG)
2290 if (REG_P (SUBREG_REG (op))
2291 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2292 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2293 GET_MODE (SUBREG_REG (op)),
2296 op = SUBREG_REG (op);
2299 /* An empty constraint or empty alternative
2300 allows anything which matched the pattern. */
2301 if (*p == 0 || *p == ',')
2305 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2314 case '?': case '!': case '*': case '%':
2319 /* Ignore rest of this alternative as far as
2320 constraint checking is concerned. */
2323 while (*p && *p != ',');
2328 earlyclobber[opno] = 1;
2331 case '0': case '1': case '2': case '3': case '4':
2332 case '5': case '6': case '7': case '8': case '9':
2334 /* This operand must be the same as a previous one.
2335 This kind of constraint is used for instructions such
2336 as add when they take only two operands.
2338 Note that the lower-numbered operand is passed first.
2340 If we are not testing strictly, assume that this
2341 constraint will be satisfied. */
2346 match = strtoul (p, &end, 10);
2353 rtx op1 = recog_data.operand[match];
2354 rtx op2 = recog_data.operand[opno];
2356 /* A unary operator may be accepted by the predicate,
2357 but it is irrelevant for matching constraints. */
2359 op1 = XEXP (op1, 0);
2361 op2 = XEXP (op2, 0);
2363 val = operands_match_p (op1, op2);
2366 matching_operands[opno] = match;
2367 matching_operands[match] = opno;
2372 /* If output is *x and input is *--x, arrange later
2373 to change the output to *--x as well, since the
2374 output op is the one that will be printed. */
2375 if (val == 2 && strict > 0)
2377 funny_match[funny_match_index].this = opno;
2378 funny_match[funny_match_index++].other = match;
2385 /* p is used for address_operands. When we are called by
2386 gen_reload, no one will have checked that the address is
2387 strictly valid, i.e., that all pseudos requiring hard regs
2388 have gotten them. */
2390 || (strict_memory_address_p (recog_data.operand_mode[opno],
2395 /* No need to check general_operand again;
2396 it was done in insn-recog.c. */
2398 /* Anything goes unless it is a REG and really has a hard reg
2399 but the hard reg is not in the class GENERAL_REGS. */
2401 || GENERAL_REGS == ALL_REGS
2403 || (reload_in_progress
2404 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2405 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2410 /* This is used for a MATCH_SCRATCH in the cases when
2411 we don't actually need anything. So anything goes any time. */
2417 /* Memory operands must be valid, to the extent
2418 required by STRICT. */
2422 && !strict_memory_address_p (GET_MODE (op),
2426 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2430 /* Before reload, accept what reload can turn into mem. */
2431 else if (strict < 0 && CONSTANT_P (op))
2433 /* During reload, accept a pseudo */
2434 else if (reload_in_progress && REG_P (op)
2435 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2441 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2442 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2448 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2449 || GET_CODE (XEXP (op, 0)) == POST_INC))
2455 if (GET_CODE (op) == CONST_DOUBLE
2456 || (GET_CODE (op) == CONST_VECTOR
2457 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2463 if (GET_CODE (op) == CONST_DOUBLE
2464 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2469 if (GET_CODE (op) == CONST_INT
2470 || (GET_CODE (op) == CONST_DOUBLE
2471 && GET_MODE (op) == VOIDmode))
2474 if (CONSTANT_P (op))
2479 if (GET_CODE (op) == CONST_INT
2480 || (GET_CODE (op) == CONST_DOUBLE
2481 && GET_MODE (op) == VOIDmode))
2493 if (GET_CODE (op) == CONST_INT
2494 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2500 && ((strict > 0 && ! offsettable_memref_p (op))
2502 && !(CONSTANT_P (op) || MEM_P (op)))
2503 || (reload_in_progress
2505 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2510 if ((strict > 0 && offsettable_memref_p (op))
2511 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2512 /* Before reload, accept what reload can handle. */
2514 && (CONSTANT_P (op) || MEM_P (op)))
2515 /* During reload, accept a pseudo */
2516 || (reload_in_progress && REG_P (op)
2517 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2523 enum reg_class class;
2526 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2527 if (class != NO_REGS)
2532 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2533 || (strict == 0 && GET_CODE (op) == SCRATCH)
2535 && reg_fits_class_p (op, class, offset, mode)))
2538 #ifdef EXTRA_CONSTRAINT_STR
2539 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2542 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2543 /* Every memory operand can be reloaded to fit. */
2544 && ((strict < 0 && MEM_P (op))
2545 /* Before reload, accept what reload can turn into mem. */
2547 || (strict < 0 && CONSTANT_P (op))
2548 /* During reload, accept a pseudo */
2549 || (reload_in_progress && REG_P (op)
2550 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2552 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2553 /* Every address operand can be reloaded to fit. */
2560 while (p += len, c);
2562 constraints[opno] = p;
2563 /* If this operand did not win somehow,
2564 this alternative loses. */
2568 /* This alternative won; the operands are ok.
2569 Change whichever operands this alternative says to change. */
2574 /* See if any earlyclobber operand conflicts with some other operand. */
2578 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2579 /* Ignore earlyclobber operands now in memory,
2580 because we would often report failure when we have
2581 two memory operands, one of which was formerly a REG. */
2582 if (earlyclobber[eopno]
2583 && REG_P (recog_data.operand[eopno]))
2584 for (opno = 0; opno < recog_data.n_operands; opno++)
2585 if ((MEM_P (recog_data.operand[opno])
2586 || recog_data.operand_type[opno] != OP_OUT)
2588 /* Ignore things like match_operator operands. */
2589 && *recog_data.constraints[opno] != 0
2590 && ! (matching_operands[opno] == eopno
2591 && operands_match_p (recog_data.operand[opno],
2592 recog_data.operand[eopno]))
2593 && ! safe_from_earlyclobber (recog_data.operand[opno],
2594 recog_data.operand[eopno]))
2599 while (--funny_match_index >= 0)
2601 recog_data.operand[funny_match[funny_match_index].other]
2602 = recog_data.operand[funny_match[funny_match_index].this];
2609 which_alternative++;
2611 while (which_alternative < recog_data.n_alternatives);
2613 which_alternative = -1;
2614 /* If we are about to reject this, but we are not to test strictly,
2615 try a very loose test. Only return failure if it fails also. */
2617 return constrain_operands (-1);
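/* A minimal usage sketch, not part of recog.c (fenced off with #if 0): a
   pass that has edited an insn in place can re-verify it against its
   constraints roughly as below.  The helper name is hypothetical;
   extract_insn, constrain_operands, and reload_completed are the real
   interfaces whose STRICT semantics are documented above.  */
#if 0
static int
recheck_insn_constraints (rtx insn)
{
  /* Fill recog_data from INSN's pattern.  */
  extract_insn (insn);
  /* Strict checking is only meaningful once reload has assigned hard
     registers; before that, assume reload can fix the insn up.  */
  return constrain_operands (reload_completed ? 1 : 0);
}
#endif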
2622 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2623 is a hard reg in class CLASS when its regno is offset by OFFSET
2624 and changed to mode MODE.
2625 If REG occupies multiple hard regs, all of them must be in CLASS. */
2628 reg_fits_class_p (rtx operand, enum reg_class class, int offset,
2629 enum machine_mode mode)
2631 int regno = REGNO (operand);
2632 if (regno < FIRST_PSEUDO_REGISTER
2633 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2638 for (sr = hard_regno_nregs[regno][mode] - 1;
2640 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2649 /* Split a single instruction. Helper function for split_all_insns and
2650 split_all_insns_noflow. Return the last insn in the sequence if successful,
2651 or NULL if unsuccessful. */
2654 split_insn (rtx insn)
2656 /* Split insns here to get max fine-grain parallelism. */
2657 rtx first = PREV_INSN (insn);
2658 rtx last = try_split (PATTERN (insn), insn, 1);
2663 /* try_split returns the NOTE that INSN became. */
2664 SET_INSN_DELETED (insn);
2666 /* ??? Coddle to md files that generate subregs in post-reload
2667 splitters instead of computing the proper hard register. */
2668 if (reload_completed && first != last)
2670 first = NEXT_INSN (first);
2674 cleanup_subreg_operands (first);
2677 first = NEXT_INSN (first);
2683 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2686 split_all_insns (int upd_life)
2692 blocks = sbitmap_alloc (last_basic_block);
2693 sbitmap_zero (blocks);
2696 FOR_EACH_BB_REVERSE (bb)
2699 bool finish = false;
2701 for (insn = BB_HEAD (bb); !finish ; insn = next)
2703 /* Can't use `next_real_insn' because that might go across
2704 CODE_LABELS and short-out basic blocks. */
2705 next = NEXT_INSN (insn);
2706 finish = (insn == BB_END (bb));
2709 rtx set = single_set (insn);
2711 /* Don't split no-op move insns. These should silently
2712 disappear later in final. Splitting such insns would
2713 break the code that handles REG_NO_CONFLICT blocks. */
2714 if (set && set_noop_p (set))
2716 /* Nops get in the way while scheduling, so delete them
2717 now if register allocation has already been done. It
2718 is too risky to try to do this before register
2719 allocation, and there are unlikely to be very many
2720 nops then anyway. */
2721 if (reload_completed)
2723 /* If the no-op set has a REG_UNUSED note, we need
2724 to update liveness information. */
2725 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2727 SET_BIT (blocks, bb->index);
2730 /* ??? Is life info affected by deleting edges? */
2731 delete_insn_and_edges (insn);
2736 rtx last = split_insn (insn);
2739 /* The split sequence may include a barrier, but the
2740 BB boundary we are interested in will be set to the previous one. */
2743 while (GET_CODE (last) == BARRIER)
2744 last = PREV_INSN (last);
2745 SET_BIT (blocks, bb->index);
2755 int old_last_basic_block = last_basic_block;
2757 find_many_sub_basic_blocks (blocks);
2759 if (old_last_basic_block != last_basic_block && upd_life)
2760 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2763 if (changed && upd_life)
2764 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2767 #ifdef ENABLE_CHECKING
2768 verify_flow_info ();
2771 sbitmap_free (blocks);
2774 /* Same as split_all_insns, but do not expect CFG to be available.
2775 Used by machine dependent reorg passes. */
2778 split_all_insns_noflow (void)
2782 for (insn = get_insns (); insn; insn = next)
2784 next = NEXT_INSN (insn);
2787 /* Don't split no-op move insns. These should silently
2788 disappear later in final. Splitting such insns would
2789 break the code that handles REG_NO_CONFLICT blocks. */
2790 rtx set = single_set (insn);
2791 if (set && set_noop_p (set))
2793 /* Nops get in the way while scheduling, so delete them
2794 now if register allocation has already been done. It
2795 is too risky to try to do this before register
2796 allocation, and there are unlikely to be very many nops then anyway.
2799 ??? Should we use delete_insn when the CFG isn't valid? */
2800 if (reload_completed)
2801 delete_insn_and_edges (insn);
2809 #ifdef HAVE_peephole2
2810 struct peep2_insn_data
2816 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2817 static int peep2_current;
2819 /* A non-insn marker indicating the last insn of the block.
2820 The live_before regset for this element is correct, indicating
2821 global_live_at_end for the block. */
2822 #define PEEP2_EOB pc_rtx
2824 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2825 does not exist. Used by the recognizer to find the next insn to match
2826 in a multi-insn pattern. */
2829 peep2_next_insn (int n)
2831 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2835 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2836 n -= MAX_INSNS_PER_PEEP2 + 1;
2838 if (peep2_insn_data[n].insn == PEEP2_EOB)
2840 return peep2_insn_data[n].insn;
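/* Hedged refactoring sketch, not in this file (fenced off with #if 0):
   the wrap-around index arithmetic above is repeated by the peep2_*
   routines below and could be factored into a small helper like this.
   The helper name is hypothetical.  */
#if 0
static int
peep2_buf_position (int n)
{
  /* Fold an offset from the current position back into the circular
     buffer of MAX_INSNS_PER_PEEP2 + 1 slots.  */
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;
  return n;
}
#endif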
2843 /* Return true if REGNO is dead before the Nth non-note insn after `current'. */
2847 peep2_regno_dead_p (int ofs, int regno)
2849 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2852 ofs += peep2_current;
2853 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2854 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2856 if (peep2_insn_data[ofs].insn == NULL_RTX)
2859 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
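/* Illustrative only: define_peephole2 conditions in a target's .md file
   call this predicate directly, e.g. "peep2_regno_dead_p (0, FLAGS_REG)"
   to require that the flags register be dead before the first insn of
   the peephole.  FLAGS_REG is a target-specific macro, used here purely
   as an example.  */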
2862 /* Similarly for a REG. */
2865 peep2_reg_dead_p (int ofs, rtx reg)
2869 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2872 ofs += peep2_current;
2873 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2874 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2876 if (peep2_insn_data[ofs].insn == NULL_RTX)
2879 regno = REGNO (reg);
2880 n = hard_regno_nregs[regno][GET_MODE (reg)];
2882 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2887 /* Try to find a hard register of mode MODE, matching the register class in
2888 CLASS_STR, which is available at the beginning of the insn at peephole
2889 offset FROM and remains available until the end of the insn at peephole
2890 offset TO. FROM and TO are offsets from the current peephole position,
2891 counted the same way as the argument of peep2_next_insn.
2892 Registers that already have bits set in REG_SET will not be considered.
2894 If an appropriate register is available, it will be returned and the
2895 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is returned. */
2899 peep2_find_free_register (int from, int to, const char *class_str,
2900 enum machine_mode mode, HARD_REG_SET *reg_set)
2902 static int search_ofs;
2903 enum reg_class class;
2907 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2910 from += peep2_current;
2911 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2912 from -= MAX_INSNS_PER_PEEP2 + 1;
2913 to += peep2_current;
2914 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2915 to -= MAX_INSNS_PER_PEEP2 + 1;
2917 if (peep2_insn_data[from].insn == NULL_RTX)
2919 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2923 HARD_REG_SET this_live;
2925 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2927 if (peep2_insn_data[from].insn == NULL_RTX)
2929 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2930 IOR_HARD_REG_SET (live, this_live);
2933 class = (class_str[0] == 'r' ? GENERAL_REGS
2934 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2936 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2938 int raw_regno, regno, success, j;
2940 /* Distribute the free registers as much as possible. */
2941 raw_regno = search_ofs + i;
2942 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2943 raw_regno -= FIRST_PSEUDO_REGISTER;
2944 #ifdef REG_ALLOC_ORDER
2945 regno = reg_alloc_order[raw_regno];
2950 /* Don't allocate fixed registers. */
2951 if (fixed_regs[regno])
2953 /* Make sure the register is of the right class. */
2954 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2956 /* And can support the mode we need. */
2957 if (! HARD_REGNO_MODE_OK (regno, mode))
2959 /* And that we don't create an extra save/restore. */
2960 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2962 /* And we don't clobber traceback for noreturn functions. */
2963 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2964 && (! reload_completed || frame_pointer_needed))
2968 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2970 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2971 || TEST_HARD_REG_BIT (live, regno + j))
2979 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2980 SET_HARD_REG_BIT (*reg_set, regno + j);
2982 /* Start the next search with the next register. */
2983 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2985 search_ofs = raw_regno;
2987 return gen_rtx_REG (mode, regno);
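/* A minimal call sketch, assuming a define_peephole2 that spans two insns
   (fenced off with #if 0): ask for a free SImode register of class "r"
   that is not live anywhere from peephole offset 0 through offset 1 and
   is not already marked in REGS_IN_USE.  Everything except
   peep2_find_free_register itself is illustrative.  */
#if 0
static rtx
example_find_peep2_scratch (HARD_REG_SET *regs_in_use)
{
  return peep2_find_free_register (0, 1, "r", SImode, regs_in_use);
}
#endif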
2995 /* Perform the peephole2 optimization pass. */
2998 peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
3000 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3005 #ifdef HAVE_conditional_execution
3009 bool do_cleanup_cfg = false;
3010 bool do_rebuild_jump_labels = false;
3012 /* Initialize the regsets we're going to use. */
3013 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3014 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3015 live = INITIALIZE_REG_SET (rs_heads[i]);
3017 #ifdef HAVE_conditional_execution
3018 blocks = sbitmap_alloc (last_basic_block);
3019 sbitmap_zero (blocks);
3022 count_or_remove_death_notes (NULL, 1);
3025 FOR_EACH_BB_REVERSE (bb)
3027 struct propagate_block_info *pbi;
3029 /* Indicate that all slots except the last hold invalid data. */
3030 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3031 peep2_insn_data[i].insn = NULL_RTX;
3033 /* Indicate that the last slot contains live_after data. */
3034 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3035 peep2_current = MAX_INSNS_PER_PEEP2;
3037 /* Start up propagation. */
3038 COPY_REG_SET (live, bb->global_live_at_end);
3039 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3041 #ifdef HAVE_conditional_execution
3042 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3044 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3047 for (insn = BB_END (bb); ; insn = prev)
3049 prev = PREV_INSN (insn);
3052 rtx try, before_try, x;
3055 bool was_call = false;
3057 /* Record this insn. */
3058 if (--peep2_current < 0)
3059 peep2_current = MAX_INSNS_PER_PEEP2;
3060 peep2_insn_data[peep2_current].insn = insn;
3061 propagate_one_insn (pbi, insn);
3062 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3064 /* Match the peephole. */
3065 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3068 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3069 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3070 cfg-related call notes. */
3071 for (i = 0; i <= match_len; ++i)
3074 rtx old_insn, new_insn, note;
3076 j = i + peep2_current;
3077 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3078 j -= MAX_INSNS_PER_PEEP2 + 1;
3079 old_insn = peep2_insn_data[j].insn;
3080 if (GET_CODE (old_insn) != CALL_INSN)
3085 while (new_insn != NULL_RTX)
3087 if (GET_CODE (new_insn) == CALL_INSN)
3089 new_insn = NEXT_INSN (new_insn);
3092 if (new_insn == NULL_RTX)
3095 CALL_INSN_FUNCTION_USAGE (new_insn)
3096 = CALL_INSN_FUNCTION_USAGE (old_insn);
3098 for (note = REG_NOTES (old_insn);
3100 note = XEXP (note, 1))
3101 switch (REG_NOTE_KIND (note))
3105 case REG_ALWAYS_RETURN:
3106 REG_NOTES (new_insn)
3107 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3109 REG_NOTES (new_insn));
3111 /* Discard all other reg notes. */
3115 /* Croak if there is another call in the sequence. */
3116 while (++i <= match_len)
3118 j = i + peep2_current;
3119 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3120 j -= MAX_INSNS_PER_PEEP2 + 1;
3121 old_insn = peep2_insn_data[j].insn;
3122 if (GET_CODE (old_insn) == CALL_INSN)
3128 i = match_len + peep2_current;
3129 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3130 i -= MAX_INSNS_PER_PEEP2 + 1;
3132 note = find_reg_note (peep2_insn_data[i].insn,
3133 REG_EH_REGION, NULL_RTX);
3135 /* Replace the old sequence with the new. */
3136 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3137 INSN_LOCATOR (peep2_insn_data[i].insn));
3138 before_try = PREV_INSN (insn);
3139 delete_insn_chain (insn, peep2_insn_data[i].insn);
3141 /* Re-insert the EH_REGION notes. */
3142 if (note || (was_call && nonlocal_goto_handler_labels))
3146 for (eh_edge = bb->succ; eh_edge
3147 ; eh_edge = eh_edge->succ_next)
3148 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3151 for (x = try ; x != before_try ; x = PREV_INSN (x))
3152 if (GET_CODE (x) == CALL_INSN
3153 || (flag_non_call_exceptions
3154 && may_trap_p (PATTERN (x))
3155 && !find_reg_note (x, REG_EH_REGION, NULL)))
3159 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3163 if (x != BB_END (bb) && eh_edge)
3168 nfte = split_block (bb, x);
3169 flags = (eh_edge->flags
3170 & (EDGE_EH | EDGE_ABNORMAL));
3171 if (GET_CODE (x) == CALL_INSN)
3172 flags |= EDGE_ABNORMAL_CALL;
3173 nehe = make_edge (nfte->src, eh_edge->dest,
3176 nehe->probability = eh_edge->probability;
3178 = REG_BR_PROB_BASE - nehe->probability;
3180 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3181 #ifdef HAVE_conditional_execution
3182 SET_BIT (blocks, nfte->dest->index);
3190 /* The replacement may have turned a possibly trapping insn into a
3191 non-trapping one. Zap any outgoing edges that are now dead. */
3192 do_cleanup_cfg |= purge_dead_edges (bb);
3195 #ifdef HAVE_conditional_execution
3196 /* With conditional execution, we cannot back up the
3197 live information so easily, since the conditional
3198 death data structures are not so self-contained.
3199 So record that we've made a modification to this
3200 block and update life information at the end. */
3201 SET_BIT (blocks, bb->index);
3204 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3205 peep2_insn_data[i].insn = NULL_RTX;
3206 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3208 /* Back up lifetime information past the end of the
3209 newly created sequence. */
3210 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3212 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3214 /* Update life information for the new sequence. */
3221 i = MAX_INSNS_PER_PEEP2;
3222 peep2_insn_data[i].insn = x;
3223 propagate_one_insn (pbi, x);
3224 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3230 /* ??? Should verify that LIVE now matches what we
3231 had before the new sequence. */
3236 /* If we generated a jump instruction, it won't have
3237 JUMP_LABEL set. Recompute after we're done. */
3238 for (x = try; x != before_try; x = PREV_INSN (x))
3239 if (GET_CODE (x) == JUMP_INSN)
3241 do_rebuild_jump_labels = true;
3247 if (insn == BB_HEAD (bb))
3251 free_propagate_block_info (pbi);
3254 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3255 FREE_REG_SET (peep2_insn_data[i].live_before);
3256 FREE_REG_SET (live);
3258 if (do_rebuild_jump_labels)
3259 rebuild_jump_labels (get_insns ());
3261 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3262 we've changed global life since exception handlers are no longer reachable. */
3267 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3269 #ifdef HAVE_conditional_execution
3272 count_or_remove_death_notes (blocks, 1);
3273 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3275 sbitmap_free (blocks);
3278 #endif /* HAVE_peephole2 */
3280 /* Common predicates for use with define_bypass. */
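/* Illustrative only: a target's pipeline description refers to these
   predicates by name as the optional guard of define_bypass, along the
   lines of

     (define_bypass 1 "my_store_producer" "my_store" "store_data_bypass_p")

   where the latency and the insn-reservation names are hypothetical.  */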
3282 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3283 data, not the address operand(s), of the store. IN_INSN must be
3284 single_set. OUT_INSN must be either a single_set or a PARALLEL with SETs inside. */
3288 store_data_bypass_p (rtx out_insn, rtx in_insn)
3290 rtx out_set, in_set;
3292 in_set = single_set (in_insn);
3296 if (!MEM_P (SET_DEST (in_set)))
3299 out_set = single_set (out_insn);
3302 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3310 out_pat = PATTERN (out_insn);
3311 if (GET_CODE (out_pat) != PARALLEL)
3314 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3316 rtx exp = XVECEXP (out_pat, 0, i);
3318 if (GET_CODE (exp) == CLOBBER)
3321 if (GET_CODE (exp) != SET)
3324 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3332 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3333 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3334 or multiple set; IN_INSN should be single_set for truth, but for convenience
3335 of insn categorization may be any JUMP or CALL insn. */
3338 if_test_bypass_p (rtx out_insn, rtx in_insn)
3340 rtx out_set, in_set;
3342 in_set = single_set (in_insn);
3345 if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
3350 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3352 in_set = SET_SRC (in_set);
3354 out_set = single_set (out_insn);
3357 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3358 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3366 out_pat = PATTERN (out_insn);
3367 if (GET_CODE (out_pat) != PARALLEL)
3370 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3372 rtx exp = XVECEXP (out_pat, 0, i);
3374 if (GET_CODE (exp) == CLOBBER)
3377 if (GET_CODE (exp) != SET)
3380 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3381 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))