1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
25 #include "coretypes.h"
29 #include "insn-config.h"
30 #include "insn-attr.h"
31 #include "hard-reg-set.h"
39 #include "basic-block.h"
43 #ifndef STACK_PUSH_CODE
44 #ifdef STACK_GROWS_DOWNWARD
45 #define STACK_PUSH_CODE PRE_DEC
47 #define STACK_PUSH_CODE PRE_INC
51 #ifndef STACK_POP_CODE
52 #ifdef STACK_GROWS_DOWNWARD
53 #define STACK_POP_CODE POST_INC
55 #define STACK_POP_CODE POST_DEC
59 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
60 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
61 static void validate_replace_src_1 PARAMS ((rtx *, void *));
62 static rtx split_insn PARAMS ((rtx));
64 /* Nonzero means allow operands to be volatile.
65 This should be 0 if you are generating rtl, such as if you are calling
66 the functions in optabs.c and expmed.c (most of the time).
67 This should be 1 if all valid insns need to be recognized,
68 such as in regclass.c and final.c and reload.c.
70 init_recog and init_recog_no_volatile are responsible for setting this. */
74 struct recog_data recog_data;
76 /* Contains a vector of operand_alternative structures for every operand.
77 Set up by preprocess_constraints. */
78 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
80 /* On return from `constrain_operands', indicate which alternative
83 int which_alternative;
85 /* Nonzero after end of reload pass.
86 Set to 1 or 0 by toplev.c.
87 Controls the significance of (SUBREG (MEM)). */
91 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
92 int epilogue_completed;
94 /* Initialize data used by the function `recog'.
95 This must be called once in the compilation of a function
96 before any insn recognition may be done in the function. */
99 init_recog_no_volatile ()
110 /* Try recognizing the instruction INSN,
111 and return the code number that results.
112 Remember the code so that repeated calls do not
113 need to spend the time for actual rerecognition.
115 This function is the normal interface to instruction recognition.
116 The automatically-generated function `recog' is normally called
117 through this one. (The only exception is in combine.c.) */
120 recog_memoized_1 (insn)
123 if (INSN_CODE (insn) < 0)
124 INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
125 return INSN_CODE (insn);
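/* Illustrative sketch, not part of the original file: passes normally
   test recognizability through the recog_memoized wrapper, e.g.

       if (recog_memoized (insn) >= 0)
         ...  INSN matches some define_insn; the code is cached in
              INSN_CODE (insn)  ...

   where INSN is a hypothetical insn.  */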
128 /* Check that X is an insn-body for an `asm' with operands
129 and that the operands mentioned in it are legitimate. */
132 check_asm_operands (x)
137 const char **constraints;
140 /* Post-reload, be more strict with things. */
141 if (reload_completed)
143 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
144 extract_insn (make_insn_raw (x));
145 constrain_operands (1);
146 return which_alternative >= 0;
149 noperands = asm_noperands (x);
155 operands = (rtx *) alloca (noperands * sizeof (rtx));
156 constraints = (const char **) alloca (noperands * sizeof (char *));
158 decode_asm_operands (x, operands, NULL, constraints, NULL);
160 for (i = 0; i < noperands; i++)
162 const char *c = constraints[i];
165 if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
166 c = constraints[c[0] - '0'];
168 if (! asm_operand_ok (operands[i], c))
175 /* Static data for the next two routines. */
177 typedef struct change_t
185 static change_t *changes;
186 static int changes_allocated;
188 static int num_changes = 0;
190 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
191 at which NEW will be placed. If OBJECT is zero, no validation is done,
192 the change is simply made.
194 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
195 will be called with the address and mode as parameters. If OBJECT is
196 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
199 IN_GROUP is nonzero if this is part of a group of changes that must be
200 performed as a group. In that case, the changes will be stored. The
201 function `apply_change_group' will validate and apply the changes.
203 If IN_GROUP is zero, this is a single change. Try to recognize the insn
204 or validate the memory reference with the change applied. If the result
205 is not valid for the machine, suppress the change and return zero.
206 Otherwise, perform the change and return 1. */
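/* Illustrative sketch, not part of the original file: a single,
   self-validating replacement might look like

       if (validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 0))
         ...  the change was applied and INSN re-recognized  ...

   INSN and NEW_SRC are hypothetical; with IN_GROUP == 0 the change is
   either validated immediately or backed out.  */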
209 validate_change (object, loc, new, in_group)
217 if (old == new || rtx_equal_p (old, new))
220 if (in_group == 0 && num_changes != 0)
225 /* Save the information describing this change. */
226 if (num_changes >= changes_allocated)
228 if (changes_allocated == 0)
229 /* This value allows for repeated substitutions inside complex
230 indexed addresses, or changes in up to 5 insns. */
231 changes_allocated = MAX_RECOG_OPERANDS * 5;
233 changes_allocated *= 2;
236 (change_t*) xrealloc (changes,
237 sizeof (change_t) * changes_allocated);
240 changes[num_changes].object = object;
241 changes[num_changes].loc = loc;
242 changes[num_changes].old = old;
244 if (object && GET_CODE (object) != MEM)
246 /* Set INSN_CODE to force rerecognition of insn. Save old code in
248 changes[num_changes].old_code = INSN_CODE (object);
249 INSN_CODE (object) = -1;
254 /* If we are making a group of changes, return 1. Otherwise, validate the
255 change group we made. */
260 return apply_change_group ();
263 /* This subroutine of apply_change_group verifies whether the changes to INSN
264 were valid; i.e. whether INSN can still be recognized. */
267 insn_invalid_p (insn)
270 rtx pat = PATTERN (insn);
271 int num_clobbers = 0;
272 /* If we are before reload and the pattern is a SET, see if we can add
274 int icode = recog (pat, insn,
275 (GET_CODE (pat) == SET
276 && ! reload_completed && ! reload_in_progress)
277 ? &num_clobbers : 0);
278 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
281 /* If this is an asm and the operands aren't legal, then fail. Likewise if
282 this is not an asm and the insn wasn't recognized. */
283 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
284 || (!is_asm && icode < 0))
287 /* If we have to add CLOBBERs, fail if we have to add ones that reference
288 hard registers since our callers can't know if they are live or not.
289 Otherwise, add them. */
290 if (num_clobbers > 0)
294 if (added_clobbers_hard_reg_p (icode))
297 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
298 XVECEXP (newpat, 0, 0) = pat;
299 add_clobbers (newpat, icode);
300 PATTERN (insn) = pat = newpat;
303 /* After reload, verify that all constraints are satisfied. */
304 if (reload_completed)
308 if (! constrain_operands (1))
312 INSN_CODE (insn) = icode;
316 /* Return number of changes made and not validated yet. */
318 num_changes_pending ()
323 /* Apply a group of changes previously issued with `validate_change'.
324 Return 1 if all changes are valid, zero otherwise. */
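/* Illustrative sketch, not part of the original file: a typical grouped
   sequence queues several changes with IN_GROUP == 1 and validates them
   all at once, e.g.

       validate_change (insn, &XEXP (x, 0), new0, 1);
       validate_change (insn, &XEXP (x, 1), new1, 1);
       if (! apply_change_group ())
         ...  every queued change has been backed out  ...

   INSN, X, NEW0 and NEW1 are hypothetical.  */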
327 apply_change_group ()
330 rtx last_validated = NULL_RTX;
332 /* The changes have been applied and all INSN_CODEs have been reset to force
335 The changes are valid if we aren't given an object, or if we are
336 given a MEM and it still is a valid address, or if this is an insn
337 and it is recognized. In the latter case, if reload has completed,
338 we also require that the operands meet the constraints for
341 for (i = 0; i < num_changes; i++)
343 rtx object = changes[i].object;
345 /* If there is no object to test or if it is the same as the one we
346 already tested, ignore it. */
347 if (object == 0 || object == last_validated)
350 if (GET_CODE (object) == MEM)
352 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
355 else if (insn_invalid_p (object))
357 rtx pat = PATTERN (object);
359 /* Perhaps we couldn't recognize the insn because there were
360 extra CLOBBERs at the end. If so, try to re-recognize
361 without the last CLOBBER (later iterations will cause each of
362 them to be eliminated, in turn). But don't do this if we
363 have an ASM_OPERAND. */
364 if (GET_CODE (pat) == PARALLEL
365 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
366 && asm_noperands (PATTERN (object)) < 0)
370 if (XVECLEN (pat, 0) == 2)
371 newpat = XVECEXP (pat, 0, 0);
377 = gen_rtx_PARALLEL (VOIDmode,
378 rtvec_alloc (XVECLEN (pat, 0) - 1));
379 for (j = 0; j < XVECLEN (newpat, 0); j++)
380 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
383 /* Add a new change to this group to replace the pattern
384 with this new pattern. Then consider this change
385 as having succeeded. The change we added will
386 cause the entire call to fail if things remain invalid.
388 Note that this can lose if a later change than the one
389 we are processing specified &XVECEXP (PATTERN (object), 0, X)
390 but this shouldn't occur. */
392 validate_change (object, &PATTERN (object), newpat, 1);
395 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
396 /* If this insn is a CLOBBER or USE, it is always valid, but is
402 last_validated = object;
405 if (i == num_changes)
409 for (i = 0; i < num_changes; i++)
410 if (changes[i].object
411 && INSN_P (changes[i].object)
412 && (bb = BLOCK_FOR_INSN (changes[i].object)))
413 bb->flags |= BB_DIRTY;
425 /* Return the number of changes so far in the current group. */
428 num_validated_changes ()
433 /* Retract the changes numbered NUM and up. */
441 /* Back out all the changes. Do this in the opposite order in which
443 for (i = num_changes - 1; i >= num; i--)
445 *changes[i].loc = changes[i].old;
446 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
447 INSN_CODE (changes[i].object) = changes[i].old_code;
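/* Illustrative sketch, not part of the original file: callers that may
   need to retract only part of a group usually record a checkpoint
   first, e.g.

       int checkpoint = num_validated_changes ();
       ...  queue further changes with validate_change (..., 1)  ...
       if (!still_profitable)
         cancel_changes (checkpoint);

   STILL_PROFITABLE stands for whatever test the caller applies, and
   cancel_changes is the retraction routine defined above.  */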
452 /* Replace every occurrence of FROM in X with TO. Mark each change with
453 validate_change passing OBJECT. */
456 validate_replace_rtx_1 (loc, from, to, object)
458 rtx from, to, object;
464 enum machine_mode op0_mode = VOIDmode;
465 int prev_changes = num_changes;
472 fmt = GET_RTX_FORMAT (code);
474 op0_mode = GET_MODE (XEXP (x, 0));
476 /* X matches FROM if it is the same rtx or they are both referring to the
477 same register in the same mode. Avoid calling rtx_equal_p unless the
478 operands look similar. */
481 || (GET_CODE (x) == REG && GET_CODE (from) == REG
482 && GET_MODE (x) == GET_MODE (from)
483 && REGNO (x) == REGNO (from))
484 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
485 && rtx_equal_p (x, from)))
487 validate_change (object, loc, to, 1);
491 /* Call ourselves recursively to perform the replacements. */
493 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
496 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
497 else if (fmt[i] == 'E')
498 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
499 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
502 /* If we didn't substitute, there is nothing more to do. */
503 if (num_changes == prev_changes)
506 /* Allow substituted expression to have different mode. This is used by
507 regmove to change the mode of a pseudo register. */
508 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
509 op0_mode = GET_MODE (XEXP (x, 0));
511 /* Do changes needed to keep rtx consistent. Don't do any other
512 simplifications, as it is not our job. */
514 if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
515 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
517 validate_change (object, loc,
518 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
519 : swap_condition (code),
520 GET_MODE (x), XEXP (x, 1),
529 /* If we have a PLUS whose second operand is now a CONST_INT, use
530 simplify_gen_binary to try to simplify it.
531 ??? We may want later to remove this, once simplification is
532 separated from this function. */
533 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
534 validate_change (object, loc,
536 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
539 if (GET_CODE (XEXP (x, 1)) == CONST_INT
540 || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
541 validate_change (object, loc,
543 (PLUS, GET_MODE (x), XEXP (x, 0),
544 simplify_gen_unary (NEG,
545 GET_MODE (x), XEXP (x, 1),
550 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
552 new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
554 /* If any of the above failed, substitute in something that
555 we know won't be recognized. */
557 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
558 validate_change (object, loc, new, 1);
562 /* All subregs possible to simplify should be simplified. */
563 new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
566 /* Subregs of VOIDmode operands are incorrect. */
567 if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
568 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
570 validate_change (object, loc, new, 1);
574 /* If we are replacing a register with memory, try to change the memory
575 to be the mode required for memory in extract operations (this isn't
576 likely to be an insertion operation; if it was, nothing bad will
577 happen, we might just fail in some cases). */
579 if (GET_CODE (XEXP (x, 0)) == MEM
580 && GET_CODE (XEXP (x, 1)) == CONST_INT
581 && GET_CODE (XEXP (x, 2)) == CONST_INT
582 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
583 && !MEM_VOLATILE_P (XEXP (x, 0)))
585 enum machine_mode wanted_mode = VOIDmode;
586 enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
587 int pos = INTVAL (XEXP (x, 2));
589 if (GET_CODE (x) == ZERO_EXTRACT)
591 enum machine_mode new_mode
592 = mode_for_extraction (EP_extzv, 1);
593 if (new_mode != MAX_MACHINE_MODE)
594 wanted_mode = new_mode;
596 else if (GET_CODE (x) == SIGN_EXTRACT)
598 enum machine_mode new_mode
599 = mode_for_extraction (EP_extv, 1);
600 if (new_mode != MAX_MACHINE_MODE)
601 wanted_mode = new_mode;
604 /* If we have a narrower mode, we can do something. */
605 if (wanted_mode != VOIDmode
606 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
608 int offset = pos / BITS_PER_UNIT;
611 /* If the bytes and bits are counted differently, we
612 must adjust the offset. */
613 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
615 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
618 pos %= GET_MODE_BITSIZE (wanted_mode);
620 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
622 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
623 validate_change (object, &XEXP (x, 0), newmem, 1);
634 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
635 with TO. After all changes have been made, validate by seeing
636 if INSN is still valid. */
639 validate_replace_rtx_subexp (from, to, insn, loc)
640 rtx from, to, insn, *loc;
642 validate_replace_rtx_1 (loc, from, to, insn);
643 return apply_change_group ();
646 /* Try replacing every occurrence of FROM in INSN with TO. After all
647 changes have been made, validate by seeing if INSN is still valid. */
650 validate_replace_rtx (from, to, insn)
653 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
654 return apply_change_group ();
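/* Illustrative sketch, not part of the original file: replacing one
   pseudo register by another throughout an insn, keeping the change only
   if the insn still matches its pattern, might be written as

       if (validate_replace_rtx (old_reg, new_reg, insn))
         ...  INSN now uses NEW_REG and is still recognizable  ...

   OLD_REG, NEW_REG and INSN are hypothetical.  */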
657 /* Try replacing every occurrence of FROM in INSN with TO. */
660 validate_replace_rtx_group (from, to, insn)
663 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
666 /* Function called by note_uses to replace used subexpressions. */
667 struct validate_replace_src_data
669 rtx from; /* Old RTX */
670 rtx to; /* New RTX */
671 rtx insn; /* Insn in which substitution is occurring. */
675 validate_replace_src_1 (x, data)
679 struct validate_replace_src_data *d
680 = (struct validate_replace_src_data *) data;
682 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
685 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
689 validate_replace_src_group (from, to, insn)
692 struct validate_replace_src_data d;
697 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
700 /* Same as validate_replace_src_group, but validate by seeing if
701 INSN is still valid. */
703 validate_replace_src (from, to, insn)
706 validate_replace_src_group (from, to, insn);
707 return apply_change_group ();
711 /* Return 1 if the insn using CC0 set by INSN does not contain
712 any ordered tests applied to the condition codes.
713 EQ and NE tests do not count. */
716 next_insn_tests_no_inequality (insn)
719 rtx next = next_cc0_user (insn);
721 /* If there is no next insn, we have to take the conservative choice. */
725 return ((GET_CODE (next) == JUMP_INSN
726 || GET_CODE (next) == INSN
727 || GET_CODE (next) == CALL_INSN)
728 && ! inequality_comparisons_p (PATTERN (next)));
732 /* This is used by find_single_use to locate an rtx that contains exactly one
733 use of DEST, which is typically either a REG or CC0. It returns a
734 pointer to the innermost rtx expression containing DEST. Appearances of
735 DEST that are being used to totally replace it are not counted. */
738 find_single_use_1 (dest, loc)
743 enum rtx_code code = GET_CODE (x);
761 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
762 of a REG that occupies all of the REG, the insn uses DEST if
763 it is mentioned in the destination or the source. Otherwise, we
764 need only check the source.
765 if (GET_CODE (SET_DEST (x)) != CC0
766 && GET_CODE (SET_DEST (x)) != PC
767 && GET_CODE (SET_DEST (x)) != REG
768 && ! (GET_CODE (SET_DEST (x)) == SUBREG
769 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
770 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
771 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
772 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
773 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
776 return find_single_use_1 (dest, &SET_SRC (x));
780 return find_single_use_1 (dest, &XEXP (x, 0));
786 /* If it wasn't one of the common cases above, check each expression and
787 vector of this code. Look for a unique usage of DEST. */
789 fmt = GET_RTX_FORMAT (code);
790 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
794 if (dest == XEXP (x, i)
795 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
796 && REGNO (dest) == REGNO (XEXP (x, i))))
799 this_result = find_single_use_1 (dest, &XEXP (x, i));
802 result = this_result;
803 else if (this_result)
804 /* Duplicate usage. */
807 else if (fmt[i] == 'E')
811 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
813 if (XVECEXP (x, i, j) == dest
814 || (GET_CODE (dest) == REG
815 && GET_CODE (XVECEXP (x, i, j)) == REG
816 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
819 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
822 result = this_result;
823 else if (this_result)
832 /* See if DEST, produced in INSN, is used only a single time in the
833 sequel. If so, return a pointer to the innermost rtx expression in which
836 If PLOC is nonzero, *PLOC is set to the insn containing the single use.
838 This routine will usually return zero either before flow is called (because
839 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
840 note can't be trusted).
842 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
843 care about REG_DEAD notes or LOG_LINKS.
845 Otherwise, we find the single use by finding an insn that has a
846 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
847 only referenced once in that insn, we know that it must be the first
848 and last insn referencing DEST. */
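/* Illustrative sketch, not part of the original file: a typical caller
   might look like

       rtx use_insn;
       rtx *use = find_single_use (dest, insn, &use_insn);
       if (use != 0)
         ...  *USE is the only use of DEST, and it occurs in USE_INSN  ...

   DEST and INSN are hypothetical.  */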
851 find_single_use (dest, insn, ploc)
863 next = NEXT_INSN (insn);
865 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
868 result = find_single_use_1 (dest, &PATTERN (next));
875 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
878 for (next = next_nonnote_insn (insn);
879 next != 0 && GET_CODE (next) != CODE_LABEL;
880 next = next_nonnote_insn (next))
881 if (INSN_P (next) && dead_or_set_p (next, dest))
883 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
884 if (XEXP (link, 0) == insn)
889 result = find_single_use_1 (dest, &PATTERN (next));
899 /* Return 1 if OP is a valid general operand for machine mode MODE.
900 This is either a register reference, a memory reference,
901 or a constant. In the case of a memory reference, the address
902 is checked for general validity for the target machine.
904 Register and memory references must have mode MODE in order to be valid,
905 but some constants have no machine mode and are valid for any mode.
907 If MODE is VOIDmode, OP is checked for validity for whatever mode
910 The main use of this function is as a predicate in match_operand
911 expressions in the machine description.
913 For an explanation of this function's behavior for registers of
914 class NO_REGS, see the comment for `register_operand'. */
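/* Illustrative sketch, not part of the original file: in a machine
   description this predicate typically appears as

       (match_operand:SI 1 "general_operand" "g")

   which, for SImode, accepts forms such as (reg:SI N), (mem:SI ADDR)
   with a valid ADDR, and constants valid for SImode.  */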
917 general_operand (op, mode)
919 enum machine_mode mode;
921 enum rtx_code code = GET_CODE (op);
923 if (mode == VOIDmode)
924 mode = GET_MODE (op);
926 /* Don't accept CONST_INT or anything similar
927 if the caller wants something floating. */
928 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
929 && GET_MODE_CLASS (mode) != MODE_INT
930 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
933 if (GET_CODE (op) == CONST_INT
935 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
939 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
941 #ifdef LEGITIMATE_PIC_OPERAND_P
942 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
944 && LEGITIMATE_CONSTANT_P (op));
946 /* Except for certain constants with VOIDmode, already checked for,
947 OP's mode must match MODE if MODE specifies a mode. */
949 if (GET_MODE (op) != mode)
954 rtx sub = SUBREG_REG (op);
956 #ifdef INSN_SCHEDULING
957 /* On machines that have insn scheduling, we want all memory
958 references to be explicit, so outlaw paradoxical SUBREGs. */
959 if (GET_CODE (sub) == MEM
960 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
963 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
964 may result in an incorrect reference. We should simplify all valid
965 subregs of MEM anyway. But allow this after reload because we
966 might be called from cleanup_subreg_operands.
968 ??? This is a kludge. */
969 if (!reload_completed && SUBREG_BYTE (op) != 0
970 && GET_CODE (sub) == MEM)
973 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
974 create such rtl, and we must reject it. */
975 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
976 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
980 code = GET_CODE (op);
984 /* A register whose class is NO_REGS is not a general operand. */
985 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
986 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
990 rtx y = XEXP (op, 0);
992 if (! volatile_ok && MEM_VOLATILE_P (op))
995 if (GET_CODE (y) == ADDRESSOF)
998 /* Use the mem's mode, since it will be reloaded thus. */
999 mode = GET_MODE (op);
1000 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
1003 /* Pretend this is an operand for now; we'll run force_operand
1004 on its replacement in fixup_var_refs_1. */
1005 if (code == ADDRESSOF)
1014 /* Return 1 if OP is a valid memory address for a memory reference
1017 The main use of this function is as a predicate in match_operand
1018 expressions in the machine description. */
1021 address_operand (op, mode)
1023 enum machine_mode mode;
1025 return memory_address_p (mode, op);
1028 /* Return 1 if OP is a register reference of mode MODE.
1029 If MODE is VOIDmode, accept a register in any mode.
1031 The main use of this function is as a predicate in match_operand
1032 expressions in the machine description.
1034 As a special exception, registers whose class is NO_REGS are
1035 not accepted by `register_operand'. The reason for this change
1036 is to allow the representation of special architecture artifacts
1037 (such as a condition code register) without extending the rtl
1038 definitions. Since registers of class NO_REGS cannot be used
1039 as registers in any case where register classes are examined,
1040 it is most consistent to keep this function from accepting them. */
1043 register_operand (op, mode)
1045 enum machine_mode mode;
1047 if (GET_MODE (op) != mode && mode != VOIDmode)
1050 if (GET_CODE (op) == SUBREG)
1052 rtx sub = SUBREG_REG (op);
1054 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1055 because it is guaranteed to be reloaded into one.
1056 Just make sure the MEM is valid in itself.
1057 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1058 but currently it does result from (SUBREG (REG)...) where the
1059 reg went on the stack.) */
1060 if (! reload_completed && GET_CODE (sub) == MEM)
1061 return general_operand (op, mode);
1063 #ifdef CANNOT_CHANGE_MODE_CLASS
1064 if (GET_CODE (sub) == REG
1065 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1066 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1067 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1068 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
1072 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1073 create such rtl, and we must reject it. */
1074 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1075 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1081 /* If we have an ADDRESSOF, consider it valid since it will be
1082 converted into something that will not be a MEM. */
1083 if (GET_CODE (op) == ADDRESSOF)
1086 /* We don't consider registers whose class is NO_REGS
1087 to be a register operand. */
1088 return (GET_CODE (op) == REG
1089 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1090 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1093 /* Return 1 for a register in Pmode; ignore the tested mode. */
1096 pmode_register_operand (op, mode)
1098 enum machine_mode mode ATTRIBUTE_UNUSED;
1100 return register_operand (op, Pmode);
1103 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1104 or a hard register. */
1107 scratch_operand (op, mode)
1109 enum machine_mode mode;
1111 if (GET_MODE (op) != mode && mode != VOIDmode)
1114 return (GET_CODE (op) == SCRATCH
1115 || (GET_CODE (op) == REG
1116 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1119 /* Return 1 if OP is a valid immediate operand for mode MODE.
1121 The main use of this function is as a predicate in match_operand
1122 expressions in the machine description. */
1125 immediate_operand (op, mode)
1127 enum machine_mode mode;
1129 /* Don't accept CONST_INT or anything similar
1130 if the caller wants something floating. */
1131 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1132 && GET_MODE_CLASS (mode) != MODE_INT
1133 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1136 if (GET_CODE (op) == CONST_INT
1138 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1141 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1142 result in 0/1. It seems a safe assumption that this is
1143 in range for everyone. */
1144 if (GET_CODE (op) == CONSTANT_P_RTX)
1147 return (CONSTANT_P (op)
1148 && (GET_MODE (op) == mode || mode == VOIDmode
1149 || GET_MODE (op) == VOIDmode)
1150 #ifdef LEGITIMATE_PIC_OPERAND_P
1151 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1153 && LEGITIMATE_CONSTANT_P (op));
1156 /* Returns 1 if OP is an operand that is a CONST_INT. */
1159 const_int_operand (op, mode)
1161 enum machine_mode mode;
1163 if (GET_CODE (op) != CONST_INT)
1166 if (mode != VOIDmode
1167 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1173 /* Returns 1 if OP is an operand that is a constant integer or constant
1174 floating-point number. */
1177 const_double_operand (op, mode)
1179 enum machine_mode mode;
1181 /* Don't accept CONST_INT or anything similar
1182 if the caller wants something floating. */
1183 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1184 && GET_MODE_CLASS (mode) != MODE_INT
1185 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1188 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1189 && (mode == VOIDmode || GET_MODE (op) == mode
1190 || GET_MODE (op) == VOIDmode));
1193 /* Return 1 if OP is a general operand that is not an immediate operand. */
1196 nonimmediate_operand (op, mode)
1198 enum machine_mode mode;
1200 return (general_operand (op, mode) && ! CONSTANT_P (op));
1203 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1206 nonmemory_operand (op, mode)
1208 enum machine_mode mode;
1210 if (CONSTANT_P (op))
1212 /* Don't accept CONST_INT or anything similar
1213 if the caller wants something floating. */
1214 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1215 && GET_MODE_CLASS (mode) != MODE_INT
1216 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1219 if (GET_CODE (op) == CONST_INT
1221 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1224 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1225 || mode == VOIDmode)
1226 #ifdef LEGITIMATE_PIC_OPERAND_P
1227 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1229 && LEGITIMATE_CONSTANT_P (op));
1232 if (GET_MODE (op) != mode && mode != VOIDmode)
1235 if (GET_CODE (op) == SUBREG)
1237 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1238 because it is guaranteed to be reloaded into one.
1239 Just make sure the MEM is valid in itself.
1240 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1241 but currently it does result from (SUBREG (REG)...) where the
1242 reg went on the stack.) */
1243 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1244 return general_operand (op, mode);
1245 op = SUBREG_REG (op);
1248 /* We don't consider registers whose class is NO_REGS
1249 to be a register operand. */
1250 return (GET_CODE (op) == REG
1251 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1252 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1255 /* Return 1 if OP is a valid operand that stands for pushing a
1256 value of mode MODE onto the stack.
1258 The main use of this function is as a predicate in match_operand
1259 expressions in the machine description. */
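/* Illustrative sketch, not part of the original file: on a machine whose
   stack grows downward this accepts, for example,

       (mem:SI (pre_dec:SI (reg:SI SP)))

   and, when PUSH_ROUNDING pads the pushed size, the corresponding
   (pre_modify ...) form checked below.  SP stands for the stack pointer
   register number.  */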
1262 push_operand (op, mode)
1264 enum machine_mode mode;
1266 unsigned int rounded_size = GET_MODE_SIZE (mode);
1268 #ifdef PUSH_ROUNDING
1269 rounded_size = PUSH_ROUNDING (rounded_size);
1272 if (GET_CODE (op) != MEM)
1275 if (mode != VOIDmode && GET_MODE (op) != mode)
1280 if (rounded_size == GET_MODE_SIZE (mode))
1282 if (GET_CODE (op) != STACK_PUSH_CODE)
1287 if (GET_CODE (op) != PRE_MODIFY
1288 || GET_CODE (XEXP (op, 1)) != PLUS
1289 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1290 || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
1291 #ifdef STACK_GROWS_DOWNWARD
1292 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1294 || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
1300 return XEXP (op, 0) == stack_pointer_rtx;
1303 /* Return 1 if OP is a valid operand that stands for popping a
1304 value of mode MODE off the stack.
1306 The main use of this function is as a predicate in match_operand
1307 expressions in the machine description. */
1310 pop_operand (op, mode)
1312 enum machine_mode mode;
1314 if (GET_CODE (op) != MEM)
1317 if (mode != VOIDmode && GET_MODE (op) != mode)
1322 if (GET_CODE (op) != STACK_POP_CODE)
1325 return XEXP (op, 0) == stack_pointer_rtx;
1328 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1331 memory_address_p (mode, addr)
1332 enum machine_mode mode ATTRIBUTE_UNUSED;
1335 if (GET_CODE (addr) == ADDRESSOF)
1338 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1345 /* Return 1 if OP is a valid memory reference with mode MODE,
1346 including a valid address.
1348 The main use of this function is as a predicate in match_operand
1349 expressions in the machine description. */
1352 memory_operand (op, mode)
1354 enum machine_mode mode;
1358 if (! reload_completed)
1359 /* Note that no SUBREG is a memory operand before end of reload pass,
1360 because (SUBREG (MEM...)) forces reloading into a register. */
1361 return GET_CODE (op) == MEM && general_operand (op, mode);
1363 if (mode != VOIDmode && GET_MODE (op) != mode)
1367 if (GET_CODE (inner) == SUBREG)
1368 inner = SUBREG_REG (inner);
1370 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1373 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1374 that is, a memory reference whose address is a general_operand. */
1377 indirect_operand (op, mode)
1379 enum machine_mode mode;
1381 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1382 if (! reload_completed
1383 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1385 int offset = SUBREG_BYTE (op);
1386 rtx inner = SUBREG_REG (op);
1388 if (mode != VOIDmode && GET_MODE (op) != mode)
1391 /* The only way that we can have a general_operand as the resulting
1392 address is if OFFSET is zero and the address already is an operand
1393 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1396 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1397 || (GET_CODE (XEXP (inner, 0)) == PLUS
1398 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1399 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1400 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1403 return (GET_CODE (op) == MEM
1404 && memory_operand (op, mode)
1405 && general_operand (XEXP (op, 0), Pmode));
1408 /* Return 1 if this is a comparison operator. This allows the use of
1409 MATCH_OPERATOR to recognize all the branch insns. */
1412 comparison_operator (op, mode)
1414 enum machine_mode mode;
1416 return ((mode == VOIDmode || GET_MODE (op) == mode)
1417 && GET_RTX_CLASS (GET_CODE (op)) == '<');
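/* Illustrative sketch, not part of the original file: in a machine
   description this is the usual predicate for branch patterns, e.g.

       (match_operator 0 "comparison_operator"
         [(reg:CC CC_REGNUM) (const_int 0)])

   where CC_REGNUM is a hypothetical condition-code register number.  */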
1420 /* If BODY is an insn body that uses ASM_OPERANDS,
1421 return the number of operands (both input and output) in the insn.
1422 Otherwise return -1. */
1425 asm_noperands (body)
1428 switch (GET_CODE (body))
1431 /* No output operands: return number of input operands. */
1432 return ASM_OPERANDS_INPUT_LENGTH (body);
1434 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1435 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1436 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1440 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1441 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1443 /* Multiple output operands, or 1 output plus some clobbers:
1444 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1448 /* Count backwards through CLOBBERs to determine number of SETs. */
1449 for (i = XVECLEN (body, 0); i > 0; i--)
1451 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1453 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1457 /* N_SETS is now number of output operands. */
1460 /* Verify that all the SETs we have
1461 came from a single original asm_operands insn
1462 (so that invalid combinations are blocked). */
1463 for (i = 0; i < n_sets; i++)
1465 rtx elt = XVECEXP (body, 0, i);
1466 if (GET_CODE (elt) != SET)
1468 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1470 /* If these ASM_OPERANDS rtx's came from different original insns
1471 then they aren't allowed together. */
1472 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1473 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1476 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1479 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1481 /* 0 outputs, but some clobbers:
1482 body is [(asm_operands ...) (clobber (reg ...))...]. */
1485 /* Make sure all the other parallel things really are clobbers. */
1486 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1487 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1490 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1499 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1500 copy its operands (both input and output) into the vector OPERANDS,
1501 the locations of the operands within the insn into the vector OPERAND_LOCS,
1502 and the constraints for the operands into CONSTRAINTS.
1503 Write the modes of the operands into MODES.
1504 Return the assembler-template.
1506 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1507 we don't store that info. */
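/* Illustrative sketch, not part of the original file: a typical caller
   sizes the output vectors with asm_noperands first, as
   check_asm_operands above does, e.g.

       int n = asm_noperands (body);
       rtx *ops = (rtx *) alloca (n * sizeof (rtx));
       const char **cons = (const char **) alloca (n * sizeof (char *));
       decode_asm_operands (body, ops, NULL, cons, NULL);

   BODY is a hypothetical asm insn body.  */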
1510 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1514 const char **constraints;
1515 enum machine_mode *modes;
1519 const char *template = 0;
1521 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1523 rtx asmop = SET_SRC (body);
1524 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1526 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1528 for (i = 1; i < noperands; i++)
1531 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1533 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1535 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1537 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1540 /* The output is in the SET.
1541 Its constraint is in the ASM_OPERANDS itself. */
1543 operands[0] = SET_DEST (body);
1545 operand_locs[0] = &SET_DEST (body);
1547 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1549 modes[0] = GET_MODE (SET_DEST (body));
1550 template = ASM_OPERANDS_TEMPLATE (asmop);
1552 else if (GET_CODE (body) == ASM_OPERANDS)
1555 /* No output operands: BODY is (asm_operands ....). */
1557 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1559 /* The input operands are found in the 1st element vector. */
1560 /* Constraints for inputs are in the 2nd element vector. */
1561 for (i = 0; i < noperands; i++)
1564 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1566 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1568 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1570 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1572 template = ASM_OPERANDS_TEMPLATE (asmop);
1574 else if (GET_CODE (body) == PARALLEL
1575 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1576 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1578 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1579 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1580 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1581 int nout = 0; /* Does not include CLOBBERs. */
1583 /* At least one output, plus some CLOBBERs. */
1585 /* The outputs are in the SETs.
1586 Their constraints are in the ASM_OPERANDS itself. */
1587 for (i = 0; i < nparallel; i++)
1589 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1590 break; /* Past last SET */
1593 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1595 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1597 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1599 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1603 for (i = 0; i < nin; i++)
1606 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1608 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1610 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1612 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1615 template = ASM_OPERANDS_TEMPLATE (asmop);
1617 else if (GET_CODE (body) == PARALLEL
1618 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1620 /* No outputs, but some CLOBBERs. */
1622 rtx asmop = XVECEXP (body, 0, 0);
1623 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1625 for (i = 0; i < nin; i++)
1628 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1630 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1632 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1634 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1637 template = ASM_OPERANDS_TEMPLATE (asmop);
1643 /* Check if an asm_operand matches its constraints.
1644 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1647 asm_operand_ok (op, constraint)
1649 const char *constraint;
1653 /* Use constrain_operands after reload. */
1654 if (reload_completed)
1659 char c = *constraint;
1676 case '0': case '1': case '2': case '3': case '4':
1677 case '5': case '6': case '7': case '8': case '9':
1678 /* For best results, our caller should have given us the
1679 proper matching constraint, but we can't actually fail
1680 the check if they didn't. Indicate that results are
1684 while (ISDIGIT (*constraint));
1690 if (address_operand (op, VOIDmode))
1695 case 'V': /* non-offsettable */
1696 if (memory_operand (op, VOIDmode))
1700 case 'o': /* offsettable */
1701 if (offsettable_nonstrict_memref_p (op))
1706 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1707 excepting those that expand_call created. Further, on some
1708 machines which do not have generalized auto inc/dec, an inc/dec
1709 is not a memory_operand.
1711 Match any memory and hope things are resolved after reload. */
1713 if (GET_CODE (op) == MEM
1715 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1716 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1721 if (GET_CODE (op) == MEM
1723 || GET_CODE (XEXP (op, 0)) == PRE_INC
1724 || GET_CODE (XEXP (op, 0)) == POST_INC))
1730 if (GET_CODE (op) == CONST_DOUBLE
1731 || (GET_CODE (op) == CONST_VECTOR
1732 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1737 if (GET_CODE (op) == CONST_DOUBLE
1738 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
1742 if (GET_CODE (op) == CONST_DOUBLE
1743 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
1748 if (GET_CODE (op) == CONST_INT
1749 || (GET_CODE (op) == CONST_DOUBLE
1750 && GET_MODE (op) == VOIDmode))
1756 #ifdef LEGITIMATE_PIC_OPERAND_P
1757 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1764 if (GET_CODE (op) == CONST_INT
1765 || (GET_CODE (op) == CONST_DOUBLE
1766 && GET_MODE (op) == VOIDmode))
1771 if (GET_CODE (op) == CONST_INT
1772 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
1776 if (GET_CODE (op) == CONST_INT
1777 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
1781 if (GET_CODE (op) == CONST_INT
1782 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
1786 if (GET_CODE (op) == CONST_INT
1787 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
1791 if (GET_CODE (op) == CONST_INT
1792 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
1796 if (GET_CODE (op) == CONST_INT
1797 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
1801 if (GET_CODE (op) == CONST_INT
1802 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
1806 if (GET_CODE (op) == CONST_INT
1807 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
1815 if (general_operand (op, VOIDmode))
1820 /* For all other letters, we first check for a register class,
1821 otherwise it is an EXTRA_CONSTRAINT. */
1822 if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1825 if (GET_MODE (op) == BLKmode)
1827 if (register_operand (op, VOIDmode))
1830 #ifdef EXTRA_CONSTRAINT_STR
1831 if (EXTRA_CONSTRAINT_STR (op, c, constraint))
1833 if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
1835 /* Every memory operand can be reloaded to fit. */
1836 if (memory_operand (op, VOIDmode))
1839 if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
1841 /* Every address operand can be reloaded to fit. */
1842 if (address_operand (op, VOIDmode))
1848 len = CONSTRAINT_LEN (c, constraint);
1851 while (--len && *constraint);
1859 /* Given an rtx *P, if it is a sum containing an integer constant term,
1860 return the location (type rtx *) of the pointer to that constant term.
1861 Otherwise, return a null pointer. */
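/* Illustrative sketch, not part of the original file: given an address
   such as (plus (reg X) (const_int 4)), a caller can locate and
   temporarily rewrite the constant term, e.g.

       rtx *loc = find_constant_term_loc (&addr);
       if (loc != 0)
         ...  *LOC is the (const_int 4) term of ADDR  ...

   ADDR is hypothetical; offsettable_address_p below uses this to test
   the address with the maximum offset added.  */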
1864 find_constant_term_loc (p)
1868 enum rtx_code code = GET_CODE (*p);
1870 /* If *P IS such a constant term, P is its location. */
1872 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1876 /* Otherwise, if not a sum, it has no constant term. */
1878 if (GET_CODE (*p) != PLUS)
1881 /* If one of the summands is constant, return its location. */
1883 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1884 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1887 /* Otherwise, check each summand for containing a constant term. */
1889 if (XEXP (*p, 0) != 0)
1891 tem = find_constant_term_loc (&XEXP (*p, 0));
1896 if (XEXP (*p, 1) != 0)
1898 tem = find_constant_term_loc (&XEXP (*p, 1));
1906 /* Return 1 if OP is a memory reference
1907 whose address contains no side effects
1908 and remains valid after the addition
1909 of a positive integer less than the
1910 size of the object being referenced.
1912 We assume that the original address is valid and do not check it.
1914 This uses strict_memory_address_p as a subroutine, so
1915 don't use it before reload. */
1918 offsettable_memref_p (op)
1921 return ((GET_CODE (op) == MEM)
1922 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1925 /* Similar, but don't require a strictly valid mem ref:
1926 consider pseudo-regs valid as index or base regs. */
1929 offsettable_nonstrict_memref_p (op)
1932 return ((GET_CODE (op) == MEM)
1933 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1936 /* Return 1 if Y is a memory address which contains no side effects
1937 and would remain valid after the addition of a positive integer
1938 less than the size of that mode.
1940 We assume that the original address is valid and do not check it.
1941 We do check that it is valid for narrower modes.
1943 If STRICTP is nonzero, we require a strictly valid address,
1944 for the sake of use in reload.c. */
1947 offsettable_address_p (strictp, mode, y)
1949 enum machine_mode mode;
1952 enum rtx_code ycode = GET_CODE (y);
1956 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1957 (strictp ? strict_memory_address_p : memory_address_p);
1958 unsigned int mode_sz = GET_MODE_SIZE (mode);
1960 if (CONSTANT_ADDRESS_P (y))
1963 /* Adjusting an offsettable address involves changing to a narrower mode.
1964 Make sure that's OK. */
1966 if (mode_dependent_address_p (y))
1969 /* ??? How much offset does an offsettable BLKmode reference need?
1970 Clearly that depends on the situation in which it's being used.
1971 However, the current situation in which we test 0xffffffff is
1972 less than ideal. Caveat user. */
1974 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1976 /* If the expression contains a constant term,
1977 see if it remains valid when max possible offset is added. */
1979 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1984 *y2 = plus_constant (*y2, mode_sz - 1);
1985 /* Use QImode because an odd displacement may be automatically invalid
1986 for any wider mode. But it should be valid for a single byte. */
1987 good = (*addressp) (QImode, y);
1989 /* In any case, restore old contents of memory. */
1994 if (GET_RTX_CLASS (ycode) == 'a')
1997 /* The offset added here is chosen as the maximum offset that
1998 any instruction could need to add when operating on something
1999 of the specified mode. We assume that if Y and Y+c are
2000 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2001 go inside a LO_SUM here, so we do so as well. */
2002 if (GET_CODE (y) == LO_SUM
2004 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2005 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
2006 plus_constant (XEXP (y, 1), mode_sz - 1));
2008 z = plus_constant (y, mode_sz - 1);
2010 /* Use QImode because an odd displacement may be automatically invalid
2011 for any wider mode. But it should be valid for a single byte. */
2012 return (*addressp) (QImode, z);
2015 /* Return 1 if ADDR is an address-expression whose effect depends
2016 on the mode of the memory reference it is used in.
2018 Autoincrement addressing is a typical example of mode-dependence
2019 because the amount of the increment depends on the mode. */
2022 mode_dependent_address_p (addr)
2023 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
2025 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2027 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2028 win: ATTRIBUTE_UNUSED_LABEL
2032 /* Like extract_insn, but save the insn extracted and don't extract again when
2033 called again for the same insn, expecting that recog_data still contains
2034 valid information. This is used primarily by the gen_attr infrastructure,
2035 which often extracts the same insn again and again.
2037 extract_insn_cached (insn)
2040 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2042 extract_insn (insn);
2043 recog_data.insn = insn;
2045 /* Do cached extract_insn, constrain_operands and complain about failures.
2046 Used by insn_attrtab. */
2048 extract_constrain_insn_cached (insn)
2051 extract_insn_cached (insn);
2052 if (which_alternative == -1
2053 && !constrain_operands (reload_completed))
2054 fatal_insn_not_found (insn);
2056 /* Do cached constrain_operands and complain about failures. */
2058 constrain_operands_cached (strict)
2061 if (which_alternative == -1)
2062 return constrain_operands (strict);
2067 /* Analyze INSN and fill in recog_data. */
2076 rtx body = PATTERN (insn);
2078 recog_data.insn = NULL;
2079 recog_data.n_operands = 0;
2080 recog_data.n_alternatives = 0;
2081 recog_data.n_dups = 0;
2082 which_alternative = -1;
2084 switch (GET_CODE (body))
2094 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2099 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2100 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2101 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2107 recog_data.n_operands = noperands = asm_noperands (body);
2110 /* This insn is an `asm' with operands. */
2112 /* expand_asm_operands makes sure there aren't too many operands. */
2113 if (noperands > MAX_RECOG_OPERANDS)
2116 /* Now get the operand values and constraints out of the insn. */
2117 decode_asm_operands (body, recog_data.operand,
2118 recog_data.operand_loc,
2119 recog_data.constraints,
2120 recog_data.operand_mode);
2123 const char *p = recog_data.constraints[0];
2124 recog_data.n_alternatives = 1;
2126 recog_data.n_alternatives += (*p++ == ',');
2130 fatal_insn_not_found (insn);
2134 /* Ordinary insn: recognize it, get the operands via insn_extract
2135 and get the constraints. */
2137 icode = recog_memoized (insn);
2139 fatal_insn_not_found (insn);
2141 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2142 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2143 recog_data.n_dups = insn_data[icode].n_dups;
2145 insn_extract (insn);
2147 for (i = 0; i < noperands; i++)
2149 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2150 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2151 /* VOIDmode match_operands get their mode from the real operand. */
2152 if (recog_data.operand_mode[i] == VOIDmode)
2153 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2156 for (i = 0; i < noperands; i++)
2157 recog_data.operand_type[i]
2158 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2159 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2162 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2166 /* After calling extract_insn, you can use this function to extract some
2167 information from the constraint strings into a more usable form.
2168 The collected data is stored in recog_op_alt. */
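/* Illustrative sketch, not part of the original file: after
   extract_insn (insn) and preprocess_constraints (), a pass can consult
   the parsed constraint data, e.g.

       if (recog_op_alt[opno][alt].memory_ok)
         ...  operand OPNO allows a memory operand in alternative ALT  ...

   OPNO and ALT are hypothetical indices bounded by
   recog_data.n_operands and recog_data.n_alternatives.  */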
2170 preprocess_constraints ()
2174 memset (recog_op_alt, 0, sizeof recog_op_alt);
2175 for (i = 0; i < recog_data.n_operands; i++)
2178 struct operand_alternative *op_alt;
2179 const char *p = recog_data.constraints[i];
2181 op_alt = recog_op_alt[i];
2183 for (j = 0; j < recog_data.n_alternatives; j++)
2185 op_alt[j].class = NO_REGS;
2186 op_alt[j].constraint = p;
2187 op_alt[j].matches = -1;
2188 op_alt[j].matched = -1;
2190 if (*p == '\0' || *p == ',')
2192 op_alt[j].anything_ok = 1;
2202 while (c != ',' && c != '\0');
2203 if (c == ',' || c == '\0')
2211 case '=': case '+': case '*': case '%':
2212 case 'E': case 'F': case 'G': case 'H':
2213 case 's': case 'i': case 'n':
2214 case 'I': case 'J': case 'K': case 'L':
2215 case 'M': case 'N': case 'O': case 'P':
2216 /* These don't say anything we care about. */
2220 op_alt[j].reject += 6;
2223 op_alt[j].reject += 600;
2226 op_alt[j].earlyclobber = 1;
2229 case '0': case '1': case '2': case '3': case '4':
2230 case '5': case '6': case '7': case '8': case '9':
2233 op_alt[j].matches = strtoul (p, &end, 10);
2234 recog_op_alt[op_alt[j].matches][j].matched = i;
2240 op_alt[j].memory_ok = 1;
2243 op_alt[j].decmem_ok = 1;
2246 op_alt[j].incmem_ok = 1;
2249 op_alt[j].nonoffmem_ok = 1;
2252 op_alt[j].offmem_ok = 1;
2255 op_alt[j].anything_ok = 1;
2259 op_alt[j].is_address = 1;
2260 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2261 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2265 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2269 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2271 op_alt[j].memory_ok = 1;
2274 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2276 op_alt[j].is_address = 1;
2278 = (reg_class_subunion
2279 [(int) op_alt[j].class]
2280 [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
2285 = (reg_class_subunion
2286 [(int) op_alt[j].class]
2287 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2290 p += CONSTRAINT_LEN (c, p);
2296 /* Check the operands of an insn against the insn's operand constraints
2297 and return 1 if they are valid.
2298 The information about the insn's operands, constraints, operand modes
2299 etc. is obtained from the global variables set up by extract_insn.
2301 WHICH_ALTERNATIVE is set to a number which indicates which
2302 alternative of constraints was matched: 0 for the first alternative,
2303 1 for the next, etc.
2305 In addition, when two operands are required to match
2306 and it happens that the output operand is (reg) while the
2307 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2308 make the output operand look like the input.
2309 This is because the output operand is the one the template will print.
2311 This is used in final, just before printing the assembler code and by
2312 the routines that determine an insn's attributes.
2314 If STRICT is a positive nonzero value, it means that we have been
2315 called after reload has been completed. In that case, we must
2316 do all checks strictly. If it is zero, it means that we have been called
2317 before reload has completed. In that case, we first try to see if we can
2318 find an alternative that matches strictly. If not, we try again, this
2319 time assuming that reload will fix up the insn. This provides a "best
2320 guess" for the alternative and is used to compute attributes of insns prior
2321 to reload. A negative value of STRICT is used for this internal call. */
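/* Illustrative sketch, not part of the original file: final-stage users
   typically combine extraction and constraint checking as

       extract_insn (insn);
       if (! constrain_operands (reload_completed))
         fatal_insn_not_found (insn);

   mirroring extract_constrain_insn_cached above.  */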
2329 constrain_operands (strict)
2332 const char *constraints[MAX_RECOG_OPERANDS];
2333 int matching_operands[MAX_RECOG_OPERANDS];
2334 int earlyclobber[MAX_RECOG_OPERANDS];
2337 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2338 int funny_match_index;
2340 which_alternative = 0;
2341 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2344 for (c = 0; c < recog_data.n_operands; c++)
2346 constraints[c] = recog_data.constraints[c];
2347 matching_operands[c] = -1;
2354 funny_match_index = 0;
2356 for (opno = 0; opno < recog_data.n_operands; opno++)
2358 rtx op = recog_data.operand[opno];
2359 enum machine_mode mode = GET_MODE (op);
2360 const char *p = constraints[opno];
2366 earlyclobber[opno] = 0;
2368 /* A unary operator may be accepted by the predicate, but it
2369 is irrelevant for matching constraints. */
2370 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2373 if (GET_CODE (op) == SUBREG)
2375 if (GET_CODE (SUBREG_REG (op)) == REG
2376 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2377 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2378 GET_MODE (SUBREG_REG (op)),
2381 op = SUBREG_REG (op);
2384 /* An empty constraint or empty alternative
2385 allows anything which matched the pattern. */
2386 if (*p == 0 || *p == ',')
2390 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2399 case '?': case '!': case '*': case '%':
2404 /* Ignore rest of this alternative as far as
2405 constraint checking is concerned. */
2408 while (*p && *p != ',');
2413 earlyclobber[opno] = 1;
2416 case '0': case '1': case '2': case '3': case '4':
2417 case '5': case '6': case '7': case '8': case '9':
2419 /* This operand must be the same as a previous one.
2420 This kind of constraint is used for instructions such
2421 as add when they take only two operands.
2423 Note that the lower-numbered operand is passed first.
2425 If we are not testing strictly, assume that this
2426 constraint will be satisfied. */
2431 match = strtoul (p, &end, 10);
2438 rtx op1 = recog_data.operand[match];
2439 rtx op2 = recog_data.operand[opno];
2441 /* A unary operator may be accepted by the predicate,
2442 but it is irrelevant for matching constraints. */
2443 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2444 op1 = XEXP (op1, 0);
2445 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2446 op2 = XEXP (op2, 0);
2448 val = operands_match_p (op1, op2);
2451 matching_operands[opno] = match;
2452 matching_operands[match] = opno;
2457 /* If output is *x and input is *--x, arrange later
2458 to change the output to *--x as well, since the
2459 output op is the one that will be printed. */
2460 if (val == 2 && strict > 0)
2462 funny_match[funny_match_index].this = opno;
2463 funny_match[funny_match_index++].other = match;
2470 /* p is used for address_operands. When we are called by
2471 gen_reload, no one will have checked that the address is
2472 strictly valid, i.e., that all pseudos requiring hard regs
2473 have gotten them. */
2475 || (strict_memory_address_p (recog_data.operand_mode[opno],
2480 /* No need to check general_operand again;
2481 it was done in insn-recog.c. */
2483 /* Anything goes unless it is a REG and really has a hard reg
2484 but the hard reg is not in the class GENERAL_REGS. */
2486 || GENERAL_REGS == ALL_REGS
2487 || GET_CODE (op) != REG
2488 || (reload_in_progress
2489 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2490 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2495 /* This is used for a MATCH_SCRATCH in the cases when
2496 we don't actually need anything.  So anything goes any time.  */
2502 if (GET_CODE (op) == MEM
2503 /* Before reload, accept what reload can turn into mem. */
2504 || (strict < 0 && CONSTANT_P (op))
2505 /* During reload, accept a pseudo */
2506 || (reload_in_progress && GET_CODE (op) == REG
2507 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2512 if (GET_CODE (op) == MEM
2513 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2514 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2519 if (GET_CODE (op) == MEM
2520 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2521 || GET_CODE (XEXP (op, 0)) == POST_INC))
2527 if (GET_CODE (op) == CONST_DOUBLE
2528 || (GET_CODE (op) == CONST_VECTOR
2529 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2535 if (GET_CODE (op) == CONST_DOUBLE
2536 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2541 if (GET_CODE (op) == CONST_INT
2542 || (GET_CODE (op) == CONST_DOUBLE
2543 && GET_MODE (op) == VOIDmode))
2546 if (CONSTANT_P (op))
2551 if (GET_CODE (op) == CONST_INT
2552 || (GET_CODE (op) == CONST_DOUBLE
2553 && GET_MODE (op) == VOIDmode))
2565 if (GET_CODE (op) == CONST_INT
2566 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2571 if (GET_CODE (op) == MEM
2572 && ((strict > 0 && ! offsettable_memref_p (op))
2574 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2575 || (reload_in_progress
2576 && !(GET_CODE (op) == REG
2577 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2582 if ((strict > 0 && offsettable_memref_p (op))
2583 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2584 /* Before reload, accept what reload can handle. */
2586 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2587 /* During reload, accept a pseudo */
2588 || (reload_in_progress && GET_CODE (op) == REG
2589 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2595 enum reg_class class;
2598 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2599 if (class != NO_REGS)
2603 && GET_CODE (op) == REG
2604 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2605 || (strict == 0 && GET_CODE (op) == SCRATCH)
2606 || (GET_CODE (op) == REG
2607 && reg_fits_class_p (op, class, offset, mode)))
2610 #ifdef EXTRA_CONSTRAINT_STR
2611 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2614 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2616 /* Every memory operand can be reloaded to fit. */
2617 if (strict < 0 && GET_CODE (op) == MEM)
2620 /* Before reload, accept what reload can turn into mem. */
2621 if (strict < 0 && CONSTANT_P (op))
2624 /* During reload, accept a pseudo */
2625 if (reload_in_progress && GET_CODE (op) == REG
2626 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2629 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2631 /* Every address operand can be reloaded to fit. */
2639 while (p += len, c);
2641 constraints[opno] = p;
2642 /* If this operand did not win somehow,
2643 this alternative loses. */
2647 /* This alternative won; the operands are ok.
2648 Change whichever operands this alternative says to change. */
2653 /* See if any earlyclobber operand conflicts with some other operand.  */
2657 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2658 /* Ignore earlyclobber operands now in memory,
2659 because we would often report failure when we have
2660 two memory operands, one of which was formerly a REG. */
2661 if (earlyclobber[eopno]
2662 && GET_CODE (recog_data.operand[eopno]) == REG)
2663 for (opno = 0; opno < recog_data.n_operands; opno++)
2664 if ((GET_CODE (recog_data.operand[opno]) == MEM
2665 || recog_data.operand_type[opno] != OP_OUT)
2667 /* Ignore things like match_operator operands. */
2668 && *recog_data.constraints[opno] != 0
2669 && ! (matching_operands[opno] == eopno
2670 && operands_match_p (recog_data.operand[opno],
2671 recog_data.operand[eopno]))
2672 && ! safe_from_earlyclobber (recog_data.operand[opno],
2673 recog_data.operand[eopno]))
2678 while (--funny_match_index >= 0)
2680 recog_data.operand[funny_match[funny_match_index].other]
2681 = recog_data.operand[funny_match[funny_match_index].this];
2688 which_alternative++;
2690 while (which_alternative < recog_data.n_alternatives);
2692 which_alternative = -1;
2693 /* If we are about to reject this, but we are not to test strictly,
2694 try a very loose test. Only return failure if it fails also. */
2696 return constrain_operands (-1);
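
/* A minimal illustrative sketch (hypothetical helper, not a routine of
   this file): how a strict, post-reload caller would typically use the
   interface documented above: extract the operands, run the strict
   constraint check, and read the matched alternative from
   which_alternative (or get -1 back on failure).  */

static int example_matched_alternative PARAMS ((rtx)) ATTRIBUTE_UNUSED;

static int
example_matched_alternative (insn)
     rtx insn;
{
  extract_insn (insn);

  /* STRICT == 1: reload has completed, so check constraints strictly.  */
  if (! constrain_operands (1))
    return -1;

  return which_alternative;
}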
2701 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2702 is a hard reg in class CLASS when its regno is offset by OFFSET
2703 and changed to mode MODE.
2704 If REG occupies multiple hard regs, all of them must be in CLASS. */
2707 reg_fits_class_p (operand, class, offset, mode)
2709 enum reg_class class;
2711 enum machine_mode mode;
2713 int regno = REGNO (operand);
2714 if (regno < FIRST_PSEUDO_REGISTER
2715 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2720 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2722 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2731 /* Split single instruction. Helper function for split_all_insns.
2732 Return last insn in the sequence if successful, or NULL if unsuccessful. */
2740 /* Don't split no-op move insns. These should silently
2741 disappear later in final. Splitting such insns would
2742 break the code that handles REG_NO_CONFLICT blocks. */
2744 else if ((set = single_set (insn)) != NULL && set_noop_p (set))
2746 /* Nops get in the way while scheduling, so delete them
2747 now if register allocation has already been done. It
2748 is too risky to try to do this before register
2749 allocation, and there are unlikely to be very many
2750 nops then anyway.  */
2751 if (reload_completed)
2752 delete_insn_and_edges (insn);
2756 /* Split insns here to get max fine-grain parallelism. */
2757 rtx first = PREV_INSN (insn);
2758 rtx last = try_split (PATTERN (insn), insn, 1);
2762 /* try_split returns the NOTE that INSN became. */
2763 PUT_CODE (insn, NOTE);
2764 NOTE_SOURCE_FILE (insn) = 0;
2765 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2767 /* ??? Coddle to md files that generate subregs in post-
2768 reload splitters instead of computing the proper subreg ourselves.  */
2770 if (reload_completed && first != last)
2772 first = NEXT_INSN (first);
2776 cleanup_subreg_operands (first);
2779 first = NEXT_INSN (first);
2787 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2790 split_all_insns (upd_life)
2797 blocks = sbitmap_alloc (last_basic_block);
2798 sbitmap_zero (blocks);
2801 FOR_EACH_BB_REVERSE (bb)
2804 bool finish = false;
2806 for (insn = bb->head; !finish ; insn = next)
2810 /* Can't use `next_real_insn' because that might go across
2811 CODE_LABELS and short-out basic blocks. */
2812 next = NEXT_INSN (insn);
2813 finish = (insn == bb->end);
2814 last = split_insn (insn);
2817 /* The split sequence may include barrier, but the
2818 BB boundary we are interested in will be set to the previous one.  */
2821 while (GET_CODE (last) == BARRIER)
2822 last = PREV_INSN (last);
2823 SET_BIT (blocks, bb->index);
2832 int old_last_basic_block = last_basic_block;
2834 find_many_sub_basic_blocks (blocks);
2836 if (old_last_basic_block != last_basic_block && upd_life)
2837 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2840 if (changed && upd_life)
2841 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2842 PROP_DEATH_NOTES | PROP_REG_INFO);
2844 #ifdef ENABLE_CHECKING
2845 verify_flow_info ();
2848 sbitmap_free (blocks);
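
/* A minimal illustrative sketch (hypothetical, assuming it runs while
   the CFG and life information are valid): a pass that wants maximally
   split insns, e.g. ahead of scheduling, asks for life information to
   be updated by passing a nonzero UPD_LIFE.  */

static void example_split_for_sched PARAMS ((void)) ATTRIBUTE_UNUSED;

static void
example_split_for_sched ()
{
  /* Nonzero argument: recompute life information for changed blocks.  */
  split_all_insns (1);
}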
2851 /* Same as split_all_insns, but do not expect CFG to be available.
2852 Used by machine dependent reorg passes. */
2855 split_all_insns_noflow ()
2859 for (insn = get_insns (); insn; insn = next)
2861 next = NEXT_INSN (insn);
2867 #ifdef HAVE_peephole2
2868 struct peep2_insn_data
2874 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2875 static int peep2_current;
2877 /* A non-insn marker indicating the last insn of the block.
2878 The live_before regset for this element is correct, indicating
2879 global_live_at_end for the block. */
2880 #define PEEP2_EOB pc_rtx
2882 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2883 does not exist. Used by the recognizer to find the next insn to match
2884 in a multi-insn pattern. */
2890 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2894 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2895 n -= MAX_INSNS_PER_PEEP2 + 1;
2897 if (peep2_insn_data[n].insn == PEEP2_EOB)
2899 return peep2_insn_data[n].insn;
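
/* A minimal illustrative sketch (hypothetical condition, not from any
   real port): a multi-insn define_peephole2 can use peep2_next_insn to
   inspect a later insn of the match, here requiring that the insn at
   offset 1 exists and is a jump.  */

static int example_next_is_jump_p PARAMS ((void)) ATTRIBUTE_UNUSED;

static int
example_next_is_jump_p ()
{
  rtx next = peep2_next_insn (1);

  return next != NULL_RTX && GET_CODE (next) == JUMP_INSN;
}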
2902 /* Return true if REGNO is dead before the Nth non-note insn after `current'.  */
2906 peep2_regno_dead_p (ofs, regno)
2910 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2913 ofs += peep2_current;
2914 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2915 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2917 if (peep2_insn_data[ofs].insn == NULL_RTX)
2920 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
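
/* A minimal illustrative sketch (hypothetical guard; the offset is
   arbitrary): a define_peephole2 condition commonly checks that a hard
   register is dead before a given insn of the match, so the replacement
   sequence may clobber it freely.  */

static int example_hard_reg_dead_p PARAMS ((unsigned int)) ATTRIBUTE_UNUSED;

static int
example_hard_reg_dead_p (regno)
     unsigned int regno;
{
  /* Dead before the second insn of the current peephole2 window?  */
  return peep2_regno_dead_p (1, regno);
}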
2923 /* Similarly for a REG. */
2926 peep2_reg_dead_p (ofs, reg)
2932 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2935 ofs += peep2_current;
2936 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2937 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2939 if (peep2_insn_data[ofs].insn == NULL_RTX)
2942 regno = REGNO (reg);
2943 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2945 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2950 /* Try to find a hard register of mode MODE, matching the register class in
2951 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2952 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2953 in which case the only condition is that the register must be available
2954 before CURRENT_INSN.
2955 Registers that already have bits set in REG_SET will not be considered.
2957 If an appropriate register is available, it will be returned and the
2958 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is returned.  */
2962 peep2_find_free_register (from, to, class_str, mode, reg_set)
2964 const char *class_str;
2965 enum machine_mode mode;
2966 HARD_REG_SET *reg_set;
2968 static int search_ofs;
2969 enum reg_class class;
2973 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2976 from += peep2_current;
2977 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2978 from -= MAX_INSNS_PER_PEEP2 + 1;
2979 to += peep2_current;
2980 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2981 to -= MAX_INSNS_PER_PEEP2 + 1;
2983 if (peep2_insn_data[from].insn == NULL_RTX)
2985 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2989 HARD_REG_SET this_live;
2991 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2993 if (peep2_insn_data[from].insn == NULL_RTX)
2995 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2996 IOR_HARD_REG_SET (live, this_live);
2999 class = (class_str[0] == 'r' ? GENERAL_REGS
3000 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3002 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3004 int raw_regno, regno, success, j;
3006 /* Distribute the free registers as much as possible. */
3007 raw_regno = search_ofs + i;
3008 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3009 raw_regno -= FIRST_PSEUDO_REGISTER;
3010 #ifdef REG_ALLOC_ORDER
3011 regno = reg_alloc_order[raw_regno];
3016 /* Don't allocate fixed registers. */
3017 if (fixed_regs[regno])
3019 /* Make sure the register is of the right class. */
3020 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
3022 /* And can support the mode we need. */
3023 if (! HARD_REGNO_MODE_OK (regno, mode))
3025 /* And that we don't create an extra save/restore. */
3026 if (! call_used_regs[regno] && ! regs_ever_live[regno])
3028 /* And we don't clobber traceback for noreturn functions. */
3029 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3030 && (! reload_completed || frame_pointer_needed))
3034 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3036 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3037 || TEST_HARD_REG_BIT (live, regno + j))
3045 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3046 SET_HARD_REG_BIT (*reg_set, regno + j);
3048 /* Start the next search with the next register. */
3049 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3051 search_ofs = raw_regno;
3053 return gen_rtx_REG (mode, regno);
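
/* A minimal illustrative sketch (the constraint letter, mode, and the
   USED set are hypothetical): how a define_peephole2 preparation
   statement might grab a scratch register that stays free across the
   first two insns of the window.  A NULL_RTX result means no such
   register exists and the caller should give up on the transformation.  */

static rtx example_get_scratch PARAMS ((HARD_REG_SET *)) ATTRIBUTE_UNUSED;

static rtx
example_get_scratch (used)
     HARD_REG_SET *used;
{
  /* General register, SImode, free from insn 0 through insn 1;
     bits for the chosen register are recorded in *USED.  */
  return peep2_find_free_register (0, 1, "r", SImode, used);
}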
3061 /* Perform the peephole2 optimization pass. */
3064 peephole2_optimize (dump_file)
3065 FILE *dump_file ATTRIBUTE_UNUSED;
3067 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3072 #ifdef HAVE_conditional_execution
3076 bool do_cleanup_cfg = false;
3077 bool do_rebuild_jump_labels = false;
3079 /* Initialize the regsets we're going to use. */
3080 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3081 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3082 live = INITIALIZE_REG_SET (rs_heads[i]);
3084 #ifdef HAVE_conditional_execution
3085 blocks = sbitmap_alloc (last_basic_block);
3086 sbitmap_zero (blocks);
3089 count_or_remove_death_notes (NULL, 1);
3092 FOR_EACH_BB_REVERSE (bb)
3094 struct propagate_block_info *pbi;
3096 /* Indicate that all slots except the last hold invalid data.  */
3097 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3098 peep2_insn_data[i].insn = NULL_RTX;
3100 /* Indicate that the last slot contains live_after data. */
3101 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3102 peep2_current = MAX_INSNS_PER_PEEP2;
3104 /* Start up propagation. */
3105 COPY_REG_SET (live, bb->global_live_at_end);
3106 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3108 #ifdef HAVE_conditional_execution
3109 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3111 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3114 for (insn = bb->end; ; insn = prev)
3116 prev = PREV_INSN (insn);
3119 rtx try, before_try, x;
3122 bool was_call = false;
3124 /* Record this insn. */
3125 if (--peep2_current < 0)
3126 peep2_current = MAX_INSNS_PER_PEEP2;
3127 peep2_insn_data[peep2_current].insn = insn;
3128 propagate_one_insn (pbi, insn);
3129 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3131 /* Match the peephole. */
3132 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3135 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3136 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3137 cfg-related call notes. */
3138 for (i = 0; i <= match_len; ++i)
3141 rtx old_insn, new_insn, note;
3143 j = i + peep2_current;
3144 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3145 j -= MAX_INSNS_PER_PEEP2 + 1;
3146 old_insn = peep2_insn_data[j].insn;
3147 if (GET_CODE (old_insn) != CALL_INSN)
3152 while (new_insn != NULL_RTX)
3154 if (GET_CODE (new_insn) == CALL_INSN)
3156 new_insn = NEXT_INSN (new_insn);
3159 if (new_insn == NULL_RTX)
3162 CALL_INSN_FUNCTION_USAGE (new_insn)
3163 = CALL_INSN_FUNCTION_USAGE (old_insn);
3165 for (note = REG_NOTES (old_insn);
3167 note = XEXP (note, 1))
3168 switch (REG_NOTE_KIND (note))
3172 case REG_ALWAYS_RETURN:
3173 REG_NOTES (new_insn)
3174 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3176 REG_NOTES (new_insn));
3178 /* Discard all other reg notes. */
3182 /* Croak if there is another call in the sequence. */
3183 while (++i <= match_len)
3185 j = i + peep2_current;
3186 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3187 j -= MAX_INSNS_PER_PEEP2 + 1;
3188 old_insn = peep2_insn_data[j].insn;
3189 if (GET_CODE (old_insn) == CALL_INSN)
3195 i = match_len + peep2_current;
3196 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3197 i -= MAX_INSNS_PER_PEEP2 + 1;
3199 note = find_reg_note (peep2_insn_data[i].insn,
3200 REG_EH_REGION, NULL_RTX);
3202 /* Replace the old sequence with the new. */
3203 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3204 INSN_LOCATOR (peep2_insn_data[i].insn));
3205 before_try = PREV_INSN (insn);
3206 delete_insn_chain (insn, peep2_insn_data[i].insn);
3208 /* Re-insert the EH_REGION notes. */
3209 if (note || (was_call && nonlocal_goto_handler_labels))
3213 for (eh_edge = bb->succ; eh_edge
3214 ; eh_edge = eh_edge->succ_next)
3215 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3218 for (x = try ; x != before_try ; x = PREV_INSN (x))
3219 if (GET_CODE (x) == CALL_INSN
3220 || (flag_non_call_exceptions
3221 && may_trap_p (PATTERN (x))
3222 && !find_reg_note (x, REG_EH_REGION, NULL)))
3226 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3230 if (x != bb->end && eh_edge)
3235 nfte = split_block (bb, x);
3236 flags = (eh_edge->flags
3237 & (EDGE_EH | EDGE_ABNORMAL));
3238 if (GET_CODE (x) == CALL_INSN)
3239 flags |= EDGE_ABNORMAL_CALL;
3240 nehe = make_edge (nfte->src, eh_edge->dest,
3243 nehe->probability = eh_edge->probability;
3245 = REG_BR_PROB_BASE - nehe->probability;
3247 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3248 #ifdef HAVE_conditional_execution
3249 SET_BIT (blocks, nfte->dest->index);
3257 /* The replacement may have turned a possibly trapping insn
3258 into a non-trapping one; zap any dummy outgoing edges.  */
3259 do_cleanup_cfg |= purge_dead_edges (bb);
3262 #ifdef HAVE_conditional_execution
3263 /* With conditional execution, we cannot back up the
3264 live information so easily, since the conditional
3265 death data structures are not so self-contained.
3266 So record that we've made a modification to this
3267 block and update life information at the end. */
3268 SET_BIT (blocks, bb->index);
3271 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3272 peep2_insn_data[i].insn = NULL_RTX;
3273 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3275 /* Back up lifetime information past the end of the
3276 newly created sequence. */
3277 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3279 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3281 /* Update life information for the new sequence. */
3288 i = MAX_INSNS_PER_PEEP2;
3289 peep2_insn_data[i].insn = x;
3290 propagate_one_insn (pbi, x);
3291 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3297 /* ??? Should verify that LIVE now matches what we
3298 had before the new sequence. */
3303 /* If we generated a jump instruction, it won't have
3304 JUMP_LABEL set. Recompute after we're done. */
3305 for (x = try; x != before_try; x = PREV_INSN (x))
3306 if (GET_CODE (x) == JUMP_INSN)
3308 do_rebuild_jump_labels = true;
3314 if (insn == bb->head)
3318 free_propagate_block_info (pbi);
3321 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3322 FREE_REG_SET (peep2_insn_data[i].live_before);
3323 FREE_REG_SET (live);
3325 if (do_rebuild_jump_labels)
3326 rebuild_jump_labels (get_insns ());
3328 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3329 we've changed global life since exception handlers are no longer
3334 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3336 #ifdef HAVE_conditional_execution
3339 count_or_remove_death_notes (blocks, 1);
3340 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3342 sbitmap_free (blocks);
3345 #endif /* HAVE_peephole2 */
3347 /* Common predicates for use with define_bypass. */
3349 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3350 data not the address operand(s) of the store. IN_INSN must be
3351 single_set.  OUT_INSN must be either a single_set or a PARALLEL with SETs inside.  */
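
/* As a purely illustrative, hypothetical machine-description fragment,
   a port could use store_data_bypass_p as the guard of a define_bypass
   to shorten the latency between a producer and a dependent store when
   only the stored data (not the address) depends on the producer:

       (define_bypass 2 "hypothetical_mult" "hypothetical_store"
                      "store_data_bypass_p")

   The reservation names and the latency value above are made up; only
   the guard function is real.  */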
3355 store_data_bypass_p (out_insn, in_insn)
3356 rtx out_insn, in_insn;
3358 rtx out_set, in_set;
3360 in_set = single_set (in_insn);
3364 if (GET_CODE (SET_DEST (in_set)) != MEM)
3367 out_set = single_set (out_insn);
3370 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3378 out_pat = PATTERN (out_insn);
3379 if (GET_CODE (out_pat) != PARALLEL)
3382 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3384 rtx exp = XVECEXP (out_pat, 0, i);
3386 if (GET_CODE (exp) == CLOBBER)
3389 if (GET_CODE (exp) != SET)
3392 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3400 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3401 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3402 or multiple set; IN_INSN should be single_set for the test to be meaningful, but for convenience
3403 of insn categorization may be any JUMP or CALL insn. */
3406 if_test_bypass_p (out_insn, in_insn)
3407 rtx out_insn, in_insn;
3409 rtx out_set, in_set;
3411 in_set = single_set (in_insn);
3414 if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
3419 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3421 in_set = SET_SRC (in_set);
3423 out_set = single_set (out_insn);
3426 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3427 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3435 out_pat = PATTERN (out_insn);
3436 if (GET_CODE (out_pat) != PARALLEL)
3439 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3441 rtx exp = XVECEXP (out_pat, 0, i);
3443 if (GET_CODE (exp) == CLOBBER)
3446 if (GET_CODE (exp) != SET)
3449 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3450 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))