1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 91-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
25 #include "insn-config.h"
26 #include "insn-attr.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
31 #include "hard-reg-set.h"
35 #ifndef STACK_PUSH_CODE
36 #ifdef STACK_GROWS_DOWNWARD
37 #define STACK_PUSH_CODE PRE_DEC
39 #define STACK_PUSH_CODE PRE_INC
43 /* Import from final.c: */
44 extern rtx alter_subreg ();
46 static void validate_replace_rtx_1 PROTO((rtx *, rtx, rtx, rtx));
47 static rtx *find_single_use_1 PROTO((rtx, rtx *));
48 static rtx *find_constant_term_loc PROTO((rtx *));
50 /* Nonzero means allow operands to be volatile.
51 This should be 0 if you are generating rtl, such as if you are calling
52 the functions in optabs.c and expmed.c (most of the time).
53 This should be 1 if all valid insns need to be recognized,
54 such as in regclass.c and final.c and reload.c.
56 init_recog and init_recog_no_volatile are responsible for setting this. */
/* NOTE(review): the declaration of the flag itself (presumably `int
   volatile_ok;`) is elided from this excerpt — confirm against the full
   source. */
60 /* On return from `constrain_operands', indicate which alternative
/* NOTE(review): the rest of this comment is elided here; `which_alternative`
   is the alternative number chosen by constraint matching. */
63 int which_alternative;
65 /* Nonzero after end of reload pass.
66 Set to 1 or 0 by toplev.c.
67 Controls the significance of (SUBREG (MEM)). */
/* NOTE(review): the declaration (presumably `int reload_completed;`) is
   elided from this excerpt. */
71 /* Initialize data used by the function `recog'.
72 This must be called once in the compilation of a function
73 before any insn recognition may be done in the function. */
/* NOTE(review): function body elided; presumably clears volatile_ok. */
76 init_recog_no_volatile ()
87 /* Try recognizing the instruction INSN,
88 and return the code number that results.
89 Remember the code so that repeated calls do not
90 need to spend the time for actual rerecognition.
92 This function is the normal interface to instruction recognition.
93 The automatically-generated function `recog' is normally called
94 through this one. (The only exception is in combine.c.) */
/* NOTE(review): the function header (`recog_memoized (insn)`) is elided
   from this excerpt. */
/* Memoize: INSN_CODE < 0 means "not yet recognized"; cache the result so
   later calls are a field read. */
100 if (INSN_CODE (insn) < 0)
101 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
102 return INSN_CODE (insn);
105 /* Check that X is an insn-body for an `asm' with operands
106 and that the operands mentioned in it are legitimate.
   Returns nonzero iff every operand satisfies general_operand
   (return statements are elided from this excerpt). */
109 check_asm_operands (x)
/* NOTE(review): parameter and local declarations elided here. */
112 int noperands = asm_noperands (x);
/* alloca: scratch vector lives only for this call, freed on return. */
121 operands = (rtx *) alloca (noperands * sizeof (rtx));
122 decode_asm_operands (x, operands, NULL_PTR, NULL_PTR, NULL_PTR);
/* asm operands may be anything general_operand accepts, in any mode. */
124 for (i = 0; i < noperands; i++)
125 if (!general_operand (operands[i], VOIDmode))
131 /* Static data for the next two routines.
133 The maximum number of changes supported is defined as the maximum
134 number of operands times 5. This allows for repeated substitutions
135 inside complex indexed address, or, alternatively, changes in up
138 #define MAX_CHANGE_LOCS (MAX_RECOG_OPERANDS * 5)
/* Insn or MEM each pending change applies to (0 = unconditional change). */
140 static rtx change_objects[MAX_CHANGE_LOCS];
/* Saved INSN_CODE of each changed insn, restored if the change is undone. */
141 static int change_old_codes[MAX_CHANGE_LOCS];
/* Address within the rtl where each replacement was stored. */
142 static rtx *change_locs[MAX_CHANGE_LOCS];
/* Previous contents of each location, for rollback. */
143 static rtx change_olds[MAX_CHANGE_LOCS];
/* Number of changes currently pending in the group. */
145 static int num_changes = 0;
147 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
148 at which NEW will be placed. If OBJECT is zero, no validation is done,
149 the change is simply made.
151 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
152 will be called with the address and mode as parameters. If OBJECT is
153 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
156 IN_GROUP is non-zero if this is part of a group of changes that must be
157 performed as a group. In that case, the changes will be stored. The
158 function `apply_change_group' will validate and apply the changes.
160 If IN_GROUP is zero, this is a single change. Try to recognize the insn
161 or validate the memory reference with the change applied. If the result
162 is not valid for the machine, suppress the change and return zero.
163 Otherwise, perform the change and return 1. */
166 validate_change (object, loc, new, in_group)
/* NOTE(review): parameter declarations and the assignment of `old = *loc`
   are elided from this excerpt. */
/* A no-op replacement is trivially valid; don't record it. */
174 if (old == new || rtx_equal_p (old, new))
/* Overflowing the change table, or mixing grouped and ungrouped changes,
   is a caller bug (the elided branch presumably aborts — confirm). */
177 if (num_changes >= MAX_CHANGE_LOCS
178 || (in_group == 0 && num_changes != 0))
183 /* Save the information describing this change. */
184 change_objects[num_changes] = object;
185 change_locs[num_changes] = loc;
186 change_olds[num_changes] = old;
/* Only insns carry an INSN_CODE; MEMs are validated by address instead. */
188 if (object && GET_CODE (object) != MEM)
190 /* Set INSN_CODE to force rerecognition of insn. Save old code in
192 change_old_codes[num_changes] = INSN_CODE (object);
193 INSN_CODE (object) = -1;
198 /* If we are making a group of changes, return 1. Otherwise, validate the
199 change group we made. */
204 return apply_change_group ();
207 /* Apply a group of changes previously issued with `validate_change'.
208 Return 1 if all changes are valid, zero otherwise. */
211 apply_change_group ()
215 /* The changes have been applied and all INSN_CODEs have been reset to force
218 The changes are valid if we aren't given an object, or if we are
219 given a MEM and it still is a valid address, or if this is an insn
220 and it is recognized. In the latter case, if reload has completed,
221 we also require that the operands meet the constraints for
222 the insn. We do not allow modifying an ASM_OPERANDS after reload
223 has completed because verifying the constraints is too difficult. */
225 for (i = 0; i < num_changes; i++)
227 rtx object = change_objects[i];
/* A MEM is valid iff its address is still legitimate in its mode. */
232 if (GET_CODE (object) == MEM)
234 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
/* Otherwise OBJECT is an insn: it must re-recognize (or be a valid asm),
   and after reload its operands must also satisfy the constraints. */
237 else if ((recog_memoized (object) < 0
238 && (asm_noperands (PATTERN (object)) < 0
239 || ! check_asm_operands (PATTERN (object))
240 || reload_completed))
242 && (insn_extract (object),
243 ! constrain_operands (INSN_CODE (object), 1))))
245 rtx pat = PATTERN (object);
247 /* Perhaps we couldn't recognize the insn because there were
248 extra CLOBBERs at the end. If so, try to re-recognize
249 without the last CLOBBER (later iterations will cause each of
250 them to be eliminated, in turn). But don't do this if we
251 have an ASM_OPERAND. */
252 if (GET_CODE (pat) == PARALLEL
253 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
254 && asm_noperands (PATTERN (object)) < 0)
/* Two elements minus the CLOBBER leaves a single pattern, so drop the
   PARALLEL wrapper entirely. */
258 if (XVECLEN (pat, 0) == 2)
259 newpat = XVECEXP (pat, 0, 0);
264 newpat = gen_rtx_PARALLEL (VOIDmode,
265 gen_rtvec (XVECLEN (pat, 0) - 1));
266 for (j = 0; j < XVECLEN (newpat, 0); j++)
267 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
270 /* Add a new change to this group to replace the pattern
271 with this new pattern. Then consider this change
272 as having succeeded. The change we added will
273 cause the entire call to fail if things remain invalid.
275 Note that this can lose if a later change than the one
276 we are processing specified &XVECEXP (PATTERN (object), 0, X)
277 but this shouldn't occur. */
279 validate_change (object, &PATTERN (object), newpat, 1);
281 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
282 /* If this insn is a CLOBBER or USE, it is always valid, but is
/* All changes validated (the loop ran to completion) iff i == num_changes;
   the elided tail presumably commits or cancels accordingly — confirm. */
290 if (i == num_changes)
302 /* Return the number of changes so far in the current group.
   (Body elided from this excerpt; presumably returns num_changes.) */
305 num_validated_changes ()
310 /* Retract the changes numbered NUM and up.
   NOTE(review): the function header is elided from this excerpt;
   presumably this is `cancel_changes (num)` — confirm against full
   source. */
318 /* Back out all the changes. Do this in the opposite order in which
/* Reverse order matters: later changes may have overwritten locations
   touched by earlier ones. */
320 for (i = num_changes - 1; i >= num; i--)
322 *change_locs[i] = change_olds[i];
/* Insns (not MEMs) also need their saved INSN_CODE restored. */
323 if (change_objects[i] && GET_CODE (change_objects[i]) != MEM)
324 INSN_CODE (change_objects[i]) = change_old_codes[i];
329 /* Replace every occurrence of FROM in X with TO. Mark each change with
330 validate_change passing OBJECT. */
333 validate_replace_rtx_1 (loc, from, to, object)
335 rtx from, to, object;
339 register rtx x = *loc;
340 enum rtx_code code = GET_CODE (x);
342 /* X matches FROM if it is the same rtx or they are both referring to the
343 same register in the same mode. Avoid calling rtx_equal_p unless the
344 operands look similar. */
/* NOTE(review): the first disjunct of this condition (presumably
   `if (x == from`) is elided from this excerpt. */
347 || (GET_CODE (x) == REG && GET_CODE (from) == REG
348 && GET_MODE (x) == GET_MODE (from)
349 && REGNO (x) == REGNO (from))
350 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
351 && rtx_equal_p (x, from)))
353 validate_change (object, loc, to, 1);
357 /* For commutative or comparison operations, try replacing each argument
358 separately and seeing if we made any changes. If so, put a constant
360 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
362 int prev_changes = num_changes;
364 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
365 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
/* Canonical form wants the constant second; for comparisons the swap
   must also invert the condition code. */
366 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
368 validate_change (object, loc,
369 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
370 : swap_condition (code),
371 GET_MODE (x), XEXP (x, 1),
379 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
380 done the substitution, otherwise we won't. */
/* NOTE(review): the switch on CODE is elided; the cases below appear to
   be PLUS, MINUS, unary ops, SUBREG and extract ops respectively. */
385 /* If we have a PLUS whose second operand is now a CONST_INT, use
386 plus_constant to try to simplify it. */
387 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
388 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
/* MINUS of a CONST_INT folds the same way, with the sign negated. */
393 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
395 validate_change (object, loc,
396 plus_constant (XEXP (x, 0), - INTVAL (to)),
404 /* In these cases, the operation to be performed depends on the mode
405 of the operand. If we are replacing the operand with a VOIDmode
406 constant, we lose the information. So try to simplify the operation
407 in that case. If it fails, substitute in something that we know
408 won't be recognized. */
409 if (GET_MODE (to) == VOIDmode
410 && (XEXP (x, 0) == from
411 || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
412 && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
413 && REGNO (XEXP (x, 0)) == REGNO (from))))
415 rtx new = simplify_unary_operation (code, GET_MODE (x), to,
/* (clobber (const_int 0)) is deliberately unrecognizable, so the whole
   change group will be rejected if simplification failed. */
418 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
420 validate_change (object, loc, new, 1);
426 /* If we have a SUBREG of a register that we are replacing and we are
427 replacing it with a MEM, make a new MEM and try replacing the
428 SUBREG with it. Don't do this if the MEM has a mode-dependent address
429 or if we would be widening it. */
431 if (SUBREG_REG (x) == from
432 && GET_CODE (from) == REG
433 && GET_CODE (to) == MEM
434 && ! mode_dependent_address_p (XEXP (to, 0))
435 && ! MEM_VOLATILE_P (to)
436 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
438 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
439 enum machine_mode mode = GET_MODE (x);
/* On big-endian targets the narrow value sits at the high-address end
   of the word, so shift the byte offset accordingly. */
442 if (BYTES_BIG_ENDIAN)
443 offset += (MIN (UNITS_PER_WORD,
444 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
445 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
447 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
/* Preserve the memory attributes of the MEM we derived this from. */
448 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (to);
449 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
450 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (to);
451 validate_change (object, loc, new, 1);
458 /* If we are replacing a register with memory, try to change the memory
459 to be the mode required for memory in extract operations (this isn't
460 likely to be an insertion operation; if it was, nothing bad will
461 happen, we might just fail in some cases). */
463 if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
464 && GET_CODE (XEXP (x, 1)) == CONST_INT
465 && GET_CODE (XEXP (x, 2)) == CONST_INT
466 && ! mode_dependent_address_p (XEXP (to, 0))
467 && ! MEM_VOLATILE_P (to))
469 enum machine_mode wanted_mode = VOIDmode;
470 enum machine_mode is_mode = GET_MODE (to);
471 int pos = INTVAL (XEXP (x, 2));
/* Ask the extv/extzv insn patterns what operand mode they want. */
474 if (code == ZERO_EXTRACT)
475 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
478 if (code == SIGN_EXTRACT)
479 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
482 /* If we have a narrower mode, we can do something. */
483 if (wanted_mode != VOIDmode
484 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
486 int offset = pos / BITS_PER_UNIT;
489 /* If the bytes and bits are counted differently, we
490 must adjust the offset. */
491 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
492 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
/* Reduce the bit position to be relative to the new, narrower MEM. */
495 pos %= GET_MODE_BITSIZE (wanted_mode);
497 newmem = gen_rtx_MEM (wanted_mode,
498 plus_constant (XEXP (to, 0), offset));
499 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
500 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (to);
501 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (to);
/* Two coordinated changes: new bit position and new memory operand. */
503 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
504 validate_change (object, &XEXP (x, 0), newmem, 1);
514 /* For commutative or comparison operations we've already performed
515 replacements. Don't try to perform them again. */
516 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
/* Generic recursion over every sub-expression and sub-vector of X. */
518 fmt = GET_RTX_FORMAT (code);
519 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
522 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
523 else if (fmt[i] == 'E')
524 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
525 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
530 /* Try replacing every occurrence of FROM in INSN with TO. After all
531 changes have been made, validate by seeing if INSN is still valid.
   Returns nonzero iff the whole group of replacements is valid. */
534 validate_replace_rtx (from, to, insn)
/* Queue all replacements as one group, then accept or roll back together. */
537 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
538 return apply_change_group ();
542 /* Return 1 if the insn using CC0 set by INSN does not contain
543 any ordered tests applied to the condition codes.
544 EQ and NE tests do not count. */
547 next_insn_tests_no_inequality (insn)
550 register rtx next = next_cc0_user (insn);
552 /* If there is no next insn, we have to take the conservative choice. */
/* NOTE(review): the conservative `return 0` branch is elided from this
   excerpt. */
556 return ((GET_CODE (next) == JUMP_INSN
557 || GET_CODE (next) == INSN
558 || GET_CODE (next) == CALL_INSN)
559 && ! inequality_comparisons_p (PATTERN (next)));
562 #if 0 /* This is useless since the insn that sets the cc's
563 must be followed immediately by the use of them. */
564 /* Return 1 if the CC value set up by INSN is not used. */
567 next_insns_test_no_inequality (insn)
570 register rtx next = NEXT_INSN (insn);
/* Scan forward until the CC value can no longer be live. */
572 for (; next != 0; next = NEXT_INSN (next))
/* A label or barrier ends the straight-line region where CC0 is valid. */
574 if (GET_CODE (next) == CODE_LABEL
575 || GET_CODE (next) == BARRIER)
/* Notes carry no code; skip them. */
577 if (GET_CODE (next) == NOTE)
579 if (inequality_comparisons_p (PATTERN (next)))
/* An insn that re-sets cc0 kills the old value. */
581 if (sets_cc0_p (PATTERN (next)) == 1)
583 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
591 /* This is used by find_single_use to locate an rtx that contains exactly one
592 use of DEST, which is typically either a REG or CC0. It returns a
593 pointer to the innermost rtx expression containing DEST. Appearances of
594 DEST that are being used to totally replace it are not counted. */
597 find_single_use_1 (dest, loc)
/* NOTE(review): parameter declarations, `x = *loc` and a switch on CODE
   are elided from this excerpt. */
602 enum rtx_code code = GET_CODE (x);
619 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
620 of a REG that occupies all of the REG, the insn uses DEST if
621 it is mentioned in the destination or the source. Otherwise, we
622 need just check the source. */
623 if (GET_CODE (SET_DEST (x)) != CC0
624 && GET_CODE (SET_DEST (x)) != PC
625 && GET_CODE (SET_DEST (x)) != REG
626 && ! (GET_CODE (SET_DEST (x)) == SUBREG
627 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
/* SUBREG covers the whole REG iff both occupy the same number of words. */
628 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
629 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
630 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
631 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
634 return find_single_use_1 (dest, &SET_SRC (x));
638 return find_single_use_1 (dest, &XEXP (x, 0));
644 /* If it wasn't one of the common cases above, check each expression and
645 vector of this code. Look for a unique usage of DEST. */
647 fmt = GET_RTX_FORMAT (code);
648 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
/* A direct appearance of DEST here is a use at this level. */
652 if (dest == XEXP (x, i)
653 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
654 && REGNO (dest) == REGNO (XEXP (x, i))))
657 this_result = find_single_use_1 (dest, &XEXP (x, i));
660 result = this_result;
661 else if (this_result)
662 /* Duplicate usage. */
665 else if (fmt[i] == 'E')
669 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
671 if (XVECEXP (x, i, j) == dest
672 || (GET_CODE (dest) == REG
673 && GET_CODE (XVECEXP (x, i, j)) == REG
674 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
677 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
680 result = this_result;
/* A second distinct use means DEST is not singly used; elided branch
   presumably returns 0. */
681 else if (this_result)
690 /* See if DEST, produced in INSN, is used only a single time in the
691 sequel. If so, return a pointer to the innermost rtx expression in which
694 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
696 This routine will return usually zero either before flow is called (because
697 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
698 note can't be trusted).
700 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
701 care about REG_DEAD notes or LOG_LINKS.
703 Otherwise, we find the single use by finding an insn that has a
704 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
705 only referenced once in that insn, we know that it must be the first
706 and last insn referencing DEST. */
709 find_single_use (dest, insn, ploc)
/* NOTE(review): parameter declarations and the cc0 test guarding this
   fast path are elided from this excerpt. */
721 next = NEXT_INSN (insn);
723 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
726 result = find_single_use_1 (dest, &PATTERN (next));
/* General case: only meaningful between flow and reload, and only for
   pseudo-register destinations. */
733 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
/* Scan forward within the basic block for an insn where DEST dies. */
736 for (next = next_nonnote_insn (insn);
737 next != 0 && GET_CODE (next) != CODE_LABEL;
738 next = next_nonnote_insn (next))
739 if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
/* Require a LOG_LINKS entry back to INSN to confirm the def-use pair. */
741 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
742 if (XEXP (link, 0) == insn)
747 result = find_single_use_1 (dest, &PATTERN (next));
757 /* Return 1 if OP is a valid general operand for machine mode MODE.
758 This is either a register reference, a memory reference,
759 or a constant. In the case of a memory reference, the address
760 is checked for general validity for the target machine.
762 Register and memory references must have mode MODE in order to be valid,
763 but some constants have no machine mode and are valid for any mode.
765 If MODE is VOIDmode, OP is checked for validity for whatever mode
768 The main use of this function is as a predicate in match_operand
769 expressions in the machine description.
771 For an explanation of this function's behavior for registers of
772 class NO_REGS, see the comment for `register_operand'. */
775 general_operand (op, mode)
777 enum machine_mode mode;
779 register enum rtx_code code = GET_CODE (op);
780 int mode_altering_drug = 0;
782 if (mode == VOIDmode)
783 mode = GET_MODE (op);
785 /* Don't accept CONST_INT or anything similar
786 if the caller wants something floating. */
787 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
788 && GET_MODE_CLASS (mode) != MODE_INT
789 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
/* NOTE(review): the CONSTANT_P test guarding this return is elided. */
793 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
794 #ifdef LEGITIMATE_PIC_OPERAND_P
/* Under -fpic some constants (e.g. external symbols) are not directly
   usable; let the target veto them. */
795 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
797 && LEGITIMATE_CONSTANT_P (op));
799 /* Except for certain constants with VOIDmode, already checked for,
800 OP's mode must match MODE if MODE specifies a mode. */
802 if (GET_MODE (op) != mode)
/* NOTE(review): the SUBREG case header is elided here. */
807 #ifdef INSN_SCHEDULING
808 /* On machines that have insn scheduling, we want all memory
809 reference to be explicit, so outlaw paradoxical SUBREGs. */
810 if (GET_CODE (SUBREG_REG (op)) == MEM
811 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
/* Strip the SUBREG and validate the inner expression instead. */
815 op = SUBREG_REG (op);
816 code = GET_CODE (op);
818 /* No longer needed, since (SUBREG (MEM...))
819 will load the MEM into a reload reg in the MEM's own mode. */
820 mode_altering_drug = 1;
825 /* A register whose class is NO_REGS is not a general operand. */
826 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
827 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
/* MEM case: validate the address against the target's rules. */
831 register rtx y = XEXP (op, 0);
832 if (! volatile_ok && MEM_VOLATILE_P (op))
/* (ADDRESSOF ...) will be resolved during reload; accept it here. */
834 if (GET_CODE (y) == ADDRESSOF)
836 /* Use the mem's mode, since it will be reloaded thus. */
837 mode = GET_MODE (op);
838 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
841 /* Pretend this is an operand for now; we'll run force_operand
842 on its replacement in fixup_var_refs_1. */
843 if (code == ADDRESSOF)
/* "win" label path: a stripped (SUBREG (MEM)) is only OK if the address
   does not depend on the mode. */
849 if (mode_altering_drug)
850 return ! mode_dependent_address_p (XEXP (op, 0));
854 /* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE (part of this comment is elided from this excerpt).
857 The main use of this function is as a predicate in match_operand
858 expressions in the machine description. */
861 address_operand (op, mode)
863 enum machine_mode mode;
/* Thin wrapper: delegate entirely to memory_address_p. */
865 return memory_address_p (mode, op);
868 /* Return 1 if OP is a register reference of mode MODE.
869 If MODE is VOIDmode, accept a register in any mode.
871 The main use of this function is as a predicate in match_operand
872 expressions in the machine description.
874 As a special exception, registers whose class is NO_REGS are
875 not accepted by `register_operand'. The reason for this change
876 is to allow the representation of special architecture artifacts
877 (such as a condition code register) without extending the rtl
878 definitions. Since registers of class NO_REGS cannot be used
879 as registers in any case where register classes are examined,
880 it is most consistent to keep this function from accepting them. */
883 register_operand (op, mode)
885 enum machine_mode mode;
887 if (GET_MODE (op) != mode && mode != VOIDmode)
890 if (GET_CODE (op) == SUBREG)
892 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
893 because it is guaranteed to be reloaded into one.
894 Just make sure the MEM is valid in itself.
895 (Ideally, (SUBREG (MEM)...) should not exist after reload,
896 but currently it does result from (SUBREG (REG)...) where the
897 reg went on the stack.) */
898 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
899 return general_operand (op, mode)
/* NOTE(review): the trailing `;` line of this statement is elided from
   this excerpt along with adjacent lines. */
901 #ifdef CLASS_CANNOT_CHANGE_SIZE
/* Reject a SUBREG that reinterprets a hard register whose class cannot
   change size, when the outer and inner modes differ in size. */
902 if (GET_CODE (SUBREG_REG (op)) == REG
903 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
904 && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
905 REGNO (SUBREG_REG (op)))
906 && (GET_MODE_SIZE (mode)
907 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
908 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
909 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
/* Otherwise look through the SUBREG at the underlying register. */
913 op = SUBREG_REG (op);
916 /* We don't consider registers whose class is NO_REGS
917 to be a register operand. */
918 return (GET_CODE (op) == REG
919 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
920 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
923 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
924 or a hard register. */
927 scratch_operand (op, mode)
929 enum machine_mode mode;
/* Mode must match exactly; pseudos are excluded (hard regs only). */
931 return (GET_MODE (op) == mode
932 && (GET_CODE (op) == SCRATCH
933 || (GET_CODE (op) == REG
934 && REGNO (op) < FIRST_PSEUDO_REGISTER)));
937 /* Return 1 if OP is a valid immediate operand for mode MODE.
939 The main use of this function is as a predicate in match_operand
940 expressions in the machine description. */
943 immediate_operand (op, mode)
945 enum machine_mode mode;
947 /* Don't accept CONST_INT or anything similar
948 if the caller wants something floating. */
949 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
950 && GET_MODE_CLASS (mode) != MODE_INT
951 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
/* Constants with VOIDmode (e.g. CONST_INT) are valid for any mode. */
954 return (CONSTANT_P (op)
955 && (GET_MODE (op) == mode || mode == VOIDmode
956 || GET_MODE (op) == VOIDmode)
957 #ifdef LEGITIMATE_PIC_OPERAND_P
958 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
960 && LEGITIMATE_CONSTANT_P (op));
963 /* Returns 1 if OP is an operand that is a CONST_INT.
   MODE is accepted for predicate-signature uniformity but ignored. */
966 const_int_operand (op, mode)
968 enum machine_mode mode;
970 return GET_CODE (op) == CONST_INT;
973 /* Returns 1 if OP is an operand that is a constant integer or constant
974 floating-point number. */
977 const_double_operand (op, mode)
979 enum machine_mode mode;
981 /* Don't accept CONST_INT or anything similar
982 if the caller wants something floating. */
983 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
984 && GET_MODE_CLASS (mode) != MODE_INT
985 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
/* Either rtx code qualifies; VOIDmode constants match any MODE. */
988 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
989 && (mode == VOIDmode || GET_MODE (op) == mode
990 || GET_MODE (op) == VOIDmode));
993 /* Return 1 if OP is a general operand that is not an immediate operand,
   i.e. a register or memory reference but not a constant. */
996 nonimmediate_operand (op, mode)
998 enum machine_mode mode;
1000 return (general_operand (op, mode) && ! CONSTANT_P (op));
1003 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1006 nonmemory_operand (op, mode)
1008 enum machine_mode mode;
/* Constant path: mirrors the checks in immediate_operand. */
1010 if (CONSTANT_P (op))
1012 /* Don't accept CONST_INT or anything similar
1013 if the caller wants something floating. */
1014 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1015 && GET_MODE_CLASS (mode) != MODE_INT
1016 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1019 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
1020 #ifdef LEGITIMATE_PIC_OPERAND_P
1021 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1023 && LEGITIMATE_CONSTANT_P (op));
/* Register path: mirrors register_operand. */
1026 if (GET_MODE (op) != mode && mode != VOIDmode)
1029 if (GET_CODE (op) == SUBREG)
1031 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1032 because it is guaranteed to be reloaded into one.
1033 Just make sure the MEM is valid in itself.
1034 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1035 but currently it does result from (SUBREG (REG)...) where the
1036 reg went on the stack.) */
1037 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1038 return general_operand (op, mode);
1039 op = SUBREG_REG (op);
1042 /* We don't consider registers whose class is NO_REGS
1043 to be a register operand. */
1044 return (GET_CODE (op) == REG
1045 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1046 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1049 /* Return 1 if OP is a valid operand that stands for pushing a
1050 value of mode MODE onto the stack.
1052 The main use of this function is as a predicate in match_operand
1053 expressions in the machine description. */
1056 push_operand (op, mode)
1058 enum machine_mode mode;
/* Must be a MEM of the right mode whose address is the target's
   push addressing form (PRE_DEC or PRE_INC) on the stack pointer. */
1060 if (GET_CODE (op) != MEM)
1063 if (GET_MODE (op) != mode)
1068 if (GET_CODE (op) != STACK_PUSH_CODE)
/* NOTE(review): `op = XEXP (op, 0)` is presumably elided above, since
   STACK_PUSH_CODE is compared against the address here — confirm. */
1071 return XEXP (op, 0) == stack_pointer_rtx;
1074 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1077 memory_address_p (mode, addr)
1078 enum machine_mode mode;
/* ADDRESSOF is always acceptable; it is resolved during reload. */
1081 if (GET_CODE (addr) == ADDRESSOF)
/* GO_IF_LEGITIMATE_ADDRESS jumps to `win' on success; falling through
   means the address is invalid (elided return 0 / win: return 1). */
1084 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1091 /* Return 1 if OP is a valid memory reference with mode MODE,
1092 including a valid address.
1094 The main use of this function is as a predicate in match_operand
1095 expressions in the machine description. */
1098 memory_operand (op, mode)
1100 enum machine_mode mode;
1104 if (! reload_completed)
1105 /* Note that no SUBREG is a memory operand before end of reload pass,
1106 because (SUBREG (MEM...)) forces reloading into a register. */
1107 return GET_CODE (op) == MEM && general_operand (op, mode);
1109 if (mode != VOIDmode && GET_MODE (op) != mode)
/* After reload a (SUBREG (MEM)) counts as memory; look inside it. */
1113 if (GET_CODE (inner) == SUBREG)
1114 inner = SUBREG_REG (inner);
1116 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1119 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1120 that is, a memory reference whose address is a general_operand. */
1123 indirect_operand (op, mode)
1125 enum machine_mode mode;
1127 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1128 if (! reload_completed
1129 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1131 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1132 rtx inner = SUBREG_REG (op);
/* Big-endian adjustment: the narrow value lives at the other end of
   the word, so correct the byte offset. */
1134 if (BYTES_BIG_ENDIAN)
1135 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1136 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1138 if (mode != VOIDmode && GET_MODE (op) != mode)
1141 /* The only way that we can have a general_operand as the resulting
1142 address is if OFFSET is zero and the address already is an operand
1143 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1146 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1147 || (GET_CODE (XEXP (inner, 0)) == PLUS
1148 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1149 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1150 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
/* Ordinary case: a MEM whose address is itself a general operand. */
1153 return (GET_CODE (op) == MEM
1154 && memory_operand (op, mode)
1155 && general_operand (XEXP (op, 0), Pmode));
1158 /* Return 1 if this is a comparison operator. This allows the use of
1159 MATCH_OPERATOR to recognize all the branch insns. */
1162 comparison_operator (op, mode)
1164 enum machine_mode mode;
/* RTX class '<' covers all comparison codes (EQ, NE, LT, GEU, ...). */
1166 return ((mode == VOIDmode || GET_MODE (op) == mode)
1167 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1170 /* If BODY is an insn body that uses ASM_OPERANDS,
1171 return the number of operands (both input and output) in the insn.
1172 Otherwise return -1. */
1175 asm_noperands (body)
1178 if (GET_CODE (body) == ASM_OPERANDS)
1179 /* No output operands: return number of input operands. */
1180 return ASM_OPERANDS_INPUT_LENGTH (body);
1181 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1182 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1183 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1184 else if (GET_CODE (body) == PARALLEL
1185 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1186 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1188 /* Multiple output operands, or 1 output plus some clobbers:
1189 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1193 /* Count backwards through CLOBBERs to determine number of SETs. */
1194 for (i = XVECLEN (body, 0); i > 0; i--)
1196 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
/* Anything other than trailing CLOBBERs before the SETs is not a
   recognizable asm; elided branch presumably returns -1. */
1198 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1202 /* N_SETS is now number of output operands. */
1205 /* Verify that all the SETs we have
1206 came from a single original asm_operands insn
1207 (so that invalid combinations are blocked). */
1208 for (i = 0; i < n_sets; i++)
1210 rtx elt = XVECEXP (body, 0, i);
1211 if (GET_CODE (elt) != SET)
1213 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1215 /* If these ASM_OPERANDS rtx's came from different original insns
1216 then they aren't allowed together. */
/* Identity of the shared input vector is the marker of a common origin. */
1217 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1218 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
/* Total operands = inputs + n_sets (the `+ n_sets` term is on an
   elided line). */
1221 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1224 else if (GET_CODE (body) == PARALLEL
1225 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1227 /* 0 outputs, but some clobbers:
1228 body is [(asm_operands ...) (clobber (reg ...))...]. */
1231 /* Make sure all the other parallel things really are clobbers. */
1232 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1233 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1236 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1242 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1243 copy its operands (both input and output) into the vector OPERANDS,
1244 the locations of the operands within the insn into the vector OPERAND_LOCS,
1245 and the constraints for the operands into CONSTRAINTS.
1246 Write the modes of the operands into MODES.
1247 Return the assembler-template.
1249 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1250 we don't store that info. */
/* NOTE(review): this listing is elided -- braces and the per-vector
   `if (operands)' / `if (operand_locs)' / `if (constraints)' /
   `if (modes)' guard lines that normally precede each store are
   missing here; the code below is kept byte-identical to the listing.  */
1253 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1258 enum machine_mode *modes;
/* Case 1: a single SET whose source is the ASM_OPERANDS --
   exactly one output operand and no clobbers.  */
1264 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1266 rtx asmop = SET_SRC (body);
1267 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
/* Slot 0 is reserved for the output; inputs fill slots 1..noperands-1.  */
1269 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1271 for (i = 1; i < noperands; i++)
1274 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1276 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1278 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1280 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1283 /* The output is in the SET.
1284 Its constraint is in the ASM_OPERANDS itself. */
1286 operands[0] = SET_DEST (body);
1288 operand_locs[0] = &SET_DEST (body);
1290 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1292 modes[0] = GET_MODE (SET_DEST (body));
1293 template = ASM_OPERANDS_TEMPLATE (asmop);
/* Case 2: a bare ASM_OPERANDS -- no outputs, no clobbers.  */
1295 else if (GET_CODE (body) == ASM_OPERANDS)
1298 /* No output operands: BODY is (asm_operands ....). */
1300 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1302 /* The input operands are found in the 1st element vector. */
1303 /* Constraints for inputs are in the 2nd element vector. */
1304 for (i = 0; i < noperands; i++)
1307 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1309 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1311 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1313 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1315 template = ASM_OPERANDS_TEMPLATE (asmop);
/* Case 3: PARALLEL whose first element is a SET -- one or more
   outputs, possibly followed by CLOBBERs.  */
1317 else if (GET_CODE (body) == PARALLEL
1318 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1320 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1321 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1322 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1323 int nout = 0; /* Does not include CLOBBERs. */
1325 /* At least one output, plus some CLOBBERs. */
1327 /* The outputs are in the SETs.
1328 Their constraints are in the ASM_OPERANDS itself. */
/* Outputs come first, one per SET, in the order the SETs appear.  */
1329 for (i = 0; i < nparallel; i++)
1331 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1332 break; /* Past last SET */
1335 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1337 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1339 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1341 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
/* Inputs follow the outputs at index NOUT.  Presumably NOUT is set
   from I when the loop above exits -- that assignment is elided in
   this listing; TODO confirm against the full source.  */
1345 for (i = 0; i < nin; i++)
1348 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1350 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1352 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1354 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1357 template = ASM_OPERANDS_TEMPLATE (asmop);
/* Case 4: PARALLEL whose first element is the ASM_OPERANDS itself --
   no outputs, only CLOBBERs after it.  */
1359 else if (GET_CODE (body) == PARALLEL
1360 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1362 /* No outputs, but some CLOBBERs. */
1364 rtx asmop = XVECEXP (body, 0, 0);
1365 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1367 for (i = 0; i < nin; i++)
1370 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1372 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1374 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1376 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1379 template = ASM_OPERANDS_TEMPLATE (asmop);
1385 /* Given an rtx *P, if it is a sum containing an integer constant term,
1386 return the location (type rtx *) of the pointer to that constant term.
1387 Otherwise, return a null pointer. */
/* Recurses into both summands of a PLUS.  Used by
   offsettable_address_p and adj_offsettable_operand below so the
   constant term of an address can be adjusted in place.
   (The `return' statements for the found/not-found cases are elided
   in this listing.)  */
1390 find_constant_term_loc (p)
1394 register enum rtx_code code = GET_CODE (*p);
1396 /* If *P IS such a constant term, P is its location. */
1398 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1402 /* Otherwise, if not a sum, it has no constant term. */
1404 if (GET_CODE (*p) != PLUS)
1407 /* If one of the summands is constant, return its location. */
1409 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1410 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1413 /* Otherwise, check each summand for containing a constant term. */
1415 if (XEXP (*p, 0) != 0)
1417 tem = find_constant_term_loc (&XEXP (*p, 0));
1422 if (XEXP (*p, 1) != 0)
1424 tem = find_constant_term_loc (&XEXP (*p, 1));
1432 /* Return 1 if OP is a memory reference
1433 whose address contains no side effects
1434 and remains valid after the addition
1435 of a positive integer less than the
1436 size of the object being referenced.
1438 We assume that the original address is valid and do not check it.
1440 This uses strict_memory_address_p as a subroutine, so
1441 don't use it before reload. */
/* Strict variant: delegates to offsettable_address_p with
   STRICTP == 1, so registers in the address must be acceptable
   hard regs.  Anything that is not a MEM fails immediately.  */
1444 offsettable_memref_p (op)
1447 return ((GET_CODE (op) == MEM)
1448 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1451 /* Similar, but don't require a strictly valid mem ref:
1452 consider pseudo-regs valid as index or base regs. */
/* Nonstrict variant of offsettable_memref_p: delegates to
   offsettable_address_p with STRICTP == 0, so it is usable
   before reload.  */
1455 offsettable_nonstrict_memref_p (op)
1458 return ((GET_CODE (op) == MEM)
1459 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1462 /* Return 1 if Y is a memory address which contains no side effects
1463 and would remain valid after the addition of a positive integer
1464 less than the size of that mode.
1466 We assume that the original address is valid and do not check it.
1467 We do check that it is valid for narrower modes.
1469 If STRICTP is nonzero, we require a strictly valid address,
1470 for the sake of use in reload.c. */
1473 offsettable_address_p (strictp, mode, y)
1475 enum machine_mode mode;
1478 register enum rtx_code ycode = GET_CODE (y);
/* Pick the address validator once: strict (post-reload) or
   nonstrict, both called through this pointer below.  */
1482 int (*addressp) () = (strictp ? strict_memory_address_p : memory_address_p);
/* A constant address plus a small constant is still a constant
   address, so it is trivially offsettable.  */
1484 if (CONSTANT_ADDRESS_P (y))
1487 /* Adjusting an offsettable address involves changing to a narrower mode.
1488 Make sure that's OK. */
1490 if (mode_dependent_address_p (y))
1493 /* If the expression contains a constant term,
1494 see if it remains valid when max possible offset is added. */
/* The constant term is modified in place, tested, then restored;
   the save/restore of the old value is elided in this listing.  */
1496 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1501 *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
1502 /* Use QImode because an odd displacement may be automatically invalid
1503 for any wider mode. But it should be valid for a single byte. */
1504 good = (*addressp) (QImode, y);
1506 /* In any case, restore old contents of memory. */
/* Auto-increment/decrement addresses have side effects, so they can
   never be offsettable (the return for this case is elided in this
   listing -- TODO confirm).  */
1511 if (ycode == PRE_DEC || ycode == PRE_INC
1512 || ycode == POST_DEC || ycode == POST_INC)
1515 /* The offset added here is chosen as the maximum offset that
1516 any instruction could need to add when operating on something
1517 of the specified mode. We assume that if Y and Y+c are
1518 valid addresses then so is Y+d for all 0<d<c. */
1520 z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);
1522 /* Use QImode because an odd displacement may be automatically invalid
1523 for any wider mode. But it should be valid for a single byte. */
1524 return (*addressp) (QImode, z);
1527 /* Return 1 if ADDR is an address-expression whose effect depends
1528 on the mode of the memory reference it is used in.
1530 Autoincrement addressing is a typical example of mode-dependence
1531 because the amount of the increment depends on the mode. */
1534 mode_dependent_address_p (addr)
/* The target macro jumps to a label (presumably `win', elided from
   this listing along with the return statements) when ADDR is
   mode-dependent; otherwise control falls through.  TODO confirm
   against the full source.  */
1537 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1543 /* Return 1 if OP is a general operand
1544 other than a memory ref with a mode dependent address. */
1547 mode_independent_operand (op, mode)
1548 enum machine_mode mode;
/* Reject anything that is not a general operand for MODE.  */
1553 if (! general_operand (op, mode))
/* A non-MEM general operand has no address at all, so it cannot
   be mode-dependent.  */
1556 if (GET_CODE (op) != MEM)
1559 addr = XEXP (op, 0);
/* Jumps to `lose' (label and returns elided from this listing)
   when the address is mode-dependent.  */
1560 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
1566 /* Given an operand OP that is a valid memory reference
1567 which satisfies offsettable_memref_p,
1568 return a new memory reference whose address has been adjusted by OFFSET.
1569 OFFSET should be positive and less than the size of the object referenced.
1573 adj_offsettable_operand (op, offset)
1577 register enum rtx_code code = GET_CODE (op);
1581 register rtx y = XEXP (op, 0);
/* Constant address: build a fresh MEM at address Y+OFFSET.  */
1584 if (CONSTANT_ADDRESS_P (y))
1586 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
/* Carry the read-only marking of the original MEM over to the copy.  */
1587 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
/* Sum with a constant term: fold OFFSET into that term.  Presumably
   the address is copied before *CONST_LOC is overwritten -- the
   copying lines are elided in this listing; TODO confirm.  */
1591 if (GET_CODE (y) == PLUS)
1594 register rtx *const_loc;
1598 const_loc = find_constant_term_loc (&z);
1601 *const_loc = plus_constant_for_output (*const_loc, offset);
/* Fallback: no constant term to adjust, so wrap the whole address
   in a new PLUS via plus_constant_for_output.  */
1606 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1607 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1613 #ifdef REGISTER_CONSTRAINTS
1615 /* Check the operands of an insn (found in recog_operands)
1616 against the insn's operand constraints (found via INSN_CODE_NUM)
1617 and return 1 if they are valid.
1619 WHICH_ALTERNATIVE is set to a number which indicates which
1620 alternative of constraints was matched: 0 for the first alternative,
1621 1 for the next, etc.
1623 In addition, when two operands are match
1624 and it happens that the output operand is (reg) while the
1625 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
1626 make the output operand look like the input.
1627 This is because the output operand is the one the template will print.
1629 This is used in final, just before printing the assembler code and by
1630 the routines that determine an insn's attribute.
1632 If STRICT is a positive non-zero value, it means that we have been
1633 called after reload has been completed. In that case, we must
1634 do all checks strictly. If it is zero, it means that we have been called
1635 before reload has completed. In that case, we first try to see if we can
1636 find an alternative that matches strictly. If not, we try again, this
1637 time assuming that reload will fix up the insn. This provides a "best
1638 guess" for the alternative and is used to compute attributes of insns prior
1639 to reload. A negative value of STRICT is used for this internal call. */
/* NOTE(review): this listing is heavily elided -- in particular the
   `switch'/`case' labels for the individual constraint letters
   ('0'-'9', '=', '+', '&', 'p', 'g', 'r', 'm', '<', '>', 'E', 'F',
   's', 'i', 'n', 'I'-'P', 'Q'-'U', 'V', 'o', ...) are missing, and
   only the bodies of those cases remain visible below.  The code is
   kept byte-identical to the listing.  */
1647 constrain_operands (insn_code_num, strict)
1651 char *constraints[MAX_RECOG_OPERANDS];
1652 int matching_operands[MAX_RECOG_OPERANDS];
1653 enum op_type {OP_IN, OP_OUT, OP_INOUT} op_types[MAX_RECOG_OPERANDS];
1654 int earlyclobber[MAX_RECOG_OPERANDS];
1656 int noperands = insn_n_operands[insn_code_num];
1658 struct funny_match funny_match[MAX_RECOG_OPERANDS];
1659 int funny_match_index;
1660 int nalternatives = insn_n_alternatives[insn_code_num];
/* An insn with no operands or no constraint alternatives has nothing
   to check (the return is elided in this listing).  */
1662 if (noperands == 0 || nalternatives == 0)
/* Per-operand initialization before scanning any alternative.  */
1665 for (c = 0; c < noperands; c++)
1667 constraints[c] = insn_operand_constraint[insn_code_num][c];
1668 matching_operands[c] = -1;
1669 op_types[c] = OP_IN;
/* Try each alternative in turn until one matches all operands.  */
1672 which_alternative = 0;
1674 while (which_alternative < nalternatives)
1678 funny_match_index = 0;
1680 for (opno = 0; opno < noperands; opno++)
1682 register rtx op = recog_operand[opno];
1683 enum machine_mode mode = GET_MODE (op);
1684 register char *p = constraints[opno];
1689 earlyclobber[opno] = 0;
1691 /* A unary operator may be accepted by the predicate, but it
1692 is irrelevant for matching constraints. */
1693 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
/* Look through SUBREGs of hard regs, remembering the word offset so
   reg_fits_class_p below can test the actual hard register.  */
1696 if (GET_CODE (op) == SUBREG)
1698 if (GET_CODE (SUBREG_REG (op)) == REG
1699 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
1700 offset = SUBREG_WORD (op);
1701 op = SUBREG_REG (op);
1704 /* An empty constraint or empty alternative
1705 allows anything which matched the pattern. */
1706 if (*p == 0 || *p == ',')
/* Scan this alternative's constraint letters one at a time;
   C holds the current letter.  */
1709 while (*p && (c = *p++) != ',')
1719 /* Ignore rest of this alternative as far as
1720 constraint checking is concerned. */
1721 while (*p && *p != ',')
1726 op_types[opno] = OP_OUT;
1730 op_types[opno] = OP_INOUT;
1734 earlyclobber[opno] = 1;
1742 /* This operand must be the same as a previous one.
1743 This kind of constraint is used for instructions such
1744 as add when they take only two operands.
1746 Note that the lower-numbered operand is passed first.
1748 If we are not testing strictly, assume that this constraint
1749 will be satisfied. */
1753 val = operands_match_p (recog_operand[c - '0'],
1754 recog_operand[opno]);
/* Record the pairing in both directions for the earlyclobber
   check at the end of the alternative.  */
1756 matching_operands[opno] = c - '0';
1757 matching_operands[c - '0'] = opno;
1761 /* If output is *x and input is *--x,
1762 arrange later to change the output to *--x as well,
1763 since the output op is the one that will be printed. */
1764 if (val == 2 && strict > 0)
1766 funny_match[funny_match_index].this = opno;
1767 funny_match[funny_match_index++].other = c - '0';
1772 /* p is used for address_operands. When we are called by
1773 gen_reload, no one will have checked that the address is
1774 strictly valid, i.e., that all pseudos requiring hard regs
1775 have gotten them. */
1777 || (strict_memory_address_p
1778 (insn_operand_mode[insn_code_num][opno], op)))
1782 /* No need to check general_operand again;
1783 it was done in insn-recog.c. */
1785 /* Anything goes unless it is a REG and really has a hard reg
1786 but the hard reg is not in the class GENERAL_REGS. */
1788 || GENERAL_REGS == ALL_REGS
1789 || GET_CODE (op) != REG
1790 || (reload_in_progress
1791 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1792 || reg_fits_class_p (op, GENERAL_REGS, offset, mode)
1799 && GET_CODE (op) == REG
1800 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1801 || (strict == 0 && GET_CODE (op) == SCRATCH)
1802 || (GET_CODE (op) == REG
1803 && ((GENERAL_REGS == ALL_REGS
1804 && REGNO (op) < FIRST_PSEUDO_REGISTER)
1805 || reg_fits_class_p (op, GENERAL_REGS,
1811 /* This is used for a MATCH_SCRATCH in the cases when
1812 we don't actually need anything. So anything goes
1818 if (GET_CODE (op) == MEM
1819 /* Before reload, accept what reload can turn into mem. */
1820 || (strict < 0 && CONSTANT_P (op))
1821 /* During reload, accept a pseudo */
1822 || (reload_in_progress && GET_CODE (op) == REG
1823 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
1828 if (GET_CODE (op) == MEM
1829 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
1830 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1835 if (GET_CODE (op) == MEM
1836 && (GET_CODE (XEXP (op, 0)) == PRE_INC
1837 || GET_CODE (XEXP (op, 0)) == POST_INC))
1842 #ifndef REAL_ARITHMETIC
1843 /* Match any CONST_DOUBLE, but only if
1844 we can examine the bits of it reliably. */
1845 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1846 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1847 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1850 if (GET_CODE (op) == CONST_DOUBLE)
1855 if (GET_CODE (op) == CONST_DOUBLE)
1861 if (GET_CODE (op) == CONST_DOUBLE
1862 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
1867 if (GET_CODE (op) == CONST_INT
1868 || (GET_CODE (op) == CONST_DOUBLE
1869 && GET_MODE (op) == VOIDmode))
1872 if (CONSTANT_P (op))
1877 if (GET_CODE (op) == CONST_INT
1878 || (GET_CODE (op) == CONST_DOUBLE
1879 && GET_MODE (op) == VOIDmode))
1891 if (GET_CODE (op) == CONST_INT
1892 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
1896 #ifdef EXTRA_CONSTRAINT
1902 if (EXTRA_CONSTRAINT (op, c))
1908 if (GET_CODE (op) == MEM
1909 && ((strict > 0 && ! offsettable_memref_p (op))
1911 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
1912 || (reload_in_progress
1913 && !(GET_CODE (op) == REG
1914 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
1919 if ((strict > 0 && offsettable_memref_p (op))
1920 || (strict == 0 && offsettable_nonstrict_memref_p (op))
1921 /* Before reload, accept what reload can handle. */
1923 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
1924 /* During reload, accept a pseudo */
1925 || (reload_in_progress && GET_CODE (op) == REG
1926 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
1933 && GET_CODE (op) == REG
1934 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1935 || (strict == 0 && GET_CODE (op) == SCRATCH)
1936 || (GET_CODE (op) == REG
1937 && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
/* Remember where we stopped in this operand's constraint string
   so the next alternative resumes from here.  */
1942 constraints[opno] = p;
1943 /* If this operand did not win somehow,
1944 this alternative loses. */
1948 /* This alternative won; the operands are ok.
1949 Change whichever operands this alternative says to change. */
1954 /* See if any earlyclobber operand conflicts with some other
1958 for (eopno = 0; eopno < noperands; eopno++)
1959 /* Ignore earlyclobber operands now in memory,
1960 because we would often report failure when we have
1961 two memory operands, one of which was formerly a REG. */
1962 if (earlyclobber[eopno]
1963 && GET_CODE (recog_operand[eopno]) == REG)
1964 for (opno = 0; opno < noperands; opno++)
1965 if ((GET_CODE (recog_operand[opno]) == MEM
1966 || op_types[opno] != OP_OUT)
1968 /* Ignore things like match_operator operands. */
1969 && *insn_operand_constraint[insn_code_num][opno] != 0
1970 && ! (matching_operands[opno] == eopno
1971 && operands_match_p (recog_operand[opno],
1972 recog_operand[eopno]))
1973 && ! safe_from_earlyclobber (recog_operand[opno],
1974 recog_operand[eopno]))
/* Make each recorded output operand look like its pre-inc/pre-dec
   input (see the comment at the top of this function).  */
1979 while (--funny_match_index >= 0)
1981 recog_operand[funny_match[funny_match_index].other]
1982 = recog_operand[funny_match[funny_match_index].this];
1989 which_alternative++;
1992 /* If we are about to reject this, but we are not to test strictly,
1993 try a very loose test. Only return failure if it fails also. */
1995 return constrain_operands (insn_code_num, -1);
2000 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2001 is a hard reg in class CLASS when its regno is offset by OFFSET
2002 and changed to mode MODE.
2003 If REG occupies multiple hard regs, all of them must be in CLASS. */
2006 reg_fits_class_p (operand, class, offset, mode)
2008 register enum reg_class class;
2010 enum machine_mode mode;
2012 register int regno = REGNO (operand);
/* Only hard registers can belong to a class; pseudos fail the
   first test.  Presumably OFFSET is added to REGNO in the (elided)
   argument of this TEST_HARD_REG_BIT -- TODO confirm.  */
2013 if (regno < FIRST_PSEUDO_REGISTER
2014 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
/* A multi-word value in MODE occupies HARD_REGNO_NREGS consecutive
   hard regs; every one of them must be in CLASS.  */
2019 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2021 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2030 #endif /* REGISTER_CONSTRAINTS */