1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 91-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
25 #include "insn-config.h"
26 #include "insn-attr.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
31 #include "hard-reg-set.h"
35 #ifndef STACK_PUSH_CODE
36 #ifdef STACK_GROWS_DOWNWARD
37 #define STACK_PUSH_CODE PRE_DEC
39 #define STACK_PUSH_CODE PRE_INC
43 /* Import from final.c: */
44 extern rtx alter_subreg ();
46 static void validate_replace_rtx_1 PROTO((rtx *, rtx, rtx, rtx));
47 static rtx *find_single_use_1 PROTO((rtx, rtx *));
48 static rtx *find_constant_term_loc PROTO((rtx *));
/* NOTE(review): fragmentary excerpt.  The `volatile_ok' and
   `reload_completed' variable definitions these comments describe, and the
   body of `init_recog_no_volatile', are elided from this copy -- confirm
   against the full source.  */
50 /* Nonzero means allow operands to be volatile.
51 This should be 0 if you are generating rtl, such as if you are calling
52 the functions in optabs.c and expmed.c (most of the time).
53 This should be 1 if all valid insns need to be recognized,
54 such as in regclass.c and final.c and reload.c.
56 init_recog and init_recog_no_volatile are responsible for setting this. */
60 /* On return from `constrain_operands', indicate which alternative
61 of the insn's constraints was satisfied.  */
63 int which_alternative;
65 /* Nonzero after end of reload pass.
66 Set to 1 or 0 by toplev.c.
67 Controls the significance of (SUBREG (MEM)). */
71 /* Initialize data used by the function `recog'.
72 This must be called once in the compilation of a function
73 before any insn recognition may be done in the function. */
76 init_recog_no_volatile ()
87 /* Try recognizing the instruction INSN,
88 and return the code number that results.
89 Remember the code so that repeated calls do not
90 need to spend the time for actual rerecognition.
92 This function is the normal interface to instruction recognition.
93 The automatically-generated function `recog' is normally called
94 through this one. (The only exception is in combine.c.) */
/* NOTE(review): the `recog_memoized' function header and braces are elided
   in this copy.  The visible body memoizes: it calls `recog' only when
   INSN_CODE (insn) is still the unrecognized sentinel (< 0), then returns
   the cached code.  */
100 if (INSN_CODE (insn) < 0)
101 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
102 return INSN_CODE (insn);
105 /* Check that X is an insn-body for an `asm' with operands
106 and that the operands mentioned in it are legitimate. */
/* NOTE(review): declarations of `operands' and `i', the guard on
   `noperands', and the return statements are elided in this copy.  The
   visible logic validates every asm operand with `general_operand' in
   VOIDmode.  */
109 check_asm_operands (x)
112 int noperands = asm_noperands (x);
121 operands = (rtx *) alloca (noperands * sizeof (rtx));
122 decode_asm_operands (x, operands, NULL_PTR, NULL_PTR, NULL_PTR);
124 for (i = 0; i < noperands; i++)
125 if (!general_operand (operands[i], VOIDmode))
/* NOTE(review): the closing of the comment below (and the rest of its
   sentence) is elided in this copy.  These four parallel arrays record a
   pending group of changes: the insn/MEM owning each change, the saved
   INSN_CODE, the location that was modified, and the old rtx value --
   indexed 0 .. num_changes-1.  */
131 /* Static data for the next two routines.
133 The maximum number of changes supported is defined as the maximum
134 number of operands times 5. This allows for repeated substitutions
135 inside complex indexed address, or, alternatively, changes in up
138 #define MAX_CHANGE_LOCS (MAX_RECOG_OPERANDS * 5)
140 static rtx change_objects[MAX_CHANGE_LOCS];
141 static int change_old_codes[MAX_CHANGE_LOCS];
142 static rtx *change_locs[MAX_CHANGE_LOCS];
143 static rtx change_olds[MAX_CHANGE_LOCS];
145 static int num_changes = 0;
147 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
148 at which NEW will be placed. If OBJECT is zero, no validation is done,
149 the change is simply made.
151 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
152 will be called with the address and mode as parameters. If OBJECT is
153 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
154 the change in place.
156 IN_GROUP is non-zero if this is part of a group of changes that must be
157 performed as a group. In that case, the changes will be stored. The
158 function `apply_change_group' will validate and apply the changes.
160 If IN_GROUP is zero, this is a single change. Try to recognize the insn
161 or validate the memory reference with the change applied. If the result
162 is not valid for the machine, suppress the change and return zero.
163 Otherwise, perform the change and return 1. */
/* NOTE(review): fragmentary excerpt -- the parameter declarations, the
   assignment of `old' from *LOC, the store of NEW through LOC, and the
   early returns are elided in this copy.  */
166 validate_change (object, loc, new, in_group)
174 if (old == new || rtx_equal_p (old, new))
177 if (num_changes >= MAX_CHANGE_LOCS
178 || (in_group == 0 && num_changes != 0))
183 /* Save the information describing this change. */
184 change_objects[num_changes] = object;
185 change_locs[num_changes] = loc;
186 change_olds[num_changes] = old;
188 if (object && GET_CODE (object) != MEM)
190 /* Set INSN_CODE to force rerecognition of insn. Save old code in
192 change_old_codes[num_changes] = INSN_CODE (object);
193 INSN_CODE (object) = -1;
198 /* If we are making a group of changes, return 1. Otherwise, validate the
199 change group we made. */
204 return apply_change_group ();
207 /* Apply a group of changes previously issued with `validate_change'.
208 Return 1 if all changes are valid, zero otherwise. */
/* NOTE(review): fragmentary excerpt -- loop-variable declarations, braces,
   the `break' on a failed change, and the final success/cancel paths are
   elided in this copy.  */
211 apply_change_group ()
215 /* The changes have been applied and all INSN_CODEs have been reset to force
216 rerecognition.
218 The changes are valid if we aren't given an object, or if we are
219 given a MEM and it still is a valid address, or if this is an insn
220 and it is recognized. In the latter case, if reload has completed,
221 we also require that the operands meet the constraints for
222 the insn. We do not allow modifying an ASM_OPERANDS after reload
223 has completed because verifying the constraints is too difficult. */
225 for (i = 0; i < num_changes; i++)
227 rtx object = change_objects[i];
232 if (GET_CODE (object) == MEM)
234 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
/* The insn is invalid if recognition fails (and it is not a valid asm),
   or, after reload, if its operands no longer satisfy their constraints.
   Note the comma expression: insn_extract must run before
   constrain_operands.  */
237 else if ((recog_memoized (object) < 0
238 && (asm_noperands (PATTERN (object)) < 0
239 || ! check_asm_operands (PATTERN (object))
240 || reload_completed))
242 && (insn_extract (object),
243 ! constrain_operands (INSN_CODE (object), 1))))
245 rtx pat = PATTERN (object);
247 /* Perhaps we couldn't recognize the insn because there were
248 extra CLOBBERs at the end. If so, try to re-recognize
249 without the last CLOBBER (later iterations will cause each of
250 them to be eliminated, in turn). But don't do this if we
251 have an ASM_OPERAND. */
252 if (GET_CODE (pat) == PARALLEL
253 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
254 && asm_noperands (PATTERN (object)) < 0)
258 if (XVECLEN (pat, 0) == 2)
259 newpat = XVECEXP (pat, 0, 0);
264 newpat = gen_rtx_PARALLEL (VOIDmode,
265 gen_rtvec (XVECLEN (pat, 0) - 1));
266 for (j = 0; j < XVECLEN (newpat, 0); j++)
267 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
270 /* Add a new change to this group to replace the pattern
271 with this new pattern. Then consider this change
272 as having succeeded. The change we added will
273 cause the entire call to fail if things remain invalid.
275 Note that this can lose if a later change than the one
276 we are processing specified &XVECEXP (PATTERN (object), 0, X)
277 but this shouldn't occur. */
279 validate_change (object, &PATTERN (object), newpat, 1);
281 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
282 /* If this insn is a CLOBBER or USE, it is always valid, but is
283 never recognized. */
/* All changes checked: i == num_changes means every one was valid.  */
290 if (i == num_changes)
302 /* Return the number of changes so far in the current group. */
/* NOTE(review): body elided in this copy; presumably returns
   `num_changes' -- confirm against the full source.  */
305 num_validated_changes ()
310 /* Retract the changes numbered NUM and up. */
/* NOTE(review): the function header (presumably `cancel_changes (num)')
   is elided in this copy.  Restores each saved old rtx value and, for
   insn objects, the saved INSN_CODE.  */
318 /* Back out all the changes. Do this in the opposite order in which
319 they were made.  */
320 for (i = num_changes - 1; i >= num; i--)
322 *change_locs[i] = change_olds[i];
323 if (change_objects[i] && GET_CODE (change_objects[i]) != MEM)
324 INSN_CODE (change_objects[i]) = change_old_codes[i];
329 /* Replace every occurrence of FROM in X with TO. Mark each change with
330 validate_change passing OBJECT. */
/* NOTE(review): fragmentary excerpt -- the `register rtx *loc;' parameter
   declaration, the switch over CODE, braces, and several returns are
   elided in this copy.  All replacements are queued via validate_change
   with in_group == 1; nothing here applies the group.  */
333 validate_replace_rtx_1 (loc, from, to, object)
335 rtx from, to, object;
339 register rtx x = *loc;
340 enum rtx_code code = GET_CODE (x);
342 /* X matches FROM if it is the same rtx or they are both referring to the
343 same register in the same mode. Avoid calling rtx_equal_p unless the
344 operands look similar. */
347 || (GET_CODE (x) == REG && GET_CODE (from) == REG
348 && GET_MODE (x) == GET_MODE (from)
349 && REGNO (x) == REGNO (from))
350 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
351 && rtx_equal_p (x, from)))
353 validate_change (object, loc, to, 1);
357 /* For commutative or comparison operations, try replacing each argument
358 separately and seeing if we made any changes. If so, put a constant
359 argument last.  */
360 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
362 int prev_changes = num_changes;
364 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
365 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
/* Canonicalize: constant operand goes second; for a comparison the
   condition must be swapped, for a commutative op the code is kept.  */
366 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
368 validate_change (object, loc,
369 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
370 : swap_condition (code),
371 GET_MODE (x), XEXP (x, 1),
379 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
380 done the substitution, otherwise we won't. */
385 /* If we have a PLUS whose second operand is now a CONST_INT, use
386 plus_constant to try to simplify it. */
387 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
388 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
/* MINUS of a constant becomes a PLUS of its negation.  */
393 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
395 validate_change (object, loc,
396 plus_constant (XEXP (x, 0), - INTVAL (to)),
404 /* In these cases, the operation to be performed depends on the mode
405 of the operand. If we are replacing the operand with a VOIDmode
406 constant, we lose the information. So try to simplify the operation
407 in that case. If it fails, substitute in something that we know
408 won't be recognized. */
409 if (GET_MODE (to) == VOIDmode
410 && (XEXP (x, 0) == from
411 || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
412 && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
413 && REGNO (XEXP (x, 0)) == REGNO (from))))
415 rtx new = simplify_unary_operation (code, GET_MODE (x), to,
/* Simplification failed: substitute a CLOBBER, which is guaranteed
   not to be recognized, so the whole change group will fail.  */
418 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
420 validate_change (object, loc, new, 1);
426 /* If we have a SUBREG of a register that we are replacing and we are
427 replacing it with a MEM, make a new MEM and try replacing the
428 SUBREG with it. Don't do this if the MEM has a mode-dependent address
429 or if we would be widening it. */
431 if (SUBREG_REG (x) == from
432 && GET_CODE (from) == REG
433 && GET_CODE (to) == MEM
434 && ! mode_dependent_address_p (XEXP (to, 0))
435 && ! MEM_VOLATILE_P (to)
436 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
438 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
439 enum machine_mode mode = GET_MODE (x);
/* On big-endian targets the subword lives at the high end of the
   word, so the byte offset must be adjusted.  */
442 if (BYTES_BIG_ENDIAN)
443 offset += (MIN (UNITS_PER_WORD,
444 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
445 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
447 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
448 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (to);
449 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
450 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (to);
451 validate_change (object, loc, new, 1);
458 /* If we are replacing a register with memory, try to change the memory
459 to be the mode required for memory in extract operations (this isn't
460 likely to be an insertion operation; if it was, nothing bad will
461 happen, we might just fail in some cases). */
463 if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
464 && GET_CODE (XEXP (x, 1)) == CONST_INT
465 && GET_CODE (XEXP (x, 2)) == CONST_INT
466 && ! mode_dependent_address_p (XEXP (to, 0))
467 && ! MEM_VOLATILE_P (to))
469 enum machine_mode wanted_mode = VOIDmode;
470 enum machine_mode is_mode = GET_MODE (to);
471 int pos = INTVAL (XEXP (x, 2));
/* Ask the target's extract patterns what operand mode they want;
   the #ifdef HAVE_extzv/HAVE_extv guards are elided in this copy.  */
474 if (code == ZERO_EXTRACT)
475 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
478 if (code == SIGN_EXTRACT)
479 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
482 /* If we have a narrower mode, we can do something. */
483 if (wanted_mode != VOIDmode
484 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
486 int offset = pos / BITS_PER_UNIT;
489 /* If the bytes and bits are counted differently, we
490 must adjust the offset. */
491 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
492 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
/* Re-express the bit position relative to the narrower MEM.  */
495 pos %= GET_MODE_BITSIZE (wanted_mode);
497 newmem = gen_rtx_MEM (wanted_mode,
498 plus_constant (XEXP (to, 0), offset));
499 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
500 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (to);
501 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (to);
503 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
504 validate_change (object, &XEXP (x, 0), newmem, 1);
514 /* For commutative or comparison operations we've already performed
515 replacements. Don't try to perform them again. */
516 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
518 fmt = GET_RTX_FORMAT (code);
519 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
522 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
523 else if (fmt[i] == 'E')
524 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
525 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
530 /* Try replacing every occurrence of FROM in INSN with TO. After all
531 changes have been made, validate by seeing if INSN is still valid. */
/* NOTE(review): parameter declarations elided in this copy.  Queues all
   replacements as one group, then applies/validates them atomically.  */
534 validate_replace_rtx (from, to, insn)
537 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
538 return apply_change_group ();
541 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
542 SET_DESTs. After all changes have been made, validate by seeing if
543 INSN is still valid. */
/* NOTE(review): parameter declarations and the abort on a non-SET insn
   are elided in this copy.  A MEM destination is special-cased: its
   address (but not the MEM itself) is still a use, so it is scanned.  */
546 validate_replace_src (from, to, insn)
549 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
550 || GET_CODE (PATTERN (insn)) != SET)
553 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
554 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
555 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
557 return apply_change_group ();
561 /* Return 1 if the insn using CC0 set by INSN does not contain
562 any ordered tests applied to the condition codes.
563 EQ and NE tests do not count. */
/* NOTE(review): the function header guard (#ifdef HAVE_cc0 in the full
   source -- confirm) and the conservative `return 0' for a missing next
   insn are elided in this copy.  */
566 next_insn_tests_no_inequality (insn)
569 register rtx next = next_cc0_user (insn);
571 /* If there is no next insn, we have to take the conservative choice. */
575 return ((GET_CODE (next) == JUMP_INSN
576 || GET_CODE (next) == INSN
577 || GET_CODE (next) == CALL_INSN)
578 && ! inequality_comparisons_p (PATTERN (next)));
581 #if 0 /* This is useless since the insn that sets the cc's
582 must be followed immediately by the use of them. */
583 /* Return 1 if the CC value set up by INSN is not used. */
/* NOTE(review): dead code, compiled out by the #if 0 above; the matching
   #endif and several returns are elided in this copy.  */
586 next_insns_test_no_inequality (insn)
589 register rtx next = NEXT_INSN (insn);
591 for (; next != 0; next = NEXT_INSN (next))
593 if (GET_CODE (next) == CODE_LABEL
594 || GET_CODE (next) == BARRIER)
596 if (GET_CODE (next) == NOTE)
598 if (inequality_comparisons_p (PATTERN (next)))
600 if (sets_cc0_p (PATTERN (next)) == 1)
602 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
610 /* This is used by find_single_use to locate an rtx that contains exactly one
611 use of DEST, which is typically either a REG or CC0. It returns a
612 pointer to the innermost rtx expression containing DEST. Appearances of
613 DEST that are being used to totally replace it are not counted. */
/* NOTE(review): fragmentary excerpt -- parameter declarations, the
   `result'/`this_result' declarations, the switch over CODE, and most
   returns (including the NULL duplicate-usage returns) are elided in
   this copy.  */
616 find_single_use_1 (dest, loc)
621 enum rtx_code code = GET_CODE (x);
638 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
639 of a REG that occupies all of the REG, the insn uses DEST if
640 it is mentioned in the destination or the source. Otherwise, we
641 need just check the source. */
/* The SUBREG arm compares word counts so that a SUBREG covering the
   whole register (same number of words) does not count as a use.  */
642 if (GET_CODE (SET_DEST (x)) != CC0
643 && GET_CODE (SET_DEST (x)) != PC
644 && GET_CODE (SET_DEST (x)) != REG
645 && ! (GET_CODE (SET_DEST (x)) == SUBREG
646 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
647 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
648 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
649 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
650 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
653 return find_single_use_1 (dest, &SET_SRC (x));
657 return find_single_use_1 (dest, &XEXP (x, 0));
663 /* If it wasn't one of the common cases above, check each expression and
664 vector of this code. Look for a unique usage of DEST. */
666 fmt = GET_RTX_FORMAT (code);
667 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
/* A direct appearance of DEST at this position counts as the use
   itself; otherwise recurse to look inside the operand.  */
671 if (dest == XEXP (x, i)
672 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
673 && REGNO (dest) == REGNO (XEXP (x, i))))
676 this_result = find_single_use_1 (dest, &XEXP (x, i));
679 result = this_result;
680 else if (this_result)
681 /* Duplicate usage. */
684 else if (fmt[i] == 'E')
688 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
690 if (XVECEXP (x, i, j) == dest
691 || (GET_CODE (dest) == REG
692 && GET_CODE (XVECEXP (x, i, j)) == REG
693 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
696 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
699 result = this_result;
700 else if (this_result)
709 /* See if DEST, produced in INSN, is used only a single time in the
710 sequel. If so, return a pointer to the innermost rtx expression in which
711 it is used; otherwise return zero.
713 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
715 This routine will return usually zero either before flow is called (because
716 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
717 note can't be trusted).
719 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
720 care about REG_DEAD notes or LOG_LINKS.
722 Otherwise, we find the single use by finding an insn that has a
723 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
724 only referenced once in that insn, we know that it must be the first
725 and last insn referencing DEST. */
/* NOTE(review): fragmentary excerpt -- the cc0 branch guard, local
   declarations, the *PLOC stores, and the final return 0 are elided in
   this copy.  */
728 find_single_use (dest, insn, ploc)
740 next = NEXT_INSN (insn);
742 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
745 result = find_single_use_1 (dest, &PATTERN (next));
/* LOG_LINKS/REG_DEAD data is unreliable outside the window between
   flow analysis and reload, and only applies to pseudo registers.  */
752 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
755 for (next = next_nonnote_insn (insn);
756 next != 0 && GET_CODE (next) != CODE_LABEL;
757 next = next_nonnote_insn (next))
758 if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
760 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
761 if (XEXP (link, 0) == insn)
766 result = find_single_use_1 (dest, &PATTERN (next));
776 /* Return 1 if OP is a valid general operand for machine mode MODE.
777 This is either a register reference, a memory reference,
778 or a constant. In the case of a memory reference, the address
779 is checked for general validity for the target machine.
781 Register and memory references must have mode MODE in order to be valid,
782 but some constants have no machine mode and are valid for any mode.
784 If MODE is VOIDmode, OP is checked for validity for whatever mode
785 OP has.
787 The main use of this function is as a predicate in match_operand
788 expressions in the machine description.
790 For an explanation of this function's behavior for registers of
791 class NO_REGS, see the comment for `register_operand'. */
/* NOTE(review): fragmentary excerpt -- the CONSTANT_P test guarding the
   first return, the SUBREG/REG/MEM case dispatch, the `win:' label for
   GO_IF_LEGITIMATE_ADDRESS, and several returns are elided in this
   copy.  */
794 general_operand (op, mode)
796 enum machine_mode mode;
798 register enum rtx_code code = GET_CODE (op);
799 int mode_altering_drug = 0;
801 if (mode == VOIDmode)
802 mode = GET_MODE (op);
804 /* Don't accept CONST_INT or anything similar
805 if the caller wants something floating. */
806 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
807 && GET_MODE_CLASS (mode) != MODE_INT
808 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
812 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
813 #ifdef LEGITIMATE_PIC_OPERAND_P
814 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
815 #endif
816 && LEGITIMATE_CONSTANT_P (op));
818 /* Except for certain constants with VOIDmode, already checked for,
819 OP's mode must match MODE if MODE specifies a mode. */
821 if (GET_MODE (op) != mode)
826 #ifdef INSN_SCHEDULING
827 /* On machines that have insn scheduling, we want all memory
828 reference to be explicit, so outlaw paradoxical SUBREGs. */
829 if (GET_CODE (SUBREG_REG (op)) == MEM
830 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
/* Strip the SUBREG and validate the inner expression instead.  */
834 op = SUBREG_REG (op);
835 code = GET_CODE (op);
837 /* No longer needed, since (SUBREG (MEM...))
838 will load the MEM into a reload reg in the MEM's own mode. */
839 mode_altering_drug = 1;
844 /* A register whose class is NO_REGS is not a general operand. */
845 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
846 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
850 register rtx y = XEXP (op, 0);
851 if (! volatile_ok && MEM_VOLATILE_P (op))
853 if (GET_CODE (y) == ADDRESSOF)
855 /* Use the mem's mode, since it will be reloaded thus. */
856 mode = GET_MODE (op);
857 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
860 /* Pretend this is an operand for now; we'll run force_operand
861 on its replacement in fixup_var_refs_1. */
862 if (code == ADDRESSOF)
/* Reached via the `win' label: the address was legitimate, but a
   stripped (SUBREG (MEM)) is only valid if the address does not
   depend on the access mode.  */
868 if (mode_altering_drug)
869 return ! mode_dependent_address_p (XEXP (op, 0));
873 /* Return 1 if OP is a valid memory address for a memory reference
874 of mode MODE.
876 The main use of this function is as a predicate in match_operand
877 expressions in the machine description. */
/* NOTE(review): the `rtx op;' parameter declaration and braces are elided
   in this copy.  Thin wrapper around memory_address_p.  */
880 address_operand (op, mode)
882 enum machine_mode mode;
884 return memory_address_p (mode, op);
887 /* Return 1 if OP is a register reference of mode MODE.
888 If MODE is VOIDmode, accept a register in any mode.
890 The main use of this function is as a predicate in match_operand
891 expressions in the machine description.
893 As a special exception, registers whose class is NO_REGS are
894 not accepted by `register_operand'. The reason for this change
895 is to allow the representation of special architecture artifacts
896 (such as a condition code register) without extending the rtl
897 definitions. Since registers of class NO_REGS cannot be used
898 as registers in any case where register classes are examined,
899 it is most consistent to keep this function from accepting them. */
/* NOTE(review): parameter declaration, braces, the return 0 under the
   CLASS_CANNOT_CHANGE_SIZE test, and the matching #endif are elided in
   this copy.  */
902 register_operand (op, mode)
904 enum machine_mode mode;
906 if (GET_MODE (op) != mode && mode != VOIDmode)
909 if (GET_CODE (op) == SUBREG)
911 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
912 because it is guaranteed to be reloaded into one.
913 Just make sure the MEM is valid in itself.
914 (Ideally, (SUBREG (MEM)...) should not exist after reload,
915 but currently it does result from (SUBREG (REG)...) where the
916 reg went on the stack.) */
917 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
918 return general_operand (op, mode)
920 #ifdef CLASS_CANNOT_CHANGE_SIZE
/* Reject a mode-changing SUBREG of a hard register whose class
   forbids size changes (except complex modes, whose subwords are
   safe to take).  */
921 if (GET_CODE (SUBREG_REG (op)) == REG
922 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
923 && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
924 REGNO (SUBREG_REG (op)))
925 && (GET_MODE_SIZE (mode)
926 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
927 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
928 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
932 op = SUBREG_REG (op);
935 /* We don't consider registers whose class is NO_REGS
936 to be a register operand. */
937 return (GET_CODE (op) == REG
938 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
939 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
942 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
943 or a hard register. */
/* NOTE(review): parameter declaration and braces elided in this copy.  */
946 scratch_operand (op, mode)
948 enum machine_mode mode;
950 return (GET_MODE (op) == mode
951 && (GET_CODE (op) == SCRATCH
952 || (GET_CODE (op) == REG
953 && REGNO (op) < FIRST_PSEUDO_REGISTER)));
956 /* Return 1 if OP is a valid immediate operand for mode MODE.
958 The main use of this function is as a predicate in match_operand
959 expressions in the machine description. */
/* NOTE(review): parameter declaration, braces, the early return 0, and
   the #endif closing the PIC conditional are elided in this copy.  */
962 immediate_operand (op, mode)
964 enum machine_mode mode;
966 /* Don't accept CONST_INT or anything similar
967 if the caller wants something floating. */
968 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
969 && GET_MODE_CLASS (mode) != MODE_INT
970 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
973 return (CONSTANT_P (op)
974 && (GET_MODE (op) == mode || mode == VOIDmode
975 || GET_MODE (op) == VOIDmode)
976 #ifdef LEGITIMATE_PIC_OPERAND_P
977 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
978 #endif
979 && LEGITIMATE_CONSTANT_P (op));
982 /* Returns 1 if OP is an operand that is a CONST_INT.  MODE is ignored. */
/* NOTE(review): parameter declaration and braces elided in this copy.  */
985 const_int_operand (op, mode)
987 enum machine_mode mode;
989 return GET_CODE (op) == CONST_INT;
992 /* Returns 1 if OP is an operand that is a constant integer or constant
993 floating-point number. */
/* NOTE(review): parameter declaration, braces, and the early return 0 are
   elided in this copy.  */
996 const_double_operand (op, mode)
998 enum machine_mode mode;
1000 /* Don't accept CONST_INT or anything similar
1001 if the caller wants something floating. */
1002 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1003 && GET_MODE_CLASS (mode) != MODE_INT
1004 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1007 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1008 && (mode == VOIDmode || GET_MODE (op) == mode
1009 || GET_MODE (op) == VOIDmode));
1012 /* Return 1 if OP is a general operand that is not an immediate operand. */
/* NOTE(review): parameter declaration and braces elided in this copy.  */
1015 nonimmediate_operand (op, mode)
1017 enum machine_mode mode;
1019 return (general_operand (op, mode) && ! CONSTANT_P (op));
1022 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
/* NOTE(review): parameter declaration, braces, the returns under the
   early tests, and the #endif for the PIC conditional are elided in this
   copy.  The constant arm mirrors `immediate_operand'; the register arm
   mirrors `register_operand'.  */
1025 nonmemory_operand (op, mode)
1027 enum machine_mode mode;
1029 if (CONSTANT_P (op))
1031 /* Don't accept CONST_INT or anything similar
1032 if the caller wants something floating. */
1033 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1034 && GET_MODE_CLASS (mode) != MODE_INT
1035 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1038 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
1039 #ifdef LEGITIMATE_PIC_OPERAND_P
1040 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1041 #endif
1042 && LEGITIMATE_CONSTANT_P (op));
1045 if (GET_MODE (op) != mode && mode != VOIDmode)
1048 if (GET_CODE (op) == SUBREG)
1050 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1051 because it is guaranteed to be reloaded into one.
1052 Just make sure the MEM is valid in itself.
1053 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1054 but currently it does result from (SUBREG (REG)...) where the
1055 reg went on the stack.) */
1056 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1057 return general_operand (op, mode);
1058 op = SUBREG_REG (op);
1061 /* We don't consider registers whose class is NO_REGS
1062 to be a register operand. */
1063 return (GET_CODE (op) == REG
1064 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1065 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1068 /* Return 1 if OP is a valid operand that stands for pushing a
1069 value of mode MODE onto the stack.
1071 The main use of this function is as a predicate in match_operand
1072 expressions in the machine description. */
/* NOTE(review): parameter declaration, braces, the early returns, and the
   reassignment of OP to its address (XEXP (op, 0)) before the
   STACK_PUSH_CODE test are elided in this copy.  */
1075 push_operand (op, mode)
1077 enum machine_mode mode;
1079 if (GET_CODE (op) != MEM)
1082 if (GET_MODE (op) != mode)
1087 if (GET_CODE (op) != STACK_PUSH_CODE)
1090 return XEXP (op, 0) == stack_pointer_rtx;
1093 /* Return 1 if ADDR is a valid memory address for mode MODE. */
/* NOTE(review): the `rtx addr;' declaration, braces, the `win:' label,
   and the returns are elided in this copy.  GO_IF_LEGITIMATE_ADDRESS is
   a target macro that jumps to `win' on success.  */
1096 memory_address_p (mode, addr)
1097 enum machine_mode mode;
1100 if (GET_CODE (addr) == ADDRESSOF)
1103 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1110 /* Return 1 if OP is a valid memory reference with mode MODE,
1111 including a valid address.
1113 The main use of this function is as a predicate in match_operand
1114 expressions in the machine description. */
/* NOTE(review): parameter declaration, braces, the `rtx inner = op;'
   initialization, and an early return are elided in this copy.  */
1117 memory_operand (op, mode)
1119 enum machine_mode mode;
1123 if (! reload_completed)
1124 /* Note that no SUBREG is a memory operand before end of reload pass,
1125 because (SUBREG (MEM...)) forces reloading into a register. */
1126 return GET_CODE (op) == MEM && general_operand (op, mode);
1128 if (mode != VOIDmode && GET_MODE (op) != mode)
/* After reload a (SUBREG (MEM)) is looked through to the MEM.  */
1132 if (GET_CODE (inner) == SUBREG)
1133 inner = SUBREG_REG (inner);
1135 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1138 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1139 that is, a memory reference whose address is a general_operand. */
/* NOTE(review): parameter declaration, braces, and an early return are
   elided in this copy.  */
1142 indirect_operand (op, mode)
1144 enum machine_mode mode;
1146 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1147 if (! reload_completed
1148 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1150 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1151 rtx inner = SUBREG_REG (op);
/* Big-endian adjustment of the byte offset implied by the SUBREG.  */
1153 if (BYTES_BIG_ENDIAN)
1154 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1155 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1157 if (mode != VOIDmode && GET_MODE (op) != mode)
1160 /* The only way that we can have a general_operand as the resulting
1161 address is if OFFSET is zero and the address already is an operand
1162 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1163 operand.  */
1165 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1166 || (GET_CODE (XEXP (inner, 0)) == PLUS
1167 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1168 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1169 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1172 return (GET_CODE (op) == MEM
1173 && memory_operand (op, mode)
1174 && general_operand (XEXP (op, 0), Pmode));
1177 /* Return 1 if this is a comparison operator. This allows the use of
1178 MATCH_OPERATOR to recognize all the branch insns. */
/* NOTE(review): parameter declaration and braces elided in this copy.  */
1181 comparison_operator (op, mode)
1183 enum machine_mode mode;
1185 return ((mode == VOIDmode || GET_MODE (op) == mode)
1186 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1189 /* If BODY is an insn body that uses ASM_OPERANDS,
1190 return the number of operands (both input and output) in the insn.
1191 Otherwise return -1. */
/* NOTE(review): fragmentary excerpt -- the `rtx body;' declaration,
   braces, the `n_sets' declaration/assignment, the CLOBBER-count `break',
   several `return -1's, and the final fall-through `return -1' are elided
   in this copy.  */
1194 asm_noperands (body)
1197 if (GET_CODE (body) == ASM_OPERANDS)
1198 /* No output operands: return number of input operands. */
1199 return ASM_OPERANDS_INPUT_LENGTH (body);
1200 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1201 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1202 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1203 else if (GET_CODE (body) == PARALLEL
1204 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1205 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1207 /* Multiple output operands, or 1 output plus some clobbers:
1208 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1212 /* Count backwards through CLOBBERs to determine number of SETs. */
1213 for (i = XVECLEN (body, 0); i > 0; i--)
1215 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1217 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1221 /* N_SETS is now number of output operands. */
1224 /* Verify that all the SETs we have
1225 came from a single original asm_operands insn
1226 (so that invalid combinations are blocked). */
1227 for (i = 0; i < n_sets; i++)
1229 rtx elt = XVECEXP (body, 0, i);
1230 if (GET_CODE (elt) != SET)
1232 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1234 /* If these ASM_OPERANDS rtx's came from different original insns
1235 then they aren't allowed together. */
1236 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1237 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
/* Total operands = inputs of the first ASM_OPERANDS + n_sets outputs
   (the `+ n_sets' term is on an elided line).  */
1240 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1243 else if (GET_CODE (body) == PARALLEL
1244 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1246 /* 0 outputs, but some clobbers:
1247 body is [(asm_operands ...) (clobber (reg ...))...]. */
1250 /* Make sure all the other parallel things really are clobbers. */
1251 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1252 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1255 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1261 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1262 copy its operands (both input and output) into the vector OPERANDS,
1263 the locations of the operands within the insn into the vector OPERAND_LOCS,
1264 and the constraints for the operands into CONSTRAINTS.
1265 Write the modes of the operands into MODES.
1266 Return the assembler-template.
1268 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1269 we don't store that info. */
/* NOTE(review): elided from this listing: the return type (char *),
   the remaining parameter declarations, braces, and the guards that
   test each output vector for null before each store (which is why
   every store below appears without its enclosing `if').  The four
   branches mirror the four BODY shapes recognized by asm_noperands
   above.  Leading integers are original line numbers, not code.  */
1272 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1277 enum machine_mode *modes;
1283 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1285 rtx asmop = SET_SRC (body);
1286 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1288 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
/* Operand 0 is the output; inputs occupy slots 1..noperands-1.  */
1290 for (i = 1; i < noperands; i++)
1293 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1295 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1297 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1299 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1302 /* The output is in the SET.
1303 Its constraint is in the ASM_OPERANDS itself. */
1305 operands[0] = SET_DEST (body);
1307 operand_locs[0] = &SET_DEST (body);
1309 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1311 modes[0] = GET_MODE (SET_DEST (body));
1312 template = ASM_OPERANDS_TEMPLATE (asmop);
1314 else if (GET_CODE (body) == ASM_OPERANDS)
1317 /* No output operands: BODY is (asm_operands ....). */
/* (The `rtx asmop = body;' line is elided.)  */
1319 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1321 /* The input operands are found in the 1st element vector. */
1322 /* Constraints for inputs are in the 2nd element vector. */
1323 for (i = 0; i < noperands; i++)
1326 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1328 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1330 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1332 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1334 template = ASM_OPERANDS_TEMPLATE (asmop);
1336 else if (GET_CODE (body) == PARALLEL
1337 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1339 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1340 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1341 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1342 int nout = 0; /* Does not include CLOBBERs. */
1344 /* At least one output, plus some CLOBBERs. */
1346 /* The outputs are in the SETs.
1347 Their constraints are in the ASM_OPERANDS itself. */
1348 for (i = 0; i < nparallel; i++)
1350 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1351 break; /* Past last SET */
1354 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1356 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1358 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1360 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
/* (The `nout++;' that counts outputs as the loop runs is elided.)
   Inputs follow the outputs, at offset NOUT.  */
1364 for (i = 0; i < nin; i++)
1367 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1369 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1371 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1373 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1376 template = ASM_OPERANDS_TEMPLATE (asmop);
1378 else if (GET_CODE (body) == PARALLEL
1379 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1381 /* No outputs, but some CLOBBERs. */
1383 rtx asmop = XVECEXP (body, 0, 0);
1384 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1386 for (i = 0; i < nin; i++)
1389 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1391 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1393 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1395 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1398 template = ASM_OPERANDS_TEMPLATE (asmop);
1404 /* Given an rtx *P, if it is a sum containing an integer constant term,
1405 return the location (type rtx *) of the pointer to that constant term.
1406 Otherwise, return a null pointer. */
/* NOTE(review): static helper (declared via PROTO at the top of the
   file).  Elided lines include the parameter declaration, braces, the
   `register rtx tem;' local, and the actual `return' statements after
   each test below.  Leading integers are original line numbers.  */
1409 find_constant_term_loc (p)
1413 register enum rtx_code code = GET_CODE (*p);
1415 /* If *P IS such a constant term, P is its location. */
1417 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1421 /* Otherwise, if not a sum, it has no constant term. */
1423 if (GET_CODE (*p) != PLUS)
1426 /* If one of the summands is constant, return its location. */
1428 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1429 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1432 /* Otherwise, check each summand for containing a constant term. */
/* Recurse into each arm of the PLUS; the first arm that yields a
   non-null location wins (the returns of TEM are elided here).  */
1434 if (XEXP (*p, 0) != 0)
1436 tem = find_constant_term_loc (&XEXP (*p, 0));
1441 if (XEXP (*p, 1) != 0)
1443 tem = find_constant_term_loc (&XEXP (*p, 1));
1451 /* Return 1 if OP is a memory reference
1452 whose address contains no side effects
1453 and remains valid after the addition
1454 of a positive integer less than the
1455 size of the object being referenced.
1457 We assume that the original address is valid and do not check it.
1459 This uses strict_memory_address_p as a subroutine, so
1460 don't use it before reload. */
/* Thin wrapper: strict (post-reload) variant of the offsettable-address
   check on OP's address.  Return type and `rtx op;' declaration are
   elided from this listing.  */
1463 offsettable_memref_p (op)
1466 return ((GET_CODE (op) == MEM)
1467 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1470 /* Similar, but don't require a strictly valid mem ref:
1471 consider pseudo-regs valid as index or base regs. */
/* Non-strict (pre-reload) counterpart of offsettable_memref_p:
   same check with STRICTP == 0.  */
1474 offsettable_nonstrict_memref_p (op)
1477 return ((GET_CODE (op) == MEM)
1478 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1481 /* Return 1 if Y is a memory address which contains no side effects
1482 and would remain valid after the addition of a positive integer
1483 less than the size of that mode.
1485 We assume that the original address is valid and do not check it.
1486 We do check that it is valid for narrower modes.
1488 If STRICTP is nonzero, we require a strictly valid address,
1489 for the sake of use in reload.c. */
/* NOTE(review): elided lines include the parameter declarations for
   STRICTP and Y, braces, the locals `y1', `y2', `z', `good', and the
   save/restore of the constant term around the temporary patch below.
   Leading integers are original line numbers, not code.  */
1492 offsettable_address_p (strictp, mode, y)
1494 enum machine_mode mode;
1497 register enum rtx_code ycode = GET_CODE (y);
/* Pick the strict or non-strict address validator once, up front.  */
1501 int (*addressp) () = (strictp ? strict_memory_address_p : memory_address_p);
1503 if (CONSTANT_ADDRESS_P (y))
1506 /* Adjusting an offsettable address involves changing to a narrower mode.
1507 Make sure that's OK. */
1509 if (mode_dependent_address_p (y))
1512 /* If the expression contains a constant term,
1513 see if it remains valid when max possible offset is added. */
/* Fast path: patch the constant term in place (through *Y2), test the
   patched address, then restore the saved term -- avoids building a
   fresh rtx with plus_constant_for_output.  */
1515 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1520 *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
1521 /* Use QImode because an odd displacement may be automatically invalid
1522 for any wider mode. But it should be valid for a single byte. */
1523 good = (*addressp) (QImode, y);
1525 /* In any case, restore old contents of memory. */
/* Side-effect addresses can never absorb an added displacement.  */
1530 if (ycode == PRE_DEC || ycode == PRE_INC
1531 || ycode == POST_DEC || ycode == POST_INC)
1534 /* The offset added here is chosen as the maximum offset that
1535 any instruction could need to add when operating on something
1536 of the specified mode. We assume that if Y and Y+c are
1537 valid addresses then so is Y+d for all 0<d<c. */
1539 z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);
1541 /* Use QImode because an odd displacement may be automatically invalid
1542 for any wider mode. But it should be valid for a single byte. */
1543 return (*addressp) (QImode, z);
1546 /* Return 1 if ADDR is an address-expression whose effect depends
1547 on the mode of the memory reference it is used in.
1549 Autoincrement addressing is a typical example of mode-dependence
1550 because the amount of the increment depends on the mode. */
/* The target macro jumps to a `win:' label when ADDR is mode-dependent;
   the label, the `return 1;' under it, and the fall-through `return 0;'
   are elided from this listing.  */
1553 mode_dependent_address_p (addr)
1556 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1562 /* Return 1 if OP is a general operand
1563 other than a memory ref with a mode dependent address. */
/* Elided here: the `rtx op;' declaration, braces, the early `return 0;'
   and `return 1;' statements, and the `lose:' label targeted by the
   GO_IF_MODE_DEPENDENT_ADDRESS macro below.  */
1566 mode_independent_operand (op, mode)
1567 enum machine_mode mode;
1572 if (! general_operand (op, mode))
/* Non-MEM general operands trivially qualify.  */
1575 if (GET_CODE (op) != MEM)
1578 addr = XEXP (op, 0);
1579 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
1585 /* Given an operand OP that is a valid memory reference
1586 which satisfies offsettable_memref_p,
1587 return a new memory reference whose address has been adjusted by OFFSET.
1588 OFFSET should be positive and less than the size of the object referenced.
/* NOTE(review): elided lines include the parameter declarations, braces,
   the `return new;' after each MEM construction, the copying of MEM
   flags besides RTX_UNCHANGING_P, and the abort() reached when OP is
   not a MEM (this routine presumably requires a MEM -- confirm against
   the full source).  Leading integers are original line numbers.  */
1592 adj_offsettable_operand (op, offset)
1596 register enum rtx_code code = GET_CODE (op);
1600 register rtx y = XEXP (op, 0);
/* Case 1: bare constant address -- just add the offset.  */
1603 if (CONSTANT_ADDRESS_P (y))
1605 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1606 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
/* Case 2: (plus ... const) -- fold the offset into the existing
   constant term, found via find_constant_term_loc on a copy.  */
1610 if (GET_CODE (y) == PLUS)
1613 register rtx *const_loc;
1617 const_loc = find_constant_term_loc (&z);
1620 *const_loc = plus_constant_for_output (*const_loc, offset);
/* Case 3: no constant term -- wrap the whole address in a new PLUS.  */
1625 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1626 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1632 #ifdef REGISTER_CONSTRAINTS
1634 /* Check the operands of an insn (found in recog_operands)
1635 against the insn's operand constraints (found via INSN_CODE_NUM)
1636 and return 1 if they are valid.
1638 WHICH_ALTERNATIVE is set to a number which indicates which
1639 alternative of constraints was matched: 0 for the first alternative,
1640 1 for the next, etc.
1642 In addition, when two operands are required to match
1643 and it happens that the output operand is (reg) while the
1644 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
1645 make the output operand look like the input.
1646 This is because the output operand is the one the template will print.
1648 This is used in final, just before printing the assembler code and by
1649 the routines that determine an insn's attribute.
1651 If STRICT is a positive non-zero value, it means that we have been
1652 called after reload has been completed. In that case, we must
1653 do all checks strictly. If it is zero, it means that we have been called
1654 before reload has completed. In that case, we first try to see if we can
1655 find an alternative that matches strictly. If not, we try again, this
1656 time assuming that reload will fix up the insn. This provides a "best
1657 guess" for the alternative and is used to compute attributes of insns prior
1658 to reload. A negative value of STRICT is used for this internal call. */
/* NOTE(review): this listing is heavily elided.  Missing lines include
   the parameter declarations, braces, the locals (`c', `opno', `val',
   `offset', `win', `lose', `eopno', ...), the `switch (c)' statement
   and its case labels that dispatch on each constraint letter, the
   `win = 1' assignments under each successful test, the final
   `return 1' / `return 0' paths, and the `goto' targets.  The leading
   integers on each line are original file line numbers, not code.  */
1666 constrain_operands (insn_code_num, strict)
1670 char *constraints[MAX_RECOG_OPERANDS];
1671 int matching_operands[MAX_RECOG_OPERANDS];
1672 enum op_type {OP_IN, OP_OUT, OP_INOUT} op_types[MAX_RECOG_OPERANDS];
1673 int earlyclobber[MAX_RECOG_OPERANDS];
1675 int noperands = insn_n_operands[insn_code_num];
1677 struct funny_match funny_match[MAX_RECOG_OPERANDS];
1678 int funny_match_index;
1679 int nalternatives = insn_n_alternatives[insn_code_num];
/* An insn with no operands or no constraints matches vacuously.  */
1681 if (noperands == 0 || nalternatives == 0)
1684 for (c = 0; c < noperands; c++)
1686 constraints[c] = insn_operand_constraint[insn_code_num][c];
1687 matching_operands[c] = -1;
1688 op_types[c] = OP_IN;
/* Try each constraint alternative in turn; WHICH_ALTERNATIVE is a
   file-scope global (see the head of this file) left set to the one
   that matched.  */
1691 which_alternative = 0;
1693 while (which_alternative < nalternatives)
1697 funny_match_index = 0;
1699 for (opno = 0; opno < noperands; opno++)
1701 register rtx op = recog_operand[opno];
1702 enum machine_mode mode = GET_MODE (op);
1703 register char *p = constraints[opno];
1708 earlyclobber[opno] = 0;
1710 /* A unary operator may be accepted by the predicate, but it
1711 is irrelevant for matching constraints. */
1712 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
/* For a hard-reg SUBREG, constraint checks apply to the inner REG
   with its word offset; for pseudos the offset stays 0.  */
1715 if (GET_CODE (op) == SUBREG)
1717 if (GET_CODE (SUBREG_REG (op)) == REG
1718 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
1719 offset = SUBREG_WORD (op);
1720 op = SUBREG_REG (op);
1723 /* An empty constraint or empty alternative
1724 allows anything which matched the pattern. */
1725 if (*p == 0 || *p == ',')
/* Walk the constraint string for this alternative, one letter at a
   time; the big `switch (c)' on each letter is elided below -- only
   the per-letter tests survive in this listing.  */
1728 while (*p && (c = *p++) != ',')
1738 /* Ignore rest of this alternative as far as
1739 constraint checking is concerned. */
1740 while (*p && *p != ',')
/* '=' marks a pure output; '+' an in/out operand; '&' earlyclobber.  */
1745 op_types[opno] = OP_OUT;
1749 op_types[opno] = OP_INOUT;
1753 earlyclobber[opno] = 1;
1761 /* This operand must be the same as a previous one.
1762 This kind of constraint is used for instructions such
1763 as add when they take only two operands.
1765 Note that the lower-numbered operand is passed first.
1767 If we are not testing strictly, assume that this constraint
1768 will be satisfied. */
1772 val = operands_match_p (recog_operand[c - '0'],
1773 recog_operand[opno]);
1775 matching_operands[opno] = c - '0';
1776 matching_operands[c - '0'] = opno;
1780 /* If output is *x and input is *--x,
1781 arrange later to change the output to *--x as well,
1782 since the output op is the one that will be printed. */
1783 if (val == 2 && strict > 0)
1785 funny_match[funny_match_index].this = opno;
1786 funny_match[funny_match_index++].other = c - '0';
1791 /* p is used for address_operands. When we are called by
1792 gen_reload, no one will have checked that the address is
1793 strictly valid, i.e., that all pseudos requiring hard regs
1794 have gotten them. */
1796 || (strict_memory_address_p
1797 (insn_operand_mode[insn_code_num][opno], op)))
1801 /* No need to check general_operand again;
1802 it was done in insn-recog.c. */
1804 /* Anything goes unless it is a REG and really has a hard reg
1805 but the hard reg is not in the class GENERAL_REGS. */
/* Strict check for 'g' (general operand).  */
1807 || GENERAL_REGS == ALL_REGS
1808 || GET_CODE (op) != REG
1809 || (reload_in_progress
1810 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1811 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
1818 && GET_CODE (op) == REG
1819 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1820 || (strict == 0 && GET_CODE (op) == SCRATCH)
1821 || (GET_CODE (op) == REG
1822 && ((GENERAL_REGS == ALL_REGS
1823 && REGNO (op) < FIRST_PSEUDO_REGISTER)
1824 || reg_fits_class_p (op, GENERAL_REGS,
1830 /* This is used for a MATCH_SCRATCH in the cases when
1831 we don't actually need anything. So anything goes
/* 'm': memory operand.  */
1837 if (GET_CODE (op) == MEM
1838 /* Before reload, accept what reload can turn into mem. */
1839 || (strict < 0 && CONSTANT_P (op))
1840 /* During reload, accept a pseudo */
1841 || (reload_in_progress && GET_CODE (op) == REG
1842 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* '<': memory with auto-decrement address.  */
1847 if (GET_CODE (op) == MEM
1848 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
1849 || GET_CODE (XEXP (op, 0)) == POST_DEC))
/* '>': memory with auto-increment address.  */
1854 if (GET_CODE (op) == MEM
1855 && (GET_CODE (XEXP (op, 0)) == PRE_INC
1856 || GET_CODE (XEXP (op, 0)) == POST_INC))
1861 #ifndef REAL_ARITHMETIC
1862 /* Match any CONST_DOUBLE, but only if
1863 we can examine the bits of it reliably. */
1864 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1865 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1866 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1869 if (GET_CODE (op) == CONST_DOUBLE)
1874 if (GET_CODE (op) == CONST_DOUBLE)
/* 'G'/'H': target-specific CONST_DOUBLE ranges.  */
1880 if (GET_CODE (op) == CONST_DOUBLE
1881 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
/* 's'/'i'/'n': various flavors of constant operand.  */
1886 if (GET_CODE (op) == CONST_INT
1887 || (GET_CODE (op) == CONST_DOUBLE
1888 && GET_MODE (op) == VOIDmode))
1891 if (CONSTANT_P (op))
1896 if (GET_CODE (op) == CONST_INT
1897 || (GET_CODE (op) == CONST_DOUBLE
1898 && GET_MODE (op) == VOIDmode))
/* 'I'..'P': target-specific CONST_INT ranges.  */
1910 if (GET_CODE (op) == CONST_INT
1911 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
1915 #ifdef EXTRA_CONSTRAINT
1921 if (EXTRA_CONSTRAINT (op, c))
/* 'V': memory that is NOT offsettable.  */
1927 if (GET_CODE (op) == MEM
1928 && ((strict > 0 && ! offsettable_memref_p (op))
1930 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
1931 || (reload_in_progress
1932 && !(GET_CODE (op) == REG
1933 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
/* 'o': offsettable memory, strictness chosen per STRICT.  */
1938 if ((strict > 0 && offsettable_memref_p (op))
1939 || (strict == 0 && offsettable_nonstrict_memref_p (op))
1940 /* Before reload, accept what reload can handle. */
1942 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
1943 /* During reload, accept a pseudo */
1944 || (reload_in_progress && GET_CODE (op) == REG
1945 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* Default: a register-class letter, via REG_CLASS_FROM_LETTER.  */
1952 && GET_CODE (op) == REG
1953 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1954 || (strict == 0 && GET_CODE (op) == SCRATCH)
1955 || (GET_CODE (op) == REG
1956 && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
1961 constraints[opno] = p;
1962 /* If this operand did not win somehow,
1963 this alternative loses. */
1967 /* This alternative won; the operands are ok.
1968 Change whichever operands this alternative says to change. */
1973 /* See if any earlyclobber operand conflicts with some other
1977 for (eopno = 0; eopno < noperands; eopno++)
1978 /* Ignore earlyclobber operands now in memory,
1979 because we would often report failure when we have
1980 two memory operands, one of which was formerly a REG. */
1981 if (earlyclobber[eopno]
1982 && GET_CODE (recog_operand[eopno]) == REG)
1983 for (opno = 0; opno < noperands; opno++)
1984 if ((GET_CODE (recog_operand[opno]) == MEM
1985 || op_types[opno] != OP_OUT)
1987 /* Ignore things like match_operator operands. */
1988 && *insn_operand_constraint[insn_code_num][opno] != 0
1989 && ! (matching_operands[opno] == eopno
1990 && operands_match_p (recog_operand[opno],
1991 recog_operand[eopno]))
1992 && ! safe_from_earlyclobber (recog_operand[opno],
1993 recog_operand[eopno]))
/* On success (strict > 0): perform the deferred output rewrites
   recorded in FUNNY_MATCH (see the comment at original line 1780).  */
1998 while (--funny_match_index >= 0)
2000 recog_operand[funny_match[funny_match_index].other]
2001 = recog_operand[funny_match[funny_match_index].this];
2008 which_alternative++;
2011 /* If we are about to reject this, but we are not to test strictly,
2012 try a very loose test. Only return failure if it fails also. */
2014 return constrain_operands (insn_code_num, -1);
2019 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2020 is a hard reg in class CLASS when its regno is offset by OFFSET
2021 and changed to mode MODE.
2022 If REG occupies multiple hard regs, all of them must be in CLASS. */
/* NOTE(review): elided lines include the declarations of OPERAND and
   OFFSET, braces, the `register int sr;' local, the loop's condition/
   increment and tested register number (regno + offset + sr,
   presumably -- confirm against full source), and the final
   `return 1' / `return 0'.  Leading integers are original line
   numbers, not code.  */
2025 reg_fits_class_p (operand, class, offset, mode)
2027 register enum reg_class class;
2029 enum machine_mode mode;
2031 register int regno = REGNO (operand);
/* Pseudo registers (regno >= FIRST_PSEUDO_REGISTER) never qualify.  */
2032 if (regno < FIRST_PSEUDO_REGISTER
2033 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
/* Every hard reg the MODE-sized value occupies must be in CLASS.  */
2038 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2040 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2049 #endif /* REGISTER_CONSTRAINTS */