1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 91-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
25 #include "insn-config.h"
26 #include "insn-attr.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
31 #include "hard-reg-set.h"
/* Default the rtx code used by push_operand: pre-decrement when the stack
   grows downward, pre-increment otherwise.
   NOTE(review): this listing omits lines (original numbering skips the
   #else/#endif pair here) -- partial view only.  */
36 #ifndef STACK_PUSH_CODE
37 #ifdef STACK_GROWS_DOWNWARD
38 #define STACK_PUSH_CODE PRE_DEC
40 #define STACK_PUSH_CODE PRE_INC
/* Forward declarations of this file's static helpers (old-style PROTO
   macro, pre-ANSI compatibility).  */
44 static void validate_replace_rtx_1 PROTO((rtx *, rtx, rtx, rtx));
45 static rtx *find_single_use_1 PROTO((rtx, rtx *));
46 static rtx *find_constant_term_loc PROTO((rtx *));
47 static int insn_invalid_p PROTO((rtx));
/* File-scope state shared by the recognizer and by extract_insn.
   NOTE(review): several declarations are missing from this listing
   (e.g. the `volatile_ok', `recog_n_operands' and `recog_n_dups'
   definitions the comments below describe) -- partial view only.  */
49 /* Nonzero means allow operands to be volatile.
50 This should be 0 if you are generating rtl, such as if you are calling
51 the functions in optabs.c and expmed.c (most of the time).
52 This should be 1 if all valid insns need to be recognized,
53 such as in regclass.c and final.c and reload.c.
55 init_recog and init_recog_no_volatile are responsible for setting this. */
59 /* The next variables are set up by extract_insn. The first four of them
60 are also set up during insn_extract. */
62 /* Indexed by N, gives value of operand N. */
63 rtx recog_operand[MAX_RECOG_OPERANDS];
65 /* Indexed by N, gives location where operand N was found. */
66 rtx *recog_operand_loc[MAX_RECOG_OPERANDS];
68 /* Indexed by N, gives location where the Nth duplicate-appearance of
69 an operand was found. This is something that matched MATCH_DUP. */
70 rtx *recog_dup_loc[MAX_RECOG_OPERANDS];
72 /* Indexed by N, gives the operand number that was duplicated in the
73 Nth duplicate-appearance of an operand. */
74 char recog_dup_num[MAX_RECOG_OPERANDS];
76 /* The number of operands of the insn. */
79 /* The number of MATCH_DUPs in the insn. */
82 /* The number of alternatives in the constraints for the insn. */
83 int recog_n_alternatives;
85 /* Indexed by N, gives the mode of operand N. */
86 enum machine_mode recog_operand_mode[MAX_RECOG_OPERANDS];
88 /* Indexed by N, gives the constraint string for operand N. */
89 char *recog_constraints[MAX_RECOG_OPERANDS];
91 /* Indexed by N, gives the type (in, out, inout) for operand N. */
92 enum op_type recog_op_type[MAX_RECOG_OPERANDS];
94 #ifndef REGISTER_CONSTRAINTS
95 /* Indexed by N, nonzero if operand N should be an address. */
96 char recog_operand_address_p[MAX_RECOG_OPERANDS];
99 /* On return from `constrain_operands', indicate which alternative
102 int which_alternative;
104 /* Nonzero after end of reload pass.
105 Set to 1 or 0 by toplev.c.
106 Controls the significance of (SUBREG (MEM)). */
108 int reload_completed;
110 /* Initialize data used by the function `recog'.
111 This must be called once in the compilation of a function
112 before any insn recognition may be done in the function. */
/* NOTE(review): the function body (presumably clearing volatile_ok) is
   missing from this listing -- only the header line survives.  */
115 init_recog_no_volatile ()
126 /* Try recognizing the instruction INSN,
127 and return the code number that results.
128 Remember the code so that repeated calls do not
129 need to spend the time for actual rerecognition.
131 This function is the normal interface to instruction recognition.
132 The automatically-generated function `recog' is normally called
133 through this one. (The only exception is in combine.c.) */
/* NOTE(review): return type, parameter declaration and braces are
   missing from this listing.  */
136 recog_memoized (insn)
/* A negative INSN_CODE means "not yet recognized"; cache recog's result
   in the insn so repeated queries are O(1).  */
139 if (INSN_CODE (insn) < 0)
140 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
141 return INSN_CODE (insn);
144 /* Check that X is an insn-body for an `asm' with operands
145 and that the operands mentioned in it are legitimate. */
/* NOTE(review): the early-exit checks on `noperands' and the final
   return statements are missing from this listing.  */
148 check_asm_operands (x)
151 int noperands = asm_noperands (x);
/* alloca: the operand vector lives only for the duration of this call.  */
160 operands = (rtx *) alloca (noperands * sizeof (rtx));
161 decode_asm_operands (x, operands, NULL_PTR, NULL_PTR, NULL_PTR);
/* Every asm operand must satisfy general_operand in some mode.  */
163 for (i = 0; i < noperands; i++)
164 if (!general_operand (operands[i], VOIDmode))
170 /* Static data for the next two routines. */
/* One pending change: where it was made (loc), what was there before
   (old), the containing object, and the insn code to restore on cancel.
   NOTE(review): the struct members are missing from this listing.  */
172 typedef struct change_t
/* Growable array of pending changes in the current change group.  */
180 static change_t *changes;
181 static int changes_allocated;
183 static int num_changes = 0;
185 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
186 at which NEW will be placed. If OBJECT is zero, no validation is done,
187 the change is simply made.
189 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
190 will be called with the address and mode as parameters. If OBJECT is
191 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
194 IN_GROUP is non-zero if this is part of a group of changes that must be
195 performed as a group. In that case, the changes will be stored. The
196 function `apply_change_group' will validate and apply the changes.
198 If IN_GROUP is zero, this is a single change. Try to recognize the insn
199 or validate the memory reference with the change applied. If the result
200 is not valid for the machine, suppress the change and return zero.
201 Otherwise, perform the change and return 1. */
/* NOTE(review): declarations, the abort() on misuse, the store through
   LOC, and several braces are missing from this listing.  */
204 validate_change (object, loc, new, in_group)
/* No-op if the replacement is identical to what is already there.  */
212 if (old == new || rtx_equal_p (old, new))
/* Mixing grouped and ungrouped changes is a caller bug.  */
215 if (in_group == 0 && num_changes != 0)
220 /* Save the information describing this change. */
221 if (num_changes >= changes_allocated)
223 if (changes_allocated == 0)
224 /* This value allows for repeated substitutions inside complex
225 indexed addresses, or changes in up to 5 insns. */
226 changes_allocated = MAX_RECOG_OPERANDS * 5;
228 changes_allocated *= 2;
231 (change_t*) xrealloc (changes,
232 sizeof (change_t) * changes_allocated);
235 changes[num_changes].object = object;
236 changes[num_changes].loc = loc;
237 changes[num_changes].old = old;
239 if (object && GET_CODE (object) != MEM)
241 /* Set INSN_CODE to force rerecognition of insn. Save old code in
243 changes[num_changes].old_code = INSN_CODE (object);
244 INSN_CODE (object) = -1;
249 /* If we are making a group of changes, return 1. Otherwise, validate the
250 change group we made. */
255 return apply_change_group ();
258 /* This subroutine of apply_change_group verifies whether the changes to INSN
259 were valid; i.e. whether INSN can still be recognized. */
/* Returns nonzero when INSN is NOT valid.  NOTE(review): the return
   statements and extract_insn call are missing from this listing.  */
262 insn_invalid_p (insn)
265 int icode = recog_memoized (insn);
/* An asm has no insn code; it is valid iff its operands check out.  */
266 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
268 if (is_asm && ! check_asm_operands (PATTERN (insn)))
270 if (! is_asm && icode < 0)
273 /* After reload, verify that all constraints are satisfied. */
274 if (reload_completed)
278 if (! constrain_operands (1))
285 /* Apply a group of changes previously issued with `validate_change'.
286 Return 1 if all changes are valid, zero otherwise. */
/* NOTE(review): the declarations, the "basis" case (object == 0), the
   cancel_changes fallback and final returns are missing from this
   listing -- partial view only.  */
289 apply_change_group ()
293 /* The changes have been applied and all INSN_CODEs have been reset to force
296 The changes are valid if we aren't given an object, or if we are
297 given a MEM and it still is a valid address, or if this is in insn
298 and it is recognized. In the latter case, if reload has completed,
299 we also require that the operands meet the constraints for
302 for (i = 0; i < num_changes; i++)
304 rtx object = changes[i].object;
309 if (GET_CODE (object) == MEM)
310 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
314 else if (insn_invalid_p (object))
316 rtx pat = PATTERN (object);
318 /* Perhaps we couldn't recognize the insn because there were
319 extra CLOBBERs at the end. If so, try to re-recognize
320 without the last CLOBBER (later iterations will cause each of
321 them to be eliminated, in turn). But don't do this if we
322 have an ASM_OPERAND. */
323 if (GET_CODE (pat) == PARALLEL
324 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
325 && asm_noperands (PATTERN (object)) < 0)
/* A two-element PARALLEL minus one CLOBBER degenerates to the bare
   first element; otherwise rebuild a shorter PARALLEL.  */
329 if (XVECLEN (pat, 0) == 2)
330 newpat = XVECEXP (pat, 0, 0);
335 newpat = gen_rtx_PARALLEL (VOIDmode,
336 gen_rtvec (XVECLEN (pat, 0) - 1));
337 for (j = 0; j < XVECLEN (newpat, 0); j++)
338 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
341 /* Add a new change to this group to replace the pattern
342 with this new pattern. Then consider this change
343 as having succeeded. The change we added will
344 cause the entire call to fail if things remain invalid.
346 Note that this can lose if a later change than the one
347 we are processing specified &XVECEXP (PATTERN (object), 0, X)
348 but this shouldn't occur. */
350 validate_change (object, &PATTERN (object), newpat, 1);
352 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
353 /* If this insn is a CLOBBER or USE, it is always valid, but is
/* All changes validated only if the loop ran to completion.  */
361 if (i == num_changes)
373 /* Return the number of changes so far in the current group. */
/* NOTE(review): the body (returning num_changes) is missing from this
   listing -- only the header line survives.  */
376 num_validated_changes ()
381 /* Retract the changes numbered NUM and up. */
/* NOTE(review): the function header line itself (presumably
   `cancel_changes (num)') is missing from this listing; only the
   comment and loop body survive.  */
389 /* Back out all the changes. Do this in the opposite order in which
/* Restore both the rtl and any saved insn code, newest change first.  */
391 for (i = num_changes - 1; i >= num; i--)
393 *changes[i].loc = changes[i].old;
394 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
395 INSN_CODE (changes[i].object) = changes[i].old_code;
400 /* Replace every occurrence of FROM in X with TO. Mark each change with
401 validate_change passing OBJECT. */
/* Recursive worker for validate_replace_rtx / validate_replace_src.
   NOTE(review): the `switch (code)' statement and its case labels
   (apparently PLUS, MINUS, the unary extensions, SUBREG and the
   ZERO_EXTRACT/SIGN_EXTRACT cases judging by the surviving bodies) are
   missing from this listing, as are several braces and declarations --
   partial view only.  */
404 validate_replace_rtx_1 (loc, from, to, object)
406 rtx from, to, object;
410 register rtx x = *loc;
411 enum rtx_code code = GET_CODE (x);
413 /* X matches FROM if it is the same rtx or they are both referring to the
414 same register in the same mode. Avoid calling rtx_equal_p unless the
415 operands look similar. */
418 || (GET_CODE (x) == REG && GET_CODE (from) == REG
419 && GET_MODE (x) == GET_MODE (from)
420 && REGNO (x) == REGNO (from))
421 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
422 && rtx_equal_p (x, from)))
424 validate_change (object, loc, to, 1);
428 /* For commutative or comparison operations, try replacing each argument
429 separately and seeing if we made any changes. If so, put a constant
431 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
433 int prev_changes = num_changes;
435 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
436 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
/* Canonicalize: constants belong in operand 1, swapping the condition
   code for comparisons and keeping it for commutative operators.  */
437 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
439 validate_change (object, loc,
440 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
441 : swap_condition (code),
442 GET_MODE (x), XEXP (x, 1),
450 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
451 done the substitution, otherwise we won't. */
456 /* If we have a PLUS whose second operand is now a CONST_INT, use
457 plus_constant to try to simplify it. */
458 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
459 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
/* Presumably the MINUS case: x - c is rewritten as x + (-c).
   TODO confirm against the elided case label.  */
464 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
466 validate_change (object, loc,
467 plus_constant (XEXP (x, 0), - INTVAL (to)),
475 /* In these cases, the operation to be performed depends on the mode
476 of the operand. If we are replacing the operand with a VOIDmode
477 constant, we lose the information. So try to simplify the operation
478 in that case. If it fails, substitute in something that we know
479 won't be recognized. */
480 if (GET_MODE (to) == VOIDmode
481 && (XEXP (x, 0) == from
482 || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
483 && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
484 && REGNO (XEXP (x, 0)) == REGNO (from))))
486 rtx new = simplify_unary_operation (code, GET_MODE (x), to,
/* A CLOBBER of const0_rtx is deliberately unrecognizable, so the
   enclosing change group will fail cleanly.  */
489 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
491 validate_change (object, loc, new, 1);
497 /* If we have a SUBREG of a register that we are replacing and we are
498 replacing it with a MEM, make a new MEM and try replacing the
499 SUBREG with it. Don't do this if the MEM has a mode-dependent address
500 or if we would be widening it. */
502 if (SUBREG_REG (x) == from
503 && GET_CODE (from) == REG
504 && GET_CODE (to) == MEM
505 && ! mode_dependent_address_p (XEXP (to, 0))
506 && ! MEM_VOLATILE_P (to)
507 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
509 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
510 enum machine_mode mode = GET_MODE (x);
/* On big-endian targets the narrow value sits at the high end of the
   word, so the byte offset must be adjusted.  */
513 if (BYTES_BIG_ENDIAN)
514 offset += (MIN (UNITS_PER_WORD,
515 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
516 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
518 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
519 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (to);
520 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
521 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (to);
522 validate_change (object, loc, new, 1);
529 /* If we are replacing a register with memory, try to change the memory
530 to be the mode required for memory in extract operations (this isn't
531 likely to be an insertion operation; if it was, nothing bad will
532 happen, we might just fail in some cases). */
534 if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
535 && GET_CODE (XEXP (x, 1)) == CONST_INT
536 && GET_CODE (XEXP (x, 2)) == CONST_INT
537 && ! mode_dependent_address_p (XEXP (to, 0))
538 && ! MEM_VOLATILE_P (to))
540 enum machine_mode wanted_mode = VOIDmode;
541 enum machine_mode is_mode = GET_MODE (to);
542 int pos = INTVAL (XEXP (x, 2));
/* Ask the extv/extzv patterns what operand mode they want; fall back
   to word_mode when the pattern leaves it unspecified.  */
545 if (code == ZERO_EXTRACT)
547 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
548 if (wanted_mode == VOIDmode)
549 wanted_mode = word_mode;
553 if (code == SIGN_EXTRACT)
555 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
556 if (wanted_mode == VOIDmode)
557 wanted_mode = word_mode;
561 /* If we have a narrower mode, we can do something. */
562 if (wanted_mode != VOIDmode
563 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
565 int offset = pos / BITS_PER_UNIT;
568 /* If the bytes and bits are counted differently, we
569 must adjust the offset. */
570 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
571 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
574 pos %= GET_MODE_BITSIZE (wanted_mode);
576 newmem = gen_rtx_MEM (wanted_mode,
577 plus_constant (XEXP (to, 0), offset));
578 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
579 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (to);
580 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (to);
582 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
583 validate_change (object, &XEXP (x, 0), newmem, 1);
593 /* For commutative or comparison operations we've already performed
594 replacements. Don't try to perform them again. */
595 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
/* Generic recursion over X's format string: 'e' is a sub-expression,
   'E' is a vector of sub-expressions.  */
597 fmt = GET_RTX_FORMAT (code);
598 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
601 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
602 else if (fmt[i] == 'E')
603 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
604 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
609 /* Try replacing every occurrence of FROM in INSN with TO. After all
610 changes have been made, validate by seeing if INSN is still valid. */
/* NOTE(review): return type, parameter declarations and braces are
   missing from this listing.  */
613 validate_replace_rtx (from, to, insn)
616 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
617 return apply_change_group ();
620 /* Try replacing every occurrence of FROM in INSN with TO. After all
621 changes have been made, validate by seeing if INSN is still valid. */
/* Group variant: records the changes but (unlike validate_replace_rtx)
   leaves applying/validating them to a later apply_change_group call.
   NOTE(review): declarations and braces are missing from this listing.  */
624 validate_replace_rtx_group (from, to, insn)
627 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
630 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
631 SET_DESTs. After all changes have been made, validate by seeing if
632 INSN is still valid. */
/* NOTE(review): the abort() taken when INSN is not a single SET is
   missing from this listing.  */
635 validate_replace_src (from, to, insn)
638 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
639 || GET_CODE (PATTERN (insn)) != SET)
/* Replace in the source; for a store, also replace inside the
   destination ADDRESS (the address is a use, not a def).  */
642 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
643 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
644 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
646 return apply_change_group ();
650 /* Return 1 if the insn using CC0 set by INSN does not contain
651 any ordered tests applied to the condition codes.
652 EQ and NE tests do not count. */
/* NOTE(review): the `return 0' for the no-next-insn case is missing
   from this listing.  */
655 next_insn_tests_no_inequality (insn)
658 register rtx next = next_cc0_user (insn);
660 /* If there is no next insn, we have to take the conservative choice. */
664 return ((GET_CODE (next) == JUMP_INSN
665 || GET_CODE (next) == INSN
666 || GET_CODE (next) == CALL_INSN)
667 && ! inequality_comparisons_p (PATTERN (next)));
670 #if 0 /* This is useless since the insn that sets the cc's
671 must be followed immediately by the use of them. */
672 /* Return 1 if the CC value set up by INSN is not used. */
/* Dead code kept under #if 0 -- scans forward over the insn stream
   until a label/barrier, rejecting any ordered comparison of cc0.
   NOTE(review): the return statements and #endif are missing from
   this listing.  */
675 next_insns_test_no_inequality (insn)
678 register rtx next = NEXT_INSN (insn);
680 for (; next != 0; next = NEXT_INSN (next))
682 if (GET_CODE (next) == CODE_LABEL
683 || GET_CODE (next) == BARRIER)
685 if (GET_CODE (next) == NOTE)
687 if (inequality_comparisons_p (PATTERN (next)))
689 if (sets_cc0_p (PATTERN (next)) == 1)
691 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
699 /* This is used by find_single_use to locate an rtx that contains exactly one
700 use of DEST, which is typically either a REG or CC0. It returns a
701 pointer to the innermost rtx expression containing DEST. Appearances of
702 DEST that are being used to totally replace it are not counted. */
/* NOTE(review): the `switch (code)' and several case labels, the
   `result' declaration/initialization, and the `return 0' duplicate-use
   paths are missing from this listing -- partial view only.  */
705 find_single_use_1 (dest, loc)
710 enum rtx_code code = GET_CODE (x);
727 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
728 of a REG that occupies all of the REG, the insn uses DEST if
729 it is mentioned in the destination or the source. Otherwise, we
730 need just check the source. */
731 if (GET_CODE (SET_DEST (x)) != CC0
732 && GET_CODE (SET_DEST (x)) != PC
733 && GET_CODE (SET_DEST (x)) != REG
734 && ! (GET_CODE (SET_DEST (x)) == SUBREG
735 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
/* The SUBREG covers the whole REG iff both occupy the same number of
   words (round sizes up to whole words before comparing).  */
736 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
737 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
738 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
739 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
742 return find_single_use_1 (dest, &SET_SRC (x));
746 return find_single_use_1 (dest, &XEXP (x, 0));
752 /* If it wasn't one of the common cases above, check each expression and
753 vector of this code. Look for a unique usage of DEST. */
755 fmt = GET_RTX_FORMAT (code);
756 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
/* Direct appearance of DEST as this operand: LOC itself is the
   innermost containing expression.  */
760 if (dest == XEXP (x, i)
761 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
762 && REGNO (dest) == REGNO (XEXP (x, i))))
765 this_result = find_single_use_1 (dest, &XEXP (x, i));
768 result = this_result;
769 else if (this_result)
770 /* Duplicate usage. */
773 else if (fmt[i] == 'E')
777 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
779 if (XVECEXP (x, i, j) == dest
780 || (GET_CODE (dest) == REG
781 && GET_CODE (XVECEXP (x, i, j)) == REG
782 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
785 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
788 result = this_result;
789 else if (this_result)
798 /* See if DEST, produced in INSN, is used only a single time in the
799 sequel. If so, return a pointer to the innermost rtx expression in which
802 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
804 This routine will return usually zero either before flow is called (because
805 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
806 note can't be trusted).
808 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
809 care about REG_DEAD notes or LOG_LINKS.
811 Otherwise, we find the single use by finding an insn that has a
812 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
813 only referenced once in that insn, we know that it must be the first
814 and last insn referencing DEST. */
/* NOTE(review): declarations, the cc0 guard, the *PLOC stores and the
   final `return 0' are missing from this listing -- partial view only.  */
817 find_single_use (dest, insn, ploc)
/* cc0 case: only the immediately following insn can use the value.  */
829 next = NEXT_INSN (insn);
831 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
834 result = find_single_use_1 (dest, &PATTERN (next));
/* REG_DEAD/LOG_LINKS information is unreliable during and after
   reload, so give up in those phases.  */
841 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
844 for (next = next_nonnote_insn (insn);
845 next != 0 && GET_CODE (next) != CODE_LABEL;
846 next = next_nonnote_insn (next))
847 if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
849 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
850 if (XEXP (link, 0) == insn)
855 result = find_single_use_1 (dest, &PATTERN (next));
865 /* Return 1 if OP is a valid general operand for machine mode MODE.
866 This is either a register reference, a memory reference,
867 or a constant. In the case of a memory reference, the address
868 is checked for general validity for the target machine.
870 Register and memory references must have mode MODE in order to be valid,
871 but some constants have no machine mode and are valid for any mode.
873 If MODE is VOIDmode, OP is checked for validity for whatever mode
876 The main use of this function is as a predicate in match_operand
877 expressions in the machine description.
879 For an explanation of this function's behavior for registers of
880 class NO_REGS, see the comment for `register_operand'. */
/* NOTE(review): the `switch'/`if' dispatch on CODE (CONSTANT_P, SUBREG,
   REG, MEM cases), the `win:' label and several returns are missing
   from this listing -- partial view only.  */
883 general_operand (op, mode)
885 enum machine_mode mode;
887 register enum rtx_code code = GET_CODE (op);
888 int mode_altering_drug = 0;
890 if (mode == VOIDmode)
891 mode = GET_MODE (op);
893 /* Don't accept CONST_INT or anything similar
894 if the caller wants something floating. */
895 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
896 && GET_MODE_CLASS (mode) != MODE_INT
897 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
/* Constant case: mode-compatible, PIC-legitimate, and acceptable to
   the target's LEGITIMATE_CONSTANT_P.  */
901 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
902 #ifdef LEGITIMATE_PIC_OPERAND_P
903 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
905 && LEGITIMATE_CONSTANT_P (op));
907 /* Except for certain constants with VOIDmode, already checked for,
908 OP's mode must match MODE if MODE specifies a mode. */
910 if (GET_MODE (op) != mode)
915 #ifdef INSN_SCHEDULING
916 /* On machines that have insn scheduling, we want all memory
917 reference to be explicit, so outlaw paradoxical SUBREGs. */
918 if (GET_CODE (SUBREG_REG (op)) == MEM
919 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
/* Strip the SUBREG and examine the inner expression.  */
923 op = SUBREG_REG (op);
924 code = GET_CODE (op);
926 /* No longer needed, since (SUBREG (MEM...))
927 will load the MEM into a reload reg in the MEM's own mode. */
928 mode_altering_drug = 1;
933 /* A register whose class is NO_REGS is not a general operand. */
934 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
935 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
/* MEM case: the address must be legitimate for MODE on this target.  */
939 register rtx y = XEXP (op, 0);
940 if (! volatile_ok && MEM_VOLATILE_P (op))
942 if (GET_CODE (y) == ADDRESSOF)
944 /* Use the mem's mode, since it will be reloaded thus. */
945 mode = GET_MODE (op);
946 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
949 /* Pretend this is an operand for now; we'll run force_operand
950 on its replacement in fixup_var_refs_1. */
951 if (code == ADDRESSOF)
/* `win': a stripped (SUBREG (MEM)) is only OK if the address does not
   depend on the mode it is accessed in.  */
957 if (mode_altering_drug)
958 return ! mode_dependent_address_p (XEXP (op, 0));
962 /* Return 1 if OP is a valid memory address for a memory reference
965 The main use of this function is as a predicate in match_operand
966 expressions in the machine description. */
/* Thin wrapper over memory_address_p.  NOTE(review): declarations and
   braces are missing from this listing.  */
969 address_operand (op, mode)
971 enum machine_mode mode;
973 return memory_address_p (mode, op);
976 /* Return 1 if OP is a register reference of mode MODE.
977 If MODE is VOIDmode, accept a register in any mode.
979 The main use of this function is as a predicate in match_operand
980 expressions in the machine description.
982 As a special exception, registers whose class is NO_REGS are
983 not accepted by `register_operand'. The reason for this change
984 is to allow the representation of special architecture artifacts
985 (such as a condition code register) without extending the rtl
986 definitions. Since registers of class NO_REGS cannot be used
987 as registers in any case where register classes are examined,
988 it is most consistent to keep this function from accepting them. */
/* NOTE(review): return type, op declaration and several braces are
   missing from this listing.  */
991 register_operand (op, mode)
993 enum machine_mode mode;
995 if (GET_MODE (op) != mode && mode != VOIDmode)
998 if (GET_CODE (op) == SUBREG)
1000 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1001 because it is guaranteed to be reloaded into one.
1002 Just make sure the MEM is valid in itself.
1003 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1004 but currently it does result from (SUBREG (REG)...) where the
1005 reg went on the stack.) */
1006 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1007 return general_operand (op, mode);
1009 #ifdef CLASS_CANNOT_CHANGE_SIZE
/* Reject mode-size-changing SUBREGs of hard regs in classes that
   cannot change size (e.g. FP registers on some targets), except for
   complex modes, which legitimately split into parts.  */
1010 if (GET_CODE (SUBREG_REG (op)) == REG
1011 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1012 && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
1013 REGNO (SUBREG_REG (op)))
1014 && (GET_MODE_SIZE (mode)
1015 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
1016 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1017 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1021 op = SUBREG_REG (op);
1024 /* We don't consider registers whose class is NO_REGS
1025 to be a register operand. */
1026 return (GET_CODE (op) == REG
1027 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1028 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1031 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1032 or a hard register. */
/* NOTE(review): return type, op declaration and braces are missing
   from this listing.  */
1035 scratch_operand (op, mode)
1037 enum machine_mode mode;
1039 return (GET_MODE (op) == mode
1040 && (GET_CODE (op) == SCRATCH
1041 || (GET_CODE (op) == REG
1042 && REGNO (op) < FIRST_PSEUDO_REGISTER)));
1045 /* Return 1 if OP is a valid immediate operand for mode MODE.
1047 The main use of this function is as a predicate in match_operand
1048 expressions in the machine description. */
/* NOTE(review): return type, op declaration, a `return 0' and braces
   are missing from this listing.  */
1051 immediate_operand (op, mode)
1053 enum machine_mode mode;
1055 /* Don't accept CONST_INT or anything similar
1056 if the caller wants something floating. */
1057 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1058 && GET_MODE_CLASS (mode) != MODE_INT
1059 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
/* Constant, mode-compatible, PIC-safe, and target-legitimate.  */
1062 return (CONSTANT_P (op)
1063 && (GET_MODE (op) == mode || mode == VOIDmode
1064 || GET_MODE (op) == VOIDmode)
1065 #ifdef LEGITIMATE_PIC_OPERAND_P
1066 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1068 && LEGITIMATE_CONSTANT_P (op));
1071 /* Returns 1 if OP is an operand that is a CONST_INT. */
/* MODE is ignored (CONST_INT carries no mode).  NOTE(review): return
   type, op declaration and braces are missing from this listing.  */
1074 const_int_operand (op, mode)
1076 enum machine_mode mode ATTRIBUTE_UNUSED;
1078 return GET_CODE (op) == CONST_INT;
1081 /* Returns 1 if OP is an operand that is a constant integer or constant
1082 floating-point number. */
/* NOTE(review): return type, op declaration, a `return 0' and braces
   are missing from this listing.  */
1085 const_double_operand (op, mode)
1087 enum machine_mode mode;
1089 /* Don't accept CONST_INT or anything similar
1090 if the caller wants something floating. */
1091 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1092 && GET_MODE_CLASS (mode) != MODE_INT
1093 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1096 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1097 && (mode == VOIDmode || GET_MODE (op) == mode
1098 || GET_MODE (op) == VOIDmode))
1101 /* Return 1 if OP is a general operand that is not an immediate operand. */
/* NOTE(review): return type, op declaration and braces are missing
   from this listing.  */
1104 nonimmediate_operand (op, mode)
1106 enum machine_mode mode;
1108 return (general_operand (op, mode) && ! CONSTANT_P (op));
1111 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
/* Mirrors the constant logic of immediate_operand and the register
   logic of register_operand.  NOTE(review): return type, op
   declaration, `return 0' lines and braces are missing from this
   listing.  */
1114 nonmemory_operand (op, mode)
1116 enum machine_mode mode;
1118 if (CONSTANT_P (op))
1120 /* Don't accept CONST_INT or anything similar
1121 if the caller wants something floating. */
1122 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1123 && GET_MODE_CLASS (mode) != MODE_INT
1124 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1127 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
1128 #ifdef LEGITIMATE_PIC_OPERAND_P
1129 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1131 && LEGITIMATE_CONSTANT_P (op));
1134 if (GET_MODE (op) != mode && mode != VOIDmode)
1137 if (GET_CODE (op) == SUBREG)
1139 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1140 because it is guaranteed to be reloaded into one.
1141 Just make sure the MEM is valid in itself.
1142 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1143 but currently it does result from (SUBREG (REG)...) where the
1144 reg went on the stack.) */
1145 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1146 return general_operand (op, mode);
1147 op = SUBREG_REG (op);
1150 /* We don't consider registers whose class is NO_REGS
1151 to be a register operand. */
1152 return (GET_CODE (op) == REG
1153 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1154 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1157 /* Return 1 if OP is a valid operand that stands for pushing a
1158 value of mode MODE onto the stack.
1160 The main use of this function is as a predicate in match_operand
1161 expressions in the machine description. */
/* Accepts only (mem:MODE (STACK_PUSH_CODE (reg sp))).  NOTE(review):
   the intermediate `op = XEXP (op, 0)' strip and `return 0' lines are
   missing from this listing.  */
1164 push_operand (op, mode)
1166 enum machine_mode mode;
1168 if (GET_CODE (op) != MEM)
1171 if (GET_MODE (op) != mode)
1176 if (GET_CODE (op) != STACK_PUSH_CODE)
1179 return XEXP (op, 0) == stack_pointer_rtx;
1182 /* Return 1 if ADDR is a valid memory address for mode MODE. */
/* Defers to the target's GO_IF_LEGITIMATE_ADDRESS macro (jumps to the
   elided `win:' label on success).  NOTE(review): the addr parameter
   declaration, `win:' label and returns are missing from this
   listing.  */
1185 memory_address_p (mode, addr)
1186 enum machine_mode mode;
/* (ADDRESSOF ...) is always acceptable; it is resolved later.  */
1189 if (GET_CODE (addr) == ADDRESSOF)
1192 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1199 /* Return 1 if OP is a valid memory reference with mode MODE,
1200 including a valid address.
1202 The main use of this function is as a predicate in match_operand
1203 expressions in the machine description. */
/* NOTE(review): return type, declarations (`inner') and braces are
   missing from this listing.  */
1206 memory_operand (op, mode)
1208 enum machine_mode mode;
1212 if (! reload_completed)
1213 /* Note that no SUBREG is a memory operand before end of reload pass,
1214 because (SUBREG (MEM...)) forces reloading into a register. */
1215 return GET_CODE (op) == MEM && general_operand (op, mode);
1217 if (mode != VOIDmode && GET_MODE (op) != mode)
/* After reload, a (SUBREG (MEM)) counts as a memory operand.  */
1221 if (GET_CODE (inner) == SUBREG)
1222 inner = SUBREG_REG (inner);
1224 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1227 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1228 that is, a memory reference whose address is a general_operand. */
/* NOTE(review): return type, op declaration, a `return 0' and braces
   are missing from this listing.  */
1231 indirect_operand (op, mode)
1233 enum machine_mode mode;
1235 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1236 if (! reload_completed
1237 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1239 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1240 rtx inner = SUBREG_REG (op);
/* Big-endian adjustment: the subword sits at the high end of the
   inner value, so correct the byte offset.  */
1242 if (BYTES_BIG_ENDIAN)
1243 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1244 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1246 if (mode != VOIDmode && GET_MODE (op) != mode)
1249 /* The only way that we can have a general_operand as the resulting
1250 address is if OFFSET is zero and the address already is an operand
1251 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1254 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1255 || (GET_CODE (XEXP (inner, 0)) == PLUS
1256 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1257 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1258 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
/* Ordinary case: a MEM whose address is itself a general operand.  */
1261 return (GET_CODE (op) == MEM
1262 && memory_operand (op, mode)
1263 && general_operand (XEXP (op, 0), Pmode));
1266 /* Return 1 if this is a comparison operator. This allows the use of
1267 MATCH_OPERATOR to recognize all the branch insns. */
/* NOTE(review): return type, op declaration and braces are missing
   from this listing.  */
1270 comparison_operator (op, mode)
1272 enum machine_mode mode;
1274 return ((mode == VOIDmode || GET_MODE (op) == mode)
1275 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1278 /* If BODY is an insn body that uses ASM_OPERANDS,
1279 return the number of operands (both input and output) in the insn.
1280 Otherwise return -1. */
1283 asm_noperands (body)
1286 if (GET_CODE (body) == ASM_OPERANDS)
1287 /* No output operands: return number of input operands. */
1288 return ASM_OPERANDS_INPUT_LENGTH (body);
1289 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1290 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1291 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1292 else if (GET_CODE (body) == PARALLEL
1293 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1294 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1296 /* Multiple output operands, or 1 output plus some clobbers:
1297 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1301 /* Count backwards through CLOBBERs to determine number of SETs. */
1302 for (i = XVECLEN (body, 0); i > 0; i--)
1304 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1306 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1310 /* N_SETS is now number of output operands. */
1313 /* Verify that all the SETs we have
1314 came from a single original asm_operands insn
1315 (so that invalid combinations are blocked). */
1316 for (i = 0; i < n_sets; i++)
1318 rtx elt = XVECEXP (body, 0, i);
1319 if (GET_CODE (elt) != SET)
1321 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1323 /* If these ASM_OPERANDS rtx's came from different original insns
1324 then they aren't allowed together. */
1325 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1326 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1329 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1332 else if (GET_CODE (body) == PARALLEL
1333 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1335 /* 0 outputs, but some clobbers:
1336 body is [(asm_operands ...) (clobber (reg ...))...]. */
1339 /* Make sure all the other parallel things really are clobbers. */
1340 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1341 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1344 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1350 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1351 copy its operands (both input and output) into the vector OPERANDS,
1352 the locations of the operands within the insn into the vector OPERAND_LOCS,
1353 and the constraints for the operands into CONSTRAINTS.
1354 Write the modes of the operands into MODES.
1355 Return the assembler-template.
1357 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1358 we don't store that info. */
1361 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1366 enum machine_mode *modes;
1372 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1374 rtx asmop = SET_SRC (body);
1375 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1377 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1379 for (i = 1; i < noperands; i++)
1382 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1384 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1386 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1388 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1391 /* The output is in the SET.
1392 Its constraint is in the ASM_OPERANDS itself. */
1394 operands[0] = SET_DEST (body);
1396 operand_locs[0] = &SET_DEST (body);
1398 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1400 modes[0] = GET_MODE (SET_DEST (body));
1401 template = ASM_OPERANDS_TEMPLATE (asmop);
1403 else if (GET_CODE (body) == ASM_OPERANDS)
1406 /* No output operands: BODY is (asm_operands ....). */
1408 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1410 /* The input operands are found in the 1st element vector. */
1411 /* Constraints for inputs are in the 2nd element vector. */
1412 for (i = 0; i < noperands; i++)
1415 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1417 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1419 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1421 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1423 template = ASM_OPERANDS_TEMPLATE (asmop);
1425 else if (GET_CODE (body) == PARALLEL
1426 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1428 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1429 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1430 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1431 int nout = 0; /* Does not include CLOBBERs. */
1433 /* At least one output, plus some CLOBBERs. */
1435 /* The outputs are in the SETs.
1436 Their constraints are in the ASM_OPERANDS itself. */
1437 for (i = 0; i < nparallel; i++)
1439 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1440 break; /* Past last SET */
1443 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1445 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1447 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1449 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1453 for (i = 0; i < nin; i++)
1456 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1458 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1460 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1462 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1465 template = ASM_OPERANDS_TEMPLATE (asmop);
1467 else if (GET_CODE (body) == PARALLEL
1468 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1470 /* No outputs, but some CLOBBERs. */
1472 rtx asmop = XVECEXP (body, 0, 0);
1473 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1475 for (i = 0; i < nin; i++)
1478 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1480 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1482 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1484 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1487 template = ASM_OPERANDS_TEMPLATE (asmop);
1493 /* Given an rtx *P, if it is a sum containing an integer constant term,
1494 return the location (type rtx *) of the pointer to that constant term.
1495 Otherwise, return a null pointer. */
1498 find_constant_term_loc (p)
1502 register enum rtx_code code = GET_CODE (*p);
1504 /* If *P IS such a constant term, P is its location. */
1506 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1510 /* Otherwise, if not a sum, it has no constant term. */
1512 if (GET_CODE (*p) != PLUS)
1515 /* If one of the summands is constant, return its location. */
1517 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1518 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1521 /* Otherwise, check each summand for containing a constant term. */
1523 if (XEXP (*p, 0) != 0)
1525 tem = find_constant_term_loc (&XEXP (*p, 0));
1530 if (XEXP (*p, 1) != 0)
1532 tem = find_constant_term_loc (&XEXP (*p, 1));
1540 /* Return 1 if OP is a memory reference
1541 whose address contains no side effects
1542 and remains valid after the addition
1543 of a positive integer less than the
1544 size of the object being referenced.
1546 We assume that the original address is valid and do not check it.
1548 This uses strict_memory_address_p as a subroutine, so
1549 don't use it before reload. */
1552 offsettable_memref_p (op)
1555 return ((GET_CODE (op) == MEM)
1556 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1559 /* Similar, but don't require a strictly valid mem ref:
1560 consider pseudo-regs valid as index or base regs. */
1563 offsettable_nonstrict_memref_p (op)
1566 return ((GET_CODE (op) == MEM)
1567 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1570 /* Return 1 if Y is a memory address which contains no side effects
1571 and would remain valid after the addition of a positive integer
1572 less than the size of that mode.
1574 We assume that the original address is valid and do not check it.
1575 We do check that it is valid for narrower modes.
1577 If STRICTP is nonzero, we require a strictly valid address,
1578 for the sake of use in reload.c. */
1581 offsettable_address_p (strictp, mode, y)
1583 enum machine_mode mode;
1586 register enum rtx_code ycode = GET_CODE (y);
1590 int (*addressp) () = (strictp ? strict_memory_address_p : memory_address_p);
1592 if (CONSTANT_ADDRESS_P (y))
1595 /* Adjusting an offsettable address involves changing to a narrower mode.
1596 Make sure that's OK. */
1598 if (mode_dependent_address_p (y))
1601 /* If the expression contains a constant term,
1602 see if it remains valid when max possible offset is added. */
1604 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1609 *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
1610 /* Use QImode because an odd displacement may be automatically invalid
1611 for any wider mode. But it should be valid for a single byte. */
1612 good = (*addressp) (QImode, y);
1614 /* In any case, restore old contents of memory. */
1619 if (ycode == PRE_DEC || ycode == PRE_INC
1620 || ycode == POST_DEC || ycode == POST_INC)
1623 /* The offset added here is chosen as the maximum offset that
1624 any instruction could need to add when operating on something
1625 of the specified mode. We assume that if Y and Y+c are
1626 valid addresses then so is Y+d for all 0<d<c. */
1628 z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);
1630 /* Use QImode because an odd displacement may be automatically invalid
1631 for any wider mode. But it should be valid for a single byte. */
1632 return (*addressp) (QImode, z);
1635 /* Return 1 if ADDR is an address-expression whose effect depends
1636 on the mode of the memory reference it is used in.
1638 Autoincrement addressing is a typical example of mode-dependence
1639 because the amount of the increment depends on the mode. */
1642 mode_dependent_address_p (addr)
1645 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1651 /* Return 1 if OP is a general operand
1652 other than a memory ref with a mode dependent address. */
1655 mode_independent_operand (op, mode)
1656 enum machine_mode mode;
1661 if (! general_operand (op, mode))
1664 if (GET_CODE (op) != MEM)
1667 addr = XEXP (op, 0);
1668 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
1674 /* Given an operand OP that is a valid memory reference
1675 which satisfies offsettable_memref_p,
1676 return a new memory reference whose address has been adjusted by OFFSET.
1677 OFFSET should be positive and less than the size of the object referenced.
1681 adj_offsettable_operand (op, offset)
1685 register enum rtx_code code = GET_CODE (op);
1689 register rtx y = XEXP (op, 0);
1692 if (CONSTANT_ADDRESS_P (y))
1694 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1695 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1699 if (GET_CODE (y) == PLUS)
1702 register rtx *const_loc;
1706 const_loc = find_constant_term_loc (&z);
1709 *const_loc = plus_constant_for_output (*const_loc, offset);
1714 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1715 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1721 /* Analyze INSN and compute the variables recog_n_operands, recog_n_dups,
1722 recog_n_alternatives, recog_operand, recog_operand_loc, recog_constraints,
1723 recog_operand_mode, recog_dup_loc and recog_dup_num.
1724 If REGISTER_CONSTRAINTS is not defined, also compute
1725 recog_operand_address_p. */
/* Body fragment of extract_insn: fills in the recog_* globals for INSN.
   NOTE(review): the function header, `switch' case labels, and several
   `#else'/`#endif' arms are not visible in this chunk; comments below
   describe only what is shown.  */
1733 rtx body = PATTERN (insn);
1735 recog_n_operands = 0;
1736 recog_n_alternatives = 0;
/* Dispatch on the form of the insn pattern.  */
1739 switch (GET_CODE (body))
/* asm_noperands returns >= 0 only for an `asm' with operands.  */
1751 recog_n_operands = noperands = asm_noperands (body);
1754 /* This insn is an `asm' with operands. */
1756 /* expand_asm_operands makes sure there aren't too many operands. */
1757 if (noperands > MAX_RECOG_OPERANDS)
1760 /* Now get the operand values and constraints out of the insn. */
1761 decode_asm_operands (body, recog_operand, recog_operand_loc,
1762 recog_constraints, recog_operand_mode);
/* Count alternatives by counting commas in the first constraint string.  */
1765 char *p = recog_constraints[0];
1766 recog_n_alternatives = 1;
1768 recog_n_alternatives += (*p++ == ',');
1770 #ifndef REGISTER_CONSTRAINTS
1771 bzero (recog_operand_address_p, sizeof recog_operand_address_p);
1779 /* Ordinary insn: recognize it, get the operands via insn_extract
1780 and get the constraints. */
1782 icode = recog_memoized (insn);
/* A negative insn code here presumably means recognition failed — the
   visible line below aborts compilation; confirm against elided test.  */
1784 fatal_insn_not_found (insn);
/* Tables indexed by insn code give counts of operands/alternatives/dups.  */
1786 recog_n_operands = noperands = insn_n_operands[icode];
1787 recog_n_alternatives = insn_n_alternatives[icode];
1788 recog_n_dups = insn_n_dups[icode];
1790 insn_extract (insn);
1792 for (i = 0; i < noperands; i++)
1794 #ifdef REGISTER_CONSTRAINTS
1795 recog_constraints[i] = insn_operand_constraint[icode][i];
1797 recog_operand_address_p[i] = insn_operand_address_p[icode][i];
1799 recog_operand_mode[i] = insn_operand_mode[icode][i];
/* Classify each operand from its constraint's first character:
   '=' is an output, '+' is in-out, anything else is an input.  */
1802 for (i = 0; i < noperands; i++)
1803 recog_op_type[i] = (recog_constraints[i][0] == '=' ? OP_OUT
1804 : recog_constraints[i][0] == '+' ? OP_INOUT
1808 #ifdef REGISTER_CONSTRAINTS
1810 /* Check the operands of an insn against the insn's operand constraints
1811 and return 1 if they are valid.
1812 The information about the insn's operands, constraints, operand modes
1813 etc. is obtained from the global variables set up by extract_insn.
1815 WHICH_ALTERNATIVE is set to a number which indicates which
1816 alternative of constraints was matched: 0 for the first alternative,
1817 1 for the next, etc.
1819 In addition, when two operands are match
1820 and it happens that the output operand is (reg) while the
1821 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
1822 make the output operand look like the input.
1823 This is because the output operand is the one the template will print.
1825 This is used in final, just before printing the assembler code and by
1826 the routines that determine an insn's attribute.
1828 If STRICT is a positive non-zero value, it means that we have been
1829 called after reload has been completed. In that case, we must
1830 do all checks strictly. If it is zero, it means that we have been called
1831 before reload has completed. In that case, we first try to see if we can
1832 find an alternative that matches strictly. If not, we try again, this
1833 time assuming that reload will fix up the insn. This provides a "best
1834 guess" for the alternative and is used to compute attributes of insns prior
1835 to reload. A negative value of STRICT is used for this internal call. */
/* Body fragment of constrain_operands: test recog_operand[] against the
   constraint strings and set which_alternative (see the long comment
   above for the meaning of STRICT).
   NOTE(review): the function header, the constraint-letter `case'
   labels, and the win/lose jump targets are not visible in this chunk;
   letter identifications below are inferences from the visible tests
   and standard GCC constraint semantics — confirm against full source.  */
1843 constrain_operands (strict)
1846 char *constraints[MAX_RECOG_OPERANDS];
1847 int matching_operands[MAX_RECOG_OPERANDS];
1848 int earlyclobber[MAX_RECOG_OPERANDS];
1851 struct funny_match funny_match[MAX_RECOG_OPERANDS];
1852 int funny_match_index;
/* With no operands or no alternatives there is nothing to check.  */
1854 if (recog_n_operands == 0 || recog_n_alternatives == 0)
1857 for (c = 0; c < recog_n_operands; c++)
1859 constraints[c] = recog_constraints[c];
1860 matching_operands[c] = -1;
/* Try each alternative in turn until one matches.  */
1863 which_alternative = 0;
1865 while (which_alternative < recog_n_alternatives)
1869 funny_match_index = 0;
1871 for (opno = 0; opno < recog_n_operands; opno++)
1873 register rtx op = recog_operand[opno];
1874 enum machine_mode mode = GET_MODE (op);
1875 register char *p = constraints[opno];
1880 earlyclobber[opno] = 0;
1882 /* A unary operator may be accepted by the predicate, but it
1883 is irrelevant for matching constraints. */
1884 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
/* Look through a SUBREG of a hard register, recording the word offset.  */
1887 if (GET_CODE (op) == SUBREG)
1889 if (GET_CODE (SUBREG_REG (op)) == REG
1890 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
1891 offset = SUBREG_WORD (op);
1892 op = SUBREG_REG (op);
1895 /* An empty constraint or empty alternative
1896 allows anything which matched the pattern. */
1897 if (*p == 0 || *p == ',')
/* Scan this operand's constraint letters for the current alternative.  */
1900 while (*p && (c = *p++) != ',')
1912 /* Ignore rest of this alternative as far as
1913 constraint checking is concerned. */
1914 while (*p && *p != ',')
/* Presumably the '&' letter: mark this operand earlyclobber.  */
1919 earlyclobber[opno] = 1;
1927 /* This operand must be the same as a previous one.
1928 This kind of constraint is used for instructions such
1929 as add when they take only two operands.
1931 Note that the lower-numbered operand is passed first.
1933 If we are not testing strictly, assume that this constraint
1934 will be satisfied. */
1938 val = operands_match_p (recog_operand[c - '0'],
1939 recog_operand[opno]);
/* Record the pairing for the later earlyclobber conflict check.  */
1941 matching_operands[opno] = c - '0';
1942 matching_operands[c - '0'] = opno;
1946 /* If output is *x and input is *--x,
1947 arrange later to change the output to *--x as well,
1948 since the output op is the one that will be printed. */
1949 if (val == 2 && strict > 0)
1951 funny_match[funny_match_index].this = opno;
1952 funny_match[funny_match_index++].other = c - '0';
1957 /* p is used for address_operands. When we are called by
1958 gen_reload, no one will have checked that the address is
1959 strictly valid, i.e., that all pseudos requiring hard regs
1960 have gotten them. */
1962 || (strict_memory_address_p (recog_operand_mode[opno],
1967 /* No need to check general_operand again;
1968 it was done in insn-recog.c. */
1970 /* Anything goes unless it is a REG and really has a hard reg
1971 but the hard reg is not in the class GENERAL_REGS. */
1973 || GENERAL_REGS == ALL_REGS
1974 || GET_CODE (op) != REG
1975 || (reload_in_progress
1976 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1977 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
/* Accept pseudos loosely, SCRATCH pre-reload, or a fitting hard reg.  */
1984 && GET_CODE (op) == REG
1985 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1986 || (strict == 0 && GET_CODE (op) == SCRATCH)
1987 || (GET_CODE (op) == REG
1988 && ((GENERAL_REGS == ALL_REGS
1989 && REGNO (op) < FIRST_PSEUDO_REGISTER)
1990 || reg_fits_class_p (op, GENERAL_REGS,
1996 /* This is used for a MATCH_SCRATCH in the cases when
1997 we don't actually need anything. So anything goes
/* Presumably the 'm' letter: a memory operand, or anything reload can
   turn into one before/during reload.  */
2003 if (GET_CODE (op) == MEM
2004 /* Before reload, accept what reload can turn into mem. */
2005 || (strict < 0 && CONSTANT_P (op))
2006 /* During reload, accept a pseudo */
2007 || (reload_in_progress && GET_CODE (op) == REG
2008 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* Memory with auto-decrement address (presumably the '<' letter).  */
2013 if (GET_CODE (op) == MEM
2014 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2015 || GET_CODE (XEXP (op, 0)) == POST_DEC))
/* Memory with auto-increment address (presumably the '>' letter).  */
2020 if (GET_CODE (op) == MEM
2021 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2022 || GET_CODE (XEXP (op, 0)) == POST_INC))
2027 #ifndef REAL_ARITHMETIC
2028 /* Match any CONST_DOUBLE, but only if
2029 we can examine the bits of it reliably. */
2030 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2031 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2032 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2035 if (GET_CODE (op) == CONST_DOUBLE)
2040 if (GET_CODE (op) == CONST_DOUBLE)
/* Machine-specific CONST_DOUBLE classes ('G'/'H'-style letters).  */
2046 if (GET_CODE (op) == CONST_DOUBLE
2047 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2052 if (GET_CODE (op) == CONST_INT
2053 || (GET_CODE (op) == CONST_DOUBLE
2054 && GET_MODE (op) == VOIDmode))
2057 if (CONSTANT_P (op))
2062 if (GET_CODE (op) == CONST_INT
2063 || (GET_CODE (op) == CONST_DOUBLE
2064 && GET_MODE (op) == VOIDmode))
/* Machine-specific small-integer classes ('I'..'P'-style letters).  */
2076 if (GET_CODE (op) == CONST_INT
2077 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2081 #ifdef EXTRA_CONSTRAINT
2087 if (EXTRA_CONSTRAINT (op, c))
2093 if (GET_CODE (op) == MEM
2094 && ((strict > 0 && ! offsettable_memref_p (op))
2096 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2097 || (reload_in_progress
2098 && !(GET_CODE (op) == REG
2099 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
/* Offsettable memory reference (presumably the 'o' letter): strict or
   non-strict check depending on STRICT.  */
2104 if ((strict > 0 && offsettable_memref_p (op))
2105 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2106 /* Before reload, accept what reload can handle. */
2108 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2109 /* During reload, accept a pseudo */
2110 || (reload_in_progress && GET_CODE (op) == REG
2111 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* Register-class letters: pseudo, pre-reload SCRATCH, or hard reg that
   fits the class named by letter C.  */
2118 && GET_CODE (op) == REG
2119 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2120 || (strict == 0 && GET_CODE (op) == SCRATCH)
2121 || (GET_CODE (op) == REG
2122 && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
2127 constraints[opno] = p;
2128 /* If this operand did not win somehow,
2129 this alternative loses. */
2133 /* This alternative won; the operands are ok.
2134 Change whichever operands this alternative says to change. */
2139 /* See if any earlyclobber operand conflicts with some other
2143 for (eopno = 0; eopno < recog_n_operands; eopno++)
2144 /* Ignore earlyclobber operands now in memory,
2145 because we would often report failure when we have
2146 two memory operands, one of which was formerly a REG. */
2147 if (earlyclobber[eopno]
2148 && GET_CODE (recog_operand[eopno]) == REG)
2149 for (opno = 0; opno < recog_n_operands; opno++)
2150 if ((GET_CODE (recog_operand[opno]) == MEM
2151 || recog_op_type[opno] != OP_OUT)
2153 /* Ignore things like match_operator operands. */
2154 && *recog_constraints[opno] != 0
2155 && ! (matching_operands[opno] == eopno
2156 && operands_match_p (recog_operand[opno],
2157 recog_operand[eopno]))
2158 && ! safe_from_earlyclobber (recog_operand[opno],
2159 recog_operand[eopno]))
/* Apply the recorded output fix-ups (the *x vs *--x case above).  */
2164 while (--funny_match_index >= 0)
2166 recog_operand[funny_match[funny_match_index].other]
2167 = recog_operand[funny_match[funny_match_index].this];
2174 which_alternative++;
2177 /* If we are about to reject this, but we are not to test strictly,
2178 try a very loose test. Only return failure if it fails also. */
2180 return constrain_operands (-1);
2185 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2186 is a hard reg in class CLASS when its regno is offset by OFFSET
2187 and changed to mode MODE.
2188 If REG occupies multiple hard regs, all of them must be in CLASS. */
2191 reg_fits_class_p (operand, class, offset, mode)
2193 register enum reg_class class;
2195 enum machine_mode mode;
2197 register int regno = REGNO (operand);
2198 if (regno < FIRST_PSEUDO_REGISTER
2199 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2204 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2206 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2215 #endif /* REGISTER_CONSTRAINTS */