1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "insn-flags.h"
30 #include "insn-codes.h"
31 #include "hard-reg-set.h"
38 #include "basic-block.h"
/* Default address-side codes for stack pushes/pops: on a downward-growing
   stack a push pre-decrements and a pop post-increments the stack pointer;
   otherwise the reverse.  Targets may override before this point.  */
42 #ifndef STACK_PUSH_CODE
43 #ifdef STACK_GROWS_DOWNWARD
44 #define STACK_PUSH_CODE PRE_DEC
46 #define STACK_PUSH_CODE PRE_INC
50 #ifndef STACK_POP_CODE
51 #ifdef STACK_GROWS_DOWNWARD
52 #define STACK_POP_CODE POST_INC
54 #define STACK_POP_CODE POST_DEC
/* Forward declarations for static helpers defined later in this file.  */
58 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
59 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
60 static rtx *find_constant_term_loc PARAMS ((rtx *));
61 static void validate_replace_src_1 PARAMS ((rtx *, void *));
63 /* Nonzero means allow operands to be volatile.
64 This should be 0 if you are generating rtl, such as if you are calling
65 the functions in optabs.c and expmed.c (most of the time).
66 This should be 1 if all valid insns need to be recognized,
67 such as in regclass.c and final.c and reload.c.
69 init_recog and init_recog_no_volatile are responsible for setting this. */
/* Operand data for the insn most recently run through extract_insn.  */
73 struct recog_data recog_data;
75 /* Contains a vector of operand_alternative structures for every operand.
76 Set up by preprocess_constraints. */
77 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
79 /* On return from `constrain_operands', indicate which alternative
   was satisfied (negative if none was -- see check_asm_operands).  */
82 int which_alternative;
84 /* Nonzero after end of reload pass.
85 Set to 1 or 0 by toplev.c.
86 Controls the significance of (SUBREG (MEM)). */
90 /* Initialize data used by the function `recog'.
91 This must be called once in the compilation of a function
92 before any insn recognition may be done in the function. */
/* NOTE(review): per its name and the comment on volatile_ok above, this
   is the variant that leaves volatile operands disallowed (volatile_ok
   stays 0) -- confirm against the body, which is elided here.  */
95 init_recog_no_volatile ()
106 /* Try recognizing the instruction INSN,
107 and return the code number that results.
108 Remember the code so that repeated calls do not
109 need to spend the time for actual rerecognition.
111 This function is the normal interface to instruction recognition.
112 The automatically-generated function `recog' is normally called
113 through this one. (The only exception is in combine.c.) */
116 recog_memoized_1 (insn)
/* An INSN_CODE of -1 means "not yet recognized"; the result of recog
   is cached in the insn so repeated queries are cheap.  */
119 if (INSN_CODE (insn) < 0)
120 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
121 return INSN_CODE (insn);
124 /* Check that X is an insn-body for an `asm' with operands
125 and that the operands mentioned in it are legitimate. */
128 check_asm_operands (x)
133 const char **constraints;
136 /* Post-reload, be more strict with things. */
137 if (reload_completed)
139 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
140 extract_insn (make_insn_raw (x));
141 constrain_operands (1);
142 return which_alternative >= 0;
145 noperands = asm_noperands (x);
151 operands = (rtx *) alloca (noperands * sizeof (rtx));
152 constraints = (const char **) alloca (noperands * sizeof (char *));
154 decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);
156 for (i = 0; i < noperands; i++)
158 const char *c = constraints[i];
/* A bare single-digit constraint means "match operand N": check this
   operand against the matched operand's constraint instead.  */
161 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
162 c = constraints[c[0] - '0'];
164 if (! asm_operand_ok (operands[i], c))
171 /* Static data for the next two routines. */
/* One queued replacement: the rtl location changed (loc), the value
   that used to be there (old), the enclosing object, and the object's
   saved insn code (old_code) so cancellation can restore it.  */
173 typedef struct change_t
181 static change_t *changes;
182 static int changes_allocated;
184 static int num_changes = 0;
186 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
187 at which NEW will be placed. If OBJECT is zero, no validation is done,
188 the change is simply made.
190 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
191 will be called with the address and mode as parameters. If OBJECT is
192 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
195 IN_GROUP is non-zero if this is part of a group of changes that must be
196 performed as a group. In that case, the changes will be stored. The
197 function `apply_change_group' will validate and apply the changes.
199 If IN_GROUP is zero, this is a single change. Try to recognize the insn
200 or validate the memory reference with the change applied. If the result
201 is not valid for the machine, suppress the change and return zero.
202 Otherwise, perform the change and return 1. */
205 validate_change (object, loc, new, in_group)
/* Nothing to do if the replacement is already in place.  */
213 if (old == new || rtx_equal_p (old, new))
/* Mixing an ungrouped change into a pending group would corrupt
   the change queue.  */
216 if (in_group == 0 && num_changes != 0)
221 /* Save the information describing this change. */
222 if (num_changes >= changes_allocated)
224 if (changes_allocated == 0)
225 /* This value allows for repeated substitutions inside complex
226 indexed addresses, or changes in up to 5 insns. */
227 changes_allocated = MAX_RECOG_OPERANDS * 5;
229 changes_allocated *= 2;
232 (change_t*) xrealloc (changes,
233 sizeof (change_t) * changes_allocated);
236 changes[num_changes].object = object;
237 changes[num_changes].loc = loc;
238 changes[num_changes].old = old;
240 if (object && GET_CODE (object) != MEM)
242 /* Set INSN_CODE to force rerecognition of insn. Save old code in
   case the change group is later cancelled.  */
244 changes[num_changes].old_code = INSN_CODE (object);
245 INSN_CODE (object) = -1;
250 /* If we are making a group of changes, return 1. Otherwise, validate the
251 change group we made. */
256 return apply_change_group ();
259 /* This subroutine of apply_change_group verifies whether the changes to INSN
260 were valid; i.e. whether INSN can still be recognized. */
/* Note the inverted sense: a nonzero result means INSN is now INVALID
   (see the caller in apply_change_group).  */
263 insn_invalid_p (insn)
266 rtx pat = PATTERN (insn);
267 int num_clobbers = 0;
268 /* If we are before reload and the pattern is a SET, see if we can add
   the clobbers needed for it to be recognized.  */
270 int icode = recog (pat, insn,
271 (GET_CODE (pat) == SET
272 && ! reload_completed && ! reload_in_progress)
273 ? &num_clobbers : NULL_PTR);
274 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
277 /* If this is an asm and the operand aren't legal, then fail. Likewise if
278 this is not an asm and the insn wasn't recognized. */
279 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
280 || (!is_asm && icode < 0))
283 /* If we have to add CLOBBERs, fail if we have to add ones that reference
284 hard registers since our callers can't know if they are live or not.
285 Otherwise, add them. */
286 if (num_clobbers > 0)
290 if (added_clobbers_hard_reg_p (icode))
293 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
294 XVECEXP (newpat, 0, 0) = pat;
295 add_clobbers (newpat, icode);
296 PATTERN (insn) = pat = newpat;
299 /* After reload, verify that all constraints are satisfied. */
300 if (reload_completed)
304 if (! constrain_operands (1))
/* Record the successful recognition so it need not be redone.  */
308 INSN_CODE (insn) = icode;
312 /* Apply a group of changes previously issued with `validate_change'.
313 Return 1 if all changes are valid, zero otherwise. */
316 apply_change_group ()
320 /* The changes have been applied and all INSN_CODEs have been reset to force
323 The changes are valid if we aren't given an object, or if we are
324 given a MEM and it still is a valid address, or if this is in insn
325 and it is recognized. In the latter case, if reload has completed,
326 we also require that the operands meet the constraints for
   the chosen alternative.  */
329 for (i = 0; i < num_changes; i++)
331 rtx object = changes[i].object;
336 if (GET_CODE (object) == MEM)
338 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
341 else if (insn_invalid_p (object))
343 rtx pat = PATTERN (object);
345 /* Perhaps we couldn't recognize the insn because there were
346 extra CLOBBERs at the end. If so, try to re-recognize
347 without the last CLOBBER (later iterations will cause each of
348 them to be eliminated, in turn). But don't do this if we
349 have an ASM_OPERAND. */
350 if (GET_CODE (pat) == PARALLEL
351 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
352 && asm_noperands (PATTERN (object)) < 0)
/* Strip one CLOBBER: a two-element PARALLEL collapses to its first
   element, otherwise build a PARALLEL one element shorter.  */
356 if (XVECLEN (pat, 0) == 2)
357 newpat = XVECEXP (pat, 0, 0);
363 = gen_rtx_PARALLEL (VOIDmode,
364 rtvec_alloc (XVECLEN (pat, 0) - 1));
365 for (j = 0; j < XVECLEN (newpat, 0); j++)
366 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
369 /* Add a new change to this group to replace the pattern
370 with this new pattern. Then consider this change
371 as having succeeded. The change we added will
372 cause the entire call to fail if things remain invalid.
374 Note that this can lose if a later change than the one
375 we are processing specified &XVECEXP (PATTERN (object), 0, X)
376 but this shouldn't occur. */
378 validate_change (object, &PATTERN (object), newpat, 1);
380 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
381 /* If this insn is a CLOBBER or USE, it is always valid, but is
   never matched by the machine description; accept it as-is.  */
389 if (i == num_changes)
401 /* Return the number of changes so far in the current group. */
404 num_validated_changes ()
409 /* Retract the changes numbered NUM and up. */
417 /* Back out all the changes. Do this in the opposite order in which
   they were made.  */
419 for (i = num_changes - 1; i >= num; i--)
421 *changes[i].loc = changes[i].old;
/* Restore the insn code that validate_change saved.  */
422 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
423 INSN_CODE (changes[i].object) = changes[i].old_code;
428 /* Replace every occurrence of FROM in X with TO. Mark each change with
429 validate_change passing OBJECT. */
/* All changes are queued with IN_GROUP = 1; callers must finish with
   apply_change_group (see the validate_replace_* wrappers below).  */
432 validate_replace_rtx_1 (loc, from, to, object)
434 rtx from, to, object;
437 register const char *fmt;
438 register rtx x = *loc;
444 /* X matches FROM if it is the same rtx or they are both referring to the
445 same register in the same mode. Avoid calling rtx_equal_p unless the
446 operands look similar. */
449 || (GET_CODE (x) == REG && GET_CODE (from) == REG
450 && GET_MODE (x) == GET_MODE (from)
451 && REGNO (x) == REGNO (from))
452 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
453 && rtx_equal_p (x, from)))
455 validate_change (object, loc, to, 1);
459 /* For commutative or comparison operations, try replacing each argument
460 separately and seeing if we made any changes. If so, put a constant
   argument last if possible.  */
462 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
464 int prev_changes = num_changes;
466 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
467 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
468 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
470 validate_change (object, loc,
471 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
472 : swap_condition (code),
473 GET_MODE (x), XEXP (x, 1),
481 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
482 done the substitution, otherwise we won't. */
487 /* If we have a PLUS whose second operand is now a CONST_INT, use
488 plus_constant to try to simplify it. */
489 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
490 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
/* MINUS of a constant becomes PLUS of its negation.  */
495 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
497 validate_change (object, loc,
498 plus_constant (XEXP (x, 0), - INTVAL (to)),
506 /* In these cases, the operation to be performed depends on the mode
507 of the operand. If we are replacing the operand with a VOIDmode
508 constant, we lose the information. So try to simplify the operation
   here instead.  */
510 if (GET_MODE (to) == VOIDmode
511 && (rtx_equal_p (XEXP (x, 0), from)
512 || (GET_CODE (XEXP (x, 0)) == SUBREG
513 && rtx_equal_p (SUBREG_REG (XEXP (x, 0)), from))))
517 /* If there is a subreg involved, crop to the portion of the
518 constant that we are interested in. */
519 if (GET_CODE (XEXP (x, 0)) == SUBREG)
521 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) <= UNITS_PER_WORD)
522 to = operand_subword (to, SUBREG_WORD (XEXP (x, 0)),
524 else if (GET_MODE_CLASS (GET_MODE (from)) == MODE_INT
525 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
526 <= HOST_BITS_PER_WIDE_INT))
528 int i = SUBREG_WORD (XEXP (x, 0)) * BITS_PER_WORD;
530 unsigned HOST_WIDE_INT vall;
532 if (GET_CODE (to) == CONST_INT)
/* Sign-extend a CONST_INT into the high word.  */
535 valh = (HOST_WIDE_INT) vall < 0 ? ~0 : 0;
539 vall = CONST_DOUBLE_LOW (to);
540 valh = CONST_DOUBLE_HIGH (to);
543 if (WORDS_BIG_ENDIAN)
544 i = (GET_MODE_BITSIZE (GET_MODE (from))
545 - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - i);
546 if (i > 0 && i < HOST_BITS_PER_WIDE_INT)
547 vall = vall >> i | valh << (HOST_BITS_PER_WIDE_INT - i);
548 else if (i >= HOST_BITS_PER_WIDE_INT)
549 vall = valh >> (i - HOST_BITS_PER_WIDE_INT);
550 to = GEN_INT (trunc_int_for_mode (vall,
551 GET_MODE (XEXP (x, 0))));
/* A CLOBBER is deliberately unrecognizable: it makes the whole change
   group fail rather than silently miscompiling.  */
554 to = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
557 /* If the above didn't fail, perform the extension from the
558 mode of the operand (and not the mode of FROM). */
560 new = simplify_unary_operation (code, GET_MODE (x), to,
561 GET_MODE (XEXP (x, 0)));
563 /* If any of the above failed, substitute in something that
564 we know won't be recognized. */
566 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
568 validate_change (object, loc, new, 1);
574 /* In case we are replacing by constant, attempt to simplify it to non-SUBREG
575 expression. We can't do this later, since the information about inner mode
   of the SUBREG would already be lost.  */
577 if (CONSTANT_P (to) && rtx_equal_p (SUBREG_REG (x), from))
579 if (GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
580 && GET_MODE_SIZE (GET_MODE (from)) > UNITS_PER_WORD
581 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
583 rtx temp = operand_subword (to, SUBREG_WORD (x),
587 validate_change (object, loc, temp, 1);
591 if (subreg_lowpart_p (x))
593 rtx new = gen_lowpart_if_possible (GET_MODE (x), to);
596 validate_change (object, loc, new, 1);
601 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
602 since we are saying that the high bits don't matter. */
603 if (GET_MODE (to) == VOIDmode
604 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (from)))
606 validate_change (object, loc, to, 1);
611 /* Changing mode twice with SUBREG => just change it once,
612 or not at all if changing back to starting mode. */
613 if (GET_CODE (to) == SUBREG
614 && rtx_equal_p (SUBREG_REG (x), from))
616 if (GET_MODE (x) == GET_MODE (SUBREG_REG (to))
617 && SUBREG_WORD (x) == 0 && SUBREG_WORD (to) == 0)
619 validate_change (object, loc, SUBREG_REG (to), 1);
623 validate_change (object, loc,
624 gen_rtx_SUBREG (GET_MODE (x), SUBREG_REG (to),
625 SUBREG_WORD (x) + SUBREG_WORD (to)), 1);
629 /* If we have a SUBREG of a register that we are replacing and we are
630 replacing it with a MEM, make a new MEM and try replacing the
631 SUBREG with it. Don't do this if the MEM has a mode-dependent address
632 or if we would be widening it. */
634 if (GET_CODE (from) == REG
635 && GET_CODE (to) == MEM
636 && rtx_equal_p (SUBREG_REG (x), from)
637 && ! mode_dependent_address_p (XEXP (to, 0))
638 && ! MEM_VOLATILE_P (to)
639 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
641 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
642 enum machine_mode mode = GET_MODE (x);
645 if (BYTES_BIG_ENDIAN)
646 offset += (MIN (UNITS_PER_WORD,
647 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
648 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
650 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
651 MEM_COPY_ATTRIBUTES (new, to);
652 validate_change (object, loc, new, 1);
659 /* If we are replacing a register with memory, try to change the memory
660 to be the mode required for memory in extract operations (this isn't
661 likely to be an insertion operation; if it was, nothing bad will
662 happen, we might just fail in some cases). */
664 if (GET_CODE (from) == REG && GET_CODE (to) == MEM
665 && rtx_equal_p (XEXP (x, 0), from)
666 && GET_CODE (XEXP (x, 1)) == CONST_INT
667 && GET_CODE (XEXP (x, 2)) == CONST_INT
668 && ! mode_dependent_address_p (XEXP (to, 0))
669 && ! MEM_VOLATILE_P (to))
671 enum machine_mode wanted_mode = VOIDmode;
672 enum machine_mode is_mode = GET_MODE (to);
673 int pos = INTVAL (XEXP (x, 2));
676 if (code == ZERO_EXTRACT)
678 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
679 if (wanted_mode == VOIDmode)
680 wanted_mode = word_mode;
684 if (code == SIGN_EXTRACT)
686 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
687 if (wanted_mode == VOIDmode)
688 wanted_mode = word_mode;
692 /* If we have a narrower mode, we can do something. */
693 if (wanted_mode != VOIDmode
694 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
696 int offset = pos / BITS_PER_UNIT;
699 /* If the bytes and bits are counted differently, we
700 must adjust the offset. */
701 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
702 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
705 pos %= GET_MODE_BITSIZE (wanted_mode);
707 newmem = gen_rtx_MEM (wanted_mode,
708 plus_constant (XEXP (to, 0), offset));
709 MEM_COPY_ATTRIBUTES (newmem, to);
711 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
712 validate_change (object, &XEXP (x, 0), newmem, 1);
722 /* For commutative or comparison operations we've already performed
723 replacements. Don't try to perform them again. */
724 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
726 fmt = GET_RTX_FORMAT (code);
727 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
730 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
731 else if (fmt[i] == 'E')
732 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
733 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
738 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
739 with TO. After all changes have been made, validate by seeing
740 if INSN is still valid. */
743 validate_replace_rtx_subexp (from, to, insn, loc)
744 rtx from, to, insn, *loc;
/* Queue every replacement, then validate/commit them atomically.  */
746 validate_replace_rtx_1 (loc, from, to, insn);
747 return apply_change_group ();
750 /* Try replacing every occurrence of FROM in INSN with TO. After all
751 changes have been made, validate by seeing if INSN is still valid. */
/* Like validate_replace_rtx_subexp, but scans INSN's whole pattern.  */
754 validate_replace_rtx (from, to, insn)
757 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
758 return apply_change_group ();
761 /* Try replacing every occurrence of FROM in INSN with TO. */
/* NOTE(review): unlike validate_replace_rtx this appears to leave the
   change group open for the caller to apply or cancel -- no call to
   apply_change_group is visible here; confirm against the full body.  */
764 validate_replace_rtx_group (from, to, insn)
767 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
770 /* Function called by note_uses to replace used subexpressions. */
771 struct validate_replace_src_data
773 rtx from; /* Old RTX */
774 rtx to; /* New RTX */
775 rtx insn; /* Insn in which substitution is occurring. */
/* note_uses callback: DATA carries the (from, to, insn) triple through
   note_uses' void* interface.  */
779 validate_replace_src_1 (x, data)
783 struct validate_replace_src_data *d
784 = (struct validate_replace_src_data *) data;
786 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
789 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
790 SET_DESTs. After all changes have been made, validate by seeing if
791 INSN is still valid. */
794 validate_replace_src (from, to, insn)
797 struct validate_replace_src_data d;
/* note_uses only visits used (source) positions, so SET_DESTs are
   skipped automatically.  */
802 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
803 return apply_change_group ();
807 /* Return 1 if the insn using CC0 set by INSN does not contain
808 any ordered tests applied to the condition codes.
809 EQ and NE tests do not count. */
812 next_insn_tests_no_inequality (insn)
815 register rtx next = next_cc0_user (insn);
817 /* If there is no next insn, we have to take the conservative choice. */
/* Only a real insn (insn, jump, or call) can safely be scanned for
   inequality comparisons; anything else is conservatively rejected.  */
821 return ((GET_CODE (next) == JUMP_INSN
822 || GET_CODE (next) == INSN
823 || GET_CODE (next) == CALL_INSN)
824 && ! inequality_comparisons_p (PATTERN (next)));
827 #if 0 /* This is useless since the insn that sets the cc's
828 must be followed immediately by the use of them. */
829 /* Return 1 if the CC value set up by INSN is not used. */
/* Dead code: disabled by the #if 0 above, kept only for reference.  */
832 next_insns_test_no_inequality (insn)
835 register rtx next = NEXT_INSN (insn);
837 for (; next != 0; next = NEXT_INSN (next))
839 if (GET_CODE (next) == CODE_LABEL
840 || GET_CODE (next) == BARRIER)
842 if (GET_CODE (next) == NOTE)
844 if (inequality_comparisons_p (PATTERN (next)))
846 if (sets_cc0_p (PATTERN (next)) == 1)
848 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
856 /* This is used by find_single_use to locate an rtx that contains exactly one
857 use of DEST, which is typically either a REG or CC0. It returns a
858 pointer to the innermost rtx expression containing DEST. Appearances of
859 DEST that are being used to totally replace it are not counted. */
/* NOTE(review): when DEST is used more than once below *LOC this appears
   to give up and return null -- see the "Duplicate usage" branch below.  */
862 find_single_use_1 (dest, loc)
867 enum rtx_code code = GET_CODE (x);
884 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
885 of a REG that occupies all of the REG, the insn uses DEST if
886 it is mentioned in the destination or the source. Otherwise, we
887 need just check the source. */
888 if (GET_CODE (SET_DEST (x)) != CC0
889 && GET_CODE (SET_DEST (x)) != PC
890 && GET_CODE (SET_DEST (x)) != REG
891 && ! (GET_CODE (SET_DEST (x)) == SUBREG
892 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
893 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
894 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
895 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
896 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
899 return find_single_use_1 (dest, &SET_SRC (x));
903 return find_single_use_1 (dest, &XEXP (x, 0));
909 /* If it wasn't one of the common cases above, check each expression and
910 vector of this code. Look for a unique usage of DEST. */
912 fmt = GET_RTX_FORMAT (code);
913 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
917 if (dest == XEXP (x, i)
918 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
919 && REGNO (dest) == REGNO (XEXP (x, i))))
922 this_result = find_single_use_1 (dest, &XEXP (x, i));
/* First use found: remember it; a second one means failure.  */
925 result = this_result;
926 else if (this_result)
927 /* Duplicate usage. */
930 else if (fmt[i] == 'E')
934 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
936 if (XVECEXP (x, i, j) == dest
937 || (GET_CODE (dest) == REG
938 && GET_CODE (XVECEXP (x, i, j)) == REG
939 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
942 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
945 result = this_result;
946 else if (this_result)
955 /* See if DEST, produced in INSN, is used only a single time in the
956 sequel. If so, return a pointer to the innermost rtx expression in which
959 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
961 This routine will return usually zero either before flow is called (because
962 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
963 note can't be trusted).
965 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
966 care about REG_DEAD notes or LOG_LINKS.
968 Otherwise, we find the single use by finding an insn that has a
969 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
970 only referenced once in that insn, we know that it must be the first
971 and last insn referencing DEST. */
974 find_single_use (dest, insn, ploc)
/* CC0 case: the user must be the immediately following real insn
   (see the header comment above).  */
986 next = NEXT_INSN (insn);
988 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
991 result = find_single_use_1 (dest, &PATTERN (next));
/* The LOG_LINKS/REG_DEAD scheme below is only meaningful for pseudo
   registers before reload.  */
998 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
1001 for (next = next_nonnote_insn (insn);
1002 next != 0 && GET_CODE (next) != CODE_LABEL;
1003 next = next_nonnote_insn (next))
1004 if (INSN_P (next) && dead_or_set_p (next, dest))
1006 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
1007 if (XEXP (link, 0) == insn)
1012 result = find_single_use_1 (dest, &PATTERN (next));
1022 /* Return 1 if OP is a valid general operand for machine mode MODE.
1023 This is either a register reference, a memory reference,
1024 or a constant. In the case of a memory reference, the address
1025 is checked for general validity for the target machine.
1027 Register and memory references must have mode MODE in order to be valid,
1028 but some constants have no machine mode and are valid for any mode.
1030 If MODE is VOIDmode, OP is checked for validity for whatever mode
1033 The main use of this function is as a predicate in match_operand
1034 expressions in the machine description.
1036 For an explanation of this function's behavior for registers of
1037 class NO_REGS, see the comment for `register_operand'. */
1040 general_operand (op, mode)
1042 enum machine_mode mode;
1044 register enum rtx_code code = GET_CODE (op);
1045 int mode_altering_drug = 0;
1047 if (mode == VOIDmode)
1048 mode = GET_MODE (op);
1050 /* Don't accept CONST_INT or anything similar
1051 if the caller wants something floating. */
1052 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1053 && GET_MODE_CLASS (mode) != MODE_INT
1054 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1057 if (CONSTANT_P (op))
1058 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1059 || mode == VOIDmode)
1060 #ifdef LEGITIMATE_PIC_OPERAND_P
1061 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1063 && LEGITIMATE_CONSTANT_P (op));
1065 /* Except for certain constants with VOIDmode, already checked for,
1066 OP's mode must match MODE if MODE specifies a mode. */
1068 if (GET_MODE (op) != mode)
1073 #ifdef INSN_SCHEDULING
1074 /* On machines that have insn scheduling, we want all memory
1075 reference to be explicit, so outlaw paradoxical SUBREGs. */
1076 if (GET_CODE (SUBREG_REG (op)) == MEM
1077 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
/* Look through the SUBREG and validate the inner expression.  */
1081 op = SUBREG_REG (op);
1082 code = GET_CODE (op);
1084 /* No longer needed, since (SUBREG (MEM...))
1085 will load the MEM into a reload reg in the MEM's own mode. */
1086 mode_altering_drug = 1;
1091 /* A register whose class is NO_REGS is not a general operand. */
1092 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1093 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
1097 register rtx y = XEXP (op, 0);
1099 if (! volatile_ok && MEM_VOLATILE_P (op))
1102 if (GET_CODE (y) == ADDRESSOF)
1105 /* Use the mem's mode, since it will be reloaded thus. */
1106 mode = GET_MODE (op);
1107 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
1110 /* Pretend this is an operand for now; we'll run force_operand
1111 on its replacement in fixup_var_refs_1. */
1112 if (code == ADDRESSOF)
/* If the SUBREG was looked through above, accept only addresses whose
   meaning is independent of the access mode.  */
1118 if (mode_altering_drug)
1119 return ! mode_dependent_address_p (XEXP (op, 0));
1123 /* Return 1 if OP is a valid memory address for a memory reference
1126 The main use of this function is as a predicate in match_operand
1127 expressions in the machine description. */
/* Thin wrapper around memory_address_p.  */
1130 address_operand (op, mode)
1132 enum machine_mode mode;
1134 return memory_address_p (mode, op);
1137 /* Return 1 if OP is a register reference of mode MODE.
1138 If MODE is VOIDmode, accept a register in any mode.
1140 The main use of this function is as a predicate in match_operand
1141 expressions in the machine description.
1143 As a special exception, registers whose class is NO_REGS are
1144 not accepted by `register_operand'. The reason for this change
1145 is to allow the representation of special architecture artifacts
1146 (such as a condition code register) without extending the rtl
1147 definitions. Since registers of class NO_REGS cannot be used
1148 as registers in any case where register classes are examined,
1149 it is most consistent to keep this function from accepting them. */
1152 register_operand (op, mode)
1154 enum machine_mode mode;
1156 if (GET_MODE (op) != mode && mode != VOIDmode)
1159 if (GET_CODE (op) == SUBREG)
1161 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1162 because it is guaranteed to be reloaded into one.
1163 Just make sure the MEM is valid in itself.
1164 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1165 but currently it does result from (SUBREG (REG)...) where the
1166 reg went on the stack.) */
1167 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1168 return general_operand (op, mode)
/* Reject a SUBREG that would change the mode of a hard register whose
   class cannot change modes.  */
1170 #ifdef CLASS_CANNOT_CHANGE_MODE
1171 if (GET_CODE (SUBREG_REG (op)) == REG
1172 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1173 && (TEST_HARD_REG_BIT
1174 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
1175 REGNO (SUBREG_REG (op))))
1176 && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
1177 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1178 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT
1182 op = SUBREG_REG (op);
1185 /* If we have an ADDRESSOF, consider it valid since it will be
1186 converted into something that will not be a MEM. */
1187 if (GET_CODE (op) == ADDRESSOF)
1190 /* We don't consider registers whose class is NO_REGS
1191 to be a register operand. */
1192 return (GET_CODE (op) == REG
1193 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1194 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1197 /* Return 1 for a register in Pmode; ignore the tested mode. */
/* (Pmode is the target's pointer mode.)  */
1200 pmode_register_operand (op, mode)
1202 enum machine_mode mode ATTRIBUTE_UNUSED;
1204 return register_operand (op, Pmode);
1207 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1208 or a hard register. */
1211 scratch_operand (op, mode)
1213 enum machine_mode mode;
1215 if (GET_MODE (op) != mode && mode != VOIDmode)
/* Pseudo registers are rejected: a REG only qualifies if its number is
   below FIRST_PSEUDO_REGISTER (a hard register).  */
1218 return (GET_CODE (op) == SCRATCH
1219 || (GET_CODE (op) == REG
1220 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1223 /* Return 1 if OP is a valid immediate operand for mode MODE.
1225 The main use of this function is as a predicate in match_operand
1226 expressions in the machine description. */
1229 immediate_operand (op, mode)
1231 enum machine_mode mode;
1233 /* Don't accept CONST_INT or anything similar
1234 if the caller wants something floating. */
1235 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1236 && GET_MODE_CLASS (mode) != MODE_INT
1237 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1240 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1241 result in 0/1. It seems a safe assumption that this is
1242 in range for everyone. */
1243 if (GET_CODE (op) == CONSTANT_P_RTX)
/* Modes must match unless either side is VOIDmode.  */
1246 return (CONSTANT_P (op)
1247 && (GET_MODE (op) == mode || mode == VOIDmode
1248 || GET_MODE (op) == VOIDmode)
1249 #ifdef LEGITIMATE_PIC_OPERAND_P
1250 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1252 && LEGITIMATE_CONSTANT_P (op));
1255 /* Returns 1 if OP is an operand that is a CONST_INT. */
/* MODE is ignored: CONST_INTs carry no mode.  */
1258 const_int_operand (op, mode)
1260 enum machine_mode mode ATTRIBUTE_UNUSED;
1262 return GET_CODE (op) == CONST_INT;
1265 /* Returns 1 if OP is an operand that is a constant integer or constant
1266 floating-point number. */
1269 const_double_operand (op, mode)
1271 enum machine_mode mode;
1273 /* Don't accept CONST_INT or anything similar
1274 if the caller wants something floating. */
1275 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1276 && GET_MODE_CLASS (mode) != MODE_INT
1277 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
/* CONST_INT is accepted too, since small constants of CONST_DOUBLE
   modes may be represented that way.  */
1280 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1281 && (mode == VOIDmode || GET_MODE (op) == mode
1282 || GET_MODE (op) == VOIDmode));
1285 /* Return 1 if OP is a general operand that is not an immediate operand. */
/* I.e. a register or memory reference; constants are filtered out.  */
1288 nonimmediate_operand (op, mode)
1290 enum machine_mode mode;
1292 return (general_operand (op, mode) && ! CONSTANT_P (op));
1295 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1298 nonmemory_operand (op, mode)
1300 enum machine_mode mode;
1302 if (CONSTANT_P (op))
1304 /* Don't accept CONST_INT or anything similar
1305 if the caller wants something floating. */
1306 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1307 && GET_MODE_CLASS (mode) != MODE_INT
1308 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1311 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1312 || mode == VOIDmode)
1313 #ifdef LEGITIMATE_PIC_OPERAND_P
1314 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1316 && LEGITIMATE_CONSTANT_P (op));
/* The remainder mirrors register_operand's SUBREG / NO_REGS logic.  */
1319 if (GET_MODE (op) != mode && mode != VOIDmode)
1322 if (GET_CODE (op) == SUBREG)
1324 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1325 because it is guaranteed to be reloaded into one.
1326 Just make sure the MEM is valid in itself.
1327 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1328 but currently it does result from (SUBREG (REG)...) where the
1329 reg went on the stack.) */
1330 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1331 return general_operand (op, mode);
1332 op = SUBREG_REG (op);
1335 /* We don't consider registers whose class is NO_REGS
1336 to be a register operand. */
1337 return (GET_CODE (op) == REG
1338 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1339 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1342 /* Return 1 if OP is a valid operand that stands for pushing a
1343 value of mode MODE onto the stack.
1345 The main use of this function is as a predicate in match_operand
1346 expressions in the machine description. */
1349 push_operand (op, mode)
1351 enum machine_mode mode;
1353 unsigned int rounded_size = GET_MODE_SIZE (mode);
1355 #ifdef PUSH_ROUNDING
1356 rounded_size = PUSH_ROUNDING (rounded_size);
/* OP must be a MEM of the right mode whose address pushes on the
   stack pointer.  */
1359 if (GET_CODE (op) != MEM)
1362 if (mode != VOIDmode && GET_MODE (op) != mode)
/* When no rounding applies, a simple auto-modify (STACK_PUSH_CODE,
   i.e. PRE_DEC or PRE_INC depending on stack direction) is expected.  */
1367 if (rounded_size == GET_MODE_SIZE (mode))
1369 if (GET_CODE (op) != STACK_PUSH_CODE)
/* Otherwise the push must be expressed as
   (pre_modify sp (plus sp (const_int +/- rounded_size))).  */
1374 if (GET_CODE (op) != PRE_MODIFY
1375 || GET_CODE (XEXP (op, 1)) != PLUS
1376 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1377 || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
1378 #ifdef STACK_GROWS_DOWNWARD
1379 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1381 || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
1387 return XEXP (op, 0) == stack_pointer_rtx;
1390 /* Return 1 if OP is a valid operand that stands for popping a
1391 value of mode MODE off the stack.
1393 The main use of this function is as a predicate in match_operand
1394 expressions in the machine description. */
1397 pop_operand (op, mode)
1399 enum machine_mode mode;
1401 if (GET_CODE (op) != MEM)
1404 if (mode != VOIDmode && GET_MODE (op) != mode)
/* The address must be the pop form for this target (POST_INC or
   POST_DEC, per STACK_POP_CODE) applied to the stack pointer.  */
1409 if (GET_CODE (op) != STACK_POP_CODE)
1412 return XEXP (op, 0) == stack_pointer_rtx;
1415 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1418 memory_address_p (mode, addr)
1419 enum machine_mode mode ATTRIBUTE_UNUSED;
/* ADDRESSOF is always considered valid (it is replaced before final).
   Otherwise defer to the target's GO_IF_LEGITIMATE_ADDRESS, which jumps
   to `win' on success.  */
1422 if (GET_CODE (addr) == ADDRESSOF)
1425 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1432 /* Return 1 if OP is a valid memory reference with mode MODE,
1433 including a valid address.
1435 The main use of this function is as a predicate in match_operand
1436 expressions in the machine description. */
1439 memory_operand (op, mode)
1441 enum machine_mode mode;
1445 if (! reload_completed)
1446 /* Note that no SUBREG is a memory operand before end of reload pass,
1447 because (SUBREG (MEM...)) forces reloading into a register. */
1448 return GET_CODE (op) == MEM && general_operand (op, mode);
1450 if (mode != VOIDmode && GET_MODE (op) != mode)
/* After reload, look through a SUBREG wrapper to find the MEM.  */
1454 if (GET_CODE (inner) == SUBREG)
1455 inner = SUBREG_REG (inner);
1457 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1460 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1461 that is, a memory reference whose address is a general_operand. */
1464 indirect_operand (op, mode)
1466 enum machine_mode mode;
1468 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1469 if (! reload_completed
1470 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
/* Compute the byte offset the SUBREG implies into the inner MEM.  */
1472 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1473 rtx inner = SUBREG_REG (op);
/* On big-endian targets the low part lives at the high address, so
   the offset needs an endianness correction.  */
1475 if (BYTES_BIG_ENDIAN)
1476 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1477 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1479 if (mode != VOIDmode && GET_MODE (op) != mode)
1482 /* The only way that we can have a general_operand as the resulting
1483 address is if OFFSET is zero and the address already is an operand
1484 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1487 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1488 || (GET_CODE (XEXP (inner, 0)) == PLUS
1489 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1490 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1491 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
/* Ordinary case: a MEM that is both a valid memory operand and whose
   address is itself a general operand.  */
1494 return (GET_CODE (op) == MEM
1495 && memory_operand (op, mode)
1496 && general_operand (XEXP (op, 0), Pmode));
1499 /* Return 1 if this is a comparison operator. This allows the use of
1500 MATCH_OPERATOR to recognize all the branch insns. */
1503 comparison_operator (op, mode)
1505 enum machine_mode mode;
/* RTX class '<' covers all comparison codes (EQ, NE, LT, GEU, ...).  */
1507 return ((mode == VOIDmode || GET_MODE (op) == mode)
1508 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1511 /* If BODY is an insn body that uses ASM_OPERANDS,
1512 return the number of operands (both input and output) in the insn.
1513 Otherwise return -1. */
1516 asm_noperands (body)
/* Dispatch on the overall shape of the asm pattern.  */
1519 switch (GET_CODE (body))
1522 /* No output operands: return number of input operands. */
1523 return ASM_OPERANDS_INPUT_LENGTH (body);
1525 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1526 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1527 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1531 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1532 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1534 /* Multiple output operands, or 1 output plus some clobbers:
1535 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1539 /* Count backwards through CLOBBERs to determine number of SETs. */
1540 for (i = XVECLEN (body, 0); i > 0; i--)
1542 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1544 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1548 /* N_SETS is now number of output operands. */
1551 /* Verify that all the SETs we have
1552 came from a single original asm_operands insn
1553 (so that invalid combinations are blocked). */
1554 for (i = 0; i < n_sets; i++)
1556 rtx elt = XVECEXP (body, 0, i);
1557 if (GET_CODE (elt) != SET)
1559 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1561 /* If these ASM_OPERANDS rtx's came from different original insns
1562 then they aren't allowed together. */
1563 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1564 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
/* Total operands = inputs of the shared ASM_OPERANDS plus one per SET.  */
1567 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1570 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1572 /* 0 outputs, but some clobbers:
1573 body is [(asm_operands ...) (clobber (reg ...))...]. */
1576 /* Make sure all the other parallel things really are clobbers. */
1577 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1578 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1581 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1590 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1591 copy its operands (both input and output) into the vector OPERANDS,
1592 the locations of the operands within the insn into the vector OPERAND_LOCS,
1593 and the constraints for the operands into CONSTRAINTS.
1594 Write the modes of the operands into MODES.
1595 Return the assembler-template.
1597 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1598 we don't store that info. */
1601 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1605 const char **constraints;
1606 enum machine_mode *modes;
1610 const char *template = 0;
/* Case 1: one output -- (set OUTPUT (asm_operands ...)).
   Outputs occupy slot 0; inputs follow at slots 1..n.  */
1612 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1614 rtx asmop = SET_SRC (body);
1615 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1617 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1619 for (i = 1; i < noperands; i++)
1622 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1624 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1626 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1628 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1631 /* The output is in the SET.
1632 Its constraint is in the ASM_OPERANDS itself. */
1634 operands[0] = SET_DEST (body);
1636 operand_locs[0] = &SET_DEST (body);
1638 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1640 modes[0] = GET_MODE (SET_DEST (body));
1641 template = ASM_OPERANDS_TEMPLATE (asmop);
/* Case 2: no outputs at all -- bare (asm_operands ...).  */
1643 else if (GET_CODE (body) == ASM_OPERANDS)
1646 /* No output operands: BODY is (asm_operands ....). */
1648 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1650 /* The input operands are found in the 1st element vector. */
1651 /* Constraints for inputs are in the 2nd element vector. */
1652 for (i = 0; i < noperands; i++)
1655 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1657 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1659 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1661 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1663 template = ASM_OPERANDS_TEMPLATE (asmop);
/* Case 3: parallel of SETs (possibly followed by CLOBBERs):
   several outputs first, then the inputs.  */
1665 else if (GET_CODE (body) == PARALLEL
1666 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1668 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1669 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1670 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1671 int nout = 0; /* Does not include CLOBBERs. */
1673 /* At least one output, plus some CLOBBERs. */
1675 /* The outputs are in the SETs.
1676 Their constraints are in the ASM_OPERANDS itself. */
1677 for (i = 0; i < nparallel; i++)
1679 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1680 break; /* Past last SET */
1683 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1685 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1687 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1689 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
/* Inputs follow the outputs, offset by NOUT.  */
1693 for (i = 0; i < nin; i++)
1696 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1698 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1700 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1702 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1705 template = ASM_OPERANDS_TEMPLATE (asmop);
/* Case 4: parallel of (asm_operands ...) plus CLOBBERs: inputs only.  */
1707 else if (GET_CODE (body) == PARALLEL
1708 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1710 /* No outputs, but some CLOBBERs. */
1712 rtx asmop = XVECEXP (body, 0, 0);
1713 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1715 for (i = 0; i < nin; i++)
1718 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1720 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1722 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1724 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1727 template = ASM_OPERANDS_TEMPLATE (asmop);
1733 /* Check if an asm_operand matches it's constraints.
1734 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1737 asm_operand_ok (op, constraint)
1739 const char *constraint;
1743 /* Use constrain_operands after reload. */
1744 if (reload_completed)
/* Walk the constraint string one letter at a time; any letter that
   accepts OP makes the whole constraint succeed.  */
1749 char c = *constraint++;
1763 case '0': case '1': case '2': case '3': case '4':
1764 case '5': case '6': case '7': case '8': case '9':
1765 /* For best results, our caller should have given us the
1766 proper matching constraint, but we can't actually fail
1767 the check if they didn't. Indicate that results are
/* 'p': any valid address.  */
1773 if (address_operand (op, VOIDmode))
1778 case 'V': /* non-offsettable */
1779 if (memory_operand (op, VOIDmode))
1783 case 'o': /* offsettable */
1784 if (offsettable_nonstrict_memref_p (op))
1789 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1790 excepting those that expand_call created. Further, on some
1791 machines which do not have generalized auto inc/dec, an inc/dec
1792 is not a memory_operand.
1794 Match any memory and hope things are resolved after reload. */
/* '<': memory with pre/post decrement address.  */
1796 if (GET_CODE (op) == MEM
1798 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1799 || GET_CODE (XEXP (op, 0)) == POST_DEC))
/* '>': memory with pre/post increment address.  */
1804 if (GET_CODE (op) == MEM
1806 || GET_CODE (XEXP (op, 0)) == PRE_INC
1807 || GET_CODE (XEXP (op, 0)) == POST_INC))
1812 #ifndef REAL_ARITHMETIC
1813 /* Match any floating double constant, but only if
1814 we can examine the bits of it reliably. */
1815 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1816 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1817 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
/* 'E'/'F': floating-point CONST_DOUBLE.  */
1823 if (GET_CODE (op) == CONST_DOUBLE)
/* 'G'/'H': machine-specific floating constant classes.  */
1828 if (GET_CODE (op) == CONST_DOUBLE
1829 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1833 if (GET_CODE (op) == CONST_DOUBLE
1834 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
/* 's'/'i'/'n': integer constants of various generality.  */
1839 if (GET_CODE (op) == CONST_INT
1840 || (GET_CODE (op) == CONST_DOUBLE
1841 && GET_MODE (op) == VOIDmode))
1847 #ifdef LEGITIMATE_PIC_OPERAND_P
1848 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1855 if (GET_CODE (op) == CONST_INT
1856 || (GET_CODE (op) == CONST_DOUBLE
1857 && GET_MODE (op) == VOIDmode))
/* 'I'..'P': machine-specific CONST_INT ranges.  */
1862 if (GET_CODE (op) == CONST_INT
1863 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1867 if (GET_CODE (op) == CONST_INT
1868 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1872 if (GET_CODE (op) == CONST_INT
1873 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1877 if (GET_CODE (op) == CONST_INT
1878 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1882 if (GET_CODE (op) == CONST_INT
1883 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1887 if (GET_CODE (op) == CONST_INT
1888 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1892 if (GET_CODE (op) == CONST_INT
1893 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1897 if (GET_CODE (op) == CONST_INT
1898 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
/* 'g': any general operand.  */
1906 if (general_operand (op, VOIDmode))
1911 /* For all other letters, we first check for a register class,
1912 otherwise it is an EXTRA_CONSTRAINT. */
1913 if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
1916 if (GET_MODE (op) == BLKmode)
1918 if (register_operand (op, VOIDmode))
1921 #ifdef EXTRA_CONSTRAINT
1922 if (EXTRA_CONSTRAINT (op, c))
1932 /* Given an rtx *P, if it is a sum containing an integer constant term,
1933 return the location (type rtx *) of the pointer to that constant term.
1934 Otherwise, return a null pointer. */
1937 find_constant_term_loc (p)
1941 register enum rtx_code code = GET_CODE (*p);
1943 /* If *P IS such a constant term, P is its location. */
1945 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1949 /* Otherwise, if not a sum, it has no constant term. */
1951 if (GET_CODE (*p) != PLUS)
1954 /* If one of the summands is constant, return its location. */
1956 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1957 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1960 /* Otherwise, check each summand for containing a constant term. */
/* Recurse into each arm of the PLUS; first hit wins.  */
1962 if (XEXP (*p, 0) != 0)
1964 tem = find_constant_term_loc (&XEXP (*p, 0));
1969 if (XEXP (*p, 1) != 0)
1971 tem = find_constant_term_loc (&XEXP (*p, 1));
1979 /* Return 1 if OP is a memory reference
1980 whose address contains no side effects
1981 and remains valid after the addition
1982 of a positive integer less than the
1983 size of the object being referenced.
1985 We assume that the original address is valid and do not check it.
1987 This uses strict_memory_address_p as a subroutine, so
1988 don't use it before reload. */
1991 offsettable_memref_p (op)
/* Strict variant: first argument 1 selects strict address checking.  */
1994 return ((GET_CODE (op) == MEM)
1995 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1998 /* Similar, but don't require a strictly valid mem ref:
1999 consider pseudo-regs valid as index or base regs. */
2002 offsettable_nonstrict_memref_p (op)
/* Non-strict variant: first argument 0 allows pseudo registers.  */
2005 return ((GET_CODE (op) == MEM)
2006 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
2009 /* Return 1 if Y is a memory address which contains no side effects
2010 and would remain valid after the addition of a positive integer
2011 less than the size of that mode.
2013 We assume that the original address is valid and do not check it.
2014 We do check that it is valid for narrower modes.
2016 If STRICTP is nonzero, we require a strictly valid address,
2017 for the sake of use in reload.c. */
2020 offsettable_address_p (strictp, mode, y)
2022 enum machine_mode mode;
2025 register enum rtx_code ycode = GET_CODE (y);
/* Select the strict or non-strict address validator once, up front.  */
2029 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
2030 (strictp ? strict_memory_address_p : memory_address_p);
2031 unsigned int mode_sz = GET_MODE_SIZE (mode);
2033 if (CONSTANT_ADDRESS_P (y))
2036 /* Adjusting an offsettable address involves changing to a narrower mode.
2037 Make sure that's OK. */
2039 if (mode_dependent_address_p (y))
2042 /* ??? How much offset does an offsettable BLKmode reference need?
2043 Clearly that depends on the situation in which it's being used.
2044 However, the current situation in which we test 0xffffffff is
2045 less than ideal. Caveat user. */
2047 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2049 /* If the expression contains a constant term,
2050 see if it remains valid when max possible offset is added. */
2052 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
/* Temporarily bump the constant term in place, test, then restore.  */
2057 *y2 = plus_constant (*y2, mode_sz - 1);
2058 /* Use QImode because an odd displacement may be automatically invalid
2059 for any wider mode. But it should be valid for a single byte. */
2060 good = (*addressp) (QImode, y);
2062 /* In any case, restore old contents of memory. */
/* RTX class 'a' means an autoincrement-style address: not offsettable.  */
2067 if (GET_RTX_CLASS (ycode) == 'a')
2070 /* The offset added here is chosen as the maximum offset that
2071 any instruction could need to add when operating on something
2072 of the specified mode. We assume that if Y and Y+c are
2073 valid addresses then so is Y+d for all 0<d<c. */
2075 z = plus_constant_for_output (y, mode_sz - 1);
2077 /* Use QImode because an odd displacement may be automatically invalid
2078 for any wider mode. But it should be valid for a single byte. */
2079 return (*addressp) (QImode, z);
2082 /* Return 1 if ADDR is an address-expression whose effect depends
2083 on the mode of the memory reference it is used in.
2085 Autoincrement addressing is a typical example of mode-dependence
2086 because the amount of the increment depends on the mode. */
2089 mode_dependent_address_p (addr)
2090 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
/* The target macro jumps to `win' if ADDR is mode-dependent.  */
2092 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2094 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2095 win: ATTRIBUTE_UNUSED_LABEL
2099 /* Return 1 if OP is a general operand
2100 other than a memory ref with a mode dependent address. */
2103 mode_independent_operand (op, mode)
2104 enum machine_mode mode;
2109 if (! general_operand (op, mode))
/* Non-memory general operands are always mode-independent.  */
2112 if (GET_CODE (op) != MEM)
2115 addr = XEXP (op, 0);
/* Jumps to `lose' if the address is mode-dependent.  */
2116 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2118 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2119 lose: ATTRIBUTE_UNUSED_LABEL
2123 /* Given an operand OP that is a valid memory reference which
2124 satisfies offsettable_memref_p, return a new memory reference whose
2125 address has been adjusted by OFFSET. OFFSET should be positive and
2126 less than the size of the object referenced. */
2129 adj_offsettable_operand (op, offset)
2133 register enum rtx_code code = GET_CODE (op);
2137 register rtx y = XEXP (op, 0);
/* Constant address: just build a new MEM at address + OFFSET,
   copying the memory attributes from OP.  */
2140 if (CONSTANT_ADDRESS_P (y))
2142 new = gen_rtx_MEM (GET_MODE (op),
2143 plus_constant_for_output (y, offset));
2144 MEM_COPY_ATTRIBUTES (new, op);
/* (plus ...) address: fold OFFSET into its existing constant term.  */
2148 if (GET_CODE (y) == PLUS)
2151 register rtx *const_loc;
2155 const_loc = find_constant_term_loc (&z);
2158 *const_loc = plus_constant_for_output (*const_loc, offset);
/* Fallback: wrap the address in a new (plus y offset) MEM.  */
2163 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
2164 MEM_COPY_ATTRIBUTES (new, op);
2170 /* Like extract_insn, but save insn extracted and don't extract again, when
2171 called again for the same insn expecting that recog_data still contain the
2172 valid information. This is used primary by gen_attr infrastructure that
2173 often does extract insn again and again. */
2175 extract_insn_cached (insn)
/* Cache hit: recog_data already describes INSN and it is recognized.  */
2178 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2180 extract_insn (insn);
2181 recog_data.insn = insn;
2183 /* Do cached extract_insn, constrain_operand and complain about failures.
2184 Used by insn_attrtab. */
2186 extract_constrain_insn_cached (insn)
2189 extract_insn_cached (insn);
/* which_alternative == -1 means constraints have not been matched yet
   for this extraction; run them now and die if none match.  */
2190 if (which_alternative == -1
2191 && !constrain_operands (reload_completed))
2192 fatal_insn_not_found (insn);
2194 /* Do cached constrain_operand and complain about failures. */
2196 constrain_operands_cached (strict)
/* Only re-run constraint matching if no alternative is cached.  */
2199 if (which_alternative == -1)
2200 return constrain_operands (strict);
2205 /* Analyze INSN and fill in recog_data. */
2214 rtx body = PATTERN (insn);
/* Reset the global recog_data before classifying the pattern.  */
2216 recog_data.insn = NULL;
2217 recog_data.n_operands = 0;
2218 recog_data.n_alternatives = 0;
2219 recog_data.n_dups = 0;
2220 which_alternative = -1;
2222 switch (GET_CODE (body))
/* A SET whose source is ASM_OPERANDS is an asm with one output.  */
2232 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
/* A PARALLEL headed by a (set ... (asm_operands ...)) or by a bare
   ASM_OPERANDS is also an asm insn.  */
2237 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2238 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2239 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2245 recog_data.n_operands = noperands = asm_noperands (body);
2248 /* This insn is an `asm' with operands. */
2250 /* expand_asm_operands makes sure there aren't too many operands. */
2251 if (noperands > MAX_RECOG_OPERANDS)
2254 /* Now get the operand values and constraints out of the insn. */
2255 decode_asm_operands (body, recog_data.operand,
2256 recog_data.operand_loc,
2257 recog_data.constraints,
2258 recog_data.operand_mode);
/* Count alternatives by counting commas in the first constraint.  */
2261 const char *p = recog_data.constraints[0];
2262 recog_data.n_alternatives = 1;
2264 recog_data.n_alternatives += (*p++ == ',');
2268 fatal_insn_not_found (insn);
2272 /* Ordinary insn: recognize it, get the operands via insn_extract
2273 and get the constraints. */
2275 icode = recog_memoized (insn);
2277 fatal_insn_not_found (insn);
2279 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2280 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2281 recog_data.n_dups = insn_data[icode].n_dups;
2283 insn_extract (insn);
2285 for (i = 0; i < noperands; i++)
2287 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2288 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2289 /* VOIDmode match_operands gets mode from their real operand. */
2290 if (recog_data.operand_mode[i] == VOIDmode)
2291 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
/* Classify each operand by the first constraint character:
   '=' -> output, '+' -> in/out, otherwise input.  */
2294 for (i = 0; i < noperands; i++)
2295 recog_data.operand_type[i]
2296 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2297 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2300 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2304 /* After calling extract_insn, you can use this function to extract some
2305 information from the constraint strings into a more usable form.
2306 The collected data is stored in recog_op_alt. */
2308 preprocess_constraints ()
2312 memset (recog_op_alt, 0, sizeof recog_op_alt);
/* For each operand, parse its constraint string once per alternative.  */
2313 for (i = 0; i < recog_data.n_operands; i++)
2316 struct operand_alternative *op_alt;
2317 const char *p = recog_data.constraints[i];
2319 op_alt = recog_op_alt[i];
2321 for (j = 0; j < recog_data.n_alternatives; j++)
2323 op_alt[j].class = NO_REGS;
2324 op_alt[j].constraint = p;
2325 op_alt[j].matches = -1;
2326 op_alt[j].matched = -1;
/* An empty constraint (or empty alternative) accepts anything.  */
2328 if (*p == '\0' || *p == ',')
2330 op_alt[j].anything_ok = 1;
2340 while (c != ',' && c != '\0');
2341 if (c == ',' || c == '\0')
2346 case '=': case '+': case '*': case '%':
2347 case 'E': case 'F': case 'G': case 'H':
2348 case 's': case 'i': case 'n':
2349 case 'I': case 'J': case 'K': case 'L':
2350 case 'M': case 'N': case 'O': case 'P':
2351 /* These don't say anything we care about. */
/* '?' and '!' add reject penalties of different severity.  */
2355 op_alt[j].reject += 6;
2358 op_alt[j].reject += 600;
2361 op_alt[j].earlyclobber = 1;
2364 case '0': case '1': case '2': case '3': case '4':
2365 case '5': case '6': case '7': case '8': case '9':
/* Digit: record the matched-operand cross-links both ways.  */
2366 op_alt[j].matches = c - '0';
2367 recog_op_alt[op_alt[j].matches][j].matched = i;
2371 op_alt[j].memory_ok = 1;
2374 op_alt[j].decmem_ok = 1;
2377 op_alt[j].incmem_ok = 1;
2380 op_alt[j].nonoffmem_ok = 1;
2383 op_alt[j].offmem_ok = 1;
2386 op_alt[j].anything_ok = 1;
/* 'p' (address): union in the base-register class.  */
2390 op_alt[j].is_address = 1;
2391 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2395 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
/* Any other letter: fold in its machine-defined register class.  */
2399 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2407 /* Check the operands of an insn against the insn's operand constraints
2408 and return 1 if they are valid.
2409 The information about the insn's operands, constraints, operand modes
2410 etc. is obtained from the global variables set up by extract_insn.
2412 WHICH_ALTERNATIVE is set to a number which indicates which
2413 alternative of constraints was matched: 0 for the first alternative,
2414 1 for the next, etc.
2416 In addition, when two operands are match
2417 and it happens that the output operand is (reg) while the
2418 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2419 make the output operand look like the input.
2420 This is because the output operand is the one the template will print.
2422 This is used in final, just before printing the assembler code and by
2423 the routines that determine an insn's attribute.
2425 If STRICT is a positive non-zero value, it means that we have been
2426 called after reload has been completed. In that case, we must
2427 do all checks strictly. If it is zero, it means that we have been called
2428 before reload has completed. In that case, we first try to see if we can
2429 find an alternative that matches strictly. If not, we try again, this
2430 time assuming that reload will fix up the insn. This provides a "best
2431 guess" for the alternative and is used to compute attributes of insns prior
2432 to reload. A negative value of STRICT is used for this internal call. */
2440 constrain_operands (strict)
2443 const char *constraints[MAX_RECOG_OPERANDS];
2444 int matching_operands[MAX_RECOG_OPERANDS];
2445 int earlyclobber[MAX_RECOG_OPERANDS];
2448 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2449 int funny_match_index;
2451 which_alternative = 0;
/* An insn with no operands or no alternatives trivially matches.  */
2452 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2455 for (c = 0; c < recog_data.n_operands; c++)
2457 constraints[c] = recog_data.constraints[c];
2458 matching_operands[c] = -1;
/* Outer loop: try each alternative in turn until one matches.  */
2465 funny_match_index = 0;
2467 for (opno = 0; opno < recog_data.n_operands; opno++)
2469 register rtx op = recog_data.operand[opno];
2470 enum machine_mode mode = GET_MODE (op);
2471 register const char *p = constraints[opno];
2476 earlyclobber[opno] = 0;
2478 /* A unary operator may be accepted by the predicate, but it
2479 is irrelevant for matching constraints. */
2480 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
/* For a SUBREG of a hard reg, note the word offset and strip it.  */
2483 if (GET_CODE (op) == SUBREG)
2485 if (GET_CODE (SUBREG_REG (op)) == REG
2486 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2487 offset = SUBREG_WORD (op);
2488 op = SUBREG_REG (op);
2491 /* An empty constraint or empty alternative
2492 allows anything which matched the pattern. */
2493 if (*p == 0 || *p == ',')
/* Inner loop: each constraint letter of this alternative.  */
2496 while (*p && (c = *p++) != ',')
2499 case '?': case '!': case '*': case '%':
/* '#': ignore the rest of this alternative's letters.  */
2504 /* Ignore rest of this alternative as far as
2505 constraint checking is concerned. */
2506 while (*p && *p != ',')
2511 earlyclobber[opno] = 1;
2514 case '0': case '1': case '2': case '3': case '4':
2515 case '5': case '6': case '7': case '8': case '9':
2517 /* This operand must be the same as a previous one.
2518 This kind of constraint is used for instructions such
2519 as add when they take only two operands.
2521 Note that the lower-numbered operand is passed first.
2523 If we are not testing strictly, assume that this constraint
2524 will be satisfied. */
2529 rtx op1 = recog_data.operand[c - '0'];
2530 rtx op2 = recog_data.operand[opno];
2532 /* A unary operator may be accepted by the predicate,
2533 but it is irrelevant for matching constraints. */
2534 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2535 op1 = XEXP (op1, 0);
2536 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2537 op2 = XEXP (op2, 0);
2539 val = operands_match_p (op1, op2);
2542 matching_operands[opno] = c - '0';
2543 matching_operands[c - '0'] = opno;
2547 /* If output is *x and input is *--x,
2548 arrange later to change the output to *--x as well,
2549 since the output op is the one that will be printed. */
2550 if (val == 2 && strict > 0)
2552 funny_match[funny_match_index].this = opno;
2553 funny_match[funny_match_index++].other = c - '0';
2558 /* p is used for address_operands. When we are called by
2559 gen_reload, no one will have checked that the address is
2560 strictly valid, i.e., that all pseudos requiring hard regs
2561 have gotten them. */
2563 || (strict_memory_address_p (recog_data.operand_mode[opno],
2568 /* No need to check general_operand again;
2569 it was done in insn-recog.c. */
2571 /* Anything goes unless it is a REG and really has a hard reg
2572 but the hard reg is not in the class GENERAL_REGS. */
2574 || GENERAL_REGS == ALL_REGS
2575 || GET_CODE (op) != REG
2576 || (reload_in_progress
2577 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2578 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2583 /* This is used for a MATCH_SCRATCH in the cases when
2584 we don't actually need anything. So anything goes
/* 'm': memory, or things reload can turn into memory.  */
2590 if (GET_CODE (op) == MEM
2591 /* Before reload, accept what reload can turn into mem. */
2592 || (strict < 0 && CONSTANT_P (op))
2593 /* During reload, accept a pseudo */
2594 || (reload_in_progress && GET_CODE (op) == REG
2595 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* '<': auto-decrement memory.  */
2600 if (GET_CODE (op) == MEM
2601 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2602 || GET_CODE (XEXP (op, 0)) == POST_DEC))
/* '>': auto-increment memory.  */
2607 if (GET_CODE (op) == MEM
2608 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2609 || GET_CODE (XEXP (op, 0)) == POST_INC))
2614 #ifndef REAL_ARITHMETIC
2615 /* Match any CONST_DOUBLE, but only if
2616 we can examine the bits of it reliably. */
2617 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2618 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2619 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2622 if (GET_CODE (op) == CONST_DOUBLE)
2627 if (GET_CODE (op) == CONST_DOUBLE)
/* 'G'/'H': machine-specific CONST_DOUBLE classes.  */
2633 if (GET_CODE (op) == CONST_DOUBLE
2634 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
/* 's'/'n': integer constants.  */
2639 if (GET_CODE (op) == CONST_INT
2640 || (GET_CODE (op) == CONST_DOUBLE
2641 && GET_MODE (op) == VOIDmode))
2644 if (CONSTANT_P (op))
2649 if (GET_CODE (op) == CONST_INT
2650 || (GET_CODE (op) == CONST_DOUBLE
2651 && GET_MODE (op) == VOIDmode))
/* 'I'..'P': machine-specific CONST_INT ranges, checked by letter.  */
2663 if (GET_CODE (op) == CONST_INT
2664 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
/* 'V': non-offsettable memory.  */
2669 if (GET_CODE (op) == MEM
2670 && ((strict > 0 && ! offsettable_memref_p (op))
2672 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2673 || (reload_in_progress
2674 && !(GET_CODE (op) == REG
2675 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
/* 'o': offsettable memory (strict or non-strict per STRICT).  */
2680 if ((strict > 0 && offsettable_memref_p (op))
2681 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2682 /* Before reload, accept what reload can handle. */
2684 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2685 /* During reload, accept a pseudo */
2686 || (reload_in_progress && GET_CODE (op) == REG
2687 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* Default: register-class letters and EXTRA_CONSTRAINT.  */
2693 enum reg_class class;
2695 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2696 if (class != NO_REGS)
2700 && GET_CODE (op) == REG
2701 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2702 || (strict == 0 && GET_CODE (op) == SCRATCH)
2703 || (GET_CODE (op) == REG
2704 && reg_fits_class_p (op, class, offset, mode)))
2707 #ifdef EXTRA_CONSTRAINT
2708 else if (EXTRA_CONSTRAINT (op, c))
2715 constraints[opno] = p;
2716 /* If this operand did not win somehow,
2717 this alternative loses. */
2721 /* This alternative won; the operands are ok.
2722 Change whichever operands this alternative says to change. */
2727 /* See if any earlyclobber operand conflicts with some other
2731 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2732 /* Ignore earlyclobber operands now in memory,
2733 because we would often report failure when we have
2734 two memory operands, one of which was formerly a REG. */
2735 if (earlyclobber[eopno]
2736 && GET_CODE (recog_data.operand[eopno]) == REG)
2737 for (opno = 0; opno < recog_data.n_operands; opno++)
2738 if ((GET_CODE (recog_data.operand[opno]) == MEM
2739 || recog_data.operand_type[opno] != OP_OUT)
2741 /* Ignore things like match_operator operands. */
2742 && *recog_data.constraints[opno] != 0
2743 && ! (matching_operands[opno] == eopno
2744 && operands_match_p (recog_data.operand[opno],
2745 recog_data.operand[eopno]))
2746 && ! safe_from_earlyclobber (recog_data.operand[opno],
2747 recog_data.operand[eopno]))
/* Success: apply the queued output-operand substitutions (see the
   `funny_match' comment above) and return.  */
2752 while (--funny_match_index >= 0)
2754 recog_data.operand[funny_match[funny_match_index].other]
2755 = recog_data.operand[funny_match[funny_match_index].this];
2762 which_alternative++;
2764 while (which_alternative < recog_data.n_alternatives);
2766 which_alternative = -1;
2767 /* If we are about to reject this, but we are not to test strictly,
2768 try a very loose test. Only return failure if it fails also. */
2770 return constrain_operands (-1);
2775 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2776 is a hard reg in class CLASS when its regno is offset by OFFSET
2777 and changed to mode MODE.
2778 If REG occupies multiple hard regs, all of them must be in CLASS. */
/* NOTE(review): this listing is a sparse excerpt of the function; the
   return type, the declarations of OPERAND and OFFSET, the loop body,
   and the return statements fall on elided source lines.  */
2781 reg_fits_class_p (operand, class, offset, mode)
2783      register enum reg_class class;
2785      enum machine_mode mode;
     /* Pseudo registers never "fit" a hard-register class; the test below
        can only succeed for a hard register whose offset regno is in the
        class's register set.  */
2787   register int regno = REGNO (operand);
2788   if (regno < FIRST_PSEUDO_REGISTER
2789       && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
     /* A value of mode MODE occupies HARD_REGNO_NREGS consecutive hard
        registers; per the header comment, every one of them must be in
        CLASS for the operand to fit.  */
2794       for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2796 	if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2805 /* Split all insns in the function. If UPD_LIFE, update life info after. */
/* NOTE(review): sparse excerpt -- local declarations, several closing
   braces, and some statements (including the test guarding SET_BIT and
   the loop exits) fall on elided source lines.  */
2808 split_all_insns (upd_life)
     /* Bitmap of the basic blocks we actually changed, so that life
        information is only recomputed where needed.  */
2815   blocks = sbitmap_alloc (n_basic_blocks);
2816   sbitmap_zero (blocks);
     /* Walk all blocks, splitting each insn in place.  */
2819   for (i = n_basic_blocks - 1; i >= 0; --i)
2821       basic_block bb = BASIC_BLOCK (i);
2824       for (insn = bb->head; insn ; insn = next)
2828 	  /* Can't use `next_real_insn' because that might go across
2829 	     CODE_LABELS and short-out basic blocks.  */
2830 	  next = NEXT_INSN (insn);
2831 	  if (! INSN_P (insn))
2834 	  /* Don't split no-op move insns.  These should silently
2835 	     disappear later in final.  Splitting such insns would
2836 	     break the code that handles REG_NO_CONFLICT blocks.  */
2838 	  else if ((set = single_set (insn)) != NULL
2839 		   && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
2841 	      /* Nops get in the way while scheduling, so delete them
2842 		 now if register allocation has already been done.  It
2843 		 is too risky to try to do this before register
2844 		 allocation, and there are unlikely to be very many
2845 		 nops then anyways.  */
2846 	      if (reload_completed)
		  /* Turn the nop into a deleted note in place rather than
		     unlinking it from the chain.  */
2848 		  PUT_CODE (insn, NOTE);
2849 		  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2850 		  NOTE_SOURCE_FILE (insn) = 0;
2855 	      /* Split insns here to get max fine-grain parallelism.  */
2856 	      rtx first = PREV_INSN (insn);
2857 	      rtx last = try_split (PATTERN (insn), insn, 1);
		  /* Something changed in this block; the guarding condition
		     is on an elided line (presumably LAST != INSN -- TODO
		     confirm against the full source).  */
2861 		  SET_BIT (blocks, i);
2864 		  /* try_split returns the NOTE that INSN became.  */
2865 		      PUT_CODE (insn, NOTE);
2866 		      NOTE_SOURCE_FILE (insn) = 0;
2867 		      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2869 		  /* ??? Coddle to md files that generate subregs in post-
2870 		     reload splitters instead of computing the proper
		     subreg earlier (continuation elided).  */
2872 		  if (reload_completed && first != last)
2874 		      first = NEXT_INSN (first);
2878 			  cleanup_subreg_operands (first);
2881 			  first = NEXT_INSN (first);
2885 	      if (insn == bb->end)
2893 	  if (insn == bb->end)
2897       /* ??? When we're called from just after reload, the CFG is in bad
2898 	 shape, and we may have fallen off the end.  This could be fixed
2899 	 by having reload not try to delete unreachable code.  Otherwise
2900 	 assert we found the end insn.  */
2901       if (insn == NULL && upd_life)
     /* Recompute life information only if something changed and the
        caller asked for it.  */
2905   if (changed && upd_life)
2907       compute_bb_for_insn (get_max_uid ());
2908       count_or_remove_death_notes (blocks, 1);
2909       update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2912   sbitmap_free (blocks);
2915 #ifdef HAVE_peephole2
/* Per-slot state for the peephole2 circular insn buffer.
   NOTE(review): the struct's members fall on elided lines of this
   listing; uses below reference an `insn' rtx and a `live_before'
   regset per slot.  */
2916 struct peep2_insn_data
/* Circular buffer of the last MAX_INSNS_PER_PEEP2 insns seen, plus one
   extra slot for end-of-block data.  */
2922 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
/* Index of the current slot in the circular buffer above.  */
2923 static int peep2_current;
2925 /* A non-insn marker indicating the last insn of the block.
2926    The live_before regset for this element is correct, indicating
2927    global_live_at_end for the block.  */
2928 #define PEEP2_EOB pc_rtx
2930 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2931    does not exist.  Used by the recognizer to find the next insn to match
2932    in a multi-insn pattern.  */
/* NOTE(review): the function signature and the statements between the
   two range checks (presumably `n += peep2_current;' -- TODO confirm
   against the full source) fall on elided lines of this listing.  */
2938   if (n >= MAX_INSNS_PER_PEEP2 + 1)
   /* Wrap N around the circular peep2_insn_data buffer.  */
2942   if (n >= MAX_INSNS_PER_PEEP2 + 1)
2943     n -= MAX_INSNS_PER_PEEP2 + 1;
   /* PEEP2_EOB marks the end of the basic block: no next insn exists.  */
2945   if (peep2_insn_data[n].insn == PEEP2_EOB)
2947   return peep2_insn_data[n].insn;
2950 /* Return true if REGNO is dead before the Nth non-note insn
2954 peep2_regno_dead_p (ofs, regno)
   /* NOTE(review): parameter declarations and the bodies of the range
      and validity checks fall on elided lines of this listing.  */
2958   if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
   /* Convert OFS into an index in the circular peep2_insn_data buffer.  */
2961   ofs += peep2_current;
2962   if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2963     ofs -= MAX_INSNS_PER_PEEP2 + 1;
2965   if (peep2_insn_data[ofs].insn == NULL_RTX)
   /* REGNO is dead iff it is not in the live-before set of the
      selected insn slot.  */
2968   return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2971 /* Similarly for a REG. */
2974 peep2_reg_dead_p (ofs, reg)
   /* NOTE(review): parameter declarations, the check bodies, and the
      final return statements fall on elided lines of this listing.  */
2980   if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
   /* Same circular-buffer index conversion as peep2_regno_dead_p.  */
2983   ofs += peep2_current;
2984   if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2985     ofs -= MAX_INSNS_PER_PEEP2 + 1;
2987   if (peep2_insn_data[ofs].insn == NULL_RTX)
   /* A multi-word REG occupies HARD_REGNO_NREGS hard registers; each one
      is tested for liveness (the loop header updating N is on an elided
      line -- TODO confirm against the full source).  */
2990   regno = REGNO (reg);
2991   n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2993   if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2998 /* Try to find a hard register of mode MODE, matching the register class in
2999 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3000 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3001 in which case the only condition is that the register must be available
3002 before CURRENT_INSN.
3003 Registers that already have bits set in REG_SET will not be considered.
3005 If an appropriate register is available, it will be returned and the
3006 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
/* NOTE(review): sparse excerpt -- the return type, the FROM/TO parameter
   declarations, several check bodies (the `continue'/`fail' paths), and
   the final failure return fall on elided source lines.  */
3010 peep2_find_free_register (from, to, class_str, mode, reg_set)
3012      const char *class_str;
3013      enum machine_mode mode;
3014      HARD_REG_SET *reg_set;
     /* Persists across calls so successive searches start at different
        registers (see "Distribute the free registers" below).  */
3016   static int search_ofs;
3017   enum reg_class class;
3021   if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
     /* Convert FROM and TO into circular-buffer indices.  */
3024   from += peep2_current;
3025   if (from >= MAX_INSNS_PER_PEEP2 + 1)
3026     from -= MAX_INSNS_PER_PEEP2 + 1;
3027   to += peep2_current;
3028   if (to >= MAX_INSNS_PER_PEEP2 + 1)
3029     to -= MAX_INSNS_PER_PEEP2 + 1;
3031   if (peep2_insn_data[from].insn == NULL_RTX)
     /* LIVE accumulates every register live anywhere in [FROM, TO]; a
        candidate must be dead across the whole range.  */
3033   REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3037       HARD_REG_SET this_live;
3039       if (++from >= MAX_INSNS_PER_PEEP2 + 1)
3041       if (peep2_insn_data[from].insn == NULL_RTX)
3043       REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3044       IOR_HARD_REG_SET (live, this_live);
     /* 'r' means GENERAL_REGS; other letters go through the target's
        constraint-letter mapping.  */
3047   class = (class_str[0] == 'r' ? GENERAL_REGS
3048 	   : REG_CLASS_FROM_LETTER (class_str[0]));
3050   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3052       int raw_regno, regno, success, j;
3054       /* Distribute the free registers as much as possible.  */
3055       raw_regno = search_ofs + i;
3056       if (raw_regno >= FIRST_PSEUDO_REGISTER)
3057 	raw_regno -= FIRST_PSEUDO_REGISTER;
3058 #ifdef REG_ALLOC_ORDER
3059       regno = reg_alloc_order[raw_regno];
3064       /* Don't allocate fixed registers.  */
3065       if (fixed_regs[regno])
3067       /* Make sure the register is of the right class.  */
3068       if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
3070       /* And can support the mode we need.  */
3071       if (! HARD_REGNO_MODE_OK (regno, mode))
3073       /* And that we don't create an extra save/restore.  */
3074       if (! call_used_regs[regno] && ! regs_ever_live[regno])
3076       /* And we don't clobber traceback for noreturn functions.  */
3077       if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3078 	  && (! reload_completed || frame_pointer_needed))
       /* Every hard reg the mode occupies must be free of both the
	  caller's exclusion set and the accumulated live set.  */
3082       for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3084 	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3085 	      || TEST_HARD_REG_BIT (live, regno + j))
       /* Success: mark the register(s) as taken in REG_SET.  */
3093 	  for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3094 	    SET_HARD_REG_BIT (*reg_set, regno + j);
3096 	  /* Start the next search with the next register.  */
3097 	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3099 	  search_ofs = raw_regno;
3101 	  return gen_rtx_REG (mode, regno);
3109 /* Perform the peephole2 optimization pass. */
/* NOTE(review): sparse excerpt -- local declarations, loop/conditional
   bodies, closing braces, and the `#else'/`#endif' halves of the
   HAVE_conditional_execution conditionals fall on elided source lines.  */
3112 peephole2_optimize (dump_file)
3113      FILE *dump_file ATTRIBUTE_UNUSED;
     /* One regset head per buffer slot, plus one for LIVE.  */
3115   regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3119 #ifdef HAVE_conditional_execution
3124   /* Initialize the regsets we're going to use.  */
3125   for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3126     peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3127   live = INITIALIZE_REG_SET (rs_heads[i]);
3129 #ifdef HAVE_conditional_execution
3130   blocks = sbitmap_alloc (n_basic_blocks);
3131   sbitmap_zero (blocks);
3134   count_or_remove_death_notes (NULL, 1);
     /* Scan blocks (and insns, below) in reverse so liveness can be
        propagated backwards as we go.  */
3137   for (b = n_basic_blocks - 1; b >= 0; --b)
3139       basic_block bb = BASIC_BLOCK (b);
3140       struct propagate_block_info *pbi;
3142       /* Indicate that all slots except the last holds invalid data.  */
3143       for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3144 	peep2_insn_data[i].insn = NULL_RTX;
3146       /* Indicate that the last slot contains live_after data.  */
3147       peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3148       peep2_current = MAX_INSNS_PER_PEEP2;
3150       /* Start up propagation.  */
3151       COPY_REG_SET (live, bb->global_live_at_end);
3152       COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3154 #ifdef HAVE_conditional_execution
3155       pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3157       pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3160       for (insn = bb->end; ; insn = prev)
3162 	  prev = PREV_INSN (insn);
3168 	      /* Record this insn.  */
3169 	      if (--peep2_current < 0)
3170 		peep2_current = MAX_INSNS_PER_PEEP2;
3171 	      peep2_insn_data[peep2_current].insn = insn;
3172 	      propagate_one_insn (pbi, insn);
3173 	      COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3175 	      /* Match the peephole.  */
3176 	      try = peephole2_insns (PATTERN (insn), insn, &match_len);
		  /* A pattern of MATCH_LEN+1 insns matched; I indexes the
		     last insn of the matched sequence in the buffer.  */
3179 		  i = match_len + peep2_current;
3180 		  if (i >= MAX_INSNS_PER_PEEP2 + 1)
3181 		    i -= MAX_INSNS_PER_PEEP2 + 1;
3183 		  /* Replace the old sequence with the new.  */
3184 		  flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
3185 		  try = emit_insn_after (try, prev);
3187 		  /* Adjust the basic block boundaries.  */
3188 		  if (peep2_insn_data[i].insn == bb->end)
3190 		  if (insn == bb->head)
3191 		    bb->head = NEXT_INSN (prev);
3193 #ifdef HAVE_conditional_execution
3194 		  /* With conditional execution, we cannot back up the
3195 		     live information so easily, since the conditional
3196 		     death data structures are not so self-contained.
3197 		     So record that we've made a modification to this
3198 		     block and update life information at the end.  */
3199 		  SET_BIT (blocks, b);
		  /* Invalidate all buffer slots; only end-of-block data
		     remains trustworthy after the replacement.  */
3202 		  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3203 		    peep2_insn_data[i].insn = NULL_RTX;
3204 		  peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3206 		  /* Back up lifetime information past the end of the
3207 		     newly created sequence.  */
3208 		  if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3210 		  COPY_REG_SET (live, peep2_insn_data[i].live_before);
3212 		  /* Update life information for the new sequence.  */
		      /* Walk the new insns backwards, re-propagating
			 liveness through each.  */
3218 		      i = MAX_INSNS_PER_PEEP2;
3219 		      peep2_insn_data[i].insn = try;
3220 		      propagate_one_insn (pbi, try);
3221 		      COPY_REG_SET (peep2_insn_data[i].live_before, live);
3223 		      try = PREV_INSN (try);
3225 		  while (try != prev);
3227 		  /* ??? Should verify that LIVE now matches what we
3228 		     had before the new sequence.  */
3235 	  if (insn == bb->head)
3239       free_propagate_block_info (pbi);
     /* Release all the regsets allocated at function entry.  */
3242   for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3243     FREE_REG_SET (peep2_insn_data[i].live_before);
3244   FREE_REG_SET (live);
3246 #ifdef HAVE_conditional_execution
3247   count_or_remove_death_notes (blocks, 1);
3248   update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3249   sbitmap_free (blocks);
3252 #endif /* HAVE_peephole2 */