1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "insn-flags.h"
30 #include "insn-codes.h"
31 #include "hard-reg-set.h"
38 #include "basic-block.h"
/* Provide default addressing codes for pushing and popping stack slots
   when the target does not define them; the choice follows the direction
   the stack grows.  NOTE(review): the #else/#endif lines of these
   conditionals are not visible in this excerpt -- confirm against the
   full file.  */
42 #ifndef STACK_PUSH_CODE
43 #ifdef STACK_GROWS_DOWNWARD
44 #define STACK_PUSH_CODE PRE_DEC
46 #define STACK_PUSH_CODE PRE_INC
50 #ifndef STACK_POP_CODE
51 #ifdef STACK_GROWS_DOWNWARD
52 #define STACK_POP_CODE POST_INC
54 #define STACK_POP_CODE POST_DEC
/* Forward declarations for static helpers defined later in this file.  */
58 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
59 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
60 static rtx *find_constant_term_loc PARAMS ((rtx *));
61 static int insn_invalid_p PARAMS ((rtx));
62 static void validate_replace_src_1 PARAMS ((rtx *, void *));
64 /* Nonzero means allow operands to be volatile.
65 This should be 0 if you are generating rtl, such as if you are calling
66 the functions in optabs.c and expmed.c (most of the time).
67 This should be 1 if all valid insns need to be recognized,
68 such as in regclass.c and final.c and reload.c.
70 init_recog and init_recog_no_volatile are responsible for setting this. */
/* Per-insn operand data; presumably filled in by extract_insn for the
   insn currently being examined -- confirm against the full file.  */
74 struct recog_data recog_data;
76 /* Contains a vector of operand_alternative structures for every operand.
77 Set up by preprocess_constraints. */
78 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
80 /* On return from `constrain_operands', indicate which alternative
83 int which_alternative;
85 /* Nonzero after end of reload pass.
86 Set to 1 or 0 by toplev.c.
87 Controls the significance of (SUBREG (MEM)). */
91 /* Initialize data used by the function `recog'.
92 This must be called once in the compilation of a function
93 before any insn recognition may be done in the function. */
/* NOTE(review): the body of this function is not visible in this
   excerpt; per the comment above (line 70 of the original), it is
   presumably responsible for clearing the volatile-ok flag -- confirm.  */
96 init_recog_no_volatile ()
107 /* Try recognizing the instruction INSN,
108 and return the code number that results.
109 Remember the code so that repeated calls do not
110 need to spend the time for actual rerecognition.
112 This function is the normal interface to instruction recognition.
113 The automatically-generated function `recog' is normally called
114 through this one. (The only exception is in combine.c.) */
117 recog_memoized_1 (insn)
/* A negative INSN_CODE means the insn has not been recognized yet (or
   its cached code was invalidated); run recog and cache its result.  */
120 if (INSN_CODE (insn) < 0)
121 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
122 return INSN_CODE (insn);
125 /* Check that X is an insn-body for an `asm' with operands
126 and that the operands mentioned in it are legitimate. */
129 check_asm_operands (x)
134 const char **constraints;
137 /* Post-reload, be more strict with things. */
138 if (reload_completed)
/* Wrap X in a fresh insn so extract_insn/constrain_operands can check
   the operands; a non-negative which_alternative signals success.  */
140 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
141 extract_insn (make_insn_raw (x));
142 constrain_operands (1);
143 return which_alternative >= 0;
146 noperands = asm_noperands (x);
152 operands = (rtx *) alloca (noperands * sizeof (rtx));
153 constraints = (const char **) alloca (noperands * sizeof (char *));
155 decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);
157 for (i = 0; i < noperands; i++)
159 const char *c = constraints[i];
/* A constraint that is a single digit N is a matching constraint:
   check this operand against operand N's constraint instead.  */
162 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
163 c = constraints[c[0] - '0'];
165 if (! asm_operand_ok (operands[i], c))
172 /* Static data for the next two routines. */
/* One pending change recorded by validate_change while grouping.  */
174 typedef struct change_t
182 static change_t *changes;
183 static int changes_allocated;
185 static int num_changes = 0;
187 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
188 at which NEW will be placed. If OBJECT is zero, no validation is done,
189 the change is simply made.
191 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
192 will be called with the address and mode as parameters. If OBJECT is
193 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
196 IN_GROUP is non-zero if this is part of a group of changes that must be
197 performed as a group. In that case, the changes will be stored. The
198 function `apply_change_group' will validate and apply the changes.
200 If IN_GROUP is zero, this is a single change. Try to recognize the insn
201 or validate the memory reference with the change applied. If the result
202 is not valid for the machine, suppress the change and return zero.
203 Otherwise, perform the change and return 1. */
206 validate_change (object, loc, new, in_group)
/* Nothing to record when the replacement is identical to what is there.  */
214 if (old == new || rtx_equal_p (old, new))
217 if (in_group == 0 && num_changes != 0)
222 /* Save the information describing this change. */
/* Grow the change log geometrically once the initial allocation fills.  */
223 if (num_changes >= changes_allocated)
225 if (changes_allocated == 0)
226 /* This value allows for repeated substitutions inside complex
227 indexed addresses, or changes in up to 5 insns. */
228 changes_allocated = MAX_RECOG_OPERANDS * 5;
230 changes_allocated *= 2;
233 (change_t*) xrealloc (changes,
234 sizeof (change_t) * changes_allocated);
237 changes[num_changes].object = object;
238 changes[num_changes].loc = loc;
239 changes[num_changes].old = old;
241 if (object && GET_CODE (object) != MEM)
243 /* Set INSN_CODE to force rerecognition of insn. Save old code in
245 changes[num_changes].old_code = INSN_CODE (object);
246 INSN_CODE (object) = -1;
251 /* If we are making a group of changes, return 1. Otherwise, validate the
252 change group we made. */
257 return apply_change_group ();
260 /* This subroutine of apply_change_group verifies whether the changes to INSN
261 were valid; i.e. whether INSN can still be recognized. */
264 insn_invalid_p (insn)
267 int icode = recog_memoized (insn);
268 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
/* An asm is invalid if its operands no longer satisfy their constraint
   strings; any other insn is invalid when recog failed on it.  */
270 if (is_asm && ! check_asm_operands (PATTERN (insn)))
272 if (! is_asm && icode < 0)
275 /* After reload, verify that all constraints are satisfied. */
276 if (reload_completed)
280 if (! constrain_operands (1))
287 /* Apply a group of changes previously issued with `validate_change'.
288 Return 1 if all changes are valid, zero otherwise. */
291 apply_change_group ()
295 /* The changes have been applied and all INSN_CODEs have been reset to force
298 The changes are valid if we aren't given an object, or if we are
299 given a MEM and it still is a valid address, or if this is in insn
300 and it is recognized. In the latter case, if reload has completed,
301 we also require that the operands meet the constraints for
304 for (i = 0; i < num_changes; i++)
306 rtx object = changes[i].object;
311 if (GET_CODE (object) == MEM)
313 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
316 else if (insn_invalid_p (object))
318 rtx pat = PATTERN (object);
320 /* Perhaps we couldn't recognize the insn because there were
321 extra CLOBBERs at the end. If so, try to re-recognize
322 without the last CLOBBER (later iterations will cause each of
323 them to be eliminated, in turn). But don't do this if we
324 have an ASM_OPERAND. */
325 if (GET_CODE (pat) == PARALLEL
326 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
327 && asm_noperands (PATTERN (object)) < 0)
/* A two-element PARALLEL collapses to its first element when the
   trailing CLOBBER is dropped; otherwise build a one-smaller PARALLEL.  */
331 if (XVECLEN (pat, 0) == 2)
332 newpat = XVECEXP (pat, 0, 0);
338 = gen_rtx_PARALLEL (VOIDmode,
339 rtvec_alloc (XVECLEN (pat, 0) - 1));
340 for (j = 0; j < XVECLEN (newpat, 0); j++)
341 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
344 /* Add a new change to this group to replace the pattern
345 with this new pattern. Then consider this change
346 as having succeeded. The change we added will
347 cause the entire call to fail if things remain invalid.
349 Note that this can lose if a later change than the one
350 we are processing specified &XVECEXP (PATTERN (object), 0, X)
351 but this shouldn't occur. */
353 validate_change (object, &PATTERN (object), newpat, 1);
355 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
356 /* If this insn is a CLOBBER or USE, it is always valid, but is
/* All changes checked out when the loop ran to completion.  */
364 if (i == num_changes)
376 /* Return the number of changes so far in the current group. */
379 num_validated_changes ()
384 /* Retract the changes numbered NUM and up. */
392 /* Back out all the changes. Do this in the opposite order in which
394 for (i = num_changes - 1; i >= num; i--)
396 *changes[i].loc = changes[i].old;
/* For insn objects, also restore the insn code that validate_change
   saved before forcing rerecognition.  */
397 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
398 INSN_CODE (changes[i].object) = changes[i].old_code;
403 /* Replace every occurrence of FROM in X with TO. Mark each change with
404 validate_change passing OBJECT. */
407 validate_replace_rtx_1 (loc, from, to, object)
409 rtx from, to, object;
412 register const char *fmt;
413 register rtx x = *loc;
419 /* X matches FROM if it is the same rtx or they are both referring to the
420 same register in the same mode. Avoid calling rtx_equal_p unless the
421 operands look similar. */
424 || (GET_CODE (x) == REG && GET_CODE (from) == REG
425 && GET_MODE (x) == GET_MODE (from)
426 && REGNO (x) == REGNO (from))
427 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
428 && rtx_equal_p (x, from)))
430 validate_change (object, loc, to, 1);
434 /* For commutative or comparison operations, try replacing each argument
435 separately and seeing if we made any changes. If so, put a constant
437 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
439 int prev_changes = num_changes;
441 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
442 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
/* Canonicalize: if a substitution left a constant as operand 0, swap
   the operands (reversing a comparison code where necessary).  */
443 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
445 validate_change (object, loc,
446 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
447 : swap_condition (code),
448 GET_MODE (x), XEXP (x, 1),
456 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
457 done the substitution, otherwise we won't. */
462 /* If we have a PLUS whose second operand is now a CONST_INT, use
463 plus_constant to try to simplify it. */
464 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
465 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
/* MINUS of a constant becomes a PLUS of its negation.  */
470 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
472 validate_change (object, loc,
473 plus_constant (XEXP (x, 0), - INTVAL (to)),
481 /* In these cases, the operation to be performed depends on the mode
482 of the operand. If we are replacing the operand with a VOIDmode
483 constant, we lose the information. So try to simplify the operation
485 if (GET_MODE (to) == VOIDmode
486 && (rtx_equal_p (XEXP (x, 0), from)
487 || (GET_CODE (XEXP (x, 0)) == SUBREG
488 && rtx_equal_p (SUBREG_REG (XEXP (x, 0)), from))))
492 /* If there is a subreg involved, crop to the portion of the
493 constant that we are interested in. */
494 if (GET_CODE (XEXP (x, 0)) == SUBREG)
496 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) <= UNITS_PER_WORD)
497 to = operand_subword (to, SUBREG_WORD (XEXP (x, 0)),
499 else if (GET_MODE_CLASS (GET_MODE (from)) == MODE_INT
500 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
501 <= HOST_BITS_PER_WIDE_INT))
/* Extract the relevant bits of the constant by hand: split it into
   a low/high word pair and shift the wanted piece into the low part.  */
503 int i = SUBREG_WORD (XEXP (x, 0)) * BITS_PER_WORD;
505 unsigned HOST_WIDE_INT vall;
507 if (GET_CODE (to) == CONST_INT)
510 valh = (HOST_WIDE_INT) vall < 0 ? ~0 : 0;
514 vall = CONST_DOUBLE_LOW (to);
515 valh = CONST_DOUBLE_HIGH (to);
518 if (WORDS_BIG_ENDIAN)
519 i = (GET_MODE_BITSIZE (GET_MODE (from))
520 - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - i);
521 if (i > 0 && i < HOST_BITS_PER_WIDE_INT)
522 vall = vall >> i | valh << (HOST_BITS_PER_WIDE_INT - i);
523 else if (i >= HOST_BITS_PER_WIDE_INT)
524 vall = valh >> (i - HOST_BITS_PER_WIDE_INT);
525 to = GEN_INT (trunc_int_for_mode (vall,
526 GET_MODE (XEXP (x, 0))));
/* A CLOBBER here is a deliberately unrecognizable placeholder that
   makes the whole change group fail.  */
529 to = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
532 /* If the above didn't fail, perform the extension from the
533 mode of the operand (and not the mode of FROM). */
535 new = simplify_unary_operation (code, GET_MODE (x), to,
536 GET_MODE (XEXP (x, 0)));
538 /* If any of the above failed, substitute in something that
539 we know won't be recognized. */
541 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
543 validate_change (object, loc, new, 1);
549 /* In case we are replacing by constant, attempt to simplify it to non-SUBREG
550 expression. We can't do this later, since the information about inner mode
552 if (CONSTANT_P (to) && rtx_equal_p (SUBREG_REG (x), from))
554 if (GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
555 && GET_MODE_SIZE (GET_MODE (from)) > UNITS_PER_WORD
556 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
558 rtx temp = operand_subword (to, SUBREG_WORD (x),
562 validate_change (object, loc, temp, 1);
566 if (subreg_lowpart_p (x))
568 rtx new = gen_lowpart_if_possible (GET_MODE (x), to);
571 validate_change (object, loc, new, 1);
576 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
577 since we are saying that the high bits don't matter. */
578 if (GET_MODE (to) == VOIDmode
579 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (from)))
581 validate_change (object, loc, to, 1);
586 /* Changing mode twice with SUBREG => just change it once,
587 or not at all if changing back to starting mode. */
588 if (GET_CODE (to) == SUBREG
589 && rtx_equal_p (SUBREG_REG (x), from))
591 if (GET_MODE (x) == GET_MODE (SUBREG_REG (to))
592 && SUBREG_WORD (x) == 0 && SUBREG_WORD (to) == 0)
594 validate_change (object, loc, SUBREG_REG (to), 1);
598 validate_change (object, loc,
599 gen_rtx_SUBREG (GET_MODE (x), SUBREG_REG (to),
600 SUBREG_WORD (x) + SUBREG_WORD (to)), 1);
604 /* If we have a SUBREG of a register that we are replacing and we are
605 replacing it with a MEM, make a new MEM and try replacing the
606 SUBREG with it. Don't do this if the MEM has a mode-dependent address
607 or if we would be widening it. */
609 if (GET_CODE (from) == REG
610 && GET_CODE (to) == MEM
611 && rtx_equal_p (SUBREG_REG (x), from)
612 && ! mode_dependent_address_p (XEXP (to, 0))
613 && ! MEM_VOLATILE_P (to)
614 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
616 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
617 enum machine_mode mode = GET_MODE (x);
620 if (BYTES_BIG_ENDIAN)
621 offset += (MIN (UNITS_PER_WORD,
622 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
623 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
625 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
626 MEM_COPY_ATTRIBUTES (new, to);
627 validate_change (object, loc, new, 1);
634 /* If we are replacing a register with memory, try to change the memory
635 to be the mode required for memory in extract operations (this isn't
636 likely to be an insertion operation; if it was, nothing bad will
637 happen, we might just fail in some cases). */
639 if (GET_CODE (from) == REG && GET_CODE (to) == MEM
640 && rtx_equal_p (XEXP (x, 0), from)
641 && GET_CODE (XEXP (x, 1)) == CONST_INT
642 && GET_CODE (XEXP (x, 2)) == CONST_INT
643 && ! mode_dependent_address_p (XEXP (to, 0))
644 && ! MEM_VOLATILE_P (to))
646 enum machine_mode wanted_mode = VOIDmode;
647 enum machine_mode is_mode = GET_MODE (to);
648 int pos = INTVAL (XEXP (x, 2));
/* Ask the extv/extzv insn patterns which operand mode they want;
   fall back to word_mode when the pattern leaves it unspecified.  */
651 if (code == ZERO_EXTRACT)
653 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
654 if (wanted_mode == VOIDmode)
655 wanted_mode = word_mode;
659 if (code == SIGN_EXTRACT)
661 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
662 if (wanted_mode == VOIDmode)
663 wanted_mode = word_mode;
667 /* If we have a narrower mode, we can do something. */
668 if (wanted_mode != VOIDmode
669 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
671 int offset = pos / BITS_PER_UNIT;
674 /* If the bytes and bits are counted differently, we
675 must adjust the offset. */
676 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
677 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
680 pos %= GET_MODE_BITSIZE (wanted_mode);
682 newmem = gen_rtx_MEM (wanted_mode,
683 plus_constant (XEXP (to, 0), offset));
684 MEM_COPY_ATTRIBUTES (newmem, to);
686 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
687 validate_change (object, &XEXP (x, 0), newmem, 1);
697 /* For commutative or comparison operations we've already performed
698 replacements. Don't try to perform them again. */
699 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
701 fmt = GET_RTX_FORMAT (code);
702 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
705 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
706 else if (fmt[i] == 'E')
707 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
708 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
713 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
714 with TO. After all changes have been made, validate by seeing
715 if INSN is still valid. */
718 validate_replace_rtx_subexp (from, to, insn, loc)
719 rtx from, to, insn, *loc;
721 validate_replace_rtx_1 (loc, from, to, insn);
722 return apply_change_group ();
725 /* Try replacing every occurrence of FROM in INSN with TO. After all
726 changes have been made, validate by seeing if INSN is still valid. */
729 validate_replace_rtx (from, to, insn)
732 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
733 return apply_change_group ();
736 /* Try replacing every occurrence of FROM in INSN with TO. */
/* Unlike validate_replace_rtx, this does not call apply_change_group,
   so the caller can add more changes to the group before applying.  */
739 validate_replace_rtx_group (from, to, insn)
742 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
745 /* Function called by note_uses to replace used subexpressions. */
/* Closure carrying FROM/TO/INSN through the note_uses callback.  */
746 struct validate_replace_src_data
752 validate_replace_src_1 (x, data)
756 struct validate_replace_src_data *d
757 = (struct validate_replace_src_data *) data;
759 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
762 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
763 SET_DESTs. After all changes have been made, validate by seeing if
764 INSN is still valid. */
767 validate_replace_src (from, to, insn)
770 struct validate_replace_src_data d;
775 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
776 return apply_change_group ();
780 /* Return 1 if the insn using CC0 set by INSN does not contain
781 any ordered tests applied to the condition codes.
782 EQ and NE tests do not count. */
785 next_insn_tests_no_inequality (insn)
788 register rtx next = next_cc0_user (insn);
790 /* If there is no next insn, we have to take the conservative choice. */
794 return ((GET_CODE (next) == JUMP_INSN
795 || GET_CODE (next) == INSN
796 || GET_CODE (next) == CALL_INSN)
797 && ! inequality_comparisons_p (PATTERN (next)));
/* Dead code below: kept for reference only, never compiled.  */
800 #if 0 /* This is useless since the insn that sets the cc's
801 must be followed immediately by the use of them. */
802 /* Return 1 if the CC value set up by INSN is not used. */
805 next_insns_test_no_inequality (insn)
808 register rtx next = NEXT_INSN (insn);
810 for (; next != 0; next = NEXT_INSN (next))
812 if (GET_CODE (next) == CODE_LABEL
813 || GET_CODE (next) == BARRIER)
815 if (GET_CODE (next) == NOTE)
817 if (inequality_comparisons_p (PATTERN (next)))
819 if (sets_cc0_p (PATTERN (next)) == 1)
821 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
829 /* This is used by find_single_use to locate an rtx that contains exactly one
830 use of DEST, which is typically either a REG or CC0. It returns a
831 pointer to the innermost rtx expression containing DEST. Appearances of
832 DEST that are being used to totally replace it are not counted. */
835 find_single_use_1 (dest, loc)
840 enum rtx_code code = GET_CODE (x);
857 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
858 of a REG that occupies all of the REG, the insn uses DEST if
859 it is mentioned in the destination or the source. Otherwise, we
860 need just check the source. */
861 if (GET_CODE (SET_DEST (x)) != CC0
862 && GET_CODE (SET_DEST (x)) != PC
863 && GET_CODE (SET_DEST (x)) != REG
864 && ! (GET_CODE (SET_DEST (x)) == SUBREG
865 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
866 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
867 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
868 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
869 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
872 return find_single_use_1 (dest, &SET_SRC (x));
876 return find_single_use_1 (dest, &XEXP (x, 0));
882 /* If it wasn't one of the common cases above, check each expression and
883 vector of this code. Look for a unique usage of DEST. */
885 fmt = GET_RTX_FORMAT (code);
886 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
/* DEST matches either by identity or as the same hard/pseudo REG.  */
890 if (dest == XEXP (x, i)
891 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
892 && REGNO (dest) == REGNO (XEXP (x, i))))
895 this_result = find_single_use_1 (dest, &XEXP (x, i));
898 result = this_result;
899 else if (this_result)
900 /* Duplicate usage. */
903 else if (fmt[i] == 'E')
907 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
909 if (XVECEXP (x, i, j) == dest
910 || (GET_CODE (dest) == REG
911 && GET_CODE (XVECEXP (x, i, j)) == REG
912 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
915 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
918 result = this_result;
919 else if (this_result)
928 /* See if DEST, produced in INSN, is used only a single time in the
929 sequel. If so, return a pointer to the innermost rtx expression in which
932 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
934 This routine will return usually zero either before flow is called (because
935 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
936 note can't be trusted).
938 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
939 care about REG_DEAD notes or LOG_LINKS.
941 Otherwise, we find the single use by finding an insn that has a
942 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
943 only referenced once in that insn, we know that it must be the first
944 and last insn referencing DEST. */
947 find_single_use (dest, insn, ploc)
/* CC0 case: only the immediately following insn can use it.  */
959 next = NEXT_INSN (insn);
961 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
964 result = find_single_use_1 (dest, &PATTERN (next));
/* General case only works for pseudo REGs between flow and reload.  */
971 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
974 for (next = next_nonnote_insn (insn);
975 next != 0 && GET_CODE (next) != CODE_LABEL;
976 next = next_nonnote_insn (next))
977 if (INSN_P (next) && dead_or_set_p (next, dest))
979 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
980 if (XEXP (link, 0) == insn)
985 result = find_single_use_1 (dest, &PATTERN (next));
995 /* Return 1 if OP is a valid general operand for machine mode MODE.
996 This is either a register reference, a memory reference,
997 or a constant. In the case of a memory reference, the address
998 is checked for general validity for the target machine.
1000 Register and memory references must have mode MODE in order to be valid,
1001 but some constants have no machine mode and are valid for any mode.
1003 If MODE is VOIDmode, OP is checked for validity for whatever mode
1006 The main use of this function is as a predicate in match_operand
1007 expressions in the machine description.
1009 For an explanation of this function's behavior for registers of
1010 class NO_REGS, see the comment for `register_operand'. */
1013 general_operand (op, mode)
1015 enum machine_mode mode;
1017 register enum rtx_code code = GET_CODE (op);
1018 int mode_altering_drug = 0;
1020 if (mode == VOIDmode)
1021 mode = GET_MODE (op);
1023 /* Don't accept CONST_INT or anything similar
1024 if the caller wants something floating. */
1025 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1026 && GET_MODE_CLASS (mode) != MODE_INT
1027 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1030 if (CONSTANT_P (op))
1031 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1032 || mode == VOIDmode)
1033 #ifdef LEGITIMATE_PIC_OPERAND_P
1034 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1036 && LEGITIMATE_CONSTANT_P (op));
1038 /* Except for certain constants with VOIDmode, already checked for,
1039 OP's mode must match MODE if MODE specifies a mode. */
1041 if (GET_MODE (op) != mode)
1046 #ifdef INSN_SCHEDULING
1047 /* On machines that have insn scheduling, we want all memory
1048 reference to be explicit, so outlaw paradoxical SUBREGs. */
1049 if (GET_CODE (SUBREG_REG (op)) == MEM
1050 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
/* Look through the SUBREG and judge the inner expression instead.  */
1054 op = SUBREG_REG (op);
1055 code = GET_CODE (op);
1057 /* No longer needed, since (SUBREG (MEM...))
1058 will load the MEM into a reload reg in the MEM's own mode. */
1059 mode_altering_drug = 1;
1064 /* A register whose class is NO_REGS is not a general operand. */
1065 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1066 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
1070 register rtx y = XEXP (op, 0);
1072 if (! volatile_ok && MEM_VOLATILE_P (op))
1075 if (GET_CODE (y) == ADDRESSOF)
1078 /* Use the mem's mode, since it will be reloaded thus. */
1079 mode = GET_MODE (op);
1080 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
1083 /* Pretend this is an operand for now; we'll run force_operand
1084 on its replacement in fixup_var_refs_1. */
1085 if (code == ADDRESSOF)
1091 if (mode_altering_drug)
1092 return ! mode_dependent_address_p (XEXP (op, 0));
1096 /* Return 1 if OP is a valid memory address for a memory reference
1099 The main use of this function is as a predicate in match_operand
1100 expressions in the machine description. */
1103 address_operand (op, mode)
1105 enum machine_mode mode;
/* Delegates entirely to memory_address_p (defined later in this file).  */
1107 return memory_address_p (mode, op);
1110 /* Return 1 if OP is a register reference of mode MODE.
1111 If MODE is VOIDmode, accept a register in any mode.
1113 The main use of this function is as a predicate in match_operand
1114 expressions in the machine description.
1116 As a special exception, registers whose class is NO_REGS are
1117 not accepted by `register_operand'. The reason for this change
1118 is to allow the representation of special architecture artifacts
1119 (such as a condition code register) without extending the rtl
1120 definitions. Since registers of class NO_REGS cannot be used
1121 as registers in any case where register classes are examined,
1122 it is most consistent to keep this function from accepting them. */
1125 register_operand (op, mode)
1127 enum machine_mode mode;
1129 if (GET_MODE (op) != mode && mode != VOIDmode)
1132 if (GET_CODE (op) == SUBREG)
1134 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1135 because it is guaranteed to be reloaded into one.
1136 Just make sure the MEM is valid in itself.
1137 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1138 but currently it does result from (SUBREG (REG)...) where the
1139 reg went on the stack.) */
1140 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1141 return general_operand (op, mode);
1143 #ifdef CLASS_CANNOT_CHANGE_MODE
/* Reject a SUBREG of a hard register whose class forbids the implied
   mode change (complex modes excepted).  */
1144 if (GET_CODE (SUBREG_REG (op)) == REG
1145 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1146 && (TEST_HARD_REG_BIT
1147 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
1148 REGNO (SUBREG_REG (op))))
1149 && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
1150 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1151 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1155 op = SUBREG_REG (op);
1158 /* If we have an ADDRESSOF, consider it valid since it will be
1159 converted into something that will not be a MEM. */
1160 if (GET_CODE (op) == ADDRESSOF)
1163 /* We don't consider registers whose class is NO_REGS
1164 to be a register operand. */
1165 return (GET_CODE (op) == REG
1166 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1167 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1170 /* Return 1 for a register in Pmode; ignore the tested mode. */
1173 pmode_register_operand (op, mode)
1175 enum machine_mode mode ATTRIBUTE_UNUSED;
1177 return register_operand (op, Pmode);
1180 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1181 or a hard register. */
1184 scratch_operand (op, mode)
1186 enum machine_mode mode;
1188 if (GET_MODE (op) != mode && mode != VOIDmode)
/* Only hard registers qualify; pseudos are rejected here.  */
1191 return (GET_CODE (op) == SCRATCH
1192 || (GET_CODE (op) == REG
1193 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1196 /* Return 1 if OP is a valid immediate operand for mode MODE.
1198 The main use of this function is as a predicate in match_operand
1199 expressions in the machine description. */
1202 immediate_operand (op, mode)
1204 enum machine_mode mode;
1206 /* Don't accept CONST_INT or anything similar
1207 if the caller wants something floating. */
1208 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1209 && GET_MODE_CLASS (mode) != MODE_INT
1210 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1213 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1214 result in 0/1. It seems a safe assumption that this is
1215 in range for everyone. */
1216 if (GET_CODE (op) == CONSTANT_P_RTX)
1219 return (CONSTANT_P (op)
1220 && (GET_MODE (op) == mode || mode == VOIDmode
1221 || GET_MODE (op) == VOIDmode)
1222 #ifdef LEGITIMATE_PIC_OPERAND_P
1223 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1225 && LEGITIMATE_CONSTANT_P (op));
1228 /* Returns 1 if OP is an operand that is a CONST_INT. */
1231 const_int_operand (op, mode)
1233 enum machine_mode mode ATTRIBUTE_UNUSED;
1235 return GET_CODE (op) == CONST_INT;
1238 /* Returns 1 if OP is an operand that is a constant integer or constant
1239 floating-point number. */
1242 const_double_operand (op, mode)
1244 enum machine_mode mode;
1246 /* Don't accept CONST_INT or anything similar
1247 if the caller wants something floating. */
1248 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1249 && GET_MODE_CLASS (mode) != MODE_INT
1250 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1253 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1254 && (mode == VOIDmode || GET_MODE (op) == mode
1255 || GET_MODE (op) == VOIDmode));
1258 /* Return 1 if OP is a general operand that is not an immediate operand. */
1261 nonimmediate_operand (op, mode)
1263 enum machine_mode mode;
1265 return (general_operand (op, mode) && ! CONSTANT_P (op));
1268 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1271 nonmemory_operand (op, mode)
1273 enum machine_mode mode;
/* Constant case mirrors the checks in general_operand/immediate_operand.  */
1275 if (CONSTANT_P (op))
1277 /* Don't accept CONST_INT or anything similar
1278 if the caller wants something floating. */
1279 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1280 && GET_MODE_CLASS (mode) != MODE_INT
1281 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1284 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1285 || mode == VOIDmode)
1286 #ifdef LEGITIMATE_PIC_OPERAND_P
1287 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1289 && LEGITIMATE_CONSTANT_P (op));
1292 if (GET_MODE (op) != mode && mode != VOIDmode)
1295 if (GET_CODE (op) == SUBREG)
1297 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1298 because it is guaranteed to be reloaded into one.
1299 Just make sure the MEM is valid in itself.
1300 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1301 but currently it does result from (SUBREG (REG)...) where the
1302 reg went on the stack.) */
1303 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1304 return general_operand (op, mode)
1305 op = SUBREG_REG (op);
1308 /* We don't consider registers whose class is NO_REGS
1309 to be a register operand. */
1310 return (GET_CODE (op) == REG
1311 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1312 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1315 /* Return 1 if OP is a valid operand that stands for pushing a
1316 value of mode MODE onto the stack.
1318 The main use of this function is as a predicate in match_operand
1319 expressions in the machine description. */
1322 push_operand (op, mode)
1324 enum machine_mode mode;
/* A push is a MEM of the right mode whose address is the target's
   STACK_PUSH_CODE (PRE_DEC or PRE_INC, see the #ifdefs in HEAD)
   applied to the stack pointer.  Listing note: the intermediate
   `return 0;` lines and the `op = XEXP (op, 0);` step appear dropped.  */
1326 if (GET_CODE (op) != MEM)
1329 if (mode != VOIDmode && GET_MODE (op) != mode)
1334 if (GET_CODE (op) != STACK_PUSH_CODE)
1337 return XEXP (op, 0) == stack_pointer_rtx;
1340 /* Return 1 if OP is a valid operand that stands for popping a
1341 value of mode MODE off the stack.
1343 The main use of this function is as a predicate in match_operand
1344 expressions in the machine description. */
1347 pop_operand (op, mode)
1349 enum machine_mode mode;
/* Mirror image of push_operand: the address must use STACK_POP_CODE
   (POST_INC or POST_DEC) on the stack pointer.  */
1351 if (GET_CODE (op) != MEM)
1354 if (mode != VOIDmode && GET_MODE (op) != mode)
1359 if (GET_CODE (op) != STACK_POP_CODE)
1362 return XEXP (op, 0) == stack_pointer_rtx;
1365 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1368 memory_address_p (mode, addr)
1369 enum machine_mode mode ATTRIBUTE_UNUSED;
/* ADDRESSOF is always considered valid; otherwise defer to the target's
   GO_IF_LEGITIMATE_ADDRESS macro, which jumps to a `win:` label (dropped
   from this listing along with the default `return 0;`).  */
1372 if (GET_CODE (addr) == ADDRESSOF)
1375 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1382 /* Return 1 if OP is a valid memory reference with mode MODE,
1383 including a valid address.
1385 The main use of this function is as a predicate in match_operand
1386 expressions in the machine description. */
1389 memory_operand (op, mode)
1391 enum machine_mode mode;
1395 if (! reload_completed)
1396 /* Note that no SUBREG is a memory operand before end of reload pass,
1397 because (SUBREG (MEM...)) forces reloading into a register. */
1398 return GET_CODE (op) == MEM && general_operand (op, mode);
1400 if (mode != VOIDmode && GET_MODE (op) != mode)
/* After reload, look through a SUBREG wrapper: a (SUBREG (MEM)) that
   survived reload still counts as memory.  `inner` is initialized from
   OP on a line dropped from this listing.  */
1404 if (GET_CODE (inner) == SUBREG)
1405 inner = SUBREG_REG (inner);
1407 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1410 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1411 that is, a memory reference whose address is a general_operand. */
1414 indirect_operand (op, mode)
1416 enum machine_mode mode;
1418 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1419 if (! reload_completed
1420 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1422 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1423 rtx inner = SUBREG_REG (op);
/* On big-endian targets the subword actually referenced sits at the
   high end of the inner object, so correct the byte offset.  */
1425 if (BYTES_BIG_ENDIAN)
1426 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1427 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1429 if (mode != VOIDmode && GET_MODE (op) != mode)
1432 /* The only way that we can have a general_operand as the resulting
1433 address is if OFFSET is zero and the address already is an operand
1434 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1437 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1438 || (GET_CODE (XEXP (inner, 0)) == PLUS
1439 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1440 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1441 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
/* Ordinary case: a MEM whose address is itself a general operand.  */
1444 return (GET_CODE (op) == MEM
1445 && memory_operand (op, mode)
1446 && general_operand (XEXP (op, 0), Pmode));
1449 /* Return 1 if this is a comparison operator. This allows the use of
1450 MATCH_OPERATOR to recognize all the branch insns. */
1453 comparison_operator (op, mode)
1455 enum machine_mode mode;
/* RTX class '<' covers all comparison codes (EQ, NE, LT, GTU, ...).  */
1457 return ((mode == VOIDmode || GET_MODE (op) == mode)
1458 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1461 /* If BODY is an insn body that uses ASM_OPERANDS,
1462 return the number of operands (both input and output) in the insn.
1463 Otherwise return -1. */
/* NOTE(review): the switch's case labels (ASM_OPERANDS, SET, PARALLEL,
   default) and several `return -1;` / brace lines are missing from this
   sampled listing; the dispatch below must be read accordingly.  */
1466 asm_noperands (body)
1469 switch (GET_CODE (body))
1472 /* No output operands: return number of input operands. */
1473 return ASM_OPERANDS_INPUT_LENGTH (body);
1475 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1476 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1477 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1481 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1482 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1484 /* Multiple output operands, or 1 output plus some clobbers:
1485 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1489 /* Count backwards through CLOBBERs to determine number of SETs. */
1490 for (i = XVECLEN (body, 0); i > 0; i--)
1492 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1494 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1498 /* N_SETS is now number of output operands. */
1501 /* Verify that all the SETs we have
1502 came from a single original asm_operands insn
1503 (so that invalid combinations are blocked). */
1504 for (i = 0; i < n_sets; i++)
1506 rtx elt = XVECEXP (body, 0, i);
1507 if (GET_CODE (elt) != SET)
1509 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1511 /* If these ASM_OPERANDS rtx's came from different original insns
1512 then they aren't allowed together. */
1513 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1514 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
/* Total operand count = inputs of the common ASM_OPERANDS plus the
   number of output SETs (second addend dropped from the listing).  */
1517 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1520 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1522 /* 0 outputs, but some clobbers:
1523 body is [(asm_operands ...) (clobber (reg ...))...]. */
1526 /* Make sure all the other parallel things really are clobbers. */
1527 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1528 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1531 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1540 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1541 copy its operands (both input and output) into the vector OPERANDS,
1542 the locations of the operands within the insn into the vector OPERAND_LOCS,
1543 and the constraints for the operands into CONSTRAINTS.
1544 Write the modes of the operands into MODES.
1545 Return the assembler-template.
1547 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1548 we don't store that info. */
/* NOTE(review): each store below is guarded by an `if (operands)`-style
   null check on a line dropped from this sampled listing; the parameter
   declarations for BODY/OPERANDS/OPERAND_LOCS are likewise missing.  */
1551 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1555 const char **constraints;
1556 enum machine_mode *modes;
1560 const char *template = 0;
1562 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1564 rtx asmop = SET_SRC (body);
1565 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1567 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
/* Operand 0 is the output; inputs are shifted up by one.  */
1569 for (i = 1; i < noperands; i++)
1572 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1574 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1576 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1578 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1581 /* The output is in the SET.
1582 Its constraint is in the ASM_OPERANDS itself. */
1584 operands[0] = SET_DEST (body);
1586 operand_locs[0] = &SET_DEST (body);
1588 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1590 modes[0] = GET_MODE (SET_DEST (body));
1591 template = ASM_OPERANDS_TEMPLATE (asmop);
1593 else if (GET_CODE (body) == ASM_OPERANDS)
1596 /* No output operands: BODY is (asm_operands ....). */
1598 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1600 /* The input operands are found in the 1st element vector. */
1601 /* Constraints for inputs are in the 2nd element vector. */
1602 for (i = 0; i < noperands; i++)
1605 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1607 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1609 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1611 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1613 template = ASM_OPERANDS_TEMPLATE (asmop);
1615 else if (GET_CODE (body) == PARALLEL
1616 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1618 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1619 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1620 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1621 int nout = 0; /* Does not include CLOBBERs. */
1623 /* At least one output, plus some CLOBBERs. */
1625 /* The outputs are in the SETs.
1626 Their constraints are in the ASM_OPERANDS itself. */
1627 for (i = 0; i < nparallel; i++)
1629 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1630 break; /* Past last SET */
1633 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1635 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1637 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1639 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
/* Inputs follow the NOUT outputs in the flat operand arrays.  */
1643 for (i = 0; i < nin; i++)
1646 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1648 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1650 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1652 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1655 template = ASM_OPERANDS_TEMPLATE (asmop);
1657 else if (GET_CODE (body) == PARALLEL
1658 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1660 /* No outputs, but some CLOBBERs. */
1662 rtx asmop = XVECEXP (body, 0, 0);
1663 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1665 for (i = 0; i < nin; i++)
1668 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1670 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1672 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1674 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1677 template = ASM_OPERANDS_TEMPLATE (asmop);
1683 /* Check if an asm_operand matches it's constraints.
1684 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
/* NOTE(review): the constraint-letter switch's `case` labels and the
   `return 1;` / `break;` lines inside it are largely missing from this
   sampled listing; each test below belongs to one constraint letter.  */
1687 asm_operand_ok (op, constraint)
1689 const char *constraint;
1693 /* Use constrain_operands after reload. */
1694 if (reload_completed)
1699 char c = *constraint++;
1713 case '0': case '1': case '2': case '3': case '4':
1714 case '5': case '6': case '7': case '8': case '9':
1715 /* For best results, our caller should have given us the
1716 proper matching constraint, but we can't actually fail
1717 the check if they didn't. Indicate that results are
/* 'p': any valid address.  */
1723 if (address_operand (op, VOIDmode))
1728 case 'V': /* non-offsettable */
1729 if (memory_operand (op, VOIDmode))
1733 case 'o': /* offsettable */
1734 if (offsettable_nonstrict_memref_p (op))
1739 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1740 excepting those that expand_call created. Further, on some
1741 machines which do not have generalized auto inc/dec, an inc/dec
1742 is not a memory_operand.
1744 Match any memory and hope things are resolved after reload. */
1746 if (GET_CODE (op) == MEM
1748 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1749 || GET_CODE (XEXP (op, 0)) == POST_DEC))
/* '>': memory with pre/post-increment address.  */
1754 if (GET_CODE (op) == MEM
1756 || GET_CODE (XEXP (op, 0)) == PRE_INC
1757 || GET_CODE (XEXP (op, 0)) == POST_INC))
1762 #ifndef REAL_ARITHMETIC
1763 /* Match any floating double constant, but only if
1764 we can examine the bits of it reliably. */
1765 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1766 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1767 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1773 if (GET_CODE (op) == CONST_DOUBLE)
1778 if (GET_CODE (op) == CONST_DOUBLE
1779 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1783 if (GET_CODE (op) == CONST_DOUBLE
1784 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
/* 's'/'i' family: integral constants; a VOIDmode CONST_DOUBLE is a
   wide integer constant, not a float.  */
1789 if (GET_CODE (op) == CONST_INT
1790 || (GET_CODE (op) == CONST_DOUBLE
1791 && GET_MODE (op) == VOIDmode))
1797 #ifdef LEGITIMATE_PIC_OPERAND_P
1798 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1805 if (GET_CODE (op) == CONST_INT
1806 || (GET_CODE (op) == CONST_DOUBLE
1807 && GET_MODE (op) == VOIDmode))
/* 'I'..'P': target-defined ranges of integer constants.  */
1812 if (GET_CODE (op) == CONST_INT
1813 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1817 if (GET_CODE (op) == CONST_INT
1818 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1822 if (GET_CODE (op) == CONST_INT
1823 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1827 if (GET_CODE (op) == CONST_INT
1828 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1832 if (GET_CODE (op) == CONST_INT
1833 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1837 if (GET_CODE (op) == CONST_INT
1838 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1842 if (GET_CODE (op) == CONST_INT
1843 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1847 if (GET_CODE (op) == CONST_INT
1848 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
/* 'g': any general operand.  */
1856 if (general_operand (op, VOIDmode))
1861 /* For all other letters, we first check for a register class,
1862 otherwise it is an EXTRA_CONSTRAINT. */
1863 if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
1866 if (GET_MODE (op) == BLKmode)
1868 if (register_operand (op, VOIDmode))
1871 #ifdef EXTRA_CONSTRAINT
1872 if (EXTRA_CONSTRAINT (op, c))
1882 /* Given an rtx *P, if it is a sum containing an integer constant term,
1883 return the location (type rtx *) of the pointer to that constant term.
1884 Otherwise, return a null pointer. */
/* Recursive helper (static, see the prototypes in HEAD); the `return`
   statements of the base cases are partly dropped from this listing.  */
1887 find_constant_term_loc (p)
1891 register enum rtx_code code = GET_CODE (*p);
1893 /* If *P IS such a constant term, P is its location. */
1895 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1899 /* Otherwise, if not a sum, it has no constant term. */
1901 if (GET_CODE (*p) != PLUS)
1904 /* If one of the summands is constant, return its location. */
1906 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1907 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1910 /* Otherwise, check each summand for containing a constant term. */
1912 if (XEXP (*p, 0) != 0)
1914 tem = find_constant_term_loc (&XEXP (*p, 0));
1919 if (XEXP (*p, 1) != 0)
1921 tem = find_constant_term_loc (&XEXP (*p, 1));
1929 /* Return 1 if OP is a memory reference
1930 whose address contains no side effects
1931 and remains valid after the addition
1932 of a positive integer less than the
1933 size of the object being referenced.
1935 We assume that the original address is valid and do not check it.
1937 This uses strict_memory_address_p as a subroutine, so
1938 don't use it before reload. */
1941 offsettable_memref_p (op)
/* Strict variant: delegates with STRICTP = 1.  */
1944 return ((GET_CODE (op) == MEM)
1945 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1948 /* Similar, but don't require a strictly valid mem ref:
1949 consider pseudo-regs valid as index or base regs. */
1952 offsettable_nonstrict_memref_p (op)
/* Non-strict variant: delegates with STRICTP = 0.  */
1955 return ((GET_CODE (op) == MEM)
1956 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1959 /* Return 1 if Y is a memory address which contains no side effects
1960 and would remain valid after the addition of a positive integer
1961 less than the size of that mode.
1963 We assume that the original address is valid and do not check it.
1964 We do check that it is valid for narrower modes.
1966 If STRICTP is nonzero, we require a strictly valid address,
1967 for the sake of use in reload.c. */
1970 offsettable_address_p (strictp, mode, y)
1972 enum machine_mode mode;
1975 register enum rtx_code ycode = GET_CODE (y);
/* Choose which address validator to use based on STRICTP.  */
1979 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1980 (strictp ? strict_memory_address_p : memory_address_p);
1981 unsigned int mode_sz = GET_MODE_SIZE (mode);
1983 if (CONSTANT_ADDRESS_P (y))
1986 /* Adjusting an offsettable address involves changing to a narrower mode.
1987 Make sure that's OK. */
1989 if (mode_dependent_address_p (y))
1992 /* ??? How much offset does an offsettable BLKmode reference need?
1993 Clearly that depends on the situation in which it's being used.
1994 However, the current situation in which we test 0xffffffff is
1995 less than ideal. Caveat user. */
1997 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1999 /* If the expression contains a constant term,
2000 see if it remains valid when max possible offset is added. */
/* Temporarily bump the constant term in place, validate, then restore
   the saved value (the save/restore lines are dropped from the listing).  */
2002 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2007 *y2 = plus_constant (*y2, mode_sz - 1);
2008 /* Use QImode because an odd displacement may be automatically invalid
2009 for any wider mode. But it should be valid for a single byte. */
2010 good = (*addressp) (QImode, y);
2012 /* In any case, restore old contents of memory. */
/* Addresses with side effects (class 'a': autoincrement etc.) can
   never be offset.  */
2017 if (GET_RTX_CLASS (ycode) == 'a')
2020 /* The offset added here is chosen as the maximum offset that
2021 any instruction could need to add when operating on something
2022 of the specified mode. We assume that if Y and Y+c are
2023 valid addresses then so is Y+d for all 0<d<c. */
2025 z = plus_constant_for_output (y, mode_sz - 1);
2027 /* Use QImode because an odd displacement may be automatically invalid
2028 for any wider mode. But it should be valid for a single byte. */
2029 return (*addressp) (QImode, z);
2032 /* Return 1 if ADDR is an address-expression whose effect depends
2033 on the mode of the memory reference it is used in.
2035 Autoincrement addressing is a typical example of mode-dependence
2036 because the amount of the increment depends on the mode. */
2039 mode_dependent_address_p (addr)
2040 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
/* The target macro jumps to `win` when the address IS mode-dependent;
   the fall-through `return 0;` is dropped from this listing.  */
2042 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2044 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2045 win: ATTRIBUTE_UNUSED_LABEL
2049 /* Return 1 if OP is a general operand
2050 other than a memory ref with a mode dependent address. */
2053 mode_independent_operand (op, mode)
2054 enum machine_mode mode;
2059 if (! general_operand (op, mode))
/* Non-memory general operands are trivially mode-independent.  */
2062 if (GET_CODE (op) != MEM)
2065 addr = XEXP (op, 0);
2066 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2068 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2069 lose: ATTRIBUTE_UNUSED_LABEL
2073 /* Given an operand OP that is a valid memory reference which
2074 satisfies offsettable_memref_p, return a new memory reference whose
2075 address has been adjusted by OFFSET. OFFSET should be positive and
2076 less than the size of the object referenced. */
2079 adj_offsettable_operand (op, offset)
2083 register enum rtx_code code = GET_CODE (op);
2087 register rtx y = XEXP (op, 0);
/* Simple case: a constant address can be offset directly.  */
2090 if (CONSTANT_ADDRESS_P (y))
2092 new = gen_rtx_MEM (GET_MODE (op),
2093 plus_constant_for_output (y, offset));
2094 MEM_COPY_ATTRIBUTES (new, op);
/* (plus base const): offset the constant term in a copied address so the
   original rtx is not mutated (the copy step is dropped from the listing).  */
2098 if (GET_CODE (y) == PLUS)
2101 register rtx *const_loc;
2105 const_loc = find_constant_term_loc (&z);
2108 *const_loc = plus_constant_for_output (*const_loc, offset);
/* Fallback: wrap the whole address in a new PLUS.  */
2113 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
2114 MEM_COPY_ATTRIBUTES (new, op);
2120 /* Like extract_insn, but save insn extracted and don't extract again, when
2121 called again for the same insn expecting that recog_data still contain the
2122 valid information. This is used primary by gen_attr infrastructure that
2123 often does extract insn again and again. */
2125 extract_insn_cached (insn)
/* Cache hit: same insn already extracted and recognized -- the early
   `return;` for this test is dropped from the listing.  */
2128 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2130 extract_insn (insn);
2131 recog_data.insn = insn;
2133 /* Do cached extract_insn, constrain_operand and complain about failures.
2134 Used by insn_attrtab. */
2136 extract_constrain_insn_cached (insn)
2139 extract_insn_cached (insn);
/* which_alternative == -1 means no alternative has been matched yet;
   a constraint failure here is a fatal internal error.  */
2140 if (which_alternative == -1
2141 && !constrain_operands (reload_completed))
2142 fatal_insn_not_found (insn);
2144 /* Do cached constrain_operand and complain about failures. */
2146 constrain_operands_cached (strict)
/* Reuse a previously matched alternative if one is cached; the
   `else return 1;` branch is dropped from this listing.  */
2149 if (which_alternative == -1)
2150 return constrain_operands (strict);
2155 /* Analyze INSN and fill in recog_data. */
/* NOTE(review): the function header, local declarations (i, icode,
   noperands) and the switch's `case` labels (ASM_INPUT, SET, PARALLEL,
   ASM_OPERANDS, default...) are dropped from this sampled listing.  */
2164 rtx body = PATTERN (insn);
2166 recog_data.insn = NULL;
2167 recog_data.n_operands = 0;
2168 recog_data.n_alternatives = 0;
2169 recog_data.n_dups = 0;
2170 which_alternative = -1;
2172 switch (GET_CODE (body))
2182 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2187 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2188 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2189 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
/* asm with operands: count and decode them out of BODY.  */
2195 recog_data.n_operands = noperands = asm_noperands (body);
2198 /* This insn is an `asm' with operands. */
2200 /* expand_asm_operands makes sure there aren't too many operands. */
2201 if (noperands > MAX_RECOG_OPERANDS)
2204 /* Now get the operand values and constraints out of the insn. */
2205 decode_asm_operands (body, recog_data.operand,
2206 recog_data.operand_loc,
2207 recog_data.constraints,
2208 recog_data.operand_mode);
/* Number of alternatives = 1 + number of commas in operand 0's
   constraint string.  */
2211 const char *p = recog_data.constraints[0];
2212 recog_data.n_alternatives = 1;
2214 recog_data.n_alternatives += (*p++ == ',');
2218 fatal_insn_not_found (insn);
2222 /* Ordinary insn: recognize it, get the operands via insn_extract
2223 and get the constraints. */
2225 icode = recog_memoized (insn);
2227 fatal_insn_not_found (insn);
2229 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2230 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2231 recog_data.n_dups = insn_data[icode].n_dups;
2233 insn_extract (insn);
2235 for (i = 0; i < noperands; i++)
2237 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2238 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2239 /* VOIDmode match_operands gets mode from their real operand. */
2240 if (recog_data.operand_mode[i] == VOIDmode)
2241 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
/* Classify each operand from the leading constraint character:
   '=' is write-only, '+' is read-write, anything else is input.  */
2244 for (i = 0; i < noperands; i++)
2245 recog_data.operand_type[i]
2246 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2247 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2250 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2254 /* After calling extract_insn, you can use this function to extract some
2255 information from the constraint strings into a more usable form.
2256 The collected data is stored in recog_op_alt. */
/* NOTE(review): the `case` labels of the constraint-letter switch
   ('m', '<', '>', 'V', 'o', 'g'/'r', 'p', default, ...) are dropped
   from this sampled listing; each assignment below belongs to one.  */
2258 preprocess_constraints ()
2262 memset (recog_op_alt, 0, sizeof recog_op_alt);
2263 for (i = 0; i < recog_data.n_operands; i++)
2266 struct operand_alternative *op_alt;
2267 const char *p = recog_data.constraints[i];
2269 op_alt = recog_op_alt[i];
2271 for (j = 0; j < recog_data.n_alternatives; j++)
2273 op_alt[j].class = NO_REGS;
2274 op_alt[j].constraint = p;
2275 op_alt[j].matches = -1;
2276 op_alt[j].matched = -1;
/* An empty constraint/alternative accepts anything.  */
2278 if (*p == '\0' || *p == ',')
2280 op_alt[j].anything_ok = 1;
2290 while (c != ',' && c != '\0');
2291 if (c == ',' || c == '\0')
2296 case '=': case '+': case '*': case '%':
2297 case 'E': case 'F': case 'G': case 'H':
2298 case 's': case 'i': case 'n':
2299 case 'I': case 'J': case 'K': case 'L':
2300 case 'M': case 'N': case 'O': case 'P':
2301 /* These don't say anything we care about. */
/* '?' adds a mild penalty, '!' a severe one, to this alternative.  */
2305 op_alt[j].reject += 6;
2308 op_alt[j].reject += 600;
2311 op_alt[j].earlyclobber = 1;
2314 case '0': case '1': case '2': case '3': case '4':
2315 case '5': case '6': case '7': case '8': case '9':
2316 op_alt[j].matches = c - '0';
2317 recog_op_alt[op_alt[j].matches][j].matched = i;
2321 op_alt[j].memory_ok = 1;
2324 op_alt[j].decmem_ok = 1;
2327 op_alt[j].incmem_ok = 1;
2330 op_alt[j].nonoffmem_ok = 1;
2333 op_alt[j].offmem_ok = 1;
2336 op_alt[j].anything_ok = 1;
2340 op_alt[j].is_address = 1;
2341 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2345 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2349 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2357 /* Check the operands of an insn against the insn's operand constraints
2358 and return 1 if they are valid.
2359 The information about the insn's operands, constraints, operand modes
2360 etc. is obtained from the global variables set up by extract_insn.
2362 WHICH_ALTERNATIVE is set to a number which indicates which
2363 alternative of constraints was matched: 0 for the first alternative,
2364 1 for the next, etc.
2366 In addition, when two operands are match
2367 and it happens that the output operand is (reg) while the
2368 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2369 make the output operand look like the input.
2370 This is because the output operand is the one the template will print.
2372 This is used in final, just before printing the assembler code and by
2373 the routines that determine an insn's attribute.
2375 If STRICT is a positive non-zero value, it means that we have been
2376 called after reload has been completed. In that case, we must
2377 do all checks strictly. If it is zero, it means that we have been called
2378 before reload has completed. In that case, we first try to see if we can
2379 find an alternative that matches strictly. If not, we try again, this
2380 time assuming that reload will fix up the insn. This provides a "best
2381 guess" for the alternative and is used to compute attributes of insns prior
2382 to reload. A negative value of STRICT is used for this internal call. */
/* NOTE(review): this sampled listing drops many structural lines of this
   function (the do/while alternative loop header, most constraint `case`
   labels, `win`/`lose` control, and several `break;`s).  Leave the code
   untouched; only the surviving lines are annotated below.  */
2390 constrain_operands (strict)
2393 const char *constraints[MAX_RECOG_OPERANDS];
2394 int matching_operands[MAX_RECOG_OPERANDS];
2395 int earlyclobber[MAX_RECOG_OPERANDS];
2398 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2399 int funny_match_index;
2401 which_alternative = 0;
/* Insns with no operands or no constraints trivially match.  */
2402 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2405 for (c = 0; c < recog_data.n_operands; c++)
2407 constraints[c] = recog_data.constraints[c];
2408 matching_operands[c] = -1;
2415 funny_match_index = 0;
2417 for (opno = 0; opno < recog_data.n_operands; opno++)
2419 register rtx op = recog_data.operand[opno];
2420 enum machine_mode mode = GET_MODE (op);
2421 register const char *p = constraints[opno];
2426 earlyclobber[opno] = 0;
2428 /* A unary operator may be accepted by the predicate, but it
2429 is irrelevant for matching constraints. */
2430 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
/* Look through a SUBREG of a hard reg, remembering the word offset
   so reg_fits_class_p can check the right hard register.  */
2433 if (GET_CODE (op) == SUBREG)
2435 if (GET_CODE (SUBREG_REG (op)) == REG
2436 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2437 offset = SUBREG_WORD (op);
2438 op = SUBREG_REG (op);
2441 /* An empty constraint or empty alternative
2442 allows anything which matched the pattern. */
2443 if (*p == 0 || *p == ',')
2446 while (*p && (c = *p++) != ',')
2449 case '?': case '!': case '*': case '%':
2454 /* Ignore rest of this alternative as far as
2455 constraint checking is concerned. */
2456 while (*p && *p != ',')
2461 earlyclobber[opno] = 1;
2464 case '0': case '1': case '2': case '3': case '4':
2465 case '5': case '6': case '7': case '8': case '9':
2467 /* This operand must be the same as a previous one.
2468 This kind of constraint is used for instructions such
2469 as add when they take only two operands.
2471 Note that the lower-numbered operand is passed first.
2473 If we are not testing strictly, assume that this constraint
2474 will be satisfied. */
2479 rtx op1 = recog_data.operand[c - '0'];
2480 rtx op2 = recog_data.operand[opno];
2482 /* A unary operator may be accepted by the predicate,
2483 but it is irrelevant for matching constraints. */
2484 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2485 op1 = XEXP (op1, 0);
2486 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2487 op2 = XEXP (op2, 0);
2489 val = operands_match_p (op1, op2);
2492 matching_operands[opno] = c - '0';
2493 matching_operands[c - '0'] = opno;
2497 /* If output is *x and input is *--x,
2498 arrange later to change the output to *--x as well,
2499 since the output op is the one that will be printed. */
2500 if (val == 2 && strict > 0)
2502 funny_match[funny_match_index].this = opno;
2503 funny_match[funny_match_index++].other = c - '0';
2508 /* p is used for address_operands. When we are called by
2509 gen_reload, no one will have checked that the address is
2510 strictly valid, i.e., that all pseudos requiring hard regs
2511 have gotten them. */
2513 || (strict_memory_address_p (recog_data.operand_mode[opno],
2518 /* No need to check general_operand again;
2519 it was done in insn-recog.c. */
2521 /* Anything goes unless it is a REG and really has a hard reg
2522 but the hard reg is not in the class GENERAL_REGS. */
2524 || GENERAL_REGS == ALL_REGS
2525 || GET_CODE (op) != REG
2526 || (reload_in_progress
2527 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2528 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2533 /* This is used for a MATCH_SCRATCH in the cases when
2534 we don't actually need anything. So anything goes
/* 'm': memory operand (with pre-reload / in-reload relaxations).  */
2540 if (GET_CODE (op) == MEM
2541 /* Before reload, accept what reload can turn into mem. */
2542 || (strict < 0 && CONSTANT_P (op))
2543 /* During reload, accept a pseudo */
2544 || (reload_in_progress && GET_CODE (op) == REG
2545 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* '<': auto-decrement memory.  */
2550 if (GET_CODE (op) == MEM
2551 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2552 || GET_CODE (XEXP (op, 0)) == POST_DEC))
/* '>': auto-increment memory.  */
2557 if (GET_CODE (op) == MEM
2558 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2559 || GET_CODE (XEXP (op, 0)) == POST_INC))
2564 #ifndef REAL_ARITHMETIC
2565 /* Match any CONST_DOUBLE, but only if
2566 we can examine the bits of it reliably. */
2567 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2568 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2569 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2572 if (GET_CODE (op) == CONST_DOUBLE)
2577 if (GET_CODE (op) == CONST_DOUBLE)
/* 'G'/'H': target-specific floating constants.  */
2583 if (GET_CODE (op) == CONST_DOUBLE
2584 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
/* 's'/'n': integer constants (VOIDmode CONST_DOUBLE = wide int).  */
2589 if (GET_CODE (op) == CONST_INT
2590 || (GET_CODE (op) == CONST_DOUBLE
2591 && GET_MODE (op) == VOIDmode))
2594 if (CONSTANT_P (op))
2599 if (GET_CODE (op) == CONST_INT
2600 || (GET_CODE (op) == CONST_DOUBLE
2601 && GET_MODE (op) == VOIDmode))
/* 'I'..'P': target-defined CONST_INT ranges, checked generically.  */
2613 if (GET_CODE (op) == CONST_INT
2614 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
/* 'V': non-offsettable memory.  */
2619 if (GET_CODE (op) == MEM
2620 && ((strict > 0 && ! offsettable_memref_p (op))
2622 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2623 || (reload_in_progress
2624 && !(GET_CODE (op) == REG
2625 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
/* 'o': offsettable memory, strictness depending on STRICT.  */
2630 if ((strict > 0 && offsettable_memref_p (op))
2631 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2632 /* Before reload, accept what reload can handle. */
2634 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2635 /* During reload, accept a pseudo */
2636 || (reload_in_progress && GET_CODE (op) == REG
2637 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* Default: register-class letters and EXTRA_CONSTRAINT.  */
2643 enum reg_class class;
2645 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2646 if (class != NO_REGS)
2650 && GET_CODE (op) == REG
2651 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2652 || (strict == 0 && GET_CODE (op) == SCRATCH)
2653 || (GET_CODE (op) == REG
2654 && reg_fits_class_p (op, class, offset, mode)))
2657 #ifdef EXTRA_CONSTRAINT
2658 else if (EXTRA_CONSTRAINT (op, c))
2665 constraints[opno] = p;
2666 /* If this operand did not win somehow,
2667 this alternative loses. */
2671 /* This alternative won; the operands are ok.
2672 Change whichever operands this alternative says to change. */
2677 /* See if any earlyclobber operand conflicts with some other
2681 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2682 /* Ignore earlyclobber operands now in memory,
2683 because we would often report failure when we have
2684 two memory operands, one of which was formerly a REG. */
2685 if (earlyclobber[eopno]
2686 && GET_CODE (recog_data.operand[eopno]) == REG)
2687 for (opno = 0; opno < recog_data.n_operands; opno++)
2688 if ((GET_CODE (recog_data.operand[opno]) == MEM
2689 || recog_data.operand_type[opno] != OP_OUT)
2691 /* Ignore things like match_operator operands. */
2692 && *recog_data.constraints[opno] != 0
2693 && ! (matching_operands[opno] == eopno
2694 && operands_match_p (recog_data.operand[opno],
2695 recog_data.operand[eopno]))
2696 && ! safe_from_earlyclobber (recog_data.operand[opno],
2697 recog_data.operand[eopno]))
/* Success: apply the deferred output-looks-like-input substitutions
   recorded in funny_match, then return 1.  */
2702 while (--funny_match_index >= 0)
2704 recog_data.operand[funny_match[funny_match_index].other]
2705 = recog_data.operand[funny_match[funny_match_index].this];
2712 which_alternative++;
2714 while (which_alternative < recog_data.n_alternatives);
2716 which_alternative = -1;
2717 /* If we are about to reject this, but we are not to test strictly,
2718 try a very loose test. Only return failure if it fails also. */
2720 return constrain_operands (-1);
2725 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2726 is a hard reg in class CLASS when its regno is offset by OFFSET
2727 and changed to mode MODE.
2728 If REG occupies multiple hard regs, all of them must be in CLASS. */
2731 reg_fits_class_p (operand, class, offset, mode)
2733 register enum reg_class class;
2735 enum machine_mode mode;
2737 register int regno = REGNO (operand);
2738 if (regno < FIRST_PSEUDO_REGISTER
2739 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
/* Check every hard reg the (possibly multi-word) value occupies;
   the loop body's early `return 0;` is dropped from this listing.  */
2744 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2746 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2755 /* Split all insns in the function. If UPD_LIFE, update life info after. */
/* NOTE(review): many original lines are missing from this extraction (the
   return type, the UPD_LIFE declaration, local declarations, braces, and
   several statements); comments below annotate only the surviving lines.  */
2758 split_all_insns (upd_life)
/* BLOCKS records which basic blocks had an insn split, so that life
   information can later be updated for just those blocks.  */
2765 blocks = sbitmap_alloc (n_basic_blocks);
2766 sbitmap_zero (blocks);
/* Walk every basic block, last to first.  */
2769 for (i = n_basic_blocks - 1; i >= 0; --i)
2771 basic_block bb = BASIC_BLOCK (i);
/* Walk the insns of this block; NEXT is captured up front because INSN
   may be turned into a note (or replaced) below.  */
2774 for (insn = bb->head; insn ; insn = next)
2778 /* Can't use `next_real_insn' because that might go across
2779 CODE_LABELS and short-out basic blocks. */
2780 next = NEXT_INSN (insn);
2781 if (! INSN_P (insn))
2784 /* Don't split no-op move insns. These should silently
2785 disappear later in final. Splitting such insns would
2786 break the code that handles REG_NO_CONFLICT blocks. */
2788 else if ((set = single_set (insn)) != NULL
2789 && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
2791 /* Nops get in the way while scheduling, so delete them
2792 now if register allocation has already been done. It
2793 is too risky to try to do this before register
2794 allocation, and there are unlikely to be very many
2795 nops then anyways. */
2796 if (reload_completed)
/* Delete the nop by converting it to a NOTE_INSN_DELETED note rather
   than unlinking it from the chain.  */
2798 PUT_CODE (insn, NOTE);
2799 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2800 NOTE_SOURCE_FILE (insn) = 0;
2805 /* Split insns here to get max fine-grain parallelism. */
/* FIRST/LAST bracket the new sequence that try_split emits in place of
   INSN; they are used below to post-process the new insns.  */
2806 rtx first = PREV_INSN (insn);
2807 rtx last = try_split (PATTERN (insn), insn, 1);
/* This block changed; remember it for the life-info update below.  */
2811 SET_BIT (blocks, i);
2814 /* try_split returns the NOTE that INSN became. */
2815 PUT_CODE (insn, NOTE);
2816 NOTE_SOURCE_FILE (insn) = 0;
2817 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2819 /* ??? Coddle to md files that generate subregs in post-
2820 reload splitters instead of computing the proper
2822 if (reload_completed && first != last)
2824 first = NEXT_INSN (first);
/* Simplify (subreg (reg)) and friends left behind by post-reload
   splitters in each new insn.  */
2828 cleanup_subreg_operands (first);
2831 first = NEXT_INSN (first);
/* Keep bb->end coherent when the split replaced the block's last insn
   (the statements doing so are in missing lines).  */
2835 if (insn == bb->end)
2843 if (insn == bb->end)
2847 /* ??? When we're called from just after reload, the CFG is in bad
2848 shape, and we may have fallen off the end. This could be fixed
2849 by having reload not try to delete unreachable code. Otherwise
2850 assert we found the end insn. */
2851 if (insn == NULL && upd_life)
/* If anything was split and the caller asked for it, recompute life
   information -- but only for the blocks recorded in BLOCKS.  */
2855 if (changed && upd_life)
2857 compute_bb_for_insn (get_max_uid ());
2858 count_or_remove_death_notes (blocks, 1);
2859 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2862 sbitmap_free (blocks);
2865 #ifdef HAVE_peephole2
/* Bookkeeping for the peephole2 pass: a circular buffer holding the most
   recent insns together with the set of registers live before each one.
   NOTE(review): the struct's member declarations (presumably an insn rtx
   and a live_before regset, per the uses below) are missing from this
   extraction.  */
2866 struct peep2_insn_data
/* One slot per insn the peephole window can cover, plus one for the
   end-of-block marker.  */
2872 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
/* Index of the current insn's slot; the buffer is used circularly.  */
2873 static int peep2_current;
2875 /* A non-insn marker indicating the last insn of the block.
2876 The live_before regset for this element is correct, indicating
2877 global_live_at_end for the block. */
2878 #define PEEP2_EOB pc_rtx
2880 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2881 does not exist. Used by the recognizer to find the next insn to match
2882 in a multi-insn pattern. */
/* NOTE(review): the function header (return type, name, and the parameter
   declaration for N) is missing from this extraction, along with the
   statement that adds peep2_current into N before the wrap-around test.  */
/* Requests beyond the peephole window can never be satisfied.  */
2888 if (n >= MAX_INSNS_PER_PEEP2 + 1)
/* Reduce the (already offset) index into the circular buffer's range.  */
2892 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2893 n -= MAX_INSNS_PER_PEEP2 + 1;
/* The end-of-block marker is not a real insn; the (missing) statement
   here presumably returns NULL_RTX in that case.  */
2895 if (peep2_insn_data[n].insn == PEEP2_EOB)
2897 return peep2_insn_data[n].insn;
2900 /* Return true if REGNO is dead before the Nth non-note insn
/* NOTE(review): the rest of this comment, the return type, and the
   parameter declarations are missing from this extraction.  */
2904 peep2_regno_dead_p (ofs, regno)
/* Offsets beyond the peephole window are invalid (the missing statement
   here presumably aborts).  */
2908 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
/* Map the logical offset OFS into the circular peep2_insn_data buffer.  */
2911 ofs += peep2_current;
2912 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2913 ofs -= MAX_INSNS_PER_PEEP2 + 1;
/* The slot must hold valid recorded data.  */
2915 if (peep2_insn_data[ofs].insn == NULL_RTX)
/* REGNO is dead iff it is absent from the live-before set of that insn.  */
2918 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2921 /* Similarly for a REG. */
/* NOTE(review): the return type, parameter and local declarations, the
   loop header over the hard regs, and the final return are missing from
   this extraction.  */
2924 peep2_reg_dead_p (ofs, reg)
/* Offsets beyond the peephole window are invalid.  */
2930 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
/* Map OFS into the circular buffer, exactly as peep2_regno_dead_p does.  */
2933 ofs += peep2_current;
2934 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2935 ofs -= MAX_INSNS_PER_PEEP2 + 1;
/* The slot must hold valid recorded data.  */
2937 if (peep2_insn_data[ofs].insn == NULL_RTX)
/* A REG may span several hard registers in its mode; check each of the
   HARD_REGNO_NREGS constituent regs against the live-before set.  */
2940 regno = REGNO (reg);
2941 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
/* If any constituent hard reg is still live, the REG is not dead.  */
2943 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2948 /* Try to find a hard register of mode MODE, matching the register class in
2949 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2950 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2951 in which case the only condition is that the register must be available
2952 before CURRENT_INSN.
2953 Registers that already have bits set in REG_SET will not be considered.
2955 If an appropriate register is available, it will be returned and the
2956 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
/* NOTE(review): the return type, the FROM/TO parameter declarations,
   several local declarations, braces, and a number of statements are
   missing from this extraction; only surviving lines are annotated.  */
2960 peep2_find_free_register (from, to, class_str, mode, reg_set)
2962 const char *class_str;
2963 enum machine_mode mode;
2964 HARD_REG_SET *reg_set;
/* SEARCH_OFS is static so successive calls start scanning at different
   registers, spreading allocations across the register file.  */
2966 static int search_ofs;
2967 enum reg_class class;
/* FROM/TO are logical offsets into the peephole window.  */
2971 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
/* Map both endpoints into the circular peep2_insn_data buffer.  */
2974 from += peep2_current;
2975 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2976 from -= MAX_INSNS_PER_PEEP2 + 1;
2977 to += peep2_current;
2978 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2979 to -= MAX_INSNS_PER_PEEP2 + 1;
2981 if (peep2_insn_data[from].insn == NULL_RTX)
/* Start with the registers live before FROM ...  */
2983 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
/* ... and accumulate (IOR) the live sets of every insn up to TO, so the
   returned register is free across the whole FROM..TO range.  */
2987 HARD_REG_SET this_live;
2989 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2991 if (peep2_insn_data[from].insn == NULL_RTX)
2993 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2994 IOR_HARD_REG_SET (live, this_live);
/* Translate the constraint letter into a register class; 'r' means any
   general register.  */
2997 class = (class_str[0] == 'r' ? GENERAL_REGS
2998 : REG_CLASS_FROM_LETTER (class_str[0]));
3000 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3002 int raw_regno, regno, success, j;
3004 /* Distribute the free registers as much as possible. */
3005 raw_regno = search_ofs + i;
3006 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3007 raw_regno -= FIRST_PSEUDO_REGISTER;
3008 #ifdef REG_ALLOC_ORDER
/* Honor the target's preferred allocation order when it defines one.  */
3009 regno = reg_alloc_order[raw_regno];
3014 /* Don't allocate fixed registers. */
3015 if (fixed_regs[regno])
3017 /* Make sure the register is of the right class. */
3018 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
3020 /* And can support the mode we need. */
3021 if (! HARD_REGNO_MODE_OK (regno, mode))
3023 /* And that we don't create an extra save/restore. */
3024 if (! call_used_regs[regno] && ! regs_ever_live[regno])
3026 /* And we don't clobber traceback for noreturn functions. */
3027 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3028 && (! reload_completed || frame_pointer_needed))
/* Check every hard reg the candidate occupies in MODE: it must be in
   neither the caller's REG_SET nor the accumulated LIVE set.  */
3032 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3034 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3035 || TEST_HARD_REG_BIT (live, regno + j))
/* Success: reserve all constituent hard regs in the caller's REG_SET.  */
3043 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3044 SET_HARD_REG_BIT (*reg_set, regno + j);
3046 /* Start the next search with the next register. */
3047 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3049 search_ofs = raw_regno;
3051 return gen_rtx_REG (mode, regno);
3059 /* Perform the peephole2 optimization pass. */
/* NOTE(review): this extraction is missing the return type, many local
   declarations, braces, and several statements; only the surviving lines
   are annotated below.  */
3062 peephole2_optimize (dump_file)
3063 FILE *dump_file ATTRIBUTE_UNUSED;
/* Backing storage for the per-slot live_before regsets plus LIVE.  */
3065 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3069 #ifdef HAVE_conditional_execution
3074 /* Initialize the regsets we're going to use. */
3075 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3076 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3077 live = INITIALIZE_REG_SET (rs_heads[i]);
3079 #ifdef HAVE_conditional_execution
/* With conditional execution, life info cannot be maintained insn by
   insn; instead record modified blocks and recompute at the end.  */
3080 blocks = sbitmap_alloc (n_basic_blocks);
3081 sbitmap_zero (blocks);
3084 count_or_remove_death_notes (NULL, 1);
/* Walk every basic block, last to first.  */
3087 for (b = n_basic_blocks - 1; b >= 0; --b)
3089 basic_block bb = BASIC_BLOCK (b);
3090 struct propagate_block_info *pbi;
3092 /* Indicate that all slots except the last holds invalid data. */
3093 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3094 peep2_insn_data[i].insn = NULL_RTX;
3096 /* Indicate that the last slot contains live_after data. */
3097 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3098 peep2_current = MAX_INSNS_PER_PEEP2;
3100 /* Start up propagation. */
3101 COPY_REG_SET (live, bb->global_live_at_end);
3102 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3104 #ifdef HAVE_conditional_execution
3105 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3107 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
/* Scan the block's insns backwards, propagating life info as we go.  */
3110 for (insn = bb->end; ; insn = prev)
3112 prev = PREV_INSN (insn);
3118 /* Record this insn. */
/* Slots are filled backwards through the circular buffer.  */
3119 if (--peep2_current < 0)
3120 peep2_current = MAX_INSNS_PER_PEEP2;
3121 peep2_insn_data[peep2_current].insn = insn;
3122 propagate_one_insn (pbi, insn);
3123 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3125 /* Match the peephole. */
/* TRY is the replacement insn sequence (or NULL); MATCH_LEN is how many
   additional window insns the pattern consumed.  */
3126 try = peephole2_insns (PATTERN (insn), insn, &match_len);
/* Index of the last insn matched, in circular-buffer terms.  */
3129 i = match_len + peep2_current;
3130 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3131 i -= MAX_INSNS_PER_PEEP2 + 1;
3133 /* Replace the old sequence with the new. */
3134 flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
3135 try = emit_insn_after (try, prev);
3137 /* Adjust the basic block boundaries. */
3138 if (peep2_insn_data[i].insn == bb->end)
3140 if (insn == bb->head)
3141 bb->head = NEXT_INSN (prev);
3143 #ifdef HAVE_conditional_execution
3144 /* With conditional execution, we cannot back up the
3145 live information so easily, since the conditional
3146 death data structures are not so self-contained.
3147 So record that we've made a modification to this
3148 block and update life information at the end. */
3149 SET_BIT (blocks, b);
/* Invalidate the window; it no longer reflects the insn stream.  */
3152 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3153 peep2_insn_data[i].insn = NULL_RTX;
3154 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3156 /* Back up lifetime information past the end of the
3157 newly created sequence. */
3158 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3160 COPY_REG_SET (live, peep2_insn_data[i].live_before)
3162 /* Update life information for the new sequence. */
/* Walk the new insns backwards, recording each in the window and
   propagating life info through it.  */
3168 i = MAX_INSNS_PER_PEEP2;
3169 peep2_insn_data[i].insn = try;
3170 propagate_one_insn (pbi, try);
3171 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3173 try = PREV_INSN (try);
3175 while (try != prev);
3177 /* ??? Should verify that LIVE now matches what we
3178 had before the new sequence. */
/* Stop once the block head has been processed.  */
3185 if (insn == bb->head)
3189 free_propagate_block_info (pbi);
/* Release the regsets allocated at function entry.  */
3192 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3193 FREE_REG_SET (peep2_insn_data[i].live_before)
3194 FREE_REG_SET (live);
3196 #ifdef HAVE_conditional_execution
/* Recompute life information for the blocks modified above.  */
3197 count_or_remove_death_notes (blocks, 1);
3198 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3199 sbitmap_free (blocks);
3202 #endif /* HAVE_peephole2 */