1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "insn-flags.h"
30 #include "insn-codes.h"
31 #include "hard-reg-set.h"
38 #include "basic-block.h"
/* Default address-update codes for stack pushes/pops, chosen by stack
   growth direction: a downward-growing stack pushes with PRE_DEC and
   pops with POST_INC, and vice versa.  A target may predefine these.
   NOTE(review): the #else/#endif lines of these conditionals are not
   visible in this extraction — confirm against the full file before
   editing.  */
42 #ifndef STACK_PUSH_CODE
43 #ifdef STACK_GROWS_DOWNWARD
44 #define STACK_PUSH_CODE PRE_DEC
46 #define STACK_PUSH_CODE PRE_INC
50 #ifndef STACK_POP_CODE
51 #ifdef STACK_GROWS_DOWNWARD
52 #define STACK_POP_CODE POST_INC
54 #define STACK_POP_CODE POST_DEC
/* Forward declarations for this file's internal helpers (PARAMS is the
   old-style prototype wrapper for pre-ANSI compatibility).  */
58 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
59 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
60 static rtx *find_constant_term_loc PARAMS ((rtx *));
61 static int insn_invalid_p PARAMS ((rtx));
63 /* Nonzero means allow operands to be volatile.
64 This should be 0 if you are generating rtl, such as if you are calling
65 the functions in optabs.c and expmed.c (most of the time).
66 This should be 1 if all valid insns need to be recognized,
67 such as in regclass.c and final.c and reload.c.
69 init_recog and init_recog_no_volatile are responsible for setting this. */
/* NOTE(review): the `volatile_ok' variable this comment describes is not
   visible in this extraction.  */
/* Scratch data filled in by extract_insn: operands, duplicates,
   constraints, and per-insn recognition info consumed by the rest of
   the compiler.  */
73 struct recog_data recog_data;
75 /* Contains a vector of operand_alternative structures for every operand.
76 Set up by preprocess_constraints. */
77 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
79 /* On return from `constrain_operands', indicate which alternative
82 int which_alternative;
84 /* Nonzero after end of reload pass.
85 Set to 1 or 0 by toplev.c.
86 Controls the significance of (SUBREG (MEM)). */
/* NOTE(review): the `reload_completed' definition this comment describes
   is not visible in this extraction.  */
90 /* Initialize data used by the function `recog'.
91 This must be called once in the compilation of a function
92 before any insn recognition may be done in the function. */
/* Presumably clears volatile_ok so volatile operands are rejected while
   generating rtl — the body is not visible in this extraction; confirm
   against the full file.  */
95 init_recog_no_volatile ()
106 /* Try recognizing the instruction INSN,
107 and return the code number that results.
108 Remember the code so that repeated calls do not
109 need to spend the time for actual rerecognition.
111 This function is the normal interface to instruction recognition.
112 The automatically-generated function `recog' is normally called
113 through this one. (The only exception is in combine.c.) */
116 recog_memoized_1 (insn)
/* INSN_CODE < 0 means "not yet recognized"; recog's result (possibly -1
   for unrecognizable) is cached back into the insn.  */
119 if (INSN_CODE (insn) < 0)
120 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
121 return INSN_CODE (insn);
124 /* Check that X is an insn-body for an `asm' with operands
125 and that the operands mentioned in it are legitimate. */
128 check_asm_operands (x)
133 const char **constraints;
136 /* Post-reload, be more strict with things. */
137 if (reload_completed)
139 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
140 extract_insn (make_insn_raw (x));
141 constrain_operands (1);
142 return which_alternative >= 0;
145 noperands = asm_noperands (x);
151 operands = (rtx *) alloca (noperands * sizeof (rtx));
152 constraints = (const char **) alloca (noperands * sizeof (char *));
154 decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);
156 for (i = 0; i < noperands; i++)
158 const char *c = constraints[i];
/* A single-digit constraint is a matching constraint: operand I must
   satisfy the constraint of the operand it matches, so look that one up
   instead.  */
161 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
162 c = constraints[c[0] - '0'];
164 if (! asm_operand_ok (operands[i], c))
171 /* Static data for the next two routines. */
/* Pending-change log for validate_change/apply_change_group: a growable
   array of change_t records plus its capacity and current length.
   NOTE(review): the change_t member declarations are not visible in this
   extraction (object, loc, old, old_code are used below).  */
173 typedef struct change_t
181 static change_t *changes;
182 static int changes_allocated;
184 static int num_changes = 0;
186 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
187 at which NEW will be placed. If OBJECT is zero, no validation is done,
188 the change is simply made.
190 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
191 will be called with the address and mode as parameters. If OBJECT is
192 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
195 IN_GROUP is non-zero if this is part of a group of changes that must be
196 performed as a group. In that case, the changes will be stored. The
197 function `apply_change_group' will validate and apply the changes.
199 If IN_GROUP is zero, this is a single change. Try to recognize the insn
200 or validate the memory reference with the change applied. If the result
201 is not valid for the machine, suppress the change and return zero.
202 Otherwise, perform the change and return 1. */
205 validate_change (object, loc, new, in_group)
/* Nothing to do if the replacement is already in place.  */
213 if (old == new || rtx_equal_p (old, new))
/* A non-group change while a group is pending would corrupt the log.  */
216 if (in_group == 0 && num_changes != 0)
221 /* Save the information describing this change. */
222 if (num_changes >= changes_allocated)
224 if (changes_allocated == 0)
225 /* This value allows for repeated substitutions inside complex
226 indexed addresses, or changes in up to 5 insns. */
227 changes_allocated = MAX_RECOG_OPERANDS * 5;
229 changes_allocated *= 2;
232 (change_t*) xrealloc (changes,
233 sizeof (change_t) * changes_allocated);
236 changes[num_changes].object = object;
237 changes[num_changes].loc = loc;
238 changes[num_changes].old = old;
240 if (object && GET_CODE (object) != MEM)
242 /* Set INSN_CODE to force rerecognition of insn. Save old code in
244 changes[num_changes].old_code = INSN_CODE (object);
245 INSN_CODE (object) = -1;
250 /* If we are making a group of changes, return 1. Otherwise, validate the
251 change group we made. */
256 return apply_change_group ();
259 /* This subroutine of apply_change_group verifies whether the changes to INSN
260 were valid; i.e. whether INSN can still be recognized. */
263 insn_invalid_p (insn)
266 int icode = recog_memoized (insn);
267 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
/* asm insns have no insn code; they are valid iff their operands still
   satisfy their constraints.  */
269 if (is_asm && ! check_asm_operands (PATTERN (insn)))
271 if (! is_asm && icode < 0)
274 /* After reload, verify that all constraints are satisfied. */
275 if (reload_completed)
279 if (! constrain_operands (1))
286 /* Apply a group of changes previously issued with `validate_change'.
287 Return 1 if all changes are valid, zero otherwise. */
290 apply_change_group ()
294 /* The changes have been applied and all INSN_CODEs have been reset to force
297 The changes are valid if we aren't given an object, or if we are
298 given a MEM and it still is a valid address, or if this is in insn
299 and it is recognized. In the latter case, if reload has completed,
300 we also require that the operands meet the constraints for
303 for (i = 0; i < num_changes; i++)
305 rtx object = changes[i].object;
310 if (GET_CODE (object) == MEM)
312 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
315 else if (insn_invalid_p (object))
317 rtx pat = PATTERN (object);
319 /* Perhaps we couldn't recognize the insn because there were
320 extra CLOBBERs at the end. If so, try to re-recognize
321 without the last CLOBBER (later iterations will cause each of
322 them to be eliminated, in turn). But don't do this if we
323 have an ASM_OPERAND. */
324 if (GET_CODE (pat) == PARALLEL
325 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
326 && asm_noperands (PATTERN (object)) < 0)
/* A two-element PARALLEL collapses to its first element; otherwise
   build a PARALLEL one element shorter.  */
330 if (XVECLEN (pat, 0) == 2)
331 newpat = XVECEXP (pat, 0, 0);
337 = gen_rtx_PARALLEL (VOIDmode,
338 rtvec_alloc (XVECLEN (pat, 0) - 1));
339 for (j = 0; j < XVECLEN (newpat, 0); j++)
340 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
343 /* Add a new change to this group to replace the pattern
344 with this new pattern. Then consider this change
345 as having succeeded. The change we added will
346 cause the entire call to fail if things remain invalid.
348 Note that this can lose if a later change than the one
349 we are processing specified &XVECEXP (PATTERN (object), 0, X)
350 but this shouldn't occur. */
352 validate_change (object, &PATTERN (object), newpat, 1);
354 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
355 /* If this insn is a CLOBBER or USE, it is always valid, but is
/* All changes checked out: the loop ran to completion.  */
363 if (i == num_changes)
375 /* Return the number of changes so far in the current group. */
/* Callers use this with the retraction routine below to roll back to a
   checkpoint.  NOTE(review): body not visible in this extraction;
   presumably returns num_changes.  */
378 num_validated_changes ()
383 /* Retract the changes numbered NUM and up. */
/* NOTE(review): the function header line is not visible in this
   extraction; presumably this is `cancel_changes (num)'.  */
391 /* Back out all the changes. Do this in the opposite order in which
/* Reverse order matters: later changes may have been made inside rtl
   installed by earlier ones.  */
393 for (i = num_changes - 1; i >= num; i--)
395 *changes[i].loc = changes[i].old;
396 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
397 INSN_CODE (changes[i].object) = changes[i].old_code;
402 /* Replace every occurrence of FROM in X with TO. Mark each change with
403 validate_change passing OBJECT. */
/* Recursive worker for the validate_replace_* entry points.  All
   substitutions are queued as a group (in_group == 1); the caller is
   responsible for apply_change_group.  NOTE(review): this body is a
   switch over GET_CODE (x) whose `switch'/`case' lines are largely not
   visible in this extraction — the case groupings below are inferred
   from the surviving comments.  */
406 validate_replace_rtx_1 (loc, from, to, object)
408 rtx from, to, object;
411 register const char *fmt;
412 register rtx x = *loc;
418 /* X matches FROM if it is the same rtx or they are both referring to the
419 same register in the same mode. Avoid calling rtx_equal_p unless the
420 operands look similar. */
423 || (GET_CODE (x) == REG && GET_CODE (from) == REG
424 && GET_MODE (x) == GET_MODE (from)
425 && REGNO (x) == REGNO (from))
426 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
427 && rtx_equal_p (x, from)))
429 validate_change (object, loc, to, 1);
433 /* For commutative or comparison operations, try replacing each argument
434 separately and seeing if we made any changes. If so, put a constant
436 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
438 int prev_changes = num_changes;
440 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
441 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
/* Canonicalize: constant goes second.  For a comparison the operands
   can only be swapped by also swapping the condition code.  */
442 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
444 validate_change (object, loc,
445 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
446 : swap_condition (code),
447 GET_MODE (x), XEXP (x, 1),
455 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
456 done the substitution, otherwise we won't. */
461 /* If we have a PLUS whose second operand is now a CONST_INT, use
462 plus_constant to try to simplify it. */
463 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
464 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
/* MINUS of a CONST_INT becomes PLUS of its negation.  */
469 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
471 validate_change (object, loc,
472 plus_constant (XEXP (x, 0), - INTVAL (to)),
480 /* In these cases, the operation to be performed depends on the mode
481 of the operand. If we are replacing the operand with a VOIDmode
482 constant, we lose the information. So try to simplify the operation
484 if (GET_MODE (to) == VOIDmode
485 && (rtx_equal_p (XEXP (x, 0), from)
486 || (GET_CODE (XEXP (x, 0)) == SUBREG
487 && rtx_equal_p (SUBREG_REG (XEXP (x, 0)), from))))
491 /* If there is a subreg involved, crop to the portion of the
492 constant that we are interested in. */
493 if (GET_CODE (XEXP (x, 0)) == SUBREG)
495 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) <= UNITS_PER_WORD)
496 to = operand_subword (to, SUBREG_WORD (XEXP (x, 0)),
498 else if (GET_MODE_CLASS (GET_MODE (from)) == MODE_INT
499 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
500 <= HOST_BITS_PER_WIDE_INT))
/* Extract the relevant word(s) of the constant by shifting the
   low/high halves into place.  */
502 int i = SUBREG_WORD (XEXP (x, 0)) * BITS_PER_WORD;
504 unsigned HOST_WIDE_INT vall;
506 if (GET_CODE (to) == CONST_INT)
509 valh = (HOST_WIDE_INT) vall < 0 ? ~0 : 0;
513 vall = CONST_DOUBLE_LOW (to);
514 valh = CONST_DOUBLE_HIGH (to);
517 if (WORDS_BIG_ENDIAN)
518 i = (GET_MODE_BITSIZE (GET_MODE (from))
519 - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - i);
520 if (i > 0 && i < HOST_BITS_PER_WIDE_INT)
521 vall = vall >> i | valh << (HOST_BITS_PER_WIDE_INT - i);
522 else if (i >= HOST_BITS_PER_WIDE_INT)
523 vall = valh >> (i - HOST_BITS_PER_WIDE_INT);
524 to = GEN_INT (trunc_int_for_mode (vall,
525 GET_MODE (XEXP (x, 0))));
/* Cropping failed: substitute something that can never be
   recognized, so the whole change group is rejected.  */
528 to = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
531 /* If the above didn't fail, perform the extension from the
532 mode of the operand (and not the mode of FROM). */
534 new = simplify_unary_operation (code, GET_MODE (x), to,
535 GET_MODE (XEXP (x, 0)));
537 /* If any of the above failed, substitute in something that
538 we know won't be recognized. */
540 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
542 validate_change (object, loc, new, 1);
548 /* In case we are replacing by constant, attempt to simplify it to non-SUBREG
549 expression. We can't do this later, since the information about inner mode
551 if (CONSTANT_P (to) && rtx_equal_p (SUBREG_REG (x), from))
553 if (GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
554 && GET_MODE_SIZE (GET_MODE (from)) > UNITS_PER_WORD
555 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
557 rtx temp = operand_subword (to, SUBREG_WORD (x),
561 validate_change (object, loc, temp, 1);
565 if (subreg_lowpart_p (x))
567 rtx new = gen_lowpart_if_possible (GET_MODE (x), to);
570 validate_change (object, loc, new, 1);
575 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
576 since we are saying that the high bits don't matter. */
577 if (GET_MODE (to) == VOIDmode
578 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (from)))
580 validate_change (object, loc, to, 1);
585 /* Changing mode twice with SUBREG => just change it once,
586 or not at all if changing back to starting mode. */
587 if (GET_CODE (to) == SUBREG
588 && rtx_equal_p (SUBREG_REG (x), from))
590 if (GET_MODE (x) == GET_MODE (SUBREG_REG (to))
591 && SUBREG_WORD (x) == 0 && SUBREG_WORD (to) == 0)
593 validate_change (object, loc, SUBREG_REG (to), 1);
597 validate_change (object, loc,
598 gen_rtx_SUBREG (GET_MODE (x), SUBREG_REG (to),
599 SUBREG_WORD (x) + SUBREG_WORD (to)), 1);
603 /* If we have a SUBREG of a register that we are replacing and we are
604 replacing it with a MEM, make a new MEM and try replacing the
605 SUBREG with it. Don't do this if the MEM has a mode-dependent address
606 or if we would be widening it. */
608 if (GET_CODE (from) == REG
609 && GET_CODE (to) == MEM
610 && rtx_equal_p (SUBREG_REG (x), from)
611 && ! mode_dependent_address_p (XEXP (to, 0))
612 && ! MEM_VOLATILE_P (to)
613 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
615 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
616 enum machine_mode mode = GET_MODE (x);
/* On big-endian targets the subword lives at the high end of the
   containing word, so bias the byte offset accordingly.  */
619 if (BYTES_BIG_ENDIAN)
620 offset += (MIN (UNITS_PER_WORD,
621 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
622 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
624 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
625 MEM_COPY_ATTRIBUTES (new, to);
626 validate_change (object, loc, new, 1);
633 /* If we are replacing a register with memory, try to change the memory
634 to be the mode required for memory in extract operations (this isn't
635 likely to be an insertion operation; if it was, nothing bad will
636 happen, we might just fail in some cases). */
638 if (GET_CODE (from) == REG && GET_CODE (to) == MEM
639 && rtx_equal_p (XEXP (x, 0), from)
640 && GET_CODE (XEXP (x, 1)) == CONST_INT
641 && GET_CODE (XEXP (x, 2)) == CONST_INT
642 && ! mode_dependent_address_p (XEXP (to, 0))
643 && ! MEM_VOLATILE_P (to))
645 enum machine_mode wanted_mode = VOIDmode;
646 enum machine_mode is_mode = GET_MODE (to);
647 int pos = INTVAL (XEXP (x, 2));
/* Ask the extv/extzv insn patterns what memory mode they want for
   the extraction source; word_mode is the fallback.  */
650 if (code == ZERO_EXTRACT)
652 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
653 if (wanted_mode == VOIDmode)
654 wanted_mode = word_mode;
658 if (code == SIGN_EXTRACT)
660 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
661 if (wanted_mode == VOIDmode)
662 wanted_mode = word_mode;
666 /* If we have a narrower mode, we can do something. */
667 if (wanted_mode != VOIDmode
668 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
670 int offset = pos / BITS_PER_UNIT;
673 /* If the bytes and bits are counted differently, we
674 must adjust the offset. */
675 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
676 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
679 pos %= GET_MODE_BITSIZE (wanted_mode);
681 newmem = gen_rtx_MEM (wanted_mode,
682 plus_constant (XEXP (to, 0), offset));
683 MEM_COPY_ATTRIBUTES (newmem, to);
685 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
686 validate_change (object, &XEXP (x, 0), newmem, 1);
696 /* For commutative or comparison operations we've already performed
697 replacements. Don't try to perform them again. */
698 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
/* Generic recursion over X's format string: 'e' is a subexpression,
   'E' a vector of subexpressions.  */
700 fmt = GET_RTX_FORMAT (code);
701 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
704 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
705 else if (fmt[i] == 'E')
706 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
707 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
712 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
713 with TO. After all changes have been made, validate by seeing
714 if INSN is still valid. */
717 validate_replace_rtx_subexp (from, to, insn, loc)
718 rtx from, to, insn, *loc;
/* Queue all substitutions within *LOC, then validate the whole group.  */
720 validate_replace_rtx_1 (loc, from, to, insn);
721 return apply_change_group ();
724 /* Try replacing every occurrence of FROM in INSN with TO. After all
725 changes have been made, validate by seeing if INSN is still valid. */
728 validate_replace_rtx (from, to, insn)
/* Same as validate_replace_rtx_subexp but over the whole pattern.  */
731 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
732 return apply_change_group ();
735 /* Try replacing every occurrence of FROM in INSN with TO. */
/* Group variant: queues the changes but does NOT call
   apply_change_group — the caller validates/cancels the group.  */
738 validate_replace_rtx_group (from, to, insn)
741 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
744 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
745 SET_DESTs. After all changes have been made, validate by seeing if
746 INSN is still valid. */
749 validate_replace_src (from, to, insn)
/* Only single-SET INSNs/JUMP_INSNs are handled.  */
752 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
753 || GET_CODE (PATTERN (insn)) != SET)
756 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
/* The destination itself is skipped, but addresses and extract
   position/size operands inside it are uses, so substitute there.  */
757 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
758 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
760 else if (GET_CODE (SET_DEST (PATTERN (insn))) == ZERO_EXTRACT)
762 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 1),
764 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 2),
768 return apply_change_group ();
772 /* Return 1 if the insn using CC0 set by INSN does not contain
773 any ordered tests applied to the condition codes.
774 EQ and NE tests do not count. */
777 next_insn_tests_no_inequality (insn)
780 register rtx next = next_cc0_user (insn);
782 /* If there is no next insn, we have to take the conservative choice. */
786 return ((GET_CODE (next) == JUMP_INSN
787 || GET_CODE (next) == INSN
788 || GET_CODE (next) == CALL_INSN)
789 && ! inequality_comparisons_p (PATTERN (next)));
792 #if 0 /* This is useless since the insn that sets the cc's
793 must be followed immediately by the use of them. */
794 /* Return 1 if the CC value set up by INSN is not used. */
797 next_insns_test_no_inequality (insn)
800 register rtx next = NEXT_INSN (insn);
/* Scan forward until a label/barrier, an inequality use, or a new
   cc0 setter.  (Dead code: compiled out by the #if 0 above.)  */
802 for (; next != 0; next = NEXT_INSN (next))
804 if (GET_CODE (next) == CODE_LABEL
805 || GET_CODE (next) == BARRIER)
807 if (GET_CODE (next) == NOTE)
809 if (inequality_comparisons_p (PATTERN (next)))
811 if (sets_cc0_p (PATTERN (next)) == 1)
813 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
821 /* This is used by find_single_use to locate an rtx that contains exactly one
822 use of DEST, which is typically either a REG or CC0. It returns a
823 pointer to the innermost rtx expression containing DEST. Appearances of
824 DEST that are being used to totally replace it are not counted. */
827 find_single_use_1 (dest, loc)
832 enum rtx_code code = GET_CODE (x);
849 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
850 of a REG that occupies all of the REG, the insn uses DEST if
851 it is mentioned in the destination or the source. Otherwise, we
852 need just check the source. */
853 if (GET_CODE (SET_DEST (x)) != CC0
854 && GET_CODE (SET_DEST (x)) != PC
855 && GET_CODE (SET_DEST (x)) != REG
856 && ! (GET_CODE (SET_DEST (x)) == SUBREG
857 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
/* "Occupies all of the REG" = same number of words inner and outer.  */
858 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
859 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
860 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
861 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
864 return find_single_use_1 (dest, &SET_SRC (x));
868 return find_single_use_1 (dest, &XEXP (x, 0));
874 /* If it wasn't one of the common cases above, check each expression and
875 vector of this code. Look for a unique usage of DEST. */
877 fmt = GET_RTX_FORMAT (code);
878 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
/* DEST appearing directly as this operand: X itself is the innermost
   containing expression, so the use-count logic below records it.  */
882 if (dest == XEXP (x, i)
883 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
884 && REGNO (dest) == REGNO (XEXP (x, i))))
887 this_result = find_single_use_1 (dest, &XEXP (x, i));
890 result = this_result;
891 else if (this_result)
892 /* Duplicate usage. */
895 else if (fmt[i] == 'E')
899 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
901 if (XVECEXP (x, i, j) == dest
902 || (GET_CODE (dest) == REG
903 && GET_CODE (XVECEXP (x, i, j)) == REG
904 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
907 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
910 result = this_result;
911 else if (this_result)
920 /* See if DEST, produced in INSN, is used only a single time in the
921 sequel. If so, return a pointer to the innermost rtx expression in which
924 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
926 This routine will return usually zero either before flow is called (because
927 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
928 note can't be trusted).
930 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
931 care about REG_DEAD notes or LOG_LINKS.
933 Otherwise, we find the single use by finding an insn that has a
934 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
935 only referenced once in that insn, we know that it must be the first
936 and last insn referencing DEST. */
939 find_single_use (dest, insn, ploc)
/* cc0 case: its only possible user is the immediately following insn.  */
951 next = NEXT_INSN (insn);
953 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
956 result = find_single_use_1 (dest, &PATTERN (next));
/* General case requires trustworthy dataflow info (see comment above).  */
963 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
966 for (next = next_nonnote_insn (insn);
967 next != 0 && GET_CODE (next) != CODE_LABEL;
968 next = next_nonnote_insn (next))
969 if (INSN_P (next) && dead_or_set_p (next, dest))
/* Confirm via LOG_LINKS that NEXT's use of DEST really comes from
   INSN before searching its pattern.  */
971 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
972 if (XEXP (link, 0) == insn)
977 result = find_single_use_1 (dest, &PATTERN (next));
987 /* Return 1 if OP is a valid general operand for machine mode MODE.
988 This is either a register reference, a memory reference,
989 or a constant. In the case of a memory reference, the address
990 is checked for general validity for the target machine.
992 Register and memory references must have mode MODE in order to be valid,
993 but some constants have no machine mode and are valid for any mode.
995 If MODE is VOIDmode, OP is checked for validity for whatever mode
998 The main use of this function is as a predicate in match_operand
999 expressions in the machine description.
1001 For an explanation of this function's behavior for registers of
1002 class NO_REGS, see the comment for `register_operand'. */
1005 general_operand (op, mode)
1007 enum machine_mode mode;
1009 register enum rtx_code code = GET_CODE (op);
1010 int mode_altering_drug = 0;
1012 if (mode == VOIDmode)
1013 mode = GET_MODE (op);
1015 /* Don't accept CONST_INT or anything similar
1016 if the caller wants something floating. */
1017 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1018 && GET_MODE_CLASS (mode) != MODE_INT
1019 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1022 if (CONSTANT_P (op))
1023 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1024 || mode == VOIDmode)
1025 #ifdef LEGITIMATE_PIC_OPERAND_P
1026 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1028 && LEGITIMATE_CONSTANT_P (op));
1030 /* Except for certain constants with VOIDmode, already checked for,
1031 OP's mode must match MODE if MODE specifies a mode. */
1033 if (GET_MODE (op) != mode)
/* SUBREG handling: peel the SUBREG and test the inner rtx.  */
1038 #ifdef INSN_SCHEDULING
1039 /* On machines that have insn scheduling, we want all memory
1040 reference to be explicit, so outlaw paradoxical SUBREGs. */
1041 if (GET_CODE (SUBREG_REG (op)) == MEM
1042 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
1046 op = SUBREG_REG (op);
1047 code = GET_CODE (op);
1049 /* No longer needed, since (SUBREG (MEM...))
1050 will load the MEM into a reload reg in the MEM's own mode. */
1051 mode_altering_drug = 1;
1056 /* A register whose class is NO_REGS is not a general operand. */
1057 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1058 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
/* MEM case: reject volatile refs unless volatile_ok, then validate
   the address against the target's addressing rules.  */
1062 register rtx y = XEXP (op, 0);
1064 if (! volatile_ok && MEM_VOLATILE_P (op))
1067 if (GET_CODE (y) == ADDRESSOF)
1070 /* Use the mem's mode, since it will be reloaded thus. */
1071 mode = GET_MODE (op);
1072 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
1075 /* Pretend this is an operand for now; we'll run force_operand
1076 on its replacement in fixup_var_refs_1. */
1077 if (code == ADDRESSOF)
/* `win' label target: address was legitimate.  */
1083 if (mode_altering_drug)
1084 return ! mode_dependent_address_p (XEXP (op, 0));
1088 /* Return 1 if OP is a valid memory address for a memory reference
1091 The main use of this function is as a predicate in match_operand
1092 expressions in the machine description. */
1095 address_operand (op, mode)
1097 enum machine_mode mode;
1099 return memory_address_p (mode, op);
1102 /* Return 1 if OP is a register reference of mode MODE.
1103 If MODE is VOIDmode, accept a register in any mode.
1105 The main use of this function is as a predicate in match_operand
1106 expressions in the machine description.
1108 As a special exception, registers whose class is NO_REGS are
1109 not accepted by `register_operand'. The reason for this change
1110 is to allow the representation of special architecture artifacts
1111 (such as a condition code register) without extending the rtl
1112 definitions. Since registers of class NO_REGS cannot be used
1113 as registers in any case where register classes are examined,
1114 it is most consistent to keep this function from accepting them. */
1117 register_operand (op, mode)
1119 enum machine_mode mode;
1121 if (GET_MODE (op) != mode && mode != VOIDmode)
1124 if (GET_CODE (op) == SUBREG)
1126 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1127 because it is guaranteed to be reloaded into one.
1128 Just make sure the MEM is valid in itself.
1129 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1130 but currently it does result from (SUBREG (REG)...) where the
1131 reg went on the stack.) */
1132 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1133 return general_operand (op, mode);
1135 #ifdef CLASS_CANNOT_CHANGE_MODE
/* Reject a SUBREG that would reinterpret a hard register of a class
   whose registers cannot change mode (e.g. FP regs on some targets),
   except for complex modes whose subwords are safe to take.  */
1136 if (GET_CODE (SUBREG_REG (op)) == REG
1137 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1138 && (TEST_HARD_REG_BIT
1139 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
1140 REGNO (SUBREG_REG (op))))
1141 && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
1142 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1143 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1147 op = SUBREG_REG (op);
1150 /* If we have an ADDRESSOF, consider it valid since it will be
1151 converted into something that will not be a MEM. */
1152 if (GET_CODE (op) == ADDRESSOF)
1155 /* We don't consider registers whose class is NO_REGS
1156 to be a register operand. */
1157 return (GET_CODE (op) == REG
1158 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1159 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1162 /* Return 1 for a register in Pmode; ignore the tested mode. */
1165 pmode_register_operand (op, mode)
1167 enum machine_mode mode ATTRIBUTE_UNUSED;
1169 return register_operand (op, Pmode);
1172 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1173 or a hard register. */
1176 scratch_operand (op, mode)
1178 enum machine_mode mode;
1180 if (GET_MODE (op) != mode && mode != VOIDmode)
1183 return (GET_CODE (op) == SCRATCH
1184 || (GET_CODE (op) == REG
1185 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1188 /* Return 1 if OP is a valid immediate operand for mode MODE.
1190 The main use of this function is as a predicate in match_operand
1191 expressions in the machine description. */
1194 immediate_operand (op, mode)
1196 enum machine_mode mode;
1198 /* Don't accept CONST_INT or anything similar
1199 if the caller wants something floating. */
1200 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1201 && GET_MODE_CLASS (mode) != MODE_INT
1202 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1205 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1206 result in 0/1. It seems a safe assumption that this is
1207 in range for everyone. */
1208 if (GET_CODE (op) == CONSTANT_P_RTX)
1211 return (CONSTANT_P (op)
1212 && (GET_MODE (op) == mode || mode == VOIDmode
1213 || GET_MODE (op) == VOIDmode)
1214 #ifdef LEGITIMATE_PIC_OPERAND_P
1215 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1217 && LEGITIMATE_CONSTANT_P (op));
1220 /* Returns 1 if OP is an operand that is a CONST_INT. */
1223 const_int_operand (op, mode)
1225 enum machine_mode mode ATTRIBUTE_UNUSED;
1227 return GET_CODE (op) == CONST_INT;
1230 /* Returns 1 if OP is an operand that is a constant integer or constant
1231 floating-point number. */
1234 const_double_operand (op, mode)
1236 enum machine_mode mode;
1238 /* Don't accept CONST_INT or anything similar
1239 if the caller wants something floating. */
1240 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1241 && GET_MODE_CLASS (mode) != MODE_INT
1242 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1245 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1246 && (mode == VOIDmode || GET_MODE (op) == mode
1247 || GET_MODE (op) == VOIDmode));
1250 /* Return 1 if OP is a general operand that is not an immediate operand. */
1253 nonimmediate_operand (op, mode)
1255 enum machine_mode mode;
1257 return (general_operand (op, mode) && ! CONSTANT_P (op));
1260 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1263 nonmemory_operand (op, mode)
1265 enum machine_mode mode;
/* Constant case mirrors immediate_operand.  */
1267 if (CONSTANT_P (op))
1269 /* Don't accept CONST_INT or anything similar
1270 if the caller wants something floating. */
1271 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1272 && GET_MODE_CLASS (mode) != MODE_INT
1273 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1276 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1277 || mode == VOIDmode)
1278 #ifdef LEGITIMATE_PIC_OPERAND_P
1279 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1281 && LEGITIMATE_CONSTANT_P (op));
/* Non-constant case mirrors register_operand.  */
1284 if (GET_MODE (op) != mode && mode != VOIDmode)
1287 if (GET_CODE (op) == SUBREG)
1289 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1290 because it is guaranteed to be reloaded into one.
1291 Just make sure the MEM is valid in itself.
1292 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1293 but currently it does result from (SUBREG (REG)...) where the
1294 reg went on the stack.) */
1295 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1296 return general_operand (op, mode);
1297 op = SUBREG_REG (op);
1300 /* We don't consider registers whose class is NO_REGS
1301 to be a register operand. */
1302 return (GET_CODE (op) == REG
1303 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1304 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1307 /* Return 1 if OP is a valid operand that stands for pushing a
1308 value of mode MODE onto the stack.
1310 The main use of this function is as a predicate in match_operand
1311 expressions in the machine description. */
/* NOTE(review): elided listing -- declarations, braces, "return 0;"
   statements, and the step that strips the MEM to reach its address
   (before the STACK_PUSH_CODE check) are missing from this view.  */
1314 push_operand (op, mode)
1316 enum machine_mode mode;
1318 if (GET_CODE (op) != MEM)
1321 if (mode != VOIDmode && GET_MODE (op) != mode)
1326 if (GET_CODE (op) != STACK_PUSH_CODE)
/* The pre-inc/pre-dec address must operate on the stack pointer.  */
1329 return XEXP (op, 0) == stack_pointer_rtx;
1332 /* Return 1 if OP is a valid operand that stands for popping a
1333 value of mode MODE off the stack.
1335 The main use of this function is as a predicate in match_operand
1336 expressions in the machine description. */
/* NOTE(review): elided listing -- mirror of push_operand above; the
   missing lines presumably strip the MEM before the STACK_POP_CODE
   check.  Confirm against the original recog.c.  */
1339 pop_operand (op, mode)
1341 enum machine_mode mode;
1343 if (GET_CODE (op) != MEM)
1346 if (mode != VOIDmode && GET_MODE (op) != mode)
1351 if (GET_CODE (op) != STACK_POP_CODE)
1354 return XEXP (op, 0) == stack_pointer_rtx;
1357 /* Return 1 if ADDR is a valid memory address for mode MODE. */
/* Uses the target's GO_IF_LEGITIMATE_ADDRESS macro, which jumps to the
   (elided) "win:" label on success.
   NOTE(review): elided listing -- the "rtx addr;" declaration, the
   ADDRESSOF early-return body, the failure "return 0;", and the "win:"
   label are missing from this view.  */
1360 memory_address_p (mode, addr)
1361 enum machine_mode mode ATTRIBUTE_UNUSED;
1364 if (GET_CODE (addr) == ADDRESSOF)
1367 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1374 /* Return 1 if OP is a valid memory reference with mode MODE,
1375 including a valid address.
1377 The main use of this function is as a predicate in match_operand
1378 expressions in the machine description. */
/* NOTE(review): elided listing -- declarations (including the local
   "inner", initialized from OP) and braces are missing from this view. */
1381 memory_operand (op, mode)
1383 enum machine_mode mode;
1387 if (! reload_completed)
1388 /* Note that no SUBREG is a memory operand before end of reload pass,
1389 because (SUBREG (MEM...)) forces reloading into a register. */
1390 return GET_CODE (op) == MEM && general_operand (op, mode);
1392 if (mode != VOIDmode && GET_MODE (op) != mode)
/* After reload, look through a SUBREG to find the underlying MEM.  */
1396 if (GET_CODE (inner) == SUBREG)
1397 inner = SUBREG_REG (inner);
1399 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1402 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1403 that is, a memory reference whose address is a general_operand. */
/* NOTE(review): elided listing -- declarations, braces and some
   statements are missing from this view (line numbers jump).  */
1406 indirect_operand (op, mode)
1408 enum machine_mode mode;
1410 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1411 if (! reload_completed
1412 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1414 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1415 rtx inner = SUBREG_REG (op);
/* On big-endian targets the subreg byte offset is counted from the
   other end; adjust accordingly.  */
1417 if (BYTES_BIG_ENDIAN)
1418 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1419 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1421 if (mode != VOIDmode && GET_MODE (op) != mode)
1424 /* The only way that we can have a general_operand as the resulting
1425 address is if OFFSET is zero and the address already is an operand
1426 or if the address is (plus Y (const_int -OFFSET)) and Y is an
/* NOTE(review): the tail of this comment ("...operand.  */") is elided
   from this view.  */
1429 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1430 || (GET_CODE (XEXP (inner, 0)) == PLUS
1431 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1432 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1433 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1436 return (GET_CODE (op) == MEM
1437 && memory_operand (op, mode)
1438 && general_operand (XEXP (op, 0), Pmode));
1441 /* Return 1 if this is a comparison operator. This allows the use of
1442 MATCH_OPERATOR to recognize all the branch insns. */
/* NOTE(review): elided listing -- declarations and braces missing from
   this view.  GET_RTX_CLASS '<' is the RTL class for comparisons.  */
1445 comparison_operator (op, mode)
1447 enum machine_mode mode;
1449 return ((mode == VOIDmode || GET_MODE (op) == mode)
1450 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1453 /* If BODY is an insn body that uses ASM_OPERANDS,
1454 return the number of operands (both input and output) in the insn.
1455 Otherwise return -1. */
/* NOTE(review): elided listing -- the switch's case labels
   (ASM_OPERANDS, SET, PARALLEL), braces, "return -1;" statements and
   loop-exit breaks are missing from this view; embedded line numbers
   jump.  Only the visible logic is commented below.  */
1458 asm_noperands (body)
1461 switch (GET_CODE (body))
1464 /* No output operands: return number of input operands. */
1465 return ASM_OPERANDS_INPUT_LENGTH (body);
1467 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1468 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1469 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1473 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1474 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1476 /* Multiple output operands, or 1 output plus some clobbers:
1477 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1481 /* Count backwards through CLOBBERs to determine number of SETs. */
1482 for (i = XVECLEN (body, 0); i > 0; i--)
1484 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1486 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1490 /* N_SETS is now number of output operands. */
1493 /* Verify that all the SETs we have
1494 came from a single original asm_operands insn
1495 (so that invalid combinations are blocked). */
1496 for (i = 0; i < n_sets; i++)
1498 rtx elt = XVECEXP (body, 0, i);
1499 if (GET_CODE (elt) != SET)
1501 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1503 /* If these ASM_OPERANDS rtx's came from different original insns
1504 then they aren't allowed together. */
1505 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1506 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
/* Total = inputs of the first asm + one per SET (output).
   NOTE(review): the "+ n_sets" continuation of this return is elided. */
1509 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1512 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1514 /* 0 outputs, but some clobbers:
1515 body is [(asm_operands ...) (clobber (reg ...))...]. */
1518 /* Make sure all the other parallel things really are clobbers. */
1519 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1520 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1523 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1532 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1533 copy its operands (both input and output) into the vector OPERANDS,
1534 the locations of the operands within the insn into the vector OPERAND_LOCS,
1535 and the constraints for the operands into CONSTRAINTS.
1536 Write the modes of the operands into MODES.
1537 Return the assembler-template.
1539 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1540 we don't store that info. */
/* NOTE(review): elided listing -- the return type, the rtx/rtx**
   parameter declarations, braces, null-pointer guards before each
   optional store, and the final "return template;" are missing from
   this view (embedded line numbers jump).  */
1543 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1547 const char **constraints;
1548 enum machine_mode *modes;
1552 const char *template = 0;
1554 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1556 rtx asmop = SET_SRC (body);
1557 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1559 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
/* Inputs occupy slots 1..noperands-1; slot 0 is the output.  */
1561 for (i = 1; i < noperands; i++)
1564 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1566 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1568 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1570 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1573 /* The output is in the SET.
1574 Its constraint is in the ASM_OPERANDS itself. */
1576 operands[0] = SET_DEST (body);
1578 operand_locs[0] = &SET_DEST (body);
1580 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1582 modes[0] = GET_MODE (SET_DEST (body));
1583 template = ASM_OPERANDS_TEMPLATE (asmop);
1585 else if (GET_CODE (body) == ASM_OPERANDS)
1588 /* No output operands: BODY is (asm_operands ....). */
1590 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1592 /* The input operands are found in the 1st element vector. */
1593 /* Constraints for inputs are in the 2nd element vector. */
1594 for (i = 0; i < noperands; i++)
1597 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1599 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1601 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1603 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1605 template = ASM_OPERANDS_TEMPLATE (asmop);
1607 else if (GET_CODE (body) == PARALLEL
1608 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1610 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1611 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1612 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1613 int nout = 0; /* Does not include CLOBBERs. */
1615 /* At least one output, plus some CLOBBERs. */
1617 /* The outputs are in the SETs.
1618 Their constraints are in the ASM_OPERANDS itself. */
1619 for (i = 0; i < nparallel; i++)
1621 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1622 break; /* Past last SET */
1625 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1627 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1629 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1631 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
/* NOTE(review): the "nout++;" that counts outputs is elided here.  */
1635 for (i = 0; i < nin; i++)
1638 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1640 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1642 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1644 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1647 template = ASM_OPERANDS_TEMPLATE (asmop);
1649 else if (GET_CODE (body) == PARALLEL
1650 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1652 /* No outputs, but some CLOBBERs. */
1654 rtx asmop = XVECEXP (body, 0, 0);
1655 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1657 for (i = 0; i < nin; i++)
1660 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1662 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1664 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1666 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1669 template = ASM_OPERANDS_TEMPLATE (asmop);
1675 /* Check if an asm_operand matches it's constraints.
1676 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
/* NOTE(review): elided listing -- the switch statement over constraint
   letters, its case labels ('p', 'm', 'V', 'o', '<', '>', 'E', 'F', 'G',
   'H', 's', 'i', 'n', 'I'..'P', 'X', 'g', 'r', ...), the "result"
   accumulator, braces and returns are missing from this view; only the
   per-letter test bodies survive.  Comments below describe what is
   visible.  */
1679 asm_operand_ok (op, constraint)
1681 const char *constraint;
1685 /* Use constrain_operands after reload. */
1686 if (reload_completed)
1691 char c = *constraint++;
1705 case '0': case '1': case '2': case '3': case '4':
1706 case '5': case '6': case '7': case '8': case '9':
1707 /* For best results, our caller should have given us the
1708 proper matching constraint, but we can't actually fail
1709 the check if they didn't. Indicate that results are
/* NOTE(review): tail of this comment ("inconclusive.") is elided.  */
1715 if (address_operand (op, VOIDmode))
1720 case 'V': /* non-offsettable */
1721 if (memory_operand (op, VOIDmode))
1725 case 'o': /* offsettable */
1726 if (offsettable_nonstrict_memref_p (op))
1731 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1732 excepting those that expand_call created. Further, on some
1733 machines which do not have generalized auto inc/dec, an inc/dec
1734 is not a memory_operand.
1736 Match any memory and hope things are resolved after reload. */
/* '<' constraint: autodecrement memory operand.  */
1738 if (GET_CODE (op) == MEM
1740 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1741 || GET_CODE (XEXP (op, 0)) == POST_DEC))
/* '>' constraint: autoincrement memory operand.  */
1746 if (GET_CODE (op) == MEM
1748 || GET_CODE (XEXP (op, 0)) == PRE_INC
1749 || GET_CODE (XEXP (op, 0)) == POST_INC))
1754 #ifndef REAL_ARITHMETIC
1755 /* Match any floating double constant, but only if
1756 we can examine the bits of it reliably. */
1757 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1758 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1759 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
/* NOTE(review): the matching "#endif" and intermediate statements are
   elided from this view.  */
1765 if (GET_CODE (op) == CONST_DOUBLE)
1770 if (GET_CODE (op) == CONST_DOUBLE
1771 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1775 if (GET_CODE (op) == CONST_DOUBLE
1776 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
/* 's' constraint (visible context): symbolic/constant integer cases.  */
1781 if (GET_CODE (op) == CONST_INT
1782 || (GET_CODE (op) == CONST_DOUBLE
1783 && GET_MODE (op) == VOIDmode))
1789 #ifdef LEGITIMATE_PIC_OPERAND_P
1790 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
/* NOTE(review): matching "#endif" elided.  */
1797 if (GET_CODE (op) == CONST_INT
1798 || (GET_CODE (op) == CONST_DOUBLE
1799 && GET_MODE (op) == VOIDmode))
/* 'I' through 'P': target-defined integer-constant ranges.  */
1804 if (GET_CODE (op) == CONST_INT
1805 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1809 if (GET_CODE (op) == CONST_INT
1810 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1814 if (GET_CODE (op) == CONST_INT
1815 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1819 if (GET_CODE (op) == CONST_INT
1820 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1824 if (GET_CODE (op) == CONST_INT
1825 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1829 if (GET_CODE (op) == CONST_INT
1830 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1834 if (GET_CODE (op) == CONST_INT
1835 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1839 if (GET_CODE (op) == CONST_INT
1840 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
/* 'g' constraint: any general operand.  */
1848 if (general_operand (op, VOIDmode))
1853 /* For all other letters, we first check for a register class,
1854 otherwise it is an EXTRA_CONSTRAINT. */
1855 if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
1858 if (GET_MODE (op) == BLKmode)
1860 if (register_operand (op, VOIDmode))
1863 #ifdef EXTRA_CONSTRAINT
1864 if (EXTRA_CONSTRAINT (op, c))
/* NOTE(review): matching "#endif", loop close and final return are
   elided from this view.  */
1874 /* Given an rtx *P, if it is a sum containing an integer constant term,
1875 return the location (type rtx *) of the pointer to that constant term.
1876 Otherwise, return a null pointer. */
/* NOTE(review): elided listing -- the "static rtx *" return type, local
   "tem" declaration, braces, and several return statements are missing
   from this view.  */
1879 find_constant_term_loc (p)
1883 register enum rtx_code code = GET_CODE (*p);
1885 /* If *P IS such a constant term, P is its location. */
1887 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
/* NOTE(review): continuation of this condition ("|| code == CONST")
   and its "return p;" are elided.  */
1891 /* Otherwise, if not a sum, it has no constant term. */
1893 if (GET_CODE (*p) != PLUS)
1896 /* If one of the summands is constant, return its location. */
1898 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1899 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1902 /* Otherwise, check each summand for containing a constant term. */
1904 if (XEXP (*p, 0) != 0)
1906 tem = find_constant_term_loc (&XEXP (*p, 0));
/* NOTE(review): the "if (tem != 0) return tem;" checks after each
   recursive call are elided from this view.  */
1911 if (XEXP (*p, 1) != 0)
1913 tem = find_constant_term_loc (&XEXP (*p, 1));
1921 /* Return 1 if OP is a memory reference
1922 whose address contains no side effects
1923 and remains valid after the addition
1924 of a positive integer less than the
1925 size of the object being referenced.
1927 We assume that the original address is valid and do not check it.
1929 This uses strict_memory_address_p as a subroutine, so
1930 don't use it before reload. */
/* NOTE(review): elided listing -- return type, "rtx op;" and braces
   missing from this view.  Delegates to offsettable_address_p with
   strictp == 1.  */
1933 offsettable_memref_p (op)
1936 return ((GET_CODE (op) == MEM)
1937 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1940 /* Similar, but don't require a strictly valid mem ref:
1941 consider pseudo-regs valid as index or base regs. */
/* Same as offsettable_memref_p but with strictp == 0, so it is safe to
   use before reload.  NOTE(review): elided listing -- return type and
   declarations missing from this view.  */
1944 offsettable_nonstrict_memref_p (op)
1947 return ((GET_CODE (op) == MEM)
1948 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1951 /* Return 1 if Y is a memory address which contains no side effects
1952 and would remain valid after the addition of a positive integer
1953 less than the size of that mode.
1955 We assume that the original address is valid and do not check it.
1956 We do check that it is valid for narrower modes.
1958 If STRICTP is nonzero, we require a strictly valid address,
1959 for the sake of use in reload.c. */
/* NOTE(review): elided listing -- parameter/local declarations (y, z,
   y1, *y2, good), braces, the BLKmode test guarding the mode_sz
   override, the save/restore of *y2, and several returns are missing
   from this view (embedded line numbers jump).  */
1962 offsettable_address_p (strictp, mode, y)
1964 enum machine_mode mode;
1967 register enum rtx_code ycode = GET_CODE (y);
/* Pick the strict or non-strict address validator up front so the two
   code paths below share one indirect call.  */
1971 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1972 (strictp ? strict_memory_address_p : memory_address_p);
1973 unsigned int mode_sz = GET_MODE_SIZE (mode);
1975 if (CONSTANT_ADDRESS_P (y))
1978 /* Adjusting an offsettable address involves changing to a narrower mode.
1979 Make sure that's OK. */
1981 if (mode_dependent_address_p (y))
1984 /* ??? How much offset does an offsettable BLKmode reference need?
1985 Clearly that depends on the situation in which it's being used.
1986 However, the current situation in which we test 0xffffffff is
1987 less than ideal. Caveat user. */
/* NOTE(review): the "if (mode_sz == 0)" (BLKmode) guard before this
   override appears to be elided.  */
1989 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1991 /* If the expression contains a constant term,
1992 see if it remains valid when max possible offset is added. */
1994 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
/* Temporarily bump the constant term by the largest offset and test.  */
1999 *y2 = plus_constant (*y2, mode_sz - 1);
2000 /* Use QImode because an odd displacement may be automatically invalid
2001 for any wider mode. But it should be valid for a single byte. */
2002 good = (*addressp) (QImode, y);
2004 /* In any case, restore old contents of memory. */
2009 if (GET_RTX_CLASS (ycode) == 'a')
2012 /* The offset added here is chosen as the maximum offset that
2013 any instruction could need to add when operating on something
2014 of the specified mode. We assume that if Y and Y+c are
2015 valid addresses then so is Y+d for all 0<d<c. */
2017 z = plus_constant_for_output (y, mode_sz - 1);
2019 /* Use QImode because an odd displacement may be automatically invalid
2020 for any wider mode. But it should be valid for a single byte. */
2021 return (*addressp) (QImode, z);
2024 /* Return 1 if ADDR is an address-expression whose effect depends
2025 on the mode of the memory reference it is used in.
2027 Autoincrement addressing is a typical example of mode-dependence
2028 because the amount of the increment depends on the mode. */
/* Delegates entirely to the target's GO_IF_MODE_DEPENDENT_ADDRESS macro,
   which jumps to "win" when the address IS mode-dependent.
   NOTE(review): elided listing -- the "return 0;" fall-through and the
   "return 1;" under the win: label are missing from this view.  */
2031 mode_dependent_address_p (addr)
2032 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
2034 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2036 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2037 win: ATTRIBUTE_UNUSED_LABEL
2041 /* Return 1 if OP is a general operand
2042 other than a memory ref with a mode dependent address. */
/* NOTE(review): elided listing -- "rtx op;" declaration, braces, the
   "return 1;" successes and the "return 0;" under the lose: label are
   missing from this view.  GO_IF_MODE_DEPENDENT_ADDRESS jumps to
   "lose" when the address is mode-dependent.  */
2045 mode_independent_operand (op, mode)
2046 enum machine_mode mode;
2051 if (! general_operand (op, mode))
2054 if (GET_CODE (op) != MEM)
2057 addr = XEXP (op, 0);
2058 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2060 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2061 lose: ATTRIBUTE_UNUSED_LABEL
2065 /* Given an operand OP that is a valid memory reference which
2066 satisfies offsettable_memref_p, return a new memory reference whose
2067 address has been adjusted by OFFSET. OFFSET should be positive and
2068 less than the size of the object referenced. */
/* NOTE(review): elided listing -- declarations ("rtx op; int offset;",
   local "new"), braces, the abort() on a non-MEM argument, the copy of
   the address before editing its constant term, and the "return new;"
   statements are missing from this view.  */
2071 adj_offsettable_operand (op, offset)
2075 register enum rtx_code code = GET_CODE (op);
2079 register rtx y = XEXP (op, 0);
2082 if (CONSTANT_ADDRESS_P (y))
/* Constant address: just fold the offset into a new constant MEM.  */
2084 new = gen_rtx_MEM (GET_MODE (op),
2085 plus_constant_for_output (y, offset));
2086 MEM_COPY_ATTRIBUTES (new, op);
2090 if (GET_CODE (y) == PLUS)
2093 register rtx *const_loc;
/* PLUS address: locate its constant term and add OFFSET into it.  */
2097 const_loc = find_constant_term_loc (&z);
2100 *const_loc = plus_constant_for_output (*const_loc, offset);
/* Fallback: wrap the whole address in a new (plus y offset).  */
2105 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
2106 MEM_COPY_ATTRIBUTES (new, op);
2112 /* Like extract_insn, but save insn extracted and don't extract again, when
2113 called again for the same insn expecting that recog_data still contain the
2114 valid information. This is used primary by gen_attr infrastructure that
2115 often does extract insn again and again. */
/* NOTE(review): elided listing -- the early "return;" taken when the
   cache hit test succeeds appears to be missing from this view; as
   written the guard would gate the extract_insn call itself, which
   inverts the caching.  Confirm against the original recog.c.  */
2117 extract_insn_cached (insn)
2120 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2122 extract_insn (insn);
2123 recog_data.insn = insn;
2125 /* Do cached extract_insn, constrain_operand and complain about failures.
2126 Used by insn_attrtab. */
/* Extracts (cached), then constrains operands only if no alternative
   has been chosen yet; aborts compilation if constraining fails.
   NOTE(review): elided listing -- return type, "rtx insn;" and braces
   missing from this view.  */
2128 extract_constrain_insn_cached (insn)
2131 extract_insn_cached (insn);
2132 if (which_alternative == -1
2133 && !constrain_operands (reload_completed))
2134 fatal_insn_not_found (insn);
2136 /* Do cached constrain_operand and complain about failures. */
/* Returns the cached success (nonzero) when an alternative was already
   matched; otherwise runs constrain_operands.
   NOTE(review): elided listing -- the "return 1;" (cache-hit path) and
   declarations are missing from this view.  */
2138 constrain_operands_cached (strict)
2141 if (which_alternative == -1)
2142 return constrain_operands (strict);
2147 /* Analyze INSN and fill in recog_data. */
/* NOTE(review): the function header "extract_insn (insn)" itself and
   the declarations of i, icode, noperands are elided from this view
   (source lines 2148-2155 are missing); only the body survives.  */
2156 rtx body = PATTERN (insn);
/* Reset the global recog_data before repopulating it.  */
2158 recog_data.insn = NULL;
2159 recog_data.n_operands = 0;
2160 recog_data.n_alternatives = 0;
2161 recog_data.n_dups = 0;
2162 which_alternative = -1;
2164 switch (GET_CODE (body))
/* NOTE(review): the case labels (USE/CLOBBER/ASM_INPUT/..., SET,
   PARALLEL, ASM_OPERANDS, default) are elided from this view.  */
2174 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2179 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2180 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2181 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2187 recog_data.n_operands = noperands = asm_noperands (body);
2190 /* This insn is an `asm' with operands. */
2192 /* expand_asm_operands makes sure there aren't too many operands. */
2193 if (noperands > MAX_RECOG_OPERANDS)
2196 /* Now get the operand values and constraints out of the insn. */
2197 decode_asm_operands (body, recog_data.operand,
2198 recog_data.operand_loc,
2199 recog_data.constraints,
2200 recog_data.operand_mode)
2203 const char *p = recog_data.constraints[0];
2204 recog_data.n_alternatives = 1;
/* Count alternatives by counting commas in the first constraint.  */
2206 recog_data.n_alternatives += (*p++ == ',');
2210 fatal_insn_not_found (insn);
2214 /* Ordinary insn: recognize it, get the operands via insn_extract
2215 and get the constraints. */
2217 icode = recog_memoized (insn);
2219 fatal_insn_not_found (insn);
2221 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2222 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2223 recog_data.n_dups = insn_data[icode].n_dups;
2225 insn_extract (insn);
2227 for (i = 0; i < noperands; i++)
2229 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2230 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2231 /* VOIDmode match_operands gets mode from their real operand. */
2232 if (recog_data.operand_mode[i] == VOIDmode)
2233 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
/* Classify each operand from the first character of its constraint:
   '=' marks an output, '+' an in-out, anything else an input.  */
2236 for (i = 0; i < noperands; i++)
2237 recog_data.operand_type[i]
2238 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2239 : recog_data.constraints[i][0] == '+' ? OP_INOUT
/* NOTE(review): the ": OP_IN);" tail of this conditional is elided.  */
2242 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2246 /* After calling extract_insn, you can use this function to extract some
2247 information from the constraint strings into a more usable form.
2248 The collected data is stored in recog_op_alt. */
/* NOTE(review): elided listing -- the "void" header line, declarations
   of i/j/c, the switch statement wrapper around the constraint-letter
   dispatch, its case labels ('?', '!', '&', 'p', 'm', '<', '>', 'V',
   'o', 'X', 'r'/class letters, default), braces and breaks are missing
   from this view.  Only the per-letter actions survive.  */
2250 preprocess_constraints ()
2254 memset (recog_op_alt, 0, sizeof recog_op_alt);
2255 for (i = 0; i < recog_data.n_operands; i++)
2258 struct operand_alternative *op_alt;
2259 const char *p = recog_data.constraints[i];
2261 op_alt = recog_op_alt[i];
/* Walk the comma-separated alternatives of this operand's constraint. */
2263 for (j = 0; j < recog_data.n_alternatives; j++)
2265 op_alt[j].class = NO_REGS;
2266 op_alt[j].constraint = p;
2267 op_alt[j].matches = -1;
2268 op_alt[j].matched = -1;
2270 if (*p == '\0' || *p == ',')
/* An empty alternative accepts anything.  */
2272 op_alt[j].anything_ok = 1;
2282 while (c != ',' && c != '\0');
2283 if (c == ',' || c == '\0')
2288 case '=': case '+': case '*': case '%':
2289 case 'E': case 'F': case 'G': case 'H':
2290 case 's': case 'i': case 'n':
2291 case 'I': case 'J': case 'K': case 'L':
2292 case 'M': case 'N': case 'O': case 'P':
2293 /* These don't say anything we care about. */
/* '?' adds a small reject penalty; '!' a prohibitive one.  */
2297 op_alt[j].reject += 6;
2300 op_alt[j].reject += 600;
2303 op_alt[j].earlyclobber = 1;
2306 case '0': case '1': case '2': case '3': case '4':
2307 case '5': case '6': case '7': case '8': case '9':
/* Digit constraint: this operand must match operand C - '0'; record
   the pairing in both directions.  */
2308 op_alt[j].matches = c - '0';
2309 recog_op_alt[op_alt[j].matches][j].matched = i;
2313 op_alt[j].memory_ok = 1;
2316 op_alt[j].decmem_ok = 1;
2319 op_alt[j].incmem_ok = 1;
2322 op_alt[j].nonoffmem_ok = 1;
2325 op_alt[j].offmem_ok = 1;
2328 op_alt[j].anything_ok = 1;
/* 'p': address operand -- union in the base-register class.  */
2332 op_alt[j].is_address = 1;
2333 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2337 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
/* default: a target-specific register-class letter.  */
2341 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2349 /* Check the operands of an insn against the insn's operand constraints
2350 and return 1 if they are valid.
2351 The information about the insn's operands, constraints, operand modes
2352 etc. is obtained from the global variables set up by extract_insn.
2354 WHICH_ALTERNATIVE is set to a number which indicates which
2355 alternative of constraints was matched: 0 for the first alternative,
2356 1 for the next, etc.
2358 In addition, when two operands are match
2359 and it happens that the output operand is (reg) while the
2360 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2361 make the output operand look like the input.
2362 This is because the output operand is the one the template will print.
2364 This is used in final, just before printing the assembler code and by
2365 the routines that determine an insn's attribute.
2367 If STRICT is a positive non-zero value, it means that we have been
2368 called after reload has been completed. In that case, we must
2369 do all checks strictly. If it is zero, it means that we have been called
2370 before reload has completed. In that case, we first try to see if we can
2371 find an alternative that matches strictly. If not, we try again, this
2372 time assuming that reload will fix up the insn. This provides a "best
2373 guess" for the alternative and is used to compute attributes of insns prior
2374 to reload. A negative value of STRICT is used for this internal call. */
/* NOTE(review): elided listing -- the "struct funny_match" definition,
   declarations (opno, eopno, c, val, win, lose, offset), the outer
   do-loop header, the switch's case labels ('p', 'g', 'r', 'X', 'm',
   '<', '>', 'E', 'F', 'G', 'H', 's', 'i', 'n', 'I'..'P', 'V', 'o',
   default), many "win = 1;"/"break;" lines, the "lose:" handling, and
   the success/failure returns are missing from this view (embedded
   line numbers jump).  Only the per-letter tests survive.  */
2382 constrain_operands (strict)
2385 const char *constraints[MAX_RECOG_OPERANDS];
2386 int matching_operands[MAX_RECOG_OPERANDS];
2387 int earlyclobber[MAX_RECOG_OPERANDS];
2390 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2391 int funny_match_index;
2393 which_alternative = 0;
2394 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2397 for (c = 0; c < recog_data.n_operands; c++)
2399 constraints[c] = recog_data.constraints[c];
2400 matching_operands[c] = -1;
2407 funny_match_index = 0;
2409 for (opno = 0; opno < recog_data.n_operands; opno++)
2411 register rtx op = recog_data.operand[opno];
2412 enum machine_mode mode = GET_MODE (op);
2413 register const char *p = constraints[opno];
2418 earlyclobber[opno] = 0;
2420 /* A unary operator may be accepted by the predicate, but it
2421 is irrelevant for matching constraints. */
2422 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
/* NOTE(review): "op = XEXP (op, 0);" elided here.  */
2425 if (GET_CODE (op) == SUBREG)
2427 if (GET_CODE (SUBREG_REG (op)) == REG
2428 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2429 offset = SUBREG_WORD (op);
2430 op = SUBREG_REG (op);
2433 /* An empty constraint or empty alternative
2434 allows anything which matched the pattern. */
2435 if (*p == 0 || *p == ',')
2438 while (*p && (c = *p++) != ',')
2441 case '?': case '!': case '*': case '%':
/* These modifiers carry no constraint-checking meaning here.  */
2446 /* Ignore rest of this alternative as far as
2447 constraint checking is concerned. */
2448 while (*p && *p != ',')
2453 earlyclobber[opno] = 1;
2456 case '0': case '1': case '2': case '3': case '4':
2457 case '5': case '6': case '7': case '8': case '9':
2459 /* This operand must be the same as a previous one.
2460 This kind of constraint is used for instructions such
2461 as add when they take only two operands.
2463 Note that the lower-numbered operand is passed first.
2465 If we are not testing strictly, assume that this constraint
2466 will be satisfied. */
2471 rtx op1 = recog_data.operand[c - '0'];
2472 rtx op2 = recog_data.operand[opno];
2474 /* A unary operator may be accepted by the predicate,
2475 but it is irrelevant for matching constraints. */
2476 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2477 op1 = XEXP (op1, 0);
2478 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2479 op2 = XEXP (op2, 0);
2481 val = operands_match_p (op1, op2);
2484 matching_operands[opno] = c - '0';
2485 matching_operands[c - '0'] = opno;
2489 /* If output is *x and input is *--x,
2490 arrange later to change the output to *--x as well,
2491 since the output op is the one that will be printed. */
2492 if (val == 2 && strict > 0)
2494 funny_match[funny_match_index].this = opno;
2495 funny_match[funny_match_index++].other = c - '0';
2500 /* p is used for address_operands. When we are called by
2501 gen_reload, no one will have checked that the address is
2502 strictly valid, i.e., that all pseudos requiring hard regs
2503 have gotten them. */
2505 || (strict_memory_address_p (recog_data.operand_mode[opno],
/* NOTE(review): the rest of this condition and its "win = 1;" are
   elided from this view.  */
2510 /* No need to check general_operand again;
2511 it was done in insn-recog.c. */
2513 /* Anything goes unless it is a REG and really has a hard reg
2514 but the hard reg is not in the class GENERAL_REGS. */
2516 || GENERAL_REGS == ALL_REGS
2517 || GET_CODE (op) != REG
2518 || (reload_in_progress
2519 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2520 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2525 /* This is used for a MATCH_SCRATCH in the cases when
2526 we don't actually need anything. So anything goes
/* NOTE(review): tail of this comment ("any time.") elided.  */
2532 if (GET_CODE (op) == MEM
2533 /* Before reload, accept what reload can turn into mem. */
2534 || (strict < 0 && CONSTANT_P (op))
2535 /* During reload, accept a pseudo */
2536 || (reload_in_progress && GET_CODE (op) == REG
2537 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2542 if (GET_CODE (op) == MEM
2543 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2544 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2549 if (GET_CODE (op) == MEM
2550 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2551 || GET_CODE (XEXP (op, 0)) == POST_INC))
2556 #ifndef REAL_ARITHMETIC
2557 /* Match any CONST_DOUBLE, but only if
2558 we can examine the bits of it reliably. */
2559 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2560 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2561 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
/* NOTE(review): matching "#endif" elided from this view.  */
2564 if (GET_CODE (op) == CONST_DOUBLE)
2569 if (GET_CODE (op) == CONST_DOUBLE)
2575 if (GET_CODE (op) == CONST_DOUBLE
2576 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2581 if (GET_CODE (op) == CONST_INT
2582 || (GET_CODE (op) == CONST_DOUBLE
2583 && GET_MODE (op) == VOIDmode))
2586 if (CONSTANT_P (op))
2591 if (GET_CODE (op) == CONST_INT
2592 || (GET_CODE (op) == CONST_DOUBLE
2593 && GET_MODE (op) == VOIDmode))
2605 if (GET_CODE (op) == CONST_INT
2606 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
/* 'V': a memory operand that is NOT offsettable.  */
2611 if (GET_CODE (op) == MEM
2612 && ((strict > 0 && ! offsettable_memref_p (op))
2614 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2615 || (reload_in_progress
2616 && !(GET_CODE (op) == REG
2617 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
/* 'o': an offsettable memory operand.  */
2622 if ((strict > 0 && offsettable_memref_p (op))
2623 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2624 /* Before reload, accept what reload can handle. */
2626 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2627 /* During reload, accept a pseudo */
2628 || (reload_in_progress && GET_CODE (op) == REG
2629 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* default: register-class letters and EXTRA_CONSTRAINT letters.  */
2635 enum reg_class class;
2637 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2638 if (class != NO_REGS)
2642 && GET_CODE (op) == REG
2643 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2644 || (strict == 0 && GET_CODE (op) == SCRATCH)
2645 || (GET_CODE (op) == REG
2646 && reg_fits_class_p (op, class, offset, mode)))
2649 #ifdef EXTRA_CONSTRAINT
2650 else if (EXTRA_CONSTRAINT (op, c))
/* NOTE(review): matching "#endif" elided from this view.  */
2657 constraints[opno] = p;
2658 /* If this operand did not win somehow,
2659 this alternative loses. */
2663 /* This alternative won; the operands are ok.
2664 Change whichever operands this alternative says to change. */
2669 /* See if any earlyclobber operand conflicts with some other
/* NOTE(review): tail of this comment ("operand.") elided.  */
2673 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2674 /* Ignore earlyclobber operands now in memory,
2675 because we would often report failure when we have
2676 two memory operands, one of which was formerly a REG. */
2677 if (earlyclobber[eopno]
2678 && GET_CODE (recog_data.operand[eopno]) == REG)
2679 for (opno = 0; opno < recog_data.n_operands; opno++)
2680 if ((GET_CODE (recog_data.operand[opno]) == MEM
2681 || recog_data.operand_type[opno] != OP_OUT)
2683 /* Ignore things like match_operator operands. */
2684 && *recog_data.constraints[opno] != 0
2685 && ! (matching_operands[opno] == eopno
2686 && operands_match_p (recog_data.operand[opno],
2687 recog_data.operand[eopno]))
2688 && ! safe_from_earlyclobber (recog_data.operand[opno],
2689 recog_data.operand[eopno]))
/* Apply deferred output-operand substitutions recorded above
   (output printed as the auto-inc/dec input form).  */
2694 while (--funny_match_index >= 0)
2696 recog_data.operand[funny_match[funny_match_index].other]
2697 = recog_data.operand[funny_match[funny_match_index].this];
2704 which_alternative++;
2706 while (which_alternative < recog_data.n_alternatives);
2708 which_alternative = -1;
2709 /* If we are about to reject this, but we are not to test strictly,
2710 try a very loose test. Only return failure if it fails also. */
2712 return constrain_operands (-1);
2717 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2718 is a hard reg in class CLASS when its regno is offset by OFFSET
2719 and changed to mode MODE.
2720 If REG occupies multiple hard regs, all of them must be in CLASS. */
/* NOTE(review): elided listing -- the "rtx operand;"/"int offset;"
   declarations, braces, the loop body that fails on an out-of-class
   register, and the final returns are missing from this view.  */
2723 reg_fits_class_p (operand, class, offset, mode)
2725 register enum reg_class class;
2727 enum machine_mode mode;
2729 register int regno = REGNO (operand);
2730 if (regno < FIRST_PSEUDO_REGISTER
2731 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
/* NOTE(review): continuation "regno + offset)" elided here.  */
2736 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
/* Every hard register the value occupies must be in CLASS.  */
2738 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2747 /* Split all insns in the function. If UPD_LIFE, update life info after. */
/* NOTE(review): many interior lines (braces, local declarations, and several
   statements) are elided in this extract; visible code kept verbatim.
   BLOCKS is a bitmap of basic blocks in which some insn was split, so that
   life information can later be recomputed only where needed.  */
2750 split_all_insns (upd_life)
2757   blocks = sbitmap_alloc (n_basic_blocks);
2758   sbitmap_zero (blocks);
/* Walk every basic block, and within each block every insn.  */
2761   for (i = n_basic_blocks - 1; i >= 0; --i)
2763       basic_block bb = BASIC_BLOCK (i);
2766       for (insn = bb->head; insn ; insn = next)
2770 	  /* Can't use `next_real_insn' because that might go across
2771 	     CODE_LABELS and short-out basic blocks. */
2772 	  next = NEXT_INSN (insn);
2773 	  if (! INSN_P (insn))
2776 	  /* Don't split no-op move insns. These should silently
2777 	     disappear later in final. Splitting such insns would
2778 	     break the code that handles REG_NO_CONFLICT blocks. */
2780 	  else if ((set = single_set (insn)) != NULL
2781 		   && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
2783 	      /* Nops get in the way while scheduling, so delete them
2784 		 now if register allocation has already been done. It
2785 		 is too risky to try to do this before register
2786 		 allocation, and there are unlikely to be very many
2787 		 nops then anyways. */
2788 	      if (reload_completed)
/* Delete the nop by turning it into a deleted-insn NOTE in place.  */
2790 		  PUT_CODE (insn, NOTE);
2791 		  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2792 		  NOTE_SOURCE_FILE (insn) = 0;
2797 	      /* Split insns here to get max fine-grain parallelism. */
2798 	      rtx first = PREV_INSN (insn);
2799 	      rtx last = try_split (PATTERN (insn), insn, 1);
/* A split happened in this block; remember it for the life update below.  */
2803 		  SET_BIT (blocks, i);
2806 		  /* try_split returns the NOTE that INSN became. */
2807 		  PUT_CODE (insn, NOTE);
2808 		  NOTE_SOURCE_FILE (insn) = 0;
2809 		  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2811 		  /* ??? Coddle to md files that generate subregs in post-
2812 		     reload splitters instead of computing the proper
2814 		  if (reload_completed && first != last)
2816 		      first = NEXT_INSN (first);
/* Fix up (subreg ...) operands produced by post-reload splitters -- loop
   structure around these calls is elided here; TODO confirm.  */
2820 			  cleanup_subreg_operands (first);
2823 			  first = NEXT_INSN (first);
2827 	  if (insn == bb->end)
2835       if (insn == bb->end)
2839       /* ??? When we're called from just after reload, the CFG is in bad
2840 	 shape, and we may have fallen off the end. This could be fixed
2841 	 by having reload not try to delete unreachable code. Otherwise
2842 	 assert we found the end insn. */
2843       if (insn == NULL && upd_life)
/* Recompute life information, but only over the blocks that changed.  */
2847   if (changed && upd_life)
2849       compute_bb_for_insn (get_max_uid ());
2850       count_or_remove_death_notes (blocks, 1);
2851       update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2854   sbitmap_free (blocks);
2857 #ifdef HAVE_peephole2
/* Per-slot bookkeeping for the peephole2 sliding window: each entry pairs
   an insn with the register-live set before it.  NOTE(review): the struct
   members themselves are elided in this extract; confirm against source.  */
2858 struct peep2_insn_data
/* The window is a circular buffer of MAX_INSNS_PER_PEEP2 + 1 slots;
   PEEP2_CURRENT indexes the slot of the current insn.  */
2864 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2865 static int peep2_current;
2867 /* A non-insn marker indicating the last insn of the block.
2868 The live_before regset for this element is correct, indicating
2869 global_live_at_end for the block. */
2870 #define PEEP2_EOB pc_rtx
2872 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2873 does not exist. Used by the recognizer to find the next insn to match
2874 in a multi-insn pattern. */
/* NOTE(review): the function signature (presumably peep2_next_insn) and
   several statements are elided here; visible body kept verbatim.  */
2880   if (n >= MAX_INSNS_PER_PEEP2 + 1)
/* Wrap the index around the circular buffer.  */
2884   if (n >= MAX_INSNS_PER_PEEP2 + 1)
2885     n -= MAX_INSNS_PER_PEEP2 + 1;
/* The end-of-block marker is not a real insn to hand back.  */
2887   if (peep2_insn_data[n].insn == PEEP2_EOB)
2889   return peep2_insn_data[n].insn;
2892 /* Return true if REGNO is dead before the Nth non-note insn
/* NOTE(review): return type, parameter declarations, and the early-exit
   bodies are elided in this extract; visible code kept verbatim.  */
2896 peep2_regno_dead_p (ofs, regno)
/* OFS is relative to the current insn; reject out-of-window offsets.  */
2900   if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
/* Convert the relative offset to a circular-buffer index.  */
2903   ofs += peep2_current;
2904   if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2905     ofs -= MAX_INSNS_PER_PEEP2 + 1;
/* An empty slot means no insn was recorded there.  */
2907   if (peep2_insn_data[ofs].insn == NULL_RTX)
/* Dead iff the register is absent from the live-before set of that slot.  */
2910   return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2913 /* Similarly for a REG. */
/* Return true if every hard register occupied by REG is dead before the
   Nth non-note insn.  NOTE(review): return type, parameter declarations,
   the loop around the final test, and the return statements are elided in
   this extract; visible code kept verbatim.  */
2916 peep2_reg_dead_p (ofs, reg)
2922   if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
/* Convert the relative offset to a circular-buffer index.  */
2925   ofs += peep2_current;
2926   if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2927     ofs -= MAX_INSNS_PER_PEEP2 + 1;
2929   if (peep2_insn_data[ofs].insn == NULL_RTX)
/* REG may span several hard registers; check each one's liveness.  */
2932   regno = REGNO (reg);
2933   n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2935     if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2940 /* Try to find a hard register of mode MODE, matching the register class in
2941 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2942 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2943 in which case the only condition is that the register must be available
2944 before CURRENT_INSN.
2945 Registers that already have bits set in REG_SET will not be considered.
2947 If an appropriate register is available, it will be returned and the
2948 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
/* NOTE(review): several interior lines (return type, FROM/TO parameter
   declarations, some locals, early-exit bodies, loop braces) are elided in
   this extract; visible code kept verbatim.  */
2952 peep2_find_free_register (from, to, class_str, mode, reg_set)
2954      const char *class_str;
2955      enum machine_mode mode;
2956      HARD_REG_SET *reg_set;
/* SEARCH_OFS is static so successive calls start scanning at different
   registers, distributing allocations across the register file.  */
2958   static int search_ofs;
2959   enum reg_class class;
2963   if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
/* Convert both window offsets to circular-buffer indices.  */
2966   from += peep2_current;
2967   if (from >= MAX_INSNS_PER_PEEP2 + 1)
2968     from -= MAX_INSNS_PER_PEEP2 + 1;
2969   to += peep2_current;
2970   if (to >= MAX_INSNS_PER_PEEP2 + 1)
2971     to -= MAX_INSNS_PER_PEEP2 + 1;
2973   if (peep2_insn_data[from].insn == NULL_RTX)
2975   REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
/* Accumulate liveness over every slot from FROM up to TO, so the chosen
   register is free across the whole range.  */
2979       HARD_REG_SET this_live;
2981       if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2983       if (peep2_insn_data[from].insn == NULL_RTX)
2985       REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2986       IOR_HARD_REG_SET (live, this_live);
/* Map the constraint letter to a register class ('r' = GENERAL_REGS).  */
2989   class = (class_str[0] == 'r' ? GENERAL_REGS
2990 	   : REG_CLASS_FROM_LETTER (class_str[0]));
2992   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2994       int raw_regno, regno, success, j;
2996       /* Distribute the free registers as much as possible. */
2997       raw_regno = search_ofs + i;
2998       if (raw_regno >= FIRST_PSEUDO_REGISTER)
2999 	raw_regno -= FIRST_PSEUDO_REGISTER;
3000 #ifdef REG_ALLOC_ORDER
3001       regno = reg_alloc_order[raw_regno];
3006       /* Don't allocate fixed registers. */
3007       if (fixed_regs[regno])
3009       /* Make sure the register is of the right class. */
3010       if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
3012       /* And can support the mode we need. */
3013       if (! HARD_REGNO_MODE_OK (regno, mode))
3015       /* And that we don't create an extra save/restore. */
3016       if (! call_used_regs[regno] && ! regs_ever_live[regno])
3018       /* And we don't clobber traceback for noreturn functions. */
3019       if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3020 	  && (! reload_completed || frame_pointer_needed))
/* Every hard reg the value would occupy must be unclaimed and not live.  */
3024       for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3026 	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3027 	      || TEST_HARD_REG_BIT (live, regno + j))
/* Success: claim the register(s) in the caller's REG_SET.  */
3035 	  for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3036 	    SET_HARD_REG_BIT (*reg_set, regno + j);
3038 	  /* Start the next search with the next register. */
3039 	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3041 	  search_ofs = raw_regno;
3043 	  return gen_rtx_REG (mode, regno);
3051 /* Perform the peephole2 optimization pass. */
/* NOTE(review): this extract elides many interior lines (return type, local
   declarations, braces, and several statements); visible code kept verbatim.
   DUMP_FILE is accepted but unused here (ATTRIBUTE_UNUSED).  */
3054 peephole2_optimize (dump_file)
3055      FILE *dump_file ATTRIBUTE_UNUSED;
3057   regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3061 #ifdef HAVE_conditional_execution
3066   /* Initialize the regsets we're going to use. */
3067   for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3068     peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3069   live = INITIALIZE_REG_SET (rs_heads[i]);
/* With conditional execution, life info cannot be backed up locally after a
   replacement, so track modified blocks and fix life info at the end.  */
3071 #ifdef HAVE_conditional_execution
3072   blocks = sbitmap_alloc (n_basic_blocks);
3073   sbitmap_zero (blocks);
3076   count_or_remove_death_notes (NULL, 1);
/* Process blocks (and insns within them) back to front, maintaining the
   sliding window of recent insns plus liveness via backward propagation.  */
3079   for (b = n_basic_blocks - 1; b >= 0; --b)
3081       basic_block bb = BASIC_BLOCK (b);
3082       struct propagate_block_info *pbi;
3084       /* Indicate that all slots except the last holds invalid data. */
3085       for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3086 	peep2_insn_data[i].insn = NULL_RTX;
3088       /* Indicate that the last slot contains live_after data. */
3089       peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3090       peep2_current = MAX_INSNS_PER_PEEP2;
3092       /* Start up propagation. */
3093       COPY_REG_SET (live, bb->global_live_at_end);
3094       COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3096 #ifdef HAVE_conditional_execution
3097       pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3099       pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3102       for (insn = bb->end; ; insn = prev)
3104 	  prev = PREV_INSN (insn);
3110 	      /* Record this insn. */
/* Slots are filled moving backward; decrementing with wraparound keeps
   peep2_current at the most recently seen (earliest) insn.  */
3111 	      if (--peep2_current < 0)
3112 		peep2_current = MAX_INSNS_PER_PEEP2;
3113 	      peep2_insn_data[peep2_current].insn = insn;
3114 	      propagate_one_insn (pbi, insn);
3115 	      COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3117 	      /* Match the peephole. */
3118 	      try = peephole2_insns (PATTERN (insn), insn, &match_len);
/* A match covers MATCH_LEN insns starting at the current slot; compute the
   circular index of the last matched insn.  */
3121 		  i = match_len + peep2_current;
3122 		  if (i >= MAX_INSNS_PER_PEEP2 + 1)
3123 		    i -= MAX_INSNS_PER_PEEP2 + 1;
3125 		  /* Replace the old sequence with the new. */
3126 		  flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
3127 		  try = emit_insn_after (try, prev);
3129 		  /* Adjust the basic block boundaries. */
3130 		  if (peep2_insn_data[i].insn == bb->end)
3132 		  if (insn == bb->head)
3133 		    bb->head = NEXT_INSN (prev);
3135 #ifdef HAVE_conditional_execution
3136 		  /* With conditional execution, we cannot back up the
3137 		     live information so easily, since the conditional
3138 		     death data structures are not so self-contained.
3139 		     So record that we've made a modification to this
3140 		     block and update life information at the end. */
3141 		  SET_BIT (blocks, b);
/* Invalidate the whole window; the replacement insns have no slots yet.  */
3144 		  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3145 		    peep2_insn_data[i].insn = NULL_RTX;
3146 		  peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3148 		  /* Back up lifetime information past the end of the
3149 		     newly created sequence. */
3150 		  if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3152 		  COPY_REG_SET (live, peep2_insn_data[i].live_before);
3154 		  /* Update life information for the new sequence. */
/* Walk the new insns backward, recording each in the window and
   propagating liveness through it -- loop structure elided here.  */
3160 			  i = MAX_INSNS_PER_PEEP2;
3161 			  peep2_insn_data[i].insn = try;
3162 			  propagate_one_insn (pbi, try);
3163 			  COPY_REG_SET (peep2_insn_data[i].live_before, live);
3165 		      try = PREV_INSN (try);
3167 		  while (try != prev);
3169 		  /* ??? Should verify that LIVE now matches what we
3170 		     had before the new sequence. */
3177 	  if (insn == bb->head)
3181       free_propagate_block_info (pbi);
/* Release all the regsets allocated at function entry.  */
3184   for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3185     FREE_REG_SET (peep2_insn_data[i].live_before);
3186   FREE_REG_SET (live);
/* Deferred life-info repair for the conditional-execution case.  */
3188 #ifdef HAVE_conditional_execution
3189   count_or_remove_death_notes (blocks, 1);
3190   update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3191   sbitmap_free (blocks);
3194 #endif /* HAVE_peephole2 */