1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "hard-reg-set.h"
36 #include "basic-block.h"
/* Default the RTL codes used for pushing/popping the stack from the
   target's stack-growth direction.  NOTE(review): the matching #else /
   #endif lines are not visible in this excerpt — presumably they select
   the opposite code when STACK_GROWS_DOWNWARD is undefined; confirm
   against the full file.  */
40 #ifndef STACK_PUSH_CODE
41 #ifdef STACK_GROWS_DOWNWARD
42 #define STACK_PUSH_CODE PRE_DEC
44 #define STACK_PUSH_CODE PRE_INC
48 #ifndef STACK_POP_CODE
49 #ifdef STACK_GROWS_DOWNWARD
50 #define STACK_POP_CODE POST_INC
52 #define STACK_POP_CODE POST_DEC
/* Forward declarations for the static helpers defined later in this
   file.  PARAMS is the old-style prototype wrapper used for pre-ANSI
   compatibility.  */
56 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
57 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
58 static rtx *find_constant_term_loc PARAMS ((rtx *));
59 static void validate_replace_src_1 PARAMS ((rtx *, void *));
61 /* Nonzero means allow operands to be volatile.
62 This should be 0 if you are generating rtl, such as if you are calling
63 the functions in optabs.c and expmed.c (most of the time).
64 This should be 1 if all valid insns need to be recognized,
65 such as in regclass.c and final.c and reload.c.
67 init_recog and init_recog_no_volatile are responsible for setting this. */
/* NOTE(review): the `volatile_ok' definition itself is not visible in
   this excerpt; it is read below in general_operand.  */
/* Scratch data describing the operands of the insn most recently
   extracted; filled in by extract_insn and read by constrain_operands.  */
71 struct recog_data recog_data;
73 /* Contains a vector of operand_alternative structures for every operand.
74 Set up by preprocess_constraints. */
75 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
77 /* On return from `constrain_operands', indicate which alternative
80 int which_alternative;
82 /* Nonzero after end of reload pass.
83 Set to 1 or 0 by toplev.c.
84 Controls the significance of (SUBREG (MEM)). */
/* NOTE(review): the `reload_completed' definition this comment refers
   to is not visible in this excerpt.  */
88 /* Initialize data used by the function `recog'.
89 This must be called once in the compilation of a function
90 before any insn recognition may be done in the function. */
/* NOTE(review): the return type and body are missing from this excerpt;
   presumably it clears volatile_ok — confirm against the full file.  */
93 init_recog_no_volatile ()
104 /* Try recognizing the instruction INSN,
105 and return the code number that results.
106 Remember the code so that repeated calls do not
107 need to spend the time for actual rerecognition.
109 This function is the normal interface to instruction recognition.
110 The automatically-generated function `recog' is normally called
111 through this one. (The only exception is in combine.c.) */
114 recog_memoized_1 (insn)
/* Memoization: INSN_CODE < 0 marks "not yet recognized"; cache the
   recog result in the insn itself so later calls are free.  */
117 if (INSN_CODE (insn) < 0)
118 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
119 return INSN_CODE (insn);
122 /* Check that X is an insn-body for an `asm' with operands
123 and that the operands mentioned in it are legitimate.
    Returns nonzero if every operand satisfies its constraint.  */
126 check_asm_operands (x)
131 const char **constraints;
134 /* Post-reload, be more strict with things. */
135 if (reload_completed)
137 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
138 extract_insn (make_insn_raw (x));
139 constrain_operands (1);
140 return which_alternative >= 0;
143 noperands = asm_noperands (x);
/* NOTE(review): the early-return for noperands < 0 / == 0 is not
   visible in this excerpt.  */
149 operands = (rtx *) alloca (noperands * sizeof (rtx));
150 constraints = (const char **) alloca (noperands * sizeof (char *));
152 decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);
154 for (i = 0; i < noperands; i++)
156 const char *c = constraints[i];
/* A constraint that is a single digit is a matching constraint:
   re-use the constraint string of the operand it matches.  */
159 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
160 c = constraints[c[0] - '0'];
162 if (! asm_operand_ok (operands[i], c))
169 /* Static data for the next two routines.
    Pending changes recorded by validate_change and committed or rolled
    back by apply_change_group / cancel_changes.  */
171 typedef struct change_t
/* Growable array of pending changes; changes_allocated is its capacity
   and num_changes the count currently recorded.  */
179 static change_t *changes;
180 static int changes_allocated;
182 static int num_changes = 0;
184 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
185 at which NEW will be placed. If OBJECT is zero, no validation is done,
186 the change is simply made.
188 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
189 will be called with the address and mode as parameters. If OBJECT is
190 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
193 IN_GROUP is non-zero if this is part of a group of changes that must be
194 performed as a group. In that case, the changes will be stored. The
195 function `apply_change_group' will validate and apply the changes.
197 If IN_GROUP is zero, this is a single change. Try to recognize the insn
198 or validate the memory reference with the change applied. If the result
199 is not valid for the machine, suppress the change and return zero.
200 Otherwise, perform the change and return 1. */
203 validate_change (object, loc, new, in_group)
/* No-op if the replacement is already in place.  */
211 if (old == new || rtx_equal_p (old, new))
/* A lone change must not be interleaved with a pending group.  */
214 if (in_group == 0 && num_changes != 0)
219 /* Save the information describing this change. */
220 if (num_changes >= changes_allocated)
222 if (changes_allocated == 0)
223 /* This value allows for repeated substitutions inside complex
224 indexed addresses, or changes in up to 5 insns. */
225 changes_allocated = MAX_RECOG_OPERANDS * 5;
227 changes_allocated *= 2;
230 (change_t*) xrealloc (changes,
231 sizeof (change_t) * changes_allocated);
234 changes[num_changes].object = object;
235 changes[num_changes].loc = loc;
236 changes[num_changes].old = old;
238 if (object && GET_CODE (object) != MEM)
240 /* Set INSN_CODE to force rerecognition of insn. Save old code in
242 changes[num_changes].old_code = INSN_CODE (object);
243 INSN_CODE (object) = -1;
248 /* If we are making a group of changes, return 1. Otherwise, validate the
249 change group we made. */
254 return apply_change_group ();
257 /* This subroutine of apply_change_group verifies whether the changes to INSN
258 were valid; i.e. whether INSN can still be recognized.
    Returns nonzero (invalid) on failure; on success caches the new insn
    code and returns zero.  */
261 insn_invalid_p (insn)
264 rtx pat = PATTERN (insn);
265 int num_clobbers = 0;
266 /* If we are before reload and the pattern is a SET, see if we can add
268 int icode = recog (pat, insn,
269 (GET_CODE (pat) == SET
270 && ! reload_completed && ! reload_in_progress)
271 ? &num_clobbers : NULL_PTR);
272 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
275 /* If this is an asm and the operand aren't legal, then fail. Likewise if
276 this is not an asm and the insn wasn't recognized. */
277 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
278 || (!is_asm && icode < 0))
281 /* If we have to add CLOBBERs, fail if we have to add ones that reference
282 hard registers since our callers can't know if they are live or not.
283 Otherwise, add them. */
284 if (num_clobbers > 0)
288 if (added_clobbers_hard_reg_p (icode))
/* Wrap the original pattern and the required clobbers in a PARALLEL.  */
291 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
292 XVECEXP (newpat, 0, 0) = pat;
293 add_clobbers (newpat, icode);
294 PATTERN (insn) = pat = newpat;
297 /* After reload, verify that all constraints are satisfied. */
298 if (reload_completed)
302 if (! constrain_operands (1))
306 INSN_CODE (insn) = icode;
310 /* Apply a group of changes previously issued with `validate_change'.
311 Return 1 if all changes are valid, zero otherwise. */
314 apply_change_group ()
318 /* The changes have been applied and all INSN_CODEs have been reset to force
321 The changes are valid if we aren't given an object, or if we are
322 given a MEM and it still is a valid address, or if this is in insn
323 and it is recognized. In the latter case, if reload has completed,
324 we also require that the operands meet the constraints for
327 for (i = 0; i < num_changes; i++)
329 rtx object = changes[i].object;
334 if (GET_CODE (object) == MEM)
336 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
339 else if (insn_invalid_p (object))
341 rtx pat = PATTERN (object);
343 /* Perhaps we couldn't recognize the insn because there were
344 extra CLOBBERs at the end. If so, try to re-recognize
345 without the last CLOBBER (later iterations will cause each of
346 them to be eliminated, in turn). But don't do this if we
347 have an ASM_OPERAND. */
348 if (GET_CODE (pat) == PARALLEL
349 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
350 && asm_noperands (PATTERN (object)) < 0)
/* Dropping the last CLOBBER from a 2-element PARALLEL leaves a
   bare pattern; otherwise build a shorter PARALLEL.  */
354 if (XVECLEN (pat, 0) == 2)
355 newpat = XVECEXP (pat, 0, 0);
361 = gen_rtx_PARALLEL (VOIDmode,
362 rtvec_alloc (XVECLEN (pat, 0) - 1));
363 for (j = 0; j < XVECLEN (newpat, 0); j++)
364 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
367 /* Add a new change to this group to replace the pattern
368 with this new pattern. Then consider this change
369 as having succeeded. The change we added will
370 cause the entire call to fail if things remain invalid.
372 Note that this can lose if a later change than the one
373 we are processing specified &XVECEXP (PATTERN (object), 0, X)
374 but this shouldn't occur. */
376 validate_change (object, &PATTERN (object), newpat, 1);
378 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
379 /* If this insn is a CLOBBER or USE, it is always valid, but is
/* All changes examined without failure: the group succeeds.  */
387 if (i == num_changes)
399 /* Return the number of changes so far in the current group. */
402 num_validated_changes ()
407 /* Retract the changes numbered NUM and up. */
/* NOTE(review): the function header for this roll-back routine
   (`cancel_changes' in stock GCC) is not visible in this excerpt.  */
415 /* Back out all the changes. Do this in the opposite order in which
417 for (i = num_changes - 1; i >= num; i--)
419 *changes[i].loc = changes[i].old;
/* Restore the insn code we clobbered in validate_change.  */
420 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
421 INSN_CODE (changes[i].object) = changes[i].old_code;
426 /* Replace every occurrence of FROM in X with TO. Mark each change with
427 validate_change passing OBJECT.
    Recursive worker shared by the validate_replace_* entry points
    below; it records (never directly applies) each substitution so the
    whole set can be accepted or rejected as a group.  */
430 validate_replace_rtx_1 (loc, from, to, object)
432 rtx from, to, object;
435 register const char *fmt;
436 register rtx x = *loc;
442 /* X matches FROM if it is the same rtx or they are both referring to the
443 same register in the same mode. Avoid calling rtx_equal_p unless the
444 operands look similar. */
447 || (GET_CODE (x) == REG && GET_CODE (from) == REG
448 && GET_MODE (x) == GET_MODE (from)
449 && REGNO (x) == REGNO (from))
450 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
451 && rtx_equal_p (x, from)))
453 validate_change (object, loc, to, 1);
457 /* For commutative or comparison operations, try replacing each argument
458 separately and seeing if we made any changes. If so, put a constant
460 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
462 int prev_changes = num_changes;
464 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
465 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
/* Canonicalize: constant goes second; comparisons must also swap the
   condition code when the operands swap.  */
466 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
468 validate_change (object, loc,
469 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
470 : swap_condition (code),
471 GET_MODE (x), XEXP (x, 1),
479 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
480 done the substitution, otherwise we won't. */
/* NOTE(review): the switch over CODE whose cases follow (PLUS, MINUS,
   extensions, SUBREG, extracts) is not itself visible in this excerpt.  */
485 /* If we have a PLUS whose second operand is now a CONST_INT, use
486 plus_constant to try to simplify it. */
487 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
488 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
/* MINUS of a CONST_INT becomes PLUS of its negation.  */
493 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
495 validate_change (object, loc,
496 plus_constant (XEXP (x, 0), - INTVAL (to)),
504 /* In these cases, the operation to be performed depends on the mode
505 of the operand. If we are replacing the operand with a VOIDmode
506 constant, we lose the information. So try to simplify the operation
508 if (GET_MODE (to) == VOIDmode
509 && (rtx_equal_p (XEXP (x, 0), from)
510 || (GET_CODE (XEXP (x, 0)) == SUBREG
511 && rtx_equal_p (SUBREG_REG (XEXP (x, 0)), from))))
515 /* If there is a subreg involved, crop to the portion of the
516 constant that we are interested in. */
517 if (GET_CODE (XEXP (x, 0)) == SUBREG)
519 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) <= UNITS_PER_WORD)
520 to = operand_subword (to,
521 (SUBREG_BYTE (XEXP (x, 0))
524 else if (GET_MODE_CLASS (GET_MODE (from)) == MODE_INT
525 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
526 <= HOST_BITS_PER_WIDE_INT))
/* Extract the relevant bits by shifting the low/high host words.  */
528 int i = SUBREG_BYTE (XEXP (x, 0)) * BITS_PER_UNIT;
530 unsigned HOST_WIDE_INT vall;
532 if (GET_CODE (to) == CONST_INT)
/* Sign-extend a CONST_INT into the (vall, valh) pair.  */
535 valh = (HOST_WIDE_INT) vall < 0 ? ~0 : 0;
539 vall = CONST_DOUBLE_LOW (to);
540 valh = CONST_DOUBLE_HIGH (to);
543 if (WORDS_BIG_ENDIAN)
544 i = (GET_MODE_BITSIZE (GET_MODE (from))
545 - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - i);
546 if (i > 0 && i < HOST_BITS_PER_WIDE_INT)
547 vall = vall >> i | valh << (HOST_BITS_PER_WIDE_INT - i);
548 else if (i >= HOST_BITS_PER_WIDE_INT)
549 vall = valh >> (i - HOST_BITS_PER_WIDE_INT);
550 to = GEN_INT (trunc_int_for_mode (vall,
551 GET_MODE (XEXP (x, 0))));
/* Couldn't crop the constant: substitute something that is known
   not to be recognized, so the group fails cleanly.  */
554 to = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
557 /* If the above didn't fail, perform the extension from the
558 mode of the operand (and not the mode of FROM). */
560 new = simplify_unary_operation (code, GET_MODE (x), to,
561 GET_MODE (XEXP (x, 0)));
563 /* If any of the above failed, substitute in something that
564 we know won't be recognized. */
566 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
568 validate_change (object, loc, new, 1);
574 /* In case we are replacing by constant, attempt to simplify it to
575 non-SUBREG expression. We can't do this later, since the information
576 about inner mode may be lost. */
577 if (CONSTANT_P (to) && rtx_equal_p (SUBREG_REG (x), from))
580 unsigned HOST_WIDE_INT val;
582 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
583 since we are saying that the high bits don't matter. */
584 if (GET_MODE (to) == VOIDmode
585 && (GET_MODE_SIZE (GET_MODE (x))
586 >= GET_MODE_SIZE (GET_MODE (from))))
588 rtx new = gen_lowpart_if_possible (GET_MODE (x), to);
591 validate_change (object, loc, new, 1);
596 offset = SUBREG_BYTE (x) * BITS_PER_UNIT;
597 switch (GET_CODE (to))
600 if (GET_MODE (to) != VOIDmode)
/* Pick the host word of the CONST_DOUBLE the subreg refers to.  */
603 part = offset >= HOST_BITS_PER_WIDE_INT;
604 if ((BITS_PER_WORD > HOST_BITS_PER_WIDE_INT
606 || (BITS_PER_WORD <= HOST_BITS_PER_WIDE_INT
607 && WORDS_BIG_ENDIAN))
609 val = part ? CONST_DOUBLE_HIGH (to) : CONST_DOUBLE_LOW (to);
610 offset %= HOST_BITS_PER_WIDE_INT;
614 if (GET_CODE (to) == CONST_INT)
618 /* Avoid creating bogus SUBREGs */
619 enum machine_mode mode = GET_MODE (x);
620 enum machine_mode inner_mode = GET_MODE (from);
622 /* We've already picked the word we want from a double, so
623 pretend this is actually an integer. */
624 if (GET_CODE (to) == CONST_DOUBLE)
627 if (GET_MODE_CLASS (mode) != MODE_INT)
/* Big-endian correction: bit offsets count from the other end.  */
630 if (BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
632 if (WORDS_BIG_ENDIAN)
633 offset = GET_MODE_BITSIZE (inner_mode)
634 - GET_MODE_BITSIZE (mode) - offset;
635 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
636 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
637 offset = offset + BITS_PER_WORD - GET_MODE_BITSIZE (mode)
638 - 2 * (offset % BITS_PER_WORD);
641 if (offset >= HOST_BITS_PER_WIDE_INT)
642 to = ((HOST_WIDE_INT) val < 0) ? constm1_rtx : const0_rtx;
646 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
647 val = trunc_int_for_mode (val, mode);
651 validate_change (object, loc, to, 1);
660 /* Changing mode twice with SUBREG => just change it once,
661 or not at all if changing back to starting mode. */
662 if (GET_CODE (to) == SUBREG
663 && rtx_equal_p (SUBREG_REG (x), from))
665 if (GET_MODE (x) == GET_MODE (SUBREG_REG (to))
666 && SUBREG_BYTE (x) == 0 && SUBREG_BYTE (to) == 0)
668 validate_change (object, loc, SUBREG_REG (to), 1);
672 /* Make sure the 2 byte counts added together are an even unit
673 of x's mode, and combine them if so. Otherwise we run
674 into problems with something like:
675 (subreg:HI (subreg:QI (SI:55) 3) 0)
676 we end up with an odd offset into a HI which is invalid. */
678 if (SUBREG_BYTE (to) % GET_MODE_SIZE (GET_MODE (x)) == 0)
679 validate_change (object, loc,
680 gen_rtx_SUBREG (GET_MODE (x), SUBREG_REG (to),
681 SUBREG_BYTE(x) + SUBREG_BYTE (to)),
684 validate_change (object, loc, to, 1);
689 /* If we have a SUBREG of a register that we are replacing and we are
690 replacing it with a MEM, make a new MEM and try replacing the
691 SUBREG with it. Don't do this if the MEM has a mode-dependent address
692 or if we would be widening it. */
694 if (GET_CODE (from) == REG
695 && GET_CODE (to) == MEM
696 && rtx_equal_p (SUBREG_REG (x), from)
697 && ! mode_dependent_address_p (XEXP (to, 0))
698 && ! MEM_VOLATILE_P (to)
699 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
701 int offset = SUBREG_BYTE (x);
702 enum machine_mode mode = GET_MODE (x);
705 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
706 MEM_COPY_ATTRIBUTES (new, to);
707 validate_change (object, loc, new, 1);
714 /* If we are replacing a register with memory, try to change the memory
715 to be the mode required for memory in extract operations (this isn't
716 likely to be an insertion operation; if it was, nothing bad will
717 happen, we might just fail in some cases). */
719 if (GET_CODE (from) == REG && GET_CODE (to) == MEM
720 && rtx_equal_p (XEXP (x, 0), from)
721 && GET_CODE (XEXP (x, 1)) == CONST_INT
722 && GET_CODE (XEXP (x, 2)) == CONST_INT
723 && ! mode_dependent_address_p (XEXP (to, 0))
724 && ! MEM_VOLATILE_P (to))
726 enum machine_mode wanted_mode = VOIDmode;
727 enum machine_mode is_mode = GET_MODE (to);
728 int pos = INTVAL (XEXP (x, 2));
/* Ask the extv/extzv insn patterns what operand mode they want;
   these lookups are presumably inside HAVE_extzv/HAVE_extv
   conditionals not visible in this excerpt — confirm.  */
731 if (code == ZERO_EXTRACT)
733 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
734 if (wanted_mode == VOIDmode)
735 wanted_mode = word_mode;
739 if (code == SIGN_EXTRACT)
741 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
742 if (wanted_mode == VOIDmode)
743 wanted_mode = word_mode;
747 /* If we have a narrower mode, we can do something. */
748 if (wanted_mode != VOIDmode
749 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
751 int offset = pos / BITS_PER_UNIT;
754 /* If the bytes and bits are counted differently, we
755 must adjust the offset. */
756 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
757 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
760 pos %= GET_MODE_BITSIZE (wanted_mode);
762 newmem = gen_rtx_MEM (wanted_mode,
763 plus_constant (XEXP (to, 0), offset));
764 MEM_COPY_ATTRIBUTES (newmem, to);
766 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
767 validate_change (object, &XEXP (x, 0), newmem, 1);
777 /* For commutative or comparison operations we've already performed
778 replacements. Don't try to perform them again. */
779 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
/* Generic recursion over X's operands and operand vectors.  */
781 fmt = GET_RTX_FORMAT (code);
782 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
785 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
786 else if (fmt[i] == 'E')
787 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
788 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
793 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
794 with TO. After all changes have been made, validate by seeing
795 if INSN is still valid.
    Returns nonzero iff the whole change group succeeded.  */
798 validate_replace_rtx_subexp (from, to, insn, loc)
799 rtx from, to, insn, *loc;
801 validate_replace_rtx_1 (loc, from, to, insn);
802 return apply_change_group ();
805 /* Try replacing every occurrence of FROM in INSN with TO. After all
806 changes have been made, validate by seeing if INSN is still valid.
    Returns nonzero iff the whole change group succeeded.  */
809 validate_replace_rtx (from, to, insn)
812 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
813 return apply_change_group ();
816 /* Try replacing every occurrence of FROM in INSN with TO.
    Unlike validate_replace_rtx, this only records the changes; the
    caller commits them later with apply_change_group.  */
819 validate_replace_rtx_group (from, to, insn)
822 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
825 /* Function called by note_uses to replace used subexpressions. */
/* Closure passed through note_uses' void* DATA argument.  */
826 struct validate_replace_src_data
828 rtx from; /* Old RTX */
829 rtx to; /* New RTX */
830 rtx insn; /* Insn in which substitution is occurring. */
/* note_uses callback: unpack the closure and delegate to the
   recursive replacement worker.  */
834 validate_replace_src_1 (x, data)
838 struct validate_replace_src_data *d
839 = (struct validate_replace_src_data *) data;
841 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
844 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
845 SET_DESTs. After all changes have been made, validate by seeing if
846 INSN is still valid. */
849 validate_replace_src (from, to, insn)
852 struct validate_replace_src_data d;
/* NOTE(review): the lines initializing d.from/d.to/d.insn are not
   visible in this excerpt.  */
857 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
858 return apply_change_group ();
862 /* Return 1 if the insn using CC0 set by INSN does not contain
863 any ordered tests applied to the condition codes.
864 EQ and NE tests do not count. */
867 next_insn_tests_no_inequality (insn)
870 register rtx next = next_cc0_user (insn);
872 /* If there is no next insn, we have to take the conservative choice. */
876 return ((GET_CODE (next) == JUMP_INSN
877 || GET_CODE (next) == INSN
878 || GET_CODE (next) == CALL_INSN)
879 && ! inequality_comparisons_p (PATTERN (next)));
882 #if 0 /* This is useless since the insn that sets the cc's
883 must be followed immediately by the use of them. */
884 /* Return 1 if the CC value set up by INSN is not used. */
887 next_insns_test_no_inequality (insn)
890 register rtx next = NEXT_INSN (insn);
892 for (; next != 0; next = NEXT_INSN (next))
894 if (GET_CODE (next) == CODE_LABEL
895 || GET_CODE (next) == BARRIER)
897 if (GET_CODE (next) == NOTE)
899 if (inequality_comparisons_p (PATTERN (next)))
901 if (sets_cc0_p (PATTERN (next)) == 1)
903 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
/* NOTE(review): dead code kept under #if 0 by the original authors;
   the matching #endif is not visible in this excerpt.  */
911 /* This is used by find_single_use to locate an rtx that contains exactly one
912 use of DEST, which is typically either a REG or CC0. It returns a
913 pointer to the innermost rtx expression containing DEST. Appearances of
914 DEST that are being used to totally replace it are not counted.
    Returns 0 if DEST is not used, or used more than once.  */
917 find_single_use_1 (dest, loc)
922 enum rtx_code code = GET_CODE (x);
939 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
940 of a REG that occupies all of the REG, the insn uses DEST if
941 it is mentioned in the destination or the source. Otherwise, we
942 need just check the source. */
943 if (GET_CODE (SET_DEST (x)) != CC0
944 && GET_CODE (SET_DEST (x)) != PC
945 && GET_CODE (SET_DEST (x)) != REG
946 && ! (GET_CODE (SET_DEST (x)) == SUBREG
947 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
948 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
949 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
950 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
951 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
954 return find_single_use_1 (dest, &SET_SRC (x));
/* NOTE(review): presumably the MEM/SUBREG case of the switch —
   the preceding case labels are not visible in this excerpt.  */
958 return find_single_use_1 (dest, &XEXP (x, 0));
964 /* If it wasn't one of the common cases above, check each expression and
965 vector of this code. Look for a unique usage of DEST. */
967 fmt = GET_RTX_FORMAT (code);
968 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
972 if (dest == XEXP (x, i)
973 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
974 && REGNO (dest) == REGNO (XEXP (x, i))))
977 this_result = find_single_use_1 (dest, &XEXP (x, i));
/* First use found: remember it.  A second use means DEST is not
   singly used, so the caller must get 0.  */
980 result = this_result;
981 else if (this_result)
982 /* Duplicate usage. */
985 else if (fmt[i] == 'E')
989 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
991 if (XVECEXP (x, i, j) == dest
992 || (GET_CODE (dest) == REG
993 && GET_CODE (XVECEXP (x, i, j)) == REG
994 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
997 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
1000 result = this_result;
1001 else if (this_result)
1010 /* See if DEST, produced in INSN, is used only a single time in the
1011 sequel. If so, return a pointer to the innermost rtx expression in which
1014 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
1016 This routine will return usually zero either before flow is called (because
1017 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
1018 note can't be trusted).
1020 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
1021 care about REG_DEAD notes or LOG_LINKS.
1023 Otherwise, we find the single use by finding an insn that has a
1024 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
1025 only referenced once in that insn, we know that it must be the first
1026 and last insn referencing DEST. */
1029 find_single_use (dest, insn, ploc)
/* CC0 case: the user must be the immediately following insn.  */
1039 if (dest == cc0_rtx)
1041 next = NEXT_INSN (insn);
1043 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
1046 result = find_single_use_1 (dest, &PATTERN (next));
/* Give up when LOG_LINKS/REG_DEAD data is unreliable (see header
   comment) or DEST is not a register.  */
1053 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
1056 for (next = next_nonnote_insn (insn);
1057 next != 0 && GET_CODE (next) != CODE_LABEL;
1058 next = next_nonnote_insn (next))
1059 if (INSN_P (next) && dead_or_set_p (next, dest))
1061 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
1062 if (XEXP (link, 0) == insn)
1067 result = find_single_use_1 (dest, &PATTERN (next));
1077 /* Return 1 if OP is a valid general operand for machine mode MODE.
1078 This is either a register reference, a memory reference,
1079 or a constant. In the case of a memory reference, the address
1080 is checked for general validity for the target machine.
1082 Register and memory references must have mode MODE in order to be valid,
1083 but some constants have no machine mode and are valid for any mode.
1085 If MODE is VOIDmode, OP is checked for validity for whatever mode
1088 The main use of this function is as a predicate in match_operand
1089 expressions in the machine description.
1091 For an explanation of this function's behavior for registers of
1092 class NO_REGS, see the comment for `register_operand'. */
1095 general_operand (op, mode)
1097 enum machine_mode mode;
1099 register enum rtx_code code = GET_CODE (op);
1101 if (mode == VOIDmode)
1102 mode = GET_MODE (op);
1104 /* Don't accept CONST_INT or anything similar
1105 if the caller wants something floating. */
1106 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1107 && GET_MODE_CLASS (mode) != MODE_INT
1108 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1111 if (CONSTANT_P (op))
1112 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1113 || mode == VOIDmode)
1114 #ifdef LEGITIMATE_PIC_OPERAND_P
1115 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1117 && LEGITIMATE_CONSTANT_P (op));
1119 /* Except for certain constants with VOIDmode, already checked for,
1120 OP's mode must match MODE if MODE specifies a mode. */
1122 if (GET_MODE (op) != mode)
/* SUBREG handling — the `if (code == SUBREG)' test itself is not
   visible in this excerpt.  */
1127 #ifdef INSN_SCHEDULING
1128 /* On machines that have insn scheduling, we want all memory
1129 reference to be explicit, so outlaw paradoxical SUBREGs. */
1130 if (GET_CODE (SUBREG_REG (op)) == MEM
1131 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
/* Look through the SUBREG and re-classify the inner rtx.  */
1135 op = SUBREG_REG (op);
1136 code = GET_CODE (op);
1140 /* A register whose class is NO_REGS is not a general operand. */
1141 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1142 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
/* MEM case: check volatility and address legitimacy.  */
1146 register rtx y = XEXP (op, 0);
1148 if (! volatile_ok && MEM_VOLATILE_P (op))
1151 if (GET_CODE (y) == ADDRESSOF)
1154 /* Use the mem's mode, since it will be reloaded thus. */
1155 mode = GET_MODE (op);
1156 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
1159 /* Pretend this is an operand for now; we'll run force_operand
1160 on its replacement in fixup_var_refs_1. */
1161 if (code == ADDRESSOF)
1170 /* Return 1 if OP is a valid memory address for a memory reference
1173 The main use of this function is as a predicate in match_operand
1174 expressions in the machine description. */
1177 address_operand (op, mode)
1179 enum machine_mode mode;
1181 return memory_address_p (mode, op);
1184 /* Return 1 if OP is a register reference of mode MODE.
1185 If MODE is VOIDmode, accept a register in any mode.
1187 The main use of this function is as a predicate in match_operand
1188 expressions in the machine description.
1190 As a special exception, registers whose class is NO_REGS are
1191 not accepted by `register_operand'. The reason for this change
1192 is to allow the representation of special architecture artifacts
1193 (such as a condition code register) without extending the rtl
1194 definitions. Since registers of class NO_REGS cannot be used
1195 as registers in any case where register classes are examined,
1196 it is most consistent to keep this function from accepting them. */
1199 register_operand (op, mode)
1201 enum machine_mode mode;
1203 if (GET_MODE (op) != mode && mode != VOIDmode)
1206 if (GET_CODE (op) == SUBREG)
1208 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1209 because it is guaranteed to be reloaded into one.
1210 Just make sure the MEM is valid in itself.
1211 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1212 but currently it does result from (SUBREG (REG)...) where the
1213 reg went on the stack.) */
1214 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1215 return general_operand (op, mode);
1217 #ifdef CLASS_CANNOT_CHANGE_MODE
/* Reject a SUBREG of a hard register whose class cannot tolerate
   the mode change implied by the SUBREG.  */
1218 if (GET_CODE (SUBREG_REG (op)) == REG
1219 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1220 && (TEST_HARD_REG_BIT
1221 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
1222 REGNO (SUBREG_REG (op))))
1223 && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
1224 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1225 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
/* Otherwise judge the inner expression.  */
1229 op = SUBREG_REG (op);
1232 /* If we have an ADDRESSOF, consider it valid since it will be
1233 converted into something that will not be a MEM. */
1234 if (GET_CODE (op) == ADDRESSOF)
1237 /* We don't consider registers whose class is NO_REGS
1238 to be a register operand. */
1239 return (GET_CODE (op) == REG
1240 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1241 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1244 /* Return 1 for a register in Pmode; ignore the tested mode.
    Thin wrapper: delegates to register_operand with Pmode.  */
1247 pmode_register_operand (op, mode)
1249 enum machine_mode mode ATTRIBUTE_UNUSED;
1251 return register_operand (op, Pmode);
1254 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1255 or a hard register. */
1258 scratch_operand (op, mode)
1260 enum machine_mode mode;
1262 if (GET_MODE (op) != mode && mode != VOIDmode)
1265 return (GET_CODE (op) == SCRATCH
1266 || (GET_CODE (op) == REG
1267 && REGNO (op) < FIRST_PSEUDO_REGISTER))
1270 /* Return 1 if OP is a valid immediate operand for mode MODE.
1272 The main use of this function is as a predicate in match_operand
1273 expressions in the machine description. */
1276 immediate_operand (op, mode)
1278 enum machine_mode mode;
1280 /* Don't accept CONST_INT or anything similar
1281 if the caller wants something floating. */
1282 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1283 && GET_MODE_CLASS (mode) != MODE_INT
1284 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1287 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1288 result in 0/1. It seems a safe assumption that this is
1289 in range for everyone. */
1290 if (GET_CODE (op) == CONSTANT_P_RTX)
1293 return (CONSTANT_P (op)
1294 && (GET_MODE (op) == mode || mode == VOIDmode
1295 || GET_MODE (op) == VOIDmode)
1296 #ifdef LEGITIMATE_PIC_OPERAND_P
1297 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1299 && LEGITIMATE_CONSTANT_P (op));
1302 /* Returns 1 if OP is an operand that is a CONST_INT.
    MODE is ignored: CONST_INTs are VOIDmode by construction.  */
1305 const_int_operand (op, mode)
1307 enum machine_mode mode ATTRIBUTE_UNUSED;
1309 return GET_CODE (op) == CONST_INT;
1312 /* Returns 1 if OP is an operand that is a constant integer or constant
1313 floating-point number. */
1316 const_double_operand (op, mode)
1318 enum machine_mode mode;
1320 /* Don't accept CONST_INT or anything similar
1321 if the caller wants something floating. */
1322 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1323 && GET_MODE_CLASS (mode) != MODE_INT
1324 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1327 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1328 && (mode == VOIDmode || GET_MODE (op) == mode
1329 || GET_MODE (op) == VOIDmode));
1332 /* Return 1 if OP is a general operand that is not an immediate operand. */
1335 nonimmediate_operand (op, mode)
1337 enum machine_mode mode;
1339 return (general_operand (op, mode) && ! CONSTANT_P (op));
1342 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1345 nonmemory_operand (op, mode)
1347 enum machine_mode mode;
1349 if (CONSTANT_P (op))
1351 /* Don't accept CONST_INT or anything similar
1352 if the caller wants something floating. */
1353 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1354 && GET_MODE_CLASS (mode) != MODE_INT
1355 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1358 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1359 || mode == VOIDmode)
1360 #ifdef LEGITIMATE_PIC_OPERAND_P
1361 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1363 && LEGITIMATE_CONSTANT_P (op));
1366 if (GET_MODE (op) != mode && mode != VOIDmode)
1369 if (GET_CODE (op) == SUBREG)
1371 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1372 because it is guaranteed to be reloaded into one.
1373 Just make sure the MEM is valid in itself.
1374 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1375 but currently it does result from (SUBREG (REG)...) where the
1376 reg went on the stack.) */
1377 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1378 return general_operand (op, mode);
1379 op = SUBREG_REG (op);
1382 /* We don't consider registers whose class is NO_REGS
1383 to be a register operand. */
1384 return (GET_CODE (op) == REG
1385 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1386 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1389 /* Return 1 if OP is a valid operand that stands for pushing a
1390 value of mode MODE onto the stack.
1392 The main use of this function is as a predicate in match_operand
1393 expressions in the machine description. */
1396 push_operand (op, mode)
1398 enum machine_mode mode;
1400 unsigned int rounded_size = GET_MODE_SIZE (mode);
1402 #ifdef PUSH_ROUNDING
1403 rounded_size = PUSH_ROUNDING (rounded_size);
1406 if (GET_CODE (op) != MEM)
1409 if (mode != VOIDmode && GET_MODE (op) != mode)
1414 if (rounded_size == GET_MODE_SIZE (mode))
1416 if (GET_CODE (op) != STACK_PUSH_CODE)
1421 if (GET_CODE (op) != PRE_MODIFY
1422 || GET_CODE (XEXP (op, 1)) != PLUS
1423 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1424 || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
1425 #ifdef STACK_GROWS_DOWNWARD
1426 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1428 || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
1434 return XEXP (op, 0) == stack_pointer_rtx;
1437 /* Return 1 if OP is a valid operand that stands for popping a
1438 value of mode MODE off the stack.
1440 The main use of this function is as a predicate in match_operand
1441 expressions in the machine description. */
1444 pop_operand (op, mode)
1446 enum machine_mode mode;
1448 if (GET_CODE (op) != MEM)
1451 if (mode != VOIDmode && GET_MODE (op) != mode)
1456 if (GET_CODE (op) != STACK_POP_CODE)
1459 return XEXP (op, 0) == stack_pointer_rtx;
1462 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1465 memory_address_p (mode, addr)
1466 enum machine_mode mode ATTRIBUTE_UNUSED;
1469 if (GET_CODE (addr) == ADDRESSOF)
1472 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1479 /* Return 1 if OP is a valid memory reference with mode MODE,
1480 including a valid address.
1482 The main use of this function is as a predicate in match_operand
1483 expressions in the machine description. */
1486 memory_operand (op, mode)
1488 enum machine_mode mode;
1492 if (! reload_completed)
1493 /* Note that no SUBREG is a memory operand before end of reload pass,
1494 because (SUBREG (MEM...)) forces reloading into a register. */
1495 return GET_CODE (op) == MEM && general_operand (op, mode);
1497 if (mode != VOIDmode && GET_MODE (op) != mode)
1501 if (GET_CODE (inner) == SUBREG)
1502 inner = SUBREG_REG (inner);
1504 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1507 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1508 that is, a memory reference whose address is a general_operand. */
1511 indirect_operand (op, mode)
1513 enum machine_mode mode;
1515 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1516 if (! reload_completed
1517 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1519 register int offset = SUBREG_BYTE (op);
1520 rtx inner = SUBREG_REG (op);
1522 if (mode != VOIDmode && GET_MODE (op) != mode)
1525 /* The only way that we can have a general_operand as the resulting
1526 address is if OFFSET is zero and the address already is an operand
1527 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1530 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1531 || (GET_CODE (XEXP (inner, 0)) == PLUS
1532 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1533 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1534 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1537 return (GET_CODE (op) == MEM
1538 && memory_operand (op, mode)
1539 && general_operand (XEXP (op, 0), Pmode));
1542 /* Return 1 if this is a comparison operator. This allows the use of
1543 MATCH_OPERATOR to recognize all the branch insns. */
1546 comparison_operator (op, mode)
1548 enum machine_mode mode;
1550 return ((mode == VOIDmode || GET_MODE (op) == mode)
1551 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1554 /* If BODY is an insn body that uses ASM_OPERANDS,
1555 return the number of operands (both input and output) in the insn.
1556 Otherwise return -1. */
/* NOTE(review): this chunk is a damaged extraction -- the original file's
   line numbers are fused onto each line and many lines (case labels,
   braces, returns, declarations) were dropped.  Comments below annotate
   only what the surviving lines show; do not treat this as compilable.  */
1559 asm_noperands (body)
/* Dispatch on the overall shape of BODY.  */
1562 switch (GET_CODE (body))
1565 /* No output operands: return number of input operands. */
1566 return ASM_OPERANDS_INPUT_LENGTH (body);
/* (set OUTPUT (asm_operands ...)) case follows.  */
1568 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1569 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1570 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
/* PARALLEL case: SETs followed by CLOBBERs.  */
1574 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1575 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1577 /* Multiple output operands, or 1 output plus some clobbers:
1578 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1582 /* Count backwards through CLOBBERs to determine number of SETs. */
1583 for (i = XVECLEN (body, 0); i > 0; i--)
1585 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1587 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1591 /* N_SETS is now number of output operands. */
1594 /* Verify that all the SETs we have
1595 came from a single original asm_operands insn
1596 (so that invalid combinations are blocked). */
1597 for (i = 0; i < n_sets; i++)
1599 rtx elt = XVECEXP (body, 0, i);
1600 if (GET_CODE (elt) != SET)
1602 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1604 /* If these ASM_OPERANDS rtx's came from different original insns
1605 then they aren't allowed together. */
1606 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1607 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
/* Inputs of the first ASM_OPERANDS plus the counted outputs.  */
1610 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1613 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1615 /* 0 outputs, but some clobbers:
1616 body is [(asm_operands ...) (clobber (reg ...))...]. */
1619 /* Make sure all the other parallel things really are clobbers. */
1620 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1621 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1624 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1633 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1634 copy its operands (both input and output) into the vector OPERANDS,
1635 the locations of the operands within the insn into the vector OPERAND_LOCS,
1636 and the constraints for the operands into CONSTRAINTS.
1637 Write the modes of the operands into MODES.
1638 Return the assembler-template.
1640 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1641 we don't store that info. */
/* NOTE(review): damaged extraction -- original line numbers are fused onto
   the code and many lines (declarations, braces, null-pointer guards) were
   dropped.  Each store below is presumably guarded by a check that the
   corresponding output vector is non-null -- TODO confirm against intact
   source.  */
1644 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1648 const char **constraints;
1649 enum machine_mode *modes;
1653 const char *template = 0;
/* Case 1: single output, (set OUTPUT (asm_operands ...)).  */
1655 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1657 rtx asmop = SET_SRC (body);
1658 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1660 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
/* Inputs occupy slots 1..noperands-1; slot 0 is the output.  */
1662 for (i = 1; i < noperands; i++)
1665 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1667 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1669 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1671 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1674 /* The output is in the SET.
1675 Its constraint is in the ASM_OPERANDS itself. */
1677 operands[0] = SET_DEST (body);
1679 operand_locs[0] = &SET_DEST (body);
1681 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1683 modes[0] = GET_MODE (SET_DEST (body));
1684 template = ASM_OPERANDS_TEMPLATE (asmop);
/* Case 2: no outputs, bare (asm_operands ...).  */
1686 else if (GET_CODE (body) == ASM_OPERANDS)
1689 /* No output operands: BODY is (asm_operands ....). */
1691 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1693 /* The input operands are found in the 1st element vector. */
1694 /* Constraints for inputs are in the 2nd element vector. */
1695 for (i = 0; i < noperands; i++)
1698 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1700 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1702 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1704 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1706 template = ASM_OPERANDS_TEMPLATE (asmop);
/* Case 3: PARALLEL of SETs (outputs) followed by CLOBBERs.  */
1708 else if (GET_CODE (body) == PARALLEL
1709 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1711 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1712 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1713 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1714 int nout = 0; /* Does not include CLOBBERs. */
1716 /* At least one output, plus some CLOBBERs. */
1718 /* The outputs are in the SETs.
1719 Their constraints are in the ASM_OPERANDS itself. */
1720 for (i = 0; i < nparallel; i++)
1722 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1723 break; /* Past last SET */
1726 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1728 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1730 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1732 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
/* Inputs follow the outputs, offset by NOUT.  */
1736 for (i = 0; i < nin; i++)
1739 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1741 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1743 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1745 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1748 template = ASM_OPERANDS_TEMPLATE (asmop);
/* Case 4: PARALLEL of (asm_operands ...) followed by CLOBBERs.  */
1750 else if (GET_CODE (body) == PARALLEL
1751 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1753 /* No outputs, but some CLOBBERs. */
1755 rtx asmop = XVECEXP (body, 0, 0);
1756 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1758 for (i = 0; i < nin; i++)
1761 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1763 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1765 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1767 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1770 template = ASM_OPERANDS_TEMPLATE (asmop);
1776 /* Check if an asm_operand matches it's constraints.
1777 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
/* NOTE(review): damaged extraction -- the original's line numbers are fused
   onto the code and the switch's case labels, `result` bookkeeping, loop
   structure and returns were dropped.  The surviving lines are one big
   switch over each constraint letter in CONSTRAINT.  */
1780 asm_operand_ok (op, constraint)
1782 const char *constraint;
1786 /* Use constrain_operands after reload. */
1787 if (reload_completed)
/* Iterate over the constraint string one letter at a time.  */
1792 char c = *constraint++;
1806 case '0': case '1': case '2': case '3': case '4':
1807 case '5': case '6': case '7': case '8': case '9':
1808 /* For best results, our caller should have given us the
1809 proper matching constraint, but we can't actually fail
1810 the check if they didn't. Indicate that results are
/* 'p': any valid address.  */
1816 if (address_operand (op, VOIDmode))
1821 case 'V': /* non-offsettable */
1822 if (memory_operand (op, VOIDmode))
1826 case 'o': /* offsettable */
1827 if (offsettable_nonstrict_memref_p (op))
1832 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1833 excepting those that expand_call created. Further, on some
1834 machines which do not have generalized auto inc/dec, an inc/dec
1835 is not a memory_operand.
1837 Match any memory and hope things are resolved after reload. */
/* '<': memory with pre/post-decrement addressing.  */
1839 if (GET_CODE (op) == MEM
1841 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1842 || GET_CODE (XEXP (op, 0)) == POST_DEC))
/* '>': memory with pre/post-increment addressing.  */
1847 if (GET_CODE (op) == MEM
1849 || GET_CODE (XEXP (op, 0)) == PRE_INC
1850 || GET_CODE (XEXP (op, 0)) == POST_INC))
/* 'E': floating constant examinable at compile time.  */
1855 #ifndef REAL_ARITHMETIC
1856 /* Match any floating double constant, but only if
1857 we can examine the bits of it reliably. */
1858 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1859 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1860 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1866 if (GET_CODE (op) == CONST_DOUBLE)
/* 'G'/'H': machine-dependent CONST_DOUBLE classes.  */
1871 if (GET_CODE (op) == CONST_DOUBLE
1872 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1876 if (GET_CODE (op) == CONST_DOUBLE
1877 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
/* 's'/'i'-style constant-integer tests follow.  */
1882 if (GET_CODE (op) == CONST_INT
1883 || (GET_CODE (op) == CONST_DOUBLE
1884 && GET_MODE (op) == VOIDmode))
1890 #ifdef LEGITIMATE_PIC_OPERAND_P
1891 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1898 if (GET_CODE (op) == CONST_INT
1899 || (GET_CODE (op) == CONST_DOUBLE
1900 && GET_MODE (op) == VOIDmode))
/* 'I'..'P': machine-dependent integer ranges.  */
1905 if (GET_CODE (op) == CONST_INT
1906 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1910 if (GET_CODE (op) == CONST_INT
1911 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1915 if (GET_CODE (op) == CONST_INT
1916 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1920 if (GET_CODE (op) == CONST_INT
1921 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1925 if (GET_CODE (op) == CONST_INT
1926 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1930 if (GET_CODE (op) == CONST_INT
1931 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1935 if (GET_CODE (op) == CONST_INT
1936 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1940 if (GET_CODE (op) == CONST_INT
1941 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
/* 'X'/'g': anything general_operand accepts.  */
1949 if (general_operand (op, VOIDmode))
1954 /* For all other letters, we first check for a register class,
1955 otherwise it is an EXTRA_CONSTRAINT. */
1956 if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
1959 if (GET_MODE (op) == BLKmode)
1961 if (register_operand (op, VOIDmode))
1964 #ifdef EXTRA_CONSTRAINT
1965 if (EXTRA_CONSTRAINT (op, c))
1975 /* Given an rtx *P, if it is a sum containing an integer constant term,
1976 return the location (type rtx *) of the pointer to that constant term.
1977 Otherwise, return a null pointer. */
1980 find_constant_term_loc (p)
1984 register enum rtx_code code = GET_CODE (*p);
1986 /* If *P IS such a constant term, P is its location. */
1988 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1992 /* Otherwise, if not a sum, it has no constant term. */
1994 if (GET_CODE (*p) != PLUS)
1997 /* If one of the summands is constant, return its location. */
1999 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
2000 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
2003 /* Otherwise, check each summand for containing a constant term. */
2005 if (XEXP (*p, 0) != 0)
2007 tem = find_constant_term_loc (&XEXP (*p, 0));
2012 if (XEXP (*p, 1) != 0)
2014 tem = find_constant_term_loc (&XEXP (*p, 1));
2022 /* Return 1 if OP is a memory reference
2023 whose address contains no side effects
2024 and remains valid after the addition
2025 of a positive integer less than the
2026 size of the object being referenced.
2028 We assume that the original address is valid and do not check it.
2030 This uses strict_memory_address_p as a subroutine, so
2031 don't use it before reload. */
2034 offsettable_memref_p (op)
2037 return ((GET_CODE (op) == MEM)
2038 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
2041 /* Similar, but don't require a strictly valid mem ref:
2042 consider pseudo-regs valid as index or base regs. */
2045 offsettable_nonstrict_memref_p (op)
2048 return ((GET_CODE (op) == MEM)
2049 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
2052 /* Return 1 if Y is a memory address which contains no side effects
2053 and would remain valid after the addition of a positive integer
2054 less than the size of that mode.
2056 We assume that the original address is valid and do not check it.
2057 We do check that it is valid for narrower modes.
2059 If STRICTP is nonzero, we require a strictly valid address,
2060 for the sake of use in reload.c. */
2063 offsettable_address_p (strictp, mode, y)
2065 enum machine_mode mode;
2068 register enum rtx_code ycode = GET_CODE (y);
2072 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
2073 (strictp ? strict_memory_address_p : memory_address_p);
2074 unsigned int mode_sz = GET_MODE_SIZE (mode);
2076 if (CONSTANT_ADDRESS_P (y))
2079 /* Adjusting an offsettable address involves changing to a narrower mode.
2080 Make sure that's OK. */
2082 if (mode_dependent_address_p (y))
2085 /* ??? How much offset does an offsettable BLKmode reference need?
2086 Clearly that depends on the situation in which it's being used.
2087 However, the current situation in which we test 0xffffffff is
2088 less than ideal. Caveat user. */
2090 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2092 /* If the expression contains a constant term,
2093 see if it remains valid when max possible offset is added. */
2095 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2100 *y2 = plus_constant (*y2, mode_sz - 1);
2101 /* Use QImode because an odd displacement may be automatically invalid
2102 for any wider mode. But it should be valid for a single byte. */
2103 good = (*addressp) (QImode, y);
2105 /* In any case, restore old contents of memory. */
2110 if (GET_RTX_CLASS (ycode) == 'a')
2113 /* The offset added here is chosen as the maximum offset that
2114 any instruction could need to add when operating on something
2115 of the specified mode. We assume that if Y and Y+c are
2116 valid addresses then so is Y+d for all 0<d<c. */
2118 z = plus_constant_for_output (y, mode_sz - 1);
2120 /* Use QImode because an odd displacement may be automatically invalid
2121 for any wider mode. But it should be valid for a single byte. */
2122 return (*addressp) (QImode, z);
2125 /* Return 1 if ADDR is an address-expression whose effect depends
2126 on the mode of the memory reference it is used in.
2128 Autoincrement addressing is a typical example of mode-dependence
2129 because the amount of the increment depends on the mode. */
2132 mode_dependent_address_p (addr)
2133 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
2135 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2137 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2138 win: ATTRIBUTE_UNUSED_LABEL
2142 /* Return 1 if OP is a general operand
2143 other than a memory ref with a mode dependent address. */
2146 mode_independent_operand (op, mode)
2147 enum machine_mode mode;
2152 if (! general_operand (op, mode))
2155 if (GET_CODE (op) != MEM)
2158 addr = XEXP (op, 0);
2159 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2161 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2162 lose: ATTRIBUTE_UNUSED_LABEL
2166 /* Given an operand OP that is a valid memory reference which
2167 satisfies offsettable_memref_p, return a new memory reference whose
2168 address has been adjusted by OFFSET. OFFSET should be positive and
2169 less than the size of the object referenced. */
/* NOTE(review): damaged extraction -- original line numbers are fused onto
   the code and lines (the MEM check, `new` declaration, copy of OP before
   modifying its constant term, returns) were dropped.  Annotations below
   cover only the surviving lines.  */
2172 adj_offsettable_operand (op, offset)
2176 register enum rtx_code code = GET_CODE (op);
2180 register rtx y = XEXP (op, 0);
/* Constant address: build a fresh MEM at address+OFFSET.  */
2183 if (CONSTANT_ADDRESS_P (y))
2185 new = gen_rtx_MEM (GET_MODE (op),
2186 plus_constant_for_output (y, offset));
2187 MEM_COPY_ATTRIBUTES (new, op);
/* PLUS address: try to bump an embedded constant term in place.  */
2191 if (GET_CODE (y) == PLUS)
2194 register rtx *const_loc;
2198 const_loc = find_constant_term_loc (&z);
2201 *const_loc = plus_constant_for_output (*const_loc, offset);
/* Fallback: wrap the whole address in a new (plus ... offset) MEM.  */
2206 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
2207 MEM_COPY_ATTRIBUTES (new, op);
2213 /* Like extract_insn, but save insn extracted and don't extract again, when
2214 called again for the same insn expecting that recog_data still contain the
2215 valid information. This is used primary by gen_attr infrastructure that
2216 often does extract insn again and again. */
2218 extract_insn_cached (insn)
2221 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2223 extract_insn (insn);
2224 recog_data.insn = insn;
2226 /* Do cached extract_insn, constrain_operand and complain about failures.
2227 Used by insn_attrtab. */
2229 extract_constrain_insn_cached (insn)
2232 extract_insn_cached (insn);
2233 if (which_alternative == -1
2234 && !constrain_operands (reload_completed))
2235 fatal_insn_not_found (insn);
2237 /* Do cached constrain_operand and complain about failures. */
2239 constrain_operands_cached (strict)
2242 if (which_alternative == -1)
2243 return constrain_operands (strict);
2248 /* Analyze INSN and fill in recog_data. */
/* NOTE(review): damaged extraction -- original line numbers are fused onto
   the code; the function signature, case labels of the switch, and several
   closing lines were dropped.  Annotations cover only surviving lines.  */
2257 rtx body = PATTERN (insn);
/* Reset recog_data before refilling it for this insn.  */
2259 recog_data.insn = NULL;
2260 recog_data.n_operands = 0;
2261 recog_data.n_alternatives = 0;
2262 recog_data.n_dups = 0;
2263 which_alternative = -1;
2265 switch (GET_CODE (body))
/* SET whose source is an ASM_OPERANDS, or a PARALLEL headed by one.  */
2275 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2280 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2281 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2282 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2288 recog_data.n_operands = noperands = asm_noperands (body);
2291 /* This insn is an `asm' with operands. */
2293 /* expand_asm_operands makes sure there aren't too many operands. */
2294 if (noperands > MAX_RECOG_OPERANDS)
2297 /* Now get the operand values and constraints out of the insn. */
2298 decode_asm_operands (body, recog_data.operand,
2299 recog_data.operand_loc,
2300 recog_data.constraints,
2301 recog_data.operand_mode)
/* Count alternatives by counting commas in the first constraint.  */
2304 const char *p = recog_data.constraints[0];
2305 recog_data.n_alternatives = 1;
2307 recog_data.n_alternatives += (*p++ == ',');
2311 fatal_insn_not_found (insn);
2315 /* Ordinary insn: recognize it, get the operands via insn_extract
2316 and get the constraints. */
2318 icode = recog_memoized (insn);
2320 fatal_insn_not_found (insn);
/* Copy static per-insn-code data into recog_data.  */
2322 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2323 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2324 recog_data.n_dups = insn_data[icode].n_dups;
2326 insn_extract (insn);
2328 for (i = 0; i < noperands; i++)
2330 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2331 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2332 /* VOIDmode match_operands gets mode from their real operand. */
2333 if (recog_data.operand_mode[i] == VOIDmode)
2334 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
/* Classify each operand as output, in/out, or input from its leading
   constraint character.  */
2337 for (i = 0; i < noperands; i++)
2338 recog_data.operand_type[i]
2339 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2340 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2343 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2347 /* After calling extract_insn, you can use this function to extract some
2348 information from the constraint strings into a more usable form.
2349 The collected data is stored in recog_op_alt. */
/* NOTE(review): damaged extraction -- original line numbers are fused onto
   the code; the switch's case labels for several constraint letters, loop
   braces and `break`s were dropped.  Annotations cover surviving lines.  */
2351 preprocess_constraints ()
2355 memset (recog_op_alt, 0, sizeof recog_op_alt);
/* Walk every operand's constraint string.  */
2356 for (i = 0; i < recog_data.n_operands; i++)
2359 struct operand_alternative *op_alt;
2360 const char *p = recog_data.constraints[i];
2362 op_alt = recog_op_alt[i];
/* Parse one comma-separated alternative per iteration of J.  */
2364 for (j = 0; j < recog_data.n_alternatives; j++)
2366 op_alt[j].class = NO_REGS;
2367 op_alt[j].constraint = p;
2368 op_alt[j].matches = -1;
2369 op_alt[j].matched = -1;
/* An empty alternative accepts anything.  */
2371 if (*p == '\0' || *p == ',')
2373 op_alt[j].anything_ok = 1;
2383 while (c != ',' && c != '\0');
2384 if (c == ',' || c == '\0')
2389 case '=': case '+': case '*': case '%':
2390 case 'E': case 'F': case 'G': case 'H':
2391 case 's': case 'i': case 'n':
2392 case 'I': case 'J': case 'K': case 'L':
2393 case 'M': case 'N': case 'O': case 'P':
2394 /* These don't say anything we care about. */
/* '?' / '!' add reject penalties of differing weight.  */
2398 op_alt[j].reject += 6;
2401 op_alt[j].reject += 600;
2404 op_alt[j].earlyclobber = 1;
/* Digit constraints record operand matching pairs.  */
2407 case '0': case '1': case '2': case '3': case '4':
2408 case '5': case '6': case '7': case '8': case '9':
2409 op_alt[j].matches = c - '0';
2410 recog_op_alt[op_alt[j].matches][j].matched = i;
/* Memory-flavored constraint letters set capability flags.  */
2414 op_alt[j].memory_ok = 1;
2417 op_alt[j].decmem_ok = 1;
2420 op_alt[j].incmem_ok = 1;
2423 op_alt[j].nonoffmem_ok = 1;
2426 op_alt[j].offmem_ok = 1;
2429 op_alt[j].anything_ok = 1;
/* 'p' (address) and register-class letters widen the allowed class.  */
2433 op_alt[j].is_address = 1;
2434 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2438 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2442 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2450 /* Check the operands of an insn against the insn's operand constraints
2451 and return 1 if they are valid.
2452 The information about the insn's operands, constraints, operand modes
2453 etc. is obtained from the global variables set up by extract_insn.
2455 WHICH_ALTERNATIVE is set to a number which indicates which
2456 alternative of constraints was matched: 0 for the first alternative,
2457 1 for the next, etc.
2459 In addition, when two operands are match
2460 and it happens that the output operand is (reg) while the
2461 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2462 make the output operand look like the input.
2463 This is because the output operand is the one the template will print.
2465 This is used in final, just before printing the assembler code and by
2466 the routines that determine an insn's attribute.
2468 If STRICT is a positive non-zero value, it means that we have been
2469 called after reload has been completed. In that case, we must
2470 do all checks strictly. If it is zero, it means that we have been called
2471 before reload has completed. In that case, we first try to see if we can
2472 find an alternative that matches strictly. If not, we try again, this
2473 time assuming that reload will fix up the insn. This provides a "best
2474 guess" for the alternative and is used to compute attributes of insns prior
2475 to reload. A negative value of STRICT is used for this internal call. */
/* NOTE(review): damaged extraction -- original line numbers are fused onto
   the code; many case labels, braces, `win`/`lose` bookkeeping and returns
   were dropped from the big per-constraint switch.  Annotations below cover
   only surviving lines.  */
2483 constrain_operands (strict)
2486 const char *constraints[MAX_RECOG_OPERANDS];
2487 int matching_operands[MAX_RECOG_OPERANDS];
2488 int earlyclobber[MAX_RECOG_OPERANDS];
2491 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2492 int funny_match_index;
2494 which_alternative = 0;
/* No operands or no alternatives: trivially valid.  */
2495 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2498 for (c = 0; c < recog_data.n_operands; c++)
2500 constraints[c] = recog_data.constraints[c];
2501 matching_operands[c] = -1;
/* Outer do-loop: try each alternative in turn.  */
2508 funny_match_index = 0;
2510 for (opno = 0; opno < recog_data.n_operands; opno++)
2512 register rtx op = recog_data.operand[opno];
2513 enum machine_mode mode = GET_MODE (op);
2514 register const char *p = constraints[opno];
2519 earlyclobber[opno] = 0;
2521 /* A unary operator may be accepted by the predicate, but it
2522 is irrelevant for matching constraints. */
2523 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
/* Look through SUBREGs of hard registers, tracking the byte offset.  */
2526 if (GET_CODE (op) == SUBREG)
2528 if (GET_CODE (SUBREG_REG (op)) == REG
2529 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2530 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2531 GET_MODE (SUBREG_REG (op)),
2534 op = SUBREG_REG (op);
2537 /* An empty constraint or empty alternative
2538 allows anything which matched the pattern. */
2539 if (*p == 0 || *p == ',')
/* Inner loop: each constraint character of this alternative.  */
2542 while (*p && (c = *p++) != ',')
2545 case '?': case '!': case '*': case '%':
/* '#': skip the rest of this alternative's constraint.  */
2550 /* Ignore rest of this alternative as far as
2551 constraint checking is concerned. */
2552 while (*p && *p != ',')
2557 earlyclobber[opno] = 1;
2560 case '0': case '1': case '2': case '3': case '4':
2561 case '5': case '6': case '7': case '8': case '9':
2563 /* This operand must be the same as a previous one.
2564 This kind of constraint is used for instructions such
2565 as add when they take only two operands.
2567 Note that the lower-numbered operand is passed first.
2569 If we are not testing strictly, assume that this constraint
2570 will be satisfied. */
2575 rtx op1 = recog_data.operand[c - '0'];
2576 rtx op2 = recog_data.operand[opno];
2578 /* A unary operator may be accepted by the predicate,
2579 but it is irrelevant for matching constraints. */
2580 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2581 op1 = XEXP (op1, 0);
2582 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2583 op2 = XEXP (op2, 0);
2585 val = operands_match_p (op1, op2);
2588 matching_operands[opno] = c - '0';
2589 matching_operands[c - '0'] = opno;
2593 /* If output is *x and input is *--x,
2594 arrange later to change the output to *--x as well,
2595 since the output op is the one that will be printed. */
2596 if (val == 2 && strict > 0)
2598 funny_match[funny_match_index].this = opno;
2599 funny_match[funny_match_index++].other = c - '0';
2604 /* p is used for address_operands. When we are called by
2605 gen_reload, no one will have checked that the address is
2606 strictly valid, i.e., that all pseudos requiring hard regs
2607 have gotten them. */
2609 || (strict_memory_address_p (recog_data.operand_mode[opno],
2614 /* No need to check general_operand again;
2615 it was done in insn-recog.c. */
2617 /* Anything goes unless it is a REG and really has a hard reg
2618 but the hard reg is not in the class GENERAL_REGS. */
2620 || GENERAL_REGS == ALL_REGS
2621 || GET_CODE (op) != REG
2622 || (reload_in_progress
2623 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2624 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2629 /* This is used for a MATCH_SCRATCH in the cases when
2630 we don't actually need anything. So anything goes
/* 'm': memory, or things reload can turn into memory.  */
2636 if (GET_CODE (op) == MEM
2637 /* Before reload, accept what reload can turn into mem. */
2638 || (strict < 0 && CONSTANT_P (op))
2639 /* During reload, accept a pseudo */
2640 || (reload_in_progress && GET_CODE (op) == REG
2641 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* '<': pre/post-decrement memory.  */
2646 if (GET_CODE (op) == MEM
2647 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2648 || GET_CODE (XEXP (op, 0)) == POST_DEC))
/* '>': pre/post-increment memory.  */
2653 if (GET_CODE (op) == MEM
2654 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2655 || GET_CODE (XEXP (op, 0)) == POST_INC))
/* 'E': CONST_DOUBLE examinable on the host.  */
2660 #ifndef REAL_ARITHMETIC
2661 /* Match any CONST_DOUBLE, but only if
2662 we can examine the bits of it reliably. */
2663 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2664 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2665 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2668 if (GET_CODE (op) == CONST_DOUBLE)
/* 'F': any CONST_DOUBLE.  */
2673 if (GET_CODE (op) == CONST_DOUBLE)
/* 'G'/'H': machine-dependent CONST_DOUBLE classes.  */
2679 if (GET_CODE (op) == CONST_DOUBLE
2680 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
/* 's'/'n': constant integers (possibly VOIDmode CONST_DOUBLE).  */
2685 if (GET_CODE (op) == CONST_INT
2686 || (GET_CODE (op) == CONST_DOUBLE
2687 && GET_MODE (op) == VOIDmode))
2690 if (CONSTANT_P (op))
2695 if (GET_CODE (op) == CONST_INT
2696 || (GET_CODE (op) == CONST_DOUBLE
2697 && GET_MODE (op) == VOIDmode))
/* 'I'..'P': machine-dependent integer ranges.  */
2709 if (GET_CODE (op) == CONST_INT
2710 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
/* 'V': memory that is NOT offsettable.  */
2715 if (GET_CODE (op) == MEM
2716 && ((strict > 0 && ! offsettable_memref_p (op))
2718 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2719 || (reload_in_progress
2720 && !(GET_CODE (op) == REG
2721 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
/* 'o': offsettable memory (strict or non-strict by STRICT).  */
2726 if ((strict > 0 && offsettable_memref_p (op))
2727 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2728 /* Before reload, accept what reload can handle. */
2730 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2731 /* During reload, accept a pseudo */
2732 || (reload_in_progress && GET_CODE (op) == REG
2733 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* Default: register-class letters and EXTRA_CONSTRAINT.  */
2739 enum reg_class class;
2741 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2742 if (class != NO_REGS)
2746 && GET_CODE (op) == REG
2747 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2748 || (strict == 0 && GET_CODE (op) == SCRATCH)
2749 || (GET_CODE (op) == REG
2750 && reg_fits_class_p (op, class, offset, mode)))
2753 #ifdef EXTRA_CONSTRAINT
2754 else if (EXTRA_CONSTRAINT (op, c))
2761 constraints[opno] = p;
2762 /* If this operand did not win somehow,
2763 this alternative loses. */
2767 /* This alternative won; the operands are ok.
2768 Change whichever operands this alternative says to change. */
2773 /* See if any earlyclobber operand conflicts with some other
2777 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2778 /* Ignore earlyclobber operands now in memory,
2779 because we would often report failure when we have
2780 two memory operands, one of which was formerly a REG. */
2781 if (earlyclobber[eopno]
2782 && GET_CODE (recog_data.operand[eopno]) == REG)
2783 for (opno = 0; opno < recog_data.n_operands; opno++)
2784 if ((GET_CODE (recog_data.operand[opno]) == MEM
2785 || recog_data.operand_type[opno] != OP_OUT)
2787 /* Ignore things like match_operator operands. */
2788 && *recog_data.constraints[opno] != 0
2789 && ! (matching_operands[opno] == eopno
2790 && operands_match_p (recog_data.operand[opno],
2791 recog_data.operand[eopno]))
2792 && ! safe_from_earlyclobber (recog_data.operand[opno],
2793 recog_data.operand[eopno]))
/* Apply the queued output-looks-like-input substitutions.  */
2798 while (--funny_match_index >= 0)
2800 recog_data.operand[funny_match[funny_match_index].other]
2801 = recog_data.operand[funny_match[funny_match_index].this];
2808 which_alternative++;
2810 while (which_alternative < recog_data.n_alternatives);
2812 which_alternative = -1;
2813 /* If we are about to reject this, but we are not to test strictly,
2814 try a very loose test. Only return failure if it fails also. */
2816 return constrain_operands (-1);
2821 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2822 is a hard reg in class CLASS when its regno is offset by OFFSET
2823 and changed to mode MODE.
2824 If REG occupies multiple hard regs, all of them must be in CLASS. */
/* NOTE(review): this extract elides several original source lines
   (declarations, braces, and the failure return path) — the visible
   lines below are only part of the function body.  */
2827 reg_fits_class_p (operand, class, offset, mode)
2829 register enum reg_class class;
2831 enum machine_mode mode;
/* Hard registers are numbered below FIRST_PSEUDO_REGISTER; anything
   at or above that is a pseudo and can never "fit" a hard-reg class.  */
2833 register int regno = REGNO (operand);
2834 if (regno < FIRST_PSEUDO_REGISTER
2835 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
/* Multi-word values occupy HARD_REGNO_NREGS consecutive hard regs;
   every one of them is checked against the class's bit set.  */
2840 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2842 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2851 /* Split all insns in the function. If UPD_LIFE, update life info after. */
/* NOTE(review): many original lines (declarations, braces, and some
   control-flow glue) are elided from this extract; comments below are
   grounded only in the lines that are visible.  */
2854 split_all_insns (upd_life)
/* Bitmap of basic blocks whose contents changed; used at the end to
   limit the life-info update to only the affected blocks.  */
2861 blocks = sbitmap_alloc (n_basic_blocks);
2862 sbitmap_zero (blocks);
/* Walk blocks in reverse order.  */
2865 for (i = n_basic_blocks - 1; i >= 0; --i)
2867 basic_block bb = BASIC_BLOCK (i);
2870 for (insn = bb->head; insn ; insn = next)
2874 /* Can't use `next_real_insn' because that might go across
2875 CODE_LABELS and short-out basic blocks. */
/* Capture the successor before INSN is possibly replaced by a NOTE
   or by a split sequence.  */
2876 next = NEXT_INSN (insn);
2877 if (! INSN_P (insn))
2880 /* Don't split no-op move insns. These should silently
2881 disappear later in final. Splitting such insns would
2882 break the code that handles REG_NO_CONFLICT blocks. */
2884 else if ((set = single_set (insn)) != NULL
2885 && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
2887 /* Nops get in the way while scheduling, so delete them
2888 now if register allocation has already been done. It
2889 is too risky to try to do this before register
2890 allocation, and there are unlikely to be very many
2891 nops then anyways. */
2892 if (reload_completed)
/* "Delete" by demoting the insn to a deleted-insn note in place.  */
2894 PUT_CODE (insn, NOTE);
2895 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2896 NOTE_SOURCE_FILE (insn) = 0;
2901 /* Split insns here to get max fine-grain parallelism. */
2902 rtx first = PREV_INSN (insn);
2903 rtx last = try_split (PATTERN (insn), insn, 1);
/* Record that this block changed so life info can be refreshed.  */
2907 SET_BIT (blocks, i);
2910 /* try_split returns the NOTE that INSN became. */
2911 PUT_CODE (insn, NOTE);
2912 NOTE_SOURCE_FILE (insn) = 0;
2913 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2915 /* ??? Coddle to md files that generate subregs in post-
2916 reload splitters instead of computing the proper
2918 if (reload_completed && first != last)
2920 first = NEXT_INSN (first);
/* Scrub (SUBREG (REG)) and friends from each new insn's operands.  */
2924 cleanup_subreg_operands (first);
2927 first = NEXT_INSN (first);
/* Stop once the original end-of-block insn has been processed.  */
2931 if (insn == bb->end)
2939 if (insn == bb->end)
2943 /* ??? When we're called from just after reload, the CFG is in bad
2944 shape, and we may have fallen off the end. This could be fixed
2945 by having reload not try to delete unreachable code. Otherwise
2946 assert we found the end insn. */
2947 if (insn == NULL && upd_life)
/* Only pay for a life-info update if something was actually split
   and the caller asked for it.  */
2951 if (changed && upd_life)
2953 compute_bb_for_insn (get_max_uid ());
2954 count_or_remove_death_notes (blocks, 1);
2955 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2958 sbitmap_free (blocks);
2961 #ifdef HAVE_peephole2
/* Per-insn record for the peephole2 window.  Member declarations are
   elided in this extract; usage below shows at least an `insn' rtx and
   a `live_before' regset.  */
2962 struct peep2_insn_data
/* Ring buffer of the last MAX_INSNS_PER_PEEP2 + 1 insns seen while
   scanning a block, indexed modulo its size via peep2_current.  */
2968 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2969 static int peep2_current;
2971 /* A non-insn marker indicating the last insn of the block.
2972 The live_before regset for this element is correct, indicating
2973 global_live_at_end for the block. */
2974 #define PEEP2_EOB pc_rtx
2976 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2977 does not exist. Used by the recognizer to find the next insn to match
2978 in a multi-insn pattern. */
/* NOTE(review): the function header and the abort/offset lines are
   elided from this extract; only part of the body is visible.  */
2984 if (n >= MAX_INSNS_PER_PEEP2 + 1)
/* Wrap the ring-buffer index back into range.  */
2988 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2989 n -= MAX_INSNS_PER_PEEP2 + 1
/* The end-of-block marker is not a real insn; report "no such insn".  */
2991 if (peep2_insn_data[n].insn == PEEP2_EOB)
2993 return peep2_insn_data[n].insn;
2996 /* Return true if REGNO is dead before the Nth non-note insn
/* NOTE(review): several original lines (return type, parameter
   declarations, aborts) are elided from this extract.  */
3000 peep2_regno_dead_p (ofs, regno)
3004 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
/* Convert the caller-relative offset into a ring-buffer index.  */
3007 ofs += peep2_current;
3008 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
3009 ofs -= MAX_INSNS_PER_PEEP2 + 1;
3011 if (peep2_insn_data[ofs].insn == NULL_RTX)
/* Dead = not a member of the live-before set at that slot.  */
3014 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3017 /* Similarly for a REG. */
/* NOTE(review): declarations and the final return lines are elided
   from this extract.  */
3020 peep2_reg_dead_p (ofs, reg)
3026 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
/* Convert the caller-relative offset into a ring-buffer index.  */
3029 ofs += peep2_current;
3030 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
3031 ofs -= MAX_INSNS_PER_PEEP2 + 1;
3033 if (peep2_insn_data[ofs].insn == NULL_RTX)
/* A multi-word REG spans HARD_REGNO_NREGS hard registers; each one
   is tested for liveness.  */
3036 regno = REGNO (reg);
3037 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3039 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3044 /* Try to find a hard register of mode MODE, matching the register class in
3045 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3046 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3047 in which case the only condition is that the register must be available
3048 before CURRENT_INSN.
3049 Registers that already have bits set in REG_SET will not be considered.
3051 If an appropriate register is available, it will be returned and the
3052 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
/* NOTE(review): various declarations, aborts, `continue's, braces and
   the failure return are elided from this extract.  */
3056 peep2_find_free_register (from, to, class_str, mode, reg_set)
3058 const char *class_str;
3059 enum machine_mode mode;
3060 HARD_REG_SET *reg_set;
/* Persists across calls so successive requests rotate through the
   register file instead of always picking the same register.  */
3062 static int search_ofs;
3063 enum reg_class class;
3067 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
/* Map both window offsets into ring-buffer indices.  */
3070 from += peep2_current;
3071 if (from >= MAX_INSNS_PER_PEEP2 + 1)
3072 from -= MAX_INSNS_PER_PEEP2 + 1;
3073 to += peep2_current;
3074 if (to >= MAX_INSNS_PER_PEEP2 + 1)
3075 to -= MAX_INSNS_PER_PEEP2 + 1;
3077 if (peep2_insn_data[from].insn == NULL_RTX)
3079 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
/* Accumulate liveness across every insn in the FROM..TO window; a
   register is only usable if dead throughout.  */
3083 HARD_REG_SET this_live;
3085 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
3087 if (peep2_insn_data[from].insn == NULL_RTX)
3089 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3090 IOR_HARD_REG_SET (live, this_live);
/* 'r' is the generic constraint letter for GENERAL_REGS; anything
   else is decoded by the target's REG_CLASS_FROM_LETTER.  */
3093 class = (class_str[0] == 'r' ? GENERAL_REGS
3094 : REG_CLASS_FROM_LETTER (class_str[0]));
3096 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3098 int raw_regno, regno, success, j;
3100 /* Distribute the free registers as much as possible. */
3101 raw_regno = search_ofs + i;
3102 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3103 raw_regno -= FIRST_PSEUDO_REGISTER;
/* Honor the target's preferred allocation order when it defines one.  */
3104 #ifdef REG_ALLOC_ORDER
3105 regno = reg_alloc_order[raw_regno];
3110 /* Don't allocate fixed registers. */
3111 if (fixed_regs[regno])
3113 /* Make sure the register is of the right class. */
3114 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
3116 /* And can support the mode we need. */
3117 if (! HARD_REGNO_MODE_OK (regno, mode))
3119 /* And that we don't create an extra save/restore. */
3120 if (! call_used_regs[regno] && ! regs_ever_live[regno])
3122 /* And we don't clobber traceback for noreturn functions. */
3123 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3124 && (! reload_completed || frame_pointer_needed))
/* Check every hard reg the mode occupies against both the caller's
   exclusion set and the computed liveness.  */
3128 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3130 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3131 || TEST_HARD_REG_BIT (live, regno + j))
/* Success: reserve the register(s) in the caller's set.  */
3139 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3140 SET_HARD_REG_BIT (*reg_set, regno + j);
3142 /* Start the next search with the next register. */
3143 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3145 search_ofs = raw_regno;
3147 return gen_rtx_REG (mode, regno);
3155 /* Perform the peephole2 optimization pass. */
/* NOTE(review): declarations, braces and some control-flow lines are
   elided from this extract; comments are grounded only in what is
   visible.  */
3158 peephole2_optimize (dump_file)
3159 FILE *dump_file ATTRIBUTE_UNUSED;
/* One regset head per window slot plus one scratch `live' set.  */
3161 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3165 #ifdef HAVE_conditional_execution
3170 /* Initialize the regsets we're going to use. */
3171 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3172 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3173 live = INITIALIZE_REG_SET (rs_heads[i]);
3175 #ifdef HAVE_conditional_execution
/* With conditional execution, changed blocks are tracked in a bitmap
   and life info is recomputed at the end instead of incrementally.  */
3176 blocks = sbitmap_alloc (n_basic_blocks);
3177 sbitmap_zero (blocks);
3180 count_or_remove_death_notes (NULL, 1);
3183 for (b = n_basic_blocks - 1; b >= 0; --b)
3185 basic_block bb = BASIC_BLOCK (b);
3186 struct propagate_block_info *pbi;
3188 /* Indicate that all slots except the last holds invalid data. */
3189 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3190 peep2_insn_data[i].insn = NULL_RTX;
3192 /* Indicate that the last slot contains live_after data. */
3193 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3194 peep2_current = MAX_INSNS_PER_PEEP2;
3196 /* Start up propagation. */
3197 COPY_REG_SET (live, bb->global_live_at_end);
3198 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3200 #ifdef HAVE_conditional_execution
3201 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3203 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
/* Scan the block backwards, so `live' always holds liveness before
   the insn just recorded.  */
3206 for (insn = bb->end; ; insn = prev)
3208 prev = PREV_INSN (insn);
3214 /* Record this insn. */
3215 if (--peep2_current < 0)
3216 peep2_current = MAX_INSNS_PER_PEEP2;
3217 peep2_insn_data[peep2_current].insn = insn;
3218 propagate_one_insn (pbi, insn);
3219 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3221 /* Match the peephole. */
3222 try = peephole2_insns (PATTERN (insn), insn, &match_len);
/* Index of the last insn of the matched sequence in the ring.  */
3225 i = match_len + peep2_current;
3226 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3227 i -= MAX_INSNS_PER_PEEP2 + 1;
3229 /* Replace the old sequence with the new. */
3230 flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
3231 try = emit_insn_after (try, prev);
3233 /* Adjust the basic block boundaries. */
3234 if (peep2_insn_data[i].insn == bb->end)
3236 if (insn == bb->head)
3237 bb->head = NEXT_INSN (prev);
3239 #ifdef HAVE_conditional_execution
3240 /* With conditional execution, we cannot back up the
3241 live information so easily, since the conditional
3242 death data structures are not so self-contained.
3243 So record that we've made a modification to this
3244 block and update life information at the end. */
3245 SET_BIT (blocks, b);
/* Reset the window; the replaced insns' slots are now invalid.  */
3248 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3249 peep2_insn_data[i].insn = NULL_RTX;
3250 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3252 /* Back up lifetime information past the end of the
3253 newly created sequence. */
3254 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3256 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3258 /* Update life information for the new sequence. */
/* Walk the emitted sequence backwards, recording each new insn
   in the window and propagating liveness through it.  */
3264 i = MAX_INSNS_PER_PEEP2;
3265 peep2_insn_data[i].insn = try;
3266 propagate_one_insn (pbi, try);
3267 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3269 try = PREV_INSN (try);
3271 while (try != prev);
3273 /* ??? Should verify that LIVE now matches what we
3274 had before the new sequence. */
3281 if (insn == bb->head)
3285 free_propagate_block_info (pbi);
/* Tear down the regsets allocated at entry.  */
3288 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3289 FREE_REG_SET (peep2_insn_data[i].live_before);
3290 FREE_REG_SET (live);
3292 #ifdef HAVE_conditional_execution
/* Deferred life-info update for the blocks modified above.  */
3293 count_or_remove_death_notes (blocks, 1);
3294 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3295 sbitmap_free (blocks);
3298 #endif /* HAVE_peephole2 */