1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 91-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
26 #include "insn-config.h"
27 #include "insn-attr.h"
28 #include "insn-flags.h"
29 #include "insn-codes.h"
32 #include "hard-reg-set.h"
37 #include "basic-block.h"
/* Default address-update codes used for push/pop operands when the
   target does not define them: pushing pre-modifies SP toward stack
   growth, popping post-modifies it back.  (#else/#endif lines are
   elided in this view.)  */
41 #ifndef STACK_PUSH_CODE
42 #ifdef STACK_GROWS_DOWNWARD
43 #define STACK_PUSH_CODE PRE_DEC
45 #define STACK_PUSH_CODE PRE_INC
49 #ifndef STACK_POP_CODE
50 #ifdef STACK_GROWS_DOWNWARD
51 #define STACK_POP_CODE POST_INC
53 #define STACK_POP_CODE POST_DEC
/* Forward declarations for the file-local helpers defined below.  */
57 static void validate_replace_rtx_1 PROTO((rtx *, rtx, rtx, rtx));
58 static rtx *find_single_use_1 PROTO((rtx, rtx *));
59 static rtx *find_constant_term_loc PROTO((rtx *));
60 static int insn_invalid_p PROTO((rtx));
62 /* Nonzero means allow operands to be volatile.
63 This should be 0 if you are generating rtl, such as if you are calling
64 the functions in optabs.c and expmed.c (most of the time).
65 This should be 1 if all valid insns need to be recognized,
66 such as in regclass.c and final.c and reload.c.
68 init_recog and init_recog_no_volatile are responsible for setting this. */
/* Operand data for the most recently extracted insn; read by
   constrain_operands after extract_insn fills it in.  */
72 struct recog_data recog_data;
74 /* Contains a vector of operand_alternative structures for every operand.
75 Set up by preprocess_constraints. */
76 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
78 /* On return from `constrain_operands', indicate which alternative
81 int which_alternative;
83 /* Nonzero after end of reload pass.
84 Set to 1 or 0 by toplev.c.
85 Controls the significance of (SUBREG (MEM)). */
89 /* Initialize data used by the function `recog'.
90 This must be called once in the compilation of a function
91 before any insn recognition may be done in the function. */
/* Variant that disallows volatile operands during recognition;
   NOTE(review): body is elided in this view -- presumably it clears
   volatile_ok, confirm against the full source.  */
94 init_recog_no_volatile ()
105 /* Try recognizing the instruction INSN,
106 and return the code number that results.
107 Remember the code so that repeated calls do not
108 need to spend the time for actual rerecognition.
110 This function is the normal interface to instruction recognition.
111 The automatically-generated function `recog' is normally called
112 through this one. (The only exception is in combine.c.) */
115 recog_memoized (insn)
/* A negative INSN_CODE means INSN has not yet been recognized (or was
   invalidated); recognize it now and cache the result in the insn.  */
118 if (INSN_CODE (insn) < 0)
119 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
120 return INSN_CODE (insn);
123 /* Check that X is an insn-body for an `asm' with operands
124 and that the operands mentioned in it are legitimate. */
127 check_asm_operands (x)
132 const char **constraints;
135 /* Post-reload, be more strict with things. */
136 if (reload_completed)
138 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
139 extract_insn (make_insn_raw (x));
140 constrain_operands (1);
141 return which_alternative >= 0;
144 noperands = asm_noperands (x);
/* Stack-allocate scratch vectors for the operand rtxs and their
   constraint strings; freed automatically on return.  */
150 operands = (rtx *) alloca (noperands * sizeof (rtx));
151 constraints = (const char **) alloca (noperands * sizeof (char *));
153 decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);
155 for (i = 0; i < noperands; i++)
157 const char *c = constraints[i];
/* A lone digit is a matching constraint: validate against the
   constraint string of the operand it refers to.  */
160 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
161 c = constraints[c[0] - '0'];
163 if (! asm_operand_ok (operands[i], c))
170 /* Static data for the next two routines. */
/* One pending change: the location patched, the old contents, and (for
   insns) the saved INSN_CODE so a cancel can restore it.  */
172 typedef struct change_t
/* Dynamically grown array of pending changes and its capacity.  */
180 static change_t *changes;
181 static int changes_allocated;
/* Number of changes currently recorded in the open group.  */
183 static int num_changes = 0;
185 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
186 at which NEW will be placed. If OBJECT is zero, no validation is done,
187 the change is simply made.
189 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
190 will be called with the address and mode as parameters. If OBJECT is
191 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
194 IN_GROUP is non-zero if this is part of a group of changes that must be
195 performed as a group. In that case, the changes will be stored. The
196 function `apply_change_group' will validate and apply the changes.
198 If IN_GROUP is zero, this is a single change. Try to recognize the insn
199 or validate the memory reference with the change applied. If the result
200 is not valid for the machine, suppress the change and return zero.
201 Otherwise, perform the change and return 1. */
204 validate_change (object, loc, new, in_group)
/* Replacing something with itself (or an equal rtx) is trivially OK.  */
212 if (old == new || rtx_equal_p (old, new))
/* A singleton change while a group is open would corrupt the group.  */
215 if (in_group == 0 && num_changes != 0)
220 /* Save the information describing this change. */
221 if (num_changes >= changes_allocated)
223 if (changes_allocated == 0)
224 /* This value allows for repeated substitutions inside complex
225 indexed addresses, or changes in up to 5 insns. */
226 changes_allocated = MAX_RECOG_OPERANDS * 5;
/* Grow geometrically thereafter.  */
228 changes_allocated *= 2;
231 (change_t*) xrealloc (changes,
232 sizeof (change_t) * changes_allocated);
235 changes[num_changes].object = object;
236 changes[num_changes].loc = loc;
237 changes[num_changes].old = old;
239 if (object && GET_CODE (object) != MEM)
241 /* Set INSN_CODE to force rerecognition of insn. Save old code in
243 changes[num_changes].old_code = INSN_CODE (object);
244 INSN_CODE (object) = -1;
249 /* If we are making a group of changes, return 1. Otherwise, validate the
250 change group we made. */
255 return apply_change_group ();
258 /* This subroutine of apply_change_group verifies whether the changes to INSN
259 were valid; i.e. whether INSN can still be recognized. */
262 insn_invalid_p (insn)
265 int icode = recog_memoized (insn);
/* An unrecognized pattern may still be a valid asm with operands.  */
266 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
268 if (is_asm && ! check_asm_operands (PATTERN (insn)))
270 if (! is_asm && icode < 0)
273 /* After reload, verify that all constraints are satisfied. */
274 if (reload_completed)
278 if (! constrain_operands (1))
285 /* Apply a group of changes previously issued with `validate_change'.
286 Return 1 if all changes are valid, zero otherwise. */
289 apply_change_group ()
293 /* The changes have been applied and all INSN_CODEs have been reset to force
296 The changes are valid if we aren't given an object, or if we are
297 given a MEM and it still is a valid address, or if this is in insn
298 and it is recognized. In the latter case, if reload has completed,
299 we also require that the operands meet the constraints for
302 for (i = 0; i < num_changes; i++)
304 rtx object = changes[i].object;
/* For a MEM the only thing to check is that its address is still
   legitimate in the MEM's mode.  */
309 if (GET_CODE (object) == MEM)
311 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
314 else if (insn_invalid_p (object))
316 rtx pat = PATTERN (object);
318 /* Perhaps we couldn't recognize the insn because there were
319 extra CLOBBERs at the end. If so, try to re-recognize
320 without the last CLOBBER (later iterations will cause each of
321 them to be eliminated, in turn). But don't do this if we
322 have an ASM_OPERAND. */
323 if (GET_CODE (pat) == PARALLEL
324 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
325 && asm_noperands (PATTERN (object)) < 0)
/* A two-element PARALLEL collapses to its first element; otherwise
   rebuild the PARALLEL one element shorter.  */
329 if (XVECLEN (pat, 0) == 2)
330 newpat = XVECEXP (pat, 0, 0);
336 = gen_rtx_PARALLEL (VOIDmode,
337 gen_rtvec (XVECLEN (pat, 0) - 1));
338 for (j = 0; j < XVECLEN (newpat, 0); j++)
339 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
342 /* Add a new change to this group to replace the pattern
343 with this new pattern. Then consider this change
344 as having succeeded. The change we added will
345 cause the entire call to fail if things remain invalid.
347 Note that this can lose if a later change than the one
348 we are processing specified &XVECEXP (PATTERN (object), 0, X)
349 but this shouldn't occur. */
351 validate_change (object, &PATTERN (object), newpat, 1);
353 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
354 /* If this insn is a CLOBBER or USE, it is always valid, but is
/* All changes validated when the loop ran to completion.  */
362 if (i == num_changes)
374 /* Return the number of changes so far in the current group. */
377 num_validated_changes ()
382 /* Retract the changes numbered NUM and up. */
/* NOTE(review): the function header line is elided in this view; in the
   full source this is `cancel_changes (num)' -- confirm there.  */
390 /* Back out all the changes. Do this in the opposite order in which
392 for (i = num_changes - 1; i >= num; i--)
/* Restore the old rtl, and for insns the saved INSN_CODE too.  */
394 *changes[i].loc = changes[i].old;
395 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
396 INSN_CODE (changes[i].object) = changes[i].old_code;
401 /* Replace every occurrence of FROM in X with TO. Mark each change with
402 validate_change passing OBJECT. */
405 validate_replace_rtx_1 (loc, from, to, object)
407 rtx from, to, object;
410 register const char *fmt;
411 register rtx x = *loc;
412 enum rtx_code code = GET_CODE (x);
414 /* X matches FROM if it is the same rtx or they are both referring to the
415 same register in the same mode. Avoid calling rtx_equal_p unless the
416 operands look similar. */
419 || (GET_CODE (x) == REG && GET_CODE (from) == REG
420 && GET_MODE (x) == GET_MODE (from)
421 && REGNO (x) == REGNO (from))
422 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
423 && rtx_equal_p (x, from)))
425 validate_change (object, loc, to, 1);
429 /* For commutative or comparison operations, try replacing each argument
430 separately and seeing if we made any changes. If so, put a constant
432 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
434 int prev_changes = num_changes;
436 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
437 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
/* If a constant ended up as operand 0, canonicalize by swapping the
   operands; a comparison must also have its code reversed.  */
438 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
440 validate_change (object, loc,
441 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
442 : swap_condition (code),
443 GET_MODE (x), XEXP (x, 1),
451 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
452 done the substitution, otherwise we won't. */
457 /* If we have a PLUS whose second operand is now a CONST_INT, use
458 plus_constant to try to simplify it. */
459 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
460 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
/* MINUS of a constant is folded as PLUS of the negated constant.  */
465 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
467 validate_change (object, loc,
468 plus_constant (XEXP (x, 0), - INTVAL (to)),
476 /* In these cases, the operation to be performed depends on the mode
477 of the operand. If we are replacing the operand with a VOIDmode
478 constant, we lose the information. So try to simplify the operation
479 in that case. If it fails, substitute in something that we know
480 won't be recognized. */
481 if (GET_MODE (to) == VOIDmode
482 && (XEXP (x, 0) == from
483 || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
484 && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
485 && REGNO (XEXP (x, 0)) == REGNO (from))))
487 rtx new = simplify_unary_operation (code, GET_MODE (x), to,
/* Simplification failed: substitute a CLOBBER, which recog will
   reject, so the whole change group fails cleanly.  */
490 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
492 validate_change (object, loc, new, 1);
498 /* If we have a SUBREG of a register that we are replacing and we are
499 replacing it with a MEM, make a new MEM and try replacing the
500 SUBREG with it. Don't do this if the MEM has a mode-dependent address
501 or if we would be widening it. */
503 if (SUBREG_REG (x) == from
504 && GET_CODE (from) == REG
505 && GET_CODE (to) == MEM
506 && ! mode_dependent_address_p (XEXP (to, 0))
507 && ! MEM_VOLATILE_P (to)
508 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
510 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
511 enum machine_mode mode = GET_MODE (x);
/* On big-endian targets the narrow value lives at the high end of
   the word, so adjust the byte offset accordingly.  */
514 if (BYTES_BIG_ENDIAN)
515 offset += (MIN (UNITS_PER_WORD,
516 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
517 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
519 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
520 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
521 MEM_COPY_ATTRIBUTES (new, to);
522 validate_change (object, loc, new, 1);
529 /* If we are replacing a register with memory, try to change the memory
530 to be the mode required for memory in extract operations (this isn't
531 likely to be an insertion operation; if it was, nothing bad will
532 happen, we might just fail in some cases). */
534 if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
535 && GET_CODE (XEXP (x, 1)) == CONST_INT
536 && GET_CODE (XEXP (x, 2)) == CONST_INT
537 && ! mode_dependent_address_p (XEXP (to, 0))
538 && ! MEM_VOLATILE_P (to))
540 enum machine_mode wanted_mode = VOIDmode;
541 enum machine_mode is_mode = GET_MODE (to);
542 int pos = INTVAL (XEXP (x, 2));
/* Ask the extv/extzv patterns what operand mode they want;
   fall back to word_mode when they don't say.  */
545 if (code == ZERO_EXTRACT)
547 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
548 if (wanted_mode == VOIDmode)
549 wanted_mode = word_mode;
553 if (code == SIGN_EXTRACT)
555 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
556 if (wanted_mode == VOIDmode)
557 wanted_mode = word_mode;
561 /* If we have a narrower mode, we can do something. */
562 if (wanted_mode != VOIDmode
563 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
565 int offset = pos / BITS_PER_UNIT;
568 /* If the bytes and bits are counted differently, we
569 must adjust the offset. */
570 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
571 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
574 pos %= GET_MODE_BITSIZE (wanted_mode);
576 newmem = gen_rtx_MEM (wanted_mode,
577 plus_constant (XEXP (to, 0), offset));
578 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
579 MEM_COPY_ATTRIBUTES (newmem, to);
581 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
582 validate_change (object, &XEXP (x, 0), newmem, 1);
592 /* For commutative or comparison operations we've already performed
593 replacements. Don't try to perform them again. */
594 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
596 fmt = GET_RTX_FORMAT (code);
597 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
600 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
601 else if (fmt[i] == 'E')
602 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
603 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
608 /* Try replacing every occurrence of FROM in INSN with TO. After all
609 changes have been made, validate by seeing if INSN is still valid. */
612 validate_replace_rtx (from, to, insn)
615 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
/* Commit or roll back the whole group atomically.  */
616 return apply_change_group ();
619 /* Try replacing every occurrence of FROM in INSN with TO. After all
620 changes have been made, validate by seeing if INSN is still valid. */
/* Unlike validate_replace_rtx, this leaves the change group open for
   the caller to extend before applying.  */
623 validate_replace_rtx_group (from, to, insn)
626 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
629 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
630 SET_DESTs. After all changes have been made, validate by seeing if
631 INSN is still valid. */
634 validate_replace_src (from, to, insn)
/* Only plain single-SET INSNs and JUMP_INSNs are handled.  */
637 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
638 || GET_CODE (PATTERN (insn)) != SET)
641 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
/* A MEM destination's address is a use, not a def -- replace there too.  */
642 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
643 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
645 return apply_change_group ();
649 /* Return 1 if the insn using CC0 set by INSN does not contain
650 any ordered tests applied to the condition codes.
651 EQ and NE tests do not count. */
654 next_insn_tests_no_inequality (insn)
657 register rtx next = next_cc0_user (insn);
659 /* If there is no next insn, we have to take the conservative choice. */
663 return ((GET_CODE (next) == JUMP_INSN
664 || GET_CODE (next) == INSN
665 || GET_CODE (next) == CALL_INSN)
666 && ! inequality_comparisons_p (PATTERN (next)));
669 #if 0 /* This is useless since the insn that sets the cc's
670 must be followed immediately by the use of them. */
671 /* Return 1 if the CC value set up by INSN is not used. */
674 next_insns_test_no_inequality (insn)
677 register rtx next = NEXT_INSN (insn);
/* Scan forward over the insn stream (dead code: inside #if 0).  */
679 for (; next != 0; next = NEXT_INSN (next))
681 if (GET_CODE (next) == CODE_LABEL
682 || GET_CODE (next) == BARRIER)
684 if (GET_CODE (next) == NOTE)
686 if (inequality_comparisons_p (PATTERN (next)))
688 if (sets_cc0_p (PATTERN (next)) == 1)
690 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
698 /* This is used by find_single_use to locate an rtx that contains exactly one
699 use of DEST, which is typically either a REG or CC0. It returns a
700 pointer to the innermost rtx expression containing DEST. Appearances of
701 DEST that are being used to totally replace it are not counted. */
704 find_single_use_1 (dest, loc)
709 enum rtx_code code = GET_CODE (x);
726 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
727 of a REG that occupies all of the REG, the insn uses DEST if
728 it is mentioned in the destination or the source. Otherwise, we
729 need just check the source. */
730 if (GET_CODE (SET_DEST (x)) != CC0
731 && GET_CODE (SET_DEST (x)) != PC
732 && GET_CODE (SET_DEST (x)) != REG
733 && ! (GET_CODE (SET_DEST (x)) == SUBREG
734 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
735 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
736 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
737 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
738 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
741 return find_single_use_1 (dest, &SET_SRC (x));
745 return find_single_use_1 (dest, &XEXP (x, 0));
751 /* If it wasn't one of the common cases above, check each expression and
752 vector of this code. Look for a unique usage of DEST. */
754 fmt = GET_RTX_FORMAT (code);
755 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
/* Direct match of DEST itself (same REG number counts as a match).  */
759 if (dest == XEXP (x, i)
760 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
761 && REGNO (dest) == REGNO (XEXP (x, i))))
764 this_result = find_single_use_1 (dest, &XEXP (x, i));
/* Keep the first hit; a second hit means DEST is used more than
   once, so there is no single use.  */
767 result = this_result;
768 else if (this_result)
769 /* Duplicate usage. */
772 else if (fmt[i] == 'E')
/* Same search, element by element, inside rtx vectors.  */
776 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
778 if (XVECEXP (x, i, j) == dest
779 || (GET_CODE (dest) == REG
780 && GET_CODE (XVECEXP (x, i, j)) == REG
781 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
784 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
787 result = this_result;
788 else if (this_result)
797 /* See if DEST, produced in INSN, is used only a single time in the
798 sequel. If so, return a pointer to the innermost rtx expression in which
801 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
803 This routine will return usually zero either before flow is called (because
804 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
805 note can't be trusted).
807 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
808 care about REG_DEAD notes or LOG_LINKS.
810 Otherwise, we find the single use by finding an insn that has a
811 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
812 only referenced once in that insn, we know that it must be the first
813 and last insn referencing DEST. */
816 find_single_use (dest, insn, ploc)
/* CC0 case: the user must be the immediately following INSN/JUMP_INSN.  */
828 next = NEXT_INSN (insn);
830 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
833 result = find_single_use_1 (dest, &PATTERN (next));
/* The LOG_LINKS/REG_DEAD approach only works for pseudo REGs between
   flow and reload.  */
840 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
843 for (next = next_nonnote_insn (insn);
844 next != 0 && GET_CODE (next) != CODE_LABEL;
845 next = next_nonnote_insn (next))
846 if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
/* Require a LOG_LINK from the candidate user back to INSN.  */
848 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
849 if (XEXP (link, 0) == insn)
854 result = find_single_use_1 (dest, &PATTERN (next));
864 /* Return 1 if OP is a valid general operand for machine mode MODE.
865 This is either a register reference, a memory reference,
866 or a constant. In the case of a memory reference, the address
867 is checked for general validity for the target machine.
869 Register and memory references must have mode MODE in order to be valid,
870 but some constants have no machine mode and are valid for any mode.
872 If MODE is VOIDmode, OP is checked for validity for whatever mode
875 The main use of this function is as a predicate in match_operand
876 expressions in the machine description.
878 For an explanation of this function's behavior for registers of
879 class NO_REGS, see the comment for `register_operand'. */
882 general_operand (op, mode)
884 enum machine_mode mode;
886 register enum rtx_code code = GET_CODE (op);
887 int mode_altering_drug = 0;
889 if (mode == VOIDmode)
890 mode = GET_MODE (op);
892 /* Don't accept CONST_INT or anything similar
893 if the caller wants something floating. */
894 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
895 && GET_MODE_CLASS (mode) != MODE_INT
896 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
/* Constants: mode must agree (or be VOIDmode) and the target (and PIC
   rules, if any) must accept the constant.  */
900 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
901 #ifdef LEGITIMATE_PIC_OPERAND_P
902 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
904 && LEGITIMATE_CONSTANT_P (op));
906 /* Except for certain constants with VOIDmode, already checked for,
907 OP's mode must match MODE if MODE specifies a mode. */
909 if (GET_MODE (op) != mode)
914 #ifdef INSN_SCHEDULING
915 /* On machines that have insn scheduling, we want all memory
916 reference to be explicit, so outlaw paradoxical SUBREGs. */
917 if (GET_CODE (SUBREG_REG (op)) == MEM
918 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
/* Look through the SUBREG to judge the inner expression.  */
922 op = SUBREG_REG (op);
923 code = GET_CODE (op);
925 /* No longer needed, since (SUBREG (MEM...))
926 will load the MEM into a reload reg in the MEM's own mode. */
927 mode_altering_drug = 1;
932 /* A register whose class is NO_REGS is not a general operand. */
933 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
934 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
/* Memory: reject disallowed volatiles, then validate the address.  */
938 register rtx y = XEXP (op, 0);
939 if (! volatile_ok && MEM_VOLATILE_P (op))
941 if (GET_CODE (y) == ADDRESSOF)
943 /* Use the mem's mode, since it will be reloaded thus. */
944 mode = GET_MODE (op);
945 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
948 /* Pretend this is an operand for now; we'll run force_operand
949 on its replacement in fixup_var_refs_1. */
950 if (code == ADDRESSOF)
/* `win' label target: a (SUBREG (MEM)) additionally requires a
   mode-independent address.  */
956 if (mode_altering_drug)
957 return ! mode_dependent_address_p (XEXP (op, 0));
961 /* Return 1 if OP is a valid memory address for a memory reference
964 The main use of this function is as a predicate in match_operand
965 expressions in the machine description. */
968 address_operand (op, mode)
970 enum machine_mode mode;
/* Delegate entirely to the target's address legitimacy check.  */
972 return memory_address_p (mode, op);
975 /* Return 1 if OP is a register reference of mode MODE.
976 If MODE is VOIDmode, accept a register in any mode.
978 The main use of this function is as a predicate in match_operand
979 expressions in the machine description.
981 As a special exception, registers whose class is NO_REGS are
982 not accepted by `register_operand'. The reason for this change
983 is to allow the representation of special architecture artifacts
984 (such as a condition code register) without extending the rtl
985 definitions. Since registers of class NO_REGS cannot be used
986 as registers in any case where register classes are examined,
987 it is most consistent to keep this function from accepting them. */
990 register_operand (op, mode)
992 enum machine_mode mode;
994 if (GET_MODE (op) != mode && mode != VOIDmode)
997 if (GET_CODE (op) == SUBREG)
999 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1000 because it is guaranteed to be reloaded into one.
1001 Just make sure the MEM is valid in itself.
1002 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1003 but currently it does result from (SUBREG (REG)...) where the
1004 reg went on the stack.) */
1005 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1006 return general_operand (op, mode)
1008 #ifdef CLASS_CANNOT_CHANGE_SIZE
/* Reject size-changing SUBREGs of hard regs whose class cannot be
   accessed in a different size (e.g. some FP register files).  */
1009 if (GET_CODE (SUBREG_REG (op)) == REG
1010 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1011 && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
1012 REGNO (SUBREG_REG (op)))
1013 && (GET_MODE_SIZE (mode)
1014 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
1015 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1016 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
/* Judge the inner register.  */
1020 op = SUBREG_REG (op);
1023 /* We don't consider registers whose class is NO_REGS
1024 to be a register operand. */
1025 return (GET_CODE (op) == REG
1026 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1027 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1030 /* Return 1 for a register in Pmode; ignore the tested mode. */
1033 pmode_register_operand (op, mode)
1035 enum machine_mode mode ATTRIBUTE_UNUSED;
/* MODE is deliberately ignored; always test against Pmode.  */
1037 return register_operand (op, Pmode);
1040 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1041 or a hard register. */
1044 scratch_operand (op, mode)
1046 enum machine_mode mode;
1048 if (GET_MODE (op) != mode && mode != VOIDmode)
/* Pseudo registers do not qualify -- hard regs and SCRATCH only.  */
1051 return (GET_CODE (op) == SCRATCH
1052 || (GET_CODE (op) == REG
1053 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1056 /* Return 1 if OP is a valid immediate operand for mode MODE.
1058 The main use of this function is as a predicate in match_operand
1059 expressions in the machine description. */
1062 immediate_operand (op, mode)
1064 enum machine_mode mode;
1066 /* Don't accept CONST_INT or anything similar
1067 if the caller wants something floating. */
1068 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1069 && GET_MODE_CLASS (mode) != MODE_INT
1070 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1073 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1074 result in 0/1. It seems a safe assumption that this is
1075 in range for everyone. */
1076 if (GET_CODE (op) == CONSTANT_P_RTX)
/* Any other constant must match the mode (or be modeless) and pass
   the target's constant/PIC legitimacy checks.  */
1079 return (CONSTANT_P (op)
1080 && (GET_MODE (op) == mode || mode == VOIDmode
1081 || GET_MODE (op) == VOIDmode)
1082 #ifdef LEGITIMATE_PIC_OPERAND_P
1083 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1085 && LEGITIMATE_CONSTANT_P (op));
1088 /* Returns 1 if OP is an operand that is a CONST_INT. */
1091 const_int_operand (op, mode)
1093 enum machine_mode mode ATTRIBUTE_UNUSED;
/* CONST_INT is always VOIDmode, so MODE is irrelevant here.  */
1095 return GET_CODE (op) == CONST_INT;
1098 /* Returns 1 if OP is an operand that is a constant integer or constant
1099 floating-point number. */
1102 const_double_operand (op, mode)
1104 enum machine_mode mode;
1106 /* Don't accept CONST_INT or anything similar
1107 if the caller wants something floating. */
1108 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1109 && GET_MODE_CLASS (mode) != MODE_INT
1110 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
/* Accept CONST_DOUBLE or CONST_INT whose mode agrees with MODE.  */
1113 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1114 && (mode == VOIDmode || GET_MODE (op) == mode
1115 || GET_MODE (op) == VOIDmode));
1118 /* Return 1 if OP is a general operand that is not an immediate operand. */
1121 nonimmediate_operand (op, mode)
1123 enum machine_mode mode;
/* I.e. a register or memory reference, but never a constant.  */
1125 return (general_operand (op, mode) && ! CONSTANT_P (op));
1128 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1131 nonmemory_operand (op, mode)
1133 enum machine_mode mode;
1135 if (CONSTANT_P (op))
1137 /* Don't accept CONST_INT or anything similar
1138 if the caller wants something floating. */
1139 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1140 && GET_MODE_CLASS (mode) != MODE_INT
1141 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1144 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
1145 #ifdef LEGITIMATE_PIC_OPERAND_P
1146 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1148 && LEGITIMATE_CONSTANT_P (op));
/* Non-constant: must be a register (same logic as register_operand).  */
1151 if (GET_MODE (op) != mode && mode != VOIDmode)
1154 if (GET_CODE (op) == SUBREG)
1156 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1157 because it is guaranteed to be reloaded into one.
1158 Just make sure the MEM is valid in itself.
1159 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1160 but currently it does result from (SUBREG (REG)...) where the
1161 reg went on the stack.) */
1162 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1163 return general_operand (op, mode);
1164 op = SUBREG_REG (op);
1167 /* We don't consider registers whose class is NO_REGS
1168 to be a register operand. */
1169 return (GET_CODE (op) == REG
1170 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1171 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1174 /* Return 1 if OP is a valid operand that stands for pushing a
1175 value of mode MODE onto the stack.
1177 The main use of this function is as a predicate in match_operand
1178 expressions in the machine description. */
1181 push_operand (op, mode)
1183 enum machine_mode mode;
1185 if (GET_CODE (op) != MEM)
1188 if (mode != VOIDmode && GET_MODE (op) != mode)
/* Address must be the target's push auto-modify of the stack pointer.  */
1193 if (GET_CODE (op) != STACK_PUSH_CODE)
1196 return XEXP (op, 0) == stack_pointer_rtx;
1199 /* Return 1 if OP is a valid operand that stands for popping a
1200 value of mode MODE off the stack.
1202 The main use of this function is as a predicate in match_operand
1203 expressions in the machine description. */
1206 pop_operand (op, mode)
1208 enum machine_mode mode;
1210 if (GET_CODE (op) != MEM)
1213 if (mode != VOIDmode && GET_MODE (op) != mode)
/* Address must be the target's pop auto-modify of the stack pointer.  */
1218 if (GET_CODE (op) != STACK_POP_CODE)
1221 return XEXP (op, 0) == stack_pointer_rtx;
1224 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1227 memory_address_p (mode, addr)
1228 enum machine_mode mode;
/* An ADDRESSOF is always acceptable: it will be resolved later.  */
1231 if (GET_CODE (addr) == ADDRESSOF)
/* Otherwise let the target macro decide (jumps to `win' on success).  */
1234 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1241 /* Return 1 if OP is a valid memory reference with mode MODE,
1242 including a valid address.
1244 The main use of this function is as a predicate in match_operand
1245 expressions in the machine description. */
1248 memory_operand (op, mode)
1250 enum machine_mode mode;
1254 if (! reload_completed)
1255 /* Note that no SUBREG is a memory operand before end of reload pass,
1256 because (SUBREG (MEM...)) forces reloading into a register. */
1257 return GET_CODE (op) == MEM && general_operand (op, mode);
1259 if (mode != VOIDmode && GET_MODE (op) != mode)
/* After reload, look through a SUBREG wrapper to find the MEM.  */
1263 if (GET_CODE (inner) == SUBREG)
1264 inner = SUBREG_REG (inner);
1266 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1269 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1270 that is, a memory reference whose address is a general_operand. */
1273 indirect_operand (op, mode)
1275 enum machine_mode mode;
1277 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1278 if (! reload_completed
1279 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1281 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1282 rtx inner = SUBREG_REG (op);
/* Account for big-endian placement of the narrow value.  */
1284 if (BYTES_BIG_ENDIAN)
1285 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1286 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1288 if (mode != VOIDmode && GET_MODE (op) != mode)
1291 /* The only way that we can have a general_operand as the resulting
1292 address is if OFFSET is zero and the address already is an operand
1293 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1296 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1297 || (GET_CODE (XEXP (inner, 0)) == PLUS
1298 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1299 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1300 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
/* Ordinary case: a MEM whose address is itself a general operand.  */
1303 return (GET_CODE (op) == MEM
1304 && memory_operand (op, mode)
1305 && general_operand (XEXP (op, 0), Pmode));
1308 /* Return 1 if this is a comparison operator. This allows the use of
1309 MATCH_OPERATOR to recognize all the branch insns. */
1312 comparison_operator (op, mode)
1314 enum machine_mode mode;
/* True iff MODE is VOIDmode or matches OP's mode, and OP's rtx code is
   in the comparison class ('<').  */
1316 return ((mode == VOIDmode || GET_MODE (op) == mode)
1317 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1320 /* If BODY is an insn body that uses ASM_OPERANDS,
1321 return the number of operands (both input and output) in the insn.
1322 Otherwise return -1. */
1325 asm_noperands (body)
1328 if (GET_CODE (body) == ASM_OPERANDS)
1329 /* No output operands: return number of input operands. */
1330 return ASM_OPERANDS_INPUT_LENGTH (body);
1331 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1332 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1333 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1334 else if (GET_CODE (body) == PARALLEL
1335 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1336 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1338 /* Multiple output operands, or 1 output plus some clobbers:
1339 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1343 /* Count backwards through CLOBBERs to determine number of SETs. */
1344 for (i = XVECLEN (body, 0); i > 0; i--)
1346 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
/* Anything that is neither SET nor CLOBBER means this is not an
   asm body of the expected shape.  */
1348 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1352 /* N_SETS is now number of output operands. */
1355 /* Verify that all the SETs we have
1356 came from a single original asm_operands insn
1357 (so that invalid combinations are blocked). */
1358 for (i = 0; i < n_sets; i++)
1360 rtx elt = XVECEXP (body, 0, i);
1361 if (GET_CODE (elt) != SET)
1363 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1365 /* If these ASM_OPERANDS rtx's came from different original insns
1366 then they aren't allowed together. */
1367 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1368 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
/* Total = inputs (from the shared ASM_OPERANDS) plus the outputs.  */
1371 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1374 else if (GET_CODE (body) == PARALLEL
1375 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1377 /* 0 outputs, but some clobbers:
1378 body is [(asm_operands ...) (clobber (reg ...))...]. */
1381 /* Make sure all the other parallel things really are clobbers. */
1382 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1383 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1386 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1392 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1393 copy its operands (both input and output) into the vector OPERANDS,
1394 the locations of the operands within the insn into the vector OPERAND_LOCS,
1395 and the constraints for the operands into CONSTRAINTS.
1396 Write the modes of the operands into MODES.
1397 Return the assembler-template.
1399 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1400 we don't store that info. */
1403 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1407 const char **constraints;
1408 enum machine_mode *modes;
1414 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1416 rtx asmop = SET_SRC (body);
1417 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1419 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
/* Operand 0 is the output; the inputs follow starting at index 1.  */
1421 for (i = 1; i < noperands; i++)
1424 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1426 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1428 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1430 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1433 /* The output is in the SET.
1434 Its constraint is in the ASM_OPERANDS itself. */
1436 operands[0] = SET_DEST (body);
1438 operand_locs[0] = &SET_DEST (body);
1440 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1442 modes[0] = GET_MODE (SET_DEST (body));
1443 template = ASM_OPERANDS_TEMPLATE (asmop);
1445 else if (GET_CODE (body) == ASM_OPERANDS)
1448 /* No output operands: BODY is (asm_operands ....). */
1450 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1452 /* The input operands are found in the 1st element vector. */
1453 /* Constraints for inputs are in the 2nd element vector. */
1454 for (i = 0; i < noperands; i++)
1457 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1459 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1461 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1463 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1465 template = ASM_OPERANDS_TEMPLATE (asmop);
1467 else if (GET_CODE (body) == PARALLEL
1468 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1470 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1471 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1472 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1473 int nout = 0; /* Does not include CLOBBERs. */
1475 /* At least one output, plus some CLOBBERs. */
1477 /* The outputs are in the SETs.
1478 Their constraints are in the ASM_OPERANDS itself. */
1479 for (i = 0; i < nparallel; i++)
1481 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1482 break; /* Past last SET */
1485 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1487 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1489 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1491 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
/* The inputs are stored in the output vectors after the NOUT outputs.  */
1495 for (i = 0; i < nin; i++)
1498 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1500 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1502 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1504 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1507 template = ASM_OPERANDS_TEMPLATE (asmop);
1509 else if (GET_CODE (body) == PARALLEL
1510 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1512 /* No outputs, but some CLOBBERs. */
1514 rtx asmop = XVECEXP (body, 0, 0);
1515 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1517 for (i = 0; i < nin; i++)
1520 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1522 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1524 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1526 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1529 template = ASM_OPERANDS_TEMPLATE (asmop);
1535 /* Check if an asm_operand matches its constraints.
1536 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1539 asm_operand_ok (op, constraint)
1541 const char *constraint;
1545 /* Use constrain_operands after reload. */
1546 if (reload_completed)
/* Dispatch on each constraint letter in turn.  */
1551 switch (*constraint++)
1564 case '0': case '1': case '2': case '3': case '4':
1565 case '5': case '6': case '7': case '8': case '9':
1566 /* For best results, our caller should have given us the
1567 proper matching constraint, but we can't actually fail
1568 the check if they didn't. Indicate that results are
/* Accept any valid address here.  */
1574 if (address_operand (op, VOIDmode))
1579 case 'V': /* non-offsettable */
1580 if (memory_operand (op, VOIDmode))
1584 case 'o': /* offsettable */
1585 if (offsettable_nonstrict_memref_p (op))
1590 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1591 excepting those that expand_call created. Further, on some
1592 machines which do not have generalized auto inc/dec, an inc/dec
1593 is not a memory_operand.
1595 Match any memory and hope things are resolved after reload. */
/* Autodecrement memory reference.  */
1597 if (GET_CODE (op) == MEM
1599 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1600 || GET_CODE (XEXP (op, 0)) == POST_DEC))
/* Autoincrement memory reference.  */
1605 if (GET_CODE (op) == MEM
1607 || GET_CODE (XEXP (op, 0)) == PRE_INC
1608 || GET_CODE (XEXP (op, 0)) == POST_INC))
1613 #ifndef REAL_ARITHMETIC
1614 /* Match any floating double constant, but only if
1615 we can examine the bits of it reliably. */
1616 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1617 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1618 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1624 if (GET_CODE (op) == CONST_DOUBLE)
/* 'G'/'H': machine-dependent classes of floating constants.  */
1629 if (GET_CODE (op) == CONST_DOUBLE
1630 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1634 if (GET_CODE (op) == CONST_DOUBLE
1635 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
/* An integer constant: CONST_INT, or a CONST_DOUBLE carrying an
   integer wider than HOST_WIDE_INT (its mode is VOIDmode).  */
1640 if (GET_CODE (op) == CONST_INT
1641 || (GET_CODE (op) == CONST_DOUBLE
1642 && GET_MODE (op) == VOIDmode))
1648 #ifdef LEGITIMATE_PIC_OPERAND_P
1649 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1656 if (GET_CODE (op) == CONST_INT
1657 || (GET_CODE (op) == CONST_DOUBLE
1658 && GET_MODE (op) == VOIDmode))
/* 'I'..'P': machine-dependent integer ranges checked by the target's
   CONST_OK_FOR_LETTER_P macro.  */
1663 if (GET_CODE (op) == CONST_INT
1664 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1668 if (GET_CODE (op) == CONST_INT
1669 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1673 if (GET_CODE (op) == CONST_INT
1674 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1678 if (GET_CODE (op) == CONST_INT
1679 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1683 if (GET_CODE (op) == CONST_INT
1684 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1688 if (GET_CODE (op) == CONST_INT
1689 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1693 if (GET_CODE (op) == CONST_INT
1694 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1698 if (GET_CODE (op) == CONST_INT
1699 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
/* Any general operand is acceptable.  */
1707 if (general_operand (op, VOIDmode))
1711 #ifdef EXTRA_CONSTRAINT
/* 'Q'..'U': machine-dependent extra constraints.  */
1713 if (EXTRA_CONSTRAINT (op, 'Q'))
1717 if (EXTRA_CONSTRAINT (op, 'R'))
1721 if (EXTRA_CONSTRAINT (op, 'S'))
1725 if (EXTRA_CONSTRAINT (op, 'T'))
1729 if (EXTRA_CONSTRAINT (op, 'U'))
/* Register-class letters: a BLKmode value can never go in a register.  */
1736 if (GET_MODE (op) == BLKmode)
1738 if (register_operand (op, VOIDmode))
1747 /* Given an rtx *P, if it is a sum containing an integer constant term,
1748 return the location (type rtx *) of the pointer to that constant term.
1749 Otherwise, return a null pointer. */
1752 find_constant_term_loc (p)
1756 register enum rtx_code code = GET_CODE (*p);
1758 /* If *P IS such a constant term, P is its location. */
1760 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1764 /* Otherwise, if not a sum, it has no constant term. */
1766 if (GET_CODE (*p) != PLUS)
1769 /* If one of the summands is constant, return its location. */
1771 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1772 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
/* Recurse into each summand of the PLUS looking for a nested
   constant term.  */
1775 /* Otherwise, check each summand for containing a constant term. */
1777 if (XEXP (*p, 0) != 0)
1779 tem = find_constant_term_loc (&XEXP (*p, 0));
1784 if (XEXP (*p, 1) != 0)
1786 tem = find_constant_term_loc (&XEXP (*p, 1));
1794 /* Return 1 if OP is a memory reference
1795 whose address contains no side effects
1796 and remains valid after the addition
1797 of a positive integer less than the
1798 size of the object being referenced.
1800 We assume that the original address is valid and do not check it.
1802 This uses strict_memory_address_p as a subroutine, so
1803 don't use it before reload. */
1806 offsettable_memref_p (op)
/* Strict form: passes 1 for STRICTP, so hard-register validity of the
   address is required (post-reload use only).  */
1809 return ((GET_CODE (op) == MEM)
1810 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1813 /* Similar, but don't require a strictly valid mem ref:
1814 consider pseudo-regs valid as index or base regs. */
1817 offsettable_nonstrict_memref_p (op)
/* Non-strict form: passes 0 for STRICTP, so pseudo registers are
   acceptable as base or index registers.  */
1820 return ((GET_CODE (op) == MEM)
1821 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1824 /* Return 1 if Y is a memory address which contains no side effects
1825 and would remain valid after the addition of a positive integer
1826 less than the size of that mode.
1828 We assume that the original address is valid and do not check it.
1829 We do check that it is valid for narrower modes.
1831 If STRICTP is nonzero, we require a strictly valid address,
1832 for the sake of use in reload.c. */
1835 offsettable_address_p (strictp, mode, y)
1837 enum machine_mode mode;
1840 register enum rtx_code ycode = GET_CODE (y);
/* Select the strict or non-strict address validator up front.  */
1844 int (*addressp) PROTO ((enum machine_mode, rtx)) =
1845 (strictp ? strict_memory_address_p : memory_address_p);
1847 if (CONSTANT_ADDRESS_P (y))
1850 /* Adjusting an offsettable address involves changing to a narrower mode.
1851 Make sure that's OK. */
1853 if (mode_dependent_address_p (y))
1856 /* If the expression contains a constant term,
1857 see if it remains valid when max possible offset is added. */
1859 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
/* Temporarily bump the constant term in place, test validity, then
   restore the original contents below.  */
1864 *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
1865 /* Use QImode because an odd displacement may be automatically invalid
1866 for any wider mode. But it should be valid for a single byte. */
1867 good = (*addressp) (QImode, y);
1869 /* In any case, restore old contents of memory. */
/* Auto increment/decrement addresses are never offsettable.  */
1874 if (ycode == PRE_DEC || ycode == PRE_INC
1875 || ycode == POST_DEC || ycode == POST_INC)
1878 /* The offset added here is chosen as the maximum offset that
1879 any instruction could need to add when operating on something
1880 of the specified mode. We assume that if Y and Y+c are
1881 valid addresses then so is Y+d for all 0<d<c. */
1883 z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);
1885 /* Use QImode because an odd displacement may be automatically invalid
1886 for any wider mode. But it should be valid for a single byte. */
1887 return (*addressp) (QImode, z);
1890 /* Return 1 if ADDR is an address-expression whose effect depends
1891 on the mode of the memory reference it is used in.
1893 Autoincrement addressing is a typical example of mode-dependence
1894 because the amount of the increment depends on the mode. */
1897 mode_dependent_address_p (addr)
1898 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
/* The target macro jumps to `win' when it judges ADDR mode-dependent;
   otherwise control falls through.  */
1900 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1902 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1903 win: ATTRIBUTE_UNUSED_LABEL
1907 /* Return 1 if OP is a general operand
1908 other than a memory ref with a mode dependent address. */
1911 mode_independent_operand (op, mode)
1912 enum machine_mode mode;
1917 if (! general_operand (op, mode))
/* A non-MEM has no address, so it cannot be mode-dependent.  */
1920 if (GET_CODE (op) != MEM)
/* For a MEM, let the target macro reject mode-dependent addresses by
   jumping to `lose'.  */
1923 addr = XEXP (op, 0);
1924 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
1926 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1927 lose: ATTRIBUTE_UNUSED_LABEL
1931 /* Given an operand OP that is a valid memory reference
1932 which satisfies offsettable_memref_p,
1933 return a new memory reference whose address has been adjusted by OFFSET.
1934 OFFSET should be positive and less than the size of the object referenced.
1938 adj_offsettable_operand (op, offset)
1942 register enum rtx_code code = GET_CODE (op);
1946 register rtx y = XEXP (op, 0);
/* Constant address: simply fold OFFSET into the constant.  */
1949 if (CONSTANT_ADDRESS_P (y))
1951 new = gen_rtx_MEM (GET_MODE (op),
1952 plus_constant_for_output (y, offset));
1953 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
/* (plus ...) address: adjust its existing constant term in place.  */
1957 if (GET_CODE (y) == PLUS)
1960 register rtx *const_loc;
1964 const_loc = find_constant_term_loc (&z);
1967 *const_loc = plus_constant_for_output (*const_loc, offset);
/* Fallback: wrap the whole address in a new (plus Y OFFSET).  */
1972 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1973 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1979 /* Analyze INSN and fill in recog_data. */
1988 rtx body = PATTERN (insn);
1990 recog_data.n_operands = 0;
1991 recog_data.n_alternatives = 0;
1992 recog_data.n_dups = 0;
1994 switch (GET_CODE (body))
/* `asm' with operands: the counts and values come from the asm body
   itself rather than from the insn-data tables.  */
2006 recog_data.n_operands = noperands = asm_noperands (body);
2009 /* This insn is an `asm' with operands. */
2011 /* expand_asm_operands makes sure there aren't too many operands. */
2012 if (noperands > MAX_RECOG_OPERANDS)
2015 /* Now get the operand values and constraints out of the insn. */
2016 decode_asm_operands (body, recog_data.operand,
2017 recog_data.operand_loc,
2018 recog_data.constraints,
2019 recog_data.operand_mode)
/* The number of alternatives is one more than the number of commas
   in the first operand's constraint string.  */
2022 const char *p = recog_data.constraints[0];
2023 recog_data.n_alternatives = 1;
2025 recog_data.n_alternatives += (*p++ == ',');
2033 /* Ordinary insn: recognize it, get the operands via insn_extract
2034 and get the constraints. */
2036 icode = recog_memoized (insn);
2038 fatal_insn_not_found (insn);
2040 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2041 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2042 recog_data.n_dups = insn_data[icode].n_dups;
2044 insn_extract (insn);
2046 for (i = 0; i < noperands; i++)
2048 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2049 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
/* Classify each operand as output, in-out, or (by default) input from
   the leading character of its constraint string.  */
2052 for (i = 0; i < noperands; i++)
2053 recog_data.operand_type[i]
2054 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2055 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2058 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2062 /* After calling extract_insn, you can use this function to extract some
2063 information from the constraint strings into a more usable form.
2064 The collected data is stored in recog_op_alt. */
2066 preprocess_constraints ()
2070 memset (recog_op_alt, 0, sizeof recog_op_alt);
2071 for (i = 0; i < recog_data.n_operands; i++)
2074 struct operand_alternative *op_alt;
2075 const char *p = recog_data.constraints[i];
2077 op_alt = recog_op_alt[i];
/* Walk this operand's constraint string once per alternative;
   alternatives are separated by commas.  */
2079 for (j = 0; j < recog_data.n_alternatives; j++)
2081 op_alt[j].class = NO_REGS;
2082 op_alt[j].constraint = p;
2083 op_alt[j].matches = -1;
2084 op_alt[j].matched = -1;
/* An empty constraint or empty alternative accepts anything.  */
2086 if (*p == '\0' || *p == ',')
2088 op_alt[j].anything_ok = 1;
2098 while (c != ',' && c != '\0');
2099 if (c == ',' || c == '\0')
2104 case '=': case '+': case '*': case '%':
2105 case 'E': case 'F': case 'G': case 'H':
2106 case 's': case 'i': case 'n':
2107 case 'I': case 'J': case 'K': case 'L':
2108 case 'M': case 'N': case 'O': case 'P':
2109 #ifdef EXTRA_CONSTRAINT
2110 case 'Q': case 'R': case 'S': case 'T': case 'U':
2112 /* These don't say anything we care about. */
2116 op_alt[j].reject += 6;
2119 op_alt[j].reject += 600;
2122 op_alt[j].earlyclobber = 1;
/* Matching constraint: record which operand this one must match.
   NOTE(review): the back-link below indexes the current operand's
   op_alt by the matched operand number; later GCC versions write
   recog_op_alt[c - '0'][j].matched = i instead -- verify intent.  */
2125 case '0': case '1': case '2': case '3': case '4':
2126 case '5': case '6': case '7': case '8': case '9':
2127 op_alt[j].matches = c - '0';
2128 op_alt[op_alt[j].matches].matched = i;
2132 op_alt[j].memory_ok = 1;
2135 op_alt[j].decmem_ok = 1;
2138 op_alt[j].incmem_ok = 1;
2141 op_alt[j].nonoffmem_ok = 1;
2144 op_alt[j].offmem_ok = 1;
2147 op_alt[j].anything_ok = 1;
/* Register-class letters widen the class accepted so far.  */
2151 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2155 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2159 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2167 /* Check the operands of an insn against the insn's operand constraints
2168 and return 1 if they are valid.
2169 The information about the insn's operands, constraints, operand modes
2170 etc. is obtained from the global variables set up by extract_insn.
2172 WHICH_ALTERNATIVE is set to a number which indicates which
2173 alternative of constraints was matched: 0 for the first alternative,
2174 1 for the next, etc.
2176 In addition, when two operands match
2177 and it happens that the output operand is (reg) while the
2178 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2179 make the output operand look like the input.
2180 This is because the output operand is the one the template will print.
2182 This is used in final, just before printing the assembler code and by
2183 the routines that determine an insn's attribute.
2185 If STRICT is a positive non-zero value, it means that we have been
2186 called after reload has been completed. In that case, we must
2187 do all checks strictly. If it is zero, it means that we have been called
2188 before reload has completed. In that case, we first try to see if we can
2189 find an alternative that matches strictly. If not, we try again, this
2190 time assuming that reload will fix up the insn. This provides a "best
2191 guess" for the alternative and is used to compute attributes of insns prior
2192 to reload. A negative value of STRICT is used for this internal call. */
2200 constrain_operands (strict)
2203 const char *constraints[MAX_RECOG_OPERANDS];
2204 int matching_operands[MAX_RECOG_OPERANDS];
2205 int earlyclobber[MAX_RECOG_OPERANDS];
2208 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2209 int funny_match_index;
/* An insn with no operands or no alternatives trivially matches.  */
2211 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2214 for (c = 0; c < recog_data.n_operands; c++)
2216 constraints[c] = recog_data.constraints[c];
2217 matching_operands[c] = -1;
/* Try each alternative in turn until one matches every operand.  */
2220 which_alternative = 0;
2222 while (which_alternative < recog_data.n_alternatives)
2226 funny_match_index = 0;
2228 for (opno = 0; opno < recog_data.n_operands; opno++)
2230 register rtx op = recog_data.operand[opno];
2231 enum machine_mode mode = GET_MODE (op);
2232 register const char *p = constraints[opno];
2237 earlyclobber[opno] = 0;
2239 /* A unary operator may be accepted by the predicate, but it
2240 is irrelevant for matching constraints. */
2241 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
/* Strip one SUBREG, remembering the word offset for hard regs.  */
2244 if (GET_CODE (op) == SUBREG)
2246 if (GET_CODE (SUBREG_REG (op)) == REG
2247 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2248 offset = SUBREG_WORD (op);
2249 op = SUBREG_REG (op);
2252 /* An empty constraint or empty alternative
2253 allows anything which matched the pattern. */
2254 if (*p == 0 || *p == ',')
2257 while (*p && (c = *p++) != ',')
2260 case '?': case '!': case '*': case '%':
2265 /* Ignore rest of this alternative as far as
2266 constraint checking is concerned. */
2267 while (*p && *p != ',')
2272 earlyclobber[opno] = 1;
2275 case '0': case '1': case '2': case '3': case '4':
2276 case '5': case '6': case '7': case '8': case '9':
2278 /* This operand must be the same as a previous one.
2279 This kind of constraint is used for instructions such
2280 as add when they take only two operands.
2282 Note that the lower-numbered operand is passed first.
2284 If we are not testing strictly, assume that this constraint
2285 will be satisfied. */
2290 rtx op1 = recog_data.operand[c - '0'];
2291 rtx op2 = recog_data.operand[opno];
2293 /* A unary operator may be accepted by the predicate,
2294 but it is irrelevant for matching constraints. */
2295 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2296 op1 = XEXP (op1, 0);
2297 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2298 op2 = XEXP (op2, 0);
2300 val = operands_match_p (op1, op2);
/* Record the match pairing in both directions for the later
   earlyclobber conflict check.  */
2303 matching_operands[opno] = c - '0';
2304 matching_operands[c - '0'] = opno;
2308 /* If output is *x and input is *--x,
2309 arrange later to change the output to *--x as well,
2310 since the output op is the one that will be printed. */
2311 if (val == 2 && strict > 0)
2313 funny_match[funny_match_index].this = opno;
2314 funny_match[funny_match_index++].other = c - '0';
2319 /* p is used for address_operands. When we are called by
2320 gen_reload, no one will have checked that the address is
2321 strictly valid, i.e., that all pseudos requiring hard regs
2322 have gotten them. */
2324 || (strict_memory_address_p (recog_data.operand_mode[opno],
2329 /* No need to check general_operand again;
2330 it was done in insn-recog.c. */
2332 /* Anything goes unless it is a REG and really has a hard reg
2333 but the hard reg is not in the class GENERAL_REGS. */
2335 || GENERAL_REGS == ALL_REGS
2336 || GET_CODE (op) != REG
2337 || (reload_in_progress
2338 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2339 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2346 && GET_CODE (op) == REG
2347 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2348 || (strict == 0 && GET_CODE (op) == SCRATCH)
2349 || (GET_CODE (op) == REG
2350 && ((GENERAL_REGS == ALL_REGS
2351 && REGNO (op) < FIRST_PSEUDO_REGISTER)
2352 || reg_fits_class_p (op, GENERAL_REGS,
2358 /* This is used for a MATCH_SCRATCH in the cases when
2359 we don't actually need anything. So anything goes
/* Memory operand: MEM now, or whatever reload can make into one.  */
2365 if (GET_CODE (op) == MEM
2366 /* Before reload, accept what reload can turn into mem. */
2367 || (strict < 0 && CONSTANT_P (op))
2368 /* During reload, accept a pseudo */
2369 || (reload_in_progress && GET_CODE (op) == REG
2370 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* Autodecrement memory reference.  */
2375 if (GET_CODE (op) == MEM
2376 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2377 || GET_CODE (XEXP (op, 0)) == POST_DEC))
/* Autoincrement memory reference.  */
2382 if (GET_CODE (op) == MEM
2383 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2384 || GET_CODE (XEXP (op, 0)) == POST_INC))
2389 #ifndef REAL_ARITHMETIC
2390 /* Match any CONST_DOUBLE, but only if
2391 we can examine the bits of it reliably. */
2392 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2393 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2394 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2397 if (GET_CODE (op) == CONST_DOUBLE)
2402 if (GET_CODE (op) == CONST_DOUBLE)
2408 if (GET_CODE (op) == CONST_DOUBLE
2409 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
/* Integer constant, possibly a CONST_DOUBLE holding a wide int.  */
2414 if (GET_CODE (op) == CONST_INT
2415 || (GET_CODE (op) == CONST_DOUBLE
2416 && GET_MODE (op) == VOIDmode))
2419 if (CONSTANT_P (op))
2424 if (GET_CODE (op) == CONST_INT
2425 || (GET_CODE (op) == CONST_DOUBLE
2426 && GET_MODE (op) == VOIDmode))
/* 'I'..'P': machine-dependent integer ranges.  */
2438 if (GET_CODE (op) == CONST_INT
2439 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2443 #ifdef EXTRA_CONSTRAINT
2449 if (EXTRA_CONSTRAINT (op, c))
/* Non-offsettable memory: a MEM that fails the offsettable test.  */
2455 if (GET_CODE (op) == MEM
2456 && ((strict > 0 && ! offsettable_memref_p (op))
2458 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2459 || (reload_in_progress
2460 && !(GET_CODE (op) == REG
2461 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
/* Offsettable memory (strict or non-strict per STRICT).  */
2466 if ((strict > 0 && offsettable_memref_p (op))
2467 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2468 /* Before reload, accept what reload can handle. */
2470 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2471 /* During reload, accept a pseudo */
2472 || (reload_in_progress && GET_CODE (op) == REG
2473 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2480 && GET_CODE (op) == REG
2481 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2482 || (strict == 0 && GET_CODE (op) == SCRATCH)
2483 || (GET_CODE (op) == REG
2484 && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
2489 constraints[opno] = p;
2490 /* If this operand did not win somehow,
2491 this alternative loses. */
2495 /* This alternative won; the operands are ok.
2496 Change whichever operands this alternative says to change. */
2501 /* See if any earlyclobber operand conflicts with some other
2505 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2506 /* Ignore earlyclobber operands now in memory,
2507 because we would often report failure when we have
2508 two memory operands, one of which was formerly a REG. */
2509 if (earlyclobber[eopno]
2510 && GET_CODE (recog_data.operand[eopno]) == REG)
2511 for (opno = 0; opno < recog_data.n_operands; opno++)
2512 if ((GET_CODE (recog_data.operand[opno]) == MEM
2513 || recog_data.operand_type[opno] != OP_OUT)
2515 /* Ignore things like match_operator operands. */
2516 && *recog_data.constraints[opno] != 0
2517 && ! (matching_operands[opno] == eopno
2518 && operands_match_p (recog_data.operand[opno],
2519 recog_data.operand[eopno]))
2520 && ! safe_from_earlyclobber (recog_data.operand[opno],
2521 recog_data.operand[eopno]))
/* Apply the deferred output substitutions recorded above.  */
2526 while (--funny_match_index >= 0)
2528 recog_data.operand[funny_match[funny_match_index].other]
2529 = recog_data.operand[funny_match[funny_match_index].this];
2536 which_alternative++;
2539 /* If we are about to reject this, but we are not to test strictly,
2540 try a very loose test. Only return failure if it fails also. */
2542 return constrain_operands (-1);
2547 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2548 is a hard reg in class CLASS when its regno is offset by OFFSET
2549 and changed to mode MODE.
2550 If REG occupies multiple hard regs, all of them must be in CLASS. */
2553 reg_fits_class_p (operand, class, offset, mode)
2555 register enum reg_class class;
2557 enum machine_mode mode;
2559 register int regno = REGNO (operand);
/* Only hard registers can be tested against a register class.  */
2560 if (regno < FIRST_PSEUDO_REGISTER
2561 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
/* Every hard reg occupied by a multi-register value must be in CLASS.  */
2566 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2568 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2577 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2580 split_all_insns (upd_life)
/* Bitmap of the basic blocks whose contents change, so that life info
   is recomputed only where needed.  */
2587 blocks = sbitmap_alloc (n_basic_blocks);
2588 sbitmap_zero (blocks);
2591 for (i = n_basic_blocks - 1; i >= 0; --i)
2593 basic_block bb = BASIC_BLOCK (i);
2596 for (insn = bb->head; insn ; insn = next)
2600 /* Can't use `next_real_insn' because that might go across
2601 CODE_LABELS and short-out basic blocks. */
2602 next = NEXT_INSN (insn);
2603 if (GET_CODE (insn) != INSN)
2606 /* Don't split no-op move insns. These should silently
2607 disappear later in final. Splitting such insns would
2608 break the code that handles REG_NO_CONFLICT blocks. */
2610 else if ((set = single_set (insn)) != NULL
2611 && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
2613 /* Nops get in the way while scheduling, so delete them
2614 now if register allocation has already been done. It
2615 is too risky to try to do this before register
2616 allocation, and there are unlikely to be very many
2617 nops then anyways. */
2618 if (reload_completed)
/* Delete the nop in place by turning it into a NOTE.  */
2620 PUT_CODE (insn, NOTE);
2621 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2622 NOTE_SOURCE_FILE (insn) = 0;
2627 /* Split insns here to get max fine-grain parallelism. */
2628 rtx first = PREV_INSN (insn);
2629 rtx last = try_split (PATTERN (insn), insn, 1);
/* Mark this block changed so its life info gets refreshed below.  */
2633 SET_BIT (blocks, i);
2636 /* try_split returns the NOTE that INSN became. */
2637 first = NEXT_INSN (first);
2638 PUT_CODE (insn, NOTE);
2639 NOTE_SOURCE_FILE (insn) = 0;
2640 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2642 if (insn == bb->end)
2650 if (insn == bb->end)
2654 /* ??? When we're called from just after reload, the CFG is in bad
2655 shape, and we may have fallen off the end. This could be fixed
2656 by having reload not try to delete unreachable code. Otherwise
2657 assert we found the end insn. */
2658 if (insn == NULL && upd_life)
/* Recompute life information only for the blocks we changed.  */
2662 if (changed && upd_life)
2664 compute_bb_for_insn (get_max_uid ());
2665 count_or_remove_death_notes (blocks, 1);
2666 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2669 sbitmap_free (blocks);
2672 #ifdef HAVE_peephole2
2673 /* This is the last insn we'll allow recog_next_insn to consider. */
2674 static rtx recog_last_allowed_insn;
2676 /* Return the Nth non-note insn after INSN, or return NULL_RTX if it does
2677 not exist. Used by the recognizer to find the next insn to match in a
2678 multi-insn pattern. */
2680 recog_next_insn (insn, n)
2684 if (insn != NULL_RTX)
/* Never walk past the boundary set by the peephole2 pass.  */
2688 if (insn == recog_last_allowed_insn)
2691 insn = NEXT_INSN (insn);
2692 if (insn == NULL_RTX)
/* Only real insns (class 'i') count toward N.  */
2695 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2703 /* Perform the peephole2 optimization pass. */
2705 peephole2_optimize (dump_file)
2706 FILE *dump_file ATTRIBUTE_UNUSED;
2712 /* ??? TODO: Arrange with resource.c to start at bb->global_live_at_end
2713 and backtrack insn by insn as we proceed through the block. In this
2714 way we'll not need to keep searching forward from the beginning of
2715 basic blocks to find register life info. */
2717 init_resource_info (NULL);
2719 blocks = sbitmap_alloc (n_basic_blocks);
2720 sbitmap_zero (blocks);
2723 for (i = n_basic_blocks - 1; i >= 0; --i)
2725 basic_block bb = BASIC_BLOCK (i);
2727 /* Since we don't update life info until the very end, we can't
2728 allow matching instructions that we've replaced before. Walk
2729 backward through the basic block so that we don't have to
2730 care about subsequent life info; recog_last_allowed_insn to
2731 restrict how far forward we will allow the match to proceed. */
2733 recog_last_allowed_insn = bb->end;
2734 for (insn = bb->end; ; insn = prev)
2736 prev = PREV_INSN (insn);
2737 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2741 try = peephole2_insns (PATTERN (insn), insn, &last_insn);
2744 flow_delete_insn_chain (insn, last_insn);
2745 try = emit_insn_after (try, prev);
2747 if (last_insn == bb->end)
2749 if (insn == bb->head)
2750 bb->head = NEXT_INSN (prev);
2752 recog_last_allowed_insn = prev;
2753 SET_BIT (blocks, i);
2758 if (insn == bb->head)
2763 free_resource_info ();
2765 compute_bb_for_insn (get_max_uid ());
2766 count_or_remove_death_notes (blocks, 1);
2767 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);