1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "insn-flags.h"
30 #include "insn-codes.h"
33 #include "hard-reg-set.h"
38 #include "basic-block.h"
/* Default push/pop address-update codes from the stack growth direction.
   NOTE(review): the embedded line numbers jump here (44->46, 52->54); the
   #else/#endif lines of these conditionals are missing from this extract.  */
42 #ifndef STACK_PUSH_CODE
43 #ifdef STACK_GROWS_DOWNWARD
44 #define STACK_PUSH_CODE PRE_DEC
46 #define STACK_PUSH_CODE PRE_INC
50 #ifndef STACK_POP_CODE
51 #ifdef STACK_GROWS_DOWNWARD
52 #define STACK_POP_CODE POST_INC
54 #define STACK_POP_CODE POST_DEC
/* Forward declarations for the file-local helpers defined below.  */
58 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
59 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
60 static rtx *find_constant_term_loc PARAMS ((rtx *));
61 static int insn_invalid_p PARAMS ((rtx));
63 /* Nonzero means allow operands to be volatile.
64 This should be 0 if you are generating rtl, such as if you are calling
65 the functions in optabs.c and expmed.c (most of the time).
66 This should be 1 if all valid insns need to be recognized,
67 such as in regclass.c and final.c and reload.c.
69 init_recog and init_recog_no_volatile are responsible for setting this. */
/* NOTE(review): the declaration this comment documents (presumably
   `int volatile_ok;`) is missing from this extract — line numbers jump 69->73.  */
73 struct recog_data recog_data;
75 /* Contains a vector of operand_alternative structures for every operand.
76 Set up by preprocess_constraints. */
77 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
79 /* On return from `constrain_operands', indicate which alternative
82 int which_alternative;
84 /* Nonzero after end of reload pass.
85 Set to 1 or 0 by toplev.c.
86 Controls the significance of (SUBREG (MEM)). */
/* NOTE(review): the variable this last comment documents (presumably
   `reload_completed`) is not visible in this extract.  */
90 /* Initialize data used by the function `recog'.
91 This must be called once in the compilation of a function
92 before any insn recognition may be done in the function. */
/* NOTE(review): the body of init_recog_no_volatile (and its sibling
   init_recog) is missing from this extract — line numbers jump 95->106.  */
95 init_recog_no_volatile ()
106 /* Try recognizing the instruction INSN,
107 and return the code number that results.
108 Remember the code so that repeated calls do not
109 need to spend the time for actual rerecognition.
111 This function is the normal interface to instruction recognition.
112 The automatically-generated function `recog' is normally called
113 through this one. (The only exception is in combine.c.) */
/* Memoizes the insn code in INSN_CODE; -1 means "not yet recognized".  */
116 recog_memoized (insn)
119 if (INSN_CODE (insn) < 0)
120 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
121 return INSN_CODE (insn);
124 /* Check that X is an insn-body for an `asm' with operands
125 and that the operands mentioned in it are legitimate. */
/* NOTE(review): several lines of this function (declarations, braces and
   some returns) are missing from this extract; line numbers are gapped.  */
128 check_asm_operands (x)
133 const char **constraints;
136 /* Post-reload, be more strict with things. */
137 if (reload_completed)
139 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
140 extract_insn (make_insn_raw (x));
141 constrain_operands (1);
142 return which_alternative >= 0;
145 noperands = asm_noperands (x);
151 operands = (rtx *) alloca (noperands * sizeof (rtx));
152 constraints = (const char **) alloca (noperands * sizeof (char *));
154 decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);
156 for (i = 0; i < noperands; i++)
158 const char *c = constraints[i];
/* A constraint that is a single digit means "match operand N"; check the
   operand against the constraint of the operand it must match.  */
161 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
162 c = constraints[c[0] - '0'];
164 if (! asm_operand_ok (operands[i], c))
171 /* Static data for the next two routines. */
/* NOTE(review): the fields of change_t (object, loc, old, old_code) are
   missing from this extract — line numbers jump 173->181.  */
173 typedef struct change_t
/* Growable array of pending changes and its bookkeeping counters.  */
181 static change_t *changes;
182 static int changes_allocated;
184 static int num_changes = 0;
186 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
187 at which NEW will be placed. If OBJECT is zero, no validation is done,
188 the change is simply made.
190 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
191 will be called with the address and mode as parameters. If OBJECT is
192 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
195 IN_GROUP is non-zero if this is part of a group of changes that must be
196 performed as a group. In that case, the changes will be stored. The
197 function `apply_change_group' will validate and apply the changes.
199 If IN_GROUP is zero, this is a single change. Try to recognize the insn
200 or validate the memory reference with the change applied. If the result
201 is not valid for the machine, suppress the change and return zero.
202 Otherwise, perform the change and return 1. */
/* NOTE(review): parameter declarations, the read of *LOC into `old', the
   actual store of NEW, and several braces are missing from this extract.  */
205 validate_change (object, loc, new, in_group)
/* No-op if the replacement is identical to what is already there.  */
213 if (old == new || rtx_equal_p (old, new))
216 if (in_group == 0 && num_changes != 0)
221 /* Save the information describing this change. */
222 if (num_changes >= changes_allocated)
224 if (changes_allocated == 0)
225 /* This value allows for repeated substitutions inside complex
226 indexed addresses, or changes in up to 5 insns. */
227 changes_allocated = MAX_RECOG_OPERANDS * 5;
229 changes_allocated *= 2;
232 (change_t*) xrealloc (changes,
233 sizeof (change_t) * changes_allocated);
236 changes[num_changes].object = object;
237 changes[num_changes].loc = loc;
238 changes[num_changes].old = old;
240 if (object && GET_CODE (object) != MEM)
242 /* Set INSN_CODE to force rerecognition of insn. Save old code in
244 changes[num_changes].old_code = INSN_CODE (object);
245 INSN_CODE (object) = -1;
250 /* If we are making a group of changes, return 1. Otherwise, validate the
251 change group we made. */
256 return apply_change_group ();
259 /* This subroutine of apply_change_group verifies whether the changes to INSN
260 were valid; i.e. whether INSN can still be recognized. */
/* Returns nonzero if INSN is invalid.  NOTE(review): the explicit return
   statements and closing braces are missing from this extract.  */
263 insn_invalid_p (insn)
266 int icode = recog_memoized (insn);
267 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
269 if (is_asm && ! check_asm_operands (PATTERN (insn)))
271 if (! is_asm && icode < 0)
274 /* After reload, verify that all constraints are satisfied. */
275 if (reload_completed)
279 if (! constrain_operands (1))
286 /* Apply a group of changes previously issued with `validate_change'.
287 Return 1 if all changes are valid, zero otherwise. */
/* NOTE(review): local declarations, several braces, the success/failure
   epilogue and the cancellation path are missing from this extract.  */
290 apply_change_group ()
294 /* The changes have been applied and all INSN_CODEs have been reset to force
297 The changes are valid if we aren't given an object, or if we are
298 given a MEM and it still is a valid address, or if this is in insn
299 and it is recognized. In the latter case, if reload has completed,
300 we also require that the operands meet the constraints for
303 for (i = 0; i < num_changes; i++)
305 rtx object = changes[i].object;
310 if (GET_CODE (object) == MEM)
312 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
315 else if (insn_invalid_p (object))
317 rtx pat = PATTERN (object);
319 /* Perhaps we couldn't recognize the insn because there were
320 extra CLOBBERs at the end. If so, try to re-recognize
321 without the last CLOBBER (later iterations will cause each of
322 them to be eliminated, in turn). But don't do this if we
323 have an ASM_OPERAND. */
324 if (GET_CODE (pat) == PARALLEL
325 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
326 && asm_noperands (PATTERN (object)) < 0)
/* Dropping the last CLOBBER from a 2-element PARALLEL leaves a lone
   pattern; otherwise rebuild the PARALLEL one element shorter.  */
330 if (XVECLEN (pat, 0) == 2)
331 newpat = XVECEXP (pat, 0, 0);
337 = gen_rtx_PARALLEL (VOIDmode,
338 gen_rtvec (XVECLEN (pat, 0) - 1));
339 for (j = 0; j < XVECLEN (newpat, 0); j++)
340 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
343 /* Add a new change to this group to replace the pattern
344 with this new pattern. Then consider this change
345 as having succeeded. The change we added will
346 cause the entire call to fail if things remain invalid.
348 Note that this can lose if a later change than the one
349 we are processing specified &XVECEXP (PATTERN (object), 0, X)
350 but this shouldn't occur. */
352 validate_change (object, &PATTERN (object), newpat, 1);
354 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
355 /* If this insn is a CLOBBER or USE, it is always valid, but is
363 if (i == num_changes)
375 /* Return the number of changes so far in the current group. */
/* NOTE(review): the body of num_validated_changes is missing here.  */
378 num_validated_changes ()
383 /* Retract the changes numbered NUM and up. */
/* NOTE(review): the function header for this retraction routine
   (presumably `cancel_changes (num)' — TODO confirm) is missing from this
   extract; only its loop body survives.  */
391 /* Back out all the changes. Do this in the opposite order in which
393 for (i = num_changes - 1; i >= num; i--)
395 *changes[i].loc = changes[i].old;
396 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
397 INSN_CODE (changes[i].object) = changes[i].old_code;
402 /* Replace every occurrence of FROM in X with TO. Mark each change with
403 validate_change passing OBJECT. */
/* NOTE(review): this body is gapped — the `switch (code)' statement, its
   case labels, early returns, and many braces are missing from this
   extract; the surviving fragments below belong to distinct cases.  */
406 validate_replace_rtx_1 (loc, from, to, object)
408 rtx from, to, object;
411 register const char *fmt;
412 register rtx x = *loc;
413 enum rtx_code code = GET_CODE (x);
415 /* X matches FROM if it is the same rtx or they are both referring to the
416 same register in the same mode. Avoid calling rtx_equal_p unless the
417 operands look similar. */
420 || (GET_CODE (x) == REG && GET_CODE (from) == REG
421 && GET_MODE (x) == GET_MODE (from)
422 && REGNO (x) == REGNO (from))
423 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
424 && rtx_equal_p (x, from)))
426 validate_change (object, loc, to, 1);
430 /* For commutative or comparison operations, try replacing each argument
431 separately and seeing if we made any changes. If so, put a constant
433 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
435 int prev_changes = num_changes;
437 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
438 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
439 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
/* Swap the operands; a comparison must also have its code reversed.  */
441 validate_change (object, loc,
442 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
443 : swap_condition (code),
444 GET_MODE (x), XEXP (x, 1),
452 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
453 done the substitution, otherwise we won't. */
458 /* If we have a PLUS whose second operand is now a CONST_INT, use
459 plus_constant to try to simplify it. */
460 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
461 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
/* MINUS fragment: fold (minus X (const_int C)) into plus_constant of -C.  */
466 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
468 validate_change (object, loc,
469 plus_constant (XEXP (x, 0), - INTVAL (to)),
477 /* In these cases, the operation to be performed depends on the mode
478 of the operand. If we are replacing the operand with a VOIDmode
479 constant, we lose the information. So try to simplify the operation
480 in that case. If it fails, substitute in something that we know
481 won't be recognized. */
482 if (GET_MODE (to) == VOIDmode
483 && (XEXP (x, 0) == from
484 || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
485 && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
486 && REGNO (XEXP (x, 0)) == REGNO (from))))
488 rtx new = simplify_unary_operation (code, GET_MODE (x), to,
491 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
493 validate_change (object, loc, new, 1);
499 /* If we have a SUBREG of a register that we are replacing and we are
500 replacing it with a MEM, make a new MEM and try replacing the
501 SUBREG with it. Don't do this if the MEM has a mode-dependent address
502 or if we would be widening it. */
504 if (SUBREG_REG (x) == from
505 && GET_CODE (from) == REG
506 && GET_CODE (to) == MEM
507 && ! mode_dependent_address_p (XEXP (to, 0))
508 && ! MEM_VOLATILE_P (to)
509 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
511 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
512 enum machine_mode mode = GET_MODE (x);
515 if (BYTES_BIG_ENDIAN)
516 offset += (MIN (UNITS_PER_WORD,
517 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
518 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
520 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
521 MEM_COPY_ATTRIBUTES (new, to);
522 validate_change (object, loc, new, 1);
529 /* If we are replacing a register with memory, try to change the memory
530 to be the mode required for memory in extract operations (this isn't
531 likely to be an insertion operation; if it was, nothing bad will
532 happen, we might just fail in some cases). */
534 if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
535 && GET_CODE (XEXP (x, 1)) == CONST_INT
536 && GET_CODE (XEXP (x, 2)) == CONST_INT
537 && ! mode_dependent_address_p (XEXP (to, 0))
538 && ! MEM_VOLATILE_P (to))
540 enum machine_mode wanted_mode = VOIDmode;
541 enum machine_mode is_mode = GET_MODE (to);
542 int pos = INTVAL (XEXP (x, 2));
545 if (code == ZERO_EXTRACT)
547 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
548 if (wanted_mode == VOIDmode)
549 wanted_mode = word_mode;
553 if (code == SIGN_EXTRACT)
555 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
556 if (wanted_mode == VOIDmode)
557 wanted_mode = word_mode;
561 /* If we have a narrower mode, we can do something. */
562 if (wanted_mode != VOIDmode
563 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
565 int offset = pos / BITS_PER_UNIT;
568 /* If the bytes and bits are counted differently, we
569 must adjust the offset. */
570 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
571 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
574 pos %= GET_MODE_BITSIZE (wanted_mode);
576 newmem = gen_rtx_MEM (wanted_mode,
577 plus_constant (XEXP (to, 0), offset));
578 MEM_COPY_ATTRIBUTES (newmem, to);
580 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
581 validate_change (object, &XEXP (x, 0), newmem, 1);
591 /* For commutative or comparison operations we've already performed
592 replacements. Don't try to perform them again. */
593 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
595 fmt = GET_RTX_FORMAT (code);
596 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
599 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
600 else if (fmt[i] == 'E')
601 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
602 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
607 /* Try replacing every occurrence of FROM in INSN with TO. After all
608 changes have been made, validate by seeing if INSN is still valid. */
/* NOTE(review): parameter declarations and braces for the three wrappers
   below are missing from this extract.  */
611 validate_replace_rtx (from, to, insn)
614 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
615 return apply_change_group ();
618 /* Try replacing every occurrence of FROM in INSN with TO. After all
619 changes have been made, validate by seeing if INSN is still valid. */
/* Group variant: records changes but leaves them for the caller's
   apply_change_group call.  */
622 validate_replace_rtx_group (from, to, insn)
625 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
628 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
629 SET_DESTs. After all changes have been made, validate by seeing if
630 INSN is still valid. */
633 validate_replace_src (from, to, insn)
636 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
637 || GET_CODE (PATTERN (insn)) != SET)
640 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
/* A MEM destination's address is a use, not a set — substitute there too.  */
641 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
642 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
644 return apply_change_group ();
648 /* Return 1 if the insn using CC0 set by INSN does not contain
649 any ordered tests applied to the condition codes.
650 EQ and NE tests do not count. */
653 next_insn_tests_no_inequality (insn)
656 register rtx next = next_cc0_user (insn);
658 /* If there is no next insn, we have to take the conservative choice. */
/* NOTE(review): the `return 0' for the no-next-insn case is missing here.  */
662 return ((GET_CODE (next) == JUMP_INSN
663 || GET_CODE (next) == INSN
664 || GET_CODE (next) == CALL_INSN)
665 && ! inequality_comparisons_p (PATTERN (next)));
668 #if 0 /* This is useless since the insn that sets the cc's
669 must be followed immediately by the use of them. */
670 /* Return 1 if the CC value set up by INSN is not used. */
/* Dead code, compiled out; retained as in the original.  */
673 next_insns_test_no_inequality (insn)
676 register rtx next = NEXT_INSN (insn);
678 for (; next != 0; next = NEXT_INSN (next))
680 if (GET_CODE (next) == CODE_LABEL
681 || GET_CODE (next) == BARRIER)
683 if (GET_CODE (next) == NOTE)
685 if (inequality_comparisons_p (PATTERN (next)))
687 if (sets_cc0_p (PATTERN (next)) == 1)
689 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
697 /* This is used by find_single_use to locate an rtx that contains exactly one
698 use of DEST, which is typically either a REG or CC0. It returns a
699 pointer to the innermost rtx expression containing DEST. Appearances of
700 DEST that are being used to totally replace it are not counted. */
/* NOTE(review): the switch over CODE, its case labels, local declarations
   (result, this_result, fmt, i, j) and several returns are missing from
   this extract — the fragments below come from distinct cases.  */
703 find_single_use_1 (dest, loc)
708 enum rtx_code code = GET_CODE (x);
725 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
726 of a REG that occupies all of the REG, the insn uses DEST if
727 it is mentioned in the destination or the source. Otherwise, we
728 need just check the source. */
729 if (GET_CODE (SET_DEST (x)) != CC0
730 && GET_CODE (SET_DEST (x)) != PC
731 && GET_CODE (SET_DEST (x)) != REG
732 && ! (GET_CODE (SET_DEST (x)) == SUBREG
733 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
734 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
735 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
736 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
737 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
740 return find_single_use_1 (dest, &SET_SRC (x));
744 return find_single_use_1 (dest, &XEXP (x, 0));
750 /* If it wasn't one of the common cases above, check each expression and
751 vector of this code. Look for a unique usage of DEST. */
753 fmt = GET_RTX_FORMAT (code);
754 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
758 if (dest == XEXP (x, i)
759 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
760 && REGNO (dest) == REGNO (XEXP (x, i))))
763 this_result = find_single_use_1 (dest, &XEXP (x, i));
766 result = this_result;
767 else if (this_result)
768 /* Duplicate usage. */
771 else if (fmt[i] == 'E')
775 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
777 if (XVECEXP (x, i, j) == dest
778 || (GET_CODE (dest) == REG
779 && GET_CODE (XVECEXP (x, i, j)) == REG
780 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
783 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
786 result = this_result;
787 else if (this_result)
796 /* See if DEST, produced in INSN, is used only a single time in the
797 sequel. If so, return a pointer to the innermost rtx expression in which
800 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
802 This routine will return usually zero either before flow is called (because
803 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
804 note can't be trusted).
806 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
807 care about REG_DEAD notes or LOG_LINKS.
809 Otherwise, we find the single use by finding an insn that has a
810 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
811 only referenced once in that insn, we know that it must be the first
812 and last insn referencing DEST. */
/* NOTE(review): parameter/local declarations, the CC0 branch condition,
   *PLOC stores, and the final returns are missing from this extract.  */
815 find_single_use (dest, insn, ploc)
827 next = NEXT_INSN (insn);
829 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
832 result = find_single_use_1 (dest, &PATTERN (next));
839 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
/* Scan forward to the insn where DEST dies, stopping at a label.  */
842 for (next = next_nonnote_insn (insn);
843 next != 0 && GET_CODE (next) != CODE_LABEL;
844 next = next_nonnote_insn (next))
845 if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
847 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
848 if (XEXP (link, 0) == insn)
853 result = find_single_use_1 (dest, &PATTERN (next));
863 /* Return 1 if OP is a valid general operand for machine mode MODE.
864 This is either a register reference, a memory reference,
865 or a constant. In the case of a memory reference, the address
866 is checked for general validity for the target machine.
868 Register and memory references must have mode MODE in order to be valid,
869 but some constants have no machine mode and are valid for any mode.
871 If MODE is VOIDmode, OP is checked for validity for whatever mode
874 The main use of this function is as a predicate in match_operand
875 expressions in the machine description.
877 For an explanation of this function's behavior for registers of
878 class NO_REGS, see the comment for `register_operand'. */
/* NOTE(review): several lines (CONSTANT_P test, SUBREG/REG/MEM dispatch,
   the `win:' label, #endif lines and returns) are missing from this
   extract — line numbers are gapped throughout.  */
881 general_operand (op, mode)
883 enum machine_mode mode;
885 register enum rtx_code code = GET_CODE (op);
886 int mode_altering_drug = 0;
888 if (mode == VOIDmode)
889 mode = GET_MODE (op);
891 /* Don't accept CONST_INT or anything similar
892 if the caller wants something floating. */
893 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
894 && GET_MODE_CLASS (mode) != MODE_INT
895 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
899 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
900 #ifdef LEGITIMATE_PIC_OPERAND_P
901 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
903 && LEGITIMATE_CONSTANT_P (op));
905 /* Except for certain constants with VOIDmode, already checked for,
906 OP's mode must match MODE if MODE specifies a mode. */
908 if (GET_MODE (op) != mode)
913 #ifdef INSN_SCHEDULING
914 /* On machines that have insn scheduling, we want all memory
915 reference to be explicit, so outlaw paradoxical SUBREGs. */
916 if (GET_CODE (SUBREG_REG (op)) == MEM
917 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
921 op = SUBREG_REG (op);
922 code = GET_CODE (op);
924 /* No longer needed, since (SUBREG (MEM...))
925 will load the MEM into a reload reg in the MEM's own mode. */
926 mode_altering_drug = 1;
931 /* A register whose class is NO_REGS is not a general operand. */
932 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
933 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
937 register rtx y = XEXP (op, 0);
938 if (! volatile_ok && MEM_VOLATILE_P (op))
940 if (GET_CODE (y) == ADDRESSOF)
942 /* Use the mem's mode, since it will be reloaded thus. */
943 mode = GET_MODE (op);
944 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
947 /* Pretend this is an operand for now; we'll run force_operand
948 on its replacement in fixup_var_refs_1. */
949 if (code == ADDRESSOF)
955 if (mode_altering_drug)
956 return ! mode_dependent_address_p (XEXP (op, 0));
960 /* Return 1 if OP is a valid memory address for a memory reference
963 The main use of this function is as a predicate in match_operand
964 expressions in the machine description. */
/* NOTE(review): the `register rtx op;' declaration and braces are missing.  */
967 address_operand (op, mode)
969 enum machine_mode mode;
971 return memory_address_p (mode, op);
974 /* Return 1 if OP is a register reference of mode MODE.
975 If MODE is VOIDmode, accept a register in any mode.
977 The main use of this function is as a predicate in match_operand
978 expressions in the machine description.
980 As a special exception, registers whose class is NO_REGS are
981 not accepted by `register_operand'. The reason for this change
982 is to allow the representation of special architecture artifacts
983 (such as a condition code register) without extending the rtl
984 definitions. Since registers of class NO_REGS cannot be used
985 as registers in any case where register classes are examined,
986 it is most consistent to keep this function from accepting them. */
/* NOTE(review): some returns, braces and #endif lines are missing from
   this extract.  */
989 register_operand (op, mode)
991 enum machine_mode mode;
993 if (GET_MODE (op) != mode && mode != VOIDmode)
996 if (GET_CODE (op) == SUBREG)
998 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
999 because it is guaranteed to be reloaded into one.
1000 Just make sure the MEM is valid in itself.
1001 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1002 but currently it does result from (SUBREG (REG)...) where the
1003 reg went on the stack.) */
1004 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1005 return general_operand (op, mode);
1007 #ifdef CLASS_CANNOT_CHANGE_SIZE
/* Reject size-changing SUBREGs of hard registers whose class cannot
   change size (e.g. where a mode change alters the bit layout).  */
1008 if (GET_CODE (SUBREG_REG (op)) == REG
1009 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1010 && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
1011 REGNO (SUBREG_REG (op)))
1012 && (GET_MODE_SIZE (mode)
1013 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
1014 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1015 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1019 op = SUBREG_REG (op);
1022 /* If we have an ADDRESSOF, consider it valid since it will be
1023 converted into something that will not be a MEM. */
1024 if (GET_CODE (op) == ADDRESSOF)
1027 /* We don't consider registers whose class is NO_REGS
1028 to be a register operand. */
1029 return (GET_CODE (op) == REG
1030 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1031 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1034 /* Return 1 for a register in Pmode; ignore the tested mode. */
1037 pmode_register_operand (op, mode)
1039 enum machine_mode mode ATTRIBUTE_UNUSED;
1041 return register_operand (op, Pmode);
1044 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1045 or a hard register. */
/* NOTE(review): the `register rtx op;' declaration and braces are missing.  */
1048 scratch_operand (op, mode)
1050 enum machine_mode mode;
1052 if (GET_MODE (op) != mode && mode != VOIDmode)
1055 return (GET_CODE (op) == SCRATCH
1056 || (GET_CODE (op) == REG
1057 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1060 /* Return 1 if OP is a valid immediate operand for mode MODE.
1062 The main use of this function is as a predicate in match_operand
1063 expressions in the machine description. */
/* NOTE(review): declarations, braces and some returns are missing from
   this extract.  */
1066 immediate_operand (op, mode)
1068 enum machine_mode mode;
1070 /* Don't accept CONST_INT or anything similar
1071 if the caller wants something floating. */
1072 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1073 && GET_MODE_CLASS (mode) != MODE_INT
1074 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1077 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1078 result in 0/1. It seems a safe assumption that this is
1079 in range for everyone. */
1080 if (GET_CODE (op) == CONSTANT_P_RTX)
1083 return (CONSTANT_P (op)
1084 && (GET_MODE (op) == mode || mode == VOIDmode
1085 || GET_MODE (op) == VOIDmode)
1086 #ifdef LEGITIMATE_PIC_OPERAND_P
1087 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1089 && LEGITIMATE_CONSTANT_P (op));
1092 /* Returns 1 if OP is an operand that is a CONST_INT. */
1095 const_int_operand (op, mode)
1097 enum machine_mode mode ATTRIBUTE_UNUSED;
1099 return GET_CODE (op) == CONST_INT;
1102 /* Returns 1 if OP is an operand that is a constant integer or constant
1103 floating-point number. */
/* NOTE(review): declarations and braces are missing from this extract.  */
1106 const_double_operand (op, mode)
1108 enum machine_mode mode;
1110 /* Don't accept CONST_INT or anything similar
1111 if the caller wants something floating. */
1112 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1113 && GET_MODE_CLASS (mode) != MODE_INT
1114 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1117 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1118 && (mode == VOIDmode || GET_MODE (op) == mode
1119 || GET_MODE (op) == VOIDmode));
1122 /* Return 1 if OP is a general operand that is not an immediate operand. */
1125 nonimmediate_operand (op, mode)
1127 enum machine_mode mode;
1129 return (general_operand (op, mode) && ! CONSTANT_P (op));
1132 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
/* NOTE(review): declarations, braces, some returns and #endif lines are
   missing from this extract.  */
1135 nonmemory_operand (op, mode)
1137 enum machine_mode mode;
1139 if (CONSTANT_P (op))
1141 /* Don't accept CONST_INT or anything similar
1142 if the caller wants something floating. */
1143 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1144 && GET_MODE_CLASS (mode) != MODE_INT
1145 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1148 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
1149 #ifdef LEGITIMATE_PIC_OPERAND_P
1150 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1152 && LEGITIMATE_CONSTANT_P (op));
1155 if (GET_MODE (op) != mode && mode != VOIDmode)
1158 if (GET_CODE (op) == SUBREG)
1160 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1161 because it is guaranteed to be reloaded into one.
1162 Just make sure the MEM is valid in itself.
1163 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1164 but currently it does result from (SUBREG (REG)...) where the
1165 reg went on the stack.) */
1166 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1167 return general_operand (op, mode);
1168 op = SUBREG_REG (op);
1171 /* We don't consider registers whose class is NO_REGS
1172 to be a register operand. */
1173 return (GET_CODE (op) == REG
1174 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1175 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1178 /* Return 1 if OP is a valid operand that stands for pushing a
1179 value of mode MODE onto the stack.
1181 The main use of this function is as a predicate in match_operand
1182 expressions in the machine description. */
/* NOTE(review): declarations, braces, the step descending into the MEM
   address (op = XEXP (op, 0)) and some returns are missing from this
   extract for both functions below.  */
1185 push_operand (op, mode)
1187 enum machine_mode mode;
1189 if (GET_CODE (op) != MEM)
1192 if (mode != VOIDmode && GET_MODE (op) != mode)
1197 if (GET_CODE (op) != STACK_PUSH_CODE)
1200 return XEXP (op, 0) == stack_pointer_rtx;
1203 /* Return 1 if OP is a valid operand that stands for popping a
1204 value of mode MODE off the stack.
1206 The main use of this function is as a predicate in match_operand
1207 expressions in the machine description. */
1210 pop_operand (op, mode)
1212 enum machine_mode mode;
1214 if (GET_CODE (op) != MEM)
1217 if (mode != VOIDmode && GET_MODE (op) != mode)
1222 if (GET_CODE (op) != STACK_POP_CODE)
1225 return XEXP (op, 0) == stack_pointer_rtx;
1228 /* Return 1 if ADDR is a valid memory address for mode MODE. */
/* NOTE(review): the `win:' label, the fall-through `return 0' and braces
   are missing from this extract; GO_IF_LEGITIMATE_ADDRESS jumps to `win'
   on success.  */
1231 memory_address_p (mode, addr)
1232 enum machine_mode mode ATTRIBUTE_UNUSED;
1235 if (GET_CODE (addr) == ADDRESSOF)
1238 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1245 /* Return 1 if OP is a valid memory reference with mode MODE,
1246 including a valid address.
1248 The main use of this function is as a predicate in match_operand
1249 expressions in the machine description. */
/* NOTE(review): declarations (including `rtx inner') and braces are
   missing from this extract.  */
1252 memory_operand (op, mode)
1254 enum machine_mode mode;
1258 if (! reload_completed)
1259 /* Note that no SUBREG is a memory operand before end of reload pass,
1260 because (SUBREG (MEM...)) forces reloading into a register. */
1261 return GET_CODE (op) == MEM && general_operand (op, mode);
1263 if (mode != VOIDmode && GET_MODE (op) != mode)
/* After reload a (SUBREG (MEM)) may legitimately act as memory.  */
1267 if (GET_CODE (inner) == SUBREG)
1268 inner = SUBREG_REG (inner);
1270 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1273 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1274 that is, a memory reference whose address is a general_operand. */
/* NOTE(review): declarations, braces and an early return are missing from
   this extract.  */
1277 indirect_operand (op, mode)
1279 enum machine_mode mode;
1281 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1282 if (! reload_completed
1283 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1285 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1286 rtx inner = SUBREG_REG (op);
1288 if (BYTES_BIG_ENDIAN)
1289 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1290 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1292 if (mode != VOIDmode && GET_MODE (op) != mode)
1295 /* The only way that we can have a general_operand as the resulting
1296 address is if OFFSET is zero and the address already is an operand
1297 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1300 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1301 || (GET_CODE (XEXP (inner, 0)) == PLUS
1302 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1303 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1304 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1307 return (GET_CODE (op) == MEM
1308 && memory_operand (op, mode)
1309 && general_operand (XEXP (op, 0), Pmode));
1312 /* Return 1 if this is a comparison operator. This allows the use of
1313 MATCH_OPERATOR to recognize all the branch insns. */
/* Nonzero if OP is a comparison rtx (RTX class '<') of mode MODE
   (or of any mode when MODE is VOIDmode).  */
1316 comparison_operator (op, mode)
1318     enum machine_mode mode;
1320   return ((mode == VOIDmode || GET_MODE (op) == mode)
1321 	  && GET_RTX_CLASS (GET_CODE (op)) == '<');
1324 /* If BODY is an insn body that uses ASM_OPERANDS,
1325 return the number of operands (both input and output) in the insn.
1326 Otherwise return -1. */
/* Count the operands (inputs plus outputs) of an asm insn body, or
   return -1 when BODY is not a recognized asm-with-operands shape.
   Four shapes are handled: bare ASM_OPERANDS, a single SET of one,
   a PARALLEL of SETs followed by CLOBBERs, and a PARALLEL of a bare
   ASM_OPERANDS followed by CLOBBERs.  */
1329 asm_noperands (body)
1332   if (GET_CODE (body) == ASM_OPERANDS)
1333     /* No output operands: return number of input operands. */
1334     return ASM_OPERANDS_INPUT_LENGTH (body);
1335   if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1336     /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1337     return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1338   else if (GET_CODE (body) == PARALLEL
1339 	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
1340 	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1342       /* Multiple output operands, or 1 output plus some clobbers:
1343 	 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1347       /* Count backwards through CLOBBERs to determine number of SETs. */
1348       for (i = XVECLEN (body, 0); i > 0; i--)
1350 	  if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1352 	  if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
     /* NOTE(review): the loop-exit `break' and failure `return -1'
        statements are elided in this listing.  */
1356       /* N_SETS is now number of output operands. */
1359       /* Verify that all the SETs we have
1360 	 came from a single original asm_operands insn
1361 	 (so that invalid combinations are blocked). */
1362       for (i = 0; i < n_sets; i++)
1364 	  rtx elt = XVECEXP (body, 0, i);
1365 	  if (GET_CODE (elt) != SET)
1367 	  if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1369 	  /* If these ASM_OPERANDS rtx's came from different original insns
1370 	     then they aren't allowed together. */
1371 	  if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1372 	      != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
     /* Result: inputs of the first ASM_OPERANDS plus N_SETS outputs
        (the `+ n_sets' term is on an elided line).  */
1375       return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1378   else if (GET_CODE (body) == PARALLEL
1379 	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1381       /* 0 outputs, but some clobbers:
1382 	 body is [(asm_operands ...) (clobber (reg ...))...]. */
1385       /* Make sure all the other parallel things really are clobbers. */
1386       for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1387 	if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1390       return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1396 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1397 copy its operands (both input and output) into the vector OPERANDS,
1398 the locations of the operands within the insn into the vector OPERAND_LOCS,
1399 and the constraints for the operands into CONSTRAINTS.
1400 Write the modes of the operands into MODES.
1401 Return the assembler-template.
1403 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1404 we don't store that info. */
/* Unpack an asm insn BODY into caller-supplied parallel arrays:
   operand values, their locations, constraint strings and modes.
   Each output array pointer may be 0 to skip that datum.
   Returns the asm template string (0 if BODY isn't a known shape).
   Handles the same four body shapes as asm_noperands, above.  */
1407 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1411      const char **constraints;
1412      enum machine_mode *modes;
1416   const char *template = 0;
1418   if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1420       rtx asmop = SET_SRC (body);
1421       /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1423       noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
     /* Inputs occupy slots 1..noperands-1; slot 0 is the output.  */
1425       for (i = 1; i < noperands; i++)
1428 	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1430 	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1432 	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1434 	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1437       /* The output is in the SET.
1438 	 Its constraint is in the ASM_OPERANDS itself. */
1440 	operands[0] = SET_DEST (body);
1442 	operand_locs[0] = &SET_DEST (body);
1444 	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1446 	modes[0] = GET_MODE (SET_DEST (body));
1447       template = ASM_OPERANDS_TEMPLATE (asmop);
1449   else if (GET_CODE (body) == ASM_OPERANDS)
1452       /* No output operands: BODY is (asm_operands ....). */
1454       noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1456       /* The input operands are found in the 1st element vector. */
1457       /* Constraints for inputs are in the 2nd element vector. */
1458       for (i = 0; i < noperands; i++)
1461 	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1463 	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1465 	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1467 	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1469       template = ASM_OPERANDS_TEMPLATE (asmop);
1471   else if (GET_CODE (body) == PARALLEL
1472 	   && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1474       rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1475       int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1476       int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1477       int nout = 0;		/* Does not include CLOBBERs. */
1479       /* At least one output, plus some CLOBBERs. */
1481       /* The outputs are in the SETs.
1482 	 Their constraints are in the ASM_OPERANDS itself. */
1483       for (i = 0; i < nparallel; i++)
1485 	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1486 	    break;		/* Past last SET */
1489 	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
1491 	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1493 	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1495 	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
     /* Inputs follow the outputs, offset by NOUT.  */
1499       for (i = 0; i < nin; i++)
1502 	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1504 	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1506 	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1508 	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1511       template = ASM_OPERANDS_TEMPLATE (asmop);
1513   else if (GET_CODE (body) == PARALLEL
1514 	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1516       /* No outputs, but some CLOBBERs. */
1518       rtx asmop = XVECEXP (body, 0, 0);
1519       int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1521       for (i = 0; i < nin; i++)
1524 	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1526 	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1528 	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1530 	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1533       template = ASM_OPERANDS_TEMPLATE (asmop);
1539 /* Check if an asm_operand matches it's constraints.
1540 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
/* Test operand OP of an asm against a single CONSTRAINT string.
   Returns > 0 on definite match, 0 on definite mismatch, < 0 when
   the answer can't be decided here (e.g. matching-digit constraints).
   Only meaningful before reload; constrain_operands is used after.  */
1543 asm_operand_ok (op, constraint)
1545      const char *constraint;
1549   /* Use constrain_operands after reload. */
1550   if (reload_completed)
     /* Dispatch on each constraint letter in turn.  */
1555       switch (*constraint++)
1568 	case '0': case '1': case '2': case '3': case '4':
1569 	case '5': case '6': case '7': case '8': case '9':
1570 	  /* For best results, our caller should have given us the
1571 	     proper matching constraint, but we can't actually fail
1572 	     the check if they didn't.  Indicate that results are
1578 	  if (address_operand (op, VOIDmode))
1583 	case 'V': /* non-offsettable */
1584 	  if (memory_operand (op, VOIDmode))
1588 	case 'o': /* offsettable */
1589 	  if (offsettable_nonstrict_memref_p (op))
1594 	  /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1595 	     excepting those that expand_call created.  Further, on some
1596 	     machines which do not have generalized auto inc/dec, an inc/dec
1597 	     is not a memory_operand.
1599 	     Match any memory and hope things are resolved after reload. */
1601 	  if (GET_CODE (op) == MEM
1603 		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
1604 		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
1609 	  if (GET_CODE (op) == MEM
1611 		  || GET_CODE (XEXP (op, 0)) == PRE_INC
1612 		  || GET_CODE (XEXP (op, 0)) == POST_INC))
1617 #ifndef REAL_ARITHMETIC
1618 	  /* Match any floating double constant, but only if
1619 	     we can examine the bits of it reliably. */
1620 	  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1621 	       || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1622 	      && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1628 	  if (GET_CODE (op) == CONST_DOUBLE)
1633 	  if (GET_CODE (op) == CONST_DOUBLE
1634 	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1638 	  if (GET_CODE (op) == CONST_DOUBLE
1639 	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
     /* 's'/'i'-style constant constraints: CONST_INT or VOIDmode
        CONST_DOUBLE (a wide integer constant).  */
1644 	  if (GET_CODE (op) == CONST_INT
1645 	      || (GET_CODE (op) == CONST_DOUBLE
1646 		  && GET_MODE (op) == VOIDmode))
1652 #ifdef LEGITIMATE_PIC_OPERAND_P
1653 	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1660 	  if (GET_CODE (op) == CONST_INT
1661 	      || (GET_CODE (op) == CONST_DOUBLE
1662 		  && GET_MODE (op) == VOIDmode))
     /* Machine-dependent integer-range letters I..P.  */
1667 	  if (GET_CODE (op) == CONST_INT
1668 	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1672 	  if (GET_CODE (op) == CONST_INT
1673 	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1677 	  if (GET_CODE (op) == CONST_INT
1678 	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1682 	  if (GET_CODE (op) == CONST_INT
1683 	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1687 	  if (GET_CODE (op) == CONST_INT
1688 	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1692 	  if (GET_CODE (op) == CONST_INT
1693 	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1697 	  if (GET_CODE (op) == CONST_INT
1698 	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1702 	  if (GET_CODE (op) == CONST_INT
1703 	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
1711 	  if (general_operand (op, VOIDmode))
1715 #ifdef EXTRA_CONSTRAINT
     /* Machine-specific Q..U letters via EXTRA_CONSTRAINT.  */
1717 	  if (EXTRA_CONSTRAINT (op, 'Q'))
1721 	  if (EXTRA_CONSTRAINT (op, 'R'))
1725 	  if (EXTRA_CONSTRAINT (op, 'S'))
1729 	  if (EXTRA_CONSTRAINT (op, 'T'))
1733 	  if (EXTRA_CONSTRAINT (op, 'U'))
1740 	  if (GET_MODE (op) == BLKmode)
1742 	  if (register_operand (op, VOIDmode))
1751 /* Given an rtx *P, if it is a sum containing an integer constant term,
1752 return the location (type rtx *) of the pointer to that constant term.
1753 Otherwise, return a null pointer. */
/* Recursively search the sum at *P for a constant term and return the
   address of the pointer to it; null pointer if there is none.  */
1756 find_constant_term_loc (p)
1760   register enum rtx_code code = GET_CODE (*p);
1762   /* If *P IS such a constant term, P is its location. */
1764   if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1768   /* Otherwise, if not a sum, it has no constant term. */
1770   if (GET_CODE (*p) != PLUS)
1773   /* If one of the summands is constant, return its location. */
1775   if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1776       && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
     /* NOTE(review): the return statement for the both-constant case is
        on an elided line in this listing.  */
1779   /* Otherwise, check each summand for containing a constant term. */
1781   if (XEXP (*p, 0) != 0)
1783       tem = find_constant_term_loc (&XEXP (*p, 0));
1788   if (XEXP (*p, 1) != 0)
1790       tem = find_constant_term_loc (&XEXP (*p, 1));
1798 /* Return 1 if OP is a memory reference
1799 whose address contains no side effects
1800 and remains valid after the addition
1801 of a positive integer less than the
1802 size of the object being referenced.
1804 We assume that the original address is valid and do not check it.
1806 This uses strict_memory_address_p as a subroutine, so
1807 don't use it before reload. */
/* Strict form: OP must be a MEM whose address stays valid (per
   strict_memory_address_p) when offset by up to the object size.  */
1810 offsettable_memref_p (op)
1813   return ((GET_CODE (op) == MEM)
1814 	  && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1817 /* Similar, but don't require a strictly valid mem ref:
1818 consider pseudo-regs valid as index or base regs. */
/* Non-strict form: like offsettable_memref_p but pseudo registers
   are accepted as base/index registers (for use before reload).  */
1821 offsettable_nonstrict_memref_p (op)
1824   return ((GET_CODE (op) == MEM)
1825 	  && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1828 /* Return 1 if Y is a memory address which contains no side effects
1829 and would remain valid after the addition of a positive integer
1830 less than the size of that mode.
1832 We assume that the original address is valid and do not check it.
1833 We do check that it is valid for narrower modes.
1835 If STRICTP is nonzero, we require a strictly valid address,
1836 for the sake of use in reload.c. */
/* Nonzero if address Y remains valid for mode MODE after adding any
   offset 0 < d < GET_MODE_SIZE (MODE).  STRICTP selects strict vs.
   non-strict address validation (strict is required inside reload).
   Works by temporarily adding the max offset and re-validating.  */
1839 offsettable_address_p (strictp, mode, y)
1841      enum machine_mode mode;
1844   register enum rtx_code ycode = GET_CODE (y);
     /* Pick the validation routine once, based on STRICTP.  */
1848   int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1849     (strictp ? strict_memory_address_p : memory_address_p);
1850   unsigned int mode_sz = GET_MODE_SIZE (mode);
1852   if (CONSTANT_ADDRESS_P (y))
1855   /* Adjusting an offsettable address involves changing to a narrower mode.
1856      Make sure that's OK. */
1858   if (mode_dependent_address_p (y))
1861   /* ??? How much offset does an offsettable BLKmode reference need?
1862      Clearly that depends on the situation in which it's being used.
1863      However, the current situation in which we test 0xffffffff is
1864      less than ideal.  Caveat user. */
1866     mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1868   /* If the expression contains a constant term,
1869      see if it remains valid when max possible offset is added. */
1871   if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
     /* Mutate the constant term in place, test, then restore below.  */
1876       *y2 = plus_constant (*y2, mode_sz - 1);
1877       /* Use QImode because an odd displacement may be automatically invalid
1878 	 for any wider mode.  But it should be valid for a single byte. */
1879       good = (*addressp) (QImode, y);
1881       /* In any case, restore old contents of memory. */
1886   if (ycode == PRE_DEC || ycode == PRE_INC
1887       || ycode == POST_DEC || ycode == POST_INC)
     /* Auto-inc/dec addresses are never offsettable.  (The rejection
        return is on an elided line in this listing.)  */
1890   /* The offset added here is chosen as the maximum offset that
1891      any instruction could need to add when operating on something
1892      of the specified mode.  We assume that if Y and Y+c are
1893      valid addresses then so is Y+d for all 0<d<c.  */
1895   z = plus_constant_for_output (y, mode_sz - 1);
1897   /* Use QImode because an odd displacement may be automatically invalid
1898      for any wider mode.  But it should be valid for a single byte. */
1899   return (*addressp) (QImode, z);
1902 /* Return 1 if ADDR is an address-expression whose effect depends
1903 on the mode of the memory reference it is used in.
1905 Autoincrement addressing is a typical example of mode-dependence
1906 because the amount of the increment depends on the mode. */
/* Nonzero if ADDR's meaning depends on the mode of the containing MEM
   (e.g. auto-increment, whose step is the mode size).  The target's
   GO_IF_MODE_DEPENDENT_ADDRESS macro jumps to `win' on a match.  */
1909 mode_dependent_address_p (addr)
1910      rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
1912   GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1914   /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1915  win: ATTRIBUTE_UNUSED_LABEL
1919 /* Return 1 if OP is a general operand
1920 other than a memory ref with a mode dependent address. */
/* Nonzero if OP is a general_operand that is not a MEM with a
   mode-dependent address.  */
1923 mode_independent_operand (op, mode)
1924      enum machine_mode mode;
1929   if (! general_operand (op, mode))
1932   if (GET_CODE (op) != MEM)
     /* Non-MEM general operands trivially qualify (the `return 1'
        is on an elided line in this listing).  */
1935   addr = XEXP (op, 0);
1936   GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
1938   /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1939  lose: ATTRIBUTE_UNUSED_LABEL
1943 /* Given an operand OP that is a valid memory reference
1944 which satisfies offsettable_memref_p,
1945 return a new memory reference whose address has been adjusted by OFFSET.
1946 OFFSET should be positive and less than the size of the object referenced.
/* Build a new MEM like OP (which must satisfy offsettable_memref_p)
   with its address displaced by OFFSET bytes, preserving the
   RTX_UNCHANGING_P flag.  Adjusts an existing constant term in the
   address when there is one; otherwise wraps the address in a PLUS.  */
1950 adj_offsettable_operand (op, offset)
1954   register enum rtx_code code = GET_CODE (op);
1958       register rtx y = XEXP (op, 0);
1961       if (CONSTANT_ADDRESS_P (y))
1963 	  new = gen_rtx_MEM (GET_MODE (op),
1964 			     plus_constant_for_output (y, offset));
1965 	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1969       if (GET_CODE (y) == PLUS)
1972 	  register rtx *const_loc;
1976 	  const_loc = find_constant_term_loc (&z);
     /* Fold OFFSET into the existing constant term when one exists.  */
1979 	      *const_loc = plus_constant_for_output (*const_loc, offset);
1984       new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1985       RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1991 /* Analyze INSN and fill in recog_data. */
/* NOTE(review): the function header line is elided from this listing;
   from the comment above and the code this is the body of extract_insn:
   it fills the global recog_data from INSN, handling both asm insns
   (via asm_noperands/decode_asm_operands) and ordinary recognized
   insns (via recog_memoized/insn_extract and the insn_data table).  */
2000   rtx body = PATTERN (insn);
2002   recog_data.n_operands = 0;
2003   recog_data.n_alternatives = 0;
2004   recog_data.n_dups = 0;
2006   switch (GET_CODE (body))
2018 	  recog_data.n_operands = noperands = asm_noperands (body);
2021 	  /* This insn is an `asm' with operands.  */
2023 	  /* expand_asm_operands makes sure there aren't too many operands.  */
2024 	  if (noperands > MAX_RECOG_OPERANDS)
2027 	  /* Now get the operand values and constraints out of the insn. */
2028 	  decode_asm_operands (body, recog_data.operand,
2029 			       recog_data.operand_loc,
2030 			       recog_data.constraints,
2031 			       recog_data.operand_mode);
     /* For asm, the alternative count is the comma count in the first
        constraint string, plus one.  */
2034 	      const char *p =  recog_data.constraints[0];
2035 	      recog_data.n_alternatives = 1;
2037 		recog_data.n_alternatives += (*p++ == ',');
2045       /* Ordinary insn: recognize it, get the operands via insn_extract
2046 	 and get the constraints.  */
2048       icode = recog_memoized (insn);
2050 	fatal_insn_not_found (insn);
2052       recog_data.n_operands = noperands = insn_data[icode].n_operands;
2053       recog_data.n_alternatives = insn_data[icode].n_alternatives;
2054       recog_data.n_dups = insn_data[icode].n_dups;
2056       insn_extract (insn);
2058       for (i = 0; i < noperands; i++)
2060 	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2061 	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
     /* Classify each operand as output/in-out/input from the leading
        '=' or '+' of its constraint.  */
2064   for (i = 0; i < noperands; i++)
2065     recog_data.operand_type[i]
2066       = (recog_data.constraints[i][0] == '=' ? OP_OUT
2067 	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2070   if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2074 /* After calling extract_insn, you can use this function to extract some
2075 information from the constraint strings into a more usable form.
2076 The collected data is stored in recog_op_alt. */
/* Parse recog_data.constraints into the recog_op_alt table: one
   operand_alternative record per (operand, alternative) pair, with
   flags for memory/offset/inc-dec acceptance, matching-operand links,
   earlyclobber, reject weight, and the accumulated register class.  */
2078 preprocess_constraints ()
2082   memset (recog_op_alt, 0, sizeof recog_op_alt);
2083   for (i = 0; i < recog_data.n_operands; i++)
2086       struct operand_alternative *op_alt;
2087       const char *p = recog_data.constraints[i];
2089       op_alt = recog_op_alt[i];
2091       for (j = 0; j < recog_data.n_alternatives; j++)
     /* Defaults for each alternative before scanning its letters.  */
2093 	  op_alt[j].class = NO_REGS;
2094 	  op_alt[j].constraint = p;
2095 	  op_alt[j].matches = -1;
2096 	  op_alt[j].matched = -1;
2098 	  if (*p == '\0' || *p == ',')
2100 	      op_alt[j].anything_ok = 1;
     /* Scan each constraint letter until the ',' separating
        alternatives (or end of string).  */
2110 	      while (c != ',' && c != '\0');
2111 	      if (c == ',' || c == '\0')
2116 		case '=': case '+': case '*': case '%':
2117 		case 'E': case 'F': case 'G': case 'H':
2118 		case 's': case 'i': case 'n':
2119 		case 'I': case 'J': case 'K': case 'L':
2120 		case 'M': case 'N': case 'O': case 'P':
2121 #ifdef EXTRA_CONSTRAINT
2122 		case 'Q': case 'R': case 'S': case 'T': case 'U':
2124 		  /* These don't say anything we care about.  */
2128 		  op_alt[j].reject += 6;
2131 		  op_alt[j].reject += 600;
2134 		  op_alt[j].earlyclobber = 1;
2137 		case '0': case '1': case '2': case '3': case '4':
2138 		case '5': case '6': case '7': case '8': case '9':
     /* Digit: this operand must match operand C - '0';
        record the link in both directions.  */
2139 		  op_alt[j].matches = c - '0';
2140 		  recog_op_alt[op_alt[j].matches][j].matched = i;
2144 		  op_alt[j].memory_ok = 1;
2147 		  op_alt[j].decmem_ok = 1;
2150 		  op_alt[j].incmem_ok = 1;
2153 		  op_alt[j].nonoffmem_ok = 1;
2156 		  op_alt[j].offmem_ok = 1;
2159 		  op_alt[j].anything_ok = 1;
2163 		  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2167 		  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2171 		  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2179 /* Check the operands of an insn against the insn's operand constraints
2180 and return 1 if they are valid.
2181 The information about the insn's operands, constraints, operand modes
2182 etc. is obtained from the global variables set up by extract_insn.
2184 WHICH_ALTERNATIVE is set to a number which indicates which
2185 alternative of constraints was matched: 0 for the first alternative,
2186 1 for the next, etc.
2188 In addition, when two operands are match
2189 and it happens that the output operand is (reg) while the
2190 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2191 make the output operand look like the input.
2192 This is because the output operand is the one the template will print.
2194 This is used in final, just before printing the assembler code and by
2195 the routines that determine an insn's attribute.
2197 If STRICT is a positive non-zero value, it means that we have been
2198 called after reload has been completed. In that case, we must
2199 do all checks strictly. If it is zero, it means that we have been called
2200 before reload has completed. In that case, we first try to see if we can
2201 find an alternative that matches strictly. If not, we try again, this
2202 time assuming that reload will fix up the insn. This provides a "best
2203 guess" for the alternative and is used to compute attributes of insns prior
2204 to reload. A negative value of STRICT is used for this internal call. */
/* Check recog_data's operands against the insn's constraint strings,
   trying each alternative in turn; sets which_alternative on success.
   STRICT > 0: post-reload, full strict checking.  STRICT == 0: first
   try strictly, then retry loosely (STRICT < 0 marks the internal
   loose retry) assuming reload will fix things up.  Also swaps
   matched operands when the input is an auto-inc/dec form of the
   output (`funny_match'), so the template prints the right one.  */
2212 constrain_operands (strict)
2215   const char *constraints[MAX_RECOG_OPERANDS];
2216   int matching_operands[MAX_RECOG_OPERANDS];
2217   int earlyclobber[MAX_RECOG_OPERANDS];
2220   struct funny_match funny_match[MAX_RECOG_OPERANDS];
2221   int funny_match_index;
2223   if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2226   for (c = 0; c < recog_data.n_operands; c++)
2228       constraints[c] = recog_data.constraints[c];
2229       matching_operands[c] = -1;
2232   which_alternative = 0;
2234   while (which_alternative < recog_data.n_alternatives)
2238       funny_match_index = 0;
2240       for (opno = 0; opno < recog_data.n_operands; opno++)
2242 	  register rtx op = recog_data.operand[opno];
2243 	  enum machine_mode mode = GET_MODE (op);
2244 	  register const char *p = constraints[opno];
2249 	  earlyclobber[opno] = 0;
2251 	  /* A unary operator may be accepted by the predicate, but it
2252 	     is irrelevant for matching constraints.  */
2253 	  if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2256 	  if (GET_CODE (op) == SUBREG)
2258 	      if (GET_CODE (SUBREG_REG (op)) == REG
2259 		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2260 		offset = SUBREG_WORD (op);
2261 	      op = SUBREG_REG (op);
2264 	  /* An empty constraint or empty alternative
2265 	     allows anything which matched the pattern.  */
2266 	  if (*p == 0 || *p == ',')
     /* Scan this operand's constraint letters for the current
        alternative.  */
2269 	  while (*p && (c = *p++) != ',')
2272 	      case '?':  case '!': case '*':  case '%':
2277 		/* Ignore rest of this alternative as far as
2278 		   constraint checking is concerned.  */
2279 		while (*p && *p != ',')
2284 		earlyclobber[opno] = 1;
2287 	      case '0':  case '1':  case '2':  case '3':  case '4':
2288 	      case '5':  case '6':  case '7':  case '8':  case '9':
2290 		/* This operand must be the same as a previous one.
2291 		   This kind of constraint is used for instructions such
2292 		   as add when they take only two operands.
2294 		   Note that the lower-numbered operand is passed first.
2296 		   If we are not testing strictly, assume that this constraint
2297 		   will be satisfied.  */
2302 		    rtx op1 = recog_data.operand[c - '0'];
2303 		    rtx op2 = recog_data.operand[opno];
2305 		    /* A unary operator may be accepted by the predicate,
2306 		       but it is irrelevant for matching constraints.  */
2307 		    if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2308 		      op1 = XEXP (op1, 0);
2309 		    if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2310 		      op2 = XEXP (op2, 0);
2312 		    val = operands_match_p (op1, op2);
2315 		matching_operands[opno] = c - '0';
2316 		matching_operands[c - '0'] = opno;
2320 		/* If output is *x and input is *--x,
2321 		   arrange later to change the output to *--x as well,
2322 		   since the output op is the one that will be printed.  */
2323 		if (val == 2 && strict > 0)
2325 		    funny_match[funny_match_index].this = opno;
2326 		    funny_match[funny_match_index++].other = c - '0';
2331 		/* p is used for address_operands.  When we are called by
2332 		   gen_reload, no one will have checked that the address is
2333 		   strictly valid, i.e., that all pseudos requiring hard regs
2334 		   have gotten them.  */
2336 		    || (strict_memory_address_p (recog_data.operand_mode[opno],
2341 		/* No need to check general_operand again;
2342 		   it was done in insn-recog.c.  */
2344 		/* Anything goes unless it is a REG and really has a hard reg
2345 		   but the hard reg is not in the class GENERAL_REGS.  */
2347 		    || GENERAL_REGS == ALL_REGS
2348 		    || GET_CODE (op) != REG
2349 		    || (reload_in_progress
2350 			&& REGNO (op) >= FIRST_PSEUDO_REGISTER)
2351 		    || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2358 		     && GET_CODE (op) == REG
2359 		     && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2360 		    || (strict == 0 && GET_CODE (op) == SCRATCH)
2361 		    || (GET_CODE (op) == REG
2362 			&& ((GENERAL_REGS == ALL_REGS
2363 			     && REGNO (op) < FIRST_PSEUDO_REGISTER)
2364 			    || reg_fits_class_p (op, GENERAL_REGS,
2370 		/* This is used for a MATCH_SCRATCH in the cases when
2371 		   we don't actually need anything.  So anything goes
2377 		if (GET_CODE (op) == MEM
2378 		    /* Before reload, accept what reload can turn into mem.  */
2379 		    || (strict < 0 && CONSTANT_P (op))
2380 		    /* During reload, accept a pseudo  */
2381 		    || (reload_in_progress && GET_CODE (op) == REG
2382 			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
2387 		if (GET_CODE (op) == MEM
2388 		    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2389 			|| GET_CODE (XEXP (op, 0)) == POST_DEC))
2394 		if (GET_CODE (op) == MEM
2395 		    && (GET_CODE (XEXP (op, 0)) == PRE_INC
2396 			|| GET_CODE (XEXP (op, 0)) == POST_INC))
2401 #ifndef REAL_ARITHMETIC
2402 		/* Match any CONST_DOUBLE, but only if
2403 		   we can examine the bits of it reliably.  */
2404 		if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2405 		     || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2406 		    && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2409 		if (GET_CODE (op) == CONST_DOUBLE)
2414 		if (GET_CODE (op) == CONST_DOUBLE)
2420 		if (GET_CODE (op) == CONST_DOUBLE
2421 		    && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2426 		if (GET_CODE (op) == CONST_INT
2427 		    || (GET_CODE (op) == CONST_DOUBLE
2428 			&& GET_MODE (op) == VOIDmode))
2431 		if (CONSTANT_P (op))
2436 		if (GET_CODE (op) == CONST_INT
2437 		    || (GET_CODE (op) == CONST_DOUBLE
2438 			&& GET_MODE (op) == VOIDmode))
2450 		if (GET_CODE (op) == CONST_INT
2451 		    && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2455 #ifdef EXTRA_CONSTRAINT
2461 		if (EXTRA_CONSTRAINT (op, c))
2467 		if (GET_CODE (op) == MEM
2468 		    && ((strict > 0 && ! offsettable_memref_p (op))
2470 			    && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2471 			|| (reload_in_progress
2472 			    && !(GET_CODE (op) == REG
2473 				 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2478 		if ((strict > 0 && offsettable_memref_p (op))
2479 		    || (strict == 0 && offsettable_nonstrict_memref_p (op))
2480 		    /* Before reload, accept what reload can handle.  */
2482 			&& (CONSTANT_P (op) || GET_CODE (op) == MEM))
2483 		    /* During reload, accept a pseudo  */
2484 		    || (reload_in_progress && GET_CODE (op) == REG
2485 			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
2492 		     && GET_CODE (op) == REG
2493 		     && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2494 		    || (strict == 0 && GET_CODE (op) == SCRATCH)
2495 		    || (GET_CODE (op) == REG
2496 			&& reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
2501 	  constraints[opno] = p;
2502 	  /* If this operand did not win somehow,
2503 	     this alternative loses.  */
2507       /* This alternative won; the operands are ok.
2508 	 Change whichever operands this alternative says to change.  */
2513 	  /* See if any earlyclobber operand conflicts with some other
2517 	    for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2518 	      /* Ignore earlyclobber operands now in memory,
2519 		 because we would often report failure when we have
2520 		 two memory operands, one of which was formerly a REG.  */
2521 	      if (earlyclobber[eopno]
2522 		  && GET_CODE (recog_data.operand[eopno]) == REG)
2523 		for (opno = 0; opno < recog_data.n_operands; opno++)
2524 		  if ((GET_CODE (recog_data.operand[opno]) == MEM
2525 		       || recog_data.operand_type[opno] != OP_OUT)
2527 		      /* Ignore things like match_operator operands.  */
2528 		      && *recog_data.constraints[opno] != 0
2529 		      && ! (matching_operands[opno] == eopno
2530 			    && operands_match_p (recog_data.operand[opno],
2531 						 recog_data.operand[eopno]))
2532 		      && ! safe_from_earlyclobber (recog_data.operand[opno],
2533 						   recog_data.operand[eopno]))
     /* Apply the deferred output<-input swaps recorded above.  */
2538 	      while (--funny_match_index >= 0)
2540 		  recog_data.operand[funny_match[funny_match_index].other]
2541 		    = recog_data.operand[funny_match[funny_match_index].this];
2548       which_alternative++;
2551   /* If we are about to reject this, but we are not to test strictly,
2552      try a very loose test.  Only return failure if it fails also.  */
2554     return constrain_operands (-1);
2559 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2560 is a hard reg in class CLASS when its regno is offset by OFFSET
2561 and changed to mode MODE.
2562 If REG occupies multiple hard regs, all of them must be in CLASS. */
/* Nonzero if REG rtx OPERAND, with its regno displaced by OFFSET and
   viewed in mode MODE, is a hard register (together with all the hard
   regs it then occupies) contained in class CLASS.  */
2565 reg_fits_class_p (operand, class, offset, mode)
2567      register enum reg_class class;
2569      enum machine_mode mode;
2571   register int regno = REGNO (operand);
2572   if (regno < FIRST_PSEUDO_REGISTER
2573       && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
     /* Check every hard reg the (possibly multi-word) value occupies.  */
2578       for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2580 	if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2589 /* Split all insns in the function. If UPD_LIFE, update life info after. */
/* Walk every basic block and split each splittable INSN via try_split,
   deleting no-op moves after reload.  Blocks that changed are noted in
   a bitmap; when UPD_LIFE is set, life information is recomputed for
   just those blocks at the end.  */
2592 split_all_insns (upd_life)
2599   blocks = sbitmap_alloc (n_basic_blocks);
2600   sbitmap_zero (blocks);
2603   for (i = n_basic_blocks - 1; i >= 0; --i)
2605       basic_block bb = BASIC_BLOCK (i);
2608       for (insn = bb->head; insn ; insn = next)
2612 	  /* Can't use `next_real_insn' because that might go across
2613 	     CODE_LABELS and short-out basic blocks.  */
2614 	  next = NEXT_INSN (insn);
2615 	  if (GET_CODE (insn) != INSN)
2618 	  /* Don't split no-op move insns.  These should silently
2619 	     disappear later in final.  Splitting such insns would
2620 	     break the code that handles REG_NO_CONFLICT blocks.  */
2622 	  else if ((set = single_set (insn)) != NULL
2623 		   && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
2625 	      /* Nops get in the way while scheduling, so delete them
2626 		 now if register allocation has already been done.  It
2627 		 is too risky to try to do this before register
2628 		 allocation, and there are unlikely to be very many
2629 		 nops then anyways.  */
2630 	      if (reload_completed)
     /* Turn the nop insn into a deleted note in place.  */
2632 		  PUT_CODE (insn, NOTE);
2633 		  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2634 		  NOTE_SOURCE_FILE (insn) = 0;
2639 	      /* Split insns here to get max fine-grain parallelism.  */
2640 	      rtx first = PREV_INSN (insn);
2641 	      rtx last = try_split (PATTERN (insn), insn, 1);
2645 		  SET_BIT (blocks, i);
2648 		  /* try_split returns the NOTE that INSN became.  */
2649 		  first = NEXT_INSN (first);
2650 		  PUT_CODE (insn, NOTE);
2651 		  NOTE_SOURCE_FILE (insn) = 0;
2652 		  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2654 		  if (insn == bb->end)
2662 	  if (insn == bb->end)
2666       /* ??? When we're called from just after reload, the CFG is in bad
2667 	 shape, and we may have fallen off the end.  This could be fixed
2668 	 by having reload not try to delete unreachable code.  Otherwise
2669 	 assert we found the end insn.  */
2670       if (insn == NULL && upd_life)
2674   if (changed && upd_life)
2676       compute_bb_for_insn (get_max_uid ());
2677       count_or_remove_death_notes (blocks, 1);
2678       update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2681   sbitmap_free (blocks);
2684 #ifdef HAVE_peephole2
2685 /* This is the last insn we'll allow recog_next_insn to consider. */
2686 static rtx recog_last_allowed_insn;
2688 /* Return the Nth non-note insn after INSN, or return NULL_RTX if it does
2689 not exist. Used by the recognizer to find the next insn to match in a
2690 multi-insn pattern. */
/* Step forward from INSN to the Nth following insn of RTX class 'i',
   returning NULL_RTX if the chain ends or recog_last_allowed_insn is
   reached first.  Used by multi-insn (peephole2) pattern matching.  */
2692 recog_next_insn (insn, n)
2696   if (insn != NULL_RTX)
2700 	  if (insn == recog_last_allowed_insn)
2703 	  insn = NEXT_INSN (insn);
2704 	  if (insn == NULL_RTX)
2707 	  if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2715 /* Perform the peephole2 optimization pass. */
2717 peephole2_optimize (dump_file)
2718 FILE *dump_file ATTRIBUTE_UNUSED;
2724 /* ??? TODO: Arrange with resource.c to start at bb->global_live_at_end
2725 and backtrack insn by insn as we proceed through the block. In this
2726 way we'll not need to keep searching forward from the beginning of
2727 basic blocks to find register life info. */
2729 init_resource_info (NULL);
2731 blocks = sbitmap_alloc (n_basic_blocks);
2732 sbitmap_zero (blocks);
2735 for (i = n_basic_blocks - 1; i >= 0; --i)
2737 basic_block bb = BASIC_BLOCK (i);
2739 /* Since we don't update life info until the very end, we can't
2740 allow matching instructions that we've replaced before. Walk
2741 backward through the basic block so that we don't have to
2742 care about subsequent life info; recog_last_allowed_insn to
2743 restrict how far forward we will allow the match to proceed. */
2745 recog_last_allowed_insn = NEXT_INSN (bb->end);
2746 for (insn = bb->end; ; insn = prev)
2748 prev = PREV_INSN (insn);
2753 try = peephole2_insns (PATTERN (insn), insn, &last_insn);
2756 flow_delete_insn_chain (insn, last_insn);
2757 try = emit_insn_after (try, prev);
2759 if (last_insn == bb->end)
2761 if (insn == bb->head)
2762 bb->head = NEXT_INSN (prev);
2764 recog_last_allowed_insn = NEXT_INSN (prev);
2765 SET_BIT (blocks, i);
2770 if (insn == bb->head)
2775 free_resource_info ();
2777 compute_bb_for_insn (get_max_uid ());
2778 count_or_remove_death_notes (blocks, 1);
2779 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);