1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "hard-reg-set.h"
36 #include "basic-block.h"
/* Default the direction of stack pushes and pops from the direction of
   stack growth when the target does not define them explicitly.
   NOTE(review): the fragment had lost the #else/#endif lines of these
   conditionals, leaving them unbalanced; restored to the canonical form.  */

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
/* Forward declarations for static helpers defined later in this file.
   PARAMS is the prototype-compatibility macro used by this code base for
   K&R/ISO dual support.  */
56 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
57 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
58 static rtx *find_constant_term_loc PARAMS ((rtx *));
59 static void validate_replace_src_1 PARAMS ((rtx *, void *));
61 /* Nonzero means allow operands to be volatile.
62 This should be 0 if you are generating rtl, such as if you are calling
63 the functions in optabs.c and expmed.c (most of the time).
64 This should be 1 if all valid insns need to be recognized,
65 such as in regclass.c and final.c and reload.c.
67 init_recog and init_recog_no_volatile are responsible for setting this. */
/* NOTE(review): the declaration this comment describes (canonically
   `int volatile_ok;') is missing from this fragment — confirm against the
   complete file.  */
/* Global scratch area holding the operands, duplicates and alternatives of
   the insn most recently passed to extract_insn.  */
71 struct recog_data recog_data;
73 /* Contains a vector of operand_alternative structures for every operand.
74 Set up by preprocess_constraints. */
75 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
77 /* On return from `constrain_operands', indicate which alternative
80 int which_alternative;
82 /* Nonzero after end of reload pass.
83 Set to 1 or 0 by toplev.c.
84 Controls the significance of (SUBREG (MEM)). */
/* NOTE(review): the `int reload_completed;' declaration itself is not
   visible in this fragment.  */
88 /* Initialize data used by the function `recog'.
89 This must be called once in the compilation of a function
90 before any insn recognition may be done in the function. */
/* NOTE(review): only the name line of this definition survives here; the
   return type and body (canonically it clears `volatile_ok') are missing
   from this fragment.  */
93 init_recog_no_volatile ()
104 /* Try recognizing the instruction INSN,
105 and return the code number that results.
106 Remember the code so that repeated calls do not
107 need to spend the time for actual rerecognition.
109 This function is the normal interface to instruction recognition.
110 The automatically-generated function `recog' is normally called
111 through this one. (The only exception is in combine.c.) */
114 recog_memoized_1 (insn)
117 if (INSN_CODE (insn) < 0)
118 INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
119 return INSN_CODE (insn);
122 /* Check that X is an insn-body for an `asm' with operands
123 and that the operands mentioned in it are legitimate. */
/* NOTE(review): this definition is fragmentary — return type, local
   declarations, braces and several early returns are missing.  */
126 check_asm_operands (x)
131 const char **constraints;
134 /* Post-reload, be more strict with things. */
135 if (reload_completed)
137 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
138 extract_insn (make_insn_raw (x));
139 constrain_operands (1);
140 return which_alternative >= 0;
143 noperands = asm_noperands (x);
/* Scratch vectors for the decoded operands and their constraint strings;
   alloca keeps them on the stack for the duration of the check.  */
149 operands = (rtx *) alloca (noperands * sizeof (rtx));
150 constraints = (const char **) alloca (noperands * sizeof (char *));
152 decode_asm_operands (x, operands, NULL, constraints, NULL);
154 for (i = 0; i < noperands; i++)
156 const char *c = constraints[i];
/* A single-digit constraint is a matching constraint: the operand must
   satisfy the constraint of the operand it matches.  */
159 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
160 c = constraints[c[0] - '0'];
162 if (! asm_operand_ok (operands[i], c))
169 /* Static data for the next two routines. */
171 typedef struct change_t
/* NOTE(review): the struct's member list (canonically: the OBJECT rtx,
   the saved insn code, the location pointer and the old rtx) is missing
   from this fragment.  */
/* Pending-change log used by validate_change / apply_change_group:
   `changes' is a growable array of `changes_allocated' entries, of which
   the first `num_changes' are in use.  */
179 static change_t *changes;
180 static int changes_allocated;
182 static int num_changes = 0;
184 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
185 at which NEW will be placed. If OBJECT is zero, no validation is done,
186 the change is simply made.
188 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
189 will be called with the address and mode as parameters. If OBJECT is
190 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
193 IN_GROUP is non-zero if this is part of a group of changes that must be
194 performed as a group. In that case, the changes will be stored. The
195 function `apply_change_group' will validate and apply the changes.
197 If IN_GROUP is zero, this is a single change. Try to recognize the insn
198 or validate the memory reference with the change applied. If the result
199 is not valid for the machine, suppress the change and return zero.
200 Otherwise, perform the change and return 1. */
/* NOTE(review): fragmentary definition — return type, parameter
   declarations, braces and the statement performing the substitution
   itself are missing from this view.  */
203 validate_change (object, loc, new, in_group)
/* Nothing to do when the replacement is identical to what is there.  */
211 if (old == new || rtx_equal_p (old, new))
/* A single change may not be interleaved with an open group.  */
214 if (in_group == 0 && num_changes != 0)
219 /* Save the information describing this change. */
/* Grow the change log geometrically when it fills up.  */
220 if (num_changes >= changes_allocated)
222 if (changes_allocated == 0)
223 /* This value allows for repeated substitutions inside complex
224 indexed addresses, or changes in up to 5 insns. */
225 changes_allocated = MAX_RECOG_OPERANDS * 5;
227 changes_allocated *= 2;
230 (change_t*) xrealloc (changes,
231 sizeof (change_t) * changes_allocated);
234 changes[num_changes].object = object;
235 changes[num_changes].loc = loc;
236 changes[num_changes].old = old;
238 if (object && GET_CODE (object) != MEM)
240 /* Set INSN_CODE to force rerecognition of insn. Save old code in
242 changes[num_changes].old_code = INSN_CODE (object);
243 INSN_CODE (object) = -1;
248 /* If we are making a group of changes, return 1. Otherwise, validate the
249 change group we made. */
254 return apply_change_group ();
257 /* This subroutine of apply_change_group verifies whether the changes to INSN
258 were valid; i.e. whether INSN can still be recognized. */
/* NOTE(review): fragmentary definition — return type, braces and the
   `return 1' / `return 0' statements are missing from this view; by its
   name it returns nonzero when INSN is NOT valid.  */
261 insn_invalid_p (insn)
264 rtx pat = PATTERN (insn);
265 int num_clobbers = 0;
266 /* If we are before reload and the pattern is a SET, see if we can add
268 int icode = recog (pat, insn,
269 (GET_CODE (pat) == SET
270 && ! reload_completed && ! reload_in_progress)
271 ? &num_clobbers : 0);
272 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
275 /* If this is an asm and the operand aren't legal, then fail. Likewise if
276 this is not an asm and the insn wasn't recognized. */
277 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
278 || (!is_asm && icode < 0))
281 /* If we have to add CLOBBERs, fail if we have to add ones that reference
282 hard registers since our callers can't know if they are live or not.
283 Otherwise, add them. */
284 if (num_clobbers > 0)
288 if (added_clobbers_hard_reg_p (icode))
/* Wrap the pattern in a PARALLEL with room for the needed CLOBBERs.  */
291 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
292 XVECEXP (newpat, 0, 0) = pat;
293 add_clobbers (newpat, icode);
294 PATTERN (insn) = pat = newpat;
297 /* After reload, verify that all constraints are satisfied. */
298 if (reload_completed)
302 if (! constrain_operands (1))
/* Record the (possibly new) insn code so recognition is not redone.  */
306 INSN_CODE (insn) = icode;
310 /* Apply a group of changes previously issued with `validate_change'.
311 Return 1 if all changes are valid, zero otherwise. */
/* NOTE(review): fragmentary definition — return type, local declarations,
   braces, several `break' statements and the success/failure epilogue
   (canonically `num_changes = 0; return 1;' vs `cancel_changes (0);
   return 0;') are missing from this view.  */
314 apply_change_group ()
317 rtx last_validated = NULL_RTX;
319 /* The changes have been applied and all INSN_CODEs have been reset to force
322 The changes are valid if we aren't given an object, or if we are
323 given a MEM and it still is a valid address, or if this is in insn
324 and it is recognized. In the latter case, if reload has completed,
325 we also require that the operands meet the constraints for
328 for (i = 0; i < num_changes; i++)
330 rtx object = changes[i].object;
332 /* if there is no object to test or if it is the same as the one we
333 already tested, ignore it. */
334 if (object == 0 || object == last_validated)
337 if (GET_CODE (object) == MEM)
339 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
342 else if (insn_invalid_p (object))
344 rtx pat = PATTERN (object);
346 /* Perhaps we couldn't recognize the insn because there were
347 extra CLOBBERs at the end. If so, try to re-recognize
348 without the last CLOBBER (later iterations will cause each of
349 them to be eliminated, in turn). But don't do this if we
350 have an ASM_OPERAND. */
351 if (GET_CODE (pat) == PARALLEL
352 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
353 && asm_noperands (PATTERN (object)) < 0)
/* A two-element PARALLEL collapses to its first element; otherwise
   build a PARALLEL one element shorter.  */
357 if (XVECLEN (pat, 0) == 2)
358 newpat = XVECEXP (pat, 0, 0);
364 = gen_rtx_PARALLEL (VOIDmode,
365 rtvec_alloc (XVECLEN (pat, 0) - 1));
366 for (j = 0; j < XVECLEN (newpat, 0); j++)
367 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
370 /* Add a new change to this group to replace the pattern
371 with this new pattern. Then consider this change
372 as having succeeded. The change we added will
373 cause the entire call to fail if things remain invalid.
375 Note that this can lose if a later change than the one
376 we are processing specified &XVECEXP (PATTERN (object), 0, X)
377 but this shouldn't occur. */
379 validate_change (object, &PATTERN (object), newpat, 1);
382 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
383 /* If this insn is a CLOBBER or USE, it is always valid, but is
389 last_validated = object;
/* All changes examined without a failure: the group is valid.  */
392 if (i == num_changes)
404 /* Return the number of changes so far in the current group. */
407 num_validated_changes ()
412 /* Retract the changes numbered NUM and up. */
/* NOTE(review): the function header is missing from this fragment —
   canonically this is `void cancel_changes (num)'; only the loop body
   survives.  Confirm against the complete file.  */
420 /* Back out all the changes. Do this in the opposite order in which
422 for (i = num_changes - 1; i >= num; i--)
/* Restore the rtx that was at each recorded location.  */
424 *changes[i].loc = changes[i].old;
/* Insns had their INSN_CODE cleared when the change was queued; put the
   saved code back.  */
425 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
426 INSN_CODE (changes[i].object) = changes[i].old_code;
431 /* Replace every occurrence of FROM in X with TO. Mark each change with
432 validate_change passing OBJECT. */
/* NOTE(review): fragmentary definition — return type, local declarations,
   braces, the early-exit for null/leaf rtx, and the `switch (code)' with
   its case labels (canonically PLUS, MINUS, ZERO_EXTEND/SIGN_EXTEND,
   SUBREG, ZERO_EXTRACT/SIGN_EXTRACT) are missing from this view; the
   simplification stanzas below belong to those lost cases.  */
435 validate_replace_rtx_1 (loc, from, to, object)
437 rtx from, to, object;
440 register const char *fmt;
441 register rtx x = *loc;
443 enum machine_mode op0_mode = VOIDmode;
444 int prev_changes = num_changes;
451 fmt = GET_RTX_FORMAT (code);
453 op0_mode = GET_MODE (XEXP (x, 0));
455 /* X matches FROM if it is the same rtx or they are both referring to the
456 same register in the same mode. Avoid calling rtx_equal_p unless the
457 operands look similar. */
460 || (GET_CODE (x) == REG && GET_CODE (from) == REG
461 && GET_MODE (x) == GET_MODE (from)
462 && REGNO (x) == REGNO (from))
463 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
464 && rtx_equal_p (x, from)))
466 validate_change (object, loc, to, 1);
470 /* Call ourseves recursivly to perform the replacements. */
472 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
475 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
476 else if (fmt[i] == 'E')
477 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
478 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
481 /* In case we didn't substituted, there is nothing to do. */
482 if (num_changes == prev_changes)
485 /* Allow substituted expression to have different mode. This is used by
486 regmove to change mode of pseudo register. */
487 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
488 op0_mode = GET_MODE (XEXP (x, 0));
490 /* Do changes needed to keep rtx consistent. Don't do any other
491 simplifications, as it is not our job. */
/* Canonical operand order for commutative/comparison codes.  */
493 if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
494 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
496 validate_change (object, loc,
497 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
498 : swap_condition (code),
499 GET_MODE (x), XEXP (x, 1),
508 /* If we have a PLUS whose second operand is now a CONST_INT, use
509 plus_constant to try to simplify it.
510 ??? We may want later to remove this, once simplification is
511 separated from this function. */
512 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
513 validate_change (object, loc,
514 plus_constant (XEXP (x, 0), INTVAL (to)), 1);
/* (minus x const) is canonicalized as (plus x -const).  */
517 if (GET_CODE (XEXP (x, 1)) == CONST_INT
518 || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
519 validate_change (object, loc,
521 (PLUS, GET_MODE (x), XEXP (x, 0),
522 simplify_gen_unary (NEG,
523 op0_mode, XEXP (x, 1),
/* Extensions of VOIDmode constants must be folded immediately.  */
528 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
530 new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
532 /* If any of the above failed, substitute in something that
533 we know won't be recognized. */
535 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
536 validate_change (object, loc, new, 1);
540 /* All subregs possible to simplify should be simplified. */
541 new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
544 /* Subregs of VOIDmode operands are incorect. */
545 if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
546 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
548 validate_change (object, loc, new, 1);
552 /* If we are replacing a register with memory, try to change the memory
553 to be the mode required for memory in extract operations (this isn't
554 likely to be an insertion operation; if it was, nothing bad will
555 happen, we might just fail in some cases). */
557 if (GET_CODE (XEXP (x, 0)) == MEM
558 && GET_CODE (XEXP (x, 1)) == CONST_INT
559 && GET_CODE (XEXP (x, 2)) == CONST_INT
560 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
561 && !MEM_VOLATILE_P (XEXP (x, 0)))
563 enum machine_mode wanted_mode = VOIDmode;
564 enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
565 int pos = INTVAL (XEXP (x, 2));
568 if (code == ZERO_EXTRACT)
570 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
571 if (wanted_mode == VOIDmode)
572 wanted_mode = word_mode;
576 if (code == SIGN_EXTRACT)
578 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
579 if (wanted_mode == VOIDmode)
580 wanted_mode = word_mode;
584 /* If we have a narrower mode, we can do something. */
585 if (wanted_mode != VOIDmode
586 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
588 int offset = pos / BITS_PER_UNIT;
591 /* If the bytes and bits are counted differently, we
592 must adjust the offset. */
593 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
595 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
598 pos %= GET_MODE_BITSIZE (wanted_mode);
600 newmem = gen_rtx_MEM (wanted_mode,
601 plus_constant (XEXP (XEXP (x, 0), 0),
603 MEM_COPY_ATTRIBUTES (newmem, XEXP (x, 0));
605 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
606 validate_change (object, &XEXP (x, 0), newmem, 1);
617 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
618 with TO. After all changes have been made, validate by seeing
619 if INSN is still valid. */
622 validate_replace_rtx_subexp (from, to, insn, loc)
623 rtx from, to, insn, *loc;
625 validate_replace_rtx_1 (loc, from, to, insn);
626 return apply_change_group ();
629 /* Try replacing every occurrence of FROM in INSN with TO. After all
630 changes have been made, validate by seeing if INSN is still valid. */
633 validate_replace_rtx (from, to, insn)
636 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
637 return apply_change_group ();
640 /* Try replacing every occurrence of FROM in INSN with TO. */
643 validate_replace_rtx_group (from, to, insn)
646 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
649 /* Function called by note_uses to replace used subexpressions. */
650 struct validate_replace_src_data
652 rtx from; /* Old RTX */
653 rtx to; /* New RTX */
654 rtx insn; /* Insn in which substitution is occurring. */
658 validate_replace_src_1 (x, data)
662 struct validate_replace_src_data *d
663 = (struct validate_replace_src_data *) data;
665 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
668 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
669 SET_DESTs. After all changes have been made, validate by seeing if
670 INSN is still valid. */
673 validate_replace_src (from, to, insn)
676 struct validate_replace_src_data d;
681 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
682 return apply_change_group ();
686 /* Return 1 if the insn using CC0 set by INSN does not contain
687 any ordered tests applied to the condition codes.
688 EQ and NE tests do not count. */
691 next_insn_tests_no_inequality (insn)
694 register rtx next = next_cc0_user (insn);
696 /* If there is no next insn, we have to take the conservative choice. */
700 return ((GET_CODE (next) == JUMP_INSN
701 || GET_CODE (next) == INSN
702 || GET_CODE (next) == CALL_INSN)
703 && ! inequality_comparisons_p (PATTERN (next)));
706 #if 0 /* This is useless since the insn that sets the cc's
707 must be followed immediately by the use of them. */
708 /* Return 1 if the CC value set up by INSN is not used. */
/* NOTE(review): this whole routine is compiled out by the #if 0 above;
   the fragment also lacks the function header details, the loop's
   `continue'/`return' statements and the closing #endif.  */
711 next_insns_test_no_inequality (insn)
714 register rtx next = NEXT_INSN (insn);
716 for (; next != 0; next = NEXT_INSN (next))
718 if (GET_CODE (next) == CODE_LABEL
719 || GET_CODE (next) == BARRIER)
721 if (GET_CODE (next) == NOTE)
723 if (inequality_comparisons_p (PATTERN (next)))
725 if (sets_cc0_p (PATTERN (next)) == 1)
727 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
735 /* This is used by find_single_use to locate an rtx that contains exactly one
736 use of DEST, which is typically either a REG or CC0. It returns a
737 pointer to the innermost rtx expression containing DEST. Appearances of
738 DEST that are being used to totally replace it are not counted. */
/* NOTE(review): fragmentary definition — return type, parameter and local
   declarations, braces, and the enclosing `switch (code)' with its case
   labels (canonically CONST_INT/REG/.../SET/MEM et al.) are missing from
   this view.  */
741 find_single_use_1 (dest, loc)
746 enum rtx_code code = GET_CODE (x);
763 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
764 of a REG that occupies all of the REG, the insn uses DEST if
765 it is mentioned in the destination or the source. Otherwise, we
766 need just check the source. */
767 if (GET_CODE (SET_DEST (x)) != CC0
768 && GET_CODE (SET_DEST (x)) != PC
769 && GET_CODE (SET_DEST (x)) != REG
770 && ! (GET_CODE (SET_DEST (x)) == SUBREG
771 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
772 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
773 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
774 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
775 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
778 return find_single_use_1 (dest, &SET_SRC (x));
782 return find_single_use_1 (dest, &XEXP (x, 0));
788 /* If it wasn't one of the common cases above, check each expression and
789 vector of this code. Look for a unique usage of DEST. */
791 fmt = GET_RTX_FORMAT (code);
792 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
/* A direct appearance of DEST as this operand means more than one use.  */
796 if (dest == XEXP (x, i)
797 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
798 && REGNO (dest) == REGNO (XEXP (x, i))))
801 this_result = find_single_use_1 (dest, &XEXP (x, i));
804 result = this_result;
805 else if (this_result)
806 /* Duplicate usage. */
809 else if (fmt[i] == 'E')
/* Same uniqueness scan over rtx vectors.  */
813 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
815 if (XVECEXP (x, i, j) == dest
816 || (GET_CODE (dest) == REG
817 && GET_CODE (XVECEXP (x, i, j)) == REG
818 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
821 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
824 result = this_result;
825 else if (this_result)
834 /* See if DEST, produced in INSN, is used only a single time in the
835 sequel. If so, return a pointer to the innermost rtx expression in which
838 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
840 This routine will return usually zero either before flow is called (because
841 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
842 note can't be trusted).
844 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
845 care about REG_DEAD notes or LOG_LINKS.
847 Otherwise, we find the single use by finding an insn that has a
848 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
849 only referenced once in that insn, we know that it must be the first
850 and last insn referencing DEST. */
/* NOTE(review): fragmentary definition — return type, parameter and local
   declarations, braces, the cc0 special-case guard and the final returns
   are missing from this view.  */
853 find_single_use (dest, insn, ploc)
/* cc0 case (canonically): only the immediately following insn can use it.  */
865 next = NEXT_INSN (insn);
867 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
870 result = find_single_use_1 (dest, &PATTERN (next));
/* REG case: log links / death notes are only trustworthy mid-compilation.  */
877 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
880 for (next = next_nonnote_insn (insn);
881 next != 0 && GET_CODE (next) != CODE_LABEL;
882 next = next_nonnote_insn (next))
883 if (INSN_P (next) && dead_or_set_p (next, dest))
/* Require a LOG_LINK from the candidate user back to INSN.  */
885 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
886 if (XEXP (link, 0) == insn)
891 result = find_single_use_1 (dest, &PATTERN (next));
901 /* Return 1 if OP is a valid general operand for machine mode MODE.
902 This is either a register reference, a memory reference,
903 or a constant. In the case of a memory reference, the address
904 is checked for general validity for the target machine.
906 Register and memory references must have mode MODE in order to be valid,
907 but some constants have no machine mode and are valid for any mode.
909 If MODE is VOIDmode, OP is checked for validity for whatever mode
912 The main use of this function is as a predicate in match_operand
913 expressions in the machine description.
915 For an explanation of this function's behavior for registers of
916 class NO_REGS, see the comment for `register_operand'. */
/* NOTE(review): fragmentary definition — return type, braces, several
   `return 0'/`return 1' statements, the CONSTANT_P guard before the
   constant return, the SUBREG/REG/MEM dispatch structure and the `win:'
   label are missing from this view.  */
919 general_operand (op, mode)
921 enum machine_mode mode;
923 register enum rtx_code code = GET_CODE (op);
925 if (mode == VOIDmode)
926 mode = GET_MODE (op);
928 /* Don't accept CONST_INT or anything similar
929 if the caller wants something floating. */
930 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
931 && GET_MODE_CLASS (mode) != MODE_INT
932 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
/* Reject integer constants not representable in MODE.  */
935 if (GET_CODE (op) == CONST_INT
936 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
940 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
942 #ifdef LEGITIMATE_PIC_OPERAND_P
943 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
945 && LEGITIMATE_CONSTANT_P (op));
947 /* Except for certain constants with VOIDmode, already checked for,
948 OP's mode must match MODE if MODE specifies a mode. */
950 if (GET_MODE (op) != mode)
955 #ifdef INSN_SCHEDULING
956 /* On machines that have insn scheduling, we want all memory
957 reference to be explicit, so outlaw paradoxical SUBREGs. */
958 if (GET_CODE (SUBREG_REG (op)) == MEM
959 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
962 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
963 may result in incorrect reference. We should simplify all valid
964 subregs of MEM anyway. */
965 if (SUBREG_BYTE (op) && GET_CODE (SUBREG_REG (op)) == MEM)
/* Strip the SUBREG and re-dispatch on the inner rtx.  */
968 op = SUBREG_REG (op);
969 code = GET_CODE (op);
973 /* A register whose class is NO_REGS is not a general operand. */
974 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
975 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
979 register rtx y = XEXP (op, 0);
981 if (! volatile_ok && MEM_VOLATILE_P (op))
984 if (GET_CODE (y) == ADDRESSOF)
987 /* Use the mem's mode, since it will be reloaded thus. */
988 mode = GET_MODE (op);
989 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
992 /* Pretend this is an operand for now; we'll run force_operand
993 on its replacement in fixup_var_refs_1. */
994 if (code == ADDRESSOF)
1003 /* Return 1 if OP is a valid memory address for a memory reference
1006 The main use of this function is as a predicate in match_operand
1007 expressions in the machine description. */
1010 address_operand (op, mode)
1012 enum machine_mode mode;
1014 return memory_address_p (mode, op);
1017 /* Return 1 if OP is a register reference of mode MODE.
1018 If MODE is VOIDmode, accept a register in any mode.
1020 The main use of this function is as a predicate in match_operand
1021 expressions in the machine description.
1023 As a special exception, registers whose class is NO_REGS are
1024 not accepted by `register_operand'. The reason for this change
1025 is to allow the representation of special architecture artifacts
1026 (such as a condition code register) without extending the rtl
1027 definitions. Since registers of class NO_REGS cannot be used
1028 as registers in any case where register classes are examined,
1029 it is most consistent to keep this function from accepting them. */
/* NOTE(review): fragmentary definition — return type, OP declaration,
   braces, several `return 0' statements and the #endif closing the
   CLASS_CANNOT_CHANGE_MODE conditional are missing from this view.  */
1032 register_operand (op, mode)
1034 enum machine_mode mode;
1036 if (GET_MODE (op) != mode && mode != VOIDmode)
1039 if (GET_CODE (op) == SUBREG)
1041 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1042 because it is guaranteed to be reloaded into one.
1043 Just make sure the MEM is valid in itself.
1044 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1045 but currently it does result from (SUBREG (REG)...) where the
1046 reg went on the stack.) */
1047 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1048 return general_operand (op, mode);
1050 #ifdef CLASS_CANNOT_CHANGE_MODE
/* Reject mode-changing SUBREGs of hard registers whose class forbids it.  */
1051 if (GET_CODE (SUBREG_REG (op)) == REG
1052 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1053 && (TEST_HARD_REG_BIT
1054 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
1055 REGNO (SUBREG_REG (op))))
1056 && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
1057 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1058 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
/* Otherwise judge the inner register.  */
1062 op = SUBREG_REG (op);
1065 /* If we have an ADDRESSOF, consider it valid since it will be
1066 converted into something that will not be a MEM. */
1067 if (GET_CODE (op) == ADDRESSOF)
1070 /* We don't consider registers whose class is NO_REGS
1071 to be a register operand. */
1072 return (GET_CODE (op) == REG
1073 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1074 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1077 /* Return 1 for a register in Pmode; ignore the tested mode. */
1080 pmode_register_operand (op, mode)
1082 enum machine_mode mode ATTRIBUTE_UNUSED;
1084 return register_operand (op, Pmode);
1087 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1088 or a hard register. */
1091 scratch_operand (op, mode)
1093 enum machine_mode mode;
1095 if (GET_MODE (op) != mode && mode != VOIDmode)
1098 return (GET_CODE (op) == SCRATCH
1099 || (GET_CODE (op) == REG
1100 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1103 /* Return 1 if OP is a valid immediate operand for mode MODE.
1105 The main use of this function is as a predicate in match_operand
1106 expressions in the machine description. */
1109 immediate_operand (op, mode)
1111 enum machine_mode mode;
1113 /* Don't accept CONST_INT or anything similar
1114 if the caller wants something floating. */
1115 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1116 && GET_MODE_CLASS (mode) != MODE_INT
1117 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1120 if (GET_CODE (op) == CONST_INT
1121 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1124 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1125 result in 0/1. It seems a safe assumption that this is
1126 in range for everyone. */
1127 if (GET_CODE (op) == CONSTANT_P_RTX)
1130 return (CONSTANT_P (op)
1131 && (GET_MODE (op) == mode || mode == VOIDmode
1132 || GET_MODE (op) == VOIDmode)
1133 #ifdef LEGITIMATE_PIC_OPERAND_P
1134 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1136 && LEGITIMATE_CONSTANT_P (op));
1139 /* Returns 1 if OP is an operand that is a CONST_INT. */
1142 const_int_operand (op, mode)
1144 enum machine_mode mode ATTRIBUTE_UNUSED;
1146 return GET_CODE (op) == CONST_INT;
1149 /* Returns 1 if OP is an operand that is a constant integer or constant
1150 floating-point number. */
1153 const_double_operand (op, mode)
1155 enum machine_mode mode;
1157 /* Don't accept CONST_INT or anything similar
1158 if the caller wants something floating. */
1159 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1160 && GET_MODE_CLASS (mode) != MODE_INT
1161 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1164 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1165 && (mode == VOIDmode || GET_MODE (op) == mode
1166 || GET_MODE (op) == VOIDmode));
1169 /* Return 1 if OP is a general operand that is not an immediate operand. */
1172 nonimmediate_operand (op, mode)
1174 enum machine_mode mode;
1176 return (general_operand (op, mode) && ! CONSTANT_P (op));
1179 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1182 nonmemory_operand (op, mode)
1184 enum machine_mode mode;
1186 if (CONSTANT_P (op))
1188 /* Don't accept CONST_INT or anything similar
1189 if the caller wants something floating. */
1190 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1191 && GET_MODE_CLASS (mode) != MODE_INT
1192 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1195 if (GET_CODE (op) == CONST_INT
1196 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1199 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1200 || mode == VOIDmode)
1201 #ifdef LEGITIMATE_PIC_OPERAND_P
1202 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1204 && LEGITIMATE_CONSTANT_P (op));
1207 if (GET_MODE (op) != mode && mode != VOIDmode)
1210 if (GET_CODE (op) == SUBREG)
1212 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1213 because it is guaranteed to be reloaded into one.
1214 Just make sure the MEM is valid in itself.
1215 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1216 but currently it does result from (SUBREG (REG)...) where the
1217 reg went on the stack.) */
1218 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1219 return general_operand (op, mode);
1220 op = SUBREG_REG (op);
1223 /* We don't consider registers whose class is NO_REGS
1224 to be a register operand. */
1225 return (GET_CODE (op) == REG
1226 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1227 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1230 /* Return 1 if OP is a valid operand that stands for pushing a
1231 value of mode MODE onto the stack.
1233 The main use of this function is as a predicate in match_operand
1234 expressions in the machine description. */
/* NOTE(review): fragmentary definition — return type, OP declaration,
   braces, the statement stripping the MEM (`op = XEXP (op, 0);'), several
   `return 0' statements, the #else/#endif pairs and the else-branch
   structure are missing from this view.  */
1237 push_operand (op, mode)
1239 enum machine_mode mode;
1241 unsigned int rounded_size = GET_MODE_SIZE (mode);
1243 #ifdef PUSH_ROUNDING
1244 rounded_size = PUSH_ROUNDING (rounded_size);
1247 if (GET_CODE (op) != MEM)
1250 if (mode != VOIDmode && GET_MODE (op) != mode)
/* Simple case: no padding, so a plain PRE_DEC/PRE_INC push suffices.  */
1255 if (rounded_size == GET_MODE_SIZE (mode))
1257 if (GET_CODE (op) != STACK_PUSH_CODE)
/* Padded case: the push must be a PRE_MODIFY adjusting the stack pointer
   by exactly the rounded size, in the direction of stack growth.  */
1262 if (GET_CODE (op) != PRE_MODIFY
1263 || GET_CODE (XEXP (op, 1)) != PLUS
1264 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1265 || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
1266 #ifdef STACK_GROWS_DOWNWARD
1267 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1269 || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
1275 return XEXP (op, 0) == stack_pointer_rtx;
1278 /* Return 1 if OP is a valid operand that stands for popping a
1279 value of mode MODE off the stack.
1281 The main use of this function is as a predicate in match_operand
1282 expressions in the machine description. */
1285 pop_operand (op, mode)
1287 enum machine_mode mode;
1289 if (GET_CODE (op) != MEM)
1292 if (mode != VOIDmode && GET_MODE (op) != mode)
1297 if (GET_CODE (op) != STACK_POP_CODE)
1300 return XEXP (op, 0) == stack_pointer_rtx;
1303 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1306 memory_address_p (mode, addr)
1307 enum machine_mode mode ATTRIBUTE_UNUSED;
1310 if (GET_CODE (addr) == ADDRESSOF)
1313 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
/* NOTE(review): listing incomplete -- embedded line numbers jump.  */
1320 /* Return 1 if OP is a valid memory reference with mode MODE,
1321 including a valid address.
1323 The main use of this function is as a predicate in match_operand
1324 expressions in the machine description. */
1327 memory_operand (op, mode)
1329 enum machine_mode mode;
1333 if (! reload_completed)
1334 /* Note that no SUBREG is a memory operand before end of reload pass,
1335 because (SUBREG (MEM...)) forces reloading into a register. */
1336 return GET_CODE (op) == MEM && general_operand (op, mode);
1338 if (mode != VOIDmode && GET_MODE (op) != mode)
/* After reload, look through a SUBREG wrapper to find the MEM.
   (The declaration/initialization of `inner' is elided from this listing.)  */
1342 if (GET_CODE (inner) == SUBREG)
1343 inner = SUBREG_REG (inner);
1345 return (GET_CODE (inner) == MEM && general_operand (op, mode));
/* NOTE(review): listing incomplete -- embedded line numbers jump.  */
1348 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1349 that is, a memory reference whose address is a general_operand. */
1352 indirect_operand (op, mode)
1354 enum machine_mode mode;
1356 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1357 if (! reload_completed
1358 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1360 register int offset = SUBREG_BYTE (op);
1361 rtx inner = SUBREG_REG (op);
1363 if (mode != VOIDmode && GET_MODE (op) != mode)
1366 /* The only way that we can have a general_operand as the resulting
1367 address is if OFFSET is zero and the address already is an operand
1368 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1371 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1372 || (GET_CODE (XEXP (inner, 0)) == PLUS
1373 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1374 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1375 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
/* Non-SUBREG case: a MEM whose address is itself a general_operand.  */
1378 return (GET_CODE (op) == MEM
1379 && memory_operand (op, mode)
1380 && general_operand (XEXP (op, 0), Pmode));
/* NOTE(review): listing incomplete -- return type and parameter
   declaration for OP are elided.  */
1383 /* Return 1 if this is a comparison operator. This allows the use of
1384 MATCH_OPERATOR to recognize all the branch insns. */
1387 comparison_operator (op, mode)
1389 enum machine_mode mode;
/* RTX class '<' marks all comparison codes (EQ, NE, LT, ...).  */
1391 return ((mode == VOIDmode || GET_MODE (op) == mode)
1392 && GET_RTX_CLASS (GET_CODE (op)) == '<');
/* NOTE(review): asm_noperands -- counts operands of an inline-asm insn
   body, or -1 if BODY is not an asm.  Listing incomplete: case labels,
   returns and the declarations of `i'/`n_sets' are elided.  */
1395 /* If BODY is an insn body that uses ASM_OPERANDS,
1396 return the number of operands (both input and output) in the insn.
1397 Otherwise return -1. */
1400 asm_noperands (body)
1403 switch (GET_CODE (body))
1406 /* No output operands: return number of input operands. */
1407 return ASM_OPERANDS_INPUT_LENGTH (body);
1409 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1410 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1411 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1415 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1416 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1418 /* Multiple output operands, or 1 output plus some clobbers:
1419 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1423 /* Count backwards through CLOBBERs to determine number of SETs. */
1424 for (i = XVECLEN (body, 0); i > 0; i--)
1426 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1428 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1432 /* N_SETS is now number of output operands. */
1435 /* Verify that all the SETs we have
1436 came from a single original asm_operands insn
1437 (so that invalid combinations are blocked). */
1438 for (i = 0; i < n_sets; i++)
1440 rtx elt = XVECEXP (body, 0, i);
1441 if (GET_CODE (elt) != SET)
1443 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1445 /* If these ASM_OPERANDS rtx's came from different original insns
1446 then they aren't allowed together. */
1447 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1448 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
/* Total = inputs of the shared ASM_OPERANDS + number of SETs
   (the `+ n_sets' term is elided from this listing).  */
1451 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1454 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1456 /* 0 outputs, but some clobbers:
1457 body is [(asm_operands ...) (clobber (reg ...))...]. */
1460 /* Make sure all the other parallel things really are clobbers. */
1461 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1462 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1465 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
/* NOTE(review): decode_asm_operands -- unpacks an asm insn body into the
   caller-supplied OPERANDS / OPERAND_LOCS / CONSTRAINTS / MODES vectors
   and returns the assembler template string.  Listing incomplete: the
   declarations of `i'/`noperands', several `if (operands)'-style guards,
   braces and the final return are elided.  */
1474 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1475 copy its operands (both input and output) into the vector OPERANDS,
1476 the locations of the operands within the insn into the vector OPERAND_LOCS,
1477 and the constraints for the operands into CONSTRAINTS.
1478 Write the modes of the operands into MODES.
1479 Return the assembler-template.
1481 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1482 we don't store that info. */
1485 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1489 const char **constraints;
1490 enum machine_mode *modes;
1494 const char *template = 0;
/* Case 1: single output -- (set DEST (asm_operands ...)).  */
1496 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1498 rtx asmop = SET_SRC (body);
1499 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1501 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
/* Inputs occupy slots 1..noperands-1; slot 0 is the output below.  */
1503 for (i = 1; i < noperands; i++)
1506 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1508 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1510 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1512 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1515 /* The output is in the SET.
1516 Its constraint is in the ASM_OPERANDS itself. */
1518 operands[0] = SET_DEST (body);
1520 operand_locs[0] = &SET_DEST (body);
1522 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1524 modes[0] = GET_MODE (SET_DEST (body));
1525 template = ASM_OPERANDS_TEMPLATE (asmop);
/* Case 2: no outputs at all -- bare (asm_operands ...).  */
1527 else if (GET_CODE (body) == ASM_OPERANDS)
1530 /* No output operands: BODY is (asm_operands ....). */
1532 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1534 /* The input operands are found in the 1st element vector. */
1535 /* Constraints for inputs are in the 2nd element vector. */
1536 for (i = 0; i < noperands; i++)
1539 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1541 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1543 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1545 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1547 template = ASM_OPERANDS_TEMPLATE (asmop);
/* Case 3: PARALLEL of SETs (multiple outputs) plus optional CLOBBERs.  */
1549 else if (GET_CODE (body) == PARALLEL
1550 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1552 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1553 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1554 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1555 int nout = 0; /* Does not include CLOBBERs. */
1557 /* At least one output, plus some CLOBBERs. */
1559 /* The outputs are in the SETs.
1560 Their constraints are in the ASM_OPERANDS itself. */
1561 for (i = 0; i < nparallel; i++)
1563 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1564 break; /* Past last SET */
1567 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1569 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1571 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1573 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
/* Inputs follow the outputs, offset by NOUT.  */
1577 for (i = 0; i < nin; i++)
1580 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1582 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1584 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1586 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1589 template = ASM_OPERANDS_TEMPLATE (asmop);
/* Case 4: PARALLEL with no outputs -- (asm_operands ...) + CLOBBERs.  */
1591 else if (GET_CODE (body) == PARALLEL
1592 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1594 /* No outputs, but some CLOBBERs. */
1596 rtx asmop = XVECEXP (body, 0, 0);
1597 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1599 for (i = 0; i < nin; i++)
1602 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1604 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1606 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1608 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1611 template = ASM_OPERANDS_TEMPLATE (asmop);
/* NOTE(review): asm_operand_ok -- checks one operand against one asm
   constraint string.  Listing incomplete: the `switch (c)' statement,
   most `case' labels, `break's and `result' bookkeeping are elided, so
   which test belongs to which constraint letter must be inferred from
   the surviving comments.  Typo preserved below: "it's" should be "its".  */
1617 /* Check if an asm_operand matches it's constraints.
1618 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1621 asm_operand_ok (op, constraint)
1623 const char *constraint;
1627 /* Use constrain_operands after reload. */
1628 if (reload_completed)
1633 char c = *constraint++;
1647 case '0': case '1': case '2': case '3': case '4':
1648 case '5': case '6': case '7': case '8': case '9':
1649 /* For best results, our caller should have given us the
1650 proper matching constraint, but we can't actually fail
1651 the check if they didn't. Indicate that results are
/* 'p' (address) constraint -- presumably; confirm against full source.  */
1657 if (address_operand (op, VOIDmode))
1662 case 'V': /* non-offsettable */
1663 if (memory_operand (op, VOIDmode))
1667 case 'o': /* offsettable */
1668 if (offsettable_nonstrict_memref_p (op))
1673 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1674 excepting those that expand_call created. Further, on some
1675 machines which do not have generalized auto inc/dec, an inc/dec
1676 is not a memory_operand.
1678 Match any memory and hope things are resolved after reload. */
/* '<' constraint: pre/post-decrement memory.  */
1680 if (GET_CODE (op) == MEM
1682 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1683 || GET_CODE (XEXP (op, 0)) == POST_DEC))
/* '>' constraint: pre/post-increment memory.  */
1688 if (GET_CODE (op) == MEM
1690 || GET_CODE (XEXP (op, 0)) == PRE_INC
1691 || GET_CODE (XEXP (op, 0)) == POST_INC))
1696 #ifndef REAL_ARITHMETIC
1697 /* Match any floating double constant, but only if
1698 we can examine the bits of it reliably. */
1699 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1700 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1701 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1707 if (GET_CODE (op) == CONST_DOUBLE)
/* 'G'/'H': machine-dependent CONST_DOUBLE classes.  */
1712 if (GET_CODE (op) == CONST_DOUBLE
1713 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1717 if (GET_CODE (op) == CONST_DOUBLE
1718 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
/* 's'/'i'-family: integer constants (VOIDmode CONST_DOUBLE counts as a
   wide integer constant).  */
1723 if (GET_CODE (op) == CONST_INT
1724 || (GET_CODE (op) == CONST_DOUBLE
1725 && GET_MODE (op) == VOIDmode))
1731 #ifdef LEGITIMATE_PIC_OPERAND_P
1732 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1739 if (GET_CODE (op) == CONST_INT
1740 || (GET_CODE (op) == CONST_DOUBLE
1741 && GET_MODE (op) == VOIDmode))
/* 'I'..'P': machine-dependent integer ranges.  */
1746 if (GET_CODE (op) == CONST_INT
1747 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1751 if (GET_CODE (op) == CONST_INT
1752 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1756 if (GET_CODE (op) == CONST_INT
1757 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1761 if (GET_CODE (op) == CONST_INT
1762 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1766 if (GET_CODE (op) == CONST_INT
1767 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1771 if (GET_CODE (op) == CONST_INT
1772 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1776 if (GET_CODE (op) == CONST_INT
1777 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1781 if (GET_CODE (op) == CONST_INT
1782 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
/* 'g' (general operand).  */
1790 if (general_operand (op, VOIDmode))
1795 /* For all other letters, we first check for a register class,
1796 otherwise it is an EXTRA_CONSTRAINT. */
1797 if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
1800 if (GET_MODE (op) == BLKmode)
1802 if (register_operand (op, VOIDmode))
1805 #ifdef EXTRA_CONSTRAINT
1806 if (EXTRA_CONSTRAINT (op, c))
/* NOTE(review): recursive search for the constant term of a PLUS chain.
   Listing incomplete: the `static rtx *' return type, declaration of
   `tem', returns, and the final NULL return are elided.  */
1816 /* Given an rtx *P, if it is a sum containing an integer constant term,
1817 return the location (type rtx *) of the pointer to that constant term.
1818 Otherwise, return a null pointer. */
1821 find_constant_term_loc (p)
1825 register enum rtx_code code = GET_CODE (*p);
1827 /* If *P IS such a constant term, P is its location. */
1829 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1833 /* Otherwise, if not a sum, it has no constant term. */
1835 if (GET_CODE (*p) != PLUS)
1838 /* If one of the summands is constant, return its location. */
1840 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1841 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1844 /* Otherwise, check each summand for containing a constant term. */
1846 if (XEXP (*p, 0) != 0)
1848 tem = find_constant_term_loc (&XEXP (*p, 0));
1853 if (XEXP (*p, 1) != 0)
1855 tem = find_constant_term_loc (&XEXP (*p, 1));
/* NOTE(review): strict-mode wrapper around offsettable_address_p.
   Listing incomplete: return type and OP's declaration are elided.  */
1863 /* Return 1 if OP is a memory reference
1864 whose address contains no side effects
1865 and remains valid after the addition
1866 of a positive integer less than the
1867 size of the object being referenced.
1869 We assume that the original address is valid and do not check it.
1871 This uses strict_memory_address_p as a subroutine, so
1872 don't use it before reload. */
1875 offsettable_memref_p (op)
1878 return ((GET_CODE (op) == MEM)
1879 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
/* NOTE(review): non-strict variant -- passes strictp == 0 so pseudo
   registers are acceptable as base/index.  */
1882 /* Similar, but don't require a strictly valid mem ref:
1883 consider pseudo-regs valid as index or base regs. */
1886 offsettable_nonstrict_memref_p (op)
1889 return ((GET_CODE (op) == MEM)
1890 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
/* NOTE(review): tests whether address Y stays legitimate when offset by
   up to GET_MODE_SIZE (mode) - 1.  Listing incomplete: declarations of
   `z', `good', `old' and several statements (saving/restoring *y2, the
   BLKmode test guarding line 1931, returns) are elided.  */
1893 /* Return 1 if Y is a memory address which contains no side effects
1894 and would remain valid after the addition of a positive integer
1895 less than the size of that mode.
1897 We assume that the original address is valid and do not check it.
1898 We do check that it is valid for narrower modes.
1900 If STRICTP is nonzero, we require a strictly valid address,
1901 for the sake of use in reload.c. */
1904 offsettable_address_p (strictp, mode, y)
1906 enum machine_mode mode;
1909 register enum rtx_code ycode = GET_CODE (y);
/* Select strict or non-strict validity check once, up front.  */
1913 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1914 (strictp ? strict_memory_address_p : memory_address_p);
1915 unsigned int mode_sz = GET_MODE_SIZE (mode);
1917 if (CONSTANT_ADDRESS_P (y))
1920 /* Adjusting an offsettable address involves changing to a narrower mode.
1921 Make sure that's OK. */
1923 if (mode_dependent_address_p (y))
1926 /* ??? How much offset does an offsettable BLKmode reference need?
1927 Clearly that depends on the situation in which it's being used.
1928 However, the current situation in which we test 0xffffffff is
1929 less than ideal. Caveat user. */
1931 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1933 /* If the expression contains a constant term,
1934 see if it remains valid when max possible offset is added. */
1936 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
/* Temporarily bump the constant term in place, test, then restore.  */
1941 *y2 = plus_constant (*y2, mode_sz - 1);
1942 /* Use QImode because an odd displacement may be automatically invalid
1943 for any wider mode. But it should be valid for a single byte. */
1944 good = (*addressp) (QImode, y);
1946 /* In any case, restore old contents of memory. */
/* RTX class 'a' = autoincrement address -- never offsettable.  */
1951 if (GET_RTX_CLASS (ycode) == 'a')
1954 /* The offset added here is chosen as the maximum offset that
1955 any instruction could need to add when operating on something
1956 of the specified mode. We assume that if Y and Y+c are
1957 valid addresses then so is Y+d for all 0<d<c. */
1959 z = plus_constant_for_output (y, mode_sz - 1);
1961 /* Use QImode because an odd displacement may be automatically invalid
1962 for any wider mode. But it should be valid for a single byte. */
1963 return (*addressp) (QImode, z);
/* NOTE(review): delegates entirely to the target macro; falls through
   to `return 0' (elided) unless the macro jumps to `win'.  */
1966 /* Return 1 if ADDR is an address-expression whose effect depends
1967 on the mode of the memory reference it is used in.
1969 Autoincrement addressing is a typical example of mode-dependence
1970 because the amount of the increment depends on the mode. */
1973 mode_dependent_address_p (addr)
1974 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
1976 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1978 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1979 win: ATTRIBUTE_UNUSED_LABEL
/* NOTE(review): general_operand that is either not a MEM or a MEM whose
   address is not mode-dependent.  Returns (elided) after the `lose' label
   handle the failure path.  */
1983 /* Return 1 if OP is a general operand
1984 other than a memory ref with a mode dependent address. */
1987 mode_independent_operand (op, mode)
1988 enum machine_mode mode;
1993 if (! general_operand (op, mode))
1996 if (GET_CODE (op) != MEM)
1999 addr = XEXP (op, 0);
2000 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2002 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2003 lose: ATTRIBUTE_UNUSED_LABEL
/* NOTE(review): builds a new MEM whose address is OP's address plus
   OFFSET, preserving memory attributes.  Listing incomplete: abort paths,
   `z'/`new' declarations, copies and returns are elided.  */
2007 /* Given an operand OP that is a valid memory reference which
2008 satisfies offsettable_memref_p, return a new memory reference whose
2009 address has been adjusted by OFFSET. OFFSET should be positive and
2010 less than the size of the object referenced. */
2013 adj_offsettable_operand (op, offset)
2017 register enum rtx_code code = GET_CODE (op);
2021 register rtx y = XEXP (op, 0);
/* Simplest case: a wholly-constant address just gets OFFSET folded in.  */
2024 if (CONSTANT_ADDRESS_P (y))
2026 new = gen_rtx_MEM (GET_MODE (op),
2027 plus_constant_for_output (y, offset));
2028 MEM_COPY_ATTRIBUTES (new, op);
/* PLUS address: fold OFFSET into the existing constant term in place.  */
2032 if (GET_CODE (y) == PLUS)
2035 register rtx *const_loc;
2039 const_loc = find_constant_term_loc (&z);
2042 *const_loc = plus_constant_for_output (*const_loc, offset);
/* Fallback: wrap the whole address in a fresh (plus y offset).  */
2047 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
2048 MEM_COPY_ATTRIBUTES (new, op);
/* NOTE(review): caching front end -- the early-return on a cache hit
   (after the `if' at 2062) is elided from this listing.  */
2054 /* Like extract_insn, but save insn extracted and don't extract again, when
2055 called again for the same insn expecting that recog_data still contain the
2056 valid information. This is used primary by gen_attr infrastructure that
2057 often does extract insn again and again. */
2059 extract_insn_cached (insn)
2062 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2064 extract_insn (insn);
2065 recog_data.insn = insn;
/* NOTE(review): extract (cached) then constrain; dies via
   fatal_insn_not_found if no alternative matches.  */
2067 /* Do cached extract_insn, constrain_operand and complain about failures.
2068 Used by insn_attrtab. */
2070 extract_constrain_insn_cached (insn)
2073 extract_insn_cached (insn);
2074 if (which_alternative == -1
2075 && !constrain_operands (reload_completed))
2076 fatal_insn_not_found (insn);
/* NOTE(review): only re-runs constrain_operands when no alternative has
   been chosen yet; the cache-hit return (presumably `return 1') is elided.  */
2078 /* Do cached constrain_operand and complain about failures. */
2080 constrain_operands_cached (strict)
2083 if (which_alternative == -1)
2084 return constrain_operands (strict);
/* NOTE(review): extract_insn -- populates the global recog_data from
   INSN's pattern: operand values/locations, constraints, modes, counts,
   and operand types.  Listing incomplete: the function header, the
   switch's case labels (INSN codes, ASM_INPUT, PARALLEL, default),
   declarations of `i'/`icode'/`noperands', and abort calls are elided.  */
2089 /* Analyze INSN and fill in recog_data. */
2098 rtx body = PATTERN (insn);
/* Reset all cached recognition state before analyzing.  */
2100 recog_data.insn = NULL;
2101 recog_data.n_operands = 0;
2102 recog_data.n_alternatives = 0;
2103 recog_data.n_dups = 0;
2104 which_alternative = -1;
2106 switch (GET_CODE (body))
/* SET whose source is ASM_OPERANDS, or a PARALLEL beginning with one,
   marks an asm-with-operands insn (handled at 2129 below).  */
2116 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2121 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2122 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2123 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2129 recog_data.n_operands = noperands = asm_noperands (body);
2132 /* This insn is an `asm' with operands. */
2134 /* expand_asm_operands makes sure there aren't too many operands. */
2135 if (noperands > MAX_RECOG_OPERANDS)
2138 /* Now get the operand values and constraints out of the insn. */
2139 decode_asm_operands (body, recog_data.operand,
2140 recog_data.operand_loc,
2141 recog_data.constraints,
2142 recog_data.operand_mode);
/* Count alternatives by counting commas in operand 0's constraint.  */
2145 const char *p = recog_data.constraints[0];
2146 recog_data.n_alternatives = 1;
2148 recog_data.n_alternatives += (*p++ == ',');
2152 fatal_insn_not_found (insn);
2156 /* Ordinary insn: recognize it, get the operands via insn_extract
2157 and get the constraints. */
2159 icode = recog_memoized (insn);
2161 fatal_insn_not_found (insn);
2163 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2164 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2165 recog_data.n_dups = insn_data[icode].n_dups;
2167 insn_extract (insn);
2169 for (i = 0; i < noperands; i++)
2171 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2172 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2173 /* VOIDmode match_operands gets mode from their real operand. */
2174 if (recog_data.operand_mode[i] == VOIDmode)
2175 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
/* Classify each operand from its leading constraint character:
   '=' output, '+' in/out, otherwise input (OP_IN elided below).  */
2178 for (i = 0; i < noperands; i++)
2179 recog_data.operand_type[i]
2180 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2181 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2184 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
/* NOTE(review): preprocess_constraints -- digests recog_data's constraint
   strings into the recog_op_alt table, one operand_alternative per
   (operand, alternative) pair.  Listing incomplete: declarations of
   `i'/`j'/`c', the inner do/switch scaffolding, break statements and
   several case labels ('?', '!', '&', 'm', '<', '>', 'V', 'o', 'g'/'r',
   'p', default) are elided; which flag line pairs with which letter is
   inferred from the flag names.  */
2188 /* After calling extract_insn, you can use this function to extract some
2189 information from the constraint strings into a more usable form.
2190 The collected data is stored in recog_op_alt. */
2192 preprocess_constraints ()
2196 memset (recog_op_alt, 0, sizeof recog_op_alt);
2197 for (i = 0; i < recog_data.n_operands; i++)
2200 struct operand_alternative *op_alt;
2201 const char *p = recog_data.constraints[i];
2203 op_alt = recog_op_alt[i];
2205 for (j = 0; j < recog_data.n_alternatives; j++)
2207 op_alt[j].class = NO_REGS;
2208 op_alt[j].constraint = p;
2209 op_alt[j].matches = -1;
2210 op_alt[j].matched = -1;
/* Empty constraint/alternative accepts anything.  */
2212 if (*p == '\0' || *p == ',')
2214 op_alt[j].anything_ok = 1;
2224 while (c != ',' && c != '\0');
2225 if (c == ',' || c == '\0')
2230 case '=': case '+': case '*': case '%':
2231 case 'E': case 'F': case 'G': case 'H':
2232 case 's': case 'i': case 'n':
2233 case 'I': case 'J': case 'K': case 'L':
2234 case 'M': case 'N': case 'O': case 'P':
2235 /* These don't say anything we care about. */
/* Reject weights: presumably '?' adds 6 and '!' adds 600 -- confirm.  */
2239 op_alt[j].reject += 6;
2242 op_alt[j].reject += 600;
2245 op_alt[j].earlyclobber = 1;
2248 case '0': case '1': case '2': case '3': case '4':
2249 case '5': case '6': case '7': case '8': case '9':
/* Matching constraint: record the pairing in both directions.  */
2250 op_alt[j].matches = c - '0';
2251 recog_op_alt[op_alt[j].matches][j].matched = i;
2255 op_alt[j].memory_ok = 1;
2258 op_alt[j].decmem_ok = 1;
2261 op_alt[j].incmem_ok = 1;
2264 op_alt[j].nonoffmem_ok = 1;
2267 op_alt[j].offmem_ok = 1;
2270 op_alt[j].anything_ok = 1;
/* 'p' (address): base-register class; 'g'/'r': GENERAL_REGS; any other
   letter: the machine-defined register class.  */
2274 op_alt[j].is_address = 1;
2275 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2279 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2283 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
/* NOTE(review): constrain_operands -- the main constraint-matching loop
   over alternatives.  Listing is heavily elided: the function header,
   `struct funny_match' definition, declarations (opno, c, val, win,
   lose labels), the outer do-loop opening, most case labels of the inner
   switch ('p', 'g', 'r', 'X', 'm', '<', '>', 'E', 'F', 'G'/'H', 's',
   'i', 'n', 'I'..'P', 'V', 'o', default), `win = 1' assignments, and the
   success/failure returns are all missing.  Comments below annotate only
   what survives.  */
2291 /* Check the operands of an insn against the insn's operand constraints
2292 and return 1 if they are valid.
2293 The information about the insn's operands, constraints, operand modes
2294 etc. is obtained from the global variables set up by extract_insn.
2296 WHICH_ALTERNATIVE is set to a number which indicates which
2297 alternative of constraints was matched: 0 for the first alternative,
2298 1 for the next, etc.
2300 In addition, when two operands are match
2301 and it happens that the output operand is (reg) while the
2302 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2303 make the output operand look like the input.
2304 This is because the output operand is the one the template will print.
2306 This is used in final, just before printing the assembler code and by
2307 the routines that determine an insn's attribute.
2309 If STRICT is a positive non-zero value, it means that we have been
2310 called after reload has been completed. In that case, we must
2311 do all checks strictly. If it is zero, it means that we have been called
2312 before reload has completed. In that case, we first try to see if we can
2313 find an alternative that matches strictly. If not, we try again, this
2314 time assuming that reload will fix up the insn. This provides a "best
2315 guess" for the alternative and is used to compute attributes of insns prior
2316 to reload. A negative value of STRICT is used for this internal call. */
2324 constrain_operands (strict)
2327 const char *constraints[MAX_RECOG_OPERANDS];
2328 int matching_operands[MAX_RECOG_OPERANDS];
2329 int earlyclobber[MAX_RECOG_OPERANDS];
2332 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2333 int funny_match_index;
2335 which_alternative = 0;
/* Nothing to check: trivially valid.  */
2336 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2339 for (c = 0; c < recog_data.n_operands; c++)
2341 constraints[c] = recog_data.constraints[c];
2342 matching_operands[c] = -1;
2349 funny_match_index = 0;
/* Per-alternative pass: test each operand against its constraint.  */
2351 for (opno = 0; opno < recog_data.n_operands; opno++)
2353 register rtx op = recog_data.operand[opno];
2354 enum machine_mode mode = GET_MODE (op);
2355 register const char *p = constraints[opno];
2360 earlyclobber[opno] = 0;
2362 /* A unary operator may be accepted by the predicate, but it
2363 is irrelevant for matching constraints. */
2364 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2367 if (GET_CODE (op) == SUBREG)
2369 if (GET_CODE (SUBREG_REG (op)) == REG
2370 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2371 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2372 GET_MODE (SUBREG_REG (op)),
2375 op = SUBREG_REG (op);
2378 /* An empty constraint or empty alternative
2379 allows anything which matched the pattern. */
2380 if (*p == 0 || *p == ',')
2383 while (*p && (c = *p++) != ',')
2386 case '?': case '!': case '*': case '%':
2391 /* Ignore rest of this alternative as far as
2392 constraint checking is concerned. */
2393 while (*p && *p != ',')
2398 earlyclobber[opno] = 1;
2401 case '0': case '1': case '2': case '3': case '4':
2402 case '5': case '6': case '7': case '8': case '9':
2404 /* This operand must be the same as a previous one.
2405 This kind of constraint is used for instructions such
2406 as add when they take only two operands.
2408 Note that the lower-numbered operand is passed first.
2410 If we are not testing strictly, assume that this constraint
2411 will be satisfied. */
2416 rtx op1 = recog_data.operand[c - '0'];
2417 rtx op2 = recog_data.operand[opno];
2419 /* A unary operator may be accepted by the predicate,
2420 but it is irrelevant for matching constraints. */
2421 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2422 op1 = XEXP (op1, 0);
2423 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2424 op2 = XEXP (op2, 0);
2426 val = operands_match_p (op1, op2);
2429 matching_operands[opno] = c - '0';
2430 matching_operands[c - '0'] = opno;
2434 /* If output is *x and input is *--x,
2435 arrange later to change the output to *--x as well,
2436 since the output op is the one that will be printed. */
2437 if (val == 2 && strict > 0)
2439 funny_match[funny_match_index].this = opno;
2440 funny_match[funny_match_index++].other = c - '0';
2445 /* p is used for address_operands. When we are called by
2446 gen_reload, no one will have checked that the address is
2447 strictly valid, i.e., that all pseudos requiring hard regs
2448 have gotten them. */
2450 || (strict_memory_address_p (recog_data.operand_mode[opno],
2455 /* No need to check general_operand again;
2456 it was done in insn-recog.c. */
2458 /* Anything goes unless it is a REG and really has a hard reg
2459 but the hard reg is not in the class GENERAL_REGS. */
2461 || GENERAL_REGS == ALL_REGS
2462 || GET_CODE (op) != REG
2463 || (reload_in_progress
2464 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2465 || reg_fits_class_p (op, GENERAL_REGS, offset, mode)
2470 /* This is used for a MATCH_SCRATCH in the cases when
2471 we don't actually need anything. So anything goes
/* 'm' (memory) constraint.  */
2477 if (GET_CODE (op) == MEM
2478 /* Before reload, accept what reload can turn into mem. */
2479 || (strict < 0 && CONSTANT_P (op))
2480 /* During reload, accept a pseudo */
2481 || (reload_in_progress && GET_CODE (op) == REG
2482 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* '<' / '>': auto-dec / auto-inc memory.  */
2487 if (GET_CODE (op) == MEM
2488 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2489 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2494 if (GET_CODE (op) == MEM
2495 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2496 || GET_CODE (XEXP (op, 0)) == POST_INC))
2501 #ifndef REAL_ARITHMETIC
2502 /* Match any CONST_DOUBLE, but only if
2503 we can examine the bits of it reliably. */
2504 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2505 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2506 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2509 if (GET_CODE (op) == CONST_DOUBLE)
2514 if (GET_CODE (op) == CONST_DOUBLE)
2520 if (GET_CODE (op) == CONST_DOUBLE
2521 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
/* 's'/'n'-style integer-constant constraints.  */
2526 if (GET_CODE (op) == CONST_INT
2527 || (GET_CODE (op) == CONST_DOUBLE
2528 && GET_MODE (op) == VOIDmode))
2531 if (CONSTANT_P (op))
2536 if (GET_CODE (op) == CONST_INT
2537 || (GET_CODE (op) == CONST_DOUBLE
2538 && GET_MODE (op) == VOIDmode))
/* 'I'..'P': machine-dependent integer ranges (letter in `c').  */
2550 if (GET_CODE (op) == CONST_INT
2551 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
/* 'V': memory that is NOT offsettable.  */
2556 if (GET_CODE (op) == MEM
2557 && ((strict > 0 && ! offsettable_memref_p (op))
2559 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2560 || (reload_in_progress
2561 && !(GET_CODE (op) == REG
2562 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
/* 'o': offsettable memory.  */
2567 if ((strict > 0 && offsettable_memref_p (op))
2568 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2569 /* Before reload, accept what reload can handle. */
2571 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2572 /* During reload, accept a pseudo */
2573 || (reload_in_progress && GET_CODE (op) == REG
2574 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* Default: register-class letters and EXTRA_CONSTRAINT.  */
2580 enum reg_class class;
2582 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2583 if (class != NO_REGS)
2587 && GET_CODE (op) == REG
2588 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2589 || (strict == 0 && GET_CODE (op) == SCRATCH)
2590 || (GET_CODE (op) == REG
2591 && reg_fits_class_p (op, class, offset, mode)))
2594 #ifdef EXTRA_CONSTRAINT
2595 else if (EXTRA_CONSTRAINT (op, c))
2602 constraints[opno] = p;
2603 /* If this operand did not win somehow,
2604 this alternative loses. */
2608 /* This alternative won; the operands are ok.
2609 Change whichever operands this alternative says to change. */
2614 /* See if any earlyclobber operand conflicts with some other
2618 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2619 /* Ignore earlyclobber operands now in memory,
2620 because we would often report failure when we have
2621 two memory operands, one of which was formerly a REG. */
2622 if (earlyclobber[eopno]
2623 && GET_CODE (recog_data.operand[eopno]) == REG)
2624 for (opno = 0; opno < recog_data.n_operands; opno++)
2625 if ((GET_CODE (recog_data.operand[opno]) == MEM
2626 || recog_data.operand_type[opno] != OP_OUT)
2628 /* Ignore things like match_operator operands. */
2629 && *recog_data.constraints[opno] != 0
2630 && ! (matching_operands[opno] == eopno
2631 && operands_match_p (recog_data.operand[opno],
2632 recog_data.operand[eopno]))
2633 && ! safe_from_earlyclobber (recog_data.operand[opno],
2634 recog_data.operand[eopno]))
/* Apply the deferred output-looks-like-input fixups recorded above.  */
2639 while (--funny_match_index >= 0)
2641 recog_data.operand[funny_match[funny_match_index].other]
2642 = recog_data.operand[funny_match[funny_match_index].this];
2649 which_alternative++;
2651 while (which_alternative < recog_data.n_alternatives);
2653 which_alternative = -1;
2654 /* If we are about to reject this, but we are not to test strictly,
2655 try a very loose test. Only return failure if it fails also. */
2657 return constrain_operands (-1);
/* NOTE(review): listing incomplete -- declarations of `sr', the loop
   condition/decrement, and the returns are elided.  */
2662 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2663 is a hard reg in class CLASS when its regno is offset by OFFSET
2664 and changed to mode MODE.
2665 If REG occupies multiple hard regs, all of them must be in CLASS. */
2668 reg_fits_class_p (operand, class, offset, mode)
2670 register enum reg_class class;
2672 enum machine_mode mode;
2674 register int regno = REGNO (operand);
2675 if (regno < FIRST_PSEUDO_REGISTER
2676 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
/* Walk every hard reg the value occupies in MODE; all must be in CLASS.  */
2681 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2683 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
/* NOTE(review): split_all_insns -- walks every insn of every basic block,
   deletes no-op moves after reload, splits everything else via try_split,
   then (optionally) recomputes life info for the changed blocks.  Listing
   incomplete: the declarations of `blocks'/`changed'/`i'/`insn'/`next'/
   `set', several `continue's, braces, `changed = 1', loop exits and
   abort() calls are elided.  */
2692 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2695 split_all_insns (upd_life)
2702 blocks = sbitmap_alloc (n_basic_blocks);
2703 sbitmap_zero (blocks);
2706 for (i = n_basic_blocks - 1; i >= 0; --i)
2708 basic_block bb = BASIC_BLOCK (i);
2711 for (insn = bb->head; insn ; insn = next)
2715 /* Can't use `next_real_insn' because that might go across
2716 CODE_LABELS and short-out basic blocks. */
2717 next = NEXT_INSN (insn);
2718 if (! INSN_P (insn))
2721 /* Don't split no-op move insns. These should silently
2722 disappear later in final. Splitting such insns would
2723 break the code that handles REG_NO_CONFLICT blocks. */
2725 else if ((set = single_set (insn)) != NULL
2726 && set_noop_p (set))
2728 /* Nops get in the way while scheduling, so delete them
2729 now if register allocation has already been done. It
2730 is too risky to try to do this before register
2731 allocation, and there are unlikely to be very many
2732 nops then anyways. */
2733 if (reload_completed)
/* Delete by turning the insn into a NOTE_INSN_DELETED note.  */
2735 PUT_CODE (insn, NOTE);
2736 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2737 NOTE_SOURCE_FILE (insn) = 0;
2742 /* Split insns here to get max fine-grain parallelism. */
2743 rtx first = PREV_INSN (insn);
2744 rtx last = try_split (PATTERN (insn), insn, 1);
/* Record which blocks changed so life info is refreshed only there.  */
2748 SET_BIT (blocks, i);
2751 /* try_split returns the NOTE that INSN became. */
2752 PUT_CODE (insn, NOTE);
2753 NOTE_SOURCE_FILE (insn) = 0;
2754 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2756 /* ??? Coddle to md files that generate subregs in post-
2757 reload splitters instead of computing the proper
2759 if (reload_completed && first != last)
2761 first = NEXT_INSN (first);
2765 cleanup_subreg_operands (first);
2768 first = NEXT_INSN (first);
2772 if (insn == bb->end)
2780 if (insn == bb->end)
2784 /* ??? When we're called from just after reload, the CFG is in bad
2785 shape, and we may have fallen off the end. This could be fixed
2786 by having reload not try to delete unreachable code. Otherwise
2787 assert we found the end insn. */
2788 if (insn == NULL && upd_life)
2792 if (changed && upd_life)
2794 compute_bb_for_insn (get_max_uid ());
2795 count_or_remove_death_notes (blocks, 1);
2796 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2799 sbitmap_free (blocks);
2802 #ifdef HAVE_peephole2
/* Per-slot record for the peephole2 scan.  NOTE(review): the struct
   members are missing from this extract; the uses below reference at
   least an `insn' rtx and a `live_before' regset -- TODO confirm
   against the original file.  */
2803 struct peep2_insn_data
/* Ring buffer holding the last MAX_INSNS_PER_PEEP2 insns seen plus one
   extra slot for end-of-block data; peep2_current is the ring index of
   the most recently recorded insn.  */
2809 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2810 static int peep2_current;
2812 /* A non-insn marker indicating the last insn of the block.
2813 The live_before regset for this element is correct, indicating
2814 global_live_at_end for the block. */
2815 #define PEEP2_EOB pc_rtx
2817 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2818 does not exist. Used by the recognizer to find the next insn to match
2819 in a multi-insn pattern. */
/* NOTE(review): the function header (name and parameter declarations)
   is missing from this extract; per the comment above this is
   presumably peep2_next_insn taking offset N.  The statement that adds
   peep2_current to N before the wrap also appears to be missing --
   TODO confirm against the original file.  */
2825 if (n >= MAX_INSNS_PER_PEEP2 + 1)
/* Wrap the ring-buffer index back into range.  */
2829 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2830 n -= MAX_INSNS_PER_PEEP2 + 1;
/* The end-of-block marker is not a real insn; the slot's insn is only
   returned when it is an actual insn.  */
2832 if (peep2_insn_data[n].insn == PEEP2_EOB)
2834 return peep2_insn_data[n].insn;
2837 /* Return true if REGNO is dead before the Nth non-note insn
/* NOTE(review): lossy extract -- the comment above is truncated, and
   the return type, parameter declarations, braces and the bodies of the
   two guard conditions are missing.  */
2841 peep2_regno_dead_p (ofs, regno)
/* Offsets past the ring buffer are invalid.  */
2845 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
/* Convert the caller-relative offset into a ring-buffer index.  */
2848 ofs += peep2_current;
2849 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2850 ofs -= MAX_INSNS_PER_PEEP2 + 1;
/* The slot must hold valid data (a recorded insn or the EOB marker).  */
2852 if (peep2_insn_data[ofs].insn == NULL_RTX)
/* Dead == absent from the registers live before that slot's insn.  */
2855 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2858 /* Similarly for a REG. */
/* NOTE(review): like peep2_regno_dead_p above, but REG may span several
   hard registers.  The declarations, guard bodies and the loop header
   that iterates over the N hard regs are missing from this extract --
   TODO confirm against the original file.  */
2861 peep2_reg_dead_p (ofs, reg)
2867 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
/* Convert the caller-relative offset into a ring-buffer index.  */
2870 ofs += peep2_current;
2871 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2872 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2874 if (peep2_insn_data[ofs].insn == NULL_RTX)
/* N is how many consecutive hard regs REG occupies in its mode; each
   of regno .. regno+n-1 is tested for liveness.  */
2877 regno = REGNO (reg);
2878 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2880 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2885 /* Try to find a hard register of mode MODE, matching the register class in
2886 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2887 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2888 in which case the only condition is that the register must be available
2889 before CURRENT_INSN.
2890 Registers that already have bits set in REG_SET will not be considered.
2892 If an appropriate register is available, it will be returned and the
2893 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
/* NOTE(review): lossy extract -- the return type, some declarations,
   braces and several statements (including loop headers and `continue'
   bodies after the guards) are missing, and each line carries a stray
   leading number from the original file.  */
2897 peep2_find_free_register (from, to, class_str, mode, reg_set)
2899 const char *class_str;
2900 enum machine_mode mode;
2901 HARD_REG_SET *reg_set;
/* Static so each call resumes where the previous one stopped,
   spreading allocations over the register file (see "Distribute the
   free registers" below).  */
2903 static int search_ofs;
2904 enum reg_class class;
/* Validate the caller-relative offsets, then wrap both into ring
   indices.  */
2908 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2911 from += peep2_current;
2912 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2913 from -= MAX_INSNS_PER_PEEP2 + 1;
2914 to += peep2_current;
2915 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2916 to -= MAX_INSNS_PER_PEEP2 + 1;
2918 if (peep2_insn_data[from].insn == NULL_RTX)
2920 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
/* Accumulate into LIVE the union of all registers live at any point in
   the range [FROM, TO].  */
2924 HARD_REG_SET this_live;
2926 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2928 if (peep2_insn_data[from].insn == NULL_RTX)
2930 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2931 IOR_HARD_REG_SET (live, this_live);
/* 'r' is handled specially since it is not a machine-specific
   constraint letter; everything else goes through the target's
   REG_CLASS_FROM_LETTER mapping.  */
2934 class = (class_str[0] == 'r' ? GENERAL_REGS
2935 : REG_CLASS_FROM_LETTER (class_str[0]));
2937 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2939 int raw_regno, regno, success, j;
2941 /* Distribute the free registers as much as possible. */
2942 raw_regno = search_ofs + i;
2943 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2944 raw_regno -= FIRST_PSEUDO_REGISTER;
2945 #ifdef REG_ALLOC_ORDER
/* Honor the target's preferred allocation order when it defines one.  */
2946 regno = reg_alloc_order[raw_regno];
2951 /* Don't allocate fixed registers. */
2952 if (fixed_regs[regno])
2954 /* Make sure the register is of the right class. */
2955 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2957 /* And can support the mode we need. */
2958 if (! HARD_REGNO_MODE_OK (regno, mode))
2960 /* And that we don't create an extra save/restore. */
2961 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2963 /* And we don't clobber traceback for noreturn functions. */
2964 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2965 && (! reload_completed || frame_pointer_needed))
/* Every hard reg the MODE-sized value would occupy must be free: not
   already claimed in *REG_SET and not in the accumulated LIVE set.  */
2969 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2971 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2972 || TEST_HARD_REG_BIT (live, regno + j))
/* Found one: claim all of its hard regs in *REG_SET for the caller.  */
2980 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2981 SET_HARD_REG_BIT (*reg_set, regno + j);
2983 /* Start the next search with the next register. */
2984 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2986 search_ofs = raw_regno;
2988 return gen_rtx_REG (mode, regno);
2996 /* Perform the peephole2 optimization pass. */
/* NOTE(review): lossy extract -- local declarations, braces and a
   number of statements are missing, and each line carries a stray
   leading number from the original file.  Comments below describe only
   the visible code.  */
2999 peephole2_optimize (dump_file)
3000 FILE *dump_file ATTRIBUTE_UNUSED;
/* One regset head per ring-buffer slot plus one more for LIVE.  */
3002 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3006 #ifdef HAVE_conditional_execution
3011 /* Initialize the regsets we're going to use. */
3012 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3013 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
/* I is MAX_INSNS_PER_PEEP2 + 1 after the loop, so LIVE takes the last
   spare slot of rs_heads.  */
3014 live = INITIALIZE_REG_SET (rs_heads[i]);
3016 #ifdef HAVE_conditional_execution
/* With conditional execution we record modified blocks in BLOCKS and
   recompute their life info at the end; otherwise death notes are
   stripped up front and maintained incrementally by the propagation
   below (PROP_DEATH_NOTES).  */
3017 blocks = sbitmap_alloc (n_basic_blocks);
3018 sbitmap_zero (blocks);
3021 count_or_remove_death_notes (NULL, 1);
3024 for (b = n_basic_blocks - 1; b >= 0; --b)
3026 basic_block bb = BASIC_BLOCK (b);
3027 struct propagate_block_info *pbi;
3029 /* Indicate that all slots except the last holds invalid data. */
3030 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3031 peep2_insn_data[i].insn = NULL_RTX;
3033 /* Indicate that the last slot contains live_after data. */
3034 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3035 peep2_current = MAX_INSNS_PER_PEEP2;
3037 /* Start up propagation. */
3038 COPY_REG_SET (live, bb->global_live_at_end);
3039 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3041 #ifdef HAVE_conditional_execution
3042 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3044 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
/* Scan the block's insns from last to first so LIVE always holds the
   set of registers live just before the most recently recorded insn.  */
3047 for (insn = bb->end; ; insn = prev)
3049 prev = PREV_INSN (insn);
3055 /* Record this insn. */
/* Slots are filled moving backwards through the ring.  */
3056 if (--peep2_current < 0)
3057 peep2_current = MAX_INSNS_PER_PEEP2;
3058 peep2_insn_data[peep2_current].insn = insn;
3059 propagate_one_insn (pbi, insn);
3060 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3062 /* Match the peephole. */
3063 try = peephole2_insns (PATTERN (insn), insn, &match_len);
/* Ring index of the last insn of the matched sequence.  */
3066 i = match_len + peep2_current;
3067 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3068 i -= MAX_INSNS_PER_PEEP2 + 1;
3070 /* Replace the old sequence with the new. */
3071 flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
3072 try = emit_insn_after (try, prev);
3074 /* Adjust the basic block boundaries. */
3075 if (peep2_insn_data[i].insn == bb->end)
3077 if (insn == bb->head)
3078 bb->head = NEXT_INSN (prev);
3080 #ifdef HAVE_conditional_execution
3081 /* With conditional execution, we cannot back up the
3082 live information so easily, since the conditional
3083 death data structures are not so self-contained.
3084 So record that we've made a modification to this
3085 block and update life information at the end. */
3086 SET_BIT (blocks, b);
/* The replacement invalidated the recorded window; reset the ring to
   the end-of-block state.  */
3089 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3090 peep2_insn_data[i].insn = NULL_RTX;
3091 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3093 /* Back up lifetime information past the end of the
3094 newly created sequence. */
3095 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3097 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3099 /* Update life information for the new sequence. */
/* Walk the new insns backwards (TRY back to PREV), propagating each
   one and recording it in the last ring slot.  */
3105 i = MAX_INSNS_PER_PEEP2;
3106 peep2_insn_data[i].insn = try;
3107 propagate_one_insn (pbi, try);
3108 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3110 try = PREV_INSN (try);
3112 while (try != prev);
3114 /* ??? Should verify that LIVE now matches what we
3115 had before the new sequence. */
3122 if (insn == bb->head)
3126 free_propagate_block_info (pbi);
/* Release the per-slot regsets and the running LIVE set.  */
3129 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3130 FREE_REG_SET (peep2_insn_data[i].live_before);
3131 FREE_REG_SET (live);
3133 #ifdef HAVE_conditional_execution
/* Recompute life info only for the blocks we actually modified.  */
3134 count_or_remove_death_notes (blocks, 1);
3135 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3136 sbitmap_free (blocks);
3139 #endif /* HAVE_peephole2 */