1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "insn-codes.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
/* Default push/pop address codes when the target does not override them.
   When the stack grows downward a push pre-decrements and a pop
   post-increments the stack pointer; otherwise the reverse.  */

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
58 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
59 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
60 static void validate_replace_src_1 PARAMS ((rtx *, void *));
61 static rtx split_insn PARAMS ((rtx));
63 /* Nonzero means allow operands to be volatile.
64 This should be 0 if you are generating rtl, such as if you are calling
65 the functions in optabs.c and expmed.c (most of the time).
66 This should be 1 if all valid insns need to be recognized,
67 such as in regclass.c and final.c and reload.c.
69 init_recog and init_recog_no_volatile are responsible for setting this. */
73 struct recog_data recog_data;
75 /* Contains a vector of operand_alternative structures for every operand.
76 Set up by preprocess_constraints. */
77 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
79 /* On return from `constrain_operands', indicate which alternative
82 int which_alternative;
84 /* Nonzero after end of reload pass.
85 Set to 1 or 0 by toplev.c.
86 Controls the significance of (SUBREG (MEM)). */
90 /* Initialize data used by the function `recog'.
91 This must be called once in the compilation of a function
92 before any insn recognition may be done in the function. */
95 init_recog_no_volatile ()
106 /* Try recognizing the instruction INSN,
107 and return the code number that results.
108 Remember the code so that repeated calls do not
109 need to spend the time for actual rerecognition.
111 This function is the normal interface to instruction recognition.
112 The automatically-generated function `recog' is normally called
113 through this one. (The only exception is in combine.c.) */
116 recog_memoized_1 (insn)
119 if (INSN_CODE (insn) < 0)
120 INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
121 return INSN_CODE (insn);
124 /* Check that X is an insn-body for an `asm' with operands
125 and that the operands mentioned in it are legitimate. */
128 check_asm_operands (x)
133 const char **constraints;
136 /* Post-reload, be more strict with things. */
137 if (reload_completed)
139 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
140 extract_insn (make_insn_raw (x));
141 constrain_operands (1);
142 return which_alternative >= 0;
145 noperands = asm_noperands (x);
151 operands = (rtx *) alloca (noperands * sizeof (rtx));
152 constraints = (const char **) alloca (noperands * sizeof (char *));
154 decode_asm_operands (x, operands, NULL, constraints, NULL);
156 for (i = 0; i < noperands; i++)
158 const char *c = constraints[i];
161 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
162 c = constraints[c[0] - '0'];
164 if (! asm_operand_ok (operands[i], c))
171 /* Static data for the next two routines. */
173 typedef struct change_t
181 static change_t *changes;
182 static int changes_allocated;
184 static int num_changes = 0;
186 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
187 at which NEW will be placed. If OBJECT is zero, no validation is done,
188 the change is simply made.
190 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
191 will be called with the address and mode as parameters. If OBJECT is
192 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
195 IN_GROUP is non-zero if this is part of a group of changes that must be
196 performed as a group. In that case, the changes will be stored. The
197 function `apply_change_group' will validate and apply the changes.
199 If IN_GROUP is zero, this is a single change. Try to recognize the insn
200 or validate the memory reference with the change applied. If the result
201 is not valid for the machine, suppress the change and return zero.
202 Otherwise, perform the change and return 1. */
205 validate_change (object, loc, new, in_group)
213 if (old == new || rtx_equal_p (old, new))
216 if (in_group == 0 && num_changes != 0)
221 /* Save the information describing this change. */
222 if (num_changes >= changes_allocated)
224 if (changes_allocated == 0)
225 /* This value allows for repeated substitutions inside complex
226 indexed addresses, or changes in up to 5 insns. */
227 changes_allocated = MAX_RECOG_OPERANDS * 5;
229 changes_allocated *= 2;
232 (change_t*) xrealloc (changes,
233 sizeof (change_t) * changes_allocated);
236 changes[num_changes].object = object;
237 changes[num_changes].loc = loc;
238 changes[num_changes].old = old;
240 if (object && GET_CODE (object) != MEM)
242 /* Set INSN_CODE to force rerecognition of insn. Save old code in
244 changes[num_changes].old_code = INSN_CODE (object);
245 INSN_CODE (object) = -1;
250 /* If we are making a group of changes, return 1. Otherwise, validate the
251 change group we made. */
256 return apply_change_group ();
259 /* This subroutine of apply_change_group verifies whether the changes to INSN
260 were valid; i.e. whether INSN can still be recognized. */
263 insn_invalid_p (insn)
266 rtx pat = PATTERN (insn);
267 int num_clobbers = 0;
268 /* If we are before reload and the pattern is a SET, see if we can add
270 int icode = recog (pat, insn,
271 (GET_CODE (pat) == SET
272 && ! reload_completed && ! reload_in_progress)
273 ? &num_clobbers : 0);
274 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
277 /* If this is an asm and the operand aren't legal, then fail. Likewise if
278 this is not an asm and the insn wasn't recognized. */
279 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
280 || (!is_asm && icode < 0))
283 /* If we have to add CLOBBERs, fail if we have to add ones that reference
284 hard registers since our callers can't know if they are live or not.
285 Otherwise, add them. */
286 if (num_clobbers > 0)
290 if (added_clobbers_hard_reg_p (icode))
293 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
294 XVECEXP (newpat, 0, 0) = pat;
295 add_clobbers (newpat, icode);
296 PATTERN (insn) = pat = newpat;
299 /* After reload, verify that all constraints are satisfied. */
300 if (reload_completed)
304 if (! constrain_operands (1))
308 INSN_CODE (insn) = icode;
312 /* Apply a group of changes previously issued with `validate_change'.
313 Return 1 if all changes are valid, zero otherwise. */
316 apply_change_group ()
319 rtx last_validated = NULL_RTX;
321 /* The changes have been applied and all INSN_CODEs have been reset to force
324 The changes are valid if we aren't given an object, or if we are
325 given a MEM and it still is a valid address, or if this is in insn
326 and it is recognized. In the latter case, if reload has completed,
327 we also require that the operands meet the constraints for
330 for (i = 0; i < num_changes; i++)
332 rtx object = changes[i].object;
334 /* if there is no object to test or if it is the same as the one we
335 already tested, ignore it. */
336 if (object == 0 || object == last_validated)
339 if (GET_CODE (object) == MEM)
341 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
344 else if (insn_invalid_p (object))
346 rtx pat = PATTERN (object);
348 /* Perhaps we couldn't recognize the insn because there were
349 extra CLOBBERs at the end. If so, try to re-recognize
350 without the last CLOBBER (later iterations will cause each of
351 them to be eliminated, in turn). But don't do this if we
352 have an ASM_OPERAND. */
353 if (GET_CODE (pat) == PARALLEL
354 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
355 && asm_noperands (PATTERN (object)) < 0)
359 if (XVECLEN (pat, 0) == 2)
360 newpat = XVECEXP (pat, 0, 0);
366 = gen_rtx_PARALLEL (VOIDmode,
367 rtvec_alloc (XVECLEN (pat, 0) - 1));
368 for (j = 0; j < XVECLEN (newpat, 0); j++)
369 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
372 /* Add a new change to this group to replace the pattern
373 with this new pattern. Then consider this change
374 as having succeeded. The change we added will
375 cause the entire call to fail if things remain invalid.
377 Note that this can lose if a later change than the one
378 we are processing specified &XVECEXP (PATTERN (object), 0, X)
379 but this shouldn't occur. */
381 validate_change (object, &PATTERN (object), newpat, 1);
384 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
385 /* If this insn is a CLOBBER or USE, it is always valid, but is
391 last_validated = object;
394 if (i == num_changes)
406 /* Return the number of changes so far in the current group. */
409 num_validated_changes ()
414 /* Retract the changes numbered NUM and up. */
422 /* Back out all the changes. Do this in the opposite order in which
424 for (i = num_changes - 1; i >= num; i--)
426 *changes[i].loc = changes[i].old;
427 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
428 INSN_CODE (changes[i].object) = changes[i].old_code;
433 /* Replace every occurrence of FROM in X with TO. Mark each change with
434 validate_change passing OBJECT. */
437 validate_replace_rtx_1 (loc, from, to, object)
439 rtx from, to, object;
442 register const char *fmt;
443 register rtx x = *loc;
445 enum machine_mode op0_mode = VOIDmode;
446 int prev_changes = num_changes;
453 fmt = GET_RTX_FORMAT (code);
455 op0_mode = GET_MODE (XEXP (x, 0));
457 /* X matches FROM if it is the same rtx or they are both referring to the
458 same register in the same mode. Avoid calling rtx_equal_p unless the
459 operands look similar. */
462 || (GET_CODE (x) == REG && GET_CODE (from) == REG
463 && GET_MODE (x) == GET_MODE (from)
464 && REGNO (x) == REGNO (from))
465 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
466 && rtx_equal_p (x, from)))
468 validate_change (object, loc, to, 1);
472 /* Call ourseves recursivly to perform the replacements. */
474 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
477 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
478 else if (fmt[i] == 'E')
479 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
480 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
483 /* In case we didn't substituted, there is nothing to do. */
484 if (num_changes == prev_changes)
487 /* Allow substituted expression to have different mode. This is used by
488 regmove to change mode of pseudo register. */
489 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
490 op0_mode = GET_MODE (XEXP (x, 0));
492 /* Do changes needed to keep rtx consistent. Don't do any other
493 simplifications, as it is not our job. */
495 if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
496 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
498 validate_change (object, loc,
499 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
500 : swap_condition (code),
501 GET_MODE (x), XEXP (x, 1),
510 /* If we have a PLUS whose second operand is now a CONST_INT, use
511 plus_constant to try to simplify it.
512 ??? We may want later to remove this, once simplification is
513 separated from this function. */
514 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
515 validate_change (object, loc,
516 plus_constant (XEXP (x, 0), INTVAL (XEXP (x, 1))), 1);
519 if (GET_CODE (XEXP (x, 1)) == CONST_INT
520 || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
521 validate_change (object, loc,
523 (PLUS, GET_MODE (x), XEXP (x, 0),
524 simplify_gen_unary (NEG,
525 GET_MODE (x), XEXP (x, 1),
530 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
532 new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
534 /* If any of the above failed, substitute in something that
535 we know won't be recognized. */
537 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
538 validate_change (object, loc, new, 1);
542 /* All subregs possible to simplify should be simplified. */
543 new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
546 /* Subregs of VOIDmode operands are incorect. */
547 if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
548 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
550 validate_change (object, loc, new, 1);
554 /* If we are replacing a register with memory, try to change the memory
555 to be the mode required for memory in extract operations (this isn't
556 likely to be an insertion operation; if it was, nothing bad will
557 happen, we might just fail in some cases). */
559 if (GET_CODE (XEXP (x, 0)) == MEM
560 && GET_CODE (XEXP (x, 1)) == CONST_INT
561 && GET_CODE (XEXP (x, 2)) == CONST_INT
562 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
563 && !MEM_VOLATILE_P (XEXP (x, 0)))
565 enum machine_mode wanted_mode = VOIDmode;
566 enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
567 int pos = INTVAL (XEXP (x, 2));
570 if (code == ZERO_EXTRACT)
572 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
573 if (wanted_mode == VOIDmode)
574 wanted_mode = word_mode;
578 if (code == SIGN_EXTRACT)
580 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
581 if (wanted_mode == VOIDmode)
582 wanted_mode = word_mode;
586 /* If we have a narrower mode, we can do something. */
587 if (wanted_mode != VOIDmode
588 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
590 int offset = pos / BITS_PER_UNIT;
593 /* If the bytes and bits are counted differently, we
594 must adjust the offset. */
595 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
597 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
600 pos %= GET_MODE_BITSIZE (wanted_mode);
602 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
604 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
605 validate_change (object, &XEXP (x, 0), newmem, 1);
616 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
617 with TO. After all changes have been made, validate by seeing
618 if INSN is still valid. */
621 validate_replace_rtx_subexp (from, to, insn, loc)
622 rtx from, to, insn, *loc;
624 validate_replace_rtx_1 (loc, from, to, insn);
625 return apply_change_group ();
628 /* Try replacing every occurrence of FROM in INSN with TO. After all
629 changes have been made, validate by seeing if INSN is still valid. */
632 validate_replace_rtx (from, to, insn)
635 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
636 return apply_change_group ();
639 /* Try replacing every occurrence of FROM in INSN with TO. */
642 validate_replace_rtx_group (from, to, insn)
645 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
648 /* Function called by note_uses to replace used subexpressions. */
649 struct validate_replace_src_data
651 rtx from; /* Old RTX */
652 rtx to; /* New RTX */
653 rtx insn; /* Insn in which substitution is occurring. */
657 validate_replace_src_1 (x, data)
661 struct validate_replace_src_data *d
662 = (struct validate_replace_src_data *) data;
664 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
667 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
668 SET_DESTs. After all changes have been made, validate by seeing if
669 INSN is still valid. */
672 validate_replace_src (from, to, insn)
675 struct validate_replace_src_data d;
680 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
681 return apply_change_group ();
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  register rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
	   || GET_CODE (next) == INSN
	   || GET_CODE (next) == CALL_INSN)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
#if 0 /* This is useless since the insn that sets the cc's
	 must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  register rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
	  || GET_CODE (next) == BARRIER)
	return 1;
      if (GET_CODE (next) == NOTE)
	continue;
      if (inequality_comparisons_p (PATTERN (next)))
	return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
	return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
	return 1;
    }
  return 1;
}
#endif
734 /* This is used by find_single_use to locate an rtx that contains exactly one
735 use of DEST, which is typically either a REG or CC0. It returns a
736 pointer to the innermost rtx expression containing DEST. Appearances of
737 DEST that are being used to totally replace it are not counted. */
740 find_single_use_1 (dest, loc)
745 enum rtx_code code = GET_CODE (x);
762 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
763 of a REG that occupies all of the REG, the insn uses DEST if
764 it is mentioned in the destination or the source. Otherwise, we
765 need just check the source. */
766 if (GET_CODE (SET_DEST (x)) != CC0
767 && GET_CODE (SET_DEST (x)) != PC
768 && GET_CODE (SET_DEST (x)) != REG
769 && ! (GET_CODE (SET_DEST (x)) == SUBREG
770 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
771 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
772 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
773 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
774 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
777 return find_single_use_1 (dest, &SET_SRC (x));
781 return find_single_use_1 (dest, &XEXP (x, 0));
787 /* If it wasn't one of the common cases above, check each expression and
788 vector of this code. Look for a unique usage of DEST. */
790 fmt = GET_RTX_FORMAT (code);
791 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
795 if (dest == XEXP (x, i)
796 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
797 && REGNO (dest) == REGNO (XEXP (x, i))))
800 this_result = find_single_use_1 (dest, &XEXP (x, i));
803 result = this_result;
804 else if (this_result)
805 /* Duplicate usage. */
808 else if (fmt[i] == 'E')
812 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
814 if (XVECEXP (x, i, j) == dest
815 || (GET_CODE (dest) == REG
816 && GET_CODE (XVECEXP (x, i, j)) == REG
817 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
820 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
823 result = this_result;
824 else if (this_result)
833 /* See if DEST, produced in INSN, is used only a single time in the
834 sequel. If so, return a pointer to the innermost rtx expression in which
837 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
839 This routine will return usually zero either before flow is called (because
840 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
841 note can't be trusted).
843 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
844 care about REG_DEAD notes or LOG_LINKS.
846 Otherwise, we find the single use by finding an insn that has a
847 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
848 only referenced once in that insn, we know that it must be the first
849 and last insn referencing DEST. */
852 find_single_use (dest, insn, ploc)
864 next = NEXT_INSN (insn);
866 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
869 result = find_single_use_1 (dest, &PATTERN (next));
876 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
879 for (next = next_nonnote_insn (insn);
880 next != 0 && GET_CODE (next) != CODE_LABEL;
881 next = next_nonnote_insn (next))
882 if (INSN_P (next) && dead_or_set_p (next, dest))
884 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
885 if (XEXP (link, 0) == insn)
890 result = find_single_use_1 (dest, &PATTERN (next));
900 /* Return 1 if OP is a valid general operand for machine mode MODE.
901 This is either a register reference, a memory reference,
902 or a constant. In the case of a memory reference, the address
903 is checked for general validity for the target machine.
905 Register and memory references must have mode MODE in order to be valid,
906 but some constants have no machine mode and are valid for any mode.
908 If MODE is VOIDmode, OP is checked for validity for whatever mode
911 The main use of this function is as a predicate in match_operand
912 expressions in the machine description.
914 For an explanation of this function's behavior for registers of
915 class NO_REGS, see the comment for `register_operand'. */
918 general_operand (op, mode)
920 enum machine_mode mode;
922 register enum rtx_code code = GET_CODE (op);
924 if (mode == VOIDmode)
925 mode = GET_MODE (op);
927 /* Don't accept CONST_INT or anything similar
928 if the caller wants something floating. */
929 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
930 && GET_MODE_CLASS (mode) != MODE_INT
931 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
934 if (GET_CODE (op) == CONST_INT
935 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
939 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
941 #ifdef LEGITIMATE_PIC_OPERAND_P
942 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
944 && LEGITIMATE_CONSTANT_P (op));
946 /* Except for certain constants with VOIDmode, already checked for,
947 OP's mode must match MODE if MODE specifies a mode. */
949 if (GET_MODE (op) != mode)
954 #ifdef INSN_SCHEDULING
955 /* On machines that have insn scheduling, we want all memory
956 reference to be explicit, so outlaw paradoxical SUBREGs. */
957 if (GET_CODE (SUBREG_REG (op)) == MEM
958 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
961 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
962 may result in incorrect reference. We should simplify all valid
963 subregs of MEM anyway. But allow this after reload because we
964 might be called from cleanup_subreg_operands.
966 ??? This is a kludge. */
967 if (!reload_completed && SUBREG_BYTE (op) != 0
968 && GET_CODE (SUBREG_REG (op)) == MEM)
971 op = SUBREG_REG (op);
972 code = GET_CODE (op);
976 /* A register whose class is NO_REGS is not a general operand. */
977 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
978 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
982 register rtx y = XEXP (op, 0);
984 if (! volatile_ok && MEM_VOLATILE_P (op))
987 if (GET_CODE (y) == ADDRESSOF)
990 /* Use the mem's mode, since it will be reloaded thus. */
991 mode = GET_MODE (op);
992 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
995 /* Pretend this is an operand for now; we'll run force_operand
996 on its replacement in fixup_var_refs_1. */
997 if (code == ADDRESSOF)
1006 /* Return 1 if OP is a valid memory address for a memory reference
1009 The main use of this function is as a predicate in match_operand
1010 expressions in the machine description. */
1013 address_operand (op, mode)
1015 enum machine_mode mode;
1017 return memory_address_p (mode, op);
1020 /* Return 1 if OP is a register reference of mode MODE.
1021 If MODE is VOIDmode, accept a register in any mode.
1023 The main use of this function is as a predicate in match_operand
1024 expressions in the machine description.
1026 As a special exception, registers whose class is NO_REGS are
1027 not accepted by `register_operand'. The reason for this change
1028 is to allow the representation of special architecture artifacts
1029 (such as a condition code register) without extending the rtl
1030 definitions. Since registers of class NO_REGS cannot be used
1031 as registers in any case where register classes are examined,
1032 it is most consistent to keep this function from accepting them. */
1035 register_operand (op, mode)
1037 enum machine_mode mode;
1039 if (GET_MODE (op) != mode && mode != VOIDmode)
1042 if (GET_CODE (op) == SUBREG)
1044 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1045 because it is guaranteed to be reloaded into one.
1046 Just make sure the MEM is valid in itself.
1047 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1048 but currently it does result from (SUBREG (REG)...) where the
1049 reg went on the stack.) */
1050 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1051 return general_operand (op, mode);
1053 #ifdef CLASS_CANNOT_CHANGE_MODE
1054 if (GET_CODE (SUBREG_REG (op)) == REG
1055 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1056 && (TEST_HARD_REG_BIT
1057 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
1058 REGNO (SUBREG_REG (op))))
1059 && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
1060 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1061 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1065 op = SUBREG_REG (op);
1068 /* If we have an ADDRESSOF, consider it valid since it will be
1069 converted into something that will not be a MEM. */
1070 if (GET_CODE (op) == ADDRESSOF)
1073 /* We don't consider registers whose class is NO_REGS
1074 to be a register operand. */
1075 return (GET_CODE (op) == REG
1076 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1077 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1080 /* Return 1 for a register in Pmode; ignore the tested mode. */
1083 pmode_register_operand (op, mode)
1085 enum machine_mode mode ATTRIBUTE_UNUSED;
1087 return register_operand (op, Pmode);
1090 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1091 or a hard register. */
1094 scratch_operand (op, mode)
1096 enum machine_mode mode;
1098 if (GET_MODE (op) != mode && mode != VOIDmode)
1101 return (GET_CODE (op) == SCRATCH
1102 || (GET_CODE (op) == REG
1103 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1106 /* Return 1 if OP is a valid immediate operand for mode MODE.
1108 The main use of this function is as a predicate in match_operand
1109 expressions in the machine description. */
1112 immediate_operand (op, mode)
1114 enum machine_mode mode;
1116 /* Don't accept CONST_INT or anything similar
1117 if the caller wants something floating. */
1118 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1119 && GET_MODE_CLASS (mode) != MODE_INT
1120 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1123 if (GET_CODE (op) == CONST_INT
1124 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1127 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1128 result in 0/1. It seems a safe assumption that this is
1129 in range for everyone. */
1130 if (GET_CODE (op) == CONSTANT_P_RTX)
1133 return (CONSTANT_P (op)
1134 && (GET_MODE (op) == mode || mode == VOIDmode
1135 || GET_MODE (op) == VOIDmode)
1136 #ifdef LEGITIMATE_PIC_OPERAND_P
1137 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1139 && LEGITIMATE_CONSTANT_P (op));
1142 /* Returns 1 if OP is an operand that is a CONST_INT. */
1145 const_int_operand (op, mode)
1147 enum machine_mode mode;
1149 if (GET_CODE (op) != CONST_INT)
1152 if (mode != VOIDmode
1153 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1159 /* Returns 1 if OP is an operand that is a constant integer or constant
1160 floating-point number. */
1163 const_double_operand (op, mode)
1165 enum machine_mode mode;
1167 /* Don't accept CONST_INT or anything similar
1168 if the caller wants something floating. */
1169 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1170 && GET_MODE_CLASS (mode) != MODE_INT
1171 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1174 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1175 && (mode == VOIDmode || GET_MODE (op) == mode
1176 || GET_MODE (op) == VOIDmode));
1179 /* Return 1 if OP is a general operand that is not an immediate operand. */
1182 nonimmediate_operand (op, mode)
1184 enum machine_mode mode;
1186 return (general_operand (op, mode) && ! CONSTANT_P (op));
1189 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1192 nonmemory_operand (op, mode)
1194 enum machine_mode mode;
1196 if (CONSTANT_P (op))
1198 /* Don't accept CONST_INT or anything similar
1199 if the caller wants something floating. */
1200 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1201 && GET_MODE_CLASS (mode) != MODE_INT
1202 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1205 if (GET_CODE (op) == CONST_INT
1206 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1209 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1210 || mode == VOIDmode)
1211 #ifdef LEGITIMATE_PIC_OPERAND_P
1212 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1214 && LEGITIMATE_CONSTANT_P (op));
1217 if (GET_MODE (op) != mode && mode != VOIDmode)
1220 if (GET_CODE (op) == SUBREG)
1222 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1223 because it is guaranteed to be reloaded into one.
1224 Just make sure the MEM is valid in itself.
1225 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1226 but currently it does result from (SUBREG (REG)...) where the
1227 reg went on the stack.) */
1228 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1229 return general_operand (op, mode);
1230 op = SUBREG_REG (op);
1233 /* We don't consider registers whose class is NO_REGS
1234 to be a register operand. */
1235 return (GET_CODE (op) == REG
1236 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1237 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1240 /* Return 1 if OP is a valid operand that stands for pushing a
1241 value of mode MODE onto the stack.
1243 The main use of this function is as a predicate in match_operand
1244 expressions in the machine description. */
1247 push_operand (op, mode)
1249 enum machine_mode mode;
1251 unsigned int rounded_size = GET_MODE_SIZE (mode);
1253 #ifdef PUSH_ROUNDING
1254 rounded_size = PUSH_ROUNDING (rounded_size);
1257 if (GET_CODE (op) != MEM)
1260 if (mode != VOIDmode && GET_MODE (op) != mode)
1265 if (rounded_size == GET_MODE_SIZE (mode))
1267 if (GET_CODE (op) != STACK_PUSH_CODE)
1272 if (GET_CODE (op) != PRE_MODIFY
1273 || GET_CODE (XEXP (op, 1)) != PLUS
1274 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1275 || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
1276 #ifdef STACK_GROWS_DOWNWARD
1277 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1279 || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
1285 return XEXP (op, 0) == stack_pointer_rtx;
1288 /* Return 1 if OP is a valid operand that stands for popping a
1289 value of mode MODE off the stack.
1291 The main use of this function is as a predicate in match_operand
1292 expressions in the machine description. */
1295 pop_operand (op, mode)
1297 enum machine_mode mode;
1299 if (GET_CODE (op) != MEM)
1302 if (mode != VOIDmode && GET_MODE (op) != mode)
1307 if (GET_CODE (op) != STACK_POP_CODE)
1310 return XEXP (op, 0) == stack_pointer_rtx;
1313 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1316 memory_address_p (mode, addr)
1317 enum machine_mode mode ATTRIBUTE_UNUSED;
1320 if (GET_CODE (addr) == ADDRESSOF)
1323 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1330 /* Return 1 if OP is a valid memory reference with mode MODE,
1331 including a valid address.
1333 The main use of this function is as a predicate in match_operand
1334 expressions in the machine description. */
1337 memory_operand (op, mode)
1339 enum machine_mode mode;
1343 if (! reload_completed)
1344 /* Note that no SUBREG is a memory operand before end of reload pass,
1345 because (SUBREG (MEM...)) forces reloading into a register. */
1346 return GET_CODE (op) == MEM && general_operand (op, mode);
1348 if (mode != VOIDmode && GET_MODE (op) != mode)
/* After reload, strip a SUBREG wrapper and accept (subreg (mem)).
   NOTE(review): the declaration initializing `inner' (presumably to OP)
   is on a line missing from this extract.  */
1352 if (GET_CODE (inner) == SUBREG)
1353 inner = SUBREG_REG (inner);
1355 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1358 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1359 that is, a memory reference whose address is a general_operand. */
1362 indirect_operand (op, mode)
1364 enum machine_mode mode;
1366 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1367 if (! reload_completed
1368 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1370 register int offset = SUBREG_BYTE (op);
1371 rtx inner = SUBREG_REG (op);
1373 if (mode != VOIDmode && GET_MODE (op) != mode)
1376 /* The only way that we can have a general_operand as the resulting
1377 address is if OFFSET is zero and the address already is an operand
1378 or if the address is (plus Y (const_int -OFFSET)) and Y is an
operand (tail of this comment is on a line missing from this extract).  */
1381 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1382 || (GET_CODE (XEXP (inner, 0)) == PLUS
1383 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1384 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1385 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
/* Ordinary case: a MEM whose address is itself a general operand.  */
1388 return (GET_CODE (op) == MEM
1389 && memory_operand (op, mode)
1390 && general_operand (XEXP (op, 0), Pmode));
1393 /* Return 1 if this is a comparison operator. This allows the use of
1394 MATCH_OPERATOR to recognize all the branch insns. */
1397 comparison_operator (op, mode)
1399 enum machine_mode mode;
/* '<' is the RTX class code covering all comparison operators.  */
1401 return ((mode == VOIDmode || GET_MODE (op) == mode)
1402 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1405 /* If BODY is an insn body that uses ASM_OPERANDS,
1406 return the number of operands (both input and output) in the insn.
1407 Otherwise return -1. */
/* NOTE(review): the switch case labels (ASM_OPERANDS, SET, PARALLEL) and
   several `return -1;' paths are on lines missing from this extract.  */
1410 asm_noperands (body)
1413 switch (GET_CODE (body))
1416 /* No output operands: return number of input operands. */
1417 return ASM_OPERANDS_INPUT_LENGTH (body);
1419 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1420 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1421 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1425 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1426 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1428 /* Multiple output operands, or 1 output plus some clobbers:
1429 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1433 /* Count backwards through CLOBBERs to determine number of SETs. */
1434 for (i = XVECLEN (body, 0); i > 0; i--)
1436 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1438 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1442 /* N_SETS is now number of output operands. */
1445 /* Verify that all the SETs we have
1446 came from a single original asm_operands insn
1447 (so that invalid combinations are blocked). */
1448 for (i = 0; i < n_sets; i++)
1450 rtx elt = XVECEXP (body, 0, i);
1451 if (GET_CODE (elt) != SET)
1453 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1455 /* If these ASM_OPERANDS rtx's came from different original insns
1456 then they aren't allowed together. */
1457 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1458 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
/* Total operand count = inputs + the N_SETS outputs (the `+ n_sets'
   term is on a line missing from this extract).  */
1461 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1464 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1466 /* 0 outputs, but some clobbers:
1467 body is [(asm_operands ...) (clobber (reg ...))...]. */
1470 /* Make sure all the other parallel things really are clobbers. */
1471 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1472 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1475 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1484 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1485 copy its operands (both input and output) into the vector OPERANDS,
1486 the locations of the operands within the insn into the vector OPERAND_LOCS,
1487 and the constraints for the operands into CONSTRAINTS.
1488 Write the modes of the operands into MODES.
1489 Return the assembler-template.
1491 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1492 we don't store that info. */
/* NOTE(review): braces and the NULL-pointer guards around each output
   store (`if (operand_locs)' etc.) are on lines missing from this
   extract; each store below is conditional in the full source --
   confirm against gcc/recog.c.  */
1495 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1499 const char **constraints;
1500 enum machine_mode *modes;
1504 const char *template = 0;
1506 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1508 rtx asmop = SET_SRC (body);
1509 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1511 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
/* Inputs occupy slots 1..noperands-1; slot 0 is the output.  */
1513 for (i = 1; i < noperands; i++)
1516 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1518 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1520 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1522 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1525 /* The output is in the SET.
1526 Its constraint is in the ASM_OPERANDS itself. */
1528 operands[0] = SET_DEST (body);
1530 operand_locs[0] = &SET_DEST (body);
1532 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1534 modes[0] = GET_MODE (SET_DEST (body));
1535 template = ASM_OPERANDS_TEMPLATE (asmop);
1537 else if (GET_CODE (body) == ASM_OPERANDS)
1540 /* No output operands: BODY is (asm_operands ....). */
1542 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1544 /* The input operands are found in the 1st element vector. */
1545 /* Constraints for inputs are in the 2nd element vector. */
1546 for (i = 0; i < noperands; i++)
1549 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1551 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1553 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1555 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1557 template = ASM_OPERANDS_TEMPLATE (asmop);
1559 else if (GET_CODE (body) == PARALLEL
1560 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1562 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1563 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1564 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1565 int nout = 0; /* Does not include CLOBBERs. */
1567 /* At least one output, plus some CLOBBERs. */
1569 /* The outputs are in the SETs.
1570 Their constraints are in the ASM_OPERANDS itself. */
1571 for (i = 0; i < nparallel; i++)
1573 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1574 break; /* Past last SET */
1577 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1579 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1581 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1583 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
/* Inputs follow the NOUT outputs in the operand vectors.  */
1587 for (i = 0; i < nin; i++)
1590 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1592 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1594 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1596 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1599 template = ASM_OPERANDS_TEMPLATE (asmop);
1601 else if (GET_CODE (body) == PARALLEL
1602 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1604 /* No outputs, but some CLOBBERs. */
1606 rtx asmop = XVECEXP (body, 0, 0);
1607 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1609 for (i = 0; i < nin; i++)
1612 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1614 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1616 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1618 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1621 template = ASM_OPERANDS_TEMPLATE (asmop);
1627 /* Check if an asm_operand matches it's constraints.
1628 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
/* NOTE(review): the switch statement, the per-case `return 1;' /
   `break;' lines, and several case labels ('p', '<', '>', 'E'/'F',
   'm', 's', 'i', 'n', 'I'..'P', 'X', 'g', default) are on lines
   missing from this extract; only the condition bodies survive.  */
1631 asm_operand_ok (op, constraint)
1633 const char *constraint;
1637 /* Use constrain_operands after reload. */
1638 if (reload_completed)
1643 char c = *constraint++;
1657 case '0': case '1': case '2': case '3': case '4':
1658 case '5': case '6': case '7': case '8': case '9':
1659 /* For best results, our caller should have given us the
1660 proper matching constraint, but we can't actually fail
1661 the check if they didn't. Indicate that results are
inconclusive (tail of this comment is on a missing line).  */
1667 if (address_operand (op, VOIDmode))
1672 case 'V': /* non-offsettable */
1673 if (memory_operand (op, VOIDmode))
1677 case 'o': /* offsettable */
1678 if (offsettable_nonstrict_memref_p (op))
1683 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1684 excepting those that expand_call created. Further, on some
1685 machines which do not have generalized auto inc/dec, an inc/dec
1686 is not a memory_operand.
1688 Match any memory and hope things are resolved after reload. */
1690 if (GET_CODE (op) == MEM
1692 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1693 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1698 if (GET_CODE (op) == MEM
1700 || GET_CODE (XEXP (op, 0)) == PRE_INC
1701 || GET_CODE (XEXP (op, 0)) == POST_INC))
1706 #ifndef REAL_ARITHMETIC
1707 /* Match any floating double constant, but only if
1708 we can examine the bits of it reliably. */
1709 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1710 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1711 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1717 if (GET_CODE (op) == CONST_DOUBLE)
/* 'G'/'H': target-defined floating constant classes.  */
1722 if (GET_CODE (op) == CONST_DOUBLE
1723 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1727 if (GET_CODE (op) == CONST_DOUBLE
1728 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
/* Integer constants: CONST_INT or VOIDmode CONST_DOUBLE (wide int).  */
1733 if (GET_CODE (op) == CONST_INT
1734 || (GET_CODE (op) == CONST_DOUBLE
1735 && GET_MODE (op) == VOIDmode))
1741 #ifdef LEGITIMATE_PIC_OPERAND_P
1742 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1749 if (GET_CODE (op) == CONST_INT
1750 || (GET_CODE (op) == CONST_DOUBLE
1751 && GET_MODE (op) == VOIDmode))
/* 'I'..'P': target-defined integer constant ranges.  */
1756 if (GET_CODE (op) == CONST_INT
1757 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1761 if (GET_CODE (op) == CONST_INT
1762 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1766 if (GET_CODE (op) == CONST_INT
1767 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1771 if (GET_CODE (op) == CONST_INT
1772 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1776 if (GET_CODE (op) == CONST_INT
1777 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1781 if (GET_CODE (op) == CONST_INT
1782 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1786 if (GET_CODE (op) == CONST_INT
1787 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1791 if (GET_CODE (op) == CONST_INT
1792 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
1800 if (general_operand (op, VOIDmode))
1805 /* For all other letters, we first check for a register class,
1806 otherwise it is an EXTRA_CONSTRAINT. */
1807 if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
1810 if (GET_MODE (op) == BLKmode)
1812 if (register_operand (op, VOIDmode))
1815 #ifdef EXTRA_CONSTRAINT
1816 if (EXTRA_CONSTRAINT (op, c))
1826 /* Given an rtx *P, if it is a sum containing an integer constant term,
1827 return the location (type rtx *) of the pointer to that constant term.
1828 Otherwise, return a null pointer. */
/* NOTE(review): the return statements after each condition and the
   declaration of `tem' are on lines missing from this extract.  */
1831 find_constant_term_loc (p)
1835 register enum rtx_code code = GET_CODE (*p);
1837 /* If *P IS such a constant term, P is its location. */
1839 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1843 /* Otherwise, if not a sum, it has no constant term. */
1845 if (GET_CODE (*p) != PLUS)
1848 /* If one of the summands is constant, return its location. */
1850 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1851 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1854 /* Otherwise, check each summand for containing a constant term. */
1856 if (XEXP (*p, 0) != 0)
1858 tem = find_constant_term_loc (&XEXP (*p, 0));
1863 if (XEXP (*p, 1) != 0)
1865 tem = find_constant_term_loc (&XEXP (*p, 1));
1873 /* Return 1 if OP is a memory reference
1874 whose address contains no side effects
1875 and remains valid after the addition
1876 of a positive integer less than the
1877 size of the object being referenced.
1879 We assume that the original address is valid and do not check it.
1881 This uses strict_memory_address_p as a subroutine, so
1882 don't use it before reload. */
1885 offsettable_memref_p (op)
/* Strict variant: delegates with strictp == 1.  */
1888 return ((GET_CODE (op) == MEM)
1889 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1892 /* Similar, but don't require a strictly valid mem ref:
1893 consider pseudo-regs valid as index or base regs. */
1896 offsettable_nonstrict_memref_p (op)
/* Non-strict variant: delegates with strictp == 0.  */
1899 return ((GET_CODE (op) == MEM)
1900 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1903 /* Return 1 if Y is a memory address which contains no side effects
1904 and would remain valid after the addition of a positive integer
1905 less than the size of that mode.
1907 We assume that the original address is valid and do not check it.
1908 We do check that it is valid for narrower modes.
1910 If STRICTP is nonzero, we require a strictly valid address,
1911 for the sake of use in reload.c. */
/* NOTE(review): declarations of locals y1, y2, z, good and several
   early returns are on lines missing from this extract.  */
1914 offsettable_address_p (strictp, mode, y)
1916 enum machine_mode mode;
1919 register enum rtx_code ycode = GET_CODE (y);
/* Pick the strict or non-strict address checker once, up front.  */
1923 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1924 (strictp ? strict_memory_address_p : memory_address_p);
1925 unsigned int mode_sz = GET_MODE_SIZE (mode);
1927 if (CONSTANT_ADDRESS_P (y))
1930 /* Adjusting an offsettable address involves changing to a narrower mode.
1931 Make sure that's OK. */
1933 if (mode_dependent_address_p (y))
1936 /* ??? How much offset does an offsettable BLKmode reference need?
1937 Clearly that depends on the situation in which it's being used.
1938 However, the current situation in which we test 0xffffffff is
1939 less than ideal. Caveat user. */
1941 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1943 /* If the expression contains a constant term,
1944 see if it remains valid when max possible offset is added. */
1946 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
/* Temporarily bump the constant term in place, test, then restore.  */
1951 *y2 = plus_constant (*y2, mode_sz - 1);
1952 /* Use QImode because an odd displacement may be automatically invalid
1953 for any wider mode. But it should be valid for a single byte. */
1954 good = (*addressp) (QImode, y);
1956 /* In any case, restore old contents of memory. */
1961 if (GET_RTX_CLASS (ycode) == 'a')
1964 /* The offset added here is chosen as the maximum offset that
1965 any instruction could need to add when operating on something
1966 of the specified mode. We assume that if Y and Y+c are
1967 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1968 go inside a LO_SUM here, so we do so as well. */
1969 if (GET_CODE (y) == LO_SUM)
1970 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1971 plus_constant (XEXP (y, 1), mode_sz - 1));
1973 z = plus_constant (y, mode_sz - 1);
1975 /* Use QImode because an odd displacement may be automatically invalid
1976 for any wider mode. But it should be valid for a single byte. */
1977 return (*addressp) (QImode, z);
1980 /* Return 1 if ADDR is an address-expression whose effect depends
1981 on the mode of the memory reference it is used in.
1983 Autoincrement addressing is a typical example of mode-dependence
1984 because the amount of the increment depends on the mode. */
1987 mode_dependent_address_p (addr)
1988 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
/* The target macro jumps to `win' when the address IS mode-dependent;
   the corresponding return statements are on lines missing here.  */
1990 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1992 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1993 win: ATTRIBUTE_UNUSED_LABEL
1997 /* Return 1 if OP is a general operand
1998 other than a memory ref with a mode dependent address. */
2001 mode_independent_operand (op, mode)
2002 enum machine_mode mode;
2007 if (! general_operand (op, mode))
/* Non-MEM general operands cannot have mode-dependent addresses.  */
2010 if (GET_CODE (op) != MEM)
2013 addr = XEXP (op, 0);
/* Target macro jumps to `lose' when the address is mode-dependent.  */
2014 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2016 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2017 lose: ATTRIBUTE_UNUSED_LABEL
2021 /* Like extract_insn, but save insn extracted and don't extract again, when
2022 called again for the same insn expecting that recog_data still contain the
2023 valid information. This is used primary by gen_attr infrastructure that
2024 often does extract insn again and again. */
2026 extract_insn_cached (insn)
/* Cache hit: recog_data already describes this (recognized) insn.  */
2029 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2031 extract_insn (insn);
2032 recog_data.insn = insn;
2034 /* Do cached extract_insn, constrain_operand and complain about failures.
2035 Used by insn_attrtab. */
2037 extract_constrain_insn_cached (insn)
2040 extract_insn_cached (insn);
/* which_alternative == -1 means constraints have not been matched yet.  */
2041 if (which_alternative == -1
2042 && !constrain_operands (reload_completed))
2043 fatal_insn_not_found (insn);
2045 /* Do cached constrain_operand and complain about failures. */
2047 constrain_operands_cached (strict)
/* Only re-run constraint matching when no alternative is cached.  */
2050 if (which_alternative == -1)
2051 return constrain_operands (strict);
2056 /* Analyze INSN and fill in recog_data. */
/* NOTE(review): the function header of extract_insn and the switch case
   labels (SET, PARALLEL, ASM_*, default, etc.) are on lines missing
   from this extract.  */
2065 rtx body = PATTERN (insn);
/* Reset the global recog_data before classifying BODY.  */
2067 recog_data.insn = NULL;
2068 recog_data.n_operands = 0;
2069 recog_data.n_alternatives = 0;
2070 recog_data.n_dups = 0;
2071 which_alternative = -1;
2073 switch (GET_CODE (body))
2083 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2088 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2089 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2090 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2096 recog_data.n_operands = noperands = asm_noperands (body);
2099 /* This insn is an `asm' with operands. */
2101 /* expand_asm_operands makes sure there aren't too many operands. */
2102 if (noperands > MAX_RECOG_OPERANDS)
2105 /* Now get the operand values and constraints out of the insn. */
2106 decode_asm_operands (body, recog_data.operand,
2107 recog_data.operand_loc,
2108 recog_data.constraints,
2109 recog_data.operand_mode);
/* Count alternatives by counting commas in the first constraint.  */
2112 const char *p = recog_data.constraints[0];
2113 recog_data.n_alternatives = 1;
2115 recog_data.n_alternatives += (*p++ == ',');
2119 fatal_insn_not_found (insn);
2123 /* Ordinary insn: recognize it, get the operands via insn_extract
2124 and get the constraints. */
2126 icode = recog_memoized (insn);
2128 fatal_insn_not_found (insn);
2130 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2131 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2132 recog_data.n_dups = insn_data[icode].n_dups;
2134 insn_extract (insn);
2136 for (i = 0; i < noperands; i++)
2138 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2139 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2140 /* VOIDmode match_operands gets mode from their real operand. */
2141 if (recog_data.operand_mode[i] == VOIDmode)
2142 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
/* Classify operands by their constraint prefix: '=' out, '+' in/out.  */
2145 for (i = 0; i < noperands; i++)
2146 recog_data.operand_type[i]
2147 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2148 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2151 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2155 /* After calling extract_insn, you can use this function to extract some
2156 information from the constraint strings into a more usable form.
2157 The collected data is stored in recog_op_alt. */
/* NOTE(review): the inner switch over constraint letters, the `c = *p++'
   scan loop, and several case labels ('?', '!', '&', 'm', '<', '>',
   'V', 'o', 'X', 'p', 'g'/'r', default) are on lines missing from this
   extract; only the case bodies survive below.  */
2159 preprocess_constraints ()
2163 memset (recog_op_alt, 0, sizeof recog_op_alt);
2164 for (i = 0; i < recog_data.n_operands; i++)
2167 struct operand_alternative *op_alt;
2168 const char *p = recog_data.constraints[i];
2170 op_alt = recog_op_alt[i];
2172 for (j = 0; j < recog_data.n_alternatives; j++)
/* Initialize this alternative's record before scanning its letters.  */
2174 op_alt[j].class = NO_REGS;
2175 op_alt[j].constraint = p;
2176 op_alt[j].matches = -1;
2177 op_alt[j].matched = -1;
2179 if (*p == '\0' || *p == ',')
2181 op_alt[j].anything_ok = 1;
2191 while (c != ',' && c != '\0');
2192 if (c == ',' || c == '\0')
2197 case '=': case '+': case '*': case '%':
2198 case 'E': case 'F': case 'G': case 'H':
2199 case 's': case 'i': case 'n':
2200 case 'I': case 'J': case 'K': case 'L':
2201 case 'M': case 'N': case 'O': case 'P':
2202 /* These don't say anything we care about. */
2206 op_alt[j].reject += 6;
2209 op_alt[j].reject += 600;
2212 op_alt[j].earlyclobber = 1;
2215 case '0': case '1': case '2': case '3': case '4':
2216 case '5': case '6': case '7': case '8': case '9':
/* Matching constraint: cross-link this operand with the one it must
   equal.  */
2217 op_alt[j].matches = c - '0';
2218 recog_op_alt[op_alt[j].matches][j].matched = i;
2222 op_alt[j].memory_ok = 1;
2225 op_alt[j].decmem_ok = 1;
2228 op_alt[j].incmem_ok = 1;
2231 op_alt[j].nonoffmem_ok = 1;
2234 op_alt[j].offmem_ok = 1;
2237 op_alt[j].anything_ok = 1;
2241 op_alt[j].is_address = 1;
2242 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2246 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2250 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2258 /* Check the operands of an insn against the insn's operand constraints
2259 and return 1 if they are valid.
2260 The information about the insn's operands, constraints, operand modes
2261 etc. is obtained from the global variables set up by extract_insn.
2263 WHICH_ALTERNATIVE is set to a number which indicates which
2264 alternative of constraints was matched: 0 for the first alternative,
2265 1 for the next, etc.
2267 In addition, when two operands are match
2268 and it happens that the output operand is (reg) while the
2269 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2270 make the output operand look like the input.
2271 This is because the output operand is the one the template will print.
2273 This is used in final, just before printing the assembler code and by
2274 the routines that determine an insn's attribute.
2276 If STRICT is a positive non-zero value, it means that we have been
2277 called after reload has been completed. In that case, we must
2278 do all checks strictly. If it is zero, it means that we have been called
2279 before reload has completed. In that case, we first try to see if we can
2280 find an alternative that matches strictly. If not, we try again, this
2281 time assuming that reload will fix up the insn. This provides a "best
2282 guess" for the alternative and is used to compute attributes of insns prior
2283 to reload. A negative value of STRICT is used for this internal call. */
/* NOTE(review): many structural lines are missing from this extract --
   the outer do/while braces, the `win'/`lose' control (goto targets),
   the switch over constraint letters and several of its case labels
   ('p', 'g', 'X', 'm', '<', '>', 'E'/'F', 'G'/'H', 's', 'i', 'n',
   'I'..'P', 'V', 'o', default), and assorted `break;' lines.  Treat
   the body below as an annotated skeleton, not compilable code.  */
2291 constrain_operands (strict)
2294 const char *constraints[MAX_RECOG_OPERANDS];
2295 int matching_operands[MAX_RECOG_OPERANDS];
2296 int earlyclobber[MAX_RECOG_OPERANDS];
2299 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2300 int funny_match_index;
2302 which_alternative = 0;
/* No operands or no alternatives: trivially valid.  */
2303 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2306 for (c = 0; c < recog_data.n_operands; c++)
2308 constraints[c] = recog_data.constraints[c];
2309 matching_operands[c] = -1;
2316 funny_match_index = 0;
/* Try each alternative in turn; within one, every operand must win.  */
2318 for (opno = 0; opno < recog_data.n_operands; opno++)
2320 register rtx op = recog_data.operand[opno];
2321 enum machine_mode mode = GET_MODE (op);
2322 register const char *p = constraints[opno];
2327 earlyclobber[opno] = 0;
2329 /* A unary operator may be accepted by the predicate, but it
2330 is irrelevant for matching constraints. */
2331 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2334 if (GET_CODE (op) == SUBREG)
2336 if (GET_CODE (SUBREG_REG (op)) == REG
2337 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
2338 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2339 GET_MODE (SUBREG_REG (op)),
2342 op = SUBREG_REG (op);
2345 /* An empty constraint or empty alternative
2346 allows anything which matched the pattern. */
2347 if (*p == 0 || *p == ',')
2350 while (*p && (c = *p++) != ',')
2353 case '?': case '!': case '*': case '%':
2358 /* Ignore rest of this alternative as far as
2359 constraint checking is concerned. */
2360 while (*p && *p != ',')
2365 earlyclobber[opno] = 1;
2368 case '0': case '1': case '2': case '3': case '4':
2369 case '5': case '6': case '7': case '8': case '9':
2371 /* This operand must be the same as a previous one.
2372 This kind of constraint is used for instructions such
2373 as add when they take only two operands.
2375 Note that the lower-numbered operand is passed first.
2377 If we are not testing strictly, assume that this constraint
2378 will be satisfied. */
2383 rtx op1 = recog_data.operand[c - '0'];
2384 rtx op2 = recog_data.operand[opno];
2386 /* A unary operator may be accepted by the predicate,
2387 but it is irrelevant for matching constraints. */
2388 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2389 op1 = XEXP (op1, 0);
2390 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2391 op2 = XEXP (op2, 0);
2393 val = operands_match_p (op1, op2);
2396 matching_operands[opno] = c - '0';
2397 matching_operands[c - '0'] = opno;
2401 /* If output is *x and input is *--x,
2402 arrange later to change the output to *--x as well,
2403 since the output op is the one that will be printed. */
2404 if (val == 2 && strict > 0)
2406 funny_match[funny_match_index].this = opno;
2407 funny_match[funny_match_index++].other = c - '0';
2412 /* p is used for address_operands. When we are called by
2413 gen_reload, no one will have checked that the address is
2414 strictly valid, i.e., that all pseudos requiring hard regs
2415 have gotten them. */
2417 || (strict_memory_address_p (recog_data.operand_mode[opno],
2422 /* No need to check general_operand again;
2423 it was done in insn-recog.c. */
2425 /* Anything goes unless it is a REG and really has a hard reg
2426 but the hard reg is not in the class GENERAL_REGS. */
2428 || GENERAL_REGS == ALL_REGS
2429 || GET_CODE (op) != REG
2430 || (reload_in_progress
2431 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2432 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2437 /* This is used for a MATCH_SCRATCH in the cases when
2438 we don't actually need anything. So anything goes
anyway (tail of this comment is on a missing line).  */
2444 if (GET_CODE (op) == MEM
2445 /* Before reload, accept what reload can turn into mem. */
2446 || (strict < 0 && CONSTANT_P (op))
2447 /* During reload, accept a pseudo */
2448 || (reload_in_progress && GET_CODE (op) == REG
2449 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2454 if (GET_CODE (op) == MEM
2455 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2456 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2461 if (GET_CODE (op) == MEM
2462 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2463 || GET_CODE (XEXP (op, 0)) == POST_INC))
2468 #ifndef REAL_ARITHMETIC
2469 /* Match any CONST_DOUBLE, but only if
2470 we can examine the bits of it reliably. */
2471 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2472 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2473 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2476 if (GET_CODE (op) == CONST_DOUBLE)
2481 if (GET_CODE (op) == CONST_DOUBLE)
2487 if (GET_CODE (op) == CONST_DOUBLE
2488 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2493 if (GET_CODE (op) == CONST_INT
2494 || (GET_CODE (op) == CONST_DOUBLE
2495 && GET_MODE (op) == VOIDmode))
2498 if (CONSTANT_P (op))
2503 if (GET_CODE (op) == CONST_INT
2504 || (GET_CODE (op) == CONST_DOUBLE
2505 && GET_MODE (op) == VOIDmode))
2517 if (GET_CODE (op) == CONST_INT
2518 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2523 if (GET_CODE (op) == MEM
2524 && ((strict > 0 && ! offsettable_memref_p (op))
2526 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2527 || (reload_in_progress
2528 && !(GET_CODE (op) == REG
2529 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2534 if ((strict > 0 && offsettable_memref_p (op))
2535 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2536 /* Before reload, accept what reload can handle. */
2538 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2539 /* During reload, accept a pseudo */
2540 || (reload_in_progress && GET_CODE (op) == REG
2541 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* Default: a register-class letter or an EXTRA_CONSTRAINT.  */
2547 enum reg_class class;
2549 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2550 if (class != NO_REGS)
2554 && GET_CODE (op) == REG
2555 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2556 || (strict == 0 && GET_CODE (op) == SCRATCH)
2557 || (GET_CODE (op) == REG
2558 && reg_fits_class_p (op, class, offset, mode)))
2561 #ifdef EXTRA_CONSTRAINT
2562 else if (EXTRA_CONSTRAINT (op, c))
2569 constraints[opno] = p;
2570 /* If this operand did not win somehow,
2571 this alternative loses. */
2575 /* This alternative won; the operands are ok.
2576 Change whichever operands this alternative says to change. */
2581 /* See if any earlyclobber operand conflicts with some other
operand (tail of this comment is on a missing line).  */
2585 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2586 /* Ignore earlyclobber operands now in memory,
2587 because we would often report failure when we have
2588 two memory operands, one of which was formerly a REG. */
2589 if (earlyclobber[eopno]
2590 && GET_CODE (recog_data.operand[eopno]) == REG)
2591 for (opno = 0; opno < recog_data.n_operands; opno++)
2592 if ((GET_CODE (recog_data.operand[opno]) == MEM
2593 || recog_data.operand_type[opno] != OP_OUT)
2595 /* Ignore things like match_operator operands. */
2596 && *recog_data.constraints[opno] != 0
2597 && ! (matching_operands[opno] == eopno
2598 && operands_match_p (recog_data.operand[opno],
2599 recog_data.operand[eopno]))
2600 && ! safe_from_earlyclobber (recog_data.operand[opno],
2601 recog_data.operand[eopno]))
/* Apply the queued *x -> *--x output rewrites recorded above.  */
2606 while (--funny_match_index >= 0)
2608 recog_data.operand[funny_match[funny_match_index].other]
2609 = recog_data.operand[funny_match[funny_match_index].this];
2616 which_alternative++;
2618 while (which_alternative < recog_data.n_alternatives);
2620 which_alternative = -1;
2621 /* If we are about to reject this, but we are not to test strictly,
2622 try a very loose test. Only return failure if it fails also. */
2624 return constrain_operands (-1);
2629 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2630 is a hard reg in class CLASS when its regno is offset by OFFSET
2631 and changed to mode MODE.
2632 If REG occupies multiple hard regs, all of them must be in CLASS. */
/* NOTE(review): the loop body's regno+offset arithmetic and the return
   statements are on lines missing from this extract.  */
2635 reg_fits_class_p (operand, class, offset, mode)
2637 register enum reg_class class;
2639 enum machine_mode mode;
2641 register int regno = REGNO (operand);
2642 if (regno < FIRST_PSEUDO_REGISTER
2643 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
/* Check every hard register the value occupies in MODE.  */
2648 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2650 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2659 /* Split single instruction. Helper function for split_all_insns.
2660 Return last insn in the sequence if succesfull, or NULL if unsuccesfull. */
/* NOTE(review): the function header of split_insn and several
   structural lines (the leading INSN_P check, braces, returns) are
   missing from this extract.  */
2668 /* Don't split no-op move insns. These should silently
2669 disappear later in final. Splitting such insns would
2670 break the code that handles REG_NO_CONFLICT blocks. */
2672 else if ((set = single_set (insn)) != NULL && set_noop_p (set))
2674 /* Nops get in the way while scheduling, so delete them
2675 now if register allocation has already been done. It
2676 is too risky to try to do this before register
2677 allocation, and there are unlikely to be very many
2678 nops then anyways. */
2679 if (reload_completed)
/* Delete by turning the insn into a NOTE_INSN_DELETED note.  */
2681 PUT_CODE (insn, NOTE);
2682 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2683 NOTE_SOURCE_FILE (insn) = 0;
2688 /* Split insns here to get max fine-grain parallelism. */
2689 rtx first = PREV_INSN (insn);
2690 rtx last = try_split (PATTERN (insn), insn, 1);
2694 /* try_split returns the NOTE that INSN became. */
2695 PUT_CODE (insn, NOTE);
2696 NOTE_SOURCE_FILE (insn) = 0;
2697 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2699 /* ??? Coddle to md files that generate subregs in post-
2700 reload splitters instead of computing the proper
subreg themselves (tail of this comment is on a missing line).  */
2702 if (reload_completed && first != last)
2704 first = NEXT_INSN (first);
2708 cleanup_subreg_operands (first);
2711 first = NEXT_INSN (first);
2719 /* Split all insns in the function. If UPD_LIFE, update life info after. */
/* NOTE(review): declarations of `blocks', `changed', `i' and several
   braces/guards are on lines missing from this extract.  */
2722 split_all_insns (upd_life)
2729 blocks = sbitmap_alloc (n_basic_blocks);
2730 sbitmap_zero (blocks);
/* Walk all basic blocks, splitting each insn; record in BLOCKS which
   blocks changed so life info can be updated for just those.  */
2733 for (i = n_basic_blocks - 1; i >= 0; --i)
2735 basic_block bb = BASIC_BLOCK (i);
2738 for (insn = bb->head; insn ; insn = next)
2742 /* Can't use `next_real_insn' because that might go across
2743 CODE_LABELS and short-out basic blocks. */
2744 next = NEXT_INSN (insn);
2745 last = split_insn (insn);
2748 SET_BIT (blocks, i);
2750 if (insn == bb->end)
2755 if (insn == bb->end)
2765 compute_bb_for_insn (get_max_uid ());
2766 for (i = 0; i < n_basic_blocks; i++)
2767 find_sub_basic_blocks (BASIC_BLOCK (i));
2770 if (changed && upd_life)
/* Recompute death notes only in the blocks that actually changed.  */
2772 count_or_remove_death_notes (blocks, 1);
2773 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2775 #ifdef ENABLE_CHECKING
2776 verify_flow_info ();
2779 sbitmap_free (blocks);
2782 /* Same as split_all_insns, but do not expect CFG to be available.
2783 Used by machine dependent reorg passes. */
2786 split_all_insns_noflow ()
/* NOTE(review): body largely elided here -- the visible loop walks
   the whole insn chain; presumably each insn is split via try_split,
   mirroring split_all_insns.  Confirm against the full source.  */
2790 for (insn = get_insns (); insn; insn = next)
/* Fetch the successor first: splitting may change INSN's links.  */
2792 next = NEXT_INSN (insn);
2798 #ifdef HAVE_peephole2
/* Per-slot bookkeeping for the peephole2 pass: each slot pairs an insn
   with the register life information valid just before it.
   NOTE(review): the struct's field declarations are elided in this
   view; uses below reference members `insn' and `live_before'.  */
2799 struct peep2_insn_data
/* Circular buffer of the last MAX_INSNS_PER_PEEP2 insns seen, plus one
   extra slot for the end-of-block marker.  */
2805 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
/* Index of the current insn's slot within peep2_insn_data.  */
2806 static int peep2_current;
2808 /* A non-insn marker indicating the last insn of the block.
2809 The live_before regset for this element is correct, indicating
2810 global_live_at_end for the block. */
2811 #define PEEP2_EOB pc_rtx
2813 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2814 does not exist. Used by the recognizer to find the next insn to match
2815 in a multi-insn pattern. */
/* N beyond the buffer can never be valid.  */
2821 if (n >= MAX_INSNS_PER_PEEP2 + 1)
/* Wrap the index around the circular buffer.  */
2825 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2826 n -= MAX_INSNS_PER_PEEP2 + 1;
/* Hitting the end-of-block marker means no Nth insn exists.  */
2828 if (peep2_insn_data[n].insn == PEEP2_EOB)
2830 return peep2_insn_data[n].insn;
2833 /* Return true if REGNO is dead before the Nth non-note insn
/* NOTE(review): parts of this function are elided; the visible code
   translates OFS into a circular-buffer index and tests liveness.  */
2837 peep2_regno_dead_p (ofs, regno)
/* An offset beyond the buffer is a caller error.  */
2841 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
/* Convert the relative offset into a circular-buffer index.  */
2844 ofs += peep2_current;
2845 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2846 ofs -= MAX_INSNS_PER_PEEP2 + 1;
/* The slot must contain valid data.  */
2848 if (peep2_insn_data[ofs].insn == NULL_RTX)
/* Dead iff the register is not in the live-before set of that slot.  */
2851 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2854 /* Similarly for a REG. */
/* NOTE(review): parts of this function are elided; unlike
   peep2_regno_dead_p, it checks every hard register occupied by REG
   in its mode.  */
2857 peep2_reg_dead_p (ofs, reg)
/* An offset beyond the buffer is a caller error.  */
2863 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
/* Convert the relative offset into a circular-buffer index.  */
2866 ofs += peep2_current;
2867 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2868 ofs -= MAX_INSNS_PER_PEEP2 + 1;
/* The slot must contain valid data.  */
2870 if (peep2_insn_data[ofs].insn == NULL_RTX)
/* A multi-word value occupies HARD_REGNO_NREGS consecutive hard regs;
   all of them must be dead.  */
2873 regno = REGNO (reg);
2874 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2876 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2881 /* Try to find a hard register of mode MODE, matching the register class in
2882 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2883 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2884 in which case the only condition is that the register must be available
2885 before CURRENT_INSN.
2886 Registers that already have bits set in REG_SET will not be considered.
2888 If an appropriate register is available, it will be returned and the
2889 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
/* NOTE(review): portions of this function are elided in this view;
   the comments below annotate only the visible code.  */
2893 peep2_find_free_register (from, to, class_str, mode, reg_set)
2895 const char *class_str;
2896 enum machine_mode mode;
2897 HARD_REG_SET *reg_set;
/* Rotating start offset so successive calls spread allocations over
   different registers (persists across calls).  */
2899 static int search_ofs;
2900 enum reg_class class;
/* Offsets beyond the buffer are caller errors.  */
2904 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
/* Convert both relative offsets into circular-buffer indices.  */
2907 from += peep2_current;
2908 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2909 from -= MAX_INSNS_PER_PEEP2 + 1;
2910 to += peep2_current;
2911 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2912 to -= MAX_INSNS_PER_PEEP2 + 1;
2914 if (peep2_insn_data[from].insn == NULL_RTX)
2916 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
/* Accumulate everything live at any point in [FROM, TO]; a register
   is only free if it is dead across the whole span.  */
2920 HARD_REG_SET this_live;
2922 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2924 if (peep2_insn_data[from].insn == NULL_RTX)
2926 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2927 IOR_HARD_REG_SET (live, this_live);
/* 'r' means GENERAL_REGS; otherwise decode the constraint letter.  */
2930 class = (class_str[0] == 'r' ? GENERAL_REGS
2931 : REG_CLASS_FROM_LETTER (class_str[0]));
2933 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2935 int raw_regno, regno, success, j;
2937 /* Distribute the free registers as much as possible. */
2938 raw_regno = search_ofs + i;
2939 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2940 raw_regno -= FIRST_PSEUDO_REGISTER;
2941 #ifdef REG_ALLOC_ORDER
/* Honor the target's preferred allocation order when it has one.  */
2942 regno = reg_alloc_order[raw_regno];
2947 /* Don't allocate fixed registers. */
2948 if (fixed_regs[regno])
2950 /* Make sure the register is of the right class. */
2951 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2953 /* And can support the mode we need. */
2954 if (! HARD_REGNO_MODE_OK (regno, mode))
2956 /* And that we don't create an extra save/restore. */
2957 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2959 /* And we don't clobber traceback for noreturn functions. */
2960 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2961 && (! reload_completed || frame_pointer_needed))
/* Check every hard register the mode occupies for conflicts with
   LIVE and with registers the caller already claimed in REG_SET.  */
2965 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2967 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2968 || TEST_HARD_REG_BIT (live, regno + j))
/* Success: claim all hard registers of the value in REG_SET.  */
2976 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2977 SET_HARD_REG_BIT (*reg_set, regno + j);
2979 /* Start the next search with the next register. */
2980 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2982 search_ofs = raw_regno;
2984 return gen_rtx_REG (mode, regno);
2992 /* Perform the peephole2 optimization pass. */
/* NOTE(review): a number of lines of this function are elided in this
   view; the comments below annotate only the visible code.  */
2995 peephole2_optimize (dump_file)
2996 FILE *dump_file ATTRIBUTE_UNUSED;
/* Backing storage for the per-slot live_before regsets plus the
   scratch LIVE set.  */
2998 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3002 #ifdef HAVE_conditional_execution
3007 /* Initialize the regsets we're going to use. */
3008 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3009 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3010 live = INITIALIZE_REG_SET (rs_heads[i]);
3012 #ifdef HAVE_conditional_execution
/* With conditional execution we cannot incrementally maintain life
   info; track modified blocks and fix them up at the end.  */
3013 blocks = sbitmap_alloc (n_basic_blocks);
3014 sbitmap_zero (blocks);
3017 count_or_remove_death_notes (NULL, 1);
3020 for (b = n_basic_blocks - 1; b >= 0; --b)
3022 basic_block bb = BASIC_BLOCK (b);
3023 struct propagate_block_info *pbi;
3025 /* Indicate that all slots except the last holds invalid data. */
3026 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3027 peep2_insn_data[i].insn = NULL_RTX;
3029 /* Indicate that the last slot contains live_after data. */
3030 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3031 peep2_current = MAX_INSNS_PER_PEEP2;
3033 /* Start up propagation. */
3034 COPY_REG_SET (live, bb->global_live_at_end);
3035 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3037 #ifdef HAVE_conditional_execution
3038 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3040 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
/* Walk the block backwards, computing live-before each insn as we
   go and trying the peephole patterns at each one.  */
3043 for (insn = bb->end; ; insn = prev)
3045 prev = PREV_INSN (insn);
3051 /* Record this insn. */
3052 if (--peep2_current < 0)
3053 peep2_current = MAX_INSNS_PER_PEEP2;
3054 peep2_insn_data[peep2_current].insn = insn;
3055 propagate_one_insn (pbi, insn);
3056 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3058 /* Match the peephole. */
3059 try = peephole2_insns (PATTERN (insn), insn, &match_len);
/* A match: slot I is the last insn of the matched sequence.  */
3062 i = match_len + peep2_current;
3063 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3064 i -= MAX_INSNS_PER_PEEP2 + 1;
3066 /* Replace the old sequence with the new. */
3067 flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
3068 try = emit_insn_after (try, prev);
3070 /* Adjust the basic block boundaries. */
3071 if (peep2_insn_data[i].insn == bb->end)
3073 if (insn == bb->head)
3074 bb->head = NEXT_INSN (prev);
3076 #ifdef HAVE_conditional_execution
3077 /* With conditional execution, we cannot back up the
3078 live information so easily, since the conditional
3079 death data structures are not so self-contained.
3080 So record that we've made a modification to this
3081 block and update life information at the end. */
3082 SET_BIT (blocks, b);
/* Invalidate all buffered insns; matching restarts from the
   freshly-emitted sequence.  */
3085 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3086 peep2_insn_data[i].insn = NULL_RTX;
3087 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3089 /* Back up lifetime information past the end of the
3090 newly created sequence. */
3091 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3093 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3095 /* Update life information for the new sequence. */
/* Walk the new insns backwards, filling the buffer slots and
   recomputing live-before for each.  */
3101 i = MAX_INSNS_PER_PEEP2;
3102 peep2_insn_data[i].insn = try;
3103 propagate_one_insn (pbi, try);
3104 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3106 try = PREV_INSN (try);
3108 while (try != prev);
3110 /* ??? Should verify that LIVE now matches what we
3111 had before the new sequence. */
3118 if (insn == bb->head)
3122 free_propagate_block_info (pbi);
/* Release all regsets allocated at function entry.  */
3125 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3126 FREE_REG_SET (peep2_insn_data[i].live_before);
3127 FREE_REG_SET (live);
3129 #ifdef HAVE_conditional_execution
/* Now rebuild life info for the blocks we touched.  */
3130 count_or_remove_death_notes (blocks, 1);
3131 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3132 sbitmap_free (blocks);
3135 #endif /* HAVE_peephole2 */