1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "hard-reg-set.h"
37 #include "basic-block.h"
/* Default addressing codes used for stack pushes/pops, keyed off the
   stack-growth direction.
   NOTE(review): this copy of the file is missing lines (the interior
   numbering jumps); the #else/#endif lines of these conditionals are
   not visible here.  */
41 #ifndef STACK_PUSH_CODE
42 #ifdef STACK_GROWS_DOWNWARD
43 #define STACK_PUSH_CODE PRE_DEC
45 #define STACK_PUSH_CODE PRE_INC
49 #ifndef STACK_POP_CODE
50 #ifdef STACK_GROWS_DOWNWARD
51 #define STACK_POP_CODE POST_INC
53 #define STACK_POP_CODE POST_DEC
/* Forward declarations for the static helpers defined later in this file.  */
57 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
58 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
59 static void validate_replace_src_1 PARAMS ((rtx *, void *));
60 static rtx split_insn PARAMS ((rtx));
62 /* Nonzero means allow operands to be volatile.
63 This should be 0 if you are generating rtl, such as if you are calling
64 the functions in optabs.c and expmed.c (most of the time).
65 This should be 1 if all valid insns need to be recognized,
66 such as in regclass.c and final.c and reload.c.
68 init_recog and init_recog_no_volatile are responsible for setting this. */
/* NOTE(review): the definition of the `volatile_ok' flag this comment
   describes is among the lines missing from this copy.  */
72 struct recog_data recog_data;
74 /* Contains a vector of operand_alternative structures for every operand.
75 Set up by preprocess_constraints. */
76 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
78 /* On return from `constrain_operands', indicate which alternative
81 int which_alternative;
83 /* Nonzero after end of reload pass.
84 Set to 1 or 0 by toplev.c.
85 Controls the significance of (SUBREG (MEM)). */
/* NOTE(review): the `reload_completed' definition this comment describes
   is among the missing lines.  */
89 /* Initialize data used by the function `recog'.
90 This must be called once in the compilation of a function
91 before any insn recognition may be done in the function. */
/* NOTE(review): the body of this function (presumably clearing
   `volatile_ok', per the comment at the top of the file) is among the
   lines missing from this copy.  */
94 init_recog_no_volatile ()
105 /* Try recognizing the instruction INSN,
106 and return the code number that results.
107 Remember the code so that repeated calls do not
108 need to spend the time for actual rerecognition.
110 This function is the normal interface to instruction recognition.
111 The automatically-generated function `recog' is normally called
112 through this one. (The only exception is in combine.c.) */
115 recog_memoized_1 (insn)
/* Memoize: INSN_CODE < 0 means "not yet recognized"; cache the result
   of `recog' in the insn itself.  */
118 if (INSN_CODE (insn) < 0)
119 INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
120 return INSN_CODE (insn);
123 /* Check that X is an insn-body for an `asm' with operands
124 and that the operands mentioned in it are legitimate. */
/* NOTE(review): several declaration and brace lines of this function are
   missing from this copy (interior numbering jumps).  */
127 check_asm_operands (x)
132 const char **constraints;
135 /* Post-reload, be more strict with things. */
136 if (reload_completed)
138 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
139 extract_insn (make_insn_raw (x));
140 constrain_operands (1);
141 return which_alternative >= 0;
144 noperands = asm_noperands (x);
150 operands = (rtx *) alloca (noperands * sizeof (rtx));
151 constraints = (const char **) alloca (noperands * sizeof (char *));
153 decode_asm_operands (x, operands, NULL, constraints, NULL);
155 for (i = 0; i < noperands; i++)
157 const char *c = constraints[i];
/* A lone digit constraint means "same as operand N": check this operand
   against operand N's constraint instead.  */
160 if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
161 c = constraints[c[0] - '0'];
163 if (! asm_operand_ok (operands[i], c))
170 /* Static data for the next two routines. */
/* NOTE(review): the field list of `change_t' (object/loc/old/old_code,
   judging from the uses below) is among the lines missing from this
   copy.  `changes' is a dynamically grown array holding the pending
   change group; `num_changes' counts the entries currently in it.  */
172 typedef struct change_t
180 static change_t *changes;
181 static int changes_allocated;
183 static int num_changes = 0;
185 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
186 at which NEW will be placed. If OBJECT is zero, no validation is done,
187 the change is simply made.
189 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
190 will be called with the address and mode as parameters. If OBJECT is
191 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
194 IN_GROUP is non-zero if this is part of a group of changes that must be
195 performed as a group. In that case, the changes will be stored. The
196 function `apply_change_group' will validate and apply the changes.
198 If IN_GROUP is zero, this is a single change. Try to recognize the insn
199 or validate the memory reference with the change applied. If the result
200 is not valid for the machine, suppress the change and return zero.
201 Otherwise, perform the change and return 1. */
/* NOTE(review): several lines of this function are missing from this
   copy, including the assignment of *LOC and the early returns.  */
204 validate_change (object, loc, new, in_group)
/* No-op if the replacement is identical to what is already there.  */
212 if (old == new || rtx_equal_p (old, new))
215 if (in_group == 0 && num_changes != 0)
220 /* Save the information describing this change. */
/* Grow the change array geometrically (doubling) once the initial
   allocation is exhausted.  */
221 if (num_changes >= changes_allocated)
223 if (changes_allocated == 0)
224 /* This value allows for repeated substitutions inside complex
225 indexed addresses, or changes in up to 5 insns. */
226 changes_allocated = MAX_RECOG_OPERANDS * 5;
228 changes_allocated *= 2;
231 (change_t*) xrealloc (changes,
232 sizeof (change_t) * changes_allocated);
235 changes[num_changes].object = object;
236 changes[num_changes].loc = loc;
237 changes[num_changes].old = old;
239 if (object && GET_CODE (object) != MEM)
241 /* Set INSN_CODE to force rerecognition of insn. Save old code in
243 changes[num_changes].old_code = INSN_CODE (object);
244 INSN_CODE (object) = -1;
249 /* If we are making a group of changes, return 1. Otherwise, validate the
250 change group we made. */
255 return apply_change_group ();
258 /* This subroutine of apply_change_group verifies whether the changes to INSN
259 were valid; i.e. whether INSN can still be recognized. */
/* NOTE(review): return statements and some braces of this function are
   among the lines missing from this copy; judging from the name and the
   callers, it returns non-zero when INSN is invalid.  */
262 insn_invalid_p (insn)
265 rtx pat = PATTERN (insn);
266 int num_clobbers = 0;
267 /* If we are before reload and the pattern is a SET, see if we can add
269 int icode = recog (pat, insn,
270 (GET_CODE (pat) == SET
271 && ! reload_completed && ! reload_in_progress)
272 ? &num_clobbers : 0);
273 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
276 /* If this is an asm and the operand aren't legal, then fail. Likewise if
277 this is not an asm and the insn wasn't recognized. */
278 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
279 || (!is_asm && icode < 0))
282 /* If we have to add CLOBBERs, fail if we have to add ones that reference
283 hard registers since our callers can't know if they are live or not.
284 Otherwise, add them. */
285 if (num_clobbers > 0)
289 if (added_clobbers_hard_reg_p (icode))
/* Wrap the pattern in a PARALLEL and let add_clobbers append the
   clobbers required by insn pattern ICODE.  */
292 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
293 XVECEXP (newpat, 0, 0) = pat;
294 add_clobbers (newpat, icode);
295 PATTERN (insn) = pat = newpat;
298 /* After reload, verify that all constraints are satisfied. */
299 if (reload_completed)
303 if (! constrain_operands (1))
307 INSN_CODE (insn) = icode;
311 /* Apply a group of changes previously issued with `validate_change'.
312 Return 1 if all changes are valid, zero otherwise. */
/* NOTE(review): several brace/declaration/return lines of this function
   are among the lines missing from this copy (e.g. the declaration of
   `bb' used near the end, and the failure path that cancels changes).  */
315 apply_change_group ()
318 rtx last_validated = NULL_RTX;
320 /* The changes have been applied and all INSN_CODEs have been reset to force
323 The changes are valid if we aren't given an object, or if we are
324 given a MEM and it still is a valid address, or if this is in insn
325 and it is recognized. In the latter case, if reload has completed,
326 we also require that the operands meet the constraints for
329 for (i = 0; i < num_changes; i++)
331 rtx object = changes[i].object;
333 /* if there is no object to test or if it is the same as the one we
334 already tested, ignore it. */
335 if (object == 0 || object == last_validated)
338 if (GET_CODE (object) == MEM)
340 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
343 else if (insn_invalid_p (object))
345 rtx pat = PATTERN (object);
347 /* Perhaps we couldn't recognize the insn because there were
348 extra CLOBBERs at the end. If so, try to re-recognize
349 without the last CLOBBER (later iterations will cause each of
350 them to be eliminated, in turn). But don't do this if we
351 have an ASM_OPERAND. */
352 if (GET_CODE (pat) == PARALLEL
353 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
354 && asm_noperands (PATTERN (object)) < 0)
/* With exactly two elements, dropping the CLOBBER leaves a plain
   pattern; otherwise rebuild the PARALLEL one element shorter.  */
358 if (XVECLEN (pat, 0) == 2)
359 newpat = XVECEXP (pat, 0, 0);
365 = gen_rtx_PARALLEL (VOIDmode,
366 rtvec_alloc (XVECLEN (pat, 0) - 1));
367 for (j = 0; j < XVECLEN (newpat, 0); j++)
368 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
371 /* Add a new change to this group to replace the pattern
372 with this new pattern. Then consider this change
373 as having succeeded. The change we added will
374 cause the entire call to fail if things remain invalid.
376 Note that this can lose if a later change than the one
377 we are processing specified &XVECEXP (PATTERN (object), 0, X)
378 but this shouldn't occur. */
380 validate_change (object, &PATTERN (object), newpat, 1);
383 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
384 /* If this insn is a CLOBBER or USE, it is always valid, but is
390 last_validated = object;
/* All changes examined without failure: mark the affected basic
   blocks dirty so the CFG data gets refreshed.  */
393 if (i == num_changes)
397 for (i = 0; i < num_changes; i++)
398 if (changes[i].object
399 && INSN_P (changes[i].object)
400 && basic_block_for_insn
401 && ((unsigned int)INSN_UID (changes[i].object)
402 < basic_block_for_insn->num_elements)
403 && (bb = BLOCK_FOR_INSN (changes[i].object)))
404 bb->flags |= BB_DIRTY;
416 /* Return the number of changes so far in the current group. */
/* NOTE(review): the body (presumably `return num_changes;') is among the
   lines missing from this copy.  */
419 num_validated_changes ()
424 /* Retract the changes numbered NUM and up. */
/* NOTE(review): the function signature line is among the lines missing
   from this copy; from the comment and body this looks like
   `cancel_changes (num)' — confirm against the full source.  */
432 /* Back out all the changes. Do this in the opposite order in which
434 for (i = num_changes - 1; i >= num; i--)
436 *changes[i].loc = changes[i].old;
/* MEMs never had their INSN_CODE saved, so only restore it for insns.  */
437 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
438 INSN_CODE (changes[i].object) = changes[i].old_code;
443 /* Replace every occurrence of FROM in X with TO. Mark each change with
444 validate_change passing OBJECT. */
/* NOTE(review): many lines of this function are missing from this copy,
   including the `switch (code)' statement and its case labels that the
   fragments below (PLUS/MINUS/extend/SUBREG/extract handling) clearly
   belong to.  All changes are queued with in_group == 1; the caller is
   expected to run apply_change_group.  */
447 validate_replace_rtx_1 (loc, from, to, object)
449 rtx from, to, object;
455 enum machine_mode op0_mode = VOIDmode;
456 int prev_changes = num_changes;
463 fmt = GET_RTX_FORMAT (code);
465 op0_mode = GET_MODE (XEXP (x, 0));
467 /* X matches FROM if it is the same rtx or they are both referring to the
468 same register in the same mode. Avoid calling rtx_equal_p unless the
469 operands look similar. */
472 || (GET_CODE (x) == REG && GET_CODE (from) == REG
473 && GET_MODE (x) == GET_MODE (from)
474 && REGNO (x) == REGNO (from))
475 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
476 && rtx_equal_p (x, from)))
478 validate_change (object, loc, to, 1);
482 /* Call ourself recursively to perform the replacements. */
484 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
487 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
488 else if (fmt[i] == 'E')
489 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
490 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
493 /* If we didn't substitute, there is nothing more to do. */
494 if (num_changes == prev_changes)
497 /* Allow substituted expression to have different mode. This is used by
498 regmove to change mode of pseudo register. */
499 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
500 op0_mode = GET_MODE (XEXP (x, 0));
502 /* Do changes needed to keep rtx consistent. Don't do any other
503 simplifications, as it is not our job. */
/* Canonical operand order for commutative ('c') and comparison ('<')
   codes; comparisons additionally need the condition swapped.  */
505 if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
506 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
508 validate_change (object, loc,
509 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
510 : swap_condition (code),
511 GET_MODE (x), XEXP (x, 1),
520 /* If we have a PLUS whose second operand is now a CONST_INT, use
521 plus_constant to try to simplify it.
522 ??? We may want later to remove this, once simplification is
523 separated from this function. */
524 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
525 validate_change (object, loc,
527 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
/* MINUS of a constant is rewritten as PLUS of its negation.  */
530 if (GET_CODE (XEXP (x, 1)) == CONST_INT
531 || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
532 validate_change (object, loc,
534 (PLUS, GET_MODE (x), XEXP (x, 0),
535 simplify_gen_unary (NEG,
536 GET_MODE (x), XEXP (x, 1),
/* Extensions of a VOIDmode (constant) operand must be folded away;
   if simplification fails, plant an unrecognizable CLOBBER so the
   whole change group is rejected.  */
541 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
543 new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
545 /* If any of the above failed, substitute in something that
546 we know won't be recognized. */
548 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
549 validate_change (object, loc, new, 1);
553 /* All subregs possible to simplify should be simplified. */
554 new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
557 /* Subregs of VOIDmode operands are incorrect. */
558 if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
559 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
561 validate_change (object, loc, new, 1);
565 /* If we are replacing a register with memory, try to change the memory
566 to be the mode required for memory in extract operations (this isn't
567 likely to be an insertion operation; if it was, nothing bad will
568 happen, we might just fail in some cases). */
570 if (GET_CODE (XEXP (x, 0)) == MEM
571 && GET_CODE (XEXP (x, 1)) == CONST_INT
572 && GET_CODE (XEXP (x, 2)) == CONST_INT
573 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
574 && !MEM_VOLATILE_P (XEXP (x, 0)))
576 enum machine_mode wanted_mode = VOIDmode;
577 enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
578 int pos = INTVAL (XEXP (x, 2));
580 if (GET_CODE (x) == ZERO_EXTRACT)
582 enum machine_mode new_mode
583 = mode_for_extraction (EP_extzv, 1);
584 if (new_mode != MAX_MACHINE_MODE)
585 wanted_mode = new_mode;
587 else if (GET_CODE (x) == SIGN_EXTRACT)
589 enum machine_mode new_mode
590 = mode_for_extraction (EP_extv, 1);
591 if (new_mode != MAX_MACHINE_MODE)
592 wanted_mode = new_mode;
595 /* If we have a narrower mode, we can do something. */
596 if (wanted_mode != VOIDmode
597 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
599 int offset = pos / BITS_PER_UNIT;
602 /* If the bytes and bits are counted differently, we
603 must adjust the offset. */
604 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
606 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
609 pos %= GET_MODE_BITSIZE (wanted_mode);
611 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
613 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
614 validate_change (object, &XEXP (x, 0), newmem, 1);
625 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
626 with TO. After all changes have been made, validate by seeing
627 if INSN is still valid. */
/* Returns the result of apply_change_group: non-zero iff INSN is still
   recognizable with the replacements applied.  */
630 validate_replace_rtx_subexp (from, to, insn, loc)
631 rtx from, to, insn, *loc;
633 validate_replace_rtx_1 (loc, from, to, insn);
634 return apply_change_group ();
637 /* Try replacing every occurrence of FROM in INSN with TO. After all
638 changes have been made, validate by seeing if INSN is still valid. */
/* Same as validate_replace_rtx_subexp but always starts at the whole
   pattern of INSN.  */
641 validate_replace_rtx (from, to, insn)
644 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
645 return apply_change_group ();
648 /* Try replacing every occurrence of FROM in INSN with TO. */
/* Group variant: queues the changes but does NOT call
   apply_change_group — the caller is responsible for that.  */
651 validate_replace_rtx_group (from, to, insn)
654 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
657 /* Function called by note_uses to replace used subexpressions. */
/* The struct bundles the replacement parameters so they can be passed
   through note_uses's single `void *' callback argument.  */
658 struct validate_replace_src_data
660 rtx from; /* Old RTX */
661 rtx to; /* New RTX */
662 rtx insn; /* Insn in which substitution is occurring. */
/* note_uses callback: X is a pointer to a used subexpression; DATA is a
   struct validate_replace_src_data describing the replacement.  */
666 validate_replace_src_1 (x, data)
670 struct validate_replace_src_data *d
671 = (struct validate_replace_src_data *) data;
673 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
676 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
677 SET_DESTs. After all changes have been made, validate by seeing if
678 INSN is still valid. */
/* NOTE(review): the lines filling in `d.from/d.to/d.insn' are among the
   lines missing from this copy.  */
681 validate_replace_src (from, to, insn)
684 struct validate_replace_src_data d;
689 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
690 return apply_change_group ();
694 /* Return 1 if the insn using CC0 set by INSN does not contain
695 any ordered tests applied to the condition codes.
696 EQ and NE tests do not count. */
699 next_insn_tests_no_inequality (insn)
702 rtx next = next_cc0_user (insn);
704 /* If there is no next insn, we have to take the conservative choice. */
/* NOTE(review): the conservative-return line itself is among the lines
   missing from this copy.  */
708 return ((GET_CODE (next) == JUMP_INSN
709 || GET_CODE (next) == INSN
710 || GET_CODE (next) == CALL_INSN)
711 && ! inequality_comparisons_p (PATTERN (next)));
714 #if 0 /* This is useless since the insn that sets the cc's
715 must be followed immediately by the use of them. */
716 /* Return 1 if the CC value set up by INSN is not used. */
/* NOTE(review): dead code kept under #if 0 upstream; several lines
   (returns, braces, the closing #endif) are missing from this copy.  */
719 next_insns_test_no_inequality (insn)
722 rtx next = NEXT_INSN (insn);
724 for (; next != 0; next = NEXT_INSN (next))
726 if (GET_CODE (next) == CODE_LABEL
727 || GET_CODE (next) == BARRIER)
729 if (GET_CODE (next) == NOTE)
731 if (inequality_comparisons_p (PATTERN (next)))
733 if (sets_cc0_p (PATTERN (next)) == 1)
735 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
743 /* This is used by find_single_use to locate an rtx that contains exactly one
744 use of DEST, which is typically either a REG or CC0. It returns a
745 pointer to the innermost rtx expression containing DEST. Appearances of
746 DEST that are being used to totally replace it are not counted. */
/* NOTE(review): substantial lines are missing from this copy, including
   the switch over `code', several case labels, and the early returns in
   the per-operand loops.  */
749 find_single_use_1 (dest, loc)
754 enum rtx_code code = GET_CODE (x);
772 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
773 of a REG that occupies all of the REG, the insn uses DEST if
774 it is mentioned in the destination or the source. Otherwise, we
775 need just check the source. */
776 if (GET_CODE (SET_DEST (x)) != CC0
777 && GET_CODE (SET_DEST (x)) != PC
778 && GET_CODE (SET_DEST (x)) != REG
779 && ! (GET_CODE (SET_DEST (x)) == SUBREG
780 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
/* The SUBREG covers the whole REG iff both occupy the same number of
   words.  */
781 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
782 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
783 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
784 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
787 return find_single_use_1 (dest, &SET_SRC (x));
791 return find_single_use_1 (dest, &XEXP (x, 0));
797 /* If it wasn't one of the common cases above, check each expression and
798 vector of this code. Look for a unique usage of DEST. */
800 fmt = GET_RTX_FORMAT (code);
801 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
805 if (dest == XEXP (x, i)
806 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
807 && REGNO (dest) == REGNO (XEXP (x, i))))
810 this_result = find_single_use_1 (dest, &XEXP (x, i));
/* First hit becomes the candidate; a second hit means DEST is used
   more than once, so there is no single use.  */
813 result = this_result;
814 else if (this_result)
815 /* Duplicate usage. */
818 else if (fmt[i] == 'E')
822 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
824 if (XVECEXP (x, i, j) == dest
825 || (GET_CODE (dest) == REG
826 && GET_CODE (XVECEXP (x, i, j)) == REG
827 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
830 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
833 result = this_result;
834 else if (this_result)
843 /* See if DEST, produced in INSN, is used only a single time in the
844 sequel. If so, return a pointer to the innermost rtx expression in which
847 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
849 This routine will return usually zero either before flow is called (because
850 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
851 note can't be trusted).
853 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
854 care about REG_DEAD notes or LOG_LINKS.
856 Otherwise, we find the single use by finding an insn that has a
857 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
858 only referenced once in that insn, we know that it must be the first
859 and last insn referencing DEST. */
/* NOTE(review): declarations, the cc0 guard, the *PLOC stores, and the
   final returns are among the lines missing from this copy.  */
862 find_single_use (dest, insn, ploc)
/* CC0 case: only the immediately following insn can use it.  */
874 next = NEXT_INSN (insn);
876 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
879 result = find_single_use_1 (dest, &PATTERN (next));
/* The LOG_LINKS/REG_DEAD bookkeeping is only trustworthy between flow
   and reload, and only for pseudo registers.  */
886 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
889 for (next = next_nonnote_insn (insn);
890 next != 0 && GET_CODE (next) != CODE_LABEL;
891 next = next_nonnote_insn (next))
892 if (INSN_P (next) && dead_or_set_p (next, dest))
894 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
895 if (XEXP (link, 0) == insn)
900 result = find_single_use_1 (dest, &PATTERN (next));
910 /* Return 1 if OP is a valid general operand for machine mode MODE.
911 This is either a register reference, a memory reference,
912 or a constant. In the case of a memory reference, the address
913 is checked for general validity for the target machine.
915 Register and memory references must have mode MODE in order to be valid,
916 but some constants have no machine mode and are valid for any mode.
918 If MODE is VOIDmode, OP is checked for validity for whatever mode
921 The main use of this function is as a predicate in match_operand
922 expressions in the machine description.
924 For an explanation of this function's behavior for registers of
925 class NO_REGS, see the comment for `register_operand'. */
/* NOTE(review): many lines are missing from this copy — the CONSTANT_P
   branch heads, SUBREG/REG/MEM dispatch, the `win:' label, several
   returns and #endif lines.  */
928 general_operand (op, mode)
930 enum machine_mode mode;
932 enum rtx_code code = GET_CODE (op);
934 if (mode == VOIDmode)
935 mode = GET_MODE (op);
937 /* Don't accept CONST_INT or anything similar
938 if the caller wants something floating. */
939 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
940 && GET_MODE_CLASS (mode) != MODE_INT
941 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
/* Reject constants that are not already in canonical (sign-extended)
   form for MODE.  */
944 if (GET_CODE (op) == CONST_INT
945 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
949 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
951 #ifdef LEGITIMATE_PIC_OPERAND_P
952 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
954 && LEGITIMATE_CONSTANT_P (op));
956 /* Except for certain constants with VOIDmode, already checked for,
957 OP's mode must match MODE if MODE specifies a mode. */
959 if (GET_MODE (op) != mode)
964 #ifdef INSN_SCHEDULING
965 /* On machines that have insn scheduling, we want all memory
966 reference to be explicit, so outlaw paradoxical SUBREGs. */
967 if (GET_CODE (SUBREG_REG (op)) == MEM
968 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
971 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
972 may result in incorrect reference. We should simplify all valid
973 subregs of MEM anyway. But allow this after reload because we
974 might be called from cleanup_subreg_operands.
976 ??? This is a kludge. */
977 if (!reload_completed && SUBREG_BYTE (op) != 0
978 && GET_CODE (SUBREG_REG (op)) == MEM)
/* Strip the SUBREG and re-dispatch on the inner expression.  */
981 op = SUBREG_REG (op);
982 code = GET_CODE (op);
986 /* A register whose class is NO_REGS is not a general operand. */
987 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
988 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
992 rtx y = XEXP (op, 0);
994 if (! volatile_ok && MEM_VOLATILE_P (op))
997 if (GET_CODE (y) == ADDRESSOF)
1000 /* Use the mem's mode, since it will be reloaded thus. */
1001 mode = GET_MODE (op);
1002 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
1005 /* Pretend this is an operand for now; we'll run force_operand
1006 on its replacement in fixup_var_refs_1. */
1007 if (code == ADDRESSOF)
1016 /* Return 1 if OP is a valid memory address for a memory reference
1019 The main use of this function is as a predicate in match_operand
1020 expressions in the machine description. */
/* Thin wrapper around memory_address_p.  */
1023 address_operand (op, mode)
1025 enum machine_mode mode;
1027 return memory_address_p (mode, op);
1030 /* Return 1 if OP is a register reference of mode MODE.
1031 If MODE is VOIDmode, accept a register in any mode.
1033 The main use of this function is as a predicate in match_operand
1034 expressions in the machine description.
1036 As a special exception, registers whose class is NO_REGS are
1037 not accepted by `register_operand'. The reason for this change
1038 is to allow the representation of special architecture artifacts
1039 (such as a condition code register) without extending the rtl
1040 definitions. Since registers of class NO_REGS cannot be used
1041 as registers in any case where register classes are examined,
1042 it is most consistent to keep this function from accepting them. */
/* NOTE(review): some return/brace/#endif lines are among the lines
   missing from this copy.  */
1045 register_operand (op, mode)
1047 enum machine_mode mode;
1049 if (GET_MODE (op) != mode && mode != VOIDmode)
1052 if (GET_CODE (op) == SUBREG)
1054 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1055 because it is guaranteed to be reloaded into one.
1056 Just make sure the MEM is valid in itself.
1057 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1058 but currently it does result from (SUBREG (REG)...) where the
1059 reg went on the stack.) */
1060 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1061 return general_operand (op, mode);
1063 #ifdef CLASS_CANNOT_CHANGE_MODE
/* Reject SUBREGs of hard registers whose class forbids mode changes,
   except for complex modes which decompose into independent parts.  */
1064 if (GET_CODE (SUBREG_REG (op)) == REG
1065 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1066 && (TEST_HARD_REG_BIT
1067 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
1068 REGNO (SUBREG_REG (op))))
1069 && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
1070 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1071 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1075 op = SUBREG_REG (op);
1078 /* If we have an ADDRESSOF, consider it valid since it will be
1079 converted into something that will not be a MEM. */
1080 if (GET_CODE (op) == ADDRESSOF)
1083 /* We don't consider registers whose class is NO_REGS
1084 to be a register operand. */
1085 return (GET_CODE (op) == REG
1086 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1087 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1090 /* Return 1 for a register in Pmode; ignore the tested mode. */
/* Delegates to register_operand with the target's pointer mode.  */
1093 pmode_register_operand (op, mode)
1095 enum machine_mode mode ATTRIBUTE_UNUSED;
1097 return register_operand (op, Pmode);
1100 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1101 or a hard register. */
1104 scratch_operand (op, mode)
1106 enum machine_mode mode;
1108 if (GET_MODE (op) != mode && mode != VOIDmode)
/* Accept an explicit SCRATCH, or any hard (pre-pseudo) register.  */
1111 return (GET_CODE (op) == SCRATCH
1112 || (GET_CODE (op) == REG
1113 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1116 /* Return 1 if OP is a valid immediate operand for mode MODE.
1118 The main use of this function is as a predicate in match_operand
1119 expressions in the machine description. */
/* NOTE(review): the early-return lines after the guard conditions are
   among the lines missing from this copy.  */
1122 immediate_operand (op, mode)
1124 enum machine_mode mode;
1126 /* Don't accept CONST_INT or anything similar
1127 if the caller wants something floating. */
1128 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1129 && GET_MODE_CLASS (mode) != MODE_INT
1130 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
/* Reject CONST_INTs not in canonical sign-extended form for MODE.  */
1133 if (GET_CODE (op) == CONST_INT
1134 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1137 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1138 result in 0/1. It seems a safe assumption that this is
1139 in range for everyone. */
1140 if (GET_CODE (op) == CONSTANT_P_RTX)
1143 return (CONSTANT_P (op)
1144 && (GET_MODE (op) == mode || mode == VOIDmode
1145 || GET_MODE (op) == VOIDmode)
1146 #ifdef LEGITIMATE_PIC_OPERAND_P
1147 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1149 && LEGITIMATE_CONSTANT_P (op));
1152 /* Returns 1 if OP is an operand that is a CONST_INT. */
1155 const_int_operand (op, mode)
1157 enum machine_mode mode;
1159 if (GET_CODE (op) != CONST_INT)
/* When a mode is given, also require canonical form for that mode.  */
1162 if (mode != VOIDmode
1163 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1169 /* Returns 1 if OP is an operand that is a constant integer or constant
1170 floating-point number. */
1173 const_double_operand (op, mode)
1175 enum machine_mode mode;
1177 /* Don't accept CONST_INT or anything similar
1178 if the caller wants something floating. */
1179 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1180 && GET_MODE_CLASS (mode) != MODE_INT
1181 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
/* Accept both CONST_DOUBLE and CONST_INT — wide integer constants may
   be represented either way.  */
1184 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1185 && (mode == VOIDmode || GET_MODE (op) == mode
1186 || GET_MODE (op) == VOIDmode));
1189 /* Return 1 if OP is a general operand that is not an immediate operand. */
1192 nonimmediate_operand (op, mode)
1194 enum machine_mode mode;
1196 return (general_operand (op, mode) && ! CONSTANT_P (op));
1199 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
/* NOTE(review): mirrors the constant checks of general_operand and the
   register checks of register_operand; some return/brace/#endif lines
   are among the lines missing from this copy.  */
1202 nonmemory_operand (op, mode)
1204 enum machine_mode mode;
1206 if (CONSTANT_P (op))
1208 /* Don't accept CONST_INT or anything similar
1209 if the caller wants something floating. */
1210 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1211 && GET_MODE_CLASS (mode) != MODE_INT
1212 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1215 if (GET_CODE (op) == CONST_INT
1216 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1219 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1220 || mode == VOIDmode)
1221 #ifdef LEGITIMATE_PIC_OPERAND_P
1222 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1224 && LEGITIMATE_CONSTANT_P (op));
1227 if (GET_MODE (op) != mode && mode != VOIDmode)
1230 if (GET_CODE (op) == SUBREG)
1232 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1233 because it is guaranteed to be reloaded into one.
1234 Just make sure the MEM is valid in itself.
1235 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1236 but currently it does result from (SUBREG (REG)...) where the
1237 reg went on the stack.) */
1238 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1239 return general_operand (op, mode)
1240 op = SUBREG_REG (op);
1243 /* We don't consider registers whose class is NO_REGS
1244 to be a register operand. */
1245 return (GET_CODE (op) == REG
1246 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1247 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1250 /* Return 1 if OP is a valid operand that stands for pushing a
1251 value of mode MODE onto the stack.
1253 The main use of this function is as a predicate in match_operand
1254 expressions in the machine description. */
/* NOTE(review): brace/return/#endif lines are among the lines missing
   from this copy.  */
1257 push_operand (op, mode)
1259 enum machine_mode mode;
1261 unsigned int rounded_size = GET_MODE_SIZE (mode);
1263 #ifdef PUSH_ROUNDING
1264 rounded_size = PUSH_ROUNDING (rounded_size);
1267 if (GET_CODE (op) != MEM)
1270 if (mode != VOIDmode && GET_MODE (op) != mode)
/* Unpadded push: the plain STACK_PUSH_CODE address form suffices.  */
1275 if (rounded_size == GET_MODE_SIZE (mode))
1277 if (GET_CODE (op) != STACK_PUSH_CODE)
/* Padded push: must be (PRE_MODIFY sp (plus sp +/-rounded_size)),
   signed according to the stack-growth direction.  */
1282 if (GET_CODE (op) != PRE_MODIFY
1283 || GET_CODE (XEXP (op, 1)) != PLUS
1284 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1285 || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
1286 #ifdef STACK_GROWS_DOWNWARD
1287 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1289 || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
1295 return XEXP (op, 0) == stack_pointer_rtx;
1298 /* Return 1 if OP is a valid operand that stands for popping a
1299 value of mode MODE off the stack.
1301 The main use of this function is as a predicate in match_operand
1302 expressions in the machine description. */
1305 pop_operand (op, mode)
1307 enum machine_mode mode;
1309 if (GET_CODE (op) != MEM)
1312 if (mode != VOIDmode && GET_MODE (op) != mode)
/* A pop is a MEM whose address is STACK_POP_CODE applied to sp.  */
1317 if (GET_CODE (op) != STACK_POP_CODE)
1320 return XEXP (op, 0) == stack_pointer_rtx;
1323 /* Return 1 if ADDR is a valid memory address for mode MODE. */
/* NOTE(review): the returns and the `win:' label that
   GO_IF_LEGITIMATE_ADDRESS jumps to are among the lines missing from
   this copy.  */
1326 memory_address_p (mode, addr)
1327 enum machine_mode mode ATTRIBUTE_UNUSED;
1330 if (GET_CODE (addr) == ADDRESSOF)
1333 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1340 /* Return 1 if OP is a valid memory reference with mode MODE,
1341 including a valid address.
1343 The main use of this function is as a predicate in match_operand
1344 expressions in the machine description. */
/* NOTE(review): the declaration/initialization of `inner' is among the
   lines missing from this copy.  */
1347 memory_operand (op, mode)
1349 enum machine_mode mode;
1353 if (! reload_completed)
1354 /* Note that no SUBREG is a memory operand before end of reload pass,
1355 because (SUBREG (MEM...)) forces reloading into a register. */
1356 return GET_CODE (op) == MEM && general_operand (op, mode);
1358 if (mode != VOIDmode && GET_MODE (op) != mode)
/* After reload a SUBREG of a MEM counts as a memory operand.  */
1362 if (GET_CODE (inner) == SUBREG)
1363 inner = SUBREG_REG (inner);
1365 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1368 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1369 that is, a memory reference whose address is a general_operand. */
1372 indirect_operand (op, mode)
1374 enum machine_mode mode;
1376 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1377 if (! reload_completed
1378 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1380 int offset = SUBREG_BYTE (op);
1381 rtx inner = SUBREG_REG (op);
1383 if (mode != VOIDmode && GET_MODE (op) != mode)
1386 /* The only way that we can have a general_operand as the resulting
1387 address is if OFFSET is zero and the address already is an operand
1388 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1391 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1392 || (GET_CODE (XEXP (inner, 0)) == PLUS
1393 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1394 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1395 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
     /* Ordinary case: a MEM that is a valid memory operand and whose
	address is itself a general_operand in Pmode.  */
1398 return (GET_CODE (op) == MEM
1399 && memory_operand (op, mode)
1400 && general_operand (XEXP (op, 0), Pmode));
1403 /* Return 1 if this is a comparison operator. This allows the use of
1404 MATCH_OPERATOR to recognize all the branch insns. */
1407 comparison_operator (op, mode)
1409 enum machine_mode mode;
     /* RTX class '<' covers all comparison codes (EQ, NE, LT, GEU, ...).
	VOIDmode acts as a wildcard for the mode test.  */
1411 return ((mode == VOIDmode || GET_MODE (op) == mode)
1412 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1415 /* If BODY is an insn body that uses ASM_OPERANDS,
1416 return the number of operands (both input and output) in the insn.
1417 Otherwise return -1. */
1420 asm_noperands (body)
1423 switch (GET_CODE (body))
1426 /* No output operands: return number of input operands. */
1427 return ASM_OPERANDS_INPUT_LENGTH (body);
1429 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1430 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1431 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1435 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1436 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1438 /* Multiple output operands, or 1 output plus some clobbers:
1439 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1443 /* Count backwards through CLOBBERs to determine number of SETs. */
1444 for (i = XVECLEN (body, 0); i > 0; i--)
1446 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
     /* Anything that is neither a trailing CLOBBER nor a SET makes
	this an unrecognized asm form.  */
1448 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1452 /* N_SETS is now number of output operands. */
1455 /* Verify that all the SETs we have
1456 came from a single original asm_operands insn
1457 (so that invalid combinations are blocked). */
1458 for (i = 0; i < n_sets; i++)
1460 rtx elt = XVECEXP (body, 0, i);
1461 if (GET_CODE (elt) != SET)
1463 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1465 /* If these ASM_OPERANDS rtx's came from different original insns
1466 then they aren't allowed together. */
1467 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1468 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
     /* Total operands = inputs of the shared ASM_OPERANDS plus the
	output SETs counted above.  */
1471 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1474 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1476 /* 0 outputs, but some clobbers:
1477 body is [(asm_operands ...) (clobber (reg ...))...]. */
1480 /* Make sure all the other parallel things really are clobbers. */
1481 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1482 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1485 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1494 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1495 copy its operands (both input and output) into the vector OPERANDS,
1496 the locations of the operands within the insn into the vector OPERAND_LOCS,
1497 and the constraints for the operands into CONSTRAINTS.
1498 Write the modes of the operands into MODES.
1499 Return the assembler-template.
1501 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1502 we don't store that info. */
1505 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1509 const char **constraints;
1510 enum machine_mode *modes;
1514 const char *template = 0;
     /* Case 1: single output -- operand 0 is the SET_DEST, inputs
	follow starting at index 1.  */
1516 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1518 rtx asmop = SET_SRC (body);
1519 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1521 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1523 for (i = 1; i < noperands; i++)
1526 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1528 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1530 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1532 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1535 /* The output is in the SET.
1536 Its constraint is in the ASM_OPERANDS itself. */
1538 operands[0] = SET_DEST (body);
1540 operand_locs[0] = &SET_DEST (body);
1542 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1544 modes[0] = GET_MODE (SET_DEST (body));
1545 template = ASM_OPERANDS_TEMPLATE (asmop);
     /* Case 2: no outputs at all -- every operand is an input.  */
1547 else if (GET_CODE (body) == ASM_OPERANDS)
1550 /* No output operands: BODY is (asm_operands ....). */
1552 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1554 /* The input operands are found in the 1st element vector. */
1555 /* Constraints for inputs are in the 2nd element vector. */
1556 for (i = 0; i < noperands; i++)
1559 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1561 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1563 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1565 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1567 template = ASM_OPERANDS_TEMPLATE (asmop);
     /* Case 3: PARALLEL of one or more SETs (outputs) optionally
	followed by CLOBBERs; outputs come first, then inputs.  */
1569 else if (GET_CODE (body) == PARALLEL
1570 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1571 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1573 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1574 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1575 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1576 int nout = 0; /* Does not include CLOBBERs. */
1578 /* At least one output, plus some CLOBBERs. */
1580 /* The outputs are in the SETs.
1581 Their constraints are in the ASM_OPERANDS itself. */
1582 for (i = 0; i < nparallel; i++)
1584 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1585 break; /* Past last SET */
1588 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1590 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1592 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1594 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
     /* Inputs are appended after the NOUT outputs.  */
1598 for (i = 0; i < nin; i++)
1601 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1603 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1605 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1607 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1610 template = ASM_OPERANDS_TEMPLATE (asmop);
     /* Case 4: PARALLEL with no outputs, just (asm_operands) + CLOBBERs.  */
1612 else if (GET_CODE (body) == PARALLEL
1613 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1615 /* No outputs, but some CLOBBERs. */
1617 rtx asmop = XVECEXP (body, 0, 0);
1618 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1620 for (i = 0; i < nin; i++)
1623 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1625 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1627 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1629 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1632 template = ASM_OPERANDS_TEMPLATE (asmop);
1638 /* Check if an asm_operand matches it's constraints.
1639 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1642 asm_operand_ok (op, constraint)
1644 const char *constraint;
1648 /* Use constrain_operands after reload. */
1649 if (reload_completed)
     /* Walk the constraint string one letter at a time; any letter
	that accepts OP ends the check successfully.  */
1654 char c = *constraint++;
1668 case '0': case '1': case '2': case '3': case '4':
1669 case '5': case '6': case '7': case '8': case '9':
1670 /* For best results, our caller should have given us the
1671 proper matching constraint, but we can't actually fail
1672 the check if they didn't. Indicate that results are
1674 while (ISDIGIT (*constraint))
1680 if (address_operand (op, VOIDmode))
1685 case 'V': /* non-offsettable */
1686 if (memory_operand (op, VOIDmode))
1690 case 'o': /* offsettable */
1691 if (offsettable_nonstrict_memref_p (op))
1696 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1697 excepting those that expand_call created. Further, on some
1698 machines which do not have generalized auto inc/dec, an inc/dec
1699 is not a memory_operand.
1701 Match any memory and hope things are resolved after reload. */
     /* '<': autodecrement memory operand.  */
1703 if (GET_CODE (op) == MEM
1705 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1706 || GET_CODE (XEXP (op, 0)) == POST_DEC))
     /* '>': autoincrement memory operand.  */
1711 if (GET_CODE (op) == MEM
1713 || GET_CODE (XEXP (op, 0)) == PRE_INC
1714 || GET_CODE (XEXP (op, 0)) == POST_INC))
1719 #ifndef REAL_ARITHMETIC
1720 /* Match any floating double constant, but only if
1721 we can examine the bits of it reliably. */
1722 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1723 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1724 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1730 if (GET_CODE (op) == CONST_DOUBLE)
     /* 'G'/'H': target-defined floating constant classes.  */
1735 if (GET_CODE (op) == CONST_DOUBLE
1736 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1740 if (GET_CODE (op) == CONST_DOUBLE
1741 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
     /* 's'/'i' family: integer constants (a VOIDmode CONST_DOUBLE is a
	double-word integer constant, not a float).  */
1746 if (GET_CODE (op) == CONST_INT
1747 || (GET_CODE (op) == CONST_DOUBLE
1748 && GET_MODE (op) == VOIDmode))
1754 #ifdef LEGITIMATE_PIC_OPERAND_P
1755 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1762 if (GET_CODE (op) == CONST_INT
1763 || (GET_CODE (op) == CONST_DOUBLE
1764 && GET_MODE (op) == VOIDmode))
     /* 'I'..'P': target-defined ranges of CONST_INT values.  */
1769 if (GET_CODE (op) == CONST_INT
1770 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1774 if (GET_CODE (op) == CONST_INT
1775 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1779 if (GET_CODE (op) == CONST_INT
1780 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1784 if (GET_CODE (op) == CONST_INT
1785 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1789 if (GET_CODE (op) == CONST_INT
1790 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1794 if (GET_CODE (op) == CONST_INT
1795 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1799 if (GET_CODE (op) == CONST_INT
1800 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1804 if (GET_CODE (op) == CONST_INT
1805 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
1813 if (general_operand (op, VOIDmode))
1818 /* For all other letters, we first check for a register class,
1819 otherwise it is an EXTRA_CONSTRAINT. */
1820 if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
     /* BLKmode values can never live in registers.  */
1823 if (GET_MODE (op) == BLKmode)
1825 if (register_operand (op, VOIDmode))
1828 #ifdef EXTRA_CONSTRAINT
1829 if (EXTRA_CONSTRAINT (op, c))
1839 /* Given an rtx *P, if it is a sum containing an integer constant term,
1840 return the location (type rtx *) of the pointer to that constant term.
1841 Otherwise, return a null pointer. */
1844 find_constant_term_loc (p)
1848 enum rtx_code code = GET_CODE (*p);
1850 /* If *P IS such a constant term, P is its location. */
1852 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1856 /* Otherwise, if not a sum, it has no constant term. */
1858 if (GET_CODE (*p) != PLUS)
1861 /* If one of the summands is constant, return its location. */
1863 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1864 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1867 /* Otherwise, check each summand for containing a constant term. */
     /* Recurses into nested PLUS expressions on both sides.  */
1869 if (XEXP (*p, 0) != 0)
1871 tem = find_constant_term_loc (&XEXP (*p, 0));
1876 if (XEXP (*p, 1) != 0)
1878 tem = find_constant_term_loc (&XEXP (*p, 1));
1886 /* Return 1 if OP is a memory reference
1887 whose address contains no side effects
1888 and remains valid after the addition
1889 of a positive integer less than the
1890 size of the object being referenced.
1892 We assume that the original address is valid and do not check it.
1894 This uses strict_memory_address_p as a subroutine, so
1895 don't use it before reload. */
1898 offsettable_memref_p (op)
     /* Strict variant: hard registers only (STRICTP == 1).  */
1901 return ((GET_CODE (op) == MEM)
1902 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1905 /* Similar, but don't require a strictly valid mem ref:
1906 consider pseudo-regs valid as index or base regs. */
1909 offsettable_nonstrict_memref_p (op)
     /* Non-strict variant: same test with STRICTP == 0.  */
1912 return ((GET_CODE (op) == MEM)
1913 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1916 /* Return 1 if Y is a memory address which contains no side effects
1917 and would remain valid after the addition of a positive integer
1918 less than the size of that mode.
1920 We assume that the original address is valid and do not check it.
1921 We do check that it is valid for narrower modes.
1923 If STRICTP is nonzero, we require a strictly valid address,
1924 for the sake of use in reload.c. */
1927 offsettable_address_p (strictp, mode, y)
1929 enum machine_mode mode;
1932 enum rtx_code ycode = GET_CODE (y);
     /* Select the strict or non-strict address validator up front.  */
1936 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1937 (strictp ? strict_memory_address_p : memory_address_p);
1938 unsigned int mode_sz = GET_MODE_SIZE (mode);
1940 if (CONSTANT_ADDRESS_P (y))
1943 /* Adjusting an offsettable address involves changing to a narrower mode.
1944 Make sure that's OK. */
1946 if (mode_dependent_address_p (y))
1949 /* ??? How much offset does an offsettable BLKmode reference need?
1950 Clearly that depends on the situation in which it's being used.
1951 However, the current situation in which we test 0xffffffff is
1952 less than ideal. Caveat user. */
1954 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1956 /* If the expression contains a constant term,
1957 see if it remains valid when max possible offset is added. */
1959 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
     /* Temporarily bump the constant term in place, test, then
	restore the original rtx contents below.  */
1964 *y2 = plus_constant (*y2, mode_sz - 1);
1965 /* Use QImode because an odd displacement may be automatically invalid
1966 for any wider mode. But it should be valid for a single byte. */
1967 good = (*addressp) (QImode, y);
1969 /* In any case, restore old contents of memory. */
     /* Autoincrement-style addresses ('a' rtx class) can never be
	offset.  */
1974 if (GET_RTX_CLASS (ycode) == 'a')
1977 /* The offset added here is chosen as the maximum offset that
1978 any instruction could need to add when operating on something
1979 of the specified mode. We assume that if Y and Y+c are
1980 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1981 go inside a LO_SUM here, so we do so as well. */
1982 if (GET_CODE (y) == LO_SUM)
1983 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1984 plus_constant (XEXP (y, 1), mode_sz - 1))
1986 z = plus_constant (y, mode_sz - 1);
1988 /* Use QImode because an odd displacement may be automatically invalid
1989 for any wider mode. But it should be valid for a single byte. */
1990 return (*addressp) (QImode, z);
1993 /* Return 1 if ADDR is an address-expression whose effect depends
1994 on the mode of the memory reference it is used in.
1996 Autoincrement addressing is a typical example of mode-dependence
1997 because the amount of the increment depends on the mode. */
2000 mode_dependent_address_p (addr)
2001 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
     /* The target macro jumps to `win' for mode-dependent addresses.  */
2003 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2005 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2006 win: ATTRIBUTE_UNUSED_LABEL
2010 /* Return 1 if OP is a general operand
2011 other than a memory ref with a mode dependent address. */
2014 mode_independent_operand (op, mode)
2015 enum machine_mode mode;
2020 if (! general_operand (op, mode))
     /* Non-memory general operands are trivially mode-independent.  */
2023 if (GET_CODE (op) != MEM)
2026 addr = XEXP (op, 0);
2027 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2029 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2030 lose: ATTRIBUTE_UNUSED_LABEL
2034 /* Like extract_insn, but save insn extracted and don't extract again, when
2035 called again for the same insn expecting that recog_data still contain the
2036 valid information. This is used primary by gen_attr infrastructure that
2037 often does extract insn again and again. */
2039 extract_insn_cached (insn)
     /* Cache hit: recog_data already describes INSN, nothing to do.  */
2042 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2044 extract_insn (insn);
2045 recog_data.insn = insn;
2047 /* Do cached extract_insn, constrain_operand and complain about failures.
2048 Used by insn_attrtab. */
2050 extract_constrain_insn_cached (insn)
2053 extract_insn_cached (insn);
     /* which_alternative == -1 means constraints not yet matched for
	this extraction; a failure here is a compiler bug.  */
2054 if (which_alternative == -1
2055 && !constrain_operands (reload_completed))
2056 fatal_insn_not_found (insn);
2058 /* Do cached constrain_operand and complain about failures. */
2060 constrain_operands_cached (strict)
     /* Only re-run constraint matching if no alternative is cached.  */
2063 if (which_alternative == -1)
2064 return constrain_operands (strict);
2069 /* Analyze INSN and fill in recog_data. */
2078 rtx body = PATTERN (insn);
     /* Reset the global recog_data before classifying the pattern.  */
2080 recog_data.insn = NULL;
2081 recog_data.n_operands = 0;
2082 recog_data.n_alternatives = 0;
2083 recog_data.n_dups = 0;
2084 which_alternative = -1;
2086 switch (GET_CODE (body))
     /* Detect the asm-with-operands pattern shapes (see asm_noperands).  */
2096 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2101 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2102 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2103 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2109 recog_data.n_operands = noperands = asm_noperands (body);
2112 /* This insn is an `asm' with operands. */
2114 /* expand_asm_operands makes sure there aren't too many operands. */
2115 if (noperands > MAX_RECOG_OPERANDS)
2118 /* Now get the operand values and constraints out of the insn. */
2119 decode_asm_operands (body, recog_data.operand,
2120 recog_data.operand_loc,
2121 recog_data.constraints,
2122 recog_data.operand_mode);
     /* Count alternatives by counting commas in operand 0's constraint.  */
2125 const char *p = recog_data.constraints[0];
2126 recog_data.n_alternatives = 1;
2128 recog_data.n_alternatives += (*p++ == ',');
2132 fatal_insn_not_found (insn);
2136 /* Ordinary insn: recognize it, get the operands via insn_extract
2137 and get the constraints. */
2139 icode = recog_memoized (insn);
2141 fatal_insn_not_found (insn);
2143 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2144 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2145 recog_data.n_dups = insn_data[icode].n_dups;
2147 insn_extract (insn);
2149 for (i = 0; i < noperands; i++)
2151 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2152 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2153 /* VOIDmode match_operands gets mode from their real operand. */
2154 if (recog_data.operand_mode[i] == VOIDmode)
2155 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
     /* Classify each operand as output/inout/input from the leading
	'=' or '+' of its constraint string.  */
2158 for (i = 0; i < noperands; i++)
2159 recog_data.operand_type[i]
2160 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2161 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2164 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2168 /* After calling extract_insn, you can use this function to extract some
2169 information from the constraint strings into a more usable form.
2170 The collected data is stored in recog_op_alt. */
2172 preprocess_constraints ()
2176 memset (recog_op_alt, 0, sizeof recog_op_alt);
2177 for (i = 0; i < recog_data.n_operands; i++)
2180 struct operand_alternative *op_alt;
2181 const char *p = recog_data.constraints[i];
2183 op_alt = recog_op_alt[i];
     /* Walk the constraint string once per alternative; P advances
	through the comma-separated alternatives as J increments.  */
2185 for (j = 0; j < recog_data.n_alternatives; j++)
2187 op_alt[j].class = NO_REGS;
2188 op_alt[j].constraint = p;
2189 op_alt[j].matches = -1;
2190 op_alt[j].matched = -1;
     /* An empty alternative accepts anything.  */
2192 if (*p == '\0' || *p == ',')
2194 op_alt[j].anything_ok = 1;
2204 while (c != ',' && c != '\0');
2205 if (c == ',' || c == '\0')
2210 case '=': case '+': case '*': case '%':
2211 case 'E': case 'F': case 'G': case 'H':
2212 case 's': case 'i': case 'n':
2213 case 'I': case 'J': case 'K': case 'L':
2214 case 'M': case 'N': case 'O': case 'P':
2215 /* These don't say anything we care about. */
     /* '?' adds a mild reject penalty; '!' effectively disables the
	alternative.  */
2219 op_alt[j].reject += 6;
2222 op_alt[j].reject += 600;
2225 op_alt[j].earlyclobber = 1;
2228 case '0': case '1': case '2': case '3': case '4':
2229 case '5': case '6': case '7': case '8': case '9':
     /* Matching constraint: record the pairing in both directions.  */
2232 op_alt[j].matches = strtoul (p - 1, &end, 10);
2233 recog_op_alt[op_alt[j].matches][j].matched = i;
2239 op_alt[j].memory_ok = 1;
2242 op_alt[j].decmem_ok = 1;
2245 op_alt[j].incmem_ok = 1;
2248 op_alt[j].nonoffmem_ok = 1;
2251 op_alt[j].offmem_ok = 1;
2254 op_alt[j].anything_ok = 1;
     /* 'p' (address) and register-class letters accumulate into a
	unioned register class for the alternative.  */
2258 op_alt[j].is_address = 1;
2259 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2260 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2264 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2268 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
2276 /* Check the operands of an insn against the insn's operand constraints
2277 and return 1 if they are valid.
2278 The information about the insn's operands, constraints, operand modes
2279 etc. is obtained from the global variables set up by extract_insn.
2281 WHICH_ALTERNATIVE is set to a number which indicates which
2282 alternative of constraints was matched: 0 for the first alternative,
2283 1 for the next, etc.
2285 In addition, when two operands are match
2286 and it happens that the output operand is (reg) while the
2287 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2288 make the output operand look like the input.
2289 This is because the output operand is the one the template will print.
2291 This is used in final, just before printing the assembler code and by
2292 the routines that determine an insn's attribute.
2294 If STRICT is a positive non-zero value, it means that we have been
2295 called after reload has been completed. In that case, we must
2296 do all checks strictly. If it is zero, it means that we have been called
2297 before reload has completed. In that case, we first try to see if we can
2298 find an alternative that matches strictly. If not, we try again, this
2299 time assuming that reload will fix up the insn. This provides a "best
2300 guess" for the alternative and is used to compute attributes of insns prior
2301 to reload. A negative value of STRICT is used for this internal call. */
2309 constrain_operands (strict)
2312 const char *constraints[MAX_RECOG_OPERANDS];
2313 int matching_operands[MAX_RECOG_OPERANDS];
2314 int earlyclobber[MAX_RECOG_OPERANDS];
2317 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2318 int funny_match_index;
2320 which_alternative = 0;
     /* An insn with no operands or no constraints trivially matches.  */
2321 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2324 for (c = 0; c < recog_data.n_operands; c++)
2326 constraints[c] = recog_data.constraints[c];
2327 matching_operands[c] = -1;
     /* Outer loop (via which_alternative below) tries each constraint
	alternative in turn until one accepts all operands.  */
2334 funny_match_index = 0;
2336 for (opno = 0; opno < recog_data.n_operands; opno++)
2338 rtx op = recog_data.operand[opno];
2339 enum machine_mode mode = GET_MODE (op);
2340 const char *p = constraints[opno];
2345 earlyclobber[opno] = 0;
2347 /* A unary operator may be accepted by the predicate, but it
2348 is irrelevant for matching constraints. */
2349 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
     /* For a hard-reg SUBREG, compute the register-number offset so
	reg_fits_class_p can test the real hard register.  */
2352 if (GET_CODE (op) == SUBREG)
2354 if (GET_CODE (SUBREG_REG (op)) == REG
2355 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2356 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2357 GET_MODE (SUBREG_REG (op)),
2360 op = SUBREG_REG (op);
2363 /* An empty constraint or empty alternative
2364 allows anything which matched the pattern. */
2365 if (*p == 0 || *p == ',')
2368 while (*p && (c = *p++) != ',')
2371 case '?': case '!': case '*': case '%':
2376 /* Ignore rest of this alternative as far as
2377 constraint checking is concerned. */
2378 while (*p && *p != ',')
2383 earlyclobber[opno] = 1;
2386 case '0': case '1': case '2': case '3': case '4':
2387 case '5': case '6': case '7': case '8': case '9':
2389 /* This operand must be the same as a previous one.
2390 This kind of constraint is used for instructions such
2391 as add when they take only two operands.
2393 Note that the lower-numbered operand is passed first.
2395 If we are not testing strictly, assume that this
2396 constraint will be satisfied. */
2401 match = strtoul (p - 1, &end, 10);
2408 rtx op1 = recog_data.operand[match];
2409 rtx op2 = recog_data.operand[opno];
2411 /* A unary operator may be accepted by the predicate,
2412 but it is irrelevant for matching constraints. */
2413 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2414 op1 = XEXP (op1, 0);
2415 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2416 op2 = XEXP (op2, 0);
2418 val = operands_match_p (op1, op2);
2421 matching_operands[opno] = match;
2422 matching_operands[match] = opno;
2427 /* If output is *x and input is *--x, arrange later
2428 to change the output to *--x as well, since the
2429 output op is the one that will be printed. */
2430 if (val == 2 && strict > 0)
2432 funny_match[funny_match_index].this = opno;
2433 funny_match[funny_match_index++].other = match;
2439 /* p is used for address_operands. When we are called by
2440 gen_reload, no one will have checked that the address is
2441 strictly valid, i.e., that all pseudos requiring hard regs
2442 have gotten them. */
2444 || (strict_memory_address_p (recog_data.operand_mode[opno],
2449 /* No need to check general_operand again;
2450 it was done in insn-recog.c. */
2452 /* Anything goes unless it is a REG and really has a hard reg
2453 but the hard reg is not in the class GENERAL_REGS. */
2455 || GENERAL_REGS == ALL_REGS
2456 || GET_CODE (op) != REG
2457 || (reload_in_progress
2458 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2459 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2464 /* This is used for a MATCH_SCRATCH in the cases when
2465 we don't actually need anything. So anything goes
     /* 'm': memory operand, with looser acceptance before/during
	reload since reload can create the memory reference.  */
2471 if (GET_CODE (op) == MEM
2472 /* Before reload, accept what reload can turn into mem. */
2473 || (strict < 0 && CONSTANT_P (op))
2474 /* During reload, accept a pseudo */
2475 || (reload_in_progress && GET_CODE (op) == REG
2476 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
     /* '<' / '>': auto-dec / auto-inc memory operands.  */
2481 if (GET_CODE (op) == MEM
2482 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2483 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2488 if (GET_CODE (op) == MEM
2489 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2490 || GET_CODE (XEXP (op, 0)) == POST_INC))
2495 #ifndef REAL_ARITHMETIC
2496 /* Match any CONST_DOUBLE, but only if
2497 we can examine the bits of it reliably. */
2498 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2499 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2500 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2503 if (GET_CODE (op) == CONST_DOUBLE)
2508 if (GET_CODE (op) == CONST_DOUBLE)
2514 if (GET_CODE (op) == CONST_DOUBLE
2515 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
     /* Integer constants: a VOIDmode CONST_DOUBLE is a double-word
	integer, not a float.  */
2520 if (GET_CODE (op) == CONST_INT
2521 || (GET_CODE (op) == CONST_DOUBLE
2522 && GET_MODE (op) == VOIDmode))
2525 if (CONSTANT_P (op))
2530 if (GET_CODE (op) == CONST_INT
2531 || (GET_CODE (op) == CONST_DOUBLE
2532 && GET_MODE (op) == VOIDmode))
2544 if (GET_CODE (op) == CONST_INT
2545 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
     /* 'V': memory that is NOT offsettable.  */
2550 if (GET_CODE (op) == MEM
2551 && ((strict > 0 && ! offsettable_memref_p (op))
2553 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2554 || (reload_in_progress
2555 && !(GET_CODE (op) == REG
2556 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
     /* 'o': offsettable memory, strict or non-strict per STRICT.  */
2561 if ((strict > 0 && offsettable_memref_p (op))
2562 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2563 /* Before reload, accept what reload can handle. */
2565 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2566 /* During reload, accept a pseudo */
2567 || (reload_in_progress && GET_CODE (op) == REG
2568 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
     /* Default: register-class letters and EXTRA_CONSTRAINT.  */
2574 enum reg_class class;
2576 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2577 if (class != NO_REGS)
2581 && GET_CODE (op) == REG
2582 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2583 || (strict == 0 && GET_CODE (op) == SCRATCH)
2584 || (GET_CODE (op) == REG
2585 && reg_fits_class_p (op, class, offset, mode)))
2588 #ifdef EXTRA_CONSTRAINT
2589 else if (EXTRA_CONSTRAINT (op, c))
2596 constraints[opno] = p;
2597 /* If this operand did not win somehow,
2598 this alternative loses. */
2602 /* This alternative won; the operands are ok.
2603 Change whichever operands this alternative says to change. */
2608 /* See if any earlyclobber operand conflicts with some other
2612 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2613 /* Ignore earlyclobber operands now in memory,
2614 because we would often report failure when we have
2615 two memory operands, one of which was formerly a REG. */
2616 if (earlyclobber[eopno]
2617 && GET_CODE (recog_data.operand[eopno]) == REG)
2618 for (opno = 0; opno < recog_data.n_operands; opno++)
2619 if ((GET_CODE (recog_data.operand[opno]) == MEM
2620 || recog_data.operand_type[opno] != OP_OUT)
2622 /* Ignore things like match_operator operands. */
2623 && *recog_data.constraints[opno] != 0
2624 && ! (matching_operands[opno] == eopno
2625 && operands_match_p (recog_data.operand[opno],
2626 recog_data.operand[eopno]))
2627 && ! safe_from_earlyclobber (recog_data.operand[opno],
2628 recog_data.operand[eopno]))
     /* Success: apply the recorded *x -> *--x output rewrites.  */
2633 while (--funny_match_index >= 0)
2635 recog_data.operand[funny_match[funny_match_index].other]
2636 = recog_data.operand[funny_match[funny_match_index].this];
2643 which_alternative++;
2645 while (which_alternative < recog_data.n_alternatives);
2647 which_alternative = -1;
2648 /* If we are about to reject this, but we are not to test strictly,
2649 try a very loose test. Only return failure if it fails also. */
2651 return constrain_operands (-1);
2656 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2657 is a hard reg in class CLASS when its regno is offset by OFFSET
2658 and changed to mode MODE.
2659 If REG occupies multiple hard regs, all of them must be in CLASS. */
2662 reg_fits_class_p (operand, class, offset, mode)
2664 enum reg_class class;
2666 enum machine_mode mode;
2668 int regno = REGNO (operand);
2669 if (regno < FIRST_PSEUDO_REGISTER
2670 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
     /* Also check every additional hard reg the value occupies in
	MODE (HARD_REGNO_NREGS may be > 1 for wide modes).  */
2675 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2677 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2686 /* Split single instruction. Helper function for split_all_insns.
2687 Return last insn in the sequence if successful, or NULL if unsuccessful. */
2695 /* Don't split no-op move insns. These should silently
2696 disappear later in final. Splitting such insns would
2697 break the code that handles REG_NO_CONFLICT blocks. */
2699 else if ((set = single_set (insn)) != NULL && set_noop_p (set))
2701 /* Nops get in the way while scheduling, so delete them
2702 now if register allocation has already been done. It
2703 is too risky to try to do this before register
2704 allocation, and there are unlikely to be very many
2705 nops then anyways. */
2706 if (reload_completed)
     /* Delete by turning the insn into a NOTE_INSN_DELETED note.  */
2708 PUT_CODE (insn, NOTE);
2709 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2710 NOTE_SOURCE_FILE (insn) = 0;
2715 /* Split insns here to get max fine-grain parallelism. */
2716 rtx first = PREV_INSN (insn);
2717 rtx last = try_split (PATTERN (insn), insn, 1);
2721 /* try_split returns the NOTE that INSN became. */
2722 PUT_CODE (insn, NOTE);
2723 NOTE_SOURCE_FILE (insn) = 0;
2724 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2726 /* ??? Coddle to md files that generate subregs in post-
2727 reload splitters instead of computing the proper
2729 if (reload_completed && first != last)
2731 first = NEXT_INSN (first);
     /* Clean up the operands of every insn produced by the split.  */
2735 cleanup_subreg_operands (first);
2738 first = NEXT_INSN (first);
2746 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2749 split_all_insns (upd_life)
     /* BLOCKS collects the basic blocks whose insns were split, so
	life info can be recomputed only where needed.  */
2756 blocks = sbitmap_alloc (n_basic_blocks);
2757 sbitmap_zero (blocks);
2760 for (i = n_basic_blocks - 1; i >= 0; --i)
2762 basic_block bb = BASIC_BLOCK (i);
2765 for (insn = bb->head; insn ; insn = next)
2769 /* Can't use `next_real_insn' because that might go across
2770 CODE_LABELS and short-out basic blocks. */
2771 next = NEXT_INSN (insn);
2772 last = split_insn (insn);
2775 /* The split sequence may include barrier, but the
2776 BB boundary we are interested in will be set to previous
2779 while (GET_CODE (last) == BARRIER)
2780 last = PREV_INSN (last);
2781 SET_BIT (blocks, i);
     /* Stop at the recorded block end; NEXT may already point past it.  */
2786 if (insn == bb->end)
     /* Splitting may have created new control flow inside blocks.  */
2796 find_many_sub_basic_blocks (blocks);
2799 if (changed && upd_life)
2801 count_or_remove_death_notes (blocks, 1);
2802 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2804 #ifdef ENABLE_CHECKING
2805 verify_flow_info ();
2808 sbitmap_free (blocks);
2811 /* Same as split_all_insns, but do not expect CFG to be available.
2812 Used by machine dependent reorg passes. */
2815 split_all_insns_noflow ()
/* NOTE(review): the body of this function is largely missing from this
   extraction; only the plain insn-chain walk is visible.  */
2819 for (insn = get_insns (); insn; insn = next)
2821 next = NEXT_INSN (insn);
2827 #ifdef HAVE_peephole2
/* Per-slot state for the peephole2 matching window.
   NOTE(review): the struct members (presumably the insn and its
   live-before regset) are on lines not visible in this extraction.  */
2828 struct peep2_insn_data
/* Circular buffer of the last MAX_INSNS_PER_PEEP2 insns seen, plus one
   extra slot for the end-of-block marker, and the current index.  */
2834 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2835 static int peep2_current;
2837 /* A non-insn marker indicating the last insn of the block.
2838 The live_before regset for this element is correct, indicating
2839 global_live_at_end for the block. */
2840 #define PEEP2_EOB pc_rtx
2842 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2843 does not exist. Used by the recognizer to find the next insn to match
2844 in a multi-insn pattern. */
2850 if (n >= MAX_INSNS_PER_PEEP2 + 1)
/* Translate N into an index in the circular buffer, wrapping around.  */
2854 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2855 n -= MAX_INSNS_PER_PEEP2 + 1;
/* The end-of-block marker is not a real insn; hide it from callers.  */
2857 if (peep2_insn_data[n].insn == PEEP2_EOB)
2859 return peep2_insn_data[n].insn;
2862 /* Return true if REGNO is dead before the Nth non-note insn
   after the current insn.  (NOTE(review): the remainder of this comment
   and the function's return type / parameter declarations are on lines
   not visible in this extraction.)  */
2866 peep2_regno_dead_p (ofs, regno)
2870 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
/* Translate OFS into an index in the circular peep2_insn_data buffer.  */
2873 ofs += peep2_current;
2874 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2875 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2877 if (peep2_insn_data[ofs].insn == NULL_RTX)
/* Dead means the register is not in that insn's live-before set.  */
2880 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2883 /* Similarly for a REG. */
2886 peep2_reg_dead_p (ofs, reg)
/* NOTE(review): the return type, parameter declarations, abort calls
   and closing return are on lines not visible in this extraction.  */
2892 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
/* Translate OFS into an index in the circular buffer.  */
2895 ofs += peep2_current;
2896 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2897 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2899 if (peep2_insn_data[ofs].insn == NULL_RTX)
/* A multi-word register is only dead if every constituent hard
   register is absent from the live-before set.  */
2902 regno = REGNO (reg);
2903 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2905 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2910 /* Try to find a hard register of mode MODE, matching the register class in
2911 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2912 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2913 in which case the only condition is that the register must be available
2914 before CURRENT_INSN.
2915 Registers that already have bits set in REG_SET will not be considered.
2917 If an appropriate register is available, it will be returned and the
2918 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  (NOTE(review): the close of this comment is on a line not
   visible in this extraction.)  */
2922 peep2_find_free_register (from, to, class_str, mode, reg_set)
2924 const char *class_str;
2925 enum machine_mode mode;
2926 HARD_REG_SET *reg_set;
/* NOTE(review): the FROM/TO parameter declarations and part of the
   local declaration block are not visible here.  */
2928 static int search_ofs;
2929 enum reg_class class;
/* Bounds-check the requested window offsets.  */
2933 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
/* Map FROM and TO into indices in the circular insn buffer.  */
2936 from += peep2_current;
2937 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2938 from -= MAX_INSNS_PER_PEEP2 + 1;
2939 to += peep2_current;
2940 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2941 to -= MAX_INSNS_PER_PEEP2 + 1;
2943 if (peep2_insn_data[from].insn == NULL_RTX)
2945 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
/* Accumulate the union of live-before sets over the whole FROM..TO
   window so the chosen register is free throughout.  */
2949 HARD_REG_SET this_live;
2951 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2953 if (peep2_insn_data[from].insn == NULL_RTX)
2955 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2956 IOR_HARD_REG_SET (live, this_live);
/* 'r' is handled specially as GENERAL_REGS; other constraint letters
   go through the target's class-from-letter mapping.  */
2959 class = (class_str[0] == 'r' ? GENERAL_REGS
2960 : REG_CLASS_FROM_LETTER (class_str[0]));
2962 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2964 int raw_regno, regno, success, j;
2966 /* Distribute the free registers as much as possible. */
2967 raw_regno = search_ofs + i;
2968 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2969 raw_regno -= FIRST_PSEUDO_REGISTER;
2970 #ifdef REG_ALLOC_ORDER
2971 regno = reg_alloc_order[raw_regno];
2976 /* Don't allocate fixed registers. */
2977 if (fixed_regs[regno])
2979 /* Make sure the register is of the right class. */
2980 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2982 /* And can support the mode we need. */
2983 if (! HARD_REGNO_MODE_OK (regno, mode))
2985 /* And that we don't create an extra save/restore. */
2986 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2988 /* And we don't clobber traceback for noreturn functions. */
2989 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2990 && (! reload_completed || frame_pointer_needed))
/* Verify that every hard register the mode occupies is neither
   already claimed in REG_SET nor live in the window.  */
2994 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2996 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2997 || TEST_HARD_REG_BIT (live, regno + j))
/* Success: claim all words of the register in REG_SET.  */
3005 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3006 SET_HARD_REG_BIT (*reg_set, regno + j);
3008 /* Start the next search with the next register. */
3009 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3011 search_ofs = raw_regno;
3013 return gen_rtx_REG (mode, regno);
3021 /* Perform the peephole2 optimization pass. */
3024 peephole2_optimize (dump_file)
3025 FILE *dump_file ATTRIBUTE_UNUSED;
/* NOTE(review): the return type, most local variable declarations and a
   number of interior lines of this function are missing from this
   extraction; the comments below describe only the visible fragments.  */
3027 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3031 #ifdef HAVE_conditional_execution
3036 /* Initialize the regsets we're going to use. */
3037 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3038 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3039 live = INITIALIZE_REG_SET (rs_heads[i]);
3041 #ifdef HAVE_conditional_execution
3042 blocks = sbitmap_alloc (n_basic_blocks);
3043 sbitmap_zero (blocks);
3046 count_or_remove_death_notes (NULL, 1);
/* Process each basic block, last to first.  */
3049 for (b = n_basic_blocks - 1; b >= 0; --b)
3051 basic_block bb = BASIC_BLOCK (b);
3052 struct propagate_block_info *pbi;
3054 /* Indicate that all slots except the last holds invalid data. */
3055 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3056 peep2_insn_data[i].insn = NULL_RTX;
3058 /* Indicate that the last slot contains live_after data. */
3059 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3060 peep2_current = MAX_INSNS_PER_PEEP2;
3062 /* Start up propagation. */
3063 COPY_REG_SET (live, bb->global_live_at_end);
3064 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3066 #ifdef HAVE_conditional_execution
3067 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3069 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
/* Scan the block backwards, recording each insn in the window and
   attempting a peephole2 match at it.  */
3072 for (insn = bb->end; ; insn = prev)
3074 prev = PREV_INSN (insn);
3080 /* Record this insn. */
3081 if (--peep2_current < 0)
3082 peep2_current = MAX_INSNS_PER_PEEP2;
3083 peep2_insn_data[peep2_current].insn = insn;
3084 propagate_one_insn (pbi, insn);
3085 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3087 /* Match the peephole. */
3088 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3091 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3092 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3093 cfg-related call notes. */
3094 for (i = 0; i <= match_len; ++i)
3097 rtx old_insn, new_insn, note;
3099 j = i + peep2_current;
3100 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3101 j -= MAX_INSNS_PER_PEEP2 + 1;
3102 old_insn = peep2_insn_data[j].insn;
3103 if (GET_CODE (old_insn) != CALL_INSN)
/* Locate the replacement CALL_INSN in the new sequence.  */
3106 new_insn = NULL_RTX;
3107 if (GET_CODE (try) == SEQUENCE)
3108 for (k = XVECLEN (try, 0) - 1; k >= 0; k--)
3110 rtx x = XVECEXP (try, 0, k);
3111 if (GET_CODE (x) == CALL_INSN)
3117 else if (GET_CODE (try) == CALL_INSN)
3122 CALL_INSN_FUNCTION_USAGE (new_insn)
3123 = CALL_INSN_FUNCTION_USAGE (old_insn);
/* Carry over the reg notes the CFG code relies on; all others
   are dropped below.  */
3125 for (note = REG_NOTES (old_insn);
3127 note = XEXP (note, 1))
3128 switch (REG_NOTE_KIND (note))
3133 case REG_ALWAYS_RETURN:
3134 REG_NOTES (new_insn)
3135 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3137 REG_NOTES (new_insn));
3139 /* Discard all other reg notes. */
3143 /* Croak if there is another call in the sequence. */
3144 while (++i <= match_len)
3146 j = i + peep2_current;
3147 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3148 j -= MAX_INSNS_PER_PEEP2 + 1;
3149 old_insn = peep2_insn_data[j].insn;
3150 if (GET_CODE (old_insn) == CALL_INSN)
/* Compute the buffer index of the last insn of the match.  */
3156 i = match_len + peep2_current;
3157 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3158 i -= MAX_INSNS_PER_PEEP2 + 1;
3160 /* Replace the old sequence with the new. */
3161 try = emit_insn_after (try, peep2_insn_data[i].insn);
3162 delete_insn_chain (insn, peep2_insn_data[i].insn);
3164 #ifdef HAVE_conditional_execution
3165 /* With conditional execution, we cannot back up the
3166 live information so easily, since the conditional
3167 death data structures are not so self-contained.
3168 So record that we've made a modification to this
3169 block and update life information at the end. */
3170 SET_BIT (blocks, b);
/* Invalidate the whole window; only the end-of-block marker
   slot stays meaningful after the replacement.  */
3173 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3174 peep2_insn_data[i].insn = NULL_RTX;
3175 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3177 /* Back up lifetime information past the end of the
3178 newly created sequence. */
3179 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3181 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3183 /* Update life information for the new sequence. */
3189 i = MAX_INSNS_PER_PEEP2;
3190 peep2_insn_data[i].insn = try;
3191 propagate_one_insn (pbi, try);
3192 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3194 try = PREV_INSN (try);
3196 while (try != prev);
3198 /* ??? Should verify that LIVE now matches what we
3199 had before the new sequence. */
3206 if (insn == bb->head)
3210 free_propagate_block_info (pbi);
/* Release the regsets allocated at function entry.  */
3213 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3214 FREE_REG_SET (peep2_insn_data[i].live_before);
3215 FREE_REG_SET (live);
3217 #ifdef HAVE_conditional_execution
3218 count_or_remove_death_notes (blocks, 1);
3219 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3220 sbitmap_free (blocks);
3223 #endif /* HAVE_peephole2 */