1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
26 #include "coretypes.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 #include "hard-reg-set.h"
35 #include "addresses.h"
41 #include "basic-block.h"
45 #include "tree-pass.h"
/* Default push/pop address-update codes, keyed on stack growth direction.
   NOTE(review): the #else/#endif lines of these conditionals are elided in
   this extract; the PRE_INC/POST_DEC definitions are the #else arms.  */
48 #ifndef STACK_PUSH_CODE
49 #ifdef STACK_GROWS_DOWNWARD
50 #define STACK_PUSH_CODE PRE_DEC
52 #define STACK_PUSH_CODE PRE_INC
56 #ifndef STACK_POP_CODE
57 #ifdef STACK_GROWS_DOWNWARD
58 #define STACK_POP_CODE POST_INC
60 #define STACK_POP_CODE POST_DEC
/* Forward declarations for file-local helpers defined later in recog.c.  */
64 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
65 static void validate_replace_src_1 (rtx *, void *);
66 static rtx split_insn (rtx);
68 /* Nonzero means allow operands to be volatile.
69 This should be 0 if you are generating rtl, such as if you are calling
70 the functions in optabs.c and expmed.c (most of the time).
71 This should be 1 if all valid insns need to be recognized,
72 such as in regclass.c and final.c and reload.c.
74 init_recog and init_recog_no_volatile are responsible for setting this. */
/* Global scratch area filled in by extract_insn; describes the operands,
   duplicates, and constraints of the insn most recently extracted.  */
78 struct recog_data recog_data;
80 /* Contains a vector of operand_alternative structures for every operand.
81 Set up by preprocess_constraints. */
82 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
84 /* On return from `constrain_operands', indicate which alternative
87 int which_alternative;
89 /* Nonzero after end of reload pass.
90 Set to 1 or 0 by toplev.c.
91 Controls the significance of (SUBREG (MEM)). */
95 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
96 int epilogue_completed;
98 /* Initialize data used by the function `recog'.
99 This must be called once in the compilation of a function
100 before any insn recognition may be done in the function. */
/* NOTE(review): the body of this function is elided in this extract;
   presumably it clears volatile_ok — confirm against the full file.  */
103 init_recog_no_volatile (void)
115 /* Check that X is an insn-body for an `asm' with operands
116 and that the operands mentioned in it are legitimate. */
119 check_asm_operands (rtx x)
123 const char **constraints;
126 /* Post-reload, be more strict with things. */
127 if (reload_completed)
129 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
130 extract_insn (make_insn_raw (x));
131 constrain_operands (1);
132 return which_alternative >= 0;
135 noperands = asm_noperands (x);
/* alloca: operands/constraints arrays live only for this call.  */
141 operands = alloca (noperands * sizeof (rtx));
142 constraints = alloca (noperands * sizeof (char *));
144 decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
146 for (i = 0; i < noperands; i++)
148 const char *c = constraints[i];
/* A single-digit constraint means "match operand N": check against
   operand N's constraint instead.  */
151 if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
152 c = constraints[c[0] - '0'];
154 if (! asm_operand_ok (operands[i], c))
161 /* Static data for the next two routines. */
/* One pending (possibly grouped) RTL change; fields are elided in this
   extract but include object, loc, old, old_code, and unshare.  */
163 typedef struct change_t
/* Growable array of pending changes and its allocated capacity.  */
172 static change_t *changes;
173 static int changes_allocated;
/* Number of changes currently recorded and not yet confirmed/cancelled.  */
175 static int num_changes = 0;
177 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
178 at which NEW will be placed. If OBJECT is zero, no validation is done,
179 the change is simply made.
181 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
182 will be called with the address and mode as parameters. If OBJECT is
183 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
186 IN_GROUP is nonzero if this is part of a group of changes that must be
187 performed as a group. In that case, the changes will be stored. The
188 function `apply_change_group' will validate and apply the changes.
190 If IN_GROUP is zero, this is a single change. Try to recognize the insn
191 or validate the memory reference with the change applied. If the result
192 is not valid for the machine, suppress the change and return zero.
193 Otherwise, perform the change and return 1. */
196 validate_change_1 (rtx object, rtx *loc, rtx new, bool in_group, bool unshare)
/* No-op if the replacement is already equal to what is there.  */
200 if (old == new || rtx_equal_p (old, new))
/* A non-group change must not arrive while a group is pending.  */
203 gcc_assert (in_group != 0 || num_changes == 0);
207 /* Save the information describing this change. */
208 if (num_changes >= changes_allocated)
210 if (changes_allocated == 0)
211 /* This value allows for repeated substitutions inside complex
212 indexed addresses, or changes in up to 5 insns. */
213 changes_allocated = MAX_RECOG_OPERANDS * 5;
215 changes_allocated *= 2;
217 changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
220 changes[num_changes].object = object;
221 changes[num_changes].loc = loc;
222 changes[num_changes].old = old;
223 changes[num_changes].unshare = unshare;
225 if (object && !MEM_P (object))
227 /* Set INSN_CODE to force rerecognition of insn. Save old code in
229 changes[num_changes].old_code = INSN_CODE (object);
230 INSN_CODE (object) = -1;
235 /* If we are making a group of changes, return 1. Otherwise, validate the
236 change group we made. */
241 return apply_change_group ();
244 /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
/* UNSHARE == false: the replacement RTX is used as-is (not copied).  */
248 validate_change (rtx object, rtx *loc, rtx new, bool in_group)
250 return validate_change_1 (object, loc, new, in_group, false);
253 /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
/* UNSHARE == true: the replacement is copy_rtx'd when the group commits.  */
257 validate_unshare_change (rtx object, rtx *loc, rtx new, bool in_group)
259 return validate_change_1 (object, loc, new, in_group, true);
263 /* Keep X canonicalized if some changes have made it non-canonical; only
264 modifies the operands of X, not (for example) its code. Simplifications
265 are not the job of this routine.
267 Return true if anything was changed. */
269 canonicalize_change_group (rtx insn, rtx x)
/* Only commutative operations whose operands are now in the wrong
   canonical order need fixing.  */
271 if (COMMUTATIVE_P (x)
272 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
274 /* Oops, the caller has made X no longer canonical.
275 Let's redo the changes in the correct order. */
276 rtx tem = XEXP (x, 0);
/* Both swaps are queued in the current change group (in_group == 1).  */
277 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
278 validate_change (insn, &XEXP (x, 1), tem, 1);
286 /* This subroutine of apply_change_group verifies whether the changes to INSN
287 were valid; i.e. whether INSN can still be recognized. */
290 insn_invalid_p (rtx insn)
292 rtx pat = PATTERN (insn);
293 int num_clobbers = 0;
294 /* If we are before reload and the pattern is a SET, see if we can add
296 int icode = recog (pat, insn,
297 (GET_CODE (pat) == SET
298 && ! reload_completed && ! reload_in_progress)
299 ? &num_clobbers : 0);
300 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
303 /* If this is an asm and the operand aren't legal, then fail. Likewise if
304 this is not an asm and the insn wasn't recognized. */
305 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
306 || (!is_asm && icode < 0))
309 /* If we have to add CLOBBERs, fail if we have to add ones that reference
310 hard registers since our callers can't know if they are live or not.
311 Otherwise, add them. */
312 if (num_clobbers > 0)
316 if (added_clobbers_hard_reg_p (icode))
/* Wrap the pattern in a PARALLEL with room for the needed clobbers.  */
319 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
320 XVECEXP (newpat, 0, 0) = pat;
321 add_clobbers (newpat, icode);
322 PATTERN (insn) = pat = newpat;
325 /* After reload, verify that all constraints are satisfied. */
326 if (reload_completed)
330 if (! constrain_operands (1))
/* Recognition succeeded: record the insn code.  */
334 INSN_CODE (insn) = icode;
338 /* Return number of changes made and not validated yet. */
340 num_changes_pending (void)
345 /* Tentatively apply the changes numbered NUM and up.
346 Return 1 if all changes are valid, zero otherwise. */
349 verify_changes (int num)
/* Track the last insn validated so repeated changes to the same insn
   are checked only once.  */
352 rtx last_validated = NULL_RTX;
354 /* The changes have been applied and all INSN_CODEs have been reset to force
357 The changes are valid if we aren't given an object, or if we are
358 given a MEM and it still is a valid address, or if this is in insn
359 and it is recognized. In the latter case, if reload has completed,
360 we also require that the operands meet the constraints for
363 for (i = num; i < num_changes; i++)
365 rtx object = changes[i].object;
367 /* If there is no object to test or if it is the same as the one we
368 already tested, ignore it. */
369 if (object == 0 || object == last_validated)
/* MEM case: the (possibly changed) address must still be legitimate.  */
374 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
377 else if (insn_invalid_p (object))
379 rtx pat = PATTERN (object);
381 /* Perhaps we couldn't recognize the insn because there were
382 extra CLOBBERs at the end. If so, try to re-recognize
383 without the last CLOBBER (later iterations will cause each of
384 them to be eliminated, in turn). But don't do this if we
385 have an ASM_OPERAND. */
386 if (GET_CODE (pat) == PARALLEL
387 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
388 && asm_noperands (PATTERN (object)) < 0)
/* A 2-element PARALLEL collapses to its first element.  */
392 if (XVECLEN (pat, 0) == 2)
393 newpat = XVECEXP (pat, 0, 0);
399 = gen_rtx_PARALLEL (VOIDmode,
400 rtvec_alloc (XVECLEN (pat, 0) - 1));
401 for (j = 0; j < XVECLEN (newpat, 0); j++)
402 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
405 /* Add a new change to this group to replace the pattern
406 with this new pattern. Then consider this change
407 as having succeeded. The change we added will
408 cause the entire call to fail if things remain invalid.
410 Note that this can lose if a later change than the one
411 we are processing specified &XVECEXP (PATTERN (object), 0, X)
412 but this shouldn't occur. */
414 validate_change (object, &PATTERN (object), newpat, 1);
417 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
418 /* If this insn is a CLOBBER or USE, it is always valid, but is
424 last_validated = object;
/* All changes verified iff the loop ran to completion.  */
427 return (i == num_changes);
430 /* A group of changes has previously been issued with validate_change
431 and verified with verify_changes. Call df_insn_rescan for each of
432 the insn changed and clear num_changes. */
435 confirm_change_group (void)
438 rtx last_object = NULL;
440 for (i = 0; i < num_changes; i++)
442 rtx object = changes[i].object;
/* Changes queued via validate_unshare_change get a private copy now.  */
444 if (changes[i].unshare)
445 *changes[i].loc = copy_rtx (*changes[i].loc);
447 /* Avoid unnecesary rescaning when multiple changes to same instruction
451 if (object != last_object && last_object && INSN_P (last_object))
452 df_insn_rescan (last_object);
453 last_object = object;
/* Rescan the final insn, which the loop above deferred.  */
457 if (last_object && INSN_P (last_object))
458 df_insn_rescan (last_object);
462 /* Apply a group of changes previously issued with `validate_change'.
463 If all changes are valid, call confirm_change_group and return 1,
464 otherwise, call cancel_changes and return 0. */
467 apply_change_group (void)
469 if (verify_changes (0))
471 confirm_change_group ();
482 /* Return the number of changes so far in the current group. */
485 num_validated_changes (void)
490 /* Retract the changes numbered NUM and up. */
493 cancel_changes (int num)
497 /* Back out all the changes. Do this in the opposite order in which
499 for (i = num_changes - 1; i >= num; i--)
/* Restore the saved RTX, and the saved insn code for non-MEM objects.  */
501 *changes[i].loc = changes[i].old;
502 if (changes[i].object && !MEM_P (changes[i].object))
503 INSN_CODE (changes[i].object) = changes[i].old_code;
508 /* Replace every occurrence of FROM in X with TO. Mark each change with
509 validate_change passing OBJECT. */
512 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
518 enum machine_mode op0_mode = VOIDmode;
/* Remember the change count so we can tell below whether any
   substitution actually happened inside X.  */
519 int prev_changes = num_changes;
526 fmt = GET_RTX_FORMAT (code);
528 op0_mode = GET_MODE (XEXP (x, 0));
530 /* X matches FROM if it is the same rtx or they are both referring to the
531 same register in the same mode. Avoid calling rtx_equal_p unless the
532 operands look similar. */
535 || (REG_P (x) && REG_P (from)
536 && GET_MODE (x) == GET_MODE (from)
537 && REGNO (x) == REGNO (from))
538 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
539 && rtx_equal_p (x, from)))
541 validate_unshare_change (object, loc, to, 1);
545 /* Call ourself recursively to perform the replacements.
546 We must not replace inside already replaced expression, otherwise we
547 get infinite recursion for replacements like (reg X)->(subreg (reg X))
548 done by regmove, so we must special case shared ASM_OPERANDS. */
550 if (GET_CODE (x) == PARALLEL)
552 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
554 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
555 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
557 /* Verify that operands are really shared. */
558 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
559 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
/* Shared ASM_OPERANDS: only recurse into the SET_DEST.  */
561 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
565 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
/* Generic case: walk every 'e' and 'E' sub-rtx of X.  */
569 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
572 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
573 else if (fmt[i] == 'E')
574 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
575 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
578 /* If we didn't substitute, there is nothing more to do. */
579 if (num_changes == prev_changes)
582 /* Allow substituted expression to have different mode. This is used by
583 regmove to change mode of pseudo register. */
584 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
585 op0_mode = GET_MODE (XEXP (x, 0));
587 /* Do changes needed to keep rtx consistent. Don't do any other
588 simplifications, as it is not our job. */
590 if (SWAPPABLE_OPERANDS_P (x)
591 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
593 validate_change (object, loc,
594 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
595 : swap_condition (code),
596 GET_MODE (x), XEXP (x, 1),
605 /* If we have a PLUS whose second operand is now a CONST_INT, use
606 simplify_gen_binary to try to simplify it.
607 ??? We may want later to remove this, once simplification is
608 separated from this function. */
609 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
610 validate_change (object, loc,
612 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
/* MINUS with a constant subtrahend is canonicalized as PLUS of the
   negation.  */
615 if (GET_CODE (XEXP (x, 1)) == CONST_INT
616 || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
617 validate_change (object, loc,
619 (PLUS, GET_MODE (x), XEXP (x, 0),
620 simplify_gen_unary (NEG,
621 GET_MODE (x), XEXP (x, 1),
626 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
628 new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
630 /* If any of the above failed, substitute in something that
631 we know won't be recognized. */
633 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
634 validate_change (object, loc, new, 1);
638 /* All subregs possible to simplify should be simplified. */
639 new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
642 /* Subregs of VOIDmode operands are incorrect. */
643 if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
644 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
646 validate_change (object, loc, new, 1);
650 /* If we are replacing a register with memory, try to change the memory
651 to be the mode required for memory in extract operations (this isn't
652 likely to be an insertion operation; if it was, nothing bad will
653 happen, we might just fail in some cases). */
655 if (MEM_P (XEXP (x, 0))
656 && GET_CODE (XEXP (x, 1)) == CONST_INT
657 && GET_CODE (XEXP (x, 2)) == CONST_INT
658 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
659 && !MEM_VOLATILE_P (XEXP (x, 0)))
661 enum machine_mode wanted_mode = VOIDmode;
662 enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
663 int pos = INTVAL (XEXP (x, 2));
665 if (GET_CODE (x) == ZERO_EXTRACT)
667 enum machine_mode new_mode
668 = mode_for_extraction (EP_extzv, 1);
669 if (new_mode != MAX_MACHINE_MODE)
670 wanted_mode = new_mode;
672 else if (GET_CODE (x) == SIGN_EXTRACT)
674 enum machine_mode new_mode
675 = mode_for_extraction (EP_extv, 1);
676 if (new_mode != MAX_MACHINE_MODE)
677 wanted_mode = new_mode;
680 /* If we have a narrower mode, we can do something. */
681 if (wanted_mode != VOIDmode
682 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
684 int offset = pos / BITS_PER_UNIT;
687 /* If the bytes and bits are counted differently, we
688 must adjust the offset. */
689 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
691 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
/* Re-express the bit position relative to the narrowed MEM.  */
694 pos %= GET_MODE_BITSIZE (wanted_mode);
696 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
698 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
699 validate_change (object, &XEXP (x, 0), newmem, 1);
710 /* Try replacing every occurrence of FROM in INSN with TO. After all
711 changes have been made, validate by seeing if INSN is still valid. */
714 validate_replace_rtx (rtx from, rtx to, rtx insn)
716 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
717 return apply_change_group ();
720 /* Try replacing every occurrence of FROM in INSN with TO. */
/* Group variant: queues the changes without applying the group.  */
723 validate_replace_rtx_group (rtx from, rtx to, rtx insn)
725 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
728 /* Function called by note_uses to replace used subexpressions. */
729 struct validate_replace_src_data
731 rtx from; /* Old RTX */
732 rtx to; /* New RTX */
733 rtx insn; /* Insn in which substitution is occurring. */
/* note_uses callback: unpack the closure struct and do the replacement.  */
737 validate_replace_src_1 (rtx *x, void *data)
739 struct validate_replace_src_data *d
740 = (struct validate_replace_src_data *) data;
742 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
745 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
749 validate_replace_src_group (rtx from, rtx to, rtx insn)
751 struct validate_replace_src_data d;
756 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
759 /* Try simplify INSN.
760 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
761 pattern and return true if something was simplified. */
764 validate_simplify_insn (rtx insn)
770 pat = PATTERN (insn);
772 if (GET_CODE (pat) == SET)
774 newpat = simplify_rtx (SET_SRC (pat));
775 if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
776 validate_change (insn, &SET_SRC (pat), newpat, 1);
777 newpat = simplify_rtx (SET_DEST (pat));
778 if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
779 validate_change (insn, &SET_DEST (pat), newpat, 1);
/* PARALLEL: simplify each constituent SET the same way.  */
781 else if (GET_CODE (pat) == PARALLEL)
782 for (i = 0; i < XVECLEN (pat, 0); i++)
784 rtx s = XVECEXP (pat, 0, i);
786 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
788 newpat = simplify_rtx (SET_SRC (s));
789 if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
790 validate_change (insn, &SET_SRC (s), newpat, 1);
791 newpat = simplify_rtx (SET_DEST (s));
792 if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
793 validate_change (insn, &SET_DEST (s), newpat, 1);
/* Succeed only if something was queued AND the group re-recognizes.  */
796 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
800 /* Return 1 if the insn using CC0 set by INSN does not contain
801 any ordered tests applied to the condition codes.
802 EQ and NE tests do not count. */
805 next_insn_tests_no_inequality (rtx insn)
807 rtx next = next_cc0_user (insn);
809 /* If there is no next insn, we have to take the conservative choice. */
813 return (INSN_P (next)
814 && ! inequality_comparisons_p (PATTERN (next)));
818 /* Return 1 if OP is a valid general operand for machine mode MODE.
819 This is either a register reference, a memory reference,
820 or a constant. In the case of a memory reference, the address
821 is checked for general validity for the target machine.
823 Register and memory references must have mode MODE in order to be valid,
824 but some constants have no machine mode and are valid for any mode.
826 If MODE is VOIDmode, OP is checked for validity for whatever mode
829 The main use of this function is as a predicate in match_operand
830 expressions in the machine description.
832 For an explanation of this function's behavior for registers of
833 class NO_REGS, see the comment for `register_operand'. */
836 general_operand (rtx op, enum machine_mode mode)
838 enum rtx_code code = GET_CODE (op);
840 if (mode == VOIDmode)
841 mode = GET_MODE (op);
843 /* Don't accept CONST_INT or anything similar
844 if the caller wants something floating. */
845 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
846 && GET_MODE_CLASS (mode) != MODE_INT
847 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
/* A CONST_INT must already be sign-extended for MODE.  */
850 if (GET_CODE (op) == CONST_INT
852 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
856 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
858 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
859 && LEGITIMATE_CONSTANT_P (op));
861 /* Except for certain constants with VOIDmode, already checked for,
862 OP's mode must match MODE if MODE specifies a mode. */
864 if (GET_MODE (op) != mode)
869 rtx sub = SUBREG_REG (op);
871 #ifdef INSN_SCHEDULING
872 /* On machines that have insn scheduling, we want all memory
873 reference to be explicit, so outlaw paradoxical SUBREGs.
874 However, we must allow them after reload so that they can
875 get cleaned up by cleanup_subreg_operands. */
876 if (!reload_completed && MEM_P (sub)
877 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
880 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
881 may result in incorrect reference. We should simplify all valid
882 subregs of MEM anyway. But allow this after reload because we
883 might be called from cleanup_subreg_operands.
885 ??? This is a kludge. */
886 if (!reload_completed && SUBREG_BYTE (op) != 0
890 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
891 create such rtl, and we must reject it. */
892 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
893 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
/* Having validated the SUBREG wrapper, test the inner expression.  */
897 code = GET_CODE (op);
901 /* A register whose class is NO_REGS is not a general operand. */
902 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
903 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
907 rtx y = XEXP (op, 0);
909 if (! volatile_ok && MEM_VOLATILE_P (op))
912 /* Use the mem's mode, since it will be reloaded thus. */
913 if (memory_address_p (GET_MODE (op), y))
920 /* Return 1 if OP is a valid memory address for a memory reference
923 The main use of this function is as a predicate in match_operand
924 expressions in the machine description. */
927 address_operand (rtx op, enum machine_mode mode)
929 return memory_address_p (mode, op);
932 /* Return 1 if OP is a register reference of mode MODE.
933 If MODE is VOIDmode, accept a register in any mode.
935 The main use of this function is as a predicate in match_operand
936 expressions in the machine description.
938 As a special exception, registers whose class is NO_REGS are
939 not accepted by `register_operand'. The reason for this change
940 is to allow the representation of special architecture artifacts
941 (such as a condition code register) without extending the rtl
942 definitions. Since registers of class NO_REGS cannot be used
943 as registers in any case where register classes are examined,
944 it is most consistent to keep this function from accepting them. */
947 register_operand (rtx op, enum machine_mode mode)
949 if (GET_MODE (op) != mode && mode != VOIDmode)
952 if (GET_CODE (op) == SUBREG)
954 rtx sub = SUBREG_REG (op);
956 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
957 because it is guaranteed to be reloaded into one.
958 Just make sure the MEM is valid in itself.
959 (Ideally, (SUBREG (MEM)...) should not exist after reload,
960 but currently it does result from (SUBREG (REG)...) where the
961 reg went on the stack.) */
962 if (! reload_completed && MEM_P (sub))
963 return general_operand (op, mode)
965 #ifdef CANNOT_CHANGE_MODE_CLASS
967 && REGNO (sub) < FIRST_PSEUDO_REGISTER
968 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
969 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
970 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
974 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
975 create such rtl, and we must reject it. */
976 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
977 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
983 /* We don't consider registers whose class is NO_REGS
984 to be a register operand. */
986 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
987 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
990 /* Return 1 for a register in Pmode; ignore the tested mode. */
993 pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
995 return register_operand (op, Pmode);
998 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
999 or a hard register. */
1002 scratch_operand (rtx op, enum machine_mode mode)
1004 if (GET_MODE (op) != mode && mode != VOIDmode)
/* Accept SCRATCH outright; a REG must be a hard register.  */
1007 return (GET_CODE (op) == SCRATCH
1009 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1012 /* Return 1 if OP is a valid immediate operand for mode MODE.
1014 The main use of this function is as a predicate in match_operand
1015 expressions in the machine description. */
1018 immediate_operand (rtx op, enum machine_mode mode)
1020 /* Don't accept CONST_INT or anything similar
1021 if the caller wants something floating. */
1022 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1023 && GET_MODE_CLASS (mode) != MODE_INT
1024 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
/* A CONST_INT must already be sign-extended for MODE.  */
1027 if (GET_CODE (op) == CONST_INT
1029 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1032 return (CONSTANT_P (op)
1033 && (GET_MODE (op) == mode || mode == VOIDmode
1034 || GET_MODE (op) == VOIDmode)
1035 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1036 && LEGITIMATE_CONSTANT_P (op));
1039 /* Returns 1 if OP is an operand that is a CONST_INT. */
1042 const_int_operand (rtx op, enum machine_mode mode)
1044 if (GET_CODE (op) != CONST_INT)
/* If a mode is given, the value must fit it (sign-extended).  */
1047 if (mode != VOIDmode
1048 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1054 /* Returns 1 if OP is an operand that is a constant integer or constant
1055 floating-point number. */
1058 const_double_operand (rtx op, enum machine_mode mode)
1060 /* Don't accept CONST_INT or anything similar
1061 if the caller wants something floating. */
1062 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1063 && GET_MODE_CLASS (mode) != MODE_INT
1064 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1067 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1068 && (mode == VOIDmode || GET_MODE (op) == mode
1069 || GET_MODE (op) == VOIDmode));
1072 /* Return 1 if OP is a general operand that is not an immediate operand. */
1075 nonimmediate_operand (rtx op, enum machine_mode mode)
1077 return (general_operand (op, mode) && ! CONSTANT_P (op));
1080 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1083 nonmemory_operand (rtx op, enum machine_mode mode)
1085 if (CONSTANT_P (op))
1087 /* Don't accept CONST_INT or anything similar
1088 if the caller wants something floating. */
1089 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1090 && GET_MODE_CLASS (mode) != MODE_INT
1091 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1094 if (GET_CODE (op) == CONST_INT
1096 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1099 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1100 || mode == VOIDmode)
1101 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1102 && LEGITIMATE_CONSTANT_P (op));
/* Non-constant case: must be a register (possibly inside a SUBREG).  */
1105 if (GET_MODE (op) != mode && mode != VOIDmode)
1108 if (GET_CODE (op) == SUBREG)
1110 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1111 because it is guaranteed to be reloaded into one.
1112 Just make sure the MEM is valid in itself.
1113 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1114 but currently it does result from (SUBREG (REG)...) where the
1115 reg went on the stack.) */
1116 if (! reload_completed && MEM_P (SUBREG_REG (op)))
1117 return general_operand (op, mode);
1118 op = SUBREG_REG (op);
1121 /* We don't consider registers whose class is NO_REGS
1122 to be a register operand. */
1124 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1125 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1128 /* Return 1 if OP is a valid operand that stands for pushing a
1129 value of mode MODE onto the stack.
1131 The main use of this function is as a predicate in match_operand
1132 expressions in the machine description. */
1135 push_operand (rtx op, enum machine_mode mode)
1137 unsigned int rounded_size = GET_MODE_SIZE (mode);
1139 #ifdef PUSH_ROUNDING
1140 rounded_size = PUSH_ROUNDING (rounded_size);
1146 if (mode != VOIDmode && GET_MODE (op) != mode)
/* Unrounded size: a plain pre-dec/pre-inc push suffices.  */
1151 if (rounded_size == GET_MODE_SIZE (mode))
1153 if (GET_CODE (op) != STACK_PUSH_CODE)
/* Rounded size: must be PRE_MODIFY adding +/- rounded_size to SP.  */
1158 if (GET_CODE (op) != PRE_MODIFY
1159 || GET_CODE (XEXP (op, 1)) != PLUS
1160 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1161 || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
1162 #ifdef STACK_GROWS_DOWNWARD
1163 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1165 || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
1171 return XEXP (op, 0) == stack_pointer_rtx;
1174 /* Return 1 if OP is a valid operand that stands for popping a
1175 value of mode MODE off the stack.
1177 The main use of this function is as a predicate in match_operand
1178 expressions in the machine description. */
1181 pop_operand (rtx op, enum machine_mode mode)
1186 if (mode != VOIDmode && GET_MODE (op) != mode)
1191 if (GET_CODE (op) != STACK_POP_CODE)
1194 return XEXP (op, 0) == stack_pointer_rtx;
1197 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1200 memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
/* GO_IF_LEGITIMATE_ADDRESS jumps to `win' on success (label elided
   in this extract).  */
1202 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1209 /* Return 1 if OP is a valid memory reference with mode MODE,
1210 including a valid address.
1212 The main use of this function is as a predicate in match_operand
1213 expressions in the machine description. */
1216 memory_operand (rtx op, enum machine_mode mode)
1220 if (! reload_completed)
1221 /* Note that no SUBREG is a memory operand before end of reload pass,
1222 because (SUBREG (MEM...)) forces reloading into a register. */
1223 return MEM_P (op) && general_operand (op, mode);
1225 if (mode != VOIDmode && GET_MODE (op) != mode)
/* After reload, look through a SUBREG to the underlying MEM.  */
1229 if (GET_CODE (inner) == SUBREG)
1230 inner = SUBREG_REG (inner);
1232 return (MEM_P (inner) && general_operand (op, mode));
1235 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1236 that is, a memory reference whose address is a general_operand. */
1239 indirect_operand (rtx op, enum machine_mode mode)
1241 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1242 if (! reload_completed
1243 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1245 int offset = SUBREG_BYTE (op);
1246 rtx inner = SUBREG_REG (op);
1248 if (mode != VOIDmode && GET_MODE (op) != mode)
1251 /* The only way that we can have a general_operand as the resulting
1252 address is if OFFSET is zero and the address already is an operand
1253 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1256 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1257 || (GET_CODE (XEXP (inner, 0)) == PLUS
1258 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1259 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1260 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
/* Plain case: a MEM whose address is itself a general operand.  */
1264 && memory_operand (op, mode)
1265 && general_operand (XEXP (op, 0), Pmode));
1268 /* Return 1 if this is a comparison operator. This allows the use of
1269 MATCH_OPERATOR to recognize all the branch insns. */
1272 comparison_operator (rtx op, enum machine_mode mode)
1274 return ((mode == VOIDmode || GET_MODE (op) == mode)
1275 && COMPARISON_P (op));
1278 /* If BODY is an insn body that uses ASM_OPERANDS,
1279 return the number of operands (both input and output) in the insn.
1280 Otherwise return -1. */
1283 asm_noperands (rtx body)
1285 switch (GET_CODE (body))
/* Case 1: bare (asm_operands ...) — inputs only, no outputs.  */
1288 /* No output operands: return number of input operands. */
1289 return ASM_OPERANDS_INPUT_LENGTH (body);
/* Case 2: (set OUTPUT (asm_operands ...)) — exactly one output.  */
1291 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1292 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1293 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
/* Case 3: PARALLEL forms — multiple outputs and/or clobbers.  */
1297 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1298 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1300 /* Multiple output operands, or 1 output plus some clobbers:
1301 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1305 /* Count backwards through CLOBBERs to determine number of SETs. */
1306 for (i = XVECLEN (body, 0); i > 0; i--)
1308 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
/* Anything between the SETs and trailing CLOBBERs is malformed;
   presumably the (missing) code here returns -1 — TODO confirm.  */
1310 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1314 /* N_SETS is now number of output operands. */
1317 /* Verify that all the SETs we have
1318 came from a single original asm_operands insn
1319 (so that invalid combinations are blocked). */
1320 for (i = 0; i < n_sets; i++)
1322 rtx elt = XVECEXP (body, 0, i);
1323 if (GET_CODE (elt) != SET)
1325 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1327 /* If these ASM_OPERANDS rtx's came from different original insns
1328 then they aren't allowed together. */
1329 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1330 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
/* Total operands = inputs of the shared ASM_OPERANDS plus N_SETS
   outputs (the continuation of this expression is not visible here).  */
1333 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1336 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1338 /* 0 outputs, but some clobbers:
1339 body is [(asm_operands ...) (clobber (reg ...))...]. */
1342 /* Make sure all the other parallel things really are clobbers. */
1343 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1344 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1347 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1356 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1357 copy its operands (both input and output) into the vector OPERANDS,
1358 the locations of the operands within the insn into the vector OPERAND_LOCS,
1359 and the constraints for the operands into CONSTRAINTS.
1360 Write the modes of the operands into MODES.
1361 Return the assembler-template.
1363 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1364 we don't store that info. */
1367 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1368 const char **constraints, enum machine_mode *modes,
/* The three shapes handled below mirror the cases of asm_noperands:
   single-SET, bare ASM_OPERANDS, and the two PARALLEL forms.  */
1375 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1377 asmop = SET_SRC (body);
1378 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1380 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
/* Operand 0 is the output; inputs occupy slots 1..noperands-1.  */
1382 for (i = 1; i < noperands; i++)
1385 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1387 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1389 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1391 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1394 /* The output is in the SET.
1395 Its constraint is in the ASM_OPERANDS itself. */
1397 operands[0] = SET_DEST (body);
1399 operand_locs[0] = &SET_DEST (body);
1401 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1403 modes[0] = GET_MODE (SET_DEST (body));
1405 else if (GET_CODE (body) == ASM_OPERANDS)
1408 /* No output operands: BODY is (asm_operands ....). */
1410 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1412 /* The input operands are found in the 1st element vector. */
1413 /* Constraints for inputs are in the 2nd element vector. */
1414 for (i = 0; i < noperands; i++)
1417 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1419 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1421 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1423 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1426 else if (GET_CODE (body) == PARALLEL
1427 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1428 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1430 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1432 int nout = 0; /* Does not include CLOBBERs. */
1434 asmop = SET_SRC (XVECEXP (body, 0, 0));
1435 nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1437 /* At least one output, plus some CLOBBERs. */
1439 /* The outputs are in the SETs.
1440 Their constraints are in the ASM_OPERANDS itself. */
1441 for (i = 0; i < nparallel; i++)
1443 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1444 break; /* Past last SET */
1447 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1449 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1451 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1453 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
/* Inputs follow the NOUT outputs in the caller's vectors.  */
1457 for (i = 0; i < nin; i++)
1460 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1462 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1464 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1466 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1469 else if (GET_CODE (body) == PARALLEL
1470 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1472 /* No outputs, but some CLOBBERs. */
1476 asmop = XVECEXP (body, 0, 0);
1477 nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1479 for (i = 0; i < nin; i++)
1482 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1484 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1486 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1488 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
/* Record the source location of the asm for diagnostics; representation
   differs with USE_MAPPED_LOCATION.  */
1495 #ifdef USE_MAPPED_LOCATION
1496 *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1498 loc->file = ASM_OPERANDS_SOURCE_FILE (asmop);
1499 loc->line = ASM_OPERANDS_SOURCE_LINE (asmop);
1503 return ASM_OPERANDS_TEMPLATE (asmop);
1506 /* Check if an asm_operand matches its constraints.
1507 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1510 asm_operand_ok (rtx op, const char *constraint)
1514 /* Use constrain_operands after reload. */
1515 gcc_assert (!reload_completed);
/* Walk CONSTRAINT one letter at a time; each case either accepts OP
   (result > 0), or falls through to try the next alternative letter.  */
1519 char c = *constraint;
1536 case '0': case '1': case '2': case '3': case '4':
1537 case '5': case '6': case '7': case '8': case '9':
1538 /* For best results, our caller should have given us the
1539 proper matching constraint, but we can't actually fail
1540 the check if they didn't. Indicate that results are
/* Skip the rest of the (possibly multi-digit) operand number.  */
1544 while (ISDIGIT (*constraint));
1550 if (address_operand (op, VOIDmode))
1555 case 'V': /* non-offsettable */
1556 if (memory_operand (op, VOIDmode))
1560 case 'o': /* offsettable */
1561 if (offsettable_nonstrict_memref_p (op))
1566 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
1567 excepting those that expand_call created. Further, on some
1568 machines which do not have generalized auto inc/dec, an inc/dec
1569 is not a memory_operand.
1571 Match any memory and hope things are resolved after reload. */
/* '<' constraint: autodecrement addressing.  */
1575 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1576 || GET_CODE (XEXP (op, 0)) == POST_DEC))
/* '>' constraint: autoincrement addressing.  */
1583 || GET_CODE (XEXP (op, 0)) == PRE_INC
1584 || GET_CODE (XEXP (op, 0)) == POST_INC))
/* 'E'/'F': floating-point (or float vector) constants.  */
1590 if (GET_CODE (op) == CONST_DOUBLE
1591 || (GET_CODE (op) == CONST_VECTOR
1592 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1597 if (GET_CODE (op) == CONST_DOUBLE
1598 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
1602 if (GET_CODE (op) == CONST_DOUBLE
1603 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
/* A VOIDmode CONST_DOUBLE is an integer constant wider than
   HOST_WIDE_INT, hence acceptable where CONST_INT is.  */
1608 if (GET_CODE (op) == CONST_INT
1609 || (GET_CODE (op) == CONST_DOUBLE
1610 && GET_MODE (op) == VOIDmode))
1615 if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
1620 if (GET_CODE (op) == CONST_INT
1621 || (GET_CODE (op) == CONST_DOUBLE
1622 && GET_MODE (op) == VOIDmode))
/* 'I' through 'P': target-defined integer-range constraints.  */
1627 if (GET_CODE (op) == CONST_INT
1628 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
1632 if (GET_CODE (op) == CONST_INT
1633 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
1637 if (GET_CODE (op) == CONST_INT
1638 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
1642 if (GET_CODE (op) == CONST_INT
1643 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
1647 if (GET_CODE (op) == CONST_INT
1648 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
1652 if (GET_CODE (op) == CONST_INT
1653 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
1657 if (GET_CODE (op) == CONST_INT
1658 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
1662 if (GET_CODE (op) == CONST_INT
1663 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
1672 if (general_operand (op, VOIDmode))
1677 /* For all other letters, we first check for a register class,
1678 otherwise it is an EXTRA_CONSTRAINT. */
1679 if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1682 if (GET_MODE (op) == BLKmode)
1684 if (register_operand (op, VOIDmode))
1687 #ifdef EXTRA_CONSTRAINT_STR
1688 else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
1690 else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
1691 /* Every memory operand can be reloaded to fit. */
1692 && memory_operand (op, VOIDmode))
1694 else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
1695 /* Every address operand can be reloaded to fit. */
1696 && address_operand (op, VOIDmode))
/* Advance past a possibly multi-character constraint.  */
1701 len = CONSTRAINT_LEN (c, constraint);
1704 while (--len && *constraint);
1712 /* Given an rtx *P, if it is a sum containing an integer constant term,
1713 return the location (type rtx *) of the pointer to that constant term.
1714 Otherwise, return a null pointer. */
1717 find_constant_term_loc (rtx *p)
1720 enum rtx_code code = GET_CODE (*p);
1722 /* If *P IS such a constant term, P is its location. */
1724 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1728 /* Otherwise, if not a sum, it has no constant term. */
1730 if (GET_CODE (*p) != PLUS)
1733 /* If one of the summands is constant, return its location. */
1735 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1736 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1739 /* Otherwise, check each summand for containing a constant term. */
/* Recurses into each side of the PLUS in turn.  */
1741 if (XEXP (*p, 0) != 0)
1743 tem = find_constant_term_loc (&XEXP (*p, 0));
1748 if (XEXP (*p, 1) != 0)
1750 tem = find_constant_term_loc (&XEXP (*p, 1));
1758 /* Return 1 if OP is a memory reference
1759 whose address contains no side effects
1760 and remains valid after the addition
1761 of a positive integer less than the
1762 size of the object being referenced.
1764 We assume that the original address is valid and do not check it.
1766 This uses strict_memory_address_p as a subroutine, so
1767 don't use it before reload. */
1770 offsettable_memref_p (rtx op)
/* STRICTP == 1: hard registers only, hence post-reload use.  */
1772 return ((MEM_P (op))
1773 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1776 /* Similar, but don't require a strictly valid mem ref:
1777 consider pseudo-regs valid as index or base regs. */
1780 offsettable_nonstrict_memref_p (rtx op)
/* STRICTP == 0: pseudo registers are acceptable, so usable pre-reload.  */
1782 return ((MEM_P (op))
1783 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1786 /* Return 1 if Y is a memory address which contains no side effects
1787 and would remain valid after the addition of a positive integer
1788 less than the size of that mode.
1790 We assume that the original address is valid and do not check it.
1791 We do check that it is valid for narrower modes.
1793 If STRICTP is nonzero, we require a strictly valid address,
1794 for the sake of use in reload.c. */
1797 offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
1799 enum rtx_code ycode = GET_CODE (y);
/* Pick the strict or non-strict address validator once, up front.  */
1803 int (*addressp) (enum machine_mode, rtx) =
1804 (strictp ? strict_memory_address_p : memory_address_p);
1805 unsigned int mode_sz = GET_MODE_SIZE (mode);
1807 if (CONSTANT_ADDRESS_P (y))
1810 /* Adjusting an offsettable address involves changing to a narrower mode.
1811 Make sure that's OK. */
1813 if (mode_dependent_address_p (y))
1816 /* ??? How much offset does an offsettable BLKmode reference need?
1817 Clearly that depends on the situation in which it's being used.
1818 However, the current situation in which we test 0xffffffff is
1819 less than ideal. Caveat user. */
1821 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1823 /* If the expression contains a constant term,
1824 see if it remains valid when max possible offset is added. */
1826 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
/* Temporarily bump the constant term in place, test, then the
   original contents are restored below.  */
1831 *y2 = plus_constant (*y2, mode_sz - 1);
1832 /* Use QImode because an odd displacement may be automatically invalid
1833 for any wider mode. But it should be valid for a single byte. */
1834 good = (*addressp) (QImode, y);
1836 /* In any case, restore old contents of memory. */
/* Auto-inc/dec addresses can never be offset.  */
1841 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1844 /* The offset added here is chosen as the maximum offset that
1845 any instruction could need to add when operating on something
1846 of the specified mode. We assume that if Y and Y+c are
1847 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1848 go inside a LO_SUM here, so we do so as well. */
1849 if (GET_CODE (y) == LO_SUM
1851 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1852 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1853 plus_constant (XEXP (y, 1), mode_sz - 1))
1855 z = plus_constant (y, mode_sz - 1);
1857 /* Use QImode because an odd displacement may be automatically invalid
1858 for any wider mode. But it should be valid for a single byte. */
1859 return (*addressp) (QImode, z);
1862 /* Return 1 if ADDR is an address-expression whose effect depends
1863 on the mode of the memory reference it is used in.
1865 Autoincrement addressing is a typical example of mode-dependence
1866 because the amount of the increment depends on the mode. */
1869 mode_dependent_address_p (rtx addr)
1871 /* Auto-increment addressing with anything other than post_modify
1872 or pre_modify always introduces a mode dependency. Catch such
1873 cases now instead of deferring to the target. */
1874 if (GET_CODE (addr) == PRE_INC
1875 || GET_CODE (addr) == POST_INC
1876 || GET_CODE (addr) == PRE_DEC
1877 || GET_CODE (addr) == POST_DEC)
/* Target hook: the macro jumps to `win' for mode-independent
   addresses, otherwise falls through (returning nonzero).  */
1880 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1882 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1883 win: ATTRIBUTE_UNUSED_LABEL
1887 /* Like extract_insn, but save insn extracted and don't extract again, when
1888 called again for the same insn expecting that recog_data still contain the
1889 valid information. This is used primary by gen_attr infrastructure that
1890 often does extract insn again and again. */
1892 extract_insn_cached (rtx insn)
/* Cache hit: recog_data already describes INSN, nothing to do.  */
1894 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
1896 extract_insn (insn);
1897 recog_data.insn = insn;
1900 /* Do cached extract_insn, constrain_operands and complain about failures.
1901 Used by insn_attrtab. */
1903 extract_constrain_insn_cached (rtx insn)
1905 extract_insn_cached (insn);
/* which_alternative == -1 means constraints not yet matched; a failed
   match at this point is a compiler bug, so die loudly.  */
1906 if (which_alternative == -1
1907 && !constrain_operands (reload_completed))
1908 fatal_insn_not_found (insn);
1911 /* Do cached constrain_operands and complain about failures. */
1913 constrain_operands_cached (int strict)
/* Only re-match when no alternative has been chosen yet.  */
1915 if (which_alternative == -1)
1916 return constrain_operands (strict);
1921 /* Analyze INSN and fill in recog_data. */
1924 extract_insn (rtx insn)
1929 rtx body = PATTERN (insn);
/* Reset the global recog_data before refilling it for INSN.  */
1931 recog_data.insn = NULL;
1932 recog_data.n_operands = 0;
1933 recog_data.n_alternatives = 0;
1934 recog_data.n_dups = 0;
1935 which_alternative = -1;
1937 switch (GET_CODE (body))
1947 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1952 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
1953 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1954 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1960 recog_data.n_operands = noperands = asm_noperands (body);
1963 /* This insn is an `asm' with operands. */
1965 /* expand_asm_operands makes sure there aren't too many operands. */
1966 gcc_assert (noperands <= MAX_RECOG_OPERANDS)
1968 /* Now get the operand values and constraints out of the insn. */
1969 decode_asm_operands (body, recog_data.operand,
1970 recog_data.operand_loc,
1971 recog_data.constraints,
1972 recog_data.operand_mode, NULL);
/* For an asm, alternatives are counted by commas in the first
   operand's constraint string.  */
1975 const char *p = recog_data.constraints[0];
1976 recog_data.n_alternatives = 1;
1978 recog_data.n_alternatives += (*p++ == ',');
1982 fatal_insn_not_found (insn);
1986 /* Ordinary insn: recognize it, get the operands via insn_extract
1987 and get the constraints. */
1989 icode = recog_memoized (insn);
1991 fatal_insn_not_found (insn);
/* Everything below comes from the generated insn_data table.  */
1993 recog_data.n_operands = noperands = insn_data[icode].n_operands;
1994 recog_data.n_alternatives = insn_data[icode].n_alternatives;
1995 recog_data.n_dups = insn_data[icode].n_dups;
1997 insn_extract (insn);
1999 for (i = 0; i < noperands; i++)
2001 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2002 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2003 /* VOIDmode match_operands gets mode from their real operand. */
2004 if (recog_data.operand_mode[i] == VOIDmode)
2005 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
/* Classify each operand as output, in/out, or input from the
   leading '=' or '+' of its constraint.  */
2008 for (i = 0; i < noperands; i++)
2009 recog_data.operand_type[i]
2010 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2011 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2014 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2017 /* After calling extract_insn, you can use this function to extract some
2018 information from the constraint strings into a more usable form.
2019 The collected data is stored in recog_op_alt. */
2021 preprocess_constraints (void)
/* Clear one operand_alternative record per (operand, alternative).  */
2025 for (i = 0; i < recog_data.n_operands; i++)
2026 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2027 * sizeof (struct operand_alternative)));
2029 for (i = 0; i < recog_data.n_operands; i++)
2032 struct operand_alternative *op_alt;
2033 const char *p = recog_data.constraints[i];
2035 op_alt = recog_op_alt[i];
2037 for (j = 0; j < recog_data.n_alternatives; j++)
2039 op_alt[j].cl = NO_REGS;
2040 op_alt[j].constraint = p;
2041 op_alt[j].matches = -1;
2042 op_alt[j].matched = -1;
/* An empty alternative accepts anything.  */
2044 if (*p == '\0' || *p == ',')
2046 op_alt[j].anything_ok = 1;
/* (Missing lines here presumably skip to the next alternative's
   constraint text — TODO confirm against full source.)  */
2056 while (c != ',' && c != '\0');
2057 if (c == ',' || c == '\0')
2065 case '=': case '+': case '*': case '%':
2066 case 'E': case 'F': case 'G': case 'H':
2067 case 's': case 'i': case 'n':
2068 case 'I': case 'J': case 'K': case 'L':
2069 case 'M': case 'N': case 'O': case 'P':
2070 /* These don't say anything we care about. */
/* '?' and '!' add reject penalties of differing severity.  */
2074 op_alt[j].reject += 6;
2077 op_alt[j].reject += 600;
2080 op_alt[j].earlyclobber = 1;
2083 case '0': case '1': case '2': case '3': case '4':
2084 case '5': case '6': case '7': case '8': case '9':
/* A matching constraint links this operand with an earlier one,
   in both directions.  */
2087 op_alt[j].matches = strtoul (p, &end, 10);
2088 recog_op_alt[op_alt[j].matches][j].matched = i;
2094 op_alt[j].memory_ok = 1;
2097 op_alt[j].decmem_ok = 1;
2100 op_alt[j].incmem_ok = 1;
2103 op_alt[j].nonoffmem_ok = 1;
2106 op_alt[j].offmem_ok = 1;
2109 op_alt[j].anything_ok = 1;
/* 'p': address operand — union in the legitimate base class.  */
2113 op_alt[j].is_address = 1;
2114 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2115 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
2121 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2125 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2127 op_alt[j].memory_ok = 1;
2130 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2132 op_alt[j].is_address = 1;
2134 = (reg_class_subunion
2135 [(int) op_alt[j].cl]
2136 [(int) base_reg_class (VOIDmode, ADDRESS,
/* Default: a register-class letter widens the accepted class.  */
2142 = (reg_class_subunion
2143 [(int) op_alt[j].cl]
2144 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2147 p += CONSTRAINT_LEN (c, p);
2153 /* Check the operands of an insn against the insn's operand constraints
2154 and return 1 if they are valid.
2155 The information about the insn's operands, constraints, operand modes
2156 etc. is obtained from the global variables set up by extract_insn.
2158 WHICH_ALTERNATIVE is set to a number which indicates which
2159 alternative of constraints was matched: 0 for the first alternative,
2160 1 for the next, etc.
2162 In addition, when two operands are required to match
2163 and it happens that the output operand is (reg) while the
2164 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2165 make the output operand look like the input.
2166 This is because the output operand is the one the template will print.
2168 This is used in final, just before printing the assembler code and by
2169 the routines that determine an insn's attribute.
2171 If STRICT is a positive nonzero value, it means that we have been
2172 called after reload has been completed. In that case, we must
2173 do all checks strictly. If it is zero, it means that we have been called
2174 before reload has completed. In that case, we first try to see if we can
2175 find an alternative that matches strictly. If not, we try again, this
2176 time assuming that reload will fix up the insn. This provides a "best
2177 guess" for the alternative and is used to compute attributes of insns prior
2178 to reload. A negative value of STRICT is used for this internal call. */
2186 constrain_operands (int strict)
2188 const char *constraints[MAX_RECOG_OPERANDS];
2189 int matching_operands[MAX_RECOG_OPERANDS];
2190 int earlyclobber[MAX_RECOG_OPERANDS];
2193 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2194 int funny_match_index;
2196 which_alternative = 0;
/* An insn with no operands or no constraints trivially matches.  */
2197 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2200 for (c = 0; c < recog_data.n_operands; c++)
2202 constraints[c] = recog_data.constraints[c];
2203 matching_operands[c] = -1;
/* Outer loop: try each alternative in turn.  */
2208 int seen_earlyclobber_at = -1;
2211 funny_match_index = 0;
2213 for (opno = 0; opno < recog_data.n_operands; opno++)
2215 rtx op = recog_data.operand[opno];
2216 enum machine_mode mode = GET_MODE (op);
2217 const char *p = constraints[opno];
2223 earlyclobber[opno] = 0;
2225 /* A unary operator may be accepted by the predicate, but it
2226 is irrelevant for matching constraints. */
/* Strip a SUBREG, remembering the hard-register offset it implies.  */
2230 if (GET_CODE (op) == SUBREG)
2232 if (REG_P (SUBREG_REG (op))
2233 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2234 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2235 GET_MODE (SUBREG_REG (op)),
2238 op = SUBREG_REG (op);
2241 /* An empty constraint or empty alternative
2242 allows anything which matched the pattern. */
2243 if (*p == 0 || *p == ',')
2247 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2256 case '?': case '!': case '*': case '%':
2261 /* Ignore rest of this alternative as far as
2262 constraint checking is concerned. */
2265 while (*p && *p != ',');
2270 earlyclobber[opno] = 1;
2271 if (seen_earlyclobber_at < 0)
2272 seen_earlyclobber_at = opno;
2275 case '0': case '1': case '2': case '3': case '4':
2276 case '5': case '6': case '7': case '8': case '9':
2278 /* This operand must be the same as a previous one.
2279 This kind of constraint is used for instructions such
2280 as add when they take only two operands.
2282 Note that the lower-numbered operand is passed first.
2284 If we are not testing strictly, assume that this
2285 constraint will be satisfied. */
2290 match = strtoul (p, &end, 10);
2297 rtx op1 = recog_data.operand[match];
2298 rtx op2 = recog_data.operand[opno];
2300 /* A unary operator may be accepted by the predicate,
2301 but it is irrelevant for matching constraints. */
2303 op1 = XEXP (op1, 0);
2305 op2 = XEXP (op2, 0);
2307 val = operands_match_p (op1, op2);
2310 matching_operands[opno] = match;
2311 matching_operands[match] = opno;
2316 /* If output is *x and input is *--x, arrange later
2317 to change the output to *--x as well, since the
2318 output op is the one that will be printed. */
2319 if (val == 2 && strict > 0)
2321 funny_match[funny_match_index].this = opno;
2322 funny_match[funny_match_index++].other = match;
2329 /* p is used for address_operands. When we are called by
2330 gen_reload, no one will have checked that the address is
2331 strictly valid, i.e., that all pseudos requiring hard regs
2332 have gotten them. */
2334 || (strict_memory_address_p (recog_data.operand_mode[opno],
2339 /* No need to check general_operand again;
2340 it was done in insn-recog.c. Well, except that reload
2341 doesn't check the validity of its replacements, but
2342 that should only matter when there's a bug. */
2344 /* Anything goes unless it is a REG and really has a hard reg
2345 but the hard reg is not in the class GENERAL_REGS. */
2349 || GENERAL_REGS == ALL_REGS
2350 || (reload_in_progress
2351 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2352 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2355 else if (strict < 0 || general_operand (op, mode))
2360 /* This is used for a MATCH_SCRATCH in the cases when
2361 we don't actually need anything. So anything goes
2367 /* Memory operands must be valid, to the extent
2368 required by STRICT. */
2372 && !strict_memory_address_p (GET_MODE (op),
2376 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2380 /* Before reload, accept what reload can turn into mem. */
2381 else if (strict < 0 && CONSTANT_P (op))
2383 /* During reload, accept a pseudo */
2384 else if (reload_in_progress && REG_P (op)
2385 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
/* '<': autodecrement memory operand.  */
2391 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2392 || GET_CODE (XEXP (op, 0)) == POST_DEC))
/* '>': autoincrement memory operand.  */
2398 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2399 || GET_CODE (XEXP (op, 0)) == POST_INC))
2405 if (GET_CODE (op) == CONST_DOUBLE
2406 || (GET_CODE (op) == CONST_VECTOR
2407 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2413 if (GET_CODE (op) == CONST_DOUBLE
2414 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2419 if (GET_CODE (op) == CONST_INT
2420 || (GET_CODE (op) == CONST_DOUBLE
2421 && GET_MODE (op) == VOIDmode))
2424 if (CONSTANT_P (op))
2429 if (GET_CODE (op) == CONST_INT
2430 || (GET_CODE (op) == CONST_DOUBLE
2431 && GET_MODE (op) == VOIDmode))
2443 if (GET_CODE (op) == CONST_INT
2444 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
/* 'V': memory that is NOT offsettable.  */
2450 && ((strict > 0 && ! offsettable_memref_p (op))
2452 && !(CONSTANT_P (op) || MEM_P (op)))
2453 || (reload_in_progress
2455 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
/* 'o': offsettable memory, checked strictly or loosely per STRICT.  */
2460 if ((strict > 0 && offsettable_memref_p (op))
2461 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2462 /* Before reload, accept what reload can handle. */
2464 && (CONSTANT_P (op) || MEM_P (op)))
2465 /* During reload, accept a pseudo */
2466 || (reload_in_progress && REG_P (op)
2467 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* Register-class letters: 'g'/'r' map to GENERAL_REGS, others via
   REG_CLASS_FROM_CONSTRAINT.  */
2476 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2482 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2483 || (strict == 0 && GET_CODE (op) == SCRATCH)
2485 && reg_fits_class_p (op, cl, offset, mode)))
2488 #ifdef EXTRA_CONSTRAINT_STR
2489 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2492 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2493 /* Every memory operand can be reloaded to fit. */
2494 && ((strict < 0 && MEM_P (op))
2495 /* Before reload, accept what reload can turn
2497 || (strict < 0 && CONSTANT_P (op))
2498 /* During reload, accept a pseudo */
2499 || (reload_in_progress && REG_P (op)
2500 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2502 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2503 /* Every address operand can be reloaded to fit. */
2510 while (p += len, c);
2512 constraints[opno] = p;
2513 /* If this operand did not win somehow,
2514 this alternative loses. */
2518 /* This alternative won; the operands are ok.
2519 Change whichever operands this alternative says to change. */
2524 /* See if any earlyclobber operand conflicts with some other
2527 if (strict > 0 && seen_earlyclobber_at >= 0)
2528 for (eopno = seen_earlyclobber_at;
2529 eopno < recog_data.n_operands;
2531 /* Ignore earlyclobber operands now in memory,
2532 because we would often report failure when we have
2533 two memory operands, one of which was formerly a REG. */
2534 if (earlyclobber[eopno]
2535 && REG_P (recog_data.operand[eopno]))
2536 for (opno = 0; opno < recog_data.n_operands; opno++)
2537 if ((MEM_P (recog_data.operand[opno])
2538 || recog_data.operand_type[opno] != OP_OUT)
2540 /* Ignore things like match_operator operands. */
2541 && *recog_data.constraints[opno] != 0
2542 && ! (matching_operands[opno] == eopno
2543 && operands_match_p (recog_data.operand[opno],
2544 recog_data.operand[eopno]))
2545 && ! safe_from_earlyclobber (recog_data.operand[opno],
2546 recog_data.operand[eopno]))
/* Commit the deferred operand swaps recorded in FUNNY_MATCH.  */
2551 while (--funny_match_index >= 0)
2553 recog_data.operand[funny_match[funny_match_index].other]
2554 = recog_data.operand[funny_match[funny_match_index].this];
2561 which_alternative++;
2563 while (which_alternative < recog_data.n_alternatives);
2565 which_alternative = -1;
2566 /* If we are about to reject this, but we are not to test strictly,
2567 try a very loose test. Only return failure if it fails also. */
2569 return constrain_operands (-1);
2574 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2575 is a hard reg in class CLASS when its regno is offset by OFFSET
2576 and changed to mode MODE.
2577 If REG occupies multiple hard regs, all of them must be in CLASS. */
2580 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2581 enum machine_mode mode)
2583 int regno = REGNO (operand);
/* Pseudo registers (regno >= FIRST_PSEUDO_REGISTER) never fit.  */
2588 return (regno < FIRST_PSEUDO_REGISTER
2589 && in_hard_reg_set_p (reg_class_contents[(int) cl],
2590 mode, regno + offset));
2593 /* Split single instruction. Helper function for split_all_insns and
2594 split_all_insns_noflow. Return last insn in the sequence if successful,
2595 or NULL if unsuccessful. */
2598 split_insn (rtx insn)
2600 /* Split insns here to get max fine-grain parallelism. */
/* Remember the insn before INSN so we can walk the new sequence.  */
2601 rtx first = PREV_INSN (insn);
2602 rtx last = try_split (PATTERN (insn), insn, 1);
2607 /* try_split returns the NOTE that INSN became. */
2608 SET_INSN_DELETED (insn);
2610 /* ??? Coddle to md files that generate subregs in post-reload
2611 splitters instead of computing the proper hard register. */
2612 if (reload_completed && first != last)
2614 first = NEXT_INSN (first);
/* Walk each new insn, tidying SUBREG operands left by the splitter.  */
2618 cleanup_subreg_operands (first);
2621 first = NEXT_INSN (first);
2627 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2630 split_all_insns (void)
/* Bitmap of blocks whose insns were split; those blocks need their
   sub-basic-block structure recomputed afterwards.  */
2636 blocks = sbitmap_alloc (last_basic_block);
2637 sbitmap_zero (blocks);
2640 FOR_EACH_BB_REVERSE (bb)
2643 bool finish = false;
2645 for (insn = BB_HEAD (bb); !finish ; insn = next)
2647 /* Can't use `next_real_insn' because that might go across
2648 CODE_LABELS and short-out basic blocks. */
2649 next = NEXT_INSN (insn);
2650 finish = (insn == BB_END (bb));
2653 rtx set = single_set (insn);
2655 /* Don't split no-op move insns. These should silently
2656 disappear later in final. Splitting such insns would
2657 break the code that handles REG_NO_CONFLICT blocks. */
2658 if (set && set_noop_p (set))
2660 /* Nops get in the way while scheduling, so delete them
2661 now if register allocation has already been done. It
2662 is too risky to try to do this before register
2663 allocation, and there are unlikely to be very many
2664 nops then anyways. */
2665 if (reload_completed)
2666 delete_insn_and_edges (insn);
2670 rtx last = split_insn (insn);
2673 /* The split sequence may include barrier, but the
2674 BB boundary we are interested in will be set to
/* Back up over any trailing BARRIERs to the real last insn.  */
2677 while (BARRIER_P (last))
2678 last = PREV_INSN (last);
2679 SET_BIT (blocks, bb->index);
/* Rebuild CFG structure inside the blocks we touched.  */
2688 find_many_sub_basic_blocks (blocks);
2690 #ifdef ENABLE_CHECKING
2691 verify_flow_info ();
2694 sbitmap_free (blocks);
2697 /* Same as split_all_insns, but do not expect CFG to be available.
2698 Used by machine dependent reorg passes. */
2701 split_all_insns_noflow (void)
/* Simple linear walk of the insn chain; no basic-block bookkeeping.  */
2705 for (insn = get_insns (); insn; insn = next)
2707 next = NEXT_INSN (insn);
2710 /* Don't split no-op move insns. These should silently
2711 disappear later in final. Splitting such insns would
2712 break the code that handles REG_NO_CONFLICT blocks. */
2713 rtx set = single_set (insn);
2714 if (set && set_noop_p (set))
2716 /* Nops get in the way while scheduling, so delete them
2717 now if register allocation has already been done. It
2718 is too risky to try to do this before register
2719 allocation, and there are unlikely to be very many
2722 ??? Should we use delete_insn when the CFG isn't valid? */
2723 if (reload_completed)
2724 delete_insn_and_edges (insn);
2733 #ifdef HAVE_peephole2
/* One slot of the peephole2 window: an insn paired with the register
   liveness immediately before it (members are declared on elided lines;
   peep2_*_dead_p below read a LIVE_BEFORE regset from each slot).  */
2734 struct peep2_insn_data
/* Circular buffer of the most recent MAX_INSNS_PER_PEEP2 insns, plus one
   extra slot for the end-of-block marker (PEEP2_EOB).  */
2740 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
/* Index of the most recently recorded slot; offsets passed to the peep2_*
   accessors are relative to this position, wrapping modulo the buffer.  */
2741 static int peep2_current;
2742 /* The number of instructions available to match a peep2. */
2743 int peep2_current_count;
2745 /* A non-insn marker indicating the last insn of the block.
2746 The live_before regset for this element is correct, indicating
2747 DF_LIVE_OUT for the block. */
2748 #define PEEP2_EOB pc_rtx
2750 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2751 does not exist. Used by the recognizer to find the next insn to match
2752 in a multi-insn pattern. */
2755 peep2_next_insn (int n)
2757 gcc_assert (n <= peep2_current_count);
/* NOTE(review): an elided line presumably adds peep2_current to N before
   this wrap — confirm; the buffer is circular with
   MAX_INSNS_PER_PEEP2 + 1 slots.  */
2760 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2761 n -= MAX_INSNS_PER_PEEP2 + 1;
2763 return peep2_insn_data[n].insn;
2766 /* Return true if REGNO is dead before the Nth non-note insn
after the current one, i.e. it is not set in that slot's live_before
regset. */
2770 peep2_regno_dead_p (int ofs, int regno)
2772 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
/* Translate the window-relative offset into a circular-buffer index.  */
2774 ofs += peep2_current;
2775 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2776 ofs -= MAX_INSNS_PER_PEEP2 + 1;
/* The slot must contain valid data (an insn or the PEEP2_EOB marker).  */
2778 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2780 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2783 /* Similarly for a REG: every hard register covered by REG must be dead
before the Nth non-note insn. */
2786 peep2_reg_dead_p (int ofs, rtx reg)
2790 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
/* Translate the window-relative offset into a circular-buffer index.  */
2792 ofs += peep2_current;
2793 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2794 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2796 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
/* N is the number of hard registers REG occupies in its mode.  */
2798 regno = REGNO (reg);
2799 n = hard_regno_nregs[regno][GET_MODE (reg)];
/* NOTE(review): the loop decrementing N is on an elided line — this test
   presumably runs for each covered register, returning 0 if any is live.  */
2801 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2806 /* Try to find a hard register of mode MODE, matching the register class in
2807 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2808 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2809 in which case the only condition is that the register must be available
2810 before CURRENT_INSN.
2811 Registers that already have bits set in REG_SET will not be considered.
2813 If an appropriate register is available, it will be returned and the
2814 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
returned. */
2818 peep2_find_free_register (int from, int to, const char *class_str,
2819 enum machine_mode mode, HARD_REG_SET *reg_set)
/* Persists across calls so successive requests hand out different
   registers (see "Distribute the free registers" below).  */
2821 static int search_ofs;
2826 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2827 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
/* Translate FROM and TO from window offsets to circular-buffer indices.  */
2829 from += peep2_current;
2830 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2831 from -= MAX_INSNS_PER_PEEP2 + 1;
2832 to += peep2_current;
2833 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2834 to -= MAX_INSNS_PER_PEEP2 + 1;
2836 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2837 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
/* Accumulate liveness over every insn in [FROM, TO]: a register is only
   free if it is dead across the whole range (loop header elided).  */
2841 HARD_REG_SET this_live;
2843 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2845 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2846 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2847 IOR_HARD_REG_SET (live, this_live);
/* 'r' is handled specially since REG_CLASS_FROM_CONSTRAINT only decodes
   target-defined constraint letters.  */
2850 cl = (class_str[0] == 'r' ? GENERAL_REGS
2851 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2853 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2855 int raw_regno, regno, success, j;
2857 /* Distribute the free registers as much as possible. */
2858 raw_regno = search_ofs + i;
2859 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2860 raw_regno -= FIRST_PSEUDO_REGISTER;
2861 #ifdef REG_ALLOC_ORDER
2862 regno = reg_alloc_order[raw_regno];
2867 /* Don't allocate fixed registers. */
2868 if (fixed_regs[regno])
2870 /* Make sure the register is of the right class. */
2871 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
2873 /* And can support the mode we need. */
2874 if (! HARD_REGNO_MODE_OK (regno, mode))
2876 /* And that we don't create an extra save/restore. */
2877 if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
2879 /* And we don't clobber traceback for noreturn functions. */
2880 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2881 && (! reload_completed || frame_pointer_needed))
/* Check every hard register the mode occupies, not just REGNO itself.  */
2885 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2887 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2888 || TEST_HARD_REG_BIT (live, regno + j))
/* Success: reserve the register(s) for the caller.  */
2896 add_to_hard_reg_set (reg_set, mode, regno);
2898 /* Start the next search with the next register. */
2899 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2901 search_ofs = raw_regno;
2903 return gen_rtx_REG (mode, regno);
2911 /* Perform the peephole2 optimization pass.

   Walks each basic block backwards keeping a sliding window of
   MAX_INSNS_PER_PEEP2 insns with per-insn liveness, and asks the
   generated peephole2_insns matcher for replacements.  On a match,
   the old insns are replaced, call/EH notes are carried over, and
   liveness is re-simulated for the new sequence.  */
2914 peephole2_optimize (void)
2920 bool do_cleanup_cfg = false;
2921 bool do_rebuild_jump_labels = false;
2923 df_set_flags (DF_LR_RUN_DCE);
2926 /* Initialize the regsets we're going to use. */
2927 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
2928 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
2929 live = BITMAP_ALLOC (&reg_obstack);
2931 FOR_EACH_BB_REVERSE (bb)
2933 /* Indicate that all slots except the last hold invalid data. */
2934 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
2935 peep2_insn_data[i].insn = NULL_RTX;
2936 peep2_current_count = 0;
2938 /* Indicate that the last slot contains live_after data. */
2939 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
2940 peep2_current = MAX_INSNS_PER_PEEP2;
2942 /* Start up propagation. */
2943 bitmap_copy (live, DF_LR_OUT (bb));
2944 df_simulate_artificial_refs_at_end (bb, live);
2945 bitmap_copy (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
/* Scan the block backwards so liveness can be simulated insn by insn.  */
2947 for (insn = BB_END (bb); ; insn = prev)
2949 prev = PREV_INSN (insn);
2952 rtx try, before_try, x;
2955 bool was_call = false;
2957 /* Record this insn. */
2958 if (--peep2_current < 0)
2959 peep2_current = MAX_INSNS_PER_PEEP2;
2960 if (peep2_current_count < MAX_INSNS_PER_PEEP2
2961 && peep2_insn_data[peep2_current].insn == NULL_RTX)
2962 peep2_current_count++;
2963 peep2_insn_data[peep2_current].insn = insn;
2964 df_simulate_one_insn_backwards (bb, insn, live);
2965 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
2967 if (RTX_FRAME_RELATED_P (insn))
2969 /* If an insn has RTX_FRAME_RELATED_P set, peephole
2970 substitution would lose the
2971 REG_FRAME_RELATED_EXPR that is attached. */
2972 peep2_current_count = 0;
2976 /* Match the peephole. */
2977 try = peephole2_insns (PATTERN (insn), insn, &match_len);
2981 /* If we are splitting a CALL_INSN, look for the CALL_INSN
2982 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
2983 cfg-related call notes. */
2984 for (i = 0; i <= match_len; ++i)
2987 rtx old_insn, new_insn, note;
/* Window offsets wrap around the circular insn buffer.  */
2989 j = i + peep2_current;
2990 if (j >= MAX_INSNS_PER_PEEP2 + 1)
2991 j -= MAX_INSNS_PER_PEEP2 + 1;
2992 old_insn = peep2_insn_data[j].insn;
2993 if (!CALL_P (old_insn))
/* Find the (single) call in the replacement sequence.  */
2998 while (new_insn != NULL_RTX)
3000 if (CALL_P (new_insn))
3002 new_insn = NEXT_INSN (new_insn);
3005 gcc_assert (new_insn != NULL_RTX);
3007 CALL_INSN_FUNCTION_USAGE (new_insn)
3008 = CALL_INSN_FUNCTION_USAGE (old_insn);
/* Copy over the cfg-relevant notes; all others are dropped.  */
3010 for (note = REG_NOTES (old_insn);
3012 note = XEXP (note, 1))
3013 switch (REG_NOTE_KIND (note))
3017 REG_NOTES (new_insn)
3018 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3020 REG_NOTES (new_insn));
3022 /* Discard all other reg notes. */
3026 /* Croak if there is another call in the sequence. */
3027 while (++i <= match_len)
3029 j = i + peep2_current;
3030 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3031 j -= MAX_INSNS_PER_PEEP2 + 1;
3032 old_insn = peep2_insn_data[j].insn;
3033 gcc_assert (!CALL_P (old_insn));
/* I now indexes the last matched insn; its EH region note (if any)
   must be re-applied to the replacement sequence.  */
3038 i = match_len + peep2_current;
3039 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3040 i -= MAX_INSNS_PER_PEEP2 + 1;
3042 note = find_reg_note (peep2_insn_data[i].insn,
3043 REG_EH_REGION, NULL_RTX);
3045 /* Replace the old sequence with the new. */
3046 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3047 INSN_LOCATOR (peep2_insn_data[i].insn));
3048 before_try = PREV_INSN (insn);
3049 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3051 /* Re-insert the EH_REGION notes. */
3052 if (note || (was_call && nonlocal_goto_handler_labels))
3057 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3058 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3061 for (x = try ; x != before_try ; x = PREV_INSN (x))
3063 || (flag_non_call_exceptions
3064 && may_trap_p (PATTERN (x))
3065 && !find_reg_note (x, REG_EH_REGION, NULL)))
3069 = gen_rtx_EXPR_LIST (REG_EH_REGION,
/* A throwing insn in the middle of the new sequence forces a block
   split so the EH edge leaves from the right place.  */
3073 if (x != BB_END (bb) && eh_edge)
3078 nfte = split_block (bb, x);
3079 flags = (eh_edge->flags
3080 & (EDGE_EH | EDGE_ABNORMAL));
3082 flags |= EDGE_ABNORMAL_CALL;
3083 nehe = make_edge (nfte->src, eh_edge->dest,
3086 nehe->probability = eh_edge->probability;
3088 = REG_BR_PROB_BASE - nehe->probability;
3090 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3096 /* Converting possibly trapping insn to non-trapping is
3097 possible. Zap dummy outgoing edges. */
3098 do_cleanup_cfg |= purge_dead_edges (bb);
3101 #ifdef HAVE_conditional_execution
/* With conditional execution the cached window cannot be trusted after
   a substitution; invalidate every slot and restart from scratch.  */
3102 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3103 peep2_insn_data[i].insn = NULL_RTX;
3104 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3105 peep2_current_count = 0;
3107 /* Back up lifetime information past the end of the
3108 newly created sequence. */
3109 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3111 bitmap_copy (live, peep2_insn_data[i].live_before);
3113 /* Update life information for the new sequence. */
/* Record each new insn in the end-of-window slot and re-simulate its
   effect on liveness (surrounding loop is on elided lines).  */
3120 i = MAX_INSNS_PER_PEEP2;
3121 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3122 && peep2_insn_data[i].insn == NULL_RTX)
3123 peep2_current_count++;
3124 peep2_insn_data[i].insn = x;
3126 df_simulate_one_insn_backwards (bb, x, live);
3127 bitmap_copy (peep2_insn_data[i].live_before, live);
3136 /* If we generated a jump instruction, it won't have
3137 JUMP_LABEL set. Recompute after we're done. */
3138 for (x = try; x != before_try; x = PREV_INSN (x))
3141 do_rebuild_jump_labels = true;
3147 if (insn == BB_HEAD (bb))
/* Release the per-slot regsets allocated at the top of the pass.  */
3152 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3153 BITMAP_FREE (peep2_insn_data[i].live_before);
3155 if (do_rebuild_jump_labels)
3156 rebuild_jump_labels (get_insns ());
3158 #endif /* HAVE_peephole2 */
3160 /* Common predicates for use with define_bypass. */
3162 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3163 data not the address operand(s) of the store. IN_INSN and OUT_INSN
3164 must be either a single_set or a PARALLEL with SETs inside.

Returns nonzero only when no destination of OUT_INSN is mentioned in
the address of any store performed by IN_INSN (return statements are
on elided lines). */
3167 store_data_bypass_p (rtx out_insn, rtx in_insn)
3169 rtx out_set, in_set;
3170 rtx out_pat, in_pat;
3171 rtx out_exp, in_exp;
/* Case 1: IN_INSN is a single set — it must be a store to memory.  */
3174 in_set = single_set (in_insn);
3177 if (!MEM_P (SET_DEST (in_set)))
3180 out_set = single_set (out_insn);
/* OUT_INSN's destination must not feed the store address.  */
3183 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
/* OUT_INSN is a PARALLEL: check every SET it contains.  */
3188 out_pat = PATTERN (out_insn);
3190 if (GET_CODE (out_pat) != PARALLEL)
3193 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3195 out_exp = XVECEXP (out_pat, 0, i);
3197 if (GET_CODE (out_exp) == CLOBBER)
3200 gcc_assert (GET_CODE (out_exp) == SET);
3202 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
/* Case 2: IN_INSN is a PARALLEL — examine each contained store.  */
3209 in_pat = PATTERN (in_insn);
3210 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3212 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3214 in_exp = XVECEXP (in_pat, 0, i);
3216 if (GET_CODE (in_exp) == CLOBBER)
3219 gcc_assert (GET_CODE (in_exp) == SET);
3221 if (!MEM_P (SET_DEST (in_exp)))
3224 out_set = single_set (out_insn);
3227 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3232 out_pat = PATTERN (out_insn);
3233 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3235 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3237 out_exp = XVECEXP (out_pat, 0, j);
3239 if (GET_CODE (out_exp) == CLOBBER)
3242 gcc_assert (GET_CODE (out_exp) == SET);
3244 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3254 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3255 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3256 or multiple set; IN_INSN should be single_set for truth, but for convenience
3257 of insn categorization may be any JUMP or CALL insn.

Returns nonzero only when no destination of OUT_INSN is mentioned in
the THEN (XEXP 1) or ELSE (XEXP 2) arm of IN_INSN's IF_THEN_ELSE. */
3260 if_test_bypass_p (rtx out_insn, rtx in_insn)
3262 rtx out_set, in_set;
3264 in_set = single_set (in_insn);
3267 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3271 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3273 in_set = SET_SRC (in_set);
/* Single-set OUT_INSN: its one destination must not feed either arm.  */
3275 out_set = single_set (out_insn);
3278 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3279 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
/* Multi-set OUT_INSN: check every SET in the PARALLEL.  */
3287 out_pat = PATTERN (out_insn);
3288 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3290 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3292 rtx exp = XVECEXP (out_pat, 0, i);
3294 if (GET_CODE (exp) == CLOBBER)
3297 gcc_assert (GET_CODE (exp) == SET);
/* Fix: test the destination of this SET (EXP), not OUT_SET — in this
   branch single_set failed, so OUT_SET is NULL and dereferencing it
   via SET_DEST would crash; it also checked the wrong register.  */
3299 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3300 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
/* Gate: run peephole2 only when optimizing and -fpeephole2 is enabled.  */
3309 gate_handle_peephole2 (void)
3311 return (optimize > 0 && flag_peephole2);
/* Pass entry point; a no-op on targets without define_peephole2.  */
3315 rest_of_handle_peephole2 (void)
3317 #ifdef HAVE_peephole2
3318 peephole2_optimize ();
/* Pass descriptor for the peephole2 pass (elided slots keep their
   upstream values; trailing field comments label each initializer).  */
3323 struct tree_opt_pass pass_peephole2 =
3325 "peephole2", /* name */
3326 gate_handle_peephole2, /* gate */
3327 rest_of_handle_peephole2, /* execute */
3330 0, /* static_pass_number */
3331 TV_PEEPHOLE2, /* tv_id */
3332 0, /* properties_required */
3333 0, /* properties_provided */
3334 0, /* properties_destroyed */
3335 0, /* todo_flags_start */
3337 TODO_dump_func, /* todo_flags_finish */
/* Pass entry point for the first full insn-splitting pass ("split1");
   body is on elided lines.  */
3342 rest_of_handle_split_all_insns (void)
/* Pass descriptor for split1.  */
3348 struct tree_opt_pass pass_split_all_insns =
3350 "split1", /* name */
3352 rest_of_handle_split_all_insns, /* execute */
3355 0, /* static_pass_number */
3357 0, /* properties_required */
3358 0, /* properties_provided */
3359 0, /* properties_destroyed */
3360 0, /* todo_flags_start */
3361 TODO_dump_func, /* todo_flags_finish */
/* Pass entry point for splitting after reload ("split2").  */
3366 rest_of_handle_split_after_reload (void)
3368 /* If optimizing, then go ahead and split insns now. */
/* Pass descriptor for split2.  */
3376 struct tree_opt_pass pass_split_after_reload =
3378 "split2", /* name */
3380 rest_of_handle_split_after_reload, /* execute */
3383 0, /* static_pass_number */
3385 0, /* properties_required */
3386 0, /* properties_provided */
3387 0, /* properties_destroyed */
3388 0, /* todo_flags_start */
3389 TODO_dump_func, /* todo_flags_finish */
/* Gate for "split3": needed only on length-attribute + reg-stack targets
   when sched2 will not run (final cannot split with HAVE_ATTR_length).  */
3394 gate_handle_split_before_regstack (void)
3396 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3397 /* If flow2 creates new instructions which need splitting
3398 and scheduling after reload is not done, they might not be
3399 split until final which doesn't allow splitting
3400 if HAVE_ATTR_length. */
3401 # ifdef INSN_SCHEDULING
3402 return (optimize && !flag_schedule_insns_after_reload);
/* Pass entry point for split3; body is on elided lines.  */
3412 rest_of_handle_split_before_regstack (void)
/* Pass descriptor for split3.  */
3418 struct tree_opt_pass pass_split_before_regstack =
3420 "split3", /* name */
3421 gate_handle_split_before_regstack, /* gate */
3422 rest_of_handle_split_before_regstack, /* execute */
3425 0, /* static_pass_number */
3427 0, /* properties_required */
3428 0, /* properties_provided */
3429 0, /* properties_destroyed */
3430 0, /* todo_flags_start */
3431 TODO_dump_func, /* todo_flags_finish */
/* Gate for "split4": split before the second scheduling pass when that
   pass is enabled.  */
3436 gate_handle_split_before_sched2 (void)
3438 #ifdef INSN_SCHEDULING
3439 return optimize > 0 && flag_schedule_insns_after_reload;
/* Pass entry point for split4; body is on elided lines.  */
3446 rest_of_handle_split_before_sched2 (void)
3448 #ifdef INSN_SCHEDULING
/* Pass descriptor for split4.  */
3454 struct tree_opt_pass pass_split_before_sched2 =
3456 "split4", /* name */
3457 gate_handle_split_before_sched2, /* gate */
3458 rest_of_handle_split_before_sched2, /* execute */
3461 0, /* static_pass_number */
3463 0, /* properties_required */
3464 0, /* properties_provided */
3465 0, /* properties_destroyed */
3466 0, /* todo_flags_start */
3468 TODO_dump_func, /* todo_flags_finish */
3472 /* The placement of the splitting that we do for shorten_branches
3473 depends on whether regstack is used by the target or not. */
/* Gate for "split5": run the final pre-shorten_branches split only on
   length-attribute targets without a register stack (reg-stack targets
   split earlier, in split3).  */
3475 gate_do_final_split (void)
3477 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
/* Pass descriptor for split5; executes split_all_insns_noflow directly.  */
3484 struct tree_opt_pass pass_split_for_shorten_branches =
3486 "split5", /* name */
3487 gate_do_final_split, /* gate */
3488 split_all_insns_noflow, /* execute */
3491 0, /* static_pass_number */
3493 0, /* properties_required */
3494 0, /* properties_provided */
3495 0, /* properties_destroyed */
3496 0, /* todo_flags_start */
3497 TODO_dump_func, /* todo_flags_finish */