1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 /* This is the pathetic remnant of the once-famous jump-optimization pass
24 of the compiler. Now it basically contains a set of utility functions to
27 Each CODE_LABEL has a count of the times it is used
28 stored in the LABEL_NUSES internal field, and each JUMP_INSN
29 has one label that it refers to stored in the
30 JUMP_LABEL internal field. With this we can detect labels that
31 become unused because of the deletion of all the jumps that
32 formerly used them. The JUMP_LABEL info is sometimes looked
35 The subroutines redirect_jump and invert_jump are used
36 from other passes as well. */
40 #include "coretypes.h"
45 #include "hard-reg-set.h"
47 #include "insn-config.h"
48 #include "insn-attr.h"
54 #include "diagnostic.h"
59 #include "tree-pass.h"
61 /* Optimize jump y; x: ... y: jumpif... x?
62 Don't know if it is worth bothering with. */
63 /* Optimize two cases of conditional jump to conditional jump?
64 This can never delete any instruction or make anything dead,
65 or even change what is live at any point.
66 So perhaps let combiner do it. */
68 static void init_label_info (rtx);
69 static void mark_all_labels (rtx);
70 static void delete_computation (rtx);
71 static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
72 static int invert_exp_1 (rtx, rtx);
73 static int returnjump_p_1 (rtx *, void *);
74 static void delete_prior_computation (rtx, rtx);
76 /* Alternate entry into the jump optimizer. This entry point only rebuilds
77 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
80 rebuild_jump_labels (rtx f)
84 timevar_push (TV_REBUILD_JUMP);
88 /* Keep track of labels used from static data; we don't track them
89 closely enough to delete them here, so make sure their reference
90 count doesn't drop to zero. */
92 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
93 if (LABEL_P (XEXP (insn, 0)))
94 LABEL_NUSES (XEXP (insn, 0))++;
95 timevar_pop (TV_REBUILD_JUMP);
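
/* Illustrative sketch, not part of the original pass; the function name is
   hypothetical.  After a pass has rewritten jump targets by hand, the
   bookkeeping described in the file header can be restored by calling
   rebuild_jump_labels on the whole insn stream; afterwards every JUMP_INSN
   with a direct label target has a JUMP_LABEL whose LABEL_NUSES is positive.  */

static void
example_refresh_and_check_labels (void)
{
  rtx insn;

  rebuild_jump_labels (get_insns ());

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn) && JUMP_LABEL (insn) && LABEL_P (JUMP_LABEL (insn)))
      gcc_assert (LABEL_NUSES (JUMP_LABEL (insn)) > 0);
}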
98 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
99 non-fallthru insn. This is not generally true, as multiple barriers
100 may have crept in, or the BARRIER may be separated from the last
101 real insn by one or more NOTEs.
103 This simple pass moves barriers and removes duplicates so that the
107 cleanup_barriers (void)
109 rtx insn, next, prev;
110 for (insn = get_insns (); insn; insn = next)
112 next = NEXT_INSN (insn);
113 if (BARRIER_P (insn))
115 prev = prev_nonnote_insn (insn);
116 if (BARRIER_P (prev))
118 else if (prev != PREV_INSN (insn))
119 reorder_insns (insn, insn, prev);
124 struct tree_opt_pass pass_cleanup_barriers =
128 cleanup_barriers, /* execute */
131 0, /* static_pass_number */
133 0, /* properties_required */
134 0, /* properties_provided */
135 0, /* properties_destroyed */
136 0, /* todo_flags_start */
137 0, /* todo_flags_finish */
142 purge_line_number_notes (void)
146 /* Delete extraneous line number notes.
147 Note that two consecutive notes for different lines are not really
148 extraneous. There should be some indication where that line belonged,
149 even if it became empty. */
151 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
154 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
155 /* Any previous line note was for the prologue; gdb wants a new
156 note after the prologue even if it is for the same line. */
157 last_note = NULL_RTX;
158 else if (NOTE_LINE_NUMBER (insn) >= 0)
160 /* Delete this note if it is identical to previous note. */
162 #ifdef USE_MAPPED_LOCATION
163 && NOTE_SOURCE_LOCATION (insn) == NOTE_SOURCE_LOCATION (last_note)
165 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
166 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note)
170 delete_related_insns (insn);
179 struct tree_opt_pass pass_purge_lineno_notes =
183 purge_line_number_notes, /* execute */
186 0, /* static_pass_number */
188 0, /* properties_required */
189 0, /* properties_provided */
190 0, /* properties_destroyed */
191 0, /* todo_flags_start */
192 0, /* todo_flags_finish */
197 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
198 notes whose labels don't occur in the insn any more. */
201 init_label_info (rtx f)
205 for (insn = f; insn; insn = NEXT_INSN (insn))
207 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
208 else if (JUMP_P (insn))
209 JUMP_LABEL (insn) = 0;
210 else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
214 for (note = REG_NOTES (insn); note; note = next)
216 next = XEXP (note, 1);
217 if (REG_NOTE_KIND (note) == REG_LABEL
218 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
219 remove_note (insn, note);
224 /* Mark the label each jump jumps to.
225 Combine consecutive labels, and count uses of labels. */
228 mark_all_labels (rtx f)
232 for (insn = f; insn; insn = NEXT_INSN (insn))
235 mark_jump_label (PATTERN (insn), insn, 0);
236 if (! INSN_DELETED_P (insn) && JUMP_P (insn))
238 /* When we know the LABEL_REF contained in a REG used in
239 an indirect jump, we'll have a REG_LABEL note so that
240 flow can tell where it's going. */
241 if (JUMP_LABEL (insn) == 0)
243 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
246 /* Put a LABEL_REF around the label from the REG_LABEL note, so
247 that we can canonicalize it. */
248 rtx label_ref = gen_rtx_LABEL_REF (Pmode,
249 XEXP (label_note, 0));
251 mark_jump_label (label_ref, insn, 0);
252 XEXP (label_note, 0) = XEXP (label_ref, 0);
253 JUMP_LABEL (insn) = XEXP (label_note, 0);
260 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and loop-end
261 notes between START and END out before START. START and END may be such
262 notes. Returns the values of the new starting and ending insns, which
263 may be different if the original ones were such notes.
264 Return true if there were only such notes and no real instructions. */
267 squeeze_notes (rtx* startp, rtx* endp)
275 rtx past_end = NEXT_INSN (end);
277 for (insn = start; insn != past_end; insn = next)
279 next = NEXT_INSN (insn);
281 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
282 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
283 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
284 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END))
286 /* BLOCK_BEG or BLOCK_END notes only exist in the `final' pass. */
287 gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
288 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
294 rtx prev = PREV_INSN (insn);
295 PREV_INSN (insn) = PREV_INSN (start);
296 NEXT_INSN (insn) = start;
297 NEXT_INSN (PREV_INSN (insn)) = insn;
298 PREV_INSN (NEXT_INSN (insn)) = insn;
299 NEXT_INSN (prev) = next;
300 PREV_INSN (next) = prev;
307 /* There were no real instructions. */
308 if (start == past_end)
318 /* Return the label before INSN, or put a new label there. */
321 get_label_before (rtx insn)
325 /* Find an existing label at this point
326 or make a new one if there is none. */
327 label = prev_nonnote_insn (insn);
329 if (label == 0 || !LABEL_P (label))
331 rtx prev = PREV_INSN (insn);
333 label = gen_label_rtx ();
334 emit_label_after (label, prev);
335 LABEL_NUSES (label) = 0;
340 /* Return the label after INSN, or put a new label there. */
343 get_label_after (rtx insn)
347 /* Find an existing label at this point
348 or make a new one if there is none. */
349 label = next_nonnote_insn (insn);
351 if (label == 0 || !LABEL_P (label))
353 label = gen_label_rtx ();
354 emit_label_after (label, insn);
355 LABEL_NUSES (label) = 0;
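
/* Hedged usage sketch; the helper name is hypothetical.  Make JUMP transfer
   control to the point just before TARGET_INSN, materializing a CODE_LABEL
   there with get_label_before if none exists yet.  */

static int
example_branch_before (rtx jump, rtx target_insn)
{
  rtx label = get_label_before (target_insn);

  /* redirect_jump returns nonzero when the rewrite was accepted; pass 0 so
     the old target label is kept even if it becomes unused.  */
  return redirect_jump (jump, label, 0);
}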
360 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
361 of the reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
362 UNKNOWN may be returned in case we have a CC_MODE compare and we don't
363 know whether its source is a floating-point or an integer comparison. The
364 machine description should define the REVERSIBLE_CC_MODE and REVERSE_CONDITION
365 macros to help this function avoid overhead in these cases. */
367 reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
369 enum machine_mode mode;
371 /* If this is not actually a comparison, we can't reverse it. */
372 if (GET_RTX_CLASS (code) != RTX_COMPARE
373 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
376 mode = GET_MODE (arg0);
377 if (mode == VOIDmode)
378 mode = GET_MODE (arg1);
380 /* First see if the machine description supplies us a way to reverse the
381 comparison. Give it priority over everything else to allow the
382 machine description to do tricks. */
383 if (GET_MODE_CLASS (mode) == MODE_CC
384 && REVERSIBLE_CC_MODE (mode))
386 #ifdef REVERSE_CONDITION
387 return REVERSE_CONDITION (code, mode);
389 return reverse_condition (code);
392 /* Try a few special cases based on the comparison code. */
401 /* It is always safe to reverse EQ and NE, even for floating
402 point. Similarly, the unsigned comparisons are never used for
403 floating point, so we can reverse them in the default way. */
404 return reverse_condition (code);
409 /* If we already see an unordered comparison, we can be sure we are
410 dealing with floating point, so we don't need any more tests. */
411 return reverse_condition_maybe_unordered (code);
416 /* We don't have a safe way to reverse these yet. */
422 if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
425 /* Try to search for the comparison to determine the real mode.
426 This code is expensive, but with a sane machine description it
427 will never be used, since REVERSIBLE_CC_MODE will return true
432 for (prev = prev_nonnote_insn (insn);
433 prev != 0 && !LABEL_P (prev);
434 prev = prev_nonnote_insn (prev))
436 rtx set = set_of (arg0, prev);
437 if (set && GET_CODE (set) == SET
438 && rtx_equal_p (SET_DEST (set), arg0))
440 rtx src = SET_SRC (set);
442 if (GET_CODE (src) == COMPARE)
444 rtx comparison = src;
445 arg0 = XEXP (src, 0);
446 mode = GET_MODE (arg0);
447 if (mode == VOIDmode)
448 mode = GET_MODE (XEXP (comparison, 1));
451 /* We can get past reg-reg moves. This may be useful for the model
452 of i387 comparisons that first move flag registers around. */
459 /* If the register is clobbered in some way we cannot understand,
466 /* Test for an integer condition, or a floating-point comparison
467 in which NaNs can be ignored. */
468 if (GET_CODE (arg0) == CONST_INT
469 || (GET_MODE (arg0) != VOIDmode
470 && GET_MODE_CLASS (mode) != MODE_CC
471 && !HONOR_NANS (mode)))
472 return reverse_condition (code);
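
/* Illustrative sketch; the helper name and the throw-away pseudo register
   are made up.  For an ordinary SImode (integer) comparison none of the
   expensive searching above is needed: the reversed code is simply the
   logical negation, e.g. GT becomes LE.  */

static void
example_reverse_int_condition (void)
{
  rtx op0 = gen_rtx_REG (SImode, FIRST_PSEUDO_REGISTER);
  enum rtx_code rev
    = reversed_comparison_code_parts (GT, op0, const0_rtx, NULL_RTX);

  gcc_assert (rev == LE);
}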
477 /* A wrapper around the previous function to take COMPARISON as rtx
478 expression. This simplifies many callers. */
480 reversed_comparison_code (rtx comparison, rtx insn)
482 if (!COMPARISON_P (comparison))
484 return reversed_comparison_code_parts (GET_CODE (comparison),
485 XEXP (comparison, 0),
486 XEXP (comparison, 1), insn);
489 /* Return comparison with reversed code of EXP.
490 Return NULL_RTX in case we fail to do the reversal. */
492 reversed_comparison (rtx exp, enum machine_mode mode)
494 enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
495 if (reversed_code == UNKNOWN)
498 return simplify_gen_relational (reversed_code, mode, VOIDmode,
499 XEXP (exp, 0), XEXP (exp, 1));
503 /* Given an rtx-code for a comparison, return the code for the negated
504 comparison. If no such code exists, return UNKNOWN.
506 WATCH OUT! reverse_condition is not safe to use on a jump that might
507 be acting on the results of an IEEE floating point comparison, because
508 of the special treatment of non-signaling nans in comparisons.
509 Use reversed_comparison_code instead. */
512 reverse_condition (enum rtx_code code)
554 /* Similar, but we're allowed to generate unordered comparisons, which
555 makes it safe for IEEE floating-point. Of course, we have to recognize
556 that the target will support them too... */
559 reverse_condition_maybe_unordered (enum rtx_code code)
597 /* Similar, but return the code when two operands of a comparison are swapped.
598 This IS safe for IEEE floating-point. */
601 swap_condition (enum rtx_code code)
643 /* Given a comparison CODE, return the corresponding unsigned comparison.
644 If CODE is an equality comparison or already an unsigned comparison,
648 unsigned_condition (enum rtx_code code)
674 /* Similarly, return the signed version of a comparison. */
677 signed_condition (enum rtx_code code)
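
/* Illustrative sketch (hypothetical name): the expected mappings of the
   condition-code helpers above, written as assertions.  reverse_condition
   yields the logical negation, swap_condition the code obtained by
   exchanging the two operands, and unsigned_condition/signed_condition
   switch between the signed and unsigned families.  */

static void
example_condition_code_identities (void)
{
  gcc_assert (reverse_condition (GT) == LE);
  gcc_assert (reverse_condition (EQ) == NE);
  gcc_assert (reverse_condition_maybe_unordered (GT) == UNLE);
  gcc_assert (swap_condition (GT) == LT);
  gcc_assert (swap_condition (EQ) == EQ);
  gcc_assert (unsigned_condition (GT) == GTU);
  gcc_assert (signed_condition (LTU) == LT);
}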
703 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
704 truth of CODE1 implies the truth of CODE2. */
707 comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
709 /* UNKNOWN comparison codes can happen as a result of trying to revert
711 They can't match anything, so we have to reject them here. */
712 if (code1 == UNKNOWN || code2 == UNKNOWN)
721 if (code2 == UNLE || code2 == UNGE)
726 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
732 if (code2 == UNLE || code2 == NE)
737 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
742 if (code2 == UNGE || code2 == NE)
747 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
753 if (code2 == ORDERED)
758 if (code2 == NE || code2 == ORDERED)
763 if (code2 == LEU || code2 == NE)
768 if (code2 == GEU || code2 == NE)
773 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
774 || code2 == UNGE || code2 == UNGT)
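
/* Illustrative sketch (hypothetical name): comparison_dominates_p answers
   "does the truth of CODE1 imply the truth of CODE2?".  For instance
   x == y implies x <= y, and x < y implies x != y, but x <= y does not
   imply x < y.  */

static void
example_comparison_dominance (void)
{
  gcc_assert (comparison_dominates_p (EQ, LE));
  gcc_assert (comparison_dominates_p (LT, NE));
  gcc_assert (! comparison_dominates_p (LE, LT));
}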
785 /* Return 1 if INSN is an unconditional jump and nothing else. */
788 simplejump_p (rtx insn)
790 return (JUMP_P (insn)
791 && GET_CODE (PATTERN (insn)) == SET
792 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
793 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
796 /* Return nonzero if INSN is a (possibly) conditional jump
799 Use of this function is deprecated, since we need to support combined
800 branch and compare insns. Use any_condjump_p instead whenever possible. */
803 condjump_p (rtx insn)
805 rtx x = PATTERN (insn);
807 if (GET_CODE (x) != SET
808 || GET_CODE (SET_DEST (x)) != PC)
812 if (GET_CODE (x) == LABEL_REF)
815 return (GET_CODE (x) == IF_THEN_ELSE
816 && ((GET_CODE (XEXP (x, 2)) == PC
817 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
818 || GET_CODE (XEXP (x, 1)) == RETURN))
819 || (GET_CODE (XEXP (x, 1)) == PC
820 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
821 || GET_CODE (XEXP (x, 2)) == RETURN))));
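
/* Illustrative sketch (hypothetical helper): the pattern shape that
   condjump_p above accepts for a conditional branch -- a bare SET of the PC
   from an IF_THEN_ELSE whose arms are a LABEL_REF (or RETURN) and the PC.
   COND is assumed to be a comparison such as (ne (reg) (const_int 0)).  */

static rtx
example_condjump_pattern (rtx cond, rtx label)
{
  return gen_rtx_SET (VOIDmode, pc_rtx,
                      gen_rtx_IF_THEN_ELSE (VOIDmode, cond,
                                            gen_rtx_LABEL_REF (Pmode, label),
                                            pc_rtx));
}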
824 /* Return nonzero if INSN is a (possibly) conditional jump inside a
827 Use of this function is deprecated, since we need to support combined
828 branch and compare insns. Use any_condjump_p instead whenever possible. */
831 condjump_in_parallel_p (rtx insn)
833 rtx x = PATTERN (insn);
835 if (GET_CODE (x) != PARALLEL)
838 x = XVECEXP (x, 0, 0);
840 if (GET_CODE (x) != SET)
842 if (GET_CODE (SET_DEST (x)) != PC)
844 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
846 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
848 if (XEXP (SET_SRC (x), 2) == pc_rtx
849 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
850 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
852 if (XEXP (SET_SRC (x), 1) == pc_rtx
853 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
854 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
859 /* Return the SET of PC, otherwise NULL. */
867 pat = PATTERN (insn);
869 /* The set is allowed to appear either as the insn pattern or
870 the first set in a PARALLEL. */
871 if (GET_CODE (pat) == PARALLEL)
872 pat = XVECEXP (pat, 0, 0);
873 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
879 /* Return true when insn is an unconditional direct jump,
880 possibly bundled inside a PARALLEL. */
883 any_uncondjump_p (rtx insn)
885 rtx x = pc_set (insn);
888 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
890 if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
895 /* Return true when insn is a conditional jump. This function works for
896 instructions containing PC sets in PARALLELs. The instruction may have
897 various other effects so before removing the jump you must verify
900 Note that unlike condjump_p it returns false for unconditional jumps. */
903 any_condjump_p (rtx insn)
905 rtx x = pc_set (insn);
910 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
913 a = GET_CODE (XEXP (SET_SRC (x), 1));
914 b = GET_CODE (XEXP (SET_SRC (x), 2));
916 return ((b == PC && (a == LABEL_REF || a == RETURN))
917 || (a == PC && (b == LABEL_REF || b == RETURN)));
920 /* Return the label of a conditional jump. */
923 condjump_label (rtx insn)
925 rtx x = pc_set (insn);
930 if (GET_CODE (x) == LABEL_REF)
932 if (GET_CODE (x) != IF_THEN_ELSE)
934 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
936 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
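
/* Hedged sketch (hypothetical helper): classify JUMP with the predicates
   above and return the CODE_LABEL it can transfer control to, or NULL_RTX
   for return jumps, tablejumps and the like.  Note that condjump_label
   returns the LABEL_REF, so the label itself is its XEXP 0.  */

static rtx
example_branch_target (rtx jump)
{
  if (! JUMP_P (jump))
    return NULL_RTX;

  if (any_uncondjump_p (jump))
    return JUMP_LABEL (jump);

  if (any_condjump_p (jump))
    {
      rtx ref = condjump_label (jump);
      return ref ? XEXP (ref, 0) : NULL_RTX;
    }

  return NULL_RTX;
}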
941 /* Return true if INSN is a (possibly conditional) return insn. */
944 returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
948 return x && (GET_CODE (x) == RETURN
949 || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
953 returnjump_p (rtx insn)
957 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
960 /* Return true if INSN is a jump that only transfers control and
964 onlyjump_p (rtx insn)
971 set = single_set (insn);
974 if (GET_CODE (SET_DEST (set)) != PC)
976 if (side_effects_p (SET_SRC (set)))
984 /* Return nonzero if X is an RTX that only sets the condition codes
985 and has no side effects. */
988 only_sets_cc0_p (rtx x)
996 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
999 /* Return 1 if X is an RTX that does nothing but set the condition codes
1000 and CLOBBER or USE registers.
1001 Return -1 if X does explicitly set the condition codes,
1002 but also does other things. */
1013 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1015 if (GET_CODE (x) == PARALLEL)
1019 int other_things = 0;
1020 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1022 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1023 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1025 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1028 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1034 /* Follow any unconditional jump at LABEL;
1035 return the ultimate label reached by any such chain of jumps.
1036 Return null if the chain ultimately leads to a return instruction.
1037 If LABEL is not followed by a jump, return LABEL.
1038 If the chain loops or we can't find end, return LABEL,
1039 since that tells caller to avoid changing the insn.
1041 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1042 a USE or CLOBBER. */
1045 follow_jumps (rtx label)
1054 && (insn = next_active_insn (value)) != 0
1056 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1057 && onlyjump_p (insn))
1058 || GET_CODE (PATTERN (insn)) == RETURN)
1059 && (next = NEXT_INSN (insn))
1060 && BARRIER_P (next));
1063 /* Don't chain through the insn that jumps into a loop
1064 from outside the loop,
1065 since that would create multiple loop entry jumps
1066 and prevent loop optimization. */
1068 if (!reload_completed)
1069 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1071 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1072 /* ??? Optional. Disables some optimizations, but makes
1073 gcov output more accurate with -O. */
1074 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1077 /* If we have found a cycle, make the insn jump to itself. */
1078 if (JUMP_LABEL (insn) == label)
1081 tem = next_active_insn (JUMP_LABEL (insn));
1082 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1083 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1086 value = JUMP_LABEL (insn);
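
/* Hedged usage sketch (hypothetical name): shorten a chain of unconditional
   jumps by retargeting JUMP at the final label that follow_jumps finds,
   letting redirect_jump delete the old label if it becomes unused.  */

static void
example_shorten_jump_chain (rtx jump)
{
  if (simplejump_p (jump) && onlyjump_p (jump))
    {
      rtx target = follow_jumps (JUMP_LABEL (jump));

      if (target && target != JUMP_LABEL (jump))
        redirect_jump (jump, target, 1);
    }
}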
1094 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1095 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1096 in INSN, then store one of them in JUMP_LABEL (INSN).
1097 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1098 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1099 Also, when there are consecutive labels, canonicalize on the last of them.
1101 Note that two labels separated by a loop-beginning note
1102 must be kept distinct if we have not yet done loop-optimization,
1103 because the gap between them is where loop-optimize
1104 will want to move invariant code to. CROSS_JUMP tells us
1105 that loop optimization has already been done. */
1108 mark_jump_label (rtx x, rtx insn, int in_mem)
1110 RTX_CODE code = GET_CODE (x);
1133 /* If this is a constant-pool reference, see if it is a label. */
1134 if (CONSTANT_POOL_ADDRESS_P (x))
1135 mark_jump_label (get_pool_constant (x), insn, in_mem);
1140 rtx label = XEXP (x, 0);
1142 /* Ignore remaining references to unreachable labels that
1143 have been deleted. */
1145 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1148 gcc_assert (LABEL_P (label));
1150 /* Ignore references to labels of containing functions. */
1151 if (LABEL_REF_NONLOCAL_P (x))
1154 XEXP (x, 0) = label;
1155 if (! insn || ! INSN_DELETED_P (insn))
1156 ++LABEL_NUSES (label);
1161 JUMP_LABEL (insn) = label;
1164 /* Add a REG_LABEL note for LABEL unless there already
1165 is one. All uses of a label, except for labels
1166 that are the targets of jumps, must have a
1168 if (! find_reg_note (insn, REG_LABEL, label))
1169 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1176 /* Do walk the labels in a vector, but not the first operand of an
1177 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1180 if (! INSN_DELETED_P (insn))
1182 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1184 for (i = 0; i < XVECLEN (x, eltnum); i++)
1185 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
1193 fmt = GET_RTX_FORMAT (code);
1194 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1197 mark_jump_label (XEXP (x, i), insn, in_mem);
1198 else if (fmt[i] == 'E')
1201 for (j = 0; j < XVECLEN (x, i); j++)
1202 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1207 /* If all INSN does is set the pc, delete it,
1208 and delete the insn that set the condition codes for it
1209 if that's what the previous thing was. */
1212 delete_jump (rtx insn)
1214 rtx set = single_set (insn);
1216 if (set && GET_CODE (SET_DEST (set)) == PC)
1217 delete_computation (insn);
1220 /* Recursively delete prior insns that compute the value (used only by INSN
1221 which the caller is deleting) stored in the register mentioned by NOTE
1222 which is a REG_DEAD note associated with INSN. */
1225 delete_prior_computation (rtx note, rtx insn)
1228 rtx reg = XEXP (note, 0);
1230 for (our_prev = prev_nonnote_insn (insn);
1231 our_prev && (NONJUMP_INSN_P (our_prev)
1232 || CALL_P (our_prev));
1233 our_prev = prev_nonnote_insn (our_prev))
1235 rtx pat = PATTERN (our_prev);
1237 /* If we reach a CALL which is not calling a const function
1238 or the callee pops the arguments, then give up. */
1239 if (CALL_P (our_prev)
1240 && (! CONST_OR_PURE_CALL_P (our_prev)
1241 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
1244 /* If we reach a SEQUENCE, it is too complex to try to
1245 do anything with it, so give up. We can be run during
1246 and after reorg, so SEQUENCE rtl can legitimately show
1248 if (GET_CODE (pat) == SEQUENCE)
1251 if (GET_CODE (pat) == USE
1252 && NONJUMP_INSN_P (XEXP (pat, 0)))
1253 /* reorg creates USEs that look like this. We leave them
1254 alone because reorg needs them for its own purposes. */
1257 if (reg_set_p (reg, pat))
1259 if (side_effects_p (pat) && !CALL_P (our_prev))
1262 if (GET_CODE (pat) == PARALLEL)
1264 /* If we find a SET of something else, we can't
1269 for (i = 0; i < XVECLEN (pat, 0); i++)
1271 rtx part = XVECEXP (pat, 0, i);
1273 if (GET_CODE (part) == SET
1274 && SET_DEST (part) != reg)
1278 if (i == XVECLEN (pat, 0))
1279 delete_computation (our_prev);
1281 else if (GET_CODE (pat) == SET
1282 && REG_P (SET_DEST (pat)))
1284 int dest_regno = REGNO (SET_DEST (pat));
1287 + (dest_regno < FIRST_PSEUDO_REGISTER
1288 ? hard_regno_nregs[dest_regno]
1289 [GET_MODE (SET_DEST (pat))] : 1));
1290 int regno = REGNO (reg);
1293 + (regno < FIRST_PSEUDO_REGISTER
1294 ? hard_regno_nregs[regno][GET_MODE (reg)] : 1));
1296 if (dest_regno >= regno
1297 && dest_endregno <= endregno)
1298 delete_computation (our_prev);
1300 /* We may have a multi-word hard register and some, but not
1301 all, of the words of the register are needed in subsequent
1302 insns. Write REG_UNUSED notes for those parts that were not
1304 else if (dest_regno <= regno
1305 && dest_endregno >= endregno)
1309 REG_NOTES (our_prev)
1310 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
1311 REG_NOTES (our_prev));
1313 for (i = dest_regno; i < dest_endregno; i++)
1314 if (! find_regno_note (our_prev, REG_UNUSED, i))
1317 if (i == dest_endregno)
1318 delete_computation (our_prev);
1325 /* If PAT references the register that dies here, it is an
1326 additional use. Hence any prior SET isn't dead. However, this
1327 insn becomes the new place for the REG_DEAD note. */
1328 if (reg_overlap_mentioned_p (reg, pat))
1330 XEXP (note, 1) = REG_NOTES (our_prev);
1331 REG_NOTES (our_prev) = note;
1337 /* Delete INSN and recursively delete insns that compute values used only
1338 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1339 If we are running before flow.c, we need do nothing since flow.c will
1340 delete dead code. We also can't know if the registers being used are
1341 dead or not at this point.
1343 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1344 nothing other than set a register that dies in this insn, we can delete
1347 On machines with CC0, if CC0 is used in this insn, we may be able to
1348 delete the insn that set it. */
1351 delete_computation (rtx insn)
1356 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1358 rtx prev = prev_nonnote_insn (insn);
1359 /* We assume that at this stage
1360 CC's are always set explicitly
1361 and always immediately before the jump that
1362 will use them. So if the previous insn
1363 exists to set the CC's, delete it
1364 (unless it performs auto-increments, etc.). */
1365 if (prev && NONJUMP_INSN_P (prev)
1366 && sets_cc0_p (PATTERN (prev)))
1368 if (sets_cc0_p (PATTERN (prev)) > 0
1369 && ! side_effects_p (PATTERN (prev)))
1370 delete_computation (prev);
1372 /* Otherwise, show that cc0 won't be used. */
1373 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1374 cc0_rtx, REG_NOTES (prev));
1379 for (note = REG_NOTES (insn); note; note = next)
1381 next = XEXP (note, 1);
1383 if (REG_NOTE_KIND (note) != REG_DEAD
1384 /* Verify that the REG_NOTE is legitimate. */
1385 || !REG_P (XEXP (note, 0)))
1388 delete_prior_computation (note, insn);
1391 delete_related_insns (insn);
1394 /* Delete insn INSN from the chain of insns and update label ref counts
1395 and delete insns now unreachable.
1397 Returns the first insn after INSN that was not deleted.
1399 Usage of this function is deprecated. Use delete_insn instead and a
1400 subsequent cfg_cleanup pass to delete unreachable code if needed. */
1403 delete_related_insns (rtx insn)
1405 int was_code_label = (LABEL_P (insn));
1407 rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
1409 while (next && INSN_DELETED_P (next))
1410 next = NEXT_INSN (next);
1412 /* This insn is already deleted => return first following nondeleted. */
1413 if (INSN_DELETED_P (insn))
1418 /* If instruction is followed by a barrier,
1419 delete the barrier too. */
1421 if (next != 0 && BARRIER_P (next))
1424 /* If deleting a jump, decrement the count of the label,
1425 and delete the label if it is now unused. */
1427 if (JUMP_P (insn) && JUMP_LABEL (insn))
1429 rtx lab = JUMP_LABEL (insn), lab_next;
1431 if (LABEL_NUSES (lab) == 0)
1433 /* This can delete NEXT or PREV,
1434 either directly if NEXT is JUMP_LABEL (INSN),
1435 or indirectly through more levels of jumps. */
1436 delete_related_insns (lab);
1438 /* I feel a little doubtful about this loop,
1439 but I see no clean and sure alternative way
1440 to find the first insn after INSN that is not now deleted.
1441 I hope this works. */
1442 while (next && INSN_DELETED_P (next))
1443 next = NEXT_INSN (next);
1446 else if (tablejump_p (insn, NULL, &lab_next))
1448 /* If we're deleting the tablejump, delete the dispatch table.
1449 We may not be able to kill the label immediately preceding
1450 just yet, as it might be referenced in code leading up to
1452 delete_related_insns (lab_next);
1456 /* Likewise if we're deleting a dispatch table. */
1459 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1460 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1462 rtx pat = PATTERN (insn);
1463 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1464 int len = XVECLEN (pat, diff_vec_p);
1466 for (i = 0; i < len; i++)
1467 if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1468 delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1469 while (next && INSN_DELETED_P (next))
1470 next = NEXT_INSN (next);
1474 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1475 if (NONJUMP_INSN_P (insn) || CALL_P (insn))
1476 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1477 if (REG_NOTE_KIND (note) == REG_LABEL
1478 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1479 && LABEL_P (XEXP (note, 0)))
1480 if (LABEL_NUSES (XEXP (note, 0)) == 0)
1481 delete_related_insns (XEXP (note, 0));
1483 while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
1484 prev = PREV_INSN (prev);
1486 /* If INSN was a label and a dispatch table follows it,
1487 delete the dispatch table. The tablejump must have gone already.
1488 It isn't useful to fall through into a table. */
1491 && NEXT_INSN (insn) != 0
1492 && JUMP_P (NEXT_INSN (insn))
1493 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1494 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1495 next = delete_related_insns (NEXT_INSN (insn));
1497 /* If INSN was a label, delete insns following it if now unreachable. */
1499 if (was_code_label && prev && BARRIER_P (prev))
1504 code = GET_CODE (next);
1506 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1507 next = NEXT_INSN (next);
1508 /* Keep going past other deleted labels to delete what follows. */
1509 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1510 next = NEXT_INSN (next);
1511 else if (code == BARRIER || INSN_P (next))
1512 /* Note: if this deletes a jump, it can cause more
1513 deletion of unreachable code, after a different label.
1514 As long as the value from this recursive call is correct,
1515 this invocation functions correctly. */
1516 next = delete_related_insns (next);
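
/* Hedged sketch (hypothetical name): remove a jump that has become redundant
   because it merely branches to the code that immediately follows it.
   delete_related_insns also removes the BARRIER after the jump, and the
   label itself once its reference count drops to zero.  */

static void
example_delete_trivial_jump (rtx jump)
{
  if (simplejump_p (jump) && onlyjump_p (jump)
      && next_active_insn (JUMP_LABEL (jump)) == next_active_insn (jump))
    delete_related_insns (jump);
}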
1525 /* Delete a range of insns from FROM to TO, inclusive.
1526 This is for the sake of peephole optimization, so assume
1527 that whatever these insns do will still be done by a new
1528 peephole insn that will replace them. */
1531 delete_for_peephole (rtx from, rtx to)
1537 rtx next = NEXT_INSN (insn);
1538 rtx prev = PREV_INSN (insn);
1542 INSN_DELETED_P (insn) = 1;
1544 /* Patch this insn out of the chain. */
1545 /* We don't do this all at once, because we
1546 must preserve all NOTEs. */
1548 NEXT_INSN (prev) = next;
1551 PREV_INSN (next) = prev;
1559 /* Note that if TO is an unconditional jump
1560 we *do not* delete the BARRIER that follows,
1561 since the peephole that replaces this sequence
1562 is also an unconditional jump in that case. */
1565 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1566 NLABEL as a return. Accrue modifications into the change group. */
1569 redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
1572 RTX_CODE code = GET_CODE (x);
1576 if (code == LABEL_REF)
1578 if (XEXP (x, 0) == olabel)
1582 n = gen_rtx_LABEL_REF (Pmode, nlabel);
1584 n = gen_rtx_RETURN (VOIDmode);
1586 validate_change (insn, loc, n, 1);
1590 else if (code == RETURN && olabel == 0)
1593 x = gen_rtx_LABEL_REF (Pmode, nlabel);
1595 x = gen_rtx_RETURN (VOIDmode);
1596 if (loc == &PATTERN (insn))
1597 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1598 validate_change (insn, loc, x, 1);
1602 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1603 && GET_CODE (SET_SRC (x)) == LABEL_REF
1604 && XEXP (SET_SRC (x), 0) == olabel)
1606 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
1610 fmt = GET_RTX_FORMAT (code);
1611 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1614 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1615 else if (fmt[i] == 'E')
1618 for (j = 0; j < XVECLEN (x, i); j++)
1619 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
1624 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
1625 the modifications into the change group. Return false if we did
1626 not see how to do that. */
1629 redirect_jump_1 (rtx jump, rtx nlabel)
1631 int ochanges = num_validated_changes ();
1634 if (GET_CODE (PATTERN (jump)) == PARALLEL)
1635 loc = &XVECEXP (PATTERN (jump), 0, 0);
1637 loc = &PATTERN (jump);
1639 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
1640 return num_validated_changes () > ochanges;
1643 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
1644 jump target label is unused as a result, it and the code following
1647 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
1650 The return value will be 1 if the change was made, 0 if it wasn't
1651 (this can only occur for NLABEL == 0). */
1654 redirect_jump (rtx jump, rtx nlabel, int delete_unused)
1656 rtx olabel = JUMP_LABEL (jump);
1658 if (nlabel == olabel)
1661 if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
1664 redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
1668 /* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
1669 NLABEL in JUMP. If DELETE_UNUSED is non-negative, copy a
1670 NOTE_INSN_FUNCTION_END found after OLABEL to the place after NLABEL.
1671 If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
1672 count has dropped to zero. */
1674 redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
1679 JUMP_LABEL (jump) = nlabel;
1681 ++LABEL_NUSES (nlabel);
1683 /* Update labels in any REG_EQUAL note. */
1684 if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
1686 if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
1687 remove_note (jump, note);
1690 redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
1691 confirm_change_group ();
1695 /* If we're eliding the jump over exception cleanups at the end of a
1696 function, move the function end note so that -Wreturn-type works. */
1697 if (olabel && nlabel
1698 && NEXT_INSN (olabel)
1699 && NOTE_P (NEXT_INSN (olabel))
1700 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END
1701 && delete_unused >= 0)
1702 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
1704 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
1705 /* Undefined labels will remain outside the insn stream. */
1706 && INSN_UID (olabel))
1707 delete_related_insns (olabel);
1709 invert_br_probabilities (jump);
1712 /* Invert the jump condition X contained in jump insn INSN. Accrue the
1713 modifications into the change group. Return nonzero for success. */
1715 invert_exp_1 (rtx x, rtx insn)
1717 RTX_CODE code = GET_CODE (x);
1719 if (code == IF_THEN_ELSE)
1721 rtx comp = XEXP (x, 0);
1723 enum rtx_code reversed_code;
1725 /* We can do this in two ways: The preferable way, which can only
1726 be done if this is not an integer comparison, is to reverse
1727 the comparison code. Otherwise, swap the THEN-part and ELSE-part
1728 of the IF_THEN_ELSE. If we can't do either, fail. */
1730 reversed_code = reversed_comparison_code (comp, insn);
1732 if (reversed_code != UNKNOWN)
1734 validate_change (insn, &XEXP (x, 0),
1735 gen_rtx_fmt_ee (reversed_code,
1736 GET_MODE (comp), XEXP (comp, 0),
1743 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
1744 validate_change (insn, &XEXP (x, 2), tem, 1);
1751 /* Invert the condition of the jump JUMP, and make it jump to label
1752 NLABEL instead of where it jumps now. Accrue changes into the
1753 change group. Return false if we didn't see how to perform the
1754 inversion and redirection. */
1757 invert_jump_1 (rtx jump, rtx nlabel)
1759 rtx x = pc_set (jump);
1763 ochanges = num_validated_changes ();
1765 ok = invert_exp_1 (SET_SRC (x), jump);
1768 if (num_validated_changes () == ochanges)
1771 /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
1772 in Pmode, so checking this is not merely an optimization. */
1773 return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
1776 /* Invert the condition of the jump JUMP, and make it jump to label
1777 NLABEL instead of where it jumps now. Return true if successful. */
1780 invert_jump (rtx jump, rtx nlabel, int delete_unused)
1782 rtx olabel = JUMP_LABEL (jump);
1784 if (invert_jump_1 (jump, nlabel) && apply_change_group ())
1786 redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
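
/* Hedged sketch (hypothetical name): rewrite "if (cond) goto OLD" into
   "if (!cond) goto NEW_TARGET", i.e. invert the tested condition and
   retarget the branch in one validated change group.  */

static int
example_invert_branch (rtx jump, rtx new_target)
{
  if (! any_condjump_p (jump) || ! onlyjump_p (jump))
    return 0;

  /* invert_jump returns nonzero on success; passing 0 keeps the old label
     even if this was its last use.  */
  return invert_jump (jump, new_target, 0);
}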
1794 /* Like rtx_equal_p except that it considers two REGs as equal
1795 if they renumber to the same value and considers two commutative
1796 operations to be the same if the order of the operands has been
1799 ??? Addition is not commutative on the PA due to the weird implicit
1800 space register selection rules for memory addresses. Therefore, we
1801 don't consider a + b == b + a.
1803 We could/should make this test a little tighter. Possibly only
1804 disabling it on the PA via some backend macro or only disabling this
1805 case when the PLUS is inside a MEM. */
1808 rtx_renumbered_equal_p (rtx x, rtx y)
1811 enum rtx_code code = GET_CODE (x);
1817 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
1818 && (REG_P (y) || (GET_CODE (y) == SUBREG
1819 && REG_P (SUBREG_REG (y)))))
1821 int reg_x = -1, reg_y = -1;
1822 int byte_x = 0, byte_y = 0;
1824 if (GET_MODE (x) != GET_MODE (y))
1827 /* If we haven't done any renumbering, don't
1828 make any assumptions. */
1829 if (reg_renumber == 0)
1830 return rtx_equal_p (x, y);
1834 reg_x = REGNO (SUBREG_REG (x));
1835 byte_x = SUBREG_BYTE (x);
1837 if (reg_renumber[reg_x] >= 0)
1839 reg_x = subreg_regno_offset (reg_renumber[reg_x],
1840 GET_MODE (SUBREG_REG (x)),
1849 if (reg_renumber[reg_x] >= 0)
1850 reg_x = reg_renumber[reg_x];
1853 if (GET_CODE (y) == SUBREG)
1855 reg_y = REGNO (SUBREG_REG (y));
1856 byte_y = SUBREG_BYTE (y);
1858 if (reg_renumber[reg_y] >= 0)
1860 reg_y = subreg_regno_offset (reg_renumber[reg_y],
1861 GET_MODE (SUBREG_REG (y)),
1870 if (reg_renumber[reg_y] >= 0)
1871 reg_y = reg_renumber[reg_y];
1874 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
1877 /* Now we have disposed of all the cases
1878 in which different rtx codes can match. */
1879 if (code != GET_CODE (y))
1892 /* We can't assume nonlocal labels have their following insns yet. */
1893 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
1894 return XEXP (x, 0) == XEXP (y, 0);
1896 /* Two label-refs are equivalent if they point at labels
1897 in the same position in the instruction stream. */
1898 return (next_real_insn (XEXP (x, 0))
1899 == next_real_insn (XEXP (y, 0)));
1902 return XSTR (x, 0) == XSTR (y, 0);
1905 /* If we didn't match EQ equality above, they aren't the same. */
1912 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1914 if (GET_MODE (x) != GET_MODE (y))
1917 /* For commutative operations, the RTXs match if the operands match in either
1918 order. Also handle the simple binary and unary cases without a loop.
1920 ??? Don't consider PLUS a commutative operator; see comments above. */
1921 if (COMMUTATIVE_P (x) && code != PLUS)
1922 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
1923 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
1924 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
1925 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
1926 else if (NON_COMMUTATIVE_P (x))
1927 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
1928 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
1929 else if (UNARY_P (x))
1930 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
1932 /* Compare the elements. If any pair of corresponding elements
1933 fails to match, return 0 for the whole thing. */
1935 fmt = GET_RTX_FORMAT (code);
1936 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1942 if (XWINT (x, i) != XWINT (y, i))
1947 if (XINT (x, i) != XINT (y, i))
1952 if (XTREE (x, i) != XTREE (y, i))
1957 if (strcmp (XSTR (x, i), XSTR (y, i)))
1962 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
1967 if (XEXP (x, i) != XEXP (y, i))
1974 if (XVECLEN (x, i) != XVECLEN (y, i))
1976 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1977 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1988 /* If X is a hard register or equivalent to one or a subregister of one,
1989 return the hard register number. If X is a pseudo register that was not
1990 assigned a hard register, return the pseudo register number. Otherwise,
1991 return -1. Any rtx is valid for X. */
1998 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
1999 return reg_renumber[REGNO (x)];
2002 if (GET_CODE (x) == SUBREG)
2004 int base = true_regnum (SUBREG_REG (x));
2005 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2006 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2007 GET_MODE (SUBREG_REG (x)),
2008 SUBREG_BYTE (x), GET_MODE (x));
2013 /* Return the regno of the register REG, handling subregs too. */
2015 reg_or_subregno (rtx reg)
2017 if (GET_CODE (reg) == SUBREG)
2018 reg = SUBREG_REG (reg);
2019 gcc_assert (REG_P (reg));