/* Optimize jump instructions, for GNU compiler.
Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
- 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
+ 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
This file is part of GCC.
formerly used them. The JUMP_LABEL info is sometimes looked
at by later passes.
- The subroutines delete_insn, redirect_jump, and invert_jump are used
+ The subroutines redirect_jump and invert_jump are used
from other passes as well. */
#include "config.h"
#include "expr.h"
#include "real.h"
#include "except.h"
+#include "diagnostic.h"
#include "toplev.h"
#include "reload.h"
#include "predict.h"
or even change what is live at any point.
So perhaps let combiner do it. */
-static rtx next_nonnote_insn_in_loop PARAMS ((rtx));
-static void init_label_info PARAMS ((rtx));
-static void mark_all_labels PARAMS ((rtx));
-static int duplicate_loop_exit_test PARAMS ((rtx));
-static void delete_computation PARAMS ((rtx));
-static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
-static int redirect_exp PARAMS ((rtx, rtx, rtx));
-static void invert_exp_1 PARAMS ((rtx));
-static int invert_exp PARAMS ((rtx));
-static int returnjump_p_1 PARAMS ((rtx *, void *));
-static void delete_prior_computation PARAMS ((rtx, rtx));
+static rtx next_nonnote_insn_in_loop (rtx);
+static void init_label_info (rtx);
+static void mark_all_labels (rtx);
+static int duplicate_loop_exit_test (rtx);
+static void delete_computation (rtx);
+static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
+static int redirect_exp (rtx, rtx, rtx);
+static void invert_exp_1 (rtx);
+static int invert_exp (rtx);
+static int returnjump_p_1 (rtx *, void *);
+static void delete_prior_computation (rtx, rtx);
\f
/* Alternate entry into the jump optimizer. This entry point only rebuilds
the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
instructions. */
void
-rebuild_jump_labels (f)
- rtx f;
+rebuild_jump_labels (rtx f)
{
rtx insn;
old code is happy.
*/
void
-cleanup_barriers ()
+cleanup_barriers (void)
{
rtx insn, next, prev;
for (insn = get_insns (); insn; insn = next)
This routine does not look inside SEQUENCEs. */
static rtx
-next_nonnote_insn_in_loop (insn)
- rtx insn;
+next_nonnote_insn_in_loop (rtx insn)
{
while (insn)
{
}
void
-copy_loop_headers (f)
- rtx f;
+copy_loop_headers (rtx f)
{
rtx insn, next;
/* Now iterate optimizing jumps until nothing changes over one pass. */
}
void
-purge_line_number_notes (f)
- rtx f;
+purge_line_number_notes (rtx f)
{
rtx last_note = 0;
rtx insn;
notes whose labels don't occur in the insn any more. */
static void
-init_label_info (f)
- rtx f;
+init_label_info (rtx f)
{
rtx insn;
Combine consecutive labels, and count uses of labels. */
static void
-mark_all_labels (f)
- rtx f;
+mark_all_labels (rtx f)
{
rtx insn;
values of regno_first_uid and regno_last_uid. */
static int
-duplicate_loop_exit_test (loop_start)
- rtx loop_start;
+duplicate_loop_exit_test (rtx loop_start)
{
rtx insn, set, reg, p, link;
rtx copy = 0, first_copy = 0;
break;
case JUMP_INSN:
case INSN:
- /* The code below would grossly mishandle REG_WAS_0 notes,
- so get rid of them here. */
- while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
- remove_note (insn, p);
if (++num_insns > 20
|| find_reg_note (insn, REG_RETVAL, NULL_RTX)
|| find_reg_note (insn, REG_LIBCALL, NULL_RTX))
/* We can do the replacement. Allocate reg_map if this is the
first replacement we found. */
if (reg_map == 0)
- reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
+ reg_map = xcalloc (max_reg, sizeof (rtx));
REG_LOOP_TEST_P (reg) = 1;
replace_regs (PATTERN (copy), reg_map, max_reg, 1);
mark_jump_label (PATTERN (copy), copy, 0);
- INSN_SCOPE (copy) = INSN_SCOPE (insn);
+ INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
/* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
make them. */
case JUMP_INSN:
copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
loop_start);
- INSN_SCOPE (copy) = INSN_SCOPE (insn);
+ INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
if (reg_map)
replace_regs (PATTERN (copy), reg_map, max_reg, 1);
mark_jump_label (PATTERN (copy), copy, 0);
Return true if there were only such notes and no real instructions. */
bool
-squeeze_notes (startp, endp)
- rtx* startp;
- rtx* endp;
+squeeze_notes (rtx* startp, rtx* endp)
{
rtx start = *startp;
rtx end = *endp;
/* Return the label before INSN, or put a new label there. */
rtx
-get_label_before (insn)
- rtx insn;
+get_label_before (rtx insn)
{
rtx label;
/* Return the label after INSN, or put a new label there. */
rtx
-get_label_after (insn)
- rtx insn;
+get_label_after (rtx insn)
{
rtx label;
description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
to help this function avoid overhead in these cases. */
enum rtx_code
-reversed_comparison_code_parts (code, arg0, arg1, insn)
- rtx insn, arg0, arg1;
- enum rtx_code code;
+reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
{
enum machine_mode mode;
/* If this is not actually a comparison, we can't reverse it. */
- if (GET_RTX_CLASS (code) != '<')
+ if (GET_RTX_CLASS (code) != RTX_COMPARE
+ && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
return UNKNOWN;
mode = GET_MODE (arg0);
case NE:
case EQ:
/* It is always safe to reverse EQ and NE, even for the floating
- point. Similary the unsigned comparisons are never used for
+ point. Similarly the unsigned comparisons are never used for
floating point so we can reverse them in the default way. */
return reverse_condition (code);
case ORDERED:
return UNKNOWN;
}
-/* An wrapper around the previous function to take COMPARISON as rtx
+/* A wrapper around the previous function to take COMPARISON as rtx
expression. This simplifies many callers. */
enum rtx_code
-reversed_comparison_code (comparison, insn)
- rtx comparison, insn;
+reversed_comparison_code (rtx comparison, rtx insn)
{
- if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
+ if (!COMPARISON_P (comparison))
return UNKNOWN;
return reversed_comparison_code_parts (GET_CODE (comparison),
XEXP (comparison, 0),
Use reversed_comparison_code instead. */
enum rtx_code
-reverse_condition (code)
- enum rtx_code code;
+reverse_condition (enum rtx_code code)
{
switch (code)
{
that the target will support them too... */
enum rtx_code
-reverse_condition_maybe_unordered (code)
- enum rtx_code code;
+reverse_condition_maybe_unordered (enum rtx_code code)
{
switch (code)
{
This IS safe for IEEE floating-point. */
enum rtx_code
-swap_condition (code)
- enum rtx_code code;
+swap_condition (enum rtx_code code)
{
switch (code)
{
CODE is returned. */
enum rtx_code
-unsigned_condition (code)
- enum rtx_code code;
+unsigned_condition (enum rtx_code code)
{
switch (code)
{
/* Similarly, return the signed version of a comparison. */
enum rtx_code
-signed_condition (code)
- enum rtx_code code;
+signed_condition (enum rtx_code code)
{
switch (code)
{
truth of CODE1 implies the truth of CODE2. */
int
-comparison_dominates_p (code1, code2)
- enum rtx_code code1, code2;
+comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
/* UNKNOWN comparison codes can happen as a result of trying to revert
comparison codes.
/* Return 1 if INSN is an unconditional jump and nothing else. */
int
-simplejump_p (insn)
- rtx insn;
+simplejump_p (rtx insn)
{
return (GET_CODE (insn) == JUMP_INSN
&& GET_CODE (PATTERN (insn)) == SET
/* Return nonzero if INSN is a (possibly) conditional jump
and nothing more.
- Use this function is deprecated, since we need to support combined
+ Use of this function is deprecated, since we need to support combined
branch and compare insns. Use any_condjump_p instead whenever possible. */
int
-condjump_p (insn)
- rtx insn;
+condjump_p (rtx insn)
{
rtx x = PATTERN (insn);
branch and compare insns. Use any_condjump_p instead whenever possible. */
int
-condjump_in_parallel_p (insn)
- rtx insn;
+condjump_in_parallel_p (rtx insn)
{
rtx x = PATTERN (insn);
/* Return set of PC, otherwise NULL. */
rtx
-pc_set (insn)
- rtx insn;
+pc_set (rtx insn)
{
rtx pat;
if (GET_CODE (insn) != JUMP_INSN)
possibly bundled inside a PARALLEL. */
int
-any_uncondjump_p (insn)
- rtx insn;
+any_uncondjump_p (rtx insn)
{
rtx x = pc_set (insn);
if (!x)
Note that unlike condjump_p it returns false for unconditional jumps. */
int
-any_condjump_p (insn)
- rtx insn;
+any_condjump_p (rtx insn)
{
rtx x = pc_set (insn);
enum rtx_code a, b;
/* Return the label of a conditional jump. */
rtx
-condjump_label (insn)
- rtx insn;
+condjump_label (rtx insn)
{
rtx x = pc_set (insn);
/* Return true if INSN is a (possibly conditional) return insn. */
static int
-returnjump_p_1 (loc, data)
- rtx *loc;
- void *data ATTRIBUTE_UNUSED;
+returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
rtx x = *loc;
}
int
-returnjump_p (insn)
- rtx insn;
+returnjump_p (rtx insn)
{
if (GET_CODE (insn) != JUMP_INSN)
return 0;
nothing more. */
int
-onlyjump_p (insn)
- rtx insn;
+onlyjump_p (rtx insn)
{
rtx set;
and has no side effects. */
int
-only_sets_cc0_p (x)
- rtx x;
+only_sets_cc0_p (rtx x)
{
-
if (! x)
return 0;
but also does other things. */
int
-sets_cc0_p (x)
- rtx x;
+sets_cc0_p (rtx x)
{
-
if (! x)
return 0;
a USE or CLOBBER. */
rtx
-follow_jumps (label)
- rtx label;
+follow_jumps (rtx label)
{
rtx insn;
rtx next;
that loop-optimization is done with. */
void
-mark_jump_label (x, insn, in_mem)
- rtx x;
- rtx insn;
- int in_mem;
+mark_jump_label (rtx x, rtx insn, int in_mem)
{
RTX_CODE code = GET_CODE (x);
int i;
if that's what the previous thing was. */
void
-delete_jump (insn)
- rtx insn;
+delete_jump (rtx insn)
{
rtx set = single_set (insn);
/* Verify INSN is a BARRIER and delete it. */
void
-delete_barrier (insn)
- rtx insn;
+delete_barrier (rtx insn)
{
if (GET_CODE (insn) != BARRIER)
abort ();
which is a REG_DEAD note associated with INSN. */
static void
-delete_prior_computation (note, insn)
- rtx note;
- rtx insn;
+delete_prior_computation (rtx note, rtx insn)
{
rtx our_prev;
rtx reg = XEXP (note, 0);
int dest_endregno
= (dest_regno
+ (dest_regno < FIRST_PSEUDO_REGISTER
- ? HARD_REGNO_NREGS (dest_regno,
- GET_MODE (SET_DEST (pat))) : 1));
+ ? hard_regno_nregs[dest_regno]
+ [GET_MODE (SET_DEST (pat))] : 1));
int regno = REGNO (reg);
int endregno
= (regno
+ (regno < FIRST_PSEUDO_REGISTER
- ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
+ ? hard_regno_nregs[regno][GET_MODE (reg)] : 1));
if (dest_regno >= regno
&& dest_endregno <= endregno)
delete the insn that set it. */
static void
-delete_computation (insn)
- rtx insn;
+delete_computation (rtx insn)
{
rtx note, next;
subsequent cfg_cleanup pass to delete unreachable code if needed. */
rtx
-delete_related_insns (insn)
- rtx insn;
+delete_related_insns (rtx insn)
{
int was_code_label = (GET_CODE (insn) == CODE_LABEL);
rtx note;
if (was_code_label && prev && GET_CODE (prev) == BARRIER)
{
- RTX_CODE code;
- while (next != 0
- && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
- || code == NOTE || code == BARRIER
- || (code == CODE_LABEL && INSN_DELETED_P (next))))
+ enum rtx_code code;
+ while (next)
{
+ code = GET_CODE (next);
if (code == NOTE
&& NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
next = NEXT_INSN (next);
/* Keep going past other deleted labels to delete what follows. */
else if (code == CODE_LABEL && INSN_DELETED_P (next))
next = NEXT_INSN (next);
- else
+ else if (code == BARRIER || INSN_P (next))
/* Note: if this deletes a jump, it can cause more
deletion of unreachable code, after a different label.
As long as the value from this recursive call is correct,
this invocation functions correctly. */
next = delete_related_insns (next);
+ else
+ break;
}
}
return next;
}
-
-/* Advance from INSN till reaching something not deleted
- then return that. May return INSN itself. */
-
-rtx
-next_nondeleted_insn (insn)
- rtx insn;
-{
- while (INSN_DELETED_P (insn))
- insn = NEXT_INSN (insn);
- return insn;
-}
\f
/* Delete a range of insns from FROM to TO, inclusive.
This is for the sake of peephole optimization, so assume
peephole insn that will replace them. */
void
-delete_for_peephole (from, to)
- rtx from, to;
+delete_for_peephole (rtx from, rtx to)
{
rtx insn = from;
spurious warnings from this. */
void
-never_reached_warning (avoided_insn, finish)
- rtx avoided_insn, finish;
+never_reached_warning (rtx avoided_insn, rtx finish)
{
rtx insn;
rtx a_line_note = NULL;
/* Back up to the first of any NOTEs preceding avoided_insn; flow passes
us the head of a block, a NOTE_INSN_BASIC_BLOCK, which often follows
the line note. */
- for (insn = PREV_INSN (avoided_insn); ; insn = PREV_INSN (insn))
- if (GET_CODE (insn) != NOTE)
- {
- insn = NEXT_INSN (insn);
+ insn = avoided_insn;
+ while (1)
+ {
+ rtx prev = PREV_INSN (insn);
+ if (prev == NULL_RTX
+ || GET_CODE (prev) != NOTE)
break;
- }
+ insn = prev;
+ }
/* Scan forwards, looking at LINE_NUMBER notes, until we hit a LABEL
in case FINISH is NULL, otherwise until we run out of insns. */
reached_end = 1;
}
if (two_avoided_lines && contains_insn)
- warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
- NOTE_LINE_NUMBER (a_line_note),
- "will never be executed");
+ {
+ location_t locus;
+ locus.file = NOTE_SOURCE_FILE (a_line_note);
+ locus.line = NOTE_LINE_NUMBER (a_line_note);
+ warning ("%Hwill never be executed", &locus);
+ }
}
\f
/* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
NLABEL as a return. Accrue modifications into the change group. */
static void
-redirect_exp_1 (loc, olabel, nlabel, insn)
- rtx *loc;
- rtx olabel, nlabel;
- rtx insn;
+redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
rtx x = *loc;
RTX_CODE code = GET_CODE (x);
/* Similar, but apply the change group and report success or failure. */
static int
-redirect_exp (olabel, nlabel, insn)
- rtx olabel, nlabel;
- rtx insn;
+redirect_exp (rtx olabel, rtx nlabel, rtx insn)
{
rtx *loc;
not see how to do that. */
int
-redirect_jump_1 (jump, nlabel)
- rtx jump, nlabel;
+redirect_jump_1 (rtx jump, rtx nlabel)
{
int ochanges = num_validated_changes ();
rtx *loc;
(this can only occur for NLABEL == 0). */
int
-redirect_jump (jump, nlabel, delete_unused)
- rtx jump, nlabel;
- int delete_unused;
+redirect_jump (rtx jump, rtx nlabel, int delete_unused)
{
rtx olabel = JUMP_LABEL (jump);
+ rtx note;
if (nlabel == olabel)
return 1;
if (nlabel)
++LABEL_NUSES (nlabel);
+ /* Update labels in any REG_EQUAL note. */
+ if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
+ {
+ if (nlabel && olabel)
+ {
+ rtx dest = XEXP (note, 0);
+
+ if (GET_CODE (dest) == IF_THEN_ELSE)
+ {
+ if (GET_CODE (XEXP (dest, 1)) == LABEL_REF
+ && XEXP (XEXP (dest, 1), 0) == olabel)
+ XEXP (XEXP (dest, 1), 0) = nlabel;
+ if (GET_CODE (XEXP (dest, 2)) == LABEL_REF
+ && XEXP (XEXP (dest, 2), 0) == olabel)
+ XEXP (XEXP (dest, 2), 0) = nlabel;
+ }
+ else
+ remove_note (jump, note);
+ }
+ else
+ remove_note (jump, note);
+ }
+
/* If we're eliding the jump over exception cleanups at the end of a
function, move the function end note so that -Wreturn-type works. */
if (olabel && nlabel
Accrue the modifications into the change group. */
static void
-invert_exp_1 (insn)
- rtx insn;
+invert_exp_1 (rtx insn)
{
RTX_CODE code;
rtx x = pc_set (insn);
matches a pattern. */
static int
-invert_exp (insn)
- rtx insn;
+invert_exp (rtx insn)
{
invert_exp_1 (insn);
if (num_validated_changes () == 0)
inversion and redirection. */
int
-invert_jump_1 (jump, nlabel)
- rtx jump, nlabel;
+invert_jump_1 (rtx jump, rtx nlabel)
{
int ochanges;
NLABEL instead of where it jumps now. Return true if successful. */
int
-invert_jump (jump, nlabel, delete_unused)
- rtx jump, nlabel;
- int delete_unused;
+invert_jump (rtx jump, rtx nlabel, int delete_unused)
{
/* We have to either invert the condition and change the label or
do neither. Either operation could fail. We first try to invert
if (redirect_jump (jump, nlabel, delete_unused))
{
+ /* Remove REG_EQUAL note if we have one. */
+ rtx note = find_reg_note (jump, REG_EQUAL, NULL_RTX);
+ if (note)
+ remove_note (jump, note);
+
invert_br_probabilities (jump);
return 1;
case when the PLUS is inside a MEM. */
int
-rtx_renumbered_equal_p (x, y)
- rtx x, y;
+rtx_renumbered_equal_p (rtx x, rtx y)
{
int i;
- RTX_CODE code = GET_CODE (x);
+ enum rtx_code code = GET_CODE (x);
const char *fmt;
if (x == y)
case CC0:
case ADDR_VEC:
case ADDR_DIFF_VEC:
- return 0;
-
case CONST_INT:
- return INTVAL (x) == INTVAL (y);
+ return 0;
case LABEL_REF:
/* We can't assume nonlocal labels have their following insns yet. */
order. Also handle the simple binary and unary cases without a loop.
??? Don't consider PLUS a commutative operator; see comments above. */
- if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
- && code != PLUS)
+ if (COMMUTATIVE_P (x) && code != PLUS)
return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
|| (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
- else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
+ else if (NON_COMMUTATIVE_P (x))
return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
- else if (GET_RTX_CLASS (code) == '1')
+ else if (UNARY_P (x))
return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
/* Compare the elements. If any pair of corresponding elements
case 'u':
if (XEXP (x, i) != XEXP (y, i))
return 0;
- /* fall through. */
+ /* Fall through. */
case '0':
break;
return -1. Any rtx is valid for X. */
int
-true_regnum (x)
- rtx x;
+true_regnum (rtx x)
{
if (GET_CODE (x) == REG)
{
/* Return regno of the register REG and handle subregs too. */
unsigned int
-reg_or_subregno (reg)
- rtx reg;
+reg_or_subregno (rtx reg)
{
if (REG_P (reg))
return REGNO (reg);