/* Optimize jump instructions, for GNU compiler.
Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
- 1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
+ 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
This file is part of GCC.
#include "config.h"
#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "expr.h"
#include "real.h"
#include "except.h"
+#include "diagnostic.h"
#include "toplev.h"
#include "reload.h"
#include "predict.h"
+#include "timevar.h"
/* Optimize jump y; x: ... y: jumpif... x?
Don't know if it is worth bothering with. */
or even change what is live at any point.
So perhaps let combiner do it. */
-static int init_label_info PARAMS ((rtx));
-static void mark_all_labels PARAMS ((rtx));
-static int duplicate_loop_exit_test PARAMS ((rtx));
-static void delete_computation PARAMS ((rtx));
-static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
-static int redirect_exp PARAMS ((rtx, rtx, rtx));
-static void invert_exp_1 PARAMS ((rtx));
-static int invert_exp PARAMS ((rtx));
-static int returnjump_p_1 PARAMS ((rtx *, void *));
-static void delete_prior_computation PARAMS ((rtx, rtx));
+static rtx next_nonnote_insn_in_loop (rtx);
+static void init_label_info (rtx);
+static void mark_all_labels (rtx);
+static int duplicate_loop_exit_test (rtx);
+static void delete_computation (rtx);
+static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
+static int redirect_exp (rtx, rtx, rtx);
+static void invert_exp_1 (rtx);
+static int invert_exp (rtx);
+static int returnjump_p_1 (rtx *, void *);
+static void delete_prior_computation (rtx, rtx);
\f
/* Alternate entry into the jump optimizer. This entry point only rebuilds
the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
instructions. */
void
-rebuild_jump_labels (f)
- rtx f;
+rebuild_jump_labels (rtx f)
{
rtx insn;
- int max_uid = 0;
-
- max_uid = init_label_info (f) + 1;
+ timevar_push (TV_REBUILD_JUMP);
+ init_label_info (f);
mark_all_labels (f);
/* Keep track of labels used from static data; we don't track them
for (insn = forced_labels; insn; insn = XEXP (insn, 1))
if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
LABEL_NUSES (XEXP (insn, 0))++;
+ timevar_pop (TV_REBUILD_JUMP);
}
\f
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
old code is happy.
*/
void
-cleanup_barriers ()
+cleanup_barriers (void)
{
rtx insn, next, prev;
for (insn = get_insns (); insn; insn = next)
}
}
\f
+/* Return the first non-NOTE insn after INSN that is still inside the
+   current loop; return NULL_RTX if the NOTE_INSN_LOOP_END note is
+   reached before any such insn.  This routine does not look inside
+   SEQUENCEs.  */
+
+static rtx
+next_nonnote_insn_in_loop (rtx insn)
+{
+  while (insn)
+    {
+      insn = NEXT_INSN (insn);
+      /* Stop at the end of the chain or at the first non-NOTE insn.  */
+      if (insn == 0 || GET_CODE (insn) != NOTE)
+	break;
+      /* INSN is necessarily a NOTE here; if it terminates the loop,
+	 there is no suitable insn inside the loop.  */
+      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
+	return NULL_RTX;
+    }
+
+  return insn;
+}
+
void
-copy_loop_headers (f)
- rtx f;
+copy_loop_headers (rtx f)
{
rtx insn, next;
/* Now iterate optimizing jumps until nothing changes over one pass. */
the values of regno_first_uid and regno_last_uid. */
if (GET_CODE (insn) == NOTE
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
- && (temp1 = next_nonnote_insn (insn)) != 0
+ && (temp1 = next_nonnote_insn_in_loop (insn)) != 0
&& any_uncondjump_p (temp1) && onlyjump_p (temp1))
{
temp = PREV_INSN (insn);
}
void
-purge_line_number_notes (f)
- rtx f;
+purge_line_number_notes (rtx f)
{
rtx last_note = 0;
rtx insn;
/* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
-notes whose labels don't occur in the insn any more. Returns the
-largest INSN_UID found. */
+notes whose labels don't occur in the insn any more. */
-static int
-init_label_info (f)
- rtx f;
+static void
+init_label_info (rtx f)
{
- int largest_uid = 0;
rtx insn;
for (insn = f; insn; insn = NEXT_INSN (insn))
- {
- if (GET_CODE (insn) == CODE_LABEL)
- LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
- else if (GET_CODE (insn) == JUMP_INSN)
- JUMP_LABEL (insn) = 0;
- else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
- {
- rtx note, next;
-
- for (note = REG_NOTES (insn); note; note = next)
- {
- next = XEXP (note, 1);
- if (REG_NOTE_KIND (note) == REG_LABEL
- && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
- remove_note (insn, note);
- }
- }
- if (INSN_UID (insn) > largest_uid)
- largest_uid = INSN_UID (insn);
- }
+ if (GET_CODE (insn) == CODE_LABEL)
+ LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
+ else if (GET_CODE (insn) == JUMP_INSN)
+ JUMP_LABEL (insn) = 0;
+ else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
+ {
+ rtx note, next;
- return largest_uid;
+ for (note = REG_NOTES (insn); note; note = next)
+ {
+ next = XEXP (note, 1);
+ if (REG_NOTE_KIND (note) == REG_LABEL
+ && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
+ remove_note (insn, note);
+ }
+ }
}
/* Mark the label each jump jumps to.
Combine consecutive labels, and count uses of labels. */
static void
-mark_all_labels (f)
- rtx f;
+mark_all_labels (rtx f)
{
rtx insn;
values of regno_first_uid and regno_last_uid. */
static int
-duplicate_loop_exit_test (loop_start)
- rtx loop_start;
+duplicate_loop_exit_test (rtx loop_start)
{
rtx insn, set, reg, p, link;
rtx copy = 0, first_copy = 0;
int num_insns = 0;
- rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
+ rtx exitcode
+ = NEXT_INSN (JUMP_LABEL (next_nonnote_insn_in_loop (loop_start)));
rtx lastexit;
int max_reg = max_reg_num ();
rtx *reg_map = 0;
break;
case JUMP_INSN:
case INSN:
- /* The code below would grossly mishandle REG_WAS_0 notes,
- so get rid of them here. */
- while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
- remove_note (insn, p);
if (++num_insns > 20
|| find_reg_note (insn, REG_RETVAL, NULL_RTX)
|| find_reg_note (insn, REG_LIBCALL, NULL_RTX))
/* We can do the replacement. Allocate reg_map if this is the
first replacement we found. */
if (reg_map == 0)
- reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
+ reg_map = xcalloc (max_reg, sizeof (rtx));
REG_LOOP_TEST_P (reg) = 1;
replace_regs (PATTERN (copy), reg_map, max_reg, 1);
mark_jump_label (PATTERN (copy), copy, 0);
- INSN_SCOPE (copy) = INSN_SCOPE (insn);
+ INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
/* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
make them. */
case JUMP_INSN:
copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
loop_start);
- INSN_SCOPE (copy) = INSN_SCOPE (insn);
+ INSN_LOCATOR (copy) = INSN_LOCATOR (insn);
if (reg_map)
replace_regs (PATTERN (copy), reg_map, max_reg, 1);
mark_jump_label (PATTERN (copy), copy, 0);
Return true if there were only such notes and no real instructions. */
bool
-squeeze_notes (startp, endp)
- rtx* startp;
- rtx* endp;
+squeeze_notes (rtx* startp, rtx* endp)
{
rtx start = *startp;
rtx end = *endp;
/* Return the label before INSN, or put a new label there. */
rtx
-get_label_before (insn)
- rtx insn;
+get_label_before (rtx insn)
{
rtx label;
/* Return the label after INSN, or put a new label there. */
rtx
-get_label_after (insn)
- rtx insn;
+get_label_after (rtx insn)
{
rtx label;
description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
to help this function avoid overhead in these cases. */
enum rtx_code
-reversed_comparison_code_parts (code, arg0, arg1, insn)
- rtx insn, arg0, arg1;
- enum rtx_code code;
+reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
{
enum machine_mode mode;
case NE:
case EQ:
/* It is always safe to reverse EQ and NE, even for the floating
- point. Similary the unsigned comparisons are never used for
+ point. Similarly the unsigned comparisons are never used for
floating point so we can reverse them in the default way. */
return reverse_condition (code);
case ORDERED:
break;
}
- if (GET_MODE_CLASS (mode) == MODE_CC
-#ifdef HAVE_cc0
- || arg0 == cc0_rtx
-#endif
- )
+ if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
{
rtx prev;
/* Try to search for the comparison to determine the real mode.
return UNKNOWN;
}
-/* An wrapper around the previous function to take COMPARISON as rtx
+/* A wrapper around the previous function to take COMPARISON as rtx
expression. This simplifies many callers. */
enum rtx_code
-reversed_comparison_code (comparison, insn)
- rtx comparison, insn;
+reversed_comparison_code (rtx comparison, rtx insn)
{
if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
return UNKNOWN;
Use reversed_comparison_code instead. */
enum rtx_code
-reverse_condition (code)
- enum rtx_code code;
+reverse_condition (enum rtx_code code)
{
switch (code)
{
that the target will support them too... */
enum rtx_code
-reverse_condition_maybe_unordered (code)
- enum rtx_code code;
+reverse_condition_maybe_unordered (enum rtx_code code)
{
switch (code)
{
This IS safe for IEEE floating-point. */
enum rtx_code
-swap_condition (code)
- enum rtx_code code;
+swap_condition (enum rtx_code code)
{
switch (code)
{
CODE is returned. */
enum rtx_code
-unsigned_condition (code)
- enum rtx_code code;
+unsigned_condition (enum rtx_code code)
{
switch (code)
{
/* Similarly, return the signed version of a comparison. */
enum rtx_code
-signed_condition (code)
- enum rtx_code code;
+signed_condition (enum rtx_code code)
{
switch (code)
{
truth of CODE1 implies the truth of CODE2. */
int
-comparison_dominates_p (code1, code2)
- enum rtx_code code1, code2;
+comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
/* UNKNOWN comparison codes can happen as a result of trying to revert
comparison codes.
/* Return 1 if INSN is an unconditional jump and nothing else. */
int
-simplejump_p (insn)
- rtx insn;
+simplejump_p (rtx insn)
{
return (GET_CODE (insn) == JUMP_INSN
&& GET_CODE (PATTERN (insn)) == SET
branch and compare insns. Use any_condjump_p instead whenever possible. */
int
-condjump_p (insn)
- rtx insn;
+condjump_p (rtx insn)
{
rtx x = PATTERN (insn);
branch and compare insns. Use any_condjump_p instead whenever possible. */
int
-condjump_in_parallel_p (insn)
- rtx insn;
+condjump_in_parallel_p (rtx insn)
{
rtx x = PATTERN (insn);
/* Return set of PC, otherwise NULL. */
rtx
-pc_set (insn)
- rtx insn;
+pc_set (rtx insn)
{
rtx pat;
if (GET_CODE (insn) != JUMP_INSN)
possibly bundled inside a PARALLEL. */
int
-any_uncondjump_p (insn)
- rtx insn;
+any_uncondjump_p (rtx insn)
{
rtx x = pc_set (insn);
if (!x)
Note that unlike condjump_p it returns false for unconditional jumps. */
int
-any_condjump_p (insn)
- rtx insn;
+any_condjump_p (rtx insn)
{
rtx x = pc_set (insn);
enum rtx_code a, b;
/* Return the label of a conditional jump. */
rtx
-condjump_label (insn)
- rtx insn;
+condjump_label (rtx insn)
{
rtx x = pc_set (insn);
/* Return true if INSN is a (possibly conditional) return insn. */
static int
-returnjump_p_1 (loc, data)
- rtx *loc;
- void *data ATTRIBUTE_UNUSED;
+returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
rtx x = *loc;
}
int
-returnjump_p (insn)
- rtx insn;
+returnjump_p (rtx insn)
{
if (GET_CODE (insn) != JUMP_INSN)
return 0;
nothing more. */
int
-onlyjump_p (insn)
- rtx insn;
+onlyjump_p (rtx insn)
{
rtx set;
and has no side effects. */
int
-only_sets_cc0_p (x)
- rtx x;
+only_sets_cc0_p (rtx x)
{
-
if (! x)
return 0;
but also does other things. */
int
-sets_cc0_p (x)
- rtx x;
+sets_cc0_p (rtx x)
{
-
if (! x)
return 0;
a USE or CLOBBER. */
rtx
-follow_jumps (label)
- rtx label;
+follow_jumps (rtx label)
{
rtx insn;
rtx next;
that loop-optimization is done with. */
void
-mark_jump_label (x, insn, in_mem)
- rtx x;
- rtx insn;
- int in_mem;
+mark_jump_label (rtx x, rtx insn, int in_mem)
{
RTX_CODE code = GET_CODE (x);
int i;
case PC:
case CC0:
case REG:
- case SUBREG:
case CONST_INT:
case CONST_DOUBLE:
case CLOBBER:
if that's what the previous thing was. */
void
-delete_jump (insn)
- rtx insn;
+delete_jump (rtx insn)
{
rtx set = single_set (insn);
/* Verify INSN is a BARRIER and delete it. */
void
-delete_barrier (insn)
- rtx insn;
+delete_barrier (rtx insn)
{
if (GET_CODE (insn) != BARRIER)
abort ();
which is a REG_DEAD note associated with INSN. */
static void
-delete_prior_computation (note, insn)
- rtx note;
- rtx insn;
+delete_prior_computation (rtx note, rtx insn)
{
rtx our_prev;
rtx reg = XEXP (note, 0);
delete the insn that set it. */
static void
-delete_computation (insn)
- rtx insn;
+delete_computation (rtx insn)
{
rtx note, next;
}
\f
/* Delete insn INSN from the chain of insns and update label ref counts
- and delete insns now unreachable.
+ and delete insns now unreachable.
- Returns the first insn after INSN that was not deleted.
+ Returns the first insn after INSN that was not deleted.
Usage of this instruction is deprecated. Use delete_insn instead and
subsequent cfg_cleanup pass to delete unreachable code if needed. */
rtx
-delete_related_insns (insn)
- rtx insn;
+delete_related_insns (rtx insn)
{
int was_code_label = (GET_CODE (insn) == CODE_LABEL);
rtx note;
next = NEXT_INSN (next);
return next;
}
- else if ((lab_next = next_nonnote_insn (lab)) != NULL
- && GET_CODE (lab_next) == JUMP_INSN
- && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
- || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
+ else if (tablejump_p (insn, NULL, &lab_next))
{
/* If we're deleting the tablejump, delete the dispatch table.
We may not be able to kill the label immediately preceding
return next;
}
-
-/* Advance from INSN till reaching something not deleted
- then return that. May return INSN itself. */
-
-rtx
-next_nondeleted_insn (insn)
- rtx insn;
-{
- while (INSN_DELETED_P (insn))
- insn = NEXT_INSN (insn);
- return insn;
-}
\f
/* Delete a range of insns from FROM to TO, inclusive.
This is for the sake of peephole optimization, so assume
peephole insn that will replace them. */
void
-delete_for_peephole (from, to)
- rtx from, to;
+delete_for_peephole (rtx from, rtx to)
{
rtx insn = from;
is also an unconditional jump in that case. */
}
\f
-/* We have determined that INSN is never reached, and are about to
- delete it. Print a warning if the user asked for one.
+/* We have determined that AVOIDED_INSN is never reached, and are
+ about to delete it. If the insn chain between AVOIDED_INSN and
+ FINISH contains more than one line from the current function, and
+ contains at least one operation, print a warning if the user asked
+ for it. If FINISH is NULL, look between AVOIDED_INSN and a LABEL.
- To try to make this warning more useful, this should only be called
- once per basic block not reached, and it only warns when the basic
- block contains more than one line from the current function, and
- contains at least one operation. CSE and inlining can duplicate insns,
- so it's possible to get spurious warnings from this. */
+ CSE and inlining can duplicate insns, so it's possible to get
+ spurious warnings from this. */
void
-never_reached_warning (avoided_insn, finish)
- rtx avoided_insn, finish;
+never_reached_warning (rtx avoided_insn, rtx finish)
{
rtx insn;
rtx a_line_note = NULL;
int two_avoided_lines = 0, contains_insn = 0, reached_end = 0;
- if (! warn_notreached)
+ if (!warn_notreached)
return;
- /* Scan forwards, looking at LINE_NUMBER notes, until
- we hit a LABEL or we run out of insns. */
+ /* Back up to the first of any NOTEs preceding avoided_insn; flow passes
+ us the head of a block, a NOTE_INSN_BASIC_BLOCK, which often follows
+ the line note. */
+ insn = avoided_insn;
+ while (1)
+ {
+ rtx prev = PREV_INSN (insn);
+ if (prev == NULL_RTX
+ || GET_CODE (prev) != NOTE)
+ break;
+ insn = prev;
+ }
+
+ /* Scan forwards, looking at LINE_NUMBER notes, until we hit a LABEL
+ in case FINISH is NULL, otherwise until we run out of insns. */
- for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
+ for (; insn != NULL; insn = NEXT_INSN (insn))
{
- if (finish == NULL && GET_CODE (insn) == CODE_LABEL)
+ if ((finish == NULL && GET_CODE (insn) == CODE_LABEL)
+ || GET_CODE (insn) == BARRIER)
break;
if (GET_CODE (insn) == NOTE /* A line number note? */
reached_end = 1;
}
if (two_avoided_lines && contains_insn)
- warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
- NOTE_LINE_NUMBER (a_line_note),
- "will never be executed");
+ {
+ location_t locus;
+ locus.file = NOTE_SOURCE_FILE (a_line_note);
+ locus.line = NOTE_LINE_NUMBER (a_line_note);
+ warning ("%Hwill never be executed", &locus);
+ }
}
\f
/* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
NLABEL as a return. Accrue modifications into the change group. */
static void
-redirect_exp_1 (loc, olabel, nlabel, insn)
- rtx *loc;
- rtx olabel, nlabel;
- rtx insn;
+redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
rtx x = *loc;
RTX_CODE code = GET_CODE (x);
/* Similar, but apply the change group and report success or failure. */
static int
-redirect_exp (olabel, nlabel, insn)
- rtx olabel, nlabel;
- rtx insn;
+redirect_exp (rtx olabel, rtx nlabel, rtx insn)
{
rtx *loc;
not see how to do that. */
int
-redirect_jump_1 (jump, nlabel)
- rtx jump, nlabel;
+redirect_jump_1 (rtx jump, rtx nlabel)
{
int ochanges = num_validated_changes ();
rtx *loc;
(this can only occur for NLABEL == 0). */
int
-redirect_jump (jump, nlabel, delete_unused)
- rtx jump, nlabel;
- int delete_unused;
+redirect_jump (rtx jump, rtx nlabel, int delete_unused)
{
rtx olabel = JUMP_LABEL (jump);
+ rtx note;
if (nlabel == olabel)
return 1;
if (nlabel)
++LABEL_NUSES (nlabel);
+ /* Update labels in any REG_EQUAL note. */
+ if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
+ {
+ if (nlabel && olabel)
+ {
+ rtx dest = XEXP (note, 0);
+
+ if (GET_CODE (dest) == IF_THEN_ELSE)
+ {
+ if (GET_CODE (XEXP (dest, 1)) == LABEL_REF
+ && XEXP (XEXP (dest, 1), 0) == olabel)
+ XEXP (XEXP (dest, 1), 0) = nlabel;
+ if (GET_CODE (XEXP (dest, 2)) == LABEL_REF
+ && XEXP (XEXP (dest, 2), 0) == olabel)
+ XEXP (XEXP (dest, 2), 0) = nlabel;
+ }
+ else
+ remove_note (jump, note);
+ }
+ else
+ remove_note (jump, note);
+ }
+
/* If we're eliding the jump over exception cleanups at the end of a
function, move the function end note so that -Wreturn-type works. */
if (olabel && nlabel
Accrue the modifications into the change group. */
static void
-invert_exp_1 (insn)
- rtx insn;
+invert_exp_1 (rtx insn)
{
RTX_CODE code;
rtx x = pc_set (insn);
matches a pattern. */
static int
-invert_exp (insn)
- rtx insn;
+invert_exp (rtx insn)
{
invert_exp_1 (insn);
if (num_validated_changes () == 0)
inversion and redirection. */
int
-invert_jump_1 (jump, nlabel)
- rtx jump, nlabel;
+invert_jump_1 (rtx jump, rtx nlabel)
{
int ochanges;
NLABEL instead of where it jumps now. Return true if successful. */
int
-invert_jump (jump, nlabel, delete_unused)
- rtx jump, nlabel;
- int delete_unused;
+invert_jump (rtx jump, rtx nlabel, int delete_unused)
{
/* We have to either invert the condition and change the label or
do neither. Either operation could fail. We first try to invert
if (redirect_jump (jump, nlabel, delete_unused))
{
+ /* Remove REG_EQUAL note if we have one. */
+ rtx note = find_reg_note (jump, REG_EQUAL, NULL_RTX);
+ if (note)
+ remove_note (jump, note);
+
invert_br_probabilities (jump);
return 1;
case when the PLUS is inside a MEM. */
int
-rtx_renumbered_equal_p (x, y)
- rtx x, y;
+rtx_renumbered_equal_p (rtx x, rtx y)
{
int i;
RTX_CODE code = GET_CODE (x);
case CC0:
case ADDR_VEC:
case ADDR_DIFF_VEC:
- return 0;
-
case CONST_INT:
- return INTVAL (x) == INTVAL (y);
+ return 0;
case LABEL_REF:
/* We can't assume nonlocal labels have their following insns yet. */
case 'u':
if (XEXP (x, i) != XEXP (y, i))
return 0;
- /* fall through. */
+ /* Fall through. */
case '0':
break;
return -1. Any rtx is valid for X. */
int
-true_regnum (x)
- rtx x;
+true_regnum (rtx x)
{
if (GET_CODE (x) == REG)
{
/* Return regno of the register REG and handle subregs too. */
unsigned int
-reg_or_subregno (reg)
- rtx reg;
+reg_or_subregno (rtx reg)
{
if (REG_P (reg))
return REGNO (reg);