/* Perform instruction reorganizations for delay slot filling.
Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
- 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
+ 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
Free Software Foundation, Inc.
Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
Hacked by Michael Tiemann (tiemann@cygnus.com).
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
-#include "toplev.h"
#include "rtl.h"
#include "tm_p.h"
#include "expr.h"
static void make_return_insns (rtx);
#endif
\f
+/* A wrapper around next_active_insn which takes care to return ret_rtx
+ unchanged. */
+
+static rtx
+first_active_target_insn (rtx insn)
+{
+ if (ANY_RETURN_P (insn))
+ return insn;
+ return next_active_insn (insn);
+}
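/* A minimal usage sketch (assuming ANY_RETURN_P (x) tests for a RETURN
   rtx): next_active_insn expects a real insn or label, so callers that
   may now find ret_rtx in JUMP_LABEL write

       insn_at_target = first_active_target_insn (target_label);

   instead of calling next_active_insn (target_label) directly.  */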
+\f
/* Return TRUE if this insn should stop the search for insns to fill delay
slots. LABELS_P indicates that labels should terminate the search.
In all cases, jumps terminate the search. */
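/* A sketch of what stops the scan, assuming the usual reorg.c definition:
   a null insn, a BARRIER, or any JUMP_INSN always stops it; a CODE_LABEL
   stops it only when LABELS_P; NOTEs and CALL_INSNs never do.  */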
/* The return we make may have delay slots too. */
rtx insn = gen_return ();
insn = emit_jump_insn (insn);
+ JUMP_LABEL (insn) = ret_rtx;
emit_barrier ();
if (num_delay_slots (insn) > 0)
obstack_ptr_grow (&unfilled_slots_obstack, insn);
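/* The RTL emitted just above looks roughly like this (a sketch; the exact
   return pattern is target-dependent):

       (jump_insn (return))   ;; JUMP_LABEL == ret_rtx
       (barrier)

   and if the target's return has delay slots, the new jump_insn is queued
   on unfilled_slots_obstack to be filled like any other branch.  */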
|| GET_CODE (PATTERN (next_trial)) == RETURN))
{
rtx target_label = JUMP_LABEL (next_trial);
- if (target_label == 0)
+ if (ANY_RETURN_P (target_label))
target_label = find_end_label ();
if (target_label)
be INSNs, CALL_INSNs, or JUMP_INSNs. Only JUMP_INSNs have branch
direction information, and only if they are conditional jumps.
- If LABEL is zero, then there is no way to determine the branch
+ If LABEL is a return, then there is no way to determine the branch
direction. */
if (JUMP_P (insn)
&& (condjump_p (insn) || condjump_in_parallel_p (insn))
+ && !ANY_RETURN_P (label)
&& INSN_UID (insn) <= max_uid
- && label != 0
&& INSN_UID (label) <= max_uid)
flags
= (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
int jump_count = 0;
rtx next;
- for (; insn; insn = next)
+ for (; insn && !ANY_RETURN_P (insn); insn = next)
{
if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
/* Predict that backward branches are usually taken and forward branches
   usually are not.  If we don't know whether this is forward or backward,
   assume the branch will be taken, since most are.  */
- return (target_label == 0 || INSN_UID (jump_insn) > max_uid
+ return (ANY_RETURN_P (target_label) || INSN_UID (jump_insn) > max_uid
|| INSN_UID (target_label) > max_uid
|| (uid_to_ruid[INSN_UID (jump_insn)]
> uid_to_ruid[INSN_UID (target_label)]));
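/* Concretely (a sketch with hypothetical uids): uid_to_ruid[] numbers the
   insns in linear order, so for

       ruid 5: (jump_insn 10 ... -> label 40)
       ruid 9: (code_label 40)

   the jump's ruid is below its target's, the branch is forward, and it is
   predicted not taken; a jump to ret_rtx or to an insn created after
   max_uid was computed is assumed taken.  */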
if (condjump_in_parallel_p (insn))
pat = XVECEXP (pat, 0, 0);
- if (GET_CODE (pat) == RETURN)
- return target == 0 ? const_true_rtx : 0;
+ if (ANY_RETURN_P (pat))
+ return pat == target ? const_true_rtx : 0;
- else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
+ if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
return 0;
src = SET_SRC (pat);
return const_true_rtx;
else if (GET_CODE (src) == IF_THEN_ELSE
- && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
- || (GET_CODE (XEXP (src, 1)) == LABEL_REF
- && XEXP (XEXP (src, 1), 0) == target))
+ && ((GET_CODE (XEXP (src, 1)) == LABEL_REF
+      && XEXP (XEXP (src, 1), 0) == target)
+     || (ANY_RETURN_P (XEXP (src, 1)) && XEXP (src, 1) == target))
&& XEXP (src, 2) == pc_rtx)
return XEXP (src, 0);
else if (GET_CODE (src) == IF_THEN_ELSE
- && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
- || (GET_CODE (XEXP (src, 2)) == LABEL_REF
- && XEXP (XEXP (src, 2), 0) == target))
+ && ((GET_CODE (XEXP (src, 2)) == LABEL_REF
+      && XEXP (XEXP (src, 2), 0) == target)
+     || (ANY_RETURN_P (XEXP (src, 2)) && XEXP (src, 2) == target))
&& XEXP (src, 1) == pc_rtx)
{
enum rtx_code rev;
}
/* Show the place to which we will be branching. */
- *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));
+ *pnew_thread = first_active_target_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));
/* Add any new insns to the delay list and update the count of the
number of slots filled. */
rtx insn;
/* We don't own the function end. */
- if (thread == 0)
+ if (thread == 0 || ANY_RETURN_P (thread))
return 0;
/* Get the first active insn, or THREAD, if it is an active insn. */
/* This must be an INSN or CALL_INSN. */
pat = PATTERN (trial);
- /* USE and CLOBBER at this level was just for flow; ignore it. */
+ /* Stand-alone USE and CLOBBER are just for flow. */
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
continue;
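/* E.g. a top-level (use (reg 42)) or (clobber (reg 42)) (register number
   hypothetical) only annotates data flow; it can never fill a delay slot,
   so the scan steps over it.  */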
&& (!JUMP_P (insn)
|| ((condjump_p (insn) || condjump_in_parallel_p (insn))
&& ! simplejump_p (insn)
- && JUMP_LABEL (insn) != 0)))
+ && !ANY_RETURN_P (JUMP_LABEL (insn)))))
{
/* Invariant: TARGET is the insn's jump label if insn is a
JUMP_INSN; otherwise, zero.  */
target = JUMP_LABEL (insn);
}
- if (target == 0)
- for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
+ if (target == 0 || ANY_RETURN_P (target))
+ for (trial = next_nonnote_insn (insn); !stop_search_p (trial, 1);
+ trial = next_trial)
{
next_trial = next_nonnote_insn (trial);
- if (LABEL_P (trial)
- || BARRIER_P (trial))
- break;
-
- /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
+ /* This must be an INSN or CALL_INSN. */
pat = PATTERN (trial);
/* Stand-alone USE and CLOBBER are just for flow. */
else
trial_delay = trial;
- /* Stop our search when seeing an unconditional jump. */
+ /* Stop our search when seeing a jump. */
if (JUMP_P (trial_delay))
break;
Don't do this if the insn at the branch target is a branch. */
if (slots_to_fill != slots_filled
&& trial
- && JUMP_P (trial)
+ && jump_to_label_p (trial)
&& simplejump_p (trial)
&& (target == 0 || JUMP_LABEL (trial) == target)
&& (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
\f
/* Follow any unconditional jump at LABEL;
return the ultimate label reached by any such chain of jumps.
- Return null if the chain ultimately leads to a return instruction.
+ Return ret_rtx if the chain ultimately leads to a return instruction.
If LABEL is not followed by a jump, return LABEL.
If the chain loops or we can't find the end, return LABEL, since that
tells the caller to avoid changing the insn.  */
rtx value = label;
int depth;
+ if (ANY_RETURN_P (label))
+ return label;
for (depth = 0;
(depth < 10
&& (insn = next_active_insn (value)) != 0
&& JUMP_P (insn)
- && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
- && onlyjump_p (insn))
+ && JUMP_LABEL (insn) != NULL_RTX
+ && ((any_uncondjump_p (insn) && onlyjump_p (insn))
|| GET_CODE (PATTERN (insn)) == RETURN)
&& (next = NEXT_INSN (insn))
&& BARRIER_P (next));
depth++)
{
+ rtx this_label = JUMP_LABEL (insn);
rtx tem;
/* If we have found a cycle, make the insn jump to itself. */
- if (JUMP_LABEL (insn) == label)
+ if (this_label == label)
return label;
-
- tem = next_active_insn (JUMP_LABEL (insn));
- if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
- || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
+ if (ANY_RETURN_P (this_label))
+ return this_label;
+ tem = next_active_insn (this_label);
+ if (tem
+ && (GET_CODE (PATTERN (tem)) == ADDR_VEC
+ || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
break;
- value = JUMP_LABEL (insn);
+ value = this_label;
}
if (depth == 10)
return label;
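/* A sketch of the chains this resolves (labels hypothetical):

       L1: (jump_insn -> L2)        L3: (jump_insn -> L3)
       L2: (jump_insn (return))

   follow_jumps (L1) now yields ret_rtx where it used to yield NULL_RTX,
   while the self-loop at L3 returns L3 itself so the caller leaves the
   insn unchanged.  */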
/* If our thread is the end of the subroutine, we can't get any delay
insns from that.  */
- if (thread == 0)
+ if (thread == NULL_RTX || ANY_RETURN_P (thread))
return delay_list;
/* If this is an unconditional branch, nothing is needed at the
gcc_assert (REG_NOTE_KIND (note)
== REG_LABEL_OPERAND);
}
- if (JUMP_P (trial) && JUMP_LABEL (trial))
+ if (jump_to_label_p (trial))
LABEL_NUSES (JUMP_LABEL (trial))++;
delete_related_insns (trial);
gcc_assert (REG_NOTE_KIND (note)
== REG_LABEL_OPERAND);
}
- if (JUMP_P (trial) && JUMP_LABEL (trial))
+ if (jump_to_label_p (trial))
LABEL_NUSES (JUMP_LABEL (trial))--;
}
else
depend on the destination register. If so, try to place the opposite
arithmetic insn after the jump insn and put the arithmetic insn in the
delay slot. If we can't do this, return. */
- if (delay_list == 0 && likely && new_thread
+ if (delay_list == 0 && likely
+ && new_thread && !ANY_RETURN_P (new_thread)
&& NONJUMP_INSN_P (new_thread)
&& GET_CODE (PATTERN (new_thread)) != ASM_INPUT
&& asm_noperands (PATTERN (new_thread)) < 0)
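/* A sketch of the trick described above (register hypothetical): for a
   likely-taken branch whose target begins with "r4 = r4 + 1", the
   increment can go in the delay slot, with a compensating "r4 = r4 - 1"
   placed after the jump so the fall-through path still sees the old
   value of r4.  */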
delay_list))
new_thread = follow_jumps (JUMP_LABEL (new_thread));
- if (new_thread == 0)
+ if (ANY_RETURN_P (new_thread))
label = find_end_label ();
else if (LABEL_P (new_thread))
label = new_thread;
them. Then see whether the branch is likely true. We don't need
to do a lot of this for unconditional branches. */
- insn_at_target = next_active_insn (target_label);
+ insn_at_target = first_active_target_insn (target_label);
own_target = own_thread_p (target_label, target_label, 0);
if (condition == const_true_rtx)
from the thread that was filled. So we have to recompute
the next insn at the target. */
target_label = JUMP_LABEL (insn);
- insn_at_target = next_active_insn (target_label);
+ insn_at_target = first_active_target_insn (target_label);
delay_list
= fill_slots_from_thread (insn, condition, fallthrough_insn,
group of consecutive labels. */
if (JUMP_P (insn)
&& (condjump_p (insn) || condjump_in_parallel_p (insn))
- && (target_label = JUMP_LABEL (insn)) != 0)
+ && !ANY_RETURN_P (target_label = JUMP_LABEL (insn)))
{
target_label = skip_consecutive_labels (follow_jumps (target_label));
- if (target_label == 0)
+ if (ANY_RETURN_P (target_label))
target_label = find_end_label ();
if (target_label && next_active_insn (target_label) == next
invert_jump fails. */
++LABEL_NUSES (target_label);
- if (label)
+ if (!ANY_RETURN_P (label))
++LABEL_NUSES (label);
if (invert_jump (insn, label, 1))
next = insn;
}
- if (label)
+ if (!ANY_RETURN_P (label))
--LABEL_NUSES (label);
if (--LABEL_NUSES (target_label) == 0)
target_label = JUMP_LABEL (delay_insn);
- if (target_label)
+ if (!ANY_RETURN_P (target_label))
{
/* If this jump goes to another unconditional jump, thread it, but
don't convert a jump into a RETURN here. */
trial = skip_consecutive_labels (follow_jumps (target_label));
- if (trial == 0)
+ if (ANY_RETURN_P (trial))
trial = find_end_label ();
if (trial && trial != target_label
&& redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
{
target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
- if (target_label == 0)
+ if (ANY_RETURN_P (target_label))
target_label = find_end_label ();
if (target_label
rtx label = JUMP_LABEL (next);
rtx old_label = JUMP_LABEL (delay_insn);
- if (label == 0)
+ if (ANY_RETURN_P (label))
label = find_end_label ();
/* find_end_label can generate a new label. Check this first. */
/* If we can't make the jump into a RETURN, try to redirect it to the best
RETURN and go on to the next insn. */
- if (! reorg_redirect_jump (jump_insn, NULL_RTX))
+ if (! reorg_redirect_jump (jump_insn, ret_rtx))
{
/* Make sure redirecting the jump will not invalidate the delay
slot insns. */
/* Ensure all jumps go to the last of a set of consecutive labels. */
if (JUMP_P (insn)
&& (condjump_p (insn) || condjump_in_parallel_p (insn))
- && JUMP_LABEL (insn) != 0
+ && !ANY_RETURN_P (JUMP_LABEL (insn))
&& ((target = skip_consecutive_labels (JUMP_LABEL (insn)))
!= JUMP_LABEL (insn)))
redirect_jump (insn, target, 1);
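/* E.g. (labels hypothetical) given

       (code_label 10) (code_label 11) (code_label 12) (insn ...)

   skip_consecutive_labels returns label 12, so jumps to 10 or 11 are
   redirected to 12 and the earlier labels may become unused.  */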
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func |
TODO_ggc_collect /* todo_flags_finish */
}
};
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func |
TODO_ggc_collect /* todo_flags_finish */
}
};