static int count_bb_insns (basic_block);
static rtx first_active_insn (basic_block);
static rtx last_active_insn (basic_block, int);
-static int seq_contains_jump (rtx);
static basic_block block_fallthru (basic_block);
static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
static rtx cond_exec_get_condition (rtx);
return insn;
}
-/* It is possible, especially when having dealt with multi-word
- arithmetic, for the expanders to have emitted jumps. Search
- through the sequence and return TRUE if a jump exists so that
- we can abort the conversion. */
-
-static int
-seq_contains_jump (rtx insn)
-{
- while (insn)
- {
- if (GET_CODE (insn) == JUMP_INSN)
- return 1;
- insn = NEXT_INSN (insn);
- }
- return 0;
-}
+/* Return the basic block reached by falling through the basic block BB. */
static basic_block
block_fallthru (basic_block bb)
{
/* ??? Ug. Actually unlinking the thing is problematic,
given what we'd have to coordinate with our callers. */
- PUT_CODE (insn, NOTE);
- NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
- NOTE_SOURCE_FILE (insn) = 0;
+ SET_INSN_DELETED (insn);
goto insn_done;
}
GET_MODE_BITSIZE (inmode));
}
-/* Unshare sequence SEQ produced by if conversion. We care to mark
- all arguments that may be shared with outer instruction stream. */
-static void
-unshare_ifcvt_sequence (struct noce_if_info *if_info, rtx seq)
+/* Return the sequence of instructions generated by if conversion. This
+ function calls end_sequence() to end the current stream, and ensures
+ that all instructions are unshared, recognizable non-jump insns.
+ On failure, this function returns NULL_RTX. */
+
+static rtx
+end_ifcvt_sequence (struct noce_if_info *if_info)
{
+ rtx insn;
+ rtx seq = get_insns ();
+
set_used_flags (if_info->x);
set_used_flags (if_info->cond);
unshare_all_rtl_in_chain (seq);
+ end_sequence ();
+
+ /* Make sure that all of the instructions emitted are recognizable,
+ and that we haven't introduced a new jump instruction.
+ As an exercise for the reader, build a general mechanism that
+ allows proper placement of required clobbers. */
+ for (insn = seq; insn; insn = NEXT_INSN (insn))
+ if (GET_CODE (insn) == JUMP_INSN
+ || recog_memoized (insn) == -1)
+ return NULL_RTX;
+
+ return seq;
}
/* Convert "if (a != b) x = a; else x = b" into "x = a" and
{
start_sequence ();
noce_emit_move_insn (if_info->x, y);
- seq = get_insns ();
- unshare_ifcvt_sequence (if_info, seq);
- end_sequence ();
+ seq = end_ifcvt_sequence (if_info);
+ if (!seq)
+ return FALSE;
+
emit_insn_before_setloc (seq, if_info->jump,
INSN_LOCATOR (if_info->insn_a));
}
if (target != if_info->x)
noce_emit_move_insn (if_info->x, target);
- seq = get_insns ();
- unshare_ifcvt_sequence (if_info, seq);
- end_sequence ();
- emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
+ seq = end_ifcvt_sequence (if_info);
+ if (! seq)
+ return FALSE;
+ emit_insn_before_setloc (seq, if_info->jump,
+ INSN_LOCATOR (if_info->insn_a));
return TRUE;
}
else
if (target != if_info->x)
noce_emit_move_insn (if_info->x, target);
- seq = get_insns ();
- unshare_ifcvt_sequence (if_info, seq);
- end_sequence ();
-
- if (seq_contains_jump (seq))
+ seq = end_ifcvt_sequence (if_info);
+ if (!seq)
return FALSE;
- emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
-
+ emit_insn_before_setloc (seq, if_info->jump,
+ INSN_LOCATOR (if_info->insn_a));
return TRUE;
}
if (target != if_info->x)
noce_emit_move_insn (if_info->x, target);
- seq = get_insns ();
- unshare_ifcvt_sequence (if_info, seq);
- end_sequence ();
+ seq = end_ifcvt_sequence (if_info);
+ if (!seq)
+ return FALSE;
+
emit_insn_before_setloc (seq, if_info->jump,
- INSN_LOCATOR (if_info->insn_a));
+ INSN_LOCATOR (if_info->insn_a));
return TRUE;
}
end_sequence ();
if (target != if_info->x)
noce_emit_move_insn (if_info->x, target);
- seq = get_insns ();
- unshare_ifcvt_sequence (if_info, seq);
- end_sequence ();
-
- if (seq_contains_jump (seq))
+ seq = end_ifcvt_sequence (if_info);
+ if (!seq)
return FALSE;
emit_insn_before_setloc (seq, if_info->jump,
- INSN_LOCATOR (if_info->insn_a));
-
+ INSN_LOCATOR (if_info->insn_a));
return TRUE;
}
end_sequence ();
if (target != if_info->x)
noce_emit_move_insn (if_info->x, target);
- seq = get_insns ();
- unshare_ifcvt_sequence (if_info, seq);
- end_sequence ();
-
- if (seq_contains_jump (seq))
+ seq = end_ifcvt_sequence (if_info);
+ if (!seq)
return FALSE;
emit_insn_before_setloc (seq, if_info->jump,
- INSN_LOCATOR (if_info->insn_a));
-
+ INSN_LOCATOR (if_info->insn_a));
return TRUE;
}
if (target != if_info->x)
noce_emit_move_insn (if_info->x, target);
- seq = get_insns ();
- unshare_ifcvt_sequence (if_info, seq);
- end_sequence ();
+ seq = end_ifcvt_sequence (if_info);
+ if (!seq)
+ return FALSE;
+
emit_insn_before_setloc (seq, if_info->jump,
- INSN_LOCATOR (if_info->insn_a));
+ INSN_LOCATOR (if_info->insn_a));
return TRUE;
}
else
early because it'll screw alias analysis. Note that we've
already checked for no side effects. */
if (! no_new_pseudos && cse_not_expected
- && GET_CODE (a) == MEM && GET_CODE (b) == MEM
+ && MEM_P (a) && MEM_P (b)
&& BRANCH_COST >= 5)
{
a = XEXP (a, 0);
else if (target != x)
noce_emit_move_insn (x, target);
- tmp = get_insns ();
- unshare_ifcvt_sequence (if_info, tmp);
- end_sequence ();
+ tmp = end_ifcvt_sequence (if_info);
+ if (!tmp)
+ return FALSE;
+
emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
return TRUE;
if (target != if_info->x)
noce_emit_move_insn (if_info->x, target);
- seq = get_insns ();
- unshare_ifcvt_sequence (if_info, seq);
- end_sequence ();
-
- if (seq_contains_jump (seq))
+ seq = end_ifcvt_sequence (if_info);
+ if (!seq)
return FALSE;
emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
return FALSE;
c = XEXP (note, 0);
}
- if (GET_CODE (c) == MEM
+ if (MEM_P (c)
&& GET_CODE (XEXP (c, 0)) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
c = get_pool_constant (XEXP (c, 0));
if (target != if_info->x)
noce_emit_move_insn (if_info->x, target);
- seq = get_insns ();
- unshare_ifcvt_sequence (if_info, seq);
- end_sequence ();
-
- if (seq_contains_jump (seq))
+ seq = end_ifcvt_sequence (if_info);
+ if (!seq)
return FALSE;
emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
return FALSE;
start_sequence ();
- c = gen_int_mode (GET_MODE_BITSIZE (mode) - 1, mode);
- m = expand_binop (mode, ashr_optab, m, c, NULL_RTX, 0, OPTAB_DIRECT);
+ /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
+ "(signed) m >> 31" directly. This benefits targets with specialized
+ insns to obtain the signmask, but still uses ashr_optab otherwise. */
+ m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
: NULL_RTX;
}
noce_emit_move_insn (if_info->x, t);
- seq = get_insns ();
- unshare_ifcvt_sequence (if_info, seq);
- end_sequence ();
- emit_insn_before_setloc (seq, if_info->jump,
- INSN_LOCATOR (if_info->insn_a));
+
+ seq = end_ifcvt_sequence (if_info);
+ if (!seq)
+ return FALSE;
+
+ emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
return TRUE;
}
{
/* We special-case memories, so handle any of them with
no address side effects. */
- if (GET_CODE (op) == MEM)
+ if (MEM_P (op))
return ! side_effects_p (XEXP (op, 0));
if (side_effects_p (op))
for most optimizations if writing to x may trap, i.e. it's a memory
other than a static var or a stack slot. */
if (! set_b
- && GET_CODE (orig_x) == MEM
+ && MEM_P (orig_x)
&& ! MEM_NOTRAP_P (orig_x)
&& rtx_addr_can_trap_p (XEXP (orig_x, 0)))
{
static int
find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
{
- return GET_CODE (*px) == MEM;
+ return MEM_P (*px);
}
/* Used by the code above to perform the actual rtl transformations.