static int
safe_insn_predicate (int code, int operand, rtx x)
{
- const struct insn_operand_data *op_data;
-
- if (code < 0)
- return true;
-
- op_data = &insn_data[code].operand[operand];
- if (op_data->predicate == NULL)
- return true;
-
- return op_data->predicate (x, op_data->mode);
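+  /* A negative CODE means there is no insn to validate against, and
+     insn_operand_matches treats a null predicate as a match.  */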
+ return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
HARD_REG_SET *pset = (HARD_REG_SET *)data;
- if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
- {
- int nregs = hard_regno_nregs[REGNO (x)][GET_MODE (x)];
- while (nregs-- > 0)
- SET_HARD_REG_BIT (*pset, REGNO (x) + nregs);
- }
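+  /* add_to_hard_reg_set marks every hard register occupied by X,
+     using the register count for X's mode.  */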
+ if (REG_P (x) && HARD_REGISTER_P (x))
+ add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
}
/* A subroutine of assign_parms. Allocate a pseudo to hold the current
op0 = parmreg;
op1 = validated_mem;
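+      /* insn_operand_matches applies each operand's predicate with the
+	 mode recorded in insn_data.  */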
if (icode != CODE_FOR_nothing
- && insn_data[icode].operand[0].predicate (op0, promoted_nominal_mode)
- && insn_data[icode].operand[1].predicate (op1, data->passed_mode))
+ && insn_operand_matches (icode, 0, op0)
+ && insn_operand_matches (icode, 1, op1))
{
enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
rtx insn, insns;
t = built_in_decls[BUILT_IN_ALLOCA];
t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
/* The call has been built for a variable-sized object. */
- ALLOCA_FOR_VAR_P (t) = 1;
+ CALL_ALLOCA_FOR_VAR_P (t) = 1;
t = fold_convert (ptr_type, t);
t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
gimplify_and_add (t, &stmts);
return prev;
}
+/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
+ by modifying the last node in chain 1 to point to chain 2. */
+
+tree
+block_chainon (tree op1, tree op2)
+{
+ tree t1;
+
+ if (!op1)
+ return op2;
+ if (!op2)
+ return op1;
+
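+  /* Advance T1 to the last block in the first chain.  */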
+ for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
+ continue;
+ BLOCK_CHAIN (t1) = op2;
+
+#ifdef ENABLE_TREE_CHECKING
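+  /* With tree checking enabled, verify that the result is acyclic:
+     OP2 must not already contain T1.  */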
+ {
+ tree t2;
+ for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
+ gcc_assert (t2 != t1);
+ }
+#endif
+
+ return op1;
+}
+
/* Count the subblocks of the list starting with BLOCK. If VECTOR is
non-NULL, list them all into VECTOR, in a depth-first preorder
traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
}
#ifdef HAVE_return
+/* Insert use of return register before the end of BB. */
+
+static void
+emit_use_return_register_into_block (basic_block bb)
+{
+ rtx seq;
+ start_sequence ();
+ use_return_register ();
+ seq = get_insns ();
+ end_sequence ();
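+  /* Place the USE insns just before BB's final jump.  */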
+ emit_insn_before (seq, BB_END (bb));
+}
+
/* Insert gen_return at the end of block BB. This also means updating
block_for_insn appropriately. */
{
bool inserted;
rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
- edge entry_edge ATTRIBUTE_UNUSED;
- edge e;
+ edge entry_edge, e;
edge_iterator ei;
rtl_profile_for_bb (ENTRY_BLOCK_PTR);
record_insns (seq, NULL, &prologue_insn_hash);
set_insn_locators (seq, prologue_locator);
- /* This relies on the fact that committing the edge insertion
- will look for basic blocks within the inserted instructions,
- which in turn relies on the fact that we are not in CFG
- layout mode here. */
insert_insn_on_edge (seq, entry_edge);
inserted = true;
#endif
with a simple return instruction. */
if (simplejump_p (jump))
{
+ /* The use of the return register might be present in the exit
+ fallthru block. Either:
+ - removing the use is safe, and we should remove the use in
+ the exit fallthru block, or
+ - removing the use is not safe, and we should add it here.
+	 For now, we conservatively choose the latter.  Either choice
+	 helps crossjumping.  */
+ emit_use_return_register_into_block (bb);
+
emit_return_into_block (bb);
delete_insn (jump);
}
continue;
}
+	  /* See comment in the simplejump_p case above.  */
+ emit_use_return_register_into_block (bb);
+
/* If this block has only one successor, it both jumps
and falls through to the fallthru block, so we can't
delete the edge. */
cur_bb->aux = cur_bb->next_bb;
cfg_layout_finalize ();
}
+
epilogue_done:
default_rtl_profile ();
if (inserted)
{
+ sbitmap blocks;
+
commit_edge_insertions ();
+ /* Look for basic blocks within the prologue insns. */
+ blocks = sbitmap_alloc (last_basic_block);
+ sbitmap_zero (blocks);
+ SET_BIT (blocks, entry_edge->dest->index);
+ find_many_sub_basic_blocks (blocks);
+ sbitmap_free (blocks);
+
/* The epilogue insns we inserted may cause the exit edge to no longer
be fallthru. */
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)