into RTL. */
struct ssaexpand SA;
+/* This variable holds the currently expanded gimple statement for purposes
+ of communicating the profile info to the builtin expanders. */
+gimple currently_expanding_gimple_stmt;
+
/* Return an expression tree corresponding to the RHS of GIMPLE
statement STMT. */
{
tree t;
enum gimple_rhs_class grhs_class;
-
+
grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
if (grhs_class == GIMPLE_BINARY_RHS)
is lower triangular. */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;
+static size_t n_stack_vars_conflict;
/* The phase of the stack frame. This is the known misalignment of
virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
size_t t;
t = i, i = j, j = t;
}
- return (i * (i + 1)) / 2 + j;
+
+ if (i & 1)
+ return ((i + 1) / 2) * i + j;
+ else
+ return (i / 2) * (i + 1) + j;
}
/* Ensure that STACK_VARS_CONFLICT is large enough for N objects. */
size_t size = triangular_index (n-1, n-1) + 1;
if (size <= stack_vars_conflict_alloc)
- return;
+ {
+ if (n > n_stack_vars_conflict)
+ fatal_error ("program is too large to be compiled on this machine");
+ return;
+ }
stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
(size - stack_vars_conflict_alloc) * sizeof (bool));
stack_vars_conflict_alloc = size;
+ n_stack_vars_conflict = n;
}
/* Make the decls associated with luid's X and Y conflict. */
gcc_assert (index < stack_vars_conflict_alloc);
return stack_vars_conflict[index];
}
-
+
/* Returns true if TYPE is or contains a union type. */
static bool
/* A subroutine of expand_used_vars. Expand one variable according to
its flavor. Variables to be placed on the stack are not actually
- expanded yet, merely recorded.
+ expanded yet, merely recorded.
When REALLY_EXPAND is false, only add stack values to be allocated.
Return stack usage this variable is supposed to take.
*/
}
/* Prepare for expanding variables. */
-static void
+static void
init_vars_expansion (void)
{
tree t;
return (rtx) *elt;
/* Find the tree label if it is present. */
-
+
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
lab_stmt = gsi_stmt (gsi);
/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
of a basic block where we just expanded the conditional at the end,
- possibly clean up the CFG and instruction sequence. */
+ possibly clean up the CFG and instruction sequence. LAST is the
+ last instruction before the just emitted jump sequence. */
static void
-maybe_cleanup_end_of_block (edge e)
+maybe_cleanup_end_of_block (edge e, rtx last)
{
/* Special case: when jumpif decides that the condition is
trivial it emits an unconditional jump (and the necessary
normally isn't there in a cleaned CFG), fix it here. */
if (BARRIER_P (get_last_insn ()))
{
- basic_block bb = e->src;
rtx insn;
remove_edge (e);
/* Now, we have a single successor block, if we have insns to
/* Make sure we have an unconditional jump. Otherwise we're
confused. */
gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
- for (insn = PREV_INSN (insn); insn != BB_HEAD (bb);)
+ for (insn = PREV_INSN (insn); insn != last;)
{
insn = PREV_INSN (insn);
if (JUMP_P (NEXT_INSN (insn)))
}
true_edge->goto_block = NULL;
false_edge->flags |= EDGE_FALLTHRU;
- maybe_cleanup_end_of_block (false_edge);
+ maybe_cleanup_end_of_block (false_edge, last);
return NULL;
}
if (true_edge->dest == bb->next_bb)
}
false_edge->goto_block = NULL;
true_edge->flags |= EDGE_FALLTHRU;
- maybe_cleanup_end_of_block (true_edge);
+ maybe_cleanup_end_of_block (true_edge, last);
return NULL;
}
{
tree exp;
tree lhs = gimple_call_lhs (stmt);
- tree fndecl = gimple_call_fndecl (stmt);
size_t i;
exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
SET_EXPR_LOCATION (exp, gimple_location (stmt));
TREE_BLOCK (exp) = gimple_block (stmt);
- /* Record the original call statement, as it may be used
- to retrieve profile information during expansion. */
-
- if (fndecl && DECL_BUILT_IN (fndecl))
- {
- tree_ann_common_t ann = get_tree_common_ann (exp);
- ann->stmt = stmt;
- }
-
if (lhs)
expand_assignment (lhs, exp, false);
else
basic_block new_bb;
stmt = gsi_stmt (gsi);
+ currently_expanding_gimple_stmt = stmt;
/* Expand this statement, then evaluate the resulting RTL and
fixup the CFG accordingly. */
/* Ignore this stmt if it is in the list of
replaceable expressions. */
if (SA.values
- && bitmap_bit_p (SA.values,
+ && bitmap_bit_p (SA.values,
SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
continue;
}
}
}
+ currently_expanding_gimple_stmt = NULL;
+
/* Expand implicit goto and convert goto_locus. */
FOR_EACH_EDGE (e, ei, bb->succs)
{
}
}
+ /* Expanded RTL can create a jump in the last instruction of block.
+ Such a jump might later be assumed to be a jump to the successor and break edge insertion.
+ We need to insert a dummy move to prevent this. PR41440. */
+ if (single_succ_p (bb)
+ && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
+ && (last = get_last_insn ())
+ && JUMP_P (last))
+ {
+ rtx dummy = gen_reg_rtx (SImode);
+ emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
+ }
+
do_pending_stack_adjust ();
/* Find the block tail. The last insn in the block is the insn
if (! SUPPORTS_STACK_ALIGNMENT)
return;
-
+
if (cfun->calls_alloca
|| cfun->has_nonlocal_label
|| crtl->has_nonlocal_goto)
/* Target has to redefine TARGET_GET_DRAP_RTX to support stack
alignment. */
gcc_assert (targetm.calls.get_drap_rtx != NULL);
- drap_rtx = targetm.calls.get_drap_rtx ();
+ drap_rtx = targetm.calls.get_drap_rtx ();
/* stack_realign_drap and drap_rtx must match. */
gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
if (warn_stack_protect)
{
if (cfun->calls_alloca)
- warning (OPT_Wstack_protector,
+ warning (OPT_Wstack_protector,
"not protecting local variables: variable length buffer");
if (has_short_buffer && !crtl->stack_protect_guard)
- warning (OPT_Wstack_protector,
+ warning (OPT_Wstack_protector,
"not protecting function: no buffer at least %d bytes long",
(int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
}