/* Verify that the insns emitted after LAST form exactly the shape
   condjump -> simplejump -> barrier -> label -> (end of stream);
   otherwise fall back to the generic path via the `failed' label.
   The '+' lines of this patch add NULL checks before each nested
   NEXT_INSN dereference so we do not crash when the insn stream
   ends earlier than the expected pattern.  */
if (!any_condjump_p (last)
|| !JUMP_P (NEXT_INSN (last))
|| !simplejump_p (NEXT_INSN (last))
+ || !NEXT_INSN (NEXT_INSN (last))
|| !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
+ || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
|| !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
/* A fourth following insn means extra insns were emitted beyond the
   expected pattern, so the optimized path does not apply.  */
|| NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
goto failed;
{
/* qsort comparator interior: order stack variable indices *A and *B
   primarily by ascending size.  NOTE(review): the enclosing function's
   signature is outside this chunk — presumably a and b are
   `const void *' pointing at size_t indices into stack_vars; confirm
   against the full file.  */
HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
+ unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl);
+ unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl);
if (sa < sb)
return -1;
if (sa > sb)
return 1;
+ /* For stack variables of the same size use the uid of the decl
+ to make the sort stable. */
/* C qsort is not a stable sort, so without this tiebreak equal-size
   variables could be ordered differently between runs/hosts.  */
+ if (uida < uidb)
+ return -1;
+ if (uida > uidb)
+ return 1;
return 0;
}
/* Expand all variables at this level. */
/* Walk the declarations chained off BLOCK and expand each one that is
   actually referenced.  */
for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
- if (TREE_USED (t))
+ if (TREE_USED (t)
+ /* Force local static variables to be output when marked by
+ used attribute. For unit-at-a-time, cgraph code already takes
+ care of this. */
+ || (!flag_unit_at_a_time && TREE_STATIC (t)
+ && DECL_PRESERVE_P (t)))
expand_one_var (t, toplevel);
/* Record how many stack vars exist after expanding this level; callers
   compare against the pre-loop count to find the vars added here.  */
this_sv_num = stack_vars_num;
this_sv_num = stack_vars_num;
{
basic_block bb, init_block;
sbitmap blocks;
+ edge_iterator ei;
+ edge e;
/* Some backends want to know that we are expanding to RTL. */
currently_expanding_to_rtl = 1;
init_block = construct_init_block ();
+ /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
+ remaining edges in expand_gimple_basic_block. */
+ FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
+ e->flags &= ~EDGE_EXECUTABLE;
+
FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
bb = expand_gimple_basic_block (bb);