   /* Try to find an available, already-allocated temporary of the proper
      mode which meets the size and alignment requirements.  Choose the
-     smallest one with the closest alignment.  */
-  for (p = avail_temp_slots; p; p = p->next)
+     smallest one with the closest alignment.
+
+     If assign_stack_temp is called outside of the tree->rtl expansion,
+     we cannot reuse the stack slots (that may still refer to
+     VIRTUAL_STACK_VARS_REGNUM).  */
+  if (!virtuals_instantiated)
     {
-      if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
-          && objects_must_conflict_p (p->type, type)
-          && (best_p == 0 || best_p->size > p->size
-              || (best_p->size == p->size && best_p->align > p->align)))
+      for (p = avail_temp_slots; p; p = p->next)
         {
-          if (p->align == align && p->size == size)
+          if (p->align >= align && p->size >= size
+              && GET_MODE (p->slot) == mode
+              && objects_must_conflict_p (p->type, type)
+              && (best_p == 0 || best_p->size > p->size
+                  || (best_p->size == p->size && best_p->align > p->align)))
             {
-              selected = p;
-              cut_slot_from_list (selected, &avail_temp_slots);
-              best_p = 0;
-              break;
+              if (p->align == align && p->size == size)
+                {
+                  selected = p;
+                  cut_slot_from_list (selected, &avail_temp_slots);
+                  best_p = 0;
+                  break;
+                }
+              best_p = p;
             }
-          best_p = p;
         }
     }
   for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
 }
+/* Helper for instantiate_decls called via walk_tree: Process all decls
+   in the given DECL_VALUE_EXPR.  */
+
+static tree
+instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
+{
+  tree t = *tp;
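+  /* walk_tree calls this for every node in the DECL_VALUE_EXPR.  Stop
+     descending at non-expression nodes; a decl found there may carry
+     RTL of its own that still needs instantiating.  */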
+  if (! EXPR_P (t))
+    {
+      *walk_subtrees = 0;
+      if (DECL_P (t) && DECL_RTL_SET_P (t))
+        instantiate_decl (DECL_RTL (t));
+    }
+  return NULL;
+}
+
 /* Subroutine of instantiate_decls: Process all decls in the given
    BLOCK node and all its subblocks.  */
   tree t;

   for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
-    if (DECL_RTL_SET_P (t))
-      instantiate_decl (DECL_RTL (t));
+    {
+      if (DECL_RTL_SET_P (t))
+        instantiate_decl (DECL_RTL (t));
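+      /* The variable may have been rewritten into a DECL_VALUE_EXPR;
+         decls referenced from that expression can carry RTL of their
+         own, so instantiate them as well.  */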
+      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
+        {
+          tree v = DECL_VALUE_EXPR (t);
+          walk_tree (&v, instantiate_expr, NULL, NULL);
+        }
+    }

   /* Process all subblocks.  */
   for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
     {
       instantiate_decl (DECL_RTL (decl));
       instantiate_decl (DECL_INCOMING_RTL (decl));
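+      /* A parameter, too, may have been rewritten into a
+         DECL_VALUE_EXPR; handle any decls referenced from it.  */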
+      if (DECL_HAS_VALUE_EXPR_P (decl))
+        {
+          tree v = DECL_VALUE_EXPR (decl);
+          walk_tree (&v, instantiate_expr, NULL, NULL);
+        }
     }

   /* Now process all variables defined in the function or its subblocks.  */
 /* Pass through the INSNS of function FNDECL and convert virtual register
    references to hard register references.  */
-void
+static unsigned int
 instantiate_virtual_regs (void)
 {
   rtx insn;
   /* Indicate that, from now on, assign_stack_local should use
      frame_pointer_rtx.  */
   virtuals_instantiated = 1;
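+
+  /* The value an execute function returns is a set of TODO_* flags for
+     the pass manager; this pass queues no additional work.  */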
+  return 0;
 }
 struct tree_opt_pass pass_instantiate_virtual_regs =
   /* Store the parm in a pseudoregister during the function, but we may
      need to do it in a wider mode.  */

+  /* This is not really promoting for a call.  However, we need to be
+     consistent with assign_parm_find_data_types and expand_expr_real_1.  */
   promoted_nominal_mode
-    = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 0);
+    = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
   parmreg = gen_reg_rtx (promoted_nominal_mode);
   tree *block_vector;

   *n_blocks_p = all_blocks (block, NULL);
-  block_vector = xmalloc (*n_blocks_p * sizeof (tree));
+  block_vector = XNEWVEC (tree, *n_blocks_p);
   all_blocks (block, block_vector);

   return block_vector;
 /* Make sure all values used by the optimization passes have sane
    defaults.  */
-void
+unsigned int
 init_function_for_compilation (void)
 {
   reg_renumber = 0;
   gcc_assert (VEC_length (int, prologue) == 0);
   gcc_assert (VEC_length (int, epilogue) == 0);
   gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
+  return 0;
 }
 struct tree_opt_pass pass_init_function =
   clear_pending_stack_adjust ();
   do_pending_stack_adjust ();

-  /* @@@ This is a kludge.  We want to ensure that instructions that
-     may trap are not moved into the epilogue by scheduling, because
-     we don't always emit unwind information for the epilogue.
-     However, not all machine descriptions define a blockage insn, so
-     emit an ASM_INPUT to act as one.  */
-  if (flag_non_call_exceptions)
-    emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
-
   /* Mark the end of the function body.
      If control reaches this insn, the function can drop through
      without returning a value.  */
   /* Output the label for the actual return from the function.  */
   emit_label (return_label);

-  /* Let except.c know where it should emit the call to unregister
-     the function context for sjlj exceptions.  */
-  if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
-    sjlj_emit_function_exit_after (get_last_insn ());
+  if (USING_SJLJ_EXCEPTIONS)
+    {
+      /* Let except.c know where it should emit the call to unregister
+         the function context for sjlj exceptions.  */
+      if (flag_exceptions)
+        sjlj_emit_function_exit_after (get_last_insn ());
+    }
+  else
+    {
+      /* @@@ This is a kludge.  We want to ensure that instructions that
+         may trap are not moved into the epilogue by scheduling, because
+         we don't always emit unwind information for the epilogue.
+         However, not all machine descriptions define a blockage insn, so
+         emit an ASM_INPUT to act as one.  */
+      if (flag_non_call_exceptions)
+        emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
+    }

   /* If this is an implementation of throw, do what's necessary to
      communicate between __builtin_eh_return and the epilogue.  */
          fixup_fallthru_exit_predecessor.  */
       cfg_layout_initialize (0);
       FOR_EACH_BB (cur_bb)
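+        /* Do not chain through the fixed ENTRY and EXIT blocks, whose
+           indices are below NUM_FIXED_BLOCKS.  */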
-        if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
+        if (cur_bb->index >= NUM_FIXED_BLOCKS
+            && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
           cur_bb->aux = cur_bb->next_bb;
       cfg_layout_finalize ();
     }
 }
\f
-static void
+static unsigned int
 rest_of_handle_check_leaf_regs (void)
 {
 #ifdef LEAF_REGISTERS
   current_function_uses_only_leaf_regs
     = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
 #endif
+  return 0;
 }
 struct tree_opt_pass pass_leaf_regs =