#include "integrate.h"
#include "langhooks.h"
#include "target.h"
+#include "common/common-target.h"
#include "cfglayout.h"
#include "gimple.h"
#include "tree-pass.h"
prologue_insn_hash = NULL;
epilogue_insn_hash = NULL;
- if (crtl->emit.regno_pointer_align)
- free (crtl->emit.regno_pointer_align);
+ free (crtl->emit.regno_pointer_align);
memset (crtl, 0, sizeof (struct rtl_data));
f->eh = NULL;
frame_pointer_rtx. */
virtuals_instantiated = 1;
- /* See allocate_dynamic_stack_space for the rationale. */
-#ifdef SETJMP_VIA_SAVE_AREA
- if (flag_stack_usage && cfun->calls_setjmp)
- {
- int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
- dynamic_offset = (dynamic_offset + align - 1) / align * align;
- current_function_dynamic_stack_size
- += current_function_dynamic_alloc_count * dynamic_offset;
- }
-#endif
-
return 0;
}
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func /* todo_flags_finish */
+ 0 /* todo_flags_finish */
}
};
}
}
- return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
+ return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
+ type, named_arg);
}
/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */
{
if (type && TREE_ADDRESSABLE (type))
return false;
- return targetm.calls.callee_copies (ca, mode, type, named_arg);
+ return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
+ named_arg);
}
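
For context, the two conversions above use the opaque cumulative_args_t handle
this patch series introduces: generic code wraps a CUMULATIVE_ARGS pointer with
pack_cumulative_args before invoking a target hook, and the hook implementation
recovers the pointer with get_cumulative_args. A minimal sketch of a hook
written against the new interface; the hook body and its policy are
illustrative, not part of this patch:

/* Illustrative only: a target pass_by_reference hook under the new
   cumulative_args_t interface.  */
static bool
example_pass_by_reference (cumulative_args_t cum_v,
			   enum machine_mode mode ATTRIBUTE_UNUSED,
			   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  /* Recover the target-specific state from the opaque handle.  */
  CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED = get_cumulative_args (cum_v);

  /* Made-up policy for the example: pass aggregates by reference.  */
  return type != NULL_TREE && AGGREGATE_TYPE_P (type);
}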
/* Structures to communicate between the subroutines of assign_parms.
struct assign_parm_data_all
{
- CUMULATIVE_ARGS args_so_far;
+ /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
+ should become a job of the target or otherwise encapsulated. */
+ CUMULATIVE_ARGS args_so_far_v;
+ cumulative_args_t args_so_far;
struct args_size stack_args_size;
tree function_result_decl;
tree orig_fnargs;
fntype = TREE_TYPE (current_function_decl);
#ifdef INIT_CUMULATIVE_INCOMING_ARGS
- INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
+ INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
#else
- INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
+ INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
current_function_decl, -1);
#endif
+ all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
#ifdef REG_PARM_STACK_SPACE
all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
data->named_arg = 1; /* No variadic parms. */
else if (DECL_CHAIN (parm))
data->named_arg = 1; /* Not the last non-variadic parm. */
- else if (targetm.calls.strict_argument_naming (&all->args_so_far))
+ else if (targetm.calls.strict_argument_naming (all->args_so_far))
data->named_arg = 1; /* Only variadic ones are unnamed. */
else
data->named_arg = 0; /* Treat as variadic. */
passed_type = TREE_TYPE (first_field (passed_type));
/* See if this arg was passed by invisible reference. */
- if (pass_by_reference (&all->args_so_far, passed_mode,
+ if (pass_by_reference (&all->args_so_far_v, passed_mode,
passed_type, data->named_arg))
{
passed_type = nominal_type = build_pointer_type (passed_type);
{
int varargs_pretend_bytes = 0;
- targetm.calls.setup_incoming_varargs (&all->args_so_far,
+ targetm.calls.setup_incoming_varargs (all->args_so_far,
data->promoted_mode,
data->passed_type,
&varargs_pretend_bytes, no_rtl);
return;
}
- entry_parm = targetm.calls.function_incoming_arg (&all->args_so_far,
+ entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
data->promoted_mode,
data->passed_type,
data->named_arg);
#endif
if (!in_regs && !data->named_arg)
{
- if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
+ if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
{
rtx tem;
- tem = targetm.calls.function_incoming_arg (&all->args_so_far,
+ tem = targetm.calls.function_incoming_arg (all->args_so_far,
data->promoted_mode,
data->passed_type, true);
in_regs = tem != NULL;
{
int partial;
- partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
+ partial = targetm.calls.arg_partial_bytes (all->args_so_far,
data->promoted_mode,
data->passed_type,
data->named_arg);
if (data->promoted_mode != BLKmode
&& data->promoted_mode != DECL_MODE (parm))
{
- set_mem_size (stack_parm,
- GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
- if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm))
+ set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
+ if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
{
int offset = subreg_lowpart_offset (DECL_MODE (parm),
data->promoted_mode);
if (offset)
- set_mem_offset (stack_parm,
- plus_constant (MEM_OFFSET (stack_parm),
- -offset));
+ set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
}
}
}
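
The hunk above tracks the rtl.h change that made MEM offsets plain integers:
MEM_OFFSET_KNOWN_P replaces the old test for a non-NULL MEM_OFFSET rtx,
MEM_OFFSET now yields a HOST_WIDE_INT, and set_mem_offset/set_mem_size take
integers, so the plus_constant and GEN_INT wrapping is no longer needed.
A sketch of the new idiom; MEM and DELTA here are placeholders:

/* Sketch of the integer MEM offset interface.  */
if (MEM_EXPR (mem) && MEM_OFFSET_KNOWN_P (mem))
  set_mem_offset (mem, MEM_OFFSET (mem) + delta); /* Plain integer math.  */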
int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
- x = expand_shift (LSHIFT_EXPR, word_mode, reg,
- build_int_cst (NULL_TREE, by),
- NULL_RTX, 1);
+ x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
tem = change_address (mem, word_mode, 0);
emit_move_insn (tem, x);
}
/* ??? This may need a big-endian conversion on sparc64. */
data->stack_parm
= adjust_address (data->stack_parm, data->nominal_mode, 0);
- if (offset && MEM_OFFSET (data->stack_parm))
+ if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
set_mem_offset (data->stack_parm,
- plus_constant (MEM_OFFSET (data->stack_parm),
- offset));
+ MEM_OFFSET (data->stack_parm) + offset);
}
}
set_decl_incoming_rtl (parm, data.entry_parm, false);
/* Update info on where next arg arrives in registers. */
- targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
+ targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
data.passed_type, data.named_arg);
assign_parm_adjust_stack_rtl (&data);
/* For stdarg.h function, save info about
regs and stack space used by the named args. */
- crtl->args.info = all.args_so_far;
+ crtl->args.info = all.args_so_far_v;
/* Set the rtx used for the function return value. Put this in its
own variable so any optimizers that need this information don't have
continue;
/* Update info on where next arg arrives in registers. */
- targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
+ targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
data.passed_type, data.named_arg);
/* ??? Once upon a time variable_size stuffed parameter list
if (data.passed_pointer)
{
tree type = TREE_TYPE (data.passed_type);
- if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
+ if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
type, data.named_arg))
{
tree local, t;
t = built_in_decls[BUILT_IN_ALLOCA];
t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
/* The call has been built for a variable-sized object. */
- ALLOCA_FOR_VAR_P (t) = 1;
+ CALL_ALLOCA_FOR_VAR_P (t) = 1;
t = fold_convert (ptr_type, t);
t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
gimplify_and_add (t, &stmts);
{
tree sizetree;
enum direction where_pad;
- unsigned int boundary;
+ unsigned int boundary, round_boundary;
int reg_parm_stack_space = 0;
int part_size_in_regs;
= type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
boundary = targetm.calls.function_arg_boundary (passed_mode, type);
+ round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
+ type);
locate->where_pad = where_pad;
/* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
tree s2 = sizetree;
if (where_pad != none
&& (!host_integerp (sizetree, 1)
- || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
- s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
+ || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
+ s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
SUB_PARM_SIZE (locate->slot_offset, s2);
}
if (where_pad != none
&& (!host_integerp (sizetree, 1)
- || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
- sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
+ || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
+ sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
ADD_PARM_SIZE (locate->size, sizetree);
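
The new function_arg_round_boundary hook lets a target round an argument's
stack slot size to a different boundary than the one used to align its
address; both uses were previously hard-wired to PARM_BOUNDARY. Presumably
the default implementation just preserves the old behavior, along these
lines (sketch; verify against targhooks.c for the actual default):

/* Sketch of a behavior-preserving default for the new hook: round
   argument sizes to PARM_BOUNDARY, as the old hard-wired code did.  */
unsigned int
default_function_arg_round_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
				     const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}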
return prev;
}
+/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
+ by modifying the last node in chain 1 to point to chain 2. */
+
+tree
+block_chainon (tree op1, tree op2)
+{
+ tree t1;
+
+ if (!op1)
+ return op2;
+ if (!op2)
+ return op1;
+
+ for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
+ continue;
+ BLOCK_CHAIN (t1) = op2;
+
+#ifdef ENABLE_TREE_CHECKING
+ {
+ tree t2;
+ for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
+ gcc_assert (t2 != t1);
+ }
+#endif
+
+ return op1;
+}
+
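
block_chainon is the BLOCK_CHAIN analogue of chainon from tree.c, with the
same last-node cycle check under ENABLE_TREE_CHECKING. A hypothetical use,
splicing a detached list of blocks onto a scope's existing subblocks
(OUTER and EXTRA are placeholders):

/* Hypothetical caller: append EXTRA to the subblock list of OUTER.  */
BLOCK_SUBBLOCKS (outer) = block_chainon (BLOCK_SUBBLOCKS (outer), extra);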
/* Count the subblocks of the list starting with BLOCK. If VECTOR is
non-NULL, list them all into VECTOR, in a depth-first preorder
traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
return funcdef_no++;
}
+/* Return the current value of funcdef_no.  */
+int
+get_last_funcdef_no (void)
+{
+ return funcdef_no;
+}
+
/* Allocate a function structure for FNDECL and set its contents
to the defaults. Set cfun to the newly-allocated object.
Some of the helper functions invoked during initialization assume
init_expr ();
default_rtl_profile ();
- if (flag_stack_usage)
+ if (flag_stack_usage_info)
{
cfun->su = ggc_alloc_cleared_stack_usage ();
cfun->su->static_stack_size = -1;
else
allocate_struct_function (subr, false);
prepare_function_start ();
+ decide_function_section (subr);
/* Warn if this value is an aggregate type,
regardless of which calling convention we are using for it. */
if (!DECL_RTL_SET_P (var))
expand_decl (var);
- t_save = build4 (ARRAY_REF, ptr_type_node,
+ t_save = build4 (ARRAY_REF,
+ TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
cfun->nonlocal_goto_save_area,
integer_zero_node, NULL_TREE, NULL_TREE);
r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
- r_save = convert_memory_address (Pmode, r_save);
+ gcc_assert (GET_MODE (r_save) == Pmode);
emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
update_nonlocal_goto_save_area ();
#endif
}
- /* After the display initializations is where the stack checking
- probe should go. */
- if(flag_stack_check)
+ /* If we are doing generic stack checking, the probe should go here. */
+ if (flag_stack_check == GENERIC_STACK_CHECK)
stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
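
The old `if (flag_stack_check)` treated every checking mode the same, but the
deleted-note placeholder is only needed for the generic scheme; the builtin
schemes emit their probes during prologue expansion instead. For reference,
flag_stack_check holds an enum along these lines (from flags.h of this era;
quoted from memory, so verify against the tree):

/* The stack-checking modes distinguished by flag_stack_check.  */
enum stack_check_type
{
  NO_STACK_CHECK = 0,	      /* No stack checking at all.  */
  GENERIC_STACK_CHECK,	      /* Generic probe, no target support assumed.  */
  STATIC_BUILTIN_STACK_CHECK, /* Target checks the static frame part.  */
  FULL_BUILTIN_STACK_CHECK    /* Target checks static and dynamic parts.  */
};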
/* Make sure there is a line number after the function entry setup code. */
/* Output the label for the actual return from the function. */
emit_label (return_label);
- if (targetm.except_unwind_info (&global_options) == UI_SJLJ)
+ if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
{
/* Let except.c know where it should emit the call to unregister
the function context for sjlj exceptions. */
may trap are not moved into the epilogue by scheduling, because
we don't always emit unwind information for the epilogue. */
if (cfun->can_throw_non_call_exceptions
- && targetm.except_unwind_info (&global_options) != UI_SJLJ)
+ && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
emit_insn (gen_blockage ());
/* If stack protection is enabled for this function, check the guard. */
static void
emit_return_into_block (basic_block bb)
{
- emit_jump_insn_after (gen_return (), BB_END (bb));
+ rtx jump = emit_jump_insn_after (gen_return (), BB_END (bb));
+ JUMP_LABEL (jump) = ret_rtx;
}
#endif /* HAVE_return */
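
Giving return jumps a JUMP_LABEL of ret_rtx (or of the return pattern itself,
as in the epilogue hunk further down) lets later passes identify them without
re-matching the insn pattern. A sketch of the consumer side, assuming this
era's ANY_RETURN_P macro; mark_return_jump is a hypothetical consumer:

/* Sketch: detect a function-return jump via its JUMP_LABEL rather than
   by inspecting PATTERN (insn).  */
if (JUMP_P (insn)
    && JUMP_LABEL (insn) != NULL_RTX
    && ANY_RETURN_P (JUMP_LABEL (insn)))
  mark_return_jump (insn); /* Hypothetical consumer.  */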
that with a conditional return instruction. */
else if (condjump_p (jump))
{
- if (! redirect_jump (jump, 0, 0))
+ if (! redirect_jump (jump, ret_rtx, 0))
{
ei_next (&ei2);
continue;
#ifdef HAVE_epilogue
if (HAVE_epilogue)
{
+ rtx returnjump;
+
start_sequence ();
epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
seq = gen_epilogue ();
record_insns (seq, NULL, &epilogue_insn_hash);
set_insn_locators (seq, epilogue_locator);
+ returnjump = get_last_insn ();
seq = get_insns ();
end_sequence ();
insert_insn_on_edge (seq, e);
inserted = true;
+
+ if (JUMP_P (returnjump))
+ {
+ rtx pat = PATTERN (returnjump);
+ if (GET_CODE (pat) == PARALLEL)
+ pat = XVECEXP (pat, 0, 0);
+ if (ANY_RETURN_P (pat))
+ JUMP_LABEL (returnjump) = pat;
+ else
+ JUMP_LABEL (returnjump) = ret_rtx;
+ }
+ else
+ returnjump = NULL_RTX;
}
else
#endif
thread_prologue_and_epilogue_insns ();
/* The stack usage info is finalized during prologue expansion. */
- if (flag_stack_usage)
+ if (flag_stack_usage_info)
output_stack_usage ();
return 0;
0, /* properties_provided */
0, /* properties_destroyed */
TODO_verify_flow, /* todo_flags_start */
- TODO_dump_func |
TODO_df_verify |
TODO_df_finish | TODO_verify_rtl_sharing |
TODO_ggc_collect /* todo_flags_finish */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func /* todo_flags_finish */
+ 0 /* todo_flags_finish */
}
};