#include "tree-gimple.h"
#include "tree-pass.h"
#include "predict.h"
+#include "vecprim.h"
#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
/* The currently compiled function. */
struct function *cfun = 0;
-DEF_VEC_I(int);
-DEF_VEC_ALLOC_I(int,heap);
-
/* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
static VEC(int,heap) *prologue;
static VEC(int,heap) *epilogue;
f->x_return_label = NULL;
f->x_naked_return_label = NULL;
f->x_stack_slot_list = NULL;
- f->x_tail_recursion_reentry = NULL;
+ f->x_stack_check_probe_note = NULL;
f->x_arg_pointer_save_area = NULL;
f->x_parm_birth_insn = NULL;
- f->original_arg_vector = NULL;
- f->original_decl_initial = NULL;
f->epilogue_delay_list = NULL;
}
\f
/* Return size needed for stack frame based on slots so far allocated.
This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
the caller may have to do that. */
+
HOST_WIDE_INT
get_frame_size (void)
{
return get_func_frame_size (cfun);
}
+/* Issue an error message and return TRUE if frame OFFSET overflows in
+   the signed target pointer arithmetic for function FUNC.  Otherwise
+   return FALSE.  */
+
+bool
+frame_offset_overflow (HOST_WIDE_INT offset, tree func)
+{
+ unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
+
+ if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
+ /* Leave room for the fixed part of the frame. */
+ - 64 * UNITS_PER_WORD)
+ {
+ error ("%Jtotal size of local objects too large", func);
+ return TRUE;
+ }
+
+ return FALSE;
+}
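+
+/* An illustrative instance of the bound above (assuming a 32-bit
+   target with a 32-bit Pmode and UNITS_PER_WORD of 4): the threshold
+   is ((unsigned HOST_WIDE_INT) 1 << 31) - 64 * 4 == 0x7fffff00, so
+   frames up to that many bytes are accepted and anything larger
+   draws the error above.  */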
+
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
with machine mode MODE.
/* On a big-endian machine, if we are allocating more space than we will use,
use the least significant bytes of those that are allocated. */
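+  /* (For instance, allocating 8 bytes for an SImode slot gives
+     bigend_correction == 4, so the resulting MEM covers the last,
+     least significant, four bytes of the slot.)  */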
- if (BYTES_BIG_ENDIAN && mode != BLKmode)
+ if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
bigend_correction = size - GET_MODE_SIZE (mode);
/* If we have already instantiated virtual registers, return the actual
function->x_frame_offset += size;
x = gen_rtx_MEM (mode, addr);
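+  /* A reference into the function's own frame cannot trap.  */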
+ MEM_NOTRAP_P (x) = 1;
function->x_stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
+ if (frame_offset_overflow (function->x_frame_offset, function->decl))
+ function->x_frame_offset = 0;
+
return x;
}
static struct temp_slot **
temp_slots_at_level (int level)
{
+ if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
+ {
+ size_t old_length = VEC_length (temp_slot_p, used_temp_slots);
+ temp_slot_p *p;
- if (!used_temp_slots)
- VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");
-
- while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
- VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);
+ VEC_safe_grow (temp_slot_p, gc, used_temp_slots, level + 1);
+ p = VEC_address (temp_slot_p, used_temp_slots);
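+      /* VEC_safe_grow does not clear the newly added elements, so
+         zero them by hand.  */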
+ memset (&p[old_length], 0,
+ sizeof (temp_slot_p) * (level + 1 - old_length));
+ }
- return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
+ return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}
/* Returns the maximal temporary slot level. */
if (!used_temp_slots)
return -1;
- return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
+ return VEC_length (temp_slot_p, used_temp_slots) - 1;
}
/* Moves temporary slot TEMP to LEVEL. */
/* Try to find an available, already-allocated temporary of the proper
mode which meets the size and alignment requirements. Choose the
- smallest one with the closest alignment. */
- for (p = avail_temp_slots; p; p = p->next)
+ smallest one with the closest alignment.
+
+ If assign_stack_temp is called outside of the tree->rtl expansion,
+   we cannot reuse the stack slots (they may still refer to
+ VIRTUAL_STACK_VARS_REGNUM). */
+ if (!virtuals_instantiated)
{
- if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
- && objects_must_conflict_p (p->type, type)
- && (best_p == 0 || best_p->size > p->size
- || (best_p->size == p->size && best_p->align > p->align)))
+ for (p = avail_temp_slots; p; p = p->next)
{
- if (p->align == align && p->size == size)
+ if (p->align >= align && p->size >= size
+ && GET_MODE (p->slot) == mode
+ && objects_must_conflict_p (p->type, type)
+ && (best_p == 0 || best_p->size > p->size
+ || (best_p->size == p->size && best_p->align > p->align)))
{
- selected = p;
- cut_slot_from_list (selected, &avail_temp_slots);
- best_p = 0;
- break;
+ if (p->align == align && p->size == size)
+ {
+ selected = p;
+ cut_slot_from_list (selected, &avail_temp_slots);
+ best_p = 0;
+ break;
+ }
+ best_p = p;
}
- best_p = p;
}
}
p->size = best_p->size - rounded_size;
p->base_offset = best_p->base_offset + rounded_size;
p->full_size = best_p->full_size - rounded_size;
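+  /* Note that adjust_address_nv does not re-validate the new address
+     (the "_nv" variant) and carries over the memory attributes of
+     BEST_P's slot.  */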
- p->slot = gen_rtx_MEM (BLKmode,
- plus_constant (XEXP (best_p->slot, 0),
- rounded_size));
+ p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
p->align = best_p->align;
p->address = 0;
p->type = best_p->type;
MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
}
+ MEM_NOTRAP_P (slot) = 1;
return slot;
}
if (mode == BLKmode || memory_required)
{
HOST_WIDE_INT size = int_size_in_bytes (type);
- tree size_tree;
rtx tmp;
      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
size = 1;
/* Unfortunately, we don't yet know how to allocate variable-sized
- temporaries. However, sometimes we have a fixed upper limit on
- the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
- instead. This is the case for Chill variable-sized strings. */
- if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
- && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
- && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
- size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
-
- /* If we still haven't been able to get a size, see if the language
- can compute a maximum size. */
- if (size == -1
- && (size_tree = lang_hooks.types.max_size (type)) != 0
- && host_integerp (size_tree, 1))
- size = tree_low_cst (size_tree, 1);
+ temporaries. However, sometimes we can find a fixed upper limit on
+ the size, so try that instead. */
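+      /* max_int_size_in_bytes checks TYPE_ARRAY_MAX_SIZE and the
+         language's max_size hook, and returns -1 if no upper bound
+         is known.  */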
+ else if (size == -1)
+ size = max_int_size_in_bytes (type);
/* The size of the temporary may be too large to fit into an integer. */
/* ??? Not sure this should happen except for user silliness, so limit
#endif
#endif
-/* On most machines, the CFA coincides with the first incoming parm. */
-
-#ifndef ARG_POINTER_CFA_OFFSET
-#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
-#endif
-
\f
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
is a virtual register, return the equivalent hard register and set the
else if (x == virtual_outgoing_args_rtx)
new = stack_pointer_rtx, offset = out_arg_offset;
else if (x == virtual_cfa_rtx)
- new = arg_pointer_rtx, offset = cfa_offset;
+ {
+#ifdef FRAME_POINTER_CFA_OFFSET
+ new = frame_pointer_rtx;
+#else
+ new = arg_pointer_rtx;
+#endif
+ offset = cfa_offset;
+ }
else
return NULL_RTX;
for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}
+/* Helper for instantiate_decls, called via walk_tree: Process all decls
+ in the given DECL_VALUE_EXPR. */
+
+static tree
+instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
+{
+ tree t = *tp;
+ if (! EXPR_P (t))
+ {
+ *walk_subtrees = 0;
+ if (DECL_P (t) && DECL_RTL_SET_P (t))
+ instantiate_decl (DECL_RTL (t));
+ }
+ return NULL;
+}
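+
+/* For instance (illustrative), the DECL_VALUE_EXPR of a variable
+   length array is typically an INDIRECT_REF of an artificial pointer
+   variable; that pointer's DECL_RTL may still mention virtual
+   registers and must be instantiated as well.  */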
+
/* Subroutine of instantiate_decls: Process all decls in the given
BLOCK node and all its subblocks. */
tree t;
for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
- if (DECL_RTL_SET_P (t))
- instantiate_decl (DECL_RTL (t));
+ {
+ if (DECL_RTL_SET_P (t))
+ instantiate_decl (DECL_RTL (t));
+ if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
+ {
+ tree v = DECL_VALUE_EXPR (t);
+ walk_tree (&v, instantiate_expr, NULL, NULL);
+ }
+ }
/* Process all subblocks. */
for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
{
instantiate_decl (DECL_RTL (decl));
instantiate_decl (DECL_INCOMING_RTL (decl));
+ if (DECL_HAS_VALUE_EXPR_P (decl))
+ {
+ tree v = DECL_VALUE_EXPR (decl);
+ walk_tree (&v, instantiate_expr, NULL, NULL);
+ }
}
/* Now process all variables defined in the function or its subblocks. */
/* Pass through the INSNS of function FNDECL and convert virtual register
references to hard register references. */
-void
+static unsigned int
instantiate_virtual_regs (void)
{
rtx insn;
var_offset = STARTING_FRAME_OFFSET;
dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
out_arg_offset = STACK_POINTER_OFFSET;
+#ifdef FRAME_POINTER_CFA_OFFSET
+ cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
+#else
cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
+#endif
/* Initialize recognition, indicating that volatile is OK. */
init_recog ();
/* Indicate that, from now on, assign_stack_local should use
frame_pointer_rtx. */
virtuals_instantiated = 1;
+ return 0;
}
struct tree_opt_pass pass_instantiate_virtual_regs =
{
- NULL, /* name */
+ "vregs", /* name */
NULL, /* gate */
instantiate_virtual_regs, /* execute */
NULL, /* sub */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- 0, /* todo_flags_finish */
+ TODO_dump_func, /* todo_flags_finish */
0 /* letter */
};
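+
+/* With the pass named and TODO_dump_func set, the pass manager can
+   emit a per-function RTL dump for this pass, keyed off the "vregs"
+   name, whenever dumping is requested.  */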
/* If the parm is to be passed as a transparent union, use the type of
the first field for the tests below. We have already verified that
the modes are the same. */
- if (DECL_TRANSPARENT_UNION (parm)
- || (TREE_CODE (passed_type) == UNION_TYPE
- && TYPE_TRANSPARENT_UNION (passed_type)))
+ if (TREE_CODE (passed_type) == UNION_TYPE
+ && TYPE_TRANSPARENT_UNION (passed_type))
passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
/* See if this arg was passed by invisible reference. */
/* Store the parm in a pseudoregister during the function, but we may
need to do it in a wider mode. */
+  /* This is not really promoting for a call.  However, we need to be
+     consistent with assign_parm_find_data_types and expand_expr_real_1.  */
promoted_nominal_mode
- = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 0);
+ = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
parmreg = gen_reg_rtx (promoted_nominal_mode);
{
struct assign_parm_data_all all;
tree fnargs, parm;
- rtx internal_arg_pointer;
-
- /* If the reg that the virtual arg pointer will be translated into is
- not a fixed reg or is the stack pointer, make a copy of the virtual
- arg pointer, and address parms via the copy. The frame pointer is
- considered fixed even though it is not marked as such.
-
- The second time through, simply use ap to avoid generating rtx. */
- if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
- || ! (fixed_regs[ARG_POINTER_REGNUM]
- || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
- internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
- else
- internal_arg_pointer = virtual_incoming_args_rtx;
- current_function_internal_arg_pointer = internal_arg_pointer;
+ current_function_internal_arg_pointer
+ = targetm.calls.internal_arg_pointer ();
assign_parms_initialize_all (&all);
fnargs = assign_parms_augmented_arg_list (&all);
REG_PARM_STACK_SPACE (fndecl));
#endif
- current_function_args_size
- = ((current_function_args_size + STACK_BYTES - 1)
- / STACK_BYTES) * STACK_BYTES;
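+  /* CEIL_ROUND (defined near the top of this file) rounds its first
+     argument up to a multiple of its second.  */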
+ current_function_args_size = CEIL_ROUND (current_function_args_size,
+ PARM_BOUNDARY / BITS_PER_UNIT);
#ifdef ARGS_GROW_DOWNWARD
current_function_arg_offset_rtx
HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
#ifdef SPARC_STACK_BOUNDARY_HACK
- /* The sparc port has a bug. It sometimes claims a STACK_BOUNDARY
- higher than the real alignment of %sp. However, when it does this,
- the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
- This is a temporary hack while the sparc port is fixed. */
+ /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
+ the real alignment of %sp. However, when it does this, the
+ alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
if (SPARC_STACK_BOUNDARY_HACK)
sp_offset = 0;
#endif
tree *block_vector;
*n_blocks_p = all_blocks (block, NULL);
- block_vector = xmalloc (*n_blocks_p * sizeof (tree));
+ block_vector = XNEWVEC (tree, *n_blocks_p);
all_blocks (block, block_vector);
return block_vector;
/* Make sure all values used by the optimization passes have sane
defaults. */
-void
+unsigned int
init_function_for_compilation (void)
{
reg_renumber = 0;
gcc_assert (VEC_length (int, prologue) == 0);
gcc_assert (VEC_length (int, epilogue) == 0);
gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
+ return 0;
}
struct tree_opt_pass pass_init_function =
void
expand_main_function (void)
{
-#ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
- if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
- {
- int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
- rtx tmp, seq;
-
- start_sequence ();
- /* Forcibly align the stack. */
-#ifdef STACK_GROWS_DOWNWARD
- tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
- stack_pointer_rtx, 1, OPTAB_WIDEN);
-#else
- tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
- GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
- tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
- stack_pointer_rtx, 1, OPTAB_WIDEN);
-#endif
- if (tmp != stack_pointer_rtx)
- emit_move_insn (stack_pointer_rtx, tmp);
-
- /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
- tmp = force_reg (Pmode, const0_rtx);
- allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
- seq = get_insns ();
- end_sequence ();
-
- for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
- if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
- break;
- if (tmp)
- emit_insn_before (seq, tmp);
- else
- emit_insn (seq);
- }
-#endif
-
#if (defined(INVOKE__main) \
|| (!defined(HAS_INIT_SECTION) \
&& !defined(INIT_SECTION_ASM_OP) \
# define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
#endif
-static void
+void
stack_protect_epilogue (void)
{
tree guard_decl = targetm.stack_protect_guard ();
else
#endif
{
- rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
+ rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
/* Expect to be passed the address of a place to store the value.
If it is passed as an argument, assign_parms will take care of
it. */
as opposed to parm setup. */
emit_note (NOTE_INSN_FUNCTION_BEG);
- if (!NOTE_P (get_last_insn ()))
- emit_note (NOTE_INSN_DELETED);
+ gcc_assert (NOTE_P (get_last_insn ()));
+
parm_birth_insn = get_last_insn ();
if (current_function_profile)
#endif
}
- /* After the display initializations is where the tail-recursion label
- should go, if we end up needing one. Ensure we have a NOTE here
- since some things (like trampolines) get placed before this. */
- tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
+ /* After the display initializations is where the stack checking
+ probe should go. */
+  if (flag_stack_check)
+ stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
/* Make sure there is a line number after the function entry setup code. */
force_next_line_note ();
emit_insn (gen_rtx_USE (VOIDmode, reg));
}
-void
+static void
use_return_register (void)
{
diddle_return_value (do_use_return_reg, NULL);
GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
seq = get_insns ();
end_sequence ();
- emit_insn_before (seq, tail_recursion_reentry);
+ emit_insn_before (seq, stack_check_probe_note);
break;
}
}
clear_pending_stack_adjust ();
do_pending_stack_adjust ();
- /* @@@ This is a kludge. We want to ensure that instructions that
- may trap are not moved into the epilogue by scheduling, because
- we don't always emit unwind information for the epilogue.
- However, not all machine descriptions define a blockage insn, so
- emit an ASM_INPUT to act as one. */
- if (flag_non_call_exceptions)
- emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
-
/* Mark the end of the function body.
If control reaches this insn, the function can drop through
without returning a value. */
/* Output the label for the actual return from the function. */
emit_label (return_label);
- /* Let except.c know where it should emit the call to unregister
- the function context for sjlj exceptions. */
- if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
- sjlj_emit_function_exit_after (get_last_insn ());
+ if (USING_SJLJ_EXCEPTIONS)
+ {
+ /* Let except.c know where it should emit the call to unregister
+ the function context for sjlj exceptions. */
+ if (flag_exceptions)
+ sjlj_emit_function_exit_after (get_last_insn ());
+ }
+ else
+ {
+ /* @@@ This is a kludge. We want to ensure that instructions that
+ may trap are not moved into the epilogue by scheduling, because
+ we don't always emit unwind information for the epilogue.
+ However, not all machine descriptions define a blockage insn, so
+ emit an ASM_INPUT to act as one. */
+ if (flag_non_call_exceptions)
+ emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
+ }
+
+ /* If this is an implementation of throw, do what's necessary to
+ communicate between __builtin_eh_return and the epilogue. */
+ expand_eh_return ();
/* If scalar return value was computed in a pseudo-reg, or was a named
return value that got dumped to the stack, copy that to the hard
TREE_TYPE (decl_result),
int_size_in_bytes (TREE_TYPE (decl_result)));
}
+ /* In the case of complex integer modes smaller than a word, we'll
+ need to generate some non-trivial bitfield insertions. Do that
+ on a pseudo and not the hard register. */
+ else if (GET_CODE (decl_rtl) == CONCAT
+ && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
+ && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
+ {
+ int old_generating_concat_p;
+ rtx tmp;
+
+ old_generating_concat_p = generating_concat_p;
+ generating_concat_p = 0;
+ tmp = gen_reg_rtx (GET_MODE (decl_rtl));
+ generating_concat_p = old_generating_concat_p;
+
+ emit_move_insn (tmp, decl_rtl);
+ emit_move_insn (real_decl_rtl, tmp);
+ }
else
emit_move_insn (real_decl_rtl, decl_rtl);
}
current_function_return_rtx = outgoing;
}
- /* If this is an implementation of throw, do what's necessary to
- communicate between __builtin_eh_return and the epilogue. */
- expand_eh_return ();
-
/* Emit the actual code to clobber return register. */
{
rtx seq;
info.sp_offset));
retaddr = gen_rtx_MEM (Pmode, retaddr);
+ MEM_NOTRAP_P (retaddr) = 1;
/* If there is a pending load to the equivalent register for SP
and we reference that register, we must load our address into
fixup_fallthru_exit_predecessor. */
cfg_layout_initialize (0);
FOR_EACH_BB (cur_bb)
- if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
+ if (cur_bb->index >= NUM_FIXED_BLOCKS
+ && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
cur_bb->aux = cur_bb->next_bb;
cfg_layout_finalize ();
}
void
reset_block_changes (void)
{
- VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
- VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
+ cfun->ib_boundaries_block = VEC_alloc (tree, gc, 100);
+ VEC_quick_push (tree, cfun->ib_boundaries_block, NULL_TREE);
}
/* Record the boundary for BLOCK. */
if (!block)
return;
- last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
- VARRAY_POP (cfun->ib_boundaries_block);
+  if (!cfun->ib_boundaries_block)
+ return;
+
+ last_block = VEC_pop (tree, cfun->ib_boundaries_block);
n = get_max_uid ();
- for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
- VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);
+ for (i = VEC_length (tree, cfun->ib_boundaries_block); i < n; i++)
+ VEC_safe_push (tree, gc, cfun->ib_boundaries_block, last_block);
- VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
+ VEC_safe_push (tree, gc, cfun->ib_boundaries_block, block);
}
/* Finishes recording the block boundaries.  */
{
unsigned uid = INSN_UID (insn);
- if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
+ if (uid >= VEC_length (tree, cfun->ib_boundaries_block))
return;
- *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
+ *block = VEC_index (tree, cfun->ib_boundaries_block, uid);
}
/* Releases the ib_boundaries_block records. */
void
free_block_changes (void)
{
- cfun->ib_boundaries_block = NULL;
+ VEC_free (tree, gc, cfun->ib_boundaries_block);
}
/* Returns the name of the current function. */
}
\f
-static void
+static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
current_function_uses_only_leaf_regs
= optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
+ return 0;
+}
+
+/* Insert a TYPE into the used types hash table of CFUN. */
+static void
+used_types_insert_helper (tree type, struct function *func)
+{
+ if (type != NULL && func != NULL)
+ {
+ void **slot;
+
+ if (func->used_types_hash == NULL)
+ func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
+ htab_eq_pointer, NULL);
+ slot = htab_find_slot (func->used_types_hash, type, INSERT);
+ if (*slot == NULL)
+ *slot = type;
+ }
+}
+
+/* Given a type, insert it into the used types hash table in cfun.  */
+void
+used_types_insert (tree t)
+{
+ while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
+ t = TREE_TYPE (t);
+ t = TYPE_MAIN_VARIANT (t);
+ if (debug_info_level > DINFO_LEVEL_NONE)
+ used_types_insert_helper (t, cfun);
}
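+
+/* For example (illustrative), recording a use of "int **" strips both
+   pointer levels, so it is the main variant of "int" that ends up in
+   cfun->used_types_hash.  */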
struct tree_opt_pass pass_leaf_regs =