/* Expands front end tree to back end RTL for GCC.
Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
- 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
+ 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
Free Software Foundation, Inc.
This file is part of GCC.
#include "tree-gimple.h"
#include "tree-pass.h"
#include "predict.h"
+#include "vecprim.h"
#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
/* The currently compiled function. */
struct function *cfun = 0;
-DEF_VEC_I(int);
-DEF_VEC_ALLOC_I(int,heap);
-
/* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
static VEC(int,heap) *prologue;
static VEC(int,heap) *epilogue;
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
-static void reorder_fix_fragments (tree);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
static int contains (rtx, VEC(int,heap) **);
#ifdef HAVE_return
-static void emit_return_into_block (basic_block, rtx);
+static void emit_return_into_block (basic_block);
#endif
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed (rtx);
f->x_return_label = NULL;
f->x_naked_return_label = NULL;
f->x_stack_slot_list = NULL;
- f->x_tail_recursion_reentry = NULL;
+ f->x_stack_check_probe_note = NULL;
f->x_arg_pointer_save_area = NULL;
f->x_parm_birth_insn = NULL;
- f->original_arg_vector = NULL;
- f->original_decl_initial = NULL;
f->epilogue_delay_list = NULL;
}
\f
/* Return size needed for stack frame based on slots so far allocated.
This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
the caller may have to do that. */
+
HOST_WIDE_INT
get_frame_size (void)
{
return get_func_frame_size (cfun);
}
+/* Issue an error message and return TRUE if frame OFFSET overflows in
+ the signed target pointer arithmetics for function FUNC. Otherwise
+ return FALSE. */
+
+bool
+frame_offset_overflow (HOST_WIDE_INT offset, tree func)
+{
+ unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
+
+ if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
+ /* Leave room for the fixed part of the frame. */
+ - 64 * UNITS_PER_WORD)
+ {
+ error ("%Jtotal size of local objects too large", func);
+ return TRUE;
+ }
+
+ return FALSE;
+}
+
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
with machine mode MODE.
function->x_stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
- /* Try to detect frame size overflows on native platforms. */
-#if BITS_PER_WORD >= 32
- if ((FRAME_GROWS_DOWNWARD
- ? (unsigned HOST_WIDE_INT) -function->x_frame_offset
- : (unsigned HOST_WIDE_INT) function->x_frame_offset)
- > ((unsigned HOST_WIDE_INT) 1 << (BITS_PER_WORD - 1))
- /* Leave room for the fixed part of the frame. */
- - 64 * UNITS_PER_WORD)
- {
- error ("%Jtotal size of local objects too large", function->decl);
- /* Avoid duplicate error messages as much as possible. */
- function->x_frame_offset = 0;
- }
-#endif
+ if (frame_offset_overflow (function->x_frame_offset, function->decl))
+ function->x_frame_offset = 0;
return x;
}
static struct temp_slot **
temp_slots_at_level (int level)
{
+ if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
+ {
+ size_t old_length = VEC_length (temp_slot_p, used_temp_slots);
+ temp_slot_p *p;
- if (!used_temp_slots)
- VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");
-
- while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
- VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);
+ VEC_safe_grow (temp_slot_p, gc, used_temp_slots, level + 1);
+ p = VEC_address (temp_slot_p, used_temp_slots);
+ memset (&p[old_length], 0,
+ sizeof (temp_slot_p) * (level + 1 - old_length));
+ }
- return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
+ return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}
/* Returns the maximal temporary slot level. */
if (!used_temp_slots)
return -1;
- return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
+ return VEC_length (temp_slot_p, used_temp_slots) - 1;
}
/* Moves temporary slot TEMP to LEVEL. */
if (mode == BLKmode || memory_required)
{
HOST_WIDE_INT size = int_size_in_bytes (type);
- tree size_tree;
rtx tmp;
/* Zero sized arrays are GNU C extension. Set size to 1 to avoid
size = 1;
/* Unfortunately, we don't yet know how to allocate variable-sized
- temporaries. However, sometimes we have a fixed upper limit on
- the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
- instead. This is the case for Chill variable-sized strings. */
- if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
- && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
- && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
- size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
-
- /* If we still haven't been able to get a size, see if the language
- can compute a maximum size. */
- if (size == -1
- && (size_tree = lang_hooks.types.max_size (type)) != 0
- && host_integerp (size_tree, 1))
- size = tree_low_cst (size_tree, 1);
+ temporaries. However, sometimes we can find a fixed upper limit on
+ the size, so try that instead. */
+ else if (size == -1)
+ size = max_int_size_in_bytes (type);
/* The size of the temporary may be too large to fit into an integer. */
/* ??? Not sure this should happen except for user silliness, so limit
Validate the new value vs the insn predicate. Note that
asm insns will have insn_code -1 here. */
if (!safe_insn_predicate (insn_code, i, x))
- x = force_reg (insn_data[insn_code].operand[i].mode, x);
+ {
+ start_sequence ();
+ x = force_reg (insn_data[insn_code].operand[i].mode, x);
+ seq = get_insns ();
+ end_sequence ();
+ if (seq)
+ emit_insn_before (seq, insn);
+ }
*recog_data.operand_loc[i] = recog_data.operand[i] = x;
any_change = true;
/* Propagate operand changes into the duplicates. */
for (i = 0; i < recog_data.n_dups; ++i)
*recog_data.dup_loc[i]
- = recog_data.operand[(unsigned)recog_data.dup_num[i]];
+ = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
/* Force re-recognition of the instruction for validation. */
INSN_CODE (insn) = -1;
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
tree t = *tp;
- if (! EXPR_P (t))
+ if (! EXPR_P (t) && ! GIMPLE_STMT_P (t))
{
*walk_subtrees = 0;
if (DECL_P (t) && DECL_RTL_SET_P (t))
tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
+ /* DECL node associated with FNTYPE when relevant, which we might need to
+ check for by-invisible-reference returns, typically for CALL_EXPR input
+ EXPressions. */
+ tree fndecl = NULL_TREE;
+
if (fntype)
switch (TREE_CODE (fntype))
{
case CALL_EXPR:
- fntype = get_callee_fndecl (fntype);
- fntype = fntype ? TREE_TYPE (fntype) : 0;
+ fndecl = get_callee_fndecl (fntype);
+ fntype = fndecl ? TREE_TYPE (fndecl) : 0;
break;
case FUNCTION_DECL:
- fntype = TREE_TYPE (fntype);
+ fndecl = fntype;
+ fntype = TREE_TYPE (fndecl);
break;
case FUNCTION_TYPE:
case METHOD_TYPE:
if (TREE_CODE (type) == VOID_TYPE)
return 0;
+
/* If the front end has decided that this needs to be passed by
reference, do so. */
if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
&& DECL_BY_REFERENCE (exp))
return 1;
+
+ /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
+ called function RESULT_DECL, meaning the function returns in memory by
+ invisible reference. This check lets front-ends not set TREE_ADDRESSABLE
+ on the function type, which used to be the way to request such a return
+ mechanism but might now be causing troubles at gimplification time if
+ temporaries with the function type need to be created. */
+ if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
+ && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
+ return 1;
+
if (targetm.calls.return_in_memory (type, fntype))
return 1;
/* Types that are TREE_ADDRESSABLE must be constructed in memory,
t = built_in_decls[BUILT_IN_ALLOCA];
t = build_function_call_expr (t, args);
t = fold_convert (ptr_type, t);
- t = build2 (MODIFY_EXPR, void_type_node, addr, t);
+ t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
gimplify_and_add (t, &stmts);
}
- t = build2 (MODIFY_EXPR, void_type_node, local, parm);
+ t = build2 (GIMPLE_MODIFY_STMT, void_type_node, local, parm);
gimplify_and_add (t, &stmts);
SET_DECL_VALUE_EXPR (parm, local);
reorder_blocks_1 (get_insns (), block, &block_stack);
BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
- /* Remove deleted blocks from the block fragment chains. */
- reorder_fix_fragments (block);
-
VEC_free (tree, heap, block_stack);
}
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
{
tree block = NOTE_BLOCK (insn);
+ tree origin;
+
+ origin = (BLOCK_FRAGMENT_ORIGIN (block)
+ ? BLOCK_FRAGMENT_ORIGIN (block)
+ : block);
/* If we have seen this block before, that means it now
spans multiple address regions. Create a new fragment. */
if (TREE_ASM_WRITTEN (block))
{
tree new_block = copy_node (block);
- tree origin;
- origin = (BLOCK_FRAGMENT_ORIGIN (block)
- ? BLOCK_FRAGMENT_ORIGIN (block)
- : block);
BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
BLOCK_FRAGMENT_CHAIN (new_block)
= BLOCK_FRAGMENT_CHAIN (origin);
will cause infinite recursion. */
if (block != current_block)
{
+ if (block != origin)
+ gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
+
BLOCK_SUPERCONTEXT (block) = current_block;
BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
BLOCK_SUBBLOCKS (current_block) = block;
- current_block = block;
+ current_block = origin;
}
VEC_safe_push (tree, heap, *p_block_stack, block);
}
}
}
-/* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
- appears in the block tree, select one of the fragments to become
- the new origin block. */
-
-static void
-reorder_fix_fragments (tree block)
-{
- while (block)
- {
- tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
- tree new_origin = NULL_TREE;
-
- if (dup_origin)
- {
- if (! TREE_ASM_WRITTEN (dup_origin))
- {
- new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
-
- /* Find the first of the remaining fragments. There must
- be at least one -- the current block. */
- while (! TREE_ASM_WRITTEN (new_origin))
- new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
- BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
- }
- }
- else if (! dup_origin)
- new_origin = block;
-
- /* Re-root the rest of the fragments to the new origin. In the
- case that DUP_ORIGIN was null, that means BLOCK was the origin
- of a chain of fragments and we want to remove those fragments
- that didn't make it to the output. */
- if (new_origin)
- {
- tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
- tree chain = *pp;
-
- while (chain)
- {
- if (TREE_ASM_WRITTEN (chain))
- {
- BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
- *pp = chain;
- pp = &BLOCK_FRAGMENT_CHAIN (chain);
- }
- chain = BLOCK_FRAGMENT_CHAIN (chain);
- }
- *pp = NULL_TREE;
- }
-
- reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
- block = BLOCK_CHAIN (block);
- }
-}
-
/* Reverse the order of elements in the chain T of blocks,
and return the new head of the chain (old last element). */
else
#endif
{
- rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
+ rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
/* Expect to be passed the address of a place to store the value.
If it is passed as an argument, assign_parms will take care of
it. */
as opposed to parm setup. */
emit_note (NOTE_INSN_FUNCTION_BEG);
- if (!NOTE_P (get_last_insn ()))
- emit_note (NOTE_INSN_DELETED);
+ gcc_assert (NOTE_P (get_last_insn ()));
+
parm_birth_insn = get_last_insn ();
if (current_function_profile)
#endif
}
- /* After the display initializations is where the tail-recursion label
- should go, if we end up needing one. Ensure we have a NOTE here
- since some things (like trampolines) get placed before this. */
- tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
+ /* After the display initializations is where the stack checking
+ probe should go. */
+ if(flag_stack_check)
+ stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
/* Make sure there is a line number after the function entry setup code. */
force_next_line_note ();
GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
seq = get_insns ();
end_sequence ();
- emit_insn_before (seq, tail_recursion_reentry);
+ emit_insn_before (seq, stack_check_probe_note);
break;
}
}
clear_pending_stack_adjust ();
do_pending_stack_adjust ();
- /* Mark the end of the function body.
- If control reaches this insn, the function can drop through
- without returning a value. */
- emit_note (NOTE_INSN_FUNCTION_END);
-
- /* Must mark the last line number note in the function, so that the test
- coverage code can avoid counting the last line twice. This just tells
- the code to ignore the immediately following line note, since there
- already exists a copy of this note somewhere above. This line number
- note is still needed for debugging though, so we can't delete it. */
- if (flag_test_coverage)
- emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
-
/* Output a linenumber for the end of the function.
SDB depends on this. */
force_next_line_note ();
block_for_insn appropriately. */
static void
-emit_return_into_block (basic_block bb, rtx line_note)
+emit_return_into_block (basic_block bb)
{
emit_jump_insn_after (gen_return (), BB_END (bb));
- if (line_note)
- emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
}
#endif /* HAVE_return */
if (BB_HEAD (last) == label && LABEL_P (label))
{
edge_iterator ei2;
- rtx epilogue_line_note = NULL_RTX;
-
- /* Locate the line number associated with the closing brace,
- if we can find one. */
- for (seq = get_last_insn ();
- seq && ! active_insn_p (seq);
- seq = PREV_INSN (seq))
- if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
- {
- epilogue_line_note = seq;
- break;
- }
for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
{
with a simple return instruction. */
if (simplejump_p (jump))
{
- emit_return_into_block (bb, epilogue_line_note);
+ emit_return_into_block (bb);
delete_insn (jump);
}
this is still reachable will be determined later. */
emit_barrier_after (BB_END (last));
- emit_return_into_block (last, epilogue_line_note);
+ emit_return_into_block (last);
epilogue_end = BB_END (last);
single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
goto epilogue_done;
}
#endif
-#ifdef HAVE_prologue
- /* This is probably all useless now that we use locators. */
- if (prologue_end)
- {
- rtx insn, prev;
-
- /* GDB handles `break f' by setting a breakpoint on the first
- line note after the prologue. Which means (1) that if
- there are line number notes before where we inserted the
- prologue we should move them, and (2) we should generate a
- note before the end of the first basic block, if there isn't
- one already there.
-
- ??? This behavior is completely broken when dealing with
- multiple entry functions. We simply place the note always
- into first basic block and let alternate entry points
- to be missed.
- */
-
- for (insn = prologue_end; insn; insn = prev)
- {
- prev = PREV_INSN (insn);
- if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
- {
- /* Note that we cannot reorder the first insn in the
- chain, since rest_of_compilation relies on that
- remaining constant. */
- if (prev == NULL)
- break;
- reorder_insns (insn, insn, prologue_end);
- }
- }
-
- /* Find the last line number note in the first block. */
- for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
- insn != prologue_end && insn;
- insn = PREV_INSN (insn))
- if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
- break;
-
- /* If we didn't find one, make a copy of the first line number
- we run across. */
- if (! insn)
- {
- for (insn = next_active_insn (prologue_end);
- insn;
- insn = PREV_INSN (insn))
- if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
- {
- emit_note_copy_after (insn, prologue_end);
- break;
- }
- }
- }
-#endif
#ifdef HAVE_epilogue
if (epilogue_end)
{
/* Similarly, move any line notes that appear after the epilogue.
There is no need, however, to be quite so anal about the existence
- of such a note. Also move the NOTE_INSN_FUNCTION_END and (possibly)
+ of such a note. Also possibly move
NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
info generation. */
for (insn = epilogue_end; insn; insn = next)
{
next = NEXT_INSN (insn);
if (NOTE_P (insn)
- && (NOTE_LINE_NUMBER (insn) > 0
- || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
- || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
+ && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG))
reorder_insns (insn, insn, PREV_INSN (epilogue_end));
}
}
void
reset_block_changes (void)
{
- VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
- VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
+ cfun->ib_boundaries_block = VEC_alloc (tree, gc, 100);
+ VEC_quick_push (tree, cfun->ib_boundaries_block, NULL_TREE);
}
/* Record the boundary for BLOCK. */
if(!cfun->ib_boundaries_block)
return;
- last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
- VARRAY_POP (cfun->ib_boundaries_block);
+ last_block = VEC_pop (tree, cfun->ib_boundaries_block);
n = get_max_uid ();
- for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
- VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);
+ for (i = VEC_length (tree, cfun->ib_boundaries_block); i < n; i++)
+ VEC_safe_push (tree, gc, cfun->ib_boundaries_block, last_block);
- VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
+ VEC_safe_push (tree, gc, cfun->ib_boundaries_block, block);
}
/* Finishes record of boundaries. */
-void finalize_block_changes (void)
+void
+finalize_block_changes (void)
{
record_block_change (DECL_INITIAL (current_function_decl));
}
{
unsigned uid = INSN_UID (insn);
- if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
+ if (uid >= VEC_length (tree, cfun->ib_boundaries_block))
return;
- *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
+ *block = VEC_index (tree, cfun->ib_boundaries_block, uid);
}
/* Releases the ib_boundaries_block records. */
void
free_block_changes (void)
{
  /* Release the storage behind the uid -> block boundary records.  */
  VEC_free (tree, gc, cfun->ib_boundaries_block);
}
/* Returns the name of the current function. */
return 0;
}
+/* Insert a TYPE into the used types hash table of CFUN. */
+static void
+used_types_insert_helper (tree type, struct function *func)
+{
+ if (type != NULL && func != NULL)
+ {
+ void **slot;
+
+ if (func->used_types_hash == NULL)
+ func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
+ htab_eq_pointer, NULL);
+ slot = htab_find_slot (func->used_types_hash, type, INSERT);
+ if (*slot == NULL)
+ *slot = type;
+ }
+}
+
+/* Given a type, insert it into the used hash table in cfun. */
+void
+used_types_insert (tree t)
+{
+ while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
+ t = TREE_TYPE (t);
+ t = TYPE_MAIN_VARIANT (t);
+ if (debug_info_level > DINFO_LEVEL_NONE)
+ used_types_insert_helper (t, cfun);
+}
+
struct tree_opt_pass pass_leaf_regs =
{
NULL, /* name */