-/* Expands front end tree to back end RTL for GNU C-Compiler
+/* Expands front end tree to back end RTL for GCC.
Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
- 1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
+ 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
This file is part of GCC.
#include "except.h"
#include "function.h"
#include "expr.h"
+#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif
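+/* Nonzero if assign_stack_local_1 should round the frame offset to the
+   requested alignment.  The default is to always honor such requests; a
+   port that does its own stack alignment may define this to 0.  */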
+#ifndef STACK_ALIGNMENT_NEEDED
+#define STACK_ALIGNMENT_NEEDED 1
+#endif
+
/* Some systems use __main in a way incompatible with its use in gcc; in these
cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
give the same symbol without quotes for an alternative entry point. You
post-instantiation libcalls. */
int virtuals_instantiated;
+/* Nonzero if at least one trampoline has been created. */
+int trampolines_created;
+
/* Assign unique numbers to labels generated for profiling, debugging, etc. */
static GTY(()) int funcdef_no;
/* These variables hold pointers to functions to create and destroy
target specific, per-function data structures. */
-struct machine_function * (*init_machine_status) PARAMS ((void));
+struct machine_function * (*init_machine_status) (void);
/* The FUNCTION_DECL for an inline function currently being expanded. */
tree inline_function_decl;
/* Forward declarations. */
-static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
- int, struct function *));
-static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
-static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
- enum machine_mode, enum machine_mode,
- int, unsigned int, int,
- htab_t));
-static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
- enum machine_mode,
- htab_t));
-static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int, rtx,
- htab_t));
+static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
+ struct function *);
+static struct temp_slot *find_temp_slot_from_address (rtx);
+static void put_reg_into_stack (struct function *, rtx, tree, enum machine_mode,
+ enum machine_mode, int, unsigned int, int, htab_t);
+static void schedule_fixup_var_refs (struct function *, rtx, tree, enum machine_mode,
+ htab_t);
+static void fixup_var_refs (rtx, enum machine_mode, int, rtx, htab_t);
static struct fixup_replacement
- *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
-static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
- int, int, rtx));
-static void fixup_var_refs_insns_with_hash
- PARAMS ((htab_t, rtx,
- enum machine_mode, int, rtx));
-static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
- int, int, rtx));
-static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
- struct fixup_replacement **, rtx));
-static rtx fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode, int));
-static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode,
- int));
-static rtx fixup_stack_1 PARAMS ((rtx, rtx));
-static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
-static void instantiate_decls PARAMS ((tree, int));
-static void instantiate_decls_1 PARAMS ((tree, int));
-static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
-static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
-static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
-static void delete_handlers PARAMS ((void));
-static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
- struct args_size *));
-static void pad_below PARAMS ((struct args_size *, enum machine_mode,
- tree));
-static rtx round_trampoline_addr PARAMS ((rtx));
-static rtx adjust_trampoline_addr PARAMS ((rtx));
-static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
-static void reorder_blocks_0 PARAMS ((tree));
-static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
-static void reorder_fix_fragments PARAMS ((tree));
-static tree blocks_nreverse PARAMS ((tree));
-static int all_blocks PARAMS ((tree, tree *));
-static tree *get_block_vector PARAMS ((tree, int *));
-extern tree debug_find_var_in_block_tree PARAMS ((tree, tree));
+ *find_fixup_replacement (struct fixup_replacement **, rtx);
+static void fixup_var_refs_insns (rtx, rtx, enum machine_mode, int, int, rtx);
+static void fixup_var_refs_insns_with_hash (htab_t, rtx, enum machine_mode, int, rtx);
+static void fixup_var_refs_insn (rtx, rtx, enum machine_mode, int, int, rtx);
+static void fixup_var_refs_1 (rtx, enum machine_mode, rtx *, rtx,
+ struct fixup_replacement **, rtx);
+static rtx fixup_memory_subreg (rtx, rtx, enum machine_mode, int);
+static rtx walk_fixup_memory_subreg (rtx, rtx, enum machine_mode, int);
+static rtx fixup_stack_1 (rtx, rtx);
+static void optimize_bit_field (rtx, rtx, rtx *);
+static void instantiate_decls (tree, int);
+static void instantiate_decls_1 (tree, int);
+static void instantiate_decl (rtx, HOST_WIDE_INT, int);
+static rtx instantiate_new_reg (rtx, HOST_WIDE_INT *);
+static int instantiate_virtual_regs_1 (rtx *, rtx, int);
+static void delete_handlers (void);
+static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
+static void pad_below (struct args_size *, enum machine_mode, tree);
+static rtx round_trampoline_addr (rtx);
+static rtx adjust_trampoline_addr (rtx);
+static tree *identify_blocks_1 (rtx, tree *, tree *, tree *);
+static void reorder_blocks_0 (tree);
+static void reorder_blocks_1 (rtx, tree, varray_type *);
+static void reorder_fix_fragments (tree);
+static tree blocks_nreverse (tree);
+static int all_blocks (tree, tree *);
+static tree *get_block_vector (tree, int *);
+extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
can always export `prologue_epilogue_contains'. */
-static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
-static int contains PARAMS ((rtx, varray_type));
+static void record_insns (rtx, varray_type *) ATTRIBUTE_UNUSED;
+static int contains (rtx, varray_type);
#ifdef HAVE_return
-static void emit_return_into_block PARAMS ((basic_block, rtx));
+static void emit_return_into_block (basic_block, rtx);
#endif
-static void put_addressof_into_stack PARAMS ((rtx, htab_t));
-static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
- htab_t));
-static void purge_single_hard_subreg_set PARAMS ((rtx));
+static void put_addressof_into_stack (rtx, htab_t);
+static bool purge_addressof_1 (rtx *, rtx, int, int, int, htab_t);
+static void purge_single_hard_subreg_set (rtx);
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
-static rtx keep_stack_depressed PARAMS ((rtx));
+static rtx keep_stack_depressed (rtx);
#endif
-static int is_addressof PARAMS ((rtx *, void *));
-static hashval_t insns_for_mem_hash PARAMS ((const void *));
-static int insns_for_mem_comp PARAMS ((const void *, const void *));
-static int insns_for_mem_walk PARAMS ((rtx *, void *));
-static void compute_insns_for_mem PARAMS ((rtx, rtx, htab_t));
-static void prepare_function_start PARAMS ((void));
-static void do_clobber_return_reg PARAMS ((rtx, void *));
-static void do_use_return_reg PARAMS ((rtx, void *));
+static int is_addressof (rtx *, void *);
+static hashval_t insns_for_mem_hash (const void *);
+static int insns_for_mem_comp (const void *, const void *);
+static int insns_for_mem_walk (rtx *, void *);
+static void compute_insns_for_mem (rtx, rtx, htab_t);
+static void prepare_function_start (void);
+static void do_clobber_return_reg (rtx, void *);
+static void do_use_return_reg (rtx, void *);
+static void instantiate_virtual_regs_lossage (rtx);
+static tree split_complex_args (tree);
+static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
\f
/* Pointer to chain of `struct function' for containing functions. */
static GTY(()) struct function *outer_function_chain;
+/* List of insns that were postponed by purge_addressof_1. */
+static rtx postponed_insns;
+
/* Given a function decl for a containing function,
return the `struct function' for it. */
struct function *
-find_function_data (decl)
- tree decl;
+find_function_data (tree decl)
{
struct function *p;
variables. */
void
-push_function_context_to (context)
- tree context;
+push_function_context_to (tree context)
{
struct function *p;
}
void
-push_function_context ()
+push_function_context (void)
{
push_function_context_to (current_function_decl);
}
This function is called from language-specific code. */
void
-pop_function_context_from (context)
- tree context ATTRIBUTE_UNUSED;
+pop_function_context_from (tree context ATTRIBUTE_UNUSED)
{
struct function *p = outer_function_chain;
struct var_refs_queue *queue;
}
void
-pop_function_context ()
+pop_function_context (void)
{
pop_function_context_from (current_function_decl);
}
garbage collection reclaim the memory. */
void
-free_after_parsing (f)
- struct function *f;
+free_after_parsing (struct function *f)
{
/* f->expr->forced_labels is used by code generation. */
/* f->emit->regno_reg_rtx is used by code generation. */
reclaim the memory. */
void
-free_after_compilation (f)
- struct function *f;
+free_after_compilation (struct function *f)
{
f->eh = NULL;
f->expr = NULL;
f->x_nonlocal_goto_stack_level = NULL;
f->x_cleanup_label = NULL;
f->x_return_label = NULL;
+ f->computed_goto_common_label = NULL;
+ f->computed_goto_common_reg = NULL;
f->x_save_expr_regs = NULL;
f->x_stack_slot_list = NULL;
f->x_rtl_expr_chain = NULL;
the caller may have to do that. */
HOST_WIDE_INT
-get_func_frame_size (f)
- struct function *f;
+get_func_frame_size (struct function *f)
{
#ifdef FRAME_GROWS_DOWNWARD
return -f->x_frame_offset;
This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
the caller may have to do that. */
HOST_WIDE_INT
-get_frame_size ()
+get_frame_size (void)
{
return get_func_frame_size (cfun);
}
FUNCTION specifies the function to allocate in. */
static rtx
-assign_stack_local_1 (mode, size, align, function)
- enum machine_mode mode;
- HOST_WIDE_INT size;
- int align;
- struct function *function;
+assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
+ struct function *function)
{
rtx x, addr;
int bigend_correction = 0;
frame_off = STARTING_FRAME_OFFSET % frame_alignment;
frame_phase = frame_off ? frame_alignment - frame_off : 0;
- /* Round frame offset to that alignment.
- We must be careful here, since FRAME_OFFSET might be negative and
- division with a negative dividend isn't as well defined as we might
- like. So we instead assume that ALIGNMENT is a power of two and
- use logical operations which are unambiguous. */
+ /* Round the frame offset to the specified alignment. The default is
+ to always honor requests to align the stack but a port may choose to
+ do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
+ if (STACK_ALIGNMENT_NEEDED
+ || mode != BLKmode
+ || size != 0)
+ {
+ /* We must be careful here, since FRAME_OFFSET might be negative and
+ division with a negative dividend isn't as well defined as we might
+ like. So we instead assume that ALIGNMENT is a power of two and
+ use logical operations which are unambiguous. */
#ifdef FRAME_GROWS_DOWNWARD
- function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment) + frame_phase;
+ function->x_frame_offset
+ = (FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment)
+ + frame_phase);
#else
- function->x_frame_offset = CEIL_ROUND (function->x_frame_offset - frame_phase, alignment) + frame_phase;
+ function->x_frame_offset
+ = (CEIL_ROUND (function->x_frame_offset - frame_phase, alignment)
+ + frame_phase);
#endif
+ }
/* On a big-endian machine, if we are allocating more space than we will use,
use the least significant bytes of those that are allocated. */
address relative to the frame pointer. */
if (function == cfun && virtuals_instantiated)
addr = plus_constant (frame_pointer_rtx,
+ trunc_int_for_mode
(frame_offset + bigend_correction
- + STARTING_FRAME_OFFSET));
+ + STARTING_FRAME_OFFSET, Pmode));
else
addr = plus_constant (virtual_stack_vars_rtx,
- function->x_frame_offset + bigend_correction);
+ trunc_int_for_mode
+ (function->x_frame_offset + bigend_correction,
+ Pmode));
#ifndef FRAME_GROWS_DOWNWARD
function->x_frame_offset += size;
current function. */
rtx
-assign_stack_local (mode, size, align)
- enum machine_mode mode;
- HOST_WIDE_INT size;
- int align;
+assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
return assign_stack_local_1 (mode, size, align, cfun);
}
TYPE is the type that will be used for the stack slot. */
rtx
-assign_stack_temp_for_type (mode, size, keep, type)
- enum machine_mode mode;
- HOST_WIDE_INT size;
- int keep;
- tree type;
+assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, int keep,
+ tree type)
{
unsigned int align;
struct temp_slot *p, *best_p = 0;
if (best_p->size - rounded_size >= alignment)
{
- p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
+ p = ggc_alloc (sizeof (struct temp_slot));
p->in_use = p->addr_taken = 0;
p->size = best_p->size - rounded_size;
p->base_offset = best_p->base_offset + rounded_size;
{
HOST_WIDE_INT frame_offset_old = frame_offset;
- p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
+ p = ggc_alloc (sizeof (struct temp_slot));
/* We are passing an explicit alignment request to assign_stack_local.
One side effect of that is assign_stack_local will not round SIZE
/* If a type is specified, set the relevant flags. */
if (type != 0)
{
- RTX_UNCHANGING_P (slot) = (lang_hooks.honor_readonly
+ RTX_UNCHANGING_P (slot) = (lang_hooks.honor_readonly
&& TYPE_READONLY (type));
MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
reuse. First three arguments are same as in preceding function. */
rtx
-assign_stack_temp (mode, size, keep)
- enum machine_mode mode;
- HOST_WIDE_INT size;
- int keep;
+assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
to wider modes. */
rtx
-assign_temp (type_or_decl, keep, memory_required, dont_promote)
- tree type_or_decl;
- int keep;
- int memory_required;
- int dont_promote ATTRIBUTE_UNUSED;
+assign_temp (tree type_or_decl, int keep, int memory_required,
+ int dont_promote ATTRIBUTE_UNUSED)
{
tree type, decl;
enum machine_mode mode;
if (decl && size == -1
&& TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
{
- error_with_decl (decl, "size of variable `%s' is too large");
+ error ("%Hsize of variable '%D' is too large",
+ &DECL_SOURCE_LOCATION (decl), decl);
size = 1;
}
problems in this case. */
void
-combine_temp_slots ()
+combine_temp_slots (void)
{
struct temp_slot *p, *q;
struct temp_slot *prev_p, *prev_q;
/* Find the temp slot corresponding to the object at address X. */
static struct temp_slot *
-find_temp_slot_from_address (x)
- rtx x;
+find_temp_slot_from_address (rtx x)
{
struct temp_slot *p;
rtx next;
that previously was known by OLD. */
void
-update_temp_slot_address (old, new)
- rtx old, new;
+update_temp_slot_address (rtx old, rtx new)
{
struct temp_slot *p;
address was taken. */
void
-mark_temp_addr_taken (x)
- rtx x;
+mark_temp_addr_taken (rtx x)
{
struct temp_slot *p;
returns a value in memory. */
void
-preserve_temp_slots (x)
- rtx x;
+preserve_temp_slots (rtx x)
{
struct temp_slot *p = 0;
RTL_EXPR. */
void
-preserve_rtl_expr_result (x)
- rtx x;
+preserve_rtl_expr_result (rtx x)
{
struct temp_slot *p;
worthwhile. */
void
-free_temp_slots ()
+free_temp_slots (void)
{
struct temp_slot *p;
/* Free all temporary slots used in T, an RTL_EXPR node. */
void
-free_temps_for_rtl_expr (t)
- tree t;
+free_temps_for_rtl_expr (tree t)
{
struct temp_slot *p;
for reuse until the current level is exited. */
void
-mark_all_temps_used ()
+mark_all_temps_used (void)
{
struct temp_slot *p;
/* Push deeper into the nesting level for stack temporaries. */
void
-push_temp_slots ()
+push_temp_slots (void)
{
temp_slot_level++;
}
are freed. */
void
-pop_temp_slots ()
+pop_temp_slots (void)
{
struct temp_slot *p;
/* Initialize temporary slots. */
void
-init_temp_slots ()
+init_temp_slots (void)
{
/* We have not allocated any temporaries yet. */
temp_slots = 0;
target_temp_slot_level = 0;
}
\f
-/* Retroactively move an auto variable from a register to a stack slot.
- This is done when an address-reference to the variable is seen. */
+/* Retroactively move an auto variable from a register to a stack
+ slot. This is done when an address-reference to the variable is
+ seen. If RESCAN is true, all previously emitted instructions are
+ examined and modified to handle the fact that DECL is now
+ addressable. */
void
-put_var_into_stack (decl)
- tree decl;
+put_var_into_stack (tree decl, int rescan)
{
rtx reg;
enum machine_mode promoted_mode, decl_mode;
to put things in the stack for the sake of setjmp, try to keep it
in a register until we know we actually need the address. */
if (can_use_addressof)
- gen_mem_addressof (reg, decl);
+ gen_mem_addressof (reg, decl, rescan);
else
put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
decl_mode, volatilep, 0, usedp, 0);
/* Prevent sharing of rtl that might lose. */
if (GET_CODE (XEXP (reg, 0)) == PLUS)
XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
- if (usedp)
+ if (usedp && rescan)
{
schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
promoted_mode, 0);
USED_P is nonzero if this reg might have already been used in an insn. */
static void
-put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
- original_regno, used_p, ht)
- struct function *function;
- rtx reg;
- tree type;
- enum machine_mode promoted_mode, decl_mode;
- int volatile_p;
- unsigned int original_regno;
- int used_p;
- htab_t ht;
+put_reg_into_stack (struct function *function, rtx reg, tree type,
+ enum machine_mode promoted_mode, enum machine_mode decl_mode,
+ int volatile_p, unsigned int original_regno, int used_p, htab_t ht)
{
struct function *func = function ? function : cfun;
rtx new = 0;
See function above for meaning of arguments. */
static void
-schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
- struct function *function;
- rtx reg;
- tree type;
- enum machine_mode promoted_mode;
- htab_t ht;
+schedule_fixup_var_refs (struct function *function, rtx reg, tree type,
+ enum machine_mode promoted_mode, htab_t ht)
{
int unsigned_p = type ? TREE_UNSIGNED (type) : 0;
{
struct var_refs_queue *temp;
- temp
- = (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
+ temp = ggc_alloc (sizeof (struct var_refs_queue));
temp->modified = reg;
temp->promoted_mode = promoted_mode;
temp->unsignedp = unsigned_p;
}
\f
static void
-fixup_var_refs (var, promoted_mode, unsignedp, may_share, ht)
- rtx var;
- enum machine_mode promoted_mode;
- int unsignedp;
- htab_t ht;
- rtx may_share;
+fixup_var_refs (rtx var, enum machine_mode promoted_mode, int unsignedp,
+ rtx may_share, htab_t ht)
{
tree pending;
rtx first_insn = get_insns ();
value is equal to X. Allocate a new structure if no such entry exists. */
static struct fixup_replacement *
-find_fixup_replacement (replacements, x)
- struct fixup_replacement **replacements;
- rtx x;
+find_fixup_replacement (struct fixup_replacement **replacements, rtx x)
{
struct fixup_replacement *p;
if (p == 0)
{
- p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
+ p = xmalloc (sizeof (struct fixup_replacement));
p->old = x;
p->new = 0;
p->next = *replacements;
to be unshared or a list of them. */
static void
-fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel, may_share)
- rtx insn;
- rtx var;
- enum machine_mode promoted_mode;
- int unsignedp;
- int toplevel;
- rtx may_share;
+fixup_var_refs_insns (rtx insn, rtx var, enum machine_mode promoted_mode,
+ int unsignedp, int toplevel, rtx may_share)
{
while (insn)
{
(inside the CALL_PLACEHOLDER). */
static void
-fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp, may_share)
- htab_t ht;
- rtx var;
- enum machine_mode promoted_mode;
- int unsignedp;
- rtx may_share;
+fixup_var_refs_insns_with_hash (htab_t ht, rtx var, enum machine_mode promoted_mode,
+ int unsignedp, rtx may_share)
{
struct insns_for_mem_entry tmp;
struct insns_for_mem_entry *ime;
rtx insn_list;
tmp.key = var;
- ime = (struct insns_for_mem_entry *) htab_find (ht, &tmp);
+ ime = htab_find (ht, &tmp);
for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
if (INSN_P (XEXP (insn_list, 0)))
fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
function. */
static void
-fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel, no_share)
- rtx insn;
- rtx var;
- enum machine_mode promoted_mode;
- int unsignedp;
- int toplevel;
- rtx no_share;
+fixup_var_refs_insn (rtx insn, rtx var, enum machine_mode promoted_mode,
+ int unsignedp, int toplevel, rtx no_share)
{
rtx call_dest = 0;
rtx set, prev, prev_set;
or the SUBREG, as appropriate, to the pseudo. */
static void
-fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements, no_share)
- rtx var;
- enum machine_mode promoted_mode;
- rtx *loc;
- rtx insn;
- struct fixup_replacement **replacements;
- rtx no_share;
+fixup_var_refs_1 (rtx var, enum machine_mode promoted_mode, rtx *loc, rtx insn,
+ struct fixup_replacement **replacements, rtx no_share)
{
int i;
rtx x = *loc;
This is used for subregs found inside REG_NOTES. */
static rtx
-fixup_memory_subreg (x, insn, promoted_mode, uncritical)
- rtx x;
- rtx insn;
- enum machine_mode promoted_mode;
- int uncritical;
+fixup_memory_subreg (rtx x, rtx insn, enum machine_mode promoted_mode, int uncritical)
{
int offset;
rtx mem = SUBREG_REG (x);
fixup_memory_subreg. */
static rtx
-walk_fixup_memory_subreg (x, insn, promoted_mode, uncritical)
- rtx x;
- rtx insn;
- enum machine_mode promoted_mode;
- int uncritical;
+walk_fixup_memory_subreg (rtx x, rtx insn, enum machine_mode promoted_mode,
+ int uncritical)
{
enum rtx_code code;
const char *fmt;
Replace each such MEM rtx with a copy, to avoid clobberage. */
static rtx
-fixup_stack_1 (x, insn)
- rtx x;
- rtx insn;
+fixup_stack_1 (rtx x, rtx insn)
{
int i;
RTX_CODE code = GET_CODE (x);
is always 0.) */
static void
-optimize_bit_field (body, insn, equiv_mem)
- rtx body;
- rtx insn;
- rtx *equiv_mem;
+optimize_bit_field (rtx body, rtx insn, rtx *equiv_mem)
{
rtx bitfield;
int destflag;
#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
#endif
-/* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had its
- address taken. DECL is the decl or SAVE_EXPR for the object stored in the
- register, for later use if we do need to force REG into the stack. REG is
- overwritten by the MEM like in put_reg_into_stack. */
+/* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just
+ had its address taken. DECL is the decl or SAVE_EXPR for the
+ object stored in the register, for later use if we do need to force
+ REG into the stack. REG is overwritten by the MEM like in
+ put_reg_into_stack. RESCAN is true if previously emitted
+ instructions must be rescanned and modified now that the REG has
+ been transformed. */
rtx
-gen_mem_addressof (reg, decl)
- rtx reg;
- tree decl;
+gen_mem_addressof (rtx reg, tree decl, int rescan)
{
rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
REGNO (reg), decl);
if (DECL_P (decl) && decl_rtl == reg)
SET_DECL_RTL (decl, reg);
- if (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0))
+ if (rescan
+ && (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0)))
fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), reg, 0);
}
- else
+ else if (rescan)
fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);
return reg;
/* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
void
-flush_addressof (decl)
- tree decl;
+flush_addressof (tree decl)
{
if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
&& DECL_RTL (decl) != 0
/* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
static void
-put_addressof_into_stack (r, ht)
- rtx r;
- htab_t ht;
+put_addressof_into_stack (rtx r, htab_t ht)
{
tree decl, type;
int volatile_p, used_p;
/* Helper function for purge_addressof. See if the rtx expression at *LOC
in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
the stack. If the function returns FALSE then the replacement could not
- be made. */
+ be made. If MAY_POSTPONE is true and we would not put the ADDRESSOF
+ into the stack, postpone processing of the insn. */
static bool
-purge_addressof_1 (loc, insn, force, store, ht)
- rtx *loc;
- rtx insn;
- int force, store;
- htab_t ht;
+purge_addressof_1 (rtx *loc, rtx insn, int force, int store, int may_postpone,
+ htab_t ht)
{
rtx x;
RTX_CODE code;
memory. */
if (code == SET)
{
- result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
- result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
+ result = purge_addressof_1 (&SET_DEST (x), insn, force, 1,
+ may_postpone, ht);
+ result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0,
+ may_postpone, ht);
return result;
}
else if (code == ADDRESSOF)
return true;
start_sequence ();
- sub = force_operand (sub, NULL_RTX);
+
+ /* If SUB is a hard or virtual register, copy it into a pseudo-register.
+ Otherwise, perhaps SUB is an expression, so generate code to compute
+ it. */
+ if (GET_CODE (sub) == REG && REGNO (sub) <= LAST_VIRTUAL_REGISTER)
+ sub = copy_to_reg (sub);
+ else
+ sub = force_operand (sub, NULL_RTX);
+
if (! validate_change (insn, loc, sub, 0)
&& ! validate_replace_rtx (x, sub, insn))
abort ();
{
int size_x, size_sub;
+ if (may_postpone)
+ {
+ /* Postpone for now, so that we do not emit bitfield arithmetic
+ unless there is some benefit from it. */
+ if (!postponed_insns || XEXP (postponed_insns, 0) != insn)
+ postponed_insns = alloc_INSN_LIST (insn, postponed_insns);
+ return true;
+ }
+
if (!insn)
{
/* When processing REG_NOTES look at the list of
return true;
}
+ /* When we are processing the REG_NOTES of the last instruction
+ of a libcall, there will typically be no replacements
+ for that insn; the replacements happened before, in piecemeal
+ fashion. OTOH we are not interested in the details of
+ this for the REG_EQUAL note; we want to know the big picture,
+ which can be succinctly described with a simple SUBREG.
+ Note that removing the REG_EQUAL note is not an option
+ on the last insn of a libcall, so we must do a replacement. */
+ if (! purge_addressof_replacements
+ && ! purge_bitfield_addressof_replacements)
+ {
+ /* In compile/990107-1.c:7 compiled at -O1 -m1 for sh-elf,
+ we got
+ (mem:DI (addressof:SI (reg/v:DF 160) 159 0x401c8510)
+ [0 S8 A32]), which can be expressed with a simple
+ same-size subreg */
+ if ((GET_MODE_SIZE (GET_MODE (x))
+ == GET_MODE_SIZE (GET_MODE (sub)))
+ /* Again, invalid pointer casts (as in
+ compile/990203-1.c) can require paradoxical
+ subregs. */
+ || (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
+ && (GET_MODE_SIZE (GET_MODE (x))
+ > GET_MODE_SIZE (GET_MODE (sub)))))
+ {
+ *loc = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
+ return true;
+ }
+ /* ??? Are there other cases we should handle? */
+ }
/* Sometimes we may not be able to find the replacement. For
example when the original insn was a MEM in a wider mode,
and the note is part of a sign extension of a narrowed
size_x = GET_MODE_BITSIZE (GET_MODE (x));
size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
+ /* Do not frob unchanging MEMs. If a later reference forces the
+ pseudo to the stack, we can wind up with multiple writes to
+ an unchanging memory, which is invalid. */
+ if (RTX_UNCHANGING_P (x) && size_x != size_sub)
+ ;
+
/* Don't even consider working with paradoxical subregs,
or the moral equivalent seen here. */
- if (size_x <= size_sub
- && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
+ else if (size_x <= size_sub
+ && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
{
/* Do a bitfield insertion to mirror what would happen
in memory. */
for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
{
if (*fmt == 'e')
- result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
+ result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0,
+ may_postpone, ht);
else if (*fmt == 'E')
for (j = 0; j < XVECLEN (x, i); j++)
- result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
+ result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0,
+ may_postpone, ht);
}
return result;
/* Return a hash value for K, a REG. */
static hashval_t
-insns_for_mem_hash (k)
- const void * k;
+insns_for_mem_hash (const void *k)
{
/* Use the address of the key for the hash value. */
struct insns_for_mem_entry *m = (struct insns_for_mem_entry *) k;
/* Return nonzero if K1 and K2 (two REGs) are the same. */
static int
-insns_for_mem_comp (k1, k2)
- const void * k1;
- const void * k2;
+insns_for_mem_comp (const void *k1, const void *k2)
{
struct insns_for_mem_entry *m1 = (struct insns_for_mem_entry *) k1;
struct insns_for_mem_entry *m2 = (struct insns_for_mem_entry *) k2;
insns_for_mem_walk_info structure). */
static int
-insns_for_mem_walk (r, data)
- rtx *r;
- void *data;
+insns_for_mem_walk (rtx *r, void *data)
{
struct insns_for_mem_walk_info *ifmwi
= (struct insns_for_mem_walk_info *) data;
if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
&& GET_CODE (XEXP (*r, 0)) == REG)
{
- PTR *e;
+ void **e;
tmp.key = XEXP (*r, 0);
e = htab_find_slot (ifmwi->ht, &tmp, INSERT);
if (*e == NULL)
{
struct insns_for_mem_entry *ifme;
tmp.key = *r;
- ifme = (struct insns_for_mem_entry *) htab_find (ifmwi->ht, &tmp);
+ ifme = htab_find (ifmwi->ht, &tmp);
/* If we have not already recorded this INSN, do so now. Since
we process the INSNs in order, we know that if we have
which REGs in HT. */
static void
-compute_insns_for_mem (insns, last_insn, ht)
- rtx insns;
- rtx last_insn;
- htab_t ht;
+compute_insns_for_mem (rtx insns, rtx last_insn, htab_t ht)
{
rtx insn;
struct insns_for_mem_walk_info ifmwi;
Returns true iff the rtl is an ADDRESSOF. */
static int
-is_addressof (rtl, data)
- rtx *rtl;
- void *data ATTRIBUTE_UNUSED;
+is_addressof (rtx *rtl, void *data ATTRIBUTE_UNUSED)
{
return GET_CODE (*rtl) == ADDRESSOF;
}
stack. */
void
-purge_addressof (insns)
- rtx insns;
+purge_addressof (rtx insns)
{
- rtx insn;
+ rtx insn, tmp;
htab_t ht;
/* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
ht = htab_create_ggc (1000, insns_for_mem_hash, insns_for_mem_comp, NULL);
compute_insns_for_mem (insns, NULL_RTX, ht);
+ postponed_insns = NULL;
+
for (insn = insns; insn; insn = NEXT_INSN (insn))
if (INSN_P (insn))
{
if (! purge_addressof_1 (&PATTERN (insn), insn,
- asm_noperands (PATTERN (insn)) > 0, 0, ht))
+ asm_noperands (PATTERN (insn)) > 0, 0, 1, ht))
/* If we could not replace the ADDRESSOFs in the insn,
something is wrong. */
abort ();
- if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, ht))
+ if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, 0, ht))
{
/* If we could not replace the ADDRESSOFs in the insn's notes,
we can just remove the offending notes instead. */
}
}
+ /* Process the postponed insns. */
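+ /* This second pass runs with MAY_POSTPONE clear, so any ADDRESSOFs
+ still present in these insns are replaced now (or we abort). */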
+ while (postponed_insns)
+ {
+ insn = XEXP (postponed_insns, 0);
+ tmp = postponed_insns;
+ postponed_insns = XEXP (postponed_insns, 1);
+ free_INSN_LIST_node (tmp);
+
+ if (! purge_addressof_1 (&PATTERN (insn), insn,
+ asm_noperands (PATTERN (insn)) > 0, 0, 0, ht))
+ abort ();
+ }
+
/* Clean up. */
purge_bitfield_addressof_replacements = 0;
purge_addressof_replacements = 0;
register. A subroutine of purge_hard_subreg_sets. */
static void
-purge_single_hard_subreg_set (pattern)
- rtx pattern;
+purge_single_hard_subreg_set (rtx pattern)
{
rtx reg = SET_DEST (pattern);
enum machine_mode mode = GET_MODE (SET_DEST (pattern));
of hard registers. */
void
-purge_hard_subreg_sets (insn)
- rtx insn;
+purge_hard_subreg_sets (rtx insn)
{
for (; insn; insn = NEXT_INSN (insn))
{
references to hard register references. */
void
-instantiate_virtual_regs (fndecl, insns)
- tree fndecl;
- rtx insns;
+instantiate_virtual_regs (tree fndecl, rtx insns)
{
rtx insn;
unsigned int i;
|| GET_CODE (insn) == CALL_INSN)
{
instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
+ if (INSN_DELETED_P (insn))
+ continue;
instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
if (GET_CODE (insn) == CALL_INSN)
instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
NULL_RTX, 0);
+
+ /* Past this point all ASM statements should match. Verify that
+ to avoid failures later in the compilation process. */
+ if (asm_noperands (PATTERN (insn)) >= 0
+ && ! check_asm_operands (PATTERN (insn)))
+ instantiate_virtual_regs_lossage (insn);
}
/* Instantiate the stack slots for the parm registers, for later use in
Otherwise, always do it. */
static void
-instantiate_decls (fndecl, valid_only)
- tree fndecl;
- int valid_only;
+instantiate_decls (tree fndecl, int valid_only)
{
tree decl;
BLOCK node and all its subblocks. */
static void
-instantiate_decls_1 (let, valid_only)
- tree let;
- int valid_only;
+instantiate_decls_1 (tree let, int valid_only)
{
tree t;
changed if the new address is valid. */
static void
-instantiate_decl (x, size, valid_only)
- rtx x;
- HOST_WIDE_INT size;
- int valid_only;
+instantiate_decl (rtx x, HOST_WIDE_INT size, int valid_only)
{
enum machine_mode mode;
rtx addr;
offset indirectly through the pointer. Otherwise, return 0. */
static rtx
-instantiate_new_reg (x, poffset)
- rtx x;
- HOST_WIDE_INT *poffset;
+instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
rtx new;
HOST_WIDE_INT offset;
return new;
}
\f
+
+/* Called when instantiate_virtual_regs has failed to update the instruction.
+ Usually this means that a non-matching instruction has been emitted; however,
+ for asm statements the problem may lie in the constraints. */
+static void
+instantiate_virtual_regs_lossage (rtx insn)
+{
+ if (asm_noperands (PATTERN (insn)) >= 0)
+ {
+ error_for_asm (insn, "impossible constraint in `asm'");
+ delete_insn (insn);
+ }
+ else
+ abort ();
+}
/* Given a pointer to a piece of rtx and an optional pointer to the
containing object, instantiate any virtual registers present in it.
pseudos. */
static int
-instantiate_virtual_regs_1 (loc, object, extra_insns)
- rtx *loc;
- rtx object;
- int extra_insns;
+instantiate_virtual_regs_1 (rtx *loc, rtx object, int extra_insns)
{
rtx x;
RTX_CODE code;
if (x == 0)
return 1;
+ /* We may have detected and deleted invalid asm statements. */
+ if (object && INSN_P (object) && INSN_DELETED_P (object))
+ return 1;
+
code = GET_CODE (x);
/* Check for some special cases. */
/* The only valid sources here are PLUS or REG. Just do
the simplest possible thing to handle them. */
if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
- abort ();
+ {
+ instantiate_virtual_regs_lossage (object);
+ return 1;
+ }
start_sequence ();
if (GET_CODE (src) != REG)
if (! validate_change (object, &SET_SRC (x), temp, 0)
|| ! extra_insns)
- abort ();
+ instantiate_virtual_regs_lossage (object);
return 1;
}
emit_insn_before (seq, object);
if (! validate_change (object, loc, temp, 0)
&& ! validate_replace_rtx (x, temp, object))
- abort ();
+ {
+ instantiate_virtual_regs_lossage (object);
+ return 1;
+ }
}
}
case ABS:
case SQRT:
case FFS:
+ case CLZ: case CTZ:
+ case POPCOUNT: case PARITY:
/* These cases either have just one operand or we know that we need not
check the rest of the operands. */
loc = &XEXP (x, 0);
emit_insn_before (seq, object);
if (! validate_change (object, loc, temp, 0)
&& ! validate_replace_rtx (x, temp, object))
- abort ();
+ instantiate_virtual_regs_lossage (object);
}
}
and disestablish them. */
static void
-delete_handlers ()
+delete_handlers (void)
{
rtx insn;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
/* Return the first insn following those generated by `assign_parms'. */
rtx
-get_first_nonparm_insn ()
+get_first_nonparm_insn (void)
{
if (last_parm_insn)
return NEXT_INSN (last_parm_insn);
EXP may be a type node or an expression (whose type is tested). */
int
-aggregate_value_p (exp)
- tree exp;
+aggregate_value_p (tree exp)
{
int i, regno, nregs;
rtx reg;
those registers as the RTL for them. */
void
-assign_parms (fndecl)
- tree fndecl;
+assign_parms (tree fndecl)
{
tree parm;
- rtx entry_parm = 0;
- rtx stack_parm = 0;
CUMULATIVE_ARGS args_so_far;
- enum machine_mode promoted_mode, passed_mode;
- enum machine_mode nominal_mode, promoted_nominal_mode;
- int unsignedp;
/* Total space needed so far for args on the stack,
given as a constant and a tree-expression. */
struct args_size stack_args_size;
tree fntype = TREE_TYPE (fndecl);
- tree fnargs = DECL_ARGUMENTS (fndecl);
+ tree fnargs = DECL_ARGUMENTS (fndecl), orig_fnargs;
/* This is used for the arg pointer when referring to stack args. */
rtx internal_arg_pointer;
/* This is a dummy PARM_DECL that we used for the function result if
#ifdef SETUP_INCOMING_VARARGS
int varargs_setup = 0;
#endif
+ int reg_parm_stack_space = 0;
rtx conversion_insns = 0;
- struct args_size alignment_pad;
/* Nonzero if function takes extra anonymous args.
This means the last named arg must be on the stack
fnargs = function_result_decl;
}
+ orig_fnargs = fnargs;
+
max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
- parm_reg_stack_loc = (rtx *) ggc_alloc_cleared (max_parm_reg * sizeof (rtx));
+ parm_reg_stack_loc = ggc_alloc_cleared (max_parm_reg * sizeof (rtx));
+
+ if (SPLIT_COMPLEX_ARGS)
+ fnargs = split_complex_args (fnargs);
+
+#ifdef REG_PARM_STACK_SPACE
+#ifdef MAYBE_REG_PARM_STACK_SPACE
+ reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
+#else
+ reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
+#endif
+#endif
#ifdef INIT_CUMULATIVE_INCOMING_ARGS
INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
#else
- INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
+ INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, fndecl);
#endif
/* We haven't yet found an argument that we must push and pretend the
for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
{
- struct args_size stack_offset;
- struct args_size arg_size;
+ rtx entry_parm;
+ rtx stack_parm;
+ enum machine_mode promoted_mode, passed_mode;
+ enum machine_mode nominal_mode, promoted_nominal_mode;
+ int unsignedp;
+ struct locate_and_pad_arg_data locate;
int passed_pointer = 0;
int did_conversion = 0;
tree passed_type = DECL_ARG_TYPE (parm);
tree nominal_type = TREE_TYPE (parm);
- int pretend_named;
int last_named = 0, named_arg;
+ int in_regs;
+ int partial = 0;
/* Set LAST_NAMED if this is last named arg before last
anonymous args. */
object itself or if the machine requires these objects be passed
that way. */
- if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
- && contains_placeholder_p (TYPE_SIZE (passed_type)))
+ if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (passed_type))
|| TREE_ADDRESSABLE (passed_type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
|| FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
- passed_type, named_arg)
+ passed_type, named_arg)
#endif
)
{
it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
0 as it was the previous time. */
-
- pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
- locate_and_pad_parm (promoted_mode, passed_type,
+ in_regs = entry_parm != 0;
#ifdef STACK_PARMS_IN_REG_PARM_AREA
- 1,
-#else
+ in_regs = 1;
+#endif
+ if (!in_regs && !named_arg)
+ {
+ int pretend_named = PRETEND_OUTGOING_VARARGS_NAMED;
+ if (pretend_named)
+ {
#ifdef FUNCTION_INCOMING_ARG
- FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
- passed_type,
- pretend_named) != 0,
+ in_regs = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
+ passed_type,
+ pretend_named) != 0;
#else
- FUNCTION_ARG (args_so_far, promoted_mode,
- passed_type,
- pretend_named) != 0,
+ in_regs = FUNCTION_ARG (args_so_far, promoted_mode,
+ passed_type,
+ pretend_named) != 0;
#endif
+ }
+ }
+
+ /* If this parameter was passed both in registers and in the stack,
+ use the copy on the stack. */
+ if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
+ entry_parm = 0;
+
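+ /* PARTIAL is the number of registers holding the part of this argument
+ that is passed in registers when the remainder goes on the stack,
+ and zero otherwise. */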
+#ifdef FUNCTION_ARG_PARTIAL_NREGS
+ if (entry_parm)
+ partial = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
+ passed_type, named_arg);
#endif
- fndecl, &stack_args_size, &stack_offset, &arg_size,
- &alignment_pad);
+
+ memset (&locate, 0, sizeof (locate));
+ locate_and_pad_parm (promoted_mode, passed_type, in_regs,
+ entry_parm ? partial : 0, fndecl,
+ &stack_args_size, &locate);
{
- rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
+ rtx offset_rtx;
+
+ /* If we're passing this arg using a reg, make its stack home
+ the aligned stack slot. */
+ if (entry_parm)
+ offset_rtx = ARGS_SIZE_RTX (locate.slot_offset);
+ else
+ offset_rtx = ARGS_SIZE_RTX (locate.offset);
if (offset_rtx == const0_rtx)
stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
offset_rtx));
set_mem_attributes (stack_parm, parm, 1);
- }
+ if (entry_parm && MEM_ATTRS (stack_parm)->align < PARM_BOUNDARY)
+ set_mem_align (stack_parm, PARM_BOUNDARY);
- /* If this parameter was passed both in registers and in the stack,
- use the copy on the stack. */
- if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
- entry_parm = 0;
+ /* Also set REG_ATTRS if the parameter was passed in a register. */
+ if (entry_parm)
+ set_reg_attrs_for_parm (entry_parm, stack_parm);
+ }
-#ifdef FUNCTION_ARG_PARTIAL_NREGS
/* If this parm was passed part in regs and part in memory,
pretend it arrived entirely in memory
by pushing the register-part onto the stack.
we could put it together in a pseudoreg directly,
but for now that's not worth bothering with. */
- if (entry_parm)
+ if (partial)
{
- int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
- passed_type, named_arg);
-
- if (nregs > 0)
- {
-#if defined (REG_PARM_STACK_SPACE) && !defined (MAYBE_REG_PARM_STACK_SPACE)
- /* When REG_PARM_STACK_SPACE is nonzero, stack space for
- split parameters was allocated by our caller, so we
- won't be pushing it in the prolog. */
- if (REG_PARM_STACK_SPACE (fndecl) == 0)
+#ifndef MAYBE_REG_PARM_STACK_SPACE
+ /* When REG_PARM_STACK_SPACE is nonzero, stack space for
+ split parameters was allocated by our caller, so we
+ won't be pushing it in the prolog. */
+ if (reg_parm_stack_space == 0)
#endif
- current_function_pretend_args_size
- = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
- / (PARM_BOUNDARY / BITS_PER_UNIT)
- * (PARM_BOUNDARY / BITS_PER_UNIT));
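+ /* The register-passed part, PARTIAL * UNITS_PER_WORD bytes, rounded
+ up to a multiple of PARM_BOUNDARY / BITS_PER_UNIT. */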
+ current_function_pretend_args_size
+ = (((partial * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
+ / (PARM_BOUNDARY / BITS_PER_UNIT)
+ * (PARM_BOUNDARY / BITS_PER_UNIT));
- /* Handle calls that pass values in multiple non-contiguous
- locations. The Irix 6 ABI has examples of this. */
- if (GET_CODE (entry_parm) == PARALLEL)
- emit_group_store (validize_mem (stack_parm), entry_parm,
- int_size_in_bytes (TREE_TYPE (parm)));
+ /* Handle calls that pass values in multiple non-contiguous
+ locations. The Irix 6 ABI has examples of this. */
+ if (GET_CODE (entry_parm) == PARALLEL)
+ emit_group_store (validize_mem (stack_parm), entry_parm,
+ TREE_TYPE (parm),
+ int_size_in_bytes (TREE_TYPE (parm)));
- else
- move_block_from_reg (REGNO (entry_parm),
- validize_mem (stack_parm), nregs,
- int_size_in_bytes (TREE_TYPE (parm)));
+ else
+ move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
+ partial);
- entry_parm = stack_parm;
- }
+ entry_parm = stack_parm;
}
-#endif
/* If we didn't decide this parm came in a register,
by default it came on the stack. */
#endif
)
{
- stack_args_size.constant += arg_size.constant;
- if (arg_size.var)
- ADD_PARM_SIZE (stack_args_size, arg_size.var);
+ stack_args_size.constant += locate.size.constant;
+ /* locate.size doesn't include the part in regs. */
+ if (partial)
+ stack_args_size.constant += current_function_pretend_args_size;
+ if (locate.size.var)
+ ADD_PARM_SIZE (stack_args_size, locate.size.var);
}
else
/* No stack slot was pushed for this parm. */
/* If parm was passed in memory, and we need to convert it on entry,
don't store it back in that same slot. */
- if (entry_parm != 0
+ if (entry_parm == stack_parm
&& nominal_mode != BLKmode && nominal_mode != passed_mode)
stack_parm = 0;
Set DECL_RTL to that place. */
- if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
+ if (nominal_mode == BLKmode
+#ifdef BLOCK_REG_PADDING
+ || (locate.where_pad == (BYTES_BIG_ENDIAN ? upward : downward)
+ && GET_MODE_SIZE (promoted_mode) < UNITS_PER_WORD)
+#endif
+ || GET_CODE (entry_parm) == PARALLEL)
{
/* If a BLKmode arrives in registers, copy it to a stack slot.
Handle calls that pass values in multiple non-contiguous
if (GET_CODE (entry_parm) == REG
|| GET_CODE (entry_parm) == PARALLEL)
{
- int size_stored
- = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
- UNITS_PER_WORD);
+ int size = int_size_in_bytes (TREE_TYPE (parm));
+ int size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
+ rtx mem;
/* Note that we will be storing an integral number of words.
So we have to be careful to ensure that we allocate an
else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
abort ();
+ mem = validize_mem (stack_parm);
+
/* Handle calls that pass values in multiple non-contiguous
locations. The Irix 6 ABI has examples of this. */
if (GET_CODE (entry_parm) == PARALLEL)
- emit_group_store (validize_mem (stack_parm), entry_parm,
- int_size_in_bytes (TREE_TYPE (parm)));
+ emit_group_store (mem, entry_parm, TREE_TYPE (parm), size);
+
+ else if (size == 0)
+ ;
+
+ /* If SIZE is that of a mode no bigger than a word, just use
+ that mode's store operation. */
+ else if (size <= UNITS_PER_WORD)
+ {
+ enum machine_mode mode
+ = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
+
+ if (mode != BLKmode
+#ifdef BLOCK_REG_PADDING
+ && (size == UNITS_PER_WORD
+ || (BLOCK_REG_PADDING (mode, TREE_TYPE (parm), 1)
+ != (BYTES_BIG_ENDIAN ? upward : downward)))
+#endif
+ )
+ {
+ rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
+ emit_move_insn (change_address (mem, mode, 0), reg);
+ }
+
+ /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
+ machine must be aligned to the left before storing
+ to memory. Note that the previous test doesn't
+ handle all cases (e.g. SIZE == 3). */
+ else if (size != UNITS_PER_WORD
+#ifdef BLOCK_REG_PADDING
+ && (BLOCK_REG_PADDING (mode, TREE_TYPE (parm), 1)
+ == downward)
+#else
+ && BYTES_BIG_ENDIAN
+#endif
+ )
+ {
+ rtx tem, x;
+ int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
+ rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
+
+ x = expand_binop (word_mode, ashl_optab, reg,
+ GEN_INT (by), 0, 1, OPTAB_WIDEN);
+ tem = change_address (mem, word_mode, 0);
+ emit_move_insn (tem, x);
+ }
+ else
+ move_block_from_reg (REGNO (entry_parm), mem,
+ size_stored / UNITS_PER_WORD);
+ }
else
- move_block_from_reg (REGNO (entry_parm),
- validize_mem (stack_parm),
- size_stored / UNITS_PER_WORD,
- int_size_in_bytes (TREE_TYPE (parm)));
+ move_block_from_reg (REGNO (entry_parm), mem,
+ size_stored / UNITS_PER_WORD);
}
SET_DECL_RTL (parm, stack_parm);
}
/* TREE_USED gets set erroneously during expand_assignment. */
save_tree_used = TREE_USED (parm);
expand_assignment (parm,
- make_tree (nominal_type, tempreg), 0, 0);
+ make_tree (nominal_type, tempreg), 0);
TREE_USED (parm) = save_tree_used;
conversion_insns = get_insns ();
did_conversion = 1;
but it's also rare and we need max_parm_reg to be
precisely correct. */
max_parm_reg = regno + 1;
- new = (rtx *) ggc_realloc (parm_reg_stack_loc,
- max_parm_reg * sizeof (rtx));
- memset ((char *) (new + old_max_parm_reg), 0,
- (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
+ new = ggc_realloc (parm_reg_stack_loc,
+ max_parm_reg * sizeof (rtx));
+ memset (new + old_max_parm_reg, 0,
+ (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
parm_reg_stack_loc = new;
}
&& ! did_conversion
&& stack_parm != 0
&& GET_CODE (stack_parm) == MEM
- && stack_offset.var == 0
+ && locate.offset.var == 0
&& reg_mentioned_p (virtual_incoming_args_rtx,
XEXP (stack_parm, 0)))
{
stack. So, we go back to that sequence, just so that
the fixups will happen. */
push_to_sequence (conversion_insns);
- put_var_into_stack (parm);
+ put_var_into_stack (parm, /*rescan=*/true);
conversion_insns = get_insns ();
end_sequence ();
}
{
stack_parm
= assign_stack_local (GET_MODE (entry_parm),
- GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
+ GET_MODE_SIZE (GET_MODE (entry_parm)),
+ 0);
set_mem_attributes (stack_parm, parm, 1);
}
SET_DECL_RTL (parm, stack_parm);
}
+ }
- /* If this "parameter" was the place where we are receiving the
- function's incoming structure pointer, set up the result. */
- if (parm == function_result_decl)
+ if (SPLIT_COMPLEX_ARGS && fnargs != orig_fnargs)
+ {
+ for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
{
- tree result = DECL_RESULT (fndecl);
- rtx addr = DECL_RTL (parm);
- rtx x;
-
-#ifdef POINTERS_EXTEND_UNSIGNED
- if (GET_MODE (addr) != Pmode)
- addr = convert_memory_address (Pmode, addr);
-#endif
-
- x = gen_rtx_MEM (DECL_MODE (result), addr);
- set_mem_attributes (x, result, 1);
- SET_DECL_RTL (result, x);
+ if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE)
+ {
+ SET_DECL_RTL (parm,
+ gen_rtx_CONCAT (DECL_MODE (parm),
+ DECL_RTL (fnargs),
+ DECL_RTL (TREE_CHAIN (fnargs))));
+ DECL_INCOMING_RTL (parm)
+ = gen_rtx_CONCAT (DECL_MODE (parm),
+ DECL_INCOMING_RTL (fnargs),
+ DECL_INCOMING_RTL (TREE_CHAIN (fnargs)));
+ fnargs = TREE_CHAIN (fnargs);
+ }
+ else
+ {
+ SET_DECL_RTL (parm, DECL_RTL (fnargs));
+ DECL_INCOMING_RTL (parm) = DECL_INCOMING_RTL (fnargs);
+ }
+ fnargs = TREE_CHAIN (fnargs);
}
}
now that all parameters have been copied out of hard registers. */
emit_insn (conversion_insns);
+ /* If we are receiving a struct value address as the first argument, set up
+ the RTL for the function result. As this might require code to convert
+ the transmitted address to Pmode, we do this here to ensure that possible
+ preliminary conversions of the address have been emitted already. */
+ if (function_result_decl)
+ {
+ tree result = DECL_RESULT (fndecl);
+ rtx addr = DECL_RTL (function_result_decl);
+ rtx x;
+
+#ifdef POINTERS_EXTEND_UNSIGNED
+ if (GET_MODE (addr) != Pmode)
+ addr = convert_memory_address (Pmode, addr);
+#endif
+
+ x = gen_rtx_MEM (DECL_MODE (result), addr);
+ set_mem_attributes (x, result, 1);
+ SET_DECL_RTL (result, x);
+ }
+
last_parm_insn = get_last_insn ();
current_function_args_size = stack_args_size.constant;
}
}
}
+
+/* If ARGS contains entries with complex types, split the entry into two
+ entries of the component type. Return a new list of substitutions are
+ needed, else the old list. */
+
+static tree
+split_complex_args (tree args)
+{
+ tree p;
+
+ /* Before allocating memory, check for the common case of no complex. */
+ for (p = args; p; p = TREE_CHAIN (p))
+ if (TREE_CODE (TREE_TYPE (p)) == COMPLEX_TYPE)
+ goto found;
+ return args;
+
+ found:
+ args = copy_list (args);
+
+ for (p = args; p; p = TREE_CHAIN (p))
+ {
+ tree type = TREE_TYPE (p);
+ if (TREE_CODE (type) == COMPLEX_TYPE)
+ {
+ tree decl;
+ tree subtype = TREE_TYPE (type);
+
+ /* Rewrite the PARM_DECL's type with its component. */
+ TREE_TYPE (p) = subtype;
+ DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
+ DECL_MODE (p) = VOIDmode;
+ DECL_SIZE (p) = NULL;
+ DECL_SIZE_UNIT (p) = NULL;
+ layout_decl (p, 0);
+
+ /* Build a second synthetic decl. */
+ decl = build_decl (PARM_DECL, NULL_TREE, subtype);
+ DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
+ layout_decl (decl, 0);
+
+ /* Splice it in; skip the new decl. */
+ TREE_CHAIN (decl) = TREE_CHAIN (p);
+ TREE_CHAIN (p) = decl;
+ p = decl;
+ }
+ }
+
+ return args;
+}
\f
/* Indicate whether REGNO is an incoming argument to the current function
that was promoted to a wider mode. If so, return the RTX for the
#ifdef PROMOTE_FUNCTION_ARGS
rtx
-promoted_input_arg (regno, pmode, punsignedp)
- unsigned int regno;
- enum machine_mode *pmode;
- int *punsignedp;
+promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
{
tree arg;
INITIAL_OFFSET_PTR points to the current offset into the stacked
arguments.
- The starting offset and size for this parm are returned in *OFFSET_PTR
- and *ARG_SIZE_PTR, respectively.
+ The starting offset and size for this parm are returned in
+ LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
+ nonzero, the offset is that of the stack slot, which is returned in
+ LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
+ padding required from the initial offset ptr to the stack slot.
IN_REGS is nonzero if the argument will be passed in registers. It will
never be set if REG_PARM_STACK_SPACE is not defined.
initial offset is not affected by this rounding, while the size always
is and the starting offset may be. */
-/* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
- initial_offset_ptr is positive because locate_and_pad_parm's
+/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
+ INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
callers pass in the total size of args so far as
- initial_offset_ptr. arg_size_ptr is always positive. */
+ INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
void
-locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
- initial_offset_ptr, offset_ptr, arg_size_ptr,
- alignment_pad)
- enum machine_mode passed_mode;
- tree type;
- int in_regs ATTRIBUTE_UNUSED;
- tree fndecl ATTRIBUTE_UNUSED;
- struct args_size *initial_offset_ptr;
- struct args_size *offset_ptr;
- struct args_size *arg_size_ptr;
- struct args_size *alignment_pad;
-
+locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
+ int partial, tree fndecl ATTRIBUTE_UNUSED,
+ struct args_size *initial_offset_ptr,
+ struct locate_and_pad_arg_data *locate)
{
- tree sizetree
- = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
- enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
- int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
-#ifdef ARGS_GROW_DOWNWARD
- tree s2 = sizetree;
-#endif
+ tree sizetree;
+ enum direction where_pad;
+ int boundary;
+ int reg_parm_stack_space = 0;
+ int part_size_in_regs;
#ifdef REG_PARM_STACK_SPACE
+#ifdef MAYBE_REG_PARM_STACK_SPACE
+ reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
+#else
+ reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
+#endif
+
/* If we have found a stack parm before we reach the end of the
area reserved for registers, skip that area. */
if (! in_regs)
{
- int reg_parm_stack_space = 0;
-
-#ifdef MAYBE_REG_PARM_STACK_SPACE
- reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
-#else
- reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
-#endif
if (reg_parm_stack_space > 0)
{
if (initial_offset_ptr->var)
}
#endif /* REG_PARM_STACK_SPACE */
- arg_size_ptr->var = 0;
- arg_size_ptr->constant = 0;
- alignment_pad->var = 0;
- alignment_pad->constant = 0;
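+ /* PART_SIZE_IN_REGS is the space taken by the part of the argument
+ passed in registers: PARTIAL words, rounded down to a multiple of
+ PARM_BOUNDARY / BITS_PER_UNIT bytes. */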
+ part_size_in_regs = 0;
+ if (reg_parm_stack_space == 0)
+ part_size_in_regs = ((partial * UNITS_PER_WORD)
+ / (PARM_BOUNDARY / BITS_PER_UNIT)
+ * (PARM_BOUNDARY / BITS_PER_UNIT));
+
+ sizetree
+ = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
+ where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
+ boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
+ locate->where_pad = where_pad;
#ifdef ARGS_GROW_DOWNWARD
+ locate->slot_offset.constant = -initial_offset_ptr->constant;
if (initial_offset_ptr->var)
- {
- offset_ptr->constant = 0;
- offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
- initial_offset_ptr->var);
- }
- else
- {
- offset_ptr->constant = -initial_offset_ptr->constant;
- offset_ptr->var = 0;
- }
+ locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
+ initial_offset_ptr->var);
- if (where_pad != none
- && (!host_integerp (sizetree, 1)
- || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
- s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
- SUB_PARM_SIZE (*offset_ptr, s2);
+ {
+ tree s2 = sizetree;
+ if (where_pad != none
+ && (!host_integerp (sizetree, 1)
+ || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
+ s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
+ SUB_PARM_SIZE (locate->slot_offset, s2);
+ }
+
+ locate->slot_offset.constant += part_size_in_regs;
if (!in_regs
#ifdef REG_PARM_STACK_SPACE
|| REG_PARM_STACK_SPACE (fndecl) > 0
#endif
)
- pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
+ pad_to_arg_alignment (&locate->slot_offset, boundary,
+ &locate->alignment_pad);
+ locate->size.constant = (-initial_offset_ptr->constant
+ - locate->slot_offset.constant);
if (initial_offset_ptr->var)
- arg_size_ptr->var = size_binop (MINUS_EXPR,
- size_binop (MINUS_EXPR,
- ssize_int (0),
- initial_offset_ptr->var),
- offset_ptr->var);
-
- else
- arg_size_ptr->constant = (-initial_offset_ptr->constant
- - offset_ptr->constant);
-
- /* Pad_below needs the pre-rounded size to know how much to pad below.
- We only pad parameters which are not in registers as they have their
- padding done elsewhere. */
- if (where_pad == downward
- && !in_regs)
- pad_below (offset_ptr, passed_mode, sizetree);
+ locate->size.var = size_binop (MINUS_EXPR,
+ size_binop (MINUS_EXPR,
+ ssize_int (0),
+ initial_offset_ptr->var),
+ locate->slot_offset.var);
+
+ /* Pad_below needs the pre-rounded size to know how much to pad
+ below. */
+ locate->offset = locate->slot_offset;
+ if (where_pad == downward)
+ pad_below (&locate->offset, passed_mode, sizetree);
#else /* !ARGS_GROW_DOWNWARD */
if (!in_regs
|| REG_PARM_STACK_SPACE (fndecl) > 0
#endif
)
- pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
- *offset_ptr = *initial_offset_ptr;
+ pad_to_arg_alignment (initial_offset_ptr, boundary,
+ &locate->alignment_pad);
+ locate->slot_offset = *initial_offset_ptr;
#ifdef PUSH_ROUNDING
if (passed_mode != BLKmode)
/* Pad_below needs the pre-rounded size to know how much to pad below
so this must be done before rounding up. */
- if (where_pad == downward
- /* However, BLKmode args passed in regs have their padding done elsewhere.
- The stack slot must be able to hold the entire register. */
- && !(in_regs && passed_mode == BLKmode))
- pad_below (offset_ptr, passed_mode, sizetree);
+ locate->offset = locate->slot_offset;
+ if (where_pad == downward)
+ pad_below (&locate->offset, passed_mode, sizetree);
if (where_pad != none
&& (!host_integerp (sizetree, 1)
|| (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
- ADD_PARM_SIZE (*arg_size_ptr, sizetree);
+ ADD_PARM_SIZE (locate->size, sizetree);
+
+ locate->size.constant -= part_size_in_regs;
#endif /* ARGS_GROW_DOWNWARD */
}
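The part_size_in_regs expression introduced above rounds the register-passed portion of an argument down to whole PARM_BOUNDARY units before folding it into the slot offset. A minimal standalone sketch of that arithmetic follows; the values chosen for UNITS_PER_WORD, BITS_PER_UNIT and PARM_BOUNDARY are illustrative only, not any real target's definitions.

#include <stdio.h>

/* Illustrative values only; real targets define these in their
   target headers.  */
#define UNITS_PER_WORD 4
#define BITS_PER_UNIT  8
#define PARM_BOUNDARY  64   /* bits */

int
main (void)
{
  int parm_boundary_bytes = PARM_BOUNDARY / BITS_PER_UNIT;
  int partial;

  /* Mirror the part_size_in_regs expression: bytes passed in
     registers, rounded down to a multiple of the parameter
     boundary.  */
  for (partial = 0; partial <= 4; partial++)
    {
      int bytes_in_regs = partial * UNITS_PER_WORD;
      int part_size = bytes_in_regs / parm_boundary_bytes * parm_boundary_bytes;
      printf ("partial=%d -> %d bytes in regs, counted as %d\n",
              partial, bytes_in_regs, part_size);
    }
  return 0;
}

With these sample values, one register's worth of data (partial=1) contributes nothing to part_size_in_regs, while two registers contribute a full 8-byte boundary unit.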
BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
static void
-pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
- struct args_size *offset_ptr;
- int boundary;
- struct args_size *alignment_pad;
+pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
+ struct args_size *alignment_pad)
{
tree save_var = NULL_TREE;
HOST_WIDE_INT save_constant = 0;
#endif
(ARGS_SIZE_TREE (*offset_ptr),
boundary / BITS_PER_UNIT);
- offset_ptr->constant = 0; /*?*/
+ /* ARGS_SIZE_TREE includes constant term. */
+ offset_ptr->constant = 0;
if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
save_var);
}
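For the constant-offset case, pad_to_arg_alignment amounts to rounding the offset up to boundary/BITS_PER_UNIT bytes and recording the slack in alignment_pad. A minimal sketch of that case only; ceil_round here is a local helper for the example, not GCC's macro, and the variable-offset (tree) path is not reproduced.

#include <stdio.h>

#define BITS_PER_UNIT 8

/* Round X up to the next multiple of ALIGN (ALIGN a power of two).  */
static long
ceil_round (long x, long align)
{
  return (x + align - 1) & -align;
}

int
main (void)
{
  long offset = 13;            /* hypothetical argument offset, bytes */
  int boundary = 64;           /* bits, as passed to pad_to_arg_alignment */
  long align = boundary / BITS_PER_UNIT;

  long rounded = ceil_round (offset, align);
  long alignment_pad = rounded - offset;

  printf ("offset %ld rounded to %ld, alignment pad %ld\n",
          offset, rounded, alignment_pad);
  return 0;
}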
static void
-pad_below (offset_ptr, passed_mode, sizetree)
- struct args_size *offset_ptr;
- enum machine_mode passed_mode;
- tree sizetree;
+pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
{
if (passed_mode != BLKmode)
{
clobbers the pseudo-regs to hard regs. */
void
-uninitialized_vars_warning (block)
- tree block;
+uninitialized_vars_warning (tree block)
{
tree decl, sub;
for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
not worry that it may be a dangling pointer. */
&& DECL_INITIAL (decl) == NULL_TREE
&& regno_uninitialized (REGNO (DECL_RTL (decl))))
- warning_with_decl (decl,
- "`%s' might be used uninitialized in this function");
+ warning ("%H'%D' might be used uninitialized in this function",
+ &DECL_SOURCE_LOCATION (decl), decl);
if (extra_warnings
&& TREE_CODE (decl) == VAR_DECL
&& DECL_RTL (decl) != 0
&& GET_CODE (DECL_RTL (decl)) == REG
&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
- warning_with_decl (decl,
- "variable `%s' might be clobbered by `longjmp' or `vfork'");
+ warning ("%Hvariable '%D' might be clobbered by `longjmp' or `vfork'",
+ &DECL_SOURCE_LOCATION (decl), decl);
}
for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
uninitialized_vars_warning (sub);
but for arguments instead of local variables. */
void
-setjmp_args_warning ()
+setjmp_args_warning (void)
{
tree decl;
for (decl = DECL_ARGUMENTS (current_function_decl);
if (DECL_RTL (decl) != 0
&& GET_CODE (DECL_RTL (decl)) == REG
&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
- warning_with_decl (decl,
- "argument `%s' might be clobbered by `longjmp' or `vfork'");
+ warning ("%Hargument '%D' might be clobbered by `longjmp' or `vfork'",
+ &DECL_SOURCE_LOCATION (decl), decl);
}
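The warning calls above switch from warning_with_decl to a printf-style format that carries an explicit source location (%H) and decl (%D). The following is only a simplified stand-in to show that shape: struct location and warn_at are invented for the example and are not GCC's diagnostic machinery or its %H/%D directives.

#include <stdarg.h>
#include <stdio.h>

/* Invented stand-in for a source location.  */
struct location
{
  const char *file;
  int line;
};

static void
warn_at (const struct location *loc, const char *fmt, ...)
{
  va_list ap;

  fprintf (stderr, "%s:%d: warning: ", loc->file, loc->line);
  va_start (ap, fmt);
  vfprintf (stderr, fmt, ap);
  va_end (ap);
  fputc ('\n', stderr);
}

int
main (void)
{
  struct location loc = { "t.c", 42 };

  /* The location travels with the diagnostic instead of being implied
     by the decl, which is the shape of the new warning calls above.  */
  warn_at (&loc, "'%s' might be used uninitialized in this function", "x");
  return 0;
}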
/* If this function call setjmp, put all vars into the stack
unless they were declared `register'. */
void
-setjmp_protect (block)
- tree block;
+setjmp_protect (tree block)
{
tree decl, sub;
for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
||
#endif
! DECL_REGISTER (decl)))
- put_var_into_stack (decl);
+ put_var_into_stack (decl, /*rescan=*/true);
for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
setjmp_protect (sub);
}
/* Like the previous function, but for args instead of local variables. */
void
-setjmp_protect_args ()
+setjmp_protect_args (void)
{
tree decl;
for (decl = DECL_ARGUMENTS (current_function_decl);
||
#endif
! DECL_REGISTER (decl)))
- put_var_into_stack (decl);
+ put_var_into_stack (decl, /*rescan=*/true);
}
\f
/* Return the context-pointer register corresponding to DECL,
or 0 if it does not need one. */
rtx
-lookup_static_chain (decl)
- tree decl;
+lookup_static_chain (tree decl)
{
tree context = decl_function_context (decl);
tree link;
into an address valid in this function (using a static chain). */
rtx
-fix_lexical_addr (addr, var)
- rtx addr;
- tree var;
+fix_lexical_addr (rtx addr, tree var)
{
rtx basereg;
HOST_WIDE_INT displacement;
and emit rtl to initialize its contents (at entry to this function). */
rtx
-trampoline_address (function)
- tree function;
+trampoline_address (tree function)
{
tree link;
tree rtlexp;
&& fn_context != inline_function_decl)
fp = find_function_data (fn_context);
- /* Allocate run-time space for this trampoline
- (usually in the defining function's stack frame). */
-#ifdef ALLOCATE_TRAMPOLINE
- tramp = ALLOCATE_TRAMPOLINE (fp);
-#else
+ /* Allocate run-time space for this trampoline. */
/* If rounding needed, allocate extra space
to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
#define TRAMPOLINE_REAL_SIZE \
(TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
fp ? fp : cfun);
-#endif
-
/* Record the trampoline for reuse and note it for later initialization
by expand_function_end. */
if (fp != 0)
round it to multiple of TRAMPOLINE_ALIGNMENT. */
static rtx
-round_trampoline_addr (tramp)
- rtx tramp;
+round_trampoline_addr (rtx tramp)
{
/* Round address up to desired boundary. */
rtx temp = gen_reg_rtx (Pmode);
   function call.  */
static rtx
-adjust_trampoline_addr (tramp)
- rtx tramp;
+adjust_trampoline_addr (rtx tramp)
{
tramp = round_trampoline_addr (tramp);
#ifdef TRAMPOLINE_ADJUST_ADDRESS
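TRAMPOLINE_REAL_SIZE allocates TRAMPOLINE_SIZE plus alignment-minus-one extra bytes, so that TRAMPOLINE_SIZE usable bytes remain after the slot address is rounded up to TRAMPOLINE_ALIGNMENT. A standalone sketch of that rounding; the constants and the slot address are illustrative, and round_up_addr is a helper written for the example, not round_trampoline_addr itself (which emits rtl).

#include <stdio.h>

/* Illustrative values; real targets define TRAMPOLINE_SIZE and
   TRAMPOLINE_ALIGNMENT themselves.  */
#define BITS_PER_UNIT         8
#define TRAMPOLINE_SIZE       10   /* bytes */
#define TRAMPOLINE_ALIGNMENT  32   /* bits */

#define TRAMPOLINE_REAL_SIZE \
  (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)

static unsigned long
round_up_addr (unsigned long addr, unsigned long align)
{
  return (addr + align - 1) & -align;
}

int
main (void)
{
  unsigned long align = TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT;
  unsigned long slot = 0x1003;   /* hypothetical unaligned slot address */
  unsigned long tramp = round_up_addr (slot, align);

  /* At most align-1 bytes are lost to rounding, so a slot of
     TRAMPOLINE_REAL_SIZE bytes always leaves TRAMPOLINE_SIZE usable.  */
  printf ("slot %#lx, rounded trampoline %#lx, lost %lu of %d extra bytes\n",
          slot, tramp, tramp - slot, TRAMPOLINE_REAL_SIZE - TRAMPOLINE_SIZE);
  return 0;
}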
and INSNS, the insn chain of the function. */
void
-identify_blocks ()
+identify_blocks (void)
{
int n_blocks;
tree *block_vector, *last_block_vector;
/* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
depth-first order. */
block_vector = get_block_vector (block, &n_blocks);
- block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
+ block_stack = xmalloc (n_blocks * sizeof (tree));
last_block_vector = identify_blocks_1 (get_insns (),
block_vector + 1,
BLOCK_VECTOR is incremented for each block seen. */
static tree *
-identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
- rtx insns;
- tree *block_vector;
- tree *end_block_vector;
- tree *orig_block_stack;
+identify_blocks_1 (rtx insns, tree *block_vector, tree *end_block_vector,
+ tree *orig_block_stack)
{
rtx insn;
tree *block_stack = orig_block_stack;
on what optimization has been performed. */
void
-reorder_blocks ()
+reorder_blocks (void)
{
tree block = DECL_INITIAL (current_function_decl);
varray_type block_stack;
/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
static void
-reorder_blocks_0 (block)
- tree block;
+reorder_blocks_0 (tree block)
{
while (block)
{
}
static void
-reorder_blocks_1 (insns, current_block, p_block_stack)
- rtx insns;
- tree current_block;
- varray_type *p_block_stack;
+reorder_blocks_1 (rtx insns, tree current_block, varray_type *p_block_stack)
{
rtx insn;
BLOCK_SUBBLOCKS (block) = 0;
TREE_ASM_WRITTEN (block) = 1;
- BLOCK_SUPERCONTEXT (block) = current_block;
- BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
- BLOCK_SUBBLOCKS (current_block) = block;
- current_block = block;
+ /* When there's only one block for the entire function,
+ current_block == block and we mustn't do this, it
+ will cause infinite recursion. */
+ if (block != current_block)
+ {
+ BLOCK_SUPERCONTEXT (block) = current_block;
+ BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
+ BLOCK_SUBBLOCKS (current_block) = block;
+ current_block = block;
+ }
VARRAY_PUSH_TREE (*p_block_stack, block);
}
else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
the new origin block. */
static void
-reorder_fix_fragments (block)
- tree block;
+reorder_fix_fragments (tree block)
{
while (block)
{
and return the new head of the chain (old last element). */
static tree
-blocks_nreverse (t)
- tree t;
+blocks_nreverse (tree t)
{
tree prev = 0, decl, next;
for (decl = t; decl; decl = next)
blocks. */
static int
-all_blocks (block, vector)
- tree block;
- tree *vector;
+all_blocks (tree block, tree *vector)
{
int n_blocks = 0;
to call `free' on the pointer returned. */
static tree *
-get_block_vector (block, n_blocks_p)
- tree block;
- int *n_blocks_p;
+get_block_vector (tree block, int *n_blocks_p)
{
tree *block_vector;
*n_blocks_p = all_blocks (block, NULL);
- block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
+ block_vector = xmalloc (*n_blocks_p * sizeof (tree));
all_blocks (block, block_vector);
return block_vector;
}
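The casts dropped from the xmalloc calls above are redundant in ISO C: a void * value converts implicitly to any object pointer type. A trivial standalone illustration using plain malloc (xmalloc is GCC's checking wrapper and is not reproduced here):

#include <stdlib.h>
#include <stdio.h>

int
main (void)
{
  /* No cast needed: the void * returned by malloc converts implicitly.  */
  int *v = malloc (4 * sizeof (int));

  if (v == NULL)
    return 1;
  v[0] = 42;
  printf ("%d\n", v[0]);
  free (v);
  return 0;
}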
-static int next_block_index = 2;
+static GTY(()) int next_block_index = 2;
/* Set BLOCK_NUMBER for all the blocks in FN. */
void
-number_blocks (fn)
- tree fn;
+number_blocks (tree fn)
{
int i;
int n_blocks;
/* If VAR is present in a subblock of BLOCK, return the subblock. */
tree
-debug_find_var_in_block_tree (var, block)
- tree var;
- tree block;
+debug_find_var_in_block_tree (tree var, tree block)
{
tree t;
/* Allocate a function structure and reset its contents to the defaults. */
static void
-prepare_function_start ()
+prepare_function_start (void)
{
- cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
+ cfun = ggc_alloc_cleared (sizeof (struct function));
init_stmt_for_function ();
init_eh_for_function ();
current_function_funcdef_no = funcdef_no++;
- cfun->arc_profile = profile_arc_flag || flag_test_coverage;
-
cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
cfun->max_jumptable_ents = 0;
like generate sequences. This is used to provide a context during global
initialization of some passes. */
void
-init_dummy_function_start ()
+init_dummy_function_start (void)
{
prepare_function_start ();
}
of the function. */
void
-init_function_start (subr, filename, line)
- tree subr;
- const char *filename;
- int line;
+init_function_start (tree subr)
{
prepare_function_start ();
/* Within function body, compute a type's size as soon it is laid out. */
immediate_size_expand++;
- /* Prevent ever trying to delete the first instruction of a function.
- Also tell final how to output a linenum before the function prologue.
- Note linenums could be missing, e.g. when compiling a Java .class file. */
- if (line > 0)
- emit_line_note (filename, line);
+ /* Prevent ever trying to delete the first instruction of a
+ function. Also tell final how to output a linenum before the
+ function prologue. Note linenums could be missing, e.g. when
+ compiling a Java .class file. */
+ if (DECL_SOURCE_LINE (subr))
+ emit_line_note (DECL_SOURCE_LOCATION (subr));
/* Make sure first insn is a note even if we don't want linenums.
This makes sure the first insn will never be deleted.
Also, final expects a note to appear there. */
- emit_note (NULL, NOTE_INSN_DELETED);
+ emit_note (NOTE_INSN_DELETED);
/* Set flags used by final.c. */
if (aggregate_value_p (DECL_RESULT (subr)))
/* Make sure all values used by the optimization passes have sane
defaults. */
void
-init_function_for_compilation ()
+init_function_for_compilation (void)
{
reg_renumber = 0;
#endif
void
-expand_main_function ()
+expand_main_function (void)
{
#ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
#endif
#ifndef HAS_INIT_SECTION
- emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), LCT_NORMAL,
- VOIDmode, 0);
+ emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}
\f
TREE_VALUE of each node is a SAVE_EXPR. */
void
-expand_pending_sizes (pending_sizes)
- tree pending_sizes;
+expand_pending_sizes (tree pending_sizes)
{
tree tem;
the function's parameters, which must be run at any return statement. */
void
-expand_function_start (subr, parms_have_cleanups)
- tree subr;
- int parms_have_cleanups;
+expand_function_start (tree subr, int parms_have_cleanups)
{
tree tem;
rtx last_ptr = NULL_RTX;
The move is supposed to make sdb output more accurate. */
/* Indicate the beginning of the function body,
as opposed to parm setup. */
- emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
+ emit_note (NOTE_INSN_FUNCTION_BEG);
if (GET_CODE (get_last_insn ()) != NOTE)
- emit_note (NULL, NOTE_INSN_DELETED);
+ emit_note (NOTE_INSN_DELETED);
parm_birth_insn = get_last_insn ();
context_display = 0;
/* After the display initializations is where the tail-recursion label
should go, if we end up needing one. Ensure we have a NOTE here
since some things (like trampolines) get placed before this. */
- tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
+ tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
/* Evaluate now the sizes of any types declared among the arguments. */
expand_pending_sizes (nreverse (get_pending_sizes ()));
\f
/* Undo the effects of init_dummy_function_start. */
void
-expand_dummy_function_end ()
+expand_dummy_function_end (void)
{
/* End any sequences that failed to be closed due to syntax errors. */
while (in_sequence_p ())
the current function. */
void
-diddle_return_value (doit, arg)
- void (*doit) PARAMS ((rtx, void *));
- void *arg;
+diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
rtx outgoing = current_function_return_rtx;
}
static void
-do_clobber_return_reg (reg, arg)
- rtx reg;
- void *arg ATTRIBUTE_UNUSED;
+do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
}
void
-clobber_return_register ()
+clobber_return_register (void)
{
diddle_return_value (do_clobber_return_reg, NULL);
}
static void
-do_use_return_reg (reg, arg)
- rtx reg;
- void *arg ATTRIBUTE_UNUSED;
+do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
emit_insn (gen_rtx_USE (VOIDmode, reg));
}
void
-use_return_register ()
+use_return_register (void)
{
diddle_return_value (do_use_return_reg, NULL);
}
static GTY(()) rtx initial_trampoline;
-/* Generate RTL for the end of the current function.
- FILENAME and LINE are the current position in the source file.
-
- It is up to language-specific callers to do cleanups for parameters--
- or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
+/* Generate RTL for the end of the current function. */
void
-expand_function_end (filename, line, end_bindings)
- const char *filename;
- int line;
- int end_bindings;
+expand_function_end (void)
{
tree link;
rtx clobber_after;
emit_block_move (blktramp, initial_trampoline,
GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
#endif
+ trampolines_created = 1;
INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
seq = get_insns ();
end_sequence ();
}
}
- /* Warn about unused parms if extra warnings were specified. */
- /* Either ``-Wextra -Wunused'' or ``-Wunused-parameter'' enables this
- warning. WARN_UNUSED_PARAMETER is negative when set by
- -Wunused. Note that -Wall implies -Wunused, so ``-Wall -Wextra'' will
- also give these warnings. */
- if (warn_unused_parameter > 0
- || (warn_unused_parameter < 0 && extra_warnings))
+ /* Possibly warn about unused parameters. */
+ if (warn_unused_parameter)
{
tree decl;
decl; decl = TREE_CHAIN (decl))
if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
&& DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
- warning_with_decl (decl, "unused parameter `%s'");
+ warning ("%Hunused parameter '%D'",
+ &DECL_SOURCE_LOCATION (decl), decl);
}
/* Delete handlers for nonlocal gotos if nothing uses them. */
/* Mark the end of the function body.
If control reaches this insn, the function can drop through
without returning a value. */
- emit_note (NULL, NOTE_INSN_FUNCTION_END);
+ emit_note (NOTE_INSN_FUNCTION_END);
/* Must mark the last line number note in the function, so that the test
coverage code can avoid counting the last line twice. This just tells
already exists a copy of this note somewhere above. This line number
note is still needed for debugging though, so we can't delete it. */
if (flag_test_coverage)
- emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
+ emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
/* Output a linenumber for the end of the function.
SDB depends on this. */
- emit_line_note_force (filename, line);
+ force_next_line_note ();
+ emit_line_note (input_location);
/* Before the return label (if any), clobber the return
registers so that they are not propagated live to the rest of
if (return_label)
emit_label (return_label);
- /* C++ uses this. */
- if (end_bindings)
- expand_end_bindings (0, 0, 0);
-
if (current_function_instrument_entry_exit)
{
rtx fun = DECL_RTL (current_function_decl);
emit_group_move (real_decl_rtl, decl_rtl);
else
emit_group_load (real_decl_rtl, decl_rtl,
+ TREE_TYPE (decl_result),
int_size_in_bytes (TREE_TYPE (decl_result)));
}
else
}
rtx
-get_arg_pointer_save_area (f)
- struct function *f;
+get_arg_pointer_save_area (struct function *f)
{
rtx ret = f->x_arg_pointer_save_area;
(a list of one or more insns). */
static void
-record_insns (insns, vecp)
- rtx insns;
- varray_type *vecp;
+record_insns (rtx insns, varray_type *vecp)
{
int i, len;
rtx tmp;
}
}
+/* Set the specified locator to the insn chain. */
+static void
+set_insn_locators (rtx insn, int loc)
+{
+ while (insn != NULL_RTX)
+ {
+ if (INSN_P (insn))
+ INSN_LOCATOR (insn) = loc;
+ insn = NEXT_INSN (insn);
+ }
+}
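set_insn_locators is used further down to stamp the freshly generated prologue and epilogue sequences with prologue_locator and epilogue_locator. The same walk-and-tag shape, shown on a toy linked list rather than the real rtx insn chain; struct node and set_locators are invented for the example, with is_real standing in for the INSN_P test.

#include <stdio.h>

/* Toy stand-in for the insn chain: only "real" nodes get tagged.  */
struct node
{
  int is_real;
  int locator;
  struct node *next;
};

static void
set_locators (struct node *n, int loc)
{
  for (; n != NULL; n = n->next)
    if (n->is_real)
      n->locator = loc;
}

int
main (void)
{
  struct node c = { 1, 0, NULL };
  struct node b = { 0, 0, &c };   /* a note-like node, left untouched */
  struct node a = { 1, 0, &b };

  set_locators (&a, 7);
  printf ("%d %d %d\n", a.locator, b.locator, c.locator);  /* prints 7 0 7 */
  return 0;
}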
+
/* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
be running after reorg, SEQUENCE rtl is possible. */
static int
-contains (insn, vec)
- rtx insn;
- varray_type vec;
+contains (rtx insn, varray_type vec)
{
int i, j;
}
int
-prologue_epilogue_contains (insn)
- rtx insn;
+prologue_epilogue_contains (rtx insn)
{
if (contains (insn, prologue))
return 1;
}
int
-sibcall_epilogue_contains (insn)
- rtx insn;
+sibcall_epilogue_contains (rtx insn)
{
if (sibcall_epilogue)
return contains (insn, sibcall_epilogue);
block_for_insn appropriately. */
static void
-emit_return_into_block (bb, line_note)
- basic_block bb;
- rtx line_note;
+emit_return_into_block (basic_block bb, rtx line_note)
{
emit_jump_insn_after (gen_return (), bb->end);
if (line_note)
- emit_line_note_after (NOTE_SOURCE_FILE (line_note),
- NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
+ emit_note_copy_after (line_note, PREV_INSN (bb->end));
}
#endif /* HAVE_return */
its value. */
};
-static void handle_epilogue_set PARAMS ((rtx, struct epi_info *));
-static void emit_equiv_load PARAMS ((struct epi_info *));
+static void handle_epilogue_set (rtx, struct epi_info *);
+static void emit_equiv_load (struct epi_info *);
/* Modify INSN, a list of one or more insns that is part of the epilogue, to
no modifications to the stack pointer. Return the new list of insns. */
static rtx
-keep_stack_depressed (insns)
- rtx insns;
+keep_stack_depressed (rtx insns)
{
int j;
struct epi_info info;
more insns. */
static void
-handle_epilogue_set (set, p)
- rtx set;
- struct epi_info *p;
+handle_epilogue_set (rtx set, struct epi_info *p)
{
/* First handle the case where we are setting SP. Record what it is being
set from. If unknown, abort. */
/* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
static void
-emit_equiv_load (p)
- struct epi_info *p;
+emit_equiv_load (struct epi_info *p)
{
if (p->equiv_reg_src != 0)
emit_move_insn (p->sp_equiv_reg, p->equiv_reg_src);
the epilogue begins. Update the basic block information when possible. */
void
-thread_prologue_and_epilogue_insns (f)
- rtx f ATTRIBUTE_UNUSED;
+thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
{
int inserted = 0;
edge e;
/* Retain a map of the prologue insns. */
record_insns (seq, &prologue);
- prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
+ prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
seq = get_insns ();
end_sequence ();
+ set_insn_locators (seq, prologue_locator);
/* Can't deal with multiple successors of the entry block
at the moment. Function should always have at least one
that with a conditional return instruction. */
else if (condjump_p (jump))
{
- rtx ret, *loc;
-
- ret = SET_SRC (PATTERN (jump));
- if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
- loc = &XEXP (ret, 1);
- else
- loc = &XEXP (ret, 2);
- ret = gen_rtx_RETURN (VOIDmode);
-
- if (! validate_change (jump, loc, ret, 0))
+ if (! redirect_jump (jump, 0, 0))
continue;
- if (JUMP_LABEL (jump))
- LABEL_NUSES (JUMP_LABEL (jump))--;
/* If this block has only one successor, it both jumps
and falls through to the fallthru block, so we can't
goto epilogue_done;
start_sequence ();
- epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
+ epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
seq = gen_epilogue ();
/* Retain a map of the epilogue insns. */
record_insns (seq, &epilogue);
+ set_insn_locators (seq, epilogue_locator);
seq = get_insns ();
end_sequence ();
avoid getting rid of sibcall epilogue insns. Do this before we
actually emit the sequence. */
record_insns (seq, &sibcall_epilogue);
+ set_insn_locators (seq, epilogue_locator);
i = PREV_INSN (insn);
newinsn = emit_insn_before (seq, insn);
insn = PREV_INSN (insn))
if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
{
- emit_line_note_after (NOTE_SOURCE_FILE (insn),
- NOTE_LINE_NUMBER (insn),
- prologue_end);
+ emit_note_copy_after (insn, prologue_end);
break;
}
}
scheduling and delayed branch scheduling. */
void
-reposition_prologue_and_epilogue_notes (f)
- rtx f ATTRIBUTE_UNUSED;
+reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
rtx insn, last, note;
/* Called once, at initialization, to initialize function.c. */
void
-init_function_once ()
+init_function_once (void)
{
VARRAY_INT_INIT (prologue, 0, "prologue");
VARRAY_INT_INIT (epilogue, 0, "epilogue");