#include "ggc.h"
#include "tm_p.h"
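+
+/* Supply a default of zero so ACCUMULATE_OUTGOING_ARGS can be tested
+   in C conditionals as well as with the preprocessor.  */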
+#ifndef ACCUMULATE_OUTGOING_ARGS
+#define ACCUMULATE_OUTGOING_ARGS 0
+#endif
+
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif
imposed on the memory. For example, if the stack slot is the
     call frame for an inlined function, we have no idea what alias
sets will be assigned to various pieces of the call frame. */
- int alias_set;
+ HOST_WIDE_INT alias_set;
/* The value of `sequence_rtl_expr' when this temporary is allocated. */
tree rtl_expr;
/* Non-zero if this temporary is currently in use. */
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
enum machine_mode, enum machine_mode,
- int, int, int, struct hash_table *));
+ int, unsigned int, int,
+ struct hash_table *));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
struct hash_table *));
static struct fixup_replacement
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
-static void instantiate_decl PARAMS ((rtx, int, int));
+static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
tree));
#endif
-#ifdef ARGS_GROW_DOWNWARD
-static tree round_down PARAMS ((tree, int));
-#endif
static rtx round_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
static int contains PARAMS ((rtx, varray_type));
#ifdef HAVE_return
-static void emit_return_into_block PARAMS ((basic_block));
+static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
static boolean purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
free_after_compilation (f)
struct function *f;
{
+ struct temp_slot *ts;
+ struct temp_slot *next;
+
free_eh_status (f);
free_expr_status (f);
free_emit_status (f);
if (f->x_parm_reg_stack_loc)
free (f->x_parm_reg_stack_loc);
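+  /* Temp slots are allocated with xmalloc, so walk the list and free
+     each slot explicitly.  */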
+ for (ts = f->x_temp_slots; ts; ts = next)
+ {
+ next = ts->next;
+ free (ts);
+ }
+ f->x_temp_slots = NULL;
+
f->arg_offset_rtx = NULL;
f->return_rtx = NULL;
f->internal_arg_pointer = NULL;
f->x_parm_birth_insn = NULL;
f->x_last_parm_insn = NULL;
f->x_parm_reg_stack_loc = NULL;
- f->x_temp_slots = NULL;
f->fixup_var_refs_queue = NULL;
f->original_arg_vector = NULL;
f->original_decl_initial = NULL;
/* Wrapper around assign_stack_local_1; assign a local stack slot for the
current function. */
+
rtx
assign_stack_local (mode, size, align)
enum machine_mode mode;
tree type;
{
int align;
- int alias_set;
+ HOST_WIDE_INT alias_set;
struct temp_slot *p, *best_p = 0;
/* If SIZE is -1 it means that somebody tried to allocate a temporary
if (! type)
type = type_for_mode (mode, 0);
+
if (type)
align = LOCAL_ALIGNMENT (type, align);
for (p = temp_slots; p; p = p->next)
if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
&& ! p->in_use
- && (!flag_strict_aliasing
+ && (! flag_strict_aliasing
|| (alias_set && p->alias_set == alias_set))
&& (best_p == 0 || best_p->size > p->size
|| (best_p->size == p->size && best_p->align > p->align)))
/* If there are enough aligned bytes left over, make them into a new
temp_slot so that the extra bytes don't get wasted. Do this only
for BLKmode slots, so that we can be sure of the alignment. */
- if (GET_MODE (best_p->slot) == BLKmode
- /* We can't split slots if -fstrict-aliasing because the
- information about the alias set for the new slot will be
- lost. */
- && !flag_strict_aliasing)
+ if (GET_MODE (best_p->slot) == BLKmode)
{
int alignment = best_p->align / BITS_PER_UNIT;
HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
if (best_p->size - rounded_size >= alignment)
{
- p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
+ p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
p->in_use = p->addr_taken = 0;
p->size = best_p->size - rounded_size;
p->base_offset = best_p->base_offset + rounded_size;
p->align = best_p->align;
p->address = 0;
p->rtl_expr = 0;
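+	  /* Carry the alias set over to the leftover slot so that
+	     splitting stays safe under -fstrict-aliasing.  */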
+ p->alias_set = best_p->alias_set;
p->next = temp_slots;
temp_slots = p;
{
HOST_WIDE_INT frame_offset_old = frame_offset;
- p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
+ p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
/* We are passing an explicit alignment request to assign_stack_local.
One side effect of that is assign_stack_local will not round SIZE
RTX_UNCHANGING_P (p->slot) = 0;
MEM_IN_STRUCT_P (p->slot) = 0;
MEM_SCALAR_P (p->slot) = 0;
- MEM_ALIAS_SET (p->slot) = 0;
+ MEM_ALIAS_SET (p->slot) = alias_set;
+
+ if (type != 0)
+ MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
+
return p->slot;
}
instead. This is the case for Chill variable-sized strings. */
if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
&& TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
- && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
- size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));
+ && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
+ size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
tmp = assign_stack_temp_for_type (mode, size, keep, type);
- MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
return tmp;
}
}
/* Either delete Q or advance past it. */
if (delete_q)
- prev_q->next = q->next;
+ {
+ prev_q->next = q->next;
+ free (q);
+ }
else
prev_q = q;
}
struct function *function = 0;
tree context;
int can_use_addressof;
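+  /* DECL may also be a SAVE_EXPR, which is not a decl at all; guard
+     the decl-only fields accordingly.  */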
+ int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
+ int usedp = (TREE_USED (decl)
+ || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));
context = decl_function_context (decl);
/* If this is a variable-size object with a pseudo to address it,
put that pseudo into the stack, if the var is nonlocal. */
- if (DECL_NONLOCAL (decl)
+ if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
&& GET_CODE (reg) == MEM
&& GET_CODE (XEXP (reg, 0)) == REG
&& REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
if (can_use_addressof)
gen_mem_addressof (reg, decl);
else
- put_reg_into_stack (function, reg, TREE_TYPE (decl),
- promoted_mode, decl_mode,
- TREE_SIDE_EFFECTS (decl), 0,
- TREE_USED (decl) || DECL_INITIAL (decl) != 0,
- 0);
+ put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
+ decl_mode, volatilep, 0, usedp, 0);
}
else if (GET_CODE (reg) == CONCAT)
{
#ifdef FRAME_GROWS_DOWNWARD
/* Since part 0 should have a lower address, do it second. */
put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
- part_mode, TREE_SIDE_EFFECTS (decl), 0,
- TREE_USED (decl) || DECL_INITIAL (decl) != 0,
- 0);
+ part_mode, volatilep, 0, usedp, 0);
put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
- part_mode, TREE_SIDE_EFFECTS (decl), 0,
- TREE_USED (decl) || DECL_INITIAL (decl) != 0,
- 0);
+ part_mode, volatilep, 0, usedp, 0);
#else
put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
- part_mode, TREE_SIDE_EFFECTS (decl), 0,
- TREE_USED (decl) || DECL_INITIAL (decl) != 0,
- 0);
+ part_mode, volatilep, 0, usedp, 0);
put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
- part_mode, TREE_SIDE_EFFECTS (decl), 0,
- TREE_USED (decl) || DECL_INITIAL (decl) != 0,
- 0);
+ part_mode, volatilep, 0, usedp, 0);
#endif
/* Change the CONCAT into a combined MEM for both parts. */
PUT_CODE (reg, MEM);
- MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
- MEM_ALIAS_SET (reg) = get_alias_set (decl);
- MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (TREE_TYPE (decl)));
+ set_mem_attributes (reg, decl, 1);
/* The two parts are in memory order already.
Use the lower parts address as ours. */
tree type;
enum machine_mode promoted_mode, decl_mode;
int volatile_p;
- int original_regno;
+ unsigned int original_regno;
int used_p;
struct hash_table *ht;
{
struct function *func = function ? function : cfun;
rtx new = 0;
- int regno = original_regno;
+ unsigned int regno = original_regno;
if (regno == 0)
regno = REGNO (reg);
if (regno < func->x_max_parm_reg)
new = func->x_parm_reg_stack_loc[regno];
+
if (new == 0)
new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
newmem = gen_rtx_MEM (wanted_mode,
plus_constant (XEXP (tem, 0), offset));
- RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
MEM_COPY_ATTRIBUTES (newmem, tem);
/* Make the change and see if the insn remains valid. */
newmem = gen_rtx_MEM (wanted_mode,
plus_constant (XEXP (tem, 0),
offset));
- RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
MEM_COPY_ATTRIBUTES (newmem, tem);
/* Make the change and see if the insn remains valid. */
#ifndef STACK_DYNAMIC_OFFSET
-#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments. If
REG_PARM_STACK_SPACE is defined, this includes the space for the register
parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
-(current_function_outgoing_args_size \
- + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
+((ACCUMULATE_OUTGOING_ARGS \
+ ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
+ + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
-(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
-#endif
-
-#else
-#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
+((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
+ + (STACK_POINTER_OFFSET))
#endif
#endif
-/* On a few machines, the CFA coincides with the arg pointer. */
+/* On most machines, the CFA coincides with the first incoming parm. */
#ifndef ARG_POINTER_CFA_OFFSET
-#define ARG_POINTER_CFA_OFFSET 0
+#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
#endif
/* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
-#if 0
void
flush_addressof (decl)
tree decl;
&& GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
}
-#endif
/* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
abort ();
put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
- DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
+ GET_MODE (reg),
+ (TREE_CODE (decl) != SAVE_EXPR
+ && TREE_THIS_VOLATILE (decl)),
ADDRESSOF_REGNO (r),
- TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
+ (TREE_USED (decl)
+ || (TREE_CODE (decl) != SAVE_EXPR
+ && DECL_INITIAL (decl) != 0)),
+ ht);
}
/* List of replacements made below in purge_addressof_1 when creating
{
sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
MEM_COPY_ATTRIBUTES (sub2, sub);
- RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
sub = sub2;
}
else if (GET_CODE (sub) == REG
start_sequence ();
store_bit_field (sub, size_x, 0, GET_MODE (x),
val, GET_MODE_SIZE (GET_MODE (sub)),
- GET_MODE_SIZE (GET_MODE (sub)));
+ GET_MODE_ALIGNMENT (GET_MODE (sub)));
/* Make sure to unshare any shared rtl that store_bit_field
might have created. */
/* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
requires a fixup pass over the instruction stream to correct
INSNs that depended on the REG being a REG, and not a MEM. But,
- these fixup passes are slow. Furthermore, more MEMs are not
+ these fixup passes are slow. Furthermore, most MEMs are not
mentioned in very many instructions. So, we speed up the process
by pre-calculating which REGs occur in which INSNs; that allows
us to perform the fixup passes much more quickly. */
hash_table_free (&ht);
purge_bitfield_addressof_replacements = 0;
purge_addressof_replacements = 0;
+
+ /* REGs are shared. purge_addressof will destructively replace a REG
+ with a MEM, which creates shared MEMs.
+
+ Unfortunately, the children of put_reg_into_stack assume that MEMs
+ referring to the same stack slot are shared (fixup_var_refs and
+ the associated hash table code).
+
+ So, we have to do another unsharing pass after we have flushed any
+ REGs that had their address taken into the stack.
+
+ It may be worth tracking whether or not we converted any REGs into
+ MEMs to avoid this overhead when it is not needed. */
+ unshare_all_rtl_again (get_insns ());
}
\f
/* Pass through the INSNS of function FNDECL and convert virtual register
rtx insns;
{
rtx insn;
- int i;
+ unsigned int i;
/* Compute the offsets to use for this function. */
in_arg_offset = FIRST_PARM_OFFSET (fndecl);
var_offset = STARTING_FRAME_OFFSET;
dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
out_arg_offset = STACK_POINTER_OFFSET;
- cfa_offset = ARG_POINTER_CFA_OFFSET;
+ cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
/* Scan all variables and parameters of this function. For each that is
in memory, instantiate all virtual registers if the result is a valid
static void
instantiate_decl (x, size, valid_only)
rtx x;
- int size;
+ HOST_WIDE_INT size;
int valid_only;
{
enum machine_mode mode;
instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
- if (valid_only)
+ if (valid_only && size >= 0)
{
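+      /* SIZE is known to be non-negative here, so it can be copied
+	 into an unsigned type for the mode-size comparisons below.  */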
+ unsigned HOST_WIDE_INT decl_size = size;
+
/* Now verify that the resulting address is valid for every integer or
floating-point mode up to and including SIZE bytes long. We do this
since the object might be accessed in any mode and frame addresses
are shared. */
for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
+ mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
mode = GET_MODE_WIDER_MODE (mode))
if (! memory_address_p (mode, addr))
return;
for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
- mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
+ mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
mode = GET_MODE_WIDER_MODE (mode))
if (! memory_address_p (mode, addr))
return;
{
int i, regno, nregs;
rtx reg;
- tree type;
- if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
- type = exp;
- else
- type = TREE_TYPE (exp);
+ tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
+
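+  /* A void value never needs to be returned in memory.  */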
+ if (TREE_CODE (type) == VOID_TYPE)
+ return 0;
if (RETURN_IN_MEMORY (type))
return 1;
/* Types that are TREE_ADDRESSABLE must be constructed in memory,
for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
{
- int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
struct args_size stack_offset;
struct args_size arg_size;
int passed_pointer = 0;
type of the first field for the tests below. We have already
verified that the modes are the same. */
if (DECL_TRANSPARENT_UNION (parm)
- || TYPE_TRANSPARENT_UNION (passed_type))
+ || (TREE_CODE (passed_type) == UNION_TYPE
+ && TYPE_TRANSPARENT_UNION (passed_type)))
passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
/* See if this arg was passed by invisible reference. It is if
internal_arg_pointer,
offset_rtx));
- /* If this is a memory ref that contains aggregate components,
- mark it as such for cse and loop optimize. Likewise if it
- is readonly. */
- MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
- RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
- MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
+ set_mem_attributes (stack_parm, parm, 1);
}
/* If this parameter was passed both in registers and in the stack,
if (GET_CODE (entry_parm) == PARALLEL)
emit_group_store (validize_mem (stack_parm), entry_parm,
int_size_in_bytes (TREE_TYPE (parm)),
- (TYPE_ALIGN (TREE_TYPE (parm))
- / BITS_PER_UNIT));
+ TYPE_ALIGN (TREE_TYPE (parm)));
+
else
move_block_from_reg (REGNO (entry_parm),
validize_mem (stack_parm), nregs,
&& nominal_mode != BLKmode && nominal_mode != passed_mode)
stack_parm = 0;
-#if 0
- /* Now adjust STACK_PARM to the mode and precise location
- where this parameter should live during execution,
- if we discover that it must live in the stack during execution.
- To make debuggers happier on big-endian machines, we store
- the value in the last bytes of the space available. */
-
- if (nominal_mode != BLKmode && nominal_mode != passed_mode
- && stack_parm != 0)
- {
- rtx offset_rtx;
-
- if (BYTES_BIG_ENDIAN
- && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
- stack_offset.constant += (GET_MODE_SIZE (passed_mode)
- - GET_MODE_SIZE (nominal_mode));
-
- offset_rtx = ARGS_SIZE_RTX (stack_offset);
- if (offset_rtx == const0_rtx)
- stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
- else
- stack_parm = gen_rtx_MEM (nominal_mode,
- gen_rtx_PLUS (Pmode,
- internal_arg_pointer,
- offset_rtx));
-
- /* If this is a memory ref that contains aggregate components,
- mark it as such for cse and loop optimize. */
- MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
- }
-#endif /* 0 */
-
/* ENTRY_PARM is an RTX for the parameter as it arrives,
in the mode in which it arrives.
STACK_PARM is an RTX for a stack slot where the parameter can live
stack_parm
= assign_stack_local (GET_MODE (entry_parm),
size_stored, 0);
-
- /* If this is a memory ref that contains aggregate
- components, mark it as such for cse and loop optimize. */
- MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
+ set_mem_attributes (stack_parm, parm, 1);
}
else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
abort ();
- if (TREE_READONLY (parm))
- RTX_UNCHANGING_P (stack_parm) = 1;
-
/* Handle calls that pass values in multiple non-contiguous
locations. The Irix 6 ABI has examples of this. */
if (GET_CODE (entry_parm) == PARALLEL)
emit_group_store (validize_mem (stack_parm), entry_parm,
int_size_in_bytes (TREE_TYPE (parm)),
- (TYPE_ALIGN (TREE_TYPE (parm))
- / BITS_PER_UNIT));
+ TYPE_ALIGN (TREE_TYPE (parm)));
else
move_block_from_reg (REGNO (entry_parm),
validize_mem (stack_parm),
may need to do it in a wider mode. */
register rtx parmreg;
- int regno, regnoi = 0, regnor = 0;
+ unsigned int regno, regnoi = 0, regnor = 0;
unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
{
DECL_RTL (parm)
= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
- MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
+ set_mem_attributes (DECL_RTL (parm), parm, 1);
}
else
DECL_RTL (parm) = parmreg;
else
copy = assign_stack_temp (TYPE_MODE (type),
int_size_in_bytes (type), 1);
- MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
- RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
+ set_mem_attributes (copy, parm, 1);
store_expr (parm, copy, 0);
emit_move_insn (parmreg, XEXP (copy, 0));
/* For pointer data type, suggest pointer register. */
if (POINTER_TYPE_P (TREE_TYPE (parm)))
mark_reg_pointer (parmreg,
- (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
- / BITS_PER_UNIT));
+ TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}
else
{
stack_parm
= assign_stack_local (GET_MODE (entry_parm),
GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
- /* If this is a memory ref that contains aggregate components,
- mark it as such for cse and loop optimize. */
- MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
+ set_mem_attributes (stack_parm, parm, 1);
}
if (promoted_mode != nominal_mode)
if (parm == function_result_decl)
{
tree result = DECL_RESULT (fndecl);
- tree restype = TREE_TYPE (result);
DECL_RTL (result)
= gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
- MEM_SET_IN_STRUCT_P (DECL_RTL (result),
- AGGREGATE_TYPE_P (restype));
+ set_mem_attributes (DECL_RTL (result), result, 1);
}
-
- if (TREE_THIS_VOLATILE (parm))
- MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
- if (TREE_READONLY (parm))
- RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
}
/* Output all parameter conversion instructions (possibly including calls)
rtx
promoted_input_arg (regno, pmode, punsignedp)
- int regno;
+ unsigned int regno;
enum machine_mode *pmode;
int *punsignedp;
{
addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
addr = memory_address (Pmode, addr);
- base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
+ base = gen_rtx_MEM (Pmode, addr);
+ MEM_ALIAS_SET (base) = get_frame_alias_set ();
+ base = copy_to_reg (base);
#else
displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
base = lookup_static_chain (var);
void
init_function_start (subr, filename, line)
tree subr;
- char *filename;
+ const char *filename;
int line;
{
prepare_function_start ();
{
DECL_RTL (DECL_RESULT (subr))
= gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
- MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
- AGGREGATE_TYPE_P (TREE_TYPE
- (DECL_RESULT
- (subr))));
+ set_mem_attributes (DECL_RTL (DECL_RESULT (subr)),
+ DECL_RESULT (subr), 1);
}
}
else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
else
/* Scalar, returned in a register. */
{
-#ifdef FUNCTION_OUTGOING_VALUE
DECL_RTL (DECL_RESULT (subr))
- = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
-#else
- DECL_RTL (DECL_RESULT (subr))
- = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
-#endif
+ = hard_function_value (TREE_TYPE (DECL_RESULT (subr)), subr, 1);
/* Mark this reg as the function's return value. */
if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
#ifdef FRAME_GROWS_DOWNWARD
last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
- last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
- memory_address (Pmode,
- last_ptr)));
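+  /* The static chain is likewise stored in frame memory, so this load
+     also gets the frame alias set.  */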
+ last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
+ MEM_ALIAS_SET (last_ptr) = get_frame_alias_set ();
+ last_ptr = copy_to_reg (last_ptr);
/* If we are not optimizing, ensure that we know that this
piece of context is live over the entire function. */
void *arg;
{
rtx outgoing = current_function_return_rtx;
+ int pcc;
if (! outgoing)
return;
- if (GET_CODE (outgoing) == REG
- && REGNO (outgoing) >= FIRST_PSEUDO_REGISTER)
+ pcc = (current_function_returns_struct
+ || current_function_returns_pcc_struct);
+
+ if ((GET_CODE (outgoing) == REG
+ && REGNO (outgoing) >= FIRST_PSEUDO_REGISTER)
+ || pcc)
{
tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
+
+ /* A PCC-style return returns a pointer to the memory in which
+ the structure is stored. */
+ if (pcc)
+ type = build_pointer_type (type);
+
#ifdef FUNCTION_OUTGOING_VALUE
outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
#else
if (GET_MODE (outgoing) == BLKmode)
PUT_MODE (outgoing,
GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
+ REG_FUNCTION_VALUE_P (outgoing) = 1;
}
if (GET_CODE (outgoing) == REG)
void
expand_function_end (filename, line, end_bindings)
- char *filename;
+ const char *filename;
int line;
int end_bindings;
{
blktramp = change_address (initial_trampoline, BLKmode, tramp);
emit_block_move (blktramp, initial_trampoline,
GEN_INT (TRAMPOLINE_SIZE),
- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
+ TRAMPOLINE_ALIGNMENT);
#endif
INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
seq = get_insns ();
}
/* Warn about unused parms if extra warnings were specified. */
- if (warn_unused && extra_warnings)
+ /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
+ warning. WARN_UNUSED_PARAMETER is negative when set by
+ -Wunused. */
+ if (warn_unused_parameter > 0
+ || (warn_unused_parameter < 0 && extra_warnings))
{
tree decl;
already exists a copy of this note somewhere above. This line number
note is still needed for debugging though, so we can't delete it. */
if (flag_test_coverage)
- emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
+ emit_note (NULL_PTR, NOTE_INSN_REPEATED_LINE_NUMBER);
/* Output a linenumber for the end of the function.
SDB depends on this. */
block_for_insn appropriately. */
static void
-emit_return_into_block (bb)
+emit_return_into_block (bb, line_note)
basic_block bb;
+ rtx line_note;
{
rtx p, end;
- end = emit_jump_insn_after (gen_return (), bb->end);
p = NEXT_INSN (bb->end);
+ end = emit_jump_insn_after (gen_return (), bb->end);
+ if (line_note)
+ emit_line_note_after (NOTE_SOURCE_FILE (line_note),
+ NOTE_LINE_NUMBER (line_note), bb->end);
+
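+  /* P was read before the new insns were emitted, so walking backwards
+     from it reaches every insn inserted after the old block end.  */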
while (1)
{
set_block_for_insn (p, bb);
- if (p == end)
+ if (p == bb->end)
break;
- p = NEXT_INSN (p);
+ p = PREV_INSN (p);
}
bb->end = end;
}
thread_prologue_and_epilogue_insns (f)
rtx f ATTRIBUTE_UNUSED;
{
- int insertted = 0;
+ int inserted = 0;
edge e;
rtx seq;
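+  /* Track where the prologue ends and the epilogue begins, so that
+     line number notes can be fixed up around them afterward.  */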
+#ifdef HAVE_prologue
+ rtx prologue_end = NULL_RTX;
+#endif
+#if defined (HAVE_epilogue) || defined (HAVE_return)
+ rtx epilogue_end = NULL_RTX;
+#endif
#ifdef HAVE_prologue
if (HAVE_prologue)
{
- rtx insn;
-
start_sequence ();
seq = gen_prologue();
emit_insn (seq);
if (GET_CODE (seq) != SEQUENCE)
seq = get_insns ();
record_insns (seq, &prologue);
- emit_note (NULL, NOTE_INSN_PROLOGUE_END);
-
- /* GDB handles `break f' by setting a breakpoint on the first
- line note *after* the prologue. That means that we should
- insert a line note here; otherwise, if the next line note
- comes part way into the next block, GDB will skip all the way
- to that point. */
- insn = next_nonnote_insn (f);
- while (insn)
- {
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) >= 0)
- {
- emit_line_note_force (NOTE_SOURCE_FILE (insn),
- NOTE_LINE_NUMBER (insn));
- break;
- }
-
- insn = PREV_INSN (insn);
- }
+ prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
seq = gen_sequence ();
end_sequence ();
abort ();
insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
- insertted = 1;
+ inserted = 1;
}
else
emit_insn_after (seq, f);
if (last->head == label && GET_CODE (label) == CODE_LABEL)
{
+ rtx epilogue_line_note = NULL_RTX;
+
+ /* Locate the line number associated with the closing brace,
+ if we can find one. */
+ for (seq = get_last_insn ();
+ seq && ! active_insn_p (seq);
+ seq = PREV_INSN (seq))
+ if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
+ {
+ epilogue_line_note = seq;
+ break;
+ }
+
for (e = last->pred; e ; e = e_next)
{
basic_block bb = e->src;
with a simple return instruction. */
if (simplejump_p (jump))
{
- emit_return_into_block (bb);
+ emit_return_into_block (bb, epilogue_line_note);
flow_delete_insn (jump);
}
continue;
/* Fix up the CFG for the successful change we just made. */
- remove_edge (e);
- make_edge (NULL, bb, EXIT_BLOCK_PTR, 0);
+ redirect_edge_succ (e, EXIT_BLOCK_PTR);
}
/* Emit a return insn for the exit fallthru block. Whether
this is still reachable will be determined later. */
emit_barrier_after (last->end);
- emit_return_into_block (last);
+ emit_return_into_block (last, epilogue_line_note);
+ epilogue_end = last->end;
+ goto epilogue_done;
}
- else
- {
- /* The exit block wasn't empty. We have to use insert_insn_on_edge,
- as it may be the exit block can go elsewhere as well
- as exiting. */
- start_sequence ();
- emit_jump_insn (gen_return ());
- seq = gen_sequence ();
- end_sequence ();
- insert_insn_on_edge (seq, e);
- insertted = 1;
- }
- goto epilogue_done;
}
#endif
#ifdef HAVE_epilogue
goto epilogue_done;
start_sequence ();
- emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
+ epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
seq = gen_epilogue ();
emit_jump_insn (seq);
end_sequence();
insert_insn_on_edge (seq, e);
- insertted = 1;
+ inserted = 1;
}
#endif
epilogue_done:
- if (insertted)
+ if (inserted)
commit_edge_insertions ();
#ifdef HAVE_sibcall_epilogue
basic_block bb = e->src;
rtx insn = bb->end;
rtx i;
+ rtx newinsn;
if (GET_CODE (insn) != CALL_INSN
|| ! SIBLING_CALL_P (insn))
end_sequence ();
i = PREV_INSN (insn);
- emit_insn_before (seq, insn);
+ newinsn = emit_insn_before (seq, insn);
/* Update the UID to basic block map. */
for (i = NEXT_INSN (i); i != insn; i = NEXT_INSN (i))
/* Retain a map of the epilogue insns. Used in life analysis to
avoid getting rid of sibcall epilogue insns. */
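+      /* SEQ may be a single insn rather than a SEQUENCE; in that case
+	 the insn actually placed in the stream is NEWINSN.  */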
- record_insns (seq, &sibcall_epilogue);
+ record_insns (GET_CODE (seq) == SEQUENCE
+ ? seq : newinsn, &sibcall_epilogue);
+ }
+#endif
+
+#ifdef HAVE_prologue
+ if (prologue_end)
+ {
+ rtx insn, prev;
+
+ /* GDB handles `break f' by setting a breakpoint on the first
+ line note after the prologue. Which means (1) that if
+ there are line number notes before where we inserted the
+ prologue we should move them, and (2) we should generate a
+ note before the end of the first basic block, if there isn't
+ one already there. */
+
+ for (insn = prologue_end; insn ; insn = prev)
+ {
+ prev = PREV_INSN (insn);
+ if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
+ {
+ /* Note that we cannot reorder the first insn in the
+ chain, since rest_of_compilation relies on that
+ remaining constant. */
+ if (prev == NULL)
+ break;
+ reorder_insns (insn, insn, prologue_end);
+ }
+ }
+
+ /* Find the last line number note in the first block. */
+ for (insn = BASIC_BLOCK (0)->end;
+ insn != prologue_end;
+ insn = PREV_INSN (insn))
+ if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
+ break;
+
+ /* If we didn't find one, make a copy of the first line number
+ we run across. */
+      if (insn == prologue_end)
+ {
+ for (insn = next_active_insn (prologue_end);
+ insn;
+ insn = PREV_INSN (insn))
+ if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
+ {
+ emit_line_note_after (NOTE_SOURCE_FILE (insn),
+ NOTE_LINE_NUMBER (insn),
+ prologue_end);
+ break;
+ }
+ }
+ }
+#endif
+#ifdef HAVE_epilogue
+ if (epilogue_end)
+ {
+ rtx insn, next;
+
+ /* Similarly, move any line notes that appear after the epilogue.
+	 There is no need, however, to be quite so anal about the existence
+ of such a note. */
+ for (insn = epilogue_end; insn ; insn = next)
+ {
+ next = NEXT_INSN (insn);
+ if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
+ reorder_insns (insn, insn, PREV_INSN (epilogue_end));
+ }
}
#endif
}