/* Expands front end tree to back end RTL for GNU C-Compiler
Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
- 1998, 1999, 2000 Free Software Foundation, Inc.
+ 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
-any later version.
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
-GNU CC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+along with GCC; see the file COPYING. If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA. */
/* This file handles the generation of rtl code from tree structure
at the level of the function as a whole.
#include "flags.h"
#include "except.h"
#include "function.h"
-#include "insn-flags.h"
#include "expr.h"
-#include "insn-codes.h"
+#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "hash.h"
#include "ggc.h"
#include "tm_p.h"
-
-#ifndef ACCUMULATE_OUTGOING_ARGS
-#define ACCUMULATE_OUTGOING_ARGS 0
-#endif
+#include "integrate.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif
-#if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
-#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
-#endif
-
/* Some systems use __main in a way incompatible with its use in gcc, in these
cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
give the same symbol without quotes for an alternative entry point. You
int current_function_uses_only_leaf_regs;
/* Nonzero once virtual register instantiation has been done.
- assign_stack_local uses frame_pointer_rtx when this is nonzero. */
-static int virtuals_instantiated;
+ assign_stack_local uses frame_pointer_rtx when this is nonzero.
+ calls.c:emit_library_call_value_1 uses it to set up
+ post-instantiation libcalls. */
+int virtuals_instantiated;
-/* These variables hold pointers to functions to
- save and restore machine-specific data,
- in push_function_context and pop_function_context. */
+/* These variables hold pointers to functions to create and destroy
+ target specific, per-function data structures. */
void (*init_machine_status) PARAMS ((struct function *));
-void (*save_machine_status) PARAMS ((struct function *));
-void (*restore_machine_status) PARAMS ((struct function *));
-void (*mark_machine_status) PARAMS ((struct function *));
void (*free_machine_status) PARAMS ((struct function *));
+/* This variable holds a pointer to a function to register any
+ data items in the target specific, per-function data structure
+ that will need garbage collection. */
+void (*mark_machine_status) PARAMS ((struct function *));
/* Likewise, but for language-specific data. */
void (*init_lang_status) PARAMS ((struct function *));
/* The currently compiled function. */
struct function *cfun = 0;
-/* Global list of all compiled functions. */
-struct function *all_functions = 0;
-
/* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
static varray_type prologue;
static varray_type epilogue;
int align;
/* The size, in units, of the slot. */
HOST_WIDE_INT size;
- /* The alias set for the slot. If the alias set is zero, we don't
- know anything about the alias set of the slot. We must only
- reuse a slot if it is assigned an object of the same alias set.
- Otherwise, the rest of the compiler may assume that the new use
- of the slot cannot alias the old use of the slot, which is
- false. If the slot has alias set zero, then we can't reuse the
- slot at all, since we have no idea what alias set may have been
- imposed on the memory. For example, if the stack slot is the
- call frame for an inline functioned, we have no idea what alias
- sets will be assigned to various pieces of the call frame. */
- HOST_WIDE_INT alias_set;
+ /* The type of the object in the slot, or zero if it doesn't correspond
+ to a type. We use this to determine whether a slot can be reused.
+ It can be reused if objects of the type of the new slot will always
+ conflict with objects of the type of the old slot. */
+ tree type;
/* The value of `sequence_rtl_expr' when this temporary is allocated. */
tree rtl_expr;
/* Non-zero if this temporary is currently in use. */
struct fixup_replacement *next;
};
-struct insns_for_mem_entry {
+struct insns_for_mem_entry
+{
/* The KEY in HE will be a MEM. */
struct hash_entry he;
/* These are the INSNS which reference the MEM. */
struct hash_table *));
static struct fixup_replacement
*find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
-static void fixup_var_refs_insns PARAMS ((rtx, enum machine_mode, int,
- rtx, int, struct hash_table *));
+static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
+ int, int));
+static void fixup_var_refs_insns_with_hash
+ PARAMS ((struct hash_table *, rtx,
+ enum machine_mode, int));
+static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
+ int, int));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
struct fixup_replacement **));
static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
+static rtx instantiate_new_reg PARAMS ((rtx, HOST_WIDE_INT *));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
tree));
#endif
static rtx round_trampoline_addr PARAMS ((rtx));
+static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
+static void reorder_blocks_0 PARAMS ((tree));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
+static void reorder_fix_fragments PARAMS ((tree));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
-static boolean purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
+static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
struct hash_table *));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#ifdef HAVE_epilogue
struct hash_table *,
hash_table_key));
static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
-static boolean insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
+static bool insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
-static void mark_temp_slot PARAMS ((struct temp_slot *));
static void mark_function_status PARAMS ((struct function *));
-static void mark_function_chain PARAMS ((void *));
+static void maybe_mark_struct_function PARAMS ((void *));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));
\f
/* Pointer to chain of `struct function' for containing functions. */
-struct function *outer_function_chain;
+static struct function *outer_function_chain;
/* Given a function decl for a containing function,
return the `struct function' for it. */
{
struct function *p;
- for (p = outer_function_chain; p; p = p->next)
+ for (p = outer_function_chain; p; p = p->outer)
if (p->decl == decl)
return p;
push_function_context_to (context)
tree context;
{
- struct function *p, *context_data;
+ struct function *p;
if (context)
{
- context_data = (context == current_function_decl
- ? cfun
- : find_function_data (context));
- context_data->contains_functions = 1;
+ if (context == current_function_decl)
+ cfun->contains_functions = 1;
+ else
+ {
+ struct function *containing = find_function_data (context);
+ containing->contains_functions = 1;
+ }
}
if (cfun == 0)
init_dummy_function_start ();
p = cfun;
- p->next = outer_function_chain;
+ p->outer = outer_function_chain;
outer_function_chain = p;
p->fixup_var_refs_queue = 0;
if (save_lang_status)
(*save_lang_status) (p);
- if (save_machine_status)
- (*save_machine_status) (p);
cfun = 0;
}
{
struct function *p = outer_function_chain;
struct var_refs_queue *queue;
- struct var_refs_queue *next;
cfun = p;
- outer_function_chain = p->next;
+ outer_function_chain = p->outer;
current_function_decl = p->decl;
reg_renumber = 0;
restore_emit_status (p);
+ restore_varasm_status (p);
- if (restore_machine_status)
- (*restore_machine_status) (p);
if (restore_lang_status)
(*restore_lang_status) (p);
/* Finish doing put_var_into_stack for any of our variables
which became addressable during the nested function. */
- for (queue = p->fixup_var_refs_queue; queue; queue = next)
- {
- next = queue->next;
- fixup_var_refs (queue->modified, queue->promoted_mode,
- queue->unsignedp, 0);
- free (queue);
- }
+ for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
+ fixup_var_refs (queue->modified, queue->promoted_mode,
+ queue->unsignedp, 0);
+
p->fixup_var_refs_queue = 0;
/* Reset variables that have known state during rtx generation. */
free_after_compilation (f)
struct function *f;
{
- struct temp_slot *ts;
- struct temp_slot *next;
-
free_eh_status (f);
free_expr_status (f);
free_emit_status (f);
if (f->x_parm_reg_stack_loc)
free (f->x_parm_reg_stack_loc);
- for (ts = f->x_temp_slots; ts; ts = next)
- {
- next = ts->next;
- free (ts);
- }
f->x_temp_slots = NULL;
-
f->arg_offset_rtx = NULL;
f->return_rtx = NULL;
f->internal_arg_pointer = NULL;
f->x_tail_recursion_label = NULL;
f->x_tail_recursion_reentry = NULL;
f->x_arg_pointer_save_area = NULL;
+ f->x_clobber_return_insn = NULL;
f->x_context_display = NULL;
f->x_trampoline_list = NULL;
f->x_parm_birth_insn = NULL;
int align;
struct function *function;
{
- register rtx x, addr;
+ rtx x, addr;
int bigend_correction = 0;
int alignment;
tree type;
{
int align;
- HOST_WIDE_INT alias_set;
struct temp_slot *p, *best_p = 0;
/* If SIZE is -1 it means that somebody tried to allocate a temporary
if (size == -1)
abort ();
- /* If we know the alias set for the memory that will be used, use
- it. If there's no TYPE, then we don't know anything about the
- alias set for the memory. */
- if (type)
- alias_set = get_alias_set (type);
- else
- alias_set = 0;
-
if (mode == BLKmode)
align = BIGGEST_ALIGNMENT;
else
for (p = temp_slots; p; p = p->next)
if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
&& ! p->in_use
- && (! flag_strict_aliasing
- || (alias_set && p->alias_set == alias_set))
+ && objects_must_conflict_p (p->type, type)
&& (best_p == 0 || best_p->size > p->size
|| (best_p->size == p->size && best_p->align > p->align)))
{
if (best_p->size - rounded_size >= alignment)
{
- p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
+ p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
p->in_use = p->addr_taken = 0;
p->size = best_p->size - rounded_size;
p->base_offset = best_p->base_offset + rounded_size;
p->align = best_p->align;
p->address = 0;
p->rtl_expr = 0;
- p->alias_set = best_p->alias_set;
+ p->type = best_p->type;
p->next = temp_slots;
temp_slots = p;
{
HOST_WIDE_INT frame_offset_old = frame_offset;
- p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
+ p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
/* We are passing an explicit alignment request to assign_stack_local.
One side effect of that is assign_stack_local will not round SIZE
align);
p->align = align;
- p->alias_set = alias_set;
/* The following slot size computation is necessary because we don't
know the actual size of the temporary slot until assign_stack_local
p->in_use = 1;
p->addr_taken = 0;
p->rtl_expr = seq_rtl_expr;
+ p->type = type;
if (keep == 2)
{
RTX_UNCHANGING_P (p->slot) = 0;
MEM_IN_STRUCT_P (p->slot) = 0;
MEM_SCALAR_P (p->slot) = 0;
- MEM_ALIAS_SET (p->slot) = alias_set;
+ MEM_VOLATILE_P (p->slot) = 0;
+
+ /* If we know the alias set for the memory that will be used, use
+ it. If there's no TYPE, then we don't know anything about the
+ alias set for the memory. */
+ set_mem_alias_set (p->slot, type ? get_alias_set (type) : 0);
+ /* If a type is specified, set the relevant flags. */
if (type != 0)
- MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
+ {
+ RTX_UNCHANGING_P (p->slot) = TYPE_READONLY (type);
+ MEM_VOLATILE_P (p->slot) = TYPE_VOLATILE (type);
+ MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
+ }
return p->slot;
}
}
/* Either delete Q or advance past it. */
if (delete_q)
- {
- prev_q->next = q->next;
- free (q);
- }
+ prev_q->next = q->next;
else
prev_q = q;
}
a temporary slot we know it points to. To be consistent with
the code below, we really should preserve all non-kept slots
if we can't find a match, but that seems to be much too costly. */
- if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
+ if (GET_CODE (x) == REG && REG_POINTER (x))
p = find_temp_slot_from_address (x);
/* If X is not in memory or is at a constant address, it cannot be in
/* If this slot is below the current TEMP_SLOT_LEVEL, then it
needs to be preserved. This can happen if a temporary in
the RTL_EXPR was addressed; preserve_temp_slots will move
- the temporary into a higher level. */
+ the temporary into a higher level. */
if (temp_slot_level <= p->level)
p->in_use = 0;
else
put_var_into_stack (decl)
tree decl;
{
- register rtx reg;
+ rtx reg;
enum machine_mode promoted_mode, decl_mode;
struct function *function = 0;
tree context;
context = decl_function_context (decl);
/* Get the current rtl used for this object and its original mode. */
- reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
+ reg = (TREE_CODE (decl) == SAVE_EXPR
+ ? SAVE_EXPR_RTL (decl)
+ : DECL_RTL_IF_SET (decl));
/* No need to do anything if decl has no rtx yet
since in that case caller is setting TREE_ADDRESSABLE
/* Get the mode it's actually stored in. */
promoted_mode = GET_MODE (reg);
- /* If this variable comes from an outer function,
- find that function's saved context. */
+ /* If this variable comes from an outer function, find that
+ function's saved context. Don't use find_function_data here,
+ because it might not be in any active function.
+ FIXME: Is that really supposed to happen?
+ It does in ObjC at least. */
if (context != current_function_decl && context != inline_function_decl)
- for (function = outer_function_chain; function; function = function->next)
+ for (function = outer_function_chain; function; function = function->outer)
if (function->decl == context)
break;
/* Change the CONCAT into a combined MEM for both parts. */
PUT_CODE (reg, MEM);
+ MEM_ATTRS (reg) = 0;
+
+ /* set_mem_attributes uses DECL_RTL to avoid re-generating of
+ already computed alias sets. Here we want to re-generate. */
+ if (DECL_P (decl))
+ SET_DECL_RTL (decl, NULL);
set_mem_attributes (reg, decl, 1);
+ if (DECL_P (decl))
+ SET_DECL_RTL (decl, reg);
/* The two parts are in memory order already.
Use the lower parts address as ours. */
PUT_CODE (reg, MEM);
PUT_MODE (reg, decl_mode);
XEXP (reg, 0) = XEXP (new, 0);
+ MEM_ATTRS (reg) = 0;
/* `volatil' bit means one thing for MEMs, another entirely for REGs. */
MEM_VOLATILE_P (reg) = volatile_p;
{
MEM_SET_IN_STRUCT_P (reg,
AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
- MEM_ALIAS_SET (reg) = get_alias_set (type);
+ set_mem_alias_set (reg, get_alias_set (type));
}
+
if (used_p)
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
}
/* Make sure that all refs to the variable, previously made
when it was a register, are fixed up to be valid again.
See function above for meaning of arguments. */
+
static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
struct function *function;
struct var_refs_queue *temp;
temp
- = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
+ = (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
temp->modified = reg;
temp->promoted_mode = promoted_mode;
temp->unsignedp = unsigned_p;
rtx first_insn = get_insns ();
struct sequence_stack *stack = seq_stack;
tree rtl_exps = rtl_expr_chain;
- rtx insn;
- /* Must scan all insns for stack-refs that exceed the limit. */
- fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
- stack == 0, ht);
/* If there's a hash table, it must record all uses of VAR. */
if (ht)
- return;
+ {
+ if (stack != 0)
+ abort ();
+ fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp);
+ return;
+ }
+
+ fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
+ stack == 0);
/* Scan all pending sequences too. */
for (; stack; stack = stack->next)
{
- push_to_sequence (stack->first);
- fixup_var_refs_insns (var, promoted_mode, unsignedp,
- stack->first, stack->next != 0, 0);
+ push_to_full_sequence (stack->first, stack->last);
+ fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
+ stack->next != 0);
/* Update remembered end of sequence
in case we added an insn at the end. */
stack->last = get_last_insn ();
if (seq != const0_rtx && seq != 0)
{
push_to_sequence (seq);
- fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0, 0);
+ fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0);
end_sequence ();
}
}
-
- /* Scan the catch clauses for exception handling too. */
- push_to_full_sequence (catch_clauses, catch_clauses_last);
- fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses, 0, 0);
- end_full_sequence (&catch_clauses, &catch_clauses_last);
-
- /* Scan sequences saved in CALL_PLACEHOLDERS too. */
- for (insn = first_insn; insn; insn = NEXT_INSN (insn))
- {
- if (GET_CODE (insn) == CALL_INSN
- && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
- {
- int i;
-
- /* Look at the Normal call, sibling call and tail recursion
- sequences attached to the CALL_PLACEHOLDER. */
- for (i = 0; i < 3; i++)
- {
- rtx seq = XEXP (PATTERN (insn), i);
- if (seq)
- {
- push_to_sequence (seq);
- fixup_var_refs_insns (var, promoted_mode, unsignedp,
- seq, 0, 0);
- XEXP (PATTERN (insn), i) = get_insns ();
- end_sequence ();
- }
- }
- }
- }
}
\f
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
main chain of insns for the current function. */
static void
-fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
+fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel)
+ rtx insn;
rtx var;
enum machine_mode promoted_mode;
int unsignedp;
- rtx insn;
int toplevel;
+{
+ while (insn)
+ {
+ /* fixup_var_refs_insn might modify insn, so save its next
+ pointer now. */
+ rtx next = NEXT_INSN (insn);
+
+ /* CALL_PLACEHOLDERs are special; we have to switch into each of
+ the three sequences they (potentially) contain, and process
+ them recursively. The CALL_INSN itself is not interesting. */
+
+ if (GET_CODE (insn) == CALL_INSN
+ && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
+ {
+ int i;
+
+ /* Look at the Normal call, sibling call and tail recursion
+ sequences attached to the CALL_PLACEHOLDER. */
+ for (i = 0; i < 3; i++)
+ {
+ rtx seq = XEXP (PATTERN (insn), i);
+ if (seq)
+ {
+ push_to_sequence (seq);
+ fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0);
+ XEXP (PATTERN (insn), i) = get_insns ();
+ end_sequence ();
+ }
+ }
+ }
+
+ else if (INSN_P (insn))
+ fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel);
+
+ insn = next;
+ }
+}
+
+/* Look up the insns which reference VAR in HT and fix them up. Other
+ arguments are the same as fixup_var_refs_insns.
+
+ N.B. No need for special processing of CALL_PLACEHOLDERs here,
+ because the hash table will point straight to the interesting insn
+ (inside the CALL_PLACEHOLDER). */
+
+static void
+fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp)
struct hash_table *ht;
+ rtx var;
+ enum machine_mode promoted_mode;
+ int unsignedp;
{
- rtx call_dest = 0;
- rtx insn_list = NULL_RTX;
+ struct insns_for_mem_entry *ime = (struct insns_for_mem_entry *)
+ hash_lookup (ht, var, /*create=*/0, /*copy=*/0);
+ rtx insn_list = ime->insns;
- /* If we already know which INSNs reference VAR there's no need
- to walk the entire instruction chain. */
- if (ht)
+ while (insn_list)
{
- insn_list = ((struct insns_for_mem_entry *)
- hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
- insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
+ rtx insn = XEXP (insn_list, 0);
+
+ if (INSN_P (insn))
+ fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, 1);
+
insn_list = XEXP (insn_list, 1);
}
+}
- while (insn)
+
+/* Per-insn processing by fixup_var_refs_insns(_with_hash). INSN is
+ the insn under examination, VAR is the variable to fix up
+ references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
+ TOPLEVEL is nonzero if this is the main insn chain for this
+ function. */
+
+static void
+fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel)
+ rtx insn;
+ rtx var;
+ enum machine_mode promoted_mode;
+ int unsignedp;
+ int toplevel;
+{
+ rtx call_dest = 0;
+ rtx set, prev, prev_set;
+ rtx note;
+
+ /* Remember the notes in case we delete the insn. */
+ note = REG_NOTES (insn);
+
+ /* If this is a CLOBBER of VAR, delete it.
+
+ If it has a REG_LIBCALL note, delete the REG_LIBCALL
+ and REG_RETVAL notes too. */
+ if (GET_CODE (PATTERN (insn)) == CLOBBER
+ && (XEXP (PATTERN (insn), 0) == var
+ || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
+ && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
+ || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
{
- rtx next = NEXT_INSN (insn);
- rtx set, prev, prev_set;
- rtx note;
+ if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
+ /* The REG_LIBCALL note will go away since we are going to
+ turn INSN into a NOTE, so just delete the
+ corresponding REG_RETVAL note. */
+ remove_note (XEXP (note, 0),
+ find_reg_note (XEXP (note, 0), REG_RETVAL,
+ NULL_RTX));
+
+ delete_insn (insn);
+ }
- if (INSN_P (insn))
+ /* The insn to load VAR from a home in the arglist
+ is now a no-op. When we see it, just delete it.
+ Similarly if this is storing VAR from a register from which
+ it was loaded in the previous insn. This will occur
+ when an ADDRESSOF was made for an arglist slot. */
+ else if (toplevel
+ && (set = single_set (insn)) != 0
+ && SET_DEST (set) == var
+ /* If this represents the result of an insn group,
+ don't delete the insn. */
+ && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
+ && (rtx_equal_p (SET_SRC (set), var)
+ || (GET_CODE (SET_SRC (set)) == REG
+ && (prev = prev_nonnote_insn (insn)) != 0
+ && (prev_set = single_set (prev)) != 0
+ && SET_DEST (prev_set) == SET_SRC (set)
+ && rtx_equal_p (SET_SRC (prev_set), var))))
+ {
+ delete_insn (insn);
+ }
+ else
+ {
+ struct fixup_replacement *replacements = 0;
+ rtx next_insn = NEXT_INSN (insn);
+
+ if (SMALL_REGISTER_CLASSES)
{
- /* Remember the notes in case we delete the insn. */
- note = REG_NOTES (insn);
-
- /* If this is a CLOBBER of VAR, delete it.
-
- If it has a REG_LIBCALL note, delete the REG_LIBCALL
- and REG_RETVAL notes too. */
- if (GET_CODE (PATTERN (insn)) == CLOBBER
- && (XEXP (PATTERN (insn), 0) == var
- || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
- && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
- || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
+ /* If the insn that copies the results of a CALL_INSN
+ into a pseudo now references VAR, we have to use an
+ intermediate pseudo since we want the life of the
+ return value register to be only a single insn.
+
+ If we don't use an intermediate pseudo, such things as
+ address computations to make the address of VAR valid
+ if it is not can be placed between the CALL_INSN and INSN.
+
+ To make sure this doesn't happen, we record the destination
+ of the CALL_INSN and see if the next insn uses both that
+ and VAR. */
+
+ if (call_dest != 0 && GET_CODE (insn) == INSN
+ && reg_mentioned_p (var, PATTERN (insn))
+ && reg_mentioned_p (call_dest, PATTERN (insn)))
{
- if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
- /* The REG_LIBCALL note will go away since we are going to
- turn INSN into a NOTE, so just delete the
- corresponding REG_RETVAL note. */
- remove_note (XEXP (note, 0),
- find_reg_note (XEXP (note, 0), REG_RETVAL,
- NULL_RTX));
-
- /* In unoptimized compilation, we shouldn't call delete_insn
- except in jump.c doing warnings. */
- PUT_CODE (insn, NOTE);
- NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
- NOTE_SOURCE_FILE (insn) = 0;
- }
+ rtx temp = gen_reg_rtx (GET_MODE (call_dest));
- /* The insn to load VAR from a home in the arglist
- is now a no-op. When we see it, just delete it.
- Similarly if this is storing VAR from a register from which
- it was loaded in the previous insn. This will occur
- when an ADDRESSOF was made for an arglist slot. */
- else if (toplevel
- && (set = single_set (insn)) != 0
- && SET_DEST (set) == var
- /* If this represents the result of an insn group,
- don't delete the insn. */
- && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
- && (rtx_equal_p (SET_SRC (set), var)
- || (GET_CODE (SET_SRC (set)) == REG
- && (prev = prev_nonnote_insn (insn)) != 0
- && (prev_set = single_set (prev)) != 0
- && SET_DEST (prev_set) == SET_SRC (set)
- && rtx_equal_p (SET_SRC (prev_set), var))))
- {
- /* In unoptimized compilation, we shouldn't call delete_insn
- except in jump.c doing warnings. */
- PUT_CODE (insn, NOTE);
- NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
- NOTE_SOURCE_FILE (insn) = 0;
- if (insn == last_parm_insn)
- last_parm_insn = PREV_INSN (next);
+ emit_insn_before (gen_move_insn (temp, call_dest), insn);
+
+ PATTERN (insn) = replace_rtx (PATTERN (insn),
+ call_dest, temp);
}
+
+ if (GET_CODE (insn) == CALL_INSN
+ && GET_CODE (PATTERN (insn)) == SET)
+ call_dest = SET_DEST (PATTERN (insn));
+ else if (GET_CODE (insn) == CALL_INSN
+ && GET_CODE (PATTERN (insn)) == PARALLEL
+ && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
+ call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
else
- {
- struct fixup_replacement *replacements = 0;
- rtx next_insn = NEXT_INSN (insn);
+ call_dest = 0;
+ }
- if (SMALL_REGISTER_CLASSES)
- {
- /* If the insn that copies the results of a CALL_INSN
- into a pseudo now references VAR, we have to use an
- intermediate pseudo since we want the life of the
- return value register to be only a single insn.
-
- If we don't use an intermediate pseudo, such things as
- address computations to make the address of VAR valid
- if it is not can be placed between the CALL_INSN and INSN.
-
- To make sure this doesn't happen, we record the destination
- of the CALL_INSN and see if the next insn uses both that
- and VAR. */
-
- if (call_dest != 0 && GET_CODE (insn) == INSN
- && reg_mentioned_p (var, PATTERN (insn))
- && reg_mentioned_p (call_dest, PATTERN (insn)))
- {
- rtx temp = gen_reg_rtx (GET_MODE (call_dest));
+ /* See if we have to do anything to INSN now that VAR is in
+ memory. If it needs to be loaded into a pseudo, use a single
+ pseudo for the entire insn in case there is a MATCH_DUP
+ between two operands. We pass a pointer to the head of
+ a list of struct fixup_replacements. If fixup_var_refs_1
+ needs to allocate pseudos or replacement MEMs (for SUBREGs),
+ it will record them in this list.
- emit_insn_before (gen_move_insn (temp, call_dest), insn);
+ If it allocated a pseudo for any replacement, we copy into
+ it here. */
- PATTERN (insn) = replace_rtx (PATTERN (insn),
- call_dest, temp);
- }
+ fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
+ &replacements);
- if (GET_CODE (insn) == CALL_INSN
- && GET_CODE (PATTERN (insn)) == SET)
- call_dest = SET_DEST (PATTERN (insn));
- else if (GET_CODE (insn) == CALL_INSN
- && GET_CODE (PATTERN (insn)) == PARALLEL
- && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
- call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
- else
- call_dest = 0;
- }
-
- /* See if we have to do anything to INSN now that VAR is in
- memory. If it needs to be loaded into a pseudo, use a single
- pseudo for the entire insn in case there is a MATCH_DUP
- between two operands. We pass a pointer to the head of
- a list of struct fixup_replacements. If fixup_var_refs_1
- needs to allocate pseudos or replacement MEMs (for SUBREGs),
- it will record them in this list.
+ /* If this is last_parm_insn, and any instructions were output
+ after it to fix it up, then we must set last_parm_insn to
+ the last such instruction emitted. */
+ if (insn == last_parm_insn)
+ last_parm_insn = PREV_INSN (next_insn);
- If it allocated a pseudo for any replacement, we copy into
- it here. */
+ while (replacements)
+ {
+ struct fixup_replacement *next;
- fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
- &replacements);
+ if (GET_CODE (replacements->new) == REG)
+ {
+ rtx insert_before;
+ rtx seq;
- /* If this is last_parm_insn, and any instructions were output
- after it to fix it up, then we must set last_parm_insn to
- the last such instruction emitted. */
- if (insn == last_parm_insn)
- last_parm_insn = PREV_INSN (next_insn);
+ /* OLD might be a (subreg (mem)). */
+ if (GET_CODE (replacements->old) == SUBREG)
+ replacements->old
+ = fixup_memory_subreg (replacements->old, insn, 0);
+ else
+ replacements->old
+ = fixup_stack_1 (replacements->old, insn);
- while (replacements)
- {
- struct fixup_replacement *next;
+ insert_before = insn;
- if (GET_CODE (replacements->new) == REG)
- {
- rtx insert_before;
- rtx seq;
-
- /* OLD might be a (subreg (mem)). */
- if (GET_CODE (replacements->old) == SUBREG)
- replacements->old
- = fixup_memory_subreg (replacements->old, insn, 0);
- else
- replacements->old
- = fixup_stack_1 (replacements->old, insn);
-
- insert_before = insn;
-
- /* If we are changing the mode, do a conversion.
- This might be wasteful, but combine.c will
- eliminate much of the waste. */
-
- if (GET_MODE (replacements->new)
- != GET_MODE (replacements->old))
- {
- start_sequence ();
- convert_move (replacements->new,
- replacements->old, unsignedp);
- seq = gen_sequence ();
- end_sequence ();
- }
- else
- seq = gen_move_insn (replacements->new,
- replacements->old);
-
- emit_insn_before (seq, insert_before);
- }
+ /* If we are changing the mode, do a conversion.
+ This might be wasteful, but combine.c will
+ eliminate much of the waste. */
- next = replacements->next;
- free (replacements);
- replacements = next;
+ if (GET_MODE (replacements->new)
+ != GET_MODE (replacements->old))
+ {
+ start_sequence ();
+ convert_move (replacements->new,
+ replacements->old, unsignedp);
+ seq = gen_sequence ();
+ end_sequence ();
}
- }
+ else
+ seq = gen_move_insn (replacements->new,
+ replacements->old);
- /* Also fix up any invalid exprs in the REG_NOTES of this insn.
- But don't touch other insns referred to by reg-notes;
- we will get them elsewhere. */
- while (note)
- {
- if (GET_CODE (note) != INSN_LIST)
- XEXP (note, 0)
- = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
- note = XEXP (note, 1);
+ emit_insn_before (seq, insert_before);
}
- }
- if (!ht)
- insn = next;
- else if (insn_list)
- {
- insn = XEXP (insn_list, 0);
- insn_list = XEXP (insn_list, 1);
+ next = replacements->next;
+ free (replacements);
+ replacements = next;
}
- else
- insn = NULL_RTX;
+ }
+
+ /* Also fix up any invalid exprs in the REG_NOTES of this insn.
+ But don't touch other insns referred to by reg-notes;
+ we will get them elsewhere. */
+ while (note)
+ {
+ if (GET_CODE (note) != INSN_LIST)
+ XEXP (note, 0)
+ = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
+ note = XEXP (note, 1);
}
}
\f
to modify this insn by replacing a memory reference with a pseudo or by
making a new MEM to implement a SUBREG, we consult that list to see if
we have already chosen a replacement. If none has already been allocated,
- we allocate it and update the list. fixup_var_refs_insns will copy VAR
+ we allocate it and update the list. fixup_var_refs_insn will copy VAR
or the SUBREG, as appropriate, to the pseudo. */
static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
- register rtx var;
+ rtx var;
enum machine_mode promoted_mode;
- register rtx *loc;
+ rtx *loc;
rtx insn;
struct fixup_replacement **replacements;
{
- register int i;
- register rtx x = *loc;
+ int i;
+ rtx x = *loc;
RTX_CODE code = GET_CODE (x);
- register const char *fmt;
- register rtx tem, tem1;
+ const char *fmt;
+ rtx tem, tem1;
struct fixup_replacement *replacement;
switch (code)
/* That failed. Fall back on force_operand and hope. */
start_sequence ();
- force_operand (sub, y);
+ sub = force_operand (sub, y);
+ if (sub != y)
+ emit_insn (gen_move_insn (y, sub));
seq = gen_sequence ();
end_sequence ();
}
enum machine_mode is_mode = GET_MODE (tem);
HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
-#ifdef HAVE_extzv
if (GET_CODE (x) == ZERO_EXTRACT)
{
- wanted_mode
- = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
- if (wanted_mode == VOIDmode)
- wanted_mode = word_mode;
+ enum machine_mode new_mode
+ = mode_for_extraction (EP_extzv, 1);
+ if (new_mode != MAX_MACHINE_MODE)
+ wanted_mode = new_mode;
}
-#endif
-#ifdef HAVE_extv
- if (GET_CODE (x) == SIGN_EXTRACT)
+ else if (GET_CODE (x) == SIGN_EXTRACT)
{
- wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
- if (wanted_mode == VOIDmode)
- wanted_mode = word_mode;
+ enum machine_mode new_mode
+ = mode_for_extraction (EP_extv, 1);
+ if (new_mode != MAX_MACHINE_MODE)
+ wanted_mode = new_mode;
}
-#endif
+
/* If we have a narrower mode, we can do something. */
if (wanted_mode != VOIDmode
&& GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
pos %= GET_MODE_BITSIZE (wanted_mode);
- newmem = gen_rtx_MEM (wanted_mode,
- plus_constant (XEXP (tem, 0), offset));
- MEM_COPY_ATTRIBUTES (newmem, tem);
+ newmem = adjust_address_nv (tem, wanted_mode, offset);
/* Make the change and see if the insn remains valid. */
INSN_CODE (insn) = -1;
{
replacement = find_fixup_replacement (replacements, var);
if (replacement->new == 0)
- replacement->new = gen_reg_rtx (GET_MODE (var));
+ replacement->new = gen_reg_rtx (promoted_mode);
SUBREG_REG (x) = replacement->new;
return;
}
optimize_bit_field (x, insn, 0);
if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
|| GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
- optimize_bit_field (x, insn, NULL_PTR);
+ optimize_bit_field (x, insn, 0);
/* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
into a register and then store it back out. */
{
rtx dest = SET_DEST (x);
rtx src = SET_SRC (x);
-#ifdef HAVE_insv
rtx outerdest = dest;
-#endif
while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
|| GET_CODE (dest) == SIGN_EXTRACT
dest = XEXP (dest, 0);
if (GET_CODE (src) == SUBREG)
- src = XEXP (src, 0);
+ src = SUBREG_REG (src);
/* If VAR does not appear at the top level of the SET
just scan the lower levels of the tree. */
/* We will need to rerecognize this insn. */
INSN_CODE (insn) = -1;
-#ifdef HAVE_insv
- if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
+ if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
+ && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
{
/* Since this case will return, ensure we fixup all the
operands here. */
enum machine_mode is_mode = GET_MODE (tem);
HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
- wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
- if (wanted_mode == VOIDmode)
- wanted_mode = word_mode;
+ wanted_mode = mode_for_extraction (EP_insv, 0);
/* If we have a narrower mode, we can do something. */
if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
pos %= GET_MODE_BITSIZE (wanted_mode);
- newmem = gen_rtx_MEM (wanted_mode,
- plus_constant (XEXP (tem, 0),
- offset));
- MEM_COPY_ATTRIBUTES (newmem, tem);
+ newmem = adjust_address_nv (tem, wanted_mode, offset);
/* Make the change and see if the insn remains valid. */
INSN_CODE (insn) = -1;
XEXP (outerdest, 0) = tem1;
return;
}
-#endif
/* STRICT_LOW_PART is a no-op on memory references
and it can cause combinations to be unrecognizable,
REG_NOTES (insn) = REG_NOTES (last);
PATTERN (insn) = PATTERN (last);
- PUT_CODE (last, NOTE);
- NOTE_LINE_NUMBER (last) = NOTE_INSN_DELETED;
- NOTE_SOURCE_FILE (last) = 0;
+ delete_insn (last);
}
else
PATTERN (insn) = pat;
REG_NOTES (insn) = REG_NOTES (last);
PATTERN (insn) = PATTERN (last);
- PUT_CODE (last, NOTE);
- NOTE_LINE_NUMBER (last) = NOTE_INSN_DELETED;
- NOTE_SOURCE_FILE (last) = 0;
+ delete_insn (last);
}
else
PATTERN (insn) = pat;
fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
else if (fmt[i] == 'E')
{
- register int j;
+ int j;
for (j = 0; j < XVECLEN (x, i); j++)
fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
insn, replacements);
rtx insn;
int uncritical;
{
- int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
+ int offset = SUBREG_BYTE (x);
rtx addr = XEXP (SUBREG_REG (x), 0);
enum machine_mode mode = GET_MODE (x);
rtx result;
&& ! uncritical)
abort ();
- if (BYTES_BIG_ENDIAN)
- offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
- - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
- addr = plus_constant (addr, offset);
- if (!flag_force_addr && memory_address_p (mode, addr))
+ if (!flag_force_addr
+ && memory_address_p (mode, plus_constant (addr, offset)))
/* Shortcut if no insns need be emitted. */
- return change_address (SUBREG_REG (x), mode, addr);
+ return adjust_address (SUBREG_REG (x), mode, offset);
+
start_sequence ();
- result = change_address (SUBREG_REG (x), mode, addr);
+ result = adjust_address (SUBREG_REG (x), mode, offset);
emit_insn_before (gen_sequence (), insn);
end_sequence ();
return result;
static rtx
walk_fixup_memory_subreg (x, insn, uncritical)
- register rtx x;
+ rtx x;
rtx insn;
int uncritical;
{
- register enum rtx_code code;
- register const char *fmt;
- register int i;
+ enum rtx_code code;
+ const char *fmt;
+ int i;
if (x == 0)
return 0;
XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
else if (fmt[i] == 'E')
{
- register int j;
+ int j;
for (j = 0; j < XVECLEN (x, i); j++)
XVECEXP (x, i, j)
= walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
rtx x;
rtx insn;
{
- register int i;
- register RTX_CODE code = GET_CODE (x);
- register const char *fmt;
+ int i;
+ RTX_CODE code = GET_CODE (x);
+ const char *fmt;
if (code == MEM)
{
- register rtx ad = XEXP (x, 0);
+ rtx ad = XEXP (x, 0);
/* If we have address of a stack slot but it's not valid
(displacement is too large), compute the sum in a register. */
if (GET_CODE (ad) == PLUS
seq = gen_sequence ();
end_sequence ();
emit_insn_before (seq, insn);
- return change_address (x, VOIDmode, temp);
+ return replace_equiv_address (x, temp);
}
return x;
}
XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
else if (fmt[i] == 'E')
{
- register int j;
+ int j;
for (j = 0; j < XVECLEN (x, i); j++)
XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
}
rtx insn;
rtx *equiv_mem;
{
- register rtx bitfield;
+ rtx bitfield;
int destflag;
rtx seq = 0;
enum machine_mode mode;
!= BLKmode)
&& INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
{
- register rtx memref = 0;
+ rtx memref = 0;
/* Now check that the containing word is memory, not a register,
and that it is safe to change the machine mode. */
offset /= BITS_PER_UNIT;
if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
{
- offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
+ offset += (SUBREG_BYTE (XEXP (bitfield, 0))
+ / UNITS_PER_WORD) * UNITS_PER_WORD;
if (BYTES_BIG_ENDIAN)
offset -= (MIN (UNITS_PER_WORD,
GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
}
start_sequence ();
- memref = change_address (memref, mode,
- plus_constant (XEXP (memref, 0), offset));
+ memref = adjust_address (memref, mode, offset);
insns = get_insns ();
end_sequence ();
emit_insns_before (insns, insn);
{
rtx src = SET_SRC (body);
while (GET_CODE (src) == SUBREG
- && SUBREG_WORD (src) == 0)
+ && SUBREG_BYTE (src) == 0)
src = SUBREG_REG (src);
if (GET_MODE (src) != GET_MODE (memref))
src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
rtx dest = SET_DEST (body);
while (GET_CODE (dest) == SUBREG
- && SUBREG_WORD (dest) == 0
+ && SUBREG_BYTE (dest) == 0
&& (GET_MODE_CLASS (GET_MODE (dest))
== GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
&& (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
REGNO (reg), decl);
+ /* Calculate this before we start messing with decl's RTL. */
+ HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
+
/* If the original REG was a user-variable, then so is the REG whose
address is being taken. Likewise for unchanging. */
REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
PUT_CODE (reg, MEM);
+ MEM_ATTRS (reg) = 0;
XEXP (reg, 0) = r;
+
if (decl)
{
tree type = TREE_TYPE (decl);
+ enum machine_mode decl_mode
+ = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
+ : DECL_MODE (decl));
+ rtx decl_rtl = decl ? DECL_RTL_IF_SET (decl) : 0;
+
+ PUT_MODE (reg, decl_mode);
- PUT_MODE (reg, DECL_MODE (decl));
- MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
- MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
- MEM_ALIAS_SET (reg) = get_alias_set (decl);
+ /* Clear DECL_RTL momentarily so functions below will work
+ properly, then set it again. */
+ if (decl_rtl == reg)
+ SET_DECL_RTL (decl, 0);
+
+ set_mem_attributes (reg, decl, 1);
+ set_mem_alias_set (reg, set);
+
+ if (decl_rtl == reg)
+ SET_DECL_RTL (decl, reg);
if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
}
else
- {
- /* We have no alias information about this newly created MEM. */
- MEM_ALIAS_SET (reg) = 0;
-
- fixup_var_refs (reg, GET_MODE (reg), 0, 0);
- }
+ fixup_var_refs (reg, GET_MODE (reg), 0, 0);
return reg;
}
the stack. If the function returns FALSE then the replacement could not
be made. */
-static boolean
+static bool
purge_addressof_1 (loc, insn, force, store, ht)
rtx *loc;
rtx insn;
RTX_CODE code;
int i, j;
const char *fmt;
- boolean result = true;
+ bool result = true;
/* Re-start here to avoid recursion in common cases. */
restart:
result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
return result;
}
-
- else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
+ else if (code == ADDRESSOF)
{
+ rtx sub, insns;
+
+ if (GET_CODE (XEXP (x, 0)) != MEM)
+ {
+ put_addressof_into_stack (x, ht);
+ return true;
+ }
+
/* We must create a copy of the rtx because it was created by
overwriting a REG rtx which is always shared. */
- rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
- rtx insns;
-
+ sub = copy_rtx (XEXP (XEXP (x, 0), 0));
if (validate_change (insn, loc, sub, 0)
|| validate_replace_rtx (x, sub, insn))
return true;
else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
{
rtx sub = XEXP (XEXP (x, 0), 0);
- rtx sub2;
if (GET_CODE (sub) == MEM)
- {
- sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
- MEM_COPY_ATTRIBUTES (sub2, sub);
- sub = sub2;
- }
+ sub = adjust_address_nv (sub, GET_MODE (x), 0);
else if (GET_CODE (sub) == REG
&& (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
;
code did. This is especially true of
REG_RETVAL. */
- if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
+ if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
z = SUBREG_REG (z);
if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
}
goto restart;
}
- give_up:;
- /* else give up and put it into the stack */
- }
-
- else if (code == ADDRESSOF)
- {
- put_addressof_into_stack (x, ht);
- return true;
- }
- else if (code == SET)
- {
- result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
- result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
- return result;
}
+ give_up:
/* Scan all subexpressions. */
fmt = GET_RTX_FORMAT (code);
for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
/* Return non-zero if K1 and K2 (two REGs) are the same. */
-static boolean
+static bool
insns_for_mem_comp (k1, k2)
hash_table_key k1;
hash_table_key k2;
return k1 == k2;
}
-struct insns_for_mem_walk_info {
+struct insns_for_mem_walk_info
+{
/* The hash table that we are using to record which INSNs use which
MEMs. */
struct hash_table *ht;
/* Helper function for purge_addressof called through for_each_rtx.
Returns true iff the rtl is an ADDRESSOF. */
+
static int
is_addressof (rtl, data)
rtx *rtl;
{
rtx reg = SET_DEST (pattern);
enum machine_mode mode = GET_MODE (SET_DEST (pattern));
- int word = 0;
-
- while (GET_CODE (reg) == SUBREG)
+ int offset = 0;
+
+ if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
+ && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
{
- word += SUBREG_WORD (reg);
+ offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
+ GET_MODE (SUBREG_REG (reg)),
+ SUBREG_BYTE (reg),
+ GET_MODE (reg));
reg = SUBREG_REG (reg);
}
-
- if (REGNO (reg) < FIRST_PSEUDO_REGISTER)
+
+
+ if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
{
- reg = gen_rtx_REG (mode, REGNO (reg) + word);
+ reg = gen_rtx_REG (mode, REGNO (reg) + offset);
SET_DEST (pattern) = reg;
}
}
{
instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
instantiate_virtual_regs_1 (®_NOTES (insn), NULL_RTX, 0);
+ /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
+ if (GET_CODE (insn) == CALL_INSN)
+ instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
+ NULL_RTX, 0);
}
/* Instantiate the stack slots for the parm registers, for later use in
for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
{
HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
+ HOST_WIDE_INT size_rtl;
instantiate_decl (DECL_RTL (decl), size, valid_only);
/* If the parameter was promoted, then the incoming RTL mode may be
larger than the declared type size. We must use the larger of
the two sizes. */
- size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
+ size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
+ size = MAX (size_rtl, size);
instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
}
tree t;
for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
- instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
- valid_only);
+ if (DECL_RTL_SET_P (t))
+ instantiate_decl (DECL_RTL (t),
+ int_size_in_bytes (TREE_TYPE (t)),
+ valid_only);
/* Process all subblocks. */
for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
XEXP (x, 0) = addr;
}
\f
+/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
+ is a virtual register, return the equivalent hard register and set the
+ offset indirectly through the pointer. Otherwise, return 0. */
+
+static rtx
+instantiate_new_reg (x, poffset)
+ rtx x;
+ HOST_WIDE_INT *poffset;
+{
+ rtx new;
+ HOST_WIDE_INT offset;
+
+ if (x == virtual_incoming_args_rtx)
+ new = arg_pointer_rtx, offset = in_arg_offset;
+ else if (x == virtual_stack_vars_rtx)
+ new = frame_pointer_rtx, offset = var_offset;
+ else if (x == virtual_stack_dynamic_rtx)
+ new = stack_pointer_rtx, offset = dynamic_offset;
+ else if (x == virtual_outgoing_args_rtx)
+ new = stack_pointer_rtx, offset = out_arg_offset;
+ else if (x == virtual_cfa_rtx)
+ new = arg_pointer_rtx, offset = cfa_offset;
+ else
+ return 0;
+
+ *poffset = offset;
+ return new;
+}
+\f
/* Given a pointer to a piece of rtx and an optional pointer to the
containing object, instantiate any virtual registers present in it.
the actual register should receive the source minus the
appropriate offset. This is used, for example, in the handling
of non-local gotos. */
- if (SET_DEST (x) == virtual_incoming_args_rtx)
- new = arg_pointer_rtx, offset = -in_arg_offset;
- else if (SET_DEST (x) == virtual_stack_vars_rtx)
- new = frame_pointer_rtx, offset = -var_offset;
- else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
- new = stack_pointer_rtx, offset = -dynamic_offset;
- else if (SET_DEST (x) == virtual_outgoing_args_rtx)
- new = stack_pointer_rtx, offset = -out_arg_offset;
- else if (SET_DEST (x) == virtual_cfa_rtx)
- new = arg_pointer_rtx, offset = -cfa_offset;
-
- if (new)
+ if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
{
rtx src = SET_SRC (x);
+ /* We are setting the register, not using it, so the relevant
+ offset is the negative of the offset to use were we using
+ the register. */
+ offset = - offset;
instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
/* The only valid sources here are PLUS or REG. Just do
/* Check for (plus (plus VIRT foo) (const_int)) first. */
if (GET_CODE (XEXP (x, 0)) == PLUS)
{
- rtx inner = XEXP (XEXP (x, 0), 0);
-
- if (inner == virtual_incoming_args_rtx)
- new = arg_pointer_rtx, offset = in_arg_offset;
- else if (inner == virtual_stack_vars_rtx)
- new = frame_pointer_rtx, offset = var_offset;
- else if (inner == virtual_stack_dynamic_rtx)
- new = stack_pointer_rtx, offset = dynamic_offset;
- else if (inner == virtual_outgoing_args_rtx)
- new = stack_pointer_rtx, offset = out_arg_offset;
- else if (inner == virtual_cfa_rtx)
- new = arg_pointer_rtx, offset = cfa_offset;
+ if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
+ {
+ instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
+ extra_insns);
+ new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
+ }
else
{
loc = &XEXP (x, 0);
goto restart;
}
-
- instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
- extra_insns);
- new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
}
- else if (XEXP (x, 0) == virtual_incoming_args_rtx)
- new = arg_pointer_rtx, offset = in_arg_offset;
- else if (XEXP (x, 0) == virtual_stack_vars_rtx)
- new = frame_pointer_rtx, offset = var_offset;
- else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
- new = stack_pointer_rtx, offset = dynamic_offset;
- else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
- new = stack_pointer_rtx, offset = out_arg_offset;
- else if (XEXP (x, 0) == virtual_cfa_rtx)
- new = arg_pointer_rtx, offset = cfa_offset;
- else
+#ifdef POINTERS_EXTEND_UNSIGNED
+ /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
+ we can commute the PLUS and SUBREG because pointers into the
+ frame are well-behaved. */
+ else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
+ && GET_CODE (XEXP (x, 1)) == CONST_INT
+ && 0 != (new
+ = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
+ &offset))
+ && validate_change (object, loc,
+ plus_constant (gen_lowpart (ptr_mode,
+ new),
+ offset
+ + INTVAL (XEXP (x, 1))),
+ 0))
+ return 1;
+#endif
+ else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
{
/* We know the second operand is a constant. Unless the
first operand is a REG (which has been already checked),
case REG:
/* Try to replace with a PLUS. If that doesn't work, compute the sum
in front of this insn and substitute the temporary. */
- if (x == virtual_incoming_args_rtx)
- new = arg_pointer_rtx, offset = in_arg_offset;
- else if (x == virtual_stack_vars_rtx)
- new = frame_pointer_rtx, offset = var_offset;
- else if (x == virtual_stack_dynamic_rtx)
- new = stack_pointer_rtx, offset = dynamic_offset;
- else if (x == virtual_outgoing_args_rtx)
- new = stack_pointer_rtx, offset = out_arg_offset;
- else if (x == virtual_cfa_rtx)
- new = arg_pointer_rtx, offset = cfa_offset;
-
- if (new)
+ if ((new = instantiate_new_reg (x, &offset)) != 0)
{
temp = plus_constant (new, offset);
if (!validate_change (object, loc, temp, 0))
|| (nonlocal_goto_stack_level != 0
&& reg_mentioned_p (nonlocal_goto_stack_level,
PATTERN (insn))))
- delete_insn (insn);
+ delete_related_insns (insn);
}
}
}
rtx
get_first_block_beg ()
{
- register rtx searcher;
- register rtx insn = get_first_nonparm_insn ();
+ rtx searcher;
+ rtx insn = get_first_nonparm_insn ();
for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
if (GET_CODE (searcher) == NOTE
assign_parms (fndecl)
tree fndecl;
{
- register tree parm;
- register rtx entry_parm = 0;
- register rtx stack_parm = 0;
+ tree parm;
+ rtx entry_parm = 0;
+ rtx stack_parm = 0;
CUMULATIVE_ARGS args_so_far;
enum machine_mode promoted_mode, passed_mode;
enum machine_mode nominal_mode, promoted_nominal_mode;
|| TREE_CODE (parm) != PARM_DECL
|| passed_type == NULL)
{
- DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
- = gen_rtx_MEM (BLKmode, const0_rtx);
+ SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
+ DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
TREE_USED (parm) = 1;
continue;
}
and avoid the usual things like emit_move_insn that could crash. */
if (nominal_mode == VOIDmode)
{
- DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
+ SET_DECL_RTL (parm, const0_rtx);
+ DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
continue;
}
&& nominal_mode != BLKmode && nominal_mode != passed_mode)
stack_parm = 0;
+ /* When an argument is passed in multiple locations, we can't
+ make use of this information, but we can save some copying if
+ the whole argument is passed in a single register. */
+ if (GET_CODE (entry_parm) == PARALLEL
+ && nominal_mode != BLKmode && passed_mode != BLKmode)
+ {
+ int i, len = XVECLEN (entry_parm, 0);
+
+ for (i = 0; i < len; i++)
+ if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
+ && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
+ && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
+ == passed_mode)
+ && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
+ {
+ entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
+ DECL_INCOMING_RTL (parm) = entry_parm;
+ break;
+ }
+ }
+
/* ENTRY_PARM is an RTX for the parameter as it arrives,
in the mode in which it arrives.
STACK_PARM is an RTX for a stack slot where the parameter can live
size_stored / UNITS_PER_WORD,
int_size_in_bytes (TREE_TYPE (parm)));
}
- DECL_RTL (parm) = stack_parm;
+ SET_DECL_RTL (parm, stack_parm);
}
else if (! ((! optimize
&& ! DECL_REGISTER (parm)
/* Store the parm in a pseudoregister during the function, but we
may need to do it in a wider mode. */
- register rtx parmreg;
+ rtx parmreg;
unsigned int regno, regnoi = 0, regnor = 0;
unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
appropriately. */
if (passed_pointer)
{
- DECL_RTL (parm)
- = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
- set_mem_attributes (DECL_RTL (parm), parm, 1);
+ rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
+ parmreg);
+ set_mem_attributes (x, parm, 1);
+ SET_DECL_RTL (parm, x);
}
else
- DECL_RTL (parm) = parmreg;
-
+ {
+ SET_DECL_RTL (parm, parmreg);
+ maybe_set_unchanging (DECL_RTL (parm), parm);
+ }
+
/* Copy the value into the register. */
if (nominal_mode != passed_mode
|| promoted_nominal_mode != promoted_mode)
push_to_sequence (conversion_insns);
tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
+ if (GET_CODE (tempreg) == SUBREG
+ && GET_MODE (tempreg) == nominal_mode
+ && GET_CODE (SUBREG_REG (tempreg)) == REG
+ && nominal_mode == passed_mode
+ && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
+ && GET_MODE_SIZE (GET_MODE (tempreg))
+ < GET_MODE_SIZE (GET_MODE (entry_parm)))
+ {
+ /* The argument is already sign/zero extended, so note it
+ into the subreg. */
+ SUBREG_PROMOTED_VAR_P (tempreg) = 1;
+ SUBREG_PROMOTED_UNSIGNED_P (tempreg) = unsignedp;
+ }
+
/* TREE_USED gets set erroneously during expand_assignment. */
save_tree_used = TREE_USED (parm);
expand_assignment (parm,
Pmode above. We must use the actual mode of the parm. */
parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
mark_user_reg (parmreg);
- emit_move_insn (parmreg, DECL_RTL (parm));
- DECL_RTL (parm) = parmreg;
+ if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
+ {
+ rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
+ int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
+ push_to_sequence (conversion_insns);
+ emit_move_insn (tempreg, DECL_RTL (parm));
+ SET_DECL_RTL (parm,
+ convert_to_mode (GET_MODE (parmreg),
+ tempreg,
+ unsigned_p));
+ emit_move_insn (parmreg, DECL_RTL (parm));
+ conversion_insns = get_insns();
+ did_conversion = 1;
+ end_sequence ();
+ }
+ else
+ emit_move_insn (parmreg, DECL_RTL (parm));
+ SET_DECL_RTL (parm, parmreg);
/* STACK_PARM is the pointer, not the parm, and PARMREG is
now the parm. */
stack_parm = 0;
/* If something wants our address, try to use ADDRESSOF. */
if (TREE_ADDRESSABLE (parm))
- put_var_into_stack (parm);
+ {
+ /* If we end up putting something into the stack,
+ fixup_var_refs_insns will need to make a pass over
+ all the instructions. It looks through the pending
+ sequences -- but it can't see the ones in the
+ CONVERSION_INSNS, if they're not on the sequence
+ stack. So, we go back to that sequence, just so that
+ the fixups will happen. */
+ push_to_sequence (conversion_insns);
+ put_var_into_stack (parm);
+ conversion_insns = get_insns ();
+ end_sequence ();
+ }
}
else
{
if (promoted_mode != nominal_mode)
{
- /* Conversion is required. */
+ /* Conversion is required. */
rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
emit_move_insn (tempreg, validize_mem (entry_parm));
entry_parm = convert_to_mode (nominal_mode, tempreg,
TREE_UNSIGNED (TREE_TYPE (parm)));
if (stack_parm)
- {
- /* ??? This may need a big-endian conversion on sparc64. */
- stack_parm = change_address (stack_parm, nominal_mode,
- NULL_RTX);
- }
+ /* ??? This may need a big-endian conversion on sparc64. */
+ stack_parm = adjust_address (stack_parm, nominal_mode, 0);
+
conversion_insns = get_insns ();
did_conversion = 1;
end_sequence ();
conversion_insns = get_insns ();
end_sequence ();
}
- DECL_RTL (parm) = stack_parm;
+ SET_DECL_RTL (parm, stack_parm);
}
/* If this "parameter" was the place where we are receiving the
if (parm == function_result_decl)
{
tree result = DECL_RESULT (fndecl);
+ rtx x = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
- DECL_RTL (result)
- = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
+ set_mem_attributes (x, result, 1);
+ SET_DECL_RTL (result, x);
+ }
- set_mem_attributes (DECL_RTL (result), result, 1);
+ if (GET_CODE (DECL_RTL (parm)) == REG)
+ REGNO_DECL (REGNO (DECL_RTL (parm))) = parm;
+ else if (GET_CODE (DECL_RTL (parm)) == CONCAT)
+ {
+ REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 0))) = parm;
+ REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 1))) = parm;
}
+
}
/* Output all parameter conversion instructions (possibly including calls)
#endif
#endif
-#ifdef STACK_BOUNDARY
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
current_function_args_size
= ((current_function_args_size + STACK_BYTES - 1)
/ STACK_BYTES) * STACK_BYTES;
-#endif
#ifdef ARGS_GROW_DOWNWARD
current_function_arg_offset_rtx
to include tree.h. Do this here so it gets done when an inlined
function gets output. */
- current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
+ current_function_return_rtx
+ = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
+ ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
}
\f
/* Indicate whether REGNO is an incoming argument to the current function
uninitialized_vars_warning (block)
tree block;
{
- register tree decl, sub;
+ tree decl, sub;
for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
{
if (warn_uninitialized
void
setjmp_args_warning ()
{
- register tree decl;
+ tree decl;
for (decl = DECL_ARGUMENTS (current_function_decl);
decl; decl = TREE_CHAIN (decl))
if (DECL_RTL (decl) != 0
setjmp_protect (block)
tree block;
{
- register tree decl, sub;
+ tree decl, sub;
for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
if ((TREE_CODE (decl) == VAR_DECL
|| TREE_CODE (decl) == PARM_DECL)
void
setjmp_protect_args ()
{
- register tree decl;
+ tree decl;
for (decl = DECL_ARGUMENTS (current_function_decl);
decl; decl = TREE_CHAIN (decl))
if ((TREE_CODE (decl) == VAR_DECL
if (context == current_function_decl || context == inline_function_decl)
return addr;
- for (fp = outer_function_chain; fp; fp = fp->next)
- if (fp->decl == context)
- break;
-
- if (fp == 0)
- abort ();
+ fp = find_function_data (context);
if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
addr = XEXP (XEXP (addr, 0), 0);
#ifdef NEED_SEPARATE_AP
rtx addr;
- if (fp->x_arg_pointer_save_area == 0)
- fp->x_arg_pointer_save_area
- = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
-
- addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
+ addr = get_arg_pointer_save_area (fp);
+ addr = fix_lexical_addr (XEXP (addr, 0), var);
addr = memory_address (Pmode, addr);
base = gen_rtx_MEM (Pmode, addr);
- MEM_ALIAS_SET (base) = get_frame_alias_set ();
+ set_mem_alias_set (base, get_frame_alias_set ());
base = copy_to_reg (base);
#else
displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
for (link = trampoline_list; link; link = TREE_CHAIN (link))
if (TREE_PURPOSE (link) == function)
return
- round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
+ adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
- for (fp = outer_function_chain; fp; fp = fp->next)
+ for (fp = outer_function_chain; fp; fp = fp->outer)
for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
if (TREE_PURPOSE (link) == function)
{
tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
function);
- return round_trampoline_addr (tramp);
+ return adjust_trampoline_addr (tramp);
}
/* None exists; we must make one. */
fn_context = decl_function_context (function);
if (fn_context != current_function_decl
&& fn_context != inline_function_decl)
- for (fp = outer_function_chain; fp; fp = fp->next)
- if (fp->decl == fn_context)
- break;
+ fp = find_function_data (fn_context);
/* Allocate run-time space for this trampoline
(usually in the defining function's stack frame). */
}
tramp = fix_lexical_addr (XEXP (tramp, 0), function);
- return round_trampoline_addr (tramp);
+ return adjust_trampoline_addr (tramp);
}
/* Given a trampoline address,
#ifdef TRAMPOLINE_ALIGNMENT
/* Round address up to desired boundary. */
rtx temp = gen_reg_rtx (Pmode);
- temp = expand_binop (Pmode, add_optab, tramp,
- GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
- temp, 0, OPTAB_LIB_WIDEN);
- tramp = expand_binop (Pmode, and_optab, temp,
- GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
- temp, 0, OPTAB_LIB_WIDEN);
+ rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
+ rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
+
+ temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
+ temp, 0, OPTAB_LIB_WIDEN);
+ tramp = expand_simple_binop (Pmode, AND, temp, mask,
+ temp, 0, OPTAB_LIB_WIDEN);
+#endif
+ return tramp;
+}
+
+/* Given a trampoline address, round it then apply any
+ platform-specific adjustments so that the result can be used for a
+ function call. */
+
+static rtx
+adjust_trampoline_addr (tramp)
+ rtx tramp;
+{
+ tramp = round_trampoline_addr (tramp);
+#ifdef TRAMPOLINE_ADJUST_ADDRESS
+ TRAMPOLINE_ADJUST_ADDRESS (tramp);
#endif
return tramp;
}
return block_vector;
}
-/* Identify BLOCKs referenced by more than one
- NOTE_INSN_BLOCK_{BEG,END}, and create duplicate blocks. */
+/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
+ and create duplicate blocks. */
+/* ??? Need an option to either create block fragments or to create
+ abstract origin duplicates of a source block. It really depends
+ on what optimization has been performed. */
void
reorder_blocks ()
VARRAY_TREE_INIT (block_stack, 10, "block_stack");
+ /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
+ reorder_blocks_0 (block);
+
/* Prune the old trees away, so that they don't get in the way. */
BLOCK_SUBBLOCKS (block) = NULL_TREE;
BLOCK_CHAIN (block) = NULL_TREE;
+ /* Recreate the block tree from the note nesting. */
reorder_blocks_1 (get_insns (), block, &block_stack);
-
BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
+ /* Remove deleted blocks from the block fragment chains. */
+ reorder_fix_fragments (block);
+
VARRAY_FREE (block_stack);
}
-/* Helper function for reorder_blocks. Process the insn chain beginning
- at INSNS. Recurse for CALL_PLACEHOLDER insns. */
+/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
+
+static void
+reorder_blocks_0 (block)
+ tree block;
+{
+ while (block)
+ {
+ TREE_ASM_WRITTEN (block) = 0;
+ reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
+ block = BLOCK_CHAIN (block);
+ }
+}
static void
reorder_blocks_1 (insns, current_block, p_block_stack)
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
{
tree block = NOTE_BLOCK (insn);
- /* If we have seen this block before, copy it. */
+
+ /* If we have seen this block before, that means it now
+ spans multiple address regions. Create a new fragment. */
if (TREE_ASM_WRITTEN (block))
{
- block = copy_node (block);
- NOTE_BLOCK (insn) = block;
+ tree new_block = copy_node (block);
+ tree origin;
+
+ origin = (BLOCK_FRAGMENT_ORIGIN (block)
+ ? BLOCK_FRAGMENT_ORIGIN (block)
+ : block);
+ BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
+ BLOCK_FRAGMENT_CHAIN (new_block)
+ = BLOCK_FRAGMENT_CHAIN (origin);
+ BLOCK_FRAGMENT_CHAIN (origin) = new_block;
+
+ NOTE_BLOCK (insn) = new_block;
+ block = new_block;
}
+
BLOCK_SUBBLOCKS (block) = 0;
TREE_ASM_WRITTEN (block) = 1;
BLOCK_SUPERCONTEXT (block) = current_block;
}
}
+/* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
+ appears in the block tree, select one of the fragments to become
+ the new origin block. */
+
+static void
+reorder_fix_fragments (block)
+ tree block;
+{
+ while (block)
+ {
+ tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
+ tree new_origin = NULL_TREE;
+
+ if (dup_origin)
+ {
+ /* BLOCK is a fragment. If its origin did not survive into
+ the output (TREE_ASM_WRITTEN clear), promote a surviving
+ fragment to be the new origin. */
+ if (! TREE_ASM_WRITTEN (dup_origin))
+ {
+ new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
+
+ /* Find the first of the remaining fragments. There must
+ be at least one -- the current block. */
+ while (! TREE_ASM_WRITTEN (new_origin))
+ new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
+ BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
+ }
+ }
+ /* Note: DUP_ORIGIN is necessarily null here (else arm of the
+ test above); BLOCK is itself an origin block. */
+ else if (! dup_origin)
+ new_origin = block;
+
+ /* Re-root the rest of the fragments to the new origin. In the
+ case that DUP_ORIGIN was null, that means BLOCK was the origin
+ of a chain of fragments and we want to remove those fragments
+ that didn't make it to the output. */
+ if (new_origin)
+ {
+ tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
+ tree chain = *pp;
+
+ while (chain)
+ {
+ if (TREE_ASM_WRITTEN (chain))
+ {
+ BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
+ *pp = chain;
+ pp = &BLOCK_FRAGMENT_CHAIN (chain);
+ }
+ chain = BLOCK_FRAGMENT_CHAIN (chain);
+ }
+ *pp = NULL_TREE;
+ }
+
+ reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
+ block = BLOCK_CHAIN (block);
+ }
+}
+
/* Reverse the order of elements in the chain T of blocks,
and return the new head of the chain (old last element). */
blocks_nreverse (t)
tree t;
{
- register tree prev = 0, decl, next;
+ tree prev = 0, decl, next;
for (decl = t; decl; decl = next)
{
next = BLOCK_CHAIN (decl);
}
\f
/* Allocate a function structure and reset its contents to the defaults. */
+
static void
prepare_function_start ()
{
- cfun = (struct function *) xcalloc (1, sizeof (struct function));
+ cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
init_stmt_for_function ();
init_eh_for_function ();
cfun->original_decl_initial = 0;
cfun->original_arg_vector = 0;
-#ifdef STACK_BOUNDARY
cfun->stack_alignment_needed = STACK_BOUNDARY;
cfun->preferred_stack_boundary = STACK_BOUNDARY;
-#else
- cfun->stack_alignment_needed = 0;
- cfun->preferred_stack_boundary = 0;
-#endif
/* Set if a call to setjmp is seen. */
current_function_calls_setjmp = 0;
{
prepare_function_start ();
- /* Remember this function for later. */
- cfun->next_global = all_functions;
- all_functions = cfun;
-
current_function_name = (*decl_printable_name) (subr, 2);
cfun->decl = subr;
/* Make sure first insn is a note even if we don't want linenums.
This makes sure the first insn will never be deleted.
Also, final expects a note to appear there. */
- emit_note (NULL_PTR, NOTE_INSN_DELETED);
+ emit_note (NULL, NOTE_INSN_DELETED);
/* Set flags used by final.c. */
if (aggregate_value_p (DECL_RESULT (subr)))
void
expand_main_function ()
{
-#if !defined (HAS_INIT_SECTION)
+#ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
+ if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
+ {
+ int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
+ rtx tmp;
+
+ /* Forcibly align the stack. */
+#ifdef STACK_GROWS_DOWNWARD
+ /* Round SP down (i.e. in the direction of growth) to a
+ multiple of ALIGN. */
+ tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
+ stack_pointer_rtx, 1, OPTAB_WIDEN);
+#else
+ /* Stack grows upward: round SP up to the next ALIGN boundary. */
+ tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
+ GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
+ tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
+ stack_pointer_rtx, 1, OPTAB_WIDEN);
+#endif
+ if (tmp != stack_pointer_rtx)
+ emit_move_insn (stack_pointer_rtx, tmp);
+
+ /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
+ tmp = force_reg (Pmode, const0_rtx);
+ allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
+ }
+#endif
+
+#ifndef HAS_INIT_SECTION
emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
VOIDmode, 0);
-#endif /* not HAS_INIT_SECTION */
+#endif
}
\f
extern struct obstack permanent_obstack;
+/* The PENDING_SIZES represent the sizes of variable-sized types.
+ Create RTL for the various sizes now (using temporary variables),
+ so that we can refer to the sizes from the RTL we are generating
+ for the current function. The PENDING_SIZES are a TREE_LIST. The
+ TREE_VALUE of each node is a SAVE_EXPR. */
+
+void
+expand_pending_sizes (pending_sizes)
+ tree pending_sizes;
+{
+ tree tem;
+
+ /* Evaluate now the sizes of any types declared among the arguments. */
+ for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
+ {
+ /* Expanding the SAVE_EXPR records its value in a temporary;
+ the result itself is discarded (const0_rtx target). */
+ expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
+ EXPAND_MEMORY_USE_BAD);
+ /* Flush the queue in case this parameter declaration has
+ side-effects. */
+ emit_queue ();
+ }
+}
+
/* Start the RTL for a new function, and set variables used for
emitting RTL.
SUBR is the FUNCTION_DECL node.
else
cleanup_label = 0;
- /* Make the label for return statements to jump to, if this machine
- does not have a one-instruction return and uses an epilogue,
- or if it returns a structure, or if it has parm cleanups. */
-#ifdef HAVE_return
- if (cleanup_label == 0 && HAVE_return
- && ! current_function_instrument_entry_exit
- && ! current_function_returns_pcc_struct
- && ! (current_function_returns_struct && ! optimize))
- return_label = 0;
- else
- return_label = gen_label_rtx ();
-#else
+ /* Make the label for return statements to jump to. Do not special
+ case machines with special return instructions -- they will be
+ handled later during jump, ifcvt, or epilogue creation. */
return_label = gen_label_rtx ();
-#endif
/* Initialize rtx used to return the value. */
/* Do this before assign_parms so that we copy the struct value address
if (aggregate_value_p (DECL_RESULT (subr)))
{
/* Returning something that won't go in a register. */
- register rtx value_address = 0;
+ rtx value_address = 0;
#ifdef PCC_STATIC_STRUCT_RETURN
if (current_function_returns_pcc_struct)
}
if (value_address)
{
- DECL_RTL (DECL_RESULT (subr))
- = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
- set_mem_attributes (DECL_RTL (DECL_RESULT (subr)),
- DECL_RESULT (subr), 1);
+ rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
+ set_mem_attributes (x, DECL_RESULT (subr), 1);
+ SET_DECL_RTL (DECL_RESULT (subr), x);
}
}
else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
/* If return mode is void, this decl rtl should not be used. */
- DECL_RTL (DECL_RESULT (subr)) = 0;
- else if (parms_have_cleanups || current_function_instrument_entry_exit)
- {
- /* If function will end with cleanup code for parms,
- compute the return values into a pseudo reg,
- which we will copy into the true return register
- after the cleanups are done. */
-
- enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
-
-#ifdef PROMOTE_FUNCTION_RETURN
- tree type = TREE_TYPE (DECL_RESULT (subr));
- int unsignedp = TREE_UNSIGNED (type);
-
- mode = promote_mode (type, mode, &unsignedp, 1);
-#endif
-
- DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
- }
+ SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
else
- /* Scalar, returned in a register. */
{
- DECL_RTL (DECL_RESULT (subr))
- = hard_function_value (TREE_TYPE (DECL_RESULT (subr)), subr, 1);
-
- /* Mark this reg as the function's return value. */
- if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
+ /* Compute the return values into a pseudo reg, which we will copy
+ into the true return register after the cleanups are done. */
+
+ /* In order to figure out what mode to use for the pseudo, we
+ figure out what the mode of the eventual return register will
+ actually be, and use that. */
+ rtx hard_reg
+ = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
+ subr, 1);
+
+ /* Structures that are returned in registers are not aggregate_value_p,
+ so we may see a PARALLEL. Don't play pseudo games with this. */
+ if (! REG_P (hard_reg))
+ SET_DECL_RTL (DECL_RESULT (subr), hard_reg);
+ else
{
- REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
+ /* Create the pseudo. */
+ SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
+
/* Needed because we may need to move this to memory
in case it's a named return value whose address is taken. */
DECL_REGISTER (DECL_RESULT (subr)) = 1;
The move is supposed to make sdb output more accurate. */
/* Indicate the beginning of the function body,
as opposed to parm setup. */
- emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
+ emit_note (NULL, NOTE_INSN_FUNCTION_BEG);
if (GET_CODE (get_last_insn ()) != NOTE)
- emit_note (NULL_PTR, NOTE_INSN_DELETED);
+ emit_note (NULL, NOTE_INSN_DELETED);
parm_birth_insn = get_last_insn ();
context_display = 0;
-(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
#endif
last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
- MEM_ALIAS_SET (last_ptr) = get_frame_alias_set ();
+ set_mem_alias_set (last_ptr, get_frame_alias_set ());
last_ptr = copy_to_reg (last_ptr);
/* If we are not optimizing, ensure that we know that this
Pmode);
}
+#ifdef PROFILE_HOOK
+ if (profile_flag)
+ PROFILE_HOOK (profile_label_no);
+#endif
+
/* After the display initializations is where the tail-recursion label
should go, if we end up needing one. Ensure we have a NOTE here
since some things (like trampolines) get placed before this. */
- tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
+ tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);
/* Evaluate now the sizes of any types declared among the arguments. */
- for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
- {
- expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
- EXPAND_MEMORY_USE_BAD);
- /* Flush the queue in case this parameter declaration has
- side-effects. */
- emit_queue ();
- }
+ expand_pending_sizes (nreverse (get_pending_sizes ()));
/* Make sure there is a line number after the function entry setup code. */
force_next_line_note ();
free_after_parsing (cfun);
free_after_compilation (cfun);
- free (cfun);
cfun = 0;
}
void *arg;
{
rtx outgoing = current_function_return_rtx;
- int pcc;
if (! outgoing)
return;
- pcc = (current_function_returns_struct
- || current_function_returns_pcc_struct);
-
- if ((GET_CODE (outgoing) == REG
- && REGNO (outgoing) >= FIRST_PSEUDO_REGISTER)
- || pcc)
- {
- tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
-
- /* A PCC-style return returns a pointer to the memory in which
- the structure is stored. */
- if (pcc)
- type = build_pointer_type (type);
-
-#ifdef FUNCTION_OUTGOING_VALUE
- outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
-#else
- outgoing = FUNCTION_VALUE (type, current_function_decl);
-#endif
- /* If this is a BLKmode structure being returned in registers, then use
- the mode computed in expand_return. */
- if (GET_MODE (outgoing) == BLKmode)
- PUT_MODE (outgoing, GET_MODE (current_function_return_rtx));
- REG_FUNCTION_VALUE_P (outgoing) = 1;
- }
-
if (GET_CODE (outgoing) == REG)
(*doit) (outgoing, arg);
else if (GET_CODE (outgoing) == PARALLEL)
clobber_return_register ()
{
diddle_return_value (do_clobber_return_reg, NULL);
+
+ /* In case we do use pseudo to return value, clobber it too. */
+ if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
+ {
+ tree decl_result = DECL_RESULT (current_function_decl);
+ rtx decl_rtl = DECL_RTL (decl_result);
+ if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
+ {
+ do_clobber_return_reg (decl_rtl, NULL);
+ }
+ }
}
static void
int end_bindings;
{
tree link;
+ rtx clobber_after;
#ifdef TRAMPOLINE_TEMPLATE
static rtx initial_trampoline;
finish_expr_for_function ();
+ /* If arg_pointer_save_area was referenced only from a nested
+ function, we will not have initialized it yet. Do that now. */
+ if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
+ get_arg_pointer_save_area (cfun);
+
#ifdef NON_SAVING_SETJMP
/* Don't put any variables in registers if we call setjmp
on a machine that fails to restore the registers. */
}
#endif
- /* Save the argument pointer if a save area was made for it. */
- if (arg_pointer_save_area)
- {
- /* arg_pointer_save_area may not be a valid memory address, so we
- have to check it and fix it if necessary. */
- rtx seq;
- start_sequence ();
- emit_move_insn (validize_mem (arg_pointer_save_area),
- virtual_incoming_args_rtx);
- seq = gen_sequence ();
- end_sequence ();
- emit_insn_before (seq, tail_recursion_reentry);
- }
-
/* Initialize any trampolines required by this function. */
for (link = trampoline_list; link; link = TREE_CHAIN (link))
{
/* Mark the end of the function body.
If control reaches this insn, the function can drop through
without returning a value. */
- emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
+ emit_note (NULL, NOTE_INSN_FUNCTION_END);
/* Must mark the last line number note in the function, so that the test
coverage code can avoid counting the last line twice. This just tells
already exists a copy of this note somewhere above. This line number
note is still needed for debugging though, so we can't delete it. */
if (flag_test_coverage)
- emit_note (NULL_PTR, NOTE_INSN_REPEATED_LINE_NUMBER);
+ emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
/* Output a linenumber for the end of the function.
SDB depends on this. */
emit_line_note_force (filename, line);
+ /* Before the return label (if any), clobber the return
+ registers so that they are not propagated live to the rest of
+ the function. This can only happen with functions that drop
+ through; if there had been a return statement, there would
+ have either been a return rtx, or a jump to the return label.
+
+ We delay actual code generation after the current_function_value_rtx
+ is computed. */
+ clobber_after = get_last_insn ();
+
/* Output the label for the actual return from the function,
if one is expected. This happens either because a function epilogue
is used instead of a return instruction, or because a return was done
with a goto in order to run local cleanups, or because of pcc-style
structure returning. */
-
if (return_label)
- {
- rtx before, after;
-
- /* Before the return label, clobber the return registers so that
- they are not propogated live to the rest of the function. This
- can only happen with functions that drop through; if there had
- been a return statement, there would have either been a return
- rtx, or a jump to the return label. */
-
- before = get_last_insn ();
- clobber_return_register ();
- after = get_last_insn ();
-
- if (before != after)
- cfun->x_clobber_return_insn = after;
-
- emit_label (return_label);
- }
+ emit_label (return_label);
/* C++ uses this. */
if (end_bindings)
expand_end_bindings (0, 0, 0);
- /* Now handle any leftover exception regions that may have been
- created for the parameters. */
- {
- rtx last = get_last_insn ();
- rtx label;
-
- expand_leftover_cleanups ();
-
- /* If there are any catch_clauses remaining, output them now. */
- emit_insns (catch_clauses);
- catch_clauses = catch_clauses_last = NULL_RTX;
- /* If the above emitted any code, may sure we jump around it. */
- if (last != get_last_insn ())
- {
- label = gen_label_rtx ();
- last = emit_jump_insn_after (gen_jump (label), last);
- last = emit_barrier_after (last);
- emit_label (label);
- }
- }
-
if (current_function_instrument_entry_exit)
{
rtx fun = DECL_RTL (current_function_decl);
Pmode);
}
+ /* Let except.c know where it should emit the call to unregister
+ the function context for sjlj exceptions. */
+ if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
+ sjlj_emit_function_exit_after (get_last_insn ());
+
/* If we had calls to alloca, and this machine needs
an accurate stack pointer to exit the function,
insert some code to save and restore the stack pointer. */
/* If scalar return value was computed in a pseudo-reg, or was a named
return value that got dumped to the stack, copy that to the hard
return register. */
- if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0)
+ if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
{
tree decl_result = DECL_RESULT (current_function_decl);
rtx decl_rtl = DECL_RTL (decl_result);
convert_move (real_decl_rtl, decl_rtl, unsignedp);
}
+ else if (GET_CODE (real_decl_rtl) == PARALLEL)
+ emit_group_load (real_decl_rtl, decl_rtl,
+ int_size_in_bytes (TREE_TYPE (decl_result)),
+ TYPE_ALIGN (TREE_TYPE (decl_result)));
else
emit_move_insn (real_decl_rtl, decl_rtl);
if (current_function_returns_struct
|| current_function_returns_pcc_struct)
{
- rtx value_address =
- XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
+ rtx value_address
+ = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
rtx outgoing
current_function_decl);
#else
rtx outgoing
- = FUNCTION_VALUE (build_pointer_type (type),
- current_function_decl);
+ = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
#endif
/* Mark this as a function return value so integrate will delete the
assignment and USE below when inlining this function. */
REG_FUNCTION_VALUE_P (outgoing) = 1;
+#ifdef POINTERS_EXTEND_UNSIGNED
+ /* The address may be ptr_mode and OUTGOING may be Pmode. */
+ if (GET_MODE (outgoing) != GET_MODE (value_address))
+ value_address = convert_memory_address (GET_MODE (outgoing),
+ value_address);
+#endif
+
emit_move_insn (outgoing, value_address);
+
+ /* Show return register used to hold result (in this case the address
+ of the result). */
+ current_function_return_rtx = outgoing;
}
+ /* If this is an implementation of throw, do what's necessary to
+ communicate between __builtin_eh_return and the epilogue. */
+ expand_eh_return ();
+
+ /* Emit the actual code to clobber return register. */
+ {
+ rtx seq, after;
+
+ start_sequence ();
+ clobber_return_register ();
+ seq = gen_sequence ();
+ end_sequence ();
+
+ after = emit_insn_after (seq, clobber_after);
+
+ if (clobber_after != after)
+ cfun->x_clobber_return_insn = after;
+ }
+
/* ??? This should no longer be necessary since stupid is no longer with
us, but there are some parts of the compiler (eg reload_combine, and
sh mach_dep_reorg) that still try and compute their own lifetime info
instead of using the general framework. */
use_return_register ();
- /* If this is an implementation of __throw, do what's necessary to
- communicate between __builtin_eh_return and the epilogue. */
- expand_eh_return ();
-
- /* Output a return insn if we are using one.
- Otherwise, let the rtl chain end here, to drop through
- into the epilogue. */
-
-#ifdef HAVE_return
- if (HAVE_return)
- {
- emit_jump_insn (gen_return ());
- emit_barrier ();
- }
-#endif
-
/* Fix up any gotos that jumped out to the outermost
binding level of the function.
Must follow emitting RETURN_LABEL. */
then you will lose. */
expand_fixups (get_insns ());
}
+
+/* Return F's argument-pointer save area slot, allocating it on first
+ use. When F is the function currently being compiled, also arrange
+ for the incoming argument pointer to be stored into the slot at the
+ start of the function. */
+
+rtx
+get_arg_pointer_save_area (f)
+ struct function *f;
+{
+ rtx ret = f->x_arg_pointer_save_area;
+
+ if (! ret)
+ {
+ ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
+ f->x_arg_pointer_save_area = ret;
+ }
+
+ /* NOTE(review): arg_pointer_save_area_init is tested but never set
+ in this function; presumably a caller sets it -- confirm. */
+ if (f == cfun && ! f->arg_pointer_save_area_init)
+ {
+ rtx seq;
+
+ /* Save the arg pointer at the beginning of the function. The
+ generated stack slot may not be a valid memory address, so we
+ have to check it and fix it if necessary. */
+ start_sequence ();
+ emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
+ seq = gen_sequence ();
+ end_sequence ();
+
+ /* Splice the save right after the function's first insn,
+ escaping any sequence currently in progress. */
+ push_topmost_sequence ();
+ emit_insn_after (seq, get_insns ());
+ pop_topmost_sequence ();
+ }
+
+ return ret;
+}
\f
/* Extend a vector that records the INSN_UIDs of INSNS (either a
sequence or a single insn). */
rtx insn;
varray_type vec;
{
- register int i, j;
+ int i, j;
if (GET_CODE (insn) == INSN
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
end = emit_jump_insn_after (gen_return (), bb->end);
if (line_note)
emit_line_note_after (NOTE_SOURCE_FILE (line_note),
- NOTE_LINE_NUMBER (line_note), bb->end);
-
- while (1)
- {
- set_block_for_insn (p, bb);
- if (p == bb->end)
- break;
- p = PREV_INSN (p);
- }
- bb->end = end;
+ NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
}
#endif /* HAVE_return */
/* If the epilogue is just a single instruction, it's OK as is */
- if (GET_CODE (seq) != SEQUENCE) return;
+ if (GET_CODE (seq) != SEQUENCE)
+ return;
/* Scan all insns in SEQ looking for ones that modified the stack
pointer. Record if it modified the stack pointer by copying it
else
sp_modified_unknown = 1;
- /* Don't allow the SP modification to happen. */
+ /* Don't allow the SP modification to happen. We don't call
+ delete_insn here since INSN isn't in any chain. */
PUT_CODE (insn, NOTE);
NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
NOTE_SOURCE_FILE (insn) = 0;
{
int inserted = 0;
edge e;
+#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
rtx seq;
+#endif
#ifdef HAVE_prologue
rtx prologue_end = NULL_RTX;
#endif
seq = gen_sequence ();
end_sequence ();
- /* If optimization is off, and perhaps in an empty function,
- the entry block will have no successors. */
- if (ENTRY_BLOCK_PTR->succ)
- {
- /* Can't deal with multiple successsors of the entry block. */
- if (ENTRY_BLOCK_PTR->succ->succ_next)
- abort ();
+ /* Can't deal with multiple successors of the entry block
+ at the moment. Function should always have at least one
+ entry point. */
+ if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
+ abort ();
- insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
- inserted = 1;
- }
- else
- emit_insn_after (seq, f);
+ insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
+ inserted = 1;
}
#endif
if (simplejump_p (jump))
{
emit_return_into_block (bb, epilogue_line_note);
- flow_delete_insn (jump);
+ delete_insn (jump);
}
/* If we have a conditional jump, we can try to replace
emit_barrier_after (last->end);
emit_return_into_block (last, epilogue_line_note);
epilogue_end = last->end;
+ last->succ->flags &= ~EDGE_FALLTHRU;
goto epilogue_done;
}
}
i = PREV_INSN (insn);
newinsn = emit_insn_before (seq, insn);
- /* Update the UID to basic block map. */
- for (i = NEXT_INSN (i); i != insn; i = NEXT_INSN (i))
- set_block_for_insn (i, bb);
-
/* Retain a map of the epilogue insns. Used in life analysis to
avoid getting rid of sibcall epilogue insns. */
record_insns (GET_CODE (seq) == SEQUENCE
there are line number notes before where we inserted the
prologue we should move them, and (2) we should generate a
note before the end of the first basic block, if there isn't
- one already there. */
+ one already there.
+
+ ??? This behaviour is completely broken when dealing with
+ multiple entry functions. We simply always place the note
+ into the first basic block and let alternate entry points
+ be missed.
+ */
for (insn = prologue_end; insn; insn = prev)
{
/* Find the last line number note in the first block. */
for (insn = BASIC_BLOCK (0)->end;
- insn != prologue_end;
+ insn != prologue_end && insn;
insn = PREV_INSN (insn))
if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
break;
if ((len = VARRAY_SIZE (prologue)) > 0)
{
- register rtx insn, note = 0;
+ rtx insn, note = 0;
/* Scan from the beginning until we reach the last prologue insn.
We apparently can't depend on basic_block_{head,end} after
BLOCK_HEAD (0) = next;
remove_insn (note);
+ /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
+ if (GET_CODE (insn) == CODE_LABEL)
+ insn = NEXT_INSN (insn);
add_insn_after (note, insn);
}
}
if ((len = VARRAY_SIZE (epilogue)) > 0)
{
- register rtx insn, note = 0;
+ rtx insn, note = 0;
/* Scan from the end until we reach the first epilogue insn.
We apparently can't depend on basic_block_{head,end} after
#endif /* HAVE_prologue or HAVE_epilogue */
}
-/* Mark T for GC. */
-
-static void
-mark_temp_slot (t)
- struct temp_slot *t;
-{
- while (t)
- {
- ggc_mark_rtx (t->slot);
- ggc_mark_rtx (t->address);
- ggc_mark_tree (t->rtl_expr);
-
- t = t->next;
- }
-}
-
/* Mark P for GC. */
static void
mark_function_status (p)
struct function *p;
{
+ struct var_refs_queue *q;
+ struct temp_slot *t;
int i;
rtx *r;
ggc_mark_rtx (p->epilogue_delay_list);
ggc_mark_rtx (p->x_clobber_return_insn);
- mark_temp_slot (p->x_temp_slots);
+ for (t = p->x_temp_slots; t != 0; t = t->next)
+ {
+ ggc_mark (t);
+ ggc_mark_rtx (t->slot);
+ ggc_mark_rtx (t->address);
+ ggc_mark_tree (t->rtl_expr);
+ ggc_mark_tree (t->type);
+ }
- {
- struct var_refs_queue *q = p->fixup_var_refs_queue;
- while (q)
- {
- ggc_mark_rtx (q->modified);
- q = q->next;
+ for (q = p->fixup_var_refs_queue; q != 0; q = q->next)
+ {
+ ggc_mark (q);
+ ggc_mark_rtx (q->modified);
}
- }
ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
ggc_mark_tree (p->x_nonlocal_labels);
+
+ mark_hard_reg_initial_vals (p);
}
-/* Mark the function chain ARG (which is really a struct function **)
- for GC. */
+/* Mark the struct function pointed to by *ARG for GC, if it is not
+ NULL. This is used to mark the current function and the outer
+ function chain. */
static void
-mark_function_chain (arg)
+maybe_mark_struct_function (arg)
void *arg;
{
struct function *f = *(struct function **) arg;
- for (; f; f = f->next_global)
- {
- ggc_mark_tree (f->decl);
-
- mark_function_status (f);
- mark_eh_status (f->eh);
- mark_stmt_status (f->stmt);
- mark_expr_status (f->expr);
- mark_emit_status (f->emit);
- mark_varasm_status (f->varasm);
-
- if (mark_machine_status)
- (*mark_machine_status) (f);
- if (mark_lang_status)
- (*mark_lang_status) (f);
-
- if (f->original_arg_vector)
- ggc_mark_rtvec ((rtvec) f->original_arg_vector);
- if (f->original_decl_initial)
- ggc_mark_tree (f->original_decl_initial);
- }
+ /* The registered root may legitimately be null, e.g. when no
+ function is being compiled. */
+ if (f == 0)
+ return;
+
+ ggc_mark_struct_function (f);
+}
+
+/* Mark a struct function * for GC. This is called from ggc-common.c. */
+
+void
+ggc_mark_struct_function (f)
+ struct function *f;
+{
+ ggc_mark (f);
+ ggc_mark_tree (f->decl);
+
+ /* Mark the per-function substructures. */
+ mark_function_status (f);
+ mark_eh_status (f->eh);
+ mark_stmt_status (f->stmt);
+ mark_expr_status (f->expr);
+ mark_emit_status (f->emit);
+ mark_varasm_status (f->varasm);
+
+ /* Optional hooks for target- and language-specific function data. */
+ if (mark_machine_status)
+ (*mark_machine_status) (f);
+ if (mark_lang_status)
+ (*mark_lang_status) (f);
+
+ if (f->original_arg_vector)
+ ggc_mark_rtvec ((rtvec) f->original_arg_vector);
+ if (f->original_decl_initial)
+ ggc_mark_tree (f->original_decl_initial);
+ /* Recurse so the whole chain of outer functions stays live. */
+ if (f->outer)
+ ggc_mark_struct_function (f->outer);
+}
/* Called once, at initialization, to initialize function.c. */
void
init_function_once ()
{
- ggc_add_root (&all_functions, 1, sizeof all_functions,
- mark_function_chain);
+ ggc_add_root (&cfun, 1, sizeof cfun, maybe_mark_struct_function);
+ ggc_add_root (&outer_function_chain, 1, sizeof outer_function_chain,
+ maybe_mark_struct_function);
VARRAY_INT_INIT (prologue, 0, "prologue");
VARRAY_INT_INIT (epilogue, 0, "epilogue");