/* Reload pseudo regs into hard regs for insns that require hard regs.
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000 Free Software Foundation, Inc.
+ 1999, 2000, 2001 Free Software Foundation, Inc.
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
-any later version.
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
-GNU CC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
-
+along with GCC; see the file COPYING. If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA. */
#include "config.h"
#include "system.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-config.h"
-#include "insn-flags.h"
-#include "insn-codes.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
+#include "optabs.h"
#include "regs.h"
#include "basic-block.h"
#include "reload.h"
#include "cselib.h"
#include "real.h"
#include "toplev.h"
-
-#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
-#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
-#endif
+#include "except.h"
/* This file contains the reload pass of the compiler, which is
run after register allocation has been done. It checks that
fixing up each insn, and generating the new insns to copy values
into the reload registers. */
-
#ifndef REGISTER_MOVE_COST
-#define REGISTER_MOVE_COST(x, y) 2
+#define REGISTER_MOVE_COST(m, x, y) 2
+#endif
+
+#ifndef LOCAL_REGNO
+#define LOCAL_REGNO(REGNO) 0
#endif
\f
/* During reload_as_needed, element N contains a REG rtx for the hard reg
rtx to_rtx; /* REG rtx for the replacement. */
};
-static struct elim_table * reg_eliminate = 0;
+static struct elim_table *reg_eliminate = 0;
/* This is an intermediate structure to initialize the table. It has
- exactly the members provided by ELIMINABLE_REGS. */
+ exactly the members provided by ELIMINABLE_REGS. */
static struct elim_table_1
{
int from;
{{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif
-#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate_1/sizeof reg_eliminate_1[0])
+#define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
/* Record the number of pending eliminations that have an offset not equal
to their initial offset. If non-zero, we use a new copy of each
static int num_labels;
\f
+static void replace_pseudos_in_call_usage PARAMS((rtx *,
+ enum machine_mode,
+ rtx));
static void maybe_fix_stack_asms PARAMS ((void));
static void copy_reloads PARAMS ((struct insn_chain *));
static void calculate_needs_all_insns PARAMS ((int));
-static int find_reg PARAMS ((struct insn_chain *, int,
- FILE *));
-static void find_reload_regs PARAMS ((struct insn_chain *, FILE *));
-static void select_reload_regs PARAMS ((FILE *));
+static int find_reg PARAMS ((struct insn_chain *, int));
+static void find_reload_regs PARAMS ((struct insn_chain *));
+static void select_reload_regs PARAMS ((void));
static void delete_caller_save_insns PARAMS ((void));
static void spill_failure PARAMS ((rtx, enum reg_class));
static void count_spilled_pseudo PARAMS ((int, int, int));
static void delete_dead_insn PARAMS ((rtx));
-static void alter_reg PARAMS ((int, int));
+static void alter_reg PARAMS ((int, int));
static void set_label_offsets PARAMS ((rtx, rtx, int));
static void check_eliminable_occurrences PARAMS ((rtx));
static void elimination_effects PARAMS ((rtx, enum machine_mode));
static void set_offsets_for_label PARAMS ((rtx));
static void init_elim_table PARAMS ((void));
static void update_eliminables PARAMS ((HARD_REG_SET *));
-static void spill_hard_reg PARAMS ((unsigned int, FILE *, int));
-static int finish_spills PARAMS ((int, FILE *));
+static void spill_hard_reg PARAMS ((unsigned int, int));
+static int finish_spills PARAMS ((int));
static void ior_hard_reg_set PARAMS ((HARD_REG_SET *, HARD_REG_SET *));
static void scan_paradoxical_subregs PARAMS ((rtx));
static void count_pseudo PARAMS ((int));
enum machine_mode));
static int reload_reg_free_p PARAMS ((unsigned int, int,
enum reload_type));
-static int reload_reg_free_for_value_p PARAMS ((int, int, enum reload_type,
+static int reload_reg_free_for_value_p PARAMS ((int, int, int,
+ enum reload_type,
rtx, rtx, int, int));
+static int free_for_value_p PARAMS ((int, enum machine_mode, int,
+ enum reload_type, rtx, rtx,
+ int, int));
static int reload_reg_reaches_end_p PARAMS ((unsigned int, int,
enum reload_type));
static int allocate_reload_reg PARAMS ((struct insn_chain *, int,
int));
+static int conflicts_with_override PARAMS ((rtx));
static void failed_reload PARAMS ((rtx, int));
static int set_reload_reg PARAMS ((int, int));
static void choose_reload_regs_init PARAMS ((struct insn_chain *, rtx *));
#ifdef AUTO_INC_DEC
static void add_auto_inc_notes PARAMS ((rtx, rtx));
#endif
-static rtx gen_mode_int PARAMS ((enum machine_mode,
+static void copy_eh_notes PARAMS ((rtx, rtx));
+static HOST_WIDE_INT sext_for_mode PARAMS ((enum machine_mode,
HOST_WIDE_INT));
static void failed_reload PARAMS ((rtx, int));
static int set_reload_reg PARAMS ((int, int));
static void reload_cse_delete_noop_set PARAMS ((rtx, rtx));
static void reload_cse_simplify PARAMS ((rtx));
-extern void dump_needs PARAMS ((struct insn_chain *, FILE *));
+static void fixup_abnormal_edges PARAMS ((void));
+extern void dump_needs PARAMS ((struct insn_chain *));
\f
/* Initialize the reload pass once per compilation. */
}
});
}
+
+/* Replace every pseudo register found in the rtx at *LOC with its
+   reload-time equivalence: a known constant, an equivalent memory
+   slot, a MEM built from an equivalent address, or the (possibly
+   renumbered) register rtx.  MEM_MODE is the mode of an enclosing
+   MEM, or VOIDmode at top level; USAGE is the insn context handed
+   to eliminate_regs.  Recurses through the whole expression.  */
+
+static void
+replace_pseudos_in_call_usage (loc, mem_mode, usage)
+     rtx *loc;
+     enum machine_mode mem_mode;
+     rtx usage;
+{
+  rtx x = *loc;
+  enum rtx_code code;
+  const char *fmt;
+  int i, j;
+
+  if (! x)
+    return;
+
+  code = GET_CODE (x);
+  if (code == REG)
+    {
+      unsigned int regno = REGNO (x);
+
+      /* Hard registers are left untouched; only pseudos have
+	 equivalences to substitute.  */
+      if (regno < FIRST_PSEUDO_REGISTER)
+	return;
+
+      /* Register eliminations may rewrite X (e.g. frame pointer to
+	 stack pointer).  If anything changed, install the result and
+	 rescan it from the top before looking for an equivalence.  */
+      x = eliminate_regs (x, mem_mode, usage);
+      if (x != *loc)
+	{
+	  *loc = x;
+	  replace_pseudos_in_call_usage (loc, mem_mode, usage);
+	  return;
+	}
+
+      /* Substitute whichever equivalence the pseudo has.  */
+      if (reg_equiv_constant[regno])
+	*loc = reg_equiv_constant[regno];
+      else if (reg_equiv_mem[regno])
+	*loc = reg_equiv_mem[regno];
+      else if (reg_equiv_address[regno])
+	*loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
+      else if (GET_CODE (regno_reg_rtx[regno]) != REG
+	       || REGNO (regno_reg_rtx[regno]) != regno)
+	*loc = regno_reg_rtx[regno];
+      else
+	/* A pseudo that reaches here got neither an equivalence nor a
+	   new home; that should not happen after reload.  */
+	abort ();
+
+      return;
+    }
+  else if (code == MEM)
+    {
+      /* Recurse into the address, recording the mode of the
+	 enclosing MEM so eliminations use the right context.  */
+      replace_pseudos_in_call_usage (& XEXP (x, 0), GET_MODE (x), usage);
+      return;
+    }
+
+  /* Process each of our operands recursively.  */
+  fmt = GET_RTX_FORMAT (code);
+  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
+    if (*fmt == 'e')
+      replace_pseudos_in_call_usage (&XEXP (x, i), mem_mode, usage);
+    else if (*fmt == 'E')
+      for (j = 0; j < XVECLEN (x, i); j++)
+	replace_pseudos_in_call_usage (& XVECEXP (x, i, j), mem_mode, usage);
+}
+
+
\f
/* Global variables used by reload and its subroutines. */
If GLOBAL is zero, we do not have enough information to do that,
so any pseudo reg that is spilled must go to the stack.
- DUMPFILE is the global-reg debugging dump file stream, or 0.
- If it is nonzero, messages are written to it to describe
- which registers are seized as reload regs, which pseudo regs
- are spilled from them, and where the pseudo regs are reallocated to.
-
Return value is nonzero if reload failed
and we must not do any more for this function. */
int
-reload (first, global, dumpfile)
+reload (first, global)
rtx first;
int global;
- FILE *dumpfile;
{
register int i;
register rtx insn;
/* The two pointers used to track the true location of the memory used
for label offsets. */
- char *real_known_ptr = NULL_PTR;
+ char *real_known_ptr = NULL;
int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
/* Make sure even insns with volatile mem refs are recognizable. */
/* Make sure that the last insn in the chain
is not something that needs reloading. */
- emit_note (NULL_PTR, NOTE_INSN_DELETED);
+ emit_note (NULL, NOTE_INSN_DELETED);
/* Enable find_equiv_reg to distinguish insns made by reload. */
reload_first_uid = get_max_uid ();
#endif
/* We don't have a stack slot for any spill reg yet. */
- bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
- bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
+ memset ((char *) spill_stack_slot, 0, sizeof spill_stack_slot);
+ memset ((char *) spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
/* Initialize the save area information for caller-save, in case some
are needed. */
registers. */
if (current_function_has_nonlocal_label)
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- {
- if (! call_used_regs[i] && ! fixed_regs[i])
- regs_ever_live[i] = 1;
- }
+ if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
+ regs_ever_live[i] = 1;
/* Find all the pseudo registers that didn't get hard regs
but do have known equivalent constants or memory slots.
reg_equiv_mem = (rtx *) xcalloc (max_regno, sizeof (rtx));
reg_equiv_init = (rtx *) xcalloc (max_regno, sizeof (rtx));
reg_equiv_address = (rtx *) xcalloc (max_regno, sizeof (rtx));
- reg_max_ref_width = (int *) xcalloc (max_regno, sizeof (int));
+ reg_max_ref_width = (unsigned int *) xcalloc (max_regno, sizeof (int));
reg_old_renumber = (short *) xcalloc (max_regno, sizeof (short));
- bcopy ((PTR) reg_renumber, (PTR) reg_old_renumber, max_regno * sizeof (short));
+ memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
pseudo_forbidden_regs
= (HARD_REG_SET *) xmalloc (max_regno * sizeof (HARD_REG_SET));
pseudo_previous_regs
Also find all paradoxical subregs and find largest such for each pseudo.
On machines with small register classes, record hard registers that
are used for user variables. These can never be used for spills.
- Also look for a "constant" NOTE_INSN_SETJMP. This means that all
+ Also look for a "constant" REG_SETJMP. This means that all
caller-saved registers must be marked live. */
num_eliminable_invariants = 0;
{
rtx set = single_set (insn);
- if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
+ if (GET_CODE (insn) == CALL_INSN
+ && find_reg_note (insn, REG_SETJMP, NULL))
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (! call_used_regs[i])
regs_ever_live[i] = 1;
{
if (GET_CODE (x) == MEM)
{
- /* If the operand is a PLUS, the MEM may be shared,
- so make sure we have an unshared copy here. */
- if (GET_CODE (XEXP (x, 0)) == PLUS)
- x = copy_rtx (x);
-
- reg_equiv_memory_loc[i] = x;
+ /* Always unshare the equivalence, so we can
+ substitute into this insn without touching the
+ equivalence. */
+ reg_equiv_memory_loc[i] = copy_rtx (x);
}
else if (function_invariant_p (x))
{
= gen_rtx_INSN_LIST (VOIDmode, insn,
reg_equiv_init[REGNO (SET_SRC (set))]);
- if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
+ if (INSN_P (insn))
scan_paradoxical_subregs (PATTERN (insn));
}
CLEAR_HARD_REG_SET (used_spill_regs);
for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
if (! ep->can_eliminate)
- spill_hard_reg (ep->from, dumpfile, 1);
+ spill_hard_reg (ep->from, 1);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
if (frame_pointer_needed)
- spill_hard_reg (HARD_FRAME_POINTER_REGNUM, dumpfile, 1);
+ spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
#endif
- finish_spills (global, dumpfile);
+ finish_spills (global);
/* From now on, we may need to generate moves differently. We may also
allow modifications of insns which cause them to not be recognized.
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (TEST_HARD_REG_BIT (to_spill, i))
{
- spill_hard_reg (i, dumpfile, 1);
+ spill_hard_reg (i, 1);
did_spill = 1;
/* Regardless of the state of spills, if we previously had
}
}
- select_reload_regs (dumpfile);
+ select_reload_regs ();
if (failure)
goto failed;
if (insns_need_reload != 0 || did_spill)
- something_changed |= finish_spills (global, dumpfile);
+ something_changed |= finish_spills (global);
if (! something_changed)
break;
if (insns_need_reload != 0 || something_needs_elimination
|| something_needs_operands_changed)
{
- int old_frame_size = get_frame_size ();
+ HOST_WIDE_INT old_frame_size = get_frame_size ();
reload_as_needed (global);
MEM_SCALAR_P (reg) = is_scalar;
/* We have no alias information about this newly created
MEM. */
- MEM_ALIAS_SET (reg) = 0;
+ set_mem_alias_set (reg, 0);
}
else if (reg_equiv_mem[i])
XEXP (reg_equiv_mem[i], 0) = addr;
and regenerate REG_INC notes that may have been moved around. */
for (insn = first; insn; insn = NEXT_INSN (insn))
- if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
+ if (INSN_P (insn))
{
rtx *pnote;
+ if (GET_CODE (insn) == CALL_INSN)
+ replace_pseudos_in_call_usage (& CALL_INSN_FUNCTION_USAGE (insn),
+ VOIDmode,
+ CALL_INSN_FUNCTION_USAGE (insn));
+
if ((GET_CODE (PATTERN (insn)) == USE
&& find_reg_note (insn, REG_EQUAL, NULL_RTX))
|| (GET_CODE (PATTERN (insn)) == CLOBBER
/* Free all the insn_chain structures at once. */
obstack_free (&reload_obstack, reload_startobj);
unused_insn_chains = 0;
+ fixup_abnormal_edges ();
return failure;
}
fatal_insn later. We clear the corresponding regnos in the live
register sets to avoid this.
The whole thing is rather sick, I'm afraid. */
+
static void
maybe_fix_stack_asms ()
{
HARD_REG_SET clobbered, allowed;
rtx pat;
- if (GET_RTX_CLASS (GET_CODE (chain->insn)) != 'i'
+ if (! INSN_P (chain->insn)
|| (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
continue;
pat = PATTERN (chain->insn);
case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
case 'P':
-#ifdef EXTRA_CONSTRAINT
- case 'Q': case 'R': case 'S': case 'T': case 'U':
-#endif
break;
case 'p':
int global;
{
struct insn_chain **pprev_reload = &insns_need_reload;
- struct insn_chain *chain;
+ struct insn_chain *chain, *next = 0;
something_needs_elimination = 0;
reload_insn_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
- for (chain = reload_insn_chain; chain != 0; chain = chain->next)
+ for (chain = reload_insn_chain; chain != 0; chain = next)
{
rtx insn = chain->insn;
+ next = chain->next;
+
/* Clear out the shortcuts. */
chain->n_reloads = 0;
chain->need_elim = 0;
known offsets at labels. */
if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
- || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
- && REG_NOTES (insn) != 0))
+ || (INSN_P (insn) && REG_NOTES (insn) != 0))
set_label_offsets (insn, insn, 0);
- if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
+ if (INSN_P (insn))
{
rtx old_body = PATTERN (insn);
int old_code = INSN_CODE (insn);
PUT_CODE (insn, NOTE);
NOTE_SOURCE_FILE (insn) = 0;
NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
+ /* Delete it from the reload chain */
+ if (chain->prev)
+ chain->prev->next = next;
+ else
+ reload_insn_chain = next;
+ if (next)
+ next->prev = chain->prev;
+ chain->next = unused_insn_chains;
+ unused_insn_chains = chain;
continue;
}
}
const PTR r1p;
const PTR r2p;
{
- register int r1 = *(const short *)r1p, r2 = *(const short *)r2p;
+ register int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
register int t;
/* Consider required reloads before optional ones. */
count_pseudo (reg)
int reg;
{
- int n_refs = REG_N_REFS (reg);
+ int freq = REG_FREQ (reg);
int r = reg_renumber[reg];
int nregs;
if (r < 0)
abort ();
-
- spill_add_cost[r] += n_refs;
+
+ spill_add_cost[r] += freq;
nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
while (nregs-- > 0)
- spill_cost[r + nregs] += n_refs;
+ spill_cost[r + nregs] += freq;
}
/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
contents of BAD_SPILL_REGS for the insn described by CHAIN. */
+
static void
order_regs_for_reload (chain)
struct insn_chain *chain;
{
- register int i, j;
+ int i;
+ HARD_REG_SET used_by_pseudos;
+ HARD_REG_SET used_by_pseudos2;
- COPY_HARD_REG_SET (bad_spill_regs, bad_spill_regs_global);
+ COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
memset (spill_cost, 0, sizeof spill_cost);
memset (spill_add_cost, 0, sizeof spill_add_cost);
/* Count number of uses of each hard reg by pseudo regs allocated to it
- and then order them by decreasing use. */
+ and then order them by decreasing use. First exclude hard registers
+ that are live in or across this insn. */
+
+ REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
+ REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
+ IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
+ IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- {
- /* Test the various reasons why we can't use a register for
- spilling in this insn. */
- if (fixed_regs[i]
- || REGNO_REG_SET_P (&chain->live_throughout, i)
- || REGNO_REG_SET_P (&chain->dead_or_set, i))
- SET_HARD_REG_BIT (bad_spill_regs, i);
- }
/* Now find out which pseudos are allocated to it, and update
hard_reg_n_uses. */
CLEAR_REG_SET (&pseudos_counted);
EXECUTE_IF_SET_IN_REG_SET
- (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j,
+ (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i,
{
- count_pseudo (j);
+ count_pseudo (i);
});
EXECUTE_IF_SET_IN_REG_SET
- (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j,
+ (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i,
{
- count_pseudo (j);
+ count_pseudo (i);
});
CLEAR_REG_SET (&pseudos_counted);
}
SET_REGNO_REG_SET (&spilled_pseudos, reg);
- spill_add_cost[r] -= REG_N_REFS (reg);
+ spill_add_cost[r] -= REG_FREQ (reg);
while (nregs-- > 0)
- spill_cost[r + nregs] -= REG_N_REFS (reg);
+ spill_cost[r + nregs] -= REG_FREQ (reg);
}
/* Find reload register to use for reload number ORDER. */
static int
-find_reg (chain, order, dumpfile)
+find_reg (chain, order)
struct insn_chain *chain;
int order;
- FILE *dumpfile;
{
int rnum = reload_order[order];
struct reload *rl = rld + rnum;
if (best_reg == -1)
return 0;
- if (dumpfile)
- fprintf (dumpfile, "Using reg %d for reload %d\n", best_reg, rnum);
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
rl->nregs = HARD_REGNO_NREGS (best_reg, rl->mode);
rl->regno = best_reg;
for a smaller class even though it belongs to that class. */
static void
-find_reload_regs (chain, dumpfile)
+find_reload_regs (chain)
struct insn_chain *chain;
- FILE *dumpfile;
{
int i;
CLEAR_HARD_REG_SET (used_spill_regs_local);
- if (dumpfile)
- fprintf (dumpfile, "Spilling for insn %d.\n", INSN_UID (chain->insn));
+ if (rtl_dump_file)
+ fprintf (rtl_dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
&& ! rld[r].optional
&& rld[r].regno == -1)
- if (! find_reg (chain, i, dumpfile))
+ if (! find_reg (chain, i))
{
spill_failure (chain->insn, rld[r].class);
failure = 1;
}
+/* Walk the insns_need_reload list and assign reload registers to
+   each insn on it via find_reload_regs.  */
static void
-select_reload_regs (dumpfile)
-     FILE *dumpfile;
+select_reload_regs ()
{
  struct insn_chain *chain;
  /* Try to satisfy the needs for each insn.  */
  for (chain = insns_need_reload; chain != 0;
       chain = chain->next_need_reload)
-    find_reload_regs (chain, dumpfile);
+    find_reload_regs (chain);
}
\f
/* Delete all insns that were inserted by emit_caller_save_insns during
adjust = inherent_size - total_size;
RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
+
+ /* Nothing can alias this slot except this pseudo. */
+ set_mem_alias_set (x, new_alias_set ());
}
+
/* Reuse a stack slot if possible. */
else if (spill_stack_slot[from_reg] != 0
&& spill_stack_slot_width[from_reg] >= total_size
&& (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
>= inherent_size))
x = spill_stack_slot[from_reg];
+
/* Allocate a bigger slot. */
else
{
and for total size. */
enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
rtx stack_slot;
+
if (spill_stack_slot[from_reg])
{
if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
if (spill_stack_slot_width[from_reg] > total_size)
total_size = spill_stack_slot_width[from_reg];
}
+
/* Make a slot with that size. */
x = assign_stack_local (mode, total_size,
inherent_size == total_size ? 0 : -1);
stack_slot = x;
+
+ /* All pseudos mapped to this slot can alias each other. */
+ if (spill_stack_slot[from_reg])
+ set_mem_alias_set (x, MEM_ALIAS_SET (spill_stack_slot[from_reg]));
+ else
+ set_mem_alias_set (x, new_alias_set ());
+
if (BYTES_BIG_ENDIAN)
{
/* Cancel the big-endian correction done in assign_stack_local.
MODE_INT, 1),
plus_constant (XEXP (x, 0), adjust));
}
+
spill_stack_slot[from_reg] = stack_slot;
spill_stack_slot_width[from_reg] = total_size;
}
/* If we have any adjustment to make, or if the stack slot is the
wrong mode, make a new stack slot. */
if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
- {
- x = gen_rtx_MEM (GET_MODE (regno_reg_rtx[i]),
- plus_constant (XEXP (x, 0), adjust));
-
- /* If this was shared among registers, must ensure we never
- set it readonly since that can cause scheduling
- problems. Note we would only have in this adjustment
- case in any event, since the code above doesn't set it. */
-
- if (from_reg == -1)
- RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
- }
+ x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
/* Save the stack slot for later. */
reg_equiv_memory_loc[i] = x;
set_label_offsets (XEXP (tem, 0), insn, 1);
return;
+ case PARALLEL:
case ADDR_VEC:
case ADDR_DIFF_VEC:
- /* Each of the labels in the address vector must be at their initial
- offsets. We want the first field for ADDR_VEC and the second
- field for ADDR_DIFF_VEC. */
+ /* Each of the labels in the parallel or address vector must be
+ at their initial offsets. We want the first field for PARALLEL
+ and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
return x;
case SUBREG:
- /* Similar to above processing, but preserve SUBREG_WORD.
+ /* Similar to above processing, but preserve SUBREG_BYTE.
Convert (subreg (mem)) to (mem) if not paradoxical.
Also, if we have a non-paradoxical (subreg (pseudo)) and the
pseudo didn't get a hard reg, we must replace this with the
else
new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
- if (new != XEXP (x, 0))
+ if (new != SUBREG_REG (x))
{
int x_size = GET_MODE_SIZE (GET_MODE (x));
int new_size = GET_MODE_SIZE (GET_MODE (new));
(reg:m2 R) later, expecting all bits to be preserved.
So if the number of words is the same, preserve the
subreg so that push_reloads can see it. */
- && ! ((x_size-1)/UNITS_PER_WORD == (new_size-1)/UNITS_PER_WORD)
+ && ! ((x_size - 1) / UNITS_PER_WORD
+ == (new_size -1 ) / UNITS_PER_WORD)
#endif
)
- || (x_size == new_size))
+ || x_size == new_size)
)
{
- int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
+ int offset = SUBREG_BYTE (x);
enum machine_mode mode = GET_MODE (x);
- if (BYTES_BIG_ENDIAN)
- offset += (MIN (UNITS_PER_WORD,
- GET_MODE_SIZE (GET_MODE (new)))
- - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
-
PUT_MODE (new, mode);
XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
return new;
}
else
- return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_WORD (x));
+ return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x));
}
return x;
/* Our only special processing is to pass the mode of the MEM to our
recursive call and copy the flags. While we are here, handle this
case more efficiently. */
- new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
- if (new != XEXP (x, 0))
- {
- new = gen_rtx_MEM (GET_MODE (x), new);
- new->volatil = x->volatil;
- new->unchanging = x->unchanging;
- new->in_struct = x->in_struct;
- return new;
- }
- else
- return x;
+ return
+ replace_equiv_address_nv (x,
+ eliminate_regs (XEXP (x, 0),
+ GET_MODE (x), insn));
case USE:
+ /* Handle insn_list USE that a call to a pure function may generate. */
+ new = eliminate_regs (XEXP (x, 0), 0, insn);
+ if (new != XEXP (x, 0))
+ return gen_rtx_USE (GET_MODE (x), new);
+ return x;
+
case CLOBBER:
case ASM_OPERANDS:
case SET:
if (new != XEXP (x, i) && ! copied)
{
rtx new_x = rtx_alloc (code);
- bcopy ((char *) x, (char *) new_x,
- (sizeof (*new_x) - sizeof (new_x->fld)
- + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
+ memcpy (new_x, x,
+ (sizeof (*new_x) - sizeof (new_x->fld)
+ + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
x = new_x;
copied = 1;
}
if (! copied)
{
rtx new_x = rtx_alloc (code);
- bcopy ((char *) x, (char *) new_x,
- (sizeof (*new_x) - sizeof (new_x->fld)
- + (sizeof (new_x->fld[0])
- * GET_RTX_LENGTH (code))));
+ memcpy (new_x, x,
+ (sizeof (*new_x) - sizeof (new_x->fld)
+ + (sizeof (new_x->fld[0])
+ * GET_RTX_LENGTH (code))));
x = new_x;
copied = 1;
}
case POST_INC:
case PRE_DEC:
case POST_DEC:
+ case POST_MODIFY:
+ case PRE_MODIFY:
for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
if (ep->to_rtx == XEXP (x, 0))
{
#endif
if (code == PRE_DEC || code == POST_DEC)
ep->offset += size;
- else
+ else if (code == PRE_INC || code == POST_INC)
ep->offset -= size;
+ else if ((code == PRE_MODIFY || code == POST_MODIFY)
+ && GET_CODE (XEXP (x, 1)) == PLUS
+ && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
+ && CONSTANT_P (XEXP (XEXP (x, 1), 1)))
+ ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
}
+ /* These two aren't unary operators. */
+ if (code == POST_MODIFY || code == PRE_MODIFY)
+ break;
+
/* Fall through to generic unary operation case. */
case STRICT_LOW_PART:
case NEG: case NOT:
if (x == 0)
return;
-
+
code = GET_CODE (x);
if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
ep->can_eliminate = 0;
return;
}
-
+
fmt = GET_RTX_FORMAT (code);
for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
{
abort ();
}
- if (! replace)
- push_obstacks (&reload_obstack, &reload_obstack);
-
if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
&& REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
{
if (ok)
{
- if (replace)
+ rtx src
+ = plus_constant (ep->to_rtx, offset - ep->offset);
+
+ new_body = old_body;
+ if (! replace)
{
- rtx src
- = plus_constant (ep->to_rtx, offset - ep->offset);
-
- /* First see if this insn remains valid when we
- make the change. If not, keep the INSN_CODE
- the same and let reload fit it up. */
- validate_change (insn, &SET_SRC (old_set), src, 1);
- validate_change (insn, &SET_DEST (old_set),
- ep->to_rtx, 1);
- if (! apply_change_group ())
- {
- SET_SRC (old_set) = src;
- SET_DEST (old_set) = ep->to_rtx;
- }
+ new_body = copy_insn (old_body);
+ if (REG_NOTES (insn))
+ REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
+ }
+ PATTERN (insn) = new_body;
+ old_set = single_set (insn);
+
+ /* First see if this insn remains valid when we
+ make the change. If not, keep the INSN_CODE
+ the same and let reload fit it up. */
+ validate_change (insn, &SET_SRC (old_set), src, 1);
+ validate_change (insn, &SET_DEST (old_set),
+ ep->to_rtx, 1);
+ if (! apply_change_group ())
+ {
+ SET_SRC (old_set) = src;
+ SET_DEST (old_set) = ep->to_rtx;
}
val = 1;
process it since it won't be used unless something changes. */
if (replace)
{
- delete_dead_insn (insn);
+ delete_dead_insn (insn);
return 1;
}
val = 1;
currently support: a single set with the source being a PLUS of an
eliminable register and a constant. */
if (old_set
+ && GET_CODE (SET_DEST (old_set)) == REG
&& GET_CODE (SET_SRC (old_set)) == PLUS
&& GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
&& GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT
if (offset == 0)
{
- /* We assume here that we don't need a PARALLEL of
- any CLOBBERs for this assignment. There's not
- much we can do if we do need it. */
+ int num_clobbers;
+ /* We assume here that if we need a PARALLEL with
+ CLOBBERs for this assignment, we can do with the
+ MATCH_SCRATCHes that add_clobbers allocates.
+ There's not much we can do if that doesn't work. */
PATTERN (insn) = gen_rtx_SET (VOIDmode,
SET_DEST (old_set),
ep->to_rtx);
- INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
+ num_clobbers = 0;
+ INSN_CODE (insn) = recog (PATTERN (insn), insn, &num_clobbers);
+ if (num_clobbers)
+ {
+ rtvec vec = rtvec_alloc (num_clobbers + 1);
+
+ vec->elem[0] = PATTERN (insn);
+ PATTERN (insn) = gen_rtx_PARALLEL (VOIDmode, vec);
+ add_clobbers (PATTERN (insn), INSN_CODE (insn));
+ }
if (INSN_CODE (insn) < 0)
abort ();
}
for (i = 0; i < recog_data.n_dups; i++)
*recog_data.dup_loc[i]
- = *recog_data.operand_loc[(int)recog_data.dup_num[i]];
+ = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
/* If any eliminable remain, they aren't eliminable anymore. */
check_eliminable_occurrences (old_body);
for (i = 0; i < recog_data.n_operands; i++)
*recog_data.operand_loc[i] = substed_operand[i];
for (i = 0; i < recog_data.n_dups; i++)
- *recog_data.dup_loc[i] = substed_operand[(int)recog_data.dup_num[i]];
+ *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
/* If we are replacing a body that was a (set X (plus Y Z)), try to
re-recognize the insn. We do this in case we had a simple addition
If re-recognition fails, the old insn code number will still be used,
and some register operands may have changed into PLUS expressions.
These will be handled by find_reloads by loading them into a register
- again.*/
+ again. */
if (val)
{
for (i = 0; i < recog_data.n_operands; i++)
*recog_data.operand_loc[i] = orig_operand[i];
for (i = 0; i < recog_data.n_dups; i++)
- *recog_data.dup_loc[i] = orig_operand[(int)recog_data.dup_num[i]];
+ *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
}
/* Update all elimination pairs to reflect the status after the current
if (val && REG_NOTES (insn) != 0)
REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
- if (! replace)
- pop_obstacks ();
-
return val;
}
set_initial_label_offsets ()
{
rtx x;
- bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
+ memset ((char *) &offsets_known_at[get_first_label_num ()], 0, num_labels);
for (x = forced_labels; x; x = XEXP (x, 1))
if (XEXP (x, 0))
if (!reg_eliminate)
reg_eliminate = (struct elim_table *)
- xcalloc(sizeof(struct elim_table), NUM_ELIMINABLE_REGS);
+ xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
/* Does this function require a frame pointer? */
}
\f
/* Kick all pseudos out of hard register REGNO.
- If DUMPFILE is nonzero, log actions taken on that file.
If CANT_ELIMINATE is nonzero, it means that we are doing this spill
because we found we can't eliminate some register. In the case, no pseudos
Return nonzero if any pseudos needed to be kicked out. */
static void
-spill_hard_reg (regno, dumpfile, cant_eliminate)
+spill_hard_reg (regno, cant_eliminate)
unsigned int regno;
- FILE *dumpfile ATTRIBUTE_UNUSED;
int cant_eliminate;
{
register int i;
spill_regs array for use by choose_reload_regs. */
static int
-finish_spills (global, dumpfile)
+finish_spills (global)
int global;
- FILE *dumpfile;
{
struct insn_chain *chain;
int something_changed = 0;
else
spill_reg_order[i] = -1;
- for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
- if (REGNO_REG_SET_P (&spilled_pseudos, i))
- {
- /* Record the current hard register the pseudo is allocated to in
- pseudo_previous_regs so we avoid reallocating it to the same
- hard reg in a later pass. */
- if (reg_renumber[i] < 0)
- abort ();
- SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
- /* Mark it as no longer having a hard register home. */
- reg_renumber[i] = -1;
- /* We will need to scan everything again. */
- something_changed = 1;
- }
+ EXECUTE_IF_SET_IN_REG_SET
+ (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i,
+ {
+ /* Record the current hard register the pseudo is allocated to in
+ pseudo_previous_regs so we avoid reallocating it to the same
+ hard reg in a later pass. */
+ if (reg_renumber[i] < 0)
+ abort ();
+
+ SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
+ /* Mark it as no longer having a hard register home. */
+ reg_renumber[i] = -1;
+ /* We will need to scan everything again. */
+ something_changed = 1;
+ });
/* Retry global register allocation if possible. */
if (global)
{
- bzero ((char *) pseudo_forbidden_regs, max_regno * sizeof (HARD_REG_SET));
+ memset ((char *) pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
/* For every insn that needs reloads, set the registers used as spill
regs in pseudo_forbidden_regs for every pseudo live across the
insn. */
alter_reg (i, reg_old_renumber[i]);
reg_old_renumber[i] = regno;
- if (dumpfile)
+ if (rtl_dump_file)
{
if (regno == -1)
- fprintf (dumpfile, " Register %d now on stack.\n\n", i);
+ fprintf (rtl_dump_file, " Register %d now on stack.\n\n", i);
else
- fprintf (dumpfile, " Register %d now in %d.\n\n",
+ fprintf (rtl_dump_file, " Register %d now in %d.\n\n",
i, reg_renumber[i]);
}
}
else if (fmt[i] == 'E')
{
register int j;
- for (j = XVECLEN (x, i) - 1; j >=0; j--)
+ for (j = XVECLEN (x, i) - 1; j >= 0; j--)
scan_paradoxical_subregs (XVECEXP (x, i, j));
}
}
#endif
rtx x;
- bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
- bzero ((char *) spill_reg_store, sizeof spill_reg_store);
+ memset ((char *) spill_reg_rtx, 0, sizeof spill_reg_rtx);
+ memset ((char *) spill_reg_store, 0, sizeof spill_reg_store);
reg_last_reload_reg = (rtx *) xcalloc (max_regno, sizeof (rtx));
reg_has_output_reload = (char *) xmalloc (max_regno);
CLEAR_HARD_REG_SET (reg_reloaded_valid);
if (GET_CODE (insn) == CODE_LABEL)
set_offsets_for_label (insn);
- else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
+ else if (INSN_P (insn))
{
rtx oldpat = PATTERN (insn);
rtx's for those pseudo regs. */
else
{
- bzero (reg_has_output_reload, max_regno);
+ memset (reg_has_output_reload, 0, max_regno);
CLEAR_HARD_REG_SET (reg_is_output_reload);
find_reloads (insn, 1, spill_indirect_levels, live_known,
spill_reg_order);
}
- if (num_eliminable && chain->need_elim)
- update_eliminable_offsets ();
-
if (n_reloads > 0)
{
rtx next = NEXT_INSN (insn);
into the insn's body (or perhaps into the bodies of other
load and store insn that we just made for reloading
and that we moved the structure into). */
- subst_reloads ();
+ subst_reloads (insn);
/* If this was an ASM, make sure that all the reload insns
we have generated are valid. If not, give an error
if (asm_noperands (PATTERN (insn)) >= 0)
for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
- if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
+ if (p != insn && INSN_P (p)
&& (recog_memoized (p) < 0
|| (extract_insn (p), ! constrain_operands (1))))
{
NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
}
}
+
+ if (num_eliminable && chain->need_elim)
+ update_eliminable_offsets ();
+
/* Any previously reloaded spilled pseudo reg, stored in this insn,
is no longer validly lying around to save a future reload.
Note that this does not detect pseudos that were reloaded
use PATTERN (p) as argument to reg_set_p . */
if (reg_set_p (reload_reg, PATTERN (p)))
break;
- n = count_occurrences (PATTERN (p), reload_reg);
+ n = count_occurrences (PATTERN (p), reload_reg, 0);
if (! n)
continue;
if (n == 1)
reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
}
else
- forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
+ forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
NULL);
}
else if ((code == PRE_INC || code == PRE_DEC)
unsigned int nr;
int offset = 0;
- /* note_stores does give us subregs of hard regs. */
+ /* note_stores does give us subregs of hard regs,
+ subreg_regno_offset will abort if it is not a hard reg. */
while (GET_CODE (x) == SUBREG)
{
- offset += SUBREG_WORD (x);
+ offset += subreg_regno_offset (REGNO (SUBREG_REG (x)),
+ GET_MODE (SUBREG_REG (x)),
+ SUBREG_BYTE (x),
+ GET_MODE (x));
x = SUBREG_REG (x);
}
reload reg in the current instruction. */
if (n_reloads == 0
|| ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
- CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
+ {
+ CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
+ spill_reg_store[regno + i] = 0;
+ }
}
/* Since value of X has changed,
if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
return 0;
- return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
- && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
+ return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
+ && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
+ && !TEST_HARD_REG_BIT (reload_reg_used, regno));
case RELOAD_FOR_INPUT:
/* Similar to input address, except we start at the next operand for
|| TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
return 0;
- return 1;
+ return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
case RELOAD_FOR_OPADDR_ADDR:
for (i = 0; i < reload_n_operands; i++)
|| TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
return 0;
- return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
- && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
+ return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
+ && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
+ && !TEST_HARD_REG_BIT (reload_reg_used, regno));
case RELOAD_FOR_INSN:
/* These conflict with other outputs with RELOAD_OTHER. So
or -1 if we did not need a register for this reload. */
int reload_spill_index[MAX_RELOADS];
-/* Return 1 if the value in reload reg REGNO, as used by a reload
- needed for the part of the insn specified by OPNUM and TYPE,
- may be used to load VALUE into it.
-
- Other read-only reloads with the same value do not conflict
- unless OUT is non-zero and these other reloads have to live while
- output reloads live.
- If OUT is CONST0_RTX, this is a special case: it means that the
- test should not be for using register REGNO as reload register, but
- for copying from register REGNO into the reload register.
-
- RELOADNUM is the number of the reload we want to load this value for;
- a reload does not conflict with itself.
+/* Subroutine of free_for_value_p, used to check a single register.
+ START_REGNO is the starting regno of the full reload register
+ (possibly comprising multiple hard registers) that we are considering. */
- When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
- reloads that load an address for the very reload we are considering.
-
- The caller has to make sure that there is no conflict with the return
- register. */
static int
-reload_reg_free_for_value_p (regno, opnum, type, value, out, reloadnum,
- ignore_address_reloads)
- int regno;
+reload_reg_free_for_value_p (start_regno, regno, opnum, type, value, out,
+ reloadnum, ignore_address_reloads)
+ int start_regno, regno;
int opnum;
enum reload_type type;
rtx value, out;
<= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned)1)
&& i != reloadnum)
{
- if (! rld[i].in || ! rtx_equal_p (rld[i].in, value)
+ rtx other_input = rld[i].in;
+
+ /* If the other reload loads the same input value, that
+ will not cause a conflict only if it's loading it into
+ the same register. */
+ if (true_regnum (reg) != start_regno)
+ other_input = NULL_RTX;
+ if (! other_input || ! rtx_equal_p (other_input, value)
|| rld[i].out || out)
{
int time2;
case RELOAD_OTHER:
/* If there is no conflict in the input part, handle this
like an output reload. */
- if (! rld[i].in || rtx_equal_p (rld[i].in, value))
+ if (! rld[i].in || rtx_equal_p (other_input, value))
{
time2 = MAX_RECOG_OPERANDS * 4 + 4;
/* Earlyclobbered outputs must conflict with inputs. */
if (earlyclobber_operand_p (rld[i].out))
time2 = MAX_RECOG_OPERANDS * 4 + 3;
-
+
break;
}
time2 = 1;
}
if ((time1 >= time2
&& (! rld[i].in || rld[i].out
- || ! rtx_equal_p (rld[i].in, value)))
+ || ! rtx_equal_p (other_input, value)))
|| (out && rld[reloadnum].out_reg
&& time2 >= MAX_RECOG_OPERANDS * 4 + 3))
return 0;
return 1;
}
+/* Return 1 if the value in reload reg REGNO, as used by a reload
+ needed for the part of the insn specified by OPNUM and TYPE,
+ may be used to load VALUE into it.
+
+ MODE is the mode in which the register is used, this is needed to
+ determine how many hard regs to test.
+
+ Other read-only reloads with the same value do not conflict
+ unless OUT is non-zero and these other reloads have to live while
+ output reloads live.
+ If OUT is CONST0_RTX, this is a special case: it means that the
+ test should not be for using register REGNO as reload register, but
+ for copying from register REGNO into the reload register.
+
+ RELOADNUM is the number of the reload we want to load this value for;
+ a reload does not conflict with itself.
+
+ When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
+ reloads that load an address for the very reload we are considering.
+
+ The caller has to make sure that there is no conflict with the return
+ register. */
+
+static int
+free_for_value_p (regno, mode, opnum, type, value, out, reloadnum,
+ ignore_address_reloads)
+ int regno;
+ enum machine_mode mode;
+ int opnum;
+ enum reload_type type;
+ rtx value, out;
+ int reloadnum;
+ int ignore_address_reloads;
+{
+ /* Number of hard registers the reload reg spans when used in MODE.  */
+ int nregs = HARD_REGNO_NREGS (regno, mode);
+
+ /* Every hard register REGNO .. REGNO + NREGS - 1 must individually be
+ free for VALUE; they are checked from the highest register downward,
+ with REGNO passed along as the start of the full reload register.  */
+ while (nregs-- > 0)
+ if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
+ value, out, reloadnum,
+ ignore_address_reloads))
+ return 0;
+ return 1;
+}
+
+/* Determine whether the reload reg X overlaps any rtx'es used for
+ overriding inheritance. Return nonzero if so. */
+
+static int
+conflicts_with_override (x)
+ rtx x;
+{
+ int i;
+
+ /* Scan every reload; any overlap between X and a recorded
+ reload_override_in rtx means X cannot safely be used.  */
+ for (i = 0; i < n_reloads; i++)
+ if (reload_override_in[i]
+ && reg_overlap_mentioned_p (x, reload_override_in[i]))
+ return 1;
+ return 0;
+}
+\f
/* Give an error message saying we failed to find a reload for INSN,
and clear out reload R. */
static void
/* We check reload_reg_used to make sure we
don't clobber the return register. */
&& ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
- && reload_reg_free_for_value_p (regnum,
- rld[r].opnum,
- rld[r].when_needed,
- rld[r].in,
- rld[r].out, r, 1)))
+ && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
+ rld[r].when_needed, rld[r].in,
+ rld[r].out, r, 1)))
&& TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
&& HARD_REGNO_MODE_OK (regnum, rld[r].mode)
/* Look first for regs to share, then for unshared. But
if (count < n_spills)
break;
}
-
+
/* We should have found a spill register by now. */
if (count >= n_spills)
return 0;
/* Initialize all the tables needed to allocate reload registers.
CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
is the array we use to restore the reg_rtx field for every reload. */
+
static void
choose_reload_regs_init (chain, save_reload_reg_rtx)
struct insn_chain *chain;
for (i = 0; i < n_reloads; i++)
rld[i].reg_rtx = save_reload_reg_rtx[i];
- bzero (reload_inherited, MAX_RELOADS);
- bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
- bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
+ memset (reload_inherited, 0, MAX_RELOADS);
+ memset ((char *) reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
+ memset ((char *) reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
CLEAR_HARD_REG_SET (reload_reg_used);
CLEAR_HARD_REG_SET (reload_reg_used_at_all);
compute_use_by_pseudos (®_used_in_insn, &chain->live_throughout);
compute_use_by_pseudos (®_used_in_insn, &chain->dead_or_set);
}
+
for (i = 0; i < reload_n_operands; i++)
{
CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
{
max_group_size = MAX (rld[j].nregs, max_group_size);
group_class
- = reg_class_superunion[(int)rld[j].class][(int)group_class];
+ = reg_class_superunion[(int) rld[j].class][(int)group_class];
}
save_reload_reg_rtx[j] = rld[j].reg_rtx;
if (inheritance)
{
- int word = 0;
+ int byte = 0;
register int regno = -1;
enum machine_mode mode = VOIDmode;
else if (GET_CODE (rld[r].in_reg) == SUBREG
&& GET_CODE (SUBREG_REG (rld[r].in_reg)) == REG)
{
- word = SUBREG_WORD (rld[r].in_reg);
+ byte = SUBREG_BYTE (rld[r].in_reg);
regno = REGNO (SUBREG_REG (rld[r].in_reg));
if (regno < FIRST_PSEUDO_REGISTER)
- regno += word;
+ regno = subreg_regno (rld[r].in_reg);
mode = GET_MODE (rld[r].in_reg);
}
#ifdef AUTO_INC_DEC
that can invalidate an inherited reload of part of a pseudoreg. */
else if (GET_CODE (rld[r].in) == SUBREG
&& GET_CODE (SUBREG_REG (rld[r].in)) == REG)
- regno = REGNO (SUBREG_REG (rld[r].in)) + SUBREG_WORD (rld[r].in);
+ regno = subreg_regno (rld[r].in);
#endif
if (regno >= 0 && reg_last_reload_reg[regno] != 0)
{
enum reg_class class = rld[r].class, last_class;
rtx last_reg = reg_last_reload_reg[regno];
+ enum machine_mode need_mode;
- i = REGNO (last_reg) + word;
+ i = REGNO (last_reg);
+ i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
last_class = REGNO_REG_CLASS (i);
- if ((GET_MODE_SIZE (GET_MODE (last_reg))
- >= GET_MODE_SIZE (mode) + word * UNITS_PER_WORD)
+
+ if (byte == 0)
+ need_mode = mode;
+ else
+ need_mode
+ = smallest_mode_for_size (GET_MODE_SIZE (mode) + byte,
+ GET_MODE_CLASS (mode));
+
+ if (
+#ifdef CLASS_CANNOT_CHANGE_MODE
+ (TEST_HARD_REG_BIT
+ (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE], i)
+ ? ! CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (last_reg),
+ need_mode)
+ : (GET_MODE_SIZE (GET_MODE (last_reg))
+ >= GET_MODE_SIZE (need_mode)))
+#else
+ (GET_MODE_SIZE (GET_MODE (last_reg))
+ >= GET_MODE_SIZE (need_mode))
+#endif
&& reg_reloaded_contents[i] == regno
&& TEST_HARD_REG_BIT (reg_reloaded_valid, i)
&& HARD_REGNO_MODE_OK (i, rld[r].mode)
register, we might use it for reload_override_in,
if copying it to the desired class is cheap
enough. */
- || ((REGISTER_MOVE_COST (last_class, class)
+ || ((REGISTER_MOVE_COST (mode, last_class, class)
< MEMORY_MOVE_COST (mode, class, 1))
#ifdef SECONDARY_INPUT_RELOAD_CLASS
&& (SECONDARY_INPUT_RELOAD_CLASS (class, mode,
&& (rld[r].nregs == max_group_size
|| ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
i))
- && reload_reg_free_for_value_p (i, rld[r].opnum,
- rld[r].when_needed,
- rld[r].in,
- const0_rtx, r, 1))
+ && free_for_value_p (i, rld[r].mode, rld[r].opnum,
+ rld[r].when_needed, rld[r].in,
+ const0_rtx, r, 1))
{
/* If a group is needed, verify that all the subsequent
registers still have their values intact. */
- int nr
- = HARD_REGNO_NREGS (i, rld[r].mode);
+ int nr = HARD_REGNO_NREGS (i, rld[r].mode);
int k;
for (k = 1; k < nr; k++)
if (k == nr)
{
int i1;
+ int bad_for_class;
last_reg = (GET_MODE (last_reg) == mode
? last_reg : gen_rtx_REG (mode, i));
+ bad_for_class = 0;
+ for (k = 0; k < nr; k++)
+ bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
+ i+k);
+
/* We found a register that contains the
value we need. If this register is the
same as an `earlyclobber' operand of the
break;
if (i1 != n_earlyclobbers
- || ! (reload_reg_free_for_value_p
- (i, rld[r].opnum, rld[r].when_needed,
- rld[r].in, rld[r].out, r, 1))
+ || ! (free_for_value_p (i, rld[r].mode,
+ rld[r].opnum,
+ rld[r].when_needed, rld[r].in,
+ rld[r].out, r, 1))
/* Don't use it if we'd clobber a pseudo reg. */
|| (TEST_HARD_REG_BIT (reg_used_in_insn, i)
&& rld[r].out
&& ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
/* Don't clobber the frame pointer. */
- || (i == HARD_FRAME_POINTER_REGNUM && rld[r].out)
+ || (i == HARD_FRAME_POINTER_REGNUM
+ && rld[r].out)
/* Don't really use the inherited spill reg
if we need it wider than we've got it. */
|| (GET_MODE_SIZE (rld[r].mode)
> GET_MODE_SIZE (mode))
- || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
- i)
+ || bad_for_class
/* If find_reloads chose reload_out as reload
register, stay with it - that leaves the
|| (rld[r].out && rld[r].reg_rtx
&& rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
{
- reload_override_in[r] = last_reg;
- reload_inheritance_insn[r]
- = reg_reloaded_insn[i];
+ if (! rld[r].optional)
+ {
+ reload_override_in[r] = last_reg;
+ reload_inheritance_insn[r]
+ = reg_reloaded_insn[i];
+ }
}
else
{
{
register rtx equiv
= find_equiv_reg (search_equiv, insn, rld[r].class,
- -1, NULL_PTR, 0, rld[r].mode);
+ -1, NULL, 0, rld[r].mode);
int regno = 0;
if (equiv != 0)
Make a new REG since this might be used in an
address and not all machines support SUBREGs
there. */
- regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
+ regno = subreg_regno (equiv);
equiv = gen_rtx_REG (rld[r].mode, regno);
}
else
and of the desired class. */
if (equiv != 0
&& ((TEST_HARD_REG_BIT (reload_reg_used_at_all, regno)
- && ! reload_reg_free_for_value_p (regno, rld[r].opnum,
- rld[r].when_needed,
- rld[r].in,
- rld[r].out, r, 1))
+ && ! free_for_value_p (regno, rld[r].mode,
+ rld[r].opnum, rld[r].when_needed,
+ rld[r].in, rld[r].out, r, 1))
|| ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
regno)))
equiv = 0;
if (reg_overlap_mentioned_for_reload_p (equiv,
reload_earlyclobbers[i]))
{
- reload_override_in[r] = equiv;
+ if (! rld[r].optional)
+ reload_override_in[r] = equiv;
equiv = 0;
break;
}
In particular, we then can't use EQUIV for a
RELOAD_FOR_OUTPUT_ADDRESS reload. */
- if (equiv != 0 && regno_clobbered_p (regno, insn))
+ if (equiv != 0)
{
- switch (rld[r].when_needed)
- {
- case RELOAD_FOR_OTHER_ADDRESS:
- case RELOAD_FOR_INPADDR_ADDRESS:
- case RELOAD_FOR_INPUT_ADDRESS:
- case RELOAD_FOR_OPADDR_ADDR:
- break;
- case RELOAD_OTHER:
- case RELOAD_FOR_INPUT:
- case RELOAD_FOR_OPERAND_ADDRESS:
- reload_override_in[r] = equiv;
- /* Fall through. */
- default:
- equiv = 0;
- break;
- }
+ if (regno_clobbered_p (regno, insn, rld[r].mode, 0))
+ switch (rld[r].when_needed)
+ {
+ case RELOAD_FOR_OTHER_ADDRESS:
+ case RELOAD_FOR_INPADDR_ADDRESS:
+ case RELOAD_FOR_INPUT_ADDRESS:
+ case RELOAD_FOR_OPADDR_ADDR:
+ break;
+ case RELOAD_OTHER:
+ case RELOAD_FOR_INPUT:
+ case RELOAD_FOR_OPERAND_ADDRESS:
+ if (! rld[r].optional)
+ reload_override_in[r] = equiv;
+ /* Fall through. */
+ default:
+ equiv = 0;
+ break;
+ }
+ else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
+ switch (rld[r].when_needed)
+ {
+ case RELOAD_FOR_OTHER_ADDRESS:
+ case RELOAD_FOR_INPADDR_ADDRESS:
+ case RELOAD_FOR_INPUT_ADDRESS:
+ case RELOAD_FOR_OPADDR_ADDR:
+ case RELOAD_FOR_OPERAND_ADDRESS:
+ case RELOAD_FOR_INPUT:
+ break;
+ case RELOAD_OTHER:
+ if (! rld[r].optional)
+ reload_override_in[r] = equiv;
+ /* Fall through. */
+ default:
+ equiv = 0;
+ break;
+ }
}
/* If we found an equivalent reg, say no code need be generated
if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
continue;
-#if 0 /* No longer needed for correct operation. Might or might not
- give better code on the average. Want to experiment? */
+#if 0
+ /* No longer needed for correct operation. Might or might
+ not give better code on the average. Want to experiment? */
/* See if there is a later reload that has a class different from our
class that intersects our class or that requires less register
check_reg = reload_override_in[r];
else
continue;
- if (! reload_reg_free_for_value_p (true_regnum (check_reg),
- rld[r].opnum,
- rld[r].when_needed,
- rld[r].in,
- (reload_inherited[r]
- ? rld[r].out : const0_rtx),
- r, 1))
+ if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
+ rld[r].opnum, rld[r].when_needed, rld[r].in,
+ (reload_inherited[r]
+ ? rld[r].out : const0_rtx),
+ r, 1))
{
if (pass)
continue;
if (rld[i].when_needed == RELOAD_OTHER)
for (j = 0; j < n_reloads; j++)
if (rld[j].in != 0
- && rld[i].when_needed != RELOAD_OTHER
+ && rld[j].when_needed != RELOAD_OTHER
&& reg_overlap_mentioned_for_reload_p (rld[j].in,
rld[i].in))
rld[j].when_needed
- = ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
- || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
+ = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
+ || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
}
}
}
-
\f
/* These arrays are filled by emit_reload_insns and its subroutines. */
static rtx input_reload_insns[MAX_RECOG_OPERANDS];
oldequiv
= find_equiv_reg (old, insn,
rld[rl->secondary_in_reload].class,
- -1, NULL_PTR, 0, mode);
+ -1, NULL, 0, mode);
#endif
/* If reloading from memory, see if there is a register
|| (GET_CODE (old) == REG
&& REGNO (old) >= FIRST_PSEUDO_REGISTER
&& reg_renumber[REGNO (old)] < 0)))
- oldequiv = find_equiv_reg (old, insn, ALL_REGS,
- -1, NULL_PTR, 0, mode);
+ oldequiv = find_equiv_reg (old, insn, ALL_REGS, -1, NULL, 0, mode);
if (oldequiv)
{
/* Don't use OLDEQUIV if any other reload changes it at an
earlier stage of this insn or at this stage. */
- if (! reload_reg_free_for_value_p (regno, rl->opnum,
- rl->when_needed,
- rl->in, const0_rtx, j,
- 0))
+ if (! free_for_value_p (regno, rl->mode, rl->opnum, rl->when_needed,
+ rl->in, const0_rtx, j, 0))
oldequiv = 0;
/* If it is no cheaper to copy from OLDEQUIV into the
if (oldequiv != 0
&& ((REGNO_REG_CLASS (regno) != rl->class
- && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
+ && (REGISTER_MOVE_COST (mode, REGNO_REG_CLASS (regno),
rl->class)
>= MEMORY_MOVE_COST (mode, rl->class, 1)))
#ifdef SECONDARY_INPUT_RELOAD_CLASS
oldequiv = SUBREG_REG (oldequiv);
if (GET_MODE (oldequiv) != VOIDmode
&& mode != GET_MODE (oldequiv))
- oldequiv = gen_rtx_SUBREG (mode, oldequiv, 0);
+ oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
/* Switch to the right place to emit the reload insns. */
switch (rl->when_needed)
&& dead_or_set_p (insn, old)
/* This is unsafe if some other reload
uses the same reg first. */
- && reload_reg_free_for_value_p (REGNO (reloadreg),
- rl->opnum,
- rl->when_needed,
- old, rl->out,
- j, 0))
+ && ! conflicts_with_override (reloadreg)
+ && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
+ rl->when_needed, old, rl->out, j, 0))
{
rtx temp = PREV_INSN (insn);
while (temp && GET_CODE (temp) == NOTE)
reloadreg)
/* This is unsafe if operand occurs more than once in current
insn. Perhaps some occurrences aren't reloaded. */
- && count_occurrences (PATTERN (insn), old) == 1
+ && count_occurrences (PATTERN (insn), old, 0) == 1
/* Don't risk splitting a matching pair of operands. */
&& ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
{
&& ((reg_equiv_memory_loc
[REGNO (SUBREG_REG (oldequiv))] != 0)
|| (reg_equiv_constant
- [REGNO (SUBREG_REG (oldequiv))] != 0))))
+ [REGNO (SUBREG_REG (oldequiv))] != 0)))
+ || (CONSTANT_P (oldequiv)
+ && PREFERRED_RELOAD_CLASS (oldequiv,
+ REGNO_REG_CLASS (REGNO (reloadreg))) == NO_REGS))
real_oldequiv = rl->in;
gen_reload (reloadreg, real_oldequiv, rl->opnum,
rl->when_needed);
}
+ if (flag_non_call_exceptions)
+ copy_eh_notes (insn, get_insns ());
+
/* End this sequence. */
*where = get_insns ();
end_sequence ();
-
+
/* Update reload_override_in so that delete_address_reloads_1
can see the actual register usage. */
if (oldequiv_reg)
/* Copy primary reload reg to secondary reload reg.
(Note that these have been swapped above, then
- secondary reload reg to OLD using our insn. */
+ secondary reload reg to OLD using our insn.) */
/* If REAL_OLD is a paradoxical SUBREG, remove it
and try to put the opposite SUBREG on
rtx set;
/* Don't output the last reload if OLD is not the dest of
- INSN and is in the src and is clobbered by INSN. */
+ INSN and is in the src and is clobbered by INSN. */
if (! flag_expensive_optimizations
|| GET_CODE (old) != REG
|| !(set = single_set (insn))
|| rtx_equal_p (old, SET_DEST (set))
|| !reg_mentioned_p (old, SET_SRC (set))
- || !regno_clobbered_p (REGNO (old), insn))
+ || !regno_clobbered_p (REGNO (old), insn, rl->mode, 0))
gen_reload (old, reloadreg, rl->opnum,
rl->when_needed);
}
/* Look at all insns we emitted, just to be safe. */
for (p = get_insns (); p; p = NEXT_INSN (p))
- if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
+ if (INSN_P (p))
{
rtx pat = PATTERN (p);
register, the secondary reload does the actual
store. */
if (s >= 0 && set == NULL_RTX)
- ; /* We can't tell what function the secondary reload
- has and where the actual store to the pseudo is
- made; leave new_spill_reg_store alone. */
+ /* We can't tell what function the secondary reload
+ has and where the actual store to the pseudo is
+ made; leave new_spill_reg_store alone. */
+ ;
else if (s >= 0
&& SET_SRC (set) == rl->reg_rtx
&& SET_DEST (set) == rld[s].reg_rtx)
else
output_reload_insns[rl->opnum] = get_insns ();
- end_sequence ();
+ if (flag_non_call_exceptions)
+ copy_eh_notes (insn, get_insns ());
+
+ end_sequence ();
}
/* Do input reloading for reload RL, which is for the insn described by CHAIN
&& TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
{
expect_occurrences
- = count_occurrences (PATTERN (insn), rl->in) == 1 ? 0 : -1;
- rl->in
- = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
+ = count_occurrences (PATTERN (insn), rl->in, 0) == 1 ? 0 : -1;
+ rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
}
/* If we are reloading a register that was recently stored in with an
operand_reload_insns = 0;
other_operand_reload_insns = 0;
+ /* Dump reloads into the dump file. */
+ if (rtl_dump_file)
+ {
+ fprintf (rtl_dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
+ debug_reload_to_stream (rtl_dump_file);
+ }
+
/* Now output the instructions to copy the data into and out of the
reload registers. Do these in the order that the reloads were reported,
since reloads of base and index registers precede reloads of operands
if (i >= 0 && rld[r].reg_rtx != 0)
{
- int nr
- = HARD_REGNO_NREGS (i, GET_MODE (rld[r].reg_rtx));
+ int nr = HARD_REGNO_NREGS (i, GET_MODE (rld[r].reg_rtx));
int k;
int part_reaches_end = 0;
int all_reaches_end = 1;
necessarily checked exactly in the code that moves
notes, so just check both locations. */
rtx note = find_regno_note (insn, REG_DEAD, src_regno);
- if (! note)
+ if (! note && store_insn)
note = find_regno_note (store_insn, REG_DEAD, src_regno);
while (nr-- > 0)
{
}
else
{
- int num_regs = HARD_REGNO_NREGS (nregno,GET_MODE (rld[r].out));
+ int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (rld[r].out));
while (num_regs-- > 0)
reg_last_reload_reg[nregno + num_regs] = 0;
return;
}
}
- n_occurrences = count_occurrences (PATTERN (insn), reg);
+ n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
if (substed)
- n_occurrences += count_occurrences (PATTERN (insn), substed);
+ n_occurrences += count_occurrences (PATTERN (insn),
+ eliminate_regs (substed, 0,
+ NULL_RTX), 0);
if (n_occurrences > n_inherited)
return;
|| ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
|| ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
|| (INTVAL (XEXP (SET_SRC (set), 1))
- != - INTVAL (XEXP (SET_SRC (set2), 1))))
+ != -INTVAL (XEXP (SET_SRC (set2), 1))))
return;
delete_insn (prev);
delete_insn (next);
if (code != REG)
{
- const char *fmt= GET_RTX_FORMAT (code);
+ const char *fmt = GET_RTX_FORMAT (code);
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
{
if (fmt[i] == 'e')
delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
else if (fmt[i] == 'E')
{
- for (j = XVECLEN (x, i) - 1; j >=0; j--)
+ for (j = XVECLEN (x, i) - 1; j >= 0; j--)
delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
current_insn);
}
{
if (GET_CODE (i2) == CODE_LABEL)
break;
- if (GET_RTX_CLASS (GET_CODE (i2)) != 'i')
+ if (! INSN_P (i2))
continue;
if (reg_referenced_p (dst, PATTERN (i2)))
{
reg_last_reload_reg[REGNO (incloc)] = 0;
if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
- inc_amount = - inc_amount;
+ inc_amount = -inc_amount;
inc = GEN_INT (inc_amount);
}
}
\f
-/* Return the number of places FIND appears within X, but don't count
- an occurrence if some SET_DEST is FIND. */
-
-int
-count_occurrences (x, find)
- register rtx x, find;
-{
- register int i, j;
- register enum rtx_code code;
- register const char *format_ptr;
- int count;
-
- if (x == find)
- return 1;
- if (x == 0)
- return 0;
-
- code = GET_CODE (x);
-
- switch (code)
- {
- case REG:
- case QUEUED:
- case CONST_INT:
- case CONST_DOUBLE:
- case SYMBOL_REF:
- case CODE_LABEL:
- case PC:
- case CC0:
- return 0;
-
- case MEM:
- if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
- return 1;
- break;
- case SET:
- if (SET_DEST (x) == find)
- return count_occurrences (SET_SRC (x), find);
- break;
-
- default:
- break;
- }
-
- format_ptr = GET_RTX_FORMAT (code);
- count = 0;
-
- for (i = 0; i < GET_RTX_LENGTH (code); i++)
- {
- switch (*format_ptr++)
- {
- case 'e':
- count += count_occurrences (XEXP (x, i), find);
- break;
-
- case 'E':
- if (XVEC (x, i) != NULL)
- {
- for (j = 0; j < XVECLEN (x, i); j++)
- count += count_occurrences (XVECEXP (x, i, j), find);
- }
- break;
- }
- }
- return count;
-}
-\f
/* INSN is a no-op; delete it.
If this sets the return value of the function, we must keep a USE around,
in case this is in a different basic block than the final USE. Otherwise,
if (GET_CODE (body) == SET)
{
int count = 0;
- if (reload_cse_noop_set_p (body))
+
+ /* Simplify even if we may think it is a no-op.
+ We may think a memory load of a value smaller than WORD_SIZE
+ is redundant because we haven't taken into account possible
+ implicit extension. reload_cse_simplify_set() will bring
+ this out, so it's safer to simplify before we delete. */
+ count += reload_cse_simplify_set (body, insn);
+
+ if (!count && reload_cse_noop_set_p (body))
{
rtx value = SET_DEST (body);
if (! REG_FUNCTION_VALUE_P (SET_DEST (body)))
return;
}
- /* It's not a no-op, but we can try to simplify it. */
- count += reload_cse_simplify_set (body, insn);
-
if (count > 0)
apply_change_group ();
else
{
rtx insn;
- cselib_init ();
+ cselib_init ();
init_alias_analysis ();
for (insn = first; insn; insn = NEXT_INSN (insn))
{
- if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
+ if (INSN_P (insn))
reload_cse_simplify (insn);
cselib_process_insn (insn);
int old_cost;
cselib_val *val;
struct elt_loc_list *l;
+#ifdef LOAD_EXTEND_OP
+ enum rtx_code extend_op = NIL;
+#endif
dreg = true_regnum (SET_DEST (set));
if (dreg < 0)
dclass = REGNO_REG_CLASS (dreg);
+#ifdef LOAD_EXTEND_OP
+ /* When replacing a memory with a register, we need to honor assumptions
+ that combine made wrt the contents of sign bits. We'll do this by
+ generating an extend instruction instead of a reg->reg copy. Thus
+ the destination must be a register that we can widen. */
+ if (GET_CODE (src) == MEM
+ && GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
+ && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != NIL
+ && GET_CODE (SET_DEST (set)) != REG)
+ return 0;
+#endif
+
/* If memory loads are cheaper than register copies, don't change them. */
if (GET_CODE (src) == MEM)
old_cost = MEMORY_MOVE_COST (GET_MODE (src), dclass, 1);
else if (CONSTANT_P (src))
old_cost = rtx_cost (src, SET);
else if (GET_CODE (src) == REG)
- old_cost = REGISTER_MOVE_COST (REGNO_REG_CLASS (REGNO (src)), dclass);
+ old_cost = REGISTER_MOVE_COST (GET_MODE (src),
+ REGNO_REG_CLASS (REGNO (src)), dclass);
else
/* ??? */
old_cost = rtx_cost (src, SET);
- val = cselib_lookup (src, VOIDmode, 0);
+ val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0);
if (! val)
return 0;
for (l = val->locs; l; l = l->next)
{
+ rtx this_rtx = l->loc;
int this_cost;
- if (CONSTANT_P (l->loc) && ! references_value_p (l->loc, 0))
- this_cost = rtx_cost (l->loc, SET);
- else if (GET_CODE (l->loc) == REG)
- this_cost = REGISTER_MOVE_COST (REGNO_REG_CLASS (REGNO (l->loc)),
- dclass);
+
+ if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
+ {
+#ifdef LOAD_EXTEND_OP
+ if (extend_op != NIL)
+ {
+ HOST_WIDE_INT this_val;
+
+ /* ??? I'm lazy and don't wish to handle CONST_DOUBLE. Other
+ constants, such as SYMBOL_REF, cannot be extended. */
+ if (GET_CODE (this_rtx) != CONST_INT)
+ continue;
+
+ this_val = INTVAL (this_rtx);
+ switch (extend_op)
+ {
+ case ZERO_EXTEND:
+ this_val &= GET_MODE_MASK (GET_MODE (src));
+ break;
+ case SIGN_EXTEND:
+ /* ??? In theory we're already extended. */
+ if (this_val == trunc_int_for_mode (this_val, GET_MODE (src)))
+ break;
+ default:
+ abort ();
+ }
+ this_rtx = GEN_INT (this_val);
+ }
+#endif
+ this_cost = rtx_cost (this_rtx, SET);
+ }
+ else if (GET_CODE (this_rtx) == REG)
+ {
+#ifdef LOAD_EXTEND_OP
+ if (extend_op != NIL)
+ {
+ this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
+ this_cost = rtx_cost (this_rtx, SET);
+ }
+ else
+#endif
+ this_cost = REGISTER_MOVE_COST (GET_MODE (this_rtx),
+ REGNO_REG_CLASS (REGNO (this_rtx)),
+ dclass);
+ }
else
continue;
- /* If equal costs, prefer registers over anything else. That tends to
- lead to smaller instructions on some machines. */
- if ((this_cost < old_cost
- || (this_cost == old_cost
- && GET_CODE (l->loc) == REG
- && GET_CODE (SET_SRC (set)) != REG))
- && validate_change (insn, &SET_SRC (set), copy_rtx (l->loc), 1))
- old_cost = this_cost, did_change = 1;
+
+ /* If equal costs, prefer registers over anything else. That
+ tends to lead to smaller instructions on some machines. */
+ if (this_cost < old_cost
+ || (this_cost == old_cost
+ && GET_CODE (this_rtx) == REG
+ && GET_CODE (SET_SRC (set)) != REG))
+ {
+#ifdef LOAD_EXTEND_OP
+ if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
+ && extend_op != NIL)
+ {
+ rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
+ ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
+ validate_change (insn, &SET_DEST (set), wide_dest, 1);
+ }
+#endif
+
+ validate_change (insn, &SET_SRC (set), copy_rtx (this_rtx), 1);
+ old_cost = this_cost, did_change = 1;
+ }
}
return did_change;
reload_cse_simplify_operands (insn)
rtx insn;
{
- int i,j;
+ int i, j;
/* For each operand, all registers that are equivalent to it. */
HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];
/* Figure out which alternative currently matches. */
if (! constrain_operands (1))
fatal_insn_not_found (insn);
-
+
alternative_reject = (int *) alloca (recog_data.n_alternatives * sizeof (int));
alternative_nregs = (int *) alloca (recog_data.n_alternatives * sizeof (int));
alternative_order = (int *) alloca (recog_data.n_alternatives * sizeof (int));
- bzero ((char *)alternative_reject, recog_data.n_alternatives * sizeof (int));
- bzero ((char *)alternative_nregs, recog_data.n_alternatives * sizeof (int));
+ memset ((char *)alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
+ memset ((char *)alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));
/* For each operand, find out which regs are equivalent. */
for (i = 0; i < recog_data.n_operands; i++)
CLEAR_HARD_REG_SET (equiv_regs[i]);
/* cselib blows up on CODE_LABELs. Trying to fix that doesn't seem
- right, so avoid the problem here. */
- if (GET_CODE (recog_data.operand[i]) == CODE_LABEL)
+ right, so avoid the problem here. Likewise if we have a constant
+ and the insn pattern doesn't tell us the mode we need. */
+ if (GET_CODE (recog_data.operand[i]) == CODE_LABEL
+ || (CONSTANT_P (recog_data.operand[i])
+ && recog_data.operand_mode[i] == VOIDmode))
continue;
v = cselib_lookup (recog_data.operand[i], recog_data.operand_mode[i], 0);
case 's': case 'i': case 'n':
case 'I': case 'J': case 'K': case 'L':
case 'M': case 'N': case 'O': case 'P':
-#ifdef EXTRA_CONSTRAINT
- case 'Q': case 'R': case 'S': case 'T': case 'U':
-#endif
case 'p': case 'X':
/* These don't say anything we care about. */
break;
/* See if REGNO fits this alternative, and set it up as the
replacement register if we don't have one for this
alternative yet and the operand being replaced is not
- a cheap CONST_INT. */
+ a cheap CONST_INT. */
if (op_alt_regno[i][j] == -1
&& reg_fits_class_p (reg, class, 0, mode)
&& (GET_CODE (recog_data.operand[i]) != CONST_INT
reload_combine ()
{
rtx insn, set;
- int first_index_reg = 1, last_index_reg = 0;
+ int first_index_reg = -1;
+ int last_index_reg = 0;
int i;
+ unsigned int r;
int last_label_ruid;
int min_labelno, n_labels;
HARD_REG_SET ever_live_at_start, *label_live;
/* To avoid wasting too much time later searching for an index register,
determine the minimum and maximum index register numbers. */
- for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
- {
- if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i))
- {
- if (! last_index_reg)
- last_index_reg = i;
- first_index_reg = i;
- }
- }
+ for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
+ if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
+ {
+ if (first_index_reg == -1)
+ first_index_reg = r;
+
+ last_index_reg = r;
+ }
+
/* If no index register is available, we can quit now. */
- if (first_index_reg > last_index_reg)
+ if (first_index_reg == -1)
return;
/* Set up LABEL_LIVE and EVER_LIVE_AT_START. The register lifetime
n_labels = max_label_num () - min_labelno;
label_live = (HARD_REG_SET *) xmalloc (n_labels * sizeof (HARD_REG_SET));
CLEAR_HARD_REG_SET (ever_live_at_start);
+
for (i = n_basic_blocks - 1; i >= 0; i--)
{
insn = BLOCK_HEAD (i);
{
HARD_REG_SET live;
- REG_SET_TO_HARD_REG_SET (live, BASIC_BLOCK (i)->global_live_at_start);
- compute_use_by_pseudos (&live, BASIC_BLOCK (i)->global_live_at_start);
+ REG_SET_TO_HARD_REG_SET (live,
+ BASIC_BLOCK (i)->global_live_at_start);
+ compute_use_by_pseudos (&live,
+ BASIC_BLOCK (i)->global_live_at_start);
COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
IOR_HARD_REG_SET (ever_live_at_start, live);
}
/* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
last_label_ruid = reload_combine_ruid = 0;
- for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
+ for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
{
- reg_state[i].store_ruid = reload_combine_ruid;
- if (fixed_regs[i])
- reg_state[i].use_index = -1;
+ reg_state[r].store_ruid = reload_combine_ruid;
+ if (fixed_regs[r])
+ reg_state[r].use_index = -1;
else
- reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
+ reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
}
for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
is and then later disable any optimization that would cross it. */
if (GET_CODE (insn) == CODE_LABEL)
last_label_ruid = reload_combine_ruid;
- if (GET_CODE (insn) == BARRIER)
- {
- for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
- if (! fixed_regs[i])
- reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
- }
- if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
+ else if (GET_CODE (insn) == BARRIER)
+ for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
+ if (! fixed_regs[r])
+ reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
+
+ if (! INSN_P (insn))
continue;
+
reload_combine_ruid++;
/* Look for (set (REGX) (CONST_INT))
rtx base = XEXP (plus, 1);
rtx prev = prev_nonnote_insn (insn);
rtx prev_set = prev ? single_set (prev) : NULL_RTX;
- int regno = REGNO (reg);
+ unsigned int regno = REGNO (reg);
rtx const_reg = NULL_RTX;
rtx reg_sum = NULL_RTX;
two registers. */
for (i = first_index_reg; i <= last_index_reg; i++)
{
- if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
+ if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
+ i)
&& reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
&& reg_state[i].store_ruid <= reg_state[regno].use_ruid
&& HARD_REGNO_NREGS (i, GET_MODE (reg)) == 1)
{
rtx index_reg = gen_rtx_REG (GET_MODE (reg), i);
+
const_reg = index_reg;
reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
break;
}
}
}
+
/* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
(REGY), i.e. BASE, is not clobbered before the last use we'll
create. */
- if (prev_set
+ if (prev_set != 0
&& GET_CODE (SET_SRC (prev_set)) == CONST_INT
&& rtx_equal_p (SET_DEST (prev_set), reg)
&& reg_state[regno].use_index >= 0
- && reg_state[REGNO (base)].store_ruid <= reg_state[regno].use_ruid
- && reg_sum)
+ && (reg_state[REGNO (base)].store_ruid
+ <= reg_state[regno].use_ruid)
+ && reg_sum != 0)
{
int i;
- /* Change destination register and - if necessary - the
+ /* Change destination register and, if necessary, the
constant value in PREV, the constant loading instruction. */
validate_change (prev, &SET_DEST (prev_set), const_reg, 1);
if (reg_state[regno].offset != const0_rtx)
GEN_INT (INTVAL (SET_SRC (prev_set))
+ INTVAL (reg_state[regno].offset)),
1);
+
/* Now for every use of REG that we have recorded, replace REG
with REG_SUM. */
for (i = reg_state[regno].use_index;
NOTE_SOURCE_FILE (insn) = 0;
if (reg_state[regno].offset != const0_rtx)
- {
- /* Previous REG_EQUIV / REG_EQUAL notes for PREV
- are now invalid. */
- for (np = ®_NOTES (prev); *np; )
- {
- if (REG_NOTE_KIND (*np) == REG_EQUAL
- || REG_NOTE_KIND (*np) == REG_EQUIV)
- *np = XEXP (*np, 1);
- else
- np = &XEXP (*np, 1);
- }
- }
+ /* Previous REG_EQUIV / REG_EQUAL notes for PREV
+ are now invalid. */
+ for (np = ®_NOTES (prev); *np;)
+ {
+ if (REG_NOTE_KIND (*np) == REG_EQUAL
+ || REG_NOTE_KIND (*np) == REG_EQUIV)
+ *np = XEXP (*np, 1);
+ else
+ np = &XEXP (*np, 1);
+ }
+
reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
- reg_state[REGNO (const_reg)].store_ruid = reload_combine_ruid;
+ reg_state[REGNO (const_reg)].store_ruid
+ = reload_combine_ruid;
continue;
}
}
}
+
note_stores (PATTERN (insn), reload_combine_note_store, NULL);
+
if (GET_CODE (insn) == CALL_INSN)
{
rtx link;
- for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
- {
- if (call_used_regs[i])
- {
- reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
- reg_state[i].store_ruid = reload_combine_ruid;
- }
- }
+ for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
+ if (call_used_regs[r])
+ {
+ reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
+ reg_state[r].store_ruid = reload_combine_ruid;
+ }
+
for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
link = XEXP (link, 1))
{
- rtx use = XEXP (link, 0);
- int regno = REGNO (XEXP (use, 0));
- if (GET_CODE (use) == CLOBBER)
- {
- reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
- reg_state[regno].store_ruid = reload_combine_ruid;
- }
- else
- reg_state[regno].use_index = -1;
- }
+ rtx usage_rtx = XEXP (XEXP (link, 0), 0);
+ if (GET_CODE (usage_rtx) == REG)
+ {
+ unsigned int i;
+ unsigned int start_reg = REGNO (usage_rtx);
+ unsigned int num_regs =
+ HARD_REGNO_NREGS (start_reg, GET_MODE (usage_rtx));
+ unsigned int end_reg = start_reg + num_regs - 1;
+ for (i = start_reg; i <= end_reg; i++)
+ if (GET_CODE (XEXP (link, 0)) == CLOBBER)
+ {
+ reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
+ reg_state[i].store_ruid = reload_combine_ruid;
+ }
+ else
+ reg_state[i].use_index = -1;
+ }
+ }
+
}
- if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) != RETURN)
+ else if (GET_CODE (insn) == JUMP_INSN
+ && GET_CODE (PATTERN (insn)) != RETURN)
{
/* Non-spill registers might be used at the call destination in
some unknown fashion, so we have to mark the unknown use. */
HARD_REG_SET *live;
+
if ((condjump_p (insn) || condjump_in_parallel_p (insn))
&& JUMP_LABEL (insn))
live = &LABEL_LIVE (JUMP_LABEL (insn));
else
live = &ever_live_at_start;
+
for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
- {
- if (TEST_HARD_REG_BIT (*live, i))
- reg_state[i].use_index = -1;
- }
+ if (TEST_HARD_REG_BIT (*live, i))
+ reg_state[i].use_index = -1;
}
+
reload_combine_note_use (&PATTERN (insn), insn);
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
{
}
}
}
+
free (label_live);
}
/* Check if DST is a register or a subreg of a register; if it is,
update reg_state[regno].store_ruid and reg_state[regno].use_index
accordingly. Called via note_stores from reload_combine. */
+
static void
reload_combine_note_store (dst, set, data)
rtx dst, set;
if (GET_CODE (dst) == SUBREG)
{
- regno = SUBREG_WORD (dst);
+ regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
+ GET_MODE (SUBREG_REG (dst)),
+ SUBREG_BYTE (dst),
+ GET_MODE (dst));
dst = SUBREG_REG (dst);
}
if (GET_CODE (dst) != REG)
case PLUS:
/* We are interested in (plus (reg) (const_int)) . */
- if (GET_CODE (XEXP (x, 0)) != REG || GET_CODE (XEXP (x, 1)) != CONST_INT)
+ if (GET_CODE (XEXP (x, 0)) != REG
+ || GET_CODE (XEXP (x, 1)) != CONST_INT)
break;
offset = XEXP (x, 1);
x = XEXP (x, 0);
}
}
\f
-/* See if we can reduce the cost of a constant by replacing a move with
- an add. */
+/* See if we can reduce the cost of a constant by replacing a move
+ with an add. We track situations in which a register is set to a
+ constant or to a register plus a constant. */
/* We cannot do our optimization across labels. Invalidating all the
information about register contents we have would be costly, so we
- use last_label_luid (local variable of reload_cse_move2add) to note
- where the label is and then later disable any optimization that would
- cross it.
+ use move2add_last_label_luid to note where the label is and then
+ later disable any optimization that would cross it.
reg_offset[n] / reg_base_reg[n] / reg_mode[n] are only valid if
- reg_set_luid[n] is larger than last_label_luid[n] . */
+ reg_set_luid[n] is greater than last_label_luid[n] . */
static int reg_set_luid[FIRST_PSEUDO_REGISTER];
-/* reg_offset[n] has to be CONST_INT for it and reg_base_reg[n] /
- reg_mode[n] to be valid.
- If reg_offset[n] is a CONST_INT and reg_base_reg[n] is negative, register n
- has been set to reg_offset[n] in mode reg_mode[n] .
- If reg_offset[n] is a CONST_INT and reg_base_reg[n] is non-negative,
- register n has been set to the sum of reg_offset[n] and register
- reg_base_reg[n], calculated in mode reg_mode[n] . */
-static rtx reg_offset[FIRST_PSEUDO_REGISTER];
+/* If reg_base_reg[n] is negative, register n has been set to
+ reg_offset[n] in mode reg_mode[n] .
+ If reg_base_reg[n] is non-negative, register n has been set to the
+ sum of reg_offset[n] and the value of register reg_base_reg[n]
+ before reg_set_luid[n], calculated in mode reg_mode[n] . */
+static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
static int reg_base_reg[FIRST_PSEUDO_REGISTER];
static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
reload_cse_move2add and move2add_note_store. */
static int move2add_luid;
+/* move2add_last_label_luid is set whenever a label is found. Labels
+ invalidate all previously collected reg_offset data. */
+static int move2add_last_label_luid;
+
/* Generate a CONST_INT and force it in the range of MODE. */
-static rtx
-gen_mode_int (mode, value)
+static HOST_WIDE_INT
+sext_for_mode (mode, value)
enum machine_mode mode;
HOST_WIDE_INT value;
{
&& (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
cval |= (HOST_WIDE_INT) -1 << width;
- return GEN_INT (cval);
+ return cval;
}
+/* ??? We don't know how zero / sign extension is handled, hence we
+ can't go from a narrower to a wider mode. */
+#define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
+ (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
+ || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
+ && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (OUTMODE), \
+ GET_MODE_BITSIZE (INMODE))))
+
static void
reload_cse_move2add (first)
rtx first;
{
int i;
rtx insn;
- int last_label_luid;
- for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
+ for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
reg_set_luid[i] = 0;
- last_label_luid = 0;
- move2add_luid = 1;
+ move2add_last_label_luid = 0;
+ move2add_luid = 2;
for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
{
rtx pat, note;
if (GET_CODE (insn) == CODE_LABEL)
- last_label_luid = move2add_luid;
- if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
+ {
+ move2add_last_label_luid = move2add_luid;
+ /* We're going to increment move2add_luid twice after a
+ label, so that we can use move2add_last_label_luid + 1 as
+ the luid for constants. */
+ move2add_luid++;
+ continue;
+ }
+ if (! INSN_P (insn))
continue;
pat = PATTERN (insn);
/* For simplicity, we only perform this optimization on
/* Check if we have valid information on the contents of this
register in the mode of REG. */
- /* ??? We don't know how zero / sign extension is handled, hence
- we can't go from a narrower to a wider mode. */
- if (reg_set_luid[regno] > last_label_luid
- && (GET_MODE_SIZE (GET_MODE (reg))
- <= GET_MODE_SIZE (reg_mode[regno]))
- && GET_CODE (reg_offset[regno]) == CONST_INT)
+ if (reg_set_luid[regno] > move2add_last_label_luid
+ && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg), reg_mode[regno]))
{
/* Try to transform (set (REGX) (CONST_INT A))
...
if (GET_CODE (src) == CONST_INT && reg_base_reg[regno] < 0)
{
int success = 0;
- rtx new_src
- = gen_mode_int (GET_MODE (reg),
- INTVAL (src) - INTVAL (reg_offset[regno]));
+ rtx new_src = GEN_INT (sext_for_mode (GET_MODE (reg),
+ INTVAL (src)
+ - reg_offset[regno]));
/* (set (reg) (plus (reg) (const_int 0))) is not canonical;
use (set (reg) (reg)) instead.
We don't delete this insn, nor do we convert it into a
if (new_src == const0_rtx)
success = validate_change (insn, &SET_SRC (pat), reg, 0);
else if (rtx_cost (new_src, PLUS) < rtx_cost (src, SET)
- && have_add2_insn (GET_MODE (reg)))
+ && have_add2_insn (reg, new_src))
success = validate_change (insn, &PATTERN (insn),
gen_add2_insn (reg, new_src), 0);
reg_set_luid[regno] = move2add_luid;
reg_mode[regno] = GET_MODE (reg);
- reg_offset[regno] = src;
+ reg_offset[regno] = INTVAL (src);
continue;
}
...
(set (REGX) (plus (REGX) (CONST_INT B-A))) */
else if (GET_CODE (src) == REG
- && reg_base_reg[regno] == (int) REGNO (src)
- && reg_set_luid[regno] > reg_set_luid[REGNO (src)])
+ && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
+ && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
+ && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg),
+ reg_mode[REGNO (src)]))
{
rtx next = next_nonnote_insn (insn);
rtx set = NULL_RTX;
if (next)
set = single_set (next);
- if (next
- && set
+ if (set
&& SET_DEST (set) == reg
&& GET_CODE (SET_SRC (set)) == PLUS
&& XEXP (SET_SRC (set), 0) == reg
&& GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
{
rtx src3 = XEXP (SET_SRC (set), 1);
- rtx new_src
- = gen_mode_int (GET_MODE (reg),
- INTVAL (src3)
- - INTVAL (reg_offset[regno]));
+ HOST_WIDE_INT added_offset = INTVAL (src3);
+ HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
+ HOST_WIDE_INT regno_offset = reg_offset[regno];
+ rtx new_src = GEN_INT (sext_for_mode (GET_MODE (reg),
+ added_offset
+ + base_offset
+ - regno_offset));
int success = 0;
if (new_src == const0_rtx)
success
= validate_change (next, &SET_SRC (set), reg, 0);
else if ((rtx_cost (new_src, PLUS)
- < 2 + rtx_cost (src3, SET))
- && have_add2_insn (GET_MODE (reg)))
+ < COSTS_N_INSNS (1) + rtx_cost (src3, SET))
+ && have_add2_insn (reg, new_src))
success
= validate_change (next, &PATTERN (next),
gen_add2_insn (reg, new_src), 0);
NOTE_SOURCE_FILE (insn) = 0;
}
insn = next;
- reg_set_luid[regno] = move2add_luid;
reg_mode[regno] = GET_MODE (reg);
- reg_offset[regno] = src3;
+ reg_offset[regno] = sext_for_mode (GET_MODE (reg),
+ added_offset
+ + base_offset);
continue;
}
}
if (REG_NOTE_KIND (note) == REG_INC
&& GET_CODE (XEXP (note, 0)) == REG)
{
- /* Indicate that this register has been recently written to,
- but the exact contents are not available. */
+ /* Reset the information about this register. */
int regno = REGNO (XEXP (note, 0));
if (regno < FIRST_PSEUDO_REGISTER)
- {
- reg_set_luid[regno] = move2add_luid;
- reg_offset[regno] = note;
- }
+ reg_set_luid[regno] = 0;
}
}
note_stores (PATTERN (insn), move2add_note_store, NULL);
unknown values. */
if (GET_CODE (insn) == CALL_INSN)
{
- for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
+ for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
{
if (call_used_regs[i])
- {
- reg_set_luid[i] = move2add_luid;
- reg_offset[i] = insn; /* Invalidate contents. */
- }
+ /* Reset the information about this register. */
+ reg_set_luid[i] = 0;
}
}
}
if (GET_CODE (dst) == SUBREG)
{
- regno = SUBREG_WORD (dst);
+ regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
+ GET_MODE (SUBREG_REG (dst)),
+ SUBREG_BYTE (dst),
+ GET_MODE (dst));
dst = SUBREG_REG (dst);
}
+ /* Some targets do argument pushes without adding REG_INC notes. */
+
+ if (GET_CODE (dst) == MEM)
+ {
+ dst = XEXP (dst, 0);
+ if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
+ || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
+ reg_set_luid[REGNO (XEXP (dst, 0))] = 0;
+ return;
+ }
if (GET_CODE (dst) != REG)
return;
&& GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
{
rtx src = SET_SRC (set);
+ rtx base_reg;
+ HOST_WIDE_INT offset;
+ int base_regno;
+ /* This may be different from mode, if SET_DEST (set) is a
+ SUBREG. */
+ enum machine_mode dst_mode = GET_MODE (dst);
- reg_mode[regno] = mode;
switch (GET_CODE (src))
{
case PLUS:
- {
- rtx src0 = XEXP (src, 0);
-
- if (GET_CODE (src0) == REG)
- {
- if (REGNO (src0) != regno
- || reg_offset[regno] != const0_rtx)
- {
- reg_base_reg[regno] = REGNO (src0);
- reg_set_luid[regno] = move2add_luid;
- }
+ if (GET_CODE (XEXP (src, 0)) == REG)
+ {
+ base_reg = XEXP (src, 0);
+
+ if (GET_CODE (XEXP (src, 1)) == CONST_INT)
+ offset = INTVAL (XEXP (src, 1));
+ else if (GET_CODE (XEXP (src, 1)) == REG
+ && (reg_set_luid[REGNO (XEXP (src, 1))]
+ > move2add_last_label_luid)
+ && (MODES_OK_FOR_MOVE2ADD
+ (dst_mode, reg_mode[REGNO (XEXP (src, 1))])))
+ {
+ if (reg_base_reg[REGNO (XEXP (src, 1))] < 0)
+ offset = reg_offset[REGNO (XEXP (src, 1))];
+ /* Maybe the first register is known to be a
+ constant. */
+ else if (reg_set_luid[REGNO (base_reg)]
+ > move2add_last_label_luid
+ && (MODES_OK_FOR_MOVE2ADD
+ (dst_mode, reg_mode[REGNO (XEXP (src, 1))]))
+ && reg_base_reg[REGNO (base_reg)] < 0)
+ {
+ offset = reg_offset[REGNO (base_reg)];
+ base_reg = XEXP (src, 1);
+ }
+ else
+ goto invalidate;
+ }
+ else
+ goto invalidate;
- reg_offset[regno] = XEXP (src, 1);
- break;
- }
+ break;
+ }
- reg_set_luid[regno] = move2add_luid;
- reg_offset[regno] = set; /* Invalidate contents. */
- break;
- }
+ goto invalidate;
case REG:
- reg_base_reg[regno] = REGNO (SET_SRC (set));
- reg_offset[regno] = const0_rtx;
- reg_set_luid[regno] = move2add_luid;
+ base_reg = src;
+ offset = 0;
break;
- default:
+ case CONST_INT:
+ /* Start tracking the register as a constant. */
reg_base_reg[regno] = -1;
- reg_offset[regno] = SET_SRC (set);
- reg_set_luid[regno] = move2add_luid;
- break;
+ reg_offset[regno] = INTVAL (SET_SRC (set));
+ /* We assign the same luid to all registers set to constants. */
+ reg_set_luid[regno] = move2add_last_label_luid + 1;
+ reg_mode[regno] = mode;
+ return;
+
+ default:
+ invalidate:
+ /* Invalidate the contents of the register. */
+ reg_set_luid[regno] = 0;
+ return;
+ }
+
+ base_regno = REGNO (base_reg);
+ /* If information about the base register is not valid, set it
+ up as a new base register, pretending its value is known
+ starting from the current insn. */
+ if (reg_set_luid[base_regno] <= move2add_last_label_luid)
+ {
+ reg_base_reg[base_regno] = base_regno;
+ reg_offset[base_regno] = 0;
+ reg_set_luid[base_regno] = move2add_luid;
+ reg_mode[base_regno] = mode;
}
+ else if (! MODES_OK_FOR_MOVE2ADD (dst_mode,
+ reg_mode[base_regno]))
+ goto invalidate;
+
+ reg_mode[regno] = mode;
+
+ /* Copy base information from our base register. */
+ reg_set_luid[regno] = reg_set_luid[base_regno];
+ reg_base_reg[regno] = reg_base_reg[base_regno];
+
+ /* Compute the sum of the offsets or constants. */
+ reg_offset[regno] = sext_for_mode (dst_mode,
+ offset
+ + reg_offset[base_regno]);
}
else
{
unsigned int endregno = regno + HARD_REGNO_NREGS (regno, mode);
for (i = regno; i < endregno; i++)
- {
- /* Indicate that this register has been recently written to,
- but the exact contents are not available. */
- reg_set_luid[i] = move2add_luid;
- reg_offset[i] = dst;
- }
+ /* Reset the information about this register. */
+ reg_set_luid[i] = 0;
}
}
}
}
#endif
+
+/* Copy the REG_EH_REGION note, if any, from INSN to every insn in the
+   chain starting at X that may trap.  This propagates exception-region
+   membership from an insn to the reload insns emitted on its behalf,
+   so that the EH machinery still associates the trapping reloads with
+   the correct handler region.  */
+static void
+copy_eh_notes (insn, x)
+ rtx insn;
+ rtx x;
+{
+ rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (eh_note)
+ {
+ /* Walk forward through the emitted insns; only insns that can
+ actually trap need the region note.  */
+ for (; x != 0; x = NEXT_INSN (x))
+ {
+ if (may_trap_p (PATTERN (x)))
+ REG_NOTES (x)
+ = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
+ REG_NOTES (x));
+ }
+ }
+}
+
+/* This is used by the reload pass, which emits some instructions after
+   abnormal calls, moving the basic block's end; in fact those insns
+   belong on the outgoing edge.  Look for abnormal call edges, search
+   backward for the proper call, and repair the damage by moving the
+   trailing insns onto the fallthru edge.
+
+   Similarly, handle instructions that throw exceptions internally.  */
+static void
+fixup_abnormal_edges ()
+{
+ int i;
+ bool inserted = false;
+
+ for (i = 0; i < n_basic_blocks; i++)
+ {
+ basic_block bb = BASIC_BLOCK (i);
+ edge e;
+
+ /* Look for the cases we are interested in - calls or instructions
+ causing exceptions.  */
+ for (e = bb->succ; e; e = e->succ_next)
+ {
+ if (e->flags & EDGE_ABNORMAL_CALL)
+ break;
+ if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
+ == (EDGE_ABNORMAL | EDGE_EH))
+ break;
+ }
+ /* Only fix up when the block's last insn is NOT itself the call or
+ internally-throwing insn - i.e. reload appended insns after it.  */
+ if (e && GET_CODE (bb->end) != CALL_INSN && !can_throw_internal (bb->end))
+ {
+ rtx insn = bb->end, stop = NEXT_INSN (bb->end);
+ rtx next;
+ for (e = bb->succ; e; e = e->succ_next)
+ if (e->flags & EDGE_FALLTHRU)
+ break;
+ /* Get past the new insns generated.  Allow notes, as the insns may
+ be already deleted.  */
+ while ((GET_CODE (insn) == INSN || GET_CODE (insn) == NOTE)
+ && !can_throw_internal (insn)
+ && insn != bb->head)
+ insn = PREV_INSN (insn);
+ if (GET_CODE (insn) != CALL_INSN && !can_throw_internal (insn))
+ abort ();
+ /* Truncate the block at the offending insn, then move every real
+ insn that followed it onto the fallthru edge instead.  */
+ bb->end = insn;
+ inserted = true;
+ insn = NEXT_INSN (insn);
+ while (insn && insn != stop)
+ {
+ next = NEXT_INSN (insn);
+ if (INSN_P (insn))
+ {
+ insert_insn_on_edge (PATTERN (insn), e);
+ flow_delete_insn (insn);
+ }
+ insn = next;
+ }
+ }
+ }
+ /* Materialize the queued edge insertions in one pass at the end.  */
+ if (inserted)
+ commit_edge_insertions ();
+}