X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Freload1.c;h=d0fec9b29af9355fdc2a3b22c25aeb0f5430c080;hb=ffc0b1edcad561e7c8304c3f6cb38ec31502b024;hp=87cc425e42c1f5ee72533d96ac3b07c28fdd8cec;hpb=997a5633756fea47523b48e0ee3825222d662cb8;p=pf3gnuchains%2Fgcc-fork.git
diff --git a/gcc/reload1.c b/gcc/reload1.c
index 87cc425e42c..d0fec9b29af 100644
--- a/gcc/reload1.c
+++ b/gcc/reload1.c
@@ -1,13 +1,13 @@
/* Reload pseudo regs into hard regs for insns that require hard regs.
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
- Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
@@ -16,9 +16,8 @@ FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
-02110-1301, USA. */
+along with GCC; see the file COPYING3.  If not see
+<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
@@ -36,6 +35,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "expr.h"
#include "optabs.h"
#include "regs.h"
+#include "addresses.h"
#include "basic-block.h"
#include "reload.h"
#include "recog.h"
@@ -44,7 +44,9 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "toplev.h"
#include "except.h"
#include "tree.h"
+#include "df.h"
#include "target.h"
+#include "dse.h"
/* This file contains the reload pass of the compiler, which is
run after register allocation has been done. It checks that
@@ -86,7 +88,7 @@ static rtx *reg_last_reload_reg;
/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
for an output reload that stores into reg N. */
-static char *reg_has_output_reload;
+static regset_head reg_has_output_reload;
/* Indicates which hard regs are reload-registers for an output reload
in the current insn. */
@@ -111,7 +113,7 @@ rtx *reg_equiv_memory_loc;
/* We allocate reg_equiv_memory_loc inside a varray so that the garbage
collector can keep track of what is inside. */
-varray_type reg_equiv_memory_loc_varray;
+VEC(rtx,gc) *reg_equiv_memory_loc_vec;
/* Element N is the address of stack slot to which pseudo reg N is equivalent.
This is used when the address is not valid as a memory address
@@ -122,6 +124,10 @@ rtx *reg_equiv_address;
or zero if pseudo reg N is not equivalent to a memory slot. */
rtx *reg_equiv_mem;
+/* Element N is an EXPR_LIST of REG_EQUIVs containing MEMs with
+ alternate representations of the location of pseudo reg N. */
+rtx *reg_equiv_alt_mem_list;
+
/* Widest width in which each pseudo reg is referred to (via subreg). */
static unsigned int *reg_max_ref_width;
@@ -389,7 +395,7 @@ static void check_eliminable_occurrences (rtx);
static void elimination_effects (rtx, enum machine_mode);
static int eliminate_regs_in_insn (rtx, int);
static void update_eliminable_offsets (void);
-static void mark_not_eliminable (rtx, rtx, void *);
+static void mark_not_eliminable (rtx, const_rtx, void *);
static void set_initial_elim_offsets (void);
static bool verify_initial_elim_offsets (void);
static void set_initial_label_offsets (void);
@@ -402,7 +408,8 @@ static void scan_paradoxical_subregs (rtx);
static void count_pseudo (int);
static void order_regs_for_reload (struct insn_chain *);
static void reload_as_needed (int);
-static void forget_old_reloads_1 (rtx, rtx, void *);
+static void forget_old_reloads_1 (rtx, const_rtx, void *);
+static void forget_marked_reloads (regset);
static int reload_reg_class_lower (const void *, const void *);
static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
enum machine_mode);
@@ -441,7 +448,8 @@ static int reloads_conflict (int, int);
static rtx gen_reload (rtx, rtx, int, enum reload_type);
static rtx emit_insn_if_valid_for_reload (rtx);
-/* Initialize the reload pass once per compilation. */
+/* Initialize the reload pass. This is called at the beginning of compilation
+ and may be called again if the target is reinitialized. */
void
init_reload (void)
@@ -495,7 +503,6 @@ init_reload (void)
INIT_REG_SET (&spilled_pseudos);
INIT_REG_SET (&pseudos_counted);
- VARRAY_RTX_INIT (reg_equiv_memory_loc_varray, 0, "reg_equiv_memory_loc");
}
/* List of insn chains that are currently unused. */
@@ -537,22 +544,17 @@ compute_use_by_pseudos (HARD_REG_SET *to, regset from)
EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
{
int r = reg_renumber[regno];
- int nregs;
if (r < 0)
{
/* reload_combine uses the information from
- BASIC_BLOCK->global_live_at_start, which might still
+ DF_LIVE_IN (BASIC_BLOCK), which might still
contain registers that have not actually been allocated
since they have an equivalence. */
gcc_assert (reload_completed);
}
else
- {
- nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (regno)];
- while (nregs-- > 0)
- SET_HARD_REG_BIT (*to, r + nregs);
- }
+ add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
}
}
@@ -617,6 +619,61 @@ replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
}
+/* Determine if the current function has an exception receiver block
+ that reaches the exit block via non-exceptional edges */
+
+static bool
+has_nonexceptional_receiver (void)
+{
+ edge e;
+ edge_iterator ei;
+ basic_block *tos, *worklist, bb;
+
+ /* If we're not optimizing, then just err on the safe side. */
+ if (!optimize)
+ return true;
+
+ /* First determine which blocks can reach exit via normal paths. */
+ tos = worklist = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));
+
+ FOR_EACH_BB (bb)
+ bb->flags &= ~BB_REACHABLE;
+
+ /* Place the exit block on our worklist. */
+ EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
+ *tos++ = EXIT_BLOCK_PTR;
+
+ /* Iterate: find everything reachable from what we've already seen. */
+ while (tos != worklist)
+ {
+ bb = *--tos;
+
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ if (!(e->flags & EDGE_ABNORMAL))
+ {
+ basic_block src = e->src;
+
+ if (!(src->flags & BB_REACHABLE))
+ {
+ src->flags |= BB_REACHABLE;
+ *tos++ = src;
+ }
+ }
+ }
+ free (worklist);
+
+ /* Now see if there's a reachable block with an exceptional incoming
+ edge. */
+ FOR_EACH_BB (bb)
+ if (bb->flags & BB_REACHABLE)
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ if (e->flags & EDGE_ABNORMAL)
+ return true;
+
+ /* No exceptional block reached exit unexceptionally. */
+ return false;
+}
+
/* Global variables used by reload and its subroutines. */
@@ -683,12 +740,17 @@ reload (rtx first, int global)
for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
mark_home_live (i);
- /* A function that receives a nonlocal goto must save all call-saved
+ /* A function that has a nonlocal label that can reach the exit
+ block via non-exceptional paths must save all call-saved
registers. */
- if (current_function_has_nonlocal_label)
+ if (current_function_has_nonlocal_label
+ && has_nonexceptional_receiver ())
+ current_function_saves_all_registers = 1;
+
+ if (current_function_saves_all_registers)
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
- regs_ever_live[i] = 1;
+ df_set_regs_ever_live (i, true);
/* Find all the pseudo registers that didn't get hard regs
but do have known equivalent constants or memory slots.
@@ -703,6 +765,7 @@ reload (rtx first, int global)
reg_equiv_constant = XCNEWVEC (rtx, max_regno);
reg_equiv_invariant = XCNEWVEC (rtx, max_regno);
reg_equiv_mem = XCNEWVEC (rtx, max_regno);
+ reg_equiv_alt_mem_list = XCNEWVEC (rtx, max_regno);
reg_equiv_address = XCNEWVEC (rtx, max_regno);
reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
reg_old_renumber = XCNEWVEC (short, max_regno);
@@ -882,16 +945,8 @@ reload (rtx first, int global)
{
int something_changed;
int did_spill;
-
HOST_WIDE_INT starting_frame_size;
- /* Round size of stack frame to stack_alignment_needed. This must be done
- here because the stack size may be a part of the offset computation
- for register elimination, and there might have been new stack slots
- created in the last iteration of this loop. */
- if (cfun->stack_alignment_needed)
- assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
-
starting_frame_size = get_frame_size ();
set_initial_elim_offsets ();
@@ -958,6 +1013,20 @@ reload (rtx first, int global)
/* If we allocated another stack slot, redo elimination bookkeeping. */
if (starting_frame_size != get_frame_size ())
continue;
+ if (starting_frame_size && cfun->stack_alignment_needed)
+ {
+ /* If we have a stack frame, we must align it now. The
+ stack size may be a part of the offset computation for
+ register elimination. So if this changes the stack size,
+ then repeat the elimination bookkeeping. We don't
+ realign when there is no stack, as that will cause a
+ stack frame when none is needed should
+ STARTING_FRAME_OFFSET not be already aligned to
+ STACK_BOUNDARY. */
+ assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
+ if (starting_frame_size != get_frame_size ())
+ continue;
+ }
if (caller_save_needed)
{
@@ -989,6 +1058,8 @@ reload (rtx first, int global)
HARD_REG_SET to_spill;
CLEAR_HARD_REG_SET (to_spill);
update_eliminables (&to_spill);
+ AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
+
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (TEST_HARD_REG_BIT (to_spill, i))
{
@@ -1087,9 +1158,8 @@ reload (rtx first, int global)
if (! frame_pointer_needed)
FOR_EACH_BB (bb)
- CLEAR_REGNO_REG_SET (bb->il.rtl->global_live_at_start,
- HARD_FRAME_POINTER_REGNUM);
-
+ bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
+
/* Come here (with failure set nonzero) if we can't get enough spill
regs. */
failed:
@@ -1217,8 +1287,22 @@ reload (rtx first, int global)
add_auto_inc_notes (insn, PATTERN (insn));
#endif
- /* And simplify (subreg (reg)) if it appears as an operand. */
+ /* Simplify (subreg (reg)) if it appears as an operand. */
cleanup_subreg_operands (insn);
+
+ /* Clean up invalid ASMs so that they don't confuse later passes.
+ See PR 21299. */
+ if (asm_noperands (PATTERN (insn)) >= 0)
+ {
+ extract_insn (insn);
+ if (!constrain_operands (1))
+ {
+ error_for_asm (insn,
+ "%<asm%> operand has impossible constraints");
+ delete_insn (insn);
+ continue;
+ }
+ }
}
/* If we are doing stack checking, give a warning if this function's
@@ -1229,7 +1313,7 @@ reload (rtx first, int global)
static int verbose_warned = 0;
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
+ if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
size += UNITS_PER_WORD;
if (size > STACK_CHECK_MAX_FRAME_SIZE)
@@ -1250,7 +1334,7 @@ reload (rtx first, int global)
free (reg_equiv_invariant);
reg_equiv_constant = 0;
reg_equiv_invariant = 0;
- VARRAY_GROW (reg_equiv_memory_loc_varray, 0);
+ VEC_free (rtx, gc, reg_equiv_memory_loc_vec);
reg_equiv_memory_loc = 0;
if (offsets_known_at)
@@ -1258,6 +1342,11 @@ reload (rtx first, int global)
if (offsets_at)
free (offsets_at);
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ if (reg_equiv_alt_mem_list[i])
+ free_EXPR_LIST_list (&reg_equiv_alt_mem_list[i]);
+ free (reg_equiv_alt_mem_list);
+
free (reg_equiv_mem);
reg_equiv_init = 0;
free (reg_equiv_address);
@@ -1332,7 +1421,7 @@ maybe_fix_stack_asms (void)
/* Get the operand values and constraints out of the insn. */
decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
- constraints, operand_mode);
+ constraints, operand_mode, NULL);
/* For every operand, see what registers are allowed. */
for (i = 0; i < noperands; i++)
@@ -1375,7 +1464,7 @@ maybe_fix_stack_asms (void)
case 'p':
cls = (int) reg_class_subunion[cls]
- [(int) MODE_BASE_REG_CLASS (VOIDmode)];
+ [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
break;
case 'g':
@@ -1386,7 +1475,7 @@ maybe_fix_stack_asms (void)
default:
if (EXTRA_ADDRESS_CONSTRAINT (c, p))
cls = (int) reg_class_subunion[cls]
- [(int) MODE_BASE_REG_CLASS (VOIDmode)];
+ [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
else
cls = (int) reg_class_subunion[cls]
[(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
@@ -1446,8 +1535,8 @@ calculate_needs_all_insns (int global)
chain->need_operand_change = 0;
/* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
- include REG_LABEL), we need to see what effects this has on the
- known offsets at labels. */
+ include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
+ what effects this has on the known offsets at labels. */
if (LABEL_P (insn) || JUMP_P (insn)
|| (INSN_P (insn) && REG_NOTES (insn) != 0))
@@ -1833,7 +1922,7 @@ find_reload_regs (struct insn_chain *chain)
if (! find_reg (chain, i))
{
if (dump_file)
- fprintf(dump_file, "reload failure for reload %d\n", r);
+ fprintf (dump_file, "reload failure for reload %d\n", r);
spill_failure (chain->insn, rld[r].class);
failure = 1;
return;
@@ -1956,8 +2045,8 @@ alter_reg (int i, int from_reg)
/* Modify the reg-rtx to contain the new hard reg
number or else to contain its pseudo reg number. */
- REGNO (regno_reg_rtx[i])
- = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
+ SET_REGNO (regno_reg_rtx[i],
+ reg_renumber[i] >= 0 ? reg_renumber[i] : i);
/* If we have a pseudo that is needed but has no hard reg or equivalent,
allocate a stack slot for it. */
@@ -1969,8 +2058,11 @@ alter_reg (int i, int from_reg)
&& reg_equiv_memory_loc[i] == 0)
{
rtx x;
+ enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
+ unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
+ unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
int adjust = 0;
/* Each pseudo reg has an inherent size which comes from its own mode,
@@ -1983,9 +2075,12 @@ alter_reg (int i, int from_reg)
inherent space, and no less total space, then the previous slot. */
if (from_reg == -1)
{
+ alias_set_type alias_set = new_alias_set ();
+
/* No known place to spill from => no slot to reuse. */
- x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
- inherent_size == total_size ? 0 : -1);
+ x = assign_stack_local (mode, total_size,
+ min_align > inherent_align
+ || total_size > inherent_size ? -1 : 0);
if (BYTES_BIG_ENDIAN)
/* Cancel the big-endian correction done in assign_stack_local.
Get the address of the beginning of the slot.
@@ -1994,22 +2089,22 @@ alter_reg (int i, int from_reg)
adjust = inherent_size - total_size;
/* Nothing can alias this slot except this pseudo. */
- set_mem_alias_set (x, new_alias_set ());
+ set_mem_alias_set (x, alias_set);
+ dse_record_singleton_alias_set (alias_set, mode);
}
/* Reuse a stack slot if possible. */
else if (spill_stack_slot[from_reg] != 0
&& spill_stack_slot_width[from_reg] >= total_size
&& (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
- >= inherent_size))
+ >= inherent_size)
+ && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
x = spill_stack_slot[from_reg];
-
/* Allocate a bigger slot. */
else
{
/* Compute maximum size needed, both for inherent size
and for total size. */
- enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
rtx stack_slot;
if (spill_stack_slot[from_reg])
@@ -2019,18 +2114,30 @@ alter_reg (int i, int from_reg)
mode = GET_MODE (spill_stack_slot[from_reg]);
if (spill_stack_slot_width[from_reg] > total_size)
total_size = spill_stack_slot_width[from_reg];
+ if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
+ min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
}
/* Make a slot with that size. */
x = assign_stack_local (mode, total_size,
- inherent_size == total_size ? 0 : -1);
+ min_align > inherent_align
+ || total_size > inherent_size ? -1 : 0);
stack_slot = x;
/* All pseudos mapped to this slot can alias each other. */
if (spill_stack_slot[from_reg])
- set_mem_alias_set (x, MEM_ALIAS_SET (spill_stack_slot[from_reg]));
+ {
+ alias_set_type alias_set
+ = MEM_ALIAS_SET (spill_stack_slot[from_reg]);
+ set_mem_alias_set (x, alias_set);
+ dse_invalidate_singleton_alias_set (alias_set);
+ }
else
- set_mem_alias_set (x, new_alias_set ());
+ {
+ alias_set_type alias_set = new_alias_set ();
+ set_mem_alias_set (x, alias_set);
+ dse_record_singleton_alias_set (alias_set, mode);
+ }
if (BYTES_BIG_ENDIAN)
{
@@ -2085,20 +2192,30 @@ alter_reg (int i, int from_reg)
}
}
-/* Mark the slots in regs_ever_live for the hard regs
- used by pseudo-reg number REGNO. */
+/* Mark the slots in regs_ever_live for the hard regs used by
+ pseudo-reg number REGNO, accessed in MODE. */
-void
-mark_home_live (int regno)
+static void
+mark_home_live_1 (int regno, enum machine_mode mode)
{
int i, lim;
i = reg_renumber[regno];
if (i < 0)
return;
- lim = i + hard_regno_nregs[i][PSEUDO_REGNO_MODE (regno)];
+ lim = end_hard_regno (mode, i);
while (i < lim)
- regs_ever_live[i++] = 1;
+ df_set_regs_ever_live(i++, true);
+}
+
+/* Mark the slots in regs_ever_live for the hard regs
+ used by pseudo-reg number REGNO. */
+
+void
+mark_home_live (int regno)
+{
+ if (reg_renumber[regno] >= 0)
+ mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
}
/* This function handles the tracking of elimination offsets around branches.
@@ -2174,10 +2291,11 @@ set_label_offsets (rtx x, rtx insn, int initial_p)
case INSN:
case CALL_INSN:
- /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
- and hence must have all eliminations at their initial offsets. */
+ /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
+ to indirectly and hence must have all eliminations at their
+ initial offsets. */
for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
- if (REG_NOTE_KIND (tem) == REG_LABEL)
+ if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
set_label_offsets (XEXP (tem, 0), insn, 1);
return;
@@ -2293,6 +2411,7 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
{
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case CONST:
case SYMBOL_REF:
@@ -2508,6 +2627,30 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
case POST_INC:
case PRE_DEC:
case POST_DEC:
+ /* We do not support elimination of a register that is modified.
+ elimination_effects has already make sure that this does not
+ happen. */
+ return x;
+
+ case PRE_MODIFY:
+ case POST_MODIFY:
+ /* We do not support elimination of a register that is modified.
+ elimination_effects has already make sure that this does not
+ happen. The only remaining case we need to consider here is
+ that the increment value may be an eliminable register. */
+ if (GET_CODE (XEXP (x, 1)) == PLUS
+ && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
+ {
+ rtx new = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
+ insn, true);
+
+ if (new != XEXP (XEXP (x, 1), 1))
+ return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
+ gen_rtx_PLUS (GET_MODE (x),
+ XEXP (x, 0), new));
+ }
+ return x;
+
case STRICT_LOW_PART:
case NEG: case NOT:
case SIGN_EXTEND: case ZERO_EXTEND:
@@ -2521,6 +2664,7 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
case CTZ:
case POPCOUNT:
case PARITY:
+ case BSWAP:
new = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false);
if (new != XEXP (x, 0))
return gen_rtx_fmt_e (code, GET_MODE (x), new);
@@ -2607,9 +2751,7 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
new = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false);
if (new != XEXP (x, i) && ! copied)
{
- rtx new_x = rtx_alloc (code);
- memcpy (new_x, x, RTX_SIZE (code));
- x = new_x;
+ x = shallow_copy_rtx (x);
copied = 1;
}
XEXP (x, i) = new;
@@ -2626,9 +2768,7 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
XVEC (x, i)->elem);
if (! copied)
{
- rtx new_x = rtx_alloc (code);
- memcpy (new_x, x, RTX_SIZE (code));
- x = new_x;
+ x = shallow_copy_rtx (x);
copied = 1;
}
XVEC (x, i) = new_v;
@@ -2665,6 +2805,7 @@ elimination_effects (rtx x, enum machine_mode mem_mode)
{
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case CONST:
case SYMBOL_REF:
@@ -2706,6 +2847,14 @@ elimination_effects (rtx x, enum machine_mode mem_mode)
case POST_DEC:
case POST_MODIFY:
case PRE_MODIFY:
+ /* If we modify the source of an elimination rule, disable it. */
+ for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
+ if (ep->from_rtx == XEXP (x, 0))
+ ep->can_eliminate = 0;
+
+ /* If we modify the target of an elimination rule by adding a constant,
+ update its offset. If we modify the target in any other way, we'll
+ have to disable the rule as well. */
for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
if (ep->to_rtx == XEXP (x, 0))
{
@@ -2720,11 +2869,15 @@ elimination_effects (rtx x, enum machine_mode mem_mode)
ep->offset += size;
else if (code == PRE_INC || code == POST_INC)
ep->offset -= size;
- else if ((code == PRE_MODIFY || code == POST_MODIFY)
- && GET_CODE (XEXP (x, 1)) == PLUS
- && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
- && CONSTANT_P (XEXP (XEXP (x, 1), 1)))
- ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
+ else if (code == PRE_MODIFY || code == POST_MODIFY)
+ {
+ if (GET_CODE (XEXP (x, 1)) == PLUS
+ && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
+ && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
+ ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
+ else
+ ep->can_eliminate = 0;
+ }
}
/* These two aren't unary operators. */
@@ -2745,6 +2898,7 @@ elimination_effects (rtx x, enum machine_mode mem_mode)
case CTZ:
case POPCOUNT:
case PARITY:
+ case BSWAP:
elimination_effects (XEXP (x, 0), mem_mode);
return;
@@ -3027,7 +3181,8 @@ eliminate_regs_in_insn (rtx insn, int replace)
rtx links;
for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
{
- if (REG_NOTE_KIND (links) == REG_EQUAL
+ if ((REG_NOTE_KIND (links) == REG_EQUAL
+ || REG_NOTE_KIND (links) == REG_EQUIV)
&& GET_CODE (XEXP (links, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (links, 0), 1)) == CONST_INT)
{
@@ -3062,39 +3217,20 @@ eliminate_regs_in_insn (rtx insn, int replace)
{
rtx to_rtx = ep->to_rtx;
offset += ep->offset;
+ offset = trunc_int_for_mode (offset, GET_MODE (reg));
if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
to_rtx);
- if (offset == 0)
- {
- int num_clobbers;
- /* We assume here that if we need a PARALLEL with
- CLOBBERs for this assignment, we can do with the
- MATCH_SCRATCHes that add_clobbers allocates.
- There's not much we can do if that doesn't work. */
- PATTERN (insn) = gen_rtx_SET (VOIDmode,
- SET_DEST (old_set),
- to_rtx);
- num_clobbers = 0;
- INSN_CODE (insn) = recog (PATTERN (insn), insn, &num_clobbers);
- if (num_clobbers)
- {
- rtvec vec = rtvec_alloc (num_clobbers + 1);
-
- vec->elem[0] = PATTERN (insn);
- PATTERN (insn) = gen_rtx_PARALLEL (VOIDmode, vec);
- add_clobbers (PATTERN (insn), INSN_CODE (insn));
- }
- gcc_assert (INSN_CODE (insn) >= 0);
- }
/* If we have a nonzero offset, and the source is already
a simple REG, the following transformation would
increase the cost of the insn by replacing a simple REG
with (plus (reg sp) CST). So try only when we already
had a PLUS before. */
- else if (plus_src)
+ if (offset == 0 || plus_src)
{
+ rtx new_src = plus_constant (to_rtx, offset);
+
new_body = old_body;
if (! replace)
{
@@ -3105,8 +3241,20 @@ eliminate_regs_in_insn (rtx insn, int replace)
PATTERN (insn) = new_body;
old_set = single_set (insn);
- XEXP (SET_SRC (old_set), 0) = to_rtx;
- XEXP (SET_SRC (old_set), 1) = GEN_INT (offset);
+ /* First see if this insn remains valid when we make the
+ change. If not, try to replace the whole pattern with
+ a simple set (this may help if the original insn was a
+ PARALLEL that was only recognized as single_set due to
+ REG_UNUSED notes). If this isn't valid either, keep
+ the INSN_CODE the same and let reload fix it up. */
+ if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
+ {
+ rtx new_pat = gen_rtx_SET (VOIDmode,
+ SET_DEST (old_set), new_src);
+
+ if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
+ SET_SRC (old_set) = new_src;
+ }
}
else
break;
@@ -3331,7 +3479,7 @@ update_eliminable_offsets (void)
the insns of the function. */
static void
-mark_not_eliminable (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
+mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
{
unsigned int i;
@@ -3539,6 +3687,20 @@ update_eliminables (HARD_REG_SET *pset)
SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
}
+/* Return true if X is used as the target register of an elimination. */
+
+bool
+elimination_target_reg_p (rtx x)
+{
+ struct elim_table *ep;
+
+ for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
+ if (ep->to_rtx == x && ep->can_eliminate)
+ return true;
+
+ return false;
+}
+
/* Initialize the table of registers to eliminate. */
static void
@@ -3614,7 +3776,7 @@ spill_hard_reg (unsigned int regno, int cant_eliminate)
if (cant_eliminate)
{
SET_HARD_REG_BIT (bad_spill_regs_global, regno);
- regs_ever_live[regno] = 1;
+ df_set_regs_ever_live (regno, true);
}
/* Spill every pseudo reg that was allocated to this reg
@@ -3623,10 +3785,7 @@ spill_hard_reg (unsigned int regno, int cant_eliminate)
for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
if (reg_renumber[i] >= 0
&& (unsigned int) reg_renumber[i] <= regno
- && ((unsigned int) reg_renumber[i]
- + hard_regno_nregs[(unsigned int) reg_renumber[i]]
- [PSEUDO_REGNO_MODE (i)]
- > regno))
+ && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
SET_REGNO_REG_SET (&spilled_pseudos, i);
}
@@ -3661,9 +3820,9 @@ finish_spills (int global)
{
spill_reg_order[i] = n_spills;
spill_regs[n_spills++] = i;
- if (num_eliminable && ! regs_ever_live[i])
+ if (num_eliminable && ! df_regs_ever_live_p (i))
something_changed = 1;
- regs_ever_live[i] = 1;
+ df_set_regs_ever_live (i, true);
}
else
spill_reg_order[i] = -1;
@@ -3751,9 +3910,8 @@ finish_spills (int global)
AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
/* Make sure we only enlarge the set. */
- GO_IF_HARD_REG_SUBSET (used_by_pseudos2, chain->used_spill_regs, ok);
- gcc_unreachable ();
- ok:;
+ gcc_assert (hard_reg_set_subset_p (used_by_pseudos2,
+ chain->used_spill_regs));
}
}
@@ -3796,6 +3954,7 @@ scan_paradoxical_subregs (rtx x)
case SYMBOL_REF:
case LABEL_REF:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR: /* shouldn't happen, but just in case. */
case CC0:
case PC:
@@ -3805,9 +3964,13 @@ scan_paradoxical_subregs (rtx x)
case SUBREG:
if (REG_P (SUBREG_REG (x))
- && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
- reg_max_ref_width[REGNO (SUBREG_REG (x))]
- = GET_MODE_SIZE (GET_MODE (x));
+ && (GET_MODE_SIZE (GET_MODE (x))
+ > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
+ {
+ reg_max_ref_width[REGNO (SUBREG_REG (x))]
+ = GET_MODE_SIZE (GET_MODE (x));
+ mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
+ }
return;
default:
@@ -3880,7 +4043,7 @@ reload_as_needed (int live_known)
memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
memset (spill_reg_store, 0, sizeof spill_reg_store);
reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
- reg_has_output_reload = XNEWVEC (char, max_regno);
+ INIT_REG_SET (&reg_has_output_reload);
CLEAR_HARD_REG_SET (reg_reloaded_valid);
CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
@@ -3899,7 +4062,9 @@ reload_as_needed (int live_known)
else if (INSN_P (insn))
{
- rtx oldpat = copy_rtx (PATTERN (insn));
+ regset_head regs_to_forget;
+ INIT_REG_SET (&regs_to_forget);
+ note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
/* If this is a USE and CLOBBER of a MEM, ensure that any
references to eliminable registers have been removed. */
@@ -3920,6 +4085,7 @@ reload_as_needed (int live_known)
if (NOTE_P (insn))
{
update_eliminable_offsets ();
+ CLEAR_REG_SET (&regs_to_forget);
continue;
}
}
@@ -3940,7 +4106,7 @@ reload_as_needed (int live_known)
rtx's for those pseudo regs. */
else
{
- memset (reg_has_output_reload, 0, max_regno);
+ CLEAR_REG_SET (&reg_has_output_reload);
CLEAR_HARD_REG_SET (reg_is_output_reload);
find_reloads (insn, 1, spill_indirect_levels, live_known,
@@ -4006,7 +4172,8 @@ reload_as_needed (int live_known)
for this insn in order to be stored in
(obeying register constraints). That is correct; such reload
registers ARE still valid. */
- note_stores (oldpat, forget_old_reloads_1, NULL);
+ forget_marked_reloads (&regs_to_forget);
+ CLEAR_REG_SET (&regs_to_forget);
/* There may have been CLOBBER insns placed after INSN. So scan
between INSN and NEXT and use them to forget old reloads. */
@@ -4093,7 +4260,8 @@ reload_as_needed (int live_known)
the reload for inheritance. */
SET_HARD_REG_BIT (reg_is_output_reload,
REGNO (reload_reg));
- reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
+ SET_REGNO_REG_SET (&reg_has_output_reload,
+ REGNO (XEXP (in_reg, 0)));
}
else
forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
@@ -4109,7 +4277,8 @@ reload_as_needed (int live_known)
{
SET_HARD_REG_BIT (reg_is_output_reload,
REGNO (rld[i].reg_rtx));
- reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
+ SET_REGNO_REG_SET (&reg_has_output_reload,
+ REGNO (XEXP (in_reg, 0)));
}
}
}
@@ -4147,7 +4316,7 @@ reload_as_needed (int live_known)
/* Clean up. */
free (reg_last_reload_reg);
- free (reg_has_output_reload);
+ CLEAR_REG_SET (&reg_has_output_reload);
}
/* Discard all record of any value reloaded from X,
@@ -4155,14 +4324,18 @@ reload_as_needed (int live_known)
unless X is an output reload reg of the current insn.
X may be a hard reg (the reload reg)
- or it may be a pseudo reg that was reloaded from. */
+ or it may be a pseudo reg that was reloaded from.
+
+ When DATA is non-NULL just mark the registers in regset
+ to be forgotten later. */
static void
-forget_old_reloads_1 (rtx x, rtx ignored ATTRIBUTE_UNUSED,
- void *data ATTRIBUTE_UNUSED)
+forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
+ void *data)
{
unsigned int regno;
unsigned int nr;
+ regset regs = (regset) data;
/* note_stores does give us subregs of hard regs,
subreg_regno_offset requires a hard reg. */
@@ -4190,26 +4363,58 @@ forget_old_reloads_1 (rtx x, rtx ignored ATTRIBUTE_UNUSED,
This can happen if a block-local pseudo is allocated to that reg
and it wasn't spilled because this block's total need is 0.
Then some insn might have an optional reload and use this reg. */
- for (i = 0; i < nr; i++)
- /* But don't do this if the reg actually serves as an output
- reload reg in the current instruction. */
+ if (!regs)
+ for (i = 0; i < nr; i++)
+ /* But don't do this if the reg actually serves as an output
+ reload reg in the current instruction. */
+ if (n_reloads == 0
+ || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
+ {
+ CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
+ CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, regno + i);
+ spill_reg_store[regno + i] = 0;
+ }
+ }
+
+ if (regs)
+ while (nr-- > 0)
+ SET_REGNO_REG_SET (regs, regno + nr);
+ else
+ {
+ /* Since value of X has changed,
+ forget any value previously copied from it. */
+
+ while (nr-- > 0)
+ /* But don't forget a copy if this is the output reload
+ that establishes the copy's validity. */
if (n_reloads == 0
- || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
+ || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
+ reg_last_reload_reg[regno + nr] = 0;
+ }
+}
+
+/* Forget the reloads marked in regset by previous function. */
+static void
+forget_marked_reloads (regset regs)
+{
+ unsigned int reg;
+ reg_set_iterator rsi;
+ EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
+ {
+ if (reg < FIRST_PSEUDO_REGISTER
+ /* But don't do this if the reg actually serves as an output
+ reload reg in the current instruction. */
+ && (n_reloads == 0
+ || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
{
- CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
- CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, regno + i);
- spill_reg_store[regno + i] = 0;
+ CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
+ CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, reg);
+ spill_reg_store[reg] = 0;
}
+ if (n_reloads == 0
+ || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
+ reg_last_reload_reg[reg] = 0;
}
-
- /* Since value of X has changed,
- forget any value previously copied from it. */
-
- while (nr-- > 0)
- /* But don't forget a copy if this is the output reload
- that establishes the copy's validity. */
- if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
- reg_last_reload_reg[regno + nr] = 0;
}
/* The following HARD_REG_SETs indicate when each hard register is
@@ -4404,8 +4609,7 @@ clear_reload_reg_in_use (unsigned int regno, int opnum,
{
unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
unsigned int conflict_end
- = (conflict_start
- + hard_regno_nregs[conflict_start][rld[i].mode]);
+ = end_hard_regno (rld[i].mode, conflict_start);
/* If there is an overlap with the first to-be-freed register,
adjust the interval start. */
@@ -4719,6 +4923,51 @@ reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
}
}
+
+/* Returns whether R1 and R2 are uniquely chained: the value of one
+ is used by the other, and that value is not used by any other
+ reload for this insn. This is used to partially undo the decision
+ made in find_reloads when in the case of multiple
+ RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
+ RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
+ reloads. This code tries to avoid the conflict created by that
+ change. It might be cleaner to explicitly keep track of which
+ RELOAD_FOR_OPADDR_ADDR reload is associated with which
+ RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
+ this after the fact. */
+static bool
+reloads_unique_chain_p (int r1, int r2)
+{
+ int i;
+
+ /* We only check input reloads. */
+ if (! rld[r1].in || ! rld[r2].in)
+ return false;
+
+ /* Avoid anything with output reloads. */
+ if (rld[r1].out || rld[r2].out)
+ return false;
+
+ /* "chained" means one reload is a component of the other reload,
+ not the same as the other reload. */
+ if (rld[r1].opnum != rld[r2].opnum
+ || rtx_equal_p (rld[r1].in, rld[r2].in)
+ || rld[r1].optional || rld[r2].optional
+ || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
+ || reg_mentioned_p (rld[r2].in, rld[r1].in)))
+ return false;
+
+ for (i = 0; i < n_reloads; i ++)
+ /* Look for input reloads that aren't our two */
+ if (i != r1 && i != r2 && rld[i].in)
+ {
+ /* If our reload is mentioned at all, it isn't a simple chain. */
+ if (reg_mentioned_p (rld[r1].in, rld[i].in))
+ return false;
+ }
+ return true;
+}
+
/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
Return 0 otherwise.
@@ -4767,7 +5016,8 @@ reloads_conflict (int r1, int r2)
case RELOAD_FOR_OPERAND_ADDRESS:
return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
- || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
+ || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
+ && !reloads_unique_chain_p (r1, r2)));
case RELOAD_FOR_OPADDR_ADDR:
return (r2_type == RELOAD_FOR_INPUT
@@ -5076,7 +5326,7 @@ free_for_value_p (int regno, enum machine_mode mode, int opnum,
memory. */
int
-function_invariant_p (rtx x)
+function_invariant_p (const_rtx x)
{
if (CONSTANT_P (x))
return 1;
@@ -5385,7 +5635,14 @@ choose_reload_regs (struct insn_chain *chain)
for (j = 0; j < n_reloads; j++)
{
reload_order[j] = j;
- reload_spill_index[j] = -1;
+ if (rld[j].reg_rtx != NULL_RTX)
+ {
+ gcc_assert (REG_P (rld[j].reg_rtx)
+ && HARD_REGISTER_P (rld[j].reg_rtx));
+ reload_spill_index[j] = REGNO (rld[j].reg_rtx);
+ }
+ else
+ reload_spill_index[j] = -1;
if (rld[j].nregs > 1)
{
@@ -5501,17 +5758,15 @@ choose_reload_regs (struct insn_chain *chain)
else if (GET_CODE (rld[r].in_reg) == SUBREG
&& REG_P (SUBREG_REG (rld[r].in_reg)))
{
- byte = SUBREG_BYTE (rld[r].in_reg);
regno = REGNO (SUBREG_REG (rld[r].in_reg));
if (regno < FIRST_PSEUDO_REGISTER)
regno = subreg_regno (rld[r].in_reg);
+ else
+ byte = SUBREG_BYTE (rld[r].in_reg);
mode = GET_MODE (rld[r].in_reg);
}
#ifdef AUTO_INC_DEC
- else if ((GET_CODE (rld[r].in_reg) == PRE_INC
- || GET_CODE (rld[r].in_reg) == PRE_DEC
- || GET_CODE (rld[r].in_reg) == POST_INC
- || GET_CODE (rld[r].in_reg) == POST_DEC)
+ else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
&& REG_P (XEXP (rld[r].in_reg, 0)))
{
regno = REGNO (XEXP (rld[r].in_reg, 0));
@@ -5528,7 +5783,16 @@ choose_reload_regs (struct insn_chain *chain)
regno = subreg_regno (rld[r].in);
#endif
- if (regno >= 0 && reg_last_reload_reg[regno] != 0)
+ if (regno >= 0
+ && reg_last_reload_reg[regno] != 0
+#ifdef CANNOT_CHANGE_MODE_CLASS
+ /* Verify that the register it's in can be used in
+ mode MODE. */
+ && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
+ GET_MODE (reg_last_reload_reg[regno]),
+ mode)
+#endif
+ )
{
enum reg_class class = rld[r].class, last_class;
rtx last_reg = reg_last_reload_reg[regno];
@@ -5548,13 +5812,6 @@ choose_reload_regs (struct insn_chain *chain)
if ((GET_MODE_SIZE (GET_MODE (last_reg))
>= GET_MODE_SIZE (need_mode))
-#ifdef CANNOT_CHANGE_MODE_CLASS
- /* Verify that the register in "i" can be obtained
- from LAST_REG. */
- && !REG_CANNOT_CHANGE_MODE_P (REGNO (last_reg),
- GET_MODE (last_reg),
- mode)
-#endif
&& reg_reloaded_contents[i] == regno
&& TEST_HARD_REG_BIT (reg_reloaded_valid, i)
&& HARD_REGNO_MODE_OK (i, rld[r].mode)
@@ -5780,7 +6037,7 @@ choose_reload_regs (struct insn_chain *chain)
if (equiv != 0)
{
- if (regno_clobbered_p (regno, insn, rld[r].mode, 0))
+ if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
switch (rld[r].when_needed)
{
case RELOAD_FOR_OTHER_ADDRESS:
@@ -6040,7 +6297,8 @@ choose_reload_regs (struct insn_chain *chain)
nr = hard_regno_nregs[nregno][rld[r].mode];
while (--nr >= 0)
- reg_has_output_reload[nregno + nr] = 1;
+ SET_REGNO_REG_SET (®_has_output_reload,
+ nregno + nr);
if (i >= 0)
{
@@ -6165,15 +6423,23 @@ merge_assigned_reloads (rtx insn)
transfer_replacements (i, j);
}
- /* If this is now RELOAD_OTHER, look for any reloads that load
- parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
- if they were for inputs, RELOAD_OTHER for outputs. Note that
- this test is equivalent to looking for reloads for this operand
- number. */
- /* We must take special care with RELOAD_FOR_OUTPUT_ADDRESS; it may
- share registers with a RELOAD_FOR_INPUT, so we can not change it
- to RELOAD_FOR_OTHER_ADDRESS. We should never need to, since we
- do not modify RELOAD_FOR_OUTPUT. */
+ /* If this is now RELOAD_OTHER, look for any reloads that
+ load parts of this operand and set them to
+ RELOAD_FOR_OTHER_ADDRESS if they were for inputs,
+ RELOAD_OTHER for outputs. Note that this test is
+ equivalent to looking for reloads for this operand
+ number.
+
+ We must take special care with RELOAD_FOR_OUTPUT_ADDRESS;
+ it may share registers with a RELOAD_FOR_INPUT, so we can
+ not change it to RELOAD_FOR_OTHER_ADDRESS. We should
+ never need to, since we do not modify RELOAD_FOR_OUTPUT.
+
+ It is possible that the RELOAD_FOR_OPERAND_ADDRESS
+ instruction is assigned the same register as the earlier
+ RELOAD_FOR_OTHER_ADDRESS instruction. Merging these two
+ instructions will cause the RELOAD_FOR_OTHER_ADDRESS
+ instruction to be deleted later on. */
if (rld[i].when_needed == RELOAD_OTHER)
for (j = 0; j < n_reloads; j++)
@@ -6181,6 +6447,7 @@ merge_assigned_reloads (rtx insn)
&& rld[j].when_needed != RELOAD_OTHER
&& rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
&& rld[j].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
+ && rld[j].when_needed != RELOAD_FOR_OPERAND_ADDRESS
&& (! conflicting_input
|| rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
|| rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
@@ -6999,10 +7266,6 @@ do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
actually no need to store the old value in it. */
if (optimize
- /* Only attempt this for input reloads; for RELOAD_OTHER we miss
- that there may be multiple uses of the previous output reload.
- Restricting to RELOAD_FOR_INPUT is mostly paranoia. */
- && rl->when_needed == RELOAD_FOR_INPUT
&& (reload_inherited[j] || reload_override_in[j])
&& rl->reg_rtx
&& REG_P (rl->reg_rtx)
@@ -7230,7 +7493,7 @@ emit_reload_insns (struct insn_chain *chain)
if (REG_P (reg)
&& REGNO (reg) >= FIRST_PSEUDO_REGISTER
- && ! reg_has_output_reload[REGNO (reg)])
+ && !REGNO_REG_SET_P (®_has_output_reload, REGNO (reg)))
{
int nregno = REGNO (reg);
@@ -7341,9 +7604,11 @@ emit_reload_insns (struct insn_chain *chain)
&& rld[r].in != 0
&& ((REG_P (rld[r].in)
&& REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER
- && ! reg_has_output_reload[REGNO (rld[r].in)])
+ && !REGNO_REG_SET_P (®_has_output_reload,
+ REGNO (rld[r].in)))
|| (REG_P (rld[r].in_reg)
- && ! reg_has_output_reload[REGNO (rld[r].in_reg)]))
+ && !REGNO_REG_SET_P (®_has_output_reload,
+ REGNO (rld[r].in_reg))))
&& ! reg_set_p (rld[r].reg_rtx, PATTERN (insn)))
{
int nregno;
@@ -7419,15 +7684,37 @@ emit_reload_insns (struct insn_chain *chain)
/* If a register gets output-reloaded from a non-spill register,
that invalidates any previous reloaded copy of it.
But forget_old_reloads_1 won't get to see it, because
- it thinks only about the original insn. So invalidate it here. */
- if (i < 0 && rld[r].out != 0
- && (REG_P (rld[r].out)
- || (MEM_P (rld[r].out)
+ it thinks only about the original insn. So invalidate it here.
+ Also do the same thing for RELOAD_OTHER constraints where the
+ output is discarded. */
+ if (i < 0
+ && ((rld[r].out != 0
+ && (REG_P (rld[r].out)
+ || (MEM_P (rld[r].out)
+ && REG_P (rld[r].out_reg))))
+ || (rld[r].out == 0 && rld[r].out_reg
&& REG_P (rld[r].out_reg))))
{
- rtx out = (REG_P (rld[r].out)
+ rtx out = ((rld[r].out && REG_P (rld[r].out))
? rld[r].out : rld[r].out_reg);
int nregno = REGNO (out);
+
+ /* REG_RTX is now set or clobbered by the main instruction.
+ As the comment above explains, forget_old_reloads_1 only
+ sees the original instruction, and there is no guarantee
+ that the original instruction also clobbered REG_RTX.
+ For example, if find_reloads sees that the input side of
+ a matched operand pair dies in this instruction, it may
+ use the input register as the reload register.
+
+ Calling forget_old_reloads_1 is a waste of effort if
+ REG_RTX is also the output register.
+
+ If we know that REG_RTX holds the value of a pseudo
+ register, the code after the call will record that fact. */
+ if (rld[r].reg_rtx && rld[r].reg_rtx != out)
+ forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
+
if (nregno >= FIRST_PSEUDO_REGISTER)
{
rtx src_reg, store_insn = NULL_RTX;
@@ -7496,12 +7783,13 @@ emit_reload_insns (struct insn_chain *chain)
/* We have to set reg_has_output_reload here, or else
forget_old_reloads_1 will clear reg_last_reload_reg
right away. */
- reg_has_output_reload[nregno] = 1;
+ SET_REGNO_REG_SET (®_has_output_reload,
+ nregno);
}
}
else
{
- int num_regs = hard_regno_nregs[nregno][GET_MODE (rld[r].out)];
+ int num_regs = hard_regno_nregs[nregno][GET_MODE (out)];
while (num_regs-- > 0)
reg_last_reload_reg[nregno + num_regs] = 0;
@@ -7655,7 +7943,7 @@ gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
DEFINE_PEEPHOLE should be specified that recognizes the sequence
we emit below. */
- code = (int) add_optab->handlers[(int) GET_MODE (out)].insn_code;
+ code = (int) optab_handler (add_optab, GET_MODE (out))->insn_code;
if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
|| (REG_P (op1)
@@ -7678,17 +7966,17 @@ gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
if (insn)
{
/* Add a REG_EQUIV note so that find_equiv_reg can find it. */
- REG_NOTES (insn)
- = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
+ set_unique_reg_note (insn, REG_EQUIV, in);
return insn;
}
/* If that failed, copy the address register to the reload register.
Then add the constant to the reload register. */
+ gcc_assert (!reg_overlap_mentioned_p (out, op0));
gen_reload (out, op1, opnum, type);
insn = emit_insn (gen_add2_insn (out, op0));
- REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
+ set_unique_reg_note (insn, REG_EQUIV, in);
}
#ifdef SECONDARY_MEMORY_NEEDED
@@ -7721,6 +8009,10 @@ gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
rtx out_moded;
rtx set;
+ op1 = find_replacement (&XEXP (in, 0));
+ if (op1 != XEXP (in, 0))
+ in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
+
/* First, try a plain SET. */
set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
if (set)
@@ -7729,7 +8021,6 @@ gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
/* If that failed, move the inner operand to the reload
register, and try the same unop with the inner expression
replaced with the reload register. */
- op1 = XEXP (in, 0);
if (GET_MODE (op1) != GET_MODE (out))
out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
@@ -7745,8 +8036,7 @@ gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
insn = emit_insn_if_valid_for_reload (insn);
if (insn)
{
- REG_NOTES (insn)
- = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
+ set_unique_reg_note (insn, REG_EQUIV, in);
return insn;
}
@@ -7754,7 +8044,11 @@ gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
}
/* If IN is a simple operand, use gen_move_insn. */
else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
- emit_insn (gen_move_insn (out, in));
+ {
+ tem = emit_insn (gen_move_insn (out, in));
+ /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note. */
+ mark_jump_label (in, tem, 0);
+ }
#ifdef HAVE_reload_load_address
else if (HAVE_reload_load_address)
@@ -7823,25 +8117,24 @@ delete_output_reload (rtx insn, int j, int last_reload_reg)
if (rtx_equal_p (reg2, reg))
{
if (reload_inherited[k] || reload_override_in[k] || k == j)
- {
- n_inherited++;
- reg2 = rld[k].out_reg;
- if (! reg2)
- continue;
- while (GET_CODE (reg2) == SUBREG)
- reg2 = XEXP (reg2, 0);
- if (rtx_equal_p (reg2, reg))
- n_inherited++;
- }
+ n_inherited++;
else
return;
}
}
n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
+ if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
+ n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
+ reg, 0);
if (substed)
n_occurrences += count_occurrences (PATTERN (insn),
eliminate_regs (substed, 0,
NULL_RTX), 0);
+ for (i1 = reg_equiv_alt_mem_list [REGNO (reg)]; i1; i1 = XEXP (i1, 1))
+ {
+ gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
+ n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
+ }
if (n_occurrences > n_inherited)
return;
@@ -7889,7 +8182,7 @@ delete_output_reload (rtx insn, int j, int last_reload_reg)
if (rld[j].out != rld[j].in
&& REG_N_DEATHS (REGNO (reg)) == 1
&& REG_N_SETS (REGNO (reg)) == 1
- && REG_BASIC_BLOCK (REGNO (reg)) >= 0
+ && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
&& find_regno_note (insn, REG_DEAD, REGNO (reg)))
{
rtx i2;
@@ -8108,15 +8401,16 @@ static rtx
inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
{
/* REG or MEM to be copied and incremented. */
- rtx incloc = XEXP (value, 0);
+ rtx incloc = find_replacement (&XEXP (value, 0));
/* Nonzero if increment after copying. */
- int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
+ int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
+ || GET_CODE (value) == POST_MODIFY);
rtx last;
rtx inc;
rtx add_insn;
int code;
rtx store;
- rtx real_in = in == value ? XEXP (in, 0) : in;
+ rtx real_in = in == value ? incloc : in;
/* No hard register is equivalent to this register after
inc/dec operation. If REG_LAST_RELOAD_REG were nonzero,
@@ -8125,10 +8419,18 @@ inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
if (REG_P (incloc))
reg_last_reload_reg[REGNO (incloc)] = 0;
- if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
- inc_amount = -inc_amount;
+ if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
+ {
+ gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
+ inc = find_replacement (&XEXP (XEXP (value, 1), 1));
+ }
+ else
+ {
+ if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
+ inc_amount = -inc_amount;
- inc = GEN_INT (inc_amount);
+ inc = GEN_INT (inc_amount);
+ }
/* If this is post-increment, first copy the location to the reload reg. */
if (post && real_in != reloadreg)
@@ -8188,7 +8490,10 @@ inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
emit_insn (gen_add2_insn (reloadreg, inc));
store = emit_insn (gen_move_insn (incloc, reloadreg));
- emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
+ if (GET_CODE (inc) == CONST_INT)
+ emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
+ else
+ emit_insn (gen_sub2_insn (reloadreg, inc));
}
return store;
@@ -8296,7 +8601,7 @@ fixup_abnormal_edges (void)
next = NEXT_INSN (insn);
if (INSN_P (insn))
{
- delete_insn (insn);
+ delete_insn (insn);
/* Sometimes there's still the return value USE.
If it's placed after a trapping call (i.e. that
@@ -8314,6 +8619,8 @@ fixup_abnormal_edges (void)
inserted = true;
}
}
+ else if (!BARRIER_P (insn))
+ set_block_for_insn (insn, NULL);
insn = next;
}
}
@@ -8334,6 +8641,7 @@ fixup_abnormal_edges (void)
blocks = sbitmap_alloc (last_basic_block);
sbitmap_ones (blocks);
find_many_sub_basic_blocks (blocks);
+ sbitmap_free (blocks);
}
if (inserted)