Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
1999, 2000, 2001 Free Software Foundation, Inc.
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
-any later version.
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
-GNU CC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+along with GCC; see the file COPYING. If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA. */
/* Middle-to-low level generation of rtx code and insns.
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
+#include "debug.h"
/* Commonly used modes. */
static htab_t const_int_htab;
+/* A hash table storing memory attribute structures. */
+static htab_t mem_attrs_htab;
+
/* start_sequence and gen_sequence can make a lot of rtx expressions which are
shortly thrown away. We use two mechanisms to prevent this waste:
static rtx make_call_insn_raw PARAMS ((rtx));
static rtx find_line_note PARAMS ((rtx));
static void mark_sequence_stack PARAMS ((struct sequence_stack *));
+static rtx change_address_1 PARAMS ((rtx, enum machine_mode, rtx,
+ int));
static void unshare_all_rtl_1 PARAMS ((rtx));
static void unshare_all_decls PARAMS ((tree));
static void reset_used_decls PARAMS ((tree));
static hashval_t const_int_htab_hash PARAMS ((const void *));
static int const_int_htab_eq PARAMS ((const void *,
const void *));
-static int rtx_htab_mark_1 PARAMS ((void **, void *));
-static void rtx_htab_mark PARAMS ((void *));
+static hashval_t mem_attrs_htab_hash PARAMS ((const void *));
+static int mem_attrs_htab_eq PARAMS ((const void *,
+ const void *));
+static void mem_attrs_mark PARAMS ((const void *));
+static mem_attrs *get_mem_attrs PARAMS ((HOST_WIDE_INT, tree, rtx,
+ rtx, unsigned int));
+/* Probability of the conditional branch currently processed by try_split.
+ Set to -1 otherwise. */
+int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is a really a CONST_INT). */
return (INTVAL ((const struct rtx_def *) x) == *((const HOST_WIDE_INT *) y));
}
-/* Mark the hash-table element X (which is really a pointer to an
- rtx). */
+/* Returns a hash code for X (which is really a mem_attrs *).  */
+
+static hashval_t
+mem_attrs_htab_hash (x)
+ const void *x;
+{
+ mem_attrs *p = (mem_attrs *) x;
+
+ return (p->alias ^ (p->align * 1000)
+ ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
+ ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
+ ^ (long) p->decl);
+}
+
+/* Returns non-zero if the value represented by X (which is really a
+ mem_attrs *) is the same as that given by Y (which is also really a
+ mem_attrs *). */
static int
-rtx_htab_mark_1 (x, data)
- void **x;
- void *data ATTRIBUTE_UNUSED;
+mem_attrs_htab_eq (x, y)
+ const void *x;
+ const void *y;
{
- ggc_mark_rtx (*x);
- return 1;
+ mem_attrs *p = (mem_attrs *) x;
+ mem_attrs *q = (mem_attrs *) y;
+
+ return (p->alias == q->alias && p->decl == q->decl && p->offset == q->offset
+ && p->size == q->size && p->align == q->align);
}
-/* Mark all the elements of HTAB (which is really an htab_t full of
- rtxs). */
+/* This routine is called when we determine that we need a mem_attrs entry.
+ It marks the associated decl and RTL as being used, if present. */
static void
-rtx_htab_mark (htab)
- void *htab;
+mem_attrs_mark (x)
+ const void *x;
+{
+ mem_attrs *p = (mem_attrs *) x;
+
+ if (p->decl)
+ ggc_mark_tree (p->decl);
+
+ if (p->offset)
+ ggc_mark_rtx (p->offset);
+
+ if (p->size)
+ ggc_mark_rtx (p->size);
+}
+
+/* Allocate a new mem_attrs structure and insert it into the hash table if
+ one identical to it is not already in the table. */
+
+static mem_attrs *
+get_mem_attrs (alias, decl, offset, size, align)
+ HOST_WIDE_INT alias;
+ tree decl;
+ rtx offset;
+ rtx size;
+ unsigned int align;
{
- htab_traverse (*((htab_t *) htab), rtx_htab_mark_1, NULL);
+ mem_attrs attrs;
+ void **slot;
+
+ attrs.alias = alias;
+ attrs.decl = decl;
+ attrs.offset = offset;
+ attrs.size = size;
+ attrs.align = align;
+
+ slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
+ if (*slot == 0)
+ {
+ *slot = ggc_alloc (sizeof (mem_attrs));
+ memcpy (*slot, &attrs, sizeof (mem_attrs));
+ }
+
+ return *slot;
}
/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
/* This field is not cleared by the mere allocation of the rtx, so
we clear it here. */
- MEM_ALIAS_SET (rt) = 0;
+ MEM_ATTRS (rt) = 0;
return rt;
}
return gen_rtx_fmt_ei (SUBREG, mode, reg, offset);
}
-/* Generate a SUBREG representing the least-significant part
- * of REG if MODE is smaller than mode of REG, otherwise
- * paradoxical SUBREG. */
+/* Generate a SUBREG representing the least-significant part of REG if MODE
+ is smaller than mode of REG, otherwise paradoxical SUBREG. */
+
rtx
gen_lowpart_SUBREG (mode, reg)
enum machine_mode mode;
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
-#ifndef ANSI_PROTOTYPES
- enum rtx_code code;
- enum machine_mode mode;
-#endif
- va_list p;
- register int i; /* Array indices... */
- register const char *fmt; /* Current rtx's format... */
- register rtx rt_val; /* RTX to return to caller... */
+ int i; /* Array indices... */
+ const char *fmt; /* Current rtx's format... */
+ rtx rt_val; /* RTX to return to caller... */
- VA_START (p, mode);
-
-#ifndef ANSI_PROTOTYPES
- code = va_arg (p, enum rtx_code);
- mode = va_arg (p, enum machine_mode);
-#endif
+ VA_OPEN (p, mode);
+ VA_FIXEDARG (p, enum rtx_code, code);
+ VA_FIXEDARG (p, enum machine_mode, mode);
switch (code)
{
break;
}
- va_end (p);
+ VA_CLOSE (p);
return rt_val;
}
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
-#ifndef ANSI_PROTOTYPES
- int n;
-#endif
- int i;
- va_list p;
+ int i, save_n;
rtx *vector;
- VA_START (p, n);
-
-#ifndef ANSI_PROTOTYPES
- n = va_arg (p, int);
-#endif
+ VA_OPEN (p, n);
+ VA_FIXEDARG (p, int, n);
if (n == 0)
return NULL_RTVEC; /* Don't allocate an empty rtvec... */
for (i = 0; i < n; i++)
vector[i] = va_arg (p, rtx);
- va_end (p);
- return gen_rtvec_v (n, vector);
+ /* The definition of VA_* in K&R C causes `n' to go out of scope. */
+ save_n = n;
+ VA_CLOSE (p);
+
+ return gen_rtvec_v (save_n, vector);
}
rtvec
int n;
rtx *argp;
{
- register int i;
- register rtvec rt_val;
+ int i;
+ rtvec rt_val;
if (n == 0)
return NULL_RTVEC; /* Don't allocate an empty rtvec... */
return rt_val;
}
-
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
This pseudo is assigned the next sequential register number. */
enum machine_mode mode;
{
struct function *f = cfun;
- register rtx val;
+ rtx val;
/* Don't let anything called after initial flow analysis create new
registers. */
register. */
int
subreg_hard_regno (x, check_mode)
- register rtx x;
+ rtx x;
int check_mode;
{
enum machine_mode mode = GET_MODE (x);
rtx
gen_lowpart_common (mode, x)
enum machine_mode mode;
- register rtx x;
+ rtx x;
{
int msize = GET_MODE_SIZE (mode);
int xsize = GET_MODE_SIZE (GET_MODE (x));
rtx
gen_realpart (mode, x)
enum machine_mode mode;
- register rtx x;
+ rtx x;
{
if (WORDS_BIG_ENDIAN
&& GET_MODE_BITSIZE (mode) < BITS_PER_WORD
rtx
gen_imagpart (mode, x)
enum machine_mode mode;
- register rtx x;
+ rtx x;
{
if (WORDS_BIG_ENDIAN)
return gen_lowpart (mode, x);
rtx
gen_lowpart (mode, x)
enum machine_mode mode;
- register rtx x;
+ rtx x;
{
rtx result = gen_lowpart_common (mode, x);
else if (GET_CODE (x) == MEM)
{
/* The only additional case we can do is MEM. */
- register int offset = 0;
+ int offset = 0;
if (WORDS_BIG_ENDIAN)
offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
- MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
rtx
gen_highpart (mode, x)
enum machine_mode mode;
- register rtx x;
+ rtx x;
{
unsigned int msize = GET_MODE_SIZE (mode);
rtx result;
if (mode == VOIDmode)
abort ();
- /* If OP is narrower than a word, fail. */
+ /* If OP is narrower than a word, fail. */
if (mode != BLKmode
&& (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
return 0;
- /* If we want a word outside OP, return zero. */
+ /* If we want a word outside OP, return zero. */
if (mode != BLKmode
&& (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
return const0_rtx;
}
}
\f
-/* Return a memory reference like MEMREF, but with its mode changed
- to MODE and its address changed to ADDR.
- (VOIDmode means don't change the mode.
- NULL for ADDR means don't change the address.)
- VALIDATE is nonzero if the returned memory location is required to be
- valid. */
-rtx
+/* Given REF, a MEM, and T, either the type of X or the expression
+ corresponding to REF, set the memory attributes. OBJECTP is nonzero
+ if we are making a new object of this type. */
+
+void
+set_mem_attributes (ref, t, objectp)
+ rtx ref;
+ tree t;
+ int objectp;
+{
+ tree type;
+
+ /* It can happen that type_for_mode was given a mode for which there
+ is no language-level type. In which case it returns NULL, which
+ we can see here. */
+ if (t == NULL_TREE)
+ return;
+
+ type = TYPE_P (t) ? t : TREE_TYPE (t);
+
+ /* If we have already set DECL_RTL = ref, get_alias_set will get the
+ wrong answer, as it assumes that DECL_RTL already has the right alias
+ info. Callers should not set DECL_RTL until after the call to
+ set_mem_attributes. */
+ if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
+ abort ();
+
+ /* Get the alias set from the expression or type (perhaps using a
+ front-end routine). */
+ set_mem_alias_set (ref, get_alias_set (t));
+
+ /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY (type)
+ here, because, in C and C++, the fact that a location is accessed
+ through a const expression does not mean that the value there can
+ never change. */
+
+ MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
+ MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
+
+ /* If we are making an object of this type, we know that it is a scalar if
+ the type is not an aggregate. */
+ if (objectp && ! AGGREGATE_TYPE_P (type))
+ MEM_SCALAR_P (ref) = 1;
+
+ /* If the size is known, we can set that. */
+ if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
+ MEM_ATTRS (ref)
+ = get_mem_attrs (MEM_ALIAS_SET (ref), MEM_DECL (ref), MEM_OFFSET (ref),
+ GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1)),
+ MEM_ALIGN (ref));
+
+ /* If T is a type, there's nothing more we can do. Otherwise, we may be able
+ to deduce some more information about the expression. */
+ if (TYPE_P (t))
+ return;
+
+ maybe_set_unchanging (ref, t);
+ if (TREE_THIS_VOLATILE (t))
+ MEM_VOLATILE_P (ref) = 1;
+
+ /* Now remove any NOPs: they don't change what the underlying object is.
+ Likewise for SAVE_EXPR. */
+ while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
+ || TREE_CODE (t) == NON_LVALUE_EXPR || TREE_CODE (t) == SAVE_EXPR)
+ t = TREE_OPERAND (t, 0);
+
+ /* If this is a decl, set the attributes of the MEM from it. */
+ if (DECL_P (t))
+ MEM_ATTRS (ref)
+ = get_mem_attrs
+ (MEM_ALIAS_SET (ref), t, GEN_INT (0),
+ (TYPE_SIZE_UNIT (TREE_TYPE (t))
+ && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (t)), 1))
+ ? GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (t)), 1))
+ : 0, DECL_ALIGN (t) / BITS_PER_UNIT);
+
+ /* Now see if we can say more about whether it's an aggregate or
+ scalar. If we already know it's an aggregate, don't bother. */
+ if (MEM_IN_STRUCT_P (ref))
+ return;
+
+ /* Since we already know the type isn't an aggregate, if this is a decl,
+ it must be a scalar. Or if it is a reference into an aggregate,
+ this is part of an aggregate. Otherwise we don't know. */
+ if (DECL_P (t))
+ MEM_SCALAR_P (ref) = 1;
+ else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
+ || TREE_CODE (t) == ARRAY_RANGE_REF
+ || TREE_CODE (t) == BIT_FIELD_REF)
+ MEM_IN_STRUCT_P (ref) = 1;
+}
+
+/* Set the alias set of MEM to SET. */
+
+void
+set_mem_alias_set (mem, set)
+ rtx mem;
+ HOST_WIDE_INT set;
+{
+#ifdef ENABLE_CHECKING
+ /* If the new and old alias sets don't conflict, something is wrong. */
+ if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
+ abort ();
+#endif
+
+ MEM_ATTRS (mem) = get_mem_attrs (set, MEM_DECL (mem), MEM_OFFSET (mem),
+ MEM_SIZE (mem), MEM_ALIGN (mem));
+}
+
+/* Set the alignment of MEM to ALIGN. */
+
+void
+set_mem_align (mem, align)
+ rtx mem;
+ unsigned int align;
+{
+ MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_DECL (mem),
+ MEM_OFFSET (mem), MEM_SIZE (mem), align);
+}
+\f
+/* Return a memory reference like MEMREF, but with its mode changed to MODE
+ and its address changed to ADDR. (VOIDmode means don't change the mode.
+ NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
+ returned memory location is required to be valid. The memory
+ attributes are not changed. */
+
+static rtx
change_address_1 (memref, mode, addr, validate)
rtx memref;
enum machine_mode mode;
return new;
}
-/* Return a memory reference like MEMREF, but with its mode changed
- to MODE and its address offset by OFFSET bytes. */
+/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
+ way we are changing MEMREF, so we only preserve the alias set. */
rtx
-adjust_address (memref, mode, offset)
+change_address (memref, mode, addr)
rtx memref;
enum machine_mode mode;
- HOST_WIDE_INT offset;
+ rtx addr;
{
- /* For now, this is just a wrapper for change_address, but eventually
- will do memref tracking. */
- rtx addr = XEXP (memref, 0);
+ rtx new = change_address_1 (memref, mode, addr, 1);
+ enum machine_mode mmode = GET_MODE (new);
- /* If MEMREF is a LO_SUM and the offset is within the alignment of the
- object, we can merge it into the LO_SUM. */
- if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
- && offset >= 0
- && offset < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
- addr = gen_rtx_LO_SUM (mode, XEXP (addr, 0),
- plus_constant (XEXP (addr, 1), offset));
- else
- addr = plus_constant (addr, offset);
+ MEM_ATTRS (new)
+ = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
+ mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
+ (mmode == BLKmode ? 1
+ : GET_MODE_ALIGNMENT (mmode) / BITS_PER_UNIT));
- return change_address (memref, mode, addr);
+ return new;
}
-/* Likewise, but the reference is not required to be valid. */
+/* Return a memory reference like MEMREF, but with its mode changed
+ to MODE and its address offset by OFFSET bytes. If VALIDATE is
+ nonzero, the memory address is forced to be valid. */
rtx
-adjust_address_nv (memref, mode, offset)
+adjust_address_1 (memref, mode, offset, validate)
rtx memref;
enum machine_mode mode;
HOST_WIDE_INT offset;
+ int validate;
{
- /* For now, this is just a wrapper for change_address, but eventually
- will do memref tracking. */
rtx addr = XEXP (memref, 0);
+ rtx new;
+ rtx memoffset = MEM_OFFSET (memref);
+ unsigned int memalign = MEM_ALIGN (memref);
- /* If MEMREF is a LO_SUM and the offset is within the size of the
+ /* If MEMREF is a LO_SUM and the offset is within the alignment of the
object, we can merge it into the LO_SUM. */
if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
&& offset >= 0
- && offset < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
- addr = gen_rtx_LO_SUM (mode, XEXP (addr, 0),
+ && (unsigned HOST_WIDE_INT) offset
+ < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
+ addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
plus_constant (XEXP (addr, 1), offset));
+ else if (offset == 0)
+ /* ??? Prefer to create garbage instead of creating shared rtl. */
+ addr = copy_rtx (addr);
else
addr = plus_constant (addr, offset);
- return change_address_1 (memref, mode, addr, 0);
+ new = change_address_1 (memref, mode, addr, validate);
+
+ /* Compute the new values of the memory attributes due to this adjustment.
+ We add the offsets and update the alignment. */
+ if (memoffset)
+ memoffset = GEN_INT (offset + INTVAL (memoffset));
+
+ /* If the offset is negative, don't try to update the alignment. If it's
+ zero, the alignment hasn't changed. Otherwise, the known alignment may
+ be less strict. */
+ if (offset < 0)
+ memalign = 1;
+
+ while (offset > 0 && (offset % memalign) != 0)
+ memalign >>= 1;
+
+ MEM_ATTRS (new)
+ = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_DECL (memref), memoffset,
+ mode == BLKmode
+ ? 0 : GEN_INT (GET_MODE_SIZE (mode)), memalign);
+
+ /* At some point, we should validate that this offset is within the object,
+ if all the appropriate values are known. */
+ return new;
}
/* Return a memory reference like MEMREF, but with its address changed to
rtx memref;
rtx addr;
{
- /* For now, this is just a wrapper for change_address, but eventually
- will do memref tracking. */
- return change_address (memref, VOIDmode, addr);
+ /* change_address_1 copies the memory attribute structure without change
+ and that's exactly what we want here. */
+ return change_address_1 (memref, VOIDmode, addr, 1);
}
+
/* Likewise, but the reference is not required to be valid. */
rtx
rtx memref;
rtx addr;
{
- /* For now, this is just a wrapper for change_address, but eventually
- will do memref tracking. */
return change_address_1 (memref, VOIDmode, addr, 0);
}
\f
rtx
gen_label_rtx ()
{
- register rtx label;
+ rtx label;
label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX,
NULL_RTX, label_num++, NULL, NULL);
}
/* Go through all virtual stack slots of a function and mark them as
- not shared. */
+ not shared. */
static void
reset_used_decls (blk)
tree blk;
copy_rtx_if_shared (orig)
rtx orig;
{
- register rtx x = orig;
- register int i;
- register enum rtx_code code;
- register const char *format_ptr;
+ rtx x = orig;
+ int i;
+ enum rtx_code code;
+ const char *format_ptr;
int copied = 0;
if (x == 0)
if (x->used)
{
- register rtx copy;
+ rtx copy;
copy = rtx_alloc (code);
memcpy (copy, x,
case 'E':
if (XVEC (x, i) != NULL)
{
- register int j;
+ int j;
int len = XVECLEN (x, i);
if (copied && len > 0)
reset_used_flags (x)
rtx x;
{
- register int i, j;
- register enum rtx_code code;
- register const char *format_ptr;
+ int i, j;
+ enum rtx_code code;
+ const char *format_ptr;
if (x == 0)
return;
mark_label_nuses(x)
rtx x;
{
- register enum rtx_code code;
- register int i, j;
- register const char *fmt;
+ enum rtx_code code;
+ int i, j;
+ const char *fmt;
code = GET_CODE (x);
if (code == LABEL_REF)
{
rtx before = PREV_INSN (trial);
rtx after = NEXT_INSN (trial);
- rtx seq = split_insns (pat, trial);
int has_barrier = 0;
rtx tem;
+ rtx note, seq;
+ int probability;
+
+ if (any_condjump_p (trial)
+ && (note = find_reg_note (trial, REG_BR_PROB, 0)))
+ split_branch_probability = INTVAL (XEXP (note, 0));
+ probability = split_branch_probability;
+
+ seq = split_insns (pat, trial);
+
+ split_branch_probability = -1;
/* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
We may need to handle this specially. */
it, in turn, will be split (SFmode on the 29k is an example). */
if (GET_CODE (seq) == SEQUENCE)
{
- int i;
- rtx eh_note;
+ int i, njumps = 0;
/* Avoid infinite loop if any insn of the result matches
the original pattern. */
/* Mark labels. */
for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
- mark_jump_label (PATTERN (XVECEXP (seq, 0, i)),
- XVECEXP (seq, 0, i), 0, 0);
+ {
+ rtx insn = XVECEXP (seq, 0, i);
+ mark_jump_label (PATTERN (insn),
+ XVECEXP (seq, 0, i), 0);
+ njumps++;
+ if (probability != -1
+ && any_condjump_p (insn)
+ && !find_reg_note (insn, REG_BR_PROB, 0))
+ {
+ /* We can preserve the REG_BR_PROB notes only if exactly
+			 one jump is created, otherwise the machine description
+ is responsible for this step using
+ split_branch_probability variable. */
+ if (njumps != 1)
+ abort ();
+ REG_NOTES (insn)
+ = gen_rtx_EXPR_LIST (REG_BR_PROB,
+ GEN_INT (probability),
+ REG_NOTES (insn));
+ }
+ }
/* If we are splitting a CALL_INSN, look for the CALL_INSN
in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
CALL_INSN_FUNCTION_USAGE (XVECEXP (seq, 0, i))
= CALL_INSN_FUNCTION_USAGE (trial);
- /* Copy EH notes. */
- if ((eh_note = find_reg_note (trial, REG_EH_REGION, NULL_RTX)))
- for (i = 0; i < XVECLEN (seq, 0); i++)
- {
- rtx insn = XVECEXP (seq, 0, i);
- if (GET_CODE (insn) == CALL_INSN
- || (flag_non_call_exceptions
- && may_trap_p (PATTERN (insn))))
- REG_NOTES (insn)
- = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
- REG_NOTES (insn));
- }
+ /* Copy notes, particularly those related to the CFG. */
+ for (note = REG_NOTES (trial); note ; note = XEXP (note, 1))
+ {
+ switch (REG_NOTE_KIND (note))
+ {
+ case REG_EH_REGION:
+ for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
+ {
+ rtx insn = XVECEXP (seq, 0, i);
+ if (GET_CODE (insn) == CALL_INSN
+ || (flag_non_call_exceptions
+ && may_trap_p (PATTERN (insn))))
+ REG_NOTES (insn)
+ = gen_rtx_EXPR_LIST (REG_EH_REGION,
+ XEXP (note, 0),
+ REG_NOTES (insn));
+ }
+ break;
+
+ case REG_NORETURN:
+ case REG_SETJMP:
+ case REG_ALWAYS_RETURN:
+ for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
+ {
+ rtx insn = XVECEXP (seq, 0, i);
+ if (GET_CODE (insn) == CALL_INSN)
+ REG_NOTES (insn)
+ = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
+ XEXP (note, 0),
+ REG_NOTES (insn));
+ }
+ break;
+
+ case REG_NON_LOCAL_GOTO:
+ for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
+ {
+ rtx insn = XVECEXP (seq, 0, i);
+ if (GET_CODE (insn) == JUMP_INSN)
+ REG_NOTES (insn)
+ = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
+ XEXP (note, 0),
+ REG_NOTES (insn));
+ }
+ break;
+
+ default:
+ break;
+ }
+ }
/* If there are LABELS inside the split insns increment the
usage count so we don't delete the label. */
if (GET_CODE (XVECEXP (seq, 0, i)) == INSN)
mark_label_nuses (PATTERN (XVECEXP (seq, 0, i)));
- tem = emit_insn_after (seq, before);
+ tem = emit_insn_after (seq, trial);
- delete_insn (trial);
+ delete_related_insns (trial);
if (has_barrier)
emit_barrier_after (tem);
/* Return either the first or the last insn, depending on which was
requested. */
return last
- ? (after ? prev_active_insn (after) : last_insn)
- : next_active_insn (before);
+ ? (after ? PREV_INSN (after) : last_insn)
+ : NEXT_INSN (before);
}
return trial;
make_insn_raw (pattern)
rtx pattern;
{
- register rtx insn;
+ rtx insn;
insn = rtx_alloc (INSN);
make_jump_insn_raw (pattern)
rtx pattern;
{
- register rtx insn;
+ rtx insn;
insn = rtx_alloc (JUMP_INSN);
INSN_UID (insn) = cur_insn_uid++;
make_call_insn_raw (pattern)
rtx pattern;
{
- register rtx insn;
+ rtx insn;
insn = rtx_alloc (CALL_INSN);
INSN_UID (insn) = cur_insn_uid++;
void
add_insn (insn)
- register rtx insn;
+ rtx insn;
{
PREV_INSN (insn) = last_insn;
NEXT_INSN (insn) = 0;
rtx insn, after;
{
rtx next = NEXT_INSN (after);
+ basic_block bb;
if (optimize && INSN_DELETED_P (after))
abort ();
abort ();
}
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
+ && (bb = BLOCK_FOR_INSN (after)))
+ {
+ set_block_for_insn (insn, bb);
+ /* Should not happen as first in the BB is always
+         either NOTE or LABEL.  */
+ if (bb->end == after
+ /* Avoid clobbering of structure when creating new BB. */
+ && GET_CODE (insn) != BARRIER
+ && (GET_CODE (insn) != NOTE
+ || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
+ bb->end = insn;
+ }
+
NEXT_INSN (after) = insn;
if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
{
rtx insn, before;
{
rtx prev = PREV_INSN (before);
+ basic_block bb;
if (optimize && INSN_DELETED_P (before))
abort ();
abort ();
}
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (before) < basic_block_for_insn->num_elements
+ && (bb = BLOCK_FOR_INSN (before)))
+ {
+ set_block_for_insn (insn, bb);
+ /* Should not happen as first in the BB is always
+         either NOTE or LABEL.  */
+ if (bb->head == insn
+ /* Avoid clobbering of structure when creating new BB. */
+ && GET_CODE (insn) != BARRIER
+ && (GET_CODE (insn) != NOTE
+ || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
+ abort ();
+ }
+
PREV_INSN (before) = insn;
if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
{
rtx next = NEXT_INSN (insn);
rtx prev = PREV_INSN (insn);
+ basic_block bb;
+
if (prev)
{
NEXT_INSN (prev) = next;
if (stack == 0)
abort ();
}
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (insn) < basic_block_for_insn->num_elements
+ && (bb = BLOCK_FOR_INSN (insn)))
+ {
+ if (bb->head == insn)
+ {
+	  /* Never ever delete the basic block note without deleting the
+	     whole basic block.  */
+ if (GET_CODE (insn) == NOTE)
+ abort ();
+ bb->head = next;
+ }
+ if (bb->end == insn)
+ bb->end = prev;
+ }
}
/* Delete all insns made since FROM.
called after delay-slot filling has been done. */
void
-reorder_insns (from, to, after)
+reorder_insns_nobb (from, to, after)
rtx from, to, after;
{
/* Splice this bunch out of where it is now. */
last_insn = to;
}
+/* Same as function above, but take care to update BB boundaries. */
+void
+reorder_insns (from, to, after)
+ rtx from, to, after;
+{
+ rtx prev = PREV_INSN (from);
+ basic_block bb, bb2;
+
+ reorder_insns_nobb (from, to, after);
+
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
+ && (bb = BLOCK_FOR_INSN (after)))
+ {
+ rtx x;
+
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (from) < basic_block_for_insn->num_elements
+ && (bb2 = BLOCK_FOR_INSN (from)))
+ {
+ if (bb2->end == to)
+ bb2->end = prev;
+ }
+
+ if (bb->end == after)
+ bb->end = to;
+
+ for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
+ set_block_for_insn (x, bb);
+ }
+}
+
/* Return the line note insn preceding INSN. */
static rtx
if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
{
- /* We just verified that this BLOCK matches us
- with the block_stack check above. */
- if (debug_ignore_block (NOTE_BLOCK (insn)))
+ /* We just verified that this BLOCK matches us with
+ the block_stack check above. Never delete the
+ BLOCK for the outermost scope of the function; we
+ can refer to names from that scope even if the
+ block notes are messed up. */
+ if (! is_body_block (NOTE_BLOCK (insn))
+ && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
{
remove_insn (tmp);
remove_insn (insn);
rtx
emit_insn_before (pattern, before)
- register rtx pattern, before;
+ rtx pattern, before;
{
- register rtx insn = before;
+ rtx insn = before;
if (GET_CODE (pattern) == SEQUENCE)
{
- register int i;
+ int i;
for (i = 0; i < XVECLEN (pattern, 0); i++)
{
return insn;
}
-/* Similar to emit_insn_before, but update basic block boundaries as well. */
-
-rtx
-emit_block_insn_before (pattern, before, block)
- rtx pattern, before;
- basic_block block;
-{
- rtx prev = PREV_INSN (before);
- rtx r = emit_insn_before (pattern, before);
- if (block && block->head == before)
- block->head = NEXT_INSN (prev);
- return r;
-}
-
/* Make an instruction with body PATTERN and code JUMP_INSN
and output it before the instruction BEFORE. */
rtx
emit_jump_insn_before (pattern, before)
- register rtx pattern, before;
+ rtx pattern, before;
{
- register rtx insn;
+ rtx insn;
if (GET_CODE (pattern) == SEQUENCE)
insn = emit_insn_before (pattern, before);
rtx
emit_call_insn_before (pattern, before)
- register rtx pattern, before;
+ rtx pattern, before;
{
- register rtx insn;
+ rtx insn;
if (GET_CODE (pattern) == SEQUENCE)
insn = emit_insn_before (pattern, before);
rtx
emit_barrier_before (before)
- register rtx before;
+ rtx before;
{
- register rtx insn = rtx_alloc (BARRIER);
+ rtx insn = rtx_alloc (BARRIER);
INSN_UID (insn) = cur_insn_uid++;
int subtype;
rtx before;
{
- register rtx note = rtx_alloc (NOTE);
+ rtx note = rtx_alloc (NOTE);
INSN_UID (note) = cur_insn_uid++;
NOTE_SOURCE_FILE (note) = 0;
NOTE_LINE_NUMBER (note) = subtype;
rtx
emit_insn_after (pattern, after)
- register rtx pattern, after;
+ rtx pattern, after;
{
- register rtx insn = after;
+ rtx insn = after;
if (GET_CODE (pattern) == SEQUENCE)
{
- register int i;
+ int i;
for (i = 0; i < XVECLEN (pattern, 0); i++)
{
insn);
}
-/* Similar to emit_insn_after, but update basic block boundaries as well. */
-
-rtx
-emit_block_insn_after (pattern, after, block)
- rtx pattern, after;
- basic_block block;
-{
- rtx r = emit_insn_after (pattern, after);
- if (block && block->end == after)
- block->end = r;
- return r;
-}
-
/* Make an insn of code JUMP_INSN with body PATTERN
and output it after the insn AFTER. */
rtx
emit_jump_insn_after (pattern, after)
- register rtx pattern, after;
+ rtx pattern, after;
{
- register rtx insn;
+ rtx insn;
if (GET_CODE (pattern) == SEQUENCE)
insn = emit_insn_after (pattern, after);
rtx
emit_barrier_after (after)
- register rtx after;
+ rtx after;
{
- register rtx insn = rtx_alloc (BARRIER);
+ rtx insn = rtx_alloc (BARRIER);
INSN_UID (insn) = cur_insn_uid++;
int subtype;
rtx after;
{
- register rtx note = rtx_alloc (NOTE);
+ rtx note = rtx_alloc (NOTE);
INSN_UID (note) = cur_insn_uid++;
NOTE_SOURCE_FILE (note) = 0;
NOTE_LINE_NUMBER (note) = subtype;
int line;
rtx after;
{
- register rtx note;
+ rtx note;
if (no_line_numbers && line > 0)
{
if (GET_CODE (pattern) == SEQUENCE)
{
- register int i;
+ int i;
for (i = 0; i < XVECLEN (pattern, 0); i++)
{
rtx
emit_insns_after (first, after)
- register rtx first;
- register rtx after;
+ rtx first;
+ rtx after;
{
- register rtx last;
- register rtx after_after;
+ rtx last;
+ rtx after_after;
+ basic_block bb;
if (!after)
abort ();
if (!first)
- return first;
+ return after;
- for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
- continue;
+ if (basic_block_for_insn
+ && (unsigned int)INSN_UID (after) < basic_block_for_insn->num_elements
+ && (bb = BLOCK_FOR_INSN (after)))
+ {
+ for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
+ set_block_for_insn (last, bb);
+ set_block_for_insn (last, bb);
+ if (bb->end == after)
+ bb->end = last;
+ }
+ else
+ for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
+ continue;
after_after = NEXT_INSN (after);
return emit_insn (pattern);
else
{
- register rtx insn = make_jump_insn_raw (pattern);
+ rtx insn = make_jump_insn_raw (pattern);
add_insn (insn);
return insn;
}
return emit_insn (pattern);
else
{
- register rtx insn = make_call_insn_raw (pattern);
+ rtx insn = make_call_insn_raw (pattern);
add_insn (insn);
PUT_CODE (insn, CALL_INSN);
return insn;
rtx
emit_barrier ()
{
- register rtx barrier = rtx_alloc (BARRIER);
+ rtx barrier = rtx_alloc (BARRIER);
INSN_UID (barrier) = cur_insn_uid++;
add_insn (barrier);
return barrier;
const char *file;
int line;
{
- register rtx note;
+ rtx note;
if (line > 0)
{
}
/* Place a note of KIND on insn INSN with DATUM as the datum. If a
- note of this type already exists, remove it first. */
+ note of this type already exists, remove it first. */
void
set_unique_reg_note (insn, kind, datum)
}
if (GET_CODE (x) == PARALLEL)
{
- register int j;
+ int j;
for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
return CALL_INSN;
return emit_insn (x);
else if (code == JUMP_INSN)
{
- register rtx insn = emit_jump_insn (x);
+ rtx insn = emit_jump_insn (x);
if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
return emit_barrier ();
return insn;
if (len == 1
&& ! RTX_FRAME_RELATED_P (first_insn)
&& GET_CODE (first_insn) == INSN
- /* Don't throw away any reg notes. */
+ /* Don't throw away any reg notes. */
&& REG_NOTES (first_insn) == 0)
return PATTERN (first_insn);
rtx
copy_insn_1 (orig)
- register rtx orig;
+ rtx orig;
{
- register rtx copy;
- register int i, j;
- register RTX_CODE code;
- register const char *format_ptr;
+ rtx copy;
+ int i, j;
+ RTX_CODE code;
+ const char *format_ptr;
code = GET_CODE (orig);
enum machine_mode mode;
enum machine_mode double_mode;
- /* Initialize the CONST_INT hash table. */
+ /* Initialize the CONST_INT and memory attribute hash tables. */
const_int_htab = htab_create (37, const_int_htab_hash,
const_int_htab_eq, NULL);
- ggc_add_root (&const_int_htab, 1, sizeof (const_int_htab),
- rtx_htab_mark);
+ ggc_add_deletable_htab (const_int_htab, 0, 0);
+
+ mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
+ mem_attrs_htab_eq, NULL);
+ ggc_add_deletable_htab (mem_attrs_htab, 0, mem_attrs_mark);
no_line_numbers = ! line_numbers;