/* Emit RTL for the GCC expander.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
/* Middle-to-low level generation of rtx code and insns.
#include "insn-config.h"
#include "recog.h"
#include "real.h"
+#include "fixed-value.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
+#include "tree-pass.h"
+#include "df.h"
/* Commonly used modes. */
static GTY(()) int label_num = 1;
-/* Highest label number in current function.
- Zero means use the value of label_num instead.
- This is nonzero only when belatedly compiling an inline function. */
-
-static int last_label_num;
-
-/* Value label_num had when set_new_last_label_num was called.
- If label_num has not changed since then, last_label_num is valid. */
-
-static int base_label_num;
-
/* Nonzero means do not generate NOTEs for source line numbers. */
static int no_line_numbers;
REAL_VALUE_TYPE dconstm2;
REAL_VALUE_TYPE dconsthalf;
REAL_VALUE_TYPE dconstthird;
-REAL_VALUE_TYPE dconstpi;
+REAL_VALUE_TYPE dconstsqrt2;
REAL_VALUE_TYPE dconste;
+/* Record fixed-point constants 0 and 1. */
+FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
+FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
+
/* All references to the following fixed hard registers go through
these unique rtl objects. On machines where the frame-pointer and
arg-pointer are the same register, they use the same unique object.
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
htab_t const_double_htab;
+/* A hash table storing all CONST_FIXEDs. */
+static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
+ htab_t const_fixed_htab;
+
#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_location (cfun->emit->x_last_location)
#define first_label_num (cfun->emit->x_first_label_num)
-static rtx make_jump_insn_raw (rtx);
static rtx make_call_insn_raw (rtx);
-static rtx find_line_note (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
-static void unshare_all_decls (tree);
-static void reset_used_decls (tree);
+static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
+static hashval_t const_fixed_htab_hash (const void *);
+static int const_fixed_htab_eq (const void *, const void *);
+static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
-static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
+static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static tree component_ref_for_mem_expr (tree);
-static rtx gen_const_vector_0 (enum machine_mode);
-static rtx gen_complex_constant_part (enum machine_mode, rtx, int);
+static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);
/* Probability of the conditional branch currently processed by try_split.
static hashval_t
const_int_htab_hash (const void *x)
{
- return (hashval_t) INTVAL ((rtx) x);
+ return (hashval_t) INTVAL ((const_rtx) x);
}
/* Returns nonzero if the value represented by X (which is really a
static int
const_int_htab_eq (const void *x, const void *y)
{
- return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
+ return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}
/* Returns a hash code for X (which is really a CONST_DOUBLE). */
static hashval_t
const_double_htab_hash (const void *x)
{
- rtx value = (rtx) x;
+ const_rtx const value = (const_rtx) x;
hashval_t h;
if (GET_MODE (value) == VOIDmode)
static int
const_double_htab_eq (const void *x, const void *y)
{
- rtx a = (rtx)x, b = (rtx)y;
+ const_rtx const a = (const_rtx)x, b = (const_rtx)y;
if (GET_MODE (a) != GET_MODE (b))
return 0;
CONST_DOUBLE_REAL_VALUE (b));
}
+/* Returns a hash code for X (which is really a CONST_FIXED). */
+
+static hashval_t
+const_fixed_htab_hash (const void *x)
+{
+ const_rtx const value = (const_rtx) x;
+ hashval_t h;
+
+ h = fixed_hash (CONST_FIXED_VALUE (value));
+ /* MODE is used in the comparison, so it should be in the hash. */
+ h ^= GET_MODE (value);
+ return h;
+}
+
+/* Returns nonzero if the value represented by X (really a CONST_FIXED)
+ is the same as that represented by Y (really a CONST_FIXED). */
+
+static int
+const_fixed_htab_eq (const void *x, const void *y)
+{
+ const_rtx const a = (const_rtx) x, b = (const_rtx) y;
+
+ if (GET_MODE (a) != GET_MODE (b))
+ return 0;
+ return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
+}
+
/* Returns a hash code for X (which is a really a mem_attrs *). */
static hashval_t
mem_attrs_htab_hash (const void *x)
{
- mem_attrs *p = (mem_attrs *) x;
+ const mem_attrs *const p = (const mem_attrs *) x;
return (p->alias ^ (p->align * 1000)
^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
- ^ (size_t) p->expr);
+ ^ (size_t) iterative_hash_expr (p->expr, 0));
}
/* Returns nonzero if the value represented by X (which is really a
static int
mem_attrs_htab_eq (const void *x, const void *y)
{
- mem_attrs *p = (mem_attrs *) x;
- mem_attrs *q = (mem_attrs *) y;
+ const mem_attrs *const p = (const mem_attrs *) x;
+ const mem_attrs *const q = (const mem_attrs *) y;
- return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
- && p->size == q->size && p->align == q->align);
+ return (p->alias == q->alias && p->offset == q->offset
+ && p->size == q->size && p->align == q->align
+ && (p->expr == q->expr
+ || (p->expr != NULL_TREE && q->expr != NULL_TREE
+ && operand_equal_p (p->expr, q->expr, 0))));
}
/* Allocate a new mem_attrs structure and insert it into the hash table if
MEM of mode MODE. */
static mem_attrs *
-get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
+get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
unsigned int align, enum machine_mode mode)
{
mem_attrs attrs;
static hashval_t
reg_attrs_htab_hash (const void *x)
{
- reg_attrs *p = (reg_attrs *) x;
+ const reg_attrs *const p = (const reg_attrs *) x;
return ((p->offset * 1000) ^ (long) p->decl);
}
static int
reg_attrs_htab_eq (const void *x, const void *y)
{
- reg_attrs *p = (reg_attrs *) x;
- reg_attrs *q = (reg_attrs *) y;
+ const reg_attrs *const p = (const reg_attrs *) x;
+ const reg_attrs *const q = (const reg_attrs *) y;
return (p->decl == q->decl && p->offset == q->offset);
}
return *slot;
}
+
+#if !HAVE_blockage
+/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
+ across this insn. */
+
+rtx
+gen_blockage (void)
+{
+ rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
+ MEM_VOLATILE_P (x) = true;
+ return x;
+}
+#endif
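/* Illustrative sketch, not part of the patch: on targets that do not
   provide a "blockage" insn pattern, callers emit the volatile
   ASM_INPUT built above as an insn so the scheduler cannot move
   instructions across it.  The wrapper below is hypothetical;
   emit_insn and gen_blockage are the real functions.  */
#if 0
static void
emit_scheduling_barrier_example (void)
{
  /* The volatile empty asm acts as a barrier to insn scheduling.  */
  emit_insn (gen_blockage ());
}
#endif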
+
+
/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
don't attempt to share with the various global pieces of rtl (such as
frame_pointer_rtx). */
rtx real = rtx_alloc (CONST_DOUBLE);
PUT_MODE (real, mode);
- memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
+ real->u.rv = value;
return lookup_const_double (real);
}
+/* Determine whether FIXED, a CONST_FIXED, already exists in the
+ hash table. If so, return its counterpart; otherwise add it
+ to the hash table and return it. */
+
+static rtx
+lookup_const_fixed (rtx fixed)
+{
+ void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
+ if (*slot == 0)
+ *slot = fixed;
+
+ return (rtx) *slot;
+}
+
+/* Return a CONST_FIXED rtx for a fixed-point value specified by
+ VALUE in mode MODE. */
+
+rtx
+const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
+{
+ rtx fixed = rtx_alloc (CONST_FIXED);
+ PUT_MODE (fixed, mode);
+
+ fixed->u.fv = value;
+
+ return lookup_const_fixed (fixed);
+}
+
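/* Illustrative sketch, not part of the patch: like CONST_DOUBLEs,
   CONST_FIXEDs are hash-consed, so requesting the same value in the
   same mode twice yields one shared rtx.  The function below is
   hypothetical.  */
#if 0
static void
const_fixed_sharing_example (FIXED_VALUE_TYPE v, enum machine_mode mode)
{
  rtx a = const_fixed_from_fixed_value (v, mode);
  rtx b = const_fixed_from_fixed_value (v, mode);
  /* Both lookups return the identical, shared rtx.  */
  gcc_assert (a == b);
}
#endif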
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
of ints: I0 is the low-order word and I1 is the high-order word.
Do not use this routine for non-integer modes; convert to
rtx value;
unsigned int i;
+ /* There are the following cases (note that there are no modes with
+ HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):
+
+ 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
+ gen_int_mode.
+ 2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
+ the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
+ from copies of the sign bit, and sign of i0 and i1 are the same), then
+ we return a CONST_INT for i0.
+ 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
if (mode != VOIDmode)
{
- int width;
- if (GET_MODE_CLASS (mode) != MODE_INT
- && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
- /* We can get a 0 for an error mark. */
- && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
- && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
- abort ();
-
- /* We clear out all bits that don't belong in MODE, unless they and
- our sign bit are all one. So we get either a reasonable negative
- value or a reasonable unsigned value for this mode. */
- width = GET_MODE_BITSIZE (mode);
- if (width < HOST_BITS_PER_WIDE_INT
- && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
- != ((HOST_WIDE_INT) (-1) << (width - 1))))
- i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
- else if (width == HOST_BITS_PER_WIDE_INT
- && ! (i1 == ~0 && i0 < 0))
- i1 = 0;
- else if (width > 2 * HOST_BITS_PER_WIDE_INT)
- /* We cannot represent this value as a constant. */
- abort ();
-
- /* If this would be an entire word for the target, but is not for
- the host, then sign-extend on the host so that the number will
- look the same way on the host that it would on the target.
-
- For example, when building a 64 bit alpha hosted 32 bit sparc
- targeted compiler, then we want the 32 bit unsigned value -1 to be
- represented as a 64 bit value -1, and not as 0x00000000ffffffff.
- The latter confuses the sparc backend. */
-
- if (width < HOST_BITS_PER_WIDE_INT
- && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
- i0 |= ((HOST_WIDE_INT) (-1) << width);
-
- /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
- CONST_INT.
-
- ??? Strictly speaking, this is wrong if we create a CONST_INT for
- a large unsigned constant with the size of MODE being
- HOST_BITS_PER_WIDE_INT and later try to interpret that constant
- in a wider mode. In that case we will mis-interpret it as a
- negative number.
-
- Unfortunately, the only alternative is to make a CONST_DOUBLE for
- any constant in any mode if it is an unsigned constant larger
- than the maximum signed integer in an int on the host. However,
- doing this will break everyone that always expects to see a
- CONST_INT for SImode and smaller.
-
- We have always been making CONST_INTs in this case, so nothing
- new is being broken. */
-
- if (width <= HOST_BITS_PER_WIDE_INT)
- i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
+ gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
+ || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
+ /* We can get a 0 for an error mark. */
+ || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
+ || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
+
+ if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
+ return gen_int_mode (i0, mode);
+
+ gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
}
/* If this integer fits in one word, return a CONST_INT. */
return rt;
}
+/* Generate a MEM referring to non-trapping constant memory. */
+
rtx
-gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
+gen_const_mem (enum machine_mode mode, rtx addr)
{
- /* This is the most common failure type.
- Catch it early so we can see who does it. */
- if ((offset % GET_MODE_SIZE (mode)) != 0)
- abort ();
+ rtx mem = gen_rtx_MEM (mode, addr);
+ MEM_READONLY_P (mem) = 1;
+ MEM_NOTRAP_P (mem) = 1;
+ return mem;
+}
- /* This check isn't usable right now because combine will
- throw arbitrary crap like a CALL into a SUBREG in
- gen_lowpart_for_combine so we must just eat it. */
-#if 0
- /* Check for this too. */
- if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
- abort ();
+/* Generate a MEM referring to fixed portions of the frame, e.g., register
+ save areas. */
+
+rtx
+gen_frame_mem (enum machine_mode mode, rtx addr)
+{
+ rtx mem = gen_rtx_MEM (mode, addr);
+ MEM_NOTRAP_P (mem) = 1;
+ set_mem_alias_set (mem, get_frame_alias_set ());
+ return mem;
+}
+
+/* Generate a MEM referring to a temporary use of the stack, not part
+ of the fixed stack frame. For example, something which is pushed
+ by a target splitter. */
+rtx
+gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
+{
+ rtx mem = gen_rtx_MEM (mode, addr);
+ MEM_NOTRAP_P (mem) = 1;
+ if (!current_function_calls_alloca)
+ set_mem_alias_set (mem, get_frame_alias_set ());
+ return mem;
+}
+
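/* Illustrative sketch, not part of the patch: the three constructors
   above replace open-coded gen_rtx_MEM calls, setting up front the
   MEM flags and alias set each kind of reference needs.  The function
   below is hypothetical.  */
#if 0
static void
mem_constructors_example (enum machine_mode mode, rtx addr)
{
  rtx c = gen_const_mem (mode, addr);       /* read-only, non-trapping */
  rtx f = gen_frame_mem (mode, addr);       /* fixed frame, frame alias set */
  rtx t = gen_tmp_stack_mem (mode, addr);   /* transient stack use */

  gcc_assert (MEM_READONLY_P (c) && MEM_NOTRAP_P (c));
  gcc_assert (MEM_NOTRAP_P (f) && MEM_NOTRAP_P (t));
}
#endif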
+/* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
+ this construct would be valid, and false otherwise. */
+
+bool
+validate_subreg (enum machine_mode omode, enum machine_mode imode,
+ const_rtx reg, unsigned int offset)
+{
+ unsigned int isize = GET_MODE_SIZE (imode);
+ unsigned int osize = GET_MODE_SIZE (omode);
+
+ /* All subregs must be aligned. */
+ if (offset % osize != 0)
+ return false;
+
+ /* The subreg offset cannot be outside the inner object. */
+ if (offset >= isize)
+ return false;
+
+ /* ??? This should not be here. Temporarily continue to allow word_mode
+ subregs of anything. The most common offender is (subreg:SI (reg:DF)).
+ Generally, backends are doing something sketchy but it'll take time to
+ fix them all. */
+ if (omode == word_mode)
+ ;
+ /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
+ is the culprit here, and not the backends. */
+ else if (osize >= UNITS_PER_WORD && isize >= osize)
+ ;
+ /* Allow component subregs of complex and vector. Though given the below
+ extraction rules, it's not always clear what that means. */
+ else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
+ && GET_MODE_INNER (imode) == omode)
+ ;
+ /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
+ i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
+ represent this. It's questionable if this ought to be represented at
+ all -- why can't this all be hidden in post-reload splitters that make
+ arbitrary mode changes to the registers themselves. */
+ else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
+ ;
+ /* Subregs involving floating point modes are not allowed to
+ change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
+ (subreg:SI (reg:DF) 0) isn't. */
+ else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
+ {
+ if (isize != osize)
+ return false;
+ }
+
+ /* Paradoxical subregs must have offset zero. */
+ if (osize > isize)
+ return offset == 0;
+
+ /* This is a normal subreg. Verify that the offset is representable. */
+
+ /* For hard registers, we already have most of these rules collected in
+ subreg_offset_representable_p. */
+ if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
+ {
+ unsigned int regno = REGNO (reg);
+
+#ifdef CANNOT_CHANGE_MODE_CLASS
+ if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
+ && GET_MODE_INNER (imode) == omode)
+ ;
+ else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
+ return false;
#endif
+
+ return subreg_offset_representable_p (regno, imode, offset, omode);
+ }
+
+ /* For pseudo registers, we want most of the same checks. Namely:
+ If the register is no larger than a word, the subreg must be the lowpart.
+ If the register is larger than a word, the subreg must be the lowpart
+ of a subword. A subreg does *not* perform arbitrary bit extraction.
+ Given that we've already checked mode/offset alignment, we only have
+ to check subword subregs here. */
+ if (osize < UNITS_PER_WORD)
+ {
+ enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
+ unsigned int low_off = subreg_lowpart_offset (omode, wmode);
+ if (offset % UNITS_PER_WORD != low_off)
+ return false;
+ }
+ return true;
+}
+
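/* Illustrative sketch, not part of the patch: two target-independent
   judgements the predicate above makes.  The function is
   hypothetical.  */
#if 0
static void
validate_subreg_examples (void)
{
  /* The lowpart of a DImode value is a valid SImode subreg.  */
  gcc_assert (validate_subreg (SImode, DImode, NULL_RTX,
			       subreg_lowpart_offset (SImode, DImode)));
  /* A subreg offset that is not a multiple of the outer mode's size
     is rejected outright.  */
  gcc_assert (!validate_subreg (SImode, DImode, NULL_RTX, 2));
}
#endif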
+rtx
+gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
+{
+ gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
return gen_rtx_raw_SUBREG (mode, reg, offset);
}
struct function *f = cfun;
rtx val;
- /* Don't let anything called after initial flow analysis create new
- registers. */
- if (no_new_pseudos)
- abort ();
+ gcc_assert (can_create_pseudo_p ());
if (generating_concat_p
&& (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
return val;
}
-/* Generate a register with same attributes as REG, but offsetted by OFFSET.
+/* Update NEW with the same attributes as REG, but offsetted by OFFSET.
Do the big endian correction if needed. */
-rtx
-gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
+static void
+update_reg_offset (rtx new, rtx reg, int offset)
{
- rtx new = gen_rtx_REG (mode, regno);
tree decl;
HOST_WIDE_INT var_size;
if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
&& decl != NULL
&& offset > 0
- && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode)
+ && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (GET_MODE (new))
&& ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
&& var_size < GET_MODE_SIZE (GET_MODE (reg))))
{
REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
REG_OFFSET (reg) + offset);
+}
+
+/* Generate a register with same attributes as REG, but offsetted by
+ OFFSET. */
+
+rtx
+gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
+ int offset)
+{
+ rtx new = gen_rtx_REG (mode, regno);
+
+ update_reg_offset (new, reg, offset);
+ return new;
+}
+
+/* Generate a new pseudo-register with the same attributes as REG, but
+ offsetted by OFFSET. */
+
+rtx
+gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
+{
+ rtx new = gen_reg_rtx (mode);
+
+ update_reg_offset (new, reg, offset);
return new;
}
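/* Illustrative sketch, not part of the patch: gen_rtx_REG_offset wraps
   a given register number, while gen_reg_rtx_offset allocates a fresh
   pseudo; both copy REG's attributes with the offset applied.  The
   function below is hypothetical.  */
#if 0
static rtx
highpart_pseudo_example (rtx reg)
{
  /* A new pseudo tracking the same decl as REG, one word further in.  */
  return gen_reg_rtx_offset (reg, word_mode, UNITS_PER_WORD);
}
#endif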
void
set_decl_rtl (tree t, rtx x)
{
- DECL_CHECK (t)->decl.rtl = x;
+ DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
if (!x)
return;
REG_USERVAR_P (XEXP (reg, 0)) = 1;
REG_USERVAR_P (XEXP (reg, 1)) = 1;
}
- else if (REG_P (reg))
- REG_USERVAR_P (reg) = 1;
else
- abort ();
+ {
+ gcc_assert (REG_P (reg));
+ REG_USERVAR_P (reg) = 1;
+ }
}
/* Identify REG as a probable pointer register and show its alignment
int
max_label_num (void)
{
- if (last_label_num && label_num == base_label_num)
- return last_label_num;
return label_num;
}
first_label_num = CODE_LABEL_NUMBER (x);
}
\f
-/* Return the final regno of X, which is a SUBREG of a hard
- register. */
-int
-subreg_hard_regno (rtx x, int check_mode)
-{
- enum machine_mode mode = GET_MODE (x);
- unsigned int byte_offset, base_regno, final_regno;
- rtx reg = SUBREG_REG (x);
-
- /* This is where we attempt to catch illegal subregs
- created by the compiler. */
- if (GET_CODE (x) != SUBREG
- || !REG_P (reg))
- abort ();
- base_regno = REGNO (reg);
- if (base_regno >= FIRST_PSEUDO_REGISTER)
- abort ();
- if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
- abort ();
-#ifdef ENABLE_CHECKING
- if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
- SUBREG_BYTE (x), mode))
- abort ();
-#endif
- /* Catch non-congruent offsets too. */
- byte_offset = SUBREG_BYTE (x);
- if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
- abort ();
-
- final_regno = subreg_regno (x);
-
- return final_regno;
-}
-
/* Return a value representing some low-order bits of X, where the number
of low-order bits is given by MODE. Note that no conversion is done
between floating-point and fixed-point values, rather, the bit
/* Unfortunately, this routine doesn't take a parameter for the mode of X,
so we have to make one up. Yuk. */
innermode = GET_MODE (x);
- if (GET_CODE (x) == CONST_INT && msize <= HOST_BITS_PER_WIDE_INT)
+ if (GET_CODE (x) == CONST_INT
+ && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
else if (innermode == VOIDmode)
innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
xsize = GET_MODE_SIZE (innermode);
- if (innermode == VOIDmode || innermode == BLKmode)
- abort ();
+ gcc_assert (innermode != VOIDmode && innermode != BLKmode);
if (innermode == mode)
return x;
return 0;
/* Don't allow generating paradoxical FLOAT_MODE subregs. */
- if (GET_MODE_CLASS (mode) == MODE_FLOAT && msize > xsize)
+ if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
return 0;
offset = subreg_lowpart_offset (mode, innermode);
return 0;
}
\f
-/* Return the constant real or imaginary part (which has mode MODE)
- of a complex value X. The IMAGPART_P argument determines whether
- the real or complex component should be returned. This function
- returns NULL_RTX if the component isn't a constant. */
-
-static rtx
-gen_complex_constant_part (enum machine_mode mode, rtx x, int imagpart_p)
-{
- tree decl, part;
-
- if (MEM_P (x)
- && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
- {
- decl = SYMBOL_REF_DECL (XEXP (x, 0));
- if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
- {
- part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
- if (TREE_CODE (part) == REAL_CST
- || TREE_CODE (part) == INTEGER_CST)
- return expand_expr (part, NULL_RTX, mode, 0);
- }
- }
- return NULL_RTX;
-}
-
-/* Return the real part (which has mode MODE) of a complex value X.
- This always comes at the low address in memory. */
-
-rtx
-gen_realpart (enum machine_mode mode, rtx x)
-{
- rtx part;
-
- /* Handle complex constants. */
- part = gen_complex_constant_part (mode, x, 0);
- if (part != NULL_RTX)
- return part;
-
- if (WORDS_BIG_ENDIAN
- && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
- && REG_P (x)
- && REGNO (x) < FIRST_PSEUDO_REGISTER)
- internal_error
- ("can't access real part of complex value in hard register");
- else if (WORDS_BIG_ENDIAN)
- return gen_highpart (mode, x);
- else
- return gen_lowpart (mode, x);
-}
-
-/* Return the imaginary part (which has mode MODE) of a complex value X.
- This always comes at the high address in memory. */
-
-rtx
-gen_imagpart (enum machine_mode mode, rtx x)
-{
- rtx part;
-
- /* Handle complex constants. */
- part = gen_complex_constant_part (mode, x, 1);
- if (part != NULL_RTX)
- return part;
-
- if (WORDS_BIG_ENDIAN)
- return gen_lowpart (mode, x);
- else if (! WORDS_BIG_ENDIAN
- && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
- && REG_P (x)
- && REGNO (x) < FIRST_PSEUDO_REGISTER)
- internal_error
- ("can't access imaginary part of complex value in hard register");
- else
- return gen_highpart (mode, x);
-}
-\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
/* This case loses if X is a subreg. To catch bugs early,
complain if an invalid MODE is used even in other cases. */
- if (msize > UNITS_PER_WORD
- && msize != (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)))
- abort ();
+ gcc_assert (msize <= UNITS_PER_WORD
+ || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
result = simplify_gen_subreg (mode, x, GET_MODE (x),
subreg_highpart_offset (mode, GET_MODE (x)));
-
+ gcc_assert (result);
+
/* simplify_gen_subreg is not guaranteed to return a valid operand for
the target if we have a MEM. gen_highpart must return a valid operand,
emitting code if necessary to do so. */
- if (result != NULL_RTX && MEM_P (result))
- result = validize_mem (result);
-
- if (!result)
- abort ();
+ if (MEM_P (result))
+ {
+ result = validize_mem (result);
+ gcc_assert (result);
+ }
+
return result;
}
{
if (GET_MODE (exp) != VOIDmode)
{
- if (GET_MODE (exp) != innermode)
- abort ();
+ gcc_assert (GET_MODE (exp) == innermode);
return gen_highpart (outermode, exp);
}
return simplify_gen_subreg (outermode, exp, innermode,
unsigned int offset = 0;
int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
- if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
- abort ();
+ gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
if (difference > 0)
{
If X is not a SUBREG, always return 1 (it is its own low part!). */
int
-subreg_lowpart_p (rtx x)
+subreg_lowpart_p (const_rtx x)
{
if (GET_CODE (x) != SUBREG)
return 1;
if (mode == VOIDmode)
mode = GET_MODE (op);
- if (mode == VOIDmode)
- abort ();
+ gcc_assert (mode != VOIDmode);
/* If OP is narrower than a word, fail. */
if (mode != BLKmode
return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
-/* Similar to `operand_subword', but never return 0. If we can't extract
- the required subword, put OP into a register and try again. If that fails,
- abort. We always validate the address in this case.
+/* Similar to `operand_subword', but never return 0. If we can't
+ extract the required subword, put OP into a register and try again.
+ The second attempt must succeed. We always validate the address in
+ this case.
MODE is the mode of OP, in case it is CONST_INT. */
}
result = operand_subword (op, offset, 1, mode);
- if (result == 0)
- abort ();
+ gcc_assert (result);
return result;
}
\f
-/* Given a compare instruction, swap the operands.
- A test instruction is changed into a compare of 0 against the operand. */
-
-void
-reverse_comparison (rtx insn)
-{
- rtx body = PATTERN (insn);
- rtx comp;
-
- if (GET_CODE (body) == SET)
- comp = SET_SRC (body);
- else
- comp = SET_SRC (XVECEXP (body, 0, 0));
-
- if (GET_CODE (comp) == COMPARE)
- {
- rtx op0 = XEXP (comp, 0);
- rtx op1 = XEXP (comp, 1);
- XEXP (comp, 0) = op1;
- XEXP (comp, 1) = op0;
- }
- else
- {
- rtx new = gen_rtx_COMPARE (VOIDmode,
- CONST0_RTX (GET_MODE (comp)), comp);
- if (GET_CODE (body) == SET)
- SET_SRC (body) = new;
- else
- SET_SRC (XVECEXP (body, 0, 0)) = new;
- }
-}
-\f
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
or (2) a component ref of something variable. Represent the latter with
a NULL expression. */
if (inner == TREE_OPERAND (ref, 0))
return ref;
else
- return build (COMPONENT_REF, TREE_TYPE (ref), inner, TREE_OPERAND (ref, 1),
- NULL_TREE);
+ return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
+ TREE_OPERAND (ref, 1), NULL_TREE);
}
/* Returns 1 if both MEM_EXPR can be considered equal
and 0 otherwise. */
int
-mem_expr_equal_p (tree expr1, tree expr2)
+mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
if (expr1 == expr2)
return 1;
&& mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
TREE_OPERAND (expr2, 1));
- if (TREE_CODE (expr1) == INDIRECT_REF)
+ if (INDIRECT_REF_P (expr1))
return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
TREE_OPERAND (expr2, 0));
-
- /* Decls with different pointers can't be equal. */
- if (DECL_P (expr1))
- return 0;
- abort(); /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
+ /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
have been resolved here. */
+ gcc_assert (DECL_P (expr1));
+
+ /* Decls with different pointers can't be equal. */
+ return 0;
}
/* Given REF, a MEM, and T, either the type of X or the expression
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
HOST_WIDE_INT bitpos)
{
- HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
+ alias_set_type alias = MEM_ALIAS_SET (ref);
tree expr = MEM_EXPR (ref);
rtx offset = MEM_OFFSET (ref);
rtx size = MEM_SIZE (ref);
wrong answer, as it assumes that DECL_RTL already has the right alias
info. Callers should not set DECL_RTL until after the call to
set_mem_attributes. */
- if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
- abort ();
+ gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
/* Get the alias set from the expression or type (perhaps using a
front-end routine) and use it. */
alias = get_alias_set (t);
MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
- MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
- RTX_UNCHANGING_P (ref)
- |= ((lang_hooks.honor_readonly
- && (TYPE_READONLY (type) || (t != type && TREE_READONLY (t))))
- || (! TYPE_P (t) && TREE_CONSTANT (t)));
+ MEM_IN_STRUCT_P (ref)
+ = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
MEM_POINTER (ref) = POINTER_TYPE_P (type);
- MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (t);
/* If we are making an object of this type, or if this is a DECL, we know
that it is a scalar if the type is not an aggregate. */
- if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
+ if ((objectp || DECL_P (t))
+ && ! AGGREGATE_TYPE_P (type)
+ && TREE_CODE (type) != COMPLEX_TYPE)
MEM_SCALAR_P (ref) = 1;
/* We can set the alignment from the type if we are making an object,
this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
- if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
+ if (objectp || TREE_CODE (t) == INDIRECT_REF
+ || TREE_CODE (t) == ALIGN_INDIRECT_REF
+ || TYPE_ALIGN_OK (type))
align = MAX (align, TYPE_ALIGN (type));
+ else if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
+ {
+ if (integer_zerop (TREE_OPERAND (t, 1)))
+ /* We don't know anything about the alignment. */
+ align = BITS_PER_UNIT;
+ else
+ align = tree_low_cst (TREE_OPERAND (t, 1), 1);
+ }
/* If the size is known, we can set that. */
if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
the expression. */
if (! TYPE_P (t))
{
- maybe_set_unchanging (ref, t);
+ tree base;
+
if (TREE_THIS_VOLATILE (t))
MEM_VOLATILE_P (ref) = 1;
|| TREE_CODE (t) == SAVE_EXPR)
t = TREE_OPERAND (t, 0);
- /* If this expression can't be addressed (e.g., it contains a reference
- to a non-addressable field), show we don't change its alias set. */
- if (! can_address_p (t))
+ /* We may look through structure-like accesses for the purposes of
+ examining TREE_THIS_NOTRAP, but not array-like accesses. */
+ base = t;
+ while (TREE_CODE (base) == COMPONENT_REF
+ || TREE_CODE (base) == REALPART_EXPR
+ || TREE_CODE (base) == IMAGPART_EXPR
+ || TREE_CODE (base) == BIT_FIELD_REF)
+ base = TREE_OPERAND (base, 0);
+
+ if (DECL_P (base))
+ {
+ if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
+ MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
+ else
+ MEM_NOTRAP_P (ref) = 1;
+ }
+ else
+ MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
+
+ base = get_base_address (base);
+ if (base && DECL_P (base)
+ && TREE_READONLY (base)
+ && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
+ {
+ tree base_type = TREE_TYPE (base);
+ gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
+ || DECL_ARTIFICIAL (base));
+ MEM_READONLY_P (ref) = 1;
+ }
+
+ /* If this expression uses its parent's alias set, mark it such
+ that we won't change it. */
+ if (component_uses_parent_alias_set (t))
MEM_KEEP_ALIAS_SET_P (ref) = 1;
/* If this is a decl, set the attributes of the MEM from it. */
}
/* If this is a constant, we know the alignment. */
- else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
+ else if (CONSTANT_CLASS_P (t))
{
align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
index, then convert to sizetype and multiply by the size of
the array element. */
if (! integer_zerop (low_bound))
- index = fold (build (MINUS_EXPR, TREE_TYPE (index),
- index, low_bound));
+ index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
+ index, low_bound);
off_tree = size_binop (PLUS_EXPR,
- size_binop (MULT_EXPR, convert (sizetype,
- index),
+ size_binop (MULT_EXPR,
+ fold_convert (sizetype,
+ index),
unit_size),
off_tree);
t2 = TREE_OPERAND (t2, 0);
the size we got from the type? */
}
else if (flag_argument_noalias > 1
- && TREE_CODE (t2) == INDIRECT_REF
+ && (INDIRECT_REF_P (t2))
&& TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
{
expr = t2;
/* If this is a Fortran indirect argument reference, record the
parameter decl. */
else if (flag_argument_noalias > 1
- && TREE_CODE (t) == INDIRECT_REF
+ && (INDIRECT_REF_P (t))
&& TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
{
expr = t;
size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
}
+ if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
+ {
+ /* Force EXPR and OFFSET to NULL, since we don't know exactly what
+ we're overlapping. */
+ offset = NULL;
+ expr = NULL;
+ }
+
/* Now set the attributes we computed above. */
MEM_ATTRS (ref)
= get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
/* Set the alias set of MEM to SET. */
void
-set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
+set_mem_alias_set (rtx mem, alias_set_type set)
{
#ifdef ENABLE_CHECKING
/* If the new and old alias sets don't conflict, something is wrong. */
- if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
- abort ();
+ gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
#endif
MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
{
rtx new;
- if (!MEM_P (memref))
- abort ();
+ gcc_assert (MEM_P (memref));
if (mode == VOIDmode)
mode = GET_MODE (memref);
if (addr == 0)
if (validate)
{
if (reload_in_progress || reload_completed)
- {
- if (! memory_address_p (mode, addr))
- abort ();
- }
+ gcc_assert (memory_address_p (mode, addr));
else
addr = memory_address (mode, addr);
}
cur_insn_uid++;
}
-
-/* Set the last label number found in the current function.
- This is used when belatedly compiling an inline function. */
-
-void
-set_new_last_label_num (int last)
-{
- base_label_num = label_num;
- last_label_num = last;
-}
-\f
-/* Restore all variables describing the current status from the structure *P.
- This is used after a nested function. */
-
-void
-restore_emit_status (struct function *p ATTRIBUTE_UNUSED)
-{
- last_label_num = 0;
-}
\f
/* Go through all the RTL insn bodies and copy any invalid shared
structure. This routine should only be called once. */
static void
-unshare_all_rtl_1 (tree fndecl, rtx insn)
+unshare_all_rtl_1 (rtx insn)
{
- tree decl;
-
- /* Make sure that virtual parameters are not shared. */
- for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
- SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
-
- /* Make sure that virtual stack slots are not shared. */
- unshare_all_decls (DECL_INITIAL (fndecl));
-
/* Unshare just about everything else. */
unshare_all_rtl_in_chain (insn);
{
reset_used_flags (PATTERN (p));
reset_used_flags (REG_NOTES (p));
- reset_used_flags (LOG_LINKS (p));
}
/* Make sure that virtual stack slots are not shared. */
- reset_used_decls (DECL_INITIAL (cfun->decl));
+ set_used_decls (DECL_INITIAL (cfun->decl));
/* Make sure that virtual parameters are not shared. */
for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
- reset_used_flags (DECL_RTL (decl));
+ set_used_flags (DECL_RTL (decl));
reset_used_flags (stack_slot_list);
- unshare_all_rtl_1 (cfun->decl, insn);
+ unshare_all_rtl_1 (insn);
}
-void
+unsigned int
unshare_all_rtl (void)
{
- unshare_all_rtl_1 (current_function_decl, get_insns ());
+ unshare_all_rtl_1 (get_insns ());
+ return 0;
}
+struct tree_opt_pass pass_unshare_all_rtl =
+{
+ "unshare", /* name */
+ NULL, /* gate */
+ unshare_all_rtl, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ 0, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func | TODO_verify_rtl_sharing, /* todo_flags_finish */
+ 0 /* letter */
+};
+
+
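/* Illustrative note, not part of the patch: wrapping unshare_all_rtl
   as a tree_opt_pass lets the pass manager in passes.c schedule it
   like any other RTL pass, with TODO_verify_rtl_sharing re-checking
   the sharing invariants afterwards.  A hypothetical registration:

     NEXT_PASS (pass_unshare_all_rtl);
*/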
/* Check that ORIG is not marked when it should not be and mark ORIG as in use,
Recursively does the same for subexpressions. */
case REG:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
break;
case CONST:
- /* CONST can be shared if it contains a SYMBOL_REF. If it contains
- a LABEL_REF, it isn't sharable. */
- if (GET_CODE (XEXP (x, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
- && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
+ if (shared_const_p (orig))
return;
break;
/* This rtx may not be shared. If it has already been seen,
replace it with a copy of itself. */
-
+#ifdef ENABLE_CHECKING
if (RTX_FLAG (x, used))
{
- error ("Invalid rtl sharing found in the insn");
+ error ("invalid rtl sharing found in the insn");
debug_rtx (insn);
- error ("Shared rtx");
+ error ("shared rtx");
debug_rtx (x);
- abort ();
+ internal_error ("internal consistency failure");
}
+#endif
+ gcc_assert (!RTX_FLAG (x, used));
+
RTX_FLAG (x, used) = 1;
/* Now scan the subexpressions recursively. */
for (j = 0; j < len; j++)
{
- /* We allow sharing of ASM_OPERANDS inside single instruction. */
+ /* We allow sharing of ASM_OPERANDS inside a single
+ instruction. */
if (j && GET_CODE (XVECEXP (x, i, j)) == SET
- && GET_CODE (SET_SRC (XVECEXP (x, i, j))) == ASM_OPERANDS)
+ && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
+ == ASM_OPERANDS))
verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
else
verify_rtx_sharing (XVECEXP (x, i, j), insn);
{
reset_used_flags (PATTERN (p));
reset_used_flags (REG_NOTES (p));
- reset_used_flags (LOG_LINKS (p));
+ if (GET_CODE (PATTERN (p)) == SEQUENCE)
+ {
+ int i;
+ rtx q, sequence = PATTERN (p);
+
+ for (i = 0; i < XVECLEN (sequence, 0); i++)
+ {
+ q = XVECEXP (sequence, 0, i);
+ gcc_assert (INSN_P (q));
+ reset_used_flags (PATTERN (q));
+ reset_used_flags (REG_NOTES (q));
+ }
+ }
}
for (p = get_insns (); p; p = NEXT_INSN (p))
{
verify_rtx_sharing (PATTERN (p), p);
verify_rtx_sharing (REG_NOTES (p), p);
- verify_rtx_sharing (LOG_LINKS (p), p);
}
}
{
PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
- LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
}
}
-/* Go through all virtual stack slots of a function and copy any
- shared structure. */
-static void
-unshare_all_decls (tree blk)
-{
- tree t;
-
- /* Copy shared decls. */
- for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
- if (DECL_RTL_SET_P (t))
- SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
+/* Go through all virtual stack slots of a function and mark them as
+ shared. We never replace the DECL_RTLs themselves with a copy,
+ but expressions mentioned in a DECL_RTL cannot be shared with
+ expressions in the instruction stream.
- /* Now process sub-blocks. */
- for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
- unshare_all_decls (t);
-}
+ Note that reload may convert pseudo registers into memories in-place.
+ Pseudo registers are always shared, but MEMs never are. Thus if we
+ reset the used flags on MEMs in the instruction stream, we must set
+ them again on MEMs that appear in DECL_RTLs. */
-/* Go through all virtual stack slots of a function and mark them as
- not shared. */
static void
-reset_used_decls (tree blk)
+set_used_decls (tree blk)
{
tree t;
/* Mark decls. */
for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
if (DECL_RTL_SET_P (t))
- reset_used_flags (DECL_RTL (t));
+ set_used_flags (DECL_RTL (t));
/* Now process sub-blocks. */
for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
- reset_used_decls (t);
+ set_used_decls (t);
}
-/* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
- placed in the result directly, rather than being copied. MAY_SHARE is
- either a MEM of an EXPR_LIST of MEMs. */
+/* Mark ORIG as in use, and return a copy of it if it was already in use.
+ Recursively does the same for subexpressions. Uses
+ copy_rtx_if_shared_1 to reduce stack space. */
rtx
-copy_most_rtx (rtx orig, rtx may_share)
+copy_rtx_if_shared (rtx orig)
{
- rtx copy;
- int i, j;
- RTX_CODE code;
+ copy_rtx_if_shared_1 (&orig);
+ return orig;
+}
+
+/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
+ use. Recursively does the same for subexpressions. */
+
+static void
+copy_rtx_if_shared_1 (rtx *orig1)
+{
+ rtx x;
+ int i;
+ enum rtx_code code;
+ rtx *last_ptr;
const char *format_ptr;
+ int copied = 0;
+ int length;
- if (orig == may_share
- || (GET_CODE (may_share) == EXPR_LIST
- && in_expr_list_p (may_share, orig)))
- return orig;
+ /* Repeat is used to turn tail-recursion into iteration. */
+repeat:
+ x = *orig1;
- code = GET_CODE (orig);
-
- switch (code)
- {
- case REG:
- case CONST_INT:
- case CONST_DOUBLE:
- case CONST_VECTOR:
- case SYMBOL_REF:
- case CODE_LABEL:
- case PC:
- case CC0:
- return orig;
- default:
- break;
- }
-
- copy = rtx_alloc (code);
- PUT_MODE (copy, GET_MODE (orig));
- RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
- RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
- RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
- RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
- RTX_FLAG (copy, return_val) = RTX_FLAG (orig, return_val);
-
- format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
-
- for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
- {
- switch (*format_ptr++)
- {
- case 'e':
- XEXP (copy, i) = XEXP (orig, i);
- if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
- XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
- break;
-
- case 'u':
- XEXP (copy, i) = XEXP (orig, i);
- break;
-
- case 'E':
- case 'V':
- XVEC (copy, i) = XVEC (orig, i);
- if (XVEC (orig, i) != NULL)
- {
- XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
- for (j = 0; j < XVECLEN (copy, i); j++)
- XVECEXP (copy, i, j)
- = copy_most_rtx (XVECEXP (orig, i, j), may_share);
- }
- break;
-
- case 'w':
- XWINT (copy, i) = XWINT (orig, i);
- break;
-
- case 'n':
- case 'i':
- XINT (copy, i) = XINT (orig, i);
- break;
-
- case 't':
- XTREE (copy, i) = XTREE (orig, i);
- break;
-
- case 's':
- case 'S':
- XSTR (copy, i) = XSTR (orig, i);
- break;
-
- case '0':
- X0ANY (copy, i) = X0ANY (orig, i);
- break;
-
- default:
- abort ();
- }
- }
- return copy;
-}
-
-/* Mark ORIG as in use, and return a copy of it if it was already in use.
- Recursively does the same for subexpressions. Uses
- copy_rtx_if_shared_1 to reduce stack space. */
-
-rtx
-copy_rtx_if_shared (rtx orig)
-{
- copy_rtx_if_shared_1 (&orig);
- return orig;
-}
-
-/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
- use. Recursively does the same for subexpressions. */
-
-static void
-copy_rtx_if_shared_1 (rtx *orig1)
-{
- rtx x;
- int i;
- enum rtx_code code;
- rtx *last_ptr;
- const char *format_ptr;
- int copied = 0;
- int length;
-
- /* Repeat is used to turn tail-recursion into iteration. */
-repeat:
- x = *orig1;
-
- if (x == 0)
- return;
+ if (x == 0)
+ return;
code = GET_CODE (x);
case REG:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
break;
case CONST:
- /* CONST can be shared if it contains a SYMBOL_REF. If it contains
- a LABEL_REF, it isn't sharable. */
- if (GET_CODE (XEXP (x, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
- && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
+ if (shared_const_p (x))
return;
break;
if (RTX_FLAG (x, used))
{
- rtx copy;
-
- copy = rtx_alloc (code);
- memcpy (copy, x, RTX_SIZE (code));
- x = copy;
+ x = shallow_copy_rtx (x);
copied = 1;
}
RTX_FLAG (x, used) = 1;
case REG:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case SYMBOL_REF:
case CODE_LABEL:
case REG:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case SYMBOL_REF:
case CODE_LABEL:
void
set_first_insn (rtx insn)
{
- if (PREV_INSN (insn) != 0)
- abort ();
+ gcc_assert (!PREV_INSN (insn));
first_insn = insn;
}
void
set_last_insn (rtx insn)
{
- if (NEXT_INSN (insn) != 0)
- abort ();
+ gcc_assert (!NEXT_INSN (insn));
last_insn = insn;
}
{
rtx insn = first_insn;
- while (insn)
+ if (insn)
{
- insn = next_insn (insn);
- if (insn == 0 || !NOTE_P (insn))
- break;
+ if (NOTE_P (insn))
+ for (insn = next_insn (insn);
+ insn && NOTE_P (insn);
+ insn = next_insn (insn))
+ continue;
+ else
+ {
+ if (NONJUMP_INSN_P (insn)
+ && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ insn = XVECEXP (PATTERN (insn), 0, 0);
+ }
}
return insn;
{
rtx insn = last_insn;
- while (insn)
+ if (insn)
{
- insn = previous_insn (insn);
- if (insn == 0 || !NOTE_P (insn))
- break;
+ if (NOTE_P (insn))
+ for (insn = previous_insn (insn);
+ insn && NOTE_P (insn);
+ insn = previous_insn (insn))
+ continue;
+ else
+ {
+ if (NONJUMP_INSN_P (insn)
+ && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ insn = XVECEXP (PATTERN (insn), 0,
+ XVECLEN (PATTERN (insn), 0) - 1);
+ }
}
return insn;
{
return cur_insn_uid;
}
-
-/* Renumber instructions so that no instruction UIDs are wasted. */
-
-void
-renumber_insns (FILE *stream)
-{
- rtx insn;
-
- /* If we're not supposed to renumber instructions, don't. */
- if (!flag_renumber_insns)
- return;
-
- /* If there aren't that many instructions, then it's not really
- worth renumbering them. */
- if (flag_renumber_insns == 1 && get_max_uid () < 25000)
- return;
-
- cur_insn_uid = 1;
-
- for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- {
- if (stream)
- fprintf (stream, "Renumbering insn %d to %d\n",
- INSN_UID (insn), cur_insn_uid);
- INSN_UID (insn) = cur_insn_uid++;
- }
-}
\f
/* Return the next insn. If it is a SEQUENCE, return the first insn
of the sequence. */
same as next_real_insn. */
int
-active_insn_p (rtx insn)
+active_insn_p (const_rtx insn)
{
return (CALL_P (insn) || JUMP_P (insn)
|| (NONJUMP_INSN_P (insn)
return XEXP (note, 0);
insn = prev_nonnote_insn (insn);
- if (! sets_cc0_p (PATTERN (insn)))
- abort ();
+ gcc_assert (sets_cc0_p (PATTERN (insn)));
return insn;
}
#endif
+#ifdef AUTO_INC_DEC
+/* Find a RTX_AUTOINC class rtx which matches DATA. */
+
+static int
+find_auto_inc (rtx *xp, void *data)
+{
+ rtx x = *xp;
+ rtx reg = data;
+
+ if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
+ return 0;
+
+ switch (GET_CODE (x))
+ {
+ case PRE_DEC:
+ case PRE_INC:
+ case POST_DEC:
+ case POST_INC:
+ case PRE_MODIFY:
+ case POST_MODIFY:
+ if (rtx_equal_p (reg, XEXP (x, 0)))
+ return 1;
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+ return -1;
+}
+#endif
+
/* Increment the label uses for all labels present in rtx. */
static void
rtx before = PREV_INSN (trial);
rtx after = NEXT_INSN (trial);
int has_barrier = 0;
- rtx tem;
+ rtx tem, note_retval;
rtx note, seq;
int probability;
rtx insn_last, insn;
insn_last = NEXT_INSN (insn_last);
}
+ /* We will be adding the new sequence to the function. The splitters
+ may have introduced invalid RTL sharing, so unshare the sequence now. */
+ unshare_all_rtl_in_chain (seq);
+
/* Mark labels. */
for (insn = insn_last; insn ; insn = PREV_INSN (insn))
{
one jump is created, otherwise the machine description
is responsible for this step using
split_branch_probability variable. */
- if (njumps != 1)
- abort ();
+ gcc_assert (njumps == 1);
REG_NOTES (insn)
= gen_rtx_EXPR_LIST (REG_BR_PROB,
GEN_INT (probability),
switch (REG_NOTE_KIND (note))
{
case REG_EH_REGION:
- insn = insn_last;
- while (insn != NULL_RTX)
+ for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
{
if (CALL_P (insn)
- || (flag_non_call_exceptions
+ || (flag_non_call_exceptions && INSN_P (insn)
&& may_trap_p (PATTERN (insn))))
REG_NOTES (insn)
= gen_rtx_EXPR_LIST (REG_EH_REGION,
XEXP (note, 0),
REG_NOTES (insn));
- insn = PREV_INSN (insn);
}
break;
case REG_NORETURN:
case REG_SETJMP:
- case REG_ALWAYS_RETURN:
- insn = insn_last;
- while (insn != NULL_RTX)
+ for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
{
if (CALL_P (insn))
REG_NOTES (insn)
= gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
XEXP (note, 0),
REG_NOTES (insn));
- insn = PREV_INSN (insn);
}
break;
case REG_NON_LOCAL_GOTO:
- insn = insn_last;
- while (insn != NULL_RTX)
+ for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
{
if (JUMP_P (insn))
REG_NOTES (insn)
= gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
XEXP (note, 0),
REG_NOTES (insn));
- insn = PREV_INSN (insn);
}
break;
+#ifdef AUTO_INC_DEC
+ case REG_INC:
+ for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
+ {
+ rtx reg = XEXP (note, 0);
+ if (!FIND_REG_INC_NOTE (insn, reg)
+ && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg,
+ REG_NOTES (insn));
+ }
+ break;
+#endif
+
+ case REG_LIBCALL:
+ /* Relink the insns carrying the REG_LIBCALL and REG_RETVAL notes
+ after the split. */
+ REG_NOTES (insn_last)
+ = gen_rtx_INSN_LIST (REG_LIBCALL,
+ XEXP (note, 0),
+ REG_NOTES (insn_last));
+
+ note_retval = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL);
+ XEXP (note_retval, 0) = insn_last;
+ break;
+
default:
break;
}
/* If there are LABELS inside the split insns increment the
usage count so we don't delete the label. */
- if (NONJUMP_INSN_P (trial))
+ if (INSN_P (trial))
{
insn = insn_last;
while (insn != NULL_RTX)
{
+ /* JUMP_P insns have already been "marked" above. */
if (NONJUMP_INSN_P (insn))
mark_label_nuses (PATTERN (insn));
INSN_UID (insn) = cur_insn_uid++;
PATTERN (insn) = pattern;
INSN_CODE (insn) = -1;
- LOG_LINKS (insn) = NULL;
REG_NOTES (insn) = NULL;
- INSN_LOCATOR (insn) = 0;
+ INSN_LOCATOR (insn) = curr_insn_locator ();
BLOCK_FOR_INSN (insn) = NULL;
#ifdef ENABLE_RTL_CHECKING
|| (GET_CODE (insn) == SET
&& SET_DEST (insn) == pc_rtx)))
{
- warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
+ warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
debug_rtx (insn);
}
#endif
/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
-static rtx
+rtx
make_jump_insn_raw (rtx pattern)
{
rtx insn;
PATTERN (insn) = pattern;
INSN_CODE (insn) = -1;
- LOG_LINKS (insn) = NULL;
REG_NOTES (insn) = NULL;
JUMP_LABEL (insn) = NULL;
- INSN_LOCATOR (insn) = 0;
+ INSN_LOCATOR (insn) = curr_insn_locator ();
BLOCK_FOR_INSN (insn) = NULL;
return insn;
PATTERN (insn) = pattern;
INSN_CODE (insn) = -1;
- LOG_LINKS (insn) = NULL;
REG_NOTES (insn) = NULL;
CALL_INSN_FUNCTION_USAGE (insn) = NULL;
- INSN_LOCATOR (insn) = 0;
+ INSN_LOCATOR (insn) = curr_insn_locator ();
BLOCK_FOR_INSN (insn) = NULL;
return insn;
SEQUENCE. */
void
-add_insn_after (rtx insn, rtx after)
+add_insn_after (rtx insn, rtx after, basic_block bb)
{
rtx next = NEXT_INSN (after);
- basic_block bb;
- if (optimize && INSN_DELETED_P (after))
- abort ();
+ gcc_assert (!optimize || !INSN_DELETED_P (after));
NEXT_INSN (insn) = next;
PREV_INSN (insn) = after;
break;
}
- if (stack == 0)
- abort ();
+ gcc_assert (stack);
}
if (!BARRIER_P (after)
{
set_block_for_insn (insn, bb);
if (INSN_P (insn))
- bb->flags |= BB_DIRTY;
+ df_insn_rescan (insn);
/* Should not happen as first in the BB is always
either NOTE or LABEL. */
if (BB_END (bb) == after
/* Avoid clobbering of structure when creating new BB. */
&& !BARRIER_P (insn)
- && (!NOTE_P (insn)
- || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
+ && !NOTE_INSN_BASIC_BLOCK_P (insn))
BB_END (bb) = insn;
}
}
/* Add INSN into the doubly-linked list before insn BEFORE. This and
- the previous should be the only functions called to insert an insn once
- delay slots have been filled since only they know how to update a
- SEQUENCE. */
+ the previous should be the only functions called to insert an insn
+ once delay slots have been filled since only they know how to
+ update a SEQUENCE. If BB is NULL, an attempt is made to infer the
+ basic block from BEFORE. */
void
-add_insn_before (rtx insn, rtx before)
+add_insn_before (rtx insn, rtx before, basic_block bb)
{
rtx prev = PREV_INSN (before);
- basic_block bb;
- if (optimize && INSN_DELETED_P (before))
- abort ();
+ gcc_assert (!optimize || !INSN_DELETED_P (before));
PREV_INSN (insn) = prev;
NEXT_INSN (insn) = before;
break;
}
- if (stack == 0)
- abort ();
+ gcc_assert (stack);
}
- if (!BARRIER_P (before)
- && !BARRIER_P (insn)
- && (bb = BLOCK_FOR_INSN (before)))
+ if (!bb
+ && !BARRIER_P (before)
+ && !BARRIER_P (insn))
+ bb = BLOCK_FOR_INSN (before);
+
+ if (bb)
{
set_block_for_insn (insn, bb);
if (INSN_P (insn))
- bb->flags |= BB_DIRTY;
- /* Should not happen as first in the BB is always
- either NOTE or LABEl. */
- if (BB_HEAD (bb) == insn
- /* Avoid clobbering of structure when creating new BB. */
- && !BARRIER_P (insn)
- && (!NOTE_P (insn)
- || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
- abort ();
+ df_insn_rescan (insn);
+ /* Should not happen as first in the BB is always either NOTE or
+ LABEL. */
+ gcc_assert (BB_HEAD (bb) != insn
+ /* Avoid clobbering of structure when creating new BB. */
+ || BARRIER_P (insn)
+ || NOTE_INSN_BASIC_BLOCK_P (insn));
}
PREV_INSN (before) = insn;
PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}
+
+/* Replace INSN with a deleted instruction note. */
+
+void
+set_insn_deleted (rtx insn)
+{
+ df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
+ PUT_CODE (insn, NOTE);
+ NOTE_KIND (insn) = NOTE_INSN_DELETED;
+}
+
+
/* Remove an insn from its doubly-linked list. This function knows how
to handle sequences. */
void
rtx prev = PREV_INSN (insn);
basic_block bb;
+ /* Later in the code, the block will be marked dirty. */
+ df_insn_delete (NULL, INSN_UID (insn));
+
if (prev)
{
NEXT_INSN (prev) = next;
break;
}
- if (stack == 0)
- abort ();
+ gcc_assert (stack);
}
if (next)
break;
}
- if (stack == 0)
- abort ();
+ gcc_assert (stack);
}
if (!BARRIER_P (insn)
&& (bb = BLOCK_FOR_INSN (insn)))
{
if (INSN_P (insn))
- bb->flags |= BB_DIRTY;
+ df_set_bb_dirty (bb);
if (BB_HEAD (bb) == insn)
{
/* Never ever delete the basic block note without deleting whole
basic block. */
- if (NOTE_P (insn))
- abort ();
+ gcc_assert (!NOTE_P (insn));
BB_HEAD (bb) = next;
}
if (BB_END (bb) == insn)
void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
- if (! call_insn || !CALL_P (call_insn))
- abort ();
+ gcc_assert (call_insn && CALL_P (call_insn));
/* Put the register usage information on the CALL. If there is already
some usage information, put ours at the end. */
&& (bb = BLOCK_FOR_INSN (after)))
{
rtx x;
- bb->flags |= BB_DIRTY;
+ df_set_bb_dirty (bb);
if (!BARRIER_P (from)
&& (bb2 = BLOCK_FOR_INSN (from)))
{
if (BB_END (bb2) == to)
BB_END (bb2) = prev;
- bb2->flags |= BB_DIRTY;
+ df_set_bb_dirty (bb2);
}
if (BB_END (bb) == after)
for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
if (!BARRIER_P (x))
- set_block_for_insn (x, bb);
- }
-}
-
-/* Return the line note insn preceding INSN. */
-
-static rtx
-find_line_note (rtx insn)
-{
- if (no_line_numbers)
- return 0;
-
- for (; insn; insn = PREV_INSN (insn))
- if (NOTE_P (insn)
- && NOTE_LINE_NUMBER (insn) >= 0)
- break;
-
- return insn;
-}
-
-/* Remove unnecessary notes from the instruction stream. */
-
-void
-remove_unnecessary_notes (void)
-{
- rtx block_stack = NULL_RTX;
- rtx eh_stack = NULL_RTX;
- rtx insn;
- rtx next;
- rtx tmp;
-
- /* We must not remove the first instruction in the function because
- the compiler depends on the first instruction being a note. */
- for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
- {
- /* Remember what's next. */
- next = NEXT_INSN (insn);
-
- /* We're only interested in notes. */
- if (!NOTE_P (insn))
- continue;
-
- switch (NOTE_LINE_NUMBER (insn))
- {
- case NOTE_INSN_DELETED:
- case NOTE_INSN_LOOP_END_TOP_COND:
- remove_insn (insn);
- break;
-
- case NOTE_INSN_EH_REGION_BEG:
- eh_stack = alloc_INSN_LIST (insn, eh_stack);
- break;
-
- case NOTE_INSN_EH_REGION_END:
- /* Too many end notes. */
- if (eh_stack == NULL_RTX)
- abort ();
- /* Mismatched nesting. */
- if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
- abort ();
- tmp = eh_stack;
- eh_stack = XEXP (eh_stack, 1);
- free_INSN_LIST_node (tmp);
- break;
-
- case NOTE_INSN_BLOCK_BEG:
- /* By now, all notes indicating lexical blocks should have
- NOTE_BLOCK filled in. */
- if (NOTE_BLOCK (insn) == NULL_TREE)
- abort ();
- block_stack = alloc_INSN_LIST (insn, block_stack);
- break;
-
- case NOTE_INSN_BLOCK_END:
- /* Too many end notes. */
- if (block_stack == NULL_RTX)
- abort ();
- /* Mismatched nesting. */
- if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
- abort ();
- tmp = block_stack;
- block_stack = XEXP (block_stack, 1);
- free_INSN_LIST_node (tmp);
-
- /* Scan back to see if there are any non-note instructions
- between INSN and the beginning of this block. If not,
- then there is no PC range in the generated code that will
- actually be in this block, so there's no point in
- remembering the existence of the block. */
- for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
- {
- /* This block contains a real instruction. Note that we
- don't include labels; if the only thing in the block
- is a label, then there are still no PC values that
- lie within the block. */
- if (INSN_P (tmp))
- break;
-
- /* We're only interested in NOTEs. */
- if (!NOTE_P (tmp))
- continue;
-
- if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
- {
- /* We just verified that this BLOCK matches us with
- the block_stack check above. Never delete the
- BLOCK for the outermost scope of the function; we
- can refer to names from that scope even if the
- block notes are messed up. */
- if (! is_body_block (NOTE_BLOCK (insn))
- && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
- {
- remove_insn (tmp);
- remove_insn (insn);
- }
- break;
- }
- else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
- /* There's a nested block. We need to leave the
- current block in place since otherwise the debugger
- wouldn't be able to show symbols from our block in
- the nested block. */
- break;
- }
- }
+ {
+ set_block_for_insn (x, bb);
+ df_insn_change_bb (x);
+ }
}
-
- /* Too many begin notes. */
- if (block_stack || eh_stack)
- abort ();
}
\f
/* Make X be output before the instruction BEFORE. */
rtx
-emit_insn_before (rtx x, rtx before)
+emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
{
rtx last = before;
rtx insn;
-#ifdef ENABLE_RTL_CHECKING
- if (before == NULL_RTX)
- abort ();
-#endif
+ gcc_assert (before);
if (x == NULL_RTX)
return last;
while (insn)
{
rtx next = NEXT_INSN (insn);
- add_insn_before (insn, before);
+ add_insn_before (insn, before, bb);
last = insn;
insn = next;
}
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
default:
last = make_insn_raw (x);
- add_insn_before (last, before);
+ add_insn_before (last, before, bb);
break;
}
and output it before the instruction BEFORE. */
rtx
-emit_jump_insn_before (rtx x, rtx before)
+emit_jump_insn_before_noloc (rtx x, rtx before)
{
rtx insn, last = NULL_RTX;
-#ifdef ENABLE_RTL_CHECKING
- if (before == NULL_RTX)
- abort ();
-#endif
+ gcc_assert (before);
switch (GET_CODE (x))
{
while (insn)
{
rtx next = NEXT_INSN (insn);
- add_insn_before (insn, before);
+ add_insn_before (insn, before, NULL);
last = insn;
insn = next;
}
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
default:
last = make_jump_insn_raw (x);
- add_insn_before (last, before);
+ add_insn_before (last, before, NULL);
break;
}
and output it before the instruction BEFORE. */
rtx
-emit_call_insn_before (rtx x, rtx before)
+emit_call_insn_before_noloc (rtx x, rtx before)
{
rtx last = NULL_RTX, insn;
-#ifdef ENABLE_RTL_CHECKING
- if (before == NULL_RTX)
- abort ();
-#endif
+ gcc_assert (before);
switch (GET_CODE (x))
{
while (insn)
{
rtx next = NEXT_INSN (insn);
- add_insn_before (insn, before);
+ add_insn_before (insn, before, NULL);
last = insn;
insn = next;
}
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
default:
last = make_call_insn_raw (x);
- add_insn_before (last, before);
+ add_insn_before (last, before, NULL);
break;
}
INSN_UID (insn) = cur_insn_uid++;
- add_insn_before (insn, before);
+ add_insn_before (insn, before, NULL);
return insn;
}
if (INSN_UID (label) == 0)
{
INSN_UID (label) = cur_insn_uid++;
- add_insn_before (label, before);
+ add_insn_before (label, before, NULL);
}
return label;
/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
rtx
-emit_note_before (int subtype, rtx before)
+emit_note_before (enum insn_note subtype, rtx before)
{
rtx note = rtx_alloc (NOTE);
INSN_UID (note) = cur_insn_uid++;
-#ifndef USE_MAPPED_LOCATION
- NOTE_SOURCE_FILE (note) = 0;
-#endif
- NOTE_LINE_NUMBER (note) = subtype;
+ NOTE_KIND (note) = subtype;
BLOCK_FOR_INSN (note) = NULL;
+ memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
- add_insn_before (note, before);
+ add_insn_before (note, before, NULL);
return note;
}
\f
/* Helper for emit_insn_after, handles lists of instructions
efficiently. */
-static rtx emit_insn_after_1 (rtx, rtx);
-
static rtx
-emit_insn_after_1 (rtx first, rtx after)
+emit_insn_after_1 (rtx first, rtx after, basic_block bb)
{
rtx last;
rtx after_after;
- basic_block bb;
+ if (!bb && !BARRIER_P (after))
+ bb = BLOCK_FOR_INSN (after);
- if (!BARRIER_P (after)
- && (bb = BLOCK_FOR_INSN (after)))
+ if (bb)
{
- bb->flags |= BB_DIRTY;
+ df_set_bb_dirty (bb);
for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
if (!BARRIER_P (last))
- set_block_for_insn (last, bb);
+ {
+ set_block_for_insn (last, bb);
+ df_insn_rescan (last);
+ }
if (!BARRIER_P (last))
- set_block_for_insn (last, bb);
+ {
+ set_block_for_insn (last, bb);
+ df_insn_rescan (last);
+ }
if (BB_END (bb) == after)
BB_END (bb) = last;
}
return last;
}
-/* Make X be output after the insn AFTER. */
+/* Make X be output after the insn AFTER and set its basic block to BB.
+   If BB is NULL, an attempt is made to infer the block from AFTER. */
rtx
-emit_insn_after (rtx x, rtx after)
+emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
rtx last = after;
-#ifdef ENABLE_RTL_CHECKING
- if (after == NULL_RTX)
- abort ();
-#endif
+ gcc_assert (after);
if (x == NULL_RTX)
return last;
case CODE_LABEL:
case BARRIER:
case NOTE:
- last = emit_insn_after_1 (x, after);
+ last = emit_insn_after_1 (x, after, bb);
break;
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
default:
last = make_insn_raw (x);
- add_insn_after (last, after);
+ add_insn_after (last, after, bb);
break;
}
return last;
}
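
/* A minimal usage sketch (editorial, not part of the patch; EXAMPLE_PAT
   and EXAMPLE_AFTER are hypothetical): the new third argument lets a
   caller name the basic block explicitly instead of relying on AFTER.  */
static void
example_emit_in_block (rtx example_pat, rtx example_after, basic_block bb)
{
  /* With BB == NULL, emit_insn_after_1 infers the block from
     EXAMPLE_AFTER unless it is a barrier.  */
  emit_insn_after_noloc (example_pat, example_after, bb);
}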
-/* Similar to emit_insn_after, except that line notes are to be inserted so
- as to act as if this insn were at FROM. */
-
-void
-emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
-{
- rtx from_line = find_line_note (from);
- rtx after_line = find_line_note (after);
- rtx insn = emit_insn_after (x, after);
-
- if (from_line)
- emit_note_copy_after (from_line, after);
-
- if (after_line)
- emit_note_copy_after (after_line, insn);
-}
/* Make an insn of code JUMP_INSN with body X
and output it after the insn AFTER. */
rtx
-emit_jump_insn_after (rtx x, rtx after)
+emit_jump_insn_after_noloc (rtx x, rtx after)
{
rtx last;
-#ifdef ENABLE_RTL_CHECKING
- if (after == NULL_RTX)
- abort ();
-#endif
+ gcc_assert (after);
switch (GET_CODE (x))
{
case CODE_LABEL:
case BARRIER:
case NOTE:
- last = emit_insn_after_1 (x, after);
+ last = emit_insn_after_1 (x, after, NULL);
break;
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
default:
last = make_jump_insn_raw (x);
- add_insn_after (last, after);
+ add_insn_after (last, after, NULL);
break;
}
and output it after the instruction AFTER. */
rtx
-emit_call_insn_after (rtx x, rtx after)
+emit_call_insn_after_noloc (rtx x, rtx after)
{
rtx last;
-#ifdef ENABLE_RTL_CHECKING
- if (after == NULL_RTX)
- abort ();
-#endif
+ gcc_assert (after);
switch (GET_CODE (x))
{
case CODE_LABEL:
case BARRIER:
case NOTE:
- last = emit_insn_after_1 (x, after);
+ last = emit_insn_after_1 (x, after, NULL);
break;
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
default:
last = make_call_insn_raw (x);
- add_insn_after (last, after);
+ add_insn_after (last, after, NULL);
break;
}
INSN_UID (insn) = cur_insn_uid++;
- add_insn_after (insn, after);
+ add_insn_after (insn, after, NULL);
return insn;
}
if (INSN_UID (label) == 0)
{
INSN_UID (label) = cur_insn_uid++;
- add_insn_after (label, after);
+ add_insn_after (label, after, NULL);
}
return label;
/* Emit a note of subtype SUBTYPE after the insn AFTER. */
rtx
-emit_note_after (int subtype, rtx after)
+emit_note_after (enum insn_note subtype, rtx after)
{
rtx note = rtx_alloc (NOTE);
INSN_UID (note) = cur_insn_uid++;
-#ifndef USE_MAPPED_LOCATION
- NOTE_SOURCE_FILE (note) = 0;
-#endif
- NOTE_LINE_NUMBER (note) = subtype;
- BLOCK_FOR_INSN (note) = NULL;
- add_insn_after (note, after);
- return note;
-}
-
-/* Emit a copy of note ORIG after the insn AFTER. */
-
-rtx
-emit_note_copy_after (rtx orig, rtx after)
-{
- rtx note;
-
- if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
- {
- cur_insn_uid++;
- return 0;
- }
-
- note = rtx_alloc (NOTE);
- INSN_UID (note) = cur_insn_uid++;
- NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
- NOTE_DATA (note) = NOTE_DATA (orig);
+ NOTE_KIND (note) = subtype;
BLOCK_FOR_INSN (note) = NULL;
- add_insn_after (note, after);
+ memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
+ add_insn_after (note, after, NULL);
return note;
}
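
/* A small sketch of the typed-note interface (editorial; INSN is
   hypothetical): note kinds are now enum insn_note values rather than
   raw ints, so emitting a marker note reads as follows.  */
static void
example_mark_deleted (rtx insn)
{
  emit_note_after (NOTE_INSN_DELETED, insn);
}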
\f
-/* Like emit_insn_after, but set INSN_LOCATOR according to SCOPE. */
+/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
rtx
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
- rtx last = emit_insn_after (pattern, after);
+ rtx last = emit_insn_after_noloc (pattern, after, NULL);
- if (pattern == NULL_RTX)
+ if (pattern == NULL_RTX || !loc)
return last;
after = NEXT_INSN (after);
while (1)
{
- if (active_insn_p (after))
+ if (active_insn_p (after) && !INSN_LOCATOR (after))
INSN_LOCATOR (after) = loc;
if (after == last)
break;
return last;
}
-/* Like emit_jump_insn_after, but set INSN_LOCATOR according to SCOPE. */
+/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
+rtx
+emit_insn_after (rtx pattern, rtx after)
+{
+ if (INSN_P (after))
+ return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
+ else
+ return emit_insn_after_noloc (pattern, after, NULL);
+}
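
/* Usage sketch for the new wrapper (editorial; PAT and AFTER are
   hypothetical): callers get location propagation for free.  */
static rtx
example_emit_after (rtx pat, rtx after)
{
  /* When AFTER is a real insn this is equivalent to
     emit_insn_after_setloc (pat, after, INSN_LOCATOR (after)).  */
  return emit_insn_after (pat, after);
}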
+
+/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
rtx
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
- rtx last = emit_jump_insn_after (pattern, after);
+ rtx last = emit_jump_insn_after_noloc (pattern, after);
- if (pattern == NULL_RTX)
+ if (pattern == NULL_RTX || !loc)
return last;
after = NEXT_INSN (after);
while (1)
{
- if (active_insn_p (after))
+ if (active_insn_p (after) && !INSN_LOCATOR (after))
INSN_LOCATOR (after) = loc;
if (after == last)
break;
return last;
}
-/* Like emit_call_insn_after, but set INSN_LOCATOR according to SCOPE. */
+/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
+rtx
+emit_jump_insn_after (rtx pattern, rtx after)
+{
+ if (INSN_P (after))
+ return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
+ else
+ return emit_jump_insn_after_noloc (pattern, after);
+}
+
+/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
rtx
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
- rtx last = emit_call_insn_after (pattern, after);
+ rtx last = emit_call_insn_after_noloc (pattern, after);
- if (pattern == NULL_RTX)
+ if (pattern == NULL_RTX || !loc)
return last;
after = NEXT_INSN (after);
while (1)
{
- if (active_insn_p (after))
+ if (active_insn_p (after) && !INSN_LOCATOR (after))
INSN_LOCATOR (after) = loc;
if (after == last)
break;
return last;
}
-/* Like emit_insn_before, but set INSN_LOCATOR according to SCOPE. */
+/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
+rtx
+emit_call_insn_after (rtx pattern, rtx after)
+{
+ if (INSN_P (after))
+ return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
+ else
+ return emit_call_insn_after_noloc (pattern, after);
+}
+
+/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
rtx
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
rtx first = PREV_INSN (before);
- rtx last = emit_insn_before (pattern, before);
+ rtx last = emit_insn_before_noloc (pattern, before, NULL);
- if (pattern == NULL_RTX)
+ if (pattern == NULL_RTX || !loc)
return last;
- first = NEXT_INSN (first);
+ if (!first)
+ first = get_insns ();
+ else
+ first = NEXT_INSN (first);
while (1)
{
- if (active_insn_p (first))
+ if (active_insn_p (first) && !INSN_LOCATOR (first))
INSN_LOCATOR (first) = loc;
if (first == last)
break;
}
return last;
}
+
+/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
+rtx
+emit_insn_before (rtx pattern, rtx before)
+{
+ if (INSN_P (before))
+ return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
+ else
+ return emit_insn_before_noloc (pattern, before, NULL);
+}
+
+/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
+rtx
+emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
+{
+ rtx first = PREV_INSN (before);
+ rtx last = emit_jump_insn_before_noloc (pattern, before);
+
+ if (pattern == NULL_RTX)
+ return last;
+
+ first = NEXT_INSN (first);
+ while (1)
+ {
+ if (active_insn_p (first) && !INSN_LOCATOR (first))
+ INSN_LOCATOR (first) = loc;
+ if (first == last)
+ break;
+ first = NEXT_INSN (first);
+ }
+ return last;
+}
+
+/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
+rtx
+emit_jump_insn_before (rtx pattern, rtx before)
+{
+ if (INSN_P (before))
+ return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
+ else
+ return emit_jump_insn_before_noloc (pattern, before);
+}
+
+/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
+rtx
+emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
+{
+ rtx first = PREV_INSN (before);
+ rtx last = emit_call_insn_before_noloc (pattern, before);
+
+ if (pattern == NULL_RTX)
+ return last;
+
+ first = NEXT_INSN (first);
+ while (1)
+ {
+ if (active_insn_p (first) && !INSN_LOCATOR (first))
+ INSN_LOCATOR (first) = loc;
+ if (first == last)
+ break;
+ first = NEXT_INSN (first);
+ }
+ return last;
+}
+
+/* Like emit_call_insn_before_noloc,
+   but set INSN_LOCATOR according to BEFORE. */
+rtx
+emit_call_insn_before (rtx pattern, rtx before)
+{
+ if (INSN_P (before))
+ return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
+ else
+ return emit_call_insn_before_noloc (pattern, before);
+}
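
/* The before-side wrappers mirror the after-side ones.  A hedged sketch
   (PAT, BEFORE, and LOC hypothetical): a pass that wants a specific
   scope rather than BEFORE's own location calls the _setloc variant.  */
static rtx
example_emit_before_in_scope (rtx pat, rtx before, int loc)
{
  return emit_insn_before_setloc (pat, before, loc);
}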
\f
/* Take X and emit it at the end of the doubly-linked
INSN list.
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
#ifdef ENABLE_RTL_CHECKING
case SEQUENCE:
- abort ();
+ gcc_unreachable ();
break;
#endif
return barrier;
}
-/* Make line numbering NOTE insn for LOCATION add it to the end
- of the doubly-linked list, but only if line-numbers are desired for
- debugging info and it doesn't match the previous one. */
-
-rtx
-emit_line_note (location_t location)
-{
- rtx note;
-
- set_file_and_line_for_stmt (location);
-
-#ifdef USE_MAPPED_LOCATION
- if (location == last_location)
- return NULL_RTX;
-#else
- if (location.file && last_location.file
- && !strcmp (location.file, last_location.file)
- && location.line == last_location.line)
- return NULL_RTX;
-#endif
- last_location = location;
-
- if (no_line_numbers)
- {
- cur_insn_uid++;
- return NULL_RTX;
- }
-
-#ifdef USE_MAPPED_LOCATION
- note = emit_note ((int) location);
-#else
- note = emit_note (location.line);
- NOTE_SOURCE_FILE (note) = location.file;
-#endif
-
- return note;
-}
-
/* Emit a copy of note ORIG. */
rtx
{
rtx note;
- if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
- {
- cur_insn_uid++;
- return NULL_RTX;
- }
-
note = rtx_alloc (NOTE);
INSN_UID (note) = cur_insn_uid++;
NOTE_DATA (note) = NOTE_DATA (orig);
- NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
+ NOTE_KIND (note) = NOTE_KIND (orig);
BLOCK_FOR_INSN (note) = NULL;
add_insn (note);
and add it to the end of the doubly-linked list. */
rtx
-emit_note (int note_no)
+emit_note (enum insn_note kind)
{
rtx note;
note = rtx_alloc (NOTE);
INSN_UID (note) = cur_insn_uid++;
- NOTE_LINE_NUMBER (note) = note_no;
+ NOTE_KIND (note) = kind;
memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
BLOCK_FOR_INSN (note) = NULL;
add_insn (note);
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
rtx note = find_reg_note (insn, kind, NULL_RTX);
+ rtx new_note = NULL;
switch (kind)
{
means the insn only has one * useful * set). */
if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
{
- if (note)
- abort ();
+ gcc_assert (!note);
return NULL_RTX;
}
It serves no useful purpose and breaks eliminate_regs. */
if (GET_CODE (datum) == ASM_OPERANDS)
return NULL_RTX;
+
+ if (note)
+ {
+ XEXP (note, 0) = datum;
+ df_notes_rescan (insn);
+ return note;
+ }
break;
default:
+ if (note)
+ {
+ XEXP (note, 0) = datum;
+ return note;
+ }
break;
}
- if (note)
+ new_note = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
+ REG_NOTES (insn) = new_note;
+
+ switch (kind)
{
- XEXP (note, 0) = datum;
- return note;
+ case REG_EQUAL:
+ case REG_EQUIV:
+ df_notes_rescan (insn);
+ break;
+ default:
+ break;
}
- REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
return REG_NOTES (insn);
}
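
/* A usage sketch (editorial; INSN and VAL hypothetical): REG_EQUAL and
   REG_EQUIV notes now feed the dataflow framework via df_notes_rescan.  */
static void
example_note_equal (rtx insn, rtx val)
{
  /* Replaces an existing REG_EQUAL note or prepends a new one; may
     return NULL_RTX for asm or multiple-set insns that cannot carry
     the note.  */
  set_unique_reg_note (insn, REG_EQUAL, val);
}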
\f
/* Return an indication of which type of insn should have X as a body.
The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
-enum rtx_code
+static enum rtx_code
classify_insn (rtx x)
{
if (LABEL_P (x))
{
enum rtx_code code = classify_insn (x);
- if (code == CODE_LABEL)
- return emit_label (x);
- else if (code == INSN)
- return emit_insn (x);
- else if (code == JUMP_INSN)
+ switch (code)
{
- rtx insn = emit_jump_insn (x);
- if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
- return emit_barrier ();
- return insn;
+ case CODE_LABEL:
+ return emit_label (x);
+ case INSN:
+ return emit_insn (x);
+ case JUMP_INSN:
+ {
+ rtx insn = emit_jump_insn (x);
+ if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
+ return emit_barrier ();
+ return insn;
+ }
+ case CALL_INSN:
+ return emit_call_insn (x);
+ default:
+ gcc_unreachable ();
}
- else if (code == CALL_INSN)
- return emit_call_insn (x);
- else
- abort ();
}
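
/* Dispatch sketch (editorial; DEST and SRC hypothetical): a plain
   register SET classifies as INSN, so emit routes it to emit_insn;
   a SET of the pc would classify as JUMP_INSN instead.  */
static rtx
example_emit_set (rtx dest, rtx src)
{
  return emit (gen_rtx_SET (VOIDmode, dest, src));
}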
\f
/* Space for free sequence stack entries. */
last_insn = last;
}
-/* Set up the insn chain from a chain stort in FIRST to LAST. */
+/* Like push_to_sequence, but take the last insn as an argument to avoid
+ looping through the list. */
void
-push_to_full_sequence (rtx first, rtx last)
+push_to_sequence2 (rtx first, rtx last)
{
start_sequence ();
+
first_insn = first;
last_insn = last;
- /* We really should have the end of the insn chain here. */
- if (last && NEXT_INSN (last))
- abort ();
}
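
/* Usage sketch (editorial; FIRST, LAST, and PAT hypothetical, LAST
   being the true end of the chain): a caller that tracks the chain's
   tail can skip the walk push_to_sequence performs.  */
static rtx
example_append_to_chain (rtx first, rtx last, rtx pat)
{
  rtx seq;
  push_to_sequence2 (first, last);
  emit_insn (pat);		/* Appends after LAST.  */
  seq = get_insns ();
  end_sequence ();
  return seq;
}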
/* Set up the outer-level insn chain
\f
/* Put the various virtual registers into REGNO_REG_RTX. */
-void
+static void
init_virtual_regs (struct emit_status *es)
{
rtx *ptr = es->x_regno_reg_rtx;
case REG:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case SYMBOL_REF:
case CODE_LABEL:
break;
case CONST:
- /* CONST can be shared if it contains a SYMBOL_REF. If it contains
- a LABEL_REF, it isn't sharable. */
- if (GET_CODE (XEXP (orig, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
- && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
+ if (shared_const_p (orig))
return orig;
break;
break;
}
- copy = rtx_alloc (code);
-
- /* Copy the various flags, and other information. We assume that
- all fields need copying, and then clear the fields that should
+ /* Copy the various flags, fields, and other information. We assume
+ that all fields need copying, and then clear the fields that should
not be copied. That is the sensible default behavior, and forces
us to explicitly document why we are *not* copying a flag. */
- memcpy (copy, orig, RTX_HDR_SIZE);
+ copy = shallow_copy_rtx (orig);
/* We do not copy the USED flag, which is used as a mark bit during
walks over the RTL. */
format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
- {
- copy->u.fld[i] = orig->u.fld[i];
- switch (*format_ptr++)
- {
- case 'e':
- if (XEXP (orig, i) != NULL)
- XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
- break;
+ switch (*format_ptr++)
+ {
+ case 'e':
+ if (XEXP (orig, i) != NULL)
+ XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
+ break;
- case 'E':
- case 'V':
- if (XVEC (orig, i) == orig_asm_constraints_vector)
- XVEC (copy, i) = copy_asm_constraints_vector;
- else if (XVEC (orig, i) == orig_asm_operands_vector)
- XVEC (copy, i) = copy_asm_operands_vector;
- else if (XVEC (orig, i) != NULL)
- {
- XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
- for (j = 0; j < XVECLEN (copy, i); j++)
- XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
- }
- break;
+ case 'E':
+ case 'V':
+ if (XVEC (orig, i) == orig_asm_constraints_vector)
+ XVEC (copy, i) = copy_asm_constraints_vector;
+ else if (XVEC (orig, i) == orig_asm_operands_vector)
+ XVEC (copy, i) = copy_asm_operands_vector;
+ else if (XVEC (orig, i) != NULL)
+ {
+ XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
+ for (j = 0; j < XVECLEN (copy, i); j++)
+ XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
+ }
+ break;
- case 't':
- case 'w':
- case 'i':
- case 's':
- case 'S':
- case 'u':
- case '0':
- /* These are left unchanged. */
- break;
+ case 't':
+ case 'w':
+ case 'i':
+ case 's':
+ case 'S':
+ case 'u':
+ case '0':
+ /* These are left unchanged. */
+ break;
- default:
- abort ();
- }
- }
+ default:
+ gcc_unreachable ();
+ }
if (code == SCRATCH)
{
i = copy_insn_n_scratches++;
- if (i >= MAX_RECOG_OPERANDS)
- abort ();
+ gcc_assert (i < MAX_RECOG_OPERANDS);
copy_insn_scratch_in[i] = orig;
copy_insn_scratch_out[i] = copy;
}
reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
last_location = UNKNOWN_LOCATION;
first_label_num = label_num;
- last_label_num = 0;
seq_stack = NULL;
/* Init the tables that describe all the pseudo regs. */
#endif
}
-/* Generate the constant 0. */
+/* Generate a vector constant for mode MODE and constant value CONSTANT. */
static rtx
-gen_const_vector_0 (enum machine_mode mode)
+gen_const_vector (enum machine_mode mode, int constant)
{
rtx tem;
rtvec v;
units = GET_MODE_NUNITS (mode);
inner = GET_MODE_INNER (mode);
+ gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
+
v = rtvec_alloc (units);
- /* We need to call this function after we to set CONST0_RTX first. */
- if (!CONST0_RTX (inner))
- abort ();
+ /* We need to call this function after we set the scalar const_tiny_rtx
+ entries. */
+ gcc_assert (const_tiny_rtx[constant][(int) inner]);
for (i = 0; i < units; ++i)
- RTVEC_ELT (v, i) = CONST0_RTX (inner);
+ RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
tem = gen_rtx_raw_CONST_VECTOR (mode, v);
return tem;
}
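
/* A sketch of what gen_const_vector (V2SImode, 0) produces, written in
   terms of the public raw constructor (editorial; V2SImode assumed to
   exist on the target).  */
static rtx
example_raw_zero_vector (void)
{
  rtvec v = rtvec_alloc (2);
  RTVEC_ELT (v, 0) = CONST0_RTX (SImode);
  RTVEC_ELT (v, 1) = CONST0_RTX (SImode);
  return gen_rtx_raw_CONST_VECTOR (V2SImode, v);
}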
/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
- all elements are zero. */
+ all elements are zero, and the one vector when all elements are one. */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
- rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
+ enum machine_mode inner = GET_MODE_INNER (mode);
+ int nunits = GET_MODE_NUNITS (mode);
+ rtx x;
int i;
- for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
- if (RTVEC_ELT (v, i) != inner_zero)
- return gen_rtx_raw_CONST_VECTOR (mode, v);
- return CONST0_RTX (mode);
+ /* Check to see if all of the elements have the same value. */
+ x = RTVEC_ELT (v, nunits - 1);
+ for (i = nunits - 2; i >= 0; i--)
+ if (RTVEC_ELT (v, i) != x)
+ break;
+
+ /* If the values are all the same, check to see if we can use one of the
+ standard constant vectors. */
+ if (i == -1)
+ {
+ if (x == CONST0_RTX (inner))
+ return CONST0_RTX (mode);
+ else if (x == CONST1_RTX (inner))
+ return CONST1_RTX (mode);
+ }
+
+ return gen_rtx_raw_CONST_VECTOR (mode, v);
+}
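
/* Folding sketch (editorial; V2SImode assumed): a vector whose lanes
   are all zero now comes back as the shared CONST0_RTX object rather
   than a fresh CONST_VECTOR.  */
static rtx
example_folded_zero_vector (void)
{
  rtvec v = rtvec_alloc (2);
  RTVEC_ELT (v, 0) = const0_rtx;
  RTVEC_ELT (v, 1) = const0_rtx;
  /* Returns CONST0_RTX (V2SImode), not gen_rtx_raw_CONST_VECTOR.  */
  return gen_rtx_CONST_VECTOR (V2SImode, v);
}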
+
+/* Initialise global register information required by all functions. */
+
+void
+init_emit_regs (void)
+{
+ int i;
+
+ /* Reset register attributes. */
+ htab_empty (reg_attrs_htab);
+
+ /* We need reg_raw_mode, so initialize the modes now. */
+ init_reg_modes_target ();
+
+ /* Assign register numbers to the globally defined register rtx. */
+ pc_rtx = gen_rtx_PC (VOIDmode);
+ cc0_rtx = gen_rtx_CC0 (VOIDmode);
+ stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
+ frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
+ hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
+ arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
+ virtual_incoming_args_rtx =
+ gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
+ virtual_stack_vars_rtx =
+ gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
+ virtual_stack_dynamic_rtx =
+ gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
+ virtual_outgoing_args_rtx =
+ gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
+ virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
+
+ /* Initialize RTL for commonly used hard registers. These are
+ copied into regno_reg_rtx as we begin to compile each function. */
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
+
+#ifdef RETURN_ADDRESS_POINTER_REGNUM
+ return_address_pointer_rtx
+ = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
+#endif
+
+#ifdef STATIC_CHAIN_REGNUM
+ static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
+
+#ifdef STATIC_CHAIN_INCOMING_REGNUM
+ if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
+ static_chain_incoming_rtx
+ = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
+ else
+#endif
+ static_chain_incoming_rtx = static_chain_rtx;
+#endif
+
+#ifdef STATIC_CHAIN
+ static_chain_rtx = STATIC_CHAIN;
+
+#ifdef STATIC_CHAIN_INCOMING
+ static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
+#else
+ static_chain_incoming_rtx = static_chain_rtx;
+#endif
+#endif
+
+ if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
+ pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
+ else
+ pic_offset_table_rtx = NULL_RTX;
}
/* Create some permanent unique rtl objects shared between all functions.
enum machine_mode mode;
enum machine_mode double_mode;
- /* We need reg_raw_mode, so initialize the modes now. */
- init_reg_modes_once ();
-
- /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
- tables. */
+ /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
+ hash tables. */
const_int_htab = htab_create_ggc (37, const_int_htab_hash,
const_int_htab_eq, NULL);
const_double_htab = htab_create_ggc (37, const_double_htab_hash,
const_double_htab_eq, NULL);
+ const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
+ const_fixed_htab_eq, NULL);
+
mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
mem_attrs_htab_eq, NULL);
reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
word_mode = VOIDmode;
double_mode = VOIDmode;
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
{
if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
word_mode = mode;
}
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
+ mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
{
if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
- /* Assign register numbers to the globally defined register rtx.
- This must be done at runtime because the register number field
- is in a union and some compilers can't initialize unions. */
-
- pc_rtx = gen_rtx_PC (VOIDmode);
- cc0_rtx = gen_rtx_CC0 (VOIDmode);
- stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
- frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
- if (hard_frame_pointer_rtx == 0)
- hard_frame_pointer_rtx = gen_raw_REG (Pmode,
- HARD_FRAME_POINTER_REGNUM);
- if (arg_pointer_rtx == 0)
- arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
- virtual_incoming_args_rtx =
- gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
- virtual_stack_vars_rtx =
- gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
- virtual_stack_dynamic_rtx =
- gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
- virtual_outgoing_args_rtx =
- gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
- virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
-
- /* Initialize RTL for commonly used hard registers. These are
- copied into regno_reg_rtx as we begin to compile each function. */
- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
-
#ifdef INIT_EXPANDERS
/* This is to initialize {init|mark|free}_machine_status before the first
call to push_function_context_to. This is needed by the Chill front
/* Initialize mathematical constants for constant folding builtins.
These constants need to be given to at least 160 bits of precision. */
- real_from_string (&dconstpi,
- "3.1415926535897932384626433832795028841971693993751058209749445923078");
+ real_from_string (&dconstsqrt2,
+ "1.4142135623730950488016887242096980785696718753769480731766797379907");
real_from_string (&dconste,
"2.7182818284590452353602874713526624977572470936999595749669676277241");
REAL_VALUE_TYPE *r =
(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ const_tiny_rtx[i][(int) mode] =
+ CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
+
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
+ mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
const_tiny_rtx[i][(int) mode] =
CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
const_tiny_rtx[i][(int) mode] = GEN_INT (i);
const_tiny_rtx[i][(int) mode] = GEN_INT (i);
}
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
+ const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
+ }
+
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
+ const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
+ }
+
for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
- const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
+ {
+ const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
+ const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
+ }
for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
- const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
+ {
+ const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
+ const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
+ }
- for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
- if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
- const_tiny_rtx[0][i] = const0_rtx;
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ FCONST0(mode).data.high = 0;
+ FCONST0(mode).data.low = 0;
+ FCONST0(mode).mode = mode;
+ const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
+ FCONST0 (mode), mode);
+ }
- const_tiny_rtx[0][(int) BImode] = const0_rtx;
- if (STORE_FLAG_VALUE == 1)
- const_tiny_rtx[1][(int) BImode] = const1_rtx;
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ FCONST0(mode).data.high = 0;
+ FCONST0(mode).data.low = 0;
+ FCONST0(mode).mode = mode;
+ const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
+ FCONST0 (mode), mode);
+ }
-#ifdef RETURN_ADDRESS_POINTER_REGNUM
- return_address_pointer_rtx
- = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
-#endif
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ FCONST0(mode).data.high = 0;
+ FCONST0(mode).data.low = 0;
+ FCONST0(mode).mode = mode;
+ const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
+ FCONST0 (mode), mode);
+
+ /* We store the value 1, i.e. 1 << FBIT in the mode's fixed-point encoding. */
+ FCONST1(mode).data.high = 0;
+ FCONST1(mode).data.low = 0;
+ FCONST1(mode).mode = mode;
+ lshift_double (1, 0, GET_MODE_FBIT (mode),
+ 2 * HOST_BITS_PER_WIDE_INT,
+ &FCONST1(mode).data.low,
+ &FCONST1(mode).data.high,
+ SIGNED_FIXED_POINT_MODE_P (mode));
+ const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
+ FCONST1 (mode), mode);
+ }
-#ifdef STATIC_CHAIN_REGNUM
- static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ FCONST0(mode).data.high = 0;
+ FCONST0(mode).data.low = 0;
+ FCONST0(mode).mode = mode;
+ const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
+ FCONST0 (mode), mode);
+
+ /* We store the value 1, i.e. 1 << FBIT in the mode's fixed-point encoding. */
+ FCONST1(mode).data.high = 0;
+ FCONST1(mode).data.low = 0;
+ FCONST1(mode).mode = mode;
+ lshift_double (1, 0, GET_MODE_FBIT (mode),
+ 2 * HOST_BITS_PER_WIDE_INT,
+ &FCONST1(mode).data.low,
+ &FCONST1(mode).data.high,
+ SIGNED_FIXED_POINT_MODE_P (mode));
+ const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
+ FCONST1 (mode), mode);
+ }
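
/* A worked instance of the shift above (editorial; assumes a mode with
   GET_MODE_FBIT (mode) == 15): lshift_double moves the integer 1 left
   by 15 bits, leaving FCONST1 (mode).data.low == 0x8000 and
   .data.high == 0, the fixed-point encoding of 1.0 in that mode.  */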
-#ifdef STATIC_CHAIN_INCOMING_REGNUM
- if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
- static_chain_incoming_rtx
- = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
- else
-#endif
- static_chain_incoming_rtx = static_chain_rtx;
-#endif
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
+ }
-#ifdef STATIC_CHAIN
- static_chain_rtx = STATIC_CHAIN;
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
+ }
-#ifdef STATIC_CHAIN_INCOMING
- static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
-#else
- static_chain_incoming_rtx = static_chain_rtx;
-#endif
-#endif
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
+ const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
+ }
- if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
- pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
+ const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
+ }
+
+ for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
+ if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
+ const_tiny_rtx[0][i] = const0_rtx;
+
+ const_tiny_rtx[0][(int) BImode] = const0_rtx;
+ if (STORE_FLAG_VALUE == 1)
+ const_tiny_rtx[1][(int) BImode] = const1_rtx;
}
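
/* Once init_emit_once has run, the const_tiny_rtx cache backs the
   CONSTn_RTX macros for every mode class initialized above.  A hedged
   sketch (modes assumed to exist on the target):  */
static void
example_tiny_constants (void)
{
  rtx cc_zero = CONST0_RTX (CCmode);	/* const0_rtx */
  rtx vec_one = CONST1_RTX (V4SImode);	/* (const_vector [1 1 1 1]) */
  (void) cc_zero;
  (void) vec_one;
}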
\f
/* Produce exact duplicate of insn INSN after AFTER.
break;
default:
- abort ();
+ gcc_unreachable ();
}
/* Update LABEL_NUSES. */
INSN_LOCATOR (new) = INSN_LOCATOR (insn);
- /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
- make them. */
+ /* If the old insn is frame related, then so is the new one. This is
+ primarily needed for IA-64 unwind info which marks epilogue insns,
+ which may be duplicated by the basic block reordering code. */
+ RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn);
+
+ /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
+ will make them. REG_LABEL_TARGETs are created there too, but are
+ supposed to be sticky, so we copy them. */
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
- if (REG_NOTE_KIND (link) != REG_LABEL)
+ if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
{
if (GET_CODE (link) == EXPR_LIST)
REG_NOTES (new)
- = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
- XEXP (link, 0),
- REG_NOTES (new)));
+ = gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
+ copy_insn_1 (XEXP (link, 0)), REG_NOTES (new));
else
REG_NOTES (new)
- = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
- XEXP (link, 0),
- REG_NOTES (new)));
+ = gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
+ XEXP (link, 0), REG_NOTES (new));
}
/* Fix the libcall sequences. */