X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Femit-rtl.c;h=f9b13470fe5fc791818424058660c705b88891eb;hb=a634298771087aa45566135c02aef7751a34dd2a;hp=e9d3213a49132f7459c3f960afcf62b60d85e276;hpb=0a81f5a04d914aa6a7803a9d37486d85bdfd3c03;p=pf3gnuchains%2Fgcc-fork.git diff --git a/gcc/emit-rtl.c b/gcc/emit-rtl.c index e9d3213a491..f9b13470fe5 100644 --- a/gcc/emit-rtl.c +++ b/gcc/emit-rtl.c @@ -1,12 +1,13 @@ /* Emit RTL for the GCC expander. Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, - 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc. + 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 + Free Software Foundation, Inc. This file is part of GCC. GCC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free -Software Foundation; either version 2, or (at your option) any later +Software Foundation; either version 3, or (at your option) any later version. GCC is distributed in the hope that it will be useful, but WITHOUT ANY @@ -15,9 +16,8 @@ FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License -along with GCC; see the file COPYING. If not, write to the Free -Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA -02110-1301, USA. */ +along with GCC; see the file COPYING3. If not see +. */ /* Middle-to-low level generation of rtx code and insns. @@ -50,11 +50,14 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA #include "insn-config.h" #include "recog.h" #include "real.h" +#include "fixed-value.h" #include "bitmap.h" #include "basic-block.h" #include "ggc.h" #include "debug.h" #include "langhooks.h" +#include "tree-pass.h" +#include "df.h" /* Commonly used modes. */ @@ -103,9 +106,13 @@ REAL_VALUE_TYPE dconstm1; REAL_VALUE_TYPE dconstm2; REAL_VALUE_TYPE dconsthalf; REAL_VALUE_TYPE dconstthird; -REAL_VALUE_TYPE dconstpi; +REAL_VALUE_TYPE dconstsqrt2; REAL_VALUE_TYPE dconste; +/* Record fixed-point constant 0 and 1. */ +FIXED_VALUE_TYPE fconst0[MAX_FCONST0]; +FIXED_VALUE_TYPE fconst1[MAX_FCONST1]; + /* All references to the following fixed hard registers go through these unique rtl objects. On machines where the frame-pointer and arg-pointer are the same register, they use the same unique object. @@ -157,27 +164,31 @@ static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs))) static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def))) htab_t const_double_htab; +/* A hash table storing all CONST_FIXEDs. 
*/ +static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def))) + htab_t const_fixed_htab; + #define first_insn (cfun->emit->x_first_insn) #define last_insn (cfun->emit->x_last_insn) #define cur_insn_uid (cfun->emit->x_cur_insn_uid) #define last_location (cfun->emit->x_last_location) #define first_label_num (cfun->emit->x_first_label_num) -static rtx make_jump_insn_raw (rtx); static rtx make_call_insn_raw (rtx); -static rtx find_line_note (rtx); static rtx change_address_1 (rtx, enum machine_mode, rtx, int); -static void unshare_all_decls (tree); -static void reset_used_decls (tree); +static void set_used_decls (tree); static void mark_label_nuses (rtx); static hashval_t const_int_htab_hash (const void *); static int const_int_htab_eq (const void *, const void *); static hashval_t const_double_htab_hash (const void *); static int const_double_htab_eq (const void *, const void *); static rtx lookup_const_double (rtx); +static hashval_t const_fixed_htab_hash (const void *); +static int const_fixed_htab_eq (const void *, const void *); +static rtx lookup_const_fixed (rtx); static hashval_t mem_attrs_htab_hash (const void *); static int mem_attrs_htab_eq (const void *, const void *); -static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int, +static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int, enum machine_mode); static hashval_t reg_attrs_htab_hash (const void *); static int reg_attrs_htab_eq (const void *, const void *); @@ -195,7 +206,7 @@ int split_branch_probability = -1; static hashval_t const_int_htab_hash (const void *x) { - return (hashval_t) INTVAL ((rtx) x); + return (hashval_t) INTVAL ((const_rtx) x); } /* Returns nonzero if the value represented by X (which is really a @@ -205,14 +216,14 @@ const_int_htab_hash (const void *x) static int const_int_htab_eq (const void *x, const void *y) { - return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y)); + return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y)); } /* Returns a hash code for X (which is really a CONST_DOUBLE). */ static hashval_t const_double_htab_hash (const void *x) { - rtx value = (rtx) x; + const_rtx const value = (const_rtx) x; hashval_t h; if (GET_MODE (value) == VOIDmode) @@ -231,7 +242,7 @@ const_double_htab_hash (const void *x) static int const_double_htab_eq (const void *x, const void *y) { - rtx a = (rtx)x, b = (rtx)y; + const_rtx const a = (const_rtx)x, b = (const_rtx)y; if (GET_MODE (a) != GET_MODE (b)) return 0; @@ -243,17 +254,44 @@ const_double_htab_eq (const void *x, const void *y) CONST_DOUBLE_REAL_VALUE (b)); } +/* Returns a hash code for X (which is really a CONST_FIXED). */ + +static hashval_t +const_fixed_htab_hash (const void *x) +{ + const_rtx const value = (const_rtx) x; + hashval_t h; + + h = fixed_hash (CONST_FIXED_VALUE (value)); + /* MODE is used in the comparison, so it should be in the hash. */ + h ^= GET_MODE (value); + return h; +} + +/* Returns nonzero if the value represented by X (really a ...) + is the same as that represented by Y (really a ...). */ + +static int +const_fixed_htab_eq (const void *x, const void *y) +{ + const_rtx const a = (const_rtx) x, b = (const_rtx) y; + + if (GET_MODE (a) != GET_MODE (b)) + return 0; + return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b)); +} + /* Returns a hash code for X (which is a really a mem_attrs *). 
*/ static hashval_t mem_attrs_htab_hash (const void *x) { - mem_attrs *p = (mem_attrs *) x; + const mem_attrs *const p = (const mem_attrs *) x; return (p->alias ^ (p->align * 1000) ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000) ^ ((p->size ? INTVAL (p->size) : 0) * 2500000) - ^ (size_t) p->expr); + ^ (size_t) iterative_hash_expr (p->expr, 0)); } /* Returns nonzero if the value represented by X (which is really a @@ -263,11 +301,14 @@ mem_attrs_htab_hash (const void *x) static int mem_attrs_htab_eq (const void *x, const void *y) { - mem_attrs *p = (mem_attrs *) x; - mem_attrs *q = (mem_attrs *) y; + const mem_attrs *const p = (const mem_attrs *) x; + const mem_attrs *const q = (const mem_attrs *) y; - return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset - && p->size == q->size && p->align == q->align); + return (p->alias == q->alias && p->offset == q->offset + && p->size == q->size && p->align == q->align + && (p->expr == q->expr + || (p->expr != NULL_TREE && q->expr != NULL_TREE + && operand_equal_p (p->expr, q->expr, 0)))); } /* Allocate a new mem_attrs structure and insert it into the hash table if @@ -275,7 +316,7 @@ mem_attrs_htab_eq (const void *x, const void *y) MEM of mode MODE. */ static mem_attrs * -get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size, +get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size, unsigned int align, enum machine_mode mode) { mem_attrs attrs; @@ -312,7 +353,7 @@ get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size, static hashval_t reg_attrs_htab_hash (const void *x) { - reg_attrs *p = (reg_attrs *) x; + const reg_attrs *const p = (const reg_attrs *) x; return ((p->offset * 1000) ^ (long) p->decl); } @@ -324,8 +365,8 @@ reg_attrs_htab_hash (const void *x) static int reg_attrs_htab_eq (const void *x, const void *y) { - reg_attrs *p = (reg_attrs *) x; - reg_attrs *q = (reg_attrs *) y; + const reg_attrs *const p = (const reg_attrs *) x; + const reg_attrs *const q = (const reg_attrs *) y; return (p->decl == q->decl && p->offset == q->offset); } @@ -356,6 +397,21 @@ get_reg_attrs (tree decl, int offset) return *slot; } + +#if !HAVE_blockage +/* Generate an empty ASM_INPUT, which is used to block attempts to schedule + across this insn. */ + +rtx +gen_blockage (void) +{ + rtx x = gen_rtx_ASM_INPUT (VOIDmode, ""); + MEM_VOLATILE_P (x) = true; + return x; +} +#endif + + /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and don't attempt to share with the various global pieces of rtl (such as frame_pointer_rtx). */ @@ -425,11 +481,39 @@ const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode) rtx real = rtx_alloc (CONST_DOUBLE); PUT_MODE (real, mode); - memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE)); + real->u.rv = value; return lookup_const_double (real); } +/* Determine whether FIXED, a CONST_FIXED, already exists in the + hash table. If so, return its counterpart; otherwise add it + to the hash table and return it. */ + +static rtx +lookup_const_fixed (rtx fixed) +{ + void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT); + if (*slot == 0) + *slot = fixed; + + return (rtx) *slot; +} + +/* Return a CONST_FIXED rtx for a fixed-point value specified by + VALUE in mode MODE. 
*/ + +rtx +const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode) +{ + rtx fixed = rtx_alloc (CONST_FIXED); + PUT_MODE (fixed, mode); + + fixed->u.fv = value; + + return lookup_const_fixed (fixed); +} + /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair of ints: I0 is the low-order word and I1 is the high-order word. Do not use this routine for non-integer modes; convert to @@ -441,64 +525,28 @@ immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode) rtx value; unsigned int i; + /* There are the following cases (note that there are no modes with + HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT): + + 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use + gen_int_mode. + 2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of + the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only + from copies of the sign bit, and sign of i0 and i1 are the same), then + we return a CONST_INT for i0. + 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */ if (mode != VOIDmode) { - int width; - gcc_assert (GET_MODE_CLASS (mode) == MODE_INT || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT /* We can get a 0 for an error mark. */ || GET_MODE_CLASS (mode) == MODE_VECTOR_INT || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT); - /* We clear out all bits that don't belong in MODE, unless they and - our sign bit are all one. So we get either a reasonable negative - value or a reasonable unsigned value for this mode. */ - width = GET_MODE_BITSIZE (mode); - if (width < HOST_BITS_PER_WIDE_INT - && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1))) - != ((HOST_WIDE_INT) (-1) << (width - 1)))) - i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0; - else if (width == HOST_BITS_PER_WIDE_INT - && ! (i1 == ~0 && i0 < 0)) - i1 = 0; - else - /* We should be able to represent this value as a constant. */ - gcc_assert (width <= 2 * HOST_BITS_PER_WIDE_INT); - - /* If this would be an entire word for the target, but is not for - the host, then sign-extend on the host so that the number will - look the same way on the host that it would on the target. - - For example, when building a 64 bit alpha hosted 32 bit sparc - targeted compiler, then we want the 32 bit unsigned value -1 to be - represented as a 64 bit value -1, and not as 0x00000000ffffffff. - The latter confuses the sparc backend. */ - - if (width < HOST_BITS_PER_WIDE_INT - && (i0 & ((HOST_WIDE_INT) 1 << (width - 1)))) - i0 |= ((HOST_WIDE_INT) (-1) << width); - - /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a - CONST_INT. - - ??? Strictly speaking, this is wrong if we create a CONST_INT for - a large unsigned constant with the size of MODE being - HOST_BITS_PER_WIDE_INT and later try to interpret that constant - in a wider mode. In that case we will mis-interpret it as a - negative number. - - Unfortunately, the only alternative is to make a CONST_DOUBLE for - any constant in any mode if it is an unsigned constant larger - than the maximum signed integer in an int on the host. However, - doing this will break everyone that always expects to see a - CONST_INT for SImode and smaller. - - We have always been making CONST_INTs in this case, so nothing - new is being broken. */ + if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) + return gen_int_mode (i0, mode); - if (width <= HOST_BITS_PER_WIDE_INT) - i1 = (i0 < 0) ? 
~(HOST_WIDE_INT) 0 : 0; + gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT); } /* If this integer fits in one word, return a CONST_INT. */ @@ -606,12 +654,37 @@ gen_const_mem (enum machine_mode mode, rtx addr) return mem; } +/* Generate a MEM referring to fixed portions of the frame, e.g., register + save areas. */ + +rtx +gen_frame_mem (enum machine_mode mode, rtx addr) +{ + rtx mem = gen_rtx_MEM (mode, addr); + MEM_NOTRAP_P (mem) = 1; + set_mem_alias_set (mem, get_frame_alias_set ()); + return mem; +} + +/* Generate a MEM referring to a temporary use of the stack, not part + of the fixed stack frame. For example, something which is pushed + by a target splitter. */ +rtx +gen_tmp_stack_mem (enum machine_mode mode, rtx addr) +{ + rtx mem = gen_rtx_MEM (mode, addr); + MEM_NOTRAP_P (mem) = 1; + if (!current_function_calls_alloca) + set_mem_alias_set (mem, get_frame_alias_set ()); + return mem; +} + /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if this construct would be valid, and false otherwise. */ bool validate_subreg (enum machine_mode omode, enum machine_mode imode, - rtx reg, unsigned int offset) + const_rtx reg, unsigned int offset) { unsigned int isize = GET_MODE_SIZE (imode); unsigned int osize = GET_MODE_SIZE (omode); @@ -773,9 +846,7 @@ gen_reg_rtx (enum machine_mode mode) struct function *f = cfun; rtx val; - /* Don't let anything called after initial flow analysis create new - registers. */ - gcc_assert (!no_new_pseudos); + gcc_assert (can_create_pseudo_p ()); if (generating_concat_p && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT @@ -820,13 +891,12 @@ gen_reg_rtx (enum machine_mode mode) return val; } -/* Generate a register with same attributes as REG, but offsetted by OFFSET. +/* Update NEW with the same attributes as REG, but offsetted by OFFSET. Do the big endian correction if needed. */ -rtx -gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset) +static void +update_reg_offset (rtx new, rtx reg, int offset) { - rtx new = gen_rtx_REG (mode, regno); tree decl; HOST_WIDE_INT var_size; @@ -868,7 +938,7 @@ gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int off if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN) && decl != NULL && offset > 0 - && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode) + && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (GET_MODE (new)) && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0 && var_size < GET_MODE_SIZE (GET_MODE (reg)))) { @@ -912,10 +982,34 @@ gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int off REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg), REG_OFFSET (reg) + offset); +} + +/* Generate a register with same attributes as REG, but offsetted by + OFFSET. */ + +rtx +gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, + int offset) +{ + rtx new = gen_rtx_REG (mode, regno); + + update_reg_offset (new, reg, offset); + return new; +} + +/* Generate a new pseudo-register with the same attributes as REG, but + offsetted by OFFSET. */ + +rtx +gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset) +{ + rtx new = gen_reg_rtx (mode); + + update_reg_offset (new, reg, offset); return new; } -/* Set the decl for MEM to DECL. */ +/* Set REG to the decl that MEM refers to. 
*/ void set_reg_attrs_from_mem (rtx reg, rtx mem) @@ -953,7 +1047,7 @@ set_reg_attrs_for_parm (rtx parm_rtx, rtx mem) void set_decl_rtl (tree t, rtx x) { - DECL_CHECK (t)->decl.rtl = x; + DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x; if (!x) return; @@ -1117,7 +1211,8 @@ gen_lowpart_common (enum machine_mode mode, rtx x) /* Unfortunately, this routine doesn't take a parameter for the mode of X, so we have to make one up. Yuk. */ innermode = GET_MODE (x); - if (GET_CODE (x) == CONST_INT && msize <= HOST_BITS_PER_WIDE_INT) + if (GET_CODE (x) == CONST_INT + && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT) innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0); else if (innermode == VOIDmode) innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0); @@ -1135,7 +1230,7 @@ gen_lowpart_common (enum machine_mode mode, rtx x) return 0; /* Don't allow generating paradoxical FLOAT_MODE subregs. */ - if (GET_MODE_CLASS (mode) == MODE_FLOAT && msize > xsize) + if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize) return 0; offset = subreg_lowpart_offset (mode, innermode); @@ -1255,7 +1350,7 @@ subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode If X is not a SUBREG, always return 1 (it is its own low part!). */ int -subreg_lowpart_p (rtx x) +subreg_lowpart_p (const_rtx x) { if (GET_CODE (x) != SUBREG) return 1; @@ -1397,7 +1492,7 @@ component_ref_for_mem_expr (tree ref) and 0 otherwise. */ int -mem_expr_equal_p (tree expr1, tree expr2) +mem_expr_equal_p (const_tree expr1, const_tree expr2) { if (expr1 == expr2) return 1; @@ -1436,7 +1531,7 @@ void set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, HOST_WIDE_INT bitpos) { - HOST_WIDE_INT alias = MEM_ALIAS_SET (ref); + alias_set_type alias = MEM_ALIAS_SET (ref); tree expr = MEM_EXPR (ref); rtx offset = MEM_OFFSET (ref); rtx size = MEM_SIZE (ref); @@ -1465,13 +1560,15 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, alias = get_alias_set (t); MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type); - MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type); + MEM_IN_STRUCT_P (ref) + = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE; MEM_POINTER (ref) = POINTER_TYPE_P (type); - MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (t); /* If we are making an object of this type, or if this is a DECL, we know that it is a scalar if the type is not an aggregate. */ - if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type)) + if ((objectp || DECL_P (t)) + && ! AGGREGATE_TYPE_P (type) + && TREE_CODE (type) != COMPLEX_TYPE) MEM_SCALAR_P (ref) = 1; /* We can set the alignment from the type if we are making an object, @@ -1498,16 +1595,7 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, the expression. */ if (! TYPE_P (t)) { - tree base = get_base_address (t); - if (base && DECL_P (base) - && TREE_READONLY (base) - && (TREE_STATIC (base) || DECL_EXTERNAL (base))) - { - tree base_type = TREE_TYPE (base); - gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type)) - || DECL_ARTIFICIAL (base)); - MEM_READONLY_P (ref) = 1; - } + tree base; if (TREE_THIS_VOLATILE (t)) MEM_VOLATILE_P (ref) = 1; @@ -1520,6 +1608,36 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, || TREE_CODE (t) == SAVE_EXPR) t = TREE_OPERAND (t, 0); + /* We may look through structure-like accesses for the purposes of + examining TREE_THIS_NOTRAP, but not array-like accesses. 
*/ + base = t; + while (TREE_CODE (base) == COMPONENT_REF + || TREE_CODE (base) == REALPART_EXPR + || TREE_CODE (base) == IMAGPART_EXPR + || TREE_CODE (base) == BIT_FIELD_REF) + base = TREE_OPERAND (base, 0); + + if (DECL_P (base)) + { + if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS)) + MEM_NOTRAP_P (ref) = !DECL_WEAK (base); + else + MEM_NOTRAP_P (ref) = 1; + } + else + MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base); + + base = get_base_address (base); + if (base && DECL_P (base) + && TREE_READONLY (base) + && (TREE_STATIC (base) || DECL_EXTERNAL (base))) + { + tree base_type = TREE_TYPE (base); + gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type)) + || DECL_ARTIFICIAL (base)); + MEM_READONLY_P (ref) = 1; + } + /* If this expression uses it's parent's alias set, mark it such that we won't change it. */ if (component_uses_parent_alias_set (t)) @@ -1583,8 +1701,9 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, index, low_bound); off_tree = size_binop (PLUS_EXPR, - size_binop (MULT_EXPR, convert (sizetype, - index), + size_binop (MULT_EXPR, + fold_convert (sizetype, + index), unit_size), off_tree); t2 = TREE_OPERAND (t2, 0); @@ -1677,7 +1796,7 @@ set_mem_attributes (rtx ref, tree t, int objectp) set_mem_attributes_minus_bitpos (ref, t, objectp, 0); } -/* Set the decl for MEM to DECL. */ +/* Set MEM to the decl that REG refers to. */ void set_mem_attrs_from_reg (rtx mem, rtx reg) @@ -1691,7 +1810,7 @@ set_mem_attrs_from_reg (rtx mem, rtx reg) /* Set the alias set of MEM to SET. */ void -set_mem_alias_set (rtx mem, HOST_WIDE_INT set) +set_mem_alias_set (rtx mem, alias_set_type set) { #ifdef ENABLE_CHECKING /* If the new and old alias sets don't conflict, something is wrong. */ @@ -2075,17 +2194,8 @@ set_new_first_and_last_insn (rtx first, rtx last) structure. This routine should only be called once. */ static void -unshare_all_rtl_1 (tree fndecl, rtx insn) +unshare_all_rtl_1 (rtx insn) { - tree decl; - - /* Make sure that virtual parameters are not shared. */ - for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl)) - SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl))); - - /* Make sure that virtual stack slots are not shared. */ - unshare_all_decls (DECL_INITIAL (fndecl)); - /* Unshare just about everything else. */ unshare_all_rtl_in_chain (insn); @@ -2114,27 +2224,45 @@ unshare_all_rtl_again (rtx insn) { reset_used_flags (PATTERN (p)); reset_used_flags (REG_NOTES (p)); - reset_used_flags (LOG_LINKS (p)); } /* Make sure that virtual stack slots are not shared. */ - reset_used_decls (DECL_INITIAL (cfun->decl)); + set_used_decls (DECL_INITIAL (cfun->decl)); /* Make sure that virtual parameters are not shared. 
*/ for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl)) - reset_used_flags (DECL_RTL (decl)); + set_used_flags (DECL_RTL (decl)); reset_used_flags (stack_slot_list); - unshare_all_rtl_1 (cfun->decl, insn); + unshare_all_rtl_1 (insn); } -void +unsigned int unshare_all_rtl (void) { - unshare_all_rtl_1 (current_function_decl, get_insns ()); + unshare_all_rtl_1 (get_insns ()); + return 0; } +struct tree_opt_pass pass_unshare_all_rtl = +{ + "unshare", /* name */ + NULL, /* gate */ + unshare_all_rtl, /* execute */ + NULL, /* sub */ + NULL, /* next */ + 0, /* static_pass_number */ + 0, /* tv_id */ + 0, /* properties_required */ + 0, /* properties_provided */ + 0, /* properties_destroyed */ + 0, /* todo_flags_start */ + TODO_dump_func | TODO_verify_rtl_sharing, /* todo_flags_finish */ + 0 /* letter */ +}; + + /* Check that ORIG is not marked when it should not be and mark ORIG as in use, Recursively does the same for subexpressions. */ @@ -2158,6 +2286,7 @@ verify_rtx_sharing (rtx orig, rtx insn) case REG: case CONST_INT: case CONST_DOUBLE: + case CONST_FIXED: case CONST_VECTOR: case SYMBOL_REF: case LABEL_REF: @@ -2173,11 +2302,7 @@ verify_rtx_sharing (rtx orig, rtx insn) break; case CONST: - /* CONST can be shared if it contains a SYMBOL_REF. If it contains - a LABEL_REF, it isn't sharable. */ - if (GET_CODE (XEXP (x, 0)) == PLUS - && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF - && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT) + if (shared_const_p (orig)) return; break; @@ -2258,7 +2383,19 @@ verify_rtl_sharing (void) { reset_used_flags (PATTERN (p)); reset_used_flags (REG_NOTES (p)); - reset_used_flags (LOG_LINKS (p)); + if (GET_CODE (PATTERN (p)) == SEQUENCE) + { + int i; + rtx q, sequence = PATTERN (p); + + for (i = 0; i < XVECLEN (sequence, 0); i++) + { + q = XVECEXP (sequence, 0, i); + gcc_assert (INSN_P (q)); + reset_used_flags (PATTERN (q)); + reset_used_flags (REG_NOTES (q)); + } + } } for (p = get_insns (); p; p = NEXT_INSN (p)) @@ -2266,7 +2403,6 @@ verify_rtl_sharing (void) { verify_rtx_sharing (PATTERN (p), p); verify_rtx_sharing (REG_NOTES (p), p); - verify_rtx_sharing (LOG_LINKS (p), p); } } @@ -2281,42 +2417,32 @@ unshare_all_rtl_in_chain (rtx insn) { PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn)); REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn)); - LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn)); } } -/* Go through all virtual stack slots of a function and copy any - shared structure. */ -static void -unshare_all_decls (tree blk) -{ - tree t; - - /* Copy shared decls. */ - for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t)) - if (DECL_RTL_SET_P (t)) - SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t))); +/* Go through all virtual stack slots of a function and mark them as + shared. We never replace the DECL_RTLs themselves with a copy, + but expressions mentioned into a DECL_RTL cannot be shared with + expressions in the instruction stream. - /* Now process sub-blocks. */ - for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t)) - unshare_all_decls (t); -} + Note that reload may convert pseudo registers into memories in-place. + Pseudo registers are always shared, but MEMs never are. Thus if we + reset the used flags on MEMs in the instruction stream, we must set + them again on MEMs that appear in DECL_RTLs. */ -/* Go through all virtual stack slots of a function and mark them as - not shared. */ static void -reset_used_decls (tree blk) +set_used_decls (tree blk) { tree t; /* Mark decls. 
*/ for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t)) if (DECL_RTL_SET_P (t)) - reset_used_flags (DECL_RTL (t)); + set_used_flags (DECL_RTL (t)); /* Now process sub-blocks. */ - for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t)) - reset_used_decls (t); + for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t)) + set_used_decls (t); } /* Mark ORIG as in use, and return a copy of it if it was already in use. @@ -2360,6 +2486,7 @@ repeat: case REG: case CONST_INT: case CONST_DOUBLE: + case CONST_FIXED: case CONST_VECTOR: case SYMBOL_REF: case LABEL_REF: @@ -2375,11 +2502,7 @@ repeat: break; case CONST: - /* CONST can be shared if it contains a SYMBOL_REF. If it contains - a LABEL_REF, it isn't sharable. */ - if (GET_CODE (XEXP (x, 0)) == PLUS - && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF - && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT) + if (shared_const_p (x)) return; break; @@ -2400,11 +2523,7 @@ repeat: if (RTX_FLAG (x, used)) { - rtx copy; - - copy = rtx_alloc (code); - memcpy (copy, x, RTX_SIZE (code)); - x = copy; + x = shallow_copy_rtx (x); copied = 1; } RTX_FLAG (x, used) = 1; @@ -2485,6 +2604,7 @@ repeat: case REG: case CONST_INT: case CONST_DOUBLE: + case CONST_FIXED: case CONST_VECTOR: case SYMBOL_REF: case CODE_LABEL: @@ -2554,6 +2674,7 @@ set_used_flags (rtx x) case REG: case CONST_INT: case CONST_DOUBLE: + case CONST_FIXED: case CONST_VECTOR: case SYMBOL_REF: case CODE_LABEL: @@ -2740,33 +2861,6 @@ get_max_uid (void) { return cur_insn_uid; } - -/* Renumber instructions so that no instruction UIDs are wasted. */ - -void -renumber_insns (FILE *stream) -{ - rtx insn; - - /* If we're not supposed to renumber instructions, don't. */ - if (!flag_renumber_insns) - return; - - /* If there aren't that many instructions, then it's not really - worth renumbering them. */ - if (flag_renumber_insns == 1 && get_max_uid () < 25000) - return; - - cur_insn_uid = 1; - - for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) - { - if (stream) - fprintf (stream, "Renumbering insn %d to %d\n", - INSN_UID (insn), cur_insn_uid); - INSN_UID (insn) = cur_insn_uid++; - } -} /* Return the next insn. If it is a SEQUENCE, return the first insn of the sequence. */ @@ -2889,7 +2983,7 @@ last_call_insn (void) same as next_real_insn. */ int -active_insn_p (rtx insn) +active_insn_p (const_rtx insn) { return (CALL_P (insn) || JUMP_P (insn) || (NONJUMP_INSN_P (insn) @@ -3035,6 +3129,37 @@ prev_cc0_setter (rtx insn) } #endif +#ifdef AUTO_INC_DEC +/* Find a RTX_AUTOINC class rtx which matches DATA. */ + +static int +find_auto_inc (rtx *xp, void *data) +{ + rtx x = *xp; + rtx reg = data; + + if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC) + return 0; + + switch (GET_CODE (x)) + { + case PRE_DEC: + case PRE_INC: + case POST_DEC: + case POST_INC: + case PRE_MODIFY: + case POST_MODIFY: + if (rtx_equal_p (reg, XEXP (x, 0))) + return 1; + break; + + default: + gcc_unreachable (); + } + return -1; +} +#endif + /* Increment the label uses for all labels present in rtx. */ static void @@ -3075,7 +3200,7 @@ try_split (rtx pat, rtx trial, int last) rtx before = PREV_INSN (trial); rtx after = NEXT_INSN (trial); int has_barrier = 0; - rtx tem; + rtx tem, note_retval; rtx note, seq; int probability; rtx insn_last, insn; @@ -3114,6 +3239,10 @@ try_split (rtx pat, rtx trial, int last) insn_last = NEXT_INSN (insn_last); } + /* We will be adding the new sequence to the function. The splitters + may have introduced invalid RTL sharing, so unshare the sequence now. */ + unshare_all_rtl_in_chain (seq); + /* Mark labels. 
*/ for (insn = insn_last; insn ; insn = PREV_INSN (insn)) { @@ -3159,8 +3288,7 @@ try_split (rtx pat, rtx trial, int last) switch (REG_NOTE_KIND (note)) { case REG_EH_REGION: - insn = insn_last; - while (insn != NULL_RTX) + for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) { if (CALL_P (insn) || (flag_non_call_exceptions && INSN_P (insn) @@ -3169,37 +3297,57 @@ try_split (rtx pat, rtx trial, int last) = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (note, 0), REG_NOTES (insn)); - insn = PREV_INSN (insn); } break; case REG_NORETURN: case REG_SETJMP: - insn = insn_last; - while (insn != NULL_RTX) + for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) { if (CALL_P (insn)) REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note), XEXP (note, 0), REG_NOTES (insn)); - insn = PREV_INSN (insn); } break; case REG_NON_LOCAL_GOTO: - insn = insn_last; - while (insn != NULL_RTX) + for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) { if (JUMP_P (insn)) REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note), XEXP (note, 0), REG_NOTES (insn)); - insn = PREV_INSN (insn); } break; +#ifdef AUTO_INC_DEC + case REG_INC: + for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) + { + rtx reg = XEXP (note, 0); + if (!FIND_REG_INC_NOTE (insn, reg) + && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0) + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, + REG_NOTES (insn)); + } + break; +#endif + + case REG_LIBCALL: + /* Relink the insns with REG_LIBCALL note and with REG_RETVAL note + after split. */ + REG_NOTES (insn_last) + = gen_rtx_INSN_LIST (REG_LIBCALL, + XEXP (note, 0), + REG_NOTES (insn_last)); + + note_retval = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL); + XEXP (note_retval, 0) = insn_last; + break; + default: break; } @@ -3207,11 +3355,12 @@ try_split (rtx pat, rtx trial, int last) /* If there are LABELS inside the split insns increment the usage count so we don't delete the label. */ - if (NONJUMP_INSN_P (trial)) + if (INSN_P (trial)) { insn = insn_last; while (insn != NULL_RTX) { + /* JUMP_P insns have already been "marked" above. */ if (NONJUMP_INSN_P (insn)) mark_label_nuses (PATTERN (insn)); @@ -3254,9 +3403,8 @@ make_insn_raw (rtx pattern) INSN_UID (insn) = cur_insn_uid++; PATTERN (insn) = pattern; INSN_CODE (insn) = -1; - LOG_LINKS (insn) = NULL; REG_NOTES (insn) = NULL; - INSN_LOCATOR (insn) = 0; + INSN_LOCATOR (insn) = curr_insn_locator (); BLOCK_FOR_INSN (insn) = NULL; #ifdef ENABLE_RTL_CHECKING @@ -3276,7 +3424,7 @@ make_insn_raw (rtx pattern) /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */ -static rtx +rtx make_jump_insn_raw (rtx pattern) { rtx insn; @@ -3286,10 +3434,9 @@ make_jump_insn_raw (rtx pattern) PATTERN (insn) = pattern; INSN_CODE (insn) = -1; - LOG_LINKS (insn) = NULL; REG_NOTES (insn) = NULL; JUMP_LABEL (insn) = NULL; - INSN_LOCATOR (insn) = 0; + INSN_LOCATOR (insn) = curr_insn_locator (); BLOCK_FOR_INSN (insn) = NULL; return insn; @@ -3307,10 +3454,9 @@ make_call_insn_raw (rtx pattern) PATTERN (insn) = pattern; INSN_CODE (insn) = -1; - LOG_LINKS (insn) = NULL; REG_NOTES (insn) = NULL; CALL_INSN_FUNCTION_USAGE (insn) = NULL; - INSN_LOCATOR (insn) = 0; + INSN_LOCATOR (insn) = curr_insn_locator (); BLOCK_FOR_INSN (insn) = NULL; return insn; @@ -3340,10 +3486,9 @@ add_insn (rtx insn) SEQUENCE. 
*/ void -add_insn_after (rtx insn, rtx after) +add_insn_after (rtx insn, rtx after, basic_block bb) { rtx next = NEXT_INSN (after); - basic_block bb; gcc_assert (!optimize || !INSN_DELETED_P (after)); @@ -3378,14 +3523,13 @@ add_insn_after (rtx insn, rtx after) { set_block_for_insn (insn, bb); if (INSN_P (insn)) - bb->flags |= BB_DIRTY; + df_insn_rescan (insn); /* Should not happen as first in the BB is always either NOTE or LABEL. */ if (BB_END (bb) == after /* Avoid clobbering of structure when creating new BB. */ && !BARRIER_P (insn) - && (!NOTE_P (insn) - || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)) + && !NOTE_INSN_BASIC_BLOCK_P (insn)) BB_END (bb) = insn; } @@ -3398,15 +3542,15 @@ add_insn_after (rtx insn, rtx after) } /* Add INSN into the doubly-linked list before insn BEFORE. This and - the previous should be the only functions called to insert an insn once - delay slots have been filled since only they know how to update a - SEQUENCE. */ + the previous should be the only functions called to insert an insn + once delay slots have been filled since only they know how to + update a SEQUENCE. If BB is NULL, an attempt is made to infer the + bb from before. */ void -add_insn_before (rtx insn, rtx before) +add_insn_before (rtx insn, rtx before, basic_block bb) { rtx prev = PREV_INSN (before); - basic_block bb; gcc_assert (!optimize || !INSN_DELETED_P (before)); @@ -3438,20 +3582,22 @@ add_insn_before (rtx insn, rtx before) gcc_assert (stack); } - if (!BARRIER_P (before) - && !BARRIER_P (insn) - && (bb = BLOCK_FOR_INSN (before))) + if (!bb + && !BARRIER_P (before) + && !BARRIER_P (insn)) + bb = BLOCK_FOR_INSN (before); + + if (bb) { set_block_for_insn (insn, bb); if (INSN_P (insn)) - bb->flags |= BB_DIRTY; + df_insn_rescan (insn); /* Should not happen as first in the BB is always either NOTE or LABEL. */ gcc_assert (BB_HEAD (bb) != insn /* Avoid clobbering of structure when creating new BB. */ || BARRIER_P (insn) - || (NOTE_P (insn) - && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)); + || NOTE_INSN_BASIC_BLOCK_P (insn)); } PREV_INSN (before) = insn; @@ -3459,6 +3605,17 @@ add_insn_before (rtx insn, rtx before) PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn; } + +/* Replace insn with an deleted instruction note. */ + +void set_insn_deleted (rtx insn) +{ + df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn)); + PUT_CODE (insn, NOTE); + NOTE_KIND (insn) = NOTE_INSN_DELETED; +} + + /* Remove an insn from its doubly-linked list. This function knows how to handle sequences. */ void @@ -3468,6 +3625,9 @@ remove_insn (rtx insn) rtx prev = PREV_INSN (insn); basic_block bb; + /* Later in the code, the block will be marked dirty. 
*/ + df_insn_delete (NULL, INSN_UID (insn)); + if (prev) { NEXT_INSN (prev) = next; @@ -3518,7 +3678,7 @@ remove_insn (rtx insn) && (bb = BLOCK_FOR_INSN (insn))) { if (INSN_P (insn)) - bb->flags |= BB_DIRTY; + df_set_bb_dirty (bb); if (BB_HEAD (bb) == insn) { /* Never ever delete the basic block note without deleting whole @@ -3614,14 +3774,14 @@ reorder_insns (rtx from, rtx to, rtx after) && (bb = BLOCK_FOR_INSN (after))) { rtx x; - bb->flags |= BB_DIRTY; + df_set_bb_dirty (bb); if (!BARRIER_P (from) && (bb2 = BLOCK_FOR_INSN (from))) { if (BB_END (bb2) == to) BB_END (bb2) = prev; - bb2->flags |= BB_DIRTY; + df_set_bb_dirty (bb2); } if (BB_END (bb) == after) @@ -3629,80 +3789,11 @@ reorder_insns (rtx from, rtx to, rtx after) for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x)) if (!BARRIER_P (x)) - set_block_for_insn (x, bb); - } -} - -/* Return the line note insn preceding INSN. */ - -static rtx -find_line_note (rtx insn) -{ - if (no_line_numbers) - return 0; - - for (; insn; insn = PREV_INSN (insn)) - if (NOTE_P (insn) - && NOTE_LINE_NUMBER (insn) >= 0) - break; - - return insn; -} - -/* Remove unnecessary notes from the instruction stream. */ - -void -remove_unnecessary_notes (void) -{ - rtx eh_stack = NULL_RTX; - rtx insn; - rtx next; - rtx tmp; - - /* We must not remove the first instruction in the function because - the compiler depends on the first instruction being a note. */ - for (insn = NEXT_INSN (get_insns ()); insn; insn = next) - { - /* Remember what's next. */ - next = NEXT_INSN (insn); - - /* We're only interested in notes. */ - if (!NOTE_P (insn)) - continue; - - switch (NOTE_LINE_NUMBER (insn)) - { - case NOTE_INSN_DELETED: - remove_insn (insn); - break; - - case NOTE_INSN_EH_REGION_BEG: - eh_stack = alloc_INSN_LIST (insn, eh_stack); - break; - - case NOTE_INSN_EH_REGION_END: - /* Too many end notes. */ - gcc_assert (eh_stack); - /* Mismatched nesting. */ - gcc_assert (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) - == NOTE_EH_HANDLER (insn)); - tmp = eh_stack; - eh_stack = XEXP (eh_stack, 1); - free_INSN_LIST_node (tmp); - break; - - case NOTE_INSN_BLOCK_BEG: - case NOTE_INSN_BLOCK_END: - /* BLOCK_END and BLOCK_BEG notes only exist in the `final' pass. */ - gcc_unreachable (); - - default: - break; - } + { + set_block_for_insn (x, bb); + df_insn_change_bb (x); + } } - - /* Too many EH_REGION_BEG notes. */ - gcc_assert (!eh_stack); } @@ -3734,7 +3825,7 @@ remove_unnecessary_notes (void) /* Make X be output before the instruction BEFORE. 
*/ rtx -emit_insn_before_noloc (rtx x, rtx before) +emit_insn_before_noloc (rtx x, rtx before, basic_block bb) { rtx last = before; rtx insn; @@ -3756,7 +3847,7 @@ emit_insn_before_noloc (rtx x, rtx before) while (insn) { rtx next = NEXT_INSN (insn); - add_insn_before (insn, before); + add_insn_before (insn, before, bb); last = insn; insn = next; } @@ -3770,7 +3861,7 @@ emit_insn_before_noloc (rtx x, rtx before) default: last = make_insn_raw (x); - add_insn_before (last, before); + add_insn_before (last, before, bb); break; } @@ -3799,7 +3890,7 @@ emit_jump_insn_before_noloc (rtx x, rtx before) while (insn) { rtx next = NEXT_INSN (insn); - add_insn_before (insn, before); + add_insn_before (insn, before, NULL); last = insn; insn = next; } @@ -3813,7 +3904,7 @@ emit_jump_insn_before_noloc (rtx x, rtx before) default: last = make_jump_insn_raw (x); - add_insn_before (last, before); + add_insn_before (last, before, NULL); break; } @@ -3842,7 +3933,7 @@ emit_call_insn_before_noloc (rtx x, rtx before) while (insn) { rtx next = NEXT_INSN (insn); - add_insn_before (insn, before); + add_insn_before (insn, before, NULL); last = insn; insn = next; } @@ -3856,7 +3947,7 @@ emit_call_insn_before_noloc (rtx x, rtx before) default: last = make_call_insn_raw (x); - add_insn_before (last, before); + add_insn_before (last, before, NULL); break; } @@ -3873,7 +3964,7 @@ emit_barrier_before (rtx before) INSN_UID (insn) = cur_insn_uid++; - add_insn_before (insn, before); + add_insn_before (insn, before, NULL); return insn; } @@ -3887,7 +3978,7 @@ emit_label_before (rtx label, rtx before) if (INSN_UID (label) == 0) { INSN_UID (label) = cur_insn_uid++; - add_insn_before (label, before); + add_insn_before (label, before, NULL); } return label; @@ -3896,41 +3987,43 @@ emit_label_before (rtx label, rtx before) /* Emit a note of subtype SUBTYPE before the insn BEFORE. */ rtx -emit_note_before (int subtype, rtx before) +emit_note_before (enum insn_note subtype, rtx before) { rtx note = rtx_alloc (NOTE); INSN_UID (note) = cur_insn_uid++; -#ifndef USE_MAPPED_LOCATION - NOTE_SOURCE_FILE (note) = 0; -#endif - NOTE_LINE_NUMBER (note) = subtype; + NOTE_KIND (note) = subtype; BLOCK_FOR_INSN (note) = NULL; + memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note))); - add_insn_before (note, before); + add_insn_before (note, before, NULL); return note; } /* Helper for emit_insn_after, handles lists of instructions efficiently. */ -static rtx emit_insn_after_1 (rtx, rtx); - static rtx -emit_insn_after_1 (rtx first, rtx after) +emit_insn_after_1 (rtx first, rtx after, basic_block bb) { rtx last; rtx after_after; - basic_block bb; + if (!bb && !BARRIER_P (after)) + bb = BLOCK_FOR_INSN (after); - if (!BARRIER_P (after) - && (bb = BLOCK_FOR_INSN (after))) + if (bb) { - bb->flags |= BB_DIRTY; + df_set_bb_dirty (bb); for (last = first; NEXT_INSN (last); last = NEXT_INSN (last)) if (!BARRIER_P (last)) - set_block_for_insn (last, bb); + { + set_block_for_insn (last, bb); + df_insn_rescan (last); + } if (!BARRIER_P (last)) - set_block_for_insn (last, bb); + { + set_block_for_insn (last, bb); + df_insn_rescan (last); + } if (BB_END (bb) == after) BB_END (bb) = last; } @@ -3951,10 +4044,11 @@ emit_insn_after_1 (rtx first, rtx after) return last; } -/* Make X be output after the insn AFTER. */ +/* Make X be output after the insn AFTER and set the BB of insn. If + BB is NULL, an attempt is made to infer the BB from AFTER. 
*/ rtx -emit_insn_after_noloc (rtx x, rtx after) +emit_insn_after_noloc (rtx x, rtx after, basic_block bb) { rtx last = after; @@ -3971,7 +4065,7 @@ emit_insn_after_noloc (rtx x, rtx after) case CODE_LABEL: case BARRIER: case NOTE: - last = emit_insn_after_1 (x, after); + last = emit_insn_after_1 (x, after, bb); break; #ifdef ENABLE_RTL_CHECKING @@ -3982,29 +4076,13 @@ emit_insn_after_noloc (rtx x, rtx after) default: last = make_insn_raw (x); - add_insn_after (last, after); + add_insn_after (last, after, bb); break; } return last; } -/* Similar to emit_insn_after, except that line notes are to be inserted so - as to act as if this insn were at FROM. */ - -void -emit_insn_after_with_line_notes (rtx x, rtx after, rtx from) -{ - rtx from_line = find_line_note (from); - rtx after_line = find_line_note (after); - rtx insn = emit_insn_after (x, after); - - if (from_line) - emit_note_copy_after (from_line, after); - - if (after_line) - emit_note_copy_after (after_line, insn); -} /* Make an insn of code JUMP_INSN with body X and output it after the insn AFTER. */ @@ -4024,7 +4102,7 @@ emit_jump_insn_after_noloc (rtx x, rtx after) case CODE_LABEL: case BARRIER: case NOTE: - last = emit_insn_after_1 (x, after); + last = emit_insn_after_1 (x, after, NULL); break; #ifdef ENABLE_RTL_CHECKING @@ -4035,7 +4113,7 @@ emit_jump_insn_after_noloc (rtx x, rtx after) default: last = make_jump_insn_raw (x); - add_insn_after (last, after); + add_insn_after (last, after, NULL); break; } @@ -4060,7 +4138,7 @@ emit_call_insn_after_noloc (rtx x, rtx after) case CODE_LABEL: case BARRIER: case NOTE: - last = emit_insn_after_1 (x, after); + last = emit_insn_after_1 (x, after, NULL); break; #ifdef ENABLE_RTL_CHECKING @@ -4071,7 +4149,7 @@ emit_call_insn_after_noloc (rtx x, rtx after) default: last = make_call_insn_raw (x); - add_insn_after (last, after); + add_insn_after (last, after, NULL); break; } @@ -4088,7 +4166,7 @@ emit_barrier_after (rtx after) INSN_UID (insn) = cur_insn_uid++; - add_insn_after (insn, after); + add_insn_after (insn, after, NULL); return insn; } @@ -4103,7 +4181,7 @@ emit_label_after (rtx label, rtx after) if (INSN_UID (label) == 0) { INSN_UID (label) = cur_insn_uid++; - add_insn_after (label, after); + add_insn_after (label, after, NULL); } return label; @@ -4112,38 +4190,14 @@ emit_label_after (rtx label, rtx after) /* Emit a note of subtype SUBTYPE after the insn AFTER. */ rtx -emit_note_after (int subtype, rtx after) +emit_note_after (enum insn_note subtype, rtx after) { rtx note = rtx_alloc (NOTE); INSN_UID (note) = cur_insn_uid++; -#ifndef USE_MAPPED_LOCATION - NOTE_SOURCE_FILE (note) = 0; -#endif - NOTE_LINE_NUMBER (note) = subtype; + NOTE_KIND (note) = subtype; BLOCK_FOR_INSN (note) = NULL; - add_insn_after (note, after); - return note; -} - -/* Emit a copy of note ORIG after the insn AFTER. 
*/ - -rtx -emit_note_copy_after (rtx orig, rtx after) -{ - rtx note; - - if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers) - { - cur_insn_uid++; - return 0; - } - - note = rtx_alloc (NOTE); - INSN_UID (note) = cur_insn_uid++; - NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig); - NOTE_DATA (note) = NOTE_DATA (orig); - BLOCK_FOR_INSN (note) = NULL; - add_insn_after (note, after); + memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note))); + add_insn_after (note, after, NULL); return note; } @@ -4151,7 +4205,7 @@ emit_note_copy_after (rtx orig, rtx after) rtx emit_insn_after_setloc (rtx pattern, rtx after, int loc) { - rtx last = emit_insn_after_noloc (pattern, after); + rtx last = emit_insn_after_noloc (pattern, after, NULL); if (pattern == NULL_RTX || !loc) return last; @@ -4175,7 +4229,7 @@ emit_insn_after (rtx pattern, rtx after) if (INSN_P (after)) return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after)); else - return emit_insn_after_noloc (pattern, after); + return emit_insn_after_noloc (pattern, after, NULL); } /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */ @@ -4245,12 +4299,15 @@ rtx emit_insn_before_setloc (rtx pattern, rtx before, int loc) { rtx first = PREV_INSN (before); - rtx last = emit_insn_before_noloc (pattern, before); + rtx last = emit_insn_before_noloc (pattern, before, NULL); if (pattern == NULL_RTX || !loc) return last; - first = NEXT_INSN (first); + if (!first) + first = get_insns (); + else + first = NEXT_INSN (first); while (1) { if (active_insn_p (first) && !INSN_LOCATOR (first)) @@ -4269,7 +4326,7 @@ emit_insn_before (rtx pattern, rtx before) if (INSN_P (before)) return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before)); else - return emit_insn_before_noloc (pattern, before); + return emit_insn_before_noloc (pattern, before, NULL); } /* like emit_insn_before_noloc, but set insn_locator according to scope. */ @@ -4487,42 +4544,6 @@ emit_barrier (void) return barrier; } -/* Make line numbering NOTE insn for LOCATION add it to the end - of the doubly-linked list, but only if line-numbers are desired for - debugging info and it doesn't match the previous one. */ - -rtx -emit_line_note (location_t location) -{ - rtx note; - -#ifdef USE_MAPPED_LOCATION - if (location == last_location) - return NULL_RTX; -#else - if (location.file && last_location.file - && !strcmp (location.file, last_location.file) - && location.line == last_location.line) - return NULL_RTX; -#endif - last_location = location; - - if (no_line_numbers) - { - cur_insn_uid++; - return NULL_RTX; - } - -#ifdef USE_MAPPED_LOCATION - note = emit_note ((int) location); -#else - note = emit_note (location.line); - NOTE_SOURCE_FILE (note) = location.file; -#endif - - return note; -} - /* Emit a copy of note ORIG. */ rtx @@ -4530,17 +4551,11 @@ emit_note_copy (rtx orig) { rtx note; - if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers) - { - cur_insn_uid++; - return NULL_RTX; - } - note = rtx_alloc (NOTE); INSN_UID (note) = cur_insn_uid++; NOTE_DATA (note) = NOTE_DATA (orig); - NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig); + NOTE_KIND (note) = NOTE_KIND (orig); BLOCK_FOR_INSN (note) = NULL; add_insn (note); @@ -4551,13 +4566,13 @@ emit_note_copy (rtx orig) and add it to the end of the doubly-linked list. 
*/ rtx -emit_note (int note_no) +emit_note (enum insn_note kind) { rtx note; note = rtx_alloc (NOTE); INSN_UID (note) = cur_insn_uid++; - NOTE_LINE_NUMBER (note) = note_no; + NOTE_KIND (note) = kind; memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note))); BLOCK_FOR_INSN (note) = NULL; add_insn (note); @@ -4584,6 +4599,7 @@ rtx set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum) { rtx note = find_reg_note (insn, kind, NULL_RTX); + rtx new_note = NULL; switch (kind) { @@ -4603,19 +4619,37 @@ set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum) It serves no useful purpose and breaks eliminate_regs. */ if (GET_CODE (datum) == ASM_OPERANDS) return NULL_RTX; + + if (note) + { + XEXP (note, 0) = datum; + df_notes_rescan (insn); + return note; + } break; default: + if (note) + { + XEXP (note, 0) = datum; + return note; + } break; } - if (note) + new_note = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn)); + REG_NOTES (insn) = new_note; + + switch (kind) { - XEXP (note, 0) = datum; - return note; + case REG_EQUAL: + case REG_EQUIV: + df_notes_rescan (insn); + break; + default: + break; } - REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn)); return REG_NOTES (insn); } @@ -4734,6 +4768,18 @@ push_to_sequence (rtx first) last_insn = last; } +/* Like push_to_sequence, but take the last insn as an argument to avoid + looping through the list. */ + +void +push_to_sequence2 (rtx first, rtx last) +{ + start_sequence (); + + first_insn = first; + last_insn = last; +} + /* Set up the outer-level insn chain as the current sequence, saving the previously current one. */ @@ -4805,7 +4851,7 @@ in_sequence_p (void) /* Put the various virtual registers into REGNO_REG_RTX. */ -void +static void init_virtual_regs (struct emit_status *es) { rtx *ptr = es->x_regno_reg_rtx; @@ -4859,6 +4905,7 @@ copy_insn_1 (rtx orig) case REG: case CONST_INT: case CONST_DOUBLE: + case CONST_FIXED: case CONST_VECTOR: case SYMBOL_REF: case CODE_LABEL: @@ -4877,11 +4924,7 @@ copy_insn_1 (rtx orig) break; case CONST: - /* CONST can be shared if it contains a SYMBOL_REF. If it contains - a LABEL_REF, it isn't sharable. */ - if (GET_CODE (XEXP (orig, 0)) == PLUS - && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF - && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT) + if (shared_const_p (orig)) return orig; break; @@ -4894,13 +4937,11 @@ copy_insn_1 (rtx orig) break; } - copy = rtx_alloc (code); - - /* Copy the various flags, and other information. We assume that - all fields need copying, and then clear the fields that should + /* Copy the various flags, fields, and other information. We assume + that all fields need copying, and then clear the fields that should not be copied. That is the sensible default behavior, and forces us to explicitly document why we are *not* copying a flag. */ - memcpy (copy, orig, RTX_HDR_SIZE); + copy = shallow_copy_rtx (orig); /* We do not copy the USED flag, which is used as a mark bit during walks over the RTL. 
*/ @@ -4917,43 +4958,40 @@ copy_insn_1 (rtx orig) format_ptr = GET_RTX_FORMAT (GET_CODE (copy)); for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++) - { - copy->u.fld[i] = orig->u.fld[i]; - switch (*format_ptr++) - { - case 'e': - if (XEXP (orig, i) != NULL) - XEXP (copy, i) = copy_insn_1 (XEXP (orig, i)); - break; + switch (*format_ptr++) + { + case 'e': + if (XEXP (orig, i) != NULL) + XEXP (copy, i) = copy_insn_1 (XEXP (orig, i)); + break; - case 'E': - case 'V': - if (XVEC (orig, i) == orig_asm_constraints_vector) - XVEC (copy, i) = copy_asm_constraints_vector; - else if (XVEC (orig, i) == orig_asm_operands_vector) - XVEC (copy, i) = copy_asm_operands_vector; - else if (XVEC (orig, i) != NULL) - { - XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i)); - for (j = 0; j < XVECLEN (copy, i); j++) - XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j)); - } - break; + case 'E': + case 'V': + if (XVEC (orig, i) == orig_asm_constraints_vector) + XVEC (copy, i) = copy_asm_constraints_vector; + else if (XVEC (orig, i) == orig_asm_operands_vector) + XVEC (copy, i) = copy_asm_operands_vector; + else if (XVEC (orig, i) != NULL) + { + XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i)); + for (j = 0; j < XVECLEN (copy, i); j++) + XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j)); + } + break; - case 't': - case 'w': - case 'i': - case 's': - case 'S': - case 'u': - case '0': - /* These are left unchanged. */ - break; + case 't': + case 'w': + case 'i': + case 's': + case 'S': + case 'u': + case '0': + /* These are left unchanged. */ + break; - default: - gcc_unreachable (); - } - } + default: + gcc_unreachable (); + } if (code == SCRATCH) { @@ -5069,6 +5107,8 @@ gen_const_vector (enum machine_mode mode, int constant) units = GET_MODE_NUNITS (mode); inner = GET_MODE_INNER (mode); + gcc_assert (!DECIMAL_FLOAT_MODE_P (inner)); + v = rtvec_alloc (units); /* We need to call this function after we set the scalar const_tiny_rtx @@ -5111,6 +5151,74 @@ gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v) return gen_rtx_raw_CONST_VECTOR (mode, v); } +/* Initialise global register information required by all functions. */ + +void +init_emit_regs (void) +{ + int i; + + /* Reset register attributes */ + htab_empty (reg_attrs_htab); + + /* We need reg_raw_mode, so initialize the modes now. */ + init_reg_modes_target (); + + /* Assign register numbers to the globally defined register rtx. */ + pc_rtx = gen_rtx_PC (VOIDmode); + cc0_rtx = gen_rtx_CC0 (VOIDmode); + stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM); + frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM); + hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM); + arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM); + virtual_incoming_args_rtx = + gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM); + virtual_stack_vars_rtx = + gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM); + virtual_stack_dynamic_rtx = + gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM); + virtual_outgoing_args_rtx = + gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM); + virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM); + + /* Initialize RTL for commonly used hard registers. These are + copied into regno_reg_rtx as we begin to compile each function. 
*/ + for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) + static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i); + +#ifdef RETURN_ADDRESS_POINTER_REGNUM + return_address_pointer_rtx + = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM); +#endif + +#ifdef STATIC_CHAIN_REGNUM + static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM); + +#ifdef STATIC_CHAIN_INCOMING_REGNUM + if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM) + static_chain_incoming_rtx + = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM); + else +#endif + static_chain_incoming_rtx = static_chain_rtx; +#endif + +#ifdef STATIC_CHAIN + static_chain_rtx = STATIC_CHAIN; + +#ifdef STATIC_CHAIN_INCOMING + static_chain_incoming_rtx = STATIC_CHAIN_INCOMING; +#else + static_chain_incoming_rtx = static_chain_rtx; +#endif +#endif + + if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM) + pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM); + else + pic_offset_table_rtx = NULL_RTX; +} + /* Create some permanent unique rtl objects shared between all functions. LINE_NUMBERS is nonzero if line numbers are to be generated. */ @@ -5121,17 +5229,17 @@ init_emit_once (int line_numbers) enum machine_mode mode; enum machine_mode double_mode; - /* We need reg_raw_mode, so initialize the modes now. */ - init_reg_modes_once (); - - /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash - tables. */ + /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute + hash tables. */ const_int_htab = htab_create_ggc (37, const_int_htab_hash, const_int_htab_eq, NULL); const_double_htab = htab_create_ggc (37, const_double_htab_hash, const_double_htab_eq, NULL); + const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash, + const_fixed_htab_eq, NULL); + mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash, mem_attrs_htab_eq, NULL); reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash, @@ -5145,7 +5253,8 @@ init_emit_once (int line_numbers) word_mode = VOIDmode; double_mode = VOIDmode; - for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; + for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); + mode != VOIDmode; mode = GET_MODE_WIDER_MODE (mode)) { if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT @@ -5157,7 +5266,8 @@ init_emit_once (int line_numbers) word_mode = mode; } - for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode; + for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); + mode != VOIDmode; mode = GET_MODE_WIDER_MODE (mode)) { if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE @@ -5167,34 +5277,6 @@ init_emit_once (int line_numbers) ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0); - /* Assign register numbers to the globally defined register rtx. - This must be done at runtime because the register number field - is in a union and some compilers can't initialize unions. 
*/ - - pc_rtx = gen_rtx_PC (VOIDmode); - cc0_rtx = gen_rtx_CC0 (VOIDmode); - stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM); - frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM); - if (hard_frame_pointer_rtx == 0) - hard_frame_pointer_rtx = gen_raw_REG (Pmode, - HARD_FRAME_POINTER_REGNUM); - if (arg_pointer_rtx == 0) - arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM); - virtual_incoming_args_rtx = - gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM); - virtual_stack_vars_rtx = - gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM); - virtual_stack_dynamic_rtx = - gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM); - virtual_outgoing_args_rtx = - gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM); - virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM); - - /* Initialize RTL for commonly used hard registers. These are - copied into regno_reg_rtx as we begin to compile each function. */ - for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) - static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i); - #ifdef INIT_EXPANDERS /* This is to initialize {init|mark|free}_machine_status before the first call to push_function_context_to. This is needed by the Chill front @@ -5232,8 +5314,8 @@ init_emit_once (int line_numbers) /* Initialize mathematical constants for constant folding builtins. These constants need to be given to at least 160 bits precision. */ - real_from_string (&dconstpi, - "3.1415926535897932384626433832795028841971693993751058209749445923078"); + real_from_string (&dconstsqrt2, + "1.4142135623730950488016887242096980785696718753769480731766797379907"); real_from_string (&dconste, "2.7182818284590452353602874713526624977572470936999595749669676277241"); @@ -5242,14 +5324,22 @@ init_emit_once (int line_numbers) REAL_VALUE_TYPE *r = (i == 0 ? &dconst0 : i == 1 ? 
&dconst1 : &dconst2); - for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode; + for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); + mode != VOIDmode; + mode = GET_MODE_WIDER_MODE (mode)) + const_tiny_rtx[i][(int) mode] = + CONST_DOUBLE_FROM_REAL_VALUE (*r, mode); + + for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT); + mode != VOIDmode; mode = GET_MODE_WIDER_MODE (mode)) const_tiny_rtx[i][(int) mode] = CONST_DOUBLE_FROM_REAL_VALUE (*r, mode); const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i); - for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; + for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); + mode != VOIDmode; mode = GET_MODE_WIDER_MODE (mode)) const_tiny_rtx[i][(int) mode] = GEN_INT (i); @@ -5259,6 +5349,22 @@ init_emit_once (int line_numbers) const_tiny_rtx[i][(int) mode] = GEN_INT (i); } + for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT); + mode != VOIDmode; + mode = GET_MODE_WIDER_MODE (mode)) + { + rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)]; + const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner); + } + + for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT); + mode != VOIDmode; + mode = GET_MODE_WIDER_MODE (mode)) + { + rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)]; + const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner); + } + for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT); mode != VOIDmode; mode = GET_MODE_WIDER_MODE (mode)) @@ -5275,43 +5381,111 @@ init_emit_once (int line_numbers) const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1); } - for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i) - if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC) - const_tiny_rtx[0][i] = const0_rtx; + for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT); + mode != VOIDmode; + mode = GET_MODE_WIDER_MODE (mode)) + { + FCONST0(mode).data.high = 0; + FCONST0(mode).data.low = 0; + FCONST0(mode).mode = mode; + const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE ( + FCONST0 (mode), mode); + } - const_tiny_rtx[0][(int) BImode] = const0_rtx; - if (STORE_FLAG_VALUE == 1) - const_tiny_rtx[1][(int) BImode] = const1_rtx; + for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT); + mode != VOIDmode; + mode = GET_MODE_WIDER_MODE (mode)) + { + FCONST0(mode).data.high = 0; + FCONST0(mode).data.low = 0; + FCONST0(mode).mode = mode; + const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE ( + FCONST0 (mode), mode); + } -#ifdef RETURN_ADDRESS_POINTER_REGNUM - return_address_pointer_rtx - = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM); -#endif + for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM); + mode != VOIDmode; + mode = GET_MODE_WIDER_MODE (mode)) + { + FCONST0(mode).data.high = 0; + FCONST0(mode).data.low = 0; + FCONST0(mode).mode = mode; + const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE ( + FCONST0 (mode), mode); + + /* We store the value 1. 
*/ + FCONST1(mode).data.high = 0; + FCONST1(mode).data.low = 0; + FCONST1(mode).mode = mode; + lshift_double (1, 0, GET_MODE_FBIT (mode), + 2 * HOST_BITS_PER_WIDE_INT, + &FCONST1(mode).data.low, + &FCONST1(mode).data.high, + SIGNED_FIXED_POINT_MODE_P (mode)); + const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE ( + FCONST1 (mode), mode); + } -#ifdef STATIC_CHAIN_REGNUM - static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM); + for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM); + mode != VOIDmode; + mode = GET_MODE_WIDER_MODE (mode)) + { + FCONST0(mode).data.high = 0; + FCONST0(mode).data.low = 0; + FCONST0(mode).mode = mode; + const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE ( + FCONST0 (mode), mode); + + /* We store the value 1. */ + FCONST1(mode).data.high = 0; + FCONST1(mode).data.low = 0; + FCONST1(mode).mode = mode; + lshift_double (1, 0, GET_MODE_FBIT (mode), + 2 * HOST_BITS_PER_WIDE_INT, + &FCONST1(mode).data.low, + &FCONST1(mode).data.high, + SIGNED_FIXED_POINT_MODE_P (mode)); + const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE ( + FCONST1 (mode), mode); + } -#ifdef STATIC_CHAIN_INCOMING_REGNUM - if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM) - static_chain_incoming_rtx - = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM); - else -#endif - static_chain_incoming_rtx = static_chain_rtx; -#endif + for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT); + mode != VOIDmode; + mode = GET_MODE_WIDER_MODE (mode)) + { + const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); + } -#ifdef STATIC_CHAIN - static_chain_rtx = STATIC_CHAIN; + for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT); + mode != VOIDmode; + mode = GET_MODE_WIDER_MODE (mode)) + { + const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); + } -#ifdef STATIC_CHAIN_INCOMING - static_chain_incoming_rtx = STATIC_CHAIN_INCOMING; -#else - static_chain_incoming_rtx = static_chain_rtx; -#endif -#endif + for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM); + mode != VOIDmode; + mode = GET_MODE_WIDER_MODE (mode)) + { + const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); + const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1); + } - if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM) - pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM); + for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM); + mode != VOIDmode; + mode = GET_MODE_WIDER_MODE (mode)) + { + const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); + const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1); + } + + for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i) + if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC) + const_tiny_rtx[0][i] = const0_rtx; + + const_tiny_rtx[0][(int) BImode] = const0_rtx; + if (STORE_FLAG_VALUE == 1) + const_tiny_rtx[1][(int) BImode] = const1_rtx; } /* Produce exact duplicate of insn INSN after AFTER. @@ -5356,21 +5530,20 @@ emit_copy_of_insn_after (rtx insn, rtx after) which may be duplicated by the basic block reordering code. */ RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn); - /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will - make them. */ + /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label + will make them. REG_LABEL_TARGETs are created there too, but are + supposed to be sticky, so we copy them. 
*/ for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) - if (REG_NOTE_KIND (link) != REG_LABEL) + if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND) { if (GET_CODE (link) == EXPR_LIST) REG_NOTES (new) - = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link), - XEXP (link, 0), - REG_NOTES (new))); + = gen_rtx_EXPR_LIST (REG_NOTE_KIND (link), + copy_insn_1 (XEXP (link, 0)), REG_NOTES (new)); else REG_NOTES (new) - = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link), - XEXP (link, 0), - REG_NOTES (new))); + = gen_rtx_INSN_LIST (REG_NOTE_KIND (link), + XEXP (link, 0), REG_NOTES (new)); } /* Fix the libcall sequences. */
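Editor's note on the fixed-point initialization earlier in this hunk: the FCONST1 values for the MODE_ACCUM and MODE_UACCUM loops are built by shifting a raw 1 left by GET_MODE_FBIT (mode) bits with lshift_double, i.e. the value 1.0 in a format with FBIT fractional bits is simply the raw integer 1 << FBIT. The stand-alone sketch below illustrates only that encoding; it does not use GCC's FIXED_VALUE_TYPE, and the 16-bit layout with 7 fractional bits, as well as the names EXAMPLE_FBIT, encode_one and decode, are hypothetical choices made for the example.

/* Minimal illustration (plain C) of the fixed-point encoding the patch
   computes for FCONST1: 1.0 in a format with EXAMPLE_FBIT fractional bits
   is the raw integer 1 << EXAMPLE_FBIT.  GCC derives the shift count from
   the machine mode via GET_MODE_FBIT instead of a fixed constant.  */
#include <stdio.h>
#include <stdint.h>

#define EXAMPLE_FBIT 7		/* hypothetical number of fractional bits */

static int16_t
encode_one (void)
{
  /* Raw bit pattern representing the value 1.0.  */
  return (int16_t) (1 << EXAMPLE_FBIT);
}

static double
decode (int16_t raw)
{
  /* Convert the raw fixed-point bits back to a double for display.  */
  return (double) raw / (double) (1 << EXAMPLE_FBIT);
}

int
main (void)
{
  int16_t one = encode_one ();
  /* Prints: raw = 128, value = 1.000000  */
  printf ("raw = %d, value = %f\n", one, decode (one));
  return 0;
}

The same idea explains why only the signed/unsigned accum modes get a FCONST1 entry in the patch: fractional-only modes (MODE_FRACT, MODE_UFRACT) cannot represent 1.0 exactly, so the loops above initialize only their zero constant.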