X-Git-Url: http://git.sourceforge.jp/view?p=pf3gnuchains%2Fgcc-fork.git;a=blobdiff_plain;f=gcc%2Femit-rtl.c;h=e0acc0c276605680bac77968985b3874821b0d41;hp=9096a62dcbf06c8e3e558283f713a909773017af;hb=c18c9264dc78d955933ecc5a2b93a82ea5ab6423;hpb=9845d1202fec65574ca05d780859eb8c25489566

diff --git a/gcc/emit-rtl.c b/gcc/emit-rtl.c
index 9096a62dcbf..e0acc0c2766 100644
--- a/gcc/emit-rtl.c
+++ b/gcc/emit-rtl.c
@@ -1,6 +1,7 @@
 /* Emit RTL for the GCC expander.
    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
-   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
+   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
+   2010
    Free Software Foundation, Inc.
 
 This file is part of GCC.
@@ -59,6 +60,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-pass.h"
 #include "df.h"
 #include "params.h"
+#include "target.h"
 
 /* Commonly used modes.  */
 
@@ -72,7 +74,7 @@ enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */
 struct rtl_data x_rtl;
 
 /* Indexed by pseudo register number, gives the rtx for that pseudo.
-   Allocated in parallel with regno_pointer_align.  
+   Allocated in parallel with regno_pointer_align.
    FIXME: We could put it into emit_status struct, but gengtype is not able to deal
    with length attribute nested in top level structures.  */
 
@@ -83,10 +85,6 @@ rtx * regno_reg_rtx;
 
 static GTY(()) int label_num = 1;
 
-/* Nonzero means do not generate NOTEs for source line numbers.  */
-
-static int no_line_numbers;
-
 /* Commonly used rtx's, so that we only need space for one copy.  These are
    initialized once for the entire compilation.  All of these are unique;
    no other rtx-object will be equal to any of these.  */
@@ -136,8 +134,6 @@ FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
    In an inline procedure, the stack and frame pointer rtxs may not be
    used for anything else.  */
 
-rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
-rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
 rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
 
 /* This is used to implement __builtin_return_address for some machines.
    See for instance the MIPS port.  */
@@ -195,7 +191,7 @@ static rtx lookup_const_fixed (rtx);
 static hashval_t mem_attrs_htab_hash (const void *);
 static int mem_attrs_htab_eq (const void *, const void *);
 static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
-				 enum machine_mode);
+				 addr_space_t, enum machine_mode);
 static hashval_t reg_attrs_htab_hash (const void *);
 static int reg_attrs_htab_eq (const void *, const void *);
 static reg_attrs *get_reg_attrs (tree, int);
@@ -294,6 +290,7 @@ mem_attrs_htab_hash (const void *x)
   const mem_attrs *const p = (const mem_attrs *) x;
 
   return (p->alias ^ (p->align * 1000)
+	  ^ (p->addrspace * 4000)
 	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
 	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
 	  ^ (size_t) iterative_hash_expr (p->expr, 0));
@@ -311,6 +308,7 @@ mem_attrs_htab_eq (const void *x, const void *y)
 
   return (p->alias == q->alias && p->offset == q->offset
 	  && p->size == q->size && p->align == q->align
+	  && p->addrspace == q->addrspace
 	  && (p->expr == q->expr
 	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
 		  && operand_equal_p (p->expr, q->expr, 0))));
@@ -322,7 +320,7 @@ mem_attrs_htab_eq (const void *x, const void *y)
 
 static mem_attrs *
 get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
-	       unsigned int align, enum machine_mode mode)
+	       unsigned int align, addr_space_t addrspace, enum machine_mode mode)
 {
   mem_attrs attrs;
   void **slot;
@@ -330,7 +328,7 @@ get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
   /* If everything is the default, we can just return zero.
      This must match what the corresponding MEM_* macros return when the
      field is not present.  */
-  if (alias == 0 && expr == 0 && offset == 0
+  if (alias == 0 && expr == 0 && offset == 0 && addrspace == 0
       && (size == 0
 	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
       && (STRICT_ALIGNMENT && mode != BLKmode
@@ -342,6 +340,7 @@
   attrs.offset = offset;
   attrs.size = size;
   attrs.align = align;
+  attrs.addrspace = addrspace;
 
   slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
   if (*slot == 0)
@@ -519,6 +518,36 @@ const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
   return lookup_const_fixed (fixed);
 }
 
+/* Constructs double_int from rtx CST.  */
+
+double_int
+rtx_to_double_int (const_rtx cst)
+{
+  double_int r;
+
+  if (CONST_INT_P (cst))
+    r = shwi_to_double_int (INTVAL (cst));
+  else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
+    {
+      r.low = CONST_DOUBLE_LOW (cst);
+      r.high = CONST_DOUBLE_HIGH (cst);
+    }
+  else
+    gcc_unreachable ();
+
+  return r;
+}
+
+
+/* Return a CONST_DOUBLE or CONST_INT for a value specified as
+   a double_int.  */
+
+rtx
+immed_double_int_const (double_int i, enum machine_mode mode)
+{
+  return immed_double_const (i.low, i.high, mode);
+}
+
 /* Return a CONST_DOUBLE or CONST_INT for a value specified as
    a pair of ints: I0 is the low-order word and I1 is the high-order word.
    Do not use this routine for non-integer modes; convert to
@@ -537,7 +566,7 @@ immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
 	gen_int_mode.
      2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
 	the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
-	from copies of the sign bit, and sign of i0 and i1 are the same), then 
+	from copies of the sign bit, and sign of i0 and i1 are the same), then
 	we return a CONST_INT for i0.
      3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
   if (mode != VOIDmode)
@@ -867,7 +896,7 @@ gen_reg_rtx (enum machine_mode mode)
   /* If a virtual register with bigger mode alignment is generated,
      increase stack alignment estimation because it might be spilled
      to stack later.  */
-  if (SUPPORTS_STACK_ALIGNMENT 
+  if (SUPPORTS_STACK_ALIGNMENT
       && crtl->stack_alignment_estimated < align
       && !crtl->stack_realign_processed)
     {
@@ -1191,7 +1220,7 @@ gen_lowpart_common (enum machine_mode mode, rtx x)
     innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
   else if (innermode == VOIDmode)
     innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
-  
+
   xsize = GET_MODE_SIZE (innermode);
 
   gcc_assert (innermode != VOIDmode && innermode != BLKmode);
@@ -1252,7 +1281,7 @@ gen_highpart (enum machine_mode mode, rtx x)
   result = simplify_gen_subreg (mode, x, GET_MODE (x),
 				subreg_highpart_offset (mode, GET_MODE (x)));
   gcc_assert (result);
-  
+
   /* simplify_gen_subreg is not guaranteed to return a valid operand for
      the target if we have a MEM.  gen_highpart must return a valid operand,
      emitting code if necessary to do so.  */
@@ -1261,7 +1290,7 @@
       result = validize_mem (result);
       gcc_assert (result);
     }
-  
+
   return result;
 }
 
@@ -1388,7 +1417,9 @@ operand_subword (rtx op, unsigned int offset, int validate_address, enum machine
 
       else if (reload_completed)
 	{
-	  if (! strict_memory_address_p (word_mode, XEXP (new_rtx, 0)))
+	  if (! strict_memory_address_addr_space_p (word_mode,
+						    XEXP (new_rtx, 0),
+						    MEM_ADDR_SPACE (op)))
 	    return 0;
 	}
       else
@@ -1583,11 +1614,11 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
 
   /* We can set the alignment from the type if we are making an object,
      this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
-  if (objectp || TREE_CODE (t) == INDIRECT_REF 
-      || TREE_CODE (t) == ALIGN_INDIRECT_REF 
+  if (objectp || TREE_CODE (t) == INDIRECT_REF
+      || TREE_CODE (t) == ALIGN_INDIRECT_REF
       || TYPE_ALIGN_OK (type))
     align = MAX (align, TYPE_ALIGN (type));
-  else 
+  else
     if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
       {
 	if (integer_zerop (TREE_OPERAND (t, 1)))
@@ -1750,23 +1781,24 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
 	      /* ??? Any reason the field size would be different than
 		 the size we got from the type?  */
 	    }
-	  else if (flag_argument_noalias > 1
-		   && (INDIRECT_REF_P (t2))
-		   && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
+
+	  /* If this is an indirect reference, record it.  */
+	  else if (TREE_CODE (t) == INDIRECT_REF
+		   || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
 	    {
-	      expr = t2;
-	      offset = NULL;
+	      expr = t;
+	      offset = const0_rtx;
+	      apply_bitpos = bitpos;
 	    }
 	}
 
-      /* If this is a Fortran indirect argument reference, record the
-	 parameter decl.  */
-      else if (flag_argument_noalias > 1
-	       && (INDIRECT_REF_P (t))
-	       && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
+      /* If this is an indirect reference, record it.  */
+      else if (TREE_CODE (t) == INDIRECT_REF
+	       || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
 	{
 	  expr = t;
-	  offset = NULL;
+	  offset = const0_rtx;
+	  apply_bitpos = bitpos;
 	}
 
       if (!align_computed && !INDIRECT_REF_P (t))
@@ -1797,7 +1829,8 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
 
   /* Now set the attributes we computed above.  */
   MEM_ATTRS (ref)
-    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
+    = get_mem_attrs (alias, expr, offset, size, align,
+		     TYPE_ADDR_SPACE (type), GET_MODE (ref));
 
   /* If this is already known to be a scalar or aggregate, we are done.  */
   if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
@@ -1829,7 +1862,17 @@ set_mem_alias_set (rtx mem, alias_set_type set)
 
   MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
 				   MEM_SIZE (mem), MEM_ALIGN (mem),
-				   GET_MODE (mem));
+				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
+}
+
+/* Set the address space of MEM to ADDRSPACE (target-defined).  */
+
+void
+set_mem_addr_space (rtx mem, addr_space_t addrspace)
+{
+  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
+				   MEM_OFFSET (mem), MEM_SIZE (mem),
+				   MEM_ALIGN (mem), addrspace, GET_MODE (mem));
 }
 
 /* Set the alignment of MEM to ALIGN bits.  */
@@ -1839,7 +1882,7 @@ set_mem_align (rtx mem, unsigned int align)
 {
   MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
 				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
-				   GET_MODE (mem));
+				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
 }
 
 /* Set the expr for MEM to EXPR.  */
@@ -1849,7 +1892,8 @@ set_mem_expr (rtx mem, tree expr)
 {
   MEM_ATTRS (mem)
     = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
-		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
+		     MEM_SIZE (mem), MEM_ALIGN (mem),
+		     MEM_ADDR_SPACE (mem), GET_MODE (mem));
 }
 
 /* Set the offset of MEM to OFFSET.  */
@@ -1859,7 +1903,7 @@ set_mem_offset (rtx mem, rtx offset)
 {
   MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem), offset,
 				   MEM_SIZE (mem), MEM_ALIGN (mem),
-				   GET_MODE (mem));
+				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
 }
 
 /* Set the size of MEM to SIZE.  */
@@ -1869,7 +1913,7 @@ set_mem_size (rtx mem, rtx size)
 {
   MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
 				   MEM_OFFSET (mem), size, MEM_ALIGN (mem),
-				   GET_MODE (mem));
+				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
 }
 
 /* Return a memory reference like MEMREF, but with its mode changed to MODE
@@ -1881,23 +1925,25 @@ set_mem_size (rtx mem, rtx size)
 static rtx
 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
 {
+  addr_space_t as;
   rtx new_rtx;
 
   gcc_assert (MEM_P (memref));
+  as = MEM_ADDR_SPACE (memref);
   if (mode == VOIDmode)
     mode = GET_MODE (memref);
   if (addr == 0)
     addr = XEXP (memref, 0);
   if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
-      && (!validate || memory_address_p (mode, addr)))
+      && (!validate || memory_address_addr_space_p (mode, addr, as)))
     return memref;
 
   if (validate)
     {
       if (reload_in_progress || reload_completed)
-	gcc_assert (memory_address_p (mode, addr));
+	gcc_assert (memory_address_addr_space_p (mode, addr, as));
       else
-	addr = memory_address (mode, addr);
+	addr = memory_address_addr_space (mode, addr, as);
     }
 
   if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
@@ -1936,7 +1982,8 @@ change_address (rtx memref, enum machine_mode mode, rtx addr)
     }
 
   MEM_ATTRS (new_rtx)
-    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
+    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align,
+		     MEM_ADDR_SPACE (memref), mmode);
 
   return new_rtx;
 }
@@ -1956,11 +2003,13 @@ adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
   rtx memoffset = MEM_OFFSET (memref);
   rtx size = 0;
   unsigned int memalign = MEM_ALIGN (memref);
+  addr_space_t as = MEM_ADDR_SPACE (memref);
+  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
   int pbits;
 
   /* If there are no changes, just return the original memory reference.  */
   if (mode == GET_MODE (memref) && !offset
-      && (!validate || memory_address_p (mode, addr)))
+      && (!validate || memory_address_addr_space_p (mode, addr, as)))
     return memref;
 
   /* ??? Prefer to create garbage instead of creating shared rtl.
@@ -1970,7 +2019,7 @@ adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
 
   /* Convert a possibly large offset to a signed value within the
      range of the target address space.  */
-  pbits = GET_MODE_BITSIZE (Pmode);
+  pbits = GET_MODE_BITSIZE (address_mode);
   if (HOST_BITS_PER_WIDE_INT > pbits)
     {
       int shift = HOST_BITS_PER_WIDE_INT - pbits;
@@ -1986,7 +2035,7 @@ adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
 	  && offset >= 0
 	  && (unsigned HOST_WIDE_INT) offset
 	      < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
-	addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
+	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
 			       plus_constant (XEXP (addr, 1), offset));
       else
 	addr = plus_constant (addr, offset);
@@ -2019,7 +2068,8 @@ adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
     size = plus_constant (MEM_SIZE (memref), -offset);
 
   MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
-				       memoffset, size, memalign, GET_MODE (new_rtx));
+				       memoffset, size, memalign, as,
+				       GET_MODE (new_rtx));
 
   /* At some point, we should validate that this offset is within the
      object, if all the appropriate values are known.  */
@@ -2047,8 +2097,10 @@ rtx
 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
 {
   rtx new_rtx, addr = XEXP (memref, 0);
+  addr_space_t as = MEM_ADDR_SPACE (memref);
+  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
 
-  new_rtx = simplify_gen_binary (PLUS, Pmode, addr, offset);
+  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
 
   /* At this point we don't know _why_ the address is invalid.  It
      could have secondary memory references, multiplies or anything.
@@ -2057,12 +2109,12 @@ offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
      being able to recognize the magic around pic_offset_table_rtx.
      This stuff is fragile, and is yet another example of why it is
      bad to expose PIC machinery too early.  */
-  if (! memory_address_p (GET_MODE (memref), new_rtx)
+  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, as)
       && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
     {
       addr = force_reg (GET_MODE (addr), addr);
-      new_rtx = simplify_gen_binary (PLUS, Pmode, addr, offset);
+      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
     }
 
   update_temp_slot_address (XEXP (memref, 0), new_rtx);
@@ -2077,7 +2129,7 @@ offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
   MEM_ATTRS (new_rtx)
     = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
 		     MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
-		     GET_MODE (new_rtx));
+		     as, GET_MODE (new_rtx));
 
   return new_rtx;
 }
@@ -2181,7 +2233,8 @@ widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
   /* ??? Maybe use get_alias_set on any remaining expression.  */
 
   MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
-				       MEM_ALIGN (new_rtx), mode);
+				       MEM_ALIGN (new_rtx),
+				       MEM_ADDR_SPACE (new_rtx), mode);
 
   return new_rtx;
 }
@@ -2209,7 +2262,7 @@ get_spill_slot_decl (bool force_build_p)
   rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
   MEM_NOTRAP_P (rd) = 1;
   MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
-				  NULL_RTX, 0, BLKmode);
+				  NULL_RTX, 0, ADDR_SPACE_GENERIC, BLKmode);
   SET_DECL_RTL (d, rd);
 
   return d;
@@ -2242,7 +2295,7 @@ set_mem_attrs_for_spill (rtx mem)
 
   MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
 				   MEM_SIZE (mem), MEM_ALIGN (mem),
-				   GET_MODE (mem));
+				   ADDR_SPACE_GENERIC, GET_MODE (mem));
   MEM_NOTRAP_P (mem) = 1;
 }
 
@@ -2395,6 +2448,8 @@ verify_rtx_sharing (rtx orig, rtx insn)
   switch (code)
     {
     case REG:
+    case DEBUG_EXPR:
+    case VALUE:
    case CONST_INT:
     case CONST_DOUBLE:
     case CONST_FIXED:
@@ -2442,7 +2497,7 @@
 	}
 #endif
   gcc_assert (!RTX_FLAG (x, used));
-  
+
   RTX_FLAG (x, used) = 1;
 
   /* Now scan the subexpressions recursively.  */
@@ -2595,6 +2650,8 @@ repeat:
   switch (code)
     {
     case REG:
+    case DEBUG_EXPR:
+    case VALUE:
     case CONST_INT:
     case CONST_DOUBLE:
     case CONST_FIXED:
@@ -2648,7 +2705,7 @@ repeat:
   format_ptr = GET_RTX_FORMAT (code);
   length = GET_RTX_LENGTH (code);
   last_ptr = NULL;
-  
+
   for (i = 0; i < length; i++)
     {
      switch (*format_ptr++)
@@ -2664,12 +2721,12 @@ repeat:
 	  {
 	    int j;
 	    int len = XVECLEN (x, i);
-	    
+
 	    /* Copy the vector iff I copied the rtx and the length
 	       is nonzero.  */
 	    if (copied && len > 0)
 	      XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
-	    
+
 	    /* Call recursively on all inside the vector.  */
 	    for (j = 0; j < len; j++)
 	      {
@@ -2714,6 +2771,8 @@ repeat:
   switch (code)
     {
     case REG:
+    case DEBUG_EXPR:
+    case VALUE:
     case CONST_INT:
     case CONST_DOUBLE:
     case CONST_FIXED:
@@ -2742,7 +2801,7 @@ repeat:
 
   format_ptr = GET_RTX_FORMAT (code);
   length = GET_RTX_LENGTH (code);
-  
+
   for (i = 0; i < length; i++)
     {
       switch (*format_ptr++)
@@ -2785,6 +2844,8 @@ set_used_flags (rtx x)
   switch (code)
     {
     case REG:
+    case DEBUG_EXPR:
+    case VALUE:
     case CONST_INT:
     case CONST_DOUBLE:
     case CONST_FIXED:
@@ -3082,6 +3143,25 @@ prev_nonnote_insn (rtx insn)
   return insn;
 }
 
+/* Return the previous insn before INSN that is not a NOTE, but stop
+   the search before we enter another basic block.  This routine does
+   not look inside SEQUENCEs.  */
+
+rtx
+prev_nonnote_insn_bb (rtx insn)
+{
+  while (insn)
+    {
+      insn = PREV_INSN (insn);
+      if (insn == 0 || !NOTE_P (insn))
+	break;
+      if (NOTE_INSN_BASIC_BLOCK_P (insn))
+	return NULL_RTX;
+    }
+
+  return insn;
+}
+
 /* Return the next insn after INSN that is not a DEBUG_INSN.
    This routine does not look inside SEQUENCEs.  */
 
@@ -3165,8 +3245,8 @@ last_call_insn (void)
 }
 
 /* Find the next insn after INSN that really does something.  This routine
-   does not look inside SEQUENCEs.  Until reload has completed, this is the
-   same as next_real_insn.  */
+   does not look inside SEQUENCEs.  After reload this also skips over
+   standalone USE and CLOBBER insn.  */
 
 int
 active_insn_p (const_rtx insn)
@@ -3192,8 +3272,8 @@ next_active_insn (rtx insn)
 }
 
 /* Find the last insn before INSN that really does something.  This routine
-   does not look inside SEQUENCEs.  Until reload has completed, this is the
-   same as prev_real_insn.  */
+   does not look inside SEQUENCEs.  After reload this also skips over
+   standalone USE and CLOBBER insn.  */
 
 rtx
 prev_active_insn (rtx insn)
@@ -3464,6 +3544,10 @@ try_split (rtx pat, rtx trial, int last)
 	      p = &XEXP (*p, 1);
 	    *p = CALL_INSN_FUNCTION_USAGE (trial);
 	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
+
+	    /* Update the debug information for the CALL_INSN.  */
+	    if (flag_enable_icf_debug)
+	      (*debug_hooks->copy_call_info) (trial, insn);
 	  }
       }
 
@@ -3473,13 +3557,7 @@ try_split (rtx pat, rtx trial, int last)
       switch (REG_NOTE_KIND (note))
 	{
 	case REG_EH_REGION:
-	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
-	    {
-	      if (CALL_P (insn)
-		  || (flag_non_call_exceptions && INSN_P (insn)
-		      && may_trap_p (PATTERN (insn))))
-		add_reg_note (insn, REG_EH_REGION, XEXP (note, 0));
-	    }
+	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
 	  break;
 
 	case REG_NORETURN:
@@ -3766,7 +3844,7 @@ add_insn_before (rtx insn, rtx before, basic_block bb)
       gcc_assert (stack);
     }
 
-  if (!bb 
+  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);
@@ -4963,15 +5041,15 @@ rtx
 emit_note_copy (rtx orig)
 {
   rtx note;
-  
+
   note = rtx_alloc (NOTE);
-  
+
   INSN_UID (note) = cur_insn_uid++;
   NOTE_DATA (note) = NOTE_DATA (orig);
   NOTE_KIND (note) = NOTE_KIND (orig);
   BLOCK_FOR_INSN (note) = NULL;
   add_insn (note);
-  
+
   return note;
 }
@@ -5658,39 +5736,16 @@ init_emit_regs (void)
     = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
 #endif
 
-#ifdef STATIC_CHAIN_REGNUM
-  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
-
-#ifdef STATIC_CHAIN_INCOMING_REGNUM
-  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
-    static_chain_incoming_rtx
-      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
-  else
-#endif
-    static_chain_incoming_rtx = static_chain_rtx;
-#endif
-
-#ifdef STATIC_CHAIN
-  static_chain_rtx = STATIC_CHAIN;
-
-#ifdef STATIC_CHAIN_INCOMING
-  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
-#else
-  static_chain_incoming_rtx = static_chain_rtx;
-#endif
-#endif
-
   if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
     pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
   else
    pic_offset_table_rtx = NULL_RTX;
 }
 
-/* Create some permanent unique rtl objects shared between all functions.
-   LINE_NUMBERS is nonzero if line numbers are to be generated.  */
+/* Create some permanent unique rtl objects shared between all functions.  */
 
 void
-init_emit_once (int line_numbers)
+init_emit_once (void)
 {
   int i;
   enum machine_mode mode;
@@ -5712,8 +5767,6 @@ init_emit_once (int line_numbers)
   reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
 				    reg_attrs_htab_eq, NULL);
 
-  no_line_numbers = ! line_numbers;
-
  /* Compute the word and byte modes.  */
 
   byte_mode = VOIDmode;
@@ -5975,7 +6028,7 @@ emit_copy_of_insn_after (rtx insn, rtx after)
       SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
-      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx) 
+      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
 	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
       break;
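
The new rtx_to_double_int and immed_double_int_const helpers added in this diff are meant to be used as a pair, so a caller does not have to distinguish a CONST_INT from a two-word VOIDmode CONST_DOUBLE.  A minimal sketch of such a caller follows; it is not part of the patch, the function name const_plus_one is made up, and double_int_add / shwi_to_double_int are assumed to come from double-int.h as usual.

/* Sketch only: return CST + 1 as a new integer constant of mode MODE,
   handling both CONST_INT and wide CONST_DOUBLE inputs.  */

static rtx
const_plus_one (rtx cst, enum machine_mode mode)
{
  double_int v = rtx_to_double_int (cst);          /* New helper.  */
  double_int one = shwi_to_double_int (1);
  return immed_double_int_const (double_int_add (v, one), mode);  /* New helper.  */
}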
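
The address-space plumbing threaded through get_mem_attrs and the set_mem_* setters can be exercised from a backend roughly as sketched below.  This is not part of the patch: the address-space number 1, the mode choice and the function name are invented for illustration, while set_mem_addr_space, MEM_ADDR_SPACE, memory_address_addr_space_p, memory_address_addr_space and targetm.addr_space.address_mode are the interfaces the patch introduces or relies on, and the usual GCC internal headers are assumed to be included.

/* Sketch only: build a MEM for symbol NAME in a target-defined,
   non-generic address space and validate its address there.  */

static rtx
make_non_generic_mem (const char *name)
{
  addr_space_t as = 1;  /* Hypothetical target-defined address space.  */
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  rtx addr = gen_rtx_SYMBOL_REF (address_mode, name);
  rtx mem = gen_rtx_MEM (QImode, addr);

  /* Record the address space in the MEM attributes; the other
     set_mem_* setters now preserve it.  */
  set_mem_addr_space (mem, as);

  /* Address validation is done per address space as well.  */
  if (!memory_address_addr_space_p (QImode, addr, as))
    {
      addr = memory_address_addr_space (QImode, addr, as);
      mem = replace_equiv_address (mem, addr);
    }

  gcc_assert (MEM_ADDR_SPACE (mem) == as);
  return mem;
}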