X-Git-Url: http://git.sourceforge.jp/view?p=pf3gnuchains%2Fgcc-fork.git;a=blobdiff_plain;f=gcc%2Fpostreload.c;h=47930ad042acca16f649cc56e9d8ebf3a971716f;hp=3b15118f3294c6d43ccbfe9d2c81f17c37ff7519;hb=f16feee25a739555a76f0cd97aca0b2f8f267aa3;hpb=21f1e711ba84209ea411dc821cd70757318894ad diff --git a/gcc/postreload.c b/gcc/postreload.c index 3b15118f329..47930ad042a 100644 --- a/gcc/postreload.c +++ b/gcc/postreload.c @@ -1,6 +1,6 @@ /* Perform simple optimizations to clean up the result of reload. Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998, - 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc. + 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc. This file is part of GCC. @@ -16,8 +16,8 @@ for more details. You should have received a copy of the GNU General Public License along with GCC; see the file COPYING. If not, write to the Free -Software Foundation, 59 Temple Place - Suite 330, Boston, MA -02111-1307, USA. */ +Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA +02110-1301, USA. */ #include "config.h" #include "system.h" @@ -44,6 +44,8 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA #include "toplev.h" #include "except.h" #include "tree.h" +#include "timevar.h" +#include "tree-pass.h" static int reload_cse_noop_set_p (rtx); static void reload_cse_simplify (rtx, rtx); @@ -118,6 +120,19 @@ reload_cse_simplify (rtx insn, rtx testreg) int count = 0; rtx value = NULL_RTX; + /* Registers mentioned in the clobber list for an asm cannot be reused + within the body of the asm. Invalidate those registers now so that + we don't try to substitute values for them. */ + if (asm_noperands (body) >= 0) + { + for (i = XVECLEN (body, 0) - 1; i >= 0; --i) + { + rtx part = XVECEXP (body, 0, i); + if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0))) + cselib_invalidate_rtx (XEXP (part, 0)); + } + } + /* If every action in a PARALLEL is a noop, we can delete the entire PARALLEL. */ for (i = XVECLEN (body, 0) - 1; i >= 0; --i) @@ -280,7 +295,7 @@ reload_cse_simplify_set (rtx set, rtx insn) if (this_val == trunc_int_for_mode (this_val, GET_MODE (src))) break; default: - abort (); + gcc_unreachable (); } this_rtx = GEN_INT (this_val); } @@ -414,7 +429,7 @@ reload_cse_simplify_operands (rtx insn, rtx testreg) extension. Punt on this for now. */ if (! set) continue; - /* If the destination is a also MEM or a STRICT_LOW_PART, no + /* If the destination is also a MEM or a STRICT_LOW_PART, no extension applies. Also, if there is an explicit extension, we don't have to worry about an implicit one. */ @@ -560,6 +575,7 @@ reload_cse_simplify_operands (rtx insn, rtx testreg) op_alt_regno[i][j] = regno; } j++; + class = (int) NO_REGS; break; } p += CONSTRAINT_LEN (c, p); @@ -592,7 +608,7 @@ reload_cse_simplify_operands (rtx insn, rtx testreg) int this_nregs = alternative_nregs[alternative_order[j]]; if (this_reject < best_reject - || (this_reject == best_reject && this_nregs < best_nregs)) + || (this_reject == best_reject && this_nregs > best_nregs)) { best = j; best_reject = this_reject; @@ -715,7 +731,7 @@ reload_combine (void) destination. 
*/ min_labelno = get_first_label_num (); n_labels = max_label_num () - min_labelno; - label_live = xmalloc (n_labels * sizeof (HARD_REG_SET)); + label_live = XNEWVEC (HARD_REG_SET, n_labels); CLEAR_HARD_REG_SET (ever_live_at_start); FOR_EACH_BB_REVERSE (bb) @@ -726,9 +742,9 @@ reload_combine (void) HARD_REG_SET live; REG_SET_TO_HARD_REG_SET (live, - bb->global_live_at_start); + bb->il.rtl->global_live_at_start); compute_use_by_pseudos (&live, - bb->global_live_at_start); + bb->il.rtl->global_live_at_start); COPY_HARD_REG_SET (LABEL_LIVE (insn), live); IOR_HARD_REG_SET (ever_live_at_start, live); } @@ -871,22 +887,13 @@ reload_combine (void) if (apply_change_group ()) { - rtx *np; - /* Delete the reg-reg addition. */ delete_insn (insn); if (reg_state[regno].offset != const0_rtx) /* Previous REG_EQUIV / REG_EQUAL notes for PREV are now invalid. */ - for (np = ®_NOTES (prev); *np;) - { - if (REG_NOTE_KIND (*np) == REG_EQUAL - || REG_NOTE_KIND (*np) == REG_EQUIV) - *np = XEXP (*np, 1); - else - np = &XEXP (*np, 1); - } + remove_reg_equal_equiv_notes (prev); reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES; reg_state[REGNO (const_reg)].store_ruid @@ -992,11 +999,9 @@ reload_combine_note_store (rtx dst, rtx set, void *data ATTRIBUTE_UNUSED) /* note_stores might have stripped a STRICT_LOW_PART, so we have to be careful with registers / register parts that are not full words. - - Similarly for ZERO_EXTRACT and SIGN_EXTRACT. */ + Similarly for ZERO_EXTRACT. */ if (GET_CODE (set) != SET || GET_CODE (SET_DEST (set)) == ZERO_EXTRACT - || GET_CODE (SET_DEST (set)) == SIGN_EXTRACT || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART) { for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--) @@ -1057,8 +1062,7 @@ reload_combine_note_use (rtx *xp, rtx insn) if (REG_P (SET_DEST (x))) { /* No spurious CLOBBERs of pseudo registers may remain. */ - if (REGNO (SET_DEST (x)) >= FIRST_PSEUDO_REGISTER) - abort (); + gcc_assert (REGNO (SET_DEST (x)) < FIRST_PSEUDO_REGISTER); return; } break; @@ -1078,8 +1082,7 @@ reload_combine_note_use (rtx *xp, rtx insn) int nregs; /* No spurious USEs of pseudo registers may remain. */ - if (regno >= FIRST_PSEUDO_REGISTER) - abort (); + gcc_assert (regno < FIRST_PSEUDO_REGISTER); nregs = hard_regno_nregs[regno][GET_MODE (x)]; @@ -1233,10 +1236,8 @@ reload_cse_move2add (rtx first) if (GET_CODE (src) == CONST_INT && reg_base_reg[regno] < 0) { - rtx new_src = - GEN_INT (trunc_int_for_mode (INTVAL (src) - - reg_offset[regno], - GET_MODE (reg))); + rtx new_src = gen_int_mode (INTVAL (src) - reg_offset[regno], + GET_MODE (reg)); /* (set (reg) (plus (reg) (const_int 0))) is not canonical; use (set (reg) (reg)) instead. 
We don't delete this insn, nor do we convert it into a @@ -1258,11 +1259,12 @@ reload_cse_move2add (rtx first) rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src); validate_change (insn, &SET_SRC (pat), tem, 0); } - else + else if (GET_MODE (reg) != BImode) { enum machine_mode narrow_mode; for (narrow_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); - narrow_mode != GET_MODE (reg); + narrow_mode != VOIDmode + && narrow_mode != GET_MODE (reg); narrow_mode = GET_MODE_WIDER_MODE (narrow_mode)) { if (have_insn_for (STRICT_LOW_PART, narrow_mode) @@ -1273,9 +1275,8 @@ reload_cse_move2add (rtx first) { rtx narrow_reg = gen_rtx_REG (narrow_mode, REGNO (reg)); - rtx narrow_src = - GEN_INT (trunc_int_for_mode (INTVAL (src), - narrow_mode)); + rtx narrow_src = gen_int_mode (INTVAL (src), + narrow_mode); rtx new_set = gen_rtx_SET (VOIDmode, gen_rtx_STRICT_LOW_PART (VOIDmode, @@ -1324,10 +1325,10 @@ reload_cse_move2add (rtx first) HOST_WIDE_INT base_offset = reg_offset[REGNO (src)]; HOST_WIDE_INT regno_offset = reg_offset[regno]; rtx new_src = - GEN_INT (trunc_int_for_mode (added_offset - + base_offset - - regno_offset, - GET_MODE (reg))); + gen_int_mode (added_offset + + base_offset + - regno_offset, + GET_MODE (reg)); int success = 0; if (new_src == const0_rtx) @@ -1375,13 +1376,14 @@ reload_cse_move2add (rtx first) /* If INSN is a conditional branch, we try to extract an implicit set out of it. */ - if (any_condjump_p (insn) && onlyjump_p (insn)) + if (any_condjump_p (insn)) { rtx cnd = fis_get_condition (insn); if (cnd != NULL_RTX && GET_CODE (cnd) == NE && REG_P (XEXP (cnd, 0)) + && !reg_set_p (XEXP (cnd, 0), insn) /* The following two checks, which are also in move2add_note_store, are intended to reduce the number of calls to gen_rtx_SET to avoid memory @@ -1418,6 +1420,7 @@ static void move2add_note_store (rtx dst, rtx set, void *data ATTRIBUTE_UNUSED) { unsigned int regno = 0; + unsigned int nregs = 0; unsigned int i; enum machine_mode mode = GET_MODE (dst); @@ -1427,6 +1430,7 @@ move2add_note_store (rtx dst, rtx set, void *data ATTRIBUTE_UNUSED) GET_MODE (SUBREG_REG (dst)), SUBREG_BYTE (dst), GET_MODE (dst)); + nregs = subreg_nregs (dst); dst = SUBREG_REG (dst); } @@ -1444,11 +1448,12 @@ move2add_note_store (rtx dst, rtx set, void *data ATTRIBUTE_UNUSED) return; regno += REGNO (dst); + if (!nregs) + nregs = hard_regno_nregs[regno][mode]; - if (SCALAR_INT_MODE_P (mode) - && hard_regno_nregs[regno][mode] == 1 && GET_CODE (set) == SET + if (SCALAR_INT_MODE_P (GET_MODE (dst)) + && nregs == 1 && GET_CODE (set) == SET && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT - && GET_CODE (SET_DEST (set)) != SIGN_EXTRACT && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART) { rtx src = SET_SRC (set); @@ -1547,10 +1552,47 @@ move2add_note_store (rtx dst, rtx set, void *data ATTRIBUTE_UNUSED) } else { - unsigned int endregno = regno + hard_regno_nregs[regno][mode]; + unsigned int endregno = regno + nregs; for (i = regno; i < endregno; i++) /* Reset the information about this register. */ reg_set_luid[i] = 0; } } + +static bool +gate_handle_postreload (void) +{ + return (optimize > 0); +} + + +static unsigned int +rest_of_handle_postreload (void) +{ + /* Do a very simple CSE pass over just the hard registers. */ + reload_cse_regs (get_insns ()); + /* reload_cse_regs can eliminate potentially-trapping MEMs. + Remove any EH edges associated with them. 
*/ + if (flag_non_call_exceptions) + purge_all_dead_edges (); + return 0; +} + +struct tree_opt_pass pass_postreload_cse = +{ + "postreload", /* name */ + gate_handle_postreload, /* gate */ + rest_of_handle_postreload, /* execute */ + NULL, /* sub */ + NULL, /* next */ + 0, /* static_pass_number */ + TV_RELOAD_CSE_REGS, /* tv_id */ + 0, /* properties_required */ + 0, /* properties_provided */ + 0, /* properties_destroyed */ + 0, /* todo_flags_start */ + TODO_dump_func, /* todo_flags_finish */ + 'o' /* letter */ +}; +
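
The largest hunks above touch reload_cse_move2add and its helper move2add_note_store, which track the constant most recently loaded into each hard register (reg_offset[] guarded by reg_set_luid[]) so that a later load of a nearby constant can be rewritten as a register-plus-small-offset add. The standalone C sketch below illustrates only that core bookkeeping; struct toy_insn, move2add_step, the known[]/reg_value[] arrays, and the signed 8-bit immediate test are invented for the illustration and stand in for GCC's rtx machinery and cost checks, not for the actual postreload.c code.

/* Minimal, standalone sketch of the move2add idea: remember the
   constant most recently loaded into each register, and turn a later
   "reg = C2" into "reg = reg + (C2 - C1)" when the delta is small.
   Toy types only -- not GCC's rtx representation.  */

#include <stdio.h>

#define NREGS 8

enum toy_op { LOAD_CONST, ADD_CONST };

struct toy_insn
{
  enum toy_op op;
  int reg;
  long long value;		/* Constant loaded, or addend.  */
};

/* Per-register tracking, loosely analogous to reg_offset[] being
   valid only when reg_set_luid[] says so in postreload.c.  */
static int known[NREGS];
static long long reg_value[NREGS];

/* Rewrite "reg = C2" as "reg = reg + delta" when the register is
   already known to hold C1 and the delta fits in a signed 8-bit
   immediate (a stand-in for the real pass's cost comparison).  */
static void
move2add_step (struct toy_insn *insn)
{
  int r = insn->reg;

  if (insn->op == LOAD_CONST && known[r])
    {
      long long delta = insn->value - reg_value[r];
      if (delta >= -128 && delta <= 127)
	{
	  insn->op = ADD_CONST;
	  insn->value = delta;
	}
    }

  /* Either way, the register holds a known constant afterwards; for
     an ADD_CONST produced above the new value is old + delta.  */
  reg_value[r] = (insn->op == LOAD_CONST
		  ? insn->value : reg_value[r] + insn->value);
  known[r] = 1;
}

int
main (void)
{
  struct toy_insn prog[] = {
    { LOAD_CONST, 3, 1000 },
    { LOAD_CONST, 3, 1004 },	/* Becomes r3 = r3 + 4.  */
    { LOAD_CONST, 3, 5000 }	/* Delta too big: stays a load.  */
  };
  size_t i;

  for (i = 0; i < sizeof prog / sizeof prog[0]; i++)
    {
      move2add_step (&prog[i]);
      printf ("%s r%d, %lld\n",
	      prog[i].op == LOAD_CONST ? "mov" : "add",
	      prog[i].reg, prog[i].value);
    }
  return 0;
}

In the real pass the corresponding invalidation lives in move2add_note_store, which appears to be why the hunks above give it a subreg_nregs/hard_regno_nregs-based register count: a store that clobbers part of a multi-register value has to reset reg_set_luid[] for every hard register the destination spans.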