X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Fpostreload-gcse.c;h=0f2f67b5668fae49274fc5a4e9cbc206affb91a7;hb=f6b25e1c30aa1500028eb3b716469fa428ee094b;hp=75ca2572176c2b3ca5326a80769ee0dd562fb0ae;hpb=2b4876d244c2e5d707b79dc0e63dc930e4705079;p=pf3gnuchains%2Fgcc-fork.git

diff --git a/gcc/postreload-gcse.c b/gcc/postreload-gcse.c
index 75ca2572176..0f2f67b5668 100644
--- a/gcc/postreload-gcse.c
+++ b/gcc/postreload-gcse.c
@@ -1,12 +1,12 @@
 /* Post reload partially redundant load elimination
-   Copyright (C) 2004, 2005
+   Copyright (C) 2004, 2005, 2006, 2007
    Free Software Foundation, Inc.

 This file is part of GCC.

 GCC is free software; you can redistribute it and/or modify it under
 the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
 version.

 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
@@ -15,9 +15,8 @@ FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 for more details.

 You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING.  If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA.  */
+along with GCC; see the file COPYING3.  If not see
+<http://www.gnu.org/licenses/>.  */

 #include "config.h"
 #include "system.h"
@@ -43,6 +42,10 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
 #include "obstack.h"
 #include "hashtab.h"
 #include "params.h"
+#include "target.h"
+#include "timevar.h"
+#include "tree-pass.h"
+#include "dbgcnt.h"

 /* The following code implements gcse after reload, the purpose of this
    pass is to cleanup redundant loads generated by reload and other
@@ -174,10 +177,10 @@ static void free_mem (void);
 static bool oprs_unchanged_p (rtx, rtx, bool);
 static void record_last_reg_set_info (rtx, int);
 static void record_last_mem_set_info (rtx);
-static void record_last_set_info (rtx, rtx, void *);
+static void record_last_set_info (rtx, const_rtx, void *);
 static void record_opr_changes (rtx);

-static void find_mem_conflicts (rtx, rtx, void *);
+static void find_mem_conflicts (rtx, const_rtx, void *);
 static int load_killed_in_block_p (int, rtx, bool);
 static void reset_opr_set_tables (void);

@@ -194,8 +197,6 @@ static void dump_hash_table (FILE *);
 static bool reg_killed_on_edge (rtx, edge);
 static bool reg_used_on_edge (rtx, edge);

-static rtx reg_set_between_after_reload_p (rtx, rtx, rtx);
-static rtx reg_used_between_after_reload_p (rtx, rtx, rtx);
 static rtx get_avail_load_store_reg (rtx);

 static bool bb_has_well_behaved_predecessors (basic_block);
@@ -221,8 +222,8 @@ alloc_mem (void)
   rtx insn;

   /* Find the largest UID and create a mapping from UIDs to CUIDs.  */
-  uid_cuid = xcalloc (get_max_uid () + 1, sizeof (int));
-  i = 0;
+  uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
+  i = 1;
   FOR_EACH_BB (bb)
     FOR_BB_INSNS (bb, insn)
       {
@@ -295,22 +296,21 @@ hash_expr (rtx x, int *do_not_record_p)
 static hashval_t
 hash_expr_for_htab (const void *expp)
 {
-  struct expr *exp = (struct expr *) expp;
+  const struct expr *const exp = (const struct expr *) expp;

   return exp->hash;
 }

-/* Callbach for hashtab.
+/* Callback for hashtab.
    Return nonzero if exp1 is equivalent to exp2. 
*/ static int expr_equiv_p (const void *exp1p, const void *exp2p) { - struct expr *exp1 = (struct expr *) exp1p; - struct expr *exp2 = (struct expr *) exp2p; + const struct expr *const exp1 = (const struct expr *) exp1p; + const struct expr *const exp2 = (const struct expr *) exp2p; int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true); - if (equiv_p - && exp1->hash != exp2->hash) - abort (); + + gcc_assert (!equiv_p || exp1->hash == exp2->hash); return equiv_p; } @@ -439,7 +439,7 @@ dump_hash_table_entry (void **slot, void *filep) fprintf (file, "expr: "); print_rtl (file, expr->expr); fprintf (file,"\nhashcode: %u\n", expr->hash); - fprintf (file,"list of occurences:\n"); + fprintf (file,"list of occurrences:\n"); occr = expr->avail_occr; while (occr) { @@ -468,6 +468,22 @@ dump_hash_table (FILE *file) fprintf (file, "\n"); } +/* Return true if register X is recorded as being set by an instruction + whose CUID is greater than the one given. */ + +static bool +reg_changed_after_insn_p (rtx x, int cuid) +{ + unsigned int regno, end_regno; + + regno = REGNO (x); + end_regno = END_HARD_REGNO (x); + do + if (reg_avail_info[regno] > cuid) + return true; + while (++regno < end_regno); + return false; +} /* Return nonzero if the operands of expression X are unchanged 1) from the start of INSN's basic block up to but not including INSN @@ -488,20 +504,12 @@ oprs_unchanged_p (rtx x, rtx insn, bool after_insn) switch (code) { case REG: -#ifdef ENABLE_CHECKING /* We are called after register allocation. */ - if (REGNO (x) >= FIRST_PSEUDO_REGISTER) - abort (); -#endif + gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER); if (after_insn) - /* If the last CUID setting the insn is less than the CUID of - INSN, then reg X is not changed in or after INSN. */ - return reg_avail_info[REGNO (x)] < INSN_CUID (insn); + return !reg_changed_after_insn_p (x, INSN_CUID (insn) - 1); else - /* Reg X is not set before INSN in the current basic block if - we have not yet recorded the CUID of an insn that touches - the reg. */ - return reg_avail_info[REGNO (x)] == 0; + return !reg_changed_after_insn_p (x, 0); case MEM: if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn)) @@ -563,7 +571,7 @@ static int mems_conflict_p; to a nonzero value. */ static void -find_mem_conflicts (rtx dest, rtx setter ATTRIBUTE_UNUSED, +find_mem_conflicts (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data) { rtx mem_op = (rtx) data; @@ -663,7 +671,7 @@ record_last_mem_set_info (rtx insn) the SET is taking place. */ static void -record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data) +record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data) { rtx last_set_insn = (rtx) data; @@ -672,10 +680,18 @@ record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data) if (REG_P (dest)) record_last_reg_set_info (last_set_insn, REGNO (dest)); - else if (MEM_P (dest) - /* Ignore pushes, they clobber nothing. */ - && ! push_operand (dest, GET_MODE (dest))) - record_last_mem_set_info (last_set_insn); + else if (MEM_P (dest)) + { + /* Ignore pushes, they don't clobber memory. They may still + clobber the stack pointer though. Some targets do argument + pushes without adding REG_INC notes. See e.g. PR25196, + where a pushsi2 on i386 doesn't have REG_INC notes. Note + such changes here too. */ + if (! 
push_operand (dest, GET_MODE (dest))) + record_last_mem_set_info (last_set_insn); + else + record_last_reg_set_info (last_set_insn, STACK_POINTER_REGNUM); + } } @@ -710,12 +726,28 @@ record_opr_changes (rtx insn) /* Finally, if this is a call, record all call clobbers. */ if (CALL_P (insn)) { - unsigned int regno; + unsigned int regno, end_regno; + rtx link, x; for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)) record_last_reg_set_info (insn, regno); + for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1)) + if (GET_CODE (XEXP (link, 0)) == CLOBBER) + { + x = XEXP (XEXP (link, 0), 0); + if (REG_P (x)) + { + gcc_assert (HARD_REGISTER_P (x)); + regno = REGNO (x); + end_regno = END_HARD_REGNO (x); + do + record_last_reg_set_info (insn, regno); + while (++regno < end_regno); + } + } + if (! CONST_OR_PURE_CALL_P (insn)) record_last_mem_set_info (insn); } @@ -740,18 +772,18 @@ hash_scan_set (rtx insn) if (JUMP_P (insn) || set_noop_p (pat)) return; -#ifdef ENABLE_CHEKCING - /* We shouldn't have any EH_REGION notes post reload. */ - if (find_reg_note (insn, REG_EH_REGION, NULL_RTX)) - abort (); -#endif - if (REG_P (dest)) { if (/* Don't CSE something if we can't do a reg/reg copy. */ can_copy_p (GET_MODE (dest)) /* Is SET_SRC something we want to gcse? */ && general_operand (src, GET_MODE (src)) +#ifdef STACK_REGS + /* Never consider insns touching the register stack. It may + create situations that reg-stack cannot handle (e.g. a stack + register live across an abnormal edge). */ + && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG) +#endif /* An expression is not available if its operands are subsequently modified, including this insn. */ && oprs_unchanged_p (src, insn, true)) @@ -766,6 +798,10 @@ hash_scan_set (rtx insn) can_copy_p (GET_MODE (src)) /* Is SET_DEST something we want to gcse? */ && general_operand (dest, GET_MODE (dest)) +#ifdef STACK_REGS + /* As above for STACK_REGS. */ + && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG) +#endif && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest))) /* Check if the memory expression is killed after insn. */ && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true) @@ -845,112 +881,20 @@ reg_used_on_edge (rtx reg, edge e) return false; } - -/* Return the insn that sets register REG or clobbers it in between - FROM_INSN and TO_INSN (exclusive of those two). - Just like reg_set_between but for hard registers and not pseudos. */ - -static rtx -reg_set_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn) -{ - rtx insn; - -#ifdef ENABLE_CHECKING - /* We are called after register allocation. */ - if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER) - abort (); -#endif - - if (from_insn == to_insn) - return NULL_RTX; - - for (insn = NEXT_INSN (from_insn); - insn != to_insn; - insn = NEXT_INSN (insn)) - if (INSN_P (insn)) - { - if (set_of (reg, insn) != NULL_RTX) - return insn; - if ((CALL_P (insn) - && call_used_regs[REGNO (reg)]) - || find_reg_fusage (insn, CLOBBER, reg)) - return insn; - - if (FIND_REG_INC_NOTE (insn, reg)) - return insn; - } - - return NULL_RTX; -} - -/* Return the insn that uses register REG in between FROM_INSN and TO_INSN - (exclusive of those two). Similar to reg_used_between but for hard - registers and not pseudos. */ - -static rtx -reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn) -{ - rtx insn; - -#ifdef ENABLE_CHECKING - /* We are called after register allocation. 
*/ - if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER) - abort (); -#endif - - if (from_insn == to_insn) - return NULL_RTX; - - for (insn = NEXT_INSN (from_insn); - insn != to_insn; - insn = NEXT_INSN (insn)) - if (INSN_P (insn)) - { - if (reg_overlap_mentioned_p (reg, PATTERN (insn)) - || (CALL_P (insn) - && call_used_regs[REGNO (reg)]) - || find_reg_fusage (insn, USE, reg) - || find_reg_fusage (insn, CLOBBER, reg)) - return insn; - - if (FIND_REG_INC_NOTE (insn, reg)) - return insn; - } - - return NULL_RTX; -} - -/* Return true if REG is used, set, or killed between the beginning of - basic block BB and UP_TO_INSN. Caches the result in reg_avail_info. */ - -static bool -reg_set_or_used_since_bb_start (rtx reg, basic_block bb, rtx up_to_insn) -{ - rtx insn, start = PREV_INSN (BB_HEAD (bb)); - - if (reg_avail_info[REGNO (reg)] != 0) - return true; - - insn = reg_used_between_after_reload_p (reg, start, up_to_insn); - if (! insn) - insn = reg_set_between_after_reload_p (reg, start, up_to_insn); - - if (insn) - reg_avail_info[REGNO (reg)] = INSN_CUID (insn); - - return insn != NULL_RTX; -} - /* Return the loaded/stored register of a load/store instruction. */ static rtx get_avail_load_store_reg (rtx insn) { - if (REG_P (SET_DEST (PATTERN (insn)))) /* A load. */ + if (REG_P (SET_DEST (PATTERN (insn)))) + /* A load. */ return SET_DEST(PATTERN(insn)); - if (REG_P (SET_SRC (PATTERN (insn)))) /* A store. */ - return SET_SRC (PATTERN (insn)); - abort (); + else + { + /* A store. */ + gcc_assert (REG_P (SET_SRC (PATTERN (insn)))); + return SET_SRC (PATTERN (insn)); + } } /* Return nonzero if the predecessors of BB are "well behaved". */ @@ -1016,6 +960,7 @@ eliminate_partially_redundant_load (basic_block bb, rtx insn, gcov_type ok_count = 0; /* Redundant load execution count. */ gcov_type critical_count = 0; /* Execution count of critical edges. */ edge_iterator ei; + bool critical_edge_split = false; /* The execution count of the loads to be added to make the load fully redundant. */ @@ -1027,7 +972,8 @@ eliminate_partially_redundant_load (basic_block bb, rtx insn, /* Check that the loaded register is not used, set, or killed from the beginning of the block. */ - if (reg_set_or_used_since_bb_start (dest, bb, insn)) + if (reg_changed_after_insn_p (dest, 0) + || reg_used_between_p (dest, PREV_INSN (BB_HEAD (bb)), insn)) return; /* Check potential for replacing load with copy for predecessors. */ @@ -1036,6 +982,7 @@ eliminate_partially_redundant_load (basic_block bb, rtx insn, rtx next_pred_bb_end; avail_insn = NULL_RTX; + avail_reg = NULL_RTX; pred_bb = pred->src; next_pred_bb_end = NEXT_INSN (BB_END (pred_bb)); for (a_occr = get_bb_avail_insn (pred_bb, expr->avail_occr); a_occr; @@ -1043,8 +990,9 @@ eliminate_partially_redundant_load (basic_block bb, rtx insn, { /* Check if the loaded register is not used. */ avail_insn = a_occr->insn; - if (! (avail_reg = get_avail_load_store_reg (avail_insn))) - abort (); + avail_reg = get_avail_load_store_reg (avail_insn); + gcc_assert (avail_reg); + /* Make sure we can generate a move from register avail_reg to dest. */ extract_insn (gen_move_insn (copy_rtx (dest), @@ -1056,8 +1004,7 @@ eliminate_partially_redundant_load (basic_block bb, rtx insn, avail_insn = NULL; continue; } - if (! reg_set_between_after_reload_p (avail_reg, avail_insn, - next_pred_bb_end)) + if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end)) /* AVAIL_INSN remains non-null. 
*/
	    break;
	  else
@@ -1071,8 +1018,17 @@
     {
       npred_ok++;
       ok_count += pred->count;
+      if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
+						copy_rtx (avail_reg)))))
+	{
+	  /* Check if there is going to be a split.  */
+	  if (EDGE_CRITICAL_P (pred))
+	    critical_edge_split = true;
+	}
+      else /* It's a dead move, no need to generate.  */
+	continue;
       occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
-					      sizeof (struct occr));
+					      sizeof (struct unoccr));
       occr->insn = avail_insn;
       occr->pred = pred;
       occr->next = avail_occrs;
@@ -1082,6 +1038,9 @@
     }
   else
     {
+      /* Adding a load on a critical edge will cause a split.  */
+      if (EDGE_CRITICAL_P (pred))
+	critical_edge_split = true;
       not_ok_count += pred->count;
       unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
						sizeof (struct unoccr));
@@ -1097,7 +1056,12 @@
   if (/* No load can be replaced by copy.  */
       npred_ok == 0
       /* Prevent exploding the code.  */
-      || (optimize_size && npred_ok > 1))
+      || (optimize_size && npred_ok > 1)
+      /* If we don't have profile information we cannot tell if splitting
+	 a critical edge is profitable or not, so don't do it.  */
+      || ((! profile_info || ! flag_branch_probabilities
+	   || targetm.cannot_modify_jumps_p ())
+	  && critical_edge_split))
     goto cleanup;

   /* Check if it's worth applying the partial redundancy elimination.  */
@@ -1115,8 +1079,7 @@
       /* Set avail_reg to be the register having the value of the
	 memory.  */
       avail_reg = get_avail_load_store_reg (avail_insn);
-      if (! avail_reg)
-	abort ();
+      gcc_assert (avail_reg);

       insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
					   copy_rtx (avail_reg)),
@@ -1158,7 +1121,17 @@
	   a_occr = get_bb_avail_insn (bb, a_occr->next));

       if (!a_occr)
-	delete_insn (insn);
+	{
+	  stats.insns_deleted++;
+
+	  if (dump_file)
+	    {
+	      fprintf (dump_file, "deleting insn:\n");
+	      print_rtl_single (dump_file, insn);
+	      fprintf (dump_file, "\n");
+	    }
+	  delete_insn (insn);
+	}
       else
	 a_occr->deleted_p = 1;

@@ -1252,7 +1225,7 @@ delete_redundant_insns_1 (void **slot, void *data ATTRIBUTE_UNUSED)

   for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
     {
-      if (occr->deleted_p)
+      if (occr->deleted_p && dbg_cnt (gcse2_delete))
	 {
	   delete_insn (occr->insn);
	   stats.insns_deleted++;
@@ -1280,9 +1253,10 @@ delete_redundant_insns (void)
 /* Main entry point of the GCSE after reload - clean some redundant loads
    due to spilling.  */

-void
+static void
 gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
 {
+  memset (&stats, 0, sizeof (stats));

   /* Allocate memory for this pass. 
@@ -1318,3 +1292,37 @@ gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED) free_mem (); } + +static bool +gate_handle_gcse2 (void) +{ + return (optimize > 0 && flag_gcse_after_reload); +} + + +static unsigned int +rest_of_handle_gcse2 (void) +{ + gcse_after_reload_main (get_insns ()); + rebuild_jump_labels (get_insns ()); + return 0; +} + +struct tree_opt_pass pass_gcse2 = +{ + "gcse2", /* name */ + gate_handle_gcse2, /* gate */ + rest_of_handle_gcse2, /* execute */ + NULL, /* sub */ + NULL, /* next */ + 0, /* static_pass_number */ + TV_GCSE_AFTER_RELOAD, /* tv_id */ + 0, /* properties_required */ + 0, /* properties_provided */ + 0, /* properties_destroyed */ + 0, /* todo_flags_start */ + TODO_dump_func | + TODO_verify_flow | TODO_ggc_collect, /* todo_flags_finish */ + 'J' /* letter */ +}; +
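
For readers new to this pass, the transformation that gcse_after_reload_main
performs is easiest to see in source form. The C function below is purely
illustrative: the function name and code shape are invented for this note and
do not come from the patch or from GCC's testsuite. After reload, one
predecessor of the block containing the final load already holds *p in a hard
register while the other does not, so the load is partially redundant;
eliminate_partially_redundant_load inserts a register copy or a load on the
incoming edges and deletes the original instruction. Whether a given build
actually applies the transformation also depends on register allocation and
on the critical-edge/profile checks added by this patch.

/* Illustrative example only -- invented for this note, not taken from
   the patch.  Compile with optimization and -fgcse-after-reload.  */

int
partially_redundant_load (int *p, int flag)
{
  int sum = 0;

  if (flag)
    sum = *p;		/* Path A: *p is loaded into a hard register.  */

  /* This load of *p is partially redundant: the value is available
     from path A but missing on the fall-through path.  The pass puts
     a load on the edge where the value is missing (and a register
     copy on the edge where it is available), then deletes this
     now fully redundant load.  */
  return sum + *p;
}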