You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
#include "config.h"
#include "system.h"
#include "optabs.h"
#include "regs.h"
#include "ggc.h"
+#include "tree-flow.h"
+#include "tree-flow-inline.h"
+#include "diagnostic.h"
+#include "coverage.h"
+#include "tree.h"
+#include "gcov-io.h"
+#include "timevar.h"
+#include "tree-pass.h"
+#include "toplev.h"
+#include "pointer-set.h"
static struct value_prof_hooks *value_prof_hooks;
2) Speculative prefetching. If we are able to determine that the difference
between addresses accessed by a memory reference is usually constant, we
may add the prefetch instructions.
+ FIXME: This transformation was removed together with RTL based value
+ profiling.
Every such optimization should add its requirements for profiled values to
insn_values_to_profile function. This function is called from branch_prob
in profile.c and the requested values are instrumented by it in the first
compilation with -fprofile-arcs. The optimization may then read the
gathered data in the second compilation with -fbranch-probabilities.
- The measured data is appended as REG_VALUE_PROFILE note to the instrumented
- insn. The argument to the note consists of an EXPR_LIST where its
- members have the following meaning (from the first to the last):
-
- -- type of information gathered (HIST_TYPE*)
- -- the expression that is profiled
- -- list of counters starting from the first one. */
-
-/* For speculative prefetching, the range in that we do not prefetch (because
- we assume that it will be in cache anyway). The asymmetry between min and
- max range is trying to reflect the fact that the sequential prefetching
- of the data is commonly done directly by hardware. Nevertheless, these
- values are just a guess and should of course be target-specific. */
-
-#ifndef NOPREFETCH_RANGE_MIN
-#define NOPREFETCH_RANGE_MIN (-16)
-#endif
-#ifndef NOPREFETCH_RANGE_MAX
-#define NOPREFETCH_RANGE_MAX 32
-#endif
-static void insn_divmod_values_to_profile (rtx, histogram_values *);
-#ifdef HAVE_prefetch
-static bool insn_prefetch_values_to_profile (rtx, histogram_values *);
-static int find_mem_reference_1 (rtx *, void *);
-static void find_mem_reference_2 (rtx, rtx, void *);
-static bool find_mem_reference (rtx, rtx *, int *);
-#endif
+ The measured data is pointed to from the histograms
+ field of the statement annotation of the instrumented insns. It is
+ kept as a linked list of struct histogram_value_t's, which contain the
+ same information as above. */
-static void insn_values_to_profile (rtx, histogram_values *);
-static rtx gen_divmod_fixed_value (enum machine_mode, enum rtx_code, rtx, rtx,
- rtx, gcov_type, int);
-static rtx gen_mod_pow2 (enum machine_mode, enum rtx_code, rtx, rtx, rtx, int);
-static rtx gen_mod_subtract (enum machine_mode, enum rtx_code, rtx, rtx, rtx,
- int, int, int);
-#ifdef HAVE_prefetch
-static rtx gen_speculative_prefetch (rtx, gcov_type, int);
-#endif
-static bool divmod_fixed_value_transform (rtx insn);
-static bool mod_pow2_value_transform (rtx);
-static bool mod_subtract_transform (rtx);
-#ifdef HAVE_prefetch
-static bool speculative_prefetching_transform (rtx);
-#endif
-\f
-/* Find values inside INSN for that we want to measure histograms for
- division/modulo optimization and stores them to VALUES. */
-static void
-insn_divmod_values_to_profile (rtx insn, histogram_values *values)
-{
- rtx set, set_src, op1, op2;
- enum machine_mode mode;
- histogram_value hist;
-
- if (!INSN_P (insn))
- return;
- set = single_set (insn);
- if (!set)
- return;
+static tree tree_divmod_fixed_value (tree, tree, tree, tree,
+ tree, int, gcov_type, gcov_type);
+static tree tree_mod_pow2 (tree, tree, tree, tree, int, gcov_type, gcov_type);
+static tree tree_mod_subtract (tree, tree, tree, tree, int, int, int,
+ gcov_type, gcov_type, gcov_type);
+static bool tree_divmod_fixed_value_transform (tree);
+static bool tree_mod_pow2_value_transform (tree);
+static bool tree_mod_subtract_transform (tree);
+static bool tree_stringops_transform (block_stmt_iterator *);
- mode = GET_MODE (SET_DEST (set));
- if (!INTEGRAL_MODE_P (mode))
- return;
+/* Allocate histogram value. */
- set_src = SET_SRC (set);
- switch (GET_CODE (set_src))
- {
- case DIV:
- case MOD:
- case UDIV:
- case UMOD:
- op1 = XEXP (set_src, 0);
- op2 = XEXP (set_src, 1);
- if (side_effects_p (op2))
- return;
-
- /* Check for a special case where the divisor is power of 2. */
- if ((GET_CODE (set_src) == UMOD) && !CONSTANT_P (op2))
- {
- hist = ggc_alloc (sizeof (*hist));
- hist->value = op2;
- hist->seq = NULL_RTX;
- hist->mode = mode;
- hist->insn = insn;
- hist->type = HIST_TYPE_POW2;
- hist->hdata.pow2.may_be_other = 1;
- VEC_safe_push (histogram_value, *values, hist);
- }
+static histogram_value
+gimple_alloc_histogram_value (struct function *fun ATTRIBUTE_UNUSED,
+ enum hist_type type, tree stmt, tree value)
+{
+ histogram_value hist = (histogram_value) xcalloc (1, sizeof (*hist));
+ hist->hvalue.value = value;
+ hist->hvalue.stmt = stmt;
+ hist->type = type;
+ return hist;
+}
- /* Check whether the divisor is not in fact a constant. */
- if (!CONSTANT_P (op2))
- {
- hist = ggc_alloc (sizeof (*hist));
- hist->value = op2;
- hist->mode = mode;
- hist->seq = NULL_RTX;
- hist->insn = insn;
- hist->type = HIST_TYPE_SINGLE_VALUE;
- VEC_safe_push (histogram_value, *values, hist);
- }
+/* Hash value for histogram. */
- /* For mod, check whether it is not often a noop (or replaceable by
- a few subtractions). */
- if (GET_CODE (set_src) == UMOD && !side_effects_p (op1))
- {
- rtx tmp;
-
- hist = ggc_alloc (sizeof (*hist));
- start_sequence ();
- tmp = simplify_gen_binary (DIV, mode, copy_rtx (op1), copy_rtx (op2));
- hist->value = force_operand (tmp, NULL_RTX);
- hist->seq = get_insns ();
- end_sequence ();
- hist->mode = mode;
- hist->insn = insn;
- hist->type = HIST_TYPE_INTERVAL;
- hist->hdata.intvl.int_start = 0;
- hist->hdata.intvl.steps = 2;
- hist->hdata.intvl.may_be_less = 1;
- hist->hdata.intvl.may_be_more = 1;
- VEC_safe_push (histogram_value, *values, hist);
- }
- return;
-
- default:
- return;
- }
+static hashval_t
+histogram_hash (const void *x)
+{
+ return htab_hash_pointer (((histogram_value)x)->hvalue.stmt);
}
-#ifdef HAVE_prefetch
-
-/* Called from find_mem_reference through for_each_rtx, finds a memory
- reference. I.e. if *EXPR is a MEM, the reference to this MEM is stored
- to *RET and the traversing of the expression is interrupted by returning 1.
- Otherwise 0 is returned. */
+/* Return nonzero if the statement of histogram_value X is Y. */
static int
-find_mem_reference_1 (rtx *expr, void *ret)
+histogram_eq (const void *x, const void *y)
{
- rtx *mem = ret;
-
- if (GET_CODE (*expr) == MEM)
- {
- *mem = *expr;
- return 1;
- }
- return 0;
+ return ((histogram_value) x)->hvalue.stmt == (tree)y;
}
-/* Called form find_mem_reference through note_stores to find out whether
- the memory reference MEM is a store. I.e. if EXPR == MEM, the variable
- FMR2_WRITE is set to true. */
+/* Set histogram for STMT. */
-static int fmr2_write;
static void
-find_mem_reference_2 (rtx expr, rtx pat ATTRIBUTE_UNUSED, void *mem)
+set_histogram_value (struct function *fun, tree stmt, histogram_value hist)
{
- if (expr == mem)
- fmr2_write = true;
+ void **loc;
+ if (!hist && !VALUE_HISTOGRAMS (fun))
+ return;
+ if (!VALUE_HISTOGRAMS (fun))
+ VALUE_HISTOGRAMS (fun) = htab_create (1, histogram_hash,
+ histogram_eq, NULL);
+ loc = htab_find_slot_with_hash (VALUE_HISTOGRAMS (fun), stmt,
+ htab_hash_pointer (stmt),
+ hist ? INSERT : NO_INSERT);
+ if (!hist)
+ {
+ if (loc)
+ htab_clear_slot (VALUE_HISTOGRAMS (fun), loc);
+ return;
+ }
+ *loc = hist;
}
-/* Find a memory reference inside INSN, return it in MEM. Set WRITE to true
- if it is a write of the mem. Return false if no memory reference is found,
- true otherwise. */
+/* Get histogram list for STMT. */
-static bool
-find_mem_reference (rtx insn, rtx *mem, int *write)
+histogram_value
+gimple_histogram_value (struct function *fun, tree stmt)
{
- *mem = NULL_RTX;
- for_each_rtx (&PATTERN (insn), find_mem_reference_1, mem);
-
- if (!*mem)
- return false;
-
- fmr2_write = false;
- note_stores (PATTERN (insn), find_mem_reference_2, *mem);
- *write = fmr2_write;
- return true;
+ if (!VALUE_HISTOGRAMS (fun))
+ return NULL;
+ return htab_find_with_hash (VALUE_HISTOGRAMS (fun), stmt,
+ htab_hash_pointer (stmt));
}
-/* Find values inside INSN for that we want to measure histograms for
- a speculative prefetching. Add them to the list VALUES.
- Returns true if such we found any such value, false otherwise. */
+/* Add histogram for STMT. */
-static bool
-insn_prefetch_values_to_profile (rtx insn, histogram_values *values)
+void
+gimple_add_histogram_value (struct function *fun, tree stmt, histogram_value hist)
{
- rtx mem, address;
- int write;
- histogram_value hist;
-
- /* It only makes sense to look for memory references in ordinary insns. */
- if (GET_CODE (insn) != INSN)
- return false;
-
- if (!find_mem_reference (insn, &mem, &write))
- return false;
-
- address = XEXP (mem, 0);
- if (side_effects_p (address))
- return false;
-
- if (CONSTANT_P (address))
- return false;
+ hist->hvalue.next = gimple_histogram_value (fun, stmt);
+ set_histogram_value (fun, stmt, hist);
+}
- hist = ggc_alloc (sizeof (*hist));
- hist->value = address;
- hist->mode = GET_MODE (address);
- hist->seq = NULL_RTX;
- hist->insn = insn;
- hist->type = HIST_TYPE_CONST_DELTA;
- VEC_safe_push (histogram_value, *values, hist);
+/* Remove histogram HIST from STMT's histogram list. */
- return true;
-}
-#endif
-/* Find values inside INSN for that we want to measure histograms and adds
- them to list VALUES (increasing the record of its length in N_VALUES). */
-static void
-insn_values_to_profile (rtx insn, histogram_values *values)
+void
+gimple_remove_histogram_value (struct function *fun, tree stmt, histogram_value hist)
{
- if (flag_value_profile_transformations)
- insn_divmod_values_to_profile (insn, values);
-
-#ifdef HAVE_prefetch
- if (flag_speculative_prefetching)
- insn_prefetch_values_to_profile (insn, values);
+ histogram_value hist2 = gimple_histogram_value (fun, stmt);
+ if (hist == hist2)
+ {
+ set_histogram_value (fun, stmt, hist->hvalue.next);
+ }
+ else
+ {
+ while (hist2->hvalue.next != hist)
+ hist2 = hist2->hvalue.next;
+ hist2->hvalue.next = hist->hvalue.next;
+ }
+ free (hist->hvalue.counters);
+#ifdef ENABLE_CHECKING
+ memset (hist, 0xab, sizeof (*hist));
#endif
+ free (hist);
}
-/* Find list of values for that we want to measure histograms. */
-static void
-rtl_find_values_to_profile (histogram_values *values)
+/* Lookup histogram of type TYPE in the STMT. */
+
+histogram_value
+gimple_histogram_value_of_type (struct function *fun, tree stmt, enum hist_type type)
{
- rtx insn;
- unsigned i, libcall_level;
+ histogram_value hist;
+ for (hist = gimple_histogram_value (fun, stmt); hist; hist = hist->hvalue.next)
+ if (hist->type == type)
+ return hist;
+ return NULL;
+}
- life_analysis (NULL, PROP_DEATH_NOTES);
+/* Dump information about HIST to DUMP_FILE. */
- *values = VEC_alloc (histogram_value, 0);
- libcall_level = 0;
- for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+static void
+dump_histogram_value (FILE *dump_file, histogram_value hist)
+{
+ switch (hist->type)
{
- if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
- libcall_level++;
-
- /* Do not instrument values inside libcalls (we are going to split block
- due to instrumentation, and libcall blocks should be local to a single
- basic block). */
- if (!libcall_level)
- insn_values_to_profile (insn, values);
-
- if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
+ case HIST_TYPE_INTERVAL:
+ fprintf (dump_file, "Interval counter range %d -- %d",
+ hist->hdata.intvl.int_start,
+ (hist->hdata.intvl.int_start
+ + hist->hdata.intvl.steps - 1));
+ if (hist->hvalue.counters)
{
- gcc_assert (libcall_level > 0);
- libcall_level--;
+ unsigned int i;
+ fprintf(dump_file, " [");
+ for (i = 0; i < hist->hdata.intvl.steps; i++)
+ fprintf (dump_file, " %d:"HOST_WIDEST_INT_PRINT_DEC,
+ hist->hdata.intvl.int_start + i,
+ (HOST_WIDEST_INT) hist->hvalue.counters[i]);
+ fprintf (dump_file, " ] outside range:"HOST_WIDEST_INT_PRINT_DEC,
+ (HOST_WIDEST_INT) hist->hvalue.counters[i]);
}
- }
- gcc_assert (libcall_level == 0);
-
- for (i = 0; i < VEC_length (histogram_value, *values); i++)
- {
- histogram_value hist = VEC_index (histogram_value, *values, i);
+ fprintf (dump_file, ".\n");
+ break;
- switch (hist->type)
+ case HIST_TYPE_POW2:
+ fprintf (dump_file, "Pow2 counter ");
+ if (hist->hvalue.counters)
{
- case HIST_TYPE_INTERVAL:
- if (dump_file)
- fprintf (dump_file,
- "Interval counter for insn %d, range %d -- %d.\n",
- INSN_UID ((rtx)hist->insn),
- hist->hdata.intvl.int_start,
- (hist->hdata.intvl.int_start
- + hist->hdata.intvl.steps - 1));
- hist->n_counters = hist->hdata.intvl.steps +
- (hist->hdata.intvl.may_be_less ? 1 : 0) +
- (hist->hdata.intvl.may_be_more ? 1 : 0);
- break;
-
- case HIST_TYPE_POW2:
- if (dump_file)
- fprintf (dump_file,
- "Pow2 counter for insn %d.\n",
- INSN_UID ((rtx)hist->insn));
- hist->n_counters
- = GET_MODE_BITSIZE (hist->mode)
- + (hist->hdata.pow2.may_be_other ? 1 : 0);
- break;
-
- case HIST_TYPE_SINGLE_VALUE:
- if (dump_file)
- fprintf (dump_file,
- "Single value counter for insn %d.\n",
- INSN_UID ((rtx)hist->insn));
- hist->n_counters = 3;
- break;
+ fprintf (dump_file, "pow2:"HOST_WIDEST_INT_PRINT_DEC
+ " nonpow2:"HOST_WIDEST_INT_PRINT_DEC,
+ (HOST_WIDEST_INT) hist->hvalue.counters[0],
+ (HOST_WIDEST_INT) hist->hvalue.counters[1]);
+ }
+ fprintf (dump_file, ".\n");
+ break;
- case HIST_TYPE_CONST_DELTA:
- if (dump_file)
- fprintf (dump_file,
- "Constant delta counter for insn %d.\n",
- INSN_UID ((rtx)hist->insn));
- hist->n_counters = 4;
- break;
+ case HIST_TYPE_SINGLE_VALUE:
+ fprintf (dump_file, "Single value ");
+ if (hist->hvalue.counters)
+ {
+ fprintf (dump_file, "value:"HOST_WIDEST_INT_PRINT_DEC
+ " match:"HOST_WIDEST_INT_PRINT_DEC
+ " wrong:"HOST_WIDEST_INT_PRINT_DEC,
+ (HOST_WIDEST_INT) hist->hvalue.counters[0],
+ (HOST_WIDEST_INT) hist->hvalue.counters[1],
+ (HOST_WIDEST_INT) hist->hvalue.counters[2]);
+ }
+ fprintf (dump_file, ".\n");
+ break;
- default:
- gcc_unreachable ();
+ case HIST_TYPE_CONST_DELTA:
+ fprintf (dump_file, "Constant delta ");
+ if (hist->hvalue.counters)
+ {
+ fprintf (dump_file, "value:"HOST_WIDEST_INT_PRINT_DEC
+ " match:"HOST_WIDEST_INT_PRINT_DEC
+ " wrong:"HOST_WIDEST_INT_PRINT_DEC,
+ (HOST_WIDEST_INT) hist->hvalue.counters[0],
+ (HOST_WIDEST_INT) hist->hvalue.counters[1],
+ (HOST_WIDEST_INT) hist->hvalue.counters[2]);
}
- }
- allocate_reg_info (max_reg_num (), FALSE, FALSE);
+ fprintf (dump_file, ".\n");
+ break;
+ }
}
-/* Main entry point. Finds REG_VALUE_PROFILE notes from profiler and uses
- them to identify and exploit properties of values that are hard to analyze
- statically.
+/* Dump all histograms attached to STMT to DUMP_FILE. */
- We do following transformations:
-
- 1)
-
- x = a / b;
-
- where b is almost always a constant N is transformed to
-
- if (b == N)
- x = a / N;
- else
- x = a / b;
-
- Analogically with %
-
- 2)
-
- x = a % b
-
- where b is almost always a power of 2 and the division is unsigned
- TODO -- handle signed case as well
-
- if ((b & (b - 1)) == 0)
- x = a & (b - 1);
- else
- x = x % b;
-
- Note that when b = 0, no error will occur and x = a; this is correct,
- as result of such operation is undefined.
-
- 3)
-
- x = a % b
+void
+dump_histograms_for_stmt (struct function *fun, FILE *dump_file, tree stmt)
+{
+ histogram_value hist;
+ for (hist = gimple_histogram_value (fun, stmt); hist; hist = hist->hvalue.next)
+ dump_histogram_value (dump_file, hist);
+}
- where a is almost always less then b and the division is unsigned
- TODO -- handle signed case as well
+/* Remove all histograms associated with STMT. */
- x = a;
- if (x >= b)
- x %= b;
+void
+gimple_remove_stmt_histograms (struct function *fun, tree stmt)
+{
+ histogram_value val;
+ while ((val = gimple_histogram_value (fun, stmt)) != NULL)
+ gimple_remove_histogram_value (fun, stmt, val);
+}
- 4)
+/* Duplicate all histograms associated with OSTMT to STMT. */
- x = a % b
+void
+gimple_duplicate_stmt_histograms (struct function *fun, tree stmt,
+ struct function *ofun, tree ostmt)
+{
+ histogram_value val;
+ for (val = gimple_histogram_value (ofun, ostmt); val != NULL; val = val->hvalue.next)
+ {
+ histogram_value new = gimple_alloc_histogram_value (fun, val->type, NULL, NULL);
+ memcpy (new, val, sizeof (*val));
+ new->hvalue.stmt = stmt;
+ new->hvalue.counters = xmalloc (sizeof (*new->hvalue.counters) * new->n_counters);
+ memcpy (new->hvalue.counters, val->hvalue.counters, sizeof (*new->hvalue.counters) * new->n_counters);
+ gimple_add_histogram_value (fun, stmt, new);
+ }
+}
- where a is almost always less then 2 * b and the division is unsigned
- TODO -- handle signed case as well
+static bool error_found = false;
- x = a;
- if (x >= b)
- x -= b;
- if (x >= b)
- x %= b;
+/* Helper function for verify_histograms. For each histogram reachable via htab
+ walk verify that it was reached via statement walk. */
- It would be possible to continue analogically for K * b for other small
- K's, but it is probably not useful.
+static int
+visit_hist (void **slot, void *data)
+{
+ struct pointer_set_t *visited = (struct pointer_set_t *) data;
+ histogram_value hist = *(histogram_value *) slot;
+ if (!pointer_set_contains (visited, hist))
+ {
+ error ("Dead histogram");
+ dump_histogram_value (stderr, hist);
+ debug_generic_stmt (hist->hvalue.stmt);
+ error_found = true;
+ }
+ return 0;
+}
- 5)
+/* Verify sanity of the histograms. */
- Read or write of mem[address], where the value of address changes usually
- by a constant C != 0 between the following accesses to the computation; with
- -fspeculative-prefetching we then add a prefetch of address + C before
- the insn. This handles prefetching of several interesting cases in addition
- to a simple prefetching for addresses that are induction variables, e. g.
- linked lists allocated sequentially (even in case they are processed
- recursively).
+void
+verify_histograms (void)
+{
+ basic_block bb;
+ block_stmt_iterator bsi;
+ histogram_value hist;
+ struct pointer_set_t *visited_hists;
+
+ error_found = false;
+ visited_hists = pointer_set_create ();
+ FOR_EACH_BB (bb)
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ tree stmt = bsi_stmt (bsi);
+
+ for (hist = gimple_histogram_value (cfun, stmt); hist; hist = hist->hvalue.next)
+ {
+ if (hist->hvalue.stmt != stmt)
+ {
+ error ("Histogram value statement does not correspond to statement"
+ " it is associated with");
+ debug_generic_stmt (stmt);
+ dump_histogram_value (stderr, hist);
+ error_found = true;
+ }
+ pointer_set_insert (visited_hists, hist);
+ }
+ }
+ if (VALUE_HISTOGRAMS (cfun))
+ htab_traverse (VALUE_HISTOGRAMS (cfun), visit_hist, visited_hists);
+ pointer_set_destroy (visited_hists);
+ if (error_found)
+ internal_error ("verify_histograms failed");
+}
- TODO -- we should also check whether there is not (usually) a small
- difference with the adjacent memory references, so that we do
- not issue overlapping prefetches. Also we should employ some
- heuristics to eliminate cases where prefetching evidently spoils
- the code.
- -- it should somehow cooperate with the loop optimizer prefetching
+/* Helper function for free_histograms. Free one histogram reachable via
+ htab walk. */
- TODO:
+static int
+free_hist (void **slot, void *data ATTRIBUTE_UNUSED)
+{
+ histogram_value hist = *(histogram_value *) slot;
+ free (hist->hvalue.counters);
+#ifdef ENABLE_CHECKING
+ memset (hist, 0xab, sizeof (*hist));
+#endif
+ free (hist);
+ return 0;
+}
- There are other useful cases that could be handled by a similar mechanism,
- for example:
-
- for (i = 0; i < n; i++)
- ...
-
- transform to (for constant N):
-
- if (n == N)
- for (i = 0; i < N; i++)
- ...
- else
- for (i = 0; i < n; i++)
- ...
- making unroller happy. Since this may grow the code significantly,
- we would have to be very careful here. */
+void
+free_histograms (void)
+{
+ if (VALUE_HISTOGRAMS (cfun))
+ {
+ htab_traverse (VALUE_HISTOGRAMS (cfun), free_hist, NULL);
+ htab_delete (VALUE_HISTOGRAMS (cfun));
+ VALUE_HISTOGRAMS (cfun) = NULL;
+ }
+}
+/* The overall number of invocations of the counter should match execution count
+ of basic block. Report it as error rather than internal error as it might
+ mean that user has misused the profile somehow. */
static bool
-rtl_value_profile_transformations (void)
+check_counter (tree stmt, const char * name, gcov_type all, gcov_type bb_count)
{
- rtx insn, next;
- int changed = false;
-
- for (insn = get_insns (); insn; insn = next)
+ if (all != bb_count)
{
- next = NEXT_INSN (insn);
-
- if (!INSN_P (insn))
- continue;
-
- /* Scan for insn carrying a histogram. */
- if (!find_reg_note (insn, REG_VALUE_PROFILE, 0))
- continue;
+ location_t * locus;
+ locus = (stmt != NULL && EXPR_HAS_LOCATION (stmt)
+ ? EXPR_LOCUS (stmt)
+ : &DECL_SOURCE_LOCATION (current_function_decl));
+ error ("%HCorrupted value profile: %s profiler overall count (%d) does not match BB count (%d)",
+ locus, name, (int)all, (int)bb_count);
+ return true;
+ }
+ return false;
+}
- /* Ignore cold areas -- we are growing a code. */
- if (!maybe_hot_bb_p (BLOCK_FOR_INSN (insn)))
- continue;
+/* Tree based transformations. */
+static bool
+tree_value_profile_transformations (void)
+{
+ basic_block bb;
+ block_stmt_iterator bsi;
+ bool changed = false;
- if (dump_file)
+ FOR_EACH_BB (bb)
+ {
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
{
- fprintf (dump_file, "Trying transformations on insn %d\n",
- INSN_UID (insn));
- print_rtl_single (dump_file, insn);
- }
+ tree stmt = bsi_stmt (bsi);
+ histogram_value th = gimple_histogram_value (cfun, stmt);
+ if (!th)
+ continue;
- /* Transformations: */
- if (flag_value_profile_transformations
- && (mod_subtract_transform (insn)
- || divmod_fixed_value_transform (insn)
- || mod_pow2_value_transform (insn)))
- changed = true;
-#ifdef HAVE_prefetch
- if (flag_speculative_prefetching
- && speculative_prefetching_transform (insn))
- changed = true;
-#endif
+ if (dump_file)
+ {
+ fprintf (dump_file, "Trying transformations on stmt ");
+ print_generic_stmt (dump_file, stmt, TDF_SLIM);
+ dump_histograms_for_stmt (cfun, dump_file, stmt);
+ }
+
+ /* Transformations: */
+ /* The order of things in this conditional controls which
+ transformation is used when more than one is applicable. */
+ /* It is expected that any code added by the transformations
+ will be added before the current statement, and that the
+ current statement remain valid (although possibly
+ modified) upon return. */
+ if (flag_value_profile_transformations
+ && (tree_mod_subtract_transform (stmt)
+ || tree_divmod_fixed_value_transform (stmt)
+ || tree_mod_pow2_value_transform (stmt)
+ || tree_stringops_transform (&bsi)))
+ {
+ stmt = bsi_stmt (bsi);
+ changed = true;
+ /* Original statement may no longer be in the same block. */
+ if (bb != bb_for_stmt (stmt))
+ {
+ bb = bb_for_stmt (stmt);
+ bsi = bsi_for_stmt (stmt);
+ }
+ }
+ }
}
if (changed)
{
- commit_edge_insertions ();
- allocate_reg_info (max_reg_num (), FALSE, FALSE);
+ counts_to_freqs ();
}
return changed;
}
-/* Generate code for transformation 1 (with MODE and OPERATION, operands OP1
- and OP2, whose value is expected to be VALUE, result TARGET and
- probability of taking the optimal path PROB). */
-static rtx
-gen_divmod_fixed_value (enum machine_mode mode, enum rtx_code operation,
- rtx target, rtx op1, rtx op2, gcov_type value,
- int prob)
+/* Generate code for transformation 1 (with OPERATION, operands OP1
+ and OP2, whose value is expected to be VALUE, parent modify-expr STMT and
+ probability of taking the optimal path PROB, which is equivalent to COUNT/ALL
+ within roundoff error). This generates the result into a temp and returns
+ the temp; it does not replace or alter the original STMT. */
+static tree
+tree_divmod_fixed_value (tree stmt, tree operation,
+ tree op1, tree op2, tree value, int prob, gcov_type count,
+ gcov_type all)
{
- rtx tmp, tmp1, jump;
- rtx neq_label = gen_label_rtx ();
- rtx end_label = gen_label_rtx ();
- rtx sequence;
-
- start_sequence ();
-
- if (!REG_P (op2))
- {
- tmp = gen_reg_rtx (mode);
- emit_move_insn (tmp, copy_rtx (op2));
- }
- else
- tmp = op2;
-
- do_compare_rtx_and_jump (tmp, GEN_INT (value), NE, 0, mode, NULL_RTX,
- NULL_RTX, neq_label);
-
- /* Add branch probability to jump we just created. */
- jump = get_last_insn ();
- REG_NOTES (jump) = gen_rtx_EXPR_LIST (REG_BR_PROB,
- GEN_INT (REG_BR_PROB_BASE - prob),
- REG_NOTES (jump));
-
- tmp1 = simplify_gen_binary (operation, mode,
- copy_rtx (op1), GEN_INT (value));
- tmp1 = force_operand (tmp1, target);
- if (tmp1 != target)
- emit_move_insn (copy_rtx (target), copy_rtx (tmp1));
-
- emit_jump_insn (gen_jump (end_label));
- emit_barrier ();
-
- emit_label (neq_label);
- tmp1 = simplify_gen_binary (operation, mode,
- copy_rtx (op1), copy_rtx (tmp));
- tmp1 = force_operand (tmp1, target);
- if (tmp1 != target)
- emit_move_insn (copy_rtx (target), copy_rtx (tmp1));
+ tree stmt1, stmt2, stmt3;
+ tree tmp1, tmp2, tmpv;
+ tree label_decl1 = create_artificial_label ();
+ tree label_decl2 = create_artificial_label ();
+ tree label1, label2;
+ tree bb1end, bb2end, bb3end;
+ basic_block bb, bb2, bb3, bb4;
+ tree optype = TREE_TYPE (operation);
+ edge e12, e13, e23, e24, e34;
+ block_stmt_iterator bsi;
+
+ bb = bb_for_stmt (stmt);
+ bsi = bsi_for_stmt (stmt);
+
+ tmpv = create_tmp_var (optype, "PROF");
+ tmp1 = create_tmp_var (optype, "PROF");
+ stmt1 = build2 (GIMPLE_MODIFY_STMT, optype, tmpv,
+ fold_convert (optype, value));
+ stmt2 = build2 (GIMPLE_MODIFY_STMT, optype, tmp1, op2);
+ stmt3 = build3 (COND_EXPR, void_type_node,
+ build2 (NE_EXPR, boolean_type_node, tmp1, tmpv),
+ build1 (GOTO_EXPR, void_type_node, label_decl2),
+ build1 (GOTO_EXPR, void_type_node, label_decl1));
+ bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
+ bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
+ bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
+ bb1end = stmt3;
+
+ tmp2 = create_tmp_var (optype, "PROF");
+ label1 = build1 (LABEL_EXPR, void_type_node, label_decl1);
+ stmt1 = build2 (GIMPLE_MODIFY_STMT, optype, tmp2,
+ build2 (TREE_CODE (operation), optype, op1, tmpv));
+ bsi_insert_before (&bsi, label1, BSI_SAME_STMT);
+ bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
+ bb2end = stmt1;
+
+ label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
+ stmt1 = build2 (GIMPLE_MODIFY_STMT, optype, tmp2,
+ build2 (TREE_CODE (operation), optype, op1, op2));
+ bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
+ bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
+ bb3end = stmt1;
+
+ /* Fix CFG. */
+ /* Edge e23 connects bb2 to bb3, etc. */
+ e12 = split_block (bb, bb1end);
+ bb2 = e12->dest;
+ bb2->count = count;
+ e23 = split_block (bb2, bb2end);
+ bb3 = e23->dest;
+ bb3->count = all - count;
+ e34 = split_block (bb3, bb3end);
+ bb4 = e34->dest;
+ bb4->count = all;
+
+ e12->flags &= ~EDGE_FALLTHRU;
+ e12->flags |= EDGE_FALSE_VALUE;
+ e12->probability = prob;
+ e12->count = count;
+
+ e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
+ e13->probability = REG_BR_PROB_BASE - prob;
+ e13->count = all - count;
+
+ remove_edge (e23);
- emit_label (end_label);
+ e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
+ e24->probability = REG_BR_PROB_BASE;
+ e24->count = count;
+
+ e34->probability = REG_BR_PROB_BASE;
+ e34->count = all - count;
- sequence = get_insns ();
- end_sequence ();
- rebuild_jump_labels (sequence);
- return sequence;
+ return tmp2;
}
/* Do transform 1) on INSN if applicable. */
static bool
-divmod_fixed_value_transform (rtx insn)
+tree_divmod_fixed_value_transform (tree stmt)
{
- rtx set, set_src, set_dest, op1, op2, value, histogram;
- enum rtx_code code;
- enum machine_mode mode;
+ histogram_value histogram;
+ enum tree_code code;
 gcov_type val, count, all;
- edge e;
+ tree modify, op, op1, op2, result, value, tree_val;
 int prob;
+ /* The candidate is "lhs = op1 / op2" (or %), possibly wrapped in a
+ RETURN_EXPR; unwrap to the GIMPLE_MODIFY_STMT. */
- set = single_set (insn);
- if (!set)
+ modify = stmt;
+ if (TREE_CODE (stmt) == RETURN_EXPR
+ && TREE_OPERAND (stmt, 0)
+ && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
+ modify = TREE_OPERAND (stmt, 0);
+ if (TREE_CODE (modify) != GIMPLE_MODIFY_STMT)
 return false;
-
- set_src = SET_SRC (set);
- set_dest = SET_DEST (set);
- code = GET_CODE (set_src);
- mode = GET_MODE (set_dest);
+ op = GIMPLE_STMT_OPERAND (modify, 1);
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (op)))
+ return false;
+ code = TREE_CODE (op);
- if (code != DIV && code != MOD && code != UDIV && code != UMOD)
+ if (code != TRUNC_DIV_EXPR && code != TRUNC_MOD_EXPR)
 return false;
- op1 = XEXP (set_src, false);
- op2 = XEXP (set_src, 1);
-
- for (histogram = REG_NOTES (insn);
- histogram;
- histogram = XEXP (histogram, 1))
- if (REG_NOTE_KIND (histogram) == REG_VALUE_PROFILE
- && XEXP (XEXP (histogram, 0), 0) == GEN_INT (HIST_TYPE_SINGLE_VALUE))
- break;
+ op1 = TREE_OPERAND (op, 0);
+ op2 = TREE_OPERAND (op, 1);
+
+ /* counters[] layout for HIST_TYPE_SINGLE_VALUE: presumably
+ { candidate value, its hit count, total executions } -- TODO
+ confirm against the instrumentation code in profile.c. */
+ histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_SINGLE_VALUE);
 if (!histogram)
 return false;
- histogram = XEXP (XEXP (histogram, 0), 1);
- value = XEXP (histogram, 0);
- histogram = XEXP (histogram, 1);
- val = INTVAL (XEXP (histogram, 0));
- histogram = XEXP (histogram, 1);
- count = INTVAL (XEXP (histogram, 0));
- histogram = XEXP (histogram, 1);
- all = INTVAL (XEXP (histogram, 0));
+ value = histogram->hvalue.value;
+ val = histogram->hvalue.counters[0];
+ count = histogram->hvalue.counters[1];
+ all = histogram->hvalue.counters[2];
+ /* The histogram is consumed here even when the transform bails
+ out on the checks below. */
+ gimple_remove_histogram_value (cfun, stmt, histogram);
- /* We require that count be at least half of all; this means
+ /* We require that count is at least half of all; this means
 that for the transformation to fire the value must be constant
 at least 50% of time (and 75% gives the guarantee of usage). */
- if (!rtx_equal_p (op2, value) || 2 * count < all)
+ if (simple_cst_equal (op2, value) != 1 || 2 * count < all
+ || !maybe_hot_bb_p (bb_for_stmt (stmt)))
 return false;
- if (dump_file)
- fprintf (dump_file, "Div/mod by constant transformation on insn %d\n",
- INSN_UID (insn));
+ /* check_counter returns true (after warning) on a profile/CFG
+ count mismatch; do not transform then. */
+ if (check_counter (stmt, "value", all, bb_for_stmt (stmt)->count))
+ return false;
 /* Compute probability of taking the optimal path. */
 prob = (count * REG_BR_PROB_BASE + all / 2) / all;
- e = split_block (BLOCK_FOR_INSN (insn), PREV_INSN (insn));
- delete_insn (insn);
-
- insert_insn_on_edge (
- gen_divmod_fixed_value (mode, code, set_dest,
- op1, op2, val, prob), e);
+ /* Rebuild the gcov counter as a wide integer constant; the double
+ shift avoids an undefined shift by HOST_BITS_PER_WIDE_INT when
+ gcov_type and HOST_WIDE_INT have the same width. */
+ tree_val = build_int_cst_wide (get_gcov_type (),
+ (unsigned HOST_WIDE_INT) val,
+ val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1);
+ result = tree_divmod_fixed_value (stmt, op, op1, op2, tree_val, prob, count, all);
+
+ if (dump_file)
+ {
+ fprintf (dump_file, "Div/mod by constant ");
+ print_generic_expr (dump_file, value, TDF_SLIM);
+ fprintf (dump_file, "=");
+ print_generic_expr (dump_file, tree_val, TDF_SLIM);
+ fprintf (dump_file, " transformation on insn ");
+ print_generic_stmt (dump_file, stmt, TDF_SLIM);
+ }
+
+ GIMPLE_STMT_OPERAND (modify, 1) = result;
 return true;
}
-/* Generate code for transformation 2 (with MODE and OPERATION, operands OP1
- and OP2, result TARGET and probability of taking the optimal path PROB). */
-static rtx
-gen_mod_pow2 (enum machine_mode mode, enum rtx_code operation, rtx target,
- rtx op1, rtx op2, int prob)
+/* Generate code for transformation 2 (with OPERATION, operands OP1
+ and OP2, parent modify-expr STMT and probability of taking the optimal
+ path PROB, which is equivalent to COUNT/ALL within roundoff error).
+ This generates the result into a temp and returns
+ the temp; it does not replace or alter the original STMT. */
+static tree
+tree_mod_pow2 (tree stmt, tree operation, tree op1, tree op2, int prob,
+ gcov_type count, gcov_type all)
{
- rtx tmp, tmp1, tmp2, tmp3, jump;
- rtx neq_label = gen_label_rtx ();
- rtx end_label = gen_label_rtx ();
- rtx sequence;
-
- start_sequence ();
+ tree stmt1, stmt2, stmt3, stmt4;
+ tree tmp2, tmp3;
+ tree label_decl1 = create_artificial_label ();
+ tree label_decl2 = create_artificial_label ();
+ tree label1, label2;
+ tree bb1end, bb2end, bb3end;
+ basic_block bb, bb2, bb3, bb4;
+ tree optype = TREE_TYPE (operation);
+ edge e12, e13, e23, e24, e34;
+ block_stmt_iterator bsi;
+ tree result = create_tmp_var (optype, "PROF");
+
+ bb = bb_for_stmt (stmt);
+ bsi = bsi_for_stmt (stmt);
+
+ /* op2 is a power of 2 exactly when ((op2 - 1) & op2) == 0; build
+ that test and branch to the cheap BIT_AND path or the fallback. */
+ tmp2 = create_tmp_var (optype, "PROF");
+ tmp3 = create_tmp_var (optype, "PROF");
+ stmt2 = build2 (GIMPLE_MODIFY_STMT, optype, tmp2,
+ build2 (PLUS_EXPR, optype, op2, build_int_cst (optype, -1)));
+ stmt3 = build2 (GIMPLE_MODIFY_STMT, optype, tmp3,
+ build2 (BIT_AND_EXPR, optype, tmp2, op2));
+ stmt4 = build3 (COND_EXPR, void_type_node,
+ build2 (NE_EXPR, boolean_type_node,
+ tmp3, build_int_cst (optype, 0)),
+ build1 (GOTO_EXPR, void_type_node, label_decl2),
+ build1 (GOTO_EXPR, void_type_node, label_decl1));
+ bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
+ bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
+ bsi_insert_before (&bsi, stmt4, BSI_SAME_STMT);
+ bb1end = stmt4;
+
+ /* tmp2 == op2-1 inherited from previous block */
+ label1 = build1 (LABEL_EXPR, void_type_node, label_decl1);
+ stmt1 = build2 (GIMPLE_MODIFY_STMT, optype, result,
+ build2 (BIT_AND_EXPR, optype, op1, tmp2));
+ bsi_insert_before (&bsi, label1, BSI_SAME_STMT);
+ bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
+ bb2end = stmt1;
+
+ label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
+ stmt1 = build2 (GIMPLE_MODIFY_STMT, optype, result,
+ build2 (TREE_CODE (operation), optype, op1, op2));
+ bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
+ bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
+ bb3end = stmt1;
+
+ /* Fix CFG. */
+ /* Edge e23 connects bb2 to bb3, etc. */
+ e12 = split_block (bb, bb1end);
+ bb2 = e12->dest;
+ bb2->count = count;
+ e23 = split_block (bb2, bb2end);
+ bb3 = e23->dest;
+ bb3->count = all - count;
+ e34 = split_block (bb3, bb3end);
+ bb4 = e34->dest;
+ bb4->count = all;
+
+ e12->flags &= ~EDGE_FALLTHRU;
+ e12->flags |= EDGE_FALSE_VALUE;
+ e12->probability = prob;
+ e12->count = count;
+
+ e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
+ e13->probability = REG_BR_PROB_BASE - prob;
+ e13->count = all - count;
+
+ /* The fast path (bb2) must not fall into the fallback (bb3);
+ route it straight to the join block bb4 instead. */
+ remove_edge (e23);
- if (!REG_P (op2))
- {
- tmp = gen_reg_rtx (mode);
- emit_move_insn (tmp, copy_rtx (op2));
- }
- else
- tmp = op2;
-
- tmp1 = expand_simple_binop (mode, PLUS, tmp, constm1_rtx, NULL_RTX,
- 0, OPTAB_WIDEN);
- tmp2 = expand_simple_binop (mode, AND, tmp, tmp1, NULL_RTX,
- 0, OPTAB_WIDEN);
- do_compare_rtx_and_jump (tmp2, const0_rtx, NE, 0, mode, NULL_RTX,
- NULL_RTX, neq_label);
-
- /* Add branch probability to jump we just created. */
- jump = get_last_insn ();
- REG_NOTES (jump) = gen_rtx_EXPR_LIST (REG_BR_PROB,
- GEN_INT (REG_BR_PROB_BASE - prob),
- REG_NOTES (jump));
-
- tmp3 = expand_simple_binop (mode, AND, op1, tmp1, target,
- 0, OPTAB_WIDEN);
- if (tmp3 != target)
- emit_move_insn (copy_rtx (target), tmp3);
- emit_jump_insn (gen_jump (end_label));
- emit_barrier ();
-
- emit_label (neq_label);
- tmp1 = simplify_gen_binary (operation, mode, copy_rtx (op1), copy_rtx (tmp));
- tmp1 = force_operand (tmp1, target);
- if (tmp1 != target)
- emit_move_insn (target, tmp1);
-
- emit_label (end_label);
+ e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
+ e24->probability = REG_BR_PROB_BASE;
+ e24->count = count;
- sequence = get_insns ();
- end_sequence ();
- rebuild_jump_labels (sequence);
- return sequence;
+ e34->probability = REG_BR_PROB_BASE;
+ e34->count = all - count;
+
+ return result;
}
/* Do transform 2) on INSN if applicable. */
static bool
-mod_pow2_value_transform (rtx insn)
+tree_mod_pow2_value_transform (tree stmt)
{
- rtx set, set_src, set_dest, op1, op2, value, histogram;
- enum rtx_code code;
- enum machine_mode mode;
- gcov_type wrong_values, count;
- edge e;
- int i, all, prob;
-
- set = single_set (insn);
- if (!set)
- return false;
+ histogram_value histogram;
+ enum tree_code code;
+ gcov_type count, wrong_values, all;
+ tree modify, op, op1, op2, result, value;
+ int prob;
+ /* Unwrap a RETURN_EXPR to the underlying GIMPLE_MODIFY_STMT. */
- set_src = SET_SRC (set);
- set_dest = SET_DEST (set);
- code = GET_CODE (set_src);
- mode = GET_MODE (set_dest);
+ modify = stmt;
+ if (TREE_CODE (stmt) == RETURN_EXPR
+ && TREE_OPERAND (stmt, 0)
+ && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
+ modify = TREE_OPERAND (stmt, 0);
+ if (TREE_CODE (modify) != GIMPLE_MODIFY_STMT)
+ return false;
+ op = GIMPLE_STMT_OPERAND (modify, 1);
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (op)))
+ return false;
+ code = TREE_CODE (op);
- if (code != UMOD)
+ if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (TREE_TYPE (op)))
 return false;
- op1 = XEXP (set_src, 0);
- op2 = XEXP (set_src, 1);
-
- for (histogram = REG_NOTES (insn);
- histogram;
- histogram = XEXP (histogram, 1))
- if (REG_NOTE_KIND (histogram) == REG_VALUE_PROFILE
- && XEXP (XEXP (histogram, 0), 0) == GEN_INT (HIST_TYPE_POW2))
- break;
+ op1 = TREE_OPERAND (op, 0);
+ op2 = TREE_OPERAND (op, 1);
+
+ histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_POW2);
 if (!histogram)
 return false;
- histogram = XEXP (XEXP (histogram, 0), 1);
- value = XEXP (histogram, 0);
- histogram = XEXP (histogram, 1);
- wrong_values =INTVAL (XEXP (histogram, 0));
- histogram = XEXP (histogram, 1);
+ /* counters[] layout for HIST_TYPE_POW2: presumably
+ { non-power-of-2 hits, power-of-2 hits } -- TODO confirm
+ against the instrumentation code in profile.c. */
+ value = histogram->hvalue.value;
+ wrong_values = histogram->hvalue.counters[0];
+ count = histogram->hvalue.counters[1];
- count = 0;
- for (i = 0; i < GET_MODE_BITSIZE (mode); i++)
- {
- count += INTVAL (XEXP (histogram, 0));
- histogram = XEXP (histogram, 1);
- }
+ /* The histogram is consumed even when the checks below fail. */
+ gimple_remove_histogram_value (cfun, stmt, histogram);
- if (!rtx_equal_p (op2, value))
- return false;
-
- /* We require that we hit a power of two at least half of all evaluations. */
- if (count < wrong_values)
+ /* We require that we hit a power of 2 at least half of all evaluations. */
+ if (simple_cst_equal (op2, value) != 1 || count < wrong_values
+ || !maybe_hot_bb_p (bb_for_stmt (stmt)))
 return false;
 if (dump_file)
- fprintf (dump_file, "Mod power of 2 transformation on insn %d\n",
- INSN_UID (insn));
+ {
+ fprintf (dump_file, "Mod power of 2 transformation on insn ");
+ print_generic_stmt (dump_file, stmt, TDF_SLIM);
+ }
 /* Compute probability of taking the optimal path. */
 all = count + wrong_values;
+
+ if (check_counter (stmt, "pow2", all, bb_for_stmt (stmt)->count))
+ return false;
+
 prob = (count * REG_BR_PROB_BASE + all / 2) / all;
- e = split_block (BLOCK_FOR_INSN (insn), PREV_INSN (insn));
- delete_insn (insn);
-
- insert_insn_on_edge (
- gen_mod_pow2 (mode, code, set_dest, op1, op2, prob), e);
+ result = tree_mod_pow2 (stmt, op, op1, op2, prob, count, all);
+
+ GIMPLE_STMT_OPERAND (modify, 1) = result;
 return true;
}
-/* Generate code for transformations 3 and 4 (with MODE and OPERATION,
- operands OP1 and OP2, result TARGET, at most SUB subtractions, and
- probability of taking the optimal path(s) PROB1 and PROB2). */
-static rtx
-gen_mod_subtract (enum machine_mode mode, enum rtx_code operation,
- rtx target, rtx op1, rtx op2, int sub, int prob1, int prob2)
+/* Generate code for transformations 3 and 4 (with OPERATION, operands OP1
+ and OP2, parent modify-expr STMT, and NCOUNTS the number of cases to
+ support. Currently only NCOUNTS==0 or 1 is supported and this is
+ built into this interface. The probabilities of taking the optimal
+ paths are PROB1 and PROB2, which are equivalent to COUNT1/ALL and
+ COUNT2/ALL respectively within roundoff error). This generates the
+ result into a temp and returns the temp; it does not replace or alter
+ the original STMT. */
+/* FIXME: Generalize the interface to handle NCOUNTS > 1. */
+
+static tree
+tree_mod_subtract (tree stmt, tree operation, tree op1, tree op2,
+ int prob1, int prob2, int ncounts,
+ gcov_type count1, gcov_type count2, gcov_type all)
{
- rtx tmp, tmp1, jump;
- rtx end_label = gen_label_rtx ();
- rtx sequence;
- int i;
+ tree stmt1, stmt2, stmt3;
+ tree tmp1;
+ tree label_decl1 = create_artificial_label ();
+ tree label_decl2 = create_artificial_label ();
+ tree label_decl3 = create_artificial_label ();
+ tree label1, label2, label3;
+ tree bb1end, bb2end = NULL_TREE, bb3end;
+ basic_block bb, bb2, bb3, bb4;
+ tree optype = TREE_TYPE (operation);
+ edge e12, e23 = 0, e24, e34, e14;
+ block_stmt_iterator bsi;
+ tree result = create_tmp_var (optype, "PROF");
+
+ bb = bb_for_stmt (stmt);
+ bsi = bsi_for_stmt (stmt);
+
+ /* result = op1; tmp1 = op2; if (result < tmp1) the mod is a no-op
+ (jump to the join label), else try subtraction / fallback. */
+ tmp1 = create_tmp_var (optype, "PROF");
+ stmt1 = build2 (GIMPLE_MODIFY_STMT, optype, result, op1);
+ stmt2 = build2 (GIMPLE_MODIFY_STMT, optype, tmp1, op2);
+ stmt3 = build3 (COND_EXPR, void_type_node,
+ build2 (LT_EXPR, boolean_type_node, result, tmp1),
+ build1 (GOTO_EXPR, void_type_node, label_decl3),
+ build1 (GOTO_EXPR, void_type_node,
+ ncounts ? label_decl1 : label_decl2));
+ bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
+ bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
+ bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
+ bb1end = stmt3;
+
+ if (ncounts) /* Assumed to be 0 or 1 */
+ {
+ label1 = build1 (LABEL_EXPR, void_type_node, label_decl1);
+ stmt1 = build2 (GIMPLE_MODIFY_STMT, optype, result,
+ build2 (MINUS_EXPR, optype, result, tmp1));
+ stmt2 = build3 (COND_EXPR, void_type_node,
+ build2 (LT_EXPR, boolean_type_node, result, tmp1),
+ build1 (GOTO_EXPR, void_type_node, label_decl3),
+ build1 (GOTO_EXPR, void_type_node, label_decl2));
+ bsi_insert_before (&bsi, label1, BSI_SAME_STMT);
+ bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
+ bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
+ bb2end = stmt2;
+ }
- start_sequence ();
-
- if (!REG_P (op2))
+ /* Fallback case. */
+ label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
+ stmt1 = build2 (GIMPLE_MODIFY_STMT, optype, result,
+ build2 (TREE_CODE (operation), optype, result, tmp1));
+ bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
+ bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
+ bb3end = stmt1;
+
+ label3 = build1 (LABEL_EXPR, void_type_node, label_decl3);
+ bsi_insert_before (&bsi, label3, BSI_SAME_STMT);
+
+ /* Fix CFG. */
+ /* Edge e23 connects bb2 to bb3, etc. */
+ /* However block 3 is optional; if it is not there, references
+ to 3 really refer to block 2. */
+ e12 = split_block (bb, bb1end);
+ bb2 = e12->dest;
+ bb2->count = all - count1;
+
+ if (ncounts) /* Assumed to be 0 or 1. */
 {
- tmp = gen_reg_rtx (mode);
- emit_move_insn (tmp, copy_rtx (op2));
+ e23 = split_block (bb2, bb2end);
+ bb3 = e23->dest;
+ bb3->count = all - count1 - count2;
 }
- else
- tmp = op2;
- emit_move_insn (target, copy_rtx (op1));
- do_compare_rtx_and_jump (target, tmp, LTU, 0, mode, NULL_RTX,
- NULL_RTX, end_label);
+ e34 = split_block (ncounts ? bb3 : bb2, bb3end);
+ bb4 = e34->dest;
+ bb4->count = all;
- /* Add branch probability to jump we just created. */
- jump = get_last_insn ();
- REG_NOTES (jump) = gen_rtx_EXPR_LIST (REG_BR_PROB,
- GEN_INT (prob1), REG_NOTES (jump));
+ e12->flags &= ~EDGE_FALLTHRU;
+ e12->flags |= EDGE_FALSE_VALUE;
+ e12->probability = REG_BR_PROB_BASE - prob1;
+ e12->count = all - count1;
- for (i = 0; i < sub; i++)
+ e14 = make_edge (bb, bb4, EDGE_TRUE_VALUE);
+ e14->probability = prob1;
+ e14->count = count1;
+
+ if (ncounts) /* Assumed to be 0 or 1. */
 {
- tmp1 = expand_simple_binop (mode, MINUS, target, tmp, target,
- 0, OPTAB_WIDEN);
- if (tmp1 != target)
- emit_move_insn (target, tmp1);
- do_compare_rtx_and_jump (target, tmp, LTU, 0, mode, NULL_RTX,
- NULL_RTX, end_label);
-
- /* Add branch probability to jump we just created. */
- jump = get_last_insn ();
- REG_NOTES (jump) = gen_rtx_EXPR_LIST (REG_BR_PROB,
- GEN_INT (prob2), REG_NOTES (jump));
+ e23->flags &= ~EDGE_FALLTHRU;
+ e23->flags |= EDGE_FALSE_VALUE;
+ e23->count = all - count1 - count2;
+ e23->probability = REG_BR_PROB_BASE - prob2;
+
+ e24 = make_edge (bb2, bb4, EDGE_TRUE_VALUE);
+ e24->probability = prob2;
+ e24->count = count2;
 }
- tmp1 = simplify_gen_binary (operation, mode, copy_rtx (target), copy_rtx (tmp));
- tmp1 = force_operand (tmp1, target);
- if (tmp1 != target)
- emit_move_insn (target, tmp1);
-
- emit_label (end_label);
+ e34->probability = REG_BR_PROB_BASE;
+ e34->count = all - count1 - count2;
- sequence = get_insns ();
- end_sequence ();
- rebuild_jump_labels (sequence);
- return sequence;
+ return result;
}
/* Do transforms 3) and 4) on INSN if applicable. */
static bool
-mod_subtract_transform (rtx insn)
+tree_mod_subtract_transform (tree stmt)
{
- rtx set, set_src, set_dest, op1, op2, histogram;
- enum rtx_code code;
- enum machine_mode mode;
- gcov_type wrong_values, counts[2], count, all;
- edge e;
- int i, prob1, prob2;
-
- set = single_set (insn);
- if (!set)
+ histogram_value histogram;
+ enum tree_code code;
+ gcov_type count, wrong_values, all;
+ tree modify, op, op1, op2, result, value;
+ int prob1, prob2;
+ unsigned int i, steps;
+ gcov_type count1, count2;
+
+ /* Unwrap a RETURN_EXPR to the underlying GIMPLE_MODIFY_STMT. */
+ modify = stmt;
+ if (TREE_CODE (stmt) == RETURN_EXPR
+ && TREE_OPERAND (stmt, 0)
+ && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
+ modify = TREE_OPERAND (stmt, 0);
+ if (TREE_CODE (modify) != GIMPLE_MODIFY_STMT)
 return false;
-
- set_src = SET_SRC (set);
- set_dest = SET_DEST (set);
- code = GET_CODE (set_src);
- mode = GET_MODE (set_dest);
+ op = GIMPLE_STMT_OPERAND (modify, 1);
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (op)))
+ return false;
+ code = TREE_CODE (op);
- if (code != UMOD)
+ if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (TREE_TYPE (op)))
 return false;
- op1 = XEXP (set_src, 0);
- op2 = XEXP (set_src, 1);
-
- for (histogram = REG_NOTES (insn);
- histogram;
- histogram = XEXP (histogram, 1))
- if (REG_NOTE_KIND (histogram) == REG_VALUE_PROFILE
- && XEXP (XEXP (histogram, 0), 0) == GEN_INT (HIST_TYPE_INTERVAL))
- break;
+ op1 = TREE_OPERAND (op, 0);
+ op2 = TREE_OPERAND (op, 1);
+
+ histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INTERVAL);
 if (!histogram)
 return false;
- histogram = XEXP (XEXP (histogram, 0), 1);
- histogram = XEXP (histogram, 1);
-
+ value = histogram->hvalue.value;
 all = 0;
- for (i = 0; i < 2; i++)
+ wrong_values = 0;
+ for (i = 0; i < histogram->hdata.intvl.steps; i++)
+ all += histogram->hvalue.counters[i];
+
+ /* After the loop i == steps; counters[steps] and counters[steps+1]
+ presumably hold the below-range and above-range counts -- TODO
+ confirm against the interval counter layout in profile.c. */
+ wrong_values += histogram->hvalue.counters[i];
+ wrong_values += histogram->hvalue.counters[i+1];
+ steps = histogram->hdata.intvl.steps;
+ all += wrong_values;
+ count1 = histogram->hvalue.counters[0];
+ count2 = histogram->hvalue.counters[1];
+
+ /* Compute probability of taking the optimal path. */
+ if (check_counter (stmt, "interval", all, bb_for_stmt (stmt)->count))
 {
- counts[i] = INTVAL (XEXP (histogram, 0));
- all += counts[i];
- histogram = XEXP (histogram, 1);
+ gimple_remove_histogram_value (cfun, stmt, histogram);
+ return false;
 }
- wrong_values = INTVAL (XEXP (histogram, 0));
- histogram = XEXP (histogram, 1);
- wrong_values += INTVAL (XEXP (histogram, 0));
- all += wrong_values;
 /* We require that we use just subtractions in at least 50% of all
 evaluations. */
 count = 0;
- for (i = 0; i < 2; i++)
+ for (i = 0; i < histogram->hdata.intvl.steps; i++)
 {
- count += counts[i];
+ count += histogram->hvalue.counters[i];
 if (count * 2 >= all)
 break;
 }
-
- if (i == 2)
+ if (i == steps
+ || !maybe_hot_bb_p (bb_for_stmt (stmt)))
 return false;
+ gimple_remove_histogram_value (cfun, stmt, histogram);
 if (dump_file)
- fprintf (dump_file, "Mod subtract transformation on insn %d\n",
- INSN_UID (insn));
+ {
+ fprintf (dump_file, "Mod subtract transformation on insn ");
+ print_generic_stmt (dump_file, stmt, TDF_SLIM);
+ }
 /* Compute probability of taking the optimal path(s). */
- prob1 = (counts[0] * REG_BR_PROB_BASE + all / 2) / all;
- prob2 = (counts[1] * REG_BR_PROB_BASE + all / 2) / all;
+ prob1 = (count1 * REG_BR_PROB_BASE + all / 2) / all;
+ prob2 = (count2 * REG_BR_PROB_BASE + all / 2) / all;
- e = split_block (BLOCK_FOR_INSN (insn), PREV_INSN (insn));
- delete_insn (insn);
-
- insert_insn_on_edge (
- gen_mod_subtract (mode, code, set_dest,
- op1, op2, i, prob1, prob2), e);
+ /* In practice, "steps" is always 2. This interface reflects this,
+ and will need to be changed if "steps" can change. */
+ result = tree_mod_subtract (stmt, op, op1, op2, prob1, prob2, i,
+ count1, count2, all);
+
+ GIMPLE_STMT_OPERAND (modify, 1) = result;
 return true;
}
-#ifdef HAVE_prefetch
-/* Generate code for transformation 5 for mem with ADDRESS and a constant
- step DELTA. WRITE is true if the reference is a store to mem. */
+/* Return true if the stringop FNDECL with ARGLIST shall be profiled. */
+static bool
+interesting_stringop_to_profile_p (tree fndecl, tree arglist)
+{
+ enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
-static rtx
-gen_speculative_prefetch (rtx address, gcov_type delta, int write)
+ if (fcode != BUILT_IN_MEMSET && fcode != BUILT_IN_MEMCPY
+ && fcode != BUILT_IN_BZERO)
+ return false;
+
+ switch (fcode)
+ {
+ case BUILT_IN_MEMCPY:
+ /* NOTE(review): BUILT_IN_MEMPCPY is handled below but rejected by
+ the early filter above, so that case is currently unreachable --
+ confirm whether mempcpy should be added to the filter. */
+ case BUILT_IN_MEMPCPY:
+ return validate_arglist (arglist,
+ POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
+ VOID_TYPE);
+ case BUILT_IN_MEMSET:
+ return validate_arglist (arglist,
+ POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
+ VOID_TYPE);
+ case BUILT_IN_BZERO:
+ return validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE,
+ VOID_TYPE);
+ default:
+ gcc_unreachable ();
+ }
+}
+
+/* Convert stringop (..., size)
+ into
+ if (size == VALUE)
+ stringop (...., VALUE);
+ else
+ stringop (...., size);
+ assuming constant propagation of VALUE will happen later.
+ PROB is the probability that SIZE == VALUE (equivalent to COUNT/ALL
+ within roundoff error); the versioned call keeps STMT's EH region.
+*/
+static void
+tree_stringop_fixed_value (tree stmt, tree value, int prob, gcov_type count,
+ gcov_type all)
{
- rtx tmp;
- rtx sequence;
-
- /* TODO: we do the prefetching for just one iteration ahead, which
- often is not enough. */
- start_sequence ();
- if (offsettable_address_p (0, VOIDmode, address))
- tmp = plus_constant (copy_rtx (address), delta);
+ tree stmt1, stmt2, stmt3;
+ tree tmp1, tmpv;
+ tree label_decl1 = create_artificial_label ();
+ tree label_decl2 = create_artificial_label ();
+ tree label1, label2;
+ tree bb1end, bb2end;
+ basic_block bb, bb2, bb3, bb4;
+ edge e12, e13, e23, e24, e34;
+ block_stmt_iterator bsi;
+ tree call = get_call_expr_in (stmt);
+ tree arglist = TREE_OPERAND (call, 1);
+ tree blck_size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
+ tree optype = TREE_TYPE (blck_size);
+ int region;
+
+ bb = bb_for_stmt (stmt);
+ bsi = bsi_for_stmt (stmt);
+
+ /* Locate the edge into the join block: when STMT ends its block,
+ reuse an existing normal successor edge; otherwise split after STMT. */
+ if (bsi_end_p (bsi))
+ {
+ edge_iterator ei;
+ /* Fixed from "!e34->flags & EDGE_ABNORMAL": `!' binds before `&',
+ so the old test was always false and -- with no iterator
+ advance -- the loop could never terminate. */
+ for (ei = ei_start (bb->succs); (e34 = ei_safe_edge (ei)); ei_next (&ei))
+ if (!(e34->flags & EDGE_ABNORMAL))
+ break;
+ }
 else
 {
- tmp = simplify_gen_binary (PLUS, Pmode,
- copy_rtx (address), GEN_INT (delta));
- tmp = force_operand (tmp, NULL);
+ e34 = split_block (bb, stmt);
+ bsi = bsi_for_stmt (stmt);
 }
- if (! (*insn_data[(int)CODE_FOR_prefetch].operand[0].predicate)
- (tmp, insn_data[(int)CODE_FOR_prefetch].operand[0].mode))
- tmp = force_reg (Pmode, tmp);
- emit_insn (gen_prefetch (tmp, GEN_INT (write), GEN_INT (3)));
- sequence = get_insns ();
- end_sequence ();
-
- return sequence;
-}
+ bb4 = e34->dest;
+
+ /* tmpv = VALUE; tmp1 = size; branch to the generic call if they
+ differ, to the specialized call if they match. */
+ tmpv = create_tmp_var (optype, "PROF");
+ tmp1 = create_tmp_var (optype, "PROF");
+ stmt1 = build2 (GIMPLE_MODIFY_STMT, optype, tmpv,
+ fold_convert (optype, value));
+ stmt2 = build2 (GIMPLE_MODIFY_STMT, optype, tmp1, blck_size);
+ stmt3 = build3 (COND_EXPR, void_type_node,
+ build2 (NE_EXPR, boolean_type_node, tmp1, tmpv),
+ build1 (GOTO_EXPR, void_type_node, label_decl2),
+ build1 (GOTO_EXPR, void_type_node, label_decl1));
+ bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
+ bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
+ bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
+ bb1end = stmt3;
+
+ /* Clone the call with the size argument replaced by VALUE and keep
+ it in the same EH region as the original call. */
+ label1 = build1 (LABEL_EXPR, void_type_node, label_decl1);
+ stmt1 = unshare_expr (stmt);
+ call = get_call_expr_in (stmt1);
+ arglist = TREE_OPERAND (call, 1);
+ TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))) = value;
+ bsi_insert_before (&bsi, label1, BSI_SAME_STMT);
+ bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
+ region = lookup_stmt_eh_region (stmt);
+ if (region >= 0)
+ add_stmt_to_eh_region (stmt1, region);
+ bb2end = stmt1;
+ label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
+ bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
+
+ /* Fix CFG. */
+ /* Edge e23 connects bb2 to bb3, etc. */
+ e12 = split_block (bb, bb1end);
+ bb2 = e12->dest;
+ bb2->count = count;
+ e23 = split_block (bb2, bb2end);
+ bb3 = e23->dest;
+ bb3->count = all - count;
+
+ e12->flags &= ~EDGE_FALLTHRU;
+ e12->flags |= EDGE_FALSE_VALUE;
+ e12->probability = prob;
+ e12->count = count;
+
+ e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
+ e13->probability = REG_BR_PROB_BASE - prob;
+ e13->count = all - count;
+
+ /* The specialized call must not fall into the generic call's block;
+ send it straight to the join block bb4. */
+ remove_edge (e23);
+
+ e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
+ e24->probability = REG_BR_PROB_BASE;
+ e24->count = count;
-/* Do transform 5) on INSN if applicable. */
+ e34->probability = REG_BR_PROB_BASE;
+ e34->count = all - count;
+}
+/* Transform the stringop at *BSI when its profiled block size is a
+ single dominant constant: version the call on that size so later
+ constant propagation can expand the specialized copy inline.
+ Returns true if the statement was changed. */
 static bool
-speculative_prefetching_transform (rtx insn)
+tree_stringops_transform (block_stmt_iterator *bsi)
{
- rtx histogram, value;
- gcov_type val, count, all;
- edge e;
- rtx mem, address;
- int write;
-
- if (!maybe_hot_bb_p (BLOCK_FOR_INSN (insn)))
- return false;
+ tree stmt = bsi_stmt (*bsi);
+ tree call = get_call_expr_in (stmt);
+ tree fndecl;
+ tree arglist;
+ tree blck_size;
+ enum built_in_function fcode;
+ histogram_value histogram;
+ gcov_type count, all, val;
+ tree value;
+ tree dest, src;
+ unsigned int dest_align, src_align;
+ int prob;
+ tree tree_val;
- if (!find_mem_reference (insn, &mem, &write))
+ if (!call)
 return false;
-
- address = XEXP (mem, 0);
- if (side_effects_p (address))
+ fndecl = get_callee_fndecl (call);
+ if (!fndecl)
 return false;
-
- if (CONSTANT_P (address))
+ fcode = DECL_FUNCTION_CODE (fndecl);
+ arglist = TREE_OPERAND (call, 1);
+ if (!interesting_stringop_to_profile_p (fndecl, arglist))
 return false;
- for (histogram = REG_NOTES (insn);
- histogram;
- histogram = XEXP (histogram, 1))
- if (REG_NOTE_KIND (histogram) == REG_VALUE_PROFILE
- && XEXP (XEXP (histogram, 0), 0) == GEN_INT (HIST_TYPE_CONST_DELTA))
- break;
-
- if (!histogram)
+ /* bzero takes the size as its second argument, the others as their
+ third; nothing to do when the size is already a constant. */
+ if (fcode == BUILT_IN_BZERO)
+ blck_size = TREE_VALUE (TREE_CHAIN (arglist));
+ else
+ blck_size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
+ if (TREE_CODE (blck_size) == INTEGER_CST)
 return false;
- histogram = XEXP (XEXP (histogram, 0), 1);
- value = XEXP (histogram, 0);
- histogram = XEXP (histogram, 1);
- /* Skip last value referenced. */
- histogram = XEXP (histogram, 1);
- val = INTVAL (XEXP (histogram, 0));
- histogram = XEXP (histogram, 1);
- count = INTVAL (XEXP (histogram, 0));
- histogram = XEXP (histogram, 1);
- all = INTVAL (XEXP (histogram, 0));
-
- /* With that few executions we do not really have a reason to optimize the
- statement, and more importantly, the data about differences of addresses
- are spoiled by the first item that had no previous value to compare
- with. */
- if (all < 4)
+ histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_SINGLE_VALUE);
+ if (!histogram)
 return false;
-
- /* We require that count be at least half of all; this means
+ value = histogram->hvalue.value;
+ val = histogram->hvalue.counters[0];
+ count = histogram->hvalue.counters[1];
+ all = histogram->hvalue.counters[2];
+ gimple_remove_histogram_value (cfun, stmt, histogram);
+ /* We require that count be a large majority of all; this means
 that for the transformation to fire the value must be constant
- at least 50% of time (and 75% gives the guarantee of usage). */
- if (!rtx_equal_p (address, value) || 2 * count < all)
+ at least 80% of time. */
+ if ((6 * count / 5) < all || !maybe_hot_bb_p (bb_for_stmt (stmt)))
 return false;
-
- /* If the difference is too small, it does not make too much sense to
- prefetch, as the memory is probably already in cache. */
- if (val >= NOPREFETCH_RANGE_MIN && val <= NOPREFETCH_RANGE_MAX)
+ if (check_counter (stmt, "value", all, bb_for_stmt (stmt)->count))
 return false;
-
+ prob = (count * REG_BR_PROB_BASE + all / 2) / all;
+ /* Only version if the specialized size can be expanded by pieces
+ for the given destination (and source) alignment. */
+ dest = TREE_VALUE (arglist);
+ dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
+ switch (fcode)
+ {
+ case BUILT_IN_MEMCPY:
+ case BUILT_IN_MEMPCPY:
+ src = TREE_VALUE (TREE_CHAIN (arglist));
+ src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
+ if (!can_move_by_pieces (val, MIN (dest_align, src_align)))
+ return false;
+ break;
+ case BUILT_IN_MEMSET:
+ if (!can_store_by_pieces (val, builtin_memset_read_str,
+ TREE_VALUE (TREE_CHAIN (arglist)),
+ dest_align))
+ return false;
+ break;
+ case BUILT_IN_BZERO:
+ if (!can_store_by_pieces (val, builtin_memset_read_str,
+ integer_zero_node,
+ dest_align))
+ return false;
+ break;
+ default:
+ gcc_unreachable ();
+ }
+ tree_val = build_int_cst_wide (get_gcov_type (),
+ (unsigned HOST_WIDE_INT) val,
+ val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1);
 if (dump_file)
- fprintf (dump_file, "Speculative prefetching for insn %d\n",
- INSN_UID (insn));
-
- e = split_block (BLOCK_FOR_INSN (insn), PREV_INSN (insn));
+ {
+ fprintf (dump_file, "Single value %i stringop transformation on ",
+ (int)val);
+ print_generic_stmt (dump_file, stmt, TDF_SLIM);
+ }
+ tree_stringop_fixed_value (stmt, tree_val, prob, count, all);
- insert_insn_on_edge (gen_speculative_prefetch (address, val, write), e);
-
 return true;
}
-#endif /* HAVE_prefetch */
-\f
-/* Connection to the outside world. */
-/* Struct for IR-dependent hooks. */
+
struct value_prof_hooks {
/* Find list of values for which we want to measure histograms. */
void (*find_values_to_profile) (histogram_values *);
statically. See value-prof.c for more detail. */
bool (*value_profile_transformations) (void);
};
-
-/* Hooks for RTL-based versions (the only ones that currently work). */
-static struct value_prof_hooks rtl_value_prof_hooks =
+\f
+/* Find values inside STMT for which we want to measure histograms for
+ division/modulo optimization. */
+static void
+tree_divmod_values_to_profile (tree stmt, histogram_values *values)
{
- rtl_find_values_to_profile,
- rtl_value_profile_transformations
-};
+ tree assign, lhs, rhs, divisor, op0, type;
+ histogram_value hist;
-void
-rtl_register_value_prof_hooks (void)
+ if (TREE_CODE (stmt) == RETURN_EXPR)
+ assign = TREE_OPERAND (stmt, 0);
+ else
+ assign = stmt;
+
+ if (!assign
+ || TREE_CODE (assign) != GIMPLE_MODIFY_STMT)
+ return;
+ lhs = GIMPLE_STMT_OPERAND (assign, 0);
+ type = TREE_TYPE (lhs);
+ if (!INTEGRAL_TYPE_P (type))
+ return;
+
+ rhs = GIMPLE_STMT_OPERAND (assign, 1);
+ switch (TREE_CODE (rhs))
+ {
+ case TRUNC_DIV_EXPR:
+ case TRUNC_MOD_EXPR:
+ divisor = TREE_OPERAND (rhs, 1);
+ op0 = TREE_OPERAND (rhs, 0);
+
+ /* At most three histograms are pushed below; reserve them up front. */
+ VEC_reserve (histogram_value, heap, *values, 3);
+
+ if (is_gimple_reg (divisor))
+ /* Check for the case where the divisor is the same value most
+ of the time. */
+ VEC_quick_push (histogram_value, *values,
+ gimple_alloc_histogram_value (cfun, HIST_TYPE_SINGLE_VALUE,
+ stmt, divisor));
+
+ /* For mod, check whether it is not often a noop (or replaceable by
+ a few subtractions). */
+ if (TREE_CODE (rhs) == TRUNC_MOD_EXPR
+ && TYPE_UNSIGNED (type))
+ {
+ tree val;
+ /* Check for a special case where the divisor is power of 2. */
+ VEC_quick_push (histogram_value, *values,
+ gimple_alloc_histogram_value (cfun, HIST_TYPE_POW2,
+ stmt, divisor));
+
+ /* Profile op0/divisor over the interval [0, steps) -- small
+ quotients mean the mod is a no-op or a few subtractions. */
+ val = build2 (TRUNC_DIV_EXPR, type, op0, divisor);
+ hist = gimple_alloc_histogram_value (cfun, HIST_TYPE_INTERVAL,
+ stmt, val);
+ hist->hdata.intvl.int_start = 0;
+ hist->hdata.intvl.steps = 2;
+ VEC_quick_push (histogram_value, *values, hist);
+ }
+ return;
+
+ default:
+ return;
+ }
+}
+
+/* Find values inside STMT for which we want to measure histograms for
+ string operations. */
+static void
+tree_stringops_values_to_profile (tree stmt, histogram_values *values)
{
- value_prof_hooks = &rtl_value_prof_hooks;
- gcc_assert (!ir_type ());
+ tree call = get_call_expr_in (stmt);
+ tree fndecl;
+ tree arglist;
+ tree blck_size;
+ enum built_in_function fcode;
+
+ if (!call)
+ return;
+ fndecl = get_callee_fndecl (call);
+ if (!fndecl)
+ return;
+ fcode = DECL_FUNCTION_CODE (fndecl);
+ arglist = TREE_OPERAND (call, 1);
+
+ if (!interesting_stringop_to_profile_p (fndecl, arglist))
+ return;
+
+ /* bzero takes the size as its second argument, the others as their third. */
+ if (fcode == BUILT_IN_BZERO)
+ blck_size = TREE_VALUE (TREE_CHAIN (arglist));
+ else
+ blck_size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
+
+ /* A constant size needs no profiling; it is already known. */
+ if (TREE_CODE (blck_size) != INTEGER_CST)
+ VEC_safe_push (histogram_value, heap, *values,
+ gimple_alloc_histogram_value (cfun, HIST_TYPE_SINGLE_VALUE,
+ stmt, blck_size));
}
-\f
-/* Tree-based versions are stubs for now. */
+
+/* Find values inside STMT for which we want to measure histograms and add
+ them to list VALUES. */
+
static void
-tree_find_values_to_profile (histogram_values *values ATTRIBUTE_UNUSED)
+tree_values_to_profile (tree stmt, histogram_values *values)
{
- gcc_unreachable ();
+ if (flag_value_profile_transformations)
+ {
+ tree_divmod_values_to_profile (stmt, values);
+ tree_stringops_values_to_profile (stmt, values);
+ }
}
-static bool
-tree_value_profile_transformations (void)
+/* Walk every statement in the function, collect the values to profile
+ into *VALUES and set the number of counters each histogram kind
+ requires. */
+static void
+tree_find_values_to_profile (histogram_values *values)
{
- gcc_unreachable ();
+ basic_block bb;
+ block_stmt_iterator bsi;
+ unsigned i;
+ histogram_value hist = NULL;
+
+ *values = NULL;
+ FOR_EACH_BB (bb)
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ tree_values_to_profile (bsi_stmt (bsi), values);
+
+ for (i = 0; VEC_iterate (histogram_value, *values, i, hist); i++)
+ {
+ switch (hist->type)
+ {
+ case HIST_TYPE_INTERVAL:
+ /* steps in-range buckets plus below-/above-range counters. */
+ hist->n_counters = hist->hdata.intvl.steps + 2;
+ break;
+
+ case HIST_TYPE_POW2:
+ hist->n_counters = 2;
+ break;
+
+ case HIST_TYPE_SINGLE_VALUE:
+ hist->n_counters = 3;
+ break;
+
+ case HIST_TYPE_CONST_DELTA:
+ hist->n_counters = 4;
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+ if (dump_file)
+ {
+ fprintf (dump_file, "Stmt ");
+ print_generic_expr (dump_file, hist->hvalue.stmt, TDF_SLIM);
+ dump_histogram_value (dump_file, hist);
+ }
+ }
}
static struct value_prof_hooks tree_value_prof_hooks = {
void
tree_register_value_prof_hooks (void)
{
+ /* Tree value profiling is only meaningful while the IR is GIMPLE. */
+ gcc_assert (current_ir_type () == IR_GIMPLE);
 value_prof_hooks = &tree_value_prof_hooks;
- gcc_assert (ir_type ());
}
\f
/* IR-independent entry points. */
{
return (value_prof_hooks->value_profile_transformations) ();
}
+\f