/* Magic constants follow. These should be replaced by machine-specific
numbers. */
-/* A number that should rouhgly correspond to the number of instructions
+/* A number that should roughly correspond to the number of instructions
executed before the prefetch is completed. */
#ifndef PREFETCH_LATENCY
struct mem_ref *next; /* The next reference in the group. */
};
-/* Dumps information obout reference REF to FILE. */
+/* Dumps information about reference REF to FILE. */
static void
dump_mem_ref (FILE *file, struct mem_ref *ref)
if (cst_and_fits_in_hwi (ibase))
{
idelta += int_cst_value (ibase);
- ibase = build_int_cst_type (TREE_TYPE (ibase), 0);
+ ibase = build_int_cst (TREE_TYPE (ibase), 0);
}
if (TREE_CODE (base) == ARRAY_REF)
/* For now we just take memory references one by one and issue
prefetches for as many as possible. The groups are sorted
starting with the largest step, since the references with
- large step are more likely to cause many cache mises. */
+ large step are more likely to cause many cache misses. */
for (; groups; groups = groups->next)
for (ref = groups->refs; ref; ref = ref->next)
/* Issue prefetches for the reference REF into loop as decided before.
AHEAD is the number of iterations to prefetch ahead. UNROLL_FACTOR
- is the factor by thet LOOP was unrolled. */
+ is the factor by which LOOP was unrolled. */
static void
issue_prefetch_ref (struct mem_ref *ref, unsigned unroll_factor, unsigned ahead)