/* Array prefetching.
- Copyright (C) 2005, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+ Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011
+ Free Software Foundation, Inc.
This file is part of GCC.
#include "tree-chrec.h"
#include "tree-scalar-evolution.h"
#include "diagnostic-core.h"
-#include "toplev.h"
#include "params.h"
#include "langhooks.h"
#include "tree-inline.h"
(2) has PREFETCH_MOD 64
(3) has PREFETCH_MOD 4
(4) has PREFETCH_MOD 1. We do not set PREFETCH_BEFORE here, since
- the cache line accessed by (4) is the same with probability only
+ the cache line accessed by (5) is the same with probability only
7/32.
(5) has PREFETCH_MOD 1 as well.
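   For context, references (0)-(5) come from an example loop of roughly
   this shape (a sketch, assuming a 64-byte cache line and char-sized
   accesses; the concrete strides are only illustrative):

      char *a;
      for (i = 0; i < max; i++)
	{
	  a[255] = ...;		(0)
	  a[i] = ...;		(1)
	  a[i + 64] = ...;	(2)
	  a[16*i] = ...;	(3)
	  a[187*i] = ...;	(4)
	  a[187*i + 50] = ...;	(5)
	}

   With stride 1, (2) touches a new cache line once per 64 iterations,
   hence PREFETCH_MOD 64; with stride 16, (3) does so once per 4
   iterations.  (4) and (5) differ by 50 bytes, so they share a 64-byte
   line only when (4) lands within the first 64 - 50 = 14 bytes of a
   line, i.e. with probability 14/64 = 7/32.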
HOST_WIDE_INT idelta = 0, imult = 1;
affine_iv iv;
- if (TREE_CODE (base) == MISALIGNED_INDIRECT_REF)
- return false;
-
if (!simple_iv (ar_data->loop, loop_containing_stmt (ar_data->stmt),
*index, &iv, true))
return false;
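  /* A sketch of what simple_iv provides here: for an index that evolves
     as 4*i + 8 in the analyzed loop, it fills IV with base 8 and step 4;
     the surrounding code then folds stripped constant offsets into
     IDELTA and the element size into IMULT.  */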
*step = NULL_TREE;
*delta = 0;
- /* First strip off the component references. Ignore bitfields. */
- if (TREE_CODE (ref) == COMPONENT_REF
- && DECL_NONADDRESSABLE_P (TREE_OPERAND (ref, 1)))
- ref = TREE_OPERAND (ref, 0);
+ /* First strip off the component references. Ignore bitfields.
+ Also strip off the real and imaginary parts of a complex, so that
+ they can have the same base. */
+ if (TREE_CODE (ref) == REALPART_EXPR
+ || TREE_CODE (ref) == IMAGPART_EXPR
+ || (TREE_CODE (ref) == COMPONENT_REF
+ && DECL_NONADDRESSABLE_P (TREE_OPERAND (ref, 1))))
+ {
+ if (TREE_CODE (ref) == IMAGPART_EXPR)
+ *delta += int_size_in_bytes (TREE_TYPE (ref));
+ ref = TREE_OPERAND (ref, 0);
+ }
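  /* For example, for a _Complex float array C, __real__ C[i] and
     __imag__ C[i] now both analyze to the base C[i], the imaginary part
     with an extra delta of sizeof (float), so the two halves can be
     grouped and covered by a single prefetch.  */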
*ref_p = ref;
if (step == NULL_TREE)
return false;
+ /* Stop if the address of BASE could not be taken. */
+ if (may_be_nonaddressable_p (base))
+ return false;
+
/* Limit non-constant step prefetching only to the innermost loops. */
if (!cst_and_fits_in_hwi (step) && loop->inner != NULL)
return false;
prefetch_before = (hit_from - delta_r + step - 1) / step;
/* Do not reduce prefetch_before if we meet beyond cache size. */
- if (prefetch_before > (unsigned) abs (L2_CACHE_SIZE_BYTES / step))
+ if (prefetch_before > absu_hwi (L2_CACHE_SIZE_BYTES / step))
prefetch_before = PREFETCH_ALL;
if (prefetch_before < ref->prefetch_before)
ref->prefetch_before = prefetch_before;
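  /* A worked example of the cap above: with a 512 kB L2 cache and an
     8-byte step, L2_CACHE_SIZE_BYTES / step is 65536; a reuse more than
     that many iterations away is assumed to be evicted before it
     occurs, so the reference keeps PREFETCH_ALL instead.  */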
addr_base = force_gimple_operand_gsi (&bsi, unshare_expr (addr_base),
true, NULL, true, GSI_SAME_STMT);
write_p = ref->write_p ? integer_one_node : integer_zero_node;
- local = build_int_cst (integer_type_node, nontemporal ? 0 : 3);
+ local = nontemporal ? integer_zero_node : integer_three_node;
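  /* LOCAL becomes the temporal-locality hint, the third argument of
     __builtin_prefetch: 0 asks that the data not be kept in the cache
     afterwards (nontemporal), 3 asks that it be left in all cache
     levels.  The emitted call is thus equivalent to
     __builtin_prefetch (addr, write_p, nontemporal ? 0 : 3).  */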
for (ap = 0; ap < n_prefetches; ap++)
{
/* Determine the address to prefetch. */
delta = (ahead + ap * ref->prefetch_mod) *
int_cst_value (ref->group->step);
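      /* For instance, with ahead == 4, prefetch_mod == 1 and a 16-byte
	 step, the ap-th prefetch targets addr_base + (4 + ap) * 16,
	 i.e. data the loop will touch about four iterations ahead.  */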
- addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
- addr_base, size_int (delta));
+ addr = fold_build_pointer_plus_hwi (addr_base, delta);
addr = force_gimple_operand_gsi (&bsi, unshare_expr (addr), true, NULL,
true, GSI_SAME_STMT);
}
forward = fold_build2 (MULT_EXPR, sizetype,
fold_convert (sizetype, ref->group->step),
fold_convert (sizetype, size_int (ahead)));
- addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, addr_base,
- forward);
+ addr = fold_build_pointer_plus (addr_base, forward);
addr = force_gimple_operand_gsi (&bsi, unshare_expr (addr), true,
NULL, true, GSI_SAME_STMT);
}
/* Create the prefetch instruction. */
- prefetch = gimple_build_call (built_in_decls[BUILT_IN_PREFETCH],
+ prefetch = gimple_build_call (builtin_decl_explicit (BUILT_IN_PREFETCH),
3, addr, write_p, local);
gsi_insert_before (&bsi, prefetch, GSI_SAME_STMT);
}
gimple_stmt_iterator bsi;
unsigned i;
- for (i = 0; VEC_iterate (edge, exits, i, exit); i++)
+ FOR_EACH_VEC_ELT (edge, exits, i, exit)
{
call = gimple_build_call (FENCE_FOLLOWING_MOVNT, 0);
unsigned i;
edge exit;
- for (i = 0; VEC_iterate (edge, exits, i, exit); i++)
+ FOR_EACH_VEC_ELT (edge, exits, i, exit)
if ((exit->flags & EDGE_ABNORMAL)
&& exit->dest == EXIT_BLOCK_PTR)
ret = false;
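  /* Nontemporal stores bypass the cache hierarchy, so a memory fence
     must follow them on every path out of the loop; an abnormal edge
     straight to the exit block is a path on which no fence can be
     inserted, hence nontemporal stores are disabled for the loop.  */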
strides = XCNEWVEC (HOST_WIDE_INT, n);
access_fns = DR_ACCESS_FNS (dr);
- for (i = 0; VEC_iterate (tree, access_fns, i, access_fn); i++)
+ FOR_EACH_VEC_ELT (tree, access_fns, i, access_fn)
{
/* Keep track of the reference corresponding to the subscript, so that we
know its stride. */
continue;
aloop = VEC_index (loop_p, vloops, i);
- vol = estimated_loop_iterations_int (aloop, false);
+ vol = max_stmt_executions_int (aloop, false);
if (vol < 0)
vol = expected_loop_iterations (aloop);
volume *= vol;
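  /* E.g. a surrounding loop expected to iterate 100 times scales the
     accumulated volume by 100: the amount of memory touched between two
     executions of the enclosing level grows by that factor, which is
     what determines whether a reuse still hits in the cache.  */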
for (gr = refs; gr; gr = gr->next)
for (ref = gr->refs; ref; ref = ref->next)
{
- dr = create_data_ref (nest, ref->mem, ref->stmt, !ref->write_p);
+ dr = create_data_ref (nest, loop_containing_stmt (ref->stmt),
+ ref->mem, ref->stmt, !ref->write_p);
if (dr)
{
no_other_refs = false;
}
- for (i = 0; VEC_iterate (data_reference_p, datarefs, i, dr); i++)
+ FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
{
dist = self_reuse_distance (dr, loop_data_size, n, loop);
ref = (struct mem_ref *) dr->aux;
compute_all_dependences (datarefs, &dependences, vloops, true);
- for (i = 0; VEC_iterate (ddr_p, dependences, i, dep); i++)
+ FOR_EACH_VEC_ELT (ddr_p, dependences, i, dep)
{
if (DDR_ARE_DEPENDENT (dep) == chrec_known)
continue;
return false;
ahead = (PREFETCH_LATENCY + time - 1) / time;
- est_niter = estimated_loop_iterations_int (loop, false);
+ est_niter = max_stmt_executions_int (loop, false);
/* Prefetching is not likely to be profitable if the trip count to ahead
ratio is too small. */
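  /* For instance, with PREFETCH_LATENCY == 200 and a loop body costing
     time == 50, ahead is (200 + 50 - 1) / 50 == 4, so the estimated
     trip count must comfortably exceed four iterations for any
     prefetched line to arrive before its use.  */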
initialize_original_copy_tables ();
- if (!built_in_decls[BUILT_IN_PREFETCH])
+ if (!builtin_decl_explicit_p (BUILT_IN_PREFETCH))
{
- tree type = build_function_type (void_type_node,
- tree_cons (NULL_TREE,
- const_ptr_type_node,
- NULL_TREE));
+ tree type = build_function_type_list (void_type_node,
+ const_ptr_type_node, NULL_TREE);
tree decl = add_builtin_function ("__builtin_prefetch", type,
BUILT_IN_PREFETCH, BUILT_IN_NORMAL,
NULL, NULL_TREE);
DECL_IS_NOVOPS (decl) = true;
- built_in_decls[BUILT_IN_PREFETCH] = decl;
+ set_builtin_decl (BUILT_IN_PREFETCH, decl, false);
}
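  /* The fallback declaration corresponds to the C prototype
	void __builtin_prefetch (const void *);
     the rw and locality arguments are optional at the source level, but
     the calls built by this pass always pass all three.  */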
/* We assume that size of cache line is a power of two, so verify this