if (!known_alignment_for_access_p (dr))
{
tree type = (TREE_TYPE (DR_REF (dr)));
- tree ba = DR_BASE_OBJECT (dr);
- bool is_packed = false;
-
- if (ba)
- is_packed = contains_packed_reference (ba);
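+ /* Look for packed references in the whole access rather than the
+ base object, which the removed code shows may be unknown here.  */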
+ bool is_packed = contains_packed_reference (DR_REF (dr));
if (compare_tree_int (TYPE_SIZE (type), TYPE_ALIGN (type)) > 0)
is_packed = true;
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
struct loop *loop = NULL;
- HOST_WIDE_INT dr_step = TREE_INT_CST_LOW (step);
if (loop_vinfo)
loop = LOOP_VINFO_LOOP (loop_vinfo);
}
/* Allow invariant loads in loops. */
+ HOST_WIDE_INT dr_step = TREE_INT_CST_LOW (step);
if (loop_vinfo && dr_step == 0)
{
GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)) = NULL;
return false;
}
- base = unshare_expr (DR_BASE_ADDRESS (dr));
- offset = unshare_expr (DR_OFFSET (dr));
- init = unshare_expr (DR_INIT (dr));
-
if (stmt_can_throw_internal (stmt))
{
if (vect_print_dump_info (REPORT_UNVECTORIZED_LOCATIONS))
{
fprintf (vect_dump, "not vectorized: statement can throw an "
"exception ");
print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
}
return false;
}
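+ /* References to individual bit-fields do not access a naturally
+ addressable unit of memory, so they cannot be vectorized.  */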
+ if (TREE_CODE (DR_REF (dr)) == COMPONENT_REF
+ && DECL_BIT_FIELD (TREE_OPERAND (DR_REF (dr), 1)))
+ {
+ if (vect_print_dump_info (REPORT_UNVECTORIZED_LOCATIONS))
+ {
+ fprintf (vect_dump, "not vectorized: statement is bitfield "
+ "access ");
+ print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
+ }
+
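+ /* For basic block SLP, mark just this statement as unvectorizable
+ and stop analyzing the block rather than failing outright.  */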
+ if (bb_vinfo)
+ {
+ STMT_VINFO_VECTORIZABLE (stmt_info) = false;
+ stop_bb_analysis = true;
+ continue;
+ }
+
+ if (gather)
+ free_data_ref (dr);
+ return false;
+ }
+
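+ /* Work on unshared copies of the address parts so that later
+ adjustments do not modify the data reference in place.  */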
+ base = unshare_expr (DR_BASE_ADDRESS (dr));
+ offset = unshare_expr (DR_OFFSET (dr));
+ init = unshare_expr (DR_INIT (dr));
+
if (is_gimple_call (stmt))
{
if (vect_print_dump_info (REPORT_UNVECTORIZED_LOCATIONS))
/* Function vect_strided_store_supported.
- Returns TRUE is INTERLEAVE_HIGH and INTERLEAVE_LOW operations are supported,
- and FALSE otherwise. */
+ Returns TRUE if interleave high and interleave low permutations
+ are supported, and FALSE otherwise. */
bool
vect_strided_store_supported (tree vectype, unsigned HOST_WIDE_INT count)
{
- enum machine_mode mode;
-
- mode = TYPE_MODE (vectype);
+ enum machine_mode mode = TYPE_MODE (vectype);
/* vect_permute_store_chain requires the group size to be a power of two. */
if (exact_log2 (count) == -1)
return false;
}
- /* Check that the operation is supported. */
+ /* Check that the permutation is supported. */
if (VECTOR_MODE_P (mode))
{
unsigned int i, nelt = GET_MODE_NUNITS (mode);
tree vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
tree perm_mask_low, perm_mask_high;
unsigned int i, n;
- unsigned int j, nelt = GET_MODE_NUNITS (TYPE_MODE (vectype));
+ unsigned int j, nelt = TYPE_VECTOR_SUBPARTS (vectype);
unsigned char *sel = XALLOCAVEC (unsigned char, nelt);
- gcc_assert (vect_strided_store_supported (vectype, length));
-
*result_chain = VEC_copy (tree, heap, dr_chain);
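/* Build the interleave-high mask {0, nelt, 1, nelt + 1, ...}, which
picks the first halves of the two input vectors; adding nelt / 2 to
each index below turns it into the interleave-low mask for the
second halves.  */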
for (i = 0, n = nelt / 2; i < n; i++)
{
sel[i * 2] = i;
sel[i * 2 + 1] = i + nelt;
}
}
perm_mask_high = vect_gen_perm_mask (vectype, sel);
+ gcc_assert (perm_mask_high != NULL);
+
for (i = 0; i < nelt; i++)
sel[i] += nelt / 2;
perm_mask_low = vect_gen_perm_mask (vectype, sel);
+ gcc_assert (perm_mask_low != NULL);
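+
/* Each of the log2(length) passes below interleaves pairs of vectors
produced by the previous pass.  */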
for (i = 0, n = exact_log2 (length); i < n; i++)
{
/* Function vect_strided_load_supported.
- Returns TRUE is EXTRACT_EVEN and EXTRACT_ODD operations are supported,
+ Returns TRUE if even and odd permutations are supported,
and FALSE otherwise. */
bool
vect_strided_load_supported (tree vectype, unsigned HOST_WIDE_INT count)
{
- optab ee_optab, eo_optab;
- enum machine_mode mode;
-
- mode = TYPE_MODE (vectype);
+ enum machine_mode mode = TYPE_MODE (vectype);
/* vect_permute_load_chain requires the group size to be a power of two. */
if (exact_log2 (count) == -1)
return false;
}
- ee_optab = optab_for_tree_code (VEC_EXTRACT_EVEN_EXPR,
- vectype, optab_default);
- eo_optab = optab_for_tree_code (VEC_EXTRACT_ODD_EXPR,
- vectype, optab_default);
- if (ee_optab && eo_optab
- && optab_handler (ee_optab, mode) != CODE_FOR_nothing
- && optab_handler (eo_optab, mode) != CODE_FOR_nothing)
- return true;
+ /* Check that the permutation is supported. */
+ if (VECTOR_MODE_P (mode))
+ {
+ unsigned int i, nelt = GET_MODE_NUNITS (mode);
+ unsigned char *sel = XALLOCAVEC (unsigned char, nelt);
- if (can_vec_perm_for_code_p (VEC_EXTRACT_EVEN_EXPR, mode, NULL)
- && can_vec_perm_for_code_p (VEC_EXTRACT_ODD_EXPR, mode, NULL))
- return true;
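+ /* The even mask {0, 2, 4, ...} and the odd mask {1, 3, 5, ...}
+ select alternate elements of the two concatenated input vectors;
+ both must be supported as constant permutations.  */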
+ for (i = 0; i < nelt; i++)
+ sel[i] = i * 2;
+ if (can_vec_perm_p (mode, false, sel))
+ {
+ for (i = 0; i < nelt; i++)
+ sel[i] = i * 2 + 1;
+ if (can_vec_perm_p (mode, false, sel))
+ return true;
+ }
+ }
if (vect_print_dump_info (REPORT_DETAILS))
fprintf (vect_dump, "extract even/odd not supported by target");
VEC(tree,heap) **result_chain)
{
tree perm_dest, data_ref, first_vect, second_vect;
+ tree perm_mask_even, perm_mask_odd;
gimple perm_stmt;
tree vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
- int i;
- unsigned int j;
-
- gcc_assert (vect_strided_load_supported (vectype, length));
+ unsigned int i, j, log_length = exact_log2 (length);
+ unsigned nelt = TYPE_VECTOR_SUBPARTS (vectype);
+ unsigned char *sel = XALLOCAVEC (unsigned char, nelt);
*result_chain = VEC_copy (tree, heap, dr_chain);
- for (i = 0; i < exact_log2 (length); i++)
+
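+ /* Generate the even and odd selection masks once; every permute
+ statement built below reuses them.  */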
+ for (i = 0; i < nelt; ++i)
+ sel[i] = i * 2;
+ perm_mask_even = vect_gen_perm_mask (vectype, sel);
+ gcc_assert (perm_mask_even != NULL);
+
+ for (i = 0; i < nelt; ++i)
+ sel[i] = i * 2 + 1;
+ perm_mask_odd = vect_gen_perm_mask (vectype, sel);
+ gcc_assert (perm_mask_odd != NULL);
+
+ for (i = 0; i < log_length; i++)
{
- for (j = 0; j < length; j +=2)
+ for (j = 0; j < length; j += 2)
{
first_vect = VEC_index (tree, dr_chain, j);
second_vect = VEC_index (tree, dr_chain, j+1);
DECL_GIMPLE_REG_P (perm_dest) = 1;
add_referenced_var (perm_dest);
- perm_stmt = gimple_build_assign_with_ops (VEC_EXTRACT_EVEN_EXPR,
- perm_dest, first_vect,
- second_vect);
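+ /* VEC_PERM_EXPR is ternary, hence the _ops3 builder; for a
+ four-element vector this emits
+ perm_dest = VEC_PERM_EXPR <first_vect, second_vect, {0, 2, 4, 6}>;
+ selecting the even elements of the concatenation.  */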
+ perm_stmt = gimple_build_assign_with_ops3 (VEC_PERM_EXPR, perm_dest,
+ first_vect, second_vect,
+ perm_mask_even);
data_ref = make_ssa_name (perm_dest, perm_stmt);
gimple_assign_set_lhs (perm_stmt, data_ref);
DECL_GIMPLE_REG_P (perm_dest) = 1;
add_referenced_var (perm_dest);
- perm_stmt = gimple_build_assign_with_ops (VEC_EXTRACT_ODD_EXPR,
- perm_dest, first_vect,
- second_vect);
+ perm_stmt = gimple_build_assign_with_ops3 (VEC_PERM_EXPR, perm_dest,
+ first_vect, second_vect,
+ perm_mask_odd);
+
data_ref = make_ssa_name (perm_dest, perm_stmt);
gimple_assign_set_lhs (perm_stmt, data_ref);
vect_finish_stmt_generation (stmt, perm_stmt, gsi);
return dr_explicit_realign_optimized;
}
if (!known_alignment_for_access_p (dr))
- {
- tree ba = DR_BASE_OBJECT (dr);
-
- if (ba)
- is_packed = contains_packed_reference (ba);
- }
+ is_packed = contains_packed_reference (DR_REF (dr));
if (targetm.vectorize.
support_vector_misalignment (mode, type,
tree type = (TREE_TYPE (DR_REF (dr)));
if (!known_alignment_for_access_p (dr))
- {
- tree ba = DR_BASE_OBJECT (dr);
-
- if (ba)
- is_packed = contains_packed_reference (ba);
- }
+ is_packed = contains_packed_reference (DR_REF (dr));
if (targetm.vectorize.
support_vector_misalignment (mode, type,