{
/* Reject incorrectly represented bit fields. */
if (DECL_BIT_FIELD (t)
+ && INTEGRAL_TYPE_P (TREE_TYPE (t))
&& (tree_low_cst (DECL_SIZE (t), 1)
!= TYPE_PRECISION (TREE_TYPE (t))))
goto fail;
/* HACK: if we decompose a va_list_type_node before inlining, then we'll
confuse tree-stdarg.c, and we won't be able to figure out which and
how many arguments are accessed. This really should be improved in
- tree-stdarg.c, as the decomposition is truely a win. This could also
+ tree-stdarg.c, as the decomposition is truly a win. This could also
be fixed if the stdarg pass ran early, but this can't be done until
we have aliasing information early too. See PR 30791. */
if (early_sra
static hashval_t
sra_elt_hash (const void *x)
{
- const struct sra_elt *e = x;
+ const struct sra_elt *const e = (const struct sra_elt *) x;
const struct sra_elt *p;
hashval_t h;
static int
sra_elt_eq (const void *x, const void *y)
{
- const struct sra_elt *a = x;
- const struct sra_elt *b = y;
+ const struct sra_elt *const a = (const struct sra_elt *) x;
+ const struct sra_elt *const b = (const struct sra_elt *) y;
tree ae, be;
const struct sra_elt *ap = a->parent;
const struct sra_elt *bp = b->parent;
elt = *slot;
if (!elt && insert == INSERT)
{
- *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
+ *slot = elt = XOBNEW (&sra_obstack, struct sra_elt);
memset (elt, 0, sizeof (*elt));
elt->parent = parent;
if (elt)
elt->is_vector_lhs = true;
}
+
/* A bit field reference (access to *multiple* fields simultaneously)
- is not currently scalarized. Consider this an access to the
- complete outer element, to which walk_tree will bring us next. */
-
+ is not currently scalarized. Consider this an access to the full
+ outer element, to which walk_tree will bring us next. */
goto use_all;
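/* Hypothetical illustration (an assumption, not part of the patch): fold
   can merge the two adjacent bit-field compares below into a single
   comparison of one BIT_FIELD_REF spanning both fields.  Such a
   reference touches multiple fields at once, so the walker records it
   as a use of the whole aggregate via the use_all path above.  */
struct flags
{
  unsigned a : 4;
  unsigned b : 4;
};

int
both_zero (struct flags x)
{
  return x.a == 0 && x.b == 0;
}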
- case VIEW_CONVERT_EXPR:
case NOP_EXPR:
- /* Similarly, a view/nop explicitly wants to look at an object in a
+ /* Similarly, a nop explicitly wants to look at an object in a
type other than the one we've scalarized. */
goto use_all;
+ case VIEW_CONVERT_EXPR:
+ /* Likewise for a view conversion, but with an additional twist:
+ it can be on the LHS and, in this case, an access to the full
+ outer element would mean a killing def. So we need to punt
+ if we don't already have a full access to the current element,
+ because we cannot pretend to have a killing def if we only
+ have a partial access at some level. */
+ if (is_output && !use_all_p && inner != expr)
+ disable_scalarization = true;
+ goto use_all;
+
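/* Hypothetical reduced example (an assumption, not part of the patch):
   the type-punned store below can appear in GIMPLE as a
   VIEW_CONVERT_EXPR on the left-hand side.  Treating it as an access
   to the full outer element would make it a killing def of all of w,
   wrongly discarding w.i when only a partial access has been
   scalarized; hence the punt above.  */
struct wrap
{
  float f;
  int i;
};

int
store_bits (struct wrap w)
{
  *(unsigned int *) &w.f = 0x3f800000u;  /* bit pattern of 1.0f */
  return w.i;                            /* must survive the store to w.f */
}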
case WITH_SIZE_EXPR:
/* This is a transparent wrapper. The entire inner expression really
is being used. */
static const struct sra_walk_fns fns = {
scan_use, scan_copy, scan_init, scan_ldst, true
};
+ bitmap_iterator bi;
sra_walk_function (&fns);
if (dump_file && (dump_flags & TDF_DETAILS))
{
- referenced_var_iterator ri;
- tree var;
+ unsigned i;
fputs ("\nScan results:\n", dump_file);
- FOR_EACH_REFERENCED_VAR_IN_BITMAP (sra_candidates, var, ri)
+ EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
{
+ tree var = referenced_var (i);
struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
if (elt)
scan_dump (elt);
DECL_SIZE_UNIT (var) = DECL_SIZE_UNIT (elt->element);
elt->in_bitfld_block = 1;
- elt->replacement = build3 (BIT_FIELD_REF, elt->type, var,
- DECL_SIZE (var),
- BYTES_BIG_ENDIAN
- ? size_binop (MINUS_EXPR,
- TYPE_SIZE (elt->type),
- DECL_SIZE (var))
- : bitsize_int (0));
- if (!INTEGRAL_TYPE_P (elt->type)
- || TYPE_UNSIGNED (elt->type))
- BIT_FIELD_REF_UNSIGNED (elt->replacement) = 1;
+ elt->replacement = fold_build3 (BIT_FIELD_REF, elt->type, var,
+ DECL_SIZE (var),
+ BYTES_BIG_ENDIAN
+ ? size_binop (MINUS_EXPR,
+ TYPE_SIZE (elt->type),
+ DECL_SIZE (var))
+ : bitsize_int (0));
}
/* For vectors, if used on the left hand side with BIT_FIELD_REF,
tree type, var;
struct sra_elt *block;
+ /* Pointer fields are typically best handled as standalone entities. */
+ if (POINTER_TYPE_P (TREE_TYPE (f)))
+ return f;
+
if (!is_sra_scalar_type (TREE_TYPE (f))
|| !host_integerp (DECL_FIELD_OFFSET (f), 1)
|| !host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
/* Create the field group as a single variable. */
- type = lang_hooks.types.type_for_mode (mode, 1);
+ /* We used to create a type for the mode above, but the size turns
+ out not to be the mode's size. As we need a matching type
+ to build a BIT_FIELD_REF, use a nonstandard integer type as
+ a fallback. */
+ type = lang_hooks.types.type_for_size (size, 1);
+ if (!type || TYPE_PRECISION (type) != size)
+ type = build_nonstandard_integer_type (size, 1);
gcc_assert (type);
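/* Hypothetical illustration (an assumption, not part of the patch): the
   two adjacent bit fields below group into one 17-bit block.  No
   machine mode is 17 bits wide, so type_for_size may fail to return a
   type of matching precision, and build_nonstandard_integer_type then
   supplies a 17-bit unsigned type usable in the BIT_FIELD_REF.  */
struct s
{
  unsigned a : 9;
  unsigned b : 8;
};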
var = build3 (BIT_FIELD_REF, type, NULL_TREE,
- bitsize_int (size),
- bitsize_int (bit));
- BIT_FIELD_REF_UNSIGNED (var) = 1;
+ bitsize_int (size), bitsize_int (bit));
block = instantiate_missing_elements_1 (elt, var, type);
gcc_assert (block && block->is_scalar);
if ((bit & ~alchk)
|| (HOST_WIDE_INT)size != tree_low_cst (DECL_SIZE (var), 1))
{
- block->replacement = build3 (BIT_FIELD_REF,
- TREE_TYPE (block->element), var,
- bitsize_int (size),
- bitsize_int (bit & ~alchk));
- BIT_FIELD_REF_UNSIGNED (block->replacement) = 1;
+ block->replacement = fold_build3 (BIT_FIELD_REF,
+ TREE_TYPE (block->element), var,
+ bitsize_int (size),
+ bitsize_int (bit & ~alchk));
}
block->in_bitfld_block = 2;
gcc_assert (fld && fld->is_scalar && !fld->replacement);
- fld->replacement = build3 (BIT_FIELD_REF, field_type, var,
- DECL_SIZE (f),
- bitsize_int
- ((TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f))
- * BITS_PER_UNIT
- + (TREE_INT_CST_LOW
- (DECL_FIELD_BIT_OFFSET (f))))
- & ~alchk));
- BIT_FIELD_REF_UNSIGNED (fld->replacement) = TYPE_UNSIGNED (field_type);
+ fld->replacement = fold_build3 (BIT_FIELD_REF, field_type, var,
+ DECL_SIZE (f),
+ bitsize_int
+ ((TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f))
+ * BITS_PER_UNIT
+ + (TREE_INT_CST_LOW
+ (DECL_FIELD_BIT_OFFSET (f))))
+ & ~alchk));
fld->in_bitfld_block = 1;
}
static void
decide_instantiations (void)
{
+ unsigned int i;
bool cleared_any;
bitmap_head done_head;
- referenced_var_iterator ri;
- tree var;
+ bitmap_iterator bi;
/* We cannot clear bits from a bitmap we're iterating over,
so save up all the bits to clear until the end. */
bitmap_initialize (&done_head, &bitmap_default_obstack);
cleared_any = false;
- FOR_EACH_REFERENCED_VAR_IN_BITMAP (sra_candidates, var, ri)
+ EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
{
+ tree var = referenced_var (i);
struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
if (elt)
{
}
if (!elt)
{
- bitmap_set_bit (&done_head, DECL_UID (var));
+ bitmap_set_bit (&done_head, i);
cleared_any = true;
}
}
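/* Standalone sketch of the defer-the-clears pattern used above, with a
   toy single-word bitmap instead of GCC's bitmap API (the names and
   the "drop even bits" rule are illustrative assumptions).  */
#include <stdio.h>

int
main (void)
{
  unsigned int candidates = 0xf5u;  /* the set being iterated */
  unsigned int done = 0u;           /* clears saved up for the end */
  unsigned int i;

  for (i = 0; i < 32; i++)
    if (candidates & (1u << i))
      {
        /* Stand-in for "no elt was instantiated for this variable".  */
        if ((i & 1) == 0)
          done |= 1u << i;
      }

  /* Apply all the deferred clears once iteration is finished.  */
  candidates &= ~done;
  printf ("remaining candidates: %#x\n", candidates);
  return 0;
}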
{
tree field = elt->element;
- /* We can't test elt->in_bitfld_blk here because, when this is
+ /* We can't test elt->in_bitfld_block here because, when this is
called from instantiate_element, we haven't set this field
yet. */
if (TREE_CODE (field) == BIT_FIELD_REF)
if (scalar_bitfield_p (src))
{
tree var, shift, width;
- tree utype, stype, stmp, utmp;
+ tree utype, stype, stmp, utmp, dtmp;
tree list, stmt;
- bool unsignedp = BIT_FIELD_REF_UNSIGNED (src);
+ bool unsignedp = (INTEGRAL_TYPE_P (TREE_TYPE (src))
+ ? TYPE_UNSIGNED (TREE_TYPE (src)) : true);
var = TREE_OPERAND (src, 0);
width = TREE_OPERAND (src, 1);
/* The offset needs to be adjusted to a right shift quantity
- depending on the endianess. */
+ depending on the endianness. */
if (BYTES_BIG_ENDIAN)
{
tree tmp = size_binop (PLUS_EXPR, width, TREE_OPERAND (src, 2));
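/* Standalone sketch of the shift adjustment (assuming a container of
   csize bits; a scalar simplification of the tree arithmetic above).
   On a big-endian target bit 0 is the most significant bit, so the
   right-shift count is measured from the opposite end of the word.  */
static unsigned int
bitfield_rshift (unsigned int csize, unsigned int bitpos,
                 unsigned int width, int bytes_big_endian)
{
  return bytes_big_endian ? csize - (bitpos + width) : bitpos;
}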
var = fold_convert (TREE_TYPE (dst), var);
else
var = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (dst), var);
+
+ /* If the destination is not a register, the conversion needs
+ to be a separate statement. */
+ if (!is_gimple_reg (dst))
+ {
+ dtmp = make_rename_temp (TREE_TYPE (dst), "SR");
+ stmt = build_gimple_modify_stmt (dtmp, var);
+ append_to_statement_list (stmt, &list);
+ var = dtmp;
+ }
}
stmt = build_gimple_modify_stmt (dst, var);
append_to_statement_list (stmt, &list);
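/* Illustration of the split above (assumed GIMPLE shapes, not taken
   from the patch): when dst is a memory reference, the conversion
   cannot feed the store directly, so instead of

       dst = VIEW_CONVERT_EXPR<T>(var);

   two statements are emitted,

       SR.1 = VIEW_CONVERT_EXPR<T>(var);
       dst = SR.1;

   where SR.1 is the fresh temporary created by make_rename_temp.  */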
if (elt->in_bitfld_block == 2
&& TREE_CODE (src) == BIT_FIELD_REF)
{
+ tmp = src;
cst = TYPE_SIZE (TREE_TYPE (var));
cst2 = size_binop (MINUS_EXPR, TREE_OPERAND (src, 2),
TREE_OPERAND (dst, 2));
}
else
{
- src = fold_build3 (BIT_FIELD_REF, TREE_TYPE (var), src, cst, cst2);
- BIT_FIELD_REF_UNSIGNED (src) = 1;
+ src = fold_convert (TREE_TYPE (var), tmp);
}
return sra_build_assignment (var, src);
type = TREE_TYPE (infld);
if (TYPE_PRECISION (type) != TREE_INT_CST_LOW (flen))
type = lang_hooks.types.type_for_size (TREE_INT_CST_LOW (flen), 1);
+ else
+ type = unsigned_type_for (type);
if (TREE_CODE (infld) == BIT_FIELD_REF)
{
}
infld = fold_build3 (BIT_FIELD_REF, type, infld, flen, fpos);
- BIT_FIELD_REF_UNSIGNED (infld) = 1;
invar = size_binop (MINUS_EXPR, flp.field_pos, bpos);
if (flp.overlap_pos)
invar = size_binop (PLUS_EXPR, invar, vpos);
invar = fold_build3 (BIT_FIELD_REF, type, var, flen, invar);
- BIT_FIELD_REF_UNSIGNED (invar) = 1;
if (to_var)
st = sra_build_bf_assignment (invar, infld);
scalarize_parms (void)
{
tree list = NULL;
- referenced_var_iterator ri;
- tree var;
+ unsigned i;
+ bitmap_iterator bi;
- FOR_EACH_REFERENCED_VAR_IN_BITMAP (needs_copy_in, var, ri)
+ EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
{
+ tree var = referenced_var (i);
struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
generate_copy_inout (elt, true, var, &list);
}
return flag_tree_sra != 0;
}
-struct tree_opt_pass pass_sra_early =
+struct gimple_opt_pass pass_sra_early =
{
+ {
+ GIMPLE_PASS,
"esra", /* name */
gate_sra, /* gate */
tree_sra_early, /* execute */
TODO_dump_func
| TODO_update_ssa
| TODO_ggc_collect
- | TODO_verify_ssa, /* todo_flags_finish */
- 0 /* letter */
+ | TODO_verify_ssa /* todo_flags_finish */
+ }
};
-struct tree_opt_pass pass_sra =
+struct gimple_opt_pass pass_sra =
{
+ {
+ GIMPLE_PASS,
"sra", /* name */
gate_sra, /* gate */
tree_sra, /* execute */
TODO_dump_func
| TODO_update_ssa
| TODO_ggc_collect
- | TODO_verify_ssa, /* todo_flags_finish */
- 0 /* letter */
+ | TODO_verify_ssa /* todo_flags_finish */
+ }
};