tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
- if (!VOID_TYPE_P (TREE_TYPE (wrapper)))
+ tree type = TREE_TYPE (wrapper);
+ if (type && !VOID_TYPE_P (type))
{
- tree *p, sub = wrapper;
+ tree *p;
- restart:
- /* Set p to point to the body of the wrapper. */
- switch (TREE_CODE (sub))
- {
- case BIND_EXPR:
- /* For a BIND_EXPR, the body is operand 1. */
- p = &BIND_EXPR_BODY (sub);
- break;
-
- default:
- p = &TREE_OPERAND (sub, 0);
- break;
- }
-
- /* Advance to the last statement. Set all container types to void. */
- if (TREE_CODE (*p) == STATEMENT_LIST)
- {
- tree_stmt_iterator i = tsi_last (*p);
- p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
- }
- else
+ /* Set p to point to the body of the wrapper. Loop until we find
+ something that isn't a wrapper. */
+ for (p = &wrapper; p && *p; )
{
- for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
+ switch (TREE_CODE (*p))
{
+ case BIND_EXPR:
TREE_SIDE_EFFECTS (*p) = 1;
TREE_TYPE (*p) = void_type_node;
+ /* For a BIND_EXPR, the body is operand 1. */
+ p = &BIND_EXPR_BODY (*p);
+ break;
+
+ case CLEANUP_POINT_EXPR:
+ case TRY_FINALLY_EXPR:
+ case TRY_CATCH_EXPR:
+ TREE_SIDE_EFFECTS (*p) = 1;
+ TREE_TYPE (*p) = void_type_node;
+ p = &TREE_OPERAND (*p, 0);
+ break;
+
+ case STATEMENT_LIST:
+ {
+ tree_stmt_iterator i = tsi_last (*p);
+ TREE_SIDE_EFFECTS (*p) = 1;
+ TREE_TYPE (*p) = void_type_node;
+ p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
+ }
+ break;
+
+ case COMPOUND_EXPR:
+ /* Advance to the last statement. Set all container types to void. */
+ for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
+ {
+ TREE_SIDE_EFFECTS (*p) = 1;
+ TREE_TYPE (*p) = void_type_node;
+ }
+ break;
+
+ default:
+ goto out;
}
}
+ out:
if (p == NULL || IS_EMPTY_STMT (*p))
- ;
- /* Look through exception handling. */
- else if (TREE_CODE (*p) == TRY_FINALLY_EXPR
- || TREE_CODE (*p) == TRY_CATCH_EXPR)
- {
- sub = *p;
- goto restart;
- }
- /* The C++ frontend already did this for us. */
- else if (TREE_CODE (*p) == INIT_EXPR
- || TREE_CODE (*p) == TARGET_EXPR)
- temp = TREE_OPERAND (*p, 0);
- /* If we're returning a dereference, move the dereference
- outside the wrapper. */
- else if (TREE_CODE (*p) == INDIRECT_REF)
+ temp = NULL_TREE;
+ else if (temp)
{
- tree ptr = TREE_OPERAND (*p, 0);
- temp = create_tmp_var (TREE_TYPE (ptr), "retval");
- *p = build2 (MODIFY_EXPR, TREE_TYPE (ptr), temp, ptr);
- temp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (temp)), temp);
- /* If this is a BIND_EXPR for a const inline function, it might not
- have TREE_SIDE_EFFECTS set. That is no longer accurate. */
- TREE_SIDE_EFFECTS (wrapper) = 1;
+ /* The wrapper is on the RHS of an assignment that we're pushing
+ down. */
+ gcc_assert (TREE_CODE (temp) == INIT_EXPR
+ || TREE_CODE (temp) == MODIFY_EXPR);
+ TREE_OPERAND (temp, 1) = *p;
+ *p = temp;
}
else
{
- if (!temp)
- temp = create_tmp_var (TREE_TYPE (wrapper), "retval");
- *p = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, *p);
- TREE_SIDE_EFFECTS (wrapper) = 1;
+ temp = create_tmp_var (type, "retval");
+ *p = build2 (INIT_EXPR, type, temp, *p);
}
- TREE_TYPE (wrapper) = void_type_node;
return temp;
}
/* Gimplify a BIND_EXPR. Just voidify and recurse. */
static enum gimplify_status
-gimplify_bind_expr (tree *expr_p, tree temp, tree *pre_p)
+gimplify_bind_expr (tree *expr_p, tree *pre_p)
{
tree bind_expr = *expr_p;
bool old_save_stack = gimplify_ctxp->save_stack;
tree t;
- temp = voidify_wrapper_expr (bind_expr, temp);
+ tree temp = voidify_wrapper_expr (bind_expr, NULL);
/* Mark variables seen in this bind expr. */
for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
{
tree init = DECL_INITIAL (decl);
- if (!TREE_CONSTANT (DECL_SIZE (decl)))
+ if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
{
/* This is a variable-sized decl. Simplify its size and mark it
for deferred expansion. Note that mudflap depends on the format
bool want_value)
{
enum tree_code code;
- tree lhs, lvalue, rhs, t1;
+ tree lhs, lvalue, rhs, t1, post = NULL, *orig_post_p = post_p;
bool postfix;
enum tree_code arith_code;
enum gimplify_status ret;
else
postfix = false;
+ /* For postfix, make sure the inner expression's post side effects
+ are executed after side effects from this expression. */
+ if (postfix)
+ post_p = &post;
+
/* Add or subtract? */
if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
arith_code = PLUS_EXPR;
if (postfix)
{
- gimplify_and_add (t1, post_p);
+ gimplify_and_add (t1, orig_post_p);
+ append_to_statement_list (post, orig_post_p);
*expr_p = lhs;
return GS_ALL_DONE;
}
return;
}
- /* We can't preevaluate if the type contains a placeholder. */
- if (type_contains_placeholder_p (TREE_TYPE (*expr_p)))
- return;
+ /* If this is a variable sized type, we must remember the size. */
+ maybe_with_size_expr (expr_p);
/* Gimplify the constructor element to something appropriate for the rhs
of a MODIFY_EXPR. Given that we know the lhs is an aggregate, we know
/* If this is of variable size, we have no choice but to assume it doesn't
overlap since we can't make a temporary for it. */
- if (!TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (*expr_p))))
+ if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
return;
/* Otherwise, we must search for overlap ... */
{
struct gimplify_init_ctor_preeval_data preeval_data;
HOST_WIDE_INT num_type_elements, num_ctor_elements;
- HOST_WIDE_INT num_nonzero_elements, num_nonconstant_elements;
- bool cleared;
+ HOST_WIDE_INT num_nonzero_elements;
+ bool cleared, valid_const_initializer;
/* Aggregate types must lower constructors to initialization of
individual elements. The exception is that a CONSTRUCTOR node
if (VEC_empty (constructor_elt, elts))
break;
- categorize_ctor_elements (ctor, &num_nonzero_elements,
- &num_nonconstant_elements,
- &num_ctor_elements, &cleared);
+ /* Fetch information about the constructor to direct later processing.
+ We might want to make static versions of it in various cases, and
+ can only do so if it is known to be a valid constant initializer. */
+ valid_const_initializer
+ = categorize_ctor_elements (ctor, &num_nonzero_elements,
+ &num_ctor_elements, &cleared);
/* If a const aggregate variable is being initialized, then it
should never be a lose to promote the variable to be static. */
- if (num_nonconstant_elements == 0
+ if (valid_const_initializer
&& num_nonzero_elements > 1
&& TREE_READONLY (object)
&& TREE_CODE (object) == VAR_DECL)
for sparse arrays, though, as it's more efficient to follow
the standard CONSTRUCTOR behavior of memset followed by
individual element initialization. */
- if (num_nonconstant_elements == 0 && !cleared)
+ if (valid_const_initializer && !cleared)
{
HOST_WIDE_INT size = int_size_in_bytes (type);
unsigned int align;
}
}
+ /* If there are nonzero elements, pre-evaluate to capture elements
+ overlapping with the lhs into temporaries. We must do this before
+ clearing to fetch the values before they are zeroed-out. */
+ if (num_nonzero_elements > 0)
+ {
+ preeval_data.lhs_base_decl = get_base_address (object);
+ if (!DECL_P (preeval_data.lhs_base_decl))
+ preeval_data.lhs_base_decl = NULL;
+ preeval_data.lhs_alias_set = get_alias_set (object);
+
+ gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
+ pre_p, post_p, &preeval_data);
+ }
+
if (cleared)
{
/* Zap the CONSTRUCTOR element list, which simplifies this case.
elements in the constructor, add assignments to the individual
scalar fields of the object. */
if (!cleared || num_nonzero_elements > 0)
- {
- preeval_data.lhs_base_decl = get_base_address (object);
- if (!DECL_P (preeval_data.lhs_base_decl))
- preeval_data.lhs_base_decl = NULL;
- preeval_data.lhs_alias_set = get_alias_set (object);
-
- gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
- pre_p, post_p, &preeval_data);
- gimplify_init_ctor_eval (object, elts, pre_p, cleared);
- }
+ gimplify_init_ctor_eval (object, elts, pre_p, cleared);
*expr_p = NULL_TREE;
}
ret = GS_UNHANDLED;
break;
+ /* If we're initializing from a container, push the initialization
+ inside it. */
+ case CLEANUP_POINT_EXPR:
+ case BIND_EXPR:
+ case STATEMENT_LIST:
+ {
+ tree wrap = *from_p;
+ tree t;
+
+ ret = gimplify_expr (to_p, pre_p, post_p,
+ is_gimple_min_lval, fb_lvalue);
+ if (ret != GS_ERROR)
+ ret = GS_OK;
+
+ t = voidify_wrapper_expr (wrap, *expr_p);
+ gcc_assert (t == *expr_p);
+
+ if (want_value)
+ {
+ gimplify_and_add (wrap, pre_p);
+ *expr_p = unshare_expr (*to_p);
+ }
+ else
+ *expr_p = wrap;
+ return GS_OK;
+ }
+
default:
ret = GS_UNHANDLED;
break;
return GS_OK;
}
+/* Gimplify a comparison between two aggregate objects of integral scalar
+ mode as a comparison between the bitwise equivalent scalar values. */
+
+static enum gimplify_status
+gimplify_scalar_mode_aggregate_compare (tree *expr_p)
+{
+ tree op0 = TREE_OPERAND (*expr_p, 0);
+ tree op1 = TREE_OPERAND (*expr_p, 1);
+
+ tree type = TREE_TYPE (op0);
+ tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
+
+ op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0);
+ op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1);
+
+ *expr_p
+ = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
+
+ return GS_OK;
+}
+
/* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
points to the expression to gimplify.
enlightened front-end, or by shortcut_cond_expr. */
static enum gimplify_status
-gimplify_statement_list (tree *expr_p)
+gimplify_statement_list (tree *expr_p, tree *pre_p)
{
+ tree temp = voidify_wrapper_expr (*expr_p, NULL);
+
tree_stmt_iterator i = tsi_start (*expr_p);
while (!tsi_end_p (i))
tsi_next (&i);
}
+ if (temp)
+ {
+ append_to_statement_list (*expr_p, pre_p);
+ *expr_p = temp;
+ return GS_OK;
+ }
+
return GS_ALL_DONE;
}
ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
else
{
- /* Special handling for BIND_EXPR can result in fewer temps. */
- ret = GS_OK;
- if (TREE_CODE (init) == BIND_EXPR)
- gimplify_bind_expr (&init, temp, pre_p);
- if (init != temp)
- {
- init = build2 (INIT_EXPR, void_type_node, temp, init);
- ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt,
- fb_none);
- }
+ init = build2 (INIT_EXPR, void_type_node, temp, init);
+ ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt,
+ fb_none);
}
if (ret == GS_ERROR)
return GS_ERROR;
/* When adding a variable-sized variable, we have to handle all sorts
of additional bits of data: the pointer replacement variable, and
the parameters of the type. */
- if (DECL_SIZE (decl) && !TREE_CONSTANT (DECL_SIZE (decl)))
+ if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
{
/* Add the pointer replacement variable as PRIVATE if the variable
replacement is private, else FIRSTPRIVATE since we'll need the
if ((flags & GOVD_SHARED) == 0)
{
t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
- if (!TREE_CONSTANT (t))
+ if (TREE_CODE (t) != INTEGER_CST)
omp_notice_variable (ctx, t, true);
}
}
gimplify_and_add (x, pre_p);
}
- x = build2 (MODIFY_EXPR, void_type_node, oldival2, oldival);
+ x = build2 (MODIFY_EXPR, void_type_node, oldival2,
+ fold_convert (itype, oldival));
gimplify_and_add (x, pre_p);
args = tree_cons (NULL, fold_convert (itype, newival), NULL);
break;
case BIND_EXPR:
- ret = gimplify_bind_expr (expr_p, NULL, pre_p);
+ ret = gimplify_bind_expr (expr_p, pre_p);
break;
case LOOP_EXPR:
break;
case STATEMENT_LIST:
- ret = gimplify_statement_list (expr_p);
+ ret = gimplify_statement_list (expr_p, pre_p);
break;
case WITH_SIZE_EXPR:
switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
{
case tcc_comparison:
- /* If this is a comparison of objects of aggregate type,
- handle it specially (by converting to a call to
- memcmp). It would be nice to only have to do this
- for variable-sized objects, but then we'd have to
- allow the same nest of reference nodes we allow for
- MODIFY_EXPR and that's too complex. */
- if (!AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 1))))
- goto expr_2;
- ret = gimplify_variable_sized_compare (expr_p);
- break;
+ /* Handle comparison of objects of non scalar mode aggregates
+ with a call to memcmp. It would be nice to only have to do
+ this for variable-sized objects, but then we'd have to allow
+ the same nest of reference nodes we allow for MODIFY_EXPR and
+ that's too complex.
+
+ Compare scalar mode aggregates as scalar mode values. Using
+ memcmp for them would be very inefficient at best, and is
+ plain wrong if bitfields are involved. */
+
+ {
+ tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
+
+ if (!AGGREGATE_TYPE_P (type))
+ goto expr_2;
+ else if (TYPE_MODE (type) != BLKmode)
+ ret = gimplify_scalar_mode_aggregate_compare (expr_p);
+ else
+ ret = gimplify_variable_sized_compare (expr_p);
+
+ break;
+ }
/* If *EXPR_P does not need to be special-cased, handle it
according to its class. */
case POINTER_TYPE:
case REFERENCE_TYPE:
- gimplify_type_sizes (TREE_TYPE (type), list_p);
+ /* We used to recurse on the pointed-to type here, which turned out to
+ be incorrect because its definition might refer to variables not
+ yet initialized at this point if a forward declaration is involved.
+
+ It was actually useful for anonymous pointed-to types to ensure
+ that the sizes evaluation dominates every possible later use of the
+ values. Restricting to such types here would be safe since there
+ is no possible forward declaration around, but would introduce an
+ undesirable middle-end semantic to anonymity. We then defer to
+ front-ends the responsibility of ensuring that the sizes are
+ evaluated both early and late enough, e.g. by attaching artificial
+ type declarations to the tree. */
break;
default: