/* Tree lowering pass. This pass converts the GENERIC functions-as-trees
tree representation into the GIMPLE form.
- Copyright (C) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
+ Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007
+ Free Software Foundation, Inc.
Major work done by Sebastian Pop <s.pop@laposte.net>,
Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
-02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "target.h"
#include "optabs.h"
#include "pointer-set.h"
+#include "splay-tree.h"
enum gimplify_omp_var_data
location_t location;
enum omp_clause_default_kind default_kind;
bool is_parallel;
+ bool is_combined_parallel;
};
struct gimplify_ctx
/* Forward declarations. */
static enum gimplify_status gimplify_compound_expr (tree *, tree *, bool);
-#ifdef ENABLE_CHECKING
-static bool cpt_same_type (tree a, tree b);
-#endif
+/* Mark X addressable. Unlike the langhook we expect X to be in gimple
+ form and we don't do any syntax checking. */
+static void
+mark_addressable (tree x)
+{
+ /* Strip component references (COMPONENT_REF, ARRAY_REF, ...) to reach
+ the base object whose address is actually taken. */
+ while (handled_component_p (x))
+ x = TREE_OPERAND (x, 0);
+ /* Only variables and parameters carry the flag; for any other base
+ there is nothing to mark, so bail out silently. */
+ if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
+ return ;
+ TREE_ADDRESSABLE (x) = 1;
+}
/* Return a hash value for a formal temporary table entry. */
DECL_GIMPLE_FORMAL_TEMP_P (t) = 0;
if (body)
- declare_tmp_vars (c->temps, body);
+ declare_vars (c->temps, body, false);
else
record_vars (c->temps);
/* Create a new omp construct that deals with variable remapping. */
static struct gimplify_omp_ctx *
-new_omp_context (bool is_parallel)
+new_omp_context (bool is_parallel, bool is_combined_parallel)
{
struct gimplify_omp_ctx *c;
c->privatized_types = pointer_set_create ();
c->location = input_location;
c->is_parallel = is_parallel;
+ c->is_combined_parallel = is_combined_parallel;
c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
return c;
if (walk_tree (&t, find_single_pointer_decl_1, &decl, NULL))
{
- /* find_single_pointer_decl_1 returns a non-zero value, causing
- walk_tree to return a non-zero value, to indicate that it
+ /* find_single_pointer_decl_1 returns a nonzero value, causing
+ walk_tree to return a nonzero value, to indicate that it
found more than one pointer DECL. */
return NULL_TREE;
}
tree tmp_var;
/* We don't allow types that are addressable (meaning we can't make copies),
- incomplete, or of variable size. */
- gcc_assert (!TREE_ADDRESSABLE (type)
- && COMPLETE_TYPE_P (type)
- && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
+ or incomplete. We also used to reject all variable-sized objects here,
+ but now support those for which a constant upper bound can be obtained.
+ The processing for variable sizes is performed in gimple_add_tmp_var,
+ point at which it really matters and possibly reached via paths not going
+ through this function, e.g. after direct calls to create_tmp_var_raw. */
+ gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
tmp_var = create_tmp_var_raw (type, prefix);
gimple_add_tmp_var (tmp_var);
{
case ADDR_EXPR:
return get_name (TREE_OPERAND (stripped_decl, 0));
- break;
default:
return NULL;
}
}
}
- if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
- DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
+ if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
+ DECL_GIMPLE_REG_P (t) = 1;
- mod = build2 (MODIFY_EXPR, TREE_TYPE (t), t, val);
+ mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
if (EXPR_HAS_LOCATION (val))
SET_EXPR_LOCUS (mod, EXPR_LOCUS (val));
return internal_get_tmp_var (val, pre_p, post_p, false);
}
-/* Declares all the variables in VARS in SCOPE. */
+/* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
+ true, generate debug info for them; otherwise don't. */
void
-declare_tmp_vars (tree vars, tree scope)
+declare_vars (tree vars, tree scope, bool debug_info)
{
tree last = vars;
if (last)
{
- tree temps;
+ tree temps, block;
/* C99 mode puts the default 'return 0;' for main outside the outer
braces. So drill down until we find an actual scope. */
gcc_assert (TREE_CODE (scope) == BIND_EXPR);
temps = nreverse (last);
- TREE_CHAIN (last) = BIND_EXPR_VARS (scope);
- BIND_EXPR_VARS (scope) = temps;
+
+ block = BIND_EXPR_BLOCK (scope);
+ if (!block || !debug_info)
+ {
+ TREE_CHAIN (last) = BIND_EXPR_VARS (scope);
+ BIND_EXPR_VARS (scope) = temps;
+ }
+ else
+ {
+ /* We need to attach the nodes both to the BIND_EXPR and to its
+ associated BLOCK for debugging purposes. The key point here
+ is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
+ is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
+ if (BLOCK_VARS (block))
+ BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
+ else
+ {
+ BIND_EXPR_VARS (scope) = chainon (BIND_EXPR_VARS (scope), temps);
+ BLOCK_VARS (block) = temps;
+ }
+ }
}
}
+/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
+ for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
+ no such upper bound can be obtained. */
+
+static void
+force_constant_size (tree var)
+{
+ /* The only attempt we make is by querying the maximum size of objects
+ of the variable's type. */
+
+ HOST_WIDE_INT max_size;
+
+ gcc_assert (TREE_CODE (var) == VAR_DECL);
+
+ max_size = max_int_size_in_bytes (TREE_TYPE (var));
+
+ /* A negative value means no constant upper bound exists; per the
+ contract above this is a hard failure, not a soft fallback. */
+ gcc_assert (max_size >= 0);
+
+ /* Install the bound both as the byte size and as the bit size, keeping
+ the two DECL_SIZE fields consistent. */
+ DECL_SIZE_UNIT (var)
+ = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
+ DECL_SIZE (var)
+ = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
+}
+
void
gimple_add_tmp_var (tree tmp)
{
gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
+ /* Later processing assumes that the object size is constant, which might
+ not be true at this point. Force the use of a constant upper bound in
+ this case. */
+ if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
+ force_constant_size (tmp);
+
DECL_CONTEXT (tmp) = current_function_decl;
DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
else if (cfun)
record_vars (tmp);
else
- declare_tmp_vars (tmp, DECL_SAVED_TREE (current_function_decl));
+ declare_vars (tmp, DECL_SAVED_TREE (current_function_decl), false);
}
/* Determines whether to assign a locus to the statement STMT. */
static void
annotate_one_with_locus (tree t, location_t locus)
{
- if (EXPR_P (t) && ! EXPR_HAS_LOCATION (t) && should_carry_locus_p (t))
+ if (CAN_HAVE_LOCATION_P (t)
+ && ! EXPR_HAS_LOCATION (t) && should_carry_locus_p (t))
SET_EXPR_LOCATION (t, locus);
}
tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
- if (!VOID_TYPE_P (TREE_TYPE (wrapper)))
+ tree type = TREE_TYPE (wrapper);
+ if (type && !VOID_TYPE_P (type))
{
- tree *p, sub = wrapper;
-
- restart:
- /* Set p to point to the body of the wrapper. */
- switch (TREE_CODE (sub))
- {
- case BIND_EXPR:
- /* For a BIND_EXPR, the body is operand 1. */
- p = &BIND_EXPR_BODY (sub);
- break;
-
- default:
- p = &TREE_OPERAND (sub, 0);
- break;
- }
+ tree *p;
- /* Advance to the last statement. Set all container types to void. */
- if (TREE_CODE (*p) == STATEMENT_LIST)
- {
- tree_stmt_iterator i = tsi_last (*p);
- p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
- }
- else
+ /* Set p to point to the body of the wrapper. Loop until we find
+ something that isn't a wrapper. */
+ for (p = &wrapper; p && *p; )
{
- for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
+ switch (TREE_CODE (*p))
{
+ case BIND_EXPR:
TREE_SIDE_EFFECTS (*p) = 1;
TREE_TYPE (*p) = void_type_node;
+ /* For a BIND_EXPR, the body is operand 1. */
+ p = &BIND_EXPR_BODY (*p);
+ break;
+
+ case CLEANUP_POINT_EXPR:
+ case TRY_FINALLY_EXPR:
+ case TRY_CATCH_EXPR:
+ TREE_SIDE_EFFECTS (*p) = 1;
+ TREE_TYPE (*p) = void_type_node;
+ p = &TREE_OPERAND (*p, 0);
+ break;
+
+ case STATEMENT_LIST:
+ {
+ tree_stmt_iterator i = tsi_last (*p);
+ TREE_SIDE_EFFECTS (*p) = 1;
+ TREE_TYPE (*p) = void_type_node;
+ p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
+ }
+ break;
+
+ case COMPOUND_EXPR:
+ /* Advance to the last statement. Set all container types to void. */
+ for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
+ {
+ TREE_SIDE_EFFECTS (*p) = 1;
+ TREE_TYPE (*p) = void_type_node;
+ }
+ break;
+
+ default:
+ goto out;
}
}
+ out:
if (p == NULL || IS_EMPTY_STMT (*p))
- ;
- /* Look through exception handling. */
- else if (TREE_CODE (*p) == TRY_FINALLY_EXPR
- || TREE_CODE (*p) == TRY_CATCH_EXPR)
- {
- sub = *p;
- goto restart;
- }
- /* The C++ frontend already did this for us. */
- else if (TREE_CODE (*p) == INIT_EXPR
- || TREE_CODE (*p) == TARGET_EXPR)
- temp = TREE_OPERAND (*p, 0);
- /* If we're returning a dereference, move the dereference
- outside the wrapper. */
- else if (TREE_CODE (*p) == INDIRECT_REF)
+ temp = NULL_TREE;
+ else if (temp)
{
- tree ptr = TREE_OPERAND (*p, 0);
- temp = create_tmp_var (TREE_TYPE (ptr), "retval");
- *p = build2 (MODIFY_EXPR, TREE_TYPE (ptr), temp, ptr);
- temp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (temp)), temp);
- /* If this is a BIND_EXPR for a const inline function, it might not
- have TREE_SIDE_EFFECTS set. That is no longer accurate. */
- TREE_SIDE_EFFECTS (wrapper) = 1;
+ /* The wrapper is on the RHS of an assignment that we're pushing
+ down. */
+ gcc_assert (TREE_CODE (temp) == INIT_EXPR
+ || TREE_CODE (temp) == GIMPLE_MODIFY_STMT
+ || TREE_CODE (temp) == MODIFY_EXPR);
+ GENERIC_TREE_OPERAND (temp, 1) = *p;
+ *p = temp;
}
else
{
- if (!temp)
- temp = create_tmp_var (TREE_TYPE (wrapper), "retval");
- *p = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, *p);
- TREE_SIDE_EFFECTS (wrapper) = 1;
+ temp = create_tmp_var (type, "retval");
+ *p = build2 (INIT_EXPR, type, temp, *p);
}
- TREE_TYPE (wrapper) = void_type_node;
return temp;
}
tree save_call, tmp_var;
save_call =
- build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_SAVE],
- NULL_TREE);
+ build_call_expr (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
- *save = build2 (MODIFY_EXPR, ptr_type_node, tmp_var, save_call);
+ *save = build_gimple_modify_stmt (tmp_var, save_call);
*restore =
- build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
- tree_cons (NULL_TREE, tmp_var, NULL_TREE));
+ build_call_expr (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
+ 1, tmp_var);
}
/* Gimplify a BIND_EXPR. Just voidify and recurse. */
static enum gimplify_status
-gimplify_bind_expr (tree *expr_p, tree temp, tree *pre_p)
+gimplify_bind_expr (tree *expr_p, tree *pre_p)
{
tree bind_expr = *expr_p;
bool old_save_stack = gimplify_ctxp->save_stack;
tree t;
- temp = voidify_wrapper_expr (bind_expr, temp);
+ tree temp = voidify_wrapper_expr (bind_expr, NULL);
/* Mark variables seen in this bind expr. */
for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
{
if (TREE_CODE (t) == VAR_DECL)
- DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
+ {
+ struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
+
+ /* Mark variable as local. */
+ if (ctx && !is_global_var (t)
+ && (! DECL_SEEN_IN_BIND_EXPR_P (t)
+ || splay_tree_lookup (ctx->variables,
+ (splay_tree_key) t) == NULL))
+ omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
+
+ DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
+ }
/* Preliminarily mark non-addressed complex variables as eligible
for promotion to gimple registers. We'll transform their uses
as we find them. */
- if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
+ if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
&& !TREE_THIS_VOLATILE (t)
&& (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
&& !needs_to_live_in_memory (t))
- DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
- }
-
- /* Mark variables seen in this bind expr as locals. */
- if (gimplify_omp_ctxp)
- {
- struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
-
- for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
- if (TREE_CODE (t) == VAR_DECL && !is_global_var (t))
- omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
+ DECL_GIMPLE_REG_P (t) = 1;
}
gimple_push_bind_expr (bind_expr);
result_decl = NULL_TREE;
else
{
- result_decl = TREE_OPERAND (ret_expr, 0);
+ result_decl = GENERIC_TREE_OPERAND (ret_expr, 0);
if (TREE_CODE (result_decl) == INDIRECT_REF)
/* See through a return by reference. */
result_decl = TREE_OPERAND (result_decl, 0);
gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
+ || TREE_CODE (ret_expr) == GIMPLE_MODIFY_STMT
|| TREE_CODE (ret_expr) == INIT_EXPR)
&& TREE_CODE (result_decl) == RESULT_DECL);
}
else
{
result = create_tmp_var (TREE_TYPE (result_decl), NULL);
+ if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
+ DECL_GIMPLE_REG_P (result) = 1;
/* ??? With complex control flow (usually involving abnormal edges),
we can wind up warning about an uninitialized value for this. Due
gimplify_ctxp->return_temp = result;
}
- /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
+ /* Smash the lhs of the GIMPLE_MODIFY_STMT to the temporary we plan to use.
Then gimplify the whole thing. */
if (result != result_decl)
- TREE_OPERAND (ret_expr, 0) = result;
+ GENERIC_TREE_OPERAND (ret_expr, 0) = result;
gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
if (result == result_decl)
ret_expr = result;
else
- ret_expr = build2 (MODIFY_EXPR, TREE_TYPE (result), result_decl, result);
+ ret_expr = build_gimple_modify_stmt (result_decl, result);
TREE_OPERAND (stmt, 0) = ret_expr;
return GS_ALL_DONE;
{
tree init = DECL_INITIAL (decl);
- if (!TREE_CONSTANT (DECL_SIZE (decl)))
+ if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
{
/* This is a variable-sized decl. Simplify its size and mark it
for deferred expansion. Note that mudflap depends on the format
of the emitted code: see mx_register_decls(). */
- tree t, args, addr, ptr_type;
+ tree t, addr, ptr_type;
gimplify_one_sizepos (&DECL_SIZE (decl), stmt_p);
gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), stmt_p);
SET_DECL_VALUE_EXPR (decl, t);
DECL_HAS_VALUE_EXPR_P (decl) = 1;
- args = tree_cons (NULL, DECL_SIZE_UNIT (decl), NULL);
t = built_in_decls[BUILT_IN_ALLOCA];
- t = build_function_call_expr (t, args);
+ t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
t = fold_convert (ptr_type, t);
- t = build2 (MODIFY_EXPR, void_type_node, addr, t);
+ t = build_gimple_modify_stmt (addr, t);
gimplify_and_add (t, stmt_p);
if (!TREE_STATIC (decl))
{
DECL_INITIAL (decl) = NULL_TREE;
- init = build2 (MODIFY_EXPR, void_type_node, decl, init);
+ init = build2 (INIT_EXPR, void_type_node, decl, init);
gimplify_and_add (init, stmt_p);
}
else
walk_tree (&init, force_labels_r, NULL, NULL);
}
- /* This decl isn't mentioned in the enclosing block, so add it to the
- list of temps. FIXME it seems a bit of a kludge to say that
- anonymous artificial vars aren't pushed, but everything else is. */
- if (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
+ /* Some front ends do not explicitly declare all anonymous
+ artificial variables. We compensate here by declaring the
+ variables, though it would be better if the front ends would
+ explicitly declare them. */
+ if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
+ && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
gimple_add_tmp_var (decl);
}
static int
compare_case_labels (const void *p1, const void *p2)
{
- tree case1 = *(tree *)p1;
- tree case2 = *(tree *)p2;
+ const_tree const case1 = *(const_tree const*)p1;
+ const_tree const case2 = *(const_tree const*)p2;
return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
}
labels = gimplify_ctxp->case_labels;
gimplify_ctxp->case_labels = saved_labels;
- len = VEC_length (tree, labels);
-
- for (i = 0; i < len; ++i)
+ i = 0;
+ while (i < VEC_length (tree, labels))
{
- tree t = VEC_index (tree, labels, i);
- if (!CASE_LOW (t))
+ tree elt = VEC_index (tree, labels, i);
+ tree low = CASE_LOW (elt);
+ bool remove_element = FALSE;
+
+ if (low)
+ {
+ /* Discard empty ranges. */
+ tree high = CASE_HIGH (elt);
+ if (high && INT_CST_LT (high, low))
+ remove_element = TRUE;
+ }
+ else
{
/* The default case must be the last label in the list. */
- default_case = t;
- VEC_replace (tree, labels, i, VEC_index (tree, labels, len - 1));
- len--;
- break;
+ gcc_assert (!default_case);
+ default_case = elt;
+ remove_element = TRUE;
}
+
+ if (remove_element)
+ VEC_ordered_remove (tree, labels, i);
+ else
+ i++;
}
+ len = i;
label_vec = make_tree_vec (len + 1);
SWITCH_LABELS (*expr_p) = label_vec;
/* Both cast and addr_expr types should address the same object type. */
dctype = TREE_TYPE (ctype);
ddatype = TREE_TYPE (datype);
- if (!lang_hooks.types_compatible_p (ddatype, dctype))
+ if (!useless_type_conversion_p (dctype, ddatype))
return;
/* The addr_expr and the object type should match. */
obj_expr = TREE_OPERAND (addr_expr, 0);
otype = TREE_TYPE (obj_expr);
- if (!lang_hooks.types_compatible_p (otype, datype))
+ if (!useless_type_conversion_p (datype, otype))
return;
/* The lower bound and element sizes must be constant. */
/* All checks succeeded. Build a new node to merge the cast. */
*expr_p = build4 (ARRAY_REF, dctype, obj_expr,
TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
- TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
- size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (dctype),
- size_int (TYPE_ALIGN_UNIT (dctype))));
+ NULL_TREE, NULL_TREE);
*expr_p = build1 (ADDR_EXPR, ctype, *expr_p);
}
static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
+ tree tem;
gcc_assert (TREE_CODE (*expr_p) == NOP_EXPR
|| TREE_CODE (*expr_p) == CONVERT_EXPR);
if (tree_ssa_useless_type_conversion (*expr_p))
*expr_p = TREE_OPERAND (*expr_p, 0);
+ /* Attempt to avoid NOP_EXPR by producing reference to a subtype.
+ For example this folds (subclass *)&A into &A->subclass, avoiding
+ the need for a separate statement. */
+ if (TREE_CODE (*expr_p) == NOP_EXPR
+ && POINTER_TYPE_P (TREE_TYPE (*expr_p))
+ && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
+ && (tem = maybe_fold_offset_to_reference
+ (TREE_OPERAND (*expr_p, 0),
+ integer_zero_node, TREE_TYPE (TREE_TYPE (*expr_p)))))
+ {
+ tree ptr_type = build_pointer_type (TREE_TYPE (tem));
+ if (useless_type_conversion_p (TREE_TYPE (*expr_p), ptr_type))
+ *expr_p = build_fold_addr_expr_with_type (tem, ptr_type);
+ }
+
/* If we still have a conversion at the toplevel,
then canonicalize some constructs. */
if (TREE_CODE (*expr_p) == NOP_EXPR || TREE_CODE (*expr_p) == CONVERT_EXPR)
bool want_value)
{
enum tree_code code;
- tree lhs, lvalue, rhs, t1;
+ tree lhs, lvalue, rhs, t1, post = NULL, *orig_post_p = post_p;
bool postfix;
enum tree_code arith_code;
enum gimplify_status ret;
else
postfix = false;
+ /* For postfix, make sure the inner expression's post side effects
+ are executed after side effects from this expression. */
+ if (postfix)
+ post_p = &post;
+
/* Add or subtract? */
if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
arith_code = PLUS_EXPR;
return ret;
}
+ /* For POINTERs increment, use POINTER_PLUS_EXPR. */
+ if (POINTER_TYPE_P (TREE_TYPE (lhs)))
+ {
+ rhs = fold_convert (sizetype, rhs);
+ if (arith_code == MINUS_EXPR)
+ rhs = fold_build1 (NEGATE_EXPR, TREE_TYPE (rhs), rhs);
+ arith_code = POINTER_PLUS_EXPR;
+ }
+
t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
- t1 = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
+ t1 = build_gimple_modify_stmt (lvalue, t1);
if (postfix)
{
- gimplify_and_add (t1, post_p);
+ gimplify_and_add (t1, orig_post_p);
+ append_to_statement_list (post, orig_post_p);
*expr_p = lhs;
return GS_ALL_DONE;
}
static enum gimplify_status
gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
{
- tree decl;
- tree arglist;
+ tree decl, parms, p;
enum gimplify_status ret;
+ int i, nargs;
gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
decl = get_callee_fndecl (*expr_p);
if (decl && DECL_BUILT_IN (decl))
{
- tree fndecl = get_callee_fndecl (*expr_p);
- tree arglist = TREE_OPERAND (*expr_p, 1);
- tree new = fold_builtin (fndecl, arglist, !want_value);
+ tree new = fold_call_expr (*expr_p, !want_value);
if (new && new != *expr_p)
{
if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_START)
{
- if (!arglist || !TREE_CHAIN (arglist))
+ if (call_expr_nargs (*expr_p) < 2)
{
error ("too few arguments to function %<va_start%>");
*expr_p = build_empty_stmt ();
return GS_OK;
}
- if (fold_builtin_next_arg (TREE_CHAIN (arglist)))
+ if (fold_builtin_next_arg (*expr_p, true))
{
*expr_p = build_empty_stmt ();
return GS_OK;
}
/* Avoid gimplifying the second argument to va_start, which needs
to be the plain PARM_DECL. */
- return gimplify_arg (&TREE_VALUE (TREE_OPERAND (*expr_p, 1)), pre_p);
+ return gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p);
}
}
/* There is a sequence point before the call, so any side effects in
the calling expression must occur before the actual call. Force
gimplify_expr to use an internal post queue. */
- ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, NULL,
+ ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
is_gimple_call_addr, fb_rvalue);
- if (PUSH_ARGS_REVERSED)
- TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
- for (arglist = TREE_OPERAND (*expr_p, 1); arglist;
- arglist = TREE_CHAIN (arglist))
+ nargs = call_expr_nargs (*expr_p);
+
+ /* Get argument types for verification. */
+ decl = get_callee_fndecl (*expr_p);
+ parms = NULL_TREE;
+ if (decl)
+ parms = TYPE_ARG_TYPES (TREE_TYPE (decl));
+ else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
+ parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
+
+ /* Verify if the type of the argument matches that of the function
+ declaration. If we cannot verify this or there is a mismatch,
+ mark the call expression so it doesn't get inlined later. */
+ if (decl && DECL_ARGUMENTS (decl))
+ {
+ for (i = 0, p = DECL_ARGUMENTS (decl); i < nargs;
+ i++, p = TREE_CHAIN (p))
+ {
+ /* We cannot distinguish a varargs function from the case
+ of excess parameters, still deferring the inlining decision
+ to the callee is possible. */
+ if (!p)
+ break;
+ if (p == error_mark_node
+ || CALL_EXPR_ARG (*expr_p, i) == error_mark_node
+ || !fold_convertible_p (DECL_ARG_TYPE (p),
+ CALL_EXPR_ARG (*expr_p, i)))
+ {
+ CALL_CANNOT_INLINE_P (*expr_p) = 1;
+ break;
+ }
+ }
+ }
+ else if (parms)
+ {
+ for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
+ {
+ /* If this is a varargs function defer inlining decision
+ to callee. */
+ if (!p)
+ break;
+ if (TREE_VALUE (p) == error_mark_node
+ || CALL_EXPR_ARG (*expr_p, i) == error_mark_node
+ || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
+ || !fold_convertible_p (TREE_VALUE (p),
+ CALL_EXPR_ARG (*expr_p, i)))
+ {
+ CALL_CANNOT_INLINE_P (*expr_p) = 1;
+ break;
+ }
+ }
+ }
+ else if (nargs != 0)
+ CALL_CANNOT_INLINE_P (*expr_p) = 1;
+
+ /* Finally, gimplify the function arguments. */
+ for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
+ PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
+ PUSH_ARGS_REVERSED ? i-- : i++)
{
enum gimplify_status t;
- t = gimplify_arg (&TREE_VALUE (arglist), pre_p);
+ t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p);
if (t == GS_ERROR)
ret = GS_ERROR;
}
- if (PUSH_ARGS_REVERSED)
- TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
/* Try this again in case gimplification exposed something. */
- if (ret != GS_ERROR && decl && DECL_BUILT_IN (decl))
+ if (ret != GS_ERROR)
{
- tree fndecl = get_callee_fndecl (*expr_p);
- tree arglist = TREE_OPERAND (*expr_p, 1);
- tree new = fold_builtin (fndecl, arglist, !want_value);
+ tree new = fold_call_expr (*expr_p, !want_value);
if (new && new != *expr_p)
{
default:
/* Other expressions that get here must have boolean values, but
might need to be converted to the appropriate mode. */
- return convert (boolean_type_node, expr);
+ return fold_convert (boolean_type_node, expr);
}
}
TARGET is the tree for T1 above.
PRE_P points to the list where side effects that must happen before
- *EXPR_P should be stored.
-
- POST_P points to the list where side effects that must happen after
- *EXPR_P should be stored. */
+ *EXPR_P should be stored. */
static enum gimplify_status
-gimplify_cond_expr (tree *expr_p, tree *pre_p, tree *post_p, tree target,
- fallback_t fallback)
+gimplify_cond_expr (tree *expr_p, tree *pre_p, fallback_t fallback)
{
tree expr = *expr_p;
tree tmp, tmp2, type;
{
tree result;
- if (target)
- {
- ret = gimplify_expr (&target, pre_p, post_p,
- is_gimple_min_lval, fb_lvalue);
- if (ret != GS_ERROR)
- ret = GS_OK;
- result = tmp = target;
- tmp2 = unshare_expr (target);
- }
- else if ((fallback & fb_lvalue) == 0)
+ if ((fallback & fb_lvalue) == 0)
{
result = tmp2 = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
ret = GS_ALL_DONE;
if this branch is void; in C++ it can be, if it's a throw. */
if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
TREE_OPERAND (expr, 1)
- = build2 (MODIFY_EXPR, void_type_node, tmp, TREE_OPERAND (expr, 1));
+ = build_gimple_modify_stmt (tmp, TREE_OPERAND (expr, 1));
/* Build the else clause, 't1 = b;'. */
if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
TREE_OPERAND (expr, 2)
- = build2 (MODIFY_EXPR, void_type_node, tmp2, TREE_OPERAND (expr, 2));
+ = build_gimple_modify_stmt (tmp2, TREE_OPERAND (expr, 2));
TREE_TYPE (expr) = void_type_node;
recalculate_side_effects (expr);
static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value)
{
- tree args, t, to, to_ptr, from;
-
- to = TREE_OPERAND (*expr_p, 0);
- from = TREE_OPERAND (*expr_p, 1);
+ tree t, to, to_ptr, from, from_ptr;
- args = tree_cons (NULL, size, NULL);
+ to = GENERIC_TREE_OPERAND (*expr_p, 0);
+ from = GENERIC_TREE_OPERAND (*expr_p, 1);
- t = build_fold_addr_expr (from);
- args = tree_cons (NULL, t, args);
+ from_ptr = build_fold_addr_expr (from);
to_ptr = build_fold_addr_expr (to);
- args = tree_cons (NULL, to_ptr, args);
t = implicit_built_in_decls[BUILT_IN_MEMCPY];
- t = build_function_call_expr (t, args);
+ t = build_call_expr (t, 3, to_ptr, from_ptr, size);
if (want_value)
{
static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value)
{
- tree args, t, to, to_ptr;
-
- to = TREE_OPERAND (*expr_p, 0);
+ tree t, to, to_ptr;
- args = tree_cons (NULL, size, NULL);
-
- args = tree_cons (NULL, integer_zero_node, args);
+ to = GENERIC_TREE_OPERAND (*expr_p, 0);
to_ptr = build_fold_addr_expr (to);
- args = tree_cons (NULL, to_ptr, args);
t = implicit_built_in_decls[BUILT_IN_MEMSET];
- t = build_function_call_expr (t, args);
+ t = build_call_expr (t, 3, to_ptr, integer_zero_node, size);
if (want_value)
{
&& alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
return t;
+ /* If the constructor component is a call, determine if it can hide a
+ potential overlap with the lhs through an INDIRECT_REF like above. */
+ if (TREE_CODE (t) == CALL_EXPR)
+ {
+ tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
+
+ for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
+ if (POINTER_TYPE_P (TREE_VALUE (type))
+ && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
+ && alias_sets_conflict_p (data->lhs_alias_set,
+ get_alias_set
+ (TREE_TYPE (TREE_VALUE (type)))))
+ return t;
+ }
+
if (IS_TYPE_OR_DECL_P (t))
*walk_subtrees = 0;
return NULL;
return;
}
- /* We can't preevaluate if the type contains a placeholder. */
- if (type_contains_placeholder_p (TREE_TYPE (*expr_p)))
- return;
+ /* If this is a variable sized type, we must remember the size. */
+ maybe_with_size_expr (expr_p);
/* Gimplify the constructor element to something appropriate for the rhs
of a MODIFY_EXPR. Given that we know the lhs is an aggregate, we know
/* If this is of variable size, we have no choice but to assume it doesn't
overlap since we can't make a temporary for it. */
- if (!TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (*expr_p))))
+ if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
return;
/* Otherwise, we must search for overlap ... */
tree *pre_p, bool cleared)
{
tree loop_entry_label, loop_exit_label;
- tree var, var_type, cref;
+ tree var, var_type, cref, tmp;
loop_entry_label = create_artificial_label ();
loop_exit_label = create_artificial_label ();
/* Create and initialize the index variable. */
var_type = TREE_TYPE (upper);
var = create_tmp_var (var_type, NULL);
- append_to_statement_list (build2 (MODIFY_EXPR, var_type, var, lower), pre_p);
+ append_to_statement_list (build_gimple_modify_stmt (var, lower), pre_p);
/* Add the loop entry label. */
append_to_statement_list (build1 (LABEL_EXPR,
gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
pre_p, cleared);
else
- append_to_statement_list (build2 (MODIFY_EXPR, TREE_TYPE (cref),
- cref, value),
- pre_p);
+ append_to_statement_list (build_gimple_modify_stmt (cref, value), pre_p);
/* We exit the loop when the index var is equal to the upper bound. */
gimplify_and_add (build3 (COND_EXPR, void_type_node,
pre_p);
/* Otherwise, increment the index var... */
- append_to_statement_list (build2 (MODIFY_EXPR, var_type, var,
- build2 (PLUS_EXPR, var_type, var,
- fold_convert (var_type,
- integer_one_node))),
- pre_p);
+ tmp = build2 (PLUS_EXPR, var_type, var,
+ fold_convert (var_type, integer_one_node));
+ append_to_statement_list (build_gimple_modify_stmt (var, tmp), pre_p);
/* ...and jump back to the loop entry. */
append_to_statement_list (build1 (GOTO_EXPR,
pre_p, cleared);
else
{
- init = build2 (MODIFY_EXPR, TREE_TYPE (cref), cref, value);
+ init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
gimplify_and_add (init, pre_p);
}
}
tree *post_p, bool want_value)
{
tree object;
- tree ctor = TREE_OPERAND (*expr_p, 1);
+ tree ctor = GENERIC_TREE_OPERAND (*expr_p, 1);
tree type = TREE_TYPE (ctor);
enum gimplify_status ret;
VEC(constructor_elt,gc) *elts;
if (TREE_CODE (ctor) != CONSTRUCTOR)
return GS_UNHANDLED;
- ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
+ ret = gimplify_expr (&GENERIC_TREE_OPERAND (*expr_p, 0), pre_p, post_p,
is_gimple_lvalue, fb_lvalue);
if (ret == GS_ERROR)
return ret;
- object = TREE_OPERAND (*expr_p, 0);
+ object = GENERIC_TREE_OPERAND (*expr_p, 0);
elts = CONSTRUCTOR_ELTS (ctor);
{
struct gimplify_init_ctor_preeval_data preeval_data;
HOST_WIDE_INT num_type_elements, num_ctor_elements;
- HOST_WIDE_INT num_nonzero_elements, num_nonconstant_elements;
- bool cleared;
+ HOST_WIDE_INT num_nonzero_elements;
+ bool cleared, valid_const_initializer;
/* Aggregate types must lower constructors to initialization of
individual elements. The exception is that a CONSTRUCTOR node
if (VEC_empty (constructor_elt, elts))
break;
- categorize_ctor_elements (ctor, &num_nonzero_elements,
- &num_nonconstant_elements,
- &num_ctor_elements, &cleared);
+ /* Fetch information about the constructor to direct later processing.
+ We might want to make static versions of it in various cases, and
+ can only do so if it known to be a valid constant initializer. */
+ valid_const_initializer
+ = categorize_ctor_elements (ctor, &num_nonzero_elements,
+ &num_ctor_elements, &cleared);
/* If a const aggregate variable is being initialized, then it
should never be a lose to promote the variable to be static. */
- if (num_nonconstant_elements == 0
+ if (valid_const_initializer
&& num_nonzero_elements > 1
&& TREE_READONLY (object)
&& TREE_CODE (object) == VAR_DECL)
for sparse arrays, though, as it's more efficient to follow
the standard CONSTRUCTOR behavior of memset followed by
individual element initialization. */
- if (num_nonconstant_elements == 0 && !cleared)
+ if (valid_const_initializer && !cleared)
{
HOST_WIDE_INT size = int_size_in_bytes (type);
unsigned int align;
}
walk_tree (&DECL_INITIAL (new), force_labels_r, NULL, NULL);
- TREE_OPERAND (*expr_p, 1) = new;
+ GENERIC_TREE_OPERAND (*expr_p, 1) = new;
/* This is no longer an assignment of a CONSTRUCTOR, but
we still may have processing to do on the LHS. So
}
}
+ /* If there are nonzero elements, pre-evaluate to capture elements
+ overlapping with the lhs into temporaries. We must do this before
+ clearing to fetch the values before they are zeroed-out. */
+ if (num_nonzero_elements > 0)
+ {
+ preeval_data.lhs_base_decl = get_base_address (object);
+ if (!DECL_P (preeval_data.lhs_base_decl))
+ preeval_data.lhs_base_decl = NULL;
+ preeval_data.lhs_alias_set = get_alias_set (object);
+
+ gimplify_init_ctor_preeval (&GENERIC_TREE_OPERAND (*expr_p, 1),
+ pre_p, post_p, &preeval_data);
+ }
+
if (cleared)
{
/* Zap the CONSTRUCTOR element list, which simplifies this case.
elements in the constructor, add assignments to the individual
scalar fields of the object. */
if (!cleared || num_nonzero_elements > 0)
- {
- preeval_data.lhs_base_decl = get_base_address (object);
- if (!DECL_P (preeval_data.lhs_base_decl))
- preeval_data.lhs_base_decl = NULL;
- preeval_data.lhs_alias_set = get_alias_set (object);
-
- gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
- pre_p, post_p, &preeval_data);
- gimplify_init_ctor_eval (object, elts, pre_p, cleared);
- }
+ gimplify_init_ctor_eval (object, elts, pre_p, cleared);
*expr_p = NULL_TREE;
}
i = VEC_index (constructor_elt, elts, 1)->value;
if (r == NULL || i == NULL)
{
- tree zero = convert (TREE_TYPE (type), integer_zero_node);
+ tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
if (r == NULL)
r = zero;
if (i == NULL)
TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
break;
}
+
+ /* Don't reduce a TREE_CONSTANT vector ctor even if we can't
+ make a VECTOR_CST. It won't do anything for us, and it'll
+ prevent us from representing it as a single constant. */
+ break;
}
/* Vector types use CONSTRUCTOR all the way through gimple
if (tret == GS_ERROR)
ret = GS_ERROR;
}
+ if (!is_gimple_reg (GENERIC_TREE_OPERAND (*expr_p, 0)))
+ GENERIC_TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
}
break;
tree sub = t;
tree subtype;
- STRIP_NOPS (sub);
+ STRIP_USELESS_TYPE_CONVERSION (sub);
subtype = TREE_TYPE (sub);
if (!POINTER_TYPE_P (subtype))
return NULL_TREE;
tree op = TREE_OPERAND (sub, 0);
tree optype = TREE_TYPE (op);
/* *&p => p */
- if (lang_hooks.types_compatible_p (type, optype))
+ if (useless_type_conversion_p (type, optype))
return op;
/* *(foo *)&fooarray => fooarray[0] */
else if (TREE_CODE (optype) == ARRAY_TYPE
- && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
+ && useless_type_conversion_p (type, TREE_TYPE (optype)))
{
tree type_domain = TYPE_DOMAIN (optype);
tree min_val = size_zero_node;
/* *(foo *)fooarrptr => (*fooarrptr)[0] */
if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
- && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
+ && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
{
tree type_domain;
tree min_val = size_zero_node;
while (ret != GS_UNHANDLED)
switch (TREE_CODE (*from_p))
{
-#if 0
case INDIRECT_REF:
{
/* If we have code like
ret = GS_UNHANDLED;
break;
}
-#endif
case TARGET_EXPR:
{
copy in other cases as well. */
if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
{
- *expr_p = *from_p;
- return gimplify_cond_expr (expr_p, pre_p, post_p, *to_p,
- fb_rvalue);
+ /* This code should mirror the code in gimplify_cond_expr. */
+ enum tree_code code = TREE_CODE (*expr_p);
+ tree cond = *from_p;
+ tree result = *to_p;
+
+ ret = gimplify_expr (&result, pre_p, post_p,
+ is_gimple_min_lval, fb_lvalue);
+ if (ret != GS_ERROR)
+ ret = GS_OK;
+
+ if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
+ TREE_OPERAND (cond, 1)
+ = build2 (code, void_type_node, result,
+ TREE_OPERAND (cond, 1));
+ if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
+ TREE_OPERAND (cond, 2)
+ = build2 (code, void_type_node, unshare_expr (result),
+ TREE_OPERAND (cond, 2));
+
+ TREE_TYPE (cond) = void_type_node;
+ recalculate_side_effects (cond);
+
+ if (want_value)
+ {
+ gimplify_and_add (cond, pre_p);
+ *expr_p = unshare_expr (result);
+ }
+ else
+ *expr_p = cond;
+ return ret;
}
else
ret = GS_UNHANDLED;
{
bool use_target;
- if (TREE_CODE (*to_p) == RESULT_DECL
- && DECL_NAME (*to_p) == NULL_TREE
- && needs_to_live_in_memory (*to_p))
+ if (!(rhs_predicate_for (*to_p))(*from_p))
+ /* If we need a temporary, *to_p isn't accurate. */
+ use_target = false;
+ else if (TREE_CODE (*to_p) == RESULT_DECL
+ && DECL_NAME (*to_p) == NULL_TREE
+ && needs_to_live_in_memory (*to_p))
/* It's OK to use the return slot directly unless it's an NRV. */
use_target = true;
+ else if (is_gimple_reg_type (TREE_TYPE (*to_p))
+ || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
+ /* Don't force regs into memory. */
+ use_target = false;
+ else if (TREE_CODE (*to_p) == VAR_DECL
+ && DECL_GIMPLE_FORMAL_TEMP_P (*to_p))
+ /* Don't use the original target if it's a formal temp; we
+ don't want to take its address. */
+ use_target = false;
+ else if (TREE_CODE (*expr_p) == INIT_EXPR)
+ /* It's OK to use the target directly if it's being
+ initialized. */
+ use_target = true;
else if (!is_gimple_non_addressable (*to_p))
/* Don't use the original target if it's already addressable;
if its address escapes, and the called function uses the
When optimizing, the return_slot pass marks more functions
as safe after we have escape info. */
use_target = false;
- else if (TREE_CODE (*to_p) != PARM_DECL
- && DECL_GIMPLE_FORMAL_TEMP_P (*to_p))
- /* Don't use the original target if it's a formal temp; we
- don't want to take their addresses. */
- use_target = false;
- else if (is_gimple_reg_type (TREE_TYPE (*to_p)))
- /* Also don't force regs into memory. */
- use_target = false;
else
use_target = true;
if (use_target)
{
CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
- lang_hooks.mark_addressable (*to_p);
+ mark_addressable (*to_p);
}
}
ret = GS_UNHANDLED;
break;
+ /* If we're initializing from a container, push the initialization
+ inside it. */
+ case CLEANUP_POINT_EXPR:
+ case BIND_EXPR:
+ case STATEMENT_LIST:
+ {
+ tree wrap = *from_p;
+ tree t;
+
+ ret = gimplify_expr (to_p, pre_p, post_p,
+ is_gimple_min_lval, fb_lvalue);
+ if (ret != GS_ERROR)
+ ret = GS_OK;
+
+ t = voidify_wrapper_expr (wrap, *expr_p);
+ gcc_assert (t == *expr_p);
+
+ if (want_value)
+ {
+ gimplify_and_add (wrap, pre_p);
+ *expr_p = unshare_expr (*to_p);
+ }
+ else
+ *expr_p = wrap;
+ return GS_OK;
+ }
+
default:
ret = GS_UNHANDLED;
break;
return ret;
}
+/* Destructively convert the TREE pointer in TP into a gimple tuple if
+ appropriate. */
+
+static void
+tree_to_gimple_tuple (tree *tp)
+{
+
+ switch (TREE_CODE (*tp))
+ {
+ case GIMPLE_MODIFY_STMT:
+ return;
+ case MODIFY_EXPR:
+ {
+ struct gimple_stmt *gs;
+ tree lhs = TREE_OPERAND (*tp, 0);
+ bool def_stmt_self_p = false;
+
+ if (TREE_CODE (lhs) == SSA_NAME)
+ {
+ if (SSA_NAME_DEF_STMT (lhs) == *tp)
+ def_stmt_self_p = true;
+ }
+
+ gs = &make_node (GIMPLE_MODIFY_STMT)->gstmt;
+ gs->base = (*tp)->base;
+ /* The set to base above overwrites the CODE. */
+ TREE_SET_CODE ((tree) gs, GIMPLE_MODIFY_STMT);
+
+ gs->locus = EXPR_LOCUS (*tp);
+ gs->operands[0] = TREE_OPERAND (*tp, 0);
+ gs->operands[1] = TREE_OPERAND (*tp, 1);
+ gs->block = TREE_BLOCK (*tp);
+ *tp = (tree)gs;
+
+ /* If we re-gimplify a set to an SSA_NAME, we must change the
+ SSA name's DEF_STMT link. */
+ if (def_stmt_self_p)
+ SSA_NAME_DEF_STMT (GIMPLE_STMT_OPERAND (*tp, 0)) = *tp;
+
+ return;
+ }
+ default:
+ break;
+ }
+}
+
/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
- DECL_COMPLEX_GIMPLE_REG_P set. */
+ DECL_GIMPLE_REG_P set. */
static enum gimplify_status
gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value)
enum tree_code code, ocode;
tree lhs, rhs, new_rhs, other, realpart, imagpart;
- lhs = TREE_OPERAND (*expr_p, 0);
- rhs = TREE_OPERAND (*expr_p, 1);
+ lhs = GENERIC_TREE_OPERAND (*expr_p, 0);
+ rhs = GENERIC_TREE_OPERAND (*expr_p, 1);
code = TREE_CODE (lhs);
lhs = TREE_OPERAND (lhs, 0);
else
new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
- TREE_OPERAND (*expr_p, 0) = lhs;
- TREE_OPERAND (*expr_p, 1) = new_rhs;
+ GENERIC_TREE_OPERAND (*expr_p, 0) = lhs;
+ GENERIC_TREE_OPERAND (*expr_p, 1) = new_rhs;
if (want_value)
{
+ tree_to_gimple_tuple (expr_p);
+
append_to_statement_list (*expr_p, pre_p);
*expr_p = rhs;
}
static enum gimplify_status
gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
{
- tree *from_p = &TREE_OPERAND (*expr_p, 1);
- tree *to_p = &TREE_OPERAND (*expr_p, 0);
+ tree *from_p = &GENERIC_TREE_OPERAND (*expr_p, 1);
+ tree *to_p = &GENERIC_TREE_OPERAND (*expr_p, 0);
enum gimplify_status ret = GS_UNHANDLED;
gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
+ || TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT
|| TREE_CODE (*expr_p) == INIT_EXPR);
- /* The distinction between MODIFY_EXPR and INIT_EXPR is no longer useful. */
- if (TREE_CODE (*expr_p) == INIT_EXPR)
- TREE_SET_CODE (*expr_p, MODIFY_EXPR);
-
/* For zero sized types only gimplify the left hand side and right hand side
as statements and throw away the assignment. */
if (zero_sized_type (TREE_TYPE (*from_p)))
*to_p = make_ssa_name (*to_p, *expr_p);
}
+ /* Try to alleviate the effects of the gimplification creating artificial
+ temporaries (see for example is_gimple_reg_rhs) on the debug info. */
+ if (!gimplify_ctxp->into_ssa
+ && DECL_P (*from_p) && DECL_IGNORED_P (*from_p)
+ && DECL_P (*to_p) && !DECL_IGNORED_P (*to_p))
+ {
+ if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
+ DECL_NAME (*from_p)
+ = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
+ DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
+ SET_DECL_DEBUG_EXPR (*from_p, *to_p);
+ }
+
if (want_value)
{
+ tree_to_gimple_tuple (expr_p);
+
append_to_statement_list (*expr_p, pre_p);
*expr_p = *to_p;
return GS_OK;
{
tree op0 = TREE_OPERAND (*expr_p, 0);
tree op1 = TREE_OPERAND (*expr_p, 1);
- tree args, t, dest;
-
- t = TYPE_SIZE_UNIT (TREE_TYPE (op0));
- t = unshare_expr (t);
- t = SUBSTITUTE_PLACEHOLDER_IN_EXPR (t, op0);
- args = tree_cons (NULL, t, NULL);
- t = build_fold_addr_expr (op1);
- args = tree_cons (NULL, t, args);
+ tree t, arg, dest, src;
+
+ arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
+ arg = unshare_expr (arg);
+ arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
+ src = build_fold_addr_expr (op1);
dest = build_fold_addr_expr (op0);
- args = tree_cons (NULL, dest, args);
t = implicit_built_in_decls[BUILT_IN_MEMCMP];
- t = build_function_call_expr (t, args);
+ t = build_call_expr (t, 3, dest, src, arg);
*expr_p
= build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
return GS_OK;
}
+/* Gimplify a comparison between two aggregate objects of integral scalar
+ mode as a comparison between the bitwise equivalent scalar values. */
+
+static enum gimplify_status
+gimplify_scalar_mode_aggregate_compare (tree *expr_p)
+{
+ tree op0 = TREE_OPERAND (*expr_p, 0);
+ tree op1 = TREE_OPERAND (*expr_p, 1);
+
+ tree type = TREE_TYPE (op0);
+ tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
+
+ op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0);
+ op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1);
+
+ *expr_p
+ = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
+
+ return GS_OK;
+}
+
/* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
points to the expression to gimplify.
tree type = TREE_TYPE (*expr_p);
*expr_p = build3 (COND_EXPR, type, *expr_p,
- convert (type, boolean_true_node),
- convert (type, boolean_false_node));
+ fold_convert (type, boolean_true_node),
+ fold_convert (type, boolean_false_node));
return GS_OK;
}
enlightened front-end, or by shortcut_cond_expr. */
static enum gimplify_status
-gimplify_statement_list (tree *expr_p)
+gimplify_statement_list (tree *expr_p, tree *pre_p)
{
+ tree temp = voidify_wrapper_expr (*expr_p, NULL);
+
tree_stmt_iterator i = tsi_start (*expr_p);
while (!tsi_end_p (i))
tsi_next (&i);
}
+ if (temp)
+ {
+ append_to_statement_list (*expr_p, pre_p);
+ *expr_p = temp;
+ return GS_OK;
+ }
+
return GS_ALL_DONE;
}
tree t_expr = TREE_TYPE (expr);
tree t_op00 = TREE_TYPE (op00);
- if (!lang_hooks.types_compatible_p (t_expr, t_op00))
- {
-#ifdef ENABLE_CHECKING
- tree t_op0 = TREE_TYPE (op0);
- gcc_assert (POINTER_TYPE_P (t_expr)
- && cpt_same_type (TREE_CODE (t_op0) == ARRAY_TYPE
- ? TREE_TYPE (t_op0) : t_op0,
- TREE_TYPE (t_expr))
- && POINTER_TYPE_P (t_op00)
- && cpt_same_type (t_op0, TREE_TYPE (t_op00)));
-#endif
- op00 = fold_convert (TREE_TYPE (expr), op00);
- }
+ if (!useless_type_conversion_p (t_expr, t_op00))
+ op00 = fold_convert (TREE_TYPE (expr), op00);
*expr_p = op00;
ret = GS_OK;
}
the address of a call that returns a struct; see
gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
the implied temporary explicit. */
+
+ /* Mark the RHS addressable. */
ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
is_gimple_addressable, fb_either);
if (ret != GS_ERROR)
is set properly. */
recompute_tree_invariant_for_addr_expr (expr);
- /* Mark the RHS addressable. */
- lang_hooks.mark_addressable (TREE_OPERAND (expr, 0));
+ mark_addressable (TREE_OPERAND (expr, 0));
}
break;
}
&allows_mem, &allows_reg, &is_inout);
if (!allows_reg && allows_mem)
- lang_hooks.mark_addressable (TREE_VALUE (link));
+ mark_addressable (TREE_VALUE (link));
tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
is_inout ? is_gimple_min_lval : is_gimple_lvalue,
parse_input_constraint (&constraint, 0, 0, noutputs, 0,
oconstraints, &allows_mem, &allows_reg);
+ /* If we can't make copies, we can only accept memory. */
+ if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
+ {
+ if (allows_mem)
+ allows_reg = 0;
+ else
+ {
+ error ("impossible constraint in %<asm%>");
+ error ("non-memory input %d must stay in memory", i);
+ return GS_ERROR;
+ }
+ }
+
/* If the operand is a memory input, it should be an lvalue. */
if (!allows_reg && allows_mem)
{
- lang_hooks.mark_addressable (TREE_VALUE (link));
tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
is_gimple_lvalue, fb_lvalue | fb_mayfail);
+ mark_addressable (TREE_VALUE (link));
if (tret == GS_ERROR)
{
error ("memory input %d is not directly addressable", i);
*/
tree flag = create_tmp_var (boolean_type_node, "cleanup");
- tree ffalse = build2 (MODIFY_EXPR, void_type_node, flag,
- boolean_false_node);
- tree ftrue = build2 (MODIFY_EXPR, void_type_node, flag,
- boolean_true_node);
+ tree ffalse = build_gimple_modify_stmt (flag, boolean_false_node);
+ tree ftrue = build_gimple_modify_stmt (flag, boolean_true_node);
cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
append_to_statement_list (ffalse, &gimplify_ctxp->conditional_cleanups);
ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
else
{
- /* Special handling for BIND_EXPR can result in fewer temps. */
- ret = GS_OK;
- if (TREE_CODE (init) == BIND_EXPR)
- gimplify_bind_expr (&init, temp, pre_p);
- if (init != temp)
- {
- init = build2 (MODIFY_EXPR, void_type_node, temp, init);
- ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt,
- fb_none);
- }
+ init = build2 (INIT_EXPR, void_type_node, temp, init);
+ ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt,
+ fb_none);
}
if (ret == GS_ERROR)
- return GS_ERROR;
+ {
+ /* PR c++/28266 Make sure this is expanded only once. */
+ TARGET_EXPR_INITIAL (targ) = NULL_TREE;
+ return GS_ERROR;
+ }
append_to_statement_list (init, pre_p);
/* If needed, push the cleanup for the temp. */
/* When adding a variable-sized variable, we have to handle all sorts
of additional bits of data: the pointer replacement variable, and
the parameters of the type. */
- if (!TREE_CONSTANT (DECL_SIZE (decl)))
+ if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
{
/* Add the pointer replacement variable as PRIVATE if the variable
replacement is private, else FIRSTPRIVATE since we'll need the
/* We're going to make use of the TYPE_SIZE_UNIT at least in the
alloca statement we generate for the variable, so make sure it
is available. This isn't automatically needed for the SHARED
- case, since we won't be allocating local storage then. */
- else
+ case, since we won't be allocating local storage then.
+ For local variables TYPE_SIZE_UNIT might not be gimplified yet;
+ in that case omp_notice_variable will be called later
+ on, when it is gimplified. */
+ else if (! (flags & GOVD_LOCAL))
omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
}
else if (lang_hooks.decls.omp_privatize_by_reference (decl))
if ((flags & GOVD_SHARED) == 0)
{
t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
- if (!TREE_CONSTANT (t))
+ if (TREE_CODE (t) != INTEGER_CST)
omp_notice_variable (ctx, t, true);
}
}
if (n->value & GOVD_SHARED)
{
if (ctx == gimplify_omp_ctxp)
- error ("iteration variable %qs should be private",
+ {
+ error ("iteration variable %qs should be private",
+ IDENTIFIER_POINTER (DECL_NAME (decl)));
+ n->value = GOVD_PRIVATE;
+ return true;
+ }
+ else
+ return false;
+ }
+ else if ((n->value & GOVD_EXPLICIT) != 0
+ && (ctx == gimplify_omp_ctxp
+ || (ctx->is_combined_parallel
+ && gimplify_omp_ctxp->outer_context == ctx)))
+ {
+ if ((n->value & GOVD_FIRSTPRIVATE) != 0)
+ error ("iteration variable %qs should not be firstprivate",
+ IDENTIFIER_POINTER (DECL_NAME (decl)));
+ else if ((n->value & GOVD_REDUCTION) != 0)
+ error ("iteration variable %qs should not be reduction",
IDENTIFIER_POINTER (DECL_NAME (decl)));
- n->value = GOVD_PRIVATE;
}
return true;
}
- if (ctx->outer_context)
- return omp_is_private (ctx->outer_context, decl);
- else if (ctx->is_parallel)
+ if (ctx->is_parallel)
return false;
+ else if (ctx->outer_context)
+ return omp_is_private (ctx->outer_context, decl);
else
return !is_global_var (decl);
}
+/* Return true if DECL is private within a parallel region
+ that binds to the current construct's context or in parallel
+ region's REDUCTION clause. */
+
+static bool
+omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
+{
+ splay_tree_node n;
+
+ do
+ {
+ ctx = ctx->outer_context;
+ if (ctx == NULL)
+ return !(is_global_var (decl)
+ /* References might be private, but might be shared too. */
+ || lang_hooks.decls.omp_privatize_by_reference (decl));
+
+ n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
+ if (n != NULL)
+ return (n->value & GOVD_SHARED) == 0;
+ }
+ while (!ctx->is_parallel);
+ return false;
+}
+
/* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
and previous omp contexts. */
static void
-gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel)
+gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel,
+ bool in_combined_parallel)
{
struct gimplify_omp_ctx *ctx, *outer_ctx;
tree c;
- ctx = new_omp_context (in_parallel);
+ ctx = new_omp_context (in_parallel, in_combined_parallel);
outer_ctx = ctx->outer_context;
while ((c = *list_p) != NULL)
enum gimplify_status gs;
bool remove = false;
bool notice_outer = true;
+ const char *check_non_private = NULL;
unsigned int flags;
tree decl;
goto do_add;
case OMP_CLAUSE_FIRSTPRIVATE:
flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
+ check_non_private = "firstprivate";
goto do_add;
case OMP_CLAUSE_LASTPRIVATE:
flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
+ check_non_private = "lastprivate";
goto do_add;
case OMP_CLAUSE_REDUCTION:
flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
+ check_non_private = "reduction";
goto do_add;
do_add:
break;
}
omp_add_variable (ctx, decl, flags);
- if (TREE_CODE (c) == OMP_CLAUSE_REDUCTION
+ if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
{
omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
- GOVD_LOCAL);
+ GOVD_LOCAL | GOVD_SEEN);
gimplify_omp_ctxp = ctx;
push_gimplify_context ();
gimplify_stmt (&OMP_CLAUSE_REDUCTION_INIT (c));
do_notice:
if (outer_ctx)
omp_notice_variable (outer_ctx, decl, true);
+ if (check_non_private
+ && !in_parallel
+ && omp_check_private (ctx, decl))
+ {
+ error ("%s variable %qs is private in outer context",
+ check_non_private, IDENTIFIER_POINTER (DECL_NAME (decl)));
+ remove = true;
+ }
break;
- case OMP_CLAUSE_SCHEDULE:
case OMP_CLAUSE_IF:
+ OMP_CLAUSE_OPERAND (c, 0)
+ = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
+ /* Fall through. */
+
+ case OMP_CLAUSE_SCHEDULE:
case OMP_CLAUSE_NUM_THREADS:
gs = gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
is_gimple_val, fb_rvalue);
else if (flags & GOVD_SHARED)
{
if (is_global_var (decl))
- return 0;
+ {
+ struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
+ while (ctx != NULL)
+ {
+ splay_tree_node on
+ = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
+ if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
+ | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
+ break;
+ ctx = ctx->outer_context;
+ }
+ if (ctx == NULL)
+ return 0;
+ }
code = OMP_CLAUSE_SHARED;
}
else if (flags & GOVD_PRIVATE)
{
tree expr = *expr_p;
- gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, true);
+ gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, true,
+ OMP_PARALLEL_COMBINED (expr));
push_gimplify_context ();
gimplify_omp_for (tree *expr_p, tree *pre_p)
{
tree for_stmt, decl, t;
- enum gimplify_status ret = 0;
+ enum gimplify_status ret = GS_OK;
for_stmt = *expr_p;
- gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, false);
+ gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, false, false);
t = OMP_FOR_INIT (for_stmt);
- gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
- decl = TREE_OPERAND (t, 0);
+ gcc_assert (TREE_CODE (t) == MODIFY_EXPR
+ || TREE_CODE (t) == GIMPLE_MODIFY_STMT);
+ decl = GENERIC_TREE_OPERAND (t, 0);
gcc_assert (DECL_P (decl));
gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)));
- gcc_assert (!TYPE_UNSIGNED (TREE_TYPE (decl)));
/* Make sure the iteration variable is private. */
if (omp_is_private (gimplify_omp_ctxp, decl))
else
omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
- ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
+ ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
+ &OMP_FOR_PRE_BODY (for_stmt),
NULL, is_gimple_val, fb_rvalue);
+ tree_to_gimple_tuple (&OMP_FOR_INIT (for_stmt));
+
t = OMP_FOR_COND (for_stmt);
gcc_assert (COMPARISON_CLASS_P (t));
- gcc_assert (TREE_OPERAND (t, 0) == decl);
+ gcc_assert (GENERIC_TREE_OPERAND (t, 0) == decl);
- ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
+ ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
+ &OMP_FOR_PRE_BODY (for_stmt),
NULL, is_gimple_val, fb_rvalue);
+ tree_to_gimple_tuple (&OMP_FOR_INCR (for_stmt));
t = OMP_FOR_INCR (for_stmt);
switch (TREE_CODE (t))
{
case PREINCREMENT_EXPR:
case POSTINCREMENT_EXPR:
t = build_int_cst (TREE_TYPE (decl), 1);
- goto build_modify;
+ t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
+ t = build_gimple_modify_stmt (decl, t);
+ OMP_FOR_INCR (for_stmt) = t;
+ break;
+
case PREDECREMENT_EXPR:
case POSTDECREMENT_EXPR:
t = build_int_cst (TREE_TYPE (decl), -1);
- goto build_modify;
- build_modify:
t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
- t = build2 (MODIFY_EXPR, void_type_node, decl, t);
+ t = build_gimple_modify_stmt (decl, t);
OMP_FOR_INCR (for_stmt) = t;
break;
- case MODIFY_EXPR:
- gcc_assert (TREE_OPERAND (t, 0) == decl);
- t = TREE_OPERAND (t, 1);
+ case GIMPLE_MODIFY_STMT:
+ gcc_assert (GIMPLE_STMT_OPERAND (t, 0) == decl);
+ t = GIMPLE_STMT_OPERAND (t, 1);
switch (TREE_CODE (t))
{
case PLUS_EXPR:
{
tree stmt = *expr_p;
- gimplify_scan_omp_clauses (&OMP_CLAUSES (stmt), pre_p, false);
+ gimplify_scan_omp_clauses (&OMP_CLAUSES (stmt), pre_p, false, false);
gimplify_to_stmt_list (&OMP_BODY (stmt));
gimplify_adjust_omp_clauses (&OMP_CLAUSES (stmt));
gimplify_omp_atomic_fetch_op (tree *expr_p, tree addr, tree rhs, int index)
{
enum built_in_function base;
- tree decl, args, itype;
+ tree decl, itype;
enum insn_code *optab;
/* Check for one of the supported fetch-op operations. */
switch (TREE_CODE (rhs))
{
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
base = BUILT_IN_FETCH_AND_ADD_N;
optab = sync_add_optab;
if (optab[TYPE_MODE (itype)] == CODE_FOR_nothing)
return GS_UNHANDLED;
- args = tree_cons (NULL, fold_convert (itype, rhs), NULL);
- args = tree_cons (NULL, addr, args);
- *expr_p = build_function_call_expr (decl, args);
+ *expr_p = build_call_expr (decl, 2, addr, fold_convert (itype, rhs));
return GS_OK;
}
/* A subroutine of gimplify_omp_atomic_pipeline. Walk *EXPR_P and replace
- appearences of *LHS_ADDR with LHS_VAR. If an expression does not involve
+ appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
a subexpression, 0 if it did not, or -1 if an error was encountered. */
tree rhs, int index)
{
tree oldval, oldival, oldival2, newval, newival, label;
- tree type, itype, cmpxchg, args, x, iaddr;
+ tree type, itype, cmpxchg, x, iaddr;
cmpxchg = built_in_decls[BUILT_IN_VAL_COMPARE_AND_SWAP_N + index + 1];
type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
return GS_ERROR;
x = build_fold_indirect_ref (addr);
- x = build2 (MODIFY_EXPR, void_type_node, oldval, x);
+ x = build_gimple_modify_stmt (oldval, x);
gimplify_and_add (x, pre_p);
/* For floating-point values, we'll need to view-convert them to integers
newival = create_tmp_var (itype, NULL);
x = build1 (VIEW_CONVERT_EXPR, itype, oldval);
- x = build2 (MODIFY_EXPR, void_type_node, oldival, x);
+ x = build_gimple_modify_stmt (oldival, x);
gimplify_and_add (x, pre_p);
iaddr = fold_convert (build_pointer_type (itype), addr);
}
x = build1 (LABEL_EXPR, void_type_node, label);
gimplify_and_add (x, pre_p);
- x = build2 (MODIFY_EXPR, void_type_node, newval, rhs);
+ x = build_gimple_modify_stmt (newval, rhs);
gimplify_and_add (x, pre_p);
if (newval != newival)
{
x = build1 (VIEW_CONVERT_EXPR, itype, newval);
- x = build2 (MODIFY_EXPR, void_type_node, newival, x);
+ x = build_gimple_modify_stmt (newival, x);
gimplify_and_add (x, pre_p);
}
- x = build2 (MODIFY_EXPR, void_type_node, oldival2, oldival);
+ x = build_gimple_modify_stmt (oldival2, fold_convert (itype, oldival));
gimplify_and_add (x, pre_p);
- args = tree_cons (NULL, fold_convert (itype, newival), NULL);
- args = tree_cons (NULL, fold_convert (itype, oldival), args);
- args = tree_cons (NULL, iaddr, args);
- x = build_function_call_expr (cmpxchg, args);
+ x = build_call_expr (cmpxchg, 3, iaddr, fold_convert (itype, oldival),
+ fold_convert (itype, newival));
if (oldval == oldival)
x = fold_convert (type, x);
- x = build2 (MODIFY_EXPR, void_type_node, oldival, x);
+ x = build_gimple_modify_stmt (oldival, x);
gimplify_and_add (x, pre_p);
/* For floating point, be prepared for the loop backedge. */
if (oldval != oldival)
{
x = build1 (VIEW_CONVERT_EXPR, type, oldival);
- x = build2 (MODIFY_EXPR, void_type_node, oldval, x);
+ x = build_gimple_modify_stmt (oldval, x);
gimplify_and_add (x, pre_p);
}
floating point. This allows the atomic operation to properly
succeed even with NaNs and -0.0. */
x = build3 (COND_EXPR, void_type_node,
- build2 (NE_EXPR, boolean_type_node, oldival, oldival2),
+ build2 (NE_EXPR, boolean_type_node,
+ fold_convert (itype, oldival), oldival2),
build1 (GOTO_EXPR, void_type_node, label), NULL);
gimplify_and_add (x, pre_p);
tree t;
t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
- t = build_function_call_expr (t, NULL);
+ t = build_call_expr (t, 0);
gimplify_and_add (t, pre_p);
t = build_fold_indirect_ref (addr);
- t = build2 (MODIFY_EXPR, void_type_node, t, rhs);
+ t = build_gimple_modify_stmt (t, rhs);
gimplify_and_add (t, pre_p);
t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
- t = build_function_call_expr (t, NULL);
+ t = build_call_expr (t, 0);
gimplify_and_add (t, pre_p);
*expr_p = NULL;
/* Die, die, die, my darling. */
if (save_expr == error_mark_node
- || (TREE_TYPE (save_expr)
+ || (!GIMPLE_STMT_P (save_expr)
+ && TREE_TYPE (save_expr)
&& TREE_TYPE (save_expr) == error_mark_node))
{
ret = GS_ERROR;
break;
case COND_EXPR:
- ret = gimplify_cond_expr (expr_p, pre_p, post_p, NULL_TREE,
- fallback);
+ ret = gimplify_cond_expr (expr_p, pre_p, fallback);
/* C99 code may assign to an array in a structure value of a
conditional expression, and this has undefined behavior
only on execution, so create a temporary if an lvalue is
if (fallback == fb_lvalue)
{
*expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
- lang_hooks.mark_addressable (*expr_p);
+ mark_addressable (*expr_p);
}
break;
if (fallback == fb_lvalue)
{
*expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
- lang_hooks.mark_addressable (*expr_p);
+ mark_addressable (*expr_p);
}
break;
break;
case MODIFY_EXPR:
+ case GIMPLE_MODIFY_STMT:
case INIT_EXPR:
ret = gimplify_modify_expr (expr_p, pre_p, post_p,
fallback != fb_none);
+
+ if (*expr_p)
+ {
+ /* The distinction between MODIFY_EXPR and INIT_EXPR is no longer
+ useful. */
+ if (TREE_CODE (*expr_p) == INIT_EXPR)
+ TREE_SET_CODE (*expr_p, MODIFY_EXPR);
+
+ /* Convert MODIFY_EXPR to GIMPLE_MODIFY_STMT. */
+ if (TREE_CODE (*expr_p) == MODIFY_EXPR)
+ tree_to_gimple_tuple (expr_p);
+ }
+
break;
case TRUTH_ANDIF_EXPR:
/* FALLTHRU */
case FIX_TRUNC_EXPR:
- case FIX_CEIL_EXPR:
- case FIX_FLOOR_EXPR:
- case FIX_ROUND_EXPR:
/* unary_expr: ... | '(' cast ')' val | ... */
ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
is_gimple_val, fb_rvalue);
break;
case BIND_EXPR:
- ret = gimplify_bind_expr (expr_p, NULL, pre_p);
+ ret = gimplify_bind_expr (expr_p, pre_p);
break;
case LOOP_EXPR:
else if (fallback == fb_lvalue)
{
*expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
- lang_hooks.mark_addressable (*expr_p);
+ mark_addressable (*expr_p);
}
else
ret = GS_ALL_DONE;
ret = GS_ALL_DONE;
break;
+ case CHANGE_DYNAMIC_TYPE_EXPR:
+ ret = gimplify_expr (&CHANGE_DYNAMIC_TYPE_LOCATION (*expr_p),
+ pre_p, post_p, is_gimple_reg, fb_lvalue);
+ break;
+
case OBJ_TYPE_REF:
{
enum gimplify_status r0, r1;
break;
case STATEMENT_LIST:
- ret = gimplify_statement_list (expr_p);
+ ret = gimplify_statement_list (expr_p, pre_p);
break;
case WITH_SIZE_EXPR:
ret = gimplify_var_or_parm_decl (expr_p);
break;
+ case RESULT_DECL:
+ /* When within an OpenMP context, notice uses of variables. */
+ if (gimplify_omp_ctxp)
+ omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
+ ret = GS_ALL_DONE;
+ break;
+
case SSA_NAME:
/* Allow callbacks into the gimplifier during optimization. */
ret = GS_ALL_DONE;
ret = gimplify_omp_atomic (expr_p, pre_p);
break;
- case OMP_RETURN_EXPR:
+ case OMP_RETURN:
+ case OMP_CONTINUE:
ret = GS_ALL_DONE;
break;
+ case POINTER_PLUS_EXPR:
+ /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
+ The second form is a gimple immediate, saving the need for an extra
+ statement. */
+ if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
+ && (tmp = maybe_fold_offset_to_reference
+ (TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
+ TREE_TYPE (TREE_TYPE (*expr_p)))))
+ {
+ tree ptr_type = build_pointer_type (TREE_TYPE (tmp));
+ if (useless_type_conversion_p (TREE_TYPE (*expr_p), ptr_type))
+ {
+ *expr_p = build_fold_addr_expr_with_type (tmp, ptr_type);
+ break;
+ }
+ }
+ /* Convert (void *)&a + 4 into (void *)&a[1]. */
+ if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
+ && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
+ && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
+ 0),0)))
+ && (tmp = maybe_fold_offset_to_reference
+ (TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
+ TREE_OPERAND (*expr_p, 1),
+ TREE_TYPE (TREE_TYPE
+ (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
+ 0))))))
+ {
+ tmp = build_fold_addr_expr (tmp);
+ *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
+ break;
+ }
+ /* FALLTHRU */
default:
switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
{
case tcc_comparison:
- /* If this is a comparison of objects of aggregate type,
- handle it specially (by converting to a call to
- memcmp). It would be nice to only have to do this
- for variable-sized objects, but then we'd have to
- allow the same nest of reference nodes we allow for
- MODIFY_EXPR and that's too complex. */
- if (!AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 1))))
- goto expr_2;
- ret = gimplify_variable_sized_compare (expr_p);
- break;
+ /* Handle comparison of objects of non scalar mode aggregates
+ with a call to memcmp. It would be nice to only have to do
+ this for variable-sized objects, but then we'd have to allow
+ the same nest of reference nodes we allow for MODIFY_EXPR and
+ that's too complex.
+
+ Compare scalar mode aggregates as scalar mode values. Using
+ memcmp for them would be very inefficient at best, and is
+ plain wrong if bitfields are involved. */
+
+ {
+ tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
+
+ if (!AGGREGATE_TYPE_P (type))
+ goto expr_2;
+ else if (TYPE_MODE (type) != BLKmode)
+ ret = gimplify_scalar_mode_aggregate_compare (expr_p);
+ else
+ ret = gimplify_variable_sized_compare (expr_p);
+
+ break;
+ }
/* If *EXPR_P does not need to be special-cased, handle it
according to its class. */
switch (code)
{
case COMPONENT_REF:
- case REALPART_EXPR: case IMAGPART_EXPR:
+ case REALPART_EXPR:
+ case IMAGPART_EXPR:
+ case VIEW_CONVERT_EXPR:
gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
gimple_test_f, fallback);
break;
- case ARRAY_REF: case ARRAY_RANGE_REF:
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
gimple_test_f, fallback);
gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
default:
/* Anything else with side-effects must be converted to
- a valid statement before we get here. */
+ a valid statement before we get here. */
gcc_unreachable ();
}
*expr_p = NULL;
}
- else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p)))
+ else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
+ && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
{
- /* Historically, the compiler has treated a bare
- reference to a volatile lvalue as forcing a load. */
+ /* Historically, the compiler has treated a bare reference
+ to a non-BLKmode volatile lvalue as forcing a load. */
tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
- /* Normally, we do want to create a temporary for a
+ /* Normally, we do not want to create a temporary for a
TREE_ADDRESSABLE type because such a type should not be
copied by bitwise-assignment. However, we make an
exception here, as all we are doing here is ensuring that
given a TREE_ADDRESSABLE type. */
tree tmp = create_tmp_var_raw (type, "vol");
gimple_add_tmp_var (tmp);
- *expr_p = build2 (MODIFY_EXPR, type, tmp, *expr_p);
+ *expr_p = build_gimple_modify_stmt (tmp, *expr_p);
}
else
/* We can't do anything useful with a volatile reference to
- incomplete type, so just throw it away. */
+ an incomplete type, so just throw it away. Likewise for
+ a BLKmode type, since any implicit inner load should
+ already have been turned into an explicit one by the
+ gimplification process. */
*expr_p = NULL;
}
case POINTER_TYPE:
case REFERENCE_TYPE:
- gimplify_type_sizes (TREE_TYPE (type), list_p);
+ /* We used to recurse on the pointed-to type here, which turned out to
+ be incorrect because its definition might refer to variables not
+ yet initialized at this point if a forward declaration is involved.
+
+ It was actually useful for anonymous pointed-to types to ensure
+ that the sizes evaluation dominates every possible later use of the
+ values. Restricting to such types here would be safe since there
+ is no possible forward declaration around, but would introduce an
+ undesirable middle-end semantic to anonymity. We then defer to
+ front-ends the responsibility of ensuring that the sizes are
+ evaluated both early and late enough, e.g. by attaching artificial
+ type declarations to the tree. */
break;
default:
*expr_p = create_tmp_var (type, NULL);
tmp = build1 (NOP_EXPR, type, expr);
- tmp = build2 (MODIFY_EXPR, type, *expr_p, expr);
+ tmp = build_gimple_modify_stmt (*expr_p, tmp);
if (EXPR_HAS_LOCATION (expr))
SET_EXPR_LOCUS (tmp, EXPR_LOCUS (expr));
else
}
}
\f
-#ifdef ENABLE_CHECKING
-/* Compare types A and B for a "close enough" match. */
-
-static bool
-cpt_same_type (tree a, tree b)
-{
- if (lang_hooks.types_compatible_p (a, b))
- return true;
-
- /* ??? The C++ FE decomposes METHOD_TYPES to FUNCTION_TYPES and doesn't
- link them together. This routine is intended to catch type errors
- that will affect the optimizers, and the optimizers don't add new
- dereferences of function pointers, so ignore it. */
- if ((TREE_CODE (a) == FUNCTION_TYPE || TREE_CODE (a) == METHOD_TYPE)
- && (TREE_CODE (b) == FUNCTION_TYPE || TREE_CODE (b) == METHOD_TYPE))
- return true;
-
- /* ??? The C FE pushes type qualifiers after the fact into the type of
- the element from the type of the array. See build_unary_op's handling
- of ADDR_EXPR. This seems wrong -- if we were going to do this, we
- should have done it when creating the variable in the first place.
- Alternately, why aren't the two array types made variants? */
- if (TREE_CODE (a) == ARRAY_TYPE && TREE_CODE (b) == ARRAY_TYPE)
- return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
-
- /* And because of those, we have to recurse down through pointers. */
- if (POINTER_TYPE_P (a) && POINTER_TYPE_P (b))
- return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
-
- return false;
-}
-
-/* Check for some cases of the front end missing cast expressions.
- The type of a dereference should correspond to the pointer type;
- similarly the type of an address should match its object. */
-
-static tree
-check_pointer_types_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
- void *data ATTRIBUTE_UNUSED)
-{
- tree t = *tp;
- tree ptype, otype, dtype;
-
- switch (TREE_CODE (t))
- {
- case INDIRECT_REF:
- case ARRAY_REF:
- otype = TREE_TYPE (t);
- ptype = TREE_TYPE (TREE_OPERAND (t, 0));
- dtype = TREE_TYPE (ptype);
- gcc_assert (cpt_same_type (otype, dtype));
- break;
-
- case ADDR_EXPR:
- ptype = TREE_TYPE (t);
- otype = TREE_TYPE (TREE_OPERAND (t, 0));
- dtype = TREE_TYPE (ptype);
- if (!cpt_same_type (otype, dtype))
- {
- /* &array is allowed to produce a pointer to the element, rather than
- a pointer to the array type. We must allow this in order to
- properly represent assigning the address of an array in C into
- pointer to the element type. */
- gcc_assert (TREE_CODE (otype) == ARRAY_TYPE
- && POINTER_TYPE_P (ptype)
- && cpt_same_type (TREE_TYPE (otype), dtype));
- break;
- }
- break;
-
- default:
- return NULL_TREE;
- }
-
-
- return NULL_TREE;
-}
-#endif
/* Gimplify the body of statements pointed to by BODY_P. FNDECL is the
function decl containing BODY. */
pop_gimplify_context (body);
gcc_assert (gimplify_ctxp == NULL);
-#ifdef ENABLE_CHECKING
- walk_tree (body_p, check_pointer_types_r, NULL, NULL);
+#ifdef ENABLE_TYPES_CHECKING
+ if (!errorcount && !sorrycount)
+ verify_gimple_1 (BIND_EXPR_BODY (*body_p));
#endif
timevar_pop (TV_TREE_GIMPLIFY);
/* Preliminarily mark non-addressed complex variables as eligible
for promotion to gimple registers. We'll transform their uses
as we find them. */
- if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
+ if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
&& !TREE_THIS_VOLATILE (parm)
&& !needs_to_live_in_memory (parm))
- DECL_COMPLEX_GIMPLE_REG_P (parm) = 1;
+ DECL_GIMPLE_REG_P (parm) = 1;
}
ret = DECL_RESULT (fndecl);
- if (TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
+ if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
&& !needs_to_live_in_memory (ret))
- DECL_COMPLEX_GIMPLE_REG_P (ret) = 1;
+ DECL_GIMPLE_REG_P (ret) = 1;
gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
x = DECL_SAVED_TREE (fndecl);
append_to_statement_list (x, &TREE_OPERAND (tf, 0));
x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
- x = build_function_call_expr (x, NULL);
+ x = build_call_expr (x, 0);
append_to_statement_list (x, &TREE_OPERAND (tf, 1));
bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
TREE_SIDE_EFFECTS (bind) = 1;
x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
- x = build_function_call_expr (x, NULL);
+ x = build_call_expr (x, 0);
append_to_statement_list (x, &BIND_EXPR_BODY (bind));
append_to_statement_list (tf, &BIND_EXPR_BODY (bind));
DECL_SAVED_TREE (fndecl) = bind;
}
+ cfun->gimplified = true;
current_function_decl = oldfn;
cfun = oldfn ? DECL_STRUCT_FUNCTION (oldfn) : NULL;
}
-
\f
/* Expands EXPR to list of gimple statements STMTS. If SIMPLE is true,
force the result to be either ssa_name or an invariant, otherwise
gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
push_gimplify_context ();
- gimplify_ctxp->into_ssa = in_ssa_p;
+ gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
if (var)
- expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
+ expr = build_gimple_modify_stmt (var, expr);
ret = gimplify_expr (&expr, stmts, NULL,
gimple_test_f, fb_rvalue);
gcc_assert (ret != GS_ERROR);
- if (referenced_vars)
+ if (gimple_referenced_vars (cfun))
{
for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
- add_referenced_tmp_var (t);
+ add_referenced_var (t);
}
pop_gimplify_context (NULL);
}
/* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
- some statements are produced, emits them before BSI. */
+ some statements are produced, emits them at BSI. If BEFORE is true,
+ the statements are appended before BSI, otherwise they are appended after
+ it. M specifies the way BSI moves after insertion (BSI_SAME_STMT or
+ BSI_CONTINUE_LINKING are the usual values). */
tree
force_gimple_operand_bsi (block_stmt_iterator *bsi, tree expr,
- bool simple_p, tree var)
+ bool simple_p, tree var, bool before,
+ enum bsi_iterator_update m)
{
tree stmts;
expr = force_gimple_operand (expr, &stmts, simple_p, var);
if (stmts)
- bsi_insert_before (bsi, stmts, BSI_SAME_STMT);
+ {
+ if (gimple_in_ssa_p (cfun))
+ {
+ tree_stmt_iterator tsi;
+
+ for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
+ mark_symbols_for_renaming (tsi_stmt (tsi));
+ }
+
+ if (before)
+ bsi_insert_before (bsi, stmts, m);
+ else
+ bsi_insert_after (bsi, stmts, m);
+ }
return expr;
}