/* Tree lowering pass. This pass converts the GENERIC functions-as-trees
tree representation into the GIMPLE form.
- Copyright (C) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
+ Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007
+ Free Software Foundation, Inc.
Major work done by Sebastian Pop <s.pop@laposte.net>,
Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
-02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "ggc.h"
#include "toplev.h"
#include "target.h"
+#include "optabs.h"
+#include "pointer-set.h"
+#include "splay-tree.h"
-static struct gimplify_ctx
+
+/* Flags recording what is known about a variable's data-sharing
+ disposition within an OpenMP context; stored per-DECL in the
+ context's splay tree (see omp_add_variable / omp_notice_variable). */
+enum gimplify_omp_var_data
+{
+ GOVD_SEEN = 1,
+ GOVD_EXPLICIT = 2,
+ GOVD_SHARED = 4,
+ GOVD_PRIVATE = 8,
+ GOVD_FIRSTPRIVATE = 16,
+ GOVD_LASTPRIVATE = 32,
+ GOVD_REDUCTION = 64,
+ GOVD_LOCAL = 128,
+ GOVD_DEBUG_PRIVATE = 256,
+ /* Mask selecting the mutually-exclusive sharing-class bits above. */
+ GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
+ | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
+};
+
+/* Per-OpenMP-construct state used while gimplifying the construct's
+ body; contexts nest via OUTER_CONTEXT (see new_omp_context). */
+struct gimplify_omp_ctx
+{
+ struct gimplify_omp_ctx *outer_context;
+ /* Map from DECL to gimplify_omp_var_data flags. */
+ splay_tree variables;
+ struct pointer_set_t *privatized_types;
+ location_t location;
+ enum omp_clause_default_kind default_kind;
+ bool is_parallel;
+ bool is_combined_parallel;
+};
+
+/* Gimplification state. Contexts now form a stack linked through
+ PREV_CONTEXT instead of being a single global instance. */
+struct gimplify_ctx
+{
+ struct gimplify_ctx *prev_context;
+
tree current_bind_expr;
tree temps;
tree conditional_cleanups;
tree exit_label;
tree return_temp;
+
VEC(tree,heap) *case_labels;
/* The formal temporary table. Should this be persistent? */
htab_t temp_htab;
+
int conditions;
bool save_stack;
bool into_ssa;
-} *gimplify_ctxp;
+};
+
+/* Tops of the (nestable) gimplification and OpenMP context stacks. */
+static struct gimplify_ctx *gimplify_ctxp;
+static struct gimplify_omp_ctx *gimplify_omp_ctxp;
+
/* Formal (expression) temporary table handling: Multiple occurrences of
/* Forward declarations. */
static enum gimplify_status gimplify_compound_expr (tree *, tree *, bool);
-#ifdef ENABLE_CHECKING
-static bool cpt_same_type (tree a, tree b);
-#endif
+/* Mark X addressable. Unlike the langhook we expect X to be in gimple
+ form and we don't do any syntax checking. */
+static void
+mark_addressable (tree x)
+{
+ /* Strip component references (array/field/real-imag accesses) down
+ to the base object. */
+ while (handled_component_p (x))
+ x = TREE_OPERAND (x, 0);
+ /* Only variables and parameters carry TREE_ADDRESSABLE usefully;
+ any other base (e.g. an indirection) is left untouched. */
+ if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
+ return ;
+ TREE_ADDRESSABLE (x) = 1;
+}
/* Return a hash value for a formal temporary table entry. */
void
push_gimplify_context (void)
{
- gcc_assert (!gimplify_ctxp);
- gimplify_ctxp
- = (struct gimplify_ctx *) xcalloc (1, sizeof (struct gimplify_ctx));
+ struct gimplify_ctx *c;
+
+ /* Contexts may now nest: chain the new context onto whatever is
+ currently active rather than asserting there is none. */
+ c = (struct gimplify_ctx *) xcalloc (1, sizeof (struct gimplify_ctx));
+ c->prev_context = gimplify_ctxp;
if (optimize)
- gimplify_ctxp->temp_htab
- = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
- else
- gimplify_ctxp->temp_htab = NULL;
+ /* The formal-temp table is only needed when optimizing; xcalloc
+ already left temp_htab NULL otherwise. */
+ c->temp_htab = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
+
+ gimplify_ctxp = c;
}
/* Tear down a context for the gimplifier. If BODY is non-null, then
void
pop_gimplify_context (tree body)
{
+ struct gimplify_ctx *c = gimplify_ctxp;
tree t;
- gcc_assert (gimplify_ctxp && !gimplify_ctxp->current_bind_expr);
+ gcc_assert (c && !c->current_bind_expr);
+ /* Unwind to the enclosing context before disposing of this one. */
+ gimplify_ctxp = c->prev_context;
- for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
+ /* Clear the formal-temp mark on every temporary created here. */
+ for (t = c->temps; t ; t = TREE_CHAIN (t))
DECL_GIMPLE_FORMAL_TEMP_P (t) = 0;
if (body)
- declare_tmp_vars (gimplify_ctxp->temps, body);
+ declare_vars (c->temps, body, false);
else
- record_vars (gimplify_ctxp->temps);
-
-#if 0
- if (!quiet_flag && optimize)
- fprintf (stderr, " collisions: %f ",
- htab_collisions (gimplify_ctxp->temp_htab));
-#endif
+ record_vars (c->temps);
if (optimize)
- htab_delete (gimplify_ctxp->temp_htab);
- free (gimplify_ctxp);
- gimplify_ctxp = NULL;
+ htab_delete (c->temp_htab);
+ free (c);
}
static void
}
}
+/* A stable comparison routine for use with splay trees and DECLs. */
+
+static int
+splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
+{
+ tree a = (tree) xa;
+ tree b = (tree) xb;
+
+ /* Order by DECL_UID so iteration order is deterministic across runs
+ (pointer comparison would not be). */
+ return DECL_UID (a) - DECL_UID (b);
+}
+
+/* Create a new omp construct that deals with variable remapping.
+ IS_PARALLEL marks a parallel region; IS_COMBINED_PARALLEL marks a
+ combined parallel work-sharing construct. The new context is
+ chained onto the current gimplify_omp_ctxp but does not replace it
+ here; the caller installs it. */
+
+static struct gimplify_omp_ctx *
+new_omp_context (bool is_parallel, bool is_combined_parallel)
+{
+ struct gimplify_omp_ctx *c;
+
+ c = XCNEW (struct gimplify_omp_ctx);
+ c->outer_context = gimplify_omp_ctxp;
+ c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
+ c->privatized_types = pointer_set_create ();
+ c->location = input_location;
+ c->is_parallel = is_parallel;
+ c->is_combined_parallel = is_combined_parallel;
+ /* OpenMP's default data-sharing attribute is 'shared'. */
+ c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
+
+ return c;
+}
+
+/* Destroy an omp construct that deals with variable remapping.
+ Frees the variable map, the privatized-type set and C itself. */
+
+static void
+delete_omp_context (struct gimplify_omp_ctx *c)
+{
+ splay_tree_delete (c->variables);
+ pointer_set_destroy (c->privatized_types);
+ XDELETE (c);
+}
+
+static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
+static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
+
/* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
static void
return lab;
}
+/* Subroutine for find_single_pointer_decl. Called via walk_tree;
+ DATA points to the single pointer DECL found so far (or NULL). */
+
+static tree
+find_single_pointer_decl_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
+ void *data)
+{
+ tree *pdecl = (tree *) data;
+
+ if (DECL_P (*tp) && POINTER_TYPE_P (TREE_TYPE (*tp)))
+ {
+ if (*pdecl)
+ {
+ /* We already found a pointer decl; return anything other
+ than NULL_TREE to unwind from walk_tree signalling that
+ we have a duplicate. */
+ return *tp;
+ }
+ *pdecl = *tp;
+ }
+
+ return NULL_TREE;
+}
+
+/* Find the single DECL of pointer type in the tree T and return it.
+ If there are zero or more than one such DECLs, return NULL. */
+
+static tree
+find_single_pointer_decl (tree t)
+{
+ tree decl = NULL_TREE;
+
+ if (walk_tree (&t, find_single_pointer_decl_1, &decl, NULL))
+ {
+ /* find_single_pointer_decl_1 returns a nonzero value, causing
+ walk_tree to return a nonzero value, to indicate that it
+ found more than one pointer DECL. */
+ return NULL_TREE;
+ }
+
+ return decl;
+}
+
/* Create a new temporary name with PREFIX. Returns an identifier. */
static GTY(()) unsigned int tmp_var_id_num;
tree tmp_var;
/* We don't allow types that are addressable (meaning we can't make copies),
- incomplete, or of variable size. */
- gcc_assert (!TREE_ADDRESSABLE (type)
- && COMPLETE_TYPE_P (type)
- && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
+ or incomplete. We also used to reject every variable size objects here,
+ but now support those for which a constant upper bound can be obtained.
+ The processing for variable sizes is performed in gimple_add_tmp_var,
+ point at which it really matters and possibly reached via paths not going
+ through this function, e.g. after direct calls to create_tmp_var_raw. */
+ gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
tmp_var = create_tmp_var_raw (type, prefix);
gimple_add_tmp_var (tmp_var);
{
case ADDR_EXPR:
return get_name (TREE_OPERAND (stripped_decl, 0));
- break;
default:
return NULL;
}
static inline tree
create_tmp_from_val (tree val)
{
+ /* Use the main variant of VAL's type so temporaries for
+ differently-qualified versions of the same type can be shared. */
- return create_tmp_var (TREE_TYPE (val), get_name (val));
+ return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val));
}
/* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
if (*slot == NULL)
{
- elt_p = xmalloc (sizeof (*elt_p));
+ elt_p = XNEW (elt_t);
elt_p->val = val;
elt_p->temp = ret = create_tmp_from_val (val);
*slot = (void *) elt_p;
t = lookup_tmp_var (val, is_formal);
- if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
- DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
+ if (is_formal)
+ {
+ tree u = find_single_pointer_decl (val);
+
+ if (u && TREE_CODE (u) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (u))
+ u = DECL_GET_RESTRICT_BASE (u);
+ if (u && TYPE_RESTRICT (TREE_TYPE (u)))
+ {
+ if (DECL_BASED_ON_RESTRICT_P (t))
+ gcc_assert (u == DECL_GET_RESTRICT_BASE (t));
+ else
+ {
+ DECL_BASED_ON_RESTRICT_P (t) = 1;
+ SET_DECL_RESTRICT_BASE (t, u);
+ }
+ }
+ }
+
+ if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
+ DECL_GIMPLE_REG_P (t) = 1;
- mod = build (MODIFY_EXPR, TREE_TYPE (t), t, val);
+ mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
if (EXPR_HAS_LOCATION (val))
SET_EXPR_LOCUS (mod, EXPR_LOCUS (val));
return t;
}
+/* Returns a formal temporary variable initialized with VAL. PRE_P
+ points to a statement list where side-effects needed to compute VAL
+ should be stored. */
+
tree
get_formal_tmp_var (tree val, tree *pre_p)
{
return internal_get_tmp_var (val, pre_p, post_p, false);
}
-/* Declares all the variables in VARS in SCOPE. */
+/* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
+ true, generate debug info for them; otherwise don't. */
void
-declare_tmp_vars (tree vars, tree scope)
+declare_vars (tree vars, tree scope, bool debug_info)
{
tree last = vars;
if (last)
{
- tree temps;
+ tree temps, block;
/* C99 mode puts the default 'return 0;' for main outside the outer
braces. So drill down until we find an actual scope. */
gcc_assert (TREE_CODE (scope) == BIND_EXPR);
temps = nreverse (last);
- TREE_CHAIN (last) = BIND_EXPR_VARS (scope);
- BIND_EXPR_VARS (scope) = temps;
+
+ block = BIND_EXPR_BLOCK (scope);
+ if (!block || !debug_info)
+ {
+ TREE_CHAIN (last) = BIND_EXPR_VARS (scope);
+ BIND_EXPR_VARS (scope) = temps;
+ }
+ else
+ {
+ /* We need to attach the nodes both to the BIND_EXPR and to its
+ associated BLOCK for debugging purposes. The key point here
+ is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
+ is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
+ if (BLOCK_VARS (block))
+ BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
+ else
+ {
+ BIND_EXPR_VARS (scope) = chainon (BIND_EXPR_VARS (scope), temps);
+ BLOCK_VARS (block) = temps;
+ }
+ }
}
}
+/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
+ for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
+ no such upper bound can be obtained. */
+
+static void
+force_constant_size (tree var)
+{
+ /* The only attempt we make is by querying the maximum size of objects
+ of the variable's type. */
+
+ HOST_WIDE_INT max_size;
+
+ gcc_assert (TREE_CODE (var) == VAR_DECL);
+
+ max_size = max_int_size_in_bytes (TREE_TYPE (var));
+
+ /* A negative result means no constant bound exists; that is fatal. */
+ gcc_assert (max_size >= 0);
+
+ /* Rewrite both the byte and bit sizes as the constant bound. */
+ DECL_SIZE_UNIT (var)
+ = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
+ DECL_SIZE (var)
+ = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
+}
+
void
gimple_add_tmp_var (tree tmp)
{
gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
+ /* Later processing assumes that the object size is constant, which might
+ not be true at this point. Force the use of a constant upper bound in
+ this case. */
+ if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
+ force_constant_size (tmp);
+
DECL_CONTEXT (tmp) = current_function_decl;
DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
{
TREE_CHAIN (tmp) = gimplify_ctxp->temps;
gimplify_ctxp->temps = tmp;
+
+ /* Mark temporaries local within the nearest enclosing parallel. */
+ if (gimplify_omp_ctxp)
+ {
+ struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
+ while (ctx && !ctx->is_parallel)
+ ctx = ctx->outer_context;
+ if (ctx)
+ omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
+ }
}
else if (cfun)
record_vars (tmp);
else
- declare_tmp_vars (tmp, DECL_SAVED_TREE (current_function_decl));
+ declare_vars (tmp, DECL_SAVED_TREE (current_function_decl), false);
}
/* Determines whether to assign a locus to the statement STMT. */
static void
annotate_one_with_locus (tree t, location_t locus)
{
+ /* Only annotate nodes that can carry a location, do not already
+ have one, and qualify per should_carry_locus_p. */
- if (EXPR_P (t) && ! EXPR_HAS_LOCATION (t) && should_carry_locus_p (t))
+ if (CAN_HAVE_LOCATION_P (t)
+ && ! EXPR_HAS_LOCATION (t) && should_carry_locus_p (t))
SET_EXPR_LOCATION (t, locus);
}
tree t;
/* FIXME should the allowed types go in TREE_TYPE? */
- t = build (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
+ t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
- t = build (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
+ t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
append_to_statement_list (body, &TREE_OPERAND (t, 0));
return t;
tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
- if (!VOID_TYPE_P (TREE_TYPE (wrapper)))
+ tree type = TREE_TYPE (wrapper);
+ /* A NULL type means there is no value to extract; treat as void. */
+ if (type && !VOID_TYPE_P (type))
{
- tree *p, sub = wrapper;
+ tree *p;
- restart:
- /* Set p to point to the body of the wrapper. */
- switch (TREE_CODE (sub))
- {
- case BIND_EXPR:
- /* For a BIND_EXPR, the body is operand 1. */
- p = &BIND_EXPR_BODY (sub);
- break;
-
- default:
- p = &TREE_OPERAND (sub, 0);
- break;
- }
-
- /* Advance to the last statement. Set all container types to void. */
- if (TREE_CODE (*p) == STATEMENT_LIST)
- {
- tree_stmt_iterator i = tsi_last (*p);
- p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
- }
- else
+ /* Set p to point to the body of the wrapper. Loop until we find
+ something that isn't a wrapper. */
+ for (p = &wrapper; p && *p; )
{
- for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
+ switch (TREE_CODE (*p))
{
+ case BIND_EXPR:
+ TREE_SIDE_EFFECTS (*p) = 1;
+ TREE_TYPE (*p) = void_type_node;
+ /* For a BIND_EXPR, the body is operand 1. */
+ p = &BIND_EXPR_BODY (*p);
+ break;
+
+ case CLEANUP_POINT_EXPR:
+ case TRY_FINALLY_EXPR:
+ case TRY_CATCH_EXPR:
TREE_SIDE_EFFECTS (*p) = 1;
TREE_TYPE (*p) = void_type_node;
+ p = &TREE_OPERAND (*p, 0);
+ break;
+
+ case STATEMENT_LIST:
+ {
+ tree_stmt_iterator i = tsi_last (*p);
+ TREE_SIDE_EFFECTS (*p) = 1;
+ TREE_TYPE (*p) = void_type_node;
+ p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
+ }
+ break;
+
+ case COMPOUND_EXPR:
+ /* Advance to the last statement. Set all container types to void. */
+ for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
+ {
+ TREE_SIDE_EFFECTS (*p) = 1;
+ TREE_TYPE (*p) = void_type_node;
+ }
+ break;
+
+ default:
+ goto out;
}
}
+ out:
+ /* P now points at the value-producing statement, or is NULL. */
if (p == NULL || IS_EMPTY_STMT (*p))
- ;
- /* Look through exception handling. */
- else if (TREE_CODE (*p) == TRY_FINALLY_EXPR
- || TREE_CODE (*p) == TRY_CATCH_EXPR)
+ temp = NULL_TREE;
+ else if (temp)
{
- sub = *p;
- goto restart;
- }
- /* The C++ frontend already did this for us. */
- else if (TREE_CODE (*p) == INIT_EXPR
- || TREE_CODE (*p) == TARGET_EXPR)
- temp = TREE_OPERAND (*p, 0);
- /* If we're returning a dereference, move the dereference
- outside the wrapper. */
- else if (TREE_CODE (*p) == INDIRECT_REF)
- {
- tree ptr = TREE_OPERAND (*p, 0);
- temp = create_tmp_var (TREE_TYPE (ptr), "retval");
- *p = build (MODIFY_EXPR, TREE_TYPE (ptr), temp, ptr);
- temp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (temp)), temp);
- /* If this is a BIND_EXPR for a const inline function, it might not
- have TREE_SIDE_EFFECTS set. That is no longer accurate. */
- TREE_SIDE_EFFECTS (wrapper) = 1;
+ /* The wrapper is on the RHS of an assignment that we're pushing
+ down. */
+ gcc_assert (TREE_CODE (temp) == INIT_EXPR
+ || TREE_CODE (temp) == GIMPLE_MODIFY_STMT
+ || TREE_CODE (temp) == MODIFY_EXPR);
+ GENERIC_TREE_OPERAND (temp, 1) = *p;
+ *p = temp;
}
else
{
- if (!temp)
- temp = create_tmp_var (TREE_TYPE (wrapper), "retval");
- *p = build (MODIFY_EXPR, TREE_TYPE (temp), temp, *p);
- TREE_SIDE_EFFECTS (wrapper) = 1;
+ temp = create_tmp_var (type, "retval");
+ *p = build2 (INIT_EXPR, type, temp, *p);
}
- TREE_TYPE (wrapper) = void_type_node;
return temp;
}
tree save_call, tmp_var;
save_call =
- build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_SAVE],
- NULL_TREE);
+ build_call_expr (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
- *save = build (MODIFY_EXPR, ptr_type_node, tmp_var, save_call);
+ *save = build_gimple_modify_stmt (tmp_var, save_call);
*restore =
- build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
- tree_cons (NULL_TREE, tmp_var, NULL_TREE));
+ build_call_expr (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
+ 1, tmp_var);
}
/* Gimplify a BIND_EXPR. Just voidify and recurse. */
static enum gimplify_status
-gimplify_bind_expr (tree *expr_p, tree temp, tree *pre_p)
+gimplify_bind_expr (tree *expr_p, tree *pre_p)
{
tree bind_expr = *expr_p;
bool old_save_stack = gimplify_ctxp->save_stack;
tree t;
- temp = voidify_wrapper_expr (bind_expr, temp);
+ tree temp = voidify_wrapper_expr (bind_expr, NULL);
/* Mark variables seen in this bind expr. */
for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
{
if (TREE_CODE (t) == VAR_DECL)
- DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
+ {
+ struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
+
+ /* Mark variable as local. */
+ if (ctx && !is_global_var (t)
+ && (! DECL_SEEN_IN_BIND_EXPR_P (t)
+ || splay_tree_lookup (ctx->variables,
+ (splay_tree_key) t) == NULL))
+ omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
+
+ DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
+ }
/* Preliminarily mark non-addressed complex variables as eligible
for promotion to gimple registers. We'll transform their uses
as we find them. */
- if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
+ if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
&& !TREE_THIS_VOLATILE (t)
&& (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
&& !needs_to_live_in_memory (t))
- DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
+ DECL_GIMPLE_REG_P (t) = 1;
}
gimple_push_bind_expr (bind_expr);
format of the emitted code: see mx_register_decls(). */
build_stack_save_restore (&stack_save, &stack_restore);
- t = build (TRY_FINALLY_EXPR, void_type_node,
- BIND_EXPR_BODY (bind_expr), NULL_TREE);
+ t = build2 (TRY_FINALLY_EXPR, void_type_node,
+ BIND_EXPR_BODY (bind_expr), NULL_TREE);
append_to_statement_list (stack_restore, &TREE_OPERAND (t, 1));
BIND_EXPR_BODY (bind_expr) = NULL_TREE;
result_decl = NULL_TREE;
else
{
- result_decl = TREE_OPERAND (ret_expr, 0);
+ result_decl = GENERIC_TREE_OPERAND (ret_expr, 0);
if (TREE_CODE (result_decl) == INDIRECT_REF)
/* See through a return by reference. */
result_decl = TREE_OPERAND (result_decl, 0);
gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
+ || TREE_CODE (ret_expr) == GIMPLE_MODIFY_STMT
|| TREE_CODE (ret_expr) == INIT_EXPR)
&& TREE_CODE (result_decl) == RESULT_DECL);
}
else
{
result = create_tmp_var (TREE_TYPE (result_decl), NULL);
+ if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
+ DECL_GIMPLE_REG_P (result) = 1;
/* ??? With complex control flow (usually involving abnormal edges),
we can wind up warning about an uninitialized value for this. Due
gimplify_ctxp->return_temp = result;
}
- /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
+ /* Smash the lhs of the GIMPLE_MODIFY_STMT to the temporary we plan to use.
Then gimplify the whole thing. */
if (result != result_decl)
- TREE_OPERAND (ret_expr, 0) = result;
+ GENERIC_TREE_OPERAND (ret_expr, 0) = result;
gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
if (result == result_decl)
ret_expr = result;
else
- ret_expr = build (MODIFY_EXPR, TREE_TYPE (result), result_decl, result);
+ ret_expr = build_gimple_modify_stmt (result_decl, result);
TREE_OPERAND (stmt, 0) = ret_expr;
return GS_ALL_DONE;
{
tree init = DECL_INITIAL (decl);
- if (!TREE_CONSTANT (DECL_SIZE (decl)))
+ if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
{
/* This is a variable-sized decl. Simplify its size and mark it
for deferred expansion. Note that mudflap depends on the format
of the emitted code: see mx_register_decls(). */
- tree t, args, addr, ptr_type;
+ tree t, addr, ptr_type;
gimplify_one_sizepos (&DECL_SIZE (decl), stmt_p);
gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), stmt_p);
SET_DECL_VALUE_EXPR (decl, t);
DECL_HAS_VALUE_EXPR_P (decl) = 1;
- args = tree_cons (NULL, DECL_SIZE_UNIT (decl), NULL);
t = built_in_decls[BUILT_IN_ALLOCA];
- t = build_function_call_expr (t, args);
+ t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
t = fold_convert (ptr_type, t);
- t = build2 (MODIFY_EXPR, void_type_node, addr, t);
+ t = build_gimple_modify_stmt (addr, t);
gimplify_and_add (t, stmt_p);
if (!TREE_STATIC (decl))
{
DECL_INITIAL (decl) = NULL_TREE;
- init = build (MODIFY_EXPR, void_type_node, decl, init);
+ init = build2 (INIT_EXPR, void_type_node, decl, init);
gimplify_and_add (init, stmt_p);
}
else
walk_tree (&init, force_labels_r, NULL, NULL);
}
- /* This decl isn't mentioned in the enclosing block, so add it to the
- list of temps. FIXME it seems a bit of a kludge to say that
- anonymous artificial vars aren't pushed, but everything else is. */
- if (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
+ /* Some front ends do not explicitly declare all anonymous
+ artificial variables. We compensate here by declaring the
+ variables, though it would be better if the front ends would
+ explicitly declare them. */
+ if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
+ && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
gimple_add_tmp_var (decl);
}
static int
compare_case_labels (const void *p1, const void *p2)
{
+ /* qsort comparator: order CASE_LABEL_EXPRs by their CASE_LOW value. */
- tree case1 = *(tree *)p1;
- tree case2 = *(tree *)p2;
+ const_tree const case1 = *(const_tree const*)p1;
+ const_tree const case2 = *(const_tree const*)p2;
return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
}
labels = gimplify_ctxp->case_labels;
gimplify_ctxp->case_labels = saved_labels;
- len = VEC_length (tree, labels);
-
- for (i = 0; i < len; ++i)
+ i = 0;
+ while (i < VEC_length (tree, labels))
{
- tree t = VEC_index (tree, labels, i);
- if (!CASE_LOW (t))
+ tree elt = VEC_index (tree, labels, i);
+ tree low = CASE_LOW (elt);
+ bool remove_element = FALSE;
+
+ if (low)
+ {
+ /* Discard empty ranges. */
+ tree high = CASE_HIGH (elt);
+ if (high && INT_CST_LT (high, low))
+ remove_element = TRUE;
+ }
+ else
{
/* The default case must be the last label in the list. */
- default_case = t;
- VEC_replace (tree, labels, i, VEC_index (tree, labels, len - 1));
- len--;
- break;
+ gcc_assert (!default_case);
+ default_case = elt;
+ remove_element = TRUE;
}
+
+ if (remove_element)
+ VEC_ordered_remove (tree, labels, i);
+ else
+ i++;
}
+ len = i;
label_vec = make_tree_vec (len + 1);
SWITCH_LABELS (*expr_p) = label_vec;
{
/* If the switch has no default label, add one, so that we jump
around the switch body. */
- default_case = build (CASE_LABEL_EXPR, void_type_node, NULL_TREE,
- NULL_TREE, create_artificial_label ());
+ default_case = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE,
+ NULL_TREE, create_artificial_label ());
append_to_statement_list (SWITCH_BODY (switch_expr), pre_p);
- *expr_p = build (LABEL_EXPR, void_type_node,
- CASE_LABEL (default_case));
+ *expr_p = build1 (LABEL_EXPR, void_type_node,
+ CASE_LABEL (default_case));
}
else
*expr_p = SWITCH_BODY (switch_expr);
gimplify_case_label_expr (tree *expr_p)
{
tree expr = *expr_p;
+ struct gimplify_ctx *ctxp;
+
+ /* Invalid OpenMP programs can play Duff's Device type games with
+ #pragma omp parallel. At least in the C front end, we don't
+ detect such invalid branches until after gimplification. */
+ for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
+ if (ctxp->case_labels)
+ break;
- gcc_assert (gimplify_ctxp->case_labels);
- VEC_safe_push (tree, heap, gimplify_ctxp->case_labels, expr);
- *expr_p = build (LABEL_EXPR, void_type_node, CASE_LABEL (expr));
+ /* Record the label in the enclosing switch's vector and replace
+ the CASE_LABEL_EXPR by a plain LABEL_EXPR. */
+ VEC_safe_push (tree, heap, ctxp->case_labels, expr);
+ *expr_p = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (expr));
return GS_ALL_DONE;
}
tree expr;
expr = build_and_jump (&gimplify_ctxp->exit_label);
- expr = build (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
+ expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
*expr_p = expr;
return GS_OK;
/* Both cast and addr_expr types should address the same object type. */
dctype = TREE_TYPE (ctype);
ddatype = TREE_TYPE (datype);
- if (!lang_hooks.types_compatible_p (ddatype, dctype))
+ if (!useless_type_conversion_p (dctype, ddatype))
return;
/* The addr_expr and the object type should match. */
obj_expr = TREE_OPERAND (addr_expr, 0);
otype = TREE_TYPE (obj_expr);
- if (!lang_hooks.types_compatible_p (otype, datype))
+ if (!useless_type_conversion_p (datype, otype))
return;
/* The lower bound and element sizes must be constant. */
/* All checks succeeded. Build a new node to merge the cast. */
*expr_p = build4 (ARRAY_REF, dctype, obj_expr,
TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
- TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
- size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (dctype),
- size_int (TYPE_ALIGN_UNIT (dctype))));
+ NULL_TREE, NULL_TREE);
*expr_p = build1 (ADDR_EXPR, ctype, *expr_p);
}
static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
+ tree tem;
gcc_assert (TREE_CODE (*expr_p) == NOP_EXPR
|| TREE_CODE (*expr_p) == CONVERT_EXPR);
+ /* Strip away as many useless type conversions as possible at the
+ toplevel. */
if (tree_ssa_useless_type_conversion (*expr_p))
*expr_p = TREE_OPERAND (*expr_p, 0);
+ /* Attempt to avoid NOP_EXPR by producing reference to a subtype.
+ For example this fold (subclass *)&A into &A->subclass avoiding
+ a need for statement. */
+ if (TREE_CODE (*expr_p) == NOP_EXPR
+ && POINTER_TYPE_P (TREE_TYPE (*expr_p))
+ && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
+ && (tem = maybe_fold_offset_to_reference
+ (TREE_OPERAND (*expr_p, 0),
+ integer_zero_node, TREE_TYPE (TREE_TYPE (*expr_p)))))
+ {
+ tree ptr_type = build_pointer_type (TREE_TYPE (tem));
+ if (useless_type_conversion_p (TREE_TYPE (*expr_p), ptr_type))
+ *expr_p = build_fold_addr_expr_with_type (tem, ptr_type);
+ }
+
/* If we still have a conversion at the toplevel,
then canonicalize some constructs. */
if (TREE_CODE (*expr_p) == NOP_EXPR || TREE_CODE (*expr_p) == CONVERT_EXPR)
return GS_OK;
}
+/* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
+ DECL_VALUE_EXPR, and it's worth re-examining things. */
+
+static enum gimplify_status
+gimplify_var_or_parm_decl (tree *expr_p)
+{
+ tree decl = *expr_p;
+
+ /* ??? If this is a local variable, and it has not been seen in any
+ outer BIND_EXPR, then it's probably the result of a duplicate
+ declaration, for which we've already issued an error. It would
+ be really nice if the front end wouldn't leak these at all.
+ Currently the only known culprit is C++ destructors, as seen
+ in g++.old-deja/g++.jason/binding.C. */
+ if (TREE_CODE (decl) == VAR_DECL
+ && !DECL_SEEN_IN_BIND_EXPR_P (decl)
+ && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
+ && decl_function_context (decl) == current_function_decl)
+ {
+ gcc_assert (errorcount || sorrycount);
+ return GS_ERROR;
+ }
+
+ /* When within an OpenMP context, notice uses of variables. */
+ if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
+ return GS_ALL_DONE;
+
+ /* If the decl is an alias for another expression, substitute it now. */
+ if (DECL_HAS_VALUE_EXPR_P (decl))
+ {
+ /* Unshare so later gimplification may modify the copy freely. */
+ *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
+ return GS_OK;
+ }
+
+ return GS_ALL_DONE;
+}
+
+
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
node pointed to by EXPR_P.
/* We can handle anything that get_inner_reference can deal with. */
for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
{
+ restart:
/* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
if (TREE_CODE (*p) == INDIRECT_REF)
*p = fold_indirect_ref (*p);
- if (!handled_component_p (*p))
+
+ if (handled_component_p (*p))
+ ;
+ /* Expand DECL_VALUE_EXPR now. In some cases that may expose
+ additional COMPONENT_REFs. */
+ else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
+ && gimplify_var_or_parm_decl (p) == GS_OK)
+ goto restart;
+ else
break;
+
VEC_safe_push (tree, heap, stack, *p);
}
}
}
- /* Step 2 is to gimplify the base expression. */
- tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, fallback);
+ /* Step 2 is to gimplify the base expression. Make sure lvalue is set
+ so as to match the min_lval predicate. Failure to do so may result
+ in the creation of large aggregate temporaries. */
+ tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
+ fallback | fb_lvalue);
ret = MIN (ret, tret);
/* And finally, the indices and operands to BIT_FIELD_REF. During this
bool want_value)
{
enum tree_code code;
- tree lhs, lvalue, rhs, t1;
+ tree lhs, lvalue, rhs, t1, post = NULL, *orig_post_p = post_p;
bool postfix;
enum tree_code arith_code;
enum gimplify_status ret;
else
postfix = false;
+ /* For postfix, make sure the inner expression's post side effects
+ are executed after side effects from this expression. */
+ if (postfix)
+ post_p = &post;
+
/* Add or subtract? */
if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
arith_code = PLUS_EXPR;
return ret;
}
- t1 = build (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
- t1 = build (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
+ /* For POINTERs increment, use POINTER_PLUS_EXPR. */
+ if (POINTER_TYPE_P (TREE_TYPE (lhs)))
+ {
+ rhs = fold_convert (sizetype, rhs);
+ if (arith_code == MINUS_EXPR)
+ rhs = fold_build1 (NEGATE_EXPR, TREE_TYPE (rhs), rhs);
+ arith_code = POINTER_PLUS_EXPR;
+ }
+
+ t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
+ t1 = build_gimple_modify_stmt (lvalue, t1);
if (postfix)
{
- gimplify_and_add (t1, post_p);
+ gimplify_and_add (t1, orig_post_p);
+ append_to_statement_list (post, orig_post_p);
*expr_p = lhs;
return GS_ALL_DONE;
}
static enum gimplify_status
gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
{
- tree decl;
- tree arglist;
+ tree decl, parms, p;
enum gimplify_status ret;
+ int i, nargs;
gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
decl = get_callee_fndecl (*expr_p);
if (decl && DECL_BUILT_IN (decl))
{
- tree fndecl = get_callee_fndecl (*expr_p);
- tree arglist = TREE_OPERAND (*expr_p, 1);
- tree new = fold_builtin (fndecl, arglist, !want_value);
+ tree new = fold_call_expr (*expr_p, !want_value);
if (new && new != *expr_p)
{
if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_START)
{
- if (!arglist || !TREE_CHAIN (arglist))
+ if (call_expr_nargs (*expr_p) < 2)
{
error ("too few arguments to function %<va_start%>");
*expr_p = build_empty_stmt ();
return GS_OK;
}
- if (fold_builtin_next_arg (TREE_CHAIN (arglist)))
+ if (fold_builtin_next_arg (*expr_p, true))
{
*expr_p = build_empty_stmt ();
return GS_OK;
}
/* Avoid gimplifying the second argument to va_start, which needs
to be the plain PARM_DECL. */
- return gimplify_arg (&TREE_VALUE (TREE_OPERAND (*expr_p, 1)), pre_p);
+ return gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p);
}
}
/* There is a sequence point before the call, so any side effects in
the calling expression must occur before the actual call. Force
gimplify_expr to use an internal post queue. */
- ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, NULL,
+ ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
is_gimple_call_addr, fb_rvalue);
- if (PUSH_ARGS_REVERSED)
- TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
- for (arglist = TREE_OPERAND (*expr_p, 1); arglist;
- arglist = TREE_CHAIN (arglist))
+ nargs = call_expr_nargs (*expr_p);
+
+ /* Get argument types for verification. */
+ decl = get_callee_fndecl (*expr_p);
+ parms = NULL_TREE;
+ if (decl)
+ parms = TYPE_ARG_TYPES (TREE_TYPE (decl));
+ else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
+ parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
+
+ /* Verify if the type of the argument matches that of the function
+ declaration. If we cannot verify this or there is a mismatch,
+ mark the call expression so it doesn't get inlined later. */
+ if (decl && DECL_ARGUMENTS (decl))
+ {
+ for (i = 0, p = DECL_ARGUMENTS (decl); i < nargs;
+ i++, p = TREE_CHAIN (p))
+ {
+ /* We cannot distinguish a varargs function from the case
+ of excess parameters, still deferring the inlining decision
+ to the callee is possible. */
+ if (!p)
+ break;
+ if (p == error_mark_node
+ || CALL_EXPR_ARG (*expr_p, i) == error_mark_node
+ || !fold_convertible_p (DECL_ARG_TYPE (p),
+ CALL_EXPR_ARG (*expr_p, i)))
+ {
+ CALL_CANNOT_INLINE_P (*expr_p) = 1;
+ break;
+ }
+ }
+ }
+ else if (parms)
+ {
+ for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
+ {
+ /* If this is a varargs function defer inlining decision
+ to callee. */
+ if (!p)
+ break;
+ if (TREE_VALUE (p) == error_mark_node
+ || CALL_EXPR_ARG (*expr_p, i) == error_mark_node
+ || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
+ || !fold_convertible_p (TREE_VALUE (p),
+ CALL_EXPR_ARG (*expr_p, i)))
+ {
+ CALL_CANNOT_INLINE_P (*expr_p) = 1;
+ break;
+ }
+ }
+ }
+ else if (nargs != 0)
+ CALL_CANNOT_INLINE_P (*expr_p) = 1;
+
+ /* Finally, gimplify the function arguments. */
+ for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
+ PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
+ PUSH_ARGS_REVERSED ? i-- : i++)
{
enum gimplify_status t;
- t = gimplify_arg (&TREE_VALUE (arglist), pre_p);
+ t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p);
if (t == GS_ERROR)
ret = GS_ERROR;
}
- if (PUSH_ARGS_REVERSED)
- TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
/* Try this again in case gimplification exposed something. */
- if (ret != GS_ERROR && decl && DECL_BUILT_IN (decl))
+ if (ret != GS_ERROR)
{
- tree fndecl = get_callee_fndecl (*expr_p);
- tree arglist = TREE_OPERAND (*expr_p, 1);
- tree new = fold_builtin (fndecl, arglist, !want_value);
+ tree new = fold_call_expr (*expr_p, !want_value);
if (new && new != *expr_p)
{
if (b) goto yes; else goto no;
else
if (c) goto yes; else goto no; */
- expr = build (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
- shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
- false_label_p),
- shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
- false_label_p));
+ expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
+ shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
+ false_label_p),
+ shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
+ false_label_p));
}
else
{
- expr = build (COND_EXPR, void_type_node, pred,
- build_and_jump (true_label_p),
- build_and_jump (false_label_p));
+ expr = build3 (COND_EXPR, void_type_node, pred,
+ build_and_jump (true_label_p),
+ build_and_jump (false_label_p));
}
if (local_label)
then_ = shortcut_cond_expr (expr);
then_se = then_ && TREE_SIDE_EFFECTS (then_);
pred = TREE_OPERAND (pred, 0);
- expr = build (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
+ expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
}
}
if (!then_se)
else_ = shortcut_cond_expr (expr);
else_se = else_ && TREE_SIDE_EFFECTS (else_);
pred = TREE_OPERAND (pred, 0);
- expr = build (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
+ expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
}
}
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
-static tree
+tree
gimple_boolify (tree expr)
{
tree type = TREE_TYPE (expr);
default:
/* Other expressions that get here must have boolean values, but
might need to be converted to the appropriate mode. */
- return convert (boolean_type_node, expr);
+ return fold_convert (boolean_type_node, expr);
}
}
TARGET is the tree for T1 above.
PRE_P points to the list where side effects that must happen before
- *EXPR_P should be stored.
-
- POST_P points to the list where side effects that must happen after
- *EXPR_P should be stored. */
+ *EXPR_P should be stored. */
static enum gimplify_status
-gimplify_cond_expr (tree *expr_p, tree *pre_p, tree *post_p, tree target,
- fallback_t fallback)
+gimplify_cond_expr (tree *expr_p, tree *pre_p, fallback_t fallback)
{
tree expr = *expr_p;
tree tmp, tmp2, type;
{
tree result;
- if (target)
- {
- ret = gimplify_expr (&target, pre_p, post_p,
- is_gimple_min_lval, fb_lvalue);
- if (ret != GS_ERROR)
- ret = GS_OK;
- result = tmp = target;
- tmp2 = unshare_expr (target);
- }
- else if ((fallback & fb_lvalue) == 0)
+ if ((fallback & fb_lvalue) == 0)
{
result = tmp2 = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
ret = GS_ALL_DONE;
tmp2 = tmp = create_tmp_var (type, "iftmp");
- expr = build (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
- TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
+ expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
+ TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
result = build_fold_indirect_ref (tmp);
ret = GS_ALL_DONE;
if this branch is void; in C++ it can be, if it's a throw. */
if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
TREE_OPERAND (expr, 1)
- = build (MODIFY_EXPR, void_type_node, tmp, TREE_OPERAND (expr, 1));
+ = build_gimple_modify_stmt (tmp, TREE_OPERAND (expr, 1));
/* Build the else clause, 't1 = b;'. */
if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
TREE_OPERAND (expr, 2)
- = build (MODIFY_EXPR, void_type_node, tmp2, TREE_OPERAND (expr, 2));
+ = build_gimple_modify_stmt (tmp2, TREE_OPERAND (expr, 2));
TREE_TYPE (expr) = void_type_node;
recalculate_side_effects (expr);
static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value)
{
- tree args, t, to, to_ptr, from;
-
- to = TREE_OPERAND (*expr_p, 0);
- from = TREE_OPERAND (*expr_p, 1);
+ tree t, to, to_ptr, from, from_ptr;
- args = tree_cons (NULL, size, NULL);
+ to = GENERIC_TREE_OPERAND (*expr_p, 0);
+ from = GENERIC_TREE_OPERAND (*expr_p, 1);
- t = build_fold_addr_expr (from);
- args = tree_cons (NULL, t, args);
+ from_ptr = build_fold_addr_expr (from);
to_ptr = build_fold_addr_expr (to);
- args = tree_cons (NULL, to_ptr, args);
t = implicit_built_in_decls[BUILT_IN_MEMCPY];
- t = build_function_call_expr (t, args);
+ t = build_call_expr (t, 3, to_ptr, from_ptr, size);
if (want_value)
{
static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value)
{
- tree args, t, to, to_ptr;
+ tree t, to, to_ptr;
- to = TREE_OPERAND (*expr_p, 0);
-
- args = tree_cons (NULL, size, NULL);
-
- args = tree_cons (NULL, integer_zero_node, args);
+ to = GENERIC_TREE_OPERAND (*expr_p, 0);
to_ptr = build_fold_addr_expr (to);
- args = tree_cons (NULL, to_ptr, args);
t = implicit_built_in_decls[BUILT_IN_MEMSET];
- t = build_function_call_expr (t, args);
+ t = build_call_expr (t, 3, to_ptr, integer_zero_node, size);
if (want_value)
{
&& alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
return t;
+ /* If the constructor component is a call, determine if it can hide a
+ potential overlap with the lhs through an INDIRECT_REF like above. */
+ if (TREE_CODE (t) == CALL_EXPR)
+ {
+ tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
+
+ for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
+ if (POINTER_TYPE_P (TREE_VALUE (type))
+ && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
+ && alias_sets_conflict_p (data->lhs_alias_set,
+ get_alias_set
+ (TREE_TYPE (TREE_VALUE (type)))))
+ return t;
+ }
+
if (IS_TYPE_OR_DECL_P (t))
*walk_subtrees = 0;
return NULL;
return;
}
- /* We can't preevaluate if the type contains a placeholder. */
- if (type_contains_placeholder_p (TREE_TYPE (*expr_p)))
- return;
+ /* If this is a variable sized type, we must remember the size. */
+ maybe_with_size_expr (expr_p);
/* Gimplify the constructor element to something appropriate for the rhs
of a MODIFY_EXPR. Given that we know the lhs is an aggregate, we know
/* If this is of variable size, we have no choice but to assume it doesn't
overlap since we can't make a temporary for it. */
- if (!TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (*expr_p))))
+ if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
return;
/* Otherwise, we must search for overlap ... */
tree *pre_p, bool cleared)
{
tree loop_entry_label, loop_exit_label;
- tree var, var_type, cref;
+ tree var, var_type, cref, tmp;
loop_entry_label = create_artificial_label ();
loop_exit_label = create_artificial_label ();
/* Create and initialize the index variable. */
var_type = TREE_TYPE (upper);
var = create_tmp_var (var_type, NULL);
- append_to_statement_list (build2 (MODIFY_EXPR, var_type, var, lower), pre_p);
+ append_to_statement_list (build_gimple_modify_stmt (var, lower), pre_p);
/* Add the loop entry label. */
append_to_statement_list (build1 (LABEL_EXPR,
gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
pre_p, cleared);
else
- append_to_statement_list (build2 (MODIFY_EXPR, TREE_TYPE (cref),
- cref, value),
- pre_p);
+ append_to_statement_list (build_gimple_modify_stmt (cref, value), pre_p);
/* We exit the loop when the index var is equal to the upper bound. */
gimplify_and_add (build3 (COND_EXPR, void_type_node,
pre_p);
/* Otherwise, increment the index var... */
- append_to_statement_list (build2 (MODIFY_EXPR, var_type, var,
- build2 (PLUS_EXPR, var_type, var,
- fold_convert (var_type,
- integer_one_node))),
- pre_p);
+ tmp = build2 (PLUS_EXPR, var_type, var,
+ fold_convert (var_type, integer_one_node));
+ append_to_statement_list (build_gimple_modify_stmt (var, tmp), pre_p);
/* ...and jump back to the loop entry. */
append_to_statement_list (build1 (GOTO_EXPR,
so we don't have to figure out what's missing ourselves. */
gcc_assert (purpose);
- if (zero_sized_field_decl (purpose))
+ /* Skip zero-sized fields, unless value has side-effects. This can
+ happen with calls to functions returning a zero-sized type, which
+ we shouldn't discard. As a number of downstream passes don't
+ expect sets of zero-sized fields, we rely on the gimplification of
+ the MODIFY_EXPR we make below to drop the assignment statement. */
+ if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
continue;
/* If we have a RANGE_EXPR, we have to build a loop to assign the
if (array_elt_type)
{
- cref = build (ARRAY_REF, array_elt_type, unshare_expr (object),
- purpose, NULL_TREE, NULL_TREE);
+ cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
+ purpose, NULL_TREE, NULL_TREE);
}
else
{
gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
- cref = build (COMPONENT_REF, TREE_TYPE (purpose),
- unshare_expr (object), purpose, NULL_TREE);
+ cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
+ unshare_expr (object), purpose, NULL_TREE);
}
if (TREE_CODE (value) == CONSTRUCTOR
pre_p, cleared);
else
{
- init = build (MODIFY_EXPR, TREE_TYPE (cref), cref, value);
+ init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
gimplify_and_add (init, pre_p);
}
}
tree *post_p, bool want_value)
{
tree object;
- tree ctor = TREE_OPERAND (*expr_p, 1);
+ tree ctor = GENERIC_TREE_OPERAND (*expr_p, 1);
tree type = TREE_TYPE (ctor);
enum gimplify_status ret;
VEC(constructor_elt,gc) *elts;
if (TREE_CODE (ctor) != CONSTRUCTOR)
return GS_UNHANDLED;
- ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
+ ret = gimplify_expr (&GENERIC_TREE_OPERAND (*expr_p, 0), pre_p, post_p,
is_gimple_lvalue, fb_lvalue);
if (ret == GS_ERROR)
return ret;
- object = TREE_OPERAND (*expr_p, 0);
+ object = GENERIC_TREE_OPERAND (*expr_p, 0);
elts = CONSTRUCTOR_ELTS (ctor);
{
struct gimplify_init_ctor_preeval_data preeval_data;
HOST_WIDE_INT num_type_elements, num_ctor_elements;
- HOST_WIDE_INT num_nonzero_elements, num_nonconstant_elements;
- bool cleared;
+ HOST_WIDE_INT num_nonzero_elements;
+ bool cleared, valid_const_initializer;
/* Aggregate types must lower constructors to initialization of
individual elements. The exception is that a CONSTRUCTOR node
if (VEC_empty (constructor_elt, elts))
break;
- categorize_ctor_elements (ctor, &num_nonzero_elements,
- &num_nonconstant_elements,
- &num_ctor_elements, &cleared);
+ /* Fetch information about the constructor to direct later processing.
+ We might want to make static versions of it in various cases, and
+ can only do so if it is known to be a valid constant initializer. */
+ valid_const_initializer
+ = categorize_ctor_elements (ctor, &num_nonzero_elements,
+ &num_ctor_elements, &cleared);
/* If a const aggregate variable is being initialized, then it
should never be a lose to promote the variable to be static. */
- if (num_nonconstant_elements == 0
+ if (valid_const_initializer
&& num_nonzero_elements > 1
&& TREE_READONLY (object)
&& TREE_CODE (object) == VAR_DECL)
for sparse arrays, though, as it's more efficient to follow
the standard CONSTRUCTOR behavior of memset followed by
individual element initialization. */
- if (num_nonconstant_elements == 0 && !cleared)
+ if (valid_const_initializer && !cleared)
{
HOST_WIDE_INT size = int_size_in_bytes (type);
unsigned int align;
}
walk_tree (&DECL_INITIAL (new), force_labels_r, NULL, NULL);
- TREE_OPERAND (*expr_p, 1) = new;
+ GENERIC_TREE_OPERAND (*expr_p, 1) = new;
/* This is no longer an assignment of a CONSTRUCTOR, but
we still may have processing to do on the LHS. So
}
}
+ /* If there are nonzero elements, pre-evaluate to capture elements
+ overlapping with the lhs into temporaries. We must do this before
+ clearing to fetch the values before they are zeroed-out. */
+ if (num_nonzero_elements > 0)
+ {
+ preeval_data.lhs_base_decl = get_base_address (object);
+ if (!DECL_P (preeval_data.lhs_base_decl))
+ preeval_data.lhs_base_decl = NULL;
+ preeval_data.lhs_alias_set = get_alias_set (object);
+
+ gimplify_init_ctor_preeval (&GENERIC_TREE_OPERAND (*expr_p, 1),
+ pre_p, post_p, &preeval_data);
+ }
+
if (cleared)
{
/* Zap the CONSTRUCTOR element list, which simplifies this case.
elements in the constructor, add assignments to the individual
scalar fields of the object. */
if (!cleared || num_nonzero_elements > 0)
- {
- preeval_data.lhs_base_decl = get_base_address (object);
- if (!DECL_P (preeval_data.lhs_base_decl))
- preeval_data.lhs_base_decl = NULL;
- preeval_data.lhs_alias_set = get_alias_set (object);
-
- gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
- pre_p, post_p, &preeval_data);
- gimplify_init_ctor_eval (object, elts, pre_p, cleared);
- }
+ gimplify_init_ctor_eval (object, elts, pre_p, cleared);
*expr_p = NULL_TREE;
}
i = VEC_index (constructor_elt, elts, 1)->value;
if (r == NULL || i == NULL)
{
- tree zero = convert (TREE_TYPE (type), integer_zero_node);
+ tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
if (r == NULL)
r = zero;
if (i == NULL)
}
else
{
- ctor = build (COMPLEX_EXPR, type, r, i);
+ ctor = build2 (COMPLEX_EXPR, type, r, i);
TREE_OPERAND (*expr_p, 1) = ctor;
ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
break;
}
+
+ /* Don't reduce a TREE_CONSTANT vector ctor even if we can't
+ make a VECTOR_CST. It won't do anything for us, and it'll
+ prevent us from representing it as a single constant. */
+ break;
}
/* Vector types use CONSTRUCTOR all the way through gimple
if (tret == GS_ERROR)
ret = GS_ERROR;
}
+ if (!is_gimple_reg (GENERIC_TREE_OPERAND (*expr_p, 0)))
+ GENERIC_TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
}
break;
tree sub = t;
tree subtype;
- STRIP_NOPS (sub);
+ STRIP_USELESS_TYPE_CONVERSION (sub);
subtype = TREE_TYPE (sub);
if (!POINTER_TYPE_P (subtype))
return NULL_TREE;
tree op = TREE_OPERAND (sub, 0);
tree optype = TREE_TYPE (op);
/* *&p => p */
- if (lang_hooks.types_compatible_p (type, optype))
+ if (useless_type_conversion_p (type, optype))
return op;
/* *(foo *)&fooarray => fooarray[0] */
else if (TREE_CODE (optype) == ARRAY_TYPE
- && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
+ && useless_type_conversion_p (type, TREE_TYPE (optype)))
{
tree type_domain = TYPE_DOMAIN (optype);
tree min_val = size_zero_node;
/* *(foo *)fooarrptr => (*fooarrptr)[0] */
if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
- && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
+ && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
{
tree type_domain;
tree min_val = size_zero_node;
+ tree osub = sub;
sub = fold_indirect_ref_rhs (sub);
if (! sub)
- sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), sub);
+ sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
if (type_domain && TYPE_MIN_VALUE (type_domain))
min_val = TYPE_MIN_VALUE (type_domain);
copy in other cases as well. */
if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
{
- *expr_p = *from_p;
- return gimplify_cond_expr (expr_p, pre_p, post_p, *to_p,
- fb_rvalue);
+ /* This code should mirror the code in gimplify_cond_expr. */
+ enum tree_code code = TREE_CODE (*expr_p);
+ tree cond = *from_p;
+ tree result = *to_p;
+
+ ret = gimplify_expr (&result, pre_p, post_p,
+ is_gimple_min_lval, fb_lvalue);
+ if (ret != GS_ERROR)
+ ret = GS_OK;
+
+ if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
+ TREE_OPERAND (cond, 1)
+ = build2 (code, void_type_node, result,
+ TREE_OPERAND (cond, 1));
+ if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
+ TREE_OPERAND (cond, 2)
+ = build2 (code, void_type_node, unshare_expr (result),
+ TREE_OPERAND (cond, 2));
+
+ TREE_TYPE (cond) = void_type_node;
+ recalculate_side_effects (cond);
+
+ if (want_value)
+ {
+ gimplify_and_add (cond, pre_p);
+ *expr_p = unshare_expr (result);
+ }
+ else
+ *expr_p = cond;
+ return ret;
}
else
ret = GS_UNHANDLED;
{
bool use_target;
- if (TREE_CODE (*to_p) == RESULT_DECL
- && needs_to_live_in_memory (*to_p))
- /* It's always OK to use the return slot directly. */
+ if (!(rhs_predicate_for (*to_p))(*from_p))
+ /* If we need a temporary, *to_p isn't accurate. */
+ use_target = false;
+ else if (TREE_CODE (*to_p) == RESULT_DECL
+ && DECL_NAME (*to_p) == NULL_TREE
+ && needs_to_live_in_memory (*to_p))
+ /* It's OK to use the return slot directly unless it's an NRV. */
+ use_target = true;
+ else if (is_gimple_reg_type (TREE_TYPE (*to_p))
+ || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
+ /* Don't force regs into memory. */
+ use_target = false;
+ else if (TREE_CODE (*to_p) == VAR_DECL
+ && DECL_GIMPLE_FORMAL_TEMP_P (*to_p))
+ /* Don't use the original target if it's a formal temp; we
+ don't want to take their addresses. */
+ use_target = false;
+ else if (TREE_CODE (*expr_p) == INIT_EXPR)
+ /* It's OK to use the target directly if it's being
+ initialized. */
use_target = true;
else if (!is_gimple_non_addressable (*to_p))
/* Don't use the original target if it's already addressable;
When optimizing, the return_slot pass marks more functions
as safe after we have escape info. */
use_target = false;
- else if (TREE_CODE (*to_p) != PARM_DECL
- && DECL_GIMPLE_FORMAL_TEMP_P (*to_p))
- /* Don't use the original target if it's a formal temp; we
- don't want to take their addresses. */
- use_target = false;
- else if (is_gimple_reg_type (TREE_TYPE (*to_p)))
- /* Also don't force regs into memory. */
- use_target = false;
else
use_target = true;
if (use_target)
{
CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
- lang_hooks.mark_addressable (*to_p);
+ mark_addressable (*to_p);
}
}
ret = GS_UNHANDLED;
break;
- default:
- ret = GS_UNHANDLED;
- break;
- }
+ /* If we're initializing from a container, push the initialization
+ inside it. */
+ case CLEANUP_POINT_EXPR:
+ case BIND_EXPR:
+ case STATEMENT_LIST:
+ {
+ tree wrap = *from_p;
+ tree t;
+
+ ret = gimplify_expr (to_p, pre_p, post_p,
+ is_gimple_min_lval, fb_lvalue);
+ if (ret != GS_ERROR)
+ ret = GS_OK;
+
+ t = voidify_wrapper_expr (wrap, *expr_p);
+ gcc_assert (t == *expr_p);
+
+ if (want_value)
+ {
+ gimplify_and_add (wrap, pre_p);
+ *expr_p = unshare_expr (*to_p);
+ }
+ else
+ *expr_p = wrap;
+ return GS_OK;
+ }
+
+ default:
+ ret = GS_UNHANDLED;
+ break;
+ }
return ret;
}
+/* Destructively convert the TREE pointer in TP into a gimple tuple if
+ appropriate, i.e. rewrite a MODIFY_EXPR in place as the equivalent
+ GIMPLE_MODIFY_STMT tuple. Nodes that are already tuples, and nodes
+ of any other code, are left untouched. */
+
+static void
+tree_to_gimple_tuple (tree *tp)
+{
+
+ switch (TREE_CODE (*tp))
+ {
+ case GIMPLE_MODIFY_STMT:
+ /* Already in tuple form; nothing to do. */
+ return;
+ case MODIFY_EXPR:
+ {
+ struct gimple_stmt *gs;
+ tree lhs = TREE_OPERAND (*tp, 0);
+ bool def_stmt_self_p = false;
+
+ /* Remember whether *TP is the defining statement of its own
+ LHS SSA name, so the DEF_STMT link can be repointed at the
+ replacement tuple below. */
+ if (TREE_CODE (lhs) == SSA_NAME)
+ {
+ if (SSA_NAME_DEF_STMT (lhs) == *tp)
+ def_stmt_self_p = true;
+ }
+
+ gs = &make_node (GIMPLE_MODIFY_STMT)->gstmt;
+ gs->base = (*tp)->base;
+ /* The set to base above overwrites the CODE. */
+ TREE_SET_CODE ((tree) gs, GIMPLE_MODIFY_STMT);
+
+ /* Carry over location, both operands, and the enclosing block
+ from the original MODIFY_EXPR. */
+ gs->locus = EXPR_LOCUS (*tp);
+ gs->operands[0] = TREE_OPERAND (*tp, 0);
+ gs->operands[1] = TREE_OPERAND (*tp, 1);
+ gs->block = TREE_BLOCK (*tp);
+ *tp = (tree)gs;
+
+ /* If we re-gimplify a set to an SSA_NAME, we must change the
+ SSA name's DEF_STMT link. */
+ if (def_stmt_self_p)
+ SSA_NAME_DEF_STMT (GIMPLE_STMT_OPERAND (*tp, 0)) = *tp;
+
+ return;
+ }
+ default:
+ break;
+ }
+}
+
/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
- DECL_COMPLEX_GIMPLE_REG_P set. */
+ DECL_GIMPLE_REG_P set. */
static enum gimplify_status
gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value)
enum tree_code code, ocode;
tree lhs, rhs, new_rhs, other, realpart, imagpart;
- lhs = TREE_OPERAND (*expr_p, 0);
- rhs = TREE_OPERAND (*expr_p, 1);
+ lhs = GENERIC_TREE_OPERAND (*expr_p, 0);
+ rhs = GENERIC_TREE_OPERAND (*expr_p, 1);
code = TREE_CODE (lhs);
lhs = TREE_OPERAND (lhs, 0);
else
new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
- TREE_OPERAND (*expr_p, 0) = lhs;
- TREE_OPERAND (*expr_p, 1) = new_rhs;
+ GENERIC_TREE_OPERAND (*expr_p, 0) = lhs;
+ GENERIC_TREE_OPERAND (*expr_p, 1) = new_rhs;
if (want_value)
{
+ tree_to_gimple_tuple (expr_p);
+
append_to_statement_list (*expr_p, pre_p);
*expr_p = rhs;
}
static enum gimplify_status
gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
{
- tree *from_p = &TREE_OPERAND (*expr_p, 1);
- tree *to_p = &TREE_OPERAND (*expr_p, 0);
+ tree *from_p = &GENERIC_TREE_OPERAND (*expr_p, 1);
+ tree *to_p = &GENERIC_TREE_OPERAND (*expr_p, 0);
enum gimplify_status ret = GS_UNHANDLED;
gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
+ || TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT
|| TREE_CODE (*expr_p) == INIT_EXPR);
- /* The distinction between MODIFY_EXPR and INIT_EXPR is no longer useful. */
- if (TREE_CODE (*expr_p) == INIT_EXPR)
- TREE_SET_CODE (*expr_p, MODIFY_EXPR);
-
/* For zero sized types only gimplify the left hand side and right hand side
as statements and throw away the assignment. */
if (zero_sized_type (TREE_TYPE (*from_p)))
*to_p = make_ssa_name (*to_p, *expr_p);
}
+ /* Try to alleviate the effects of the gimplification creating artificial
+ temporaries (see for example is_gimple_reg_rhs) on the debug info. */
+ if (!gimplify_ctxp->into_ssa
+ && DECL_P (*from_p) && DECL_IGNORED_P (*from_p)
+ && DECL_P (*to_p) && !DECL_IGNORED_P (*to_p))
+ {
+ if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
+ DECL_NAME (*from_p)
+ = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
+ DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
+ SET_DECL_DEBUG_EXPR (*from_p, *to_p);
+ }
+
if (want_value)
{
+ tree_to_gimple_tuple (expr_p);
+
append_to_statement_list (*expr_p, pre_p);
*expr_p = *to_p;
return GS_OK;
{
tree op0 = TREE_OPERAND (*expr_p, 0);
tree op1 = TREE_OPERAND (*expr_p, 1);
- tree args, t, dest;
-
- t = TYPE_SIZE_UNIT (TREE_TYPE (op0));
- t = unshare_expr (t);
- t = SUBSTITUTE_PLACEHOLDER_IN_EXPR (t, op0);
- args = tree_cons (NULL, t, NULL);
- t = build_fold_addr_expr (op1);
- args = tree_cons (NULL, t, args);
+ tree t, arg, dest, src;
+
+ arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
+ arg = unshare_expr (arg);
+ arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
+ src = build_fold_addr_expr (op1);
dest = build_fold_addr_expr (op0);
- args = tree_cons (NULL, dest, args);
t = implicit_built_in_decls[BUILT_IN_MEMCMP];
- t = build_function_call_expr (t, args);
+ t = build_call_expr (t, 3, dest, src, arg);
+ *expr_p
+ = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
+
+ return GS_OK;
+}
+
+/* Gimplify a comparison between two aggregate objects of integral scalar
+ mode as a comparison between the bitwise equivalent scalar values. */
+
+static enum gimplify_status
+gimplify_scalar_mode_aggregate_compare (tree *expr_p)
+{
+ tree op0 = TREE_OPERAND (*expr_p, 0);
+ tree op1 = TREE_OPERAND (*expr_p, 1);
+
+ tree type = TREE_TYPE (op0);
+ tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
+
+ op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0);
+ op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1);
+
*expr_p
- = build (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
+ = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
return GS_OK;
}
/* Preserve the original type of the expression. */
tree type = TREE_TYPE (*expr_p);
- *expr_p = build (COND_EXPR, type, *expr_p,
- convert (type, boolean_true_node),
- convert (type, boolean_false_node));
+ *expr_p = build3 (COND_EXPR, type, *expr_p,
+ fold_convert (type, boolean_true_node),
+ fold_convert (type, boolean_false_node));
return GS_OK;
}
enlightened front-end, or by shortcut_cond_expr. */
static enum gimplify_status
-gimplify_statement_list (tree *expr_p)
+gimplify_statement_list (tree *expr_p, tree *pre_p)
{
+ tree temp = voidify_wrapper_expr (*expr_p, NULL);
+
tree_stmt_iterator i = tsi_start (*expr_p);
while (!tsi_end_p (i))
tsi_next (&i);
}
+ if (temp)
+ {
+ append_to_statement_list (*expr_p, pre_p);
+ *expr_p = temp;
+ return GS_OK;
+ }
+
return GS_ALL_DONE;
}
tree t_expr = TREE_TYPE (expr);
tree t_op00 = TREE_TYPE (op00);
- if (!lang_hooks.types_compatible_p (t_expr, t_op00))
- {
-#ifdef ENABLE_CHECKING
- tree t_op0 = TREE_TYPE (op0);
- gcc_assert (POINTER_TYPE_P (t_expr)
- && cpt_same_type (TREE_CODE (t_op0) == ARRAY_TYPE
- ? TREE_TYPE (t_op0) : t_op0,
- TREE_TYPE (t_expr))
- && POINTER_TYPE_P (t_op00)
- && cpt_same_type (t_op0, TREE_TYPE (t_op00)));
-#endif
- op00 = fold_convert (TREE_TYPE (expr), op00);
- }
+ if (!useless_type_conversion_p (t_expr, t_op00))
+ op00 = fold_convert (TREE_TYPE (expr), op00);
*expr_p = op00;
ret = GS_OK;
}
the address of a call that returns a struct; see
gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
the implied temporary explicit. */
+
+ /* Mark the RHS addressable. */
ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
is_gimple_addressable, fb_either);
if (ret != GS_ERROR)
/* Make sure TREE_INVARIANT, TREE_CONSTANT, and TREE_SIDE_EFFECTS
is set properly. */
- recompute_tree_invarant_for_addr_expr (expr);
+ recompute_tree_invariant_for_addr_expr (expr);
- /* Mark the RHS addressable. */
- lang_hooks.mark_addressable (TREE_OPERAND (expr, 0));
+ mark_addressable (TREE_OPERAND (expr, 0));
}
break;
}
&allows_mem, &allows_reg, &is_inout);
if (!allows_reg && allows_mem)
- lang_hooks.mark_addressable (TREE_VALUE (link));
+ mark_addressable (TREE_VALUE (link));
tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
is_inout ? is_gimple_min_lval : is_gimple_lvalue,
break;
}
- str = alloca (len);
+ str = (char *) alloca (len);
for (beg = p + 1, dst = str;;)
{
const char *tem;
parse_input_constraint (&constraint, 0, 0, noutputs, 0,
oconstraints, &allows_mem, &allows_reg);
+ /* If we can't make copies, we can only accept memory. */
+ if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
+ {
+ if (allows_mem)
+ allows_reg = 0;
+ else
+ {
+ error ("impossible constraint in %<asm%>");
+ error ("non-memory input %d must stay in memory", i);
+ return GS_ERROR;
+ }
+ }
+
/* If the operand is a memory input, it should be an lvalue. */
if (!allows_reg && allows_mem)
{
- lang_hooks.mark_addressable (TREE_VALUE (link));
tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
is_gimple_lvalue, fb_lvalue | fb_mayfail);
+ mark_addressable (TREE_VALUE (link));
if (tret == GS_ERROR)
{
error ("memory input %d is not directly addressable", i);
tree temp = voidify_wrapper_expr (*expr_p, NULL);
/* We only care about the number of conditions between the innermost
- CLEANUP_POINT_EXPR and the cleanup. So save and reset the count. */
+ CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
+ any cleanups collected outside the CLEANUP_POINT_EXPR. */
int old_conds = gimplify_ctxp->conditions;
+ tree old_cleanups = gimplify_ctxp->conditional_cleanups;
gimplify_ctxp->conditions = 0;
+ gimplify_ctxp->conditional_cleanups = NULL_TREE;
body = TREE_OPERAND (*expr_p, 0);
gimplify_to_stmt_list (&body);
gimplify_ctxp->conditions = old_conds;
+ gimplify_ctxp->conditional_cleanups = old_cleanups;
for (iter = tsi_start (body); !tsi_end_p (iter); )
{
tree sl, tfe;
enum tree_code code;
- if (CLEANUP_EH_ONLY (wce))
- code = TRY_CATCH_EXPR;
- else
- code = TRY_FINALLY_EXPR;
+ if (CLEANUP_EH_ONLY (wce))
+ code = TRY_CATCH_EXPR;
+ else
+ code = TRY_FINALLY_EXPR;
+
+ sl = tsi_split_statement_list_after (&iter);
+ tfe = build2 (code, void_type_node, sl, NULL_TREE);
+ append_to_statement_list (TREE_OPERAND (wce, 0),
+ &TREE_OPERAND (tfe, 1));
+ *wce_p = tfe;
+ iter = tsi_start (sl);
+ }
+ }
+ else
+ tsi_next (&iter);
+ }
+
+ if (temp)
+ {
+ *expr_p = temp;
+ append_to_statement_list (body, pre_p);
+ return GS_OK;
+ }
+ else
+ {
+ *expr_p = body;
+ return GS_ALL_DONE;
+ }
+}
+
+/* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
+ is the cleanup action required. */
+
+static void
+gimple_push_cleanup (tree var, tree cleanup, bool eh_only, tree *pre_p)
+{
+ tree wce;
+
+ /* Errors can result in improperly nested cleanups. Which results in
+ confusion when trying to resolve the WITH_CLEANUP_EXPR. */
+ if (errorcount || sorrycount)
+ return;
+
+ if (gimple_conditional_context ())
+ {
+ /* If we're in a conditional context, this is more complex. We only
+ want to run the cleanup if we actually ran the initialization that
+ necessitates it, but we want to run it after the end of the
+ conditional context. So we wrap the try/finally around the
+ condition and use a flag to determine whether or not to actually
+ run the destructor. Thus
+
+ test ? f(A()) : 0
+
+ becomes (approximately)
+
+ flag = 0;
+ try {
+ if (test) { A::A(temp); flag = 1; val = f(temp); }
+ else { val = 0; }
+ } finally {
+ if (flag) A::~A(temp);
+ }
+ val
+ */
+
+ tree flag = create_tmp_var (boolean_type_node, "cleanup");
+ tree ffalse = build_gimple_modify_stmt (flag, boolean_false_node);
+ tree ftrue = build_gimple_modify_stmt (flag, boolean_true_node);
+ cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
+ wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
+ append_to_statement_list (ffalse, &gimplify_ctxp->conditional_cleanups);
+ append_to_statement_list (wce, &gimplify_ctxp->conditional_cleanups);
+ append_to_statement_list (ftrue, pre_p);
+
+ /* Because of this manipulation, and the EH edges that jump
+ threading cannot redirect, the temporary (VAR) will appear
+ to be used uninitialized. Don't warn. */
+ TREE_NO_WARNING (var) = 1;
+ }
+ else
+ {
+ wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
+ /* CLEANUP_EH_ONLY later selects TRY_CATCH_EXPR instead of
+ TRY_FINALLY_EXPR when gimplify_cleanup_point_expr expands the
+ marker, i.e. the cleanup runs only on the exception path. */
+ CLEANUP_EH_ONLY (wce) = eh_only;
+ append_to_statement_list (wce, pre_p);
+ }
+
+ /* Lower the cleanup body itself to GIMPLE form. */
+ gimplify_stmt (&TREE_OPERAND (wce, 0));
+}
+
+/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
+
+static enum gimplify_status
+gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p)
+{
+ tree targ = *expr_p;
+ tree temp = TARGET_EXPR_SLOT (targ);
+ tree init = TARGET_EXPR_INITIAL (targ);
+ enum gimplify_status ret;
+
+ if (init)
+ {
+ /* TARGET_EXPR temps aren't part of the enclosing block, so add it
+ to the temps list. */
+ gimple_add_tmp_var (temp);
+
+ /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
+ expression is supposed to initialize the slot. */
+ if (VOID_TYPE_P (TREE_TYPE (init)))
+ ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
+ else
+ {
+ /* Otherwise make the initialization explicit: TEMP = INIT. */
+ init = build2 (INIT_EXPR, void_type_node, temp, init);
+ ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt,
+ fb_none);
+ }
+ if (ret == GS_ERROR)
+ {
+ /* PR c++/28266 Make sure this is expanded only once. */
+ TARGET_EXPR_INITIAL (targ) = NULL_TREE;
+ return GS_ERROR;
+ }
+ append_to_statement_list (init, pre_p);
+
+ /* If needed, push the cleanup for the temp. */
+ if (TARGET_EXPR_CLEANUP (targ))
+ {
+ gimplify_stmt (&TARGET_EXPR_CLEANUP (targ));
+ gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
+ CLEANUP_EH_ONLY (targ), pre_p);
+ }
+
+ /* Only expand this once. */
+ TREE_OPERAND (targ, 3) = init;
+ TARGET_EXPR_INITIAL (targ) = NULL_TREE;
+ }
+ else
+ /* We should have expanded this before. */
+ gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
+
+ /* The TARGET_EXPR itself is replaced by its temporary slot. */
+ *expr_p = temp;
+ return GS_OK;
+}
+
+/* Gimplification of expression trees. */
+
+/* Gimplify an expression which appears at statement context; usually, this
+ means replacing it with a suitably gimple STATEMENT_LIST. */
+
+void
+gimplify_stmt (tree *stmt_p)
+{
+ /* fb_none: no result value is requested, only statement form. */
+ gimplify_expr (stmt_p, NULL, NULL, is_gimple_stmt, fb_none);
+}
+
+/* Similarly, but force the result to be a STATEMENT_LIST. */
+
+void
+gimplify_to_stmt_list (tree *stmt_p)
+{
+ gimplify_stmt (stmt_p);
+ /* An empty result becomes an empty STATEMENT_LIST ... */
+ if (!*stmt_p)
+ *stmt_p = alloc_stmt_list ();
+ else if (TREE_CODE (*stmt_p) != STATEMENT_LIST)
+ {
+ /* ... and a single statement gets wrapped in a fresh list. */
+ tree t = *stmt_p;
+ *stmt_p = alloc_stmt_list ();
+ append_to_statement_list (t, stmt_p);
+ }
+}
+
+
+/* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
+ to CTX. If entries already exist, force them to be some flavor of private.
+ If there is no enclosing parallel, do nothing. */
+
+void
+omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
+{
+ splay_tree_node n;
+
+ if (decl == NULL || !DECL_P (decl))
+ return;
+
+ /* Walk outward through the enclosing OpenMP contexts. */
+ do
+ {
+ n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
+ if (n != NULL)
+ {
+ if (n->value & GOVD_SHARED)
+ /* Demote SHARED to FIRSTPRIVATE, preserving only the SEEN bit. */
+ n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
+ else
+ /* Already some flavor of private; stop walking outward. */
+ return;
+ }
+ else if (ctx->is_parallel)
+ omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
+
+ ctx = ctx->outer_context;
+ }
+ while (ctx);
+}
+
+/* Similarly for each of the type sizes of TYPE. */
+
+static void
+omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
+{
+ if (type == NULL || type == error_mark_node)
+ return;
+ type = TYPE_MAIN_VARIANT (type);
+
+ /* pointer_set_insert returns nonzero if TYPE was already present,
+ so each type is processed at most once per context. */
+ if (pointer_set_insert (ctx->privatized_types, type))
+ return;
+
+ switch (TREE_CODE (type))
+ {
+ case INTEGER_TYPE:
+ case ENUMERAL_TYPE:
+ case BOOLEAN_TYPE:
+ case REAL_TYPE:
+ omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
+ omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
+ break;
+
+ case ARRAY_TYPE:
+ /* Recurse into both the element type and the index domain. */
+ omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
+ omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
+ break;
+
+ case RECORD_TYPE:
+ case UNION_TYPE:
+ case QUAL_UNION_TYPE:
+ {
+ tree field;
+ for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
+ if (TREE_CODE (field) == FIELD_DECL)
+ {
+ omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
+ omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
+ }
+ }
+ break;
+
+ case POINTER_TYPE:
+ case REFERENCE_TYPE:
+ omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
+ break;
+
+ default:
+ break;
+ }
+
+ omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
+ omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
+ /* Let the front end privatize any language-specific size fields. */
+ lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
+}
+
+/* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
+
+static void
+omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
+{
+ splay_tree_node n;
+ unsigned int nflags;
+ tree t;
+
+ if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
+ return;
+
+ /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
+ there are constructors involved somewhere. */
+ if (TREE_ADDRESSABLE (TREE_TYPE (decl))
+ || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
+ flags |= GOVD_SEEN;
+
+ n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
+ if (n != NULL)
+ {
+ /* We shouldn't be re-adding the decl with the same data
+ sharing class. */
+ gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
+ /* The only combination of data sharing classes we should see is
+ FIRSTPRIVATE and LASTPRIVATE. */
+ nflags = n->value | flags;
+ gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
+ == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
;
+ n->value = nflags;
+ return;
+ }
+
+ /* When adding a variable-sized variable, we have to handle all sorts
+ of additional bits of data: the pointer replacement variable, and
+ the parameters of the type. */
+ if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
+ {
+ /* Add the pointer replacement variable as PRIVATE if the variable
+ replacement is private, else FIRSTPRIVATE since we'll need the
+ address of the original variable either for SHARED, or for the
+ copy into or out of the context. */
+ if (!(flags & GOVD_LOCAL))
+ {
+ nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
+ nflags |= flags & GOVD_SEEN;
+ t = DECL_VALUE_EXPR (decl);
+ gcc_assert (TREE_CODE (t) == INDIRECT_REF);
+ t = TREE_OPERAND (t, 0);
+ gcc_assert (DECL_P (t));
+ omp_add_variable (ctx, t, nflags);
+ }
+
+ /* Add all of the variable and type parameters (which should have
+ been gimplified to a formal temporary) as FIRSTPRIVATE. */
+ omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
+ omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
+ omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
+
+ /* The variable-sized variable itself is never SHARED, only some form
+ of PRIVATE. The sharing would take place via the pointer variable
+ which we remapped above. */
+ if (flags & GOVD_SHARED)
+ flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
+ | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
+
+ /* We're going to make use of the TYPE_SIZE_UNIT at least in the
+ alloca statement we generate for the variable, so make sure it
+ is available. This isn't automatically needed for the SHARED
+ case, since we won't be allocating local storage then.
+ For local variables TYPE_SIZE_UNIT might not be gimplified yet,
+ in this case omp_notice_variable will be called later
+ on when it is gimplified. */
+ else if (! (flags & GOVD_LOCAL))
+ omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
+ }
+ else if (lang_hooks.decls.omp_privatize_by_reference (decl))
+ {
+ gcc_assert ((flags & GOVD_LOCAL) == 0);
+ omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
+
+ /* Similar to the direct variable sized case above, we'll need the
+ size of references being privatized. */
+ if ((flags & GOVD_SHARED) == 0)
+ {
+ t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
+ if (TREE_CODE (t) != INTEGER_CST)
+ omp_notice_variable (ctx, t, true);
+ }
+ }
+
+ /* Finally record DECL with its accumulated flags. */
+ splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
+}
+
+/* Record the fact that DECL was used within the OpenMP context CTX.
+ IN_CODE is true when real code uses DECL, and false when we should
+ merely emit default(none) errors. Return true if DECL is going to
+ be remapped and thus DECL shouldn't be gimplified into its
+ DECL_VALUE_EXPR (if any). */
+
+static bool
+omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
+{
+ splay_tree_node n;
+ unsigned flags = in_code ? GOVD_SEEN : 0;
+ bool ret = false, shared;
+
+ if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
+ return false;
+
+ /* Threadprivate variables are predetermined. */
+ if (is_global_var (decl))
+ {
+ if (DECL_THREAD_LOCAL_P (decl))
+ return false;
+
+ if (DECL_HAS_VALUE_EXPR_P (decl))
+ {
+ tree value = get_base_address (DECL_VALUE_EXPR (decl));
+
+ if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
+ return false;
+ }
+ }
+
+ n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
+ if (n == NULL)
+ {
+ enum omp_clause_default_kind default_kind, kind;
+
+ if (!ctx->is_parallel)
+ goto do_outer;
+
+ /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
+ remapped firstprivate instead of shared. To some extent this is
+ addressed in omp_firstprivatize_type_sizes, but not effectively. */
+ default_kind = ctx->default_kind;
+ kind = lang_hooks.decls.omp_predetermined_sharing (decl);
+ if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
+ default_kind = kind;
+
+ switch (default_kind)
+ {
+ case OMP_CLAUSE_DEFAULT_NONE:
+ error ("%qs not specified in enclosing parallel",
+ IDENTIFIER_POINTER (DECL_NAME (decl)));
+ error ("%Henclosing parallel", &ctx->location);
+ /* FALLTHRU */
+ case OMP_CLAUSE_DEFAULT_SHARED:
+ flags |= GOVD_SHARED;
+ break;
+ case OMP_CLAUSE_DEFAULT_PRIVATE:
+ flags |= GOVD_PRIVATE;
+ break;
+ default:
+ gcc_unreachable ();
+ }
+
+ omp_add_variable (ctx, decl, flags);
+
+ shared = (flags & GOVD_SHARED) != 0;
+ ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
+ goto do_outer;
+ }
+
+ /* DECL already has an entry in this context; merge in the new flags. */
+ shared = ((flags | n->value) & GOVD_SHARED) != 0;
+ ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
+
+ /* If nothing changed, there's nothing left to do. */
+ if ((n->value & flags) == flags)
+ return ret;
+ flags |= n->value;
+ n->value = flags;
+
+ do_outer:
+ /* If the variable is private in the current context, then we don't
+ need to propagate anything to an outer context. */
+ if (flags & GOVD_PRIVATE)
+ return ret;
+ if (ctx->outer_context
+ && omp_notice_variable (ctx->outer_context, decl, in_code))
+ return true;
+ return ret;
+}
+
+/* Verify that DECL is private within CTX. If there's specific information
+ to the contrary in the innermost scope, generate an error. */
+
+static bool
+omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
+{
+ splay_tree_node n;
+
+ n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
+ if (n != NULL)
+ {
+ if (n->value & GOVD_SHARED)
+ {
+ if (ctx == gimplify_omp_ctxp)
+ {
+ /* Innermost scope says SHARED: diagnose and force private. */
+ error ("iteration variable %qs should be private",
+ IDENTIFIER_POINTER (DECL_NAME (decl)));
+ n->value = GOVD_PRIVATE;
+ return true;
+ }
+ else
+ return false;
+ }
+ else if ((n->value & GOVD_EXPLICIT) != 0
+ && (ctx == gimplify_omp_ctxp
+ || (ctx->is_combined_parallel
+ && gimplify_omp_ctxp->outer_context == ctx)))
+ {
+ if ((n->value & GOVD_FIRSTPRIVATE) != 0)
+ error ("iteration variable %qs should not be firstprivate",
+ IDENTIFIER_POINTER (DECL_NAME (decl)));
+ else if ((n->value & GOVD_REDUCTION) != 0)
+ error ("iteration variable %qs should not be reduction",
+ IDENTIFIER_POINTER (DECL_NAME (decl)));
+ }
+ return true;
+ }
+
+ /* Not recorded here: a parallel boundary stops the outward search. */
+ if (ctx->is_parallel)
+ return false;
+ else if (ctx->outer_context)
+ return omp_is_private (ctx->outer_context, decl);
+ else
+ return !is_global_var (decl);
+}
+
+/* Return true if DECL is private within a parallel region
+ that binds to the current construct's context or in parallel
+ region's REDUCTION clause. */
+
+static bool
+omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
+{
+ splay_tree_node n;
+
+ /* Start the search at the context enclosing CTX. */
+ do
+ {
+ ctx = ctx->outer_context;
+ if (ctx == NULL)
+ return !(is_global_var (decl)
+ /* References might be private, but might be shared too. */
+ || lang_hooks.decls.omp_privatize_by_reference (decl));
+
+ n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
+ if (n != NULL)
+ return (n->value & GOVD_SHARED) == 0;
+ }
+ while (!ctx->is_parallel);
+ return false;
+}
+
+/* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
+ and previous omp contexts. */
+
+static void
+gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel,
+ bool in_combined_parallel)
+{
+ struct gimplify_omp_ctx *ctx, *outer_ctx;
+ tree c;
+
+ ctx = new_omp_context (in_parallel, in_combined_parallel);
+ outer_ctx = ctx->outer_context;
+
+ while ((c = *list_p) != NULL)
+ {
+ enum gimplify_status gs;
+ bool remove = false;
+ bool notice_outer = true;
+ /* When set, names the clause kind for the "private in outer
+ context" diagnostic below. */
+ const char *check_non_private = NULL;
+ unsigned int flags;
+ tree decl;
+
+ switch (OMP_CLAUSE_CODE (c))
+ {
+ case OMP_CLAUSE_PRIVATE:
+ flags = GOVD_PRIVATE | GOVD_EXPLICIT;
+ notice_outer = false;
+ goto do_add;
+ case OMP_CLAUSE_SHARED:
+ flags = GOVD_SHARED | GOVD_EXPLICIT;
+ goto do_add;
+ case OMP_CLAUSE_FIRSTPRIVATE:
+ flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
+ check_non_private = "firstprivate";
+ goto do_add;
+ case OMP_CLAUSE_LASTPRIVATE:
+ flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
+ check_non_private = "lastprivate";
+ goto do_add;
+ case OMP_CLAUSE_REDUCTION:
+ flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
+ check_non_private = "reduction";
+ goto do_add;
+
+ do_add:
+ decl = OMP_CLAUSE_DECL (c);
+ if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
+ {
+ remove = true;
+ break;
+ }
+ omp_add_variable (ctx, decl, flags);
+ if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
+ && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
+ {
+ omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
+ GOVD_LOCAL | GOVD_SEEN);
+ /* Gimplify the reduction init and merge sequences inside the
+ new context, each in its own gimplify context. */
+ gimplify_omp_ctxp = ctx;
+ push_gimplify_context ();
+ gimplify_stmt (&OMP_CLAUSE_REDUCTION_INIT (c));
+ pop_gimplify_context (OMP_CLAUSE_REDUCTION_INIT (c));
+ push_gimplify_context ();
+ gimplify_stmt (&OMP_CLAUSE_REDUCTION_MERGE (c));
+ pop_gimplify_context (OMP_CLAUSE_REDUCTION_MERGE (c));
+ gimplify_omp_ctxp = outer_ctx;
+ }
+ if (notice_outer)
+ goto do_notice;
+ break;
+
+ case OMP_CLAUSE_COPYIN:
+ case OMP_CLAUSE_COPYPRIVATE:
+ decl = OMP_CLAUSE_DECL (c);
+ if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
+ {
+ remove = true;
+ break;
+ }
+ do_notice:
+ if (outer_ctx)
+ omp_notice_variable (outer_ctx, decl, true);
+ if (check_non_private
+ && !in_parallel
+ && omp_check_private (ctx, decl))
+ {
+ error ("%s variable %qs is private in outer context",
+ check_non_private, IDENTIFIER_POINTER (DECL_NAME (decl)));
+ remove = true;
+ }
+ break;
+
+ case OMP_CLAUSE_IF:
+ OMP_CLAUSE_OPERAND (c, 0)
+ = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
+ /* Fall through. */
+
+ case OMP_CLAUSE_SCHEDULE:
+ case OMP_CLAUSE_NUM_THREADS:
+ gs = gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
+ is_gimple_val, fb_rvalue);
+ if (gs == GS_ERROR)
+ remove = true;
+ break;
+
+ case OMP_CLAUSE_NOWAIT:
+ case OMP_CLAUSE_ORDERED:
+ break;
+
+ case OMP_CLAUSE_DEFAULT:
+ ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+
+ /* Unlink rejected clauses, otherwise advance down the list. */
+ if (remove)
+ *list_p = OMP_CLAUSE_CHAIN (c);
+ else
+ list_p = &OMP_CLAUSE_CHAIN (c);
+ }
+
+ /* Make the new context current for gimplifying the construct body. */
+ gimplify_omp_ctxp = ctx;
+}
+
+/* For all variables that were not actually used within the context,
+ remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
+
+static int
+gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
+{
+ tree *list_p = (tree *) data;
+ tree decl = (tree) n->key;
+ unsigned flags = n->value;
+ enum omp_clause_code code;
+ tree clause;
+ bool private_debug;
+
+ /* Explicit clauses and context-local temporaries need no implicit
+ clause; neither do variables never actually seen. */
+ if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
+ return 0;
+ if ((flags & GOVD_SEEN) == 0)
+ return 0;
+ if (flags & GOVD_DEBUG_PRIVATE)
+ {
+ gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
+ private_debug = true;
+ }
+ else
+ private_debug
+ = lang_hooks.decls.omp_private_debug_clause (decl,
+ !!(flags & GOVD_SHARED));
+ if (private_debug)
+ code = OMP_CLAUSE_PRIVATE;
+ else if (flags & GOVD_SHARED)
+ {
+ if (is_global_var (decl))
+ {
+ /* A global is only worth an explicit SHARED clause if some
+ enclosing context privatizes it in some way. */
+ struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
+ while (ctx != NULL)
+ {
+ splay_tree_node on
+ = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
+ if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
+ | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
+ break;
+ ctx = ctx->outer_context;
+ }
+ if (ctx == NULL)
+ return 0;
+ }
+ code = OMP_CLAUSE_SHARED;
+ }
+ else if (flags & GOVD_PRIVATE)
+ code = OMP_CLAUSE_PRIVATE;
+ else if (flags & GOVD_FIRSTPRIVATE)
+ code = OMP_CLAUSE_FIRSTPRIVATE;
+ else
+ gcc_unreachable ();
+
+ /* Prepend the synthesized clause to the list. */
+ clause = build_omp_clause (code);
+ OMP_CLAUSE_DECL (clause) = decl;
+ OMP_CLAUSE_CHAIN (clause) = *list_p;
+ if (private_debug)
+ OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
+ *list_p = clause;
+
+ /* Returning 0 continues the splay-tree traversal. */
+ return 0;
+}
+
+static void
+gimplify_adjust_omp_clauses (tree *list_p)
+{
+ struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
+ tree c, decl;
+
+ while ((c = *list_p) != NULL)
+ {
+ splay_tree_node n;
+ bool remove = false;
+
+ switch (OMP_CLAUSE_CODE (c))
+ {
+ case OMP_CLAUSE_PRIVATE:
+ case OMP_CLAUSE_SHARED:
+ case OMP_CLAUSE_FIRSTPRIVATE:
+ /* Drop explicit clauses for variables never actually used. */
+ decl = OMP_CLAUSE_DECL (c);
+ n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
+ remove = !(n->value & GOVD_SEEN);
+ if (! remove)
+ {
+ bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
+ if ((n->value & GOVD_DEBUG_PRIVATE)
+ || lang_hooks.decls.omp_private_debug_clause (decl, shared))
+ {
+ gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
+ || ((n->value & GOVD_DATA_SHARE_CLASS)
+ == GOVD_PRIVATE));
+ OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
+ OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
+ }
+ }
+ break;
+
+ case OMP_CLAUSE_LASTPRIVATE:
+ /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
+ accurately reflect the presence of a FIRSTPRIVATE clause. */
+ decl = OMP_CLAUSE_DECL (c);
+ n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
+ OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
+ = (n->value & GOVD_FIRSTPRIVATE) != 0;
+ break;
+
+ case OMP_CLAUSE_REDUCTION:
+ case OMP_CLAUSE_COPYIN:
+ case OMP_CLAUSE_COPYPRIVATE:
+ case OMP_CLAUSE_IF:
+ case OMP_CLAUSE_NUM_THREADS:
+ case OMP_CLAUSE_SCHEDULE:
+ case OMP_CLAUSE_NOWAIT:
+ case OMP_CLAUSE_ORDERED:
+ case OMP_CLAUSE_DEFAULT:
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+
+ if (remove)
+ *list_p = OMP_CLAUSE_CHAIN (c);
+ else
+ list_p = &OMP_CLAUSE_CHAIN (c);
+ }
+
+ /* Add in any implicit data sharing. */
+ splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
+
+ /* Pop and free this OpenMP context. */
+ gimplify_omp_ctxp = ctx->outer_context;
+ delete_omp_context (ctx);
+}
+
+/* Gimplify the contents of an OMP_PARALLEL statement. This involves
+ gimplification of the body, as well as scanning the body for used
+ variables. We need to do this scan now, because variable-sized
+ decls will be decomposed during gimplification. */
+
+static enum gimplify_status
+gimplify_omp_parallel (tree *expr_p, tree *pre_p)
+{
+ tree expr = *expr_p;
+
+ gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, true,
+ OMP_PARALLEL_COMBINED (expr));
+
+ /* Gimplify the body in a fresh gimplify context. */
+ push_gimplify_context ();
+
+ gimplify_stmt (&OMP_PARALLEL_BODY (expr));
+
+ if (TREE_CODE (OMP_PARALLEL_BODY (expr)) == BIND_EXPR)
+ pop_gimplify_context (OMP_PARALLEL_BODY (expr));
+ else
+ pop_gimplify_context (NULL_TREE);
+
+ gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
+
+ return GS_ALL_DONE;
+}
+
+/* Gimplify the gross structure of an OMP_FOR statement. */
+
+static enum gimplify_status
+gimplify_omp_for (tree *expr_p, tree *pre_p)
+{
+ tree for_stmt, decl, t;
+ enum gimplify_status ret = GS_OK;
+
+ for_stmt = *expr_p;
+
+ gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, false, false);
+
+ t = OMP_FOR_INIT (for_stmt);
+ gcc_assert (TREE_CODE (t) == MODIFY_EXPR
+ || TREE_CODE (t) == GIMPLE_MODIFY_STMT);
+ decl = GENERIC_TREE_OPERAND (t, 0);
+ gcc_assert (DECL_P (decl));
+ gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)));
+
+ /* Make sure the iteration variable is private. */
+ if (omp_is_private (gimplify_omp_ctxp, decl))
+ omp_notice_variable (gimplify_omp_ctxp, decl, true);
+ else
+ omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
+
+ ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
+ &OMP_FOR_PRE_BODY (for_stmt),
+ NULL, is_gimple_val, fb_rvalue);
+
+ tree_to_gimple_tuple (&OMP_FOR_INIT (for_stmt));
+
+ t = OMP_FOR_COND (for_stmt);
+ gcc_assert (COMPARISON_CLASS_P (t));
+ gcc_assert (GENERIC_TREE_OPERAND (t, 0) == decl);
+
+ ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
+ &OMP_FOR_PRE_BODY (for_stmt),
+ NULL, is_gimple_val, fb_rvalue);
+
+ tree_to_gimple_tuple (&OMP_FOR_INCR (for_stmt));
+ t = OMP_FOR_INCR (for_stmt);
+ switch (TREE_CODE (t))
+ {
+ /* Canonicalize increments and decrements into DECL = DECL + CST. */
+ case PREINCREMENT_EXPR:
+ case POSTINCREMENT_EXPR:
+ t = build_int_cst (TREE_TYPE (decl), 1);
+ t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
+ t = build_gimple_modify_stmt (decl, t);
+ OMP_FOR_INCR (for_stmt) = t;
+ break;
+
+ case PREDECREMENT_EXPR:
+ case POSTDECREMENT_EXPR:
+ t = build_int_cst (TREE_TYPE (decl), -1);
+ t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
+ t = build_gimple_modify_stmt (decl, t);
+ OMP_FOR_INCR (for_stmt) = t;
+ break;
+
+ case GIMPLE_MODIFY_STMT:
+ gcc_assert (GIMPLE_STMT_OPERAND (t, 0) == decl);
+ t = GIMPLE_STMT_OPERAND (t, 1);
+ switch (TREE_CODE (t))
+ {
+ case PLUS_EXPR:
+ /* PLUS is commutative: put DECL in operand 0 if needed. */
+ if (TREE_OPERAND (t, 1) == decl)
+ {
+ TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
+ TREE_OPERAND (t, 0) = decl;
+ break;
+ }
+ /* FALLTHRU */
+ case MINUS_EXPR:
+ gcc_assert (TREE_OPERAND (t, 0) == decl);
+ break;
+ default:
+ gcc_unreachable ();
+ }
+
+ ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
+ NULL, is_gimple_val, fb_rvalue);
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+
+ gimplify_to_stmt_list (&OMP_FOR_BODY (for_stmt));
+ gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
+
+ return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
+}
+
+/* Gimplify the gross structure of other OpenMP worksharing constructs.
+ In particular, OMP_SECTIONS and OMP_SINGLE. */
+
+static enum gimplify_status
+gimplify_omp_workshare (tree *expr_p, tree *pre_p)
+{
+ tree stmt = *expr_p;
+
+ /* Same scan / lower-body-to-STATEMENT_LIST / adjust shape as the
+ other OpenMP constructs, but with no enclosing parallel. */
+ gimplify_scan_omp_clauses (&OMP_CLAUSES (stmt), pre_p, false, false);
+ gimplify_to_stmt_list (&OMP_BODY (stmt));
+ gimplify_adjust_omp_clauses (&OMP_CLAUSES (stmt));
+
+ return GS_ALL_DONE;
+}
+
+/* A subroutine of gimplify_omp_atomic. The front end is supposed to have
+ stabilized the lhs of the atomic operation as *ADDR. Return true if
+ EXPR is this stabilized form. */
+
+static bool
+goa_lhs_expr_p (tree expr, tree addr)
+{
+ /* Also include casts to other type variants. The C front end is fond
+ of adding these for e.g. volatile variables. This is like
+ STRIP_TYPE_NOPS but includes the main variant lookup. */
+ while ((TREE_CODE (expr) == NOP_EXPR
+ || TREE_CODE (expr) == CONVERT_EXPR
+ || TREE_CODE (expr) == NON_LVALUE_EXPR)
+ && TREE_OPERAND (expr, 0) != error_mark_node
+ && (TYPE_MAIN_VARIANT (TREE_TYPE (expr))
+ == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))))
+ expr = TREE_OPERAND (expr, 0);
+
+ /* Match either a dereference of ADDR or the object ADDR points to. */
+ if (TREE_CODE (expr) == INDIRECT_REF && TREE_OPERAND (expr, 0) == addr)
+ return true;
+ if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
+ return true;
+ return false;
+}
+
+/* A subroutine of gimplify_omp_atomic. Attempt to implement the atomic
+ operation as a __sync_fetch_and_op builtin. INDEX is log2 of the
+ size of the data type, and thus usable to find the index of the builtin
+ decl. Returns GS_UNHANDLED if the expression is not of the proper form. */
+
+static enum gimplify_status
+gimplify_omp_atomic_fetch_op (tree *expr_p, tree addr, tree rhs, int index)
+{
+ enum built_in_function base;
+ tree decl, itype;
+ enum insn_code *optab;
+
+ /* Check for one of the supported fetch-op operations. */
+ switch (TREE_CODE (rhs))
+ {
+ case POINTER_PLUS_EXPR:
+ case PLUS_EXPR:
+ base = BUILT_IN_FETCH_AND_ADD_N;
+ optab = sync_add_optab;
+ break;
+ case MINUS_EXPR:
+ base = BUILT_IN_FETCH_AND_SUB_N;
+ /* NOTE(review): checks the add optab — presumably because
+ fetch-and-sub can be expanded as fetch-and-add of the negation;
+ confirm against the optabs/builtins expansion code. */
+ optab = sync_add_optab;
+ break;
+ case BIT_AND_EXPR:
+ base = BUILT_IN_FETCH_AND_AND_N;
+ optab = sync_and_optab;
+ break;
+ case BIT_IOR_EXPR:
+ base = BUILT_IN_FETCH_AND_OR_N;
+ optab = sync_ior_optab;
+ break;
+ case BIT_XOR_EXPR:
+ base = BUILT_IN_FETCH_AND_XOR_N;
+ optab = sync_xor_optab;
+ break;
+ default:
+ return GS_UNHANDLED;
+ }
+
+ /* Make sure the expression is of the proper form. */
+ if (goa_lhs_expr_p (TREE_OPERAND (rhs, 0), addr))
+ rhs = TREE_OPERAND (rhs, 1);
+ else if (commutative_tree_code (TREE_CODE (rhs))
+ && goa_lhs_expr_p (TREE_OPERAND (rhs, 1), addr))
+ rhs = TREE_OPERAND (rhs, 0);
+ else
+ return GS_UNHANDLED;
+
+ /* INDEX is log2 of the operand size; base + index + 1 presumably
+ selects the fixed-size variant of the _N builtin — verify against
+ the builtins.def ordering. */
+ decl = built_in_decls[base + index + 1];
+ itype = TREE_TYPE (TREE_TYPE (decl));
+
+ /* Punt if the target has no atomic pattern for this mode. */
+ if (optab[TYPE_MODE (itype)] == CODE_FOR_nothing)
+ return GS_UNHANDLED;
+
+ *expr_p = build_call_expr (decl, 2, addr, fold_convert (itype, rhs));
+ return GS_OK;
+}
- sl = tsi_split_statement_list_after (&iter);
- tfe = build (code, void_type_node, sl, NULL_TREE);
- append_to_statement_list (TREE_OPERAND (wce, 0),
- &TREE_OPERAND (tfe, 1));
- *wce_p = tfe;
- iter = tsi_start (sl);
- }
- }
- else
- tsi_next (&iter);
- }
+/* A subroutine of gimplify_omp_atomic_pipeline. Walk *EXPR_P and replace
+ appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
+ the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
+ a subexpression, 0 if it did not, or -1 if an error was encountered. */
- if (temp)
+static int
+goa_stabilize_expr (tree *expr_p, tree *pre_p, tree lhs_addr, tree lhs_var)
+{
+ tree expr = *expr_p;
+ int saw_lhs;
+
+ if (goa_lhs_expr_p (expr, lhs_addr))
 {
- *expr_p = temp;
- append_to_statement_list (body, pre_p);
- return GS_OK;
+ *expr_p = lhs_var;
+ return 1;
 }
- else
+ if (is_gimple_val (expr))
+ return 0;
+
+ saw_lhs = 0;
+ switch (TREE_CODE_CLASS (TREE_CODE (expr)))
 {
- *expr_p = body;
- return GS_ALL_DONE;
+ case tcc_binary:
+ saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
+ lhs_addr, lhs_var);
+ /* FALLTHRU: a binary op also processes operand 0 below. */
+ case tcc_unary:
+ saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
+ lhs_addr, lhs_var);
+ break;
+ default:
+ break;
+ }
+
+ /* No lhs below here: force the subexpression into a temporary. */
+ if (saw_lhs == 0)
+ {
+ enum gimplify_status gs;
+ gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
+ if (gs != GS_ALL_DONE)
+ saw_lhs = -1;
 }
+
+ return saw_lhs;
 }
-/* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
- is the cleanup action required. */
+/* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
-static void
-gimple_push_cleanup (tree var, tree cleanup, bool eh_only, tree *pre_p)
-{
- tree wce;
+ oldval = *addr;
+ repeat:
+ newval = rhs; // with oldval replacing *addr in rhs
+ oldval = __sync_val_compare_and_swap (addr, oldval, newval);
+ if (oldval != newval)
+ goto repeat;
- /* Errors can result in improperly nested cleanups. Which results in
- confusion when trying to resolve the WITH_CLEANUP_EXPR. */
- if (errorcount || sorrycount)
- return;
+ INDEX is log2 of the size of the data type, and thus usable to find the
+ index of the builtin decl. */
- if (gimple_conditional_context ())
- {
- /* If we're in a conditional context, this is more complex. We only
- want to run the cleanup if we actually ran the initialization that
- necessitates it, but we want to run it after the end of the
- conditional context. So we wrap the try/finally around the
- condition and use a flag to determine whether or not to actually
- run the destructor. Thus
+static enum gimplify_status
+gimplify_omp_atomic_pipeline (tree *expr_p, tree *pre_p, tree addr,
+ tree rhs, int index)
+{
+ tree oldval, oldival, oldival2, newval, newival, label;
+ tree type, itype, cmpxchg, x, iaddr;
- test ? f(A()) : 0
+ cmpxchg = built_in_decls[BUILT_IN_VAL_COMPARE_AND_SWAP_N + index + 1];
+ type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
+ itype = TREE_TYPE (TREE_TYPE (cmpxchg));
- becomes (approximately)
+ if (sync_compare_and_swap[TYPE_MODE (itype)] == CODE_FOR_nothing)
+ return GS_UNHANDLED;
- flag = 0;
- try {
- if (test) { A::A(temp); flag = 1; val = f(temp); }
- else { val = 0; }
- } finally {
- if (flag) A::~A(temp);
- }
- val
- */
+ oldval = create_tmp_var (type, NULL);
+ newval = create_tmp_var (type, NULL);
- tree flag = create_tmp_var (boolean_type_node, "cleanup");
- tree ffalse = build (MODIFY_EXPR, void_type_node, flag,
- boolean_false_node);
- tree ftrue = build (MODIFY_EXPR, void_type_node, flag,
- boolean_true_node);
- cleanup = build (COND_EXPR, void_type_node, flag, cleanup, NULL);
- wce = build (WITH_CLEANUP_EXPR, void_type_node, cleanup);
- append_to_statement_list (ffalse, &gimplify_ctxp->conditional_cleanups);
- append_to_statement_list (wce, &gimplify_ctxp->conditional_cleanups);
- append_to_statement_list (ftrue, pre_p);
+ /* Precompute as much of RHS as possible. In the same walk, replace
+ occurrences of the lhs value with our temporary. */
+ if (goa_stabilize_expr (&rhs, pre_p, addr, oldval) < 0)
+ return GS_ERROR;
- /* Because of this manipulation, and the EH edges that jump
- threading cannot redirect, the temporary (VAR) will appear
- to be used uninitialized. Don't warn. */
- TREE_NO_WARNING (var) = 1;
+ x = build_fold_indirect_ref (addr);
+ x = build_gimple_modify_stmt (oldval, x);
+ gimplify_and_add (x, pre_p);
+
+ /* For floating-point values, we'll need to view-convert them to integers
+ so that we can perform the atomic compare and swap. Simplify the
+ following code by always setting up the "i"ntegral variables. */
+ if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
+ {
+ oldival = oldval;
+ newival = newval;
+ iaddr = addr;
}
else
{
- wce = build (WITH_CLEANUP_EXPR, void_type_node, cleanup);
- CLEANUP_EH_ONLY (wce) = eh_only;
- append_to_statement_list (wce, pre_p);
+ oldival = create_tmp_var (itype, NULL);
+ newival = create_tmp_var (itype, NULL);
+
+ x = build1 (VIEW_CONVERT_EXPR, itype, oldval);
+ x = build_gimple_modify_stmt (oldival, x);
+ gimplify_and_add (x, pre_p);
+ iaddr = fold_convert (build_pointer_type (itype), addr);
}
- gimplify_stmt (&TREE_OPERAND (wce, 0));
-}
+ oldival2 = create_tmp_var (itype, NULL);
-/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
+ label = create_artificial_label ();
+ x = build1 (LABEL_EXPR, void_type_node, label);
+ gimplify_and_add (x, pre_p);
-static enum gimplify_status
-gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p)
-{
- tree targ = *expr_p;
- tree temp = TARGET_EXPR_SLOT (targ);
- tree init = TARGET_EXPR_INITIAL (targ);
- enum gimplify_status ret;
+ x = build_gimple_modify_stmt (newval, rhs);
+ gimplify_and_add (x, pre_p);
- if (init)
+ if (newval != newival)
{
- /* TARGET_EXPR temps aren't part of the enclosing block, so add it
- to the temps list. */
- gimple_add_tmp_var (temp);
+ x = build1 (VIEW_CONVERT_EXPR, itype, newval);
+ x = build_gimple_modify_stmt (newival, x);
+ gimplify_and_add (x, pre_p);
+ }
- /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
- expression is supposed to initialize the slot. */
- if (VOID_TYPE_P (TREE_TYPE (init)))
- ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
- else
- {
- /* Special handling for BIND_EXPR can result in fewer temps. */
- ret = GS_OK;
- if (TREE_CODE (init) == BIND_EXPR)
- gimplify_bind_expr (&init, temp, pre_p);
- if (init != temp)
- {
- init = build (MODIFY_EXPR, void_type_node, temp, init);
- ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt,
- fb_none);
- }
- }
- if (ret == GS_ERROR)
- return GS_ERROR;
- append_to_statement_list (init, pre_p);
+ x = build_gimple_modify_stmt (oldival2, fold_convert (itype, oldival));
+ gimplify_and_add (x, pre_p);
- /* If needed, push the cleanup for the temp. */
- if (TARGET_EXPR_CLEANUP (targ))
- {
- gimplify_stmt (&TARGET_EXPR_CLEANUP (targ));
- gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
- CLEANUP_EH_ONLY (targ), pre_p);
- }
+ x = build_call_expr (cmpxchg, 3, iaddr, fold_convert (itype, oldival),
+ fold_convert (itype, newival));
+ if (oldval == oldival)
+ x = fold_convert (type, x);
+ x = build_gimple_modify_stmt (oldival, x);
+ gimplify_and_add (x, pre_p);
- /* Only expand this once. */
- TREE_OPERAND (targ, 3) = init;
- TARGET_EXPR_INITIAL (targ) = NULL_TREE;
+ /* For floating point, be prepared for the loop backedge. */
+ if (oldval != oldival)
+ {
+ x = build1 (VIEW_CONVERT_EXPR, type, oldival);
+ x = build_gimple_modify_stmt (oldval, x);
+ gimplify_and_add (x, pre_p);
}
- else
- /* We should have expanded this before. */
- gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
- *expr_p = temp;
- return GS_OK;
+ /* Note that we always perform the comparison as an integer, even for
+ floating point. This allows the atomic operation to properly
+ succeed even with NaNs and -0.0. */
+ x = build3 (COND_EXPR, void_type_node,
+ build2 (NE_EXPR, boolean_type_node,
+ fold_convert (itype, oldival), oldival2),
+ build1 (GOTO_EXPR, void_type_node, label), NULL);
+ gimplify_and_add (x, pre_p);
+
+ *expr_p = NULL;
+ return GS_ALL_DONE;
}
-/* Gimplification of expression trees. */
+/* A subroutine of gimplify_omp_atomic.  Implement the atomic operation as:
-/* Gimplify an expression which appears at statement context; usually, this
- means replacing it with a suitably gimple STATEMENT_LIST. */
+ GOMP_atomic_start ();
+ *addr = rhs;
+ GOMP_atomic_end ();
-void
-gimplify_stmt (tree *stmt_p)
+ The result is not globally atomic, but works so long as all parallel
+ references are within #pragma omp atomic directives. According to
+ responses received from omp@openmp.org, appears to be within spec.
+ Which makes sense, since that's how several other compilers handle
+ this situation as well. */
+
+static enum gimplify_status
+gimplify_omp_atomic_mutex (tree *expr_p, tree *pre_p, tree addr, tree rhs)
{
- gimplify_expr (stmt_p, NULL, NULL, is_gimple_stmt, fb_none);
+ tree t;
+
+ /* Acquire the runtime's global atomic lock.  */
+ t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
+ t = build_call_expr (t, 0);
+ gimplify_and_add (t, pre_p);
+
+ /* Perform the protected store: *ADDR = RHS.  */
+ t = build_fold_indirect_ref (addr);
+ t = build_gimple_modify_stmt (t, rhs);
+ gimplify_and_add (t, pre_p);
+
+ /* Release the global atomic lock.  */
+ t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
+ t = build_call_expr (t, 0);
+ gimplify_and_add (t, pre_p);
+
+ /* Everything was emitted into PRE_P; nothing is left of the expression.  */
+ *expr_p = NULL;
+ return GS_ALL_DONE;
}
-/* Similarly, but force the result to be a STATEMENT_LIST. */
+/* Gimplify an OMP_ATOMIC statement.  */
-void
-gimplify_to_stmt_list (tree *stmt_p)
+static enum gimplify_status
+gimplify_omp_atomic (tree *expr_p, tree *pre_p)
{
- gimplify_stmt (stmt_p);
- if (!*stmt_p)
- *stmt_p = alloc_stmt_list ();
- else if (TREE_CODE (*stmt_p) != STATEMENT_LIST)
+ tree addr = TREE_OPERAND (*expr_p, 0);
+ tree rhs = TREE_OPERAND (*expr_p, 1);
+ tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
+ HOST_WIDE_INT index;
+
+ /* Make sure the type is one of the supported sizes. */
+ index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
+ index = exact_log2 (index);
+ /* INDEX is now log2 of the operand size in bytes; 0..4 covers the
+ 1..16 byte operands the __sync builtins support.  */
+ if (index >= 0 && index <= 4)
{
- tree t = *stmt_p;
- *stmt_p = alloc_stmt_list ();
- append_to_statement_list (t, stmt_p);
+ enum gimplify_status gs;
+ unsigned int align;
+
+ /* Determine the alignment of the stored-to object: from its decl,
+ from the field for a component reference, else from its type.  */
+ if (DECL_P (TREE_OPERAND (addr, 0)))
+ align = DECL_ALIGN_UNIT (TREE_OPERAND (addr, 0));
+ else if (TREE_CODE (TREE_OPERAND (addr, 0)) == COMPONENT_REF
+ && TREE_CODE (TREE_OPERAND (TREE_OPERAND (addr, 0), 1))
+ == FIELD_DECL)
+ align = DECL_ALIGN_UNIT (TREE_OPERAND (TREE_OPERAND (addr, 0), 1));
+ else
+ align = TYPE_ALIGN_UNIT (type);
+
+ /* __sync builtins require strict data alignment. */
+ if (exact_log2 (align) >= index)
+ {
+ /* When possible, use specialized atomic update functions. */
+ if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
+ {
+ gs = gimplify_omp_atomic_fetch_op (expr_p, addr, rhs, index);
+ if (gs != GS_UNHANDLED)
+ return gs;
+ }
+
+ /* If we don't have specialized __sync builtins, try and implement
+ as a compare and swap loop. */
+ gs = gimplify_omp_atomic_pipeline (expr_p, pre_p, addr, rhs, index);
+ if (gs != GS_UNHANDLED)
+ return gs;
+ }
}
-}
+ /* The ultimate fallback is wrapping the operation in a mutex. */
+ return gimplify_omp_atomic_mutex (expr_p, pre_p, addr, rhs);
+}
/* Gimplifies the expression tree pointed to by EXPR_P. Return 0 if
gimplification failed.
/* Die, die, die, my darling. */
if (save_expr == error_mark_node
- || (TREE_TYPE (save_expr)
+ || (!GIMPLE_STMT_P (save_expr)
+ && TREE_TYPE (save_expr)
&& TREE_TYPE (save_expr) == error_mark_node))
{
ret = GS_ERROR;
break;
case COND_EXPR:
- ret = gimplify_cond_expr (expr_p, pre_p, post_p, NULL_TREE,
- fallback);
+ ret = gimplify_cond_expr (expr_p, pre_p, fallback);
/* C99 code may assign to an array in a structure value of a
conditional expression, and this has undefined behavior
only on execution, so create a temporary if an lvalue is
if (fallback == fb_lvalue)
{
*expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
- lang_hooks.mark_addressable (*expr_p);
+ mark_addressable (*expr_p);
}
break;
if (fallback == fb_lvalue)
{
*expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
- lang_hooks.mark_addressable (*expr_p);
+ mark_addressable (*expr_p);
}
break;
break;
case MODIFY_EXPR:
+ case GIMPLE_MODIFY_STMT:
case INIT_EXPR:
ret = gimplify_modify_expr (expr_p, pre_p, post_p,
fallback != fb_none);
+
+ if (*expr_p)
+ {
+ /* The distinction between MODIFY_EXPR and INIT_EXPR is no longer
+ useful. */
+ if (TREE_CODE (*expr_p) == INIT_EXPR)
+ TREE_SET_CODE (*expr_p, MODIFY_EXPR);
+
+ /* Convert MODIFY_EXPR to GIMPLE_MODIFY_STMT. */
+ if (TREE_CODE (*expr_p) == MODIFY_EXPR)
+ tree_to_gimple_tuple (expr_p);
+ }
+
break;
case TRUTH_ANDIF_EXPR:
/* FALLTHRU */
case FIX_TRUNC_EXPR:
- case FIX_CEIL_EXPR:
- case FIX_FLOOR_EXPR:
- case FIX_ROUND_EXPR:
/* unary_expr: ... | '(' cast ')' val | ... */
ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
is_gimple_val, fb_rvalue);
break;
case BIND_EXPR:
- ret = gimplify_bind_expr (expr_p, NULL, pre_p);
+ ret = gimplify_bind_expr (expr_p, pre_p);
break;
case LOOP_EXPR:
{
unsigned HOST_WIDE_INT ix;
constructor_elt *ce;
+ tree temp = NULL_TREE;
for (ix = 0;
VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
ix, ce);
ix++)
if (TREE_SIDE_EFFECTS (ce->value))
- gimplify_expr (&ce->value, pre_p, post_p,
- gimple_test_f, fallback);
+ append_to_statement_list (ce->value, &temp);
- *expr_p = NULL_TREE;
+ *expr_p = temp;
+ ret = GS_OK;
}
-
- ret = GS_ALL_DONE;
+ /* C99 code may assign to an array in a constructed
+ structure or union, and this has undefined behavior only
+ on execution, so create a temporary if an lvalue is
+ required. */
+ else if (fallback == fb_lvalue)
+ {
+ *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
+ mark_addressable (*expr_p);
+ }
+ else
+ ret = GS_ALL_DONE;
break;
/* The following are special cases that are not handled by the
ret = GS_ALL_DONE;
break;
+ case CHANGE_DYNAMIC_TYPE_EXPR:
+ ret = gimplify_expr (&CHANGE_DYNAMIC_TYPE_LOCATION (*expr_p),
+ pre_p, post_p, is_gimple_reg, fb_lvalue);
+ break;
+
case OBJ_TYPE_REF:
{
enum gimplify_status r0, r1;
break;
case STATEMENT_LIST:
- ret = gimplify_statement_list (expr_p);
+ ret = gimplify_statement_list (expr_p, pre_p);
break;
case WITH_SIZE_EXPR:
break;
case VAR_DECL:
- /* ??? If this is a local variable, and it has not been seen in any
- outer BIND_EXPR, then it's probably the result of a duplicate
- declaration, for which we've already issued an error. It would
- be really nice if the front end wouldn't leak these at all.
- Currently the only known culprit is C++ destructors, as seen
- in g++.old-deja/g++.jason/binding.C. */
- tmp = *expr_p;
- if (!TREE_STATIC (tmp) && !DECL_EXTERNAL (tmp)
- && decl_function_context (tmp) == current_function_decl
- && !DECL_SEEN_IN_BIND_EXPR_P (tmp))
- {
- gcc_assert (errorcount || sorrycount);
- ret = GS_ERROR;
- break;
- }
- /* FALLTHRU */
-
case PARM_DECL:
- tmp = *expr_p;
-
- /* If this is a local variable sized decl, it must be accessed
- indirectly. Perform that substitution. */
- if (DECL_HAS_VALUE_EXPR_P (tmp))
- {
- *expr_p = unshare_expr (DECL_VALUE_EXPR (tmp));
- ret = GS_OK;
- break;
- }
+ ret = gimplify_var_or_parm_decl (expr_p);
+ break;
+ case RESULT_DECL:
+ /* When within an OpenMP context, notice uses of variables. */
+ if (gimplify_omp_ctxp)
+ omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
ret = GS_ALL_DONE;
break;
ret = GS_ALL_DONE;
break;
+ case OMP_PARALLEL:
+ ret = gimplify_omp_parallel (expr_p, pre_p);
+ break;
+
+ case OMP_FOR:
+ ret = gimplify_omp_for (expr_p, pre_p);
+ break;
+
+ case OMP_SECTIONS:
+ case OMP_SINGLE:
+ ret = gimplify_omp_workshare (expr_p, pre_p);
+ break;
+
+ case OMP_SECTION:
+ case OMP_MASTER:
+ case OMP_ORDERED:
+ case OMP_CRITICAL:
+ gimplify_to_stmt_list (&OMP_BODY (*expr_p));
+ break;
+
+ case OMP_ATOMIC:
+ ret = gimplify_omp_atomic (expr_p, pre_p);
+ break;
+
+ case OMP_RETURN:
+ case OMP_CONTINUE:
+ ret = GS_ALL_DONE;
+ break;
+
+ case POINTER_PLUS_EXPR:
+ /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
+ The second is gimple immediate saving a need for extra statement.
+ */
+ if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
+ && (tmp = maybe_fold_offset_to_reference
+ (TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
+ TREE_TYPE (TREE_TYPE (*expr_p)))))
+ {
+ tree ptr_type = build_pointer_type (TREE_TYPE (tmp));
+ if (useless_type_conversion_p (TREE_TYPE (*expr_p), ptr_type))
+ {
+ *expr_p = build_fold_addr_expr_with_type (tmp, ptr_type);
+ break;
+ }
+ }
+ /* Convert (void *)&a + 4 into (void *)&a[1]. */
+ if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
+ && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
+ && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
+ 0),0)))
+ && (tmp = maybe_fold_offset_to_reference
+ (TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
+ TREE_OPERAND (*expr_p, 1),
+ TREE_TYPE (TREE_TYPE
+ (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
+ 0))))))
+ {
+ tmp = build_fold_addr_expr (tmp);
+ *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
+ break;
+ }
+ /* FALLTHRU */
default:
switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
{
case tcc_comparison:
- /* If this is a comparison of objects of aggregate type,
- handle it specially (by converting to a call to
- memcmp). It would be nice to only have to do this
- for variable-sized objects, but then we'd have to
- allow the same nest of reference nodes we allow for
- MODIFY_EXPR and that's too complex. */
- if (!AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 1))))
- goto expr_2;
- ret = gimplify_variable_sized_compare (expr_p);
- break;
+ /* Handle comparison of objects of non scalar mode aggregates
+ with a call to memcmp. It would be nice to only have to do
+ this for variable-sized objects, but then we'd have to allow
+ the same nest of reference nodes we allow for MODIFY_EXPR and
+ that's too complex.
+
+ Compare scalar mode aggregates as scalar mode values. Using
+ memcmp for them would be very inefficient at best, and is
+ plain wrong if bitfields are involved. */
+
+ {
+ tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
+
+ if (!AGGREGATE_TYPE_P (type))
+ goto expr_2;
+ else if (TYPE_MODE (type) != BLKmode)
+ ret = gimplify_scalar_mode_aggregate_compare (expr_p);
+ else
+ ret = gimplify_variable_sized_compare (expr_p);
+
+ break;
+ }
/* If *EXPR_P does not need to be special-cased, handle it
according to its class. */
switch (code)
{
case COMPONENT_REF:
- case REALPART_EXPR: case IMAGPART_EXPR:
+ case REALPART_EXPR:
+ case IMAGPART_EXPR:
+ case VIEW_CONVERT_EXPR:
gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
gimple_test_f, fallback);
break;
- case ARRAY_REF: case ARRAY_RANGE_REF:
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
gimple_test_f, fallback);
gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
default:
/* Anything else with side-effects must be converted to
- a valid statement before we get here. */
+ a valid statement before we get here. */
gcc_unreachable ();
}
*expr_p = NULL;
}
- else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p)))
+ else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
+ && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
{
- /* Historically, the compiler has treated a bare
- reference to a volatile lvalue as forcing a load. */
+ /* Historically, the compiler has treated a bare reference
+ to a non-BLKmode volatile lvalue as forcing a load. */
tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
- /* Normally, we do want to create a temporary for a
+ /* Normally, we do not want to create a temporary for a
TREE_ADDRESSABLE type because such a type should not be
copied by bitwise-assignment. However, we make an
exception here, as all we are doing here is ensuring that
given a TREE_ADDRESSABLE type. */
tree tmp = create_tmp_var_raw (type, "vol");
gimple_add_tmp_var (tmp);
- *expr_p = build (MODIFY_EXPR, type, tmp, *expr_p);
+ *expr_p = build_gimple_modify_stmt (tmp, *expr_p);
}
else
/* We can't do anything useful with a volatile reference to
- incomplete type, so just throw it away. */
+ an incomplete type, so just throw it away. Likewise for
+ a BLKmode type, since any implicit inner load should
+ already have been turned into an explicit one by the
+ gimplification process. */
*expr_p = NULL;
}
case INTEGER_TYPE:
case ENUMERAL_TYPE:
case BOOLEAN_TYPE:
- case CHAR_TYPE:
case REAL_TYPE:
gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
case POINTER_TYPE:
case REFERENCE_TYPE:
- gimplify_type_sizes (TREE_TYPE (type), list_p);
+ /* We used to recurse on the pointed-to type here, which turned out to
+ be incorrect because its definition might refer to variables not
+ yet initialized at this point if a forward declaration is involved.
+
+ It was actually useful for anonymous pointed-to types to ensure
+ that the sizes evaluation dominates every possible later use of the
+ values. Restricting to such types here would be safe since there
+ is no possible forward declaration around, but would introduce an
+ undesirable middle-end semantic to anonymity. We then defer to
+ front-ends the responsibility of ensuring that the sizes are
+ evaluated both early and late enough, e.g. by attaching artificial
+ type declarations to the tree. */
break;
default:
*expr_p = create_tmp_var (type, NULL);
tmp = build1 (NOP_EXPR, type, expr);
- tmp = build2 (MODIFY_EXPR, type, *expr_p, expr);
+ tmp = build_gimple_modify_stmt (*expr_p, tmp);
if (EXPR_HAS_LOCATION (expr))
SET_EXPR_LOCUS (tmp, EXPR_LOCUS (expr));
else
}
}
\f
-#ifdef ENABLE_CHECKING
-/* Compare types A and B for a "close enough" match. */
-
-static bool
-cpt_same_type (tree a, tree b)
-{
- if (lang_hooks.types_compatible_p (a, b))
- return true;
-
- /* ??? The C++ FE decomposes METHOD_TYPES to FUNCTION_TYPES and doesn't
- link them together. This routine is intended to catch type errors
- that will affect the optimizers, and the optimizers don't add new
- dereferences of function pointers, so ignore it. */
- if ((TREE_CODE (a) == FUNCTION_TYPE || TREE_CODE (a) == METHOD_TYPE)
- && (TREE_CODE (b) == FUNCTION_TYPE || TREE_CODE (b) == METHOD_TYPE))
- return true;
-
- /* ??? The C FE pushes type qualifiers after the fact into the type of
- the element from the type of the array. See build_unary_op's handling
- of ADDR_EXPR. This seems wrong -- if we were going to do this, we
- should have done it when creating the variable in the first place.
- Alternately, why aren't the two array types made variants? */
- if (TREE_CODE (a) == ARRAY_TYPE && TREE_CODE (b) == ARRAY_TYPE)
- return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
-
- /* And because of those, we have to recurse down through pointers. */
- if (POINTER_TYPE_P (a) && POINTER_TYPE_P (b))
- return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
-
- return false;
-}
-
-/* Check for some cases of the front end missing cast expressions.
- The type of a dereference should correspond to the pointer type;
- similarly the type of an address should match its object. */
-
-static tree
-check_pointer_types_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
- void *data ATTRIBUTE_UNUSED)
-{
- tree t = *tp;
- tree ptype, otype, dtype;
-
- switch (TREE_CODE (t))
- {
- case INDIRECT_REF:
- case ARRAY_REF:
- otype = TREE_TYPE (t);
- ptype = TREE_TYPE (TREE_OPERAND (t, 0));
- dtype = TREE_TYPE (ptype);
- gcc_assert (cpt_same_type (otype, dtype));
- break;
-
- case ADDR_EXPR:
- ptype = TREE_TYPE (t);
- otype = TREE_TYPE (TREE_OPERAND (t, 0));
- dtype = TREE_TYPE (ptype);
- if (!cpt_same_type (otype, dtype))
- {
- /* &array is allowed to produce a pointer to the element, rather than
- a pointer to the array type. We must allow this in order to
- properly represent assigning the address of an array in C into
- pointer to the element type. */
- gcc_assert (TREE_CODE (otype) == ARRAY_TYPE
- && POINTER_TYPE_P (ptype)
- && cpt_same_type (TREE_TYPE (otype), dtype));
- break;
- }
- break;
-
- default:
- return NULL_TREE;
- }
-
-
- return NULL_TREE;
-}
-#endif
/* Gimplify the body of statements pointed to by BODY_P. FNDECL is the
function decl containing BODY. */
tree body, parm_stmts;
timevar_push (TV_TREE_GIMPLIFY);
+
+ gcc_assert (gimplify_ctxp == NULL);
push_gimplify_context ();
/* Unshare most shared trees in the body and in that of any nested functions.
/* If there isn't an outer BIND_EXPR, add one. */
if (TREE_CODE (body) != BIND_EXPR)
{
- tree b = build (BIND_EXPR, void_type_node, NULL_TREE,
- NULL_TREE, NULL_TREE);
+ tree b = build3 (BIND_EXPR, void_type_node, NULL_TREE,
+ NULL_TREE, NULL_TREE);
TREE_SIDE_EFFECTS (b) = 1;
append_to_statement_list_force (body, &BIND_EXPR_BODY (b));
body = b;
*body_p = body;
pop_gimplify_context (body);
+ gcc_assert (gimplify_ctxp == NULL);
-#ifdef ENABLE_CHECKING
- walk_tree (body_p, check_pointer_types_r, NULL, NULL);
+#ifdef ENABLE_TYPES_CHECKING
+ if (!errorcount && !sorrycount)
+ verify_gimple_1 (BIND_EXPR_BODY (*body_p));
#endif
timevar_pop (TV_TREE_GIMPLIFY);
/* Preliminarily mark non-addressed complex variables as eligible
for promotion to gimple registers. We'll transform their uses
as we find them. */
- if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
+ if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
&& !TREE_THIS_VOLATILE (parm)
&& !needs_to_live_in_memory (parm))
- DECL_COMPLEX_GIMPLE_REG_P (parm) = 1;
+ DECL_GIMPLE_REG_P (parm) = 1;
}
ret = DECL_RESULT (fndecl);
- if (TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
+ if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
&& !needs_to_live_in_memory (ret))
- DECL_COMPLEX_GIMPLE_REG_P (ret) = 1;
+ DECL_GIMPLE_REG_P (ret) = 1;
gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
{
tree tf, x, bind;
- tf = build (TRY_FINALLY_EXPR, void_type_node, NULL, NULL);
+ tf = build2 (TRY_FINALLY_EXPR, void_type_node, NULL, NULL);
TREE_SIDE_EFFECTS (tf) = 1;
x = DECL_SAVED_TREE (fndecl);
append_to_statement_list (x, &TREE_OPERAND (tf, 0));
x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
- x = build_function_call_expr (x, NULL);
+ x = build_call_expr (x, 0);
append_to_statement_list (x, &TREE_OPERAND (tf, 1));
- bind = build (BIND_EXPR, void_type_node, NULL, NULL, NULL);
+ bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
TREE_SIDE_EFFECTS (bind) = 1;
x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
- x = build_function_call_expr (x, NULL);
+ x = build_call_expr (x, 0);
append_to_statement_list (x, &BIND_EXPR_BODY (bind));
append_to_statement_list (tf, &BIND_EXPR_BODY (bind));
DECL_SAVED_TREE (fndecl) = bind;
}
+ cfun->gimplified = true;
current_function_decl = oldfn;
cfun = oldfn ? DECL_STRUCT_FUNCTION (oldfn) : NULL;
}
-
\f
/* Expands EXPR to list of gimple statements STMTS. If SIMPLE is true,
force the result to be either ssa_name or an invariant, otherwise
gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
push_gimplify_context ();
- gimplify_ctxp->into_ssa = in_ssa_p;
+ gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
if (var)
- expr = build (MODIFY_EXPR, TREE_TYPE (var), var, expr);
+ expr = build_gimple_modify_stmt (var, expr);
ret = gimplify_expr (&expr, stmts, NULL,
gimple_test_f, fb_rvalue);
gcc_assert (ret != GS_ERROR);
- if (referenced_vars)
+ if (gimple_referenced_vars (cfun))
{
for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
- add_referenced_tmp_var (t);
+ add_referenced_var (t);
}
pop_gimplify_context (NULL);
}
/* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
- some statements are produced, emits them before BSI. */
+ some statements are produced, emits them at BSI. If BEFORE is true,
+ the statements are appended before BSI, otherwise they are appended after
+ it. M specifies the way BSI moves after insertion (BSI_SAME_STMT or
+ BSI_CONTINUE_LINKING are the usual values). */
tree
force_gimple_operand_bsi (block_stmt_iterator *bsi, tree expr,
- bool simple_p, tree var)
+ bool simple_p, tree var, bool before,
+ enum bsi_iterator_update m)
{
tree stmts;
expr = force_gimple_operand (expr, &stmts, simple_p, var);
if (stmts)
- bsi_insert_before (bsi, stmts, BSI_SAME_STMT);
+ {
+ if (gimple_in_ssa_p (cfun))
+ {
+ tree_stmt_iterator tsi;
+
+ /* In SSA form, queue symbols referenced by the newly produced
+ statements for SSA-name renaming before inserting them.  */
+ for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
+ mark_symbols_for_renaming (tsi_stmt (tsi));
+ }
+
+ if (before)
+ bsi_insert_before (bsi, stmts, m);
+ else
+ bsi_insert_after (bsi, stmts, m);
+ }
return expr;
}