X-Git-Url: http://git.sourceforge.jp/view?p=pf3gnuchains%2Fgcc-fork.git;a=blobdiff_plain;f=gcc%2Fgimplify.c;h=faa5b48baad953c4875f40312f818915b2437f05;hp=9a6d3076f134a4d014f118de87ac5bd55028971e;hb=6e8435a1039a5266c33343f12606821de0c68c54;hpb=98c5a6a3ed7cc531ae7d39c8b0d7eff72767347f diff --git a/gcc/gimplify.c b/gcc/gimplify.c index 9a6d3076f13..faa5b48baad 100644 --- a/gcc/gimplify.c +++ b/gcc/gimplify.c @@ -1,6 +1,6 @@ /* Tree lowering pass. This pass converts the GENERIC functions-as-trees tree representation into the GIMPLE form. - Copyright (C) 2002, 2003, 2004, 2005 Free Software Foundation, Inc. + Copyright (C) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc. Major work done by Sebastian Pop , Diego Novillo and Jason Merrill . @@ -18,8 +18,8 @@ for more details. You should have received a copy of the GNU General Public License along with GCC; see the file COPYING. If not, write to the Free -Software Foundation, 59 Temple Place - Suite 330, Boston, MA -02111-1307, USA. */ +Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA +02110-1301, USA. */ #include "config.h" #include "system.h" @@ -27,7 +27,6 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA #include "tm.h" #include "tree.h" #include "rtl.h" -#include "errors.h" #include "varray.h" #include "tree-gimple.h" #include "tree-inline.h" @@ -45,22 +44,60 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA #include "output.h" #include "expr.h" #include "ggc.h" +#include "toplev.h" #include "target.h" +#include "optabs.h" +#include "pointer-set.h" -static struct gimplify_ctx + +enum gimplify_omp_var_data +{ + GOVD_SEEN = 1, + GOVD_EXPLICIT = 2, + GOVD_SHARED = 4, + GOVD_PRIVATE = 8, + GOVD_FIRSTPRIVATE = 16, + GOVD_LASTPRIVATE = 32, + GOVD_REDUCTION = 64, + GOVD_LOCAL = 128, + GOVD_DEBUG_PRIVATE = 256, + GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE + | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL) +}; + +struct gimplify_omp_ctx { + struct gimplify_omp_ctx *outer_context; + splay_tree variables; + struct pointer_set_t *privatized_types; + location_t location; + enum omp_clause_default_kind default_kind; + bool is_parallel; + bool is_combined_parallel; +}; + +struct gimplify_ctx +{ + struct gimplify_ctx *prev_context; + tree current_bind_expr; tree temps; tree conditional_cleanups; tree exit_label; tree return_temp; - varray_type case_labels; + + VEC(tree,heap) *case_labels; /* The formal temporary table. Should this be persistent? */ htab_t temp_htab; + int conditions; bool save_stack; bool into_ssa; -} *gimplify_ctxp; +}; + +static struct gimplify_ctx *gimplify_ctxp; +static struct gimplify_omp_ctx *gimplify_omp_ctxp; + /* Formal (expression) temporary table handling: Multiple occurrences of @@ -116,14 +153,14 @@ gimple_tree_eq (const void *p1, const void *p2) void push_gimplify_context (void) { - gcc_assert (!gimplify_ctxp); - gimplify_ctxp - = (struct gimplify_ctx *) xcalloc (1, sizeof (struct gimplify_ctx)); + struct gimplify_ctx *c; + + c = (struct gimplify_ctx *) xcalloc (1, sizeof (struct gimplify_ctx)); + c->prev_context = gimplify_ctxp; if (optimize) - gimplify_ctxp->temp_htab - = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free); - else - gimplify_ctxp->temp_htab = NULL; + c->temp_htab = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free); + + gimplify_ctxp = c; } /* Tear down a context for the gimplifier. 
If BODY is non-null, then @@ -133,28 +170,23 @@ push_gimplify_context (void) void pop_gimplify_context (tree body) { + struct gimplify_ctx *c = gimplify_ctxp; tree t; - gcc_assert (gimplify_ctxp && !gimplify_ctxp->current_bind_expr); + gcc_assert (c && !c->current_bind_expr); + gimplify_ctxp = c->prev_context; - for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t)) + for (t = c->temps; t ; t = TREE_CHAIN (t)) DECL_GIMPLE_FORMAL_TEMP_P (t) = 0; if (body) - declare_tmp_vars (gimplify_ctxp->temps, body); + declare_vars (c->temps, body, false); else - record_vars (gimplify_ctxp->temps); - -#if 0 - if (!quiet_flag && optimize) - fprintf (stderr, " collisions: %f ", - htab_collisions (gimplify_ctxp->temp_htab)); -#endif + record_vars (c->temps); if (optimize) - htab_delete (gimplify_ctxp->temp_htab); - free (gimplify_ctxp); - gimplify_ctxp = NULL; + htab_delete (c->temp_htab); + free (c); } static void @@ -214,6 +246,49 @@ gimple_pop_condition (tree *pre_p) } } +/* A stable comparison routine for use with splay trees and DECLs. */ + +static int +splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb) +{ + tree a = (tree) xa; + tree b = (tree) xb; + + return DECL_UID (a) - DECL_UID (b); +} + +/* Create a new omp construct that deals with variable remapping. */ + +static struct gimplify_omp_ctx * +new_omp_context (bool is_parallel, bool is_combined_parallel) +{ + struct gimplify_omp_ctx *c; + + c = XCNEW (struct gimplify_omp_ctx); + c->outer_context = gimplify_omp_ctxp; + c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0); + c->privatized_types = pointer_set_create (); + c->location = input_location; + c->is_parallel = is_parallel; + c->is_combined_parallel = is_combined_parallel; + c->default_kind = OMP_CLAUSE_DEFAULT_SHARED; + + return c; +} + +/* Destroy an omp construct that deals with variable remapping. */ + +static void +delete_omp_context (struct gimplify_omp_ctx *c) +{ + splay_tree_delete (c->variables); + pointer_set_destroy (c->privatized_types); + XDELETE (c); +} + +static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int); +static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool); + /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */ static void @@ -236,7 +311,7 @@ append_to_statement_list_1 (tree t, tree *list_p) tsi_link_after (&i, t, TSI_CONTINUE_LINKING); } -/* Add T to the end of the list container pointed by LIST_P. +/* Add T to the end of the list container pointed to by LIST_P. If T is an expression with no effects, it is ignored. */ void @@ -298,6 +373,48 @@ create_artificial_label (void) return lab; } +/* Subroutine for find_single_pointer_decl. */ + +static tree +find_single_pointer_decl_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, + void *data) +{ + tree *pdecl = (tree *) data; + + if (DECL_P (*tp) && POINTER_TYPE_P (TREE_TYPE (*tp))) + { + if (*pdecl) + { + /* We already found a pointer decl; return anything other + than NULL_TREE to unwind from walk_tree signalling that + we have a duplicate. */ + return *tp; + } + *pdecl = *tp; + } + + return NULL_TREE; +} + +/* Find the single DECL of pointer type in the tree T and return it. + If there are zero or more than one such DECLs, return NULL. 
*/ + +static tree +find_single_pointer_decl (tree t) +{ + tree decl = NULL_TREE; + + if (walk_tree (&t, find_single_pointer_decl_1, &decl, NULL)) + { + /* find_single_pointer_decl_1 returns a nonzero value, causing + walk_tree to return a nonzero value, to indicate that it + found more than one pointer DECL. */ + return NULL_TREE; + } + + return decl; +} + /* Create a new temporary name with PREFIX. Returns an identifier. */ static GTY(()) unsigned int tmp_var_id_num; @@ -362,10 +479,12 @@ create_tmp_var (tree type, const char *prefix) tree tmp_var; /* We don't allow types that are addressable (meaning we can't make copies), - incomplete, or of variable size. */ - gcc_assert (!TREE_ADDRESSABLE (type) - && COMPLETE_TYPE_P (type) - && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST); + or incomplete. We also used to reject every variable size objects here, + but now support those for which a constant upper bound can be obtained. + The processing for variable sizes is performed in gimple_add_tmp_var, + point at which it really matters and possibly reached via paths not going + through this function, e.g. after direct calls to create_tmp_var_raw. */ + gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type)); tmp_var = create_tmp_var_raw (type, prefix); gimple_add_tmp_var (tmp_var); @@ -404,7 +523,7 @@ get_name (tree t) static inline tree create_tmp_from_val (tree val) { - return create_tmp_var (TREE_TYPE (val), get_name (val)); + return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val)); } /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse @@ -431,7 +550,7 @@ lookup_tmp_var (tree val, bool is_formal) slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT); if (*slot == NULL) { - elt_p = xmalloc (sizeof (*elt_p)); + elt_p = XNEW (elt_t); elt_p->val = val; elt_p->temp = ret = create_tmp_from_val (val); *slot = (void *) elt_p; @@ -470,7 +589,28 @@ internal_get_tmp_var (tree val, tree *pre_p, tree *post_p, bool is_formal) t = lookup_tmp_var (val, is_formal); - mod = build (MODIFY_EXPR, TREE_TYPE (t), t, val); + if (is_formal) + { + tree u = find_single_pointer_decl (val); + + if (u && TREE_CODE (u) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (u)) + u = DECL_GET_RESTRICT_BASE (u); + if (u && TYPE_RESTRICT (TREE_TYPE (u))) + { + if (DECL_BASED_ON_RESTRICT_P (t)) + gcc_assert (u == DECL_GET_RESTRICT_BASE (t)); + else + { + DECL_BASED_ON_RESTRICT_P (t) = 1; + SET_DECL_RESTRICT_BASE (t, u); + } + } + } + + if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE) + DECL_COMPLEX_GIMPLE_REG_P (t) = 1; + + mod = build2 (INIT_EXPR, TREE_TYPE (t), t, val); if (EXPR_HAS_LOCATION (val)) SET_EXPR_LOCUS (mod, EXPR_LOCUS (val)); @@ -488,6 +628,10 @@ internal_get_tmp_var (tree val, tree *pre_p, tree *post_p, bool is_formal) return t; } +/* Returns a formal temporary variable initialized with VAL. PRE_P + points to a statement list where side-effects needed to compute VAL + should be stored. */ + tree get_formal_tmp_var (tree val, tree *pre_p) { @@ -503,15 +647,16 @@ get_initialized_tmp_var (tree val, tree *pre_p, tree *post_p) return internal_get_tmp_var (val, pre_p, post_p, false); } -/* Declares all the variables in VARS in SCOPE. */ +/* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is + true, generate debug info for them; otherwise don't. 
*/ void -declare_tmp_vars (tree vars, tree scope) +declare_vars (tree vars, tree scope, bool debug_info) { tree last = vars; if (last) { - tree temps; + tree temps, block; /* C99 mode puts the default 'return 0;' for main outside the outer braces. So drill down until we find an actual scope. */ @@ -521,16 +666,65 @@ declare_tmp_vars (tree vars, tree scope) gcc_assert (TREE_CODE (scope) == BIND_EXPR); temps = nreverse (last); - TREE_CHAIN (last) = BIND_EXPR_VARS (scope); - BIND_EXPR_VARS (scope) = temps; + + block = BIND_EXPR_BLOCK (scope); + if (!block || !debug_info) + { + TREE_CHAIN (last) = BIND_EXPR_VARS (scope); + BIND_EXPR_VARS (scope) = temps; + } + else + { + /* We need to attach the nodes both to the BIND_EXPR and to its + associated BLOCK for debugging purposes. The key point here + is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR + is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */ + if (BLOCK_VARS (block)) + BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps); + else + { + BIND_EXPR_VARS (scope) = chainon (BIND_EXPR_VARS (scope), temps); + BLOCK_VARS (block) = temps; + } + } } } +/* For VAR a VAR_DECL of variable size, try to find a constant upper bound + for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if + no such upper bound can be obtained. */ + +static void +force_constant_size (tree var) +{ + /* The only attempt we make is by querying the maximum size of objects + of the variable's type. */ + + HOST_WIDE_INT max_size; + + gcc_assert (TREE_CODE (var) == VAR_DECL); + + max_size = max_int_size_in_bytes (TREE_TYPE (var)); + + gcc_assert (max_size >= 0); + + DECL_SIZE_UNIT (var) + = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size); + DECL_SIZE (var) + = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT); +} + void gimple_add_tmp_var (tree tmp) { gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp)); + /* Later processing assumes that the object size is constant, which might + not be true at this point. Force the use of a constant upper bound in + this case. */ + if (!host_integerp (DECL_SIZE_UNIT (tmp), 1)) + force_constant_size (tmp); + DECL_CONTEXT (tmp) = current_function_decl; DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1; @@ -538,11 +732,21 @@ gimple_add_tmp_var (tree tmp) { TREE_CHAIN (tmp) = gimplify_ctxp->temps; gimplify_ctxp->temps = tmp; + + /* Mark temporaries local within the nearest enclosing parallel. */ + if (gimplify_omp_ctxp) + { + struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; + while (ctx && !ctx->is_parallel) + ctx = ctx->outer_context; + if (ctx) + omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN); + } } else if (cfun) record_vars (tmp); else - declare_tmp_vars (tmp, DECL_SAVED_TREE (current_function_decl)); + declare_vars (tmp, DECL_SAVED_TREE (current_function_decl), false); } /* Determines whether to assign a locus to the statement STMT. */ @@ -606,7 +810,7 @@ mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data) || TREE_CODE_CLASS (code) == tcc_constant || code == SAVE_EXPR || code == TARGET_EXPR /* We can't do anything sensible with a BLOCK used as an expression, - but we also can't abort when we see it because of non-expression + but we also can't just die when we see it because of non-expression uses. So just avert our eyes and cross our fingers. Silly Java. */ || code == BLOCK) *walk_subtrees = 0; @@ -733,10 +937,10 @@ gimple_build_eh_filter (tree body, tree allowed, tree failure) tree t; /* FIXME should the allowed types go in TREE_TYPE? 
*/ - t = build (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE); + t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE); append_to_statement_list (failure, &EH_FILTER_FAILURE (t)); - t = build (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t); + t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t); append_to_statement_list (body, &TREE_OPERAND (t, 0)); return t; @@ -751,73 +955,73 @@ gimple_build_eh_filter (tree body, tree allowed, tree failure) tree voidify_wrapper_expr (tree wrapper, tree temp) { - if (!VOID_TYPE_P (TREE_TYPE (wrapper))) + tree type = TREE_TYPE (wrapper); + if (type && !VOID_TYPE_P (type)) { - tree *p, sub = wrapper; - - restart: - /* Set p to point to the body of the wrapper. */ - switch (TREE_CODE (sub)) - { - case BIND_EXPR: - /* For a BIND_EXPR, the body is operand 1. */ - p = &BIND_EXPR_BODY (sub); - break; + tree *p; - default: - p = &TREE_OPERAND (sub, 0); - break; - } - - /* Advance to the last statement. Set all container types to void. */ - if (TREE_CODE (*p) == STATEMENT_LIST) + /* Set p to point to the body of the wrapper. Loop until we find + something that isn't a wrapper. */ + for (p = &wrapper; p && *p; ) { - tree_stmt_iterator i = tsi_last (*p); - p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i); - } - else - { - for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1)) + switch (TREE_CODE (*p)) { + case BIND_EXPR: + TREE_SIDE_EFFECTS (*p) = 1; + TREE_TYPE (*p) = void_type_node; + /* For a BIND_EXPR, the body is operand 1. */ + p = &BIND_EXPR_BODY (*p); + break; + + case CLEANUP_POINT_EXPR: + case TRY_FINALLY_EXPR: + case TRY_CATCH_EXPR: TREE_SIDE_EFFECTS (*p) = 1; TREE_TYPE (*p) = void_type_node; + p = &TREE_OPERAND (*p, 0); + break; + + case STATEMENT_LIST: + { + tree_stmt_iterator i = tsi_last (*p); + TREE_SIDE_EFFECTS (*p) = 1; + TREE_TYPE (*p) = void_type_node; + p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i); + } + break; + + case COMPOUND_EXPR: + /* Advance to the last statement. Set all container types to void. */ + for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1)) + { + TREE_SIDE_EFFECTS (*p) = 1; + TREE_TYPE (*p) = void_type_node; + } + break; + + default: + goto out; } } + out: if (p == NULL || IS_EMPTY_STMT (*p)) - ; - /* Look through exception handling. */ - else if (TREE_CODE (*p) == TRY_FINALLY_EXPR - || TREE_CODE (*p) == TRY_CATCH_EXPR) - { - sub = *p; - goto restart; - } - /* The C++ frontend already did this for us. */ - else if (TREE_CODE (*p) == INIT_EXPR - || TREE_CODE (*p) == TARGET_EXPR) - temp = TREE_OPERAND (*p, 0); - /* If we're returning a dereference, move the dereference - outside the wrapper. */ - else if (TREE_CODE (*p) == INDIRECT_REF) + temp = NULL_TREE; + else if (temp) { - tree ptr = TREE_OPERAND (*p, 0); - temp = create_tmp_var (TREE_TYPE (ptr), "retval"); - *p = build (MODIFY_EXPR, TREE_TYPE (ptr), temp, ptr); - temp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (temp)), temp); - /* If this is a BIND_EXPR for a const inline function, it might not - have TREE_SIDE_EFFECTS set. That is no longer accurate. */ - TREE_SIDE_EFFECTS (wrapper) = 1; + /* The wrapper is on the RHS of an assignment that we're pushing + down. 
*/ + gcc_assert (TREE_CODE (temp) == INIT_EXPR + || TREE_CODE (temp) == MODIFY_EXPR); + TREE_OPERAND (temp, 1) = *p; + *p = temp; } else { - if (!temp) - temp = create_tmp_var (TREE_TYPE (wrapper), "retval"); - *p = build (MODIFY_EXPR, TREE_TYPE (temp), temp, *p); - TREE_SIDE_EFFECTS (wrapper) = 1; + temp = create_tmp_var (type, "retval"); + *p = build2 (INIT_EXPR, type, temp, *p); } - TREE_TYPE (wrapper) = void_type_node; return temp; } @@ -837,7 +1041,7 @@ build_stack_save_restore (tree *save, tree *restore) NULL_TREE); tmp_var = create_tmp_var (ptr_type_node, "saved_stack"); - *save = build (MODIFY_EXPR, ptr_type_node, tmp_var, save_call); + *save = build2 (MODIFY_EXPR, ptr_type_node, tmp_var, save_call); *restore = build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_RESTORE], tree_cons (NULL_TREE, tmp_var, NULL_TREE)); @@ -846,17 +1050,40 @@ build_stack_save_restore (tree *save, tree *restore) /* Gimplify a BIND_EXPR. Just voidify and recurse. */ static enum gimplify_status -gimplify_bind_expr (tree *expr_p, tree temp, tree *pre_p) +gimplify_bind_expr (tree *expr_p, tree *pre_p) { tree bind_expr = *expr_p; bool old_save_stack = gimplify_ctxp->save_stack; tree t; - temp = voidify_wrapper_expr (bind_expr, temp); + tree temp = voidify_wrapper_expr (bind_expr, NULL); /* Mark variables seen in this bind expr. */ for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t)) - DECL_SEEN_IN_BIND_EXPR_P (t) = 1; + { + if (TREE_CODE (t) == VAR_DECL) + { + struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; + + /* Mark variable as local. */ + if (ctx && !is_global_var (t) + && (! DECL_SEEN_IN_BIND_EXPR_P (t) + || splay_tree_lookup (ctx->variables, + (splay_tree_key) t) == NULL)) + omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN); + + DECL_SEEN_IN_BIND_EXPR_P (t) = 1; + } + + /* Preliminarily mark non-addressed complex variables as eligible + for promotion to gimple registers. We'll transform their uses + as we find them. */ + if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE + && !TREE_THIS_VOLATILE (t) + && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t)) + && !needs_to_live_in_memory (t)) + DECL_COMPLEX_GIMPLE_REG_P (t) = 1; + } gimple_push_bind_expr (bind_expr); gimplify_ctxp->save_stack = false; @@ -872,8 +1099,8 @@ gimplify_bind_expr (tree *expr_p, tree temp, tree *pre_p) format of the emitted code: see mx_register_decls(). */ build_stack_save_restore (&stack_save, &stack_restore); - t = build (TRY_FINALLY_EXPR, void_type_node, - BIND_EXPR_BODY (bind_expr), NULL_TREE); + t = build2 (TRY_FINALLY_EXPR, void_type_node, + BIND_EXPR_BODY (bind_expr), NULL_TREE); append_to_statement_list (stack_restore, &TREE_OPERAND (t, 1)); BIND_EXPR_BODY (bind_expr) = NULL_TREE; @@ -930,7 +1157,7 @@ gimplify_return_expr (tree stmt, tree *pre_p) returned in registers. If we're returning values in registers, then we don't want to extend the lifetime of the RESULT_DECL, particularly across another call. In addition, for those aggregates for which - hard_function_value generates a PARALLEL, we'll abort during normal + hard_function_value generates a PARALLEL, we'll die during normal expansion of structure assignments; there's special code in expand_return to handle this case that does not exist in expand_expr. 
*/ if (!result_decl @@ -963,7 +1190,7 @@ gimplify_return_expr (tree stmt, tree *pre_p) if (result == result_decl) ret_expr = result; else - ret_expr = build (MODIFY_EXPR, TREE_TYPE (result), result_decl, result); + ret_expr = build2 (MODIFY_EXPR, TREE_TYPE (result), result_decl, result); TREE_OPERAND (stmt, 0) = ret_expr; return GS_ALL_DONE; @@ -983,26 +1210,22 @@ gimplify_decl_expr (tree *stmt_p) if (TREE_TYPE (decl) == error_mark_node) return GS_ERROR; - else if (TREE_CODE (decl) == TYPE_DECL) + if ((TREE_CODE (decl) == TYPE_DECL + || TREE_CODE (decl) == VAR_DECL) + && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl))) gimplify_type_sizes (TREE_TYPE (decl), stmt_p); - else if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl)) + if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl)) { tree init = DECL_INITIAL (decl); - if (!TREE_CONSTANT (DECL_SIZE (decl))) + if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) { /* This is a variable-sized decl. Simplify its size and mark it for deferred expansion. Note that mudflap depends on the format of the emitted code: see mx_register_decls(). */ tree t, args, addr, ptr_type; - /* ??? We really shouldn't need to gimplify the type of the variable - since it already should have been done. But leave this here - for now to avoid disrupting too many things at once. */ - if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl))) - gimplify_type_sizes (TREE_TYPE (decl), stmt_p); - gimplify_one_sizepos (&DECL_SIZE (decl), stmt_p); gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), stmt_p); @@ -1015,7 +1238,8 @@ gimplify_decl_expr (tree *stmt_p) addr = create_tmp_var (ptr_type, get_name (decl)); DECL_IGNORED_P (addr) = 0; t = build_fold_indirect_ref (addr); - DECL_VALUE_EXPR (decl) = t; + SET_DECL_VALUE_EXPR (decl, t); + DECL_HAS_VALUE_EXPR_P (decl) = 1; args = tree_cons (NULL, DECL_SIZE_UNIT (decl), NULL); t = built_in_decls[BUILT_IN_ALLOCA]; @@ -1035,7 +1259,7 @@ gimplify_decl_expr (tree *stmt_p) if (!TREE_STATIC (decl)) { DECL_INITIAL (decl) = NULL_TREE; - init = build (MODIFY_EXPR, void_type_node, decl, init); + init = build2 (INIT_EXPR, void_type_node, decl, init); gimplify_and_add (init, stmt_p); } else @@ -1044,10 +1268,12 @@ gimplify_decl_expr (tree *stmt_p) walk_tree (&init, force_labels_r, NULL, NULL); } - /* This decl isn't mentioned in the enclosing block, so add it to the - list of temps. FIXME it seems a bit of a kludge to say that - anonymous artificial vars aren't pushed, but everything else is. */ - if (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE) + /* Some front ends do not explicitly declare all anonymous + artificial variables. We compensate here by declaring the + variables, though it would be better if the front ends would + explicitly declare them. 
*/ + if (!DECL_SEEN_IN_BIND_EXPR_P (decl) + && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE) gimple_add_tmp_var (decl); } @@ -1142,7 +1368,7 @@ gimplify_switch_expr (tree *expr_p, tree *pre_p) if (SWITCH_BODY (switch_expr)) { - varray_type labels, saved_labels; + VEC(tree,heap) *labels, *saved_labels; tree label_vec, default_case = NULL_TREE; size_t i, len; @@ -1151,27 +1377,41 @@ gimplify_switch_expr (tree *expr_p, tree *pre_p) gcc_assert (!SWITCH_LABELS (switch_expr)); saved_labels = gimplify_ctxp->case_labels; - VARRAY_TREE_INIT (gimplify_ctxp->case_labels, 8, "case_labels"); + gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8); gimplify_to_stmt_list (&SWITCH_BODY (switch_expr)); labels = gimplify_ctxp->case_labels; gimplify_ctxp->case_labels = saved_labels; - len = VARRAY_ACTIVE_SIZE (labels); - - for (i = 0; i < len; ++i) + i = 0; + while (i < VEC_length (tree, labels)) { - tree t = VARRAY_TREE (labels, i); - if (!CASE_LOW (t)) + tree elt = VEC_index (tree, labels, i); + tree low = CASE_LOW (elt); + bool remove_element = FALSE; + + if (low) + { + /* Discard empty ranges. */ + tree high = CASE_HIGH (elt); + if (high && INT_CST_LT (high, low)) + remove_element = TRUE; + } + else { /* The default case must be the last label in the list. */ - default_case = t; - VARRAY_TREE (labels, i) = VARRAY_TREE (labels, len - 1); - len--; - break; + gcc_assert (!default_case); + default_case = elt; + remove_element = TRUE; } + + if (remove_element) + VEC_ordered_remove (tree, labels, i); + else + i++; } + len = i; label_vec = make_tree_vec (len + 1); SWITCH_LABELS (*expr_p) = label_vec; @@ -1181,19 +1421,21 @@ gimplify_switch_expr (tree *expr_p, tree *pre_p) { /* If the switch has no default label, add one, so that we jump around the switch body. */ - default_case = build (CASE_LABEL_EXPR, void_type_node, NULL_TREE, - NULL_TREE, create_artificial_label ()); + default_case = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE, + NULL_TREE, create_artificial_label ()); append_to_statement_list (SWITCH_BODY (switch_expr), pre_p); - *expr_p = build (LABEL_EXPR, void_type_node, - CASE_LABEL (default_case)); + *expr_p = build1 (LABEL_EXPR, void_type_node, + CASE_LABEL (default_case)); } else *expr_p = SWITCH_BODY (switch_expr); for (i = 0; i < len; ++i) - TREE_VEC_ELT (label_vec, i) = VARRAY_TREE (labels, i); + TREE_VEC_ELT (label_vec, i) = VEC_index (tree, labels, i); TREE_VEC_ELT (label_vec, len) = default_case; + VEC_free (tree, heap, labels); + sort_case_labels (label_vec); SWITCH_BODY (switch_expr) = NULL; @@ -1208,10 +1450,17 @@ static enum gimplify_status gimplify_case_label_expr (tree *expr_p) { tree expr = *expr_p; + struct gimplify_ctx *ctxp; + + /* Invalid OpenMP programs can play Duff's Device type games with + #pragma omp parallel. At least in the C front end, we don't + detect such invalid branches until after gimplification. 
*/ + for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context) + if (ctxp->case_labels) + break; - gcc_assert (gimplify_ctxp->case_labels); - VARRAY_PUSH_TREE (gimplify_ctxp->case_labels, expr); - *expr_p = build (LABEL_EXPR, void_type_node, CASE_LABEL (expr)); + VEC_safe_push (tree, heap, ctxp->case_labels, expr); + *expr_p = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (expr)); return GS_ALL_DONE; } @@ -1245,7 +1494,7 @@ gimplify_exit_expr (tree *expr_p) tree expr; expr = build_and_jump (&gimplify_ctxp->exit_label); - expr = build (COND_EXPR, void_type_node, cond, expr, NULL_TREE); + expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE); *expr_p = expr; return GS_OK; @@ -1394,8 +1643,46 @@ gimplify_conversion (tree *expr_p) return GS_OK; } +/* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a + DECL_VALUE_EXPR, and it's worth re-examining things. */ + +static enum gimplify_status +gimplify_var_or_parm_decl (tree *expr_p) +{ + tree decl = *expr_p; + + /* ??? If this is a local variable, and it has not been seen in any + outer BIND_EXPR, then it's probably the result of a duplicate + declaration, for which we've already issued an error. It would + be really nice if the front end wouldn't leak these at all. + Currently the only known culprit is C++ destructors, as seen + in g++.old-deja/g++.jason/binding.C. */ + if (TREE_CODE (decl) == VAR_DECL + && !DECL_SEEN_IN_BIND_EXPR_P (decl) + && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl) + && decl_function_context (decl) == current_function_decl) + { + gcc_assert (errorcount || sorrycount); + return GS_ERROR; + } + + /* When within an OpenMP context, notice uses of variables. */ + if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true)) + return GS_ALL_DONE; + + /* If the decl is an alias for another expression, substitute it now. */ + if (DECL_HAS_VALUE_EXPR_P (decl)) + { + *expr_p = unshare_expr (DECL_VALUE_EXPR (decl)); + return GS_OK; + } + + return GS_ALL_DONE; +} + + /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR - node pointed by EXPR_P. + node pointed to by EXPR_P. compound_lval : min_lval '[' val ']' @@ -1421,29 +1708,36 @@ gimplify_compound_lval (tree *expr_p, tree *pre_p, tree *post_p, fallback_t fallback) { tree *p; - varray_type stack; + VEC(tree,heap) *stack; enum gimplify_status ret = GS_OK, tret; int i; /* Create a stack of the subexpressions so later we can walk them in - order from inner to outer. - - This array is very memory consuming. Don't even think of making - it VARRAY_TREE. */ - VARRAY_GENERIC_PTR_NOGC_INIT (stack, 10, "stack"); + order from inner to outer. */ + stack = VEC_alloc (tree, heap, 10); /* We can handle anything that get_inner_reference can deal with. */ for (p = expr_p; ; p = &TREE_OPERAND (*p, 0)) { + restart: /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */ if (TREE_CODE (*p) == INDIRECT_REF) *p = fold_indirect_ref (*p); - if (!handled_component_p (*p)) + + if (handled_component_p (*p)) + ; + /* Expand DECL_VALUE_EXPR now. In some cases that may expose + additional COMPONENT_REFs. */ + else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL) + && gimplify_var_or_parm_decl (p) == GS_OK) + goto restart; + else break; - VARRAY_PUSH_GENERIC_PTR_NOGC (stack, *p); + + VEC_safe_push (tree, heap, stack, *p); } - gcc_assert (VARRAY_ACTIVE_SIZE (stack)); + gcc_assert (VEC_length (tree, stack)); /* Now STACK is a stack of pointers to all the refs we've walked through and P points to the innermost expression. 
@@ -1457,9 +1751,9 @@ gimplify_compound_lval (tree *expr_p, tree *pre_p, So we do this in three steps. First we deal with the annotations for any variables in the components, then we gimplify the base, then we gimplify any indices, from left to right. */ - for (i = VARRAY_ACTIVE_SIZE (stack) - 1; i >= 0; i--) + for (i = VEC_length (tree, stack) - 1; i >= 0; i--) { - tree t = VARRAY_GENERIC_PTR_NOGC (stack, i); + tree t = VEC_index (tree, stack, i); if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) { @@ -1521,15 +1815,18 @@ gimplify_compound_lval (tree *expr_p, tree *pre_p, } } - /* Step 2 is to gimplify the base expression. */ - tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, fallback); + /* Step 2 is to gimplify the base expression. Make sure lvalue is set + so as to match the min_lval predicate. Failure to do so may result + in the creation of large aggregate temporaries. */ + tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, + fallback | fb_lvalue); ret = MIN (ret, tret); /* And finally, the indices and operands to BIT_FIELD_REF. During this loop we also remove any useless conversions. */ - for (; VARRAY_ACTIVE_SIZE (stack) > 0; ) + for (; VEC_length (tree, stack) > 0; ) { - tree t = VARRAY_TOP_TREE (stack); + tree t = VEC_pop (tree, stack); if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) { @@ -1565,7 +1862,6 @@ gimplify_compound_lval (tree *expr_p, tree *pre_p, set which would have caused all the outer expressions in EXPR_P leading to P to also have had TREE_SIDE_EFFECTS set. */ recalculate_side_effects (t); - VARRAY_POP (stack); } tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, fallback); @@ -1578,12 +1874,13 @@ gimplify_compound_lval (tree *expr_p, tree *pre_p, ret = MIN (ret, GS_OK); } - VARRAY_FREE (stack); + VEC_free (tree, heap, stack); return ret; } -/* Gimplify the self modifying expression pointed by EXPR_P (++, --, +=, -=). +/* Gimplify the self modifying expression pointed to by EXPR_P + (++, --, +=, -=). PRE_P points to the list where side effects that must happen before *EXPR_P should be stored. @@ -1599,7 +1896,7 @@ gimplify_self_mod_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value) { enum tree_code code; - tree lhs, lvalue, rhs, t1; + tree lhs, lvalue, rhs, t1, post = NULL, *orig_post_p = post_p; bool postfix; enum tree_code arith_code; enum gimplify_status ret; @@ -1616,6 +1913,11 @@ gimplify_self_mod_expr (tree *expr_p, tree *pre_p, tree *post_p, else postfix = false; + /* For postfix, make sure the inner expression's post side effects + are executed after side effects from this expression. */ + if (postfix) + post_p = &post; + /* Add or subtract? */ if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR) arith_code = PLUS_EXPR; @@ -1641,12 +1943,13 @@ gimplify_self_mod_expr (tree *expr_p, tree *pre_p, tree *post_p, return ret; } - t1 = build (arith_code, TREE_TYPE (*expr_p), lhs, rhs); - t1 = build (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1); + t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs); + t1 = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1); if (postfix) { - gimplify_and_add (t1, post_p); + gimplify_and_add (t1, orig_post_p); + append_to_statement_list (post, orig_post_p); *expr_p = lhs; return GS_ALL_DONE; } @@ -1711,7 +2014,7 @@ gimplify_arg (tree *expr_p, tree *pre_p) return gimplify_expr (expr_p, pre_p, NULL, test, fb); } -/* Gimplify the CALL_EXPR node pointed by EXPR_P. PRE_P points to the +/* Gimplify the CALL_EXPR node pointed to by EXPR_P. 
PRE_P points to the list where side effects that must happen before *EXPR_P should be stored. WANT_VALUE is true if the result of the call is desired. */ @@ -1744,7 +2047,8 @@ gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value) decl = get_callee_fndecl (*expr_p); if (decl && DECL_BUILT_IN (decl)) { - tree new = fold_builtin (*expr_p, !want_value); + tree arglist = TREE_OPERAND (*expr_p, 1); + tree new = fold_builtin (decl, arglist, !want_value); if (new && new != *expr_p) { @@ -1758,8 +2062,6 @@ gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value) if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_START) { - tree arglist = TREE_OPERAND (*expr_p, 1); - if (!arglist || !TREE_CHAIN (arglist)) { error ("too few arguments to function %"); @@ -1800,17 +2102,22 @@ gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value) TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1)); /* Try this again in case gimplification exposed something. */ - if (ret != GS_ERROR && decl && DECL_BUILT_IN (decl)) + if (ret != GS_ERROR) { - tree new = fold_builtin (*expr_p, !want_value); - - if (new && new != *expr_p) + decl = get_callee_fndecl (*expr_p); + if (decl && DECL_BUILT_IN (decl)) { - /* There was a transformation of this call which computes the - same value, but in a more efficient way. Return and try - again. */ - *expr_p = new; - return GS_OK; + tree arglist = TREE_OPERAND (*expr_p, 1); + tree new = fold_builtin (decl, arglist, !want_value); + + if (new && new != *expr_p) + { + /* There was a transformation of this call which computes the + same value, but in a more efficient way. Return and try + again. */ + *expr_p = new; + return GS_OK; + } } } @@ -1887,17 +2194,17 @@ shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p) if (b) goto yes; else goto no; else if (c) goto yes; else goto no; */ - expr = build (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0), - shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, - false_label_p), - shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p, - false_label_p)); + expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0), + shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, + false_label_p), + shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p, + false_label_p)); } else { - expr = build (COND_EXPR, void_type_node, pred, - build_and_jump (true_label_p), - build_and_jump (false_label_p)); + expr = build3 (COND_EXPR, void_type_node, pred, + build_and_jump (true_label_p), + build_and_jump (false_label_p)); } if (local_label) @@ -1932,7 +2239,7 @@ shortcut_cond_expr (tree expr) then_ = shortcut_cond_expr (expr); then_se = then_ && TREE_SIDE_EFFECTS (then_); pred = TREE_OPERAND (pred, 0); - expr = build (COND_EXPR, void_type_node, pred, then_, NULL_TREE); + expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE); } } if (!then_se) @@ -1947,7 +2254,7 @@ shortcut_cond_expr (tree expr) else_ = shortcut_cond_expr (expr); else_se = else_ && TREE_SIDE_EFFECTS (else_); pred = TREE_OPERAND (pred, 0); - expr = build (COND_EXPR, void_type_node, pred, NULL_TREE, else_); + expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_); } } @@ -2063,7 +2370,7 @@ shortcut_cond_expr (tree expr) /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. 
*/ -static tree +tree gimple_boolify (tree expr) { tree type = TREE_TYPE (expr); @@ -2071,10 +2378,6 @@ gimple_boolify (tree expr) if (TREE_CODE (type) == BOOLEAN_TYPE) return expr; - /* If this is the predicate of a COND_EXPR, it might not even be a - truthvalue yet. */ - expr = lang_hooks.truthvalue_conversion (expr); - switch (TREE_CODE (expr)) { case TRUTH_AND_EXPR: @@ -2099,11 +2402,11 @@ gimple_boolify (tree expr) default: /* Other expressions that get here must have boolean values, but might need to be converted to the appropriate mode. */ - return convert (boolean_type_node, expr); + return fold_convert (boolean_type_node, expr); } } -/* Convert the conditional expression pointed by EXPR_P '(p) ? a : b;' +/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;' into if (p) if (p) @@ -2117,38 +2420,46 @@ gimple_boolify (tree expr) TARGET is the tree for T1 above. PRE_P points to the list where side effects that must happen before - *EXPR_P should be stored. - - POST_P points to the list where side effects that must happen after - *EXPR_P should be stored. */ + *EXPR_P should be stored. */ static enum gimplify_status -gimplify_cond_expr (tree *expr_p, tree *pre_p, tree *post_p, tree target) +gimplify_cond_expr (tree *expr_p, tree *pre_p, fallback_t fallback) { tree expr = *expr_p; tree tmp, tmp2, type; enum gimplify_status ret; type = TREE_TYPE (expr); - if (!type) - TREE_TYPE (expr) = void_type_node; /* If this COND_EXPR has a value, copy the values into a temporary within the arms. */ - else if (! VOID_TYPE_P (type)) + if (! VOID_TYPE_P (type)) { - if (target) + tree result; + + if ((fallback & fb_lvalue) == 0) { - ret = gimplify_expr (&target, pre_p, post_p, - is_gimple_min_lval, fb_lvalue); - if (ret != GS_ERROR) - ret = GS_OK; - tmp = target; - tmp2 = unshare_expr (target); + result = tmp2 = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp"); + ret = GS_ALL_DONE; } else { - tmp2 = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp"); + tree type = build_pointer_type (TREE_TYPE (expr)); + + if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node) + TREE_OPERAND (expr, 1) = + build_fold_addr_expr (TREE_OPERAND (expr, 1)); + + if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node) + TREE_OPERAND (expr, 2) = + build_fold_addr_expr (TREE_OPERAND (expr, 2)); + + tmp2 = tmp = create_tmp_var (type, "iftmp"); + + expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0), + TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2)); + + result = build_fold_indirect_ref (tmp); ret = GS_ALL_DONE; } @@ -2156,12 +2467,12 @@ gimplify_cond_expr (tree *expr_p, tree *pre_p, tree *post_p, tree target) if this branch is void; in C++ it can be, if it's a throw. */ if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node) TREE_OPERAND (expr, 1) - = build (MODIFY_EXPR, void_type_node, tmp, TREE_OPERAND (expr, 1)); + = build2 (MODIFY_EXPR, void_type_node, tmp, TREE_OPERAND (expr, 1)); /* Build the else clause, 't1 = b;'. */ if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node) TREE_OPERAND (expr, 2) - = build (MODIFY_EXPR, void_type_node, tmp2, TREE_OPERAND (expr, 2)); + = build2 (MODIFY_EXPR, void_type_node, tmp2, TREE_OPERAND (expr, 2)); TREE_TYPE (expr) = void_type_node; recalculate_side_effects (expr); @@ -2169,7 +2480,7 @@ gimplify_cond_expr (tree *expr_p, tree *pre_p, tree *post_p, tree target) /* Move the COND_EXPR to the prequeue. 
*/ gimplify_and_add (expr, pre_p); - *expr_p = tmp; + *expr_p = result; return ret; } @@ -2356,15 +2667,17 @@ gimplify_init_ctor_preeval (tree *expr_p, tree *pre_p, tree *post_p, /* Recurse for nested constructors. */ if (TREE_CODE (*expr_p) == CONSTRUCTOR) { - tree list; - for (list = CONSTRUCTOR_ELTS (*expr_p); list ; list = TREE_CHAIN (list)) - gimplify_init_ctor_preeval (&TREE_VALUE (list), pre_p, post_p, data); + unsigned HOST_WIDE_INT ix; + constructor_elt *ce; + VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p); + + for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++) + gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data); return; } - /* We can't preevaluate if the type contains a placeholder. */ - if (type_contains_placeholder_p (TREE_TYPE (*expr_p))) - return; + /* If this is a variable sized type, we must remember the size. */ + maybe_with_size_expr (expr_p); /* Gimplify the constructor element to something appropriate for the rhs of a MODIFY_EXPR. Given that we know the lhs is an aggregate, we know @@ -2388,7 +2701,7 @@ gimplify_init_ctor_preeval (tree *expr_p, tree *pre_p, tree *post_p, /* If this is of variable size, we have no choice but to assume it doesn't overlap since we can't make a temporary for it. */ - if (!TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (*expr_p)))) + if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST) return; /* Otherwise, we must search for overlap ... */ @@ -2417,7 +2730,8 @@ gimplify_init_ctor_preeval (tree *expr_p, tree *pre_p, tree *post_p, Note that we never have to deal with SAVE_EXPRs here, because this has already been taken care of for us, in gimplify_init_ctor_preeval(). */ -static void gimplify_init_ctor_eval (tree, tree, tree *, bool); +static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *, + tree *, bool); static void gimplify_init_ctor_eval_range (tree object, tree lower, tree upper, @@ -2488,26 +2802,48 @@ gimplify_init_ctor_eval_range (tree object, tree lower, tree upper, pre_p); } +/* Return true if FDECL is accessing a field that is zero sized. */ + +static bool +zero_sized_field_decl (tree fdecl) +{ + if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl) + && integer_zerop (DECL_SIZE (fdecl))) + return true; + return false; +} + +/* Return true if TYPE is zero sized. */ + +static bool +zero_sized_type (tree type) +{ + if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type) + && integer_zerop (TYPE_SIZE (type))) + return true; + return false; +} + /* A subroutine of gimplify_init_constructor. Generate individual MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the - assignments should happen. LIST is the CONSTRUCTOR_ELTS of the + assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the CONSTRUCTOR. CLEARED is true if the entire LHS object has been zeroed first. */ static void -gimplify_init_ctor_eval (tree object, tree list, tree *pre_p, bool cleared) +gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts, + tree *pre_p, bool cleared) { tree array_elt_type = NULL; + unsigned HOST_WIDE_INT ix; + tree purpose, value; if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE) array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object))); - for (; list; list = TREE_CHAIN (list)) + FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value) { - tree purpose, value, cref, init; - - purpose = TREE_PURPOSE (list); - value = TREE_VALUE (list); + tree cref, init; /* NULL values are created above for gimplification errors. 
*/ if (value == NULL) @@ -2520,6 +2856,14 @@ gimplify_init_ctor_eval (tree object, tree list, tree *pre_p, bool cleared) so we don't have to figure out what's missing ourselves. */ gcc_assert (purpose); + /* Skip zero-sized fields, unless value has side-effects. This can + happen with calls to functions returning a zero-sized type, which + we shouldn't discard. As a number of downstream passes don't + expect sets of zero-sized fields, we rely on the gimplification of + the MODIFY_EXPR we make below to drop the assignment statement. */ + if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose)) + continue; + /* If we have a RANGE_EXPR, we have to build a loop to assign the whole range. */ if (TREE_CODE (purpose) == RANGE_EXPR) @@ -2541,19 +2885,23 @@ gimplify_init_ctor_eval (tree object, tree list, tree *pre_p, bool cleared) if (array_elt_type) { - cref = build (ARRAY_REF, array_elt_type, unshare_expr (object), - purpose, NULL_TREE, NULL_TREE); + cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object), + purpose, NULL_TREE, NULL_TREE); } else - cref = build (COMPONENT_REF, TREE_TYPE (purpose), - unshare_expr (object), purpose, NULL_TREE); + { + gcc_assert (TREE_CODE (purpose) == FIELD_DECL); + cref = build3 (COMPONENT_REF, TREE_TYPE (purpose), + unshare_expr (object), purpose, NULL_TREE); + } - if (TREE_CODE (value) == CONSTRUCTOR) + if (TREE_CODE (value) == CONSTRUCTOR + && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE) gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value), pre_p, cleared); else { - init = build (MODIFY_EXPR, TREE_TYPE (cref), cref, value); + init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value); gimplify_and_add (init, pre_p); } } @@ -2574,7 +2922,7 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p, tree ctor = TREE_OPERAND (*expr_p, 1); tree type = TREE_TYPE (ctor); enum gimplify_status ret; - tree elt_list; + VEC(constructor_elt,gc) *elts; if (TREE_CODE (ctor) != CONSTRUCTOR) return GS_UNHANDLED; @@ -2585,7 +2933,7 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p, return ret; object = TREE_OPERAND (*expr_p, 0); - elt_list = CONSTRUCTOR_ELTS (ctor); + elts = CONSTRUCTOR_ELTS (ctor); ret = GS_ALL_DONE; switch (TREE_CODE (type)) @@ -2597,22 +2945,25 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p, { struct gimplify_init_ctor_preeval_data preeval_data; HOST_WIDE_INT num_type_elements, num_ctor_elements; - HOST_WIDE_INT num_nonzero_elements, num_nonconstant_elements; - bool cleared; + HOST_WIDE_INT num_nonzero_elements; + bool cleared, valid_const_initializer; /* Aggregate types must lower constructors to initialization of individual elements. The exception is that a CONSTRUCTOR node with no elements indicates zero-initialization of the whole. */ - if (elt_list == NULL) + if (VEC_empty (constructor_elt, elts)) break; - categorize_ctor_elements (ctor, &num_nonzero_elements, - &num_nonconstant_elements, - &num_ctor_elements, &cleared); + /* Fetch information about the constructor to direct later processing. + We might want to make static versions of it in various cases, and + can only do so if it known to be a valid constant initializer. */ + valid_const_initializer + = categorize_ctor_elements (ctor, &num_nonzero_elements, + &num_ctor_elements, &cleared); /* If a const aggregate variable is being initialized, then it should never be a lose to promote the variable to be static. 
*/ - if (num_nonconstant_elements == 0 + if (valid_const_initializer && num_nonzero_elements > 1 && TREE_READONLY (object) && TREE_CODE (object) == VAR_DECL) @@ -2636,10 +2987,40 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p, break; } + /* If there are "lots" of initialized elements, even discounting + those that are not address constants (and thus *must* be + computed at runtime), then partition the constructor into + constant and non-constant parts. Block copy the constant + parts in, then generate code for the non-constant parts. */ + /* TODO. There's code in cp/typeck.c to do this. */ + + num_type_elements = count_type_elements (type, true); + + /* If count_type_elements could not determine number of type elements + for a constant-sized object, assume clearing is needed. + Don't do this for variable-sized objects, as store_constructor + will ignore the clearing of variable-sized objects. */ + if (num_type_elements < 0 && int_size_in_bytes (type) >= 0) + cleared = true; + /* If there are "lots" of zeros, then block clear the object first. */ + else if (num_type_elements - num_nonzero_elements > CLEAR_RATIO + && num_nonzero_elements < num_type_elements/4) + cleared = true; + /* ??? This bit ought not be needed. For any element not present + in the initializer, we should simply set them to zero. Except + we'd need to *find* the elements that are not present, and that + requires trickery to avoid quadratic compile-time behavior in + large cases or excessive memory use in small cases. */ + else if (num_ctor_elements < num_type_elements) + cleared = true; + /* If there are "lots" of initialized elements, and all of them are valid address constants, then the entire initializer can - be dropped to memory, and then memcpy'd out. */ - if (num_nonconstant_elements == 0) + be dropped to memory, and then memcpy'd out. Don't do this + for sparse arrays, though, as it's more efficient to follow + the standard CONSTRUCTOR behavior of memset followed by + individual element initialization. */ + if (valid_const_initializer && !cleared) { HOST_WIDE_INT size = int_size_in_bytes (type); unsigned int align; @@ -2685,34 +3066,26 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p, } } - /* If there are "lots" of initialized elements, even discounting - those that are not address constants (and thus *must* be - computed at runtime), then partition the constructor into - constant and non-constant parts. Block copy the constant - parts in, then generate code for the non-constant parts. */ - /* TODO. There's code in cp/typeck.c to do this. */ - - num_type_elements = count_type_elements (TREE_TYPE (ctor)); + /* If there are nonzero elements, pre-evaluate to capture elements + overlapping with the lhs into temporaries. We must do this before + clearing to fetch the values before they are zeroed-out. */ + if (num_nonzero_elements > 0) + { + preeval_data.lhs_base_decl = get_base_address (object); + if (!DECL_P (preeval_data.lhs_base_decl)) + preeval_data.lhs_base_decl = NULL; + preeval_data.lhs_alias_set = get_alias_set (object); - /* If there are "lots" of zeros, then block clear the object first. */ - if (num_type_elements - num_nonzero_elements > CLEAR_RATIO - && num_nonzero_elements < num_type_elements/4) - cleared = true; - - /* ??? This bit ought not be needed. For any element not present - in the initializer, we should simply set them to zero. 
Except - we'd need to *find* the elements that are not present, and that - requires trickery to avoid quadratic compile-time behavior in - large cases or excessive memory use in small cases. */ - else if (num_ctor_elements < num_type_elements) - cleared = true; + gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1), + pre_p, post_p, &preeval_data); + } if (cleared) { /* Zap the CONSTRUCTOR element list, which simplifies this case. Note that we still have to gimplify, in order to handle the case of variable sized types. Avoid shared tree structures. */ - CONSTRUCTOR_ELTS (ctor) = NULL_TREE; + CONSTRUCTOR_ELTS (ctor) = NULL; object = unshare_expr (object); gimplify_stmt (expr_p); append_to_statement_list (*expr_p, pre_p); @@ -2722,16 +3095,7 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p, elements in the constructor, add assignments to the individual scalar fields of the object. */ if (!cleared || num_nonzero_elements > 0) - { - preeval_data.lhs_base_decl = get_base_address (object); - if (!DECL_P (preeval_data.lhs_base_decl)) - preeval_data.lhs_base_decl = NULL; - preeval_data.lhs_alias_set = get_alias_set (object); - - gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1), - pre_p, post_p, &preeval_data); - gimplify_init_ctor_eval (object, elt_list, pre_p, cleared); - } + gimplify_init_ctor_eval (object, elts, pre_p, cleared); *expr_p = NULL_TREE; } @@ -2742,20 +3106,12 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p, tree r, i; /* Extract the real and imaginary parts out of the ctor. */ - r = i = NULL_TREE; - if (elt_list) - { - r = TREE_VALUE (elt_list); - elt_list = TREE_CHAIN (elt_list); - if (elt_list) - { - i = TREE_VALUE (elt_list); - gcc_assert (!TREE_CHAIN (elt_list)); - } - } + gcc_assert (VEC_length (constructor_elt, elts) == 2); + r = VEC_index (constructor_elt, elts, 0)->value; + i = VEC_index (constructor_elt, elts, 1)->value; if (r == NULL || i == NULL) { - tree zero = convert (TREE_TYPE (type), integer_zero_node); + tree zero = fold_convert (TREE_TYPE (type), integer_zero_node); if (r == NULL) r = zero; if (i == NULL) @@ -2771,7 +3127,7 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p, } else { - ctor = build (COMPLEX_EXPR, type, r, i); + ctor = build2 (COMPLEX_EXPR, type, r, i); TREE_OPERAND (*expr_p, 1) = ctor; ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p, rhs_predicate_for (TREE_OPERAND (*expr_p, 0)), @@ -2781,35 +3137,44 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p, break; case VECTOR_TYPE: - /* Go ahead and simplify constant constructors to VECTOR_CST. */ - if (TREE_CONSTANT (ctor)) - { - tree tem; + { + unsigned HOST_WIDE_INT ix; + constructor_elt *ce; - /* Even when ctor is constant, it might contain non-*_CST - elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't - belong into VECTOR_CST nodes. */ - for (tem = elt_list; tem; tem = TREE_CHAIN (tem)) - if (! CONSTANT_CLASS_P (TREE_VALUE (tem))) - break; + /* Go ahead and simplify constant constructors to VECTOR_CST. */ + if (TREE_CONSTANT (ctor)) + { + bool constant_p = true; + tree value; + + /* Even when ctor is constant, it might contain non-*_CST + elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't + belong into VECTOR_CST nodes. */ + FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value) + if (!CONSTANT_CLASS_P (value)) + { + constant_p = false; + break; + } - if (! 
tem) - { - TREE_OPERAND (*expr_p, 1) = build_vector (type, elt_list); - break; - } - } + if (constant_p) + { + TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts); + break; + } + } - /* Vector types use CONSTRUCTOR all the way through gimple - compilation as a general initializer. */ - for (; elt_list; elt_list = TREE_CHAIN (elt_list)) - { - enum gimplify_status tret; - tret = gimplify_expr (&TREE_VALUE (elt_list), pre_p, post_p, - is_gimple_val, fb_rvalue); - if (tret == GS_ERROR) - ret = GS_ERROR; - } + /* Vector types use CONSTRUCTOR all the way through gimple + compilation as a general initializer. */ + for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++) + { + enum gimplify_status tret; + tret = gimplify_expr (&ce->value, pre_p, post_p, + is_gimple_val, fb_rvalue); + if (tret == GS_ERROR) + ret = GS_ERROR; + } + } break; default: @@ -2829,6 +3194,63 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p, return GS_ALL_DONE; } +/* Given a pointer value OP0, return a simplified version of an + indirection through OP0, or NULL_TREE if no simplification is + possible. This may only be applied to a rhs of an expression. + Note that the resulting type may be different from the type pointed + to in the sense that it is still compatible from the langhooks + point of view. */ + +static tree +fold_indirect_ref_rhs (tree t) +{ + tree type = TREE_TYPE (TREE_TYPE (t)); + tree sub = t; + tree subtype; + + STRIP_NOPS (sub); + subtype = TREE_TYPE (sub); + if (!POINTER_TYPE_P (subtype)) + return NULL_TREE; + + if (TREE_CODE (sub) == ADDR_EXPR) + { + tree op = TREE_OPERAND (sub, 0); + tree optype = TREE_TYPE (op); + /* *&p => p */ + if (lang_hooks.types_compatible_p (type, optype)) + return op; + /* *(foo *)&fooarray => fooarray[0] */ + else if (TREE_CODE (optype) == ARRAY_TYPE + && lang_hooks.types_compatible_p (type, TREE_TYPE (optype))) + { + tree type_domain = TYPE_DOMAIN (optype); + tree min_val = size_zero_node; + if (type_domain && TYPE_MIN_VALUE (type_domain)) + min_val = TYPE_MIN_VALUE (type_domain); + return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE); + } + } + + /* *(foo *)fooarrptr => (*fooarrptr)[0] */ + if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE + && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype)))) + { + tree type_domain; + tree min_val = size_zero_node; + tree osub = sub; + sub = fold_indirect_ref_rhs (sub); + if (! sub) + sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub); + type_domain = TYPE_DOMAIN (TREE_TYPE (sub)); + if (type_domain && TYPE_MIN_VALUE (type_domain)) + min_val = TYPE_MIN_VALUE (type_domain); + return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE); + } + + return NULL_TREE; +} + /* Subroutine of gimplify_modify_expr to do simplifications of MODIFY_EXPRs based on the code of the RHS. We loop for as long as something changes. */ @@ -2852,8 +3274,8 @@ gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p, This kind of code arises in C++ when an object is bound to a const reference, and if "x" is a TARGET_EXPR we want to take advantage of the optimization below. */ - tree t = fold_indirect_ref (*from_p); - if (t != *from_p) + tree t = fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)); + if (t) { *from_p = t; ret = GS_OK; @@ -2872,7 +3294,7 @@ gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p, ??? What about code that pulls out the temp and uses it elsewhere? I think that such code never uses the TARGET_EXPR as - an initializer. 
If I'm wrong, we'll abort because the temp won't + an initializer. If I'm wrong, we'll die because the temp won't have any RTL. In that case, I guess we'll need to replace references somehow. */ tree init = TARGET_EXPR_INITIAL (*from_p); @@ -2906,13 +3328,118 @@ gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p, copy in other cases as well. */ if (!is_gimple_reg_type (TREE_TYPE (*from_p))) { - *expr_p = *from_p; - return gimplify_cond_expr (expr_p, pre_p, post_p, *to_p); + /* This code should mirror the code in gimplify_cond_expr. */ + enum tree_code code = TREE_CODE (*expr_p); + tree cond = *from_p; + tree result = *to_p; + + ret = gimplify_expr (&result, pre_p, post_p, + is_gimple_min_lval, fb_lvalue); + if (ret != GS_ERROR) + ret = GS_OK; + + if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node) + TREE_OPERAND (cond, 1) + = build2 (code, void_type_node, result, + TREE_OPERAND (cond, 1)); + if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node) + TREE_OPERAND (cond, 2) + = build2 (code, void_type_node, unshare_expr (result), + TREE_OPERAND (cond, 2)); + + TREE_TYPE (cond) = void_type_node; + recalculate_side_effects (cond); + + if (want_value) + { + gimplify_and_add (cond, pre_p); + *expr_p = unshare_expr (result); + } + else + *expr_p = cond; + return ret; } else ret = GS_UNHANDLED; break; + case CALL_EXPR: + /* For calls that return in memory, give *to_p as the CALL_EXPR's + return slot so that we don't generate a temporary. */ + if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p) + && aggregate_value_p (*from_p, *from_p)) + { + bool use_target; + + if (!(rhs_predicate_for (*to_p))(*from_p)) + /* If we need a temporary, *to_p isn't accurate. */ + use_target = false; + else if (TREE_CODE (*to_p) == RESULT_DECL + && DECL_NAME (*to_p) == NULL_TREE + && needs_to_live_in_memory (*to_p)) + /* It's OK to use the return slot directly unless it's an NRV. */ + use_target = true; + else if (is_gimple_reg_type (TREE_TYPE (*to_p)) + || (DECL_P (*to_p) && DECL_REGISTER (*to_p))) + /* Don't force regs into memory. */ + use_target = false; + else if (TREE_CODE (*to_p) == VAR_DECL + && DECL_GIMPLE_FORMAL_TEMP_P (*to_p)) + /* Don't use the original target if it's a formal temp; we + don't want to take their addresses. */ + use_target = false; + else if (TREE_CODE (*expr_p) == INIT_EXPR) + /* It's OK to use the target directly if it's being + initialized. */ + use_target = true; + else if (!is_gimple_non_addressable (*to_p)) + /* Don't use the original target if it's already addressable; + if its address escapes, and the called function uses the + NRV optimization, a conforming program could see *to_p + change before the called function returns; see c++/19317. + When optimizing, the return_slot pass marks more functions + as safe after we have escape info. */ + use_target = false; + else + use_target = true; + + if (use_target) + { + CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1; + lang_hooks.mark_addressable (*to_p); + } + } + + ret = GS_UNHANDLED; + break; + + /* If we're initializing from a container, push the initialization + inside it. 
*/ + case CLEANUP_POINT_EXPR: + case BIND_EXPR: + case STATEMENT_LIST: + { + tree wrap = *from_p; + tree t; + + ret = gimplify_expr (to_p, pre_p, post_p, + is_gimple_min_lval, fb_lvalue); + if (ret != GS_ERROR) + ret = GS_OK; + + t = voidify_wrapper_expr (wrap, *expr_p); + gcc_assert (t == *expr_p); + + if (want_value) + { + gimplify_and_add (wrap, pre_p); + *expr_p = unshare_expr (*to_p); + } + else + *expr_p = wrap; + return GS_OK; + } + default: ret = GS_UNHANDLED; break; @@ -2921,7 +3448,46 @@ gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p, return ret; } -/* Gimplify the MODIFY_EXPR node pointed by EXPR_P. +/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is + a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with + DECL_COMPLEX_GIMPLE_REG_P set. */ + +static enum gimplify_status +gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value) +{ + enum tree_code code, ocode; + tree lhs, rhs, new_rhs, other, realpart, imagpart; + + lhs = TREE_OPERAND (*expr_p, 0); + rhs = TREE_OPERAND (*expr_p, 1); + code = TREE_CODE (lhs); + lhs = TREE_OPERAND (lhs, 0); + + ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR; + other = build1 (ocode, TREE_TYPE (rhs), lhs); + other = get_formal_tmp_var (other, pre_p); + + realpart = code == REALPART_EXPR ? rhs : other; + imagpart = code == REALPART_EXPR ? other : rhs; + + if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart)) + new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart); + else + new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart); + + TREE_OPERAND (*expr_p, 0) = lhs; + TREE_OPERAND (*expr_p, 1) = new_rhs; + + if (want_value) + { + append_to_statement_list (*expr_p, pre_p); + *expr_p = rhs; + } + + return GS_ALL_DONE; +} + +/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P. modify_expr : varname '=' rhs @@ -2946,9 +3512,17 @@ gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value) gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR || TREE_CODE (*expr_p) == INIT_EXPR); - /* The distinction between MODIFY_EXPR and INIT_EXPR is no longer useful. */ - if (TREE_CODE (*expr_p) == INIT_EXPR) - TREE_SET_CODE (*expr_p, MODIFY_EXPR); + /* For zero sized types only gimplify the left hand side and right hand side + as statements and throw away the assignment. */ + if (zero_sized_type (TREE_TYPE (*from_p))) + { + gimplify_stmt (from_p); + gimplify_stmt (to_p); + append_to_statement_list (*from_p, pre_p); + append_to_statement_list (*to_p, pre_p); + *expr_p = NULL_TREE; + return GS_ALL_DONE; + } /* See if any simplifications can be done based on what the RHS is. */ ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p, @@ -2996,10 +3570,18 @@ gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value) } } + /* Transform partial stores to non-addressable complex variables into + total stores. This allows us to use real instead of virtual operands + for these variables, which improves optimization. */ + if ((TREE_CODE (*to_p) == REALPART_EXPR + || TREE_CODE (*to_p) == IMAGPART_EXPR) + && is_gimple_reg (TREE_OPERAND (*to_p, 0))) + return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value); + if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p)) { /* If we've somehow already got an SSA_NAME on the LHS, then - we're probably modifying it twice. Not good. */ + we're probably modified it twice. Not good. 
*/ gcc_assert (TREE_CODE (*to_p) != SSA_NAME); *to_p = make_ssa_name (*to_p, *expr_p); } @@ -3035,7 +3617,28 @@ gimplify_variable_sized_compare (tree *expr_p) t = implicit_built_in_decls[BUILT_IN_MEMCMP]; t = build_function_call_expr (t, args); *expr_p - = build (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node); + = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node); + + return GS_OK; +} + +/* Gimplify a comparison between two aggregate objects of integral scalar + mode as a comparison between the bitwise equivalent scalar values. */ + +static enum gimplify_status +gimplify_scalar_mode_aggregate_compare (tree *expr_p) +{ + tree op0 = TREE_OPERAND (*expr_p, 0); + tree op1 = TREE_OPERAND (*expr_p, 1); + + tree type = TREE_TYPE (op0); + tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1); + + op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0); + op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1); + + *expr_p + = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1); return GS_OK; } @@ -3058,9 +3661,9 @@ gimplify_boolean_expr (tree *expr_p) /* Preserve the original type of the expression. */ tree type = TREE_TYPE (*expr_p); - *expr_p = build (COND_EXPR, type, *expr_p, - convert (type, boolean_true_node), - convert (type, boolean_false_node)); + *expr_p = build3 (COND_EXPR, type, *expr_p, + fold_convert (type, boolean_true_node), + fold_convert (type, boolean_false_node)); return GS_OK; } @@ -3110,8 +3713,10 @@ gimplify_compound_expr (tree *expr_p, tree *pre_p, bool want_value) enlightened front-end, or by shortcut_cond_expr. */ static enum gimplify_status -gimplify_statement_list (tree *expr_p) +gimplify_statement_list (tree *expr_p, tree *pre_p) { + tree temp = voidify_wrapper_expr (*expr_p, NULL); + tree_stmt_iterator i = tsi_start (*expr_p); while (!tsi_end_p (i)) @@ -3132,6 +3737,13 @@ gimplify_statement_list (tree *expr_p) tsi_next (&i); } + if (temp) + { + append_to_statement_list (*expr_p, pre_p); + *expr_p = temp; + return GS_OK; + } + return GS_ALL_DONE; } @@ -3176,7 +3788,7 @@ gimplify_save_expr (tree *expr_p, tree *pre_p, tree *post_p) return ret; } -/* Re-write the ADDR_EXPR node pointed by EXPR_P +/* Re-write the ADDR_EXPR node pointed to by EXPR_P unary_expr : ... @@ -3207,6 +3819,9 @@ gimplify_addr_expr (tree *expr_p, tree *pre_p, tree *post_p) builtins like __builtin_va_end). */ /* Caution: the silent array decomposition semantics we allow for ADDR_EXPR means we can't always discard the pair. */ + /* Gimplification of the ADDR_EXPR operand may drop + cv-qualification conversions, so make sure we add them if + needed. */ { tree op00 = TREE_OPERAND (op0, 0); tree t_expr = TREE_TYPE (expr); @@ -3216,9 +3831,9 @@ gimplify_addr_expr (tree *expr_p, tree *pre_p, tree *post_p) { #ifdef ENABLE_CHECKING tree t_op0 = TREE_TYPE (op0); - gcc_assert (TREE_CODE (t_op0) == ARRAY_TYPE - && POINTER_TYPE_P (t_expr) - && cpt_same_type (TREE_TYPE (t_op0), + gcc_assert (POINTER_TYPE_P (t_expr) + && cpt_same_type (TREE_CODE (t_op0) == ARRAY_TYPE + ? TREE_TYPE (t_op0) : t_op0, TREE_TYPE (t_expr)) && POINTER_TYPE_P (t_op00) && cpt_same_type (t_op0, TREE_TYPE (t_op00))); @@ -3266,7 +3881,7 @@ gimplify_addr_expr (tree *expr_p, tree *pre_p, tree *post_p) /* Make sure TREE_INVARIANT, TREE_CONSTANT, and TREE_SIDE_EFFECTS is set properly. */ - recompute_tree_invarant_for_addr_expr (expr); + recompute_tree_invariant_for_addr_expr (expr); /* Mark the RHS addressable. 
*/ lang_hooks.mark_addressable (TREE_OPERAND (expr, 0)); @@ -3360,7 +3975,7 @@ gimplify_asm_expr (tree *expr_p, tree *pre_p, tree *post_p) break; } - str = alloca (len); + str = (char *) alloca (len); for (beg = p + 1, dst = str;;) { const char *tem; @@ -3465,211 +4080,1211 @@ gimplify_cleanup_point_expr (tree *expr_p, tree *pre_p) tree temp = voidify_wrapper_expr (*expr_p, NULL); /* We only care about the number of conditions between the innermost - CLEANUP_POINT_EXPR and the cleanup. So save and reset the count. */ + CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and + any cleanups collected outside the CLEANUP_POINT_EXPR. */ int old_conds = gimplify_ctxp->conditions; + tree old_cleanups = gimplify_ctxp->conditional_cleanups; gimplify_ctxp->conditions = 0; + gimplify_ctxp->conditional_cleanups = NULL_TREE; body = TREE_OPERAND (*expr_p, 0); gimplify_to_stmt_list (&body); gimplify_ctxp->conditions = old_conds; + gimplify_ctxp->conditional_cleanups = old_cleanups; for (iter = tsi_start (body); !tsi_end_p (iter); ) { tree *wce_p = tsi_stmt_ptr (iter); tree wce = *wce_p; - if (TREE_CODE (wce) == WITH_CLEANUP_EXPR) - { - if (tsi_one_before_end_p (iter)) - { - tsi_link_before (&iter, TREE_OPERAND (wce, 0), TSI_SAME_STMT); - tsi_delink (&iter); - break; - } - else - { - tree sl, tfe; - enum tree_code code; + if (TREE_CODE (wce) == WITH_CLEANUP_EXPR) + { + if (tsi_one_before_end_p (iter)) + { + tsi_link_before (&iter, TREE_OPERAND (wce, 0), TSI_SAME_STMT); + tsi_delink (&iter); + break; + } + else + { + tree sl, tfe; + enum tree_code code; + + if (CLEANUP_EH_ONLY (wce)) + code = TRY_CATCH_EXPR; + else + code = TRY_FINALLY_EXPR; + + sl = tsi_split_statement_list_after (&iter); + tfe = build2 (code, void_type_node, sl, NULL_TREE); + append_to_statement_list (TREE_OPERAND (wce, 0), + &TREE_OPERAND (tfe, 1)); + *wce_p = tfe; + iter = tsi_start (sl); + } + } + else + tsi_next (&iter); + } + + if (temp) + { + *expr_p = temp; + append_to_statement_list (body, pre_p); + return GS_OK; + } + else + { + *expr_p = body; + return GS_ALL_DONE; + } +} + +/* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP + is the cleanup action required. */ + +static void +gimple_push_cleanup (tree var, tree cleanup, bool eh_only, tree *pre_p) +{ + tree wce; + + /* Errors can result in improperly nested cleanups. Which results in + confusion when trying to resolve the WITH_CLEANUP_EXPR. */ + if (errorcount || sorrycount) + return; + + if (gimple_conditional_context ()) + { + /* If we're in a conditional context, this is more complex. We only + want to run the cleanup if we actually ran the initialization that + necessitates it, but we want to run it after the end of the + conditional context. So we wrap the try/finally around the + condition and use a flag to determine whether or not to actually + run the destructor. Thus + + test ? 
f(A()) : 0 + + becomes (approximately) + + flag = 0; + try { + if (test) { A::A(temp); flag = 1; val = f(temp); } + else { val = 0; } + } finally { + if (flag) A::~A(temp); + } + val + */ + + tree flag = create_tmp_var (boolean_type_node, "cleanup"); + tree ffalse = build2 (MODIFY_EXPR, void_type_node, flag, + boolean_false_node); + tree ftrue = build2 (MODIFY_EXPR, void_type_node, flag, + boolean_true_node); + cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL); + wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup); + append_to_statement_list (ffalse, &gimplify_ctxp->conditional_cleanups); + append_to_statement_list (wce, &gimplify_ctxp->conditional_cleanups); + append_to_statement_list (ftrue, pre_p); + + /* Because of this manipulation, and the EH edges that jump + threading cannot redirect, the temporary (VAR) will appear + to be used uninitialized. Don't warn. */ + TREE_NO_WARNING (var) = 1; + } + else + { + wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup); + CLEANUP_EH_ONLY (wce) = eh_only; + append_to_statement_list (wce, pre_p); + } + + gimplify_stmt (&TREE_OPERAND (wce, 0)); +} + +/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */ + +static enum gimplify_status +gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p) +{ + tree targ = *expr_p; + tree temp = TARGET_EXPR_SLOT (targ); + tree init = TARGET_EXPR_INITIAL (targ); + enum gimplify_status ret; + + if (init) + { + /* TARGET_EXPR temps aren't part of the enclosing block, so add it + to the temps list. */ + gimple_add_tmp_var (temp); + + /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the + expression is supposed to initialize the slot. */ + if (VOID_TYPE_P (TREE_TYPE (init))) + ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none); + else + { + init = build2 (INIT_EXPR, void_type_node, temp, init); + ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, + fb_none); + } + if (ret == GS_ERROR) + return GS_ERROR; + append_to_statement_list (init, pre_p); + + /* If needed, push the cleanup for the temp. */ + if (TARGET_EXPR_CLEANUP (targ)) + { + gimplify_stmt (&TARGET_EXPR_CLEANUP (targ)); + gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ), + CLEANUP_EH_ONLY (targ), pre_p); + } + + /* Only expand this once. */ + TREE_OPERAND (targ, 3) = init; + TARGET_EXPR_INITIAL (targ) = NULL_TREE; + } + else + /* We should have expanded this before. */ + gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp)); + + *expr_p = temp; + return GS_OK; +} + +/* Gimplification of expression trees. */ + +/* Gimplify an expression which appears at statement context; usually, this + means replacing it with a suitably gimple STATEMENT_LIST. */ + +void +gimplify_stmt (tree *stmt_p) +{ + gimplify_expr (stmt_p, NULL, NULL, is_gimple_stmt, fb_none); +} + +/* Similarly, but force the result to be a STATEMENT_LIST. */ + +void +gimplify_to_stmt_list (tree *stmt_p) +{ + gimplify_stmt (stmt_p); + if (!*stmt_p) + *stmt_p = alloc_stmt_list (); + else if (TREE_CODE (*stmt_p) != STATEMENT_LIST) + { + tree t = *stmt_p; + *stmt_p = alloc_stmt_list (); + append_to_statement_list (t, stmt_p); + } +} + + +/* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels + to CTX. If entries already exist, force them to be some flavor of private. + If there is no enclosing parallel, do nothing. 
*/ + +void +omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl) +{ + splay_tree_node n; + + if (decl == NULL || !DECL_P (decl)) + return; + + do + { + n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); + if (n != NULL) + { + if (n->value & GOVD_SHARED) + n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN); + else + return; + } + else if (ctx->is_parallel) + omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE); + + ctx = ctx->outer_context; + } + while (ctx); +} + +/* Similarly for each of the type sizes of TYPE. */ + +static void +omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type) +{ + if (type == NULL || type == error_mark_node) + return; + type = TYPE_MAIN_VARIANT (type); + + if (pointer_set_insert (ctx->privatized_types, type)) + return; + + switch (TREE_CODE (type)) + { + case INTEGER_TYPE: + case ENUMERAL_TYPE: + case BOOLEAN_TYPE: + case REAL_TYPE: + omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type)); + omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type)); + break; + + case ARRAY_TYPE: + omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type)); + omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type)); + break; + + case RECORD_TYPE: + case UNION_TYPE: + case QUAL_UNION_TYPE: + { + tree field; + for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field)) + if (TREE_CODE (field) == FIELD_DECL) + { + omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field)); + omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field)); + } + } + break; + + case POINTER_TYPE: + case REFERENCE_TYPE: + omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type)); + break; + + default: + break; + } + + omp_firstprivatize_variable (ctx, TYPE_SIZE (type)); + omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type)); + lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type); +} + +/* Add an entry for DECL in the OpenMP context CTX with FLAGS. */ + +static void +omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags) +{ + splay_tree_node n; + unsigned int nflags; + tree t; + + if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node) + return; + + /* Never elide decls whose type has TREE_ADDRESSABLE set. This means + there are constructors involved somewhere. */ + if (TREE_ADDRESSABLE (TREE_TYPE (decl)) + || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))) + flags |= GOVD_SEEN; + + n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); + if (n != NULL) + { + /* We shouldn't be re-adding the decl with the same data + sharing class. */ + gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0); + /* The only combination of data sharing classes we should see is + FIRSTPRIVATE and LASTPRIVATE. */ + nflags = n->value | flags; + gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS) + == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)); + n->value = nflags; + return; + } + + /* When adding a variable-sized variable, we have to handle all sorts + of additional bits of data: the pointer replacement variable, and + the parameters of the type. */ + if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) + { + /* Add the pointer replacement variable as PRIVATE if the variable + replacement is private, else FIRSTPRIVATE since we'll need the + address of the original variable either for SHARED, or for the + copy into or out of the context. */ + if (!(flags & GOVD_LOCAL)) + { + nflags = flags & GOVD_PRIVATE ? 
GOVD_PRIVATE : GOVD_FIRSTPRIVATE; + nflags |= flags & GOVD_SEEN; + t = DECL_VALUE_EXPR (decl); + gcc_assert (TREE_CODE (t) == INDIRECT_REF); + t = TREE_OPERAND (t, 0); + gcc_assert (DECL_P (t)); + omp_add_variable (ctx, t, nflags); + } + + /* Add all of the variable and type parameters (which should have + been gimplified to a formal temporary) as FIRSTPRIVATE. */ + omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl)); + omp_firstprivatize_variable (ctx, DECL_SIZE (decl)); + omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl)); + + /* The variable-sized variable itself is never SHARED, only some form + of PRIVATE. The sharing would take place via the pointer variable + which we remapped above. */ + if (flags & GOVD_SHARED) + flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE + | (flags & (GOVD_SEEN | GOVD_EXPLICIT)); + + /* We're going to make use of the TYPE_SIZE_UNIT at least in the + alloca statement we generate for the variable, so make sure it + is available. This isn't automatically needed for the SHARED + case, since we won't be allocating local storage then. */ + else + omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true); + } + else if (lang_hooks.decls.omp_privatize_by_reference (decl)) + { + gcc_assert ((flags & GOVD_LOCAL) == 0); + omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl)); + + /* Similar to the direct variable sized case above, we'll need the + size of references being privatized. */ + if ((flags & GOVD_SHARED) == 0) + { + t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))); + if (TREE_CODE (t) != INTEGER_CST) + omp_notice_variable (ctx, t, true); + } + } + + splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags); +} + +/* Record the fact that DECL was used within the OpenMP context CTX. + IN_CODE is true when real code uses DECL, and false when we should + merely emit default(none) errors. Return true if DECL is going to + be remapped and thus DECL shouldn't be gimplified into its + DECL_VALUE_EXPR (if any). */ + +static bool +omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code) +{ + splay_tree_node n; + unsigned flags = in_code ? GOVD_SEEN : 0; + bool ret = false, shared; + + if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node) + return false; + + /* Threadprivate variables are predetermined. */ + if (is_global_var (decl)) + { + if (DECL_THREAD_LOCAL_P (decl)) + return false; + + if (DECL_HAS_VALUE_EXPR_P (decl)) + { + tree value = get_base_address (DECL_VALUE_EXPR (decl)); + + if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value)) + return false; + } + } + + n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); + if (n == NULL) + { + enum omp_clause_default_kind default_kind, kind; + + if (!ctx->is_parallel) + goto do_outer; + + /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be + remapped firstprivate instead of shared. To some extent this is + addressed in omp_firstprivatize_type_sizes, but not effectively. 
*/ + default_kind = ctx->default_kind; + kind = lang_hooks.decls.omp_predetermined_sharing (decl); + if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED) + default_kind = kind; + + switch (default_kind) + { + case OMP_CLAUSE_DEFAULT_NONE: + error ("%qs not specified in enclosing parallel", + IDENTIFIER_POINTER (DECL_NAME (decl))); + error ("%Henclosing parallel", &ctx->location); + /* FALLTHRU */ + case OMP_CLAUSE_DEFAULT_SHARED: + flags |= GOVD_SHARED; + break; + case OMP_CLAUSE_DEFAULT_PRIVATE: + flags |= GOVD_PRIVATE; + break; + default: + gcc_unreachable (); + } + + omp_add_variable (ctx, decl, flags); + + shared = (flags & GOVD_SHARED) != 0; + ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared); + goto do_outer; + } + + shared = ((flags | n->value) & GOVD_SHARED) != 0; + ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared); + + /* If nothing changed, there's nothing left to do. */ + if ((n->value & flags) == flags) + return ret; + flags |= n->value; + n->value = flags; + + do_outer: + /* If the variable is private in the current context, then we don't + need to propagate anything to an outer context. */ + if (flags & GOVD_PRIVATE) + return ret; + if (ctx->outer_context + && omp_notice_variable (ctx->outer_context, decl, in_code)) + return true; + return ret; +} + +/* Verify that DECL is private within CTX. If there's specific information + to the contrary in the innermost scope, generate an error. */ + +static bool +omp_is_private (struct gimplify_omp_ctx *ctx, tree decl) +{ + splay_tree_node n; + + n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); + if (n != NULL) + { + if (n->value & GOVD_SHARED) + { + if (ctx == gimplify_omp_ctxp) + { + error ("iteration variable %qs should be private", + IDENTIFIER_POINTER (DECL_NAME (decl))); + n->value = GOVD_PRIVATE; + return true; + } + else + return false; + } + else if ((n->value & GOVD_EXPLICIT) != 0 + && (ctx == gimplify_omp_ctxp + || (ctx->is_combined_parallel + && gimplify_omp_ctxp->outer_context == ctx))) + { + if ((n->value & GOVD_FIRSTPRIVATE) != 0) + error ("iteration variable %qs should not be firstprivate", + IDENTIFIER_POINTER (DECL_NAME (decl))); + else if ((n->value & GOVD_REDUCTION) != 0) + error ("iteration variable %qs should not be reduction", + IDENTIFIER_POINTER (DECL_NAME (decl))); + } + return true; + } + + if (ctx->is_parallel) + return false; + else if (ctx->outer_context) + return omp_is_private (ctx->outer_context, decl); + else + return !is_global_var (decl); +} + +/* Scan the OpenMP clauses in *LIST_P, installing mappings into a new + and previous omp contexts. 
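   For illustration (hypothetical user code, not from this patch): given

       #pragma omp parallel shared(a) firstprivate(b) reduction(+:s) if(n) num_threads(4)

   a, b and s are entered into the new context with their GOVD_* class
   plus GOVD_EXPLICIT, while the if operand is boolified and, like the
   num_threads operand, gimplified into a gimple value on *PRE_P.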
*/ + +static void +gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel, + bool in_combined_parallel) +{ + struct gimplify_omp_ctx *ctx, *outer_ctx; + tree c; + + ctx = new_omp_context (in_parallel, in_combined_parallel); + outer_ctx = ctx->outer_context; + + while ((c = *list_p) != NULL) + { + enum gimplify_status gs; + bool remove = false; + bool notice_outer = true; + unsigned int flags; + tree decl; + + switch (OMP_CLAUSE_CODE (c)) + { + case OMP_CLAUSE_PRIVATE: + flags = GOVD_PRIVATE | GOVD_EXPLICIT; + notice_outer = false; + goto do_add; + case OMP_CLAUSE_SHARED: + flags = GOVD_SHARED | GOVD_EXPLICIT; + goto do_add; + case OMP_CLAUSE_FIRSTPRIVATE: + flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT; + goto do_add; + case OMP_CLAUSE_LASTPRIVATE: + flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT; + goto do_add; + case OMP_CLAUSE_REDUCTION: + flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT; + goto do_add; + + do_add: + decl = OMP_CLAUSE_DECL (c); + if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node) + { + remove = true; + break; + } + /* Handle NRV results passed by reference. */ + if (TREE_CODE (decl) == INDIRECT_REF + && TREE_CODE (TREE_OPERAND (decl, 0)) == RESULT_DECL + && DECL_BY_REFERENCE (TREE_OPERAND (decl, 0))) + OMP_CLAUSE_DECL (c) = decl = TREE_OPERAND (decl, 0); + omp_add_variable (ctx, decl, flags); + if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION + && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) + { + omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c), + GOVD_LOCAL | GOVD_SEEN); + gimplify_omp_ctxp = ctx; + push_gimplify_context (); + gimplify_stmt (&OMP_CLAUSE_REDUCTION_INIT (c)); + pop_gimplify_context (OMP_CLAUSE_REDUCTION_INIT (c)); + push_gimplify_context (); + gimplify_stmt (&OMP_CLAUSE_REDUCTION_MERGE (c)); + pop_gimplify_context (OMP_CLAUSE_REDUCTION_MERGE (c)); + gimplify_omp_ctxp = outer_ctx; + } + if (notice_outer) + goto do_notice; + break; + + case OMP_CLAUSE_COPYIN: + case OMP_CLAUSE_COPYPRIVATE: + decl = OMP_CLAUSE_DECL (c); + if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node) + { + remove = true; + break; + } + /* Handle NRV results passed by reference. */ + if (TREE_CODE (decl) == INDIRECT_REF + && TREE_CODE (TREE_OPERAND (decl, 0)) == RESULT_DECL + && DECL_BY_REFERENCE (TREE_OPERAND (decl, 0))) + OMP_CLAUSE_DECL (c) = decl = TREE_OPERAND (decl, 0); + do_notice: + if (outer_ctx) + omp_notice_variable (outer_ctx, decl, true); + break; + + case OMP_CLAUSE_IF: + OMP_CLAUSE_OPERAND (c, 0) + = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0)); + /* Fall through. */ + + case OMP_CLAUSE_SCHEDULE: + case OMP_CLAUSE_NUM_THREADS: + gs = gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL, + is_gimple_val, fb_rvalue); + if (gs == GS_ERROR) + remove = true; + break; + + case OMP_CLAUSE_NOWAIT: + case OMP_CLAUSE_ORDERED: + break; + + case OMP_CLAUSE_DEFAULT: + ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c); + break; + + default: + gcc_unreachable (); + } + + if (remove) + *list_p = OMP_CLAUSE_CHAIN (c); + else + list_p = &OMP_CLAUSE_CHAIN (c); + } + + gimplify_omp_ctxp = ctx; +} + +/* For all variables that were not actually used within the context, + remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. 
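   For example (hypothetical user code, not from this patch): with

       #pragma omp parallel shared(a) private(b)
         a[0] = c;

   the unused private(b) clause is removed, while the use of c, which
   appears on no clause, is added back as an implicit clause according
   to the GOVD_* class recorded by omp_notice_variable.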
*/ + +static int +gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data) +{ + tree *list_p = (tree *) data; + tree decl = (tree) n->key; + unsigned flags = n->value; + enum omp_clause_code code; + tree clause; + bool private_debug; + + if (flags & (GOVD_EXPLICIT | GOVD_LOCAL)) + return 0; + if ((flags & GOVD_SEEN) == 0) + return 0; + if (flags & GOVD_DEBUG_PRIVATE) + { + gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE); + private_debug = true; + } + else + private_debug + = lang_hooks.decls.omp_private_debug_clause (decl, + !!(flags & GOVD_SHARED)); + if (private_debug) + code = OMP_CLAUSE_PRIVATE; + else if (flags & GOVD_SHARED) + { + if (is_global_var (decl)) + return 0; + code = OMP_CLAUSE_SHARED; + } + else if (flags & GOVD_PRIVATE) + code = OMP_CLAUSE_PRIVATE; + else if (flags & GOVD_FIRSTPRIVATE) + code = OMP_CLAUSE_FIRSTPRIVATE; + else + gcc_unreachable (); + + clause = build_omp_clause (code); + OMP_CLAUSE_DECL (clause) = decl; + OMP_CLAUSE_CHAIN (clause) = *list_p; + if (private_debug) + OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1; + *list_p = clause; + + return 0; +} + +static void +gimplify_adjust_omp_clauses (tree *list_p) +{ + struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; + tree c, decl; + + while ((c = *list_p) != NULL) + { + splay_tree_node n; + bool remove = false; + + switch (OMP_CLAUSE_CODE (c)) + { + case OMP_CLAUSE_PRIVATE: + case OMP_CLAUSE_SHARED: + case OMP_CLAUSE_FIRSTPRIVATE: + decl = OMP_CLAUSE_DECL (c); + n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); + remove = !(n->value & GOVD_SEEN); + if (! remove) + { + bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED; + if ((n->value & GOVD_DEBUG_PRIVATE) + || lang_hooks.decls.omp_private_debug_clause (decl, shared)) + { + gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0 + || ((n->value & GOVD_DATA_SHARE_CLASS) + == GOVD_PRIVATE)); + OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE); + OMP_CLAUSE_PRIVATE_DEBUG (c) = 1; + } + } + break; + + case OMP_CLAUSE_LASTPRIVATE: + /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to + accurately reflect the presence of a FIRSTPRIVATE clause. */ + decl = OMP_CLAUSE_DECL (c); + n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); + OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) + = (n->value & GOVD_FIRSTPRIVATE) != 0; + break; + + case OMP_CLAUSE_REDUCTION: + case OMP_CLAUSE_COPYIN: + case OMP_CLAUSE_COPYPRIVATE: + case OMP_CLAUSE_IF: + case OMP_CLAUSE_NUM_THREADS: + case OMP_CLAUSE_SCHEDULE: + case OMP_CLAUSE_NOWAIT: + case OMP_CLAUSE_ORDERED: + case OMP_CLAUSE_DEFAULT: + break; + + default: + gcc_unreachable (); + } + + if (remove) + *list_p = OMP_CLAUSE_CHAIN (c); + else + list_p = &OMP_CLAUSE_CHAIN (c); + } + + /* Add in any implicit data sharing. */ + splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p); + + gimplify_omp_ctxp = ctx->outer_context; + delete_omp_context (ctx); +} + +/* Gimplify the contents of an OMP_PARALLEL statement. This involves + gimplification of the body, as well as scanning the body for used + variables. We need to do this scan now, because variable-sized + decls will be decomposed during gimplification. 
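   For instance (hypothetical user code, not from this patch), with a
   variable-length array used in the region,

       void f (int n)
       {
         int vla[n];
       #pragma omp parallel shared(vla)
         vla[0] = n;
       }

   the scan must see vla and the temporaries holding its size before
   gimplification rewrites vla into an indirection through its pointer
   replacement variable.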
*/ + +static enum gimplify_status +gimplify_omp_parallel (tree *expr_p, tree *pre_p) +{ + tree expr = *expr_p; + + gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, true, + OMP_PARALLEL_COMBINED (expr)); + + push_gimplify_context (); + + gimplify_stmt (&OMP_PARALLEL_BODY (expr)); + + if (TREE_CODE (OMP_PARALLEL_BODY (expr)) == BIND_EXPR) + pop_gimplify_context (OMP_PARALLEL_BODY (expr)); + else + pop_gimplify_context (NULL_TREE); + + gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr)); + + return GS_ALL_DONE; +} + +/* Gimplify the gross structure of an OMP_FOR statement. */ + +static enum gimplify_status +gimplify_omp_for (tree *expr_p, tree *pre_p) +{ + tree for_stmt, decl, t; + enum gimplify_status ret = 0; + + for_stmt = *expr_p; + + gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, false, false); + + t = OMP_FOR_INIT (for_stmt); + gcc_assert (TREE_CODE (t) == MODIFY_EXPR); + decl = TREE_OPERAND (t, 0); + gcc_assert (DECL_P (decl)); + gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))); + + /* Make sure the iteration variable is private. */ + if (omp_is_private (gimplify_omp_ctxp, decl)) + omp_notice_variable (gimplify_omp_ctxp, decl, true); + else + omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN); + + ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt), + NULL, is_gimple_val, fb_rvalue); + + t = OMP_FOR_COND (for_stmt); + gcc_assert (COMPARISON_CLASS_P (t)); + gcc_assert (TREE_OPERAND (t, 0) == decl); + + ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt), + NULL, is_gimple_val, fb_rvalue); + + t = OMP_FOR_INCR (for_stmt); + switch (TREE_CODE (t)) + { + case PREINCREMENT_EXPR: + case POSTINCREMENT_EXPR: + t = build_int_cst (TREE_TYPE (decl), 1); + goto build_modify; + case PREDECREMENT_EXPR: + case POSTDECREMENT_EXPR: + t = build_int_cst (TREE_TYPE (decl), -1); + goto build_modify; + build_modify: + t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t); + t = build2 (MODIFY_EXPR, void_type_node, decl, t); + OMP_FOR_INCR (for_stmt) = t; + break; + + case MODIFY_EXPR: + gcc_assert (TREE_OPERAND (t, 0) == decl); + t = TREE_OPERAND (t, 1); + switch (TREE_CODE (t)) + { + case PLUS_EXPR: + if (TREE_OPERAND (t, 1) == decl) + { + TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0); + TREE_OPERAND (t, 0) = decl; + break; + } + case MINUS_EXPR: + gcc_assert (TREE_OPERAND (t, 0) == decl); + break; + default: + gcc_unreachable (); + } + + ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt), + NULL, is_gimple_val, fb_rvalue); + break; + + default: + gcc_unreachable (); + } + + gimplify_to_stmt_list (&OMP_FOR_BODY (for_stmt)); + gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt)); + + return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR; +} + +/* Gimplify the gross structure of other OpenMP worksharing constructs. + In particular, OMP_SECTIONS and OMP_SINGLE. */ + +static enum gimplify_status +gimplify_omp_workshare (tree *expr_p, tree *pre_p) +{ + tree stmt = *expr_p; + + gimplify_scan_omp_clauses (&OMP_CLAUSES (stmt), pre_p, false, false); + gimplify_to_stmt_list (&OMP_BODY (stmt)); + gimplify_adjust_omp_clauses (&OMP_CLAUSES (stmt)); + + return GS_ALL_DONE; +} + +/* A subroutine of gimplify_omp_atomic. The front end is supposed to have + stabilized the lhs of the atomic operation as *ADDR. Return true if + EXPR is this stabilized form. */ + +static bool +goa_lhs_expr_p (tree expr, tree addr) +{ + /* Also include casts to other type variants. The C front end is fond + of adding these for e.g. 
volatile variables. This is like + STRIP_TYPE_NOPS but includes the main variant lookup. */ + while ((TREE_CODE (expr) == NOP_EXPR + || TREE_CODE (expr) == CONVERT_EXPR + || TREE_CODE (expr) == NON_LVALUE_EXPR) + && TREE_OPERAND (expr, 0) != error_mark_node + && (TYPE_MAIN_VARIANT (TREE_TYPE (expr)) + == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0))))) + expr = TREE_OPERAND (expr, 0); + + if (TREE_CODE (expr) == INDIRECT_REF && TREE_OPERAND (expr, 0) == addr) + return true; + if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0)) + return true; + return false; +} + +/* A subroutine of gimplify_omp_atomic. Attempt to implement the atomic + operation as a __sync_fetch_and_op builtin. INDEX is log2 of the + size of the data type, and thus usable to find the index of the builtin + decl. Returns GS_UNHANDLED if the expression is not of the proper form. */ + +static enum gimplify_status +gimplify_omp_atomic_fetch_op (tree *expr_p, tree addr, tree rhs, int index) +{ + enum built_in_function base; + tree decl, args, itype; + enum insn_code *optab; + + /* Check for one of the supported fetch-op operations. */ + switch (TREE_CODE (rhs)) + { + case PLUS_EXPR: + base = BUILT_IN_FETCH_AND_ADD_N; + optab = sync_add_optab; + break; + case MINUS_EXPR: + base = BUILT_IN_FETCH_AND_SUB_N; + optab = sync_add_optab; + break; + case BIT_AND_EXPR: + base = BUILT_IN_FETCH_AND_AND_N; + optab = sync_and_optab; + break; + case BIT_IOR_EXPR: + base = BUILT_IN_FETCH_AND_OR_N; + optab = sync_ior_optab; + break; + case BIT_XOR_EXPR: + base = BUILT_IN_FETCH_AND_XOR_N; + optab = sync_xor_optab; + break; + default: + return GS_UNHANDLED; + } + + /* Make sure the expression is of the proper form. */ + if (goa_lhs_expr_p (TREE_OPERAND (rhs, 0), addr)) + rhs = TREE_OPERAND (rhs, 1); + else if (commutative_tree_code (TREE_CODE (rhs)) + && goa_lhs_expr_p (TREE_OPERAND (rhs, 1), addr)) + rhs = TREE_OPERAND (rhs, 0); + else + return GS_UNHANDLED; + + decl = built_in_decls[base + index + 1]; + itype = TREE_TYPE (TREE_TYPE (decl)); + + if (optab[TYPE_MODE (itype)] == CODE_FOR_nothing) + return GS_UNHANDLED; + + args = tree_cons (NULL, fold_convert (itype, rhs), NULL); + args = tree_cons (NULL, addr, args); + *expr_p = build_function_call_expr (decl, args); + return GS_OK; +} + +/* A subroutine of gimplify_omp_atomic_pipeline. Walk *EXPR_P and replace + appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve + the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as + a subexpression, 0 if it did not, or -1 if an error was encountered. 
*/ - if (CLEANUP_EH_ONLY (wce)) - code = TRY_CATCH_EXPR; - else - code = TRY_FINALLY_EXPR; +static int +goa_stabilize_expr (tree *expr_p, tree *pre_p, tree lhs_addr, tree lhs_var) +{ + tree expr = *expr_p; + int saw_lhs; - sl = tsi_split_statement_list_after (&iter); - tfe = build (code, void_type_node, sl, NULL_TREE); - append_to_statement_list (TREE_OPERAND (wce, 0), - &TREE_OPERAND (tfe, 1)); - *wce_p = tfe; - iter = tsi_start (sl); - } - } - else - tsi_next (&iter); + if (goa_lhs_expr_p (expr, lhs_addr)) + { + *expr_p = lhs_var; + return 1; } - - if (temp) + if (is_gimple_val (expr)) + return 0; + + saw_lhs = 0; + switch (TREE_CODE_CLASS (TREE_CODE (expr))) { - *expr_p = temp; - append_to_statement_list (body, pre_p); - return GS_OK; + case tcc_binary: + saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, + lhs_addr, lhs_var); + case tcc_unary: + saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, + lhs_addr, lhs_var); + break; + default: + break; } - else + + if (saw_lhs == 0) { - *expr_p = body; - return GS_ALL_DONE; + enum gimplify_status gs; + gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue); + if (gs != GS_ALL_DONE) + saw_lhs = -1; } + + return saw_lhs; } -/* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP - is the cleanup action required. */ +/* A subroutine of gimplify_omp_atomic. Implement the atomic operation as: -static void -gimple_push_cleanup (tree var, tree cleanup, bool eh_only, tree *pre_p) -{ - tree wce; + oldval = *addr; + repeat: + newval = rhs; // with oldval replacing *addr in rhs + oldval = __sync_val_compare_and_swap (addr, oldval, newval); + if (oldval != newval) + goto repeat; - /* Errors can result in improperly nested cleanups. Which results in - confusion when trying to resolve the WITH_CLEANUP_EXPR. */ - if (errorcount || sorrycount) - return; + INDEX is log2 of the size of the data type, and thus usable to find the + index of the builtin decl. */ - if (gimple_conditional_context ()) - { - /* If we're in a conditional context, this is more complex. We only - want to run the cleanup if we actually ran the initialization that - necessitates it, but we want to run it after the end of the - conditional context. So we wrap the try/finally around the - condition and use a flag to determine whether or not to actually - run the destructor. Thus +static enum gimplify_status +gimplify_omp_atomic_pipeline (tree *expr_p, tree *pre_p, tree addr, + tree rhs, int index) +{ + tree oldval, oldival, oldival2, newval, newival, label; + tree type, itype, cmpxchg, args, x, iaddr; - test ? 
f(A()) : 0 + cmpxchg = built_in_decls[BUILT_IN_VAL_COMPARE_AND_SWAP_N + index + 1]; + type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr))); + itype = TREE_TYPE (TREE_TYPE (cmpxchg)); - becomes (approximately) + if (sync_compare_and_swap[TYPE_MODE (itype)] == CODE_FOR_nothing) + return GS_UNHANDLED; - flag = 0; - try { - if (test) { A::A(temp); flag = 1; val = f(temp); } - else { val = 0; } - } finally { - if (flag) A::~A(temp); - } - val - */ + oldval = create_tmp_var (type, NULL); + newval = create_tmp_var (type, NULL); - tree flag = create_tmp_var (boolean_type_node, "cleanup"); - tree ffalse = build (MODIFY_EXPR, void_type_node, flag, - boolean_false_node); - tree ftrue = build (MODIFY_EXPR, void_type_node, flag, - boolean_true_node); - cleanup = build (COND_EXPR, void_type_node, flag, cleanup, NULL); - wce = build (WITH_CLEANUP_EXPR, void_type_node, cleanup); - append_to_statement_list (ffalse, &gimplify_ctxp->conditional_cleanups); - append_to_statement_list (wce, &gimplify_ctxp->conditional_cleanups); - append_to_statement_list (ftrue, pre_p); + /* Precompute as much of RHS as possible. In the same walk, replace + occurrences of the lhs value with our temporary. */ + if (goa_stabilize_expr (&rhs, pre_p, addr, oldval) < 0) + return GS_ERROR; - /* Because of this manipulation, and the EH edges that jump - threading cannot redirect, the temporary (VAR) will appear - to be used uninitialized. Don't warn. */ - TREE_NO_WARNING (var) = 1; + x = build_fold_indirect_ref (addr); + x = build2 (MODIFY_EXPR, void_type_node, oldval, x); + gimplify_and_add (x, pre_p); + + /* For floating-point values, we'll need to view-convert them to integers + so that we can perform the atomic compare and swap. Simplify the + following code by always setting up the "i"ntegral variables. */ + if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)) + { + oldival = oldval; + newival = newval; + iaddr = addr; } else { - wce = build (WITH_CLEANUP_EXPR, void_type_node, cleanup); - CLEANUP_EH_ONLY (wce) = eh_only; - append_to_statement_list (wce, pre_p); + oldival = create_tmp_var (itype, NULL); + newival = create_tmp_var (itype, NULL); + + x = build1 (VIEW_CONVERT_EXPR, itype, oldval); + x = build2 (MODIFY_EXPR, void_type_node, oldival, x); + gimplify_and_add (x, pre_p); + iaddr = fold_convert (build_pointer_type (itype), addr); } - gimplify_stmt (&TREE_OPERAND (wce, 0)); -} + oldival2 = create_tmp_var (itype, NULL); -/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */ + label = create_artificial_label (); + x = build1 (LABEL_EXPR, void_type_node, label); + gimplify_and_add (x, pre_p); -static enum gimplify_status -gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p) -{ - tree targ = *expr_p; - tree temp = TARGET_EXPR_SLOT (targ); - tree init = TARGET_EXPR_INITIAL (targ); - enum gimplify_status ret; + x = build2 (MODIFY_EXPR, void_type_node, newval, rhs); + gimplify_and_add (x, pre_p); - if (init) + if (newval != newival) { - /* TARGET_EXPR temps aren't part of the enclosing block, so add it - to the temps list. */ - gimple_add_tmp_var (temp); - - /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the - expression is supposed to initialize the slot. */ - if (VOID_TYPE_P (TREE_TYPE (init))) - ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none); - else - { - /* Special handling for BIND_EXPR can result in fewer temps. 
*/ - ret = GS_OK; - if (TREE_CODE (init) == BIND_EXPR) - gimplify_bind_expr (&init, temp, pre_p); - if (init != temp) - { - init = build (MODIFY_EXPR, void_type_node, temp, init); - ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, - fb_none); - } - } - if (ret == GS_ERROR) - return GS_ERROR; - append_to_statement_list (init, pre_p); - - /* If needed, push the cleanup for the temp. */ - if (TARGET_EXPR_CLEANUP (targ)) - { - gimplify_stmt (&TARGET_EXPR_CLEANUP (targ)); - gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ), - CLEANUP_EH_ONLY (targ), pre_p); - } + x = build1 (VIEW_CONVERT_EXPR, itype, newval); + x = build2 (MODIFY_EXPR, void_type_node, newival, x); + gimplify_and_add (x, pre_p); + } - /* Only expand this once. */ - TREE_OPERAND (targ, 3) = init; - TARGET_EXPR_INITIAL (targ) = NULL_TREE; + x = build2 (MODIFY_EXPR, void_type_node, oldival2, + fold_convert (itype, oldival)); + gimplify_and_add (x, pre_p); + + args = tree_cons (NULL, fold_convert (itype, newival), NULL); + args = tree_cons (NULL, fold_convert (itype, oldival), args); + args = tree_cons (NULL, iaddr, args); + x = build_function_call_expr (cmpxchg, args); + if (oldval == oldival) + x = fold_convert (type, x); + x = build2 (MODIFY_EXPR, void_type_node, oldival, x); + gimplify_and_add (x, pre_p); + + /* For floating point, be prepared for the loop backedge. */ + if (oldval != oldival) + { + x = build1 (VIEW_CONVERT_EXPR, type, oldival); + x = build2 (MODIFY_EXPR, void_type_node, oldval, x); + gimplify_and_add (x, pre_p); } - else - /* We should have expanded this before. */ - gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp)); - *expr_p = temp; - return GS_OK; + /* Note that we always perform the comparison as an integer, even for + floating point. This allows the atomic operation to properly + succeed even with NaNs and -0.0. */ + x = build3 (COND_EXPR, void_type_node, + build2 (NE_EXPR, boolean_type_node, oldival, oldival2), + build1 (GOTO_EXPR, void_type_node, label), NULL); + gimplify_and_add (x, pre_p); + + *expr_p = NULL; + return GS_ALL_DONE; } -/* Gimplification of expression trees. */ +/* A subroutine of gimplify_omp_atomic. Implement the atomic operation as: -/* Gimplify an expression which appears at statement context; usually, this - means replacing it with a suitably gimple STATEMENT_LIST. */ + GOMP_atomic_start (); + *addr = rhs; + GOMP_atomic_end (); -void -gimplify_stmt (tree *stmt_p) + The result is not globally atomic, but works so long as all parallel + references are within #pragma omp atomic directives. According to + responses received from omp@openmp.org, appears to be within spec. + Which makes sense, since that's how several other compilers handle + this situation as well. */ + +static enum gimplify_status +gimplify_omp_atomic_mutex (tree *expr_p, tree *pre_p, tree addr, tree rhs) { - gimplify_expr (stmt_p, NULL, NULL, is_gimple_stmt, fb_none); + tree t; + + t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START]; + t = build_function_call_expr (t, NULL); + gimplify_and_add (t, pre_p); + + t = build_fold_indirect_ref (addr); + t = build2 (MODIFY_EXPR, void_type_node, t, rhs); + gimplify_and_add (t, pre_p); + + t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END]; + t = build_function_call_expr (t, NULL); + gimplify_and_add (t, pre_p); + + *expr_p = NULL; + return GS_ALL_DONE; } -/* Similarly, but force the result to be a STATEMENT_LIST. */ +/* Gimplify an OMP_ATOMIC statement. 
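   For illustration (hypothetical user code, not from this patch):

       #pragma omp atomic
       x += n;

   with x a naturally aligned int is lowered to roughly

       __sync_fetch_and_add_4 (&x, n);

   If no fetch-and-op builtin or instruction pattern matches, the
   compare-and-swap loop described above is used instead, and failing
   that the GOMP_atomic_start/GOMP_atomic_end mutex fallback.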
*/ -void -gimplify_to_stmt_list (tree *stmt_p) +static enum gimplify_status +gimplify_omp_atomic (tree *expr_p, tree *pre_p) { - gimplify_stmt (stmt_p); - if (!*stmt_p) - *stmt_p = alloc_stmt_list (); - else if (TREE_CODE (*stmt_p) != STATEMENT_LIST) + tree addr = TREE_OPERAND (*expr_p, 0); + tree rhs = TREE_OPERAND (*expr_p, 1); + tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr))); + HOST_WIDE_INT index; + + /* Make sure the type is one of the supported sizes. */ + index = tree_low_cst (TYPE_SIZE_UNIT (type), 1); + index = exact_log2 (index); + if (index >= 0 && index <= 4) { - tree t = *stmt_p; - *stmt_p = alloc_stmt_list (); - append_to_statement_list (t, stmt_p); + enum gimplify_status gs; + unsigned int align; + + if (DECL_P (TREE_OPERAND (addr, 0))) + align = DECL_ALIGN_UNIT (TREE_OPERAND (addr, 0)); + else if (TREE_CODE (TREE_OPERAND (addr, 0)) == COMPONENT_REF + && TREE_CODE (TREE_OPERAND (TREE_OPERAND (addr, 0), 1)) + == FIELD_DECL) + align = DECL_ALIGN_UNIT (TREE_OPERAND (TREE_OPERAND (addr, 0), 1)); + else + align = TYPE_ALIGN_UNIT (type); + + /* __sync builtins require strict data alignment. */ + if (exact_log2 (align) >= index) + { + /* When possible, use specialized atomic update functions. */ + if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)) + { + gs = gimplify_omp_atomic_fetch_op (expr_p, addr, rhs, index); + if (gs != GS_UNHANDLED) + return gs; + } + + /* If we don't have specialized __sync builtins, try and implement + as a compare and swap loop. */ + gs = gimplify_omp_atomic_pipeline (expr_p, pre_p, addr, rhs, index); + if (gs != GS_UNHANDLED) + return gs; + } } -} + /* The ultimate fallback is wrapping the operation in a mutex. */ + return gimplify_omp_atomic_mutex (expr_p, pre_p, addr, rhs); +} -/* Gimplifies the expression tree pointed by EXPR_P. Return 0 if +/* Gimplifies the expression tree pointed to by EXPR_P. Return 0 if gimplification failed. PRE_P points to the list where side effects that must happen before @@ -3786,11 +5401,29 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p, break; case COND_EXPR: - ret = gimplify_cond_expr (expr_p, pre_p, post_p, NULL_TREE); + ret = gimplify_cond_expr (expr_p, pre_p, fallback); + /* C99 code may assign to an array in a structure value of a + conditional expression, and this has undefined behavior + only on execution, so create a temporary if an lvalue is + required. */ + if (fallback == fb_lvalue) + { + *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); + lang_hooks.mark_addressable (*expr_p); + } break; case CALL_EXPR: ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none); + /* C99 code may assign to an array in a structure returned + from a function, and this has undefined behavior only on + execution, so create a temporary if an lvalue is + required. */ + if (fallback == fb_lvalue) + { + *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); + lang_hooks.mark_addressable (*expr_p); + } break; case TREE_LIST: @@ -3804,6 +5437,11 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p, case INIT_EXPR: ret = gimplify_modify_expr (expr_p, pre_p, post_p, fallback != fb_none); + + /* The distinction between MODIFY_EXPR and INIT_EXPR is no longer + useful. 
*/ + if (*expr_p && TREE_CODE (*expr_p) == INIT_EXPR) + TREE_SET_CODE (*expr_p, MODIFY_EXPR); break; case TRUTH_ANDIF_EXPR: @@ -3903,7 +5541,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p, break; case BIND_EXPR: - ret = gimplify_bind_expr (expr_p, NULL, pre_p); + ret = gimplify_bind_expr (expr_p, pre_p); break; case LOOP_EXPR: @@ -3946,16 +5584,30 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p, gimplify any element that has side-effects. */ if (fallback == fb_none) { - for (tmp = CONSTRUCTOR_ELTS (*expr_p); tmp; - tmp = TREE_CHAIN (tmp)) - if (TREE_SIDE_EFFECTS (TREE_VALUE (tmp))) - gimplify_expr (&TREE_VALUE (tmp), pre_p, post_p, - gimple_test_f, fallback); - - *expr_p = NULL_TREE; + unsigned HOST_WIDE_INT ix; + constructor_elt *ce; + tree temp = NULL_TREE; + for (ix = 0; + VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p), + ix, ce); + ix++) + if (TREE_SIDE_EFFECTS (ce->value)) + append_to_statement_list (ce->value, &temp); + + *expr_p = temp; + ret = GS_OK; } - - ret = GS_ALL_DONE; + /* C99 code may assign to an array in a constructed + structure or union, and this has undefined behavior only + on execution, so create a temporary if an lvalue is + required. */ + else if (fallback == fb_lvalue) + { + *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); + lang_hooks.mark_addressable (*expr_p); + } + else + ret = GS_ALL_DONE; break; /* The following are special cases that are not handled by the @@ -4036,7 +5688,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p, break; case STATEMENT_LIST: - ret = gimplify_statement_list (expr_p); + ret = gimplify_statement_list (expr_p, pre_p); break; case WITH_SIZE_EXPR: @@ -4050,35 +5702,14 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p, break; case VAR_DECL: - /* ??? If this is a local variable, and it has not been seen in any - outer BIND_EXPR, then it's probably the result of a duplicate - declaration, for which we've already issued an error. It would - be really nice if the front end wouldn't leak these at all. - Currently the only known culprit is C++ destructors, as seen - in g++.old-deja/g++.jason/binding.C. */ - tmp = *expr_p; - if (!TREE_STATIC (tmp) && !DECL_EXTERNAL (tmp) - && decl_function_context (tmp) == current_function_decl - && !DECL_SEEN_IN_BIND_EXPR_P (tmp)) - { - gcc_assert (errorcount || sorrycount); - ret = GS_ERROR; - break; - } - /* FALLTHRU */ - case PARM_DECL: - tmp = *expr_p; - - /* If this is a local variable sized decl, it must be accessed - indirectly. Perform that substitution. */ - if (DECL_VALUE_EXPR (tmp)) - { - *expr_p = unshare_expr (DECL_VALUE_EXPR (tmp)); - ret = GS_OK; - break; - } + ret = gimplify_var_or_parm_decl (expr_p); + break; + case RESULT_DECL: + /* When within an OpenMP context, notice uses of variables. 
*/ + if (gimplify_omp_ctxp) + omp_notice_variable (gimplify_omp_ctxp, *expr_p, true); ret = GS_ALL_DONE; break; @@ -4087,20 +5718,61 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p, ret = GS_ALL_DONE; break; + case OMP_PARALLEL: + ret = gimplify_omp_parallel (expr_p, pre_p); + break; + + case OMP_FOR: + ret = gimplify_omp_for (expr_p, pre_p); + break; + + case OMP_SECTIONS: + case OMP_SINGLE: + ret = gimplify_omp_workshare (expr_p, pre_p); + break; + + case OMP_SECTION: + case OMP_MASTER: + case OMP_ORDERED: + case OMP_CRITICAL: + gimplify_to_stmt_list (&OMP_BODY (*expr_p)); + break; + + case OMP_ATOMIC: + ret = gimplify_omp_atomic (expr_p, pre_p); + break; + + case OMP_RETURN: + case OMP_CONTINUE: + ret = GS_ALL_DONE; + break; + default: switch (TREE_CODE_CLASS (TREE_CODE (*expr_p))) { case tcc_comparison: - /* If this is a comparison of objects of aggregate type, - handle it specially (by converting to a call to - memcmp). It would be nice to only have to do this - for variable-sized objects, but then we'd have to - allow the same nest of reference nodes we allow for - MODIFY_EXPR and that's too complex. */ - if (!AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 1)))) - goto expr_2; - ret = gimplify_variable_sized_compare (expr_p); - break; + /* Handle comparison of objects of non scalar mode aggregates + with a call to memcmp. It would be nice to only have to do + this for variable-sized objects, but then we'd have to allow + the same nest of reference nodes we allow for MODIFY_EXPR and + that's too complex. + + Compare scalar mode aggregates as scalar mode values. Using + memcmp for them would be very inefficient at best, and is + plain wrong if bitfields are involved. */ + + { + tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1)); + + if (!AGGREGATE_TYPE_P (type)) + goto expr_2; + else if (TYPE_MODE (type) != BLKmode) + ret = gimplify_scalar_mode_aggregate_compare (expr_p); + else + ret = gimplify_variable_sized_compare (expr_p); + + break; + } /* If *EXPR_P does not need to be special-cased, handle it according to its class. */ @@ -4174,7 +5846,9 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p, switch (code) { case COMPONENT_REF: - case REALPART_EXPR: case IMAGPART_EXPR: + case REALPART_EXPR: + case IMAGPART_EXPR: + case VIEW_CONVERT_EXPR: gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, gimple_test_f, fallback); break; @@ -4198,8 +5872,17 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p, { /* Historically, the compiler has treated a bare reference to a volatile lvalue as forcing a load. */ - tree tmp = create_tmp_var (TREE_TYPE (*expr_p), "vol"); - *expr_p = build (MODIFY_EXPR, TREE_TYPE (tmp), tmp, *expr_p); + tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p)); + /* Normally, we do not want to create a temporary for a + TREE_ADDRESSABLE type because such a type should not be + copied by bitwise-assignment. However, we make an + exception here, as all we are doing here is ensuring that + we read the bytes that make up the type. We use + create_tmp_var_raw because create_tmp_var will abort when + given a TREE_ADDRESSABLE type. */ + tree tmp = create_tmp_var_raw (type, "vol"); + gimple_add_tmp_var (tmp); + *expr_p = build2 (MODIFY_EXPR, type, tmp, *expr_p); } else /* We can't do anything useful with a volatile reference to @@ -4285,7 +5968,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p, #endif gcc_assert (fallback & fb_mayfail); /* If this is an asm statement, and the user asked for the - impossible, don't abort. 
Fail and let gimplify_asm_expr + impossible, don't die. Fail and let gimplify_asm_expr issue an error. */ ret = GS_ERROR; goto out; @@ -4313,25 +5996,23 @@ gimplify_type_sizes (tree type, tree *list_p) { tree field, t; - /* Note that we do not check for TYPE_SIZES_GIMPLIFIED already set because - that's not supposed to happen on types where gimplification does anything. - We should assert that it isn't set, but we can indeed be called multiple - times on pointers. Unfortunately, this includes fat pointers which we - can't easily test for. We could pass TYPE down to gimplify_one_sizepos - and test there, but it doesn't seem worth it. */ + if (type == NULL || type == error_mark_node) + return; /* We first do the main variant, then copy into any other variants. */ type = TYPE_MAIN_VARIANT (type); + /* Avoid infinite recursion. */ + if (TYPE_SIZES_GIMPLIFIED (type)) + return; + + TYPE_SIZES_GIMPLIFIED (type) = 1; + switch (TREE_CODE (type)) { - case ERROR_MARK: - return; - case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: - case CHAR_TYPE: case REAL_TYPE: gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p); gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p); @@ -4340,17 +6021,13 @@ gimplify_type_sizes (tree type, tree *list_p) { TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type); TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type); - TYPE_SIZES_GIMPLIFIED (t) = 1; } break; case ARRAY_TYPE: /* These types may not have declarations, so handle them here. */ - if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (type))) - gimplify_type_sizes (TREE_TYPE (type), list_p); - - if (!TYPE_SIZES_GIMPLIFIED (TYPE_DOMAIN (type))) - gimplify_type_sizes (TYPE_DOMAIN (type), list_p); + gimplify_type_sizes (TREE_TYPE (type), list_p); + gimplify_type_sizes (TYPE_DOMAIN (type), list_p); break; case RECORD_TYPE: @@ -4358,7 +6035,26 @@ gimplify_type_sizes (tree type, tree *list_p) case QUAL_UNION_TYPE: for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field)) if (TREE_CODE (field) == FIELD_DECL) - gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p); + { + gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p); + gimplify_type_sizes (TREE_TYPE (field), list_p); + } + break; + + case POINTER_TYPE: + case REFERENCE_TYPE: + /* We used to recurse on the pointed-to type here, which turned out to + be incorrect because its definition might refer to variables not + yet initialized at this point if a forward declaration is involved. + + It was actually useful for anonymous pointed-to types to ensure + that the sizes evaluation dominates every possible later use of the + values. Restricting to such types here would be safe since there + is no possible forward declaration around, but would introduce a + undesireable middle-end semantic to anonymity. We then defer to + front-ends the responsibilty of ensuring that the sizes are + evaluated both early and late enough, e.g. by attaching artifical + type declarations to the tree. */ break; default: @@ -4374,8 +6070,6 @@ gimplify_type_sizes (tree type, tree *list_p) TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type); TYPE_SIZES_GIMPLIFIED (t) = 1; } - - TYPE_SIZES_GIMPLIFIED (type) = 1; } /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P, @@ -4385,18 +6079,46 @@ gimplify_type_sizes (tree type, tree *list_p) void gimplify_one_sizepos (tree *expr_p, tree *stmt_p) { + tree type, expr = *expr_p; + /* We don't do anything if the value isn't there, is constant, or contains A PLACEHOLDER_EXPR. We also don't want to do anything if it's already - a VAR_DECL. 
If it's a VAR_DECL from another function, the gimplfier + a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier will want to replace it with a new variable, but that will cause problems if this type is from outside the function. It's OK to have that here. */ - if (*expr_p == NULL_TREE || TREE_CONSTANT (*expr_p) - || TREE_CODE (*expr_p) == VAR_DECL - || CONTAINS_PLACEHOLDER_P (*expr_p)) + if (expr == NULL_TREE || TREE_CONSTANT (expr) + || TREE_CODE (expr) == VAR_DECL + || CONTAINS_PLACEHOLDER_P (expr)) return; - *expr_p = unshare_expr (*expr_p); + type = TREE_TYPE (expr); + *expr_p = unshare_expr (expr); + gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue); + expr = *expr_p; + + /* Verify that we've an exact type match with the original expression. + In particular, we do not wish to drop a "sizetype" in favour of a + type of similar dimensions. We don't want to pollute the generic + type-stripping code with this knowledge because it doesn't matter + for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT + and friends retain their "sizetype-ness". */ + if (TREE_TYPE (expr) != type + && TREE_CODE (type) == INTEGER_TYPE + && TYPE_IS_SIZETYPE (type)) + { + tree tmp; + + *expr_p = create_tmp_var (type, NULL); + tmp = build1 (NOP_EXPR, type, expr); + tmp = build2 (MODIFY_EXPR, type, *expr_p, tmp); + if (EXPR_HAS_LOCATION (expr)) + SET_EXPR_LOCUS (tmp, EXPR_LOCUS (expr)); + else + SET_EXPR_LOCATION (tmp, input_location); + + gimplify_and_add (tmp, stmt_p); + } } #ifdef ENABLE_CHECKING @@ -4478,7 +6200,7 @@ check_pointer_types_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, } #endif -/* Gimplify the body of statements pointed by BODY_P. FNDECL is the +/* Gimplify the body of statements pointed to by BODY_P. FNDECL is the function decl containing BODY. */ void @@ -4488,6 +6210,8 @@ gimplify_body (tree *body_p, tree fndecl, bool do_parms) tree body, parm_stmts; timevar_push (TV_TREE_GIMPLIFY); + + gcc_assert (gimplify_ctxp == NULL); push_gimplify_context (); /* Unshare most shared trees in the body and in that of any nested functions. @@ -4520,8 +6244,8 @@ gimplify_body (tree *body_p, tree fndecl, bool do_parms) /* If there isn't an outer BIND_EXPR, add one. */ if (TREE_CODE (body) != BIND_EXPR) { - tree b = build (BIND_EXPR, void_type_node, NULL_TREE, - NULL_TREE, NULL_TREE); + tree b = build3 (BIND_EXPR, void_type_node, NULL_TREE, + NULL_TREE, NULL_TREE); TREE_SIDE_EFFECTS (b) = 1; append_to_statement_list_force (body, &BIND_EXPR_BODY (b)); body = b; @@ -4541,6 +6265,7 @@ gimplify_body (tree *body_p, tree fndecl, bool do_parms) *body_p = body; pop_gimplify_context (body); + gcc_assert (gimplify_ctxp == NULL); #ifdef ENABLE_CHECKING walk_tree (body_p, check_pointer_types_r, NULL, NULL); @@ -4556,7 +6281,7 @@ gimplify_body (tree *body_p, tree fndecl, bool do_parms) void gimplify_function_tree (tree fndecl) { - tree oldfn; + tree oldfn, parm, ret; oldfn = current_function_decl; current_function_decl = fndecl; @@ -4564,6 +6289,22 @@ gimplify_function_tree (tree fndecl) if (cfun == NULL) allocate_struct_function (fndecl); + for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm)) + { + /* Preliminarily mark non-addressed complex variables as eligible + for promotion to gimple registers. We'll transform their uses + as we find them. 
*/ + if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE + && !TREE_THIS_VOLATILE (parm) + && !needs_to_live_in_memory (parm)) + DECL_COMPLEX_GIMPLE_REG_P (parm) = 1; + } + + ret = DECL_RESULT (fndecl); + if (TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE + && !needs_to_live_in_memory (ret)) + DECL_COMPLEX_GIMPLE_REG_P (ret) = 1; + gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true); /* If we're instrumenting function entry/exit, then prepend the call to @@ -4575,7 +6316,7 @@ gimplify_function_tree (tree fndecl) { tree tf, x, bind; - tf = build (TRY_FINALLY_EXPR, void_type_node, NULL, NULL); + tf = build2 (TRY_FINALLY_EXPR, void_type_node, NULL, NULL); TREE_SIDE_EFFECTS (tf) = 1; x = DECL_SAVED_TREE (fndecl); append_to_statement_list (x, &TREE_OPERAND (tf, 0)); @@ -4583,7 +6324,7 @@ gimplify_function_tree (tree fndecl) x = build_function_call_expr (x, NULL); append_to_statement_list (x, &TREE_OPERAND (tf, 1)); - bind = build (BIND_EXPR, void_type_node, NULL, NULL, NULL); + bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL); TREE_SIDE_EFFECTS (bind) = 1; x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER]; x = build_function_call_expr (x, NULL); @@ -4618,21 +6359,40 @@ force_gimple_operand (tree expr, tree *stmts, bool simple, tree var) gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs; push_gimplify_context (); - gimplify_ctxp->into_ssa = true; + gimplify_ctxp->into_ssa = in_ssa_p; if (var) - expr = build (MODIFY_EXPR, TREE_TYPE (var), var, expr); + expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr); ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue); gcc_assert (ret != GS_ERROR); - for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t)) - add_referenced_tmp_var (t); + if (referenced_vars) + { + for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t)) + add_referenced_var (t); + } pop_gimplify_context (NULL); return expr; } +/* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If + some statements are produced, emits them before BSI. */ + +tree +force_gimple_operand_bsi (block_stmt_iterator *bsi, tree expr, + bool simple_p, tree var) +{ + tree stmts; + + expr = force_gimple_operand (expr, &stmts, simple_p, var); + if (stmts) + bsi_insert_before (bsi, stmts, BSI_SAME_STMT); + + return expr; +} + #include "gt-gimplify.h"
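
For reference, the new OMP_PARALLEL / OMP_FOR / OMP_SECTIONS / OMP_ATOMIC cases added to gimplify_expr above are reached when a front end lowers OpenMP pragmas into GENERIC. The following is a minimal, self-contained C example of the kind of input those gimplifiers see; nothing in it is taken from the patch itself, and the compile command simply uses the standard -fopenmp and -fdump-tree-gimple options.

    /* Compile with: gcc -fopenmp -fdump-tree-gimple omp-example.c
       The parallel region becomes an OMP_PARALLEL node and the loop an
       OMP_FOR node in GENERIC; gimplify_omp_parallel and gimplify_omp_for
       then gimplify their bodies, recording data-sharing decisions (here
       the reduction on S and the implicitly private I) in the
       gimplify_omp_ctxp splay tree introduced by this patch.  */

    int
    sum (int *a, int n)
    {
      int i, s = 0;

    #pragma omp parallel for reduction(+:s)
      for (i = 0; i < n; i++)
        s += a[i];

      return s;
    }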
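
The fb_none hunk above notes that a bare reference to a volatile lvalue has historically been treated as forcing a load, and that the temporary is now created with create_tmp_var_raw so TREE_ADDRESSABLE types are accepted. A small illustrative C example follows; the variable names are invented and the GIMPLE shown is only approximate.

    volatile int hw_status;

    void
    poll_status (void)
    {
      hw_status;   /* A statement with no apparent effect...  */
    }

    /* ...but because hw_status is volatile, the gimplifier emits a read
       into an artificial temporary, roughly:

           vol.0 = hw_status;

       The temporary exists only to make the load explicit; since all we
       need is to read the bytes, bypassing the TREE_ADDRESSABLE check in
       create_tmp_var is safe here.  */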
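
The new loop over DECL_ARGUMENTS in gimplify_function_tree pre-marks non-addressed, non-volatile complex parameters and the function result as eligible for promotion to GIMPLE registers via DECL_COMPLEX_GIMPLE_REG_P. A tiny example of code that benefits; the function name is made up, everything else is ordinary C99.

    _Complex double
    cmul (_Complex double a, _Complex double b)
    {
      /* Neither a, b nor the return value has its address taken and none
         is volatile, so they pass the !TREE_THIS_VOLATILE and
         !needs_to_live_in_memory checks above and can later be split into
         separate real/imaginary components instead of living in memory.  */
      return a * b;
    }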
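
Finally, a sketch of how the new force_gimple_operand_bsi wrapper is meant to be called from an optimization pass. This fragment is illustrative only: it assumes the usual GCC-internal context (tree-flow.h and friends already included), and the helper name, operand pointer and BASE/OFFSET expression are invented for the example; only the force_gimple_operand_bsi signature comes from the patch.

    /* Replace the operand at *OP_P (somewhere inside the statement that
       BSI points at) by BASE + OFFSET, gimplifying the new expression and
       inserting any statements it requires immediately before *BSI.  */

    static void
    set_operand_to_sum (block_stmt_iterator *bsi, tree *op_p,
                        tree base, tree offset)
    {
      tree expr = build2 (PLUS_EXPR, TREE_TYPE (base), base, offset);

      /* Passing simple_p == true asks for an is_gimple_val result; the
         temporaries and assignments produced while gimplifying EXPR are
         emitted before the current statement via bsi_insert_before.  */
      *op_p = force_gimple_operand_bsi (bsi, expr, true, NULL_TREE);
    }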