/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
- Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+ Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
Free Software Foundation, Inc.
Contributed by Jason Merrill <jason@redhat.com>
#include "hashtab.h"
#include "pointer-set.h"
#include "flags.h"
+#include "splay-tree.h"
/* Local declarations. */
{
tree label = bc_label[bc];
- if (label == NULL_TREE)
- {
- if (bc == bc_break)
- error ("break statement not within loop or switch");
- else
- error ("continue statement not within loop or switch");
-
- return NULL_TREE;
- }
-
/* Mark the label used for finish_bc_block. */
TREE_USED (label) = 1;
return label;
gimplify_and_add (body, &try_);
mnt = gimple_build_eh_must_not_throw (terminate_node);
- gimplify_seq_add_stmt (&catch_, mnt);
+ gimple_seq_add_stmt_without_update (&catch_, mnt);
mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
- gimplify_seq_add_stmt (pre_p, mnt);
+ gimple_seq_add_stmt_without_update (pre_p, mnt);
if (temp)
{
*expr_p = temp;
else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
|| (TREE_CODE (op1) == CONSTRUCTOR
- && CONSTRUCTOR_NELTS (op1) == 0)
+ && CONSTRUCTOR_NELTS (op1) == 0
+ && !TREE_CLOBBER_P (op1))
|| (TREE_CODE (op1) == CALL_EXPR
&& !CALL_EXPR_RETURN_SLOT_OPT (op1)))
&& is_really_empty_class (TREE_TYPE (op0)))
return ((const struct cxx_int_tree_map *)item)->uid;
}
+/* A stable comparison routine for use with splay trees and DECLs. */
+
+static int
+splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
+{
+  tree a = (tree) xa;
+  tree b = (tree) xb;
+
+  /* Order by DECL_UID rather than by pointer value so that the tree
+     shape is reproducible from run to run.  */
+  return DECL_UID (a) - DECL_UID (b);
+}
+
+/* OpenMP context during genericization. */
+
+struct cp_genericize_omp_taskreg
+{
+  /* True for an OMP_PARALLEL region, false for an OMP_TASK region.  */
+  bool is_parallel;
+  /* True if the region's data-sharing default is 'shared' (always
+     true for parallel; true for task only with default(shared)).  */
+  bool default_shared;
+  /* Innermost enclosing parallel/task region, or NULL if outermost.  */
+  struct cp_genericize_omp_taskreg *outer;
+  /* Splay tree keyed by DECL_UID mapping each seen DECL to its
+     OMP_CLAUSE_DEFAULT_* sharing kind in this region.  */
+  splay_tree variables;
+};
+
+/* Return true if genericization should try to determine if
+   DECL is firstprivate or shared within task regions. */
+
+static bool
+omp_var_to_track (tree decl)
+{
+  tree type = TREE_TYPE (decl);
+  /* For invisible reference parms, the interesting type is the
+     referenced one.  */
+  if (is_invisiref_parm (decl))
+    type = TREE_TYPE (type);
+  /* Look through array dimensions to the element type.  */
+  while (TREE_CODE (type) == ARRAY_TYPE)
+    type = TREE_TYPE (type);
+  /* Only class types are worth tracking: they are the ones whose copy
+     ctor/dtor may need instantiating for implicit firstprivate.  */
+  if (type == error_mark_node || !CLASS_TYPE_P (type))
+    return false;
+  /* Thread-local variables are not tracked.  */
+  if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL_P (decl))
+    return false;
+  /* Variables whose sharing is already predetermined need no tracking.  */
+  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
+    return false;
+  return true;
+}
+
+/* Note DECL use in OpenMP region OMP_CTX during genericization. */
+
+static void
+omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
+{
+  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
+					 (splay_tree_key) decl);
+  /* Only the first use of DECL in this region needs processing.  */
+  if (n == NULL)
+    {
+      int flags = OMP_CLAUSE_DEFAULT_SHARED;
+      /* Record the use in enclosing regions first.  */
+      if (omp_ctx->outer)
+	omp_cxx_notice_variable (omp_ctx->outer, decl);
+      /* If the region's default is not 'shared' (a task construct
+	 without default(shared)), DECL may be implicitly firstprivate.  */
+      if (!omp_ctx->default_shared)
+	{
+	  struct cp_genericize_omp_taskreg *octx;
+
+	  /* Search the enclosing regions up to and including the
+	     nearest parallel for a non-shared determination of DECL.  */
+	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
+	    {
+	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
+	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
+		{
+		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
+		  break;
+		}
+	      if (octx->is_parallel)
+		break;
+	    }
+	  /* A PARM_DECL, or an automatic variable of the current
+	     function, not accounted for by any enclosing region is
+	     firstprivate here.  */
+	  if (octx == NULL
+	      && (TREE_CODE (decl) == PARM_DECL
+		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
+		      && DECL_CONTEXT (decl) == current_function_decl)))
+	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
+	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
+	    {
+	      /* DECL is implicitly determined firstprivate in
+		 the current task construct.  Ensure copy ctor and
+		 dtor are instantiated, because during gimplification
+		 it will be already too late. */
+	      tree type = TREE_TYPE (decl);
+	      if (is_invisiref_parm (decl))
+		type = TREE_TYPE (type);
+	      while (TREE_CODE (type) == ARRAY_TYPE)
+		type = TREE_TYPE (type);
+	      get_copy_ctor (type, tf_none);
+	      get_dtor (type, tf_none);
+	    }
+	}
+      /* Remember the decision for DECL in this region.  */
+      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
+    }
+}
+
+/* Genericization context. */
+
struct cp_genericize_data
{
struct pointer_set_t *p_set;
VEC (tree, heap) *bind_expr_stack;
+  /* Innermost enclosing OpenMP parallel/task region during the
+     genericization walk, or NULL when outside any such region.  */
+  struct cp_genericize_omp_taskreg *omp_ctx;
};
/* Perform any pre-gimplification lowering of C++ front end trees to
struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
struct pointer_set_t *p_set = wtd->p_set;
+ /* If in an OpenMP context, note var uses. */
+ if (__builtin_expect (wtd->omp_ctx != NULL, 0)
+ && (TREE_CODE (stmt) == VAR_DECL
+ || TREE_CODE (stmt) == PARM_DECL
+ || TREE_CODE (stmt) == RESULT_DECL)
+ && omp_var_to_track (stmt))
+ omp_cxx_notice_variable (wtd->omp_ctx, stmt);
+
if (is_invisiref_parm (stmt)
/* Don't dereference parms in a thunk, pass the references through. */
&& !(DECL_THUNK_P (current_function_decl)
if (TREE_CODE (stmt) == ADDR_EXPR
&& is_invisiref_parm (TREE_OPERAND (stmt, 0)))
{
+ /* If in an OpenMP context, note var uses. */
+ if (__builtin_expect (wtd->omp_ctx != NULL, 0)
+ && omp_var_to_track (TREE_OPERAND (stmt, 0)))
+ omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
*stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
*walk_subtrees = 0;
}
}
break;
case OMP_CLAUSE_PRIVATE:
+ /* Don't dereference an invisiref in OpenMP clauses. */
+ if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
+ *walk_subtrees = 0;
+ else if (wtd->omp_ctx != NULL)
+ {
+ /* Private clause doesn't cause any references to the
+ var in outer contexts, avoid calling
+ omp_cxx_notice_variable for it. */
+ struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
+ wtd->omp_ctx = NULL;
+ cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
+ data, NULL);
+ wtd->omp_ctx = old;
+ *walk_subtrees = 0;
+ }
+ break;
case OMP_CLAUSE_SHARED:
case OMP_CLAUSE_FIRSTPRIVATE:
case OMP_CLAUSE_COPYIN:
else if (TREE_CODE (stmt) == BIND_EXPR)
{
+ if (__builtin_expect (wtd->omp_ctx != NULL, 0))
+ {
+ tree decl;
+ for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
+ if (TREE_CODE (decl) == VAR_DECL
+ && !DECL_EXTERNAL (decl)
+ && omp_var_to_track (decl))
+ {
+ splay_tree_node n
+ = splay_tree_lookup (wtd->omp_ctx->variables,
+ (splay_tree_key) decl);
+ if (n == NULL)
+ splay_tree_insert (wtd->omp_ctx->variables,
+ (splay_tree_key) decl,
+ TREE_STATIC (decl)
+ ? OMP_CLAUSE_DEFAULT_SHARED
+ : OMP_CLAUSE_DEFAULT_PRIVATE);
+ }
+ }
VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
cp_walk_tree (&BIND_EXPR_BODY (stmt),
cp_genericize_r, data, NULL);
*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
*walk_subtrees = 0;
}
+ else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
+ {
+ struct cp_genericize_omp_taskreg omp_ctx;
+ tree c, decl;
+ splay_tree_node n;
+
+ *walk_subtrees = 0;
+ cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
+ omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
+ omp_ctx.default_shared = omp_ctx.is_parallel;
+ omp_ctx.outer = wtd->omp_ctx;
+ omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
+ wtd->omp_ctx = &omp_ctx;
+ for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
+ switch (OMP_CLAUSE_CODE (c))
+ {
+ case OMP_CLAUSE_SHARED:
+ case OMP_CLAUSE_PRIVATE:
+ case OMP_CLAUSE_FIRSTPRIVATE:
+ case OMP_CLAUSE_LASTPRIVATE:
+ decl = OMP_CLAUSE_DECL (c);
+ if (decl == error_mark_node || !omp_var_to_track (decl))
+ break;
+ n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
+ if (n != NULL)
+ break;
+ splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
+ OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
+ ? OMP_CLAUSE_DEFAULT_SHARED
+ : OMP_CLAUSE_DEFAULT_PRIVATE);
+ if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
+ && omp_ctx.outer)
+ omp_cxx_notice_variable (omp_ctx.outer, decl);
+ break;
+ case OMP_CLAUSE_DEFAULT:
+ if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
+ omp_ctx.default_shared = true;
+ default:
+ break;
+ }
+ cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
+ wtd->omp_ctx = omp_ctx.outer;
+ splay_tree_delete (omp_ctx.variables);
+ }
+ else if (TREE_CODE (stmt) == CONVERT_EXPR)
+ gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
pointer_set_insert (p_set, *stmt_p);
walk_tree's hash functionality. */
wtd.p_set = pointer_set_create ();
wtd.bind_expr_stack = NULL;
+ wtd.omp_ctx = NULL;
cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_genericize_r, &wtd, NULL);
pointer_set_destroy (wtd.p_set);
VEC_free (tree, heap, wtd.bind_expr_stack);
start2 = build_fold_addr_expr_loc (input_location, start2);
end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
- end1 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (start1), start1, end1);
+ end1 = fold_build_pointer_plus (start1, end1);
p1 = create_tmp_var (TREE_TYPE (start1), NULL);
t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
append_to_statement_list (t, &ret);
- t = TYPE_SIZE_UNIT (inner_type);
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p1), p1, t);
+ t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
append_to_statement_list (t, &ret);
if (arg2)
{
- t = TYPE_SIZE_UNIT (inner_type);
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p2), p2, t);
+ t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
append_to_statement_list (t, &ret);
}
return is_invisiref_parm (decl);
}
-/* True if OpenMP sharing attribute of DECL is predetermined. */
-
-enum omp_clause_default_kind
-cxx_omp_predetermined_sharing (tree decl)
+/* Return true if DECL is a const-qualified variable having no mutable
+   member.  */
+bool
+cxx_omp_const_qual_no_mutable (tree decl)
{
-  tree type;
-
-  /* Static data members are predetermined as shared. */
-  if (TREE_STATIC (decl))
-    {
-      tree ctx = CP_DECL_CONTEXT (decl);
-      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
-	return OMP_CLAUSE_DEFAULT_SHARED;
-    }
-
-  type = TREE_TYPE (decl);
+  tree type = TREE_TYPE (decl);
if (TREE_CODE (type) == REFERENCE_TYPE)
{
if (!is_invisiref_parm (decl))
-	return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
+	return false;
type = TREE_TYPE (type);
if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
}
if (type == error_mark_node)
-    return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
+    return false;
/* Variables with const-qualified type having no mutable member
are predetermined shared. */
if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
+    return true;
+
+  return false;
+}
+
+/* True if OpenMP sharing attribute of DECL is predetermined. */
+
+enum omp_clause_default_kind
+cxx_omp_predetermined_sharing (tree decl)
+{
+ /* Static data members are predetermined shared. */
+ if (TREE_STATIC (decl))
+ {
+ tree ctx = CP_DECL_CONTEXT (decl);
+ if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
+ return OMP_CLAUSE_DEFAULT_SHARED;
+ }
+
+ /* Const qualified vars having no mutable member are predetermined
+ shared. */
+ if (cxx_omp_const_qual_no_mutable (decl))
return OMP_CLAUSE_DEFAULT_SHARED;
return OMP_CLAUSE_DEFAULT_UNSPECIFIED;