1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
5 Contributed by Jason Merrill <jason@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
29 #include "c-family/c-common.h"
30 #include "tree-iterator.h"
33 #include "pointer-set.h"
35 #include "splay-tree.h"
37 /* Local declarations. */
/* Selector for the two kinds of loop-exit targets; used to index
   the bc_label array below.  */
39 enum bc_t { bc_break = 0, bc_continue = 1 };
41 /* Stack of labels which are targets for "break" or "continue",
42 linked through TREE_CHAIN. */
43 static tree bc_label[2];
45 /* Begin a scope which can be exited by a break or continue statement. BC
48 Just creates a label and pushes it into the current context. */
/* Create a fresh artificial label for BC and chain it onto the
   bc_label stack via DECL_CHAIN.
   NOTE(review): the return-type line, braces, the push of the new
   label onto bc_label[bc] and the final return statement appear to
   have been lost in this copy — restore from upstream before use.  */
51 begin_bc_block (enum bc_t bc)
53 tree label = create_artificial_label (input_location);
54 DECL_CHAIN (label) = bc_label[bc];
59 /* Finish a scope which can be exited by a break or continue statement.
60 LABEL was returned from the most recent call to begin_bc_block. BODY is
61 an expression for the contents of the scope.
63 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
64 body. Otherwise, just forget the label. */
/* Pop the break/continue scope for kind BC.  If LABEL was referenced
   (TREE_USED set by get_bc_label), append a GIMPLE label stmt for it
   to BODY; then unlink LABEL from the bc_label stack.
   NOTE(review): braces and the trailing "return body;" seem to be
   missing from this copy.  */
67 finish_bc_block (enum bc_t bc, tree label, gimple_seq body)
69 gcc_assert (label == bc_label[bc]);
71 if (TREE_USED (label))
73 gimple_seq_add_stmt (&body, gimple_build_label (label));
76 bc_label[bc] = DECL_CHAIN (label);
77 DECL_CHAIN (label) = NULL_TREE;
81 /* Get the LABEL_EXPR to represent a break or continue statement
82 in the current block scope. BC indicates which. */
/* Return the innermost break/continue label of kind BC, marking it
   TREE_USED so finish_bc_block knows to emit it.
   NOTE(review): return-type line and "return label;" are missing
   from this copy.  */
85 get_bc_label (enum bc_t bc)
87 tree label = bc_label[bc];
89 /* Mark the label used for finish_bc_block. */
90 TREE_USED (label) = 1;
94 /* Genericize a TRY_BLOCK. */
/* Lower a C++ TRY_BLOCK to a GENERIC TRY_CATCH_EXPR, replacing
   *STMT_P in place.  */
97 genericize_try_block (tree *stmt_p)
99 tree body = TRY_STMTS (*stmt_p);
100 tree cleanup = TRY_HANDLERS (*stmt_p);
102 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
105 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
/* Lower a C++ HANDLER to a GENERIC CATCH_EXPR, replacing *STMT_P
   in place.  The caught type is kept as operand 0, not TREE_TYPE.  */
108 genericize_catch_block (tree *stmt_p)
110 tree type = HANDLER_TYPE (*stmt_p);
111 tree body = HANDLER_BODY (*stmt_p);
113 /* FIXME should the caught type go in TREE_TYPE? */
114 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
117 /* A terser interface for building a representation of an exception
/* Build TRY_CATCH_EXPR (BODY, EH_FILTER_EXPR (ALLOWED, FAILURE)):
   run BODY; if it throws a type not in ALLOWED, execute FAILURE.
   NOTE(review): the return-type line, the declaration of T, braces
   and the final "return t;" appear to be missing from this copy.  */
121 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
125 /* FIXME should the allowed types go in TREE_TYPE? */
126 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
127 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
129 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
130 append_to_statement_list (body, &TREE_OPERAND (t, 0));
135 /* Genericize an EH_SPEC_BLOCK by converting it to a
136 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
/* Lower an EH_SPEC_BLOCK (dynamic exception specification) to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair whose failure path calls
   std::unexpected with the current exception pointer.  The
   TREE_NO_WARNING marks suppress diagnostics on the synthesized
   wrapper and its filter operand.  */
139 genericize_eh_spec_block (tree *stmt_p)
141 tree body = EH_SPEC_STMTS (*stmt_p);
142 tree allowed = EH_SPEC_RAISES (*stmt_p);
143 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
145 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
146 TREE_NO_WARNING (*stmt_p) = true;
147 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
150 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
/* Turn an IF_STMT into a plain COND_EXPR, substituting empty
   statements for missing arms and folding constant conditions whose
   dead arm has no side effects.
   NOTE(review): several lines are missing from this copy — the
   "stmt = *stmt_p;" assignment, the guards around the empty-stmt
   substitutions, the bodies of the constant-condition branches, and
   the final "*stmt_p = stmt;" — restore from upstream.  */
153 genericize_if_stmt (tree *stmt_p)
155 tree stmt, cond, then_, else_;
156 location_t locus = EXPR_LOCATION (*stmt_p);
159 cond = IF_COND (stmt);
160 then_ = THEN_CLAUSE (stmt);
161 else_ = ELSE_CLAUSE (stmt);
164 then_ = build_empty_stmt (locus);
166 else_ = build_empty_stmt (locus);
168 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
170 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
173 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
174 if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
175 SET_EXPR_LOCATION (stmt, locus);
179 /* Build a generic representation of one of the C loop forms. COND is the
180 loop condition or NULL_TREE. BODY is the (possibly compound) statement
181 controlled by the loop. INCR is the increment expression of a for-loop,
182 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
183 evaluated before the loop body as in while and for loops, or after the
184 loop body as in do-while loops. */
/* Gimplify one of the C loop forms into explicit labels and gotos:
   an optional entry jump (for pre-tested loops), a top label, the
   gimplified body and increment, and an exit-condition block that
   either jumps back to the top or falls out to the break label.
   Returns the finished statement sequence.
   NOTE(review): this copy has dropped braces, several control-flow
   lines (the cond_is_first branches, the early-return path for a
   constant-zero condition) and some sequence initializations — the
   surviving lines are byte-identical to upstream but not compilable
   as-is.  */
187 gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
189 gimple top, entry, stmt;
190 gimple_seq stmt_list, body_seq, incr_seq, exit_seq;
191 tree cont_block, break_block;
192 location_t stmt_locus;
194 stmt_locus = input_location;
201 break_block = begin_bc_block (bc_break);
202 cont_block = begin_bc_block (bc_continue);
204 /* If condition is zero don't generate a loop construct. */
205 if (cond && integer_zerop (cond))
210 stmt = gimple_build_goto (get_bc_label (bc_break));
211 gimple_set_location (stmt, stmt_locus);
212 gimple_seq_add_stmt (&stmt_list, stmt);
217 /* If we use a LOOP_EXPR here, we have to feed the whole thing
218 back through the main gimplifier to lower it. Given that we
219 have to gimplify the loop body NOW so that we can resolve
220 break/continue stmts, seems easier to just expand to gotos. */
221 top = gimple_build_label (create_artificial_label (stmt_locus));
223 /* If we have an exit condition, then we build an IF with gotos either
224 out of the loop, or to the top of it. If there's no exit condition,
225 then we just build a jump back to the top. */
226 if (cond && !integer_nonzerop (cond))
228 if (cond != error_mark_node)
230 gimplify_expr (&cond, &exit_seq, NULL, is_gimple_val, fb_rvalue);
231 stmt = gimple_build_cond (NE_EXPR, cond,
232 build_int_cst (TREE_TYPE (cond), 0),
233 gimple_label_label (top),
234 get_bc_label (bc_break));
235 gimple_seq_add_stmt (&exit_seq, stmt);
242 entry = gimple_build_label
243 (create_artificial_label (stmt_locus));
244 stmt = gimple_build_goto (gimple_label_label (entry));
247 stmt = gimple_build_goto (get_bc_label (bc_continue));
248 gimple_set_location (stmt, stmt_locus);
249 gimple_seq_add_stmt (&stmt_list, stmt);
254 stmt = gimple_build_goto (gimple_label_label (top));
255 gimple_seq_add_stmt (&exit_seq, stmt);
259 gimplify_stmt (&body, &body_seq);
260 gimplify_stmt (&incr, &incr_seq);
262 body_seq = finish_bc_block (bc_continue, cont_block, body_seq);
264 gimple_seq_add_stmt (&stmt_list, top);
265 gimple_seq_add_seq (&stmt_list, body_seq);
266 gimple_seq_add_seq (&stmt_list, incr_seq);
267 gimple_seq_add_stmt (&stmt_list, entry);
268 gimple_seq_add_seq (&stmt_list, exit_seq);
270 annotate_all_with_location (stmt_list, stmt_locus);
272 return finish_bc_block (bc_break, break_block, stmt_list);
275 /* Gimplify a FOR_STMT node. Move the stuff in the for-init-stmt into the
276 prequeue and hand off to gimplify_cp_loop. */
/* Gimplify a FOR_STMT: emit the for-init statement into *PRE_P,
   then lower the rest via gimplify_cp_loop (cond_is_first = 1).
   NOTE(review): the "tree stmt = *stmt_p;" line and the trailing
   "*stmt_p = NULL_TREE;" are missing from this copy.  */
279 gimplify_for_stmt (tree *stmt_p, gimple_seq *pre_p)
283 if (FOR_INIT_STMT (stmt))
284 gimplify_and_add (FOR_INIT_STMT (stmt), pre_p);
286 gimple_seq_add_seq (pre_p,
287 gimplify_cp_loop (FOR_COND (stmt), FOR_BODY (stmt),
288 FOR_EXPR (stmt), 1));
292 /* Gimplify a WHILE_STMT node. */
/* Gimplify a WHILE_STMT via gimplify_cp_loop.
   NOTE(review): the "tree stmt = *stmt_p;" line, the remaining
   arguments of the gimplify_cp_loop call (no incr, cond first) and
   the close of the statement are missing from this copy.  */
295 gimplify_while_stmt (tree *stmt_p, gimple_seq *pre_p)
298 gimple_seq_add_seq (pre_p,
299 gimplify_cp_loop (WHILE_COND (stmt), WHILE_BODY (stmt),
304 /* Gimplify a DO_STMT node. */
/* Gimplify a DO_STMT (post-tested loop) via gimplify_cp_loop.
   NOTE(review): the "tree stmt = *stmt_p;" line, the remaining call
   arguments and the close of the statement are missing from this
   copy.  */
307 gimplify_do_stmt (tree *stmt_p, gimple_seq *pre_p)
310 gimple_seq_add_seq (pre_p,
311 gimplify_cp_loop (DO_COND (stmt), DO_BODY (stmt),
316 /* Genericize a SWITCH_STMT by turning it into a SWITCH_EXPR. */
/* Lower a SWITCH_STMT to a SWITCH_EXPR, gimplify it, and wrap the
   result in a break scope so "break" inside the switch resolves.
   NOTE(review): the "tree stmt = *stmt_p;" line, the guard before
   substituting an empty body, and "*stmt_p = NULL_TREE;" are
   missing from this copy.  */
319 gimplify_switch_stmt (tree *stmt_p, gimple_seq *pre_p)
322 tree break_block, body, t;
323 location_t stmt_locus = input_location;
324 gimple_seq seq = NULL;
326 break_block = begin_bc_block (bc_break);
328 body = SWITCH_STMT_BODY (stmt);
330 body = build_empty_stmt (stmt_locus);
332 t = build3 (SWITCH_EXPR, SWITCH_STMT_TYPE (stmt),
333 SWITCH_STMT_COND (stmt), body, NULL_TREE);
334 SET_EXPR_LOCATION (t, stmt_locus);
335 gimplify_and_add (t, &seq);
337 seq = finish_bc_block (bc_break, break_block, seq);
338 gimple_seq_add_seq (pre_p, seq);
342 /* Hook into the middle of gimplifying an OMP_FOR node. This is required
343 in order to properly gimplify CONTINUE statements. Here we merely
344 manage the continue stack; the rest of the job is performed by the
345 regular gimplifier. */
/* Hook into gimplification of an OMP_FOR so that a C++ "continue"
   inside the loop body resolves to the proper label.  Opens a
   continue scope around the regular gimplifier's handling of the
   node, then splices the finished scope into the GIMPLE_OMP_FOR
   body.  OMP_FOR_GIMPLIFYING_P guards against re-entry.
   NOTE(review): the declarations of cont_block/stmt, the early
   return on recursion, and the final "return GS_ALL_DONE;" are
   missing from this copy.  */
347 static enum gimplify_status
348 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
350 tree for_stmt = *expr_p;
353 gimple_seq seq = NULL;
355 /* Protect ourselves from recursion. */
356 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
358 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
360 /* Note that while technically the continue label is enabled too soon
361 here, we should have already diagnosed invalid continues nested within
362 statement expressions within the INIT, COND, or INCR expressions. */
363 cont_block = begin_bc_block (bc_continue);
365 gimplify_and_add (for_stmt, &seq);
366 stmt = gimple_seq_last_stmt (seq);
367 if (gimple_code (stmt) == GIMPLE_OMP_FOR)
368 gimple_omp_set_body (stmt, finish_bc_block (bc_continue, cont_block,
369 gimple_omp_body (stmt)));
371 seq = finish_bc_block (bc_continue, cont_block, seq);
372 gimple_seq_add_seq (pre_p, seq);
374 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
379 /* Gimplify an EXPR_STMT node. */
/* Unwrap an EXPR_STMT, emitting -Wunused-value diagnostics for
   effect-free expressions before gimplification can discard them.
   An empty statement is replaced by an empty statement list.
   NOTE(review): the early return for error_mark_node, the else
   branch calling warn_if_unused_value, and the final store back to
   *stmt_p are incomplete in this copy.  */
382 gimplify_expr_stmt (tree *stmt_p)
384 tree stmt = EXPR_STMT_EXPR (*stmt_p);
386 if (stmt == error_mark_node)
389 /* Gimplification of a statement expression will nullify the
390 statement if all its side effects are moved to *PRE_P and *POST_P.
392 In this case we will not want to emit the gimplified statement.
393 However, we may still want to emit a warning, so we do that before
395 if (stmt && warn_unused_value)
397 if (!TREE_SIDE_EFFECTS (stmt))
399 if (!IS_EMPTY_STMT (stmt)
400 && !VOID_TYPE_P (TREE_TYPE (stmt))
401 && !TREE_NO_WARNING (stmt))
402 warning (OPT_Wunused_value, "statement with no effect");
405 warn_if_unused_value (stmt, input_location);
408 if (stmt == NULL_TREE)
409 stmt = alloc_stmt_list ();
414 /* Gimplify initialization from an AGGR_INIT_EXPR. */
/* Prepare an INIT_EXPR for gimplification: if the RHS is (possibly
   wrapped in a TARGET_EXPR and/or COMPOUND_EXPRs) an AGGR_INIT_EXPR
   or VEC_INIT_EXPR, redirect its slot to the real target so no
   extra temporary is created.
   NOTE(review): the loop header walking T through COMPOUND_EXPRs,
   the "*expr_p = from;" replacement, and the closing braces are
   missing from this copy.  */
417 cp_gimplify_init_expr (tree *expr_p)
419 tree from = TREE_OPERAND (*expr_p, 1);
420 tree to = TREE_OPERAND (*expr_p, 0);
423 /* What about code that pulls out the temp and uses it elsewhere? I
424 think that such code never uses the TARGET_EXPR as an initializer. If
425 I'm wrong, we'll abort because the temp won't have any RTL. In that
426 case, I guess we'll need to replace references somehow. */
427 if (TREE_CODE (from) == TARGET_EXPR)
428 from = TARGET_EXPR_INITIAL (from);
430 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
431 inside the TARGET_EXPR. */
434 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
436 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
437 replace the slot operand with our target.
439 Should we add a target parm to gimplify_expr instead? No, as in this
440 case we want to replace the INIT_EXPR. */
441 if (TREE_CODE (sub) == AGGR_INIT_EXPR
442 || TREE_CODE (sub) == VEC_INIT_EXPR)
444 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
445 AGGR_INIT_EXPR_SLOT (sub) = to;
447 VEC_INIT_EXPR_SLOT (sub) = to;
450 /* The initialization is now a side-effect, so the container can
453 TREE_TYPE (from) = void_type_node;
459 t = TREE_OPERAND (t, 1);
464 /* Gimplify a MUST_NOT_THROW_EXPR. */
/* Gimplify a MUST_NOT_THROW_EXPR into a GIMPLE_TRY whose handler is
   an EH_MUST_NOT_THROW calling std::terminate.
   NOTE(review): the "tree stmt = *expr_p;" line, the declaration of
   MNT, the handling of TEMP vs. void result, and the final return
   of a gimplify_status are missing from this copy.  */
466 static enum gimplify_status
467 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
470 tree temp = voidify_wrapper_expr (stmt, NULL);
471 tree body = TREE_OPERAND (stmt, 0);
472 gimple_seq try_ = NULL;
473 gimple_seq catch_ = NULL;
476 gimplify_and_add (body, &try_);
477 mnt = gimple_build_eh_must_not_throw (terminate_node);
478 gimplify_seq_add_stmt (&catch_, mnt);
479 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
481 gimplify_seq_add_stmt (pre_p, mnt);
492 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
/* C++ language hook for the gimplifier: dispatch on TREE_CODE of
   *EXPR_P and lower each C++-specific node (PTRMEM constants,
   AGGR/VEC_INIT, THROW, MUST_NOT_THROW, INIT/MODIFY fix-ups,
   statement forms, break/continue, UNARY_PLUS, ...), falling back
   to c_gimplify_expr for anything shared with C.  Saves and
   restores stmts_are_full_exprs_p around statement codes.
   NOTE(review): this copy has lost the switch header, most case
   labels, break statements, ret assignments and the final
   "return ret;" — only representative interior lines survive.  */
495 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
497 int saved_stmts_are_full_exprs_p = 0;
498 enum tree_code code = TREE_CODE (*expr_p);
499 enum gimplify_status ret;
501 if (STATEMENT_CODE_P (code))
503 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
504 current_stmt_tree ()->stmts_are_full_exprs_p
505 = STMT_IS_FULL_EXPR_P (*expr_p);
511 *expr_p = cplus_expand_constant (*expr_p);
516 simplify_aggr_init_expr (expr_p);
522 location_t loc = input_location;
523 tree init = VEC_INIT_EXPR_INIT (*expr_p);
524 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
525 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
526 input_location = EXPR_LOCATION (*expr_p);
527 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
528 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
530 tf_warning_or_error);
532 input_location = loc;
537 /* FIXME communicate throw type to back end, probably by moving
538 THROW_EXPR into ../tree.def. */
539 *expr_p = TREE_OPERAND (*expr_p, 0);
543 case MUST_NOT_THROW_EXPR:
544 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
547 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
548 LHS of an assignment might also be involved in the RHS, as in bug
551 cp_gimplify_init_expr (expr_p);
552 if (TREE_CODE (*expr_p) != INIT_EXPR)
554 /* Otherwise fall through. */
557 /* If the back end isn't clever enough to know that the lhs and rhs
558 types are the same, add an explicit conversion. */
559 tree op0 = TREE_OPERAND (*expr_p, 0);
560 tree op1 = TREE_OPERAND (*expr_p, 1);
562 if (!error_operand_p (op0)
563 && !error_operand_p (op1)
564 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
565 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
566 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
567 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
568 TREE_TYPE (op0), op1);
570 else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
571 || (TREE_CODE (op1) == CONSTRUCTOR
572 && CONSTRUCTOR_NELTS (op1) == 0)
573 || (TREE_CODE (op1) == CALL_EXPR
574 && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
575 && is_really_empty_class (TREE_TYPE (op0)))
577 /* Remove any copies of empty classes. We check that the RHS
578 has a simple form so that TARGET_EXPRs and non-empty
579 CONSTRUCTORs get reduced properly, and we leave the return
580 slot optimization alone because it isn't a copy (FIXME so it
581 shouldn't be represented as one).
583 Also drop volatile variables on the RHS to avoid infinite
584 recursion from gimplify_expr trying to load the value. */
585 if (!TREE_SIDE_EFFECTS (op1)
586 || (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
588 else if (TREE_CODE (op1) == MEM_REF
589 && TREE_THIS_VOLATILE (op1))
591 /* Similarly for volatile MEM_REFs on the RHS. */
592 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
595 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
596 TREE_OPERAND (op1, 0), op0);
599 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
606 case EMPTY_CLASS_EXPR:
607 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
608 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
613 *expr_p = BASELINK_FUNCTIONS (*expr_p);
618 genericize_try_block (expr_p);
623 genericize_catch_block (expr_p);
628 genericize_eh_spec_block (expr_p);
636 gimplify_for_stmt (expr_p, pre_p);
641 gimplify_while_stmt (expr_p, pre_p);
646 gimplify_do_stmt (expr_p, pre_p);
651 gimplify_switch_stmt (expr_p, pre_p);
656 ret = cp_gimplify_omp_for (expr_p, pre_p);
660 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_CONTINUE, NOT_TAKEN));
661 gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_continue)));
667 gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_break)));
673 gimplify_expr_stmt (expr_p);
677 case UNARY_PLUS_EXPR:
679 tree arg = TREE_OPERAND (*expr_p, 0);
680 tree type = TREE_TYPE (*expr_p);
681 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
688 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
692 /* Restore saved state. */
693 if (STATEMENT_CODE_P (code))
694 current_stmt_tree ()->stmts_are_full_exprs_p
695 = saved_stmts_are_full_exprs_p;
/* True if T is a PARM_DECL or RESULT_DECL passed by invisible
   reference (DECL_BY_REFERENCE set).  */
701 is_invisiref_parm (const_tree t)
703 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
704 && DECL_BY_REFERENCE (t));
707 /* Return true if the uid in both int tree maps are equal. */
/* htab equality callback: two cxx_int_tree_map entries match when
   their UIDs are equal.  */
710 cxx_int_tree_map_eq (const void *va, const void *vb)
712 const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
713 const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
714 return (a->uid == b->uid);
717 /* Hash a UID in a cxx_int_tree_map. */
/* htab hash callback: hash a cxx_int_tree_map entry by its UID.  */
720 cxx_int_tree_map_hash (const void *item)
722 return ((const struct cxx_int_tree_map *)item)->uid;
725 /* A stable comparison routine for use with splay trees and DECLs. */
/* Splay-tree comparison callback ordering DECLs by DECL_UID, which
   is stable across runs (unlike pointer values).
   NOTE(review): the casts of XA/XB to tree variables A and B are
   missing from this copy.  */
728 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
733 return DECL_UID (a) - DECL_UID (b);
736 /* OpenMP context during genericization. */
/* Per-region state while genericizing inside an OMP_PARALLEL or
   OMP_TASK: link to the enclosing region and a splay tree mapping
   DECLs to their implicit sharing (keyed by DECL_UID).
   NOTE(review): the is_parallel and default_shared fields — both
   referenced later in this file — are missing from this copy.  */
738 struct cp_genericize_omp_taskreg
742 struct cp_genericize_omp_taskreg *outer;
743 splay_tree variables;
746 /* Return true if genericization should try to determine if
747 DECL is firstprivate or shared within task regions. */
/* Return true if DECL's (array-element) type is a class type whose
   sharing inside task regions is worth tracking: not error, not
   thread-local, and not already predetermined.
   NOTE(review): the return-type line and the true/false return
   statements after each test are missing from this copy.  */
750 omp_var_to_track (tree decl)
752 tree type = TREE_TYPE (decl);
753 if (is_invisiref_parm (decl))
754 type = TREE_TYPE (type);
755 while (TREE_CODE (type) == ARRAY_TYPE)
756 type = TREE_TYPE (type);
757 if (type == error_mark_node || !CLASS_TYPE_P (type))
759 if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL_P (decl))
761 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
766 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
/* Record a use of DECL inside task region OMP_CTX.  Recurses into
   enclosing regions, decides whether the use is implicitly shared
   or firstprivate, and for firstprivate class-typed DECLs forces
   instantiation of the copy ctor and dtor now — during later
   gimplification it would be too late.
   NOTE(review): several guard lines (the early return when N is
   already present, the conditions around the parallel/function-
   local test) and closing braces are missing from this copy.  */
769 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
771 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
772 (splay_tree_key) decl);
775 int flags = OMP_CLAUSE_DEFAULT_SHARED;
777 omp_cxx_notice_variable (omp_ctx->outer, decl);
778 if (!omp_ctx->default_shared)
780 struct cp_genericize_omp_taskreg *octx;
782 for (octx = omp_ctx->outer; octx; octx = octx->outer)
784 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
785 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
787 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
790 if (octx->is_parallel)
794 && (TREE_CODE (decl) == PARM_DECL
795 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
796 && DECL_CONTEXT (decl) == current_function_decl)))
797 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
798 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
800 /* DECL is implicitly determined firstprivate in
801 the current task construct. Ensure copy ctor and
802 dtor are instantiated, because during gimplification
803 it will be already too late. */
804 tree type = TREE_TYPE (decl);
805 if (is_invisiref_parm (decl))
806 type = TREE_TYPE (type);
807 while (TREE_CODE (type) == ARRAY_TYPE)
808 type = TREE_TYPE (type);
809 get_copy_ctor (type, tf_none);
810 get_dtor (type, tf_none);
813 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
817 /* Genericization context. */
/* Walk-state for cp_genericize_r: visited-node set, stack of
   enclosing BIND_EXPRs (for USING_STMT placement), and the current
   OpenMP task/parallel context (NULL outside such regions).  */
819 struct cp_genericize_data
821 struct pointer_set_t *p_set;
822 VEC (tree, heap) *bind_expr_stack;
823 struct cp_genericize_omp_taskreg *omp_ctx;
826 /* Perform any pre-gimplification lowering of C++ front end trees to
/* walk_tree callback doing the per-node work of cp_genericize:
   dereference invisible-reference parms, remap block-scope externs,
   lower CLEANUP_STMT / IF_STMT / USING_STMT / USING_DECL DECL_EXPRs,
   repair bitfield-typed COND_EXPR arms, maintain the BIND_EXPR
   stack, and track variable sharing inside OMP_PARALLEL/OMP_TASK.
   NOTE(review): this copy has lost the "tree stmt = *stmt_p;" line,
   many *walk_subtrees assignments, returns, braces and a few
   interior declarations; the surviving lines are verbatim.  */
830 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
833 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
834 struct pointer_set_t *p_set = wtd->p_set;
836 /* If in an OpenMP context, note var uses. */
837 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
838 && (TREE_CODE (stmt) == VAR_DECL
839 || TREE_CODE (stmt) == PARM_DECL
840 || TREE_CODE (stmt) == RESULT_DECL)
841 && omp_var_to_track (stmt))
842 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
844 if (is_invisiref_parm (stmt)
845 /* Don't dereference parms in a thunk, pass the references through. */
846 && !(DECL_THUNK_P (current_function_decl)
847 && TREE_CODE (stmt) == PARM_DECL))
849 *stmt_p = convert_from_reference (stmt);
854 /* Map block scope extern declarations to visible declarations with the
855 same name and type in outer scopes if any. */
856 if (cp_function_chain->extern_decl_map
857 && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
858 && DECL_EXTERNAL (stmt))
860 struct cxx_int_tree_map *h, in;
861 in.uid = DECL_UID (stmt);
862 h = (struct cxx_int_tree_map *)
863 htab_find_with_hash (cp_function_chain->extern_decl_map,
873 /* Other than invisiref parms, don't walk the same tree twice. */
874 if (pointer_set_contains (p_set, stmt))
880 if (TREE_CODE (stmt) == ADDR_EXPR
881 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
883 /* If in an OpenMP context, note var uses. */
884 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
885 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
886 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
887 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
890 else if (TREE_CODE (stmt) == RETURN_EXPR
891 && TREE_OPERAND (stmt, 0)
892 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
893 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
895 else if (TREE_CODE (stmt) == OMP_CLAUSE)
896 switch (OMP_CLAUSE_CODE (stmt))
898 case OMP_CLAUSE_LASTPRIVATE:
899 /* Don't dereference an invisiref in OpenMP clauses. */
900 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
903 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
904 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
905 cp_genericize_r, data, NULL);
908 case OMP_CLAUSE_PRIVATE:
909 /* Don't dereference an invisiref in OpenMP clauses. */
910 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
912 else if (wtd->omp_ctx != NULL)
914 /* Private clause doesn't cause any references to the
915 var in outer contexts, avoid calling
916 omp_cxx_notice_variable for it. */
917 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
919 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
925 case OMP_CLAUSE_SHARED:
926 case OMP_CLAUSE_FIRSTPRIVATE:
927 case OMP_CLAUSE_COPYIN:
928 case OMP_CLAUSE_COPYPRIVATE:
929 /* Don't dereference an invisiref in OpenMP clauses. */
930 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
933 case OMP_CLAUSE_REDUCTION:
934 gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
939 else if (IS_TYPE_OR_DECL_P (stmt))
942 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
943 to lower this construct before scanning it, so we need to lower these
944 before doing anything else. */
945 else if (TREE_CODE (stmt) == CLEANUP_STMT)
946 *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
950 CLEANUP_EXPR (stmt));
952 else if (TREE_CODE (stmt) == IF_STMT)
954 genericize_if_stmt (stmt_p);
955 /* *stmt_p has changed, tail recurse to handle it again. */
956 return cp_genericize_r (stmt_p, walk_subtrees, data);
959 /* COND_EXPR might have incompatible types in branches if one or both
960 arms are bitfields. Fix it up now. */
961 else if (TREE_CODE (stmt) == COND_EXPR)
964 = (TREE_OPERAND (stmt, 1)
965 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
968 = (TREE_OPERAND (stmt, 2)
969 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
972 && !useless_type_conversion_p (TREE_TYPE (stmt),
973 TREE_TYPE (TREE_OPERAND (stmt, 1))))
975 TREE_OPERAND (stmt, 1)
976 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
977 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
981 && !useless_type_conversion_p (TREE_TYPE (stmt),
982 TREE_TYPE (TREE_OPERAND (stmt, 2))))
984 TREE_OPERAND (stmt, 2)
985 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
986 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
991 else if (TREE_CODE (stmt) == BIND_EXPR)
993 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
996 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
997 if (TREE_CODE (decl) == VAR_DECL
998 && !DECL_EXTERNAL (decl)
999 && omp_var_to_track (decl))
1002 = splay_tree_lookup (wtd->omp_ctx->variables,
1003 (splay_tree_key) decl);
1005 splay_tree_insert (wtd->omp_ctx->variables,
1006 (splay_tree_key) decl,
1008 ? OMP_CLAUSE_DEFAULT_SHARED
1009 : OMP_CLAUSE_DEFAULT_PRIVATE);
1012 VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
1013 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1014 cp_genericize_r, data, NULL);
1015 VEC_pop (tree, wtd->bind_expr_stack);
1018 else if (TREE_CODE (stmt) == USING_STMT)
1020 tree block = NULL_TREE;
1022 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1023 BLOCK, and append an IMPORTED_DECL to its
1024 BLOCK_VARS chained list. */
1025 if (wtd->bind_expr_stack)
1028 for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
1029 if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
1030 wtd->bind_expr_stack, i))))
1035 tree using_directive;
1036 gcc_assert (TREE_OPERAND (stmt, 0));
1038 using_directive = make_node (IMPORTED_DECL);
1039 TREE_TYPE (using_directive) = void_type_node;
1041 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1042 = TREE_OPERAND (stmt, 0);
1043 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1044 BLOCK_VARS (block) = using_directive;
1046 /* The USING_STMT won't appear in GENERIC. */
1047 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1051 else if (TREE_CODE (stmt) == DECL_EXPR
1052 && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1054 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1055 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1058 else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
1060 struct cp_genericize_omp_taskreg omp_ctx;
1065 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1066 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1067 omp_ctx.default_shared = omp_ctx.is_parallel;
1068 omp_ctx.outer = wtd->omp_ctx;
1069 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1070 wtd->omp_ctx = &omp_ctx;
1071 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1072 switch (OMP_CLAUSE_CODE (c))
1074 case OMP_CLAUSE_SHARED:
1075 case OMP_CLAUSE_PRIVATE:
1076 case OMP_CLAUSE_FIRSTPRIVATE:
1077 case OMP_CLAUSE_LASTPRIVATE:
1078 decl = OMP_CLAUSE_DECL (c);
1079 if (decl == error_mark_node || !omp_var_to_track (decl))
1081 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1084 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1085 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1086 ? OMP_CLAUSE_DEFAULT_SHARED
1087 : OMP_CLAUSE_DEFAULT_PRIVATE);
1088 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1090 omp_cxx_notice_variable (omp_ctx.outer, decl);
1092 case OMP_CLAUSE_DEFAULT:
1093 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1094 omp_ctx.default_shared = true;
1098 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1099 wtd->omp_ctx = omp_ctx.outer;
1100 splay_tree_delete (omp_ctx.variables);
1103 pointer_set_insert (p_set, *stmt_p);
/* Entry point: convert FNDECL's saved tree from C++-specific GENERIC
   to plain GENERIC.  Rewrites by-invisible-reference parms and
   return value to explicit reference types, fixes DECL_VALUE_EXPR
   of a named-return-value variable, walks the body with
   cp_genericize_r, then hands off to the shared c_genericize.  The
   final asserts check all break/continue scopes were popped.
   NOTE(review): the "tree t;" declaration, the guard around the
   NRV fix-up, the early "return;" for clones, and assorted braces
   are missing from this copy.  */
1109 cp_genericize (tree fndecl)
1112 struct cp_genericize_data wtd;
1114 /* Fix up the types of parms passed by invisible reference. */
1115 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1116 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1118 /* If a function's arguments are copied to create a thunk,
1119 then DECL_BY_REFERENCE will be set -- but the type of the
1120 argument will be a pointer type, so we will never get
1122 gcc_assert (!DECL_BY_REFERENCE (t));
1123 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1124 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1125 DECL_BY_REFERENCE (t) = 1;
1126 TREE_ADDRESSABLE (t) = 0;
1130 /* Do the same for the return value. */
1131 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1133 t = DECL_RESULT (fndecl);
1134 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1135 DECL_BY_REFERENCE (t) = 1;
1136 TREE_ADDRESSABLE (t) = 0;
1140 /* Adjust DECL_VALUE_EXPR of the original var. */
1141 tree outer = outer_curly_brace_block (current_function_decl);
1145 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1146 if (DECL_NAME (t) == DECL_NAME (var)
1147 && DECL_HAS_VALUE_EXPR_P (var)
1148 && DECL_VALUE_EXPR (var) == t)
1150 tree val = convert_from_reference (t);
1151 SET_DECL_VALUE_EXPR (var, val);
1157 /* If we're a clone, the body is already GIMPLE. */
1158 if (DECL_CLONED_FUNCTION_P (fndecl))
1161 /* We do want to see every occurrence of the parms, so we can't just use
1162 walk_tree's hash functionality. */
1163 wtd.p_set = pointer_set_create ();
1164 wtd.bind_expr_stack = NULL;
1166 cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_genericize_r, &wtd, NULL);
1167 pointer_set_destroy (wtd.p_set);
1168 VEC_free (tree, heap, wtd.bind_expr_stack);
1170 /* Do everything else. */
1171 c_genericize (fndecl);
1173 gcc_assert (bc_label[bc_break] == NULL);
1174 gcc_assert (bc_label[bc_continue] == NULL);
1177 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1178 NULL if there is in fact nothing to do. ARG2 may be null if FN
1179 actually only takes one argument. */
/* Build GENERIC code applying FN to ARG1 (and ARG2, if non-null),
   element-wise when ARG1 has array type: take element-0 addresses,
   advance pointer temporaries by the element size in a label/goto
   loop until the end address is reached, calling FN (with any
   default arguments filled in) on each element pair.
   NOTE(review): the return-type line, declarations of i/nargs/
   argarray, the NULL-FN early return, start1/start2 initialization,
   loop braces and the array-case "return ret;" are missing from
   this copy.  */
1182 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1184 tree defparm, parm, t;
1192 nargs = list_length (DECL_ARGUMENTS (fn));
1193 argarray = XALLOCAVEC (tree, nargs);
1195 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1197 defparm = TREE_CHAIN (defparm);
1199 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1201 tree inner_type = TREE_TYPE (arg1);
1202 tree start1, end1, p1;
1203 tree start2 = NULL, p2 = NULL;
1204 tree ret = NULL, lab;
1210 inner_type = TREE_TYPE (inner_type);
1211 start1 = build4 (ARRAY_REF, inner_type, start1,
1212 size_zero_node, NULL, NULL);
1214 start2 = build4 (ARRAY_REF, inner_type, start2,
1215 size_zero_node, NULL, NULL);
1217 while (TREE_CODE (inner_type) == ARRAY_TYPE);
1218 start1 = build_fold_addr_expr_loc (input_location, start1);
1220 start2 = build_fold_addr_expr_loc (input_location, start2);
1222 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1223 end1 = fold_build_pointer_plus (start1, end1);
1225 p1 = create_tmp_var (TREE_TYPE (start1), NULL);
1226 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1227 append_to_statement_list (t, &ret);
1231 p2 = create_tmp_var (TREE_TYPE (start2), NULL);
1232 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1233 append_to_statement_list (t, &ret);
1236 lab = create_artificial_label (input_location);
1237 t = build1 (LABEL_EXPR, void_type_node, lab);
1238 append_to_statement_list (t, &ret);
1243 /* Handle default arguments. */
1244 for (parm = defparm; parm && parm != void_list_node;
1245 parm = TREE_CHAIN (parm), i++)
1246 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1247 TREE_PURPOSE (parm), fn, i);
1248 t = build_call_a (fn, i, argarray);
1249 t = fold_convert (void_type_node, t);
1250 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1251 append_to_statement_list (t, &ret);
1253 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1254 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1255 append_to_statement_list (t, &ret);
1259 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1260 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1261 append_to_statement_list (t, &ret);
1264 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1265 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1266 append_to_statement_list (t, &ret);
1272 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1274 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1275 /* Handle default arguments. */
1276 for (parm = defparm; parm && parm != void_list_node;
1277 parm = TREE_CHAIN (parm), i++)
1278 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1279 TREE_PURPOSE (parm),
1281 t = build_call_a (fn, i, argarray);
1282 t = fold_convert (void_type_node, t);
1283 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1287 /* Return code to initialize DECL with its default constructor, or
1288 NULL if there's nothing to do. */
/* Build code default-constructing DECL via the ctor recorded in
   slot 0 of the clause's CP_OMP_CLAUSE_INFO vector; NULL when no
   info (trivial type).
   NOTE(review): the declaration of RET and the return statement
   are missing from this copy.  */
1291 cxx_omp_clause_default_ctor (tree clause, tree decl,
1292 tree outer ATTRIBUTE_UNUSED)
1294 tree info = CP_OMP_CLAUSE_INFO (clause);
1298 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1303 /* Return code to initialize DST with a copy constructor from SRC. */
/* Build code copy-constructing DST from SRC via the ctor in slot 0
   of the clause info; a trivial type falls back to a plain
   MODIFY_EXPR assignment.
   NOTE(review): the declaration of RET, the if/else around the two
   forms and the return statement are missing from this copy.  */
1306 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1308 tree info = CP_OMP_CLAUSE_INFO (clause);
1312 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1314 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1319 /* Similarly, except use an assignment operator instead. */
/* Like cxx_omp_clause_copy_ctor but using the copy-assignment
   operator, recorded in slot 2 of the clause info.
   NOTE(review): the declaration of RET, the if/else and the return
   statement are missing from this copy.  */
1322 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1324 tree info = CP_OMP_CLAUSE_INFO (clause);
1328 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1330 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1335 /* Return code to destroy DECL. */
/* Build code destroying DECL via the dtor in slot 1 of the clause
   info; NULL for a trivially-destructible type.
   NOTE(review): the declaration of RET and the return statement
   are missing from this copy.  */
1338 cxx_omp_clause_dtor (tree clause, tree decl)
1340 tree info = CP_OMP_CLAUSE_INFO (clause);
1344 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1349 /* True if OpenMP should privatize what this DECL points to rather
1350 than the DECL itself. */
/* OpenMP hook: privatize what an invisible-reference parm points to
   rather than the reference itself.  */
1353 cxx_omp_privatize_by_reference (const_tree decl)
1355 return is_invisiref_parm (decl);
1358 /* Return true if DECL is const qualified var having no mutable member. */
/* Return whether DECL has a const-qualified type with no mutable
   members, looking through invisible references and — for a named
   return value RESULT_DECL — the original variable's type, since
   NVR does not preserve const qualification.
   NOTE(review): the return-type line, several returns/breaks, the
   declaration of VAR and enclosing braces are missing from this
   copy.  */
1360 cxx_omp_const_qual_no_mutable (tree decl)
1362 tree type = TREE_TYPE (decl);
1363 if (TREE_CODE (type) == REFERENCE_TYPE)
1365 if (!is_invisiref_parm (decl))
1367 type = TREE_TYPE (type);
1369 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1371 /* NVR doesn't preserve const qualification of the
1373 tree outer = outer_curly_brace_block (current_function_decl);
1377 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1378 if (DECL_NAME (decl) == DECL_NAME (var)
1379 && (TYPE_MAIN_VARIANT (type)
1380 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1382 if (TYPE_READONLY (TREE_TYPE (var)))
1383 type = TREE_TYPE (var);
1389 if (type == error_mark_node)
1392 /* Variables with const-qualified type having no mutable member
1393 are predetermined shared. */
1394 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1400 /* True if OpenMP sharing attribute of DECL is predetermined. */
/* OpenMP hook: return the predetermined sharing of DECL — shared
   for static data members and for const-qualified vars with no
   mutable member, otherwise unspecified.  */
1402 enum omp_clause_default_kind
1403 cxx_omp_predetermined_sharing (tree decl)
1405 /* Static data members are predetermined shared. */
1406 if (TREE_STATIC (decl))
1408 tree ctx = CP_DECL_CONTEXT (decl);
1409 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1410 return OMP_CLAUSE_DEFAULT_SHARED;
1413 /* Const qualified vars having no mutable member are predetermined
1415 if (cxx_omp_const_qual_no_mutable (decl))
1416 return OMP_CLAUSE_DEFAULT_SHARED;
1418 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1421 /* Finalize an implicitly determined clause. */
1424 cxx_omp_finish_clause (tree c)
1426 tree decl, inner_type;
1427 bool make_shared = false;
1429 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1432 decl = OMP_CLAUSE_DECL (c);
1433 decl = require_complete_type (decl);
1434 inner_type = TREE_TYPE (decl);
1435 if (decl == error_mark_node)
1437 else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1439 if (is_invisiref_parm (decl))
1440 inner_type = TREE_TYPE (inner_type);
1443 error ("%qE implicitly determined as %<firstprivate%> has reference type",
1449 /* We're interested in the base element, not arrays. */
1450 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1451 inner_type = TREE_TYPE (inner_type);
1453 /* Check for special function availability by building a call to one.
1454 Save the results, because later we won't be in the right context
1455 for making these queries. */
1457 && CLASS_TYPE_P (inner_type)
1458 && cxx_omp_create_clause_info (c, inner_type, false, true, false))
1462 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;