1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
5 Contributed by Jason Merrill <jason@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
29 #include "c-family/c-common.h"
30 #include "tree-iterator.h"
33 #include "pointer-set.h"
35 #include "splay-tree.h"
37 /* Local declarations. */
/* Selector for the two label stacks below: bc_break for "break"
   targets, bc_continue for "continue" targets.  The numeric values
   are used directly as indices into bc_label[].  */
39 enum bc_t { bc_break = 0, bc_continue = 1 };
41 /* Stack of labels which are targets for "break" or "continue",
42 linked through TREE_CHAIN. */
/* bc_label[bc_break] / bc_label[bc_continue] hold the innermost label
   of each stack; begin_bc_block pushes and finish_bc_block pops via
   DECL_CHAIN.  */
43 static tree bc_label[2];
45 /* Begin a scope which can be exited by a break or continue statement. BC
48 Just creates a label and pushes it into the current context. */
/* NOTE(review): this excerpt appears to omit the return type line, the
   opening brace, and the trailing lines that install the new label as
   bc_label[bc] and return it -- confirm against the complete source.  */
51 begin_bc_block (enum bc_t bc)
/* Create a fresh artificial label at the current input location.  */
53 tree label = create_artificial_label (input_location);
/* Chain the new label onto the stack of enclosing break/continue labels.  */
54 DECL_CHAIN (label) = bc_label[bc];
59 /* Finish a scope which can be exited by a break or continue statement.
60 LABEL was returned from the most recent call to begin_bc_block. BODY is
61 an expression for the contents of the scope.
63 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
64 body. Otherwise, just forget the label. */
67 finish_bc_block (enum bc_t bc, tree label, gimple_seq body)
/* LABEL must be the innermost label of the BC stack -- begin/finish
   calls must nest properly.  */
69 gcc_assert (label == bc_label[bc]);
/* TREE_USED is set by get_bc_label when a break/continue actually
   targeted this label; only then is the label worth emitting.  */
71 if (TREE_USED (label))
73 gimple_seq_add_stmt (&body, gimple_build_label (label));
/* Pop LABEL off the stack and detach it from the chain.  */
76 bc_label[bc] = DECL_CHAIN (label);
77 DECL_CHAIN (label) = NULL_TREE;
81 /* Get the LABEL_EXPR to represent a break or continue statement
82 in the current block scope. BC indicates which. */
85 get_bc_label (enum bc_t bc)
/* The innermost enclosing break/continue label for kind BC.  */
87 tree label = bc_label[bc];
89 /* Mark the label used for finish_bc_block. */
90 TREE_USED (label) = 1;
/* NOTE(review): the `return label;` and closing brace appear to be
   missing from this excerpt.  */
94 /* Genericize a TRY_BLOCK. */
97 genericize_try_block (tree *stmt_p)
99 tree body = TRY_STMTS (*stmt_p);
100 tree cleanup = TRY_HANDLERS (*stmt_p);
/* Replace the front-end TRY_BLOCK with the generic TRY_CATCH_EXPR,
   preserving the body and the handlers.  */
102 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
105 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
108 genericize_catch_block (tree *stmt_p)
110 tree type = HANDLER_TYPE (*stmt_p);
111 tree body = HANDLER_BODY (*stmt_p);
113 /* FIXME should the caught type go in TREE_TYPE? */
/* Operand 0 carries the caught type, operand 1 the handler body.  */
114 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
117 /* A terser interface for building a representation of an exception
   specification: wrap BODY in a TRY_CATCH_EXPR whose handler is an
   EH_FILTER_EXPR permitting ALLOWED types, running FAILURE otherwise.
   NOTE(review): the excerpt omits part of this header comment and the
   function's return type / trailing return statement.  */
121 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
125 /* FIXME should the allowed types go in TREE_TYPE? */
126 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
127 append_to_statement_list (failure, &EH_FILTER_FAILURE (t))
129 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
130 append_to_statement_list (body, &TREE_OPERAND (t, 0));
135 /* Genericize an EH_SPEC_BLOCK by converting it to a
136 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
139 genericize_eh_spec_block (tree *stmt_p)
141 tree body = EH_SPEC_STMTS (*stmt_p);
142 tree allowed = EH_SPEC_RAISES (*stmt_p);
/* On violation of the spec, call __cxa_call_unexpected with the
   current exception pointer.  */
143 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
145 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
/* Suppress warnings on the synthesized wrapper and its filter operand.  */
146 TREE_NO_WARNING (*stmt_p) = true;
147 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
150 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
153 genericize_if_stmt (tree *stmt_p)
155 tree stmt, cond, then_, else_;
156 location_t locus = EXPR_LOCATION (*stmt_p);
/* NOTE(review): the assignment `stmt = *stmt_p;` appears to be missing
   from this excerpt.  */
159 cond = IF_COND (stmt);
160 then_ = THEN_CLAUSE (stmt);
161 else_ = ELSE_CLAUSE (stmt);
/* Missing arms become empty statements so build3 below always has
   both operands.  */
164 then_ = build_empty_stmt (locus);
166 else_ = build_empty_stmt (locus);
/* For a constant condition, keep only the live arm (the dead arm must
   be side-effect free to be dropped).  The excerpt omits the bodies of
   these two branches.  */
168 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
170 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
173 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
174 if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
175 SET_EXPR_LOCATION (stmt, locus);
179 /* Build a generic representation of one of the C loop forms. COND is the
180 loop condition or NULL_TREE. BODY is the (possibly compound) statement
181 controlled by the loop. INCR is the increment expression of a for-loop,
182 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
183 evaluated before the loop body as in while and for loops, or after the
184 loop body as in do-while loops. */
187 gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
189 gimple top, entry, stmt;
190 gimple_seq stmt_list, body_seq, incr_seq, exit_seq;
191 tree cont_block, break_block;
192 location_t stmt_locus;
194 stmt_locus = input_location;
/* Open the break/continue scopes before gimplifying the body so that
   break/continue statements inside it resolve to these labels.  */
201 break_block = begin_bc_block (bc_break);
202 cont_block = begin_bc_block (bc_continue);
204 /* If condition is zero don't generate a loop construct. */
205 if (cond && integer_zerop (cond))
/* NOTE(review): lines handling the cond_is_first case of a constant-zero
   condition are missing from this excerpt.  */
210 stmt = gimple_build_goto (get_bc_label (bc_break));
211 gimple_set_location (stmt, stmt_locus);
212 gimple_seq_add_stmt (&stmt_list, stmt);
217 /* If we use a LOOP_EXPR here, we have to feed the whole thing
218 back through the main gimplifier to lower it. Given that we
219 have to gimplify the loop body NOW so that we can resolve
220 break/continue stmts, seems easier to just expand to gotos. */
221 top = gimple_build_label (create_artificial_label (stmt_locus));
223 /* If we have an exit condition, then we build an IF with gotos either
224 out of the loop, or to the top of it. If there's no exit condition,
225 then we just build a jump back to the top. */
226 if (cond && !integer_nonzerop (cond))
228 if (cond != error_mark_node)
/* Lower COND to a gimple value, then branch back to TOP when it is
   nonzero, otherwise exit through the break label.  */
230 gimplify_expr (&cond, &exit_seq, NULL, is_gimple_val, fb_rvalue);
231 stmt = gimple_build_cond (NE_EXPR, cond,
232 build_int_cst (TREE_TYPE (cond), 0),
233 gimple_label_label (top),
234 get_bc_label (bc_break));
235 gimple_seq_add_stmt (&exit_seq, stmt);
/* For condition-first loops, jump straight to the test on entry.  */
242 entry = gimple_build_label
243 (create_artificial_label (stmt_locus));
244 stmt = gimple_build_goto (gimple_label_label (entry));
247 stmt = gimple_build_goto (get_bc_label (bc_continue));
248 gimple_set_location (stmt, stmt_locus);
249 gimple_seq_add_stmt (&stmt_list, stmt);
/* No exit condition: unconditional back-edge to TOP.  */
254 stmt = gimple_build_goto (gimple_label_label (top));
255 gimple_seq_add_stmt (&exit_seq, stmt);
259 gimplify_stmt (&body, &body_seq);
260 gimplify_stmt (&incr, &incr_seq);
/* "continue" lands after the body, before the increment.  */
262 body_seq = finish_bc_block (bc_continue, cont_block, body_seq);
264 gimple_seq_add_stmt (&stmt_list, top);
265 gimple_seq_add_seq (&stmt_list, body_seq);
266 gimple_seq_add_seq (&stmt_list, incr_seq);
267 gimple_seq_add_stmt (&stmt_list, entry);
268 gimple_seq_add_seq (&stmt_list, exit_seq);
270 annotate_all_with_location (stmt_list, stmt_locus);
/* "break" lands after the whole loop.  */
272 return finish_bc_block (bc_break, break_block, stmt_list);
275 /* Gimplify a FOR_STMT node. Move the stuff in the for-init-stmt into the
276 prequeue and hand off to gimplify_cp_loop. */
279 gimplify_for_stmt (tree *stmt_p, gimple_seq *pre_p)
/* The init statement runs once, before the loop proper.  */
283 if (FOR_INIT_STMT (stmt))
284 gimplify_and_add (FOR_INIT_STMT (stmt), pre_p);
/* cond_is_first == 1: a for-loop tests its condition before the body.  */
286 gimple_seq_add_seq (pre_p,
287 gimplify_cp_loop (FOR_COND (stmt), FOR_BODY (stmt),
288 FOR_EXPR (stmt), 1));
292 /* Gimplify a WHILE_STMT node. */
295 gimplify_while_stmt (tree *stmt_p, gimple_seq *pre_p)
/* No increment expression; condition tested before the body.  */
298 gimple_seq_add_seq (pre_p,
299 gimplify_cp_loop (WHILE_COND (stmt), WHILE_BODY (stmt),
304 /* Gimplify a DO_STMT node. */
307 gimplify_do_stmt (tree *stmt_p, gimple_seq *pre_p)
/* No increment expression; do-while tests its condition after the body.  */
310 gimple_seq_add_seq (pre_p,
311 gimplify_cp_loop (DO_COND (stmt), DO_BODY (stmt),
316 /* Genericize a SWITCH_STMT by turning it into a SWITCH_EXPR. */
319 gimplify_switch_stmt (tree *stmt_p, gimple_seq *pre_p)
322 tree break_block, body, t;
323 location_t stmt_locus = input_location;
324 gimple_seq seq = NULL;
/* "break" inside a switch exits the switch, so open a break scope
   around the body (no continue scope -- continue targets the loop).  */
326 break_block = begin_bc_block (bc_break);
328 body = SWITCH_STMT_BODY (stmt);
330 body = build_empty_stmt (stmt_locus);
332 t = build3 (SWITCH_EXPR, SWITCH_STMT_TYPE (stmt),
333 SWITCH_STMT_COND (stmt), body, NULL_TREE);
334 SET_EXPR_LOCATION (t, stmt_locus);
335 gimplify_and_add (t, &seq);
337 seq = finish_bc_block (bc_break, break_block, seq);
338 gimple_seq_add_seq (pre_p, seq);
342 /* Hook into the middle of gimplifying an OMP_FOR node. This is required
343 in order to properly gimplify CONTINUE statements. Here we merely
344 manage the continue stack; the rest of the job is performed by the
345 regular gimplifier. */
347 static enum gimplify_status
348 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
350 tree for_stmt = *expr_p;
353 gimple_seq seq = NULL;
355 /* Protect ourselves from recursion. */
356 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
358 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
360 /* Note that while technically the continue label is enabled too soon
361 here, we should have already diagnosed invalid continues nested within
362 statement expressions within the INIT, COND, or INCR expressions. */
363 cont_block = begin_bc_block (bc_continue);
365 gimplify_and_add (for_stmt, &seq);
366 stmt = gimple_seq_last_stmt (seq);
/* If gimplification produced a GIMPLE_OMP_FOR, continue targets belong
   inside its body; otherwise close the scope over the whole sequence.  */
367 if (gimple_code (stmt) == GIMPLE_OMP_FOR)
368 gimple_omp_set_body (stmt, finish_bc_block (bc_continue, cont_block,
369 gimple_omp_body (stmt)));
371 seq = finish_bc_block (bc_continue, cont_block, seq);
372 gimple_seq_add_seq (pre_p, seq);
/* Clear the recursion guard before returning.  */
374 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
379 /* Gimplify an EXPR_STMT node. */
382 gimplify_expr_stmt (tree *stmt_p)
384 tree stmt = EXPR_STMT_EXPR (*stmt_p);
386 if (stmt == error_mark_node)
389 /* Gimplification of a statement expression will nullify the
390 statement if all its side effects are moved to *PRE_P and *POST_P.
392 In this case we will not want to emit the gimplified statement.
393 However, we may still want to emit a warning, so we do that before
395 if (stmt && warn_unused_value)
397 if (!TREE_SIDE_EFFECTS (stmt))
399 if (!IS_EMPTY_STMT (stmt)
400 && !VOID_TYPE_P (TREE_TYPE (stmt))
401 && !TREE_NO_WARNING (stmt))
402 warning (OPT_Wunused_value, "statement with no effect")
/* Statement has side effects: delegate the finer-grained diagnostic.  */
405 warn_if_unused_value (stmt, input_location);
/* A nullified statement becomes an empty statement list so callers
   always receive a valid tree.  */
408 if (stmt == NULL_TREE)
409 stmt = alloc_stmt_list ();
414 /* Gimplify initialization from an AGGR_INIT_EXPR. */
417 cp_gimplify_init_expr (tree *expr_p)
419 tree from = TREE_OPERAND (*expr_p, 1);
420 tree to = TREE_OPERAND (*expr_p, 0);
423 /* What about code that pulls out the temp and uses it elsewhere? I
424 think that such code never uses the TARGET_EXPR as an initializer. If
425 I'm wrong, we'll abort because the temp won't have any RTL. In that
426 case, I guess we'll need to replace references somehow. */
427 if (TREE_CODE (from) == TARGET_EXPR)
428 from = TARGET_EXPR_INITIAL (from);
430 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
431 inside the TARGET_EXPR. */
/* NOTE(review): the loop header iterating T over FROM appears to be
   missing from this excerpt.  */
434 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
436 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
437 replace the slot operand with our target.
439 Should we add a target parm to gimplify_expr instead? No, as in this
440 case we want to replace the INIT_EXPR. */
441 if (TREE_CODE (sub) == AGGR_INIT_EXPR
442 || TREE_CODE (sub) == VEC_INIT_EXPR)
444 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
445 AGGR_INIT_EXPR_SLOT (sub) = to;
447 VEC_INIT_EXPR_SLOT (sub) = to;
450 /* The initialization is now a side-effect, so the container can
453 TREE_TYPE (from) = void_type_node;
/* Advance through the COMPOUND_EXPR chain.  */
459 t = TREE_OPERAND (t, 1);
464 /* Gimplify a MUST_NOT_THROW_EXPR. */
466 static enum gimplify_status
467 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
470 tree temp = voidify_wrapper_expr (stmt, NULL);
471 tree body = TREE_OPERAND (stmt, 0);
472 gimple_seq try_ = NULL;
473 gimple_seq catch_ = NULL;
/* Wrap BODY in a GIMPLE_TRY whose handler is eh_must_not_throw with
   terminate() as the failure function.  */
476 gimplify_and_add (body, &try_);
477 mnt = gimple_build_eh_must_not_throw (terminate_node);
478 gimple_seq_add_stmt_without_update (&catch_, mnt);
479 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
481 gimple_seq_add_stmt_without_update (pre_p, mnt);
/* NOTE(review): the tail handling TEMP and the return value appears to
   be missing from this excerpt.  */
492 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
/* NOTE(review): the embedded line numbers jump throughout this function;
   most `case` labels, `break;` statements and some closing braces of the
   big switch are absent from this excerpt -- read against the full
   source before drawing structural conclusions.  */
495 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
497 int saved_stmts_are_full_exprs_p = 0;
498 enum tree_code code = TREE_CODE (*expr_p);
499 enum gimplify_status ret;
/* Statement codes carry their own full-expression flag; save and set
   it for the duration of this call (restored at the bottom).  */
501 if (STATEMENT_CODE_P (code))
503 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
504 current_stmt_tree ()->stmts_are_full_exprs_p
505 = STMT_IS_FULL_EXPR_P (*expr_p);
511 *expr_p = cplus_expand_constant (*expr_p);
516 simplify_aggr_init_expr (expr_p);
/* VEC_INIT_EXPR: expand to a full array-initialization loop.  */
522 location_t loc = input_location;
523 tree init = VEC_INIT_EXPR_INIT (*expr_p);
524 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
525 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
526 input_location = EXPR_LOCATION (*expr_p);
527 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
528 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
530 tf_warning_or_error);
532 input_location = loc;
537 /* FIXME communicate throw type to back end, probably by moving
538 THROW_EXPR into ../tree.def. */
539 *expr_p = TREE_OPERAND (*expr_p, 0);
543 case MUST_NOT_THROW_EXPR:
544 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
547 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
548 LHS of an assignment might also be involved in the RHS, as in bug
551 cp_gimplify_init_expr (expr_p);
552 if (TREE_CODE (*expr_p) != INIT_EXPR)
554 /* Otherwise fall through. */
557 /* If the back end isn't clever enough to know that the lhs and rhs
558 types are the same, add an explicit conversion. */
559 tree op0 = TREE_OPERAND (*expr_p, 0);
560 tree op1 = TREE_OPERAND (*expr_p, 1);
562 if (!error_operand_p (op0)
563 && !error_operand_p (op1)
564 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
565 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
566 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
567 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
568 TREE_TYPE (op0), op1);
570 else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
571 || (TREE_CODE (op1) == CONSTRUCTOR
572 && CONSTRUCTOR_NELTS (op1) == 0
573 && !TREE_CLOBBER_P (op1))
574 || (TREE_CODE (op1) == CALL_EXPR
575 && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
576 && is_really_empty_class (TREE_TYPE (op0)))
578 /* Remove any copies of empty classes. We check that the RHS
579 has a simple form so that TARGET_EXPRs and non-empty
580 CONSTRUCTORs get reduced properly, and we leave the return
581 slot optimization alone because it isn't a copy (FIXME so it
582 shouldn't be represented as one).
584 Also drop volatile variables on the RHS to avoid infinite
585 recursion from gimplify_expr trying to load the value. */
586 if (!TREE_SIDE_EFFECTS (op1)
587 || (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
589 else if (TREE_CODE (op1) == MEM_REF
590 && TREE_THIS_VOLATILE (op1))
592 /* Similarly for volatile MEM_REFs on the RHS. */
593 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
596 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
597 TREE_OPERAND (op1, 0), op0);
600 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
607 case EMPTY_CLASS_EXPR:
608 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
609 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
614 *expr_p = BASELINK_FUNCTIONS (*expr_p);
619 genericize_try_block (expr_p);
624 genericize_catch_block (expr_p);
629 genericize_eh_spec_block (expr_p);
637 gimplify_for_stmt (expr_p, pre_p);
642 gimplify_while_stmt (expr_p, pre_p);
647 gimplify_do_stmt (expr_p, pre_p);
652 gimplify_switch_stmt (expr_p, pre_p);
657 ret = cp_gimplify_omp_for (expr_p, pre_p);
/* continue: a predicted-not-taken branch to the continue label.  */
661 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_CONTINUE, NOT_TAKEN));
662 gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_continue)));
/* break: jump to the innermost break label.  */
668 gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_break)));
674 gimplify_expr_stmt (expr_p);
678 case UNARY_PLUS_EXPR:
680 tree arg = TREE_OPERAND (*expr_p, 0);
681 tree type = TREE_TYPE (*expr_p);
/* Unary plus is a no-op except for a possible type conversion.  */
682 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
/* Everything else: defer to the C-family gimplifier.  */
689 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
693 /* Restore saved state. */
694 if (STATEMENT_CODE_P (code))
695 current_stmt_tree ()->stmts_are_full_exprs_p
696 = saved_stmts_are_full_exprs_p;
/* Return true if T is a parameter or result passed by invisible
   reference (DECL_BY_REFERENCE set on a PARM_DECL or RESULT_DECL).  */
702 is_invisiref_parm (const_tree t)
704 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
705 && DECL_BY_REFERENCE (t));
708 /* Return true if the uid in both int tree maps are equal. */
711 cxx_int_tree_map_eq (const void *va, const void *vb)
713 const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
714 const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
715 return (a->uid == b->uid);
718 /* Hash a UID in a cxx_int_tree_map. */
721 cxx_int_tree_map_hash (const void *item)
/* The UID itself is the hash value.  */
723 return ((const struct cxx_int_tree_map *)item)->uid;
726 /* A stable comparison routine for use with splay trees and DECLs. */
729 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
/* Order by DECL_UID, which is stable across runs (pointer order is not).  */
734 return DECL_UID (a) - DECL_UID (b);
737 /* OpenMP context during genericization. */
739 struct cp_genericize_omp_taskreg
/* NOTE(review): the is_parallel and default_shared bool members used by
   cp_genericize_r appear to be missing from this excerpt.  */
743 struct cp_genericize_omp_taskreg *outer;
/* Map from DECL (splay_tree_key) to its OMP_CLAUSE_DEFAULT_* sharing.  */
744 splay_tree variables;
747 /* Return true if genericization should try to determine if
748 DECL is firstprivate or shared within task regions. */
751 omp_var_to_track (tree decl)
753 tree type = TREE_TYPE (decl);
/* Invisible-reference parms: look at the referred-to type.  */
754 if (is_invisiref_parm (decl))
755 type = TREE_TYPE (type);
/* Strip array dimensions down to the element type.  */
756 while (TREE_CODE (type) == ARRAY_TYPE)
757 type = TREE_TYPE (type);
/* Only class types are interesting (they may need copy ctor/dtor).  */
758 if (type == error_mark_node || !CLASS_TYPE_P (type))
760 if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL_P (decl))
762 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
767 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
770 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
772 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
773 (splay_tree_key) decl);
776 int flags = OMP_CLAUSE_DEFAULT_SHARED;
/* Propagate the use to enclosing regions first.  */
778 omp_cxx_notice_variable (omp_ctx->outer, decl);
779 if (!omp_ctx->default_shared)
781 struct cp_genericize_omp_taskreg *octx;
/* Scan enclosing regions to decide firstprivate vs shared.  */
783 for (octx = omp_ctx->outer; octx; octx = octx->outer)
785 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
786 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
788 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
791 if (octx->is_parallel)
/* Function-local automatics and parms default to firstprivate
   in a task region.  */
795 && (TREE_CODE (decl) == PARM_DECL
796 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
797 && DECL_CONTEXT (decl) == current_function_decl)))
798 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
799 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
801 /* DECL is implicitly determined firstprivate in
802 the current task construct. Ensure copy ctor and
803 dtor are instantiated, because during gimplification
804 it will be already too late. */
805 tree type = TREE_TYPE (decl);
806 if (is_invisiref_parm (decl))
807 type = TREE_TYPE (type);
808 while (TREE_CODE (type) == ARRAY_TYPE)
809 type = TREE_TYPE (type);
810 get_copy_ctor (type, tf_none);
811 get_dtor (type, tf_none);
/* Record the decision so the decl is not reconsidered.  */
814 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
818 /* Genericization context. */
820 struct cp_genericize_data
/* Trees already visited by cp_genericize_r (avoid double-walking).  */
822 struct pointer_set_t *p_set;
/* Stack of enclosing BIND_EXPRs, used to resolve USING_STMTs.  */
823 VEC (tree, heap) *bind_expr_stack;
/* Innermost enclosing OpenMP parallel/task region, or NULL.  */
824 struct cp_genericize_omp_taskreg *omp_ctx;
827 /* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  Walk callback for cp_walk_tree over DECL_SAVED_TREE.
   NOTE(review): the embedded numbering jumps frequently here; several
   braces, `break;` statements, declarations and return statements of
   this large dispatch are missing from the excerpt.  */
831 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
834 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
835 struct pointer_set_t *p_set = wtd->p_set;
837 /* If in an OpenMP context, note var uses. */
838 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
839 && (TREE_CODE (stmt) == VAR_DECL
840 || TREE_CODE (stmt) == PARM_DECL
841 || TREE_CODE (stmt) == RESULT_DECL)
842 && omp_var_to_track (stmt))
843 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
/* Rewrite uses of invisible-reference parms into dereferences.  */
845 if (is_invisiref_parm (stmt)
846 /* Don't dereference parms in a thunk, pass the references through. */
847 && !(DECL_THUNK_P (current_function_decl)
848 && TREE_CODE (stmt) == PARM_DECL))
850 *stmt_p = convert_from_reference (stmt);
855 /* Map block scope extern declarations to visible declarations with the
856 same name and type in outer scopes if any. */
857 if (cp_function_chain->extern_decl_map
858 && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
859 && DECL_EXTERNAL (stmt))
861 struct cxx_int_tree_map *h, in;
862 in.uid = DECL_UID (stmt);
863 h = (struct cxx_int_tree_map *)
864 htab_find_with_hash (cp_function_chain->extern_decl_map,
874 /* Other than invisiref parms, don't walk the same tree twice. */
875 if (pointer_set_contains (p_set, stmt))
/* Taking the address of an invisiref parm yields the parm itself
   (already a reference), converted to the expected pointer type.  */
881 if (TREE_CODE (stmt) == ADDR_EXPR
882 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
884 /* If in an OpenMP context, note var uses. */
885 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
886 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
887 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
888 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
891 else if (TREE_CODE (stmt) == RETURN_EXPR
892 && TREE_OPERAND (stmt, 0)
893 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
894 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
896 else if (TREE_CODE (stmt) == OMP_CLAUSE)
897 switch (OMP_CLAUSE_CODE (stmt))
899 case OMP_CLAUSE_LASTPRIVATE:
900 /* Don't dereference an invisiref in OpenMP clauses. */
901 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
904 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
905 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
906 cp_genericize_r, data, NULL);
909 case OMP_CLAUSE_PRIVATE:
910 /* Don't dereference an invisiref in OpenMP clauses. */
911 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
913 else if (wtd->omp_ctx != NULL)
915 /* Private clause doesn't cause any references to the
916 var in outer contexts, avoid calling
917 omp_cxx_notice_variable for it. */
918 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
920 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
926 case OMP_CLAUSE_SHARED:
927 case OMP_CLAUSE_FIRSTPRIVATE:
928 case OMP_CLAUSE_COPYIN:
929 case OMP_CLAUSE_COPYPRIVATE:
930 /* Don't dereference an invisiref in OpenMP clauses. */
931 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
934 case OMP_CLAUSE_REDUCTION:
935 gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
940 else if (IS_TYPE_OR_DECL_P (stmt))
943 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
944 to lower this construct before scanning it, so we need to lower these
945 before doing anything else. */
946 else if (TREE_CODE (stmt) == CLEANUP_STMT)
947 *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
951 CLEANUP_EXPR (stmt));
953 else if (TREE_CODE (stmt) == IF_STMT)
955 genericize_if_stmt (stmt_p);
956 /* *stmt_p has changed, tail recurse to handle it again. */
957 return cp_genericize_r (stmt_p, walk_subtrees, data);
960 /* COND_EXPR might have incompatible types in branches if one or both
961 arms are bitfields. Fix it up now. */
962 else if (TREE_CODE (stmt) == COND_EXPR)
965 = (TREE_OPERAND (stmt, 1)
966 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
969 = (TREE_OPERAND (stmt, 2)
970 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
973 && !useless_type_conversion_p (TREE_TYPE (stmt),
974 TREE_TYPE (TREE_OPERAND (stmt, 1))))
976 TREE_OPERAND (stmt, 1)
977 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
978 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
982 && !useless_type_conversion_p (TREE_TYPE (stmt),
983 TREE_TYPE (TREE_OPERAND (stmt, 2))))
985 TREE_OPERAND (stmt, 2)
986 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
987 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
992 else if (TREE_CODE (stmt) == BIND_EXPR)
/* Record BIND_EXPR-local class-typed vars in the OMP context.  */
994 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
997 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
998 if (TREE_CODE (decl) == VAR_DECL
999 && !DECL_EXTERNAL (decl)
1000 && omp_var_to_track (decl))
1003 = splay_tree_lookup (wtd->omp_ctx->variables,
1004 (splay_tree_key) decl);
1006 splay_tree_insert (wtd->omp_ctx->variables,
1007 (splay_tree_key) decl,
1009 ? OMP_CLAUSE_DEFAULT_SHARED
1010 : OMP_CLAUSE_DEFAULT_PRIVATE);
/* Walk the body with this BIND_EXPR on the stack so nested
   USING_STMTs can find their enclosing block.  */
1013 VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
1014 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1015 cp_genericize_r, data, NULL);
1016 VEC_pop (tree, wtd->bind_expr_stack);
1019 else if (TREE_CODE (stmt) == USING_STMT)
1021 tree block = NULL_TREE;
1023 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1024 BLOCK, and append an IMPORTED_DECL to its
1025 BLOCK_VARS chained list. */
1026 if (wtd->bind_expr_stack)
1029 for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
1030 if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
1031 wtd->bind_expr_stack, i))))
1036 tree using_directive;
1037 gcc_assert (TREE_OPERAND (stmt, 0));
1039 using_directive = make_node (IMPORTED_DECL);
1040 TREE_TYPE (using_directive) = void_type_node;
1042 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1043 = TREE_OPERAND (stmt, 0);
1044 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1045 BLOCK_VARS (block) = using_directive;
1047 /* The USING_STMT won't appear in GENERIC. */
1048 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1052 else if (TREE_CODE (stmt) == DECL_EXPR
1053 && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1055 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1056 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1059 else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
/* Open a fresh OMP region context, seed it from the explicit
   clauses, walk the body, then tear the context down.  */
1061 struct cp_genericize_omp_taskreg omp_ctx;
1066 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1067 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1068 omp_ctx.default_shared = omp_ctx.is_parallel;
1069 omp_ctx.outer = wtd->omp_ctx;
1070 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1071 wtd->omp_ctx = &omp_ctx;
1072 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1073 switch (OMP_CLAUSE_CODE (c))
1075 case OMP_CLAUSE_SHARED:
1076 case OMP_CLAUSE_PRIVATE:
1077 case OMP_CLAUSE_FIRSTPRIVATE:
1078 case OMP_CLAUSE_LASTPRIVATE:
1079 decl = OMP_CLAUSE_DECL (c);
1080 if (decl == error_mark_node || !omp_var_to_track (decl))
1082 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1085 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1086 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1087 ? OMP_CLAUSE_DEFAULT_SHARED
1088 : OMP_CLAUSE_DEFAULT_PRIVATE);
1089 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1091 omp_cxx_notice_variable (omp_ctx.outer, decl);
1093 case OMP_CLAUSE_DEFAULT:
1094 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1095 omp_ctx.default_shared = true;
1099 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1100 wtd->omp_ctx = omp_ctx.outer;
1101 splay_tree_delete (omp_ctx.variables);
1103 else if (TREE_CODE (stmt) == CONVERT_EXPR)
1104 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
/* Remember this tree so it is not walked again.  */
1106 pointer_set_insert (p_set, *stmt_p);
/* Lower the C++ trees of FNDECL to GENERIC: fix up invisible-reference
   parms and result, walk the saved tree with cp_genericize_r, then hand
   off to the C-family c_genericize.  NOTE(review): the excerpt omits the
   return type, local decls and several braces of this function.  */
1112 cp_genericize (tree fndecl)
1115 struct cp_genericize_data wtd;
1117 /* Fix up the types of parms passed by invisible reference. */
1118 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1119 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1121 /* If a function's arguments are copied to create a thunk,
1122 then DECL_BY_REFERENCE will be set -- but the type of the
1123 argument will be a pointer type, so we will never get
1125 gcc_assert (!DECL_BY_REFERENCE (t));
1126 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1127 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1128 DECL_BY_REFERENCE (t) = 1;
1129 TREE_ADDRESSABLE (t) = 0;
1133 /* Do the same for the return value. */
1134 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1136 t = DECL_RESULT (fndecl);
1137 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1138 DECL_BY_REFERENCE (t) = 1;
1139 TREE_ADDRESSABLE (t) = 0;
1143 /* Adjust DECL_VALUE_EXPR of the original var. */
1144 tree outer = outer_curly_brace_block (current_function_decl);
1148 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1149 if (DECL_NAME (t) == DECL_NAME (var)
1150 && DECL_HAS_VALUE_EXPR_P (var)
1151 && DECL_VALUE_EXPR (var) == t)
1153 tree val = convert_from_reference (t);
1154 SET_DECL_VALUE_EXPR (var, val);
1160 /* If we're a clone, the body is already GIMPLE. */
1161 if (DECL_CLONED_FUNCTION_P (fndecl))
1164 /* We do want to see every occurrence of the parms, so we can't just use
1165 walk_tree's hash functionality. */
1166 wtd.p_set = pointer_set_create ();
1167 wtd.bind_expr_stack = NULL;
1169 cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_genericize_r, &wtd, NULL);
1170 pointer_set_destroy (wtd.p_set);
1171 VEC_free (tree, heap, wtd.bind_expr_stack);
1173 /* Do everything else. */
1174 c_genericize (fndecl);
/* All break/continue scopes must have been closed by now.  */
1176 gcc_assert (bc_label[bc_break] == NULL);
1177 gcc_assert (bc_label[bc_continue] == NULL);
1180 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1181 NULL if there is in fact nothing to do. ARG2 may be null if FN
1182 actually only takes one argument. */
/* NOTE(review): several declarations (nargs, argarray, i) and braces of
   this function are absent from the excerpt.  */
1185 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1187 tree defparm, parm, t;
1195 nargs = list_length (DECL_ARGUMENTS (fn));
1196 argarray = XALLOCAVEC (tree, nargs);
/* Skip the implicit `this' parameter type(s) to reach the defaults.  */
1198 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1200 defparm = TREE_CHAIN (defparm);
/* Array case: emit an explicit element-by-element loop.  */
1202 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1204 tree inner_type = TREE_TYPE (arg1);
1205 tree start1, end1, p1;
1206 tree start2 = NULL, p2 = NULL;
1207 tree ret = NULL, lab;
/* Descend through nested array types to the element type, building
   ARRAY_REFs of element 0 as the loop start addresses.  */
1213 inner_type = TREE_TYPE (inner_type);
1214 start1 = build4 (ARRAY_REF, inner_type, start1,
1215 size_zero_node, NULL, NULL);
1217 start2 = build4 (ARRAY_REF, inner_type, start2,
1218 size_zero_node, NULL, NULL);
1220 while (TREE_CODE (inner_type) == ARRAY_TYPE);
1221 start1 = build_fold_addr_expr_loc (input_location, start1);
1223 start2 = build_fold_addr_expr_loc (input_location, start2);
/* End pointer: start of ARG1 plus its total size in bytes.  */
1225 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1226 end1 = fold_build_pointer_plus (start1, end1);
1228 p1 = create_tmp_var (TREE_TYPE (start1), NULL);
1229 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1230 append_to_statement_list (t, &ret);
1234 p2 = create_tmp_var (TREE_TYPE (start2), NULL);
1235 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1236 append_to_statement_list (t, &ret);
/* Loop head label.  */
1239 lab = create_artificial_label (input_location);
1240 t = build1 (LABEL_EXPR, void_type_node, lab);
1241 append_to_statement_list (t, &ret);
1246 /* Handle default arguments. */
1247 for (parm = defparm; parm && parm != void_list_node;
1248 parm = TREE_CHAIN (parm), i++)
1249 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1250 TREE_PURPOSE (parm), fn, i);
1251 t = build_call_a (fn, i, argarray);
1252 t = fold_convert (void_type_node, t);
1253 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1254 append_to_statement_list (t, &ret);
/* Advance both element pointers by one element.  */
1256 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1257 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1258 append_to_statement_list (t, &ret);
1262 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1263 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1264 append_to_statement_list (t, &ret);
/* Loop while p1 has not reached end1.  */
1267 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1268 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1269 append_to_statement_list (t, &ret);
/* Scalar case: one direct call with the addresses of ARG1/ARG2.  */
1275 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1277 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1278 /* Handle default arguments. */
1279 for (parm = defparm; parm && parm != void_list_node;
1280 parm = TREE_CHAIN (parm), i++)
1281 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1282 TREE_PURPOSE (parm),
1284 t = build_call_a (fn, i, argarray);
1285 t = fold_convert (void_type_node, t);
1286 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1290 /* Return code to initialize DECL with its default constructor, or
1291 NULL if there's nothing to do. */
/* NOTE(review): the return-type line, braces, local declarations and the
   return statement are missing from this excerpt.  */
1294 cxx_omp_clause_default_ctor (tree clause, tree decl,
1295 tree outer ATTRIBUTE_UNUSED)
1297 tree info = CP_OMP_CLAUSE_INFO (clause);
/* Slot 0 of the clause-info TREE_VEC holds the constructor; NULL as the
   second operand means "no source" (default construction).  Presumably
   guarded by a null check on INFO in the omitted lines — confirm.  */
1301 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1306 /* Return code to initialize DST with a copy constructor from SRC. */
/* NOTE(review): function header, braces, and the INFO null-check branch
   structure are missing from this excerpt.  */
1309 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1311 tree info = CP_OMP_CLAUSE_INFO (clause);
/* Slot 0 of the clause-info TREE_VEC holds the copy constructor.  */
1315 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
/* Fallback: a plain bitwise assignment — presumably taken when there is
   no clause info (trivially copyable type); confirm against the omitted
   conditional.  */
1317 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1322 /* Similarly, except use an assignment operator instead. */
/* NOTE(review): function header, braces, and the INFO null-check branch
   structure are missing from this excerpt.  */
1325 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1327 tree info = CP_OMP_CLAUSE_INFO (clause);
/* Slot 2 of the clause-info TREE_VEC holds the copy-assignment operator
   (contrast slot 0 = constructor, slot 1 = destructor).  */
1331 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
/* Fallback plain assignment — presumably the no-info (trivial) case.  */
1333 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1338 /* Return code to destroy DECL. */
/* NOTE(review): function header, braces, local declarations, and return
   statement are missing from this excerpt.  */
1341 cxx_omp_clause_dtor (tree clause, tree decl)
1343 tree info = CP_OMP_CLAUSE_INFO (clause);
/* Slot 1 of the clause-info TREE_VEC holds the destructor; NULL second
   operand because destruction takes no source.  */
1347 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1352 /* True if OpenMP should privatize what this DECL points to rather
1353 than the DECL itself. */
/* Thin predicate wrapper: delegates entirely to is_invisiref_parm, which
   identifies parameters passed by invisible reference.  */
1356 cxx_omp_privatize_by_reference (const_tree decl)
1358 return is_invisiref_parm (decl);
1361 /* Return true if DECL is const qualified var having no mutable member. */
/* NOTE(review): several lines (function header, braces, some returns) are
   missing from this excerpt; the control flow below is partially inferred.  */
1363 cxx_omp_const_qual_no_mutable (tree decl)
1365 tree type = TREE_TYPE (decl);
/* Invisible-reference parameters look at the referenced type; other
   REFERENCE_TYPE decls presumably bail out early — confirm.  */
1366 if (TREE_CODE (type) == REFERENCE_TYPE)
1368 if (!is_invisiref_parm (decl))
1370 type = TREE_TYPE (type);
/* Named RESULT_DECL: the named return value optimization may have
   replaced a user variable, losing its const qualification...  */
1372 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1374 /* NVR doesn't preserve const qualification of the
1376 tree outer = outer_curly_brace_block (current_function_decl);
/* ...so search the outermost user block for the shadowed variable with
   the same name and main type variant, and recover its const type.  */
1380 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1381 if (DECL_NAME (decl) == DECL_NAME (var)
1382 && (TYPE_MAIN_VARIANT (type)
1383 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1385 if (TYPE_READONLY (TREE_TYPE (var)))
1386 type = TREE_TYPE (var);
1392 if (type == error_mark_node)
1395 /* Variables with const-qualified type having no mutable member
1396 are predetermined shared. */
1397 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1403 /* True if OpenMP sharing attribute of DECL is predetermined. */
/* NOTE(review): braces and a few interior lines are missing from this
   excerpt.  Returns a default-kind enumerator rather than a boolean.  */
1405 enum omp_clause_default_kind
1406 cxx_omp_predetermined_sharing (tree decl)
1408 /* Static data members are predetermined shared. */
1409 if (TREE_STATIC (decl))
/* Only statics whose context is a class type qualify here — i.e. static
   data members, not arbitrary static locals/globals.  */
1411 tree ctx = CP_DECL_CONTEXT (decl);
1412 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1413 return OMP_CLAUSE_DEFAULT_SHARED;
1416 /* Const qualified vars having no mutable member are predetermined
1418 if (cxx_omp_const_qual_no_mutable (decl))
1419 return OMP_CLAUSE_DEFAULT_SHARED;
/* Anything else: no predetermined sharing; caller decides.  */
1421 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1424 /* Finalize an implicitly determined clause. */
/* NOTE(review): this function runs past the end of the excerpt (the
   MAKE_SHARED consumer and closing brace are not visible), and interior
   lines are missing.  */
1427 cxx_omp_finish_clause (tree c)
1429 tree decl, inner_type;
1430 bool make_shared = false;
/* Only implicitly-firstprivate clauses need fixing up here; others
   presumably return immediately — confirm omitted line.  */
1432 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1435 decl = OMP_CLAUSE_DECL (c);
1436 decl = require_complete_type (decl);
1437 inner_type = TREE_TYPE (decl);
1438 if (decl == error_mark_node)
/* Reference-typed DECLs: invisible-reference parms are fine (use the
   referenced type); genuine reference types cannot be implicitly
   firstprivate, so diagnose.  */
1440 else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1442 if (is_invisiref_parm (decl))
1443 inner_type = TREE_TYPE (inner_type);
1446 error ("%qE implicitly determined as %<firstprivate%> has reference type",
1452 /* We're interested in the base element, not arrays. */
1453 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1454 inner_type = TREE_TYPE (inner_type);
1456 /* Check for special function availability by building a call to one.
1457 Save the results, because later we won't be in the right context
1458 for making these queries. */
1460 && CLASS_TYPE_P (inner_type)
1461 && cxx_omp_create_clause_info (c, inner_type, false, true, false))
/* If clause info could not be created (e.g. deleted copy ctor), demote
   the clause to shared instead of firstprivate.  */
1465 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;