1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5 Contributed by Jason Merrill <jason@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
29 #include "c-family/c-common.h"
30 #include "tree-iterator.h"
33 #include "pointer-set.h"
36 /* Local declarations. */
38 enum bc_t { bc_break = 0, bc_continue = 1 };
40 /* Stack of labels which are targets for "break" or "continue",
41 linked through TREE_CHAIN. */
42 static tree bc_label[2];
44 /* Begin a scope which can be exited by a break or continue statement. BC
47 Just creates a label and pushes it into the current context. */
50 begin_bc_block (enum bc_t bc)
52 tree label = create_artificial_label (input_location);
53 DECL_CHAIN (label) = bc_label[bc];
58 /* Finish a scope which can be exited by a break or continue statement.
59 LABEL was returned from the most recent call to begin_bc_block. BODY is
60 an expression for the contents of the scope.
62 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
63 body. Otherwise, just forget the label. */
66 finish_bc_block (enum bc_t bc, tree label, gimple_seq body)
68 gcc_assert (label == bc_label[bc]);
70 if (TREE_USED (label))
72 gimple_seq_add_stmt (&body, gimple_build_label (label));
75 bc_label[bc] = DECL_CHAIN (label);
76 DECL_CHAIN (label) = NULL_TREE;
80 /* Get the LABEL_EXPR to represent a break or continue statement
81 in the current block scope. BC indicates which. */
84 get_bc_label (enum bc_t bc)
86 tree label = bc_label[bc];
88 if (label == NULL_TREE)
91 error ("break statement not within loop or switch");
93 error ("continue statement not within loop or switch");
98 /* Mark the label used for finish_bc_block. */
99 TREE_USED (label) = 1;
103 /* Genericize a TRY_BLOCK. */
106 genericize_try_block (tree *stmt_p)
108 tree body = TRY_STMTS (*stmt_p);
109 tree cleanup = TRY_HANDLERS (*stmt_p);
111 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
114 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
117 genericize_catch_block (tree *stmt_p)
119 tree type = HANDLER_TYPE (*stmt_p);
120 tree body = HANDLER_BODY (*stmt_p);
122 /* FIXME should the caught type go in TREE_TYPE? */
123 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
126 /* A terser interface for building a representation of an exception
130 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
134 /* FIXME should the allowed types go in TREE_TYPE? */
135 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
136 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
138 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
139 append_to_statement_list (body, &TREE_OPERAND (t, 0));
144 /* Genericize an EH_SPEC_BLOCK by converting it to a
145 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
148 genericize_eh_spec_block (tree *stmt_p)
150 tree body = EH_SPEC_STMTS (*stmt_p);
151 tree allowed = EH_SPEC_RAISES (*stmt_p);
152 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
154 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
155 TREE_NO_WARNING (*stmt_p) = true;
156 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
159 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
162 genericize_if_stmt (tree *stmt_p)
164 tree stmt, cond, then_, else_;
165 location_t locus = EXPR_LOCATION (*stmt_p);
168 cond = IF_COND (stmt);
169 then_ = THEN_CLAUSE (stmt);
170 else_ = ELSE_CLAUSE (stmt);
173 then_ = build_empty_stmt (locus);
175 else_ = build_empty_stmt (locus);
177 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
179 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
182 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
183 if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
184 SET_EXPR_LOCATION (stmt, locus);
188 /* Build a generic representation of one of the C loop forms. COND is the
189 loop condition or NULL_TREE. BODY is the (possibly compound) statement
190 controlled by the loop. INCR is the increment expression of a for-loop,
191 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
192 evaluated before the loop body as in while and for loops, or after the
193 loop body as in do-while loops. */
196 gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
198 gimple top, entry, stmt;
199 gimple_seq stmt_list, body_seq, incr_seq, exit_seq;
200 tree cont_block, break_block;
201 location_t stmt_locus;
203 stmt_locus = input_location;
210 break_block = begin_bc_block (bc_break);
211 cont_block = begin_bc_block (bc_continue);
213 /* If condition is zero don't generate a loop construct. */
214 if (cond && integer_zerop (cond))
219 stmt = gimple_build_goto (get_bc_label (bc_break));
220 gimple_set_location (stmt, stmt_locus);
221 gimple_seq_add_stmt (&stmt_list, stmt);
226 /* If we use a LOOP_EXPR here, we have to feed the whole thing
227 back through the main gimplifier to lower it. Given that we
228 have to gimplify the loop body NOW so that we can resolve
229 break/continue stmts, seems easier to just expand to gotos. */
230 top = gimple_build_label (create_artificial_label (stmt_locus));
232 /* If we have an exit condition, then we build an IF with gotos either
233 out of the loop, or to the top of it. If there's no exit condition,
234 then we just build a jump back to the top. */
235 if (cond && !integer_nonzerop (cond))
237 if (cond != error_mark_node)
239 gimplify_expr (&cond, &exit_seq, NULL, is_gimple_val, fb_rvalue);
240 stmt = gimple_build_cond (NE_EXPR, cond,
241 build_int_cst (TREE_TYPE (cond), 0),
242 gimple_label_label (top),
243 get_bc_label (bc_break));
244 gimple_seq_add_stmt (&exit_seq, stmt);
251 entry = gimple_build_label
252 (create_artificial_label (stmt_locus));
253 stmt = gimple_build_goto (gimple_label_label (entry));
256 stmt = gimple_build_goto (get_bc_label (bc_continue));
257 gimple_set_location (stmt, stmt_locus);
258 gimple_seq_add_stmt (&stmt_list, stmt);
263 stmt = gimple_build_goto (gimple_label_label (top));
264 gimple_seq_add_stmt (&exit_seq, stmt);
268 gimplify_stmt (&body, &body_seq);
269 gimplify_stmt (&incr, &incr_seq);
271 body_seq = finish_bc_block (bc_continue, cont_block, body_seq);
273 gimple_seq_add_stmt (&stmt_list, top);
274 gimple_seq_add_seq (&stmt_list, body_seq);
275 gimple_seq_add_seq (&stmt_list, incr_seq);
276 gimple_seq_add_stmt (&stmt_list, entry);
277 gimple_seq_add_seq (&stmt_list, exit_seq);
279 annotate_all_with_location (stmt_list, stmt_locus);
281 return finish_bc_block (bc_break, break_block, stmt_list);
284 /* Gimplify a FOR_STMT node. Move the stuff in the for-init-stmt into the
285 prequeue and hand off to gimplify_cp_loop. */
288 gimplify_for_stmt (tree *stmt_p, gimple_seq *pre_p)
292 if (FOR_INIT_STMT (stmt))
293 gimplify_and_add (FOR_INIT_STMT (stmt), pre_p);
295 gimple_seq_add_seq (pre_p,
296 gimplify_cp_loop (FOR_COND (stmt), FOR_BODY (stmt),
297 FOR_EXPR (stmt), 1));
301 /* Gimplify a WHILE_STMT node. */
304 gimplify_while_stmt (tree *stmt_p, gimple_seq *pre_p)
307 gimple_seq_add_seq (pre_p,
308 gimplify_cp_loop (WHILE_COND (stmt), WHILE_BODY (stmt),
313 /* Gimplify a DO_STMT node. */
316 gimplify_do_stmt (tree *stmt_p, gimple_seq *pre_p)
319 gimple_seq_add_seq (pre_p,
320 gimplify_cp_loop (DO_COND (stmt), DO_BODY (stmt),
325 /* Genericize a SWITCH_STMT by turning it into a SWITCH_EXPR. */
328 gimplify_switch_stmt (tree *stmt_p, gimple_seq *pre_p)
331 tree break_block, body, t;
332 location_t stmt_locus = input_location;
333 gimple_seq seq = NULL;
335 break_block = begin_bc_block (bc_break);
337 body = SWITCH_STMT_BODY (stmt);
339 body = build_empty_stmt (stmt_locus);
341 t = build3 (SWITCH_EXPR, SWITCH_STMT_TYPE (stmt),
342 SWITCH_STMT_COND (stmt), body, NULL_TREE);
343 SET_EXPR_LOCATION (t, stmt_locus);
344 gimplify_and_add (t, &seq);
346 seq = finish_bc_block (bc_break, break_block, seq);
347 gimple_seq_add_seq (pre_p, seq);
351 /* Hook into the middle of gimplifying an OMP_FOR node. This is required
352 in order to properly gimplify CONTINUE statements. Here we merely
353 manage the continue stack; the rest of the job is performed by the
354 regular gimplifier. */
356 static enum gimplify_status
357 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
359 tree for_stmt = *expr_p;
362 gimple_seq seq = NULL;
364 /* Protect ourselves from recursion. */
365 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
367 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
369 /* Note that while technically the continue label is enabled too soon
370 here, we should have already diagnosed invalid continues nested within
371 statement expressions within the INIT, COND, or INCR expressions. */
372 cont_block = begin_bc_block (bc_continue);
374 gimplify_and_add (for_stmt, &seq);
375 stmt = gimple_seq_last_stmt (seq);
376 if (gimple_code (stmt) == GIMPLE_OMP_FOR)
377 gimple_omp_set_body (stmt, finish_bc_block (bc_continue, cont_block,
378 gimple_omp_body (stmt)));
380 seq = finish_bc_block (bc_continue, cont_block, seq);
381 gimple_seq_add_seq (pre_p, seq);
383 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
388 /* Gimplify an EXPR_STMT node. */
391 gimplify_expr_stmt (tree *stmt_p)
393 tree stmt = EXPR_STMT_EXPR (*stmt_p);
395 if (stmt == error_mark_node)
398 /* Gimplification of a statement expression will nullify the
399 statement if all its side effects are moved to *PRE_P and *POST_P.
401 In this case we will not want to emit the gimplified statement.
402 However, we may still want to emit a warning, so we do that before
404 if (stmt && warn_unused_value)
406 if (!TREE_SIDE_EFFECTS (stmt))
408 if (!IS_EMPTY_STMT (stmt)
409 && !VOID_TYPE_P (TREE_TYPE (stmt))
410 && !TREE_NO_WARNING (stmt))
411 warning (OPT_Wunused_value, "statement with no effect");
414 warn_if_unused_value (stmt, input_location);
417 if (stmt == NULL_TREE)
418 stmt = alloc_stmt_list ();
423 /* Gimplify initialization from an AGGR_INIT_EXPR. */
426 cp_gimplify_init_expr (tree *expr_p)
428 tree from = TREE_OPERAND (*expr_p, 1);
429 tree to = TREE_OPERAND (*expr_p, 0);
432 /* What about code that pulls out the temp and uses it elsewhere? I
433 think that such code never uses the TARGET_EXPR as an initializer. If
434 I'm wrong, we'll abort because the temp won't have any RTL. In that
435 case, I guess we'll need to replace references somehow. */
436 if (TREE_CODE (from) == TARGET_EXPR)
437 from = TARGET_EXPR_INITIAL (from);
439 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
440 inside the TARGET_EXPR. */
443 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
445 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
446 replace the slot operand with our target.
448 Should we add a target parm to gimplify_expr instead? No, as in this
449 case we want to replace the INIT_EXPR. */
450 if (TREE_CODE (sub) == AGGR_INIT_EXPR
451 || TREE_CODE (sub) == VEC_INIT_EXPR)
453 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
454 AGGR_INIT_EXPR_SLOT (sub) = to;
456 VEC_INIT_EXPR_SLOT (sub) = to;
459 /* The initialization is now a side-effect, so the container can
462 TREE_TYPE (from) = void_type_node;
468 t = TREE_OPERAND (t, 1);
473 /* Gimplify a MUST_NOT_THROW_EXPR. */
475 static enum gimplify_status
476 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
479 tree temp = voidify_wrapper_expr (stmt, NULL);
480 tree body = TREE_OPERAND (stmt, 0);
481 gimple_seq try_ = NULL;
482 gimple_seq catch_ = NULL;
485 gimplify_and_add (body, &try_);
486 mnt = gimple_build_eh_must_not_throw (terminate_node);
487 gimplify_seq_add_stmt (&catch_, mnt);
488 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
490 gimplify_seq_add_stmt (pre_p, mnt);
501 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
504 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
506 int saved_stmts_are_full_exprs_p = 0;
507 enum tree_code code = TREE_CODE (*expr_p);
508 enum gimplify_status ret;
510 if (STATEMENT_CODE_P (code))
512 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
513 current_stmt_tree ()->stmts_are_full_exprs_p
514 = STMT_IS_FULL_EXPR_P (*expr_p);
520 *expr_p = cplus_expand_constant (*expr_p);
525 simplify_aggr_init_expr (expr_p);
531 location_t loc = input_location;
532 tree init = VEC_INIT_EXPR_INIT (*expr_p);
533 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
534 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
535 input_location = EXPR_LOCATION (*expr_p);
536 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
537 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
539 tf_warning_or_error);
541 input_location = loc;
546 /* FIXME communicate throw type to back end, probably by moving
547 THROW_EXPR into ../tree.def. */
548 *expr_p = TREE_OPERAND (*expr_p, 0);
552 case MUST_NOT_THROW_EXPR:
553 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
556 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
557 LHS of an assignment might also be involved in the RHS, as in bug
560 cp_gimplify_init_expr (expr_p);
561 if (TREE_CODE (*expr_p) != INIT_EXPR)
563 /* Otherwise fall through. */
566 /* If the back end isn't clever enough to know that the lhs and rhs
567 types are the same, add an explicit conversion. */
568 tree op0 = TREE_OPERAND (*expr_p, 0);
569 tree op1 = TREE_OPERAND (*expr_p, 1);
571 if (!error_operand_p (op0)
572 && !error_operand_p (op1)
573 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
574 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
575 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
576 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
577 TREE_TYPE (op0), op1);
579 else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
580 || (TREE_CODE (op1) == CONSTRUCTOR
581 && CONSTRUCTOR_NELTS (op1) == 0)
582 || (TREE_CODE (op1) == CALL_EXPR
583 && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
584 && is_really_empty_class (TREE_TYPE (op0)))
586 /* Remove any copies of empty classes. We check that the RHS
587 has a simple form so that TARGET_EXPRs and non-empty
588 CONSTRUCTORs get reduced properly, and we leave the return
589 slot optimization alone because it isn't a copy (FIXME so it
590 shouldn't be represented as one).
592 Also drop volatile variables on the RHS to avoid infinite
593 recursion from gimplify_expr trying to load the value. */
594 if (!TREE_SIDE_EFFECTS (op1)
595 || (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
597 else if (TREE_CODE (op1) == MEM_REF
598 && TREE_THIS_VOLATILE (op1))
600 /* Similarly for volatile MEM_REFs on the RHS. */
601 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
604 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
605 TREE_OPERAND (op1, 0), op0);
608 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
615 case EMPTY_CLASS_EXPR:
616 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
617 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
622 *expr_p = BASELINK_FUNCTIONS (*expr_p);
627 genericize_try_block (expr_p);
632 genericize_catch_block (expr_p);
637 genericize_eh_spec_block (expr_p);
645 gimplify_for_stmt (expr_p, pre_p);
650 gimplify_while_stmt (expr_p, pre_p);
655 gimplify_do_stmt (expr_p, pre_p);
660 gimplify_switch_stmt (expr_p, pre_p);
665 ret = cp_gimplify_omp_for (expr_p, pre_p);
669 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_CONTINUE, NOT_TAKEN));
670 gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_continue)));
676 gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_break)));
682 gimplify_expr_stmt (expr_p);
686 case UNARY_PLUS_EXPR:
688 tree arg = TREE_OPERAND (*expr_p, 0);
689 tree type = TREE_TYPE (*expr_p);
690 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
697 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
701 /* Restore saved state. */
702 if (STATEMENT_CODE_P (code))
703 current_stmt_tree ()->stmts_are_full_exprs_p
704 = saved_stmts_are_full_exprs_p;
710 is_invisiref_parm (const_tree t)
712 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
713 && DECL_BY_REFERENCE (t));
716 /* Return true if the uid in both int tree maps are equal. */
719 cxx_int_tree_map_eq (const void *va, const void *vb)
721 const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
722 const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
723 return (a->uid == b->uid);
726 /* Hash a UID in a cxx_int_tree_map. */
729 cxx_int_tree_map_hash (const void *item)
731 return ((const struct cxx_int_tree_map *)item)->uid;
734 struct cp_genericize_data
736 struct pointer_set_t *p_set;
737 VEC (tree, heap) *bind_expr_stack;
740 /* Perform any pre-gimplification lowering of C++ front end trees to
744 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
747 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
748 struct pointer_set_t *p_set = wtd->p_set;
750 if (is_invisiref_parm (stmt)
751 /* Don't dereference parms in a thunk, pass the references through. */
752 && !(DECL_THUNK_P (current_function_decl)
753 && TREE_CODE (stmt) == PARM_DECL))
755 *stmt_p = convert_from_reference (stmt);
760 /* Map block scope extern declarations to visible declarations with the
761 same name and type in outer scopes if any. */
762 if (cp_function_chain->extern_decl_map
763 && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
764 && DECL_EXTERNAL (stmt))
766 struct cxx_int_tree_map *h, in;
767 in.uid = DECL_UID (stmt);
768 h = (struct cxx_int_tree_map *)
769 htab_find_with_hash (cp_function_chain->extern_decl_map,
779 /* Other than invisiref parms, don't walk the same tree twice. */
780 if (pointer_set_contains (p_set, stmt))
786 if (TREE_CODE (stmt) == ADDR_EXPR
787 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
789 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
792 else if (TREE_CODE (stmt) == RETURN_EXPR
793 && TREE_OPERAND (stmt, 0)
794 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
795 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
797 else if (TREE_CODE (stmt) == OMP_CLAUSE)
798 switch (OMP_CLAUSE_CODE (stmt))
800 case OMP_CLAUSE_LASTPRIVATE:
801 /* Don't dereference an invisiref in OpenMP clauses. */
802 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
805 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
806 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
807 cp_genericize_r, data, NULL);
810 case OMP_CLAUSE_PRIVATE:
811 case OMP_CLAUSE_SHARED:
812 case OMP_CLAUSE_FIRSTPRIVATE:
813 case OMP_CLAUSE_COPYIN:
814 case OMP_CLAUSE_COPYPRIVATE:
815 /* Don't dereference an invisiref in OpenMP clauses. */
816 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
819 case OMP_CLAUSE_REDUCTION:
820 gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
825 else if (IS_TYPE_OR_DECL_P (stmt))
828 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
829 to lower this construct before scanning it, so we need to lower these
830 before doing anything else. */
831 else if (TREE_CODE (stmt) == CLEANUP_STMT)
832 *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
836 CLEANUP_EXPR (stmt));
838 else if (TREE_CODE (stmt) == IF_STMT)
840 genericize_if_stmt (stmt_p);
841 /* *stmt_p has changed, tail recurse to handle it again. */
842 return cp_genericize_r (stmt_p, walk_subtrees, data);
845 /* COND_EXPR might have incompatible types in branches if one or both
846 arms are bitfields. Fix it up now. */
847 else if (TREE_CODE (stmt) == COND_EXPR)
850 = (TREE_OPERAND (stmt, 1)
851 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
854 = (TREE_OPERAND (stmt, 2)
855 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
858 && !useless_type_conversion_p (TREE_TYPE (stmt),
859 TREE_TYPE (TREE_OPERAND (stmt, 1))))
861 TREE_OPERAND (stmt, 1)
862 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
863 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
867 && !useless_type_conversion_p (TREE_TYPE (stmt),
868 TREE_TYPE (TREE_OPERAND (stmt, 2))))
870 TREE_OPERAND (stmt, 2)
871 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
872 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
877 else if (TREE_CODE (stmt) == BIND_EXPR)
879 VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
880 cp_walk_tree (&BIND_EXPR_BODY (stmt),
881 cp_genericize_r, data, NULL);
882 VEC_pop (tree, wtd->bind_expr_stack);
885 else if (TREE_CODE (stmt) == USING_STMT)
887 tree block = NULL_TREE;
889 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
890 BLOCK, and append an IMPORTED_DECL to its
891 BLOCK_VARS chained list. */
892 if (wtd->bind_expr_stack)
895 for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
896 if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
897 wtd->bind_expr_stack, i))))
902 tree using_directive;
903 gcc_assert (TREE_OPERAND (stmt, 0));
905 using_directive = make_node (IMPORTED_DECL);
906 TREE_TYPE (using_directive) = void_type_node;
908 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
909 = TREE_OPERAND (stmt, 0);
910 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
911 BLOCK_VARS (block) = using_directive;
913 /* The USING_STMT won't appear in GENERIC. */
914 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
918 else if (TREE_CODE (stmt) == DECL_EXPR
919 && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
921 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
922 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
926 pointer_set_insert (p_set, *stmt_p);
932 cp_genericize (tree fndecl)
935 struct cp_genericize_data wtd;
937 /* Fix up the types of parms passed by invisible reference. */
938 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
939 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
941 /* If a function's arguments are copied to create a thunk,
942 then DECL_BY_REFERENCE will be set -- but the type of the
943 argument will be a pointer type, so we will never get
945 gcc_assert (!DECL_BY_REFERENCE (t));
946 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
947 TREE_TYPE (t) = DECL_ARG_TYPE (t);
948 DECL_BY_REFERENCE (t) = 1;
949 TREE_ADDRESSABLE (t) = 0;
953 /* Do the same for the return value. */
954 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
956 t = DECL_RESULT (fndecl);
957 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
958 DECL_BY_REFERENCE (t) = 1;
959 TREE_ADDRESSABLE (t) = 0;
963 /* If we're a clone, the body is already GIMPLE. */
964 if (DECL_CLONED_FUNCTION_P (fndecl))
967 /* We do want to see every occurrence of the parms, so we can't just use
968 walk_tree's hash functionality. */
969 wtd.p_set = pointer_set_create ();
970 wtd.bind_expr_stack = NULL;
971 cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_genericize_r, &wtd, NULL);
972 pointer_set_destroy (wtd.p_set);
973 VEC_free (tree, heap, wtd.bind_expr_stack);
975 /* Do everything else. */
976 c_genericize (fndecl);
978 gcc_assert (bc_label[bc_break] == NULL);
979 gcc_assert (bc_label[bc_continue] == NULL);
982 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
983 NULL if there is in fact nothing to do. ARG2 may be null if FN
984 actually only takes one argument. */
987 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
989 tree defparm, parm, t;
997 nargs = list_length (DECL_ARGUMENTS (fn));
998 argarray = XALLOCAVEC (tree, nargs);
1000 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1002 defparm = TREE_CHAIN (defparm);
1004 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1006 tree inner_type = TREE_TYPE (arg1);
1007 tree start1, end1, p1;
1008 tree start2 = NULL, p2 = NULL;
1009 tree ret = NULL, lab;
1015 inner_type = TREE_TYPE (inner_type);
1016 start1 = build4 (ARRAY_REF, inner_type, start1,
1017 size_zero_node, NULL, NULL);
1019 start2 = build4 (ARRAY_REF, inner_type, start2,
1020 size_zero_node, NULL, NULL);
1022 while (TREE_CODE (inner_type) == ARRAY_TYPE);
1023 start1 = build_fold_addr_expr_loc (input_location, start1);
1025 start2 = build_fold_addr_expr_loc (input_location, start2);
1027 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1028 end1 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (start1), start1, end1);
1030 p1 = create_tmp_var (TREE_TYPE (start1), NULL);
1031 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1032 append_to_statement_list (t, &ret);
1036 p2 = create_tmp_var (TREE_TYPE (start2), NULL);
1037 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1038 append_to_statement_list (t, &ret);
1041 lab = create_artificial_label (input_location);
1042 t = build1 (LABEL_EXPR, void_type_node, lab);
1043 append_to_statement_list (t, &ret);
1048 /* Handle default arguments. */
1049 for (parm = defparm; parm && parm != void_list_node;
1050 parm = TREE_CHAIN (parm), i++)
1051 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1052 TREE_PURPOSE (parm), fn, i);
1053 t = build_call_a (fn, i, argarray);
1054 t = fold_convert (void_type_node, t);
1055 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1056 append_to_statement_list (t, &ret);
1058 t = TYPE_SIZE_UNIT (inner_type);
1059 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p1), p1, t);
1060 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1061 append_to_statement_list (t, &ret);
1065 t = TYPE_SIZE_UNIT (inner_type);
1066 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p2), p2, t);
1067 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1068 append_to_statement_list (t, &ret);
1071 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1072 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1073 append_to_statement_list (t, &ret);
1079 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1081 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1082 /* Handle default arguments. */
1083 for (parm = defparm; parm && parm != void_list_node;
1084 parm = TREE_CHAIN (parm), i++)
1085 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1086 TREE_PURPOSE (parm),
1088 t = build_call_a (fn, i, argarray);
1089 t = fold_convert (void_type_node, t);
1090 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1094 /* Return code to initialize DECL with its default constructor, or
1095 NULL if there's nothing to do. */
1098 cxx_omp_clause_default_ctor (tree clause, tree decl,
1099 tree outer ATTRIBUTE_UNUSED)
1101 tree info = CP_OMP_CLAUSE_INFO (clause);
1105 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1110 /* Return code to initialize DST with a copy constructor from SRC. */
1113 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1115 tree info = CP_OMP_CLAUSE_INFO (clause);
1119 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1121 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1126 /* Similarly, except use an assignment operator instead. */
1129 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1131 tree info = CP_OMP_CLAUSE_INFO (clause);
1135 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1137 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1142 /* Return code to destroy DECL. */
1145 cxx_omp_clause_dtor (tree clause, tree decl)
1147 tree info = CP_OMP_CLAUSE_INFO (clause);
1151 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1156 /* True if OpenMP should privatize what this DECL points to rather
1157 than the DECL itself. */
1160 cxx_omp_privatize_by_reference (const_tree decl)
1162 return is_invisiref_parm (decl);
1165 /* True if OpenMP sharing attribute of DECL is predetermined. */
1167 enum omp_clause_default_kind
1168 cxx_omp_predetermined_sharing (tree decl)
1172 /* Static data members are predetermined as shared. */
1173 if (TREE_STATIC (decl))
1175 tree ctx = CP_DECL_CONTEXT (decl);
1176 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1177 return OMP_CLAUSE_DEFAULT_SHARED;
1180 type = TREE_TYPE (decl);
1181 if (TREE_CODE (type) == REFERENCE_TYPE)
1183 if (!is_invisiref_parm (decl))
1184 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1185 type = TREE_TYPE (type);
1187 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1189 /* NVR doesn't preserve const qualification of the
1191 tree outer = outer_curly_brace_block (current_function_decl);
1195 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1196 if (DECL_NAME (decl) == DECL_NAME (var)
1197 && (TYPE_MAIN_VARIANT (type)
1198 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1200 if (TYPE_READONLY (TREE_TYPE (var)))
1201 type = TREE_TYPE (var);
1207 if (type == error_mark_node)
1208 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1210 /* Variables with const-qualified type having no mutable member
1211 are predetermined shared. */
1212 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1213 return OMP_CLAUSE_DEFAULT_SHARED;
1215 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1218 /* Finalize an implicitly determined clause. */
1221 cxx_omp_finish_clause (tree c)
1223 tree decl, inner_type;
1224 bool make_shared = false;
1226 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1229 decl = OMP_CLAUSE_DECL (c);
1230 decl = require_complete_type (decl);
1231 inner_type = TREE_TYPE (decl);
1232 if (decl == error_mark_node)
1234 else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1236 if (is_invisiref_parm (decl))
1237 inner_type = TREE_TYPE (inner_type);
1240 error ("%qE implicitly determined as %<firstprivate%> has reference type",
1246 /* We're interested in the base element, not arrays. */
1247 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1248 inner_type = TREE_TYPE (inner_type);
1250 /* Check for special function availability by building a call to one.
1251 Save the results, because later we won't be in the right context
1252 for making these queries. */
1254 && CLASS_TYPE_P (inner_type)
1255 && cxx_omp_create_clause_info (c, inner_type, false, true, false))
1259 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;