1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jakub Jelinek <jakub@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
33 #include "trans-stmt.h"
34 #include "trans-types.h"
35 #include "trans-array.h"
36 #include "trans-const.h"
41 /* True if OpenMP should privatize what this DECL points to rather
42 than the DECL itself. */
45 gfc_omp_privatize_by_reference (const_tree decl)
47 tree type = TREE_TYPE (decl);
49 if (TREE_CODE (type) == REFERENCE_TYPE
50 && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
53 if (TREE_CODE (type) == POINTER_TYPE)
55 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
56 that have POINTER_TYPE type and don't have GFC_POINTER_TYPE_P
57 set are supposed to be privatized by reference. */
58 if (GFC_POINTER_TYPE_P (type))
61 if (!DECL_ARTIFICIAL (decl)
62 && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
65 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
67 if (DECL_LANG_SPECIFIC (decl)
68 && GFC_DECL_SAVED_DESCRIPTOR (decl))
75 /* True if OpenMP sharing attribute of DECL is predetermined. */
77 enum omp_clause_default_kind
78 gfc_omp_predetermined_sharing (tree decl)
80 if (DECL_ARTIFICIAL (decl) && ! GFC_DECL_RESULT (decl))
81 return OMP_CLAUSE_DEFAULT_SHARED;
83 /* Cray pointees shouldn't be listed in any clauses and should be
84 gimplified to dereference of the corresponding Cray pointer.
85 Make them all private, so that they are emitted in the debug
87 if (GFC_DECL_CRAY_POINTEE (decl))
88 return OMP_CLAUSE_DEFAULT_PRIVATE;
90 /* Assumed-size arrays are predetermined to inherit sharing
91 attributes of the associated actual argument, which is shared
93 if (TREE_CODE (decl) == PARM_DECL
94 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
95 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
96 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
97 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
99 return OMP_CLAUSE_DEFAULT_SHARED;
101 /* Dummy procedures aren't considered variables by OpenMP, thus are
102 disallowed in OpenMP clauses. They are represented as PARM_DECLs
103 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
104 to avoid complaining about their uses with default(none). */
105 if (TREE_CODE (decl) == PARM_DECL
106 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
107 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
108 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
110 /* COMMON and EQUIVALENCE decls are shared. They
111 are only referenced through DECL_VALUE_EXPR of the variables
112 contained in them. If those are privatized, they will not be
113 gimplified to the COMMON or EQUIVALENCE decls. */
114 if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
115 return OMP_CLAUSE_DEFAULT_SHARED;
117 if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
118 return OMP_CLAUSE_DEFAULT_SHARED;
120 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
124 /* Return true if DECL in private clause needs
125 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
127 gfc_omp_private_outer_ref (tree decl)
129 tree type = TREE_TYPE (decl);
131 if (GFC_DESCRIPTOR_TYPE_P (type)
132 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
138 /* Return code to initialize DECL with its default constructor, or
139 NULL if there's nothing to do. */
142 gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
144 tree type = TREE_TYPE (decl), rank, size, esize, ptr, cond, then_b, else_b;
145 stmtblock_t block, cond_block;
147 if (! GFC_DESCRIPTOR_TYPE_P (type)
148 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
151 gcc_assert (outer != NULL);
152 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
153 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE);
155 /* Allocatable arrays in PRIVATE clauses need to be set to
156 "not currently allocated" allocation status if outer
157 array is "not currently allocated", otherwise should be allocated. */
158 gfc_start_block (&block);
160 gfc_init_block (&cond_block);
162 gfc_add_modify (&cond_block, decl, outer);
163 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
164 size = gfc_conv_descriptor_ubound_get (decl, rank);
165 size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
166 gfc_conv_descriptor_lbound_get (decl, rank));
167 size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
169 if (GFC_TYPE_ARRAY_RANK (type) > 1)
170 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
171 gfc_conv_descriptor_stride_get (decl, rank));
172 esize = fold_convert (gfc_array_index_type,
173 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
174 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
175 size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);
176 ptr = gfc_allocate_array_with_status (&cond_block,
177 build_int_cst (pvoid_type_node, 0),
179 gfc_conv_descriptor_data_set (&cond_block, decl, ptr);
180 then_b = gfc_finish_block (&cond_block);
182 gfc_init_block (&cond_block);
183 gfc_conv_descriptor_data_set (&cond_block, decl, null_pointer_node);
184 else_b = gfc_finish_block (&cond_block);
186 cond = fold_build2 (NE_EXPR, boolean_type_node,
187 fold_convert (pvoid_type_node,
188 gfc_conv_descriptor_data_get (outer)),
190 gfc_add_expr_to_block (&block, build3 (COND_EXPR, void_type_node,
191 cond, then_b, else_b));
193 return gfc_finish_block (&block);
196 /* Build and return code for a copy constructor from SRC to DEST. */
199 gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
201 tree type = TREE_TYPE (dest), ptr, size, esize, rank, call;
204 if (! GFC_DESCRIPTOR_TYPE_P (type)
205 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
206 return build2_v (MODIFY_EXPR, dest, src);
208 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE);
210 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
211 and copied from SRC. */
212 gfc_start_block (&block);
214 gfc_add_modify (&block, dest, src);
215 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
216 size = gfc_conv_descriptor_ubound_get (dest, rank);
217 size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
218 gfc_conv_descriptor_lbound_get (dest, rank));
219 size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
221 if (GFC_TYPE_ARRAY_RANK (type) > 1)
222 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
223 gfc_conv_descriptor_stride_get (dest, rank));
224 esize = fold_convert (gfc_array_index_type,
225 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
226 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
227 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
228 ptr = gfc_allocate_array_with_status (&block,
229 build_int_cst (pvoid_type_node, 0),
231 gfc_conv_descriptor_data_set (&block, dest, ptr);
232 call = build_call_expr_loc (input_location,
233 built_in_decls[BUILT_IN_MEMCPY], 3, ptr,
234 fold_convert (pvoid_type_node,
235 gfc_conv_descriptor_data_get (src)),
237 gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));
239 return gfc_finish_block (&block);
242 /* Similarly, except use an assignment operator instead. */
245 gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED, tree dest, tree src)
247 tree type = TREE_TYPE (dest), rank, size, esize, call;
250 if (! GFC_DESCRIPTOR_TYPE_P (type)
251 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
252 return build2_v (MODIFY_EXPR, dest, src);
254 /* Handle copying allocatable arrays. */
255 gfc_start_block (&block);
257 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
258 size = gfc_conv_descriptor_ubound_get (dest, rank);
259 size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
260 gfc_conv_descriptor_lbound_get (dest, rank));
261 size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
263 if (GFC_TYPE_ARRAY_RANK (type) > 1)
264 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
265 gfc_conv_descriptor_stride_get (dest, rank));
266 esize = fold_convert (gfc_array_index_type,
267 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
268 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
269 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
270 call = build_call_expr_loc (input_location,
271 built_in_decls[BUILT_IN_MEMCPY], 3,
272 fold_convert (pvoid_type_node,
273 gfc_conv_descriptor_data_get (dest)),
274 fold_convert (pvoid_type_node,
275 gfc_conv_descriptor_data_get (src)),
277 gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));
279 return gfc_finish_block (&block);
282 /* Build and return code destructing DECL. Return NULL if nothing
286 gfc_omp_clause_dtor (tree clause ATTRIBUTE_UNUSED, tree decl)
288 tree type = TREE_TYPE (decl);
290 if (! GFC_DESCRIPTOR_TYPE_P (type)
291 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
294 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
295 to be deallocated if they were allocated. */
296 return gfc_trans_dealloc_allocated (decl);
300 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
301 disregarded in OpenMP construct, because it is going to be
302 remapped during OpenMP lowering. SHARED is true if DECL
303 is going to be shared, false if it is going to be privatized. */
306 gfc_omp_disregard_value_expr (tree decl, bool shared)
308 if (GFC_DECL_COMMON_OR_EQUIV (decl)
309 && DECL_HAS_VALUE_EXPR_P (decl))
311 tree value = DECL_VALUE_EXPR (decl);
313 if (TREE_CODE (value) == COMPONENT_REF
314 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
315 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
317 /* If variable in COMMON or EQUIVALENCE is privatized, return
318 true, as just that variable is supposed to be privatized,
319 not the whole COMMON or whole EQUIVALENCE.
320 For shared variables in COMMON or EQUIVALENCE, let them be
321 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
322 from the same COMMON or EQUIVALENCE just one sharing of the
323 whole COMMON or EQUIVALENCE is enough. */
328 if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
334 /* Return true if DECL that is shared iff SHARED is true should
335 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
339 gfc_omp_private_debug_clause (tree decl, bool shared)
341 if (GFC_DECL_CRAY_POINTEE (decl))
344 if (GFC_DECL_COMMON_OR_EQUIV (decl)
345 && DECL_HAS_VALUE_EXPR_P (decl))
347 tree value = DECL_VALUE_EXPR (decl);
349 if (TREE_CODE (value) == COMPONENT_REF
350 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
351 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
358 /* Register language specific type size variables as potentially OpenMP
359 firstprivate variables. */
362 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
364 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
368 gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
369 for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
371 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
372 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
373 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
375 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
376 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
382 gfc_trans_add_clause (tree node, tree tail)
384 OMP_CLAUSE_CHAIN (node) = tail;
389 gfc_trans_omp_variable (gfc_symbol *sym)
391 tree t = gfc_get_symbol_decl (sym);
395 bool alternate_entry;
398 return_value = sym->attr.function && sym->result == sym;
399 alternate_entry = sym->attr.function && sym->attr.entry
400 && sym->result == sym;
401 entry_master = sym->attr.result
402 && sym->ns->proc_name->attr.entry_master
403 && !gfc_return_by_reference (sym->ns->proc_name);
404 parent_decl = DECL_CONTEXT (current_function_decl);
406 if ((t == parent_decl && return_value)
407 || (sym->ns && sym->ns->proc_name
408 && sym->ns->proc_name->backend_decl == parent_decl
409 && (alternate_entry || entry_master)))
414 /* Special case for assigning the return value of a function.
415 Self recursive functions must have an explicit return value. */
416 if (return_value && (t == current_function_decl || parent_flag))
417 t = gfc_get_fake_result_decl (sym, parent_flag);
419 /* Similarly for alternate entry points. */
420 else if (alternate_entry
421 && (sym->ns->proc_name->backend_decl == current_function_decl
424 gfc_entry_list *el = NULL;
426 for (el = sym->ns->entries; el; el = el->next)
429 t = gfc_get_fake_result_decl (sym, parent_flag);
434 else if (entry_master
435 && (sym->ns->proc_name->backend_decl == current_function_decl
437 t = gfc_get_fake_result_decl (sym, parent_flag);
443 gfc_trans_omp_variable_list (enum omp_clause_code code, gfc_namelist *namelist,
446 for (; namelist != NULL; namelist = namelist->next)
447 if (namelist->sym->attr.referenced)
449 tree t = gfc_trans_omp_variable (namelist->sym);
450 if (t != error_mark_node)
452 tree node = build_omp_clause (input_location, code);
453 OMP_CLAUSE_DECL (node) = t;
454 list = gfc_trans_add_clause (node, list);
/* Build the OMP_CLAUSE_REDUCTION_INIT/MERGE statement lists for an array
   reduction on clause C of symbol SYM.  Fake symbols/symtrees are created
   for the init value, the outer array placeholder, and (for MIN/MAX/IAND/
   IOR/IEOR) the intrinsic function, then the init and merge assignments
   are resolved and translated as ordinary Fortran assignments.
   NOTE(review): this extracted copy has dropped many lines (the function
   signature line, braces, switch case labels/breaks, the MIN/MAX/bitwise
   arm around original lines 557-577, and the trailing cleanup of
   e1..e4/symtrees) -- the surviving lines are kept byte-identical below;
   restore from the upstream file before compiling.  */
461 gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where)
463 gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
464 gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
465 gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
466 gfc_expr *e1, *e2, *e3, *e4;
468 tree decl, backend_decl, stmt;
469 locus old_loc = gfc_current_locus;
473 decl = OMP_CLAUSE_DECL (c);
474 gfc_current_locus = where;
476 /* Create a fake symbol for init value. */
477 memset (&init_val_sym, 0, sizeof (init_val_sym));
478 init_val_sym.ns = sym->ns;
479 init_val_sym.name = sym->name;
480 init_val_sym.ts = sym->ts;
481 init_val_sym.attr.referenced = 1;
482 init_val_sym.declared_at = where;
483 init_val_sym.attr.flavor = FL_VARIABLE;
/* omp_reduction_init supplies the neutral element for the operation.  */
484 backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
485 init_val_sym.backend_decl = backend_decl;
487 /* Create a fake symbol for the outer array reference. */
489 outer_sym.as = gfc_copy_array_spec (sym->as);
490 outer_sym.attr.dummy = 0;
491 outer_sym.attr.result = 0;
492 outer_sym.attr.flavor = FL_VARIABLE;
493 outer_sym.backend_decl = create_tmp_var_raw (TREE_TYPE (decl), NULL);
495 /* Create fake symtrees for it. */
496 symtree1 = gfc_new_symtree (&root1, sym->name);
497 symtree1->n.sym = sym;
498 gcc_assert (symtree1 == root1);
500 symtree2 = gfc_new_symtree (&root2, sym->name);
501 symtree2->n.sym = &init_val_sym;
502 gcc_assert (symtree2 == root2);
504 symtree3 = gfc_new_symtree (&root3, sym->name);
505 symtree3->n.sym = &outer_sym;
506 gcc_assert (symtree3 == root3);
508 /* Create expressions. */
/* e1: full-array reference to the private reduction variable.  */
509 e1 = gfc_get_expr ();
510 e1->expr_type = EXPR_VARIABLE;
512 e1->symtree = symtree1;
514 e1->ref = ref = gfc_get_ref ();
515 ref->type = REF_ARRAY;
516 ref->u.ar.where = where;
517 ref->u.ar.as = sym->as;
518 ref->u.ar.type = AR_FULL;
520 t = gfc_resolve_expr (e1);
521 gcc_assert (t == SUCCESS);
/* e2: the init value; e3: outer-array counterpart of e1.  */
523 e2 = gfc_get_expr ();
524 e2->expr_type = EXPR_VARIABLE;
526 e2->symtree = symtree2;
528 t = gfc_resolve_expr (e2);
529 gcc_assert (t == SUCCESS);
531 e3 = gfc_copy_expr (e1);
532 e3->symtree = symtree3;
533 t = gfc_resolve_expr (e3);
534 gcc_assert (t == SUCCESS);
/* e4: merge expression OUTER op PRIV, chosen by the reduction code.  */
537 switch (OMP_CLAUSE_REDUCTION_CODE (c))
541 e4 = gfc_add (e3, e1);
544 e4 = gfc_multiply (e3, e1);
546 case TRUTH_ANDIF_EXPR:
547 e4 = gfc_and (e3, e1);
549 case TRUTH_ORIF_EXPR:
550 e4 = gfc_or (e3, e1);
553 e4 = gfc_eqv (e3, e1);
556 e4 = gfc_neqv (e3, e1);
/* For MIN/MAX/IAND/IOR/IEOR a call to the intrinsic is built instead;
   a fake intrinsic symbol and symtree carry the chosen name (iname).  */
578 memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
579 intrinsic_sym.ns = sym->ns;
580 intrinsic_sym.name = iname;
581 intrinsic_sym.ts = sym->ts;
582 intrinsic_sym.attr.referenced = 1;
583 intrinsic_sym.attr.intrinsic = 1;
584 intrinsic_sym.attr.function = 1;
585 intrinsic_sym.result = &intrinsic_sym;
586 intrinsic_sym.declared_at = where;
588 symtree4 = gfc_new_symtree (&root4, iname);
589 symtree4->n.sym = &intrinsic_sym;
590 gcc_assert (symtree4 == root4);
592 e4 = gfc_get_expr ();
593 e4->expr_type = EXPR_FUNCTION;
595 e4->symtree = symtree4;
596 e4->value.function.isym = gfc_find_function (iname);
597 e4->value.function.actual = gfc_get_actual_arglist ();
598 e4->value.function.actual->expr = e3;
599 e4->value.function.actual->next = gfc_get_actual_arglist ();
600 e4->value.function.actual->next->expr = e1;
602 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
603 e1 = gfc_copy_expr (e1);
604 e3 = gfc_copy_expr (e3);
605 t = gfc_resolve_expr (e4);
606 gcc_assert (t == SUCCESS);
608 /* Create the init statement list. */
610 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
611 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
613 /* If decl is an allocatable array, it needs to be allocated
614 with the same bounds as the outer var. */
615 tree type = TREE_TYPE (decl), rank, size, esize, ptr;
618 gfc_start_block (&block);
620 gfc_add_modify (&block, decl, outer_sym.backend_decl);
/* Same last-dimension size computation as the clause ctors above.  */
621 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
622 size = gfc_conv_descriptor_ubound_get (decl, rank);
623 size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
624 gfc_conv_descriptor_lbound_get (decl, rank));
625 size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
627 if (GFC_TYPE_ARRAY_RANK (type) > 1)
628 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
629 gfc_conv_descriptor_stride_get (decl, rank));
630 esize = fold_convert (gfc_array_index_type,
631 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
632 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
633 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
634 ptr = gfc_allocate_array_with_status (&block,
635 build_int_cst (pvoid_type_node, 0),
637 gfc_conv_descriptor_data_set (&block, decl, ptr);
638 gfc_add_expr_to_block (&block, gfc_trans_assignment (e1, e2, false,
640 stmt = gfc_finish_block (&block);
643 stmt = gfc_trans_assignment (e1, e2, false, false);
644 if (TREE_CODE (stmt) != BIND_EXPR)
645 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
648 OMP_CLAUSE_REDUCTION_INIT (c) = stmt;
650 /* Create the merge statement list. */
652 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
653 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
655 /* If decl is an allocatable array, it needs to be deallocated
659 gfc_start_block (&block);
660 gfc_add_expr_to_block (&block, gfc_trans_assignment (e3, e4, false,
662 gfc_add_expr_to_block (&block, gfc_trans_dealloc_allocated (decl));
663 stmt = gfc_finish_block (&block);
666 stmt = gfc_trans_assignment (e3, e4, false, true);
667 if (TREE_CODE (stmt) != BIND_EXPR)
668 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
671 OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;
673 /* And stick the placeholder VAR_DECL into the clause as well. */
674 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_sym.backend_decl;
676 gfc_current_locus = old_loc;
687 gfc_free_array_spec (outer_sym.as);
691 gfc_trans_omp_reduction_list (gfc_namelist *namelist, tree list,
692 enum tree_code reduction_code, locus where)
694 for (; namelist != NULL; namelist = namelist->next)
695 if (namelist->sym->attr.referenced)
697 tree t = gfc_trans_omp_variable (namelist->sym);
698 if (t != error_mark_node)
700 tree node = build_omp_clause (where.lb->location,
701 OMP_CLAUSE_REDUCTION);
702 OMP_CLAUSE_DECL (node) = t;
703 OMP_CLAUSE_REDUCTION_CODE (node) = reduction_code;
704 if (namelist->sym->attr.dimension)
705 gfc_trans_omp_array_reduction (node, namelist->sym, where);
706 list = gfc_trans_add_clause (node, list);
/* Translate a gfc_omp_clauses structure into a GENERIC clause chain,
   emitting any clause-expression side effects into BLOCK.
   NOTE(review): this extracted copy has dropped lines (the function
   signature/brace lines, the OMP_LIST_* case labels and breaks for the
   reduction switch, `break;` after each assignment, and the nowait/untied
   condition lines) -- surviving lines are kept byte-identical below;
   restore from the upstream file before compiling.  */
713 gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
716 tree omp_clauses = NULL_TREE, chunk_size, c;
718 enum omp_clause_code clause_code;
/* Walk every symbol list; reduction lists map to tree codes, the rest
   map one-to-one onto OMP_CLAUSE_* codes.  */
724 for (list = 0; list < OMP_LIST_NUM; list++)
726 gfc_namelist *n = clauses->lists[list];
730 if (list >= OMP_LIST_REDUCTION_FIRST
731 && list <= OMP_LIST_REDUCTION_LAST)
733 enum tree_code reduction_code;
737 reduction_code = PLUS_EXPR;
740 reduction_code = MULT_EXPR;
743 reduction_code = MINUS_EXPR;
746 reduction_code = TRUTH_ANDIF_EXPR;
749 reduction_code = TRUTH_ORIF_EXPR;
752 reduction_code = EQ_EXPR;
755 reduction_code = NE_EXPR;
758 reduction_code = MAX_EXPR;
761 reduction_code = MIN_EXPR;
764 reduction_code = BIT_AND_EXPR;
767 reduction_code = BIT_IOR_EXPR;
770 reduction_code = BIT_XOR_EXPR;
776 = gfc_trans_omp_reduction_list (n, omp_clauses, reduction_code,
782 case OMP_LIST_PRIVATE:
783 clause_code = OMP_CLAUSE_PRIVATE;
785 case OMP_LIST_SHARED:
786 clause_code = OMP_CLAUSE_SHARED;
788 case OMP_LIST_FIRSTPRIVATE:
789 clause_code = OMP_CLAUSE_FIRSTPRIVATE;
791 case OMP_LIST_LASTPRIVATE:
792 clause_code = OMP_CLAUSE_LASTPRIVATE;
794 case OMP_LIST_COPYIN:
795 clause_code = OMP_CLAUSE_COPYIN;
797 case OMP_LIST_COPYPRIVATE:
798 clause_code = OMP_CLAUSE_COPYPRIVATE;
802 = gfc_trans_omp_variable_list (clause_code, n, omp_clauses);
/* Scalar clause expressions are evaluated once into BLOCK, then the
   resulting temporary is referenced from the clause.  */
809 if (clauses->if_expr)
813 gfc_init_se (&se, NULL);
814 gfc_conv_expr (&se, clauses->if_expr);
815 gfc_add_block_to_block (block, &se.pre);
816 if_var = gfc_evaluate_now (se.expr, block);
817 gfc_add_block_to_block (block, &se.post);
819 c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF);
820 OMP_CLAUSE_IF_EXPR (c) = if_var;
821 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
824 if (clauses->num_threads)
828 gfc_init_se (&se, NULL);
829 gfc_conv_expr (&se, clauses->num_threads);
830 gfc_add_block_to_block (block, &se.pre);
831 num_threads = gfc_evaluate_now (se.expr, block);
832 gfc_add_block_to_block (block, &se.post);
834 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_THREADS);
835 OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
836 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
839 chunk_size = NULL_TREE;
840 if (clauses->chunk_size)
842 gfc_init_se (&se, NULL);
843 gfc_conv_expr (&se, clauses->chunk_size);
844 gfc_add_block_to_block (block, &se.pre);
845 chunk_size = gfc_evaluate_now (se.expr, block);
846 gfc_add_block_to_block (block, &se.post);
849 if (clauses->sched_kind != OMP_SCHED_NONE)
851 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE);
852 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
853 switch (clauses->sched_kind)
855 case OMP_SCHED_STATIC:
856 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
858 case OMP_SCHED_DYNAMIC:
859 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
861 case OMP_SCHED_GUIDED:
862 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
864 case OMP_SCHED_RUNTIME:
865 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
868 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
873 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
876 if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
878 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT);
879 switch (clauses->default_sharing)
881 case OMP_DEFAULT_NONE:
882 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
884 case OMP_DEFAULT_SHARED:
885 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
887 case OMP_DEFAULT_PRIVATE:
888 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
890 case OMP_DEFAULT_FIRSTPRIVATE:
891 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
896 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
/* Flag-like clauses carry no expression, only their presence.  */
901 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT);
902 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
905 if (clauses->ordered)
907 c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
908 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
913 c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED);
914 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
917 if (clauses->collapse)
919 c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE);
920 OMP_CLAUSE_COLLAPSE_EXPR (c) = build_int_cst (NULL, clauses->collapse);
921 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
927 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
930 gfc_trans_omp_code (gfc_code *code, bool force_empty)
935 stmt = gfc_trans_code (code);
936 if (TREE_CODE (stmt) != BIND_EXPR)
938 if (!IS_EMPTY_STMT (stmt) || force_empty)
940 tree block = poplevel (1, 0, 0);
941 stmt = build3_v (BIND_EXPR, NULL, stmt, block);
952 static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
953 static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);
/* Translate !$OMP ATOMIC: the single assignment in CODE->block->next is
   decomposed into VAR = VAR op EXPR (or an intrinsic MIN/MAX/IAND/...)
   and emitted as an OMP_ATOMIC node on the address of VAR.
   NOTE(review): this extracted copy has dropped lines (the signature,
   local declarations for var/lse/rse/block/expr2/e, several switch case
   labels/breaks including the INTRINSIC_PLUS and MIN/MAX intrinsic ids,
   and the var_on_left = true path) -- surviving lines are kept
   byte-identical below; restore from the upstream file before compiling.  */
956 gfc_trans_omp_atomic (gfc_code *code)
963 tree lhsaddr, type, rhs, x;
964 enum tree_code op = ERROR_MARK;
965 bool var_on_left = false;
967 code = code->block->next;
968 gcc_assert (code->op == EXEC_ASSIGN);
969 gcc_assert (code->next == NULL);
970 var = code->expr1->symtree->n.sym;
972 gfc_init_se (&lse, NULL);
973 gfc_init_se (&rse, NULL);
974 gfc_start_block (&block);
976 gfc_conv_expr (&lse, code->expr1);
977 gfc_add_block_to_block (&block, &lse.pre);
978 type = TREE_TYPE (lse.expr);
979 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
/* Strip a wrapping type-conversion intrinsic from the RHS.  */
982 if (expr2->expr_type == EXPR_FUNCTION
983 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
984 expr2 = expr2->value.function.actual->expr;
/* Operator form: map the Fortran intrinsic operator to a tree code.  */
986 if (expr2->expr_type == EXPR_OP)
989 switch (expr2->value.op.op)
994 case INTRINSIC_TIMES:
997 case INTRINSIC_MINUS:
1000 case INTRINSIC_DIVIDE:
1001 if (expr2->ts.type == BT_INTEGER)
1002 op = TRUNC_DIV_EXPR;
1007 op = TRUTH_ANDIF_EXPR;
1010 op = TRUTH_ORIF_EXPR;
1015 case INTRINSIC_NEQV:
/* Determine which operand of the binary op is VAR itself; the other
   operand becomes the value folded into the atomic update.  */
1021 e = expr2->value.op.op1;
1022 if (e->expr_type == EXPR_FUNCTION
1023 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
1024 e = e->value.function.actual->expr;
1025 if (e->expr_type == EXPR_VARIABLE
1026 && e->symtree != NULL
1027 && e->symtree->n.sym == var)
1029 expr2 = expr2->value.op.op2;
1034 e = expr2->value.op.op2;
1035 if (e->expr_type == EXPR_FUNCTION
1036 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
1037 e = e->value.function.actual->expr;
1038 gcc_assert (e->expr_type == EXPR_VARIABLE
1039 && e->symtree != NULL
1040 && e->symtree->n.sym == var);
1041 expr2 = expr2->value.op.op1;
1042 var_on_left = false;
1044 gfc_conv_expr (&rse, expr2);
1045 gfc_add_block_to_block (&block, &rse.pre);
/* Intrinsic form: MIN/MAX/IAND/... with VAR as the first argument.  */
1049 gcc_assert (expr2->expr_type == EXPR_FUNCTION);
1050 switch (expr2->value.function.isym->id)
1070 e = expr2->value.function.actual->expr;
1071 gcc_assert (e->expr_type == EXPR_VARIABLE
1072 && e->symtree != NULL
1073 && e->symtree->n.sym == var);
1075 gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
1076 gfc_add_block_to_block (&block, &rse.pre);
/* Three or more arguments: pre-reduce the extra arguments into a
   temporary accumulator so the atomic op has a single RHS value.  */
1077 if (expr2->value.function.actual->next->next != NULL)
1079 tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
1080 gfc_actual_arglist *arg;
1082 gfc_add_modify (&block, accum, rse.expr);
1083 for (arg = expr2->value.function.actual->next->next; arg;
1086 gfc_init_block (&rse.pre);
1087 gfc_conv_expr (&rse, arg->expr);
1088 gfc_add_block_to_block (&block, &rse.pre);
1089 x = fold_build2 (op, TREE_TYPE (accum), accum, rse.expr);
1090 gfc_add_modify (&block, accum, x);
1096 expr2 = expr2->value.function.actual->next->expr;
/* Build *lhsaddr op rhs (operand order per var_on_left) and emit it
   as an OMP_ATOMIC store through lhsaddr.  */
1099 lhsaddr = save_expr (lhsaddr);
1100 rhs = gfc_evaluate_now (rse.expr, &block);
1101 x = convert (TREE_TYPE (rhs), build_fold_indirect_ref_loc (input_location,
1105 x = fold_build2 (op, TREE_TYPE (rhs), x, rhs);
1107 x = fold_build2 (op, TREE_TYPE (rhs), rhs, x);
1109 if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
1110 && TREE_CODE (type) != COMPLEX_TYPE)
1111 x = fold_build1 (REALPART_EXPR, TREE_TYPE (TREE_TYPE (rhs)), x);
1113 x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
1114 gfc_add_expr_to_block (&block, x);
1116 gfc_add_block_to_block (&block, &lse.pre);
1117 gfc_add_block_to_block (&block, &rse.pre);
1119 return gfc_finish_block (&block);
1123 gfc_trans_omp_barrier (void)
1125 tree decl = built_in_decls [BUILT_IN_GOMP_BARRIER];
1126 return build_call_expr_loc (input_location, decl, 0);
1130 gfc_trans_omp_critical (gfc_code *code)
1132 tree name = NULL_TREE, stmt;
1133 if (code->ext.omp_name != NULL)
1134 name = get_identifier (code->ext.omp_name);
1135 stmt = gfc_trans_code (code->block->next);
1136 return build2 (OMP_CRITICAL, void_type_node, stmt, name);
1140 gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
1141 gfc_omp_clauses *do_clauses, tree par_clauses)
1144 tree dovar, stmt, from, to, step, type, init, cond, incr;
1145 tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
1148 gfc_omp_clauses *clauses = code->ext.omp_clauses;
1149 int i, collapse = clauses->collapse;
1150 tree dovar_init = NULL_TREE;
1155 code = code->block->next;
1156 gcc_assert (code->op == EXEC_DO);
1158 init = make_tree_vec (collapse);
1159 cond = make_tree_vec (collapse);
1160 incr = make_tree_vec (collapse);
1164 gfc_start_block (&block);
1168 omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
1170 for (i = 0; i < collapse; i++)
1173 int dovar_found = 0;
1179 for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
1181 if (code->ext.iterator->var->symtree->n.sym == n->sym)
1186 for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
1187 if (code->ext.iterator->var->symtree->n.sym == n->sym)
1193 /* Evaluate all the expressions in the iterator. */
1194 gfc_init_se (&se, NULL);
1195 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1196 gfc_add_block_to_block (pblock, &se.pre);
1198 type = TREE_TYPE (dovar);
1199 gcc_assert (TREE_CODE (type) == INTEGER_TYPE);
1201 gfc_init_se (&se, NULL);
1202 gfc_conv_expr_val (&se, code->ext.iterator->start);
1203 gfc_add_block_to_block (pblock, &se.pre);
1204 from = gfc_evaluate_now (se.expr, pblock);
1206 gfc_init_se (&se, NULL);
1207 gfc_conv_expr_val (&se, code->ext.iterator->end);
1208 gfc_add_block_to_block (pblock, &se.pre);
1209 to = gfc_evaluate_now (se.expr, pblock);
1211 gfc_init_se (&se, NULL);
1212 gfc_conv_expr_val (&se, code->ext.iterator->step);
1213 gfc_add_block_to_block (pblock, &se.pre);
1214 step = gfc_evaluate_now (se.expr, pblock);
1217 /* Special case simple loops. */
1218 if (TREE_CODE (dovar) == VAR_DECL)
1220 if (integer_onep (step))
1222 else if (tree_int_cst_equal (step, integer_minus_one_node))
1227 = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym);
1232 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
1233 TREE_VEC_ELT (cond, i) = fold_build2 (simple > 0 ? LE_EXPR : GE_EXPR,
1234 boolean_type_node, dovar, to);
1235 TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, dovar, step);
1236 TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type, dovar,
1237 TREE_VEC_ELT (incr, i));
1241 /* STEP is not 1 or -1. Use:
1242 for (count = 0; count < (to + step - from) / step; count++)
1244 dovar = from + count * step;
1248 tmp = fold_build2 (MINUS_EXPR, type, step, from);
1249 tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
1250 tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
1251 tmp = gfc_evaluate_now (tmp, pblock);
1252 count = gfc_create_var (type, "count");
1253 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
1254 build_int_cst (type, 0));
1255 TREE_VEC_ELT (cond, i) = fold_build2 (LT_EXPR, boolean_type_node,
1257 TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, count,
1258 build_int_cst (type, 1));
1259 TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type,
1260 count, TREE_VEC_ELT (incr, i));
1262 /* Initialize DOVAR. */
1263 tmp = fold_build2 (MULT_EXPR, type, count, step);
1264 tmp = fold_build2 (PLUS_EXPR, type, from, tmp);
1265 dovar_init = tree_cons (dovar, tmp, dovar_init);
1270 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
1271 OMP_CLAUSE_DECL (tmp) = dovar_decl;
1272 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
1274 else if (dovar_found == 2)
1281 /* If dovar is lastprivate, but different counter is used,
1282 dovar += step needs to be added to
1283 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
1284 will have the value on entry of the last loop, rather
1285 than value after iterator increment. */
1286 tmp = gfc_evaluate_now (step, pblock);
1287 tmp = fold_build2 (PLUS_EXPR, type, dovar, tmp);
1288 tmp = fold_build2 (MODIFY_EXPR, type, dovar, tmp);
1289 for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1290 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1291 && OMP_CLAUSE_DECL (c) == dovar_decl)
1293 OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
1297 if (c == NULL && par_clauses != NULL)
1299 for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1300 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1301 && OMP_CLAUSE_DECL (c) == dovar_decl)
1303 tree l = build_omp_clause (input_location,
1304 OMP_CLAUSE_LASTPRIVATE);
1305 OMP_CLAUSE_DECL (l) = dovar_decl;
1306 OMP_CLAUSE_CHAIN (l) = omp_clauses;
1307 OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
1309 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
1313 gcc_assert (simple || c != NULL);
1317 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
1318 OMP_CLAUSE_DECL (tmp) = count;
1319 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
1322 if (i + 1 < collapse)
1323 code = code->block->next;
1326 if (pblock != &block)
1329 gfc_start_block (&block);
1332 gfc_start_block (&body);
1334 dovar_init = nreverse (dovar_init);
1337 gfc_add_modify (&body, TREE_PURPOSE (dovar_init),
1338 TREE_VALUE (dovar_init));
1339 dovar_init = TREE_CHAIN (dovar_init);
1342 /* Cycle statement is implemented with a goto. Exit statement must not be
1343 present for this loop. */
1344 cycle_label = gfc_build_label_decl (NULL_TREE);
1346 /* Put these labels where they can be found later. We put the
1347 labels in a TREE_LIST node (because TREE_CHAIN is already
1348 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
1349 label in TREE_VALUE (backend_decl). */
1351 code->block->backend_decl = tree_cons (cycle_label, NULL, NULL);
1353 /* Main loop body. */
1354 tmp = gfc_trans_omp_code (code->block->next, true);
1355 gfc_add_expr_to_block (&body, tmp);
1357 /* Label for cycle statements (if needed). */
1358 if (TREE_USED (cycle_label))
1360 tmp = build1_v (LABEL_EXPR, cycle_label);
1361 gfc_add_expr_to_block (&body, tmp);
1364 /* End of loop body. */
1365 stmt = make_node (OMP_FOR);
1367 TREE_TYPE (stmt) = void_type_node;
1368 OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
1369 OMP_FOR_CLAUSES (stmt) = omp_clauses;
1370 OMP_FOR_INIT (stmt) = init;
1371 OMP_FOR_COND (stmt) = cond;
1372 OMP_FOR_INCR (stmt) = incr;
1373 gfc_add_expr_to_block (&block, stmt);
1375 return gfc_finish_block (&block);
1379 gfc_trans_omp_flush (void)
1381 tree decl = built_in_decls [BUILT_IN_SYNCHRONIZE];
1382 return build_call_expr_loc (input_location, decl, 0);
1386 gfc_trans_omp_master (gfc_code *code)
1388 tree stmt = gfc_trans_code (code->block->next);
1389 if (IS_EMPTY_STMT (stmt))
1391 return build1_v (OMP_MASTER, stmt);
1395 gfc_trans_omp_ordered (gfc_code *code)
1397 return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
1401 gfc_trans_omp_parallel (gfc_code *code)
1404 tree stmt, omp_clauses;
1406 gfc_start_block (&block);
1407 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1409 stmt = gfc_trans_omp_code (code->block->next, true);
1410 stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
1411 gfc_add_expr_to_block (&block, stmt);
1412 return gfc_finish_block (&block);
1416 gfc_trans_omp_parallel_do (gfc_code *code)
1418 stmtblock_t block, *pblock = NULL;
1419 gfc_omp_clauses parallel_clauses, do_clauses;
1420 tree stmt, omp_clauses = NULL_TREE;
1422 gfc_start_block (&block);
1424 memset (&do_clauses, 0, sizeof (do_clauses));
1425 if (code->ext.omp_clauses != NULL)
1427 memcpy (¶llel_clauses, code->ext.omp_clauses,
1428 sizeof (parallel_clauses));
1429 do_clauses.sched_kind = parallel_clauses.sched_kind;
1430 do_clauses.chunk_size = parallel_clauses.chunk_size;
1431 do_clauses.ordered = parallel_clauses.ordered;
1432 do_clauses.collapse = parallel_clauses.collapse;
1433 parallel_clauses.sched_kind = OMP_SCHED_NONE;
1434 parallel_clauses.chunk_size = NULL;
1435 parallel_clauses.ordered = false;
1436 parallel_clauses.collapse = 0;
1437 omp_clauses = gfc_trans_omp_clauses (&block, ¶llel_clauses,
1440 do_clauses.nowait = true;
1441 if (!do_clauses.ordered && do_clauses.sched_kind != OMP_SCHED_STATIC)
1445 stmt = gfc_trans_omp_do (code, pblock, &do_clauses, omp_clauses);
1446 if (TREE_CODE (stmt) != BIND_EXPR)
1447 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
1450 stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
1451 OMP_PARALLEL_COMBINED (stmt) = 1;
1452 gfc_add_expr_to_block (&block, stmt);
1453 return gfc_finish_block (&block);
1457 gfc_trans_omp_parallel_sections (gfc_code *code)
1460 gfc_omp_clauses section_clauses;
1461 tree stmt, omp_clauses;
1463 memset (§ion_clauses, 0, sizeof (section_clauses));
1464 section_clauses.nowait = true;
1466 gfc_start_block (&block);
1467 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1470 stmt = gfc_trans_omp_sections (code, §ion_clauses);
1471 if (TREE_CODE (stmt) != BIND_EXPR)
1472 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
1475 stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
1476 OMP_PARALLEL_COMBINED (stmt) = 1;
1477 gfc_add_expr_to_block (&block, stmt);
1478 return gfc_finish_block (&block);
1482 gfc_trans_omp_parallel_workshare (gfc_code *code)
1485 gfc_omp_clauses workshare_clauses;
1486 tree stmt, omp_clauses;
1488 memset (&workshare_clauses, 0, sizeof (workshare_clauses));
1489 workshare_clauses.nowait = true;
1491 gfc_start_block (&block);
1492 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1495 stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
1496 if (TREE_CODE (stmt) != BIND_EXPR)
1497 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
1500 stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
1501 OMP_PARALLEL_COMBINED (stmt) = 1;
1502 gfc_add_expr_to_block (&block, stmt);
1503 return gfc_finish_block (&block);
1507 gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
1509 stmtblock_t block, body;
1510 tree omp_clauses, stmt;
1511 bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;
1513 gfc_start_block (&block);
1515 omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);
1517 gfc_init_block (&body);
1518 for (code = code->block; code; code = code->block)
1520 /* Last section is special because of lastprivate, so even if it
1521 is empty, chain it in. */
1522 stmt = gfc_trans_omp_code (code->next,
1523 has_lastprivate && code->block == NULL);
1524 if (! IS_EMPTY_STMT (stmt))
1526 stmt = build1_v (OMP_SECTION, stmt);
1527 gfc_add_expr_to_block (&body, stmt);
1530 stmt = gfc_finish_block (&body);
1532 stmt = build2 (OMP_SECTIONS, void_type_node, stmt, omp_clauses);
1533 gfc_add_expr_to_block (&block, stmt);
1535 return gfc_finish_block (&block);
1539 gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
1541 tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
1542 tree stmt = gfc_trans_omp_code (code->block->next, true);
1543 stmt = build2 (OMP_SINGLE, void_type_node, stmt, omp_clauses);
1548 gfc_trans_omp_task (gfc_code *code)
1551 tree stmt, omp_clauses;
1553 gfc_start_block (&block);
1554 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1556 stmt = gfc_trans_omp_code (code->block->next, true);
1557 stmt = build2 (OMP_TASK, void_type_node, stmt, omp_clauses);
1558 gfc_add_expr_to_block (&block, stmt);
1559 return gfc_finish_block (&block);
1563 gfc_trans_omp_taskwait (void)
1565 tree decl = built_in_decls [BUILT_IN_GOMP_TASKWAIT];
1566 return build_call_expr_loc (input_location, decl, 0);
1570 gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
1572 tree res, tmp, stmt;
1573 stmtblock_t block, *pblock = NULL;
1574 stmtblock_t singleblock;
1575 int saved_ompws_flags;
1576 bool singleblock_in_progress = false;
1577 /* True if previous gfc_code in workshare construct is not workshared. */
1578 bool prev_singleunit;
1580 code = code->block->next;
1585 return build_empty_stmt (input_location);
1587 gfc_start_block (&block);
1590 ompws_flags = OMPWS_WORKSHARE_FLAG;
1591 prev_singleunit = false;
1593 /* Translate statements one by one to trees until we reach
1594 the end of the workshare construct. Adjacent gfc_codes that
1595 are a single unit of work are clustered and encapsulated in a
1596 single OMP_SINGLE construct. */
1597 for (; code; code = code->next)
1599 if (code->here != 0)
1601 res = gfc_trans_label_here (code);
1602 gfc_add_expr_to_block (pblock, res);
1605 /* No dependence analysis, use for clauses with wait.
1606 If this is the last gfc_code, use default omp_clauses. */
1607 if (code->next == NULL && clauses->nowait)
1608 ompws_flags |= OMPWS_NOWAIT;
1610 /* By default, every gfc_code is a single unit of work. */
1611 ompws_flags |= OMPWS_CURR_SINGLEUNIT;
1612 ompws_flags &= ~OMPWS_SCALARIZER_WS;
1621 res = gfc_trans_assign (code);
1624 case EXEC_POINTER_ASSIGN:
1625 res = gfc_trans_pointer_assign (code);
1628 case EXEC_INIT_ASSIGN:
1629 res = gfc_trans_init_assign (code);
1633 res = gfc_trans_forall (code);
1637 res = gfc_trans_where (code);
1640 case EXEC_OMP_ATOMIC:
1641 res = gfc_trans_omp_directive (code);
1644 case EXEC_OMP_PARALLEL:
1645 case EXEC_OMP_PARALLEL_DO:
1646 case EXEC_OMP_PARALLEL_SECTIONS:
1647 case EXEC_OMP_PARALLEL_WORKSHARE:
1648 case EXEC_OMP_CRITICAL:
1649 saved_ompws_flags = ompws_flags;
1651 res = gfc_trans_omp_directive (code);
1652 ompws_flags = saved_ompws_flags;
1656 internal_error ("gfc_trans_omp_workshare(): Bad statement code");
1659 gfc_set_backend_locus (&code->loc);
1661 if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
1663 if (prev_singleunit)
1665 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
1666 /* Add current gfc_code to single block. */
1667 gfc_add_expr_to_block (&singleblock, res);
1670 /* Finish single block and add it to pblock. */
1671 tmp = gfc_finish_block (&singleblock);
1672 tmp = build2 (OMP_SINGLE, void_type_node, tmp, NULL_TREE);
1673 gfc_add_expr_to_block (pblock, tmp);
1674 /* Add current gfc_code to pblock. */
1675 gfc_add_expr_to_block (pblock, res);
1676 singleblock_in_progress = false;
1681 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
1683 /* Start single block. */
1684 gfc_init_block (&singleblock);
1685 gfc_add_expr_to_block (&singleblock, res);
1686 singleblock_in_progress = true;
1689 /* Add the new statement to the block. */
1690 gfc_add_expr_to_block (pblock, res);
1692 prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
1696 /* Finish remaining SINGLE block, if we were in the middle of one. */
1697 if (singleblock_in_progress)
1699 /* Finish single block and add it to pblock. */
1700 tmp = gfc_finish_block (&singleblock);
1701 tmp = build2 (OMP_SINGLE, void_type_node, tmp,
1703 ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
1705 gfc_add_expr_to_block (pblock, tmp);
1708 stmt = gfc_finish_block (pblock);
1709 if (TREE_CODE (stmt) != BIND_EXPR)
1711 if (!IS_EMPTY_STMT (stmt))
1713 tree bindblock = poplevel (1, 0, 0);
1714 stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
1727 gfc_trans_omp_directive (gfc_code *code)
1731 case EXEC_OMP_ATOMIC:
1732 return gfc_trans_omp_atomic (code);
1733 case EXEC_OMP_BARRIER:
1734 return gfc_trans_omp_barrier ();
1735 case EXEC_OMP_CRITICAL:
1736 return gfc_trans_omp_critical (code);
1738 return gfc_trans_omp_do (code, NULL, code->ext.omp_clauses, NULL);
1739 case EXEC_OMP_FLUSH:
1740 return gfc_trans_omp_flush ();
1741 case EXEC_OMP_MASTER:
1742 return gfc_trans_omp_master (code);
1743 case EXEC_OMP_ORDERED:
1744 return gfc_trans_omp_ordered (code);
1745 case EXEC_OMP_PARALLEL:
1746 return gfc_trans_omp_parallel (code);
1747 case EXEC_OMP_PARALLEL_DO:
1748 return gfc_trans_omp_parallel_do (code);
1749 case EXEC_OMP_PARALLEL_SECTIONS:
1750 return gfc_trans_omp_parallel_sections (code);
1751 case EXEC_OMP_PARALLEL_WORKSHARE:
1752 return gfc_trans_omp_parallel_workshare (code);
1753 case EXEC_OMP_SECTIONS:
1754 return gfc_trans_omp_sections (code, code->ext.omp_clauses);
1755 case EXEC_OMP_SINGLE:
1756 return gfc_trans_omp_single (code, code->ext.omp_clauses);
1758 return gfc_trans_omp_task (code);
1759 case EXEC_OMP_TASKWAIT:
1760 return gfc_trans_omp_taskwait ();
1761 case EXEC_OMP_WORKSHARE:
1762 return gfc_trans_omp_workshare (code, code->ext.omp_clauses);