1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
32 #include "trans-stmt.h"
33 #include "trans-types.h"
34 #include "trans-array.h"
35 #include "trans-const.h"
39 /* True if OpenMP should privatize what this DECL points to rather
40 than the DECL itself. */
43 gfc_omp_privatize_by_reference (const_tree decl)
45 tree type = TREE_TYPE (decl);
47 if (TREE_CODE (type) == REFERENCE_TYPE
48 && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
51 if (TREE_CODE (type) == POINTER_TYPE)
53 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
54 that have POINTER_TYPE type and don't have GFC_POINTER_TYPE_P
55 set are supposed to be privatized by reference. */
56 if (GFC_POINTER_TYPE_P (type))
59 if (!DECL_ARTIFICIAL (decl))
62 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
64 if (DECL_LANG_SPECIFIC (decl)
65 && GFC_DECL_SAVED_DESCRIPTOR (decl))
72 /* True if OpenMP sharing attribute of DECL is predetermined. */
74 enum omp_clause_default_kind
75 gfc_omp_predetermined_sharing (tree decl)
77 if (DECL_ARTIFICIAL (decl) && ! GFC_DECL_RESULT (decl))
78 return OMP_CLAUSE_DEFAULT_SHARED;
80 /* Cray pointees shouldn't be listed in any clauses and should be
81 gimplified to dereference of the corresponding Cray pointer.
82 Make them all private, so that they are emitted in the debug
84 if (GFC_DECL_CRAY_POINTEE (decl))
85 return OMP_CLAUSE_DEFAULT_PRIVATE;
87 /* Assumed-size arrays are predetermined to inherit sharing
88 attributes of the associated actual argument, which is shared
90 if (TREE_CODE (decl) == PARM_DECL
91 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
92 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
93 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
94 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
96 return OMP_CLAUSE_DEFAULT_SHARED;
98 /* COMMON and EQUIVALENCE decls are shared. They
99 are only referenced through DECL_VALUE_EXPR of the variables
100 contained in them. If those are privatized, they will not be
101 gimplified to the COMMON or EQUIVALENCE decls. */
102 if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
103 return OMP_CLAUSE_DEFAULT_SHARED;
105 if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
106 return OMP_CLAUSE_DEFAULT_SHARED;
108 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
112 /* Return true if DECL in private clause needs
113 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
115 gfc_omp_private_outer_ref (tree decl)
117 tree type = TREE_TYPE (decl);
119 if (GFC_DESCRIPTOR_TYPE_P (type)
120 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
126 /* Return code to initialize DECL with its default constructor, or
127 NULL if there's nothing to do. */
130 gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
132 tree type = TREE_TYPE (decl), rank, size, esize, ptr, cond, then_b, else_b;
133 stmtblock_t block, cond_block;
135 if (! GFC_DESCRIPTOR_TYPE_P (type)
136 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
139 gcc_assert (outer != NULL);
140 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
141 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE);
143 /* Allocatable arrays in PRIVATE clauses need to be set to
144 "not currently allocated" allocation status if outer
145 array is "not currently allocated", otherwise should be allocated. */
146 gfc_start_block (&block);
148 gfc_init_block (&cond_block);
150 gfc_add_modify (&cond_block, decl, outer);
151 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
152 size = gfc_conv_descriptor_ubound (decl, rank);
153 size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
154 gfc_conv_descriptor_lbound (decl, rank));
155 size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
157 if (GFC_TYPE_ARRAY_RANK (type) > 1)
158 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
159 gfc_conv_descriptor_stride (decl, rank));
160 esize = fold_convert (gfc_array_index_type,
161 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
162 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
163 size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);
164 ptr = gfc_allocate_array_with_status (&cond_block,
165 build_int_cst (pvoid_type_node, 0),
167 gfc_conv_descriptor_data_set (&cond_block, decl, ptr);
168 then_b = gfc_finish_block (&cond_block);
170 gfc_init_block (&cond_block);
171 gfc_conv_descriptor_data_set (&cond_block, decl, null_pointer_node);
172 else_b = gfc_finish_block (&cond_block);
174 cond = fold_build2 (NE_EXPR, boolean_type_node,
175 fold_convert (pvoid_type_node,
176 gfc_conv_descriptor_data_get (outer)),
178 gfc_add_expr_to_block (&block, build3 (COND_EXPR, void_type_node,
179 cond, then_b, else_b));
181 return gfc_finish_block (&block);
184 /* Build and return code for a copy constructor from SRC to DEST. */
187 gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
189 tree type = TREE_TYPE (dest), ptr, size, esize, rank, call;
192 if (! GFC_DESCRIPTOR_TYPE_P (type)
193 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
194 return build2_v (MODIFY_EXPR, dest, src);
196 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE);
198 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
199 and copied from SRC. */
200 gfc_start_block (&block);
202 gfc_add_modify (&block, dest, src);
203 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
204 size = gfc_conv_descriptor_ubound (dest, rank);
205 size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
206 gfc_conv_descriptor_lbound (dest, rank));
207 size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
209 if (GFC_TYPE_ARRAY_RANK (type) > 1)
210 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
211 gfc_conv_descriptor_stride (dest, rank));
212 esize = fold_convert (gfc_array_index_type,
213 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
214 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
215 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
216 ptr = gfc_allocate_array_with_status (&block,
217 build_int_cst (pvoid_type_node, 0),
219 gfc_conv_descriptor_data_set (&block, dest, ptr);
220 call = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3, ptr,
221 fold_convert (pvoid_type_node,
222 gfc_conv_descriptor_data_get (src)),
224 gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));
226 return gfc_finish_block (&block);
229 /* Similarly, except use an assignment operator instead. */
232 gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED, tree dest, tree src)
234 tree type = TREE_TYPE (dest), rank, size, esize, call;
237 if (! GFC_DESCRIPTOR_TYPE_P (type)
238 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
239 return build2_v (MODIFY_EXPR, dest, src);
241 /* Handle copying allocatable arrays. */
242 gfc_start_block (&block);
244 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
245 size = gfc_conv_descriptor_ubound (dest, rank);
246 size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
247 gfc_conv_descriptor_lbound (dest, rank));
248 size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
250 if (GFC_TYPE_ARRAY_RANK (type) > 1)
251 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
252 gfc_conv_descriptor_stride (dest, rank));
253 esize = fold_convert (gfc_array_index_type,
254 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
255 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
256 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
257 call = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
258 fold_convert (pvoid_type_node,
259 gfc_conv_descriptor_data_get (dest)),
260 fold_convert (pvoid_type_node,
261 gfc_conv_descriptor_data_get (src)),
263 gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));
265 return gfc_finish_block (&block);
268 /* Build and return code destructing DECL. Return NULL if nothing
272 gfc_omp_clause_dtor (tree clause ATTRIBUTE_UNUSED, tree decl)
274 tree type = TREE_TYPE (decl);
276 if (! GFC_DESCRIPTOR_TYPE_P (type)
277 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
280 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
281 to be deallocated if they were allocated. */
282 return gfc_trans_dealloc_allocated (decl);
286 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
287 disregarded in OpenMP construct, because it is going to be
288 remapped during OpenMP lowering. SHARED is true if DECL
289 is going to be shared, false if it is going to be privatized. */
292 gfc_omp_disregard_value_expr (tree decl, bool shared)
294 if (GFC_DECL_COMMON_OR_EQUIV (decl)
295 && DECL_HAS_VALUE_EXPR_P (decl))
297 tree value = DECL_VALUE_EXPR (decl);
299 if (TREE_CODE (value) == COMPONENT_REF
300 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
301 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
303 /* If variable in COMMON or EQUIVALENCE is privatized, return
304 true, as just that variable is supposed to be privatized,
305 not the whole COMMON or whole EQUIVALENCE.
306 For shared variables in COMMON or EQUIVALENCE, let them be
307 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
308 from the same COMMON or EQUIVALENCE just one sharing of the
309 whole COMMON or EQUIVALENCE is enough. */
314 if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
320 /* Return true if DECL that is shared iff SHARED is true should
321 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
325 gfc_omp_private_debug_clause (tree decl, bool shared)
327 if (GFC_DECL_CRAY_POINTEE (decl))
330 if (GFC_DECL_COMMON_OR_EQUIV (decl)
331 && DECL_HAS_VALUE_EXPR_P (decl))
333 tree value = DECL_VALUE_EXPR (decl);
335 if (TREE_CODE (value) == COMPONENT_REF
336 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
337 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
344 /* Register language specific type size variables as potentially OpenMP
345 firstprivate variables. */
348 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
350 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
354 gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
355 for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
357 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
358 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
359 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
361 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
362 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
368 gfc_trans_add_clause (tree node, tree tail)
370 OMP_CLAUSE_CHAIN (node) = tail;
/* NOTE(review): this listing is a truncated excerpt -- the return type,
   braces, several declarations (return_value, entry_master, parent_decl,
   parent_flag) and return statements are missing.  The visible logic maps
   a Fortran symbol to the backend decl to reference in an OpenMP clause,
   substituting the fake result decl for function results and for
   alternate/master ENTRY points -- do not edit without the full file.  */
375 gfc_trans_omp_variable (gfc_symbol *sym)
377 tree t = gfc_get_symbol_decl (sym);
381 bool alternate_entry;
/* A symbol is the function's return value when it is its own result.  */
384 return_value = sym->attr.function && sym->result == sym;
385 alternate_entry = sym->attr.function && sym->attr.entry
386 && sym->result == sym;
387 entry_master = sym->attr.result
388 && sym->ns->proc_name->attr.entry_master
389 && !gfc_return_by_reference (sym->ns->proc_name);
390 parent_decl = DECL_CONTEXT (current_function_decl);
/* Does the symbol belong to the parent (host) procedure rather than to
   the current (possibly nested) function?  */
392 if ((t == parent_decl && return_value)
393 || (sym->ns && sym->ns->proc_name
394 && sym->ns->proc_name->backend_decl == parent_decl
395 && (alternate_entry || entry_master)))
400 /* Special case for assigning the return value of a function.
401 Self recursive functions must have an explicit return value. */
402 if (return_value && (t == current_function_decl || parent_flag))
403 t = gfc_get_fake_result_decl (sym, parent_flag);
405 /* Similarly for alternate entry points. */
406 else if (alternate_entry
407 && (sym->ns->proc_name->backend_decl == current_function_decl
410 gfc_entry_list *el = NULL;
/* Search the namespace's ENTRY list for this symbol.  */
412 for (el = sym->ns->entries; el; el = el->next)
415 t = gfc_get_fake_result_decl (sym, parent_flag);
420 else if (entry_master
421 && (sym->ns->proc_name->backend_decl == current_function_decl
423 t = gfc_get_fake_result_decl (sym, parent_flag);
429 gfc_trans_omp_variable_list (enum omp_clause_code code, gfc_namelist *namelist,
432 for (; namelist != NULL; namelist = namelist->next)
433 if (namelist->sym->attr.referenced)
435 tree t = gfc_trans_omp_variable (namelist->sym);
436 if (t != error_mark_node)
438 tree node = build_omp_clause (code);
439 OMP_CLAUSE_DECL (node) = t;
440 list = gfc_trans_add_clause (node, list);
/* NOTE(review): truncated excerpt -- the return type, braces, several
   declarations (ref, t, iname, the MIN/MAX/IAND/IOR/IEOR switch arms
   with their "iname = ..." assignments, `break;`s) are missing.  The
   function builds OMP_CLAUSE_REDUCTION_INIT/MERGE statement lists for
   an array reduction variable SYM on clause C by constructing fake
   gfc symbols/expressions and translating Fortran assignments.  Do not
   edit without the full file.  */
447 gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where)
449 gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
450 gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
451 gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
452 gfc_expr *e1, *e2, *e3, *e4;
454 tree decl, backend_decl, stmt;
455 locus old_loc = gfc_current_locus;
459 decl = OMP_CLAUSE_DECL (c);
460 gfc_current_locus = where;
462 /* Create a fake symbol for init value. */
463 memset (&init_val_sym, 0, sizeof (init_val_sym));
464 init_val_sym.ns = sym->ns;
465 init_val_sym.name = sym->name;
466 init_val_sym.ts = sym->ts;
467 init_val_sym.attr.referenced = 1;
468 init_val_sym.declared_at = where;
469 init_val_sym.attr.flavor = FL_VARIABLE;
470 backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
471 init_val_sym.backend_decl = backend_decl;
473 /* Create a fake symbol for the outer array reference. */
475 outer_sym.as = gfc_copy_array_spec (sym->as);
476 outer_sym.attr.dummy = 0;
477 outer_sym.attr.result = 0;
478 outer_sym.attr.flavor = FL_VARIABLE;
479 outer_sym.backend_decl = create_tmp_var_raw (TREE_TYPE (decl), NULL);
481 /* Create fake symtrees for it. */
482 symtree1 = gfc_new_symtree (&root1, sym->name);
483 symtree1->n.sym = sym;
484 gcc_assert (symtree1 == root1);
486 symtree2 = gfc_new_symtree (&root2, sym->name);
487 symtree2->n.sym = &init_val_sym;
488 gcc_assert (symtree2 == root2);
490 symtree3 = gfc_new_symtree (&root3, sym->name);
491 symtree3->n.sym = &outer_sym;
492 gcc_assert (symtree3 == root3);
494 /* Create expressions. */
/* e1 = full-array reference to the reduction variable itself.  */
495 e1 = gfc_get_expr ();
496 e1->expr_type = EXPR_VARIABLE;
498 e1->symtree = symtree1;
500 e1->ref = ref = gfc_get_ref ();
501 ref->type = REF_ARRAY;
502 ref->u.ar.where = where;
503 ref->u.ar.as = sym->as;
504 ref->u.ar.type = AR_FULL;
506 t = gfc_resolve_expr (e1);
507 gcc_assert (t == SUCCESS);
/* e2 = the reduction's neutral init value.  */
509 e2 = gfc_get_expr ();
510 e2->expr_type = EXPR_VARIABLE;
512 e2->symtree = symtree2;
514 t = gfc_resolve_expr (e2);
515 gcc_assert (t == SUCCESS);
/* e3 = full-array reference to the outer (placeholder) array.  */
517 e3 = gfc_copy_expr (e1);
518 e3->symtree = symtree3;
519 t = gfc_resolve_expr (e3);
520 gcc_assert (t == SUCCESS);
/* e4 = merge expression "outer OP inner"; operator reductions are
   built directly, MIN/MAX/IAND/IOR/IEOR (arms elided in this excerpt)
   fall through to the intrinsic-call construction below.  */
523 switch (OMP_CLAUSE_REDUCTION_CODE (c))
527 e4 = gfc_add (e3, e1);
530 e4 = gfc_multiply (e3, e1);
532 case TRUTH_ANDIF_EXPR:
533 e4 = gfc_and (e3, e1);
535 case TRUTH_ORIF_EXPR:
536 e4 = gfc_or (e3, e1);
539 e4 = gfc_eqv (e3, e1);
542 e4 = gfc_neqv (e3, e1);
/* Intrinsic-based reductions: fake an intrinsic function symbol and
   build the call iname (e3, e1).  */
564 memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
565 intrinsic_sym.ns = sym->ns;
566 intrinsic_sym.name = iname;
567 intrinsic_sym.ts = sym->ts;
568 intrinsic_sym.attr.referenced = 1;
569 intrinsic_sym.attr.intrinsic = 1;
570 intrinsic_sym.attr.function = 1;
571 intrinsic_sym.result = &intrinsic_sym;
572 intrinsic_sym.declared_at = where;
574 symtree4 = gfc_new_symtree (&root4, iname);
575 symtree4->n.sym = &intrinsic_sym;
576 gcc_assert (symtree4 == root4);
578 e4 = gfc_get_expr ();
579 e4->expr_type = EXPR_FUNCTION;
581 e4->symtree = symtree4;
582 e4->value.function.isym = gfc_find_function (iname);
583 e4->value.function.actual = gfc_get_actual_arglist ();
584 e4->value.function.actual->expr = e3;
585 e4->value.function.actual->next = gfc_get_actual_arglist ();
586 e4->value.function.actual->next->expr = e1;
588 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
589 e1 = gfc_copy_expr (e1);
590 e3 = gfc_copy_expr (e3);
591 t = gfc_resolve_expr (e4);
592 gcc_assert (t == SUCCESS);
594 /* Create the init statement list. */
596 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
597 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
599 /* If decl is an allocatable array, it needs to be allocated
600 with the same bounds as the outer var. */
601 tree type = TREE_TYPE (decl), rank, size, esize, ptr;
604 gfc_start_block (&block);
/* Same size computation as in gfc_omp_clause_default_ctor.  */
606 gfc_add_modify (&block, decl, outer_sym.backend_decl);
607 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
608 size = gfc_conv_descriptor_ubound (decl, rank);
609 size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
610 gfc_conv_descriptor_lbound (decl, rank));
611 size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
613 if (GFC_TYPE_ARRAY_RANK (type) > 1)
614 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
615 gfc_conv_descriptor_stride (decl, rank));
616 esize = fold_convert (gfc_array_index_type,
617 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
618 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
619 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
620 ptr = gfc_allocate_array_with_status (&block,
621 build_int_cst (pvoid_type_node, 0),
623 gfc_conv_descriptor_data_set (&block, decl, ptr);
624 gfc_add_expr_to_block (&block, gfc_trans_assignment (e1, e2, false));
625 stmt = gfc_finish_block (&block);
628 stmt = gfc_trans_assignment (e1, e2, false);
629 if (TREE_CODE (stmt) != BIND_EXPR)
630 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
633 OMP_CLAUSE_REDUCTION_INIT (c) = stmt;
635 /* Create the merge statement list. */
637 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
638 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
640 /* If decl is an allocatable array, it needs to be deallocated
644 gfc_start_block (&block);
645 gfc_add_expr_to_block (&block, gfc_trans_assignment (e3, e4, false));
646 gfc_add_expr_to_block (&block, gfc_trans_dealloc_allocated (decl));
647 stmt = gfc_finish_block (&block);
650 stmt = gfc_trans_assignment (e3, e4, false);
651 if (TREE_CODE (stmt) != BIND_EXPR)
652 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
655 OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;
657 /* And stick the placeholder VAR_DECL into the clause as well. */
658 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_sym.backend_decl;
660 gfc_current_locus = old_loc;
/* (Freeing of e1..e4 elided in this excerpt.)  */
671 gfc_free_array_spec (outer_sym.as);
675 gfc_trans_omp_reduction_list (gfc_namelist *namelist, tree list,
676 enum tree_code reduction_code, locus where)
678 for (; namelist != NULL; namelist = namelist->next)
679 if (namelist->sym->attr.referenced)
681 tree t = gfc_trans_omp_variable (namelist->sym);
682 if (t != error_mark_node)
684 tree node = build_omp_clause (OMP_CLAUSE_REDUCTION);
685 OMP_CLAUSE_DECL (node) = t;
686 OMP_CLAUSE_REDUCTION_CODE (node) = reduction_code;
687 if (namelist->sym->attr.dimension)
688 gfc_trans_omp_array_reduction (node, namelist->sym, where);
689 list = gfc_trans_add_clause (node, list);
/* NOTE(review): truncated excerpt -- the return type, the `where'
   parameter line, braces, `break;'s, all `case OMP_LIST_*:' labels of
   the reduction switch, and the trailing `return omp_clauses;' are
   missing.  The function lowers a gfc_omp_clauses structure into a
   chain of GENERIC OMP_CLAUSE_* trees; any expression clauses (IF,
   NUM_THREADS, chunk size) are evaluated into BLOCK first.  Do not
   edit without the full file.  */
696 gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
699 tree omp_clauses = NULL_TREE, chunk_size, c, old_clauses;
701 enum omp_clause_code clause_code;
/* Walk every variable list attached to the directive.  */
707 for (list = 0; list < OMP_LIST_NUM; list++)
709 gfc_namelist *n = clauses->lists[list];
713 if (list >= OMP_LIST_REDUCTION_FIRST
714 && list <= OMP_LIST_REDUCTION_LAST)
716 enum tree_code reduction_code;
/* Map each Fortran reduction operator/intrinsic onto its tree code
   (the `case' labels are elided in this excerpt).  */
720 reduction_code = PLUS_EXPR;
723 reduction_code = MULT_EXPR;
726 reduction_code = MINUS_EXPR;
729 reduction_code = TRUTH_ANDIF_EXPR;
732 reduction_code = TRUTH_ORIF_EXPR;
735 reduction_code = EQ_EXPR;
738 reduction_code = NE_EXPR;
741 reduction_code = MAX_EXPR;
744 reduction_code = MIN_EXPR;
747 reduction_code = BIT_AND_EXPR;
750 reduction_code = BIT_IOR_EXPR;
753 reduction_code = BIT_XOR_EXPR;
758 old_clauses = omp_clauses;
760 = gfc_trans_omp_reduction_list (n, omp_clauses, reduction_code,
/* Non-reduction lists map 1:1 onto clause codes.  */
766 case OMP_LIST_PRIVATE:
767 clause_code = OMP_CLAUSE_PRIVATE;
769 case OMP_LIST_SHARED:
770 clause_code = OMP_CLAUSE_SHARED;
772 case OMP_LIST_FIRSTPRIVATE:
773 clause_code = OMP_CLAUSE_FIRSTPRIVATE;
775 case OMP_LIST_LASTPRIVATE:
776 clause_code = OMP_CLAUSE_LASTPRIVATE;
778 case OMP_LIST_COPYIN:
779 clause_code = OMP_CLAUSE_COPYIN;
781 case OMP_LIST_COPYPRIVATE:
782 clause_code = OMP_CLAUSE_COPYPRIVATE;
786 = gfc_trans_omp_variable_list (clause_code, n, omp_clauses);
/* IF (scalar-logical-expr) clause.  */
793 if (clauses->if_expr)
797 gfc_init_se (&se, NULL);
798 gfc_conv_expr (&se, clauses->if_expr);
799 gfc_add_block_to_block (block, &se.pre);
800 if_var = gfc_evaluate_now (se.expr, block);
801 gfc_add_block_to_block (block, &se.post);
803 c = build_omp_clause (OMP_CLAUSE_IF);
804 OMP_CLAUSE_IF_EXPR (c) = if_var;
805 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
/* NUM_THREADS (scalar-int-expr) clause.  */
808 if (clauses->num_threads)
812 gfc_init_se (&se, NULL);
813 gfc_conv_expr (&se, clauses->num_threads);
814 gfc_add_block_to_block (block, &se.pre);
815 num_threads = gfc_evaluate_now (se.expr, block);
816 gfc_add_block_to_block (block, &se.post);
818 c = build_omp_clause (OMP_CLAUSE_NUM_THREADS);
819 OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
820 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
/* SCHEDULE (kind[, chunk]) clause.  */
823 chunk_size = NULL_TREE;
824 if (clauses->chunk_size)
826 gfc_init_se (&se, NULL);
827 gfc_conv_expr (&se, clauses->chunk_size);
828 gfc_add_block_to_block (block, &se.pre);
829 chunk_size = gfc_evaluate_now (se.expr, block);
830 gfc_add_block_to_block (block, &se.post);
833 if (clauses->sched_kind != OMP_SCHED_NONE)
835 c = build_omp_clause (OMP_CLAUSE_SCHEDULE);
836 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
837 switch (clauses->sched_kind)
839 case OMP_SCHED_STATIC:
840 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
842 case OMP_SCHED_DYNAMIC:
843 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
845 case OMP_SCHED_GUIDED:
846 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
848 case OMP_SCHED_RUNTIME:
849 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
852 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
857 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
/* DEFAULT (none|shared|private|firstprivate) clause.  */
860 if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
862 c = build_omp_clause (OMP_CLAUSE_DEFAULT);
863 switch (clauses->default_sharing)
865 case OMP_DEFAULT_NONE:
866 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
868 case OMP_DEFAULT_SHARED:
869 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
871 case OMP_DEFAULT_PRIVATE:
872 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
874 case OMP_DEFAULT_FIRSTPRIVATE:
875 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
880 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
/* Flag clauses: NOWAIT, ORDERED, UNTIED, COLLAPSE (n).  */
885 c = build_omp_clause (OMP_CLAUSE_NOWAIT);
886 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
889 if (clauses->ordered)
891 c = build_omp_clause (OMP_CLAUSE_ORDERED);
892 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
897 c = build_omp_clause (OMP_CLAUSE_UNTIED);
898 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
901 if (clauses->collapse)
903 c = build_omp_clause (OMP_CLAUSE_COLLAPSE);
904 OMP_CLAUSE_COLLAPSE_EXPR (c) = build_int_cst (NULL, clauses->collapse);
905 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
911 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
914 gfc_trans_omp_code (gfc_code *code, bool force_empty)
919 stmt = gfc_trans_code (code);
920 if (TREE_CODE (stmt) != BIND_EXPR)
922 if (!IS_EMPTY_STMT (stmt) || force_empty)
924 tree block = poplevel (1, 0, 0);
925 stmt = build3_v (BIND_EXPR, NULL, stmt, block);
936 static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
937 static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);
/* NOTE(review): truncated excerpt -- the return type, braces, several
   declarations (block, lse, rse, var, expr2, e), many `case' labels of
   both switches (operator cases and MIN/MAX/IAND/IOR/IEOR isym cases)
   and `break;'s are missing.  The function translates a `!$omp atomic'
   statement (a single assignment of the form var = var OP expr or an
   intrinsic call) into an OMP_ATOMIC tree.  Do not edit without the
   full file.  */
940 gfc_trans_omp_atomic (gfc_code *code)
947 tree lhsaddr, type, rhs, x;
948 enum tree_code op = ERROR_MARK;
949 bool var_on_left = false;
951 code = code->block->next;
952 gcc_assert (code->op == EXEC_ASSIGN);
953 gcc_assert (code->next == NULL);
954 var = code->expr->symtree->n.sym;
956 gfc_init_se (&lse, NULL);
957 gfc_init_se (&rse, NULL);
958 gfc_start_block (&block);
/* Take the address of the updated variable for the OMP_ATOMIC node.  */
960 gfc_conv_expr (&lse, code->expr);
961 gfc_add_block_to_block (&block, &lse.pre);
962 type = TREE_TYPE (lse.expr);
963 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
/* Strip a wrapping type-conversion intrinsic off the RHS.  */
966 if (expr2->expr_type == EXPR_FUNCTION
967 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
968 expr2 = expr2->value.function.actual->expr;
970 if (expr2->expr_type == EXPR_OP)
973 switch (expr2->value.op.op)
978 case INTRINSIC_TIMES:
981 case INTRINSIC_MINUS:
984 case INTRINSIC_DIVIDE:
985 if (expr2->ts.type == BT_INTEGER)
991 op = TRUTH_ANDIF_EXPR;
994 op = TRUTH_ORIF_EXPR;
/* Determine which operand of the binary op is VAR itself, again
   looking through a conversion intrinsic.  */
1005 e = expr2->value.op.op1;
1006 if (e->expr_type == EXPR_FUNCTION
1007 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
1008 e = e->value.function.actual->expr;
1009 if (e->expr_type == EXPR_VARIABLE
1010 && e->symtree != NULL
1011 && e->symtree->n.sym == var)
1013 expr2 = expr2->value.op.op2;
1018 e = expr2->value.op.op2;
1019 if (e->expr_type == EXPR_FUNCTION
1020 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
1021 e = e->value.function.actual->expr;
1022 gcc_assert (e->expr_type == EXPR_VARIABLE
1023 && e->symtree != NULL
1024 && e->symtree->n.sym == var);
1025 expr2 = expr2->value.op.op1;
1026 var_on_left = false;
1028 gfc_conv_expr (&rse, expr2);
1029 gfc_add_block_to_block (&block, &rse.pre);
/* Intrinsic form: var = MAX (var, ...), IAND (var, ...), etc.  */
1033 gcc_assert (expr2->expr_type == EXPR_FUNCTION);
1034 switch (expr2->value.function.isym->id)
1054 e = expr2->value.function.actual->expr;
1055 gcc_assert (e->expr_type == EXPR_VARIABLE
1056 && e->symtree != NULL
1057 && e->symtree->n.sym == var);
1059 gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
1060 gfc_add_block_to_block (&block, &rse.pre);
/* Fold additional intrinsic arguments into a non-atomic accumulator
   first, so the atomic update combines VAR with a single value.  */
1061 if (expr2->value.function.actual->next->next != NULL)
1063 tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
1064 gfc_actual_arglist *arg;
1066 gfc_add_modify (&block, accum, rse.expr);
1067 for (arg = expr2->value.function.actual->next->next; arg;
1070 gfc_init_block (&rse.pre);
1071 gfc_conv_expr (&rse, arg->expr);
1072 gfc_add_block_to_block (&block, &rse.pre);
1073 x = fold_build2 (op, TREE_TYPE (accum), accum, rse.expr);
1074 gfc_add_modify (&block, accum, x);
1080 expr2 = expr2->value.function.actual->next->expr;
/* Build the atomic update: *lhsaddr = *lhsaddr OP rhs (operand order
   depends on which side VAR appeared on).  */
1083 lhsaddr = save_expr (lhsaddr);
1084 rhs = gfc_evaluate_now (rse.expr, &block);
1085 x = convert (TREE_TYPE (rhs), build_fold_indirect_ref (lhsaddr));
1088 x = fold_build2 (op, TREE_TYPE (rhs), x, rhs);
1090 x = fold_build2 (op, TREE_TYPE (rhs), rhs, x);
/* Complex RHS combined into a real LHS keeps only the real part.  */
1092 if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
1093 && TREE_CODE (type) != COMPLEX_TYPE)
1094 x = fold_build1 (REALPART_EXPR, TREE_TYPE (TREE_TYPE (rhs)), x);
1096 x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
1097 gfc_add_expr_to_block (&block, x);
1099 gfc_add_block_to_block (&block, &lse.pre);
1100 gfc_add_block_to_block (&block, &rse.pre);
1102 return gfc_finish_block (&block);
1106 gfc_trans_omp_barrier (void)
1108 tree decl = built_in_decls [BUILT_IN_GOMP_BARRIER];
1109 return build_call_expr (decl, 0);
1113 gfc_trans_omp_critical (gfc_code *code)
1115 tree name = NULL_TREE, stmt;
1116 if (code->ext.omp_name != NULL)
1117 name = get_identifier (code->ext.omp_name);
1118 stmt = gfc_trans_code (code->block->next);
1119 return build2 (OMP_CRITICAL, void_type_node, stmt, name);
/* NOTE(review): truncated excerpt -- the return type, braces, several
   declarations (block, body, se, n, c, simple, dovar_decl), assignments
   (e.g. `dovar = se.expr;', `simple = 1/-1;') and control-flow lines
   are missing.  The function lowers `!$omp do' (possibly COLLAPSEd)
   into an OMP_FOR tree: it evaluates the iterator bounds, builds
   per-level init/cond/incr vectors, adds PRIVATE/LASTPRIVATE clauses
   for the do-variables, and assembles the body.  Do not edit without
   the full file.  */
1123 gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
1124 gfc_omp_clauses *do_clauses, tree par_clauses)
1127 tree dovar, stmt, from, to, step, type, init, cond, incr;
1128 tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
1131 gfc_omp_clauses *clauses = code->ext.omp_clauses;
1132 gfc_code *outermost;
1133 int i, collapse = clauses->collapse;
1134 tree dovar_init = NULL_TREE;
1139 outermost = code = code->block->next;
1140 gcc_assert (code->op == EXEC_DO);
/* One init/cond/incr slot per collapsed loop level.  */
1142 init = make_tree_vec (collapse);
1143 cond = make_tree_vec (collapse);
1144 incr = make_tree_vec (collapse);
1148 gfc_start_block (&block);
1152 omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
1154 for (i = 0; i < collapse; i++)
1157 int dovar_found = 0;
/* Check whether the do-variable is already in a LASTPRIVATE (found=2)
   or PRIVATE (found=1) clause.  */
1162 for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
1164 if (code->ext.iterator->var->symtree->n.sym == n->sym)
1169 for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
1170 if (code->ext.iterator->var->symtree->n.sym == n->sym)
1176 /* Evaluate all the expressions in the iterator. */
1177 gfc_init_se (&se, NULL);
1178 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1179 gfc_add_block_to_block (pblock, &se.pre);
1181 type = TREE_TYPE (dovar);
1182 gcc_assert (TREE_CODE (type) == INTEGER_TYPE);
1184 gfc_init_se (&se, NULL);
1185 gfc_conv_expr_val (&se, code->ext.iterator->start);
1186 gfc_add_block_to_block (pblock, &se.pre);
1187 from = gfc_evaluate_now (se.expr, pblock);
1189 gfc_init_se (&se, NULL);
1190 gfc_conv_expr_val (&se, code->ext.iterator->end);
1191 gfc_add_block_to_block (pblock, &se.pre);
1192 to = gfc_evaluate_now (se.expr, pblock);
1194 gfc_init_se (&se, NULL);
1195 gfc_conv_expr_val (&se, code->ext.iterator->step);
1196 gfc_add_block_to_block (pblock, &se.pre);
1197 step = gfc_evaluate_now (se.expr, pblock);
1199 /* Special case simple loops. */
1200 if (integer_onep (step))
1202 else if (tree_int_cst_equal (step, integer_minus_one_node))
/* Unit-step loops iterate the do-variable directly.  */
1208 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
1209 TREE_VEC_ELT (cond, i) = fold_build2 (simple > 0 ? LE_EXPR : GE_EXPR,
1210 boolean_type_node, dovar, to);
1211 TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, dovar, step);
1212 TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type, dovar,
1213 TREE_VEC_ELT (incr, i));
1217 /* STEP is not 1 or -1. Use:
1218 for (count = 0; count < (to + step - from) / step; count++)
1220 dovar = from + count * step;
1224 tmp = fold_build2 (MINUS_EXPR, type, step, from);
1225 tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
1226 tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
1227 tmp = gfc_evaluate_now (tmp, pblock);
1228 count = gfc_create_var (type, "count");
1229 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
1230 build_int_cst (type, 0));
1231 TREE_VEC_ELT (cond, i) = fold_build2 (LT_EXPR, boolean_type_node,
1233 TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, count,
1234 build_int_cst (type, 1));
1235 TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type,
1236 count, TREE_VEC_ELT (incr, i));
1238 /* Initialize DOVAR. */
1239 tmp = fold_build2 (MULT_EXPR, type, count, step);
1240 tmp = fold_build2 (PLUS_EXPR, type, from, tmp);
1241 dovar_init = tree_cons (dovar, tmp, dovar_init);
/* A do-variable found in no clause becomes PRIVATE.  */
1246 tmp = build_omp_clause (OMP_CLAUSE_PRIVATE);
1247 OMP_CLAUSE_DECL (tmp) = dovar;
1248 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
1250 else if (dovar_found == 2)
1257 /* If dovar is lastprivate, but different counter is used,
1258 dovar += step needs to be added to
1259 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
1260 will have the value on entry of the last loop, rather
1261 than value after iterator increment. */
1262 tmp = gfc_evaluate_now (step, pblock);
1263 tmp = fold_build2 (PLUS_EXPR, type, dovar, tmp);
1264 tmp = fold_build2 (MODIFY_EXPR, type, dovar, tmp);
1265 for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1266 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1267 && OMP_CLAUSE_DECL (c) == dovar)
1269 OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
/* Not on this directive: look for the lastprivate clause on the
   enclosing parallel (combined construct).  */
1273 if (c == NULL && par_clauses != NULL)
1275 for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1276 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1277 && OMP_CLAUSE_DECL (c) == dovar)
1279 tree l = build_omp_clause (OMP_CLAUSE_LASTPRIVATE);
1280 OMP_CLAUSE_DECL (l) = dovar;
1281 OMP_CLAUSE_CHAIN (l) = omp_clauses;
1282 OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
1284 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
1288 gcc_assert (simple || c != NULL);
/* The synthesized count variable is always PRIVATE.  */
1292 tmp = build_omp_clause (OMP_CLAUSE_PRIVATE);
1293 OMP_CLAUSE_DECL (tmp) = count;
1294 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
1297 if (i + 1 < collapse)
1298 code = code->block->next;
1301 if (pblock != &block)
1304 gfc_start_block (&block);
1307 gfc_start_block (&body);
/* Emit the deferred "dovar = from + count * step" assignments in
   source order at the top of the body.  */
1309 dovar_init = nreverse (dovar_init);
1312 gfc_add_modify (&body, TREE_PURPOSE (dovar_init),
1313 TREE_VALUE (dovar_init));
1314 dovar_init = TREE_CHAIN (dovar_init);
1317 /* Cycle statement is implemented with a goto. Exit statement must not be
1318 present for this loop. */
1319 cycle_label = gfc_build_label_decl (NULL_TREE);
1321 /* Put these labels where they can be found later. We put the
1322 labels in a TREE_LIST node (because TREE_CHAIN is already
1323 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
1324 label in TREE_VALUE (backend_decl). */
1326 code->block->backend_decl = tree_cons (cycle_label, NULL, NULL);
1328 /* Main loop body. */
1329 tmp = gfc_trans_omp_code (code->block->next, true);
1330 gfc_add_expr_to_block (&body, tmp);
1332 /* Label for cycle statements (if needed). */
1333 if (TREE_USED (cycle_label))
1335 tmp = build1_v (LABEL_EXPR, cycle_label);
1336 gfc_add_expr_to_block (&body, tmp);
1339 /* End of loop body. */
1340 stmt = make_node (OMP_FOR);
1342 TREE_TYPE (stmt) = void_type_node;
1343 OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
1344 OMP_FOR_CLAUSES (stmt) = omp_clauses;
1345 OMP_FOR_INIT (stmt) = init;
1346 OMP_FOR_COND (stmt) = cond;
1347 OMP_FOR_INCR (stmt) = incr;
1348 gfc_add_expr_to_block (&block, stmt);
1350 return gfc_finish_block (&block);
1354 gfc_trans_omp_flush (void)
1356 tree decl = built_in_decls [BUILT_IN_SYNCHRONIZE];
1357 return build_call_expr (decl, 0);
1361 gfc_trans_omp_master (gfc_code *code)
1363 tree stmt = gfc_trans_code (code->block->next);
1364 if (IS_EMPTY_STMT (stmt))
1366 return build1_v (OMP_MASTER, stmt);
1370 gfc_trans_omp_ordered (gfc_code *code)
1372 return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
1376 gfc_trans_omp_parallel (gfc_code *code)
1379 tree stmt, omp_clauses;
1381 gfc_start_block (&block);
1382 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1384 stmt = gfc_trans_omp_code (code->block->next, true);
1385 stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
1386 gfc_add_expr_to_block (&block, stmt);
1387 return gfc_finish_block (&block);
1391 gfc_trans_omp_parallel_do (gfc_code *code)
1393 stmtblock_t block, *pblock = NULL;
1394 gfc_omp_clauses parallel_clauses, do_clauses;
1395 tree stmt, omp_clauses = NULL_TREE;
1397 gfc_start_block (&block);
1399 memset (&do_clauses, 0, sizeof (do_clauses));
1400 if (code->ext.omp_clauses != NULL)
1402 memcpy (¶llel_clauses, code->ext.omp_clauses,
1403 sizeof (parallel_clauses));
1404 do_clauses.sched_kind = parallel_clauses.sched_kind;
1405 do_clauses.chunk_size = parallel_clauses.chunk_size;
1406 do_clauses.ordered = parallel_clauses.ordered;
1407 do_clauses.collapse = parallel_clauses.collapse;
1408 parallel_clauses.sched_kind = OMP_SCHED_NONE;
1409 parallel_clauses.chunk_size = NULL;
1410 parallel_clauses.ordered = false;
1411 parallel_clauses.collapse = 0;
1412 omp_clauses = gfc_trans_omp_clauses (&block, ¶llel_clauses,
1415 do_clauses.nowait = true;
1416 if (!do_clauses.ordered && do_clauses.sched_kind != OMP_SCHED_STATIC)
1420 stmt = gfc_trans_omp_do (code, pblock, &do_clauses, omp_clauses);
1421 if (TREE_CODE (stmt) != BIND_EXPR)
1422 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
1425 stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
1426 OMP_PARALLEL_COMBINED (stmt) = 1;
1427 gfc_add_expr_to_block (&block, stmt);
1428 return gfc_finish_block (&block);
1432 gfc_trans_omp_parallel_sections (gfc_code *code)
1435 gfc_omp_clauses section_clauses;
1436 tree stmt, omp_clauses;
1438 memset (§ion_clauses, 0, sizeof (section_clauses));
1439 section_clauses.nowait = true;
1441 gfc_start_block (&block);
1442 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1445 stmt = gfc_trans_omp_sections (code, §ion_clauses);
1446 if (TREE_CODE (stmt) != BIND_EXPR)
1447 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
1450 stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
1451 OMP_PARALLEL_COMBINED (stmt) = 1;
1452 gfc_add_expr_to_block (&block, stmt);
1453 return gfc_finish_block (&block);
1457 gfc_trans_omp_parallel_workshare (gfc_code *code)
1460 gfc_omp_clauses workshare_clauses;
1461 tree stmt, omp_clauses;
1463 memset (&workshare_clauses, 0, sizeof (workshare_clauses));
1464 workshare_clauses.nowait = true;
1466 gfc_start_block (&block);
1467 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1470 stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
1471 if (TREE_CODE (stmt) != BIND_EXPR)
1472 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
1475 stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
1476 OMP_PARALLEL_COMBINED (stmt) = 1;
1477 gfc_add_expr_to_block (&block, stmt);
1478 return gfc_finish_block (&block);
1482 gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
1484 stmtblock_t block, body;
1485 tree omp_clauses, stmt;
1486 bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;
1488 gfc_start_block (&block);
1490 omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);
1492 gfc_init_block (&body);
1493 for (code = code->block; code; code = code->block)
1495 /* Last section is special because of lastprivate, so even if it
1496 is empty, chain it in. */
1497 stmt = gfc_trans_omp_code (code->next,
1498 has_lastprivate && code->block == NULL);
1499 if (! IS_EMPTY_STMT (stmt))
1501 stmt = build1_v (OMP_SECTION, stmt);
1502 gfc_add_expr_to_block (&body, stmt);
1505 stmt = gfc_finish_block (&body);
1507 stmt = build2 (OMP_SECTIONS, void_type_node, stmt, omp_clauses);
1508 gfc_add_expr_to_block (&block, stmt);
1510 return gfc_finish_block (&block);
1514 gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
1516 tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
1517 tree stmt = gfc_trans_omp_code (code->block->next, true);
1518 stmt = build2 (OMP_SINGLE, void_type_node, stmt, omp_clauses);
1523 gfc_trans_omp_task (gfc_code *code)
1526 tree stmt, omp_clauses;
1528 gfc_start_block (&block);
1529 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1531 stmt = gfc_trans_omp_code (code->block->next, true);
1532 stmt = build2 (OMP_TASK, void_type_node, stmt, omp_clauses);
1533 gfc_add_expr_to_block (&block, stmt);
1534 return gfc_finish_block (&block);
1538 gfc_trans_omp_taskwait (void)
1540 tree decl = built_in_decls [BUILT_IN_GOMP_TASKWAIT];
1541 return build_call_expr (decl, 0);
1545 gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
1548 return gfc_trans_omp_single (code, clauses);
1552 gfc_trans_omp_directive (gfc_code *code)
1556 case EXEC_OMP_ATOMIC:
1557 return gfc_trans_omp_atomic (code);
1558 case EXEC_OMP_BARRIER:
1559 return gfc_trans_omp_barrier ();
1560 case EXEC_OMP_CRITICAL:
1561 return gfc_trans_omp_critical (code);
1563 return gfc_trans_omp_do (code, NULL, code->ext.omp_clauses, NULL);
1564 case EXEC_OMP_FLUSH:
1565 return gfc_trans_omp_flush ();
1566 case EXEC_OMP_MASTER:
1567 return gfc_trans_omp_master (code);
1568 case EXEC_OMP_ORDERED:
1569 return gfc_trans_omp_ordered (code);
1570 case EXEC_OMP_PARALLEL:
1571 return gfc_trans_omp_parallel (code);
1572 case EXEC_OMP_PARALLEL_DO:
1573 return gfc_trans_omp_parallel_do (code);
1574 case EXEC_OMP_PARALLEL_SECTIONS:
1575 return gfc_trans_omp_parallel_sections (code);
1576 case EXEC_OMP_PARALLEL_WORKSHARE:
1577 return gfc_trans_omp_parallel_workshare (code);
1578 case EXEC_OMP_SECTIONS:
1579 return gfc_trans_omp_sections (code, code->ext.omp_clauses);
1580 case EXEC_OMP_SINGLE:
1581 return gfc_trans_omp_single (code, code->ext.omp_clauses);
1583 return gfc_trans_omp_task (code);
1584 case EXEC_OMP_TASKWAIT:
1585 return gfc_trans_omp_taskwait ();
1586 case EXEC_OMP_WORKSHARE:
1587 return gfc_trans_omp_workshare (code, code->ext.omp_clauses);