1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
32 #include "trans-stmt.h"
33 #include "trans-types.h"
34 #include "trans-array.h"
35 #include "trans-const.h"
/* NOTE(review): this extract is missing interior lines (return type,
   braces, return statements) -- the embedded line numbers jump.  Do not
   edit the logic from this view alone.
   Predicate used by the middle end: decide whether OpenMP should
   privatize what DECL points to (a reference/pointer target such as a
   dummy argument or array descriptor) rather than DECL itself.  */
40 /* True if OpenMP should privatize what this DECL points to rather
41 than the DECL itself. */
44 gfc_omp_privatize_by_reference (const_tree decl)
46 tree type = TREE_TYPE (decl);
48 if (TREE_CODE (type) == REFERENCE_TYPE
49 && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL)
52 if (TREE_CODE (type) == POINTER_TYPE)
54 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
55 that have POINTER_TYPE type and don't have GFC_POINTER_TYPE_P
56 set are supposed to be privatized by reference. */
57 if (GFC_POINTER_TYPE_P (type))
60 if (!DECL_ARTIFICIAL (decl))
/* Artificial pointer decls that carry a saved array descriptor are
   expanded arrays; presumably these too are privatized by reference --
   the original comment/line is truncated here, confirm against GCC.  */
63 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
65 if (DECL_LANG_SPECIFIC (decl)
66 && GFC_DECL_SAVED_DESCRIPTOR (decl))
/* NOTE(review): interior lines are elided in this extract (braces and
   some conditions are missing).
   Returns the predetermined OpenMP data-sharing attribute for DECL, or
   OMP_CLAUSE_DEFAULT_UNSPECIFIED when nothing is predetermined.  */
73 /* True if OpenMP sharing attribute of DECL is predetermined. */
75 enum omp_clause_default_kind
76 gfc_omp_predetermined_sharing (tree decl)
/* Compiler-generated temporaries (other than function results) are
   always shared.  */
78 if (DECL_ARTIFICIAL (decl) && ! GFC_DECL_RESULT (decl))
79 return OMP_CLAUSE_DEFAULT_SHARED;
81 /* Cray pointees shouldn't be listed in any clauses and should be
82 gimplified to dereference of the corresponding Cray pointer.
83 Make them all private, so that they are emitted in the debug
85 if (GFC_DECL_CRAY_POINTEE (decl))
86 return OMP_CLAUSE_DEFAULT_PRIVATE;
88 /* Assumed-size arrays are predetermined to inherit sharing
89 attributes of the associated actual argument, which is shared
/* Assumed-size check: dummy array of unknown kind whose last-dimension
   upper bound is tested on the (elided) continuation line.  */
91 if (TREE_CODE (decl) == PARM_DECL
92 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
93 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
94 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
95 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
97 return OMP_CLAUSE_DEFAULT_SHARED;
99 /* COMMON and EQUIVALENCE decls are shared. They
100 are only referenced through DECL_VALUE_EXPR of the variables
101 contained in them. If those are privatized, they will not be
102 gimplified to the COMMON or EQUIVALENCE decls. */
103 if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
104 return OMP_CLAUSE_DEFAULT_SHARED;
106 if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
107 return OMP_CLAUSE_DEFAULT_SHARED;
109 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
/* NOTE(review): return type and return statements are elided in this
   extract.  Allocatable descriptor arrays need a reference to the outer
   variable on their PRIVATE clause (so allocation status can be
   mirrored); everything else does not.  */
113 /* Return true if DECL in private clause needs
114 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
116 gfc_omp_private_outer_ref (tree decl)
118 tree type = TREE_TYPE (decl);
120 if (GFC_DESCRIPTOR_TYPE_P (type)
121 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
/* NOTE(review): interior lines are elided (return type, early return for
   the non-allocatable case, some expression continuations).
   Builds the OMP default-constructor code for an allocatable array in a
   PRIVATE/LASTPRIVATE clause: if the OUTER descriptor's data pointer is
   non-NULL, allocate the private copy with the same size; otherwise set
   the private copy's data pointer to NULL.  */
127 /* Return code to initialize DECL with its default constructor, or
128 NULL if there's nothing to do. */
131 gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
133 tree type = TREE_TYPE (decl), rank, size, esize, ptr, cond, then_b, else_b;
134 stmtblock_t block, cond_block;
136 if (! GFC_DESCRIPTOR_TYPE_P (type)
137 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
140 gcc_assert (outer != NULL);
141 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
142 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE);
144 /* Allocatable arrays in PRIVATE clauses need to be set to
145 "not currently allocated" allocation status if outer
146 array is "not currently allocated", otherwise should be allocated. */
147 gfc_start_block (&block);
149 gfc_init_block (&cond_block);
151 gfc_add_modify (&cond_block, decl, outer);
/* Element count: (ubound - lbound + 1), times the last stride for
   rank > 1, times the element size -- computed on the descriptor.  */
152 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
153 size = gfc_conv_descriptor_ubound (decl, rank);
154 size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
155 gfc_conv_descriptor_lbound (decl, rank));
156 size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
158 if (GFC_TYPE_ARRAY_RANK (type) > 1)
159 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
160 gfc_conv_descriptor_stride (decl, rank));
161 esize = fold_convert (gfc_array_index_type,
162 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
163 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
164 size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);
165 ptr = gfc_allocate_array_with_status (&cond_block,
166 build_int_cst (pvoid_type_node, 0),
168 gfc_conv_descriptor_data_set (&cond_block, decl, ptr);
169 then_b = gfc_finish_block (&cond_block);
/* Else branch: outer array not allocated -- mark the private copy as
   unallocated too.  */
171 gfc_init_block (&cond_block);
172 gfc_conv_descriptor_data_set (&cond_block, decl, null_pointer_node);
173 else_b = gfc_finish_block (&cond_block);
175 cond = fold_build2 (NE_EXPR, boolean_type_node,
176 fold_convert (pvoid_type_node,
177 gfc_conv_descriptor_data_get (outer)),
179 gfc_add_expr_to_block (&block, build3 (COND_EXPR, void_type_node,
180 cond, then_b, else_b));
182 return gfc_finish_block (&block);
/* NOTE(review): interior lines are elided (return type, some argument
   continuations of the allocate/memcpy calls).
   Copy constructor for FIRSTPRIVATE: scalars/non-allocatables get a
   plain assignment; allocatable descriptor arrays are allocated to the
   source's size and their payload copied with memcpy.  */
185 /* Build and return code for a copy constructor from SRC to DEST. */
188 gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
190 tree type = TREE_TYPE (dest), ptr, size, esize, rank, call;
193 if (! GFC_DESCRIPTOR_TYPE_P (type)
194 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
195 return build2_v (MODIFY_EXPR, dest, src);
197 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE);
199 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
200 and copied from SRC. */
201 gfc_start_block (&block);
203 gfc_add_modify (&block, dest, src);
/* Byte size of the array: (ubound - lbound + 1) [* stride for rank>1]
   * element size, read off the destination descriptor.  */
204 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
205 size = gfc_conv_descriptor_ubound (dest, rank);
206 size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
207 gfc_conv_descriptor_lbound (dest, rank));
208 size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
210 if (GFC_TYPE_ARRAY_RANK (type) > 1)
211 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
212 gfc_conv_descriptor_stride (dest, rank));
213 esize = fold_convert (gfc_array_index_type,
214 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
215 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
216 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
217 ptr = gfc_allocate_array_with_status (&block,
218 build_int_cst (pvoid_type_node, 0),
220 gfc_conv_descriptor_data_set (&block, dest, ptr);
221 call = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3, ptr,
222 fold_convert (pvoid_type_node,
223 gfc_conv_descriptor_data_get (src)),
225 gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));
227 return gfc_finish_block (&block);
/* NOTE(review): interior lines are elided (return type, a size/memcpy
   argument continuation).
   Assignment operator for clauses such as COPYPRIVATE/LASTPRIVATE:
   plain assignment for non-allocatables; for allocatable descriptor
   arrays both sides are assumed already allocated, so only the payload
   is memcpy'd (no re-allocation).  */
230 /* Similarly, except use an assignment operator instead. */
233 gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED, tree dest, tree src)
235 tree type = TREE_TYPE (dest), rank, size, esize, call;
238 if (! GFC_DESCRIPTOR_TYPE_P (type)
239 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
240 return build2_v (MODIFY_EXPR, dest, src);
242 /* Handle copying allocatable arrays. */
243 gfc_start_block (&block);
/* Same byte-size computation as in the copy constructor above.  */
245 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
246 size = gfc_conv_descriptor_ubound (dest, rank);
247 size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
248 gfc_conv_descriptor_lbound (dest, rank));
249 size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
251 if (GFC_TYPE_ARRAY_RANK (type) > 1)
252 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
253 gfc_conv_descriptor_stride (dest, rank));
254 esize = fold_convert (gfc_array_index_type,
255 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
256 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
257 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
258 call = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
259 fold_convert (pvoid_type_node,
260 gfc_conv_descriptor_data_get (dest)),
261 fold_convert (pvoid_type_node,
262 gfc_conv_descriptor_data_get (src)),
264 gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));
266 return gfc_finish_block (&block);
/* NOTE(review): return type and the NULL-returning early exit are
   elided in this extract.
   Destructor hook: allocatable descriptor arrays are deallocated if
   currently allocated; anything else needs no destruction.  */
269 /* Build and return code destructing DECL. Return NULL if nothing
273 gfc_omp_clause_dtor (tree clause ATTRIBUTE_UNUSED, tree decl)
275 tree type = TREE_TYPE (decl);
277 if (! GFC_DESCRIPTOR_TYPE_P (type)
278 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
281 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
282 to be deallocated if they were allocated. */
283 return gfc_trans_dealloc_allocated (decl);
/* NOTE(review): return type, braces and the actual return statements
   are elided in this extract; only the conditions survive.
   Decide whether DECL's DECL_VALUE_EXPR (mapping a variable into its
   COMMON/EQUIVALENCE block, or a result variable) should be ignored
   during OpenMP lowering.  SHARED says how DECL will be remapped.  */
287 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
288 disregarded in OpenMP construct, because it is going to be
289 remapped during OpenMP lowering. SHARED is true if DECL
290 is going to be shared, false if it is going to be privatized. */
293 gfc_omp_disregard_value_expr (tree decl, bool shared)
295 if (GFC_DECL_COMMON_OR_EQUIV (decl)
296 && DECL_HAS_VALUE_EXPR_P (decl))
298 tree value = DECL_VALUE_EXPR (decl);
/* The value expr must be a COMPONENT_REF into the COMMON/EQUIV
   VAR_DECL for the special-casing below to apply.  */
300 if (TREE_CODE (value) == COMPONENT_REF
301 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
302 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
304 /* If variable in COMMON or EQUIVALENCE is privatized, return
305 true, as just that variable is supposed to be privatized,
306 not the whole COMMON or whole EQUIVALENCE.
307 For shared variables in COMMON or EQUIVALENCE, let them be
308 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
309 from the same COMMON or EQUIVALENCE just one sharing of the
310 whole COMMON or EQUIVALENCE is enough. */
315 if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
/* NOTE(review): return type, braces and return statements are elided.
   Decide whether DECL should get an OMP_CLAUSE_PRIVATE with the DEBUG
   flag set (so debug info still sees it): Cray pointees and members of
   COMMON/EQUIVALENCE blocks are the candidates checked here.  */
321 /* Return true if DECL that is shared iff SHARED is true should
322 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
326 gfc_omp_private_debug_clause (tree decl, bool shared)
328 if (GFC_DECL_CRAY_POINTEE (decl))
331 if (GFC_DECL_COMMON_OR_EQUIV (decl)
332 && DECL_HAS_VALUE_EXPR_P (decl))
334 tree value = DECL_VALUE_EXPR (decl);
336 if (TREE_CODE (value) == COMPONENT_REF
337 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
338 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
/* NOTE(review): return type, braces and loop-variable declaration are
   elided in this extract.
   For gfortran array types, register each per-dimension lbound/ubound/
   stride plus the overall size and offset as firstprivate in the
   gimplifier's OpenMP context, so the bounds are available inside the
   outlined region.  */
345 /* Register language specific type size variables as potentially OpenMP
346 firstprivate variables. */
349 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
351 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
355 gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
356 for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
358 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
359 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
360 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
362 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
363 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
/* NOTE(review): return type, braces and the return statement are elided.
   Prepend clause NODE to the clause chain TAIL; presumably returns NODE
   as the new head -- confirm against the full source.  */
369 gfc_trans_add_clause (tree node, tree tail)
371 OMP_CLAUSE_CHAIN (node) = tail;
/* NOTE(review): several interior lines are elided (declarations of
   return_value/entry_master/parent_decl/parent_flag, loop body finding
   the matching entry, final return).
   Map a gfc_symbol to its backend decl for use in an OpenMP clause,
   substituting the fake result decl for function results and for
   alternate ENTRY points / entry-master procedures.  */
376 gfc_trans_omp_variable (gfc_symbol *sym)
378 tree t = gfc_get_symbol_decl (sym);
382 bool alternate_entry;
385 return_value = sym->attr.function && sym->result == sym;
386 alternate_entry = sym->attr.function && sym->attr.entry
387 && sym->result == sym;
388 entry_master = sym->attr.result
389 && sym->ns->proc_name->attr.entry_master
390 && !gfc_return_by_reference (sym->ns->proc_name);
391 parent_decl = DECL_CONTEXT (current_function_decl);
/* Detect whether SYM belongs to the parent function of a contained
   procedure (parent_flag is set in the elided branch).  */
393 if ((t == parent_decl && return_value)
394 || (sym->ns && sym->ns->proc_name
395 && sym->ns->proc_name->backend_decl == parent_decl
396 && (alternate_entry || entry_master)))
401 /* Special case for assigning the return value of a function.
402 Self recursive functions must have an explicit return value. */
403 if (return_value && (t == current_function_decl || parent_flag))
404 t = gfc_get_fake_result_decl (sym, parent_flag);
406 /* Similarly for alternate entry points. */
407 else if (alternate_entry
408 && (sym->ns->proc_name->backend_decl == current_function_decl
411 gfc_entry_list *el = NULL;
413 for (el = sym->ns->entries; el; el = el->next)
416 t = gfc_get_fake_result_decl (sym, parent_flag);
421 else if (entry_master
422 && (sym->ns->proc_name->backend_decl == current_function_decl
424 t = gfc_get_fake_result_decl (sym, parent_flag);
/* NOTE(review): return type, the LIST parameter line, braces and the
   final 'return list;' are elided in this extract.
   Build an OMP clause of kind CODE for every referenced symbol in
   NAMELIST, chaining the new clauses onto LIST.  */
430 gfc_trans_omp_variable_list (enum omp_clause_code code, gfc_namelist *namelist,
433 for (; namelist != NULL; namelist = namelist->next)
434 if (namelist->sym->attr.referenced)
436 tree t = gfc_trans_omp_variable (namelist->sym);
437 if (t != error_mark_node)
439 tree node = build_omp_clause (code);
440 OMP_CLAUSE_DECL (node) = t;
441 list = gfc_trans_add_clause (node, list);
/* NOTE(review): this long function has many elided lines (return type,
   several switch cases incl. MIN/MAX/IAND/IOR/IEOR and the iname
   selection, memset of outer_sym, expression where/ts assignments,
   pushlevel calls, and the final symtree cleanup).  Documented from the
   surviving lines only.
   Build OMP_CLAUSE_REDUCTION_INIT and OMP_CLAUSE_REDUCTION_MERGE for an
   array reduction on SYM.  Fake symbols/symtrees are created so the
   init assignment (e1 = init value e2) and the merge assignment
   (e3 = e3 <op> e1, or an intrinsic call e4 for MIN/MAX-like
   reductions) can be resolved and translated as ordinary Fortran
   assignments.  Allocatable arrays additionally get allocated in INIT
   and deallocated in MERGE.  */
448 gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where)
450 gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
451 gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
452 gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
453 gfc_expr *e1, *e2, *e3, *e4;
455 tree decl, backend_decl, stmt;
456 locus old_loc = gfc_current_locus;
460 decl = OMP_CLAUSE_DECL (c);
461 gfc_current_locus = where;
463 /* Create a fake symbol for init value. */
464 memset (&init_val_sym, 0, sizeof (init_val_sym));
465 init_val_sym.ns = sym->ns;
466 init_val_sym.name = sym->name;
467 init_val_sym.ts = sym->ts;
468 init_val_sym.attr.referenced = 1;
469 init_val_sym.declared_at = where;
470 init_val_sym.attr.flavor = FL_VARIABLE;
471 backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
472 init_val_sym.backend_decl = backend_decl;
474 /* Create a fake symbol for the outer array reference. */
476 outer_sym.as = gfc_copy_array_spec (sym->as);
477 outer_sym.attr.dummy = 0;
478 outer_sym.attr.result = 0;
479 outer_sym.attr.flavor = FL_VARIABLE;
480 outer_sym.backend_decl = create_tmp_var_raw (TREE_TYPE (decl), NULL);
482 /* Create fake symtrees for it. */
483 symtree1 = gfc_new_symtree (&root1, sym->name);
484 symtree1->n.sym = sym;
485 gcc_assert (symtree1 == root1);
487 symtree2 = gfc_new_symtree (&root2, sym->name);
488 symtree2->n.sym = &init_val_sym;
489 gcc_assert (symtree2 == root2);
491 symtree3 = gfc_new_symtree (&root3, sym->name);
492 symtree3->n.sym = &outer_sym;
493 gcc_assert (symtree3 == root3);
495 /* Create expressions. */
/* e1: full-array reference to SYM (the private reduction copy).  */
496 e1 = gfc_get_expr ();
497 e1->expr_type = EXPR_VARIABLE;
499 e1->symtree = symtree1;
501 e1->ref = ref = gfc_get_ref ();
502 ref->type = REF_ARRAY;
503 ref->u.ar.where = where;
504 ref->u.ar.as = sym->as;
505 ref->u.ar.type = AR_FULL;
507 t = gfc_resolve_expr (e1);
508 gcc_assert (t == SUCCESS);
/* e2: the reduction's neutral init value.  */
510 e2 = gfc_get_expr ();
511 e2->expr_type = EXPR_VARIABLE;
513 e2->symtree = symtree2;
515 t = gfc_resolve_expr (e2);
516 gcc_assert (t == SUCCESS);
/* e3: same full-array reference, but through the outer placeholder.  */
518 e3 = gfc_copy_expr (e1);
519 e3->symtree = symtree3;
520 t = gfc_resolve_expr (e3);
521 gcc_assert (t == SUCCESS);
/* e4: merge expression, outer <op> private.  Elided cases presumably
   handle MIN/MAX/IAND/IOR/IEOR via the intrinsic path below.  */
524 switch (OMP_CLAUSE_REDUCTION_CODE (c))
528 e4 = gfc_add (e3, e1);
531 e4 = gfc_multiply (e3, e1);
533 case TRUTH_ANDIF_EXPR:
534 e4 = gfc_and (e3, e1);
536 case TRUTH_ORIF_EXPR:
537 e4 = gfc_or (e3, e1);
540 e4 = gfc_eqv (e3, e1);
543 e4 = gfc_neqv (e3, e1);
/* Intrinsic-function path: build a call  iname (e3, e1)  through a
   fake intrinsic symbol (iname is set in elided case labels).  */
565 memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
566 intrinsic_sym.ns = sym->ns;
567 intrinsic_sym.name = iname;
568 intrinsic_sym.ts = sym->ts;
569 intrinsic_sym.attr.referenced = 1;
570 intrinsic_sym.attr.intrinsic = 1;
571 intrinsic_sym.attr.function = 1;
572 intrinsic_sym.result = &intrinsic_sym;
573 intrinsic_sym.declared_at = where;
575 symtree4 = gfc_new_symtree (&root4, iname);
576 symtree4->n.sym = &intrinsic_sym;
577 gcc_assert (symtree4 == root4);
579 e4 = gfc_get_expr ();
580 e4->expr_type = EXPR_FUNCTION;
582 e4->symtree = symtree4;
583 e4->value.function.isym = gfc_find_function (iname);
584 e4->value.function.actual = gfc_get_actual_arglist ();
585 e4->value.function.actual->expr = e3;
586 e4->value.function.actual->next = gfc_get_actual_arglist ();
587 e4->value.function.actual->next->expr = e1;
589 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
590 e1 = gfc_copy_expr (e1);
591 e3 = gfc_copy_expr (e3);
592 t = gfc_resolve_expr (e4);
593 gcc_assert (t == SUCCESS);
595 /* Create the init statement list. */
597 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
598 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
600 /* If decl is an allocatable array, it needs to be allocated
601 with the same bounds as the outer var. */
602 tree type = TREE_TYPE (decl), rank, size, esize, ptr;
605 gfc_start_block (&block);
607 gfc_add_modify (&block, decl, outer_sym.backend_decl);
/* Same descriptor byte-size computation as the ctor helpers above.  */
608 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
609 size = gfc_conv_descriptor_ubound (decl, rank);
610 size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
611 gfc_conv_descriptor_lbound (decl, rank));
612 size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
614 if (GFC_TYPE_ARRAY_RANK (type) > 1)
615 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
616 gfc_conv_descriptor_stride (decl, rank));
617 esize = fold_convert (gfc_array_index_type,
618 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
619 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
620 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
621 ptr = gfc_allocate_array_with_status (&block,
622 build_int_cst (pvoid_type_node, 0),
624 gfc_conv_descriptor_data_set (&block, decl, ptr);
625 gfc_add_expr_to_block (&block, gfc_trans_assignment (e1, e2, false));
626 stmt = gfc_finish_block (&block);
629 stmt = gfc_trans_assignment (e1, e2, false);
630 if (TREE_CODE (stmt) != BIND_EXPR)
631 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
634 OMP_CLAUSE_REDUCTION_INIT (c) = stmt;
636 /* Create the merge statement list. */
638 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
639 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
641 /* If decl is an allocatable array, it needs to be deallocated
645 gfc_start_block (&block);
646 gfc_add_expr_to_block (&block, gfc_trans_assignment (e3, e4, false));
647 gfc_add_expr_to_block (&block, gfc_trans_dealloc_allocated (decl));
648 stmt = gfc_finish_block (&block);
651 stmt = gfc_trans_assignment (e3, e4, false);
652 if (TREE_CODE (stmt) != BIND_EXPR)
653 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
656 OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;
658 /* And stick the placeholder VAR_DECL into the clause as well. */
659 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_sym.backend_decl;
661 gfc_current_locus = old_loc;
/* Cleanup of the fake expressions/symtrees is elided here.  */
672 gfc_free_array_spec (outer_sym.as);
/* NOTE(review): return type, braces and the final 'return list;' are
   elided.
   Build an OMP_CLAUSE_REDUCTION for each referenced symbol in NAMELIST
   with the given REDUCTION_CODE; array symbols additionally get their
   init/merge sequences built by gfc_trans_omp_array_reduction.  */
676 gfc_trans_omp_reduction_list (gfc_namelist *namelist, tree list,
677 enum tree_code reduction_code, locus where)
679 for (; namelist != NULL; namelist = namelist->next)
680 if (namelist->sym->attr.referenced)
682 tree t = gfc_trans_omp_variable (namelist->sym);
683 if (t != error_mark_node)
685 tree node = build_omp_clause (OMP_CLAUSE_REDUCTION);
686 OMP_CLAUSE_DECL (node) = t;
687 OMP_CLAUSE_REDUCTION_CODE (node) = reduction_code;
688 if (namelist->sym->attr.dimension)
689 gfc_trans_omp_array_reduction (node, namelist->sym, where);
690 list = gfc_trans_add_clause (node, list);
/* NOTE(review): many interior lines are elided (the where/locus
   parameter, early 'if (clauses == NULL)' exit, the OMP_LIST_* case
   labels for the reduction switch, break statements, the nowait/untied
   condition lines, and the final return).  Documented from the
   surviving lines only.
   Translate a gfc_omp_clauses structure into a chain of GENERIC
   OMP_CLAUSE_* trees: variable lists (private/shared/firstprivate/
   lastprivate/copyin/copyprivate and reductions), then the scalar
   clauses IF, NUM_THREADS, SCHEDULE(+chunk), DEFAULT, NOWAIT, ORDERED,
   UNTIED and COLLAPSE.  Expression operands are evaluated into BLOCK
   before the construct.  */
697 gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
700 tree omp_clauses = NULL_TREE, chunk_size, c, old_clauses;
702 enum omp_clause_code clause_code;
708 for (list = 0; list < OMP_LIST_NUM; list++)
710 gfc_namelist *n = clauses->lists[list];
714 if (list >= OMP_LIST_REDUCTION_FIRST
715 && list <= OMP_LIST_REDUCTION_LAST)
717 enum tree_code reduction_code;
/* Map each reduction list kind to its tree code; the case labels
   themselves are elided in this extract.  */
721 reduction_code = PLUS_EXPR;
724 reduction_code = MULT_EXPR;
727 reduction_code = MINUS_EXPR;
730 reduction_code = TRUTH_ANDIF_EXPR;
733 reduction_code = TRUTH_ORIF_EXPR;
736 reduction_code = EQ_EXPR;
739 reduction_code = NE_EXPR;
742 reduction_code = MAX_EXPR;
745 reduction_code = MIN_EXPR;
748 reduction_code = BIT_AND_EXPR;
751 reduction_code = BIT_IOR_EXPR;
754 reduction_code = BIT_XOR_EXPR;
759 old_clauses = omp_clauses;
761 = gfc_trans_omp_reduction_list (n, omp_clauses, reduction_code,
767 case OMP_LIST_PRIVATE:
768 clause_code = OMP_CLAUSE_PRIVATE;
770 case OMP_LIST_SHARED:
771 clause_code = OMP_CLAUSE_SHARED;
773 case OMP_LIST_FIRSTPRIVATE:
774 clause_code = OMP_CLAUSE_FIRSTPRIVATE;
776 case OMP_LIST_LASTPRIVATE:
777 clause_code = OMP_CLAUSE_LASTPRIVATE;
779 case OMP_LIST_COPYIN:
780 clause_code = OMP_CLAUSE_COPYIN;
782 case OMP_LIST_COPYPRIVATE:
783 clause_code = OMP_CLAUSE_COPYPRIVATE;
787 = gfc_trans_omp_variable_list (clause_code, n, omp_clauses);
/* IF (expr) clause: evaluate the condition now, attach the temp.  */
794 if (clauses->if_expr)
798 gfc_init_se (&se, NULL);
799 gfc_conv_expr (&se, clauses->if_expr);
800 gfc_add_block_to_block (block, &se.pre);
801 if_var = gfc_evaluate_now (se.expr, block);
802 gfc_add_block_to_block (block, &se.post);
804 c = build_omp_clause (OMP_CLAUSE_IF);
805 OMP_CLAUSE_IF_EXPR (c) = if_var;
806 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
809 if (clauses->num_threads)
813 gfc_init_se (&se, NULL);
814 gfc_conv_expr (&se, clauses->num_threads);
815 gfc_add_block_to_block (block, &se.pre);
816 num_threads = gfc_evaluate_now (se.expr, block);
817 gfc_add_block_to_block (block, &se.post);
819 c = build_omp_clause (OMP_CLAUSE_NUM_THREADS);
820 OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
821 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
824 chunk_size = NULL_TREE;
825 if (clauses->chunk_size)
827 gfc_init_se (&se, NULL);
828 gfc_conv_expr (&se, clauses->chunk_size);
829 gfc_add_block_to_block (block, &se.pre);
830 chunk_size = gfc_evaluate_now (se.expr, block);
831 gfc_add_block_to_block (block, &se.post);
834 if (clauses->sched_kind != OMP_SCHED_NONE)
836 c = build_omp_clause (OMP_CLAUSE_SCHEDULE);
837 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
838 switch (clauses->sched_kind)
840 case OMP_SCHED_STATIC:
841 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
843 case OMP_SCHED_DYNAMIC:
844 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
846 case OMP_SCHED_GUIDED:
847 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
849 case OMP_SCHED_RUNTIME:
850 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
853 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
858 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
861 if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
863 c = build_omp_clause (OMP_CLAUSE_DEFAULT);
864 switch (clauses->default_sharing)
866 case OMP_DEFAULT_NONE:
867 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
869 case OMP_DEFAULT_SHARED:
870 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
872 case OMP_DEFAULT_PRIVATE:
873 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
875 case OMP_DEFAULT_FIRSTPRIVATE:
876 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
881 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
/* The conditions guarding NOWAIT and UNTIED are on elided lines.  */
886 c = build_omp_clause (OMP_CLAUSE_NOWAIT);
887 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
890 if (clauses->ordered)
892 c = build_omp_clause (OMP_CLAUSE_ORDERED);
893 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
898 c = build_omp_clause (OMP_CLAUSE_UNTIED);
899 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
902 if (clauses->collapse)
904 c = build_omp_clause (OMP_CLAUSE_COLLAPSE);
905 OMP_CLAUSE_COLLAPSE_EXPR (c) = build_int_cst (NULL, clauses->collapse);
906 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
/* NOTE(review): return type, pushlevel call and final return are elided.
   Translate CODE and guarantee the result is wrapped in a BIND_EXPR
   (creating one, with its own scope level, when gfc_trans_code did not
   already produce one); FORCE_EMPTY forces the wrap even for an empty
   statement.  */
912 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
915 gfc_trans_omp_code (gfc_code *code, bool force_empty)
920 stmt = gfc_trans_code (code);
921 if (TREE_CODE (stmt) != BIND_EXPR)
923 if (!IS_EMPTY_STMT (stmt) || force_empty)
925 tree block = poplevel (1, 0, 0);
926 stmt = build3_v (BIND_EXPR, NULL, stmt, block);
937 static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
938 static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);
/* NOTE(review): many interior lines are elided (return type, several
   declarations incl. expr2/var/lse/rse, most op-mapping case labels,
   the intrinsic id case labels around line 1035, and various break
   statements).  Documented from the surviving lines only.
   Translate !$OMP ATOMIC: the statement must be a single assignment
   var = var <op> expr (or expr <op> var, or an intrinsic like
   MIN/MAX/IAND applied to var), which is emitted as an OMP_ATOMIC node
   operating through the address of the LHS.  */
941 gfc_trans_omp_atomic (gfc_code *code)
948 tree lhsaddr, type, rhs, x;
949 enum tree_code op = ERROR_MARK;
950 bool var_on_left = false;
952 code = code->block->next;
953 gcc_assert (code->op == EXEC_ASSIGN);
954 gcc_assert (code->next == NULL);
955 var = code->expr1->symtree->n.sym;
957 gfc_init_se (&lse, NULL);
958 gfc_init_se (&rse, NULL);
959 gfc_start_block (&block);
961 gfc_conv_expr (&lse, code->expr1);
962 gfc_add_block_to_block (&block, &lse.pre);
963 type = TREE_TYPE (lse.expr);
964 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
/* Strip an implicit type-conversion wrapper around the RHS.  */
967 if (expr2->expr_type == EXPR_FUNCTION
968 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
969 expr2 = expr2->value.function.actual->expr;
971 if (expr2->expr_type == EXPR_OP)
974 switch (expr2->value.op.op)
979 case INTRINSIC_TIMES:
982 case INTRINSIC_MINUS:
985 case INTRINSIC_DIVIDE:
986 if (expr2->ts.type == BT_INTEGER)
992 op = TRUTH_ANDIF_EXPR;
995 op = TRUTH_ORIF_EXPR;
1000 case INTRINSIC_NEQV:
/* Determine which operand is VAR; the other becomes the atomic RHS.  */
1006 e = expr2->value.op.op1;
1007 if (e->expr_type == EXPR_FUNCTION
1008 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
1009 e = e->value.function.actual->expr;
1010 if (e->expr_type == EXPR_VARIABLE
1011 && e->symtree != NULL
1012 && e->symtree->n.sym == var)
1014 expr2 = expr2->value.op.op2;
1019 e = expr2->value.op.op2;
1020 if (e->expr_type == EXPR_FUNCTION
1021 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
1022 e = e->value.function.actual->expr;
1023 gcc_assert (e->expr_type == EXPR_VARIABLE
1024 && e->symtree != NULL
1025 && e->symtree->n.sym == var);
1026 expr2 = expr2->value.op.op1;
1027 var_on_left = false;
1029 gfc_conv_expr (&rse, expr2);
1030 gfc_add_block_to_block (&block, &rse.pre);
/* Intrinsic-function form, e.g. var = MAX (var, expr, ...); the isym
   id cases selecting OP are elided here.  */
1034 gcc_assert (expr2->expr_type == EXPR_FUNCTION);
1035 switch (expr2->value.function.isym->id)
1055 e = expr2->value.function.actual->expr;
1056 gcc_assert (e->expr_type == EXPR_VARIABLE
1057 && e->symtree != NULL
1058 && e->symtree->n.sym == var);
1060 gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
1061 gfc_add_block_to_block (&block, &rse.pre);
/* Fold extra intrinsic arguments into one accumulator first, so the
   atomic operation itself is binary.  */
1062 if (expr2->value.function.actual->next->next != NULL)
1064 tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
1065 gfc_actual_arglist *arg;
1067 gfc_add_modify (&block, accum, rse.expr);
1068 for (arg = expr2->value.function.actual->next->next; arg;
1071 gfc_init_block (&rse.pre);
1072 gfc_conv_expr (&rse, arg->expr);
1073 gfc_add_block_to_block (&block, &rse.pre);
1074 x = fold_build2 (op, TREE_TYPE (accum), accum, rse.expr);
1075 gfc_add_modify (&block, accum, x);
1081 expr2 = expr2->value.function.actual->next->expr;
1084 lhsaddr = save_expr (lhsaddr);
1085 rhs = gfc_evaluate_now (rse.expr, &block);
1086 x = convert (TREE_TYPE (rhs), build_fold_indirect_ref (lhsaddr));
/* Operand order matters for non-commutative ops (-, /).  */
1089 x = fold_build2 (op, TREE_TYPE (rhs), x, rhs);
1091 x = fold_build2 (op, TREE_TYPE (rhs), rhs, x);
1093 if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
1094 && TREE_CODE (type) != COMPLEX_TYPE)
1095 x = fold_build1 (REALPART_EXPR, TREE_TYPE (TREE_TYPE (rhs)), x);
1097 x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
1098 gfc_add_expr_to_block (&block, x);
1100 gfc_add_block_to_block (&block, &lse.pre);
1101 gfc_add_block_to_block (&block, &rse.pre);
1103 return gfc_finish_block (&block);
/* NOTE(review): return type line and braces are elided.
   !$OMP BARRIER: emit a call to the libgomp barrier builtin.  */
1107 gfc_trans_omp_barrier (void)
1109 tree decl = built_in_decls [BUILT_IN_GOMP_BARRIER];
1110 return build_call_expr (decl, 0);
/* NOTE(review): return type line and braces are elided.
   !$OMP CRITICAL [(name)]: translate the body and wrap it in an
   OMP_CRITICAL node carrying the optional region name.  */
1114 gfc_trans_omp_critical (gfc_code *code)
1116 tree name = NULL_TREE, stmt;
1117 if (code->ext.omp_name != NULL)
1118 name = get_identifier (code->ext.omp_name);
1119 stmt = gfc_trans_code (code->block->next);
1120 return build2 (OMP_CRITICAL, void_type_node, stmt, name);
/* NOTE(review): many interior lines are elided (return type, several
   declarations incl. se/n/simple/dovar_found, pushlevel calls, parts of
   the simple-step handling, the dovar_found bookkeeping, and pieces of
   the lastprivate search).  Documented from the surviving lines only.
   Translate !$OMP DO (possibly COLLAPSE(n) nested DO loops) into an
   OMP_FOR node: for each collapsed level, evaluate the iterator bounds
   and step, build init/cond/incr vectors (using a synthetic 'count'
   variable when the step is not +/-1), add PRIVATE/LASTPRIVATE clauses
   for the do-variables as needed, then emit the loop body with a cycle
   label.  */
1124 gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
1125 gfc_omp_clauses *do_clauses, tree par_clauses)
1128 tree dovar, stmt, from, to, step, type, init, cond, incr;
1129 tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
1132 gfc_omp_clauses *clauses = code->ext.omp_clauses;
1133 gfc_code *outermost;
1134 int i, collapse = clauses->collapse;
1135 tree dovar_init = NULL_TREE;
1140 outermost = code = code->block->next;
1141 gcc_assert (code->op == EXEC_DO);
1143 init = make_tree_vec (collapse);
1144 cond = make_tree_vec (collapse);
1145 incr = make_tree_vec (collapse);
1149 gfc_start_block (&block);
1153 omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
1155 for (i = 0; i < collapse; i++)
1158 int dovar_found = 0;
/* Check whether the do-variable already appears in a LASTPRIVATE or
   PRIVATE clause of this construct.  */
1163 for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
1165 if (code->ext.iterator->var->symtree->n.sym == n->sym)
1170 for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
1171 if (code->ext.iterator->var->symtree->n.sym == n->sym)
1177 /* Evaluate all the expressions in the iterator. */
1178 gfc_init_se (&se, NULL);
1179 gfc_conv_expr_lhs (&se, code->ext.iterator->var)
1180 gfc_add_block_to_block (pblock, &se.pre);
1182 type = TREE_TYPE (dovar);
1183 gcc_assert (TREE_CODE (type) == INTEGER_TYPE);
1185 gfc_init_se (&se, NULL);
1186 gfc_conv_expr_val (&se, code->ext.iterator->start);
1187 gfc_add_block_to_block (pblock, &se.pre);
1188 from = gfc_evaluate_now (se.expr, pblock);
1190 gfc_init_se (&se, NULL);
1191 gfc_conv_expr_val (&se, code->ext.iterator->end);
1192 gfc_add_block_to_block (pblock, &se.pre);
1193 to = gfc_evaluate_now (se.expr, pblock);
1195 gfc_init_se (&se, NULL);
1196 gfc_conv_expr_val (&se, code->ext.iterator->step);
1197 gfc_add_block_to_block (pblock, &se.pre);
1198 step = gfc_evaluate_now (se.expr, pblock);
1200 /* Special case simple loops. */
1201 if (integer_onep (step))
1203 else if (tree_int_cst_equal (step, integer_minus_one_node))
/* Step +/-1: iterate on DOVAR directly.  */
1209 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
1210 TREE_VEC_ELT (cond, i) = fold_build2 (simple > 0 ? LE_EXPR : GE_EXPR,
1211 boolean_type_node, dovar, to);
1212 TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, dovar, step);
1213 TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type, dovar,
1214 TREE_VEC_ELT (incr, i));
1218 /* STEP is not 1 or -1. Use:
1219 for (count = 0; count < (to + step - from) / step; count++)
1221 dovar = from + count * step;
1225 tmp = fold_build2 (MINUS_EXPR, type, step, from);
1226 tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
1227 tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
1228 tmp = gfc_evaluate_now (tmp, pblock);
1229 count = gfc_create_var (type, "count");
1230 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
1231 build_int_cst (type, 0));
1232 TREE_VEC_ELT (cond, i) = fold_build2 (LT_EXPR, boolean_type_node,
1234 TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, count,
1235 build_int_cst (type, 1));
1236 TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type,
1237 count, TREE_VEC_ELT (incr, i));
1239 /* Initialize DOVAR. */
1240 tmp = fold_build2 (MULT_EXPR, type, count, step);
1241 tmp = fold_build2 (PLUS_EXPR, type, from, tmp);
1242 dovar_init = tree_cons (dovar, tmp, dovar_init);
/* Do-variable not named in any clause: add an explicit PRIVATE.  */
1247 tmp = build_omp_clause (OMP_CLAUSE_PRIVATE);
1248 OMP_CLAUSE_DECL (tmp) = dovar;
1249 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
1251 else if (dovar_found == 2)
1258 /* If dovar is lastprivate, but different counter is used,
1259 dovar += step needs to be added to
1260 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
1261 will have the value on entry of the last loop, rather
1262 than value after iterator increment. */
1263 tmp = gfc_evaluate_now (step, pblock);
1264 tmp = fold_build2 (PLUS_EXPR, type, dovar, tmp);
1265 tmp = fold_build2 (MODIFY_EXPR, type, dovar, tmp);
1266 for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1267 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1268 && OMP_CLAUSE_DECL (c) == dovar)
1270 OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
/* Not on this construct's clauses: look on the enclosing PARALLEL's
   clauses and move the lastprivate handling here.  */
1274 if (c == NULL && par_clauses != NULL)
1276 for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1277 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1278 && OMP_CLAUSE_DECL (c) == dovar)
1280 tree l = build_omp_clause (OMP_CLAUSE_LASTPRIVATE);
1281 OMP_CLAUSE_DECL (l) = dovar;
1282 OMP_CLAUSE_CHAIN (l) = omp_clauses;
1283 OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
1285 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
1289 gcc_assert (simple || c != NULL);
/* The synthetic 'count' variable is always private to the loop.  */
1293 tmp = build_omp_clause (OMP_CLAUSE_PRIVATE);
1294 OMP_CLAUSE_DECL (tmp) = count;
1295 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
1298 if (i + 1 < collapse)
1299 code = code->block->next;
1302 if (pblock != &block)
1305 gfc_start_block (&block);
1308 gfc_start_block (&body);
/* Emit the deferred DOVAR = from + count * step assignments, in loop
   order, at the top of the body.  */
1310 dovar_init = nreverse (dovar_init);
1313 gfc_add_modify (&body, TREE_PURPOSE (dovar_init),
1314 TREE_VALUE (dovar_init));
1315 dovar_init = TREE_CHAIN (dovar_init);
1318 /* Cycle statement is implemented with a goto. Exit statement must not be
1319 present for this loop. */
1320 cycle_label = gfc_build_label_decl (NULL_TREE);
1322 /* Put these labels where they can be found later. We put the
1323 labels in a TREE_LIST node (because TREE_CHAIN is already
1324 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
1325 label in TREE_VALUE (backend_decl). */
1327 code->block->backend_decl = tree_cons (cycle_label, NULL, NULL);
1329 /* Main loop body. */
1330 tmp = gfc_trans_omp_code (code->block->next, true);
1331 gfc_add_expr_to_block (&body, tmp);
1333 /* Label for cycle statements (if needed). */
1334 if (TREE_USED (cycle_label))
1336 tmp = build1_v (LABEL_EXPR, cycle_label);
1337 gfc_add_expr_to_block (&body, tmp);
1340 /* End of loop body. */
1341 stmt = make_node (OMP_FOR);
1343 TREE_TYPE (stmt) = void_type_node;
1344 OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
1345 OMP_FOR_CLAUSES (stmt) = omp_clauses;
1346 OMP_FOR_INIT (stmt) = init;
1347 OMP_FOR_COND (stmt) = cond;
1348 OMP_FOR_INCR (stmt) = incr;
1349 gfc_add_expr_to_block (&block, stmt);
1351 return gfc_finish_block (&block);
1355 gfc_trans_omp_flush (void)
1357 tree decl = built_in_decls [BUILT_IN_SYNCHRONIZE];
1358 return build_call_expr (decl, 0);
1362 gfc_trans_omp_master (gfc_code *code)
1364 tree stmt = gfc_trans_code (code->block->next);
1365 if (IS_EMPTY_STMT (stmt))
1367 return build1_v (OMP_MASTER, stmt);
1371 gfc_trans_omp_ordered (gfc_code *code)
1373 return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
1377 gfc_trans_omp_parallel (gfc_code *code)
1380 tree stmt, omp_clauses;
1382 gfc_start_block (&block);
1383 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1385 stmt = gfc_trans_omp_code (code->block->next, true);
1386 stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
1387 gfc_add_expr_to_block (&block, stmt);
1388 return gfc_finish_block (&block);
1392 gfc_trans_omp_parallel_do (gfc_code *code)
1394 stmtblock_t block, *pblock = NULL;
1395 gfc_omp_clauses parallel_clauses, do_clauses;
1396 tree stmt, omp_clauses = NULL_TREE;
1398 gfc_start_block (&block);
1400 memset (&do_clauses, 0, sizeof (do_clauses));
1401 if (code->ext.omp_clauses != NULL)
1403 memcpy (¶llel_clauses, code->ext.omp_clauses,
1404 sizeof (parallel_clauses));
1405 do_clauses.sched_kind = parallel_clauses.sched_kind;
1406 do_clauses.chunk_size = parallel_clauses.chunk_size;
1407 do_clauses.ordered = parallel_clauses.ordered;
1408 do_clauses.collapse = parallel_clauses.collapse;
1409 parallel_clauses.sched_kind = OMP_SCHED_NONE;
1410 parallel_clauses.chunk_size = NULL;
1411 parallel_clauses.ordered = false;
1412 parallel_clauses.collapse = 0;
1413 omp_clauses = gfc_trans_omp_clauses (&block, ¶llel_clauses,
1416 do_clauses.nowait = true;
1417 if (!do_clauses.ordered && do_clauses.sched_kind != OMP_SCHED_STATIC)
1421 stmt = gfc_trans_omp_do (code, pblock, &do_clauses, omp_clauses);
1422 if (TREE_CODE (stmt) != BIND_EXPR)
1423 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
1426 stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
1427 OMP_PARALLEL_COMBINED (stmt) = 1;
1428 gfc_add_expr_to_block (&block, stmt);
1429 return gfc_finish_block (&block);
1433 gfc_trans_omp_parallel_sections (gfc_code *code)
1436 gfc_omp_clauses section_clauses;
1437 tree stmt, omp_clauses;
1439 memset (§ion_clauses, 0, sizeof (section_clauses));
1440 section_clauses.nowait = true;
1442 gfc_start_block (&block);
1443 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1446 stmt = gfc_trans_omp_sections (code, §ion_clauses);
1447 if (TREE_CODE (stmt) != BIND_EXPR)
1448 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
1451 stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
1452 OMP_PARALLEL_COMBINED (stmt) = 1;
1453 gfc_add_expr_to_block (&block, stmt);
1454 return gfc_finish_block (&block);
1458 gfc_trans_omp_parallel_workshare (gfc_code *code)
1461 gfc_omp_clauses workshare_clauses;
1462 tree stmt, omp_clauses;
1464 memset (&workshare_clauses, 0, sizeof (workshare_clauses));
1465 workshare_clauses.nowait = true;
1467 gfc_start_block (&block);
1468 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1471 stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
1472 if (TREE_CODE (stmt) != BIND_EXPR)
1473 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
1476 stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
1477 OMP_PARALLEL_COMBINED (stmt) = 1;
1478 gfc_add_expr_to_block (&block, stmt);
1479 return gfc_finish_block (&block);
1483 gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
1485 stmtblock_t block, body;
1486 tree omp_clauses, stmt;
1487 bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;
1489 gfc_start_block (&block);
1491 omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);
1493 gfc_init_block (&body);
1494 for (code = code->block; code; code = code->block)
1496 /* Last section is special because of lastprivate, so even if it
1497 is empty, chain it in. */
1498 stmt = gfc_trans_omp_code (code->next,
1499 has_lastprivate && code->block == NULL);
1500 if (! IS_EMPTY_STMT (stmt))
1502 stmt = build1_v (OMP_SECTION, stmt);
1503 gfc_add_expr_to_block (&body, stmt);
1506 stmt = gfc_finish_block (&body);
1508 stmt = build2 (OMP_SECTIONS, void_type_node, stmt, omp_clauses);
1509 gfc_add_expr_to_block (&block, stmt);
1511 return gfc_finish_block (&block);
1515 gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
1517 tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
1518 tree stmt = gfc_trans_omp_code (code->block->next, true);
1519 stmt = build2 (OMP_SINGLE, void_type_node, stmt, omp_clauses);
1524 gfc_trans_omp_task (gfc_code *code)
1527 tree stmt, omp_clauses;
1529 gfc_start_block (&block);
1530 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1532 stmt = gfc_trans_omp_code (code->block->next, true);
1533 stmt = build2 (OMP_TASK, void_type_node, stmt, omp_clauses);
1534 gfc_add_expr_to_block (&block, stmt);
1535 return gfc_finish_block (&block);
1539 gfc_trans_omp_taskwait (void)
1541 tree decl = built_in_decls [BUILT_IN_GOMP_TASKWAIT];
1542 return build_call_expr (decl, 0);
1546 gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
1548 tree res, tmp, stmt;
1549 stmtblock_t block, *pblock = NULL;
1550 stmtblock_t singleblock;
1551 int saved_ompws_flags;
1552 bool singleblock_in_progress = false;
1553 /* True if previous gfc_code in workshare construct is not workshared. */
1554 bool prev_singleunit;
1556 code = code->block->next;
1561 return build_empty_stmt ();
1563 gfc_start_block (&block);
1566 ompws_flags = OMPWS_WORKSHARE_FLAG;
1567 prev_singleunit = false;
1569 /* Translate statements one by one to trees until we reach
1570 the end of the workshare construct. Adjacent gfc_codes that
1571 are a single unit of work are clustered and encapsulated in a
1572 single OMP_SINGLE construct. */
1573 for (; code; code = code->next)
1575 if (code->here != 0)
1577 res = gfc_trans_label_here (code);
1578 gfc_add_expr_to_block (pblock, res);
1581 /* No dependence analysis, use for clauses with wait.
1582 If this is the last gfc_code, use default omp_clauses. */
1583 if (code->next == NULL && clauses->nowait)
1584 ompws_flags |= OMPWS_NOWAIT;
1586 /* By default, every gfc_code is a single unit of work. */
1587 ompws_flags |= OMPWS_CURR_SINGLEUNIT;
1588 ompws_flags &= ~OMPWS_SCALARIZER_WS;
1597 res = gfc_trans_assign (code);
1600 case EXEC_POINTER_ASSIGN:
1601 res = gfc_trans_pointer_assign (code);
1604 case EXEC_INIT_ASSIGN:
1605 res = gfc_trans_init_assign (code);
1609 res = gfc_trans_forall (code);
1613 res = gfc_trans_where (code);
1616 case EXEC_OMP_ATOMIC:
1617 res = gfc_trans_omp_directive (code);
1620 case EXEC_OMP_PARALLEL:
1621 case EXEC_OMP_PARALLEL_DO:
1622 case EXEC_OMP_PARALLEL_SECTIONS:
1623 case EXEC_OMP_PARALLEL_WORKSHARE:
1624 case EXEC_OMP_CRITICAL:
1625 saved_ompws_flags = ompws_flags;
1627 res = gfc_trans_omp_directive (code);
1628 ompws_flags = saved_ompws_flags;
1632 internal_error ("gfc_trans_omp_workshare(): Bad statement code");
1635 gfc_set_backend_locus (&code->loc);
1637 if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
1639 if (TREE_CODE (res) == STATEMENT_LIST)
1640 tree_annotate_all_with_location (&res, input_location);
1642 SET_EXPR_LOCATION (res, input_location);
1644 if (prev_singleunit)
1646 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
1647 /* Add current gfc_code to single block. */
1648 gfc_add_expr_to_block (&singleblock, res);
1651 /* Finish single block and add it to pblock. */
1652 tmp = gfc_finish_block (&singleblock);
1653 tmp = build2 (OMP_SINGLE, void_type_node, tmp, NULL_TREE);
1654 gfc_add_expr_to_block (pblock, tmp);
1655 /* Add current gfc_code to pblock. */
1656 gfc_add_expr_to_block (pblock, res);
1657 singleblock_in_progress = false;
1662 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
1664 /* Start single block. */
1665 gfc_init_block (&singleblock);
1666 gfc_add_expr_to_block (&singleblock, res);
1667 singleblock_in_progress = true;
1670 /* Add the new statement to the block. */
1671 gfc_add_expr_to_block (pblock, res);
1673 prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
1677 /* Finish remaining SINGLE block, if we were in the middle of one. */
1678 if (singleblock_in_progress)
1680 /* Finish single block and add it to pblock. */
1681 tmp = gfc_finish_block (&singleblock);
1682 tmp = build2 (OMP_SINGLE, void_type_node, tmp,
1684 ? build_omp_clause (OMP_CLAUSE_NOWAIT) : NULL_TREE);
1685 gfc_add_expr_to_block (pblock, tmp);
1688 stmt = gfc_finish_block (pblock);
1689 if (TREE_CODE (stmt) != BIND_EXPR)
1691 if (!IS_EMPTY_STMT (stmt))
1693 tree bindblock = poplevel (1, 0, 0);
1694 stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
1707 gfc_trans_omp_directive (gfc_code *code)
1711 case EXEC_OMP_ATOMIC:
1712 return gfc_trans_omp_atomic (code);
1713 case EXEC_OMP_BARRIER:
1714 return gfc_trans_omp_barrier ();
1715 case EXEC_OMP_CRITICAL:
1716 return gfc_trans_omp_critical (code);
1718 return gfc_trans_omp_do (code, NULL, code->ext.omp_clauses, NULL);
1719 case EXEC_OMP_FLUSH:
1720 return gfc_trans_omp_flush ();
1721 case EXEC_OMP_MASTER:
1722 return gfc_trans_omp_master (code);
1723 case EXEC_OMP_ORDERED:
1724 return gfc_trans_omp_ordered (code);
1725 case EXEC_OMP_PARALLEL:
1726 return gfc_trans_omp_parallel (code);
1727 case EXEC_OMP_PARALLEL_DO:
1728 return gfc_trans_omp_parallel_do (code);
1729 case EXEC_OMP_PARALLEL_SECTIONS:
1730 return gfc_trans_omp_parallel_sections (code);
1731 case EXEC_OMP_PARALLEL_WORKSHARE:
1732 return gfc_trans_omp_parallel_workshare (code);
1733 case EXEC_OMP_SECTIONS:
1734 return gfc_trans_omp_sections (code, code->ext.omp_clauses);
1735 case EXEC_OMP_SINGLE:
1736 return gfc_trans_omp_single (code, code->ext.omp_clauses);
1738 return gfc_trans_omp_task (code);
1739 case EXEC_OMP_TASKWAIT:
1740 return gfc_trans_omp_taskwait ();
1741 case EXEC_OMP_WORKSHARE:
1742 return gfc_trans_omp_workshare (code, code->ext.omp_clauses);