diff --git a/gcc/fortran/trans-openmp.c b/gcc/fortran/trans-openmp.c
index 49368661591..4d461cfa488 100644
--- a/gcc/fortran/trans-openmp.c
+++ b/gcc/fortran/trans-openmp.c
@@ -1,12 +1,12 @@
 /* OpenMP directive translation -- generate GCC trees from gfc_code.
-   Copyright (C) 2005, 2006 Free Software Foundation, Inc.
+   Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
    Contributed by Jakub Jelinek <jakub@redhat.com>
 
 This file is part of GCC.
 
 GCC is free software; you can redistribute it and/or modify it under
 the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
 version.
 
 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
@@ -15,16 +15,15 @@ FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 for more details.
 
 You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING.  If not, write to the Free
-Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
-02110-1301, USA.  */
+along with GCC; see the file COPYING3.  If not see
+<http://www.gnu.org/licenses/>.  */
 
 #include "config.h"
 #include "system.h"
 #include "coretypes.h"
 #include "tree.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "ggc.h"
 #include "toplev.h"
 #include "real.h"
@@ -36,23 +35,28 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
 #include "trans-const.h"
 #include "arith.h"
 
+int ompws_flags;
 
 /* True if OpenMP should privatize what this DECL points to rather
    than the DECL itself.  */
 
 bool
-gfc_omp_privatize_by_reference (tree decl)
+gfc_omp_privatize_by_reference (const_tree decl)
 {
   tree type = TREE_TYPE (decl);
 
-  if (TREE_CODE (type) == REFERENCE_TYPE)
+  if (TREE_CODE (type) == REFERENCE_TYPE
+      && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
     return true;
 
   if (TREE_CODE (type) == POINTER_TYPE)
     {
-      /* POINTER/ALLOCATABLE have aggregate types, all user variables
-         that have POINTER_TYPE type are supposed to be privatized
-         by reference.  */
+      /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
+         that have POINTER_TYPE type and don't have GFC_POINTER_TYPE_P
+         set are supposed to be privatized by reference.  */
+      if (GFC_POINTER_TYPE_P (type))
+        return false;
+
       if (!DECL_ARTIFICIAL (decl))
         return true;
 
@@ -81,6 +85,17 @@ gfc_omp_predetermined_sharing (tree decl)
   if (GFC_DECL_CRAY_POINTEE (decl))
     return OMP_CLAUSE_DEFAULT_PRIVATE;
 
+  /* Assumed-size arrays are predetermined to inherit sharing
+     attributes of the associated actual argument, which is shared
+     for all we care.  */
+  if (TREE_CODE (decl) == PARM_DECL
+      && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
+      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
+      && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
+                                GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
+         == NULL)
+    return OMP_CLAUSE_DEFAULT_SHARED;
+
   /* COMMON and EQUIVALENCE decls are shared.  They
      are only referenced through DECL_VALUE_EXPR of the variables
     contained in them.
If those are privatized, they will not be @@ -95,27 +110,181 @@ gfc_omp_predetermined_sharing (tree decl) } +/* Return true if DECL in private clause needs + OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */ +bool +gfc_omp_private_outer_ref (tree decl) +{ + tree type = TREE_TYPE (decl); + + if (GFC_DESCRIPTOR_TYPE_P (type) + && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE) + return true; + + return false; +} + /* Return code to initialize DECL with its default constructor, or NULL if there's nothing to do. */ tree -gfc_omp_clause_default_ctor (tree clause ATTRIBUTE_UNUSED, tree decl) +gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer) { - tree type = TREE_TYPE (decl); - stmtblock_t block; + tree type = TREE_TYPE (decl), rank, size, esize, ptr, cond, then_b, else_b; + stmtblock_t block, cond_block; - if (! GFC_DESCRIPTOR_TYPE_P (type)) + if (! GFC_DESCRIPTOR_TYPE_P (type) + || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE) return NULL; + gcc_assert (outer != NULL); + gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE + || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE); + /* Allocatable arrays in PRIVATE clauses need to be set to - "not currently allocated" allocation status. */ - gfc_init_block (&block); + "not currently allocated" allocation status if outer + array is "not currently allocated", otherwise should be allocated. */ + gfc_start_block (&block); - gfc_conv_descriptor_data_set_tuples (&block, decl, null_pointer_node); + gfc_init_block (&cond_block); + + gfc_add_modify (&cond_block, decl, outer); + rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1]; + size = gfc_conv_descriptor_ubound_get (decl, rank); + size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size, + gfc_conv_descriptor_lbound_get (decl, rank)); + size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size, + gfc_index_one_node); + if (GFC_TYPE_ARRAY_RANK (type) > 1) + size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, + gfc_conv_descriptor_stride_get (decl, rank)); + esize = fold_convert (gfc_array_index_type, + TYPE_SIZE_UNIT (gfc_get_element_type (type))); + size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize); + size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block); + ptr = gfc_allocate_array_with_status (&cond_block, + build_int_cst (pvoid_type_node, 0), + size, NULL, NULL); + gfc_conv_descriptor_data_set (&cond_block, decl, ptr); + then_b = gfc_finish_block (&cond_block); + + gfc_init_block (&cond_block); + gfc_conv_descriptor_data_set (&cond_block, decl, null_pointer_node); + else_b = gfc_finish_block (&cond_block); + + cond = fold_build2 (NE_EXPR, boolean_type_node, + fold_convert (pvoid_type_node, + gfc_conv_descriptor_data_get (outer)), + null_pointer_node); + gfc_add_expr_to_block (&block, build3 (COND_EXPR, void_type_node, + cond, then_b, else_b)); return gfc_finish_block (&block); } +/* Build and return code for a copy constructor from SRC to DEST. */ + +tree +gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src) +{ + tree type = TREE_TYPE (dest), ptr, size, esize, rank, call; + stmtblock_t block; + + if (! GFC_DESCRIPTOR_TYPE_P (type) + || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE) + return build2_v (MODIFY_EXPR, dest, src); + + gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE); + + /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated + and copied from SRC. 
*/ + gfc_start_block (&block); + + gfc_add_modify (&block, dest, src); + rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1]; + size = gfc_conv_descriptor_ubound_get (dest, rank); + size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size, + gfc_conv_descriptor_lbound_get (dest, rank)); + size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size, + gfc_index_one_node); + if (GFC_TYPE_ARRAY_RANK (type) > 1) + size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, + gfc_conv_descriptor_stride_get (dest, rank)); + esize = fold_convert (gfc_array_index_type, + TYPE_SIZE_UNIT (gfc_get_element_type (type))); + size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize); + size = gfc_evaluate_now (fold_convert (size_type_node, size), &block); + ptr = gfc_allocate_array_with_status (&block, + build_int_cst (pvoid_type_node, 0), + size, NULL, NULL); + gfc_conv_descriptor_data_set (&block, dest, ptr); + call = build_call_expr_loc (input_location, + built_in_decls[BUILT_IN_MEMCPY], 3, ptr, + fold_convert (pvoid_type_node, + gfc_conv_descriptor_data_get (src)), + size); + gfc_add_expr_to_block (&block, fold_convert (void_type_node, call)); + + return gfc_finish_block (&block); +} + +/* Similarly, except use an assignment operator instead. */ + +tree +gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED, tree dest, tree src) +{ + tree type = TREE_TYPE (dest), rank, size, esize, call; + stmtblock_t block; + + if (! GFC_DESCRIPTOR_TYPE_P (type) + || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE) + return build2_v (MODIFY_EXPR, dest, src); + + /* Handle copying allocatable arrays. */ + gfc_start_block (&block); + + rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1]; + size = gfc_conv_descriptor_ubound_get (dest, rank); + size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size, + gfc_conv_descriptor_lbound_get (dest, rank)); + size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size, + gfc_index_one_node); + if (GFC_TYPE_ARRAY_RANK (type) > 1) + size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, + gfc_conv_descriptor_stride_get (dest, rank)); + esize = fold_convert (gfc_array_index_type, + TYPE_SIZE_UNIT (gfc_get_element_type (type))); + size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize); + size = gfc_evaluate_now (fold_convert (size_type_node, size), &block); + call = build_call_expr_loc (input_location, + built_in_decls[BUILT_IN_MEMCPY], 3, + fold_convert (pvoid_type_node, + gfc_conv_descriptor_data_get (dest)), + fold_convert (pvoid_type_node, + gfc_conv_descriptor_data_get (src)), + size); + gfc_add_expr_to_block (&block, fold_convert (void_type_node, call)); + + return gfc_finish_block (&block); +} + +/* Build and return code destructing DECL. Return NULL if nothing + to be done. */ + +tree +gfc_omp_clause_dtor (tree clause ATTRIBUTE_UNUSED, tree decl) +{ + tree type = TREE_TYPE (decl); + + if (! GFC_DESCRIPTOR_TYPE_P (type) + || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE) + return NULL; + + /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need + to be deallocated if they were allocated. 
*/ + return gfc_trans_dealloc_allocated (decl); +} + /* Return true if DECL's DECL_VALUE_EXPR (if any) should be disregarded in OpenMP construct, because it is going to be @@ -269,7 +438,7 @@ gfc_trans_omp_variable_list (enum omp_clause_code code, gfc_namelist *namelist, tree t = gfc_trans_omp_variable (namelist->sym); if (t != error_mark_node) { - tree node = build_omp_clause (code); + tree node = build_omp_clause (input_location, code); OMP_CLAUSE_DECL (node) = t; list = gfc_trans_add_clause (node, list); } @@ -288,7 +457,7 @@ gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where) tree decl, backend_decl, stmt; locus old_loc = gfc_current_locus; const char *iname; - try t; + gfc_try t; decl = OMP_CLAUSE_DECL (c); gfc_current_locus = where; @@ -332,6 +501,7 @@ gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where) e1->symtree = symtree1; e1->ts = sym->ts; e1->ref = ref = gfc_get_ref (); + ref->type = REF_ARRAY; ref->u.ar.where = where; ref->u.ar.as = sym->as; ref->u.ar.type = AR_FULL; @@ -426,7 +596,39 @@ gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where) /* Create the init statement list. */ pushlevel (0); - stmt = gfc_trans_assignment (e1, e2, false); + if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)) + && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE) + { + /* If decl is an allocatable array, it needs to be allocated + with the same bounds as the outer var. */ + tree type = TREE_TYPE (decl), rank, size, esize, ptr; + stmtblock_t block; + + gfc_start_block (&block); + + gfc_add_modify (&block, decl, outer_sym.backend_decl); + rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1]; + size = gfc_conv_descriptor_ubound_get (decl, rank); + size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size, + gfc_conv_descriptor_lbound_get (decl, rank)); + size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size, + gfc_index_one_node); + if (GFC_TYPE_ARRAY_RANK (type) > 1) + size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, + gfc_conv_descriptor_stride_get (decl, rank)); + esize = fold_convert (gfc_array_index_type, + TYPE_SIZE_UNIT (gfc_get_element_type (type))); + size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize); + size = gfc_evaluate_now (fold_convert (size_type_node, size), &block); + ptr = gfc_allocate_array_with_status (&block, + build_int_cst (pvoid_type_node, 0), + size, NULL, NULL); + gfc_conv_descriptor_data_set (&block, decl, ptr); + gfc_add_expr_to_block (&block, gfc_trans_assignment (e1, e2, false)); + stmt = gfc_finish_block (&block); + } + else + stmt = gfc_trans_assignment (e1, e2, false); if (TREE_CODE (stmt) != BIND_EXPR) stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0)); else @@ -435,7 +637,20 @@ gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where) /* Create the merge statement list. */ pushlevel (0); - stmt = gfc_trans_assignment (e3, e4, false); + if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)) + && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE) + { + /* If decl is an allocatable array, it needs to be deallocated + afterwards. 
*/ + stmtblock_t block; + + gfc_start_block (&block); + gfc_add_expr_to_block (&block, gfc_trans_assignment (e3, e4, false)); + gfc_add_expr_to_block (&block, gfc_trans_dealloc_allocated (decl)); + stmt = gfc_finish_block (&block); + } + else + stmt = gfc_trans_assignment (e3, e4, false); if (TREE_CODE (stmt) != BIND_EXPR) stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0)); else @@ -469,7 +684,8 @@ gfc_trans_omp_reduction_list (gfc_namelist *namelist, tree list, tree t = gfc_trans_omp_variable (namelist->sym); if (t != error_mark_node) { - tree node = build_omp_clause (OMP_CLAUSE_REDUCTION); + tree node = build_omp_clause (where.lb->location, + OMP_CLAUSE_REDUCTION); OMP_CLAUSE_DECL (node) = t; OMP_CLAUSE_REDUCTION_CODE (node) = reduction_code; if (namelist->sym->attr.dimension) @@ -588,7 +804,7 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses, if_var = gfc_evaluate_now (se.expr, block); gfc_add_block_to_block (block, &se.post); - c = build_omp_clause (OMP_CLAUSE_IF); + c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF); OMP_CLAUSE_IF_EXPR (c) = if_var; omp_clauses = gfc_trans_add_clause (c, omp_clauses); } @@ -603,7 +819,7 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses, num_threads = gfc_evaluate_now (se.expr, block); gfc_add_block_to_block (block, &se.post); - c = build_omp_clause (OMP_CLAUSE_NUM_THREADS); + c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_THREADS); OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads; omp_clauses = gfc_trans_add_clause (c, omp_clauses); } @@ -620,7 +836,7 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses, if (clauses->sched_kind != OMP_SCHED_NONE) { - c = build_omp_clause (OMP_CLAUSE_SCHEDULE); + c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE); OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size; switch (clauses->sched_kind) { @@ -636,6 +852,9 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses, case OMP_SCHED_RUNTIME: OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME; break; + case OMP_SCHED_AUTO: + OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO; + break; default: gcc_unreachable (); } @@ -644,7 +863,7 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses, if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN) { - c = build_omp_clause (OMP_CLAUSE_DEFAULT); + c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT); switch (clauses->default_sharing) { case OMP_DEFAULT_NONE: @@ -656,6 +875,9 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses, case OMP_DEFAULT_PRIVATE: OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE; break; + case OMP_DEFAULT_FIRSTPRIVATE: + OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE; + break; default: gcc_unreachable (); } @@ -664,13 +886,26 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses, if (clauses->nowait) { - c = build_omp_clause (OMP_CLAUSE_NOWAIT); + c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT); omp_clauses = gfc_trans_add_clause (c, omp_clauses); } if (clauses->ordered) { - c = build_omp_clause (OMP_CLAUSE_ORDERED); + c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED); + omp_clauses = gfc_trans_add_clause (c, omp_clauses); + } + + if (clauses->untied) + { + c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED); + omp_clauses = gfc_trans_add_clause (c, omp_clauses); + } + + if (clauses->collapse) + { + c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE); + 
OMP_CLAUSE_COLLAPSE_EXPR (c) = build_int_cst (NULL, clauses->collapse); omp_clauses = gfc_trans_add_clause (c, omp_clauses); } @@ -720,26 +955,26 @@ gfc_trans_omp_atomic (gfc_code *code) code = code->block->next; gcc_assert (code->op == EXEC_ASSIGN); gcc_assert (code->next == NULL); - var = code->expr->symtree->n.sym; + var = code->expr1->symtree->n.sym; gfc_init_se (&lse, NULL); gfc_init_se (&rse, NULL); gfc_start_block (&block); - gfc_conv_expr (&lse, code->expr); + gfc_conv_expr (&lse, code->expr1); gfc_add_block_to_block (&block, &lse.pre); type = TREE_TYPE (lse.expr); lhsaddr = gfc_build_addr_expr (NULL, lse.expr); expr2 = code->expr2; if (expr2->expr_type == EXPR_FUNCTION - && expr2->value.function.isym->generic_id == GFC_ISYM_CONVERSION) + && expr2->value.function.isym->id == GFC_ISYM_CONVERSION) expr2 = expr2->value.function.actual->expr; if (expr2->expr_type == EXPR_OP) { gfc_expr *e; - switch (expr2->value.op.operator) + switch (expr2->value.op.op) { case INTRINSIC_PLUS: op = PLUS_EXPR; @@ -773,7 +1008,7 @@ gfc_trans_omp_atomic (gfc_code *code) } e = expr2->value.op.op1; if (e->expr_type == EXPR_FUNCTION - && e->value.function.isym->generic_id == GFC_ISYM_CONVERSION) + && e->value.function.isym->id == GFC_ISYM_CONVERSION) e = e->value.function.actual->expr; if (e->expr_type == EXPR_VARIABLE && e->symtree != NULL @@ -786,7 +1021,7 @@ gfc_trans_omp_atomic (gfc_code *code) { e = expr2->value.op.op2; if (e->expr_type == EXPR_FUNCTION - && e->value.function.isym->generic_id == GFC_ISYM_CONVERSION) + && e->value.function.isym->id == GFC_ISYM_CONVERSION) e = e->value.function.actual->expr; gcc_assert (e->expr_type == EXPR_VARIABLE && e->symtree != NULL @@ -800,7 +1035,7 @@ gfc_trans_omp_atomic (gfc_code *code) else { gcc_assert (expr2->expr_type == EXPR_FUNCTION); - switch (expr2->value.function.isym->generic_id) + switch (expr2->value.function.isym->id) { case GFC_ISYM_MIN: op = MIN_EXPR; @@ -832,7 +1067,7 @@ gfc_trans_omp_atomic (gfc_code *code) tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL); gfc_actual_arglist *arg; - gfc_add_modify_stmt (&block, accum, rse.expr); + gfc_add_modify (&block, accum, rse.expr); for (arg = expr2->value.function.actual->next->next; arg; arg = arg->next) { @@ -840,7 +1075,7 @@ gfc_trans_omp_atomic (gfc_code *code) gfc_conv_expr (&rse, arg->expr); gfc_add_block_to_block (&block, &rse.pre); x = fold_build2 (op, TREE_TYPE (accum), accum, rse.expr); - gfc_add_modify_stmt (&block, accum, x); + gfc_add_modify (&block, accum, x); } rse.expr = accum; @@ -851,7 +1086,8 @@ gfc_trans_omp_atomic (gfc_code *code) lhsaddr = save_expr (lhsaddr); rhs = gfc_evaluate_now (rse.expr, &block); - x = convert (TREE_TYPE (rhs), build_fold_indirect_ref (lhsaddr)); + x = convert (TREE_TYPE (rhs), build_fold_indirect_ref_loc (input_location, + lhsaddr)); if (var_on_left) x = fold_build2 (op, TREE_TYPE (rhs), x, rhs); @@ -860,7 +1096,7 @@ gfc_trans_omp_atomic (gfc_code *code) if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE && TREE_CODE (type) != COMPLEX_TYPE) - x = build1 (REALPART_EXPR, TREE_TYPE (TREE_TYPE (rhs)), x); + x = fold_build1 (REALPART_EXPR, TREE_TYPE (TREE_TYPE (rhs)), x); x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x)); gfc_add_expr_to_block (&block, x); @@ -875,7 +1111,7 @@ static tree gfc_trans_omp_barrier (void) { tree decl = built_in_decls [BUILT_IN_GOMP_BARRIER]; - return build_call_expr (decl, 0); + return build_call_expr_loc (input_location, decl, 0); } static tree @@ -885,25 +1121,33 @@ gfc_trans_omp_critical (gfc_code *code) if 
(code->ext.omp_name != NULL) name = get_identifier (code->ext.omp_name); stmt = gfc_trans_code (code->block->next); - return build2_v (OMP_CRITICAL, stmt, name); + return build2 (OMP_CRITICAL, void_type_node, stmt, name); } static tree gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock, - gfc_omp_clauses *do_clauses) + gfc_omp_clauses *do_clauses, tree par_clauses) { gfc_se se; tree dovar, stmt, from, to, step, type, init, cond, incr; tree count = NULL_TREE, cycle_label, tmp, omp_clauses; stmtblock_t block; stmtblock_t body; - int simple = 0; - bool dovar_found = false; gfc_omp_clauses *clauses = code->ext.omp_clauses; + gfc_code *outermost; + int i, collapse = clauses->collapse; + tree dovar_init = NULL_TREE; - code = code->block->next; + if (collapse <= 0) + collapse = 1; + + outermost = code = code->block->next; gcc_assert (code->op == EXEC_DO); + init = make_tree_vec (collapse); + cond = make_tree_vec (collapse); + incr = make_tree_vec (collapse); + if (pblock == NULL) { gfc_start_block (&block); @@ -911,107 +1155,169 @@ gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock, } omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc); - if (clauses) - { - gfc_namelist *n; - for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL; n = n->next) - if (code->ext.iterator->var->symtree->n.sym == n->sym) - break; - if (n == NULL) - for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next) - if (code->ext.iterator->var->symtree->n.sym == n->sym) - break; - if (n != NULL) - dovar_found = true; - } - /* Evaluate all the expressions in the iterator. */ - gfc_init_se (&se, NULL); - gfc_conv_expr_lhs (&se, code->ext.iterator->var); - gfc_add_block_to_block (pblock, &se.pre); - dovar = se.expr; - type = TREE_TYPE (dovar); - gcc_assert (TREE_CODE (type) == INTEGER_TYPE); - - gfc_init_se (&se, NULL); - gfc_conv_expr_val (&se, code->ext.iterator->start); - gfc_add_block_to_block (pblock, &se.pre); - from = gfc_evaluate_now (se.expr, pblock); - - gfc_init_se (&se, NULL); - gfc_conv_expr_val (&se, code->ext.iterator->end); - gfc_add_block_to_block (pblock, &se.pre); - to = gfc_evaluate_now (se.expr, pblock); - - gfc_init_se (&se, NULL); - gfc_conv_expr_val (&se, code->ext.iterator->step); - gfc_add_block_to_block (pblock, &se.pre); - step = gfc_evaluate_now (se.expr, pblock); - - /* Special case simple loops. */ - if (integer_onep (step)) - simple = 1; - else if (tree_int_cst_equal (step, integer_minus_one_node)) - simple = -1; - - /* Loop body. */ - if (simple) + for (i = 0; i < collapse; i++) { - init = build2_v (GIMPLE_MODIFY_STMT, dovar, from); - cond = build2 (simple > 0 ? LE_EXPR : GE_EXPR, boolean_type_node, - dovar, to); - incr = fold_build2 (PLUS_EXPR, type, dovar, step); - incr = fold_build2 (GIMPLE_MODIFY_STMT, type, dovar, incr); - if (pblock != &block) + int simple = 0; + int dovar_found = 0; + + if (clauses) { - pushlevel (0); - gfc_start_block (&block); + gfc_namelist *n; + for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL; + n = n->next) + if (code->ext.iterator->var->symtree->n.sym == n->sym) + break; + if (n != NULL) + dovar_found = 1; + else if (n == NULL) + for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next) + if (code->ext.iterator->var->symtree->n.sym == n->sym) + break; + if (n != NULL) + dovar_found++; } - gfc_start_block (&body); - } - else - { - /* STEP is not 1 or -1. 
Use: - for (count = 0; count < (to + step - from) / step; count++) - { - dovar = from + count * step; - body; - cycle_label:; - } */ - tmp = fold_build2 (MINUS_EXPR, type, step, from); - tmp = fold_build2 (PLUS_EXPR, type, to, tmp); - tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step); - tmp = gfc_evaluate_now (tmp, pblock); - count = gfc_create_var (type, "count"); - init = build2_v (GIMPLE_MODIFY_STMT, count, build_int_cst (type, 0)); - cond = build2 (LT_EXPR, boolean_type_node, count, tmp); - incr = fold_build2 (PLUS_EXPR, type, count, build_int_cst (type, 1)); - incr = fold_build2 (GIMPLE_MODIFY_STMT, type, count, incr); - - if (pblock != &block) + + /* Evaluate all the expressions in the iterator. */ + gfc_init_se (&se, NULL); + gfc_conv_expr_lhs (&se, code->ext.iterator->var); + gfc_add_block_to_block (pblock, &se.pre); + dovar = se.expr; + type = TREE_TYPE (dovar); + gcc_assert (TREE_CODE (type) == INTEGER_TYPE); + + gfc_init_se (&se, NULL); + gfc_conv_expr_val (&se, code->ext.iterator->start); + gfc_add_block_to_block (pblock, &se.pre); + from = gfc_evaluate_now (se.expr, pblock); + + gfc_init_se (&se, NULL); + gfc_conv_expr_val (&se, code->ext.iterator->end); + gfc_add_block_to_block (pblock, &se.pre); + to = gfc_evaluate_now (se.expr, pblock); + + gfc_init_se (&se, NULL); + gfc_conv_expr_val (&se, code->ext.iterator->step); + gfc_add_block_to_block (pblock, &se.pre); + step = gfc_evaluate_now (se.expr, pblock); + + /* Special case simple loops. */ + if (integer_onep (step)) + simple = 1; + else if (tree_int_cst_equal (step, integer_minus_one_node)) + simple = -1; + + /* Loop body. */ + if (simple) + { + TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from); + TREE_VEC_ELT (cond, i) = fold_build2 (simple > 0 ? LE_EXPR : GE_EXPR, + boolean_type_node, dovar, to); + TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, dovar, step); + TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type, dovar, + TREE_VEC_ELT (incr, i)); + } + else + { + /* STEP is not 1 or -1. Use: + for (count = 0; count < (to + step - from) / step; count++) + { + dovar = from + count * step; + body; + cycle_label:; + } */ + tmp = fold_build2 (MINUS_EXPR, type, step, from); + tmp = fold_build2 (PLUS_EXPR, type, to, tmp); + tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step); + tmp = gfc_evaluate_now (tmp, pblock); + count = gfc_create_var (type, "count"); + TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count, + build_int_cst (type, 0)); + TREE_VEC_ELT (cond, i) = fold_build2 (LT_EXPR, boolean_type_node, + count, tmp); + TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, count, + build_int_cst (type, 1)); + TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type, + count, TREE_VEC_ELT (incr, i)); + + /* Initialize DOVAR. */ + tmp = fold_build2 (MULT_EXPR, type, count, step); + tmp = fold_build2 (PLUS_EXPR, type, from, tmp); + dovar_init = tree_cons (dovar, tmp, dovar_init); + } + + if (!dovar_found) + { + tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE); + OMP_CLAUSE_DECL (tmp) = dovar; + omp_clauses = gfc_trans_add_clause (tmp, omp_clauses); + } + else if (dovar_found == 2) + { + tree c = NULL; + + tmp = NULL; + if (!simple) + { + /* If dovar is lastprivate, but different counter is used, + dovar += step needs to be added to + OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar + will have the value on entry of the last loop, rather + than value after iterator increment. 
*/ + tmp = gfc_evaluate_now (step, pblock); + tmp = fold_build2 (PLUS_EXPR, type, dovar, tmp); + tmp = fold_build2 (MODIFY_EXPR, type, dovar, tmp); + for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c)) + if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE + && OMP_CLAUSE_DECL (c) == dovar) + { + OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp; + break; + } + } + if (c == NULL && par_clauses != NULL) + { + for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c)) + if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE + && OMP_CLAUSE_DECL (c) == dovar) + { + tree l = build_omp_clause (input_location, + OMP_CLAUSE_LASTPRIVATE); + OMP_CLAUSE_DECL (l) = dovar; + OMP_CLAUSE_CHAIN (l) = omp_clauses; + OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp; + omp_clauses = l; + OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED); + break; + } + } + gcc_assert (simple || c != NULL); + } + if (!simple) { - pushlevel (0); - gfc_start_block (&block); + tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE); + OMP_CLAUSE_DECL (tmp) = count; + omp_clauses = gfc_trans_add_clause (tmp, omp_clauses); } - gfc_start_block (&body); - /* Initialize DOVAR. */ - tmp = fold_build2 (MULT_EXPR, type, count, step); - tmp = build2 (PLUS_EXPR, type, from, tmp); - gfc_add_modify_stmt (&body, dovar, tmp); + if (i + 1 < collapse) + code = code->block->next; } - if (!dovar_found) + if (pblock != &block) { - tmp = build_omp_clause (OMP_CLAUSE_PRIVATE); - OMP_CLAUSE_DECL (tmp) = dovar; - omp_clauses = gfc_trans_add_clause (tmp, omp_clauses); + pushlevel (0); + gfc_start_block (&block); } - if (!simple) + + gfc_start_block (&body); + + dovar_init = nreverse (dovar_init); + while (dovar_init) { - tmp = build_omp_clause (OMP_CLAUSE_PRIVATE); - OMP_CLAUSE_DECL (tmp) = count; - omp_clauses = gfc_trans_add_clause (tmp, omp_clauses); + gfc_add_modify (&body, TREE_PURPOSE (dovar_init), + TREE_VALUE (dovar_init)); + dovar_init = TREE_CHAIN (dovar_init); } /* Cycle statement is implemented with a goto. 
Exit statement must not be @@ -1054,7 +1360,7 @@ static tree gfc_trans_omp_flush (void) { tree decl = built_in_decls [BUILT_IN_SYNCHRONIZE]; - return build_call_expr (decl, 0); + return build_call_expr_loc (input_location, decl, 0); } static tree @@ -1082,7 +1388,7 @@ gfc_trans_omp_parallel (gfc_code *code) omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses, code->loc); stmt = gfc_trans_omp_code (code->block->next, true); - stmt = build4_v (OMP_PARALLEL, stmt, omp_clauses, NULL, NULL); + stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses); gfc_add_expr_to_block (&block, stmt); return gfc_finish_block (&block); } @@ -1104,9 +1410,11 @@ gfc_trans_omp_parallel_do (gfc_code *code) do_clauses.sched_kind = parallel_clauses.sched_kind; do_clauses.chunk_size = parallel_clauses.chunk_size; do_clauses.ordered = parallel_clauses.ordered; + do_clauses.collapse = parallel_clauses.collapse; parallel_clauses.sched_kind = OMP_SCHED_NONE; parallel_clauses.chunk_size = NULL; parallel_clauses.ordered = false; + parallel_clauses.collapse = 0; omp_clauses = gfc_trans_omp_clauses (&block, ¶llel_clauses, code->loc); } @@ -1115,12 +1423,12 @@ gfc_trans_omp_parallel_do (gfc_code *code) pblock = █ else pushlevel (0); - stmt = gfc_trans_omp_do (code, pblock, &do_clauses); + stmt = gfc_trans_omp_do (code, pblock, &do_clauses, omp_clauses); if (TREE_CODE (stmt) != BIND_EXPR) stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0)); else poplevel (0, 0, 0); - stmt = build4_v (OMP_PARALLEL, stmt, omp_clauses, NULL, NULL); + stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses); OMP_PARALLEL_COMBINED (stmt) = 1; gfc_add_expr_to_block (&block, stmt); return gfc_finish_block (&block); @@ -1145,7 +1453,7 @@ gfc_trans_omp_parallel_sections (gfc_code *code) stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0)); else poplevel (0, 0, 0); - stmt = build4_v (OMP_PARALLEL, stmt, omp_clauses, NULL, NULL); + stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses); OMP_PARALLEL_COMBINED (stmt) = 1; gfc_add_expr_to_block (&block, stmt); return gfc_finish_block (&block); @@ -1170,7 +1478,7 @@ gfc_trans_omp_parallel_workshare (gfc_code *code) stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0)); else poplevel (0, 0, 0); - stmt = build4_v (OMP_PARALLEL, stmt, omp_clauses, NULL, NULL); + stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses); OMP_PARALLEL_COMBINED (stmt) = 1; gfc_add_expr_to_block (&block, stmt); return gfc_finish_block (&block); @@ -1202,7 +1510,7 @@ gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses) } stmt = gfc_finish_block (&body); - stmt = build2_v (OMP_SECTIONS, stmt, omp_clauses); + stmt = build2 (OMP_SECTIONS, void_type_node, stmt, omp_clauses); gfc_add_expr_to_block (&block, stmt); return gfc_finish_block (&block); @@ -1213,15 +1521,187 @@ gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses) { tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc); tree stmt = gfc_trans_omp_code (code->block->next, true); - stmt = build2_v (OMP_SINGLE, stmt, omp_clauses); + stmt = build2 (OMP_SINGLE, void_type_node, stmt, omp_clauses); return stmt; } static tree +gfc_trans_omp_task (gfc_code *code) +{ + stmtblock_t block; + tree stmt, omp_clauses; + + gfc_start_block (&block); + omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses, + code->loc); + stmt = gfc_trans_omp_code (code->block->next, true); + stmt = build2 (OMP_TASK, void_type_node, stmt, omp_clauses); + gfc_add_expr_to_block (&block, stmt); + 
return gfc_finish_block (&block); +} + +static tree +gfc_trans_omp_taskwait (void) +{ + tree decl = built_in_decls [BUILT_IN_GOMP_TASKWAIT]; + return build_call_expr_loc (input_location, decl, 0); +} + +static tree gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses) { - /* XXX */ - return gfc_trans_omp_single (code, clauses); + tree res, tmp, stmt; + stmtblock_t block, *pblock = NULL; + stmtblock_t singleblock; + int saved_ompws_flags; + bool singleblock_in_progress = false; + /* True if previous gfc_code in workshare construct is not workshared. */ + bool prev_singleunit; + + code = code->block->next; + + pushlevel (0); + + if (!code) + return build_empty_stmt (input_location); + + gfc_start_block (&block); + pblock = █ + + ompws_flags = OMPWS_WORKSHARE_FLAG; + prev_singleunit = false; + + /* Translate statements one by one to trees until we reach + the end of the workshare construct. Adjacent gfc_codes that + are a single unit of work are clustered and encapsulated in a + single OMP_SINGLE construct. */ + for (; code; code = code->next) + { + if (code->here != 0) + { + res = gfc_trans_label_here (code); + gfc_add_expr_to_block (pblock, res); + } + + /* No dependence analysis, use for clauses with wait. + If this is the last gfc_code, use default omp_clauses. */ + if (code->next == NULL && clauses->nowait) + ompws_flags |= OMPWS_NOWAIT; + + /* By default, every gfc_code is a single unit of work. */ + ompws_flags |= OMPWS_CURR_SINGLEUNIT; + ompws_flags &= ~OMPWS_SCALARIZER_WS; + + switch (code->op) + { + case EXEC_NOP: + res = NULL_TREE; + break; + + case EXEC_ASSIGN: + res = gfc_trans_assign (code); + break; + + case EXEC_POINTER_ASSIGN: + res = gfc_trans_pointer_assign (code); + break; + + case EXEC_INIT_ASSIGN: + res = gfc_trans_init_assign (code); + break; + + case EXEC_FORALL: + res = gfc_trans_forall (code); + break; + + case EXEC_WHERE: + res = gfc_trans_where (code); + break; + + case EXEC_OMP_ATOMIC: + res = gfc_trans_omp_directive (code); + break; + + case EXEC_OMP_PARALLEL: + case EXEC_OMP_PARALLEL_DO: + case EXEC_OMP_PARALLEL_SECTIONS: + case EXEC_OMP_PARALLEL_WORKSHARE: + case EXEC_OMP_CRITICAL: + saved_ompws_flags = ompws_flags; + ompws_flags = 0; + res = gfc_trans_omp_directive (code); + ompws_flags = saved_ompws_flags; + break; + + default: + internal_error ("gfc_trans_omp_workshare(): Bad statement code"); + } + + gfc_set_backend_locus (&code->loc); + + if (res != NULL_TREE && ! IS_EMPTY_STMT (res)) + { + if (prev_singleunit) + { + if (ompws_flags & OMPWS_CURR_SINGLEUNIT) + /* Add current gfc_code to single block. */ + gfc_add_expr_to_block (&singleblock, res); + else + { + /* Finish single block and add it to pblock. */ + tmp = gfc_finish_block (&singleblock); + tmp = build2 (OMP_SINGLE, void_type_node, tmp, NULL_TREE); + gfc_add_expr_to_block (pblock, tmp); + /* Add current gfc_code to pblock. */ + gfc_add_expr_to_block (pblock, res); + singleblock_in_progress = false; + } + } + else + { + if (ompws_flags & OMPWS_CURR_SINGLEUNIT) + { + /* Start single block. */ + gfc_init_block (&singleblock); + gfc_add_expr_to_block (&singleblock, res); + singleblock_in_progress = true; + } + else + /* Add the new statement to the block. */ + gfc_add_expr_to_block (pblock, res); + } + prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0; + } + } + + /* Finish remaining SINGLE block, if we were in the middle of one. */ + if (singleblock_in_progress) + { + /* Finish single block and add it to pblock. 
*/ + tmp = gfc_finish_block (&singleblock); + tmp = build2 (OMP_SINGLE, void_type_node, tmp, + clauses->nowait + ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT) + : NULL_TREE); + gfc_add_expr_to_block (pblock, tmp); + } + + stmt = gfc_finish_block (pblock); + if (TREE_CODE (stmt) != BIND_EXPR) + { + if (!IS_EMPTY_STMT (stmt)) + { + tree bindblock = poplevel (1, 0, 0); + stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock); + } + else + poplevel (0, 0, 0); + } + else + poplevel (0, 0, 0); + + ompws_flags = 0; + return stmt; } tree @@ -1236,7 +1716,7 @@ gfc_trans_omp_directive (gfc_code *code) case EXEC_OMP_CRITICAL: return gfc_trans_omp_critical (code); case EXEC_OMP_DO: - return gfc_trans_omp_do (code, NULL, code->ext.omp_clauses); + return gfc_trans_omp_do (code, NULL, code->ext.omp_clauses, NULL); case EXEC_OMP_FLUSH: return gfc_trans_omp_flush (); case EXEC_OMP_MASTER: @@ -1255,6 +1735,10 @@ gfc_trans_omp_directive (gfc_code *code) return gfc_trans_omp_sections (code, code->ext.omp_clauses); case EXEC_OMP_SINGLE: return gfc_trans_omp_single (code, code->ext.omp_clauses); + case EXEC_OMP_TASK: + return gfc_trans_omp_task (code); + case EXEC_OMP_TASKWAIT: + return gfc_trans_omp_taskwait (); case EXEC_OMP_WORKSHARE: return gfc_trans_omp_workshare (code, code->ext.omp_clauses); default:
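
A note on the recurring descriptor-size pattern: gfc_omp_clause_default_ctor, gfc_omp_clause_copy_ctor, gfc_omp_clause_assign_op and the reduction-init path above all compute an allocatable array's byte size as (ubound - lbound + 1), times the stride of the last dimension when the rank is greater than 1, times the element size, and then allocate or memcpy that many bytes. The C sketch below restates that control flow over a simplified, hypothetical descriptor so the intent of the generated code is easier to follow. The field names, the payload_size/private_default_ctor/firstprivate_copy_ctor helpers and the use of plain malloc/memcpy are illustrative assumptions, not gfortran's actual descriptor layout or runtime calls.

#include <stdlib.h>
#include <string.h>

/* Simplified stand-in for gfortran's array descriptor (illustrative only;
   field names and layout are assumptions for this sketch).  */
typedef struct
{
  void *data;            /* NULL means "not currently allocated".  */
  long lbound, ubound;   /* bounds of the last dimension */
  long stride;           /* stride of the last dimension, in elements */
  long elem_size;        /* element size in bytes */
  int rank;
} descriptor;

/* Byte size as computed in the patch: (ubound - lbound + 1), times the
   stride of the last dimension when rank > 1, times the element size.  */
static size_t
payload_size (const descriptor *d)
{
  long count = d->ubound - d->lbound + 1;
  if (d->rank > 1)
    count *= d->stride;
  return (size_t) count * (size_t) d->elem_size;
}

/* PRIVATE default constructor: copy the descriptor, then allocate only if
   the outer variable is currently allocated, else stay unallocated.  */
static void
private_default_ctor (descriptor *priv, const descriptor *outer)
{
  *priv = *outer;
  priv->data = outer->data != NULL ? malloc (payload_size (priv)) : NULL;
}

/* FIRSTPRIVATE copy constructor: allocate and copy the payload, mirroring
   the unconditional allocate-and-memcpy in gfc_omp_clause_copy_ctor.  */
static void
firstprivate_copy_ctor (descriptor *dest, const descriptor *src)
{
  *dest = *src;
  dest->data = malloc (payload_size (dest));
  memcpy (dest->data, src->data, payload_size (dest));
}

int
main (void)
{
  double payload[4] = { 1.0, 2.0, 3.0, 4.0 };
  descriptor outer = { payload, 1, 4, 1, sizeof (double), 1 };
  descriptor priv, first;

  private_default_ctor (&priv, &outer);    /* allocated, contents undefined */
  firstprivate_copy_ctor (&first, &outer); /* allocated, copy of payload */

  free (priv.data);
  free (first.data);
  return 0;
}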