X-Git-Url: http://git.sourceforge.jp/view?p=pf3gnuchains%2Fgcc-fork.git;a=blobdiff_plain;f=gcc%2Ffortran%2Ftrans-stmt.c;h=c7ae36005d21aa8ee22bfc2b4ec7aa1267cd75a3;hp=75eeb333d6209adb97949e9f48c5ff3c0a9a9871;hb=40386751ff4443cecb2d9704efac328b6dec66f1;hpb=86f2ad37d4b6b31e1ca896e252402c372f3f09b3 diff --git a/gcc/fortran/trans-stmt.c b/gcc/fortran/trans-stmt.c index 75eeb333d62..c7ae36005d2 100644 --- a/gcc/fortran/trans-stmt.c +++ b/gcc/fortran/trans-stmt.c @@ -1,5 +1,6 @@ /* Statement translation -- generate GCC trees from gfc_code. - Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 + Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, + 2011 Free Software Foundation, Inc. Contributed by Paul Brook and Steven Bosscher @@ -25,10 +26,6 @@ along with GCC; see the file COPYING3. If not see #include "system.h" #include "coretypes.h" #include "tree.h" -#include "gimple.h" -#include "ggc.h" -#include "toplev.h" -#include "real.h" #include "gfortran.h" #include "flags.h" #include "trans.h" @@ -38,6 +35,7 @@ along with GCC; see the file COPYING3. If not see #include "trans-const.h" #include "arith.h" #include "dependency.h" +#include "ggc.h" typedef struct iter_info { @@ -104,24 +102,24 @@ gfc_trans_label_assign (gfc_code * code) /* Start a new block. */ gfc_init_se (&se, NULL); gfc_start_block (&se.pre); - gfc_conv_label_variable (&se, code->expr); + gfc_conv_label_variable (&se, code->expr1); len = GFC_DECL_STRING_LEN (se.expr); addr = GFC_DECL_ASSIGN_ADDR (se.expr); - label_tree = gfc_get_label_decl (code->label); + label_tree = gfc_get_label_decl (code->label1); - if (code->label->defined == ST_LABEL_TARGET) + if (code->label1->defined == ST_LABEL_TARGET) { label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree); len_tree = integer_minus_one_node; } else { - gfc_expr *format = code->label->format; + gfc_expr *format = code->label1->format; label_len = format->value.character.length; - len_tree = build_int_cst (NULL_TREE, label_len); + len_tree = build_int_cst (gfc_charlen_type_node, label_len); label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1, format->value.character.string); label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree); @@ -144,46 +142,31 @@ gfc_trans_goto (gfc_code * code) tree tmp; gfc_se se; - if (code->label != NULL) - return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label)); + if (code->label1 != NULL) + return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1)); /* ASSIGNED GOTO. */ gfc_init_se (&se, NULL); gfc_start_block (&se.pre); - gfc_conv_label_variable (&se, code->expr); + gfc_conv_label_variable (&se, code->expr1); tmp = GFC_DECL_STRING_LEN (se.expr); - tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp, - build_int_cst (TREE_TYPE (tmp), -1)); + tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, tmp, + build_int_cst (TREE_TYPE (tmp), -1)); gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc, "Assigned label is not a target label"); assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr); - code = code->block; - if (code == NULL) - { - target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto); - gfc_add_expr_to_block (&se.pre, target); - return gfc_finish_block (&se.pre); - } - - /* Check the label list. 
*/ - do - { - target = gfc_get_label_decl (code->label); - tmp = gfc_build_addr_expr (pvoid_type_node, target); - tmp = fold_build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto); - tmp = build3_v (COND_EXPR, tmp, - fold_build1 (GOTO_EXPR, void_type_node, target), - build_empty_stmt ()); - gfc_add_expr_to_block (&se.pre, tmp); - code = code->block; - } - while (code != NULL); - gfc_trans_runtime_check (true, false, boolean_true_node, &se.pre, &loc, - "Assigned label is not in the list"); + /* We're going to ignore a label list. It does not really change the + statement's semantics (because it is just a further restriction on + what's legal code); before, we were comparing label addresses here, but + that's a very fragile business and may break with optimization. So + just ignore it. */ - return gfc_finish_block (&se.pre); + target = fold_build1_loc (input_location, GOTO_EXPR, void_type_node, + assigned_goto); + gfc_add_expr_to_block (&se.pre, target); + return gfc_finish_block (&se.pre); } @@ -212,6 +195,7 @@ gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse, gfc_ss *ss; gfc_ss_info *info; gfc_symbol *fsym; + gfc_ref *ref; int n; tree data; tree offset; @@ -257,6 +241,7 @@ gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse, /* Make a local loopinfo for the temporary creation, so that none of the other ss->info's have to be renormalized. */ gfc_init_loopinfo (&tmp_loop); + tmp_loop.dimen = info->dimen; for (n = 0; n < info->dimen; n++) { tmp_loop.to[n] = loopse->loop->to[n]; @@ -267,12 +252,42 @@ gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse, /* Obtain the argument descriptor for unpacking. */ gfc_init_se (&parmse, NULL); parmse.want_pointer = 1; + + /* The scalarizer introduces some specific peculiarities when + handling elemental subroutines; the stride can be needed up to + the dim_array - 1, rather than dim_loop - 1 to calculate + offsets outside the loop. For this reason, we make sure that + the descriptor has the dimensionality of the array by converting + trailing elements into ranges with end = start. */ + for (ref = e->ref; ref; ref = ref->next) + if (ref->type == REF_ARRAY && ref->u.ar.type == AR_SECTION) + break; + + if (ref) + { + bool seen_range = false; + for (n = 0; n < ref->u.ar.dimen; n++) + { + if (ref->u.ar.dimen_type[n] == DIMEN_RANGE) + seen_range = true; + + if (!seen_range + || ref->u.ar.dimen_type[n] != DIMEN_ELEMENT) + continue; + + ref->u.ar.end[n] = gfc_copy_expr (ref->u.ar.start[n]); + ref->u.ar.dimen_type[n] = DIMEN_RANGE; + } + } + gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e)); gfc_add_block_to_block (&se->pre, &parmse.pre); - /* If we've got INTENT(INOUT), initialize the array temporary with - a copy of the values. */ - if (fsym->attr.intent == INTENT_INOUT) + /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT), + initialize the array temporary with a copy of the values. 
*/ + if (fsym->attr.intent == INTENT_INOUT + || (fsym->ts.type ==BT_DERIVED + && fsym->attr.intent == INTENT_OUT)) initial = parmse.expr; else initial = NULL_TREE; @@ -295,7 +310,7 @@ gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse, data = gfc_create_var (pvoid_type_node, NULL); gfc_init_block (&temp_post); tmp = gfc_trans_create_temp_array (&se->pre, &temp_post, - &tmp_loop, info, temptype, + &tmp_loop, ss, temptype, initial, false, true, false, &arg->expr->where); @@ -307,18 +322,20 @@ gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse, offset = gfc_index_zero_node; for (n = 0; n < info->dimen; n++) { - tmp = gfc_conv_descriptor_stride (info->descriptor, - gfc_rank_cst[n]); - tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, - loopse->loop->from[n], tmp); - offset = fold_build2 (MINUS_EXPR, gfc_array_index_type, - offset, tmp); + tmp = gfc_conv_descriptor_stride_get (info->descriptor, + gfc_rank_cst[n]); + tmp = fold_build2_loc (input_location, MULT_EXPR, + gfc_array_index_type, + loopse->loop->from[n], tmp); + offset = fold_build2_loc (input_location, MINUS_EXPR, + gfc_array_index_type, offset, tmp); } info->offset = gfc_create_var (gfc_array_index_type, NULL); gfc_add_modify (&se->pre, info->offset, offset); /* Copy the result back using unpack. */ - tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data); + tmp = build_call_expr_loc (input_location, + gfor_fndecl_in_unpack, 2, parmse.expr, data); gfc_add_expr_to_block (&se->post, tmp); /* parmse.pre is already added above. */ @@ -332,12 +349,16 @@ gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse, /* Translate the CALL statement. Builds a call to an F95 subroutine. */ tree -gfc_trans_call (gfc_code * code, bool dependency_check) +gfc_trans_call (gfc_code * code, bool dependency_check, + tree mask, tree count1, bool invert) { gfc_se se; gfc_ss * ss; int has_alternate_specifier; gfc_dep_check check_variable; + tree index = NULL_TREE; + tree maskexpr = NULL_TREE; + tree tmp; /* A CALL starts a new block because the actual arguments may have to be evaluated first. */ @@ -356,8 +377,8 @@ gfc_trans_call (gfc_code * code, bool dependency_check) /* Translate the call. */ has_alternate_specifier - = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual, - NULL_TREE); + = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual, + code->expr1, NULL); /* A subroutine without side-effect, by definition, does nothing! */ TREE_SIDE_EFFECTS (se.expr) = 1; @@ -369,7 +390,7 @@ gfc_trans_call (gfc_code * code, bool dependency_check) gfc_symbol *sym; select_code = code->next; gcc_assert(select_code->op == EXEC_SELECT); - sym = select_code->expr->symtree->n.sym; + sym = select_code->expr1->symtree->n.sym; se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr); if (sym->backend_decl == NULL) sym->backend_decl = gfc_get_symbol_decl (sym); @@ -405,7 +426,7 @@ gfc_trans_call (gfc_code * code, bool dependency_check) subscripts. This could be prevented in the elemental case as temporaries are handled separatedly (below in gfc_conv_elemental_dependencies). */ - gfc_conv_loop_setup (&loop, &code->expr->where); + gfc_conv_loop_setup (&loop, &code->expr1->where); gfc_mark_ss_chain_used (ss, 1); /* Convert the arguments, checking for dependencies. */ @@ -429,10 +450,32 @@ gfc_trans_call (gfc_code * code, bool dependency_check) gfc_start_scalarized_body (&loop, &body); gfc_init_block (&block); + if (mask && count1) + { + /* Form the mask expression according to the mask. 
*/ + index = count1; + maskexpr = gfc_build_array_ref (mask, index, NULL); + if (invert) + maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR, + TREE_TYPE (maskexpr), maskexpr); + } + /* Add the subroutine call to the block. */ - gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual, - NULL_TREE); - gfc_add_expr_to_block (&loopse.pre, loopse.expr); + gfc_conv_procedure_call (&loopse, code->resolved_sym, + code->ext.actual, code->expr1, NULL); + + if (mask && count1) + { + tmp = build3_v (COND_EXPR, maskexpr, loopse.expr, + build_empty_stmt (input_location)); + gfc_add_expr_to_block (&loopse.pre, tmp); + tmp = fold_build2_loc (input_location, PLUS_EXPR, + gfc_array_index_type, + count1, gfc_index_one_node); + gfc_add_modify (&loopse.pre, count1, tmp); + } + else + gfc_add_expr_to_block (&loopse.pre, loopse.expr); gfc_add_block_to_block (&block, &loopse.pre); gfc_add_block_to_block (&block, &loopse.post); @@ -453,43 +496,47 @@ gfc_trans_call (gfc_code * code, bool dependency_check) /* Translate the RETURN statement. */ tree -gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED) +gfc_trans_return (gfc_code * code) { - if (code->expr) + if (code->expr1) { gfc_se se; tree tmp; tree result; /* If code->expr is not NULL, this return statement must appear - in a subroutine and current_fake_result_decl has already + in a subroutine and current_fake_result_decl has already been generated. */ result = gfc_get_fake_result_decl (NULL, 0); if (!result) - { - gfc_warning ("An alternate return at %L without a * dummy argument", - &code->expr->where); - return build1_v (GOTO_EXPR, gfc_get_return_label ()); - } + { + gfc_warning ("An alternate return at %L without a * dummy argument", + &code->expr1->where); + return gfc_generate_return (); + } /* Start a new block for this statement. */ gfc_init_se (&se, NULL); gfc_start_block (&se.pre); - gfc_conv_expr (&se, code->expr); + gfc_conv_expr (&se, code->expr1); - tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result, - fold_convert (TREE_TYPE (result), se.expr)); + /* Note that the actually returned expression is a simple value and + does not depend on any pointers or such; thus we can clean-up with + se.post before returning. 
*/ + tmp = fold_build2_loc (input_location, MODIFY_EXPR, TREE_TYPE (result), + result, fold_convert (TREE_TYPE (result), + se.expr)); gfc_add_expr_to_block (&se.pre, tmp); + gfc_add_block_to_block (&se.pre, &se.post); - tmp = build1_v (GOTO_EXPR, gfc_get_return_label ()); + tmp = gfc_generate_return (); gfc_add_expr_to_block (&se.pre, tmp); - gfc_add_block_to_block (&se.pre, &se.post); return gfc_finish_block (&se.pre); } - else - return build1_v (GOTO_EXPR, gfc_get_return_label ()); + + return gfc_generate_return (); } @@ -508,15 +555,25 @@ gfc_trans_pause (gfc_code * code) gfc_start_block (&se.pre); - if (code->expr == NULL) + if (code->expr1 == NULL) + { + tmp = build_int_cst (gfc_int4_type_node, 0); + tmp = build_call_expr_loc (input_location, + gfor_fndecl_pause_string, 2, + build_int_cst (pchar_type_node, 0), tmp); + } + else if (code->expr1->ts.type == BT_INTEGER) { - tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code); - tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp); + gfc_conv_expr (&se, code->expr1); + tmp = build_call_expr_loc (input_location, + gfor_fndecl_pause_numeric, 1, + fold_convert (gfc_int4_type_node, se.expr)); } else { - gfc_conv_expr_reference (&se, code->expr); - tmp = build_call_expr (gfor_fndecl_pause_string, 2, + gfc_conv_expr_reference (&se, code->expr1); + tmp = build_call_expr_loc (input_location, + gfor_fndecl_pause_string, 2, se.expr, se.string_length); } @@ -532,7 +589,7 @@ gfc_trans_pause (gfc_code * code) to a runtime library call. */ tree -gfc_trans_stop (gfc_code * code) +gfc_trans_stop (gfc_code *code, bool error_stop) { tree gfc_int4_type_node = gfc_get_int_type (4); gfc_se se; @@ -542,17 +599,49 @@ gfc_trans_stop (gfc_code * code) gfc_init_se (&se, NULL); gfc_start_block (&se.pre); + if (gfc_option.coarray == GFC_FCOARRAY_LIB && !error_stop) + { + /* Per F2008, 8.5.1 STOP implies a SYNC MEMORY. */ + tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE); + tmp = build_call_expr_loc (input_location, tmp, 0); + gfc_add_expr_to_block (&se.pre, tmp); + + tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_finalize, 0); + gfc_add_expr_to_block (&se.pre, tmp); + } - if (code->expr == NULL) + if (code->expr1 == NULL) + { + tmp = build_int_cst (gfc_int4_type_node, 0); + tmp = build_call_expr_loc (input_location, + error_stop + ? (gfc_option.coarray == GFC_FCOARRAY_LIB + ? gfor_fndecl_caf_error_stop_str + : gfor_fndecl_error_stop_string) + : gfor_fndecl_stop_string, + 2, build_int_cst (pchar_type_node, 0), tmp); + } + else if (code->expr1->ts.type == BT_INTEGER) { - tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code); - tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp); + gfc_conv_expr (&se, code->expr1); + tmp = build_call_expr_loc (input_location, + error_stop + ? (gfc_option.coarray == GFC_FCOARRAY_LIB + ? gfor_fndecl_caf_error_stop + : gfor_fndecl_error_stop_numeric) + : gfor_fndecl_stop_numeric_f08, 1, + fold_convert (gfc_int4_type_node, se.expr)); } else { - gfc_conv_expr_reference (&se, code->expr); - tmp = build_call_expr (gfor_fndecl_stop_string, 2, - se.expr, se.string_length); + gfc_conv_expr_reference (&se, code->expr1); + tmp = build_call_expr_loc (input_location, + error_stop + ? (gfc_option.coarray == GFC_FCOARRAY_LIB + ? 
gfor_fndecl_caf_error_stop_str + : gfor_fndecl_error_stop_string) + : gfor_fndecl_stop_string, + 2, se.expr, se.string_length); } gfc_add_expr_to_block (&se.pre, tmp); @@ -563,6 +652,242 @@ gfc_trans_stop (gfc_code * code) } +tree +gfc_trans_lock_unlock (gfc_code *code, gfc_exec_op type ATTRIBUTE_UNUSED) +{ + gfc_se se, argse; + tree stat = NULL_TREE, lock_acquired = NULL_TREE; + + /* Short cut: For single images without STAT= or LOCK_ACQUIRED + return early. (ERRMSG= is always untouched for -fcoarray=single.) */ + if (!code->expr2 && !code->expr4 && gfc_option.coarray != GFC_FCOARRAY_LIB) + return NULL_TREE; + + gfc_init_se (&se, NULL); + gfc_start_block (&se.pre); + + if (code->expr2) + { + gcc_assert (code->expr2->expr_type == EXPR_VARIABLE); + gfc_init_se (&argse, NULL); + gfc_conv_expr_val (&argse, code->expr2); + stat = argse.expr; + } + + if (code->expr4) + { + gcc_assert (code->expr4->expr_type == EXPR_VARIABLE); + gfc_init_se (&argse, NULL); + gfc_conv_expr_val (&argse, code->expr4); + lock_acquired = argse.expr; + } + + if (stat != NULL_TREE) + gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0)); + + if (lock_acquired != NULL_TREE) + gfc_add_modify (&se.pre, lock_acquired, + fold_convert (TREE_TYPE (lock_acquired), + boolean_true_node)); + + return gfc_finish_block (&se.pre); +} + + +tree +gfc_trans_sync (gfc_code *code, gfc_exec_op type) +{ + gfc_se se, argse; + tree tmp; + tree images = NULL_TREE, stat = NULL_TREE, + errmsg = NULL_TREE, errmsglen = NULL_TREE; + + /* Short cut: For single images without bound checking or without STAT=, + return early. (ERRMSG= is always untouched for -fcoarray=single.) */ + if (!code->expr2 && !(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS) + && gfc_option.coarray != GFC_FCOARRAY_LIB) + return NULL_TREE; + + gfc_init_se (&se, NULL); + gfc_start_block (&se.pre); + + if (code->expr1 && code->expr1->rank == 0) + { + gfc_init_se (&argse, NULL); + gfc_conv_expr_val (&argse, code->expr1); + images = argse.expr; + } + + if (code->expr2) + { + gcc_assert (code->expr2->expr_type == EXPR_VARIABLE); + gfc_init_se (&argse, NULL); + gfc_conv_expr_val (&argse, code->expr2); + stat = argse.expr; + } + else + stat = null_pointer_node; + + if (code->expr3 && gfc_option.coarray == GFC_FCOARRAY_LIB + && type != EXEC_SYNC_MEMORY) + { + gcc_assert (code->expr3->expr_type == EXPR_VARIABLE); + gfc_init_se (&argse, NULL); + gfc_conv_expr (&argse, code->expr3); + gfc_conv_string_parameter (&argse); + errmsg = gfc_build_addr_expr (NULL, argse.expr); + errmsglen = argse.string_length; + } + else if (gfc_option.coarray == GFC_FCOARRAY_LIB && type != EXEC_SYNC_MEMORY) + { + errmsg = null_pointer_node; + errmsglen = build_int_cst (integer_type_node, 0); + } + + /* Check SYNC IMAGES(imageset) for valid image index. + FIXME: Add a check for image-set arrays. 
*/ + if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS) + && code->expr1->rank == 0) + { + tree cond; + if (gfc_option.coarray != GFC_FCOARRAY_LIB) + cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, + images, build_int_cst (TREE_TYPE (images), 1)); + else + { + tree cond2; + cond = fold_build2_loc (input_location, GE_EXPR, boolean_type_node, + images, gfort_gvar_caf_num_images); + cond2 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node, + images, + build_int_cst (TREE_TYPE (images), 1)); + cond = fold_build2_loc (input_location, TRUTH_AND_EXPR, + boolean_type_node, cond, cond2); + } + gfc_trans_runtime_check (true, false, cond, &se.pre, + &code->expr1->where, "Invalid image number " + "%d in SYNC IMAGES", + fold_convert (integer_type_node, se.expr)); + } + + /* Per F2008, 8.5.1, a SYNC MEMORY is implied by calling the + image control statements SYNC IMAGES and SYNC ALL. */ + if (gfc_option.coarray == GFC_FCOARRAY_LIB) + { + tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE); + tmp = build_call_expr_loc (input_location, tmp, 0); + gfc_add_expr_to_block (&se.pre, tmp); + } + + if (gfc_option.coarray != GFC_FCOARRAY_LIB || type == EXEC_SYNC_MEMORY) + { + /* Set STAT to zero. */ + if (code->expr2) + gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0)); + } + else if (type == EXEC_SYNC_ALL) + { + /* SYNC ALL => stat == null_pointer_node + SYNC ALL(stat=s) => stat has an integer type + + If "stat" has the wrong integer type, use a temp variable of + the right type and later cast the result back into "stat". */ + if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node) + { + if (TREE_TYPE (stat) == integer_type_node) + stat = gfc_build_addr_expr (NULL, stat); + + tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all, + 3, stat, errmsg, errmsglen); + gfc_add_expr_to_block (&se.pre, tmp); + } + else + { + tree tmp_stat = gfc_create_var (integer_type_node, "stat"); + + tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all, + 3, gfc_build_addr_expr (NULL, tmp_stat), + errmsg, errmsglen); + gfc_add_expr_to_block (&se.pre, tmp); + + gfc_add_modify (&se.pre, stat, + fold_convert (TREE_TYPE (stat), tmp_stat)); + } + } + else + { + tree len; + + gcc_assert (type == EXEC_SYNC_IMAGES); + + if (!code->expr1) + { + len = build_int_cst (integer_type_node, -1); + images = null_pointer_node; + } + else if (code->expr1->rank == 0) + { + len = build_int_cst (integer_type_node, 1); + images = gfc_build_addr_expr (NULL_TREE, images); + } + else + { + /* FIXME. */ + if (code->expr1->ts.kind != gfc_c_int_kind) + gfc_fatal_error ("Sorry, only support for integer kind %d " + "implemented for image-set at %L", + gfc_c_int_kind, &code->expr1->where); + + gfc_conv_array_parameter (&se, code->expr1, + gfc_walk_expr (code->expr1), true, NULL, + NULL, &len); + images = se.expr; + + tmp = gfc_typenode_for_spec (&code->expr1->ts); + if (GFC_ARRAY_TYPE_P (tmp) || GFC_DESCRIPTOR_TYPE_P (tmp)) + tmp = gfc_get_element_type (tmp); + + len = fold_build2_loc (input_location, TRUNC_DIV_EXPR, + TREE_TYPE (len), len, + fold_convert (TREE_TYPE (len), + TYPE_SIZE_UNIT (tmp))); + len = fold_convert (integer_type_node, len); + } + + /* SYNC IMAGES(imgs) => stat == null_pointer_node + SYNC IMAGES(imgs,stat=s) => stat has an integer type + + If "stat" has the wrong integer type, use a temp variable of + the right type and later cast the result back into "stat". 
*/ + if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node) + { + if (TREE_TYPE (stat) == integer_type_node) + stat = gfc_build_addr_expr (NULL, stat); + + tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images, + 5, fold_convert (integer_type_node, len), + images, stat, errmsg, errmsglen); + gfc_add_expr_to_block (&se.pre, tmp); + } + else + { + tree tmp_stat = gfc_create_var (integer_type_node, "stat"); + + tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images, + 5, fold_convert (integer_type_node, len), + images, gfc_build_addr_expr (NULL, tmp_stat), + errmsg, errmsglen); + gfc_add_expr_to_block (&se.pre, tmp); + + gfc_add_modify (&se.pre, stat, + fold_convert (TREE_TYPE (stat), tmp_stat)); + } + } + + return gfc_finish_block (&se.pre); +} + + /* Generate GENERIC for the IF construct. This function also deals with the simple IF statement, because the front end translates the IF statement into an IF construct. @@ -608,9 +933,11 @@ gfc_trans_if_1 (gfc_code * code) { gfc_se if_se; tree stmt, elsestmt; + locus saved_loc; + location_t loc; /* Check for an unconditional ELSE clause. */ - if (!code->expr) + if (!code->expr1) return gfc_trans_code (code->next); /* Initialize a statement builder for each block. Puts in NULL_TREEs. */ @@ -618,7 +945,16 @@ gfc_trans_if_1 (gfc_code * code) gfc_start_block (&if_se.pre); /* Calculate the IF condition expression. */ - gfc_conv_expr_val (&if_se, code->expr); + if (code->expr1->where.lb) + { + gfc_save_backend_locus (&saved_loc); + gfc_set_backend_locus (&code->expr1->where); + } + + gfc_conv_expr_val (&if_se, code->expr1); + + if (code->expr1->where.lb) + gfc_restore_backend_locus (&saved_loc); /* Translate the THEN clause. */ stmt = gfc_trans_code (code->next); @@ -627,10 +963,12 @@ gfc_trans_if_1 (gfc_code * code) if (code->block) elsestmt = gfc_trans_if_1 (code->block); else - elsestmt = build_empty_stmt (); + elsestmt = build_empty_stmt (input_location); /* Build the condition expression and add it to the condition block. */ - stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt); + loc = code->expr1->where.lb ? code->expr1->where.lb->location : input_location; + stmt = fold_build3_loc (loc, COND_EXPR, void_type_node, if_se.expr, stmt, + elsestmt); gfc_add_expr_to_block (&if_se.pre, stmt); @@ -641,10 +979,21 @@ gfc_trans_if_1 (gfc_code * code) tree gfc_trans_if (gfc_code * code) { - /* Ignore the top EXEC_IF, it only announces an IF construct. The - actual code we must translate is in code->block. */ + stmtblock_t body; + tree exit_label; + + /* Create exit label so it is available for trans'ing the body code. */ + exit_label = gfc_build_label_decl (NULL_TREE); + code->exit_label = exit_label; + + /* Translate the actual code in code->block. */ + gfc_init_block (&body); + gfc_add_expr_to_block (&body, gfc_trans_if_1 (code->block)); + + /* Add exit label. */ + gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label)); - return gfc_trans_if_1 (code->block); + return gfc_finish_block (&body); } @@ -685,36 +1034,41 @@ gfc_trans_arithmetic_if (gfc_code * code) gfc_start_block (&se.pre); /* Pre-evaluate COND. */ - gfc_conv_expr_val (&se, code->expr); + gfc_conv_expr_val (&se, code->expr1); se.expr = gfc_evaluate_now (se.expr, &se.pre); /* Build something to compare with. 
*/ zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node); - if (code->label->value != code->label2->value) + if (code->label1->value != code->label2->value) { /* If (cond < 0) take branch1 else take branch2. First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */ - branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label)); + branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1)); branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2)); - if (code->label->value != code->label3->value) - tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero); + if (code->label1->value != code->label3->value) + tmp = fold_build2_loc (input_location, LT_EXPR, boolean_type_node, + se.expr, zero); else - tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero); + tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, + se.expr, zero); - branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2); + branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node, + tmp, branch1, branch2); } else - branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label)); + branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1)); - if (code->label->value != code->label3->value + if (code->label1->value != code->label3->value && code->label2->value != code->label3->value) { /* if (cond <= 0) take branch1 else take branch2. */ branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3)); - tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero); - branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2); + tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, + se.expr, zero); + branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node, + tmp, branch1, branch2); } /* Append the COND_EXPR to the evaluation of COND, and return. */ @@ -723,6 +1077,161 @@ gfc_trans_arithmetic_if (gfc_code * code) } +/* Translate a CRITICAL block. */ +tree +gfc_trans_critical (gfc_code *code) +{ + stmtblock_t block; + tree tmp; + + gfc_start_block (&block); + + if (gfc_option.coarray == GFC_FCOARRAY_LIB) + { + tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_critical, 0); + gfc_add_expr_to_block (&block, tmp); + } + + tmp = gfc_trans_code (code->block->next); + gfc_add_expr_to_block (&block, tmp); + + if (gfc_option.coarray == GFC_FCOARRAY_LIB) + { + tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_end_critical, + 0); + gfc_add_expr_to_block (&block, tmp); + } + + + return gfc_finish_block (&block); +} + + +/* Do proper initialization for ASSOCIATE names. */ + +static void +trans_associate_var (gfc_symbol *sym, gfc_wrapped_block *block) +{ + gfc_expr *e; + tree tmp; + + gcc_assert (sym->assoc); + e = sym->assoc->target; + + /* Do a `pointer assignment' with updated descriptor (or assign descriptor + to array temporary) for arrays with either unknown shape or if associating + to a variable. */ + if (sym->attr.dimension + && (sym->as->type == AS_DEFERRED || sym->assoc->variable)) + { + gfc_se se; + gfc_ss *ss; + tree desc; + + desc = sym->backend_decl; + + /* If association is to an expression, evaluate it and create temporary. + Otherwise, get descriptor of target for pointer assignment. 
*/ + gfc_init_se (&se, NULL); + ss = gfc_walk_expr (e); + if (sym->assoc->variable) + { + se.direct_byref = 1; + se.expr = desc; + } + gfc_conv_expr_descriptor (&se, e, ss); + + /* If we didn't already do the pointer assignment, set associate-name + descriptor to the one generated for the temporary. */ + if (!sym->assoc->variable) + { + int dim; + + gfc_add_modify (&se.pre, desc, se.expr); + + /* The generated descriptor has lower bound zero (as array + temporary), shift bounds so we get lower bounds of 1. */ + for (dim = 0; dim < e->rank; ++dim) + gfc_conv_shift_descriptor_lbound (&se.pre, desc, + dim, gfc_index_one_node); + } + + /* Done, register stuff as init / cleanup code. */ + gfc_add_init_cleanup (block, gfc_finish_block (&se.pre), + gfc_finish_block (&se.post)); + } + + /* Do a scalar pointer assignment; this is for scalar variable targets. */ + else if (gfc_is_associate_pointer (sym)) + { + gfc_se se; + + gcc_assert (!sym->attr.dimension); + + gfc_init_se (&se, NULL); + gfc_conv_expr (&se, e); + + tmp = TREE_TYPE (sym->backend_decl); + tmp = gfc_build_addr_expr (tmp, se.expr); + gfc_add_modify (&se.pre, sym->backend_decl, tmp); + + gfc_add_init_cleanup (block, gfc_finish_block( &se.pre), + gfc_finish_block (&se.post)); + } + + /* Do a simple assignment. This is for scalar expressions, where we + can simply use expression assignment. */ + else + { + gfc_expr *lhs; + + lhs = gfc_lval_expr_from_sym (sym); + tmp = gfc_trans_assignment (lhs, e, false, true); + gfc_add_init_cleanup (block, tmp, NULL_TREE); + } +} + + +/* Translate a BLOCK construct. This is basically what we would do for a + procedure body. */ + +tree +gfc_trans_block_construct (gfc_code* code) +{ + gfc_namespace* ns; + gfc_symbol* sym; + gfc_wrapped_block block; + tree exit_label; + stmtblock_t body; + gfc_association_list *ass; + + ns = code->ext.block.ns; + gcc_assert (ns); + sym = ns->proc_name; + gcc_assert (sym); + + /* Process local variables. */ + gcc_assert (!sym->tlink); + sym->tlink = sym; + gfc_process_block_locals (ns); + + /* Generate code including exit-label. */ + gfc_init_block (&body); + exit_label = gfc_build_label_decl (NULL_TREE); + code->exit_label = exit_label; + gfc_add_expr_to_block (&body, gfc_trans_code (ns->code)); + gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label)); + + /* Finish everything. */ + gfc_start_wrapped_block (&block, gfc_finish_block (&body)); + gfc_trans_deferred_vars (sym, &block); + for (ass = code->ext.block.assoc; ass; ass = ass->next) + trans_associate_var (ass->st->n.sym, &block); + + return gfc_finish_wrapped_block (&block); +} + + /* Translate the simple DO construct. This is where the loop variable has integer type and step +-1. We can't use this in the general case because integer overflow and floating point errors could give incorrect @@ -755,32 +1264,44 @@ gfc_trans_arithmetic_if (gfc_code * code) static tree gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar, - tree from, tree to, tree step) + tree from, tree to, tree step, tree exit_cond) { stmtblock_t body; tree type; tree cond; tree tmp; + tree saved_dovar = NULL; tree cycle_label; tree exit_label; + location_t loc; type = TREE_TYPE (dovar); + loc = code->ext.iterator->start->where.lb->location; + /* Initialize the DO variable: dovar = from. */ - gfc_add_modify (pblock, dovar, from); + gfc_add_modify_loc (loc, pblock, dovar, from); + + /* Save value for do-tinkering checking. 
*/ + if (gfc_option.rtcheck & GFC_RTCHECK_DO) + { + saved_dovar = gfc_create_var (type, ".saved_dovar"); + gfc_add_modify_loc (loc, pblock, saved_dovar, dovar); + } /* Cycle and exit statements are implemented with gotos. */ cycle_label = gfc_build_label_decl (NULL_TREE); exit_label = gfc_build_label_decl (NULL_TREE); /* Put the labels where they can be found later. See gfc_trans_do(). */ - code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL); + code->cycle_label = cycle_label; + code->exit_label = exit_label; /* Loop body. */ gfc_start_block (&body); /* Main loop body. */ - tmp = gfc_trans_code (code->block->next); + tmp = gfc_trans_code_cond (code->block->next, exit_cond); gfc_add_expr_to_block (&body, tmp); /* Label for cycle statements (if needed). */ @@ -790,32 +1311,57 @@ gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar, gfc_add_expr_to_block (&body, tmp); } + /* Check whether someone has modified the loop variable. */ + if (gfc_option.rtcheck & GFC_RTCHECK_DO) + { + tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node, + dovar, saved_dovar); + gfc_trans_runtime_check (true, false, tmp, &body, &code->loc, + "Loop variable has been modified"); + } + + /* Exit the loop if there is an I/O result condition or error. */ + if (exit_cond) + { + tmp = build1_v (GOTO_EXPR, exit_label); + tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, + exit_cond, tmp, + build_empty_stmt (loc)); + gfc_add_expr_to_block (&body, tmp); + } + /* Evaluate the loop condition. */ - cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to); - cond = gfc_evaluate_now (cond, &body); + cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, dovar, + to); + cond = gfc_evaluate_now_loc (loc, cond, &body); /* Increment the loop variable. */ - tmp = fold_build2 (PLUS_EXPR, type, dovar, step); - gfc_add_modify (&body, dovar, tmp); + tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step); + gfc_add_modify_loc (loc, &body, dovar, tmp); + + if (gfc_option.rtcheck & GFC_RTCHECK_DO) + gfc_add_modify_loc (loc, &body, saved_dovar, dovar); /* The loop exit. */ - tmp = build1_v (GOTO_EXPR, exit_label); + tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label); TREE_USED (exit_label) = 1; - tmp = fold_build3 (COND_EXPR, void_type_node, - cond, tmp, build_empty_stmt ()); + tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, + cond, tmp, build_empty_stmt (loc)); gfc_add_expr_to_block (&body, tmp); /* Finish the loop body. */ tmp = gfc_finish_block (&body); - tmp = build1_v (LOOP_EXPR, tmp); + tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp); /* Only execute the loop if the number of iterations is positive. */ if (tree_int_cst_sgn (step) > 0) - cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to); + cond = fold_build2_loc (loc, LE_EXPR, boolean_type_node, dovar, + to); else - cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to); - tmp = fold_build3 (COND_EXPR, void_type_node, - cond, tmp, build_empty_stmt ()); + cond = fold_build2_loc (loc, GE_EXPR, boolean_type_node, dovar, + to); + tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, cond, tmp, + build_empty_stmt (loc)); gfc_add_expr_to_block (pblock, tmp); /* Add the exit label. */ @@ -860,10 +1406,11 @@ exit_label: because the loop count itself can overflow. 
*/ tree -gfc_trans_do (gfc_code * code) +gfc_trans_do (gfc_code * code, tree exit_cond) { gfc_se se; tree dovar; + tree saved_dovar = NULL; tree from; tree to; tree step; @@ -877,9 +1424,12 @@ gfc_trans_do (gfc_code * code) tree pos_step; stmtblock_t block; stmtblock_t body; + location_t loc; gfc_start_block (&block); + loc = code->ext.iterator->start->where.lb->location; + /* Evaluate all the expressions in the iterator. */ gfc_init_se (&se, NULL); gfc_conv_expr_lhs (&se, code->ext.iterator->var); @@ -902,14 +1452,22 @@ gfc_trans_do (gfc_code * code) gfc_add_block_to_block (&block, &se.pre); step = gfc_evaluate_now (se.expr, &block); + if (gfc_option.rtcheck & GFC_RTCHECK_DO) + { + tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, step, + build_zero_cst (type)); + gfc_trans_runtime_check (true, false, tmp, &block, &code->loc, + "DO step value is zero"); + } + /* Special case simple loops. */ if (TREE_CODE (type) == INTEGER_TYPE && (integer_onep (step) || tree_int_cst_equal (step, integer_minus_one_node))) - return gfc_trans_simple_do (code, &block, dovar, from, to, step); + return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond); - pos_step = fold_build2 (GT_EXPR, boolean_type_node, step, - fold_convert (type, integer_zero_node)); + pos_step = fold_build2_loc (loc, GT_EXPR, boolean_type_node, step, + build_zero_cst (type)); if (TREE_CODE (type) == INTEGER_TYPE) utype = unsigned_type_for (type); @@ -922,49 +1480,77 @@ gfc_trans_do (gfc_code * code) exit_label = gfc_build_label_decl (NULL_TREE); TREE_USED (exit_label) = 1; + /* Put these labels where they can be found later. */ + code->cycle_label = cycle_label; + code->exit_label = exit_label; + /* Initialize the DO variable: dovar = from. */ gfc_add_modify (&block, dovar, from); + /* Save value for do-tinkering checking. */ + if (gfc_option.rtcheck & GFC_RTCHECK_DO) + { + saved_dovar = gfc_create_var (type, ".saved_dovar"); + gfc_add_modify_loc (loc, &block, saved_dovar, dovar); + } + /* Initialize loop count and jump to exit label if the loop is empty. This code is executed before we enter the loop body. We generate: + step_sign = sign(1,step); if (step > 0) { - if (to < from) goto exit_label; - countm1 = (to - from) / step; + if (to < from) + goto exit_label; } else { - if (to > from) goto exit_label; - countm1 = (from - to) / -step; - } */ + if (to > from) + goto exit_label; + } + countm1 = (to*step_sign - from*step_sign) / (step*step_sign); + + */ + if (TREE_CODE (type) == INTEGER_TYPE) { - tree pos, neg; + tree pos, neg, step_sign, to2, from2, step2; + + /* Calculate SIGN (1,step), as (step < 0 ? 
-1 : 1) */ + + tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, step, + build_int_cst (TREE_TYPE (step), 0)); + step_sign = fold_build3_loc (loc, COND_EXPR, type, tmp, + build_int_cst (type, -1), + build_int_cst (type, 1)); + + tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, to, from); + pos = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp, + fold_build1_loc (loc, GOTO_EXPR, void_type_node, + exit_label), + build_empty_stmt (loc)); + + tmp = fold_build2_loc (loc, GT_EXPR, boolean_type_node, to, + from); + neg = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp, + fold_build1_loc (loc, GOTO_EXPR, void_type_node, + exit_label), + build_empty_stmt (loc)); + tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, + pos_step, pos, neg); - tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from); - pos = fold_build3 (COND_EXPR, void_type_node, tmp, - build1_v (GOTO_EXPR, exit_label), - build_empty_stmt ()); - tmp = fold_build2 (MINUS_EXPR, type, to, from); - tmp = fold_convert (utype, tmp); - tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp, - fold_convert (utype, step)); - tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp); - pos = build2 (COMPOUND_EXPR, void_type_node, pos, tmp); + gfc_add_expr_to_block (&block, tmp); - tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from); - neg = fold_build3 (COND_EXPR, void_type_node, tmp, - build1_v (GOTO_EXPR, exit_label), - build_empty_stmt ()); - tmp = fold_build2 (MINUS_EXPR, type, from, to); - tmp = fold_convert (utype, tmp); - tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp, - fold_convert (utype, fold_build1 (NEGATE_EXPR, - type, step))); - tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp); - neg = build2 (COMPOUND_EXPR, void_type_node, neg, tmp); + /* Calculate the loop count. to-from can overflow, so + we cast to unsigned. */ - tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg); + to2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, to); + from2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, from); + step2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, step); + step2 = fold_convert (utype, step2); + tmp = fold_build2_loc (loc, MINUS_EXPR, type, to2, from2); + tmp = fold_convert (utype, tmp); + tmp = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype, tmp, step2); + tmp = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, countm1, tmp); gfc_add_expr_to_block (&block, tmp); } else @@ -973,35 +1559,30 @@ gfc_trans_do (gfc_code * code) This would probably cause more problems that it solves when we implement "long double" types. */ - tmp = fold_build2 (MINUS_EXPR, type, to, from); - tmp = fold_build2 (RDIV_EXPR, type, tmp, step); - tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp); + tmp = fold_build2_loc (loc, MINUS_EXPR, type, to, from); + tmp = fold_build2_loc (loc, RDIV_EXPR, type, tmp, step); + tmp = fold_build1_loc (loc, FIX_TRUNC_EXPR, utype, tmp); gfc_add_modify (&block, countm1, tmp); /* We need a special check for empty loops: empty = (step > 0 ? to < from : to > from); */ - tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step, - fold_build2 (LT_EXPR, boolean_type_node, to, from), - fold_build2 (GT_EXPR, boolean_type_node, to, from)); + tmp = fold_build3_loc (loc, COND_EXPR, boolean_type_node, pos_step, + fold_build2_loc (loc, LT_EXPR, + boolean_type_node, to, from), + fold_build2_loc (loc, GT_EXPR, + boolean_type_node, to, from)); /* If the loop is empty, go directly to the exit label. 
*/ - tmp = fold_build3 (COND_EXPR, void_type_node, tmp, + tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp, build1_v (GOTO_EXPR, exit_label), - build_empty_stmt ()); + build_empty_stmt (input_location)); gfc_add_expr_to_block (&block, tmp); } /* Loop body. */ gfc_start_block (&body); - /* Put these labels where they can be found later. We put the - labels in a TREE_LIST node (because TREE_CHAIN is already - used). cycle_label goes in TREE_PURPOSE (backend_decl), exit - label in TREE_VALUE (backend_decl). */ - - code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL); - /* Main loop body. */ - tmp = gfc_trans_code (code->block->next); + tmp = gfc_trans_code_cond (code->block->next, exit_cond); gfc_add_expr_to_block (&body, tmp); /* Label for cycle statements (if needed). */ @@ -1011,27 +1592,50 @@ gfc_trans_do (gfc_code * code) gfc_add_expr_to_block (&body, tmp); } + /* Check whether someone has modified the loop variable. */ + if (gfc_option.rtcheck & GFC_RTCHECK_DO) + { + tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node, dovar, + saved_dovar); + gfc_trans_runtime_check (true, false, tmp, &body, &code->loc, + "Loop variable has been modified"); + } + + /* Exit the loop if there is an I/O result condition or error. */ + if (exit_cond) + { + tmp = build1_v (GOTO_EXPR, exit_label); + tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, + exit_cond, tmp, + build_empty_stmt (input_location)); + gfc_add_expr_to_block (&body, tmp); + } + /* Increment the loop variable. */ - tmp = fold_build2 (PLUS_EXPR, type, dovar, step); - gfc_add_modify (&body, dovar, tmp); + tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step); + gfc_add_modify_loc (loc, &body, dovar, tmp); + + if (gfc_option.rtcheck & GFC_RTCHECK_DO) + gfc_add_modify_loc (loc, &body, saved_dovar, dovar); /* End with the loop condition. Loop until countm1 == 0. */ - cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1, - build_int_cst (utype, 0)); - tmp = build1_v (GOTO_EXPR, exit_label); - tmp = fold_build3 (COND_EXPR, void_type_node, - cond, tmp, build_empty_stmt ()); + cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, countm1, + build_int_cst (utype, 0)); + tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label); + tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, + cond, tmp, build_empty_stmt (loc)); gfc_add_expr_to_block (&body, tmp); /* Decrement the loop count. */ - tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1)); - gfc_add_modify (&body, countm1, tmp); + tmp = fold_build2_loc (loc, MINUS_EXPR, utype, countm1, + build_int_cst (utype, 1)); + gfc_add_modify_loc (loc, &body, countm1, tmp); /* End of loop body. */ tmp = gfc_finish_block (&body); /* The for loop itself. */ - tmp = build1_v (LOOP_EXPR, tmp); + tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp); gfc_add_expr_to_block (&block, tmp); /* Add the exit label. */ @@ -1082,19 +1686,22 @@ gfc_trans_do_while (gfc_code * code) exit_label = gfc_build_label_decl (NULL_TREE); /* Put the labels where they can be found later. See gfc_trans_do(). */ - code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL); + code->cycle_label = cycle_label; + code->exit_label = exit_label; /* Create a GIMPLE version of the exit condition. 
*/ gfc_init_se (&cond, NULL); - gfc_conv_expr_val (&cond, code->expr); + gfc_conv_expr_val (&cond, code->expr1); gfc_add_block_to_block (&block, &cond.pre); - cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr); + cond.expr = fold_build1_loc (code->expr1->where.lb->location, + TRUTH_NOT_EXPR, boolean_type_node, cond.expr); /* Build "IF (! cond) GOTO exit_label". */ tmp = build1_v (GOTO_EXPR, exit_label); TREE_USED (exit_label) = 1; - tmp = fold_build3 (COND_EXPR, void_type_node, - cond.expr, tmp, build_empty_stmt ()); + tmp = fold_build3_loc (code->expr1->where.lb->location, COND_EXPR, + void_type_node, cond.expr, tmp, + build_empty_stmt (code->expr1->where.lb->location)); gfc_add_expr_to_block (&block, tmp); /* The main body of the loop. */ @@ -1113,7 +1720,8 @@ gfc_trans_do_while (gfc_code * code) gfc_init_block (&block); /* Build the loop. */ - tmp = build1_v (LOOP_EXPR, tmp); + tmp = fold_build1_loc (code->expr1->where.lb->location, LOOP_EXPR, + void_type_node, tmp); gfc_add_expr_to_block (&block, tmp); /* Add the exit label. */ @@ -1184,7 +1792,7 @@ gfc_trans_integer_select (gfc_code * code) /* Calculate the switch expression. */ gfc_init_se (&se, NULL); - gfc_conv_expr_val (&se, code->expr); + gfc_conv_expr_val (&se, code->expr1); gfc_add_block_to_block (&block, &se.pre); end_label = gfc_build_label_decl (NULL_TREE); @@ -1193,7 +1801,7 @@ gfc_trans_integer_select (gfc_code * code) for (c = code->block; c; c = c->block) { - for (cp = c->ext.case_list; cp; cp = cp->next) + for (cp = c->ext.block.case_list; cp; cp = cp->next) { tree low, high; tree label; @@ -1247,8 +1855,7 @@ gfc_trans_integer_select (gfc_code * code) /* Add this case label. Add parameter 'label', make it match GCC backend. */ - tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node, - low, high, label); + tmp = build_case_label (low, high, label); gfc_add_expr_to_block (&body, tmp); } @@ -1305,7 +1912,7 @@ gfc_trans_logical_select (gfc_code * code) always executed, and we don't generate code a COND_EXPR. */ for (c = code->block; c; c = c->block) { - for (cp = c->ext.case_list; cp; cp = cp->next) + for (cp = c->ext.block.case_list; cp; cp = cp->next) { if (cp->low) { @@ -1325,7 +1932,7 @@ gfc_trans_logical_select (gfc_code * code) /* Calculate the switch expression. We always need to do this because it may have side effects. */ gfc_init_se (&se, NULL); - gfc_conv_expr_val (&se, code->expr); + gfc_conv_expr_val (&se, code->expr1); gfc_add_block_to_block (&block, &se.pre); if (t == f && t != NULL) @@ -1339,8 +1946,8 @@ gfc_trans_logical_select (gfc_code * code) { tree true_tree, false_tree, stmt; - true_tree = build_empty_stmt (); - false_tree = build_empty_stmt (); + true_tree = build_empty_stmt (input_location); + false_tree = build_empty_stmt (input_location); /* If we have a case for .TRUE. and for .FALSE., discard the default case. Otherwise, if .TRUE. or .FALSE. is missing and there is a default case, @@ -1363,8 +1970,8 @@ gfc_trans_logical_select (gfc_code * code) if (f != NULL) false_tree = gfc_trans_code (f->next); - stmt = fold_build3 (COND_EXPR, void_type_node, se.expr, - true_tree, false_tree); + stmt = fold_build3_loc (input_location, COND_EXPR, void_type_node, + se.expr, true_tree, false_tree); gfc_add_expr_to_block (&block, stmt); } @@ -1372,6 +1979,10 @@ gfc_trans_logical_select (gfc_code * code) } +/* The jump table types are stored in static variables to avoid + constructing them from scratch every single time. 
*/ +static GTY(()) tree select_struct[2]; + /* Translate the SELECT CASE construct for CHARACTER case expressions. Instead of generating compares and jumps, it is far simpler to generate a data structure describing the cases in order and call a @@ -1384,45 +1995,200 @@ gfc_trans_logical_select (gfc_code * code) static tree gfc_trans_character_select (gfc_code *code) { - tree init, node, end_label, tmp, type, case_num, label, fndecl; + tree init, end_label, tmp, type, case_num, label, fndecl; stmtblock_t block, body; gfc_case *cp, *d; gfc_code *c; - gfc_se se; + gfc_se se, expr1se; int n, k; + VEC(constructor_elt,gc) *inits = NULL; + + tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind); /* The jump table types are stored in static variables to avoid constructing them from scratch every single time. */ - static tree select_struct[2]; static tree ss_string1[2], ss_string1_len[2]; static tree ss_string2[2], ss_string2_len[2]; static tree ss_target[2]; - tree pchartype = gfc_get_pchar_type (code->expr->ts.kind); + cp = code->block->ext.block.case_list; + while (cp->left != NULL) + cp = cp->left; + + /* Generate the body */ + gfc_start_block (&block); + gfc_init_se (&expr1se, NULL); + gfc_conv_expr_reference (&expr1se, code->expr1); + + gfc_add_block_to_block (&block, &expr1se.pre); + + end_label = gfc_build_label_decl (NULL_TREE); + + gfc_init_block (&body); + + /* Attempt to optimize length 1 selects. */ + if (integer_onep (expr1se.string_length)) + { + for (d = cp; d; d = d->right) + { + int i; + if (d->low) + { + gcc_assert (d->low->expr_type == EXPR_CONSTANT + && d->low->ts.type == BT_CHARACTER); + if (d->low->value.character.length > 1) + { + for (i = 1; i < d->low->value.character.length; i++) + if (d->low->value.character.string[i] != ' ') + break; + if (i != d->low->value.character.length) + { + if (optimize && d->high && i == 1) + { + gcc_assert (d->high->expr_type == EXPR_CONSTANT + && d->high->ts.type == BT_CHARACTER); + if (d->high->value.character.length > 1 + && (d->low->value.character.string[0] + == d->high->value.character.string[0]) + && d->high->value.character.string[1] != ' ' + && ((d->low->value.character.string[1] < ' ') + == (d->high->value.character.string[1] + < ' '))) + continue; + } + break; + } + } + } + if (d->high) + { + gcc_assert (d->high->expr_type == EXPR_CONSTANT + && d->high->ts.type == BT_CHARACTER); + if (d->high->value.character.length > 1) + { + for (i = 1; i < d->high->value.character.length; i++) + if (d->high->value.character.string[i] != ' ') + break; + if (i != d->high->value.character.length) + break; + } + } + } + if (d == NULL) + { + tree ctype = gfc_get_char_type (code->expr1->ts.kind); + + for (c = code->block; c; c = c->block) + { + for (cp = c->ext.block.case_list; cp; cp = cp->next) + { + tree low, high; + tree label; + gfc_char_t r; + + /* Assume it's the default case. */ + low = high = NULL_TREE; + + if (cp->low) + { + /* CASE ('ab') or CASE ('ab':'az') will never match + any length 1 character. */ + if (cp->low->value.character.length > 1 + && cp->low->value.character.string[1] != ' ') + continue; + + if (cp->low->value.character.length > 0) + r = cp->low->value.character.string[0]; + else + r = ' '; + low = build_int_cst (ctype, r); + + /* If there's only a lower bound, set the high bound + to the maximum value of the case expression. 
*/ + if (!cp->high) + high = TYPE_MAX_VALUE (ctype); + } + + if (cp->high) + { + if (!cp->low + || (cp->low->value.character.string[0] + != cp->high->value.character.string[0])) + { + if (cp->high->value.character.length > 0) + r = cp->high->value.character.string[0]; + else + r = ' '; + high = build_int_cst (ctype, r); + } + + /* Unbounded case. */ + if (!cp->low) + low = TYPE_MIN_VALUE (ctype); + } + + /* Build a label. */ + label = gfc_build_label_decl (NULL_TREE); + + /* Add this case label. + Add parameter 'label', make it match GCC backend. */ + tmp = build_case_label (low, high, label); + gfc_add_expr_to_block (&body, tmp); + } + + /* Add the statements for this case. */ + tmp = gfc_trans_code (c->next); + gfc_add_expr_to_block (&body, tmp); + + /* Break to the end of the construct. */ + tmp = build1_v (GOTO_EXPR, end_label); + gfc_add_expr_to_block (&body, tmp); + } + + tmp = gfc_string_to_single_character (expr1se.string_length, + expr1se.expr, + code->expr1->ts.kind); + case_num = gfc_create_var (ctype, "case_num"); + gfc_add_modify (&block, case_num, tmp); + + gfc_add_block_to_block (&block, &expr1se.post); - if (code->expr->ts.kind == 1) + tmp = gfc_finish_block (&body); + tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE); + gfc_add_expr_to_block (&block, tmp); + + tmp = build1_v (LABEL_EXPR, end_label); + gfc_add_expr_to_block (&block, tmp); + + return gfc_finish_block (&block); + } + } + + if (code->expr1->ts.kind == 1) k = 0; - else if (code->expr->ts.kind == 4) + else if (code->expr1->ts.kind == 4) k = 1; else gcc_unreachable (); if (select_struct[k] == NULL) { + tree *chain = NULL; select_struct[k] = make_node (RECORD_TYPE); - if (code->expr->ts.kind == 1) + if (code->expr1->ts.kind == 1) TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1"); - else if (code->expr->ts.kind == 4) + else if (code->expr1->ts.kind == 4) TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4"); else gcc_unreachable (); #undef ADD_FIELD -#define ADD_FIELD(NAME, TYPE) \ - ss_##NAME[k] = gfc_add_field_to_struct \ - (&(TYPE_FIELDS (select_struct[k])), select_struct[k], \ - get_identifier (stringize(NAME)), TYPE) +#define ADD_FIELD(NAME, TYPE) \ + ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \ + get_identifier (stringize(NAME)), \ + TYPE, \ + &chain) ADD_FIELD (string1, pchartype); ADD_FIELD (string1_len, gfc_charlen_type_node); @@ -1436,28 +2202,19 @@ gfc_trans_character_select (gfc_code *code) gfc_finish_type (select_struct[k]); } - cp = code->block->ext.case_list; - while (cp->left != NULL) - cp = cp->left; - n = 0; for (d = cp; d; d = d->right) d->n = n++; - end_label = gfc_build_label_decl (NULL_TREE); - - /* Generate the body */ - gfc_start_block (&block); - gfc_init_block (&body); - for (c = code->block; c; c = c->block) { - for (d = c->ext.case_list; d; d = d->next) + for (d = c->ext.block.case_list; d; d = d->next) { label = gfc_build_label_decl (NULL_TREE); - tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node, - build_int_cst (NULL_TREE, d->n), - build_int_cst (NULL_TREE, d->n), label); + tmp = build_case_label ((d->low == NULL && d->high == NULL) + ? 
NULL + : build_int_cst (integer_type_node, d->n), + NULL, label); gfc_add_expr_to_block (&body, tmp); } @@ -1469,52 +2226,50 @@ gfc_trans_character_select (gfc_code *code) } /* Generate the structure describing the branches */ - init = NULL_TREE; - - for(d = cp; d; d = d->right) + for (d = cp; d; d = d->right) { - node = NULL_TREE; + VEC(constructor_elt,gc) *node = NULL; gfc_init_se (&se, NULL); if (d->low == NULL) { - node = tree_cons (ss_string1[k], null_pointer_node, node); - node = tree_cons (ss_string1_len[k], integer_zero_node, node); + CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node); + CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node); } else { gfc_conv_expr_reference (&se, d->low); - node = tree_cons (ss_string1[k], se.expr, node); - node = tree_cons (ss_string1_len[k], se.string_length, node); + CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr); + CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length); } if (d->high == NULL) { - node = tree_cons (ss_string2[k], null_pointer_node, node); - node = tree_cons (ss_string2_len[k], integer_zero_node, node); + CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node); + CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node); } else { gfc_init_se (&se, NULL); gfc_conv_expr_reference (&se, d->high); - node = tree_cons (ss_string2[k], se.expr, node); - node = tree_cons (ss_string2_len[k], se.string_length, node); + CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr); + CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length); } - node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n), - node); + CONSTRUCTOR_APPEND_ELT (node, ss_target[k], + build_int_cst (integer_type_node, d->n)); - tmp = build_constructor_from_list (select_struct[k], nreverse (node)); - init = tree_cons (NULL_TREE, tmp, init); + tmp = build_constructor (select_struct[k], node); + CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp); } type = build_array_type (select_struct[k], - build_index_type (build_int_cst (NULL_TREE, n-1))); + build_index_type (size_int (n-1))); - init = build_constructor_from_list (type, nreverse(init)); + init = build_constructor (type, inits); TREE_CONSTANT (init) = 1; TREE_STATIC (init) = 1; /* Create a static variable to hold the jump table. 
*/ @@ -1528,24 +2283,21 @@ gfc_trans_character_select (gfc_code *code) /* Build the library call */ init = gfc_build_addr_expr (pvoid_type_node, init); - gfc_init_se (&se, NULL); - gfc_conv_expr_reference (&se, code->expr); - - gfc_add_block_to_block (&block, &se.pre); - - if (code->expr->ts.kind == 1) + if (code->expr1->ts.kind == 1) fndecl = gfor_fndecl_select_string; - else if (code->expr->ts.kind == 4) + else if (code->expr1->ts.kind == 4) fndecl = gfor_fndecl_select_string_char4; else gcc_unreachable (); - tmp = build_call_expr (fndecl, 4, init, build_int_cst (NULL_TREE, n), - se.expr, se.string_length); + tmp = build_call_expr_loc (input_location, + fndecl, 4, init, + build_int_cst (gfc_charlen_type_node, n), + expr1se.expr, expr1se.string_length); case_num = gfc_create_var (integer_type_node, "case_num"); gfc_add_modify (&block, case_num, tmp); - gfc_add_block_to_block (&block, &se.post); + gfc_add_block_to_block (&block, &expr1se.post); tmp = gfc_finish_block (&body); tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE); @@ -1575,22 +2327,47 @@ gfc_trans_character_select (gfc_code *code) tree gfc_trans_select (gfc_code * code) { - gcc_assert (code && code->expr); + stmtblock_t block; + tree body; + tree exit_label; + + gcc_assert (code && code->expr1); + gfc_init_block (&block); + + /* Build the exit label and hang it in. */ + exit_label = gfc_build_label_decl (NULL_TREE); + code->exit_label = exit_label; /* Empty SELECT constructs are legal. */ if (code->block == NULL) - return build_empty_stmt (); + body = build_empty_stmt (input_location); /* Select the correct translation function. */ - switch (code->expr->ts.type) - { - case BT_LOGICAL: return gfc_trans_logical_select (code); - case BT_INTEGER: return gfc_trans_integer_select (code); - case BT_CHARACTER: return gfc_trans_character_select (code); - default: - gfc_internal_error ("gfc_trans_select(): Bad type for case expr."); - /* Not reached */ - } + else + switch (code->expr1->ts.type) + { + case BT_LOGICAL: + body = gfc_trans_logical_select (code); + break; + + case BT_INTEGER: + body = gfc_trans_integer_select (code); + break; + + case BT_CHARACTER: + body = gfc_trans_character_select (code); + break; + + default: + gfc_internal_error ("gfc_trans_select(): Bad type for case expr."); + /* Not reached */ + } + + /* Build everything together. */ + gfc_add_expr_to_block (&block, body); + gfc_add_expr_to_block (&block, build1_v (LABEL_EXPR, exit_label)); + + return gfc_finish_block (&block); } @@ -1658,23 +2435,22 @@ forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post) tree tmp; /* Build a copy of the lvalue. */ - old_symtree = c->expr->symtree; + old_symtree = c->expr1->symtree; old_sym = old_symtree->n.sym; e = gfc_lval_expr_from_sym (old_sym); if (old_sym->attr.dimension) { gfc_init_se (&tse, NULL); - gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN); + gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false); gfc_add_block_to_block (pre, &tse.pre); gfc_add_block_to_block (post, &tse.post); - tse.expr = build_fold_indirect_ref (tse.expr); + tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr); if (e->ts.type != BT_CHARACTER) { /* Use the variable offset for the temporary. 
*/ - tmp = gfc_conv_descriptor_offset (tse.expr); - gfc_add_modify (pre, tmp, - gfc_conv_array_offset (old_sym->backend_decl)); + tmp = gfc_conv_array_offset (old_sym->backend_decl); + gfc_conv_descriptor_offset_set (pre, tse.expr, tmp); } } else @@ -1699,7 +2475,7 @@ forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post) } tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true, - e->expr_type == EXPR_VARIABLE); + e->expr_type == EXPR_VARIABLE, true); gfc_add_expr_to_block (pre, tmp); } gfc_free_expr (e); @@ -1708,6 +2484,7 @@ forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post) new_sym = gfc_new_symbol (old_sym->name, NULL); new_sym->ts = old_sym->ts; new_sym->attr.referenced = 1; + new_sym->attr.temporary = 1; new_sym->attr.dimension = old_sym->attr.dimension; new_sym->attr.flavor = old_sym->attr.flavor; @@ -1722,7 +2499,7 @@ forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post) /* Go through the expression reference replacing the old_symtree with the new. */ - forall_replace_symtree (c->expr, old_sym, 2); + forall_replace_symtree (c->expr1, old_sym, 2); /* Now we have made this temporary, we might as well use it for the right hand side. */ @@ -1739,8 +2516,8 @@ check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post) int need_temp; gfc_symbol *lsym; - lsym = c->expr->symtree->n.sym; - need_temp = gfc_check_dependency (c->expr, c->expr2, 0); + lsym = c->expr1->symtree->n.sym; + need_temp = gfc_check_dependency (c->expr1, c->expr2, 0); /* Now check for dependencies within the 'variable' expression itself. These are treated by making a complete @@ -1750,11 +2527,11 @@ check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post) pointer components. We therefore leave these to their own devices. */ if (lsym->ts.type == BT_DERIVED - && lsym->ts.derived->attr.pointer_comp) + && lsym->ts.u.derived->attr.pointer_comp) return need_temp; new_symtree = NULL; - if (find_forall_index (c->expr, lsym, 2) == SUCCESS) + if (find_forall_index (c->expr1, lsym, 2) == SUCCESS) { forall_make_variable_temp (c, pre, post); need_temp = 0; @@ -1762,12 +2539,12 @@ check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post) /* Substrings with dependencies are treated in the same way. */ - if (c->expr->ts.type == BT_CHARACTER - && c->expr->ref + if (c->expr1->ts.type == BT_CHARACTER + && c->expr1->ref && c->expr2->expr_type == EXPR_VARIABLE && lsym == c->expr2->symtree->n.sym) { - for (lref = c->expr->ref; lref; lref = lref->next) + for (lref = c->expr1->ref; lref; lref = lref->next) if (lref->type == REF_SUBSTRING) break; for (rref = c->expr2->ref; rref; rref = rref->next) @@ -1788,10 +2565,10 @@ check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post) static void cleanup_forall_symtrees (gfc_code *c) { - forall_restore_symtree (c->expr); + forall_restore_symtree (c->expr1); forall_restore_symtree (c->expr2); - gfc_free (new_symtree->n.sym); - gfc_free (new_symtree); + free (new_symtree->n.sym); + free (new_symtree); } @@ -1850,18 +2627,19 @@ gfc_trans_forall_loop (forall_info *forall_tmp, tree body, gfc_init_block (&block); /* The exit condition. 
*/ - cond = fold_build2 (LE_EXPR, boolean_type_node, - count, build_int_cst (TREE_TYPE (count), 0)); + cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, + count, build_int_cst (TREE_TYPE (count), 0)); tmp = build1_v (GOTO_EXPR, exit_label); - tmp = fold_build3 (COND_EXPR, void_type_node, - cond, tmp, build_empty_stmt ()); + tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, + cond, tmp, build_empty_stmt (input_location)); gfc_add_expr_to_block (&block, tmp); /* The main loop body. */ gfc_add_expr_to_block (&block, body); /* Increment the loop variable. */ - tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step); + tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), var, + step); gfc_add_modify (&block, var, tmp); /* Advance to the next mask element. Only do this for the @@ -1869,14 +2647,14 @@ gfc_trans_forall_loop (forall_info *forall_tmp, tree body, if (n == 0 && mask_flag && forall_tmp->mask) { tree maskindex = forall_tmp->maskindex; - tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, - maskindex, gfc_index_one_node); + tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, + maskindex, gfc_index_one_node); gfc_add_modify (&block, maskindex, tmp); } /* Decrement the loop counter. */ - tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count, - build_int_cst (TREE_TYPE (var), 1)); + tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), count, + build_int_cst (TREE_TYPE (var), 1)); gfc_add_modify (&block, count, tmp); body = gfc_finish_block (&block); @@ -1887,9 +2665,12 @@ gfc_trans_forall_loop (forall_info *forall_tmp, tree body, /* Initialize the loop counter. */ - tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start); - tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp); - tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step); + tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), step, + start); + tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), end, + tmp); + tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, TREE_TYPE (var), + tmp, step); gfc_add_modify (&block, count, tmp); /* The loop expression. */ @@ -1936,7 +2717,8 @@ gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body, if (mask) { tmp = gfc_build_array_ref (mask, maskindex, NULL); - body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ()); + body = build3_v (COND_EXPR, tmp, body, + build_empty_stmt (input_location)); } } body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header); @@ -1961,10 +2743,8 @@ gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock, tree tmp; if (INTEGER_CST_P (size)) - { - tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size, - gfc_index_one_node); - } + tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type, + size, gfc_index_one_node); else tmp = NULL_TREE; @@ -2022,8 +2802,8 @@ generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3, gfc_add_block_to_block (&block, &lse.post); /* Increment the count1. */ - tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1, - gfc_index_one_node); + tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1), + count1, gfc_index_one_node); gfc_add_modify (&block, count1, tmp); tmp = gfc_finish_block (&block); @@ -2061,32 +2841,34 @@ generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3, /* Use the scalar assignment. 
*/ rse.string_length = lse.string_length; - tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false); + tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true); /* Form the mask expression according to the mask tree list. */ if (wheremask) { wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL); if (invert) - wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR, - TREE_TYPE (wheremaskexpr), - wheremaskexpr); - tmp = fold_build3 (COND_EXPR, void_type_node, - wheremaskexpr, tmp, build_empty_stmt ()); + wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR, + TREE_TYPE (wheremaskexpr), + wheremaskexpr); + tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, + wheremaskexpr, tmp, + build_empty_stmt (input_location)); } gfc_add_expr_to_block (&body, tmp); /* Increment count1. */ - tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, - count1, gfc_index_one_node); + tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, + count1, gfc_index_one_node); gfc_add_modify (&body, count1, tmp); /* Increment count3. */ if (count3) { - tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, - count3, gfc_index_one_node); + tmp = fold_build2_loc (input_location, PLUS_EXPR, + gfc_array_index_type, count3, + gfc_index_one_node); gfc_add_modify (&body, count3, tmp); } @@ -2158,18 +2940,19 @@ generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3, /* Use the scalar assignment. */ lse.string_length = rse.string_length; tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true, - expr2->expr_type == EXPR_VARIABLE); + expr2->expr_type == EXPR_VARIABLE, true); /* Form the mask expression according to the mask tree list. */ if (wheremask) { wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL); if (invert) - wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR, - TREE_TYPE (wheremaskexpr), - wheremaskexpr); - tmp = fold_build3 (COND_EXPR, void_type_node, - wheremaskexpr, tmp, build_empty_stmt ()); + wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR, + TREE_TYPE (wheremaskexpr), + wheremaskexpr); + tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, + wheremaskexpr, tmp, + build_empty_stmt (input_location)); } gfc_add_expr_to_block (&body1, tmp); @@ -2179,22 +2962,23 @@ generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3, gfc_add_block_to_block (&block, &body1); /* Increment count1. */ - tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1, - gfc_index_one_node); + tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1), + count1, gfc_index_one_node); gfc_add_modify (&block, count1, tmp); } else { /* Increment count1. */ - tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, - count1, gfc_index_one_node); + tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, + count1, gfc_index_one_node); gfc_add_modify (&body1, count1, tmp); /* Increment count3. */ if (count3) { - tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, - count3, gfc_index_one_node); + tmp = fold_build2_loc (input_location, PLUS_EXPR, + gfc_array_index_type, + count3, gfc_index_one_node); gfc_add_modify (&body1, count3, tmp); } @@ -2239,13 +3023,8 @@ compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2, /* Walk the RHS of the expression. */ *rss = gfc_walk_expr (expr2); if (*rss == gfc_ss_terminator) - { - /* The rhs is scalar. Add a ss for the expression. 
*/ - *rss = gfc_get_ss (); - (*rss)->next = gfc_ss_terminator; - (*rss)->type = GFC_SS_SCALAR; - (*rss)->expr = expr2; - } + /* The rhs is scalar. Add a ss for the expression. */ + *rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2); /* Associate the SS with the loop. */ gfc_add_ss_to_loop (&loop, *lss); @@ -2258,20 +3037,22 @@ compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2, loop.array_parameter = 1; /* Calculate the bounds of the scalarization. */ - save_flag = flag_bounds_check; - flag_bounds_check = 0; + save_flag = gfc_option.rtcheck; + gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS; gfc_conv_ss_startstride (&loop); - flag_bounds_check = save_flag; + gfc_option.rtcheck = save_flag; gfc_conv_loop_setup (&loop, &expr2->where); /* Figure out how many elements we need. */ for (i = 0; i < loop.dimen; i++) { - tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, - gfc_index_one_node, loop.from[i]); - tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, - tmp, loop.to[i]); - size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp); + tmp = fold_build2_loc (input_location, MINUS_EXPR, + gfc_array_index_type, + gfc_index_one_node, loop.from[i]); + tmp = fold_build2_loc (input_location, PLUS_EXPR, + gfc_array_index_type, tmp, loop.to[i]); + size = fold_build2_loc (input_location, MULT_EXPR, + gfc_array_index_type, size, tmp); } gfc_add_block_to_block (pblock, &loop.pre); size = gfc_evaluate_now (size, pblock); @@ -2309,8 +3090,9 @@ compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size, && !forall_tmp->mask && INTEGER_CST_P (forall_tmp->size)) { - inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type, - inner_size, forall_tmp->size); + inner_size = fold_build2_loc (input_location, MULT_EXPR, + gfc_array_index_type, + inner_size, forall_tmp->size); forall_tmp = forall_tmp->prev_nest; } @@ -2327,8 +3109,8 @@ compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size, if (inner_size_body) gfc_add_block_to_block (&body, inner_size_body); if (forall_tmp) - tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, - number, inner_size); + tmp = fold_build2_loc (input_location, PLUS_EXPR, + gfc_array_index_type, number, inner_size); else tmp = inner_size; gfc_add_modify (&body, number, tmp); @@ -2357,7 +3139,8 @@ allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block, unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type)); if (!integer_onep (unit)) - bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit); + bytesize = fold_build2_loc (input_location, MULT_EXPR, + gfc_array_index_type, size, unit); else bytesize = size; @@ -2365,7 +3148,7 @@ allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block, tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type); if (*ptemp1) - tmp = build_fold_indirect_ref (tmp); + tmp = build_fold_indirect_ref_loc (input_location, tmp); return tmp; } @@ -2457,17 +3240,17 @@ gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2, &lss, &rss); /* The type of LHS. 
Used in function allocate_temp_for_forall_nest */ - if (expr1->ts.type == BT_CHARACTER && expr1->ts.cl->length) + if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length) { - if (!expr1->ts.cl->backend_decl) + if (!expr1->ts.u.cl->backend_decl) { gfc_se tse; gfc_init_se (&tse, NULL); - gfc_conv_expr (&tse, expr1->ts.cl->length); - expr1->ts.cl->backend_decl = tse.expr; + gfc_conv_expr (&tse, expr1->ts.u.cl->length); + expr1->ts.u.cl->backend_decl = tse.expr; } type = gfc_get_character_type_len (gfc_default_character_kind, - expr1->ts.cl->backend_decl); + expr1->ts.u.cl->backend_decl); } else type = gfc_typenode_for_spec (&expr1->ts); @@ -2535,7 +3318,7 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2, count = gfc_create_var (gfc_array_index_type, "count"); gfc_add_modify (block, count, gfc_index_zero_node); - inner_size = integer_one_node; + inner_size = gfc_index_one_node; lss = gfc_walk_expr (expr1); rss = gfc_walk_expr (expr2); if (lss == gfc_ss_terminator) @@ -2559,8 +3342,8 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2, gfc_add_block_to_block (&body, &rse.post); /* Increment count. */ - tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, - count, gfc_index_one_node); + tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, + count, gfc_index_one_node); gfc_add_modify (&body, count, tmp); tmp = gfc_finish_block (&body); @@ -2583,8 +3366,8 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2, gfc_add_modify (&body, lse.expr, rse.expr); gfc_add_block_to_block (&body, &lse.post); /* Increment count. */ - tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, - count, gfc_index_one_node); + tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, + count, gfc_index_one_node); gfc_add_modify (&body, count, tmp); tmp = gfc_finish_block (&body); @@ -2610,9 +3393,9 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2, /* Make a new descriptor. */ parmtype = gfc_get_element_type (TREE_TYPE (desc)); - parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, + parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0, loop.from, loop.to, 1, - GFC_ARRAY_UNKNOWN); + GFC_ARRAY_UNKNOWN, true); /* Allocate temporary for nested forall construct. */ tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype, @@ -2628,8 +3411,8 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2, gfc_add_block_to_block (&body, &lse.post); /* Increment count. */ - tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, - count, gfc_index_one_node); + tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, + count, gfc_index_one_node); gfc_add_modify (&body, count, tmp); tmp = gfc_finish_block (&body); @@ -2652,8 +3435,8 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2, gfc_add_block_to_block (&body, &lse.post); /* Increment count. */ - tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, - count, gfc_index_one_node); + tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, + count, gfc_index_one_node); gfc_add_modify (&body, count, tmp); tmp = gfc_finish_block (&body); @@ -2726,6 +3509,7 @@ gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info) tree maskindex; tree mask; tree pmask; + tree cycle_label = NULL_TREE; int n; int nvar; int need_temp; @@ -2738,10 +3522,10 @@ gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info) bool need_mask; /* Do nothing if the mask is false. 
*/ - if (code->expr - && code->expr->expr_type == EXPR_CONSTANT - && !code->expr->value.logical) - return build_empty_stmt (); + if (code->expr1 + && code->expr1->expr_type == EXPR_CONSTANT + && !code->expr1->value.logical) + return build_empty_stmt (input_location); n = 0; /* Count the FORALL index number. */ @@ -2750,15 +3534,15 @@ gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info) nvar = n; /* Allocate the space for var, start, end, step, varexpr. */ - var = (tree *) gfc_getmem (nvar * sizeof (tree)); - start = (tree *) gfc_getmem (nvar * sizeof (tree)); - end = (tree *) gfc_getmem (nvar * sizeof (tree)); - step = (tree *) gfc_getmem (nvar * sizeof (tree)); - varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *)); - saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var)); + var = XCNEWVEC (tree, nvar); + start = XCNEWVEC (tree, nvar); + end = XCNEWVEC (tree, nvar); + step = XCNEWVEC (tree, nvar); + varexpr = XCNEWVEC (gfc_expr *, nvar); + saved_vars = XCNEWVEC (gfc_saved_var, nvar); /* Allocate the space for info. */ - info = (forall_info *) gfc_getmem (sizeof (forall_info)); + info = XCNEW (forall_info); gfc_start_block (&pre); gfc_init_block (&post); @@ -2770,7 +3554,7 @@ gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info) gfc_symbol *sym = fa->var->symtree->n.sym; /* Allocate space for this_forall. */ - this_forall = (iter_info *) gfc_getmem (sizeof (iter_info)); + this_forall = XCNEW (iter_info); /* Create a temporary variable for the FORALL index. */ tmp = gfc_typenode_for_spec (&sym->ts); @@ -2829,25 +3613,27 @@ gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info) for (n = 0; n < nvar; n++) { /* size = (end + step - start) / step. */ - tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]), - step[n], start[n]); - tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp); - - tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]); + tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (start[n]), + step[n], start[n]); + tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (end[n]), + end[n], tmp); + tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, TREE_TYPE (tmp), + tmp, step[n]); tmp = convert (gfc_array_index_type, tmp); - size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp); + size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type, + size, tmp); } /* Record the nvar and size of current forall level. */ info->nvar = nvar; info->size = size; - if (code->expr) + if (code->expr1) { /* If the mask is .true., consider the FORALL unconditional. */ - if (code->expr->expr_type == EXPR_CONSTANT - && code->expr->value.logical) + if (code->expr1->expr_type == EXPR_CONSTANT + && code->expr1->value.logical) need_mask = false; else need_mask = true; @@ -2893,7 +3679,7 @@ gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info) /* Evaluate the mask expression. */ gfc_init_se (&se, NULL); - gfc_conv_expr_val (&se, code->expr); + gfc_conv_expr_val (&se, code->expr1); gfc_add_block_to_block (&body, &se.pre); /* Store the mask. */ @@ -2903,8 +3689,8 @@ gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info) gfc_add_modify (&body, tmp, se.expr); /* Advance to the next mask element. 
*/ - tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, - maskindex, gfc_index_one_node); + tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, + maskindex, gfc_index_one_node); gfc_add_modify (&body, maskindex, tmp); /* Generate the loops. */ @@ -2913,6 +3699,26 @@ gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info) gfc_add_expr_to_block (&block, tmp); } + if (code->op == EXEC_DO_CONCURRENT) + { + gfc_init_block (&body); + cycle_label = gfc_build_label_decl (NULL_TREE); + code->cycle_label = cycle_label; + tmp = gfc_trans_code (code->block->next); + gfc_add_expr_to_block (&body, tmp); + + if (TREE_USED (cycle_label)) + { + tmp = build1_v (LABEL_EXPR, cycle_label); + gfc_add_expr_to_block (&body, tmp); + } + + tmp = gfc_finish_block (&body); + tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1); + gfc_add_expr_to_block (&block, tmp); + goto done; + } + c = code->block->next; /* TODO: loop merging in FORALL statements. */ @@ -2930,12 +3736,12 @@ gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info) /* Temporaries due to array assignment data dependencies introduce no end of problems. */ if (need_temp) - gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false, + gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false, nested_forall_info, &block); else { /* Use the normal assignment copying routines. */ - assign = gfc_trans_assignment (c->expr, c->expr2, false); + assign = gfc_trans_assignment (c->expr1, c->expr2, false, true); /* Generate body and loops. */ tmp = gfc_trans_nested_forall_loop (nested_forall_info, @@ -2957,14 +3763,14 @@ gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info) /* Pointer assignment inside FORALL. */ case EXEC_POINTER_ASSIGN: - need_temp = gfc_check_dependency (c->expr, c->expr2, 0); + need_temp = gfc_check_dependency (c->expr1, c->expr2, 0); if (need_temp) - gfc_trans_pointer_assign_need_temp (c->expr, c->expr2, + gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2, nested_forall_info, &block); else { /* Use the normal assignment copying routines. */ - assign = gfc_trans_pointer_assignment (c->expr, c->expr2); + assign = gfc_trans_pointer_assignment (c->expr1, c->expr2); /* Generate body and loops. */ tmp = gfc_trans_nested_forall_loop (nested_forall_info, @@ -2981,7 +3787,7 @@ gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info) /* Explicit subroutine calls are prevented by the frontend but interface assignments can legitimately produce them. */ case EXEC_ASSIGN_CALL: - assign = gfc_trans_call (c, true); + assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false); tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1); gfc_add_expr_to_block (&block, tmp); break; @@ -2993,20 +3799,28 @@ gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info) c = c->next; } +done: /* Restore the original index variables. */ for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++) gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]); /* Free the space for var, start, end, step, varexpr. */ - gfc_free (var); - gfc_free (start); - gfc_free (end); - gfc_free (step); - gfc_free (varexpr); - gfc_free (saved_vars); + free (var); + free (start); + free (end); + free (step); + free (varexpr); + free (saved_vars); + + for (this_forall = info->this_loop; this_forall;) + { + iter_info *next = this_forall->next; + free (this_forall); + this_forall = next; + } /* Free the space for this forall_info. 
*/ - gfc_free (info); + free (info); if (pmask) { @@ -3032,6 +3846,14 @@ tree gfc_trans_forall (gfc_code * code) } +/* Translate the DO CONCURRENT construct. */ + +tree gfc_trans_do_concurrent (gfc_code * code) +{ + return gfc_trans_forall_1 (code, NULL); +} + + /* Evaluate the WHERE mask expression, copy its value to a temporary. If the WHERE construct is nested in FORALL, compute the overall temporary needed by the WHERE mask expression multiplied by the iterator number of @@ -3111,7 +3933,7 @@ gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info, { tmp = gfc_build_array_ref (mask, count, NULL); if (invert) - tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp); + tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, tmp); gfc_add_modify (&body1, mtmp, tmp); } @@ -3120,16 +3942,18 @@ gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info, tmp1 = gfc_build_array_ref (cmask, count, NULL); tmp = cond; if (mask) - tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp); + tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type, + mtmp, tmp); gfc_add_modify (&body1, tmp1, tmp); } if (pmask) { tmp1 = gfc_build_array_ref (pmask, count, NULL); - tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond); + tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, cond); if (mask) - tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp); + tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type, mtmp, + tmp); gfc_add_modify (&body1, tmp1, tmp); } @@ -3143,8 +3967,8 @@ gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info, else { /* Increment count. */ - tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count, - gfc_index_one_node); + tmp1 = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, + count, gfc_index_one_node); gfc_add_modify (&body1, count, tmp1); /* Generate the copying loops. */ @@ -3176,7 +4000,7 @@ static tree gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2, tree mask, bool invert, tree count1, tree count2, - gfc_symbol *sym) + gfc_code *cnext) { gfc_se lse; gfc_se rse; @@ -3190,6 +4014,10 @@ gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2, stmtblock_t body; tree index, maskexpr; + /* A defined assignment. */ + if (cnext && cnext->resolved_sym) + return gfc_trans_call (cnext, true, mask, count1, invert); + #if 0 /* TODO: handle this special case. Special case a single function returning an array. */ @@ -3231,13 +4059,10 @@ gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2, /* Walk the rhs. */ rss = gfc_walk_expr (expr2); if (rss == gfc_ss_terminator) - { - /* The rhs is scalar. Add a ss for the expression. */ - rss = gfc_get_ss (); - rss->where = 1; - rss->next = gfc_ss_terminator; - rss->type = GFC_SS_SCALAR; - rss->expr = expr2; + { + /* The rhs is scalar. Add a ss for the expression. */ + rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2); + rss->where = 1; } /* Associate the SS with the loop. */ @@ -3277,10 +4102,7 @@ gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2, /* Translate the expression. 
*/ gfc_conv_expr (&rse, expr2); if (lss != gfc_ss_terminator && loop.temp_ss != NULL) - { - gfc_conv_tmp_array_ref (&lse); - gfc_advance_se_ss_chain (&lse); - } + gfc_conv_tmp_array_ref (&lse); else gfc_conv_expr (&lse, expr1); @@ -3288,24 +4110,22 @@ gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2, index = count1; maskexpr = gfc_build_array_ref (mask, index, NULL); if (invert) - maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr); + maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR, + TREE_TYPE (maskexpr), maskexpr); /* Use the scalar assignment as is. */ - if (sym == NULL) - tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, - loop.temp_ss != NULL, false); - else - tmp = gfc_conv_operator_assign (&lse, &rse, sym); + tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, + loop.temp_ss != NULL, false, true); - tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ()); + tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location)); gfc_add_expr_to_block (&body, tmp); if (lss == gfc_ss_terminator) { /* Increment count1. */ - tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, - count1, gfc_index_one_node); + tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, + count1, gfc_index_one_node); gfc_add_modify (&body, count1, tmp); /* Use the scalar assignment as is. */ @@ -3320,8 +4140,8 @@ gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2, { /* Increment count1 before finish the main body of a scalarized expression. */ - tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, - count1, gfc_index_one_node); + tmp = fold_build2_loc (input_location, PLUS_EXPR, + gfc_array_index_type, count1, gfc_index_one_node); gfc_add_modify (&body, count1, tmp); gfc_trans_scalarized_loop_boundary (&loop, &body); @@ -3335,7 +4155,6 @@ gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2, lse.ss = lss; gfc_conv_tmp_array_ref (&rse); - gfc_advance_se_ss_chain (&rse); gfc_conv_expr (&lse, expr1); gcc_assert (lse.ss == gfc_ss_terminator @@ -3345,24 +4164,28 @@ gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2, index = count2; maskexpr = gfc_build_array_ref (mask, index, NULL); if (invert) - maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), - maskexpr); + maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR, + TREE_TYPE (maskexpr), maskexpr); /* Use the scalar assignment as is. */ - tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false); - tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ()); + tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false, + true); + tmp = build3_v (COND_EXPR, maskexpr, tmp, + build_empty_stmt (input_location)); gfc_add_expr_to_block (&body, tmp); /* Increment count2. */ - tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, - count2, gfc_index_one_node); + tmp = fold_build2_loc (input_location, PLUS_EXPR, + gfc_array_index_type, count2, + gfc_index_one_node); gfc_add_modify (&body, count2, tmp); } else { /* Increment count1. */ - tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, - count1, gfc_index_one_node); + tmp = fold_build2_loc (input_location, PLUS_EXPR, + gfc_array_index_type, count1, + gfc_index_one_node); gfc_add_modify (&body, count1, tmp); } @@ -3443,7 +4266,7 @@ gfc_trans_where_2 (gfc_code * code, tree mask, bool invert, /* Two clauses, the first empty, the second non-empty. 
*/ else if (mask) { - need_cmask = (cblock->block->expr != 0); + need_cmask = (cblock->block->expr1 != 0); need_pmask = true; } else @@ -3456,18 +4279,21 @@ gfc_trans_where_2 (gfc_code * code, tree mask, bool invert, { /* Calculate the size of temporary needed by the mask-expr. */ gfc_init_block (&inner_size_body); - inner_size = compute_inner_temp_size (cblock->expr, cblock->expr, + inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1, &inner_size_body, &lss, &rss); + gfc_free_ss_chain (lss); + gfc_free_ss_chain (rss); + /* Calculate the total size of temporary needed. */ size = compute_overall_iter_number (nested_forall_info, inner_size, &inner_size_body, block); /* Check whether the size is negative. */ - cond = fold_build2 (LE_EXPR, boolean_type_node, size, - gfc_index_zero_node); - size = fold_build3 (COND_EXPR, gfc_array_index_type, cond, - gfc_index_zero_node, size); + cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, size, + gfc_index_zero_node); + size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type, + cond, gfc_index_zero_node, size); size = gfc_evaluate_now (size, block); /* Allocate temporary for WHERE mask if needed. */ @@ -3488,7 +4314,7 @@ gfc_trans_where_2 (gfc_code * code, tree mask, bool invert, bottom of the loop. */ /* Has mask-expr. */ - if (cblock->expr) + if (cblock->expr1) { /* Ensure that the WHERE mask will be evaluated exactly once. If there are no statements in this WHERE/ELSEWHERE clause, @@ -3496,13 +4322,13 @@ gfc_trans_where_2 (gfc_code * code, tree mask, bool invert, If this is the last clause of the WHERE construct, then we don't need to update the pending control mask (pmask). */ if (mask) - gfc_evaluate_where_mask (cblock->expr, nested_forall_info, + gfc_evaluate_where_mask (cblock->expr1, nested_forall_info, mask, invert, cblock->next ? cmask : NULL_TREE, cblock->block ? pmask : NULL_TREE, mask_type, block); else - gfc_evaluate_where_mask (cblock->expr, nested_forall_info, + gfc_evaluate_where_mask (cblock->expr1, nested_forall_info, NULL_TREE, false, (cblock->next || cblock->block) ? cmask : NULL_TREE, @@ -3541,7 +4367,7 @@ gfc_trans_where_2 (gfc_code * code, tree mask, bool invert, goto evaluate; case EXEC_ASSIGN: - expr1 = cnext->expr; + expr1 = cnext->expr1; expr2 = cnext->expr2; evaluate: if (nested_forall_info != NULL) @@ -3562,7 +4388,7 @@ gfc_trans_where_2 (gfc_code * code, tree mask, bool invert, tmp = gfc_trans_where_assign (expr1, expr2, cmask, invert, count1, count2, - cnext->resolved_sym); + cnext); tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1); @@ -3580,7 +4406,7 @@ gfc_trans_where_2 (gfc_code * code, tree mask, bool invert, tmp = gfc_trans_where_assign (expr1, expr2, cmask, invert, count1, count2, - cnext->resolved_sym); + cnext); gfc_add_expr_to_block (block, tmp); } @@ -3649,10 +4475,14 @@ gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock) gfc_ss *edss = 0; gfc_ss *esss = 0; - cond = cblock->expr; - tdst = cblock->next->expr; + /* Allow the scalarizer to workshare simple where loops. */ + if (ompws_flags & OMPWS_WORKSHARE_FLAG) + ompws_flags |= OMPWS_SCALARIZER_WS; + + cond = cblock->expr1; + tdst = cblock->next->expr1; tsrc = cblock->next->expr2; - edst = eblock ? eblock->next->expr : NULL; + edst = eblock ? eblock->next->expr1 : NULL; esrc = eblock ? 
eblock->next->expr2 : NULL; gfc_start_block (&block); @@ -3670,11 +4500,8 @@ gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock) tsss = gfc_walk_expr (tsrc); if (tsss == gfc_ss_terminator) { - tsss = gfc_get_ss (); + tsss = gfc_get_scalar_ss (gfc_ss_terminator, tsrc); tsss->where = 1; - tsss->next = gfc_ss_terminator; - tsss->type = GFC_SS_SCALAR; - tsss->expr = tsrc; } gfc_add_ss_to_loop (&loop, tdss); gfc_add_ss_to_loop (&loop, tsss); @@ -3688,11 +4515,8 @@ gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock) esss = gfc_walk_expr (esrc); if (esss == gfc_ss_terminator) { - esss = gfc_get_ss (); + esss = gfc_get_scalar_ss (gfc_ss_terminator, esrc); esss->where = 1; - esss->next = gfc_ss_terminator; - esss->type = GFC_SS_SCALAR; - esss->expr = esrc; } gfc_add_ss_to_loop (&loop, edss); gfc_add_ss_to_loop (&loop, esss); @@ -3732,10 +4556,7 @@ gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock) gfc_conv_expr (&tsse, tsrc); if (tdss != gfc_ss_terminator && loop.temp_ss != NULL) - { - gfc_conv_tmp_array_ref (&tdse); - gfc_advance_se_ss_chain (&tdse); - } + gfc_conv_tmp_array_ref (&tdse); else gfc_conv_expr (&tdse, tdst); @@ -3743,17 +4564,15 @@ gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock) { gfc_conv_expr (&esse, esrc); if (edss != gfc_ss_terminator && loop.temp_ss != NULL) - { - gfc_conv_tmp_array_ref (&edse); - gfc_advance_se_ss_chain (&edse); - } + gfc_conv_tmp_array_ref (&edse); else - gfc_conv_expr (&edse, edst); + gfc_conv_expr (&edse, edst); } - tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false); - estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false) - : build_empty_stmt (); + tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true); + estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, + false, true) + : build_empty_stmt (input_location); tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt); gfc_add_expr_to_block (&body, tmp); gfc_add_block_to_block (&body, &cse.post); @@ -3788,13 +4607,13 @@ gfc_trans_where (gfc_code * code) /* A simple "WHERE (cond) x = y" statement or block is dependence free if cond is not dependent upon writing x, and the source y is unaffected by the destination x. */ - if (!gfc_check_dependency (cblock->next->expr, - cblock->expr, 0) - && !gfc_check_dependency (cblock->next->expr, + if (!gfc_check_dependency (cblock->next->expr1, + cblock->expr1, 0) + && !gfc_check_dependency (cblock->next->expr1, cblock->next->expr2, 0)) return gfc_trans_where_3 (cblock, NULL); } - else if (!eblock->expr + else if (!eblock->expr1 && !eblock->block && eblock->next && eblock->next->op == EXEC_ASSIGN @@ -3810,22 +4629,22 @@ gfc_trans_where (gfc_code * code) are the same. In short, this is VERY conservative and this is needed because the two loops, required by the standard are coalesced in gfc_trans_where_3. 
*/ - if (!gfc_check_dependency(cblock->next->expr, - cblock->expr, 0) - && !gfc_check_dependency(eblock->next->expr, - cblock->expr, 0) - && !gfc_check_dependency(cblock->next->expr, + if (!gfc_check_dependency(cblock->next->expr1, + cblock->expr1, 0) + && !gfc_check_dependency(eblock->next->expr1, + cblock->expr1, 0) + && !gfc_check_dependency(cblock->next->expr1, eblock->next->expr2, 1) - && !gfc_check_dependency(eblock->next->expr, + && !gfc_check_dependency(eblock->next->expr1, cblock->next->expr2, 1) - && !gfc_check_dependency(cblock->next->expr, + && !gfc_check_dependency(cblock->next->expr1, cblock->next->expr2, 1) - && !gfc_check_dependency(eblock->next->expr, + && !gfc_check_dependency(eblock->next->expr1, eblock->next->expr2, 1) - && !gfc_check_dependency(cblock->next->expr, - eblock->next->expr, 0) - && !gfc_check_dependency(eblock->next->expr, - cblock->next->expr, 0)) + && !gfc_check_dependency(cblock->next->expr1, + eblock->next->expr1, 0) + && !gfc_check_dependency(eblock->next->expr1, + cblock->next->expr1, 0)) return gfc_trans_where_3 (cblock, eblock); } } @@ -3847,7 +4666,9 @@ gfc_trans_cycle (gfc_code * code) { tree cycle_label; - cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl); + cycle_label = code->ext.which_construct->cycle_label; + gcc_assert (cycle_label); + TREE_USED (cycle_label) = 1; return build1_v (GOTO_EXPR, cycle_label); } @@ -3862,7 +4683,9 @@ gfc_trans_exit (gfc_code * code) { tree exit_label; - exit_label = TREE_VALUE (code->ext.whichloop->backend_decl); + exit_label = code->ext.which_construct->exit_label; + gcc_assert (exit_label); + TREE_USED (exit_label) = 1; return build1_v (GOTO_EXPR, exit_label); } @@ -3879,125 +4702,435 @@ gfc_trans_allocate (gfc_code * code) tree tmp; tree parm; tree stat; - tree pstat; - tree error_label; + tree errmsg; + tree errlen; + tree label_errmsg; + tree label_finish; + tree memsz; + tree expr3; + tree slen3; stmtblock_t block; + stmtblock_t post; + gfc_expr *sz; + gfc_se se_sz; - if (!code->ext.alloc_list) + if (!code->ext.alloc.list) return NULL_TREE; - gfc_start_block (&block); + stat = tmp = memsz = NULL_TREE; + label_errmsg = label_finish = errmsg = errlen = NULL_TREE; - if (code->expr) + gfc_init_block (&block); + gfc_init_block (&post); + + /* STAT= (and maybe ERRMSG=) is present. */ + if (code->expr1) { + /* STAT=. */ tree gfc_int4_type_node = gfc_get_int_type (4); - stat = gfc_create_var (gfc_int4_type_node, "stat"); - pstat = gfc_build_addr_expr (NULL_TREE, stat); - error_label = gfc_build_label_decl (NULL_TREE); - TREE_USED (error_label) = 1; + /* ERRMSG= only makes sense with STAT=. */ + if (code->expr2) + { + gfc_init_se (&se, NULL); + gfc_conv_expr_lhs (&se, code->expr2); + + errlen = gfc_get_expr_charlen (code->expr2); + errmsg = gfc_build_addr_expr (pchar_type_node, se.expr); + } + else + { + errmsg = null_pointer_node; + errlen = build_int_cst (gfc_charlen_type_node, 0); + } + + /* GOTO destinations. 
*/ + label_errmsg = gfc_build_label_decl (NULL_TREE); + label_finish = gfc_build_label_decl (NULL_TREE); + TREE_USED (label_errmsg) = 1; + TREE_USED (label_finish) = 1; } - else - pstat = stat = error_label = NULL_TREE; - for (al = code->ext.alloc_list; al != NULL; al = al->next) + expr3 = NULL_TREE; + slen3 = NULL_TREE; + + for (al = code->ext.alloc.list; al != NULL; al = al->next) { - expr = al->expr; + expr = gfc_copy_expr (al->expr); + + if (expr->ts.type == BT_CLASS) + gfc_add_data_component (expr); gfc_init_se (&se, NULL); - gfc_start_block (&se.pre); se.want_pointer = 1; se.descriptor_only = 1; gfc_conv_expr (&se, expr); - if (!gfc_array_allocate (&se, expr, pstat)) + if (!gfc_array_allocate (&se, expr, stat, errmsg, errlen)) { /* A scalar or derived type. */ - tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr))); - if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE) - tmp = se.string_length; + /* Determine allocate size. */ + if (al->expr->ts.type == BT_CLASS && code->expr3) + { + if (code->expr3->ts.type == BT_CLASS) + { + sz = gfc_copy_expr (code->expr3); + gfc_add_vptr_component (sz); + gfc_add_size_component (sz); + gfc_init_se (&se_sz, NULL); + gfc_conv_expr (&se_sz, sz); + gfc_free_expr (sz); + memsz = se_sz.expr; + } + else + memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts)); + } + else if (al->expr->ts.type == BT_CHARACTER + && al->expr->ts.deferred && code->expr3) + { + if (!code->expr3->ts.u.cl->backend_decl) + { + /* Convert and use the length expression. */ + gfc_init_se (&se_sz, NULL); + if (code->expr3->expr_type == EXPR_VARIABLE + || code->expr3->expr_type == EXPR_CONSTANT) + { + gfc_conv_expr (&se_sz, code->expr3); + gfc_add_block_to_block (&se.pre, &se_sz.pre); + se_sz.string_length + = gfc_evaluate_now (se_sz.string_length, &se.pre); + gfc_add_block_to_block (&se.pre, &se_sz.post); + memsz = se_sz.string_length; + } + else if (code->expr3->mold + && code->expr3->ts.u.cl + && code->expr3->ts.u.cl->length) + { + gfc_conv_expr (&se_sz, code->expr3->ts.u.cl->length); + gfc_add_block_to_block (&se.pre, &se_sz.pre); + se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre); + gfc_add_block_to_block (&se.pre, &se_sz.post); + memsz = se_sz.expr; + } + else + { + /* This is would be inefficient and possibly could + generate wrong code if the result were not stored + in expr3/slen3. */ + if (slen3 == NULL_TREE) + { + gfc_conv_expr (&se_sz, code->expr3); + gfc_add_block_to_block (&se.pre, &se_sz.pre); + expr3 = gfc_evaluate_now (se_sz.expr, &se.pre); + gfc_add_block_to_block (&post, &se_sz.post); + slen3 = gfc_evaluate_now (se_sz.string_length, + &se.pre); + } + memsz = slen3; + } + } + else + /* Otherwise use the stored string length. */ + memsz = code->expr3->ts.u.cl->backend_decl; + tmp = al->expr->ts.u.cl->backend_decl; + + /* Store the string length. */ + if (tmp && TREE_CODE (tmp) == VAR_DECL) + gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp), + memsz)); + + /* Convert to size in bytes, using the character KIND. 
*/ + tmp = TREE_TYPE (gfc_typenode_for_spec (&al->expr->ts)); + tmp = TYPE_SIZE_UNIT (tmp); + memsz = fold_build2_loc (input_location, MULT_EXPR, + TREE_TYPE (tmp), tmp, + fold_convert (TREE_TYPE (tmp), memsz)); + } + else if (al->expr->ts.type == BT_CHARACTER && al->expr->ts.deferred) + { + gcc_assert (code->ext.alloc.ts.u.cl && code->ext.alloc.ts.u.cl->length); + gfc_init_se (&se_sz, NULL); + gfc_conv_expr (&se_sz, code->ext.alloc.ts.u.cl->length); + gfc_add_block_to_block (&se.pre, &se_sz.pre); + se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre); + gfc_add_block_to_block (&se.pre, &se_sz.post); + /* Store the string length. */ + tmp = al->expr->ts.u.cl->backend_decl; + gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp), + se_sz.expr)); + tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts)); + tmp = TYPE_SIZE_UNIT (tmp); + memsz = fold_build2_loc (input_location, MULT_EXPR, + TREE_TYPE (tmp), tmp, + fold_convert (TREE_TYPE (se_sz.expr), + se_sz.expr)); + } + else if (code->ext.alloc.ts.type != BT_UNKNOWN) + memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts)); + else + memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr))); + + if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE) + { + memsz = se.string_length; + + /* Convert to size in bytes, using the character KIND. */ + tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts)); + tmp = TYPE_SIZE_UNIT (tmp); + memsz = fold_build2_loc (input_location, MULT_EXPR, + TREE_TYPE (tmp), tmp, + fold_convert (TREE_TYPE (tmp), memsz)); + } - tmp = gfc_allocate_with_status (&se.pre, tmp, pstat); - tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr, - fold_convert (TREE_TYPE (se.expr), tmp)); - gfc_add_expr_to_block (&se.pre, tmp); + /* Allocate - for non-pointers with re-alloc checking. */ + if (gfc_expr_attr (expr).allocatable) + gfc_allocate_allocatable (&se.pre, se.expr, memsz, NULL_TREE, + stat, errmsg, errlen, expr); + else + gfc_allocate_using_malloc (&se.pre, se.expr, memsz, stat); - if (code->expr) + if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp) { - tmp = build1_v (GOTO_EXPR, error_label); - parm = fold_build2 (NE_EXPR, boolean_type_node, - stat, build_int_cst (TREE_TYPE (stat), 0)); - tmp = fold_build3 (COND_EXPR, void_type_node, - parm, tmp, build_empty_stmt ()); + tmp = build_fold_indirect_ref_loc (input_location, se.expr); + tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0); gfc_add_expr_to_block (&se.pre, tmp); } + } + + gfc_add_block_to_block (&block, &se.pre); - if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp) + /* Error checking -- Note: ERRMSG only makes sense with STAT. */ + if (code->expr1) + { + /* The coarray library already sets the errmsg. */ + if (gfc_option.coarray == GFC_FCOARRAY_LIB + && gfc_expr_attr (expr).codimension) + tmp = build1_v (GOTO_EXPR, label_finish); + else + tmp = build1_v (GOTO_EXPR, label_errmsg); + + parm = fold_build2_loc (input_location, NE_EXPR, + boolean_type_node, stat, + build_int_cst (TREE_TYPE (stat), 0)); + tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, + gfc_unlikely(parm), tmp, + build_empty_stmt (input_location)); + gfc_add_expr_to_block (&block, tmp); + } + + if (code->expr3 && !code->expr3->mold) + { + /* Initialization via SOURCE block + (or static default initializer). 
*/ + gfc_expr *rhs = gfc_copy_expr (code->expr3); + if (al->expr->ts.type == BT_CLASS) { - tmp = build_fold_indirect_ref (se.expr); - tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0); - gfc_add_expr_to_block (&se.pre, tmp); + gfc_se call; + gfc_actual_arglist *actual; + gfc_expr *ppc; + gfc_init_se (&call, NULL); + /* Do a polymorphic deep copy. */ + actual = gfc_get_actual_arglist (); + actual->expr = gfc_copy_expr (rhs); + if (rhs->ts.type == BT_CLASS) + gfc_add_data_component (actual->expr); + actual->next = gfc_get_actual_arglist (); + actual->next->expr = gfc_copy_expr (al->expr); + gfc_add_data_component (actual->next->expr); + if (rhs->ts.type == BT_CLASS) + { + ppc = gfc_copy_expr (rhs); + gfc_add_vptr_component (ppc); + } + else + ppc = gfc_lval_expr_from_sym (gfc_find_derived_vtab (rhs->ts.u.derived)); + gfc_add_component_ref (ppc, "_copy"); + gfc_conv_procedure_call (&call, ppc->symtree->n.sym, actual, + ppc, NULL); + gfc_add_expr_to_block (&call.pre, call.expr); + gfc_add_block_to_block (&call.pre, &call.post); + tmp = gfc_finish_block (&call.pre); } + else if (expr3 != NULL_TREE) + { + tmp = build_fold_indirect_ref_loc (input_location, se.expr); + gfc_trans_string_copy (&block, slen3, tmp, code->expr3->ts.kind, + slen3, expr3, code->expr3->ts.kind); + tmp = NULL_TREE; + } + else + { + /* Switch off automatic reallocation since we have just done + the ALLOCATE. */ + int realloc_lhs = gfc_option.flag_realloc_lhs; + gfc_option.flag_realloc_lhs = 0; + tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr), + rhs, false, false); + gfc_option.flag_realloc_lhs = realloc_lhs; + } + gfc_free_expr (rhs); + gfc_add_expr_to_block (&block, tmp); + } + else if (code->expr3 && code->expr3->mold + && code->expr3->ts.type == BT_CLASS) + { + /* Default-initialization via MOLD (polymorphic). */ + gfc_expr *rhs = gfc_copy_expr (code->expr3); + gfc_se dst,src; + gfc_add_vptr_component (rhs); + gfc_add_def_init_component (rhs); + gfc_init_se (&dst, NULL); + gfc_init_se (&src, NULL); + gfc_conv_expr (&dst, expr); + gfc_conv_expr (&src, rhs); + gfc_add_block_to_block (&block, &src.pre); + tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz); + gfc_add_expr_to_block (&block, tmp); + gfc_free_expr (rhs); + } + /* Allocation of CLASS entities. */ + gfc_free_expr (expr); + expr = al->expr; + if (expr->ts.type == BT_CLASS) + { + gfc_expr *lhs,*rhs; + gfc_se lse; + + /* Initialize VPTR for CLASS objects. */ + lhs = gfc_expr_to_initialize (expr); + gfc_add_vptr_component (lhs); + rhs = NULL; + if (code->expr3 && code->expr3->ts.type == BT_CLASS) + { + /* Polymorphic SOURCE: VPTR must be determined at run time. */ + rhs = gfc_copy_expr (code->expr3); + gfc_add_vptr_component (rhs); + tmp = gfc_trans_pointer_assignment (lhs, rhs); + gfc_add_expr_to_block (&block, tmp); + gfc_free_expr (rhs); + } + else + { + /* VPTR is fixed at compile time. 
*/ + gfc_symbol *vtab; + gfc_typespec *ts; + if (code->expr3) + ts = &code->expr3->ts; + else if (expr->ts.type == BT_DERIVED) + ts = &expr->ts; + else if (code->ext.alloc.ts.type == BT_DERIVED) + ts = &code->ext.alloc.ts; + else if (expr->ts.type == BT_CLASS) + ts = &CLASS_DATA (expr)->ts; + else + ts = &expr->ts; + + if (ts->type == BT_DERIVED) + { + vtab = gfc_find_derived_vtab (ts->u.derived); + gcc_assert (vtab); + gfc_init_se (&lse, NULL); + lse.want_pointer = 1; + gfc_conv_expr (&lse, lhs); + tmp = gfc_build_addr_expr (NULL_TREE, + gfc_get_symbol_decl (vtab)); + gfc_add_modify (&block, lse.expr, + fold_convert (TREE_TYPE (lse.expr), tmp)); + } + } + gfc_free_expr (lhs); } - tmp = gfc_finish_block (&se.pre); + } + + /* STAT (ERRMSG only makes sense with STAT). */ + if (code->expr1) + { + tmp = build1_v (LABEL_EXPR, label_errmsg); + gfc_add_expr_to_block (&block, tmp); + } + + /* ERRMSG block. */ + if (code->expr2) + { + /* A better error message may be possible, but not required. */ + const char *msg = "Attempt to allocate an allocated object"; + tree slen, dlen; + + gfc_init_se (&se, NULL); + gfc_conv_expr_lhs (&se, code->expr2); + + errmsg = gfc_create_var (pchar_type_node, "ERRMSG"); + + gfc_add_modify (&block, errmsg, + gfc_build_addr_expr (pchar_type_node, + gfc_build_localized_cstring_const (msg))); + + slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg))); + dlen = gfc_get_expr_charlen (code->expr2); + slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen, + slen); + + dlen = build_call_expr_loc (input_location, + builtin_decl_explicit (BUILT_IN_MEMCPY), 3, + gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen); + + tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat, + build_int_cst (TREE_TYPE (stat), 0)); + + tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location)); + gfc_add_expr_to_block (&block, tmp); } - /* Assign the value to the status variable. */ - if (code->expr) + /* STAT (ERRMSG only makes sense with STAT). */ + if (code->expr1) { - tmp = build1_v (LABEL_EXPR, error_label); + tmp = build1_v (LABEL_EXPR, label_finish); gfc_add_expr_to_block (&block, tmp); + } + /* STAT block. */ + if (code->expr1) + { gfc_init_se (&se, NULL); - gfc_conv_expr_lhs (&se, code->expr); + gfc_conv_expr_lhs (&se, code->expr1); tmp = convert (TREE_TYPE (se.expr), stat); gfc_add_modify (&block, se.expr, tmp); } + gfc_add_block_to_block (&block, &se.post); + gfc_add_block_to_block (&block, &post); + return gfc_finish_block (&block); } -/* Translate a DEALLOCATE statement. - There are two cases within the for loop: - (1) deallocate(a1, a2, a3) is translated into the following sequence - _gfortran_deallocate(a1, 0B) - _gfortran_deallocate(a2, 0B) - _gfortran_deallocate(a3, 0B) - where the STAT= variable is passed a NULL pointer. - (2) deallocate(a1, a2, a3, stat=i) is translated into the following - astat = 0 - _gfortran_deallocate(a1, &stat) - astat = astat + stat - _gfortran_deallocate(a2, &stat) - astat = astat + stat - _gfortran_deallocate(a3, &stat) - astat = astat + stat - In case (1), we simply return at the end of the for loop. In case (2) - we set STAT= astat. */ +/* Translate a DEALLOCATE statement. 
*/ + tree -gfc_trans_deallocate (gfc_code * code) +gfc_trans_deallocate (gfc_code *code) { gfc_se se; gfc_alloc *al; - gfc_expr *expr; tree apstat, astat, pstat, stat, tmp; stmtblock_t block; + pstat = apstat = stat = astat = tmp = NULL_TREE; + gfc_start_block (&block); - /* Set up the optional STAT= */ - if (code->expr) + /* Count the number of failed deallocations. If deallocate() was + called with STAT= , then set STAT to the count. If deallocate + was called with ERRMSG, then set ERRMG to a string. */ + if (code->expr1 || code->expr2) { tree gfc_int4_type_node = gfc_get_int_type (4); - /* Variable used with the library call. */ stat = gfc_create_var (gfc_int4_type_node, "stat"); pstat = gfc_build_addr_expr (NULL_TREE, stat); @@ -4008,14 +5141,15 @@ gfc_trans_deallocate (gfc_code * code) /* Initialize astat to 0. */ gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0)); } - else - pstat = apstat = stat = astat = NULL_TREE; - for (al = code->ext.alloc_list; al != NULL; al = al->next) + for (al = code->ext.alloc.list; al != NULL; al = al->next) { - expr = al->expr; + gfc_expr *expr = gfc_copy_expr (al->expr); gcc_assert (expr->expr_type == EXPR_VARIABLE); + if (expr->ts.type == BT_CLASS) + gfc_add_data_component (expr); + gfc_init_se (&se, NULL); gfc_start_block (&se.pre); @@ -4023,61 +5157,112 @@ gfc_trans_deallocate (gfc_code * code) se.descriptor_only = 1; gfc_conv_expr (&se, expr); - if (expr->ts.type == BT_DERIVED - && expr->ts.derived->attr.alloc_comp) - { - gfc_ref *ref; - gfc_ref *last = NULL; - for (ref = expr->ref; ref; ref = ref->next) - if (ref->type == REF_COMPONENT) - last = ref; - - /* Do not deallocate the components of a derived type - ultimate pointer component. */ - if (!(last && last->u.c.component->attr.pointer) - && !(!last && expr->symtree->n.sym->attr.pointer)) + if (expr->rank || gfc_expr_attr (expr).codimension) + { + if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp) { - tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr, - expr->rank); - gfc_add_expr_to_block (&se.pre, tmp); + gfc_ref *ref; + gfc_ref *last = NULL; + for (ref = expr->ref; ref; ref = ref->next) + if (ref->type == REF_COMPONENT) + last = ref; + + /* Do not deallocate the components of a derived type + ultimate pointer component. */ + if (!(last && last->u.c.component->attr.pointer) + && !(!last && expr->symtree->n.sym->attr.pointer)) + { + tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr, + expr->rank); + gfc_add_expr_to_block (&se.pre, tmp); + } } + tmp = gfc_array_deallocate (se.expr, pstat, expr); + gfc_add_expr_to_block (&se.pre, tmp); } - - if (expr->rank) - tmp = gfc_array_deallocate (se.expr, pstat, expr); else { - tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr); + tmp = gfc_deallocate_scalar_with_status (se.expr, pstat, false, + expr, expr->ts); gfc_add_expr_to_block (&se.pre, tmp); - tmp = fold_build2 (MODIFY_EXPR, void_type_node, - se.expr, build_int_cst (TREE_TYPE (se.expr), 0)); + /* Set to zero after deallocation. */ + tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node, + se.expr, + build_int_cst (TREE_TYPE (se.expr), 0)); + gfc_add_expr_to_block (&se.pre, tmp); + + if (al->expr->ts.type == BT_CLASS) + { + /* Reset _vptr component to declared type. 
*/ + gfc_expr *rhs, *lhs = gfc_copy_expr (al->expr); + gfc_symbol *vtab = gfc_find_derived_vtab (al->expr->ts.u.derived); + gfc_add_vptr_component (lhs); + rhs = gfc_lval_expr_from_sym (vtab); + tmp = gfc_trans_pointer_assignment (lhs, rhs); + gfc_add_expr_to_block (&se.pre, tmp); + gfc_free_expr (lhs); + gfc_free_expr (rhs); + } } - gfc_add_expr_to_block (&se.pre, tmp); - /* Keep track of the number of failed deallocations by adding stat of the last deallocation to the running total. */ - if (code->expr) + if (code->expr1 || code->expr2) { - apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat); + apstat = fold_build2_loc (input_location, PLUS_EXPR, + TREE_TYPE (stat), astat, stat); gfc_add_modify (&se.pre, astat, apstat); } tmp = gfc_finish_block (&se.pre); gfc_add_expr_to_block (&block, tmp); - + gfc_free_expr (expr); } - /* Assign the value to the status variable. */ - if (code->expr) + /* Set STAT. */ + if (code->expr1) { gfc_init_se (&se, NULL); - gfc_conv_expr_lhs (&se, code->expr); + gfc_conv_expr_lhs (&se, code->expr1); tmp = convert (TREE_TYPE (se.expr), astat); gfc_add_modify (&block, se.expr, tmp); } + /* Set ERRMSG. */ + if (code->expr2) + { + /* A better error message may be possible, but not required. */ + const char *msg = "Attempt to deallocate an unallocated object"; + tree errmsg, slen, dlen; + + gfc_init_se (&se, NULL); + gfc_conv_expr_lhs (&se, code->expr2); + + errmsg = gfc_create_var (pchar_type_node, "ERRMSG"); + + gfc_add_modify (&block, errmsg, + gfc_build_addr_expr (pchar_type_node, + gfc_build_localized_cstring_const (msg))); + + slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg))); + dlen = gfc_get_expr_charlen (code->expr2); + slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen, + slen); + + dlen = build_call_expr_loc (input_location, + builtin_decl_explicit (BUILT_IN_MEMCPY), 3, + gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen); + + tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, astat, + build_int_cst (TREE_TYPE (astat), 0)); + + tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location)); + + gfc_add_expr_to_block (&block, tmp); + } + return gfc_finish_block (&block); } +#include "gt-fortran-trans-stmt.h"
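
Illustrative sketch, not part of the patch or of GCC: the FORALL/DO CONCURRENT hunks above repeatedly emit the per-index trip count described by the comment "size = (end + step - start) / step", built with fold_build2_loc and FLOOR_DIV_EXPR, multiplied across nested indices with MULT_EXPR, and guarded in the generated loop by a "count <= 0 -> goto exit_label" test. The standalone C program below models only that arithmetic; the identifiers floor_div and forall_trip_count and the sample bounds are invented here for illustration.

/* Standalone model of the FORALL trip-count arithmetic used in
   gfc_trans_forall_1 / gfc_trans_forall_loop.  Not GCC code.  */

#include <stdio.h>

/* Floor division: C's '/' truncates toward zero, whereas FLOOR_DIV_EXPR
   rounds toward negative infinity.  */
static long
floor_div (long a, long b)
{
  long q = a / b;
  if (a % b != 0 && (a < 0) != (b < 0))
    q--;
  return q;
}

/* Trip count of one index of "FORALL (i = start:end:step, ...)".
   A non-positive result means the construct runs zero times, which the
   generated loop handles via its "count <= 0 -> goto exit_label" check.  */
static long
forall_trip_count (long start, long end, long step)
{
  return floor_div (end + step - start, step);
}

int
main (void)
{
  /* FORALL (i = 1:10:3)  ->  i = 1, 4, 7, 10  ->  4 iterations.  */
  printf ("%ld\n", forall_trip_count (1, 10, 3));

  /* FORALL (i = 10:1:-4)  ->  i = 10, 6, 2  ->  3 iterations.  */
  printf ("%ld\n", forall_trip_count (10, 1, -4));

  /* Nested indices multiply, as the MULT_EXPR on "size" does:
     FORALL (i = 1:4, j = 1:3)  ->  4 * 3 = 12 iterations.  */
  printf ("%ld\n", forall_trip_count (1, 4, 1) * forall_trip_count (1, 3, 1));

  return 0;
}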