1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 Free Software Foundation, Inc.
5 Contributed by Paul Brook <paul@nowt.org>
6 and Steven Bosscher <s.bosscher@student.tudelft.nl>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "trans-stmt.h"
33 #include "trans-types.h"
34 #include "trans-array.h"
35 #include "trans-const.h"
37 #include "dependency.h"
/* NOTE(review): this excerpt is missing interior lines; the structures
   below are shown only partially — confirm full definitions in the
   original file before editing.  */
40 typedef struct iter_info
/* Chains the per-dimension iterator records of a FORALL construct.  */
46 struct iter_info *next;
50 typedef struct forall_info
/* Link to the information for the enclosing (outer) FORALL nest.  */
57 struct forall_info *prev_nest;
/* Forward declaration: WHERE-construct translation, defined later in
   this file and needed by the FORALL machinery above.  */
61 static void gfc_trans_where_2 (gfc_code *, tree, bool,
62 forall_info *, stmtblock_t *);
64 /* Translate a F95 label number to a LABEL_EXPR. */
67 gfc_trans_label_here (gfc_code * code)
/* Emit the LABEL_EXPR for the statement label attached to CODE->here.  */
69 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
73 /* Given a variable expression which has been ASSIGNed to, find the decl
74 containing the auxiliary variables. For variables in common blocks this
78 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only variables carrying the F77 ASSIGN attribute are valid here.  */
80 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
81 gfc_conv_expr (se, expr);
82 /* Deals with variable in common block. Get the field declaration. */
83 if (TREE_CODE (se->expr) == COMPONENT_REF)
84 se->expr = TREE_OPERAND (se->expr, 1);
85 /* Deals with dummy argument. Get the parameter declaration. */
86 else if (TREE_CODE (se->expr) == INDIRECT_REF)
87 se->expr = TREE_OPERAND (se->expr, 0);
90 /* Translate a label assignment statement, i.e. F77 "ASSIGN 10 TO I":
   store the label's address (or its FORMAT string) plus a length tag
   into the auxiliary variables attached to the ASSIGNed variable.  */
93 gfc_trans_label_assign (gfc_code * code)
102 /* Start a new block. */
103 gfc_init_se (&se, NULL);
104 gfc_start_block (&se.pre);
105 gfc_conv_label_variable (&se, code->expr1);
107 len = GFC_DECL_STRING_LEN (se.expr);
108 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
110 label_tree = gfc_get_label_decl (code->label1);
112 if (code->label1->defined == ST_LABEL_TARGET)
/* Branch-target label: store its address; length -1 marks "target".  */
114 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
115 len_tree = integer_minus_one_node;
/* Otherwise the label names a FORMAT: store the format string's
   address and its character length instead.  */
119 gfc_expr *format = code->label1->format;
121 label_len = format->value.character.length;
122 len_tree = build_int_cst (gfc_charlen_type_node, label_len);
123 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
124 format->value.character.string);
125 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
128 gfc_add_modify (&se.pre, len, len_tree);
129 gfc_add_modify (&se.pre, addr, label_tree);
131 return gfc_finish_block (&se.pre);
134 /* Translate a GOTO statement: either a plain GOTO to a known label,
   or an assigned GOTO through the address stored by gfc_trans_label_assign,
   with an optional runtime check that the stored label is a target.  */
137 gfc_trans_goto (gfc_code * code)
139 locus loc = code->loc;
/* Simple (unassigned) GOTO: emit a direct jump.  */
145 if (code->label1 != NULL)
146 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
/* Assigned GOTO below.  */
149 gfc_init_se (&se, NULL);
150 gfc_start_block (&se.pre);
151 gfc_conv_label_variable (&se, code->expr1);
/* Length tag -1 means "branch target" (see gfc_trans_label_assign);
   anything else means a FORMAT label was assigned, which is an error.  */
152 tmp = GFC_DECL_STRING_LEN (se.expr);
153 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, tmp,
154 build_int_cst (TREE_TYPE (tmp), -1));
155 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
156 "Assigned label is not a target label");
158 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
160 /* We're going to ignore a label list. It does not really change the
161 statement's semantics (because it is just a further restriction on
162 what's legal code); before, we were comparing label addresses here, but
163 that's a very fragile business and may break with optimization. So
166 target = fold_build1_loc (input_location, GOTO_EXPR, void_type_node,
168 gfc_add_expr_to_block (&se.pre, target);
169 return gfc_finish_block (&se.pre);
173 /* Translate an ENTRY statement. Just adds a label for this entry point. */
175 gfc_trans_entry (gfc_code * code)
177 return build1_v (LABEL_EXPR, code->ext.entry->label);
181 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
182 elemental subroutines. Make temporaries for output arguments if any such
183 dependencies are found. Output arguments are chosen because internal_unpack
184 can be used, as is, to copy the result back to the variable. */
186 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
187 gfc_symbol * sym, gfc_actual_arglist * arg,
188 gfc_dep_check check_variable)
190 gfc_actual_arglist *arg0;
192 gfc_formal_arglist *formal;
193 gfc_loopinfo tmp_loop;
/* Nothing to do if the call has no scalarization chain.  */
205 if (loopse->ss == NULL)
210 formal = sym->formal;
212 /* Loop over all the arguments testing for dependencies. */
213 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
219 /* Obtain the info structure for the current argument. */
221 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
225 info = &ss->data.info;
229 /* If there is a dependency, create a temporary and use it
230 instead of the variable. */
231 fsym = formal ? formal->sym : NULL;
232 if (e->expr_type == EXPR_VARIABLE
234 && fsym->attr.intent != INTENT_IN
235 && gfc_check_fncall_dependency (e, fsym->attr.intent,
236 sym, arg0, check_variable))
238 tree initial, temptype;
239 stmtblock_t temp_post;
241 /* Make a local loopinfo for the temporary creation, so that
242 none of the other ss->info's have to be renormalized. */
243 gfc_init_loopinfo (&tmp_loop);
244 tmp_loop.dimen = info->dimen;
245 for (n = 0; n < info->dimen; n++)
247 tmp_loop.to[n] = loopse->loop->to[n];
248 tmp_loop.from[n] = loopse->loop->from[n];
249 tmp_loop.order[n] = loopse->loop->order[n];
252 /* Obtain the argument descriptor for unpacking. */
253 gfc_init_se (&parmse, NULL);
254 parmse.want_pointer = 1;
256 /* The scalarizer introduces some specific peculiarities when
257 handling elemental subroutines; the stride can be needed up to
258 the dim_array - 1, rather than dim_loop - 1 to calculate
259 offsets outside the loop. For this reason, we make sure that
260 the descriptor has the dimensionality of the array by converting
261 trailing elements into ranges with end = start. */
262 for (ref = e->ref; ref; ref = ref->next)
263 if (ref->type == REF_ARRAY && ref->u.ar.type == AR_SECTION)
268 bool seen_range = false;
269 for (n = 0; n < ref->u.ar.dimen; n++)
271 if (ref->u.ar.dimen_type[n] == DIMEN_RANGE)
275 || ref->u.ar.dimen_type[n] != DIMEN_ELEMENT)
/* Turn a trailing element subscript into a degenerate range
   (start:start) so the descriptor keeps full rank.  */
278 ref->u.ar.end[n] = gfc_copy_expr (ref->u.ar.start[n]);
279 ref->u.ar.dimen_type[n] = DIMEN_RANGE;
283 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
284 gfc_add_block_to_block (&se->pre, &parmse.pre);
286 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
287 initialize the array temporary with a copy of the values. */
288 if (fsym->attr.intent == INTENT_INOUT
289 || (fsym->ts.type ==BT_DERIVED
290 && fsym->attr.intent == INTENT_OUT))
291 initial = parmse.expr;
295 /* Find the type of the temporary to create; we don't use the type
296 of e itself as this breaks for subcomponent-references in e (where
297 the type of e is that of the final reference, but parmse.expr's
298 type corresponds to the full derived-type). */
299 /* TODO: Fix this somehow so we don't need a temporary of the whole
300 array but instead only the components referenced. */
301 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
302 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
303 temptype = TREE_TYPE (temptype);
304 temptype = gfc_get_element_type (temptype);
306 /* Generate the temporary. Cleaning up the temporary should be the
307 very last thing done, so we add the code to a new block and add it
308 to se->post as last instructions. */
309 size = gfc_create_var (gfc_array_index_type, NULL);
310 data = gfc_create_var (pvoid_type_node, NULL);
311 gfc_init_block (&temp_post);
312 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
313 &tmp_loop, info, temptype,
317 gfc_add_modify (&se->pre, size, tmp);
318 tmp = fold_convert (pvoid_type_node, info->data);
319 gfc_add_modify (&se->pre, data, tmp);
321 /* Calculate the offset for the temporary. */
322 offset = gfc_index_zero_node;
323 for (n = 0; n < info->dimen; n++)
325 tmp = gfc_conv_descriptor_stride_get (info->descriptor,
327 tmp = fold_build2_loc (input_location, MULT_EXPR,
328 gfc_array_index_type,
329 loopse->loop->from[n], tmp);
330 offset = fold_build2_loc (input_location, MINUS_EXPR,
331 gfc_array_index_type, offset, tmp);
333 info->offset = gfc_create_var (gfc_array_index_type, NULL);
334 gfc_add_modify (&se->pre, info->offset, offset);
336 /* Copy the result back using unpack. */
337 tmp = build_call_expr_loc (input_location,
338 gfor_fndecl_in_unpack, 2, parmse.expr, data);
339 gfc_add_expr_to_block (&se->post, tmp);
341 /* parmse.pre is already added above. */
342 gfc_add_block_to_block (&se->post, &parmse.post);
343 gfc_add_block_to_block (&se->post, &temp_post);
349 /* Translate the CALL statement. Builds a call to an F95 subroutine.
   Non-elemental calls translate to a single procedure call; elemental
   calls with array arguments are wrapped in a scalarization loop.
   MASK/COUNT1/INVERT are used when the call comes from a masked
   context (e.g. WHERE-driven operator assignment).  */
352 gfc_trans_call (gfc_code * code, bool dependency_check,
353 tree mask, tree count1, bool invert)
357 int has_alternate_specifier;
358 gfc_dep_check check_variable;
359 tree index = NULL_TREE;
360 tree maskexpr = NULL_TREE;
363 /* A CALL starts a new block because the actual arguments may have to
364 be evaluated first. */
365 gfc_init_se (&se, NULL);
366 gfc_start_block (&se.pre);
368 gcc_assert (code->resolved_sym);
370 ss = gfc_ss_terminator;
371 if (code->resolved_sym->attr.elemental)
372 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
374 /* Is not an elemental subroutine call with array valued arguments. */
375 if (ss == gfc_ss_terminator)
378 /* Translate the call. */
379 has_alternate_specifier
380 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
383 /* A subroutine without side-effect, by definition, does nothing! */
384 TREE_SIDE_EFFECTS (se.expr) = 1;
386 /* Chain the pieces together and return the block. */
387 if (has_alternate_specifier)
389 gfc_code *select_code;
/* The frontend places the SELECT handling the alternate-return
   value immediately after the CALL; store the returned specifier
   into that SELECT's control variable.  */
391 select_code = code->next;
392 gcc_assert(select_code->op == EXEC_SELECT);
393 sym = select_code->expr1->symtree->n.sym;
394 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
395 if (sym->backend_decl == NULL)
396 sym->backend_decl = gfc_get_symbol_decl (sym);
397 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
400 gfc_add_expr_to_block (&se.pre, se.expr);
402 gfc_add_block_to_block (&se.pre, &se.post);
407 /* An elemental subroutine call with array valued arguments has
415 /* gfc_walk_elemental_function_args renders the ss chain in the
416 reverse order to the actual argument order. */
417 ss = gfc_reverse_ss (ss);
419 /* Initialize the loop. */
420 gfc_init_se (&loopse, NULL);
421 gfc_init_loopinfo (&loop);
422 gfc_add_ss_to_loop (&loop, ss);
424 gfc_conv_ss_startstride (&loop);
425 /* TODO: gfc_conv_loop_setup generates a temporary for vector
426 subscripts. This could be prevented in the elemental case
427 as temporaries are handled separatedly
428 (below in gfc_conv_elemental_dependencies). */
429 gfc_conv_loop_setup (&loop, &code->expr1->where);
430 gfc_mark_ss_chain_used (ss, 1);
432 /* Convert the arguments, checking for dependencies. */
433 gfc_copy_loopinfo_to_se (&loopse, &loop);
436 /* For operator assignment, do dependency checking. */
437 if (dependency_check)
438 check_variable = ELEM_CHECK_VARIABLE;
440 check_variable = ELEM_DONT_CHECK_VARIABLE;
442 gfc_init_se (&depse, NULL);
443 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
444 code->ext.actual, check_variable);
446 gfc_add_block_to_block (&loop.pre, &depse.pre);
447 gfc_add_block_to_block (&loop.post, &depse.post);
449 /* Generate the loop body. */
450 gfc_start_scalarized_body (&loop, &body);
451 gfc_init_block (&block);
455 /* Form the mask expression according to the mask. */
457 maskexpr = gfc_build_array_ref (mask, index, NULL);
/* INVERT flips the sense of the mask (ELSEWHERE branch).  */
459 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
460 TREE_TYPE (maskexpr), maskexpr);
463 /* Add the subroutine call to the block. */
464 gfc_conv_procedure_call (&loopse, code->resolved_sym,
465 code->ext.actual, code->expr1, NULL);
/* Masked case: guard the call and bump the mask index COUNT1.  */
469 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
470 build_empty_stmt (input_location));
471 gfc_add_expr_to_block (&loopse.pre, tmp);
472 tmp = fold_build2_loc (input_location, PLUS_EXPR,
473 gfc_array_index_type,
474 count1, gfc_index_one_node);
475 gfc_add_modify (&loopse.pre, count1, tmp);
478 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
480 gfc_add_block_to_block (&block, &loopse.pre);
481 gfc_add_block_to_block (&block, &loopse.post);
483 /* Finish up the loop block and the loop. */
484 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
485 gfc_trans_scalarizing_loops (&loop, &body);
486 gfc_add_block_to_block (&se.pre, &loop.pre);
487 gfc_add_block_to_block (&se.pre, &loop.post);
488 gfc_add_block_to_block (&se.pre, &se.post);
489 gfc_cleanup_loop (&loop);
492 return gfc_finish_block (&se.pre);
496 /* Translate the RETURN statement.  If the RETURN carries an expression
   (alternate return), store it into the fake result variable before
   jumping to the function exit; otherwise just emit the return jump.  */
499 gfc_trans_return (gfc_code * code)
507 /* If code->expr is not NULL, this return statement must appear
508 in a subroutine and current_fake_result_decl has already
511 result = gfc_get_fake_result_decl (NULL, 0);
/* No result decl: warn that the alternate return is useless because
   the subroutine has no "*" dummy, then emit a plain return.  */
514 gfc_warning ("An alternate return at %L without a * dummy argument",
515 &code->expr1->where);
516 return gfc_generate_return ();
519 /* Start a new block for this statement. */
520 gfc_init_se (&se, NULL);
521 gfc_start_block (&se.pre);
523 gfc_conv_expr (&se, code->expr1);
525 /* Note that the actually returned expression is a simple value and
526 does not depend on any pointers or such; thus we can clean-up with
527 se.post before returning. */
528 tmp = fold_build2_loc (input_location, MODIFY_EXPR, TREE_TYPE (result),
529 result, fold_convert (TREE_TYPE (result),
531 gfc_add_expr_to_block (&se.pre, tmp);
532 gfc_add_block_to_block (&se.pre, &se.post);
534 tmp = gfc_generate_return ();
535 gfc_add_expr_to_block (&se.pre, tmp);
536 return gfc_finish_block (&se.pre);
/* RETURN without expression.  */
539 return gfc_generate_return ();
543 /* Translate the PAUSE statement. We have to translate this statement
544 to a runtime library call. */
547 gfc_trans_pause (gfc_code * code)
549 tree gfc_int4_type_node = gfc_get_int_type (4);
553 /* Start a new block for this statement. */
554 gfc_init_se (&se, NULL);
555 gfc_start_block (&se.pre);
/* Three forms: bare PAUSE, PAUSE <integer>, PAUSE <string>.  */
558 if (code->expr1 == NULL)
560 tmp = build_int_cst (gfc_int4_type_node, 0);
561 tmp = build_call_expr_loc (input_location,
562 gfor_fndecl_pause_string, 2,
563 build_int_cst (pchar_type_node, 0), tmp);
565 else if (code->expr1->ts.type == BT_INTEGER)
567 gfc_conv_expr (&se, code->expr1);
568 tmp = build_call_expr_loc (input_location,
569 gfor_fndecl_pause_numeric, 1,
570 fold_convert (gfc_int4_type_node, se.expr));
/* Character stop-code: pass address and length to the library.  */
574 gfc_conv_expr_reference (&se, code->expr1);
575 tmp = build_call_expr_loc (input_location,
576 gfor_fndecl_pause_string, 2,
577 se.expr, se.string_length);
580 gfc_add_expr_to_block (&se.pre, tmp);
582 gfc_add_block_to_block (&se.pre, &se.post);
584 return gfc_finish_block (&se.pre);
588 /* Translate the STOP statement. We have to translate this statement
589 to a runtime library call.  ERROR_STOP selects the ERROR STOP
   variant; with -fcoarray=lib the coarray runtime entry points are
   used instead of the single-image ones.  */
592 gfc_trans_stop (gfc_code *code, bool error_stop)
594 tree gfc_int4_type_node = gfc_get_int_type (4);
598 /* Start a new block for this statement. */
599 gfc_init_se (&se, NULL);
600 gfc_start_block (&se.pre);
602 if (gfc_option.coarray == GFC_FCOARRAY_LIB && !error_stop)
604 /* Per F2008, 8.5.1 STOP implies a SYNC MEMORY. */
605 tmp = built_in_decls [BUILT_IN_SYNCHRONIZE];
606 tmp = build_call_expr_loc (input_location, tmp, 0);
607 gfc_add_expr_to_block (&se.pre, tmp);
609 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_finalize, 0);
610 gfc_add_expr_to_block (&se.pre, tmp);
/* Three forms: bare STOP, STOP <integer>, STOP <string>; each picks
   the matching library entry point for stop / error-stop / CAF.  */
613 if (code->expr1 == NULL)
615 tmp = build_int_cst (gfc_int4_type_node, 0);
616 tmp = build_call_expr_loc (input_location,
618 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
619 ? gfor_fndecl_caf_error_stop_str
620 : gfor_fndecl_error_stop_string)
621 : gfor_fndecl_stop_string,
622 2, build_int_cst (pchar_type_node, 0), tmp);
624 else if (code->expr1->ts.type == BT_INTEGER)
626 gfc_conv_expr (&se, code->expr1);
627 tmp = build_call_expr_loc (input_location,
629 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
630 ? gfor_fndecl_caf_error_stop
631 : gfor_fndecl_error_stop_numeric)
632 : gfor_fndecl_stop_numeric_f08, 1,
633 fold_convert (gfc_int4_type_node, se.expr));
637 gfc_conv_expr_reference (&se, code->expr1);
638 tmp = build_call_expr_loc (input_location,
640 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
641 ? gfor_fndecl_caf_error_stop_str
642 : gfor_fndecl_error_stop_string)
643 : gfor_fndecl_stop_string,
644 2, se.expr, se.string_length);
647 gfc_add_expr_to_block (&se.pre, tmp);
649 gfc_add_block_to_block (&se.pre, &se.post);
651 return gfc_finish_block (&se.pre);
/* Translate SYNC ALL, SYNC IMAGES and SYNC MEMORY (TYPE selects which).
   expr1 = image set, expr2 = STAT= variable, expr3 = ERRMSG= variable.
   With -fcoarray=lib the libcaf entry points are called; otherwise the
   statement mostly reduces to setting STAT to zero.  */
656 gfc_trans_sync (gfc_code *code, gfc_exec_op type)
660 tree images = NULL_TREE, stat = NULL_TREE,
661 errmsg = NULL_TREE, errmsglen = NULL_TREE;
663 /* Short cut: For single images without bound checking or without STAT=,
664 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
665 if (!code->expr2 && !(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
666 && gfc_option.coarray != GFC_FCOARRAY_LIB)
669 gfc_init_se (&se, NULL);
670 gfc_start_block (&se.pre);
/* Evaluate a scalar image set (SYNC IMAGES (n)).  */
672 if (code->expr1 && code->expr1->rank == 0)
674 gfc_init_se (&argse, NULL);
675 gfc_conv_expr_val (&argse, code->expr1);
/* Evaluate the STAT= variable, which must be a plain variable.  */
681 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
682 gfc_init_se (&argse, NULL);
683 gfc_conv_expr_val (&argse, code->expr2);
687 if (code->expr3 && gfc_option.coarray == GFC_FCOARRAY_LIB
688 && type != EXEC_SYNC_MEMORY)
690 gcc_assert (code->expr3->expr_type == EXPR_VARIABLE);
691 gfc_init_se (&argse, NULL);
692 gfc_conv_expr (&argse, code->expr3);
693 gfc_conv_string_parameter (&argse);
695 errmsglen = argse.string_length;
697 else if (gfc_option.coarray == GFC_FCOARRAY_LIB && type != EXEC_SYNC_MEMORY)
/* No ERRMSG= given: pass a null buffer of length 0 to libcaf.  */
699 errmsg = null_pointer_node;
700 errmsglen = build_int_cst (integer_type_node, 0);
703 /* Check SYNC IMAGES(imageset) for valid image index.
704 FIXME: Add a check for image-set arrays. */
705 if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
706 && code->expr1->rank == 0)
/* Single image: the only valid image number is 1.  */
709 if (gfc_option.coarray != GFC_FCOARRAY_LIB)
710 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
711 images, build_int_cst (TREE_TYPE (images), 1));
/* Library coarrays: valid range is 1 .. num_images().  */
715 cond = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
716 images, gfort_gvar_caf_num_images);
717 cond2 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
719 build_int_cst (TREE_TYPE (images), 1));
720 cond = fold_build2_loc (input_location, TRUTH_AND_EXPR,
721 boolean_type_node, cond, cond2);
723 gfc_trans_runtime_check (true, false, cond, &se.pre,
724 &code->expr1->where, "Invalid image number "
726 fold_convert (integer_type_node, se.expr));
729 /* Per F2008, 8.5.1, a SYNC MEMORY is implied by calling the
730 image control statements SYNC IMAGES and SYNC ALL. */
731 if (gfc_option.coarray == GFC_FCOARRAY_LIB)
733 tmp = built_in_decls [BUILT_IN_SYNCHRONIZE];
734 tmp = build_call_expr_loc (input_location, tmp, 0);
735 gfc_add_expr_to_block (&se.pre, tmp);
738 if (gfc_option.coarray != GFC_FCOARRAY_LIB || type == EXEC_SYNC_MEMORY)
740 /* Set STAT to zero. */
742 gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));
744 else if (type == EXEC_SYNC_ALL)
746 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
747 2, errmsg, errmsglen);
/* Store the library's return value into STAT= if present.  */
749 gfc_add_modify (&se.pre, stat, fold_convert (TREE_TYPE (stat), tmp));
751 gfc_add_expr_to_block (&se.pre, tmp);
757 gcc_assert (type == EXEC_SYNC_IMAGES);
/* SYNC IMAGES (*): length -1 with a null image list.  */
761 len = build_int_cst (integer_type_node, -1);
762 images = null_pointer_node;
764 else if (code->expr1->rank == 0)
766 len = build_int_cst (integer_type_node, 1);
767 images = gfc_build_addr_expr (NULL_TREE, images);
/* Array image set: only default integer kind is supported.  */
772 if (code->expr1->ts.kind != gfc_c_int_kind)
773 gfc_fatal_error ("Sorry, only support for integer kind %d "
774 "implemented for image-set at %L",
775 gfc_c_int_kind, &code->expr1->where);
777 gfc_conv_array_parameter (&se, code->expr1,
778 gfc_walk_expr (code->expr1), true, NULL,
/* Convert the byte size of the array into an element count.  */
782 tmp = gfc_typenode_for_spec (&code->expr1->ts);
783 if (GFC_ARRAY_TYPE_P (tmp) || GFC_DESCRIPTOR_TYPE_P (tmp))
784 tmp = gfc_get_element_type (tmp);
786 len = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
787 TREE_TYPE (len), len,
788 fold_convert (TREE_TYPE (len),
789 TYPE_SIZE_UNIT (tmp)));
790 len = fold_convert (integer_type_node, len);
793 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images, 4,
794 fold_convert (integer_type_node, len), images,
797 gfc_add_modify (&se.pre, stat, fold_convert (TREE_TYPE (stat), tmp));
799 gfc_add_expr_to_block (&se.pre, tmp);
802 return gfc_finish_block (&se.pre);
806 /* Generate GENERIC for the IF construct. This function also deals with
807 the simple IF statement, because the front end translates the IF
808 statement into an IF construct.
840 where COND_S is the simplified version of the predicate. PRE_COND_S
841 are the pre side-effects produced by the translation of the
843 We need to build the chain recursively otherwise we run into
844 problems with folding incomplete statements. */
847 gfc_trans_if_1 (gfc_code * code)
854 /* Check for an unconditional ELSE clause. */
856 return gfc_trans_code (code->next);
858 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
859 gfc_init_se (&if_se, NULL);
860 gfc_start_block (&if_se.pre);
862 /* Calculate the IF condition expression. */
863 if (code->expr1->where.lb)
/* Temporarily point the backend locus at the condition so any code
   generated for it carries the right source location.  */
865 gfc_save_backend_locus (&saved_loc);
866 gfc_set_backend_locus (&code->expr1->where);
869 gfc_conv_expr_val (&if_se, code->expr1);
871 if (code->expr1->where.lb)
872 gfc_restore_backend_locus (&saved_loc);
874 /* Translate the THEN clause. */
875 stmt = gfc_trans_code (code->next);
877 /* Translate the ELSE clause. */
/* Recurse on the next block of the IF chain (ELSE IF / ELSE).  */
879 elsestmt = gfc_trans_if_1 (code->block);
881 elsestmt = build_empty_stmt (input_location);
883 /* Build the condition expression and add it to the condition block. */
884 loc = code->expr1->where.lb ? code->expr1->where.lb->location : input_location;
885 stmt = fold_build3_loc (loc, COND_EXPR, void_type_node, if_se.expr, stmt,
888 gfc_add_expr_to_block (&if_se.pre, stmt);
890 /* Finish off this statement. */
891 return gfc_finish_block (&if_se.pre);
/* Public entry point for the IF construct: wraps gfc_trans_if_1 and
   provides the EXIT label that EXIT statements inside the IF jump to.  */
895 gfc_trans_if (gfc_code * code)
900 /* Create exit label so it is available for trans'ing the body code. */
901 exit_label = gfc_build_label_decl (NULL_TREE);
902 code->exit_label = exit_label;
904 /* Translate the actual code in code->block. */
905 gfc_init_block (&body);
906 gfc_add_expr_to_block (&body, gfc_trans_if_1 (code->block));
908 /* Add exit label. */
909 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
911 return gfc_finish_block (&body);
915 /* Translate an arithmetic IF expression.
917 IF (cond) label1, label2, label3 translates to
929 An optimized version can be generated in case of equal labels.
930 E.g., if label1 is equal to label2, we can translate it to
939 gfc_trans_arithmetic_if (gfc_code * code)
947 /* Start a new block. */
948 gfc_init_se (&se, NULL);
949 gfc_start_block (&se.pre);
951 /* Pre-evaluate COND. */
952 gfc_conv_expr_val (&se, code->expr1);
953 se.expr = gfc_evaluate_now (se.expr, &se.pre);
955 /* Build something to compare with. */
956 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
958 if (code->label1->value != code->label2->value)
960 /* If (cond < 0) take branch1 else take branch2.
961 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
962 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
963 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
965 if (code->label1->value != code->label3->value)
966 tmp = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
/* label1 == label3: a single NE test suffices (< or > both go
   to label1).  */
969 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
972 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
973 tmp, branch1, branch2);
/* label1 == label2: the first comparison collapses away.  */
976 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
978 if (code->label1->value != code->label3->value
979 && code->label2->value != code->label3->value)
981 /* if (cond <= 0) take branch1 else take branch2. */
982 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
983 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
985 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
986 tmp, branch1, branch2);
989 /* Append the COND_EXPR to the evaluation of COND, and return. */
990 gfc_add_expr_to_block (&se.pre, branch1);
991 return gfc_finish_block (&se.pre);
995 /* Translate a CRITICAL block.  With -fcoarray=lib the body is bracketed
   by libcaf critical-section entry/exit calls; otherwise the body is
   emitted as-is (single image: no contention possible).  */
997 gfc_trans_critical (gfc_code *code)
1002 gfc_start_block (&block);
1004 if (gfc_option.coarray == GFC_FCOARRAY_LIB)
1006 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_critical, 0);
1007 gfc_add_expr_to_block (&block, tmp);
1010 tmp = gfc_trans_code (code->block->next);
1011 gfc_add_expr_to_block (&block, tmp);
1013 if (gfc_option.coarray == GFC_FCOARRAY_LIB)
1015 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_end_critical,
1017 gfc_add_expr_to_block (&block, tmp);
1021 return gfc_finish_block (&block);
1025 /* Do proper initialization for ASSOCIATE names: depending on the target,
   either a descriptor/pointer assignment (arrays, variable targets) or
   a plain value assignment (scalar expressions).  Init and cleanup code
   is registered on BLOCK.  */
1028 trans_associate_var (gfc_symbol *sym, gfc_wrapped_block *block)
1033 gcc_assert (sym->assoc);
1034 e = sym->assoc->target;
1036 /* Do a `pointer assignment' with updated descriptor (or assign descriptor
1037 to array temporary) for arrays with either unknown shape or if associating
1039 if (sym->attr.dimension
1040 && (sym->as->type == AS_DEFERRED || sym->assoc->variable))
1046 desc = sym->backend_decl;
1048 /* If association is to an expression, evaluate it and create temporary.
1049 Otherwise, get descriptor of target for pointer assignment. */
1050 gfc_init_se (&se, NULL);
1051 ss = gfc_walk_expr (e);
1052 if (sym->assoc->variable)
/* Variable target: write the result descriptor directly into DESC.  */
1054 se.direct_byref = 1;
1057 gfc_conv_expr_descriptor (&se, e, ss);
1059 /* If we didn't already do the pointer assignment, set associate-name
1060 descriptor to the one generated for the temporary. */
1061 if (!sym->assoc->variable)
1065 gfc_add_modify (&se.pre, desc, se.expr);
1067 /* The generated descriptor has lower bound zero (as array
1068 temporary), shift bounds so we get lower bounds of 1. */
1069 for (dim = 0; dim < e->rank; ++dim)
1070 gfc_conv_shift_descriptor_lbound (&se.pre, desc,
1071 dim, gfc_index_one_node);
1074 /* Done, register stuff as init / cleanup code. */
1075 gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
1076 gfc_finish_block (&se.post));
1079 /* Do a scalar pointer assignment; this is for scalar variable targets. */
1080 else if (gfc_is_associate_pointer (sym))
1084 gcc_assert (!sym->attr.dimension);
1086 gfc_init_se (&se, NULL);
1087 gfc_conv_expr (&se, e);
/* Store the target's address into the associate-name pointer.  */
1089 tmp = TREE_TYPE (sym->backend_decl);
1090 tmp = gfc_build_addr_expr (tmp, se.expr);
1091 gfc_add_modify (&se.pre, sym->backend_decl, tmp);
1093 gfc_add_init_cleanup (block, gfc_finish_block( &se.pre),
1094 gfc_finish_block (&se.post));
1097 /* Do a simple assignment. This is for scalar expressions, where we
1098 can simply use expression assignment. */
1103 lhs = gfc_lval_expr_from_sym (sym);
1104 tmp = gfc_trans_assignment (lhs, e, false, true);
1105 gfc_add_init_cleanup (block, tmp, NULL_TREE);
1110 /* Translate a BLOCK construct. This is basically what we would do for a
   procedure body: set up local variables, translate the contained code
   with an exit label, then wrap everything (including ASSOCIATE-name
   initialization) in a gfc_wrapped_block.  */
1114 gfc_trans_block_construct (gfc_code* code)
1118 gfc_wrapped_block block;
1121 gfc_association_list *ass;
1123 ns = code->ext.block.ns;
1125 sym = ns->proc_name;
1128 /* Process local variables. */
1129 gcc_assert (!sym->tlink);
1131 gfc_process_block_locals (ns);
1133 /* Generate code including exit-label. */
1134 gfc_init_block (&body);
1135 exit_label = gfc_build_label_decl (NULL_TREE);
1136 code->exit_label = exit_label;
1137 gfc_add_expr_to_block (&body, gfc_trans_code (ns->code));
1138 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
1140 /* Finish everything. */
1141 gfc_start_wrapped_block (&block, gfc_finish_block (&body));
1142 gfc_trans_deferred_vars (sym, &block);
1143 for (ass = code->ext.block.assoc; ass; ass = ass->next)
1144 trans_associate_var (ass->st->n.sym, &block);
1146 return gfc_finish_wrapped_block (&block);
1150 /* Translate the simple DO construct. This is where the loop variable has
1151 integer type and step +-1. We can't use this in the general case
1152 because integer overflow and floating point errors could give incorrect
1154 We translate a do loop from:
1156 DO dovar = from, to, step
1162 [Evaluate loop bounds and step]
1164 if ((step > 0) ? (dovar <= to) : (dovar => to))
1170 cond = (dovar == to);
1172 if (cond) goto end_label;
1177 This helps the optimizers by avoiding the extra induction variable
1178 used in the general case. */
1181 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
1182 tree from, tree to, tree step, tree exit_cond)
1188 tree saved_dovar = NULL;
1193 type = TREE_TYPE (dovar);
1195 loc = code->ext.iterator->start->where.lb->location;
1197 /* Initialize the DO variable: dovar = from. */
1198 gfc_add_modify_loc (loc, pblock, dovar, from);
1200 /* Save value for do-tinkering checking. */
1201 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1203 saved_dovar = gfc_create_var (type, ".saved_dovar");
1204 gfc_add_modify_loc (loc, pblock, saved_dovar, dovar);
1207 /* Cycle and exit statements are implemented with gotos. */
1208 cycle_label = gfc_build_label_decl (NULL_TREE);
1209 exit_label = gfc_build_label_decl (NULL_TREE);
1211 /* Put the labels where they can be found later. See gfc_trans_do(). */
1212 code->cycle_label = cycle_label;
1213 code->exit_label = exit_label;
1216 gfc_start_block (&body);
1218 /* Main loop body. */
1219 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1220 gfc_add_expr_to_block (&body, tmp);
1222 /* Label for cycle statements (if needed). */
1223 if (TREE_USED (cycle_label))
1225 tmp = build1_v (LABEL_EXPR, cycle_label);
1226 gfc_add_expr_to_block (&body, tmp);
1229 /* Check whether someone has modified the loop variable. */
1230 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1232 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node,
1233 dovar, saved_dovar);
1234 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1235 "Loop variable has been modified");
1238 /* Exit the loop if there is an I/O result condition or error. */
1241 tmp = build1_v (GOTO_EXPR, exit_label);
1242 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1244 build_empty_stmt (loc));
1245 gfc_add_expr_to_block (&body, tmp);
1248 /* Evaluate the loop condition. */
/* Compare BEFORE incrementing, so dovar == to means "last trip":
   this avoids the overflow that dovar+step could cause.  */
1249 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, dovar,
1251 cond = gfc_evaluate_now_loc (loc, cond, &body);
1253 /* Increment the loop variable. */
1254 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1255 gfc_add_modify_loc (loc, &body, dovar, tmp);
1257 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1258 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
1260 /* The loop exit. */
1261 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1262 TREE_USED (exit_label) = 1;
1263 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1264 cond, tmp, build_empty_stmt (loc));
1265 gfc_add_expr_to_block (&body, tmp);
1267 /* Finish the loop body. */
1268 tmp = gfc_finish_block (&body);
1269 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
1271 /* Only execute the loop if the number of iterations is positive. */
/* STEP is a compile-time constant here (+-1), so its sign picks the
   guard's comparison direction.  */
1272 if (tree_int_cst_sgn (step) > 0)
1273 cond = fold_build2_loc (loc, LE_EXPR, boolean_type_node, dovar,
1276 cond = fold_build2_loc (loc, GE_EXPR, boolean_type_node, dovar,
1278 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, cond, tmp,
1279 build_empty_stmt (loc));
1280 gfc_add_expr_to_block (pblock, tmp);
1282 /* Add the exit label. */
1283 tmp = build1_v (LABEL_EXPR, exit_label);
1284 gfc_add_expr_to_block (pblock, tmp);
1286 return gfc_finish_block (pblock);
1289 /* Translate the DO construct. This obviously is one of the most
1290 important ones to get right with any compiler, but especially
1293 We special case some loop forms as described in gfc_trans_simple_do.
1294 For other cases we implement them with a separate loop count,
1295 as described in the standard.
1297 We translate a do loop from:
1299 DO dovar = from, to, step
1305 [evaluate loop bounds and step]
1306 empty = (step > 0 ? to < from : to > from);
1307 countm1 = (to - from) / step;
1309 if (empty) goto exit_label;
1315 if (countm1 == 0) goto exit_label;
1320 countm1 is an unsigned integer. It is equal to the loop count minus one,
1321 because the loop count itself can overflow. */
1324 gfc_trans_do (gfc_code * code, tree exit_cond)
/* Translate a general DO construct.  Bounds and step are evaluated exactly
   once up front; unit-stride integer loops are dispatched to
   gfc_trans_simple_do, all other loops are driven by an unsigned
   count-minus-one variable so the trip count itself cannot overflow.  */
1328 tree saved_dovar = NULL;
1344 gfc_start_block (&block);
1346 loc = code->ext.iterator->start->where.lb->location;
1348 /* Evaluate all the expressions in the iterator.  */
1349 gfc_init_se (&se, NULL);
1350 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1351 gfc_add_block_to_block (&block, &se.pre);
1353 type = TREE_TYPE (dovar);
1355 gfc_init_se (&se, NULL);
1356 gfc_conv_expr_val (&se, code->ext.iterator->start);
1357 gfc_add_block_to_block (&block, &se.pre);
/* Force 'from', 'to' and 'step' into temporaries: each must be evaluated
   exactly once, before the loop runs.  */
1358 from = gfc_evaluate_now (se.expr, &block);
1360 gfc_init_se (&se, NULL);
1361 gfc_conv_expr_val (&se, code->ext.iterator->end);
1362 gfc_add_block_to_block (&block, &se.pre);
1363 to = gfc_evaluate_now (se.expr, &block);
1365 gfc_init_se (&se, NULL);
1366 gfc_conv_expr_val (&se, code->ext.iterator->step);
1367 gfc_add_block_to_block (&block, &se.pre);
1368 step = gfc_evaluate_now (se.expr, &block);
/* Optional -fcheck=do runtime diagnostic: a zero DO step is invalid.  */
1370 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1372 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, step,
1373 build_zero_cst (type));
1374 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1375 "DO step value is zero");
1378 /* Special case simple loops.  Direct generation is possible for an
   integer DO variable with a constant step of +1 or -1.  */
1379 if (TREE_CODE (type) == INTEGER_TYPE
1380 && (integer_onep (step)
1381 || tree_int_cst_equal (step, integer_minus_one_node)))
1382 return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);
1384 pos_step = fold_build2_loc (loc, GT_EXPR, boolean_type_node, step,
1385 build_zero_cst (type));
/* Pick an unsigned type for the iteration count; for a non-integer DO
   variable fall back to the (unsigned) array index width.  */
1387 if (TREE_CODE (type) == INTEGER_TYPE)
1388 utype = unsigned_type_for (type);
1390 utype = unsigned_type_for (gfc_array_index_type);
1391 countm1 = gfc_create_var (utype, "countm1");
1393 /* Cycle and exit statements are implemented with gotos.  */
1394 cycle_label = gfc_build_label_decl (NULL_TREE);
1395 exit_label = gfc_build_label_decl (NULL_TREE);
1396 TREE_USED (exit_label) = 1;
1398 /* Put these labels where they can be found later.  */
1399 code->cycle_label = cycle_label;
1400 code->exit_label = exit_label;
1402 /* Initialize the DO variable: dovar = from.  */
1403 gfc_add_modify (&block, dovar, from);
1405 /* Save value for do-tinkering checking.  */
1406 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1408 saved_dovar = gfc_create_var (type, ".saved_dovar");
1409 gfc_add_modify_loc (loc, &block, saved_dovar, dovar);
1412 /* Initialize loop count and jump to exit label if the loop is empty.
1413 This code is executed before we enter the loop body. We generate:
1414 step_sign = sign(1,step);
1425 countm1 = (to*step_sign - from*step_sign) / (step*step_sign);
/* Integer case: normalize by the sign of the step so the subtraction and
   division are done on a non-negative range, then compute countm1 in the
   unsigned type (to - from may overflow in the signed type).  */
1429 if (TREE_CODE (type) == INTEGER_TYPE)
1431 tree pos, neg, step_sign, to2, from2, step2;
1433 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1435 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, step,
1436 build_int_cst (TREE_TYPE (step), 0));
1437 step_sign = fold_build3_loc (loc, COND_EXPR, type, tmp,
1438 build_int_cst (type, -1),
1439 build_int_cst (type, 1));
/* Emptiness tests: for a positive step the loop is empty when to < from,
   for a negative step when to > from; either way jump straight out.  */
1441 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, to, from);
1442 pos = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1443 fold_build1_loc (loc, GOTO_EXPR, void_type_node,
1445 build_empty_stmt (loc));
1447 tmp = fold_build2_loc (loc, GT_EXPR, boolean_type_node, to,
1449 neg = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1450 fold_build1_loc (loc, GOTO_EXPR, void_type_node,
1452 build_empty_stmt (loc));
1453 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1454 pos_step, pos, neg);
1456 gfc_add_expr_to_block (&block, tmp);
1458 /* Calculate the loop count. to-from can overflow, so
1459 we cast to unsigned.  */
1461 to2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, to);
1462 from2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, from);
1463 step2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, step);
1464 step2 = fold_convert (utype, step2);
1465 tmp = fold_build2_loc (loc, MINUS_EXPR, type, to2, from2);
1466 tmp = fold_convert (utype, tmp);
1467 tmp = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype, tmp, step2);
1468 tmp = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, countm1, tmp);
1469 gfc_add_expr_to_block (&block, tmp);
/* Non-integer (real) DO variable: compute the count with a real division
   and truncate to the unsigned count type.  */
1473 /* TODO: We could use the same width as the real type.
1474 This would probably cause more problems than it solves
1475 when we implement "long double" types.  */
1477 tmp = fold_build2_loc (loc, MINUS_EXPR, type, to, from);
1478 tmp = fold_build2_loc (loc, RDIV_EXPR, type, tmp, step);
1479 tmp = fold_build1_loc (loc, FIX_TRUNC_EXPR, utype, tmp);
1480 gfc_add_modify (&block, countm1, tmp);
1482 /* We need a special check for empty loops:
1483 empty = (step > 0 ? to < from : to > from);  */
1484 tmp = fold_build3_loc (loc, COND_EXPR, boolean_type_node, pos_step,
1485 fold_build2_loc (loc, LT_EXPR,
1486 boolean_type_node, to, from),
1487 fold_build2_loc (loc, GT_EXPR,
1488 boolean_type_node, to, from));
1489 /* If the loop is empty, go directly to the exit label.  */
1490 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1491 build1_v (GOTO_EXPR, exit_label),
1492 build_empty_stmt (input_location));
1493 gfc_add_expr_to_block (&block, tmp);
1497 gfc_start_block (&body);
1499 /* Main loop body.  */
1500 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1501 gfc_add_expr_to_block (&body, tmp);
1503 /* Label for cycle statements (if needed).  */
1504 if (TREE_USED (cycle_label))
1506 tmp = build1_v (LABEL_EXPR, cycle_label);
1507 gfc_add_expr_to_block (&body, tmp);
1510 /* Check whether someone has modified the loop variable.  */
1511 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1513 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node, dovar,
1515 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1516 "Loop variable has been modified");
1519 /* Exit the loop if there is an I/O result condition or error.  */
1522 tmp = build1_v (GOTO_EXPR, exit_label);
1523 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1525 build_empty_stmt (input_location));
1526 gfc_add_expr_to_block (&body, tmp);
1529 /* Increment the loop variable.  */
1530 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1531 gfc_add_modify_loc (loc, &body, dovar, tmp);
/* Keep the shadow copy in sync so the next -fcheck=do test compares
   against the value we just stored.  */
1533 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1534 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
1536 /* End with the loop condition. Loop until countm1 == 0.  */
1537 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, countm1,
1538 build_int_cst (utype, 0));
1539 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1540 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1541 cond, tmp, build_empty_stmt (loc));
1542 gfc_add_expr_to_block (&body, tmp);
1544 /* Decrement the loop count.  */
1545 tmp = fold_build2_loc (loc, MINUS_EXPR, utype, countm1,
1546 build_int_cst (utype, 1));
1547 gfc_add_modify_loc (loc, &body, countm1, tmp);
1549 /* End of loop body.  */
1550 tmp = gfc_finish_block (&body);
1552 /* The for loop itself.  */
1553 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
1554 gfc_add_expr_to_block (&block, tmp);
1556 /* Add the exit label.  */
1557 tmp = build1_v (LABEL_EXPR, exit_label);
1558 gfc_add_expr_to_block (&block, tmp);
1560 return gfc_finish_block (&block);
1564 /* Translate the DO WHILE construct.
1577 if (! cond) goto exit_label;
1583 Because the evaluation of the exit condition `cond' may have side
1584 effects, we can't do much for empty loop bodies. The backend optimizers
1585 should be smart enough to eliminate any dead loops. */
1588 gfc_trans_do_while (gfc_code * code)
/* Translate DO WHILE: the exit condition is re-evaluated at the top of
   every iteration (it may have side effects), so the whole test lives
   inside the LOOP_EXPR body rather than in a loop header.  */
1596 /* Everything we build here is part of the loop body.  */
1597 gfc_start_block (&block);
1599 /* Cycle and exit statements are implemented with gotos.  */
1600 cycle_label = gfc_build_label_decl (NULL_TREE);
1601 exit_label = gfc_build_label_decl (NULL_TREE);
1603 /* Put the labels where they can be found later. See gfc_trans_do().  */
1604 code->cycle_label = cycle_label;
1605 code->exit_label = exit_label;
1607 /* Create a GIMPLE version of the exit condition.  */
1608 gfc_init_se (&cond, NULL);
1609 gfc_conv_expr_val (&cond, code->expr1);
1610 gfc_add_block_to_block (&block, &cond.pre);
/* Negate the condition: we jump out when the WHILE expression is false.  */
1611 cond.expr = fold_build1_loc (code->expr1->where.lb->location,
1612 TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1614 /* Build "IF (! cond) GOTO exit_label".  */
1615 tmp = build1_v (GOTO_EXPR, exit_label);
1616 TREE_USED (exit_label) = 1;
1617 tmp = fold_build3_loc (code->expr1->where.lb->location, COND_EXPR,
1618 void_type_node, cond.expr, tmp,
1619 build_empty_stmt (code->expr1->where.lb->location));
1620 gfc_add_expr_to_block (&block, tmp);
1622 /* The main body of the loop.  */
1623 tmp = gfc_trans_code (code->block->next);
1624 gfc_add_expr_to_block (&block, tmp);
1626 /* Label for cycle statements (if needed).  */
1627 if (TREE_USED (cycle_label))
1629 tmp = build1_v (LABEL_EXPR, cycle_label);
1630 gfc_add_expr_to_block (&block, tmp);
1633 /* End of loop body.  */
1634 tmp = gfc_finish_block (&block);
1636 gfc_init_block (&block);
1637 /* Build the loop.  */
1638 tmp = fold_build1_loc (code->expr1->where.lb->location, LOOP_EXPR,
1639 void_type_node, tmp);
1640 gfc_add_expr_to_block (&block, tmp);
1642 /* Add the exit label.  */
1643 tmp = build1_v (LABEL_EXPR, exit_label);
1644 gfc_add_expr_to_block (&block, tmp);
1646 return gfc_finish_block (&block);
1650 /* Translate the SELECT CASE construct for INTEGER case expressions,
1651 without killing all potential optimizations. The problem is that
1652 Fortran allows unbounded cases, but the back-end does not, so we
1653 need to intercept those before we enter the equivalent SWITCH_EXPR
1656 For example, we translate this,
1659 CASE (:100,101,105:115)
1669 to the GENERIC equivalent,
1673 case (minimum value for typeof(expr) ... 100:
1679 case 200 ... (maximum value for typeof(expr):
1696 gfc_trans_integer_select (gfc_code * code)
/* Translate an INTEGER SELECT CASE into a GENERIC SWITCH_EXPR.  Unbounded
   Fortran case ranges (CASE (:N), CASE (N:)) are clamped to the type's
   min/max values, since the middle end has no notion of open-ended cases.  */
1706 gfc_start_block (&block);
1708 /* Calculate the switch expression.  */
1709 gfc_init_se (&se, NULL);
1710 gfc_conv_expr_val (&se, code->expr1);
1711 gfc_add_block_to_block (&block, &se.pre);
1713 end_label = gfc_build_label_decl (NULL_TREE);
1715 gfc_init_block (&body);
/* Outer loop: one gfc_code block per CASE statement; inner loop: each
   case-value range attached to that CASE.  */
1717 for (c = code->block; c; c = c->block)
1719 for (cp = c->ext.block.case_list; cp; cp = cp->next)
1724 /* Assume it's the default case.  */
1725 low = high = NULL_TREE;
1729 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1732 /* If there's only a lower bound, set the high bound to the
1733 maximum value of the case expression.  */
1735 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1740 /* Three cases are possible here:
1742 1) There is no lower bound, e.g. CASE (:N).
1743 2) There is a lower bound .NE. high bound, that is
1744 a case range, e.g. CASE (N:M) where M>N (we make
1745 sure that M>N during type resolution).
1746 3) There is a lower bound, and it has the same value
1747 as the high bound, e.g. CASE (N:N). This is our
1748 internal representation of CASE(N).
1750 In the first and second case, we need to set a value for
1751 high. In the third case, we don't because the GCC middle
1752 end represents a single case value by just letting high be
1753 a NULL_TREE. We can't do that because we need to be able
1754 to represent unbounded cases.  */
1758 && mpz_cmp (cp->low->value.integer,
1759 cp->high->value.integer) != 0))
1760 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1763 /* Unbounded case.  */
1765 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1768 /* Build a label.  */
1769 label = gfc_build_label_decl (NULL_TREE);
1771 /* Add this case label.
1772 Add parameter 'label', make it match GCC backend.  */
1773 tmp = build_case_label (low, high, label);
1774 gfc_add_expr_to_block (&body, tmp);
1777 /* Add the statements for this case.  */
1778 tmp = gfc_trans_code (c->next);
1779 gfc_add_expr_to_block (&body, tmp);
1781 /* Break to the end of the construct.  Fortran has no fall-through
   between CASE blocks, so every block ends with this jump.  */
1782 tmp = build1_v (GOTO_EXPR, end_label);
1783 gfc_add_expr_to_block (&body, tmp);
1786 tmp = gfc_finish_block (&body);
1787 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1788 gfc_add_expr_to_block (&block, tmp);
1790 tmp = build1_v (LABEL_EXPR, end_label);
1791 gfc_add_expr_to_block (&block, tmp);
1793 return gfc_finish_block (&block);
1797 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1799 There are only two cases possible here, even though the standard
1800 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1801 .FALSE., and DEFAULT.
1803 We never generate more than two blocks here. Instead, we always
1804 try to eliminate the DEFAULT case. This way, we can translate this
1805 kind of SELECT construct to a simple
1809 expression in GENERIC. */
1812 gfc_trans_logical_select (gfc_code * code)
/* Translate a LOGICAL SELECT CASE into at most one COND_EXPR, folding the
   DEFAULT case into whichever of .TRUE./.FALSE. is absent.  */
1815 gfc_code *t, *f, *d;
1820 /* Assume we don't have any cases at all.  */
1823 /* Now see which ones we actually do have. We can have at most two
1824 cases in a single case list: one for .TRUE. and one for .FALSE.
1825 The default case is always separate. If the cases for .TRUE. and
1826 .FALSE. are in the same case list, the block for that case list is
1827 always executed, and we don't generate a COND_EXPR.  */
1828 for (c = code->block; c; c = c->block)
1830 for (cp = c->ext.block.case_list; cp; cp = cp->next)
1834 if (cp->low->value.logical == 0) /* .FALSE.  */
1836 else /* if (cp->value.logical != 0), thus .TRUE.  */
1844 /* Start a new block.  */
1845 gfc_start_block (&block);
1847 /* Calculate the switch expression. We always need to do this
1848 because it may have side effects.  */
1849 gfc_init_se (&se, NULL);
1850 gfc_conv_expr_val (&se, code->expr1);
1851 gfc_add_block_to_block (&block, &se.pre);
1853 if (t == f && t != NULL)
1855 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1856 translate the code for these cases, append it to the current
1858 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1862 tree true_tree, false_tree, stmt;
1864 true_tree = build_empty_stmt (input_location);
1865 false_tree = build_empty_stmt (input_location);
1867 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1868 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1869 make the missing case the default case.  */
1870 if (t != NULL && f != NULL)
1880 /* Translate the code for each of these blocks, and append it to
1881 the current block.  */
1883 true_tree = gfc_trans_code (t->next);
1886 false_tree = gfc_trans_code (f->next);
/* One conditional selects between the two translated branches.  */
1888 stmt = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1889 se.expr, true_tree, false_tree);
1890 gfc_add_expr_to_block (&block, stmt);
1893 return gfc_finish_block (&block);
1897 /* The jump table types are stored in static variables to avoid
1898 constructing them from scratch every single time. */
1899 static GTY(()) tree select_struct[2];
1901 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1902 Instead of generating compares and jumps, it is far simpler to
1903 generate a data structure describing the cases in order and call a
1904 library subroutine that locates the right case.
1905 This is particularly true because this is the only case where we
1906 might have to dispose of a temporary.
1907 The library subroutine returns a pointer to jump to or NULL if no
1908 branches are to be taken. */
1911 gfc_trans_character_select (gfc_code *code)
/* Translate a CHARACTER SELECT CASE.  Two strategies:
   1) If the selector is known to have length 1, lower to an integer
     SWITCH_EXPR on the first character (fast path below).
   2) Otherwise build a static, sorted jump table of case strings and call
     the library routine (_gfortran_select_string[_char4]) to pick the
     case number at run time, then switch on that number.  */
1913 tree init, end_label, tmp, type, case_num, label, fndecl;
1914 stmtblock_t block, body;
1919 VEC(constructor_elt,gc) *inits = NULL;
1921 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
1923 /* The jump table types are stored in static variables to avoid
1924 constructing them from scratch every single time.  */
1925 static tree ss_string1[2], ss_string1_len[2];
1926 static tree ss_string2[2], ss_string2_len[2];
1927 static tree ss_target[2];
/* Rewind to the head of the (doubly linked) sorted case list.  */
1929 cp = code->block->ext.block.case_list;
1930 while (cp->left != NULL)
1933 /* Generate the body */
1934 gfc_start_block (&block);
1935 gfc_init_se (&expr1se, NULL);
1936 gfc_conv_expr_reference (&expr1se, code->expr1);
1938 gfc_add_block_to_block (&block, &expr1se.pre);
1940 end_label = gfc_build_label_decl (NULL_TREE);
1942 gfc_init_block (&body);
1944 /* Attempt to optimize length 1 selects.  */
1945 if (integer_onep (expr1se.string_length))
/* First pass: verify every case value compares like a single character
   (trailing blanks are insignificant in Fortran comparisons); bail out
   of the optimization otherwise.  */
1947 for (d = cp; d; d = d->right)
1952 gcc_assert (d->low->expr_type == EXPR_CONSTANT
1953 && d->low->ts.type == BT_CHARACTER);
1954 if (d->low->value.character.length > 1)
1956 for (i = 1; i < d->low->value.character.length; i++)
1957 if (d->low->value.character.string[i] != ' ')
1959 if (i != d->low->value.character.length)
1961 if (optimize && d->high && i == 1)
1963 gcc_assert (d->high->expr_type == EXPR_CONSTANT
1964 && d->high->ts.type == BT_CHARACTER);
1965 if (d->high->value.character.length > 1
1966 && (d->low->value.character.string[0]
1967 == d->high->value.character.string[0])
1968 && d->high->value.character.string[1] != ' '
1969 && ((d->low->value.character.string[1] < ' ')
1970 == (d->high->value.character.string[1]
1980 gcc_assert (d->high->expr_type == EXPR_CONSTANT
1981 && d->high->ts.type == BT_CHARACTER);
1982 if (d->high->value.character.length > 1)
1984 for (i = 1; i < d->high->value.character.length; i++)
1985 if (d->high->value.character.string[i] != ' ')
1987 if (i != d->high->value.character.length)
/* Second pass: emit the integer switch over the first character.  */
1994 tree ctype = gfc_get_char_type (code->expr1->ts.kind);
1996 for (c = code->block; c; c = c->block)
1998 for (cp = c->ext.block.case_list; cp; cp = cp->next)
2004 /* Assume it's the default case.  */
2005 low = high = NULL_TREE;
2009 /* CASE ('ab') or CASE ('ab':'az') will never match
2010 any length 1 character.  */
2011 if (cp->low->value.character.length > 1
2012 && cp->low->value.character.string[1] != ' ')
2015 if (cp->low->value.character.length > 0)
2016 r = cp->low->value.character.string[0];
2019 low = build_int_cst (ctype, r);
2021 /* If there's only a lower bound, set the high bound
2022 to the maximum value of the case expression.  */
2024 high = TYPE_MAX_VALUE (ctype);
2030 || (cp->low->value.character.string[0]
2031 != cp->high->value.character.string[0]))
2033 if (cp->high->value.character.length > 0)
2034 r = cp->high->value.character.string[0];
2037 high = build_int_cst (ctype, r);
2040 /* Unbounded case.  */
2042 low = TYPE_MIN_VALUE (ctype);
2045 /* Build a label.  */
2046 label = gfc_build_label_decl (NULL_TREE);
2048 /* Add this case label.
2049 Add parameter 'label', make it match GCC backend.  */
2050 tmp = build_case_label (low, high, label);
2051 gfc_add_expr_to_block (&body, tmp);
2054 /* Add the statements for this case.  */
2055 tmp = gfc_trans_code (c->next);
2056 gfc_add_expr_to_block (&body, tmp);
2058 /* Break to the end of the construct.  */
2059 tmp = build1_v (GOTO_EXPR, end_label);
2060 gfc_add_expr_to_block (&body, tmp);
/* Extract the selector's single character into 'case_num' and switch.  */
2063 tmp = gfc_string_to_single_character (expr1se.string_length,
2065 code->expr1->ts.kind);
2066 case_num = gfc_create_var (ctype, "case_num");
2067 gfc_add_modify (&block, case_num, tmp);
2069 gfc_add_block_to_block (&block, &expr1se.post);
2071 tmp = gfc_finish_block (&body);
2072 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
2073 gfc_add_expr_to_block (&block, tmp);
2075 tmp = build1_v (LABEL_EXPR, end_label);
2076 gfc_add_expr_to_block (&block, tmp);
2078 return gfc_finish_block (&block);
/* General path: index k selects the kind-1 or kind-4 variant of the
   cached jump-table record type.  */
2082 if (code->expr1->ts.kind == 1)
2084 else if (code->expr1->ts.kind == 4)
2089 if (select_struct[k] == NULL)
2092 select_struct[k] = make_node (RECORD_TYPE);
2094 if (code->expr1->ts.kind == 1)
2095 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
2096 else if (code->expr1->ts.kind == 4)
2097 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
2102 #define ADD_FIELD(NAME, TYPE) \
2103 ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \
2104 get_identifier (stringize(NAME)), \
2108 ADD_FIELD (string1, pchartype);
2109 ADD_FIELD (string1_len, gfc_charlen_type_node);
2111 ADD_FIELD (string2, pchartype);
2112 ADD_FIELD (string2_len, gfc_charlen_type_node);
2114 ADD_FIELD (target, integer_type_node);
2117 gfc_finish_type (select_struct[k]);
2121 for (d = cp; d; d = d->right)
/* Emit the case bodies, each reached via an integer case number d->n.  */
2124 for (c = code->block; c; c = c->block)
2126 for (d = c->ext.block.case_list; d; d = d->next)
2128 label = gfc_build_label_decl (NULL_TREE);
2129 tmp = build_case_label ((d->low == NULL && d->high == NULL)
2131 : build_int_cst (integer_type_node, d->n),
2133 gfc_add_expr_to_block (&body, tmp);
2136 tmp = gfc_trans_code (c->next);
2137 gfc_add_expr_to_block (&body, tmp);
2139 tmp = build1_v (GOTO_EXPR, end_label);
2140 gfc_add_expr_to_block (&body, tmp);
2143 /* Generate the structure describing the branches */
2144 for (d = cp; d; d = d->right)
2146 VEC(constructor_elt,gc) *node = NULL;
2148 gfc_init_se (&se, NULL);
/* A NULL low/high string is encoded as a null pointer with length 0.  */
2152 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node);
2153 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node);
2157 gfc_conv_expr_reference (&se, d->low);
2159 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr);
2160 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length);
2163 if (d->high == NULL)
2165 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node);
2166 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node);
2170 gfc_init_se (&se, NULL);
2171 gfc_conv_expr_reference (&se, d->high);
2173 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr);
2174 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length);
2177 CONSTRUCTOR_APPEND_ELT (node, ss_target[k],
2178 build_int_cst (integer_type_node, d->n));
2180 tmp = build_constructor (select_struct[k], node);
2181 CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp);
2184 type = build_array_type (select_struct[k],
2185 build_index_type (size_int (n-1)));
2187 init = build_constructor (type, inits);
2188 TREE_CONSTANT (init) = 1;
2189 TREE_STATIC (init) = 1;
2190 /* Create a static variable to hold the jump table.  */
2191 tmp = gfc_create_var (type, "jumptable");
2192 TREE_CONSTANT (tmp) = 1;
2193 TREE_STATIC (tmp) = 1;
2194 TREE_READONLY (tmp) = 1;
2195 DECL_INITIAL (tmp) = init;
2198 /* Build the library call */
2199 init = gfc_build_addr_expr (pvoid_type_node, init);
2201 if (code->expr1->ts.kind == 1)
2202 fndecl = gfor_fndecl_select_string;
2203 else if (code->expr1->ts.kind == 4)
2204 fndecl = gfor_fndecl_select_string_char4;
/* The runtime routine returns the case number; switch on it.  */
2208 tmp = build_call_expr_loc (input_location,
2210 build_int_cst (gfc_charlen_type_node, n),
2211 expr1se.expr, expr1se.string_length);
2212 case_num = gfc_create_var (integer_type_node, "case_num");
2213 gfc_add_modify (&block, case_num, tmp);
2215 gfc_add_block_to_block (&block, &expr1se.post);
2217 tmp = gfc_finish_block (&body);
2218 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
2219 gfc_add_expr_to_block (&block, tmp);
2221 tmp = build1_v (LABEL_EXPR, end_label);
2222 gfc_add_expr_to_block (&block, tmp);
2224 return gfc_finish_block (&block);
2228 /* Translate the three variants of the SELECT CASE construct.
2230 SELECT CASEs with INTEGER case expressions can be translated to an
2231 equivalent GENERIC switch statement, and for LOGICAL case
2232 expressions we build one or two if-else compares.
2234 SELECT CASEs with CHARACTER case expressions are a whole different
2235 story, because they don't exist in GENERIC. So we sort them and
2236 do a binary search at runtime.
2238 Fortran has no BREAK statement, and it does not allow jumps from
2239 one case block to another. That makes things a lot easier for
2243 gfc_trans_select (gfc_code * code)
/* Dispatch a SELECT CASE construct to the type-specific translator
   (LOGICAL, INTEGER or CHARACTER) and append the shared exit label.  */
2249 gcc_assert (code && code->expr1);
2250 gfc_init_block (&block);
2252 /* Build the exit label and hang it in.  */
2253 exit_label = gfc_build_label_decl (NULL_TREE);
2254 code->exit_label = exit_label;
2256 /* Empty SELECT constructs are legal.  */
2257 if (code->block == NULL)
2258 body = build_empty_stmt (input_location);
2260 /* Select the correct translation function.  */
2262 switch (code->expr1->ts.type)
2265 body = gfc_trans_logical_select (code);
2269 body = gfc_trans_integer_select (code);
2273 body = gfc_trans_character_select (code);
2277 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
2281 /* Build everything together.  */
2282 gfc_add_expr_to_block (&block, body);
2283 gfc_add_expr_to_block (&block, build1_v (LABEL_EXPR, exit_label));
2285 return gfc_finish_block (&block);
2289 /* Traversal function to substitute a replacement symtree if the symbol
2290 in the expression is the same as that passed. f == 2 signals that
2291 that variable itself is not to be checked - only the references.
2292 This group of functions is used when the variable expression in a
2293 FORALL assignment has internal references. For example:
2294 FORALL (i = 1:4) p(p(i)) = i
2295 The only recourse here is to store a copy of 'p' for the index
2298 static gfc_symtree *new_symtree;
2299 static gfc_symtree *old_symtree;
2302 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
/* gfc_traverse_expr callback: swap in the replacement symtree wherever a
   variable reference names SYM.  *f == 2 means the outermost variable
   itself is exempt — only its references are rewritten.  */
2304 if (expr->expr_type != EXPR_VARIABLE)
2309 else if (expr->symtree->n.sym == sym)
2310 expr->symtree = new_symtree;
2316 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
/* Walk expression E, replacing references to SYM via forall_replace.  */
2318 gfc_traverse_expr (e, sym, forall_replace, f);
2322 forall_restore (gfc_expr *expr,
2323 gfc_symbol *sym ATTRIBUTE_UNUSED,
2324 int *f ATTRIBUTE_UNUSED)
/* gfc_traverse_expr callback: undo forall_replace by restoring the
   original symtree on every reference that points at the temporary.  */
2326 if (expr->expr_type != EXPR_VARIABLE)
2329 if (expr->symtree == new_symtree)
2330 expr->symtree = old_symtree;
2336 forall_restore_symtree (gfc_expr *e)
/* Walk expression E, restoring the original symtree via forall_restore.  */
2338 gfc_traverse_expr (e, NULL, forall_restore, 0);
2342 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
/* Make a temporary copy of the lvalue of a FORALL assignment C and redirect
   both sides of the assignment to it via a fake symtree (stored in the
   file-scope new_symtree/old_symtree pair; see cleanup_forall_symtrees).
   Setup code goes into PRE, teardown into POST.  */
2347 gfc_symbol *new_sym;
2348 gfc_symbol *old_sym;
2352 /* Build a copy of the lvalue.  */
2353 old_symtree = c->expr1->symtree;
2354 old_sym = old_symtree->n.sym;
2355 e = gfc_lval_expr_from_sym (old_sym);
2356 if (old_sym->attr.dimension)
/* Array case: create an array temporary through the subref machinery.  */
2358 gfc_init_se (&tse, NULL);
2359 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
2360 gfc_add_block_to_block (pre, &tse.pre);
2361 gfc_add_block_to_block (post, &tse.post);
2362 tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
2364 if (e->ts.type != BT_CHARACTER)
2366 /* Use the variable offset for the temporary.  */
2367 tmp = gfc_conv_array_offset (old_sym->backend_decl);
2368 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
/* Scalar case: build a scalar (or character) temporary and copy the
   current value into it.  */
2373 gfc_init_se (&tse, NULL);
2374 gfc_init_se (&rse, NULL);
2375 gfc_conv_expr (&rse, e);
2376 if (e->ts.type == BT_CHARACTER)
2378 tse.string_length = rse.string_length;
2379 tmp = gfc_get_character_type_len (gfc_default_character_kind,
2381 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
2383 gfc_add_block_to_block (pre, &tse.pre);
2384 gfc_add_block_to_block (post, &tse.post);
2388 tmp = gfc_typenode_for_spec (&e->ts);
2389 tse.expr = gfc_create_var (tmp, "temp");
2392 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
2393 e->expr_type == EXPR_VARIABLE, true);
2394 gfc_add_expr_to_block (pre, tmp);
2398 /* Create a new symbol to represent the lvalue.  */
2399 new_sym = gfc_new_symbol (old_sym->name, NULL);
2400 new_sym->ts = old_sym->ts;
2401 new_sym->attr.referenced = 1;
2402 new_sym->attr.temporary = 1;
2403 new_sym->attr.dimension = old_sym->attr.dimension;
2404 new_sym->attr.flavor = old_sym->attr.flavor;
2406 /* Use the temporary as the backend_decl.  */
2407 new_sym->backend_decl = tse.expr;
2409 /* Create a fake symtree for it.  */
2411 new_symtree = gfc_new_symtree (&root, old_sym->name);
2412 new_symtree->n.sym = new_sym;
2413 gcc_assert (new_symtree == root);
2415 /* Go through the expression reference replacing the old_symtree
2417 forall_replace_symtree (c->expr1, old_sym, 2);
2419 /* Now we have made this temporary, we might as well use it for
2420 the right hand side.  */
2421 forall_replace_symtree (c->expr2, old_sym, 1);
2425 /* Handles dependencies in forall assignments. */
2427 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
/* Detect dependencies in a FORALL assignment C that require the lvalue to
   be copied to a temporary (via forall_make_variable_temp); copy setup
   goes into PRE, teardown into POST.  NOTE(review): the return-value
   statements are elided in this extract — presumably it reports whether a
   temporary is needed; confirm against the full source.  */
2434 lsym = c->expr1->symtree->n.sym;
2435 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
2437 /* Now check for dependencies within the 'variable'
2438 expression itself. These are treated by making a complete
2439 copy of variable and changing all the references to it
2440 point to the copy instead. Note that the shallow copy of
2441 the variable will not suffice for derived types with
2442 pointer components. We therefore leave these to their
2444 if (lsym->ts.type == BT_DERIVED
2445 && lsym->ts.u.derived->attr.pointer_comp)
/* A FORALL index appearing inside the lvalue's own references (e.g.
   p(p(i)) = i) forces the copy.  */
2449 if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
2451 forall_make_variable_temp (c, pre, post);
2455 /* Substrings with dependencies are treated in the same
2457 if (c->expr1->ts.type == BT_CHARACTER
2459 && c->expr2->expr_type == EXPR_VARIABLE
2460 && lsym == c->expr2->symtree->n.sym)
2462 for (lref = c->expr1->ref; lref; lref = lref->next)
2463 if (lref->type == REF_SUBSTRING)
2465 for (rref = c->expr2->ref; rref; rref = rref->next)
2466 if (rref->type == REF_SUBSTRING)
/* Overlapping substrings of the same symbol: copy when the rhs substring
   starts before the lhs substring.  */
2470 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
2472 forall_make_variable_temp (c, pre, post);
2481 cleanup_forall_symtrees (gfc_code *c)
/* Undo the symtree substitution done by forall_make_variable_temp and
   release the temporary symbol it created.  */
2483 forall_restore_symtree (c->expr1);
2484 forall_restore_symtree (c->expr2);
2485 free (new_symtree->n.sym);
2490 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2491 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2492 indicates whether we should generate code to test the FORALLs mask
2493 array. OUTER is the loop header to be used for initializing mask
2496 The generated loop format is:
2497 count = (end - start + step) / step
2510 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
2511 int mask_flag, stmtblock_t *outer)
/* Wrap BODY in one counted loop per FORALL index of FORALL_TMP, innermost
   index first.  Each loop runs its index from start by step for a
   precomputed count of iterations; mask handling (when MASK_FLAG and a
   mask exist) initializes and advances the mask index alongside the
   innermost (n == 0) loop.  OUTER receives the mask-index initialization
   that must happen outside the whole nest.  */
2519 tree var, start, end, step;
2522 /* Initialize the mask index outside the FORALL nest.  */
2523 if (mask_flag && forall_tmp->mask)
2524 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
2526 iter = forall_tmp->this_loop;
2527 nvar = forall_tmp->nvar;
2528 for (n = 0; n < nvar; n++)
2531 start = iter->start;
2535 exit_label = gfc_build_label_decl (NULL_TREE);
2536 TREE_USED (exit_label) = 1;
2538 /* The loop counter.  */
2539 count = gfc_create_var (TREE_TYPE (var), "count");
2541 /* The body of the loop.  */
2542 gfc_init_block (&block);
2544 /* The exit condition.  Leave the loop once count reaches zero.  */
2545 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
2546 count, build_int_cst (TREE_TYPE (count), 0));
2547 tmp = build1_v (GOTO_EXPR, exit_label);
2548 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2549 cond, tmp, build_empty_stmt (input_location));
2550 gfc_add_expr_to_block (&block, tmp);
2552 /* The main loop body.  */
2553 gfc_add_expr_to_block (&block, body);
2555 /* Increment the loop variable.  */
2556 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), var,
2558 gfc_add_modify (&block, var, tmp);
2560 /* Advance to the next mask element. Only do this for the
2562 if (n == 0 && mask_flag && forall_tmp->mask)
2564 tree maskindex = forall_tmp->maskindex;
2565 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2566 maskindex, gfc_index_one_node);
2567 gfc_add_modify (&block, maskindex, tmp);
2570 /* Decrement the loop counter.  */
2571 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), count,
2572 build_int_cst (TREE_TYPE (var), 1));
2573 gfc_add_modify (&block, count, tmp);
2575 body = gfc_finish_block (&block);
2577 /* Loop var initialization.  */
2578 gfc_init_block (&block);
2579 gfc_add_modify (&block, var, start);
2582 /* Initialize the loop counter: count = (end - start + step) / step.  */
2583 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), step,
2585 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), end,
2587 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, TREE_TYPE (var),
2589 gfc_add_modify (&block, count, tmp);
2591 /* The loop expression.  */
2592 tmp = build1_v (LOOP_EXPR, body);
2593 gfc_add_expr_to_block (&block, tmp);
2595 /* The exit label.  */
2596 tmp = build1_v (LABEL_EXPR, exit_label);
2597 gfc_add_expr_to_block (&block, tmp);
/* The finished loop becomes the body of the next-outer index's loop.  */
2599 body = gfc_finish_block (&block);
2606 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2607 is nonzero, the body is controlled by all masks in the forall nest.
2608 Otherwise, the innermost loop is not controlled by its mask. This
2609 is used for initializing that mask. */
2612 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
/* Walk the FORALL nest from the innermost level outward (via prev_nest),
   wrapping BODY first in that level's mask test (when present) and then
   in that level's loops.  Mask-index initializations are collected in
   HEADER, which is returned with the fully-wrapped body appended.  */
2617 forall_info *forall_tmp;
2618 tree mask, maskindex;
2620 gfc_start_block (&header);
2622 forall_tmp = nested_forall_info;
2623 while (forall_tmp != NULL)
2625 /* Generate body with masks' control. */
2628 mask = forall_tmp->mask;
2629 maskindex = forall_tmp->maskindex;
2631 /* If a mask was specified make the assignment conditional. */
2634 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2635 body = build3_v (COND_EXPR, tmp, body,
2636 build_empty_stmt (input_location));
/* Wrap the (possibly mask-guarded) body in this level's loops.  */
2639 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2640 forall_tmp = forall_tmp->prev_nest;
2644 gfc_add_expr_to_block (&header, body);
2645 return gfc_finish_block (&header);
2649 /* Allocate data for holding a temporary array. Returns either a local
2650 temporary array or a pointer variable. */
2653 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
/* Build the element-count range/array type, then either create a stack
   temporary (only possible when SIZE is a compile-time constant small
   enough for the stack) or malloc heap storage.  In the heap case *PDATA
   receives the pointer (as void *) so the caller can free it later.  */
2660 if (INTEGER_CST_P (size))
2661 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
2662 size, gfc_index_one_node);
/* Array type with bounds [0 .. size-1].  */
2666 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2667 type = build_array_type (elem_type, type);
2668 if (gfc_can_put_var_on_stack (bytesize))
/* Stack path: a plain local array; nothing for the caller to free.  */
2670 gcc_assert (INTEGER_CST_P (size));
2671 tmpvar = gfc_create_var (type, "temp");
/* Heap path: a pointer variable assigned from malloc.  */
2676 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2677 *pdata = convert (pvoid_type_node, tmpvar);
2679 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2680 gfc_add_modify (pblock, tmpvar, tmp);
2686 /* Generate codes to copy the temporary to the actual lhs. */
2689 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2690 tree count1, gfc_ss *lss, gfc_ss *rss, tree wheremask, bool invert)
/* Copy the temporary TMP1 back to the LHS expression EXPR.  COUNT1
   indexes the temporary, COUNT3 the optional WHEREMASK (whose sense is
   inverted when INVERT).  A scalar LHS gets a single assignment; an
   array LHS gets a scalarized loop.  Returns the generated block.  */
2694 stmtblock_t block, body;
2700 lss = gfc_walk_expr (expr);
2702 if (lss == gfc_ss_terminator)
/* Scalar LHS: single assignment lhs = tmp1[count1], then bump count1.  */
2704 gfc_start_block (&block);
2706 gfc_init_se (&lse, NULL);
2708 /* Translate the expression. */
2709 gfc_conv_expr (&lse, expr);
2711 /* Form the expression for the temporary. */
2712 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2714 /* Use the scalar assignment as is. */
2715 gfc_add_block_to_block (&block, &lse.pre);
2716 gfc_add_modify (&block, lse.expr, tmp);
2717 gfc_add_block_to_block (&block, &lse.post);
2719 /* Increment the count1. */
2720 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
2721 count1, gfc_index_one_node);
2722 gfc_add_modify (&block, count1, tmp);
2724 tmp = gfc_finish_block (&block);
/* Array LHS: set up the scalarizer over the LHS and copy elementwise.  */
2728 gfc_start_block (&block);
2730 gfc_init_loopinfo (&loop1);
2731 gfc_init_se (&rse, NULL);
2732 gfc_init_se (&lse, NULL);
2734 /* Associate the lss with the loop. */
2735 gfc_add_ss_to_loop (&loop1, lss);
2737 /* Calculate the bounds of the scalarization. */
2738 gfc_conv_ss_startstride (&loop1);
2739 /* Setup the scalarizing loops. */
2740 gfc_conv_loop_setup (&loop1, &expr->where);
2742 gfc_mark_ss_chain_used (lss, 1);
2744 /* Start the scalarized loop body. */
2745 gfc_start_scalarized_body (&loop1, &body);
2747 /* Setup the gfc_se structures. */
2748 gfc_copy_loopinfo_to_se (&lse, &loop1);
2751 /* Form the expression of the temporary. */
2752 if (lss != gfc_ss_terminator)
2753 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2754 /* Translate expr. */
2755 gfc_conv_expr (&lse, expr);
2757 /* Use the scalar assignment. */
2758 rse.string_length = lse.string_length;
2759 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);
2761 /* Form the mask expression according to the mask tree list;
   INVERT negates the mask sense (ELSEWHERE branches).  */
2764 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2766 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
2767 TREE_TYPE (wheremaskexpr),
2769 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2771 build_empty_stmt (input_location));
2774 gfc_add_expr_to_block (&body, tmp);
2776 /* Increment count1. */
2777 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2778 count1, gfc_index_one_node);
2779 gfc_add_modify (&body, count1, tmp);
2781 /* Increment count3 (the wheremask index).  */
2784 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2785 gfc_array_index_type, count3,
2786 gfc_index_one_node);
2787 gfc_add_modify (&body, count3, tmp);
2790 /* Generate the copying loops. */
2791 gfc_trans_scalarizing_loops (&loop1, &body);
2792 gfc_add_block_to_block (&block, &loop1.pre);
2793 gfc_add_block_to_block (&block, &loop1.post);
2794 gfc_cleanup_loop (&loop1);
2796 tmp = gfc_finish_block (&block);
2802 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2803 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2804 and should not be freed. WHEREMASK is the conditional execution mask
2805 whose sense may be inverted by INVERT. */
2808 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2809 tree count1, gfc_ss *lss, gfc_ss *rss,
2810 tree wheremask, bool invert)
/* Evaluate the RHS EXPR2 into the temporary TMP1.  LSS/RSS come from
   compute_inner_temp_size and must not be freed here.  COUNT1 indexes
   the temporary, COUNT3 the optional WHEREMASK (sense inverted when
   INVERT).  Scalar and array RHS take different paths below.  */
2812 stmtblock_t block, body1;
2819 gfc_start_block (&block);
2821 gfc_init_se (&rse, NULL);
2822 gfc_init_se (&lse, NULL);
2824 if (lss == gfc_ss_terminator)
/* Scalar case: evaluate once, store into tmp1[count1].  */
2826 gfc_init_block (&body1);
2827 gfc_conv_expr (&rse, expr2);
2828 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
/* Array case: full scalarized loop over the RHS.  */
2832 /* Initialize the loop. */
2833 gfc_init_loopinfo (&loop);
2835 /* We may need LSS to determine the shape of the expression. */
2836 gfc_add_ss_to_loop (&loop, lss);
2837 gfc_add_ss_to_loop (&loop, rss);
2839 gfc_conv_ss_startstride (&loop);
2840 gfc_conv_loop_setup (&loop, &expr2->where);
2842 gfc_mark_ss_chain_used (rss, 1);
2843 /* Start the loop body. */
2844 gfc_start_scalarized_body (&loop, &body1);
2846 /* Translate the expression. */
2847 gfc_copy_loopinfo_to_se (&rse, &loop);
2849 gfc_conv_expr (&rse, expr2);
2851 /* Form the expression of the temporary. */
2852 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2855 /* Use the scalar assignment. */
2856 lse.string_length = rse.string_length;
2857 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2858 expr2->expr_type == EXPR_VARIABLE, true);
2860 /* Form the mask expression according to the mask tree list;
   INVERT negates the mask sense.  */
2863 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2865 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
2866 TREE_TYPE (wheremaskexpr),
2868 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2870 build_empty_stmt (input_location));
2873 gfc_add_expr_to_block (&body1, tmp);
2875 if (lss == gfc_ss_terminator)
/* Scalar path: emit the single assignment and bump count1 in BLOCK.  */
2877 gfc_add_block_to_block (&block, &body1);
2879 /* Increment count1. */
2880 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
2881 count1, gfc_index_one_node);
2882 gfc_add_modify (&block, count1, tmp);
/* Array path: increments happen inside the scalarized body.  */
2886 /* Increment count1. */
2887 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2888 count1, gfc_index_one_node);
2889 gfc_add_modify (&body1, count1, tmp);
2891 /* Increment count3 (the wheremask index).  */
2894 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2895 gfc_array_index_type,
2896 count3, gfc_index_one_node);
2897 gfc_add_modify (&body1, count3, tmp);
2900 /* Generate the copying loops. */
2901 gfc_trans_scalarizing_loops (&loop, &body1);
2903 gfc_add_block_to_block (&block, &loop.pre);
2904 gfc_add_block_to_block (&block, &loop.post);
2906 gfc_cleanup_loop (&loop);
2907 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2908 as tree nodes in SS may not be valid in different scope. */
2911 tmp = gfc_finish_block (&block);
2916 /* Calculate the size of temporary needed in the assignment inside forall.
2917 LSS and RSS are filled in this function. */
2920 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2921 stmtblock_t * pblock,
2922 gfc_ss **lss, gfc_ss **rss)
/* Return the element count of the temporary needed for EXPR1 = EXPR2
   inside FORALL.  *LSS/*RSS are filled from walking the two expressions
   and are reused by the caller.  For a scalar LHS the size is one;
   otherwise the scalarizer bounds give (to - from + 1) per dimension.  */
2930 *lss = gfc_walk_expr (expr1);
2933 size = gfc_index_one_node;
2934 if (*lss != gfc_ss_terminator)
2936 gfc_init_loopinfo (&loop);
2938 /* Walk the RHS of the expression. */
2939 *rss = gfc_walk_expr (expr2);
2940 if (*rss == gfc_ss_terminator)
2942 /* The rhs is scalar. Add a ss for the expression. */
2943 *rss = gfc_get_ss ();
2944 (*rss)->next = gfc_ss_terminator;
2945 (*rss)->type = GFC_SS_SCALAR;
2946 (*rss)->expr = expr2;
2949 /* Associate the SS with the loop. */
2950 gfc_add_ss_to_loop (&loop, *lss);
2951 /* We don't actually need to add the rhs at this point, but it might
2952 make guessing the loop bounds a bit easier. */
2953 gfc_add_ss_to_loop (&loop, *rss);
2955 /* We only want the shape of the expression, not rest of the junk
2956 generated by the scalarizer. */
2957 loop.array_parameter = 1;
2959 /* Calculate the bounds of the scalarization.  Bounds checking is
   temporarily disabled here: only the shape matters.  */
2960 save_flag = gfc_option.rtcheck;
2961 gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
2962 gfc_conv_ss_startstride (&loop);
2963 gfc_option.rtcheck = save_flag;
2964 gfc_conv_loop_setup (&loop, &expr2->where);
2966 /* Figure out how many elements we need:
   size = product over dims of (1 - from[i] + to[i]).  */
2967 for (i = 0; i < loop.dimen; i++)
2969 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2970 gfc_array_index_type,
2971 gfc_index_one_node, loop.from[i]);
2972 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2973 gfc_array_index_type, tmp, loop.to[i]);
2974 size = fold_build2_loc (input_location, MULT_EXPR,
2975 gfc_array_index_type, size, tmp);
/* Evaluate the size now so PBLOCK owns the computation.  */
2977 gfc_add_block_to_block (pblock, &loop.pre);
2978 size = gfc_evaluate_now (size, pblock);
2979 gfc_add_block_to_block (pblock, &loop.post);
2981 /* TODO: write a function that cleans up a loopinfo without freeing
2982 the SS chains. Currently a NOP. */
2989 /* Calculate the overall iterator number of the nested forall construct.
2990 This routine actually calculates the number of times the body of the
2991 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2992 that by the expression INNER_SIZE. The BLOCK argument specifies the
2993 block in which to calculate the result, and the optional INNER_SIZE_BODY
2994 argument contains any statements that need to executed (inside the loop)
2995 to initialize or calculate INNER_SIZE. */
2998 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2999 stmtblock_t *inner_size_body, stmtblock_t *block)
/* Multiply INNER_SIZE by the iteration count of every level of the
   FORALL nest.  Constant, unmasked inner levels are folded at compile
   time; any remaining levels are handled with a runtime accumulation
   loop over a "num" variable.  */
3001 forall_info *forall_tmp = nested_forall_info;
3005 /* We can eliminate the innermost unconditional loops with constant
   sizes by folding them into INNER_SIZE directly.  */
3007 if (INTEGER_CST_P (inner_size))
3010 && !forall_tmp->mask
3011 && INTEGER_CST_P (forall_tmp->size)
3013 inner_size = fold_build2_loc (input_location, MULT_EXPR,
3014 gfc_array_index_type,
3015 inner_size, forall_tmp->size);
3016 forall_tmp = forall_tmp->prev_nest;
3019 /* If there are no loops left, we have our constant result. */
3024 /* Otherwise, create a temporary variable to compute the result. */
3025 number = gfc_create_var (gfc_array_index_type, "num");
3026 gfc_add_modify (block, number, gfc_index_zero_node);
3028 gfc_start_block (&body);
3029 if (inner_size_body)
3030 gfc_add_block_to_block (&body, inner_size_body);
/* num += inner_size, executed once per surviving-nest iteration.  */
3032 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3033 gfc_array_index_type, number, inner_size);
3036 gfc_add_modify (&body, number, tmp);
3037 tmp = gfc_finish_block (&body);
3039 /* Generate loops. */
3040 if (forall_tmp != NULL)
3041 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
3043 gfc_add_expr_to_block (block, tmp);
3049 /* Allocate temporary for forall construct. SIZE is the size of temporary
3050 needed. PTEMP1 is returned for space free. */
3053 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
/* Convert the element count SIZE into a byte size (skipping the multiply
   for one-byte elements) and delegate to gfc_do_allocate.  *PTEMP1
   receives the heap pointer to free, when heap allocation was used.  */
3060 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
3061 if (!integer_onep (unit))
3062 bytesize = fold_build2_loc (input_location, MULT_EXPR,
3063 gfc_array_index_type, size, unit);
3068 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
/* Dereference the pointer so callers index the temporary directly.  */
3071 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3076 /* Allocate temporary for forall construct according to the information in
3077 nested_forall_info. INNER_SIZE is the size of temporary needed in the
3078 assignment inside forall. PTEMP1 is returned for space free. */
3081 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
3082 tree inner_size, stmtblock_t * inner_size_body,
3083 stmtblock_t * block, tree * ptemp1)
/* Compute total size = inner_size * (iterations of the whole nest),
   then allocate the temporary.  *PTEMP1 is set for a later free.  */
3087 /* Calculate the total size of temporary needed in forall construct. */
3088 size = compute_overall_iter_number (nested_forall_info, inner_size,
3089 inner_size_body, block);
3091 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
3095 /* Handle assignments inside forall which need temporary.
3097 forall (i=start:end:stride; maskexpr)
3100 (where e,f<i> are arbitrary expressions possibly involving i
3101 and there is a dependency between e<i> and f<i>)
3103 masktmp(:) = maskexpr(:)
3108 for (i = start; i <= end; i += stride)
3112 for (i = start; i <= end; i += stride)
3114 if (masktmp[maskindex++])
3115 tmp[count1++] = f<i>
3119 for (i = start; i <= end; i += stride)
3121 if (masktmp[maskindex++])
3122 e<i> = tmp[count1++]
3127 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
3128 tree wheremask, bool invert,
3129 forall_info * nested_forall_info,
3130 stmtblock_t * block)
/* Two-pass FORALL assignment with an RHS/LHS dependency: pass one
   evaluates every RHS into a flat temporary; pass two copies the
   temporary to the LHS.  Both passes are driven by the same nested
   FORALL loops so element order matches.  */
3138 stmtblock_t inner_size_body;
3140 /* Create vars. count1 is the current iterator number of the nested
   forall (index into the flat temporary).  */
3142 count1 = gfc_create_var (gfc_array_index_type, "count1");
3144 /* Count is the wheremask index. */
3147 count = gfc_create_var (gfc_array_index_type, "count");
3148 gfc_add_modify (block, count, gfc_index_zero_node);
3153 /* Initialize count1. */
3154 gfc_add_modify (block, count1, gfc_index_zero_node);
3156 /* Calculate the size of temporary needed in the assignment. Return loop, lss
3157 and rss which are used in function generate_loop_for_rhs_to_temp(). */
3158 gfc_init_block (&inner_size_body);
3159 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
3162 /* The type of LHS. Used in function allocate_temp_for_forall_nest.
   Character LHS needs its length evaluated for the element type.  */
3163 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
3165 if (!expr1->ts.u.cl->backend_decl)
3168 gfc_init_se (&tse, NULL);
3169 gfc_conv_expr (&tse, expr1->ts.u.cl->length);
3170 expr1->ts.u.cl->backend_decl = tse.expr;
3172 type = gfc_get_character_type_len (gfc_default_character_kind,
3173 expr1->ts.u.cl->backend_decl);
3176 type = gfc_typenode_for_spec (&expr1->ts);
3178 /* Allocate temporary for nested forall construct according to the
3179 information in nested_forall_info and inner_size. */
3180 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
3181 &inner_size_body, block, &ptemp1);
3183 /* Generate codes to copy rhs to the temporary . */
3184 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
3187 /* Generate body and loops according to the information in
3188 nested_forall_info. */
3189 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3190 gfc_add_expr_to_block (block, tmp);
/* Reset the counters before the second (temp -> lhs) pass.  */
3193 gfc_add_modify (block, count1, gfc_index_zero_node);
3197 gfc_add_modify (block, count, gfc_index_zero_node);
3199 /* Generate codes to copy the temporary to lhs. */
3200 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
3203 /* Generate body and loops according to the information in
3204 nested_forall_info. */
3205 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3206 gfc_add_expr_to_block (block, tmp);
3210 /* Free the temporary. */
3211 tmp = gfc_call_free (ptemp1);
3212 gfc_add_expr_to_block (block, tmp);
3217 /* Translate pointer assignment inside FORALL which need temporary. */
3220 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
3221 forall_info * nested_forall_info,
3222 stmtblock_t * block)
/* Pointer assignment inside FORALL with a dependency: stash each RHS
   pointer (scalar case) or descriptor (array case) in a flat temporary
   on a first pass over the nest, then assign them to the LHS on a
   second pass.  COUNT indexes the temporary on both passes.  */
3236 tree tmp, tmp1, ptemp1;
3238 count = gfc_create_var (gfc_array_index_type, "count");
3239 gfc_add_modify (block, count, gfc_index_zero_node);
3241 inner_size = integer_one_node;
3242 lss = gfc_walk_expr (expr1);
3243 rss = gfc_walk_expr (expr2);
3244 if (lss == gfc_ss_terminator)
/* Scalar pointer case: temporary holds plain pointers.  */
3246 type = gfc_typenode_for_spec (&expr1->ts);
3247 type = build_pointer_type (type);
3249 /* Allocate temporary for nested forall construct according to the
3250 information in nested_forall_info and inner_size. */
3251 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
3252 inner_size, NULL, block, &ptemp1);
/* Pass 1: tmp1[count++] = address of RHS.  */
3253 gfc_start_block (&body);
3254 gfc_init_se (&lse, NULL);
3255 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3256 gfc_init_se (&rse, NULL);
3257 rse.want_pointer = 1;
3258 gfc_conv_expr (&rse, expr2);
3259 gfc_add_block_to_block (&body, &rse.pre);
3260 gfc_add_modify (&body, lse.expr,
3261 fold_convert (TREE_TYPE (lse.expr), rse.expr));
3262 gfc_add_block_to_block (&body, &rse.post);
3264 /* Increment count. */
3265 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3266 count, gfc_index_one_node);
3267 gfc_add_modify (&body, count, tmp);
3269 tmp = gfc_finish_block (&body);
3271 /* Generate body and loops according to the information in
3272 nested_forall_info. */
3273 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3274 gfc_add_expr_to_block (block, tmp);
/* Pass 2: reset count, then LHS pointer = tmp1[count++].  */
3277 gfc_add_modify (block, count, gfc_index_zero_node);
3279 gfc_start_block (&body);
3280 gfc_init_se (&lse, NULL);
3281 gfc_init_se (&rse, NULL);
3282 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
3283 lse.want_pointer = 1;
3284 gfc_conv_expr (&lse, expr1);
3285 gfc_add_block_to_block (&body, &lse.pre);
3286 gfc_add_modify (&body, lse.expr, rse.expr);
3287 gfc_add_block_to_block (&body, &lse.post);
3288 /* Increment count. */
3289 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3290 count, gfc_index_one_node);
3291 gfc_add_modify (&body, count, tmp);
3292 tmp = gfc_finish_block (&body);
3294 /* Generate body and loops according to the information in
3295 nested_forall_info. */
3296 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3297 gfc_add_expr_to_block (block, tmp);
/* Array pointer case: temporary holds array descriptors.  */
3301 gfc_init_loopinfo (&loop);
3303 /* Associate the SS with the loop. */
3304 gfc_add_ss_to_loop (&loop, rss);
3306 /* Setup the scalarizing loops and bounds. */
3307 gfc_conv_ss_startstride (&loop);
3309 gfc_conv_loop_setup (&loop, &expr2->where);
3311 info = &rss->data.info;
3312 desc = info->descriptor;
3314 /* Make a new descriptor. */
3315 parmtype = gfc_get_element_type (TREE_TYPE (desc));
3316 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0,
3317 loop.from, loop.to, 1,
3318 GFC_ARRAY_UNKNOWN, true);
3320 /* Allocate temporary for nested forall construct. */
3321 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
3322 inner_size, NULL, block, &ptemp1);
/* Pass 1: build each RHS descriptor directly into tmp1[count++].  */
3323 gfc_start_block (&body);
3324 gfc_init_se (&lse, NULL);
3325 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3326 lse.direct_byref = 1;
3327 rss = gfc_walk_expr (expr2);
3328 gfc_conv_expr_descriptor (&lse, expr2, rss);
3330 gfc_add_block_to_block (&body, &lse.pre);
3331 gfc_add_block_to_block (&body, &lse.post);
3333 /* Increment count. */
3334 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3335 count, gfc_index_one_node);
3336 gfc_add_modify (&body, count, tmp);
3338 tmp = gfc_finish_block (&body);
3340 /* Generate body and loops according to the information in
3341 nested_forall_info. */
3342 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3343 gfc_add_expr_to_block (block, tmp);
/* Pass 2: reset count, assign each stored descriptor to the LHS.  */
3346 gfc_add_modify (block, count, gfc_index_zero_node);
3348 parm = gfc_build_array_ref (tmp1, count, NULL);
3349 lss = gfc_walk_expr (expr1);
3350 gfc_init_se (&lse, NULL);
3351 gfc_conv_expr_descriptor (&lse, expr1, lss);
3352 gfc_add_modify (&lse.pre, lse.expr, parm);
3353 gfc_start_block (&body);
3354 gfc_add_block_to_block (&body, &lse.pre);
3355 gfc_add_block_to_block (&body, &lse.post);
3357 /* Increment count. */
3358 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3359 count, gfc_index_one_node);
3360 gfc_add_modify (&body, count, tmp);
3362 tmp = gfc_finish_block (&body);
3364 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3365 gfc_add_expr_to_block (block, tmp);
3367 /* Free the temporary. */
3370 tmp = gfc_call_free (ptemp1);
3371 gfc_add_expr_to_block (block, tmp);
3376 /* FORALL and WHERE statements are really nasty, especially when you nest
3377 them. All the rhs of a forall assignment must be evaluated before the
3378 actual assignments are performed. Presumably this also applies to all the
3379 assignments in an inner where statement. */
3381 /* Generate code for a FORALL statement. Any temporaries are allocated as a
3382 linear array, relying on the fact that we process in the same order in all
3385 forall (i=start:end:stride; maskexpr)
3389 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
3391 count = ((end + 1 - start) / stride)
3392 masktmp(:) = maskexpr(:)
3395 for (i = start; i <= end; i += stride)
3397 if (masktmp[maskindex++])
3401 for (i = start; i <= end; i += stride)
3403 if (masktmp[maskindex++])
3407 Note that this code only works when there are no dependencies.
3408 Forall loop with array assignments and data dependencies are a real pain,
3409 because the size of the temporary cannot always be determined before the
3410 loop is executed. This problem is compounded by the presence of nested
3415 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
/* Translate one FORALL level.  Builds the iterator temporaries and the
   (optional) packed mask, links this level into the forall_info chain,
   then dispatches each statement in the FORALL body.  Recurses for
   nested FORALLs via the EXEC_FORALL case below.  */
3435 gfc_forall_iterator *fa;
3438 gfc_saved_var *saved_vars;
3439 iter_info *this_forall;
3443 /* Do nothing if the mask is false. */
3445 && code->expr1->expr_type == EXPR_CONSTANT
3446 && !code->expr1->value.logical)
3447 return build_empty_stmt (input_location);
3450 /* Count the FORALL index number. */
3451 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3455 /* Allocate the space for var, start, end, step, varexpr. */
3456 var = XCNEWVEC (tree, nvar);
3457 start = XCNEWVEC (tree, nvar);
3458 end = XCNEWVEC (tree, nvar);
3459 step = XCNEWVEC (tree, nvar);
3460 varexpr = XCNEWVEC (gfc_expr *, nvar);
3461 saved_vars = XCNEWVEC (gfc_saved_var, nvar);
3463 /* Allocate the space for info. */
3464 info = XCNEW (forall_info);
3466 gfc_start_block (&pre);
3467 gfc_init_block (&post);
3468 gfc_init_block (&block);
/* For each iterator: make a backend temporary for the index, shadow the
   symbol so references use it, and evaluate start/end/stride once.  */
3471 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3473 gfc_symbol *sym = fa->var->symtree->n.sym;
3475 /* Allocate space for this_forall. */
3476 this_forall = XCNEW (iter_info);
3478 /* Create a temporary variable for the FORALL index. */
3479 tmp = gfc_typenode_for_spec (&sym->ts);
3480 var[n] = gfc_create_var (tmp, sym->name);
3481 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
3483 /* Record it in this_forall. */
3484 this_forall->var = var[n];
3486 /* Replace the index symbol's backend_decl with the temporary decl. */
3487 sym->backend_decl = var[n];
3489 /* Work out the start, end and stride for the loop. */
3490 gfc_init_se (&se, NULL);
3491 gfc_conv_expr_val (&se, fa->start);
3492 /* Record it in this_forall. */
3493 this_forall->start = se.expr;
3494 gfc_add_block_to_block (&block, &se.pre);
3497 gfc_init_se (&se, NULL);
3498 gfc_conv_expr_val (&se, fa->end);
3499 /* Record it in this_forall. */
3500 this_forall->end = se.expr;
3501 gfc_make_safe_expr (&se);
3502 gfc_add_block_to_block (&block, &se.pre);
3505 gfc_init_se (&se, NULL);
3506 gfc_conv_expr_val (&se, fa->stride);
3507 /* Record it in this_forall. */
3508 this_forall->step = se.expr;
3509 gfc_make_safe_expr (&se);
3510 gfc_add_block_to_block (&block, &se.pre);
3513 /* Set the NEXT field of this_forall to NULL. */
3514 this_forall->next = NULL;
3515 /* Link this_forall to the info construct (appended at the tail so
   iteration order matches source order).  */
3516 if (info->this_loop)
3518 iter_info *iter_tmp = info->this_loop;
3519 while (iter_tmp->next != NULL)
3520 iter_tmp = iter_tmp->next;
3521 iter_tmp->next = this_forall;
3524 info->this_loop = this_forall;
3530 /* Calculate the size needed for the current forall level. */
3531 size = gfc_index_one_node;
3532 for (n = 0; n < nvar; n++)
3534 /* size = (end + step - start) / step. */
3535 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (start[n]),
3537 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (end[n]),
3539 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, TREE_TYPE (tmp),
3541 tmp = convert (gfc_array_index_type, tmp);
3543 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
3547 /* Record the nvar and size of current forall level. */
3553 /* If the mask is .true., consider the FORALL unconditional. */
3554 if (code->expr1->expr_type == EXPR_CONSTANT
3555 && code->expr1->value.logical)
3563 /* First we need to allocate the mask. */
3566 /* As the mask array can be very big, prefer compact boolean types. */
3567 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3568 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3569 size, NULL, &block, &pmask);
3570 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3572 /* Record them in the info structure. */
3573 info->maskindex = maskindex;
3578 /* No mask was specified. */
3579 maskindex = NULL_TREE;
3580 mask = pmask = NULL_TREE;
3583 /* Link the current forall level to nested_forall_info. */
3584 info->prev_nest = nested_forall_info;
3585 nested_forall_info = info;
3587 /* Copy the mask into a temporary variable if required.
3588 For now we assume a mask temporary is needed. */
3591 /* As the mask array can be very big, prefer compact boolean types. */
3592 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3594 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3596 /* Start of mask assignment loop body. */
3597 gfc_start_block (&body);
3599 /* Evaluate the mask expression. */
3600 gfc_init_se (&se, NULL);
3601 gfc_conv_expr_val (&se, code->expr1);
3602 gfc_add_block_to_block (&body, &se.pre);
3604 /* Store the mask. */
3605 se.expr = convert (mask_type, se.expr);
3607 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3608 gfc_add_modify (&body, tmp, se.expr);
3610 /* Advance to the next mask element. */
3611 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3612 maskindex, gfc_index_one_node);
3613 gfc_add_modify (&body, maskindex, tmp);
3615 /* Generate the loops.  MASK_FLAG is 0 here: the mask-fill loop must
   itself run unconditionally.  */
3616 tmp = gfc_finish_block (&body);
3617 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3618 gfc_add_expr_to_block (&block, tmp);
/* Dispatch on the kind of each statement inside the FORALL body.
   NOTE(review): the enclosing switch/loop header is elided in this
   view -- confirm against full source.  */
3621 c = code->block->next;
3623 /* TODO: loop merging in FORALL statements. */
3624 /* Now that we've got a copy of the mask, generate the assignment loops. */
3630 /* A scalar or array assignment. DO the simple check for
3631 lhs to rhs dependencies. These make a temporary for the
3632 rhs and form a second forall block to copy to variable. */
3633 need_temp = check_forall_dependencies(c, &pre, &post);
3635 /* Temporaries due to array assignment data dependencies introduce
3636 no end of problems. */
3638 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3639 nested_forall_info, &block);
3642 /* Use the normal assignment copying routines. */
3643 assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);
3645 /* Generate body and loops. */
3646 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3648 gfc_add_expr_to_block (&block, tmp);
3651 /* Cleanup any temporary symtrees that have been made to deal
3652 with dependencies. */
3654 cleanup_forall_symtrees (c);
3659 /* Translate WHERE or WHERE construct nested in FORALL. */
3660 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3663 /* Pointer assignment inside FORALL. */
3664 case EXEC_POINTER_ASSIGN:
3665 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3667 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3668 nested_forall_info, &block);
3671 /* Use the normal assignment copying routines. */
3672 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3674 /* Generate body and loops. */
3675 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3677 gfc_add_expr_to_block (&block, tmp);
/* Nested FORALL: recurse with the extended forall_info chain.  */
3682 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3683 gfc_add_expr_to_block (&block, tmp);
3686 /* Explicit subroutine calls are prevented by the frontend but interface
3687 assignments can legitimately produce them. */
3688 case EXEC_ASSIGN_CALL:
3689 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3690 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3691 gfc_add_expr_to_block (&block, tmp);
3701 /* Restore the original index variables. */
3702 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3703 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3705 /* Free the space for var, start, end, step, varexpr. */
3713 for (this_forall = info->this_loop; this_forall;)
3715 iter_info *next = this_forall->next;
3720 /* Free the space for this forall_info. */
3725 /* Free the temporary for the mask. */
3726 tmp = gfc_call_free (pmask);
3727 gfc_add_expr_to_block (&block, tmp);
3730 pushdecl (maskindex);
3732 gfc_add_block_to_block (&pre, &block);
3733 gfc_add_block_to_block (&pre, &post);
3735 return gfc_finish_block (&pre);
3739 /* Translate the FORALL statement or construct. */
3741 tree gfc_trans_forall (gfc_code * code)
/* Entry point: translate with no enclosing FORALL nest.  */
3743 return gfc_trans_forall_1 (code, NULL);
3747 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3748 If the WHERE construct is nested in FORALL, compute the overall temporary
3749 needed by the WHERE mask expression multiplied by the iterator number of
3751 ME is the WHERE mask expression.
3752 MASK is the current execution mask upon input, whose sense may or may
3753 not be inverted as specified by the INVERT argument.
3754 CMASK is the updated execution mask on output, or NULL if not required.
3755 PMASK is the pending execution mask on output, or NULL if not required.
3756 BLOCK is the block in which to place the condition evaluation loops. */
3759 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3760 tree mask, bool invert, tree cmask, tree pmask,
3761 tree mask_type, stmtblock_t * block)
3766 stmtblock_t body, body1;
3767 tree count, cond, mtmp;
3770 gfc_init_loopinfo (&loop);
/* Walk ME twice: LSS is used below to determine the shape of the
   expression, RSS is the chain that is actually scalarized.  */
3772 lss = gfc_walk_expr (me);
3773 rss = gfc_walk_expr (me);
3775 /* Variable to index the temporary. */
3776 count = gfc_create_var (gfc_array_index_type, "count");
3777 /* Initialize count. */
3778 gfc_add_modify (block, count, gfc_index_zero_node);
3780 gfc_start_block (&body);
3782 gfc_init_se (&rse, NULL);
3783 gfc_init_se (&lse, NULL);
/* Scalar mask expression: no scalarization loop is required.  */
3785 if (lss == gfc_ss_terminator)
3787 gfc_init_block (&body1);
3791 /* Initialize the loop. */
3792 gfc_init_loopinfo (&loop);
3794 /* We may need LSS to determine the shape of the expression. */
3795 gfc_add_ss_to_loop (&loop, lss);
3796 gfc_add_ss_to_loop (&loop, rss);
3798 gfc_conv_ss_startstride (&loop);
3799 gfc_conv_loop_setup (&loop, &me->where);
3801 gfc_mark_ss_chain_used (rss, 1);
3802 /* Start the loop body. */
3803 gfc_start_scalarized_body (&loop, &body1);
3805 /* Translate the expression. */
3806 gfc_copy_loopinfo_to_se (&rse, &loop);
3808 gfc_conv_expr (&rse, me);
3811 /* Variable to evaluate mask condition. */
3812 cond = gfc_create_var (mask_type, "cond");
/* MTMP caches the incoming mask element so it can feed both the CMASK
   and the PMASK updates below; it is only needed when an incoming mask
   exists and at least one output mask is requested.  */
3813 if (mask && (cmask || pmask))
3814 mtmp = gfc_create_var (mask_type, "mask");
3815 else mtmp = NULL_TREE;
3817 gfc_add_block_to_block (&body1, &lse.pre);
3818 gfc_add_block_to_block (&body1, &rse.pre);
/* cond := value of the WHERE mask expression for this element.  */
3820 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3822 if (mask && (cmask || pmask))
3824 tmp = gfc_build_array_ref (mask, count, NULL);
/* NOTE(review): INVERT selects the sense of the incoming mask (see the
   header comment); presumably this negation is applied only when INVERT
   is set -- the guard is not visible here, confirm against full source.  */
3826 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, tmp);
3827 gfc_add_modify (&body1, mtmp, tmp);
/* cmask[count] := mask AND cond -- elements active in this clause.  */
3832 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3835 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type,
3837 gfc_add_modify (&body1, tmp1, tmp);
/* pmask[count] := mask AND NOT cond -- elements still pending for the
   subsequent ELSEWHERE clauses.  */
3842 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3843 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, cond);
3845 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type, mtmp,
3847 gfc_add_modify (&body1, tmp1, tmp);
3850 gfc_add_block_to_block (&body1, &lse.post);
3851 gfc_add_block_to_block (&body1, &rse.post);
/* For a scalar mask the inner block is used as-is; otherwise wrap it
   in the scalarizer-generated loops and advance COUNT per element.  */
3853 if (lss == gfc_ss_terminator)
3855 gfc_add_block_to_block (&body, &body1);
3859 /* Increment count. */
3860 tmp1 = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3861 count, gfc_index_one_node);
3862 gfc_add_modify (&body1, count, tmp1);
3864 /* Generate the copying loops. */
3865 gfc_trans_scalarizing_loops (&loop, &body1);
3867 gfc_add_block_to_block (&body, &loop.pre);
3868 gfc_add_block_to_block (&body, &loop.post);
3870 gfc_cleanup_loop (&loop);
3871 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3872 as tree nodes in SS may not be valid in different scope. */
3875 tmp1 = gfc_finish_block (&body);
3876 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3877 if (nested_forall_info != NULL)
3878 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3880 gfc_add_expr_to_block (block, tmp1);
3884 /* Translate an assignment statement in a WHERE statement or construct
3885 statement. The MASK expression is used to control which elements
3886 of EXPR1 shall be assigned. The sense of MASK is specified by
3890 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3891 tree mask, bool invert,
3892 tree count1, tree count2,
3898 gfc_ss *lss_section;
3905 tree index, maskexpr;
/* COUNT1/COUNT2 index the flattened mask array: COUNT1 for the main
   assignment pass, COUNT2 for the temp->lhs copy pass when a temporary
   is needed to break a dependency.  */
3907 /* A defined assignment. */
3908 if (cnext && cnext->resolved_sym)
3909 return gfc_trans_call (cnext, true, mask, count1, invert);
3912 /* TODO: handle this special case.
3913 Special case a single function returning an array. */
3914 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3916 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3922 /* Assignment of the form lhs = rhs. */
3923 gfc_start_block (&block);
3925 gfc_init_se (&lse, NULL);
3926 gfc_init_se (&rse, NULL);
3929 lss = gfc_walk_expr (expr1);
3932 /* In each where-assign-stmt, the mask-expr and the variable being
3933 defined shall be arrays of the same shape. */
3934 gcc_assert (lss != gfc_ss_terminator);
3936 /* The assignment needs scalarization. */
3939 /* Find a non-scalar SS from the lhs. */
3940 while (lss_section != gfc_ss_terminator
3941 && lss_section->type != GFC_SS_SECTION)
3942 lss_section = lss_section->next;
3944 gcc_assert (lss_section != gfc_ss_terminator);
3946 /* Initialize the scalarizer. */
3947 gfc_init_loopinfo (&loop);
3950 rss = gfc_walk_expr (expr2);
3951 if (rss == gfc_ss_terminator)
3953 /* The rhs is scalar. Add a ss for the expression. */
3954 rss = gfc_get_ss ();
3956 rss->next = gfc_ss_terminator;
3957 rss->type = GFC_SS_SCALAR;
3961 /* Associate the SS with the loop. */
3962 gfc_add_ss_to_loop (&loop, lss);
3963 gfc_add_ss_to_loop (&loop, rss);
3965 /* Calculate the bounds of the scalarization. */
3966 gfc_conv_ss_startstride (&loop);
3968 /* Resolve any data dependencies in the statement. */
/* This may create loop.temp_ss, forcing the rhs to be staged through a
   temporary and copied to the lhs in a second pass below.  */
3969 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3971 /* Setup the scalarizing loops. */
3972 gfc_conv_loop_setup (&loop, &expr2->where);
3974 /* Setup the gfc_se structures. */
3975 gfc_copy_loopinfo_to_se (&lse, &loop);
3976 gfc_copy_loopinfo_to_se (&rse, &loop);
3979 gfc_mark_ss_chain_used (rss, 1);
3980 if (loop.temp_ss == NULL)
3983 gfc_mark_ss_chain_used (lss, 1);
3987 lse.ss = loop.temp_ss;
3988 gfc_mark_ss_chain_used (lss, 3);
3989 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3992 /* Start the scalarized loop body. */
3993 gfc_start_scalarized_body (&loop, &body);
3995 /* Translate the expression. */
3996 gfc_conv_expr (&rse, expr2);
3997 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3998 gfc_conv_tmp_array_ref (&lse);
4000 gfc_conv_expr (&lse, expr1);
4002 /* Form the mask expression according to the mask. */
/* NOTE(review): the TRUTH_NOT_EXPR below should be applied only when
   INVERT is set -- the guard is not visible in this extract, confirm
   against the full source.  */
4004 maskexpr = gfc_build_array_ref (mask, index, NULL);
4006 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
4007 TREE_TYPE (maskexpr), maskexpr);
4009 /* Use the scalar assignment as is. */
4010 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
4011 loop.temp_ss != NULL, false, true);
/* Execute the assignment only for elements whose mask entry is set.  */
4013 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
4015 gfc_add_expr_to_block (&body, tmp);
4017 if (lss == gfc_ss_terminator)
4019 /* Increment count1. */
4020 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
4021 count1, gfc_index_one_node);
4022 gfc_add_modify (&body, count1, tmp);
4024 /* Use the scalar assignment as is. */
4025 gfc_add_block_to_block (&block, &body);
4029 gcc_assert (lse.ss == gfc_ss_terminator
4030 && rse.ss == gfc_ss_terminator);
4032 if (loop.temp_ss != NULL)
4034 /* Increment count1 before finish the main body of a scalarized
4036 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4037 gfc_array_index_type, count1, gfc_index_one_node);
4038 gfc_add_modify (&body, count1, tmp);
4039 gfc_trans_scalarized_loop_boundary (&loop, &body);
/* Second pass: copy the staged temporary into the real lhs, again
   under control of the mask, indexed by COUNT2.  */
4041 /* We need to copy the temporary to the actual lhs. */
4042 gfc_init_se (&lse, NULL);
4043 gfc_init_se (&rse, NULL);
4044 gfc_copy_loopinfo_to_se (&lse, &loop);
4045 gfc_copy_loopinfo_to_se (&rse, &loop);
4047 rse.ss = loop.temp_ss;
4050 gfc_conv_tmp_array_ref (&rse);
4051 gfc_conv_expr (&lse, expr1);
4053 gcc_assert (lse.ss == gfc_ss_terminator
4054 && rse.ss == gfc_ss_terminator);
4056 /* Form the mask expression according to the mask tree list. */
4058 maskexpr = gfc_build_array_ref (mask, index, NULL);
4060 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
4061 TREE_TYPE (maskexpr), maskexpr);
4063 /* Use the scalar assignment as is. */
4064 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
4066 tmp = build3_v (COND_EXPR, maskexpr, tmp,
4067 build_empty_stmt (input_location));
4068 gfc_add_expr_to_block (&body, tmp);
4070 /* Increment count2. */
4071 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4072 gfc_array_index_type, count2,
4073 gfc_index_one_node);
4074 gfc_add_modify (&body, count2, tmp);
4078 /* Increment count1. */
4079 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4080 gfc_array_index_type, count1,
4081 gfc_index_one_node);
4082 gfc_add_modify (&body, count1, tmp);
4085 /* Generate the copying loops. */
4086 gfc_trans_scalarizing_loops (&loop, &body);
4088 /* Wrap the whole thing up. */
4089 gfc_add_block_to_block (&block, &loop.pre);
4090 gfc_add_block_to_block (&block, &loop.post);
4091 gfc_cleanup_loop (&loop);
4094 return gfc_finish_block (&block);
4098 /* Translate the WHERE construct or statement.
4099 This function can be called iteratively to translate the nested WHERE
4100 construct or statement.
4101 MASK is the control mask. */
4104 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
4105 forall_info * nested_forall_info, stmtblock_t * block)
4107 stmtblock_t inner_size_body;
4108 tree inner_size, size;
4117 tree count1, count2;
/* CMASK/PMASK are the current and pending control masks for the clause
   loop; PCMASK/PPMASK remember the allocated buffers so they can be
   freed at the end of the function.  */
4121 tree pcmask = NULL_TREE;
4122 tree ppmask = NULL_TREE;
4123 tree cmask = NULL_TREE;
4124 tree pmask = NULL_TREE;
4125 gfc_actual_arglist *arg;
4127 /* the WHERE statement or the WHERE construct statement. */
4128 cblock = code->block;
4130 /* As the mask array can be very big, prefer compact boolean types. */
4131 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
4133 /* Determine which temporary masks are needed. */
/* The clause shape (number and emptiness of WHERE/ELSEWHERE clauses)
   decides whether a current mask (cmask) and/or a pending mask (pmask)
   temporary must be materialized.  */
4136 /* One clause: No ELSEWHEREs. */
4137 need_cmask = (cblock->next != 0);
4140 else if (cblock->block->block)
4142 /* Three or more clauses: Conditional ELSEWHEREs. */
4146 else if (cblock->next)
4148 /* Two clauses, the first non-empty. */
4150 need_pmask = (mask != NULL_TREE
4151 && cblock->block->next != 0);
4153 else if (!cblock->block->next)
4155 /* Two clauses, both empty. */
4159 /* Two clauses, the first empty, the second non-empty. */
4162 need_cmask = (cblock->block->expr1 != 0);
4171 if (need_cmask || need_pmask)
4173 /* Calculate the size of temporary needed by the mask-expr. */
4174 gfc_init_block (&inner_size_body);
4175 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
4176 &inner_size_body, &lss, &rss);
4178 gfc_free_ss_chain (lss);
4179 gfc_free_ss_chain (rss);
4181 /* Calculate the total size of temporary needed. */
4182 size = compute_overall_iter_number (nested_forall_info, inner_size,
4183 &inner_size_body, block);
4185 /* Check whether the size is negative. */
/* Clamp a non-positive size to zero to avoid allocating with a
   negative byte count.  */
4186 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, size,
4187 gfc_index_zero_node);
4188 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
4189 cond, gfc_index_zero_node, size);
4190 size = gfc_evaluate_now (size, block);
4192 /* Allocate temporary for WHERE mask if needed. */
4194 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
4197 /* Allocate temporary for !mask if needed. */
4199 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
4205 /* Each time around this loop, the where clause is conditional
4206 on the value of mask and invert, which are updated at the
4207 bottom of the loop. */
4209 /* Has mask-expr. */
4212 /* Ensure that the WHERE mask will be evaluated exactly once.
4213 If there are no statements in this WHERE/ELSEWHERE clause,
4214 then we don't need to update the control mask (cmask).
4215 If this is the last clause of the WHERE construct, then
4216 we don't need to update the pending control mask (pmask). */
4218 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
4220 cblock->next ? cmask : NULL_TREE,
4221 cblock->block ? pmask : NULL_TREE,
4224 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
4226 (cblock->next || cblock->block)
4227 ? cmask : NULL_TREE,
4228 NULL_TREE, mask_type, block);
4232 /* It's a final elsewhere-stmt. No mask-expr is present. */
4236 /* The body of this where clause are controlled by cmask with
4237 sense specified by invert. */
4239 /* Get the assignment statement of a WHERE statement, or the first
4240 statement in where-body-construct of a WHERE construct. */
4241 cnext = cblock->next;
/* Walk every statement in this clause body, dispatching on its op.  */
4246 /* WHERE assignment statement. */
4247 case EXEC_ASSIGN_CALL:
4249 arg = cnext->ext.actual;
4250 expr1 = expr2 = NULL;
4251 for (; arg; arg = arg->next)
4263 expr1 = cnext->expr1;
4264 expr2 = cnext->expr2;
4266 if (nested_forall_info != NULL)
4268 need_temp = gfc_check_dependency (expr1, expr2, 0);
4269 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
4270 gfc_trans_assign_need_temp (expr1, expr2,
4272 nested_forall_info, block);
4275 /* Variables to control maskexpr. */
4276 count1 = gfc_create_var (gfc_array_index_type, "count1");
4277 count2 = gfc_create_var (gfc_array_index_type, "count2");
4278 gfc_add_modify (block, count1, gfc_index_zero_node);
4279 gfc_add_modify (block, count2, gfc_index_zero_node);
4281 tmp = gfc_trans_where_assign (expr1, expr2,
/* Inside FORALL the masked assignment itself must be wrapped in the
   nested FORALL iteration loops.  */
4286 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
4288 gfc_add_expr_to_block (block, tmp);
4293 /* Variables to control maskexpr. */
4294 count1 = gfc_create_var (gfc_array_index_type, "count1");
4295 count2 = gfc_create_var (gfc_array_index_type, "count2");
4296 gfc_add_modify (block, count1, gfc_index_zero_node);
4297 gfc_add_modify (block, count2, gfc_index_zero_node);
4299 tmp = gfc_trans_where_assign (expr1, expr2,
4303 gfc_add_expr_to_block (block, tmp);
4308 /* WHERE or WHERE construct is part of a where-body-construct. */
/* Recurse: nested WHERE constructs use cmask as their control mask.  */
4310 gfc_trans_where_2 (cnext, cmask, invert,
4311 nested_forall_info, block);
4318 /* The next statement within the same where-body-construct. */
4319 cnext = cnext->next;
4321 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
4322 cblock = cblock->block;
4323 if (mask == NULL_TREE)
4325 /* If we're the initial WHERE, we can simply invert the sense
4326 of the current mask to obtain the "mask" for the remaining
4333 /* Otherwise, for nested WHERE's we need to use the pending mask. */
4339 /* If we allocated a pending mask array, deallocate it now. */
4342 tmp = gfc_call_free (ppmask);
4343 gfc_add_expr_to_block (block, tmp);
4346 /* If we allocated a current mask array, deallocate it now. */
4349 tmp = gfc_call_free (pcmask);
4350 gfc_add_expr_to_block (block, tmp);
4354 /* Translate a simple WHERE construct or statement without dependencies.
4355 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
4356 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
4357 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
/* Fast path: because the caller has proved there are no dependencies,
   both assignments and the mask test are coalesced into a single
   scalarized loop with a per-element COND_EXPR, avoiding mask
   temporaries entirely.  */
4360 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
4362 stmtblock_t block, body;
4363 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
4364 tree tmp, cexpr, tstmt, estmt;
4365 gfc_ss *css, *tdss, *tsss;
4366 gfc_se cse, tdse, tsse, edse, esse;
4371 /* Allow the scalarizer to workshare simple where loops. */
4372 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
4373 ompws_flags |= OMPWS_SCALARIZER_WS;
/* cond: mask; tdst/tsrc: then-clause assignment; edst/esrc: else-clause
   assignment (NULL when there is no ELSEWHERE).  */
4375 cond = cblock->expr1;
4376 tdst = cblock->next->expr1;
4377 tsrc = cblock->next->expr2;
4378 edst = eblock ? eblock->next->expr1 : NULL;
4379 esrc = eblock ? eblock->next->expr2 : NULL;
4381 gfc_start_block (&block);
4382 gfc_init_loopinfo (&loop);
4384 /* Handle the condition. */
4385 gfc_init_se (&cse, NULL);
4386 css = gfc_walk_expr (cond);
4387 gfc_add_ss_to_loop (&loop, css);
4389 /* Handle the then-clause. */
4390 gfc_init_se (&tdse, NULL);
4391 gfc_init_se (&tsse, NULL);
4392 tdss = gfc_walk_expr (tdst);
4393 tsss = gfc_walk_expr (tsrc);
/* Scalar rhs: synthesize a scalar SS so it participates in the loop.  */
4394 if (tsss == gfc_ss_terminator)
4396 tsss = gfc_get_ss ();
4398 tsss->next = gfc_ss_terminator;
4399 tsss->type = GFC_SS_SCALAR;
4402 gfc_add_ss_to_loop (&loop, tdss);
4403 gfc_add_ss_to_loop (&loop, tsss);
4407 /* Handle the else clause. */
4408 gfc_init_se (&edse, NULL);
4409 gfc_init_se (&esse, NULL);
4410 edss = gfc_walk_expr (edst);
4411 esss = gfc_walk_expr (esrc);
4412 if (esss == gfc_ss_terminator)
4414 esss = gfc_get_ss ();
4416 esss->next = gfc_ss_terminator;
4417 esss->type = GFC_SS_SCALAR;
4420 gfc_add_ss_to_loop (&loop, edss);
4421 gfc_add_ss_to_loop (&loop, esss);
4424 gfc_conv_ss_startstride (&loop);
4425 gfc_conv_loop_setup (&loop, &tdst->where);
4427 gfc_mark_ss_chain_used (css, 1);
4428 gfc_mark_ss_chain_used (tdss, 1);
4429 gfc_mark_ss_chain_used (tsss, 1);
4432 gfc_mark_ss_chain_used (edss, 1);
4433 gfc_mark_ss_chain_used (esss, 1);
4436 gfc_start_scalarized_body (&loop, &body);
4438 gfc_copy_loopinfo_to_se (&cse, &loop);
4439 gfc_copy_loopinfo_to_se (&tdse, &loop);
4440 gfc_copy_loopinfo_to_se (&tsse, &loop);
4446 gfc_copy_loopinfo_to_se (&edse, &loop);
4447 gfc_copy_loopinfo_to_se (&esse, &loop);
/* Translate the mask and both assignments inside the shared loop.  */
4452 gfc_conv_expr (&cse, cond);
4453 gfc_add_block_to_block (&body, &cse.pre);
4456 gfc_conv_expr (&tsse, tsrc);
4457 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
4458 gfc_conv_tmp_array_ref (&tdse);
4460 gfc_conv_expr (&tdse, tdst);
4464 gfc_conv_expr (&esse, esrc);
4465 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
4466 gfc_conv_tmp_array_ref (&edse);
4468 gfc_conv_expr (&edse, edst);
/* Per-element: if (cond) then-assign else else-assign (or no-op).  */
4471 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
4472 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
4474 : build_empty_stmt (input_location);
4475 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
4476 gfc_add_expr_to_block (&body, tmp);
4477 gfc_add_block_to_block (&body, &cse.post);
4479 gfc_trans_scalarizing_loops (&loop, &body);
4480 gfc_add_block_to_block (&block, &loop.pre);
4481 gfc_add_block_to_block (&block, &loop.post);
4482 gfc_cleanup_loop (&loop);
4484 return gfc_finish_block (&block);
4487 /* As the WHERE or WHERE construct statement can be nested, we call
4488 gfc_trans_where_2 to do the translation, and pass the initial
4489 NULL values for both the control mask and the pending control mask. */
/* Entry point for WHERE translation.  Dispatches simple, provably
   dependence-free WHERE statements to the coalesced-loop fast path
   (gfc_trans_where_3); everything else goes through the general
   mask-temporary path (gfc_trans_where_2).  */
4492 gfc_trans_where (gfc_code * code)
4498 cblock = code->block;
/* Fast-path candidate: a single EXEC_ASSIGN in the WHERE body.  */
4500 && cblock->next->op == EXEC_ASSIGN
4501 && !cblock->next->next
4503 eblock = cblock->block;
4506 /* A simple "WHERE (cond) x = y" statement or block is
4507 dependence free if cond is not dependent upon writing x,
4508 and the source y is unaffected by the destination x. */
4509 if (!gfc_check_dependency (cblock->next->expr1,
4511 && !gfc_check_dependency (cblock->next->expr1,
4512 cblock->next->expr2, 0))
4513 return gfc_trans_where_3 (cblock, NULL);
4515 else if (!eblock->expr1
4518 && eblock->next->op == EXEC_ASSIGN
4519 && !eblock->next->next)
4521 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4522 block is dependence free if cond is not dependent on writes
4523 to x1 and x2, y1 is not dependent on writes to x2, and y2
4524 is not dependent on writes to x1, and both y's are not
4525 dependent upon their own x's. In addition to this, the
4526 final two dependency checks below exclude all but the same
4527 array reference if the where and elswhere destinations
4528 are the same. In short, this is VERY conservative and this
4529 is needed because the two loops, required by the standard
4530 are coalesced in gfc_trans_where_3. */
4531 if (!gfc_check_dependency(cblock->next->expr1,
4533 && !gfc_check_dependency(eblock->next->expr1,
4535 && !gfc_check_dependency(cblock->next->expr1,
4536 eblock->next->expr2, 1)
4537 && !gfc_check_dependency(eblock->next->expr1,
4538 cblock->next->expr2, 1)
4539 && !gfc_check_dependency(cblock->next->expr1,
4540 cblock->next->expr2, 1)
4541 && !gfc_check_dependency(eblock->next->expr1,
4542 eblock->next->expr2, 1)
4543 && !gfc_check_dependency(cblock->next->expr1,
4544 eblock->next->expr1, 0)
4545 && !gfc_check_dependency(eblock->next->expr1,
4546 cblock->next->expr1, 0))
4547 return gfc_trans_where_3 (cblock, eblock);
/* General case: initial call with NULL masks and no FORALL nesting.  */
4551 gfc_start_block (&block);
4553 gfc_trans_where_2 (code, NULL, false, NULL, &block);
4555 return gfc_finish_block (&block);
4559 /* CYCLE a DO loop. The label decl has already been created by
4560 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4561 node at the head of the loop. We must mark the label as used. */
4564 gfc_trans_cycle (gfc_code * code)
4568 cycle_label = code->ext.which_construct->cycle_label;
4569 gcc_assert (cycle_label);
4571 TREE_USED (cycle_label) = 1;
4572 return build1_v (GOTO_EXPR, cycle_label);
4576 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4577 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
4581 gfc_trans_exit (gfc_code * code)
4585 exit_label = code->ext.which_construct->exit_label;
4586 gcc_assert (exit_label);
4588 TREE_USED (exit_label) = 1;
4589 return build1_v (GOTO_EXPR, exit_label);
4593 /* Translate the ALLOCATE statement. */
/* For each object in the allocation list: compute the byte size,
   emit the allocation call (array or scalar), handle STAT=/ERRMSG=
   error plumbing, and perform SOURCE=/MOLD= initialization plus
   CLASS vptr setup where required.  */
4596 gfc_trans_allocate (gfc_code * code)
4614 if (!code->ext.alloc.list)
4617 pstat = stat = error_label = tmp = memsz = NULL_TREE;
4619 gfc_init_block (&block);
4620 gfc_init_block (&post);
4622 /* Either STAT= and/or ERRMSG is present. */
4623 if (code->expr1 || code->expr2)
4625 tree gfc_int4_type_node = gfc_get_int_type (4);
/* STAT is collected through PSTAT by the runtime allocation calls;
   a nonzero value branches to ERROR_LABEL below.  */
4627 stat = gfc_create_var (gfc_int4_type_node, "stat");
4628 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4630 error_label = gfc_build_label_decl (NULL_TREE);
4631 TREE_USED (error_label) = 1;
4637 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4639 expr = gfc_copy_expr (al->expr);
4641 if (expr->ts.type == BT_CLASS)
4642 gfc_add_data_component (expr);
4644 gfc_init_se (&se, NULL);
4646 se.want_pointer = 1;
4647 se.descriptor_only = 1;
4648 gfc_conv_expr (&se, expr);
/* gfc_array_allocate handles the array case itself; when it declines
   we have a scalar or derived-type object and must size it here.  */
4650 if (!gfc_array_allocate (&se, expr, pstat))
4652 /* A scalar or derived type. */
4654 /* Determine allocate size. */
4655 if (al->expr->ts.type == BT_CLASS && code->expr3)
4657 if (code->expr3->ts.type == BT_CLASS)
/* Polymorphic SOURCE: the size is only known at run time, read it
   from the source object's vtab (_vptr->_size).  */
4659 sz = gfc_copy_expr (code->expr3);
4660 gfc_add_vptr_component (sz);
4661 gfc_add_size_component (sz);
4662 gfc_init_se (&se_sz, NULL);
4663 gfc_conv_expr (&se_sz, sz);
4668 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
4670 else if (al->expr->ts.type == BT_CHARACTER
4671 && al->expr->ts.deferred && code->expr3)
4673 if (!code->expr3->ts.u.cl->backend_decl)
4675 /* Convert and use the length expression. */
4676 gfc_init_se (&se_sz, NULL);
4677 if (code->expr3->expr_type == EXPR_VARIABLE
4678 || code->expr3->expr_type == EXPR_CONSTANT)
4680 gfc_conv_expr (&se_sz, code->expr3);
4681 memsz = se_sz.string_length;
4683 else if (code->expr3->mold
4684 && code->expr3->ts.u.cl
4685 && code->expr3->ts.u.cl->length)
4687 gfc_conv_expr (&se_sz, code->expr3->ts.u.cl->length);
4688 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4689 se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
4690 gfc_add_block_to_block (&se.pre, &se_sz.post);
4695 /* This is would be inefficient and possibly could
4696 generate wrong code if the result were not stored
/* Evaluate the SOURCE expression once and cache value and length in
   EXPR3/SLEN3 for reuse by every object in the allocation list.  */
4698 if (slen3 == NULL_TREE)
4700 gfc_conv_expr (&se_sz, code->expr3);
4701 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4702 expr3 = gfc_evaluate_now (se_sz.expr, &se.pre);
4703 gfc_add_block_to_block (&post, &se_sz.post);
4704 slen3 = gfc_evaluate_now (se_sz.string_length,
4711 /* Otherwise use the stored string length. */
4712 memsz = code->expr3->ts.u.cl->backend_decl;
4713 tmp = al->expr->ts.u.cl->backend_decl;
4715 /* Store the string length. */
4716 if (tmp && TREE_CODE (tmp) == VAR_DECL)
4717 gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
4720 /* Convert to size in bytes, using the character KIND. */
4721 tmp = TREE_TYPE (gfc_typenode_for_spec (&al->expr->ts));
4722 tmp = TYPE_SIZE_UNIT (tmp);
4723 memsz = fold_build2_loc (input_location, MULT_EXPR,
4724 TREE_TYPE (tmp), tmp,
4725 fold_convert (TREE_TYPE (tmp), memsz));
4727 else if (al->expr->ts.type == BT_CHARACTER && al->expr->ts.deferred)
/* Deferred-length character with an explicit typespec in the
   ALLOCATE statement: use that declared length.  */
4729 gcc_assert (code->ext.alloc.ts.u.cl && code->ext.alloc.ts.u.cl->length);
4730 gfc_init_se (&se_sz, NULL);
4731 gfc_conv_expr (&se_sz, code->ext.alloc.ts.u.cl->length);
4732 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4733 se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
4734 gfc_add_block_to_block (&se.pre, &se_sz.post);
4735 /* Store the string length. */
4736 tmp = al->expr->ts.u.cl->backend_decl;
4737 gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
4739 tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
4740 tmp = TYPE_SIZE_UNIT (tmp);
4741 memsz = fold_build2_loc (input_location, MULT_EXPR,
4742 TREE_TYPE (tmp), tmp,
4743 fold_convert (TREE_TYPE (se_sz.expr),
4746 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4747 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
4749 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4751 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4753 memsz = se.string_length;
4755 /* Convert to size in bytes, using the character KIND. */
4756 tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
4757 tmp = TYPE_SIZE_UNIT (tmp);
4758 memsz = fold_build2_loc (input_location, MULT_EXPR,
4759 TREE_TYPE (tmp), tmp,
4760 fold_convert (TREE_TYPE (tmp), memsz));
4763 /* Allocate - for non-pointers with re-alloc checking. */
/* ALLOCATABLE objects get the "already allocated" runtime check;
   POINTER objects use the plain allocation call.  */
4764 if (gfc_expr_attr (expr).allocatable)
4765 tmp = gfc_allocate_array_with_status (&se.pre, se.expr, memsz,
4768 tmp = gfc_allocate_with_status (&se.pre, memsz, pstat);
4770 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
4772 fold_convert (TREE_TYPE (se.expr), tmp));
4773 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT=/ERRMSG=, branch to the error label when stat != 0 so the
   remaining objects are not allocated after a failure.  */
4775 if (code->expr1 || code->expr2)
4777 tmp = build1_v (GOTO_EXPR, error_label);
4778 parm = fold_build2_loc (input_location, NE_EXPR,
4779 boolean_type_node, stat,
4780 build_int_cst (TREE_TYPE (stat), 0));
4781 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
4783 build_empty_stmt (input_location));
4784 gfc_add_expr_to_block (&se.pre, tmp);
/* Derived types with allocatable components need those components
   nullified so later (de)allocation sees a defined state.  */
4787 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4789 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4790 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4791 gfc_add_expr_to_block (&se.pre, tmp);
4795 gfc_add_block_to_block (&block, &se.pre);
4797 if (code->expr3 && !code->expr3->mold)
4799 /* Initialization via SOURCE block
4800 (or static default initializer). */
4801 gfc_expr *rhs = gfc_copy_expr (code->expr3);
4802 if (al->expr->ts.type == BT_CLASS)
4805 gfc_actual_arglist *actual;
4807 gfc_init_se (&call, NULL);
4808 /* Do a polymorphic deep copy. */
/* Invoke the type-bound _copy procedure from the vtab with the
   SOURCE value and the freshly allocated object as arguments.  */
4809 actual = gfc_get_actual_arglist ();
4810 actual->expr = gfc_copy_expr (rhs);
4811 if (rhs->ts.type == BT_CLASS)
4812 gfc_add_data_component (actual->expr);
4813 actual->next = gfc_get_actual_arglist ();
4814 actual->next->expr = gfc_copy_expr (al->expr);
4815 gfc_add_data_component (actual->next->expr);
4816 if (rhs->ts.type == BT_CLASS)
4818 ppc = gfc_copy_expr (rhs);
4819 gfc_add_vptr_component (ppc);
4822 ppc = gfc_lval_expr_from_sym (gfc_find_derived_vtab (rhs->ts.u.derived));
4823 gfc_add_component_ref (ppc, "_copy");
4824 gfc_conv_procedure_call (&call, ppc->symtree->n.sym, actual,
4826 gfc_add_expr_to_block (&call.pre, call.expr);
4827 gfc_add_block_to_block (&call.pre, &call.post);
4828 tmp = gfc_finish_block (&call.pre);
4830 else if (expr3 != NULL_TREE)
/* Deferred-length character: copy the cached SOURCE string.  */
4832 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4833 gfc_trans_string_copy (&block, slen3, tmp, code->expr3->ts.kind,
4834 slen3, expr3, code->expr3->ts.kind);
4839 /* Switch off automatic reallocation since we have just done
4841 int realloc_lhs = gfc_option.flag_realloc_lhs;
4842 gfc_option.flag_realloc_lhs = 0;
4843 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4845 gfc_option.flag_realloc_lhs = realloc_lhs;
4847 gfc_free_expr (rhs);
4848 gfc_add_expr_to_block (&block, tmp);
4850 else if (code->expr3 && code->expr3->mold
4851 && code->expr3->ts.type == BT_CLASS)
4853 /* Default-initialization via MOLD (polymorphic). */
/* Copy the vtab's default initializer (_def_init) into the new
   object by a raw memcpy of MEMSZ bytes.  */
4854 gfc_expr *rhs = gfc_copy_expr (code->expr3);
4856 gfc_add_vptr_component (rhs);
4857 gfc_add_def_init_component (rhs);
4858 gfc_init_se (&dst, NULL);
4859 gfc_init_se (&src, NULL);
4860 gfc_conv_expr (&dst, expr);
4861 gfc_conv_expr (&src, rhs);
4862 gfc_add_block_to_block (&block, &src.pre);
4863 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
4864 gfc_add_expr_to_block (&block, tmp);
4865 gfc_free_expr (rhs);
4868 /* Allocation of CLASS entities. */
4869 gfc_free_expr (expr);
4871 if (expr->ts.type == BT_CLASS)
4876 /* Initialize VPTR for CLASS objects. */
4877 lhs = gfc_expr_to_initialize (expr);
4878 gfc_add_vptr_component (lhs);
4880 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4882 /* Polymorphic SOURCE: VPTR must be determined at run time. */
4883 rhs = gfc_copy_expr (code->expr3);
4884 gfc_add_vptr_component (rhs);
4885 tmp = gfc_trans_pointer_assignment (lhs, rhs);
4886 gfc_add_expr_to_block (&block, tmp);
4887 gfc_free_expr (rhs);
4891 /* VPTR is fixed at compile time. */
/* Pick the declared typespec: SOURCE/MOLD expression, the object
   itself, or the typespec given in the ALLOCATE statement.  */
4895 ts = &code->expr3->ts;
4896 else if (expr->ts.type == BT_DERIVED)
4898 else if (code->ext.alloc.ts.type == BT_DERIVED)
4899 ts = &code->ext.alloc.ts;
4900 else if (expr->ts.type == BT_CLASS)
4901 ts = &CLASS_DATA (expr)->ts;
4905 if (ts->type == BT_DERIVED)
4907 vtab = gfc_find_derived_vtab (ts->u.derived);
4909 gfc_init_se (&lse, NULL);
4910 lse.want_pointer = 1;
4911 gfc_conv_expr (&lse, lhs);
4912 tmp = gfc_build_addr_expr (NULL_TREE,
4913 gfc_get_symbol_decl (vtab));
4914 gfc_add_modify (&block, lse.expr,
4915 fold_convert (TREE_TYPE (lse.expr), tmp));
4918 gfc_free_expr (lhs);
/* Error landing pad for STAT=/ERRMSG= handling.  */
4926 tmp = build1_v (LABEL_EXPR, error_label);
4927 gfc_add_expr_to_block (&block, tmp);
/* STAT= variable: store the collected status code.  */
4929 gfc_init_se (&se, NULL);
4930 gfc_conv_expr_lhs (&se, code->expr1);
4931 tmp = convert (TREE_TYPE (se.expr), stat);
4932 gfc_add_modify (&block, se.expr, tmp);
4938 /* A better error message may be possible, but not required. */
4939 const char *msg = "Attempt to allocate an allocated object";
4940 tree errmsg, slen, dlen;
4942 gfc_init_se (&se, NULL);
4943 gfc_conv_expr_lhs (&se, code->expr2);
4945 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4947 gfc_add_modify (&block, errmsg,
4948 gfc_build_addr_expr (pchar_type_node,
4949 gfc_build_localized_cstring_const (msg)));
/* Copy min(strlen(msg), len(ERRMSG)) bytes, only when stat != 0.  */
4951 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4952 dlen = gfc_get_expr_charlen (code->expr2);
4953 slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
4956 dlen = build_call_expr_loc (input_location,
4957 built_in_decls[BUILT_IN_MEMCPY], 3,
4958 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4960 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
4961 build_int_cst (TREE_TYPE (stat), 0));
4963 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4965 gfc_add_expr_to_block (&block, tmp);
4968 gfc_add_block_to_block (&block, &se.post);
4969 gfc_add_block_to_block (&block, &post);
4971 return gfc_finish_block (&block);
4975 /* Translate a DEALLOCATE statement. */
4978 gfc_trans_deallocate (gfc_code *code)
4982 tree apstat, astat, pstat, stat, tmp;
4985 pstat = apstat = stat = astat = tmp = NULL_TREE;
4987 gfc_start_block (&block);
4989 /* Count the number of failed deallocations. If deallocate() was
4990 called with STAT= , then set STAT to the count. If deallocate
4991 was called with ERRMSG, then set ERRMG to a string. */
4992 if (code->expr1 || code->expr2)
4994 tree gfc_int4_type_node = gfc_get_int_type (4);
4996 stat = gfc_create_var (gfc_int4_type_node, "stat");
4997 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4999 /* Running total of possible deallocation failures. */
5000 astat = gfc_create_var (gfc_int4_type_node, "astat");
5001 apstat = gfc_build_addr_expr (NULL_TREE, astat);
5003 /* Initialize astat to 0. */
5004 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
5007 for (al = code->ext.alloc.list; al != NULL; al = al->next)
5009 gfc_expr *expr = gfc_copy_expr (al->expr);
5010 gcc_assert (expr->expr_type == EXPR_VARIABLE);
5012 if (expr->ts.type == BT_CLASS)
5013 gfc_add_data_component (expr);
5015 gfc_init_se (&se, NULL);
5016 gfc_start_block (&se.pre);
5018 se.want_pointer = 1;
5019 se.descriptor_only = 1;
5020 gfc_conv_expr (&se, expr);
5024 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
5027 gfc_ref *last = NULL;
5028 for (ref = expr->ref; ref; ref = ref->next)
5029 if (ref->type == REF_COMPONENT)
5032 /* Do not deallocate the components of a derived type
5033 ultimate pointer component. */
5034 if (!(last && last->u.c.component->attr.pointer)
5035 && !(!last && expr->symtree->n.sym->attr.pointer))
5037 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
5039 gfc_add_expr_to_block (&se.pre, tmp);
5042 tmp = gfc_array_deallocate (se.expr, pstat, expr);
5043 gfc_add_expr_to_block (&se.pre, tmp);
5047 tmp = gfc_deallocate_scalar_with_status (se.expr, pstat, false,
5049 gfc_add_expr_to_block (&se.pre, tmp);
5051 /* Set to zero after deallocation. */
5052 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
5054 build_int_cst (TREE_TYPE (se.expr), 0));
5055 gfc_add_expr_to_block (&se.pre, tmp);
5057 if (al->expr->ts.type == BT_CLASS)
5059 /* Reset _vptr component to declared type. */
5060 gfc_expr *rhs, *lhs = gfc_copy_expr (al->expr);
5061 gfc_symbol *vtab = gfc_find_derived_vtab (al->expr->ts.u.derived);
5062 gfc_add_vptr_component (lhs);
5063 rhs = gfc_lval_expr_from_sym (vtab);
5064 tmp = gfc_trans_pointer_assignment (lhs, rhs);
5065 gfc_add_expr_to_block (&se.pre, tmp);
5066 gfc_free_expr (lhs);
5067 gfc_free_expr (rhs);
5071 /* Keep track of the number of failed deallocations by adding stat
5072 of the last deallocation to the running total. */
5073 if (code->expr1 || code->expr2)
5075 apstat = fold_build2_loc (input_location, PLUS_EXPR,
5076 TREE_TYPE (stat), astat, stat);
5077 gfc_add_modify (&se.pre, astat, apstat);
5080 tmp = gfc_finish_block (&se.pre);
5081 gfc_add_expr_to_block (&block, tmp);
5082 gfc_free_expr (expr);
5088 gfc_init_se (&se, NULL);
5089 gfc_conv_expr_lhs (&se, code->expr1);
5090 tmp = convert (TREE_TYPE (se.expr), astat);
5091 gfc_add_modify (&block, se.expr, tmp);
5097 /* A better error message may be possible, but not required. */
5098 const char *msg = "Attempt to deallocate an unallocated object";
5099 tree errmsg, slen, dlen;
5101 gfc_init_se (&se, NULL);
5102 gfc_conv_expr_lhs (&se, code->expr2);
5104 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
5106 gfc_add_modify (&block, errmsg,
5107 gfc_build_addr_expr (pchar_type_node,
5108 gfc_build_localized_cstring_const (msg)));
5110 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
5111 dlen = gfc_get_expr_charlen (code->expr2);
5112 slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
5115 dlen = build_call_expr_loc (input_location,
5116 built_in_decls[BUILT_IN_MEMCPY], 3,
5117 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
5119 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, astat,
5120 build_int_cst (TREE_TYPE (astat), 0));
5122 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
5124 gfc_add_expr_to_block (&block, tmp);
5127 return gfc_finish_block (&block);
5130 #include "gt-fortran-trans-stmt.h"