1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
28 #include "tree-gimple.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
40 #include "dependency.h"
/* Per-index iteration state for a FORALL construct.  NOTE(review): the
   struct members other than the chain pointer are elided in this
   excerpt — presumably the index variable and its bounds; confirm
   against the full file.  */
42 typedef struct iter_info
48 struct iter_info *next;
/* Book-keeping for one nesting level of a FORALL construct.
   NOTE(review): most members are elided in this excerpt.  */
52 typedef struct forall_info
59 struct forall_info *prev_nest;
/* Forward declaration: WHERE translation recurses and is also invoked
   from the FORALL machinery later in this file.  */
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
66 /* Translate an F95 label number to a LABEL_EXPR.  Emits the GENERIC
   label node for CODE's statement label so that GOTOs elsewhere can
   target it.  NOTE(review): the return-type line and braces are elided
   in this excerpt.  */
69 gfc_trans_label_here (gfc_code * code)
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables.  For variables in common blocks this
   means stripping back to the underlying FIELD_DECL / PARM_DECL so the
   GFC_DECL_STRING_LEN / GFC_DECL_ASSIGN_ADDR accessors used by the
   callers below apply.  On return, SE->expr holds that decl.  */
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only valid for symbols that were the target of an ASSIGN statement.  */
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deals with variable in common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deals with dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
92 /* Translate a label assignment statement (ASSIGN label TO var).
   Stores either the address of a target label (with length -1 as a
   sentinel) or the text of a FORMAT label (with its string length)
   into the auxiliary variables attached to VAR's decl.
   NOTE(review): local declarations and some braces are elided in this
   excerpt.  */
95 gfc_trans_label_assign (gfc_code * code)
105 /* Start a new block. */
106 gfc_init_se (&se, NULL);
107 gfc_start_block (&se.pre);
108 gfc_conv_label_variable (&se, code->expr);
110 len = GFC_DECL_STRING_LEN (se.expr);
111 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
113 label_tree = gfc_get_label_decl (code->label);
115 if (code->label->defined == ST_LABEL_TARGET)
/* Branch-target label: store its address; -1 length marks "not a
   format string" for the assigned-GOTO runtime check.  */
117 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
118 len_tree = integer_minus_one_node;
/* Otherwise this is a FORMAT label: store the format text itself.
   NOTE(review): the else-branch opening lines are elided here.  */
122 label_str = code->label->format->value.character.string;
123 label_len = code->label->format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
125 label_tree = gfc_build_string_const (label_len + 1, label_str);
126 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
129 gfc_add_modify_expr (&se.pre, len, len_tree);
130 gfc_add_modify_expr (&se.pre, addr, label_tree);
132 return gfc_finish_block (&se.pre);
135 /* Translate a GOTO statement.  Handles both the ordinary form
   (code->label set) and the assigned GOTO form, where the target was
   stored by a prior ASSIGN; the latter emits a runtime check that the
   stored value is a target label and, when a label list is present,
   that it is one of the listed labels.  NOTE(review): local
   declarations and the label-list loop header are elided in this
   excerpt.  */
138 gfc_trans_goto (gfc_code * code)
140 locus loc = code->loc;
/* Ordinary GOTO: a direct jump, no block needed.  */
146 if (code->label != NULL)
147 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
150 gfc_init_se (&se, NULL);
151 gfc_start_block (&se.pre);
152 gfc_conv_label_variable (&se, code->expr);
/* STRING_LEN == -1 is the sentinel set by ASSIGN for target labels;
   anything else means a FORMAT label was assigned — a runtime error.  */
153 tmp = GFC_DECL_STRING_LEN (se.expr);
154 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
155 build_int_cst (TREE_TYPE (tmp), -1));
156 gfc_trans_runtime_check (tmp, &se.pre, &loc,
157 "Assigned label is not a target label");
159 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
/* No label list: jump straight through the stored address.
   NOTE(review): the guarding condition for this path is elided.  */
164 target = build1 (GOTO_EXPR, void_type_node, assigned_goto);
165 gfc_add_expr_to_block (&se.pre, target);
166 return gfc_finish_block (&se.pre);
169 /* Check the label list. */
172 target = gfc_get_label_decl (code->label);
173 tmp = gfc_build_addr_expr (pvoid_type_node, target);
174 tmp = build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
175 tmp = build3_v (COND_EXPR, tmp,
176 build1 (GOTO_EXPR, void_type_node, target),
177 build_empty_stmt ());
178 gfc_add_expr_to_block (&se.pre, tmp);
181 while (code != NULL);
/* Fell off the end of the list: unconditionally report the error.  */
182 gfc_trans_runtime_check (boolean_true_node, &se.pre, &loc,
183 "Assigned label is not in the list");
185 return gfc_finish_block (&se.pre);
189 /* Translate an ENTRY statement.  Just adds a label for this entry point.
   The actual entry dispatch is handled elsewhere; here we only mark the
   position in the function body.  */
191 gfc_trans_entry (gfc_code * code)
193 return build1_v (LABEL_EXPR, code->ext.entry->label);
197 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
198 elemental subroutines.  Make temporaries for output arguments if any such
199 dependencies are found.  Output arguments are chosen because internal_unpack
200 can be used, as is, to copy the result back to the variable.
   NOTE(review): several declaration lines, loop braces and some
   argument lists are elided in this excerpt.  */
202 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
203 gfc_symbol * sym, gfc_actual_arglist * arg)
205 gfc_actual_arglist *arg0;
207 gfc_formal_arglist *formal;
208 gfc_loopinfo tmp_loop;
/* Nothing to do unless the call is inside a scalarization loop.  */
220 if (loopse->ss == NULL)
225 formal = sym->formal;
227 /* Loop over all the arguments testing for dependencies. */
228 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
234 /* Obtain the info structure for the current argument. */
236 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
240 info = &ss->data.info;
244 /* If there is a dependency, create a temporary and use it
245 instead of the variable. */
246 fsym = formal ? formal->sym : NULL;
247 if (e->expr_type == EXPR_VARIABLE
249 && fsym->attr.intent != INTENT_IN
250 && gfc_check_fncall_dependency (e, fsym->attr.intent,
253 /* Make a local loopinfo for the temporary creation, so that
254 none of the other ss->info's have to be renormalized. */
255 gfc_init_loopinfo (&tmp_loop);
256 for (n = 0; n < info->dimen; n++)
258 tmp_loop.to[n] = loopse->loop->to[n];
259 tmp_loop.from[n] = loopse->loop->from[n];
260 tmp_loop.order[n] = loopse->loop->order[n];
263 /* Generate the temporary. Merge the block so that the
264 declarations are put at the right binding level. */
265 size = gfc_create_var (gfc_array_index_type, NULL);
266 data = gfc_create_var (pvoid_type_node, NULL);
267 gfc_start_block (&block);
268 tmp = gfc_typenode_for_spec (&e->ts);
269 tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
270 &tmp_loop, info, tmp,
272 gfc_add_modify_expr (&se->pre, size, tmp);
273 tmp = fold_convert (pvoid_type_node, info->data);
274 gfc_add_modify_expr (&se->pre, data, tmp);
275 gfc_merge_block_scope (&block);
277 /* Obtain the argument descriptor for unpacking. */
278 gfc_init_se (&parmse, NULL);
279 parmse.want_pointer = 1;
280 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
281 gfc_add_block_to_block (&se->pre, &parmse.pre);
283 /* Calculate the offset for the temporary.  Sum of
   -from[n] * stride[n] over all dimensions, so that indexing with the
   outer loop variables lands at the start of the temporary.  */
284 offset = gfc_index_zero_node;
285 for (n = 0; n < info->dimen; n++)
287 tmp = gfc_conv_descriptor_stride (info->descriptor,
289 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
290 loopse->loop->from[n], tmp);
291 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
294 info->offset = gfc_create_var (gfc_array_index_type, NULL);
295 gfc_add_modify_expr (&se->pre, info->offset, offset);
297 /* Copy the result back using unpack. */
298 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
299 gfc_add_expr_to_block (&se->post, tmp);
301 gfc_add_block_to_block (&se->post, &parmse.post);
307 /* Translate the CALL statement.  Builds a call to an F95 subroutine.
   Non-elemental calls (and elemental calls with only scalar arguments)
   produce a single call expression; elemental calls with array-valued
   arguments are wrapped in a scalarization loop.  When
   DEPENDENCY_CHECK is set (operator assignment), argument/result
   dependencies are resolved via temporaries first.
   NOTE(review): local declarations and several closing braces are
   elided in this excerpt.  */
310 gfc_trans_call (gfc_code * code, bool dependency_check)
314 int has_alternate_specifier;
316 /* A CALL starts a new block because the actual arguments may have to
317 be evaluated first. */
318 gfc_init_se (&se, NULL);
319 gfc_start_block (&se.pre);
321 gcc_assert (code->resolved_sym);
323 ss = gfc_ss_terminator;
324 if (code->resolved_sym->attr.elemental)
325 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
327 /* Is not an elemental subroutine call with array valued arguments. */
328 if (ss == gfc_ss_terminator)
331 /* Translate the call. */
332 has_alternate_specifier
333 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
336 /* A subroutine without side-effect, by definition, does nothing!
   Force the flag so the call is not optimized away.  */
337 TREE_SIDE_EFFECTS (se.expr) = 1;
339 /* Chain the pieces together and return the block. */
340 if (has_alternate_specifier)
/* An alternate-return CALL is followed by a SELECT on the returned
   specifier; store the call result into that SELECT's variable.  */
342 gfc_code *select_code;
344 select_code = code->next;
345 gcc_assert(select_code->op == EXEC_SELECT);
346 sym = select_code->expr->symtree->n.sym;
347 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
348 if (sym->backend_decl == NULL)
349 sym->backend_decl = gfc_get_symbol_decl (sym);
350 gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
353 gfc_add_expr_to_block (&se.pre, se.expr);
355 gfc_add_block_to_block (&se.pre, &se.post);
360 /* An elemental subroutine call with array valued arguments has
   to be scalarized: build a loop and call the routine per element.
   NOTE(review): intervening lines of this comment are elided.  */
367 /* gfc_walk_elemental_function_args renders the ss chain in the
368 reverse order to the actual argument order. */
369 ss = gfc_reverse_ss (ss);
371 /* Initialize the loop. */
372 gfc_init_se (&loopse, NULL);
373 gfc_init_loopinfo (&loop);
374 gfc_add_ss_to_loop (&loop, ss);
376 gfc_conv_ss_startstride (&loop);
377 gfc_conv_loop_setup (&loop);
378 gfc_mark_ss_chain_used (ss, 1);
380 /* Convert the arguments, checking for dependencies. */
381 gfc_copy_loopinfo_to_se (&loopse, &loop);
384 /* For operator assignment, do dependency checking. */
385 if (dependency_check)
388 sym = code->resolved_sym;
389 gfc_conv_elemental_dependencies (&se, &loopse, sym,
393 /* Generate the loop body. */
394 gfc_start_scalarized_body (&loop, &body);
395 gfc_init_block (&block);
397 /* Add the subroutine call to the block. */
398 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
400 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
402 gfc_add_block_to_block (&block, &loopse.pre);
403 gfc_add_block_to_block (&block, &loopse.post);
405 /* Finish up the loop block and the loop. */
406 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
407 gfc_trans_scalarizing_loops (&loop, &body);
408 gfc_add_block_to_block (&se.pre, &loop.pre);
409 gfc_add_block_to_block (&se.pre, &loop.post);
410 gfc_add_block_to_block (&se.pre, &se.post);
411 gfc_cleanup_loop (&loop);
414 return gfc_finish_block (&se.pre);
418 /* Translate the RETURN statement.  A plain RETURN becomes a GOTO to
   the function's common return label; an alternate RETURN (code->expr
   set) first stores the selector into the fake result variable.
   NOTE(review): local declarations and some braces are elided in this
   excerpt.  */
421 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
429 /* If code->expr is not NULL, this return statement must appear
430 in a subroutine and current_fake_result_decl has already
   been generated for the alternate-return mechanism.  */
433 result = gfc_get_fake_result_decl (NULL, 0);
/* No result decl means no * dummy arguments were declared — the
   alternate return selector has nowhere to go; warn and fall back to
   a plain return.  */
436 gfc_warning ("An alternate return at %L without a * dummy argument",
438 return build1_v (GOTO_EXPR, gfc_get_return_label ());
441 /* Start a new block for this statement. */
442 gfc_init_se (&se, NULL);
443 gfc_start_block (&se.pre);
445 gfc_conv_expr (&se, code->expr);
447 tmp = build2 (MODIFY_EXPR, TREE_TYPE (result), result,
448 fold_convert (TREE_TYPE (result), se.expr));
449 gfc_add_expr_to_block (&se.pre, tmp);
451 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
452 gfc_add_expr_to_block (&se.pre, tmp);
453 gfc_add_block_to_block (&se.pre, &se.post);
454 return gfc_finish_block (&se.pre);
/* Plain RETURN: just jump to the shared return label.  */
457 return build1_v (GOTO_EXPR, gfc_get_return_label ());
461 /* Translate the PAUSE statement.  We have to translate this statement
462 to a runtime library call.  A numeric stop code calls the numeric
   variant; a character expression calls the string variant.
   NOTE(review): local declarations and some braces are elided in this
   excerpt.  */
465 gfc_trans_pause (gfc_code * code)
467 tree gfc_int4_type_node = gfc_get_int_type (4);
471 /* Start a new block for this statement. */
472 gfc_init_se (&se, NULL);
473 gfc_start_block (&se.pre);
476 if (code->expr == NULL)
478 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
479 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
/* Character expression: pass address and length to the runtime.  */
483 gfc_conv_expr_reference (&se, code->expr);
484 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
485 se.expr, se.string_length);
488 gfc_add_expr_to_block (&se.pre, tmp);
490 gfc_add_block_to_block (&se.pre, &se.post);
492 return gfc_finish_block (&se.pre);
496 /* Translate the STOP statement.  We have to translate this statement
497 to a runtime library call.  Mirrors gfc_trans_pause with the stop
   runtime entry points.  NOTE(review): local declarations and some
   braces are elided in this excerpt.  */
500 gfc_trans_stop (gfc_code * code)
502 tree gfc_int4_type_node = gfc_get_int_type (4);
506 /* Start a new block for this statement. */
507 gfc_init_se (&se, NULL);
508 gfc_start_block (&se.pre);
511 if (code->expr == NULL)
513 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
514 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
/* Character expression: pass address and length to the runtime.  */
518 gfc_conv_expr_reference (&se, code->expr);
519 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
520 se.expr, se.string_length);
523 gfc_add_expr_to_block (&se.pre, tmp);
525 gfc_add_block_to_block (&se.pre, &se.post);
527 return gfc_finish_block (&se.pre);
531 /* Generate GENERIC for the IF construct.  This function also deals with
532 the simple IF statement, because the front end translates the IF
533 statement into an IF construct.
565 where COND_S is the simplified version of the predicate.  PRE_COND_S
566 are the pre side-effects produced by the translation of the
   conditional expression.
568 We need to build the chain recursively otherwise we run into
569 problems with folding incomplete statements.
   NOTE(review): the worked example between these comment fragments is
   elided in this excerpt.  */
572 gfc_trans_if_1 (gfc_code * code)
577 /* Check for an unconditional ELSE clause. */
579 return gfc_trans_code (code->next);
581 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
582 gfc_init_se (&if_se, NULL);
583 gfc_start_block (&if_se.pre);
585 /* Calculate the IF condition expression. */
586 gfc_conv_expr_val (&if_se, code->expr);
588 /* Translate the THEN clause. */
589 stmt = gfc_trans_code (code->next);
591 /* Translate the ELSE clause (recursively, to handle ELSE IF chains). */
593 elsestmt = gfc_trans_if_1 (code->block);
595 elsestmt = build_empty_stmt ();
597 /* Build the condition expression and add it to the condition block. */
598 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
600 gfc_add_expr_to_block (&if_se.pre, stmt);
602 /* Finish off this statement. */
603 return gfc_finish_block (&if_se.pre);
/* Entry point for IF translation; delegates to gfc_trans_if_1.  */
607 gfc_trans_if (gfc_code * code)
609 /* Ignore the top EXEC_IF, it only announces an IF construct. The
610 actual code we must translate is in code->block. */
612 return gfc_trans_if_1 (code->block);
616 /* Translate an arithmetic IF expression.
618 IF (cond) label1, label2, label3 translates to
   a pair of nested COND_EXPRs comparing COND against zero
   (NOTE(review): the full pseudo-code in this header is elided).
630 An optimized version can be generated in case of equal labels.
631 E.g., if label1 is equal to label2, we can translate it to
   a single comparison.  */
640 gfc_trans_arithmetic_if (gfc_code * code)
648 /* Start a new block. */
649 gfc_init_se (&se, NULL);
650 gfc_start_block (&se.pre);
652 /* Pre-evaluate COND. */
653 gfc_conv_expr_val (&se, code->expr);
654 se.expr = gfc_evaluate_now (se.expr, &se.pre);
656 /* Build something to compare with. */
657 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
659 if (code->label->value != code->label2->value)
661 /* If (cond < 0) take branch1 else take branch2.
662 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
663 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
664 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
/* If label1 == label3, the < and > cases coincide, so a single
   != test against zero suffices.  */
666 if (code->label->value != code->label3->value)
667 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
669 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
671 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
/* label1 == label2: the < and == cases share a target.  */
674 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
676 if (code->label->value != code->label3->value
677 && code->label2->value != code->label3->value)
679 /* if (cond <= 0) take branch1 else take branch2. */
680 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
681 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
682 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
685 /* Append the COND_EXPR to the evaluation of COND, and return. */
686 gfc_add_expr_to_block (&se.pre, branch1);
687 return gfc_finish_block (&se.pre);
691 /* Translate the simple DO construct.  This is where the loop variable has
692 integer type and step +-1.  We can't use this in the general case
693 because integer overflow and floating point errors could give incorrect
   results.
695 We translate a do loop from:
697 DO dovar = from, to, step
703 [Evaluate loop bounds and step]
705 if ((step > 0) ? (dovar <= to) : (dovar >= to))
711 cond = (dovar == to);
713 if (cond) goto end_label;
   NOTE(review): intermediate lines of this pseudo-code are elided in
   this excerpt.
718 This helps the optimizers by avoiding the extra induction variable
719 used in the general case. */
722 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
723 tree from, tree to, tree step)
732 type = TREE_TYPE (dovar);
734 /* Initialize the DO variable: dovar = from. */
735 gfc_add_modify_expr (pblock, dovar, from);
737 /* Cycle and exit statements are implemented with gotos. */
738 cycle_label = gfc_build_label_decl (NULL_TREE);
739 exit_label = gfc_build_label_decl (NULL_TREE);
741 /* Put the labels where they can be found later. See gfc_trans_do(). */
742 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
745 gfc_start_block (&body);
747 /* Main loop body. */
748 tmp = gfc_trans_code (code->block->next);
749 gfc_add_expr_to_block (&body, tmp);
751 /* Label for cycle statements (if needed). */
752 if (TREE_USED (cycle_label))
754 tmp = build1_v (LABEL_EXPR, cycle_label);
755 gfc_add_expr_to_block (&body, tmp);
758 /* Evaluate the loop condition BEFORE the increment, so the exit test
   compares the value just used — avoids overflow past TO. */
759 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
760 cond = gfc_evaluate_now (cond, &body);
762 /* Increment the loop variable. */
763 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
764 gfc_add_modify_expr (&body, dovar, tmp);
/* Exit the loop when the pre-increment value equalled TO.  */
767 tmp = build1_v (GOTO_EXPR, exit_label);
768 TREE_USED (exit_label) = 1;
769 tmp = fold_build3 (COND_EXPR, void_type_node,
770 cond, tmp, build_empty_stmt ());
771 gfc_add_expr_to_block (&body, tmp);
773 /* Finish the loop body. */
774 tmp = gfc_finish_block (&body);
775 tmp = build1_v (LOOP_EXPR, tmp);
777 /* Only execute the loop if the number of iterations is positive.
   STEP is a compile-time constant (+1 or -1) here, so its sign can be
   tested statically.  */
778 if (tree_int_cst_sgn (step) > 0)
779 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
781 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
782 tmp = fold_build3 (COND_EXPR, void_type_node,
783 cond, tmp, build_empty_stmt ());
784 gfc_add_expr_to_block (pblock, tmp);
786 /* Add the exit label. */
787 tmp = build1_v (LABEL_EXPR, exit_label);
788 gfc_add_expr_to_block (pblock, tmp);
790 return gfc_finish_block (pblock);
793 /* Translate the DO construct.  This obviously is one of the most
794 important ones to get right with any compiler, but especially
   for Fortran.
797 We special case some loop forms as described in gfc_trans_simple_do.
798 For other cases we implement them with a separate loop count,
799 as described in the standard.
801 We translate a do loop from:
803 DO dovar = from, to, step
809 [evaluate loop bounds and step]
810 empty = (step > 0 ? to < from : to > from);
811 countm1 = (to - from) / step;
813 if (empty) goto exit_label;
819 if (countm1 == 0) goto exit_label;
   NOTE(review): intermediate pseudo-code lines are elided in this
   excerpt.
824 countm1 is an unsigned integer.  It is equal to the loop count minus one,
825 because the loop count itself can overflow. */
828 gfc_trans_do (gfc_code * code)
847 gfc_start_block (&block);
849 /* Evaluate all the expressions in the iterator. */
850 gfc_init_se (&se, NULL);
851 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
852 gfc_add_block_to_block (&block, &se.pre);
854 type = TREE_TYPE (dovar);
856 gfc_init_se (&se, NULL);
857 gfc_conv_expr_val (&se, code->ext.iterator->start);
858 gfc_add_block_to_block (&block, &se.pre);
859 from = gfc_evaluate_now (se.expr, &block);
861 gfc_init_se (&se, NULL);
862 gfc_conv_expr_val (&se, code->ext.iterator->end);
863 gfc_add_block_to_block (&block, &se.pre);
864 to = gfc_evaluate_now (se.expr, &block);
866 gfc_init_se (&se, NULL);
867 gfc_conv_expr_val (&se, code->ext.iterator->step);
868 gfc_add_block_to_block (&block, &se.pre);
869 step = gfc_evaluate_now (se.expr, &block);
871 /* Special case simple loops: integer dovar with constant step +-1. */
872 if (TREE_CODE (type) == INTEGER_TYPE
873 && (integer_onep (step)
874 || tree_int_cst_equal (step, integer_minus_one_node)))
875 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
877 /* We need a special check for empty loops:
878 empty = (step > 0 ? to < from : to > from); */
879 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
880 fold_convert (type, integer_zero_node));
881 empty = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
882 fold_build2 (LT_EXPR, boolean_type_node, to, from),
883 fold_build2 (GT_EXPR, boolean_type_node, to, from));
885 /* Initialize loop count. This code is executed before we enter the
886 loop body. We generate: countm1 = abs(to - from) / abs(step). */
887 if (TREE_CODE (type) == INTEGER_TYPE)
891 utype = unsigned_type_for (type);
893 /* tmp = abs(to - from) / abs(step).  The subtraction is ordered by
   the sign of STEP so the difference is non-negative before the
   unsigned division. */
894 ustep = fold_convert (utype, fold_build1 (ABS_EXPR, type, step));
895 tmp = fold_build3 (COND_EXPR, type, pos_step,
896 fold_build2 (MINUS_EXPR, type, to, from),
897 fold_build2 (MINUS_EXPR, type, from, to));
898 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, fold_convert (utype, tmp),
/* Real-typed DO variable: compute the trip count in floating point
   and truncate.  NOTE(review): the else-branch opening is elided.  */
903 /* TODO: We could use the same width as the real type.
904 This would probably cause more problems that it solves
905 when we implement "long double" types. */
906 utype = unsigned_type_for (gfc_array_index_type);
907 tmp = fold_build2 (MINUS_EXPR, type, to, from);
908 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
909 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
911 countm1 = gfc_create_var (utype, "countm1");
912 gfc_add_modify_expr (&block, countm1, tmp);
914 /* Cycle and exit statements are implemented with gotos. */
915 cycle_label = gfc_build_label_decl (NULL_TREE);
916 exit_label = gfc_build_label_decl (NULL_TREE);
917 TREE_USED (exit_label) = 1;
919 /* Initialize the DO variable: dovar = from. */
920 gfc_add_modify_expr (&block, dovar, from);
922 /* If the loop is empty, go directly to the exit label. */
923 tmp = fold_build3 (COND_EXPR, void_type_node, empty,
924 build1_v (GOTO_EXPR, exit_label), build_empty_stmt ());
925 gfc_add_expr_to_block (&block, tmp);
928 gfc_start_block (&body);
930 /* Put these labels where they can be found later. We put the
931 labels in a TREE_LIST node (because TREE_CHAIN is already
932 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
933 label in TREE_VALUE (backend_decl). */
935 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
937 /* Main loop body. */
938 tmp = gfc_trans_code (code->block->next);
939 gfc_add_expr_to_block (&body, tmp);
941 /* Label for cycle statements (if needed). */
942 if (TREE_USED (cycle_label))
944 tmp = build1_v (LABEL_EXPR, cycle_label);
945 gfc_add_expr_to_block (&body, tmp);
948 /* Increment the loop variable. */
949 tmp = build2 (PLUS_EXPR, type, dovar, step);
950 gfc_add_modify_expr (&body, dovar, tmp);
952 /* End with the loop condition. Loop until countm1 == 0. */
953 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
954 build_int_cst (utype, 0));
955 tmp = build1_v (GOTO_EXPR, exit_label);
956 tmp = fold_build3 (COND_EXPR, void_type_node,
957 cond, tmp, build_empty_stmt ());
958 gfc_add_expr_to_block (&body, tmp);
960 /* Decrement the loop count.  Happens after the exit test, so the
   unsigned counter never wraps below zero.  */
961 tmp = build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
962 gfc_add_modify_expr (&body, countm1, tmp);
964 /* End of loop body. */
965 tmp = gfc_finish_block (&body);
967 /* The for loop itself. */
968 tmp = build1_v (LOOP_EXPR, tmp);
969 gfc_add_expr_to_block (&block, tmp);
971 /* Add the exit label. */
972 tmp = build1_v (LABEL_EXPR, exit_label);
973 gfc_add_expr_to_block (&block, tmp);
975 return gfc_finish_block (&block);
979 /* Translate the DO WHILE construct.
   The loop is built as: LOOP { if (!cond) goto exit; body; cycle: }
992 if (! cond) goto exit_label;
998 Because the evaluation of the exit condition `cond' may have side
999 effects, we can't do much for empty loop bodies. The backend optimizers
1000 should be smart enough to eliminate any dead loops. */
1003 gfc_trans_do_while (gfc_code * code)
1011 /* Everything we build here is part of the loop body. */
1012 gfc_start_block (&block);
1014 /* Cycle and exit statements are implemented with gotos. */
1015 cycle_label = gfc_build_label_decl (NULL_TREE);
1016 exit_label = gfc_build_label_decl (NULL_TREE);
1018 /* Put the labels where they can be found later. See gfc_trans_do(). */
1019 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1021 /* Create a GIMPLE version of the exit condition. */
1022 gfc_init_se (&cond, NULL);
1023 gfc_conv_expr_val (&cond, code->expr);
1024 gfc_add_block_to_block (&block, &cond.pre);
1025 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1027 /* Build "IF (! cond) GOTO exit_label". */
1028 tmp = build1_v (GOTO_EXPR, exit_label);
1029 TREE_USED (exit_label) = 1;
1030 tmp = fold_build3 (COND_EXPR, void_type_node,
1031 cond.expr, tmp, build_empty_stmt ());
1032 gfc_add_expr_to_block (&block, tmp);
1034 /* The main body of the loop. */
1035 tmp = gfc_trans_code (code->block->next);
1036 gfc_add_expr_to_block (&block, tmp);
1038 /* Label for cycle statements (if needed). */
1039 if (TREE_USED (cycle_label))
1041 tmp = build1_v (LABEL_EXPR, cycle_label);
1042 gfc_add_expr_to_block (&block, tmp);
1045 /* End of loop body. */
1046 tmp = gfc_finish_block (&block);
1048 gfc_init_block (&block);
1049 /* Build the loop. */
1050 tmp = build1_v (LOOP_EXPR, tmp);
1051 gfc_add_expr_to_block (&block, tmp);
1053 /* Add the exit label. */
1054 tmp = build1_v (LABEL_EXPR, exit_label);
1055 gfc_add_expr_to_block (&block, tmp);
1057 return gfc_finish_block (&block);
1061 /* Translate the SELECT CASE construct for INTEGER case expressions,
1062 without killing all potential optimizations.  The problem is that
1063 Fortran allows unbounded cases, but the back-end does not, so we
1064 need to intercept those before we enter the equivalent SWITCH_EXPR
   and clamp them to the type's extreme values.
1067 For example, we translate this,
1070 CASE (:100,101,105:115)
1080 to the GENERIC equivalent,
1084 case (minimum value for typeof(expr) ... 100:
1090 case 200 ... (maximum value for typeof(expr):
   NOTE(review): most of the worked example in this header comment is
   elided in this excerpt.  */
1107 gfc_trans_integer_select (gfc_code * code)
1117 gfc_start_block (&block);
1119 /* Calculate the switch expression. */
1120 gfc_init_se (&se, NULL);
1121 gfc_conv_expr_val (&se, code->expr);
1122 gfc_add_block_to_block (&block, &se.pre);
1124 end_label = gfc_build_label_decl (NULL_TREE);
1126 gfc_init_block (&body);
1128 for (c = code->block; c; c = c->block)
1130 for (cp = c->ext.case_list; cp; cp = cp->next)
1135 /* Assume it's the default case. */
1136 low = high = NULL_TREE;
1140 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1143 /* If there's only a lower bound, set the high bound to the
1144 maximum value of the case expression. */
1146 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1151 /* Three cases are possible here:
1153 1) There is no lower bound, e.g. CASE (:N).
1154 2) There is a lower bound .NE. high bound, that is
1155 a case range, e.g. CASE (N:M) where M>N (we make
1156 sure that M>N during type resolution).
1157 3) There is a lower bound, and it has the same value
1158 as the high bound, e.g. CASE (N:N). This is our
1159 internal representation of CASE(N).
1161 In the first and second case, we need to set a value for
1162 high. In the third case, we don't because the GCC middle
1163 end represents a single case value by just letting high be
1164 a NULL_TREE. We can't do that because we need to be able
1165 to represent unbounded cases. */
1169 && mpz_cmp (cp->low->value.integer,
1170 cp->high->value.integer) != 0))
1171 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1174 /* Unbounded case. */
1176 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1179 /* Build a label. */
1180 label = gfc_build_label_decl (NULL_TREE);
1182 /* Add this case label.
1183 Add parameter 'label', make it match GCC backend. */
1184 tmp = build3 (CASE_LABEL_EXPR, void_type_node, low, high, label);
1185 gfc_add_expr_to_block (&body, tmp);
1188 /* Add the statements for this case. */
1189 tmp = gfc_trans_code (c->next);
1190 gfc_add_expr_to_block (&body, tmp);
1192 /* Break to the end of the construct. */
1193 tmp = build1_v (GOTO_EXPR, end_label);
1194 gfc_add_expr_to_block (&body, tmp);
1197 tmp = gfc_finish_block (&body);
1198 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1199 gfc_add_expr_to_block (&block, tmp);
1201 tmp = build1_v (LABEL_EXPR, end_label);
1202 gfc_add_expr_to_block (&block, tmp);
1204 return gfc_finish_block (&block);
1208 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1210 There are only two cases possible here, even though the standard
1211 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1212 .FALSE., and DEFAULT.
1214 We never generate more than two blocks here. Instead, we always
1215 try to eliminate the DEFAULT case. This way, we can translate this
1216 kind of SELECT construct to a simple
   COND_EXPR
1220 expression in GENERIC. */
1223 gfc_trans_logical_select (gfc_code * code)
1226 gfc_code *t, *f, *d;
1231 /* Assume we don't have any cases at all. */
1234 /* Now see which ones we actually do have. We can have at most two
1235 cases in a single case list: one for .TRUE. and one for .FALSE.
1236 The default case is always separate. If the cases for .TRUE. and
1237 .FALSE. are in the same case list, the block for that case list
1238 is always executed, and we don't generate a COND_EXPR. */
1239 for (c = code->block; c; c = c->block)
1241 for (cp = c->ext.case_list; cp; cp = cp->next)
1245 if (cp->low->value.logical == 0) /* .FALSE. */
1247 else /* if (cp->value.logical != 0), thus .TRUE. */
1255 /* Start a new block. */
1256 gfc_start_block (&block);
1258 /* Calculate the switch expression. We always need to do this
1259 because it may have side effects. */
1260 gfc_init_se (&se, NULL);
1261 gfc_conv_expr_val (&se, code->expr);
1262 gfc_add_block_to_block (&block, &se.pre);
1264 if (t == f && t != NULL)
1266 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1267 translate the code for these cases, append it to the current
   block, and we're done — no conditional is needed.  */
1269 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1273 tree true_tree, false_tree, stmt;
1275 true_tree = build_empty_stmt ();
1276 false_tree = build_empty_stmt ();
1278 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1279 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1280 make the missing case the default case. */
1281 if (t != NULL && f != NULL)
1291 /* Translate the code for each of these blocks, and append it to
1292 the current block. */
1294 true_tree = gfc_trans_code (t->next);
1297 false_tree = gfc_trans_code (f->next);
1299 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1300 true_tree, false_tree);
1301 gfc_add_expr_to_block (&block, stmt);
1304 return gfc_finish_block (&block);
1308 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1309 Instead of generating compares and jumps, it is far simpler to
1310 generate a data structure describing the cases in order and call a
1311 library subroutine that locates the right case.
1312 This is particularly true because this is the only case where we
1313 might have to dispose of a temporary.
1314 The library subroutine returns a pointer to jump to or NULL if no
1315 branches are to be taken. */
1318 gfc_trans_character_select (gfc_code *code)
1320 tree init, node, end_label, tmp, type, case_num, label;
1321 stmtblock_t block, body;
1327 static tree select_struct;
/* FIELD_DECLs of the case-descriptor record; cached across calls
   since the record type is built only once per translation unit. */
1328 static tree ss_string1, ss_string1_len;
1329 static tree ss_string2, ss_string2_len;
1330 static tree ss_target;
/* Lazily build the record describing one case range:
   { string1, string1_len, string2, string2_len, target }. */
1332 if (select_struct == NULL)
1334 tree gfc_int4_type_node = gfc_get_int_type (4);
1336 select_struct = make_node (RECORD_TYPE);
1337 TYPE_NAME (select_struct) = get_identifier ("_jump_struct");
1340 #define ADD_FIELD(NAME, TYPE) \
1341 ss_##NAME = gfc_add_field_to_struct \
1342 (&(TYPE_FIELDS (select_struct)), select_struct, \
1343 get_identifier (stringize(NAME)), TYPE)
1345 ADD_FIELD (string1, pchar_type_node);
1346 ADD_FIELD (string1_len, gfc_int4_type_node);
1348 ADD_FIELD (string2, pchar_type_node);
1349 ADD_FIELD (string2_len, gfc_int4_type_node);
1351 ADD_FIELD (target, integer_type_node);
1354 gfc_finish_type (select_struct);
/* Walk to the leftmost node of the sorted case list, then (in the
   elided loop below) count the cases. */
1357 cp = code->block->ext.case_list;
1358 while (cp->left != NULL)
1362 for (d = cp; d; d = d->right)
1365 end_label = gfc_build_label_decl (NULL_TREE);
1367 /* Generate the body */
1368 gfc_start_block (&block);
1369 gfc_init_block (&body);
/* For each case block: emit a CASE_LABEL_EXPR per case value, the
   translated statements, then a jump past the whole construct. */
1371 for (c = code->block; c; c = c->block)
1373 for (d = c->ext.case_list; d; d = d->next)
1375 label = gfc_build_label_decl (NULL_TREE);
1376 tmp = build3 (CASE_LABEL_EXPR, void_type_node,
1377 build_int_cst (NULL_TREE, d->n),
1378 build_int_cst (NULL_TREE, d->n), label);
1379 gfc_add_expr_to_block (&body, tmp);
1382 tmp = gfc_trans_code (c->next);
1383 gfc_add_expr_to_block (&body, tmp);
1385 tmp = build1_v (GOTO_EXPR, end_label);
1386 gfc_add_expr_to_block (&body, tmp);
1389 /* Generate the structure describing the branches */
1392 for(d = cp; d; d = d->right)
1396 gfc_init_se (&se, NULL);
/* A missing low bound (CASE (:high)) is encoded as a null string
   pointer with zero length; likewise for the high bound below. */
1400 node = tree_cons (ss_string1, null_pointer_node, node);
1401 node = tree_cons (ss_string1_len, integer_zero_node, node);
1405 gfc_conv_expr_reference (&se, d->low);
1407 node = tree_cons (ss_string1, se.expr, node);
1408 node = tree_cons (ss_string1_len, se.string_length, node);
1411 if (d->high == NULL)
1413 node = tree_cons (ss_string2, null_pointer_node, node);
1414 node = tree_cons (ss_string2_len, integer_zero_node, node);
1418 gfc_init_se (&se, NULL);
1419 gfc_conv_expr_reference (&se, d->high);
1421 node = tree_cons (ss_string2, se.expr, node);
1422 node = tree_cons (ss_string2_len, se.string_length, node);
/* d->n is the case number matched against the CASE_LABEL_EXPRs
   emitted above. */
1425 node = tree_cons (ss_target, build_int_cst (integer_type_node, d->n),
1428 tmp = build_constructor_from_list (select_struct, nreverse (node))
1429 init = tree_cons (NULL_TREE, tmp, init);
1432 type = build_array_type (select_struct, build_index_type
1433 (build_int_cst (NULL_TREE, n - 1)));
1435 init = build_constructor_from_list (type, nreverse(init));
1436 TREE_CONSTANT (init) = 1;
1437 TREE_INVARIANT (init) = 1;
1438 TREE_STATIC (init) = 1;
1439 /* Create a static variable to hold the jump table. */
1440 tmp = gfc_create_var (type, "jumptable");
1441 TREE_CONSTANT (tmp) = 1;
1442 TREE_INVARIANT (tmp) = 1;
1443 TREE_STATIC (tmp) = 1;
1444 TREE_READONLY (tmp) = 1;
1445 DECL_INITIAL (tmp) = init;
1448 /* Build the library call */
1449 init = gfc_build_addr_expr (pvoid_type_node, init);
1451 gfc_init_se (&se, NULL);
1452 gfc_conv_expr_reference (&se, code->expr);
1454 gfc_add_block_to_block (&block, &se.pre);
/* The runtime routine searches the table for the selector string and
   returns the matched case number; dispatch on it with SWITCH_EXPR. */
1456 tmp = build_call_expr (gfor_fndecl_select_string, 4, init,
1457 build_int_cst (NULL_TREE, n), se.expr,
1459 case_num = gfc_create_var (integer_type_node, "case_num");
1460 gfc_add_modify_expr (&block, case_num, tmp);
1462 gfc_add_block_to_block (&block, &se.post);
1464 tmp = gfc_finish_block (&body);
1465 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1466 gfc_add_expr_to_block (&block, tmp);
1468 tmp = build1_v (LABEL_EXPR, end_label);
1469 gfc_add_expr_to_block (&block, tmp);
1471 return gfc_finish_block (&block);
1475 /* Translate the three variants of the SELECT CASE construct.
1477 SELECT CASEs with INTEGER case expressions can be translated to an
1478 equivalent GENERIC switch statement, and for LOGICAL case
1479 expressions we build one or two if-else compares.
1481 SELECT CASEs with CHARACTER case expressions are a whole different
1482 story, because they don't exist in GENERIC. So we sort them and
1483 do a binary search at runtime.
1485 Fortran has no BREAK statement, and it does not allow jumps from
1486 one case block to another. That makes things a lot easier for
1490 gfc_trans_select (gfc_code * code)
1492 gcc_assert (code && code->expr);
1494 /* Empty SELECT constructs are legal. */
1495 if (code->block == NULL)
1496 return build_empty_stmt ();
1498 /* Select the correct translation function. */
1499 switch (code->expr->ts.type)
1501 case BT_LOGICAL: return gfc_trans_logical_select (code);
1502 case BT_INTEGER: return gfc_trans_integer_select (code);
1503 case BT_CHARACTER: return gfc_trans_character_select (code);
/* Any other selector type indicates a front-end bug. */
1505 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1511 /* Traversal function to substitute a replacement symtree if the symbol
1512 in the expression is the same as that passed. f == 2 signals that
1513 that variable itself is not to be checked - only the references.
1514 This group of functions is used when the variable expression in a
1515 FORALL assignment has internal references. For example:
1516 FORALL (i = 1:4) p(p(i)) = i
1517 The only recourse here is to store a copy of 'p' for the index
/* Saved symtrees used to swap the FORALL lvalue for its temporary
   copy and back again (see forall_restore below). */
1520 static gfc_symtree *new_symtree;
1521 static gfc_symtree *old_symtree;
/* Callback for gfc_traverse_expr: redirect variable references to SYM
   so they use the temporary's symtree instead. */
1524 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1526 if (expr->expr_type != EXPR_VARIABLE)
1531 else if (expr->symtree->n.sym == sym)
1532 expr->symtree = new_symtree;
/* Walk expression E substituting the temporary symtree for references
   to SYM; F is forwarded as the traversal flag (see forall_replace). */
1538 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1540 gfc_traverse_expr (e, sym, forall_replace, f);
/* Callback for gfc_traverse_expr: undo forall_replace by restoring the
   original symtree wherever the temporary one was substituted. */
1544 forall_restore (gfc_expr *expr,
1545 gfc_symbol *sym ATTRIBUTE_UNUSED,
1546 int *f ATTRIBUTE_UNUSED)
1548 if (expr->expr_type != EXPR_VARIABLE)
1551 if (expr->symtree == new_symtree)
1552 expr->symtree = old_symtree;
/* Walk E restoring every reference that was redirected to the
   temporary symtree (companion of forall_replace_symtree). */
1558 forall_restore_symtree (gfc_expr *e)
1560 gfc_traverse_expr (e, NULL, forall_restore, 0);
/* Make a complete copy of the FORALL assignment's lvalue variable in a
   temporary, then redirect all references in both sides of the
   assignment C to that temporary.  Initialization code is appended to
   PRE, cleanup to POST. */
1564 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1569 gfc_symbol *new_sym;
1570 gfc_symbol *old_sym;
1574 /* Build a copy of the lvalue. */
1575 old_symtree = c->expr->symtree;
1576 old_sym = old_symtree->n.sym;
1577 e = gfc_lval_expr_from_sym (old_sym);
1578 if (old_sym->attr.dimension)
1580 gfc_init_se (&tse, NULL);
/* Pack the array into a contiguous temporary; the descriptor is
   dereferenced so tse.expr is the descriptor itself. */
1581 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
1582 gfc_add_block_to_block (pre, &tse.pre);
1583 gfc_add_block_to_block (post, &tse.post);
1584 tse.expr = build_fold_indirect_ref (tse.expr);
1586 if (e->ts.type != BT_CHARACTER)
1588 /* Use the variable offset for the temporary. */
1589 tmp = gfc_conv_descriptor_offset (tse.expr);
1590 gfc_add_modify_expr (pre, tmp,
1591 gfc_conv_array_offset (old_sym->backend_decl));
/* Scalar case: evaluate the variable and assign it to a fresh
   temporary (character temporaries need an explicit length). */
1596 gfc_init_se (&tse, NULL);
1597 gfc_init_se (&rse, NULL);
1598 gfc_conv_expr (&rse, e);
1599 if (e->ts.type == BT_CHARACTER)
1601 tse.string_length = rse.string_length;
1602 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1604 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1606 gfc_add_block_to_block (pre, &tse.pre);
1607 gfc_add_block_to_block (post, &tse.post);
1611 tmp = gfc_typenode_for_spec (&e->ts);
1612 tse.expr = gfc_create_var (tmp, "temp");
1615 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1616 e->expr_type == EXPR_VARIABLE);
1617 gfc_add_expr_to_block (pre, tmp);
1621 /* Create a new symbol to represent the lvalue. */
1622 new_sym = gfc_new_symbol (old_sym->name, NULL);
1623 new_sym->ts = old_sym->ts;
1624 new_sym->attr.referenced = 1;
1625 new_sym->attr.dimension = old_sym->attr.dimension;
1626 new_sym->attr.flavor = old_sym->attr.flavor;
1628 /* Use the temporary as the backend_decl. */
1629 new_sym->backend_decl = tse.expr;
1631 /* Create a fake symtree for it. */
1633 new_symtree = gfc_new_symtree (&root, old_sym->name);
1634 new_symtree->n.sym = new_sym;
1635 gcc_assert (new_symtree == root);
1637 /* Go through the expression reference replacing the old_symtree
1639 forall_replace_symtree (c->expr, old_sym, 2);
1641 /* Now we have made this temporary, we might as well use it for
1642 the right hand side. */
1643 forall_replace_symtree (c->expr2, old_sym, 1);
1647 /* Handles dependencies in forall assignments. */
/* Detect dependencies between the two sides of the assignment C and,
   where the lvalue references itself through a FORALL index or an
   overlapping substring, replace it with a temporary copy.  Returns
   (in the elided tail) whether a temporary is still needed. */
1649 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1656 lsym = c->expr->symtree->n.sym;
1657 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
1659 /* Now check for dependencies within the 'variable'
1660 expression itself. These are treated by making a complete
1661 copy of variable and changing all the references to it
1662 point to the copy instead. Note that the shallow copy of
1663 the variable will not suffice for derived types with
1664 pointer components. We therefore leave these to their
/* Derived types with pointer components cannot be shallow-copied;
   skip the temporary-copy treatment for them. */
1666 if (lsym->ts.type == BT_DERIVED
1667 && lsym->ts.derived->attr.pointer_comp)
1671 if (find_forall_index (c->expr, lsym, 2) == SUCCESS)
1673 forall_make_variable_temp (c, pre, post);
1677 /* Substrings with dependencies are treated in the same
/* Same symbol on both sides with substring references: copy the
   lvalue when the rhs substring starts before the lhs one. */
1679 if (c->expr->ts.type == BT_CHARACTER
1681 && c->expr2->expr_type == EXPR_VARIABLE
1682 && lsym == c->expr2->symtree->n.sym)
1684 for (lref = c->expr->ref; lref; lref = lref->next)
1685 if (lref->type == REF_SUBSTRING)
1687 for (rref = c->expr2->ref; rref; rref = rref->next)
1688 if (rref->type == REF_SUBSTRING)
1692 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
1694 forall_make_variable_temp (c, pre, post);
/* Undo the symtree substitution done by forall_make_variable_temp and
   free the fake symtree and symbol it created. */
1703 cleanup_forall_symtrees (gfc_code *c)
1705 forall_restore_symtree (c->expr);
1706 forall_restore_symtree (c->expr2);
1707 gfc_free (new_symtree->n.sym);
1708 gfc_free (new_symtree);
1712 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1713 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1714 indicates whether we should generate code to test the FORALLs mask
1715 array. OUTER is the loop header to be used for initializing mask
1718 The generated loop format is:
1719 count = (end - start + step) / step
1732 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1733 int mask_flag, stmtblock_t *outer)
1741 tree var, start, end, step;
1744 /* Initialize the mask index outside the FORALL nest. */
1745 if (mask_flag && forall_tmp->mask)
1746 gfc_add_modify_expr (outer, forall_tmp->maskindex, gfc_index_zero_node);
/* Wrap BODY in one counted loop per FORALL index variable, innermost
   first; each iteration of the loop below adds one level. */
1748 iter = forall_tmp->this_loop;
1749 nvar = forall_tmp->nvar;
1750 for (n = 0; n < nvar; n++)
1753 start = iter->start;
1757 exit_label = gfc_build_label_decl (NULL_TREE);
1758 TREE_USED (exit_label) = 1;
1760 /* The loop counter. */
1761 count = gfc_create_var (TREE_TYPE (var), "count");
1763 /* The body of the loop. */
1764 gfc_init_block (&block);
/* Count runs down to zero; exit when it is no longer positive. */
1766 /* The exit condition. */
1767 cond = fold_build2 (LE_EXPR, boolean_type_node,
1768 count, build_int_cst (TREE_TYPE (count), 0));
1769 tmp = build1_v (GOTO_EXPR, exit_label);
1770 tmp = fold_build3 (COND_EXPR, void_type_node,
1771 cond, tmp, build_empty_stmt ());
1772 gfc_add_expr_to_block (&block, tmp);
1774 /* The main loop body. */
1775 gfc_add_expr_to_block (&block, body);
1777 /* Increment the loop variable. */
1778 tmp = build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1779 gfc_add_modify_expr (&block, var, tmp);
1781 /* Advance to the next mask element. Only do this for the
1783 if (n == 0 && mask_flag && forall_tmp->mask)
1785 tree maskindex = forall_tmp->maskindex;
1786 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
1787 maskindex, gfc_index_one_node);
1788 gfc_add_modify_expr (&block, maskindex, tmp);
1791 /* Decrement the loop counter. */
1792 tmp = build2 (MINUS_EXPR, TREE_TYPE (var), count,
1793 build_int_cst (TREE_TYPE (var), 1));
1794 gfc_add_modify_expr (&block, count, tmp);
1796 body = gfc_finish_block (&block);
1798 /* Loop var initialization. */
1799 gfc_init_block (&block);
1800 gfc_add_modify_expr (&block, var, start);
/* count = (end - start + step) / step, per the header comment. */
1803 /* Initialize the loop counter. */
1804 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1805 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1806 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1807 gfc_add_modify_expr (&block, count, tmp);
1809 /* The loop expression. */
1810 tmp = build1_v (LOOP_EXPR, body);
1811 gfc_add_expr_to_block (&block, tmp);
1813 /* The exit label. */
1814 tmp = build1_v (LABEL_EXPR, exit_label);
1815 gfc_add_expr_to_block (&block, tmp);
1817 body = gfc_finish_block (&block);
1824 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1825 is nonzero, the body is controlled by all masks in the forall nest.
1826 Otherwise, the innermost loop is not controlled by it's mask. This
1827 is used for initializing that mask. */
1830 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1835 forall_info *forall_tmp;
1836 tree mask, maskindex;
1838 gfc_start_block (&header);
/* Walk outward from the innermost FORALL level, wrapping BODY in each
   level's mask test and loops in turn. */
1840 forall_tmp = nested_forall_info;
1841 while (forall_tmp != NULL)
1843 /* Generate body with masks' control. */
1846 mask = forall_tmp->mask;
1847 maskindex = forall_tmp->maskindex;
1849 /* If a mask was specified make the assignment conditional. */
1852 tmp = gfc_build_array_ref (mask, maskindex, NULL);
1853 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1856 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1857 forall_tmp = forall_tmp->prev_nest;
1861 gfc_add_expr_to_block (&header, body);
1862 return gfc_finish_block (&header);
1866 /* Allocate data for holding a temporary array. Returns either a local
1867 temporary array or a pointer variable. */
1870 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
/* SIZE is an element count; convert it to the index of the last
   element for the array type's bounds. */
1877 if (INTEGER_CST_P (size))
1879 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1880 gfc_index_one_node);
1885 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1886 type = build_array_type (elem_type, type);
/* Small constant-size temporaries live on the stack; otherwise fall
   back to a heap allocation through a pointer variable. */
1887 if (gfc_can_put_var_on_stack (bytesize))
1889 gcc_assert (INTEGER_CST_P (size));
1890 tmpvar = gfc_create_var (type, "temp");
1895 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1896 *pdata = convert (pvoid_type_node, tmpvar);
1898 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
1899 gfc_add_modify_expr (pblock, tmpvar, tmp);
1905 /* Generate codes to copy the temporary to the actual lhs. */
/* TMP1 is the linear temporary, COUNT1 its running index, COUNT3 the
   index into WHEREMASK (whose sense INVERT may flip).  EXPR is the
   lhs being stored into. */
1908 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1909 tree count1, tree wheremask, bool invert)
1913 stmtblock_t block, body;
1919 lss = gfc_walk_expr (expr);
/* Scalar lhs: a single assignment from tmp1[count1], no scalarizer. */
1921 if (lss == gfc_ss_terminator)
1923 gfc_start_block (&block);
1925 gfc_init_se (&lse, NULL);
1927 /* Translate the expression. */
1928 gfc_conv_expr (&lse, expr);
1930 /* Form the expression for the temporary. */
1931 tmp = gfc_build_array_ref (tmp1, count1, NULL);
1933 /* Use the scalar assignment as is. */
1934 gfc_add_block_to_block (&block, &lse.pre);
1935 gfc_add_modify_expr (&block, lse.expr, tmp);
1936 gfc_add_block_to_block (&block, &lse.post);
1938 /* Increment the count1. */
1939 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1940 gfc_index_one_node);
1941 gfc_add_modify_expr (&block, count1, tmp);
1943 tmp = gfc_finish_block (&block);
/* Array lhs: build a scalarized loop storing tmp1[count1++] into each
   element, optionally guarded by the WHERE mask. */
1947 gfc_start_block (&block);
1949 gfc_init_loopinfo (&loop1);
1950 gfc_init_se (&rse, NULL);
1951 gfc_init_se (&lse, NULL);
1953 /* Associate the lss with the loop. */
1954 gfc_add_ss_to_loop (&loop1, lss);
1956 /* Calculate the bounds of the scalarization. */
1957 gfc_conv_ss_startstride (&loop1);
1958 /* Setup the scalarizing loops. */
1959 gfc_conv_loop_setup (&loop1);
1961 gfc_mark_ss_chain_used (lss, 1);
1963 /* Start the scalarized loop body. */
1964 gfc_start_scalarized_body (&loop1, &body);
1966 /* Setup the gfc_se structures. */
1967 gfc_copy_loopinfo_to_se (&lse, &loop1);
1970 /* Form the expression of the temporary. */
1971 if (lss != gfc_ss_terminator)
1972 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
1973 /* Translate expr. */
1974 gfc_conv_expr (&lse, expr);
1976 /* Use the scalar assignment. */
1977 rse.string_length = lse.string_length;
1978 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
1980 /* Form the mask expression according to the mask tree list. */
1983 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
/* INVERT corresponds to an ELSEWHERE block: negate the mask. */
1985 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1986 TREE_TYPE (wheremaskexpr),
1988 tmp = fold_build3 (COND_EXPR, void_type_node,
1989 wheremaskexpr, tmp, build_empty_stmt ());
1992 gfc_add_expr_to_block (&body, tmp);
1994 /* Increment count1. */
1995 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1996 count1, gfc_index_one_node);
1997 gfc_add_modify_expr (&body, count1, tmp);
1999 /* Increment count3. */
2002 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2003 count3, gfc_index_one_node);
2004 gfc_add_modify_expr (&body, count3, tmp);
2007 /* Generate the copying loops. */
2008 gfc_trans_scalarizing_loops (&loop1, &body);
2009 gfc_add_block_to_block (&block, &loop1.pre);
2010 gfc_add_block_to_block (&block, &loop1.post);
2011 gfc_cleanup_loop (&loop1);
2013 tmp = gfc_finish_block (&block);
2019 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2020 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2021 and should not be freed. WHEREMASK is the conditional execution mask
2022 whose sense may be inverted by INVERT. */
2025 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2026 tree count1, gfc_ss *lss, gfc_ss *rss,
2027 tree wheremask, bool invert)
2029 stmtblock_t block, body1;
2036 gfc_start_block (&block);
2038 gfc_init_se (&rse, NULL);
2039 gfc_init_se (&lse, NULL);
/* Scalar rhs: single store into tmp1[count1] without the scalarizer. */
2041 if (lss == gfc_ss_terminator)
2043 gfc_init_block (&body1);
2044 gfc_conv_expr (&rse, expr2);
2045 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2049 /* Initialize the loop. */
2050 gfc_init_loopinfo (&loop);
2052 /* We may need LSS to determine the shape of the expression. */
2053 gfc_add_ss_to_loop (&loop, lss);
2054 gfc_add_ss_to_loop (&loop, rss);
2056 gfc_conv_ss_startstride (&loop);
2057 gfc_conv_loop_setup (&loop);
2059 gfc_mark_ss_chain_used (rss, 1);
2060 /* Start the loop body. */
2061 gfc_start_scalarized_body (&loop, &body1);
2063 /* Translate the expression. */
2064 gfc_copy_loopinfo_to_se (&rse, &loop);
2066 gfc_conv_expr (&rse, expr2);
2068 /* Form the expression of the temporary. */
2069 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2072 /* Use the scalar assignment. */
2073 lse.string_length = rse.string_length;
2074 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2075 expr2->expr_type == EXPR_VARIABLE);
2077 /* Form the mask expression according to the mask tree list. */
2080 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
/* INVERT corresponds to an ELSEWHERE block: negate the mask. */
2082 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2083 TREE_TYPE (wheremaskexpr),
2085 tmp = fold_build3 (COND_EXPR, void_type_node,
2086 wheremaskexpr, tmp, build_empty_stmt ());
2089 gfc_add_expr_to_block (&body1, tmp);
/* For the scalar case the "body" is emitted straight into BLOCK and
   only count1 advances; the array case also advances count3 inside
   the scalarized loop. */
2091 if (lss == gfc_ss_terminator)
2093 gfc_add_block_to_block (&block, &body1);
2095 /* Increment count1. */
2096 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2097 gfc_index_one_node);
2098 gfc_add_modify_expr (&block, count1, tmp);
2102 /* Increment count1. */
2103 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2104 count1, gfc_index_one_node);
2105 gfc_add_modify_expr (&body1, count1, tmp);
2107 /* Increment count3. */
2110 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2111 count3, gfc_index_one_node);
2112 gfc_add_modify_expr (&body1, count3, tmp);
2115 /* Generate the copying loops. */
2116 gfc_trans_scalarizing_loops (&loop, &body1);
2118 gfc_add_block_to_block (&block, &loop.pre);
2119 gfc_add_block_to_block (&block, &loop.post);
2121 gfc_cleanup_loop (&loop);
2122 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2123 as tree nodes in SS may not be valid in different scope. */
2126 tmp = gfc_finish_block (&block);
2131 /* Calculate the size of temporary needed in the assignment inside forall.
2132 LSS and RSS are filled in this function. */
2135 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2136 stmtblock_t * pblock,
2137 gfc_ss **lss, gfc_ss **rss)
2145 *lss = gfc_walk_expr (expr1);
/* A scalar lhs needs exactly one element. */
2148 size = gfc_index_one_node;
2149 if (*lss != gfc_ss_terminator)
2151 gfc_init_loopinfo (&loop);
2153 /* Walk the RHS of the expression. */
2154 *rss = gfc_walk_expr (expr2);
2155 if (*rss == gfc_ss_terminator)
2157 /* The rhs is scalar. Add a ss for the expression. */
2158 *rss = gfc_get_ss ();
2159 (*rss)->next = gfc_ss_terminator;
2160 (*rss)->type = GFC_SS_SCALAR;
2161 (*rss)->expr = expr2;
2164 /* Associate the SS with the loop. */
2165 gfc_add_ss_to_loop (&loop, *lss);
2166 /* We don't actually need to add the rhs at this point, but it might
2167 make guessing the loop bounds a bit easier. */
2168 gfc_add_ss_to_loop (&loop, *rss);
2170 /* We only want the shape of the expression, not rest of the junk
2171 generated by the scalarizer. */
2172 loop.array_parameter = 1;
/* Bounds checking would emit runtime checks here that we neither
   need nor want while merely sizing the temporary; disable it. */
2174 /* Calculate the bounds of the scalarization. */
2175 save_flag = flag_bounds_check;
2176 flag_bounds_check = 0;
2177 gfc_conv_ss_startstride (&loop);
2178 flag_bounds_check = save_flag;
2179 gfc_conv_loop_setup (&loop);
/* size = product over dims of (to - from + 1). */
2181 /* Figure out how many elements we need. */
2182 for (i = 0; i < loop.dimen; i++)
2184 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2185 gfc_index_one_node, loop.from[i]);
2186 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2188 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2190 gfc_add_block_to_block (pblock, &loop.pre);
2191 size = gfc_evaluate_now (size, pblock);
2192 gfc_add_block_to_block (pblock, &loop.post);
2194 /* TODO: write a function that cleans up a loopinfo without freeing
2195 the SS chains. Currently a NOP. */
2202 /* Calculate the overall iterator number of the nested forall construct.
2203 This routine actually calculates the number of times the body of the
2204 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2205 that by the expression INNER_SIZE. The BLOCK argument specifies the
2206 block in which to calculate the result, and the optional INNER_SIZE_BODY
2207 argument contains any statements that need to executed (inside the loop)
2208 to initialize or calculate INNER_SIZE. */
2211 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2212 stmtblock_t *inner_size_body, stmtblock_t *block)
2214 forall_info *forall_tmp = nested_forall_info;
2218 /* We can eliminate the innermost unconditional loops with constant
/* Fold constant, unmasked inner loop sizes into INNER_SIZE so no
   runtime accumulation loop is needed for them. */
2220 if (INTEGER_CST_P (inner_size))
2223 && !forall_tmp->mask
2224 && INTEGER_CST_P (forall_tmp->size))
2226 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2227 inner_size, forall_tmp->size);
2228 forall_tmp = forall_tmp->prev_nest;
2231 /* If there are no loops left, we have our constant result. */
2236 /* Otherwise, create a temporary variable to compute the result. */
2237 number = gfc_create_var (gfc_array_index_type, "num");
2238 gfc_add_modify_expr (block, number, gfc_index_zero_node);
/* num += inner_size, executed once per remaining FORALL iteration. */
2240 gfc_start_block (&body);
2241 if (inner_size_body)
2242 gfc_add_block_to_block (&body, inner_size_body);
2244 tmp = build2 (PLUS_EXPR, gfc_array_index_type, number,
2248 gfc_add_modify_expr (&body, number, tmp);
2249 tmp = gfc_finish_block (&body);
2251 /* Generate loops. */
2252 if (forall_tmp != NULL)
2253 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2255 gfc_add_expr_to_block (block, tmp);
2261 /* Allocate temporary for forall construct. SIZE is the size of temporary
2262 needed. PTEMP1 is returned for space free. */
2265 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
/* Total bytes = element count * element size; skip the multiply when
   the element size is one byte. */
2272 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2273 if (!integer_onep (unit))
2274 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2279 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
/* Heap allocations come back as a pointer; dereference to get the
   array object itself. */
2282 tmp = build_fold_indirect_ref (tmp);
2287 /* Allocate temporary for forall construct according to the information in
2288 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2289 assignment inside forall. PTEMP1 is returned for space free. */
2292 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2293 tree inner_size, stmtblock_t * inner_size_body,
2294 stmtblock_t * block, tree * ptemp1)
2298 /* Calculate the total size of temporary needed in forall construct. */
2299 size = compute_overall_iter_number (nested_forall_info, inner_size,
2300 inner_size_body, block);
2302 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2306 /* Handle assignments inside forall which need temporary.
2308 forall (i=start:end:stride; maskexpr)
2311 (where e,f<i> are arbitrary expressions possibly involving i
2312 and there is a dependency between e<i> and f<i>)
2314 masktmp(:) = maskexpr(:)
2319 for (i = start; i <= end; i += stride)
2323 for (i = start; i <= end; i += stride)
2325 if (masktmp[maskindex++])
2326 tmp[count1++] = f<i>
2330 for (i = start; i <= end; i += stride)
2332 if (masktmp[maskindex++])
2333 e<i> = tmp[count1++]
2338 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2339 tree wheremask, bool invert,
2340 forall_info * nested_forall_info,
2341 stmtblock_t * block)
2349 stmtblock_t inner_size_body;
2351 /* Create vars. count1 is the current iterator number of the nested
2353 count1 = gfc_create_var (gfc_array_index_type, "count1");
2355 /* Count is the wheremask index. */
2358 count = gfc_create_var (gfc_array_index_type, "count");
2359 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2364 /* Initialize count1. */
2365 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2367 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2368 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2369 gfc_init_block (&inner_size_body);
2370 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2373 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
2374 if (expr1->ts.type == BT_CHARACTER && expr1->ts.cl->length)
/* Make sure the character length has a backend decl before using it
   to build the temporary's type. */
2376 if (!expr1->ts.cl->backend_decl)
2379 gfc_init_se (&tse, NULL);
2380 gfc_conv_expr (&tse, expr1->ts.cl->length);
2381 expr1->ts.cl->backend_decl = tse.expr;
2383 type = gfc_get_character_type_len (gfc_default_character_kind,
2384 expr1->ts.cl->backend_decl);
2387 type = gfc_typenode_for_spec (&expr1->ts);
2389 /* Allocate temporary for nested forall construct according to the
2390 information in nested_forall_info and inner_size. */
2391 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2392 &inner_size_body, block, &ptemp1);
2394 /* Generate codes to copy rhs to the temporary . */
2395 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2398 /* Generate body and loops according to the information in
2399 nested_forall_info. */
2400 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2401 gfc_add_expr_to_block (block, tmp);
/* Reset the counters before the second pass that copies the temporary
   back to the lhs. */
2404 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2408 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2410 /* Generate codes to copy the temporary to lhs. */
2411 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2414 /* Generate body and loops according to the information in
2415 nested_forall_info. */
2416 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2417 gfc_add_expr_to_block (block, tmp);
2421 /* Free the temporary. */
2422 tmp = gfc_call_free (ptemp1);
2423 gfc_add_expr_to_block (block, tmp);
2428 /* Translate pointer assignment inside FORALL which need temporary. */
/* Two cases below: a scalar pointer target (stored as a pointer in a
   linear temporary) and an array pointer target (stored as an array
   descriptor per iteration).  Both use the two-pass scheme: first
   record all rhs values, then assign them to the lhs pointers. */
2431 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2432 forall_info * nested_forall_info,
2433 stmtblock_t * block)
2447 tree tmp, tmp1, ptemp1;
2449 count = gfc_create_var (gfc_array_index_type, "count");
2450 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2452 inner_size = integer_one_node;
2453 lss = gfc_walk_expr (expr1);
2454 rss = gfc_walk_expr (expr2);
/* Scalar pointer assignment: store the rhs address per iteration. */
2455 if (lss == gfc_ss_terminator)
2457 type = gfc_typenode_for_spec (&expr1->ts);
2458 type = build_pointer_type (type);
2460 /* Allocate temporary for nested forall construct according to the
2461 information in nested_forall_info and inner_size. */
2462 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2463 inner_size, NULL, block, &ptemp1);
/* Pass 1: tmp1[count++] = address of expr2 in each iteration. */
2464 gfc_start_block (&body);
2465 gfc_init_se (&lse, NULL);
2466 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2467 gfc_init_se (&rse, NULL);
2468 rse.want_pointer = 1;
2469 gfc_conv_expr (&rse, expr2);
2470 gfc_add_block_to_block (&body, &rse.pre);
2471 gfc_add_modify_expr (&body, lse.expr,
2472 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2473 gfc_add_block_to_block (&body, &rse.post);
2475 /* Increment count. */
2476 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2477 count, gfc_index_one_node);
2478 gfc_add_modify_expr (&body, count, tmp);
2480 tmp = gfc_finish_block (&body);
2482 /* Generate body and loops according to the information in
2483 nested_forall_info. */
2484 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2485 gfc_add_expr_to_block (block, tmp);
/* Pass 2: replay the nest, assigning tmp1[count++] to the lhs
   pointer in each iteration. */
2488 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2490 gfc_start_block (&body);
2491 gfc_init_se (&lse, NULL);
2492 gfc_init_se (&rse, NULL);
2493 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2494 lse.want_pointer = 1;
2495 gfc_conv_expr (&lse, expr1);
2496 gfc_add_block_to_block (&body, &lse.pre);
2497 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2498 gfc_add_block_to_block (&body, &lse.post);
2499 /* Increment count. */
2500 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2501 count, gfc_index_one_node);
2502 gfc_add_modify_expr (&body, count, tmp);
2503 tmp = gfc_finish_block (&body);
2505 /* Generate body and loops according to the information in
2506 nested_forall_info. */
2507 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2508 gfc_add_expr_to_block (block, tmp);
/* Array pointer assignment: store a full array descriptor for the rhs
   section in each iteration instead of a bare pointer. */
2512 gfc_init_loopinfo (&loop);
2514 /* Associate the SS with the loop. */
2515 gfc_add_ss_to_loop (&loop, rss);
2517 /* Setup the scalarizing loops and bounds. */
2518 gfc_conv_ss_startstride (&loop);
2520 gfc_conv_loop_setup (&loop);
2522 info = &rss->data.info;
2523 desc = info->descriptor;
2525 /* Make a new descriptor. */
2526 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2527 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2528 loop.from, loop.to, 1,
2531 /* Allocate temporary for nested forall construct. */
2532 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2533 inner_size, NULL, block, &ptemp1);
/* Pass 1: fill tmp1[count++] with the rhs descriptor (direct_byref
   makes gfc_conv_expr_descriptor write straight into lse.expr). */
2534 gfc_start_block (&body);
2535 gfc_init_se (&lse, NULL);
2536 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2537 lse.direct_byref = 1;
2538 rss = gfc_walk_expr (expr2);
2539 gfc_conv_expr_descriptor (&lse, expr2, rss);
2541 gfc_add_block_to_block (&body, &lse.pre);
2542 gfc_add_block_to_block (&body, &lse.post);
2544 /* Increment count. */
2545 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2546 count, gfc_index_one_node);
2547 gfc_add_modify_expr (&body, count, tmp);
2549 tmp = gfc_finish_block (&body);
2551 /* Generate body and loops according to the information in
2552 nested_forall_info. */
2553 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2554 gfc_add_expr_to_block (block, tmp);
/* Pass 2: assign each saved descriptor to the lhs pointer. */
2557 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2559 parm = gfc_build_array_ref (tmp1, count, NULL);
2560 lss = gfc_walk_expr (expr1);
2561 gfc_init_se (&lse, NULL);
2562 gfc_conv_expr_descriptor (&lse, expr1, lss);
2563 gfc_add_modify_expr (&lse.pre, lse.expr, parm);
2564 gfc_start_block (&body);
2565 gfc_add_block_to_block (&body, &lse.pre);
2566 gfc_add_block_to_block (&body, &lse.post);
2568 /* Increment count. */
2569 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2570 count, gfc_index_one_node);
2571 gfc_add_modify_expr (&body, count, tmp);
2573 tmp = gfc_finish_block (&body);
2575 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2576 gfc_add_expr_to_block (block, tmp);
2578 /* Free the temporary. */
2581 tmp = gfc_call_free (ptemp1);
2582 gfc_add_expr_to_block (block, tmp);
2587 /* FORALL and WHERE statements are really nasty, especially when you nest
2588 them. All the rhs of a forall assignment must be evaluated before the
2589 actual assignments are performed. Presumably this also applies to all the
2590 assignments in an inner where statement. */
2592 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2593 linear array, relying on the fact that we process in the same order in all
2596 forall (i=start:end:stride; maskexpr)
2600 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2602 count = ((end + 1 - start) / stride)
2603 masktmp(:) = maskexpr(:)
2606 for (i = start; i <= end; i += stride)
2608 if (masktmp[maskindex++])
2612 for (i = start; i <= end; i += stride)
2614 if (masktmp[maskindex++])
2618 Note that this code only works when there are no dependencies.
2619 Forall loop with array assignments and data dependencies are a real pain,
2620 because the size of the temporary cannot always be determined before the
2621 loop is executed. This problem is compounded by the presence of nested
/* Translate one level of a FORALL construct: create shadow index
   variables, compute the iteration-space size, evaluate the optional
   mask into a compact temporary, link this level into
   NESTED_FORALL_INFO, and translate each statement of the FORALL body.
   Returns the generated statement tree.  Recurses (via itself and
   gfc_trans_where_2) for nested FORALL/WHERE constructs.  */
2626 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2646 gfc_forall_iterator *fa;
2649 gfc_saved_var *saved_vars;
2650 iter_info *this_forall;
2654 /* Do nothing if the mask is false. */
2656 && code->expr->expr_type == EXPR_CONSTANT
2657 && !code->expr->value.logical)
2658 return build_empty_stmt ();
2661 /* Count the FORALL index number. */
2662 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2666 /* Allocate the space for var, start, end, step, varexpr. */
2667 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2668 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2669 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2670 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2671 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2672 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2674 /* Allocate the space for info. */
2675 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2677 gfc_start_block (&pre);
2678 gfc_init_block (&post);
2679 gfc_init_block (&block);
/* For each FORALL index, build a private temporary that shadows the
   user's index symbol for the duration of the construct.  */
2682 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2684 gfc_symbol *sym = fa->var->symtree->n.sym;
2686 /* Allocate space for this_forall. */
2687 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2689 /* Create a temporary variable for the FORALL index. */
2690 tmp = gfc_typenode_for_spec (&sym->ts);
2691 var[n] = gfc_create_var (tmp, sym->name);
2692 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2694 /* Record it in this_forall. */
2695 this_forall->var = var[n];
2697 /* Replace the index symbol's backend_decl with the temporary decl. */
2698 sym->backend_decl = var[n];
2700 /* Work out the start, end and stride for the loop. */
2701 gfc_init_se (&se, NULL);
2702 gfc_conv_expr_val (&se, fa->start);
2703 /* Record it in this_forall. */
2704 this_forall->start = se.expr;
2705 gfc_add_block_to_block (&block, &se.pre);
2708 gfc_init_se (&se, NULL);
2709 gfc_conv_expr_val (&se, fa->end);
2710 /* Record it in this_forall. */
2711 this_forall->end = se.expr;
2712 gfc_make_safe_expr (&se);
2713 gfc_add_block_to_block (&block, &se.pre);
2716 gfc_init_se (&se, NULL);
2717 gfc_conv_expr_val (&se, fa->stride);
2718 /* Record it in this_forall. */
2719 this_forall->step = se.expr;
2720 gfc_make_safe_expr (&se);
2721 gfc_add_block_to_block (&block, &se.pre);
2724 /* Set the NEXT field of this_forall to NULL. */
2725 this_forall->next = NULL;
2726 /* Link this_forall to the info construct. */
2727 if (info->this_loop)
2729 iter_info *iter_tmp = info->this_loop;
/* Append at the tail so iterators stay in declaration order.  */
2730 while (iter_tmp->next != NULL)
2731 iter_tmp = iter_tmp->next;
2732 iter_tmp->next = this_forall;
2735 info->this_loop = this_forall;
2741 /* Calculate the size needed for the current forall level. */
2742 size = gfc_index_one_node;
2743 for (n = 0; n < nvar; n++)
2745 /* size = (end + step - start) / step. */
2746 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2748 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp)
2750 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2751 tmp = convert (gfc_array_index_type, tmp);
/* Total size is the product of all per-index trip counts.  */
2753 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2756 /* Record the nvar and size of current forall level. */
2762 /* If the mask is .true., consider the FORALL unconditional. */
2763 if (code->expr->expr_type == EXPR_CONSTANT
2764 && code->expr->value.logical)
2772 /* First we need to allocate the mask. */
2775 /* As the mask array can be very big, prefer compact boolean types. */
2776 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2777 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2778 size, NULL, &block, &pmask);
2779 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2781 /* Record them in the info structure. */
2782 info->maskindex = maskindex;
2787 /* No mask was specified. */
2788 maskindex = NULL_TREE;
2789 mask = pmask = NULL_TREE;
2792 /* Link the current forall level to nested_forall_info. */
2793 info->prev_nest = nested_forall_info;
2794 nested_forall_info = info;
2796 /* Copy the mask into a temporary variable if required.
2797 For now we assume a mask temporary is needed. */
2800 /* As the mask array can be very big, prefer compact boolean types. */
2801 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2803 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2805 /* Start of mask assignment loop body. */
2806 gfc_start_block (&body);
2808 /* Evaluate the mask expression. */
2809 gfc_init_se (&se, NULL);
2810 gfc_conv_expr_val (&se, code->expr);
2811 gfc_add_block_to_block (&body, &se.pre);
2813 /* Store the mask. */
2814 se.expr = convert (mask_type, se.expr);
2816 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2817 gfc_add_modify_expr (&body, tmp, se.expr);
2819 /* Advance to the next mask element. */
2820 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
2821 maskindex, gfc_index_one_node);
2822 gfc_add_modify_expr (&body, maskindex, tmp);
2824 /* Generate the loops. */
2825 tmp = gfc_finish_block (&body);
2826 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2827 gfc_add_expr_to_block (&block, tmp);
2830 c = code->block->next;
2832 /* TODO: loop merging in FORALL statements. */
2833 /* Now that we've got a copy of the mask, generate the assignment loops. */
/* Dispatch on the kind of statement in the FORALL body.
   NOTE(review): the switch header and several case labels are not
   visible in this excerpt.  */
2839 /* A scalar or array assignment. DO the simple check for
2840 lhs to rhs dependencies. These make a temporary for the
2841 rhs and form a second forall block to copy to variable. */
2842 need_temp = check_forall_dependencies(c, &pre, &post);
2844 /* Temporaries due to array assignment data dependencies introduce
2845 no end of problems. */
2847 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2848 nested_forall_info, &block);
2851 /* Use the normal assignment copying routines. */
2852 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2854 /* Generate body and loops. */
2855 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2857 gfc_add_expr_to_block (&block, tmp);
2860 /* Cleanup any temporary symtrees that have been made to deal
2861 with dependencies. */
2863 cleanup_forall_symtrees (c);
2868 /* Translate WHERE or WHERE construct nested in FORALL. */
2869 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2872 /* Pointer assignment inside FORALL. */
2873 case EXEC_POINTER_ASSIGN:
2874 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2876 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2877 nested_forall_info, &block);
2880 /* Use the normal assignment copying routines. */
2881 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2883 /* Generate body and loops. */
2884 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2886 gfc_add_expr_to_block (&block, tmp);
/* Nested FORALL: recurse with the current nesting info.  */
2891 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2892 gfc_add_expr_to_block (&block, tmp);
2895 /* Explicit subroutine calls are prevented by the frontend but interface
2896 assignments can legitimately produce them. */
2897 case EXEC_ASSIGN_CALL:
2898 assign = gfc_trans_call (c, true);
2899 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
2900 gfc_add_expr_to_block (&block, tmp);
2910 /* Restore the original index variables. */
2911 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2912 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2914 /* Free the space for var, start, end, step, varexpr. */
2920 gfc_free (saved_vars);
2922 /* Free the space for this forall_info. */
2927 /* Free the temporary for the mask. */
2928 tmp = gfc_call_free (pmask);
2929 gfc_add_expr_to_block (&block, tmp);
2932 pushdecl (maskindex);
2934 gfc_add_block_to_block (&pre, &block);
2935 gfc_add_block_to_block (&pre, &post);
2937 return gfc_finish_block (&pre);
2941 /* Translate the FORALL statement or construct. */
/* Entry point for FORALL translation: delegate to gfc_trans_forall_1
   with no enclosing nesting info (NULL = outermost level).  */
2943 tree gfc_trans_forall (gfc_code * code)
2945 return gfc_trans_forall_1 (code, NULL);
2949 /* Evaluate the WHERE mask expression, copy its value to a temporary.
2950 If the WHERE construct is nested in FORALL, compute the overall temporary
2951 needed by the WHERE mask expression multiplied by the iterator number of
2953 ME is the WHERE mask expression.
2954 MASK is the current execution mask upon input, whose sense may or may
2955 not be inverted as specified by the INVERT argument.
2956 CMASK is the updated execution mask on output, or NULL if not required.
2957 PMASK is the pending execution mask on output, or NULL if not required.
2958 BLOCK is the block in which to place the condition evaluation loops. */
/* Evaluate WHERE mask expression ME once into linear mask temporaries.
   MASK is the incoming control mask (sense given by INVERT); CMASK
   receives mask&cond, PMASK receives mask&!cond; either may be
   NULL_TREE if not needed.  The filling code is appended to BLOCK,
   wrapped in the FORALL nest loops when NESTED_FORALL_INFO is set.  */
2961 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2962 tree mask, bool invert, tree cmask, tree pmask,
2963 tree mask_type, stmtblock_t * block)
2968 stmtblock_t body, body1;
2969 tree count, cond, mtmp;
2972 gfc_init_loopinfo (&loop);
/* Walk ME twice: one chain for shape info (lss), one to scalarize (rss).  */
2974 lss = gfc_walk_expr (me);
2975 rss = gfc_walk_expr (me);
2977 /* Variable to index the temporary. */
2978 count = gfc_create_var (gfc_array_index_type, "count");
2979 /* Initialize count. */
2980 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2982 gfc_start_block (&body);
2984 gfc_init_se (&rse, NULL);
2985 gfc_init_se (&lse, NULL);
/* Scalar mask expressions need no scalarization loop.  */
2987 if (lss == gfc_ss_terminator)
2989 gfc_init_block (&body1);
2993 /* Initialize the loop. */
2994 gfc_init_loopinfo (&loop);
2996 /* We may need LSS to determine the shape of the expression. */
2997 gfc_add_ss_to_loop (&loop, lss);
2998 gfc_add_ss_to_loop (&loop, rss);
3000 gfc_conv_ss_startstride (&loop);
3001 gfc_conv_loop_setup (&loop);
3003 gfc_mark_ss_chain_used (rss, 1);
3004 /* Start the loop body. */
3005 gfc_start_scalarized_body (&loop, &body1);
3007 /* Translate the expression. */
3008 gfc_copy_loopinfo_to_se (&rse, &loop);
3010 gfc_conv_expr (&rse, me);
3013 /* Variable to evaluate mask condition. */
3014 cond = gfc_create_var (mask_type, "cond");
/* mtmp caches the (possibly inverted) incoming mask element.  */
3015 if (mask && (cmask || pmask))
3016 mtmp = gfc_create_var (mask_type, "mask");
3017 else mtmp = NULL_TREE;
3019 gfc_add_block_to_block (&body1, &lse.pre);
3020 gfc_add_block_to_block (&body1, &rse.pre);
3022 gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));
3024 if (mask && (cmask || pmask))
3026 tmp = gfc_build_array_ref (mask, count, NULL);
/* Apply INVERT to the incoming mask element before combining.  */
3028 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3029 gfc_add_modify_expr (&body1, mtmp, tmp);
/* cmask[count] = mask & cond.  */
3034 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3037 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3038 gfc_add_modify_expr (&body1, tmp1, tmp);
/* pmask[count] = mask & !cond.  */
3043 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3044 tmp = build1 (TRUTH_NOT_EXPR, mask_type, cond);
3046 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3047 gfc_add_modify_expr (&body1, tmp1, tmp);
3050 gfc_add_block_to_block (&body1, &lse.post);
3051 gfc_add_block_to_block (&body1, &rse.post);
3053 if (lss == gfc_ss_terminator)
3055 gfc_add_block_to_block (&body, &body1);
3059 /* Increment count. */
3060 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3061 gfc_index_one_node);
3062 gfc_add_modify_expr (&body1, count, tmp1);
3064 /* Generate the copying loops. */
3065 gfc_trans_scalarizing_loops (&loop, &body1);
3067 gfc_add_block_to_block (&body, &loop.pre);
3068 gfc_add_block_to_block (&body, &loop.post);
3070 gfc_cleanup_loop (&loop);
3071 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3072 as tree nodes in SS may not be valid in different scope. */
3075 tmp1 = gfc_finish_block (&body);
3076 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3077 if (nested_forall_info != NULL)
3078 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3080 gfc_add_expr_to_block (block, tmp1);
3084 /* Translate an assignment statement in a WHERE statement or construct
3085 statement. The MASK expression is used to control which elements
3086 of EXPR1 shall be assigned. The sense of MASK is specified by
/* Translate one masked assignment EXPR1 = EXPR2 inside WHERE.  MASK is
   the linear control mask indexed by COUNT1/COUNT2 (sense given by
   INVERT); each element of EXPR1 is assigned only where the mask
   allows.  Returns the finished statement block.
   NOTE(review): the full parameter list is truncated in this excerpt;
   a symbol for operator assignment (cnext->resolved_sym at the call
   sites) is presumably also passed -- confirm against the real file.  */
3090 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3091 tree mask, bool invert,
3092 tree count1, tree count2,
3098 gfc_ss *lss_section;
3105 tree index, maskexpr;
3108 /* TODO: handle this special case.
3109 Special case a single function returning an array. */
3110 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3112 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3118 /* Assignment of the form lhs = rhs. */
3119 gfc_start_block (&block);
3121 gfc_init_se (&lse, NULL);
3122 gfc_init_se (&rse, NULL);
3125 lss = gfc_walk_expr (expr1);
3128 /* In each where-assign-stmt, the mask-expr and the variable being
3129 defined shall be arrays of the same shape. */
3130 gcc_assert (lss != gfc_ss_terminator);
3132 /* The assignment needs scalarization. */
3135 /* Find a non-scalar SS from the lhs. */
3136 while (lss_section != gfc_ss_terminator
3137 && lss_section->type != GFC_SS_SECTION)
3138 lss_section = lss_section->next;
3140 gcc_assert (lss_section != gfc_ss_terminator);
3142 /* Initialize the scalarizer. */
3143 gfc_init_loopinfo (&loop);
3146 rss = gfc_walk_expr (expr2);
3147 if (rss == gfc_ss_terminator)
3149 /* The rhs is scalar. Add a ss for the expression. */
3150 rss = gfc_get_ss ();
3151 rss->next = gfc_ss_terminator;
3152 rss->type = GFC_SS_SCALAR;
3156 /* Associate the SS with the loop. */
3157 gfc_add_ss_to_loop (&loop, lss);
3158 gfc_add_ss_to_loop (&loop, rss);
3160 /* Calculate the bounds of the scalarization. */
3161 gfc_conv_ss_startstride (&loop);
3163 /* Resolve any data dependencies in the statement. */
3164 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3166 /* Setup the scalarizing loops. */
3167 gfc_conv_loop_setup (&loop);
3169 /* Setup the gfc_se structures. */
3170 gfc_copy_loopinfo_to_se (&lse, &loop);
3171 gfc_copy_loopinfo_to_se (&rse, &loop);
3174 gfc_mark_ss_chain_used (rss, 1);
/* loop.temp_ss non-NULL means the dependency resolver inserted a
   temporary; the lhs is then written via that temp.  */
3175 if (loop.temp_ss == NULL)
3178 gfc_mark_ss_chain_used (lss, 1);
3182 lse.ss = loop.temp_ss;
3183 gfc_mark_ss_chain_used (lss, 3);
3184 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3187 /* Start the scalarized loop body. */
3188 gfc_start_scalarized_body (&loop, &body);
3190 /* Translate the expression. */
3191 gfc_conv_expr (&rse, expr2);
3192 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3194 gfc_conv_tmp_array_ref (&lse);
3195 gfc_advance_se_ss_chain (&lse);
3198 gfc_conv_expr (&lse, expr1);
3200 /* Form the mask expression according to the mask. */
3202 maskexpr = gfc_build_array_ref (mask, index, NULL);
3204 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3206 /* Use the scalar assignment as is. */
3208 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3209 loop.temp_ss != NULL, false);
3211 tmp = gfc_conv_operator_assign (&lse, &rse, sym);
/* Guard the assignment with the mask element.  */
3213 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3215 gfc_add_expr_to_block (&body, tmp);
3217 if (lss == gfc_ss_terminator)
3219 /* Increment count1. */
3220 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3221 count1, gfc_index_one_node);
3222 gfc_add_modify_expr (&body, count1, tmp);
3224 /* Use the scalar assignment as is. */
3225 gfc_add_block_to_block (&block, &body);
3229 gcc_assert (lse.ss == gfc_ss_terminator
3230 && rse.ss == gfc_ss_terminator);
3232 if (loop.temp_ss != NULL)
3234 /* Increment count1 before finish the main body of a scalarized
3236 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3237 count1, gfc_index_one_node);
3238 gfc_add_modify_expr (&body, count1, tmp);
3239 gfc_trans_scalarized_loop_boundary (&loop, &body);
3241 /* We need to copy the temporary to the actual lhs. */
3242 gfc_init_se (&lse, NULL);
3243 gfc_init_se (&rse, NULL);
3244 gfc_copy_loopinfo_to_se (&lse, &loop);
3245 gfc_copy_loopinfo_to_se (&rse, &loop);
3247 rse.ss = loop.temp_ss;
3250 gfc_conv_tmp_array_ref (&rse);
3251 gfc_advance_se_ss_chain (&rse);
3252 gfc_conv_expr (&lse, expr1);
3254 gcc_assert (lse.ss == gfc_ss_terminator
3255 && rse.ss == gfc_ss_terminator);
3257 /* Form the mask expression according to the mask tree list. */
3259 maskexpr = gfc_build_array_ref (mask, index, NULL);
3261 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3264 /* Use the scalar assignment as is. */
3265 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3266 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3267 gfc_add_expr_to_block (&body, tmp);
3269 /* Increment count2. */
3270 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3271 count2, gfc_index_one_node);
3272 gfc_add_modify_expr (&body, count2, tmp);
3276 /* Increment count1. */
3277 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3278 count1, gfc_index_one_node);
3279 gfc_add_modify_expr (&body, count1, tmp);
3282 /* Generate the copying loops. */
3283 gfc_trans_scalarizing_loops (&loop, &body);
3285 /* Wrap the whole thing up. */
3286 gfc_add_block_to_block (&block, &loop.pre);
3287 gfc_add_block_to_block (&block, &loop.post);
3288 gfc_cleanup_loop (&loop);
3291 return gfc_finish_block (&block);
3295 /* Translate the WHERE construct or statement.
3296 This function can be called iteratively to translate the nested WHERE
3297 construct or statement.
3298 MASK is the control mask. */
/* Translate a WHERE statement or construct (possibly nested in FORALL
   and/or other WHEREs), appending the generated code to BLOCK.
   MASK/INVERT give the incoming control mask; this routine decides
   which control (cmask) and pending (pmask) mask temporaries it needs,
   evaluates each clause's mask-expr once, and translates the clause
   bodies, recursing for nested WHEREs.  */
3301 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3302 forall_info * nested_forall_info, stmtblock_t * block)
3304 stmtblock_t inner_size_body;
3305 tree inner_size, size;
3313 tree count1, count2;
3317 tree pcmask = NULL_TREE;
3318 tree ppmask = NULL_TREE;
3319 tree cmask = NULL_TREE;
3320 tree pmask = NULL_TREE;
3321 gfc_actual_arglist *arg;
3323 /* the WHERE statement or the WHERE construct statement. */
3324 cblock = code->block;
3326 /* As the mask array can be very big, prefer compact boolean types. */
3327 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3329 /* Determine which temporary masks are needed. */
3332 /* One clause: No ELSEWHEREs. */
3333 need_cmask = (cblock->next != 0);
3336 else if (cblock->block->block)
3338 /* Three or more clauses: Conditional ELSEWHEREs. */
3342 else if (cblock->next)
3344 /* Two clauses, the first non-empty. */
3346 need_pmask = (mask != NULL_TREE
3347 && cblock->block->next != 0);
3349 else if (!cblock->block->next)
3351 /* Two clauses, both empty. */
3355 /* Two clauses, the first empty, the second non-empty. */
3358 need_cmask = (cblock->block->expr != 0);
3367 if (need_cmask || need_pmask)
3369 /* Calculate the size of temporary needed by the mask-expr. */
3370 gfc_init_block (&inner_size_body);
3371 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3372 &inner_size_body, &lss, &rss);
3374 /* Calculate the total size of temporary needed. */
3375 size = compute_overall_iter_number (nested_forall_info, inner_size,
3376 &inner_size_body, block);
3378 /* Allocate temporary for WHERE mask if needed. */
3380 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3383 /* Allocate temporary for !mask if needed. */
3385 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3391 /* Each time around this loop, the where clause is conditional
3392 on the value of mask and invert, which are updated at the
3393 bottom of the loop. */
3395 /* Has mask-expr. */
3398 /* Ensure that the WHERE mask will be evaluated exactly once.
3399 If there are no statements in this WHERE/ELSEWHERE clause,
3400 then we don't need to update the control mask (cmask).
3401 If this is the last clause of the WHERE construct, then
3402 we don't need to update the pending control mask (pmask). */
3404 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3406 cblock->next ? cmask : NULL_TREE,
3407 cblock->block ? pmask : NULL_TREE,
3410 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3412 (cblock->next || cblock->block)
3413 ? cmask : NULL_TREE,
3414 NULL_TREE, mask_type, block);
3418 /* It's a final elsewhere-stmt. No mask-expr is present. */
3422 /* The body of this where clause are controlled by cmask with
3423 sense specified by invert. */
3425 /* Get the assignment statement of a WHERE statement, or the first
3426 statement in where-body-construct of a WHERE construct. */
3427 cnext = cblock->next;
3432 /* WHERE assignment statement. */
3433 case EXEC_ASSIGN_CALL:
/* Interface-assignment call: extract lhs/rhs from the actual args.  */
3435 arg = cnext->ext.actual;
3436 expr1 = expr2 = NULL;
3437 for (; arg; arg = arg->next)
3449 expr1 = cnext->expr;
3450 expr2 = cnext->expr2;
3452 if (nested_forall_info != NULL)
3454 need_temp = gfc_check_dependency (expr1, expr2, 0);
3455 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3456 gfc_trans_assign_need_temp (expr1, expr2,
3458 nested_forall_info, block);
3461 /* Variables to control maskexpr. */
3462 count1 = gfc_create_var (gfc_array_index_type, "count1");
3463 count2 = gfc_create_var (gfc_array_index_type, "count2");
3464 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3465 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3467 tmp = gfc_trans_where_assign (expr1, expr2,
3470 cnext->resolved_sym);
/* Inside FORALL, wrap the masked assignment in the nest loops.  */
3472 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3474 gfc_add_expr_to_block (block, tmp);
3479 /* Variables to control maskexpr. */
3480 count1 = gfc_create_var (gfc_array_index_type, "count1");
3481 count2 = gfc_create_var (gfc_array_index_type, "count2");
3482 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3483 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3485 tmp = gfc_trans_where_assign (expr1, expr2,
3488 cnext->resolved_sym);
3489 gfc_add_expr_to_block (block, tmp);
3494 /* WHERE or WHERE construct is part of a where-body-construct. */
3496 gfc_trans_where_2 (cnext, cmask, invert,
3497 nested_forall_info, block);
3504 /* The next statement within the same where-body-construct. */
3505 cnext = cnext->next;
3507 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3508 cblock = cblock->block;
3509 if (mask == NULL_TREE)
3511 /* If we're the initial WHERE, we can simply invert the sense
3512 of the current mask to obtain the "mask" for the remaining
3519 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3525 /* If we allocated a pending mask array, deallocate it now. */
3528 tmp = gfc_call_free (ppmask);
3529 gfc_add_expr_to_block (block, tmp);
3532 /* If we allocated a current mask array, deallocate it now. */
3535 tmp = gfc_call_free (pcmask);
3536 gfc_add_expr_to_block (block, tmp);
3540 /* Translate a simple WHERE construct or statement without dependencies.
3541 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3542 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3543 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
/* Fast path: translate a dependence-free WHERE (and optional final
   ELSEWHERE) whose clauses each contain a single assignment, as one
   scalarized loop with a per-element COND_EXPR -- no mask temporaries.
   Returns the finished statement block.  */
3546 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3548 stmtblock_t block, body;
3549 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3550 tree tmp, cexpr, tstmt, estmt;
3551 gfc_ss *css, *tdss, *tsss;
3552 gfc_se cse, tdse, tsse, edse, esse;
/* cond: WHERE mask; tdst/tsrc: then-clause lhs/rhs;
   edst/esrc: else-clause lhs/rhs (NULL without ELSEWHERE).  */
3557 cond = cblock->expr;
3558 tdst = cblock->next->expr;
3559 tsrc = cblock->next->expr2;
3560 edst = eblock ? eblock->next->expr : NULL;
3561 esrc = eblock ? eblock->next->expr2 : NULL;
3563 gfc_start_block (&block);
3564 gfc_init_loopinfo (&loop);
3566 /* Handle the condition. */
3567 gfc_init_se (&cse, NULL);
3568 css = gfc_walk_expr (cond);
3569 gfc_add_ss_to_loop (&loop, css);
3571 /* Handle the then-clause. */
3572 gfc_init_se (&tdse, NULL);
3573 gfc_init_se (&tsse, NULL);
3574 tdss = gfc_walk_expr (tdst);
3575 tsss = gfc_walk_expr (tsrc);
3576 if (tsss == gfc_ss_terminator)
/* Scalar rhs: wrap it in a scalar SS so it joins the loop.  */
3578 tsss = gfc_get_ss ();
3579 tsss->next = gfc_ss_terminator;
3580 tsss->type = GFC_SS_SCALAR;
3583 gfc_add_ss_to_loop (&loop, tdss);
3584 gfc_add_ss_to_loop (&loop, tsss);
3588 /* Handle the else clause. */
3589 gfc_init_se (&edse, NULL);
3590 gfc_init_se (&esse, NULL);
3591 edss = gfc_walk_expr (edst);
3592 esss = gfc_walk_expr (esrc);
3593 if (esss == gfc_ss_terminator)
3595 esss = gfc_get_ss ();
3596 esss->next = gfc_ss_terminator;
3597 esss->type = GFC_SS_SCALAR;
3600 gfc_add_ss_to_loop (&loop, edss);
3601 gfc_add_ss_to_loop (&loop, esss);
3604 gfc_conv_ss_startstride (&loop);
3605 gfc_conv_loop_setup (&loop);
3607 gfc_mark_ss_chain_used (css, 1);
3608 gfc_mark_ss_chain_used (tdss, 1);
3609 gfc_mark_ss_chain_used (tsss, 1);
3612 gfc_mark_ss_chain_used (edss, 1);
3613 gfc_mark_ss_chain_used (esss, 1);
3616 gfc_start_scalarized_body (&loop, &body);
3618 gfc_copy_loopinfo_to_se (&cse, &loop);
3619 gfc_copy_loopinfo_to_se (&tdse, &loop);
3620 gfc_copy_loopinfo_to_se (&tsse, &loop);
3626 gfc_copy_loopinfo_to_se (&edse, &loop);
3627 gfc_copy_loopinfo_to_se (&esse, &loop);
3632 gfc_conv_expr (&cse, cond);
3633 gfc_add_block_to_block (&body, &cse.pre);
3636 gfc_conv_expr (&tsse, tsrc);
3637 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3639 gfc_conv_tmp_array_ref (&tdse);
3640 gfc_advance_se_ss_chain (&tdse);
3643 gfc_conv_expr (&tdse, tdst);
3647 gfc_conv_expr (&esse, esrc);
3648 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3650 gfc_conv_tmp_array_ref (&edse);
3651 gfc_advance_se_ss_chain (&edse);
3654 gfc_conv_expr (&edse, edst);
/* Per element: if (cond) then-assign else else-assign (or no-op).  */
3657 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3658 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3659 : build_empty_stmt ();
3660 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3661 gfc_add_expr_to_block (&body, tmp);
3662 gfc_add_block_to_block (&body, &cse.post);
3664 gfc_trans_scalarizing_loops (&loop, &body);
3665 gfc_add_block_to_block (&block, &loop.pre);
3666 gfc_add_block_to_block (&block, &loop.post);
3667 gfc_cleanup_loop (&loop);
3669 return gfc_finish_block (&block);
3672 /* As the WHERE or WHERE construct statement can be nested, we call
3673 gfc_trans_where_2 to do the translation, and pass the initial
3674 NULL values for both the control mask and the pending control mask. */
/* Entry point for WHERE translation.  Use the fast, temporary-free
   gfc_trans_where_3 path for simple dependence-free one- or two-clause
   constructs; otherwise fall back to the general gfc_trans_where_2
   with NULL initial control/pending masks.  */
3677 gfc_trans_where (gfc_code * code)
3683 cblock = code->block;
3685 && cblock->next->op == EXEC_ASSIGN
3686 && !cblock->next->next)
3688 eblock = cblock->block;
3691 /* A simple "WHERE (cond) x = y" statement or block is
3692 dependence free if cond is not dependent upon writing x,
3693 and the source y is unaffected by the destination x. */
3694 if (!gfc_check_dependency (cblock->next->expr,
3696 && !gfc_check_dependency (cblock->next->expr,
3697 cblock->next->expr2, 0))
3698 return gfc_trans_where_3 (cblock, NULL);
3700 else if (!eblock->expr
3703 && eblock->next->op == EXEC_ASSIGN
3704 && !eblock->next->next)
3706 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3707 block is dependence free if cond is not dependent on writes
3708 to x1 and x2, y1 is not dependent on writes to x2, and y2
3709 is not dependent on writes to x1, and both y's are not
3710 dependent upon their own x's. */
3711 if (!gfc_check_dependency(cblock->next->expr,
3713 && !gfc_check_dependency(eblock->next->expr,
3715 && !gfc_check_dependency(cblock->next->expr,
3716 eblock->next->expr2, 0)
3717 && !gfc_check_dependency(eblock->next->expr,
3718 cblock->next->expr2, 0)
3719 && !gfc_check_dependency(cblock->next->expr,
3720 cblock->next->expr2, 0)
3721 && !gfc_check_dependency(eblock->next->expr,
3722 eblock->next->expr2, 0))
3723 return gfc_trans_where_3 (cblock, eblock);
/* General case: masks required.  */
3727 gfc_start_block (&block);
3729 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3731 return gfc_finish_block (&block);
3735 /* CYCLE a DO loop. The label decl has already been created by
3736 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
3737 node at the head of the loop. We must mark the label as used. */
/* Translate CYCLE: emit a GOTO to the loop's cycle label, which
   gfc_trans_do stored in TREE_PURPOSE of the loop's backend_decl.
   Marking the label TREE_USED keeps it from being warned about.  */
3740 gfc_trans_cycle (gfc_code * code)
3744 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3745 TREE_USED (cycle_label) = 1;
3746 return build1_v (GOTO_EXPR, cycle_label);
3750 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3751 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
/* Translate EXIT: like CYCLE, but the target is the loop's exit label,
   stored in TREE_VALUE of the loop's backend_decl.  */
3755 gfc_trans_exit (gfc_code * code)
3759 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3760 TREE_USED (exit_label) = 1;
3761 return build1_v (GOTO_EXPR, exit_label);
3765 /* Translate the ALLOCATE statement. */
/* Translate ALLOCATE: for each object in the alloc list, generate
   either an array allocation (gfc_array_allocate) or a scalar/derived
   allocation.  With STAT=, failures branch to a shared error label and
   the status value is copied to the STAT variable at the end.  */
3768 gfc_trans_allocate (gfc_code * code)
3780 if (!code->ext.alloc_list)
3783 gfc_start_block (&block);
/* STAT= present: set up the status variable and error label.  */
3787 tree gfc_int4_type_node = gfc_get_int_type (4);
3789 stat = gfc_create_var (gfc_int4_type_node, "stat");
3790 pstat = build_fold_addr_expr (stat);
3792 error_label = gfc_build_label_decl (NULL_TREE);
3793 TREE_USED (error_label) = 1;
3796 pstat = stat = error_label = NULL_TREE;
3798 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3802 gfc_init_se (&se, NULL);
3803 gfc_start_block (&se.pre);
3805 se.want_pointer = 1;
3806 se.descriptor_only = 1;
3807 gfc_conv_expr (&se, expr);
/* gfc_array_allocate handles arrays itself and returns nonzero;
   otherwise allocate the scalar/derived object here.  */
3809 if (!gfc_array_allocate (&se, expr, pstat))
3811 /* A scalar or derived type. */
3812 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
3814 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3815 tmp = se.string_length;
3817 tmp = gfc_allocate_with_status (&se.pre, tmp, pstat);
3818 tmp = build2 (MODIFY_EXPR, void_type_node, se.expr,
3819 fold_convert (TREE_TYPE (se.expr), tmp));
3820 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT=, jump to the error label on a nonzero status.  */
3824 tmp = build1_v (GOTO_EXPR, error_label);
3825 parm = fold_build2 (NE_EXPR, boolean_type_node,
3826 stat, build_int_cst (TREE_TYPE (stat), 0));
3827 tmp = fold_build3 (COND_EXPR, void_type_node,
3828 parm, tmp, build_empty_stmt ());
3829 gfc_add_expr_to_block (&se.pre, tmp);
/* Nullify allocatable components of freshly allocated derived types.  */
3832 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3834 tmp = build_fold_indirect_ref (se.expr);
3835 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3836 gfc_add_expr_to_block (&se.pre, tmp);
3841 tmp = gfc_finish_block (&se.pre);
3842 gfc_add_expr_to_block (&block, tmp);
3845 /* Assign the value to the status variable. */
3848 tmp = build1_v (LABEL_EXPR, error_label);
3849 gfc_add_expr_to_block (&block, tmp);
3851 gfc_init_se (&se, NULL);
3852 gfc_conv_expr_lhs (&se, code->expr);
3853 tmp = convert (TREE_TYPE (se.expr), stat);
3854 gfc_add_modify_expr (&block, se.expr, tmp);
3857 return gfc_finish_block (&block);
3861 /* Translate a DEALLOCATE statement.
3862 There are two cases within the for loop:
3863 (1) deallocate(a1, a2, a3) is translated into the following sequence
3864 _gfortran_deallocate(a1, 0B)
3865 _gfortran_deallocate(a2, 0B)
3866 _gfortran_deallocate(a3, 0B)
3867 where the STAT= variable is passed a NULL pointer.
3868 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
3870 _gfortran_deallocate(a1, &stat)
3871 astat = astat + stat
3872 _gfortran_deallocate(a2, &stat)
3873 astat = astat + stat
3874 _gfortran_deallocate(a3, &stat)
3875 astat = astat + stat
3876 In case (1), we simply return at the end of the for loop. In case (2)
3877 we set STAT= astat. */
/* Translate DEALLOCATE: free each listed object (arrays via
   gfc_array_deallocate, scalars via gfc_deallocate_with_status), plus
   any allocatable components of derived types.  With STAT=, per-object
   statuses are accumulated into astat and copied to the STAT variable
   at the end; without it, NULL status pointers are passed.  */
3879 gfc_trans_deallocate (gfc_code * code)
3884 tree apstat, astat, pstat, stat, tmp;
3887 gfc_start_block (&block);
3889 /* Set up the optional STAT= */
3892 tree gfc_int4_type_node = gfc_get_int_type (4);
3894 /* Variable used with the library call. */
3895 stat = gfc_create_var (gfc_int4_type_node, "stat");
3896 pstat = build_fold_addr_expr (stat);
3898 /* Running total of possible deallocation failures. */
3899 astat = gfc_create_var (gfc_int4_type_node, "astat");
3900 apstat = build_fold_addr_expr (astat);
3902 /* Initialize astat to 0. */
3903 gfc_add_modify_expr (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
3906 pstat = apstat = stat = astat = NULL_TREE;
3908 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3911 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3913 gfc_init_se (&se, NULL);
3914 gfc_start_block (&se.pre);
3916 se.want_pointer = 1;
3917 se.descriptor_only = 1;
3918 gfc_conv_expr (&se, expr);
3920 if (expr->ts.type == BT_DERIVED
3921 && expr->ts.derived->attr.alloc_comp)
/* Find the last component ref to decide whether this is reached
   through a pointer (whose components must not be freed here).  */
3924 gfc_ref *last = NULL;
3925 for (ref = expr->ref; ref; ref = ref->next)
3926 if (ref->type == REF_COMPONENT)
3929 /* Do not deallocate the components of a derived type
3930 ultimate pointer component. */
3931 if (!(last && last->u.c.component->pointer)
3932 && !(!last && expr->symtree->n.sym->attr.pointer))
3934 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
3936 gfc_add_expr_to_block (&se.pre, tmp);
3941 tmp = gfc_array_deallocate (se.expr, pstat);
3944 tmp = gfc_deallocate_with_status (se.expr, pstat, false);
3945 gfc_add_expr_to_block (&se.pre, tmp);
/* Null the pointer after the scalar deallocation.  */
3947 tmp = build2 (MODIFY_EXPR, void_type_node,
3948 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
3951 gfc_add_expr_to_block (&se.pre, tmp);
3953 /* Keep track of the number of failed deallocations by adding stat
3954 of the last deallocation to the running total. */
3957 apstat = build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
3958 gfc_add_modify_expr (&se.pre, astat, apstat);
3961 tmp = gfc_finish_block (&se.pre);
3962 gfc_add_expr_to_block (&block, tmp);
3966 /* Assign the value to the status variable. */
3969 gfc_init_se (&se, NULL);
3970 gfc_conv_expr_lhs (&se, code->expr);
3971 tmp = convert (TREE_TYPE (se.expr), astat);
3972 gfc_add_modify_expr (&block, se.expr, tmp);
3975 return gfc_finish_block (&block);