1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
40 #include "dependency.h"
42 typedef struct iter_info
48 struct iter_info *next;
52 typedef struct forall_info
59 struct forall_info *prev_nest;
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
66 /* Translate a F95 label number to a LABEL_EXPR. */
/* Emits the GENERIC label decl for the statement label attached to
   CODE->here, so that numbered Fortran labels become jump targets.
   NOTE(review): surrounding braces/return type elided in this view.  */
69 gfc_trans_label_here (gfc_code * code)
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables. For variables in common blocks this
/* Normalizes se->expr so that GFC_DECL_STRING_LEN / GFC_DECL_ASSIGN_ADDR
   can be applied: strips a COMPONENT_REF (common block member) or an
   INDIRECT_REF (dummy argument) down to the underlying declaration.  */
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only legal on symbols resolved as ASSIGN targets.  */
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deals with variable in common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deals with dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
92 /* Translate a label assignment statement. */
/* Implements the obsolescent ASSIGN statement: stores either the address
   of a target label (with length -1 as a sentinel) or the address and
   length of a FORMAT string into the auxiliary len/addr variables of the
   ASSIGNed-to integer.  NOTE(review): several declarations and braces
   are elided from this view.  */
95 gfc_trans_label_assign (gfc_code * code)
104 /* Start a new block. */
105 gfc_init_se (&se, NULL);
106 gfc_start_block (&se.pre);
107 gfc_conv_label_variable (&se, code->expr);
109 len = GFC_DECL_STRING_LEN (se.expr);
110 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
112 label_tree = gfc_get_label_decl (code->label);
114 if (code->label->defined == ST_LABEL_TARGET)
116 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
/* len == -1 marks "this is a branch target, not a format".
   gfc_trans_goto relies on this sentinel.  */
117 len_tree = integer_minus_one_node;
121 gfc_expr *format = code->label->format;
123 label_len = format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
/* +1 for the terminating NUL of the format string constant.  */
125 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
126 format->value.character.string);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
130 gfc_add_modify (&se.pre, len, len_tree);
131 gfc_add_modify (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
136 /* Translate a GOTO statement. */
/* Handles both the plain GOTO (direct jump) and the assigned GOTO.
   For an assigned GOTO, a runtime check verifies the variable holds a
   label address (string length == -1, see gfc_trans_label_assign);
   with a label list, each candidate is compared against the stored
   address.  NOTE(review): declarations, braces and the loop header for
   the label-list walk are elided in this view.  */
139 gfc_trans_goto (gfc_code * code)
141 locus loc = code->loc;
147 if (code->label != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
/* Assigned GOTO from here on.  */
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr);
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
158 "Assigned label is not a target label");
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
/* No label list: jump indirectly through the stored address.  */
165 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
166 gfc_add_expr_to_block (&se.pre, target);
167 return gfc_finish_block (&se.pre);
170 /* Check the label list. */
173 target = gfc_get_label_decl (code->label);
174 tmp = gfc_build_addr_expr (pvoid_type_node, target);
175 tmp = fold_build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
176 tmp = build3_v (COND_EXPR, tmp,
177 fold_build1 (GOTO_EXPR, void_type_node, target),
178 build_empty_stmt ());
179 gfc_add_expr_to_block (&se.pre, tmp);
182 while (code != NULL);
/* Fell off the list: the stored label was not among the candidates.  */
183 gfc_trans_runtime_check (true, false, boolean_true_node, &se.pre, &loc,
184 "Assigned label is not in the list");
186 return gfc_finish_block (&se.pre);
190 /* Translate an ENTRY statement. Just adds a label for this entry point. */
192 gfc_trans_entry (gfc_code * code)
/* The label decl was created earlier during entry resolution.  */
194 return build1_v (LABEL_EXPR, code->ext.entry->label);
198 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
199 elemental subroutines. Make temporaries for output arguments if any such
200 dependencies are found. Output arguments are chosen because internal_unpack
201 can be used, as is, to copy the result back to the variable. */
/* Parameters:
     se     - receives temporary creation (se->pre) and the copy-back /
              cleanup code (se->post).
     loopse - scalarization state of the surrounding elemental loop.
     sym    - the resolved subroutine symbol (for its formal arglist).
     arg    - actual argument list to scan.
     check_variable - dependency-check mode (operator assignment vs. plain).
   NOTE(review): many interior lines (braces, loop over ss to find the
   matching ss for each arg, early return) are elided in this view.  */
203 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
204 gfc_symbol * sym, gfc_actual_arglist * arg,
205 gfc_dep_check check_variable)
207 gfc_actual_arglist *arg0;
209 gfc_formal_arglist *formal;
210 gfc_loopinfo tmp_loop;
/* Nothing to do unless the call is being scalarized.  */
222 if (loopse->ss == NULL)
227 formal = sym->formal;
229 /* Loop over all the arguments testing for dependencies. */
230 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
236 /* Obtain the info structure for the current argument. */
238 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
242 info = &ss->data.info;
246 /* If there is a dependency, create a temporary and use it
247 instead of the variable. */
248 fsym = formal ? formal->sym : NULL;
249 if (e->expr_type == EXPR_VARIABLE
251 && fsym->attr.intent != INTENT_IN
252 && gfc_check_fncall_dependency (e, fsym->attr.intent,
253 sym, arg0, check_variable))
256 stmtblock_t temp_post;
258 /* Make a local loopinfo for the temporary creation, so that
259 none of the other ss->info's have to be renormalized. */
260 gfc_init_loopinfo (&tmp_loop);
261 for (n = 0; n < info->dimen; n++)
263 tmp_loop.to[n] = loopse->loop->to[n];
264 tmp_loop.from[n] = loopse->loop->from[n];
265 tmp_loop.order[n] = loopse->loop->order[n];
268 /* Obtain the argument descriptor for unpacking. */
269 gfc_init_se (&parmse, NULL);
270 parmse.want_pointer = 1;
271 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
272 gfc_add_block_to_block (&se->pre, &parmse.pre);
274 /* If we've got INTENT(INOUT), initialize the array temporary with
275 a copy of the values. */
276 if (fsym->attr.intent == INTENT_INOUT)
277 initial = parmse.expr;
281 /* Generate the temporary. Merge the block so that the
282 declarations are put at the right binding level. Cleaning up the
283 temporary should be the very last thing done, so we add the code to
284 a new block and add it to se->post as last instructions. */
285 size = gfc_create_var (gfc_array_index_type, NULL);
286 data = gfc_create_var (pvoid_type_node, NULL);
287 gfc_start_block (&block);
288 gfc_init_block (&temp_post);
289 tmp = gfc_typenode_for_spec (&e->ts);
290 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
291 &tmp_loop, info, tmp,
/* Remember size and data pointer of the temporary for the unpack.  */
295 gfc_add_modify (&se->pre, size, tmp);
296 tmp = fold_convert (pvoid_type_node, info->data);
297 gfc_add_modify (&se->pre, data, tmp);
298 gfc_merge_block_scope (&block);
300 /* Calculate the offset for the temporary. */
/* offset = -sum (from[n] * stride[n]) so that the loop's lower
   bounds index element zero of the flat temporary.  */
301 offset = gfc_index_zero_node;
302 for (n = 0; n < info->dimen; n++)
304 tmp = gfc_conv_descriptor_stride (info->descriptor,
306 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
307 loopse->loop->from[n], tmp);
308 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
311 info->offset = gfc_create_var (gfc_array_index_type, NULL);
312 gfc_add_modify (&se->pre, info->offset, offset);
314 /* Copy the result back using unpack. */
315 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
316 gfc_add_expr_to_block (&se->post, tmp);
/* Cleanup of the temporary must run after the copy-back.  */
318 gfc_add_block_to_block (&se->pre, &parmse.pre);
319 gfc_add_block_to_block (&se->post, &parmse.post);
320 gfc_add_block_to_block (&se->post, &temp_post);
326 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
/* Two paths: a plain (non-elemental, or scalar-argument) call is emitted
   directly; an elemental call with array arguments is wrapped in a
   scalarization loop, with gfc_conv_elemental_dependencies supplying
   temporaries for dependent INTENT(OUT)/INOUT arguments.
   dependency_check is true for calls generated from operator assignment.
   NOTE(review): declarations, braces and some call arguments are elided
   in this view.  */
329 gfc_trans_call (gfc_code * code, bool dependency_check)
333 int has_alternate_specifier;
334 gfc_dep_check check_variable;
336 /* A CALL starts a new block because the actual arguments may have to
337 be evaluated first. */
338 gfc_init_se (&se, NULL);
339 gfc_start_block (&se.pre);
341 gcc_assert (code->resolved_sym);
343 ss = gfc_ss_terminator;
344 if (code->resolved_sym->attr.elemental)
345 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
347 /* Is not an elemental subroutine call with array valued arguments. */
348 if (ss == gfc_ss_terminator)
351 /* Translate the call. */
352 has_alternate_specifier
353 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
356 /* A subroutine without side-effect, by definition, does nothing! */
/* Force side effects so the call is not optimized away.  */
357 TREE_SIDE_EFFECTS (se.expr) = 1;
359 /* Chain the pieces together and return the block. */
360 if (has_alternate_specifier)
/* The alternate-return value feeds the SELECT that the front end
   placed immediately after the call.  */
362 gfc_code *select_code;
364 select_code = code->next;
365 gcc_assert(select_code->op == EXEC_SELECT);
366 sym = select_code->expr->symtree->n.sym;
367 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
368 if (sym->backend_decl == NULL)
369 sym->backend_decl = gfc_get_symbol_decl (sym);
370 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
373 gfc_add_expr_to_block (&se.pre, se.expr);
375 gfc_add_block_to_block (&se.pre, &se.post);
380 /* An elemental subroutine call with array valued arguments has
388 /* gfc_walk_elemental_function_args renders the ss chain in the
389 reverse order to the actual argument order. */
390 ss = gfc_reverse_ss (ss);
392 /* Initialize the loop. */
393 gfc_init_se (&loopse, NULL);
394 gfc_init_loopinfo (&loop);
395 gfc_add_ss_to_loop (&loop, ss);
397 gfc_conv_ss_startstride (&loop);
398 /* TODO: gfc_conv_loop_setup generates a temporary for vector
399 subscripts. This could be prevented in the elemental case
400 as temporaries are handled separatedly
401 (below in gfc_conv_elemental_dependencies). */
402 gfc_conv_loop_setup (&loop, &code->expr->where);
403 gfc_mark_ss_chain_used (ss, 1);
405 /* Convert the arguments, checking for dependencies. */
406 gfc_copy_loopinfo_to_se (&loopse, &loop);
409 /* For operator assignment, do dependency checking. */
410 if (dependency_check)
411 check_variable = ELEM_CHECK_VARIABLE;
413 check_variable = ELEM_DONT_CHECK_VARIABLE;
415 gfc_init_se (&depse, NULL);
416 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
417 code->ext.actual, check_variable);
/* Temporary setup goes before the loop, copy-back/cleanup after.  */
419 gfc_add_block_to_block (&loop.pre, &depse.pre);
420 gfc_add_block_to_block (&loop.post, &depse.post);
422 /* Generate the loop body. */
423 gfc_start_scalarized_body (&loop, &body);
424 gfc_init_block (&block);
426 /* Add the subroutine call to the block. */
427 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
429 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
431 gfc_add_block_to_block (&block, &loopse.pre);
432 gfc_add_block_to_block (&block, &loopse.post);
434 /* Finish up the loop block and the loop. */
435 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
436 gfc_trans_scalarizing_loops (&loop, &body);
437 gfc_add_block_to_block (&se.pre, &loop.pre);
438 gfc_add_block_to_block (&se.pre, &loop.post);
439 gfc_add_block_to_block (&se.pre, &se.post);
440 gfc_cleanup_loop (&loop);
443 return gfc_finish_block (&se.pre);
447 /* Translate the RETURN statement. */
/* A plain RETURN is a GOTO to the function's shared return label.
   An alternate return (RETURN expr in a subroutine) first stores the
   selector into the fake result variable.  NOTE(review): declarations,
   braces and the expr-NULL test are elided in this view.  */
450 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
458 /* If code->expr is not NULL, this return statement must appear
459 in a subroutine and current_fake_result_decl has already
462 result = gfc_get_fake_result_decl (NULL, 0);
/* No * dummy argument: the alternate-return value has no receiver.  */
465 gfc_warning ("An alternate return at %L without a * dummy argument",
467 return build1_v (GOTO_EXPR, gfc_get_return_label ());
470 /* Start a new block for this statement. */
471 gfc_init_se (&se, NULL);
472 gfc_start_block (&se.pre);
474 gfc_conv_expr (&se, code->expr);
476 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
477 fold_convert (TREE_TYPE (result), se.expr));
478 gfc_add_expr_to_block (&se.pre, tmp);
480 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
481 gfc_add_expr_to_block (&se.pre, tmp);
482 gfc_add_block_to_block (&se.pre, &se.post);
483 return gfc_finish_block (&se.pre);
/* Plain RETURN: just jump to the return label.  */
486 return build1_v (GOTO_EXPR, gfc_get_return_label ());
490 /* Translate the PAUSE statement. We have to translate this statement
491 to a runtime library call. */
/* Dispatches to _gfortran_pause_numeric (integer stop code) or
   _gfortran_pause_string (character expression + length).  */
494 gfc_trans_pause (gfc_code * code)
496 tree gfc_int4_type_node = gfc_get_int_type (4);
500 /* Start a new block for this statement. */
501 gfc_init_se (&se, NULL);
502 gfc_start_block (&se.pre);
/* No operand: use the numeric stop code stored by the front end.  */
505 if (code->expr == NULL)
507 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
508 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
512 gfc_conv_expr_reference (&se, code->expr);
513 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
514 se.expr, se.string_length);
517 gfc_add_expr_to_block (&se.pre, tmp);
519 gfc_add_block_to_block (&se.pre, &se.post);
521 return gfc_finish_block (&se.pre);
525 /* Translate the STOP statement. We have to translate this statement
526 to a runtime library call. */
/* Mirrors gfc_trans_pause, but calls the stop_numeric/stop_string
   library entry points, which terminate the program.  */
529 gfc_trans_stop (gfc_code * code)
531 tree gfc_int4_type_node = gfc_get_int_type (4);
535 /* Start a new block for this statement. */
536 gfc_init_se (&se, NULL);
537 gfc_start_block (&se.pre);
540 if (code->expr == NULL)
542 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
543 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
547 gfc_conv_expr_reference (&se, code->expr);
548 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
549 se.expr, se.string_length);
552 gfc_add_expr_to_block (&se.pre, tmp);
554 gfc_add_block_to_block (&se.pre, &se.post);
556 return gfc_finish_block (&se.pre);
560 /* Generate GENERIC for the IF construct. This function also deals with
561 the simple IF statement, because the front end translates the IF
562 statement into an IF construct.
594 where COND_S is the simplified version of the predicate. PRE_COND_S
595 are the pre side-effects produced by the translation of the
597 We need to build the chain recursively otherwise we run into
598 problems with folding incomplete statements. */
/* Recursive worker: CODE is one block of an IF/ELSE IF/ELSE chain;
   the ELSE branch recurses on code->block.  */
601 gfc_trans_if_1 (gfc_code * code)
606 /* Check for an unconditional ELSE clause. */
/* An ELSE block has no condition expression; just emit its body.  */
608 return gfc_trans_code (code->next);
610 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
611 gfc_init_se (&if_se, NULL);
612 gfc_start_block (&if_se.pre);
614 /* Calculate the IF condition expression. */
615 gfc_conv_expr_val (&if_se, code->expr);
617 /* Translate the THEN clause. */
618 stmt = gfc_trans_code (code->next);
620 /* Translate the ELSE clause. */
622 elsestmt = gfc_trans_if_1 (code->block);
624 elsestmt = build_empty_stmt ();
626 /* Build the condition expression and add it to the condition block. */
627 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
629 gfc_add_expr_to_block (&if_se.pre, stmt);
631 /* Finish off this statement. */
632 return gfc_finish_block (&if_se.pre);
/* Entry point for EXEC_IF: delegates to the recursive worker.  */
636 gfc_trans_if (gfc_code * code)
638 /* Ignore the top EXEC_IF, it only announces an IF construct. The
639 actual code we must translate is in code->block. */
641 return gfc_trans_if_1 (code->block);
645 /* Translate an arithmetic IF expression.
647 IF (cond) label1, label2, label3 translates to
659 An optimized version can be generated in case of equal labels.
660 E.g., if label1 is equal to label2, we can translate it to
/* Builds nested COND_EXPRs comparing the pre-evaluated selector
   against zero (<, ==, >), collapsing branches whose labels coincide.
   NOTE(review): declarations and braces are elided in this view.  */
669 gfc_trans_arithmetic_if (gfc_code * code)
677 /* Start a new block. */
678 gfc_init_se (&se, NULL);
679 gfc_start_block (&se.pre);
681 /* Pre-evaluate COND. */
682 gfc_conv_expr_val (&se, code->expr);
/* Evaluate once; the value is compared up to twice below.  */
683 se.expr = gfc_evaluate_now (se.expr, &se.pre);
685 /* Build something to compare with. */
686 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
688 if (code->label->value != code->label2->value)
690 /* If (cond < 0) take branch1 else take branch2.
691 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
692 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
693 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
695 if (code->label->value != code->label3->value)
696 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
698 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
700 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
/* label1 == label2: the < and == cases share a target.  */
703 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
705 if (code->label->value != code->label3->value
706 && code->label2->value != code->label3->value)
708 /* if (cond <= 0) take branch1 else take branch2. */
709 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
710 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
711 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
714 /* Append the COND_EXPR to the evaluation of COND, and return. */
715 gfc_add_expr_to_block (&se.pre, branch1);
716 return gfc_finish_block (&se.pre);
720 /* Translate the simple DO construct. This is where the loop variable has
721 integer type and step +-1. We can't use this in the general case
722 because integer overflow and floating point errors could give incorrect
724 We translate a do loop from:
726 DO dovar = from, to, step
732 [Evaluate loop bounds and step]
734 if ((step > 0) ? (dovar <= to) : (dovar => to))
740 cond = (dovar == to);
742 if (cond) goto end_label;
747 This helps the optimizers by avoiding the extra induction variable
748 used in the general case. */
/* PBLOCK already holds the bound/step evaluation done by gfc_trans_do;
   dovar/from/to/step are pre-evaluated trees.  The exit test
   (dovar == to) is taken BEFORE incrementing, so dovar never steps
   past `to' — this avoids wraparound for step = +-1.  */
751 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
752 tree from, tree to, tree step)
761 type = TREE_TYPE (dovar);
763 /* Initialize the DO variable: dovar = from. */
764 gfc_add_modify (pblock, dovar, from);
766 /* Cycle and exit statements are implemented with gotos. */
767 cycle_label = gfc_build_label_decl (NULL_TREE);
768 exit_label = gfc_build_label_decl (NULL_TREE);
770 /* Put the labels where they can be found later. See gfc_trans_do(). */
771 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
774 gfc_start_block (&body);
776 /* Main loop body. */
777 tmp = gfc_trans_code (code->block->next);
778 gfc_add_expr_to_block (&body, tmp);
780 /* Label for cycle statements (if needed). */
781 if (TREE_USED (cycle_label))
783 tmp = build1_v (LABEL_EXPR, cycle_label);
784 gfc_add_expr_to_block (&body, tmp);
787 /* Evaluate the loop condition. */
/* Must test BEFORE the increment so that step past `to' never occurs.  */
788 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
789 cond = gfc_evaluate_now (cond, &body);
791 /* Increment the loop variable. */
792 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
793 gfc_add_modify (&body, dovar, tmp);
796 tmp = build1_v (GOTO_EXPR, exit_label);
797 TREE_USED (exit_label) = 1;
798 tmp = fold_build3 (COND_EXPR, void_type_node,
799 cond, tmp, build_empty_stmt ());
800 gfc_add_expr_to_block (&body, tmp);
802 /* Finish the loop body. */
803 tmp = gfc_finish_block (&body);
804 tmp = build1_v (LOOP_EXPR, tmp);
806 /* Only execute the loop if the number of iterations is positive. */
/* step is +-1 here (see gfc_trans_do), so its sign is a constant.  */
807 if (tree_int_cst_sgn (step) > 0)
808 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
810 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
811 tmp = fold_build3 (COND_EXPR, void_type_node,
812 cond, tmp, build_empty_stmt ());
813 gfc_add_expr_to_block (pblock, tmp);
815 /* Add the exit label. */
816 tmp = build1_v (LABEL_EXPR, exit_label);
817 gfc_add_expr_to_block (pblock, tmp);
819 return gfc_finish_block (pblock);
822 /* Translate the DO construct. This obviously is one of the most
823 important ones to get right with any compiler, but especially
826 We special case some loop forms as described in gfc_trans_simple_do.
827 For other cases we implement them with a separate loop count,
828 as described in the standard.
830 We translate a do loop from:
832 DO dovar = from, to, step
838 [evaluate loop bounds and step]
839 empty = (step > 0 ? to < from : to > from);
840 countm1 = (to - from) / step;
842 if (empty) goto exit_label;
848 if (countm1 ==0) goto exit_label;
853 countm1 is an unsigned integer. It is equal to the loop count minus one,
854 because the loop count itself can overflow. */
/* NOTE(review): declarations and several braces are elided in this view.
   The separate unsigned countm1 induction variable makes the trip count
   immune to dovar overflow; gfc_trans_simple_do is preferred when
   step is +-1 on an integer type.  */
857 gfc_trans_do (gfc_code * code)
875 gfc_start_block (&block);
877 /* Evaluate all the expressions in the iterator. */
878 gfc_init_se (&se, NULL);
879 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
880 gfc_add_block_to_block (&block, &se.pre);
882 type = TREE_TYPE (dovar);
884 gfc_init_se (&se, NULL);
885 gfc_conv_expr_val (&se, code->ext.iterator->start);
886 gfc_add_block_to_block (&block, &se.pre);
887 from = gfc_evaluate_now (se.expr, &block);
889 gfc_init_se (&se, NULL);
890 gfc_conv_expr_val (&se, code->ext.iterator->end);
891 gfc_add_block_to_block (&block, &se.pre);
892 to = gfc_evaluate_now (se.expr, &block);
894 gfc_init_se (&se, NULL);
895 gfc_conv_expr_val (&se, code->ext.iterator->step);
896 gfc_add_block_to_block (&block, &se.pre);
897 step = gfc_evaluate_now (se.expr, &block);
899 /* Special case simple loops. */
900 if (TREE_CODE (type) == INTEGER_TYPE
901 && (integer_onep (step)
902 || tree_int_cst_equal (step, integer_minus_one_node)))
903 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
905 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
906 fold_convert (type, integer_zero_node));
/* countm1 is unsigned so that a full-range trip count cannot overflow.  */
908 if (TREE_CODE (type) == INTEGER_TYPE)
909 utype = unsigned_type_for (type);
911 utype = unsigned_type_for (gfc_array_index_type);
912 countm1 = gfc_create_var (utype, "countm1");
914 /* Cycle and exit statements are implemented with gotos. */
915 cycle_label = gfc_build_label_decl (NULL_TREE);
916 exit_label = gfc_build_label_decl (NULL_TREE);
917 TREE_USED (exit_label) = 1;
919 /* Initialize the DO variable: dovar = from. */
920 gfc_add_modify (&block, dovar, from);
922 /* Initialize loop count and jump to exit label if the loop is empty.
923 This code is executed before we enter the loop body. We generate:
926 if (to < from) goto exit_label;
927 countm1 = (to - from) / step;
931 if (to > from) goto exit_label;
932 countm1 = (from - to) / -step;
934 if (TREE_CODE (type) == INTEGER_TYPE)
/* Positive-step arm: emptiness check then countm1 computation.  */
938 tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
939 pos = fold_build3 (COND_EXPR, void_type_node, tmp,
940 build1_v (GOTO_EXPR, exit_label),
941 build_empty_stmt ());
942 tmp = fold_build2 (MINUS_EXPR, type, to, from);
943 tmp = fold_convert (utype, tmp);
944 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
945 fold_convert (utype, step));
946 tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
947 pos = build2 (COMPOUND_EXPR, void_type_node, pos, tmp);
/* Negative-step arm: operands swapped, step negated.  */
949 tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
950 neg = fold_build3 (COND_EXPR, void_type_node, tmp,
951 build1_v (GOTO_EXPR, exit_label),
952 build_empty_stmt ());
953 tmp = fold_build2 (MINUS_EXPR, type, from, to);
954 tmp = fold_convert (utype, tmp);
955 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
956 fold_convert (utype, fold_build1 (NEGATE_EXPR,
958 tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
959 neg = build2 (COMPOUND_EXPR, void_type_node, neg, tmp);
961 tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
962 gfc_add_expr_to_block (&block, tmp);
/* Real-typed loop variable: compute the count in floating point.  */
966 /* TODO: We could use the same width as the real type.
967 This would probably cause more problems that it solves
968 when we implement "long double" types. */
970 tmp = fold_build2 (MINUS_EXPR, type, to, from);
971 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
972 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
973 gfc_add_modify (&block, countm1, tmp);
975 /* We need a special check for empty loops:
976 empty = (step > 0 ? to < from : to > from); */
977 tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
978 fold_build2 (LT_EXPR, boolean_type_node, to, from),
979 fold_build2 (GT_EXPR, boolean_type_node, to, from));
980 /* If the loop is empty, go directly to the exit label. */
981 tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
982 build1_v (GOTO_EXPR, exit_label),
983 build_empty_stmt ());
984 gfc_add_expr_to_block (&block, tmp);
988 gfc_start_block (&body);
990 /* Put these labels where they can be found later. We put the
991 labels in a TREE_LIST node (because TREE_CHAIN is already
992 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
993 label in TREE_VALUE (backend_decl). */
995 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
997 /* Main loop body. */
998 tmp = gfc_trans_code (code->block->next);
999 gfc_add_expr_to_block (&body, tmp);
1001 /* Label for cycle statements (if needed). */
1002 if (TREE_USED (cycle_label))
1004 tmp = build1_v (LABEL_EXPR, cycle_label);
1005 gfc_add_expr_to_block (&body, tmp);
1008 /* Increment the loop variable. */
1009 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
1010 gfc_add_modify (&body, dovar, tmp);
1012 /* End with the loop condition. Loop until countm1 == 0. */
1013 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
1014 build_int_cst (utype, 0));
1015 tmp = build1_v (GOTO_EXPR, exit_label);
1016 tmp = fold_build3 (COND_EXPR, void_type_node,
1017 cond, tmp, build_empty_stmt ());
1018 gfc_add_expr_to_block (&body, tmp);
1020 /* Decrement the loop count. */
/* Tested before the decrement, so countm1 (unsigned) never wraps.  */
1021 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
1022 gfc_add_modify (&body, countm1, tmp);
1024 /* End of loop body. */
1025 tmp = gfc_finish_block (&body);
1027 /* The for loop itself. */
1028 tmp = build1_v (LOOP_EXPR, tmp);
1029 gfc_add_expr_to_block (&block, tmp);
1031 /* Add the exit label. */
1032 tmp = build1_v (LABEL_EXPR, exit_label);
1033 gfc_add_expr_to_block (&block, tmp);
1035 return gfc_finish_block (&block);
1039 /* Translate the DO WHILE construct.
1052 if (! cond) goto exit_label;
1058 Because the evaluation of the exit condition `cond' may have side
1059 effects, we can't do much for empty loop bodies. The backend optimizers
1060 should be smart enough to eliminate any dead loops. */
/* The negated condition is re-evaluated at the top of every iteration;
   CYCLE jumps to cycle_label at the bottom, EXIT to exit_label after
   the LOOP_EXPR.  NOTE(review): declarations/braces elided in view.  */
1063 gfc_trans_do_while (gfc_code * code)
1071 /* Everything we build here is part of the loop body. */
1072 gfc_start_block (&block);
1074 /* Cycle and exit statements are implemented with gotos. */
1075 cycle_label = gfc_build_label_decl (NULL_TREE);
1076 exit_label = gfc_build_label_decl (NULL_TREE);
1078 /* Put the labels where they can be found later. See gfc_trans_do(). */
1079 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1081 /* Create a GIMPLE version of the exit condition. */
1082 gfc_init_se (&cond, NULL);
1083 gfc_conv_expr_val (&cond, code->expr);
/* Condition side effects (cond.pre) run on every iteration,
   before the test.  */
1084 gfc_add_block_to_block (&block, &cond.pre);
1085 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1087 /* Build "IF (! cond) GOTO exit_label". */
1088 tmp = build1_v (GOTO_EXPR, exit_label);
1089 TREE_USED (exit_label) = 1;
1090 tmp = fold_build3 (COND_EXPR, void_type_node,
1091 cond.expr, tmp, build_empty_stmt ());
1092 gfc_add_expr_to_block (&block, tmp);
1094 /* The main body of the loop. */
1095 tmp = gfc_trans_code (code->block->next);
1096 gfc_add_expr_to_block (&block, tmp);
1098 /* Label for cycle statements (if needed). */
1099 if (TREE_USED (cycle_label))
1101 tmp = build1_v (LABEL_EXPR, cycle_label);
1102 gfc_add_expr_to_block (&block, tmp);
1105 /* End of loop body. */
1106 tmp = gfc_finish_block (&block);
1108 gfc_init_block (&block);
1109 /* Build the loop. */
1110 tmp = build1_v (LOOP_EXPR, tmp);
1111 gfc_add_expr_to_block (&block, tmp);
1113 /* Add the exit label. */
1114 tmp = build1_v (LABEL_EXPR, exit_label);
1115 gfc_add_expr_to_block (&block, tmp);
1117 return gfc_finish_block (&block);
1121 /* Translate the SELECT CASE construct for INTEGER case expressions,
1122 without killing all potential optimizations. The problem is that
1123 Fortran allows unbounded cases, but the back-end does not, so we
1124 need to intercept those before we enter the equivalent SWITCH_EXPR
1127 For example, we translate this,
1130 CASE (:100,101,105:115)
1140 to the GENERIC equivalent,
1144 case (minimum value for typeof(expr) ... 100:
1150 case 200 ... (maximum value for typeof(expr):
/* Unbounded CASE ranges are closed off with TYPE_MIN_VALUE /
   TYPE_MAX_VALUE of the selector's type so the middle end's
   SWITCH_EXPR can represent them.  NOTE(review): declarations and
   several braces/conditions are elided in this view.  */
1167 gfc_trans_integer_select (gfc_code * code)
1177 gfc_start_block (&block);
1179 /* Calculate the switch expression. */
1180 gfc_init_se (&se, NULL);
1181 gfc_conv_expr_val (&se, code->expr);
1182 gfc_add_block_to_block (&block, &se.pre);
1184 end_label = gfc_build_label_decl (NULL_TREE);
1186 gfc_init_block (&body);
1188 for (c = code->block; c; c = c->block)
1190 for (cp = c->ext.case_list; cp; cp = cp->next)
1195 /* Assume it's the default case. */
1196 low = high = NULL_TREE;
1200 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1203 /* If there's only a lower bound, set the high bound to the
1204 maximum value of the case expression. */
1206 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1211 /* Three cases are possible here:
1213 1) There is no lower bound, e.g. CASE (:N).
1214 2) There is a lower bound .NE. high bound, that is
1215 a case range, e.g. CASE (N:M) where M>N (we make
1216 sure that M>N during type resolution).
1217 3) There is a lower bound, and it has the same value
1218 as the high bound, e.g. CASE (N:N). This is our
1219 internal representation of CASE(N).
1221 In the first and second case, we need to set a value for
1222 high. In the third case, we don't because the GCC middle
1223 end represents a single case value by just letting high be
1224 a NULL_TREE. We can't do that because we need to be able
1225 to represent unbounded cases. */
1229 && mpz_cmp (cp->low->value.integer,
1230 cp->high->value.integer) != 0))
1231 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1234 /* Unbounded case. */
1236 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1239 /* Build a label. */
1240 label = gfc_build_label_decl (NULL_TREE);
1242 /* Add this case label.
1243 Add parameter 'label', make it match GCC backend. */
1244 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1246 gfc_add_expr_to_block (&body, tmp);
1249 /* Add the statements for this case. */
1250 tmp = gfc_trans_code (c->next);
1251 gfc_add_expr_to_block (&body, tmp);
1253 /* Break to the end of the construct. */
/* Fortran cases never fall through; emit an explicit break.  */
1254 tmp = build1_v (GOTO_EXPR, end_label);
1255 gfc_add_expr_to_block (&body, tmp);
1258 tmp = gfc_finish_block (&body);
1259 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1260 gfc_add_expr_to_block (&block, tmp);
1262 tmp = build1_v (LABEL_EXPR, end_label);
1263 gfc_add_expr_to_block (&block, tmp);
1265 return gfc_finish_block (&block);
1269 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1271 There are only two cases possible here, even though the standard
1272 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1273 .FALSE., and DEFAULT.
1275 We never generate more than two blocks here. Instead, we always
1276 try to eliminate the DEFAULT case. This way, we can translate this
1277 kind of SELECT construct to a simple
1281 expression in GENERIC. */
/* t/f/d below track the gfc_code blocks for the .TRUE., .FALSE. and
   DEFAULT cases respectively.  NOTE(review): declarations and the
   default-case detection are elided in this view.  */
1284 gfc_trans_logical_select (gfc_code * code)
1287 gfc_code *t, *f, *d;
1292 /* Assume we don't have any cases at all. */
1295 /* Now see which ones we actually do have. We can have at most two
1296 cases in a single case list: one for .TRUE. and one for .FALSE.
1297 The default case is always separate. If the cases for .TRUE. and
1298 .FALSE. are in the same case list, the block for that case list
1299 always executed, and we don't generate code a COND_EXPR. */
1300 for (c = code->block; c; c = c->block)
1302 for (cp = c->ext.case_list; cp; cp = cp->next)
1306 if (cp->low->value.logical == 0) /* .FALSE. */
1308 else /* if (cp->value.logical != 0), thus .TRUE. */
1316 /* Start a new block. */
1317 gfc_start_block (&block);
1319 /* Calculate the switch expression. We always need to do this
1320 because it may have side effects. */
1321 gfc_init_se (&se, NULL);
1322 gfc_conv_expr_val (&se, code->expr);
1323 gfc_add_block_to_block (&block, &se.pre);
1325 if (t == f && t != NULL)
1327 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1328 translate the code for these cases, append it to the current
1330 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1334 tree true_tree, false_tree, stmt;
1336 true_tree = build_empty_stmt ();
1337 false_tree = build_empty_stmt ();
1339 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1340 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1341 make the missing case the default case. */
1342 if (t != NULL && f != NULL)
1352 /* Translate the code for each of these blocks, and append it to
1353 the current block. */
1355 true_tree = gfc_trans_code (t->next);
1358 false_tree = gfc_trans_code (f->next);
/* Selector is the COND_EXPR predicate: TRUE arm, then FALSE arm.  */
1360 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1361 true_tree, false_tree);
1362 gfc_add_expr_to_block (&block, stmt);
1365 return gfc_finish_block (&block);
1369 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1370 Instead of generating compares and jumps, it is far simpler to
1371 generate a data structure describing the cases in order and call a
1372 library subroutine that locates the right case.
1373 This is particularly true because this is the only case where we
1374 might have to dispose of a temporary.
1375 The library subroutine returns a pointer to jump to or NULL if no
1376 branches are to be taken. */
1379 gfc_trans_character_select (gfc_code *code)
1381 tree init, node, end_label, tmp, type, case_num, label, fndecl;
1382 stmtblock_t block, body;
1388 /* The jump table types are stored in static variables to avoid
1389 constructing them from scratch every single time. */
1390 static tree select_struct[2];
1391 static tree ss_string1[2], ss_string1_len[2];
1392 static tree ss_string2[2], ss_string2_len[2];
1393 static tree ss_target[2];
/* NOTE(review): index k appears to select between the character kind=1
   and kind=4 variants of the cached jump-table type — its assignment is
   not visible here; confirm against the full source.  */
1395 tree pchartype = gfc_get_pchar_type (code->expr->ts.kind);
1397 if (code->expr->ts.kind == 1)
1399 else if (code->expr->ts.kind == 4)
/* Build the RECORD_TYPE describing one jump-table entry lazily,
   the first time a SELECT of this kind is translated.  */
1404 if (select_struct[k] == NULL)
1406 select_struct[k] = make_node (RECORD_TYPE);
1408 if (code->expr->ts.kind == 1)
1409 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1410 else if (code->expr->ts.kind == 4)
1411 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
1416 #define ADD_FIELD(NAME, TYPE) \
1417 ss_##NAME[k] = gfc_add_field_to_struct \
1418 (&(TYPE_FIELDS (select_struct[k])), select_struct[k], \
1419 get_identifier (stringize(NAME)), TYPE)
1421 ADD_FIELD (string1, pchartype);
1422 ADD_FIELD (string1_len, gfc_charlen_type_node);
1424 ADD_FIELD (string2, pchartype);
1425 ADD_FIELD (string2_len, gfc_charlen_type_node);
1427 ADD_FIELD (target, integer_type_node);
1430 gfc_finish_type (select_struct[k]);
/* Walk to the leftmost node of the sorted case tree/list.  */
1433 cp = code->block->ext.case_list;
1434 while (cp->left != NULL)
1438 for (d = cp; d; d = d->right)
1441 end_label = gfc_build_label_decl (NULL_TREE);
1443 /* Generate the body.  */
1444 gfc_start_block (&block);
1445 gfc_init_block (&body);
/* Emit a CASE_LABEL_EXPR (keyed on the case number d->n) followed by
   the user code for every case list, then a jump past the switch.  */
1447 for (c = code->block; c; c = c->block)
1449 for (d = c->ext.case_list; d; d = d->next)
1451 label = gfc_build_label_decl (NULL_TREE);
1452 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1453 build_int_cst (NULL_TREE, d->n),
1454 build_int_cst (NULL_TREE, d->n), label);
1455 gfc_add_expr_to_block (&body, tmp);
1458 tmp = gfc_trans_code (c->next);
1459 gfc_add_expr_to_block (&body, tmp);
1461 tmp = build1_v (GOTO_EXPR, end_label);
1462 gfc_add_expr_to_block (&body, tmp);
1465 /* Generate the structure describing the branches.  A NULL string
   pointer with zero length encodes an open bound of a case range.  */
1468 for(d = cp; d; d = d->right)
1472 gfc_init_se (&se, NULL);
1476 node = tree_cons (ss_string1[k], null_pointer_node, node);
1477 node = tree_cons (ss_string1_len[k], integer_zero_node, node);
1481 gfc_conv_expr_reference (&se, d->low);
1483 node = tree_cons (ss_string1[k], se.expr, node);
1484 node = tree_cons (ss_string1_len[k], se.string_length, node);
1487 if (d->high == NULL)
1489 node = tree_cons (ss_string2[k], null_pointer_node, node);
1490 node = tree_cons (ss_string2_len[k], integer_zero_node, node);
1494 gfc_init_se (&se, NULL);
1495 gfc_conv_expr_reference (&se, d->high);
1497 node = tree_cons (ss_string2[k], se.expr, node);
1498 node = tree_cons (ss_string2_len[k], se.string_length, node);
1501 node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n),
1504 tmp = build_constructor_from_list (select_struct[k], nreverse (node));
1505 init = tree_cons (NULL_TREE, tmp, init);
1508 type = build_array_type (select_struct[k],
1509 build_index_type (build_int_cst (NULL_TREE, n-1)));
1511 init = build_constructor_from_list (type, nreverse(init));
1512 TREE_CONSTANT (init) = 1;
1513 TREE_STATIC (init) = 1;
1514 /* Create a static variable to hold the jump table. */
1515 tmp = gfc_create_var (type, "jumptable");
1516 TREE_CONSTANT (tmp) = 1;
1517 TREE_STATIC (tmp) = 1;
1518 TREE_READONLY (tmp) = 1;
1519 DECL_INITIAL (tmp) = init;
1522 /* Build the library call.  The runtime searches the jump table and
   returns the case number to switch on.  */
1523 init = gfc_build_addr_expr (pvoid_type_node, init);
1525 gfc_init_se (&se, NULL);
1526 gfc_conv_expr_reference (&se, code->expr);
1528 gfc_add_block_to_block (&block, &se.pre);
1530 if (code->expr->ts.kind == 1)
1531 fndecl = gfor_fndecl_select_string;
1532 else if (code->expr->ts.kind == 4)
1533 fndecl = gfor_fndecl_select_string_char4;
1537 tmp = build_call_expr (fndecl, 4, init, build_int_cst (NULL_TREE, n),
1538 se.expr, se.string_length);
1539 case_num = gfc_create_var (integer_type_node, "case_num");
1540 gfc_add_modify (&block, case_num, tmp);
1542 gfc_add_block_to_block (&block, &se.post);
/* Dispatch on the runtime's answer with an ordinary SWITCH_EXPR.  */
1544 tmp = gfc_finish_block (&body);
1545 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1546 gfc_add_expr_to_block (&block, tmp);
1548 tmp = build1_v (LABEL_EXPR, end_label);
1549 gfc_add_expr_to_block (&block, tmp);
1551 return gfc_finish_block (&block);
1555 /* Translate the three variants of the SELECT CASE construct.
1557 SELECT CASEs with INTEGER case expressions can be translated to an
1558 equivalent GENERIC switch statement, and for LOGICAL case
1559 expressions we build one or two if-else compares.
1561 SELECT CASEs with CHARACTER case expressions are a whole different
1562 story, because they don't exist in GENERIC. So we sort them and
1563 do a binary search at runtime.
1565 Fortran has no BREAK statement, and it does not allow jumps from
1566 one case block to another. That makes things a lot easier for
1570 gfc_trans_select (gfc_code * code)
1572 gcc_assert (code && code->expr);
1574 /* Empty SELECT constructs are legal. */
1575 if (code->block == NULL)
1576 return build_empty_stmt ();
1578 /* Select the correct translation function. */
/* Dispatch on the declared type of the case expression; each helper
   returns the finished statement tree for the whole construct.  */
1579 switch (code->expr->ts.type)
1581 case BT_LOGICAL: return gfc_trans_logical_select (code);
1582 case BT_INTEGER: return gfc_trans_integer_select (code);
1583 case BT_CHARACTER: return gfc_trans_character_select (code);
/* Any other type is a front-end bug; resolution should have caught it.  */
1585 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1591 /* Traversal function to substitute a replacement symtree if the symbol
1592 in the expression is the same as that passed. f == 2 signals that
1593 that variable itself is not to be checked - only the references.
1594 This group of functions is used when the variable expression in a
1595 FORALL assignment has internal references. For example:
1596 FORALL (i = 1:4) p(p(i)) = i
1597 The only recourse here is to store a copy of 'p' for the index
1600 static gfc_symtree *new_symtree;
1601 static gfc_symtree *old_symtree;
/* Callback for gfc_traverse_expr: redirect references to SYM to the
   replacement symtree stored in the file-scope NEW_SYMTREE.  */
1604 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1606 if (expr->expr_type != EXPR_VARIABLE)
1611 else if (expr->symtree->n.sym == sym)
1612 expr->symtree = new_symtree;
/* Substitute NEW_SYMTREE for every reference to SYM in expression E.
   F is forwarded to forall_replace (f == 2 skips the variable itself).  */
1618 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1620 gfc_traverse_expr (e, sym, forall_replace, f);
/* Callback for gfc_traverse_expr: undo forall_replace by pointing
   references at the saved OLD_SYMTREE again.  */
1624 forall_restore (gfc_expr *expr,
1625 gfc_symbol *sym ATTRIBUTE_UNUSED,
1626 int *f ATTRIBUTE_UNUSED)
1628 if (expr->expr_type != EXPR_VARIABLE)
1631 if (expr->symtree == new_symtree)
1632 expr->symtree = old_symtree;
/* Restore the original symtree in every reference within E.  */
1638 forall_restore_symtree (gfc_expr *e)
1640 gfc_traverse_expr (e, NULL, forall_restore, 0);
/* Make a complete copy of the FORALL assignment target so that internal
   references (e.g. p(p(i)) = i) read the pre-assignment values.  The copy
   is created in PRE, cleaned up in POST, and a fake symtree pointing at
   it replaces the original in both sides of the assignment C.  */
1644 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1649 gfc_symbol *new_sym;
1650 gfc_symbol *old_sym;
1654 /* Build a copy of the lvalue. */
1655 old_symtree = c->expr->symtree;
1656 old_sym = old_symtree->n.sym;
1657 e = gfc_lval_expr_from_sym (old_sym);
1658 if (old_sym->attr.dimension)
1660 gfc_init_se (&tse, NULL);
/* Copy the whole array into a temporary (INTENT_IN: no copy-back).  */
1661 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
1662 gfc_add_block_to_block (pre, &tse.pre);
1663 gfc_add_block_to_block (post, &tse.post);
1664 tse.expr = build_fold_indirect_ref (tse.expr);
1666 if (e->ts.type != BT_CHARACTER)
1668 /* Use the variable offset for the temporary. */
1669 tmp = gfc_conv_descriptor_offset (tse.expr);
1670 gfc_add_modify (pre, tmp,
1671 gfc_conv_array_offset (old_sym->backend_decl));
/* Scalar case: build a scalar temporary and copy the value into it.  */
1676 gfc_init_se (&tse, NULL);
1677 gfc_init_se (&rse, NULL);
1678 gfc_conv_expr (&rse, e);
1679 if (e->ts.type == BT_CHARACTER)
1681 tse.string_length = rse.string_length;
1682 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1684 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1686 gfc_add_block_to_block (pre, &tse.pre);
1687 gfc_add_block_to_block (post, &tse.post);
1691 tmp = gfc_typenode_for_spec (&e->ts);
1692 tse.expr = gfc_create_var (tmp, "temp");
1695 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1696 e->expr_type == EXPR_VARIABLE);
1697 gfc_add_expr_to_block (pre, tmp);
1701 /* Create a new symbol to represent the lvalue. */
1702 new_sym = gfc_new_symbol (old_sym->name, NULL);
1703 new_sym->ts = old_sym->ts;
1704 new_sym->attr.referenced = 1;
1705 new_sym->attr.dimension = old_sym->attr.dimension;
1706 new_sym->attr.flavor = old_sym->attr.flavor;
1708 /* Use the temporary as the backend_decl. */
1709 new_sym->backend_decl = tse.expr;
1711 /* Create a fake symtree for it. */
1713 new_symtree = gfc_new_symtree (&root, old_sym->name);
1714 new_symtree->n.sym = new_sym;
1715 gcc_assert (new_symtree == root);
1717 /* Go through the expression reference replacing the old_symtree
1719 forall_replace_symtree (c->expr, old_sym, 2);
1721 /* Now we have made this temporary, we might as well use it for
1722 the right hand side. */
1723 forall_replace_symtree (c->expr2, old_sym, 1);
1727 /* Handles dependencies in forall assignments. */
/* Decide whether the FORALL assignment C needs a temporary, and for
   internal lvalue dependencies create one via forall_make_variable_temp.
   NOTE(review): the visible chunk elides the return statements, so the
   exact return value (presumably the need_temp flag) should be confirmed
   against the full source.  */
1729 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1736 lsym = c->expr->symtree->n.sym;
1737 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
1739 /* Now check for dependencies within the 'variable'
1740 expression itself. These are treated by making a complete
1741 copy of variable and changing all the references to it
1742 point to the copy instead. Note that the shallow copy of
1743 the variable will not suffice for derived types with
1744 pointer components. We therefore leave these to their
1746 if (lsym->ts.type == BT_DERIVED
1747 && lsym->ts.derived->attr.pointer_comp)
/* The lvalue references a FORALL index inside its own subscripts.  */
1751 if (find_forall_index (c->expr, lsym, 2) == SUCCESS)
1753 forall_make_variable_temp (c, pre, post);
1757 /* Substrings with dependencies are treated in the same
1759 if (c->expr->ts.type == BT_CHARACTER
1761 && c->expr2->expr_type == EXPR_VARIABLE
1762 && lsym == c->expr2->symtree->n.sym)
1764 for (lref = c->expr->ref; lref; lref = lref->next)
1765 if (lref->type == REF_SUBSTRING)
1767 for (rref = c->expr2->ref; rref; rref = rref->next)
1768 if (rref->type == REF_SUBSTRING)
/* Overlapping substrings where the rhs starts before the lhs.  */
1772 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
1774 forall_make_variable_temp (c, pre, post);
/* Undo the symtree substitution done for the FORALL assignment C and
   release the fake symbol/symtree created by forall_make_variable_temp.  */
1783 cleanup_forall_symtrees (gfc_code *c)
1785 forall_restore_symtree (c->expr);
1786 forall_restore_symtree (c->expr2);
1787 gfc_free (new_symtree->n.sym);
1788 gfc_free (new_symtree);
1792 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1793 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1794 indicates whether we should generate code to test the FORALLs mask
1795 array. OUTER is the loop header to be used for initializing mask
1798 The generated loop format is:
1799 count = (end - start + step) / step
1812 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1813 int mask_flag, stmtblock_t *outer)
1821 tree var, start, end, step;
1824 /* Initialize the mask index outside the FORALL nest. */
1825 if (mask_flag && forall_tmp->mask)
1826 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
/* Wrap BODY in one counted loop per FORALL index variable, innermost
   first; each iteration of this for-loop adds one enclosing loop.  */
1828 iter = forall_tmp->this_loop;
1829 nvar = forall_tmp->nvar;
1830 for (n = 0; n < nvar; n++)
1833 start = iter->start;
1837 exit_label = gfc_build_label_decl (NULL_TREE);
1838 TREE_USED (exit_label) = 1;
1840 /* The loop counter. */
1841 count = gfc_create_var (TREE_TYPE (var), "count");
1843 /* The body of the loop. */
1844 gfc_init_block (&block);
1846 /* The exit condition: leave the loop once the trip counter
   has counted down to zero (or below).  */
1847 cond = fold_build2 (LE_EXPR, boolean_type_node,
1848 count, build_int_cst (TREE_TYPE (count), 0));
1849 tmp = build1_v (GOTO_EXPR, exit_label);
1850 tmp = fold_build3 (COND_EXPR, void_type_node,
1851 cond, tmp, build_empty_stmt ());
1852 gfc_add_expr_to_block (&block, tmp);
1854 /* The main loop body. */
1855 gfc_add_expr_to_block (&block, body);
1857 /* Increment the loop variable. */
1858 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1859 gfc_add_modify (&block, var, tmp);
1861 /* Advance to the next mask element. Only do this for the
1863 if (n == 0 && mask_flag && forall_tmp->mask)
1865 tree maskindex = forall_tmp->maskindex;
1866 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1867 maskindex, gfc_index_one_node);
1868 gfc_add_modify (&block, maskindex, tmp);
1871 /* Decrement the loop counter. */
1872 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
1873 build_int_cst (TREE_TYPE (var), 1));
1874 gfc_add_modify (&block, count, tmp);
1876 body = gfc_finish_block (&block);
1878 /* Loop var initialization. */
1879 gfc_init_block (&block);
1880 gfc_add_modify (&block, var, start);
1883 /* Initialize the loop counter: count = (end - start + step) / step.  */
1884 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1885 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1886 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1887 gfc_add_modify (&block, count, tmp);
1889 /* The loop expression. */
1890 tmp = build1_v (LOOP_EXPR, body);
1891 gfc_add_expr_to_block (&block, tmp);
1893 /* The exit label. */
1894 tmp = build1_v (LABEL_EXPR, exit_label);
1895 gfc_add_expr_to_block (&block, tmp);
1897 body = gfc_finish_block (&block);
1904 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1905 is nonzero, the body is controlled by all masks in the forall nest.
1906 Otherwise, the innermost loop is not controlled by its mask. This
1907 is used for initializing that mask. */
1910 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1915 forall_info *forall_tmp;
1916 tree mask, maskindex;
1918 gfc_start_block (&header);
/* Walk outwards through the FORALL nest (innermost to outermost via
   prev_nest), wrapping BODY in each level's mask test and loops.  */
1920 forall_tmp = nested_forall_info;
1921 while (forall_tmp != NULL)
1923 /* Generate body with masks' control. */
1926 mask = forall_tmp->mask;
1927 maskindex = forall_tmp->maskindex;
1929 /* If a mask was specified make the assignment conditional. */
1932 tmp = gfc_build_array_ref (mask, maskindex, NULL);
1933 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1936 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1937 forall_tmp = forall_tmp->prev_nest;
1941 gfc_add_expr_to_block (&header, body);
1942 return gfc_finish_block (&header);
1946 /* Allocate data for holding a temporary array. Returns either a local
1947 temporary array or a pointer variable. */
/* Allocate BYTESIZE bytes holding SIZE elements of ELEM_TYPE: on the
   stack when the size is a compile-time constant small enough, otherwise
   via malloc.  *PDATA receives the pointer to free later (see callers).  */
1950 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
1957 if (INTEGER_CST_P (size))
1959 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1960 gfc_index_one_node);
/* Array type with index range [0, size-1].  */
1965 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1966 type = build_array_type (elem_type, type);
1967 if (gfc_can_put_var_on_stack (bytesize))
1969 gcc_assert (INTEGER_CST_P (size));
1970 tmpvar = gfc_create_var (type, "temp");
/* Heap path: a pointer variable filled by a malloc call in PBLOCK.  */
1975 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1976 *pdata = convert (pvoid_type_node, tmpvar);
1978 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
1979 gfc_add_modify (pblock, tmpvar, tmp);
1985 /* Generate codes to copy the temporary to the actual lhs. */
/* Build the statements that copy the temporary TMP1 (indexed by COUNT1)
   back into the lhs EXPR, guarded by WHEREMASK (indexed by COUNT3) when
   present; INVERT flips the mask sense.  */
1988 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1989 tree count1, gfc_ss *tmp1, bool invert)
1993 stmtblock_t block, body;
1999 lss = gfc_walk_expr (expr);
/* Scalar lhs: a single element assignment, no scalarizer needed.  */
2001 if (lss == gfc_ss_terminator)
2003 gfc_start_block (&block);
2005 gfc_init_se (&lse, NULL);
2007 /* Translate the expression. */
2008 gfc_conv_expr (&lse, expr);
2010 /* Form the expression for the temporary. */
2011 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2013 /* Use the scalar assignment as is. */
2014 gfc_add_block_to_block (&block, &lse.pre);
2015 gfc_add_modify (&block, lse.expr, tmp);
2016 gfc_add_block_to_block (&block, &lse.post);
2018 /* Increment the count1. */
2019 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2020 gfc_index_one_node);
2021 gfc_add_modify (&block, count1, tmp);
2023 tmp = gfc_finish_block (&block);
/* Array lhs: drive the assignment with the scalarizer.  */
2027 gfc_start_block (&block);
2029 gfc_init_loopinfo (&loop1);
2030 gfc_init_se (&rse, NULL);
2031 gfc_init_se (&lse, NULL);
2033 /* Associate the lss with the loop. */
2034 gfc_add_ss_to_loop (&loop1, lss);
2036 /* Calculate the bounds of the scalarization. */
2037 gfc_conv_ss_startstride (&loop1);
2038 /* Setup the scalarizing loops. */
2039 gfc_conv_loop_setup (&loop1, &expr->where);
2041 gfc_mark_ss_chain_used (lss, 1);
2043 /* Start the scalarized loop body. */
2044 gfc_start_scalarized_body (&loop1, &body);
2046 /* Setup the gfc_se structures. */
2047 gfc_copy_loopinfo_to_se (&lse, &loop1);
2050 /* Form the expression of the temporary. */
2051 if (lss != gfc_ss_terminator)
2052 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2053 /* Translate expr. */
2054 gfc_conv_expr (&lse, expr);
2056 /* Use the scalar assignment. */
2057 rse.string_length = lse.string_length;
2058 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
2060 /* Form the mask expression according to the mask tree list. */
2063 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2065 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2066 TREE_TYPE (wheremaskexpr),
2068 tmp = fold_build3 (COND_EXPR, void_type_node,
2069 wheremaskexpr, tmp, build_empty_stmt ());
2072 gfc_add_expr_to_block (&body, tmp);
2074 /* Increment count1. */
2075 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2076 count1, gfc_index_one_node);
2077 gfc_add_modify (&body, count1, tmp);
2079 /* Increment count3. */
2082 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2083 count3, gfc_index_one_node);
2084 gfc_add_modify (&body, count3, tmp);
2087 /* Generate the copying loops. */
2088 gfc_trans_scalarizing_loops (&loop1, &body);
2089 gfc_add_block_to_block (&block, &loop1.pre);
2090 gfc_add_block_to_block (&block, &loop1.post);
2091 gfc_cleanup_loop (&loop1);
2093 tmp = gfc_finish_block (&block);
2099 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2100 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2101 and should not be freed. WHEREMASK is the conditional execution mask
2102 whose sense may be inverted by INVERT. */
2105 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2106 tree count1, gfc_ss *lss, gfc_ss *rss,
2107 tree wheremask, bool invert)
2109 stmtblock_t block, body1;
2116 gfc_start_block (&block);
2118 gfc_init_se (&rse, NULL);
2119 gfc_init_se (&lse, NULL);
/* Scalar case: evaluate the rhs once into temp[count1].  */
2121 if (lss == gfc_ss_terminator)
2123 gfc_init_block (&body1);
2124 gfc_conv_expr (&rse, expr2);
2125 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2129 /* Initialize the loop. */
2130 gfc_init_loopinfo (&loop);
2132 /* We may need LSS to determine the shape of the expression. */
2133 gfc_add_ss_to_loop (&loop, lss);
2134 gfc_add_ss_to_loop (&loop, rss);
2136 gfc_conv_ss_startstride (&loop);
2137 gfc_conv_loop_setup (&loop, &expr2->where);
2139 gfc_mark_ss_chain_used (rss, 1);
2140 /* Start the loop body. */
2141 gfc_start_scalarized_body (&loop, &body1);
2143 /* Translate the expression. */
2144 gfc_copy_loopinfo_to_se (&rse, &loop);
2146 gfc_conv_expr (&rse, expr2);
2148 /* Form the expression of the temporary. */
2149 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2152 /* Use the scalar assignment. */
2153 lse.string_length = rse.string_length;
2154 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2155 expr2->expr_type == EXPR_VARIABLE);
2157 /* Form the mask expression according to the mask tree list.
   WHEREMASK guards the store; INVERT negates it (ELSEWHERE).  */
2160 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2162 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2163 TREE_TYPE (wheremaskexpr),
2165 tmp = fold_build3 (COND_EXPR, void_type_node,
2166 wheremaskexpr, tmp, build_empty_stmt ());
2169 gfc_add_expr_to_block (&body1, tmp);
2171 if (lss == gfc_ss_terminator)
2173 gfc_add_block_to_block (&block, &body1);
2175 /* Increment count1. */
2176 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2177 gfc_index_one_node);
2178 gfc_add_modify (&block, count1, tmp);
/* Array case: bump the counters inside the scalarized body instead.  */
2182 /* Increment count1. */
2183 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2184 count1, gfc_index_one_node);
2185 gfc_add_modify (&body1, count1, tmp);
2187 /* Increment count3. */
2190 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2191 count3, gfc_index_one_node);
2192 gfc_add_modify (&body1, count3, tmp);
2195 /* Generate the copying loops. */
2196 gfc_trans_scalarizing_loops (&loop, &body1);
2198 gfc_add_block_to_block (&block, &loop.pre);
2199 gfc_add_block_to_block (&block, &loop.post);
2201 gfc_cleanup_loop (&loop);
2202 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2203 as tree nodes in SS may not be valid in different scope. */
2206 tmp = gfc_finish_block (&block);
2211 /* Calculate the size of temporary needed in the assignment inside forall.
2212 LSS and RSS are filled in this function. */
2215 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2216 stmtblock_t * pblock,
2217 gfc_ss **lss, gfc_ss **rss)
2225 *lss = gfc_walk_expr (expr1);
/* A scalar assignment needs exactly one element.  */
2228 size = gfc_index_one_node;
2229 if (*lss != gfc_ss_terminator)
2231 gfc_init_loopinfo (&loop);
2233 /* Walk the RHS of the expression. */
2234 *rss = gfc_walk_expr (expr2);
2235 if (*rss == gfc_ss_terminator)
2237 /* The rhs is scalar. Add a ss for the expression. */
2238 *rss = gfc_get_ss ();
2239 (*rss)->next = gfc_ss_terminator;
2240 (*rss)->type = GFC_SS_SCALAR;
2241 (*rss)->expr = expr2;
2244 /* Associate the SS with the loop. */
2245 gfc_add_ss_to_loop (&loop, *lss);
2246 /* We don't actually need to add the rhs at this point, but it might
2247 make guessing the loop bounds a bit easier. */
2248 gfc_add_ss_to_loop (&loop, *rss);
2250 /* We only want the shape of the expression, not rest of the junk
2251 generated by the scalarizer. */
2252 loop.array_parameter = 1;
2254 /* Calculate the bounds of the scalarization.  Bounds checking is
   temporarily disabled: we only need the extents here, and any
   checks will be emitted by the real assignment loops.  */
2255 save_flag = flag_bounds_check;
2256 flag_bounds_check = 0;
2257 gfc_conv_ss_startstride (&loop);
2258 flag_bounds_check = save_flag;
2259 gfc_conv_loop_setup (&loop, &expr2->where);
2261 /* Figure out how many elements we need: the product over all
   dimensions of (to - from + 1).  */
2262 for (i = 0; i < loop.dimen; i++)
2264 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2265 gfc_index_one_node, loop.from[i]);
2266 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2268 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2270 gfc_add_block_to_block (pblock, &loop.pre);
2271 size = gfc_evaluate_now (size, pblock);
2272 gfc_add_block_to_block (pblock, &loop.post);
2274 /* TODO: write a function that cleans up a loopinfo without freeing
2275 the SS chains. Currently a NOP. */
2282 /* Calculate the overall iterator number of the nested forall construct.
2283 This routine actually calculates the number of times the body of the
2284 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2285 that by the expression INNER_SIZE. The BLOCK argument specifies the
2286 block in which to calculate the result, and the optional INNER_SIZE_BODY
2287 argument contains any statements that need to be executed (inside the loop)
2288 to initialize or calculate INNER_SIZE. */
2291 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2292 stmtblock_t *inner_size_body, stmtblock_t *block)
2294 forall_info *forall_tmp = nested_forall_info;
2298 /* We can eliminate the innermost unconditional loops with constant
2300 if (INTEGER_CST_P (inner_size))
2303 && !forall_tmp->mask
2304 && INTEGER_CST_P (forall_tmp->size))
/* Fold the constant trip count of this level into inner_size.  */
2306 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2307 inner_size, forall_tmp->size);
2308 forall_tmp = forall_tmp->prev_nest;
2311 /* If there are no loops left, we have our constant result. */
2316 /* Otherwise, create a temporary variable to compute the result. */
2317 number = gfc_create_var (gfc_array_index_type, "num");
2318 gfc_add_modify (block, number, gfc_index_zero_node);
/* Loop body: num += inner_size (after any statements that compute it).  */
2320 gfc_start_block (&body);
2321 if (inner_size_body)
2322 gfc_add_block_to_block (&body, inner_size_body);
2324 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2325 number, inner_size);
2328 gfc_add_modify (&body, number, tmp);
2329 tmp = gfc_finish_block (&body);
2331 /* Generate loops. */
2332 if (forall_tmp != NULL)
2333 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2335 gfc_add_expr_to_block (block, tmp);
2341 /* Allocate temporary for forall construct. SIZE is the size of temporary
2342 needed. PTEMP1 is returned for space free. */
/* Allocate a temporary of SIZE elements of TYPE in BLOCK; *PTEMP1
   receives the pointer the caller must eventually free (NULL for a
   stack temporary — see gfc_do_allocate).  */
2345 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
/* bytesize = size * sizeof (type), skipping the multiply when the
   element size is one byte.  */
2352 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2353 if (!integer_onep (unit))
2354 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2359 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2362 tmp = build_fold_indirect_ref (tmp);
2367 /* Allocate temporary for forall construct according to the information in
2368 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2369 assignment inside forall. PTEMP1 is returned for space free. */
/* Allocate the temporary for a FORALL nest: total size is the overall
   iteration count of the nest times INNER_SIZE.  */
2372 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2373 tree inner_size, stmtblock_t * inner_size_body,
2374 stmtblock_t * block, tree * ptemp1)
2378 /* Calculate the total size of temporary needed in forall construct. */
2379 size = compute_overall_iter_number (nested_forall_info, inner_size,
2380 inner_size_body, block);
2382 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2386 /* Handle assignments inside forall which need temporary.
2388 forall (i=start:end:stride; maskexpr)
2391 (where e,f<i> are arbitrary expressions possibly involving i
2392 and there is a dependency between e<i> and f<i>)
2394 masktmp(:) = maskexpr(:)
2399 for (i = start; i <= end; i += stride)
2403 for (i = start; i <= end; i += stride)
2405 if (masktmp[maskindex++])
2406 tmp[count1++] = f<i>
2410 for (i = start; i <= end; i += stride)
2412 if (masktmp[maskindex++])
2413 e<i> = tmp[count1++]
2418 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2419 tree wheremask, bool invert,
2420 forall_info * nested_forall_info,
2421 stmtblock_t * block)
2429 stmtblock_t inner_size_body;
2431 /* Create vars. count1 is the current iterator number of the nested
2433 count1 = gfc_create_var (gfc_array_index_type, "count1");
2435 /* Count is the wheremask index. */
2438 count = gfc_create_var (gfc_array_index_type, "count");
2439 gfc_add_modify (block, count, gfc_index_zero_node);
2444 /* Initialize count1. */
2445 gfc_add_modify (block, count1, gfc_index_zero_node);
2447 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2448 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2449 gfc_init_block (&inner_size_body);
2450 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2453 /* The type of LHS. Used in function allocate_temp_for_forall_nest.  */
2454 if (expr1->ts.type == BT_CHARACTER && expr1->ts.cl->length)
/* Translate the character length lazily and cache it on the charlen.  */
2456 if (!expr1->ts.cl->backend_decl)
2459 gfc_init_se (&tse, NULL);
2460 gfc_conv_expr (&tse, expr1->ts.cl->length)
2461 expr1->ts.cl->backend_decl = tse.expr;
2463 type = gfc_get_character_type_len (gfc_default_character_kind,
2464 expr1->ts.cl->backend_decl);
2467 type = gfc_typenode_for_spec (&expr1->ts);
2469 /* Allocate temporary for nested forall construct according to the
2470 information in nested_forall_info and inner_size. */
2471 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2472 &inner_size_body, block, &ptemp1);
2474 /* Generate code to copy the rhs to the temporary.  */
2475 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2478 /* Generate body and loops according to the information in
2479 nested_forall_info. */
2480 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2481 gfc_add_expr_to_block (block, tmp);
/* Reset the counters before replaying the nest for the copy-back.  */
2484 gfc_add_modify (block, count1, gfc_index_zero_node);
2488 gfc_add_modify (block, count, gfc_index_zero_node);
2490 /* Generate codes to copy the temporary to lhs. */
2491 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2494 /* Generate body and loops according to the information in
2495 nested_forall_info. */
2496 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2497 gfc_add_expr_to_block (block, tmp);
2501 /* Free the temporary. */
2502 tmp = gfc_call_free (ptemp1);
2503 gfc_add_expr_to_block (block, tmp);
2508 /* Translate pointer assignment inside FORALL which need temporary. */
2511 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2512 forall_info * nested_forall_info,
2513 stmtblock_t * block)
2527 tree tmp, tmp1, ptemp1;
2529 count = gfc_create_var (gfc_array_index_type, "count");
2530 gfc_add_modify (block, count, gfc_index_zero_node);
2532 inner_size = integer_one_node;
2533 lss = gfc_walk_expr (expr1);
2534 rss = gfc_walk_expr (expr2);
2535 if (lss == gfc_ss_terminator)
2537 type = gfc_typenode_for_spec (&expr1->ts);
2538 type = build_pointer_type (type);
2540 /* Allocate temporary for nested forall construct according to the
2541 information in nested_forall_info and inner_size. */
2542 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2543 inner_size, NULL, block, &ptemp1);
2544 gfc_start_block (&body);
2545 gfc_init_se (&lse, NULL);
2546 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2547 gfc_init_se (&rse, NULL);
2548 rse.want_pointer = 1;
2549 gfc_conv_expr (&rse, expr2);
2550 gfc_add_block_to_block (&body, &rse.pre);
2551 gfc_add_modify (&body, lse.expr,
2552 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2553 gfc_add_block_to_block (&body, &rse.post);
2555 /* Increment count. */
2556 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2557 count, gfc_index_one_node);
2558 gfc_add_modify (&body, count, tmp);
2560 tmp = gfc_finish_block (&body);
2562 /* Generate body and loops according to the information in
2563 nested_forall_info. */
2564 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2565 gfc_add_expr_to_block (block, tmp);
2568 gfc_add_modify (block, count, gfc_index_zero_node);
2570 gfc_start_block (&body);
2571 gfc_init_se (&lse, NULL);
2572 gfc_init_se (&rse, NULL);
2573 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2574 lse.want_pointer = 1;
2575 gfc_conv_expr (&lse, expr1);
2576 gfc_add_block_to_block (&body, &lse.pre);
2577 gfc_add_modify (&body, lse.expr, rse.expr);
2578 gfc_add_block_to_block (&body, &lse.post);
2579 /* Increment count. */
2580 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2581 count, gfc_index_one_node);
2582 gfc_add_modify (&body, count, tmp);
2583 tmp = gfc_finish_block (&body);
2585 /* Generate body and loops according to the information in
2586 nested_forall_info. */
2587 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2588 gfc_add_expr_to_block (block, tmp);
2592 gfc_init_loopinfo (&loop);
2594 /* Associate the SS with the loop. */
2595 gfc_add_ss_to_loop (&loop, rss);
2597 /* Setup the scalarizing loops and bounds. */
2598 gfc_conv_ss_startstride (&loop);
2600 gfc_conv_loop_setup (&loop, &expr2->where);
2602 info = &rss->data.info;
2603 desc = info->descriptor;
2605 /* Make a new descriptor. */
2606 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2607 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2608 loop.from, loop.to, 1,
2611 /* Allocate temporary for nested forall construct. */
2612 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2613 inner_size, NULL, block, &ptemp1);
2614 gfc_start_block (&body);
2615 gfc_init_se (&lse, NULL);
2616 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2617 lse.direct_byref = 1;
2618 rss = gfc_walk_expr (expr2);
2619 gfc_conv_expr_descriptor (&lse, expr2, rss);
2621 gfc_add_block_to_block (&body, &lse.pre);
2622 gfc_add_block_to_block (&body, &lse.post);
2624 /* Increment count. */
2625 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2626 count, gfc_index_one_node);
2627 gfc_add_modify (&body, count, tmp);
2629 tmp = gfc_finish_block (&body);
2631 /* Generate body and loops according to the information in
2632 nested_forall_info. */
2633 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2634 gfc_add_expr_to_block (block, tmp);
2637 gfc_add_modify (block, count, gfc_index_zero_node);
2639 parm = gfc_build_array_ref (tmp1, count, NULL);
2640 lss = gfc_walk_expr (expr1);
2641 gfc_init_se (&lse, NULL);
2642 gfc_conv_expr_descriptor (&lse, expr1, lss);
2643 gfc_add_modify (&lse.pre, lse.expr, parm);
2644 gfc_start_block (&body);
2645 gfc_add_block_to_block (&body, &lse.pre);
2646 gfc_add_block_to_block (&body, &lse.post);
2648 /* Increment count. */
2649 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2650 count, gfc_index_one_node);
2651 gfc_add_modify (&body, count, tmp);
2653 tmp = gfc_finish_block (&body);
2655 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2656 gfc_add_expr_to_block (block, tmp);
2658 /* Free the temporary. */
2661 tmp = gfc_call_free (ptemp1);
2662 gfc_add_expr_to_block (block, tmp);
2667 /* FORALL and WHERE statements are really nasty, especially when you nest
2668 them. All the rhs of a forall assignment must be evaluated before the
2669 actual assignments are performed. Presumably this also applies to all the
2670 assignments in an inner where statement. */
2672 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2673 linear array, relying on the fact that we process in the same order in all loops.
2676 forall (i=start:end:stride; maskexpr)
2680 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2682 count = ((end + 1 - start) / stride)
2683 masktmp(:) = maskexpr(:)
2686 for (i = start; i <= end; i += stride)
2688 if (masktmp[maskindex++])
2692 for (i = start; i <= end; i += stride)
2694 if (masktmp[maskindex++])
2698 Note that this code only works when there are no dependencies.
2699 Forall loop with array assignments and data dependencies are a real pain,
2700 because the size of the temporary cannot always be determined before the
2701 loop is executed. This problem is compounded by the presence of nested FORALL constructs.
/* Translate one FORALL construct or statement CODE.  NESTED_FORALL_INFO
   describes any enclosing FORALL levels (NULL at the outermost level).
   A shadow index variable is created for each FORALL iterator, the
   optional scalar mask expression is evaluated once into a compact
   logical temporary, and each statement in the FORALL body is then
   dispatched to the matching translation routine.  Returns the
   generated statement tree.
   NOTE(review): this listing appears to have lines elided by the
   extraction tool (missing braces, declarations and switch header);
   verify any change against the upstream trans-stmt.c.  */
2706 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2726   gfc_forall_iterator *fa;
2729   gfc_saved_var *saved_vars;
2730   iter_info *this_forall;
2734   /* Do nothing if the mask is false.  */
2736       && code->expr->expr_type == EXPR_CONSTANT
2737       && !code->expr->value.logical)
2738     return build_empty_stmt ();
2741   /* Count the FORALL index number.  */
2742   for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2746   /* Allocate the space for var, start, end, step, varexpr.  */
2747   var = (tree *) gfc_getmem (nvar * sizeof (tree));
2748   start = (tree *) gfc_getmem (nvar * sizeof (tree));
2749   end = (tree *) gfc_getmem (nvar * sizeof (tree));
2750   step = (tree *) gfc_getmem (nvar * sizeof (tree));
2751   varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2752   saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2754   /* Allocate the space for info.  */
2755   info = (forall_info *) gfc_getmem (sizeof (forall_info));
2757   gfc_start_block (&pre);
2758   gfc_init_block (&post);
2759   gfc_init_block (&block);
     /* For each iterator: shadow the index symbol with a fresh backend
	decl and record start/end/stride (made side-effect safe) in a
	per-iterator iter_info chained onto INFO.  */
2762   for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2764       gfc_symbol *sym = fa->var->symtree->n.sym;
2766       /* Allocate space for this_forall.  */
2767       this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2769       /* Create a temporary variable for the FORALL index.  */
2770       tmp = gfc_typenode_for_spec (&sym->ts);
2771       var[n] = gfc_create_var (tmp, sym->name);
2772       gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2774       /* Record it in this_forall.  */
2775       this_forall->var = var[n];
2777       /* Replace the index symbol's backend_decl with the temporary decl.  */
2778       sym->backend_decl = var[n];
2780       /* Work out the start, end and stride for the loop.  */
2781       gfc_init_se (&se, NULL);
2782       gfc_conv_expr_val (&se, fa->start);
2783       /* Record it in this_forall.  */
2784       this_forall->start = se.expr;
2785       gfc_add_block_to_block (&block, &se.pre);
2788       gfc_init_se (&se, NULL);
2789       gfc_conv_expr_val (&se, fa->end);
2790       /* Record it in this_forall.  */
2791       this_forall->end = se.expr;
	  /* The bound must be re-evaluable; force it into a temporary.  */
2792       gfc_make_safe_expr (&se);
2793       gfc_add_block_to_block (&block, &se.pre);
2796       gfc_init_se (&se, NULL);
2797       gfc_conv_expr_val (&se, fa->stride);
2798       /* Record it in this_forall.  */
2799       this_forall->step = se.expr;
2800       gfc_make_safe_expr (&se);
2801       gfc_add_block_to_block (&block, &se.pre);
2804       /* Set the NEXT field of this_forall to NULL.  */
2805       this_forall->next = NULL;
2806       /* Link this_forall to the info construct (append at tail so the
	     iterator order is preserved).  */
2807       if (info->this_loop)
2809 	  iter_info *iter_tmp = info->this_loop;
2810 	  while (iter_tmp->next != NULL)
2811 	    iter_tmp = iter_tmp->next;
2812 	  iter_tmp->next = this_forall;
2815 	info->this_loop = this_forall;
2821   /* Calculate the size needed for the current forall level: the
      product of the per-iterator trip counts.  */
2822   size = gfc_index_one_node;
2823   for (n = 0; n < nvar; n++)
2825       /* size = (end + step - start) / step.  */
2826       tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2828       tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2830       tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2831       tmp = convert (gfc_array_index_type, tmp);
2833       size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2836   /* Record the nvar and size of current forall level.  */
2842   /* If the mask is .true., consider the FORALL unconditional.  */
2843   if (code->expr->expr_type == EXPR_CONSTANT
2844       && code->expr->value.logical)
2852       /* First we need to allocate the mask.  */
2855       /* As the mask array can be very big, prefer compact boolean types.  */
2856       tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2857       mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2858 					    size, NULL, &block, &pmask);
2859       maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2861       /* Record them in the info structure.  */
2862       info->maskindex = maskindex;
2867       /* No mask was specified.  */
2868       maskindex = NULL_TREE;
2869       mask = pmask = NULL_TREE;
2872   /* Link the current forall level to nested_forall_info.  */
2873   info->prev_nest = nested_forall_info;
2874   nested_forall_info = info;
2876   /* Copy the mask into a temporary variable if required.
2877      For now we assume a mask temporary is needed.  */
2880       /* As the mask array can be very big, prefer compact boolean types.  */
2881       tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2883       gfc_add_modify (&block, maskindex, gfc_index_zero_node);
2885       /* Start of mask assignment loop body.  */
2886       gfc_start_block (&body);
2888       /* Evaluate the mask expression.  */
2889       gfc_init_se (&se, NULL);
2890       gfc_conv_expr_val (&se, code->expr);
2891       gfc_add_block_to_block (&body, &se.pre);
2893       /* Store the mask.  */
2894       se.expr = convert (mask_type, se.expr);
2896       tmp = gfc_build_array_ref (mask, maskindex, NULL);
2897       gfc_add_modify (&body, tmp, se.expr);
2899       /* Advance to the next mask element.  */
2900       tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2901 			 maskindex, gfc_index_one_node);
2902       gfc_add_modify (&body, maskindex, tmp);
2904       /* Generate the loops.  */
2905       tmp = gfc_finish_block (&body);
2906       tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2907       gfc_add_expr_to_block (&block, tmp);
2910   c = code->block->next;
2912   /* TODO: loop merging in FORALL statements.  */
2913   /* Now that we've got a copy of the mask, generate the assignment loops.
      NOTE(review): the enclosing statement-dispatch (presumably a switch
      on c->op) is not visible here -- lines elided by extraction.  */
2919 	  /* A scalar or array assignment.  DO the simple check for
2920 	     lhs to rhs dependencies.  These make a temporary for the
2921 	     rhs and form a second forall block to copy to variable.  */
2922 	  need_temp = check_forall_dependencies(c, &pre, &post);
2924 	  /* Temporaries due to array assignment data dependencies introduce
2925 	     no end of problems.  */
2927 	    gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2928 					nested_forall_info, &block);
2931 	      /* Use the normal assignment copying routines.  */
2932 	      assign = gfc_trans_assignment (c->expr, c->expr2, false);
2934 	      /* Generate body and loops.  */
2935 	      tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2937 	      gfc_add_expr_to_block (&block, tmp);
2940 	  /* Cleanup any temporary symtrees that have been made to deal
2941 	     with dependencies.  */
2943 	    cleanup_forall_symtrees (c);
2948 	  /* Translate WHERE or WHERE construct nested in FORALL.  */
2949 	  gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2952 	/* Pointer assignment inside FORALL.  */
2953 	case EXEC_POINTER_ASSIGN:
2954 	  need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2956 	    gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2957 						nested_forall_info, &block);
2960 	      /* Use the normal assignment copying routines.  */
2961 	      assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2963 	      /* Generate body and loops.  */
2964 	      tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2966 	      gfc_add_expr_to_block (&block, tmp);
	  /* Nested FORALL: recurse with the current nesting info.  */
2971 	  tmp = gfc_trans_forall_1 (c, nested_forall_info);
2972 	  gfc_add_expr_to_block (&block, tmp);
2975 	/* Explicit subroutine calls are prevented by the frontend but interface
2976 	   assignments can legitimately produce them.  */
2977 	case EXEC_ASSIGN_CALL:
2978 	  assign = gfc_trans_call (c, true);
2979 	  tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
2980 	  gfc_add_expr_to_block (&block, tmp);
2990   /* Restore the original index variables.  */
2991   for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2992     gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2994   /* Free the space for var, start, end, step, varexpr.  */
3000   gfc_free (saved_vars);
3002   /* Free the space for this forall_info.  */
3007       /* Free the temporary for the mask.  */
3008       tmp = gfc_call_free (pmask);
3009       gfc_add_expr_to_block (&block, tmp);
3012     pushdecl (maskindex);
3014   gfc_add_block_to_block (&pre, &block);
3015   gfc_add_block_to_block (&pre, &post);
3017   return gfc_finish_block (&pre);
3021 /* Translate the FORALL statement or construct. */
3023 tree gfc_trans_forall (gfc_code * code)
3025 return gfc_trans_forall_1 (code, NULL);
3029 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3030 If the WHERE construct is nested in FORALL, compute the overall temporary
3031 needed by the WHERE mask expression multiplied by the iterator number of
3033 ME is the WHERE mask expression.
3034 MASK is the current execution mask upon input, whose sense may or may
3035 not be inverted as specified by the INVERT argument.
3036 CMASK is the updated execution mask on output, or NULL if not required.
3037 PMASK is the pending execution mask on output, or NULL if not required.
3038 BLOCK is the block in which to place the condition evaluation loops. */
/* Evaluate the WHERE mask expression ME exactly once, updating the
   control mask CMASK and/or the pending mask PMASK element-wise from
   the current execution mask MASK (whose sense is flipped when INVERT
   is true).  MASK_TYPE is the compact logical element type of the mask
   temporaries; the generated code is appended to BLOCK, wrapped in the
   FORALL nest described by NESTED_FORALL_INFO when non-NULL.
   NOTE(review): some interior lines (braces/else branches) are elided
   in this listing -- verify changes against the upstream file.  */
3041 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3042 			 tree mask, bool invert, tree cmask, tree pmask,
3043 			 tree mask_type, stmtblock_t * block)
3048   stmtblock_t body, body1;
3049   tree count, cond, mtmp;
3052   gfc_init_loopinfo (&loop);
3054   lss = gfc_walk_expr (me);
3055   rss = gfc_walk_expr (me);
3057   /* Variable to index the temporary.  */
3058   count = gfc_create_var (gfc_array_index_type, "count");
3059   /* Initialize count.  */
3060   gfc_add_modify (block, count, gfc_index_zero_node);
3062   gfc_start_block (&body);
3064   gfc_init_se (&rse, NULL);
3065   gfc_init_se (&lse, NULL);
     /* Scalar mask expressions (lss == gfc_ss_terminator) need no
	scalarizer; array-valued masks get a full scalarized loop.  */
3067   if (lss == gfc_ss_terminator)
3069       gfc_init_block (&body1);
3073       /* Initialize the loop.  */
3074       gfc_init_loopinfo (&loop);
3076       /* We may need LSS to determine the shape of the expression.  */
3077       gfc_add_ss_to_loop (&loop, lss);
3078       gfc_add_ss_to_loop (&loop, rss);
3080       gfc_conv_ss_startstride (&loop);
3081       gfc_conv_loop_setup (&loop, &me->where);
3083       gfc_mark_ss_chain_used (rss, 1);
3084       /* Start the loop body.  */
3085       gfc_start_scalarized_body (&loop, &body1);
3087       /* Translate the expression.  */
3088       gfc_copy_loopinfo_to_se (&rse, &loop);
3090       gfc_conv_expr (&rse, me);
3093   /* Variable to evaluate mask condition.  */
3094   cond = gfc_create_var (mask_type, "cond");
   /* MTMP caches the (possibly inverted) current mask element so it is
      read only once per iteration.  */
3095   if (mask && (cmask || pmask))
3096     mtmp = gfc_create_var (mask_type, "mask");
3097   else mtmp = NULL_TREE;
3099   gfc_add_block_to_block (&body1, &lse.pre);
3100   gfc_add_block_to_block (&body1, &rse.pre);
3102   gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3104   if (mask && (cmask || pmask))
3106       tmp = gfc_build_array_ref (mask, count, NULL);
	  /* INVERT flips the sense of the incoming execution mask.  */
3108 	tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3109       gfc_add_modify (&body1, mtmp, tmp);
       /* cmask[count] = mask-element AND cond.  */
3114       tmp1 = gfc_build_array_ref (cmask, count, NULL);
3117 	tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3118       gfc_add_modify (&body1, tmp1, tmp);
       /* pmask[count] = mask-element AND NOT cond.  */
3123       tmp1 = gfc_build_array_ref (pmask, count, NULL);
3124       tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3126 	tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3127       gfc_add_modify (&body1, tmp1, tmp);
3130   gfc_add_block_to_block (&body1, &lse.post);
3131   gfc_add_block_to_block (&body1, &rse.post);
3133   if (lss == gfc_ss_terminator)
3135       gfc_add_block_to_block (&body, &body1);
3139       /* Increment count.  */
3140       tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3141 			  gfc_index_one_node);
3142       gfc_add_modify (&body1, count, tmp1);
3144       /* Generate the copying loops.  */
3145       gfc_trans_scalarizing_loops (&loop, &body1);
3147       gfc_add_block_to_block (&body, &loop.pre);
3148       gfc_add_block_to_block (&body, &loop.post);
3150       gfc_cleanup_loop (&loop);
3151       /* TODO: Reuse lss and rss when copying temp->lhs.  Need to be careful
3152 	 as tree nodes in SS may not be valid in different scope.  */
3155   tmp1 = gfc_finish_block (&body);
3156   /* If the WHERE construct is inside FORALL, fill the full temporary.  */
3157   if (nested_forall_info != NULL)
3158     tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3160   gfc_add_expr_to_block (block, tmp1);
3164 /* Translate an assignment statement in a WHERE statement or construct
3165 statement. The MASK expression is used to control which elements
3166 of EXPR1 shall be assigned. The sense of MASK is specified by
/* Translate a single assignment EXPR1 = EXPR2 inside a WHERE statement
   or construct.  MASK is the control-mask temporary selecting which
   elements are assigned; its sense is flipped when INVERT is true.
   COUNT1/COUNT2 index the mask temporary across the scalarized loops.
   Returns the generated statement tree.
   NOTE(review): several interior lines (braces, parameter tail,
   else-branches) are elided in this listing -- verify changes against
   the upstream file.  */
3170 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3171 			tree mask, bool invert,
3172 			tree count1, tree count2,
3178   gfc_ss *lss_section;
3185   tree index, maskexpr;
3188   /* TODO: handle this special case.
3189      Special case a single function returning an array.  */
3190   if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3192       tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3198   /* Assignment of the form lhs = rhs.  */
3199   gfc_start_block (&block);
3201   gfc_init_se (&lse, NULL);
3202   gfc_init_se (&rse, NULL);
3205   lss = gfc_walk_expr (expr1);
3208   /* In each where-assign-stmt, the mask-expr and the variable being
3209      defined shall be arrays of the same shape.  */
3210   gcc_assert (lss != gfc_ss_terminator);
3212   /* The assignment needs scalarization.  */
3215   /* Find a non-scalar SS from the lhs.  */
3216   while (lss_section != gfc_ss_terminator
3217 	 && lss_section->type != GFC_SS_SECTION)
3218     lss_section = lss_section->next;
3220   gcc_assert (lss_section != gfc_ss_terminator);
3222   /* Initialize the scalarizer.  */
3223   gfc_init_loopinfo (&loop);
3226   rss = gfc_walk_expr (expr2);
3227   if (rss == gfc_ss_terminator)
3229       /* The rhs is scalar.  Add a ss for the expression.  */
3230       rss = gfc_get_ss ();
3232       rss->next = gfc_ss_terminator;
3233       rss->type = GFC_SS_SCALAR;
3237   /* Associate the SS with the loop.  */
3238   gfc_add_ss_to_loop (&loop, lss);
3239   gfc_add_ss_to_loop (&loop, rss);
3241   /* Calculate the bounds of the scalarization.  */
3242   gfc_conv_ss_startstride (&loop);
3244   /* Resolve any data dependencies in the statement.  */
3245   gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3247   /* Setup the scalarizing loops.  */
3248   gfc_conv_loop_setup (&loop, &expr2->where);
3250   /* Setup the gfc_se structures.  */
3251   gfc_copy_loopinfo_to_se (&lse, &loop);
3252   gfc_copy_loopinfo_to_se (&rse, &loop);
3255   gfc_mark_ss_chain_used (rss, 1);
   /* loop.temp_ss is non-NULL when the dependency resolver decided a
      temporary is needed between rhs evaluation and lhs store.  */
3256   if (loop.temp_ss == NULL)
3259       gfc_mark_ss_chain_used (lss, 1);
3263       lse.ss = loop.temp_ss;
3264       gfc_mark_ss_chain_used (lss, 3);
3265       gfc_mark_ss_chain_used (loop.temp_ss, 3);
3268   /* Start the scalarized loop body.  */
3269   gfc_start_scalarized_body (&loop, &body);
3271   /* Translate the expression.  */
3272   gfc_conv_expr (&rse, expr2);
3273   if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3275       gfc_conv_tmp_array_ref (&lse);
3276       gfc_advance_se_ss_chain (&lse);
3279     gfc_conv_expr (&lse, expr1);
3281   /* Form the mask expression according to the mask.  */
3283   maskexpr = gfc_build_array_ref (mask, index, NULL);
     /* INVERT selects the ELSEWHERE (complement) sense of the mask.  */
3285     maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3287   /* Use the scalar assignment as is.  */
3289     tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3290 				   loop.temp_ss != NULL, false);
3292     tmp = gfc_conv_operator_assign (&lse, &rse, sym);
   /* Guard the assignment with the mask element.  */
3294   tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3296   gfc_add_expr_to_block (&body, tmp);
3298   if (lss == gfc_ss_terminator)
3300       /* Increment count1.  */
3301       tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3302 			 count1, gfc_index_one_node);
3303       gfc_add_modify (&body, count1, tmp);
3305       /* Use the scalar assignment as is.  */
3306       gfc_add_block_to_block (&block, &body);
3310       gcc_assert (lse.ss == gfc_ss_terminator
3311 		  && rse.ss == gfc_ss_terminator);
3313       if (loop.temp_ss != NULL)
3315 	  /* Increment count1 before finish the main body of a scalarized
3317 	  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3318 			     count1, gfc_index_one_node);
3319 	  gfc_add_modify (&body, count1, tmp);
3320 	  gfc_trans_scalarized_loop_boundary (&loop, &body);
3322 	  /* We need to copy the temporary to the actual lhs.  */
3323 	  gfc_init_se (&lse, NULL);
3324 	  gfc_init_se (&rse, NULL);
3325 	  gfc_copy_loopinfo_to_se (&lse, &loop);
3326 	  gfc_copy_loopinfo_to_se (&rse, &loop);
3328 	  rse.ss = loop.temp_ss;
3331 	  gfc_conv_tmp_array_ref (&rse);
3332 	  gfc_advance_se_ss_chain (&rse);
3333 	  gfc_conv_expr (&lse, expr1);
3335 	  gcc_assert (lse.ss == gfc_ss_terminator
3336 		      && rse.ss == gfc_ss_terminator);
3338 	  /* Form the mask expression according to the mask tree list.  */
3340 	  maskexpr = gfc_build_array_ref (mask, index, NULL);
3342 	    maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3345 	  /* Use the scalar assignment as is.  */
3346 	  tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3347 	  tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3348 	  gfc_add_expr_to_block (&body, tmp);
3350 	  /* Increment count2.  */
3351 	  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3352 			     count2, gfc_index_one_node);
3353 	  gfc_add_modify (&body, count2, tmp);
3357 	  /* Increment count1.  */
3358 	  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3359 			     count1, gfc_index_one_node);
3360 	  gfc_add_modify (&body, count1, tmp);
3363       /* Generate the copying loops.  */
3364       gfc_trans_scalarizing_loops (&loop, &body);
3366       /* Wrap the whole thing up.  */
3367       gfc_add_block_to_block (&block, &loop.pre);
3368       gfc_add_block_to_block (&block, &loop.post);
3369       gfc_cleanup_loop (&loop);
3372   return gfc_finish_block (&block);
3376 /* Translate the WHERE construct or statement.
3377 This function can be called iteratively to translate the nested WHERE
3378 construct or statement.
3379 MASK is the control mask. */
3382 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3383 forall_info * nested_forall_info, stmtblock_t * block)
3385 stmtblock_t inner_size_body;
3386 tree inner_size, size;
3395 tree count1, count2;
3399 tree pcmask = NULL_TREE;
3400 tree ppmask = NULL_TREE;
3401 tree cmask = NULL_TREE;
3402 tree pmask = NULL_TREE;
3403 gfc_actual_arglist *arg;
3405 /* the WHERE statement or the WHERE construct statement. */
3406 cblock = code->block;
3408 /* As the mask array can be very big, prefer compact boolean types. */
3409 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3411 /* Determine which temporary masks are needed. */
3414 /* One clause: No ELSEWHEREs. */
3415 need_cmask = (cblock->next != 0);
3418 else if (cblock->block->block)
3420 /* Three or more clauses: Conditional ELSEWHEREs. */
3424 else if (cblock->next)
3426 /* Two clauses, the first non-empty. */
3428 need_pmask = (mask != NULL_TREE
3429 && cblock->block->next != 0);
3431 else if (!cblock->block->next)
3433 /* Two clauses, both empty. */
3437 /* Two clauses, the first empty, the second non-empty. */
3440 need_cmask = (cblock->block->expr != 0);
3449 if (need_cmask || need_pmask)
3451 /* Calculate the size of temporary needed by the mask-expr. */
3452 gfc_init_block (&inner_size_body);
3453 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3454 &inner_size_body, &lss, &rss);
3456 /* Calculate the total size of temporary needed. */
3457 size = compute_overall_iter_number (nested_forall_info, inner_size,
3458 &inner_size_body, block);
3460 /* Check whether the size is negative. */
3461 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3462 gfc_index_zero_node);
3463 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3464 gfc_index_zero_node, size);
3465 size = gfc_evaluate_now (size, block);
3467 /* Allocate temporary for WHERE mask if needed. */
3469 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3472 /* Allocate temporary for !mask if needed. */
3474 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3480 /* Each time around this loop, the where clause is conditional
3481 on the value of mask and invert, which are updated at the
3482 bottom of the loop. */
3484 /* Has mask-expr. */
3487 /* Ensure that the WHERE mask will be evaluated exactly once.
3488 If there are no statements in this WHERE/ELSEWHERE clause,
3489 then we don't need to update the control mask (cmask).
3490 If this is the last clause of the WHERE construct, then
3491 we don't need to update the pending control mask (pmask). */
3493 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3495 cblock->next ? cmask : NULL_TREE,
3496 cblock->block ? pmask : NULL_TREE,
3499 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3501 (cblock->next || cblock->block)
3502 ? cmask : NULL_TREE,