1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
27 #include "coretypes.h"
29 #include "tree-gimple.h"
36 #include "trans-stmt.h"
37 #include "trans-types.h"
38 #include "trans-array.h"
39 #include "trans-const.h"
41 #include "dependency.h"
43 typedef struct iter_info
49 struct iter_info *next;
53 typedef struct forall_info
61 struct forall_info *outer;
62 struct forall_info *next_nest;
66 static void gfc_trans_where_2 (gfc_code *, tree, bool,
67 forall_info *, stmtblock_t *);
69 /* Translate an F95 label number to a LABEL_EXPR. */
72 gfc_trans_label_here (gfc_code * code)
74 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
78 /* Given a variable expression which has been ASSIGNed to, find the decl
79 containing the auxiliary variables. For variables in common blocks this
83 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
85 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
86 gfc_conv_expr (se, expr);
87 /* Deal with a variable in a common block. Get the field declaration. */
88 if (TREE_CODE (se->expr) == COMPONENT_REF)
89 se->expr = TREE_OPERAND (se->expr, 1);
90 /* Deal with a dummy argument. Get the parameter declaration. */
91 else if (TREE_CODE (se->expr) == INDIRECT_REF)
92 se->expr = TREE_OPERAND (se->expr, 0);
95 /* Translate a label assignment statement. */
98 gfc_trans_label_assign (gfc_code * code)
108 /* Start a new block. */
109 gfc_init_se (&se, NULL);
110 gfc_start_block (&se.pre);
111 gfc_conv_label_variable (&se, code->expr);
113 len = GFC_DECL_STRING_LEN (se.expr);
114 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
116 label_tree = gfc_get_label_decl (code->label);
118 if (code->label->defined == ST_LABEL_TARGET)
120 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
121 len_tree = integer_minus_one_node;
125 label_str = code->label->format->value.character.string;
126 label_len = code->label->format->value.character.length;
127 len_tree = build_int_cst (NULL_TREE, label_len);
128 label_tree = gfc_build_string_const (label_len + 1, label_str);
129 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
132 gfc_add_modify_expr (&se.pre, len, len_tree);
133 gfc_add_modify_expr (&se.pre, addr, label_tree);
135 return gfc_finish_block (&se.pre);
138 /* Translate a GOTO statement. */
141 gfc_trans_goto (gfc_code * code)
143 locus loc = code->loc;
149 if (code->label != NULL)
150 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
153 gfc_init_se (&se, NULL);
154 gfc_start_block (&se.pre);
155 gfc_conv_label_variable (&se, code->expr);
156 tmp = GFC_DECL_STRING_LEN (se.expr);
157 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
158 build_int_cst (TREE_TYPE (tmp), -1));
159 gfc_trans_runtime_check (tmp, "Assigned label is not a target label",
162 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
167 target = build1 (GOTO_EXPR, void_type_node, assigned_goto);
168 gfc_add_expr_to_block (&se.pre, target);
169 return gfc_finish_block (&se.pre);
172 /* Check the label list. */
175 target = gfc_get_label_decl (code->label);
176 tmp = gfc_build_addr_expr (pvoid_type_node, target);
177 tmp = build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
178 tmp = build3_v (COND_EXPR, tmp,
179 build1 (GOTO_EXPR, void_type_node, target),
180 build_empty_stmt ());
181 gfc_add_expr_to_block (&se.pre, tmp);
184 while (code != NULL);
185 gfc_trans_runtime_check (boolean_true_node,
186 "Assigned label is not in the list", &se.pre, &loc);
188 return gfc_finish_block (&se.pre);
192 /* Translate an ENTRY statement. Just adds a label for this entry point. */
194 gfc_trans_entry (gfc_code * code)
196 return build1_v (LABEL_EXPR, code->ext.entry->label);
200 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
201 elemental subroutines. Make temporaries for output arguments if any such
202 dependencies are found. Output arguments are chosen because internal_unpack
203 can be used, as is, to copy the result back to the variable. */
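/* Illustrative example (hypothetical names): for a defined assignment
   implemented by an elemental subroutine ASSGN with an INTENT(OUT)
   first argument, a call such as

     CALL assgn (a(1:n), a(n:1:-1))

   has the INTENT(OUT) actual overlapping the INTENT(IN) actual, so the
   result is built in a temporary here and copied back afterwards with
   internal_unpack.  */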
205 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
206 gfc_symbol * sym, gfc_actual_arglist * arg)
208 gfc_actual_arglist *arg0;
210 gfc_formal_arglist *formal;
211 gfc_loopinfo tmp_loop;
223 if (loopse->ss == NULL)
228 formal = sym->formal;
230 /* Loop over all the arguments testing for dependencies. */
231 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
237 /* Obtain the info structure for the current argument. */
239 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
243 info = &ss->data.info;
247 /* If there is a dependency, create a temporary and use it
248 instead of the variable. */
249 fsym = formal ? formal->sym : NULL;
250 if (e->expr_type == EXPR_VARIABLE
252 && fsym->attr.intent == INTENT_OUT
253 && gfc_check_fncall_dependency (e, INTENT_OUT, sym, arg0))
255 /* Make a local loopinfo for the temporary creation, so that
256 none of the other ss->info's have to be renormalized. */
257 gfc_init_loopinfo (&tmp_loop);
258 for (n = 0; n < info->dimen; n++)
260 tmp_loop.to[n] = loopse->loop->to[n];
261 tmp_loop.from[n] = loopse->loop->from[n];
262 tmp_loop.order[n] = loopse->loop->order[n];
265 /* Generate the temporary. Merge the block so that the
266 declarations are put at the right binding level. */
267 size = gfc_create_var (gfc_array_index_type, NULL);
268 data = gfc_create_var (pvoid_type_node, NULL);
269 gfc_start_block (&block);
270 tmp = gfc_typenode_for_spec (&e->ts);
271 tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
272 &tmp_loop, info, tmp,
273 false, true, false, false);
274 gfc_add_modify_expr (&se->pre, size, tmp);
275 tmp = fold_convert (pvoid_type_node, info->data);
276 gfc_add_modify_expr (&se->pre, data, tmp);
277 gfc_merge_block_scope (&block);
279 /* Obtain the argument descriptor for unpacking. */
280 gfc_init_se (&parmse, NULL);
281 parmse.want_pointer = 1;
282 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
283 gfc_add_block_to_block (&se->pre, &parmse.pre);
285 /* Calculate the offset for the temporary. */
286 offset = gfc_index_zero_node;
287 for (n = 0; n < info->dimen; n++)
289 tmp = gfc_conv_descriptor_stride (info->descriptor,
291 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
292 loopse->loop->from[n], tmp);
293 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
296 info->offset = gfc_create_var (gfc_array_index_type, NULL);
297 gfc_add_modify_expr (&se->pre, info->offset, offset);
299 /* Copy the result back using unpack. */
300 tmp = gfc_chainon_list (NULL_TREE, parmse.expr);
301 tmp = gfc_chainon_list (tmp, data);
302 tmp = build_function_call_expr (gfor_fndecl_in_unpack, tmp);
303 gfc_add_expr_to_block (&se->post, tmp);
305 gfc_add_block_to_block (&se->post, &parmse.post);
311 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
314 gfc_trans_call (gfc_code * code, bool dependency_check)
318 int has_alternate_specifier;
320 /* A CALL starts a new block because the actual arguments may have to
321 be evaluated first. */
322 gfc_init_se (&se, NULL);
323 gfc_start_block (&se.pre);
325 gcc_assert (code->resolved_sym);
327 ss = gfc_ss_terminator;
328 if (code->resolved_sym->attr.elemental)
329 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
331 /* This is not an elemental subroutine call with array valued arguments. */
332 if (ss == gfc_ss_terminator)
335 /* Translate the call. */
336 has_alternate_specifier
337 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual);
339 /* A subroutine without side-effect, by definition, does nothing! */
340 TREE_SIDE_EFFECTS (se.expr) = 1;
342 /* Chain the pieces together and return the block. */
343 if (has_alternate_specifier)
345 gfc_code *select_code;
347 select_code = code->next;
348 gcc_assert (select_code->op == EXEC_SELECT);
349 sym = select_code->expr->symtree->n.sym;
350 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
351 gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
354 gfc_add_expr_to_block (&se.pre, se.expr);
356 gfc_add_block_to_block (&se.pre, &se.post);
361 /* An elemental subroutine call with array valued arguments has
368 /* gfc_walk_elemental_function_args renders the ss chain in the
369 reverse order to the actual argument order. */
370 ss = gfc_reverse_ss (ss);
372 /* Initialize the loop. */
373 gfc_init_se (&loopse, NULL);
374 gfc_init_loopinfo (&loop);
375 gfc_add_ss_to_loop (&loop, ss);
377 gfc_conv_ss_startstride (&loop);
378 gfc_conv_loop_setup (&loop);
379 gfc_mark_ss_chain_used (ss, 1);
381 /* Convert the arguments, checking for dependencies. */
382 gfc_copy_loopinfo_to_se (&loopse, &loop);
385 /* For operator assignment, we need to do dependency checking.
386 We also check the intent of the parameters. */
387 if (dependency_check)
390 sym = code->resolved_sym;
391 gcc_assert (sym->formal->sym->attr.intent == INTENT_OUT);
392 gcc_assert (sym->formal->next->sym->attr.intent == INTENT_IN);
393 gfc_conv_elemental_dependencies (&se, &loopse, sym,
397 /* Generate the loop body. */
398 gfc_start_scalarized_body (&loop, &body);
399 gfc_init_block (&block);
401 /* Add the subroutine call to the block. */
402 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual);
403 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
405 gfc_add_block_to_block (&block, &loopse.pre);
406 gfc_add_block_to_block (&block, &loopse.post);
408 /* Finish up the loop block and the loop. */
409 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
410 gfc_trans_scalarizing_loops (&loop, &body);
411 gfc_add_block_to_block (&se.pre, &loop.pre);
412 gfc_add_block_to_block (&se.pre, &loop.post);
413 gfc_add_block_to_block (&se.pre, &se.post);
414 gfc_cleanup_loop (&loop);
417 return gfc_finish_block (&se.pre);
421 /* Translate the RETURN statement. */
424 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
432 /* If code->expr is not NULL, this return statement must appear
433 in a subroutine and current_fake_result_decl has already
436 result = gfc_get_fake_result_decl (NULL, 0);
439 gfc_warning ("An alternate return at %L without a * dummy argument",
441 return build1_v (GOTO_EXPR, gfc_get_return_label ());
444 /* Start a new block for this statement. */
445 gfc_init_se (&se, NULL);
446 gfc_start_block (&se.pre);
448 gfc_conv_expr (&se, code->expr);
450 tmp = build2 (MODIFY_EXPR, TREE_TYPE (result), result, se.expr);
451 gfc_add_expr_to_block (&se.pre, tmp);
453 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
454 gfc_add_expr_to_block (&se.pre, tmp);
455 gfc_add_block_to_block (&se.pre, &se.post);
456 return gfc_finish_block (&se.pre);
459 return build1_v (GOTO_EXPR, gfc_get_return_label ());
463 /* Translate the PAUSE statement. We have to translate this statement
464 to a runtime library call. */
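/* E.g. (sketch): "PAUSE 5" takes the numeric branch below and calls the
   routine bound to gfor_fndecl_pause_numeric with the stop code, while
   "PAUSE 'Ready'" takes the string branch and passes the character
   expression and its length to gfor_fndecl_pause_string.  */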
467 gfc_trans_pause (gfc_code * code)
469 tree gfc_int4_type_node = gfc_get_int_type (4);
475 /* Start a new block for this statement. */
476 gfc_init_se (&se, NULL);
477 gfc_start_block (&se.pre);
480 if (code->expr == NULL)
482 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
483 args = gfc_chainon_list (NULL_TREE, tmp);
484 fndecl = gfor_fndecl_pause_numeric;
488 gfc_conv_expr_reference (&se, code->expr);
489 args = gfc_chainon_list (NULL_TREE, se.expr);
490 args = gfc_chainon_list (args, se.string_length);
491 fndecl = gfor_fndecl_pause_string;
494 tmp = build_function_call_expr (fndecl, args);
495 gfc_add_expr_to_block (&se.pre, tmp);
497 gfc_add_block_to_block (&se.pre, &se.post);
499 return gfc_finish_block (&se.pre);
503 /* Translate the STOP statement. We have to translate this statement
504 to a runtime library call. */
507 gfc_trans_stop (gfc_code * code)
509 tree gfc_int4_type_node = gfc_get_int_type (4);
515 /* Start a new block for this statement. */
516 gfc_init_se (&se, NULL);
517 gfc_start_block (&se.pre);
520 if (code->expr == NULL)
522 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
523 args = gfc_chainon_list (NULL_TREE, tmp);
524 fndecl = gfor_fndecl_stop_numeric;
528 gfc_conv_expr_reference (&se, code->expr);
529 args = gfc_chainon_list (NULL_TREE, se.expr);
530 args = gfc_chainon_list (args, se.string_length);
531 fndecl = gfor_fndecl_stop_string;
534 tmp = build_function_call_expr (fndecl, args);
535 gfc_add_expr_to_block (&se.pre, tmp);
537 gfc_add_block_to_block (&se.pre, &se.post);
539 return gfc_finish_block (&se.pre);
543 /* Generate GENERIC for the IF construct. This function also deals with
544 the simple IF statement, because the front end translates the IF
545 statement into an IF construct.
577 where COND_S is the simplified version of the predicate. PRE_COND_S
578 are the pre side-effects produced by the translation of the
580 We need to build the chain recursively; otherwise we run into
581 problems with folding incomplete statements. */
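/* As a sketch, a construct such as

     IF (c1) THEN
       b1
     ELSE IF (c2) THEN
       b2
     ELSE
       b3
     END IF

   is built bottom-up into nested conditionals, roughly
   "if (c1) b1; else { if (c2) b2; else b3; }".  */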
584 gfc_trans_if_1 (gfc_code * code)
589 /* Check for an unconditional ELSE clause. */
591 return gfc_trans_code (code->next);
593 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
594 gfc_init_se (&if_se, NULL);
595 gfc_start_block (&if_se.pre);
597 /* Calculate the IF condition expression. */
598 gfc_conv_expr_val (&if_se, code->expr);
600 /* Translate the THEN clause. */
601 stmt = gfc_trans_code (code->next);
603 /* Translate the ELSE clause. */
605 elsestmt = gfc_trans_if_1 (code->block);
607 elsestmt = build_empty_stmt ();
609 /* Build the condition expression and add it to the condition block. */
610 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
612 gfc_add_expr_to_block (&if_se.pre, stmt);
614 /* Finish off this statement. */
615 return gfc_finish_block (&if_se.pre);
619 gfc_trans_if (gfc_code * code)
621 /* Ignore the top EXEC_IF; it only announces an IF construct. The
622 actual code we must translate is in code->block. */
624 return gfc_trans_if_1 (code->block);
628 /* Translate an arithmetic IF expression.
630 IF (cond) label1, label2, label3 translates to
642 An optimized version can be generated in case of equal labels.
643 E.g., if label1 is equal to label2, we can translate it to
652 gfc_trans_arithmetic_if (gfc_code * code)
660 /* Start a new block. */
661 gfc_init_se (&se, NULL);
662 gfc_start_block (&se.pre);
664 /* Pre-evaluate COND. */
665 gfc_conv_expr_val (&se, code->expr);
666 se.expr = gfc_evaluate_now (se.expr, &se.pre);
668 /* Build something to compare with. */
669 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
671 if (code->label->value != code->label2->value)
673 /* If (cond < 0) take branch1 else take branch2.
674 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
675 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
676 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
678 if (code->label->value != code->label3->value)
679 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
681 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
683 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
686 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
688 if (code->label->value != code->label3->value
689 && code->label2->value != code->label3->value)
691 /* if (cond <= 0) take branch1 else take branch2. */
692 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
693 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
694 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
697 /* Append the COND_EXPR to the evaluation of COND, and return. */
698 gfc_add_expr_to_block (&se.pre, branch1);
699 return gfc_finish_block (&se.pre);
703 /* Translate the simple DO construct. This is where the loop variable has
704 integer type and step +-1. We can't use this in the general case
705 because integer overflow and floating point errors could give incorrect
707 We translate a do loop from:
709 DO dovar = from, to, step
715 [Evaluate loop bounds and step]
717 if ((step > 0) ? (dovar <= to) : (dovar >= to))
723 cond = (dovar == to);
725 if (cond) goto end_label;
730 This helps the optimizers by avoiding the extra induction variable
731 used in the general case. */
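/* Concretely (illustrative pseudo-code, not literal GENERIC): for
   "DO i = 1, n" with step +1 the scheme above specializes to

     i = 1;
     if (i <= n)
       for (;;)
         {
           body;
         cycle_label:
           cond = (i == n);
           i = i + 1;
           if (cond) goto end_label;
         }
   end_label:
*/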
734 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
735 tree from, tree to, tree step)
744 type = TREE_TYPE (dovar);
746 /* Initialize the DO variable: dovar = from. */
747 gfc_add_modify_expr (pblock, dovar, from);
749 /* Cycle and exit statements are implemented with gotos. */
750 cycle_label = gfc_build_label_decl (NULL_TREE);
751 exit_label = gfc_build_label_decl (NULL_TREE);
753 /* Put the labels where they can be found later. See gfc_trans_do(). */
754 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
757 gfc_start_block (&body);
759 /* Main loop body. */
760 tmp = gfc_trans_code (code->block->next);
761 gfc_add_expr_to_block (&body, tmp);
763 /* Label for cycle statements (if needed). */
764 if (TREE_USED (cycle_label))
766 tmp = build1_v (LABEL_EXPR, cycle_label);
767 gfc_add_expr_to_block (&body, tmp);
770 /* Evaluate the loop condition. */
771 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
772 cond = gfc_evaluate_now (cond, &body);
774 /* Increment the loop variable. */
775 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
776 gfc_add_modify_expr (&body, dovar, tmp);
779 tmp = build1_v (GOTO_EXPR, exit_label);
780 TREE_USED (exit_label) = 1;
781 tmp = fold_build3 (COND_EXPR, void_type_node,
782 cond, tmp, build_empty_stmt ());
783 gfc_add_expr_to_block (&body, tmp);
785 /* Finish the loop body. */
786 tmp = gfc_finish_block (&body);
787 tmp = build1_v (LOOP_EXPR, tmp);
789 /* Only execute the loop if the number of iterations is positive. */
790 if (tree_int_cst_sgn (step) > 0)
791 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
793 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
794 tmp = fold_build3 (COND_EXPR, void_type_node,
795 cond, tmp, build_empty_stmt ());
796 gfc_add_expr_to_block (pblock, tmp);
798 /* Add the exit label. */
799 tmp = build1_v (LABEL_EXPR, exit_label);
800 gfc_add_expr_to_block (pblock, tmp);
802 return gfc_finish_block (pblock);
805 /* Translate the DO construct. This obviously is one of the most
806 important ones to get right with any compiler, but especially
809 We special case some loop forms as described in gfc_trans_simple_do.
810 For other cases we implement them with a separate loop count,
811 as described in the standard.
813 We translate a do loop from:
815 DO dovar = from, to, step
821 [evaluate loop bounds and step]
822 count = (to + step - from) / step;
830 if (count <= 0) goto exit_label;
834 TODO: Large loop counts
835 The code above assumes the loop count fits into a signed integer kind,
836 i.e. it does not work for loop counts > 2^31 for integer(kind=4) variables.
837 We must support the full range. */
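/* Worked example of the count formula (illustrative): DO i = 1, 10, 3
   gives count = (10 + 3 - 1) / 3 = 4, i.e. the iterations i = 1, 4, 7, 10.  */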
840 gfc_trans_do (gfc_code * code)
857 gfc_start_block (&block);
859 /* Evaluate all the expressions in the iterator. */
860 gfc_init_se (&se, NULL);
861 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
862 gfc_add_block_to_block (&block, &se.pre);
864 type = TREE_TYPE (dovar);
866 gfc_init_se (&se, NULL);
867 gfc_conv_expr_val (&se, code->ext.iterator->start);
868 gfc_add_block_to_block (&block, &se.pre);
869 from = gfc_evaluate_now (se.expr, &block);
871 gfc_init_se (&se, NULL);
872 gfc_conv_expr_val (&se, code->ext.iterator->end);
873 gfc_add_block_to_block (&block, &se.pre);
874 to = gfc_evaluate_now (se.expr, &block);
876 gfc_init_se (&se, NULL);
877 gfc_conv_expr_val (&se, code->ext.iterator->step);
878 gfc_add_block_to_block (&block, &se.pre);
879 step = gfc_evaluate_now (se.expr, &block);
881 /* Special case simple loops. */
882 if (TREE_CODE (type) == INTEGER_TYPE
883 && (integer_onep (step)
884 || tree_int_cst_equal (step, integer_minus_one_node)))
885 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
887 /* Initialize loop count. This code is executed before we enter the
888 loop body. We generate: count = (to + step - from) / step. */
890 tmp = fold_build2 (MINUS_EXPR, type, step, from);
891 tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
892 if (TREE_CODE (type) == INTEGER_TYPE)
894 tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
895 count = gfc_create_var (type, "count");
899 /* TODO: We could use the same width as the real type.
900 This would probably cause more problems than it solves
901 when we implement "long double" types. */
902 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
903 tmp = fold_build1 (FIX_TRUNC_EXPR, gfc_array_index_type, tmp);
904 count = gfc_create_var (gfc_array_index_type, "count");
906 gfc_add_modify_expr (&block, count, tmp);
908 count_one = build_int_cst (TREE_TYPE (count), 1);
910 /* Initialize the DO variable: dovar = from. */
911 gfc_add_modify_expr (&block, dovar, from);
914 gfc_start_block (&body);
916 /* Cycle and exit statements are implemented with gotos. */
917 cycle_label = gfc_build_label_decl (NULL_TREE);
918 exit_label = gfc_build_label_decl (NULL_TREE);
920 /* Start with the loop condition. Loop until count <= 0. */
921 cond = fold_build2 (LE_EXPR, boolean_type_node, count,
922 build_int_cst (TREE_TYPE (count), 0));
923 tmp = build1_v (GOTO_EXPR, exit_label);
924 TREE_USED (exit_label) = 1;
925 tmp = fold_build3 (COND_EXPR, void_type_node,
926 cond, tmp, build_empty_stmt ());
927 gfc_add_expr_to_block (&body, tmp);
929 /* Put these labels where they can be found later. We put the
930 labels in a TREE_LIST node (because TREE_CHAIN is already
931 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
932 label in TREE_VALUE (backend_decl). */
934 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
936 /* Main loop body. */
937 tmp = gfc_trans_code (code->block->next);
938 gfc_add_expr_to_block (&body, tmp);
940 /* Label for cycle statements (if needed). */
941 if (TREE_USED (cycle_label))
943 tmp = build1_v (LABEL_EXPR, cycle_label);
944 gfc_add_expr_to_block (&body, tmp);
947 /* Increment the loop variable. */
948 tmp = build2 (PLUS_EXPR, type, dovar, step);
949 gfc_add_modify_expr (&body, dovar, tmp);
951 /* Decrement the loop count. */
952 tmp = build2 (MINUS_EXPR, TREE_TYPE (count), count, count_one);
953 gfc_add_modify_expr (&body, count, tmp);
955 /* End of loop body. */
956 tmp = gfc_finish_block (&body);
958 /* The for loop itself. */
959 tmp = build1_v (LOOP_EXPR, tmp);
960 gfc_add_expr_to_block (&block, tmp);
962 /* Add the exit label. */
963 tmp = build1_v (LABEL_EXPR, exit_label);
964 gfc_add_expr_to_block (&block, tmp);
966 return gfc_finish_block (&block);
970 /* Translate the DO WHILE construct.
983 if (! cond) goto exit_label;
989 Because the evaluation of the exit condition `cond' may have side
990 effects, we can't do much for empty loop bodies. The backend optimizers
991 should be smart enough to eliminate any dead loops. */
994 gfc_trans_do_while (gfc_code * code)
1002 /* Everything we build here is part of the loop body. */
1003 gfc_start_block (&block);
1005 /* Cycle and exit statements are implemented with gotos. */
1006 cycle_label = gfc_build_label_decl (NULL_TREE);
1007 exit_label = gfc_build_label_decl (NULL_TREE);
1009 /* Put the labels where they can be found later. See gfc_trans_do(). */
1010 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1012 /* Create a GIMPLE version of the exit condition. */
1013 gfc_init_se (&cond, NULL);
1014 gfc_conv_expr_val (&cond, code->expr);
1015 gfc_add_block_to_block (&block, &cond.pre);
1016 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1018 /* Build "IF (! cond) GOTO exit_label". */
1019 tmp = build1_v (GOTO_EXPR, exit_label);
1020 TREE_USED (exit_label) = 1;
1021 tmp = fold_build3 (COND_EXPR, void_type_node,
1022 cond.expr, tmp, build_empty_stmt ());
1023 gfc_add_expr_to_block (&block, tmp);
1025 /* The main body of the loop. */
1026 tmp = gfc_trans_code (code->block->next);
1027 gfc_add_expr_to_block (&block, tmp);
1029 /* Label for cycle statements (if needed). */
1030 if (TREE_USED (cycle_label))
1032 tmp = build1_v (LABEL_EXPR, cycle_label);
1033 gfc_add_expr_to_block (&block, tmp);
1036 /* End of loop body. */
1037 tmp = gfc_finish_block (&block);
1039 gfc_init_block (&block);
1040 /* Build the loop. */
1041 tmp = build1_v (LOOP_EXPR, tmp);
1042 gfc_add_expr_to_block (&block, tmp);
1044 /* Add the exit label. */
1045 tmp = build1_v (LABEL_EXPR, exit_label);
1046 gfc_add_expr_to_block (&block, tmp);
1048 return gfc_finish_block (&block);
1052 /* Translate the SELECT CASE construct for INTEGER case expressions,
1053 without killing all potential optimizations. The problem is that
1054 Fortran allows unbounded cases, but the back-end does not, so we
1055 need to intercept those before we enter the equivalent SWITCH_EXPR
1058 For example, we translate this,
1061 CASE (:100,101,105:115)
1071 to the GENERIC equivalent,
1075 case (minimum value for typeof(expr)) ... 100:
1081 case 200 ... (maximum value for typeof(expr)):
1098 gfc_trans_integer_select (gfc_code * code)
1108 gfc_start_block (&block);
1110 /* Calculate the switch expression. */
1111 gfc_init_se (&se, NULL);
1112 gfc_conv_expr_val (&se, code->expr);
1113 gfc_add_block_to_block (&block, &se.pre);
1115 end_label = gfc_build_label_decl (NULL_TREE);
1117 gfc_init_block (&body);
1119 for (c = code->block; c; c = c->block)
1121 for (cp = c->ext.case_list; cp; cp = cp->next)
1126 /* Assume it's the default case. */
1127 low = high = NULL_TREE;
1131 low = gfc_conv_constant_to_tree (cp->low);
1133 /* If there's only a lower bound, set the high bound to the
1134 maximum value of the case expression. */
1136 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1141 /* Three cases are possible here:
1143 1) There is no lower bound, e.g. CASE (:N).
1144 2) There is a lower bound .NE. high bound, that is
1145 a case range, e.g. CASE (N:M) where M>N (we make
1146 sure that M>N during type resolution).
1147 3) There is a lower bound, and it has the same value
1148 as the high bound, e.g. CASE (N:N). This is our
1149 internal representation of CASE(N).
1151 In the first and second case, we need to set a value for
1152 high. In the third case, we don't because the GCC middle
1153 end represents a single case value by just letting high be
1154 a NULL_TREE. We can't do that because we need to be able
1155 to represent unbounded cases. */
1159 && mpz_cmp (cp->low->value.integer,
1160 cp->high->value.integer) != 0))
1161 high = gfc_conv_constant_to_tree (cp->high);
1163 /* Unbounded case. */
1165 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1168 /* Build a label. */
1169 label = gfc_build_label_decl (NULL_TREE);
1171 /* Add this case label, passing 'label' so that it matches
1172 what the GCC back end expects. */
1173 tmp = build3 (CASE_LABEL_EXPR, void_type_node, low, high, label);
1174 gfc_add_expr_to_block (&body, tmp);
1177 /* Add the statements for this case. */
1178 tmp = gfc_trans_code (c->next);
1179 gfc_add_expr_to_block (&body, tmp);
1181 /* Break to the end of the construct. */
1182 tmp = build1_v (GOTO_EXPR, end_label);
1183 gfc_add_expr_to_block (&body, tmp);
1186 tmp = gfc_finish_block (&body);
1187 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1188 gfc_add_expr_to_block (&block, tmp);
1190 tmp = build1_v (LABEL_EXPR, end_label);
1191 gfc_add_expr_to_block (&block, tmp);
1193 return gfc_finish_block (&block);
1197 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1199 There are only two cases possible here, even though the standard
1200 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1201 .FALSE., and DEFAULT.
1203 We never generate more than two blocks here. Instead, we always
1204 try to eliminate the DEFAULT case. This way, we can translate this
1205 kind of SELECT construct to a simple
1209 expression in GENERIC. */
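/* E.g. (sketch):

     SELECT CASE (flag)
     CASE (.TRUE.)
       b1
     CASE DEFAULT
       b2
     END SELECT

   collapses to the single conditional "if (flag) b1; else b2;".  */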
1212 gfc_trans_logical_select (gfc_code * code)
1215 gfc_code *t, *f, *d;
1220 /* Assume we don't have any cases at all. */
1223 /* Now see which ones we actually do have. We can have at most two
1224 cases in a single case list: one for .TRUE. and one for .FALSE.
1225 The default case is always separate. If the cases for .TRUE. and
1226 .FALSE. are in the same case list, the block for that case list
1227 is always executed, and we don't generate a COND_EXPR. */
1228 for (c = code->block; c; c = c->block)
1230 for (cp = c->ext.case_list; cp; cp = cp->next)
1234 if (cp->low->value.logical == 0) /* .FALSE. */
1236 else /* if (cp->low->value.logical != 0), thus .TRUE. */
1244 /* Start a new block. */
1245 gfc_start_block (&block);
1247 /* Calculate the switch expression. We always need to do this
1248 because it may have side effects. */
1249 gfc_init_se (&se, NULL);
1250 gfc_conv_expr_val (&se, code->expr);
1251 gfc_add_block_to_block (&block, &se.pre);
1253 if (t == f && t != NULL)
1255 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1256 translate the code for these cases, append it to the current
1258 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1262 tree true_tree, false_tree, stmt;
1264 true_tree = build_empty_stmt ();
1265 false_tree = build_empty_stmt ();
1267 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1268 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1269 make the missing case the default case. */
1270 if (t != NULL && f != NULL)
1280 /* Translate the code for each of these blocks, and append it to
1281 the current block. */
1283 true_tree = gfc_trans_code (t->next);
1286 false_tree = gfc_trans_code (f->next);
1288 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1289 true_tree, false_tree);
1290 gfc_add_expr_to_block (&block, stmt);
1293 return gfc_finish_block (&block);
1297 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1298 Instead of generating compares and jumps, it is far simpler to
1299 generate a data structure describing the cases in order and call a
1300 library subroutine that locates the right case.
1301 This is particularly true because this is the only case where we
1302 might have to dispose of a temporary.
1303 The library subroutine returns a pointer to jump to or NULL if no
1304 branches are to be taken. */
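/* Sketch of the generated shape (names illustrative, not literal output):

     static struct _jump_struct table[] =
       { { low_string, low_len, high_string, high_len, &&label_k }, ... };
     label = select_string (&table, n, &&end_label, selector, selector_len);
     goto *label;
   label_1: block_1; goto end_label;
     ...
   end_label:

   where select_string stands for the runtime routine bound to
   gfor_fndecl_select_string below.  */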
1307 gfc_trans_character_select (gfc_code *code)
1309 tree init, node, end_label, tmp, type, args, *labels;
1311 stmtblock_t block, body;
1317 static tree select_struct;
1318 static tree ss_string1, ss_string1_len;
1319 static tree ss_string2, ss_string2_len;
1320 static tree ss_target;
1322 if (select_struct == NULL)
1324 tree gfc_int4_type_node = gfc_get_int_type (4);
1326 select_struct = make_node (RECORD_TYPE);
1327 TYPE_NAME (select_struct) = get_identifier ("_jump_struct");
1330 #define ADD_FIELD(NAME, TYPE) \
1331 ss_##NAME = gfc_add_field_to_struct \
1332 (&(TYPE_FIELDS (select_struct)), select_struct, \
1333 get_identifier (stringize(NAME)), TYPE)
1335 ADD_FIELD (string1, pchar_type_node);
1336 ADD_FIELD (string1_len, gfc_int4_type_node);
1338 ADD_FIELD (string2, pchar_type_node);
1339 ADD_FIELD (string2_len, gfc_int4_type_node);
1341 ADD_FIELD (target, pvoid_type_node);
1344 gfc_finish_type (select_struct);
1347 cp = code->block->ext.case_list;
1348 while (cp->left != NULL)
1352 for (d = cp; d; d = d->right)
1356 labels = gfc_getmem (n * sizeof (tree));
1360 for(i = 0; i < n; i++)
1362 labels[i] = gfc_build_label_decl (NULL_TREE);
1363 TREE_USED (labels[i]) = 1;
1364 /* TODO: The gimplifier should do this for us, but it has
1365 inadequacies when dealing with static initializers. */
1366 FORCED_LABEL (labels[i]) = 1;
1369 end_label = gfc_build_label_decl (NULL_TREE);
1371 /* Generate the body */
1372 gfc_start_block (&block);
1373 gfc_init_block (&body);
1375 for (c = code->block; c; c = c->block)
1377 for (d = c->ext.case_list; d; d = d->next)
1379 tmp = build1_v (LABEL_EXPR, labels[d->n]);
1380 gfc_add_expr_to_block (&body, tmp);
1383 tmp = gfc_trans_code (c->next);
1384 gfc_add_expr_to_block (&body, tmp);
1386 tmp = build1_v (GOTO_EXPR, end_label);
1387 gfc_add_expr_to_block (&body, tmp);
1390 /* Generate the structure describing the branches */
1394 for(d = cp; d; d = d->right, i++)
1398 gfc_init_se (&se, NULL);
1402 node = tree_cons (ss_string1, null_pointer_node, node);
1403 node = tree_cons (ss_string1_len, integer_zero_node, node);
1407 gfc_conv_expr_reference (&se, d->low);
1409 node = tree_cons (ss_string1, se.expr, node);
1410 node = tree_cons (ss_string1_len, se.string_length, node);
1413 if (d->high == NULL)
1415 node = tree_cons (ss_string2, null_pointer_node, node);
1416 node = tree_cons (ss_string2_len, integer_zero_node, node);
1420 gfc_init_se (&se, NULL);
1421 gfc_conv_expr_reference (&se, d->high);
1423 node = tree_cons (ss_string2, se.expr, node);
1424 node = tree_cons (ss_string2_len, se.string_length, node);
1427 tmp = gfc_build_addr_expr (pvoid_type_node, labels[i]);
1428 node = tree_cons (ss_target, tmp, node);
1430 tmp = build_constructor_from_list (select_struct, nreverse (node));
1431 init = tree_cons (NULL_TREE, tmp, init);
1434 type = build_array_type (select_struct, build_index_type
1435 (build_int_cst (NULL_TREE, n - 1)));
1437 init = build_constructor_from_list (type, nreverse(init));
1438 TREE_CONSTANT (init) = 1;
1439 TREE_INVARIANT (init) = 1;
1440 TREE_STATIC (init) = 1;
1441 /* Create a static variable to hold the jump table. */
1442 tmp = gfc_create_var (type, "jumptable");
1443 TREE_CONSTANT (tmp) = 1;
1444 TREE_INVARIANT (tmp) = 1;
1445 TREE_STATIC (tmp) = 1;
1446 DECL_INITIAL (tmp) = init;
1449 /* Build an argument list for the library call */
1450 init = gfc_build_addr_expr (pvoid_type_node, init);
1451 args = gfc_chainon_list (NULL_TREE, init);
1453 tmp = build_int_cst (NULL_TREE, n);
1454 args = gfc_chainon_list (args, tmp);
1456 tmp = gfc_build_addr_expr (pvoid_type_node, end_label);
1457 args = gfc_chainon_list (args, tmp);
1459 gfc_init_se (&se, NULL);
1460 gfc_conv_expr_reference (&se, code->expr);
1462 args = gfc_chainon_list (args, se.expr);
1463 args = gfc_chainon_list (args, se.string_length);
1465 gfc_add_block_to_block (&block, &se.pre);
1467 tmp = build_function_call_expr (gfor_fndecl_select_string, args);
1468 case_label = gfc_create_var (TREE_TYPE (tmp), "case_label");
1469 gfc_add_modify_expr (&block, case_label, tmp);
1471 gfc_add_block_to_block (&block, &se.post);
1473 tmp = build1 (GOTO_EXPR, void_type_node, case_label);
1474 gfc_add_expr_to_block (&block, tmp);
1476 tmp = gfc_finish_block (&body);
1477 gfc_add_expr_to_block (&block, tmp);
1478 tmp = build1_v (LABEL_EXPR, end_label);
1479 gfc_add_expr_to_block (&block, tmp);
1484 return gfc_finish_block (&block);
1488 /* Translate the three variants of the SELECT CASE construct.
1490 SELECT CASEs with INTEGER case expressions can be translated to an
1491 equivalent GENERIC switch statement, and for LOGICAL case
1492 expressions we build one or two if-else compares.
1494 SELECT CASEs with CHARACTER case expressions are a whole different
1495 story, because they don't exist in GENERIC. So we sort them and
1496 do a binary search at runtime.
1498 Fortran has no BREAK statement, and it does not allow jumps from
1499 one case block to another. That makes things a lot easier for
1503 gfc_trans_select (gfc_code * code)
1505 gcc_assert (code && code->expr);
1507 /* Empty SELECT constructs are legal. */
1508 if (code->block == NULL)
1509 return build_empty_stmt ();
1511 /* Select the correct translation function. */
1512 switch (code->expr->ts.type)
1514 case BT_LOGICAL: return gfc_trans_logical_select (code);
1515 case BT_INTEGER: return gfc_trans_integer_select (code);
1516 case BT_CHARACTER: return gfc_trans_character_select (code);
1518 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1524 /* Generate the loops for a FORALL block. The normal loop format:
1525 count = (end - start + step) / step
1538 gfc_trans_forall_loop (forall_info *forall_tmp, int nvar, tree body, int mask_flag)
1546 tree var, start, end, step;
1549 iter = forall_tmp->this_loop;
1550 for (n = 0; n < nvar; n++)
1553 start = iter->start;
1557 exit_label = gfc_build_label_decl (NULL_TREE);
1558 TREE_USED (exit_label) = 1;
1560 /* The loop counter. */
1561 count = gfc_create_var (TREE_TYPE (var), "count");
1563 /* The body of the loop. */
1564 gfc_init_block (&block);
1566 /* The exit condition. */
1567 cond = fold_build2 (LE_EXPR, boolean_type_node,
1568 count, build_int_cst (TREE_TYPE (count), 0));
1569 tmp = build1_v (GOTO_EXPR, exit_label);
1570 tmp = fold_build3 (COND_EXPR, void_type_node,
1571 cond, tmp, build_empty_stmt ());
1572 gfc_add_expr_to_block (&block, tmp);
1574 /* The main loop body. */
1575 gfc_add_expr_to_block (&block, body);
1577 /* Increment the loop variable. */
1578 tmp = build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1579 gfc_add_modify_expr (&block, var, tmp);
1581 /* Advance to the next mask element. Only do this for the
1583 if (n == 0 && mask_flag && forall_tmp->mask)
1585 tree maskindex = forall_tmp->maskindex;
1586 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
1587 maskindex, gfc_index_one_node);
1588 gfc_add_modify_expr (&block, maskindex, tmp);
1591 /* Decrement the loop counter. */
1592 tmp = build2 (MINUS_EXPR, TREE_TYPE (var), count, gfc_index_one_node);
1593 gfc_add_modify_expr (&block, count, tmp);
1595 body = gfc_finish_block (&block);
1597 /* Loop var initialization. */
1598 gfc_init_block (&block);
1599 gfc_add_modify_expr (&block, var, start);
1601 /* Initialize maskindex counter. Only do this before the
1603 if (n == nvar - 1 && mask_flag && forall_tmp->mask)
1604 gfc_add_modify_expr (&block, forall_tmp->maskindex,
1605 gfc_index_zero_node);
1607 /* Initialize the loop counter. */
1608 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1609 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1610 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1611 gfc_add_modify_expr (&block, count, tmp);
1613 /* The loop expression. */
1614 tmp = build1_v (LOOP_EXPR, body);
1615 gfc_add_expr_to_block (&block, tmp);
1617 /* The exit label. */
1618 tmp = build1_v (LABEL_EXPR, exit_label);
1619 gfc_add_expr_to_block (&block, tmp);
1621 body = gfc_finish_block (&block);
1628 /* Generate the body and loops according to MASK_FLAG and NEST_FLAG.
1629 If MASK_FLAG is nonzero, the body is controlled by the masks in the
1630 forall nest; otherwise, the body is not controlled by masks.
1631 If NEST_FLAG is nonzero, generate loops for the nested forall; otherwise,
1632 only generate loops for the current forall level. */
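/* For a two-level nest, the generated structure is roughly (sketch):

     <outer forall loops>
       if (outer_mask[outer_maskindex])
         <inner forall loops>
           if (inner_mask[inner_maskindex])
             body;

   i.e. each level's mask test is wrapped around the body before that
   level's loops are added, starting from the innermost level.  */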
1635 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1636 int mask_flag, int nest_flag)
1640 forall_info *forall_tmp;
1641 tree pmask, mask, maskindex;
1643 forall_tmp = nested_forall_info;
1644 /* Generate loops for nested forall. */
1647 while (forall_tmp->next_nest != NULL)
1648 forall_tmp = forall_tmp->next_nest;
1649 while (forall_tmp != NULL)
1651 /* Generate the body under the control of the masks. */
1654 pmask = forall_tmp->pmask;
1655 mask = forall_tmp->mask;
1656 maskindex = forall_tmp->maskindex;
1660 /* If a mask was specified, make the assignment conditional. */
1662 tmp = build_fold_indirect_ref (mask);
1665 tmp = gfc_build_array_ref (tmp, maskindex);
1667 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1670 nvar = forall_tmp->nvar;
1671 body = gfc_trans_forall_loop (forall_tmp, nvar, body, mask_flag);
1672 forall_tmp = forall_tmp->outer;
1677 nvar = forall_tmp->nvar;
1678 body = gfc_trans_forall_loop (forall_tmp, nvar, body, mask_flag);
1685 /* Allocate data for holding a temporary array. Returns either a local
1686 temporary array or a pointer variable. */
1689 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
1697 if (INTEGER_CST_P (size))
1699 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1700 gfc_index_one_node);
1705 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1706 type = build_array_type (elem_type, type);
1707 if (gfc_can_put_var_on_stack (bytesize))
1709 gcc_assert (INTEGER_CST_P (size));
1710 tmpvar = gfc_create_var (type, "temp");
1715 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1716 *pdata = convert (pvoid_type_node, tmpvar);
1718 args = gfc_chainon_list (NULL_TREE, bytesize);
1719 if (gfc_index_integer_kind == 4)
1720 tmp = gfor_fndecl_internal_malloc;
1721 else if (gfc_index_integer_kind == 8)
1722 tmp = gfor_fndecl_internal_malloc64;
1725 tmp = build_function_call_expr (tmp, args);
1726 tmp = convert (TREE_TYPE (tmpvar), tmp);
1727 gfc_add_modify_expr (pblock, tmpvar, tmp);
1733 /* Generate code to copy the temporary to the actual lhs. */
1736 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1737 tree count1, tree wheremask, bool invert)
1741 stmtblock_t block, body;
1747 lss = gfc_walk_expr (expr);
1749 if (lss == gfc_ss_terminator)
1751 gfc_start_block (&block);
1753 gfc_init_se (&lse, NULL);
1755 /* Translate the expression. */
1756 gfc_conv_expr (&lse, expr);
1758 /* Form the expression for the temporary. */
1759 tmp = gfc_build_array_ref (tmp1, count1);
1761 /* Use the scalar assignment as is. */
1762 gfc_add_block_to_block (&block, &lse.pre);
1763 gfc_add_modify_expr (&block, lse.expr, tmp);
1764 gfc_add_block_to_block (&block, &lse.post);
1766 /* Increment the count1. */
1767 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1768 gfc_index_one_node);
1769 gfc_add_modify_expr (&block, count1, tmp);
1771 tmp = gfc_finish_block (&block);
1775 gfc_start_block (&block);
1777 gfc_init_loopinfo (&loop1);
1778 gfc_init_se (&rse, NULL);
1779 gfc_init_se (&lse, NULL);
1781 /* Associate the lss with the loop. */
1782 gfc_add_ss_to_loop (&loop1, lss);
1784 /* Calculate the bounds of the scalarization. */
1785 gfc_conv_ss_startstride (&loop1);
1786 /* Setup the scalarizing loops. */
1787 gfc_conv_loop_setup (&loop1);
1789 gfc_mark_ss_chain_used (lss, 1);
1791 /* Start the scalarized loop body. */
1792 gfc_start_scalarized_body (&loop1, &body);
1794 /* Setup the gfc_se structures. */
1795 gfc_copy_loopinfo_to_se (&lse, &loop1);
1798 /* Form the expression of the temporary. */
1799 if (lss != gfc_ss_terminator)
1800 rse.expr = gfc_build_array_ref (tmp1, count1);
1801 /* Translate expr. */
1802 gfc_conv_expr (&lse, expr);
1804 /* Use the scalar assignment. */
1805 rse.string_length = lse.string_length;
1806 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
1808 /* Form the mask expression according to the mask tree list. */
1811 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1813 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1814 TREE_TYPE (wheremaskexpr),
1816 tmp = fold_build3 (COND_EXPR, void_type_node,
1817 wheremaskexpr, tmp, build_empty_stmt ());
1820 gfc_add_expr_to_block (&body, tmp);
1822 /* Increment count1. */
1823 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1824 count1, gfc_index_one_node);
1825 gfc_add_modify_expr (&body, count1, tmp);
1827 /* Increment count3. */
1830 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1831 count3, gfc_index_one_node);
1832 gfc_add_modify_expr (&body, count3, tmp);
1835 /* Generate the copying loops. */
1836 gfc_trans_scalarizing_loops (&loop1, &body);
1837 gfc_add_block_to_block (&block, &loop1.pre);
1838 gfc_add_block_to_block (&block, &loop1.post);
1839 gfc_cleanup_loop (&loop1);
1841 tmp = gfc_finish_block (&block);
1847 /* Generate code to copy the rhs to the temporary. TMP1 is the address of
1848 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
1849 and should not be freed. WHEREMASK is the conditional execution mask
1850 whose sense may be inverted by INVERT. */
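/* In effect (sketch), for each element visited by the scalarizer:

     if (no WHEREMASK was given, or wheremask[count3], possibly inverted)
       temp[count1] = <rhs value>;
     count1 = count1 + 1;
     count3 = count3 + 1;   (only when COUNT3 is in use)
*/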
1853 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
1854 tree count1, gfc_ss *lss, gfc_ss *rss,
1855 tree wheremask, bool invert)
1857 stmtblock_t block, body1;
1864 gfc_start_block (&block);
1866 gfc_init_se (&rse, NULL);
1867 gfc_init_se (&lse, NULL);
1869 if (lss == gfc_ss_terminator)
1871 gfc_init_block (&body1);
1872 gfc_conv_expr (&rse, expr2);
1873 lse.expr = gfc_build_array_ref (tmp1, count1);
1877 /* Initialize the loop. */
1878 gfc_init_loopinfo (&loop);
1880 /* We may need LSS to determine the shape of the expression. */
1881 gfc_add_ss_to_loop (&loop, lss);
1882 gfc_add_ss_to_loop (&loop, rss);
1884 gfc_conv_ss_startstride (&loop);
1885 gfc_conv_loop_setup (&loop);
1887 gfc_mark_ss_chain_used (rss, 1);
1888 /* Start the loop body. */
1889 gfc_start_scalarized_body (&loop, &body1);
1891 /* Translate the expression. */
1892 gfc_copy_loopinfo_to_se (&rse, &loop);
1894 gfc_conv_expr (&rse, expr2);
1896 /* Form the expression of the temporary. */
1897 lse.expr = gfc_build_array_ref (tmp1, count1);
1900 /* Use the scalar assignment. */
1901 lse.string_length = rse.string_length;
1902 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
1903 expr2->expr_type == EXPR_VARIABLE);
1905 /* Form the mask expression according to the mask tree list. */
1908 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1910 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1911 TREE_TYPE (wheremaskexpr),
1913 tmp = fold_build3 (COND_EXPR, void_type_node,
1914 wheremaskexpr, tmp, build_empty_stmt ());
1917 gfc_add_expr_to_block (&body1, tmp);
1919 if (lss == gfc_ss_terminator)
1921 gfc_add_block_to_block (&block, &body1);
1923 /* Increment count1. */
1924 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1925 gfc_index_one_node);
1926 gfc_add_modify_expr (&block, count1, tmp);
1930 /* Increment count1. */
1931 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1932 count1, gfc_index_one_node);
1933 gfc_add_modify_expr (&body1, count1, tmp);
1935 /* Increment count3. */
1938 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1939 count3, gfc_index_one_node);
1940 gfc_add_modify_expr (&body1, count3, tmp);
1943 /* Generate the copying loops. */
1944 gfc_trans_scalarizing_loops (&loop, &body1);
1946 gfc_add_block_to_block (&block, &loop.pre);
1947 gfc_add_block_to_block (&block, &loop.post);
1949 gfc_cleanup_loop (&loop);
1950 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
1951 as tree nodes in SS may not be valid in a different scope. */
1954 tmp = gfc_finish_block (&block);
1959 /* Calculate the size of the temporary needed in the assignment inside forall.
1960 LSS and RSS are filled in this function. */
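/* In other words (sketch), SIZE ends up as the product over the LHS
   dimensions of (loop.to[i] - loop.from[i] + 1), i.e. the number of
   elements one assignment inside the FORALL touches.  */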
1963 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
1964 stmtblock_t * pblock,
1965 gfc_ss **lss, gfc_ss **rss)
1973 *lss = gfc_walk_expr (expr1);
1976 size = gfc_index_one_node;
1977 if (*lss != gfc_ss_terminator)
1979 gfc_init_loopinfo (&loop);
1981 /* Walk the RHS of the expression. */
1982 *rss = gfc_walk_expr (expr2);
1983 if (*rss == gfc_ss_terminator)
1985 /* The rhs is scalar. Add a ss for the expression. */
1986 *rss = gfc_get_ss ();
1987 (*rss)->next = gfc_ss_terminator;
1988 (*rss)->type = GFC_SS_SCALAR;
1989 (*rss)->expr = expr2;
1992 /* Associate the SS with the loop. */
1993 gfc_add_ss_to_loop (&loop, *lss);
1994 /* We don't actually need to add the rhs at this point, but it might
1995 make guessing the loop bounds a bit easier. */
1996 gfc_add_ss_to_loop (&loop, *rss);
1998 /* We only want the shape of the expression, not the rest of the junk
1999 generated by the scalarizer. */
2000 loop.array_parameter = 1;
2002 /* Calculate the bounds of the scalarization. */
2003 save_flag = flag_bounds_check;
2004 flag_bounds_check = 0;
2005 gfc_conv_ss_startstride (&loop);
2006 flag_bounds_check = save_flag;
2007 gfc_conv_loop_setup (&loop);
2009 /* Figure out how many elements we need. */
2010 for (i = 0; i < loop.dimen; i++)
2012 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2013 gfc_index_one_node, loop.from[i]);
2014 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2016 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2018 gfc_add_block_to_block (pblock, &loop.pre);
2019 size = gfc_evaluate_now (size, pblock);
2020 gfc_add_block_to_block (pblock, &loop.post);
2022 /* TODO: write a function that cleans up a loopinfo without freeing
2023 the SS chains. Currently a NOP. */
2030 /* Calculate the overall iteration count of the nested forall construct. */
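/* Conceptually (sketch):  num = 0; then, once per iteration of the
   enclosing FORALL nest, num = num + inner_size; the result is the
   total number of elements the temporary must hold.  */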
2033 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2034 stmtblock_t *inner_size_body, stmtblock_t *block)
2039 /* TODO: optimize this computation. */
2040 number = gfc_create_var (gfc_array_index_type, "num");
2041 gfc_add_modify_expr (block, number, gfc_index_zero_node);
2043 gfc_start_block (&body);
2044 if (inner_size_body)
2045 gfc_add_block_to_block (&body, inner_size_body);
2046 if (nested_forall_info)
2047 tmp = build2 (PLUS_EXPR, gfc_array_index_type, number,
2051 gfc_add_modify_expr (&body, number, tmp);
2052 tmp = gfc_finish_block (&body);
2054 /* Generate loops. */
2055 if (nested_forall_info != NULL)
2056 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 0, 1);
2058 gfc_add_expr_to_block (block, tmp);
2064 /* Allocate a temporary for the forall construct. SIZE is the size of the
2065 temporary needed. PTEMP1 is returned so that the space can be freed later. */
2068 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2076 unit = TYPE_SIZE_UNIT (type);
2077 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2080 temp1 = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2083 tmp = build_fold_indirect_ref (temp1);
2091 /* Allocate a temporary for the forall construct according to the information
2092 in nested_forall_info. INNER_SIZE is the size of the temporary needed in the
2093 assignment inside forall. PTEMP1 is returned so that the space can be freed later. */
2096 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2097 tree inner_size, stmtblock_t * inner_size_body,
2098 stmtblock_t * block, tree * ptemp1)
2102 /* Calculate the total size of temporary needed in forall construct. */
2103 size = compute_overall_iter_number (nested_forall_info, inner_size,
2104 inner_size_body, block);
2106 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2110 /* Handle assignments inside forall which need a temporary.
2112 forall (i=start:end:stride; maskexpr)
2115 (where e,f<i> are arbitrary expressions possibly involving i
2116 and there is a dependency between e<i> and f<i>)
2118 masktmp(:) = maskexpr(:)
2123 for (i = start; i <= end; i += stride)
2127 for (i = start; i <= end; i += stride)
2129 if (masktmp[maskindex++])
2130 tmp[count1++] = f<i>
2134 for (i = start; i <= end; i += stride)
2136 if (masktmp[maskindex++])
2137 e<i> = tmp[count1++]
2142 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2143 tree wheremask, bool invert,
2144 forall_info * nested_forall_info,
2145 stmtblock_t * block)
2153 stmtblock_t inner_size_body;
2155 /* Create vars. count1 is the current iterator number of the nested
2157 count1 = gfc_create_var (gfc_array_index_type, "count1");
2159 /* Count is the wheremask index. */
2162 count = gfc_create_var (gfc_array_index_type, "count");
2163 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2168 /* Initialize count1. */
2169 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2171 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2172 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2173 gfc_init_block (&inner_size_body);
2174 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2177 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
2178 type = gfc_typenode_for_spec (&expr1->ts);
2180 /* Allocate temporary for nested forall construct according to the
2181 information in nested_forall_info and inner_size. */
2182 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2183 &inner_size_body, block, &ptemp1);
2185 /* Generate code to copy the rhs to the temporary. */
2186 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2189 /* Generate body and loops according to the information in
2190 nested_forall_info. */
2191 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2192 gfc_add_expr_to_block (block, tmp);
2195 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2199 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2201 /* Generate code to copy the temporary to the lhs. */
2202 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2205 /* Generate body and loops according to the information in
2206 nested_forall_info. */
2207 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2208 gfc_add_expr_to_block (block, tmp);
2212 /* Free the temporary. */
2213 tmp = gfc_chainon_list (NULL_TREE, ptemp1);
2214 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2215 gfc_add_expr_to_block (block, tmp);
2220 /* Translate a pointer assignment inside FORALL which needs a temporary. */
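/* E.g. (illustrative):  FORALL (i=1:n)  p(i)%ptr => p(n+1-i)%ptr
   reads pointers that the same statement also redefines, so the RHS
   pointers (or, for array sections, their descriptors) are first
   gathered into a temporary and only then assigned to the LHS in a
   second pass.  */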
2223 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2224 forall_info * nested_forall_info,
2225 stmtblock_t * block)
2239 tree tmp, tmp1, ptemp1;
2241 count = gfc_create_var (gfc_array_index_type, "count");
2242 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2244 inner_size = integer_one_node;
2245 lss = gfc_walk_expr (expr1);
2246 rss = gfc_walk_expr (expr2);
2247 if (lss == gfc_ss_terminator)
2249 type = gfc_typenode_for_spec (&expr1->ts);
2250 type = build_pointer_type (type);
2252 /* Allocate temporary for nested forall construct according to the
2253 information in nested_forall_info and inner_size. */
2254 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2255 inner_size, NULL, block, &ptemp1);
2256 gfc_start_block (&body);
2257 gfc_init_se (&lse, NULL);
2258 lse.expr = gfc_build_array_ref (tmp1, count);
2259 gfc_init_se (&rse, NULL);
2260 rse.want_pointer = 1;
2261 gfc_conv_expr (&rse, expr2);
2262 gfc_add_block_to_block (&body, &rse.pre);
2263 gfc_add_modify_expr (&body, lse.expr,
2264 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2265 gfc_add_block_to_block (&body, &rse.post);
2267 /* Increment count. */
2268 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2269 count, gfc_index_one_node);
2270 gfc_add_modify_expr (&body, count, tmp);
2272 tmp = gfc_finish_block (&body);
2274 /* Generate body and loops according to the information in
2275 nested_forall_info. */
2276 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2277 gfc_add_expr_to_block (block, tmp);
2280 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2282 gfc_start_block (&body);
2283 gfc_init_se (&lse, NULL);
2284 gfc_init_se (&rse, NULL);
2285 rse.expr = gfc_build_array_ref (tmp1, count);
2286 lse.want_pointer = 1;
2287 gfc_conv_expr (&lse, expr1);
2288 gfc_add_block_to_block (&body, &lse.pre);
2289 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2290 gfc_add_block_to_block (&body, &lse.post);
2291 /* Increment count. */
2292 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2293 count, gfc_index_one_node);
2294 gfc_add_modify_expr (&body, count, tmp);
2295 tmp = gfc_finish_block (&body);
2297 /* Generate body and loops according to the information in
2298 nested_forall_info. */
2299 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2300 gfc_add_expr_to_block (block, tmp);
2304 gfc_init_loopinfo (&loop);
2306 /* Associate the SS with the loop. */
2307 gfc_add_ss_to_loop (&loop, rss);
2309 /* Setup the scalarizing loops and bounds. */
2310 gfc_conv_ss_startstride (&loop);
2312 gfc_conv_loop_setup (&loop);
2314 info = &rss->data.info;
2315 desc = info->descriptor;
2317 /* Make a new descriptor. */
2318 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2319 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2320 loop.from, loop.to, 1);
2322 /* Allocate temporary for nested forall construct. */
2323 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2324 inner_size, NULL, block, &ptemp1);
2325 gfc_start_block (&body);
2326 gfc_init_se (&lse, NULL);
2327 lse.expr = gfc_build_array_ref (tmp1, count);
2328 lse.direct_byref = 1;
2329 rss = gfc_walk_expr (expr2);
2330 gfc_conv_expr_descriptor (&lse, expr2, rss);
2332 gfc_add_block_to_block (&body, &lse.pre);
2333 gfc_add_block_to_block (&body, &lse.post);
2335 /* Increment count. */
2336 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2337 count, gfc_index_one_node);
2338 gfc_add_modify_expr (&body, count, tmp);
2340 tmp = gfc_finish_block (&body);
2342 /* Generate body and loops according to the information in
2343 nested_forall_info. */
2344 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2345 gfc_add_expr_to_block (block, tmp);
2348 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2350 parm = gfc_build_array_ref (tmp1, count);
2351 lss = gfc_walk_expr (expr1);
2352 gfc_init_se (&lse, NULL);
2353 gfc_conv_expr_descriptor (&lse, expr1, lss);
2354 gfc_add_modify_expr (&lse.pre, lse.expr, parm);
2355 gfc_start_block (&body);
2356 gfc_add_block_to_block (&body, &lse.pre);
2357 gfc_add_block_to_block (&body, &lse.post);
2359 /* Increment count. */
2360 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2361 count, gfc_index_one_node);
2362 gfc_add_modify_expr (&body, count, tmp);
2364 tmp = gfc_finish_block (&body);
2366 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2367 gfc_add_expr_to_block (block, tmp);
2369 /* Free the temporary. */
2372 tmp = gfc_chainon_list (NULL_TREE, ptemp1);
2373 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2374 gfc_add_expr_to_block (block, tmp);
2379 /* FORALL and WHERE statements are really nasty, especially when you nest
2380 them. All the rhs of a forall assignment must be evaluated before the
2381 actual assignments are performed. Presumably this also applies to all the
2382 assignments in an inner where statement. */
2384 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2385 linear array, relying on the fact that we process in the same order in all
2386 loops.
2388 forall (i=start:end:stride; maskexpr)
2389 e<i> = f<i>
2390 g<i> = h<i>
2391 end forall
2392 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2393 Translates to:
2394 count = ((end + 1 - start) / stride)
2395 masktmp(:) = maskexpr(:)
2397 maskindex = 0;
2398 for (i = start; i <= end; i += stride)
2399 {
2400 if (masktmp[maskindex++])
2401 e<i> = f<i>;
2402 }
2403 maskindex = 0;
2404 for (i = start; i <= end; i += stride)
2405 {
2406 if (masktmp[maskindex++])
2407 g<i> = h<i>;
2408 }
2410 Note that this code only works when there are no dependencies.
2411 Forall loops with array assignments and data dependencies are a real pain,
2412 because the size of the temporary cannot always be determined before the
2413 loop is executed. This problem is compounded by the presence of nested
2414 FORALL constructs.
2415 */
2418 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2440 gfc_forall_iterator *fa;
2443 gfc_saved_var *saved_vars;
2444 iter_info *this_forall, *iter_tmp;
2445 forall_info *info, *forall_tmp;
2447 gfc_start_block (&block);
2450 /* Count the number of FORALL indexes. */
2451 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2455 /* Allocate the space for var, start, end, step, varexpr. */
2456 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2457 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2458 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2459 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2460 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2461 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2463 /* Allocate the space for info. */
2464 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2466 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2468 gfc_symbol *sym = fa->var->symtree->n.sym;
2470 /* Allocate space for this_forall. */
2471 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2473 /* Create a temporary variable for the FORALL index. */
2474 tmp = gfc_typenode_for_spec (&sym->ts);
2475 var[n] = gfc_create_var (tmp, sym->name);
2476 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2478 /* Record it in this_forall. */
2479 this_forall->var = var[n];
2481 /* Replace the index symbol's backend_decl with the temporary decl. */
2482 sym->backend_decl = var[n];
2484 /* Work out the start, end and stride for the loop. */
2485 gfc_init_se (&se, NULL);
2486 gfc_conv_expr_val (&se, fa->start);
2487 /* Record it in this_forall. */
2488 this_forall->start = se.expr;
2489 gfc_add_block_to_block (&block, &se.pre);
2492 gfc_init_se (&se, NULL);
2493 gfc_conv_expr_val (&se, fa->end);
2494 /* Record it in this_forall. */
2495 this_forall->end = se.expr;
2496 gfc_make_safe_expr (&se);
2497 gfc_add_block_to_block (&block, &se.pre);
2500 gfc_init_se (&se, NULL);
2501 gfc_conv_expr_val (&se, fa->stride);
2502 /* Record it in this_forall. */
2503 this_forall->step = se.expr;
2504 gfc_make_safe_expr (&se);
2505 gfc_add_block_to_block (&block, &se.pre);
2508 /* Set the NEXT field of this_forall to NULL. */
2509 this_forall->next = NULL;
2510 /* Link this_forall to the info construct. */
2511 if (info->this_loop == NULL)
2512 info->this_loop = this_forall;
2515 iter_tmp = info->this_loop;
2516 while (iter_tmp->next != NULL)
2517 iter_tmp = iter_tmp->next;
2518 iter_tmp->next = this_forall;
2525 /* Work out the number of elements in the mask array. */
2528 size = gfc_index_one_node;
2529 sizevar = NULL_TREE;
2531 for (n = 0; n < nvar; n++)
2533 if (lenvar && TREE_TYPE (lenvar) != TREE_TYPE (start[n]))
2536 /* size = (end + step - start) / step. */
2537 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2538 step[n], start[n]);
2539 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2541 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2542 tmp = convert (gfc_array_index_type, tmp);
2544 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
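/* Editorial worked example: for start=1, end=10, step=3 the expression above
   evaluates to (10 + 3 - 1) / 3 = 4, i.e. the iterations i = 1, 4, 7, 10;
   the per-index counts are multiplied together to give the number of mask
   elements for the whole nest.  */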
2547 /* Record the nvar and size of current forall level. */
2551 /* Link the current forall level to nested_forall_info. */
2552 forall_tmp = nested_forall_info;
2553 if (forall_tmp == NULL)
2554 nested_forall_info = info;
2557 while (forall_tmp->next_nest != NULL)
2558 forall_tmp = forall_tmp->next_nest;
2559 info->outer = forall_tmp;
2560 forall_tmp->next_nest = info;
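/* Editorial illustration: for a nest such as

       FORALL (i = 1:n)
         FORALL (j = 1:m)
           ...
         END FORALL
       END FORALL

   each level gets its own forall_info; the j-level's `outer' field points
   back at the i-level and the levels are chained through `next_nest', so
   gfc_trans_nested_forall_loop can rebuild the complete loop nest around
   whatever body it is handed.  */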
2563 /* Copy the mask into a temporary variable if required.
2564 For now we assume a mask temporary is needed. */
2567 /* As the mask array can be very big, prefer compact
2568 boolean types. */
2569 tree smallest_boolean_type_node
2570 = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2572 /* Allocate the mask temporary. */
2573 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
2574 TYPE_SIZE_UNIT (smallest_boolean_type_node));
2576 mask = gfc_do_allocate (bytesize, size, &pmask, &block,
2577 smallest_boolean_type_node);
2579 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2580 /* Record them in the info structure. */
2581 info->pmask = pmask;
2583 info->maskindex = maskindex;
2585 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2587 /* Start of mask assignment loop body. */
2588 gfc_start_block (&body);
2590 /* Evaluate the mask expression. */
2591 gfc_init_se (&se, NULL);
2592 gfc_conv_expr_val (&se, code->expr);
2593 gfc_add_block_to_block (&body, &se.pre);
2595 /* Store the mask. */
2596 se.expr = convert (smallest_boolean_type_node, se.expr);
2599 tmp = build_fold_indirect_ref (mask);
2602 tmp = gfc_build_array_ref (tmp, maskindex);
2603 gfc_add_modify_expr (&body, tmp, se.expr);
2605 /* Advance to the next mask element. */
2606 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
2607 maskindex, gfc_index_one_node);
2608 gfc_add_modify_expr (&body, maskindex, tmp);
2610 /* Generate the loops. */
2611 tmp = gfc_finish_block (&body);
2612 tmp = gfc_trans_nested_forall_loop (info, tmp, 0, 0);
2613 gfc_add_expr_to_block (&block, tmp);
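/* Editorial sketch of the code emitted just above, in pseudocode:

       maskindex = 0;
       for each combination of the FORALL indices
         masktmp[maskindex++] = maskexpr;

   so the mask expression is evaluated exactly once, before any assignment
   in the FORALL body is executed.  */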
2617 /* No mask was specified. */
2618 maskindex = NULL_TREE;
2619 mask = pmask = NULL_TREE;
2622 c = code->block->next;
2624 /* TODO: loop merging in FORALL statements. */
2625 /* Now that we've got a copy of the mask, generate the assignment loops. */
2631 /* A scalar or array assignment. */
2632 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2633 /* Temporaries due to array assignment data dependencies introduce
2634 no end of problems. */
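/* Editorial example: FORALL (i = 1:n) a(i) = a(n + 1 - i) must evaluate
   every right-hand side before any element of `a' is overwritten;
   gfc_trans_assign_need_temp arranges this by filling a linear temporary in
   one pass over the loop nest and copying it to the left-hand side in a
   second pass.  */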
2636 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2637 nested_forall_info, &block);
2640 /* Use the normal assignment copying routines. */
2641 assign = gfc_trans_assignment (c->expr, c->expr2);
2643 /* Generate body and loops. */
2644 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1, 1);
2645 gfc_add_expr_to_block (&block, tmp);
2651 /* Translate WHERE or WHERE construct nested in FORALL. */
2652 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2655 /* Pointer assignment inside FORALL. */
2656 case EXEC_POINTER_ASSIGN:
2657 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2659 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2660 nested_forall_info, &block);
2663 /* Use the normal assignment copying routines. */
2664 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2666 /* Generate body and loops. */
2667 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign,
2669 gfc_add_expr_to_block (&block, tmp);
2674 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2675 gfc_add_expr_to_block (&block, tmp);
2678 /* Explicit subroutine calls are prevented by the frontend but interface
2679 assignments can legitimately produce them. */
2680 case EXEC_ASSIGN_CALL:
2681 assign = gfc_trans_call (c, true);
2682 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1, 1);
2683 gfc_add_expr_to_block (&block, tmp);
2693 /* Restore the original index variables. */
2694 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2695 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2697 /* Free the space for var, start, end, step, varexpr. */
2703 gfc_free (saved_vars);
2707 /* Free the temporary for the mask. */
2708 tmp = gfc_chainon_list (NULL_TREE, pmask);
2709 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2710 gfc_add_expr_to_block (&block, tmp);
2713 pushdecl (maskindex);
2715 return gfc_finish_block (&block);
2719 /* Translate the FORALL statement or construct. */
2721 tree gfc_trans_forall (gfc_code * code)
2723 return gfc_trans_forall_1 (code, NULL);
2727 /* Evaluate the WHERE mask expression, copy its value to a temporary.
2728 If the WHERE construct is nested in FORALL, compute the overall temporary
2729 needed by the WHERE mask expression multiplied by the iterator number of
2730 the nested forall.
2731 ME is the WHERE mask expression.
2732 MASK is the current execution mask upon input, whose sense may or may
2733 not be inverted as specified by the INVERT argument.
2734 CMASK is the updated execution mask on output, or NULL if not required.
2735 PMASK is the pending execution mask on output, or NULL if not required.
2736 BLOCK is the block in which to place the condition evaluation loops. */
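/* Editorial illustration of the element-wise update performed below
   (assuming this reading of the code is right): with the incoming mask M,
   negated first when INVERT is set, and the mask expression COND,

       cmask[i] = M[i] .AND. COND(i)
       pmask[i] = M[i] .AND. .NOT. COND(i)

   so CMASK controls the body of the current clause and PMASK is what the
   remaining ELSEWHERE clauses still have to consider.  */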
2739 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2740 tree mask, bool invert, tree cmask, tree pmask,
2741 tree mask_type, stmtblock_t * block)
2746 stmtblock_t body, body1;
2747 tree count, cond, mtmp;
2750 gfc_init_loopinfo (&loop);
2752 lss = gfc_walk_expr (me);
2753 rss = gfc_walk_expr (me);
2755 /* Variable to index the temporary. */
2756 count = gfc_create_var (gfc_array_index_type, "count");
2757 /* Initialize count. */
2758 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2760 gfc_start_block (&body);
2762 gfc_init_se (&rse, NULL);
2763 gfc_init_se (&lse, NULL);
2765 if (lss == gfc_ss_terminator)
2767 gfc_init_block (&body1);
2771 /* Initialize the loop. */
2772 gfc_init_loopinfo (&loop);
2774 /* We may need LSS to determine the shape of the expression. */
2775 gfc_add_ss_to_loop (&loop, lss);
2776 gfc_add_ss_to_loop (&loop, rss);
2778 gfc_conv_ss_startstride (&loop);
2779 gfc_conv_loop_setup (&loop);
2781 gfc_mark_ss_chain_used (rss, 1);
2782 /* Start the loop body. */
2783 gfc_start_scalarized_body (&loop, &body1);
2785 /* Translate the expression. */
2786 gfc_copy_loopinfo_to_se (&rse, &loop);
2788 gfc_conv_expr (&rse, me);
2791 /* Variable to evaluate mask condition. */
2792 cond = gfc_create_var (mask_type, "cond");
2793 if (mask && (cmask || pmask))
2794 mtmp = gfc_create_var (mask_type, "mask");
2795 else mtmp = NULL_TREE;
2797 gfc_add_block_to_block (&body1, &lse.pre);
2798 gfc_add_block_to_block (&body1, &rse.pre);
2800 gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));
2802 if (mask && (cmask || pmask))
2804 tmp = gfc_build_array_ref (mask, count);
2805 if (invert)
2806 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
2807 gfc_add_modify_expr (&body1, mtmp, tmp);
2812 tmp1 = gfc_build_array_ref (cmask, count);
2813 tmp = cond;
2814 if (mask)
2815 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2816 gfc_add_modify_expr (&body1, tmp1, tmp);
2821 tmp1 = gfc_build_array_ref (pmask, count);
2822 tmp = build1 (TRUTH_NOT_EXPR, mask_type, cond);
2823 if (mask)
2824 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2825 gfc_add_modify_expr (&body1, tmp1, tmp);
2828 gfc_add_block_to_block (&body1, &lse.post);
2829 gfc_add_block_to_block (&body1, &rse.post);
2831 if (lss == gfc_ss_terminator)
2833 gfc_add_block_to_block (&body, &body1);
2837 /* Increment count. */
2838 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
2839 gfc_index_one_node);
2840 gfc_add_modify_expr (&body1, count, tmp1);
2842 /* Generate the copying loops. */
2843 gfc_trans_scalarizing_loops (&loop, &body1);
2845 gfc_add_block_to_block (&body, &loop.pre);
2846 gfc_add_block_to_block (&body, &loop.post);
2848 gfc_cleanup_loop (&loop);
2849 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2850 as tree nodes in SS may not be valid in a different scope. */
2853 tmp1 = gfc_finish_block (&body);
2854 /* If the WHERE construct is inside FORALL, fill the full temporary. */
2855 if (nested_forall_info != NULL)
2856 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1, 1);
2858 gfc_add_expr_to_block (block, tmp1);
2862 /* Translate an assignment statement in a WHERE statement or construct
2863 statement. The MASK expression is used to control which elements
2864 of EXPR1 shall be assigned. The sense of MASK is specified by
2865 INVERT. */
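/* Editorial sketch of the code this function generates (simplified):

       for each element of the scalarized loop
         if (mask[count1])          (negated when INVERT is set)
           lhs = rhs;
         count1 = count1 + 1;

   When the scalarizer needs a temporary for the right-hand side, COUNT1
   indexes the mask while the temporary is filled and COUNT2 indexes it
   again while the temporary is copied back to the left-hand side.  */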
2868 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
2869 tree mask, bool invert,
2870 tree count1, tree count2)
2875 gfc_ss *lss_section;
2882 tree index, maskexpr;
2885 /* TODO: handle this special case.
2886 Special case a single function returning an array. */
2887 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
2889 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
2895 /* Assignment of the form lhs = rhs. */
2896 gfc_start_block (&block);
2898 gfc_init_se (&lse, NULL);
2899 gfc_init_se (&rse, NULL);
2902 lss = gfc_walk_expr (expr1);
2905 /* In each where-assign-stmt, the mask-expr and the variable being
2906 defined shall be arrays of the same shape. */
2907 gcc_assert (lss != gfc_ss_terminator);
2909 /* The assignment needs scalarization. */
2912 /* Find a non-scalar SS from the lhs. */
2913 while (lss_section != gfc_ss_terminator
2914 && lss_section->type != GFC_SS_SECTION)
2915 lss_section = lss_section->next;
2917 gcc_assert (lss_section != gfc_ss_terminator);
2919 /* Initialize the scalarizer. */
2920 gfc_init_loopinfo (&loop);
2923 rss = gfc_walk_expr (expr2);
2924 if (rss == gfc_ss_terminator)
2926 /* The rhs is scalar. Add a ss for the expression. */
2927 rss = gfc_get_ss ();
2928 rss->next = gfc_ss_terminator;
2929 rss->type = GFC_SS_SCALAR;
2933 /* Associate the SS with the loop. */
2934 gfc_add_ss_to_loop (&loop, lss);
2935 gfc_add_ss_to_loop (&loop, rss);
2937 /* Calculate the bounds of the scalarization. */
2938 gfc_conv_ss_startstride (&loop);
2940 /* Resolve any data dependencies in the statement. */
2941 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
2943 /* Setup the scalarizing loops. */
2944 gfc_conv_loop_setup (&loop);
2946 /* Setup the gfc_se structures. */
2947 gfc_copy_loopinfo_to_se (&lse, &loop);
2948 gfc_copy_loopinfo_to_se (&rse, &loop);
2951 gfc_mark_ss_chain_used (rss, 1);
2952 if (loop.temp_ss == NULL)
2955 gfc_mark_ss_chain_used (lss, 1);
2959 lse.ss = loop.temp_ss;
2960 gfc_mark_ss_chain_used (lss, 3);
2961 gfc_mark_ss_chain_used (loop.temp_ss, 3);
2964 /* Start the scalarized loop body. */
2965 gfc_start_scalarized_body (&loop, &body);
2967 /* Translate the expression. */
2968 gfc_conv_expr (&rse, expr2);
2969 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
2971 gfc_conv_tmp_array_ref (&lse);
2972 gfc_advance_se_ss_chain (&lse);
2975 gfc_conv_expr (&lse, expr1);
2977 /* Form the mask expression according to the mask. */
2979 maskexpr = gfc_build_array_ref (mask, index);
2981 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
2983 /* Use the scalar assignment as is. */
2984 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
2985 loop.temp_ss != NULL, false);
2986 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
2988 gfc_add_expr_to_block (&body, tmp);
2990 if (lss == gfc_ss_terminator)
2992 /* Increment count1. */
2993 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2994 count1, gfc_index_one_node);
2995 gfc_add_modify_expr (&body, count1, tmp);
2997 /* Use the scalar assignment as is. */
2998 gfc_add_block_to_block (&block, &body);
3002 gcc_assert (lse.ss == gfc_ss_terminator
3003 && rse.ss == gfc_ss_terminator);
3005 if (loop.temp_ss != NULL)
3007 /* Increment count1 before finishing the main body of a scalarized
3008 expression. */
3009 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3010 count1, gfc_index_one_node);
3011 gfc_add_modify_expr (&body, count1, tmp);
3012 gfc_trans_scalarized_loop_boundary (&loop, &body);
3014 /* We need to copy the temporary to the actual lhs. */
3015 gfc_init_se (&lse, NULL);
3016 gfc_init_se (&rse, NULL);
3017 gfc_copy_loopinfo_to_se (&lse, &loop);
3018 gfc_copy_loopinfo_to_se (&rse, &loop);
3020 rse.ss = loop.temp_ss;
3023 gfc_conv_tmp_array_ref (&rse);
3024 gfc_advance_se_ss_chain (&rse);
3025 gfc_conv_expr (&lse, expr1);
3027 gcc_assert (lse.ss == gfc_ss_terminator
3028 && rse.ss == gfc_ss_terminator);
3030 /* Form the mask expression according to the mask tree list. */
3032 maskexpr = gfc_build_array_ref (mask, index);
3034 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3037 /* Use the scalar assignment as is. */
3038 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3039 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3040 gfc_add_expr_to_block (&body, tmp);
3042 /* Increment count2. */
3043 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3044 count2, gfc_index_one_node);
3045 gfc_add_modify_expr (&body, count2, tmp);
3049 /* Increment count1. */
3050 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3051 count1, gfc_index_one_node);
3052 gfc_add_modify_expr (&body, count1, tmp);
3055 /* Generate the copying loops. */
3056 gfc_trans_scalarizing_loops (&loop, &body);
3058 /* Wrap the whole thing up. */
3059 gfc_add_block_to_block (&block, &loop.pre);
3060 gfc_add_block_to_block (&block, &loop.post);
3061 gfc_cleanup_loop (&loop);
3064 return gfc_finish_block (&block);
3068 /* Translate the WHERE construct or statement.
3069 This function can be called iteratively to translate the nested WHERE
3070 construct or statement.
3071 MASK is the control mask. */
3074 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3075 forall_info * nested_forall_info, stmtblock_t * block)
3077 stmtblock_t inner_size_body;
3078 tree inner_size, size;
3086 tree count1, count2;
3090 tree pcmask = NULL_TREE;
3091 tree ppmask = NULL_TREE;
3092 tree cmask = NULL_TREE;
3093 tree pmask = NULL_TREE;
3095 /* The WHERE statement or the WHERE construct statement. */
3096 cblock = code->block;
3098 /* As the mask array can be very big, prefer compact boolean types. */
3099 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3101 /* Determine which temporary masks are needed. */
3104 /* One clause: No ELSEWHEREs. */
3105 need_cmask = (cblock->next != 0);
3108 else if (cblock->block->block)
3110 /* Three or more clauses: Conditional ELSEWHEREs. */
3114 else if (cblock->next)
3116 /* Two clauses, the first non-empty. */
3118 need_pmask = (mask != NULL_TREE
3119 && cblock->block->next != 0);
3121 else if (!cblock->block->next)
3123 /* Two clauses, both empty. */
3127 /* Two clauses, the first empty, the second non-empty. */
3130 need_cmask = (cblock->block->expr != 0);
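/* Editorial example: a construct such as

       WHERE (a > 0)
         x = y
       ELSEWHERE (b > 0)
         x = z
       ELSEWHERE
         x = w
       END WHERE

   has three clauses, so both the control mask (cmask) and the pending
   control mask (pmask) temporaries are needed; a single-clause WHERE never
   needs pmask and needs cmask only if the clause has a body.  */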
3139 if (need_cmask || need_pmask)
3141 /* Calculate the size of temporary needed by the mask-expr. */
3142 gfc_init_block (&inner_size_body);
3143 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3144 &inner_size_body, &lss, &rss);
3146 /* Calculate the total size of temporary needed. */
3147 size = compute_overall_iter_number (nested_forall_info, inner_size,
3148 &inner_size_body, block);
3150 /* Allocate temporary for WHERE mask if needed. */
3152 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3155 /* Allocate temporary for !mask if needed. */
3157 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3163 /* Each time around this loop, the where clause is conditional
3164 on the value of mask and invert, which are updated at the
3165 bottom of the loop. */
3167 /* Has mask-expr. */
3170 /* Ensure that the WHERE mask will be evaluated exactly once.
3171 If there are no statements in this WHERE/ELSEWHERE clause,
3172 then we don't need to update the control mask (cmask).
3173 If this is the last clause of the WHERE construct, then
3174 we don't need to update the pending control mask (pmask). */
3176 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3178 cblock->next ? cmask : NULL_TREE,
3179 cblock->block ? pmask : NULL_TREE,
3182 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3184 (cblock->next || cblock->block)
3185 ? cmask : NULL_TREE,
3186 NULL_TREE, mask_type, block);
3190 /* It's a final elsewhere-stmt. No mask-expr is present. */
3194 /* The body of this where clause is controlled by cmask with
3195 sense specified by invert. */
3197 /* Get the assignment statement of a WHERE statement, or the first
3198 statement in where-body-construct of a WHERE construct. */
3199 cnext = cblock->next;
3204 /* WHERE assignment statement. */
3206 expr1 = cnext->expr;
3207 expr2 = cnext->expr2;
3208 if (nested_forall_info != NULL)
3210 need_temp = gfc_check_dependency (expr1, expr2, 0);
3212 gfc_trans_assign_need_temp (expr1, expr2,
3214 nested_forall_info, block);
3217 /* Variables to control maskexpr. */
3218 count1 = gfc_create_var (gfc_array_index_type, "count1");
3219 count2 = gfc_create_var (gfc_array_index_type, "count2");
3220 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3221 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3223 tmp = gfc_trans_where_assign (expr1, expr2,
3227 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3229 gfc_add_expr_to_block (block, tmp);
3234 /* Variables to control maskexpr. */
3235 count1 = gfc_create_var (gfc_array_index_type, "count1");
3236 count2 = gfc_create_var (gfc_array_index_type, "count2");
3237 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3238 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3240 tmp = gfc_trans_where_assign (expr1, expr2,
3243 gfc_add_expr_to_block (block, tmp);
3248 /* WHERE or WHERE construct is part of a where-body-construct. */
3250 gfc_trans_where_2 (cnext, cmask, invert,
3251 nested_forall_info, block);
3258 /* The next statement within the same where-body-construct. */
3259 cnext = cnext->next;
3261 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3262 cblock = cblock->block;
3263 if (mask == NULL_TREE)
3265 /* If we're the initial WHERE, we can simply invert the sense
3266 of the current mask to obtain the "mask" for the remaining
3267 ELSEWHEREs. */
3273 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3279 /* If we allocated a pending mask array, deallocate it now. */
3282 tree args = gfc_chainon_list (NULL_TREE, ppmask);
3283 tmp = build_function_call_expr (gfor_fndecl_internal_free, args);
3284 gfc_add_expr_to_block (block, tmp);
3287 /* If we allocated a current mask array, deallocate it now. */
3290 tree args = gfc_chainon_list (NULL_TREE, pcmask);
3291 tmp = build_function_call_expr (gfor_fndecl_internal_free, args);
3292 gfc_add_expr_to_block (block, tmp);
3296 /* Translate a simple WHERE construct or statement without dependencies.
3297 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3298 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3299 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
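/* Editorial illustration: under those restrictions a construct like

       WHERE (c > 0)
         x = y
       ELSEWHERE
         x = z
       END WHERE

   is emitted as a single scalarized loop whose body is essentially

       if (c[i] > 0) x[i] = y[i]; else x[i] = z[i];

   with no mask temporary at all.  */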
3302 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3304 stmtblock_t block, body;
3305 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3306 tree tmp, cexpr, tstmt, estmt;
3307 gfc_ss *css, *tdss, *tsss;
3308 gfc_se cse, tdse, tsse, edse, esse;
3313 cond = cblock->expr;
3314 tdst = cblock->next->expr;
3315 tsrc = cblock->next->expr2;
3316 edst = eblock ? eblock->next->expr : NULL;
3317 esrc = eblock ? eblock->next->expr2 : NULL;
3319 gfc_start_block (&block);
3320 gfc_init_loopinfo (&loop);
3322 /* Handle the condition. */
3323 gfc_init_se (&cse, NULL);
3324 css = gfc_walk_expr (cond);
3325 gfc_add_ss_to_loop (&loop, css);
3327 /* Handle the then-clause. */
3328 gfc_init_se (&tdse, NULL);
3329 gfc_init_se (&tsse, NULL);
3330 tdss = gfc_walk_expr (tdst);
3331 tsss = gfc_walk_expr (tsrc);
3332 if (tsss == gfc_ss_terminator)
3334 tsss = gfc_get_ss ();
3335 tsss->next = gfc_ss_terminator;
3336 tsss->type = GFC_SS_SCALAR;
3339 gfc_add_ss_to_loop (&loop, tdss);
3340 gfc_add_ss_to_loop (&loop, tsss);
3344 /* Handle the else clause. */
3345 gfc_init_se (&edse, NULL);
3346 gfc_init_se (&esse, NULL);
3347 edss = gfc_walk_expr (edst);
3348 esss = gfc_walk_expr (esrc);
3349 if (esss == gfc_ss_terminator)
3351 esss = gfc_get_ss ();
3352 esss->next = gfc_ss_terminator;
3353 esss->type = GFC_SS_SCALAR;
3356 gfc_add_ss_to_loop (&loop, edss);
3357 gfc_add_ss_to_loop (&loop, esss);
3360 gfc_conv_ss_startstride (&loop);
3361 gfc_conv_loop_setup (&loop);
3363 gfc_mark_ss_chain_used (css, 1);
3364 gfc_mark_ss_chain_used (tdss, 1);
3365 gfc_mark_ss_chain_used (tsss, 1);
3368 gfc_mark_ss_chain_used (edss, 1);
3369 gfc_mark_ss_chain_used (esss, 1);
3372 gfc_start_scalarized_body (&loop, &body);
3374 gfc_copy_loopinfo_to_se (&cse, &loop);
3375 gfc_copy_loopinfo_to_se (&tdse, &loop);
3376 gfc_copy_loopinfo_to_se (&tsse, &loop);
3382 gfc_copy_loopinfo_to_se (&edse, &loop);
3383 gfc_copy_loopinfo_to_se (&esse, &loop);
3388 gfc_conv_expr (&cse, cond);
3389 gfc_add_block_to_block (&body, &cse.pre);
3392 gfc_conv_expr (&tsse, tsrc);
3393 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3395 gfc_conv_tmp_array_ref (&tdse);
3396 gfc_advance_se_ss_chain (&tdse);
3399 gfc_conv_expr (&tdse, tdst);
3403 gfc_conv_expr (&esse, esrc);
3404 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3406 gfc_conv_tmp_array_ref (&edse);
3407 gfc_advance_se_ss_chain (&edse);
3410 gfc_conv_expr (&edse, edst);
3413 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3414 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3415 : build_empty_stmt ();
3416 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3417 gfc_add_expr_to_block (&body, tmp);
3418 gfc_add_block_to_block (&body, &cse.post);
3420 gfc_trans_scalarizing_loops (&loop, &body);
3421 gfc_add_block_to_block (&block, &loop.pre);
3422 gfc_add_block_to_block (&block, &loop.post);
3423 gfc_cleanup_loop (&loop);
3425 return gfc_finish_block (&block);
3428 /* As the WHERE or WHERE construct statement can be nested, we call
3429 gfc_trans_where_2 to do the translation, and pass the initial
3430 NULL values for both the control mask and the pending control mask. */
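/* Editorial note: the dependency screening below means that, for instance,
   a plain "WHERE (m) a = b" over distinct arrays takes the fast
   gfc_trans_where_3 path, while constructs with conditional or multiple
   ELSEWHERE clauses, several statements per clause, or possible overlaps
   between the operands fall back to gfc_trans_where_2 and its explicit
   control-mask temporaries.  */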
3433 gfc_trans_where (gfc_code * code)
3439 cblock = code->block;
3440 if (cblock->next
3441 && cblock->next->op == EXEC_ASSIGN
3442 && !cblock->next->next)
3444 eblock = cblock->block;
3447 /* A simple "WHERE (cond) x = y" statement or block is
3448 dependence free if cond is not dependent upon writing x,
3449 and the source y is unaffected by the destination x. */
3450 if (!gfc_check_dependency (cblock->next->expr,
3451 cblock->expr, 0)
3452 && !gfc_check_dependency (cblock->next->expr,
3453 cblock->next->expr2, 0))
3454 return gfc_trans_where_3 (cblock, NULL);
3456 else if (!eblock->expr
3457 && !eblock->block
3458 && eblock->next
3459 && eblock->next->op == EXEC_ASSIGN
3460 && !eblock->next->next)
3462 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3463 block is dependence free if cond is not dependent on writes
3464 to x1 and x2, y1 is not dependent on writes to x2, and y2
3465 is not dependent on writes to x1, and both y's are not
3466 dependent upon their own x's. */
3467 if (!gfc_check_dependency(cblock->next->expr,
3468 cblock->expr, 0)
3469 && !gfc_check_dependency(eblock->next->expr,
3470 cblock->expr, 0)
3471 && !gfc_check_dependency(cblock->next->expr,
3472 eblock->next->expr2, 0)
3473 && !gfc_check_dependency(eblock->next->expr,
3474 cblock->next->expr2, 0)
3475 && !gfc_check_dependency(cblock->next->expr,
3476 cblock->next->expr2, 0)
3477 && !gfc_check_dependency(eblock->next->expr,
3478 eblock->next->expr2, 0))
3479 return gfc_trans_where_3 (cblock, eblock);
3483 gfc_start_block (&block);
3485 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3487 return gfc_finish_block (&block);
3491 /* CYCLE a DO loop. The label decl has already been created by
3492 gfc_trans_do(); it's in TREE_PURPOSE (backend_decl) of the gfc_code
3493 node at the head of the loop. We must mark the label as used. */
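/* Editorial illustration: in

       DO i = 1, n
         IF (skip(i)) CYCLE
         ...
       END DO

   the CYCLE becomes a GOTO to the label that gfc_trans_do placed at the
   bottom of the loop body, so control falls through to the index increment
   and the loop test.  */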
3496 gfc_trans_cycle (gfc_code * code)
3500 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3501 TREE_USED (cycle_label) = 1;
3502 return build1_v (GOTO_EXPR, cycle_label);