1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
27 #include "coretypes.h"
29 #include "tree-gimple.h"
36 #include "trans-stmt.h"
37 #include "trans-types.h"
38 #include "trans-array.h"
39 #include "trans-const.h"
41 #include "dependency.h"
/* Per-iterator bookkeeping for a FORALL index variable; instances are
   chained through NEXT.  NOTE(review): remaining members are elided from
   this view — confirm against the full header of the struct.  */
43 typedef struct iter_info
49 struct iter_info *next;
/* State for one nesting level of a FORALL construct; PREV_NEST links to
   the enclosing (outer) FORALL level.  */
53 typedef struct forall_info
60 struct forall_info *prev_nest;
/* Forward declaration: WHERE translation is also reachable from inside a
   FORALL, hence the forall_info parameter.  */
64 static void gfc_trans_where_2 (gfc_code *, tree, bool,
65 forall_info *, stmtblock_t *);
67 /* Translate a F95 label number to a LABEL_EXPR.  */
70 gfc_trans_label_here (gfc_code * code)
/* Emit the statement label attached to this code node as a GENERIC
   LABEL_EXPR so branches translated elsewhere can target it.  */
72 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
76 /* Given a variable expression which has been ASSIGNed to, find the decl
77 containing the auxiliary variables.  For variables in common blocks this
/* Only variables that were the target of a Fortran ASSIGN statement carry
   the auxiliary string-length/address decls this routine exposes.  */
81 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
83 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
84 gfc_conv_expr (se, expr);
85 /* Deals with variable in common block. Get the field declaration.  */
86 if (TREE_CODE (se->expr) == COMPONENT_REF)
87 se->expr = TREE_OPERAND (se->expr, 1);
88 /* Deals with dummy argument. Get the parameter declaration.  */
89 else if (TREE_CODE (se->expr) == INDIRECT_REF)
90 se->expr = TREE_OPERAND (se->expr, 0);
93 /* Translate a label assignment statement.  */
96 gfc_trans_label_assign (gfc_code * code)
106 /* Start a new block.  */
107 gfc_init_se (&se, NULL);
108 gfc_start_block (&se.pre);
109 gfc_conv_label_variable (&se, code->expr);
/* LEN/ADDR are the auxiliary decls attached to the ASSIGNed variable:
   LEN distinguishes target labels (-1) from FORMAT labels (string length),
   ADDR holds the branch target or format-string address.  */
111 len = GFC_DECL_STRING_LEN (se.expr);
112 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
114 label_tree = gfc_get_label_decl (code->label);
116 if (code->label->defined == ST_LABEL_TARGET)
118 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
/* -1 marks this as a branch target rather than a FORMAT label.  */
119 len_tree = integer_minus_one_node;
/* FORMAT label: store the format string and its length instead.  */
123 label_str = code->label->format->value.character.string;
124 label_len = code->label->format->value.character.length;
125 len_tree = build_int_cst (NULL_TREE, label_len);
/* +1 to include the terminating NUL in the backend string constant.  */
126 label_tree = gfc_build_string_const (label_len + 1, label_str);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
130 gfc_add_modify_expr (&se.pre, len, len_tree);
131 gfc_add_modify_expr (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
136 /* Translate a GOTO statement.  */
139 gfc_trans_goto (gfc_code * code)
141 locus loc = code->loc;
/* Ordinary (non-assigned) GOTO: branch straight to the label.  */
147 if (code->label != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
/* Assigned GOTO: the target was stored by an ASSIGN statement.  */
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr);
/* A stored string length of -1 marks a target label (see
   gfc_trans_label_assign); anything else means the variable holds a
   FORMAT label, which is invalid here — diagnose at runtime.  */
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (tmp, "Assigned label is not a target label",
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
/* Computed jump through the stored address.  */
165 target = build1 (GOTO_EXPR, void_type_node, assigned_goto);
166 gfc_add_expr_to_block (&se.pre, target);
167 return gfc_finish_block (&se.pre);
170 /* Check the label list.  */
/* With a label list, compare the stored address against each candidate
   and jump on a match; falling off the list is a runtime error.  */
173 target = gfc_get_label_decl (code->label);
174 tmp = gfc_build_addr_expr (pvoid_type_node, target);
175 tmp = build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
176 tmp = build3_v (COND_EXPR, tmp,
177 build1 (GOTO_EXPR, void_type_node, target),
178 build_empty_stmt ());
179 gfc_add_expr_to_block (&se.pre, tmp);
182 while (code != NULL);
183 gfc_trans_runtime_check (boolean_true_node,
184 "Assigned label is not in the list", &se.pre, &loc);
186 return gfc_finish_block (&se.pre);
190 /* Translate an ENTRY statement.  Just adds a label for this entry point.  */
192 gfc_trans_entry (gfc_code * code)
192 return build1_v (LABEL_EXPR, code->ext.entry->label);
198 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
199 elemental subroutines.  Make temporaries for output arguments if any such
200 dependencies are found.  Output arguments are chosen because internal_unpack
201 can be used, as is, to copy the result back to the variable.  */
203 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
204 gfc_symbol * sym, gfc_actual_arglist * arg)
206 gfc_actual_arglist *arg0;
208 gfc_formal_arglist *formal;
209 gfc_loopinfo tmp_loop;
/* Nothing to do unless the call is being scalarized.  */
221 if (loopse->ss == NULL)
226 formal = sym->formal;
228 /* Loop over all the arguments testing for dependencies.  */
229 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
235 /* Obtain the info structure for the current argument.  */
237 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
241 info = &ss->data.info;
245 /* If there is a dependency, create a temporary and use it
246 instead of the variable.  */
247 fsym = formal ? formal->sym : NULL;
248 if (e->expr_type == EXPR_VARIABLE
250 && fsym->attr.intent == INTENT_OUT
251 && gfc_check_fncall_dependency (e, INTENT_OUT, sym, arg0))
253 /* Make a local loopinfo for the temporary creation, so that
254 none of the other ss->info's have to be renormalized.  */
255 gfc_init_loopinfo (&tmp_loop);
256 for (n = 0; n < info->dimen; n++)
258 tmp_loop.to[n] = loopse->loop->to[n];
259 tmp_loop.from[n] = loopse->loop->from[n];
260 tmp_loop.order[n] = loopse->loop->order[n];
263 /* Generate the temporary.  Merge the block so that the
264 declarations are put at the right binding level.  */
265 size = gfc_create_var (gfc_array_index_type, NULL);
266 data = gfc_create_var (pvoid_type_node, NULL);
267 gfc_start_block (&block);
268 tmp = gfc_typenode_for_spec (&e->ts);
269 tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
270 &tmp_loop, info, tmp,
272 gfc_add_modify_expr (&se->pre, size, tmp);
273 tmp = fold_convert (pvoid_type_node, info->data);
274 gfc_add_modify_expr (&se->pre, data, tmp);
275 gfc_merge_block_scope (&block);
277 /* Obtain the argument descriptor for unpacking.  */
278 gfc_init_se (&parmse, NULL);
279 parmse.want_pointer = 1;
280 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
281 gfc_add_block_to_block (&se->pre, &parmse.pre);
283 /* Calculate the offset for the temporary.  */
/* offset = -sum(from[n] * stride[n]) so that indexing the temporary
   with the loop variables starting at FROM hits element zero.  */
284 offset = gfc_index_zero_node;
285 for (n = 0; n < info->dimen; n++)
287 tmp = gfc_conv_descriptor_stride (info->descriptor,
289 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
290 loopse->loop->from[n], tmp);
291 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
294 info->offset = gfc_create_var (gfc_array_index_type, NULL);
295 gfc_add_modify_expr (&se->pre, info->offset, offset);
297 /* Copy the result back using unpack.  */
298 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
299 gfc_add_expr_to_block (&se->post, tmp);
301 gfc_add_block_to_block (&se->post, &parmse.post);
307 /* Translate the CALL statement.  Builds a call to an F95 subroutine.  */
310 gfc_trans_call (gfc_code * code, bool dependency_check)
314 int has_alternate_specifier;
316 /* A CALL starts a new block because the actual arguments may have to
317 be evaluated first.  */
318 gfc_init_se (&se, NULL);
319 gfc_start_block (&se.pre);
321 gcc_assert (code->resolved_sym);
323 ss = gfc_ss_terminator;
324 if (code->resolved_sym->attr.elemental)
325 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
327 /* Is not an elemental subroutine call with array valued arguments.  */
328 if (ss == gfc_ss_terminator)
331 /* Translate the call.  */
332 has_alternate_specifier
333 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
336 /* A subroutine without side-effect, by definition, does nothing!  */
337 TREE_SIDE_EFFECTS (se.expr) = 1;
339 /* Chain the pieces together and return the block.  */
340 if (has_alternate_specifier)
342 gfc_code *select_code;
/* Alternate returns: the following SELECT dispatches on the value the
   call assigned to its artificial result variable.  */
344 select_code = code->next;
345 gcc_assert(select_code->op == EXEC_SELECT);
346 sym = select_code->expr->symtree->n.sym;
347 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
348 if (sym->backend_decl == NULL)
349 sym->backend_decl = gfc_get_symbol_decl (sym);
350 gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
353 gfc_add_expr_to_block (&se.pre, se.expr);
355 gfc_add_block_to_block (&se.pre, &se.post);
360 /* An elemental subroutine call with array valued arguments has
367 /* gfc_walk_elemental_function_args renders the ss chain in the
368 reverse order to the actual argument order.  */
369 ss = gfc_reverse_ss (ss);
371 /* Initialize the loop.  */
372 gfc_init_se (&loopse, NULL);
373 gfc_init_loopinfo (&loop);
374 gfc_add_ss_to_loop (&loop, ss);
376 gfc_conv_ss_startstride (&loop);
377 gfc_conv_loop_setup (&loop);
378 gfc_mark_ss_chain_used (ss, 1);
380 /* Convert the arguments, checking for dependencies.  */
381 gfc_copy_loopinfo_to_se (&loopse, &loop);
384 /* For operator assignment, we need to do dependency checking.
385 We also check the intent of the parameters.  */
386 if (dependency_check)
389 sym = code->resolved_sym;
/* Use "==", not "=": the original code assigned INTENT_OUT/INTENT_IN
   to the formals' intent field inside the asserts instead of checking
   it, so the asserts could never fire and the symbols were clobbered.  */
390 gcc_assert (sym->formal->sym->attr.intent == INTENT_OUT);
391 gcc_assert (sym->formal->next->sym->attr.intent == INTENT_IN);
392 gfc_conv_elemental_dependencies (&se, &loopse, sym,
396 /* Generate the loop body.  */
397 gfc_start_scalarized_body (&loop, &body);
398 gfc_init_block (&block);
400 /* Add the subroutine call to the block.  */
401 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
403 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
405 gfc_add_block_to_block (&block, &loopse.pre);
406 gfc_add_block_to_block (&block, &loopse.post);
408 /* Finish up the loop block and the loop.  */
409 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
410 gfc_trans_scalarizing_loops (&loop, &body);
411 gfc_add_block_to_block (&se.pre, &loop.pre);
412 gfc_add_block_to_block (&se.pre, &loop.post);
413 gfc_add_block_to_block (&se.pre, &se.post);
414 gfc_cleanup_loop (&loop);
417 return gfc_finish_block (&se.pre);
421 /* Translate the RETURN statement.  */
424 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
432 /* If code->expr is not NULL, this return statement must appear
433 in a subroutine and current_fake_result_decl has already
436 result = gfc_get_fake_result_decl (NULL, 0);
/* Alternate return without a '*' dummy: warn and fall back to a plain
   jump to the function's return label.  */
439 gfc_warning ("An alternate return at %L without a * dummy argument",
441 return build1_v (GOTO_EXPR, gfc_get_return_label ());
444 /* Start a new block for this statement.  */
445 gfc_init_se (&se, NULL);
446 gfc_start_block (&se.pre);
/* Store the alternate-return value into the fake result decl, then
   branch to the shared return label.  */
448 gfc_conv_expr (&se, code->expr);
450 tmp = build2 (MODIFY_EXPR, TREE_TYPE (result), result, se.expr);
451 gfc_add_expr_to_block (&se.pre, tmp);
453 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
454 gfc_add_expr_to_block (&se.pre, tmp);
455 gfc_add_block_to_block (&se.pre, &se.post);
456 return gfc_finish_block (&se.pre);
/* Simple RETURN: just jump to the return label.  */
459 return build1_v (GOTO_EXPR, gfc_get_return_label ());
463 /* Translate the PAUSE statement.  We have to translate this statement
464 to a runtime library call.  */
467 gfc_trans_pause (gfc_code * code)
469 tree gfc_int4_type_node = gfc_get_int_type (4);
473 /* Start a new block for this statement.  */
474 gfc_init_se (&se, NULL);
475 gfc_start_block (&se.pre);
/* No expression: PAUSE with a numeric (possibly default) stop code.  */
478 if (code->expr == NULL)
480 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
481 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
/* PAUSE with a character expression: pass string and length.  */
485 gfc_conv_expr_reference (&se, code->expr);
486 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
487 se.expr, se.string_length);
490 gfc_add_expr_to_block (&se.pre, tmp);
492 gfc_add_block_to_block (&se.pre, &se.post);
494 return gfc_finish_block (&se.pre);
498 /* Translate the STOP statement.  We have to translate this statement
499 to a runtime library call.  */
502 gfc_trans_stop (gfc_code * code)
504 tree gfc_int4_type_node = gfc_get_int_type (4);
508 /* Start a new block for this statement.  */
509 gfc_init_se (&se, NULL);
510 gfc_start_block (&se.pre);
/* Numeric STOP code (mirrors gfc_trans_pause above).  */
513 if (code->expr == NULL)
515 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
516 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
/* Character STOP message: pass string and length.  */
520 gfc_conv_expr_reference (&se, code->expr);
521 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
522 se.expr, se.string_length);
525 gfc_add_expr_to_block (&se.pre, tmp);
527 gfc_add_block_to_block (&se.pre, &se.post);
529 return gfc_finish_block (&se.pre);
533 /* Generate GENERIC for the IF construct.  This function also deals with
534 the simple IF statement, because the front end translates the IF
535 statement into an IF construct.
567 where COND_S is the simplified version of the predicate.  PRE_COND_S
568 are the pre side-effects produced by the translation of the
570 We need to build the chain recursively otherwise we run into
571 problems with folding incomplete statements.  */
574 gfc_trans_if_1 (gfc_code * code)
579 /* Check for an unconditional ELSE clause.  */
581 return gfc_trans_code (code->next);
583 /* Initialize a statement builder for each block. Puts in NULL_TREEs.  */
584 gfc_init_se (&if_se, NULL);
585 gfc_start_block (&if_se.pre);
587 /* Calculate the IF condition expression.  */
588 gfc_conv_expr_val (&if_se, code->expr);
590 /* Translate the THEN clause.  */
591 stmt = gfc_trans_code (code->next);
593 /* Translate the ELSE clause.  */
/* Recursion here builds the ELSE IF chain one COND_EXPR per level.  */
595 elsestmt = gfc_trans_if_1 (code->block);
597 elsestmt = build_empty_stmt ();
599 /* Build the condition expression and add it to the condition block.  */
600 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
602 gfc_add_expr_to_block (&if_se.pre, stmt);
604 /* Finish off this statement.  */
605 return gfc_finish_block (&if_se.pre);
609 gfc_trans_if (gfc_code * code)
611 /* Ignore the top EXEC_IF, it only announces an IF construct. The
612 actual code we must translate is in code->block.  */
614 return gfc_trans_if_1 (code->block);
618 /* Translate an arithmetic IF expression.
620 IF (cond) label1, label2, label3 translates to
632 An optimized version can be generated in case of equal labels.
633 E.g., if label1 is equal to label2, we can translate it to
642 gfc_trans_arithmetic_if (gfc_code * code)
650 /* Start a new block.  */
651 gfc_init_se (&se, NULL);
652 gfc_start_block (&se.pre);
654 /* Pre-evaluate COND.  */
/* Evaluate once into a temporary: COND is compared up to twice below.  */
655 gfc_conv_expr_val (&se, code->expr);
656 se.expr = gfc_evaluate_now (se.expr, &se.pre);
658 /* Build something to compare with.  */
659 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
661 if (code->label->value != code->label2->value)
663 /* If (cond < 0) take branch1 else take branch2.
664 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases.  */
665 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
666 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
/* label1 == label3 collapses the < 0 and > 0 cases: test != 0.  */
668 if (code->label->value != code->label3->value)
669 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
671 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
673 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
/* label1 == label2: the < 0 and == 0 cases share one target.  */
676 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
678 if (code->label->value != code->label3->value
679 && code->label2->value != code->label3->value)
681 /* if (cond <= 0) take branch1 else take branch2.  */
682 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
683 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
684 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
687 /* Append the COND_EXPR to the evaluation of COND, and return.  */
688 gfc_add_expr_to_block (&se.pre, branch1);
689 return gfc_finish_block (&se.pre);
693 /* Translate the simple DO construct.  This is where the loop variable has
694 integer type and step +-1.  We can't use this in the general case
695 because integer overflow and floating point errors could give incorrect
697 We translate a do loop from:
699 DO dovar = from, to, step
705 [Evaluate loop bounds and step]
707 if ((step > 0) ? (dovar <= to) : (dovar >= to))
713 cond = (dovar == to);
715 if (cond) goto end_label;
720 This helps the optimizers by avoiding the extra induction variable
721 used in the general case.  */
724 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
725 tree from, tree to, tree step)
734 type = TREE_TYPE (dovar);
736 /* Initialize the DO variable: dovar = from.  */
737 gfc_add_modify_expr (pblock, dovar, from);
739 /* Cycle and exit statements are implemented with gotos.  */
740 cycle_label = gfc_build_label_decl (NULL_TREE);
741 exit_label = gfc_build_label_decl (NULL_TREE);
743 /* Put the labels where they can be found later. See gfc_trans_do().  */
744 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
747 gfc_start_block (&body);
749 /* Main loop body.  */
750 tmp = gfc_trans_code (code->block->next);
751 gfc_add_expr_to_block (&body, tmp);
753 /* Label for cycle statements (if needed).  */
754 if (TREE_USED (cycle_label))
756 tmp = build1_v (LABEL_EXPR, cycle_label);
757 gfc_add_expr_to_block (&body, tmp);
760 /* Evaluate the loop condition.  */
/* Test dovar == to BEFORE incrementing, so the +-1 step cannot
   overflow past TO.  */
761 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
762 cond = gfc_evaluate_now (cond, &body);
764 /* Increment the loop variable.  */
765 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
766 gfc_add_modify_expr (&body, dovar, tmp);
769 tmp = build1_v (GOTO_EXPR, exit_label);
770 TREE_USED (exit_label) = 1;
771 tmp = fold_build3 (COND_EXPR, void_type_node,
772 cond, tmp, build_empty_stmt ());
773 gfc_add_expr_to_block (&body, tmp);
775 /* Finish the loop body.  */
776 tmp = gfc_finish_block (&body);
777 tmp = build1_v (LOOP_EXPR, tmp);
779 /* Only execute the loop if the number of iterations is positive.  */
/* STEP is a constant +-1 here, so its sign is known at compile time.  */
780 if (tree_int_cst_sgn (step) > 0)
781 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
783 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
784 tmp = fold_build3 (COND_EXPR, void_type_node,
785 cond, tmp, build_empty_stmt ());
786 gfc_add_expr_to_block (pblock, tmp);
788 /* Add the exit label.  */
789 tmp = build1_v (LABEL_EXPR, exit_label);
790 gfc_add_expr_to_block (pblock, tmp);
792 return gfc_finish_block (pblock);
795 /* Translate the DO construct.  This obviously is one of the most
796 important ones to get right with any compiler, but especially
799 We special case some loop forms as described in gfc_trans_simple_do.
800 For other cases we implement them with a separate loop count,
801 as described in the standard.
803 We translate a do loop from:
805 DO dovar = from, to, step
811 [evaluate loop bounds and step]
812 empty = (step > 0 ? to < from : to > from);
813 countm1 = (to - from) / step;
815 if (empty) goto exit_label;
821 if (countm1 == 0) goto exit_label;
826 countm1 is an unsigned integer.  It is equal to the loop count minus one,
827 because the loop count itself can overflow.  */
830 gfc_trans_do (gfc_code * code)
849 gfc_start_block (&block);
851 /* Evaluate all the expressions in the iterator.  */
852 gfc_init_se (&se, NULL);
853 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
854 gfc_add_block_to_block (&block, &se.pre);
856 type = TREE_TYPE (dovar);
858 gfc_init_se (&se, NULL);
859 gfc_conv_expr_val (&se, code->ext.iterator->start);
860 gfc_add_block_to_block (&block, &se.pre);
861 from = gfc_evaluate_now (se.expr, &block);
863 gfc_init_se (&se, NULL);
864 gfc_conv_expr_val (&se, code->ext.iterator->end);
865 gfc_add_block_to_block (&block, &se.pre);
866 to = gfc_evaluate_now (se.expr, &block);
868 gfc_init_se (&se, NULL);
869 gfc_conv_expr_val (&se, code->ext.iterator->step);
870 gfc_add_block_to_block (&block, &se.pre);
871 step = gfc_evaluate_now (se.expr, &block);
873 /* Special case simple loops.  */
/* Integer dovar with constant step +-1 takes the fast path that needs
   no separate iteration counter.  */
874 if (TREE_CODE (type) == INTEGER_TYPE
875 && (integer_onep (step)
876 || tree_int_cst_equal (step, integer_minus_one_node)))
877 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
879 /* We need a special check for empty loops:
880 empty = (step > 0 ? to < from : to > from);  */
881 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
882 fold_convert (type, integer_zero_node));
883 empty = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
884 fold_build2 (LT_EXPR, boolean_type_node, to, from),
885 fold_build2 (GT_EXPR, boolean_type_node, to, from));
887 /* Initialize loop count. This code is executed before we enter the
888 loop body. We generate: countm1 = abs(to - from) / abs(step).  */
889 if (TREE_CODE (type) == INTEGER_TYPE)
/* Integer case: compute in the unsigned counterpart type so the
   subtraction and division cannot invoke signed overflow.  */
893 utype = unsigned_type_for (type);
895 /* tmp = abs(to - from) / abs(step) */
896 ustep = fold_convert (utype, fold_build1 (ABS_EXPR, type, step));
897 tmp = fold_build3 (COND_EXPR, type, pos_step,
898 fold_build2 (MINUS_EXPR, type, to, from),
899 fold_build2 (MINUS_EXPR, type, from, to));
900 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, fold_convert (utype, tmp),
905 /* TODO: We could use the same width as the real type.
906 This would probably cause more problems that it solves
907 when we implement "long double" types.  */
/* Real-typed dovar: compute the trip count in floating point and
   truncate to an unsigned integer.  */
908 utype = unsigned_type_for (gfc_array_index_type);
909 tmp = fold_build2 (MINUS_EXPR, type, to, from);
910 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
911 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
913 countm1 = gfc_create_var (utype, "countm1");
914 gfc_add_modify_expr (&block, countm1, tmp);
916 /* Cycle and exit statements are implemented with gotos.  */
917 cycle_label = gfc_build_label_decl (NULL_TREE);
918 exit_label = gfc_build_label_decl (NULL_TREE);
919 TREE_USED (exit_label) = 1;
921 /* Initialize the DO variable: dovar = from.  */
922 gfc_add_modify_expr (&block, dovar, from);
924 /* If the loop is empty, go directly to the exit label.  */
925 tmp = fold_build3 (COND_EXPR, void_type_node, empty,
926 build1_v (GOTO_EXPR, exit_label), build_empty_stmt ());
927 gfc_add_expr_to_block (&block, tmp);
930 gfc_start_block (&body);
932 /* Put these labels where they can be found later. We put the
933 labels in a TREE_LIST node (because TREE_CHAIN is already
934 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
935 label in TREE_VALUE (backend_decl).  */
937 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
939 /* Main loop body.  */
940 tmp = gfc_trans_code (code->block->next);
941 gfc_add_expr_to_block (&body, tmp);
943 /* Label for cycle statements (if needed).  */
944 if (TREE_USED (cycle_label))
946 tmp = build1_v (LABEL_EXPR, cycle_label);
947 gfc_add_expr_to_block (&body, tmp);
950 /* Increment the loop variable.  */
951 tmp = build2 (PLUS_EXPR, type, dovar, step);
952 gfc_add_modify_expr (&body, dovar, tmp);
954 /* End with the loop condition. Loop until countm1 == 0.  */
955 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
956 build_int_cst (utype, 0));
957 tmp = build1_v (GOTO_EXPR, exit_label);
958 tmp = fold_build3 (COND_EXPR, void_type_node,
959 cond, tmp, build_empty_stmt ());
960 gfc_add_expr_to_block (&body, tmp);
962 /* Decrement the loop count.  */
963 tmp = build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
964 gfc_add_modify_expr (&body, countm1, tmp);
966 /* End of loop body.  */
967 tmp = gfc_finish_block (&body);
969 /* The for loop itself.  */
970 tmp = build1_v (LOOP_EXPR, tmp);
971 gfc_add_expr_to_block (&block, tmp);
973 /* Add the exit label.  */
974 tmp = build1_v (LABEL_EXPR, exit_label);
975 gfc_add_expr_to_block (&block, tmp);
977 return gfc_finish_block (&block);
981 /* Translate the DO WHILE construct.
994 if (! cond) goto exit_label;
1000 Because the evaluation of the exit condition `cond' may have side
1001 effects, we can't do much for empty loop bodies. The backend optimizers
1002 should be smart enough to eliminate any dead loops.  */
1005 gfc_trans_do_while (gfc_code * code)
1013 /* Everything we build here is part of the loop body.  */
1014 gfc_start_block (&block);
1016 /* Cycle and exit statements are implemented with gotos.  */
1017 cycle_label = gfc_build_label_decl (NULL_TREE);
1018 exit_label = gfc_build_label_decl (NULL_TREE);
1020 /* Put the labels where they can be found later. See gfc_trans_do().  */
1021 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1023 /* Create a GIMPLE version of the exit condition.  */
/* The condition is evaluated (with its side effects) at the top of
   every iteration, inside the loop body.  */
1024 gfc_init_se (&cond, NULL);
1025 gfc_conv_expr_val (&cond, code->expr);
1026 gfc_add_block_to_block (&block, &cond.pre);
1027 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1029 /* Build "IF (! cond) GOTO exit_label".  */
1030 tmp = build1_v (GOTO_EXPR, exit_label);
1031 TREE_USED (exit_label) = 1;
1032 tmp = fold_build3 (COND_EXPR, void_type_node,
1033 cond.expr, tmp, build_empty_stmt ());
1034 gfc_add_expr_to_block (&block, tmp);
1036 /* The main body of the loop.  */
1037 tmp = gfc_trans_code (code->block->next);
1038 gfc_add_expr_to_block (&block, tmp);
1040 /* Label for cycle statements (if needed).  */
1041 if (TREE_USED (cycle_label))
1043 tmp = build1_v (LABEL_EXPR, cycle_label);
1044 gfc_add_expr_to_block (&block, tmp);
1047 /* End of loop body.  */
1048 tmp = gfc_finish_block (&block);
1050 gfc_init_block (&block);
1051 /* Build the loop.  */
1052 tmp = build1_v (LOOP_EXPR, tmp);
1053 gfc_add_expr_to_block (&block, tmp);
1055 /* Add the exit label.  */
1056 tmp = build1_v (LABEL_EXPR, exit_label);
1057 gfc_add_expr_to_block (&block, tmp);
1059 return gfc_finish_block (&block);
1063 /* Translate the SELECT CASE construct for INTEGER case expressions,
1064 without killing all potential optimizations. The problem is that
1065 Fortran allows unbounded cases, but the back-end does not, so we
1066 need to intercept those before we enter the equivalent SWITCH_EXPR
1069 For example, we translate this,
1072 CASE (:100,101,105:115)
1082 to the GENERIC equivalent,
1086 case (minimum value for typeof(expr) ... 100:
1092 case 200 ... (maximum value for typeof(expr):
1109 gfc_trans_integer_select (gfc_code * code)
1119 gfc_start_block (&block);
1121 /* Calculate the switch expression.  */
1122 gfc_init_se (&se, NULL);
1123 gfc_conv_expr_val (&se, code->expr);
1124 gfc_add_block_to_block (&block, &se.pre);
1126 end_label = gfc_build_label_decl (NULL_TREE);
1128 gfc_init_block (&body);
1130 for (c = code->block; c; c = c->block)
1132 for (cp = c->ext.case_list; cp; cp = cp->next)
1137 /* Assume it's the default case.  */
1138 low = high = NULL_TREE;
1142 low = gfc_conv_constant_to_tree (cp->low);
1144 /* If there's only a lower bound, set the high bound to the
1145 maximum value of the case expression.  */
1147 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1152 /* Three cases are possible here:
1154 1) There is no lower bound, e.g. CASE (:N).
1155 2) There is a lower bound .NE. high bound, that is
1156 a case range, e.g. CASE (N:M) where M>N (we make
1157 sure that M>N during type resolution).
1158 3) There is a lower bound, and it has the same value
1159 as the high bound, e.g. CASE (N:N). This is our
1160 internal representation of CASE(N).
1162 In the first and second case, we need to set a value for
1163 high. In the third case, we don't because the GCC middle
1164 end represents a single case value by just letting high be
1165 a NULL_TREE. We can't do that because we need to be able
1166 to represent unbounded cases.  */
1170 && mpz_cmp (cp->low->value.integer,
1171 cp->high->value.integer) != 0))
1172 high = gfc_conv_constant_to_tree (cp->high)
1174 /* Unbounded case.  */
1176 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1179 /* Build a label.  */
1180 label = gfc_build_label_decl (NULL_TREE);
1182 /* Add this case label.
1183 Add parameter 'label', make it match GCC backend.  */
1184 tmp = build3 (CASE_LABEL_EXPR, void_type_node, low, high, label);
1185 gfc_add_expr_to_block (&body, tmp);
1188 /* Add the statements for this case.  */
1189 tmp = gfc_trans_code (c->next);
1190 gfc_add_expr_to_block (&body, tmp);
1192 /* Break to the end of the construct.  */
/* Fortran cases never fall through; branch past the SWITCH_EXPR.  */
1193 tmp = build1_v (GOTO_EXPR, end_label);
1194 gfc_add_expr_to_block (&body, tmp);
1197 tmp = gfc_finish_block (&body);
1198 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1199 gfc_add_expr_to_block (&block, tmp);
1201 tmp = build1_v (LABEL_EXPR, end_label);
1202 gfc_add_expr_to_block (&block, tmp);
1204 return gfc_finish_block (&block);
1208 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1210 There are only two cases possible here, even though the standard
1211 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1212 .FALSE., and DEFAULT.
1214 We never generate more than two blocks here.  Instead, we always
1215 try to eliminate the DEFAULT case.  This way, we can translate this
1216 kind of SELECT construct to a simple
1220 expression in GENERIC.  */
1223 gfc_trans_logical_select (gfc_code * code)
1226 gfc_code *t, *f, *d;
1231 /* Assume we don't have any cases at all.  */
1234 /* Now see which ones we actually do have.  We can have at most two
1235 cases in a single case list: one for .TRUE. and one for .FALSE.
1236 The default case is always separate.  If the cases for .TRUE. and
1237 .FALSE. are in the same case list, the block for that case list
1238 always executed, and we don't generate code a COND_EXPR.  */
1239 for (c = code->block; c; c = c->block)
1241 for (cp = c->ext.case_list; cp; cp = cp->next)
1245 if (cp->low->value.logical == 0) /* .FALSE.  */
1247 else /* if (cp->value.logical != 0), thus .TRUE.  */
1255 /* Start a new block.  */
1256 gfc_start_block (&block);
1258 /* Calculate the switch expression.  We always need to do this
1259 because it may have side effects.  */
1260 gfc_init_se (&se, NULL);
1261 gfc_conv_expr_val (&se, code->expr);
1262 gfc_add_block_to_block (&block, &se.pre);
1264 if (t == f && t != NULL)
1266 /* Cases for .TRUE. and .FALSE. are in the same block.  Just
1267 translate the code for these cases, append it to the current
1269 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1273 tree true_tree, false_tree, stmt;
1275 true_tree = build_empty_stmt ();
1276 false_tree = build_empty_stmt ();
1278 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1279 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1280 make the missing case the default case.  */
1281 if (t != NULL && f != NULL)
1291 /* Translate the code for each of these blocks, and append it to
1292 the current block.  */
1294 true_tree = gfc_trans_code (t->next);
1297 false_tree = gfc_trans_code (f->next);
/* The whole construct reduces to a single COND_EXPR on the selector.  */
1299 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1300 true_tree, false_tree);
1301 gfc_add_expr_to_block (&block, stmt);
1304 return gfc_finish_block (&block);
1308 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1309 Instead of generating compares and jumps, it is far simpler to
1310 generate a data structure describing the cases in order and call a
1311 library subroutine that locates the right case.
1312 This is particularly true because this is the only case where we
1313 might have to dispose of a temporary.
1314 The library subroutine returns a pointer to jump to or NULL if no
1315 branches are to be taken. */
/* Translate SELECT CASE on a CHARACTER selector.  A static, constant
   jump table of (low-string, high-string, label-address) records is
   built, and the library routine gfor_fndecl_select_string is called at
   run time to pick the label to branch to (or the end label when no
   case matches).  */
1318 gfc_trans_character_select (gfc_code *code)
1320 tree init, node, end_label, tmp, type, *labels;
1322 stmtblock_t block, body;
/* The record type describing one case range is built once and shared by
   every character SELECT in this translation unit.  */
1328 static tree select_struct;
1329 static tree ss_string1, ss_string1_len;
1330 static tree ss_string2, ss_string2_len;
1331 static tree ss_target;
1333 if (select_struct == NULL)
1335 tree gfc_int4_type_node = gfc_get_int_type (4);
1337 select_struct = make_node (RECORD_TYPE);
1338 TYPE_NAME (select_struct) = get_identifier ("_jump_struct");
/* Helper: append one named field to select_struct and remember its
   FIELD_DECL in the matching ss_* static.  */
1341 #define ADD_FIELD(NAME, TYPE) \
1342 ss_##NAME = gfc_add_field_to_struct \
1343 (&(TYPE_FIELDS (select_struct)), select_struct, \
1344 get_identifier (stringize(NAME)), TYPE)
1346 ADD_FIELD (string1, pchar_type_node);
1347 ADD_FIELD (string1_len, gfc_int4_type_node);
1349 ADD_FIELD (string2, pchar_type_node);
1350 ADD_FIELD (string2_len, gfc_int4_type_node);
1352 ADD_FIELD (target, pvoid_type_node);
1355 gfc_finish_type (select_struct);
/* Walk to the leftmost node of the case list, then count the cases by
   following the right links.  NOTE(review): the statements that set the
   count are between these lines and not visible here -- confirm n is
   established in this walk.  */
1358 cp = code->block->ext.case_list;
1359 while (cp->left != NULL)
1363 for (d = cp; d; d = d->right)
/* One label per case.  The labels must survive gimplification because
   their addresses are stored in a static initializer.  */
1367 labels = gfc_getmem (n * sizeof (tree));
1371 for(i = 0; i < n; i++)
1373 labels[i] = gfc_build_label_decl (NULL_TREE);
1374 TREE_USED (labels[i]) = 1;
1375 /* TODO: The gimplifier should do this for us, but it has
1376 inadequacies when dealing with static initializers. */
1377 FORCED_LABEL (labels[i]) = 1;
1380 end_label = gfc_build_label_decl (NULL_TREE);
/* Emit every case block as: its label(s), the translated statements of
   the block, then a jump past the whole construct.  */
1382 /* Generate the body */
1383 gfc_start_block (&block);
1384 gfc_init_block (&body);
1386 for (c = code->block; c; c = c->block)
1388 for (d = c->ext.case_list; d; d = d->next)
1390 tmp = build1_v (LABEL_EXPR, labels[d->n]);
1391 gfc_add_expr_to_block (&body, tmp);
1394 tmp = gfc_trans_code (c->next);
1395 gfc_add_expr_to_block (&body, tmp);
1397 tmp = build1_v (GOTO_EXPR, end_label);
1398 gfc_add_expr_to_block (&body, tmp);
/* Build the constant array of select_struct records, one per case.  A
   missing low/high bound is encoded as (NULL pointer, length 0).  */
1401 /* Generate the structure describing the branches */
1405 for(d = cp; d; d = d->right, i++)
1409 gfc_init_se (&se, NULL);
1413 node = tree_cons (ss_string1, null_pointer_node, node);
1414 node = tree_cons (ss_string1_len, integer_zero_node, node);
1418 gfc_conv_expr_reference (&se, d->low);
1420 node = tree_cons (ss_string1, se.expr, node);
1421 node = tree_cons (ss_string1_len, se.string_length, node);
1424 if (d->high == NULL)
1426 node = tree_cons (ss_string2, null_pointer_node, node);
1427 node = tree_cons (ss_string2_len, integer_zero_node, node);
1431 gfc_init_se (&se, NULL);
1432 gfc_conv_expr_reference (&se, d->high);
1434 node = tree_cons (ss_string2, se.expr, node);
1435 node = tree_cons (ss_string2_len, se.string_length, node);
/* The address of this case's label is the dispatch target.  */
1438 tmp = gfc_build_addr_expr (pvoid_type_node, labels[i]);
1439 node = tree_cons (ss_target, tmp, node);
1441 tmp = build_constructor_from_list (select_struct, nreverse (node));
1442 init = tree_cons (NULL_TREE, tmp, init);
1445 type = build_array_type (select_struct, build_index_type
1446 (build_int_cst (NULL_TREE, n - 1)));
1448 init = build_constructor_from_list (type, nreverse(init));
1449 TREE_CONSTANT (init) = 1;
1450 TREE_INVARIANT (init) = 1;
1451 TREE_STATIC (init) = 1;
1452 /* Create a static variable to hold the jump table. */
1453 tmp = gfc_create_var (type, "jumptable");
1454 TREE_CONSTANT (tmp) = 1;
1455 TREE_INVARIANT (tmp) = 1;
1456 TREE_STATIC (tmp) = 1;
1457 TREE_READONLY (tmp) = 1;
1458 DECL_INITIAL (tmp) = init;
/* Call the runtime with the table, its length, the fallback label
   (end_label) and the selector string; it returns the address of the
   label to branch to.  */
1461 /* Build the library call */
1462 init = gfc_build_addr_expr (pvoid_type_node, init);
1463 tmp = gfc_build_addr_expr (pvoid_type_node, end_label);
1465 gfc_init_se (&se, NULL);
1466 gfc_conv_expr_reference (&se, code->expr);
1468 gfc_add_block_to_block (&block, &se.pre);
1470 tmp = build_call_expr (gfor_fndecl_select_string, 5,
1471 init, build_int_cst (NULL_TREE, n),
1472 tmp, se.expr, se.string_length);
1474 case_label = gfc_create_var (TREE_TYPE (tmp), "case_label");
1475 gfc_add_modify_expr (&block, case_label, tmp);
/* se.post may dispose of a temporary holding the selector -- this is
   the "dispose of a temporary" case mentioned in the block comment.  */
1477 gfc_add_block_to_block (&block, &se.post);
/* Computed goto to whichever label the library selected.  */
1479 tmp = build1 (GOTO_EXPR, void_type_node, case_label);
1480 gfc_add_expr_to_block (&block, tmp);
1482 tmp = gfc_finish_block (&body);
1483 gfc_add_expr_to_block (&block, tmp);
1484 tmp = build1_v (LABEL_EXPR, end_label);
1485 gfc_add_expr_to_block (&block, tmp);
1490 return gfc_finish_block (&block);
1494 /* Translate the three variants of the SELECT CASE construct.
1496 SELECT CASEs with INTEGER case expressions can be translated to an
1497 equivalent GENERIC switch statement, and for LOGICAL case
1498 expressions we build one or two if-else compares.
1500 SELECT CASEs with CHARACTER case expressions are a whole different
1501 story, because they don't exist in GENERIC. So we sort them and
1502 do a binary search at runtime.
1504 Fortran has no BREAK statement, and it does not allow jumps from
1505 one case block to another. That makes things a lot easier for
/* Dispatch a SELECT CASE construct to the translator matching the
   selector's type: LOGICAL, INTEGER or CHARACTER (see the comment
   above for the strategy each one uses).  */
1509 gfc_trans_select (gfc_code * code)
1511 gcc_assert (code && code->expr);
1513 /* Empty SELECT constructs are legal. */
1514 if (code->block == NULL)
1515 return build_empty_stmt ();
1517 /* Select the correct translation function. */
1518 switch (code->expr->ts.type)
1520 case BT_LOGICAL: return gfc_trans_logical_select (code);
1521 case BT_INTEGER: return gfc_trans_integer_select (code);
1522 case BT_CHARACTER: return gfc_trans_character_select (code);
/* Any other selector type means the resolution phase let a bad
   construct through -- an internal error, not a user error.  */
1524 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1530 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1531 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1532 indicates whether we should generate code to test the FORALLs mask
1533 array. OUTER is the loop header to be used for initializing mask
1536 The generated loop format is:
1537 count = (end - start + step) / step
/* Emit the loops for one FORALL level around BODY.  Each index
   variable gets a separate down-counting driver so the exit test does
   not depend on the index variable itself:
     count = (end - start + step) / step;
     var = start;
     loop { if (count <= 0) goto exit; BODY; var += step; count--; }
   When MASK_FLAG is nonzero and the level has a mask, the linear mask
   index is reset in OUTER and advanced by the innermost loop.  */
1550 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1551 int mask_flag, stmtblock_t *outer)
1559 tree var, start, end, step;
1562 /* Initialize the mask index outside the FORALL nest. */
1563 if (mask_flag && forall_tmp->mask)
1564 gfc_add_modify_expr (outer, forall_tmp->maskindex, gfc_index_zero_node);
/* Wrap BODY in one loop per index variable of this level.  */
1566 iter = forall_tmp->this_loop;
1567 nvar = forall_tmp->nvar;
1568 for (n = 0; n < nvar; n++)
1571 start = iter->start;
1575 exit_label = gfc_build_label_decl (NULL_TREE);
1576 TREE_USED (exit_label) = 1;
1578 /* The loop counter. */
1579 count = gfc_create_var (TREE_TYPE (var), "count");
1581 /* The body of the loop. */
1582 gfc_init_block (&block);
1584 /* The exit condition. */
1585 cond = fold_build2 (LE_EXPR, boolean_type_node,
1586 count, build_int_cst (TREE_TYPE (count), 0));
1587 tmp = build1_v (GOTO_EXPR, exit_label);
1588 tmp = fold_build3 (COND_EXPR, void_type_node,
1589 cond, tmp, build_empty_stmt ())
1590 gfc_add_expr_to_block (&block, tmp);
1592 /* The main loop body. */
1593 gfc_add_expr_to_block (&block, body);
1595 /* Increment the loop variable. */
1596 tmp = build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1597 gfc_add_modify_expr (&block, var, tmp);
/* Only the innermost loop (n == 0) steps the flat mask index; outer
   loops would otherwise advance it too often.  */
1599 /* Advance to the next mask element. Only do this for the
1601 if (n == 0 && mask_flag && forall_tmp->mask)
1603 tree maskindex = forall_tmp->maskindex;
1604 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
1605 maskindex, gfc_index_one_node);
1606 gfc_add_modify_expr (&block, maskindex, tmp);
1609 /* Decrement the loop counter. */
1610 tmp = build2 (MINUS_EXPR, TREE_TYPE (var), count, gfc_index_one_node);
1611 gfc_add_modify_expr (&block, count, tmp);
1613 body = gfc_finish_block (&block);
1615 /* Loop var initialization. */
1616 gfc_init_block (&block);
1617 gfc_add_modify_expr (&block, var, start);
1620 /* Initialize the loop counter. */
/* count = (end + (step - start)) / step, i.e. the trip count.  */
1621 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1622 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1623 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1624 gfc_add_modify_expr (&block, count, tmp);
1626 /* The loop expression. */
1627 tmp = build1_v (LOOP_EXPR, body);
1628 gfc_add_expr_to_block (&block, tmp);
1630 /* The exit label. */
1631 tmp = build1_v (LABEL_EXPR, exit_label);
1632 gfc_add_expr_to_block (&block, tmp);
1634 body = gfc_finish_block (&block);
1641 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1642 is nonzero, the body is controlled by all masks in the forall nest.
1643 Otherwise, the innermost loop is not controlled by its mask. This
1644 is used for initializing that mask. */
/* Wrap BODY in the loops of every level of the FORALL nest, working
   from the current (innermost) level outward via prev_nest.  When
   MASK_FLAG is nonzero, each level's mask (if present) guards BODY;
   with MASK_FLAG zero the innermost body runs unmasked, which is used
   to initialize that level's mask array in the first place.  */
1647 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1652 forall_info *forall_tmp;
1653 tree mask, maskindex;
1655 gfc_start_block (&header);
1657 forall_tmp = nested_forall_info;
1658 while (forall_tmp != NULL)
1660 /* Generate body with masks' control. */
1663 mask = forall_tmp->mask;
1664 maskindex = forall_tmp->maskindex;
1666 /* If a mask was specified make the assignment conditional. */
1669 tmp = gfc_build_array_ref (mask, maskindex);
1670 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
/* Add this level's loops around the (possibly masked) body, then
   move outward to the enclosing FORALL level.  */
1673 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1674 forall_tmp = forall_tmp->prev_nest;
1678 gfc_add_expr_to_block (&header, body);
1679 return gfc_finish_block (&header);
1683 /* Allocate data for holding a temporary array. Returns either a local
1684 temporary array or a pointer variable. */
/* Allocate storage for a temporary array of SIZE elements / BYTESIZE
   bytes.  Small constant-sized temporaries are placed on the stack;
   otherwise heap memory is malloc'd and *PDATA receives the pointer
   (converted to pvoid) so the caller can free it later.  */
1687 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
1694 if (INTEGER_CST_P (size))
1696 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1697 gfc_index_one_node);
/* Build the array type [0 .. size-1] of the element type.  */
1702 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1703 type = build_array_type (elem_type, type);
1704 if (gfc_can_put_var_on_stack (bytesize))
/* Stack placement requires a compile-time-constant size.  */
1706 gcc_assert (INTEGER_CST_P (size));
1707 tmpvar = gfc_create_var (type, "temp");
/* Heap path: the variable is a pointer to the array type, filled in
   by a malloc call emitted into PBLOCK.  */
1712 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1713 *pdata = convert (pvoid_type_node, tmpvar);
1715 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
1716 gfc_add_modify_expr (pblock, tmpvar, tmp);
1722 /* Generate codes to copy the temporary to the actual lhs. */
/* Build the code that copies the flat temporary TMP1 back into EXPR,
   the LHS of a FORALL assignment.  COUNT1 indexes the temporary and
   COUNT3 the optional WHEREMASK; INVERT flips the mask's sense (for
   ELSEWHERE).  A scalar LHS is a single assignment; an array LHS is
   driven by the scalarizer.  */
1725 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1726 tree count1, tree wheremask, bool invert)
1730 stmtblock_t block, body;
1736 lss = gfc_walk_expr (expr);
/* Scalar LHS: no scalarization loop is needed.  */
1738 if (lss == gfc_ss_terminator)
1740 gfc_start_block (&block);
1742 gfc_init_se (&lse, NULL);
1744 /* Translate the expression. */
1745 gfc_conv_expr (&lse, expr);
1747 /* Form the expression for the temporary. */
1748 tmp = gfc_build_array_ref (tmp1, count1);
1750 /* Use the scalar assignment as is. */
1751 gfc_add_block_to_block (&block, &lse.pre);
1752 gfc_add_modify_expr (&block, lse.expr, tmp);
1753 gfc_add_block_to_block (&block, &lse.post);
1755 /* Increment the count1. */
1756 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1757 gfc_index_one_node);
1758 gfc_add_modify_expr (&block, count1, tmp);
1760 tmp = gfc_finish_block (&block);
/* Array LHS: set up a scalarized loop over the LHS's shape.  */
1764 gfc_start_block (&block);
1766 gfc_init_loopinfo (&loop1);
1767 gfc_init_se (&rse, NULL);
1768 gfc_init_se (&lse, NULL);
1770 /* Associate the lss with the loop. */
1771 gfc_add_ss_to_loop (&loop1, lss);
1773 /* Calculate the bounds of the scalarization. */
1774 gfc_conv_ss_startstride (&loop1);
1775 /* Setup the scalarizing loops. */
1776 gfc_conv_loop_setup (&loop1);
1778 gfc_mark_ss_chain_used (lss, 1);
1780 /* Start the scalarized loop body. */
1781 gfc_start_scalarized_body (&loop1, &body);
1783 /* Setup the gfc_se structures. */
1784 gfc_copy_loopinfo_to_se (&lse, &loop1);
/* The "RHS" of the copy-back is the flat temporary element.  */
1787 /* Form the expression of the temporary. */
1788 if (lss != gfc_ss_terminator)
1789 rse.expr = gfc_build_array_ref (tmp1, count1);
1790 /* Translate expr. */
1791 gfc_conv_expr (&lse, expr);
1793 /* Use the scalar assignment. */
1794 rse.string_length = lse.string_length;
1795 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
/* Under WHERE control, wrap the assignment in a test of the mask
   element, negated when INVERT is set.  */
1797 /* Form the mask expression according to the mask tree list. */
1800 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1802 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1803 TREE_TYPE (wheremaskexpr),
1805 tmp = fold_build3 (COND_EXPR, void_type_node,
1806 wheremaskexpr, tmp, build_empty_stmt ());
1809 gfc_add_expr_to_block (&body, tmp);
1811 /* Increment count1. */
1812 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1813 count1, gfc_index_one_node);
1814 gfc_add_modify_expr (&body, count1, tmp);
1816 /* Increment count3. */
1819 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1820 count3, gfc_index_one_node);
1821 gfc_add_modify_expr (&body, count3, tmp);
1824 /* Generate the copying loops. */
1825 gfc_trans_scalarizing_loops (&loop1, &body);
1826 gfc_add_block_to_block (&block, &loop1.pre);
1827 gfc_add_block_to_block (&block, &loop1.post);
1828 gfc_cleanup_loop (&loop1);
1830 tmp = gfc_finish_block (&block);
1836 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
1837 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
1838 and should not be freed. WHEREMASK is the conditional execution mask
1839 whose sense may be inverted by INVERT. */
/* Build the code that evaluates EXPR2 (the RHS of a FORALL assignment)
   and stores each value into the flat temporary TMP1.  COUNT1 indexes
   the temporary and COUNT3 the optional WHEREMASK (sense flipped by
   INVERT).  LSS/RSS come from compute_inner_temp_size() and are owned
   by the caller -- they must not be freed here.  */
1842 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
1843 tree count1, gfc_ss *lss, gfc_ss *rss,
1844 tree wheremask, bool invert)
1846 stmtblock_t block, body1;
1853 gfc_start_block (&block);
1855 gfc_init_se (&rse, NULL);
1856 gfc_init_se (&lse, NULL);
/* Scalar assignment: evaluate the RHS once, no scalarizer loop.  */
1858 if (lss == gfc_ss_terminator)
1860 gfc_init_block (&body1);
1861 gfc_conv_expr (&rse, expr2);
1862 lse.expr = gfc_build_array_ref (tmp1, count1);
/* Array assignment: scalarize over the LHS/RHS shape.  */
1866 /* Initialize the loop. */
1867 gfc_init_loopinfo (&loop);
1869 /* We may need LSS to determine the shape of the expression. */
1870 gfc_add_ss_to_loop (&loop, lss);
1871 gfc_add_ss_to_loop (&loop, rss);
1873 gfc_conv_ss_startstride (&loop);
1874 gfc_conv_loop_setup (&loop);
1876 gfc_mark_ss_chain_used (rss, 1);
1877 /* Start the loop body. */
1878 gfc_start_scalarized_body (&loop, &body1);
1880 /* Translate the expression. */
1881 gfc_copy_loopinfo_to_se (&rse, &loop);
1883 gfc_conv_expr (&rse, expr2);
1885 /* Form the expression of the temporary. */
1886 lse.expr = gfc_build_array_ref (tmp1, count1);
1889 /* Use the scalar assignment. */
1890 lse.string_length = rse.string_length;
1891 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
1892 expr2->expr_type == EXPR_VARIABLE);
/* Under WHERE control, guard the store with the mask element (negated
   when INVERT is set).  */
1894 /* Form the mask expression according to the mask tree list. */
1897 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1899 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1900 TREE_TYPE (wheremaskexpr),
1902 tmp = fold_build3 (COND_EXPR, void_type_node,
1903 wheremaskexpr, tmp, build_empty_stmt ());
1906 gfc_add_expr_to_block (&body1, tmp);
/* Scalar case: append the body directly; the counter is bumped once
   outside it.  */
1908 if (lss == gfc_ss_terminator)
1910 gfc_add_block_to_block (&block, &body1);
1912 /* Increment count1. */
1913 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1914 gfc_index_one_node);
1915 gfc_add_modify_expr (&block, count1, tmp);
1919 /* Increment count1. */
1920 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1921 count1, gfc_index_one_node);
1922 gfc_add_modify_expr (&body1, count1, tmp);
1924 /* Increment count3. */
1927 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1928 count3, gfc_index_one_node);
1929 gfc_add_modify_expr (&body1, count3, tmp);
1932 /* Generate the copying loops. */
1933 gfc_trans_scalarizing_loops (&loop, &body1);
1935 gfc_add_block_to_block (&block, &loop.pre);
1936 gfc_add_block_to_block (&block, &loop.post);
1938 gfc_cleanup_loop (&loop);
1939 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
1940 as tree nodes in SS may not be valid in different scope. */
1943 tmp = gfc_finish_block (&block);
1948 /* Calculate the size of temporary needed in the assignment inside forall.
1949 LSS and RSS are filled in this function. */
/* Compute how many elements a single iteration of the FORALL body
   assigns -- the size of the inner temporary.  EXPR1/EXPR2 are walked
   into *LSS/*RSS, which are returned for reuse by the caller.  For an
   array assignment the scalarizer is run just far enough to get the
   loop bounds, and the extents of all dimensions are multiplied.  */
1952 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
1953 stmtblock_t * pblock,
1954 gfc_ss **lss, gfc_ss **rss)
1962 *lss = gfc_walk_expr (expr1);
/* A scalar LHS needs exactly one element.  */
1965 size = gfc_index_one_node;
1966 if (*lss != gfc_ss_terminator)
1968 gfc_init_loopinfo (&loop);
1970 /* Walk the RHS of the expression. */
1971 *rss = gfc_walk_expr (expr2);
1972 if (*rss == gfc_ss_terminator)
1974 /* The rhs is scalar. Add a ss for the expression. */
1975 *rss = gfc_get_ss ();
1976 (*rss)->next = gfc_ss_terminator;
1977 (*rss)->type = GFC_SS_SCALAR;
1978 (*rss)->expr = expr2;
1981 /* Associate the SS with the loop. */
1982 gfc_add_ss_to_loop (&loop, *lss);
1983 /* We don't actually need to add the rhs at this point, but it might
1984 make guessing the loop bounds a bit easier. */
1985 gfc_add_ss_to_loop (&loop, *rss);
1987 /* We only want the shape of the expression, not rest of the junk
1988 generated by the scalarizer. */
1989 loop.array_parameter = 1;
1991 /* Calculate the bounds of the scalarization. */
/* Suppress -fbounds-check while sizing: the checks would land in this
   throwaway bounds computation and are emitted again by the real
   assignment anyway.  */
1992 save_flag = flag_bounds_check;
1993 flag_bounds_check = 0;
1994 gfc_conv_ss_startstride (&loop);
1995 flag_bounds_check = save_flag;
1996 gfc_conv_loop_setup (&loop);
1998 /* Figure out how many elements we need. */
/* Accumulate size *= extent for every scalarizer dimension.  */
1999 for (i = 0; i < loop.dimen; i++)
2001 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2002 gfc_index_one_node, loop.from[i]);
2003 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2005 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2007 gfc_add_block_to_block (pblock, &loop.pre);
2008 size = gfc_evaluate_now (size, pblock);
2009 gfc_add_block_to_block (pblock, &loop.post);
2011 /* TODO: write a function that cleans up a loopinfo without freeing
2012 the SS chains. Currently a NOP. */
2019 /* Calculate the overall iterator number of the nested forall construct.
2020 This routine actually calculates the number of times the body of the
2021 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2022 that by the expression INNER_SIZE. The BLOCK argument specifies the
2023 block in which to calculate the result, and the optional INNER_SIZE_BODY
2024 argument contains any statements that need to executed (inside the loop)
2025 to initialize or calculate INNER_SIZE. */
/* Compute the total temporary size for a FORALL nest: INNER_SIZE times
   the iteration count of every loop level.  Unmasked levels with a
   constant size are folded into the product at compile time; any
   remaining levels accumulate the result at run time in a "num"
   variable incremented inside the generated loops.  INNER_SIZE_BODY,
   if non-NULL, holds statements needed to evaluate INNER_SIZE and is
   emitted inside the loop body.  */
2028 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2029 stmtblock_t *inner_size_body, stmtblock_t *block)
2031 forall_info *forall_tmp = nested_forall_info;
2035 /* We can eliminate the innermost unconditional loops with constant
2037 if (INTEGER_CST_P (inner_size))
2040 && !forall_tmp->mask
2041 && INTEGER_CST_P (forall_tmp->size))
2043 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2044 inner_size, forall_tmp->size);
2045 forall_tmp = forall_tmp->prev_nest;
2048 /* If there are no loops left, we have our constant result. */
2053 /* Otherwise, create a temporary variable to compute the result. */
2054 number = gfc_create_var (gfc_array_index_type, "num");
2055 gfc_add_modify_expr (block, number, gfc_index_zero_node);
2057 gfc_start_block (&body);
2058 if (inner_size_body)
2059 gfc_add_block_to_block (&body, inner_size_body);
/* num += inner_size, executed once per remaining iteration.  */
2061 tmp = build2 (PLUS_EXPR, gfc_array_index_type, number,
2065 gfc_add_modify_expr (&body, number, tmp);
2066 tmp = gfc_finish_block (&body);
2068 /* Generate loops. */
2069 if (forall_tmp != NULL)
2070 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2072 gfc_add_expr_to_block (block, tmp);
2078 /* Allocate temporary for forall construct. SIZE is the size of temporary
2079 needed. PTEMP1 is returned for space free. */
/* Allocate a temporary of SIZE elements of TYPE for a FORALL nest.
   *PTEMP1 receives the heap pointer the caller must free afterwards
   (or NULL when the temporary was put on the stack).  Returns the
   array lvalue for indexing.  */
2082 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
/* bytesize = size * sizeof (TYPE); skip the multiply for one-byte
   elements.  */
2089 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2090 if (!integer_onep (unit))
2091 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2096 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
/* A heap allocation yields a pointer; dereference to get the array.  */
2099 tmp = build_fold_indirect_ref (tmp);
2104 /* Allocate temporary for forall construct according to the information in
2105 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2106 assignment inside forall. PTEMP1 is returned for space free. */
/* Convenience wrapper: compute the total size of the temporary for the
   whole FORALL nest (compute_overall_iter_number) and allocate it
   (allocate_temp_for_forall_nest_1).  *PTEMP1 is returned so the
   caller can free heap storage afterwards.  */
2109 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2110 tree inner_size, stmtblock_t * inner_size_body,
2111 stmtblock_t * block, tree * ptemp1)
2115 /* Calculate the total size of temporary needed in forall construct. */
2116 size = compute_overall_iter_number (nested_forall_info, inner_size,
2117 inner_size_body, block);
2119 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2123 /* Handle assignments inside forall which need a temporary.
2125 forall (i=start:end:stride; maskexpr)
2128 (where e,f<i> are arbitrary expressions possibly involving i
2129 and there is a dependency between e<i> and f<i>)
2131 masktmp(:) = maskexpr(:)
2136 for (i = start; i <= end; i += stride)
2140 for (i = start; i <= end; i += stride)
2142 if (masktmp[maskindex++])
2143 tmp[count1++] = f<i>
2147 for (i = start; i <= end; i += stride)
2149 if (masktmp[maskindex++])
2150 e<i> = tmp[count1++]
/* Translate EXPR1 = EXPR2 inside a FORALL when there is a dependency
   between the two sides.  Two passes over the nest are generated:
   first every RHS value is evaluated into a flat temporary, then the
   temporary is copied into the LHS.  WHEREMASK/INVERT add WHERE
   control to both passes.  Appends everything to BLOCK.  */
2155 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2156 tree wheremask, bool invert,
2157 forall_info * nested_forall_info,
2158 stmtblock_t * block)
2166 stmtblock_t inner_size_body;
2168 /* Create vars. count1 is the current iterator number of the nested
2170 count1 = gfc_create_var (gfc_array_index_type, "count1");
2172 /* Count is the wheremask index. */
2175 count = gfc_create_var (gfc_array_index_type, "count");
2176 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2181 /* Initialize count1. */
2182 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2184 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2185 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2186 gfc_init_block (&inner_size_body);
2187 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2190 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
2191 type = gfc_typenode_for_spec (&expr1->ts);
2193 /* Allocate temporary for nested forall construct according to the
2194 information in nested_forall_info and inner_size. */
2195 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2196 &inner_size_body, block, &ptemp1);
2198 /* Generate codes to copy rhs to the temporary. */
2199 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2202 /* Generate body and loops according to the information in
2203 nested_forall_info. */
2204 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2205 gfc_add_expr_to_block (block, tmp);
/* Reset both counters before the temp -> lhs pass so it walks the
   temporary (and the wheremask) in the same order as the first pass.  */
2208 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2212 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2214 /* Generate codes to copy the temporary to lhs. */
2215 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2218 /* Generate body and loops according to the information in
2219 nested_forall_info. */
2220 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2221 gfc_add_expr_to_block (block, tmp);
2225 /* Free the temporary. */
2226 tmp = gfc_call_free (ptemp1);
2227 gfc_add_expr_to_block (block, tmp);
2232 /* Translate a pointer assignment inside FORALL which needs a temporary. */
/* Translate a pointer assignment EXPR1 => EXPR2 inside a FORALL when a
   temporary is required.  Like gfc_trans_assign_need_temp, this makes
   two passes over the nest: gather every iteration's RHS into a flat
   temporary, then perform the actual pointer assignments from it.
   A scalar target stores a plain pointer per iteration; an array
   target stores a copy of the whole array descriptor.  */
2235 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2236 forall_info * nested_forall_info,
2237 stmtblock_t * block)
2251 tree tmp, tmp1, ptemp1;
/* COUNT indexes the flat temporary in both passes.  */
2253 count = gfc_create_var (gfc_array_index_type, "count");
2254 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2256 inner_size = integer_one_node;
2257 lss = gfc_walk_expr (expr1);
2258 rss = gfc_walk_expr (expr2);
/* Scalar pointer target: the temporary is an array of pointers.  */
2259 if (lss == gfc_ss_terminator)
2261 type = gfc_typenode_for_spec (&expr1->ts);
2262 type = build_pointer_type (type);
2264 /* Allocate temporary for nested forall construct according to the
2265 information in nested_forall_info and inner_size. */
2266 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2267 inner_size, NULL, block, &ptemp1);
/* Pass 1: store the address of each RHS into the temporary.  */
2268 gfc_start_block (&body);
2269 gfc_init_se (&lse, NULL);
2270 lse.expr = gfc_build_array_ref (tmp1, count);
2271 gfc_init_se (&rse, NULL);
2272 rse.want_pointer = 1;
2273 gfc_conv_expr (&rse, expr2);
2274 gfc_add_block_to_block (&body, &rse.pre);
2275 gfc_add_modify_expr (&body, lse.expr,
2276 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2277 gfc_add_block_to_block (&body, &rse.post);
2279 /* Increment count. */
2280 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2281 count, gfc_index_one_node);
2282 gfc_add_modify_expr (&body, count, tmp);
2284 tmp = gfc_finish_block (&body);
2286 /* Generate body and loops according to the information in
2287 nested_forall_info. */
2288 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2289 gfc_add_expr_to_block (block, tmp);
/* Pass 2: rewind COUNT and assign each stored pointer to the LHS.  */
2292 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2294 gfc_start_block (&body);
2295 gfc_init_se (&lse, NULL);
2296 gfc_init_se (&rse, NULL);
2297 rse.expr = gfc_build_array_ref (tmp1, count);
2298 lse.want_pointer = 1;
2299 gfc_conv_expr (&lse, expr1);
2300 gfc_add_block_to_block (&body, &lse.pre);
2301 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2302 gfc_add_block_to_block (&body, &lse.post);
2303 /* Increment count. */
2304 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2305 count, gfc_index_one_node);
2306 gfc_add_modify_expr (&body, count, tmp);
2307 tmp = gfc_finish_block (&body);
2309 /* Generate body and loops according to the information in
2310 nested_forall_info. */
2311 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2312 gfc_add_expr_to_block (block, tmp);
/* Array target: the temporary holds one full array descriptor per
   iteration.  Run the scalarizer on the RHS just to get its shape.  */
2316 gfc_init_loopinfo (&loop);
2318 /* Associate the SS with the loop. */
2319 gfc_add_ss_to_loop (&loop, rss);
2321 /* Setup the scalarizing loops and bounds. */
2322 gfc_conv_ss_startstride (&loop);
2324 gfc_conv_loop_setup (&loop);
2326 info = &rss->data.info;
2327 desc = info->descriptor;
2329 /* Make a new descriptor. */
2330 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2331 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2332 loop.from, loop.to, 1);
2334 /* Allocate temporary for nested forall construct. */
2335 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2336 inner_size, NULL, block, &ptemp1);
/* Pass 1: build each RHS descriptor directly into the temporary via
   direct_byref.  */
2337 gfc_start_block (&body);
2338 gfc_init_se (&lse, NULL);
2339 lse.expr = gfc_build_array_ref (tmp1, count);
2340 lse.direct_byref = 1;
2341 rss = gfc_walk_expr (expr2);
2342 gfc_conv_expr_descriptor (&lse, expr2, rss);
2344 gfc_add_block_to_block (&body, &lse.pre);
2345 gfc_add_block_to_block (&body, &lse.post);
2347 /* Increment count. */
2348 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2349 count, gfc_index_one_node);
2350 gfc_add_modify_expr (&body, count, tmp);
2352 tmp = gfc_finish_block (&body);
2354 /* Generate body and loops according to the information in
2355 nested_forall_info. */
2356 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2357 gfc_add_expr_to_block (block, tmp);
/* Pass 2: rewind COUNT and copy each saved descriptor into the LHS
   pointer's descriptor.  */
2360 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2362 parm = gfc_build_array_ref (tmp1, count);
2363 lss = gfc_walk_expr (expr1);
2364 gfc_init_se (&lse, NULL);
2365 gfc_conv_expr_descriptor (&lse, expr1, lss);
2366 gfc_add_modify_expr (&lse.pre, lse.expr, parm);
2367 gfc_start_block (&body);
2368 gfc_add_block_to_block (&body, &lse.pre);
2369 gfc_add_block_to_block (&body, &lse.post);
2371 /* Increment count. */
2372 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2373 count, gfc_index_one_node);
2374 gfc_add_modify_expr (&body, count, tmp);
2376 tmp = gfc_finish_block (&body);
2378 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2379 gfc_add_expr_to_block (block, tmp);
2381 /* Free the temporary. */
2384 tmp = gfc_call_free (ptemp1);
2385 gfc_add_expr_to_block (block, tmp);
2390 /* FORALL and WHERE statements are really nasty, especially when you nest
2391 them. All the rhs of a forall assignment must be evaluated before the
2392 actual assignments are performed. Presumably this also applies to all the
2393 assignments in an inner where statement. */
2395 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2396 linear array, relying on the fact that we process in the same order in all
2399 forall (i=start:end:stride; maskexpr)
2403 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2405 count = ((end + 1 - start) / stride)
2406 masktmp(:) = maskexpr(:)
2409 for (i = start; i <= end; i += stride)
2411 if (masktmp[maskindex++])
2415 for (i = start; i <= end; i += stride)
2417 if (masktmp[maskindex++])
2421 Note that this code only works when there are no dependencies.
2422 Forall loop with array assignments and data dependencies are a real pain,
2423 because the size of the temporary cannot always be determined before the
2424 loop is executed. This problem is compounded by the presence of nested
2429 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2447 gfc_forall_iterator *fa;
2450 gfc_saved_var *saved_vars;
2451 iter_info *this_forall;
2455 /* Do nothing if the mask is false. */
2457 && code->expr->expr_type == EXPR_CONSTANT
2458 && !code->expr->value.logical)
2459 return build_empty_stmt ();
2462 /* Count the FORALL index number. */
2463 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2467 /* Allocate the space for var, start, end, step, varexpr. */
2468 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2469 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2470 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2471 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2472 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2473 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2475 /* Allocate the space for info. */
2476 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2478 gfc_start_block (&block);
2481 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2483 gfc_symbol *sym = fa->var->symtree->n.sym;
2485 /* Allocate space for this_forall. */
2486 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2488 /* Create a temporary variable for the FORALL index. */
2489 tmp = gfc_typenode_for_spec (&sym->ts);
2490 var[n] = gfc_create_var (tmp, sym->name);
2491 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2493 /* Record it in this_forall. */
2494 this_forall->var = var[n];
2496 /* Replace the index symbol's backend_decl with the temporary decl. */
2497 sym->backend_decl = var[n];
2499 /* Work out the start, end and stride for the loop. */
2500 gfc_init_se (&se, NULL);
2501 gfc_conv_expr_val (&se, fa->start);
2502 /* Record it in this_forall. */
2503 this_forall->start = se.expr;
2504 gfc_add_block_to_block (&block, &se.pre);
2507 gfc_init_se (&se, NULL);
2508 gfc_conv_expr_val (&se, fa->end);
2509 /* Record it in this_forall. */
2510 this_forall->end = se.expr;
2511 gfc_make_safe_expr (&se);
2512 gfc_add_block_to_block (&block, &se.pre);
2515 gfc_init_se (&se, NULL);
2516 gfc_conv_expr_val (&se, fa->stride);
2517 /* Record it in this_forall. */
2518 this_forall->step = se.expr;
2519 gfc_make_safe_expr (&se);
2520 gfc_add_block_to_block (&block, &se.pre);
2523 /* Set the NEXT field of this_forall to NULL. */
2524 this_forall->next = NULL;
2525 /* Link this_forall to the info construct. */
2526 if (info->this_loop)
2528 iter_info *iter_tmp = info->this_loop;
2529 while (iter_tmp->next != NULL)
2530 iter_tmp = iter_tmp->next;
2531 iter_tmp->next = this_forall;
2534 info->this_loop = this_forall;
2540 /* Calculate the size needed for the current forall level. */
2541 size = gfc_index_one_node;
2542 for (n = 0; n < nvar; n++)
2544 /* size = (end + step - start) / step. */
2545 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2547 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2549 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2550 tmp = convert (gfc_array_index_type, tmp);
2552 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2555 /* Record the nvar and size of current forall level. */
2561 /* If the mask is .true., consider the FORALL unconditional. */
2562 if (code->expr->expr_type == EXPR_CONSTANT
2563 && code->expr->value.logical)
2571 /* First we need to allocate the mask. */
2574 /* As the mask array can be very big, prefer compact boolean types. */
2575 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2576 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2577 size, NULL, &block, &pmask);
2578 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2580 /* Record them in the info structure. */
2581 info->maskindex = maskindex;
2586 /* No mask was specified. */
2587 maskindex = NULL_TREE;
2588 mask = pmask = NULL_TREE;
2591 /* Link the current forall level to nested_forall_info. */
2592 info->prev_nest = nested_forall_info;
2593 nested_forall_info = info;
2595 /* Copy the mask into a temporary variable if required.
2596 For now we assume a mask temporary is needed. */
2599 /* As the mask array can be very big, prefer compact boolean types. */
2600 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2602 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2604 /* Start of mask assignment loop body. */
2605 gfc_start_block (&body);
2607 /* Evaluate the mask expression. */
2608 gfc_init_se (&se, NULL);
2609 gfc_conv_expr_val (&se, code->expr);
2610 gfc_add_block_to_block (&body, &se.pre);
2612 /* Store the mask. */
2613 se.expr = convert (mask_type, se.expr);
2615 tmp = gfc_build_array_ref (mask, maskindex);
2616 gfc_add_modify_expr (&body, tmp, se.expr);
2618 /* Advance to the next mask element. */
2619 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
2620 maskindex, gfc_index_one_node);
2621 gfc_add_modify_expr (&body, maskindex, tmp);
2623 /* Generate the loops. */
2624 tmp = gfc_finish_block (&body);
2625 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2626 gfc_add_expr_to_block (&block, tmp);
2629 c = code->block->next;
2631 /* TODO: loop merging in FORALL statements. */
2632 /* Now that we've got a copy of the mask, generate the assignment loops. */
2638 /* A scalar or array assignment. */
2639 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2640 /* Temporaries due to array assignment data dependencies introduce
2641 no end of problems. */
2643 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2644 nested_forall_info, &block);
2647 /* Use the normal assignment copying routines. */
2648 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2650 /* Generate body and loops. */
2651 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2653 gfc_add_expr_to_block (&block, tmp);
2659 /* Translate WHERE or WHERE construct nested in FORALL. */
2660 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2663 /* Pointer assignment inside FORALL. */
2664 case EXEC_POINTER_ASSIGN:
2665 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2667 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2668 nested_forall_info, &block);
2671 /* Use the normal assignment copying routines. */
2672 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2674 /* Generate body and loops. */
2675 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2677 gfc_add_expr_to_block (&block, tmp);
2682 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2683 gfc_add_expr_to_block (&block, tmp);
2686 /* Explicit subroutine calls are prevented by the frontend but interface
2687 assignments can legitimately produce them. */
2688 case EXEC_ASSIGN_CALL:
2689 assign = gfc_trans_call (c, true);
2690 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
2691 gfc_add_expr_to_block (&block, tmp);
2701 /* Restore the original index variables. */
2702 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2703 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2705 /* Free the space for var, start, end, step, varexpr. */
2711 gfc_free (saved_vars);
2713 /* Free the space for this forall_info. */
2718 /* Free the temporary for the mask. */
2719 tmp = gfc_call_free (pmask);
2720 gfc_add_expr_to_block (&block, tmp);
2723 pushdecl (maskindex);
2725 return gfc_finish_block (&block);
2729 /* Translate the FORALL statement or construct. */
2731 tree gfc_trans_forall (gfc_code * code)
/* Public entry point: translate a FORALL by delegating to
   gfc_trans_forall_1 with a NULL forall_info, i.e. no enclosing
   FORALL nest.  */
2733   return gfc_trans_forall_1 (code, NULL);
2737 /* Evaluate the WHERE mask expression, copy its value to a temporary.
2738 If the WHERE construct is nested in FORALL, compute the overall temporary
2739 needed by the WHERE mask expression multiplied by the iterator number of
2741 ME is the WHERE mask expression.
2742 MASK is the current execution mask upon input, whose sense may or may
2743 not be inverted as specified by the INVERT argument.
2744 CMASK is the updated execution mask on output, or NULL if not required.
2745 PMASK is the pending execution mask on output, or NULL if not required.
2746 BLOCK is the block in which to place the condition evaluation loops. */
2749 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2750 tree mask, bool invert, tree cmask, tree pmask,
2751 tree mask_type, stmtblock_t * block)
/* Evaluates the mask expression ME exactly once per element and stores
   the result into the control mask CMASK and/or the pending mask PMASK,
   combined with the incoming MASK (whose sense is flipped when INVERT
   is set).  Either CMASK or PMASK may be NULL_TREE when not needed.  */
2756 stmtblock_t body, body1;
2757 tree count, cond, mtmp;
2760 gfc_init_loopinfo (&loop);
/* Walk ME twice: one SS chain drives the loop shape, the other the
   element-wise evaluation.  */
2762 lss = gfc_walk_expr (me);
2763 rss = gfc_walk_expr (me);
2765 /* Variable to index the temporary. */
2766 count = gfc_create_var (gfc_array_index_type, "count");
2767 /* Initialize count. */
2768 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2770 gfc_start_block (&body);
2772 gfc_init_se (&rse, NULL);
2773 gfc_init_se (&lse, NULL);
/* Scalar mask expression: no scalarization loop is required.  */
2775 if (lss == gfc_ss_terminator)
2777 gfc_init_block (&body1);
2781 /* Initialize the loop. */
2782 gfc_init_loopinfo (&loop);
2784 /* We may need LSS to determine the shape of the expression. */
2785 gfc_add_ss_to_loop (&loop, lss);
2786 gfc_add_ss_to_loop (&loop, rss);
2788 gfc_conv_ss_startstride (&loop);
2789 gfc_conv_loop_setup (&loop);
2791 gfc_mark_ss_chain_used (rss, 1);
2792 /* Start the loop body. */
2793 gfc_start_scalarized_body (&loop, &body1);
2795 /* Translate the expression. */
2796 gfc_copy_loopinfo_to_se (&rse, &loop);
2798 gfc_conv_expr (&rse, me);
2801 /* Variable to evaluate mask condition. */
2802 cond = gfc_create_var (mask_type, "cond");
/* mtmp caches the incoming mask element so it is loaded from the MASK
   array only once per iteration; it is needed only when the result is
   actually stored somewhere.  */
2803 if (mask && (cmask || pmask))
2804 mtmp = gfc_create_var (mask_type, "mask");
2805 else mtmp = NULL_TREE;
2807 gfc_add_block_to_block (&body1, &lse.pre);
2808 gfc_add_block_to_block (&body1, &rse.pre);
2810 gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));
2812 if (mask && (cmask || pmask))
2814 tmp = gfc_build_array_ref (mask, count);
/* The TRUTH_NOT here presumably applies when INVERT is set (the guard
   is not visible in this excerpt) -- it realizes the inverted sense of
   the incoming mask.  TODO(review): confirm against full source.  */
2816 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
2817 gfc_add_modify_expr (&body1, mtmp, tmp);
/* Control mask: incoming mask ANDed with the mask value.  */
2822 tmp1 = gfc_build_array_ref (cmask, count);
2825 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2826 gfc_add_modify_expr (&body1, tmp1, tmp);
/* Pending mask: incoming mask ANDed with the negated mask value,
   used by subsequent ELSEWHERE clauses.  */
2831 tmp1 = gfc_build_array_ref (pmask, count);
2832 tmp = build1 (TRUTH_NOT_EXPR, mask_type, cond);
2834 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2835 gfc_add_modify_expr (&body1, tmp1, tmp);
2838 gfc_add_block_to_block (&body1, &lse.post);
2839 gfc_add_block_to_block (&body1, &rse.post);
2841 if (lss == gfc_ss_terminator)
2843 gfc_add_block_to_block (&body, &body1);
2847 /* Increment count. */
2848 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
2849 gfc_index_one_node);
2850 gfc_add_modify_expr (&body1, count, tmp1);
2852 /* Generate the copying loops. */
2853 gfc_trans_scalarizing_loops (&loop, &body1);
2855 gfc_add_block_to_block (&body, &loop.pre);
2856 gfc_add_block_to_block (&body, &loop.post);
2858 gfc_cleanup_loop (&loop);
2859 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2860 as tree nodes in SS may not be valid in different scope. */
2863 tmp1 = gfc_finish_block (&body);
2864 /* If the WHERE construct is inside FORALL, fill the full temporary. */
2865 if (nested_forall_info != NULL)
2866 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
2868 gfc_add_expr_to_block (block, tmp1);
2872 /* Translate an assignment statement in a WHERE statement or construct
2873 statement. The MASK expression is used to control which elements
2874 of EXPR1 shall be assigned. The sense of MASK is specified by
2878 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
2879 tree mask, bool invert,
2880 tree count1, tree count2,
/* Translate one masked assignment EXPR1 = EXPR2 inside WHERE.  Each
   element is assigned only where the MASK element (sense given by
   INVERT) is true.  COUNT1/COUNT2 index into the linear mask array;
   COUNT2 is used for the temporary-copy loop when a data dependency
   forces one.  Returns the generated statement tree.  */
2886 gfc_ss *lss_section;
2893 tree index, maskexpr;
2896 /* TODO: handle this special case.
2897 Special case a single function returning an array. */
2898 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
2900 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
2906 /* Assignment of the form lhs = rhs. */
2907 gfc_start_block (&block);
2909 gfc_init_se (&lse, NULL);
2910 gfc_init_se (&rse, NULL);
2913 lss = gfc_walk_expr (expr1);
2916 /* In each where-assign-stmt, the mask-expr and the variable being
2917 defined shall be arrays of the same shape. */
2918 gcc_assert (lss != gfc_ss_terminator);
2920 /* The assignment needs scalarization. */
2923 /* Find a non-scalar SS from the lhs. */
2924 while (lss_section != gfc_ss_terminator
2925 && lss_section->type != GFC_SS_SECTION)
2926 lss_section = lss_section->next;
2928 gcc_assert (lss_section != gfc_ss_terminator);
2930 /* Initialize the scalarizer. */
2931 gfc_init_loopinfo (&loop);
2934 rss = gfc_walk_expr (expr2);
2935 if (rss == gfc_ss_terminator)
2937 /* The rhs is scalar. Add a ss for the expression. */
2938 rss = gfc_get_ss ();
2939 rss->next = gfc_ss_terminator;
2940 rss->type = GFC_SS_SCALAR;
2944 /* Associate the SS with the loop. */
2945 gfc_add_ss_to_loop (&loop, lss);
2946 gfc_add_ss_to_loop (&loop, rss);
2948 /* Calculate the bounds of the scalarization. */
2949 gfc_conv_ss_startstride (&loop);
2951 /* Resolve any data dependencies in the statement. */
2952 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
2954 /* Setup the scalarizing loops. */
2955 gfc_conv_loop_setup (&loop);
2957 /* Setup the gfc_se structures. */
2958 gfc_copy_loopinfo_to_se (&lse, &loop);
2959 gfc_copy_loopinfo_to_se (&rse, &loop);
2962 gfc_mark_ss_chain_used (rss, 1);
/* loop.temp_ss is non-NULL when dependency resolution decided the rhs
   must go through a temporary; the lhs is then written in a second
   loop after the boundary below.  */
2963 if (loop.temp_ss == NULL)
2966 gfc_mark_ss_chain_used (lss, 1);
2970 lse.ss = loop.temp_ss;
2971 gfc_mark_ss_chain_used (lss, 3);
2972 gfc_mark_ss_chain_used (loop.temp_ss, 3);
2975 /* Start the scalarized loop body. */
2976 gfc_start_scalarized_body (&loop, &body);
2978 /* Translate the expression. */
2979 gfc_conv_expr (&rse, expr2);
2980 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
2982 gfc_conv_tmp_array_ref (&lse);
2983 gfc_advance_se_ss_chain (&lse);
2986 gfc_conv_expr (&lse, expr1);
2988 /* Form the mask expression according to the mask. */
2990 maskexpr = gfc_build_array_ref (mask, index);
/* The negation presumably fires only when INVERT is set (guard elided
   in this excerpt) -- TODO(review): confirm.  */
2992 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
2994 /* Use the scalar assignment as is. */
2996 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
2997 loop.temp_ss != NULL, false);
2999 tmp = gfc_conv_operator_assign (&lse, &rse, sym);
/* Guard the assignment with the mask element: no-op where false.  */
3001 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3003 gfc_add_expr_to_block (&body, tmp);
3005 if (lss == gfc_ss_terminator)
3007 /* Increment count1. */
3008 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3009 count1, gfc_index_one_node);
3010 gfc_add_modify_expr (&body, count1, tmp);
3012 /* Use the scalar assignment as is. */
3013 gfc_add_block_to_block (&block, &body);
3017 gcc_assert (lse.ss == gfc_ss_terminator
3018 && rse.ss == gfc_ss_terminator);
3020 if (loop.temp_ss != NULL)
3022 /* Increment count1 before finish the main body of a scalarized
3024 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3025 count1, gfc_index_one_node);
3026 gfc_add_modify_expr (&body, count1, tmp);
3027 gfc_trans_scalarized_loop_boundary (&loop, &body);
3029 /* We need to copy the temporary to the actual lhs. */
3030 gfc_init_se (&lse, NULL);
3031 gfc_init_se (&rse, NULL);
3032 gfc_copy_loopinfo_to_se (&lse, &loop);
3033 gfc_copy_loopinfo_to_se (&rse, &loop);
3035 rse.ss = loop.temp_ss;
3038 gfc_conv_tmp_array_ref (&rse);
3039 gfc_advance_se_ss_chain (&rse);
3040 gfc_conv_expr (&lse, expr1);
3042 gcc_assert (lse.ss == gfc_ss_terminator
3043 && rse.ss == gfc_ss_terminator);
3045 /* Form the mask expression according to the mask tree list. */
3047 maskexpr = gfc_build_array_ref (mask, index);
3049 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3052 /* Use the scalar assignment as is. */
3053 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3054 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3055 gfc_add_expr_to_block (&body, tmp);
3057 /* Increment count2. */
3058 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3059 count2, gfc_index_one_node);
3060 gfc_add_modify_expr (&body, count2, tmp);
3064 /* Increment count1. */
3065 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3066 count1, gfc_index_one_node);
3067 gfc_add_modify_expr (&body, count1, tmp);
3070 /* Generate the copying loops. */
3071 gfc_trans_scalarizing_loops (&loop, &body);
3073 /* Wrap the whole thing up. */
3074 gfc_add_block_to_block (&block, &loop.pre);
3075 gfc_add_block_to_block (&block, &loop.post);
3076 gfc_cleanup_loop (&loop);
3079 return gfc_finish_block (&block);
3083 /* Translate the WHERE construct or statement.
3084 This function can be called iteratively to translate the nested WHERE
3085 construct or statement.
3086 MASK is the control mask. */
3089 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3090 forall_info * nested_forall_info, stmtblock_t * block)
/* Core WHERE translation.  Walks the clause chain (WHERE, masked
   ELSEWHEREs, final ELSEWHERE), maintaining a current control mask
   (cmask) and a pending mask (pmask) for the remaining clauses.
   Called recursively for WHEREs nested in a where-body-construct,
   and handles WHEREs nested in FORALL via NESTED_FORALL_INFO.  */
3092 stmtblock_t inner_size_body;
3093 tree inner_size, size;
3101 tree count1, count2;
3105 tree pcmask = NULL_TREE;
3106 tree ppmask = NULL_TREE;
3107 tree cmask = NULL_TREE;
3108 tree pmask = NULL_TREE;
3109 gfc_actual_arglist *arg;
3111 /* the WHERE statement or the WHERE construct statement. */
3112 cblock = code->block;
3114 /* As the mask array can be very big, prefer compact boolean types. */
3115 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3117 /* Determine which temporary masks are needed. */
3120 /* One clause: No ELSEWHEREs. */
3121 need_cmask = (cblock->next != 0);
3124 else if (cblock->block->block)
3126 /* Three or more clauses: Conditional ELSEWHEREs. */
3130 else if (cblock->next)
3132 /* Two clauses, the first non-empty. */
3134 need_pmask = (mask != NULL_TREE
3135 && cblock->block->next != 0);
3137 else if (!cblock->block->next)
3139 /* Two clauses, both empty. */
3143 /* Two clauses, the first empty, the second non-empty. */
3146 need_cmask = (cblock->block->expr != 0);
3155 if (need_cmask || need_pmask)
3157 /* Calculate the size of temporary needed by the mask-expr. */
3158 gfc_init_block (&inner_size_body);
3159 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3160 &inner_size_body, &lss, &rss);
3162 /* Calculate the total size of temporary needed. */
3163 size = compute_overall_iter_number (nested_forall_info, inner_size,
3164 &inner_size_body, block);
3166 /* Allocate temporary for WHERE mask if needed. */
3168 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3171 /* Allocate temporary for !mask if needed. */
3173 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3179 /* Each time around this loop, the where clause is conditional
3180 on the value of mask and invert, which are updated at the
3181 bottom of the loop. */
3183 /* Has mask-expr. */
3186 /* Ensure that the WHERE mask will be evaluated exactly once.
3187 If there are no statements in this WHERE/ELSEWHERE clause,
3188 then we don't need to update the control mask (cmask).
3189 If this is the last clause of the WHERE construct, then
3190 we don't need to update the pending control mask (pmask). */
3192 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3194 cblock->next ? cmask : NULL_TREE,
3195 cblock->block ? pmask : NULL_TREE,
3198 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3200 (cblock->next || cblock->block)
3201 ? cmask : NULL_TREE,
3202 NULL_TREE, mask_type, block);
3206 /* It's a final elsewhere-stmt. No mask-expr is present. */
3210 /* The body of this where clause are controlled by cmask with
3211 sense specified by invert. */
3213 /* Get the assignment statement of a WHERE statement, or the first
3214 statement in where-body-construct of a WHERE construct. */
3215 cnext = cblock->next;
3220 /* WHERE assignment statement. */
3221 case EXEC_ASSIGN_CALL:
/* Defined (operator) assignment: unpack lhs/rhs from the actual
   argument list of the resolved call.  */
3223 arg = cnext->ext.actual;
3224 expr1 = expr2 = NULL;
3225 for (; arg; arg = arg->next)
3237 expr1 = cnext->expr;
3238 expr2 = cnext->expr2;
/* Inside FORALL a dependency between lhs and rhs forces the
   assignment through a temporary.  */
3240 if (nested_forall_info != NULL)
3242 need_temp = gfc_check_dependency (expr1, expr2, 0);
3243 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3244 gfc_trans_assign_need_temp (expr1, expr2,
3246 nested_forall_info, block);
3249 /* Variables to control maskexpr. */
3250 count1 = gfc_create_var (gfc_array_index_type, "count1");
3251 count2 = gfc_create_var (gfc_array_index_type, "count2");
3252 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3253 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3255 tmp = gfc_trans_where_assign (expr1, expr2,
3258 cnext->resolved_sym);
3260 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3262 gfc_add_expr_to_block (block, tmp);
3267 /* Variables to control maskexpr. */
3268 count1 = gfc_create_var (gfc_array_index_type, "count1");
3269 count2 = gfc_create_var (gfc_array_index_type, "count2");
3270 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3271 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3273 tmp = gfc_trans_where_assign (expr1, expr2,
3276 cnext->resolved_sym);
3277 gfc_add_expr_to_block (block, tmp);
3282 /* WHERE or WHERE construct is part of a where-body-construct. */
3284 gfc_trans_where_2 (cnext, cmask, invert,
3285 nested_forall_info, block);
3292 /* The next statement within the same where-body-construct. */
3293 cnext = cnext->next;
3295 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3296 cblock = cblock->block;
3297 if (mask == NULL_TREE)
3299 /* If we're the initial WHERE, we can simply invert the sense
3300 of the current mask to obtain the "mask" for the remaining
3307 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3313 /* If we allocated a pending mask array, deallocate it now. */
3316 tmp = gfc_call_free (ppmask);
3317 gfc_add_expr_to_block (block, tmp);
3320 /* If we allocated a current mask array, deallocate it now. */
3323 tmp = gfc_call_free (pcmask);
3324 gfc_add_expr_to_block (block, tmp);
3328 /* Translate a simple WHERE construct or statement without dependencies.
3329 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3330 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3331 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
3334 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
/* Fast path for a dependence-free WHERE with at most one ELSEWHERE:
   a single scalarized loop computes the condition once per element
   and branches to the then-assignment or else-assignment directly,
   with no mask temporary.  */
3336 stmtblock_t block, body;
3337 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3338 tree tmp, cexpr, tstmt, estmt;
3339 gfc_ss *css, *tdss, *tsss;
3340 gfc_se cse, tdse, tsse, edse, esse;
/* CBLOCK->next is the single then-assignment; EBLOCK (if any) holds
   the single else-assignment.  The caller guarantees this shape.  */
3345 cond = cblock->expr;
3346 tdst = cblock->next->expr;
3347 tsrc = cblock->next->expr2;
3348 edst = eblock ? eblock->next->expr : NULL;
3349 esrc = eblock ? eblock->next->expr2 : NULL;
3351 gfc_start_block (&block);
3352 gfc_init_loopinfo (&loop);
3354 /* Handle the condition. */
3355 gfc_init_se (&cse, NULL);
3356 css = gfc_walk_expr (cond);
3357 gfc_add_ss_to_loop (&loop, css);
3359 /* Handle the then-clause. */
3360 gfc_init_se (&tdse, NULL);
3361 gfc_init_se (&tsse, NULL);
3362 tdss = gfc_walk_expr (tdst);
3363 tsss = gfc_walk_expr (tsrc);
/* Scalar rhs: wrap it in a GFC_SS_SCALAR entry so the scalarizer can
   treat it uniformly.  */
3364 if (tsss == gfc_ss_terminator)
3366 tsss = gfc_get_ss ();
3367 tsss->next = gfc_ss_terminator;
3368 tsss->type = GFC_SS_SCALAR;
3371 gfc_add_ss_to_loop (&loop, tdss);
3372 gfc_add_ss_to_loop (&loop, tsss);
3376 /* Handle the else clause. */
3377 gfc_init_se (&edse, NULL);
3378 gfc_init_se (&esse, NULL);
3379 edss = gfc_walk_expr (edst);
3380 esss = gfc_walk_expr (esrc);
3381 if (esss == gfc_ss_terminator)
3383 esss = gfc_get_ss ();
3384 esss->next = gfc_ss_terminator;
3385 esss->type = GFC_SS_SCALAR;
3388 gfc_add_ss_to_loop (&loop, edss);
3389 gfc_add_ss_to_loop (&loop, esss);
3392 gfc_conv_ss_startstride (&loop);
3393 gfc_conv_loop_setup (&loop);
3395 gfc_mark_ss_chain_used (css, 1);
3396 gfc_mark_ss_chain_used (tdss, 1);
3397 gfc_mark_ss_chain_used (tsss, 1);
3400 gfc_mark_ss_chain_used (edss, 1);
3401 gfc_mark_ss_chain_used (esss, 1);
3404 gfc_start_scalarized_body (&loop, &body);
3406 gfc_copy_loopinfo_to_se (&cse, &loop);
3407 gfc_copy_loopinfo_to_se (&tdse, &loop);
3408 gfc_copy_loopinfo_to_se (&tsse, &loop);
3414 gfc_copy_loopinfo_to_se (&edse, &loop);
3415 gfc_copy_loopinfo_to_se (&esse, &loop);
/* Evaluate the WHERE condition for this element.  */
3420 gfc_conv_expr (&cse, cond);
3421 gfc_add_block_to_block (&body, &cse.pre);
3424 gfc_conv_expr (&tsse, tsrc);
3425 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3427 gfc_conv_tmp_array_ref (&tdse);
3428 gfc_advance_se_ss_chain (&tdse);
3431 gfc_conv_expr (&tdse, tdst);
3435 gfc_conv_expr (&esse, esrc);
3436 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3438 gfc_conv_tmp_array_ref (&edse);
3439 gfc_advance_se_ss_chain (&edse);
3442 gfc_conv_expr (&edse, edst);
/* Per-element: if (cond) then-assign else else-assign (or no-op).  */
3445 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3446 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3447 : build_empty_stmt ();
3448 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3449 gfc_add_expr_to_block (&body, tmp);
3450 gfc_add_block_to_block (&body, &cse.post);
3452 gfc_trans_scalarizing_loops (&loop, &body);
3453 gfc_add_block_to_block (&block, &loop.pre);
3454 gfc_add_block_to_block (&block, &loop.post);
3455 gfc_cleanup_loop (&loop);
3457 return gfc_finish_block (&block);
3460 /* As the WHERE or WHERE construct statement can be nested, we call
3461 gfc_trans_where_2 to do the translation, and pass the initial
3462 NULL values for both the control mask and the pending control mask. */
3465 gfc_trans_where (gfc_code * code)
/* Top-level WHERE entry point.  Tries the dependence-free fast path
   (gfc_trans_where_3) for the simple one- or two-clause single
   assignment forms; otherwise falls back to the general mask-array
   translation in gfc_trans_where_2.  */
3471 cblock = code->block;
3473 && cblock->next->op == EXEC_ASSIGN
3474 && !cblock->next->next)
3476 eblock = cblock->block;
3479 /* A simple "WHERE (cond) x = y" statement or block is
3480 dependence free if cond is not dependent upon writing x,
3481 and the source y is unaffected by the destination x. */
3482 if (!gfc_check_dependency (cblock->next->expr,
3484 && !gfc_check_dependency (cblock->next->expr,
3485 cblock->next->expr2, 0))
3486 return gfc_trans_where_3 (cblock, NULL);
3488 else if (!eblock->expr
3491 && eblock->next->op == EXEC_ASSIGN
3492 && !eblock->next->next)
3494 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3495 block is dependence free if cond is not dependent on writes
3496 to x1 and x2, y1 is not dependent on writes to x2, and y2
3497 is not dependent on writes to x1, and both y's are not
3498 dependent upon their own x's. */
3499 if (!gfc_check_dependency(cblock->next->expr,
3501 && !gfc_check_dependency(eblock->next->expr,
3503 && !gfc_check_dependency(cblock->next->expr,
3504 eblock->next->expr2, 0)
3505 && !gfc_check_dependency(eblock->next->expr,
3506 cblock->next->expr2, 0)
3507 && !gfc_check_dependency(cblock->next->expr,
3508 cblock->next->expr2, 0)
3509 && !gfc_check_dependency(eblock->next->expr,
3510 eblock->next->expr2, 0))
3511 return gfc_trans_where_3 (cblock, eblock);
/* General case: full translation with mask temporaries, starting with
   no control mask and no pending mask.  */
3515 gfc_start_block (&block);
3517 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3519 return gfc_finish_block (&block);
3523 /* CYCLE a DO loop. The label decl has already been created by
3524 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
3525 node at the head of the loop. We must mark the label as used. */
3528 gfc_trans_cycle (gfc_code * code)
/* Emit a goto to the loop's cycle label (TREE_PURPOSE of the DO
   node's backend_decl) and mark the label as used so it is not
   discarded.  */
3532   cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3533   TREE_USED (cycle_label) = 1;
3534   return build1_v (GOTO_EXPR, cycle_label);
3538 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3539 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3543 gfc_trans_exit (gfc_code * code)
/* Emit a goto to the loop's exit label (TREE_VALUE of the DO node's
   backend_decl, mirroring gfc_trans_cycle which uses TREE_PURPOSE)
   and mark the label as used.  */
3547   exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3548   TREE_USED (exit_label) = 1;
3549   return build1_v (GOTO_EXPR, exit_label);
3553 /* Translate the ALLOCATE statement. */
3556 gfc_trans_allocate (gfc_code * code)
/* Translate ALLOCATE.  Arrays go through gfc_array_allocate; scalars
   and derived types call the library allocate routine directly.  When
   STAT= is present (code->expr), failures jump to error_label and the
   status is copied into the user's variable at the end.  */
3568   if (!code->ext.alloc_list)
3571   gfc_start_block (&block);
/* STAT= given: create a local status variable and a label to branch
   to on the first failure.  */
3575 tree gfc_int4_type_node = gfc_get_int_type (4);
3577 stat = gfc_create_var (gfc_int4_type_node, "stat");
3578 pstat = build_fold_addr_expr (stat);
3580 error_label = gfc_build_label_decl (NULL_TREE);
3581 TREE_USED (error_label) = 1;
/* No STAT=: pass a null status pointer so the library aborts on
   failure instead of reporting.  */
3585 pstat = integer_zero_node;
3586 stat = error_label = NULL_TREE;
3590   for (al = code->ext.alloc_list; al != NULL; al = al->next)
3594 gfc_init_se (&se, NULL);
3595 gfc_start_block (&se.pre);
3597 se.want_pointer = 1;
3598 se.descriptor_only = 1;
3599 gfc_conv_expr (&se, expr);
/* gfc_array_allocate handles array objects itself and returns
   nonzero; otherwise fall through to the scalar path.  */
3601 if (!gfc_array_allocate (&se, expr, pstat))
3603 /* A scalar or derived type. */
3604 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
/* Deferred-length character: the static type has no size, use the
   evaluated string length instead.  */
3606 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3607 tmp = se.string_length;
3609 tmp = build_call_expr (gfor_fndecl_allocate, 2, tmp, pstat);
3610 tmp = build2 (MODIFY_EXPR, void_type_node, se.expr, tmp);
3611 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT=, test the returned status and bail out of the whole
   ALLOCATE on the first failure.  */
3615 tmp = build1_v (GOTO_EXPR, error_label);
3616 parm = fold_build2 (NE_EXPR, boolean_type_node,
3617 stat, build_int_cst (TREE_TYPE (stat), 0));
3618 tmp = fold_build3 (COND_EXPR, void_type_node,
3619 parm, tmp, build_empty_stmt ());
3620 gfc_add_expr_to_block (&se.pre, tmp);
/* Derived types with allocatable components: null the component
   pointers of the fresh object so later (de)allocation is safe.  */
3623 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3625 tmp = build_fold_indirect_ref (se.expr);
3626 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3627 gfc_add_expr_to_block (&se.pre, tmp);
3632 tmp = gfc_finish_block (&se.pre);
3633 gfc_add_expr_to_block (&block, tmp);
3636   /* Assign the value to the status variable. */
3639 tmp = build1_v (LABEL_EXPR, error_label);
3640 gfc_add_expr_to_block (&block, tmp);
3642 gfc_init_se (&se, NULL);
3643 gfc_conv_expr_lhs (&se, code->expr);
3644 tmp = convert (TREE_TYPE (se.expr), stat);
3645 gfc_add_modify_expr (&block, se.expr, tmp);
3648   return gfc_finish_block (&block);
3652 /* Translate a DEALLOCATE statement.
3653 There are two cases within the for loop:
3654 (1) deallocate(a1, a2, a3) is translated into the following sequence
3655 _gfortran_deallocate(a1, 0B)
3656 _gfortran_deallocate(a2, 0B)
3657 _gfortran_deallocate(a3, 0B)
3658 where the STAT= variable is passed a NULL pointer.
3659 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
3661 _gfortran_deallocate(a1, &stat)
3662 astat = astat + stat
3663 _gfortran_deallocate(a2, &stat)
3664 astat = astat + stat
3665 _gfortran_deallocate(a3, &stat)
3666 astat = astat + stat
3667 In case (1), we simply return at the end of the for loop. In case (2)
3668 we set STAT= astat. */
3670 gfc_trans_deallocate (gfc_code * code)
/* Translate DEALLOCATE.  Without STAT= each object is freed with a
   null status pointer (library aborts on error); with STAT= each
   call's status is accumulated into astat, which is copied to the
   user's variable at the end (see the scheme in the comment above).  */
3675   tree apstat, astat, pstat, stat, tmp;
3678   gfc_start_block (&block);
3680   /* Set up the optional STAT= */
3683 tree gfc_int4_type_node = gfc_get_int_type (4);
3685 /* Variable used with the library call. */
3686 stat = gfc_create_var (gfc_int4_type_node, "stat");
3687 pstat = build_fold_addr_expr (stat);
3689 /* Running total of possible deallocation failures. */
3690 astat = gfc_create_var (gfc_int4_type_node, "astat");
3691 apstat = build_fold_addr_expr (astat);
3693 /* Initialize astat to 0. */
3694 gfc_add_modify_expr (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
3698 pstat = apstat = null_pointer_node;
3699 stat = astat = NULL_TREE;
3702   for (al = code->ext.alloc_list; al != NULL; al = al->next)
3705 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3707 gfc_init_se (&se, NULL);
3708 gfc_start_block (&se.pre);
3710 se.want_pointer = 1;
3711 se.descriptor_only = 1;
3712 gfc_conv_expr (&se, expr);
3714 if (expr->ts.type == BT_DERIVED
3715 && expr->ts.derived->attr.alloc_comp)
/* Find the last component reference, to decide whether we are
   deallocating through a pointer component below.  */
3718 gfc_ref *last = NULL;
3719 for (ref = expr->ref; ref; ref = ref->next)
3720 if (ref->type == REF_COMPONENT)
3723 /* Do not deallocate the components of a derived type
3724 ultimate pointer component. */
3725 if (!(last && last->u.c.component->pointer)
3726 && !(!last && expr->symtree->n.sym->attr.pointer))
3728 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
3730 gfc_add_expr_to_block (&se.pre, tmp);
/* Arrays use gfc_array_deallocate; scalars call the library
   deallocate directly and then null the pointer.  */
3735 tmp = gfc_array_deallocate (se.expr, pstat);
3738 tmp = build_call_expr (gfor_fndecl_deallocate, 2, se.expr, pstat);
3739 gfc_add_expr_to_block (&se.pre, tmp);
3741 tmp = build2 (MODIFY_EXPR, void_type_node,
3742 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
3745 gfc_add_expr_to_block (&se.pre, tmp);
3747 /* Keep track of the number of failed deallocations by adding stat
3748 of the last deallocation to the running total. */
3751 apstat = build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
3752 gfc_add_modify_expr (&se.pre, astat, apstat);
3755 tmp = gfc_finish_block (&se.pre);
3756 gfc_add_expr_to_block (&block, tmp);
3760   /* Assign the value to the status variable. */
3763 gfc_init_se (&se, NULL);
3764 gfc_conv_expr_lhs (&se, code->expr);
3765 tmp = convert (TREE_TYPE (se.expr), astat);
3766 gfc_add_modify_expr (&block, se.expr, tmp);
3769   return gfc_finish_block (&block);