1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
27 #include "coretypes.h"
29 #include "tree-gimple.h"
36 #include "trans-stmt.h"
37 #include "trans-types.h"
38 #include "trans-array.h"
39 #include "trans-const.h"
41 #include "dependency.h"
/* Per-dimension bookkeeping for a FORALL iterator; instances are chained
   through 'next'.  NOTE(review): most members are elided in this chunk.  */
43 typedef struct iter_info
49 struct iter_info *next;
/* One nesting level of a FORALL construct; 'prev_nest' links to the
   enclosing level.  NOTE(review): remaining members are not visible here.  */
53 typedef struct forall_info
60 struct forall_info *prev_nest;
/* Forward declaration: WHERE translation needs the enclosing FORALL
   context, so it is declared before the FORALL machinery is defined.  */
64 static void gfc_trans_where_2 (gfc_code *, tree, bool,
65 forall_info *, stmtblock_t *);
67 /* Translate a F95 label number to a LABEL_EXPR. */

/* NOTE(review): the return type, opening brace and closing brace of this
   definition are elided in this chunk; only the body statement is shown.  */
70 gfc_trans_label_here (gfc_code * code)
/* Emit the GENERIC label decl corresponding to the statement's numeric
   label (code->here).  */
72 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
76 /* Given a variable expression which has been ASSIGNed to, find the decl
77 containing the auxiliary variables. For variables in common blocks this
/* NOTE(review): the tail of this comment and the function's return type
   are elided in this chunk.  */
81 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only variables that were targets of an ASSIGN statement carry the
   auxiliary label decls; anything else is a front-end bug.  */
83 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
84 gfc_conv_expr (se, expr);
85 /* Deals with variable in common block. Get the field declaration. */
86 if (TREE_CODE (se->expr) == COMPONENT_REF)
87 se->expr = TREE_OPERAND (se->expr, 1);
88 /* Deals with dummy argument. Get the parameter declaration. */
89 else if (TREE_CODE (se->expr) == INDIRECT_REF)
90 se->expr = TREE_OPERAND (se->expr, 0);
93 /* Translate a label assignment statement. */

/* Store into the ASSIGN variable's auxiliary decls either (a) the address
   of a branch-target label with length -1, or (b) the label's digit string
   and its length for use as a FORMAT label.  NOTE(review): the return type
   and local declarations are elided in this chunk.  */
96 gfc_trans_label_assign (gfc_code * code)
106 /* Start a new block. */
107 gfc_init_se (&se, NULL);
108 gfc_start_block (&se.pre);
109 gfc_conv_label_variable (&se, code->expr);
111 len = GFC_DECL_STRING_LEN (se.expr);
112 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
114 label_tree = gfc_get_label_decl (code->label);
116 if (code->label->defined == ST_LABEL_TARGET)
/* Branch target: store the label's address; length -1 marks it as a
   target rather than a format (checked in gfc_trans_goto).  */
118 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
119 len_tree = integer_minus_one_node;
/* Otherwise it is a FORMAT label: store its digit string and length.  */
123 label_str = code->label->format->value.character.string;
124 label_len = code->label->format->value.character.length;
125 len_tree = build_int_cst (NULL_TREE, label_len);
126 label_tree = gfc_build_string_const (label_len + 1, label_str);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
130 gfc_add_modify_expr (&se.pre, len, len_tree);
131 gfc_add_modify_expr (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
136 /* Translate a GOTO statement. */

/* Handles both a plain GOTO (code->label set) and an assigned GOTO:
   for the latter, verify at runtime that the stored length is -1 (i.e.
   the variable holds a target label, not a FORMAT — see
   gfc_trans_label_assign), then branch through the stored address.
   NOTE(review): the return type, locals and some control-flow lines
   (the label-list loop header) are elided in this chunk.  */
139 gfc_trans_goto (gfc_code * code)
141 locus loc = code->loc;
147 if (code->label != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
/* Assigned GOTO from here on.  */
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr);
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (tmp, "Assigned label is not a target label",
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
/* No label list: jump straight through the stored address.  */
165 target = build1 (GOTO_EXPR, void_type_node, assigned_goto);
166 gfc_add_expr_to_block (&se.pre, target);
167 return gfc_finish_block (&se.pre);
170 /* Check the label list. */
/* Compare the stored address against each listed label; jump on match.  */
173 target = gfc_get_label_decl (code->label);
174 tmp = gfc_build_addr_expr (pvoid_type_node, target);
175 tmp = build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
176 tmp = build3_v (COND_EXPR, tmp,
177 build1 (GOTO_EXPR, void_type_node, target),
178 build_empty_stmt ());
179 gfc_add_expr_to_block (&se.pre, tmp);
182 while (code != NULL);
/* Fell off the list: the stored label was not among the candidates.  */
183 gfc_trans_runtime_check (boolean_true_node,
184 "Assigned label is not in the list", &se.pre, &loc);
186 return gfc_finish_block (&se.pre);
190 /* Translate an ENTRY statement. Just adds a label for this entry point. */

/* NOTE(review): the return type and braces are elided in this chunk.  */
192 gfc_trans_entry (gfc_code * code)
194 return build1_v (LABEL_EXPR, code->ext.entry->label);
198 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
199 elemental subroutines. Make temporaries for output arguments if any such
200 dependencies are found. Output arguments are chosen because internal_unpack
201 can be used, as is, to copy the result back to the variable. */

/* NOTE(review): the return type, several local declarations, brace lines
   and parts of the argument loop are elided in this chunk.  */
203 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
204 gfc_symbol * sym, gfc_actual_arglist * arg)
206 gfc_actual_arglist *arg0;
208 gfc_formal_arglist *formal;
209 gfc_loopinfo tmp_loop;
/* Nothing to do if the call is not scalarized.  */
221 if (loopse->ss == NULL)
226 formal = sym->formal;
228 /* Loop over all the arguments testing for dependencies. */
229 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
235 /* Obtain the info structure for the current argument. */
237 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
241 info = &ss->data.info;
245 /* If there is a dependency, create a temporary and use it
246 instead of the variable. */
247 fsym = formal ? formal->sym : NULL;
248 if (e->expr_type == EXPR_VARIABLE
250 && fsym->attr.intent == INTENT_OUT
251 && gfc_check_fncall_dependency (e, INTENT_OUT, sym, arg0))
253 /* Make a local loopinfo for the temporary creation, so that
254 none of the other ss->info's have to be renormalized. */
255 gfc_init_loopinfo (&tmp_loop);
256 for (n = 0; n < info->dimen; n++)
258 tmp_loop.to[n] = loopse->loop->to[n];
259 tmp_loop.from[n] = loopse->loop->from[n];
260 tmp_loop.order[n] = loopse->loop->order[n];
263 /* Generate the temporary. Merge the block so that the
264 declarations are put at the right binding level. */
265 size = gfc_create_var (gfc_array_index_type, NULL);
266 data = gfc_create_var (pvoid_type_node, NULL);
267 gfc_start_block (&block);
268 tmp = gfc_typenode_for_spec (&e->ts);
269 tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
270 &tmp_loop, info, tmp,
/* Remember the temporary's size and data pointer for the unpack call.  */
272 gfc_add_modify_expr (&se->pre, size, tmp);
273 tmp = fold_convert (pvoid_type_node, info->data);
274 gfc_add_modify_expr (&se->pre, data, tmp);
275 gfc_merge_block_scope (&block);
277 /* Obtain the argument descriptor for unpacking. */
278 gfc_init_se (&parmse, NULL);
279 parmse.want_pointer = 1;
280 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
281 gfc_add_block_to_block (&se->pre, &parmse.pre);
283 /* Calculate the offset for the temporary. */
/* offset = -sum (from[n] * stride[n]) over all dimensions, so that
   indexing the temporary with the loop variables starts at element 0.  */
284 offset = gfc_index_zero_node;
285 for (n = 0; n < info->dimen; n++)
287 tmp = gfc_conv_descriptor_stride (info->descriptor,
289 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
290 loopse->loop->from[n], tmp);
291 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
294 info->offset = gfc_create_var (gfc_array_index_type, NULL);
295 gfc_add_modify_expr (&se->pre, info->offset, offset);
297 /* Copy the result back using unpack. */
298 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
299 gfc_add_expr_to_block (&se->post, tmp);
301 gfc_add_block_to_block (&se->post, &parmse.post);
307 /* Translate the CALL statement. Builds a call to an F95 subroutine. */

/* Two paths: a plain (non-scalarized) call, and an elemental call with
   array-valued arguments, which is wrapped in a scalarization loop.
   Alternate returns are handled by assigning the call's result to the
   variable tested by the EXEC_SELECT that the front end placed after
   the call.  NOTE(review): the return type, local declarations and brace
   lines are elided in this chunk.  */
310 gfc_trans_call (gfc_code * code, bool dependency_check)
314 int has_alternate_specifier;
316 /* A CALL starts a new block because the actual arguments may have to
317 be evaluated first. */
318 gfc_init_se (&se, NULL);
319 gfc_start_block (&se.pre);
321 gcc_assert (code->resolved_sym);
323 ss = gfc_ss_terminator;
324 if (code->resolved_sym->attr.elemental)
325 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
327 /* Is not an elemental subroutine call with array valued arguments. */
328 if (ss == gfc_ss_terminator)
331 /* Translate the call. */
332 has_alternate_specifier
333 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
336 /* A subroutine without side-effect, by definition, does nothing! */
337 TREE_SIDE_EFFECTS (se.expr) = 1;
339 /* Chain the pieces together and return the block. */
340 if (has_alternate_specifier)
342 gfc_code *select_code;
/* The front end guarantees a SELECT immediately after a call with
   alternate returns; store the specifier into its selector variable.  */
344 select_code = code->next;
345 gcc_assert(select_code->op == EXEC_SELECT);
346 sym = select_code->expr->symtree->n.sym;
347 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
348 if (sym->backend_decl == NULL)
349 sym->backend_decl = gfc_get_symbol_decl (sym);
350 gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
353 gfc_add_expr_to_block (&se.pre, se.expr);
355 gfc_add_block_to_block (&se.pre, &se.post);
360 /* An elemental subroutine call with array valued arguments has
367 /* gfc_walk_elemental_function_args renders the ss chain in the
368 reverse order to the actual argument order. */
369 ss = gfc_reverse_ss (ss);
371 /* Initialize the loop. */
372 gfc_init_se (&loopse, NULL);
373 gfc_init_loopinfo (&loop);
374 gfc_add_ss_to_loop (&loop, ss);
376 gfc_conv_ss_startstride (&loop);
377 gfc_conv_loop_setup (&loop);
378 gfc_mark_ss_chain_used (ss, 1);
380 /* Convert the arguments, checking for dependencies. */
381 gfc_copy_loopinfo_to_se (&loopse, &loop);
384 /* For operator assignment, we need to do dependency checking.
385 We also check the intent of the parameters. */
386 if (dependency_check)
/* Operator assignment: first formal must be INTENT(OUT), second
   INTENT(IN) — the resolver guarantees this shape.  */
389 sym = code->resolved_sym;
390 gcc_assert (sym->formal->sym->attr.intent == INTENT_OUT);
391 gcc_assert (sym->formal->next->sym->attr.intent == INTENT_IN);
392 gfc_conv_elemental_dependencies (&se, &loopse, sym,
396 /* Generate the loop body. */
397 gfc_start_scalarized_body (&loop, &body);
398 gfc_init_block (&block);
400 /* Add the subroutine call to the block. */
401 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
403 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
405 gfc_add_block_to_block (&block, &loopse.pre);
406 gfc_add_block_to_block (&block, &loopse.post);
408 /* Finish up the loop block and the loop. */
409 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
410 gfc_trans_scalarizing_loops (&loop, &body);
411 gfc_add_block_to_block (&se.pre, &loop.pre);
412 gfc_add_block_to_block (&se.pre, &loop.post);
413 gfc_add_block_to_block (&se.pre, &se.post);
414 gfc_cleanup_loop (&loop);
417 return gfc_finish_block (&se.pre);
421 /* Translate the RETURN statement. */

/* A RETURN with an expression is an alternate return inside a subroutine:
   assign the specifier to the fake result variable, then jump to the
   function's return label.  A plain RETURN is just the jump.
   NOTE(review): the return type, locals, brace lines and the code->expr
   test are elided in this chunk.  */
424 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
432 /* If code->expr is not NULL, this return statement must appear
433 in a subroutine and current_fake_result_decl has already
436 result = gfc_get_fake_result_decl (NULL, 0);
/* No result decl: warn and degrade to a plain return jump.  */
439 gfc_warning ("An alternate return at %L without a * dummy argument",
441 return build1_v (GOTO_EXPR, gfc_get_return_label ());
444 /* Start a new block for this statement. */
445 gfc_init_se (&se, NULL);
446 gfc_start_block (&se.pre);
448 gfc_conv_expr (&se, code->expr);
450 tmp = build2 (MODIFY_EXPR, TREE_TYPE (result), result, se.expr);
451 gfc_add_expr_to_block (&se.pre, tmp);
453 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
454 gfc_add_expr_to_block (&se.pre, tmp);
455 gfc_add_block_to_block (&se.pre, &se.post);
456 return gfc_finish_block (&se.pre);
/* Plain RETURN (code->expr == NULL).  */
459 return build1_v (GOTO_EXPR, gfc_get_return_label ());
463 /* Translate the PAUSE statement. We have to translate this statement
464 to a runtime library call. */

/* Calls _gfortran_pause_numeric with the stop code, or
   _gfortran_pause_string with a character expression and its length.
   NOTE(review): the return type, locals and brace lines are elided.  */
467 gfc_trans_pause (gfc_code * code)
469 tree gfc_int4_type_node = gfc_get_int_type (4);
473 /* Start a new block for this statement. */
474 gfc_init_se (&se, NULL);
475 gfc_start_block (&se.pre);
/* No expression: pass the numeric stop code.  */
478 if (code->expr == NULL)
480 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
481 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
/* Character expression: pass address and length.  */
485 gfc_conv_expr_reference (&se, code->expr);
486 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
487 se.expr, se.string_length);
490 gfc_add_expr_to_block (&se.pre, tmp);
492 gfc_add_block_to_block (&se.pre, &se.post);
494 return gfc_finish_block (&se.pre);
498 /* Translate the STOP statement. We have to translate this statement
499 to a runtime library call. */

/* Mirrors gfc_trans_pause, but calls the stop_numeric / stop_string
   library entry points instead.  NOTE(review): the return type, locals
   and brace lines are elided in this chunk.  */
502 gfc_trans_stop (gfc_code * code)
504 tree gfc_int4_type_node = gfc_get_int_type (4);
508 /* Start a new block for this statement. */
509 gfc_init_se (&se, NULL);
510 gfc_start_block (&se.pre);
/* No expression: pass the numeric stop code.  */
513 if (code->expr == NULL)
515 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
516 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
/* Character expression: pass address and length.  */
520 gfc_conv_expr_reference (&se, code->expr);
521 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
522 se.expr, se.string_length);
525 gfc_add_expr_to_block (&se.pre, tmp);
527 gfc_add_block_to_block (&se.pre, &se.post);
529 return gfc_finish_block (&se.pre);
533 /* Generate GENERIC for the IF construct. This function also deals with
534 the simple IF statement, because the front end translates the IF
535 statement into an IF construct.
567 where COND_S is the simplified version of the predicate. PRE_COND_S
568 are the pre side-effects produced by the translation of the
570 We need to build the chain recursively otherwise we run into
571 problems with folding incomplete statements. */

/* NOTE(review): the return type, locals, brace lines and the test that
   detects an unconditional ELSE are elided in this chunk.  */
574 gfc_trans_if_1 (gfc_code * code)
579 /* Check for an unconditional ELSE clause. */
581 return gfc_trans_code (code->next);
583 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
584 gfc_init_se (&if_se, NULL);
585 gfc_start_block (&if_se.pre);
587 /* Calculate the IF condition expression. */
588 gfc_conv_expr_val (&if_se, code->expr);
590 /* Translate the THEN clause. */
591 stmt = gfc_trans_code (code->next);
593 /* Translate the ELSE clause. */
/* Recurse on the next block so ELSE IF chains become nested COND_EXPRs.  */
595 elsestmt = gfc_trans_if_1 (code->block);
597 elsestmt = build_empty_stmt ();
599 /* Build the condition expression and add it to the condition block. */
600 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
602 gfc_add_expr_to_block (&if_se.pre, stmt);
604 /* Finish off this statement. */
605 return gfc_finish_block (&if_se.pre);
/* Entry point for IF translation; delegates to gfc_trans_if_1.
   NOTE(review): the return type and braces are elided in this chunk.  */
609 gfc_trans_if (gfc_code * code)
611 /* Ignore the top EXEC_IF, it only announces an IF construct. The
612 actual code we must translate is in code->block. */
614 return gfc_trans_if_1 (code->block);
618 /* Translate an arithmetic IF expression.
620 IF (cond) label1, label2, label3 translates to
632 An optimized version can be generated in case of equal labels.
633 E.g., if label1 is equal to label2, we can translate it to

/* Three-way branch on the sign of COND (negative / zero / positive),
   with equal-label pairs collapsed into simpler comparisons.
   NOTE(review): the return type, locals and brace lines are elided.  */
642 gfc_trans_arithmetic_if (gfc_code * code)
650 /* Start a new block. */
651 gfc_init_se (&se, NULL);
652 gfc_start_block (&se.pre);
654 /* Pre-evaluate COND. */
655 gfc_conv_expr_val (&se, code->expr);
656 se.expr = gfc_evaluate_now (se.expr, &se.pre);
658 /* Build something to compare with. */
659 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
661 if (code->label->value != code->label2->value)
663 /* If (cond < 0) take branch1 else take branch2.
664 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
665 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
666 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
/* label1 == label3 means only the sign test against label2 matters,
   so NE replaces LT.  */
668 if (code->label->value != code->label3->value)
669 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
671 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
673 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
/* label1 == label2: the negative and zero cases share one target.  */
676 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
678 if (code->label->value != code->label3->value
679 && code->label2->value != code->label3->value)
681 /* if (cond <= 0) take branch1 else take branch2. */
682 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
683 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
684 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
687 /* Append the COND_EXPR to the evaluation of COND, and return. */
688 gfc_add_expr_to_block (&se.pre, branch1);
689 return gfc_finish_block (&se.pre);
693 /* Translate the simple DO construct. This is where the loop variable has
694 integer type and step +-1. We can't use this in the general case
695 because integer overflow and floating point errors could give incorrect
697 We translate a do loop from:
699 DO dovar = from, to, step
705 [Evaluate loop bounds and step]
707 if ((step > 0) ? (dovar <= to) : (dovar => to))
713 cond = (dovar == to);
715 if (cond) goto end_label;
720 This helps the optimizers by avoiding the extra induction variable
721 used in the general case. */

/* NOTE(review): the return type, locals and brace lines are elided in
   this chunk.  Step is known to be +1 or -1 here, so dovar == to is a
   safe exit test evaluated BEFORE the increment (no overflow).  */
724 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
725 tree from, tree to, tree step)
734 type = TREE_TYPE (dovar);
736 /* Initialize the DO variable: dovar = from. */
737 gfc_add_modify_expr (pblock, dovar, from);
739 /* Cycle and exit statements are implemented with gotos. */
740 cycle_label = gfc_build_label_decl (NULL_TREE);
741 exit_label = gfc_build_label_decl (NULL_TREE);
743 /* Put the labels where they can be found later. See gfc_trans_do(). */
744 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
747 gfc_start_block (&body);
749 /* Main loop body. */
750 tmp = gfc_trans_code (code->block->next);
751 gfc_add_expr_to_block (&body, tmp);
753 /* Label for cycle statements (if needed). */
754 if (TREE_USED (cycle_label))
756 tmp = build1_v (LABEL_EXPR, cycle_label);
757 gfc_add_expr_to_block (&body, tmp);
760 /* Evaluate the loop condition. */
/* Test dovar == to before incrementing, so the final increment (which
   could overflow) never executes.  */
761 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
762 cond = gfc_evaluate_now (cond, &body);
764 /* Increment the loop variable. */
765 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
766 gfc_add_modify_expr (&body, dovar, tmp);
769 tmp = build1_v (GOTO_EXPR, exit_label);
770 TREE_USED (exit_label) = 1;
771 tmp = fold_build3 (COND_EXPR, void_type_node,
772 cond, tmp, build_empty_stmt ());
773 gfc_add_expr_to_block (&body, tmp);
775 /* Finish the loop body. */
776 tmp = gfc_finish_block (&body);
777 tmp = build1_v (LOOP_EXPR, tmp);
779 /* Only execute the loop if the number of iterations is positive. */
/* Step is a compile-time constant (+1 or -1), so pick the guard
   comparison at translation time.  */
780 if (tree_int_cst_sgn (step) > 0)
781 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
783 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
784 tmp = fold_build3 (COND_EXPR, void_type_node,
785 cond, tmp, build_empty_stmt ());
786 gfc_add_expr_to_block (pblock, tmp);
788 /* Add the exit label. */
789 tmp = build1_v (LABEL_EXPR, exit_label);
790 gfc_add_expr_to_block (pblock, tmp);
792 return gfc_finish_block (pblock);
795 /* Translate the DO construct. This obviously is one of the most
796 important ones to get right with any compiler, but especially
799 We special case some loop forms as described in gfc_trans_simple_do.
800 For other cases we implement them with a separate loop count,
801 as described in the standard.
803 We translate a do loop from:
805 DO dovar = from, to, step
811 [evaluate loop bounds and step]
812 empty = (step > 0 ? to < from : to > from);
813 countm1 = (to - from) / step;
815 if (empty) goto exit_label;
821 if (countm1 ==0) goto exit_label;
826 countm1 is an unsigned integer. It is equal to the loop count minus one,
827 because the loop count itself can overflow. */

/* NOTE(review): the return type, local declarations and brace lines are
   elided in this chunk.  */
830 gfc_trans_do (gfc_code * code)
849 gfc_start_block (&block);
851 /* Evaluate all the expressions in the iterator. */
852 gfc_init_se (&se, NULL);
853 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
854 gfc_add_block_to_block (&block, &se.pre);
856 type = TREE_TYPE (dovar);
858 gfc_init_se (&se, NULL);
859 gfc_conv_expr_val (&se, code->ext.iterator->start);
860 gfc_add_block_to_block (&block, &se.pre);
861 from = gfc_evaluate_now (se.expr, &block);
863 gfc_init_se (&se, NULL);
864 gfc_conv_expr_val (&se, code->ext.iterator->end);
865 gfc_add_block_to_block (&block, &se.pre);
866 to = gfc_evaluate_now (se.expr, &block);
868 gfc_init_se (&se, NULL);
869 gfc_conv_expr_val (&se, code->ext.iterator->step);
870 gfc_add_block_to_block (&block, &se.pre);
871 step = gfc_evaluate_now (se.expr, &block);
873 /* Special case simple loops. */
874 if (TREE_CODE (type) == INTEGER_TYPE
875 && (integer_onep (step)
876 || tree_int_cst_equal (step, integer_minus_one_node)))
877 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
879 /* We need a special check for empty loops:
880 empty = (step > 0 ? to < from : to > from); */
881 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
882 fold_convert (type, integer_zero_node));
883 empty = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
884 fold_build2 (LT_EXPR, boolean_type_node, to, from),
885 fold_build2 (GT_EXPR, boolean_type_node, to, from));
887 /* Initialize loop count. This code is executed before we enter the
888 loop body. We generate: countm1 = abs(to - from) / abs(step). */
889 if (TREE_CODE (type) == INTEGER_TYPE)
/* Integer loop variable: compute the count in the matching unsigned
   type so to - from cannot overflow.  */
893 utype = unsigned_type_for (type);
895 /* tmp = abs(to - from) / abs(step) */
896 ustep = fold_convert (utype, fold_build1 (ABS_EXPR, type, step));
897 tmp = fold_build3 (COND_EXPR, type, pos_step,
898 fold_build2 (MINUS_EXPR, type, to, from),
899 fold_build2 (MINUS_EXPR, type, from, to));
900 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, fold_convert (utype, tmp),
905 /* TODO: We could use the same width as the real type.
906 This would probably cause more problems that it solves
907 when we implement "long double" types. */
/* Real-typed loop variable: count = trunc ((to - from) / step).  */
908 utype = unsigned_type_for (gfc_array_index_type);
909 tmp = fold_build2 (MINUS_EXPR, type, to, from);
910 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
911 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
913 countm1 = gfc_create_var (utype, "countm1");
914 gfc_add_modify_expr (&block, countm1, tmp);
916 /* Cycle and exit statements are implemented with gotos. */
917 cycle_label = gfc_build_label_decl (NULL_TREE);
918 exit_label = gfc_build_label_decl (NULL_TREE);
919 TREE_USED (exit_label) = 1;
921 /* Initialize the DO variable: dovar = from. */
922 gfc_add_modify_expr (&block, dovar, from);
924 /* If the loop is empty, go directly to the exit label. */
925 tmp = fold_build3 (COND_EXPR, void_type_node, empty,
926 build1_v (GOTO_EXPR, exit_label), build_empty_stmt ());
927 gfc_add_expr_to_block (&block, tmp);
930 gfc_start_block (&body);
932 /* Put these labels where they can be found later. We put the
933 labels in a TREE_LIST node (because TREE_CHAIN is already
934 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
935 label in TREE_VALUE (backend_decl). */
937 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
939 /* Main loop body. */
940 tmp = gfc_trans_code (code->block->next);
941 gfc_add_expr_to_block (&body, tmp);
943 /* Label for cycle statements (if needed). */
944 if (TREE_USED (cycle_label))
946 tmp = build1_v (LABEL_EXPR, cycle_label);
947 gfc_add_expr_to_block (&body, tmp);
950 /* Increment the loop variable. */
951 tmp = build2 (PLUS_EXPR, type, dovar, step);
952 gfc_add_modify_expr (&body, dovar, tmp);
954 /* End with the loop condition. Loop until countm1 == 0. */
955 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
956 build_int_cst (utype, 0));
957 tmp = build1_v (GOTO_EXPR, exit_label);
958 tmp = fold_build3 (COND_EXPR, void_type_node,
959 cond, tmp, build_empty_stmt ());
960 gfc_add_expr_to_block (&body, tmp);
962 /* Decrement the loop count. */
/* Decrement happens after the == 0 test, so countm1 (unsigned) never
   wraps below zero within the loop.  */
963 tmp = build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
964 gfc_add_modify_expr (&body, countm1, tmp);
966 /* End of loop body. */
967 tmp = gfc_finish_block (&body);
969 /* The for loop itself. */
970 tmp = build1_v (LOOP_EXPR, tmp);
971 gfc_add_expr_to_block (&block, tmp);
973 /* Add the exit label. */
974 tmp = build1_v (LABEL_EXPR, exit_label);
975 gfc_add_expr_to_block (&block, tmp);
977 return gfc_finish_block (&block);
981 /* Translate the DO WHILE construct.
994 if (! cond) goto exit_label;
1000 Because the evaluation of the exit condition `cond' may have side
1001 effects, we can't do much for empty loop bodies. The backend optimizers
1002 should be smart enough to eliminate any dead loops. */

/* Builds a LOOP_EXPR whose body first tests the negated condition and
   jumps to exit_label, then runs the translated loop body.
   NOTE(review): the return type, locals and brace lines are elided.  */
1005 gfc_trans_do_while (gfc_code * code)
1013 /* Everything we build here is part of the loop body. */
1014 gfc_start_block (&block);
1016 /* Cycle and exit statements are implemented with gotos. */
1017 cycle_label = gfc_build_label_decl (NULL_TREE);
1018 exit_label = gfc_build_label_decl (NULL_TREE);
1020 /* Put the labels where they can be found later. See gfc_trans_do(). */
1021 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1023 /* Create a GIMPLE version of the exit condition. */
1024 gfc_init_se (&cond, NULL);
1025 gfc_conv_expr_val (&cond, code->expr);
1026 gfc_add_block_to_block (&block, &cond.pre);
1027 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1029 /* Build "IF (! cond) GOTO exit_label". */
1030 tmp = build1_v (GOTO_EXPR, exit_label);
1031 TREE_USED (exit_label) = 1;
1032 tmp = fold_build3 (COND_EXPR, void_type_node,
1033 cond.expr, tmp, build_empty_stmt ());
1034 gfc_add_expr_to_block (&block, tmp);
1036 /* The main body of the loop. */
1037 tmp = gfc_trans_code (code->block->next);
1038 gfc_add_expr_to_block (&block, tmp);
1040 /* Label for cycle statements (if needed). */
1041 if (TREE_USED (cycle_label))
1043 tmp = build1_v (LABEL_EXPR, cycle_label);
1044 gfc_add_expr_to_block (&block, tmp);
1047 /* End of loop body. */
1048 tmp = gfc_finish_block (&block);
1050 gfc_init_block (&block);
1051 /* Build the loop. */
1052 tmp = build1_v (LOOP_EXPR, tmp);
1053 gfc_add_expr_to_block (&block, tmp);
1055 /* Add the exit label. */
1056 tmp = build1_v (LABEL_EXPR, exit_label);
1057 gfc_add_expr_to_block (&block, tmp);
1059 return gfc_finish_block (&block);
1063 /* Translate the SELECT CASE construct for INTEGER case expressions,
1064 without killing all potential optimizations. The problem is that
1065 Fortran allows unbounded cases, but the back-end does not, so we
1066 need to intercept those before we enter the equivalent SWITCH_EXPR
1069 For example, we translate this,
1072 CASE (:100,101,105:115)
1082 to the GENERIC equivalent,
1086 case (minimum value for typeof(expr) ... 100:
1092 case 200 ... (maximum value for typeof(expr):

/* NOTE(review): the return type, locals and brace lines are elided in
   this chunk.  Unbounded ranges are clamped to TYPE_MIN/TYPE_MAX of the
   selector's type so the back-end SWITCH_EXPR sees only bounded cases.  */
1109 gfc_trans_integer_select (gfc_code * code)
1119 gfc_start_block (&block);
1121 /* Calculate the switch expression. */
1122 gfc_init_se (&se, NULL);
1123 gfc_conv_expr_val (&se, code->expr);
1124 gfc_add_block_to_block (&block, &se.pre);
1126 end_label = gfc_build_label_decl (NULL_TREE);
1128 gfc_init_block (&body);
1130 for (c = code->block; c; c = c->block)
1132 for (cp = c->ext.case_list; cp; cp = cp->next)
1137 /* Assume it's the default case. */
1138 low = high = NULL_TREE;
1142 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1145 /* If there's only a lower bound, set the high bound to the
1146 maximum value of the case expression. */
1148 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1153 /* Three cases are possible here:
1155 1) There is no lower bound, e.g. CASE (:N).
1156 2) There is a lower bound .NE. high bound, that is
1157 a case range, e.g. CASE (N:M) where M>N (we make
1158 sure that M>N during type resolution).
1159 3) There is a lower bound, and it has the same value
1160 as the high bound, e.g. CASE (N:N). This is our
1161 internal representation of CASE(N).
1163 In the first and second case, we need to set a value for
1164 high. In the third case, we don't because the GCC middle
1165 end represents a single case value by just letting high be
1166 a NULL_TREE. We can't do that because we need to be able
1167 to represent unbounded cases. */
1171 && mpz_cmp (cp->low->value.integer,
1172 cp->high->value.integer) != 0))
1173 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1176 /* Unbounded case. */
1178 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1181 /* Build a label. */
1182 label = gfc_build_label_decl (NULL_TREE);
1184 /* Add this case label.
1185 Add parameter 'label', make it match GCC backend. */
1186 tmp = build3 (CASE_LABEL_EXPR, void_type_node, low, high, label);
1187 gfc_add_expr_to_block (&body, tmp);
1190 /* Add the statements for this case. */
1191 tmp = gfc_trans_code (c->next);
1192 gfc_add_expr_to_block (&body, tmp);
1194 /* Break to the end of the construct. */
/* Explicit GOTO replaces the implicit C fallthrough between cases.  */
1195 tmp = build1_v (GOTO_EXPR, end_label);
1196 gfc_add_expr_to_block (&body, tmp);
1199 tmp = gfc_finish_block (&body);
1200 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1201 gfc_add_expr_to_block (&block, tmp);
1203 tmp = build1_v (LABEL_EXPR, end_label);
1204 gfc_add_expr_to_block (&block, tmp);
1206 return gfc_finish_block (&block);
1210 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1212 There are only two cases possible here, even though the standard
1213 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1214 .FALSE., and DEFAULT.
1216 We never generate more than two blocks here. Instead, we always
1217 try to eliminate the DEFAULT case. This way, we can translate this
1218 kind of SELECT construct to a simple
1222 expression in GENERIC. */

/* NOTE(review): the return type, locals, brace lines and the code that
   records the .TRUE./.FALSE./DEFAULT blocks into t, f and d are partly
   elided in this chunk.  */
1225 gfc_trans_logical_select (gfc_code * code)
1228 gfc_code *t, *f, *d;
1233 /* Assume we don't have any cases at all. */
1236 /* Now see which ones we actually do have. We can have at most two
1237 cases in a single case list: one for .TRUE. and one for .FALSE.
1238 The default case is always separate. If the cases for .TRUE. and
1239 .FALSE. are in the same case list, the block for that case list
1240 always executed, and we don't generate code a COND_EXPR. */
1241 for (c = code->block; c; c = c->block)
1243 for (cp = c->ext.case_list; cp; cp = cp->next)
1247 if (cp->low->value.logical == 0) /* .FALSE. */
1249 else /* if (cp->value.logical != 0), thus .TRUE. */
1257 /* Start a new block. */
1258 gfc_start_block (&block);
1260 /* Calculate the switch expression. We always need to do this
1261 because it may have side effects. */
1262 gfc_init_se (&se, NULL);
1263 gfc_conv_expr_val (&se, code->expr);
1264 gfc_add_block_to_block (&block, &se.pre);
1266 if (t == f && t != NULL)
1268 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1269 translate the code for these cases, append it to the current
1271 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1275 tree true_tree, false_tree, stmt;
1277 true_tree = build_empty_stmt ();
1278 false_tree = build_empty_stmt ();
1280 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1281 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1282 make the missing case the default case. */
1283 if (t != NULL && f != NULL)
1293 /* Translate the code for each of these blocks, and append it to
1294 the current block. */
1296 true_tree = gfc_trans_code (t->next);
1299 false_tree = gfc_trans_code (f->next);
/* The whole SELECT collapses to a single COND_EXPR on the selector.  */
1301 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1302 true_tree, false_tree);
1303 gfc_add_expr_to_block (&block, stmt);
1306 return gfc_finish_block (&block);
1310 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1311 Instead of generating compares and jumps, it is far simpler to
1312 generate a data structure describing the cases in order and call a
1313 library subroutine that locates the right case.
1314 This is particularly true because this is the only case where we
1315 might have to dispose of a temporary.
1316 The library subroutine returns a pointer to jump to or NULL if no
1317 branches are to be taken. */
1320 gfc_trans_character_select (gfc_code *code)
1322 tree init, node, end_label, tmp, type, *labels;
1324 stmtblock_t block, body;
/* The jump-table record type and its field decls are built once per
   compilation and cached in these function-scope statics.  */
1330 static tree select_struct;
1331 static tree ss_string1, ss_string1_len;
1332 static tree ss_string2, ss_string2_len;
1333 static tree ss_target;
1335 if (select_struct == NULL)
1337 tree gfc_int4_type_node = gfc_get_int_type (4);
/* Lay out the record { string1, string1_len, string2, string2_len,
   target } that the runtime's string-select routine expects.  */
1339 select_struct = make_node (RECORD_TYPE);
1340 TYPE_NAME (select_struct) = get_identifier ("_jump_struct");
1343 #define ADD_FIELD(NAME, TYPE) \
1344 ss_##NAME = gfc_add_field_to_struct \
1345 (&(TYPE_FIELDS (select_struct)), select_struct, \
1346 get_identifier (stringize(NAME)), TYPE)
1348 ADD_FIELD (string1, pchar_type_node);
1349 ADD_FIELD (string1_len, gfc_int4_type_node);
1351 ADD_FIELD (string2, pchar_type_node);
1352 ADD_FIELD (string2_len, gfc_int4_type_node);
1354 ADD_FIELD (target, pvoid_type_node);
1357 gfc_finish_type (select_struct);
/* Walk to the head of the sorted case list (linked via ->left /
   ->right), then count the cases; the counting loop body is elided
   in this view — presumably it numbers each case into d->n and
   accumulates n.  TODO confirm against full source.  */
1360 cp = code->block->ext.case_list;
1361 while (cp->left != NULL)
1365 for (d = cp; d; d = d->right)
/* One label per case; the runtime returns which one to jump to.  */
1369 labels = gfc_getmem (n * sizeof (tree));
1373 for(i = 0; i < n; i++)
1375 labels[i] = gfc_build_label_decl (NULL_TREE);
1376 TREE_USED (labels[i]) = 1;
1377 /* TODO: The gimplifier should do this for us, but it has
1378 inadequacies when dealing with static initializers. */
1379 FORCED_LABEL (labels[i]) = 1;
1382 end_label = gfc_build_label_decl (NULL_TREE);
1384 /* Generate the body: each case block is emitted behind its label(s)
   and ends with a jump past the whole construct. */
1385 gfc_start_block (&block);
1386 gfc_init_block (&body);
1388 for (c = code->block; c; c = c->block)
1390 for (d = c->ext.case_list; d; d = d->next)
1392 tmp = build1_v (LABEL_EXPR, labels[d->n]);
1393 gfc_add_expr_to_block (&body, tmp);
1396 tmp = gfc_trans_code (c->next);
1397 gfc_add_expr_to_block (&body, tmp);
1399 tmp = build1_v (GOTO_EXPR, end_label);
1400 gfc_add_expr_to_block (&body, tmp);
1403 /* Generate the structure describing the branches.  A NULL low/high
   bound is encoded as (NULL pointer, length 0). */
1407 for(d = cp; d; d = d->right, i++)
1411 gfc_init_se (&se, NULL);
1415 node = tree_cons (ss_string1, null_pointer_node, node);
1416 node = tree_cons (ss_string1_len, integer_zero_node, node);
1420 gfc_conv_expr_reference (&se, d->low);
1422 node = tree_cons (ss_string1, se.expr, node);
1423 node = tree_cons (ss_string1_len, se.string_length, node);
1426 if (d->high == NULL)
1428 node = tree_cons (ss_string2, null_pointer_node, node);
1429 node = tree_cons (ss_string2_len, integer_zero_node, node);
1433 gfc_init_se (&se, NULL);
1434 gfc_conv_expr_reference (&se, d->high);
1436 node = tree_cons (ss_string2, se.expr, node);
1437 node = tree_cons (ss_string2_len, se.string_length, node);
1440 tmp = gfc_build_addr_expr (pvoid_type_node, labels[i]);
1441 node = tree_cons (ss_target, tmp, node);
1443 tmp = build_constructor_from_list (select_struct, nreverse (node));
1444 init = tree_cons (NULL_TREE, tmp, init);
1447 type = build_array_type (select_struct, build_index_type
1448 (build_int_cst (NULL_TREE, n - 1)));
1450 init = build_constructor_from_list (type, nreverse(init));
1451 TREE_CONSTANT (init) = 1;
1452 TREE_INVARIANT (init) = 1;
1453 TREE_STATIC (init) = 1;
1454 /* Create a static variable to hold the jump table. */
1455 tmp = gfc_create_var (type, "jumptable");
1456 TREE_CONSTANT (tmp) = 1;
1457 TREE_INVARIANT (tmp) = 1;
1458 TREE_STATIC (tmp) = 1;
1459 TREE_READONLY (tmp) = 1;
1460 DECL_INITIAL (tmp) = init;
/* NOTE(review): lines 1461-1462 are elided here; at this point INIT
   presumably refers to the static jumptable variable (not the raw
   constructor) — confirm against the full source.  */
1463 /* Build the library call */
1464 init = gfc_build_addr_expr (pvoid_type_node, init);
1465 tmp = gfc_build_addr_expr (pvoid_type_node, end_label);
1467 gfc_init_se (&se, NULL);
1468 gfc_conv_expr_reference (&se, code->expr);
1470 gfc_add_block_to_block (&block, &se.pre);
/* Runtime lookup: table address, case count, default target, the
   selector string and its length.  Returns the label address.  */
1472 tmp = build_call_expr (gfor_fndecl_select_string, 5,
1473 init, build_int_cst (NULL_TREE, n),
1474 tmp, se.expr, se.string_length);
1476 case_label = gfc_create_var (TREE_TYPE (tmp), "case_label");
1477 gfc_add_modify_expr (&block, case_label, tmp);
1479 gfc_add_block_to_block (&block, &se.post);
/* Computed goto to whichever label the library picked.  */
1481 tmp = build1 (GOTO_EXPR, void_type_node, case_label);
1482 gfc_add_expr_to_block (&block, tmp);
1484 tmp = gfc_finish_block (&body);
1485 gfc_add_expr_to_block (&block, tmp);
1486 tmp = build1_v (LABEL_EXPR, end_label);
1487 gfc_add_expr_to_block (&block, tmp);
1492 return gfc_finish_block (&block);
1496 /* Translate the three variants of the SELECT CASE construct.
1498 SELECT CASEs with INTEGER case expressions can be translated to an
1499 equivalent GENERIC switch statement, and for LOGICAL case
1500 expressions we build one or two if-else compares.
1502 SELECT CASEs with CHARACTER case expressions are a whole different
1503 story, because they don't exist in GENERIC. So we sort them and
1504 do a binary search at runtime.
1506 Fortran has no BREAK statement, and it does not allow jumps from
1507 one case block to another. That makes things a lot easier for
1511 gfc_trans_select (gfc_code * code)
1513 gcc_assert (code && code->expr);
1515 /* Empty SELECT constructs are legal. */
1516 if (code->block == NULL)
1517 return build_empty_stmt ();
1519 /* Select the correct translation function.  Each case returns
   directly; falling through to the error below means the case
   expression had a type we cannot dispatch on. */
1520 switch (code->expr->ts.type)
1522 case BT_LOGICAL: return gfc_trans_logical_select (code);
1523 case BT_INTEGER: return gfc_trans_integer_select (code);
1524 case BT_CHARACTER: return gfc_trans_character_select (code);
1526 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1532 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1533 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1534 indicates whether we should generate code to test the FORALLs mask
1535 array. OUTER is the loop header to be used for initializing mask
1538 The generated loop format is:
1539 count = (end - start + step) / step
1552 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1553 int mask_flag, stmtblock_t *outer)
1561 tree var, start, end, step;
1564 /* Initialize the mask index outside the FORALL nest. */
1565 if (mask_flag && forall_tmp->mask)
1566 gfc_add_modify_expr (outer, forall_tmp->maskindex, gfc_index_zero_node);
/* Wrap BODY in one loop per FORALL index variable, innermost first.
   Each iteration of this loop builds one enclosing level.  */
1568 iter = forall_tmp->this_loop;
1569 nvar = forall_tmp->nvar;
1570 for (n = 0; n < nvar; n++)
1573 start = iter->start;
1577 exit_label = gfc_build_label_decl (NULL_TREE);
1578 TREE_USED (exit_label) = 1;
1580 /* The loop counter counts remaining iterations down to zero, so the
   loop trip count is independent of overflow in VAR itself. */
1581 count = gfc_create_var (TREE_TYPE (var), "count");
1583 /* The body of the loop. */
1584 gfc_init_block (&block);
1586 /* The exit condition: leave the loop once COUNT reaches zero. */
1587 cond = fold_build2 (LE_EXPR, boolean_type_node,
1588 count, build_int_cst (TREE_TYPE (count), 0));
1589 tmp = build1_v (GOTO_EXPR, exit_label);
1590 tmp = fold_build3 (COND_EXPR, void_type_node,
1591 cond, tmp, build_empty_stmt ());
1592 gfc_add_expr_to_block (&block, tmp);
1594 /* The main loop body. */
1595 gfc_add_expr_to_block (&block, body);
1597 /* Increment the loop variable. */
1598 tmp = build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1599 gfc_add_modify_expr (&block, var, tmp);
1601 /* Advance to the next mask element.  Only do this for the
   innermost loop (n == 0): the mask is stored linearly, one element
   per innermost iteration. */
1603 if (n == 0 && mask_flag && forall_tmp->mask)
1605 tree maskindex = forall_tmp->maskindex;
1606 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
1607 maskindex, gfc_index_one_node);
1608 gfc_add_modify_expr (&block, maskindex, tmp);
1611 /* Decrement the loop counter. */
1612 tmp = build2 (MINUS_EXPR, TREE_TYPE (var), count, gfc_index_one_node);
1613 gfc_add_modify_expr (&block, count, tmp);
1615 body = gfc_finish_block (&block);
1617 /* Loop var initialization. */
1618 gfc_init_block (&block);
1619 gfc_add_modify_expr (&block, var, start);
1622 /* Initialize the loop counter: count = (end - start + step) / step. */
1623 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1624 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1625 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1626 gfc_add_modify_expr (&block, count, tmp);
1628 /* The loop expression. */
1629 tmp = build1_v (LOOP_EXPR, body);
1630 gfc_add_expr_to_block (&block, tmp);
1632 /* The exit label. */
1633 tmp = build1_v (LABEL_EXPR, exit_label);
1634 gfc_add_expr_to_block (&block, tmp);
/* BODY now holds this level's complete loop; the next pass (if any)
   wraps it in the next-outer index variable's loop. */
1636 body = gfc_finish_block (&block);
1643 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1644 is nonzero, the body is controlled by all masks in the forall nest.
1645 Otherwise, the innermost loop is not controlled by its mask. This
1646 is used for initializing that mask. */
1649 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1654 forall_info *forall_tmp;
1655 tree mask, maskindex;
1657 gfc_start_block (&header);
/* Walk outward through the nest (prev_nest links), wrapping BODY in
   each level's mask test and iteration loops. */
1659 forall_tmp = nested_forall_info;
1660 while (forall_tmp != NULL)
1662 /* Generate body with masks' control. */
1665 mask = forall_tmp->mask;
1666 maskindex = forall_tmp->maskindex;
1668 /* If a mask was specified make the assignment conditional. */
1671 tmp = gfc_build_array_ref (mask, maskindex);
1672 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
/* Any per-level initialization (e.g. zeroing the mask index) is
   emitted into HEADER, before the generated loops run. */
1675 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1676 forall_tmp = forall_tmp->prev_nest;
1680 gfc_add_expr_to_block (&header, body);
1681 return gfc_finish_block (&header);
1685 /* Allocate data for holding a temporary array. Returns either a local
1686 temporary array or a pointer variable. */
1689 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
/* SIZE is the element count; the array type is indexed 0 .. SIZE-1. */
1696 if (INTEGER_CST_P (size))
1698 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1699 gfc_index_one_node);
1704 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1705 type = build_array_type (elem_type, type);
/* Small constant-sized temporaries live on the stack; anything else
   is malloc'd and *PDATA records the pointer so the caller can free it. */
1706 if (gfc_can_put_var_on_stack (bytesize))
1708 gcc_assert (INTEGER_CST_P (size));
1709 tmpvar = gfc_create_var (type, "temp");
1714 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1715 *pdata = convert (pvoid_type_node, tmpvar);
1717 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
1718 gfc_add_modify_expr (pblock, tmpvar, tmp);
1724 /* Generate codes to copy the temporary to the actual lhs. */
1727 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1728 tree count1, gfc_ss *wheremask, bool invert)
1732 stmtblock_t block, body;
/* Walk the LHS; a terminated chain means EXPR is scalar and we can
   assign the single temporary element directly. */
1738 lss = gfc_walk_expr (expr);
1740 if (lss == gfc_ss_terminator)
1742 gfc_start_block (&block);
1744 gfc_init_se (&lse, NULL);
1746 /* Translate the expression. */
1747 gfc_conv_expr (&lse, expr);
1749 /* Form the expression for the temporary. */
1750 tmp = gfc_build_array_ref (tmp1, count1);
1752 /* Use the scalar assignment as is. */
1753 gfc_add_block_to_block (&block, &lse.pre);
1754 gfc_add_modify_expr (&block, lse.expr, tmp);
1755 gfc_add_block_to_block (&block, &lse.post);
1757 /* Increment the count1. */
1758 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1759 gfc_index_one_node);
1760 gfc_add_modify_expr (&block, count1, tmp);
1762 tmp = gfc_finish_block (&block);
/* Array LHS: build a scalarized loop that reads TMP1[count1++] and
   stores into successive LHS elements. */
1766 gfc_start_block (&block);
1768 gfc_init_loopinfo (&loop1);
1769 gfc_init_se (&rse, NULL);
1770 gfc_init_se (&lse, NULL);
1772 /* Associate the lss with the loop. */
1773 gfc_add_ss_to_loop (&loop1, lss);
1775 /* Calculate the bounds of the scalarization. */
1776 gfc_conv_ss_startstride (&loop1);
1777 /* Setup the scalarizing loops. */
1778 gfc_conv_loop_setup (&loop1);
1780 gfc_mark_ss_chain_used (lss, 1);
1782 /* Start the scalarized loop body. */
1783 gfc_start_scalarized_body (&loop1, &body);
1785 /* Setup the gfc_se structures. */
1786 gfc_copy_loopinfo_to_se (&lse, &loop1);
1789 /* Form the expression of the temporary. */
1790 if (lss != gfc_ss_terminator)
1791 rse.expr = gfc_build_array_ref (tmp1, count1);
1792 /* Translate expr. */
1793 gfc_conv_expr (&lse, expr);
1795 /* Use the scalar assignment. */
1796 rse.string_length = lse.string_length;
1797 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
1799 /* Form the mask expression according to the mask tree list.  When
   INVERT is set the WHERE mask's sense is negated (ELSEWHERE). */
1802 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1804 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1805 TREE_TYPE (wheremaskexpr),
1807 tmp = fold_build3 (COND_EXPR, void_type_node,
1808 wheremaskexpr, tmp, build_empty_stmt ());
1811 gfc_add_expr_to_block (&body, tmp);
1813 /* Increment count1: linear read position in the temporary. */
1814 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1815 count1, gfc_index_one_node);
1816 gfc_add_modify_expr (&body, count1, tmp);
1818 /* Increment count3: linear position in the WHERE mask. */
1821 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1822 count3, gfc_index_one_node);
1823 gfc_add_modify_expr (&body, count3, tmp);
1826 /* Generate the copying loops. */
1827 gfc_trans_scalarizing_loops (&loop1, &body);
1828 gfc_add_block_to_block (&block, &loop1.pre);
1829 gfc_add_block_to_block (&block, &loop1.post);
1830 gfc_cleanup_loop (&loop1);
1832 tmp = gfc_finish_block (&block);
1838 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
1839 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
1840 and should not be freed. WHEREMASK is the conditional execution mask
1841 whose sense may be inverted by INVERT. */
1844 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
1845 tree count1, gfc_ss *lss, gfc_ss *rss,
1846 tree wheremask, bool invert)
1848 stmtblock_t block, body1;
1855 gfc_start_block (&block);
1857 gfc_init_se (&rse, NULL);
1858 gfc_init_se (&lse, NULL);
/* Scalar LHS: no scalarization loop is needed; evaluate the RHS once
   and store it at TMP1[count1]. */
1860 if (lss == gfc_ss_terminator)
1862 gfc_init_block (&body1);
1863 gfc_conv_expr (&rse, expr2);
1864 lse.expr = gfc_build_array_ref (tmp1, count1);
1868 /* Initialize the loop. */
1869 gfc_init_loopinfo (&loop);
1871 /* We may need LSS to determine the shape of the expression. */
1872 gfc_add_ss_to_loop (&loop, lss);
1873 gfc_add_ss_to_loop (&loop, rss);
1875 gfc_conv_ss_startstride (&loop);
1876 gfc_conv_loop_setup (&loop);
1878 gfc_mark_ss_chain_used (rss, 1);
1879 /* Start the loop body. */
1880 gfc_start_scalarized_body (&loop, &body1);
1882 /* Translate the expression. */
1883 gfc_copy_loopinfo_to_se (&rse, &loop);
1885 gfc_conv_expr (&rse, expr2);
1887 /* Form the expression of the temporary. */
1888 lse.expr = gfc_build_array_ref (tmp1, count1);
1891 /* Use the scalar assignment; deep-copy only when the RHS is a
   variable (last argument). */
1892 lse.string_length = rse.string_length;
1893 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
1894 expr2->expr_type == EXPR_VARIABLE);
1896 /* Form the mask expression according to the mask tree list.  INVERT
   flips the WHERE mask's sense (ELSEWHERE branch). */
1899 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1901 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1902 TREE_TYPE (wheremaskexpr),
1904 tmp = fold_build3 (COND_EXPR, void_type_node,
1905 wheremaskexpr, tmp, build_empty_stmt ());
1908 gfc_add_expr_to_block (&body1, tmp);
/* For the scalar case the single store goes straight into BLOCK and
   COUNT1 advances once; otherwise the counters advance inside the
   scalarized loop body.  */
1910 if (lss == gfc_ss_terminator)
1912 gfc_add_block_to_block (&block, &body1);
1914 /* Increment count1. */
1915 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1916 gfc_index_one_node);
1917 gfc_add_modify_expr (&block, count1, tmp);
1921 /* Increment count1. */
1922 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1923 count1, gfc_index_one_node);
1924 gfc_add_modify_expr (&body1, count1, tmp);
1926 /* Increment count3. */
1929 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1930 count3, gfc_index_one_node);
1931 gfc_add_modify_expr (&body1, count3, tmp);
1934 /* Generate the copying loops. */
1935 gfc_trans_scalarizing_loops (&loop, &body1);
1937 gfc_add_block_to_block (&block, &loop.pre);
1938 gfc_add_block_to_block (&block, &loop.post);
1940 gfc_cleanup_loop (&loop);
1941 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
1942 as tree nodes in SS may not be valid in different scope. */
1945 tmp = gfc_finish_block (&block);
1950 /* Calculate the size of temporary needed in the assignment inside forall.
1951 LSS and RSS are filled in this function. */
1954 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
1955 stmtblock_t * pblock,
1956 gfc_ss **lss, gfc_ss **rss)
/* Walk the LHS; if it is scalar the temporary holds one element and
   SIZE stays at its initial value of one. */
1964 *lss = gfc_walk_expr (expr1);
1967 size = gfc_index_one_node;
1968 if (*lss != gfc_ss_terminator)
1970 gfc_init_loopinfo (&loop);
1972 /* Walk the RHS of the expression. */
1973 *rss = gfc_walk_expr (expr2);
1974 if (*rss == gfc_ss_terminator)
1976 /* The rhs is scalar. Add a ss for the expression. */
1977 *rss = gfc_get_ss ();
1978 (*rss)->next = gfc_ss_terminator;
1979 (*rss)->type = GFC_SS_SCALAR;
1980 (*rss)->expr = expr2;
1983 /* Associate the SS with the loop. */
1984 gfc_add_ss_to_loop (&loop, *lss);
1985 /* We don't actually need to add the rhs at this point, but it might
1986 make guessing the loop bounds a bit easier. */
1987 gfc_add_ss_to_loop (&loop, *rss);
1989 /* We only want the shape of the expression, not rest of the junk
1990 generated by the scalarizer. */
1991 loop.array_parameter = 1;
1993 /* Calculate the bounds of the scalarization.  Bounds checking is
   temporarily disabled: we only need the extents here, and check
   code would be emitted into the wrong block. */
1994 save_flag = flag_bounds_check;
1995 flag_bounds_check = 0;
1996 gfc_conv_ss_startstride (&loop);
1997 flag_bounds_check = save_flag;
1998 gfc_conv_loop_setup (&loop);
2000 /* Figure out how many elements we need: the product over all
   dimensions of (to - from + 1). */
2001 for (i = 0; i < loop.dimen; i++)
2003 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2004 gfc_index_one_node, loop.from[i]);
2005 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2007 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
/* Evaluate SIZE into a temporary now so the caller gets a stable
   value independent of later modifications of the operands. */
2009 gfc_add_block_to_block (pblock, &loop.pre);
2010 size = gfc_evaluate_now (size, pblock);
2011 gfc_add_block_to_block (pblock, &loop.post);
2013 /* TODO: write a function that cleans up a loopinfo without freeing
2014 the SS chains. Currently a NOP. */
2021 /* Calculate the overall iterator number of the nested forall construct.
2022 This routine actually calculates the number of times the body of the
2023 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2024 that by the expression INNER_SIZE. The BLOCK argument specifies the
2025 block in which to calculate the result, and the optional INNER_SIZE_BODY
2026 argument contains any statements that need to be executed (inside the loop)
2027 to initialize or calculate INNER_SIZE. */
2030 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2031 stmtblock_t *inner_size_body, stmtblock_t *block)
2033 forall_info *forall_tmp = nested_forall_info;
2037 /* We can eliminate the innermost unconditional loops with constant
   trip counts by folding them into INNER_SIZE at compile time. */
2039 if (INTEGER_CST_P (inner_size))
2042 && !forall_tmp->mask
2043 && INTEGER_CST_P (forall_tmp->size))
2045 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2046 inner_size, forall_tmp->size);
2047 forall_tmp = forall_tmp->prev_nest;
2050 /* If there are no loops left, we have our constant result. */
2055 /* Otherwise, create a temporary variable to compute the result at
   run time by accumulating INNER_SIZE over the remaining loops. */
2056 number = gfc_create_var (gfc_array_index_type, "num");
2057 gfc_add_modify_expr (block, number, gfc_index_zero_node);
2059 gfc_start_block (&body);
/* Statements that (re)compute INNER_SIZE must run inside the loop,
   before the accumulation. */
2060 if (inner_size_body)
2061 gfc_add_block_to_block (&body, inner_size_body);
2063 tmp = build2 (PLUS_EXPR, gfc_array_index_type, number,
2067 gfc_add_modify_expr (&body, number, tmp);
2068 tmp = gfc_finish_block (&body);
2070 /* Generate loops.  Mask tests are honored (mask_flag == 1), so only
   iterations whose masks are true contribute to the count. */
2071 if (forall_tmp != NULL)
2072 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2074 gfc_add_expr_to_block (block, tmp);
2080 /* Allocate temporary for forall construct. SIZE is the size of temporary
2081 needed. PTEMP1 is returned for space free. */
2084 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
/* Convert the element count into a byte size; skip the multiply when
   the element size is one byte. */
2091 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2092 if (!integer_onep (unit))
2093 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2098 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
/* Heap-allocated temporaries come back as a pointer; dereference so
   callers always see the array itself. */
2101 tmp = build_fold_indirect_ref (tmp);
2106 /* Allocate temporary for forall construct according to the information in
2107 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2108 assignment inside forall. PTEMP1 is returned for space free. */
2111 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2112 tree inner_size, stmtblock_t * inner_size_body,
2113 stmtblock_t * block, tree * ptemp1)
2117 /* Calculate the total size of temporary needed in forall construct:
   the nest's iteration count times the per-iteration INNER_SIZE. */
2118 size = compute_overall_iter_number (nested_forall_info, inner_size,
2119 inner_size_body, block);
2121 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2125 /* Handle assignments inside forall which need temporary.
2127 forall (i=start:end:stride; maskexpr)
2130 (where e,f<i> are arbitrary expressions possibly involving i
2131 and there is a dependency between e<i> and f<i>)
2133 masktmp(:) = maskexpr(:)
2138 for (i = start; i <= end; i += stride)
2142 for (i = start; i <= end; i += stride)
2144 if (masktmp[maskindex++])
2145 tmp[count1++] = f<i>
2149 for (i = start; i <= end; i += stride)
2151 if (masktmp[maskindex++])
2152 e<i> = tmp[count1++]
2157 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2158 tree wheremask, bool invert,
2159 forall_info * nested_forall_info,
2160 stmtblock_t * block)
2168 stmtblock_t inner_size_body;
2170 /* Create vars. count1 is the current iterator number of the nested
2172 count1 = gfc_create_var (gfc_array_index_type, "count1");
2174 /* Count is the wheremask index. */
2177 count = gfc_create_var (gfc_array_index_type, "count");
2178 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2183 /* Initialize count1. */
2184 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2186 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2187 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2188 gfc_init_block (&inner_size_body);
2189 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2192 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
2193 type = gfc_typenode_for_spec (&expr1->ts);
2195 /* Allocate temporary for nested forall construct according to the
2196 information in nested_forall_info and inner_size. */
2197 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2198 &inner_size_body, block, &ptemp1);
2200 /* Generate code to copy the rhs into the temporary. */
2201 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2204 /* Generate body and loops according to the information in
2205 nested_forall_info. */
2206 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2207 gfc_add_expr_to_block (block, tmp);
/* Reset the counters before the second pass: the temp-to-lhs copy
   replays the same iteration order, so both reads and mask lookups
   must restart from zero. */
2210 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2214 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2216 /* Generate code to copy the temporary to the lhs. */
2217 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2220 /* Generate body and loops according to the information in
2221 nested_forall_info. */
2222 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2223 gfc_add_expr_to_block (block, tmp);
2227 /* Free the temporary. */
2228 tmp = gfc_call_free (ptemp1);
2229 gfc_add_expr_to_block (block, tmp);
2234 /* Translate pointer assignment inside FORALL which need temporary. */
2237 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2238 forall_info * nested_forall_info,
2239 stmtblock_t * block)
2253 tree tmp, tmp1, ptemp1;
/* COUNT indexes the linear temporary across both passes (collect all
   targets first, then perform all pointer assignments). */
2255 count = gfc_create_var (gfc_array_index_type, "count");
2256 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2258 inner_size = integer_one_node;
2259 lss = gfc_walk_expr (expr1);
2260 rss = gfc_walk_expr (expr2);
/* Scalar pointer assignment: the temporary holds one pointer per
   FORALL iteration. */
2261 if (lss == gfc_ss_terminator)
2263 type = gfc_typenode_for_spec (&expr1->ts);
2264 type = build_pointer_type (type);
2266 /* Allocate temporary for nested forall construct according to the
2267 information in nested_forall_info and inner_size. */
2268 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2269 inner_size, NULL, block, &ptemp1);
/* Pass 1: evaluate each target address into the temporary. */
2270 gfc_start_block (&body);
2271 gfc_init_se (&lse, NULL);
2272 lse.expr = gfc_build_array_ref (tmp1, count);
2273 gfc_init_se (&rse, NULL);
2274 rse.want_pointer = 1;
2275 gfc_conv_expr (&rse, expr2);
2276 gfc_add_block_to_block (&body, &rse.pre);
2277 gfc_add_modify_expr (&body, lse.expr,
2278 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2279 gfc_add_block_to_block (&body, &rse.post);
2281 /* Increment count. */
2282 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2283 count, gfc_index_one_node);
2284 gfc_add_modify_expr (&body, count, tmp);
2286 tmp = gfc_finish_block (&body);
2288 /* Generate body and loops according to the information in
2289 nested_forall_info. */
2290 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2291 gfc_add_expr_to_block (block, tmp);
/* Pass 2: replay the iteration, assigning the saved addresses to the
   pointer on the lhs.  COUNT restarts from zero.  */
2294 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2296 gfc_start_block (&body);
2297 gfc_init_se (&lse, NULL);
2298 gfc_init_se (&rse, NULL);
2299 rse.expr = gfc_build_array_ref (tmp1, count);
2300 lse.want_pointer = 1;
2301 gfc_conv_expr (&lse, expr1);
2302 gfc_add_block_to_block (&body, &lse.pre);
2303 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2304 gfc_add_block_to_block (&body, &lse.post);
2305 /* Increment count. */
2306 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2307 count, gfc_index_one_node);
2308 gfc_add_modify_expr (&body, count, tmp);
2309 tmp = gfc_finish_block (&body);
2311 /* Generate body and loops according to the information in
2312 nested_forall_info. */
2313 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2314 gfc_add_expr_to_block (block, tmp);
/* Array pointer assignment: each temporary element is a full array
   descriptor rather than a single pointer.  */
2318 gfc_init_loopinfo (&loop);
2320 /* Associate the SS with the loop. */
2321 gfc_add_ss_to_loop (&loop, rss);
2323 /* Setup the scalarizing loops and bounds. */
2324 gfc_conv_ss_startstride (&loop);
2326 gfc_conv_loop_setup (&loop);
2328 info = &rss->data.info;
2329 desc = info->descriptor;
2331 /* Make a new descriptor. */
2332 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2333 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2334 loop.from, loop.to, 1);
2336 /* Allocate temporary for nested forall construct. */
2337 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2338 inner_size, NULL, block, &ptemp1);
/* Pass 1: build a descriptor for each rhs section directly into the
   temporary (direct_byref avoids an extra copy).  */
2339 gfc_start_block (&body);
2340 gfc_init_se (&lse, NULL);
2341 lse.expr = gfc_build_array_ref (tmp1, count);
2342 lse.direct_byref = 1;
2343 rss = gfc_walk_expr (expr2);
2344 gfc_conv_expr_descriptor (&lse, expr2, rss);
2346 gfc_add_block_to_block (&body, &lse.pre);
2347 gfc_add_block_to_block (&body, &lse.post);
2349 /* Increment count. */
2350 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2351 count, gfc_index_one_node);
2352 gfc_add_modify_expr (&body, count, tmp);
2354 tmp = gfc_finish_block (&body);
2356 /* Generate body and loops according to the information in
2357 nested_forall_info. */
2358 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2359 gfc_add_expr_to_block (block, tmp);
/* Pass 2: copy each saved descriptor into the lhs pointer.  */
2362 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2364 parm = gfc_build_array_ref (tmp1, count);
2365 lss = gfc_walk_expr (expr1);
2366 gfc_init_se (&lse, NULL);
2367 gfc_conv_expr_descriptor (&lse, expr1, lss);
2368 gfc_add_modify_expr (&lse.pre, lse.expr, parm);
2369 gfc_start_block (&body);
2370 gfc_add_block_to_block (&body, &lse.pre);
2371 gfc_add_block_to_block (&body, &lse.post);
2373 /* Increment count. */
2374 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2375 count, gfc_index_one_node);
2376 gfc_add_modify_expr (&body, count, tmp);
2378 tmp = gfc_finish_block (&body);
2380 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2381 gfc_add_expr_to_block (block, tmp);
2383 /* Free the temporary. */
2386 tmp = gfc_call_free (ptemp1);
2387 gfc_add_expr_to_block (block, tmp);
2392 /* FORALL and WHERE statements are really nasty, especially when you nest
2393 them. All the rhs of a forall assignment must be evaluated before the
2394 actual assignments are performed. Presumably this also applies to all the
2395 assignments in an inner where statement. */
2397 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2398 linear array, relying on the fact that we process in the same order in all
2401 forall (i=start:end:stride; maskexpr)
2405 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2407 count = ((end + 1 - start) / stride)
2408 masktmp(:) = maskexpr(:)
2411 for (i = start; i <= end; i += stride)
2413 if (masktmp[maskindex++])
2417 for (i = start; i <= end; i += stride)
2419 if (masktmp[maskindex++])
2423 Note that this code only works when there are no dependencies.
2424 Forall loop with array assignments and data dependencies are a real pain,
2425 because the size of the temporary cannot always be determined before the
2426 loop is executed. This problem is compounded by the presence of nested
2431 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2449 gfc_forall_iterator *fa;
2452 gfc_saved_var *saved_vars;
2453 iter_info *this_forall;
2457 /* Do nothing if the mask is false. */
2459 && code->expr->expr_type == EXPR_CONSTANT
2460 && !code->expr->value.logical)
2461 return build_empty_stmt ();
2464 /* Count the FORALL index number. */
2465 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2469 /* Allocate the space for var, start, end, step, varexpr. */
2470 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2471 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2472 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2473 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2474 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2475 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2477 /* Allocate the space for info. */
2478 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2480 gfc_start_block (&block);
2483 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2485 gfc_symbol *sym = fa->var->symtree->n.sym;
2487 /* Allocate space for this_forall. */
2488 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2490 /* Create a temporary variable for the FORALL index. */
2491 tmp = gfc_typenode_for_spec (&sym->ts);
2492 var[n] = gfc_create_var (tmp, sym->name);
2493 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2495 /* Record it in this_forall. */
2496 this_forall->var = var[n];
2498 /* Replace the index symbol's backend_decl with the temporary decl. */
2499 sym->backend_decl = var[n];
2501 /* Work out the start, end and stride for the loop. */
2502 gfc_init_se (&se, NULL);
2503 gfc_conv_expr_val (&se, fa->start);
2504 /* Record it in this_forall. */
2505 this_forall->start = se.expr;
2506 gfc_add_block_to_block (&block, &se.pre);
2509 gfc_init_se (&se, NULL);
2510 gfc_conv_expr_val (&se, fa->end);
2511 /* Record it in this_forall. */
2512 this_forall->end = se.expr;
2513 gfc_make_safe_expr (&se);
2514 gfc_add_block_to_block (&block, &se.pre);
2517 gfc_init_se (&se, NULL);
2518 gfc_conv_expr_val (&se, fa->stride);
2519 /* Record it in this_forall. */
2520 this_forall->step = se.expr;
2521 gfc_make_safe_expr (&se);
2522 gfc_add_block_to_block (&block, &se.pre);
2525 /* Set the NEXT field of this_forall to NULL. */
2526 this_forall->next = NULL;
2527 /* Link this_forall to the info construct. */
2528 if (info->this_loop)
2530 iter_info *iter_tmp = info->this_loop;
2531 while (iter_tmp->next != NULL)
2532 iter_tmp = iter_tmp->next;
2533 iter_tmp->next = this_forall;
2536 info->this_loop = this_forall;
2542 /* Calculate the size needed for the current forall level. */
2543 size = gfc_index_one_node;
2544 for (n = 0; n < nvar; n++)
2546 /* size = (end + step - start) / step. */
2547 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2549 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2551 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2552 tmp = convert (gfc_array_index_type, tmp);
2554 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2557 /* Record the nvar and size of current forall level. */
2563 /* If the mask is .true., consider the FORALL unconditional. */
2564 if (code->expr->expr_type == EXPR_CONSTANT
2565 && code->expr->value.logical)
2573 /* First we need to allocate the mask. */
2576 /* As the mask array can be very big, prefer compact boolean types. */
2577 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2578 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2579 size, NULL, &block, &pmask);
2580 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2582 /* Record them in the info structure. */
2583 info->maskindex = maskindex;
2588 /* No mask was specified. */
2589 maskindex = NULL_TREE;
2590 mask = pmask = NULL_TREE;
2593 /* Link the current forall level to nested_forall_info. */
2594 info->prev_nest = nested_forall_info;
2595 nested_forall_info = info;
2597 /* Copy the mask into a temporary variable if required.
2598 For now we assume a mask temporary is needed. */
2601 /* As the mask array can be very big, prefer compact boolean types. */
2602 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2604 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2606 /* Start of mask assignment loop body. */
2607 gfc_start_block (&body);
2609 /* Evaluate the mask expression. */
2610 gfc_init_se (&se, NULL);
2611 gfc_conv_expr_val (&se, code->expr);
2612 gfc_add_block_to_block (&body, &se.pre);
2614 /* Store the mask. */
2615 se.expr = convert (mask_type, se.expr);
2617 tmp = gfc_build_array_ref (mask, maskindex);
2618 gfc_add_modify_expr (&body, tmp, se.expr);
2620 /* Advance to the next mask element. */
2621 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
2622 maskindex, gfc_index_one_node);
2623 gfc_add_modify_expr (&body, maskindex, tmp);
2625 /* Generate the loops. */
2626 tmp = gfc_finish_block (&body);
2627 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2628 gfc_add_expr_to_block (&block, tmp);
2631 c = code->block->next;
2633 /* TODO: loop merging in FORALL statements. */
2634 /* Now that we've got a copy of the mask, generate the assignment loops. */
2640 /* A scalar or array assignment. */
2641 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2642 /* Temporaries due to array assignment data dependencies introduce
2643 no end of problems. */
2645 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2646 nested_forall_info, &block);
2649 /* Use the normal assignment copying routines. */
2650 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2652 /* Generate body and loops. */
2653 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2655 gfc_add_expr_to_block (&block, tmp);
2661 /* Translate WHERE or WHERE construct nested in FORALL. */
2662 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2665 /* Pointer assignment inside FORALL. */
2666 case EXEC_POINTER_ASSIGN:
2667 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2669 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2670 nested_forall_info, &block);
2673 /* Use the normal assignment copying routines. */
2674 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2676 /* Generate body and loops. */
2677 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2679 gfc_add_expr_to_block (&block, tmp);
2684 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2685 gfc_add_expr_to_block (&block, tmp);
2688 /* Explicit subroutine calls are prevented by the frontend but interface
2689 assignments can legitimately produce them. */
2690 case EXEC_ASSIGN_CALL:
2691 assign = gfc_trans_call (c, true);
2692 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
2693 gfc_add_expr_to_block (&block, tmp);
2703 /* Restore the original index variables. */
2704 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2705 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2707 /* Free the space for var, start, end, step, varexpr. */
2713 gfc_free (saved_vars);
2715 /* Free the space for this forall_info. */
2720 /* Free the temporary for the mask. */
2721 tmp = gfc_call_free (pmask);
2722 gfc_add_expr_to_block (&block, tmp);
2725 pushdecl (maskindex);
2727 return gfc_finish_block (&block);
2731 /* Translate the FORALL statement or construct. */
/* Entry point for FORALL: delegate to gfc_trans_forall_1 with a NULL
   nested_forall_info, i.e. this is the outermost level of a (possibly
   nested) FORALL.  Inner levels recurse through gfc_trans_forall_1
   with the accumulated forall_info chain.  */
2733 tree gfc_trans_forall (gfc_code * code)
2735 return gfc_trans_forall_1 (code, NULL);
2739 /* Evaluate the WHERE mask expression, copy its value to a temporary.
2740 If the WHERE construct is nested in FORALL, compute the overall temporary
2741 needed by the WHERE mask expression multiplied by the iterator number of
2743 ME is the WHERE mask expression.
2744 MASK is the current execution mask upon input, whose sense may or may
2745 not be inverted as specified by the INVERT argument.
2746 CMASK is the updated execution mask on output, or NULL if not required.
2747 PMASK is the pending execution mask on output, or NULL if not required.
2748 BLOCK is the block in which to place the condition evaluation loops. */
2751 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2752 tree mask, bool invert, tree cmask, tree pmask,
2753 tree mask_type, stmtblock_t * block)
2758 stmtblock_t body, body1;
2759 tree count, cond, mtmp;
2762 gfc_init_loopinfo (&loop);
/* Walk ME twice: LSS supplies the shape for the scalarizer, RSS is the
   chain actually evaluated inside the loop body.  */
2764 lss = gfc_walk_expr (me);
2765 rss = gfc_walk_expr (me);
2767 /* Variable to index the temporary. */
2768 count = gfc_create_var (gfc_array_index_type, "count");
2769 /* Initialize count. */
2770 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2772 gfc_start_block (&body);
2774 gfc_init_se (&rse, NULL);
2775 gfc_init_se (&lse, NULL);
/* Scalar mask expression: no scalarization loop is needed, the single
   evaluation goes straight into BODY1.  */
2777 if (lss == gfc_ss_terminator)
2779 gfc_init_block (&body1);
2783 /* Initialize the loop. */
2784 gfc_init_loopinfo (&loop);
2786 /* We may need LSS to determine the shape of the expression. */
2787 gfc_add_ss_to_loop (&loop, lss);
2788 gfc_add_ss_to_loop (&loop, rss);
2790 gfc_conv_ss_startstride (&loop);
2791 gfc_conv_loop_setup (&loop);
2793 gfc_mark_ss_chain_used (rss, 1);
2794 /* Start the loop body. */
2795 gfc_start_scalarized_body (&loop, &body1);
2797 /* Translate the expression. */
2798 gfc_copy_loopinfo_to_se (&rse, &loop);
2800 gfc_conv_expr (&rse, me);
2803 /* Variable to evaluate mask condition. */
2804 cond = gfc_create_var (mask_type, "cond");
/* MTMP caches the incoming control mask element (possibly negated per
   INVERT) so it is loaded only once per iteration; it is only needed
   when an output mask (CMASK or PMASK) will be written.  */
2805 if (mask && (cmask || pmask))
2806 mtmp = gfc_create_var (mask_type, "mask");
2807 else mtmp = NULL_TREE;
2809 gfc_add_block_to_block (&body1, &lse.pre);
2810 gfc_add_block_to_block (&body1, &rse.pre);
/* COND = value of the mask expression for this element.  */
2812 gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));
2814 if (mask && (cmask || pmask))
2816 tmp = gfc_build_array_ref (mask, count);
/* Apply INVERT by negating the loaded control-mask element.
   NOTE(review): the guard for when the negation applies is on an
   elided line — presumably `if (invert)`; confirm against full file.  */
2818 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
2819 gfc_add_modify_expr (&body1, mtmp, tmp);
/* Updated control mask: cmask[count] = mask-elt AND cond.  */
2824 tmp1 = gfc_build_array_ref (cmask, count);
2827 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2828 gfc_add_modify_expr (&body1, tmp1, tmp);
/* Pending mask for ELSEWHERE: pmask[count] = mask-elt AND NOT cond.  */
2833 tmp1 = gfc_build_array_ref (pmask, count);
2834 tmp = build1 (TRUTH_NOT_EXPR, mask_type, cond);
2836 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2837 gfc_add_modify_expr (&body1, tmp1, tmp);
2840 gfc_add_block_to_block (&body1, &lse.post);
2841 gfc_add_block_to_block (&body1, &rse.post);
2843 if (lss == gfc_ss_terminator)
2845 gfc_add_block_to_block (&body, &body1);
2849 /* Increment count. */
2850 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
2851 gfc_index_one_node);
2852 gfc_add_modify_expr (&body1, count, tmp1);
2854 /* Generate the copying loops. */
2855 gfc_trans_scalarizing_loops (&loop, &body1);
2857 gfc_add_block_to_block (&body, &loop.pre);
2858 gfc_add_block_to_block (&body, &loop.post);
2860 gfc_cleanup_loop (&loop);
2861 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2862 as tree nodes in SS may not be valid in different scope. */
2865 tmp1 = gfc_finish_block (&body);
2866 /* If the WHERE construct is inside FORALL, fill the full temporary. */
2867 if (nested_forall_info != NULL)
2868 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
2870 gfc_add_expr_to_block (block, tmp1);
2874 /* Translate an assignment statement in a WHERE statement or construct
2875 statement. The MASK expression is used to control which elements
2876 of EXPR1 shall be assigned. The sense of MASK is specified by
/* COUNT1/COUNT2 index the mask temporary for the main assignment loop
   and the temp-to-lhs copy-out loop respectively; SYM (if set) names an
   operator-assignment subroutine to call instead of a plain assign.  */
2880 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
2881 tree mask, bool invert,
2882 tree count1, tree count2,
2888 gfc_ss *lss_section;
2895 tree index, maskexpr;
2898 /* TODO: handle this special case.
2899 Special case a single function returning an array. */
2900 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
2902 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
2908 /* Assignment of the form lhs = rhs. */
2909 gfc_start_block (&block);
2911 gfc_init_se (&lse, NULL);
2912 gfc_init_se (&rse, NULL);
2915 lss = gfc_walk_expr (expr1);
2918 /* In each where-assign-stmt, the mask-expr and the variable being
2919 defined shall be arrays of the same shape. */
2920 gcc_assert (lss != gfc_ss_terminator);
2922 /* The assignment needs scalarization. */
2925 /* Find a non-scalar SS from the lhs. */
2926 while (lss_section != gfc_ss_terminator
2927 && lss_section->type != GFC_SS_SECTION)
2928 lss_section = lss_section->next;
2930 gcc_assert (lss_section != gfc_ss_terminator);
2932 /* Initialize the scalarizer. */
2933 gfc_init_loopinfo (&loop);
2936 rss = gfc_walk_expr (expr2);
2937 if (rss == gfc_ss_terminator)
2939 /* The rhs is scalar. Add a ss for the expression. */
2940 rss = gfc_get_ss ();
2941 rss->next = gfc_ss_terminator;
2942 rss->type = GFC_SS_SCALAR;
2946 /* Associate the SS with the loop. */
2947 gfc_add_ss_to_loop (&loop, lss);
2948 gfc_add_ss_to_loop (&loop, rss);
2950 /* Calculate the bounds of the scalarization. */
2951 gfc_conv_ss_startstride (&loop);
2953 /* Resolve any data dependencies in the statement. */
/* May create loop.temp_ss: a temporary through which the rhs is staged
   when lhs and rhs overlap.  The two-phase code below handles that.  */
2954 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
2956 /* Setup the scalarizing loops. */
2957 gfc_conv_loop_setup (&loop);
2959 /* Setup the gfc_se structures. */
2960 gfc_copy_loopinfo_to_se (&lse, &loop);
2961 gfc_copy_loopinfo_to_se (&rse, &loop);
2964 gfc_mark_ss_chain_used (rss, 1);
2965 if (loop.temp_ss == NULL)
2968 gfc_mark_ss_chain_used (lss, 1);
/* Dependency temporary in use: the lhs chain and the temp chain are
   used on both sides of the loop boundary (flag 3).  */
2972 lse.ss = loop.temp_ss;
2973 gfc_mark_ss_chain_used (lss, 3);
2974 gfc_mark_ss_chain_used (loop.temp_ss, 3);
2977 /* Start the scalarized loop body. */
2978 gfc_start_scalarized_body (&loop, &body);
2980 /* Translate the expression. */
2981 gfc_conv_expr (&rse, expr2);
2982 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
/* Phase one writes into the temporary, not the real lhs.  */
2984 gfc_conv_tmp_array_ref (&lse);
2985 gfc_advance_se_ss_chain (&lse);
2988 gfc_conv_expr (&lse, expr1);
2990 /* Form the mask expression according to the mask. */
2992 maskexpr = gfc_build_array_ref (mask, index);
/* INVERT flips the sense of the mask element (ELSEWHERE bodies).
   NOTE(review): the `if (invert)` guard is on an elided line.  */
2994 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
2996 /* Use the scalar assignment as is. */
2998 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
2999 loop.temp_ss != NULL, false);
/* Operator (defined) assignment goes through the resolved subroutine.  */
3001 tmp = gfc_conv_operator_assign (&lse, &rse, sym);
/* Guard the assignment: executed only where maskexpr is true.  */
3003 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3005 gfc_add_expr_to_block (&body, tmp);
3007 if (lss == gfc_ss_terminator)
3009 /* Increment count1. */
3010 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3011 count1, gfc_index_one_node);
3012 gfc_add_modify_expr (&body, count1, tmp);
3014 /* Use the scalar assignment as is. */
3015 gfc_add_block_to_block (&block, &body);
3019 gcc_assert (lse.ss == gfc_ss_terminator
3020 && rse.ss == gfc_ss_terminator);
3022 if (loop.temp_ss != NULL)
3024 /* Increment count1 before finish the main body of a scalarized
3026 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3027 count1, gfc_index_one_node);
3028 gfc_add_modify_expr (&body, count1, tmp);
3029 gfc_trans_scalarized_loop_boundary (&loop, &body);
3031 /* We need to copy the temporary to the actual lhs. */
3032 gfc_init_se (&lse, NULL);
3033 gfc_init_se (&rse, NULL);
3034 gfc_copy_loopinfo_to_se (&lse, &loop);
3035 gfc_copy_loopinfo_to_se (&rse, &loop);
3037 rse.ss = loop.temp_ss;
3040 gfc_conv_tmp_array_ref (&rse);
3041 gfc_advance_se_ss_chain (&rse);
3042 gfc_conv_expr (&lse, expr1);
3044 gcc_assert (lse.ss == gfc_ss_terminator
3045 && rse.ss == gfc_ss_terminator);
3047 /* Form the mask expression according to the mask tree list. */
/* The copy-out loop is guarded by the same mask, indexed by count2.  */
3049 maskexpr = gfc_build_array_ref (mask, index);
3051 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3054 /* Use the scalar assignment as is. */
3055 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3056 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3057 gfc_add_expr_to_block (&body, tmp);
3059 /* Increment count2. */
3060 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3061 count2, gfc_index_one_node);
3062 gfc_add_modify_expr (&body, count2, tmp);
3066 /* Increment count1. */
3067 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3068 count1, gfc_index_one_node);
3069 gfc_add_modify_expr (&body, count1, tmp);
3072 /* Generate the copying loops. */
3073 gfc_trans_scalarizing_loops (&loop, &body);
3075 /* Wrap the whole thing up. */
3076 gfc_add_block_to_block (&block, &loop.pre);
3077 gfc_add_block_to_block (&block, &loop.post);
3078 gfc_cleanup_loop (&loop);
3081 return gfc_finish_block (&block);
3085 /* Translate the WHERE construct or statement.
3086 This function can be called iteratively to translate the nested WHERE
3087 construct or statement.
3088 MASK is the control mask. */
/* Recursive worker for WHERE: CODE is the WHERE gfc_code, MASK/INVERT the
   inherited control mask and its sense, NESTED_FORALL_INFO the enclosing
   FORALL nest (or NULL), BLOCK the output statement block.  Walks the
   chain of WHERE/ELSEWHERE clauses, maintaining a current control mask
   (cmask) and a pending mask (pmask) for subsequent ELSEWHERE clauses.  */
3091 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3092 forall_info * nested_forall_info, stmtblock_t * block)
3094 stmtblock_t inner_size_body;
3095 tree inner_size, size;
3103 tree count1, count2;
/* pcmask/ppmask hold the raw allocations (for freeing); cmask/pmask are
   the usable mask arrays.  */
3107 tree pcmask = NULL_TREE;
3108 tree ppmask = NULL_TREE;
3109 tree cmask = NULL_TREE;
3110 tree pmask = NULL_TREE;
3111 gfc_actual_arglist *arg;
3113 /* the WHERE statement or the WHERE construct statement. */
3114 cblock = code->block;
3116 /* As the mask array can be very big, prefer compact boolean types. */
3117 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3119 /* Determine which temporary masks are needed. */
3122 /* One clause: No ELSEWHEREs. */
3123 need_cmask = (cblock->next != 0);
3126 else if (cblock->block->block)
3128 /* Three or more clauses: Conditional ELSEWHEREs. */
3132 else if (cblock->next)
3134 /* Two clauses, the first non-empty. */
/* A pending mask is only needed when nested inside an outer WHERE
   (mask != NULL) and the ELSEWHERE body is non-empty.  */
3136 need_pmask = (mask != NULL_TREE
3137 && cblock->block->next != 0);
3139 else if (!cblock->block->next)
3141 /* Two clauses, both empty. */
3145 /* Two clauses, the first empty, the second non-empty. */
3148 need_cmask = (cblock->block->expr != 0);
3157 if (need_cmask || need_pmask)
3159 /* Calculate the size of temporary needed by the mask-expr. */
3160 gfc_init_block (&inner_size_body);
3161 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3162 &inner_size_body, &lss, &rss);
3164 /* Calculate the total size of temporary needed. */
/* Inside FORALL the mask temporary must cover every iteration, so the
   per-clause size is multiplied up by the forall iteration counts.  */
3165 size = compute_overall_iter_number (nested_forall_info, inner_size,
3166 &inner_size_body, block);
3168 /* Allocate temporary for WHERE mask if needed. */
3170 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3173 /* Allocate temporary for !mask if needed. */
3175 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3181 /* Each time around this loop, the where clause is conditional
3182 on the value of mask and invert, which are updated at the
3183 bottom of the loop. */
3185 /* Has mask-expr. */
3188 /* Ensure that the WHERE mask will be evaluated exactly once.
3189 If there are no statements in this WHERE/ELSEWHERE clause,
3190 then we don't need to update the control mask (cmask).
3191 If this is the last clause of the WHERE construct, then
3192 we don't need to update the pending control mask (pmask). */
3194 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3196 cblock->next ? cmask : NULL_TREE,
3197 cblock->block ? pmask : NULL_TREE,
3200 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3202 (cblock->next || cblock->block)
3203 ? cmask : NULL_TREE,
3204 NULL_TREE, mask_type, block);
3208 /* It's a final elsewhere-stmt. No mask-expr is present. */
3212 /* The body of this where clause are controlled by cmask with
3213 sense specified by invert. */
3215 /* Get the assignment statement of a WHERE statement, or the first
3216 statement in where-body-construct of a WHERE construct. */
3217 cnext = cblock->next;
3222 /* WHERE assignment statement. */
3223 case EXEC_ASSIGN_CALL:
/* Defined (operator) assignment: lhs/rhs arrive as the last two
   actual arguments of the resolved call.  */
3225 arg = cnext->ext.actual;
3226 expr1 = expr2 = NULL;
3227 for (; arg; arg = arg->next)
3239 expr1 = cnext->expr;
3240 expr2 = cnext->expr2;
3242 if (nested_forall_info != NULL)
3244 need_temp = gfc_check_dependency (expr1, expr2, 0);
3245 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3246 gfc_trans_assign_need_temp (expr1, expr2,
3248 nested_forall_info, block);
3251 /* Variables to control maskexpr. */
3252 count1 = gfc_create_var (gfc_array_index_type, "count1");
3253 count2 = gfc_create_var (gfc_array_index_type, "count2");
3254 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3255 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3257 tmp = gfc_trans_where_assign (expr1, expr2,
3260 cnext->resolved_sym);
/* Inside FORALL, wrap the masked assignment in the forall loops.  */
3262 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3264 gfc_add_expr_to_block (block, tmp);
3269 /* Variables to control maskexpr. */
3270 count1 = gfc_create_var (gfc_array_index_type, "count1");
3271 count2 = gfc_create_var (gfc_array_index_type, "count2");
3272 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3273 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3275 tmp = gfc_trans_where_assign (expr1, expr2,
3278 cnext->resolved_sym);
3279 gfc_add_expr_to_block (block, tmp);
3284 /* WHERE or WHERE construct is part of a where-body-construct. */
/* Nested WHERE: recurse with the current control mask.  */
3286 gfc_trans_where_2 (cnext, cmask, invert,
3287 nested_forall_info, block);
3294 /* The next statement within the same where-body-construct. */
3295 cnext = cnext->next;
3297 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3298 cblock = cblock->block;
3299 if (mask == NULL_TREE)
3301 /* If we're the initial WHERE, we can simply invert the sense
3302 of the current mask to obtain the "mask" for the remaining
3309 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3315 /* If we allocated a pending mask array, deallocate it now. */
3318 tmp = gfc_call_free (ppmask);
3319 gfc_add_expr_to_block (block, tmp);
3322 /* If we allocated a current mask array, deallocate it now. */
3325 tmp = gfc_call_free (pcmask);
3326 gfc_add_expr_to_block (block, tmp);
3330 /* Translate a simple WHERE construct or statement without dependencies.
3331 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3332 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3333 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
/* Fast path: with no data dependencies, no mask temporary is needed —
   one scalarized loop evaluates the condition and performs either the
   then-assignment or the else-assignment per element via a COND_EXPR.  */
3336 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3338 stmtblock_t block, body;
3339 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3340 tree tmp, cexpr, tstmt, estmt;
3341 gfc_ss *css, *tdss, *tsss;
3342 gfc_se cse, tdse, tsse, edse, esse;
/* cond: mask expr; tdst/tsrc: then-clause dest/source;
   edst/esrc: else-clause dest/source (NULL when no ELSEWHERE).  */
3347 cond = cblock->expr;
3348 tdst = cblock->next->expr;
3349 tsrc = cblock->next->expr2;
3350 edst = eblock ? eblock->next->expr : NULL;
3351 esrc = eblock ? eblock->next->expr2 : NULL;
3353 gfc_start_block (&block);
3354 gfc_init_loopinfo (&loop);
3356 /* Handle the condition. */
3357 gfc_init_se (&cse, NULL);
3358 css = gfc_walk_expr (cond);
3359 gfc_add_ss_to_loop (&loop, css);
3361 /* Handle the then-clause. */
3362 gfc_init_se (&tdse, NULL);
3363 gfc_init_se (&tsse, NULL);
3364 tdss = gfc_walk_expr (tdst);
3365 tsss = gfc_walk_expr (tsrc);
/* Scalar rhs: wrap it in a GFC_SS_SCALAR entry for the scalarizer.  */
3366 if (tsss == gfc_ss_terminator)
3368 tsss = gfc_get_ss ();
3369 tsss->next = gfc_ss_terminator;
3370 tsss->type = GFC_SS_SCALAR;
3373 gfc_add_ss_to_loop (&loop, tdss);
3374 gfc_add_ss_to_loop (&loop, tsss);
3378 /* Handle the else clause. */
3379 gfc_init_se (&edse, NULL);
3380 gfc_init_se (&esse, NULL);
3381 edss = gfc_walk_expr (edst);
3382 esss = gfc_walk_expr (esrc);
3383 if (esss == gfc_ss_terminator)
3385 esss = gfc_get_ss ();
3386 esss->next = gfc_ss_terminator;
3387 esss->type = GFC_SS_SCALAR;
3390 gfc_add_ss_to_loop (&loop, edss);
3391 gfc_add_ss_to_loop (&loop, esss);
3394 gfc_conv_ss_startstride (&loop);
3395 gfc_conv_loop_setup (&loop);
3397 gfc_mark_ss_chain_used (css, 1);
3398 gfc_mark_ss_chain_used (tdss, 1);
3399 gfc_mark_ss_chain_used (tsss, 1);
3402 gfc_mark_ss_chain_used (edss, 1);
3403 gfc_mark_ss_chain_used (esss, 1);
3406 gfc_start_scalarized_body (&loop, &body);
3408 gfc_copy_loopinfo_to_se (&cse, &loop);
3409 gfc_copy_loopinfo_to_se (&tdse, &loop);
3410 gfc_copy_loopinfo_to_se (&tsse, &loop);
3416 gfc_copy_loopinfo_to_se (&edse, &loop);
3417 gfc_copy_loopinfo_to_se (&esse, &loop);
/* Per-element condition value.  */
3422 gfc_conv_expr (&cse, cond);
3423 gfc_add_block_to_block (&body, &cse.pre);
3426 gfc_conv_expr (&tsse, tsrc);
3427 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3429 gfc_conv_tmp_array_ref (&tdse);
3430 gfc_advance_se_ss_chain (&tdse);
3433 gfc_conv_expr (&tdse, tdst);
3437 gfc_conv_expr (&esse, esrc);
3438 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3440 gfc_conv_tmp_array_ref (&edse);
3441 gfc_advance_se_ss_chain (&edse);
3444 gfc_conv_expr (&edse, edst);
/* Build: if (cond) then-assign else else-assign (empty stmt if none).  */
3447 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3448 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3449 : build_empty_stmt ();
3450 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3451 gfc_add_expr_to_block (&body, tmp);
3452 gfc_add_block_to_block (&body, &cse.post);
3454 gfc_trans_scalarizing_loops (&loop, &body);
3455 gfc_add_block_to_block (&block, &loop.pre);
3456 gfc_add_block_to_block (&block, &loop.post);
3457 gfc_cleanup_loop (&loop);
3459 return gfc_finish_block (&block);
3462 /* As the WHERE or WHERE construct statement can be nested, we call
3463 gfc_trans_where_2 to do the translation, and pass the initial
3464 NULL values for both the control mask and the pending control mask. */
/* Entry point for WHERE.  First tries the dependence-free fast path
   (gfc_trans_where_3) for the two simple shapes — a single assignment,
   or assignment plus a bare ELSEWHERE assignment — falling back to the
   general mask-temporary translation (gfc_trans_where_2) otherwise.  */
3467 gfc_trans_where (gfc_code * code)
3473 cblock = code->block;
3475 && cblock->next->op == EXEC_ASSIGN
3476 && !cblock->next->next)
3478 eblock = cblock->block;
3481 /* A simple "WHERE (cond) x = y" statement or block is
3482 dependence free if cond is not dependent upon writing x,
3483 and the source y is unaffected by the destination x. */
3484 if (!gfc_check_dependency (cblock->next->expr,
3486 && !gfc_check_dependency (cblock->next->expr,
3487 cblock->next->expr2, 0))
3488 return gfc_trans_where_3 (cblock, NULL);
3490 else if (!eblock->expr
3493 && eblock->next->op == EXEC_ASSIGN
3494 && !eblock->next->next)
3496 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3497 block is dependence free if cond is not dependent on writes
3498 to x1 and x2, y1 is not dependent on writes to x2, and y2
3499 is not dependent on writes to x1, and both y's are not
3500 dependent upon their own x's. */
3501 if (!gfc_check_dependency(cblock->next->expr,
3503 && !gfc_check_dependency(eblock->next->expr,
3505 && !gfc_check_dependency(cblock->next->expr,
3506 eblock->next->expr2, 0)
3507 && !gfc_check_dependency(eblock->next->expr,
3508 cblock->next->expr2, 0)
3509 && !gfc_check_dependency(cblock->next->expr,
3510 cblock->next->expr2, 0)
3511 && !gfc_check_dependency(eblock->next->expr,
3512 eblock->next->expr2, 0))
3513 return gfc_trans_where_3 (cblock, eblock);
/* General case: no initial control mask, no pending mask.  */
3517 gfc_start_block (&block);
3519 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3521 return gfc_finish_block (&block);
3525 /* CYCLE a DO loop. The label decl has already been created by
3526 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
3527 node at the head of the loop. We must mark the label as used. */
3530 gfc_trans_cycle (gfc_code * code)
3534 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
/* Setting TREE_USED marks the label as referenced so it is not
   warned about or discarded as unused.  */
3535 TREE_USED (cycle_label) = 1;
3536 return build1_v (GOTO_EXPR, cycle_label);
3540 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3541 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3545 gfc_trans_exit (gfc_code * code)
3549 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
/* Mark the label referenced, then jump past the loop.  */
3550 TREE_USED (exit_label) = 1;
3551 return build1_v (GOTO_EXPR, exit_label);
3555 /* Translate the ALLOCATE statement. */
/* For each object in the allocation list, either allocate an array
   (gfc_array_allocate) or a scalar/derived object via the library
   _gfortran_allocate call.  With STAT=, failures jump to a common
   error label and the status value is stored afterwards.  */
3558 gfc_trans_allocate (gfc_code * code)
3570 if (!code->ext.alloc_list)
3573 gfc_start_block (&block);
/* STAT= present: create a local int4 status variable and an error
   label to branch to on the first failure.  */
3577 tree gfc_int4_type_node = gfc_get_int_type (4);
3579 stat = gfc_create_var (gfc_int4_type_node, "stat");
3580 pstat = build_fold_addr_expr (stat);
3582 error_label = gfc_build_label_decl (NULL_TREE);
3583 TREE_USED (error_label) = 1;
/* No STAT=: pass a zero pointer so the library aborts on failure.  */
3587 pstat = integer_zero_node;
3588 stat = error_label = NULL_TREE;
3592 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3596 gfc_init_se (&se, NULL);
3597 gfc_start_block (&se.pre);
3599 se.want_pointer = 1;
3600 se.descriptor_only = 1;
3601 gfc_conv_expr (&se, expr);
3603 if (!gfc_array_allocate (&se, expr, pstat))
3605 /* A scalar or derived type. */
3606 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
/* Deferred-length character: size comes from the string length.  */
3608 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3609 tmp = se.string_length;
3611 tmp = build_call_expr (gfor_fndecl_allocate, 2, tmp, pstat);
3612 tmp = build2 (MODIFY_EXPR, void_type_node, se.expr, tmp);
3613 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT=, test the status after each allocation and bail out
   to the error label on the first nonzero value.  */
3617 tmp = build1_v (GOTO_EXPR, error_label);
3618 parm = fold_build2 (NE_EXPR, boolean_type_node,
3619 stat, build_int_cst (TREE_TYPE (stat), 0));
3620 tmp = fold_build3 (COND_EXPR, void_type_node,
3621 parm, tmp, build_empty_stmt ());
3622 gfc_add_expr_to_block (&se.pre, tmp);
/* Freshly allocated derived types with allocatable components need
   those components nullified before any use.  */
3625 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3627 tmp = build_fold_indirect_ref (se.expr);
3628 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3629 gfc_add_expr_to_block (&se.pre, tmp);
3634 tmp = gfc_finish_block (&se.pre);
3635 gfc_add_expr_to_block (&block, tmp);
3638 /* Assign the value to the status variable. */
3641 tmp = build1_v (LABEL_EXPR, error_label);
3642 gfc_add_expr_to_block (&block, tmp);
3644 gfc_init_se (&se, NULL);
3645 gfc_conv_expr_lhs (&se, code->expr);
3646 tmp = convert (TREE_TYPE (se.expr), stat);
3647 gfc_add_modify_expr (&block, se.expr, tmp);
3650 return gfc_finish_block (&block);
3654 /* Translate a DEALLOCATE statement.
3655 There are two cases within the for loop:
3656 (1) deallocate(a1, a2, a3) is translated into the following sequence
3657 _gfortran_deallocate(a1, 0B)
3658 _gfortran_deallocate(a2, 0B)
3659 _gfortran_deallocate(a3, 0B)
3660 where the STAT= variable is passed a NULL pointer.
3661 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
3663 _gfortran_deallocate(a1, &stat)
3664 astat = astat + stat
3665 _gfortran_deallocate(a2, &stat)
3666 astat = astat + stat
3667 _gfortran_deallocate(a3, &stat)
3668 astat = astat + stat
3669 In case (1), we simply return at the end of the for loop. In case (2)
3670 we set STAT= astat. */
3672 gfc_trans_deallocate (gfc_code * code)
3677 tree apstat, astat, pstat, stat, tmp;
3680 gfc_start_block (&block);
3682 /* Set up the optional STAT= */
3685 tree gfc_int4_type_node = gfc_get_int_type (4);
3687 /* Variable used with the library call. */
3688 stat = gfc_create_var (gfc_int4_type_node, "stat");
3689 pstat = build_fold_addr_expr (stat);
3691 /* Running total of possible deallocation failures. */
3692 astat = gfc_create_var (gfc_int4_type_node, "astat");
3693 apstat = build_fold_addr_expr (astat);
3695 /* Initialize astat to 0. */
3696 gfc_add_modify_expr (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
/* No STAT=: pass a null status pointer so failures abort at runtime.  */
3700 pstat = apstat = null_pointer_node;
3701 stat = astat = NULL_TREE;
3704 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3707 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3709 gfc_init_se (&se, NULL);
3710 gfc_start_block (&se.pre);
3712 se.want_pointer = 1;
3713 se.descriptor_only = 1;
3714 gfc_conv_expr (&se, expr);
/* Derived types with allocatable components: recursively deallocate
   the components first, unless the object is reached through a
   pointer (then the components are not owned here).  */
3716 if (expr->ts.type == BT_DERIVED
3717 && expr->ts.derived->attr.alloc_comp)
3720 gfc_ref *last = NULL;
/* Find the last component reference in the variable's ref chain.  */
3721 for (ref = expr->ref; ref; ref = ref->next)
3722 if (ref->type == REF_COMPONENT)
3725 /* Do not deallocate the components of a derived type
3726 ultimate pointer component. */
3727 if (!(last && last->u.c.component->pointer)
3728 && !(!last && expr->symtree->n.sym->attr.pointer))
3730 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
3732 gfc_add_expr_to_block (&se.pre, tmp);
/* Array objects go through gfc_array_deallocate; scalars call the
   library _gfortran_deallocate directly.  */
3737 tmp = gfc_array_deallocate (se.expr, pstat);
3740 tmp = build_call_expr (gfor_fndecl_deallocate, 2, se.expr, pstat);
3741 gfc_add_expr_to_block (&se.pre, tmp);
/* Null the pointer after deallocation.  */
3743 tmp = build2 (MODIFY_EXPR, void_type_node,
3744 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
3747 gfc_add_expr_to_block (&se.pre, tmp);
3749 /* Keep track of the number of failed deallocations by adding stat
3750 of the last deallocation to the running total. */
3753 apstat = build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
3754 gfc_add_modify_expr (&se.pre, astat, apstat);
3757 tmp = gfc_finish_block (&se.pre);
3758 gfc_add_expr_to_block (&block, tmp);
3762 /* Assign the value to the status variable. */
3765 gfc_init_se (&se, NULL);
3766 gfc_conv_expr_lhs (&se, code->expr);
3767 tmp = convert (TREE_TYPE (se.expr), astat);
3768 gfc_add_modify_expr (&block, se.expr, tmp);
3771 return gfc_finish_block (&block);