1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
27 #include "coretypes.h"
29 #include "tree-gimple.h"
36 #include "trans-stmt.h"
37 #include "trans-types.h"
38 #include "trans-array.h"
39 #include "trans-const.h"
41 #include "dependency.h"
/* NOTE(review): this listing is elided -- struct members other than those
   shown below are not visible here.  */
/* Presumably per-dimension iterator state for a FORALL construct,
   chained through NEXT -- TODO confirm against the full source.  */
43 typedef struct iter_info
49 struct iter_info *next;
/* Presumably state for one nesting level of a FORALL construct;
   PREV_NEST appears to link outward to the enclosing level.  */
53 typedef struct forall_info
60 struct forall_info *prev_nest;
/* Forward declaration so that functions defined earlier in the file can
   call the WHERE translator defined later.  */
64 static void gfc_trans_where_2 (gfc_code *, tree, bool,
65 forall_info *, stmtblock_t *);
67 /* Translate a F95 label number to a LABEL_EXPR. */
/* Returns a LABEL_EXPR tree for the statement label CODE->here.
   NOTE(review): listing elided -- return type and braces not visible.  */
70 gfc_trans_label_here (gfc_code * code)
72 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
76 /* Given a variable expression which has been ASSIGNed to, find the decl
77 containing the auxiliary variables. For variables in common blocks this
/* NOTE(review): listing elided -- the tail of this comment and the
   function's local declarations are not visible here.  */
81 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only valid for a symbol that was the target of a Fortran ASSIGN.  */
83 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
84 gfc_conv_expr (se, expr);
85 /* Deals with variable in common block. Get the field declaration. */
86 if (TREE_CODE (se->expr) == COMPONENT_REF)
87 se->expr = TREE_OPERAND (se->expr, 1);
88 /* Deals with dummy argument. Get the parameter declaration. */
89 else if (TREE_CODE (se->expr) == INDIRECT_REF)
90 se->expr = TREE_OPERAND (se->expr, 0);
93 /* Translate a label assignment statement. */
/* Stores the assigned label's address into the variable's auxiliary
   ADDR slot.  The auxiliary LEN slot is set to -1 for a branch target
   (gfc_trans_goto checks for exactly this sentinel) or to the format
   string length when the label names a FORMAT statement.  */
96 gfc_trans_label_assign (gfc_code * code)
106 /* Start a new block. */
107 gfc_init_se (&se, NULL);
108 gfc_start_block (&se.pre);
109 gfc_conv_label_variable (&se, code->expr);
111 len = GFC_DECL_STRING_LEN (se.expr);
112 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
114 label_tree = gfc_get_label_decl (code->label);
116 if (code->label->defined == ST_LABEL_TARGET)
/* Branch target: store the label address, flag with length -1.  */
118 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
119 len_tree = integer_minus_one_node;
/* FORMAT label: store the format text and its real length.
   NOTE(review): the else-branch opening lines are elided here.  */
123 label_str = code->label->format->value.character.string;
124 label_len = code->label->format->value.character.length;
125 len_tree = build_int_cst (NULL_TREE, label_len);
126 label_tree = gfc_build_string_const (label_len + 1, label_str);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
/* Commit both auxiliary slots and return the finished block.  */
130 gfc_add_modify_expr (&se.pre, len, len_tree);
131 gfc_add_modify_expr (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
136 /* Translate a GOTO statement. */
139 gfc_trans_goto (gfc_code * code)
141 locus loc = code->loc;
/* Ordinary GOTO: jump straight to the known label.  */
147 if (code->label != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
/* Assigned GOTO: use the address stored by gfc_trans_label_assign.  */
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr);
/* A stored length other than the -1 sentinel means the variable holds
   a FORMAT label, which must not be used as a jump target.  */
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (tmp, "Assigned label is not a target label",
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
/* NOTE(review): lines elided -- presumably this arm handles an assigned
   GOTO without a label list via a computed jump; confirm in full source.  */
165 target = build1 (GOTO_EXPR, void_type_node, assigned_goto);
166 gfc_add_expr_to_block (&se.pre, target);
167 return gfc_finish_block (&se.pre);
170 /* Check the label list. */
/* Compare the stored address against each candidate label and jump on a
   match.  NOTE(review): the loop head around this body is elided.  */
173 target = gfc_get_label_decl (code->label);
174 tmp = gfc_build_addr_expr (pvoid_type_node, target);
175 tmp = build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
176 tmp = build3_v (COND_EXPR, tmp,
177 build1 (GOTO_EXPR, void_type_node, target),
178 build_empty_stmt ());
179 gfc_add_expr_to_block (&se.pre, tmp);
182 while (code != NULL);
/* Exhausted the list: unconditionally report the runtime error.  */
183 gfc_trans_runtime_check (boolean_true_node,
184 "Assigned label is not in the list", &se.pre, &loc);
186 return gfc_finish_block (&se.pre);
190 /* Translate an ENTRY statement. Just adds a label for this entry point. */
/* Returns a LABEL_EXPR for the entry's label; presumably the dispatch to
   this label is generated elsewhere -- confirm in the full source.  */
192 gfc_trans_entry (gfc_code * code)
194 return build1_v (LABEL_EXPR, code->ext.entry->label);
198 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
199 elemental subroutines. Make temporaries for output arguments if any such
200 dependencies are found. Output arguments are chosen because internal_unpack
201 can be used, as is, to copy the result back to the variable. */
/* NOTE(review): listing elided -- several declarations and closing braces
   of this function are not visible here.  */
203 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
204 gfc_symbol * sym, gfc_actual_arglist * arg)
206 gfc_actual_arglist *arg0;
208 gfc_formal_arglist *formal;
209 gfc_loopinfo tmp_loop;
/* Nothing to do unless we are inside a scalarization loop.  */
221 if (loopse->ss == NULL)
226 formal = sym->formal;
228 /* Loop over all the arguments testing for dependencies. */
229 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
235 /* Obtain the info structure for the current argument. */
237 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
241 info = &ss->data.info;
245 /* If there is a dependency, create a temporary and use it
246 instead of the variable. */
247 fsym = formal ? formal->sym : NULL;
248 if (e->expr_type == EXPR_VARIABLE
250 && fsym->attr.intent == INTENT_OUT
251 && gfc_check_fncall_dependency (e, INTENT_OUT, sym, arg0))
253 /* Make a local loopinfo for the temporary creation, so that
254 none of the other ss->info's have to be renormalized. */
255 gfc_init_loopinfo (&tmp_loop);
256 for (n = 0; n < info->dimen; n++)
258 tmp_loop.to[n] = loopse->loop->to[n];
259 tmp_loop.from[n] = loopse->loop->from[n];
260 tmp_loop.order[n] = loopse->loop->order[n];
263 /* Generate the temporary. Merge the block so that the
264 declarations are put at the right binding level. */
265 size = gfc_create_var (gfc_array_index_type, NULL);
266 data = gfc_create_var (pvoid_type_node, NULL);
267 gfc_start_block (&block);
268 tmp = gfc_typenode_for_spec (&e->ts);
269 tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
270 &tmp_loop, info, tmp,
272 gfc_add_modify_expr (&se->pre, size, tmp);
273 tmp = fold_convert (pvoid_type_node, info->data);
274 gfc_add_modify_expr (&se->pre, data, tmp);
275 gfc_merge_block_scope (&block);
277 /* Obtain the argument descriptor for unpacking. */
278 gfc_init_se (&parmse, NULL);
279 parmse.want_pointer = 1;
280 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
281 gfc_add_block_to_block (&se->pre, &parmse.pre);
283 /* Calculate the offset for the temporary. */
/* Accumulates offset -= from[n] * stride[n] over all dimensions so the
   temporary is addressed consistently with the loop bounds.  */
284 offset = gfc_index_zero_node;
285 for (n = 0; n < info->dimen; n++)
287 tmp = gfc_conv_descriptor_stride (info->descriptor,
289 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
290 loopse->loop->from[n], tmp);
291 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
294 info->offset = gfc_create_var (gfc_array_index_type, NULL);
295 gfc_add_modify_expr (&se->pre, info->offset, offset);
297 /* Copy the result back using unpack. */
/* Runs after the call (se->post), copying temp data back to the actual
   argument through the library routine.  */
298 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
299 gfc_add_expr_to_block (&se->post, tmp);
301 gfc_add_block_to_block (&se->post, &parmse.post);
307 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
/* DEPENDENCY_CHECK is true for operator-assignment calls, where the
   first formal must be INTENT(OUT) and the second INTENT(IN).
   NOTE(review): listing elided -- declarations and braces missing.  */
310 gfc_trans_call (gfc_code * code, bool dependency_check)
314 int has_alternate_specifier;
316 /* A CALL starts a new block because the actual arguments may have to
317 be evaluated first. */
318 gfc_init_se (&se, NULL);
319 gfc_start_block (&se.pre);
321 gcc_assert (code->resolved_sym);
323 ss = gfc_ss_terminator;
324 if (code->resolved_sym->attr.elemental)
325 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
327 /* Is not an elemental subroutine call with array valued arguments. */
328 if (ss == gfc_ss_terminator)
331 /* Translate the call. */
332 has_alternate_specifier
333 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
336 /* A subroutine without side-effect, by definition, does nothing! */
/* Force side effects so the call is never optimized away.  */
337 TREE_SIDE_EFFECTS (se.expr) = 1;
339 /* Chain the pieces together and return the block. */
340 if (has_alternate_specifier)
342 gfc_code *select_code;
/* Alternate returns: store the specifier into the variable driving the
   EXEC_SELECT that immediately follows this call.  */
344 select_code = code->next;
345 gcc_assert(select_code->op == EXEC_SELECT);
346 sym = select_code->expr->symtree->n.sym;
347 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
348 if (sym->backend_decl == NULL)
349 sym->backend_decl = gfc_get_symbol_decl (sym);
350 gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
353 gfc_add_expr_to_block (&se.pre, se.expr);
355 gfc_add_block_to_block (&se.pre, &se.post);
360 /* An elemental subroutine call with array valued arguments has
367 /* gfc_walk_elemental_function_args renders the ss chain in the
368 reverse order to the actual argument order. */
369 ss = gfc_reverse_ss (ss);
371 /* Initialize the loop. */
372 gfc_init_se (&loopse, NULL);
373 gfc_init_loopinfo (&loop);
374 gfc_add_ss_to_loop (&loop, ss);
376 gfc_conv_ss_startstride (&loop);
377 gfc_conv_loop_setup (&loop);
378 gfc_mark_ss_chain_used (ss, 1);
380 /* Convert the arguments, checking for dependencies. */
381 gfc_copy_loopinfo_to_se (&loopse, &loop);
384 /* For operator assignment, we need to do dependency checking.
385 We also check the intent of the parameters. */
386 if (dependency_check)
389 sym = code->resolved_sym;
390 gcc_assert (sym->formal->sym->attr.intent == INTENT_OUT);
391 gcc_assert (sym->formal->next->sym->attr.intent == INTENT_IN);
392 gfc_conv_elemental_dependencies (&se, &loopse, sym,
396 /* Generate the loop body. */
397 gfc_start_scalarized_body (&loop, &body);
398 gfc_init_block (&block);
400 /* Add the subroutine call to the block. */
401 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
403 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
405 gfc_add_block_to_block (&block, &loopse.pre);
406 gfc_add_block_to_block (&block, &loopse.post);
408 /* Finish up the loop block and the loop. */
409 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
410 gfc_trans_scalarizing_loops (&loop, &body);
411 gfc_add_block_to_block (&se.pre, &loop.pre);
412 gfc_add_block_to_block (&se.pre, &loop.post);
413 gfc_add_block_to_block (&se.pre, &se.post);
414 gfc_cleanup_loop (&loop);
417 return gfc_finish_block (&se.pre);
421 /* Translate the RETURN statement. */
/* NOTE(review): listing elided -- the conditional structure around the
   two trailing returns is not fully visible here.  */
424 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
432 /* If code->expr is not NULL, this return statement must appear
433 in a subroutine and current_fake_result_decl has already
436 result = gfc_get_fake_result_decl (NULL, 0);
/* No result decl available: warn and fall back to a plain jump to the
   function's return label.  */
439 gfc_warning ("An alternate return at %L without a * dummy argument",
441 return build1_v (GOTO_EXPR, gfc_get_return_label ());
444 /* Start a new block for this statement. */
445 gfc_init_se (&se, NULL);
446 gfc_start_block (&se.pre);
/* Assign the alternate-return value to the fake result, then jump to
   the shared return label.  */
448 gfc_conv_expr (&se, code->expr);
450 tmp = build2 (MODIFY_EXPR, TREE_TYPE (result), result, se.expr);
451 gfc_add_expr_to_block (&se.pre, tmp);
453 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
454 gfc_add_expr_to_block (&se.pre, tmp);
455 gfc_add_block_to_block (&se.pre, &se.post);
456 return gfc_finish_block (&se.pre);
/* Plain RETURN with no expression.  */
459 return build1_v (GOTO_EXPR, gfc_get_return_label ());
463 /* Translate the PAUSE statement. We have to translate this statement
464 to a runtime library call. */
/* Mirrors gfc_trans_stop below: numeric codes go to the _numeric entry
   point, character expressions to the _string entry point.  */
467 gfc_trans_pause (gfc_code * code)
469 tree gfc_int4_type_node = gfc_get_int_type (4);
473 /* Start a new block for this statement. */
474 gfc_init_se (&se, NULL);
475 gfc_start_block (&se.pre);
/* PAUSE with no expression (or a numeric stop code).  */
478 if (code->expr == NULL)
480 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
481 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
/* PAUSE 'message': pass the string and its length to the library.  */
485 gfc_conv_expr_reference (&se, code->expr);
486 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
487 se.expr, se.string_length);
490 gfc_add_expr_to_block (&se.pre, tmp);
492 gfc_add_block_to_block (&se.pre, &se.post);
494 return gfc_finish_block (&se.pre);
498 /* Translate the STOP statement. We have to translate this statement
499 to a runtime library call. */
/* Structurally identical to gfc_trans_pause above, but calls the STOP
   library entry points, which do not return.  */
502 gfc_trans_stop (gfc_code * code)
504 tree gfc_int4_type_node = gfc_get_int_type (4);
508 /* Start a new block for this statement. */
509 gfc_init_se (&se, NULL);
510 gfc_start_block (&se.pre);
/* STOP with a numeric (or absent) stop code.  */
513 if (code->expr == NULL)
515 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
516 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
/* STOP 'message': pass the string and its length to the library.  */
520 gfc_conv_expr_reference (&se, code->expr);
521 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
522 se.expr, se.string_length);
525 gfc_add_expr_to_block (&se.pre, tmp);
527 gfc_add_block_to_block (&se.pre, &se.post);
529 return gfc_finish_block (&se.pre);
533 /* Generate GENERIC for the IF construct. This function also deals with
534 the simple IF statement, because the front end translates the IF
535 statement into an IF construct.
567 where COND_S is the simplified version of the predicate. PRE_COND_S
568 are the pre side-effects produced by the translation of the
570 We need to build the chain recursively otherwise we run into
571 problems with folding incomplete statements. */
/* Recursive helper: translates one IF/ELSE IF/ELSE link of the chain.
   NOTE(review): listing elided -- declarations and the guard's condition
   line are not visible here.  */
574 gfc_trans_if_1 (gfc_code * code)
579 /* Check for an unconditional ELSE clause. */
581 return gfc_trans_code (code->next);
583 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
584 gfc_init_se (&if_se, NULL);
585 gfc_start_block (&if_se.pre);
587 /* Calculate the IF condition expression. */
588 gfc_conv_expr_val (&if_se, code->expr);
590 /* Translate the THEN clause. */
591 stmt = gfc_trans_code (code->next);
593 /* Translate the ELSE clause. */
/* Recurse for ELSE IF chains; an absent ELSE becomes an empty stmt.  */
595 elsestmt = gfc_trans_if_1 (code->block);
597 elsestmt = build_empty_stmt ();
599 /* Build the condition expression and add it to the condition block. */
600 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
602 gfc_add_expr_to_block (&if_se.pre, stmt);
604 /* Finish off this statement. */
605 return gfc_finish_block (&if_se.pre);
/* Entry point for EXEC_IF; delegates to the recursive worker above.  */
609 gfc_trans_if (gfc_code * code)
611 /* Ignore the top EXEC_IF, it only announces an IF construct. The
612 actual code we must translate is in code->block. */
614 return gfc_trans_if_1 (code->block);
618 /* Translate an arithmetic IF expression.
620 IF (cond) label1, label2, label3 translates to
632 An optimized version can be generated in case of equal labels.
633 E.g., if label1 is equal to label2, we can translate it to
/* NOTE(review): listing elided -- declarations and some braces of this
   function are not visible here.  */
642 gfc_trans_arithmetic_if (gfc_code * code)
650 /* Start a new block. */
651 gfc_init_se (&se, NULL);
652 gfc_start_block (&se.pre);
654 /* Pre-evaluate COND. */
/* Evaluate once into a temporary; COND is compared up to twice below.  */
655 gfc_conv_expr_val (&se, code->expr);
656 se.expr = gfc_evaluate_now (se.expr, &se.pre);
658 /* Build something to compare with. */
659 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
661 if (code->label->value != code->label2->value)
663 /* If (cond < 0) take branch1 else take branch2.
664 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
665 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
666 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
/* label1 == label3 collapses the first test to cond != 0.  */
668 if (code->label->value != code->label3->value)
669 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
671 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
673 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
/* label1 == label2: the <0 and ==0 cases share a destination.  */
676 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
/* Only emit the second comparison when label3 is genuinely distinct.  */
678 if (code->label->value != code->label3->value
679 && code->label2->value != code->label3->value)
681 /* if (cond <= 0) take branch1 else take branch2. */
682 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
683 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
684 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
687 /* Append the COND_EXPR to the evaluation of COND, and return. */
688 gfc_add_expr_to_block (&se.pre, branch1);
689 return gfc_finish_block (&se.pre);
693 /* Translate the simple DO construct. This is where the loop variable has
694 integer type and step +-1. We can't use this in the general case
695 because integer overflow and floating point errors could give incorrect
697 We translate a do loop from:
699 DO dovar = from, to, step
705 [Evaluate loop bounds and step]
707 if ((step > 0) ? (dovar <= to) : (dovar => to))
713 cond = (dovar == to);
715 if (cond) goto end_label;
720 This helps the optimizers by avoiding the extra induction variable
721 used in the general case. */
/* Called from gfc_trans_do when STEP is a literal +1 or -1, so DOVAR
   itself can be the induction variable without overflow concerns.  */
724 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
725 tree from, tree to, tree step)
734 type = TREE_TYPE (dovar);
736 /* Initialize the DO variable: dovar = from. */
737 gfc_add_modify_expr (pblock, dovar, from);
739 /* Cycle and exit statements are implemented with gotos. */
740 cycle_label = gfc_build_label_decl (NULL_TREE);
741 exit_label = gfc_build_label_decl (NULL_TREE);
743 /* Put the labels where they can be found later. See gfc_trans_do(). */
744 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
747 gfc_start_block (&body);
749 /* Main loop body. */
750 tmp = gfc_trans_code (code->block->next);
751 gfc_add_expr_to_block (&body, tmp);
753 /* Label for cycle statements (if needed). */
754 if (TREE_USED (cycle_label))
756 tmp = build1_v (LABEL_EXPR, cycle_label);
757 gfc_add_expr_to_block (&body, tmp);
760 /* Evaluate the loop condition. */
/* Test dovar == to BEFORE incrementing, so the final increment that
   could overflow is never observed.  */
761 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
762 cond = gfc_evaluate_now (cond, &body);
764 /* Increment the loop variable. */
765 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
766 gfc_add_modify_expr (&body, dovar, tmp);
/* Conditional jump out of the loop once the last iteration ran.  */
769 tmp = build1_v (GOTO_EXPR, exit_label);
770 TREE_USED (exit_label) = 1;
771 tmp = fold_build3 (COND_EXPR, void_type_node,
772 cond, tmp, build_empty_stmt ());
773 gfc_add_expr_to_block (&body, tmp);
775 /* Finish the loop body. */
776 tmp = gfc_finish_block (&body);
777 tmp = build1_v (LOOP_EXPR, tmp);
779 /* Only execute the loop if the number of iterations is positive. */
/* STEP is a literal here, so its sign is known at compile time.  */
780 if (tree_int_cst_sgn (step) > 0)
781 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
783 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
784 tmp = fold_build3 (COND_EXPR, void_type_node,
785 cond, tmp, build_empty_stmt ());
786 gfc_add_expr_to_block (pblock, tmp);
788 /* Add the exit label. */
789 tmp = build1_v (LABEL_EXPR, exit_label);
790 gfc_add_expr_to_block (pblock, tmp);
792 return gfc_finish_block (pblock);
795 /* Translate the DO construct. This obviously is one of the most
796 important ones to get right with any compiler, but especially
799 We special case some loop forms as described in gfc_trans_simple_do.
800 For other cases we implement them with a separate loop count,
801 as described in the standard.
803 We translate a do loop from:
805 DO dovar = from, to, step
811 [evaluate loop bounds and step]
812 empty = (step > 0 ? to < from : to > from);
813 countm1 = (to - from) / step;
815 if (empty) goto exit_label;
821 if (countm1 ==0) goto exit_label;
826 countm1 is an unsigned integer. It is equal to the loop count minus one,
827 because the loop count itself can overflow. */
/* NOTE(review): listing elided -- declarations and several braces are
   not visible here.  */
830 gfc_trans_do (gfc_code * code)
849 gfc_start_block (&block);
851 /* Evaluate all the expressions in the iterator. */
852 gfc_init_se (&se, NULL);
853 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
854 gfc_add_block_to_block (&block, &se.pre);
856 type = TREE_TYPE (dovar);
/* FROM, TO and STEP are each forced into temporaries so they are
   evaluated exactly once, before the loop runs.  */
858 gfc_init_se (&se, NULL);
859 gfc_conv_expr_val (&se, code->ext.iterator->start);
860 gfc_add_block_to_block (&block, &se.pre);
861 from = gfc_evaluate_now (se.expr, &block);
863 gfc_init_se (&se, NULL);
864 gfc_conv_expr_val (&se, code->ext.iterator->end);
865 gfc_add_block_to_block (&block, &se.pre);
866 to = gfc_evaluate_now (se.expr, &block);
868 gfc_init_se (&se, NULL);
869 gfc_conv_expr_val (&se, code->ext.iterator->step);
870 gfc_add_block_to_block (&block, &se.pre);
871 step = gfc_evaluate_now (se.expr, &block);
873 /* Special case simple loops. */
874 if (TREE_CODE (type) == INTEGER_TYPE
875 && (integer_onep (step)
876 || tree_int_cst_equal (step, integer_minus_one_node)))
877 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
879 /* We need a special check for empty loops:
880 empty = (step > 0 ? to < from : to > from); */
881 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
882 fold_convert (type, integer_zero_node));
883 empty = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
884 fold_build2 (LT_EXPR, boolean_type_node, to, from),
885 fold_build2 (GT_EXPR, boolean_type_node, to, from));
887 /* Initialize loop count. This code is executed before we enter the
888 loop body. We generate: countm1 = abs(to - from) / abs(step). */
889 if (TREE_CODE (type) == INTEGER_TYPE)
/* Integer DO: compute the trip count in the matching unsigned type so
   the subtraction and division cannot overflow.  */
893 utype = unsigned_type_for (type);
895 /* tmp = abs(to - from) / abs(step) */
896 ustep = fold_convert (utype, fold_build1 (ABS_EXPR, type, step));
897 tmp = fold_build3 (COND_EXPR, type, pos_step,
898 fold_build2 (MINUS_EXPR, type, to, from),
899 fold_build2 (MINUS_EXPR, type, from, to));
900 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, fold_convert (utype, tmp),
905 /* TODO: We could use the same width as the real type.
906 This would probably cause more problems that it solves
907 when we implement "long double" types. */
/* Real-typed DO: compute (to - from) / step in the real type, then
   truncate to an unsigned integer trip count.  */
908 utype = unsigned_type_for (gfc_array_index_type);
909 tmp = fold_build2 (MINUS_EXPR, type, to, from);
910 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
911 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
913 countm1 = gfc_create_var (utype, "countm1");
914 gfc_add_modify_expr (&block, countm1, tmp);
916 /* Cycle and exit statements are implemented with gotos. */
917 cycle_label = gfc_build_label_decl (NULL_TREE);
918 exit_label = gfc_build_label_decl (NULL_TREE);
919 TREE_USED (exit_label) = 1;
921 /* Initialize the DO variable: dovar = from. */
922 gfc_add_modify_expr (&block, dovar, from);
924 /* If the loop is empty, go directly to the exit label. */
925 tmp = fold_build3 (COND_EXPR, void_type_node, empty,
926 build1_v (GOTO_EXPR, exit_label), build_empty_stmt ());
927 gfc_add_expr_to_block (&block, tmp);
930 gfc_start_block (&body);
932 /* Put these labels where they can be found later. We put the
933 labels in a TREE_LIST node (because TREE_CHAIN is already
934 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
935 label in TREE_VALUE (backend_decl). */
937 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
939 /* Main loop body. */
940 tmp = gfc_trans_code (code->block->next);
941 gfc_add_expr_to_block (&body, tmp);
943 /* Label for cycle statements (if needed). */
944 if (TREE_USED (cycle_label))
946 tmp = build1_v (LABEL_EXPR, cycle_label);
947 gfc_add_expr_to_block (&body, tmp);
950 /* Increment the loop variable. */
951 tmp = build2 (PLUS_EXPR, type, dovar, step);
952 gfc_add_modify_expr (&body, dovar, tmp);
954 /* End with the loop condition. Loop until countm1 == 0. */
/* Test BEFORE decrementing so countm1 (loop count minus one, stored
   unsigned) never wraps below zero.  */
955 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
956 build_int_cst (utype, 0));
957 tmp = build1_v (GOTO_EXPR, exit_label);
958 tmp = fold_build3 (COND_EXPR, void_type_node,
959 cond, tmp, build_empty_stmt ());
960 gfc_add_expr_to_block (&body, tmp);
962 /* Decrement the loop count. */
963 tmp = build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
964 gfc_add_modify_expr (&body, countm1, tmp);
966 /* End of loop body. */
967 tmp = gfc_finish_block (&body);
969 /* The for loop itself. */
970 tmp = build1_v (LOOP_EXPR, tmp);
971 gfc_add_expr_to_block (&block, tmp);
973 /* Add the exit label. */
974 tmp = build1_v (LABEL_EXPR, exit_label);
975 gfc_add_expr_to_block (&block, tmp);
977 return gfc_finish_block (&block);
981 /* Translate the DO WHILE construct.
994 if (! cond) goto exit_label;
1000 Because the evaluation of the exit condition `cond' may have side
1001 effects, we can't do much for empty loop bodies. The backend optimizers
1002 should be smart enough to eliminate any dead loops. */
/* NOTE(review): listing elided -- the function's declarations are not
   visible here.  */
1005 gfc_trans_do_while (gfc_code * code)
1013 /* Everything we build here is part of the loop body. */
1014 gfc_start_block (&block);
1016 /* Cycle and exit statements are implemented with gotos. */
1017 cycle_label = gfc_build_label_decl (NULL_TREE);
1018 exit_label = gfc_build_label_decl (NULL_TREE);
1020 /* Put the labels where they can be found later. See gfc_trans_do(). */
1021 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1023 /* Create a GIMPLE version of the exit condition. */
/* The condition is re-evaluated at the top of every iteration, so its
   pre side-effects belong inside the loop body block.  */
1024 gfc_init_se (&cond, NULL);
1025 gfc_conv_expr_val (&cond, code->expr);
1026 gfc_add_block_to_block (&block, &cond.pre);
1027 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1029 /* Build "IF (! cond) GOTO exit_label". */
1030 tmp = build1_v (GOTO_EXPR, exit_label);
1031 TREE_USED (exit_label) = 1;
1032 tmp = fold_build3 (COND_EXPR, void_type_node,
1033 cond.expr, tmp, build_empty_stmt ());
1034 gfc_add_expr_to_block (&block, tmp);
1036 /* The main body of the loop. */
1037 tmp = gfc_trans_code (code->block->next);
1038 gfc_add_expr_to_block (&block, tmp);
1040 /* Label for cycle statements (if needed). */
1041 if (TREE_USED (cycle_label))
1043 tmp = build1_v (LABEL_EXPR, cycle_label);
1044 gfc_add_expr_to_block (&block, tmp);
1047 /* End of loop body. */
1048 tmp = gfc_finish_block (&block);
1050 gfc_init_block (&block);
1051 /* Build the loop. */
1052 tmp = build1_v (LOOP_EXPR, tmp);
1053 gfc_add_expr_to_block (&block, tmp);
1055 /* Add the exit label. */
1056 tmp = build1_v (LABEL_EXPR, exit_label);
1057 gfc_add_expr_to_block (&block, tmp);
1059 return gfc_finish_block (&block);
1063 /* Translate the SELECT CASE construct for INTEGER case expressions,
1064 without killing all potential optimizations. The problem is that
1065 Fortran allows unbounded cases, but the back-end does not, so we
1066 need to intercept those before we enter the equivalent SWITCH_EXPR
1069 For example, we translate this,
1072 CASE (:100,101,105:115)
1082 to the GENERIC equivalent,
1086 case (minimum value for typeof(expr) ... 100:
1092 case 200 ... (maximum value for typeof(expr):
/* NOTE(review): listing elided -- declarations and some braces are not
   visible here.  */
1109 gfc_trans_integer_select (gfc_code * code)
1119 gfc_start_block (&block);
1121 /* Calculate the switch expression. */
1122 gfc_init_se (&se, NULL);
1123 gfc_conv_expr_val (&se, code->expr);
1124 gfc_add_block_to_block (&block, &se.pre);
1126 end_label = gfc_build_label_decl (NULL_TREE);
1128 gfc_init_block (&body);
1130 for (c = code->block; c; c = c->block)
1132 for (cp = c->ext.case_list; cp; cp = cp->next)
1137 /* Assume it's the default case. */
1138 low = high = NULL_TREE;
1142 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1145 /* If there's only a lower bound, set the high bound to the
1146 maximum value of the case expression. */
1148 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1153 /* Three cases are possible here:
1155 1) There is no lower bound, e.g. CASE (:N).
1156 2) There is a lower bound .NE. high bound, that is
1157 a case range, e.g. CASE (N:M) where M>N (we make
1158 sure that M>N during type resolution).
1159 3) There is a lower bound, and it has the same value
1160 as the high bound, e.g. CASE (N:N). This is our
1161 internal representation of CASE(N).
1163 In the first and second case, we need to set a value for
1164 high. In the third case, we don't because the GCC middle
1165 end represents a single case value by just letting high be
1166 a NULL_TREE. We can't do that because we need to be able
1167 to represent unbounded cases. */
1171 && mpz_cmp (cp->low->value.integer,
1172 cp->high->value.integer) != 0))
1173 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1176 /* Unbounded case. */
/* CASE (:N) style: clamp the low bound to the type minimum.  */
1178 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1181 /* Build a label. */
1182 label = gfc_build_label_decl (NULL_TREE);
1184 /* Add this case label.
1185 Add parameter 'label', make it match GCC backend. */
1186 tmp = build3 (CASE_LABEL_EXPR, void_type_node, low, high, label);
1187 gfc_add_expr_to_block (&body, tmp);
1190 /* Add the statements for this case. */
1191 tmp = gfc_trans_code (c->next);
1192 gfc_add_expr_to_block (&body, tmp);
1194 /* Break to the end of the construct. */
/* Fortran cases never fall through; jump past the SWITCH_EXPR.  */
1195 tmp = build1_v (GOTO_EXPR, end_label);
1196 gfc_add_expr_to_block (&body, tmp);
1199 tmp = gfc_finish_block (&body);
1200 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1201 gfc_add_expr_to_block (&block, tmp);
1203 tmp = build1_v (LABEL_EXPR, end_label);
1204 gfc_add_expr_to_block (&block, tmp);
1206 return gfc_finish_block (&block);
1210 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1212 There are only two cases possible here, even though the standard
1213 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1214 .FALSE., and DEFAULT.
1216 We never generate more than two blocks here. Instead, we always
1217 try to eliminate the DEFAULT case. This way, we can translate this
1218 kind of SELECT construct to a simple
1222 expression in GENERIC. */
/* NOTE(review): listing elided -- declarations and some assignments to
   t/f/d are not visible here.  */
1225 gfc_trans_logical_select (gfc_code * code)
1228 gfc_code *t, *f, *d;
1233 /* Assume we don't have any cases at all. */
1236 /* Now see which ones we actually do have. We can have at most two
1237 cases in a single case list: one for .TRUE. and one for .FALSE.
1238 The default case is always separate. If the cases for .TRUE. and
1239 .FALSE. are in the same case list, the block for that case list
1240 always executed, and we don't generate code a COND_EXPR. */
1241 for (c = code->block; c; c = c->block)
1243 for (cp = c->ext.case_list; cp; cp = cp->next)
1247 if (cp->low->value.logical == 0) /* .FALSE. */
1249 else /* if (cp->value.logical != 0), thus .TRUE. */
1257 /* Start a new block. */
1258 gfc_start_block (&block);
1260 /* Calculate the switch expression. We always need to do this
1261 because it may have side effects. */
1262 gfc_init_se (&se, NULL);
1263 gfc_conv_expr_val (&se, code->expr);
1264 gfc_add_block_to_block (&block, &se.pre);
1266 if (t == f && t != NULL)
1268 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1269 translate the code for these cases, append it to the current
1271 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1275 tree true_tree, false_tree, stmt;
/* Default to empty arms; fill in whichever cases exist below.  */
1277 true_tree = build_empty_stmt ();
1278 false_tree = build_empty_stmt ();
1280 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1281 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1282 make the missing case the default case. */
1283 if (t != NULL && f != NULL)
1293 /* Translate the code for each of these blocks, and append it to
1294 the current block. */
1296 true_tree = gfc_trans_code (t->next);
1299 false_tree = gfc_trans_code (f->next);
/* The whole construct collapses to one COND_EXPR on the selector.  */
1301 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1302 true_tree, false_tree);
1303 gfc_add_expr_to_block (&block, stmt);
1306 return gfc_finish_block (&block);
1310 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1311 Instead of generating compares and jumps, it is far simpler to
1312 generate a data structure describing the cases in order and call a
1313 library subroutine that locates the right case.
1314 This is particularly true because this is the only case where we
1315 might have to dispose of a temporary.
1316 The library subroutine returns a pointer to jump to or NULL if no
1317 branches are to be taken. */
/* Translate SELECT CASE with a CHARACTER selector.  Builds a static
   jump table of (string1, string2, target-label) records and calls the
   runtime routine gfor_fndecl_select_string to choose the branch.
   NOTE(review): the embedded line numbers in this excerpt jump, so
   some original statements and braces are missing here.  */
1320 gfc_trans_character_select (gfc_code *code)
1322 tree init, node, end_label, tmp, type, *labels;
1324 stmtblock_t block, body;
/* The jump-table record type and its field decls are built once and
   cached across calls in these function-local statics.  */
1330 static tree select_struct;
1331 static tree ss_string1, ss_string1_len;
1332 static tree ss_string2, ss_string2_len;
1333 static tree ss_target;
1335 if (select_struct == NULL)
1337 tree gfc_int4_type_node = gfc_get_int_type (4);
1339 select_struct = make_node (RECORD_TYPE);
1340 TYPE_NAME (select_struct) = get_identifier ("_jump_struct");
/* Helper macro: add one field to the record and remember its
   FIELD_DECL in the matching ss_* static.  */
1343 #define ADD_FIELD(NAME, TYPE) \
1344 ss_##NAME = gfc_add_field_to_struct \
1345 (&(TYPE_FIELDS (select_struct)), select_struct, \
1346 get_identifier (stringize(NAME)), TYPE)
1348 ADD_FIELD (string1, pchar_type_node);
1349 ADD_FIELD (string1_len, gfc_int4_type_node);
1351 ADD_FIELD (string2, pchar_type_node);
1352 ADD_FIELD (string2_len, gfc_int4_type_node);
1354 ADD_FIELD (target, pvoid_type_node);
1357 gfc_finish_type (select_struct);
/* Walk to the leftmost case, then traverse the right links --
   presumably to count the cases into n; the statements that set n are
   missing from this excerpt.  */
1360 cp = code->block->ext.case_list;
1361 while (cp->left != NULL)
1365 for (d = cp; d; d = d->right)
/* One forced label per case record.  */
1369 labels = gfc_getmem (n * sizeof (tree));
1373 for(i = 0; i < n; i++)
1375 labels[i] = gfc_build_label_decl (NULL_TREE);
1376 TREE_USED (labels[i]) = 1;
1377 /* TODO: The gimplifier should do this for us, but it has
1378 inadequacies when dealing with static initializers. */
1379 FORCED_LABEL (labels[i]) = 1;
1382 end_label = gfc_build_label_decl (NULL_TREE);
1384 /* Generate the body */
1385 gfc_start_block (&block);
1386 gfc_init_block (&body);
/* Emit each case block preceded by its label(s) and followed by a
   jump to the end label (Fortran cases never fall through).  */
1388 for (c = code->block; c; c = c->block)
1390 for (d = c->ext.case_list; d; d = d->next)
1392 tmp = build1_v (LABEL_EXPR, labels[d->n]);
1393 gfc_add_expr_to_block (&body, tmp);
1396 tmp = gfc_trans_code (c->next);
1397 gfc_add_expr_to_block (&body, tmp);
1399 tmp = build1_v (GOTO_EXPR, end_label);
1400 gfc_add_expr_to_block (&body, tmp);
1403 /* Generate the structure describing the branches */
1407 for(d = cp; d; d = d->right, i++)
1411 gfc_init_se (&se, NULL);
/* A missing low bound is encoded as (NULL pointer, length 0).  */
1415 node = tree_cons (ss_string1, null_pointer_node, node);
1416 node = tree_cons (ss_string1_len, integer_zero_node, node);
1420 gfc_conv_expr_reference (&se, d->low);
1422 node = tree_cons (ss_string1, se.expr, node);
1423 node = tree_cons (ss_string1_len, se.string_length, node);
/* Same encoding for a missing high bound.  */
1426 if (d->high == NULL)
1428 node = tree_cons (ss_string2, null_pointer_node, node);
1429 node = tree_cons (ss_string2_len, integer_zero_node, node);
1433 gfc_init_se (&se, NULL);
1434 gfc_conv_expr_reference (&se, d->high);
1436 node = tree_cons (ss_string2, se.expr, node);
1437 node = tree_cons (ss_string2_len, se.string_length, node);
/* The branch target for this record.  */
1440 tmp = gfc_build_addr_expr (pvoid_type_node, labels[i]);
1441 node = tree_cons (ss_target, tmp, node);
1443 tmp = build_constructor_from_list (select_struct, nreverse (node));
1444 init = tree_cons (NULL_TREE, tmp, init);
1447 type = build_array_type (select_struct, build_index_type
1448 (build_int_cst (NULL_TREE, n - 1)));
1450 init = build_constructor_from_list (type, nreverse(init));
1451 TREE_CONSTANT (init) = 1;
1452 TREE_INVARIANT (init) = 1;
1453 TREE_STATIC (init) = 1;
1454 /* Create a static variable to hold the jump table. */
1455 tmp = gfc_create_var (type, "jumptable");
1456 TREE_CONSTANT (tmp) = 1;
1457 TREE_INVARIANT (tmp) = 1;
1458 TREE_STATIC (tmp) = 1;
1459 TREE_READONLY (tmp) = 1;
1460 DECL_INITIAL (tmp) = init;
1463 /* Build the library call */
1464 init = gfc_build_addr_expr (pvoid_type_node, init);
1465 tmp = gfc_build_addr_expr (pvoid_type_node, end_label);
1467 gfc_init_se (&se, NULL);
1468 gfc_conv_expr_reference (&se, code->expr);
1470 gfc_add_block_to_block (&block, &se.pre);
/* The runtime routine returns the label address to branch to.  */
1472 tmp = build_call_expr (gfor_fndecl_select_string, 5,
1473 init, build_int_cst (NULL_TREE, n),
1474 tmp, se.expr, se.string_length);
1476 case_label = gfc_create_var (TREE_TYPE (tmp), "case_label");
1477 gfc_add_modify_expr (&block, case_label, tmp);
1479 gfc_add_block_to_block (&block, &se.post);
/* Computed goto into the case body.  */
1481 tmp = build1 (GOTO_EXPR, void_type_node, case_label);
1482 gfc_add_expr_to_block (&block, tmp);
1484 tmp = gfc_finish_block (&body);
1485 gfc_add_expr_to_block (&block, tmp);
1486 tmp = build1_v (LABEL_EXPR, end_label);
1487 gfc_add_expr_to_block (&block, tmp);
1492 return gfc_finish_block (&block);
1496 /* Translate the three variants of the SELECT CASE construct.
1498 SELECT CASEs with INTEGER case expressions can be translated to an
1499 equivalent GENERIC switch statement, and for LOGICAL case
1500 expressions we build one or two if-else compares.
1502 SELECT CASEs with CHARACTER case expressions are a whole different
1503 story, because they don't exist in GENERIC. So we sort them and
1504 do a binary search at runtime.
1506 Fortran has no BREAK statement, and it does not allow jumps from
1507 one case block to another. That makes things a lot easier for
/* Dispatch SELECT CASE translation on the selector's type: LOGICAL,
   INTEGER and CHARACTER each have a dedicated translator.  */
1511 gfc_trans_select (gfc_code * code)
1513 gcc_assert (code && code->expr);
1515 /* Empty SELECT constructs are legal. */
1516 if (code->block == NULL)
1517 return build_empty_stmt ();
1519 /* Select the correct translation function. */
1520 switch (code->expr->ts.type)
1522 case BT_LOGICAL: return gfc_trans_logical_select (code);
1523 case BT_INTEGER: return gfc_trans_integer_select (code);
1524 case BT_CHARACTER: return gfc_trans_character_select (code);
/* Any other selector type indicates a front-end bug.  */
1526 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1532 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1533 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1534 indicates whether we should generate code to test the FORALLs mask
1535 array. OUTER is the loop header to be used for initializing mask
1538 The generated loop format is:
1539 count = (end - start + step) / step
/* Build the counted loop(s) for one FORALL level, wrapping BODY once
   per index variable.  Each loop runs count = (end - start + step) /
   step iterations, exiting through a forced label when the counter
   reaches zero.  NOTE(review): the embedded line numbers jump in this
   excerpt, so some statements (e.g. the reads of var/end/step from
   ITER) are missing here.  */
1552 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1553 int mask_flag, stmtblock_t *outer)
1561 tree var, start, end, step;
1564 /* Initialize the mask index outside the FORALL nest. */
1565 if (mask_flag && forall_tmp->mask)
1566 gfc_add_modify_expr (outer, forall_tmp->maskindex, gfc_index_zero_node);
/* Wrap BODY once per index variable, innermost first.  */
1568 iter = forall_tmp->this_loop;
1569 nvar = forall_tmp->nvar;
1570 for (n = 0; n < nvar; n++)
1573 start = iter->start;
1577 exit_label = gfc_build_label_decl (NULL_TREE);
1578 TREE_USED (exit_label) = 1;
1580 /* The loop counter. */
1581 count = gfc_create_var (TREE_TYPE (var), "count");
1583 /* The body of the loop. */
1584 gfc_init_block (&block);
1586 /* The exit condition. */
1587 cond = fold_build2 (LE_EXPR, boolean_type_node,
1588 count, build_int_cst (TREE_TYPE (count), 0));
1589 tmp = build1_v (GOTO_EXPR, exit_label);
1590 tmp = fold_build3 (COND_EXPR, void_type_node,
1591 cond, tmp, build_empty_stmt ());
1592 gfc_add_expr_to_block (&block, tmp);
1594 /* The main loop body. */
1595 gfc_add_expr_to_block (&block, body);
1597 /* Increment the loop variable. */
1598 tmp = build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1599 gfc_add_modify_expr (&block, var, tmp);
1601 /* Advance to the next mask element. Only do this for the
1603 if (n == 0 && mask_flag && forall_tmp->mask)
1605 tree maskindex = forall_tmp->maskindex;
1606 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
1607 maskindex, gfc_index_one_node);
1608 gfc_add_modify_expr (&block, maskindex, tmp);
1611 /* Decrement the loop counter. */
1612 tmp = build2 (MINUS_EXPR, TREE_TYPE (var), count,
1613 build_int_cst (TREE_TYPE (var), 1));
1614 gfc_add_modify_expr (&block, count, tmp);
/* The finished block becomes the body of the next-outer loop.  */
1616 body = gfc_finish_block (&block);
1618 /* Loop var initialization. */
1619 gfc_init_block (&block);
1620 gfc_add_modify_expr (&block, var, start);
1623 /* Initialize the loop counter. */
1624 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1625 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1626 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1627 gfc_add_modify_expr (&block, count, tmp);
1629 /* The loop expression. */
1630 tmp = build1_v (LOOP_EXPR, body);
1631 gfc_add_expr_to_block (&block, tmp);
1633 /* The exit label. */
1634 tmp = build1_v (LABEL_EXPR, exit_label);
1635 gfc_add_expr_to_block (&block, tmp);
1637 body = gfc_finish_block (&block);
1644 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1645 is nonzero, the body is controlled by all masks in the forall nest.
1646 Otherwise, the innermost loop is not controlled by its mask. This
1647 is used for initializing that mask. */
/* Wrap BODY in the full nest of FORALL loops described by
   NESTED_FORALL_INFO, walking outward through the prev_nest links and
   optionally guarding each level with its mask element.  */
1650 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1655 forall_info *forall_tmp;
1656 tree mask, maskindex;
1658 gfc_start_block (&header);
1660 forall_tmp = nested_forall_info;
1661 while (forall_tmp != NULL)
1663 /* Generate body with masks' control. */
1666 mask = forall_tmp->mask;
1667 maskindex = forall_tmp->maskindex;
1669 /* If a mask was specified make the assignment conditional. */
1672 tmp = gfc_build_array_ref (mask, maskindex);
1673 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
/* Wrap the (possibly masked) body in this level's loops, then move
   to the enclosing level.  */
1676 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1677 forall_tmp = forall_tmp->prev_nest;
1681 gfc_add_expr_to_block (&header, body);
1682 return gfc_finish_block (&header);
1686 /* Allocate data for holding a temporary array. Returns either a local
1687 temporary array or a pointer variable. */
/* Allocate storage for a temporary array of SIZE elements / BYTESIZE
   bytes.  A small constant-size temporary becomes a stack variable;
   otherwise heap memory is malloc'd into a pointer variable and *PDATA
   receives it (as void *) so the caller can free it later.  */
1690 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
1697 if (INTEGER_CST_P (size))
1699 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1700 gfc_index_one_node);
/* Element type array with index range [0, size-1].  */
1705 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1706 type = build_array_type (elem_type, type);
1707 if (gfc_can_put_var_on_stack (bytesize))
1709 gcc_assert (INTEGER_CST_P (size));
1710 tmpvar = gfc_create_var (type, "temp");
/* Heap case: a pointer variable filled in by malloc.  */
1715 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1716 *pdata = convert (pvoid_type_node, tmpvar);
1718 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
1719 gfc_add_modify_expr (pblock, tmpvar, tmp);
1725 /* Generate code to copy the temporary back to the actual lhs. */
/* Copy the temporary TMP1 back into the lhs EXPR, advancing COUNT1
   through the temporary.  A scalar lhs is a single assignment; an
   array lhs is scalarized.  WHEREMASK (indexed by COUNT3, sense
   optionally inverted by INVERT) makes each element copy conditional.
   NOTE(review): embedded line numbers jump; some statements/braces
   are missing from this excerpt.  */
1728 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1729 tree count1, tree wheremask, bool invert)
1733 stmtblock_t block, body;
1739 lss = gfc_walk_expr (expr);
/* Scalar lhs: one assignment from temp[count1].  */
1741 if (lss == gfc_ss_terminator)
1743 gfc_start_block (&block);
1745 gfc_init_se (&lse, NULL);
1747 /* Translate the expression. */
1748 gfc_conv_expr (&lse, expr);
1750 /* Form the expression for the temporary. */
1751 tmp = gfc_build_array_ref (tmp1, count1);
1753 /* Use the scalar assignment as is. */
1754 gfc_add_block_to_block (&block, &lse.pre);
1755 gfc_add_modify_expr (&block, lse.expr, tmp);
1756 gfc_add_block_to_block (&block, &lse.post);
1758 /* Increment the count1. */
1759 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1760 gfc_index_one_node);
1761 gfc_add_modify_expr (&block, count1, tmp);
1763 tmp = gfc_finish_block (&block);
/* Array lhs: drive a scalarized copy loop over the lhs.  */
1767 gfc_start_block (&block);
1769 gfc_init_loopinfo (&loop1);
1770 gfc_init_se (&rse, NULL);
1771 gfc_init_se (&lse, NULL);
1773 /* Associate the lss with the loop. */
1774 gfc_add_ss_to_loop (&loop1, lss);
1776 /* Calculate the bounds of the scalarization. */
1777 gfc_conv_ss_startstride (&loop1);
1778 /* Setup the scalarizing loops. */
1779 gfc_conv_loop_setup (&loop1);
1781 gfc_mark_ss_chain_used (lss, 1);
1783 /* Start the scalarized loop body. */
1784 gfc_start_scalarized_body (&loop1, &body);
1786 /* Setup the gfc_se structures. */
1787 gfc_copy_loopinfo_to_se (&lse, &loop1);
1790 /* Form the expression of the temporary. */
1791 if (lss != gfc_ss_terminator)
1792 rse.expr = gfc_build_array_ref (tmp1, count1);
1793 /* Translate expr. */
1794 gfc_conv_expr (&lse, expr);
1796 /* Use the scalar assignment. */
1797 rse.string_length = lse.string_length;
1798 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
1800 /* Form the mask expression according to the mask tree list. */
1803 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
/* INVERT flips the sense of the WHERE mask (ELSEWHERE branch).  */
1805 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1806 TREE_TYPE (wheremaskexpr),
1808 tmp = fold_build3 (COND_EXPR, void_type_node,
1809 wheremaskexpr, tmp, build_empty_stmt ());
1812 gfc_add_expr_to_block (&body, tmp);
1814 /* Increment count1. */
1815 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1816 count1, gfc_index_one_node);
1817 gfc_add_modify_expr (&body, count1, tmp);
1819 /* Increment count3. */
1822 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1823 count3, gfc_index_one_node);
1824 gfc_add_modify_expr (&body, count3, tmp);
1827 /* Generate the copying loops. */
1828 gfc_trans_scalarizing_loops (&loop1, &body);
1829 gfc_add_block_to_block (&block, &loop1.pre);
1830 gfc_add_block_to_block (&block, &loop1.post);
1831 gfc_cleanup_loop (&loop1);
1833 tmp = gfc_finish_block (&block);
1839 /* Generate code to copy the rhs into the temporary. TMP1 is the address of
1840 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
1841 and should not be freed. WHEREMASK is the conditional execution mask
1842 whose sense may be inverted by INVERT. */
/* Evaluate the rhs EXPR2 into the temporary TMP1, advancing COUNT1.
   LSS/RSS are the chains produced by compute_inner_temp_size() and
   must not be freed here.  WHEREMASK (indexed by COUNT3, optionally
   inverted by INVERT) guards each element store.  NOTE(review):
   embedded line numbers jump; some statements/braces are missing from
   this excerpt.  */
1845 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
1846 tree count1, gfc_ss *lss, gfc_ss *rss,
1847 tree wheremask, bool invert)
1849 stmtblock_t block, body1;
1856 gfc_start_block (&block);
1858 gfc_init_se (&rse, NULL);
1859 gfc_init_se (&lse, NULL);
/* Scalar rhs: single store into temp[count1].  */
1861 if (lss == gfc_ss_terminator)
1863 gfc_init_block (&body1);
1864 gfc_conv_expr (&rse, expr2);
1865 lse.expr = gfc_build_array_ref (tmp1, count1);
1869 /* Initialize the loop. */
1870 gfc_init_loopinfo (&loop);
1872 /* We may need LSS to determine the shape of the expression. */
1873 gfc_add_ss_to_loop (&loop, lss);
1874 gfc_add_ss_to_loop (&loop, rss);
1876 gfc_conv_ss_startstride (&loop);
1877 gfc_conv_loop_setup (&loop);
1879 gfc_mark_ss_chain_used (rss, 1);
1880 /* Start the loop body. */
1881 gfc_start_scalarized_body (&loop, &body1);
1883 /* Translate the expression. */
1884 gfc_copy_loopinfo_to_se (&rse, &loop);
1886 gfc_conv_expr (&rse, expr2);
1888 /* Form the expression of the temporary. */
1889 lse.expr = gfc_build_array_ref (tmp1, count1);
1892 /* Use the scalar assignment. */
1893 lse.string_length = rse.string_length;
1894 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
1895 expr2->expr_type == EXPR_VARIABLE);
1897 /* Form the mask expression according to the mask tree list. */
1900 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
/* INVERT flips the sense of the WHERE mask (ELSEWHERE branch).  */
1902 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1903 TREE_TYPE (wheremaskexpr),
1905 tmp = fold_build3 (COND_EXPR, void_type_node,
1906 wheremaskexpr, tmp, build_empty_stmt ());
1909 gfc_add_expr_to_block (&body1, tmp);
1911 if (lss == gfc_ss_terminator)
1913 gfc_add_block_to_block (&block, &body1);
1915 /* Increment count1. */
1916 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1917 gfc_index_one_node);
1918 gfc_add_modify_expr (&block, count1, tmp);
1922 /* Increment count1. */
1923 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1924 count1, gfc_index_one_node);
1925 gfc_add_modify_expr (&body1, count1, tmp);
1927 /* Increment count3. */
1930 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1931 count3, gfc_index_one_node);
1932 gfc_add_modify_expr (&body1, count3, tmp);
1935 /* Generate the copying loops. */
1936 gfc_trans_scalarizing_loops (&loop, &body1);
1938 gfc_add_block_to_block (&block, &loop.pre);
1939 gfc_add_block_to_block (&block, &loop.post);
1941 gfc_cleanup_loop (&loop);
1942 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
1943 as tree nodes in SS may not be valid in different scope. */
1946 tmp = gfc_finish_block (&block);
1951 /* Calculate the size of temporary needed in the assignment inside forall.
1952 LSS and RSS are filled in this function. */
/* Compute the element count of the temporary needed for one assignment
   inside a FORALL (product of the scalarization loop extents).  Fills
   *LSS and *RSS with the walked chains so callers can reuse them.  */
1955 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
1956 stmtblock_t * pblock,
1957 gfc_ss **lss, gfc_ss **rss)
1965 *lss = gfc_walk_expr (expr1);
/* A scalar lhs needs a single element.  */
1968 size = gfc_index_one_node;
1969 if (*lss != gfc_ss_terminator)
1971 gfc_init_loopinfo (&loop);
1973 /* Walk the RHS of the expression. */
1974 *rss = gfc_walk_expr (expr2);
1975 if (*rss == gfc_ss_terminator)
1977 /* The rhs is scalar. Add a ss for the expression. */
1978 *rss = gfc_get_ss ();
1979 (*rss)->next = gfc_ss_terminator;
1980 (*rss)->type = GFC_SS_SCALAR;
1981 (*rss)->expr = expr2;
1984 /* Associate the SS with the loop. */
1985 gfc_add_ss_to_loop (&loop, *lss);
1986 /* We don't actually need to add the rhs at this point, but it might
1987 make guessing the loop bounds a bit easier. */
1988 gfc_add_ss_to_loop (&loop, *rss);
1990 /* We only want the shape of the expression, not rest of the junk
1991 generated by the scalarizer. */
1992 loop.array_parameter = 1;
1994 /* Calculate the bounds of the scalarization. */
/* Temporarily disable bounds checking so no check code is emitted
   while only the shape is being measured.  */
1995 save_flag = flag_bounds_check;
1996 flag_bounds_check = 0;
1997 gfc_conv_ss_startstride (&loop);
1998 flag_bounds_check = save_flag;
1999 gfc_conv_loop_setup (&loop);
2001 /* Figure out how many elements we need. */
2002 for (i = 0; i < loop.dimen; i++)
2004 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2005 gfc_index_one_node, loop.from[i])
2006 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2008 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2010 gfc_add_block_to_block (pblock, &loop.pre);
2011 size = gfc_evaluate_now (size, pblock);
2012 gfc_add_block_to_block (pblock, &loop.post);
2014 /* TODO: write a function that cleans up a loopinfo without freeing
2015 the SS chains. Currently a NOP. */
2022 /* Calculate the overall iterator number of the nested forall construct.
2023 This routine actually calculates the number of times the body of the
2024 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2025 that by the expression INNER_SIZE. The BLOCK argument specifies the
2026 block in which to calculate the result, and the optional INNER_SIZE_BODY
2027 argument contains any statements that need to executed (inside the loop)
2028 to initialize or calculate INNER_SIZE. */
/* Multiply INNER_SIZE by the iteration count of every level of the
   FORALL nest.  Constant-sized, unmasked levels are folded at compile
   time; any remaining levels accumulate into the runtime counter
   "num" inside generated loops.  */
2031 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2032 stmtblock_t *inner_size_body, stmtblock_t *block)
2034 forall_info *forall_tmp = nested_forall_info;
2038 /* We can eliminate the innermost unconditional loops with constant
2040 if (INTEGER_CST_P (inner_size))
2043 && !forall_tmp->mask
2044 && INTEGER_CST_P (forall_tmp->size))
2046 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2047 inner_size, forall_tmp->size);
2048 forall_tmp = forall_tmp->prev_nest;
2051 /* If there are no loops left, we have our constant result. */
2056 /* Otherwise, create a temporary variable to compute the result. */
2057 number = gfc_create_var (gfc_array_index_type, "num");
2058 gfc_add_modify_expr (block, number, gfc_index_zero_node);
/* Loop body: evaluate INNER_SIZE (statements supplied by the caller
   in INNER_SIZE_BODY, if any) and add it to the accumulator.  */
2060 gfc_start_block (&body);
2061 if (inner_size_body)
2062 gfc_add_block_to_block (&body, inner_size_body);
2064 tmp = build2 (PLUS_EXPR, gfc_array_index_type, number,
2068 gfc_add_modify_expr (&body, number, tmp);
2069 tmp = gfc_finish_block (&body);
2071 /* Generate loops. */
2072 if (forall_tmp != NULL)
2073 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2075 gfc_add_expr_to_block (block, tmp);
2081 /* Allocate temporary for forall construct. SIZE is the size of temporary
2082 needed. PTEMP1 is returned so the space can be freed later. */
/* Allocate a temporary holding SIZE elements of TYPE via
   gfc_do_allocate; *PTEMP1 receives the heap pointer (if any) so the
   caller can free it.  NOTE(review): some statements are missing from
   this excerpt (the embedded line numbers jump).  */
2085 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
/* Byte size = element count * element size, folded when the element
   size is 1.  */
2092 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2093 if (!integer_onep (unit))
2094 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2099 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2102 tmp = build_fold_indirect_ref (tmp);
2107 /* Allocate temporary for forall construct according to the information in
2108 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2109 assignment inside forall. PTEMP1 is returned so the space can be freed. */
/* Allocate the temporary for a FORALL nest: total size is INNER_SIZE
   (per-iteration elements, optionally computed by INNER_SIZE_BODY)
   times the nest's overall iteration count.  *PTEMP1 receives the
   heap pointer so the caller can free it.  */
2112 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2113 tree inner_size, stmtblock_t * inner_size_body,
2114 stmtblock_t * block, tree * ptemp1)
2118 /* Calculate the total size of temporary needed in forall construct. */
2119 size = compute_overall_iter_number (nested_forall_info, inner_size,
2120 inner_size_body, block);
2122 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2126 /* Handle assignments inside forall which need temporary.
2128 forall (i=start:end:stride; maskexpr)
2131 (where e,f<i> are arbitrary expressions possibly involving i
2132 and there is a dependency between e<i> and f<i>)
2134 masktmp(:) = maskexpr(:)
2139 for (i = start; i <= end; i += stride)
2143 for (i = start; i <= end; i += stride)
2145 if (masktmp[maskindex++])
2146 tmp[count1++] = f<i>
2150 for (i = start; i <= end; i += stride)
2152 if (masktmp[maskindex++])
2153 e<i> = tmp[count1++]
/* Translate an assignment inside FORALL that needs a temporary because
   the lhs and rhs depend on each other: first run the whole nest to
   evaluate every rhs into a temporary, then run it again to copy the
   temporary into the lhs.  WHEREMASK/INVERT thread the enclosing WHERE
   mask through both passes.  */
2158 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2159 tree wheremask, bool invert,
2160 forall_info * nested_forall_info,
2161 stmtblock_t * block)
2169 stmtblock_t inner_size_body;
2171 /* Create vars. count1 is the current iterator number of the nested
2173 count1 = gfc_create_var (gfc_array_index_type, "count1");
2175 /* Count is the wheremask index. */
2178 count = gfc_create_var (gfc_array_index_type, "count");
2179 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2184 /* Initialize count1. */
2185 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2187 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2188 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2189 gfc_init_block (&inner_size_body);
2190 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2193 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
2194 type = gfc_typenode_for_spec (&expr1->ts);
2196 /* Allocate temporary for nested forall construct according to the
2197 information in nested_forall_info and inner_size. */
2198 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2199 &inner_size_body, block, &ptemp1);
2201 /* Pass 1: generate code to copy the rhs into the temporary. */
2202 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2205 /* Generate body and loops according to the information in
2206 nested_forall_info. */
2207 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2208 gfc_add_expr_to_block (block, tmp);
/* Reset the counters before the second pass over the nest.  */
2211 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2215 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2217 /* Pass 2: generate code to copy the temporary to the lhs. */
2218 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2221 /* Generate body and loops according to the information in
2222 nested_forall_info. */
2223 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2224 gfc_add_expr_to_block (block, tmp);
2228 /* Free the temporary. */
2229 tmp = gfc_call_free (ptemp1);
2230 gfc_add_expr_to_block (block, tmp);
2235 /* Translate pointer assignment inside FORALL which need temporary. */
/* Translate a pointer assignment inside FORALL that needs a temporary.
   Scalar targets: stash the rhs pointers in a temporary over one pass
   of the nest, then assign them to the lhs in a second pass.  Array
   sections: stash whole array descriptors instead.  NOTE(review): the
   embedded line numbers jump in this excerpt, so some statements and
   braces are missing.  */
2238 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2239 forall_info * nested_forall_info,
2240 stmtblock_t * block)
2254 tree tmp, tmp1, ptemp1;
2256 count = gfc_create_var (gfc_array_index_type, "count");
2257 gfc_add_modify_expr (block, count, gfc_index_zero_node);
/* One temp slot per iteration of the nest.  */
2259 inner_size = integer_one_node;
2260 lss = gfc_walk_expr (expr1);
2261 rss = gfc_walk_expr (expr2);
/* Scalar pointer case: the temporary holds raw pointers.  */
2262 if (lss == gfc_ss_terminator)
2264 type = gfc_typenode_for_spec (&expr1->ts);
2265 type = build_pointer_type (type);
2267 /* Allocate temporary for nested forall construct according to the
2268 information in nested_forall_info and inner_size. */
2269 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2270 inner_size, NULL, block, &ptemp1);
/* Pass 1: store the address of each rhs in the temporary.  */
2271 gfc_start_block (&body);
2272 gfc_init_se (&lse, NULL);
2273 lse.expr = gfc_build_array_ref (tmp1, count);
2274 gfc_init_se (&rse, NULL);
2275 rse.want_pointer = 1;
2276 gfc_conv_expr (&rse, expr2);
2277 gfc_add_block_to_block (&body, &rse.pre);
2278 gfc_add_modify_expr (&body, lse.expr,
2279 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2280 gfc_add_block_to_block (&body, &rse.post);
2282 /* Increment count. */
2283 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2284 count, gfc_index_one_node);
2285 gfc_add_modify_expr (&body, count, tmp);
2287 tmp = gfc_finish_block (&body);
2289 /* Generate body and loops according to the information in
2290 nested_forall_info. */
2291 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2292 gfc_add_expr_to_block (block, tmp);
/* Pass 2: assign the saved pointers to the lhs.  */
2295 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2297 gfc_start_block (&body);
2298 gfc_init_se (&lse, NULL);
2299 gfc_init_se (&rse, NULL);
2300 rse.expr = gfc_build_array_ref (tmp1, count);
2301 lse.want_pointer = 1;
2302 gfc_conv_expr (&lse, expr1);
2303 gfc_add_block_to_block (&body, &lse.pre);
2304 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2305 gfc_add_block_to_block (&body, &lse.post);
2306 /* Increment count. */
2307 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2308 count, gfc_index_one_node);
2309 gfc_add_modify_expr (&body, count, tmp);
2310 tmp = gfc_finish_block (&body);
2312 /* Generate body and loops according to the information in
2313 nested_forall_info. */
2314 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2315 gfc_add_expr_to_block (block, tmp);
/* Array-section case: the temporary holds array descriptors.  */
2319 gfc_init_loopinfo (&loop);
2321 /* Associate the SS with the loop. */
2322 gfc_add_ss_to_loop (&loop, rss);
2324 /* Setup the scalarizing loops and bounds. */
2325 gfc_conv_ss_startstride (&loop);
2327 gfc_conv_loop_setup (&loop);
2329 info = &rss->data.info;
2330 desc = info->descriptor;
2332 /* Make a new descriptor. */
2333 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2334 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2335 loop.from, loop.to, 1);
2337 /* Allocate temporary for nested forall construct. */
2338 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2339 inner_size, NULL, block, &ptemp1);
/* Pass 1: build each rhs descriptor directly into the temporary
   (direct_byref writes through lse.expr).  */
2340 gfc_start_block (&body);
2341 gfc_init_se (&lse, NULL);
2342 lse.expr = gfc_build_array_ref (tmp1, count);
2343 lse.direct_byref = 1;
2344 rss = gfc_walk_expr (expr2);
2345 gfc_conv_expr_descriptor (&lse, expr2, rss);
2347 gfc_add_block_to_block (&body, &lse.pre);
2348 gfc_add_block_to_block (&body, &lse.post);
2350 /* Increment count. */
2351 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2352 count, gfc_index_one_node);
2353 gfc_add_modify_expr (&body, count, tmp);
2355 tmp = gfc_finish_block (&body);
2357 /* Generate body and loops according to the information in
2358 nested_forall_info. */
2359 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2360 gfc_add_expr_to_block (block, tmp);
/* Pass 2: copy the saved descriptors into the lhs.  */
2363 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2365 parm = gfc_build_array_ref (tmp1, count);
2366 lss = gfc_walk_expr (expr1);
2367 gfc_init_se (&lse, NULL);
2368 gfc_conv_expr_descriptor (&lse, expr1, lss);
2369 gfc_add_modify_expr (&lse.pre, lse.expr, parm);
2370 gfc_start_block (&body);
2371 gfc_add_block_to_block (&body, &lse.pre);
2372 gfc_add_block_to_block (&body, &lse.post);
2374 /* Increment count. */
2375 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2376 count, gfc_index_one_node);
2377 gfc_add_modify_expr (&body, count, tmp);
2379 tmp = gfc_finish_block (&body);
2381 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2382 gfc_add_expr_to_block (block, tmp);
2384 /* Free the temporary. */
2387 tmp = gfc_call_free (ptemp1);
2388 gfc_add_expr_to_block (block, tmp);
2393 /* FORALL and WHERE statements are really nasty, especially when you nest
2394 them. All the rhs of a forall assignment must be evaluated before the
2395 actual assignments are performed. Presumably this also applies to all the
2396 assignments in an inner where statement. */
2398 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2399 linear array, relying on the fact that we process in the same order in all
2402 forall (i=start:end:stride; maskexpr)
2406 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2408 count = ((end + 1 - start) / stride)
2409 masktmp(:) = maskexpr(:)
2412 for (i = start; i <= end; i += stride)
2414 if (masktmp[maskindex++])
2418 for (i = start; i <= end; i += stride)
2420 if (masktmp[maskindex++])
2424 Note that this code only works when there are no dependencies.
2425 Forall loop with array assignments and data dependencies are a real pain,
2426 because the size of the temporary cannot always be determined before the
2427 loop is executed. This problem is compounded by the presence of nested
2432 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2450 gfc_forall_iterator *fa;
2453 gfc_saved_var *saved_vars;
2454 iter_info *this_forall;
2458 /* Do nothing if the mask is false. */
2460 && code->expr->expr_type == EXPR_CONSTANT
2461 && !code->expr->value.logical)
2462 return build_empty_stmt ();
2465 /* Count the FORALL index number. */
2466 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2470 /* Allocate the space for var, start, end, step, varexpr. */
2471 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2472 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2473 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2474 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2475 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2476 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2478 /* Allocate the space for info. */
2479 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2481 gfc_start_block (&block);
2484 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2486 gfc_symbol *sym = fa->var->symtree->n.sym;
2488 /* Allocate space for this_forall. */
2489 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2491 /* Create a temporary variable for the FORALL index. */
2492 tmp = gfc_typenode_for_spec (&sym->ts);
2493 var[n] = gfc_create_var (tmp, sym->name);
2494 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2496 /* Record it in this_forall. */
2497 this_forall->var = var[n];
2499 /* Replace the index symbol's backend_decl with the temporary decl. */
2500 sym->backend_decl = var[n];
2502 /* Work out the start, end and stride for the loop. */
2503 gfc_init_se (&se, NULL);
2504 gfc_conv_expr_val (&se, fa->start);
2505 /* Record it in this_forall. */
2506 this_forall->start = se.expr;
2507 gfc_add_block_to_block (&block, &se.pre);
2510 gfc_init_se (&se, NULL);
2511 gfc_conv_expr_val (&se, fa->end);
2512 /* Record it in this_forall. */
2513 this_forall->end = se.expr;
2514 gfc_make_safe_expr (&se);
2515 gfc_add_block_to_block (&block, &se.pre);
2518 gfc_init_se (&se, NULL);
2519 gfc_conv_expr_val (&se, fa->stride);
2520 /* Record it in this_forall. */
2521 this_forall->step = se.expr;
2522 gfc_make_safe_expr (&se);
2523 gfc_add_block_to_block (&block, &se.pre);
2526 /* Set the NEXT field of this_forall to NULL. */
2527 this_forall->next = NULL;
2528 /* Link this_forall to the info construct. */
2529 if (info->this_loop)
2531 iter_info *iter_tmp = info->this_loop;
2532 while (iter_tmp->next != NULL)
2533 iter_tmp = iter_tmp->next;
2534 iter_tmp->next = this_forall;
2537 info->this_loop = this_forall;
2543 /* Calculate the size needed for the current forall level. */
2544 size = gfc_index_one_node;
2545 for (n = 0; n < nvar; n++)
2547 /* size = (end + step - start) / step. */
2548 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2550 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2552 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2553 tmp = convert (gfc_array_index_type, tmp);
2555 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2558 /* Record the nvar and size of current forall level. */
2564 /* If the mask is .true., consider the FORALL unconditional. */
2565 if (code->expr->expr_type == EXPR_CONSTANT
2566 && code->expr->value.logical)
2574 /* First we need to allocate the mask. */
2577 /* As the mask array can be very big, prefer compact boolean types. */
2578 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2579 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2580 size, NULL, &block, &pmask);
2581 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2583 /* Record them in the info structure. */
2584 info->maskindex = maskindex;
2589 /* No mask was specified. */
2590 maskindex = NULL_TREE;
2591 mask = pmask = NULL_TREE;
2594 /* Link the current forall level to nested_forall_info. */
2595 info->prev_nest = nested_forall_info;
2596 nested_forall_info = info;
2598 /* Copy the mask into a temporary variable if required.
2599 For now we assume a mask temporary is needed. */
2602 /* As the mask array can be very big, prefer compact boolean types. */
2603 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2605 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2607 /* Start of mask assignment loop body. */
2608 gfc_start_block (&body);
2610 /* Evaluate the mask expression. */
2611 gfc_init_se (&se, NULL);
2612 gfc_conv_expr_val (&se, code->expr);
2613 gfc_add_block_to_block (&body, &se.pre);
2615 /* Store the mask. */
2616 se.expr = convert (mask_type, se.expr);
2618 tmp = gfc_build_array_ref (mask, maskindex);
2619 gfc_add_modify_expr (&body, tmp, se.expr);
2621 /* Advance to the next mask element. */
2622 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
2623 maskindex, gfc_index_one_node);
2624 gfc_add_modify_expr (&body, maskindex, tmp);
2626 /* Generate the loops. */
2627 tmp = gfc_finish_block (&body);
2628 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2629 gfc_add_expr_to_block (&block, tmp);
2632 c = code->block->next;
2634 /* TODO: loop merging in FORALL statements. */
2635 /* Now that we've got a copy of the mask, generate the assignment loops. */
2641 /* A scalar or array assignment. */
2642 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2643 /* Temporaries due to array assignment data dependencies introduce
2644 no end of problems. */
2646 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2647 nested_forall_info, &block);
2650 /* Use the normal assignment copying routines. */
2651 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2653 /* Generate body and loops. */
2654 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2656 gfc_add_expr_to_block (&block, tmp);
2662 /* Translate WHERE or WHERE construct nested in FORALL. */
2663 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2666 /* Pointer assignment inside FORALL. */
2667 case EXEC_POINTER_ASSIGN:
2668 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2670 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2671 nested_forall_info, &block);
2674 /* Use the normal assignment copying routines. */
2675 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2677 /* Generate body and loops. */
2678 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2680 gfc_add_expr_to_block (&block, tmp);
2685 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2686 gfc_add_expr_to_block (&block, tmp);
2689 /* Explicit subroutine calls are prevented by the frontend but interface
2690 assignments can legitimately produce them. */
2691 case EXEC_ASSIGN_CALL:
2692 assign = gfc_trans_call (c, true);
2693 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
2694 gfc_add_expr_to_block (&block, tmp);
2704 /* Restore the original index variables. */
2705 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2706 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2708 /* Free the space for var, start, end, step, varexpr. */
2714 gfc_free (saved_vars);
2716 /* Free the space for this forall_info. */
2721 /* Free the temporary for the mask. */
2722 tmp = gfc_call_free (pmask);
2723 gfc_add_expr_to_block (&block, tmp);
2726 pushdecl (maskindex);
2728 return gfc_finish_block (&block);
2732 /* Translate the FORALL statement or construct. */
2734 tree gfc_trans_forall (gfc_code * code)
2736 return gfc_trans_forall_1 (code, NULL);
2740 /* Evaluate the WHERE mask expression, copy its value to a temporary.
2741 If the WHERE construct is nested in FORALL, compute the overall temporary
2742 needed by the WHERE mask expression multiplied by the iterator number of
2744 ME is the WHERE mask expression.
2745 MASK is the current execution mask upon input, whose sense may or may
2746 not be inverted as specified by the INVERT argument.
2747 CMASK is the updated execution mask on output, or NULL if not required.
2748 PMASK is the pending execution mask on output, or NULL if not required.
2749 BLOCK is the block in which to place the condition evaluation loops. */
2752 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2753 tree mask, bool invert, tree cmask, tree pmask,
2754 tree mask_type, stmtblock_t * block)
2759 stmtblock_t body, body1;
2760 tree count, cond, mtmp;
2763 gfc_init_loopinfo (&loop);
/* ME is walked twice: LSS is kept so the scalarizer can determine the
   shape of the expression (see comment below), RSS is the chain that is
   actually evaluated.  */
2765 lss = gfc_walk_expr (me);
2766 rss = gfc_walk_expr (me);
2768 /* Variable to index the temporary. */
2769 count = gfc_create_var (gfc_array_index_type, "count");
2770 /* Initialize count. */
2771 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2773 gfc_start_block (&body);
2775 gfc_init_se (&rse, NULL);
2776 gfc_init_se (&lse, NULL);
/* Scalar mask expression: no scalarization loop is needed, the
   condition is evaluated straight into BODY1.  */
2778 if (lss == gfc_ss_terminator)
2780 gfc_init_block (&body1);
2784 /* Initialize the loop. */
2785 gfc_init_loopinfo (&loop);
2787 /* We may need LSS to determine the shape of the expression. */
2788 gfc_add_ss_to_loop (&loop, lss);
2789 gfc_add_ss_to_loop (&loop, rss);
2791 gfc_conv_ss_startstride (&loop);
2792 gfc_conv_loop_setup (&loop);
2794 gfc_mark_ss_chain_used (rss, 1);
2795 /* Start the loop body. */
2796 gfc_start_scalarized_body (&loop, &body1);
2798 /* Translate the expression. */
2799 gfc_copy_loopinfo_to_se (&rse, &loop);
2801 gfc_conv_expr (&rse, me);
2804 /* Variable to evaluate mask condition. */
2805 cond = gfc_create_var (mask_type, "cond");
/* MTMP caches the current execution-mask element (inverted when INVERT
   is set, per the fold_build1 below) so the mask array is only read
   once per iteration; it is only needed when an output mask exists.  */
2806 if (mask && (cmask || pmask))
2807 mtmp = gfc_create_var (mask_type, "mask");
2808 else mtmp = NULL_TREE;
2810 gfc_add_block_to_block (&body1, &lse.pre);
2811 gfc_add_block_to_block (&body1, &rse.pre);
2813 gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));
2815 if (mask && (cmask || pmask))
2817 tmp = gfc_build_array_ref (mask, count);
2819 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
2820 gfc_add_modify_expr (&body1, mtmp, tmp);
/* cmask[count] = mask[count] && cond;  the control mask of elements
   that take this WHERE branch.  */
2825 tmp1 = gfc_build_array_ref (cmask, count);
2828 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2829 gfc_add_modify_expr (&body1, tmp1, tmp);
/* pmask[count] = mask[count] && !cond;  the pending mask for the
   subsequent ELSEWHERE clauses.  */
2834 tmp1 = gfc_build_array_ref (pmask, count);
2835 tmp = build1 (TRUTH_NOT_EXPR, mask_type, cond);
2837 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2838 gfc_add_modify_expr (&body1, tmp1, tmp);
2841 gfc_add_block_to_block (&body1, &lse.post);
2842 gfc_add_block_to_block (&body1, &rse.post);
/* Scalar case: BODY1 is appended directly; array case: wrap BODY1 in
   the scalarizing loops and advance COUNT per element.  */
2844 if (lss == gfc_ss_terminator)
2846 gfc_add_block_to_block (&body, &body1);
2850 /* Increment count. */
2851 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
2852 gfc_index_one_node);
2853 gfc_add_modify_expr (&body1, count, tmp1);
2855 /* Generate the copying loops. */
2856 gfc_trans_scalarizing_loops (&loop, &body1);
2858 gfc_add_block_to_block (&body, &loop.pre);
2859 gfc_add_block_to_block (&body, &loop.post);
2861 gfc_cleanup_loop (&loop);
2862 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2863 as tree nodes in SS may not be valid in different scope. */
2866 tmp1 = gfc_finish_block (&body);
2867 /* If the WHERE construct is inside FORALL, fill the full temporary. */
2868 if (nested_forall_info != NULL)
2869 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
2871 gfc_add_expr_to_block (block, tmp1);
2875 /* Translate an assignment statement in a WHERE statement or construct
2876 statement. The MASK expression is used to control which elements
2877 of EXPR1 shall be assigned. The sense of MASK is specified by
2881 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
2882 tree mask, bool invert,
2883 tree count1, tree count2,
2889 gfc_ss *lss_section;
2896 tree index, maskexpr;
2899 /* TODO: handle this special case.
2900 Special case a single function returning an array. */
2901 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
2903 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
2909 /* Assignment of the form lhs = rhs. */
2910 gfc_start_block (&block);
2912 gfc_init_se (&lse, NULL);
2913 gfc_init_se (&rse, NULL);
2916 lss = gfc_walk_expr (expr1);
2919 /* In each where-assign-stmt, the mask-expr and the variable being
2920 defined shall be arrays of the same shape. */
2921 gcc_assert (lss != gfc_ss_terminator);
2923 /* The assignment needs scalarization. */
2926 /* Find a non-scalar SS from the lhs. */
2927 while (lss_section != gfc_ss_terminator
2928 && lss_section->type != GFC_SS_SECTION)
2929 lss_section = lss_section->next;
2931 gcc_assert (lss_section != gfc_ss_terminator);
2933 /* Initialize the scalarizer. */
2934 gfc_init_loopinfo (&loop);
2937 rss = gfc_walk_expr (expr2);
2938 if (rss == gfc_ss_terminator)
2940 /* The rhs is scalar. Add a ss for the expression. */
2941 rss = gfc_get_ss ();
2942 rss->next = gfc_ss_terminator;
2943 rss->type = GFC_SS_SCALAR;
2947 /* Associate the SS with the loop. */
2948 gfc_add_ss_to_loop (&loop, lss);
2949 gfc_add_ss_to_loop (&loop, rss);
2951 /* Calculate the bounds of the scalarization. */
2952 gfc_conv_ss_startstride (&loop);
2954 /* Resolve any data dependencies in the statement. */
2955 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
2957 /* Setup the scalarizing loops. */
2958 gfc_conv_loop_setup (&loop);
2960 /* Setup the gfc_se structures. */
2961 gfc_copy_loopinfo_to_se (&lse, &loop);
2962 gfc_copy_loopinfo_to_se (&rse, &loop);
2965 gfc_mark_ss_chain_used (rss, 1);
/* When dependency resolution created a temporary (loop.temp_ss), the
   lhs is written through the temporary inside the main loop and copied
   to the real lhs after the loop boundary further below.  */
2966 if (loop.temp_ss == NULL)
2969 gfc_mark_ss_chain_used (lss, 1);
2973 lse.ss = loop.temp_ss;
2974 gfc_mark_ss_chain_used (lss, 3);
2975 gfc_mark_ss_chain_used (loop.temp_ss, 3);
2978 /* Start the scalarized loop body. */
2979 gfc_start_scalarized_body (&loop, &body);
2981 /* Translate the expression. */
2982 gfc_conv_expr (&rse, expr2);
2983 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
2985 gfc_conv_tmp_array_ref (&lse);
2986 gfc_advance_se_ss_chain (&lse);
2989 gfc_conv_expr (&lse, expr1);
2991 /* Form the mask expression according to the mask. */
/* maskexpr = mask[index], negated when INVERT requests the inverse
   sense of the control mask.  */
2993 maskexpr = gfc_build_array_ref (mask, index);
2995 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
2997 /* Use the scalar assignment as is. */
2999 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3000 loop.temp_ss != NULL, false);
3002 tmp = gfc_conv_operator_assign (&lse, &rse, sym);
/* Guard the element assignment with the mask: only elements whose
   maskexpr is true are assigned.  */
3004 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3006 gfc_add_expr_to_block (&body, tmp);
/* NOTE(review): lss was asserted != gfc_ss_terminator above, so this
   scalar-lhs branch looks unreachable from the visible code -- confirm
   against the elided portions before relying on it.  */
3008 if (lss == gfc_ss_terminator)
3010 /* Increment count1. */
3011 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3012 count1, gfc_index_one_node);
3013 gfc_add_modify_expr (&body, count1, tmp);
3015 /* Use the scalar assignment as is. */
3016 gfc_add_block_to_block (&block, &body);
3020 gcc_assert (lse.ss == gfc_ss_terminator
3021 && rse.ss == gfc_ss_terminator);
3023 if (loop.temp_ss != NULL)
3025 /* Increment count1 before finish the main body of a scalarized
3027 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3028 count1, gfc_index_one_node);
3029 gfc_add_modify_expr (&body, count1, tmp);
3030 gfc_trans_scalarized_loop_boundary (&loop, &body);
3032 /* We need to copy the temporary to the actual lhs. */
3033 gfc_init_se (&lse, NULL);
3034 gfc_init_se (&rse, NULL);
3035 gfc_copy_loopinfo_to_se (&lse, &loop);
3036 gfc_copy_loopinfo_to_se (&rse, &loop);
3038 rse.ss = loop.temp_ss;
3041 gfc_conv_tmp_array_ref (&rse);
3042 gfc_advance_se_ss_chain (&rse);
3043 gfc_conv_expr (&lse, expr1);
3045 gcc_assert (lse.ss == gfc_ss_terminator
3046 && rse.ss == gfc_ss_terminator);
3048 /* Form the mask expression according to the mask tree list. */
/* The copy-back is guarded by the same mask, indexed by COUNT2.  */
3050 maskexpr = gfc_build_array_ref (mask, index);
3052 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3055 /* Use the scalar assignment as is. */
3056 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3057 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3058 gfc_add_expr_to_block (&body, tmp);
3060 /* Increment count2. */
3061 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3062 count2, gfc_index_one_node);
3063 gfc_add_modify_expr (&body, count2, tmp);
3067 /* Increment count1. */
3068 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3069 count1, gfc_index_one_node);
3070 gfc_add_modify_expr (&body, count1, tmp);
3073 /* Generate the copying loops. */
3074 gfc_trans_scalarizing_loops (&loop, &body);
3076 /* Wrap the whole thing up. */
3077 gfc_add_block_to_block (&block, &loop.pre);
3078 gfc_add_block_to_block (&block, &loop.post);
3079 gfc_cleanup_loop (&loop);
3082 return gfc_finish_block (&block);
3086 /* Translate the WHERE construct or statement.
3087 This function can be called iteratively to translate the nested WHERE
3088 construct or statement.
3089 MASK is the control mask. */
3092 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3093 forall_info * nested_forall_info, stmtblock_t * block)
3095 stmtblock_t inner_size_body;
3096 tree inner_size, size;
3104 tree count1, count2;
3108 tree pcmask = NULL_TREE;
3109 tree ppmask = NULL_TREE;
3110 tree cmask = NULL_TREE;
3111 tree pmask = NULL_TREE;
3112 gfc_actual_arglist *arg;
3114 /* the WHERE statement or the WHERE construct statement. */
3115 cblock = code->block;
3117 /* As the mask array can be very big, prefer compact boolean types. */
3118 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3120 /* Determine which temporary masks are needed. */
/* CMASK is the control mask for the current clause's body, PMASK the
   pending mask handed to later ELSEWHERE clauses; the cases below try
   to avoid allocating either when clause bodies are empty.  */
3123 /* One clause: No ELSEWHEREs. */
3124 need_cmask = (cblock->next != 0);
3127 else if (cblock->block->block)
3129 /* Three or more clauses: Conditional ELSEWHEREs. */
3133 else if (cblock->next)
3135 /* Two clauses, the first non-empty. */
3137 need_pmask = (mask != NULL_TREE
3138 && cblock->block->next != 0);
3140 else if (!cblock->block->next)
3142 /* Two clauses, both empty. */
3146 /* Two clauses, the first empty, the second non-empty. */
3149 need_cmask = (cblock->block->expr != 0);
3158 if (need_cmask || need_pmask)
3160 /* Calculate the size of temporary needed by the mask-expr. */
3161 gfc_init_block (&inner_size_body);
3162 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3163 &inner_size_body, &lss, &rss);
3165 /* Calculate the total size of temporary needed. */
3166 size = compute_overall_iter_number (nested_forall_info, inner_size,
3167 &inner_size_body, block);
3169 /* Allocate temporary for WHERE mask if needed. */
3171 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3174 /* Allocate temporary for !mask if needed. */
3176 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3182 /* Each time around this loop, the where clause is conditional
3183 on the value of mask and invert, which are updated at the
3184 bottom of the loop. */
3186 /* Has mask-expr. */
3189 /* Ensure that the WHERE mask will be evaluated exactly once.
3190 If there are no statements in this WHERE/ELSEWHERE clause,
3191 then we don't need to update the control mask (cmask).
3192 If this is the last clause of the WHERE construct, then
3193 we don't need to update the pending control mask (pmask). */
3195 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3197 cblock->next ? cmask : NULL_TREE,
3198 cblock->block ? pmask : NULL_TREE,
3201 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3203 (cblock->next || cblock->block)
3204 ? cmask : NULL_TREE,
3205 NULL_TREE, mask_type, block);
3209 /* It's a final elsewhere-stmt. No mask-expr is present. */
3213 /* The body of this where clause are controlled by cmask with
3214 sense specified by invert. */
3216 /* Get the assignment statement of a WHERE statement, or the first
3217 statement in where-body-construct of a WHERE construct. */
3218 cnext = cblock->next;
3223 /* WHERE assignment statement. */
3224 case EXEC_ASSIGN_CALL:
/* Interface-assignment call: dig the lhs/rhs out of the actual
   argument list.  */
3226 arg = cnext->ext.actual;
3227 expr1 = expr2 = NULL;
3228 for (; arg; arg = arg->next)
3240 expr1 = cnext->expr;
3241 expr2 = cnext->expr2;
/* Inside a FORALL nest a dependency-driven temporary may be needed;
   otherwise the masked assignment is wrapped in the FORALL loops.  */
3243 if (nested_forall_info != NULL)
3245 need_temp = gfc_check_dependency (expr1, expr2, 0);
3246 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3247 gfc_trans_assign_need_temp (expr1, expr2,
3249 nested_forall_info, block);
3252 /* Variables to control maskexpr. */
3253 count1 = gfc_create_var (gfc_array_index_type, "count1");
3254 count2 = gfc_create_var (gfc_array_index_type, "count2");
3255 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3256 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3258 tmp = gfc_trans_where_assign (expr1, expr2,
3261 cnext->resolved_sym);
3263 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3265 gfc_add_expr_to_block (block, tmp);
3270 /* Variables to control maskexpr. */
3271 count1 = gfc_create_var (gfc_array_index_type, "count1");
3272 count2 = gfc_create_var (gfc_array_index_type, "count2");
3273 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3274 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3276 tmp = gfc_trans_where_assign (expr1, expr2,
3279 cnext->resolved_sym);
3280 gfc_add_expr_to_block (block, tmp);
3285 /* WHERE or WHERE construct is part of a where-body-construct. */
/* Recurse with the current clause's control mask.  */
3287 gfc_trans_where_2 (cnext, cmask, invert,
3288 nested_forall_info, block);
3295 /* The next statement within the same where-body-construct. */
3296 cnext = cnext->next;
3298 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3299 cblock = cblock->block;
3300 if (mask == NULL_TREE)
3302 /* If we're the initial WHERE, we can simply invert the sense
3303 of the current mask to obtain the "mask" for the remaining
3310 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3316 /* If we allocated a pending mask array, deallocate it now. */
3319 tmp = gfc_call_free (ppmask);
3320 gfc_add_expr_to_block (block, tmp);
3323 /* If we allocated a current mask array, deallocate it now. */
3326 tmp = gfc_call_free (pcmask);
3327 gfc_add_expr_to_block (block, tmp);
3331 /* Translate a simple WHERE construct or statement without dependencies.
3332 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3333 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3334 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
3337 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3339 stmtblock_t block, body;
3340 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3341 tree tmp, cexpr, tstmt, estmt;
3342 gfc_ss *css, *tdss, *tsss;
3343 gfc_se cse, tdse, tsse, edse, esse;
/* Pick apart the construct: COND is the mask, TDST/TSRC the "then"
   assignment, EDST/ESRC the optional "else" assignment.  */
3348 cond = cblock->expr;
3349 tdst = cblock->next->expr;
3350 tsrc = cblock->next->expr2;
3351 edst = eblock ? eblock->next->expr : NULL;
3352 esrc = eblock ? eblock->next->expr2 : NULL;
3354 gfc_start_block (&block);
3355 gfc_init_loopinfo (&loop);
3357 /* Handle the condition. */
3358 gfc_init_se (&cse, NULL);
3359 css = gfc_walk_expr (cond);
3360 gfc_add_ss_to_loop (&loop, css);
3362 /* Handle the then-clause. */
3363 gfc_init_se (&tdse, NULL);
3364 gfc_init_se (&tsse, NULL);
3365 tdss = gfc_walk_expr (tdst);
3366 tsss = gfc_walk_expr (tsrc);
/* A scalar rhs gets a synthetic scalar SS so it can join the loop.  */
3367 if (tsss == gfc_ss_terminator)
3369 tsss = gfc_get_ss ();
3370 tsss->next = gfc_ss_terminator;
3371 tsss->type = GFC_SS_SCALAR;
3374 gfc_add_ss_to_loop (&loop, tdss);
3375 gfc_add_ss_to_loop (&loop, tsss);
3379 /* Handle the else clause. */
3380 gfc_init_se (&edse, NULL);
3381 gfc_init_se (&esse, NULL);
3382 edss = gfc_walk_expr (edst);
3383 esss = gfc_walk_expr (esrc);
3384 if (esss == gfc_ss_terminator)
3386 esss = gfc_get_ss ();
3387 esss->next = gfc_ss_terminator;
3388 esss->type = GFC_SS_SCALAR;
3391 gfc_add_ss_to_loop (&loop, edss);
3392 gfc_add_ss_to_loop (&loop, esss);
3395 gfc_conv_ss_startstride (&loop);
3396 gfc_conv_loop_setup (&loop);
3398 gfc_mark_ss_chain_used (css, 1);
3399 gfc_mark_ss_chain_used (tdss, 1);
3400 gfc_mark_ss_chain_used (tsss, 1);
3403 gfc_mark_ss_chain_used (edss, 1);
3404 gfc_mark_ss_chain_used (esss, 1);
3407 gfc_start_scalarized_body (&loop, &body);
3409 gfc_copy_loopinfo_to_se (&cse, &loop);
3410 gfc_copy_loopinfo_to_se (&tdse, &loop);
3411 gfc_copy_loopinfo_to_se (&tsse, &loop);
3417 gfc_copy_loopinfo_to_se (&edse, &loop);
3418 gfc_copy_loopinfo_to_se (&esse, &loop);
3423 gfc_conv_expr (&cse, cond);
3424 gfc_add_block_to_block (&body, &cse.pre);
3427 gfc_conv_expr (&tsse, tsrc);
3428 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3430 gfc_conv_tmp_array_ref (&tdse);
3431 gfc_advance_se_ss_chain (&tdse);
3434 gfc_conv_expr (&tdse, tdst);
3438 gfc_conv_expr (&esse, esrc);
3439 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3441 gfc_conv_tmp_array_ref (&edse);
3442 gfc_advance_se_ss_chain (&edse);
3445 gfc_conv_expr (&edse, edst);
/* Emit "if (cexpr) then-assign else else-assign" per element; with no
   else clause the false arm is an empty statement.  */
3448 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3449 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3450 : build_empty_stmt ();
3451 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3452 gfc_add_expr_to_block (&body, tmp);
3453 gfc_add_block_to_block (&body, &cse.post);
3455 gfc_trans_scalarizing_loops (&loop, &body);
3456 gfc_add_block_to_block (&block, &loop.pre);
3457 gfc_add_block_to_block (&block, &loop.post);
3458 gfc_cleanup_loop (&loop);
3460 return gfc_finish_block (&block);
3463 /* As the WHERE or WHERE construct statement can be nested, we call
3464 gfc_trans_where_2 to do the translation, and pass the initial
3465 NULL values for both the control mask and the pending control mask. */
3468 gfc_trans_where (gfc_code * code)
3474 cblock = code->block;
/* Fast path: single-assignment WHERE (optionally with a single
   ELSEWHERE assignment) whose operands are dependence free can be
   translated by gfc_trans_where_3 without mask temporaries.  */
3476 && cblock->next->op == EXEC_ASSIGN
3477 && !cblock->next->next
3479 eblock = cblock->block;
3482 /* A simple "WHERE (cond) x = y" statement or block is
3483 dependence free if cond is not dependent upon writing x,
3484 and the source y is unaffected by the destination x. */
3485 if (!gfc_check_dependency (cblock->next->expr,
3487 && !gfc_check_dependency (cblock->next->expr,
3488 cblock->next->expr2, 0))
3489 return gfc_trans_where_3 (cblock, NULL);
3491 else if (!eblock->expr
3494 && eblock->next->op == EXEC_ASSIGN
3495 && !eblock->next->next)
3497 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3498 block is dependence free if cond is not dependent on writes
3499 to x1 and x2, y1 is not dependent on writes to x2, and y2
3500 is not dependent on writes to x1, and both y's are not
3501 dependent upon their own x's. */
3502 if (!gfc_check_dependency(cblock->next->expr,
3504 && !gfc_check_dependency(eblock->next->expr,
3506 && !gfc_check_dependency(cblock->next->expr,
3507 eblock->next->expr2, 0)
3508 && !gfc_check_dependency(eblock->next->expr,
3509 cblock->next->expr2, 0)
3510 && !gfc_check_dependency(cblock->next->expr,
3511 cblock->next->expr2, 0)
3512 && !gfc_check_dependency(eblock->next->expr,
3513 eblock->next->expr2, 0))
3514 return gfc_trans_where_3 (cblock, eblock);
/* General path: full translation with mask temporaries.  */
3518 gfc_start_block (&block);
3520 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3522 return gfc_finish_block (&block);
3526 /* CYCLE a DO loop. The label decl has already been created by
3527 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
3528 node at the head of the loop. We must mark the label as used. */
3531 gfc_trans_cycle (gfc_code * code)
3535 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3536 TREE_USED (cycle_label) = 1;
3537 return build1_v (GOTO_EXPR, cycle_label);
3541 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3542 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3546 gfc_trans_exit (gfc_code * code)
3550 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3551 TREE_USED (exit_label) = 1;
3552 return build1_v (GOTO_EXPR, exit_label);
3556 /* Translate the ALLOCATE statement. */
3559 gfc_trans_allocate (gfc_code * code)
/* Nothing to allocate: emit nothing.  */
3571 if (!code->ext.alloc_list)
3574 gfc_start_block (&block)
/* STAT= present (NOTE(review): the guarding condition is elided here --
   presumably code->expr, as in the assignment at the end): create a
   local status variable and an error label so each allocation can bail
   out on failure.  */
3578 tree gfc_int4_type_node = gfc_get_int_type (4);
3580 stat = gfc_create_var (gfc_int4_type_node, "stat");
3581 pstat = build_fold_addr_expr (stat);
3583 error_label = gfc_build_label_decl (NULL_TREE);
3584 TREE_USED (error_label) = 1;
/* No STAT=: the library is passed a null status pointer.  */
3588 pstat = integer_zero_node;
3589 stat = error_label = NULL_TREE;
3593 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3597 gfc_init_se (&se, NULL);
3598 gfc_start_block (&se.pre);
3600 se.want_pointer = 1;
3601 se.descriptor_only = 1;
3602 gfc_conv_expr (&se, expr);
/* gfc_array_allocate handles array objects; if it declines, this is a
   scalar or derived-type allocation done via the library.  */
3604 if (!gfc_array_allocate (&se, expr, pstat))
3606 /* A scalar or derived type. */
3607 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
3609 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3610 tmp = se.string_length;
3612 tmp = build_call_expr (gfor_fndecl_allocate, 2, tmp, pstat);
3613 tmp = build2 (MODIFY_EXPR, void_type_node, se.expr,
3614 fold_convert (TREE_TYPE (se.expr), tmp));
3615 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT= present, jump to the error label when stat != 0 so the
   remaining allocations are skipped.  */
3619 tmp = build1_v (GOTO_EXPR, error_label);
3620 parm = fold_build2 (NE_EXPR, boolean_type_node,
3621 stat, build_int_cst (TREE_TYPE (stat), 0));
3622 tmp = fold_build3 (COND_EXPR, void_type_node,
3623 parm, tmp, build_empty_stmt ());
3624 gfc_add_expr_to_block (&se.pre, tmp);
/* Freshly allocated derived types with allocatable components need
   those components nullified.  */
3627 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3629 tmp = build_fold_indirect_ref (se.expr);
3630 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3631 gfc_add_expr_to_block (&se.pre, tmp);
3636 tmp = gfc_finish_block (&se.pre);
3637 gfc_add_expr_to_block (&block, tmp);
3640 /* Assign the value to the status variable. */
3643 tmp = build1_v (LABEL_EXPR, error_label);
3644 gfc_add_expr_to_block (&block, tmp);
3646 gfc_init_se (&se, NULL);
3647 gfc_conv_expr_lhs (&se, code->expr);
3648 tmp = convert (TREE_TYPE (se.expr), stat);
3649 gfc_add_modify_expr (&block, se.expr, tmp);
3652 return gfc_finish_block (&block);
3656 /* Translate a DEALLOCATE statement.
3657 There are two cases within the for loop:
3658 (1) deallocate(a1, a2, a3) is translated into the following sequence
3659 _gfortran_deallocate(a1, 0B)
3660 _gfortran_deallocate(a2, 0B)
3661 _gfortran_deallocate(a3, 0B)
3662 where the STAT= variable is passed a NULL pointer.
3663 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
3665 _gfortran_deallocate(a1, &stat)
3666 astat = astat + stat
3667 _gfortran_deallocate(a2, &stat)
3668 astat = astat + stat
3669 _gfortran_deallocate(a3, &stat)
3670 astat = astat + stat
3671 In case (1), we simply return at the end of the for loop. In case (2)
3672 we set STAT= astat. */
3674 gfc_trans_deallocate (gfc_code * code)
3679 tree apstat, astat, pstat, stat, tmp;
3682 gfc_start_block (&block);
3684 /* Set up the optional STAT= */
3687 tree gfc_int4_type_node = gfc_get_int_type (4);
3689 /* Variable used with the library call. */
3690 stat = gfc_create_var (gfc_int4_type_node, "stat");
3691 pstat = build_fold_addr_expr (stat);
3693 /* Running total of possible deallocation failures. */
3694 astat = gfc_create_var (gfc_int4_type_node, "astat");
3695 apstat = build_fold_addr_expr (astat);
3697 /* Initialize astat to 0. */
3698 gfc_add_modify_expr (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
/* No STAT=: pass null status pointers to the library.  */
3702 pstat = apstat = null_pointer_node;
3703 stat = astat = NULL_TREE;
3706 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3709 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3711 gfc_init_se (&se, NULL);
3712 gfc_start_block (&se.pre);
3714 se.want_pointer = 1;
3715 se.descriptor_only = 1;
3716 gfc_conv_expr (&se, expr);
/* Derived types with allocatable components: free the components
   first, unless the object is reached through a pointer (component or
   symbol), whose targets must not be freed here.  */
3718 if (expr->ts.type == BT_DERIVED
3719 && expr->ts.derived->attr.alloc_comp)
3722 gfc_ref *last = NULL;
3723 for (ref = expr->ref; ref; ref = ref->next)
3724 if (ref->type == REF_COMPONENT)
3727 /* Do not deallocate the components of a derived type
3728 ultimate pointer component. */
3729 if (!(last && last->u.c.component->pointer)
3730 && !(!last && expr->symtree->n.sym->attr.pointer))
3732 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
3734 gfc_add_expr_to_block (&se.pre, tmp);
/* Arrays go through gfc_array_deallocate, scalars through the library
   _gfortran_deallocate call; afterwards the pointer is zeroed.  */
3739 tmp = gfc_array_deallocate (se.expr, pstat);
3742 tmp = build_call_expr (gfor_fndecl_deallocate, 2, se.expr, pstat);
3743 gfc_add_expr_to_block (&se.pre, tmp);
3745 tmp = build2 (MODIFY_EXPR, void_type_node,
3746 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
3749 gfc_add_expr_to_block (&se.pre, tmp);
3751 /* Keep track of the number of failed deallocations by adding stat
3752 of the last deallocation to the running total. */
3755 apstat = build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
3756 gfc_add_modify_expr (&se.pre, astat, apstat);
3759 tmp = gfc_finish_block (&se.pre);
3760 gfc_add_expr_to_block (&block, tmp);
3764 /* Assign the value to the status variable. */
3767 gfc_init_se (&se, NULL);
3768 gfc_conv_expr_lhs (&se, code->expr);
3769 tmp = convert (TREE_TYPE (se.expr), astat);
3770 gfc_add_modify_expr (&block, se.expr, tmp);
3773 return gfc_finish_block (&block);