1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
40 #include "dependency.h"
/* Linked-list node describing one FORALL iterator variable; further
   members (var, start, end, step) are elided in this excerpt --
   TODO confirm against the full file.  */
42 typedef struct iter_info
48 struct iter_info *next;

/* Per-nesting-level FORALL state; chained outward through prev_nest.
   Remaining members are not visible in this excerpt.  */
52 typedef struct forall_info
59 struct forall_info *prev_nest;
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
66 /* Translate a F95 label number to a LABEL_EXPR. */
/* Emits a LABEL_EXPR for the statement label attached to CODE (code->here).  */
69 gfc_trans_label_here (gfc_code * code)
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables. For variables in common blocks this
/* Only valid for symbols carrying the ASSIGN attribute (asserted below).
   On return, se->expr is the decl holding the label's auxiliary data.  */
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deals with variable in common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deals with dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
92 /* Translate a label assignment statement. */
/* Stores either the address of a branch-target label (with length -1)
   or the text of a FORMAT label (with its length) into the auxiliary
   ASSIGN variables of the LHS.  Some declarations are elided in this
   excerpt.  */
95 gfc_trans_label_assign (gfc_code * code)
104 /* Start a new block. */
105 gfc_init_se (&se, NULL);
106 gfc_start_block (&se.pre);
107 gfc_conv_label_variable (&se, code->expr);
109 len = GFC_DECL_STRING_LEN (se.expr);
110 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
112 label_tree = gfc_get_label_decl (code->label);
/* Branch target: record the label's address and flag the length as -1
   so a later assigned GOTO can verify it is a real target.  */
114 if (code->label->defined == ST_LABEL_TARGET)
116 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
117 len_tree = integer_minus_one_node;
/* FORMAT label: record the format string and its length instead.  */
121 gfc_expr *format = code->label->format;
123 label_len = format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
125 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
126 format->value.character.string);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
130 gfc_add_modify (&se.pre, len, len_tree);
131 gfc_add_modify (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
136 /* Translate a GOTO statement. */
/* Handles both the plain GOTO (direct jump) and the assigned GOTO,
   which dereferences the address stored by a prior ASSIGN after a
   runtime check that the label was a branch target.  Control-flow
   lines of the label-list loop are elided in this excerpt.  */
139 gfc_trans_goto (gfc_code * code)
141 locus loc = code->loc;
/* Ordinary GOTO: jump straight to the statement label.  */
147 if (code->label != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr);
/* The stored length is -1 only for target labels (see
   gfc_trans_label_assign); trap FORMAT labels at runtime.  */
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
158 "Assigned label is not a target label");
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
/* No label list given: jump through the stored address directly.  */
165 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
166 gfc_add_expr_to_block (&se.pre, target);
167 return gfc_finish_block (&se.pre);
170 /* Check the label list. */
/* Compare the stored address against each listed label and jump on
   a match; falls through to the runtime error when none matches.  */
173 target = gfc_get_label_decl (code->label);
174 tmp = gfc_build_addr_expr (pvoid_type_node, target);
175 tmp = fold_build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
176 tmp = build3_v (COND_EXPR, tmp,
177 fold_build1 (GOTO_EXPR, void_type_node, target),
178 build_empty_stmt ());
179 gfc_add_expr_to_block (&se.pre, tmp);
182 while (code != NULL);
183 gfc_trans_runtime_check (true, false, boolean_true_node, &se.pre, &loc,
184 "Assigned label is not in the list");
186 return gfc_finish_block (&se.pre);
190 /* Translate an ENTRY statement. Just adds a label for this entry point. */
192 gfc_trans_entry (gfc_code * code)
194 return build1_v (LABEL_EXPR, code->ext.entry->label);
198 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
199 elemental subroutines. Make temporaries for output arguments if any such
200 dependencies are found. Output arguments are chosen because internal_unpack
201 can be used, as is, to copy the result back to the variable. */
/* SE collects setup/teardown code; LOOPSE carries the scalarizer state
   for the elemental call; SYM/ARG are the resolved subroutine and its
   actual arguments.  Several declarations and closing braces are elided
   in this excerpt.  */
203 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
204 gfc_symbol * sym, gfc_actual_arglist * arg)
206 gfc_actual_arglist *arg0;
208 gfc_formal_arglist *formal;
209 gfc_loopinfo tmp_loop;
/* Nothing to do unless the call was scalarized.  */
221 if (loopse->ss == NULL)
226 formal = sym->formal;
228 /* Loop over all the arguments testing for dependencies. */
229 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
235 /* Obtain the info structure for the current argument. */
237 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
241 info = &ss->data.info;
245 /* If there is a dependency, create a temporary and use it
246 instead of the variable. */
247 fsym = formal ? formal->sym : NULL;
248 if (e->expr_type == EXPR_VARIABLE
250 && fsym->attr.intent != INTENT_IN
251 && gfc_check_fncall_dependency (e, fsym->attr.intent,
254 /* Make a local loopinfo for the temporary creation, so that
255 none of the other ss->info's have to be renormalized. */
256 gfc_init_loopinfo (&tmp_loop);
257 for (n = 0; n < info->dimen; n++)
259 tmp_loop.to[n] = loopse->loop->to[n];
260 tmp_loop.from[n] = loopse->loop->from[n];
261 tmp_loop.order[n] = loopse->loop->order[n];
264 /* Generate the temporary. Merge the block so that the
265 declarations are put at the right binding level. */
266 size = gfc_create_var (gfc_array_index_type, NULL);
267 data = gfc_create_var (pvoid_type_node, NULL);
268 gfc_start_block (&block);
269 tmp = gfc_typenode_for_spec (&e->ts);
270 tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
271 &tmp_loop, info, tmp,
274 gfc_add_modify (&se->pre, size, tmp);
275 tmp = fold_convert (pvoid_type_node, info->data);
276 gfc_add_modify (&se->pre, data, tmp);
277 gfc_merge_block_scope (&block);
279 /* Obtain the argument descriptor for unpacking. */
280 gfc_init_se (&parmse, NULL);
281 parmse.want_pointer = 1;
282 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
283 gfc_add_block_to_block (&se->pre, &parmse.pre);
285 /* Calculate the offset for the temporary. */
/* offset = -sum(from[n] * stride[n]), so that element (from...)
   of the original maps to element 0 of the temporary.  */
286 offset = gfc_index_zero_node;
287 for (n = 0; n < info->dimen; n++)
289 tmp = gfc_conv_descriptor_stride (info->descriptor,
291 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
292 loopse->loop->from[n], tmp);
293 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
296 info->offset = gfc_create_var (gfc_array_index_type, NULL);
297 gfc_add_modify (&se->pre, info->offset, offset);
299 /* Copy the result back using unpack. */
/* The copy-back runs in se->post, i.e. after the elemental call.  */
300 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
301 gfc_add_expr_to_block (&se->post, tmp);
303 gfc_add_block_to_block (&se->post, &parmse.post);
309 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
/* Two paths: a plain call for scalar/non-elemental cases, and a
   scalarized loop for elemental subroutines with array arguments.
   DEPENDENCY_CHECK triggers temporary creation for operator
   assignment (see gfc_conv_elemental_dependencies).  Some
   declarations are elided in this excerpt.  */
312 gfc_trans_call (gfc_code * code, bool dependency_check)
316 int has_alternate_specifier;
318 /* A CALL starts a new block because the actual arguments may have to
319 be evaluated first. */
320 gfc_init_se (&se, NULL);
321 gfc_start_block (&se.pre);
323 gcc_assert (code->resolved_sym);
325 ss = gfc_ss_terminator;
326 if (code->resolved_sym->attr.elemental)
327 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
329 /* Is not an elemental subroutine call with array valued arguments. */
330 if (ss == gfc_ss_terminator)
333 /* Translate the call. */
334 has_alternate_specifier
335 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
338 /* A subroutine without side-effect, by definition, does nothing! */
/* Force the call to survive even if the middle end thinks it is pure.  */
339 TREE_SIDE_EFFECTS (se.expr) = 1;
341 /* Chain the pieces together and return the block. */
/* An alternate return is lowered to an integer result assigned to the
   variable of the following (front-end generated) SELECT statement.  */
342 if (has_alternate_specifier)
344 gfc_code *select_code;
346 select_code = code->next;
347 gcc_assert(select_code->op == EXEC_SELECT);
348 sym = select_code->expr->symtree->n.sym;
349 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
350 if (sym->backend_decl == NULL)
351 sym->backend_decl = gfc_get_symbol_decl (sym);
352 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
355 gfc_add_expr_to_block (&se.pre, se.expr);
357 gfc_add_block_to_block (&se.pre, &se.post);
362 /* An elemental subroutine call with array valued arguments has
369 /* gfc_walk_elemental_function_args renders the ss chain in the
370 reverse order to the actual argument order. */
371 ss = gfc_reverse_ss (ss);
373 /* Initialize the loop. */
374 gfc_init_se (&loopse, NULL);
375 gfc_init_loopinfo (&loop);
376 gfc_add_ss_to_loop (&loop, ss);
378 gfc_conv_ss_startstride (&loop);
379 gfc_conv_loop_setup (&loop, &code->expr->where);
380 gfc_mark_ss_chain_used (ss, 1);
382 /* Convert the arguments, checking for dependencies. */
383 gfc_copy_loopinfo_to_se (&loopse, &loop);
386 /* For operator assignment, do dependency checking. */
387 if (dependency_check)
390 sym = code->resolved_sym;
391 gfc_conv_elemental_dependencies (&se, &loopse, sym,
395 /* Generate the loop body. */
396 gfc_start_scalarized_body (&loop, &body);
397 gfc_init_block (&block);
399 /* Add the subroutine call to the block. */
400 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
402 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
404 gfc_add_block_to_block (&block, &loopse.pre);
405 gfc_add_block_to_block (&block, &loopse.post);
407 /* Finish up the loop block and the loop. */
408 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
409 gfc_trans_scalarizing_loops (&loop, &body);
410 gfc_add_block_to_block (&se.pre, &loop.pre);
411 gfc_add_block_to_block (&se.pre, &loop.post);
412 gfc_add_block_to_block (&se.pre, &se.post);
413 gfc_cleanup_loop (&loop);
416 return gfc_finish_block (&se.pre);
420 /* Translate the RETURN statement. */
/* RETURN with an expression (alternate return) stores the value into
   the fake result decl before branching to the function's return
   label; a bare RETURN just branches.  Some declarations and the
   surrounding conditionals are elided in this excerpt.  */
423 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
431 /* If code->expr is not NULL, this return statement must appear
432 in a subroutine and current_fake_result_decl has already
435 result = gfc_get_fake_result_decl (NULL, 0);
/* No * dummy argument to receive the alternate return: warn and
   fall back to a plain return.  */
438 gfc_warning ("An alternate return at %L without a * dummy argument",
440 return build1_v (GOTO_EXPR, gfc_get_return_label ());
443 /* Start a new block for this statement. */
444 gfc_init_se (&se, NULL);
445 gfc_start_block (&se.pre);
447 gfc_conv_expr (&se, code->expr);
449 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
450 fold_convert (TREE_TYPE (result), se.expr));
451 gfc_add_expr_to_block (&se.pre, tmp);
453 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
454 gfc_add_expr_to_block (&se.pre, tmp);
455 gfc_add_block_to_block (&se.pre, &se.post);
456 return gfc_finish_block (&se.pre);
/* Plain RETURN: branch to the shared return label.  */
459 return build1_v (GOTO_EXPR, gfc_get_return_label ());
463 /* Translate the PAUSE statement. We have to translate this statement
464 to a runtime library call. */
/* Dispatches to _gfortran_pause_numeric for PAUSE <n> and to
   _gfortran_pause_string for PAUSE 'message'.  */
467 gfc_trans_pause (gfc_code * code)
469 tree gfc_int4_type_node = gfc_get_int_type (4);
473 /* Start a new block for this statement. */
474 gfc_init_se (&se, NULL);
475 gfc_start_block (&se.pre);
/* No expression: pass the numeric stop code.  */
478 if (code->expr == NULL)
480 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
481 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
/* Otherwise pass the character expression and its length.  */
485 gfc_conv_expr_reference (&se, code->expr);
486 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
487 se.expr, se.string_length);
490 gfc_add_expr_to_block (&se.pre, tmp);
492 gfc_add_block_to_block (&se.pre, &se.post);
494 return gfc_finish_block (&se.pre);
498 /* Translate the STOP statement. We have to translate this statement
499 to a runtime library call. */
/* Mirrors gfc_trans_pause: numeric stop code vs. character message.  */
502 gfc_trans_stop (gfc_code * code)
504 tree gfc_int4_type_node = gfc_get_int_type (4);
508 /* Start a new block for this statement. */
509 gfc_init_se (&se, NULL);
510 gfc_start_block (&se.pre);
513 if (code->expr == NULL)
515 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
516 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
520 gfc_conv_expr_reference (&se, code->expr);
521 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
522 se.expr, se.string_length);
525 gfc_add_expr_to_block (&se.pre, tmp);
527 gfc_add_block_to_block (&se.pre, &se.post);
529 return gfc_finish_block (&se.pre);
533 /* Generate GENERIC for the IF construct. This function also deals with
534 the simple IF statement, because the front end translates the IF
535 statement into an IF construct.
567 where COND_S is the simplified version of the predicate. PRE_COND_S
568 are the pre side-effects produced by the translation of the
570 We need to build the chain recursively otherwise we run into
571 problems with folding incomplete statements. */
/* Recursive worker: each call handles one IF/ELSE IF arm; the ELSE
   chain is produced by recursing on code->block.  */
574 gfc_trans_if_1 (gfc_code * code)
579 /* Check for an unconditional ELSE clause. */
581 return gfc_trans_code (code->next);
583 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
584 gfc_init_se (&if_se, NULL);
585 gfc_start_block (&if_se.pre);
587 /* Calculate the IF condition expression. */
588 gfc_conv_expr_val (&if_se, code->expr);
590 /* Translate the THEN clause. */
591 stmt = gfc_trans_code (code->next);
593 /* Translate the ELSE clause. */
595 elsestmt = gfc_trans_if_1 (code->block);
597 elsestmt = build_empty_stmt ();
599 /* Build the condition expression and add it to the condition block. */
600 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
602 gfc_add_expr_to_block (&if_se.pre, stmt);
604 /* Finish off this statement. */
605 return gfc_finish_block (&if_se.pre);
/* Public entry point for IF translation; delegates to gfc_trans_if_1.  */
609 gfc_trans_if (gfc_code * code)
611 /* Ignore the top EXEC_IF, it only announces an IF construct. The
612 actual code we must translate is in code->block. */
614 return gfc_trans_if_1 (code->block);
618 /* Translate an arithmetic IF expression.
620 IF (cond) label1, label2, label3 translates to
632 An optimized version can be generated in case of equal labels.
633 E.g., if label1 is equal to label2, we can translate it to
/* Builds nested COND_EXPRs comparing the pre-evaluated condition
   against zero, collapsing branches whose labels coincide.  Some
   declarations are elided in this excerpt.  */
642 gfc_trans_arithmetic_if (gfc_code * code)
650 /* Start a new block. */
651 gfc_init_se (&se, NULL);
652 gfc_start_block (&se.pre);
654 /* Pre-evaluate COND. */
655 gfc_conv_expr_val (&se, code->expr);
656 se.expr = gfc_evaluate_now (se.expr, &se.pre);
658 /* Build something to compare with. */
659 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
661 if (code->label->value != code->label2->value)
663 /* If (cond < 0) take branch1 else take branch2.
664 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
665 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
666 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
/* When label1 == label3 the negative and positive cases merge, so the
   test degenerates to cond != 0.  */
668 if (code->label->value != code->label3->value)
669 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
671 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
673 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
676 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
678 if (code->label->value != code->label3->value
679 && code->label2->value != code->label3->value)
681 /* if (cond <= 0) take branch1 else take branch2. */
682 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
683 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
684 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
687 /* Append the COND_EXPR to the evaluation of COND, and return. */
688 gfc_add_expr_to_block (&se.pre, branch1);
689 return gfc_finish_block (&se.pre);
693 /* Translate the simple DO construct. This is where the loop variable has
694 integer type and step +-1. We can't use this in the general case
695 because integer overflow and floating point errors could give incorrect
697 We translate a do loop from:
699 DO dovar = from, to, step
705 [Evaluate loop bounds and step]
707 if ((step > 0) ? (dovar <= to) : (dovar => to))
713 cond = (dovar == to);
715 if (cond) goto end_label;
720 This helps the optimizers by avoiding the extra induction variable
721 used in the general case. */
/* Caller (gfc_trans_do) guarantees STEP is a +1/-1 integer constant,
   so dovar == to can never be skipped over and no separate count is
   needed.  Returns the finished block built on PBLOCK.  */
724 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
725 tree from, tree to, tree step)
734 type = TREE_TYPE (dovar);
736 /* Initialize the DO variable: dovar = from. */
737 gfc_add_modify (pblock, dovar, from);
739 /* Cycle and exit statements are implemented with gotos. */
740 cycle_label = gfc_build_label_decl (NULL_TREE);
741 exit_label = gfc_build_label_decl (NULL_TREE);
743 /* Put the labels where they can be found later. See gfc_trans_do(). */
744 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
747 gfc_start_block (&body);
749 /* Main loop body. */
750 tmp = gfc_trans_code (code->block->next);
751 gfc_add_expr_to_block (&body, tmp);
753 /* Label for cycle statements (if needed). */
754 if (TREE_USED (cycle_label))
756 tmp = build1_v (LABEL_EXPR, cycle_label);
757 gfc_add_expr_to_block (&body, tmp);
760 /* Evaluate the loop condition. */
/* The exit test dovar == to is evaluated BEFORE the increment, so the
   final iteration never overflows dovar past TO.  */
761 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
762 cond = gfc_evaluate_now (cond, &body);
764 /* Increment the loop variable. */
765 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
766 gfc_add_modify (&body, dovar, tmp);
769 tmp = build1_v (GOTO_EXPR, exit_label);
770 TREE_USED (exit_label) = 1;
771 tmp = fold_build3 (COND_EXPR, void_type_node,
772 cond, tmp, build_empty_stmt ());
773 gfc_add_expr_to_block (&body, tmp);
775 /* Finish the loop body. */
776 tmp = gfc_finish_block (&body);
777 tmp = build1_v (LOOP_EXPR, tmp);
779 /* Only execute the loop if the number of iterations is positive. */
780 if (tree_int_cst_sgn (step) > 0)
781 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
783 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
784 tmp = fold_build3 (COND_EXPR, void_type_node,
785 cond, tmp, build_empty_stmt ());
786 gfc_add_expr_to_block (pblock, tmp);
788 /* Add the exit label. */
789 tmp = build1_v (LABEL_EXPR, exit_label);
790 gfc_add_expr_to_block (pblock, tmp);
792 return gfc_finish_block (pblock);
795 /* Translate the DO construct. This obviously is one of the most
796 important ones to get right with any compiler, but especially
799 We special case some loop forms as described in gfc_trans_simple_do.
800 For other cases we implement them with a separate loop count,
801 as described in the standard.
803 We translate a do loop from:
805 DO dovar = from, to, step
811 [evaluate loop bounds and step]
812 empty = (step > 0 ? to < from : to > from);
813 countm1 = (to - from) / step;
815 if (empty) goto exit_label;
821 if (countm1 ==0) goto exit_label;
826 countm1 is an unsigned integer. It is equal to the loop count minus one,
827 because the loop count itself can overflow. */
830 gfc_trans_do (gfc_code * code)
849 gfc_start_block (&block);
851 /* Evaluate all the expressions in the iterator. */
852 gfc_init_se (&se, NULL);
853 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
854 gfc_add_block_to_block (&block, &se.pre);
856 type = TREE_TYPE (dovar);
858 gfc_init_se (&se, NULL);
859 gfc_conv_expr_val (&se, code->ext.iterator->start);
860 gfc_add_block_to_block (&block, &se.pre);
/* Bounds and step are each evaluated exactly once, before the loop.  */
861 from = gfc_evaluate_now (se.expr, &block);
863 gfc_init_se (&se, NULL);
864 gfc_conv_expr_val (&se, code->ext.iterator->end);
865 gfc_add_block_to_block (&block, &se.pre);
866 to = gfc_evaluate_now (se.expr, &block);
868 gfc_init_se (&se, NULL);
869 gfc_conv_expr_val (&se, code->ext.iterator->step);
870 gfc_add_block_to_block (&block, &se.pre);
871 step = gfc_evaluate_now (se.expr, &block);
873 /* Special case simple loops. */
874 if (TREE_CODE (type) == INTEGER_TYPE
875 && (integer_onep (step)
876 || tree_int_cst_equal (step, integer_minus_one_node)))
877 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
879 /* We need a special check for empty loops:
880 empty = (step > 0 ? to < from : to > from); */
881 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
882 fold_convert (type, integer_zero_node));
883 empty = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
884 fold_build2 (LT_EXPR, boolean_type_node, to, from),
885 fold_build2 (GT_EXPR, boolean_type_node, to, from));
887 /* Initialize loop count. This code is executed before we enter the
888 loop body. We generate: countm1 = abs(to - from) / abs(step). */
889 if (TREE_CODE (type) == INTEGER_TYPE)
893 utype = unsigned_type_for (type);
895 /* tmp = abs(to - from) / abs(step) */
/* The subtraction is ordered by the sign of step so the difference is
   non-negative, then divided in the unsigned type to avoid overflow.  */
896 ustep = fold_convert (utype, fold_build1 (ABS_EXPR, type, step));
897 tmp = fold_build3 (COND_EXPR, type, pos_step,
898 fold_build2 (MINUS_EXPR, type, to, from),
899 fold_build2 (MINUS_EXPR, type, from, to));
900 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, fold_convert (utype, tmp),
905 /* TODO: We could use the same width as the real type.
906 This would probably cause more problems that it solves
907 when we implement "long double" types. */
/* Non-integer (REAL) DO variable: compute the trip count in floating
   point and truncate to an unsigned integer.  */
908 utype = unsigned_type_for (gfc_array_index_type);
909 tmp = fold_build2 (MINUS_EXPR, type, to, from);
910 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
911 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
913 countm1 = gfc_create_var (utype, "countm1");
914 gfc_add_modify (&block, countm1, tmp);
916 /* Cycle and exit statements are implemented with gotos. */
917 cycle_label = gfc_build_label_decl (NULL_TREE);
918 exit_label = gfc_build_label_decl (NULL_TREE);
919 TREE_USED (exit_label) = 1;
921 /* Initialize the DO variable: dovar = from. */
922 gfc_add_modify (&block, dovar, from);
924 /* If the loop is empty, go directly to the exit label. */
925 tmp = fold_build3 (COND_EXPR, void_type_node, empty,
926 build1_v (GOTO_EXPR, exit_label), build_empty_stmt ());
927 gfc_add_expr_to_block (&block, tmp);
930 gfc_start_block (&body);
932 /* Put these labels where they can be found later. We put the
933 labels in a TREE_LIST node (because TREE_CHAIN is already
934 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
935 label in TREE_VALUE (backend_decl). */
937 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
939 /* Main loop body. */
940 tmp = gfc_trans_code (code->block->next);
941 gfc_add_expr_to_block (&body, tmp);
943 /* Label for cycle statements (if needed). */
944 if (TREE_USED (cycle_label))
946 tmp = build1_v (LABEL_EXPR, cycle_label);
947 gfc_add_expr_to_block (&body, tmp);
950 /* Increment the loop variable. */
951 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
952 gfc_add_modify (&body, dovar, tmp);
954 /* End with the loop condition. Loop until countm1 == 0. */
955 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
956 build_int_cst (utype, 0));
957 tmp = build1_v (GOTO_EXPR, exit_label);
958 tmp = fold_build3 (COND_EXPR, void_type_node,
959 cond, tmp, build_empty_stmt ());
960 gfc_add_expr_to_block (&body, tmp);
962 /* Decrement the loop count. */
/* Decrement happens after the == 0 test, so countm1 (unsigned) never
   wraps below zero on the final iteration.  */
963 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
964 gfc_add_modify (&body, countm1, tmp);
966 /* End of loop body. */
967 tmp = gfc_finish_block (&body);
969 /* The for loop itself. */
970 tmp = build1_v (LOOP_EXPR, tmp);
971 gfc_add_expr_to_block (&block, tmp);
973 /* Add the exit label. */
974 tmp = build1_v (LABEL_EXPR, exit_label);
975 gfc_add_expr_to_block (&block, tmp);
977 return gfc_finish_block (&block);
981 /* Translate the DO WHILE construct.
994 if (! cond) goto exit_label;
1000 Because the evaluation of the exit condition `cond' may have side
1001 effects, we can't do much for empty loop bodies. The backend optimizers
1002 should be smart enough to eliminate any dead loops. */
1005 gfc_trans_do_while (gfc_code * code)
1013 /* Everything we build here is part of the loop body. */
/* The condition is re-evaluated inside the loop body on every
   iteration, which is why its pre-code is added to BLOCK here.  */
1014 gfc_start_block (&block);
1016 /* Cycle and exit statements are implemented with gotos. */
1017 cycle_label = gfc_build_label_decl (NULL_TREE);
1018 exit_label = gfc_build_label_decl (NULL_TREE);
1020 /* Put the labels where they can be found later. See gfc_trans_do(). */
1021 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1023 /* Create a GIMPLE version of the exit condition. */
1024 gfc_init_se (&cond, NULL);
1025 gfc_conv_expr_val (&cond, code->expr);
1026 gfc_add_block_to_block (&block, &cond.pre);
1027 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1029 /* Build "IF (! cond) GOTO exit_label". */
1030 tmp = build1_v (GOTO_EXPR, exit_label);
1031 TREE_USED (exit_label) = 1;
1032 tmp = fold_build3 (COND_EXPR, void_type_node,
1033 cond.expr, tmp, build_empty_stmt ());
1034 gfc_add_expr_to_block (&block, tmp);
1036 /* The main body of the loop. */
1037 tmp = gfc_trans_code (code->block->next);
1038 gfc_add_expr_to_block (&block, tmp);
1040 /* Label for cycle statements (if needed). */
1041 if (TREE_USED (cycle_label))
1043 tmp = build1_v (LABEL_EXPR, cycle_label);
1044 gfc_add_expr_to_block (&block, tmp);
1047 /* End of loop body. */
1048 tmp = gfc_finish_block (&block);
1050 gfc_init_block (&block);
1051 /* Build the loop. */
1052 tmp = build1_v (LOOP_EXPR, tmp);
1053 gfc_add_expr_to_block (&block, tmp);
1055 /* Add the exit label. */
1056 tmp = build1_v (LABEL_EXPR, exit_label);
1057 gfc_add_expr_to_block (&block, tmp);
1059 return gfc_finish_block (&block);
1063 /* Translate the SELECT CASE construct for INTEGER case expressions,
1064 without killing all potential optimizations. The problem is that
1065 Fortran allows unbounded cases, but the back-end does not, so we
1066 need to intercept those before we enter the equivalent SWITCH_EXPR
1069 For example, we translate this,
1072 CASE (:100,101,105:115)
1082 to the GENERIC equivalent,
1086 case (minimum value for typeof(expr) ... 100:
1092 case 200 ... (maximum value for typeof(expr):
/* Unbounded CASE ranges are clamped to TYPE_MIN_VALUE/TYPE_MAX_VALUE
   of the selector type so the middle end sees only bounded ranges.
   Some declarations are elided in this excerpt.  */
1109 gfc_trans_integer_select (gfc_code * code)
1119 gfc_start_block (&block);
1121 /* Calculate the switch expression. */
1122 gfc_init_se (&se, NULL);
1123 gfc_conv_expr_val (&se, code->expr);
1124 gfc_add_block_to_block (&block, &se.pre);
1126 end_label = gfc_build_label_decl (NULL_TREE);
1128 gfc_init_block (&body);
1130 for (c = code->block; c; c = c->block)
1132 for (cp = c->ext.case_list; cp; cp = cp->next)
1137 /* Assume it's the default case. */
1138 low = high = NULL_TREE;
1142 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1145 /* If there's only a lower bound, set the high bound to the
1146 maximum value of the case expression. */
1148 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1153 /* Three cases are possible here:
1155 1) There is no lower bound, e.g. CASE (:N).
1156 2) There is a lower bound .NE. high bound, that is
1157 a case range, e.g. CASE (N:M) where M>N (we make
1158 sure that M>N during type resolution).
1159 3) There is a lower bound, and it has the same value
1160 as the high bound, e.g. CASE (N:N). This is our
1161 internal representation of CASE(N).
1163 In the first and second case, we need to set a value for
1164 high. In the third case, we don't because the GCC middle
1165 end represents a single case value by just letting high be
1166 a NULL_TREE. We can't do that because we need to be able
1167 to represent unbounded cases. */
1171 && mpz_cmp (cp->low->value.integer,
1172 cp->high->value.integer) != 0))
1173 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1176 /* Unbounded case. */
1178 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1181 /* Build a label. */
1182 label = gfc_build_label_decl (NULL_TREE);
1184 /* Add this case label.
1185 Add parameter 'label', make it match GCC backend. */
1186 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1188 gfc_add_expr_to_block (&body, tmp);
1191 /* Add the statements for this case. */
1192 tmp = gfc_trans_code (c->next);
1193 gfc_add_expr_to_block (&body, tmp);
1195 /* Break to the end of the construct. */
/* Fortran CASE blocks never fall through; emit an explicit goto.  */
1196 tmp = build1_v (GOTO_EXPR, end_label);
1197 gfc_add_expr_to_block (&body, tmp);
1200 tmp = gfc_finish_block (&body);
1201 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1202 gfc_add_expr_to_block (&block, tmp);
1204 tmp = build1_v (LABEL_EXPR, end_label);
1205 gfc_add_expr_to_block (&block, tmp);
1207 return gfc_finish_block (&block);
1211 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1213 There are only two cases possible here, even though the standard
1214 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1215 .FALSE., and DEFAULT.
1217 We never generate more than two blocks here. Instead, we always
1218 try to eliminate the DEFAULT case. This way, we can translate this
1219 kind of SELECT construct to a simple
1223 expression in GENERIC. */
1226 gfc_trans_logical_select (gfc_code * code)
1229 gfc_code *t, *f, *d;
1234 /* Assume we don't have any cases at all. */
1237 /* Now see which ones we actually do have. We can have at most two
1238 cases in a single case list: one for .TRUE. and one for .FALSE.
1239 The default case is always separate. If the cases for .TRUE. and
1240 .FALSE. are in the same case list, the block for that case list
1241 always executed, and we don't generate code a COND_EXPR. */
1242 for (c = code->block; c; c = c->block)
1244 for (cp = c->ext.case_list; cp; cp = cp->next)
/* Record which block handles .FALSE., .TRUE. and DEFAULT into
   f, t and d respectively; the assignments are elided here.  */
1248 if (cp->low->value.logical == 0) /* .FALSE. */
1250 else /* if (cp->value.logical != 0), thus .TRUE. */
1258 /* Start a new block. */
1259 gfc_start_block (&block);
1261 /* Calculate the switch expression. We always need to do this
1262 because it may have side effects. */
1263 gfc_init_se (&se, NULL);
1264 gfc_conv_expr_val (&se, code->expr);
1265 gfc_add_block_to_block (&block, &se.pre);
1267 if (t == f && t != NULL)
1269 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1270 translate the code for these cases, append it to the current
1272 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1276 tree true_tree, false_tree, stmt;
1278 true_tree = build_empty_stmt ();
1279 false_tree = build_empty_stmt ();
1281 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1282 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1283 make the missing case the default case. */
1284 if (t != NULL && f != NULL)
1294 /* Translate the code for each of these blocks, and append it to
1295 the current block. */
1297 true_tree = gfc_trans_code (t->next);
1300 false_tree = gfc_trans_code (f->next);
1302 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1303 true_tree, false_tree);
1304 gfc_add_expr_to_block (&block, stmt);
1307 return gfc_finish_block (&block);
1311 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1312 Instead of generating compares and jumps, it is far simpler to
1313 generate a data structure describing the cases in order and call a
1314 library subroutine that locates the right case.
1315 This is particularly true because this is the only case where we
1316 might have to dispose of a temporary.
1317 The library subroutine returns a pointer to jump to or NULL if no
1318 branches are to be taken. */
/* Translate SELECT CASE on a CHARACTER expression: build a static jump
   table of (low, high, target) entries and call a library routine that
   returns the case number, then switch on that number.  NOTE: this chunk
   is elided; several statements between the numbered lines are missing.  */
1321 gfc_trans_character_select (gfc_code *code)
1323 tree init, node, end_label, tmp, type, case_num, label, fndecl;
1324 stmtblock_t block, body;
1330 /* The jump table types are stored in static variables to avoid
1331 constructing them from scratch every single time.  */
1332 static tree select_struct[2];
1333 static tree ss_string1[2], ss_string1_len[2];
1334 static tree ss_string2[2], ss_string2_len[2];
1335 static tree ss_target[2];
1337 tree pchartype = gfc_get_pchar_type (code->expr->ts.kind);
/* Choose the per-kind cache slot; the assignments to the index 'k' are
   elided here -- presumably 0 for kind=1 and 1 for kind=4 (TODO confirm
   against the full source).  */
1339 if (code->expr->ts.kind == 1)
1341 else if (code->expr->ts.kind == 4)
/* Lazily construct the RECORD_TYPE describing one jump-table entry.  */
1346 if (select_struct[k] == NULL)
1348 select_struct[k] = make_node (RECORD_TYPE);
1350 if (code->expr->ts.kind == 1)
1351 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1352 else if (code->expr->ts.kind == 4)
1353 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
/* Add one field to the entry struct and remember its FIELD_DECL so the
   constructors below can reference it.  */
1358 #define ADD_FIELD(NAME, TYPE) \
1359 ss_##NAME[k] = gfc_add_field_to_struct \
1360 (&(TYPE_FIELDS (select_struct[k])), select_struct[k], \
1361 get_identifier (stringize(NAME)), TYPE)
1363 ADD_FIELD (string1, pchartype);
1364 ADD_FIELD (string1_len, gfc_charlen_type_node);
1366 ADD_FIELD (string2, pchartype);
1367 ADD_FIELD (string2_len, gfc_charlen_type_node);
1369 ADD_FIELD (target, integer_type_node);
1372 gfc_finish_type (select_struct[k]);
/* Walk to the leftmost case; the bodies that number the cases are
   elided here.  */
1375 cp = code->block->ext.case_list;
1376 while (cp->left != NULL)
1380 for (d = cp; d; d = d->right)
1383 end_label = gfc_build_label_decl (NULL_TREE);
1385 /* Generate the body.  */
1386 gfc_start_block (&block);
1387 gfc_init_block (&body);
/* Emit a CASE_LABEL_EXPR per case value, the translated statements of
   that case block, then a jump past the whole switch.  */
1389 for (c = code->block; c; c = c->block)
1391 for (d = c->ext.case_list; d; d = d->next)
1393 label = gfc_build_label_decl (NULL_TREE);
1394 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1395 build_int_cst (NULL_TREE, d->n),
1396 build_int_cst (NULL_TREE, d->n), label);
1397 gfc_add_expr_to_block (&body, tmp);
1400 tmp = gfc_trans_code (c->next);
1401 gfc_add_expr_to_block (&body, tmp);
1403 tmp = build1_v (GOTO_EXPR, end_label);
1404 gfc_add_expr_to_block (&body, tmp);
1407 /* Generate the structure describing the branches.  */
1410 for(d = cp; d; d = d->right)
1414 gfc_init_se (&se, NULL);
/* A missing low/high bound is encoded as a null string pointer with
   zero length.  */
1418 node = tree_cons (ss_string1[k], null_pointer_node, node);
1419 node = tree_cons (ss_string1_len[k], integer_zero_node, node);
1423 gfc_conv_expr_reference (&se, d->low);
1425 node = tree_cons (ss_string1[k], se.expr, node);
1426 node = tree_cons (ss_string1_len[k], se.string_length, node);
1429 if (d->high == NULL)
1431 node = tree_cons (ss_string2[k], null_pointer_node, node);
1432 node = tree_cons (ss_string2_len[k], integer_zero_node, node);
1436 gfc_init_se (&se, NULL);
1437 gfc_conv_expr_reference (&se, d->high);
1439 node = tree_cons (ss_string2[k], se.expr, node);
1440 node = tree_cons (ss_string2_len[k], se.string_length, node);
1443 node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n),
1446 tmp = build_constructor_from_list (select_struct[k], nreverse (node));
1447 init = tree_cons (NULL_TREE, tmp, init);
/* Build the static array of entries and its constant initializer.  */
1450 type = build_array_type (select_struct[k],
1451 build_index_type (build_int_cst (NULL_TREE, n-1)));
1453 init = build_constructor_from_list (type, nreverse(init));
1454 TREE_CONSTANT (init) = 1;
1455 TREE_STATIC (init) = 1;
1456 /* Create a static variable to hold the jump table.  */
1457 tmp = gfc_create_var (type, "jumptable");
1458 TREE_CONSTANT (tmp) = 1;
1459 TREE_STATIC (tmp) = 1;
1460 TREE_READONLY (tmp) = 1;
1461 DECL_INITIAL (tmp) = init;
1464 /* Build the library call.  */
1465 init = gfc_build_addr_expr (pvoid_type_node, init);
1467 gfc_init_se (&se, NULL);
1468 gfc_conv_expr_reference (&se, code->expr);
1470 gfc_add_block_to_block (&block, &se.pre);
/* Pick the runtime lookup routine matching the character kind.  */
1472 if (code->expr->ts.kind == 1)
1473 fndecl = gfor_fndecl_select_string;
1474 else if (code->expr->ts.kind == 4)
1475 fndecl = gfor_fndecl_select_string_char4;
1479 tmp = build_call_expr (fndecl, 4, init, build_int_cst (NULL_TREE, n),
1480 se.expr, se.string_length);
1481 case_num = gfc_create_var (integer_type_node, "case_num");
1482 gfc_add_modify (&block, case_num, tmp);
/* Dispose of any temporary created for the select expression.  */
1484 gfc_add_block_to_block (&block, &se.post);
/* Switch on the case number returned by the library call.  */
1486 tmp = gfc_finish_block (&body);
1487 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1488 gfc_add_expr_to_block (&block, tmp);
1490 tmp = build1_v (LABEL_EXPR, end_label);
1491 gfc_add_expr_to_block (&block, tmp);
1493 return gfc_finish_block (&block);
1497 /* Translate the three variants of the SELECT CASE construct.
1499 SELECT CASEs with INTEGER case expressions can be translated to an
1500 equivalent GENERIC switch statement, and for LOGICAL case
1501 expressions we build one or two if-else compares.
1503 SELECT CASEs with CHARACTER case expressions are a whole different
1504 story, because they don't exist in GENERIC. So we sort them and
1505 do a binary search at runtime.
1507 Fortran has no BREAK statement, and it does not allow jumps from
1508 one case block to another. That makes things a lot easier for
/* Entry point for SELECT CASE: dispatch on the type of the case
   expression; each type has its own translation strategy (see the
   comment above).  */
1512 gfc_trans_select (gfc_code * code)
1514 gcc_assert (code && code->expr);
1516 /* Empty SELECT constructs are legal.  */
1517 if (code->block == NULL)
1518 return build_empty_stmt ();
1520 /* Select the correct translation function.  */
1521 switch (code->expr->ts.type)
1523 case BT_LOGICAL: return gfc_trans_logical_select (code);
1524 case BT_INTEGER: return gfc_trans_integer_select (code);
1525 case BT_CHARACTER: return gfc_trans_character_select (code);
/* Any other case-expression type is a front-end bug.  */
1527 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1533 /* Traversal function to substitute a replacement symtree if the symbol
1534 in the expression is the same as that passed. f == 2 signals that
1535 that variable itself is not to be checked - only the references.
1536 This group of functions is used when the variable expression in a
1537 FORALL assignment has internal references. For example:
1538 FORALL (i = 1:4) p(p(i)) = i
1539 The only recourse here is to store a copy of 'p' for the index
/* Replacement symtree created by forall_make_variable_temp and the
   original it stands in for.  Shared state for the forall_replace /
   forall_restore traversal callbacks below.  */
1542 static gfc_symtree *new_symtree;
1543 static gfc_symtree *old_symtree;
/* gfc_traverse_expr callback: substitute the temporary's symtree for
   SYM.  Per the comment above, *f == 2 means the root variable itself
   is skipped and only references inside it are replaced (that check is
   elided from this view).  */
1546 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1548 if (expr->expr_type != EXPR_VARIABLE)
1553 else if (expr->symtree->n.sym == sym)
1554 expr->symtree = new_symtree;
/* Replace SYM's symtree with the fake one throughout expression E.
   F is forwarded to the forall_replace callback.  */
1560 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1562 gfc_traverse_expr (e, sym, forall_replace, f);
/* gfc_traverse_expr callback: undo forall_replace by restoring the
   original symtree wherever the fake one was installed.  */
1566 forall_restore (gfc_expr *expr,
1567 gfc_symbol *sym ATTRIBUTE_UNUSED,
1568 int *f ATTRIBUTE_UNUSED)
1570 if (expr->expr_type != EXPR_VARIABLE)
1573 if (expr->symtree == new_symtree)
1574 expr->symtree = old_symtree;
/* Restore the original symtree throughout expression E.  */
1580 forall_restore_symtree (gfc_expr *e)
1582 gfc_traverse_expr (e, NULL, forall_restore, 0);
/* Make a complete copy of the FORALL assignment's lvalue variable in a
   temporary, create a fake symbol/symtree backed by that temporary, and
   redirect both sides of the assignment to it.  Initialization code is
   appended to PRE, cleanup to POST.  */
1586 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1591 gfc_symbol *new_sym;
1592 gfc_symbol *old_sym;
1596 /* Build a copy of the lvalue.  */
1597 old_symtree = c->expr->symtree;
1598 old_sym = old_symtree->n.sym;
1599 e = gfc_lval_expr_from_sym (old_sym);
/* Array case: copy via a packed temporary descriptor.  */
1600 if (old_sym->attr.dimension)
1602 gfc_init_se (&tse, NULL);
1603 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
1604 gfc_add_block_to_block (pre, &tse.pre);
1605 gfc_add_block_to_block (post, &tse.post);
1606 tse.expr = build_fold_indirect_ref (tse.expr);
1608 if (e->ts.type != BT_CHARACTER)
1610 /* Use the variable offset for the temporary.  */
1611 tmp = gfc_conv_descriptor_offset (tse.expr);
1612 gfc_add_modify (pre, tmp,
1613 gfc_conv_array_offset (old_sym->backend_decl));
/* Scalar case: evaluate the variable into a scalar temporary.  */
1618 gfc_init_se (&tse, NULL);
1619 gfc_init_se (&rse, NULL);
1620 gfc_conv_expr (&rse, e);
1621 if (e->ts.type == BT_CHARACTER)
1623 tse.string_length = rse.string_length;
1624 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1626 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1628 gfc_add_block_to_block (pre, &tse.pre);
1629 gfc_add_block_to_block (post, &tse.post);
1633 tmp = gfc_typenode_for_spec (&e->ts);
1634 tse.expr = gfc_create_var (tmp, "temp");
1637 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1638 e->expr_type == EXPR_VARIABLE);
1639 gfc_add_expr_to_block (pre, tmp);
1643 /* Create a new symbol to represent the lvalue.  */
1644 new_sym = gfc_new_symbol (old_sym->name, NULL);
1645 new_sym->ts = old_sym->ts;
1646 new_sym->attr.referenced = 1;
1647 new_sym->attr.dimension = old_sym->attr.dimension;
1648 new_sym->attr.flavor = old_sym->attr.flavor;
1650 /* Use the temporary as the backend_decl.  */
1651 new_sym->backend_decl = tse.expr;
1653 /* Create a fake symtree for it.  */
1655 new_symtree = gfc_new_symtree (&root, old_sym->name);
1656 new_symtree->n.sym = new_sym;
1657 gcc_assert (new_symtree == root);
1659 /* Go through the expression reference replacing the old_symtree
   with the new one; f == 2 keeps the root lvalue itself intact.  */
1661 forall_replace_symtree (c->expr, old_sym, 2);
1663 /* Now we have made this temporary, we might as well use it for
1664 the right hand side.  */
1665 forall_replace_symtree (c->expr2, old_sym, 1);
1669 /* Handles dependencies in forall assignments. */
/* Detect dependencies in a FORALL assignment and, where needed, make a
   temporary copy of the lvalue variable (via forall_make_variable_temp)
   so the assignment can proceed safely.  Elided lines hide the return
   value plumbing -- presumably the need_temp result is returned.  */
1671 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1678 lsym = c->expr->symtree->n.sym;
1679 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
1681 /* Now check for dependencies within the 'variable'
1682 expression itself.  These are treated by making a complete
1683 copy of variable and changing all the references to it
1684 point to the copy instead.  Note that the shallow copy of
1685 the variable will not suffice for derived types with
1686 pointer components.  We therefore leave these to their
1688 if (lsym->ts.type == BT_DERIVED
1689 && lsym->ts.derived->attr.pointer_comp)
/* The lvalue references a FORALL index inside itself
   (e.g. p(p(i)) = i), so a copy of the variable is required.  */
1693 if (find_forall_index (c->expr, lsym, 2) == SUCCESS)
1695 forall_make_variable_temp (c, pre, post);
1699 /* Substrings with dependencies are treated in the same
   way: overlapping substring assignments to the same symbol
   also force a temporary.  */
1701 if (c->expr->ts.type == BT_CHARACTER
1703 && c->expr2->expr_type == EXPR_VARIABLE
1704 && lsym == c->expr2->symtree->n.sym)
1706 for (lref = c->expr->ref; lref; lref = lref->next)
1707 if (lref->type == REF_SUBSTRING)
1709 for (rref = c->expr2->ref; rref; rref = rref->next)
1710 if (rref->type == REF_SUBSTRING)
1714 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
1716 forall_make_variable_temp (c, pre, post);
/* Undo the symtree substitution made by forall_make_variable_temp and
   free the fake symbol and symtree it created.  */
1725 cleanup_forall_symtrees (gfc_code *c)
1727 forall_restore_symtree (c->expr);
1728 forall_restore_symtree (c->expr2);
1729 gfc_free (new_symtree->n.sym);
1730 gfc_free (new_symtree);
1734 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1735 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1736 indicates whether we should generate code to test the FORALLs mask
1737 array. OUTER is the loop header to be used for initializing mask
1740 The generated loop format is:
1741 count = (end - start + step) / step
/* Wrap BODY in one counted loop per FORALL index of FORALL_TMP
   (innermost index first).  Each loop runs a precomputed trip count
   down to zero rather than comparing the index variable against its
   bound, which avoids overflow issues at the type's limits.  */
1754 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1755 int mask_flag, stmtblock_t *outer)
1763 tree var, start, end, step;
1766 /* Initialize the mask index outside the FORALL nest.  */
1767 if (mask_flag && forall_tmp->mask)
1768 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
/* Build the loops from the inside out; the per-iteration fields
   (var/end/step) are loaded from 'iter' on elided lines.  */
1770 iter = forall_tmp->this_loop;
1771 nvar = forall_tmp->nvar;
1772 for (n = 0; n < nvar; n++)
1775 start = iter->start;
1779 exit_label = gfc_build_label_decl (NULL_TREE);
1780 TREE_USED (exit_label) = 1;
1782 /* The loop counter.  */
1783 count = gfc_create_var (TREE_TYPE (var), "count");
1785 /* The body of the loop.  */
1786 gfc_init_block (&block);
1788 /* The exit condition: leave the loop once the remaining trip
1789 count reaches zero.  */
1789 cond = fold_build2 (LE_EXPR, boolean_type_node,
1790 count, build_int_cst (TREE_TYPE (count), 0));
1791 tmp = build1_v (GOTO_EXPR, exit_label);
1792 tmp = fold_build3 (COND_EXPR, void_type_node,
1793 cond, tmp, build_empty_stmt ());
1794 gfc_add_expr_to_block (&block, tmp);
1796 /* The main loop body.  */
1797 gfc_add_expr_to_block (&block, body);
1799 /* Increment the loop variable.  */
1800 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1801 gfc_add_modify (&block, var, tmp);
1803 /* Advance to the next mask element.  Only do this for the
1805 innermost loop (n == 0).  */
1805 if (n == 0 && mask_flag && forall_tmp->mask)
1807 tree maskindex = forall_tmp->maskindex;
1808 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1809 maskindex, gfc_index_one_node);
1810 gfc_add_modify (&block, maskindex, tmp);
1813 /* Decrement the loop counter.  */
1814 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
1815 build_int_cst (TREE_TYPE (var), 1));
1816 gfc_add_modify (&block, count, tmp);
1818 body = gfc_finish_block (&block);
1820 /* Loop var initialization.  */
1821 gfc_init_block (&block);
1822 gfc_add_modify (&block, var, start);
1825 /* Initialize the loop counter: count = (end - start + step) / step
   (see the format comment above this function).  */
1826 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1827 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1828 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1829 gfc_add_modify (&block, count, tmp);
1831 /* The loop expression.  */
1832 tmp = build1_v (LOOP_EXPR, body);
1833 gfc_add_expr_to_block (&block, tmp);
1835 /* The exit label.  */
1836 tmp = build1_v (LABEL_EXPR, exit_label);
1837 gfc_add_expr_to_block (&block, tmp);
1839 body = gfc_finish_block (&block);
1846 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1847 is nonzero, the body is controlled by all masks in the forall nest.
1848 Otherwise, the innermost loop is not controlled by its mask. This
1849 is used for initializing that mask. */
/* Wrap BODY in the full nest of FORALL loops described by
   NESTED_FORALL_INFO, innermost first, guarding the body with each
   level's mask when MASK_FLAG requests it.  */
1852 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1857 forall_info *forall_tmp;
1858 tree mask, maskindex;
1860 gfc_start_block (&header);
1862 forall_tmp = nested_forall_info;
1863 while (forall_tmp != NULL)
1865 /* Generate body with masks' control.  */
1868 mask = forall_tmp->mask;
1869 maskindex = forall_tmp->maskindex;
1871 /* If a mask was specified make the assignment conditional.  */
1874 tmp = gfc_build_array_ref (mask, maskindex, NULL);
1875 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
/* Wrap this level's loops around the (possibly guarded) body and
   move outward to the enclosing FORALL.  */
1878 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1879 forall_tmp = forall_tmp->prev_nest;
1883 gfc_add_expr_to_block (&header, body);
1884 return gfc_finish_block (&header);
1888 /* Allocate data for holding a temporary array. Returns either a local
1889 temporary array or a pointer variable. */
/* Allocate storage for a temporary array of BYTESIZE bytes / SIZE
   elements.  A compile-time-constant size small enough for the stack
   yields a local array; otherwise heap memory is malloc'd and *PDATA
   receives the pointer for later freeing.  */
1892 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
1899 if (INTEGER_CST_P (size))
/* Upper bound of the index range is size - 1 (zero based).  */
1901 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1902 gfc_index_one_node);
1907 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1908 type = build_array_type (elem_type, type);
1909 if (gfc_can_put_var_on_stack (bytesize))
1911 gcc_assert (INTEGER_CST_P (size));
1912 tmpvar = gfc_create_var (type, "temp");
/* Heap fallback: allocate via the malloc wrapper and expose the raw
   pointer through *PDATA so the caller can free it.  */
1917 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1918 *pdata = convert (pvoid_type_node, tmpvar);
1920 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
1921 gfc_add_modify (pblock, tmpvar, tmp);
1927 /* Generate codes to copy the temporary to the actual lhs. */
/* Generate the code that copies the temporary TMP1 back into the actual
   lhs EXPR, indexed by COUNT1.  WHEREMASK (sense optionally flipped by
   INVERT) guards each element copy; COUNT3 indexes the mask.  Returns
   the finished statement tree.  */
1930 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1931 tree count1, tree wheremask, bool invert)
1935 stmtblock_t block, body;
1941 lss = gfc_walk_expr (expr);
/* Scalar lhs: a single guarded assignment, no scalarizer needed.  */
1943 if (lss == gfc_ss_terminator)
1945 gfc_start_block (&block);
1947 gfc_init_se (&lse, NULL);
1949 /* Translate the expression.  */
1950 gfc_conv_expr (&lse, expr);
1952 /* Form the expression for the temporary.  */
1953 tmp = gfc_build_array_ref (tmp1, count1, NULL);
1955 /* Use the scalar assignment as is.  */
1956 gfc_add_block_to_block (&block, &lse.pre);
1957 gfc_add_modify (&block, lse.expr, tmp);
1958 gfc_add_block_to_block (&block, &lse.post);
1960 /* Increment the count1.  */
1961 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1962 gfc_index_one_node);
1963 gfc_add_modify (&block, count1, tmp);
1965 tmp = gfc_finish_block (&block);
/* Array lhs: build a scalarized loop over the lhs shape.  */
1969 gfc_start_block (&block);
1971 gfc_init_loopinfo (&loop1);
1972 gfc_init_se (&rse, NULL);
1973 gfc_init_se (&lse, NULL);
1975 /* Associate the lss with the loop.  */
1976 gfc_add_ss_to_loop (&loop1, lss);
1978 /* Calculate the bounds of the scalarization.  */
1979 gfc_conv_ss_startstride (&loop1);
1980 /* Setup the scalarizing loops.  */
1981 gfc_conv_loop_setup (&loop1, &expr->where);
1983 gfc_mark_ss_chain_used (lss, 1);
1985 /* Start the scalarized loop body.  */
1986 gfc_start_scalarized_body (&loop1, &body);
1988 /* Setup the gfc_se structures.  */
1989 gfc_copy_loopinfo_to_se (&lse, &loop1);
1992 /* Form the expression of the temporary.  */
1993 if (lss != gfc_ss_terminator)
1994 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
1995 /* Translate expr.  */
1996 gfc_conv_expr (&lse, expr);
1998 /* Use the scalar assignment.  */
1999 rse.string_length = lse.string_length;
2000 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
2002 /* Form the mask expression according to the mask tree list;
   INVERT negates the condition (ELSEWHERE branch).  */
2005 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2007 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2008 TREE_TYPE (wheremaskexpr),
2010 tmp = fold_build3 (COND_EXPR, void_type_node,
2011 wheremaskexpr, tmp, build_empty_stmt ());
2014 gfc_add_expr_to_block (&body, tmp);
2016 /* Increment count1.  */
2017 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2018 count1, gfc_index_one_node);
2019 gfc_add_modify (&body, count1, tmp);
2021 /* Increment count3, the wheremask index.  */
2024 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2025 count3, gfc_index_one_node);
2026 gfc_add_modify (&body, count3, tmp);
2029 /* Generate the copying loops.  */
2030 gfc_trans_scalarizing_loops (&loop1, &body);
2031 gfc_add_block_to_block (&block, &loop1.pre);
2032 gfc_add_block_to_block (&block, &loop1.post);
2033 gfc_cleanup_loop (&loop1);
2035 tmp = gfc_finish_block (&block);
2041 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2042 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2043 and should not be freed. WHEREMASK is the conditional execution mask
2044 whose sense may be inverted by INVERT. */
/* Generate the code that evaluates rhs EXPR2 into the temporary TMP1,
   indexed by COUNT1.  LSS and RSS were built by
   compute_inner_temp_size() and must not be freed here.  WHEREMASK
   (sense optionally flipped by INVERT) guards each store; COUNT3
   indexes the mask.  Returns the finished statement tree.  */
2047 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2048 tree count1, gfc_ss *lss, gfc_ss *rss,
2049 tree wheremask, bool invert)
2051 stmtblock_t block, body1;
2058 gfc_start_block (&block);
2060 gfc_init_se (&rse, NULL);
2061 gfc_init_se (&lse, NULL);
/* Scalar assignment: no scalarizer, evaluate rhs directly.  */
2063 if (lss == gfc_ss_terminator)
2065 gfc_init_block (&body1);
2066 gfc_conv_expr (&rse, expr2);
2067 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2071 /* Initialize the loop.  */
2072 gfc_init_loopinfo (&loop);
2074 /* We may need LSS to determine the shape of the expression.  */
2075 gfc_add_ss_to_loop (&loop, lss);
2076 gfc_add_ss_to_loop (&loop, rss);
2078 gfc_conv_ss_startstride (&loop);
2079 gfc_conv_loop_setup (&loop, &expr2->where);
2081 gfc_mark_ss_chain_used (rss, 1);
2082 /* Start the loop body.  */
2083 gfc_start_scalarized_body (&loop, &body1);
2085 /* Translate the expression.  */
2086 gfc_copy_loopinfo_to_se (&rse, &loop);
2088 gfc_conv_expr (&rse, expr2);
2090 /* Form the expression of the temporary.  */
2091 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2094 /* Use the scalar assignment.  */
2095 lse.string_length = rse.string_length;
2096 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2097 expr2->expr_type == EXPR_VARIABLE);
2099 /* Form the mask expression according to the mask tree list;
   INVERT negates the condition (ELSEWHERE branch).  */
2102 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2104 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2105 TREE_TYPE (wheremaskexpr),
2107 tmp = fold_build3 (COND_EXPR, void_type_node,
2108 wheremaskexpr, tmp, build_empty_stmt ());
2111 gfc_add_expr_to_block (&body1, tmp);
/* Scalar case: splice the body straight into BLOCK and bump the
   temporary index once.  */
2113 if (lss == gfc_ss_terminator)
2115 gfc_add_block_to_block (&block, &body1);
2117 /* Increment count1.  */
2118 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2119 gfc_index_one_node);
2120 gfc_add_modify (&block, count1, tmp);
2124 /* Increment count1 inside the scalarized loop body.  */
2125 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2126 count1, gfc_index_one_node);
2127 gfc_add_modify (&body1, count1, tmp);
2129 /* Increment count3, the wheremask index.  */
2132 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2133 count3, gfc_index_one_node);
2134 gfc_add_modify (&body1, count3, tmp);
2137 /* Generate the copying loops.  */
2138 gfc_trans_scalarizing_loops (&loop, &body1);
2140 gfc_add_block_to_block (&block, &loop.pre);
2141 gfc_add_block_to_block (&block, &loop.post);
2143 gfc_cleanup_loop (&loop);
2144 /* TODO: Reuse lss and rss when copying temp->lhs.  Need to be careful
2145 as tree nodes in SS may not be valid in different scope.  */
2148 tmp = gfc_finish_block (&block);
2153 /* Calculate the size of temporary needed in the assignment inside forall.
2154 LSS and RSS are filled in this function. */
/* Compute the element count of the temporary needed for one assignment
   inside a FORALL.  Walks both sides of the assignment, filling *LSS
   and *RSS for later reuse, and returns the product of the extents of
   the scalarization loops (one for a scalar lhs).  */
2157 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2158 stmtblock_t * pblock,
2159 gfc_ss **lss, gfc_ss **rss)
2167 *lss = gfc_walk_expr (expr1);
/* Default: scalar assignment needs exactly one element.  */
2170 size = gfc_index_one_node;
2171 if (*lss != gfc_ss_terminator)
2173 gfc_init_loopinfo (&loop);
2175 /* Walk the RHS of the expression.  */
2176 *rss = gfc_walk_expr (expr2);
2177 if (*rss == gfc_ss_terminator)
2179 /* The rhs is scalar.  Add a ss for the expression.  */
2180 *rss = gfc_get_ss ();
2181 (*rss)->next = gfc_ss_terminator;
2182 (*rss)->type = GFC_SS_SCALAR;
2183 (*rss)->expr = expr2;
2186 /* Associate the SS with the loop.  */
2187 gfc_add_ss_to_loop (&loop, *lss);
2188 /* We don't actually need to add the rhs at this point, but it might
2189 make guessing the loop bounds a bit easier.  */
2190 gfc_add_ss_to_loop (&loop, *rss);
2192 /* We only want the shape of the expression, not rest of the junk
2193 generated by the scalarizer.  */
2194 loop.array_parameter = 1;
2196 /* Calculate the bounds of the scalarization.  Bounds checking is
2197 temporarily disabled because only the shape is wanted here.  */
2197 save_flag = flag_bounds_check;
2198 flag_bounds_check = 0;
2199 gfc_conv_ss_startstride (&loop);
2200 flag_bounds_check = save_flag;
2201 gfc_conv_loop_setup (&loop, &expr2->where);
2203 /* Figure out how many elements we need: the product over all
2204 dimensions of (to - from + 1).  */
2204 for (i = 0; i < loop.dimen; i++)
2206 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2207 gfc_index_one_node, loop.from[i]);
2208 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2210 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2212 gfc_add_block_to_block (pblock, &loop.pre);
2213 size = gfc_evaluate_now (size, pblock);
2214 gfc_add_block_to_block (pblock, &loop.post);
2216 /* TODO: write a function that cleans up a loopinfo without freeing
2217 the SS chains.  Currently a NOP.  */
2224 /* Calculate the overall iterator number of the nested forall construct.
2225 This routine actually calculates the number of times the body of the
2226 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2227 that by the expression INNER_SIZE. The BLOCK argument specifies the
2228 block in which to calculate the result, and the optional INNER_SIZE_BODY
2229 argument contains any statements that need to executed (inside the loop)
2230 to initialize or calculate INNER_SIZE. */
/* Compute the total number of iterations of the FORALL nest times
   INNER_SIZE (see the comment above).  Constant unconditional loops
   are folded at compile time; the remainder is accumulated at runtime
   in a counter variable.  */
2233 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2234 stmtblock_t *inner_size_body, stmtblock_t *block)
2236 forall_info *forall_tmp = nested_forall_info;
2240 /* We can eliminate the innermost unconditional loops with constant
2242 array bounds by multiplying their sizes in at compile time.  */
2242 if (INTEGER_CST_P (inner_size))
2245 && !forall_tmp->mask
2246 && INTEGER_CST_P (forall_tmp->size))
2248 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2249 inner_size, forall_tmp->size);
2250 forall_tmp = forall_tmp->prev_nest;
2253 /* If there are no loops left, we have our constant result.  */
2258 /* Otherwise, create a temporary variable to compute the result.  */
2259 number = gfc_create_var (gfc_array_index_type, "num");
2260 gfc_add_modify (block, number, gfc_index_zero_node);
/* Body of the counting loop: evaluate INNER_SIZE (with any setup
   statements from INNER_SIZE_BODY) and add it to the accumulator.  */
2262 gfc_start_block (&body);
2263 if (inner_size_body)
2264 gfc_add_block_to_block (&body, inner_size_body);
2266 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2267 number, inner_size);
2270 gfc_add_modify (&body, number, tmp);
2271 tmp = gfc_finish_block (&body);
2273 /* Generate loops around the accumulation for the remaining nest.  */
2274 if (forall_tmp != NULL)
2275 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2277 gfc_add_expr_to_block (block, tmp);
2283 /* Allocate temporary for forall construct. SIZE is the size of temporary
2284 needed. PTEMP1 is returned for space free. */
/* Allocate a temporary of SIZE elements of TYPE for a FORALL construct.
   *PTEMP1 receives the raw pointer to free later (when heap-allocated).  */
2287 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
/* Byte size = element count * element size; skip the multiply when the
   element size is one byte.  */
2294 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2295 if (!integer_onep (unit))
2296 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2301 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
/* Heap allocation returns a pointer; dereference it so callers get
   the array itself.  */
2304 tmp = build_fold_indirect_ref (tmp);
2309 /* Allocate temporary for forall construct according to the information in
2310 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2311 assignment inside forall. PTEMP1 is returned for space free. */
/* Allocate a temporary sized for the whole FORALL nest: total size =
   overall iteration count of the nest times INNER_SIZE.  *PTEMP1
   receives the pointer to free later.  */
2314 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2315 tree inner_size, stmtblock_t * inner_size_body,
2316 stmtblock_t * block, tree * ptemp1)
2320 /* Calculate the total size of temporary needed in forall construct.  */
2321 size = compute_overall_iter_number (nested_forall_info, inner_size,
2322 inner_size_body, block);
2324 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2328 /* Handle assignments inside forall which need temporary.
2330 forall (i=start:end:stride; maskexpr)
2333 (where e,f<i> are arbitrary expressions possibly involving i
2334 and there is a dependency between e<i> and f<i>)
2336 masktmp(:) = maskexpr(:)
2341 for (i = start; i <= end; i += stride)
2345 for (i = start; i <= end; i += stride)
2347 if (masktmp[maskindex++])
2348 tmp[count1++] = f<i>
2352 for (i = start; i <= end; i += stride)
2354 if (masktmp[maskindex++])
2355 e<i> = tmp[count1++]
/* Translate a FORALL assignment with a dependency between lhs and rhs
   (see the scheme in the comment above): evaluate the complete rhs into
   a linear temporary in a first pass over the nest, then copy the
   temporary into the lhs in a second pass.  */
2360 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2361 tree wheremask, bool invert,
2362 forall_info * nested_forall_info,
2363 stmtblock_t * block)
2371 stmtblock_t inner_size_body;
2373 /* Create vars.  count1 is the current iterator number of the nested
2375 forall; it indexes the linear temporary.  */
2375 count1 = gfc_create_var (gfc_array_index_type, "count1");
2377 /* Count is the wheremask index.  */
2380 count = gfc_create_var (gfc_array_index_type, "count");
2381 gfc_add_modify (block, count, gfc_index_zero_node);
2386 /* Initialize count1.  */
2387 gfc_add_modify (block, count1, gfc_index_zero_node);
2389 /* Calculate the size of temporary needed in the assignment.  Return loop, lss
2390 and rss which are used in function generate_loop_for_rhs_to_temp().  */
2391 gfc_init_block (&inner_size_body);
2392 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2395 /* The type of LHS.  Used in function allocate_temp_for_forall_nest.  */
2396 if (expr1->ts.type == BT_CHARACTER && expr1->ts.cl->length)
/* Make sure the character length has a backend decl before using it
   to size the temporary's element type.  */
2398 if (!expr1->ts.cl->backend_decl)
2401 gfc_init_se (&tse, NULL);
2402 gfc_conv_expr (&tse, expr1->ts.cl->length)
2403 expr1->ts.cl->backend_decl = tse.expr;
2405 type = gfc_get_character_type_len (gfc_default_character_kind,
2406 expr1->ts.cl->backend_decl);
2409 type = gfc_typenode_for_spec (&expr1->ts);
2411 /* Allocate temporary for nested forall construct according to the
2412 information in nested_forall_info and inner_size.  */
2413 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2414 &inner_size_body, block, &ptemp1);
2416 /* Generate code to copy the rhs to the temporary.  */
2417 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2420 /* Generate body and loops according to the information in
2421 nested_forall_info.  */
2422 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2423 gfc_add_expr_to_block (block, tmp);
/* Reset both counters before the second (temp -> lhs) pass.  */
2426 gfc_add_modify (block, count1, gfc_index_zero_node);
2430 gfc_add_modify (block, count, gfc_index_zero_node);
2432 /* Generate code to copy the temporary to the lhs.  */
2433 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2436 /* Generate body and loops according to the information in
2437 nested_forall_info.  */
2438 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2439 gfc_add_expr_to_block (block, tmp);
2443 /* Free the temporary (only reached when it was heap-allocated;
2444 guard elided here).  */
2444 tmp = gfc_call_free (ptemp1);
2445 gfc_add_expr_to_block (block, tmp);
2450 /* Translate pointer assignment inside FORALL which need temporary. */
/* Translate a pointer assignment inside FORALL that needs a temporary:
   collect all rhs pointer values (or array descriptors) into a linear
   temporary in a first pass over the nest, then assign them to the lhs
   pointers in a second pass.  */
2453 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2454 forall_info * nested_forall_info,
2455 stmtblock_t * block)
2469 tree tmp, tmp1, ptemp1;
2471 count = gfc_create_var (gfc_array_index_type, "count");
2472 gfc_add_modify (block, count, gfc_index_zero_node);
2474 inner_size = integer_one_node;
2475 lss = gfc_walk_expr (expr1);
2476 rss = gfc_walk_expr (expr2);
/* Scalar pointer target: store plain pointers in the temporary.  */
2477 if (lss == gfc_ss_terminator)
2479 type = gfc_typenode_for_spec (&expr1->ts);
2480 type = build_pointer_type (type);
2482 /* Allocate temporary for nested forall construct according to the
2483 information in nested_forall_info and inner_size.  */
2484 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2485 inner_size, NULL, block, &ptemp1);
/* Pass 1: evaluate the rhs address into temp[count] under the nest.  */
2486 gfc_start_block (&body);
2487 gfc_init_se (&lse, NULL);
2488 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2489 gfc_init_se (&rse, NULL);
2490 rse.want_pointer = 1;
2491 gfc_conv_expr (&rse, expr2);
2492 gfc_add_block_to_block (&body, &rse.pre);
2493 gfc_add_modify (&body, lse.expr,
2494 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2495 gfc_add_block_to_block (&body, &rse.post);
2497 /* Increment count.  */
2498 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2499 count, gfc_index_one_node);
2500 gfc_add_modify (&body, count, tmp);
2502 tmp = gfc_finish_block (&body);
2504 /* Generate body and loops according to the information in
2505 nested_forall_info.  */
2506 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2507 gfc_add_expr_to_block (block, tmp);
/* Pass 2: reset the index and assign temp[count] to each lhs pointer.  */
2510 gfc_add_modify (block, count, gfc_index_zero_node);
2512 gfc_start_block (&body);
2513 gfc_init_se (&lse, NULL);
2514 gfc_init_se (&rse, NULL);
2515 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2516 lse.want_pointer = 1;
2517 gfc_conv_expr (&lse, expr1);
2518 gfc_add_block_to_block (&body, &lse.pre);
2519 gfc_add_modify (&body, lse.expr, rse.expr);
2520 gfc_add_block_to_block (&body, &lse.post);
2521 /* Increment count.  */
2522 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2523 count, gfc_index_one_node);
2524 gfc_add_modify (&body, count, tmp);
2525 tmp = gfc_finish_block (&body);
2527 /* Generate body and loops according to the information in
2528 nested_forall_info.  */
2529 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2530 gfc_add_expr_to_block (block, tmp);
/* Array pointer target: store whole array descriptors instead.  */
2534 gfc_init_loopinfo (&loop);
2536 /* Associate the SS with the loop.  */
2537 gfc_add_ss_to_loop (&loop, rss);
2539 /* Setup the scalarizing loops and bounds.  */
2540 gfc_conv_ss_startstride (&loop);
2542 gfc_conv_loop_setup (&loop, &expr2->where);
2544 info = &rss->data.info;
2545 desc = info->descriptor;
2547 /* Make a new descriptor type matching the rhs section's shape.  */
2548 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2549 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2550 loop.from, loop.to, 1,
2553 /* Allocate temporary for nested forall construct.  */
2554 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2555 inner_size, NULL, block, &ptemp1);
/* Pass 1: build the rhs descriptor directly into temp[count].  */
2556 gfc_start_block (&body);
2557 gfc_init_se (&lse, NULL);
2558 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2559 lse.direct_byref = 1;
2560 rss = gfc_walk_expr (expr2);
2561 gfc_conv_expr_descriptor (&lse, expr2, rss);
2563 gfc_add_block_to_block (&body, &lse.pre);
2564 gfc_add_block_to_block (&body, &lse.post);
2566 /* Increment count.  */
2567 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2568 count, gfc_index_one_node);
2569 gfc_add_modify (&body, count, tmp);
2571 tmp = gfc_finish_block (&body);
2573 /* Generate body and loops according to the information in
2574 nested_forall_info.  */
2575 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2576 gfc_add_expr_to_block (block, tmp);
/* Pass 2: reset the index and copy each saved descriptor into the
   lhs pointer's descriptor.  */
2579 gfc_add_modify (block, count, gfc_index_zero_node);
2581 parm = gfc_build_array_ref (tmp1, count, NULL);
2582 lss = gfc_walk_expr (expr1);
2583 gfc_init_se (&lse, NULL);
2584 gfc_conv_expr_descriptor (&lse, expr1, lss);
2585 gfc_add_modify (&lse.pre, lse.expr, parm);
2586 gfc_start_block (&body);
2587 gfc_add_block_to_block (&body, &lse.pre);
2588 gfc_add_block_to_block (&body, &lse.post);
2590 /* Increment count.  */
2591 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2592 count, gfc_index_one_node);
2593 gfc_add_modify (&body, count, tmp);
2595 tmp = gfc_finish_block (&body);
2597 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2598 gfc_add_expr_to_block (block, tmp);
2600 /* Free the temporary (only when heap-allocated; guard elided).  */
2603 tmp = gfc_call_free (ptemp1);
2604 gfc_add_expr_to_block (block, tmp);
/* NOTE(review): this listing is an elided extract -- gaps in the embedded
   original line numbers (e.g. 2612 -> 2614) show that braces, local
   declarations and several statements are missing.  Restore the full text
   from the original trans-stmt.c before editing the logic here.  */
2609 /* FORALL and WHERE statements are really nasty, especially when you nest
2610 them. All the rhs of a forall assignment must be evaluated before the
2611 actual assignments are performed. Presumably this also applies to all the
2612 assignments in an inner where statement. */
2614 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2615 linear array, relying on the fact that we process in the same order in all
2618 forall (i=start:end:stride; maskexpr)
2622 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2624 count = ((end + 1 - start) / stride)
2625 masktmp(:) = maskexpr(:)
2628 for (i = start; i <= end; i += stride)
2630 if (masktmp[maskindex++])
2634 for (i = start; i <= end; i += stride)
2636 if (masktmp[maskindex++])
2640 Note that this code only works when there are no dependencies.
2641 Forall loop with array assignments and data dependencies are a real pain,
2642 because the size of the temporary cannot always be determined before the
2643 loop is executed. This problem is compounded by the presence of nested
/* Translate one FORALL level.  NESTED_FORALL_INFO carries the enclosing
   FORALL nesting context, or NULL at the outermost level.  */
2648 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2668 gfc_forall_iterator *fa;
2671 gfc_saved_var *saved_vars;
2672 iter_info *this_forall;
2676 /* Do nothing if the mask is false. */
2678 && code->expr->expr_type == EXPR_CONSTANT
2679 && !code->expr->value.logical)
2680 return build_empty_stmt ();
2683 /* Count the FORALL index number. */
2684 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2688 /* Allocate the space for var, start, end, step, varexpr. */
2689 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2690 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2691 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2692 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2693 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2694 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2696 /* Allocate the space for info. */
2697 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2699 gfc_start_block (&pre);
2700 gfc_init_block (&post);
2701 gfc_init_block (&block);
/* One pass per FORALL iterator: shadow the index symbol with a backend
   temporary and record start/end/stride in an iter_info node.  */
2704 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2706 gfc_symbol *sym = fa->var->symtree->n.sym;
2708 /* Allocate space for this_forall. */
2709 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2711 /* Create a temporary variable for the FORALL index. */
2712 tmp = gfc_typenode_for_spec (&sym->ts);
2713 var[n] = gfc_create_var (tmp, sym->name);
2714 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2716 /* Record it in this_forall. */
2717 this_forall->var = var[n];
2719 /* Replace the index symbol's backend_decl with the temporary decl. */
2720 sym->backend_decl = var[n];
2722 /* Work out the start, end and stride for the loop. */
2723 gfc_init_se (&se, NULL);
2724 gfc_conv_expr_val (&se, fa->start);
2725 /* Record it in this_forall. */
2726 this_forall->start = se.expr;
2727 gfc_add_block_to_block (&block, &se.pre);
2730 gfc_init_se (&se, NULL);
2731 gfc_conv_expr_val (&se, fa->end);
2732 /* Record it in this_forall. */
2733 this_forall->end = se.expr;
2734 gfc_make_safe_expr (&se);
2735 gfc_add_block_to_block (&block, &se.pre);
2738 gfc_init_se (&se, NULL);
2739 gfc_conv_expr_val (&se, fa->stride);
2740 /* Record it in this_forall. */
2741 this_forall->step = se.expr;
2742 gfc_make_safe_expr (&se);
2743 gfc_add_block_to_block (&block, &se.pre);
2746 /* Set the NEXT field of this_forall to NULL. */
2747 this_forall->next = NULL;
2748 /* Link this_forall to the info construct (append at the tail). */
2749 if (info->this_loop)
2751 iter_info *iter_tmp = info->this_loop;
2752 while (iter_tmp->next != NULL)
2753 iter_tmp = iter_tmp->next;
2754 iter_tmp->next = this_forall;
2757 info->this_loop = this_forall;
2763 /* Calculate the size needed for the current forall level. */
2764 size = gfc_index_one_node;
2765 for (n = 0; n < nvar; n++)
2767 /* size = (end + step - start) / step. */
2768 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2770 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp)
2772 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2773 tmp = convert (gfc_array_index_type, tmp);
2775 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2778 /* Record the nvar and size of current forall level. */
2784 /* If the mask is .true., consider the FORALL unconditional. */
2785 if (code->expr->expr_type == EXPR_CONSTANT
2786 && code->expr->value.logical)
2794 /* First we need to allocate the mask. */
2797 /* As the mask array can be very big, prefer compact boolean types. */
2798 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2799 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2800 size, NULL, &block, &pmask);
2801 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2803 /* Record them in the info structure. */
2804 info->maskindex = maskindex;
2809 /* No mask was specified. */
2810 maskindex = NULL_TREE;
2811 mask = pmask = NULL_TREE;
2814 /* Link the current forall level to nested_forall_info. */
2815 info->prev_nest = nested_forall_info;
2816 nested_forall_info = info;
2818 /* Copy the mask into a temporary variable if required.
2819 For now we assume a mask temporary is needed. */
2822 /* As the mask array can be very big, prefer compact boolean types. */
2823 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2825 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
2827 /* Start of mask assignment loop body. */
2828 gfc_start_block (&body);
2830 /* Evaluate the mask expression. */
2831 gfc_init_se (&se, NULL);
2832 gfc_conv_expr_val (&se, code->expr);
2833 gfc_add_block_to_block (&body, &se.pre);
2835 /* Store the mask. */
2836 se.expr = convert (mask_type, se.expr);
2838 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2839 gfc_add_modify (&body, tmp, se.expr);
2841 /* Advance to the next mask element. */
2842 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2843 maskindex, gfc_index_one_node);
2844 gfc_add_modify (&body, maskindex, tmp);
2846 /* Generate the loops. */
2847 tmp = gfc_finish_block (&body);
2848 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2849 gfc_add_expr_to_block (&block, tmp);
/* NOTE(review): c presumably walks the statements of the FORALL body
   (the switch dispatching on the statement kind is partly elided).  */
2852 c = code->block->next;
2854 /* TODO: loop merging in FORALL statements. */
2855 /* Now that we've got a copy of the mask, generate the assignment loops. */
2861 /* A scalar or array assignment. DO the simple check for
2862 lhs to rhs dependencies. These make a temporary for the
2863 rhs and form a second forall block to copy to variable. */
2864 need_temp = check_forall_dependencies(c, &pre, &post);
2866 /* Temporaries due to array assignment data dependencies introduce
2867 no end of problems. */
2869 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2870 nested_forall_info, &block);
2873 /* Use the normal assignment copying routines. */
2874 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2876 /* Generate body and loops. */
2877 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2879 gfc_add_expr_to_block (&block, tmp);
2882 /* Cleanup any temporary symtrees that have been made to deal
2883 with dependencies. */
2885 cleanup_forall_symtrees (c);
2890 /* Translate WHERE or WHERE construct nested in FORALL. */
2891 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2894 /* Pointer assignment inside FORALL. */
2895 case EXEC_POINTER_ASSIGN:
2896 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2898 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2899 nested_forall_info, &block);
2902 /* Use the normal assignment copying routines. */
2903 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2905 /* Generate body and loops. */
2906 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2908 gfc_add_expr_to_block (&block, tmp);
/* Nested FORALL: recurse with the current level as the enclosing
   context.  */
2913 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2914 gfc_add_expr_to_block (&block, tmp);
2917 /* Explicit subroutine calls are prevented by the frontend but interface
2918 assignments can legitimately produce them. */
2919 case EXEC_ASSIGN_CALL:
2920 assign = gfc_trans_call (c, true);
2921 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
2922 gfc_add_expr_to_block (&block, tmp);
2932 /* Restore the original index variables. */
2933 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2934 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2936 /* Free the space for var, start, end, step, varexpr. */
2942 gfc_free (saved_vars);
2944 /* Free the space for this forall_info. */
2949 /* Free the temporary for the mask. */
2950 tmp = gfc_call_free (pmask);
2951 gfc_add_expr_to_block (&block, tmp);
2954 pushdecl (maskindex);
2956 gfc_add_block_to_block (&pre, &block);
2957 gfc_add_block_to_block (&pre, &post);
2959 return gfc_finish_block (&pre);
2963 /* Translate the FORALL statement or construct. */
2965 tree gfc_trans_forall (gfc_code * code)
2967 return gfc_trans_forall_1 (code, NULL);
/* NOTE(review): elided extract -- the embedded line-number gaps
   (e.g. 3011 -> 3015) indicate missing braces/declarations.  */
2971 /* Evaluate the WHERE mask expression, copy its value to a temporary.
2972 If the WHERE construct is nested in FORALL, compute the overall temporary
2973 needed by the WHERE mask expression multiplied by the iterator number of
2975 ME is the WHERE mask expression.
2976 MASK is the current execution mask upon input, whose sense may or may
2977 not be inverted as specified by the INVERT argument.
2978 CMASK is the updated execution mask on output, or NULL if not required.
2979 PMASK is the pending execution mask on output, or NULL if not required.
2980 BLOCK is the block in which to place the condition evaluation loops. */
2983 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2984 tree mask, bool invert, tree cmask, tree pmask,
2985 tree mask_type, stmtblock_t * block)
2990 stmtblock_t body, body1;
2991 tree count, cond, mtmp;
2994 gfc_init_loopinfo (&loop);
2996 lss = gfc_walk_expr (me);
2997 rss = gfc_walk_expr (me);
2999 /* Variable to index the temporary. */
3000 count = gfc_create_var (gfc_array_index_type, "count");
3001 /* Initialize count. */
3002 gfc_add_modify (block, count, gfc_index_zero_node);
3004 gfc_start_block (&body);
3006 gfc_init_se (&rse, NULL);
3007 gfc_init_se (&lse, NULL);
/* Scalar mask expression: no scalarization loop is needed.  */
3009 if (lss == gfc_ss_terminator)
3011 gfc_init_block (&body1);
3015 /* Initialize the loop. */
3016 gfc_init_loopinfo (&loop);
3018 /* We may need LSS to determine the shape of the expression. */
3019 gfc_add_ss_to_loop (&loop, lss);
3020 gfc_add_ss_to_loop (&loop, rss);
3022 gfc_conv_ss_startstride (&loop);
3023 gfc_conv_loop_setup (&loop, &me->where);
3025 gfc_mark_ss_chain_used (rss, 1);
3026 /* Start the loop body. */
3027 gfc_start_scalarized_body (&loop, &body1);
3029 /* Translate the expression. */
3030 gfc_copy_loopinfo_to_se (&rse, &loop);
3032 gfc_conv_expr (&rse, me);
3035 /* Variable to evaluate mask condition. */
3036 cond = gfc_create_var (mask_type, "cond");
3037 if (mask && (cmask || pmask))
3038 mtmp = gfc_create_var (mask_type, "mask");
3039 else mtmp = NULL_TREE;
3041 gfc_add_block_to_block (&body1, &lse.pre);
3042 gfc_add_block_to_block (&body1, &rse.pre);
3044 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
/* Load the incoming execution mask element (possibly inverted).  */
3046 if (mask && (cmask || pmask))
3048 tmp = gfc_build_array_ref (mask, count, NULL);
3050 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3051 gfc_add_modify (&body1, mtmp, tmp);
/* cmask = current-mask AND cond; pmask = current-mask AND NOT cond.  */
3056 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3059 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3060 gfc_add_modify (&body1, tmp1, tmp);
3065 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3066 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3068 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3069 gfc_add_modify (&body1, tmp1, tmp);
3072 gfc_add_block_to_block (&body1, &lse.post);
3073 gfc_add_block_to_block (&body1, &rse.post);
3075 if (lss == gfc_ss_terminator)
3077 gfc_add_block_to_block (&body, &body1);
3081 /* Increment count. */
3082 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3083 gfc_index_one_node);
3084 gfc_add_modify (&body1, count, tmp1);
3086 /* Generate the copying loops. */
3087 gfc_trans_scalarizing_loops (&loop, &body1);
3089 gfc_add_block_to_block (&body, &loop.pre);
3090 gfc_add_block_to_block (&body, &loop.post);
3092 gfc_cleanup_loop (&loop);
3093 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3094 as tree nodes in SS may not be valid in different scope. */
3097 tmp1 = gfc_finish_block (&body);
3098 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3099 if (nested_forall_info != NULL)
3100 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3102 gfc_add_expr_to_block (block, tmp1);
/* NOTE(review): elided extract -- local declarations and braces are
   missing at the line-number gaps (e.g. 3114 -> 3120).  */
3106 /* Translate an assignment statement in a WHERE statement or construct
3107 statement. The MASK expression is used to control which elements
3108 of EXPR1 shall be assigned. The sense of MASK is specified by
3112 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3113 tree mask, bool invert,
3114 tree count1, tree count2,
3120 gfc_ss *lss_section;
3127 tree index, maskexpr;
3130 /* TODO: handle this special case.
3131 Special case a single function returning an array. */
3132 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3134 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3140 /* Assignment of the form lhs = rhs. */
3141 gfc_start_block (&block);
3143 gfc_init_se (&lse, NULL);
3144 gfc_init_se (&rse, NULL);
3147 lss = gfc_walk_expr (expr1);
3150 /* In each where-assign-stmt, the mask-expr and the variable being
3151 defined shall be arrays of the same shape. */
3152 gcc_assert (lss != gfc_ss_terminator);
3154 /* The assignment needs scalarization. */
3157 /* Find a non-scalar SS from the lhs. */
3158 while (lss_section != gfc_ss_terminator
3159 && lss_section->type != GFC_SS_SECTION)
3160 lss_section = lss_section->next;
3162 gcc_assert (lss_section != gfc_ss_terminator);
3164 /* Initialize the scalarizer. */
3165 gfc_init_loopinfo (&loop);
3168 rss = gfc_walk_expr (expr2);
3169 if (rss == gfc_ss_terminator)
3171 /* The rhs is scalar. Add a ss for the expression. */
3172 rss = gfc_get_ss ();
3174 rss->next = gfc_ss_terminator;
3175 rss->type = GFC_SS_SCALAR;
3179 /* Associate the SS with the loop. */
3180 gfc_add_ss_to_loop (&loop, lss);
3181 gfc_add_ss_to_loop (&loop, rss);
3183 /* Calculate the bounds of the scalarization. */
3184 gfc_conv_ss_startstride (&loop);
3186 /* Resolve any data dependencies in the statement. */
3187 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3189 /* Setup the scalarizing loops. */
3190 gfc_conv_loop_setup (&loop, &expr2->where);
3192 /* Setup the gfc_se structures. */
3193 gfc_copy_loopinfo_to_se (&lse, &loop);
3194 gfc_copy_loopinfo_to_se (&rse, &loop);
3197 gfc_mark_ss_chain_used (rss, 1);
/* loop.temp_ss non-NULL means the scalarizer introduced a temporary
   for the lhs (dependency between lhs and rhs).  */
3198 if (loop.temp_ss == NULL)
3201 gfc_mark_ss_chain_used (lss, 1);
3205 lse.ss = loop.temp_ss;
3206 gfc_mark_ss_chain_used (lss, 3);
3207 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3210 /* Start the scalarized loop body. */
3211 gfc_start_scalarized_body (&loop, &body);
3213 /* Translate the expression. */
3214 gfc_conv_expr (&rse, expr2);
3215 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3217 gfc_conv_tmp_array_ref (&lse);
3218 gfc_advance_se_ss_chain (&lse);
3221 gfc_conv_expr (&lse, expr1);
3223 /* Form the mask expression according to the mask. */
3225 maskexpr = gfc_build_array_ref (mask, index, NULL);
3227 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3229 /* Use the scalar assignment as is. */
3231 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3232 loop.temp_ss != NULL, false);
3234 tmp = gfc_conv_operator_assign (&lse, &rse, sym);
/* Guard the assignment with the mask element.  */
3236 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3238 gfc_add_expr_to_block (&body, tmp);
3240 if (lss == gfc_ss_terminator)
3242 /* Increment count1. */
3243 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3244 count1, gfc_index_one_node);
3245 gfc_add_modify (&body, count1, tmp);
3247 /* Use the scalar assignment as is. */
3248 gfc_add_block_to_block (&block, &body);
3252 gcc_assert (lse.ss == gfc_ss_terminator
3253 && rse.ss == gfc_ss_terminator);
3255 if (loop.temp_ss != NULL)
3257 /* Increment count1 before finishing the main body of a scalarized
3259 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3260 count1, gfc_index_one_node);
3261 gfc_add_modify (&body, count1, tmp);
3262 gfc_trans_scalarized_loop_boundary (&loop, &body);
3264 /* We need to copy the temporary to the actual lhs. */
3265 gfc_init_se (&lse, NULL);
3266 gfc_init_se (&rse, NULL);
3267 gfc_copy_loopinfo_to_se (&lse, &loop);
3268 gfc_copy_loopinfo_to_se (&rse, &loop);
3270 rse.ss = loop.temp_ss;
3273 gfc_conv_tmp_array_ref (&rse);
3274 gfc_advance_se_ss_chain (&rse);
3275 gfc_conv_expr (&lse, expr1);
3277 gcc_assert (lse.ss == gfc_ss_terminator
3278 && rse.ss == gfc_ss_terminator);
3280 /* Form the mask expression according to the mask tree list. */
3282 maskexpr = gfc_build_array_ref (mask, index, NULL);
3284 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3287 /* Use the scalar assignment as is. */
3288 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3289 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3290 gfc_add_expr_to_block (&body, tmp);
3292 /* Increment count2. */
3293 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3294 count2, gfc_index_one_node);
3295 gfc_add_modify (&body, count2, tmp);
3299 /* Increment count1. */
3300 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3301 count1, gfc_index_one_node);
3302 gfc_add_modify (&body, count1, tmp);
3305 /* Generate the copying loops. */
3306 gfc_trans_scalarizing_loops (&loop, &body);
3308 /* Wrap the whole thing up. */
3309 gfc_add_block_to_block (&block, &loop.pre);
3310 gfc_add_block_to_block (&block, &loop.post);
3311 gfc_cleanup_loop (&loop);
3314 return gfc_finish_block (&block);
/* NOTE(review): elided extract -- the clause-counting conditionals and the
   statement-dispatch switch have missing braces/cases at the embedded
   line-number gaps.  */
3318 /* Translate the WHERE construct or statement.
3319 This function can be called iteratively to translate the nested WHERE
3320 construct or statement.
3321 MASK is the control mask. */
3324 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3325 forall_info * nested_forall_info, stmtblock_t * block)
3327 stmtblock_t inner_size_body;
3328 tree inner_size, size;
3337 tree count1, count2;
3341 tree pcmask = NULL_TREE;
3342 tree ppmask = NULL_TREE;
3343 tree cmask = NULL_TREE;
3344 tree pmask = NULL_TREE;
3345 gfc_actual_arglist *arg;
3347 /* the WHERE statement or the WHERE construct statement. */
3348 cblock = code->block;
3350 /* As the mask array can be very big, prefer compact boolean types. */
3351 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3353 /* Determine which temporary masks are needed. */
3356 /* One clause: No ELSEWHEREs. */
3357 need_cmask = (cblock->next != 0);
3360 else if (cblock->block->block)
3362 /* Three or more clauses: Conditional ELSEWHEREs. */
3366 else if (cblock->next)
3368 /* Two clauses, the first non-empty. */
3370 need_pmask = (mask != NULL_TREE
3371 && cblock->block->next != 0);
3373 else if (!cblock->block->next)
3375 /* Two clauses, both empty. */
3379 /* Two clauses, the first empty, the second non-empty. */
3382 need_cmask = (cblock->block->expr != 0);
3391 if (need_cmask || need_pmask)
3393 /* Calculate the size of temporary needed by the mask-expr. */
3394 gfc_init_block (&inner_size_body);
3395 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3396 &inner_size_body, &lss, &rss);
3398 /* Calculate the total size of temporary needed. */
3399 size = compute_overall_iter_number (nested_forall_info, inner_size,
3400 &inner_size_body, block);
3402 /* Check whether the size is negative. */
3403 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3404 gfc_index_zero_node);
3405 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3406 gfc_index_zero_node, size);
3407 size = gfc_evaluate_now (size, block);
3409 /* Allocate temporary for WHERE mask if needed. */
3411 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3414 /* Allocate temporary for !mask if needed. */
3416 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3422 /* Each time around this loop, the where clause is conditional
3423 on the value of mask and invert, which are updated at the
3424 bottom of the loop. */
3426 /* Has mask-expr. */
3429 /* Ensure that the WHERE mask will be evaluated exactly once.
3430 If there are no statements in this WHERE/ELSEWHERE clause,
3431 then we don't need to update the control mask (cmask).
3432 If this is the last clause of the WHERE construct, then
3433 we don't need to update the pending control mask (pmask). */
3435 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3437 cblock->next ? cmask : NULL_TREE,
3438 cblock->block ? pmask : NULL_TREE,
3441 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3443 (cblock->next || cblock->block)
3444 ? cmask : NULL_TREE,
3445 NULL_TREE, mask_type, block);
3449 /* It's a final elsewhere-stmt. No mask-expr is present. */
3453 /* The body of this where clause is controlled by cmask with
3454 sense specified by invert. */
3456 /* Get the assignment statement of a WHERE statement, or the first
3457 statement in where-body-construct of a WHERE construct. */
3458 cnext = cblock->next;
3463 /* WHERE assignment statement. */
3464 case EXEC_ASSIGN_CALL:
3466 arg = cnext->ext.actual;
3467 expr1 = expr2 = NULL;
3468 for (; arg; arg = arg->next)
3480 expr1 = cnext->expr;
3481 expr2 = cnext->expr2;
3483 if (nested_forall_info != NULL)
3485 need_temp = gfc_check_dependency (expr1, expr2, 0);
3486 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3487 gfc_trans_assign_need_temp (expr1, expr2,
3489 nested_forall_info, block);
3492 /* Variables to control maskexpr. */
3493 count1 = gfc_create_var (gfc_array_index_type, "count1");
3494 count2 = gfc_create_var (gfc_array_index_type, "count2");
3495 gfc_add_modify (block, count1, gfc_index_zero_node);
3496 gfc_add_modify (block, count2, gfc_index_zero_node);
3498 tmp = gfc_trans_where_assign (expr1, expr2,
3501 cnext->resolved_sym);
3503 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3505 gfc_add_expr_to_block (block, tmp);
3510 /* Variables to control maskexpr. */
3511 count1 = gfc_create_var (gfc_array_index_type, "count1");
3512 count2 = gfc_create_var (gfc_array_index_type, "count2");
3513 gfc_add_modify (block, count1, gfc_index_zero_node);
3514 gfc_add_modify (block, count2, gfc_index_zero_node);
3516 tmp = gfc_trans_where_assign (expr1, expr2,
3519 cnext->resolved_sym);
3520 gfc_add_expr_to_block (block, tmp);
3525 /* WHERE or WHERE construct is part of a where-body-construct. */
3527 gfc_trans_where_2 (cnext, cmask, invert,
3528 nested_forall_info, block);
3535 /* The next statement within the same where-body-construct. */
3536 cnext = cnext->next;
3538 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3539 cblock = cblock->block;
3540 if (mask == NULL_TREE)
3542 /* If we're the initial WHERE, we can simply invert the sense
3543 of the current mask to obtain the "mask" for the remaining
3550 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3556 /* If we allocated a pending mask array, deallocate it now. */
3559 tmp = gfc_call_free (ppmask);
3560 gfc_add_expr_to_block (block, tmp);
3563 /* If we allocated a current mask array, deallocate it now. */
3566 tmp = gfc_call_free (pcmask);
3567 gfc_add_expr_to_block (block, tmp);
/* NOTE(review): elided extract -- braces and a few statements are missing
   at the embedded line-number gaps (e.g. 3583 -> 3588).  */
3571 /* Translate a simple WHERE construct or statement without dependencies.
3572 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3573 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3574 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
3577 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3579 stmtblock_t block, body;
3580 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3581 tree tmp, cexpr, tstmt, estmt;
3582 gfc_ss *css, *tdss, *tsss;
3583 gfc_se cse, tdse, tsse, edse, esse;
3588 cond = cblock->expr;
3589 tdst = cblock->next->expr;
3590 tsrc = cblock->next->expr2;
3591 edst = eblock ? eblock->next->expr : NULL;
3592 esrc = eblock ? eblock->next->expr2 : NULL;
3594 gfc_start_block (&block);
3595 gfc_init_loopinfo (&loop);
3597 /* Handle the condition. */
3598 gfc_init_se (&cse, NULL);
3599 css = gfc_walk_expr (cond);
3600 gfc_add_ss_to_loop (&loop, css);
3602 /* Handle the then-clause. */
3603 gfc_init_se (&tdse, NULL);
3604 gfc_init_se (&tsse, NULL);
3605 tdss = gfc_walk_expr (tdst);
3606 tsss = gfc_walk_expr (tsrc);
/* A scalar then-source gets a synthetic scalar SS entry.  */
3607 if (tsss == gfc_ss_terminator)
3609 tsss = gfc_get_ss ();
3611 tsss->next = gfc_ss_terminator;
3612 tsss->type = GFC_SS_SCALAR;
3615 gfc_add_ss_to_loop (&loop, tdss);
3616 gfc_add_ss_to_loop (&loop, tsss);
3620 /* Handle the else clause. */
3621 gfc_init_se (&edse, NULL);
3622 gfc_init_se (&esse, NULL);
3623 edss = gfc_walk_expr (edst);
3624 esss = gfc_walk_expr (esrc);
3625 if (esss == gfc_ss_terminator)
3627 esss = gfc_get_ss ();
3629 esss->next = gfc_ss_terminator;
3630 esss->type = GFC_SS_SCALAR;
3633 gfc_add_ss_to_loop (&loop, edss);
3634 gfc_add_ss_to_loop (&loop, esss);
3637 gfc_conv_ss_startstride (&loop);
3638 gfc_conv_loop_setup (&loop, &tdst->where);
3640 gfc_mark_ss_chain_used (css, 1);
3641 gfc_mark_ss_chain_used (tdss, 1);
3642 gfc_mark_ss_chain_used (tsss, 1);
3645 gfc_mark_ss_chain_used (edss, 1);
3646 gfc_mark_ss_chain_used (esss, 1);
3649 gfc_start_scalarized_body (&loop, &body);
3651 gfc_copy_loopinfo_to_se (&cse, &loop);
3652 gfc_copy_loopinfo_to_se (&tdse, &loop);
3653 gfc_copy_loopinfo_to_se (&tsse, &loop);
3659 gfc_copy_loopinfo_to_se (&edse, &loop);
3660 gfc_copy_loopinfo_to_se (&esse, &loop);
3665 gfc_conv_expr (&cse, cond);
3666 gfc_add_block_to_block (&body, &cse.pre);
3669 gfc_conv_expr (&tsse, tsrc);
3670 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3672 gfc_conv_tmp_array_ref (&tdse);
3673 gfc_advance_se_ss_chain (&tdse);
3676 gfc_conv_expr (&tdse, tdst);
3680 gfc_conv_expr (&esse, esrc);
3681 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3683 gfc_conv_tmp_array_ref (&edse);
3684 gfc_advance_se_ss_chain (&edse);
3687 gfc_conv_expr (&edse, edst);
/* Both loops required by the standard are coalesced into a single
   element-wise COND_EXPR selecting the then- or else-assignment.  */
3690 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3691 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3692 : build_empty_stmt ();
3693 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3694 gfc_add_expr_to_block (&body, tmp);
3695 gfc_add_block_to_block (&body, &cse.post);
3697 gfc_trans_scalarizing_loops (&loop, &body);
3698 gfc_add_block_to_block (&block, &loop.pre);
3699 gfc_add_block_to_block (&block, &loop.post);
3700 gfc_cleanup_loop (&loop);
3702 return gfc_finish_block (&block);
/* NOTE(review): elided extract -- some conditions and braces are missing at
   the embedded line-number gaps (e.g. 3716 -> 3718).  */
3705 /* As the WHERE or WHERE construct statement can be nested, we call
3706 gfc_trans_where_2 to do the translation, and pass the initial
3707 NULL values for both the control mask and the pending control mask. */
3710 gfc_trans_where (gfc_code * code)
3716 cblock = code->block;
3718 && cblock->next->op == EXEC_ASSIGN
3719 && !cblock->next->next)
3721 eblock = cblock->block;
3724 /* A simple "WHERE (cond) x = y" statement or block is
3725 dependence free if cond is not dependent upon writing x,
3726 and the source y is unaffected by the destination x. */
3727 if (!gfc_check_dependency (cblock->next->expr,
3729 && !gfc_check_dependency (cblock->next->expr,
3730 cblock->next->expr2, 0))
3731 return gfc_trans_where_3 (cblock, NULL);
3733 else if (!eblock->expr
3736 && eblock->next->op == EXEC_ASSIGN
3737 && !eblock->next->next)
3739 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3740 block is dependence free if cond is not dependent on writes
3741 to x1 and x2, y1 is not dependent on writes to x2, and y2
3742 is not dependent on writes to x1, and both y's are not
3743 dependent upon their own x's. In addition to this, the
3744 final two dependency checks below exclude all but the same
3745 array reference if the where and elsewhere destinations
3746 are the same. In short, this is VERY conservative and this
3747 is needed because the two loops, required by the standard
3748 are coalesced in gfc_trans_where_3. */
3749 if (!gfc_check_dependency(cblock->next->expr,
3751 && !gfc_check_dependency(eblock->next->expr,
3753 && !gfc_check_dependency(cblock->next->expr,
3754 eblock->next->expr2, 1)
3755 && !gfc_check_dependency(eblock->next->expr,
3756 cblock->next->expr2, 1)
3757 && !gfc_check_dependency(cblock->next->expr,
3758 cblock->next->expr2, 1)
3759 && !gfc_check_dependency(eblock->next->expr,
3760 eblock->next->expr2, 1)
3761 && !gfc_check_dependency(cblock->next->expr,
3762 eblock->next->expr, 0)
3763 && !gfc_check_dependency(eblock->next->expr,
3764 cblock->next->expr, 0))
3765 return gfc_trans_where_3 (cblock, eblock);
/* General case: fall back to the fully masked translation.  */
3769 gfc_start_block (&block);
3771 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3773 return gfc_finish_block (&block);
3777 /* CYCLE a DO loop. The label decl has already been created by
3778 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
3779 node at the head of the loop. We must mark the label as used. */
3782 gfc_trans_cycle (gfc_code * code)
3786 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3787 TREE_USED (cycle_label) = 1;
3788 return build1_v (GOTO_EXPR, cycle_label);
3792 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3793 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3797 gfc_trans_exit (gfc_code * code)
3801 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3802 TREE_USED (exit_label) = 1;
3803 return build1_v (GOTO_EXPR, exit_label);
/* NOTE(review): elided extract -- declarations, braces and the early-return
   path are missing at the embedded line-number gaps (e.g. 3810 -> 3822).  */
3807 /* Translate the ALLOCATE statement. */
3810 gfc_trans_allocate (gfc_code * code)
3822 if (!code->ext.alloc_list)
3825 gfc_start_block (&block);
/* STAT= present: allocate through a status variable and branch to
   error_label on failure instead of aborting.  */
3829 tree gfc_int4_type_node = gfc_get_int_type (4);
3831 stat = gfc_create_var (gfc_int4_type_node, "stat");
3832 pstat = build_fold_addr_expr (stat);
3834 error_label = gfc_build_label_decl (NULL_TREE);
3835 TREE_USED (error_label) = 1;
3838 pstat = stat = error_label = NULL_TREE;
3840 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3844 gfc_init_se (&se, NULL);
3845 gfc_start_block (&se.pre);
3847 se.want_pointer = 1;
3848 se.descriptor_only = 1;
3849 gfc_conv_expr (&se, expr);
3851 if (!gfc_array_allocate (&se, expr, pstat))
3853 /* A scalar or derived type. */
3854 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
3856 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3857 tmp = se.string_length;
3859 tmp = gfc_allocate_with_status (&se.pre, tmp, pstat);
3860 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
3861 fold_convert (TREE_TYPE (se.expr), tmp));
3862 gfc_add_expr_to_block (&se.pre, tmp);
/* If stat != 0, jump to the error label after this allocation.  */
3866 tmp = build1_v (GOTO_EXPR, error_label);
3867 parm = fold_build2 (NE_EXPR, boolean_type_node,
3868 stat, build_int_cst (TREE_TYPE (stat), 0));
3869 tmp = fold_build3 (COND_EXPR, void_type_node,
3870 parm, tmp, build_empty_stmt ());
3871 gfc_add_expr_to_block (&se.pre, tmp);
3874 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3876 tmp = build_fold_indirect_ref (se.expr);
3877 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3878 gfc_add_expr_to_block (&se.pre, tmp);
3883 tmp = gfc_finish_block (&se.pre);
3884 gfc_add_expr_to_block (&block, tmp);
3887 /* Assign the value to the status variable. */
3890 tmp = build1_v (LABEL_EXPR, error_label);
3891 gfc_add_expr_to_block (&block, tmp);
3893 gfc_init_se (&se, NULL);
3894 gfc_conv_expr_lhs (&se, code->expr);
3895 tmp = convert (TREE_TYPE (se.expr), stat);
3896 gfc_add_modify (&block, se.expr, tmp);
3899 return gfc_finish_block (&block);
3903 /* Translate a DEALLOCATE statement.
3904 There are two cases within the for loop:
3905 (1) deallocate(a1, a2, a3) is translated into the following sequence
3906 _gfortran_deallocate(a1, 0B)
3907 _gfortran_deallocate(a2, 0B)
3908 _gfortran_deallocate(a3, 0B)
3909 where the STAT= variable is passed a NULL pointer.
 3910 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
      sequence of statements:
3912 _gfortran_deallocate(a1, &stat)
3913 astat = astat + stat
3914 _gfortran_deallocate(a2, &stat)
3915 astat = astat + stat
3916 _gfortran_deallocate(a3, &stat)
3917 astat = astat + stat
3918 In case (1), we simply return at the end of the for loop. In case (2)
3919 we set STAT= astat. */
3921 gfc_trans_deallocate (gfc_code * code)
3926 tree apstat, astat, pstat, stat, tmp;
3929 gfc_start_block (&block);
3931 /* Set up the optional STAT= */
3934 tree gfc_int4_type_node = gfc_get_int_type (4);
3936 /* Variable used with the library call. */
3937 stat = gfc_create_var (gfc_int4_type_node, "stat");
3938 pstat = build_fold_addr_expr (stat);
3940 /* Running total of possible deallocation failures. */
3941 astat = gfc_create_var (gfc_int4_type_node, "astat");
3942 apstat = build_fold_addr_expr (astat);
3944 /* Initialize astat to 0. */
3945 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
3948 pstat = apstat = stat = astat = NULL_TREE;
3950 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3953 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3955 gfc_init_se (&se, NULL);
3956 gfc_start_block (&se.pre);
3958 se.want_pointer = 1;
3959 se.descriptor_only = 1;
3960 gfc_conv_expr (&se, expr);
3962 if (expr->ts.type == BT_DERIVED
3963 && expr->ts.derived->attr.alloc_comp)
3966 gfc_ref *last = NULL;
3967 for (ref = expr->ref; ref; ref = ref->next)
3968 if (ref->type == REF_COMPONENT)
3971 /* Do not deallocate the components of a derived type
3972 ultimate pointer component. */
3973 if (!(last && last->u.c.component->pointer)
3974 && !(!last && expr->symtree->n.sym->attr.pointer))
3976 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
3978 gfc_add_expr_to_block (&se.pre, tmp);
3983 tmp = gfc_array_deallocate (se.expr, pstat);
3986 tmp = gfc_deallocate_with_status (se.expr, pstat, false);
3987 gfc_add_expr_to_block (&se.pre, tmp);
3989 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
3990 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
3993 gfc_add_expr_to_block (&se.pre, tmp);
3995 /* Keep track of the number of failed deallocations by adding stat
3996 of the last deallocation to the running total. */
3999 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4000 gfc_add_modify (&se.pre, astat, apstat);
4003 tmp = gfc_finish_block (&se.pre);
4004 gfc_add_expr_to_block (&block, tmp);
4008 /* Assign the value to the status variable. */
4011 gfc_init_se (&se, NULL);
4012 gfc_conv_expr_lhs (&se, code->expr);
4013 tmp = convert (TREE_TYPE (se.expr), astat);
4014 gfc_add_modify (&block, se.expr, tmp);
4017 return gfc_finish_block (&block);