1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
27 #include "coretypes.h"
29 #include "tree-gimple.h"
36 #include "trans-stmt.h"
37 #include "trans-types.h"
38 #include "trans-array.h"
39 #include "trans-const.h"
41 #include "dependency.h"
43 typedef struct iter_info
49 struct iter_info *next;
53 typedef struct forall_info
60 struct forall_info *prev_nest;
64 static void gfc_trans_where_2 (gfc_code *, tree, bool,
65 forall_info *, stmtblock_t *);
67 /* Translate a F95 label number to a LABEL_EXPR. */
70 gfc_trans_label_here (gfc_code * code)
72 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
76 /* Given a variable expression which has been ASSIGNed to, find the decl
77 containing the auxiliary variables. For variables in common blocks this
81 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
83 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
84 gfc_conv_expr (se, expr);
85 /* Deals with variable in common block. Get the field declaration. */
86 if (TREE_CODE (se->expr) == COMPONENT_REF)
87 se->expr = TREE_OPERAND (se->expr, 1);
88 /* Deals with dummy argument. Get the parameter declaration. */
89 else if (TREE_CODE (se->expr) == INDIRECT_REF)
90 se->expr = TREE_OPERAND (se->expr, 0);
93 /* Translate a label assignment statement. */
96 gfc_trans_label_assign (gfc_code * code)
106 /* Start a new block. */
107 gfc_init_se (&se, NULL);
108 gfc_start_block (&se.pre);
109 gfc_conv_label_variable (&se, code->expr);
111 len = GFC_DECL_STRING_LEN (se.expr);
112 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
114 label_tree = gfc_get_label_decl (code->label);
116 if (code->label->defined == ST_LABEL_TARGET)
118 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
119 len_tree = integer_minus_one_node;
123 label_str = code->label->format->value.character.string;
124 label_len = code->label->format->value.character.length;
125 len_tree = build_int_cst (NULL_TREE, label_len);
126 label_tree = gfc_build_string_const (label_len + 1, label_str);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
130 gfc_add_modify_expr (&se.pre, len, len_tree);
131 gfc_add_modify_expr (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
136 /* Translate a GOTO statement. */
139 gfc_trans_goto (gfc_code * code)
141 locus loc = code->loc;
147 if (code->label != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr);
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (tmp, "Assigned label is not a target label",
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
165 target = build1 (GOTO_EXPR, void_type_node, assigned_goto);
166 gfc_add_expr_to_block (&se.pre, target);
167 return gfc_finish_block (&se.pre);
170 /* Check the label list. */
173 target = gfc_get_label_decl (code->label);
174 tmp = gfc_build_addr_expr (pvoid_type_node, target);
175 tmp = build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
176 tmp = build3_v (COND_EXPR, tmp,
177 build1 (GOTO_EXPR, void_type_node, target),
178 build_empty_stmt ());
179 gfc_add_expr_to_block (&se.pre, tmp);
182 while (code != NULL);
183 gfc_trans_runtime_check (boolean_true_node,
184 "Assigned label is not in the list", &se.pre, &loc);
186 return gfc_finish_block (&se.pre);
190 /* Translate an ENTRY statement. Just adds a label for this entry point. */
192 gfc_trans_entry (gfc_code * code)
194 return build1_v (LABEL_EXPR, code->ext.entry->label);
198 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
199 elemental subroutines. Make temporaries for output arguments if any such
200 dependencies are found. Output arguments are chosen because internal_unpack
201 can be used, as is, to copy the result back to the variable. */
203 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
204 gfc_symbol * sym, gfc_actual_arglist * arg)
206 gfc_actual_arglist *arg0;
208 gfc_formal_arglist *formal;
209 gfc_loopinfo tmp_loop;
221 if (loopse->ss == NULL)
226 formal = sym->formal;
228 /* Loop over all the arguments testing for dependencies. */
229 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
235 /* Obtain the info structure for the current argument. */
237 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
241 info = &ss->data.info;
245 /* If there is a dependency, create a temporary and use it
246 instead of the variable. */
247 fsym = formal ? formal->sym : NULL;
248 if (e->expr_type == EXPR_VARIABLE
250 && fsym->attr.intent == INTENT_OUT
251 && gfc_check_fncall_dependency (e, INTENT_OUT, sym, arg0))
253 /* Make a local loopinfo for the temporary creation, so that
254 none of the other ss->info's have to be renormalized. */
255 gfc_init_loopinfo (&tmp_loop);
256 for (n = 0; n < info->dimen; n++)
258 tmp_loop.to[n] = loopse->loop->to[n];
259 tmp_loop.from[n] = loopse->loop->from[n];
260 tmp_loop.order[n] = loopse->loop->order[n];
263 /* Generate the temporary. Merge the block so that the
264 declarations are put at the right binding level. */
265 size = gfc_create_var (gfc_array_index_type, NULL);
266 data = gfc_create_var (pvoid_type_node, NULL);
267 gfc_start_block (&block);
268 tmp = gfc_typenode_for_spec (&e->ts);
269 tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
270 &tmp_loop, info, tmp,
272 gfc_add_modify_expr (&se->pre, size, tmp);
273 tmp = fold_convert (pvoid_type_node, info->data);
274 gfc_add_modify_expr (&se->pre, data, tmp);
275 gfc_merge_block_scope (&block);
277 /* Obtain the argument descriptor for unpacking. */
278 gfc_init_se (&parmse, NULL);
279 parmse.want_pointer = 1;
280 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
281 gfc_add_block_to_block (&se->pre, &parmse.pre);
283 /* Calculate the offset for the temporary. */
284 offset = gfc_index_zero_node;
285 for (n = 0; n < info->dimen; n++)
287 tmp = gfc_conv_descriptor_stride (info->descriptor,
289 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
290 loopse->loop->from[n], tmp);
291 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
294 info->offset = gfc_create_var (gfc_array_index_type, NULL);
295 gfc_add_modify_expr (&se->pre, info->offset, offset);
297 /* Copy the result back using unpack. */
298 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
299 gfc_add_expr_to_block (&se->post, tmp);
301 gfc_add_block_to_block (&se->post, &parmse.post);
307 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
310 gfc_trans_call (gfc_code * code, bool dependency_check)
314 int has_alternate_specifier;
316 /* A CALL starts a new block because the actual arguments may have to
317 be evaluated first. */
318 gfc_init_se (&se, NULL);
319 gfc_start_block (&se.pre);
321 gcc_assert (code->resolved_sym);
323 ss = gfc_ss_terminator;
324 if (code->resolved_sym->attr.elemental)
325 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
327 /* Is not an elemental subroutine call with array valued arguments. */
328 if (ss == gfc_ss_terminator)
331 /* Translate the call. */
332 has_alternate_specifier
333 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
336 /* A subroutine without side-effect, by definition, does nothing! */
337 TREE_SIDE_EFFECTS (se.expr) = 1;
339 /* Chain the pieces together and return the block. */
340 if (has_alternate_specifier)
342 gfc_code *select_code;
344 select_code = code->next;
345 gcc_assert(select_code->op == EXEC_SELECT);
346 sym = select_code->expr->symtree->n.sym;
347 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
348 if (sym->backend_decl == NULL)
349 sym->backend_decl = gfc_get_symbol_decl (sym);
350 gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
353 gfc_add_expr_to_block (&se.pre, se.expr);
355 gfc_add_block_to_block (&se.pre, &se.post);
360 /* An elemental subroutine call with array valued arguments has
367 /* gfc_walk_elemental_function_args renders the ss chain in the
368 reverse order to the actual argument order. */
369 ss = gfc_reverse_ss (ss);
371 /* Initialize the loop. */
372 gfc_init_se (&loopse, NULL);
373 gfc_init_loopinfo (&loop);
374 gfc_add_ss_to_loop (&loop, ss);
376 gfc_conv_ss_startstride (&loop);
377 gfc_conv_loop_setup (&loop);
378 gfc_mark_ss_chain_used (ss, 1);
380 /* Convert the arguments, checking for dependencies. */
381 gfc_copy_loopinfo_to_se (&loopse, &loop);
384 /* For operator assignment, we need to do dependency checking.
385 We also check the intent of the parameters. */
386 if (dependency_check)
389 sym = code->resolved_sym;
390 gcc_assert (sym->formal->sym->attr.intent = INTENT_OUT);
391 gcc_assert (sym->formal->next->sym->attr.intent = INTENT_IN);
392 gfc_conv_elemental_dependencies (&se, &loopse, sym,
396 /* Generate the loop body. */
397 gfc_start_scalarized_body (&loop, &body);
398 gfc_init_block (&block);
400 /* Add the subroutine call to the block. */
401 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
403 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
405 gfc_add_block_to_block (&block, &loopse.pre);
406 gfc_add_block_to_block (&block, &loopse.post);
408 /* Finish up the loop block and the loop. */
409 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
410 gfc_trans_scalarizing_loops (&loop, &body);
411 gfc_add_block_to_block (&se.pre, &loop.pre);
412 gfc_add_block_to_block (&se.pre, &loop.post);
413 gfc_add_block_to_block (&se.pre, &se.post);
414 gfc_cleanup_loop (&loop);
417 return gfc_finish_block (&se.pre);
421 /* Translate the RETURN statement. */
424 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
432 /* If code->expr is not NULL, this return statement must appear
433 in a subroutine and current_fake_result_decl has already
436 result = gfc_get_fake_result_decl (NULL, 0);
439 gfc_warning ("An alternate return at %L without a * dummy argument",
441 return build1_v (GOTO_EXPR, gfc_get_return_label ());
444 /* Start a new block for this statement. */
445 gfc_init_se (&se, NULL);
446 gfc_start_block (&se.pre);
448 gfc_conv_expr (&se, code->expr);
450 tmp = build2 (MODIFY_EXPR, TREE_TYPE (result), result, se.expr);
451 gfc_add_expr_to_block (&se.pre, tmp);
453 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
454 gfc_add_expr_to_block (&se.pre, tmp);
455 gfc_add_block_to_block (&se.pre, &se.post);
456 return gfc_finish_block (&se.pre);
459 return build1_v (GOTO_EXPR, gfc_get_return_label ());
463 /* Translate the PAUSE statement. We have to translate this statement
464 to a runtime library call. */
467 gfc_trans_pause (gfc_code * code)
469 tree gfc_int4_type_node = gfc_get_int_type (4);
473 /* Start a new block for this statement. */
474 gfc_init_se (&se, NULL);
475 gfc_start_block (&se.pre);
478 if (code->expr == NULL)
480 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
481 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
485 gfc_conv_expr_reference (&se, code->expr);
486 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
487 se.expr, se.string_length);
490 gfc_add_expr_to_block (&se.pre, tmp);
492 gfc_add_block_to_block (&se.pre, &se.post);
494 return gfc_finish_block (&se.pre);
498 /* Translate the STOP statement. We have to translate this statement
499 to a runtime library call. */
502 gfc_trans_stop (gfc_code * code)
504 tree gfc_int4_type_node = gfc_get_int_type (4);
508 /* Start a new block for this statement. */
509 gfc_init_se (&se, NULL);
510 gfc_start_block (&se.pre);
513 if (code->expr == NULL)
515 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
516 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
520 gfc_conv_expr_reference (&se, code->expr);
521 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
522 se.expr, se.string_length);
525 gfc_add_expr_to_block (&se.pre, tmp);
527 gfc_add_block_to_block (&se.pre, &se.post);
529 return gfc_finish_block (&se.pre);
533 /* Generate GENERIC for the IF construct. This function also deals with
534 the simple IF statement, because the front end translates the IF
535 statement into an IF construct.
567 where COND_S is the simplified version of the predicate. PRE_COND_S
568 are the pre side-effects produced by the translation of the
570 We need to build the chain recursively otherwise we run into
571 problems with folding incomplete statements. */
574 gfc_trans_if_1 (gfc_code * code)
579 /* Check for an unconditional ELSE clause. */
581 return gfc_trans_code (code->next);
583 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
584 gfc_init_se (&if_se, NULL);
585 gfc_start_block (&if_se.pre);
587 /* Calculate the IF condition expression. */
588 gfc_conv_expr_val (&if_se, code->expr);
590 /* Translate the THEN clause. */
591 stmt = gfc_trans_code (code->next);
593 /* Translate the ELSE clause. */
595 elsestmt = gfc_trans_if_1 (code->block);
597 elsestmt = build_empty_stmt ();
599 /* Build the condition expression and add it to the condition block. */
600 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
602 gfc_add_expr_to_block (&if_se.pre, stmt);
604 /* Finish off this statement. */
605 return gfc_finish_block (&if_se.pre);
609 gfc_trans_if (gfc_code * code)
611 /* Ignore the top EXEC_IF, it only announces an IF construct. The
612 actual code we must translate is in code->block. */
614 return gfc_trans_if_1 (code->block);
618 /* Translate an arithmetic IF expression.
620 IF (cond) label1, label2, label3 translates to
632 An optimized version can be generated in case of equal labels.
633 E.g., if label1 is equal to label2, we can translate it to
642 gfc_trans_arithmetic_if (gfc_code * code)
650 /* Start a new block. */
651 gfc_init_se (&se, NULL);
652 gfc_start_block (&se.pre);
654 /* Pre-evaluate COND. */
655 gfc_conv_expr_val (&se, code->expr);
656 se.expr = gfc_evaluate_now (se.expr, &se.pre);
658 /* Build something to compare with. */
659 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
661 if (code->label->value != code->label2->value)
663 /* If (cond < 0) take branch1 else take branch2.
664 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
665 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
666 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
668 if (code->label->value != code->label3->value)
669 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
671 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
673 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
676 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
678 if (code->label->value != code->label3->value
679 && code->label2->value != code->label3->value)
681 /* if (cond <= 0) take branch1 else take branch2. */
682 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
683 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
684 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
687 /* Append the COND_EXPR to the evaluation of COND, and return. */
688 gfc_add_expr_to_block (&se.pre, branch1);
689 return gfc_finish_block (&se.pre);
693 /* Translate the simple DO construct. This is where the loop variable has
694 integer type and step +-1. We can't use this in the general case
695 because integer overflow and floating point errors could give incorrect
697 We translate a do loop from:
699 DO dovar = from, to, step
705 [Evaluate loop bounds and step]
707 if ((step > 0) ? (dovar <= to) : (dovar => to))
713 cond = (dovar == to);
715 if (cond) goto end_label;
720 This helps the optimizers by avoiding the extra induction variable
721 used in the general case. */
724 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
725 tree from, tree to, tree step)
734 type = TREE_TYPE (dovar);
736 /* Initialize the DO variable: dovar = from. */
737 gfc_add_modify_expr (pblock, dovar, from);
739 /* Cycle and exit statements are implemented with gotos. */
740 cycle_label = gfc_build_label_decl (NULL_TREE);
741 exit_label = gfc_build_label_decl (NULL_TREE);
743 /* Put the labels where they can be found later. See gfc_trans_do(). */
744 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
747 gfc_start_block (&body);
749 /* Main loop body. */
750 tmp = gfc_trans_code (code->block->next);
751 gfc_add_expr_to_block (&body, tmp);
753 /* Label for cycle statements (if needed). */
754 if (TREE_USED (cycle_label))
756 tmp = build1_v (LABEL_EXPR, cycle_label);
757 gfc_add_expr_to_block (&body, tmp);
760 /* Evaluate the loop condition. */
761 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
762 cond = gfc_evaluate_now (cond, &body);
764 /* Increment the loop variable. */
765 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
766 gfc_add_modify_expr (&body, dovar, tmp);
769 tmp = build1_v (GOTO_EXPR, exit_label);
770 TREE_USED (exit_label) = 1;
771 tmp = fold_build3 (COND_EXPR, void_type_node,
772 cond, tmp, build_empty_stmt ());
773 gfc_add_expr_to_block (&body, tmp);
775 /* Finish the loop body. */
776 tmp = gfc_finish_block (&body);
777 tmp = build1_v (LOOP_EXPR, tmp);
779 /* Only execute the loop if the number of iterations is positive. */
780 if (tree_int_cst_sgn (step) > 0)
781 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
783 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
784 tmp = fold_build3 (COND_EXPR, void_type_node,
785 cond, tmp, build_empty_stmt ());
786 gfc_add_expr_to_block (pblock, tmp);
788 /* Add the exit label. */
789 tmp = build1_v (LABEL_EXPR, exit_label);
790 gfc_add_expr_to_block (pblock, tmp);
792 return gfc_finish_block (pblock);
795 /* Translate the DO construct. This obviously is one of the most
796 important ones to get right with any compiler, but especially
799 We special case some loop forms as described in gfc_trans_simple_do.
800 For other cases we implement them with a separate loop count,
801 as described in the standard.
803 We translate a do loop from:
805 DO dovar = from, to, step
811 [evaluate loop bounds and step]
812 empty = (step > 0 ? to < from : to > from);
813 countm1 = (to - from) / step;
815 if (empty) goto exit_label;
822 if (countm1 ==0) goto exit_label;
826 countm1 is an unsigned integer. It is equal to the loop count minus one,
827 because the loop count itself can overflow. */
830 gfc_trans_do (gfc_code * code)
849 gfc_start_block (&block);
851 /* Evaluate all the expressions in the iterator. */
852 gfc_init_se (&se, NULL);
853 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
854 gfc_add_block_to_block (&block, &se.pre);
856 type = TREE_TYPE (dovar);
858 gfc_init_se (&se, NULL);
859 gfc_conv_expr_val (&se, code->ext.iterator->start);
860 gfc_add_block_to_block (&block, &se.pre);
861 from = gfc_evaluate_now (se.expr, &block);
863 gfc_init_se (&se, NULL);
864 gfc_conv_expr_val (&se, code->ext.iterator->end);
865 gfc_add_block_to_block (&block, &se.pre);
866 to = gfc_evaluate_now (se.expr, &block);
868 gfc_init_se (&se, NULL);
869 gfc_conv_expr_val (&se, code->ext.iterator->step);
870 gfc_add_block_to_block (&block, &se.pre);
871 step = gfc_evaluate_now (se.expr, &block);
873 /* Special case simple loops. */
874 if (TREE_CODE (type) == INTEGER_TYPE
875 && (integer_onep (step)
876 || tree_int_cst_equal (step, integer_minus_one_node)))
877 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
879 /* We need a special check for empty loops:
880 empty = (step > 0 ? to < from : to > from); */
881 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
882 fold_convert (type, integer_zero_node));
883 empty = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
884 fold_build2 (LT_EXPR, boolean_type_node, to, from),
885 fold_build2 (GT_EXPR, boolean_type_node, to, from));
887 /* Initialize loop count. This code is executed before we enter the
888 loop body. We generate: countm1 = abs(to - from) / abs(step). */
889 if (TREE_CODE (type) == INTEGER_TYPE)
893 utype = gfc_unsigned_type (type);
895 /* tmp = abs(to - from) / abs(step) */
896 ustep = fold_convert (utype, fold_build1 (ABS_EXPR, type, step));
897 tmp = fold_build3 (COND_EXPR, type, pos_step,
898 fold_build2 (MINUS_EXPR, type, to, from),
899 fold_build2 (MINUS_EXPR, type, from, to));
900 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, fold_convert (utype, tmp),
905 /* TODO: We could use the same width as the real type.
906 This would probably cause more problems that it solves
907 when we implement "long double" types. */
908 utype = gfc_unsigned_type (gfc_array_index_type);
909 tmp = fold_build2 (MINUS_EXPR, type, to, from);
910 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
911 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
913 countm1 = gfc_create_var (utype, "countm1");
914 gfc_add_modify_expr (&block, countm1, tmp);
916 /* Cycle and exit statements are implemented with gotos. */
917 cycle_label = gfc_build_label_decl (NULL_TREE);
918 exit_label = gfc_build_label_decl (NULL_TREE);
919 TREE_USED (exit_label) = 1;
921 /* Initialize the DO variable: dovar = from. */
922 gfc_add_modify_expr (&block, dovar, from);
924 /* If the loop is empty, go directly to the exit label. */
925 tmp = fold_build3 (COND_EXPR, void_type_node, empty,
926 build1_v (GOTO_EXPR, exit_label), build_empty_stmt ());
927 gfc_add_expr_to_block (&block, tmp);
930 gfc_start_block (&body);
932 /* Put these labels where they can be found later. We put the
933 labels in a TREE_LIST node (because TREE_CHAIN is already
934 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
935 label in TREE_VALUE (backend_decl). */
937 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
939 /* Main loop body. */
940 tmp = gfc_trans_code (code->block->next);
941 gfc_add_expr_to_block (&body, tmp);
943 /* Label for cycle statements (if needed). */
944 if (TREE_USED (cycle_label))
946 tmp = build1_v (LABEL_EXPR, cycle_label);
947 gfc_add_expr_to_block (&body, tmp);
950 /* End with the loop condition. Loop until countm1 == 0. */
951 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
952 build_int_cst (utype, 0));
953 tmp = build1_v (GOTO_EXPR, exit_label);
954 tmp = fold_build3 (COND_EXPR, void_type_node,
955 cond, tmp, build_empty_stmt ());
956 gfc_add_expr_to_block (&body, tmp);
958 /* Increment the loop variable. */
959 tmp = build2 (PLUS_EXPR, type, dovar, step);
960 gfc_add_modify_expr (&body, dovar, tmp);
962 /* Decrement the loop count. */
963 tmp = build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
964 gfc_add_modify_expr (&body, countm1, tmp);
966 /* End of loop body. */
967 tmp = gfc_finish_block (&body);
969 /* The for loop itself. */
970 tmp = build1_v (LOOP_EXPR, tmp);
971 gfc_add_expr_to_block (&block, tmp);
973 /* Add the exit label. */
974 tmp = build1_v (LABEL_EXPR, exit_label);
975 gfc_add_expr_to_block (&block, tmp);
977 return gfc_finish_block (&block);
981 /* Translate the DO WHILE construct.
994 if (! cond) goto exit_label;
1000 Because the evaluation of the exit condition `cond' may have side
1001 effects, we can't do much for empty loop bodies. The backend optimizers
1002 should be smart enough to eliminate any dead loops. */
1005 gfc_trans_do_while (gfc_code * code)
1013 /* Everything we build here is part of the loop body. */
1014 gfc_start_block (&block);
1016 /* Cycle and exit statements are implemented with gotos. */
1017 cycle_label = gfc_build_label_decl (NULL_TREE);
1018 exit_label = gfc_build_label_decl (NULL_TREE);
1020 /* Put the labels where they can be found later. See gfc_trans_do(). */
1021 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1023 /* Create a GIMPLE version of the exit condition. */
1024 gfc_init_se (&cond, NULL);
1025 gfc_conv_expr_val (&cond, code->expr);
1026 gfc_add_block_to_block (&block, &cond.pre);
1027 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1029 /* Build "IF (! cond) GOTO exit_label". */
1030 tmp = build1_v (GOTO_EXPR, exit_label);
1031 TREE_USED (exit_label) = 1;
1032 tmp = fold_build3 (COND_EXPR, void_type_node,
1033 cond.expr, tmp, build_empty_stmt ());
1034 gfc_add_expr_to_block (&block, tmp);
1036 /* The main body of the loop. */
1037 tmp = gfc_trans_code (code->block->next);
1038 gfc_add_expr_to_block (&block, tmp);
1040 /* Label for cycle statements (if needed). */
1041 if (TREE_USED (cycle_label))
1043 tmp = build1_v (LABEL_EXPR, cycle_label);
1044 gfc_add_expr_to_block (&block, tmp);
1047 /* End of loop body. */
1048 tmp = gfc_finish_block (&block);
1050 gfc_init_block (&block);
1051 /* Build the loop. */
1052 tmp = build1_v (LOOP_EXPR, tmp);
1053 gfc_add_expr_to_block (&block, tmp);
1055 /* Add the exit label. */
1056 tmp = build1_v (LABEL_EXPR, exit_label);
1057 gfc_add_expr_to_block (&block, tmp);
1059 return gfc_finish_block (&block);
1063 /* Translate the SELECT CASE construct for INTEGER case expressions,
1064 without killing all potential optimizations. The problem is that
1065 Fortran allows unbounded cases, but the back-end does not, so we
1066 need to intercept those before we enter the equivalent SWITCH_EXPR
1069 For example, we translate this,
1072 CASE (:100,101,105:115)
1082 to the GENERIC equivalent,
1086 case (minimum value for typeof(expr) ... 100:
1092 case 200 ... (maximum value for typeof(expr):
1109 gfc_trans_integer_select (gfc_code * code)
1119 gfc_start_block (&block);
1121 /* Calculate the switch expression. */
1122 gfc_init_se (&se, NULL);
1123 gfc_conv_expr_val (&se, code->expr);
1124 gfc_add_block_to_block (&block, &se.pre);
1126 end_label = gfc_build_label_decl (NULL_TREE);
1128 gfc_init_block (&body);
1130 for (c = code->block; c; c = c->block)
1132 for (cp = c->ext.case_list; cp; cp = cp->next)
1137 /* Assume it's the default case. */
1138 low = high = NULL_TREE;
1142 low = gfc_conv_constant_to_tree (cp->low);
1144 /* If there's only a lower bound, set the high bound to the
1145 maximum value of the case expression. */
1147 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1152 /* Three cases are possible here:
1154 1) There is no lower bound, e.g. CASE (:N).
1155 2) There is a lower bound .NE. high bound, that is
1156 a case range, e.g. CASE (N:M) where M>N (we make
1157 sure that M>N during type resolution).
1158 3) There is a lower bound, and it has the same value
1159 as the high bound, e.g. CASE (N:N). This is our
1160 internal representation of CASE(N).
1162 In the first and second case, we need to set a value for
1163 high. In the third case, we don't because the GCC middle
1164 end represents a single case value by just letting high be
1165 a NULL_TREE. We can't do that because we need to be able
1166 to represent unbounded cases. */
1170 && mpz_cmp (cp->low->value.integer,
1171 cp->high->value.integer) != 0))
1172 high = gfc_conv_constant_to_tree (cp->high);
1174 /* Unbounded case. */
1176 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1179 /* Build a label. */
1180 label = gfc_build_label_decl (NULL_TREE);
1182 /* Add this case label.
1183 Add parameter 'label', make it match GCC backend. */
1184 tmp = build3 (CASE_LABEL_EXPR, void_type_node, low, high, label);
1185 gfc_add_expr_to_block (&body, tmp);
1188 /* Add the statements for this case. */
1189 tmp = gfc_trans_code (c->next);
1190 gfc_add_expr_to_block (&body, tmp);
1192 /* Break to the end of the construct. */
1193 tmp = build1_v (GOTO_EXPR, end_label);
1194 gfc_add_expr_to_block (&body, tmp);
1197 tmp = gfc_finish_block (&body);
1198 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1199 gfc_add_expr_to_block (&block, tmp);
1201 tmp = build1_v (LABEL_EXPR, end_label);
1202 gfc_add_expr_to_block (&block, tmp);
1204 return gfc_finish_block (&block);
1208 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1210 There are only two cases possible here, even though the standard
1211 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1212 .FALSE., and DEFAULT.
1214 We never generate more than two blocks here. Instead, we always
1215 try to eliminate the DEFAULT case. This way, we can translate this
1216 kind of SELECT construct to a simple
1220 expression in GENERIC. */
1223 gfc_trans_logical_select (gfc_code * code)
1226 gfc_code *t, *f, *d;
1231 /* Assume we don't have any cases at all. */
1234 /* Now see which ones we actually do have. We can have at most two
1235 cases in a single case list: one for .TRUE. and one for .FALSE.
1236 The default case is always separate. If the cases for .TRUE. and
1237 .FALSE. are in the same case list, the block for that case list
1238 always executed, and we don't generate code a COND_EXPR. */
1239 for (c = code->block; c; c = c->block)
1241 for (cp = c->ext.case_list; cp; cp = cp->next)
1245 if (cp->low->value.logical == 0) /* .FALSE. */
1247 else /* if (cp->value.logical != 0), thus .TRUE. */
1255 /* Start a new block. */
1256 gfc_start_block (&block);
1258 /* Calculate the switch expression. We always need to do this
1259 because it may have side effects. */
1260 gfc_init_se (&se, NULL);
1261 gfc_conv_expr_val (&se, code->expr);
1262 gfc_add_block_to_block (&block, &se.pre);
1264 if (t == f && t != NULL)
1266 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1267 translate the code for these cases, append it to the current
1269 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1273 tree true_tree, false_tree, stmt;
1275 true_tree = build_empty_stmt ();
1276 false_tree = build_empty_stmt ();
1278 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1279 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1280 make the missing case the default case. */
1281 if (t != NULL && f != NULL)
1291 /* Translate the code for each of these blocks, and append it to
1292 the current block. */
1294 true_tree = gfc_trans_code (t->next);
1297 false_tree = gfc_trans_code (f->next);
1299 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1300 true_tree, false_tree);
1301 gfc_add_expr_to_block (&block, stmt);
1304 return gfc_finish_block (&block);
1308 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1309 Instead of generating compares and jumps, it is far simpler to
1310 generate a data structure describing the cases in order and call a
1311 library subroutine that locates the right case.
1312 This is particularly true because this is the only case where we
1313 might have to dispose of a temporary.
1314 The library subroutine returns a pointer to jump to or NULL if no
1315 branches are to be taken. */
1318 gfc_trans_character_select (gfc_code *code)
1320 tree init, node, end_label, tmp, type, *labels;
1322 stmtblock_t block, body;
1328 static tree select_struct;
1329 static tree ss_string1, ss_string1_len;
1330 static tree ss_string2, ss_string2_len;
1331 static tree ss_target;
1333 if (select_struct == NULL)
1335 tree gfc_int4_type_node = gfc_get_int_type (4);
1337 select_struct = make_node (RECORD_TYPE);
1338 TYPE_NAME (select_struct) = get_identifier ("_jump_struct");
1341 #define ADD_FIELD(NAME, TYPE) \
1342 ss_##NAME = gfc_add_field_to_struct \
1343 (&(TYPE_FIELDS (select_struct)), select_struct, \
1344 get_identifier (stringize(NAME)), TYPE)
1346 ADD_FIELD (string1, pchar_type_node);
1347 ADD_FIELD (string1_len, gfc_int4_type_node);
1349 ADD_FIELD (string2, pchar_type_node);
1350 ADD_FIELD (string2_len, gfc_int4_type_node);
1352 ADD_FIELD (target, pvoid_type_node);
1355 gfc_finish_type (select_struct);
1358 cp = code->block->ext.case_list;
1359 while (cp->left != NULL)
1363 for (d = cp; d; d = d->right)
1367 labels = gfc_getmem (n * sizeof (tree));
1371 for(i = 0; i < n; i++)
1373 labels[i] = gfc_build_label_decl (NULL_TREE);
1374 TREE_USED (labels[i]) = 1;
1375 /* TODO: The gimplifier should do this for us, but it has
1376 inadequacies when dealing with static initializers. */
1377 FORCED_LABEL (labels[i]) = 1;
1380 end_label = gfc_build_label_decl (NULL_TREE);
1382 /* Generate the body */
1383 gfc_start_block (&block);
1384 gfc_init_block (&body);
1386 for (c = code->block; c; c = c->block)
1388 for (d = c->ext.case_list; d; d = d->next)
1390 tmp = build1_v (LABEL_EXPR, labels[d->n]);
1391 gfc_add_expr_to_block (&body, tmp);
1394 tmp = gfc_trans_code (c->next);
1395 gfc_add_expr_to_block (&body, tmp);
1397 tmp = build1_v (GOTO_EXPR, end_label);
1398 gfc_add_expr_to_block (&body, tmp);
1401 /* Generate the structure describing the branches */
1405 for(d = cp; d; d = d->right, i++)
1409 gfc_init_se (&se, NULL);
1413 node = tree_cons (ss_string1, null_pointer_node, node);
1414 node = tree_cons (ss_string1_len, integer_zero_node, node);
1418 gfc_conv_expr_reference (&se, d->low);
1420 node = tree_cons (ss_string1, se.expr, node);
1421 node = tree_cons (ss_string1_len, se.string_length, node);
1424 if (d->high == NULL)
1426 node = tree_cons (ss_string2, null_pointer_node, node);
1427 node = tree_cons (ss_string2_len, integer_zero_node, node);
1431 gfc_init_se (&se, NULL);
1432 gfc_conv_expr_reference (&se, d->high);
1434 node = tree_cons (ss_string2, se.expr, node);
1435 node = tree_cons (ss_string2_len, se.string_length, node);
1438 tmp = gfc_build_addr_expr (pvoid_type_node, labels[i]);
1439 node = tree_cons (ss_target, tmp, node);
1441 tmp = build_constructor_from_list (select_struct, nreverse (node));
1442 init = tree_cons (NULL_TREE, tmp, init);
1445 type = build_array_type (select_struct, build_index_type
1446 (build_int_cst (NULL_TREE, n - 1)));
1448 init = build_constructor_from_list (type, nreverse(init));
1449 TREE_CONSTANT (init) = 1;
1450 TREE_INVARIANT (init) = 1;
1451 TREE_STATIC (init) = 1;
1452 /* Create a static variable to hold the jump table. */
1453 tmp = gfc_create_var (type, "jumptable");
1454 TREE_CONSTANT (tmp) = 1;
1455 TREE_INVARIANT (tmp) = 1;
1456 TREE_STATIC (tmp) = 1;
1457 TREE_READONLY (tmp) = 1;
1458 DECL_INITIAL (tmp) = init;
1461 /* Build the library call */
1462 init = gfc_build_addr_expr (pvoid_type_node, init);
1463 tmp = gfc_build_addr_expr (pvoid_type_node, end_label);
1465 gfc_init_se (&se, NULL);
1466 gfc_conv_expr_reference (&se, code->expr);
1468 gfc_add_block_to_block (&block, &se.pre);
1470 tmp = build_call_expr (gfor_fndecl_select_string, 5,
1471 init, build_int_cst (NULL_TREE, n),
1472 tmp, se.expr, se.string_length);
1474 case_label = gfc_create_var (TREE_TYPE (tmp), "case_label");
1475 gfc_add_modify_expr (&block, case_label, tmp);
1477 gfc_add_block_to_block (&block, &se.post);
1479 tmp = build1 (GOTO_EXPR, void_type_node, case_label);
1480 gfc_add_expr_to_block (&block, tmp);
1482 tmp = gfc_finish_block (&body);
1483 gfc_add_expr_to_block (&block, tmp);
1484 tmp = build1_v (LABEL_EXPR, end_label);
1485 gfc_add_expr_to_block (&block, tmp);
1490 return gfc_finish_block (&block);
1494 /* Translate the three variants of the SELECT CASE construct.
1496 SELECT CASEs with INTEGER case expressions can be translated to an
1497 equivalent GENERIC switch statement, and for LOGICAL case
1498 expressions we build one or two if-else compares.
1500 SELECT CASEs with CHARACTER case expressions are a whole different
1501 story, because they don't exist in GENERIC. So we sort them and
1502 do a binary search at runtime.
1504 Fortran has no BREAK statement, and it does not allow jumps from
1505 one case block to another. That makes things a lot easier for
1509 gfc_trans_select (gfc_code * code)
1511 gcc_assert (code && code->expr);
1513 /* Empty SELECT constructs are legal. */
1514 if (code->block == NULL)
1515 return build_empty_stmt ();
1517 /* Select the correct translation function. */
1518 switch (code->expr->ts.type)
1520 case BT_LOGICAL: return gfc_trans_logical_select (code);
1521 case BT_INTEGER: return gfc_trans_integer_select (code);
1522 case BT_CHARACTER: return gfc_trans_character_select (code);
1524 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1530 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1531 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1532 indicates whether we should generate code to test the FORALLs mask
1533 array. OUTER is the loop header to be used for initializing mask
1536 The generated loop format is:
1537 count = (end - start + step) / step
1550 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1551 int mask_flag, stmtblock_t *outer)
1559 tree var, start, end, step;
1562 /* Initialize the mask index outside the FORALL nest. */
1563 if (mask_flag && forall_tmp->mask)
1564 gfc_add_modify_expr (outer, forall_tmp->maskindex, gfc_index_zero_node);
1566 iter = forall_tmp->this_loop;
1567 nvar = forall_tmp->nvar;
1568 for (n = 0; n < nvar; n++)
1571 start = iter->start;
1575 exit_label = gfc_build_label_decl (NULL_TREE);
1576 TREE_USED (exit_label) = 1;
1578 /* The loop counter. */
1579 count = gfc_create_var (TREE_TYPE (var), "count");
1581 /* The body of the loop. */
1582 gfc_init_block (&block);
1584 /* The exit condition. */
1585 cond = fold_build2 (LE_EXPR, boolean_type_node,
1586 count, build_int_cst (TREE_TYPE (count), 0));
1587 tmp = build1_v (GOTO_EXPR, exit_label);
1588 tmp = fold_build3 (COND_EXPR, void_type_node,
1589 cond, tmp, build_empty_stmt ());
1590 gfc_add_expr_to_block (&block, tmp);
1592 /* The main loop body. */
1593 gfc_add_expr_to_block (&block, body);
1595 /* Increment the loop variable. */
1596 tmp = build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1597 gfc_add_modify_expr (&block, var, tmp);
1599 /* Advance to the next mask element. Only do this for the
1601 if (n == 0 && mask_flag && forall_tmp->mask)
1603 tree maskindex = forall_tmp->maskindex;
1604 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
1605 maskindex, gfc_index_one_node);
1606 gfc_add_modify_expr (&block, maskindex, tmp);
1609 /* Decrement the loop counter. */
1610 tmp = build2 (MINUS_EXPR, TREE_TYPE (var), count, gfc_index_one_node);
1611 gfc_add_modify_expr (&block, count, tmp);
1613 body = gfc_finish_block (&block);
1615 /* Loop var initialization. */
1616 gfc_init_block (&block);
1617 gfc_add_modify_expr (&block, var, start);
1620 /* Initialize the loop counter. */
1621 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1622 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1623 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1624 gfc_add_modify_expr (&block, count, tmp);
1626 /* The loop expression. */
1627 tmp = build1_v (LOOP_EXPR, body);
1628 gfc_add_expr_to_block (&block, tmp);
1630 /* The exit label. */
1631 tmp = build1_v (LABEL_EXPR, exit_label);
1632 gfc_add_expr_to_block (&block, tmp);
1634 body = gfc_finish_block (&block);
1641 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1642 is nonzero, the body is controlled by all masks in the forall nest.
1643 Otherwise, the innermost loop is not controlled by it's mask. This
1644 is used for initializing that mask. */
1647 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1652 forall_info *forall_tmp;
1653 tree mask, maskindex;
1655 gfc_start_block (&header);
1657 forall_tmp = nested_forall_info;
1658 while (forall_tmp != NULL)
1660 /* Generate body with masks' control. */
1663 mask = forall_tmp->mask;
1664 maskindex = forall_tmp->maskindex;
1666 /* If a mask was specified make the assignment conditional. */
1669 tmp = gfc_build_array_ref (mask, maskindex);
1670 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1673 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1674 forall_tmp = forall_tmp->prev_nest;
1678 gfc_add_expr_to_block (&header, body);
1679 return gfc_finish_block (&header);
1683 /* Allocate data for holding a temporary array. Returns either a local
1684 temporary array or a pointer variable. */
1687 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
1694 if (INTEGER_CST_P (size))
1696 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1697 gfc_index_one_node);
1702 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1703 type = build_array_type (elem_type, type);
1704 if (gfc_can_put_var_on_stack (bytesize))
1706 gcc_assert (INTEGER_CST_P (size));
1707 tmpvar = gfc_create_var (type, "temp");
1712 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1713 *pdata = convert (pvoid_type_node, tmpvar);
1715 if (gfc_index_integer_kind == 4)
1716 tmp = gfor_fndecl_internal_malloc;
1717 else if (gfc_index_integer_kind == 8)
1718 tmp = gfor_fndecl_internal_malloc64;
1721 tmp = build_call_expr (tmp, 1, bytesize);
1722 tmp = convert (TREE_TYPE (tmpvar), tmp);
1723 gfc_add_modify_expr (pblock, tmpvar, tmp);
1729 /* Generate codes to copy the temporary to the actual lhs. */
1732 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1733 tree count1, tree wheremask, bool invert)
1737 stmtblock_t block, body;
1743 lss = gfc_walk_expr (expr);
1745 if (lss == gfc_ss_terminator)
1747 gfc_start_block (&block);
1749 gfc_init_se (&lse, NULL);
1751 /* Translate the expression. */
1752 gfc_conv_expr (&lse, expr);
1754 /* Form the expression for the temporary. */
1755 tmp = gfc_build_array_ref (tmp1, count1);
1757 /* Use the scalar assignment as is. */
1758 gfc_add_block_to_block (&block, &lse.pre);
1759 gfc_add_modify_expr (&block, lse.expr, tmp);
1760 gfc_add_block_to_block (&block, &lse.post);
1762 /* Increment the count1. */
1763 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1764 gfc_index_one_node);
1765 gfc_add_modify_expr (&block, count1, tmp);
1767 tmp = gfc_finish_block (&block);
1771 gfc_start_block (&block);
1773 gfc_init_loopinfo (&loop1);
1774 gfc_init_se (&rse, NULL);
1775 gfc_init_se (&lse, NULL);
1777 /* Associate the lss with the loop. */
1778 gfc_add_ss_to_loop (&loop1, lss);
1780 /* Calculate the bounds of the scalarization. */
1781 gfc_conv_ss_startstride (&loop1);
1782 /* Setup the scalarizing loops. */
1783 gfc_conv_loop_setup (&loop1);
1785 gfc_mark_ss_chain_used (lss, 1);
1787 /* Start the scalarized loop body. */
1788 gfc_start_scalarized_body (&loop1, &body);
1790 /* Setup the gfc_se structures. */
1791 gfc_copy_loopinfo_to_se (&lse, &loop1);
1794 /* Form the expression of the temporary. */
1795 if (lss != gfc_ss_terminator)
1796 rse.expr = gfc_build_array_ref (tmp1, count1);
1797 /* Translate expr. */
1798 gfc_conv_expr (&lse, expr);
1800 /* Use the scalar assignment. */
1801 rse.string_length = lse.string_length;
1802 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
1804 /* Form the mask expression according to the mask tree list. */
1807 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1809 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1810 TREE_TYPE (wheremaskexpr),
1812 tmp = fold_build3 (COND_EXPR, void_type_node,
1813 wheremaskexpr, tmp, build_empty_stmt ());
1816 gfc_add_expr_to_block (&body, tmp);
1818 /* Increment count1. */
1819 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1820 count1, gfc_index_one_node);
1821 gfc_add_modify_expr (&body, count1, tmp);
1823 /* Increment count3. */
1826 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1827 count3, gfc_index_one_node);
1828 gfc_add_modify_expr (&body, count3, tmp);
1831 /* Generate the copying loops. */
1832 gfc_trans_scalarizing_loops (&loop1, &body);
1833 gfc_add_block_to_block (&block, &loop1.pre);
1834 gfc_add_block_to_block (&block, &loop1.post);
1835 gfc_cleanup_loop (&loop1);
1837 tmp = gfc_finish_block (&block);
1843 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
1844 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
1845 and should not be freed. WHEREMASK is the conditional execution mask
1846 whose sense may be inverted by INVERT. */
1849 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
1850 tree count1, gfc_ss *lss, gfc_ss *rss,
1851 tree wheremask, bool invert)
1853 stmtblock_t block, body1;
1860 gfc_start_block (&block);
1862 gfc_init_se (&rse, NULL);
1863 gfc_init_se (&lse, NULL);
1865 if (lss == gfc_ss_terminator)
1867 gfc_init_block (&body1);
1868 gfc_conv_expr (&rse, expr2);
1869 lse.expr = gfc_build_array_ref (tmp1, count1);
1873 /* Initialize the loop. */
1874 gfc_init_loopinfo (&loop);
1876 /* We may need LSS to determine the shape of the expression. */
1877 gfc_add_ss_to_loop (&loop, lss);
1878 gfc_add_ss_to_loop (&loop, rss);
1880 gfc_conv_ss_startstride (&loop);
1881 gfc_conv_loop_setup (&loop);
1883 gfc_mark_ss_chain_used (rss, 1);
1884 /* Start the loop body. */
1885 gfc_start_scalarized_body (&loop, &body1);
1887 /* Translate the expression. */
1888 gfc_copy_loopinfo_to_se (&rse, &loop);
1890 gfc_conv_expr (&rse, expr2);
1892 /* Form the expression of the temporary. */
1893 lse.expr = gfc_build_array_ref (tmp1, count1);
1896 /* Use the scalar assignment. */
1897 lse.string_length = rse.string_length;
1898 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
1899 expr2->expr_type == EXPR_VARIABLE);
1901 /* Form the mask expression according to the mask tree list. */
1904 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1906 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1907 TREE_TYPE (wheremaskexpr),
1909 tmp = fold_build3 (COND_EXPR, void_type_node,
1910 wheremaskexpr, tmp, build_empty_stmt ());
1913 gfc_add_expr_to_block (&body1, tmp);
1915 if (lss == gfc_ss_terminator)
1917 gfc_add_block_to_block (&block, &body1);
1919 /* Increment count1. */
1920 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1921 gfc_index_one_node);
1922 gfc_add_modify_expr (&block, count1, tmp);
1926 /* Increment count1. */
1927 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1928 count1, gfc_index_one_node);
1929 gfc_add_modify_expr (&body1, count1, tmp);
1931 /* Increment count3. */
1934 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1935 count3, gfc_index_one_node);
1936 gfc_add_modify_expr (&body1, count3, tmp);
1939 /* Generate the copying loops. */
1940 gfc_trans_scalarizing_loops (&loop, &body1);
1942 gfc_add_block_to_block (&block, &loop.pre);
1943 gfc_add_block_to_block (&block, &loop.post);
1945 gfc_cleanup_loop (&loop);
1946 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
1947 as tree nodes in SS may not be valid in different scope. */
1950 tmp = gfc_finish_block (&block);
1955 /* Calculate the size of temporary needed in the assignment inside forall.
1956 LSS and RSS are filled in this function. */
1959 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
1960 stmtblock_t * pblock,
1961 gfc_ss **lss, gfc_ss **rss)
1969 *lss = gfc_walk_expr (expr1);
1972 size = gfc_index_one_node;
1973 if (*lss != gfc_ss_terminator)
1975 gfc_init_loopinfo (&loop);
1977 /* Walk the RHS of the expression. */
1978 *rss = gfc_walk_expr (expr2);
1979 if (*rss == gfc_ss_terminator)
1981 /* The rhs is scalar. Add a ss for the expression. */
1982 *rss = gfc_get_ss ();
1983 (*rss)->next = gfc_ss_terminator;
1984 (*rss)->type = GFC_SS_SCALAR;
1985 (*rss)->expr = expr2;
1988 /* Associate the SS with the loop. */
1989 gfc_add_ss_to_loop (&loop, *lss);
1990 /* We don't actually need to add the rhs at this point, but it might
1991 make guessing the loop bounds a bit easier. */
1992 gfc_add_ss_to_loop (&loop, *rss);
1994 /* We only want the shape of the expression, not rest of the junk
1995 generated by the scalarizer. */
1996 loop.array_parameter = 1;
1998 /* Calculate the bounds of the scalarization. */
1999 save_flag = flag_bounds_check;
2000 flag_bounds_check = 0;
2001 gfc_conv_ss_startstride (&loop);
2002 flag_bounds_check = save_flag;
2003 gfc_conv_loop_setup (&loop);
2005 /* Figure out how many elements we need. */
2006 for (i = 0; i < loop.dimen; i++)
2008 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2009 gfc_index_one_node, loop.from[i]);
2010 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2012 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2014 gfc_add_block_to_block (pblock, &loop.pre);
2015 size = gfc_evaluate_now (size, pblock);
2016 gfc_add_block_to_block (pblock, &loop.post);
2018 /* TODO: write a function that cleans up a loopinfo without freeing
2019 the SS chains. Currently a NOP. */
2026 /* Calculate the overall iterator number of the nested forall construct.
2027 This routine actually calculates the number of times the body of the
2028 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2029 that by the expression INNER_SIZE. The BLOCK argument specifies the
2030 block in which to calculate the result, and the optional INNER_SIZE_BODY
2031 argument contains any statements that need to executed (inside the loop)
2032 to initialize or calculate INNER_SIZE. */
2035 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2036 stmtblock_t *inner_size_body, stmtblock_t *block)
2038 forall_info *forall_tmp = nested_forall_info;
2042 /* We can eliminate the innermost unconditional loops with constant
2044 if (INTEGER_CST_P (inner_size))
2047 && !forall_tmp->mask
2048 && INTEGER_CST_P (forall_tmp->size))
2050 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2051 inner_size, forall_tmp->size);
2052 forall_tmp = forall_tmp->prev_nest;
2055 /* If there are no loops left, we have our constant result. */
2060 /* Otherwise, create a temporary variable to compute the result. */
2061 number = gfc_create_var (gfc_array_index_type, "num");
2062 gfc_add_modify_expr (block, number, gfc_index_zero_node);
2064 gfc_start_block (&body);
2065 if (inner_size_body)
2066 gfc_add_block_to_block (&body, inner_size_body);
2068 tmp = build2 (PLUS_EXPR, gfc_array_index_type, number,
2072 gfc_add_modify_expr (&body, number, tmp);
2073 tmp = gfc_finish_block (&body);
2075 /* Generate loops. */
2076 if (forall_tmp != NULL)
2077 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2079 gfc_add_expr_to_block (block, tmp);
2085 /* Allocate temporary for forall construct. SIZE is the size of temporary
2086 needed. PTEMP1 is returned for space free. */
2089 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2096 unit = TYPE_SIZE_UNIT (type);
2097 if (!integer_onep (unit))
2098 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2103 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2106 tmp = build_fold_indirect_ref (tmp);
2111 /* Allocate temporary for forall construct according to the information in
2112 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2113 assignment inside forall. PTEMP1 is returned for space free. */
2116 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2117 tree inner_size, stmtblock_t * inner_size_body,
2118 stmtblock_t * block, tree * ptemp1)
2122 /* Calculate the total size of temporary needed in forall construct. */
2123 size = compute_overall_iter_number (nested_forall_info, inner_size,
2124 inner_size_body, block);
2126 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2130 /* Handle assignments inside forall which need temporary.
2132 forall (i=start:end:stride; maskexpr)
2135 (where e,f<i> are arbitrary expressions possibly involving i
2136 and there is a dependency between e<i> and f<i>)
2138 masktmp(:) = maskexpr(:)
2143 for (i = start; i <= end; i += stride)
2147 for (i = start; i <= end; i += stride)
2149 if (masktmp[maskindex++])
2150 tmp[count1++] = f<i>
2154 for (i = start; i <= end; i += stride)
2156 if (masktmp[maskindex++])
2157 e<i> = tmp[count1++]
2162 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2163 tree wheremask, bool invert,
2164 forall_info * nested_forall_info,
2165 stmtblock_t * block)
2173 stmtblock_t inner_size_body;
2175 /* Create vars. count1 is the current iterator number of the nested
2177 count1 = gfc_create_var (gfc_array_index_type, "count1");
2179 /* Count is the wheremask index. */
2182 count = gfc_create_var (gfc_array_index_type, "count");
2183 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2188 /* Initialize count1. */
2189 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2191 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2192 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2193 gfc_init_block (&inner_size_body);
2194 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2197 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
2198 type = gfc_typenode_for_spec (&expr1->ts);
2200 /* Allocate temporary for nested forall construct according to the
2201 information in nested_forall_info and inner_size. */
2202 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2203 &inner_size_body, block, &ptemp1);
2205 /* Generate codes to copy rhs to the temporary . */
2206 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2209 /* Generate body and loops according to the information in
2210 nested_forall_info. */
2211 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2212 gfc_add_expr_to_block (block, tmp);
2215 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2219 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2221 /* Generate codes to copy the temporary to lhs. */
2222 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2225 /* Generate body and loops according to the information in
2226 nested_forall_info. */
2227 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2228 gfc_add_expr_to_block (block, tmp);
2232 /* Free the temporary. */
2233 tmp = build_call_expr (gfor_fndecl_internal_free, 1, ptemp1);
2234 gfc_add_expr_to_block (block, tmp);
2239 /* Translate pointer assignment inside FORALL which need temporary. */
2242 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2243 forall_info * nested_forall_info,
2244 stmtblock_t * block)
2258 tree tmp, tmp1, ptemp1;
2260 count = gfc_create_var (gfc_array_index_type, "count");
2261 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2263 inner_size = integer_one_node;
2264 lss = gfc_walk_expr (expr1);
2265 rss = gfc_walk_expr (expr2);
2266 if (lss == gfc_ss_terminator)
2268 type = gfc_typenode_for_spec (&expr1->ts);
2269 type = build_pointer_type (type);
2271 /* Allocate temporary for nested forall construct according to the
2272 information in nested_forall_info and inner_size. */
2273 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2274 inner_size, NULL, block, &ptemp1);
2275 gfc_start_block (&body);
2276 gfc_init_se (&lse, NULL);
2277 lse.expr = gfc_build_array_ref (tmp1, count);
2278 gfc_init_se (&rse, NULL);
2279 rse.want_pointer = 1;
2280 gfc_conv_expr (&rse, expr2);
2281 gfc_add_block_to_block (&body, &rse.pre);
2282 gfc_add_modify_expr (&body, lse.expr,
2283 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2284 gfc_add_block_to_block (&body, &rse.post);
2286 /* Increment count. */
2287 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2288 count, gfc_index_one_node);
2289 gfc_add_modify_expr (&body, count, tmp);
2291 tmp = gfc_finish_block (&body);
2293 /* Generate body and loops according to the information in
2294 nested_forall_info. */
2295 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2296 gfc_add_expr_to_block (block, tmp);
2299 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2301 gfc_start_block (&body);
2302 gfc_init_se (&lse, NULL);
2303 gfc_init_se (&rse, NULL);
2304 rse.expr = gfc_build_array_ref (tmp1, count);
2305 lse.want_pointer = 1;
2306 gfc_conv_expr (&lse, expr1);
2307 gfc_add_block_to_block (&body, &lse.pre);
2308 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2309 gfc_add_block_to_block (&body, &lse.post);
2310 /* Increment count. */
2311 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2312 count, gfc_index_one_node);
2313 gfc_add_modify_expr (&body, count, tmp);
2314 tmp = gfc_finish_block (&body);
2316 /* Generate body and loops according to the information in
2317 nested_forall_info. */
2318 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2319 gfc_add_expr_to_block (block, tmp);
2323 gfc_init_loopinfo (&loop);
2325 /* Associate the SS with the loop. */
2326 gfc_add_ss_to_loop (&loop, rss);
2328 /* Setup the scalarizing loops and bounds. */
2329 gfc_conv_ss_startstride (&loop);
2331 gfc_conv_loop_setup (&loop);
2333 info = &rss->data.info;
2334 desc = info->descriptor;
2336 /* Make a new descriptor. */
2337 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2338 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2339 loop.from, loop.to, 1);
2341 /* Allocate temporary for nested forall construct. */
2342 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2343 inner_size, NULL, block, &ptemp1);
2344 gfc_start_block (&body);
2345 gfc_init_se (&lse, NULL);
2346 lse.expr = gfc_build_array_ref (tmp1, count);
2347 lse.direct_byref = 1;
2348 rss = gfc_walk_expr (expr2);
2349 gfc_conv_expr_descriptor (&lse, expr2, rss);
2351 gfc_add_block_to_block (&body, &lse.pre);
2352 gfc_add_block_to_block (&body, &lse.post);
2354 /* Increment count. */
2355 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2356 count, gfc_index_one_node);
2357 gfc_add_modify_expr (&body, count, tmp);
2359 tmp = gfc_finish_block (&body);
2361 /* Generate body and loops according to the information in
2362 nested_forall_info. */
2363 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2364 gfc_add_expr_to_block (block, tmp);
2367 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2369 parm = gfc_build_array_ref (tmp1, count);
2370 lss = gfc_walk_expr (expr1);
2371 gfc_init_se (&lse, NULL);
2372 gfc_conv_expr_descriptor (&lse, expr1, lss);
2373 gfc_add_modify_expr (&lse.pre, lse.expr, parm);
2374 gfc_start_block (&body);
2375 gfc_add_block_to_block (&body, &lse.pre);
2376 gfc_add_block_to_block (&body, &lse.post);
2378 /* Increment count. */
2379 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2380 count, gfc_index_one_node);
2381 gfc_add_modify_expr (&body, count, tmp);
2383 tmp = gfc_finish_block (&body);
2385 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2386 gfc_add_expr_to_block (block, tmp);
2388 /* Free the temporary. */
2391 tmp = build_call_expr (gfor_fndecl_internal_free, 1, ptemp1);
2392 gfc_add_expr_to_block (block, tmp);
2397 /* FORALL and WHERE statements are really nasty, especially when you nest
2398 them. All the rhs of a forall assignment must be evaluated before the
2399 actual assignments are performed. Presumably this also applies to all the
2400 assignments in an inner where statement. */
2402 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2403 linear array, relying on the fact that we process in the same order in all
2406 forall (i=start:end:stride; maskexpr)
2410 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2412 count = ((end + 1 - start) / stride)
2413 masktmp(:) = maskexpr(:)
2416 for (i = start; i <= end; i += stride)
2418 if (masktmp[maskindex++])
2422 for (i = start; i <= end; i += stride)
2424 if (masktmp[maskindex++])
2428 Note that this code only works when there are no dependencies.
2429 Forall loop with array assignments and data dependencies are a real pain,
2430 because the size of the temporary cannot always be determined before the
2431 loop is executed. This problem is compounded by the presence of nested
2436 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2454 gfc_forall_iterator *fa;
2457 gfc_saved_var *saved_vars;
2458 iter_info *this_forall;
2462 /* Do nothing if the mask is false. */
2464 && code->expr->expr_type == EXPR_CONSTANT
2465 && !code->expr->value.logical)
2466 return build_empty_stmt ();
2469 /* Count the FORALL index number. */
2470 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2474 /* Allocate the space for var, start, end, step, varexpr. */
2475 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2476 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2477 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2478 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2479 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2480 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2482 /* Allocate the space for info. */
2483 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2485 gfc_start_block (&block);
2488 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2490 gfc_symbol *sym = fa->var->symtree->n.sym;
2492 /* Allocate space for this_forall. */
2493 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2495 /* Create a temporary variable for the FORALL index. */
2496 tmp = gfc_typenode_for_spec (&sym->ts);
2497 var[n] = gfc_create_var (tmp, sym->name);
2498 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2500 /* Record it in this_forall. */
2501 this_forall->var = var[n];
2503 /* Replace the index symbol's backend_decl with the temporary decl. */
2504 sym->backend_decl = var[n];
2506 /* Work out the start, end and stride for the loop. */
2507 gfc_init_se (&se, NULL);
2508 gfc_conv_expr_val (&se, fa->start);
2509 /* Record it in this_forall. */
2510 this_forall->start = se.expr;
2511 gfc_add_block_to_block (&block, &se.pre);
2514 gfc_init_se (&se, NULL);
2515 gfc_conv_expr_val (&se, fa->end);
2516 /* Record it in this_forall. */
2517 this_forall->end = se.expr;
2518 gfc_make_safe_expr (&se);
2519 gfc_add_block_to_block (&block, &se.pre);
2522 gfc_init_se (&se, NULL);
2523 gfc_conv_expr_val (&se, fa->stride);
2524 /* Record it in this_forall. */
2525 this_forall->step = se.expr;
2526 gfc_make_safe_expr (&se);
2527 gfc_add_block_to_block (&block, &se.pre);
2530 /* Set the NEXT field of this_forall to NULL. */
2531 this_forall->next = NULL;
2532 /* Link this_forall to the info construct. */
2533 if (info->this_loop)
2535 iter_info *iter_tmp = info->this_loop;
2536 while (iter_tmp->next != NULL)
2537 iter_tmp = iter_tmp->next;
2538 iter_tmp->next = this_forall;
2541 info->this_loop = this_forall;
2547 /* Calculate the size needed for the current forall level. */
2548 size = gfc_index_one_node;
2549 for (n = 0; n < nvar; n++)
2551 /* size = (end + step - start) / step. */
2552 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2554 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2556 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2557 tmp = convert (gfc_array_index_type, tmp);
2559 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2562 /* Record the nvar and size of current forall level. */
2568 /* If the mask is .true., consider the FORALL unconditional. */
2569 if (code->expr->expr_type == EXPR_CONSTANT
2570 && code->expr->value.logical)
2578 /* First we need to allocate the mask. */
2581 /* As the mask array can be very big, prefer compact boolean types. */
2582 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2583 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2584 size, NULL, &block, &pmask);
2585 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2587 /* Record them in the info structure. */
2588 info->maskindex = maskindex;
2593 /* No mask was specified. */
2594 maskindex = NULL_TREE;
2595 mask = pmask = NULL_TREE;
2598 /* Link the current forall level to nested_forall_info. */
2599 info->prev_nest = nested_forall_info;
2600 nested_forall_info = info;
2602 /* Copy the mask into a temporary variable if required.
2603 For now we assume a mask temporary is needed. */
2606 /* As the mask array can be very big, prefer compact boolean types. */
2607 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2609 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2611 /* Start of mask assignment loop body. */
2612 gfc_start_block (&body);
2614 /* Evaluate the mask expression. */
2615 gfc_init_se (&se, NULL);
2616 gfc_conv_expr_val (&se, code->expr);
2617 gfc_add_block_to_block (&body, &se.pre);
2619 /* Store the mask. */
2620 se.expr = convert (mask_type, se.expr);
2622 tmp = gfc_build_array_ref (mask, maskindex);
2623 gfc_add_modify_expr (&body, tmp, se.expr);
2625 /* Advance to the next mask element. */
2626 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
2627 maskindex, gfc_index_one_node);
2628 gfc_add_modify_expr (&body, maskindex, tmp);
2630 /* Generate the loops. */
2631 tmp = gfc_finish_block (&body);
2632 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2633 gfc_add_expr_to_block (&block, tmp);
2636 c = code->block->next;
2638 /* TODO: loop merging in FORALL statements. */
2639 /* Now that we've got a copy of the mask, generate the assignment loops. */
2645 /* A scalar or array assignment. */
2646 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2647 /* Temporaries due to array assignment data dependencies introduce
2648 no end of problems. */
2650 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2651 nested_forall_info, &block);
2654 /* Use the normal assignment copying routines. */
2655 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2657 /* Generate body and loops. */
2658 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2660 gfc_add_expr_to_block (&block, tmp);
2666 /* Translate WHERE or WHERE construct nested in FORALL. */
2667 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2670 /* Pointer assignment inside FORALL. */
2671 case EXEC_POINTER_ASSIGN:
2672 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2674 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2675 nested_forall_info, &block);
2678 /* Use the normal assignment copying routines. */
2679 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2681 /* Generate body and loops. */
2682 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2684 gfc_add_expr_to_block (&block, tmp);
2689 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2690 gfc_add_expr_to_block (&block, tmp);
2693 /* Explicit subroutine calls are prevented by the frontend but interface
2694 assignments can legitimately produce them. */
2695 case EXEC_ASSIGN_CALL:
2696 assign = gfc_trans_call (c, true);
2697 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
2698 gfc_add_expr_to_block (&block, tmp);
2708 /* Restore the original index variables. */
2709 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2710 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2712 /* Free the space for var, start, end, step, varexpr. */
2718 gfc_free (saved_vars);
2720 /* Free the space for this forall_info. */
2725 /* Free the temporary for the mask. */
2726 tmp = build_call_expr (gfor_fndecl_internal_free, 1, pmask);
2727 gfc_add_expr_to_block (&block, tmp);
2730 pushdecl (maskindex);
2732 return gfc_finish_block (&block);
2736 /* Translate the FORALL statement or construct. */
2738 tree gfc_trans_forall (gfc_code * code)
2740 return gfc_trans_forall_1 (code, NULL);
2744 /* Evaluate the WHERE mask expression, copy its value to a temporary.
2745 If the WHERE construct is nested in FORALL, compute the overall temporary
2746 needed by the WHERE mask expression multiplied by the iterator number of
2748 ME is the WHERE mask expression.
2749 MASK is the current execution mask upon input, whose sense may or may
2750 not be inverted as specified by the INVERT argument.
2751 CMASK is the updated execution mask on output, or NULL if not required.
2752 PMASK is the pending execution mask on output, or NULL if not required.
2753 BLOCK is the block in which to place the condition evaluation loops. */
2756 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2757 tree mask, bool invert, tree cmask, tree pmask,
2758 tree mask_type, stmtblock_t * block)
2763 stmtblock_t body, body1;
2764 tree count, cond, mtmp;
2767 gfc_init_loopinfo (&loop);
2769 lss = gfc_walk_expr (me);
2770 rss = gfc_walk_expr (me);
2772 /* Variable to index the temporary. */
2773 count = gfc_create_var (gfc_array_index_type, "count");
2774 /* Initialize count. */
2775 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2777 gfc_start_block (&body);
2779 gfc_init_se (&rse, NULL);
2780 gfc_init_se (&lse, NULL);
2782 if (lss == gfc_ss_terminator)
2784 gfc_init_block (&body1);
2788 /* Initialize the loop. */
2789 gfc_init_loopinfo (&loop);
2791 /* We may need LSS to determine the shape of the expression. */
2792 gfc_add_ss_to_loop (&loop, lss);
2793 gfc_add_ss_to_loop (&loop, rss);
2795 gfc_conv_ss_startstride (&loop);
2796 gfc_conv_loop_setup (&loop);
2798 gfc_mark_ss_chain_used (rss, 1);
2799 /* Start the loop body. */
2800 gfc_start_scalarized_body (&loop, &body1);
2802 /* Translate the expression. */
2803 gfc_copy_loopinfo_to_se (&rse, &loop);
2805 gfc_conv_expr (&rse, me);
2808 /* Variable to evaluate mask condition. */
2809 cond = gfc_create_var (mask_type, "cond");
2810 if (mask && (cmask || pmask))
2811 mtmp = gfc_create_var (mask_type, "mask");
2812 else mtmp = NULL_TREE;
2814 gfc_add_block_to_block (&body1, &lse.pre);
2815 gfc_add_block_to_block (&body1, &rse.pre);
2817 gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));
2819 if (mask && (cmask || pmask))
2821 tmp = gfc_build_array_ref (mask, count);
2823 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
2824 gfc_add_modify_expr (&body1, mtmp, tmp);
2829 tmp1 = gfc_build_array_ref (cmask, count);
2832 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2833 gfc_add_modify_expr (&body1, tmp1, tmp);
2838 tmp1 = gfc_build_array_ref (pmask, count);
2839 tmp = build1 (TRUTH_NOT_EXPR, mask_type, cond);
2841 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2842 gfc_add_modify_expr (&body1, tmp1, tmp);
2845 gfc_add_block_to_block (&body1, &lse.post);
2846 gfc_add_block_to_block (&body1, &rse.post);
2848 if (lss == gfc_ss_terminator)
2850 gfc_add_block_to_block (&body, &body1);
2854 /* Increment count. */
2855 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
2856 gfc_index_one_node);
2857 gfc_add_modify_expr (&body1, count, tmp1);
2859 /* Generate the copying loops. */
2860 gfc_trans_scalarizing_loops (&loop, &body1);
2862 gfc_add_block_to_block (&body, &loop.pre);
2863 gfc_add_block_to_block (&body, &loop.post);
2865 gfc_cleanup_loop (&loop);
2866 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2867 as tree nodes in SS may not be valid in different scope. */
2870 tmp1 = gfc_finish_block (&body);
2871 /* If the WHERE construct is inside FORALL, fill the full temporary. */
2872 if (nested_forall_info != NULL)
2873 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
2875 gfc_add_expr_to_block (block, tmp1);
2879 /* Translate an assignment statement in a WHERE statement or construct
2880 statement. The MASK expression is used to control which elements
2881 of EXPR1 shall be assigned. The sense of MASK is specified by
2885 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
2886 tree mask, bool invert,
2887 tree count1, tree count2,
2893 gfc_ss *lss_section;
2900 tree index, maskexpr;
2903 /* TODO: handle this special case.
2904 Special case a single function returning an array. */
2905 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
2907 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
2913 /* Assignment of the form lhs = rhs. */
2914 gfc_start_block (&block);
2916 gfc_init_se (&lse, NULL);
2917 gfc_init_se (&rse, NULL);
2920 lss = gfc_walk_expr (expr1);
2923 /* In each where-assign-stmt, the mask-expr and the variable being
2924 defined shall be arrays of the same shape. */
2925 gcc_assert (lss != gfc_ss_terminator);
2927 /* The assignment needs scalarization. */
2930 /* Find a non-scalar SS from the lhs. */
2931 while (lss_section != gfc_ss_terminator
2932 && lss_section->type != GFC_SS_SECTION)
2933 lss_section = lss_section->next;
2935 gcc_assert (lss_section != gfc_ss_terminator);
2937 /* Initialize the scalarizer. */
2938 gfc_init_loopinfo (&loop);
2941 rss = gfc_walk_expr (expr2);
2942 if (rss == gfc_ss_terminator)
2944 /* The rhs is scalar. Add a ss for the expression. */
2945 rss = gfc_get_ss ();
2946 rss->next = gfc_ss_terminator;
2947 rss->type = GFC_SS_SCALAR;
2951 /* Associate the SS with the loop. */
2952 gfc_add_ss_to_loop (&loop, lss);
2953 gfc_add_ss_to_loop (&loop, rss);
2955 /* Calculate the bounds of the scalarization. */
2956 gfc_conv_ss_startstride (&loop);
2958 /* Resolve any data dependencies in the statement. */
2959 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
2961 /* Setup the scalarizing loops. */
2962 gfc_conv_loop_setup (&loop);
2964 /* Setup the gfc_se structures. */
2965 gfc_copy_loopinfo_to_se (&lse, &loop);
2966 gfc_copy_loopinfo_to_se (&rse, &loop);
2969 gfc_mark_ss_chain_used (rss, 1);
2970 if (loop.temp_ss == NULL)
2973 gfc_mark_ss_chain_used (lss, 1);
2977 lse.ss = loop.temp_ss;
2978 gfc_mark_ss_chain_used (lss, 3);
2979 gfc_mark_ss_chain_used (loop.temp_ss, 3);
2982 /* Start the scalarized loop body. */
2983 gfc_start_scalarized_body (&loop, &body);
2985 /* Translate the expression. */
2986 gfc_conv_expr (&rse, expr2);
2987 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
2989 gfc_conv_tmp_array_ref (&lse);
2990 gfc_advance_se_ss_chain (&lse);
2993 gfc_conv_expr (&lse, expr1);
2995 /* Form the mask expression according to the mask. */
2997 maskexpr = gfc_build_array_ref (mask, index);
2999 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3001 /* Use the scalar assignment as is. */
3003 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3004 loop.temp_ss != NULL, false);
3006 tmp = gfc_conv_operator_assign (&lse, &rse, sym);
3008 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3010 gfc_add_expr_to_block (&body, tmp);
3012 if (lss == gfc_ss_terminator)
3014 /* Increment count1. */
3015 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3016 count1, gfc_index_one_node);
3017 gfc_add_modify_expr (&body, count1, tmp);
3019 /* Use the scalar assignment as is. */
3020 gfc_add_block_to_block (&block, &body);
3024 gcc_assert (lse.ss == gfc_ss_terminator
3025 && rse.ss == gfc_ss_terminator);
3027 if (loop.temp_ss != NULL)
3029 /* Increment count1 before finish the main body of a scalarized
3031 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3032 count1, gfc_index_one_node);
3033 gfc_add_modify_expr (&body, count1, tmp);
3034 gfc_trans_scalarized_loop_boundary (&loop, &body);
3036 /* We need to copy the temporary to the actual lhs. */
3037 gfc_init_se (&lse, NULL);
3038 gfc_init_se (&rse, NULL);
3039 gfc_copy_loopinfo_to_se (&lse, &loop);
3040 gfc_copy_loopinfo_to_se (&rse, &loop);
3042 rse.ss = loop.temp_ss;
3045 gfc_conv_tmp_array_ref (&rse);
3046 gfc_advance_se_ss_chain (&rse);
3047 gfc_conv_expr (&lse, expr1);
3049 gcc_assert (lse.ss == gfc_ss_terminator
3050 && rse.ss == gfc_ss_terminator);
3052 /* Form the mask expression according to the mask tree list. */
3054 maskexpr = gfc_build_array_ref (mask, index);
3056 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3059 /* Use the scalar assignment as is. */
3060 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3061 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3062 gfc_add_expr_to_block (&body, tmp);
3064 /* Increment count2. */
3065 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3066 count2, gfc_index_one_node);
3067 gfc_add_modify_expr (&body, count2, tmp);
3071 /* Increment count1. */
3072 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3073 count1, gfc_index_one_node);
3074 gfc_add_modify_expr (&body, count1, tmp);
3077 /* Generate the copying loops. */
3078 gfc_trans_scalarizing_loops (&loop, &body);
3080 /* Wrap the whole thing up. */
3081 gfc_add_block_to_block (&block, &loop.pre);
3082 gfc_add_block_to_block (&block, &loop.post);
3083 gfc_cleanup_loop (&loop);
3086 return gfc_finish_block (&block);
3090 /* Translate the WHERE construct or statement.
3091 This function can be called iteratively to translate the nested WHERE
3092 construct or statement.
3093 MASK is the control mask. */
3096 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3097 forall_info * nested_forall_info, stmtblock_t * block)
3099 stmtblock_t inner_size_body;
3100 tree inner_size, size;
3108 tree count1, count2;
3112 tree pcmask = NULL_TREE;
3113 tree ppmask = NULL_TREE;
3114 tree cmask = NULL_TREE;
3115 tree pmask = NULL_TREE;
3116 gfc_actual_arglist *arg;
3118 /* the WHERE statement or the WHERE construct statement. */
3119 cblock = code->block;
3121 /* As the mask array can be very big, prefer compact boolean types. */
3122 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3124 /* Determine which temporary masks are needed. */
3127 /* One clause: No ELSEWHEREs. */
3128 need_cmask = (cblock->next != 0);
3131 else if (cblock->block->block)
3133 /* Three or more clauses: Conditional ELSEWHEREs. */
3137 else if (cblock->next)
3139 /* Two clauses, the first non-empty. */
3141 need_pmask = (mask != NULL_TREE
3142 && cblock->block->next != 0);
3144 else if (!cblock->block->next)
3146 /* Two clauses, both empty. */
3150 /* Two clauses, the first empty, the second non-empty. */
3153 need_cmask = (cblock->block->expr != 0);
3162 if (need_cmask || need_pmask)
3164 /* Calculate the size of temporary needed by the mask-expr. */
3165 gfc_init_block (&inner_size_body);
3166 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3167 &inner_size_body, &lss, &rss);
3169 /* Calculate the total size of temporary needed. */
3170 size = compute_overall_iter_number (nested_forall_info, inner_size,
3171 &inner_size_body, block);
3173 /* Allocate temporary for WHERE mask if needed. */
3175 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3178 /* Allocate temporary for !mask if needed. */
3180 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3186 /* Each time around this loop, the where clause is conditional
3187 on the value of mask and invert, which are updated at the
3188 bottom of the loop. */
3190 /* Has mask-expr. */
3193 /* Ensure that the WHERE mask will be evaluated exactly once.
3194 If there are no statements in this WHERE/ELSEWHERE clause,
3195 then we don't need to update the control mask (cmask).
3196 If this is the last clause of the WHERE construct, then
3197 we don't need to update the pending control mask (pmask). */
3199 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3201 cblock->next ? cmask : NULL_TREE,
3202 cblock->block ? pmask : NULL_TREE,
3205 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3207 (cblock->next || cblock->block)
3208 ? cmask : NULL_TREE,
3209 NULL_TREE, mask_type, block);
3213 /* It's a final elsewhere-stmt. No mask-expr is present. */
3217 /* The body of this where clause are controlled by cmask with
3218 sense specified by invert. */
3220 /* Get the assignment statement of a WHERE statement, or the first
3221 statement in where-body-construct of a WHERE construct. */
3222 cnext = cblock->next;
3227 /* WHERE assignment statement. */
3228 case EXEC_ASSIGN_CALL:
3230 arg = cnext->ext.actual;
3231 expr1 = expr2 = NULL;
3232 for (; arg; arg = arg->next)
3244 expr1 = cnext->expr;
3245 expr2 = cnext->expr2;
3247 if (nested_forall_info != NULL)
3249 need_temp = gfc_check_dependency (expr1, expr2, 0);
3250 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3251 gfc_trans_assign_need_temp (expr1, expr2,
3253 nested_forall_info, block);
3256 /* Variables to control maskexpr. */
3257 count1 = gfc_create_var (gfc_array_index_type, "count1");
3258 count2 = gfc_create_var (gfc_array_index_type, "count2");
3259 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3260 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3262 tmp = gfc_trans_where_assign (expr1, expr2,
3265 cnext->resolved_sym);
3267 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3269 gfc_add_expr_to_block (block, tmp);
3274 /* Variables to control maskexpr. */
3275 count1 = gfc_create_var (gfc_array_index_type, "count1");
3276 count2 = gfc_create_var (gfc_array_index_type, "count2");
3277 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3278 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3280 tmp = gfc_trans_where_assign (expr1, expr2,
3283 cnext->resolved_sym);
3284 gfc_add_expr_to_block (block, tmp);
3289 /* WHERE or WHERE construct is part of a where-body-construct. */
3291 gfc_trans_where_2 (cnext, cmask, invert,
3292 nested_forall_info, block);
3299 /* The next statement within the same where-body-construct. */
3300 cnext = cnext->next;
3302 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3303 cblock = cblock->block;
3304 if (mask == NULL_TREE)
3306 /* If we're the initial WHERE, we can simply invert the sense
3307 of the current mask to obtain the "mask" for the remaining
3314 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3320 /* If we allocated a pending mask array, deallocate it now. */
3323 tmp = build_call_expr (gfor_fndecl_internal_free, 1, ppmask);
3324 gfc_add_expr_to_block (block, tmp);
3327 /* If we allocated a current mask array, deallocate it now. */
3330 tmp = build_call_expr (gfor_fndecl_internal_free, 1, pcmask);
3331 gfc_add_expr_to_block (block, tmp);
3335 /* Translate a simple WHERE construct or statement without dependencies.
3336 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3337 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3338 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
3341 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3343 stmtblock_t block, body;
3344 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3345 tree tmp, cexpr, tstmt, estmt;
3346 gfc_ss *css, *tdss, *tsss;
3347 gfc_se cse, tdse, tsse, edse, esse;
3352 cond = cblock->expr;
3353 tdst = cblock->next->expr;
3354 tsrc = cblock->next->expr2;
3355 edst = eblock ? eblock->next->expr : NULL;
3356 esrc = eblock ? eblock->next->expr2 : NULL;
3358 gfc_start_block (&block);
3359 gfc_init_loopinfo (&loop);
3361 /* Handle the condition. */
3362 gfc_init_se (&cse, NULL);
3363 css = gfc_walk_expr (cond);
3364 gfc_add_ss_to_loop (&loop, css);
3366 /* Handle the then-clause. */
3367 gfc_init_se (&tdse, NULL);
3368 gfc_init_se (&tsse, NULL);
3369 tdss = gfc_walk_expr (tdst);
3370 tsss = gfc_walk_expr (tsrc);
3371 if (tsss == gfc_ss_terminator)
3373 tsss = gfc_get_ss ();
3374 tsss->next = gfc_ss_terminator;
3375 tsss->type = GFC_SS_SCALAR;
3378 gfc_add_ss_to_loop (&loop, tdss);
3379 gfc_add_ss_to_loop (&loop, tsss);
3383 /* Handle the else clause. */
3384 gfc_init_se (&edse, NULL);
3385 gfc_init_se (&esse, NULL);
3386 edss = gfc_walk_expr (edst);
3387 esss = gfc_walk_expr (esrc);
3388 if (esss == gfc_ss_terminator)
3390 esss = gfc_get_ss ();
3391 esss->next = gfc_ss_terminator;
3392 esss->type = GFC_SS_SCALAR;
3395 gfc_add_ss_to_loop (&loop, edss);
3396 gfc_add_ss_to_loop (&loop, esss);
3399 gfc_conv_ss_startstride (&loop);
3400 gfc_conv_loop_setup (&loop);
3402 gfc_mark_ss_chain_used (css, 1);
3403 gfc_mark_ss_chain_used (tdss, 1);
3404 gfc_mark_ss_chain_used (tsss, 1);
3407 gfc_mark_ss_chain_used (edss, 1);
3408 gfc_mark_ss_chain_used (esss, 1);
3411 gfc_start_scalarized_body (&loop, &body);
3413 gfc_copy_loopinfo_to_se (&cse, &loop);
3414 gfc_copy_loopinfo_to_se (&tdse, &loop);
3415 gfc_copy_loopinfo_to_se (&tsse, &loop);
3421 gfc_copy_loopinfo_to_se (&edse, &loop);
3422 gfc_copy_loopinfo_to_se (&esse, &loop);
3427 gfc_conv_expr (&cse, cond);
3428 gfc_add_block_to_block (&body, &cse.pre);
3431 gfc_conv_expr (&tsse, tsrc);
3432 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3434 gfc_conv_tmp_array_ref (&tdse);
3435 gfc_advance_se_ss_chain (&tdse);
3438 gfc_conv_expr (&tdse, tdst);
3442 gfc_conv_expr (&esse, esrc);
3443 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3445 gfc_conv_tmp_array_ref (&edse);
3446 gfc_advance_se_ss_chain (&edse);
3449 gfc_conv_expr (&edse, edst);
3452 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3453 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3454 : build_empty_stmt ();
3455 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3456 gfc_add_expr_to_block (&body, tmp);
3457 gfc_add_block_to_block (&body, &cse.post);
3459 gfc_trans_scalarizing_loops (&loop, &body);
3460 gfc_add_block_to_block (&block, &loop.pre);
3461 gfc_add_block_to_block (&block, &loop.post);
3462 gfc_cleanup_loop (&loop);
3464 return gfc_finish_block (&block);
3467 /* As the WHERE or WHERE construct statement can be nested, we call
3468 gfc_trans_where_2 to do the translation, and pass the initial
3469 NULL values for both the control mask and the pending control mask. */
3472 gfc_trans_where (gfc_code * code)
3478 cblock = code->block;
3480 && cblock->next->op == EXEC_ASSIGN
3481 && !cblock->next->next)
3483 eblock = cblock->block;
3486 /* A simple "WHERE (cond) x = y" statement or block is
3487 dependence free if cond is not dependent upon writing x,
3488 and the source y is unaffected by the destination x. */
3489 if (!gfc_check_dependency (cblock->next->expr,
3491 && !gfc_check_dependency (cblock->next->expr,
3492 cblock->next->expr2, 0))
3493 return gfc_trans_where_3 (cblock, NULL);
3495 else if (!eblock->expr
3498 && eblock->next->op == EXEC_ASSIGN
3499 && !eblock->next->next)
3501 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3502 block is dependence free if cond is not dependent on writes
3503 to x1 and x2, y1 is not dependent on writes to x2, and y2
3504 is not dependent on writes to x1, and both y's are not
3505 dependent upon their own x's. */
3506 if (!gfc_check_dependency(cblock->next->expr,
3508 && !gfc_check_dependency(eblock->next->expr,
3510 && !gfc_check_dependency(cblock->next->expr,
3511 eblock->next->expr2, 0)
3512 && !gfc_check_dependency(eblock->next->expr,
3513 cblock->next->expr2, 0)
3514 && !gfc_check_dependency(cblock->next->expr,
3515 cblock->next->expr2, 0)
3516 && !gfc_check_dependency(eblock->next->expr,
3517 eblock->next->expr2, 0))
3518 return gfc_trans_where_3 (cblock, eblock);
3522 gfc_start_block (&block);
3524 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3526 return gfc_finish_block (&block);
3530 /* CYCLE a DO loop. The label decl has already been created by
3531 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
3532 node at the head of the loop. We must mark the label as used. */
3535 gfc_trans_cycle (gfc_code * code)
3539 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3540 TREE_USED (cycle_label) = 1;
3541 return build1_v (GOTO_EXPR, cycle_label);
3545 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3546 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3550 gfc_trans_exit (gfc_code * code)
3554 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3555 TREE_USED (exit_label) = 1;
3556 return build1_v (GOTO_EXPR, exit_label);
3560 /* Translate the ALLOCATE statement. */
3563 gfc_trans_allocate (gfc_code * code)
3575 if (!code->ext.alloc_list)
3578 gfc_start_block (&block);
3582 tree gfc_int4_type_node = gfc_get_int_type (4);
3584 stat = gfc_create_var (gfc_int4_type_node, "stat");
3585 pstat = build_fold_addr_expr (stat);
3587 error_label = gfc_build_label_decl (NULL_TREE);
3588 TREE_USED (error_label) = 1;
3592 pstat = integer_zero_node;
3593 stat = error_label = NULL_TREE;
3597 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3601 gfc_init_se (&se, NULL);
3602 gfc_start_block (&se.pre);
3604 se.want_pointer = 1;
3605 se.descriptor_only = 1;
3606 gfc_conv_expr (&se, expr);
3608 if (!gfc_array_allocate (&se, expr, pstat))
3610 /* A scalar or derived type. */
3611 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
3613 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3614 tmp = se.string_length;
3616 tmp = build_call_expr (gfor_fndecl_allocate, 2, tmp, pstat);
3617 tmp = build2 (MODIFY_EXPR, void_type_node, se.expr, tmp);
3618 gfc_add_expr_to_block (&se.pre, tmp);
3622 tmp = build1_v (GOTO_EXPR, error_label);
3623 parm = fold_build2 (NE_EXPR, boolean_type_node,
3624 stat, build_int_cst (TREE_TYPE (stat), 0));
3625 tmp = fold_build3 (COND_EXPR, void_type_node,
3626 parm, tmp, build_empty_stmt ());
3627 gfc_add_expr_to_block (&se.pre, tmp);
3630 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3632 tmp = build_fold_indirect_ref (se.expr);
3633 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3634 gfc_add_expr_to_block (&se.pre, tmp);
3639 tmp = gfc_finish_block (&se.pre);
3640 gfc_add_expr_to_block (&block, tmp);
3643 /* Assign the value to the status variable. */
3646 tmp = build1_v (LABEL_EXPR, error_label);
3647 gfc_add_expr_to_block (&block, tmp);
3649 gfc_init_se (&se, NULL);
3650 gfc_conv_expr_lhs (&se, code->expr);
3651 tmp = convert (TREE_TYPE (se.expr), stat);
3652 gfc_add_modify_expr (&block, se.expr, tmp);
3655 return gfc_finish_block (&block);
3659 /* Translate a DEALLOCATE statement.
3660 There are two cases within the for loop:
3661 (1) deallocate(a1, a2, a3) is translated into the following sequence
3662 _gfortran_deallocate(a1, 0B)
3663 _gfortran_deallocate(a2, 0B)
3664 _gfortran_deallocate(a3, 0B)
3665 where the STAT= variable is passed a NULL pointer.
3666 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
3668 _gfortran_deallocate(a1, &stat)
3669 astat = astat + stat
3670 _gfortran_deallocate(a2, &stat)
3671 astat = astat + stat
3672 _gfortran_deallocate(a3, &stat)
3673 astat = astat + stat
3674 In case (1), we simply return at the end of the for loop. In case (2)
3675 we set STAT= astat. */
3677 gfc_trans_deallocate (gfc_code * code)
3682 tree apstat, astat, pstat, stat, tmp;
3685 gfc_start_block (&block);
3687 /* Set up the optional STAT= */
3690 tree gfc_int4_type_node = gfc_get_int_type (4);
3692 /* Variable used with the library call. */
3693 stat = gfc_create_var (gfc_int4_type_node, "stat");
3694 pstat = build_fold_addr_expr (stat);
3696 /* Running total of possible deallocation failures. */
3697 astat = gfc_create_var (gfc_int4_type_node, "astat");
3698 apstat = build_fold_addr_expr (astat);
3700 /* Initialize astat to 0. */
3701 gfc_add_modify_expr (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
3705 pstat = apstat = null_pointer_node;
3706 stat = astat = NULL_TREE;
3709 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3712 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3714 gfc_init_se (&se, NULL);
3715 gfc_start_block (&se.pre);
3717 se.want_pointer = 1;
3718 se.descriptor_only = 1;
3719 gfc_conv_expr (&se, expr);
3721 if (expr->ts.type == BT_DERIVED
3722 && expr->ts.derived->attr.alloc_comp)
3725 gfc_ref *last = NULL;
3726 for (ref = expr->ref; ref; ref = ref->next)
3727 if (ref->type == REF_COMPONENT)
3730 /* Do not deallocate the components of a derived type
3731 ultimate pointer component. */
3732 if (!(last && last->u.c.component->pointer)
3733 && !(!last && expr->symtree->n.sym->attr.pointer))
3735 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
3737 gfc_add_expr_to_block (&se.pre, tmp);
3742 tmp = gfc_array_deallocate (se.expr, pstat);
3745 tmp = build_call_expr (gfor_fndecl_deallocate, 2, se.expr, pstat);
3746 gfc_add_expr_to_block (&se.pre, tmp);
3748 tmp = build2 (MODIFY_EXPR, void_type_node,
3749 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
3752 gfc_add_expr_to_block (&se.pre, tmp);
3754 /* Keep track of the number of failed deallocations by adding stat
3755 of the last deallocation to the running total. */
3758 apstat = build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
3759 gfc_add_modify_expr (&se.pre, astat, apstat);
3762 tmp = gfc_finish_block (&se.pre);
3763 gfc_add_expr_to_block (&block, tmp);
3767 /* Assign the value to the status variable. */
3770 gfc_init_se (&se, NULL);
3771 gfc_conv_expr_lhs (&se, code->expr);
3772 tmp = convert (TREE_TYPE (se.expr), astat);
3773 gfc_add_modify_expr (&block, se.expr, tmp);
3776 return gfc_finish_block (&block);