1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
27 #include "coretypes.h"
29 #include "tree-gimple.h"
36 #include "trans-stmt.h"
37 #include "trans-types.h"
38 #include "trans-array.h"
39 #include "trans-const.h"
41 #include "dependency.h"
43 typedef struct iter_info
/* Per-index iterator record used when translating FORALL; only the chain
   link is visible in this excerpt -- other members (var, start, end, step)
   are presumably declared on the missing lines.  TODO confirm.  */
49 struct iter_info *next;
53 typedef struct forall_info
/* Links for nested FORALL constructs: 'outer' points to the enclosing
   FORALL's info, 'next_nest' to the next construct at the same level.  */
60 struct forall_info *outer;
61 struct forall_info *next_nest;
/* Forward declaration: WHERE translation (gfc_trans_where_2) is needed by
   the FORALL machinery defined later in this file.  */
65 static void gfc_trans_where_2 (gfc_code *, tree, bool,
66 forall_info *, stmtblock_t *);
68 /* Translate a F95 label number to a LABEL_EXPR. */
71 gfc_trans_label_here (gfc_code * code)
/* Emit the backend LABEL_DECL for this statement's label so that GOTOs
   elsewhere can branch to it.  */
73 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
77 /* Given a variable expression which has been ASSIGNed to, find the decl
78 containing the auxiliary variables. For variables in common blocks this
82 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only variables marked with the ASSIGN attribute carry the auxiliary
   label-address/length decls this function retrieves.  */
84 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
85 gfc_conv_expr (se, expr);
86 /* Deals with variable in common block. Get the field declaration. */
87 if (TREE_CODE (se->expr) == COMPONENT_REF)
88 se->expr = TREE_OPERAND (se->expr, 1);
89 /* Deals with dummy argument. Get the parameter declaration. */
90 else if (TREE_CODE (se->expr) == INDIRECT_REF)
91 se->expr = TREE_OPERAND (se->expr, 0);
94 /* Translate a label assignment statement. */
97 gfc_trans_label_assign (gfc_code * code)
107 /* Start a new block. */
108 gfc_init_se (&se, NULL);
109 gfc_start_block (&se.pre);
110 gfc_conv_label_variable (&se, code->expr);
/* Fetch the auxiliary decls attached to the ASSIGN variable: the stored
   string length and the stored label address.  */
112 len = GFC_DECL_STRING_LEN (se.expr);
113 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
115 label_tree = gfc_get_label_decl (code->label);
117 if (code->label->defined == ST_LABEL_TARGET)
/* Ordinary branch target: store its address and a length of -1 as the
   "this is a GOTO target, not a FORMAT" marker.  */
119 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
120 len_tree = integer_minus_one_node;
/* Otherwise the label names a FORMAT statement: store the format string
   and its real length instead (the else branch's opening lines are not
   visible in this excerpt).  */
124 label_str = code->label->format->value.character.string;
125 label_len = code->label->format->value.character.length;
126 len_tree = build_int_cst (NULL_TREE, label_len);
127 label_tree = gfc_build_string_const (label_len + 1, label_str);
128 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
/* Commit both auxiliary assignments and return the finished block.  */
131 gfc_add_modify_expr (&se.pre, len, len_tree);
132 gfc_add_modify_expr (&se.pre, addr, label_tree);
134 return gfc_finish_block (&se.pre);
137 /* Translate a GOTO statement. */
140 gfc_trans_goto (gfc_code * code)
142 locus loc = code->loc;
/* Simple (non-assigned) GOTO: branch straight to the label.  */
148 if (code->label != NULL)
149 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
/* Assigned GOTO: verify at run time that the variable holds a target
   label (length == -1, see gfc_trans_label_assign) before jumping.  */
152 gfc_init_se (&se, NULL);
153 gfc_start_block (&se.pre);
154 gfc_conv_label_variable (&se, code->expr);
155 tmp = GFC_DECL_STRING_LEN (se.expr);
156 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
157 build_int_cst (TREE_TYPE (tmp), -1));
158 gfc_trans_runtime_check (tmp, "Assigned label is not a target label",
161 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
/* Without an allowed-label list we can jump directly through the stored
   address (computed-goto form; surrounding conditional lines are not
   visible in this excerpt).  */
166 target = build1 (GOTO_EXPR, void_type_node, assigned_goto);
167 gfc_add_expr_to_block (&se.pre, target);
168 return gfc_finish_block (&se.pre);
171 /* Check the label list. */
/* Loop body (do/while over code list): compare the stored address with
   each permitted label and branch on a match.  */
174 target = gfc_get_label_decl (code->label);
175 tmp = gfc_build_addr_expr (pvoid_type_node, target);
176 tmp = build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
177 tmp = build3_v (COND_EXPR, tmp,
178 build1 (GOTO_EXPR, void_type_node, target),
179 build_empty_stmt ());
180 gfc_add_expr_to_block (&se.pre, tmp);
183 while (code != NULL);
/* Fell off the list: the stored label was not among the permitted ones,
   so raise an unconditional runtime error.  */
184 gfc_trans_runtime_check (boolean_true_node,
185 "Assigned label is not in the list", &se.pre, &loc);
187 return gfc_finish_block (&se.pre);
191 /* Translate an ENTRY statement. Just adds a label for this entry point. */
193 gfc_trans_entry (gfc_code * code)
193 return build1_v (LABEL_EXPR, code->ext.entry->label);
199 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
200 elemental subroutines. Make temporaries for output arguments if any such
201 dependencies are found. Output arguments are chosen because internal_unpack
202 can be used, as is, to copy the result back to the variable. */
204 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
205 gfc_symbol * sym, gfc_actual_arglist * arg)
207 gfc_actual_arglist *arg0;
209 gfc_formal_arglist *formal;
210 gfc_loopinfo tmp_loop;
/* Nothing to do for a purely scalar call -- no scalarization chain.  */
222 if (loopse->ss == NULL)
227 formal = sym->formal;
229 /* Loop over all the arguments testing for dependencies. */
230 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
236 /* Obtain the info structure for the current argument. */
238 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
242 info = &ss->data.info;
246 /* If there is a dependency, create a temporary and use it
247 instead of the variable. */
248 fsym = formal ? formal->sym : NULL;
249 if (e->expr_type == EXPR_VARIABLE
251 && fsym->attr.intent == INTENT_OUT
252 && gfc_check_fncall_dependency (e, INTENT_OUT, sym, arg0))
254 /* Make a local loopinfo for the temporary creation, so that
255 none of the other ss->info's have to be renormalized. */
256 gfc_init_loopinfo (&tmp_loop);
257 for (n = 0; n < info->dimen; n++)
259 tmp_loop.to[n] = loopse->loop->to[n];
260 tmp_loop.from[n] = loopse->loop->from[n];
261 tmp_loop.order[n] = loopse->loop->order[n];
264 /* Generate the temporary. Merge the block so that the
265 declarations are put at the right binding level. */
266 size = gfc_create_var (gfc_array_index_type, NULL);
267 data = gfc_create_var (pvoid_type_node, NULL);
268 gfc_start_block (&block);
269 tmp = gfc_typenode_for_spec (&e->ts);
270 tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
271 &tmp_loop, info, tmp,
272 false, true, false, false);
273 gfc_add_modify_expr (&se->pre, size, tmp);
274 tmp = fold_convert (pvoid_type_node, info->data);
275 gfc_add_modify_expr (&se->pre, data, tmp);
276 gfc_merge_block_scope (&block);
278 /* Obtain the argument descriptor for unpacking. */
279 gfc_init_se (&parmse, NULL);
280 parmse.want_pointer = 1;
281 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
282 gfc_add_block_to_block (&se->pre, &parmse.pre);
284 /* Calculate the offset for the temporary. */
/* offset = -sum(from[n] * stride[n]) so that element (from..) of the
   temporary maps onto index zero of its data block.  */
285 offset = gfc_index_zero_node;
286 for (n = 0; n < info->dimen; n++)
288 tmp = gfc_conv_descriptor_stride (info->descriptor,
290 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
291 loopse->loop->from[n], tmp);
292 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
295 info->offset = gfc_create_var (gfc_array_index_type, NULL);
296 gfc_add_modify_expr (&se->pre, info->offset, offset);
298 /* Copy the result back using unpack. */
/* Runs in se->post, i.e. after the elemental call has filled the
   temporary, writing it back into the actual argument.  */
299 tmp = gfc_chainon_list (NULL_TREE, parmse.expr);
300 tmp = gfc_chainon_list (tmp, data);
301 tmp = build_function_call_expr (gfor_fndecl_in_unpack, tmp);
302 gfc_add_expr_to_block (&se->post, tmp);
304 gfc_add_block_to_block (&se->post, &parmse.post);
310 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
313 gfc_trans_call (gfc_code * code, bool dependency_check)
317 int has_alternate_specifier;
319 /* A CALL starts a new block because the actual arguments may have to
320 be evaluated first. */
321 gfc_init_se (&se, NULL);
322 gfc_start_block (&se.pre);
324 gcc_assert (code->resolved_sym);
326 ss = gfc_ss_terminator;
327 if (code->resolved_sym->attr.elemental)
328 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
330 /* Is not an elemental subroutine call with array valued arguments. */
331 if (ss == gfc_ss_terminator)
334 /* Translate the call. */
335 has_alternate_specifier
336 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
339 /* A subroutine without side-effect, by definition, does nothing! */
340 TREE_SIDE_EFFECTS (se.expr) = 1;
342 /* Chain the pieces together and return the block. */
/* An alternate-return specifier was used: the call's integer result
   selects a branch via the EXEC_SELECT that the front end appended.  */
343 if (has_alternate_specifier)
345 gfc_code *select_code;
347 select_code = code->next;
348 gcc_assert(select_code->op == EXEC_SELECT);
349 sym = select_code->expr->symtree->n.sym;
350 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
351 if (sym->backend_decl == NULL)
352 sym->backend_decl = gfc_get_symbol_decl (sym);
353 gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
356 gfc_add_expr_to_block (&se.pre, se.expr);
358 gfc_add_block_to_block (&se.pre, &se.post);
363 /* An elemental subroutine call with array valued arguments has
370 /* gfc_walk_elemental_function_args renders the ss chain in the
371 reverse order to the actual argument order. */
372 ss = gfc_reverse_ss (ss);
374 /* Initialize the loop. */
375 gfc_init_se (&loopse, NULL);
376 gfc_init_loopinfo (&loop);
377 gfc_add_ss_to_loop (&loop, ss);
379 gfc_conv_ss_startstride (&loop);
380 gfc_conv_loop_setup (&loop);
381 gfc_mark_ss_chain_used (ss, 1);
383 /* Convert the arguments, checking for dependencies. */
384 gfc_copy_loopinfo_to_se (&loopse, &loop);
387 /* For operator assignment, we need to do dependency checking.
388 We also check the intent of the parameters. */
389 if (dependency_check)
392 sym = code->resolved_sym;
/* Fixed: these asserts used '=' (assignment) instead of '==', so they
   always passed AND silently overwrote the formal arguments' intent.
   Use equality comparison, matching upstream GCC.  */
393 gcc_assert (sym->formal->sym->attr.intent == INTENT_OUT);
394 gcc_assert (sym->formal->next->sym->attr.intent == INTENT_IN);
395 gfc_conv_elemental_dependencies (&se, &loopse, sym,
399 /* Generate the loop body. */
400 gfc_start_scalarized_body (&loop, &body);
401 gfc_init_block (&block);
403 /* Add the subroutine call to the block. */
404 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
406 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
408 gfc_add_block_to_block (&block, &loopse.pre);
409 gfc_add_block_to_block (&block, &loopse.post);
411 /* Finish up the loop block and the loop. */
412 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
413 gfc_trans_scalarizing_loops (&loop, &body);
414 gfc_add_block_to_block (&se.pre, &loop.pre);
415 gfc_add_block_to_block (&se.pre, &loop.post);
416 gfc_add_block_to_block (&se.pre, &se.post);
417 gfc_cleanup_loop (&loop);
420 return gfc_finish_block (&se.pre);
424 /* Translate the RETURN statement. */
427 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
435 /* If code->expr is not NULL, this return statement must appear
436 in a subroutine and current_fake_result_decl has already
439 result = gfc_get_fake_result_decl (NULL, 0);
/* No fake result decl: an alternate return with no matching '*' dummy;
   warn and fall back to a plain jump to the return label.  */
442 gfc_warning ("An alternate return at %L without a * dummy argument",
444 return build1_v (GOTO_EXPR, gfc_get_return_label ());
447 /* Start a new block for this statement. */
448 gfc_init_se (&se, NULL);
449 gfc_start_block (&se.pre);
451 gfc_conv_expr (&se, code->expr);
/* Store the alternate-return value into the fake result, then jump to
   the common return label.  */
453 tmp = build2 (MODIFY_EXPR, TREE_TYPE (result), result, se.expr);
454 gfc_add_expr_to_block (&se.pre, tmp);
456 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
457 gfc_add_expr_to_block (&se.pre, tmp);
458 gfc_add_block_to_block (&se.pre, &se.post);
459 return gfc_finish_block (&se.pre);
/* Plain RETURN (code->expr == NULL): just jump to the return label.  */
462 return build1_v (GOTO_EXPR, gfc_get_return_label ());
466 /* Translate the PAUSE statement. We have to translate this statement
467 to a runtime library call. */
470 gfc_trans_pause (gfc_code * code)
472 tree gfc_int4_type_node = gfc_get_int_type (4);
478 /* Start a new block for this statement. */
479 gfc_init_se (&se, NULL);
480 gfc_start_block (&se.pre);
/* Numeric PAUSE: pass the stop code to _gfortran_pause_numeric.  */
483 if (code->expr == NULL)
485 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
486 args = gfc_chainon_list (NULL_TREE, tmp);
487 fndecl = gfor_fndecl_pause_numeric;
/* PAUSE with a character expression: pass string pointer and length to
   _gfortran_pause_string.  */
491 gfc_conv_expr_reference (&se, code->expr);
492 args = gfc_chainon_list (NULL_TREE, se.expr);
493 args = gfc_chainon_list (args, se.string_length);
494 fndecl = gfor_fndecl_pause_string;
497 tmp = build_function_call_expr (fndecl, args);
498 gfc_add_expr_to_block (&se.pre, tmp);
500 gfc_add_block_to_block (&se.pre, &se.post);
502 return gfc_finish_block (&se.pre);
506 /* Translate the STOP statement. We have to translate this statement
507 to a runtime library call. */
510 gfc_trans_stop (gfc_code * code)
512 tree gfc_int4_type_node = gfc_get_int_type (4);
518 /* Start a new block for this statement. */
519 gfc_init_se (&se, NULL);
520 gfc_start_block (&se.pre);
/* Numeric STOP: pass the stop code to _gfortran_stop_numeric.  */
523 if (code->expr == NULL)
525 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
526 args = gfc_chainon_list (NULL_TREE, tmp);
527 fndecl = gfor_fndecl_stop_numeric;
/* STOP with a character expression: pass string pointer and length to
   _gfortran_stop_string.  */
531 gfc_conv_expr_reference (&se, code->expr);
532 args = gfc_chainon_list (NULL_TREE, se.expr);
533 args = gfc_chainon_list (args, se.string_length);
534 fndecl = gfor_fndecl_stop_string;
537 tmp = build_function_call_expr (fndecl, args);
538 gfc_add_expr_to_block (&se.pre, tmp);
540 gfc_add_block_to_block (&se.pre, &se.post);
542 return gfc_finish_block (&se.pre);
546 /* Generate GENERIC for the IF construct. This function also deals with
547 the simple IF statement, because the front end translates the IF
548 statement into an IF construct.
580 where COND_S is the simplified version of the predicate. PRE_COND_S
581 are the pre side-effects produced by the translation of the
583 We need to build the chain recursively otherwise we run into
584 problems with folding incomplete statements. */
587 gfc_trans_if_1 (gfc_code * code)
592 /* Check for an unconditional ELSE clause. */
/* An ELSE block has no condition expression; just translate its body.  */
594 return gfc_trans_code (code->next);
596 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
597 gfc_init_se (&if_se, NULL);
598 gfc_start_block (&if_se.pre);
600 /* Calculate the IF condition expression. */
601 gfc_conv_expr_val (&if_se, code->expr);
603 /* Translate the THEN clause. */
604 stmt = gfc_trans_code (code->next);
606 /* Translate the ELSE clause. */
/* Recurse for ELSE IF chains; empty statement when there is no ELSE.  */
608 elsestmt = gfc_trans_if_1 (code->block);
610 elsestmt = build_empty_stmt ();
612 /* Build the condition expression and add it to the condition block. */
613 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
615 gfc_add_expr_to_block (&if_se.pre, stmt);
617 /* Finish off this statement. */
618 return gfc_finish_block (&if_se.pre);
/* Translate an IF construct; thin wrapper over gfc_trans_if_1.  */
622 gfc_trans_if (gfc_code * code)
624 /* Ignore the top EXEC_IF, it only announces an IF construct. The
625 actual code we must translate is in code->block. */
627 return gfc_trans_if_1 (code->block);
631 /* Translate an arithmetic IF expression.
633 IF (cond) label1, label2, label3 translates to
645 An optimized version can be generated in case of equal labels.
646 E.g., if label1 is equal to label2, we can translate it to
655 gfc_trans_arithmetic_if (gfc_code * code)
663 /* Start a new block. */
664 gfc_init_se (&se, NULL);
665 gfc_start_block (&se.pre);
667 /* Pre-evaluate COND. */
/* Evaluate once into a temporary: COND is compared up to twice below.  */
668 gfc_conv_expr_val (&se, code->expr);
669 se.expr = gfc_evaluate_now (se.expr, &se.pre);
671 /* Build something to compare with. */
672 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
674 if (code->label->value != code->label2->value)
676 /* If (cond < 0) take branch1 else take branch2.
677 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
678 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
679 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
/* When label1 == label3 only "cond == 0 -> label2" distinguishes the
   branches, so NE suffices; otherwise test LT.  */
681 if (code->label->value != code->label3->value)
682 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
684 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
686 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
/* label1 == label2: negative and zero cases collapse to one jump.  */
689 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
691 if (code->label->value != code->label3->value
692 && code->label2->value != code->label3->value)
694 /* if (cond <= 0) take branch1 else take branch2. */
695 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
696 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
697 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
700 /* Append the COND_EXPR to the evaluation of COND, and return. */
701 gfc_add_expr_to_block (&se.pre, branch1);
702 return gfc_finish_block (&se.pre);
706 /* Translate the simple DO construct. This is where the loop variable has
707 integer type and step +-1. We can't use this in the general case
708 because integer overflow and floating point errors could give incorrect
710 We translate a do loop from:
712 DO dovar = from, to, step
718 [Evaluate loop bounds and step]
720 if ((step > 0) ? (dovar <= to) : (dovar => to))
726 cond = (dovar == to);
728 if (cond) goto end_label;
733 This helps the optimizers by avoiding the extra induction variable
734 used in the general case. */
737 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
738 tree from, tree to, tree step)
747 type = TREE_TYPE (dovar);
749 /* Initialize the DO variable: dovar = from. */
750 gfc_add_modify_expr (pblock, dovar, from);
752 /* Cycle and exit statements are implemented with gotos. */
753 cycle_label = gfc_build_label_decl (NULL_TREE);
754 exit_label = gfc_build_label_decl (NULL_TREE);
756 /* Put the labels where they can be found later. See gfc_trans_do(). */
757 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
760 gfc_start_block (&body);
762 /* Main loop body. */
763 tmp = gfc_trans_code (code->block->next);
764 gfc_add_expr_to_block (&body, tmp);
766 /* Label for cycle statements (if needed). */
767 if (TREE_USED (cycle_label))
769 tmp = build1_v (LABEL_EXPR, cycle_label);
770 gfc_add_expr_to_block (&body, tmp);
773 /* Evaluate the loop condition. */
/* Test dovar == to BEFORE incrementing, so a step of +-1 can never
   overflow past 'to' -- that is what makes this "simple" form safe.  */
774 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
775 cond = gfc_evaluate_now (cond, &body);
777 /* Increment the loop variable. */
778 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
779 gfc_add_modify_expr (&body, dovar, tmp);
/* Exit the loop if the pre-increment test fired.  */
782 tmp = build1_v (GOTO_EXPR, exit_label);
783 TREE_USED (exit_label) = 1;
784 tmp = fold_build3 (COND_EXPR, void_type_node,
785 cond, tmp, build_empty_stmt ());
786 gfc_add_expr_to_block (&body, tmp);
788 /* Finish the loop body. */
789 tmp = gfc_finish_block (&body);
790 tmp = build1_v (LOOP_EXPR, tmp);
792 /* Only execute the loop if the number of iterations is positive. */
/* 'step' is a +-1 constant here, so its sign picks the guard's
   direction of comparison.  */
793 if (tree_int_cst_sgn (step) > 0)
794 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
796 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
797 tmp = fold_build3 (COND_EXPR, void_type_node,
798 cond, tmp, build_empty_stmt ());
799 gfc_add_expr_to_block (pblock, tmp);
801 /* Add the exit label. */
802 tmp = build1_v (LABEL_EXPR, exit_label);
803 gfc_add_expr_to_block (pblock, tmp);
805 return gfc_finish_block (pblock);
808 /* Translate the DO construct. This obviously is one of the most
809 important ones to get right with any compiler, but especially
812 We special case some loop forms as described in gfc_trans_simple_do.
813 For other cases we implement them with a separate loop count,
814 as described in the standard.
816 We translate a do loop from:
818 DO dovar = from, to, step
824 [evaluate loop bounds and step]
825 count = (to + step - from) / step;
833 if (count <=0) goto exit_label;
837 TODO: Large loop counts
838 The code above assumes the loop count fits into a signed integer kind,
839 i.e. Does not work for loop counts > 2^31 for integer(kind=4) variables
840 We must support the full range. */
843 gfc_trans_do (gfc_code * code)
860 gfc_start_block (&block);
862 /* Evaluate all the expressions in the iterator. */
863 gfc_init_se (&se, NULL);
864 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
865 gfc_add_block_to_block (&block, &se.pre);
867 type = TREE_TYPE (dovar);
/* Evaluate start, end and step once each, up front, into temporaries;
   the standard requires the loop parameters to be fixed at entry.  */
869 gfc_init_se (&se, NULL);
870 gfc_conv_expr_val (&se, code->ext.iterator->start);
871 gfc_add_block_to_block (&block, &se.pre);
872 from = gfc_evaluate_now (se.expr, &block);
874 gfc_init_se (&se, NULL);
875 gfc_conv_expr_val (&se, code->ext.iterator->end);
876 gfc_add_block_to_block (&block, &se.pre);
877 to = gfc_evaluate_now (se.expr, &block);
879 gfc_init_se (&se, NULL);
880 gfc_conv_expr_val (&se, code->ext.iterator->step);
881 gfc_add_block_to_block (&block, &se.pre);
882 step = gfc_evaluate_now (se.expr, &block);
884 /* Special case simple loops. */
885 if (TREE_CODE (type) == INTEGER_TYPE
886 && (integer_onep (step)
887 || tree_int_cst_equal (step, integer_minus_one_node)))
888 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
890 /* Initialize loop count. This code is executed before we enter the
891 loop body. We generate: count = (to + step - from) / step. */
893 tmp = fold_build2 (MINUS_EXPR, type, step, from);
894 tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
895 if (TREE_CODE (type) == INTEGER_TYPE
897 tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
898 count = gfc_create_var (type, "count");
902 /* TODO: We could use the same width as the real type.
903 This would probably cause more problems that it solves
904 when we implement "long double" types. */
/* REAL-typed DO variable: do the division in the real type, then
   truncate the iteration count to an integer.  */
905 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
906 tmp = fold_build1 (FIX_TRUNC_EXPR, gfc_array_index_type, tmp);
907 count = gfc_create_var (gfc_array_index_type, "count");
909 gfc_add_modify_expr (&block, count, tmp);
911 count_one = build_int_cst (TREE_TYPE (count), 1);
913 /* Initialize the DO variable: dovar = from. */
914 gfc_add_modify_expr (&block, dovar, from);
917 gfc_start_block (&body);
919 /* Cycle and exit statements are implemented with gotos. */
920 cycle_label = gfc_build_label_decl (NULL_TREE);
921 exit_label = gfc_build_label_decl (NULL_TREE);
923 /* Start with the loop condition. Loop until count <= 0. */
924 cond = fold_build2 (LE_EXPR, boolean_type_node, count,
925 build_int_cst (TREE_TYPE (count), 0));
926 tmp = build1_v (GOTO_EXPR, exit_label);
927 TREE_USED (exit_label) = 1;
928 tmp = fold_build3 (COND_EXPR, void_type_node,
929 cond, tmp, build_empty_stmt ());
930 gfc_add_expr_to_block (&body, tmp);
932 /* Put these labels where they can be found later. We put the
933 labels in a TREE_LIST node (because TREE_CHAIN is already
934 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
935 label in TREE_VALUE (backend_decl). */
937 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
939 /* Main loop body. */
940 tmp = gfc_trans_code (code->block->next);
941 gfc_add_expr_to_block (&body, tmp);
943 /* Label for cycle statements (if needed). */
944 if (TREE_USED (cycle_label))
946 tmp = build1_v (LABEL_EXPR, cycle_label);
947 gfc_add_expr_to_block (&body, tmp);
950 /* Increment the loop variable. */
951 tmp = build2 (PLUS_EXPR, type, dovar, step);
952 gfc_add_modify_expr (&body, dovar, tmp);
954 /* Decrement the loop count. */
955 tmp = build2 (MINUS_EXPR, TREE_TYPE (count), count, count_one);
956 gfc_add_modify_expr (&body, count, tmp);
958 /* End of loop body. */
959 tmp = gfc_finish_block (&body);
961 /* The for loop itself. */
962 tmp = build1_v (LOOP_EXPR, tmp);
963 gfc_add_expr_to_block (&block, tmp);
965 /* Add the exit label. */
966 tmp = build1_v (LABEL_EXPR, exit_label);
967 gfc_add_expr_to_block (&block, tmp);
969 return gfc_finish_block (&block);
973 /* Translate the DO WHILE construct.
986 if (! cond) goto exit_label;
992 Because the evaluation of the exit condition `cond' may have side
993 effects, we can't do much for empty loop bodies. The backend optimizers
994 should be smart enough to eliminate any dead loops. */
997 gfc_trans_do_while (gfc_code * code)
1005 /* Everything we build here is part of the loop body. */
1006 gfc_start_block (&block);
1008 /* Cycle and exit statements are implemented with gotos. */
1009 cycle_label = gfc_build_label_decl (NULL_TREE);
1010 exit_label = gfc_build_label_decl (NULL_TREE);
1012 /* Put the labels where they can be found later. See gfc_trans_do(). */
1013 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1015 /* Create a GIMPLE version of the exit condition. */
/* The condition is re-evaluated inside the loop body on every
   iteration, since it may have side effects.  */
1016 gfc_init_se (&cond, NULL);
1017 gfc_conv_expr_val (&cond, code->expr);
1018 gfc_add_block_to_block (&block, &cond.pre);
1019 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1021 /* Build "IF (! cond) GOTO exit_label". */
1022 tmp = build1_v (GOTO_EXPR, exit_label);
1023 TREE_USED (exit_label) = 1;
1024 tmp = fold_build3 (COND_EXPR, void_type_node,
1025 cond.expr, tmp, build_empty_stmt ());
1026 gfc_add_expr_to_block (&block, tmp);
1028 /* The main body of the loop. */
1029 tmp = gfc_trans_code (code->block->next);
1030 gfc_add_expr_to_block (&block, tmp);
1032 /* Label for cycle statements (if needed). */
1033 if (TREE_USED (cycle_label))
1035 tmp = build1_v (LABEL_EXPR, cycle_label);
1036 gfc_add_expr_to_block (&block, tmp);
1039 /* End of loop body. */
1040 tmp = gfc_finish_block (&block);
1042 gfc_init_block (&block);
1043 /* Build the loop. */
1044 tmp = build1_v (LOOP_EXPR, tmp);
1045 gfc_add_expr_to_block (&block, tmp);
1047 /* Add the exit label. */
1048 tmp = build1_v (LABEL_EXPR, exit_label);
1049 gfc_add_expr_to_block (&block, tmp);
1051 return gfc_finish_block (&block);
1055 /* Translate the SELECT CASE construct for INTEGER case expressions,
1056 without killing all potential optimizations. The problem is that
1057 Fortran allows unbounded cases, but the back-end does not, so we
1058 need to intercept those before we enter the equivalent SWITCH_EXPR
1061 For example, we translate this,
1064 CASE (:100,101,105:115)
1074 to the GENERIC equivalent,
1078 case (minimum value for typeof(expr) ... 100:
1084 case 200 ... (maximum value for typeof(expr):
1101 gfc_trans_integer_select (gfc_code * code)
1111 gfc_start_block (&block);
1113 /* Calculate the switch expression. */
1114 gfc_init_se (&se, NULL);
1115 gfc_conv_expr_val (&se, code->expr);
1116 gfc_add_block_to_block (&block, &se.pre);
1118 end_label = gfc_build_label_decl (NULL_TREE);
1120 gfc_init_block (&body);
1122 for (c = code->block; c; c = c->block)
1124 for (cp = c->ext.case_list; cp; cp = cp->next)
1129 /* Assume it's the default case. */
1130 low = high = NULL_TREE;
/* A present lower bound overrides the default-case assumption.  */
1134 low = gfc_conv_constant_to_tree (cp->low);
1136 /* If there's only a lower bound, set the high bound to the
1137 maximum value of the case expression. */
1139 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1144 /* Three cases are possible here:
1146 1) There is no lower bound, e.g. CASE (:N).
1147 2) There is a lower bound .NE. high bound, that is
1148 a case range, e.g. CASE (N:M) where M>N (we make
1149 sure that M>N during type resolution).
1150 3) There is a lower bound, and it has the same value
1151 as the high bound, e.g. CASE (N:N). This is our
1152 internal representation of CASE(N).
1154 In the first and second case, we need to set a value for
1155 high. In the third case, we don't because the GCC middle
1156 end represents a single case value by just letting high be
1157 a NULL_TREE. We can't do that because we need to be able
1158 to represent unbounded cases. */
1162 && mpz_cmp (cp->low->value.integer,
1163 cp->high->value.integer) != 0))
1164 high = gfc_conv_constant_to_tree (cp->high);
1166 /* Unbounded case. */
/* No lower bound: extend the range down to the type minimum.  */
1168 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1171 /* Build a label. */
1172 label = gfc_build_label_decl (NULL_TREE);
1174 /* Add this case label.
1175 Add parameter 'label', make it match GCC backend. */
1176 tmp = build3 (CASE_LABEL_EXPR, void_type_node, low, high, label);
1177 gfc_add_expr_to_block (&body, tmp);
1180 /* Add the statements for this case. */
1181 tmp = gfc_trans_code (c->next);
1182 gfc_add_expr_to_block (&body, tmp);
1184 /* Break to the end of the construct. */
/* Implicit Fortran "break": every case block jumps to end_label.  */
1185 tmp = build1_v (GOTO_EXPR, end_label);
1186 gfc_add_expr_to_block (&body, tmp);
1189 tmp = gfc_finish_block (&body);
1190 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1191 gfc_add_expr_to_block (&block, tmp);
1193 tmp = build1_v (LABEL_EXPR, end_label);
1194 gfc_add_expr_to_block (&block, tmp);
1196 return gfc_finish_block (&block);
1200 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1202 There are only two cases possible here, even though the standard
1203 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1204 .FALSE., and DEFAULT.
1206 We never generate more than two blocks here. Instead, we always
1207 try to eliminate the DEFAULT case. This way, we can translate this
1208 kind of SELECT construct to a simple
1212 expression in GENERIC. */
1215 gfc_trans_logical_select (gfc_code * code)
/* t = block for .TRUE., f = block for .FALSE., d = default block.  */
1218 gfc_code *t, *f, *d;
1223 /* Assume we don't have any cases at all. */
1226 /* Now see which ones we actually do have. We can have at most two
1227 cases in a single case list: one for .TRUE. and one for .FALSE.
1228 The default case is always separate. If the cases for .TRUE. and
1229 .FALSE. are in the same case list, the block for that case list
1230 always executed, and we don't generate code a COND_EXPR. */
1231 for (c = code->block; c; c = c->block)
1233 for (cp = c->ext.case_list; cp; cp = cp->next)
1237 if (cp->low->value.logical == 0) /* .FALSE. */
1239 else /* if (cp->value.logical != 0), thus .TRUE. */
1247 /* Start a new block. */
1248 gfc_start_block (&block);
1250 /* Calculate the switch expression. We always need to do this
1251 because it may have side effects. */
1252 gfc_init_se (&se, NULL);
1253 gfc_conv_expr_val (&se, code->expr);
1254 gfc_add_block_to_block (&block, &se.pre);
1256 if (t == f && t != NULL)
1258 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1259 translate the code for these cases, append it to the current
1261 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1265 tree true_tree, false_tree, stmt;
1267 true_tree = build_empty_stmt ();
1268 false_tree = build_empty_stmt ();
1270 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1271 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1272 make the missing case the default case. */
1273 if (t != NULL && f != NULL)
1283 /* Translate the code for each of these blocks, and append it to
1284 the current block. */
1286 true_tree = gfc_trans_code (t->next);
1289 false_tree = gfc_trans_code (f->next);
/* The whole SELECT reduces to a single if/else on the selector.  */
1291 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1292 true_tree, false_tree);
1293 gfc_add_expr_to_block (&block, stmt);
1296 return gfc_finish_block (&block);
1300 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1301 Instead of generating compares and jumps, it is far simpler to
1302 generate a data structure describing the cases in order and call a
1303 library subroutine that locates the right case.
1304 This is particularly true because this is the only case where we
1305 might have to dispose of a temporary.
1306 The library subroutine returns a pointer to jump to or NULL if no
1307 branches are to be taken. */
1310 gfc_trans_character_select (gfc_code *code)
1312 tree init, node, end_label, tmp, type, args, *labels;
1314 stmtblock_t block, body;
/* The jump-table element type and its FIELD_DECLs are built once and
   cached across calls, hence the function-local statics.  */
1320 static tree select_struct;
1321 static tree ss_string1, ss_string1_len;
1322 static tree ss_string2, ss_string2_len;
1323 static tree ss_target;
1325 if (select_struct == NULL)
1327 tree gfc_int4_type_node = gfc_get_int_type (4);
1329 select_struct = make_node (RECORD_TYPE);
1330 TYPE_NAME (select_struct) = get_identifier ("_jump_struct");
/* Append one named field to select_struct and remember its FIELD_DECL
   in the corresponding ss_* static.  */
1333 #define ADD_FIELD(NAME, TYPE) \
1334 ss_##NAME = gfc_add_field_to_struct \
1335 (&(TYPE_FIELDS (select_struct)), select_struct, \
1336 get_identifier (stringize(NAME)), TYPE)
1338 ADD_FIELD (string1, pchar_type_node);
1339 ADD_FIELD (string1_len, gfc_int4_type_node);
1341 ADD_FIELD (string2, pchar_type_node);
1342 ADD_FIELD (string2_len, gfc_int4_type_node);
1344 ADD_FIELD (target, pvoid_type_node);
1347 gfc_finish_type (select_struct);
/* Walk to the leftmost case so the table is emitted in sorted order.  */
1350 cp = code->block->ext.case_list;
1351 while (cp->left != NULL)
1355 for (d = cp; d; d = d->right)
1359 labels = gfc_getmem (n * sizeof (tree));
/* One label per case; force them live so the static initializer can
   take their addresses.  */
1363 for(i = 0; i < n; i++)
1365 labels[i] = gfc_build_label_decl (NULL_TREE);
1366 TREE_USED (labels[i]) = 1;
1367 /* TODO: The gimplifier should do this for us, but it has
1368 inadequacies when dealing with static initializers. */
1369 FORCED_LABEL (labels[i]) = 1;
1372 end_label = gfc_build_label_decl (NULL_TREE);
1374 /* Generate the body */
1375 gfc_start_block (&block);
1376 gfc_init_block (&body);
1378 for (c = code->block; c; c = c->block)
1380 for (d = c->ext.case_list; d; d = d->next)
1382 tmp = build1_v (LABEL_EXPR, labels[d->n]);
1383 gfc_add_expr_to_block (&body, tmp);
1386 tmp = gfc_trans_code (c->next);
1387 gfc_add_expr_to_block (&body, tmp);
/* No fall-through between Fortran cases: jump past the construct.  */
1389 tmp = build1_v (GOTO_EXPR, end_label);
1390 gfc_add_expr_to_block (&body, tmp);
1393 /* Generate the structure describing the branches */
1397 for(d = cp; d; d = d->right, i++)
1401 gfc_init_se (&se, NULL);
/* An absent bound is encoded as a null string with zero length
   (cf. the d->high == NULL handling below).  */
1405 node = tree_cons (ss_string1, null_pointer_node, node);
1406 node = tree_cons (ss_string1_len, integer_zero_node, node);
1410 gfc_conv_expr_reference (&se, d->low);
1412 node = tree_cons (ss_string1, se.expr, node);
1413 node = tree_cons (ss_string1_len, se.string_length, node);
1416 if (d->high == NULL)
1418 node = tree_cons (ss_string2, null_pointer_node, node);
1419 node = tree_cons (ss_string2_len, integer_zero_node, node);
1423 gfc_init_se (&se, NULL);
1424 gfc_conv_expr_reference (&se, d->high);
1426 node = tree_cons (ss_string2, se.expr, node);
1427 node = tree_cons (ss_string2_len, se.string_length, node);
1430 tmp = gfc_build_addr_expr (pvoid_type_node, labels[i]);
1431 node = tree_cons (ss_target, tmp, node);
1433 tmp = build_constructor_from_list (select_struct, nreverse (node));
1434 init = tree_cons (NULL_TREE, tmp, init);
/* Build the constant array [0 .. n-1] of case descriptors.  */
1437 type = build_array_type (select_struct, build_index_type
1438 (build_int_cst (NULL_TREE, n - 1)));
1440 init = build_constructor_from_list (type, nreverse(init));
1441 TREE_CONSTANT (init) = 1;
1442 TREE_INVARIANT (init) = 1;
1443 TREE_STATIC (init) = 1;
1444 /* Create a static variable to hold the jump table. */
1445 tmp = gfc_create_var (type, "jumptable");
1446 TREE_CONSTANT (tmp) = 1;
1447 TREE_INVARIANT (tmp) = 1;
1448 TREE_STATIC (tmp) = 1;
1449 TREE_READONLY (tmp) = 1;
1450 DECL_INITIAL (tmp) = init;
1453 /* Build an argument list for the library call */
1454 init = gfc_build_addr_expr (pvoid_type_node, init);
1455 args = gfc_chainon_list (NULL_TREE, init);
1457 tmp = build_int_cst (NULL_TREE, n);
1458 args = gfc_chainon_list (args, tmp);
1460 tmp = gfc_build_addr_expr (pvoid_type_node, end_label);
1461 args = gfc_chainon_list (args, tmp);
1463 gfc_init_se (&se, NULL);
1464 gfc_conv_expr_reference (&se, code->expr);
1466 args = gfc_chainon_list (args, se.expr);
1467 args = gfc_chainon_list (args, se.string_length);
1469 gfc_add_block_to_block (&block, &se.pre);
/* The runtime routine returns the label address to branch to; do a
   computed goto on that value.  */
1471 tmp = build_function_call_expr (gfor_fndecl_select_string, args);
1472 case_label = gfc_create_var (TREE_TYPE (tmp), "case_label");
1473 gfc_add_modify_expr (&block, case_label, tmp);
1475 gfc_add_block_to_block (&block, &se.post);
1477 tmp = build1 (GOTO_EXPR, void_type_node, case_label);
1478 gfc_add_expr_to_block (&block, tmp);
1480 tmp = gfc_finish_block (&body);
1481 gfc_add_expr_to_block (&block, tmp);
1482 tmp = build1_v (LABEL_EXPR, end_label);
1483 gfc_add_expr_to_block (&block, tmp);
1488 return gfc_finish_block (&block);
1492 /* Translate the three variants of the SELECT CASE construct.
1494 SELECT CASEs with INTEGER case expressions can be translated to an
1495 equivalent GENERIC switch statement, and for LOGICAL case
1496 expressions we build one or two if-else compares.
1498 SELECT CASEs with CHARACTER case expressions are a whole different
1499 story, because they don't exist in GENERIC. So we sort them and
1500 do a binary search at runtime.
1502 Fortran has no BREAK statement, and it does not allow jumps from
1503 one case block to another. That makes things a lot easier for
1507 gfc_trans_select (gfc_code * code)
1509 gcc_assert (code && code->expr);
1511 /* Empty SELECT constructs are legal. */
1512 if (code->block == NULL)
1513 return build_empty_stmt ();
1515 /* Select the correct translation function. */
1516 switch (code->expr->ts.type)
1518 case BT_LOGICAL: return gfc_trans_logical_select (code);
1519 case BT_INTEGER: return gfc_trans_integer_select (code);
1520 case BT_CHARACTER: return gfc_trans_character_select (code);
/* Any other case-expression type indicates a front-end bug.  */
1522 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1528 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1529 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1530 indicates whether we should generate code to test the FORALLs mask
1531 array. OUTER is the loop header to be used for initializing mask
1534 The generated loop format is:
1535 count = (end - start + step) / step
1548 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1549 int mask_flag, stmtblock_t *outer)
1557 tree var, start, end, step;
1560 /* Initialize the mask index outside the FORALL nest. */
1561 if (mask_flag && forall_tmp->mask)
1562 gfc_add_modify_expr (outer, forall_tmp->maskindex, gfc_index_zero_node)
/* Wrap BODY in one counted loop per FORALL index of this nest level.  */
1564 iter = forall_tmp->this_loop;
1565 nvar = forall_tmp->nvar;
1566 for (n = 0; n < nvar; n++)
1569 start = iter->start;
1573 exit_label = gfc_build_label_decl (NULL_TREE);
1574 TREE_USED (exit_label) = 1;
1576 /* The loop counter. */
1577 count = gfc_create_var (TREE_TYPE (var), "count");
1579 /* The body of the loop. */
1580 gfc_init_block (&block);
1582 /* The exit condition: leave the loop when COUNT drops to zero. */
1583 cond = fold_build2 (LE_EXPR, boolean_type_node,
1584 count, build_int_cst (TREE_TYPE (count), 0));
1585 tmp = build1_v (GOTO_EXPR, exit_label);
1586 tmp = fold_build3 (COND_EXPR, void_type_node,
1587 cond, tmp, build_empty_stmt ());
1588 gfc_add_expr_to_block (&block, tmp);
1590 /* The main loop body. */
1591 gfc_add_expr_to_block (&block, body);
1593 /* Increment the loop variable. */
1594 tmp = build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1595 gfc_add_modify_expr (&block, var, tmp);
1597 /* Advance to the next mask element. Only do this for the
1599 if (n == 0 && mask_flag && forall_tmp->mask)
1601 tree maskindex = forall_tmp->maskindex;
1602 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
1603 maskindex, gfc_index_one_node);
1604 gfc_add_modify_expr (&block, maskindex, tmp);
1607 /* Decrement the loop counter. */
1608 tmp = build2 (MINUS_EXPR, TREE_TYPE (var), count, gfc_index_one_node);
1609 gfc_add_modify_expr (&block, count, tmp);
1611 body = gfc_finish_block (&block);
1613 /* Loop var initialization. */
1614 gfc_init_block (&block);
1615 gfc_add_modify_expr (&block, var, start);
1618 /* Initialize the loop counter: count = (end - start + step) / step. */
1619 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1620 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1621 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1622 gfc_add_modify_expr (&block, count, tmp);
1624 /* The loop expression. */
1625 tmp = build1_v (LOOP_EXPR, body);
1626 gfc_add_expr_to_block (&block, tmp);
1628 /* The exit label. */
1629 tmp = build1_v (LABEL_EXPR, exit_label);
1630 gfc_add_expr_to_block (&block, tmp);
1632 body = gfc_finish_block (&block);
1639 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1640 is nonzero, the body is controlled by all masks in the forall nest.
1641 Otherwise, the innermost loop is not controlled by its mask. This
1642 is used for initializing that mask. */
1645 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1650 forall_info *forall_tmp;
1651 tree mask, maskindex;
1653 gfc_start_block (&header);
/* Find the innermost nest level, then walk outwards, wrapping BODY in
   each level's mask test and iteration loops.  */
1655 forall_tmp = nested_forall_info;
1656 while (forall_tmp->next_nest != NULL)
1657 forall_tmp = forall_tmp->next_nest;
1658 while (forall_tmp != NULL)
1660 /* Generate body with masks' control. */
1663 mask = forall_tmp->mask;
1664 maskindex = forall_tmp->maskindex;
1666 /* If a mask was specified make the assignment conditional. */
1669 tmp = gfc_build_array_ref (mask, maskindex);
1670 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1673 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1674 forall_tmp = forall_tmp->outer;
1678 gfc_add_expr_to_block (&header, body);
1679 return gfc_finish_block (&header);
1683 /* Allocate data for holding a temporary array. Returns either a local
1684 temporary array or a pointer variable. */
1687 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
1695 if (INTEGER_CST_P (size))
1697 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1698 gfc_index_one_node)
/* Build the array type [0 .. size-1] of ELEM_TYPE.  */
1703 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1704 type = build_array_type (elem_type, type);
1705 if (gfc_can_put_var_on_stack (bytesize))
1707 gcc_assert (INTEGER_CST_P (size));
1708 tmpvar = gfc_create_var (type, "temp");
/* Heap path: call the libgfortran malloc wrapper matching the
   index-integer width and store the result in a pointer variable.  */
1713 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1714 *pdata = convert (pvoid_type_node, tmpvar);
1716 args = gfc_chainon_list (NULL_TREE, bytesize);
1717 if (gfc_index_integer_kind == 4)
1718 tmp = gfor_fndecl_internal_malloc;
1719 else if (gfc_index_integer_kind == 8)
1720 tmp = gfor_fndecl_internal_malloc64;
1723 tmp = build_function_call_expr (tmp, args);
1724 tmp = convert (TREE_TYPE (tmpvar), tmp);
1725 gfc_add_modify_expr (pblock, tmpvar, tmp);
1731 /* Generate code to copy the temporary to the actual lhs.  COUNT1 is
   the linear index into the temporary, COUNT3 the index into
   WHEREMASK (whose sense may be inverted by INVERT). */
1734 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1735 tree count1, tree wheremask, bool invert)
1739 stmtblock_t block, body;
1745 lss = gfc_walk_expr (expr);
/* Scalar LHS: a single assignment from the temporary suffices.  */
1747 if (lss == gfc_ss_terminator)
1749 gfc_start_block (&block);
1751 gfc_init_se (&lse, NULL);
1753 /* Translate the expression. */
1754 gfc_conv_expr (&lse, expr);
1756 /* Form the expression for the temporary. */
1757 tmp = gfc_build_array_ref (tmp1, count1);
1759 /* Use the scalar assignment as is. */
1760 gfc_add_block_to_block (&block, &lse.pre);
1761 gfc_add_modify_expr (&block, lse.expr, tmp);
1762 gfc_add_block_to_block (&block, &lse.post);
1764 /* Increment the count1. */
1765 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1766 gfc_index_one_node);
1767 gfc_add_modify_expr (&block, count1, tmp);
1769 tmp = gfc_finish_block (&block);
/* Array LHS: build a scalarized loop copying element by element.  */
1773 gfc_start_block (&block);
1775 gfc_init_loopinfo (&loop1);
1776 gfc_init_se (&rse, NULL);
1777 gfc_init_se (&lse, NULL);
1779 /* Associate the lss with the loop. */
1780 gfc_add_ss_to_loop (&loop1, lss);
1782 /* Calculate the bounds of the scalarization. */
1783 gfc_conv_ss_startstride (&loop1);
1784 /* Setup the scalarizing loops. */
1785 gfc_conv_loop_setup (&loop1);
1787 gfc_mark_ss_chain_used (lss, 1);
1789 /* Start the scalarized loop body. */
1790 gfc_start_scalarized_body (&loop1, &body);
1792 /* Setup the gfc_se structures. */
1793 gfc_copy_loopinfo_to_se (&lse, &loop1);
1796 /* Form the expression of the temporary. */
1797 if (lss != gfc_ss_terminator)
1798 rse.expr = gfc_build_array_ref (tmp1, count1);
1799 /* Translate expr. */
1800 gfc_conv_expr (&lse, expr);
1802 /* Use the scalar assignment. */
1803 rse.string_length = lse.string_length;
1804 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
1806 /* Form the mask expression according to the mask tree list. */
1809 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
/* INVERT flips the mask sense (the ELSEWHERE case).  */
1811 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1812 TREE_TYPE (wheremaskexpr),
1814 tmp = fold_build3 (COND_EXPR, void_type_node,
1815 wheremaskexpr, tmp, build_empty_stmt ());
1818 gfc_add_expr_to_block (&body, tmp);
1820 /* Increment count1. */
1821 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1822 count1, gfc_index_one_node);
1823 gfc_add_modify_expr (&body, count1, tmp);
1825 /* Increment count3. */
1828 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1829 count3, gfc_index_one_node);
1830 gfc_add_modify_expr (&body, count3, tmp);
1833 /* Generate the copying loops. */
1834 gfc_trans_scalarizing_loops (&loop1, &body);
1835 gfc_add_block_to_block (&block, &loop1.pre);
1836 gfc_add_block_to_block (&block, &loop1.post);
1837 gfc_cleanup_loop (&loop1);
1839 tmp = gfc_finish_block (&block);
1845 /* Generate code to copy rhs to the temporary. TMP1 is the address of
1846 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
1847 and should not be freed. WHEREMASK is the conditional execution mask
1848 whose sense may be inverted by INVERT. */
1851 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
1852 tree count1, gfc_ss *lss, gfc_ss *rss,
1853 tree wheremask, bool invert)
1855 stmtblock_t block, body1;
1862 gfc_start_block (&block);
1864 gfc_init_se (&rse, NULL);
1865 gfc_init_se (&lse, NULL);
/* Scalar case: evaluate EXPR2 directly into the temporary slot.  */
1867 if (lss == gfc_ss_terminator)
1869 gfc_init_block (&body1);
1870 gfc_conv_expr (&rse, expr2);
1871 lse.expr = gfc_build_array_ref (tmp1, count1);
/* Array case: set up a scalarized loop over LSS/RSS.  */
1875 /* Initialize the loop. */
1876 gfc_init_loopinfo (&loop);
1878 /* We may need LSS to determine the shape of the expression. */
1879 gfc_add_ss_to_loop (&loop, lss);
1880 gfc_add_ss_to_loop (&loop, rss);
1882 gfc_conv_ss_startstride (&loop);
1883 gfc_conv_loop_setup (&loop);
1885 gfc_mark_ss_chain_used (rss, 1);
1886 /* Start the loop body. */
1887 gfc_start_scalarized_body (&loop, &body1);
1889 /* Translate the expression. */
1890 gfc_copy_loopinfo_to_se (&rse, &loop);
1892 gfc_conv_expr (&rse, expr2);
1894 /* Form the expression of the temporary. */
1895 lse.expr = gfc_build_array_ref (tmp1, count1);
1898 /* Use the scalar assignment. */
1899 lse.string_length = rse.string_length;
1900 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
1901 expr2->expr_type == EXPR_VARIABLE);
1903 /* Form the mask expression according to the mask tree list. */
1906 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
/* INVERT flips the mask sense (the ELSEWHERE case).  */
1908 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1909 TREE_TYPE (wheremaskexpr),
1911 tmp = fold_build3 (COND_EXPR, void_type_node,
1912 wheremaskexpr, tmp, build_empty_stmt ());
1915 gfc_add_expr_to_block (&body1, tmp);
1917 if (lss == gfc_ss_terminator)
1919 gfc_add_block_to_block (&block, &body1);
1921 /* Increment count1. */
1922 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1923 gfc_index_one_node);
1924 gfc_add_modify_expr (&block, count1, tmp);
1928 /* Increment count1. */
1929 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1930 count1, gfc_index_one_node);
1931 gfc_add_modify_expr (&body1, count1, tmp);
1933 /* Increment count3. */
1936 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1937 count3, gfc_index_one_node);
1938 gfc_add_modify_expr (&body1, count3, tmp);
1941 /* Generate the copying loops. */
1942 gfc_trans_scalarizing_loops (&loop, &body1);
1944 gfc_add_block_to_block (&block, &loop.pre);
1945 gfc_add_block_to_block (&block, &loop.post);
1947 gfc_cleanup_loop (&loop);
1948 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
1949 as tree nodes in SS may not be valid in different scope. */
1952 tmp = gfc_finish_block (&block);
1957 /* Calculate the size of temporary needed in the assignment inside forall.
1958 LSS and RSS are filled in this function. */
1961 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
1962 stmtblock_t * pblock,
1963 gfc_ss **lss, gfc_ss **rss)
1971 *lss = gfc_walk_expr (expr1);
/* A scalar LHS needs exactly one element.  */
1974 size = gfc_index_one_node;
1975 if (*lss != gfc_ss_terminator)
1977 gfc_init_loopinfo (&loop);
1979 /* Walk the RHS of the expression. */
1980 *rss = gfc_walk_expr (expr2);
1981 if (*rss == gfc_ss_terminator)
1983 /* The rhs is scalar. Add a ss for the expression. */
1984 *rss = gfc_get_ss ();
1985 (*rss)->next = gfc_ss_terminator;
1986 (*rss)->type = GFC_SS_SCALAR;
1987 (*rss)->expr = expr2;
1990 /* Associate the SS with the loop. */
1991 gfc_add_ss_to_loop (&loop, *lss);
1992 /* We don't actually need to add the rhs at this point, but it might
1993 make guessing the loop bounds a bit easier. */
1994 gfc_add_ss_to_loop (&loop, *rss);
1996 /* We only want the shape of the expression, not rest of the junk
1997 generated by the scalarizer. */
1998 loop.array_parameter = 1;
2000 /* Calculate the bounds of the scalarization. */
/* Temporarily disable -fbounds-check: only the extent computation is
   wanted here, not checked array accesses.  */
2001 save_flag = flag_bounds_check;
2002 flag_bounds_check = 0;
2003 gfc_conv_ss_startstride (&loop);
2004 flag_bounds_check = save_flag;
2005 gfc_conv_loop_setup (&loop);
2007 /* Figure out how many elements we need. */
2008 for (i = 0; i < loop.dimen; i++)
2010 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2011 gfc_index_one_node, loop.from[i]);
2012 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2014 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2016 gfc_add_block_to_block (pblock, &loop.pre);
2017 size = gfc_evaluate_now (size, pblock);
2018 gfc_add_block_to_block (pblock, &loop.post);
2020 /* TODO: write a function that cleans up a loopinfo without freeing
2021 the SS chains. Currently a NOP. */
2028 /* Calculate the overall iterator number of the nested forall construct. */
2031 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2032 stmtblock_t *inner_size_body, stmtblock_t *block)
2037 /* Optimize the case of unconditional FORALL nests with constant bounds. */
2038 if (INTEGER_CST_P (inner_size))
2040 bool all_const_p = true;
2041 forall_info *forall_tmp;
2043 /* First check whether all the bounds are constant. */
2044 for (forall_tmp = nested_forall_info;
2046 forall_tmp = forall_tmp->next_nest)
2047 if (forall_tmp->mask || !INTEGER_CST_P (forall_tmp->size))
2049 all_const_p = false;
/* All levels constant-sized and unmasked: fold the product of sizes
   at compile time.  */
2055 tree tmp = inner_size;
2056 for (forall_tmp = nested_forall_info;
2058 forall_tmp = forall_tmp->next_nest)
2059 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
2060 tmp, forall_tmp->size);
/* General case: accumulate the count at run time inside the nest.  */
2065 /* TODO: optimizing the computing process. */
2066 number = gfc_create_var (gfc_array_index_type, "num");
2067 gfc_add_modify_expr (block, number, gfc_index_zero_node);
2069 gfc_start_block (&body);
2070 if (inner_size_body)
2071 gfc_add_block_to_block (&body, inner_size_body);
2072 if (nested_forall_info)
2073 tmp = build2 (PLUS_EXPR, gfc_array_index_type, number,
2077 gfc_add_modify_expr (&body, number, tmp);
2078 tmp = gfc_finish_block (&body);
2080 /* Generate loops. */
2081 if (nested_forall_info != NULL)
2082 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2084 gfc_add_expr_to_block (block, tmp);
2090 /* Allocate temporary for forall construct. SIZE is the size of temporary
2091 needed. PTEMP1 is returned for space free. */
2094 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
/* bytesize = size * sizeof (type); skip the multiply when the element
   type is one byte wide.  */
2101 unit = TYPE_SIZE_UNIT (type);
2102 if (!integer_onep (unit))
2103 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2108 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
/* NOTE(review): presumably dereferences only when gfc_do_allocate
   returned a heap pointer — the guard is not visible here; confirm.  */
2111 tmp = build_fold_indirect_ref (tmp);
2116 /* Allocate temporary for forall construct according to the information in
2117 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2118 assignment inside forall. PTEMP1 is returned for space free. */
2121 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2122 tree inner_size, stmtblock_t * inner_size_body,
2123 stmtblock_t * block, tree * ptemp1)
2127 /* Calculate the total size of temporary needed in forall construct. */
2128 size = compute_overall_iter_number (nested_forall_info, inner_size,
2129 inner_size_body, block);
2131 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2135 /* Handle assignments inside forall which need temporary.
2137 forall (i=start:end:stride; maskexpr)
2140 (where e,f<i> are arbitrary expressions possibly involving i
2141 and there is a dependency between e<i> and f<i>)
2143 masktmp(:) = maskexpr(:)
2148 for (i = start; i <= end; i += stride)
2152 for (i = start; i <= end; i += stride)
2154 if (masktmp[maskindex++])
2155 tmp[count1++] = f<i>
2159 for (i = start; i <= end; i += stride)
2161 if (masktmp[maskindex++])
2162 e<i> = tmp[count1++]
2167 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2168 tree wheremask, bool invert,
2169 forall_info * nested_forall_info,
2170 stmtblock_t * block)
2178 stmtblock_t inner_size_body;
2180 /* Create vars. count1 is the current iterator number of the nested
2182 count1 = gfc_create_var (gfc_array_index_type, "count1");
2184 /* Count is the wheremask index. */
2187 count = gfc_create_var (gfc_array_index_type, "count");
2188 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2193 /* Initialize count1. */
2194 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2196 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2197 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2198 gfc_init_block (&inner_size_body);
2199 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2202 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
2203 type = gfc_typenode_for_spec (&expr1->ts);
2205 /* Allocate temporary for nested forall construct according to the
2206 information in nested_forall_info and inner_size. */
2207 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2208 &inner_size_body, block, &ptemp1);
2210 /* Generate code to copy rhs to the temporary, wrapped in the full
2211 FORALL nest loops.  */
2211 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2214 /* Generate body and loops according to the information in
2215 nested_forall_info. */
2216 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2217 gfc_add_expr_to_block (block, tmp);
/* Reset the counters before the second (temp -> lhs) pass; both passes
   must visit elements in the same order.  */
2220 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2224 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2226 /* Generate code to copy the temporary to lhs. */
2227 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2230 /* Generate body and loops according to the information in
2231 nested_forall_info. */
2232 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2233 gfc_add_expr_to_block (block, tmp);
2237 /* Free the temporary. */
2238 tmp = gfc_chainon_list (NULL_TREE, ptemp1);
2239 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2240 gfc_add_expr_to_block (block, tmp);
2245 /* Translate pointer assignment inside FORALL which need temporary. */
2248 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2249 forall_info * nested_forall_info,
2250 stmtblock_t * block)
2264 tree tmp, tmp1, ptemp1;
2266 count = gfc_create_var (gfc_array_index_type, "count");
2267 gfc_add_modify_expr (block, count, gfc_index_zero_node);
/* Each FORALL iteration stores exactly one pointer (scalar case) or
   one descriptor (array case) in the temporary.  */
2269 inner_size = integer_one_node;
2270 lss = gfc_walk_expr (expr1);
2271 rss = gfc_walk_expr (expr2);
/* Scalar pointer assignment: buffer the target addresses.  */
2272 if (lss == gfc_ss_terminator)
2274 type = gfc_typenode_for_spec (&expr1->ts);
2275 type = build_pointer_type (type);
2277 /* Allocate temporary for nested forall construct according to the
2278 information in nested_forall_info and inner_size. */
2279 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2280 inner_size, NULL, block, &ptemp1);
/* Pass 1: evaluate the RHS addresses into the temporary.  */
2281 gfc_start_block (&body);
2282 gfc_init_se (&lse, NULL);
2283 lse.expr = gfc_build_array_ref (tmp1, count);
2284 gfc_init_se (&rse, NULL);
2285 rse.want_pointer = 1;
2286 gfc_conv_expr (&rse, expr2);
2287 gfc_add_block_to_block (&body, &rse.pre);
2288 gfc_add_modify_expr (&body, lse.expr,
2289 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2290 gfc_add_block_to_block (&body, &rse.post);
2292 /* Increment count. */
2293 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2294 count, gfc_index_one_node);
2295 gfc_add_modify_expr (&body, count, tmp);
2297 tmp = gfc_finish_block (&body);
2299 /* Generate body and loops according to the information in
2300 nested_forall_info. */
2301 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2302 gfc_add_expr_to_block (block, tmp);
/* Pass 2: replay the nest, assigning the buffered addresses to the
   LHS pointers in the same iteration order.  */
2305 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2307 gfc_start_block (&body);
2308 gfc_init_se (&lse, NULL);
2309 gfc_init_se (&rse, NULL);
2310 rse.expr = gfc_build_array_ref (tmp1, count);
2311 lse.want_pointer = 1;
2312 gfc_conv_expr (&lse, expr1);
2313 gfc_add_block_to_block (&body, &lse.pre);
2314 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2315 gfc_add_block_to_block (&body, &lse.post);
2316 /* Increment count. */
2317 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2318 count, gfc_index_one_node);
2319 gfc_add_modify_expr (&body, count, tmp);
2320 tmp = gfc_finish_block (&body);
2322 /* Generate body and loops according to the information in
2323 nested_forall_info. */
2324 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2325 gfc_add_expr_to_block (block, tmp);
/* Array pointer assignment: buffer whole array descriptors instead.  */
2329 gfc_init_loopinfo (&loop);
2331 /* Associate the SS with the loop. */
2332 gfc_add_ss_to_loop (&loop, rss);
2334 /* Setup the scalarizing loops and bounds. */
2335 gfc_conv_ss_startstride (&loop);
2337 gfc_conv_loop_setup (&loop);
2339 info = &rss->data.info;
2340 desc = info->descriptor;
2342 /* Make a new descriptor. */
2343 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2344 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2345 loop.from, loop.to, 1);
2347 /* Allocate temporary for nested forall construct. */
2348 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2349 inner_size, NULL, block, &ptemp1);
/* Pass 1: build each RHS descriptor directly into the temporary
   (direct_byref avoids an intermediate copy).  */
2350 gfc_start_block (&body);
2351 gfc_init_se (&lse, NULL);
2352 lse.expr = gfc_build_array_ref (tmp1, count);
2353 lse.direct_byref = 1;
2354 rss = gfc_walk_expr (expr2);
2355 gfc_conv_expr_descriptor (&lse, expr2, rss);
2357 gfc_add_block_to_block (&body, &lse.pre);
2358 gfc_add_block_to_block (&body, &lse.post);
2360 /* Increment count. */
2361 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2362 count, gfc_index_one_node);
2363 gfc_add_modify_expr (&body, count, tmp);
2365 tmp = gfc_finish_block (&body);
2367 /* Generate body and loops according to the information in
2368 nested_forall_info. */
2369 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2370 gfc_add_expr_to_block (block, tmp);
/* Pass 2: assign the buffered descriptors to the LHS pointers.  */
2373 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2375 parm = gfc_build_array_ref (tmp1, count);
2376 lss = gfc_walk_expr (expr1);
2377 gfc_init_se (&lse, NULL);
2378 gfc_conv_expr_descriptor (&lse, expr1, lss);
2379 gfc_add_modify_expr (&lse.pre, lse.expr, parm);
2380 gfc_start_block (&body);
2381 gfc_add_block_to_block (&body, &lse.pre);
2382 gfc_add_block_to_block (&body, &lse.post);
2384 /* Increment count. */
2385 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2386 count, gfc_index_one_node);
2387 gfc_add_modify_expr (&body, count, tmp);
2389 tmp = gfc_finish_block (&body);
2391 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2392 gfc_add_expr_to_block (block, tmp);
2394 /* Free the temporary. */
2397 tmp = gfc_chainon_list (NULL_TREE, ptemp1);
2398 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2399 gfc_add_expr_to_block (block, tmp);
2404 /* FORALL and WHERE statements are really nasty, especially when you nest
2405 them. All the rhs of a forall assignment must be evaluated before the
2406 actual assignments are performed. Presumably this also applies to all the
2407 assignments in an inner where statement. */
2409 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2410 linear array, relying on the fact that we process in the same order in all
2413 forall (i=start:end:stride; maskexpr)
2417 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2419 count = ((end + 1 - start) / stride)
2420 masktmp(:) = maskexpr(:)
2423 for (i = start; i <= end; i += stride)
2425 if (masktmp[maskindex++])
2429 for (i = start; i <= end; i += stride)
2431 if (masktmp[maskindex++])
2435 Note that this code only works when there are no dependencies.
2436 Forall loop with array assignments and data dependencies are a real pain,
2437 because the size of the temporary cannot always be determined before the
2438 loop is executed. This problem is compounded by the presence of nested
2443 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2461 gfc_forall_iterator *fa;
2464 gfc_saved_var *saved_vars;
2465 iter_info *this_forall;
2469 /* Count the FORALL index number. */
2470 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2474 /* Allocate the space for var, start, end, step, varexpr. */
2475 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2476 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2477 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2478 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2479 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2480 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2482 /* Allocate the space for info. */
2483 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2485 gfc_start_block (&block);
2488 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2490 gfc_symbol *sym = fa->var->symtree->n.sym;
2492 /* Allocate space for this_forall. */
2493 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2495 /* Create a temporary variable for the FORALL index. */
2496 tmp = gfc_typenode_for_spec (&sym->ts);
2497 var[n] = gfc_create_var (tmp, sym->name);
2498 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2500 /* Record it in this_forall. */
2501 this_forall->var = var[n];
2503 /* Replace the index symbol's backend_decl with the temporary decl. */
2504 sym->backend_decl = var[n];
2506 /* Work out the start, end and stride for the loop. */
2507 gfc_init_se (&se, NULL);
2508 gfc_conv_expr_val (&se, fa->start);
2509 /* Record it in this_forall. */
2510 this_forall->start = se.expr;
2511 gfc_add_block_to_block (&block, &se.pre);
2514 gfc_init_se (&se, NULL);
2515 gfc_conv_expr_val (&se, fa->end);
2516 /* Record it in this_forall. */
2517 this_forall->end = se.expr;
2518 gfc_make_safe_expr (&se);
2519 gfc_add_block_to_block (&block, &se.pre);
2522 gfc_init_se (&se, NULL);
2523 gfc_conv_expr_val (&se, fa->stride);
2524 /* Record it in this_forall. */
2525 this_forall->step = se.expr;
2526 gfc_make_safe_expr (&se);
2527 gfc_add_block_to_block (&block, &se.pre);
2530 /* Set the NEXT field of this_forall to NULL. */
2531 this_forall->next = NULL;
2532 /* Link this_forall to the info construct. */
2533 if (info->this_loop)
2535 iter_info *iter_tmp = info->this_loop;
2536 while (iter_tmp->next != NULL)
2537 iter_tmp = iter_tmp->next;
2538 iter_tmp->next = this_forall;
2541 info->this_loop = this_forall;
2547 /* Calculate the size needed for the current forall level. */
2548 size = gfc_index_one_node;
2549 for (n = 0; n < nvar; n++)
2551 /* size = (end + step - start) / step. */
2552 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2554 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2556 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2557 tmp = convert (gfc_array_index_type, tmp);
2559 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2562 /* Record the nvar and size of current forall level. */
2566 /* First we need to allocate the mask. */
2569 /* As the mask array can be very big, prefer compact boolean types. */
2570 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2571 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2572 size, NULL, &block, &pmask);
2573 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2575 /* Record them in the info structure. */
2576 info->maskindex = maskindex;
2581 /* No mask was specified. */
2582 maskindex = NULL_TREE;
2583 mask = pmask = NULL_TREE;
2586 /* Link the current forall level to nested_forall_info. */
2587 if (nested_forall_info)
2589 forall_info *forall_tmp = nested_forall_info;
2590 while (forall_tmp->next_nest != NULL)
2591 forall_tmp = forall_tmp->next_nest;
2592 info->outer = forall_tmp;
2593 forall_tmp->next_nest = info;
2596 nested_forall_info = info;
2598 /* Copy the mask into a temporary variable if required.
2599 For now we assume a mask temporary is needed. */
2602 /* As the mask array can be very big, prefer compact boolean types. */
2603 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2605 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2607 /* Start of mask assignment loop body. */
2608 gfc_start_block (&body);
2610 /* Evaluate the mask expression. */
2611 gfc_init_se (&se, NULL);
2612 gfc_conv_expr_val (&se, code->expr);
2613 gfc_add_block_to_block (&body, &se.pre);
2615 /* Store the mask. */
2616 se.expr = convert (mask_type, se.expr);
2618 tmp = gfc_build_array_ref (mask, maskindex);
2619 gfc_add_modify_expr (&body, tmp, se.expr);
2621 /* Advance to the next mask element. */
2622 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
2623 maskindex, gfc_index_one_node);
2624 gfc_add_modify_expr (&body, maskindex, tmp);
2626 /* Generate the loops. */
2627 tmp = gfc_finish_block (&body);
2628 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2629 gfc_add_expr_to_block (&block, tmp);
2632 c = code->block->next;
2634 /* TODO: loop merging in FORALL statements. */
2635 /* Now that we've got a copy of the mask, generate the assignment loops. */
2641 /* A scalar or array assignment. */
2642 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2643 /* Temporaries due to array assignment data dependencies introduce
2644 no end of problems. */
2646 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2647 nested_forall_info, &block);
2650 /* Use the normal assignment copying routines. */
2651 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2653 /* Generate body and loops. */
2654 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2656 gfc_add_expr_to_block (&block, tmp);
2662 /* Translate WHERE or WHERE construct nested in FORALL. */
2663 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2666 /* Pointer assignment inside FORALL. */
2667 case EXEC_POINTER_ASSIGN:
2668 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2670 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2671 nested_forall_info, &block);
2674 /* Use the normal assignment copying routines. */
2675 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2677 /* Generate body and loops. */
2678 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2680 gfc_add_expr_to_block (&block, tmp);
2685 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2686 gfc_add_expr_to_block (&block, tmp);
2689 /* Explicit subroutine calls are prevented by the frontend but interface
2690 assignments can legitimately produce them. */
2691 case EXEC_ASSIGN_CALL:
2692 assign = gfc_trans_call (c, true);
2693 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
2694 gfc_add_expr_to_block (&block, tmp);
2704 /* Restore the original index variables. */
2705 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2706 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2708 /* Free the space for var, start, end, step, varexpr. */
2714 gfc_free (saved_vars);
2718 /* Free the temporary for the mask. */
2719 tmp = gfc_chainon_list (NULL_TREE, pmask);
2720 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2721 gfc_add_expr_to_block (&block, tmp);
2724 pushdecl (maskindex);
2726 return gfc_finish_block (&block);
/* Public entry point for FORALL translation.  Delegates to
   gfc_trans_forall_1 with a NULL forall_info, i.e. this is the
   outermost FORALL level (no enclosing FORALL nest).  Returns the
   generated statement tree.
   NOTE(review): this listing elides lines (braces etc.); tokens below
   are kept exactly as-is.  */
2730 /* Translate the FORALL statement or construct. */
2732 tree gfc_trans_forall (gfc_code * code)
2734 return gfc_trans_forall_1 (code, NULL);
/* NOTE(review): this is a gapped listing — braces, 'else' branches and
   some declarations are elided; the control structure is not fully
   visible here, so comments below describe only what the visible
   statements establish.  */
2738 /* Evaluate the WHERE mask expression, copy its value to a temporary.
2739 If the WHERE construct is nested in FORALL, compute the overall temporary
2740 needed by the WHERE mask expression multiplied by the iterator number of
2742 ME is the WHERE mask expression.
2743 MASK is the current execution mask upon input, whose sense may or may
2744 not be inverted as specified by the INVERT argument.
2745 CMASK is the updated execution mask on output, or NULL if not required.
2746 PMASK is the pending execution mask on output, or NULL if not required.
2747 BLOCK is the block in which to place the condition evaluation loops. */
2750 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2751 tree mask, bool invert, tree cmask, tree pmask,
2752 tree mask_type, stmtblock_t * block)
2757 stmtblock_t body, body1;
2758 tree count, cond, mtmp;
/* Walk ME twice: LSS is only used to detect/drive the scalar vs.
   array case, RSS is the chain actually evaluated below.  */
2761 gfc_init_loopinfo (&loop);
2763 lss = gfc_walk_expr (me);
2764 rss = gfc_walk_expr (me);
2766 /* Variable to index the temporary. */
2767 count = gfc_create_var (gfc_array_index_type, "count");
2768 /* Initialize count. */
2769 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2771 gfc_start_block (&body);
2773 gfc_init_se (&rse, NULL);
2774 gfc_init_se (&lse, NULL);
/* Scalar mask expression: no scalarizer needed, just a plain block.
   (The else-arm setting up the scalarizer follows; its brace lines are
   elided in this listing.)  */
2776 if (lss == gfc_ss_terminator)
2778 gfc_init_block (&body1);
2782 /* Initialize the loop. */
2783 gfc_init_loopinfo (&loop);
2785 /* We may need LSS to determine the shape of the expression. */
2786 gfc_add_ss_to_loop (&loop, lss);
2787 gfc_add_ss_to_loop (&loop, rss);
2789 gfc_conv_ss_startstride (&loop);
2790 gfc_conv_loop_setup (&loop);
2792 gfc_mark_ss_chain_used (rss, 1);
2793 /* Start the loop body. */
2794 gfc_start_scalarized_body (&loop, &body1);
2796 /* Translate the expression. */
2797 gfc_copy_loopinfo_to_se (&rse, &loop);
2799 gfc_conv_expr (&rse, me);
2802 /* Variable to evaluate mask condition. */
2803 cond = gfc_create_var (mask_type, "cond");
/* MTMP caches the (possibly inverted) incoming mask element so it is
   read only once per iteration; only needed when an output mask is
   actually produced.  */
2804 if (mask && (cmask || pmask))
2805 mtmp = gfc_create_var (mask_type, "mask");
2806 else mtmp = NULL_TREE;
2808 gfc_add_block_to_block (&body1, &lse.pre);
2809 gfc_add_block_to_block (&body1, &rse.pre);
2811 gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));
2813 if (mask && (cmask || pmask))
2815 tmp = gfc_build_array_ref (mask, count);
/* Apply INVERT to the incoming mask element (guarding 'if (invert)'
   is elided in this listing).  */
2817 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
2818 gfc_add_modify_expr (&body1, mtmp, tmp);
/* cmask[count] = mask-elt AND cond: elements active in this clause.  */
2823 tmp1 = gfc_build_array_ref (cmask, count);
2826 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2827 gfc_add_modify_expr (&body1, tmp1, tmp);
/* pmask[count] = mask-elt AND NOT cond: elements pending for the
   following ELSEWHERE clauses.  */
2832 tmp1 = gfc_build_array_ref (pmask, count);
2833 tmp = build1 (TRUTH_NOT_EXPR, mask_type, cond);
2835 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2836 gfc_add_modify_expr (&body1, tmp1, tmp);
2839 gfc_add_block_to_block (&body1, &lse.post);
2840 gfc_add_block_to_block (&body1, &rse.post);
2842 if (lss == gfc_ss_terminator)
2844 gfc_add_block_to_block (&body, &body1);
2848 /* Increment count. */
2849 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
2850 gfc_index_one_node);
2851 gfc_add_modify_expr (&body1, count, tmp1);
2853 /* Generate the copying loops. */
2854 gfc_trans_scalarizing_loops (&loop, &body1);
2856 gfc_add_block_to_block (&body, &loop.pre);
2857 gfc_add_block_to_block (&body, &loop.post);
2859 gfc_cleanup_loop (&loop);
2860 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2861 as tree nodes in SS may not be valid in different scope. */
2864 tmp1 = gfc_finish_block (&body);
2865 /* If the WHERE construct is inside FORALL, fill the full temporary. */
2866 if (nested_forall_info != NULL)
2867 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
2869 gfc_add_expr_to_block (block, tmp1);
/* NOTE(review): gapped listing — braces, else-arms and some
   declarations are elided; comments added below only describe what the
   visible statements establish.
   COUNT1 indexes the mask for the main assignment loop; COUNT2 indexes
   it again for the temp-to-lhs copy-out loop when a dependency
   temporary is used.  INVERT selects the sense of MASK (presumably via
   elided 'if (invert)' guards around the TRUTH_NOT_EXPR lines — TODO
   confirm against full source).  Returns the finished statement tree.  */
2873 /* Translate an assignment statement in a WHERE statement or construct
2874 statement. The MASK expression is used to control which elements
2875 of EXPR1 shall be assigned. The sense of MASK is specified by
2879 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
2880 tree mask, bool invert,
2881 tree count1, tree count2)
2886 gfc_ss *lss_section;
2893 tree index, maskexpr;
2896 /* TODO: handle this special case.
2897 Special case a single function returning an array. */
2898 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
2900 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
2906 /* Assignment of the form lhs = rhs. */
2907 gfc_start_block (&block);
2909 gfc_init_se (&lse, NULL);
2910 gfc_init_se (&rse, NULL);
2913 lss = gfc_walk_expr (expr1);
2916 /* In each where-assign-stmt, the mask-expr and the variable being
2917 defined shall be arrays of the same shape. */
2918 gcc_assert (lss != gfc_ss_terminator);
2920 /* The assignment needs scalarization. */
2923 /* Find a non-scalar SS from the lhs. */
2924 while (lss_section != gfc_ss_terminator
2925 && lss_section->type != GFC_SS_SECTION)
2926 lss_section = lss_section->next;
2928 gcc_assert (lss_section != gfc_ss_terminator);
2930 /* Initialize the scalarizer. */
2931 gfc_init_loopinfo (&loop);
2934 rss = gfc_walk_expr (expr2);
2935 if (rss == gfc_ss_terminator)
2937 /* The rhs is scalar. Add a ss for the expression. */
2938 rss = gfc_get_ss ();
2939 rss->next = gfc_ss_terminator;
2940 rss->type = GFC_SS_SCALAR;
2944 /* Associate the SS with the loop. */
2945 gfc_add_ss_to_loop (&loop, lss);
2946 gfc_add_ss_to_loop (&loop, rss);
2948 /* Calculate the bounds of the scalarization. */
2949 gfc_conv_ss_startstride (&loop);
2951 /* Resolve any data dependencies in the statement. */
2952 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
2954 /* Setup the scalarizing loops. */
2955 gfc_conv_loop_setup (&loop);
2957 /* Setup the gfc_se structures. */
2958 gfc_copy_loopinfo_to_se (&lse, &loop);
2959 gfc_copy_loopinfo_to_se (&rse, &loop);
2962 gfc_mark_ss_chain_used (rss, 1);
/* No dependency temporary: assign straight through the lhs chain.  */
2963 if (loop.temp_ss == NULL)
2966 gfc_mark_ss_chain_used (lss, 1);
/* Dependency temporary present: the main loop writes the temp (lse
   points at loop.temp_ss), and a second pass copies temp -> lhs.  */
2970 lse.ss = loop.temp_ss;
2971 gfc_mark_ss_chain_used (lss, 3);
2972 gfc_mark_ss_chain_used (loop.temp_ss, 3);
2975 /* Start the scalarized loop body. */
2976 gfc_start_scalarized_body (&loop, &body);
2978 /* Translate the expression. */
2979 gfc_conv_expr (&rse, expr2);
2980 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
2982 gfc_conv_tmp_array_ref (&lse);
2983 gfc_advance_se_ss_chain (&lse);
2986 gfc_conv_expr (&lse, expr1);
2988 /* Form the mask expression according to the mask. */
2990 maskexpr = gfc_build_array_ref (mask, index);
2992 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
/* Guard the scalar assignment with the mask element: only elements
   whose mask entry is true are assigned.  */
2994 /* Use the scalar assignment as is. */
2995 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
2996 loop.temp_ss != NULL, false);
2997 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
2999 gfc_add_expr_to_block (&body, tmp);
3001 if (lss == gfc_ss_terminator)
3003 /* Increment count1. */
3004 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3005 count1, gfc_index_one_node);
3006 gfc_add_modify_expr (&body, count1, tmp);
3008 /* Use the scalar assignment as is. */
3009 gfc_add_block_to_block (&block, &body);
3013 gcc_assert (lse.ss == gfc_ss_terminator
3014 && rse.ss == gfc_ss_terminator);
3016 if (loop.temp_ss != NULL)
3018 /* Increment count1 before finish the main body of a scalarized
3020 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3021 count1, gfc_index_one_node);
3022 gfc_add_modify_expr (&body, count1, tmp);
3023 gfc_trans_scalarized_loop_boundary (&loop, &body);
3025 /* We need to copy the temporary to the actual lhs. */
3026 gfc_init_se (&lse, NULL);
3027 gfc_init_se (&rse, NULL);
3028 gfc_copy_loopinfo_to_se (&lse, &loop);
3029 gfc_copy_loopinfo_to_se (&rse, &loop);
/* Second pass reads from the temporary (rse) and writes the lhs.  */
3031 rse.ss = loop.temp_ss;
3034 gfc_conv_tmp_array_ref (&rse);
3035 gfc_advance_se_ss_chain (&rse);
3036 gfc_conv_expr (&lse, expr1);
3038 gcc_assert (lse.ss == gfc_ss_terminator
3039 && rse.ss == gfc_ss_terminator);
3041 /* Form the mask expression according to the mask tree list. */
3043 maskexpr = gfc_build_array_ref (mask, index);
3045 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3048 /* Use the scalar assignment as is. */
3049 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3050 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3051 gfc_add_expr_to_block (&body, tmp);
3053 /* Increment count2. */
3054 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3055 count2, gfc_index_one_node);
3056 gfc_add_modify_expr (&body, count2, tmp);
3060 /* Increment count1. */
3061 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3062 count1, gfc_index_one_node);
3063 gfc_add_modify_expr (&body, count1, tmp);
3066 /* Generate the copying loops. */
3067 gfc_trans_scalarizing_loops (&loop, &body);
3069 /* Wrap the whole thing up. */
3070 gfc_add_block_to_block (&block, &loop.pre);
3071 gfc_add_block_to_block (&block, &loop.post);
3072 gfc_cleanup_loop (&loop);
3075 return gfc_finish_block (&block);
/* NOTE(review): gapped listing — loop heads, switch/case labels,
   braces and else-arms are elided; comments below describe only what
   the visible statements establish.
   Iteratively walks the WHERE/ELSEWHERE clause chain (CBLOCK), keeping
   a current control mask (cmask) and a pending mask (pmask) for the
   remaining clauses; both are heap temporaries freed at the end via
   _gfortran internal_free.  INVERT flags whether MASK's sense is
   inverted; NESTED_FORALL_INFO is non-NULL when inside a FORALL.  */
3079 /* Translate the WHERE construct or statement.
3080 This function can be called iteratively to translate the nested WHERE
3081 construct or statement.
3082 MASK is the control mask. */
3085 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3086 forall_info * nested_forall_info, stmtblock_t * block)
3088 stmtblock_t inner_size_body;
3089 tree inner_size, size;
3097 tree count1, count2;
/* pcmask/ppmask remember the allocated backing store so it can be
   freed; cmask/pmask are the masks actually indexed.  */
3101 tree pcmask = NULL_TREE;
3102 tree ppmask = NULL_TREE;
3103 tree cmask = NULL_TREE;
3104 tree pmask = NULL_TREE;
3106 /* the WHERE statement or the WHERE construct statement. */
3107 cblock = code->block;
3109 /* As the mask array can be very big, prefer compact boolean types. */
3110 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
/* Decide up front whether cmask and/or pmask temporaries are needed,
   based on how many clauses the construct has and which are empty.  */
3112 /* Determine which temporary masks are needed. */
3115 /* One clause: No ELSEWHEREs. */
3116 need_cmask = (cblock->next != 0);
3119 else if (cblock->block->block)
3121 /* Three or more clauses: Conditional ELSEWHEREs. */
3125 else if (cblock->next)
3127 /* Two clauses, the first non-empty. */
3129 need_pmask = (mask != NULL_TREE
3130 && cblock->block->next != 0);
3132 else if (!cblock->block->next)
3134 /* Two clauses, both empty. */
3138 /* Two clauses, the first empty, the second non-empty. */
3141 need_cmask = (cblock->block->expr != 0);
3150 if (need_cmask || need_pmask)
3152 /* Calculate the size of temporary needed by the mask-expr. */
3153 gfc_init_block (&inner_size_body)
3154 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3155 &inner_size_body, &lss, &rss);
3157 /* Calculate the total size of temporary needed. */
3158 size = compute_overall_iter_number (nested_forall_info, inner_size,
3159 &inner_size_body, block);
3161 /* Allocate temporary for WHERE mask if needed. */
3163 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3166 /* Allocate temporary for !mask if needed. */
3168 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3174 /* Each time around this loop, the where clause is conditional
3175 on the value of mask and invert, which are updated at the
3176 bottom of the loop. */
3178 /* Has mask-expr. */
3181 /* Ensure that the WHERE mask will be evaluated exactly once.
3182 If there are no statements in this WHERE/ELSEWHERE clause,
3183 then we don't need to update the control mask (cmask).
3184 If this is the last clause of the WHERE construct, then
3185 we don't need to update the pending control mask (pmask). */
3187 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3189 cblock->next ? cmask : NULL_TREE,
3190 cblock->block ? pmask : NULL_TREE,
3193 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3195 (cblock->next || cblock->block)
3196 ? cmask : NULL_TREE,
3197 NULL_TREE, mask_type, block);
3201 /* It's a final elsewhere-stmt. No mask-expr is present. */
3205 /* The body of this where clause are controlled by cmask with
3206 sense specified by invert. */
3208 /* Get the assignment statement of a WHERE statement, or the first
3209 statement in where-body-construct of a WHERE construct. */
3210 cnext = cblock->next;
3215 /* WHERE assignment statement. */
3217 expr1 = cnext->expr;
3218 expr2 = cnext->expr2;
/* Inside a FORALL, dependencies between lhs and rhs may force a
   temporary; otherwise generate the masked assignment directly.  */
3219 if (nested_forall_info != NULL)
3221 need_temp = gfc_check_dependency (expr1, expr2, 0);
3223 gfc_trans_assign_need_temp (expr1, expr2,
3225 nested_forall_info, block);
3228 /* Variables to control maskexpr. */
3229 count1 = gfc_create_var (gfc_array_index_type, "count1");
3230 count2 = gfc_create_var (gfc_array_index_type, "count2");
3231 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3232 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3234 tmp = gfc_trans_where_assign (expr1, expr2,
3238 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3240 gfc_add_expr_to_block (block, tmp);
3245 /* Variables to control maskexpr. */
3246 count1 = gfc_create_var (gfc_array_index_type, "count1");
3247 count2 = gfc_create_var (gfc_array_index_type, "count2");
3248 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3249 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3251 tmp = gfc_trans_where_assign (expr1, expr2,
3254 gfc_add_expr_to_block (block, tmp);
/* Nested WHERE: recurse with the current clause mask as control.  */
3259 /* WHERE or WHERE construct is part of a where-body-construct. */
3261 gfc_trans_where_2 (cnext, cmask, invert,
3262 nested_forall_info, block);
3269 /* The next statement within the same where-body-construct. */
3270 cnext = cnext->next;
3272 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3273 cblock = cblock->block;
3274 if (mask == NULL_TREE)
3276 /* If we're the initial WHERE, we can simply invert the sense
3277 of the current mask to obtain the "mask" for the remaining
3284 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3290 /* If we allocated a pending mask array, deallocate it now. */
3293 tree args = gfc_chainon_list (NULL_TREE, ppmask);
3294 tmp = build_function_call_expr (gfor_fndecl_internal_free, args);
3295 gfc_add_expr_to_block (block, tmp);
3298 /* If we allocated a current mask array, deallocate it now. */
3301 tree args = gfc_chainon_list (NULL_TREE, pcmask);
3302 tmp = build_function_call_expr (gfor_fndecl_internal_free, args);
3303 gfc_add_expr_to_block (block, tmp);
/* NOTE(review): gapped listing — braces and some declarations are
   elided.  Fast path for dependence-free WHERE: a single scalarized
   loop evaluating the condition and selecting between the then- and
   else-assignment with a COND_EXPR, with no mask temporaries.  */
3307 /* Translate a simple WHERE construct or statement without dependencies.
3308 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3309 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3310 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
3313 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3315 stmtblock_t block, body;
3316 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3317 tree tmp, cexpr, tstmt, estmt;
3318 gfc_ss *css, *tdss, *tsss;
3319 gfc_se cse, tdse, tsse, edse, esse;
/* cond / tdst=tsrc come from the WHERE clause; edst=esrc from the
   optional ELSEWHERE clause.  */
3324 cond = cblock->expr;
3325 tdst = cblock->next->expr;
3326 tsrc = cblock->next->expr2;
3327 edst = eblock ? eblock->next->expr : NULL;
3328 esrc = eblock ? eblock->next->expr2 : NULL;
3330 gfc_start_block (&block);
3331 gfc_init_loopinfo (&loop);
3333 /* Handle the condition. */
3334 gfc_init_se (&cse, NULL);
3335 css = gfc_walk_expr (cond);
3336 gfc_add_ss_to_loop (&loop, css);
3338 /* Handle the then-clause. */
3339 gfc_init_se (&tdse, NULL);
3340 gfc_init_se (&tsse, NULL);
3341 tdss = gfc_walk_expr (tdst);
3342 tsss = gfc_walk_expr (tsrc);
/* Scalar rhs: wrap it in a GFC_SS_SCALAR entry so the scalarizer can
   carry it alongside the array operands.  */
3343 if (tsss == gfc_ss_terminator)
3345 tsss = gfc_get_ss ();
3346 tsss->next = gfc_ss_terminator;
3347 tsss->type = GFC_SS_SCALAR;
3350 gfc_add_ss_to_loop (&loop, tdss);
3351 gfc_add_ss_to_loop (&loop, tsss);
3355 /* Handle the else clause. */
3356 gfc_init_se (&edse, NULL);
3357 gfc_init_se (&esse, NULL);
3358 edss = gfc_walk_expr (edst);
3359 esss = gfc_walk_expr (esrc);
3360 if (esss == gfc_ss_terminator)
3362 esss = gfc_get_ss ();
3363 esss->next = gfc_ss_terminator;
3364 esss->type = GFC_SS_SCALAR;
3367 gfc_add_ss_to_loop (&loop, edss);
3368 gfc_add_ss_to_loop (&loop, esss);
3371 gfc_conv_ss_startstride (&loop);
3372 gfc_conv_loop_setup (&loop);
3374 gfc_mark_ss_chain_used (css, 1);
3375 gfc_mark_ss_chain_used (tdss, 1);
3376 gfc_mark_ss_chain_used (tsss, 1);
3379 gfc_mark_ss_chain_used (edss, 1);
3380 gfc_mark_ss_chain_used (esss, 1);
3383 gfc_start_scalarized_body (&loop, &body);
3385 gfc_copy_loopinfo_to_se (&cse, &loop);
3386 gfc_copy_loopinfo_to_se (&tdse, &loop);
3387 gfc_copy_loopinfo_to_se (&tsse, &loop);
3393 gfc_copy_loopinfo_to_se (&edse, &loop);
3394 gfc_copy_loopinfo_to_se (&esse, &loop);
3399 gfc_conv_expr (&cse, cond);
3400 gfc_add_block_to_block (&body, &cse.pre);
3403 gfc_conv_expr (&tsse, tsrc);
3404 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3406 gfc_conv_tmp_array_ref (&tdse);
3407 gfc_advance_se_ss_chain (&tdse);
3410 gfc_conv_expr (&tdse, tdst);
3414 gfc_conv_expr (&esse, esrc);
3415 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3417 gfc_conv_tmp_array_ref (&edse);
3418 gfc_advance_se_ss_chain (&edse);
3421 gfc_conv_expr (&edse, edst);
/* Per element: if (cond) then-assign else else-assign (or empty when
   there is no ELSEWHERE clause).  */
3424 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3425 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3426 : build_empty_stmt ();
3427 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3428 gfc_add_expr_to_block (&body, tmp);
3429 gfc_add_block_to_block (&body, &cse.post);
3431 gfc_trans_scalarizing_loops (&loop, &body);
3432 gfc_add_block_to_block (&block, &loop.pre);
3433 gfc_add_block_to_block (&block, &loop.post);
3434 gfc_cleanup_loop (&loop);
3436 return gfc_finish_block (&block);
/* NOTE(review): gapped listing — braces and a few condition lines are
   elided.  Top-level WHERE dispatcher: if the construct is a simple
   one- or two-clause form whose expressions are pairwise dependence
   free (checked with gfc_check_dependency), use the fast mask-free
   path gfc_trans_where_3; otherwise fall back to the general masked
   translation gfc_trans_where_2 with NULL initial masks.  */
3439 /* As the WHERE or WHERE construct statement can be nested, we call
3440 gfc_trans_where_2 to do the translation, and pass the initial
3441 NULL values for both the control mask and the pending control mask. */
3444 gfc_trans_where (gfc_code * code)
3450 cblock = code->block;
3452 && cblock->next->op == EXEC_ASSIGN
3453 && !cblock->next->next)
3455 eblock = cblock->block;
3458 /* A simple "WHERE (cond) x = y" statement or block is
3459 dependence free if cond is not dependent upon writing x,
3460 and the source y is unaffected by the destination x. */
3461 if (!gfc_check_dependency (cblock->next->expr,
3463 && !gfc_check_dependency (cblock->next->expr,
3464 cblock->next->expr2, 0))
3465 return gfc_trans_where_3 (cblock, NULL);
3467 else if (!eblock->expr
3470 && eblock->next->op == EXEC_ASSIGN
3471 && !eblock->next->next)
3473 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3474 block is dependence free if cond is not dependent on writes
3475 to x1 and x2, y1 is not dependent on writes to x2, and y2
3476 is not dependent on writes to x1, and both y's are not
3477 dependent upon their own x's. */
3478 if (!gfc_check_dependency(cblock->next->expr,
3480 && !gfc_check_dependency(eblock->next->expr,
3482 && !gfc_check_dependency(cblock->next->expr,
3483 eblock->next->expr2, 0)
3484 && !gfc_check_dependency(eblock->next->expr,
3485 cblock->next->expr2, 0)
3486 && !gfc_check_dependency(cblock->next->expr,
3487 cblock->next->expr2, 0)
3488 && !gfc_check_dependency(eblock->next->expr,
3489 eblock->next->expr2, 0))
3490 return gfc_trans_where_3 (cblock, eblock);
/* General case: full masked translation.  */
3494 gfc_start_block (&block);
3496 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3498 return gfc_finish_block (&block);
/* Emit a GOTO to the loop's cycle label (stored by gfc_trans_do in
   TREE_PURPOSE of the loop's backend_decl) and mark the label used so
   it is not discarded.  */
3502 /* CYCLE a DO loop. The label decl has already been created by
3503 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
3504 node at the head of the loop. We must mark the label as used. */
3507 gfc_trans_cycle (gfc_code * code)
3511 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3512 TREE_USED (cycle_label) = 1;
3513 return build1_v (GOTO_EXPR, cycle_label);
/* Emit a GOTO to the loop's exit label (TREE_VALUE of the loop's
   backend_decl, the counterpart of the cycle label above) and mark it
   used.  */
3517 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3518 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3522 gfc_trans_exit (gfc_code * code)
3526 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3527 TREE_USED (exit_label) = 1;
3528 return build1_v (GOTO_EXPR, exit_label);
/* NOTE(review): gapped listing — braces, the STAT= condition line and
   some declarations are elided; comments describe only the visible
   statements.  For each item in the ALLOCATE list: array allocations
   go through gfc_array_allocate, scalars/derived types call the
   library allocator; with STAT= present, failures branch to
   error_label and the status is copied to the user variable at the
   end.  */
3532 /* Translate the ALLOCATE statement. */
3535 gfc_trans_allocate (gfc_code * code)
3547 if (!code->ext.alloc_list)
3550 gfc_start_block (&block);
/* STAT= given: allocate an int4 status variable, pass its address to
   the library, and prepare an error label to short-circuit the rest of
   the list on failure.  */
3554 tree gfc_int4_type_node = gfc_get_int_type (4);
3556 stat = gfc_create_var (gfc_int4_type_node, "stat");
3557 pstat = build_fold_addr_expr (stat);
3559 error_label = gfc_build_label_decl (NULL_TREE);
3560 TREE_USED (error_label) = 1;
/* No STAT=: pass a null status pointer; allocation failure aborts in
   the library.  */
3564 pstat = integer_zero_node;
3565 stat = error_label = NULL_TREE;
3569 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3573 gfc_init_se (&se, NULL);
3574 gfc_start_block (&se.pre);
3576 se.want_pointer = 1;
3577 se.descriptor_only = 1;
3578 gfc_conv_expr (&se, expr);
3580 if (!gfc_array_allocate (&se, expr, pstat))
3582 /* A scalar or derived type. */
3583 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
/* CHARACTER allocatables have no fixed TYPE_SIZE_UNIT; use the
   evaluated string length instead.  */
3585 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3586 tmp = se.string_length;
3588 parm = gfc_chainon_list (NULL_TREE, tmp);
3589 parm = gfc_chainon_list (parm, pstat);
3590 tmp = build_function_call_expr (gfor_fndecl_allocate, parm);
3591 tmp = build2 (MODIFY_EXPR, void_type_node, se.expr, tmp);
3592 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT=: if (stat != 0) goto error_label, skipping remaining
   allocations.  */
3596 tmp = build1_v (GOTO_EXPR, error_label);
3597 parm = fold_build2 (NE_EXPR, boolean_type_node,
3598 stat, build_int_cst (TREE_TYPE (stat), 0));
3599 tmp = fold_build3 (COND_EXPR, void_type_node,
3600 parm, tmp, build_empty_stmt ());
3601 gfc_add_expr_to_block (&se.pre, tmp);
/* Derived types with allocatable components: null the component
   pointers of the freshly allocated object.  */
3604 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3606 tmp = build_fold_indirect_ref (se.expr);
3607 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3608 gfc_add_expr_to_block (&se.pre, tmp);
3613 tmp = gfc_finish_block (&se.pre);
3614 gfc_add_expr_to_block (&block, tmp);
3617 /* Assign the value to the status variable. */
3620 tmp = build1_v (LABEL_EXPR, error_label);
3621 gfc_add_expr_to_block (&block, tmp);
3623 gfc_init_se (&se, NULL);
3624 gfc_conv_expr_lhs (&se, code->expr);
3625 tmp = convert (TREE_TYPE (se.expr), stat);
3626 gfc_add_modify_expr (&block, se.expr, tmp);
3629 return gfc_finish_block (&block);
/* NOTE(review): gapped listing — braces, the STAT= condition and some
   declarations are elided, and the function's closing brace lies past
   the end of this view; comments describe only the visible statements.  */
3633 /* Translate a DEALLOCATE statement.
3634 There are two cases within the for loop:
3635 (1) deallocate(a1, a2, a3) is translated into the following sequence
3636 _gfortran_deallocate(a1, 0B)
3637 _gfortran_deallocate(a2, 0B)
3638 _gfortran_deallocate(a3, 0B)
3639 where the STAT= variable is passed a NULL pointer.
3640 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
3642 _gfortran_deallocate(a1, &stat)
3643 astat = astat + stat
3644 _gfortran_deallocate(a2, &stat)
3645 astat = astat + stat
3646 _gfortran_deallocate(a3, &stat)
3647 astat = astat + stat
3648 In case (1), we simply return at the end of the for loop. In case (2)
3649 we set STAT= astat. */
3651 gfc_trans_deallocate (gfc_code * code)
3656 tree apstat, astat, parm, pstat, stat, tmp;
3659 gfc_start_block (&block);
3661 /* Set up the optional STAT= */
3664 tree gfc_int4_type_node = gfc_get_int_type (4);
3666 /* Variable used with the library call. */
3667 stat = gfc_create_var (gfc_int4_type_node, "stat");
3668 pstat = build_fold_addr_expr (stat);
3670 /* Running total of possible deallocation failures. */
3671 astat = gfc_create_var (gfc_int4_type_node, "astat");
3672 apstat = build_fold_addr_expr (astat);
3674 /* Initialize astat to 0. */
3675 gfc_add_modify_expr (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
/* No STAT=: pass null status pointers to the library calls.  */
3679 pstat = apstat = null_pointer_node;
3680 stat = astat = NULL_TREE;
3683 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3686 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3688 gfc_init_se (&se, NULL);
3689 gfc_start_block (&se.pre);
3691 se.want_pointer = 1;
3692 se.descriptor_only = 1;
3693 gfc_conv_expr (&se, expr);
/* For derived types with allocatable components, deallocate the
   components first — unless the object is reached through a pointer
   (ultimate pointer component, or a pointer symbol with no component
   reference), whose components must not be freed here.  */
3695 if (expr->ts.type == BT_DERIVED
3696 && expr->ts.derived->attr.alloc_comp)
3699 gfc_ref *last = NULL;
3700 for (ref = expr->ref; ref; ref = ref->next)
3701 if (ref->type == REF_COMPONENT)
3704 /* Do not deallocate the components of a derived type
3705 ultimate pointer component. */
3706 if (!(last && last->u.c.component->pointer)
3707 && !(!last && expr->symtree->n.sym->attr.pointer))
3709 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
3711 gfc_add_expr_to_block (&se.pre, tmp);
/* Arrays go through gfc_array_deallocate; scalars call the library
   deallocate and then null the pointer.  */
3716 tmp = gfc_array_deallocate (se.expr, pstat);
3719 parm = gfc_chainon_list (NULL_TREE, se.expr);
3720 parm = gfc_chainon_list (parm, pstat);
3721 tmp = build_function_call_expr (gfor_fndecl_deallocate, parm);
3722 gfc_add_expr_to_block (&se.pre, tmp);
3724 tmp = build2 (MODIFY_EXPR, void_type_node,
3725 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
3728 gfc_add_expr_to_block (&se.pre, tmp);
3730 /* Keep track of the number of failed deallocations by adding stat
3731 of the last deallocation to the running total. */
3734 apstat = build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
3735 gfc_add_modify_expr (&se.pre, astat, apstat);
3738 tmp = gfc_finish_block (&se.pre);
3739 gfc_add_expr_to_block (&block, tmp);
3743 /* Assign the value to the status variable. */
3746 gfc_init_se (&se, NULL);
3747 gfc_conv_expr_lhs (&se, code->expr);
3748 tmp = convert (TREE_TYPE (se.expr), astat);
3749 gfc_add_modify_expr (&block, se.expr, tmp);
3752 return gfc_finish_block (&block);