1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
27 #include "coretypes.h"
29 #include "tree-gimple.h"
36 #include "trans-stmt.h"
37 #include "trans-types.h"
38 #include "trans-array.h"
39 #include "trans-const.h"
41 #include "dependency.h"
43 typedef struct iter_info
49 struct iter_info *next;
53 typedef struct forall_info
61 struct forall_info *outer;
62 struct forall_info *next_nest;
66 static void gfc_trans_where_2 (gfc_code *, tree, bool,
67 forall_info *, stmtblock_t *);
69 /* Translate a F95 label number to a LABEL_EXPR. */
72 gfc_trans_label_here (gfc_code * code)
74 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
78 /* Given a variable expression which has been ASSIGNed to, find the decl
79 containing the auxiliary variables. For variables in common blocks this
83 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
85 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
86 gfc_conv_expr (se, expr);
87 /* Deals with variable in common block. Get the field declaration. */
88 if (TREE_CODE (se->expr) == COMPONENT_REF)
89 se->expr = TREE_OPERAND (se->expr, 1);
90 /* Deals with dummy argument. Get the parameter declaration. */
91 else if (TREE_CODE (se->expr) == INDIRECT_REF)
92 se->expr = TREE_OPERAND (se->expr, 0);
95 /* Translate a label assignment statement. */
98 gfc_trans_label_assign (gfc_code * code)
108 /* Start a new block. */
109 gfc_init_se (&se, NULL);
110 gfc_start_block (&se.pre);
111 gfc_conv_label_variable (&se, code->expr);
113 len = GFC_DECL_STRING_LEN (se.expr);
114 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
116 label_tree = gfc_get_label_decl (code->label);
118 if (code->label->defined == ST_LABEL_TARGET)
120 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
121 len_tree = integer_minus_one_node;
125 label_str = code->label->format->value.character.string;
126 label_len = code->label->format->value.character.length;
127 len_tree = build_int_cst (NULL_TREE, label_len);
128 label_tree = gfc_build_string_const (label_len + 1, label_str);
129 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
132 gfc_add_modify_expr (&se.pre, len, len_tree);
133 gfc_add_modify_expr (&se.pre, addr, label_tree);
135 return gfc_finish_block (&se.pre);
138 /* Translate a GOTO statement. */
141 gfc_trans_goto (gfc_code * code)
143 locus loc = code->loc;
149 if (code->label != NULL)
150 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
153 gfc_init_se (&se, NULL);
154 gfc_start_block (&se.pre);
155 gfc_conv_label_variable (&se, code->expr);
156 tmp = GFC_DECL_STRING_LEN (se.expr);
157 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
158 build_int_cst (TREE_TYPE (tmp), -1));
159 gfc_trans_runtime_check (tmp, "Assigned label is not a target label",
162 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
167 target = build1 (GOTO_EXPR, void_type_node, assigned_goto);
168 gfc_add_expr_to_block (&se.pre, target);
169 return gfc_finish_block (&se.pre);
172 /* Check the label list. */
175 target = gfc_get_label_decl (code->label);
176 tmp = gfc_build_addr_expr (pvoid_type_node, target);
177 tmp = build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
178 tmp = build3_v (COND_EXPR, tmp,
179 build1 (GOTO_EXPR, void_type_node, target),
180 build_empty_stmt ());
181 gfc_add_expr_to_block (&se.pre, tmp);
184 while (code != NULL);
185 gfc_trans_runtime_check (boolean_true_node,
186 "Assigned label is not in the list", &se.pre, &loc);
188 return gfc_finish_block (&se.pre);
192 /* Translate an ENTRY statement. Just adds a label for this entry point. */
194 gfc_trans_entry (gfc_code * code)
196 return build1_v (LABEL_EXPR, code->ext.entry->label);
200 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
201 elemental subroutines. Make temporaries for output arguments if any such
202 dependencies are found. Output arguments are chosen because internal_unpack
203 can be used, as is, to copy the result back to the variable. */
205 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
206 gfc_symbol * sym, gfc_actual_arglist * arg)
208 gfc_actual_arglist *arg0;
210 gfc_formal_arglist *formal;
211 gfc_loopinfo tmp_loop;
223 if (loopse->ss == NULL)
228 formal = sym->formal;
230 /* Loop over all the arguments testing for dependencies. */
231 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
237 /* Obtain the info structure for the current argument. */
239 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
243 info = &ss->data.info;
247 /* If there is a dependency, create a temporary and use it
248 instead of the variable. */
249 fsym = formal ? formal->sym : NULL;
250 if (e->expr_type == EXPR_VARIABLE
252 && fsym->attr.intent == INTENT_OUT
253 && gfc_check_fncall_dependency (e, INTENT_OUT, sym, arg0))
255 /* Make a local loopinfo for the temporary creation, so that
256 none of the other ss->info's have to be renormalized. */
257 gfc_init_loopinfo (&tmp_loop);
258 for (n = 0; n < info->dimen; n++)
260 tmp_loop.to[n] = loopse->loop->to[n];
261 tmp_loop.from[n] = loopse->loop->from[n];
262 tmp_loop.order[n] = loopse->loop->order[n];
265 /* Generate the temporary. Merge the block so that the
266 declarations are put at the right binding level. */
267 size = gfc_create_var (gfc_array_index_type, NULL);
268 data = gfc_create_var (pvoid_type_node, NULL);
269 gfc_start_block (&block);
270 tmp = gfc_typenode_for_spec (&e->ts);
271 tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
272 &tmp_loop, info, tmp,
273 false, true, false, false);
274 gfc_add_modify_expr (&se->pre, size, tmp);
275 tmp = fold_convert (pvoid_type_node, info->data);
276 gfc_add_modify_expr (&se->pre, data, tmp);
277 gfc_merge_block_scope (&block);
279 /* Obtain the argument descriptor for unpacking. */
280 gfc_init_se (&parmse, NULL);
281 parmse.want_pointer = 1;
282 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
283 gfc_add_block_to_block (&se->pre, &parmse.pre);
285 /* Calculate the offset for the temporary. */
286 offset = gfc_index_zero_node;
287 for (n = 0; n < info->dimen; n++)
289 tmp = gfc_conv_descriptor_stride (info->descriptor,
291 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
292 loopse->loop->from[n], tmp);
293 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
296 info->offset = gfc_create_var (gfc_array_index_type, NULL);
297 gfc_add_modify_expr (&se->pre, info->offset, offset);
299 /* Copy the result back using unpack. */
300 tmp = gfc_chainon_list (NULL_TREE, parmse.expr);
301 tmp = gfc_chainon_list (tmp, data);
302 tmp = build_function_call_expr (gfor_fndecl_in_unpack, tmp);
303 gfc_add_expr_to_block (&se->post, tmp);
305 gfc_add_block_to_block (&se->post, &parmse.post);
311 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
314 gfc_trans_call (gfc_code * code, bool dependency_check)
318 int has_alternate_specifier;
320 /* A CALL starts a new block because the actual arguments may have to
321 be evaluated first. */
322 gfc_init_se (&se, NULL);
323 gfc_start_block (&se.pre);
325 gcc_assert (code->resolved_sym);
327 ss = gfc_ss_terminator;
328 if (code->resolved_sym->attr.elemental)
329 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
331 /* Is not an elemental subroutine call with array valued arguments. */
332 if (ss == gfc_ss_terminator)
335 /* Translate the call. */
336 has_alternate_specifier
337 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual);
339 /* A subroutine without side-effect, by definition, does nothing! */
340 TREE_SIDE_EFFECTS (se.expr) = 1;
342 /* Chain the pieces together and return the block. */
343 if (has_alternate_specifier)
345 gfc_code *select_code;
347 select_code = code->next;
348 gcc_assert(select_code->op == EXEC_SELECT);
349 sym = select_code->expr->symtree->n.sym;
350 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
351 gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
354 gfc_add_expr_to_block (&se.pre, se.expr);
356 gfc_add_block_to_block (&se.pre, &se.post);
361 /* An elemental subroutine call with array valued arguments has
368 /* gfc_walk_elemental_function_args renders the ss chain in the
369 reverse order to the actual argument order. */
370 ss = gfc_reverse_ss (ss);
372 /* Initialize the loop. */
373 gfc_init_se (&loopse, NULL);
374 gfc_init_loopinfo (&loop);
375 gfc_add_ss_to_loop (&loop, ss);
377 gfc_conv_ss_startstride (&loop);
378 gfc_conv_loop_setup (&loop);
379 gfc_mark_ss_chain_used (ss, 1);
381 /* Convert the arguments, checking for dependencies. */
382 gfc_copy_loopinfo_to_se (&loopse, &loop);
385 /* For operator assignment, we need to do dependency checking.
386 We also check the intent of the parameters. */
387 if (dependency_check)
390 sym = code->resolved_sym;
391 gcc_assert (sym->formal->sym->attr.intent = INTENT_OUT);
392 gcc_assert (sym->formal->next->sym->attr.intent = INTENT_IN);
393 gfc_conv_elemental_dependencies (&se, &loopse, sym,
397 /* Generate the loop body. */
398 gfc_start_scalarized_body (&loop, &body);
399 gfc_init_block (&block);
401 /* Add the subroutine call to the block. */
402 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual);
403 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
405 gfc_add_block_to_block (&block, &loopse.pre);
406 gfc_add_block_to_block (&block, &loopse.post);
408 /* Finish up the loop block and the loop. */
409 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
410 gfc_trans_scalarizing_loops (&loop, &body);
411 gfc_add_block_to_block (&se.pre, &loop.pre);
412 gfc_add_block_to_block (&se.pre, &loop.post);
413 gfc_add_block_to_block (&se.pre, &se.post);
414 gfc_cleanup_loop (&loop);
417 return gfc_finish_block (&se.pre);
421 /* Translate the RETURN statement. */
424 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
432 /* if code->expr is not NULL, this return statement must appear
433 in a subroutine and current_fake_result_decl has already
436 result = gfc_get_fake_result_decl (NULL, 0);
439 gfc_warning ("An alternate return at %L without a * dummy argument",
441 return build1_v (GOTO_EXPR, gfc_get_return_label ());
444 /* Start a new block for this statement. */
445 gfc_init_se (&se, NULL);
446 gfc_start_block (&se.pre);
448 gfc_conv_expr (&se, code->expr);
450 tmp = build2 (MODIFY_EXPR, TREE_TYPE (result), result, se.expr);
451 gfc_add_expr_to_block (&se.pre, tmp);
453 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
454 gfc_add_expr_to_block (&se.pre, tmp);
455 gfc_add_block_to_block (&se.pre, &se.post);
456 return gfc_finish_block (&se.pre);
459 return build1_v (GOTO_EXPR, gfc_get_return_label ());
463 /* Translate the PAUSE statement. We have to translate this statement
464 to a runtime library call. */
467 gfc_trans_pause (gfc_code * code)
469 tree gfc_int4_type_node = gfc_get_int_type (4);
475 /* Start a new block for this statement. */
476 gfc_init_se (&se, NULL);
477 gfc_start_block (&se.pre);
480 if (code->expr == NULL)
482 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
483 args = gfc_chainon_list (NULL_TREE, tmp);
484 fndecl = gfor_fndecl_pause_numeric;
488 gfc_conv_expr_reference (&se, code->expr);
489 args = gfc_chainon_list (NULL_TREE, se.expr);
490 args = gfc_chainon_list (args, se.string_length);
491 fndecl = gfor_fndecl_pause_string;
494 tmp = build_function_call_expr (fndecl, args);
495 gfc_add_expr_to_block (&se.pre, tmp);
497 gfc_add_block_to_block (&se.pre, &se.post);
499 return gfc_finish_block (&se.pre);
503 /* Translate the STOP statement. We have to translate this statement
504 to a runtime library call. */
507 gfc_trans_stop (gfc_code * code)
509 tree gfc_int4_type_node = gfc_get_int_type (4);
515 /* Start a new block for this statement. */
516 gfc_init_se (&se, NULL);
517 gfc_start_block (&se.pre);
520 if (code->expr == NULL)
522 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
523 args = gfc_chainon_list (NULL_TREE, tmp);
524 fndecl = gfor_fndecl_stop_numeric;
528 gfc_conv_expr_reference (&se, code->expr);
529 args = gfc_chainon_list (NULL_TREE, se.expr);
530 args = gfc_chainon_list (args, se.string_length);
531 fndecl = gfor_fndecl_stop_string;
534 tmp = build_function_call_expr (fndecl, args);
535 gfc_add_expr_to_block (&se.pre, tmp);
537 gfc_add_block_to_block (&se.pre, &se.post);
539 return gfc_finish_block (&se.pre);
543 /* Generate GENERIC for the IF construct. This function also deals with
544 the simple IF statement, because the front end translates the IF
545 statement into an IF construct.
577 where COND_S is the simplified version of the predicate. PRE_COND_S
578 are the pre side-effects produced by the translation of the
580 We need to build the chain recursively otherwise we run into
581 problems with folding incomplete statements. */
584 gfc_trans_if_1 (gfc_code * code)
589 /* Check for an unconditional ELSE clause. */
591 return gfc_trans_code (code->next);
593 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
594 gfc_init_se (&if_se, NULL);
595 gfc_start_block (&if_se.pre);
597 /* Calculate the IF condition expression. */
598 gfc_conv_expr_val (&if_se, code->expr);
600 /* Translate the THEN clause. */
601 stmt = gfc_trans_code (code->next);
603 /* Translate the ELSE clause. */
605 elsestmt = gfc_trans_if_1 (code->block);
607 elsestmt = build_empty_stmt ();
609 /* Build the condition expression and add it to the condition block. */
610 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
612 gfc_add_expr_to_block (&if_se.pre, stmt);
614 /* Finish off this statement. */
615 return gfc_finish_block (&if_se.pre);
619 gfc_trans_if (gfc_code * code)
621 /* Ignore the top EXEC_IF, it only announces an IF construct. The
622 actual code we must translate is in code->block. */
624 return gfc_trans_if_1 (code->block);
628 /* Translage an arithmetic IF expression.
630 IF (cond) label1, label2, label3 translates to
642 An optimized version can be generated in case of equal labels.
643 E.g., if label1 is equal to label2, we can translate it to
652 gfc_trans_arithmetic_if (gfc_code * code)
660 /* Start a new block. */
661 gfc_init_se (&se, NULL);
662 gfc_start_block (&se.pre);
664 /* Pre-evaluate COND. */
665 gfc_conv_expr_val (&se, code->expr);
666 se.expr = gfc_evaluate_now (se.expr, &se.pre);
668 /* Build something to compare with. */
669 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
671 if (code->label->value != code->label2->value)
673 /* If (cond < 0) take branch1 else take branch2.
674 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
675 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
676 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
678 if (code->label->value != code->label3->value)
679 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
681 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
683 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
686 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
688 if (code->label->value != code->label3->value
689 && code->label2->value != code->label3->value)
691 /* if (cond <= 0) take branch1 else take branch2. */
692 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
693 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
694 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
697 /* Append the COND_EXPR to the evaluation of COND, and return. */
698 gfc_add_expr_to_block (&se.pre, branch1);
699 return gfc_finish_block (&se.pre);
703 /* Translate the simple DO construct. This is where the loop variable has
704 integer type and step +-1. We can't use this in the general case
705 because integer overflow and floating point errors could give incorrect
707 We translate a do loop from:
709 DO dovar = from, to, step
715 [Evaluate loop bounds and step]
717 if ((step > 0) ? (dovar <= to) : (dovar >= to))
723 cond = (dovar == to);
725 if (cond) goto end_label;
730 This helps the optimizers by avoiding the extra induction variable
731 used in the general case. */
734 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
735 tree from, tree to, tree step)
744 type = TREE_TYPE (dovar);
746 /* Initialize the DO variable: dovar = from. */
747 gfc_add_modify_expr (pblock, dovar, from);
749 /* Cycle and exit statements are implemented with gotos. */
750 cycle_label = gfc_build_label_decl (NULL_TREE);
751 exit_label = gfc_build_label_decl (NULL_TREE);
753 /* Put the labels where they can be found later. See gfc_trans_do(). */
754 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
757 gfc_start_block (&body);
759 /* Main loop body. */
760 tmp = gfc_trans_code (code->block->next);
761 gfc_add_expr_to_block (&body, tmp);
763 /* Label for cycle statements (if needed). */
764 if (TREE_USED (cycle_label))
766 tmp = build1_v (LABEL_EXPR, cycle_label);
767 gfc_add_expr_to_block (&body, tmp);
770 /* Evaluate the loop condition. */
771 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
772 cond = gfc_evaluate_now (cond, &body);
774 /* Increment the loop variable. */
775 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
776 gfc_add_modify_expr (&body, dovar, tmp);
779 tmp = build1_v (GOTO_EXPR, exit_label);
780 TREE_USED (exit_label) = 1;
781 tmp = fold_build3 (COND_EXPR, void_type_node,
782 cond, tmp, build_empty_stmt ());
783 gfc_add_expr_to_block (&body, tmp);
785 /* Finish the loop body. */
786 tmp = gfc_finish_block (&body);
787 tmp = build1_v (LOOP_EXPR, tmp);
789 /* Only execute the loop if the number of iterations is positive. */
790 if (tree_int_cst_sgn (step) > 0)
791 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
793 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
794 tmp = fold_build3 (COND_EXPR, void_type_node,
795 cond, tmp, build_empty_stmt ());
796 gfc_add_expr_to_block (pblock, tmp);
798 /* Add the exit label. */
799 tmp = build1_v (LABEL_EXPR, exit_label);
800 gfc_add_expr_to_block (pblock, tmp);
802 return gfc_finish_block (pblock);
805 /* Translate the DO construct. This obviously is one of the most
806 important ones to get right with any compiler, but especially
809 We special case some loop forms as described in gfc_trans_simple_do.
810 For other cases we implement them with a separate loop count,
811 as described in the standard.
813 We translate a do loop from:
815 DO dovar = from, to, step
821 [evaluate loop bounds and step]
822 count = (to + step - from) / step;
830 if (count <=0) goto exit_label;
834 TODO: Large loop counts
835 The code above assumes the loop count fits into a signed integer kind,
836 i.e. Does not work for loop counts > 2^31 for integer(kind=4) variables
837 We must support the full range. */
840 gfc_trans_do (gfc_code * code)
857 gfc_start_block (&block);
859 /* Evaluate all the expressions in the iterator. */
860 gfc_init_se (&se, NULL);
861 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
862 gfc_add_block_to_block (&block, &se.pre);
864 type = TREE_TYPE (dovar);
866 gfc_init_se (&se, NULL);
867 gfc_conv_expr_val (&se, code->ext.iterator->start);
868 gfc_add_block_to_block (&block, &se.pre);
869 from = gfc_evaluate_now (se.expr, &block);
871 gfc_init_se (&se, NULL);
872 gfc_conv_expr_val (&se, code->ext.iterator->end);
873 gfc_add_block_to_block (&block, &se.pre);
874 to = gfc_evaluate_now (se.expr, &block);
876 gfc_init_se (&se, NULL);
877 gfc_conv_expr_val (&se, code->ext.iterator->step);
878 gfc_add_block_to_block (&block, &se.pre);
879 step = gfc_evaluate_now (se.expr, &block);
881 /* Special case simple loops. */
882 if (TREE_CODE (type) == INTEGER_TYPE
883 && (integer_onep (step)
884 || tree_int_cst_equal (step, integer_minus_one_node)))
885 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
887 /* Initialize loop count. This code is executed before we enter the
888 loop body. We generate: count = (to + step - from) / step. */
890 tmp = fold_build2 (MINUS_EXPR, type, step, from);
891 tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
892 if (TREE_CODE (type) == INTEGER_TYPE)
894 tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
895 count = gfc_create_var (type, "count");
899 /* TODO: We could use the same width as the real type.
900 This would probably cause more problems that it solves
901 when we implement "long double" types. */
902 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
903 tmp = fold_build1 (FIX_TRUNC_EXPR, gfc_array_index_type, tmp);
904 count = gfc_create_var (gfc_array_index_type, "count");
906 gfc_add_modify_expr (&block, count, tmp);
908 count_one = build_int_cst (TREE_TYPE (count), 1);
910 /* Initialize the DO variable: dovar = from. */
911 gfc_add_modify_expr (&block, dovar, from);
914 gfc_start_block (&body);
916 /* Cycle and exit statements are implemented with gotos. */
917 cycle_label = gfc_build_label_decl (NULL_TREE);
918 exit_label = gfc_build_label_decl (NULL_TREE);
920 /* Start with the loop condition. Loop until count <= 0. */
921 cond = fold_build2 (LE_EXPR, boolean_type_node, count,
922 build_int_cst (TREE_TYPE (count), 0));
923 tmp = build1_v (GOTO_EXPR, exit_label);
924 TREE_USED (exit_label) = 1;
925 tmp = fold_build3 (COND_EXPR, void_type_node,
926 cond, tmp, build_empty_stmt ());
927 gfc_add_expr_to_block (&body, tmp);
929 /* Put these labels where they can be found later. We put the
930 labels in a TREE_LIST node (because TREE_CHAIN is already
931 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
932 label in TREE_VALUE (backend_decl). */
934 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
936 /* Main loop body. */
937 tmp = gfc_trans_code (code->block->next);
938 gfc_add_expr_to_block (&body, tmp);
940 /* Label for cycle statements (if needed). */
941 if (TREE_USED (cycle_label))
943 tmp = build1_v (LABEL_EXPR, cycle_label);
944 gfc_add_expr_to_block (&body, tmp);
947 /* Increment the loop variable. */
948 tmp = build2 (PLUS_EXPR, type, dovar, step);
949 gfc_add_modify_expr (&body, dovar, tmp);
951 /* Decrement the loop count. */
952 tmp = build2 (MINUS_EXPR, TREE_TYPE (count), count, count_one);
953 gfc_add_modify_expr (&body, count, tmp);
955 /* End of loop body. */
956 tmp = gfc_finish_block (&body);
958 /* The for loop itself. */
959 tmp = build1_v (LOOP_EXPR, tmp);
960 gfc_add_expr_to_block (&block, tmp);
962 /* Add the exit label. */
963 tmp = build1_v (LABEL_EXPR, exit_label);
964 gfc_add_expr_to_block (&block, tmp);
966 return gfc_finish_block (&block);
970 /* Translate the DO WHILE construct.
983 if (! cond) goto exit_label;
989 Because the evaluation of the exit condition `cond' may have side
990 effects, we can't do much for empty loop bodies. The backend optimizers
991 should be smart enough to eliminate any dead loops. */
994 gfc_trans_do_while (gfc_code * code)
1002 /* Everything we build here is part of the loop body. */
1003 gfc_start_block (&block);
1005 /* Cycle and exit statements are implemented with gotos. */
1006 cycle_label = gfc_build_label_decl (NULL_TREE);
1007 exit_label = gfc_build_label_decl (NULL_TREE);
1009 /* Put the labels where they can be found later. See gfc_trans_do(). */
1010 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1012 /* Create a GIMPLE version of the exit condition. */
1013 gfc_init_se (&cond, NULL);
1014 gfc_conv_expr_val (&cond, code->expr);
1015 gfc_add_block_to_block (&block, &cond.pre);
1016 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1018 /* Build "IF (! cond) GOTO exit_label". */
1019 tmp = build1_v (GOTO_EXPR, exit_label);
1020 TREE_USED (exit_label) = 1;
1021 tmp = fold_build3 (COND_EXPR, void_type_node,
1022 cond.expr, tmp, build_empty_stmt ());
1023 gfc_add_expr_to_block (&block, tmp);
1025 /* The main body of the loop. */
1026 tmp = gfc_trans_code (code->block->next);
1027 gfc_add_expr_to_block (&block, tmp);
1029 /* Label for cycle statements (if needed). */
1030 if (TREE_USED (cycle_label))
1032 tmp = build1_v (LABEL_EXPR, cycle_label);
1033 gfc_add_expr_to_block (&block, tmp);
1036 /* End of loop body. */
1037 tmp = gfc_finish_block (&block);
1039 gfc_init_block (&block);
1040 /* Build the loop. */
1041 tmp = build1_v (LOOP_EXPR, tmp);
1042 gfc_add_expr_to_block (&block, tmp);
1044 /* Add the exit label. */
1045 tmp = build1_v (LABEL_EXPR, exit_label);
1046 gfc_add_expr_to_block (&block, tmp);
1048 return gfc_finish_block (&block);
1052 /* Translate the SELECT CASE construct for INTEGER case expressions,
1053 without killing all potential optimizations. The problem is that
1054 Fortran allows unbounded cases, but the back-end does not, so we
1055 need to intercept those before we enter the equivalent SWITCH_EXPR
1058 For example, we translate this,
1061 CASE (:100,101,105:115)
1071 to the GENERIC equivalent,
1075 case (minimum value for typeof(expr) ... 100:
1081 case 200 ... (maximum value for typeof(expr):
1098 gfc_trans_integer_select (gfc_code * code)
1108 gfc_start_block (&block);
1110 /* Calculate the switch expression. */
1111 gfc_init_se (&se, NULL);
1112 gfc_conv_expr_val (&se, code->expr);
1113 gfc_add_block_to_block (&block, &se.pre);
1115 end_label = gfc_build_label_decl (NULL_TREE);
1117 gfc_init_block (&body);
1119 for (c = code->block; c; c = c->block)
1121 for (cp = c->ext.case_list; cp; cp = cp->next)
1126 /* Assume it's the default case. */
1127 low = high = NULL_TREE;
1131 low = gfc_conv_constant_to_tree (cp->low);
1133 /* If there's only a lower bound, set the high bound to the
1134 maximum value of the case expression. */
1136 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1141 /* Three cases are possible here:
1143 1) There is no lower bound, e.g. CASE (:N).
1144 2) There is a lower bound .NE. high bound, that is
1145 a case range, e.g. CASE (N:M) where M>N (we make
1146 sure that M>N during type resolution).
1147 3) There is a lower bound, and it has the same value
1148 as the high bound, e.g. CASE (N:N). This is our
1149 internal representation of CASE(N).
1151 In the first and second case, we need to set a value for
1152 high. In the third case, we don't because the GCC middle
1153 end represents a single case value by just letting high be
1154 a NULL_TREE. We can't do that because we need to be able
1155 to represent unbounded cases. */
1159 && mpz_cmp (cp->low->value.integer,
1160 cp->high->value.integer) != 0))
1161 high = gfc_conv_constant_to_tree (cp->high);
1163 /* Unbounded case. */
1165 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1168 /* Build a label. */
1169 label = gfc_build_label_decl (NULL_TREE);
1171 /* Add this case label.
1172 Add parameter 'label', make it match GCC backend. */
1173 tmp = build3 (CASE_LABEL_EXPR, void_type_node, low, high, label);
1174 gfc_add_expr_to_block (&body, tmp);
1177 /* Add the statements for this case. */
1178 tmp = gfc_trans_code (c->next);
1179 gfc_add_expr_to_block (&body, tmp);
1181 /* Break to the end of the construct. */
1182 tmp = build1_v (GOTO_EXPR, end_label);
1183 gfc_add_expr_to_block (&body, tmp);
1186 tmp = gfc_finish_block (&body);
1187 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1188 gfc_add_expr_to_block (&block, tmp);
1190 tmp = build1_v (LABEL_EXPR, end_label);
1191 gfc_add_expr_to_block (&block, tmp);
1193 return gfc_finish_block (&block);
1197 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1199 There are only two cases possible here, even though the standard
1200 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1201 .FALSE., and DEFAULT.
1203 We never generate more than two blocks here. Instead, we always
1204 try to eliminate the DEFAULT case. This way, we can translate this
1205 kind of SELECT construct to a simple
1209 expression in GENERIC. */
1212 gfc_trans_logical_select (gfc_code * code)
1215 gfc_code *t, *f, *d;
1220 /* Assume we don't have any cases at all. */
1223 /* Now see which ones we actually do have. We can have at most two
1224 cases in a single case list: one for .TRUE. and one for .FALSE.
1225 The default case is always separate. If the cases for .TRUE. and
1226 .FALSE. are in the same case list, the block for that case list
1227 always executed, and we don't generate code a COND_EXPR. */
1228 for (c = code->block; c; c = c->block)
1230 for (cp = c->ext.case_list; cp; cp = cp->next)
1234 if (cp->low->value.logical == 0) /* .FALSE. */
1236 else /* if (cp->value.logical != 0), thus .TRUE. */
1244 /* Start a new block. */
1245 gfc_start_block (&block);
1247 /* Calculate the switch expression. We always need to do this
1248 because it may have side effects. */
1249 gfc_init_se (&se, NULL);
1250 gfc_conv_expr_val (&se, code->expr);
1251 gfc_add_block_to_block (&block, &se.pre);
1253 if (t == f && t != NULL)
1255 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1256 translate the code for these cases, append it to the current
1258 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1262 tree true_tree, false_tree, stmt;
1264 true_tree = build_empty_stmt ();
1265 false_tree = build_empty_stmt ();
1267 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1268 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1269 make the missing case the default case. */
1270 if (t != NULL && f != NULL)
1280 /* Translate the code for each of these blocks, and append it to
1281 the current block. */
1283 true_tree = gfc_trans_code (t->next);
1286 false_tree = gfc_trans_code (f->next);
1288 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1289 true_tree, false_tree);
1290 gfc_add_expr_to_block (&block, stmt);
1293 return gfc_finish_block (&block);
1297 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1298 Instead of generating compares and jumps, it is far simpler to
1299 generate a data structure describing the cases in order and call a
1300 library subroutine that locates the right case.
1301 This is particularly true because this is the only case where we
1302 might have to dispose of a temporary.
1303 The library subroutine returns a pointer to jump to or NULL if no
1304 branches are to be taken. */
1307 gfc_trans_character_select (gfc_code *code)
1309 tree init, node, end_label, tmp, type, args, *labels;
1310 stmtblock_t block, body;
/* The record type describing one case range and its fields are built
   once per compilation and cached in these statics.  */
1316 static tree select_struct;
1317 static tree ss_string1, ss_string1_len;
1318 static tree ss_string2, ss_string2_len;
1319 static tree ss_target;
/* Lazily construct the { string1, string1_len, string2, string2_len,
   target } record passed to the runtime case-matching routine.  */
1321 if (select_struct == NULL)
1323 tree gfc_int4_type_node = gfc_get_int_type (4);
1325 select_struct = make_node (RECORD_TYPE);
1326 TYPE_NAME (select_struct) = get_identifier ("_jump_struct");
/* Helper macro: append one field to select_struct and remember its
   FIELD_DECL in the corresponding ss_* static.  */
1329 #define ADD_FIELD(NAME, TYPE) \
1330 ss_##NAME = gfc_add_field_to_struct \
1331 (&(TYPE_FIELDS (select_struct)), select_struct, \
1332 get_identifier (stringize(NAME)), TYPE)
1334 ADD_FIELD (string1, pchar_type_node);
1335 ADD_FIELD (string1_len, gfc_int4_type_node);
1337 ADD_FIELD (string2, pchar_type_node);
1338 ADD_FIELD (string2_len, gfc_int4_type_node);
1340 ADD_FIELD (target, pvoid_type_node);
1343 gfc_finish_type (select_struct);
/* Walk to the leftmost case and count the total number of cases.  */
1346 cp = code->block->ext.case_list;
1347 while (cp->left != NULL)
1351 for (d = cp; d; d = d->right)
1355 labels = gfc_getmem (n * sizeof (tree));
/* Build one label per case.  They become targets of a computed GOTO,
   hence the FORCED_LABEL below.  */
1359 for(i = 0; i < n; i++)
1361 labels[i] = gfc_build_label_decl (NULL_TREE);
1362 TREE_USED (labels[i]) = 1;
1363 /* TODO: The gimplifier should do this for us, but it has
1364 inadequacies when dealing with static initializers. */
1365 FORCED_LABEL (labels[i]) = 1;
1368 end_label = gfc_build_label_decl (NULL_TREE);
1370 /* Generate the body */
1371 gfc_start_block (&block);
1372 gfc_init_block (&body);
/* Emit each case block preceded by its label(s) and followed by a
   jump past the whole construct.  */
1374 for (c = code->block; c; c = c->block)
1376 for (d = c->ext.case_list; d; d = d->next)
1378 tmp = build1_v (LABEL_EXPR, labels[d->n]);
1379 gfc_add_expr_to_block (&body, tmp);
1382 tmp = gfc_trans_code (c->next);
1383 gfc_add_expr_to_block (&body, tmp);
1385 tmp = build1_v (GOTO_EXPR, end_label);
1386 gfc_add_expr_to_block (&body, tmp);
1389 /* Generate the structure describing the branches */
1393 for(d = cp; d; d = d->right, i++)
1397 gfc_init_se (&se, NULL);
/* A NULL low bound is represented by a null string pointer with
   zero length.  */
1401 node = tree_cons (ss_string1, null_pointer_node, node);
1402 node = tree_cons (ss_string1_len, integer_zero_node, node);
1406 gfc_conv_expr_reference (&se, d->low);
1408 node = tree_cons (ss_string1, se.expr, node);
1409 node = tree_cons (ss_string1_len, se.string_length, node);
1412 if (d->high == NULL)
1414 node = tree_cons (ss_string2, null_pointer_node, node);
1415 node = tree_cons (ss_string2_len, integer_zero_node, node);
1419 gfc_init_se (&se, NULL);
1420 gfc_conv_expr_reference (&se, d->high);
1422 node = tree_cons (ss_string2, se.expr, node);
1423 node = tree_cons (ss_string2_len, se.string_length, node);
/* Record the address of the case's label as the jump target.  */
1426 tmp = gfc_build_addr_expr (pvoid_type_node, labels[i]);
1427 node = tree_cons (ss_target, tmp, node);
1429 tmp = build_constructor_from_list (select_struct, nreverse (node));
1430 init = tree_cons (NULL_TREE, tmp, init);
/* Build a static array of n select_struct entries holding the table.  */
1433 type = build_array_type (select_struct, build_index_type
1434 (build_int_cst (NULL_TREE, n - 1)));
1436 init = build_constructor_from_list (type, nreverse(init));
1437 TREE_CONSTANT (init) = 1;
1438 TREE_INVARIANT (init) = 1;
1439 TREE_STATIC (init) = 1;
1440 /* Create a static variable to hold the jump table. */
1441 tmp = gfc_create_var (type, "jumptable");
1442 TREE_CONSTANT (tmp) = 1;
1443 TREE_INVARIANT (tmp) = 1;
1444 TREE_STATIC (tmp) = 1;
1445 DECL_INITIAL (tmp) = init;
1448 /* Build an argument list for the library call */
/* Arguments: table address, entry count, default target (end label),
   then the select expression and its length.  */
1449 init = gfc_build_addr_expr (pvoid_type_node, init);
1450 args = gfc_chainon_list (NULL_TREE, init);
1452 tmp = build_int_cst (NULL_TREE, n);
1453 args = gfc_chainon_list (args, tmp);
1455 tmp = gfc_build_addr_expr (pvoid_type_node, end_label);
1456 args = gfc_chainon_list (args, tmp);
1458 gfc_init_se (&se, NULL);
1459 gfc_conv_expr_reference (&se, code->expr);
1461 args = gfc_chainon_list (args, se.expr);
1462 args = gfc_chainon_list (args, se.string_length);
1464 gfc_add_block_to_block (&block, &se.pre);
/* The library routine returns the address to jump to; branch there.  */
1466 tmp = build_function_call_expr (gfor_fndecl_select_string, args);
1467 tmp = build1 (GOTO_EXPR, void_type_node, tmp);
1468 gfc_add_expr_to_block (&block, tmp);
1470 tmp = gfc_finish_block (&body);
1471 gfc_add_expr_to_block (&block, tmp);
1472 tmp = build1_v (LABEL_EXPR, end_label);
1473 gfc_add_expr_to_block (&block, tmp);
1478 return gfc_finish_block (&block);
1482 /* Translate the three variants of the SELECT CASE construct.
1484 SELECT CASEs with INTEGER case expressions can be translated to an
1485 equivalent GENERIC switch statement, and for LOGICAL case
1486 expressions we build one or two if-else compares.
1488 SELECT CASEs with CHARACTER case expressions are a whole different
1489 story, because they don't exist in GENERIC. So we sort them and
1490 do a binary search at runtime.
1492 Fortran has no BREAK statement, and it does not allow jumps from
1493 one case block to another. That makes things a lot easier for
1497 gfc_trans_select (gfc_code * code)
/* Dispatch a SELECT CASE construct to the translation routine that
   matches the type of the case expression.  */
1499 gcc_assert (code && code->expr);
1501 /* Empty SELECT constructs are legal. */
1502 if (code->block == NULL)
1503 return build_empty_stmt ();
1505 /* Select the correct translation function. */
1506 switch (code->expr->ts.type)
1508 case BT_LOGICAL: return gfc_trans_logical_select (code);
1509 case BT_INTEGER: return gfc_trans_integer_select (code);
1510 case BT_CHARACTER: return gfc_trans_character_select (code);
/* Any other case-expression type is a front-end bug.  */
1512 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1518 /* Generate the loops for a FORALL block. The normal loop format:
1519 count = (end - start + step) / step
/* Wrap BODY in the NVAR loops described by FORALL_TMP->this_loop.
   Each loop runs a precomputed iteration count downward to zero so the
   exit test does not depend on the sign of the step.  MASK_FLAG nonzero
   means the mask index variable is also initialized and advanced.  */
1532 gfc_trans_forall_loop (forall_info *forall_tmp, int nvar, tree body, int mask_flag)
1540 tree var, start, end, step;
1543 iter = forall_tmp->this_loop;
/* Build loops innermost-first: each iteration wraps the current BODY
   in one more loop level.  */
1544 for (n = 0; n < nvar; n++)
1547 start = iter->start;
1551 exit_label = gfc_build_label_decl (NULL_TREE);
1552 TREE_USED (exit_label) = 1;
1554 /* The loop counter. */
1555 count = gfc_create_var (TREE_TYPE (var), "count");
1557 /* The body of the loop. */
1558 gfc_init_block (&block);
1560 /* The exit condition. */
/* Leave the loop once the countdown reaches zero.  */
1561 cond = fold_build2 (LE_EXPR, boolean_type_node,
1562 count, build_int_cst (TREE_TYPE (count), 0));
1563 tmp = build1_v (GOTO_EXPR, exit_label);
1564 tmp = fold_build3 (COND_EXPR, void_type_node,
1565 cond, tmp, build_empty_stmt ());
1566 gfc_add_expr_to_block (&block, tmp);
1568 /* The main loop body. */
1569 gfc_add_expr_to_block (&block, body);
1571 /* Increment the loop variable. */
1572 tmp = build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1573 gfc_add_modify_expr (&block, var, tmp);
1575 /* Advance to the next mask element. Only do this for the
1577 if (n == 0 && mask_flag && forall_tmp->mask)
1579 tree maskindex = forall_tmp->maskindex;
1580 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
1581 maskindex, gfc_index_one_node);
1582 gfc_add_modify_expr (&block, maskindex, tmp);
1585 /* Decrement the loop counter. */
1586 tmp = build2 (MINUS_EXPR, TREE_TYPE (var), count, gfc_index_one_node);
1587 gfc_add_modify_expr (&block, count, tmp);
1589 body = gfc_finish_block (&block);
1591 /* Loop var initialization. */
1592 gfc_init_block (&block);
1593 gfc_add_modify_expr (&block, var, start);
1595 /* Initialize maskindex counter. Only do this before the
1597 if (n == nvar - 1 && mask_flag && forall_tmp->mask)
1598 gfc_add_modify_expr (&block, forall_tmp->maskindex,
1599 gfc_index_zero_node);
1601 /* Initialize the loop counter. */
/* count = (end - start + step) / step, computed in the index type.  */
1602 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1603 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1604 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1605 gfc_add_modify_expr (&block, count, tmp);
1607 /* The loop expression. */
1608 tmp = build1_v (LOOP_EXPR, body);
1609 gfc_add_expr_to_block (&block, tmp);
1611 /* The exit label. */
1612 tmp = build1_v (LABEL_EXPR, exit_label);
1613 gfc_add_expr_to_block (&block, tmp);
1615 body = gfc_finish_block (&block);
1622 /* Generate the body and loops according to MASK_FLAG and NEST_FLAG.
1623 if MASK_FLAG is nonzero, the body is controlled by the masks in the
1624 forall nest; otherwise, the body is not controlled by masks.
1625 if NEST_FLAG is nonzero, generate loops for nested forall, otherwise,
1626 only generate loops for the current forall level. */
1629 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1630 int mask_flag, int nest_flag)
1634 forall_info *forall_tmp;
1635 tree pmask, mask, maskindex;
1637 forall_tmp = nested_forall_info;
1638 /* Generate loops for nested forall. */
/* Walk to the innermost forall level, then wrap BODY outward through
   each enclosing level via the ->outer links.  */
1641 while (forall_tmp->next_nest != NULL)
1642 forall_tmp = forall_tmp->next_nest;
1643 while (forall_tmp != NULL)
1645 /* Generate body with masks' control. */
1648 pmask = forall_tmp->pmask;
1649 mask = forall_tmp->mask;
1650 maskindex = forall_tmp->maskindex;
1654 /* If a mask was specified make the assignment conditional. */
/* The mask temporary may be heap-allocated (a pointer), hence the
   indirect ref before indexing.  */
1656 tmp = build_fold_indirect_ref (mask);
1659 tmp = gfc_build_array_ref (tmp, maskindex);
1661 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1664 nvar = forall_tmp->nvar;
1665 body = gfc_trans_forall_loop (forall_tmp, nvar, body, mask_flag);
1666 forall_tmp = forall_tmp->outer;
/* Non-nested case: only generate the loops of the current level.  */
1671 nvar = forall_tmp->nvar;
1672 body = gfc_trans_forall_loop (forall_tmp, nvar, body, mask_flag);
1679 /* Allocate data for holding a temporary array. Returns either a local
1680 temporary array or a pointer variable. */
1683 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
/* For a compile-time-constant element count, build the array bounds
   [0 .. size-1] directly.  */
1691 if (INTEGER_CST_P (size))
1693 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1694 gfc_index_one_node);
1699 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1700 type = build_array_type (elem_type, type);
/* Small constant-size temporaries live on the stack ...  */
1701 if (gfc_can_put_var_on_stack (bytesize))
1703 gcc_assert (INTEGER_CST_P (size));
1704 tmpvar = gfc_create_var (type, "temp");
/* ... otherwise allocate from the heap via the libgfortran
   internal malloc matching the index kind, and hand the raw
   pointer back through *PDATA so the caller can free it.  */
1709 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1710 *pdata = convert (pvoid_type_node, tmpvar);
1712 args = gfc_chainon_list (NULL_TREE, bytesize);
1713 if (gfc_index_integer_kind == 4)
1714 tmp = gfor_fndecl_internal_malloc;
1715 else if (gfc_index_integer_kind == 8)
1716 tmp = gfor_fndecl_internal_malloc64;
1719 tmp = build_function_call_expr (tmp, args);
1720 tmp = convert (TREE_TYPE (tmpvar), tmp);
1721 gfc_add_modify_expr (pblock, tmpvar, tmp);
1727 /* Generate code to copy the temporary to the actual lhs. */
/* Copy the temporary TMP1 back into EXPR (the lhs), advancing COUNT1
   through the temporary.  If WHEREMASK is non-NULL the copy of each
   element is guarded by the mask at index COUNT3 (sense inverted when
   INVERT is set).  */
1730 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1731 tree count1, gfc_ss *lss, gfc_ss *wheremask, bool invert)
1840 /* Generate code to copy the rhs to the temporary. TMP1 is the address of
1841 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
1842 and should not be freed. WHEREMASK is the conditional execution mask
1843 whose sense may be inverted by INVERT. */
1846 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
1847 tree count1, gfc_ss *lss, gfc_ss *rss,
1848 tree wheremask, bool invert)
1850 stmtblock_t block, body1;
1857 gfc_start_block (&block);
1859 gfc_init_se (&rse, NULL);
1860 gfc_init_se (&lse, NULL);
/* Scalar rhs: a single store into the temporary, no scalarizer.  */
1862 if (lss == gfc_ss_terminator)
1864 gfc_init_block (&body1);
1865 gfc_conv_expr (&rse, expr2);
1866 lse.expr = gfc_build_array_ref (tmp1, count1);
1870 /* Initialize the loop. */
1871 gfc_init_loopinfo (&loop);
1873 /* We may need LSS to determine the shape of the expression. */
1874 gfc_add_ss_to_loop (&loop, lss);
1875 gfc_add_ss_to_loop (&loop, rss);
1877 gfc_conv_ss_startstride (&loop);
1878 gfc_conv_loop_setup (&loop);
1880 gfc_mark_ss_chain_used (rss, 1);
1881 /* Start the loop body. */
1882 gfc_start_scalarized_body (&loop, &body1);
1884 /* Translate the expression. */
1885 gfc_copy_loopinfo_to_se (&rse, &loop);
1887 gfc_conv_expr (&rse, expr2);
1889 /* Form the expression of the temporary. */
1890 lse.expr = gfc_build_array_ref (tmp1, count1);
1893 /* Use the scalar assignment. */
1894 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts.type);
1896 /* Form the mask expression according to the mask tree list. */
/* Guard the store by the WHERE mask element; INVERT flips its sense
   (ELSEWHERE).  */
1899 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1901 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1902 TREE_TYPE (wheremaskexpr),
1904 tmp = fold_build3 (COND_EXPR, void_type_node,
1905 wheremaskexpr, tmp, build_empty_stmt ());
1908 gfc_add_expr_to_block (&body1, tmp);
/* Scalar path: emit the single store and bump count1 once.  */
1910 if (lss == gfc_ss_terminator)
1912 gfc_add_block_to_block (&block, &body1);
1914 /* Increment count1. */
1915 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1916 gfc_index_one_node);
1917 gfc_add_modify_expr (&block, count1, tmp);
1921 /* Increment count1. */
1922 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1923 count1, gfc_index_one_node);
1924 gfc_add_modify_expr (&body1, count1, tmp);
1926 /* Increment count3. */
1929 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1930 count3, gfc_index_one_node);
1931 gfc_add_modify_expr (&body1, count3, tmp);
1934 /* Generate the copying loops. */
1935 gfc_trans_scalarizing_loops (&loop, &body1);
1937 gfc_add_block_to_block (&block, &loop.pre);
1938 gfc_add_block_to_block (&block, &loop.post);
1940 gfc_cleanup_loop (&loop);
1941 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
1942 as tree nodes in SS may not be valid in different scope. */
1945 tmp = gfc_finish_block (&block);
1950 /* Calculate the size of temporary needed in the assignment inside forall.
1951 LSS and RSS are filled in this function. */
/* Return the element count of the temporary needed for the assignment
   EXPR1 = EXPR2 inside a FORALL, emitting any setup code into PBLOCK.
   The walked SS chains are returned through *LSS and *RSS for reuse by
   the caller; ownership stays with the caller (see TODO below).  */
1954 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
1955 stmtblock_t * pblock,
1956 gfc_ss **lss, gfc_ss **rss)
1964 *lss = gfc_walk_expr (expr1);
/* A scalar lhs needs exactly one element.  */
1967 size = gfc_index_one_node;
1968 if (*lss != gfc_ss_terminator)
1970 gfc_init_loopinfo (&loop);
1972 /* Walk the RHS of the expression. */
1973 *rss = gfc_walk_expr (expr2);
1974 if (*rss == gfc_ss_terminator)
1976 /* The rhs is scalar. Add a ss for the expression. */
1977 *rss = gfc_get_ss ();
1978 (*rss)->next = gfc_ss_terminator;
1979 (*rss)->type = GFC_SS_SCALAR;
1980 (*rss)->expr = expr2;
1983 /* Associate the SS with the loop. */
1984 gfc_add_ss_to_loop (&loop, *lss);
1985 /* We don't actually need to add the rhs at this point, but it might
1986 make guessing the loop bounds a bit easier. */
1987 gfc_add_ss_to_loop (&loop, *rss);
1989 /* We only want the shape of the expression, not rest of the junk
1990 generated by the scalarizer. */
1991 loop.array_parameter = 1;
1993 /* Calculate the bounds of the scalarization. */
/* Temporarily disable bounds checking: we only want the extents,
   not the runtime checks the scalarizer would otherwise emit.  */
1994 save_flag = flag_bounds_check;
1995 flag_bounds_check = 0;
1996 gfc_conv_ss_startstride (&loop);
1997 flag_bounds_check = save_flag;
1998 gfc_conv_loop_setup (&loop);
2000 /* Figure out how many elements we need. */
/* size = product over dimensions of (to - from + 1).  */
2001 for (i = 0; i < loop.dimen; i++)
2003 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2004 gfc_index_one_node, loop.from[i]);
2005 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2007 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
/* Evaluate the size once into a temporary so later uses are cheap.  */
2009 gfc_add_block_to_block (pblock, &loop.pre);
2010 size = gfc_evaluate_now (size, pblock);
2011 gfc_add_block_to_block (pblock, &loop.post);
2013 /* TODO: write a function that cleans up a loopinfo without freeing
2014 the SS chains. Currently a NOP. */
2021 /* Calculate the overall iterator number of the nested forall construct. */
/* Accumulate the total iteration count of the whole FORALL nest into a
   runtime variable: INNER_SIZE is summed once per iteration of the
   nest's loops (with INNER_SIZE_BODY re-evaluated each time).  */
2024 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2025 stmtblock_t *inner_size_body, stmtblock_t *block)
2030 /* TODO: optimizing the computing process. */
2031 number = gfc_create_var (gfc_array_index_type, "num");
2032 gfc_add_modify_expr (block, number, gfc_index_zero_node);
2034 gfc_start_block (&body);
2035 if (inner_size_body)
2036 gfc_add_block_to_block (&body, inner_size_body);
/* Inside a nest: num += inner_size, executed once per iteration.  */
2037 if (nested_forall_info)
2038 tmp = build2 (PLUS_EXPR, gfc_array_index_type, number,
2042 gfc_add_modify_expr (&body, number, tmp);
2043 tmp = gfc_finish_block (&body);
2045 /* Generate loops. */
/* Masks are ignored here (mask_flag 0): the temporary must be big
   enough for the unmasked iteration space.  */
2046 if (nested_forall_info != NULL)
2047 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 0, 1);
2049 gfc_add_expr_to_block (block, tmp);
2055 /* Allocate temporary for forall construct. SIZE is the size of temporary
2056 needed. PTEMP1 is returned for space free. */
/* Allocate a temporary of SIZE elements of TYPE in BLOCK; *PTEMP1
   receives the heap pointer to free (or stays NULL for a stack
   temporary — see gfc_do_allocate).  */
2059 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2067 unit = TYPE_SIZE_UNIT (type);
2068 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2071 temp1 = gfc_do_allocate (bytesize, size, ptemp1, block, type);
/* Heap case: temp1 is a pointer; dereference to get the array.  */
2074 tmp = build_fold_indirect_ref (temp1);
2082 /* Allocate temporary for forall construct according to the information in
2083 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2084 assignment inside forall. PTEMP1 is returned for space free. */
2087 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2088 tree inner_size, stmtblock_t * inner_size_body,
2089 stmtblock_t * block, tree * ptemp1)
2093 /* Calculate the total size of temporary needed in forall construct. */
2094 size = compute_overall_iter_number (nested_forall_info, inner_size,
2095 inner_size_body, block);
/* Delegate the actual allocation once the total size is known.  */
2097 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2101 /* Handle assignments inside forall which need temporary.
2103 forall (i=start:end:stride; maskexpr)
2106 (where e,f<i> are arbitrary expressions possibly involving i
2107 and there is a dependency between e<i> and f<i>)
2109 masktmp(:) = maskexpr(:)
2114 for (i = start; i <= end; i += stride)
2118 for (i = start; i <= end; i += stride)
2120 if (masktmp[maskindex++])
2121 tmp[count1++] = f<i>
2125 for (i = start; i <= end; i += stride)
2127 if (masktmp[maskindex++])
2128 e<i> = tmp[count1++]
2133 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2134 tree wheremask, bool invert,
2135 forall_info * nested_forall_info,
2136 stmtblock_t * block)
2144 stmtblock_t inner_size_body;
2146 /* Create vars. count1 is the current iterator number of the nested
2148 count1 = gfc_create_var (gfc_array_index_type, "count1");
2150 /* Count is the wheremask index. */
2153 count = gfc_create_var (gfc_array_index_type, "count");
2154 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2159 /* Initialize count1. */
2160 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2162 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2163 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2164 gfc_init_block (&inner_size_body);
2165 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2168 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
2169 type = gfc_typenode_for_spec (&expr1->ts);
2171 /* Allocate temporary for nested forall construct according to the
2172 information in nested_forall_info and inner_size. */
2173 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2174 &inner_size_body, block, &ptemp1);
2176 /* Generate code to copy the rhs into the temporary.  */
2177 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2180 /* Generate body and loops according to the information in
2181 nested_forall_info. */
2182 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2183 gfc_add_expr_to_block (block, tmp);
/* Reset the counters before replaying the nest for the copy-out.  */
2186 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2190 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2192 /* Generate code to copy the temporary into the lhs.  */
2193 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2196 /* Generate body and loops according to the information in
2197 nested_forall_info. */
2198 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2199 gfc_add_expr_to_block (block, tmp);
2203 /* Free the temporary. */
/* Only reached when the temporary was heap-allocated (ptemp1 set by
   gfc_do_allocate).  */
2204 tmp = gfc_chainon_list (NULL_TREE, ptemp1);
2205 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2206 gfc_add_expr_to_block (block, tmp);
2211 /* Translate pointer assignment inside FORALL which need temporary. */
2214 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2215 forall_info * nested_forall_info,
2216 stmtblock_t * block)
2230 tree tmp, tmp1, ptemp1;
2232 count = gfc_create_var (gfc_array_index_type, "count");
2233 gfc_add_modify_expr (block, count, gfc_index_zero_node);
/* One temporary slot per iteration of the nest.  */
2235 inner_size = integer_one_node;
2236 lss = gfc_walk_expr (expr1);
2237 rss = gfc_walk_expr (expr2);
/* Scalar pointer assignment: stash the target address of each
   iteration in a temporary array of pointers.  */
2238 if (lss == gfc_ss_terminator)
2240 type = gfc_typenode_for_spec (&expr1->ts);
2241 type = build_pointer_type (type);
2243 /* Allocate temporary for nested forall construct according to the
2244 information in nested_forall_info and inner_size. */
2245 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2246 inner_size, NULL, block, &ptemp1);
/* Pass 1: evaluate the rhs pointers into the temporary.  */
2247 gfc_start_block (&body);
2248 gfc_init_se (&lse, NULL);
2249 lse.expr = gfc_build_array_ref (tmp1, count);
2250 gfc_init_se (&rse, NULL);
2251 rse.want_pointer = 1;
2252 gfc_conv_expr (&rse, expr2);
2253 gfc_add_block_to_block (&body, &rse.pre);
2254 gfc_add_modify_expr (&body, lse.expr,
2255 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2256 gfc_add_block_to_block (&body, &rse.post);
2258 /* Increment count. */
2259 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2260 count, gfc_index_one_node);
2261 gfc_add_modify_expr (&body, count, tmp);
2263 tmp = gfc_finish_block (&body);
2265 /* Generate body and loops according to the information in
2266 nested_forall_info. */
2267 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2268 gfc_add_expr_to_block (block, tmp);
/* Pass 2: replay the nest and assign the saved pointers to the lhs.  */
2271 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2273 gfc_start_block (&body);
2274 gfc_init_se (&lse, NULL);
2275 gfc_init_se (&rse, NULL);
2276 rse.expr = gfc_build_array_ref (tmp1, count);
2277 lse.want_pointer = 1;
2278 gfc_conv_expr (&lse, expr1);
2279 gfc_add_block_to_block (&body, &lse.pre);
2280 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2281 gfc_add_block_to_block (&body, &lse.post);
2282 /* Increment count. */
2283 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2284 count, gfc_index_one_node);
2285 gfc_add_modify_expr (&body, count, tmp);
2286 tmp = gfc_finish_block (&body);
2288 /* Generate body and loops according to the information in
2289 nested_forall_info. */
2290 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2291 gfc_add_expr_to_block (block, tmp);
/* Array pointer assignment: save a whole array descriptor per
   iteration instead of a single pointer.  */
2295 gfc_init_loopinfo (&loop);
2297 /* Associate the SS with the loop. */
2298 gfc_add_ss_to_loop (&loop, rss);
2300 /* Setup the scalarizing loops and bounds. */
2301 gfc_conv_ss_startstride (&loop);
2303 gfc_conv_loop_setup (&loop);
2305 info = &rss->data.info;
2306 desc = info->descriptor;
2308 /* Make a new descriptor. */
2309 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2310 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2311 loop.from, loop.to, 1);
2313 /* Allocate temporary for nested forall construct. */
2314 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2315 inner_size, NULL, block, &ptemp1);
/* Pass 1: build the rhs descriptor directly into the temporary slot
   (direct_byref avoids an intermediate copy).  */
2316 gfc_start_block (&body);
2317 gfc_init_se (&lse, NULL);
2318 lse.expr = gfc_build_array_ref (tmp1, count);
2319 lse.direct_byref = 1;
2320 rss = gfc_walk_expr (expr2);
2321 gfc_conv_expr_descriptor (&lse, expr2, rss);
2323 gfc_add_block_to_block (&body, &lse.pre);
2324 gfc_add_block_to_block (&body, &lse.post);
2326 /* Increment count. */
2327 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2328 count, gfc_index_one_node);
2329 gfc_add_modify_expr (&body, count, tmp);
2331 tmp = gfc_finish_block (&body);
2333 /* Generate body and loops according to the information in
2334 nested_forall_info. */
2335 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2336 gfc_add_expr_to_block (block, tmp);
/* Pass 2: replay the nest and copy each saved descriptor to the lhs.  */
2339 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2341 parm = gfc_build_array_ref (tmp1, count);
2342 lss = gfc_walk_expr (expr1);
2343 gfc_init_se (&lse, NULL);
2344 gfc_conv_expr_descriptor (&lse, expr1, lss);
2345 gfc_add_modify_expr (&lse.pre, lse.expr, parm);
2346 gfc_start_block (&body);
2347 gfc_add_block_to_block (&body, &lse.pre);
2348 gfc_add_block_to_block (&body, &lse.post);
2350 /* Increment count. */
2351 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2352 count, gfc_index_one_node);
2353 gfc_add_modify_expr (&body, count, tmp);
2355 tmp = gfc_finish_block (&body);
2357 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2358 gfc_add_expr_to_block (block, tmp);
2360 /* Free the temporary. */
/* Only reached for heap-allocated temporaries (ptemp1 non-NULL).  */
2363 tmp = gfc_chainon_list (NULL_TREE, ptemp1);
2364 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2365 gfc_add_expr_to_block (block, tmp);
2370 /* FORALL and WHERE statements are really nasty, especially when you nest
2371 them. All the rhs of a forall assignment must be evaluated before the
2372 actual assignments are performed. Presumably this also applies to all the
2373 assignments in an inner where statement. */
2375 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2376 linear array, relying on the fact that we process in the same order in all
2379 forall (i=start:end:stride; maskexpr)
2383 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2385 count = ((end + 1 - start) / stride)
2386 masktmp(:) = maskexpr(:)
2389 for (i = start; i <= end; i += stride)
2391 if (masktmp[maskindex++])
2395 for (i = start; i <= end; i += stride)
2397 if (masktmp[maskindex++])
2401 Note that this code only works when there are no dependencies.
2402 Forall loop with array assignments and data dependencies are a real pain,
2403 because the size of the temporary cannot always be determined before the
2404 loop is executed. This problem is compounded by the presence of nested
2409 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2431 gfc_forall_iterator *fa;
2434 gfc_saved_var *saved_vars;
2435 iter_info *this_forall, *iter_tmp;
2436 forall_info *info, *forall_tmp;
2438 gfc_start_block (&block);
2441 /* Count the FORALL index number. */
2442 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2446 /* Allocate the space for var, start, end, step, varexpr. */
2447 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2448 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2449 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2450 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2451 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2452 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2454 /* Allocate the space for info. */
2455 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2457 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2459 gfc_symbol *sym = fa->var->symtree->n.sym;
2461 /* allocate space for this_forall. */
2462 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2464 /* Create a temporary variable for the FORALL index. */
2465 tmp = gfc_typenode_for_spec (&sym->ts);
2466 var[n] = gfc_create_var (tmp, sym->name);
2467 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2469 /* Record it in this_forall. */
2470 this_forall->var = var[n];
2472 /* Replace the index symbol's backend_decl with the temporary decl. */
2473 sym->backend_decl = var[n];
2475 /* Work out the start, end and stride for the loop. */
2476 gfc_init_se (&se, NULL);
2477 gfc_conv_expr_val (&se, fa->start);
2478 /* Record it in this_forall. */
2479 this_forall->start = se.expr;
2480 gfc_add_block_to_block (&block, &se.pre);
2483 gfc_init_se (&se, NULL);
2484 gfc_conv_expr_val (&se, fa->end);
2485 /* Record it in this_forall. */
2486 this_forall->end = se.expr;
2487 gfc_make_safe_expr (&se);
2488 gfc_add_block_to_block (&block, &se.pre);
2491 gfc_init_se (&se, NULL);
2492 gfc_conv_expr_val (&se, fa->stride);
2493 /* Record it in this_forall. */
2494 this_forall->step = se.expr;
2495 gfc_make_safe_expr (&se);
2496 gfc_add_block_to_block (&block, &se.pre);
2499 /* Set the NEXT field of this_forall to NULL. */
2500 this_forall->next = NULL;
2501 /* Link this_forall to the info construct. */
2502 if (info->this_loop == NULL)
2503 info->this_loop = this_forall;
2506 iter_tmp = info->this_loop;
2507 while (iter_tmp->next != NULL)
2508 iter_tmp = iter_tmp->next;
2509 iter_tmp->next = this_forall;
2516 /* Work out the number of elements in the mask array. */
2519 size = gfc_index_one_node;
2520 sizevar = NULL_TREE;
2522 for (n = 0; n < nvar; n++)
2524 if (lenvar && TREE_TYPE (lenvar) != TREE_TYPE (start[n]))
2527 /* size = (end + step - start) / step. */
2528 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2530 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
/* (Continuation of gfc_trans_forall_1 -- the function header is above this
   extract.)  Finish computing the iteration-space size, link this FORALL
   level into the nested_forall_info chain, evaluate the optional mask into
   a compact temporary, translate the body statements, then restore the
   iteration variables and free temporaries.
   NOTE(review): the extract skips original lines; several guards/braces are
   not visible here.  */
2532 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2533 tmp = convert (gfc_array_index_type, tmp);
/* Accumulate this dimension's trip count into the overall SIZE of the
   iteration space (later used to size the mask temporary).  */
2535 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2538 /* Record the nvar and size of current forall level. */
2542 /* Link the current forall level to nested_forall_info. */
2543 forall_tmp = nested_forall_info;
2544 if (forall_tmp == NULL)
2545 nested_forall_info = info;
/* Otherwise append INFO at the tail of the next_nest chain, and point
   its OUTER link back at the previously innermost level.  */
2548 while (forall_tmp->next_nest != NULL)
2549 forall_tmp = forall_tmp->next_nest;
2550 info->outer = forall_tmp;
2551 forall_tmp->next_nest = info;
2554 /* Copy the mask into a temporary variable if required.
2555 For now we assume a mask temporary is needed. */
2558 /* As the mask array can be very big, prefer compact
2560 tree smallest_boolean_type_node
2561 = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2563 /* Allocate the mask temporary. */
/* BYTESIZE = SIZE elements times the storage size of the smallest
   logical kind.  */
2564 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
2565 TYPE_SIZE_UNIT (smallest_boolean_type_node));
2567 mask = gfc_do_allocate (bytesize, size, &pmask, &block,
2568 smallest_boolean_type_node);
/* MASKINDEX walks the flattened mask array while the mask-evaluation
   loop below runs.  */
2570 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2571 /* Record them in the info structure. */
2572 info->pmask = pmask;
2574 info->maskindex = maskindex;
2576 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2578 /* Start of mask assignment loop body. */
2579 gfc_start_block (&body);
2581 /* Evaluate the mask expression. */
2582 gfc_init_se (&se, NULL);
2583 gfc_conv_expr_val (&se, code->expr);
2584 gfc_add_block_to_block (&body, &se.pre);
2586 /* Store the mask. */
2587 se.expr = convert (smallest_boolean_type_node, se.expr);
2590 tmp = build_fold_indirect_ref (mask);
2593 tmp = gfc_build_array_ref (tmp, maskindex);
2594 gfc_add_modify_expr (&body, tmp, se.expr);
2596 /* Advance to the next mask element. */
2597 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
2598 maskindex, gfc_index_one_node);
2599 gfc_add_modify_expr (&body, maskindex, tmp);
2601 /* Generate the loops. */
2602 tmp = gfc_finish_block (&body);
2603 tmp = gfc_trans_nested_forall_loop (info, tmp, 0, 0);
2604 gfc_add_expr_to_block (&block, tmp);
2608 /* No mask was specified. */
2609 maskindex = NULL_TREE;
2610 mask = pmask = NULL_TREE;
/* C walks the statements of the FORALL body.  */
2613 c = code->block->next;
2615 /* TODO: loop merging in FORALL statements. */
2616 /* Now that we've got a copy of the mask, generate the assignment loops. */
2622 /* A scalar or array assignment. */
2623 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2624 /* Temporaries due to array assignment data dependencies introduce
2625 no end of problems. */
2627 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2628 nested_forall_info, &block);
2631 /* Use the normal assignment copying routines. */
2632 assign = gfc_trans_assignment (c->expr, c->expr2);
2634 /* Generate body and loops. */
2635 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1, 1);
2636 gfc_add_expr_to_block (&block, tmp);
2642 /* Translate WHERE or WHERE construct nested in FORALL. */
2643 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2646 /* Pointer assignment inside FORALL. */
2647 case EXEC_POINTER_ASSIGN:
2648 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2650 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2651 nested_forall_info, &block);
2654 /* Use the normal assignment copying routines. */
2655 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2657 /* Generate body and loops. */
2658 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign,
2660 gfc_add_expr_to_block (&block, tmp);
/* Nested FORALL: recurse, passing the accumulated forall_info chain.  */
2665 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2666 gfc_add_expr_to_block (&block, tmp);
2669 /* Explicit subroutine calls are prevented by the frontend but interface
2670 assignments can legitimately produce them. */
2671 case EXEC_ASSIGN_CALL:
2672 assign = gfc_trans_call (c, true);
2673 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1, 1);
2674 gfc_add_expr_to_block (&block, tmp);
2684 /* Restore the original index variables. */
2685 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2686 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2688 /* Free the space for var, start, end, step, varexpr. */
2694 gfc_free (saved_vars);
2698 /* Free the temporary for the mask. */
/* NOTE(review): presumably guarded by "if (pmask)" in the elided lines --
   confirm against the full source.  */
2699 tmp = gfc_chainon_list (NULL_TREE, pmask);
2700 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2701 gfc_add_expr_to_block (&block, tmp);
2704 pushdecl (maskindex);
2706 return gfc_finish_block (&block);
2710 /* Translate the FORALL statement or construct. */
/* Public entry point: delegates to gfc_trans_forall_1 with a NULL
   nested_forall_info, i.e. this is the outermost FORALL level.  */
2712 tree gfc_trans_forall (gfc_code * code)
2714 return gfc_trans_forall_1 (code, NULL);
2718 /* Evaluate the WHERE mask expression, copy its value to a temporary.
2719 If the WHERE construct is nested in FORALL, compute the overall temporary
2720 needed by the WHERE mask expression multiplied by the iterator number of
2722 ME is the WHERE mask expression.
2723 MASK is the current execution mask upon input, whose sense may or may
2724 not be inverted as specified by the INVERT argument.
2725 CMASK is the updated execution mask on output, or NULL if not required.
2726 PMASK is the pending execution mask on output, or NULL if not required.
2727 BLOCK is the block in which to place the condition evaluation loops. */
2730 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2731 tree mask, bool invert, tree cmask, tree pmask,
2732 tree mask_type, stmtblock_t * block)
2737 stmtblock_t body, body1;
2738 tree count, cond, mtmp;
2741 gfc_init_loopinfo (&loop);
/* ME is walked twice: LSS decides whether the mask is scalar
   (gfc_ss_terminator) and contributes shape; RSS drives the actual
   scalarized evaluation through RSE.  */
2743 lss = gfc_walk_expr (me);
2744 rss = gfc_walk_expr (me);
2746 /* Variable to index the temporary. */
2747 count = gfc_create_var (gfc_array_index_type, "count");
2748 /* Initialize count. */
2749 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2751 gfc_start_block (&body);
2753 gfc_init_se (&rse, NULL);
2754 gfc_init_se (&lse, NULL);
/* Scalar mask expression: no scalarization loop is needed.  */
2756 if (lss == gfc_ss_terminator)
2758 gfc_init_block (&body1)
2762 /* Initialize the loop. */
2763 gfc_init_loopinfo (&loop);
2765 /* We may need LSS to determine the shape of the expression. */
2766 gfc_add_ss_to_loop (&loop, lss);
2767 gfc_add_ss_to_loop (&loop, rss);
2769 gfc_conv_ss_startstride (&loop);
2770 gfc_conv_loop_setup (&loop);
2772 gfc_mark_ss_chain_used (rss, 1);
2773 /* Start the loop body. */
2774 gfc_start_scalarized_body (&loop, &body1);
2776 /* Translate the expression. */
2777 gfc_copy_loopinfo_to_se (&rse, &loop);
2779 gfc_conv_expr (&rse, me);
2782 /* Variable to evaluate mask condition. */
2783 cond = gfc_create_var (mask_type, "cond");
/* MTMP holds the (possibly inverted) element of the incoming MASK; only
   needed when an incoming mask must be combined into CMASK/PMASK.  */
2784 if (mask && (cmask || pmask))
2785 mtmp = gfc_create_var (mask_type, "mask");
2786 else mtmp = NULL_TREE;
2788 gfc_add_block_to_block (&body1, &lse.pre);
2789 gfc_add_block_to_block (&body1, &rse.pre);
/* COND = value of the mask expression for this element.  */
2791 gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));
2793 if (mask && (cmask || pmask))
2795 tmp = gfc_build_array_ref (mask, count);
/* Invert the incoming mask element when INVERT is set (the guard for
   this line is in the elided source).  */
2797 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
2798 gfc_add_modify_expr (&body1, mtmp, tmp);
/* Control mask for this clause: stored into CMASK[count], ANDed with
   MTMP when an incoming mask is present.  */
2803 tmp1 = gfc_build_array_ref (cmask, count);
2806 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2807 gfc_add_modify_expr (&body1, tmp1, tmp);
/* Pending mask for the following ELSEWHERE clauses: !COND, likewise
   ANDed with MTMP when present, stored into PMASK[count].  */
2812 tmp1 = gfc_build_array_ref (pmask, count);
2813 tmp = build1 (TRUTH_NOT_EXPR, mask_type, cond);
2815 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2816 gfc_add_modify_expr (&body1, tmp1, tmp);
2819 gfc_add_block_to_block (&body1, &lse.post);
2820 gfc_add_block_to_block (&body1, &rse.post);
2822 if (lss == gfc_ss_terminator)
2824 gfc_add_block_to_block (&body, &body1);
2828 /* Increment count. */
2829 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
2830 gfc_index_one_node);
2831 gfc_add_modify_expr (&body1, count, tmp1);
2833 /* Generate the copying loops. */
2834 gfc_trans_scalarizing_loops (&loop, &body1);
2836 gfc_add_block_to_block (&body, &loop.pre);
2837 gfc_add_block_to_block (&body, &loop.post);
2839 gfc_cleanup_loop (&loop);
2840 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2841 as tree nodes in SS may not be valid in different scope. */
2844 tmp1 = gfc_finish_block (&body);
2845 /* If the WHERE construct is inside FORALL, fill the full temporary. */
2846 if (nested_forall_info != NULL)
2847 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1, 1);
2849 gfc_add_expr_to_block (block, tmp1);
2853 /* Translate an assignment statement in a WHERE statement or construct
2854 statement. The MASK expression is used to control which elements
2855 of EXPR1 shall be assigned. The sense of MASK is specified by
/* the INVERT argument (comment continues in elided lines).  COUNT1 and
   COUNT2 index the flattened mask array across the scalarization
   loop(s); COUNT2 is used for the temp->lhs copy-out loop when a
   dependency temporary was needed.  Returns the generated statement
   block.  */
2859 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
2860 tree mask, bool invert,
2861 tree count1, tree count2)
2866 gfc_ss *lss_section;
2873 tree index, maskexpr;
2876 /* TODO: handle this special case.
2877 Special case a single function returning an array. */
2878 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
2880 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
2886 /* Assignment of the form lhs = rhs. */
2887 gfc_start_block (&block);
2889 gfc_init_se (&lse, NULL);
2890 gfc_init_se (&rse, NULL);
2893 lss = gfc_walk_expr (expr1);
2896 /* In each where-assign-stmt, the mask-expr and the variable being
2897 defined shall be arrays of the same shape. */
2898 gcc_assert (lss != gfc_ss_terminator);
2900 /* The assignment needs scalarization. */
2903 /* Find a non-scalar SS from the lhs. */
2904 while (lss_section != gfc_ss_terminator
2905 && lss_section->type != GFC_SS_SECTION)
2906 lss_section = lss_section->next;
2908 gcc_assert (lss_section != gfc_ss_terminator);
2910 /* Initialize the scalarizer. */
2911 gfc_init_loopinfo (&loop);
/* Walk the rhs; a scalar rhs still needs an SS entry so the scalarizer
   can evaluate it once per iteration.  */
2914 rss = gfc_walk_expr (expr2);
2915 if (rss == gfc_ss_terminator)
2917 /* The rhs is scalar. Add a ss for the expression. */
2918 rss = gfc_get_ss ();
2919 rss->next = gfc_ss_terminator;
2920 rss->type = GFC_SS_SCALAR;
2924 /* Associate the SS with the loop. */
2925 gfc_add_ss_to_loop (&loop, lss);
2926 gfc_add_ss_to_loop (&loop, rss);
2928 /* Calculate the bounds of the scalarization. */
2929 gfc_conv_ss_startstride (&loop);
2931 /* Resolve any data dependencies in the statement. */
/* May create loop.temp_ss, forcing a two-pass assignment (rhs -> temp,
   then temp -> lhs, handled further below).  */
2932 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
2934 /* Setup the scalarizing loops. */
2935 gfc_conv_loop_setup (&loop);
2937 /* Setup the gfc_se structures. */
2938 gfc_copy_loopinfo_to_se (&lse, &loop);
2939 gfc_copy_loopinfo_to_se (&rse, &loop);
2942 gfc_mark_ss_chain_used (rss, 1);
2943 if (loop.temp_ss == NULL)
2946 gfc_mark_ss_chain_used (lss, 1);
/* With a temporary, the lhs/temp chains are used in both passes (flag
   3 rather than 1).  */
2950 lse.ss = loop.temp_ss;
2951 gfc_mark_ss_chain_used (lss, 3);
2952 gfc_mark_ss_chain_used (loop.temp_ss, 3);
2955 /* Start the scalarized loop body. */
2956 gfc_start_scalarized_body (&loop, &body);
2958 /* Translate the expression. */
2959 gfc_conv_expr (&rse, expr2);
2960 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
2962 gfc_conv_tmp_array_ref (&lse);
2963 gfc_advance_se_ss_chain (&lse);
2966 gfc_conv_expr (&lse, expr1);
2968 /* Form the mask expression according to the mask. */
/* INDEX is set in the elided lines; MASKEXPR is the current mask
   element, negated when INVERT is set (guard elided).  */
2970 maskexpr = gfc_build_array_ref (mask, index);
2972 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
2974 /* Use the scalar assignment as is. */
2975 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts.type);
/* Only perform the element assignment where the mask is true.  */
2976 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
2978 gfc_add_expr_to_block (&body, tmp);
2980 if (lss == gfc_ss_terminator)
2982 /* Increment count1. */
2983 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2984 count1, gfc_index_one_node);
2985 gfc_add_modify_expr (&body, count1, tmp);
2987 /* Use the scalar assignment as is. */
2988 gfc_add_block_to_block (&block, &body);
2992 gcc_assert (lse.ss == gfc_ss_terminator
2993 && rse.ss == gfc_ss_terminator);
2995 if (loop.temp_ss != NULL)
2997 /* Increment count1 before finish the main body of a scalarized
2999 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3000 count1, gfc_index_one_node);
3001 gfc_add_modify_expr (&body, count1, tmp);
3002 gfc_trans_scalarized_loop_boundary (&loop, &body);
3004 /* We need to copy the temporary to the actual lhs. */
3005 gfc_init_se (&lse, NULL);
3006 gfc_init_se (&rse, NULL);
3007 gfc_copy_loopinfo_to_se (&lse, &loop);
3008 gfc_copy_loopinfo_to_se (&rse, &loop);
3010 rse.ss = loop.temp_ss;
3013 gfc_conv_tmp_array_ref (&rse);
3014 gfc_advance_se_ss_chain (&rse);
3015 gfc_conv_expr (&lse, expr1);
3017 gcc_assert (lse.ss == gfc_ss_terminator
3018 && rse.ss == gfc_ss_terminator);
3020 /* Form the mask expression according to the mask tree list. */
/* Copy-out pass re-reads the mask, indexed afresh (INDEX set in the
   elided lines).  */
3022 maskexpr = gfc_build_array_ref (mask, index);
3024 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3027 /* Use the scalar assignment as is. */
3028 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts.type);
3029 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3030 gfc_add_expr_to_block (&body, tmp);
3032 /* Increment count2. */
3033 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3034 count2, gfc_index_one_node);
3035 gfc_add_modify_expr (&body, count2, tmp);
3039 /* Increment count1. */
3040 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3041 count1, gfc_index_one_node);
3042 gfc_add_modify_expr (&body, count1, tmp);
3045 /* Generate the copying loops. */
3046 gfc_trans_scalarizing_loops (&loop, &body);
3048 /* Wrap the whole thing up. */
3049 gfc_add_block_to_block (&block, &loop.pre);
3050 gfc_add_block_to_block (&block, &loop.post);
3051 gfc_cleanup_loop (&loop);
3054 return gfc_finish_block (&block);
3058 /* Translate the WHERE construct or statement.
3059 This function can be called iteratively to translate the nested WHERE
3060 construct or statement.
3061 MASK is the control mask. */
/* Walks the chain of WHERE / masked-ELSEWHERE / ELSEWHERE clauses
   (cblock->block links), allocating control (cmask) and pending (pmask)
   mask temporaries as needed, emitting each clause's body under its
   mask, and freeing the temporaries at the end.  INVERT flips the sense
   of MASK.  NESTED_FORALL_INFO is non-NULL when the WHERE is inside a
   FORALL.  */
3064 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3065 forall_info * nested_forall_info, stmtblock_t * block)
3067 stmtblock_t inner_size_body;
3068 tree inner_size, size;
3076 tree count1, count2;
3080 tree pcmask = NULL_TREE;
3081 tree ppmask = NULL_TREE;
3082 tree cmask = NULL_TREE;
3083 tree pmask = NULL_TREE;
3085 /* the WHERE statement or the WHERE construct statement. */
3086 cblock = code->block;
3088 /* As the mask array can be very big, prefer compact boolean types. */
3089 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3091 /* Determine which temporary masks are needed. */
/* The need_cmask/need_pmask flags depend on how many WHERE/ELSEWHERE
   clauses exist and whether their bodies are empty.  */
3094 /* One clause: No ELSEWHEREs. */
3095 need_cmask = (cblock->next != 0);
3098 else if (cblock->block->block)
3100 /* Three or more clauses: Conditional ELSEWHEREs. */
3104 else if (cblock->next)
3106 /* Two clauses, the first non-empty. */
/* A pending mask is only needed if there is an incoming MASK to
   combine and the ELSEWHERE body is non-empty.  */
3108 need_pmask = (mask != NULL_TREE
3109 && cblock->block->next != 0);
3111 else if (!cblock->block->next)
3113 /* Two clauses, both empty. */
3117 /* Two clauses, the first empty, the second non-empty. */
3120 need_cmask = (cblock->block->expr != 0);
3129 if (need_cmask || need_pmask)
3131 /* Calculate the size of temporary needed by the mask-expr. */
3132 gfc_init_block (&inner_size_body);
3133 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3134 &inner_size_body, &lss, &rss);
3136 /* Calculate the total size of temporary needed. */
/* Inside FORALL the mask temporary is replicated across all FORALL
   iterations, hence the overall iteration count.  */
3137 size = compute_overall_iter_number (nested_forall_info, inner_size,
3138 &inner_size_body, block);
3140 /* Allocate temporary for WHERE mask if needed. */
3142 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3145 /* Allocate temporary for !mask if needed. */
3147 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3153 /* Each time around this loop, the where clause is conditional
3154 on the value of mask and invert, which are updated at the
3155 bottom of the loop. */
3157 /* Has mask-expr. */
3160 /* Ensure that the WHERE mask will be evaluated exactly once.
3161 If there are no statements in this WHERE/ELSEWHERE clause,
3162 then we don't need to update the control mask (cmask).
3163 If this is the last clause of the WHERE construct, then
3164 we don't need to update the pending control mask (pmask). */
3166 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3168 cblock->next ? cmask : NULL_TREE,
3169 cblock->block ? pmask : NULL_TREE,
3172 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3174 (cblock->next || cblock->block)
3175 ? cmask : NULL_TREE,
3176 NULL_TREE, mask_type, block);
3180 /* It's a final elsewhere-stmt. No mask-expr is present. */
3184 /* The body of this where clause are controlled by cmask with
3185 sense specified by invert. */
3187 /* Get the assignment statement of a WHERE statement, or the first
3188 statement in where-body-construct of a WHERE construct. */
3189 cnext = cblock->next;
3194 /* WHERE assignment statement. */
3196 expr1 = cnext->expr;
3197 expr2 = cnext->expr2;
3198 if (nested_forall_info != NULL)
3200 need_temp = gfc_check_dependency (expr1, expr2, 0);
3202 gfc_trans_assign_need_temp (expr1, expr2,
3204 nested_forall_info, block);
3207 /* Variables to control maskexpr. */
3208 count1 = gfc_create_var (gfc_array_index_type, "count1");
3209 count2 = gfc_create_var (gfc_array_index_type, "count2");
3210 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3211 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3213 tmp = gfc_trans_where_assign (expr1, expr2,
/* Inside FORALL the masked assignment is wrapped in the FORALL
   iteration loops.  */
3217 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3219 gfc_add_expr_to_block (block, tmp);
3224 /* Variables to control maskexpr. */
3225 count1 = gfc_create_var (gfc_array_index_type, "count1");
3226 count2 = gfc_create_var (gfc_array_index_type, "count2");
3227 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3228 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3230 tmp = gfc_trans_where_assign (expr1, expr2,
3233 gfc_add_expr_to_block (block, tmp);
3238 /* WHERE or WHERE construct is part of a where-body-construct. */
/* Nested WHERE: recurse with the current control mask.  */
3240 gfc_trans_where_2 (cnext, cmask, invert,
3241 nested_forall_info, block);
3248 /* The next statement within the same where-body-construct. */
3249 cnext = cnext->next;
3251 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3252 cblock = cblock->block;
3253 if (mask == NULL_TREE)
3255 /* If we're the initial WHERE, we can simply invert the sense
3256 of the current mask to obtain the "mask" for the remaining
3263 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3269 /* If we allocated a pending mask array, deallocate it now. */
3272 tree args = gfc_chainon_list (NULL_TREE, ppmask);
3273 tmp = build_function_call_expr (gfor_fndecl_internal_free, args);
3274 gfc_add_expr_to_block (block, tmp);
3277 /* If we allocated a current mask array, deallocate it now. */
3280 tree args = gfc_chainon_list (NULL_TREE, pcmask);
3281 tmp = build_function_call_expr (gfor_fndecl_internal_free, args);
3282 gfc_add_expr_to_block (block, tmp);
3286 /* Translate a simple WHERE construct or statement without dependencies.
3287 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3288 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3289 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
/* Fast path: since there are no dependencies, no mask temporary is
   needed -- the condition, the then-assignment and the optional
   else-assignment are all fused into ONE scalarized loop whose body is
   a single COND_EXPR.  */
3292 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3294 stmtblock_t block, body;
3295 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3296 tree tmp, cexpr, tstmt, estmt;
3297 gfc_ss *css, *tdss, *tsss;
3298 gfc_se cse, tdse, tsse, edse, esse;
/* COND = mask expression; TDST/TSRC = then-clause lhs/rhs;
   EDST/ESRC = else-clause lhs/rhs (NULL when no ELSEWHERE).  */
3303 cond = cblock->expr;
3304 tdst = cblock->next->expr;
3305 tsrc = cblock->next->expr2;
3306 edst = eblock ? eblock->next->expr : NULL;
3307 esrc = eblock ? eblock->next->expr2 : NULL;
3309 gfc_start_block (&block);
3310 gfc_init_loopinfo (&loop);
3312 /* Handle the condition. */
3313 gfc_init_se (&cse, NULL);
3314 css = gfc_walk_expr (cond);
3315 gfc_add_ss_to_loop (&loop, css);
3317 /* Handle the then-clause. */
3318 gfc_init_se (&tdse, NULL);
3319 gfc_init_se (&tsse, NULL);
3320 tdss = gfc_walk_expr (tdst);
3321 tsss = gfc_walk_expr (tsrc);
/* A scalar rhs still needs an SS entry for the scalarizer.  */
3322 if (tsss == gfc_ss_terminator)
3324 tsss = gfc_get_ss ();
3325 tsss->next = gfc_ss_terminator;
3326 tsss->type = GFC_SS_SCALAR;
3329 gfc_add_ss_to_loop (&loop, tdss);
3330 gfc_add_ss_to_loop (&loop, tsss);
3334 /* Handle the else clause. */
3335 gfc_init_se (&edse, NULL);
3336 gfc_init_se (&esse, NULL);
3337 edss = gfc_walk_expr (edst);
3338 esss = gfc_walk_expr (esrc);
3339 if (esss == gfc_ss_terminator)
3341 esss = gfc_get_ss ();
3342 esss->next = gfc_ss_terminator;
3343 esss->type = GFC_SS_SCALAR;
3346 gfc_add_ss_to_loop (&loop, edss);
3347 gfc_add_ss_to_loop (&loop, esss);
3350 gfc_conv_ss_startstride (&loop);
3351 gfc_conv_loop_setup (&loop);
3353 gfc_mark_ss_chain_used (css, 1);
3354 gfc_mark_ss_chain_used (tdss, 1);
3355 gfc_mark_ss_chain_used (tsss, 1);
3358 gfc_mark_ss_chain_used (edss, 1);
3359 gfc_mark_ss_chain_used (esss, 1);
3362 gfc_start_scalarized_body (&loop, &body);
3364 gfc_copy_loopinfo_to_se (&cse, &loop);
3365 gfc_copy_loopinfo_to_se (&tdse, &loop);
3366 gfc_copy_loopinfo_to_se (&tsse, &loop);
3372 gfc_copy_loopinfo_to_se (&edse, &loop);
3373 gfc_copy_loopinfo_to_se (&esse, &loop);
/* Evaluate the condition once per element; CEXPR is set from CSE in
   the elided lines.  */
3378 gfc_conv_expr (&cse, cond);
3379 gfc_add_block_to_block (&body, &cse.pre);
3382 gfc_conv_expr (&tsse, tsrc);
3383 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3385 gfc_conv_tmp_array_ref (&tdse);
3386 gfc_advance_se_ss_chain (&tdse);
3389 gfc_conv_expr (&tdse, tdst);
3393 gfc_conv_expr (&esse, esrc);
3394 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3396 gfc_conv_tmp_array_ref (&edse);
3397 gfc_advance_se_ss_chain (&edse);
3400 gfc_conv_expr (&edse, edst);
/* if (cond) then-assign else else-assign (or empty stmt).  */
3403 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts.type);
3404 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts.type)
3405 : build_empty_stmt ();
3406 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3407 gfc_add_expr_to_block (&body, tmp);
3408 gfc_add_block_to_block (&body, &cse.post);
3410 gfc_trans_scalarizing_loops (&loop, &body);
3411 gfc_add_block_to_block (&block, &loop.pre);
3412 gfc_add_block_to_block (&block, &loop.post);
3413 gfc_cleanup_loop (&loop);
3415 return gfc_finish_block (&block);
3418 /* As the WHERE or WHERE construct statement can be nested, we call
3419 gfc_trans_where_2 to do the translation, and pass the initial
3420 NULL values for both the control mask and the pending control mask. */
/* Entry point for WHERE.  First tries the dependence-free fast paths
   (gfc_trans_where_3) for the single-assignment WHERE and the
   WHERE/ELSEWHERE pair; otherwise falls back to the general
   gfc_trans_where_2 machinery with mask temporaries.  */
3423 gfc_trans_where (gfc_code * code)
3429 cblock = code->block;
/* Fast-path candidate: the clause body is exactly one assignment.  */
3431 && cblock->next->op == EXEC_ASSIGN
3432 && !cblock->next->next)
3434 eblock = cblock->block;
3437 /* A simple "WHERE (cond) x = y" statement or block is
3438 dependence free if cond is not dependent upon writing x,
3439 and the source y is unaffected by the destination x. */
3440 if (!gfc_check_dependency (cblock->next->expr,
3442 && !gfc_check_dependency (cblock->next->expr,
3443 cblock->next->expr2, 0))
3444 return gfc_trans_where_3 (cblock, NULL);
/* WHERE + unconditional single-assignment ELSEWHERE.  */
3446 else if (!eblock->expr
3449 && eblock->next->op == EXEC_ASSIGN
3450 && !eblock->next->next)
3452 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3453 block is dependence free if cond is not dependent on writes
3454 to x1 and x2, y1 is not dependent on writes to x2, and y2
3455 is not dependent on writes to x1, and both y's are not
3456 dependent upon their own x's. */
3457 if (!gfc_check_dependency(cblock->next->expr,
3459 && !gfc_check_dependency(eblock->next->expr,
3461 && !gfc_check_dependency(cblock->next->expr,
3462 eblock->next->expr2, 0)
3463 && !gfc_check_dependency(eblock->next->expr,
3464 cblock->next->expr2, 0)
3465 && !gfc_check_dependency(cblock->next->expr,
3466 cblock->next->expr2, 0)
3467 && !gfc_check_dependency(eblock->next->expr,
3468 eblock->next->expr2, 0))
3469 return gfc_trans_where_3 (cblock, eblock);
/* General case: no masks yet, not inside a FORALL.  */
3473 gfc_start_block (&block);
3475 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3477 return gfc_finish_block (&block);
3481 /* CYCLE a DO loop. The label decl has already been created by
3482 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
3483 node at the head of the loop. We must mark the label as used. */
3486 gfc_trans_cycle (gfc_code * code)
/* backend_decl is a TREE_LIST: PURPOSE = cycle label, VALUE = exit
   label (see gfc_trans_exit below).  */
3490 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3491 TREE_USED (cycle_label) = 1;
3492 return build1_v (GOTO_EXPR, cycle_label);
3496 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3497 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
/* loop (comment continues in elided lines).  Emits a GOTO to the
   loop's exit label after marking it used.  */
3501 gfc_trans_exit (gfc_code * code)
3505 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3506 TREE_USED (exit_label) = 1;
3507 return build1_v (GOTO_EXPR, exit_label);
3511 /* Translate the ALLOCATE statement. */
/* For each item in the alloc_list: arrays are handled by
   gfc_array_allocate; scalars/derived types call the library
   allocator (gfor_fndecl_allocate) with the object size.  With STAT=
   present, a local "stat" variable is checked after each allocation
   and control jumps to error_label on failure; the final value is
   copied into the user's STAT variable.  */
3514 gfc_trans_allocate (gfc_code * code)
3526 if (!code->ext.alloc_list)
3529 gfc_start_block (&block);
/* STAT= present: allocate through a pointer to a local int4 status.  */
3533 tree gfc_int4_type_node = gfc_get_int_type (4);
3535 stat = gfc_create_var (gfc_int4_type_node, "stat");
3536 pstat = build_fold_addr_expr (stat);
3538 error_label = gfc_build_label_decl (NULL_TREE);
3539 TREE_USED (error_label) = 1;
/* No STAT=: pass a null status pointer; failures abort in the
   library.  */
3543 pstat = integer_zero_node;
3544 stat = error_label = NULL_TREE;
3548 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3552 gfc_init_se (&se, NULL);
3553 gfc_start_block (&se.pre);
3555 se.want_pointer = 1;
3556 se.descriptor_only = 1;
3557 gfc_conv_expr (&se, expr);
/* gfc_array_allocate returns false for non-array objects; handle the
   scalar/derived case below.  */
3559 if (!gfc_array_allocate (&se, expr, pstat))
3561 /* A scalar or derived type. */
3564 val = gfc_create_var (ppvoid_type_node, "ptr");
3565 tmp = gfc_build_addr_expr (ppvoid_type_node, se.expr);
3566 gfc_add_modify_expr (&se.pre, val, tmp);
3568 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
/* Deferred-length character: the static size is unknown; use the
   runtime string length instead.  */
3570 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3571 tmp = se.string_length;
/* Call _gfortran_allocate (ptr, size, stat).  */
3573 parm = gfc_chainon_list (NULL_TREE, val);
3574 parm = gfc_chainon_list (parm, tmp);
3575 parm = gfc_chainon_list (parm, pstat);
3576 tmp = build_function_call_expr (gfor_fndecl_allocate, parm);
3577 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT=, bail out to error_label on the first failure
   (stat != 0).  */
3581 tmp = build1_v (GOTO_EXPR, error_label);
3582 parm = fold_build2 (NE_EXPR, boolean_type_node,
3583 stat, build_int_cst (TREE_TYPE (stat), 0));
3584 tmp = fold_build3 (COND_EXPR, void_type_node,
3585 parm, tmp, build_empty_stmt ());
3586 gfc_add_expr_to_block (&se.pre, tmp);
3590 tmp = gfc_finish_block (&se.pre);
3591 gfc_add_expr_to_block (&block, tmp);
3594 /* Assign the value to the status variable. */
/* NOTE(review): presumably guarded by "if (code->expr)" in the elided
   lines -- confirm against the full source.  */
3597 tmp = build1_v (LABEL_EXPR, error_label);
3598 gfc_add_expr_to_block (&block, tmp);
3600 gfc_init_se (&se, NULL);
3601 gfc_conv_expr_lhs (&se, code->expr);
3602 tmp = convert (TREE_TYPE (se.expr), stat);
3603 gfc_add_modify_expr (&block, se.expr, tmp);
3606 return gfc_finish_block (&block);
3610 /* Translate a DEALLOCATE statement.
3611 There are two cases within the for loop:
3612 (1) deallocate(a1, a2, a3) is translated into the following sequence
3613 _gfortran_deallocate(a1, 0B)
3614 _gfortran_deallocate(a2, 0B)
3615 _gfortran_deallocate(a3, 0B)
3616 where the STAT= variable is passed a NULL pointer.
3617 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
3619 _gfortran_deallocate(a1, &stat)
3620 astat = astat + stat
3621 _gfortran_deallocate(a2, &stat)
3622 astat = astat + stat
3623 _gfortran_deallocate(a3, &stat)
3624 astat = astat + stat
3625 In case (1), we simply return at the end of the for loop. In case (2)
3626 we set STAT= astat. */
3628 gfc_trans_deallocate (gfc_code * code)
3633 tree apstat, astat, parm, pstat, stat, tmp, type, var;
3636 gfc_start_block (&block);
3638 /* Set up the optional STAT= */
3641 tree gfc_int4_type_node = gfc_get_int_type (4);
3643 /* Variable used with the library call. */
3644 stat = gfc_create_var (gfc_int4_type_node, "stat");
3645 pstat = build_fold_addr_expr (stat);
3647 /* Running total of possible deallocation failures. */
3648 astat = gfc_create_var (gfc_int4_type_node, "astat");
3649 apstat = build_fold_addr_expr (astat);
3651 /* Initialize astat to 0. */
3652 gfc_add_modify_expr (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
/* No STAT=: pass null pointers to the library calls.  */
3656 pstat = apstat = null_pointer_node;
3657 stat = astat = NULL_TREE;
3660 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3663 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3665 gfc_init_se (&se, NULL);
3666 gfc_start_block (&se.pre);
3668 se.want_pointer = 1;
3669 se.descriptor_only = 1;
3670 gfc_conv_expr (&se, expr);
/* Array objects have their own deallocation helper.  */
3673 tmp = gfc_array_deallocate (se.expr, pstat);
/* Scalar/derived: call _gfortran_deallocate (ptr, stat) through a
   typed pointer temporary.  */
3676 type = build_pointer_type (TREE_TYPE (se.expr));
3677 var = gfc_create_var (type, "ptr");
3678 tmp = gfc_build_addr_expr (type, se.expr);
3679 gfc_add_modify_expr (&se.pre, var, tmp);
3681 parm = gfc_chainon_list (NULL_TREE, var);
3682 parm = gfc_chainon_list (parm, pstat);
3683 tmp = build_function_call_expr (gfor_fndecl_deallocate, parm);
3686 gfc_add_expr_to_block (&se.pre, tmp);
3688 /* Keep track of the number of failed deallocations by adding stat
3689 of the last deallocation to the running total. */
/* NOTE(review): APSTAT is reused here as the sum expression
   astat + stat, not the address computed earlier.  */
3692 apstat = build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
3693 gfc_add_modify_expr (&se.pre, astat, apstat);
3696 tmp = gfc_finish_block (&se.pre);
3697 gfc_add_expr_to_block (&block, tmp);
3701 /* Assign the value to the status variable. */
/* Copy the accumulated failure count into the user's STAT= variable
   (guard presumably "if (code->expr)" in the elided lines).  */
3704 gfc_init_se (&se, NULL);
3705 gfc_conv_expr_lhs (&se, code->expr);
3706 tmp = convert (TREE_TYPE (se.expr), astat);
3707 gfc_add_modify_expr (&block, se.expr, tmp);
3710 return gfc_finish_block (&block);