1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
27 #include "coretypes.h"
29 #include "tree-gimple.h"
36 #include "trans-stmt.h"
37 #include "trans-types.h"
38 #include "trans-array.h"
39 #include "trans-const.h"
41 #include "dependency.h"
/* Bookkeeping for one FORALL iterator variable; iterators of the same
   FORALL are chained through 'next'.
   NOTE(review): this listing appears gapped -- several struct members
   are missing from this view.  */
43 typedef struct iter_info
49 struct iter_info *next;
/* One nesting level of a FORALL construct; 'outer' points at the
   enclosing FORALL, 'next_nest' at the next nested one.  */
53 typedef struct forall_info
61 struct forall_info *outer;
62 struct forall_info *next_nest;
/* Forward declaration: WHERE translation is mutually recursive with
   FORALL handling.  */
66 static void gfc_trans_where_2 (gfc_code *, tree, bool,
67 forall_info *, stmtblock_t *);
69 /* Translate a F95 label number to a LABEL_EXPR. */
72 gfc_trans_label_here (gfc_code * code)
/* Emit the LABEL_EXPR for the statement label attached to this code node.  */
74 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
78 /* Given a variable expression which has been ASSIGNed to, find the decl
79 containing the auxiliary variables. For variables in common blocks this
83 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only valid for symbols that have appeared in an ASSIGN statement.  */
85 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
86 gfc_conv_expr (se, expr);
87 /* Deals with variable in common block. Get the field declaration. */
88 if (TREE_CODE (se->expr) == COMPONENT_REF)
89 se->expr = TREE_OPERAND (se->expr, 1);
90 /* Deals with dummy argument. Get the parameter declaration. */
91 else if (TREE_CODE (se->expr) == INDIRECT_REF)
92 se->expr = TREE_OPERAND (se->expr, 0);
95 /* Translate a label assignment statement. */
98 gfc_trans_label_assign (gfc_code * code)
108 /* Start a new block. */
109 gfc_init_se (&se, NULL);
110 gfc_start_block (&se.pre);
111 gfc_conv_label_variable (&se, code->expr);
/* Auxiliary decls attached to the ASSIGN variable: a string length and
   an address slot.  A length of -1 marks "branch target", a
   non-negative length marks "FORMAT label" (string case below).  */
113 len = GFC_DECL_STRING_LEN (se.expr);
114 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
116 label_tree = gfc_get_label_decl (code->label);
118 if (code->label->defined == ST_LABEL_TARGET)
120 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
121 len_tree = integer_minus_one_node;
/* FORMAT label: store the format string and its length instead of a
   jump address.  */
125 label_str = code->label->format->value.character.string;
126 label_len = code->label->format->value.character.length;
127 len_tree = build_int_cst (NULL_TREE, label_len);
128 label_tree = gfc_build_string_const (label_len + 1, label_str);
129 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
132 gfc_add_modify_expr (&se.pre, len, len_tree);
133 gfc_add_modify_expr (&se.pre, addr, label_tree);
135 return gfc_finish_block (&se.pre);
138 /* Translate a GOTO statement. */
141 gfc_trans_goto (gfc_code * code)
143 locus loc = code->loc;
/* Ordinary (non-assigned) GOTO: a direct jump to the label.  */
149 if (code->label != NULL)
150 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
/* Assigned GOTO: verify at runtime that the variable holds a target
   label (length == -1, see gfc_trans_label_assign) before jumping.  */
153 gfc_init_se (&se, NULL);
154 gfc_start_block (&se.pre);
155 gfc_conv_label_variable (&se, code->expr);
156 tmp = GFC_DECL_STRING_LEN (se.expr);
157 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
158 build_int_cst (TREE_TYPE (tmp), -1));
159 gfc_trans_runtime_check (tmp, "Assigned label is not a target label",
162 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
/* With no label list, jump through the stored address directly.  */
167 target = build1 (GOTO_EXPR, void_type_node, assigned_goto);
168 gfc_add_expr_to_block (&se.pre, target);
169 return gfc_finish_block (&se.pre);
172 /* Check the label list. */
/* Compare the stored address against each listed label; jump on match.  */
175 target = gfc_get_label_decl (code->label);
176 tmp = gfc_build_addr_expr (pvoid_type_node, target);
177 tmp = build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
178 tmp = build3_v (COND_EXPR, tmp,
179 build1 (GOTO_EXPR, void_type_node, target),
180 build_empty_stmt ());
181 gfc_add_expr_to_block (&se.pre, tmp);
184 while (code != NULL);
/* Fell through the whole list: unconditional runtime error.  */
185 gfc_trans_runtime_check (boolean_true_node,
186 "Assigned label is not in the list", &se.pre, &loc);
188 return gfc_finish_block (&se.pre);
192 /* Translate an ENTRY statement. Just adds a label for this entry point. */
194 gfc_trans_entry (gfc_code * code)
196 return build1_v (LABEL_EXPR, code->ext.entry->label);
200 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
201 elemental subroutines. Make temporaries for output arguments if any such
202 dependencies are found. Output arguments are chosen because internal_unpack
203 can be used, as is, to copy the result back to the variable. */
205 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
206 gfc_symbol * sym, gfc_actual_arglist * arg)
208 gfc_actual_arglist *arg0;
210 gfc_formal_arglist *formal;
211 gfc_loopinfo tmp_loop;
/* Nothing to check for a scalarized call with no SS chain.  */
223 if (loopse->ss == NULL)
228 formal = sym->formal;
230 /* Loop over all the arguments testing for dependencies. */
231 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
237 /* Obtain the info structure for the current argument. */
239 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
243 info = &ss->data.info;
247 /* If there is a dependency, create a temporary and use it
248 instead of the variable. */
249 fsym = formal ? formal->sym : NULL;
250 if (e->expr_type == EXPR_VARIABLE
252 && fsym->attr.intent == INTENT_OUT
253 && gfc_check_fncall_dependency (e, INTENT_OUT, sym, arg0))
255 /* Make a local loopinfo for the temporary creation, so that
256 none of the other ss->info's have to be renormalized. */
257 gfc_init_loopinfo (&tmp_loop);
258 for (n = 0; n < info->dimen; n++)
260 tmp_loop.to[n] = loopse->loop->to[n];
261 tmp_loop.from[n] = loopse->loop->from[n];
262 tmp_loop.order[n] = loopse->loop->order[n];
265 /* Generate the temporary. Merge the block so that the
266 declarations are put at the right binding level. */
267 size = gfc_create_var (gfc_array_index_type, NULL);
268 data = gfc_create_var (pvoid_type_node, NULL);
269 gfc_start_block (&block);
270 tmp = gfc_typenode_for_spec (&e->ts);
271 tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
272 &tmp_loop, info, tmp,
273 false, true, false, false);
274 gfc_add_modify_expr (&se->pre, size, tmp);
275 tmp = fold_convert (pvoid_type_node, info->data);
276 gfc_add_modify_expr (&se->pre, data, tmp);
277 gfc_merge_block_scope (&block);
279 /* Obtain the argument descriptor for unpacking. */
280 gfc_init_se (&parmse, NULL);
281 parmse.want_pointer = 1;
282 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
283 gfc_add_block_to_block (&se->pre, &parmse.pre);
285 /* Calculate the offset for the temporary. */
/* offset = -sum (from[n] * stride[n]), so that indexing with the loop
   bounds addresses element zero of the temporary.  */
286 offset = gfc_index_zero_node;
287 for (n = 0; n < info->dimen; n++)
289 tmp = gfc_conv_descriptor_stride (info->descriptor,
291 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
292 loopse->loop->from[n], tmp);
293 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
296 info->offset = gfc_create_var (gfc_array_index_type, NULL);
297 gfc_add_modify_expr (&se->pre, info->offset, offset);
299 /* Copy the result back using unpack. */
/* Runs in se->post, i.e. after the elemental call has filled the
   temporary.  */
300 tmp = gfc_chainon_list (NULL_TREE, parmse.expr);
301 tmp = gfc_chainon_list (tmp, data);
302 tmp = build_function_call_expr (gfor_fndecl_in_unpack, tmp);
303 gfc_add_expr_to_block (&se->post, tmp);
305 gfc_add_block_to_block (&se->post, &parmse.post);
311 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
314 gfc_trans_call (gfc_code * code, bool dependency_check)
318 int has_alternate_specifier;
320 /* A CALL starts a new block because the actual arguments may have to
321 be evaluated first. */
322 gfc_init_se (&se, NULL);
323 gfc_start_block (&se.pre);
325 gcc_assert (code->resolved_sym);
327 ss = gfc_ss_terminator;
328 if (code->resolved_sym->attr.elemental)
329 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
331 /* Is not an elemental subroutine call with array valued arguments. */
332 if (ss == gfc_ss_terminator)
335 /* Translate the call. */
336 has_alternate_specifier
337 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual);
339 /* A subroutine without side-effect, by definition, does nothing! */
340 TREE_SIDE_EFFECTS (se.expr) = 1;
342 /* Chain the pieces together and return the block. */
343 if (has_alternate_specifier)
345 gfc_code *select_code;
/* An alternate return is lowered as a SELECT on a hidden integer; the
   front end guarantees the SELECT immediately follows the CALL.  */
347 select_code = code->next;
348 gcc_assert(select_code->op == EXEC_SELECT);
349 sym = select_code->expr->symtree->n.sym;
350 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
351 gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
354 gfc_add_expr_to_block (&se.pre, se.expr);
356 gfc_add_block_to_block (&se.pre, &se.post);
361 /* An elemental subroutine call with array valued arguments has
368 /* gfc_walk_elemental_function_args renders the ss chain in the
369 reverse order to the actual argument order. */
370 ss = gfc_reverse_ss (ss);
372 /* Initialize the loop. */
373 gfc_init_se (&loopse, NULL);
374 gfc_init_loopinfo (&loop);
375 gfc_add_ss_to_loop (&loop, ss);
377 gfc_conv_ss_startstride (&loop);
378 gfc_conv_loop_setup (&loop);
379 gfc_mark_ss_chain_used (ss, 1);
381 /* Convert the arguments, checking for dependencies. */
382 gfc_copy_loopinfo_to_se (&loopse, &loop);
385 /* For operator assignment, we need to do dependency checking.
386 We also check the intent of the parameters. */
387 if (dependency_check)
390 sym = code->resolved_sym;
/* Fixed: these asserts used '=' (assignment) instead of '==', so they
   always passed and silently overwrote the formals' intent.  Use
   equality comparison to actually verify the expected intents.  */
391 gcc_assert (sym->formal->sym->attr.intent == INTENT_OUT);
392 gcc_assert (sym->formal->next->sym->attr.intent == INTENT_IN);
393 gfc_conv_elemental_dependencies (&se, &loopse, sym,
397 /* Generate the loop body. */
398 gfc_start_scalarized_body (&loop, &body);
399 gfc_init_block (&block);
401 /* Add the subroutine call to the block. */
402 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual);
403 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
405 gfc_add_block_to_block (&block, &loopse.pre);
406 gfc_add_block_to_block (&block, &loopse.post);
408 /* Finish up the loop block and the loop. */
409 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
410 gfc_trans_scalarizing_loops (&loop, &body);
411 gfc_add_block_to_block (&se.pre, &loop.pre);
412 gfc_add_block_to_block (&se.pre, &loop.post);
413 gfc_add_block_to_block (&se.pre, &se.post);
414 gfc_cleanup_loop (&loop);
417 return gfc_finish_block (&se.pre);
421 /* Translate the RETURN statement. */
424 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
432 /* if code->expr is not NULL, this return statement must appear
433 in a subroutine and current_fake_result_decl has already
436 result = gfc_get_fake_result_decl (NULL, 0);
/* An alternate return without any '*' dummy arguments has nothing to
   store; warn and fall back to a plain jump to the return label.  */
439 gfc_warning ("An alternate return at %L without a * dummy argument",
441 return build1_v (GOTO_EXPR, gfc_get_return_label ());
444 /* Start a new block for this statement. */
445 gfc_init_se (&se, NULL);
446 gfc_start_block (&se.pre);
/* Store the alternate-return index into the fake result, then jump to
   the common return label.  */
448 gfc_conv_expr (&se, code->expr);
450 tmp = build2 (MODIFY_EXPR, TREE_TYPE (result), result, se.expr);
451 gfc_add_expr_to_block (&se.pre, tmp);
453 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
454 gfc_add_expr_to_block (&se.pre, tmp);
455 gfc_add_block_to_block (&se.pre, &se.post);
456 return gfc_finish_block (&se.pre);
/* Plain RETURN: just jump to the function's return label.  */
459 return build1_v (GOTO_EXPR, gfc_get_return_label ());
463 /* Translate the PAUSE statement. We have to translate this statement
464 to a runtime library call. */
467 gfc_trans_pause (gfc_code * code)
469 tree gfc_int4_type_node = gfc_get_int_type (4);
475 /* Start a new block for this statement. */
476 gfc_init_se (&se, NULL);
477 gfc_start_block (&se.pre);
/* No expression: numeric PAUSE using the parsed stop code.  */
480 if (code->expr == NULL)
482 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
483 args = gfc_chainon_list (NULL_TREE, tmp);
484 fndecl = gfor_fndecl_pause_numeric;
/* Otherwise a character PAUSE: pass the string and its length.  */
488 gfc_conv_expr_reference (&se, code->expr);
489 args = gfc_chainon_list (NULL_TREE, se.expr);
490 args = gfc_chainon_list (args, se.string_length);
491 fndecl = gfor_fndecl_pause_string;
494 tmp = build_function_call_expr (fndecl, args);
495 gfc_add_expr_to_block (&se.pre, tmp);
497 gfc_add_block_to_block (&se.pre, &se.post);
499 return gfc_finish_block (&se.pre);
503 /* Translate the STOP statement. We have to translate this statement
504 to a runtime library call. */
507 gfc_trans_stop (gfc_code * code)
509 tree gfc_int4_type_node = gfc_get_int_type (4);
515 /* Start a new block for this statement. */
516 gfc_init_se (&se, NULL);
517 gfc_start_block (&se.pre);
/* Mirrors gfc_trans_pause: numeric form without an expression,
   string form with one.  */
520 if (code->expr == NULL)
522 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
523 args = gfc_chainon_list (NULL_TREE, tmp);
524 fndecl = gfor_fndecl_stop_numeric;
528 gfc_conv_expr_reference (&se, code->expr);
529 args = gfc_chainon_list (NULL_TREE, se.expr);
530 args = gfc_chainon_list (args, se.string_length);
531 fndecl = gfor_fndecl_stop_string;
534 tmp = build_function_call_expr (fndecl, args);
535 gfc_add_expr_to_block (&se.pre, tmp);
537 gfc_add_block_to_block (&se.pre, &se.post);
539 return gfc_finish_block (&se.pre);
543 /* Generate GENERIC for the IF construct. This function also deals with
544 the simple IF statement, because the front end translates the IF
545 statement into an IF construct.
577 where COND_S is the simplified version of the predicate. PRE_COND_S
578 are the pre side-effects produced by the translation of the
580 We need to build the chain recursively otherwise we run into
581 problems with folding incomplete statements. */
584 gfc_trans_if_1 (gfc_code * code)
589 /* Check for an unconditional ELSE clause. */
591 return gfc_trans_code (code->next);
593 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
594 gfc_init_se (&if_se, NULL);
595 gfc_start_block (&if_se.pre);
597 /* Calculate the IF condition expression. */
598 gfc_conv_expr_val (&if_se, code->expr);
600 /* Translate the THEN clause. */
601 stmt = gfc_trans_code (code->next);
603 /* Translate the ELSE clause. */
/* Recurse for ELSE IF chains; an absent ELSE becomes an empty stmt.  */
605 elsestmt = gfc_trans_if_1 (code->block);
607 elsestmt = build_empty_stmt ();
609 /* Build the condition expression and add it to the condition block. */
610 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
612 gfc_add_expr_to_block (&if_se.pre, stmt);
614 /* Finish off this statement. */
615 return gfc_finish_block (&if_se.pre);
/* Entry point for IF translation; delegates to gfc_trans_if_1.  */
619 gfc_trans_if (gfc_code * code)
621 /* Ignore the top EXEC_IF, it only announces an IF construct. The
622 actual code we must translate is in code->block. */
624 return gfc_trans_if_1 (code->block);
628 /* Translate an arithmetic IF expression.
630 IF (cond) label1, label2, label3 translates to
642 An optimized version can be generated in case of equal labels.
643 E.g., if label1 is equal to label2, we can translate it to
652 gfc_trans_arithmetic_if (gfc_code * code)
660 /* Start a new block. */
661 gfc_init_se (&se, NULL);
662 gfc_start_block (&se.pre);
664 /* Pre-evaluate COND. */
665 gfc_conv_expr_val (&se, code->expr);
667 /* Build something to compare with. */
668 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
/* label/label2/label3 are the <0, ==0, >0 targets respectively; equal
   labels let us collapse comparisons.  */
670 if (code->label->value != code->label2->value)
672 /* If (cond < 0) take branch1 else take branch2.
673 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
674 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
675 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
677 if (code->label->value != code->label3->value)
678 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
680 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
682 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
685 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
687 if (code->label->value != code->label3->value
688 && code->label2->value != code->label3->value)
690 /* if (cond <= 0) take branch1 else take branch2. */
691 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
692 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
693 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
696 /* Append the COND_EXPR to the evaluation of COND, and return. */
697 gfc_add_expr_to_block (&se.pre, branch1);
698 return gfc_finish_block (&se.pre);
702 /* Translate the simple DO construct. This is where the loop variable has
703 integer type and step +-1. We can't use this in the general case
704 because integer overflow and floating point errors could give incorrect
706 We translate a do loop from:
708 DO dovar = from, to, step
714 [Evaluate loop bounds and step]
716 if ((step > 0) ? (dovar <= to) : (dovar >= to))
722 cond = (dovar == to);
724 if (cond) goto end_label;
729 This helps the optimizers by avoiding the extra induction variable
730 used in the general case. */
733 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
734 tree from, tree to, tree step)
743 type = TREE_TYPE (dovar);
745 /* Initialize the DO variable: dovar = from. */
746 gfc_add_modify_expr (pblock, dovar, from);
748 /* Cycle and exit statements are implemented with gotos. */
749 cycle_label = gfc_build_label_decl (NULL_TREE);
750 exit_label = gfc_build_label_decl (NULL_TREE);
752 /* Put the labels where they can be found later. See gfc_trans_do(). */
753 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
756 gfc_start_block (&body);
758 /* Main loop body. */
759 tmp = gfc_trans_code (code->block->next);
760 gfc_add_expr_to_block (&body, tmp);
762 /* Label for cycle statements (if needed). */
763 if (TREE_USED (cycle_label))
765 tmp = build1_v (LABEL_EXPR, cycle_label);
766 gfc_add_expr_to_block (&body, tmp);
769 /* Evaluate the loop condition. */
/* Test dovar == to BEFORE incrementing, so the final increment cannot
   overflow past 'to'.  */
770 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
771 cond = gfc_evaluate_now (cond, &body);
773 /* Increment the loop variable. */
774 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
775 gfc_add_modify_expr (&body, dovar, tmp);
778 tmp = build1_v (GOTO_EXPR, exit_label);
779 TREE_USED (exit_label) = 1;
780 tmp = fold_build3 (COND_EXPR, void_type_node,
781 cond, tmp, build_empty_stmt ());
782 gfc_add_expr_to_block (&body, tmp);
784 /* Finish the loop body. */
785 tmp = gfc_finish_block (&body);
786 tmp = build1_v (LOOP_EXPR, tmp);
788 /* Only execute the loop if the number of iterations is positive. */
/* step is known constant +-1 here, so its sign picks the comparison.  */
789 if (tree_int_cst_sgn (step) > 0)
790 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
792 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
793 tmp = fold_build3 (COND_EXPR, void_type_node,
794 cond, tmp, build_empty_stmt ());
795 gfc_add_expr_to_block (pblock, tmp);
797 /* Add the exit label. */
798 tmp = build1_v (LABEL_EXPR, exit_label);
799 gfc_add_expr_to_block (pblock, tmp);
801 return gfc_finish_block (pblock);
804 /* Translate the DO construct. This obviously is one of the most
805 important ones to get right with any compiler, but especially
808 We special case some loop forms as described in gfc_trans_simple_do.
809 For other cases we implement them with a separate loop count,
810 as described in the standard.
812 We translate a do loop from:
814 DO dovar = from, to, step
820 [evaluate loop bounds and step]
821 count = (to + step - from) / step;
829 if (count <=0) goto exit_label;
833 TODO: Large loop counts
834 The code above assumes the loop count fits into a signed integer kind,
835 i.e. Does not work for loop counts > 2^31 for integer(kind=4) variables
836 We must support the full range. */
839 gfc_trans_do (gfc_code * code)
856 gfc_start_block (&block);
858 /* Evaluate all the expressions in the iterator. */
859 gfc_init_se (&se, NULL);
860 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
861 gfc_add_block_to_block (&block, &se.pre);
863 type = TREE_TYPE (dovar);
865 gfc_init_se (&se, NULL);
866 gfc_conv_expr_val (&se, code->ext.iterator->start);
867 gfc_add_block_to_block (&block, &se.pre);
868 from = gfc_evaluate_now (se.expr, &block);
870 gfc_init_se (&se, NULL);
871 gfc_conv_expr_val (&se, code->ext.iterator->end);
872 gfc_add_block_to_block (&block, &se.pre);
873 to = gfc_evaluate_now (se.expr, &block);
875 gfc_init_se (&se, NULL);
876 gfc_conv_expr_val (&se, code->ext.iterator->step);
877 gfc_add_block_to_block (&block, &se.pre);
878 step = gfc_evaluate_now (se.expr, &block);
880 /* Special case simple loops. */
/* Integer loop with constant step of +1 or -1: delegate to the
   count-free form in gfc_trans_simple_do.  */
881 if (TREE_CODE (type) == INTEGER_TYPE
882 && (integer_onep (step)
883 || tree_int_cst_equal (step, integer_minus_one_node)))
884 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
886 /* Initialize loop count. This code is executed before we enter the
887 loop body. We generate: count = (to + step - from) / step. */
889 tmp = fold_build2 (MINUS_EXPR, type, step, from);
890 tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
891 if (TREE_CODE (type) == INTEGER_TYPE)
893 tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
894 count = gfc_create_var (type, "count");
898 /* TODO: We could use the same width as the real type.
899 This would probably cause more problems that it solves
900 when we implement "long double" types. */
/* REAL do-variable: compute the trip count in floating point, then
   truncate to an integer counter.  */
901 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
902 tmp = fold_build1 (FIX_TRUNC_EXPR, gfc_array_index_type, tmp);
903 count = gfc_create_var (gfc_array_index_type, "count");
905 gfc_add_modify_expr (&block, count, tmp);
907 count_one = build_int_cst (TREE_TYPE (count), 1);
909 /* Initialize the DO variable: dovar = from. */
910 gfc_add_modify_expr (&block, dovar, from);
913 gfc_start_block (&body);
915 /* Cycle and exit statements are implemented with gotos. */
916 cycle_label = gfc_build_label_decl (NULL_TREE);
917 exit_label = gfc_build_label_decl (NULL_TREE);
919 /* Start with the loop condition. Loop until count <= 0. */
920 cond = fold_build2 (LE_EXPR, boolean_type_node, count,
921 build_int_cst (TREE_TYPE (count), 0));
922 tmp = build1_v (GOTO_EXPR, exit_label);
923 TREE_USED (exit_label) = 1;
924 tmp = fold_build3 (COND_EXPR, void_type_node,
925 cond, tmp, build_empty_stmt ());
926 gfc_add_expr_to_block (&body, tmp);
928 /* Put these labels where they can be found later. We put the
929 labels in a TREE_LIST node (because TREE_CHAIN is already
930 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
931 label in TREE_VALUE (backend_decl). */
933 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
935 /* Main loop body. */
936 tmp = gfc_trans_code (code->block->next);
937 gfc_add_expr_to_block (&body, tmp);
939 /* Label for cycle statements (if needed). */
940 if (TREE_USED (cycle_label))
942 tmp = build1_v (LABEL_EXPR, cycle_label);
943 gfc_add_expr_to_block (&body, tmp);
946 /* Increment the loop variable. */
947 tmp = build2 (PLUS_EXPR, type, dovar, step);
948 gfc_add_modify_expr (&body, dovar, tmp);
950 /* Decrement the loop count. */
951 tmp = build2 (MINUS_EXPR, TREE_TYPE (count), count, count_one);
952 gfc_add_modify_expr (&body, count, tmp);
954 /* End of loop body. */
955 tmp = gfc_finish_block (&body);
957 /* The for loop itself. */
958 tmp = build1_v (LOOP_EXPR, tmp);
959 gfc_add_expr_to_block (&block, tmp);
961 /* Add the exit label. */
962 tmp = build1_v (LABEL_EXPR, exit_label);
963 gfc_add_expr_to_block (&block, tmp);
965 return gfc_finish_block (&block);
969 /* Translate the DO WHILE construct.
982 if (! cond) goto exit_label;
988 Because the evaluation of the exit condition `cond' may have side
989 effects, we can't do much for empty loop bodies. The backend optimizers
990 should be smart enough to eliminate any dead loops. */
993 gfc_trans_do_while (gfc_code * code)
1001 /* Everything we build here is part of the loop body. */
1002 gfc_start_block (&block);
1004 /* Cycle and exit statements are implemented with gotos. */
1005 cycle_label = gfc_build_label_decl (NULL_TREE);
1006 exit_label = gfc_build_label_decl (NULL_TREE);
1008 /* Put the labels where they can be found later. See gfc_trans_do(). */
1009 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1011 /* Create a GIMPLE version of the exit condition. */
/* The condition is re-evaluated at the top of every iteration, so its
   side effects (if any) run each time -- as required.  */
1012 gfc_init_se (&cond, NULL);
1013 gfc_conv_expr_val (&cond, code->expr);
1014 gfc_add_block_to_block (&block, &cond.pre);
1015 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1017 /* Build "IF (! cond) GOTO exit_label". */
1018 tmp = build1_v (GOTO_EXPR, exit_label);
1019 TREE_USED (exit_label) = 1;
1020 tmp = fold_build3 (COND_EXPR, void_type_node,
1021 cond.expr, tmp, build_empty_stmt ());
1022 gfc_add_expr_to_block (&block, tmp);
1024 /* The main body of the loop. */
1025 tmp = gfc_trans_code (code->block->next);
1026 gfc_add_expr_to_block (&block, tmp);
1028 /* Label for cycle statements (if needed). */
1029 if (TREE_USED (cycle_label))
1031 tmp = build1_v (LABEL_EXPR, cycle_label);
1032 gfc_add_expr_to_block (&block, tmp);
1035 /* End of loop body. */
1036 tmp = gfc_finish_block (&block);
1038 gfc_init_block (&block);
1039 /* Build the loop. */
1040 tmp = build1_v (LOOP_EXPR, tmp);
1041 gfc_add_expr_to_block (&block, tmp);
1043 /* Add the exit label. */
1044 tmp = build1_v (LABEL_EXPR, exit_label);
1045 gfc_add_expr_to_block (&block, tmp);
1047 return gfc_finish_block (&block);
1051 /* Translate the SELECT CASE construct for INTEGER case expressions,
1052 without killing all potential optimizations. The problem is that
1053 Fortran allows unbounded cases, but the back-end does not, so we
1054 need to intercept those before we enter the equivalent SWITCH_EXPR
1057 For example, we translate this,
1060 CASE (:100,101,105:115)
1070 to the GENERIC equivalent,
1074 case (minimum value for typeof(expr) ... 100:
1080 case 200 ... (maximum value for typeof(expr):
1097 gfc_trans_integer_select (gfc_code * code)
1107 gfc_start_block (&block);
1109 /* Calculate the switch expression. */
1110 gfc_init_se (&se, NULL);
1111 gfc_conv_expr_val (&se, code->expr);
1112 gfc_add_block_to_block (&block, &se.pre);
1114 end_label = gfc_build_label_decl (NULL_TREE);
1116 gfc_init_block (&body);
/* Outer loop: one c per CASE block; inner loop: each case range in the
   block's case list.  */
1118 for (c = code->block; c; c = c->block)
1120 for (cp = c->ext.case_list; cp; cp = cp->next)
1125 /* Assume it's the default case. */
1126 low = high = NULL_TREE;
1130 low = gfc_conv_constant_to_tree (cp->low);
1132 /* If there's only a lower bound, set the high bound to the
1133 maximum value of the case expression. */
1135 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1140 /* Three cases are possible here:
1142 1) There is no lower bound, e.g. CASE (:N).
1143 2) There is a lower bound .NE. high bound, that is
1144 a case range, e.g. CASE (N:M) where M>N (we make
1145 sure that M>N during type resolution).
1146 3) There is a lower bound, and it has the same value
1147 as the high bound, e.g. CASE (N:N). This is our
1148 internal representation of CASE(N).
1150 In the first and second case, we need to set a value for
1151 high. In the third case, we don't because the GCC middle
1152 end represents a single case value by just letting high be
1153 a NULL_TREE. We can't do that because we need to be able
1154 to represent unbounded cases. */
1158 && mpz_cmp (cp->low->value.integer,
1159 cp->high->value.integer) != 0))
1160 high = gfc_conv_constant_to_tree (cp->high)
1162 /* Unbounded case. */
1164 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1167 /* Build a label. */
1168 label = gfc_build_label_decl (NULL_TREE);
1170 /* Add this case label.
1171 Add parameter 'label', make it match GCC backend. */
1172 tmp = build3 (CASE_LABEL_EXPR, void_type_node, low, high, label);
1173 gfc_add_expr_to_block (&body, tmp);
1176 /* Add the statements for this case. */
1177 tmp = gfc_trans_code (c->next);
1178 gfc_add_expr_to_block (&body, tmp);
1180 /* Break to the end of the construct. */
1181 tmp = build1_v (GOTO_EXPR, end_label);
1182 gfc_add_expr_to_block (&body, tmp);
1185 tmp = gfc_finish_block (&body);
1186 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1187 gfc_add_expr_to_block (&block, tmp);
1189 tmp = build1_v (LABEL_EXPR, end_label);
1190 gfc_add_expr_to_block (&block, tmp);
1192 return gfc_finish_block (&block);
1196 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1198 There are only two cases possible here, even though the standard
1199 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1200 .FALSE., and DEFAULT.
1202 We never generate more than two blocks here. Instead, we always
1203 try to eliminate the DEFAULT case. This way, we can translate this
1204 kind of SELECT construct to a simple
1208 expression in GENERIC. */
1211 gfc_trans_logical_select (gfc_code * code)
1214 gfc_code *t, *f, *d;
1219 /* Assume we don't have any cases at all. */
1222 /* Now see which ones we actually do have. We can have at most two
1223 cases in a single case list: one for .TRUE. and one for .FALSE.
1224 The default case is always separate. If the cases for .TRUE. and
1225 .FALSE. are in the same case list, the block for that case list
1226 is always executed, and we don't generate a COND_EXPR. */
1227 for (c = code->block; c; c = c->block)
1229 for (cp = c->ext.case_list; cp; cp = cp->next)
1233 if (cp->low->value.logical == 0) /* .FALSE. */
1235 else /* if (cp->value.logical != 0), thus .TRUE. */
1243 /* Start a new block. */
1244 gfc_start_block (&block);
1246 /* Calculate the switch expression. We always need to do this
1247 because it may have side effects. */
1248 gfc_init_se (&se, NULL);
1249 gfc_conv_expr_val (&se, code->expr);
1250 gfc_add_block_to_block (&block, &se.pre);
1252 if (t == f && t != NULL)
1254 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1255 translate the code for these cases, append it to the current
1257 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1261 tree true_tree, false_tree, stmt;
1263 true_tree = build_empty_stmt ();
1264 false_tree = build_empty_stmt ();
1266 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1267 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1268 make the missing case the default case. */
1269 if (t != NULL && f != NULL)
1279 /* Translate the code for each of these blocks, and append it to
1280 the current block. */
1282 true_tree = gfc_trans_code (t->next);
1285 false_tree = gfc_trans_code (f->next);
/* Emit the whole construct as a single COND_EXPR on the selector.  */
1287 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1288 true_tree, false_tree);
1289 gfc_add_expr_to_block (&block, stmt);
1292 return gfc_finish_block (&block);
1296 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1297 Instead of generating compares and jumps, it is far simpler to
1298 generate a data structure describing the cases in order and call a
1299 library subroutine that locates the right case.
1300 This is particularly true because this is the only case where we
1301 might have to dispose of a temporary.
1302 The library subroutine returns a pointer to jump to or NULL if no
1303 branches are to be taken. */
1306 gfc_trans_character_select (gfc_code *code)
1308 tree init, node, end_label, tmp, type, args, *labels;
1309 stmtblock_t block, body;
     /* The jump-table entry record type and its field decls are built once
        and shared by every character SELECT in this translation unit.  */
1315 static tree select_struct;
1316 static tree ss_string1, ss_string1_len;
1317 static tree ss_string2, ss_string2_len;
1318 static tree ss_target;
1320 if (select_struct == NULL)
1322 tree gfc_int4_type_node = gfc_get_int_type (4);
1324 select_struct = make_node (RECORD_TYPE);
1325 TYPE_NAME (select_struct) = get_identifier ("_jump_struct");
     /* Helper: add field NAME of TYPE to select_struct and remember its
        FIELD_DECL in the matching ss_* static.  */
1328 #define ADD_FIELD(NAME, TYPE) \
1329 ss_##NAME = gfc_add_field_to_struct \
1330 (&(TYPE_FIELDS (select_struct)), select_struct, \
1331 get_identifier (stringize(NAME)), TYPE)
1333 ADD_FIELD (string1, pchar_type_node);
1334 ADD_FIELD (string1_len, gfc_int4_type_node);
1336 ADD_FIELD (string2, pchar_type_node);
1337 ADD_FIELD (string2_len, gfc_int4_type_node);
     /* The branch target is stored as a generic pointer.  */
1339 ADD_FIELD (target, pvoid_type_node);
1342 gfc_finish_type (select_struct);
     /* Walk to the head of the case list, then count the cases.  */
1345 cp = code->block->ext.case_list;
1346 while (cp->left != NULL)
1350 for (d = cp; d; d = d->right)
     /* One label per case region.  */
1354 labels = gfc_getmem (n * sizeof (tree));
1358 for(i = 0; i < n; i++)
1360 labels[i] = gfc_build_label_decl (NULL_TREE);
1361 TREE_USED (labels[i]) = 1;
1362 /* TODO: The gimplifier should do this for us, but it has
1363 inadequacies when dealing with static initializers. */
1364 FORCED_LABEL (labels[i]) = 1;
1367 end_label = gfc_build_label_decl (NULL_TREE);
1369 /* Generate the body */
1370 gfc_start_block (&block);
1371 gfc_init_block (&body);
     /* Emit each case block preceded by its label(s) and followed by a
        jump past the whole construct.  */
1373 for (c = code->block; c; c = c->block)
1375 for (d = c->ext.case_list; d; d = d->next)
1377 tmp = build1_v (LABEL_EXPR, labels[d->n]);
1378 gfc_add_expr_to_block (&body, tmp);
1381 tmp = gfc_trans_code (c->next);
1382 gfc_add_expr_to_block (&body, tmp);
1384 tmp = build1_v (GOTO_EXPR, end_label);
1385 gfc_add_expr_to_block (&body, tmp);
1388 /* Generate the structure describing the branches */
1392 for(d = cp; d; d = d->right, i++)
1396 gfc_init_se (&se, NULL);
     /* A missing low bound is encoded as a null string pointer with
        zero length.  */
1400 node = tree_cons (ss_string1, null_pointer_node, node);
1401 node = tree_cons (ss_string1_len, integer_zero_node, node);
1405 gfc_conv_expr_reference (&se, d->low);
1407 node = tree_cons (ss_string1, se.expr, node);
1408 node = tree_cons (ss_string1_len, se.string_length, node);
1411 if (d->high == NULL)
1413 node = tree_cons (ss_string2, null_pointer_node, node);
1414 node = tree_cons (ss_string2_len, integer_zero_node, node);
1418 gfc_init_se (&se, NULL);
1419 gfc_conv_expr_reference (&se, d->high);
1421 node = tree_cons (ss_string2, se.expr, node);
1422 node = tree_cons (ss_string2_len, se.string_length, node);
     /* Record the address of the label to jump to for this range.  */
1425 tmp = gfc_build_addr_expr (pvoid_type_node, labels[i]);
1426 node = tree_cons (ss_target, tmp, node);
1428 tmp = build_constructor_from_list (select_struct, nreverse (node));
1429 init = tree_cons (NULL_TREE, tmp, init);
     /* Build a static array of N jump-table entries, index range 0..n-1.  */
1432 type = build_array_type (select_struct, build_index_type
1433 (build_int_cst (NULL_TREE, n - 1)));
1435 init = build_constructor_from_list (type, nreverse(init));
1436 TREE_CONSTANT (init) = 1;
1437 TREE_INVARIANT (init) = 1;
1438 TREE_STATIC (init) = 1;
1439 /* Create a static variable to hold the jump table. */
1440 tmp = gfc_create_var (type, "jumptable");
1441 TREE_CONSTANT (tmp) = 1;
1442 TREE_INVARIANT (tmp) = 1;
1443 TREE_STATIC (tmp) = 1;
1444 DECL_INITIAL (tmp) = init;
1447 /* Build an argument list for the library call */
     /* NOTE(review): this takes the address of INIT (the constructor node)
        rather than TMP (the static jumptable decl just created) — that
        looks suspicious; verify against the complete file.  */
1448 init = gfc_build_addr_expr (pvoid_type_node, init);
1449 args = gfc_chainon_list (NULL_TREE, init);
1451 tmp = build_int_cst (NULL_TREE, n);
1452 args = gfc_chainon_list (args, tmp);
     /* Default destination when no case matches: the end label.  */
1454 tmp = gfc_build_addr_expr (pvoid_type_node, end_label);
1455 args = gfc_chainon_list (args, tmp);
     /* Pass the selector string and its length.  */
1457 gfc_init_se (&se, NULL);
1458 gfc_conv_expr_reference (&se, code->expr);
1460 args = gfc_chainon_list (args, se.expr);
1461 args = gfc_chainon_list (args, se.string_length);
1463 gfc_add_block_to_block (&block, &se.pre);
     /* The runtime locates the matching case and returns a label address,
        which we jump to via a computed goto.  */
1465 tmp = build_function_call_expr (gfor_fndecl_select_string, args);
1466 tmp = build1 (GOTO_EXPR, void_type_node, tmp);
1467 gfc_add_expr_to_block (&block, tmp);
1469 tmp = gfc_finish_block (&body);
1470 gfc_add_expr_to_block (&block, tmp);
1471 tmp = build1_v (LABEL_EXPR, end_label);
1472 gfc_add_expr_to_block (&block, tmp);
1477 return gfc_finish_block (&block);
1481 /* Translate the three variants of the SELECT CASE construct.
1483 SELECT CASEs with INTEGER case expressions can be translated to an
1484 equivalent GENERIC switch statement, and for LOGICAL case
1485 expressions we build one or two if-else compares.
1487 SELECT CASEs with CHARACTER case expressions are a whole different
1488 story, because they don't exist in GENERIC. So we sort them and
1489 do a binary search at runtime.
1491 Fortran has no BREAK statement, and it does not allow jumps from
1492 one case block to another. That makes things a lot easier for
1496 gfc_trans_select (gfc_code * code)
1498 gcc_assert (code && code->expr);
1500 /* Empty SELECT constructs are legal. */
1501 if (code->block == NULL)
1502 return build_empty_stmt ();
1504 /* Select the correct translation function. */
1505 switch (code->expr->ts.type)
1507 case BT_LOGICAL: return gfc_trans_logical_select (code);
1508 case BT_INTEGER: return gfc_trans_integer_select (code);
1509 case BT_CHARACTER: return gfc_trans_character_select (code);
     /* Any other case-expression type indicates a front-end bug.  */
1511 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1517 /* Generate the loops for a FORALL block. The normal loop format:
1518 count = (end - start + step) / step
1531 gfc_trans_forall_loop (forall_info *forall_tmp, int nvar, tree body, int mask_flag)
1539 tree var, start, end, step;
     /* Wrap BODY in one countdown loop per FORALL index variable.  */
1542 iter = forall_tmp->this_loop;
1543 for (n = 0; n < nvar; n++)
1546 start = iter->start;
1550 exit_label = gfc_build_label_decl (NULL_TREE);
1551 TREE_USED (exit_label) = 1;
1553 /* The loop counter. */
1554 count = gfc_create_var (TREE_TYPE (var), "count");
1556 /* The body of the loop. */
1557 gfc_init_block (&block);
     /* Leave the loop once the countdown reaches zero.  */
1559 /* The exit condition. */
1560 cond = fold_build2 (LE_EXPR, boolean_type_node,
1561 count, build_int_cst (TREE_TYPE (count), 0))
1562 tmp = build1_v (GOTO_EXPR, exit_label);
1563 tmp = fold_build3 (COND_EXPR, void_type_node,
1564 cond, tmp, build_empty_stmt ());
1565 gfc_add_expr_to_block (&block, tmp);
1567 /* The main loop body. */
1568 gfc_add_expr_to_block (&block, body);
1570 /* Increment the loop variable. */
1571 tmp = build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1572 gfc_add_modify_expr (&block, var, tmp);
1574 /* Advance to the next mask element. Only do this for the
1576 if (n == 0 && mask_flag && forall_tmp->mask)
1578 tree maskindex = forall_tmp->maskindex;
1579 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
1580 maskindex, gfc_index_one_node);
1581 gfc_add_modify_expr (&block, maskindex, tmp);
1584 /* Decrement the loop counter. */
1585 tmp = build2 (MINUS_EXPR, TREE_TYPE (var), count, gfc_index_one_node);
1586 gfc_add_modify_expr (&block, count, tmp);
1588 body = gfc_finish_block (&block);
1590 /* Loop var initialization. */
1591 gfc_init_block (&block);
1592 gfc_add_modify_expr (&block, var, start);
1594 /* Initialize maskindex counter. Only do this before the
1596 if (n == nvar - 1 && mask_flag && forall_tmp->mask)
1597 gfc_add_modify_expr (&block, forall_tmp->maskindex,
1598 gfc_index_zero_node);
1600 /* Initialize the loop counter. */
     /* count = (end - start + step) / step, per the iteration-count
        formula in the comment above this function.  */
1601 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1602 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1603 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1604 gfc_add_modify_expr (&block, count, tmp);
1606 /* The loop expression. */
1607 tmp = build1_v (LOOP_EXPR, body);
1608 gfc_add_expr_to_block (&block, tmp);
1610 /* The exit label. */
1611 tmp = build1_v (LABEL_EXPR, exit_label);
1612 gfc_add_expr_to_block (&block, tmp);
1614 body = gfc_finish_block (&block);
1621 /* Generate the body and loops according to MASK_FLAG and NEST_FLAG.
1622 If MASK_FLAG is nonzero, the body is controlled by the masks in the
1623 forall nest; otherwise the body is not mask-controlled.
1624 If NEST_FLAG is nonzero, generate loops for the whole nested forall;
1625 otherwise generate loops only for the current forall level. */
1628 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1629 int mask_flag, int nest_flag)
1633 forall_info *forall_tmp;
1634 tree pmask, mask, maskindex;
1636 forall_tmp = nested_forall_info;
1637 /* Generate loops for nested forall. */
     /* Start from the innermost forall level and wrap outward.  */
1640 while (forall_tmp->next_nest != NULL)
1641 forall_tmp = forall_tmp->next_nest;
1642 while (forall_tmp != NULL)
1644 /* Generate body with masks' control. */
1647 pmask = forall_tmp->pmask;
1648 mask = forall_tmp->mask;
1649 maskindex = forall_tmp->maskindex;
1653 /* If a mask was specified make the assignment conditional. */
     /* A heap-held mask is reached through a pointer; dereference it
        before indexing.  */
1655 tmp = build_fold_indirect_ref (mask);
1658 tmp = gfc_build_array_ref (tmp, maskindex);
1660 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
     /* Wrap the (possibly mask-guarded) body in this level's loops,
        then move to the enclosing level.  */
1663 nvar = forall_tmp->nvar;
1664 body = gfc_trans_forall_loop (forall_tmp, nvar, body, mask_flag);
1665 forall_tmp = forall_tmp->outer;
     /* Non-nested case: generate loops for the current level only.  */
1670 nvar = forall_tmp->nvar;
1671 body = gfc_trans_forall_loop (forall_tmp, nvar, body, mask_flag);
1678 /* Allocate data for holding a temporary array. Returns either a local
1679 temporary array or a pointer variable. */
1682 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
     /* SIZE is the element count; when constant, compute SIZE - 1 as the
        upper bound of the array's index range.  */
1690 if (INTEGER_CST_P (size))
1692 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1693 gfc_index_one_node);
1698 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1699 type = build_array_type (elem_type, type);
1700 if (gfc_can_put_var_on_stack (bytesize))
1702 gcc_assert (INTEGER_CST_P (size));
1703 tmpvar = gfc_create_var (type, "temp");
     /* Heap case: allocate through the runtime; *PDATA receives the raw
        pointer so the caller can free it later.  */
1708 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1709 *pdata = convert (pvoid_type_node, tmpvar);
1711 args = gfc_chainon_list (NULL_TREE, bytesize);
     /* Pick the malloc entry point matching the index-integer width.  */
1712 if (gfc_index_integer_kind == 4)
1713 tmp = gfor_fndecl_internal_malloc;
1714 else if (gfc_index_integer_kind == 8)
1715 tmp = gfor_fndecl_internal_malloc64;
1718 tmp = build_function_call_expr (tmp, args);
1719 tmp = convert (TREE_TYPE (tmpvar), tmp);
1720 gfc_add_modify_expr (pblock, tmpvar, tmp);
1726 /* Generate code to copy the temporary to the actual LHS. */
1729 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1730 tree count1, tree wheremask, bool invert)
1734 stmtblock_t block, body;
1740 lss = gfc_walk_expr (expr);
     /* Scalar LHS: a single assignment from the temporary, no scalarizer
        loop needed.  */
1742 if (lss == gfc_ss_terminator)
1744 gfc_start_block (&block);
1746 gfc_init_se (&lse, NULL);
1748 /* Translate the expression. */
1749 gfc_conv_expr (&lse, expr);
1751 /* Form the expression for the temporary. */
1752 tmp = gfc_build_array_ref (tmp1, count1);
1754 /* Use the scalar assignment as is. */
1755 gfc_add_block_to_block (&block, &lse.pre);
1756 gfc_add_modify_expr (&block, lse.expr, tmp);
1757 gfc_add_block_to_block (&block, &lse.post);
1759 /* Increment the count1. */
1760 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1761 gfc_index_one_node);
1762 gfc_add_modify_expr (&block, count1, tmp);
1764 tmp = gfc_finish_block (&block);
     /* Array LHS: scalarize over the LHS, reading successive elements of
        the linear temporary via COUNT1.  */
1768 gfc_start_block (&block);
1770 gfc_init_loopinfo (&loop1);
1771 gfc_init_se (&rse, NULL);
1772 gfc_init_se (&lse, NULL);
1774 /* Associate the lss with the loop. */
1775 gfc_add_ss_to_loop (&loop1, lss);
1777 /* Calculate the bounds of the scalarization. */
1778 gfc_conv_ss_startstride (&loop1);
1779 /* Setup the scalarizing loops. */
1780 gfc_conv_loop_setup (&loop1);
1782 gfc_mark_ss_chain_used (lss, 1);
1784 /* Start the scalarized loop body. */
1785 gfc_start_scalarized_body (&loop1, &body);
1787 /* Setup the gfc_se structures. */
1788 gfc_copy_loopinfo_to_se (&lse, &loop1);
1791 /* Form the expression of the temporary. */
1792 if (lss != gfc_ss_terminator)
1793 rse.expr = gfc_build_array_ref (tmp1, count1);
1794 /* Translate expr. */
1795 gfc_conv_expr (&lse, expr);
1797 /* Use the scalar assignment. */
1798 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts.type);
1800 /* Form the mask expression according to the mask tree list. */
     /* Guard the assignment with the WHERE mask element at COUNT3,
        inverting its sense when INVERT is set.  */
1803 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1805 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1806 TREE_TYPE (wheremaskexpr),
1808 tmp = fold_build3 (COND_EXPR, void_type_node,
1809 wheremaskexpr, tmp, build_empty_stmt ());
1812 gfc_add_expr_to_block (&body, tmp);
1814 /* Increment count1. */
1815 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1816 count1, gfc_index_one_node);
1817 gfc_add_modify_expr (&body, count1, tmp);
1819 /* Increment count3. */
1822 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1823 count3, gfc_index_one_node);
1824 gfc_add_modify_expr (&body, count3, tmp);
1827 /* Generate the copying loops. */
1828 gfc_trans_scalarizing_loops (&loop1, &body);
1829 gfc_add_block_to_block (&block, &loop1.pre);
1830 gfc_add_block_to_block (&block, &loop1.post);
1831 gfc_cleanup_loop (&loop1);
1833 tmp = gfc_finish_block (&block);
1839 /* Generate code to copy the rhs to the temporary. TMP1 is the address of
1840 the temporary, LSS and RSS are formed in function compute_inner_temp_size(),
1841 and should not be freed. WHEREMASK is the conditional execution mask
1842 whose sense may be inverted by INVERT. */
1845 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
1846 tree count1, gfc_ss *lss, gfc_ss *rss,
1847 tree wheremask, bool invert)
1849 stmtblock_t block, body1;
1856 gfc_start_block (&block);
1858 gfc_init_se (&rse, NULL);
1859 gfc_init_se (&lse, NULL);
     /* Scalar assignment: no scalarizer loop is required.  */
1861 if (lss == gfc_ss_terminator)
1863 gfc_init_block (&body1);
1864 gfc_conv_expr (&rse, expr2);
1865 lse.expr = gfc_build_array_ref (tmp1, count1);
1869 /* Initialize the loop. */
1870 gfc_init_loopinfo (&loop);
1872 /* We may need LSS to determine the shape of the expression. */
1873 gfc_add_ss_to_loop (&loop, lss);
1874 gfc_add_ss_to_loop (&loop, rss);
1876 gfc_conv_ss_startstride (&loop);
1877 gfc_conv_loop_setup (&loop);
1879 gfc_mark_ss_chain_used (rss, 1);
1880 /* Start the loop body. */
1881 gfc_start_scalarized_body (&loop, &body1);
1883 /* Translate the expression. */
1884 gfc_copy_loopinfo_to_se (&rse, &loop);
1886 gfc_conv_expr (&rse, expr2);
1888 /* Form the expression of the temporary. */
1889 lse.expr = gfc_build_array_ref (tmp1, count1);
1892 /* Use the scalar assignment. */
1893 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts.type);
1895 /* Form the mask expression according to the mask tree list. */
     /* Guard the store with the WHERE mask element at COUNT3, inverting
        its sense when INVERT is set.  */
1898 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1900 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1901 TREE_TYPE (wheremaskexpr),
1903 tmp = fold_build3 (COND_EXPR, void_type_node,
1904 wheremaskexpr, tmp, build_empty_stmt ());
1907 gfc_add_expr_to_block (&body1, tmp);
     /* Scalar case: append the body and bump COUNT1 outside any loop.  */
1909 if (lss == gfc_ss_terminator)
1911 gfc_add_block_to_block (&block, &body1);
1913 /* Increment count1. */
1914 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1915 gfc_index_one_node);
1916 gfc_add_modify_expr (&block, count1, tmp);
     /* Array case: the counters advance inside the scalarized body.  */
1920 /* Increment count1. */
1921 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1922 count1, gfc_index_one_node);
1923 gfc_add_modify_expr (&body1, count1, tmp);
1925 /* Increment count3. */
1928 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1929 count3, gfc_index_one_node);
1930 gfc_add_modify_expr (&body1, count3, tmp);
1933 /* Generate the copying loops. */
1934 gfc_trans_scalarizing_loops (&loop, &body1);
1936 gfc_add_block_to_block (&block, &loop.pre);
1937 gfc_add_block_to_block (&block, &loop.post);
1939 gfc_cleanup_loop (&loop);
1940 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
1941 as tree nodes in SS may not be valid in different scope. */
1944 tmp = gfc_finish_block (&block);
1949 /* Calculate the size of temporary needed in the assignment inside forall.
1950 LSS and RSS are filled in this function. */
1953 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
1954 stmtblock_t * pblock,
1955 gfc_ss **lss, gfc_ss **rss)
1962 *lss = gfc_walk_expr (expr1);
     /* A scalar assignment needs exactly one element.  */
1965 size = gfc_index_one_node;
1966 if (*lss != gfc_ss_terminator)
1968 gfc_init_loopinfo (&loop);
1970 /* Walk the RHS of the expression. */
1971 *rss = gfc_walk_expr (expr2);
1972 if (*rss == gfc_ss_terminator)
1974 /* The rhs is scalar. Add a ss for the expression. */
1975 *rss = gfc_get_ss ();
1976 (*rss)->next = gfc_ss_terminator;
1977 (*rss)->type = GFC_SS_SCALAR;
1978 (*rss)->expr = expr2;
1981 /* Associate the SS with the loop. */
1982 gfc_add_ss_to_loop (&loop, *lss);
1983 /* We don't actually need to add the rhs at this point, but it might
1984 make guessing the loop bounds a bit easier. */
1985 gfc_add_ss_to_loop (&loop, *rss);
1987 /* We only want the shape of the expression, not rest of the junk
1988 generated by the scalarizer. */
1989 loop.array_parameter = 1;
1991 /* Calculate the bounds of the scalarization. */
1992 gfc_conv_ss_startstride (&loop);
1993 gfc_conv_loop_setup (&loop);
1995 /* Figure out how many elements we need. */
1996 for (i = 0; i < loop.dimen; i++)
     /* Accumulate the extent of dimension I into SIZE.  */
1998 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
1999 gfc_index_one_node, loop.from[i]);
2000 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2002 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2004 gfc_add_block_to_block (pblock, &loop.pre);
     /* Evaluate SIZE once now so callers get a stable value.  */
2005 size = gfc_evaluate_now (size, pblock);
2006 gfc_add_block_to_block (pblock, &loop.post);
2008 /* TODO: write a function that cleans up a loopinfo without freeing
2009 the SS chains. Currently a NOP. */
2016 /* Calculate the overall iterator number of the nested forall construct. */
2019 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2020 stmtblock_t *inner_size_body, stmtblock_t *block)
2025 /* TODO: optimizing the computing process. */
2026 number = gfc_create_var (gfc_array_index_type, "num");
2027 gfc_add_modify_expr (block, number, gfc_index_zero_node);
2029 gfc_start_block (&body);
2030 if (inner_size_body)
2031 gfc_add_block_to_block (&body, inner_size_body);
     /* Accumulate the per-iteration element count into NUMBER.  */
2032 if (nested_forall_info)
2033 tmp = build2 (PLUS_EXPR, gfc_array_index_type, number,
2037 gfc_add_modify_expr (&body, number, tmp);
2038 tmp = gfc_finish_block (&body);
2040 /* Generate loops. */
     /* Run the accumulation once per forall iteration (masks ignored,
        full nest).  */
2041 if (nested_forall_info != NULL)
2042 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 0, 1);
2044 gfc_add_expr_to_block (block, tmp);
2050 /* Allocate a temporary for the forall construct. SIZE is the size of the
2051 temporary needed. PTEMP1 is returned so the space can be freed later. */
2054 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
     /* Bytes needed = element count * per-element size.  */
2062 unit = TYPE_SIZE_UNIT (type);
2063 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2066 temp1 = gfc_do_allocate (bytesize, size, ptemp1, block, type);
     /* A heap temporary comes back as a pointer; dereference it so the
        caller sees the array itself.  */
2069 tmp = build_fold_indirect_ref (temp1);
2077 /* Allocate a temporary for the forall construct according to the information
2078 in nested_forall_info. INNER_SIZE is the size of the temporary needed in
2079 the assignment inside the forall. PTEMP1 is returned so the space can be
freed later. */
2082 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2083 tree inner_size, stmtblock_t * inner_size_body,
2084 stmtblock_t * block, tree * ptemp1)
2088 /* Calculate the total size of temporary needed in forall construct. */
2089 size = compute_overall_iter_number (nested_forall_info, inner_size,
2090 inner_size_body, block);
     /* Delegate the actual allocation to the size-based helper.  */
2092 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2096 /* Handle assignments inside forall which need temporary.
2098 forall (i=start:end:stride; maskexpr)
2101 (where e,f<i> are arbitrary expressions possibly involving i
2102 and there is a dependency between e<i> and f<i>)
2104 masktmp(:) = maskexpr(:)
2109 for (i = start; i <= end; i += stride)
2113 for (i = start; i <= end; i += stride)
2115 if (masktmp[maskindex++])
2116 tmp[count1++] = f<i>
2120 for (i = start; i <= end; i += stride)
2122 if (masktmp[maskindex++])
2123 e<i> = tmp[count1++]
2128 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2129 tree wheremask, bool invert,
2130 forall_info * nested_forall_info,
2131 stmtblock_t * block)
2139 stmtblock_t inner_size_body;
2141 /* Create vars. count1 is the current iterator number of the nested
2143 count1 = gfc_create_var (gfc_array_index_type, "count1");
2145 /* Count is the wheremask index. */
2148 count = gfc_create_var (gfc_array_index_type, "count");
2149 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2154 /* Initialize count1. */
2155 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2157 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2158 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2159 gfc_init_block (&inner_size_body);
2160 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2163 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
2164 type = gfc_typenode_for_spec (&expr1->ts);
2166 /* Allocate temporary for nested forall construct according to the
2167 information in nested_forall_info and inner_size. */
2168 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2169 &inner_size_body, block, &ptemp1);
2171 /* Generate codes to copy rhs to the temporary . */
2172 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2175 /* Generate body and loops according to the information in
2176 nested_forall_info. */
2177 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2178 gfc_add_expr_to_block (block, tmp);
     /* Reset the counters before the second (copy-back) pass.  */
2181 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2185 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2187 /* Generate codes to copy the temporary to lhs. */
2188 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2191 /* Generate body and loops according to the information in
2192 nested_forall_info. */
2193 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2194 gfc_add_expr_to_block (block, tmp);
     /* Release the heap temporary, if one was allocated.  */
2198 /* Free the temporary. */
2199 tmp = gfc_chainon_list (NULL_TREE, ptemp1);
2200 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2201 gfc_add_expr_to_block (block, tmp);
2206 /* Translate a pointer assignment inside FORALL which needs a temporary. */
2209 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2210 forall_info * nested_forall_info,
2211 stmtblock_t * block)
2225 tree tmp, tmp1, ptemp1;
2227 count = gfc_create_var (gfc_array_index_type, "count");
2228 gfc_add_modify_expr (block, count, gfc_index_zero_node);
     /* One temporary slot per forall iteration.  */
2230 inner_size = integer_one_node;
2231 lss = gfc_walk_expr (expr1);
2232 rss = gfc_walk_expr (expr2);
     /* Scalar pointer LHS: first pass saves the RHS addresses in an array
        of pointers; second pass assigns them to the LHS.  */
2233 if (lss == gfc_ss_terminator)
2235 type = gfc_typenode_for_spec (&expr1->ts);
2236 type = build_pointer_type (type);
2238 /* Allocate temporary for nested forall construct according to the
2239 information in nested_forall_info and inner_size. */
2240 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2241 inner_size, NULL, block, &ptemp1);
2242 gfc_start_block (&body);
2243 gfc_init_se (&lse, NULL);
2244 lse.expr = gfc_build_array_ref (tmp1, count);
2245 gfc_init_se (&rse, NULL);
2246 rse.want_pointer = 1;
2247 gfc_conv_expr (&rse, expr2);
2248 gfc_add_block_to_block (&body, &rse.pre);
2249 gfc_add_modify_expr (&body, lse.expr,
2250 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2251 gfc_add_block_to_block (&body, &rse.post);
2253 /* Increment count. */
2254 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2255 count, gfc_index_one_node);
2256 gfc_add_modify_expr (&body, count, tmp);
2258 tmp = gfc_finish_block (&body);
2260 /* Generate body and loops according to the information in
2261 nested_forall_info. */
2262 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2263 gfc_add_expr_to_block (block, tmp);
     /* Second pass: rerun the forall loops, assigning the saved pointers
        to the LHS.  */
2266 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2268 gfc_start_block (&body);
2269 gfc_init_se (&lse, NULL);
2270 gfc_init_se (&rse, NULL);
2271 rse.expr = gfc_build_array_ref (tmp1, count);
2272 lse.want_pointer = 1;
2273 gfc_conv_expr (&lse, expr1);
2274 gfc_add_block_to_block (&body, &lse.pre);
2275 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2276 gfc_add_block_to_block (&body, &lse.post);
2277 /* Increment count. */
2278 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2279 count, gfc_index_one_node);
2280 gfc_add_modify_expr (&body, count, tmp);
2281 tmp = gfc_finish_block (&body);
2283 /* Generate body and loops according to the information in
2284 nested_forall_info. */
2285 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2286 gfc_add_expr_to_block (block, tmp);
     /* Array pointer LHS: save a full array descriptor per iteration
        instead of a bare pointer.  */
2290 gfc_init_loopinfo (&loop);
2292 /* Associate the SS with the loop. */
2293 gfc_add_ss_to_loop (&loop, rss);
2295 /* Setup the scalarizing loops and bounds. */
2296 gfc_conv_ss_startstride (&loop);
2298 gfc_conv_loop_setup (&loop);
2300 info = &rss->data.info;
2301 desc = info->descriptor;
2303 /* Make a new descriptor. */
2304 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2305 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2306 loop.from, loop.to, 1);
2308 /* Allocate temporary for nested forall construct. */
2309 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2310 inner_size, NULL, block, &ptemp1);
2311 gfc_start_block (&body);
2312 gfc_init_se (&lse, NULL);
2313 lse.expr = gfc_build_array_ref (tmp1, count);
     /* direct_byref makes gfc_conv_expr_descriptor fill the temporary
        descriptor slot in place.  */
2314 lse.direct_byref = 1;
2315 rss = gfc_walk_expr (expr2);
2316 gfc_conv_expr_descriptor (&lse, expr2, rss);
2318 gfc_add_block_to_block (&body, &lse.pre);
2319 gfc_add_block_to_block (&body, &lse.post);
2321 /* Increment count. */
2322 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2323 count, gfc_index_one_node);
2324 gfc_add_modify_expr (&body, count, tmp);
2326 tmp = gfc_finish_block (&body);
2328 /* Generate body and loops according to the information in
2329 nested_forall_info. */
2330 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2331 gfc_add_expr_to_block (block, tmp);
     /* Second pass: assign each saved descriptor to the LHS pointer.  */
2334 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2336 parm = gfc_build_array_ref (tmp1, count);
2337 lss = gfc_walk_expr (expr1);
2338 gfc_init_se (&lse, NULL);
2339 gfc_conv_expr_descriptor (&lse, expr1, lss);
2340 gfc_add_modify_expr (&lse.pre, lse.expr, parm);
2341 gfc_start_block (&body);
2342 gfc_add_block_to_block (&body, &lse.pre);
2343 gfc_add_block_to_block (&body, &lse.post);
2345 /* Increment count. */
2346 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2347 count, gfc_index_one_node);
2348 gfc_add_modify_expr (&body, count, tmp);
2350 tmp = gfc_finish_block (&body);
2352 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2353 gfc_add_expr_to_block (block, tmp);
2355 /* Free the temporary. */
2358 tmp = gfc_chainon_list (NULL_TREE, ptemp1);
2359 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2360 gfc_add_expr_to_block (block, tmp);
2365 /* FORALL and WHERE statements are really nasty, especially when you nest
2366 them. All the rhs of a forall assignment must be evaluated before the
2367 actual assignments are performed. Presumably this also applies to all the
2368 assignments in an inner where statement. */
2370 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2371 linear array, relying on the fact that we process in the same order in all
2374 forall (i=start:end:stride; maskexpr)
2378 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2380 count = ((end + 1 - start) / stride)
2381 masktmp(:) = maskexpr(:)
2384 for (i = start; i <= end; i += stride)
2386 if (masktmp[maskindex++])
2390 for (i = start; i <= end; i += stride)
2392 if (masktmp[maskindex++])
2396 Note that this code only works when there are no dependencies.
2397 Forall loop with array assignments and data dependencies are a real pain,
2398 because the size of the temporary cannot always be determined before the
2399 loop is executed. This problem is compounded by the presence of nested
2404 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2426 gfc_forall_iterator *fa;
2429 gfc_saved_var *saved_vars;
2430 iter_info *this_forall, *iter_tmp;
2431 forall_info *info, *forall_tmp;
2433 gfc_start_block (&block);
2436 /* Count the FORALL index number. */
2437 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2441 /* Allocate the space for var, start, end, step, varexpr. */
2442 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2443 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2444 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2445 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2446 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2447 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2449 /* Allocate the space for info. */
2450 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2452 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2454 gfc_symbol *sym = fa->var->symtree->n.sym;
2456 /* allocate space for this_forall. */
2457 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2459 /* Create a temporary variable for the FORALL index. */
2460 tmp = gfc_typenode_for_spec (&sym->ts);
2461 var[n] = gfc_create_var (tmp, sym->name);
2462 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2464 /* Record it in this_forall. */
2465 this_forall->var = var[n];
2467 /* Replace the index symbol's backend_decl with the temporary decl. */
2468 sym->backend_decl = var[n];
2470 /* Work out the start, end and stride for the loop. */
2471 gfc_init_se (&se, NULL);
2472 gfc_conv_expr_val (&se, fa->start);
2473 /* Record it in this_forall. */
2474 this_forall->start = se.expr;
2475 gfc_add_block_to_block (&block, &se.pre);
2478 gfc_init_se (&se, NULL);
2479 gfc_conv_expr_val (&se, fa->end);
2480 /* Record it in this_forall. */
2481 this_forall->end = se.expr;
2482 gfc_make_safe_expr (&se);
2483 gfc_add_block_to_block (&block, &se.pre);
2486 gfc_init_se (&se, NULL);
2487 gfc_conv_expr_val (&se, fa->stride);
2488 /* Record it in this_forall. */
2489 this_forall->step = se.expr;
2490 gfc_make_safe_expr (&se);
2491 gfc_add_block_to_block (&block, &se.pre);
2494 /* Set the NEXT field of this_forall to NULL. */
2495 this_forall->next = NULL;
2496 /* Link this_forall to the info construct. */
2497 if (info->this_loop == NULL)
2498 info->this_loop = this_forall;
2501 iter_tmp = info->this_loop;
2502 while (iter_tmp->next != NULL)
2503 iter_tmp = iter_tmp->next;
2504 iter_tmp->next = this_forall;
2511 /* Work out the number of elements in the mask array. */
2514 size = gfc_index_one_node;
2515 sizevar = NULL_TREE;
2517 for (n = 0; n < nvar; n++)
2519 if (lenvar && TREE_TYPE (lenvar) != TREE_TYPE (start[n]))
2522 /* size = (end + step - start) / step. */
2523 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2525 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2527 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2528 tmp = convert (gfc_array_index_type, tmp);
2530 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2533 /* Record the nvar and size of current forall level. */
2537 /* Link the current forall level to nested_forall_info. */
2538 forall_tmp = nested_forall_info;
2539 if (forall_tmp == NULL)
2540 nested_forall_info = info;
2543 while (forall_tmp->next_nest != NULL)
2544 forall_tmp = forall_tmp->next_nest;
2545 info->outer = forall_tmp;
2546 forall_tmp->next_nest = info;
2549 /* Copy the mask into a temporary variable if required.
2550 For now we assume a mask temporary is needed. */
2553 /* As the mask array can be very big, prefer compact
2555 tree smallest_boolean_type_node
2556 = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2558 /* Allocate the mask temporary. */
2559 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
2560 TYPE_SIZE_UNIT (smallest_boolean_type_node));
2562 mask = gfc_do_allocate (bytesize, size, &pmask, &block,
2563 smallest_boolean_type_node);
2565 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2566 /* Record them in the info structure. */
2567 info->pmask = pmask;
2569 info->maskindex = maskindex;
2571 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2573 /* Start of mask assignment loop body. */
2574 gfc_start_block (&body);
2576 /* Evaluate the mask expression. */
2577 gfc_init_se (&se, NULL);
2578 gfc_conv_expr_val (&se, code->expr);
2579 gfc_add_block_to_block (&body, &se.pre);
2581 /* Store the mask. */
2582 se.expr = convert (smallest_boolean_type_node, se.expr);
2585 tmp = build_fold_indirect_ref (mask);
2588 tmp = gfc_build_array_ref (tmp, maskindex);
2589 gfc_add_modify_expr (&body, tmp, se.expr);
2591 /* Advance to the next mask element. */
2592 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
2593 maskindex, gfc_index_one_node);
2594 gfc_add_modify_expr (&body, maskindex, tmp);
2596 /* Generate the loops. */
2597 tmp = gfc_finish_block (&body);
2598 tmp = gfc_trans_nested_forall_loop (info, tmp, 0, 0);
2599 gfc_add_expr_to_block (&block, tmp);
2603 /* No mask was specified. */
2604 maskindex = NULL_TREE;
2605 mask = pmask = NULL_TREE;
2608 c = code->block->next;
2610 /* TODO: loop merging in FORALL statements. */
2611 /* Now that we've got a copy of the mask, generate the assignment loops. */
2617 /* A scalar or array assignment. */
2618 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2619 /* Temporaries due to array assignment data dependencies introduce
2620 no end of problems. */
2622 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2623 nested_forall_info, &block);
2626 /* Use the normal assignment copying routines. */
2627 assign = gfc_trans_assignment (c->expr, c->expr2);
2629 /* Generate body and loops. */
2630 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1, 1);
2631 gfc_add_expr_to_block (&block, tmp);
2637 /* Translate WHERE or WHERE construct nested in FORALL. */
2638 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2641 /* Pointer assignment inside FORALL. */
2642 case EXEC_POINTER_ASSIGN:
2643 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2645 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2646 nested_forall_info, &block);
2649 /* Use the normal assignment copying routines. */
2650 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2652 /* Generate body and loops. */
2653 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign,
2655 gfc_add_expr_to_block (&block, tmp);
2660 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2661 gfc_add_expr_to_block (&block, tmp);
2664 /* Explicit subroutine calls are prevented by the frontend but interface
2665 assignments can legitimately produce them. */
2666 case EXEC_ASSIGN_CALL:
2667 assign = gfc_trans_call (c, true);
2668 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1, 1);
2669 gfc_add_expr_to_block (&block, tmp);
2679 /* Restore the original index variables. */
2680 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2681 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2683 /* Free the space for var, start, end, step, varexpr. */
2689 gfc_free (saved_vars);
2693 /* Free the temporary for the mask. */
2694 tmp = gfc_chainon_list (NULL_TREE, pmask);
2695 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2696 gfc_add_expr_to_block (&block, tmp);
2699 pushdecl (maskindex);
2701 return gfc_finish_block (&block);
2705 /* Translate the FORALL statement or construct.  Public entry point:
   delegates to gfc_trans_forall_1 with a NULL forall_info, i.e. this
   FORALL is not nested inside any outer FORALL level.  */
2707 tree gfc_trans_forall (gfc_code * code)
2709 return gfc_trans_forall_1 (code, NULL);
2713 /* Evaluate the WHERE mask expression, copy its value to a temporary.
2714 If the WHERE construct is nested in FORALL, compute the overall temporary
2715 needed by the WHERE mask expression multiplied by the iterator number of
2717 ME is the WHERE mask expression.
2718 MASK is the current execution mask upon input, whose sense may or may
2719 not be inverted as specified by the INVERT argument.
2720 CMASK is the updated execution mask on output, or NULL if not required.
2721 PMASK is the pending execution mask on output, or NULL if not required.
2722 BLOCK is the block in which to place the condition evaluation loops. */
2725 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2726 tree mask, bool invert, tree cmask, tree pmask,
2727 tree mask_type, stmtblock_t * block)
2732 stmtblock_t body, body1;
2733 tree count, cond, mtmp;
2736 gfc_init_loopinfo (&loop);
/* Walk the mask expression twice: one SS chain (lss) is used to determine
   the shape, the other (rss) is scalarized to evaluate the expression.  */
2738 lss = gfc_walk_expr (me);
2739 rss = gfc_walk_expr (me);
2741 /* Variable to index the temporary. */
2742 count = gfc_create_var (gfc_array_index_type, "count");
2743 /* Initialize count. */
2744 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2746 gfc_start_block (&body);
2748 gfc_init_se (&rse, NULL);
2749 gfc_init_se (&lse, NULL);
/* A scalar mask expression needs no scalarization loop.  */
2751 if (lss == gfc_ss_terminator)
2753 gfc_init_block (&body1);
2757 /* Initialize the loop. */
2758 gfc_init_loopinfo (&loop);
2760 /* We may need LSS to determine the shape of the expression. */
2761 gfc_add_ss_to_loop (&loop, lss);
2762 gfc_add_ss_to_loop (&loop, rss);
2764 gfc_conv_ss_startstride (&loop);
2765 gfc_conv_loop_setup (&loop);
2767 gfc_mark_ss_chain_used (rss, 1);
2768 /* Start the loop body. */
2769 gfc_start_scalarized_body (&loop, &body1);
2771 /* Translate the expression. */
2772 gfc_copy_loopinfo_to_se (&rse, &loop);
2774 gfc_conv_expr (&rse, me);
2777 /* Variable to evaluate mask condition. */
2778 cond = gfc_create_var (mask_type, "cond");
/* MTMP caches the current control-mask element; it is only needed when an
   incoming MASK exists and at least one output mask (CMASK/PMASK) is
   requested.  */
2779 if (mask && (cmask || pmask))
2780 mtmp = gfc_create_var (mask_type, "mask");
2781 else mtmp = NULL_TREE;
2783 gfc_add_block_to_block (&body1, &lse.pre);
2784 gfc_add_block_to_block (&body1, &rse.pre);
2786 gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));
2788 if (mask && (cmask || pmask))
2790 tmp = gfc_build_array_ref (mask, count);
/* INVERT reverses the sense of the incoming mask (see function comment).  */
2792 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
2793 gfc_add_modify_expr (&body1, mtmp, tmp);
/* Control mask: cmask[count] = (possibly inverted) mask[count] && cond.  */
2798 tmp1 = gfc_build_array_ref (cmask, count);
2801 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2802 gfc_add_modify_expr (&body1, tmp1, tmp);
/* Pending mask: pmask[count] = (possibly inverted) mask[count] && !cond.  */
2807 tmp1 = gfc_build_array_ref (pmask, count);
2808 tmp = build1 (TRUTH_NOT_EXPR, mask_type, cond);
2810 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2811 gfc_add_modify_expr (&body1, tmp1, tmp);
2814 gfc_add_block_to_block (&body1, &lse.post);
2815 gfc_add_block_to_block (&body1, &rse.post);
2817 if (lss == gfc_ss_terminator)
2819 gfc_add_block_to_block (&body, &body1);
2823 /* Increment count. */
2824 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
2825 gfc_index_one_node);
2826 gfc_add_modify_expr (&body1, count, tmp1);
2828 /* Generate the copying loops. */
2829 gfc_trans_scalarizing_loops (&loop, &body1);
2831 gfc_add_block_to_block (&body, &loop.pre);
2832 gfc_add_block_to_block (&body, &loop.post);
2834 gfc_cleanup_loop (&loop);
2835 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2836 as tree nodes in SS may not be valid in different scope. */
2839 tmp1 = gfc_finish_block (&body);
2840 /* If the WHERE construct is inside FORALL, fill the full temporary. */
2841 if (nested_forall_info != NULL)
2842 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1, 1);
2844 gfc_add_expr_to_block (block, tmp1);
2848 /* Translate an assignment statement in a WHERE statement or construct
2849 statement. The MASK expression is used to control which elements
2850 of EXPR1 shall be assigned. The sense of MASK is specified by
2854 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
2855 tree mask, bool invert,
2856 tree count1, tree count2)
2861 gfc_ss *lss_section;
2868 tree index, maskexpr;
2871 /* TODO: handle this special case.
2872 Special case a single function returning an array. */
2873 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
2875 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
2881 /* Assignment of the form lhs = rhs. */
2882 gfc_start_block (&block);
2884 gfc_init_se (&lse, NULL);
2885 gfc_init_se (&rse, NULL);
2888 lss = gfc_walk_expr (expr1);
2891 /* In each where-assign-stmt, the mask-expr and the variable being
2892 defined shall be arrays of the same shape. */
2893 gcc_assert (lss != gfc_ss_terminator);
2895 /* The assignment needs scalarization. */
2898 /* Find a non-scalar SS from the lhs. */
2899 while (lss_section != gfc_ss_terminator
2900 && lss_section->type != GFC_SS_SECTION)
2901 lss_section = lss_section->next;
2903 gcc_assert (lss_section != gfc_ss_terminator);
2905 /* Initialize the scalarizer. */
2906 gfc_init_loopinfo (&loop);
2909 rss = gfc_walk_expr (expr2);
2910 if (rss == gfc_ss_terminator)
2912 /* The rhs is scalar. Add a ss for the expression. */
2913 rss = gfc_get_ss ();
2914 rss->next = gfc_ss_terminator;
2915 rss->type = GFC_SS_SCALAR;
2919 /* Associate the SS with the loop. */
2920 gfc_add_ss_to_loop (&loop, lss);
2921 gfc_add_ss_to_loop (&loop, rss);
2923 /* Calculate the bounds of the scalarization. */
2924 gfc_conv_ss_startstride (&loop);
2926 /* Resolve any data dependencies in the statement. */
2927 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
2929 /* Setup the scalarizing loops. */
2930 gfc_conv_loop_setup (&loop);
2932 /* Setup the gfc_se structures. */
2933 gfc_copy_loopinfo_to_se (&lse, &loop);
2934 gfc_copy_loopinfo_to_se (&rse, &loop);
/* If dependency resolution created a temporary, loop.temp_ss is non-NULL
   and the lhs is assigned through the temporary (mark value 3); otherwise
   the lhs is written directly (mark value 1).  */
2937 gfc_mark_ss_chain_used (rss, 1);
2938 if (loop.temp_ss == NULL)
2941 gfc_mark_ss_chain_used (lss, 1);
2945 lse.ss = loop.temp_ss;
2946 gfc_mark_ss_chain_used (lss, 3);
2947 gfc_mark_ss_chain_used (loop.temp_ss, 3);
2950 /* Start the scalarized loop body. */
2951 gfc_start_scalarized_body (&loop, &body);
2953 /* Translate the expression. */
2954 gfc_conv_expr (&rse, expr2);
2955 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
2957 gfc_conv_tmp_array_ref (&lse);
2958 gfc_advance_se_ss_chain (&lse);
2961 gfc_conv_expr (&lse, expr1);
2963 /* Form the mask expression according to the mask. */
2965 maskexpr = gfc_build_array_ref (mask, index);
/* INVERT selects the opposite sense of the mask (used for ELSEWHERE).  */
2967 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
2969 /* Use the scalar assignment as is. */
2970 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts.type);
/* Guard the element assignment with the mask value.  */
2971 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
2973 gfc_add_expr_to_block (&body, tmp);
2975 if (lss == gfc_ss_terminator)
2977 /* Increment count1. */
2978 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2979 count1, gfc_index_one_node);
2980 gfc_add_modify_expr (&body, count1, tmp);
2982 /* Use the scalar assignment as is. */
2983 gfc_add_block_to_block (&block, &body);
2987 gcc_assert (lse.ss == gfc_ss_terminator
2988 && rse.ss == gfc_ss_terminator);
2990 if (loop.temp_ss != NULL)
2992 /* Increment count1 before finish the main body of a scalarized
2994 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2995 count1, gfc_index_one_node);
2996 gfc_add_modify_expr (&body, count1, tmp);
2997 gfc_trans_scalarized_loop_boundary (&loop, &body);
2999 /* We need to copy the temporary to the actual lhs. */
3000 gfc_init_se (&lse, NULL);
3001 gfc_init_se (&rse, NULL);
3002 gfc_copy_loopinfo_to_se (&lse, &loop);
3003 gfc_copy_loopinfo_to_se (&rse, &loop);
3005 rse.ss = loop.temp_ss;
3008 gfc_conv_tmp_array_ref (&rse);
3009 gfc_advance_se_ss_chain (&rse);
3010 gfc_conv_expr (&lse, expr1);
3012 gcc_assert (lse.ss == gfc_ss_terminator
3013 && rse.ss == gfc_ss_terminator);
3015 /* Form the mask expression according to the mask tree list. */
3017 maskexpr = gfc_build_array_ref (mask, index)
3019 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3022 /* Use the scalar assignment as is. */
3023 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts.type);
3024 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3025 gfc_add_expr_to_block (&body, tmp);
3027 /* Increment count2. */
3028 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3029 count2, gfc_index_one_node);
3030 gfc_add_modify_expr (&body, count2, tmp);
3034 /* Increment count1. */
3035 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3036 count1, gfc_index_one_node);
3037 gfc_add_modify_expr (&body, count1, tmp);
3040 /* Generate the copying loops. */
3041 gfc_trans_scalarizing_loops (&loop, &body);
3043 /* Wrap the whole thing up. */
3044 gfc_add_block_to_block (&block, &loop.pre);
3045 gfc_add_block_to_block (&block, &loop.post);
3046 gfc_cleanup_loop (&loop);
3049 return gfc_finish_block (&block);
3053 /* Translate the WHERE construct or statement.
3054 This function can be called iteratively to translate the nested WHERE
3055 construct or statement.
3056 MASK is the control mask. */
3059 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3060 forall_info * nested_forall_info, stmtblock_t * block)
3062 stmtblock_t inner_size_body;
3063 tree inner_size, size;
3071 tree count1, count2;
/* PCMASK/PPMASK hold the allocated pointers so the temporaries can be
   freed at the end; CMASK/PMASK are the corresponding mask arrays.  */
3075 tree pcmask = NULL_TREE;
3076 tree ppmask = NULL_TREE;
3077 tree cmask = NULL_TREE;
3078 tree pmask = NULL_TREE;
3080 /* the WHERE statement or the WHERE construct statement. */
3081 cblock = code->block;
3083 /* As the mask array can be very big, prefer compact boolean types. */
3084 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3086 /* Determine which temporary masks are needed. */
3089 /* One clause: No ELSEWHEREs. */
3090 need_cmask = (cblock->next != 0);
3093 else if (cblock->block->block)
3095 /* Three or more clauses: Conditional ELSEWHEREs. */
3099 else if (cblock->next)
3101 /* Two clauses, the first non-empty. */
3103 need_pmask = (mask != NULL_TREE
3104 && cblock->block->next != 0);
3106 else if (!cblock->block->next)
3108 /* Two clauses, both empty. */
3112 /* Two clauses, the first empty, the second non-empty. */
3115 need_cmask = (cblock->block->expr != 0);
3124 if (need_cmask || need_pmask)
3126 /* Calculate the size of temporary needed by the mask-expr. */
3127 gfc_init_block (&inner_size_body);
3128 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3129 &inner_size_body, &lss, &rss);
3131 /* Calculate the total size of temporary needed. */
3132 size = compute_overall_iter_number (nested_forall_info, inner_size,
3133 &inner_size_body, block);
3135 /* Allocate temporary for WHERE mask if needed. */
3137 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3140 /* Allocate temporary for !mask if needed. */
3142 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3148 /* Each time around this loop, the where clause is conditional
3149 on the value of mask and invert, which are updated at the
3150 bottom of the loop. */
3152 /* Has mask-expr. */
3155 /* Ensure that the WHERE mask will be evaluated exactly once.
3156 If there are no statements in this WHERE/ELSEWHERE clause,
3157 then we don't need to update the control mask (cmask).
3158 If this is the last clause of the WHERE construct, then
3159 we don't need to update the pending control mask (pmask). */
3161 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3163 cblock->next ? cmask : NULL_TREE,
3164 cblock->block ? pmask : NULL_TREE,
3167 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3169 (cblock->next || cblock->block)
3170 ? cmask : NULL_TREE,
3171 NULL_TREE, mask_type, block);
3175 /* It's a final elsewhere-stmt. No mask-expr is present. */
3179 /* The body of this where clause are controlled by cmask with
3180 sense specified by invert. */
3182 /* Get the assignment statement of a WHERE statement, or the first
3183 statement in where-body-construct of a WHERE construct. */
3184 cnext = cblock->next;
3189 /* WHERE assignment statement. */
3191 expr1 = cnext->expr;
3192 expr2 = cnext->expr2;
/* Inside a FORALL, array-assignment dependencies may force a
   temporary; gfc_trans_assign_need_temp handles that path.  */
3193 if (nested_forall_info != NULL)
3195 need_temp = gfc_check_dependency (expr1, expr2, 0);
3197 gfc_trans_assign_need_temp (expr1, expr2,
3199 nested_forall_info, block);
3202 /* Variables to control maskexpr. */
3203 count1 = gfc_create_var (gfc_array_index_type, "count1");
3204 count2 = gfc_create_var (gfc_array_index_type, "count2");
3205 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3206 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3208 tmp = gfc_trans_where_assign (expr1, expr2,
3212 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3214 gfc_add_expr_to_block (block, tmp);
3219 /* Variables to control maskexpr. */
3220 count1 = gfc_create_var (gfc_array_index_type, "count1");
3221 count2 = gfc_create_var (gfc_array_index_type, "count2");
3222 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3223 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3225 tmp = gfc_trans_where_assign (expr1, expr2,
3228 gfc_add_expr_to_block (block, tmp);
3233 /* WHERE or WHERE construct is part of a where-body-construct. */
/* Recurse: nested WHERE is controlled by our current cmask/invert.  */
3235 gfc_trans_where_2 (cnext, cmask, invert,
3236 nested_forall_info, block);
3243 /* The next statement within the same where-body-construct. */
3244 cnext = cnext->next;
3246 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3247 cblock = cblock->block;
3248 if (mask == NULL_TREE)
3250 /* If we're the initial WHERE, we can simply invert the sense
3251 of the current mask to obtain the "mask" for the remaining
3258 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3264 /* If we allocated a pending mask array, deallocate it now. */
3267 tree args = gfc_chainon_list (NULL_TREE, ppmask);
3268 tmp = build_function_call_expr (gfor_fndecl_internal_free, args);
3269 gfc_add_expr_to_block (block, tmp);
3272 /* If we allocated a current mask array, deallocate it now. */
3275 tree args = gfc_chainon_list (NULL_TREE, pcmask);
3276 tmp = build_function_call_expr (gfor_fndecl_internal_free, args);
3277 gfc_add_expr_to_block (block, tmp);
3281 /* Translate a simple WHERE construct or statement without dependencies.
3282 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3283 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3284 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
3287 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3289 stmtblock_t block, body;
3290 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3291 tree tmp, cexpr, tstmt, estmt;
3292 gfc_ss *css, *tdss, *tsss;
3293 gfc_se cse, tdse, tsse, edse, esse;
/* Extract the single assignment from each clause: t* = "then" (WHERE),
   e* = "else" (ELSEWHERE, may be absent).  */
3298 cond = cblock->expr;
3299 tdst = cblock->next->expr;
3300 tsrc = cblock->next->expr2;
3301 edst = eblock ? eblock->next->expr : NULL;
3302 esrc = eblock ? eblock->next->expr2 : NULL;
3304 gfc_start_block (&block);
3305 gfc_init_loopinfo (&loop);
3307 /* Handle the condition. */
3308 gfc_init_se (&cse, NULL);
3309 css = gfc_walk_expr (cond);
3310 gfc_add_ss_to_loop (&loop, css);
3312 /* Handle the then-clause. */
3313 gfc_init_se (&tdse, NULL);
3314 gfc_init_se (&tsse, NULL);
3315 tdss = gfc_walk_expr (tdst);
3316 tsss = gfc_walk_expr (tsrc);
/* A scalar rhs still needs a scalar SS entry in the loop.  */
3317 if (tsss == gfc_ss_terminator)
3319 tsss = gfc_get_ss ();
3320 tsss->next = gfc_ss_terminator;
3321 tsss->type = GFC_SS_SCALAR;
3324 gfc_add_ss_to_loop (&loop, tdss);
3325 gfc_add_ss_to_loop (&loop, tsss);
3329 /* Handle the else clause. */
3330 gfc_init_se (&edse, NULL);
3331 gfc_init_se (&esse, NULL);
3332 edss = gfc_walk_expr (edst);
3333 esss = gfc_walk_expr (esrc);
3334 if (esss == gfc_ss_terminator)
3336 esss = gfc_get_ss ();
3337 esss->next = gfc_ss_terminator;
3338 esss->type = GFC_SS_SCALAR;
3341 gfc_add_ss_to_loop (&loop, edss);
3342 gfc_add_ss_to_loop (&loop, esss);
3345 gfc_conv_ss_startstride (&loop);
3346 gfc_conv_loop_setup (&loop);
3348 gfc_mark_ss_chain_used (css, 1);
3349 gfc_mark_ss_chain_used (tdss, 1);
3350 gfc_mark_ss_chain_used (tsss, 1);
3353 gfc_mark_ss_chain_used (edss, 1);
3354 gfc_mark_ss_chain_used (esss, 1);
3357 gfc_start_scalarized_body (&loop, &body);
3359 gfc_copy_loopinfo_to_se (&cse, &loop);
3360 gfc_copy_loopinfo_to_se (&tdse, &loop);
3361 gfc_copy_loopinfo_to_se (&tsse, &loop);
3367 gfc_copy_loopinfo_to_se (&edse, &loop);
3368 gfc_copy_loopinfo_to_se (&esse, &loop);
3373 gfc_conv_expr (&cse, cond);
3374 gfc_add_block_to_block (&body, &cse.pre);
3377 gfc_conv_expr (&tsse, tsrc);
3378 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3380 gfc_conv_tmp_array_ref (&tdse);
3381 gfc_advance_se_ss_chain (&tdse);
3384 gfc_conv_expr (&tdse, tdst);
3388 gfc_conv_expr (&esse, esrc);
3389 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3391 gfc_conv_tmp_array_ref (&edse);
3392 gfc_advance_se_ss_chain (&edse);
3395 gfc_conv_expr (&edse, edst);
/* Emit a per-element conditional: cond ? then-assign : else-assign
   (empty statement when there is no ELSEWHERE clause).  */
3398 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts.type);
3399 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts.type)
3400 : build_empty_stmt ();
3401 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3402 gfc_add_expr_to_block (&body, tmp);
3403 gfc_add_block_to_block (&body, &cse.post);
3405 gfc_trans_scalarizing_loops (&loop, &body);
3406 gfc_add_block_to_block (&block, &loop.pre);
3407 gfc_add_block_to_block (&block, &loop.post);
3408 gfc_cleanup_loop (&loop);
3410 return gfc_finish_block (&block);
3413 /* As the WHERE or WHERE construct statement can be nested, we call
3414 gfc_trans_where_2 to do the translation, and pass the initial
3415 NULL values for both the control mask and the pending control mask. */
3418 gfc_trans_where (gfc_code * code)
3424 cblock = code->block;
/* Fast paths: a WHERE whose body is exactly one assignment (and an
   optional single-assignment ELSEWHERE) can skip the mask temporaries
   when dependency analysis proves them unnecessary.  */
3426 && cblock->next->op == EXEC_ASSIGN
3427 && !cblock->next->next)
3429 eblock = cblock->block;
3432 /* A simple "WHERE (cond) x = y" statement or block is
3433 dependence free if cond is not dependent upon writing x,
3434 and the source y is unaffected by the destination x. */
3435 if (!gfc_check_dependency (cblock->next->expr,
3437 && !gfc_check_dependency (cblock->next->expr,
3438 cblock->next->expr2, 0))
3439 return gfc_trans_where_3 (cblock, NULL);
3441 else if (!eblock->expr
3444 && eblock->next->op == EXEC_ASSIGN
3445 && !eblock->next->next)
3447 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3448 block is dependence free if cond is not dependent on writes
3449 to x1 and x2, y1 is not dependent on writes to x2, and y2
3450 is not dependent on writes to x1, and both y's are not
3451 dependent upon their own x's. */
3452 if (!gfc_check_dependency(cblock->next->expr,
3454 && !gfc_check_dependency(eblock->next->expr,
3456 && !gfc_check_dependency(cblock->next->expr,
3457 eblock->next->expr2, 0)
3458 && !gfc_check_dependency(eblock->next->expr,
3459 cblock->next->expr2, 0)
3460 && !gfc_check_dependency(cblock->next->expr,
3461 cblock->next->expr2, 0)
3462 && !gfc_check_dependency(eblock->next->expr,
3463 eblock->next->expr2, 0))
3464 return gfc_trans_where_3 (cblock, eblock);
/* General case: full translation with explicit mask temporaries;
   NULL/false/NULL = no control mask, no inversion, not inside FORALL.  */
3468 gfc_start_block (&block);
3470 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3472 return gfc_finish_block (&block);
3476 /* CYCLE a DO loop. The label decl has already been created by
3477 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
3478 node at the head of the loop. We must mark the label as used. */
3481 gfc_trans_cycle (gfc_code * code)
/* whichloop->backend_decl is a TREE_LIST: TREE_PURPOSE holds the cycle
   label, TREE_VALUE the exit label (see gfc_trans_exit).  */
3485 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
/* Mark the label as used so it is not discarded.  */
3486 TREE_USED (cycle_label) = 1;
3487 return build1_v (GOTO_EXPR, cycle_label);
3491 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3492 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3496 gfc_trans_exit (gfc_code * code)
/* Counterpart of gfc_trans_cycle: the exit label lives in TREE_VALUE
   of the loop's backend_decl TREE_LIST.  */
3500 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
/* Mark the label as used so it is not discarded.  */
3501 TREE_USED (exit_label) = 1;
3502 return build1_v (GOTO_EXPR, exit_label);
3506 /* Translate the ALLOCATE statement. */
3509 gfc_trans_allocate (gfc_code * code)
3521 if (!code->ext.alloc_list)
3524 gfc_start_block (&block);
/* With STAT=, allocate through a local status variable and branch to
   ERROR_LABEL on failure instead of aborting.  */
3528 tree gfc_int4_type_node = gfc_get_int_type (4);
3530 stat = gfc_create_var (gfc_int4_type_node, "stat");
3531 pstat = build_fold_addr_expr (stat);
3533 error_label = gfc_build_label_decl (NULL_TREE);
3534 TREE_USED (error_label) = 1;
/* No STAT=: pass a zero status pointer to the library.  */
3538 pstat = integer_zero_node;
3539 stat = error_label = NULL_TREE;
3543 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3547 gfc_init_se (&se, NULL);
3548 gfc_start_block (&se.pre);
3550 se.want_pointer = 1;
3551 se.descriptor_only = 1;
3552 gfc_conv_expr (&se, expr);
/* gfc_array_allocate handles array objects; it returns false for
   scalars/derived types, which go through the generic path below.  */
3554 if (!gfc_array_allocate (&se, expr, pstat))
3556 /* A scalar or derived type. */
3559 val = gfc_create_var (ppvoid_type_node, "ptr");
3560 tmp = gfc_build_addr_expr (ppvoid_type_node, se.expr);
3561 gfc_add_modify_expr (&se.pre, val, tmp);
3563 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
/* Deferred-length character: fall back to the runtime length.  */
3565 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3566 tmp = se.string_length;
3568 parm = gfc_chainon_list (NULL_TREE, val);
3569 parm = gfc_chainon_list (parm, tmp);
3570 parm = gfc_chainon_list (parm, pstat);
3571 tmp = build_function_call_expr (gfor_fndecl_allocate, parm);
3572 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT=: if stat != 0 after the call, jump to the error label so
   remaining allocations are skipped.  */
3576 tmp = build1_v (GOTO_EXPR, error_label);
3577 parm = fold_build2 (NE_EXPR, boolean_type_node,
3578 stat, build_int_cst (TREE_TYPE (stat), 0));
3579 tmp = fold_build3 (COND_EXPR, void_type_node,
3580 parm, tmp, build_empty_stmt ());
3581 gfc_add_expr_to_block (&se.pre, tmp);
3585 tmp = gfc_finish_block (&se.pre);
3586 gfc_add_expr_to_block (&block, tmp);
3589 /* Assign the value to the status variable. */
3592 tmp = build1_v (LABEL_EXPR, error_label);
3593 gfc_add_expr_to_block (&block, tmp);
3595 gfc_init_se (&se, NULL);
3596 gfc_conv_expr_lhs (&se, code->expr);
3597 tmp = convert (TREE_TYPE (se.expr), stat);
3598 gfc_add_modify_expr (&block, se.expr, tmp);
3601 return gfc_finish_block (&block);
3605 /* Translate a DEALLOCATE statement.
3606 There are two cases within the for loop:
3607 (1) deallocate(a1, a2, a3) is translated into the following sequence
3608 _gfortran_deallocate(a1, 0B)
3609 _gfortran_deallocate(a2, 0B)
3610 _gfortran_deallocate(a3, 0B)
3611 where the STAT= variable is passed a NULL pointer.
3612 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
3614 _gfortran_deallocate(a1, &stat)
3615 astat = astat + stat
3616 _gfortran_deallocate(a2, &stat)
3617 astat = astat + stat
3618 _gfortran_deallocate(a3, &stat)
3619 astat = astat + stat
3620 In case (1), we simply return at the end of the for loop. In case (2)
3621 we set STAT= astat. */
3623 gfc_trans_deallocate (gfc_code * code)
3628 tree apstat, astat, parm, pstat, stat, tmp, type, var;
3631 gfc_start_block (&block);
3633 /* Set up the optional STAT= */
3636 tree gfc_int4_type_node = gfc_get_int_type (4);
3638 /* Variable used with the library call. */
3639 stat = gfc_create_var (gfc_int4_type_node, "stat");
3640 pstat = build_fold_addr_expr (stat);
3642 /* Running total of possible deallocation failures. */
3643 astat = gfc_create_var (gfc_int4_type_node, "astat");
3644 apstat = build_fold_addr_expr (astat);
3646 /* Initialize astat to 0. */
3647 gfc_add_modify_expr (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
/* No STAT=: the library receives a NULL status pointer (case 1 in the
   function comment).  */
3651 pstat = apstat = null_pointer_node;
3652 stat = astat = NULL_TREE;
3655 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3658 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3660 gfc_init_se (&se, NULL);
3661 gfc_start_block (&se.pre);
3663 se.want_pointer = 1;
3664 se.descriptor_only = 1;
3665 gfc_conv_expr (&se, expr);
/* Arrays go through gfc_array_deallocate; scalars/derived types call
   the runtime _gfortran_deallocate directly.  */
3668 tmp = gfc_array_deallocate (se.expr, pstat);
3671 type = build_pointer_type (TREE_TYPE (se.expr));
3672 var = gfc_create_var (type, "ptr");
3673 tmp = gfc_build_addr_expr (type, se.expr);
3674 gfc_add_modify_expr (&se.pre, var, tmp);
3676 parm = gfc_chainon_list (NULL_TREE, var);
3677 parm = gfc_chainon_list (parm, pstat);
3678 tmp = build_function_call_expr (gfor_fndecl_deallocate, parm);
3681 gfc_add_expr_to_block (&se.pre, tmp);
3683 /* Keep track of the number of failed deallocations by adding stat
3684 of the last deallocation to the running total. */
3687 apstat = build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
3688 gfc_add_modify_expr (&se.pre, astat, apstat);
3691 tmp = gfc_finish_block (&se.pre);
3692 gfc_add_expr_to_block (&block, tmp);
3696 /* Assign the value to the status variable. */
3699 gfc_init_se (&se, NULL);
3700 gfc_conv_expr_lhs (&se, code->expr);
3701 tmp = convert (TREE_TYPE (se.expr), astat);
3702 gfc_add_modify_expr (&block, se.expr, tmp);
3705 return gfc_finish_block (&block);