1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
40 #include "dependency.h"
42 typedef struct iter_info
/* NOTE(review): the listing elides lines here (original numbering jumps
   42 -> 48); the per-iterator fields of this struct are not visible in
   this view -- verify against upstream trans-stmt.c before editing.  */
48 struct iter_info *next;
52 typedef struct forall_info
/* Link to the enclosing FORALL nest level (elided fields above).  */
59 struct forall_info *prev_nest;
/* Forward declaration: gfc_trans_where_2 is used before its definition
   further down the file.  */
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
66 /* Translate a F95 label number to a LABEL_EXPR. */
69 gfc_trans_label_here (gfc_code * code)
/* Emit a LABEL_EXPR for the statement label stored in code->here.  */
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables. For variables in common blocks this
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only variables that have been the target of an ASSIGN statement carry
   the auxiliary length/address decls this function exposes.  */
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deals with variable in common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deals with dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
92 /* Translate a label assignment statement. */
95 gfc_trans_label_assign (gfc_code * code)
104 /* Start a new block. */
105 gfc_init_se (&se, NULL);
106 gfc_start_block (&se.pre);
107 gfc_conv_label_variable (&se, code->expr);
/* len/addr are the auxiliary decls located by gfc_conv_label_variable.  */
109 len = GFC_DECL_STRING_LEN (se.expr);
110 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
112 label_tree = gfc_get_label_decl (code->label);
114 if (code->label->defined == ST_LABEL_TARGET)
116 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
/* A stored length of -1 marks the assigned label as a branch target;
   gfc_trans_goto checks for exactly this value at runtime.  */
117 len_tree = integer_minus_one_node;
/* NOTE(review): the else-branch opener is elided from this listing
   (numbering jumps 117 -> 121); the lines below handle the FORMAT-label
   case.  */
121 gfc_expr *format = code->label->format;
123 label_len = format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
125 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
126 format->value.character.string);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
/* Store the computed length and address into the auxiliary variables.  */
130 gfc_add_modify (&se.pre, len, len_tree);
131 gfc_add_modify (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
136 /* Translate a GOTO statement. */
139 gfc_trans_goto (gfc_code * code)
141 locus loc = code->loc;
/* An ordinary (non-assigned) GOTO is just a jump to a known label.  */
147 if (code->label != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
/* Assigned GOTO: branch through the address stored by a prior ASSIGN.  */
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr);
/* The auxiliary length is -1 iff the stored label is a branch target
   (see gfc_trans_label_assign); trap at runtime otherwise.  */
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
158 "Assigned label is not a target label");
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
/* NOTE(review): lines 161-164 are elided in this listing; presumably the
   branch below is taken when no label list was given -- confirm upstream.  */
165 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
166 gfc_add_expr_to_block (&se.pre, target);
167 return gfc_finish_block (&se.pre);
170 /* Check the label list. */
/* Compare the stored address against each listed label and jump when it
   matches (loop opener elided from this listing).  */
173 target = gfc_get_label_decl (code->label);
174 tmp = gfc_build_addr_expr (pvoid_type_node, target);
175 tmp = fold_build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
176 tmp = build3_v (COND_EXPR, tmp,
177 fold_build1 (GOTO_EXPR, void_type_node, target),
178 build_empty_stmt ());
179 gfc_add_expr_to_block (&se.pre, tmp);
182 while (code != NULL);
/* Fell off the list without matching: unconditional runtime error.  */
183 gfc_trans_runtime_check (true, false, boolean_true_node, &se.pre, &loc,
184 "Assigned label is not in the list");
186 return gfc_finish_block (&se.pre);
190 /* Translate an ENTRY statement. Just adds a label for this entry point. */
192 gfc_trans_entry (gfc_code * code)
/* The ENTRY point is represented simply by its label.  */
194 return build1_v (LABEL_EXPR, code->ext.entry->label);
198 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
199 elemental subroutines. Make temporaries for output arguments if any such
200 dependencies are found. Output arguments are chosen because internal_unpack
201 can be used, as is, to copy the result back to the variable. */
203 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
204 gfc_symbol * sym, gfc_actual_arglist * arg,
205 gfc_dep_check check_variable)
207 gfc_actual_arglist *arg0;
209 gfc_formal_arglist *formal;
210 gfc_loopinfo tmp_loop;
/* Nothing to do when there is no scalarization chain.  */
221 if (loopse->ss == NULL)
226 formal = sym->formal;
228 /* Loop over all the arguments testing for dependencies. */
229 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
235 /* Obtain the info structure for the current argument. */
237 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
241 info = &ss->data.info;
245 /* If there is a dependency, create a temporary and use it
246 instead of the variable. */
247 fsym = formal ? formal->sym : NULL;
/* Only non-INTENT(IN) variable arguments can be clobbered, so only they
   need dependency checking.  */
248 if (e->expr_type == EXPR_VARIABLE
250 && fsym->attr.intent != INTENT_IN
251 && gfc_check_fncall_dependency (e, fsym->attr.intent,
252 sym, arg0, check_variable))
254 tree initial, temptype;
255 stmtblock_t temp_post;
257 /* Make a local loopinfo for the temporary creation, so that
258 none of the other ss->info's have to be renormalized. */
259 gfc_init_loopinfo (&tmp_loop);
260 for (n = 0; n < info->dimen; n++)
262 tmp_loop.to[n] = loopse->loop->to[n];
263 tmp_loop.from[n] = loopse->loop->from[n];
264 tmp_loop.order[n] = loopse->loop->order[n];
267 /* Obtain the argument descriptor for unpacking. */
268 gfc_init_se (&parmse, NULL);
269 parmse.want_pointer = 1;
270 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
271 gfc_add_block_to_block (&se->pre, &parmse.pre);
273 /* If we've got INTENT(INOUT), initialize the array temporary with
274 a copy of the values. */
275 if (fsym->attr.intent == INTENT_INOUT)
276 initial = parmse.expr;
280 /* Find the type of the temporary to create; we don't use the type
281 of e itself as this breaks for subcomponent-references in e (where
282 the type of e is that of the final reference, but parmse.expr's
283 type corresponds to the full derived-type). */
284 /* TODO: Fix this somehow so we don't need a temporary of the whole
285 array but instead only the components referenced. */
286 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
287 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
288 temptype = TREE_TYPE (temptype);
289 temptype = gfc_get_element_type (temptype);
291 /* Generate the temporary. Cleaning up the temporary should be the
292 very last thing done, so we add the code to a new block and add it
293 to se->post as last instructions. */
294 size = gfc_create_var (gfc_array_index_type, NULL);
295 data = gfc_create_var (pvoid_type_node, NULL);
296 gfc_init_block (&temp_post);
297 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
298 &tmp_loop, info, temptype,
302 gfc_add_modify (&se->pre, size, tmp);
303 tmp = fold_convert (pvoid_type_node, info->data);
304 gfc_add_modify (&se->pre, data, tmp);
306 /* Calculate the offset for the temporary. */
/* offset = -sum (from[n] * stride[n]), accumulated dimension by
   dimension below.  */
307 offset = gfc_index_zero_node;
308 for (n = 0; n < info->dimen; n++)
310 tmp = gfc_conv_descriptor_stride (info->descriptor,
312 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
313 loopse->loop->from[n], tmp);
314 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
317 info->offset = gfc_create_var (gfc_array_index_type, NULL);
318 gfc_add_modify (&se->pre, info->offset, offset);
320 /* Copy the result back using unpack. */
321 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
322 gfc_add_expr_to_block (&se->post, tmp);
324 /* parmse.pre is already added above. */
325 gfc_add_block_to_block (&se->post, &parmse.post);
326 gfc_add_block_to_block (&se->post, &temp_post);
332 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
335 gfc_trans_call (gfc_code * code, bool dependency_check)
339 int has_alternate_specifier;
340 gfc_dep_check check_variable;
342 /* A CALL starts a new block because the actual arguments may have to
343 be evaluated first. */
344 gfc_init_se (&se, NULL);
345 gfc_start_block (&se.pre);
347 gcc_assert (code->resolved_sym);
349 ss = gfc_ss_terminator;
/* Walk the arguments only for elemental subroutines; a non-empty chain
   means at least one argument is array valued.  */
350 if (code->resolved_sym->attr.elemental)
351 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
353 /* Is not an elemental subroutine call with array valued arguments. */
354 if (ss == gfc_ss_terminator)
357 /* Translate the call. */
358 has_alternate_specifier
359 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
362 /* A subroutine without side-effect, by definition, does nothing! */
363 TREE_SIDE_EFFECTS (se.expr) = 1;
365 /* Chain the pieces together and return the block. */
/* An alternate return is implemented by assigning the returned specifier
   index to the variable selected on by the following SELECT block.  */
366 if (has_alternate_specifier)
368 gfc_code *select_code;
370 select_code = code->next;
371 gcc_assert(select_code->op == EXEC_SELECT);
372 sym = select_code->expr->symtree->n.sym;
373 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
374 if (sym->backend_decl == NULL)
375 sym->backend_decl = gfc_get_symbol_decl (sym);
376 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
379 gfc_add_expr_to_block (&se.pre, se.expr);
381 gfc_add_block_to_block (&se.pre, &se.post);
386 /* An elemental subroutine call with array valued arguments has
394 /* gfc_walk_elemental_function_args renders the ss chain in the
395 reverse order to the actual argument order. */
396 ss = gfc_reverse_ss (ss);
398 /* Initialize the loop. */
399 gfc_init_se (&loopse, NULL);
400 gfc_init_loopinfo (&loop);
401 gfc_add_ss_to_loop (&loop, ss);
403 gfc_conv_ss_startstride (&loop);
404 /* TODO: gfc_conv_loop_setup generates a temporary for vector
405 subscripts. This could be prevented in the elemental case
406 as temporaries are handled separatedly
407 (below in gfc_conv_elemental_dependencies). */
408 gfc_conv_loop_setup (&loop, &code->expr->where);
409 gfc_mark_ss_chain_used (ss, 1);
411 /* Convert the arguments, checking for dependencies. */
412 gfc_copy_loopinfo_to_se (&loopse, &loop);
415 /* For operator assignment, do dependency checking. */
416 if (dependency_check)
417 check_variable = ELEM_CHECK_VARIABLE;
419 check_variable = ELEM_DONT_CHECK_VARIABLE;
421 gfc_init_se (&depse, NULL);
422 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
423 code->ext.actual, check_variable);
425 gfc_add_block_to_block (&loop.pre, &depse.pre);
426 gfc_add_block_to_block (&loop.post, &depse.post);
428 /* Generate the loop body. */
429 gfc_start_scalarized_body (&loop, &body);
430 gfc_init_block (&block);
432 /* Add the subroutine call to the block. */
433 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
435 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
437 gfc_add_block_to_block (&block, &loopse.pre);
438 gfc_add_block_to_block (&block, &loopse.post);
440 /* Finish up the loop block and the loop. */
441 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
442 gfc_trans_scalarizing_loops (&loop, &body);
443 gfc_add_block_to_block (&se.pre, &loop.pre);
444 gfc_add_block_to_block (&se.pre, &loop.post);
445 gfc_add_block_to_block (&se.pre, &se.post);
446 gfc_cleanup_loop (&loop);
449 return gfc_finish_block (&se.pre);
453 /* Translate the RETURN statement. */
456 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
464 /* If code->expr is not NULL, this return statement must appear
465 in a subroutine and current_fake_result_decl has already
468 result = gfc_get_fake_result_decl (NULL, 0);
/* An alternate return (RETURN expr) with no matching '*' dummy argument
   degenerates to a plain jump to the return label.  */
471 gfc_warning ("An alternate return at %L without a * dummy argument",
473 return build1_v (GOTO_EXPR, gfc_get_return_label ());
476 /* Start a new block for this statement. */
477 gfc_init_se (&se, NULL);
478 gfc_start_block (&se.pre);
/* Evaluate the alternate-return expression and assign it to the fake
   result before jumping to the common return label.  */
480 gfc_conv_expr (&se, code->expr);
482 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
483 fold_convert (TREE_TYPE (result), se.expr));
484 gfc_add_expr_to_block (&se.pre, tmp);
486 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
487 gfc_add_expr_to_block (&se.pre, tmp);
488 gfc_add_block_to_block (&se.pre, &se.post);
489 return gfc_finish_block (&se.pre);
/* Plain RETURN: just branch to the function's return label.  */
492 return build1_v (GOTO_EXPR, gfc_get_return_label ());
496 /* Translate the PAUSE statement. We have to translate this statement
497 to a runtime library call. */
500 gfc_trans_pause (gfc_code * code)
502 tree gfc_int4_type_node = gfc_get_int_type (4);
506 /* Start a new block for this statement. */
507 gfc_init_se (&se, NULL);
508 gfc_start_block (&se.pre);
/* No stop-expression: call the numeric library entry point with the
   (possibly default) stop code.  */
511 if (code->expr == NULL)
513 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
514 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
/* Character stop-expression: pass the string and its length to the
   string variant of the library routine.  */
518 gfc_conv_expr_reference (&se, code->expr);
519 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
520 se.expr, se.string_length);
523 gfc_add_expr_to_block (&se.pre, tmp);
525 gfc_add_block_to_block (&se.pre, &se.post);
527 return gfc_finish_block (&se.pre);
531 /* Translate the STOP statement. We have to translate this statement
532 to a runtime library call. */
535 gfc_trans_stop (gfc_code * code)
536 tree gfc_int4_type_node = gfc_get_int_type (4);
537 tree gfc_int4_type_node = gfc_get_int_type (4);
541 /* Start a new block for this statement. */
542 gfc_init_se (&se, NULL);
543 gfc_start_block (&se.pre);
/* Mirrors gfc_trans_pause: numeric stop code vs. character stop string,
   dispatched to the matching library routine.  */
546 if (code->expr == NULL)
548 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
549 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
553 gfc_conv_expr_reference (&se, code->expr);
554 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
555 se.expr, se.string_length);
558 gfc_add_expr_to_block (&se.pre, tmp);
560 gfc_add_block_to_block (&se.pre, &se.post);
562 return gfc_finish_block (&se.pre);
566 /* Generate GENERIC for the IF construct. This function also deals with
567 the simple IF statement, because the front end translates the IF
568 statement into an IF construct.
600 where COND_S is the simplified version of the predicate. PRE_COND_S
601 are the pre side-effects produced by the translation of the
603 We need to build the chain recursively otherwise we run into
604 problems with folding incomplete statements. */
607 gfc_trans_if_1 (gfc_code * code)
612 /* Check for an unconditional ELSE clause. */
614 return gfc_trans_code (code->next);
616 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
617 gfc_init_se (&if_se, NULL);
618 gfc_start_block (&if_se.pre);
620 /* Calculate the IF condition expression. */
621 gfc_conv_expr_val (&if_se, code->expr);
623 /* Translate the THEN clause. */
624 stmt = gfc_trans_code (code->next);
626 /* Translate the ELSE clause. */
/* Recursion handles ELSE IF chains: each code->block is the next
   clause, translated as a nested IF.  */
628 elsestmt = gfc_trans_if_1 (code->block);
630 elsestmt = build_empty_stmt ();
632 /* Build the condition expression and add it to the condition block. */
633 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
635 gfc_add_expr_to_block (&if_se.pre, stmt);
637 /* Finish off this statement. */
638 return gfc_finish_block (&if_se.pre);
642 gfc_trans_if (gfc_code * code)
644 /* Ignore the top EXEC_IF, it only announces an IF construct. The
645 actual code we must translate is in code->block. */
647 return gfc_trans_if_1 (code->block);
651 /* Translate an arithmetic IF expression.
653 IF (cond) label1, label2, label3 translates to
665 An optimized version can be generated in case of equal labels.
666 E.g., if label1 is equal to label2, we can translate it to
675 gfc_trans_arithmetic_if (gfc_code * code)
683 /* Start a new block. */
684 gfc_init_se (&se, NULL);
685 gfc_start_block (&se.pre);
687 /* Pre-evaluate COND. */
/* The condition is used in up to two comparisons, so force a single
   evaluation to avoid duplicating side effects.  */
688 gfc_conv_expr_val (&se, code->expr);
689 se.expr = gfc_evaluate_now (se.expr, &se.pre);
691 /* Build something to compare with. */
692 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
/* label/label2/label3 are the <0, ==0 and >0 destinations; equal labels
   allow the simplified forms below.  */
694 if (code->label->value != code->label2->value)
696 /* If (cond < 0) take branch1 else take branch2.
697 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
698 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
699 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
701 if (code->label->value != code->label3->value)
702 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
704 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
706 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
709 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
711 if (code->label->value != code->label3->value
712 && code->label2->value != code->label3->value)
714 /* if (cond <= 0) take branch1 else take branch2. */
715 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
716 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
717 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
720 /* Append the COND_EXPR to the evaluation of COND, and return. */
721 gfc_add_expr_to_block (&se.pre, branch1);
722 return gfc_finish_block (&se.pre);
726 /* Translate the simple DO construct. This is where the loop variable has
727 integer type and step +-1. We can't use this in the general case
728 because integer overflow and floating point errors could give incorrect
730 We translate a do loop from:
732 DO dovar = from, to, step
738 [Evaluate loop bounds and step]
740 if ((step > 0) ? (dovar <= to) : (dovar => to))
746 cond = (dovar == to);
748 if (cond) goto end_label;
753 This helps the optimizers by avoiding the extra induction variable
754 used in the general case. */
757 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
758 tree from, tree to, tree step)
767 type = TREE_TYPE (dovar);
769 /* Initialize the DO variable: dovar = from. */
770 gfc_add_modify (pblock, dovar, from);
772 /* Cycle and exit statements are implemented with gotos. */
773 cycle_label = gfc_build_label_decl (NULL_TREE);
774 exit_label = gfc_build_label_decl (NULL_TREE);
776 /* Put the labels where they can be found later. See gfc_trans_do(). */
777 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
780 gfc_start_block (&body);
782 /* Main loop body. */
783 tmp = gfc_trans_code (code->block->next);
784 gfc_add_expr_to_block (&body, tmp);
786 /* Label for cycle statements (if needed). */
787 if (TREE_USED (cycle_label))
789 tmp = build1_v (LABEL_EXPR, cycle_label);
790 gfc_add_expr_to_block (&body, tmp);
793 /* Evaluate the loop condition. */
/* The exit test (dovar == to) is evaluated BEFORE the increment, so the
   comparison uses the pre-increment value and the final iteration does
   not rely on dovar overflowing past 'to'.  */
794 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
795 cond = gfc_evaluate_now (cond, &body);
797 /* Increment the loop variable. */
798 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
799 gfc_add_modify (&body, dovar, tmp);
802 tmp = build1_v (GOTO_EXPR, exit_label);
803 TREE_USED (exit_label) = 1;
804 tmp = fold_build3 (COND_EXPR, void_type_node,
805 cond, tmp, build_empty_stmt ());
806 gfc_add_expr_to_block (&body, tmp);
808 /* Finish the loop body. */
809 tmp = gfc_finish_block (&body);
810 tmp = build1_v (LOOP_EXPR, tmp);
812 /* Only execute the loop if the number of iterations is positive. */
/* step is known here to be the constant +1 or -1 (see the caller's
   special-case test), so its sign can be checked at compile time.  */
813 if (tree_int_cst_sgn (step) > 0)
814 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
816 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
817 tmp = fold_build3 (COND_EXPR, void_type_node,
818 cond, tmp, build_empty_stmt ());
819 gfc_add_expr_to_block (pblock, tmp);
821 /* Add the exit label. */
822 tmp = build1_v (LABEL_EXPR, exit_label);
823 gfc_add_expr_to_block (pblock, tmp);
825 return gfc_finish_block (pblock);
828 /* Translate the DO construct. This obviously is one of the most
829 important ones to get right with any compiler, but especially
832 We special case some loop forms as described in gfc_trans_simple_do.
833 For other cases we implement them with a separate loop count,
834 as described in the standard.
836 We translate a do loop from:
838 DO dovar = from, to, step
844 [evaluate loop bounds and step]
845 empty = (step > 0 ? to < from : to > from);
846 countm1 = (to - from) / step;
848 if (empty) goto exit_label;
854 if (countm1 ==0) goto exit_label;
859 countm1 is an unsigned integer. It is equal to the loop count minus one,
860 because the loop count itself can overflow. */
863 gfc_trans_do (gfc_code * code)
881 gfc_start_block (&block);
883 /* Evaluate all the expressions in the iterator. */
884 gfc_init_se (&se, NULL);
885 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
886 gfc_add_block_to_block (&block, &se.pre);
888 type = TREE_TYPE (dovar);
890 gfc_init_se (&se, NULL);
891 gfc_conv_expr_val (&se, code->ext.iterator->start);
892 gfc_add_block_to_block (&block, &se.pre);
893 from = gfc_evaluate_now (se.expr, &block);
895 gfc_init_se (&se, NULL);
896 gfc_conv_expr_val (&se, code->ext.iterator->end);
897 gfc_add_block_to_block (&block, &se.pre);
898 to = gfc_evaluate_now (se.expr, &block);
900 gfc_init_se (&se, NULL);
901 gfc_conv_expr_val (&se, code->ext.iterator->step);
902 gfc_add_block_to_block (&block, &se.pre);
903 step = gfc_evaluate_now (se.expr, &block);
905 /* Special case simple loops. */
/* Integer loops with step +-1 avoid the separate countm1 counter; see
   gfc_trans_simple_do.  */
906 if (TREE_CODE (type) == INTEGER_TYPE
907 && (integer_onep (step)
908 || tree_int_cst_equal (step, integer_minus_one_node)))
909 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
911 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
912 fold_convert (type, integer_zero_node));
/* countm1 is kept in the matching unsigned type so the loop count minus
   one cannot overflow (see the function comment).  */
914 if (TREE_CODE (type) == INTEGER_TYPE)
915 utype = unsigned_type_for (type);
917 utype = unsigned_type_for (gfc_array_index_type);
918 countm1 = gfc_create_var (utype, "countm1");
920 /* Cycle and exit statements are implemented with gotos. */
921 cycle_label = gfc_build_label_decl (NULL_TREE);
922 exit_label = gfc_build_label_decl (NULL_TREE);
923 TREE_USED (exit_label) = 1;
925 /* Initialize the DO variable: dovar = from. */
926 gfc_add_modify (&block, dovar, from);
928 /* Initialize loop count and jump to exit label if the loop is empty.
929 This code is executed before we enter the loop body. We generate:
932 if (to < from) goto exit_label;
933 countm1 = (to - from) / step;
937 if (to > from) goto exit_label;
938 countm1 = (from - to) / -step;
940 if (TREE_CODE (type) == INTEGER_TYPE)
/* Positive-step arm: empty when to < from.  */
944 tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
945 pos = fold_build3 (COND_EXPR, void_type_node, tmp,
946 build1_v (GOTO_EXPR, exit_label),
947 build_empty_stmt ());
948 tmp = fold_build2 (MINUS_EXPR, type, to, from);
949 tmp = fold_convert (utype, tmp);
950 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
951 fold_convert (utype, step));
952 tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
953 pos = build2 (COMPOUND_EXPR, void_type_node, pos, tmp);
/* Negative-step arm: empty when to > from; divide by -step.  */
955 tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
956 neg = fold_build3 (COND_EXPR, void_type_node, tmp,
957 build1_v (GOTO_EXPR, exit_label),
958 build_empty_stmt ());
959 tmp = fold_build2 (MINUS_EXPR, type, from, to);
960 tmp = fold_convert (utype, tmp);
961 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
962 fold_convert (utype, fold_build1 (NEGATE_EXPR,
964 tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
965 neg = build2 (COMPOUND_EXPR, void_type_node, neg, tmp);
/* Select the arm at runtime based on the sign of step.  */
967 tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
968 gfc_add_expr_to_block (&block, tmp);
972 /* TODO: We could use the same width as the real type.
973 This would probably cause more problems that it solves
974 when we implement "long double" types. */
/* Non-integer (REAL) DO variable: compute the trip count in floating
   point and truncate.  */
976 tmp = fold_build2 (MINUS_EXPR, type, to, from);
977 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
978 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
979 gfc_add_modify (&block, countm1, tmp);
981 /* We need a special check for empty loops:
982 empty = (step > 0 ? to < from : to > from); */
983 tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
984 fold_build2 (LT_EXPR, boolean_type_node, to, from),
985 fold_build2 (GT_EXPR, boolean_type_node, to, from));
986 /* If the loop is empty, go directly to the exit label. */
987 tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
988 build1_v (GOTO_EXPR, exit_label),
989 build_empty_stmt ());
990 gfc_add_expr_to_block (&block, tmp);
994 gfc_start_block (&body);
996 /* Put these labels where they can be found later. We put the
997 labels in a TREE_LIST node (because TREE_CHAIN is already
998 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
999 label in TREE_VALUE (backend_decl). */
1001 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1003 /* Main loop body. */
1004 tmp = gfc_trans_code (code->block->next);
1005 gfc_add_expr_to_block (&body, tmp);
1007 /* Label for cycle statements (if needed). */
1008 if (TREE_USED (cycle_label))
1010 tmp = build1_v (LABEL_EXPR, cycle_label);
1011 gfc_add_expr_to_block (&body, tmp);
1014 /* Increment the loop variable. */
1015 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
1016 gfc_add_modify (&body, dovar, tmp);
1018 /* End with the loop condition. Loop until countm1 == 0. */
1019 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
1020 build_int_cst (utype, 0));
1021 tmp = build1_v (GOTO_EXPR, exit_label);
1022 tmp = fold_build3 (COND_EXPR, void_type_node,
1023 cond, tmp, build_empty_stmt ());
1024 gfc_add_expr_to_block (&body, tmp);
1026 /* Decrement the loop count. */
1027 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
1028 gfc_add_modify (&body, countm1, tmp);
1030 /* End of loop body. */
1031 tmp = gfc_finish_block (&body);
1033 /* The for loop itself. */
1034 tmp = build1_v (LOOP_EXPR, tmp);
1035 gfc_add_expr_to_block (&block, tmp);
1037 /* Add the exit label. */
1038 tmp = build1_v (LABEL_EXPR, exit_label);
1039 gfc_add_expr_to_block (&block, tmp);
1041 return gfc_finish_block (&block);
1045 /* Translate the DO WHILE construct.
1058 if (! cond) goto exit_label;
1064 Because the evaluation of the exit condition `cond' may have side
1065 effects, we can't do much for empty loop bodies. The backend optimizers
1066 should be smart enough to eliminate any dead loops. */
1069 gfc_trans_do_while (gfc_code * code)
1077 /* Everything we build here is part of the loop body. */
1078 gfc_start_block (&block);
1080 /* Cycle and exit statements are implemented with gotos. */
1081 cycle_label = gfc_build_label_decl (NULL_TREE);
1082 exit_label = gfc_build_label_decl (NULL_TREE);
1084 /* Put the labels where they can be found later. See gfc_trans_do(). */
1085 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1087 /* Create a GIMPLE version of the exit condition. */
/* The condition's side effects (cond.pre) are re-evaluated on every
   iteration because they sit inside the loop body.  */
1088 gfc_init_se (&cond, NULL);
1089 gfc_conv_expr_val (&cond, code->expr);
1090 gfc_add_block_to_block (&block, &cond.pre);
1091 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1093 /* Build "IF (! cond) GOTO exit_label". */
1094 tmp = build1_v (GOTO_EXPR, exit_label);
1095 TREE_USED (exit_label) = 1;
1096 tmp = fold_build3 (COND_EXPR, void_type_node,
1097 cond.expr, tmp, build_empty_stmt ());
1098 gfc_add_expr_to_block (&block, tmp);
1100 /* The main body of the loop. */
1101 tmp = gfc_trans_code (code->block->next);
1102 gfc_add_expr_to_block (&block, tmp);
1104 /* Label for cycle statements (if needed). */
1105 if (TREE_USED (cycle_label))
1107 tmp = build1_v (LABEL_EXPR, cycle_label);
1108 gfc_add_expr_to_block (&block, tmp);
1111 /* End of loop body. */
1112 tmp = gfc_finish_block (&block);
1114 gfc_init_block (&block);
1115 /* Build the loop. */
1116 tmp = build1_v (LOOP_EXPR, tmp);
1117 gfc_add_expr_to_block (&block, tmp);
1119 /* Add the exit label. */
1120 tmp = build1_v (LABEL_EXPR, exit_label);
1121 gfc_add_expr_to_block (&block, tmp);
1123 return gfc_finish_block (&block);
1127 /* Translate the SELECT CASE construct for INTEGER case expressions,
1128 without killing all potential optimizations. The problem is that
1129 Fortran allows unbounded cases, but the back-end does not, so we
1130 need to intercept those before we enter the equivalent SWITCH_EXPR
1133 For example, we translate this,
1136 CASE (:100,101,105:115)
1146 to the GENERIC equivalent,
1150 case (minimum value for typeof(expr) ... 100:
1156 case 200 ... (maximum value for typeof(expr):
1173 gfc_trans_integer_select (gfc_code * code)
1183 gfc_start_block (&block);
1185 /* Calculate the switch expression. */
1186 gfc_init_se (&se, NULL);
1187 gfc_conv_expr_val (&se, code->expr);
1188 gfc_add_block_to_block (&block, &se.pre);
1190 end_label = gfc_build_label_decl (NULL_TREE);
1192 gfc_init_block (&body);
/* Outer loop: one gfc_code block per CASE; inner loop: each case range
   in that block's case list.  */
1194 for (c = code->block; c; c = c->block)
1196 for (cp = c->ext.case_list; cp; cp = cp->next)
1201 /* Assume it's the default case. */
1202 low = high = NULL_TREE;
1206 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1209 /* If there's only a lower bound, set the high bound to the
1210 maximum value of the case expression. */
1212 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1217 /* Three cases are possible here:
1219 1) There is no lower bound, e.g. CASE (:N).
1220 2) There is a lower bound .NE. high bound, that is
1221 a case range, e.g. CASE (N:M) where M>N (we make
1222 sure that M>N during type resolution).
1223 3) There is a lower bound, and it has the same value
1224 as the high bound, e.g. CASE (N:N). This is our
1225 internal representation of CASE(N).
1227 In the first and second case, we need to set a value for
1228 high. In the third case, we don't because the GCC middle
1229 end represents a single case value by just letting high be
1230 a NULL_TREE. We can't do that because we need to be able
1231 to represent unbounded cases. */
1235 && mpz_cmp (cp->low->value.integer,
1236 cp->high->value.integer) != 0))
1237 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1240 /* Unbounded case. */
1242 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1245 /* Build a label. */
1246 label = gfc_build_label_decl (NULL_TREE);
1248 /* Add this case label.
1249 Add parameter 'label', make it match GCC backend. */
1250 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1252 gfc_add_expr_to_block (&body, tmp);
1255 /* Add the statements for this case. */
1256 tmp = gfc_trans_code (c->next);
1257 gfc_add_expr_to_block (&body, tmp);
1259 /* Break to the end of the construct. */
/* Fortran CASE blocks never fall through, so every block ends with an
   explicit jump past the switch.  */
1260 tmp = build1_v (GOTO_EXPR, end_label);
1261 gfc_add_expr_to_block (&body, tmp);
1264 tmp = gfc_finish_block (&body);
1265 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1266 gfc_add_expr_to_block (&block, tmp);
1268 tmp = build1_v (LABEL_EXPR, end_label);
1269 gfc_add_expr_to_block (&block, tmp);
1271 return gfc_finish_block (&block);
1275 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1277 There are only two cases possible here, even though the standard
1278 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1279 .FALSE., and DEFAULT.
1281 We never generate more than two blocks here. Instead, we always
1282 try to eliminate the DEFAULT case. This way, we can translate this
1283 kind of SELECT construct to a simple
1287 expression in GENERIC. */
1290 gfc_trans_logical_select (gfc_code * code)
/* t, f and d point at the CASE blocks for .TRUE., .FALSE. and DEFAULT.  */
1293 gfc_code *t, *f, *d;
1298 /* Assume we don't have any cases at all. */
1301 /* Now see which ones we actually do have. We can have at most two
1302 cases in a single case list: one for .TRUE. and one for .FALSE.
1303 The default case is always separate. If the cases for .TRUE. and
1304 .FALSE. are in the same case list, the block for that case list
1305 always executed, and we don't generate code a COND_EXPR. */
1306 for (c = code->block; c; c = c->block)
1308 for (cp = c->ext.case_list; cp; cp = cp->next)
1312 if (cp->low->value.logical == 0) /* .FALSE. */
1314 else /* if (cp->value.logical != 0), thus .TRUE. */
1322 /* Start a new block. */
1323 gfc_start_block (&block);
1325 /* Calculate the switch expression. We always need to do this
1326 because it may have side effects. */
1327 gfc_init_se (&se, NULL);
1328 gfc_conv_expr_val (&se, code->expr);
1329 gfc_add_block_to_block (&block, &se.pre);
1331 if (t == f && t != NULL)
1333 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1334 translate the code for these cases, append it to the current
1336 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1340 tree true_tree, false_tree, stmt;
1342 true_tree = build_empty_stmt ();
1343 false_tree = build_empty_stmt ();
1345 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1346 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1347 make the missing case the default case. */
1348 if (t != NULL && f != NULL)
1358 /* Translate the code for each of these blocks, and append it to
1359 the current block. */
1361 true_tree = gfc_trans_code (t->next);
1364 false_tree = gfc_trans_code (f->next);
/* The whole construct reduces to a single COND_EXPR on the selector.  */
1366 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1367 true_tree, false_tree);
1368 gfc_add_expr_to_block (&block, stmt);
1371 return gfc_finish_block (&block);
1375 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1376 Instead of generating compares and jumps, it is far simpler to
1377 generate a data structure describing the cases in order and call a
1378 library subroutine that locates the right case.
1379 This is particularly true because this is the only case where we
1380 might have to dispose of a temporary.
1381 The library subroutine returns a pointer to jump to or NULL if no
1382 branches are to be taken. */
/* Translate the SELECT CASE construct for CHARACTER case expressions.
   Builds a static jump table describing the sorted case ranges, calls
   the library routine (select_string / select_string_char4, chosen by
   the character kind) to locate the matching case at run time, and
   switches on the returned case number.  NOTE(review): this extract
   omits interior lines of the original function; only comments were
   changed here.  */
1385 gfc_trans_character_select (gfc_code *code)
1387 tree init, node, end_label, tmp, type, case_num, label, fndecl;
1388 stmtblock_t block, body;
1394 /* The jump table types are stored in static variables to avoid
1395 constructing them from scratch every single time. */
1396 static tree select_struct[2];
1397 static tree ss_string1[2], ss_string1_len[2];
1398 static tree ss_string2[2], ss_string2_len[2];
1399 static tree ss_target[2];
1401 tree pchartype = gfc_get_pchar_type (code->expr->ts.kind);
/* k indexes the per-kind caches above: one slot for kind=1, one for
   kind=4 (the assignments to k are in lines omitted here).  */
1403 if (code->expr->ts.kind == 1)
1405 else if (code->expr->ts.kind == 4)
/* Lazily build the RECORD_TYPE describing one jump-table entry:
   {string1, string1_len, string2, string2_len, target}.  */
1410 if (select_struct[k] == NULL)
1412 select_struct[k] = make_node (RECORD_TYPE);
1414 if (code->expr->ts.kind == 1)
1415 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1416 else if (code->expr->ts.kind == 4)
1417 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
1422 #define ADD_FIELD(NAME, TYPE) \
1423 ss_##NAME[k] = gfc_add_field_to_struct \
1424 (&(TYPE_FIELDS (select_struct[k])), select_struct[k], \
1425 get_identifier (stringize(NAME)), TYPE)
1427 ADD_FIELD (string1, pchartype);
1428 ADD_FIELD (string1_len, gfc_charlen_type_node);
1430 ADD_FIELD (string2, pchartype);
1431 ADD_FIELD (string2_len, gfc_charlen_type_node);
1433 ADD_FIELD (target, integer_type_node);
1436 gfc_finish_type (select_struct[k]);
/* Walk to the leftmost node of the sorted case tree, then count the
   cases left-to-right (counting code is in omitted lines).  */
1439 cp = code->block->ext.case_list;
1440 while (cp->left != NULL)
1444 for (d = cp; d; d = d->right)
1447 end_label = gfc_build_label_decl (NULL_TREE);
1449 /* Generate the body */
1450 gfc_start_block (&block);
1451 gfc_init_block (&body);
/* Emit one CASE_LABEL_EXPR per case (keyed by the case number d->n)
   followed by the translated statements and a jump past the construct. */
1453 for (c = code->block; c; c = c->block)
1455 for (d = c->ext.case_list; d; d = d->next)
1457 label = gfc_build_label_decl (NULL_TREE);
1458 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1459 build_int_cst (NULL_TREE, d->n),
1460 build_int_cst (NULL_TREE, d->n), label);
1461 gfc_add_expr_to_block (&body, tmp);
1464 tmp = gfc_trans_code (c->next);
1465 gfc_add_expr_to_block (&body, tmp);
1467 tmp = build1_v (GOTO_EXPR, end_label);
1468 gfc_add_expr_to_block (&body, tmp);
1471 /* Generate the structure describing the branches */
/* Each entry records the low/high strings of the range (NULL pointer
   and zero length for an unbounded side) and the target case number. */
1474 for(d = cp; d; d = d->right)
1478 gfc_init_se (&se, NULL);
1482 node = tree_cons (ss_string1[k], null_pointer_node, node);
1483 node = tree_cons (ss_string1_len[k], integer_zero_node, node);
1487 gfc_conv_expr_reference (&se, d->low);
1489 node = tree_cons (ss_string1[k], se.expr, node);
1490 node = tree_cons (ss_string1_len[k], se.string_length, node);
1493 if (d->high == NULL)
1495 node = tree_cons (ss_string2[k], null_pointer_node, node);
1496 node = tree_cons (ss_string2_len[k], integer_zero_node, node);
1500 gfc_init_se (&se, NULL);
1501 gfc_conv_expr_reference (&se, d->high);
1503 node = tree_cons (ss_string2[k], se.expr, node);
1504 node = tree_cons (ss_string2_len[k], se.string_length, node);
1507 node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n),
1510 tmp = build_constructor_from_list (select_struct[k], nreverse (node));
1511 init = tree_cons (NULL_TREE, tmp, init);
/* Wrap the entries into a constant, read-only static array so the
   table is emitted once in the object file.  */
1514 type = build_array_type (select_struct[k],
1515 build_index_type (build_int_cst (NULL_TREE, n-1)));
1517 init = build_constructor_from_list (type, nreverse(init));
1518 TREE_CONSTANT (init) = 1;
1519 TREE_STATIC (init) = 1;
1520 /* Create a static variable to hold the jump table. */
1521 tmp = gfc_create_var (type, "jumptable");
1522 TREE_CONSTANT (tmp) = 1;
1523 TREE_STATIC (tmp) = 1;
1524 TREE_READONLY (tmp) = 1;
1525 DECL_INITIAL (tmp) = init;
1528 /* Build the library call */
1529 init = gfc_build_addr_expr (pvoid_type_node, init);
1531 gfc_init_se (&se, NULL);
1532 gfc_conv_expr_reference (&se, code->expr);
1534 gfc_add_block_to_block (&block, &se.pre);
/* Pick the runtime lookup routine matching the character kind.  */
1536 if (code->expr->ts.kind == 1)
1537 fndecl = gfor_fndecl_select_string;
1538 else if (code->expr->ts.kind == 4)
1539 fndecl = gfor_fndecl_select_string_char4;
1543 tmp = build_call_expr (fndecl, 4, init, build_int_cst (NULL_TREE, n),
1544 se.expr, se.string_length);
1545 case_num = gfc_create_var (integer_type_node, "case_num");
1546 gfc_add_modify (&block, case_num, tmp);
1548 gfc_add_block_to_block (&block, &se.post);
/* Switch on the case number the library returned.  */
1550 tmp = gfc_finish_block (&body);
1551 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1552 gfc_add_expr_to_block (&block, tmp);
1554 tmp = build1_v (LABEL_EXPR, end_label);
1555 gfc_add_expr_to_block (&block, tmp);
1557 return gfc_finish_block (&block);
1561 /* Translate the three variants of the SELECT CASE construct.
1563 SELECT CASEs with INTEGER case expressions can be translated to an
1564 equivalent GENERIC switch statement, and for LOGICAL case
1565 expressions we build one or two if-else compares.
1567 SELECT CASEs with CHARACTER case expressions are a whole different
1568 story, because they don't exist in GENERIC. So we sort them and
1569 do a binary search at runtime.
1571 Fortran has no BREAK statement, and it does not allow jumps from
1572 one case block to another. That makes things a lot easier for
/* Entry point for SELECT CASE translation: dispatch on the type of
   the case expression to the LOGICAL, INTEGER or CHARACTER variant.
   An empty SELECT construct yields an empty statement.  */
1576 gfc_trans_select (gfc_code * code)
1578 gcc_assert (code && code->expr);
1580 /* Empty SELECT constructs are legal. */
1581 if (code->block == NULL)
1582 return build_empty_stmt ();
1584 /* Select the correct translation function. */
1585 switch (code->expr->ts.type)
1587 case BT_LOGICAL: return gfc_trans_logical_select (code);
1588 case BT_INTEGER: return gfc_trans_integer_select (code);
1589 case BT_CHARACTER: return gfc_trans_character_select (code);
/* Any other selector type indicates a front-end bug.  */
1591 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1597 /* Traversal function to substitute a replacement symtree if the symbol
1598 in the expression is the same as that passed. f == 2 signals that
1599 that variable itself is not to be checked - only the references.
1600 This group of functions is used when the variable expression in a
1601 FORALL assignment has internal references. For example:
1602 FORALL (i = 1:4) p(p(i)) = i
1603 The only recourse here is to store a copy of 'p' for the index
/* Symtrees saved while a FORALL assignment target is temporarily
   redirected to a copy: forall_make_variable_temp installs
   NEW_SYMTREE in place of OLD_SYMTREE, and forall_restore /
   cleanup_forall_symtrees undo and free the substitution.  */
1606 static gfc_symtree *new_symtree;
1607 static gfc_symtree *old_symtree;
/* gfc_traverse_expr callback: redirect a variable reference to SYM so
   that it points at the replacement NEW_SYMTREE instead.
   NOTE(review): the handling of *f (f == 2 meaning "skip the variable
   itself, check only its references") is in lines omitted from this
   extract.  */
1610 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1612 if (expr->expr_type != EXPR_VARIABLE)
1617 else if (expr->symtree->n.sym == sym)
1618 expr->symtree = new_symtree;
/* Substitute NEW_SYMTREE for every reference to SYM within E, by
   traversing the expression with the forall_replace callback.
   F is forwarded to the callback.  */
1624 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1626 gfc_traverse_expr (e, sym, forall_replace, f);
/* gfc_traverse_expr callback undoing forall_replace: any variable
   reference that was redirected to NEW_SYMTREE is pointed back at
   OLD_SYMTREE.  SYM and F are unused here.  */
1630 forall_restore (gfc_expr *expr,
1631 gfc_symbol *sym ATTRIBUTE_UNUSED,
1632 int *f ATTRIBUTE_UNUSED)
1634 if (expr->expr_type != EXPR_VARIABLE)
1637 if (expr->symtree == new_symtree)
1638 expr->symtree = old_symtree;
/* Restore all substituted symtrees within E — the inverse of
   forall_replace_symtree.  */
1644 forall_restore_symtree (gfc_expr *e)
1646 gfc_traverse_expr (e, NULL, forall_restore, 0);
/* Make a complete copy of the FORALL assignment target (c->expr) into
   a temporary, create a fake symbol/symtree backed by that temporary,
   and redirect both sides of the assignment to use it.  Setup code is
   appended to PRE and cleanup to POST.  Used when the lvalue has
   internal references (e.g. FORALL (i=1:4) p(p(i)) = i).
   NOTE(review): this extract omits interior lines of the original
   function; only comments were changed here.  */
1650 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1655 gfc_symbol *new_sym;
1656 gfc_symbol *old_sym;
1660 /* Build a copy of the lvalue. */
1661 old_symtree = c->expr->symtree;
1662 old_sym = old_symtree->n.sym;
1663 e = gfc_lval_expr_from_sym (old_sym);
/* Array case: pass the variable through gfc_conv_subref_array_arg to
   obtain a copied descriptor for the temporary.  */
1664 if (old_sym->attr.dimension)
1666 gfc_init_se (&tse, NULL);
1667 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
1668 gfc_add_block_to_block (pre, &tse.pre);
1669 gfc_add_block_to_block (post, &tse.post);
1670 tse.expr = build_fold_indirect_ref (tse.expr);
1672 if (e->ts.type != BT_CHARACTER)
1674 /* Use the variable offset for the temporary. */
1675 tmp = gfc_conv_descriptor_offset (tse.expr);
1676 gfc_add_modify (pre, tmp,
1677 gfc_conv_array_offset (old_sym->backend_decl));
/* Scalar case: build a temporary of the right type (a string
   temporary for CHARACTER) and assign the current value into it.  */
1682 gfc_init_se (&tse, NULL);
1683 gfc_init_se (&rse, NULL);
1684 gfc_conv_expr (&rse, e);
1685 if (e->ts.type == BT_CHARACTER)
1687 tse.string_length = rse.string_length;
1688 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1690 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1692 gfc_add_block_to_block (pre, &tse.pre);
1693 gfc_add_block_to_block (post, &tse.post);
1697 tmp = gfc_typenode_for_spec (&e->ts);
1698 tse.expr = gfc_create_var (tmp, "temp");
1701 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1702 e->expr_type == EXPR_VARIABLE);
1703 gfc_add_expr_to_block (pre, tmp);
1707 /* Create a new symbol to represent the lvalue. */
1708 new_sym = gfc_new_symbol (old_sym->name, NULL);
1709 new_sym->ts = old_sym->ts;
1710 new_sym->attr.referenced = 1;
1711 new_sym->attr.dimension = old_sym->attr.dimension;
1712 new_sym->attr.flavor = old_sym->attr.flavor;
1714 /* Use the temporary as the backend_decl. */
1715 new_sym->backend_decl = tse.expr;
1717 /* Create a fake symtree for it. */
1719 new_symtree = gfc_new_symtree (&root, old_sym->name);
1720 new_symtree->n.sym = new_sym;
1721 gcc_assert (new_symtree == root);
1723 /* Go through the expression reference replacing the old_symtree
/* f == 2: only the references within the lvalue are redirected, not
   the assigned variable itself.  */
1725 forall_replace_symtree (c->expr, old_sym, 2);
1727 /* Now we have made this temporary, we might as well use it for
1728 the right hand side. */
1729 forall_replace_symtree (c->expr2, old_sym, 1);
1733 /* Handles dependencies in forall assignments: decides whether the
assignment needs a temporary (via gfc_check_dependency) and, when the
lvalue refers to itself through a FORALL index or an overlapping
substring, copies the variable with forall_make_variable_temp.
NOTE(review): this extract omits interior lines of the original
function; only comments were changed here. */
1735 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1742 lsym = c->expr->symtree->n.sym;
1743 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
1745 /* Now check for dependencies within the 'variable'
1746 expression itself. These are treated by making a complete
1747 copy of variable and changing all the references to it
1748 point to the copy instead. Note that the shallow copy of
1749 the variable will not suffice for derived types with
1750 pointer components. We therefore leave these to their
/* Derived types with pointer components are excluded: a shallow copy
   would not be safe for them.  */
1752 if (lsym->ts.type == BT_DERIVED
1753 && lsym->ts.derived->attr.pointer_comp)
1757 if (find_forall_index (c->expr, lsym, 2) == SUCCESS)
1759 forall_make_variable_temp (c, pre, post);
1763 /* Substrings with dependencies are treated in the same
/* Same symbol appears on both sides: compare the substring start
   positions to detect an overlapping (reversed) copy.  */
1765 if (c->expr->ts.type == BT_CHARACTER
1767 && c->expr2->expr_type == EXPR_VARIABLE
1768 && lsym == c->expr2->symtree->n.sym)
1770 for (lref = c->expr->ref; lref; lref = lref->next)
1771 if (lref->type == REF_SUBSTRING)
1773 for (rref = c->expr2->ref; rref; rref = rref->next)
1774 if (rref->type == REF_SUBSTRING)
1778 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
1780 forall_make_variable_temp (c, pre, post);
/* Undo the symtree substitution made by forall_make_variable_temp:
   restore both sides of the assignment, then free the fake symbol
   and symtree.  Must only be called after the substitution has
   actually been installed (new_symtree is non-NULL).  */
1789 cleanup_forall_symtrees (gfc_code *c)
1791 forall_restore_symtree (c->expr);
1792 forall_restore_symtree (c->expr2);
1793 gfc_free (new_symtree->n.sym);
1794 gfc_free (new_symtree);
1798 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1799 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1800 indicates whether we should generate code to test the FORALLs mask
1801 array. OUTER is the loop header to be used for initializing mask
1804 The generated loop format is:
1805 count = (end - start + step) / step
/* Wrap BODY in the iteration loops of one FORALL level (FORALL_TMP).
   For each index variable a counted loop is built:
     count = (end - start + step) / step
   and the loop exits via GOTO when count reaches zero, so the index
   variable itself never needs an overflow-prone comparison.  The mask
   index is initialized in OUTER and advanced only in the innermost
   dimension.  Returns the fully wrapped loop tree.
   NOTE(review): this extract omits interior lines of the original
   function; only comments were changed here.  */
1818 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1819 int mask_flag, stmtblock_t *outer)
1827 tree var, start, end, step;
1830 /* Initialize the mask index outside the FORALL nest. */
1831 if (mask_flag && forall_tmp->mask)
1832 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
/* Wrap BODY once per index variable, innermost first.  */
1834 iter = forall_tmp->this_loop;
1835 nvar = forall_tmp->nvar;
1836 for (n = 0; n < nvar; n++)
1839 start = iter->start;
1843 exit_label = gfc_build_label_decl (NULL_TREE);
1844 TREE_USED (exit_label) = 1;
1846 /* The loop counter. */
1847 count = gfc_create_var (TREE_TYPE (var), "count");
1849 /* The body of the loop. */
1850 gfc_init_block (&block);
1852 /* The exit condition. */
1853 cond = fold_build2 (LE_EXPR, boolean_type_node,
1854 count, build_int_cst (TREE_TYPE (count), 0));
1855 tmp = build1_v (GOTO_EXPR, exit_label);
1856 tmp = fold_build3 (COND_EXPR, void_type_node,
1857 cond, tmp, build_empty_stmt ());
1858 gfc_add_expr_to_block (&block, tmp);
1860 /* The main loop body. */
1861 gfc_add_expr_to_block (&block, body);
1863 /* Increment the loop variable. */
1864 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1865 gfc_add_modify (&block, var, tmp);
1867 /* Advance to the next mask element. Only do this for the
1869 if (n == 0 && mask_flag && forall_tmp->mask)
1871 tree maskindex = forall_tmp->maskindex;
1872 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1873 maskindex, gfc_index_one_node);
1874 gfc_add_modify (&block, maskindex, tmp);
1877 /* Decrement the loop counter. */
1878 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
1879 build_int_cst (TREE_TYPE (var), 1));
1880 gfc_add_modify (&block, count, tmp);
1882 body = gfc_finish_block (&block);
1884 /* Loop var initialization. */
1885 gfc_init_block (&block);
1886 gfc_add_modify (&block, var, start);
1889 /* Initialize the loop counter: count = (end - start + step) / step. */
1890 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1891 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1892 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1893 gfc_add_modify (&block, count, tmp);
1895 /* The loop expression. */
1896 tmp = build1_v (LOOP_EXPR, body);
1897 gfc_add_expr_to_block (&block, tmp);
1899 /* The exit label. */
1900 tmp = build1_v (LABEL_EXPR, exit_label);
1901 gfc_add_expr_to_block (&block, tmp);
1903 body = gfc_finish_block (&block);
1910 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1911 is nonzero, the body is controlled by all masks in the forall nest.
1912 Otherwise, the innermost loop is not controlled by its mask. This
1913 is used for initializing that mask. */
/* Wrap BODY in the loops of the whole FORALL nest, walking from the
   innermost level outwards via prev_nest.  At each level, if a mask
   exists (and MASK_FLAG requests it), the body is first made
   conditional on the current mask element, then wrapped in that
   level's loops by gfc_trans_forall_loop.  Mask-index setup lands in
   the returned header block.  */
1916 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1921 forall_info *forall_tmp;
1922 tree mask, maskindex;
1924 gfc_start_block (&header);
1926 forall_tmp = nested_forall_info;
1927 while (forall_tmp != NULL)
1929 /* Generate body with masks' control. */
1932 mask = forall_tmp->mask;
1933 maskindex = forall_tmp->maskindex;
1935 /* If a mask was specified make the assignment conditional. */
1938 tmp = gfc_build_array_ref (mask, maskindex, NULL);
1939 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1942 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1943 forall_tmp = forall_tmp->prev_nest;
1947 gfc_add_expr_to_block (&header, body);
1948 return gfc_finish_block (&header);
1952 /* Allocate data for holding a temporary array. Returns either a local
1953 temporary array or a pointer variable. */
/* Allocate storage for a temporary array of BYTESIZE bytes / SIZE
   elements.  Returns either a stack-allocated local array (when
   gfc_can_put_var_on_stack allows it, which requires a constant
   size) or a pointer variable filled by malloc; in the heap case
   *PDATA receives the pointer (as pvoid) so the caller can free it.
   NOTE(review): this extract omits interior lines of the original
   function; only comments were changed here.  */
1956 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
/* Constant size: the upper bound of the index range is size - 1.  */
1963 if (INTEGER_CST_P (size))
1965 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1966 gfc_index_one_node);
1971 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1972 type = build_array_type (elem_type, type);
1973 if (gfc_can_put_var_on_stack (bytesize))
1975 gcc_assert (INTEGER_CST_P (size));
1976 tmpvar = gfc_create_var (type, "temp");
/* Heap fallback: allocate through malloc and hand the raw pointer
   back via *pdata for later freeing.  */
1981 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1982 *pdata = convert (pvoid_type_node, tmpvar);
1984 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
1985 gfc_add_modify (pblock, tmpvar, tmp);
1991 /* Generate code to copy the temporary TMP1 back to the actual lhs
EXPR. COUNT1 indexes the temporary; COUNT3 indexes the optional
WHEREMASK (whose sense is inverted when INVERT is set). A scalar lhs
gets a direct assignment; an array lhs is copied through a scalarized
loop. NOTE(review): this extract omits interior lines of the original
function; only comments were changed here. */
1994 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1995 tree count1, tree wheremask, bool invert)
1999 stmtblock_t block, body;
2005 lss = gfc_walk_expr (expr);
/* Scalar lhs: single assignment from tmp1[count1], then bump count1. */
2007 if (lss == gfc_ss_terminator)
2009 gfc_start_block (&block);
2011 gfc_init_se (&lse, NULL);
2013 /* Translate the expression. */
2014 gfc_conv_expr (&lse, expr);
2016 /* Form the expression for the temporary. */
2017 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2019 /* Use the scalar assignment as is. */
2020 gfc_add_block_to_block (&block, &lse.pre);
2021 gfc_add_modify (&block, lse.expr, tmp);
2022 gfc_add_block_to_block (&block, &lse.post);
2024 /* Increment the count1. */
2025 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2026 gfc_index_one_node);
2027 gfc_add_modify (&block, count1, tmp);
2029 tmp = gfc_finish_block (&block);
/* Array lhs: build a scalarized loop over the lhs and copy one
   element of the temporary per iteration.  */
2033 gfc_start_block (&block);
2035 gfc_init_loopinfo (&loop1);
2036 gfc_init_se (&rse, NULL);
2037 gfc_init_se (&lse, NULL);
2039 /* Associate the lss with the loop. */
2040 gfc_add_ss_to_loop (&loop1, lss);
2042 /* Calculate the bounds of the scalarization. */
2043 gfc_conv_ss_startstride (&loop1);
2044 /* Setup the scalarizing loops. */
2045 gfc_conv_loop_setup (&loop1, &expr->where);
2047 gfc_mark_ss_chain_used (lss, 1);
2049 /* Start the scalarized loop body. */
2050 gfc_start_scalarized_body (&loop1, &body);
2052 /* Setup the gfc_se structures. */
2053 gfc_copy_loopinfo_to_se (&lse, &loop1);
2056 /* Form the expression of the temporary. */
2057 if (lss != gfc_ss_terminator)
2058 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2059 /* Translate expr. */
2060 gfc_conv_expr (&lse, expr);
2062 /* Use the scalar assignment. */
2063 rse.string_length = lse.string_length;
2064 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
2066 /* Form the mask expression according to the mask tree list:
guard the assignment with wheremask[count3] (negated if INVERT). */
2069 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2071 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2072 TREE_TYPE (wheremaskexpr),
2074 tmp = fold_build3 (COND_EXPR, void_type_node,
2075 wheremaskexpr, tmp, build_empty_stmt ());
2078 gfc_add_expr_to_block (&body, tmp);
2080 /* Increment count1. */
2081 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2082 count1, gfc_index_one_node);
2083 gfc_add_modify (&body, count1, tmp);
2085 /* Increment count3. */
2088 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2089 count3, gfc_index_one_node);
2090 gfc_add_modify (&body, count3, tmp);
2093 /* Generate the copying loops. */
2094 gfc_trans_scalarizing_loops (&loop1, &body);
2095 gfc_add_block_to_block (&block, &loop1.pre);
2096 gfc_add_block_to_block (&block, &loop1.post);
2097 gfc_cleanup_loop (&loop1);
2099 tmp = gfc_finish_block (&block);
2105 /* Generate code to copy the rhs EXPR2 into the temporary. TMP1 is the
2106 address of the temporary, LSS and RSS are formed in function
2107 compute_inner_temp_size(), and should not be freed here. WHEREMASK
2108 is the conditional execution mask, whose sense may be inverted by
INVERT; COUNT1 indexes the temporary and COUNT3 the mask.
NOTE(review): this extract omits interior lines of the original
function; only comments were changed here. */
2111 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2112 tree count1, gfc_ss *lss, gfc_ss *rss,
2113 tree wheremask, bool invert)
2115 stmtblock_t block, body1;
2122 gfc_start_block (&block);
2124 gfc_init_se (&rse, NULL);
2125 gfc_init_se (&lse, NULL);
/* Scalar case: a single element is stored at tmp1[count1].  */
2127 if (lss == gfc_ss_terminator)
2129 gfc_init_block (&body1);
2130 gfc_conv_expr (&rse, expr2);
2131 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
/* Array case: scalarize over lss/rss and store one element per
   iteration.  */
2135 /* Initialize the loop. */
2136 gfc_init_loopinfo (&loop);
2138 /* We may need LSS to determine the shape of the expression. */
2139 gfc_add_ss_to_loop (&loop, lss);
2140 gfc_add_ss_to_loop (&loop, rss);
2142 gfc_conv_ss_startstride (&loop);
2143 gfc_conv_loop_setup (&loop, &expr2->where);
2145 gfc_mark_ss_chain_used (rss, 1);
2146 /* Start the loop body. */
2147 gfc_start_scalarized_body (&loop, &body1);
2149 /* Translate the expression. */
2150 gfc_copy_loopinfo_to_se (&rse, &loop);
2152 gfc_conv_expr (&rse, expr2);
2154 /* Form the expression of the temporary. */
2155 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2158 /* Use the scalar assignment. */
2159 lse.string_length = rse.string_length;
2160 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2161 expr2->expr_type == EXPR_VARIABLE);
2163 /* Form the mask expression according to the mask tree list:
guard the store with wheremask[count3] (negated if INVERT). */
2166 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2168 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2169 TREE_TYPE (wheremaskexpr),
2171 tmp = fold_build3 (COND_EXPR, void_type_node,
2172 wheremaskexpr, tmp, build_empty_stmt ());
2175 gfc_add_expr_to_block (&body1, tmp);
2177 if (lss == gfc_ss_terminator)
2179 gfc_add_block_to_block (&block, &body1);
2181 /* Increment count1. */
2182 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2183 gfc_index_one_node);
2184 gfc_add_modify (&block, count1, tmp);
2188 /* Increment count1 (array case: inside the scalarized body). */
2189 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2190 count1, gfc_index_one_node);
2191 gfc_add_modify (&body1, count1, tmp);
2193 /* Increment count3. */
2196 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2197 count3, gfc_index_one_node);
2198 gfc_add_modify (&body1, count3, tmp);
2201 /* Generate the copying loops. */
2202 gfc_trans_scalarizing_loops (&loop, &body1);
2204 gfc_add_block_to_block (&block, &loop.pre);
2205 gfc_add_block_to_block (&block, &loop.post);
2207 gfc_cleanup_loop (&loop);
2208 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2209 as tree nodes in SS may not be valid in different scope. */
2212 tmp = gfc_finish_block (&block);
2217 /* Calculate the size (element count) of the temporary needed for one
2218 assignment inside a FORALL. *LSS and *RSS are filled in here (by
walking EXPR1/EXPR2) and reused by the caller; size-computation code
is appended to PBLOCK. Returns 1 for a scalar lhs. NOTE(review):
this extract omits interior lines of the original function; only
comments were changed here. */
2221 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2222 stmtblock_t * pblock,
2223 gfc_ss **lss, gfc_ss **rss)
2231 *lss = gfc_walk_expr (expr1);
2234 size = gfc_index_one_node;
2235 if (*lss != gfc_ss_terminator)
2237 gfc_init_loopinfo (&loop);
2239 /* Walk the RHS of the expression. */
2240 *rss = gfc_walk_expr (expr2);
2241 if (*rss == gfc_ss_terminator)
2243 /* The rhs is scalar. Add a ss for the expression. */
2244 *rss = gfc_get_ss ();
2245 (*rss)->next = gfc_ss_terminator;
2246 (*rss)->type = GFC_SS_SCALAR;
2247 (*rss)->expr = expr2;
2250 /* Associate the SS with the loop. */
2251 gfc_add_ss_to_loop (&loop, *lss);
2252 /* We don't actually need to add the rhs at this point, but it might
2253 make guessing the loop bounds a bit easier. */
2254 gfc_add_ss_to_loop (&loop, *rss);
2256 /* We only want the shape of the expression, not rest of the junk
2257 generated by the scalarizer. */
2258 loop.array_parameter = 1;
2260 /* Calculate the bounds of the scalarization. Bounds checking is
temporarily disabled because only the shape is wanted here. */
2261 save_flag = flag_bounds_check;
2262 flag_bounds_check = 0;
2263 gfc_conv_ss_startstride (&loop);
2264 flag_bounds_check = save_flag;
2265 gfc_conv_loop_setup (&loop, &expr2->where);
2267 /* Figure out how many elements we need: the product over all
dimensions of the extent (to - from + 1). */
2268 for (i = 0; i < loop.dimen; i++)
2270 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2271 gfc_index_one_node, loop.from[i]);
2272 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2274 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2276 gfc_add_block_to_block (pblock, &loop.pre);
2277 size = gfc_evaluate_now (size, pblock);
2278 gfc_add_block_to_block (pblock, &loop.post);
2280 /* TODO: write a function that cleans up a loopinfo without freeing
2281 the SS chains. Currently a NOP. */
2288 /* Calculate the overall iterator number of the nested forall construct.
2289 This routine actually calculates the number of times the body of the
2290 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2291 that by the expression INNER_SIZE. The BLOCK argument specifies the
2292 block in which to calculate the result, and the optional INNER_SIZE_BODY
2293 argument contains any statements that need to be executed (inside the loop)
2294 to initialize or calculate INNER_SIZE. */
2297 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2298 stmtblock_t *inner_size_body, stmtblock_t *block)
2300 forall_info *forall_tmp = nested_forall_info;
2304 /* We can eliminate the innermost unconditional loops with constant
sizes by folding them into inner_size at compile time. */
2306 if (INTEGER_CST_P (inner_size))
2309 && !forall_tmp->mask
2310 && INTEGER_CST_P (forall_tmp->size))
2312 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2313 inner_size, forall_tmp->size);
2314 forall_tmp = forall_tmp->prev_nest;
2317 /* If there are no loops left, we have our constant result. */
2322 /* Otherwise, create a temporary variable to compute the result
by summing inner_size across all remaining loop iterations. */
2323 number = gfc_create_var (gfc_array_index_type, "num");
2324 gfc_add_modify (block, number, gfc_index_zero_node);
2326 gfc_start_block (&body);
2327 if (inner_size_body)
2328 gfc_add_block_to_block (&body, inner_size_body);
2330 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2331 number, inner_size);
2334 gfc_add_modify (&body, number, tmp);
2335 tmp = gfc_finish_block (&body);
2337 /* Generate loops. */
2338 if (forall_tmp != NULL)
2339 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2341 gfc_add_expr_to_block (block, tmp);
2347 /* Allocate a temporary for a forall construct. SIZE is the element
2348 count of the temporary needed. *PTEMP1 is returned so the heap space
can be freed by the caller (NULL when stack-allocated — see
gfc_do_allocate). Returns the (dereferenced) temporary. */
2351 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
/* bytesize = size * sizeof (type); skip the multiply when the element
   size is one byte.  */
2358 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2359 if (!integer_onep (unit))
2360 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2365 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2368 tmp = build_fold_indirect_ref (tmp);
2373 /* Allocate a temporary for a forall construct according to the information
2374 in NESTED_FORALL_INFO. INNER_SIZE is the size of the temporary needed
2375 for one assignment inside the forall; the total size is that times the
overall iteration count. *PTEMP1 is returned so the space can be freed
by the caller. */
2378 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2379 tree inner_size, stmtblock_t * inner_size_body,
2380 stmtblock_t * block, tree * ptemp1)
2384 /* Calculate the total size of temporary needed in forall construct. */
2385 size = compute_overall_iter_number (nested_forall_info, inner_size,
2386 inner_size_body, block);
2388 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2392 /* Handle assignments inside forall which need a temporary:
2394 forall (i=start:end:stride; maskexpr)
2397 (where e,f<i> are arbitrary expressions possibly involving i
2398 and there is a dependency between e<i> and f<i>)
2400 masktmp(:) = maskexpr(:)
2405 for (i = start; i <= end; i += stride)
2409 for (i = start; i <= end; i += stride)
2411 if (masktmp[maskindex++])
2412 tmp[count1++] = f<i>
2416 for (i = start; i <= end; i += stride)
2418 if (masktmp[maskindex++])
2419 e<i> = tmp[count1++]
The rhs is fully evaluated into the temporary in a first loop nest,
then copied to the lhs in a second, which breaks the dependency.
NOTE(review): this extract omits interior lines of the original
function; only comments were changed here. */
2424 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2425 tree wheremask, bool invert,
2426 forall_info * nested_forall_info,
2427 stmtblock_t * block)
2435 stmtblock_t inner_size_body;
2437 /* Create vars. count1 is the current iterator number of the nested
2439 count1 = gfc_create_var (gfc_array_index_type, "count1");
2441 /* Count is the wheremask index. */
2444 count = gfc_create_var (gfc_array_index_type, "count");
2445 gfc_add_modify (block, count, gfc_index_zero_node);
2450 /* Initialize count1. */
2451 gfc_add_modify (block, count1, gfc_index_zero_node);
2453 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2454 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2455 gfc_init_block (&inner_size_body);
2456 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2459 /* The type of LHS. Used in function allocate_temp_for_forall_nest.
For CHARACTER, the declared length is converted (and cached in the
charlen's backend_decl) so the temporary has the right length. */
2460 if (expr1->ts.type == BT_CHARACTER && expr1->ts.cl->length)
2462 if (!expr1->ts.cl->backend_decl)
2465 gfc_init_se (&tse, NULL);
2466 gfc_conv_expr (&tse, expr1->ts.cl->length);
2467 expr1->ts.cl->backend_decl = tse.expr;
2469 type = gfc_get_character_type_len (gfc_default_character_kind,
2470 expr1->ts.cl->backend_decl);
2473 type = gfc_typenode_for_spec (&expr1->ts);
2475 /* Allocate temporary for nested forall construct according to the
2476 information in nested_forall_info and inner_size. */
2477 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2478 &inner_size_body, block, &ptemp1);
2480 /* Generate codes to copy rhs to the temporary . */
2481 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2484 /* Generate body and loops according to the information in
2485 nested_forall_info. */
2486 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2487 gfc_add_expr_to_block (block, tmp);
/* Reset the counters before the second (temp -> lhs) pass.  */
2490 gfc_add_modify (block, count1, gfc_index_zero_node);
2494 gfc_add_modify (block, count, gfc_index_zero_node);
2496 /* Generate codes to copy the temporary to lhs. */
2497 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2500 /* Generate body and loops according to the information in
2501 nested_forall_info. */
2502 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2503 gfc_add_expr_to_block (block, tmp);
2507 /* Free the temporary (heap case; ptemp1 came from
allocate_temp_for_forall_nest). */
2508 tmp = gfc_call_free (ptemp1);
2509 gfc_add_expr_to_block (block, tmp);
2514 /* Translate pointer assignment inside FORALL which need temporary. */
/* NOTE(review): this listing is elided -- the embedded original line
   numbers jump, so declarations, braces and the if/else framing between
   the visible lines are not shown.  Comments below describe only what
   the visible code establishes.  */
2517 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2518 forall_info * nested_forall_info,
2519 stmtblock_t * block)
2533 tree tmp, tmp1, ptemp1;
/* COUNT is the linear index into the temporary that holds one saved
   pointer (or one descriptor) per FORALL iteration.  */
2535 count = gfc_create_var (gfc_array_index_type, "count");
2536 gfc_add_modify (block, count, gfc_index_zero_node);
2538 inner_size = integer_one_node;
2539 lss = gfc_walk_expr (expr1);
2540 rss = gfc_walk_expr (expr2);
/* Scalar pointer on the lhs (lss is the empty SS chain): save bare
   pointers; otherwise (elided else branch, from orig line 2598 on) save
   full array descriptors.  */
2541 if (lss == gfc_ss_terminator)
2543 type = gfc_typenode_for_spec (&expr1->ts);
2544 type = build_pointer_type (type);
2546 /* Allocate temporary for nested forall construct according to the
2547 information in nested_forall_info and inner_size. */
2548 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2549 inner_size, NULL, block, &ptemp1);
/* Pass 1: evaluate the rhs as a pointer for every iteration and store
   it into the temporary array TMP1.  */
2550 gfc_start_block (&body);
2551 gfc_init_se (&lse, NULL);
2552 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2553 gfc_init_se (&rse, NULL);
2554 rse.want_pointer = 1;
2555 gfc_conv_expr (&rse, expr2);
2556 gfc_add_block_to_block (&body, &rse.pre);
2557 gfc_add_modify (&body, lse.expr,
2558 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2559 gfc_add_block_to_block (&body, &rse.post);
2561 /* Increment count. */
2562 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2563 count, gfc_index_one_node);
2564 gfc_add_modify (&body, count, tmp);
2566 tmp = gfc_finish_block (&body);
2568 /* Generate body and loops according to the information in
2569 nested_forall_info. */
2570 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2571 gfc_add_expr_to_block (block, tmp);
/* Pass 2: rewind COUNT and assign each saved pointer to the lhs.  */
2574 gfc_add_modify (block, count, gfc_index_zero_node);
2576 gfc_start_block (&body);
2577 gfc_init_se (&lse, NULL);
2578 gfc_init_se (&rse, NULL);
2579 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2580 lse.want_pointer = 1;
2581 gfc_conv_expr (&lse, expr1);
2582 gfc_add_block_to_block (&body, &lse.pre);
2583 gfc_add_modify (&body, lse.expr, rse.expr);
2584 gfc_add_block_to_block (&body, &lse.post);
2585 /* Increment count. */
2586 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2587 count, gfc_index_one_node);
2588 gfc_add_modify (&body, count, tmp);
2589 tmp = gfc_finish_block (&body);
2591 /* Generate body and loops according to the information in
2592 nested_forall_info. */
2593 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2594 gfc_add_expr_to_block (block, tmp);
/* Array pointer on the lhs (opening of the else branch is elided):
   scalarize the rhs once to get its bounds, then save a descriptor per
   iteration instead of a bare pointer.  */
2598 gfc_init_loopinfo (&loop);
2600 /* Associate the SS with the loop. */
2601 gfc_add_ss_to_loop (&loop, rss);
2603 /* Setup the scalarizing loops and bounds. */
2604 gfc_conv_ss_startstride (&loop);
2606 gfc_conv_loop_setup (&loop, &expr2->where);
2608 info = &rss->data.info;
2609 desc = info->descriptor;
2611 /* Make a new descriptor. */
2612 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2613 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2614 loop.from, loop.to, 1,
2617 /* Allocate temporary for nested forall construct. */
2618 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2619 inner_size, NULL, block, &ptemp1);
2620 gfc_start_block (&body);
2621 gfc_init_se (&lse, NULL);
2622 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
/* direct_byref makes gfc_conv_expr_descriptor fill lse.expr in place.  */
2623 lse.direct_byref = 1;
2624 rss = gfc_walk_expr (expr2);
2625 gfc_conv_expr_descriptor (&lse, expr2, rss);
2627 gfc_add_block_to_block (&body, &lse.pre);
2628 gfc_add_block_to_block (&body, &lse.post);
2630 /* Increment count. */
2631 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2632 count, gfc_index_one_node);
2633 gfc_add_modify (&body, count, tmp);
2635 tmp = gfc_finish_block (&body);
2637 /* Generate body and loops according to the information in
2638 nested_forall_info. */
2639 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2640 gfc_add_expr_to_block (block, tmp);
/* Second pass for the descriptor case: rewind COUNT and copy each
   saved descriptor into the lhs descriptor.  */
2643 gfc_add_modify (block, count, gfc_index_zero_node);
2645 parm = gfc_build_array_ref (tmp1, count, NULL);
2646 lss = gfc_walk_expr (expr1);
2647 gfc_init_se (&lse, NULL);
2648 gfc_conv_expr_descriptor (&lse, expr1, lss);
2649 gfc_add_modify (&lse.pre, lse.expr, parm);
2650 gfc_start_block (&body);
2651 gfc_add_block_to_block (&body, &lse.pre);
2652 gfc_add_block_to_block (&body, &lse.post);
2654 /* Increment count. */
2655 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2656 count, gfc_index_one_node);
2657 gfc_add_modify (&body, count, tmp);
2659 tmp = gfc_finish_block (&body);
2661 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2662 gfc_add_expr_to_block (block, tmp);
2664 /* Free the temporary. */
2667 tmp = gfc_call_free (ptemp1);
2668 gfc_add_expr_to_block (block, tmp);
2673 /* FORALL and WHERE statements are really nasty, especially when you nest
2674 them. All the rhs of a forall assignment must be evaluated before the
2675 actual assignments are performed. Presumably this also applies to all the
2676 assignments in an inner where statement. */
2678 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2679 linear array, relying on the fact that we process in the same order in all
2682 forall (i=start:end:stride; maskexpr)
2686 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2688 count = ((end + 1 - start) / stride)
2689 masktmp(:) = maskexpr(:)
2692 for (i = start; i <= end; i += stride)
2694 if (masktmp[maskindex++])
2698 for (i = start; i <= end; i += stride)
2700 if (masktmp[maskindex++])
2704 Note that this code only works when there are no dependencies.
2705 Forall loop with array assignments and data dependencies are a real pain,
2706 because the size of the temporary cannot always be determined before the
2707 loop is executed. This problem is compounded by the presence of nested
2712 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
/* NOTE(review): elided listing -- local declarations, braces and the
   switch framing around the EXEC_* cases below are not visible here.  */
2732 gfc_forall_iterator *fa;
2735 gfc_saved_var *saved_vars;
2736 iter_info *this_forall;
2740 /* Do nothing if the mask is false. */
2742 && code->expr->expr_type == EXPR_CONSTANT
2743 && !code->expr->value.logical)
2744 return build_empty_stmt ();
2747 /* Count the FORALL index number. */
2748 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2752 /* Allocate the space for var, start, end, step, varexpr. */
2753 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2754 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2755 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2756 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2757 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2758 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2760 /* Allocate the space for info. */
2761 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2763 gfc_start_block (&pre);
2764 gfc_init_block (&post);
2765 gfc_init_block (&block);
/* Build one iter_info per FORALL index: shadow the user symbol with a
   compiler temporary and record start/end/stride for the loop nest.  */
2768 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2770 gfc_symbol *sym = fa->var->symtree->n.sym;
2772 /* Allocate space for this_forall. */
2773 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2775 /* Create a temporary variable for the FORALL index. */
2776 tmp = gfc_typenode_for_spec (&sym->ts);
2777 var[n] = gfc_create_var (tmp, sym->name);
2778 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2780 /* Record it in this_forall. */
2781 this_forall->var = var[n];
2783 /* Replace the index symbol's backend_decl with the temporary decl. */
2784 sym->backend_decl = var[n];
2786 /* Work out the start, end and stride for the loop. */
2787 gfc_init_se (&se, NULL);
2788 gfc_conv_expr_val (&se, fa->start);
2789 /* Record it in this_forall. */
2790 this_forall->start = se.expr;
2791 gfc_add_block_to_block (&block, &se.pre);
2794 gfc_init_se (&se, NULL);
2795 gfc_conv_expr_val (&se, fa->end);
2796 /* Record it in this_forall. */
2797 this_forall->end = se.expr;
2798 gfc_make_safe_expr (&se);
2799 gfc_add_block_to_block (&block, &se.pre);
2802 gfc_init_se (&se, NULL);
2803 gfc_conv_expr_val (&se, fa->stride);
2804 /* Record it in this_forall. */
2805 this_forall->step = se.expr;
2806 gfc_make_safe_expr (&se);
2807 gfc_add_block_to_block (&block, &se.pre);
2810 /* Set the NEXT field of this_forall to NULL. */
2811 this_forall->next = NULL;
2812 /* Link this_forall to the info construct. */
2813 if (info->this_loop)
2815 iter_info *iter_tmp = info->this_loop;
2816 while (iter_tmp->next != NULL)
2817 iter_tmp = iter_tmp->next;
2818 iter_tmp->next = this_forall;
2821 info->this_loop = this_forall;
2827 /* Calculate the size needed for the current forall level. */
2828 size = gfc_index_one_node;
2829 for (n = 0; n < nvar; n++)
2831 /* size = (end + step - start) / step. */
2832 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2834 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2836 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2837 tmp = convert (gfc_array_index_type, tmp);
/* Total iteration count is the product over all index variables.  */
2839 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2842 /* Record the nvar and size of current forall level. */
2848 /* If the mask is .true., consider the FORALL unconditional. */
2849 if (code->expr->expr_type == EXPR_CONSTANT
2850 && code->expr->value.logical)
2858 /* First we need to allocate the mask. */
2861 /* As the mask array can be very big, prefer compact boolean types. */
2862 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2863 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2864 size, NULL, &block, &pmask);
2865 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2867 /* Record them in the info structure. */
2868 info->maskindex = maskindex;
2873 /* No mask was specified. */
2874 maskindex = NULL_TREE;
2875 mask = pmask = NULL_TREE;
2878 /* Link the current forall level to nested_forall_info. */
2879 info->prev_nest = nested_forall_info;
2880 nested_forall_info = info;
2882 /* Copy the mask into a temporary variable if required.
2883 For now we assume a mask temporary is needed. */
2886 /* As the mask array can be very big, prefer compact boolean types. */
2887 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2889 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
2891 /* Start of mask assignment loop body. */
2892 gfc_start_block (&body);
2894 /* Evaluate the mask expression. */
2895 gfc_init_se (&se, NULL);
2896 gfc_conv_expr_val (&se, code->expr);
2897 gfc_add_block_to_block (&body, &se.pre);
2899 /* Store the mask. */
2900 se.expr = convert (mask_type, se.expr);
2902 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2903 gfc_add_modify (&body, tmp, se.expr);
2905 /* Advance to the next mask element. */
2906 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2907 maskindex, gfc_index_one_node);
2908 gfc_add_modify (&body, maskindex, tmp);
2910 /* Generate the loops. */
2911 tmp = gfc_finish_block (&body);
/* Third argument 0: only this level's loops, no mask test -- we are
   computing the mask itself here.  */
2912 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2913 gfc_add_expr_to_block (&block, tmp);
2916 c = code->block->next;
2918 /* TODO: loop merging in FORALL statements. */
2919 /* Now that we've got a copy of the mask, generate the assignment loops. */
2925 /* A scalar or array assignment. DO the simple check for
2926 lhs to rhs dependencies. These make a temporary for the
2927 rhs and form a second forall block to copy to variable. */
2928 need_temp = check_forall_dependencies(c, &pre, &post);
2930 /* Temporaries due to array assignment data dependencies introduce
2931 no end of problems. */
2933 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2934 nested_forall_info, &block);
2937 /* Use the normal assignment copying routines. */
2938 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2940 /* Generate body and loops. */
2941 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2943 gfc_add_expr_to_block (&block, tmp);
2946 /* Cleanup any temporary symtrees that have been made to deal
2947 with dependencies. */
2949 cleanup_forall_symtrees (c);
2954 /* Translate WHERE or WHERE construct nested in FORALL. */
2955 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2958 /* Pointer assignment inside FORALL. */
2959 case EXEC_POINTER_ASSIGN:
2960 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2962 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2963 nested_forall_info, &block);
2966 /* Use the normal assignment copying routines. */
2967 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2969 /* Generate body and loops. */
2970 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2972 gfc_add_expr_to_block (&block, tmp);
/* Nested FORALL: recurse with the current level's info chained on.  */
2977 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2978 gfc_add_expr_to_block (&block, tmp);
2981 /* Explicit subroutine calls are prevented by the frontend but interface
2982 assignments can legitimately produce them. */
2983 case EXEC_ASSIGN_CALL:
2984 assign = gfc_trans_call (c, true);
2985 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
2986 gfc_add_expr_to_block (&block, tmp);
2996 /* Restore the original index variables. */
2997 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2998 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3000 /* Free the space for var, start, end, step, varexpr. */
3006 gfc_free (saved_vars);
3008 /* Free the space for this forall_info. */
3013 /* Free the temporary for the mask. */
3014 tmp = gfc_call_free (pmask);
3015 gfc_add_expr_to_block (&block, tmp);
3018 pushdecl (maskindex);
3020 gfc_add_block_to_block (&pre, &block);
3021 gfc_add_block_to_block (&pre, &post);
3023 return gfc_finish_block (&pre);
3027 /* Translate the FORALL statement or construct. */
/* Public entry point: delegates to gfc_trans_forall_1 with a NULL
   nesting context, i.e. this is the outermost FORALL level.  */
3029 tree gfc_trans_forall (gfc_code * code)
3031 return gfc_trans_forall_1 (code, NULL);
3035 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3036 If the WHERE construct is nested in FORALL, compute the overall temporary
3037 needed by the WHERE mask expression multiplied by the iterator number of
3039 ME is the WHERE mask expression.
3040 MASK is the current execution mask upon input, whose sense may or may
3041 not be inverted as specified by the INVERT argument.
3042 CMASK is the updated execution mask on output, or NULL if not required.
3043 PMASK is the pending execution mask on output, or NULL if not required.
3044 BLOCK is the block in which to place the condition evaluation loops. */
/* NOTE(review): elided listing -- some declarations, braces and the
   if/else framing between visible lines are not shown.  */
3047 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3048 tree mask, bool invert, tree cmask, tree pmask,
3049 tree mask_type, stmtblock_t * block)
3054 stmtblock_t body, body1;
3055 tree count, cond, mtmp;
3058 gfc_init_loopinfo (&loop);
/* ME is walked twice; LSS is used below to decide scalar vs array.  */
3060 lss = gfc_walk_expr (me);
3061 rss = gfc_walk_expr (me);
3063 /* Variable to index the temporary. */
3064 count = gfc_create_var (gfc_array_index_type, "count");
3065 /* Initialize count. */
3066 gfc_add_modify (block, count, gfc_index_zero_node);
3068 gfc_start_block (&body);
3070 gfc_init_se (&rse, NULL);
3071 gfc_init_se (&lse, NULL);
3073 if (lss == gfc_ss_terminator)
3075 gfc_init_block (&body1);
3079 /* Initialize the loop. */
3080 gfc_init_loopinfo (&loop);
3082 /* We may need LSS to determine the shape of the expression. */
3083 gfc_add_ss_to_loop (&loop, lss);
3084 gfc_add_ss_to_loop (&loop, rss);
3086 gfc_conv_ss_startstride (&loop);
3087 gfc_conv_loop_setup (&loop, &me->where);
3089 gfc_mark_ss_chain_used (rss, 1);
3090 /* Start the loop body. */
3091 gfc_start_scalarized_body (&loop, &body1);
3093 /* Translate the expression. */
3094 gfc_copy_loopinfo_to_se (&rse, &loop);
3096 gfc_conv_expr (&rse, me);
3099 /* Variable to evaluate mask condition. */
3100 cond = gfc_create_var (mask_type, "cond");
/* MTMP caches the (possibly inverted) incoming mask element so it is
   read only once per iteration.  */
3101 if (mask && (cmask || pmask))
3102 mtmp = gfc_create_var (mask_type, "mask");
3103 else mtmp = NULL_TREE;
3105 gfc_add_block_to_block (&body1, &lse.pre);
3106 gfc_add_block_to_block (&body1, &rse.pre);
3108 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3110 if (mask && (cmask || pmask))
3112 tmp = gfc_build_array_ref (mask, count, NULL);
3114 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3115 gfc_add_modify (&body1, mtmp, tmp);
/* cmask[count] = mask & cond -- elements active in this clause.  */
3120 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3123 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3124 gfc_add_modify (&body1, tmp1, tmp);
/* pmask[count] = mask & !cond -- elements pending for ELSEWHERE.  */
3129 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3130 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3132 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3133 gfc_add_modify (&body1, tmp1, tmp);
3136 gfc_add_block_to_block (&body1, &lse.post);
3137 gfc_add_block_to_block (&body1, &rse.post);
3139 if (lss == gfc_ss_terminator)
3141 gfc_add_block_to_block (&body, &body1);
3145 /* Increment count. */
3146 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3147 gfc_index_one_node);
3148 gfc_add_modify (&body1, count, tmp1);
3150 /* Generate the copying loops. */
3151 gfc_trans_scalarizing_loops (&loop, &body1);
3153 gfc_add_block_to_block (&body, &loop.pre);
3154 gfc_add_block_to_block (&body, &loop.post);
3156 gfc_cleanup_loop (&loop);
3157 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3158 as tree nodes in SS may not be valid in different scope. */
3161 tmp1 = gfc_finish_block (&body);
3162 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3163 if (nested_forall_info != NULL)
3164 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3166 gfc_add_expr_to_block (block, tmp1);
3170 /* Translate an assignment statement in a WHERE statement or construct
3171 statement. The MASK expression is used to control which elements
3172 of EXPR1 shall be assigned. The sense of MASK is specified by
/* NOTE(review): elided listing -- declarations, braces and parts of the
   parameter list / tail of the comment above are not shown.  COUNT1 and
   COUNT2 appear to index into the mask temporaries; confirm against the
   full source.  */
3176 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3177 tree mask, bool invert,
3178 tree count1, tree count2,
3184 gfc_ss *lss_section;
3191 tree index, maskexpr;
3194 /* TODO: handle this special case.
3195 Special case a single function returning an array. */
3196 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3198 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3204 /* Assignment of the form lhs = rhs. */
3205 gfc_start_block (&block);
3207 gfc_init_se (&lse, NULL);
3208 gfc_init_se (&rse, NULL);
3211 lss = gfc_walk_expr (expr1);
3214 /* In each where-assign-stmt, the mask-expr and the variable being
3215 defined shall be arrays of the same shape. */
3216 gcc_assert (lss != gfc_ss_terminator);
3218 /* The assignment needs scalarization. */
3221 /* Find a non-scalar SS from the lhs. */
3222 while (lss_section != gfc_ss_terminator
3223 && lss_section->type != GFC_SS_SECTION)
3224 lss_section = lss_section->next;
3226 gcc_assert (lss_section != gfc_ss_terminator);
3228 /* Initialize the scalarizer. */
3229 gfc_init_loopinfo (&loop);
3232 rss = gfc_walk_expr (expr2);
3233 if (rss == gfc_ss_terminator)
3235 /* The rhs is scalar. Add a ss for the expression. */
3236 rss = gfc_get_ss ();
3238 rss->next = gfc_ss_terminator;
3239 rss->type = GFC_SS_SCALAR;
3243 /* Associate the SS with the loop. */
3244 gfc_add_ss_to_loop (&loop, lss);
3245 gfc_add_ss_to_loop (&loop, rss);
3247 /* Calculate the bounds of the scalarization. */
3248 gfc_conv_ss_startstride (&loop);
3250 /* Resolve any data dependencies in the statement. */
3251 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3253 /* Setup the scalarizing loops. */
3254 gfc_conv_loop_setup (&loop, &expr2->where);
3256 /* Setup the gfc_se structures. */
3257 gfc_copy_loopinfo_to_se (&lse, &loop);
3258 gfc_copy_loopinfo_to_se (&rse, &loop);
3261 gfc_mark_ss_chain_used (rss, 1);
/* temp_ss non-NULL means the dependency resolver introduced an rhs
   temporary; the lhs is then written in a second pass below.  */
3262 if (loop.temp_ss == NULL)
3265 gfc_mark_ss_chain_used (lss, 1);
3269 lse.ss = loop.temp_ss;
3270 gfc_mark_ss_chain_used (lss, 3);
3271 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3274 /* Start the scalarized loop body. */
3275 gfc_start_scalarized_body (&loop, &body);
3277 /* Translate the expression. */
3278 gfc_conv_expr (&rse, expr2);
3279 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3281 gfc_conv_tmp_array_ref (&lse);
3282 gfc_advance_se_ss_chain (&lse);
3285 gfc_conv_expr (&lse, expr1);
3287 /* Form the mask expression according to the mask. */
3289 maskexpr = gfc_build_array_ref (mask, index, NULL);
3291 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3293 /* Use the scalar assignment as is. */
3295 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3296 loop.temp_ss != NULL, false);
3298 tmp = gfc_conv_operator_assign (&lse, &rse, sym);
/* Guard the element assignment with the (possibly inverted) mask.  */
3300 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3302 gfc_add_expr_to_block (&body, tmp);
3304 if (lss == gfc_ss_terminator)
3306 /* Increment count1. */
3307 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3308 count1, gfc_index_one_node);
3309 gfc_add_modify (&body, count1, tmp);
3311 /* Use the scalar assignment as is. */
3312 gfc_add_block_to_block (&block, &body);
3316 gcc_assert (lse.ss == gfc_ss_terminator
3317 && rse.ss == gfc_ss_terminator);
3319 if (loop.temp_ss != NULL)
3321 /* Increment count1 before finish the main body of a scalarized
3323 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3324 count1, gfc_index_one_node);
3325 gfc_add_modify (&body, count1, tmp);
3326 gfc_trans_scalarized_loop_boundary (&loop, &body);
3328 /* We need to copy the temporary to the actual lhs. */
3329 gfc_init_se (&lse, NULL);
3330 gfc_init_se (&rse, NULL);
3331 gfc_copy_loopinfo_to_se (&lse, &loop);
3332 gfc_copy_loopinfo_to_se (&rse, &loop);
3334 rse.ss = loop.temp_ss;
3337 gfc_conv_tmp_array_ref (&rse);
3338 gfc_advance_se_ss_chain (&rse);
3339 gfc_conv_expr (&lse, expr1);
3341 gcc_assert (lse.ss == gfc_ss_terminator
3342 && rse.ss == gfc_ss_terminator);
3344 /* Form the mask expression according to the mask tree list. */
3346 maskexpr = gfc_build_array_ref (mask, index, NULL);
3348 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3351 /* Use the scalar assignment as is. */
3352 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3353 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3354 gfc_add_expr_to_block (&body, tmp);
3356 /* Increment count2. */
3357 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3358 count2, gfc_index_one_node);
3359 gfc_add_modify (&body, count2, tmp);
3363 /* Increment count1. */
3364 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3365 count1, gfc_index_one_node);
3366 gfc_add_modify (&body, count1, tmp);
3369 /* Generate the copying loops. */
3370 gfc_trans_scalarizing_loops (&loop, &body);
3372 /* Wrap the whole thing up. */
3373 gfc_add_block_to_block (&block, &loop.pre);
3374 gfc_add_block_to_block (&block, &loop.post);
3375 gfc_cleanup_loop (&loop);
3378 return gfc_finish_block (&block);
3382 /* Translate the WHERE construct or statement.
3383 This function can be called iteratively to translate the nested WHERE
3384 construct or statement.
3385 MASK is the control mask. */
/* NOTE(review): elided listing -- declarations, braces, loop/switch
   framing and some else branches between visible lines are missing.  */
3388 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3389 forall_info * nested_forall_info, stmtblock_t * block)
3391 stmtblock_t inner_size_body;
3392 tree inner_size, size;
3401 tree count1, count2;
/* pcmask/ppmask hold the raw allocations behind cmask/pmask so they can
   be freed at the end; NULL when nothing was allocated.  */
3405 tree pcmask = NULL_TREE;
3406 tree ppmask = NULL_TREE;
3407 tree cmask = NULL_TREE;
3408 tree pmask = NULL_TREE;
3409 gfc_actual_arglist *arg;
3411 /* the WHERE statement or the WHERE construct statement. */
3412 cblock = code->block;
3414 /* As the mask array can be very big, prefer compact boolean types. */
3415 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3417 /* Determine which temporary masks are needed. */
3420 /* One clause: No ELSEWHEREs. */
3421 need_cmask = (cblock->next != 0);
3424 else if (cblock->block->block)
3426 /* Three or more clauses: Conditional ELSEWHEREs. */
3430 else if (cblock->next)
3432 /* Two clauses, the first non-empty. */
3434 need_pmask = (mask != NULL_TREE
3435 && cblock->block->next != 0);
3437 else if (!cblock->block->next)
3439 /* Two clauses, both empty. */
3443 /* Two clauses, the first empty, the second non-empty. */
3446 need_cmask = (cblock->block->expr != 0);
3455 if (need_cmask || need_pmask)
3457 /* Calculate the size of temporary needed by the mask-expr. */
3458 gfc_init_block (&inner_size_body);
3459 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3460 &inner_size_body, &lss, &rss);
3462 /* Calculate the total size of temporary needed. */
3463 size = compute_overall_iter_number (nested_forall_info, inner_size,
3464 &inner_size_body, block);
3466 /* Check whether the size is negative. */
3467 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3468 gfc_index_zero_node);
/* Clamp a non-positive size to zero so the allocation is well defined.  */
3469 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3470 gfc_index_zero_node, size);
3471 size = gfc_evaluate_now (size, block);
3473 /* Allocate temporary for WHERE mask if needed. */
3475 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3478 /* Allocate temporary for !mask if needed. */
3480 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3486 /* Each time around this loop, the where clause is conditional
3487 on the value of mask and invert, which are updated at the
3488 bottom of the loop. */
3490 /* Has mask-expr. */
3493 /* Ensure that the WHERE mask will be evaluated exactly once.
3494 If there are no statements in this WHERE/ELSEWHERE clause,
3495 then we don't need to update the control mask (cmask).
3496 If this is the last clause of the WHERE construct, then
3497 we don't need to update the pending control mask (pmask). */
3499 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3501 cblock->next ? cmask : NULL_TREE,
3502 cblock->block ? pmask : NULL_TREE,
3505 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3507 (cblock->next || cblock->block)
3508 ? cmask : NULL_TREE,
3509 NULL_TREE, mask_type, block);
3513 /* It's a final elsewhere-stmt. No mask-expr is present. */
3517 /* The body of this where clause are controlled by cmask with
3518 sense specified by invert. */
3520 /* Get the assignment statement of a WHERE statement, or the first
3521 statement in where-body-construct of a WHERE construct. */
3522 cnext = cblock->next;
3527 /* WHERE assignment statement. */
3528 case EXEC_ASSIGN_CALL:
3530 arg = cnext->ext.actual;
3531 expr1 = expr2 = NULL;
3532 for (; arg; arg = arg->next)
3544 expr1 = cnext->expr;
3545 expr2 = cnext->expr2;
3547 if (nested_forall_info != NULL)
3549 need_temp = gfc_check_dependency (expr1, expr2, 0);
3550 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3551 gfc_trans_assign_need_temp (expr1, expr2,
3553 nested_forall_info, block);
3556 /* Variables to control maskexpr. */
3557 count1 = gfc_create_var (gfc_array_index_type, "count1");
3558 count2 = gfc_create_var (gfc_array_index_type, "count2");
3559 gfc_add_modify (block, count1, gfc_index_zero_node);
3560 gfc_add_modify (block, count2, gfc_index_zero_node);
3562 tmp = gfc_trans_where_assign (expr1, expr2,
3565 cnext->resolved_sym);
/* Inside FORALL the masked assignment must itself be wrapped in the
   surrounding FORALL loop nest.  */
3567 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3569 gfc_add_expr_to_block (block, tmp);
3574 /* Variables to control maskexpr. */
3575 count1 = gfc_create_var (gfc_array_index_type, "count1");
3576 count2 = gfc_create_var (gfc_array_index_type, "count2");
3577 gfc_add_modify (block, count1, gfc_index_zero_node);
3578 gfc_add_modify (block, count2, gfc_index_zero_node);
3580 tmp = gfc_trans_where_assign (expr1, expr2,
3583 cnext->resolved_sym);
3584 gfc_add_expr_to_block (block, tmp);
3589 /* WHERE or WHERE construct is part of a where-body-construct. */
3591 gfc_trans_where_2 (cnext, cmask, invert,
3592 nested_forall_info, block);
3599 /* The next statement within the same where-body-construct. */
3600 cnext = cnext->next;
3602 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3603 cblock = cblock->block;
3604 if (mask == NULL_TREE)
3606 /* If we're the initial WHERE, we can simply invert the sense
3607 of the current mask to obtain the "mask" for the remaining
3614 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3620 /* If we allocated a pending mask array, deallocate it now. */
3623 tmp = gfc_call_free (ppmask);
3624 gfc_add_expr_to_block (block, tmp);
3627 /* If we allocated a current mask array, deallocate it now. */
3630 tmp = gfc_call_free (pcmask);
3631 gfc_add_expr_to_block (block, tmp);
3635 /* Translate a simple WHERE construct or statement without dependencies.
3636 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3637 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3638 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
/* NOTE(review): elided listing -- declarations, braces and parts of the
   setup between visible lines are missing.  The two masked loops of the
   general case are coalesced into one scalarized loop with a COND_EXPR
   per element.  */
3641 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3643 stmtblock_t block, body;
3644 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3645 tree tmp, cexpr, tstmt, estmt;
3646 gfc_ss *css, *tdss, *tsss;
3647 gfc_se cse, tdse, tsse, edse, esse;
/* t* = then-clause dst/src, e* = else-clause dst/src.  */
3652 cond = cblock->expr;
3653 tdst = cblock->next->expr;
3654 tsrc = cblock->next->expr2;
3655 edst = eblock ? eblock->next->expr : NULL;
3656 esrc = eblock ? eblock->next->expr2 : NULL;
3658 gfc_start_block (&block);
3659 gfc_init_loopinfo (&loop);
3661 /* Handle the condition. */
3662 gfc_init_se (&cse, NULL);
3663 css = gfc_walk_expr (cond);
3664 gfc_add_ss_to_loop (&loop, css);
3666 /* Handle the then-clause. */
3667 gfc_init_se (&tdse, NULL);
3668 gfc_init_se (&tsse, NULL);
3669 tdss = gfc_walk_expr (tdst);
3670 tsss = gfc_walk_expr (tsrc);
3671 if (tsss == gfc_ss_terminator)
/* Scalar rhs: wrap it in a scalar SS so the scalarizer can handle it.  */
3673 tsss = gfc_get_ss ();
3675 tsss->next = gfc_ss_terminator;
3676 tsss->type = GFC_SS_SCALAR;
3679 gfc_add_ss_to_loop (&loop, tdss);
3680 gfc_add_ss_to_loop (&loop, tsss);
3684 /* Handle the else clause. */
3685 gfc_init_se (&edse, NULL);
3686 gfc_init_se (&esse, NULL);
3687 edss = gfc_walk_expr (edst);
3688 esss = gfc_walk_expr (esrc);
3689 if (esss == gfc_ss_terminator)
3691 esss = gfc_get_ss ();
3693 esss->next = gfc_ss_terminator;
3694 esss->type = GFC_SS_SCALAR;
3697 gfc_add_ss_to_loop (&loop, edss);
3698 gfc_add_ss_to_loop (&loop, esss);
3701 gfc_conv_ss_startstride (&loop);
3702 gfc_conv_loop_setup (&loop, &tdst->where);
3704 gfc_mark_ss_chain_used (css, 1);
3705 gfc_mark_ss_chain_used (tdss, 1);
3706 gfc_mark_ss_chain_used (tsss, 1);
3709 gfc_mark_ss_chain_used (edss, 1);
3710 gfc_mark_ss_chain_used (esss, 1);
3713 gfc_start_scalarized_body (&loop, &body);
3715 gfc_copy_loopinfo_to_se (&cse, &loop);
3716 gfc_copy_loopinfo_to_se (&tdse, &loop);
3717 gfc_copy_loopinfo_to_se (&tsse, &loop);
3723 gfc_copy_loopinfo_to_se (&edse, &loop);
3724 gfc_copy_loopinfo_to_se (&esse, &loop);
3729 gfc_conv_expr (&cse, cond);
3730 gfc_add_block_to_block (&body, &cse.pre);
3733 gfc_conv_expr (&tsse, tsrc);
3734 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3736 gfc_conv_tmp_array_ref (&tdse);
3737 gfc_advance_se_ss_chain (&tdse);
3740 gfc_conv_expr (&tdse, tdst);
3744 gfc_conv_expr (&esse, esrc);
3745 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3747 gfc_conv_tmp_array_ref (&edse);
3748 gfc_advance_se_ss_chain (&edse);
3751 gfc_conv_expr (&edse, edst);
/* Per element: if (cond) then-assign else else-assign (or no-op).  */
3754 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3755 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3756 : build_empty_stmt ();
3757 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3758 gfc_add_expr_to_block (&body, tmp);
3759 gfc_add_block_to_block (&body, &cse.post);
3761 gfc_trans_scalarizing_loops (&loop, &body);
3762 gfc_add_block_to_block (&block, &loop.pre);
3763 gfc_add_block_to_block (&block, &loop.post);
3764 gfc_cleanup_loop (&loop);
3766 return gfc_finish_block (&block);
3769 /* As the WHERE or WHERE construct statement can be nested, we call
3770 gfc_trans_where_2 to do the translation, and pass the initial
3771 NULL values for both the control mask and the pending control mask. */
3774 gfc_trans_where (gfc_code * code)
/* Fast paths first: a dependence-free single-assignment WHERE (with or
   without a single-assignment ELSEWHERE) goes through the coalesced
   gfc_trans_where_3; everything else falls through to the general
   mask-temporary machinery of gfc_trans_where_2.  */
3780 cblock = code->block;
3782 && cblock->next->op == EXEC_ASSIGN
3783 && !cblock->next->next)
3785 eblock = cblock->block;
3788 /* A simple "WHERE (cond) x = y" statement or block is
3789 dependence free if cond is not dependent upon writing x,
3790 and the source y is unaffected by the destination x. */
3791 if (!gfc_check_dependency (cblock->next->expr,
3793 && !gfc_check_dependency (cblock->next->expr,
3794 cblock->next->expr2, 0))
3795 return gfc_trans_where_3 (cblock, NULL);
3797 else if (!eblock->expr
3800 && eblock->next->op == EXEC_ASSIGN
3801 && !eblock->next->next)
3803 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3804 block is dependence free if cond is not dependent on writes
3805 to x1 and x2, y1 is not dependent on writes to x2, and y2
3806 is not dependent on writes to x1, and both y's are not
3807 dependent upon their own x's. In addition to this, the
3808 final two dependency checks below exclude all but the same
3809 array reference if the where and elswhere destinations
3810 are the same. In short, this is VERY conservative and this
3811 is needed because the two loops, required by the standard
3812 are coalesced in gfc_trans_where_3. */
3813 if (!gfc_check_dependency(cblock->next->expr,
3815 && !gfc_check_dependency(eblock->next->expr,
3817 && !gfc_check_dependency(cblock->next->expr,
3818 eblock->next->expr2, 1)
3819 && !gfc_check_dependency(eblock->next->expr,
3820 cblock->next->expr2, 1)
3821 && !gfc_check_dependency(cblock->next->expr,
3822 cblock->next->expr2, 1)
3823 && !gfc_check_dependency(eblock->next->expr,
3824 eblock->next->expr2, 1)
3825 && !gfc_check_dependency(cblock->next->expr,
3826 eblock->next->expr, 0)
3827 && !gfc_check_dependency(eblock->next->expr,
3828 cblock->next->expr, 0))
3829 return gfc_trans_where_3 (cblock, eblock);
/* General case: no FORALL context, no incoming mask.  */
3833 gfc_start_block (&block);
3835 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3837 return gfc_finish_block (&block);
3841 /* CYCLE a DO loop. The label decl has already been created by
3842 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
3843 node at the head of the loop. We must mark the label as used. */
3846 gfc_trans_cycle (gfc_code * code)
/* whichloop points at the enclosing DO's gfc_code; its backend_decl is
   a TREE_LIST pairing (cycle label, exit label).  */
3850 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3851 TREE_USED (cycle_label) = 1;
3852 return build1_v (GOTO_EXPR, cycle_label);
3856 /* EXIT a DO loop.  Similar to CYCLE, but now the label is in
3857 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3861 gfc_trans_exit (gfc_code * code)
/* Fetch the exit label from TREE_VALUE of the loop head's backend_decl
   (CYCLE uses TREE_PURPOSE of the same pair), mark it TREE_USED, and
   return a GOTO to it.  (The declaration of exit_label is elided in this
   extract.)  */
3865 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3866 TREE_USED (exit_label) = 1;
3867 return build1_v (GOTO_EXPR, exit_label);
3871 /* Translate the ALLOCATE statement.  */
3874 gfc_trans_allocate (gfc_code * code)
/* Nothing to allocate if the list is empty; the guard's consequent
   (presumably an early return) is elided in this extract.  */
3886 if (!code->ext.alloc_list)
3889 gfc_start_block (&block);
/* STAT= handling: create an INTEGER(4) temporary 'stat', take its
   address in 'pstat' for the library calls, and build an error label to
   branch to when an individual allocation fails.  The enclosing
   "if (code->expr)"-style guard is elided in this extract —
   NOTE(review): confirm against the full source.  */
3893 tree gfc_int4_type_node = gfc_get_int_type (4);
3895 stat = gfc_create_var (gfc_int4_type_node, "stat");
3896 pstat = build_fold_addr_expr (stat);
3898 error_label = gfc_build_label_decl (NULL_TREE);
3899 TREE_USED (error_label) = 1;
/* No STAT= variable: pass null pointers so failure is not reported
   back to the program.  */
3902 pstat = stat = error_label = NULL_TREE;
/* Allocate each object in the ALLOCATE list in turn.  */
3904 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3908 gfc_init_se (&se, NULL);
3909 gfc_start_block (&se.pre);
/* We only need the pointer/descriptor of the object, not its value.  */
3911 se.want_pointer = 1;
3912 se.descriptor_only = 1;
3913 gfc_conv_expr (&se, expr);
/* gfc_array_allocate handles array objects; when it declines, this is
   a scalar or derived-type allocation handled inline below.  */
3915 if (!gfc_array_allocate (&se, expr, pstat))
3917 /* A scalar or derived type.  */
3918 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
/* A CHARACTER type may have no compile-time size; fall back to the
   run-time string length.  */
3920 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3921 tmp = se.string_length;
/* Call the library allocator and store the returned pointer into the
   object's pointer slot.  */
3923 tmp = gfc_allocate_with_status (&se.pre, tmp, pstat);
3924 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
3925 fold_convert (TREE_TYPE (se.expr), tmp));
3926 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT= present: if stat != 0 after this allocation, jump to the
   error label so remaining allocations are skipped.  The enclosing
   guard checking for a STAT= variable is elided in this extract.  */
3930 tmp = build1_v (GOTO_EXPR, error_label);
3931 parm = fold_build2 (NE_EXPR, boolean_type_node,
3932 stat, build_int_cst (TREE_TYPE (stat), 0));
3933 tmp = fold_build3 (COND_EXPR, void_type_node,
3934 parm, tmp, build_empty_stmt ());
3935 gfc_add_expr_to_block (&se.pre, tmp);
/* A freshly allocated derived-type object with allocatable components
   must have those components nullified before use.  */
3938 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3940 tmp = build_fold_indirect_ref (se.expr);
3941 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3942 gfc_add_expr_to_block (&se.pre, tmp);
3947 tmp = gfc_finish_block (&se.pre);
3948 gfc_add_expr_to_block (&block, tmp);
3951 /* Assign the value to the status variable.  */
/* Emit the error label, then store 'stat' (converted to the STAT=
   variable's type) into the user's STAT= variable.  */
3954 tmp = build1_v (LABEL_EXPR, error_label);
3955 gfc_add_expr_to_block (&block, tmp);
3957 gfc_init_se (&se, NULL);
3958 gfc_conv_expr_lhs (&se, code->expr);
3959 tmp = convert (TREE_TYPE (se.expr), stat);
3960 gfc_add_modify (&block, se.expr, tmp);
3963 return gfc_finish_block (&block);
3967 /* Translate a DEALLOCATE statement.
3968 There are two cases within the for loop:
3969 (1) deallocate(a1, a2, a3) is translated into the following sequence
3970 _gfortran_deallocate(a1, 0B)
3971 _gfortran_deallocate(a2, 0B)
3972 _gfortran_deallocate(a3, 0B)
3973 where the STAT= variable is passed a NULL pointer.
3974 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
3976 _gfortran_deallocate(a1, &stat)
3977 astat = astat + stat
3978 _gfortran_deallocate(a2, &stat)
3979 astat = astat + stat
3980 _gfortran_deallocate(a3, &stat)
3981 astat = astat + stat
3982 In case (1), we simply return at the end of the for loop.  In case (2)
3983 we set STAT= astat.  */
3985 gfc_trans_deallocate (gfc_code * code)
3990 tree apstat, astat, pstat, stat, tmp;
3993 gfc_start_block (&block);
3995 /* Set up the optional STAT= */
/* Per-call status 'stat' and running total 'astat', with their
   addresses for the library calls.  The guard selecting the STAT= case
   is elided in this extract.  */
3998 tree gfc_int4_type_node = gfc_get_int_type (4);
4000 /* Variable used with the library call.  */
4001 stat = gfc_create_var (gfc_int4_type_node, "stat");
4002 pstat = build_fold_addr_expr (stat);
4004 /* Running total of possible deallocation failures.  */
4005 astat = gfc_create_var (gfc_int4_type_node, "astat");
4006 apstat = build_fold_addr_expr (astat);
4008 /* Initialize astat to 0.  */
4009 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
/* No STAT=: pass null pointers so the library aborts on failure.  */
4012 pstat = apstat = stat = astat = NULL_TREE;
/* Deallocate each object in the list.  */
4014 for (al = code->ext.alloc_list; al != NULL; al = al->next)
4017 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4019 gfc_init_se (&se, NULL);
4020 gfc_start_block (&se.pre);
/* Only the pointer/descriptor of the object is needed.  */
4022 se.want_pointer = 1;
4023 se.descriptor_only = 1;
4024 gfc_conv_expr (&se, expr);
/* Derived types with allocatable components need those components
   deallocated first.  */
4026 if (expr->ts.type == BT_DERIVED
4027 && expr->ts.derived->attr.alloc_comp)
/* Find the last component reference to decide whether the object is
   itself a pointer (component or symbol).  */
4030 gfc_ref *last = NULL;
4031 for (ref = expr->ref; ref; ref = ref->next)
4032 if (ref->type == REF_COMPONENT)
4035 /* Do not deallocate the components of a derived type
4036 ultimate pointer component.  */
4037 if (!(last && last->u.c.component->attr.pointer)
4038 && !(!last && expr->symtree->n.sym->attr.pointer))
4040 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
4042 gfc_add_expr_to_block (&se.pre, tmp);
/* Array vs. scalar deallocation; the selecting condition is elided in
   this extract.  */
4047 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4050 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4051 gfc_add_expr_to_block (&se.pre, tmp);
/* Null out the pointer after freeing it.  */
4053 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4054 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4057 gfc_add_expr_to_block (&se.pre, tmp);
4059 /* Keep track of the number of failed deallocations by adding stat
4060 of the last deallocation to the running total.  */
4063 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4064 gfc_add_modify (&se.pre, astat, apstat);
4067 tmp = gfc_finish_block (&se.pre);
4068 gfc_add_expr_to_block (&block, tmp);
4072 /* Assign the value to the status variable.  */
/* Store the accumulated failure count into the user's STAT= variable,
   converted to its type.  */
4075 gfc_init_se (&se, NULL);
4076 gfc_conv_expr_lhs (&se, code->expr);
4077 tmp = convert (TREE_TYPE (se.expr), astat);
4078 gfc_add_modify (&block, se.expr, tmp);
4081 return gfc_finish_block (&block);