1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
27 #include "coretypes.h"
29 #include "tree-gimple.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
40 #include "dependency.h"
/* Per-dimension FORALL iterator bookkeeping; instances are chained
   through 'next'.  NOTE(review): the interior members are missing from
   this dump (the embedded line numbers jump from 42 to 48) -- do not
   treat the declaration as complete.  */
42 typedef struct iter_info
48 struct iter_info *next;
/* State for one FORALL nesting level: 'outer' links to the enclosing
   FORALL, 'next_nest' to sibling nests at the same level.  Interior
   members are likewise missing from this dump.  */
52 typedef struct forall_info
60 struct forall_info *outer;
61 struct forall_info *next_nest;
/* Forward declaration: WHERE translation is mutually recursive with the
   FORALL machinery defined later in this file.  */
65 static void gfc_trans_where_2 (gfc_code *, tree, bool,
66 forall_info *, stmtblock_t *);
68 /* Translate a F95 label number to a LABEL_EXPR. */
71 gfc_trans_label_here (gfc_code * code)
/* code->here is the statement label attached to this node; emit the
   corresponding backend LABEL_DECL as a LABEL_EXPR.  */
73 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
77 /* Given a variable expression which has been ASSIGNed to, find the decl
78 containing the auxiliary variables. For variables in common blocks this
82 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only variables marked by the resolver as targets of ASSIGN may be
   handled here.  */
84 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
85 gfc_conv_expr (se, expr);
86 /* Deals with variable in common block. Get the field declaration. */
87 if (TREE_CODE (se->expr) == COMPONENT_REF)
88 se->expr = TREE_OPERAND (se->expr, 1);
89 /* Deals with dummy argument. Get the parameter declaration. */
90 else if (TREE_CODE (se->expr) == INDIRECT_REF)
91 se->expr = TREE_OPERAND (se->expr, 0);
94 /* Translate a label assignment statement.  Stores both the address of
   the label and an associated length in the auxiliary variables found by
   gfc_conv_label_variable: length -1 marks a branch target, a
   non-negative length marks a FORMAT label (the format string itself is
   stored for later use by assigned I/O).  */
97 gfc_trans_label_assign (gfc_code * code)
107 /* Start a new block. */
108 gfc_init_se (&se, NULL);
109 gfc_start_block (&se.pre);
110 gfc_conv_label_variable (&se, code->expr);
112 len = GFC_DECL_STRING_LEN (se.expr);
113 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
115 label_tree = gfc_get_label_decl (code->label);
117 if (code->label->defined == ST_LABEL_TARGET)
119 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
/* -1 flags "this is a jump target, not a format".  */
120 len_tree = integer_minus_one_node;
124 label_str = code->label->format->value.character.string;
125 label_len = code->label->format->value.character.length;
126 len_tree = build_int_cst (NULL_TREE, label_len);
/* +1 for the NUL appended by gfc_build_string_const.  */
127 label_tree = gfc_build_string_const (label_len + 1, label_str);
128 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
131 gfc_add_modify_expr (&se.pre, len, len_tree);
132 gfc_add_modify_expr (&se.pre, addr, label_tree);
134 return gfc_finish_block (&se.pre);
137 /* Translate a GOTO statement.  Handles both the plain form (jump to a
   known label) and the assigned-GOTO form, which jumps through the
   address stored by a prior ASSIGN, after a runtime check that the
   stored value really is a target label (length == -1).  */
140 gfc_trans_goto (gfc_code * code)
/* Plain GOTO: the label is known at compile time.  */
150 if (code->label != NULL)
151 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
/* Assigned GOTO from here on.  */
154 gfc_init_se (&se, NULL);
155 gfc_start_block (&se.pre);
156 gfc_conv_label_variable (&se, code->expr);
158 gfc_build_cstring_const ("Assigned label is not a target label");
159 tmp = GFC_DECL_STRING_LEN (se.expr);
/* Abort at runtime unless the stored length is -1 (target label).  */
160 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
161 build_int_cst (TREE_TYPE (tmp), -1));
162 gfc_trans_runtime_check (tmp, assign_error, &se.pre);
164 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
/* Without an optional label list, jump through the address directly.  */
169 target = build1 (GOTO_EXPR, void_type_node, assigned_goto);
170 gfc_add_expr_to_block (&se.pre, target);
171 return gfc_finish_block (&se.pre);
174 /* Check the label list. */
175 range_error = gfc_build_cstring_const ("Assigned label is not in the list");
/* NOTE(review): the loop header for this do/while is missing from this
   dump; each listed label is compared against the stored address.  */
179 target = gfc_get_label_decl (code->label);
180 tmp = gfc_build_addr_expr (pvoid_type_node, target);
181 tmp = build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
182 tmp = build3_v (COND_EXPR, tmp,
183 build1 (GOTO_EXPR, void_type_node, target),
184 build_empty_stmt ());
185 gfc_add_expr_to_block (&se.pre, tmp);
188 while (code != NULL);
/* Falling through every comparison means the label was not in the
   list: unconditionally raise the range error.  */
189 gfc_trans_runtime_check (boolean_true_node, range_error, &se.pre);
190 return gfc_finish_block (&se.pre);
194 /* Translate an ENTRY statement. Just adds a label for this entry point. */
196 gfc_trans_entry (gfc_code * code)
198 return build1_v (LABEL_EXPR, code->ext.entry->label);
202 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
203 elemental subroutines. Make temporaries for output arguments if any such
204 dependencies are found. Output arguments are chosen because internal_unpack
205 can be used, as is, to copy the result back to the variable. */
207 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
208 gfc_symbol * sym, gfc_actual_arglist * arg)
210 gfc_actual_arglist *arg0;
212 gfc_formal_arglist *formal;
213 gfc_loopinfo tmp_loop;
/* Nothing to do when the call is not scalarized.  */
225 if (loopse->ss == NULL)
230 formal = sym->formal;
232 /* Loop over all the arguments testing for dependencies. */
233 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
239 /* Obtain the info structure for the current argument. */
241 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
245 info = &ss->data.info;
249 /* If there is a dependency, create a temporary and use it
250 instead of the variable. */
251 fsym = formal ? formal->sym : NULL;
252 if (e->expr_type == EXPR_VARIABLE
254 && fsym->attr.intent == INTENT_OUT
255 && gfc_check_fncall_dependency (e, INTENT_OUT, sym, arg0))
257 /* Make a local loopinfo for the temporary creation, so that
258 none of the other ss->info's have to be renormalized. */
259 gfc_init_loopinfo (&tmp_loop);
260 for (n = 0; n < info->dimen; n++)
262 tmp_loop.to[n] = loopse->loop->to[n];
263 tmp_loop.from[n] = loopse->loop->from[n];
264 tmp_loop.order[n] = loopse->loop->order[n];
267 /* Generate the temporary. Merge the block so that the
268 declarations are put at the right binding level. */
269 size = gfc_create_var (gfc_array_index_type, NULL);
270 data = gfc_create_var (pvoid_type_node, NULL);
271 gfc_start_block (&block);
272 tmp = gfc_typenode_for_spec (&e->ts);
273 tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
274 &tmp_loop, info, tmp,
276 gfc_add_modify_expr (&se->pre, size, tmp);
277 tmp = fold_convert (pvoid_type_node, info->data);
278 gfc_add_modify_expr (&se->pre, data, tmp);
279 gfc_merge_block_scope (&block);
281 /* Obtain the argument descriptor for unpacking. */
282 gfc_init_se (&parmse, NULL);
283 parmse.want_pointer = 1;
284 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
285 gfc_add_block_to_block (&se->pre, &parmse.pre);
287 /* Calculate the offset for the temporary. */
/* offset = -sum (from[n] * stride[n]), so that indexing with the
   loop variables addresses element 0 of the temporary.  */
288 offset = gfc_index_zero_node;
289 for (n = 0; n < info->dimen; n++)
291 tmp = gfc_conv_descriptor_stride (info->descriptor,
293 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
294 loopse->loop->from[n], tmp);
295 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
298 info->offset = gfc_create_var (gfc_array_index_type, NULL);
299 gfc_add_modify_expr (&se->pre, info->offset, offset);
301 /* Copy the result back using unpack. */
302 tmp = gfc_chainon_list (NULL_TREE, parmse.expr);
303 tmp = gfc_chainon_list (tmp, data);
304 tmp = build_function_call_expr (gfor_fndecl_in_unpack, tmp);
/* The unpack runs in the post block, i.e. after the elemental call.  */
305 gfc_add_expr_to_block (&se->post, tmp);
307 gfc_add_block_to_block (&se->post, &parmse.post);
313 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
316 gfc_trans_call (gfc_code * code, bool dependency_check)
320 int has_alternate_specifier;
322 /* A CALL starts a new block because the actual arguments may have to
323 be evaluated first. */
324 gfc_init_se (&se, NULL);
325 gfc_start_block (&se.pre);
327 gcc_assert (code->resolved_sym);
/* Walk the arguments only for elemental subroutines; a non-empty ss
   chain means at least one argument is array valued and the call must
   be scalarized.  */
329 ss = gfc_ss_terminator;
330 if (code->resolved_sym->attr.elemental)
331 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
333 /* Is not an elemental subroutine call with array valued arguments. */
334 if (ss == gfc_ss_terminator)
337 /* Translate the call. */
338 has_alternate_specifier
339 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual);
341 /* A subroutine without side-effect, by definition, does nothing! */
342 TREE_SIDE_EFFECTS (se.expr) = 1;
344 /* Chain the pieces together and return the block. */
/* With an alternate return specifier, the call's return value selects
   the branch; store it into the variable of the following SELECT.  */
345 if (has_alternate_specifier)
347 gfc_code *select_code;
349 select_code = code->next;
350 gcc_assert(select_code->op == EXEC_SELECT);
351 sym = select_code->expr->symtree->n.sym;
352 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
353 gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
356 gfc_add_expr_to_block (&se.pre, se.expr);
358 gfc_add_block_to_block (&se.pre, &se.post);
363 /* An elemental subroutine call with array valued arguments has
370 /* gfc_walk_elemental_function_args renders the ss chain in the
371 reverse order to the actual argument order. */
372 ss = gfc_reverse_ss (ss);
374 /* Initialize the loop. */
375 gfc_init_se (&loopse, NULL);
376 gfc_init_loopinfo (&loop);
377 gfc_add_ss_to_loop (&loop, ss);
379 gfc_conv_ss_startstride (&loop);
380 gfc_conv_loop_setup (&loop);
381 gfc_mark_ss_chain_used (ss, 1);
383 /* Convert the arguments, checking for dependencies. */
384 gfc_copy_loopinfo_to_se (&loopse, &loop);
387 /* For operator assignment, we need to do dependency checking.
388 We also check the intent of the parameters. */
389 if (dependency_check)
392 sym = code->resolved_sym;
/* BUG(review): '=' is assignment, not comparison -- these two asserts
   always pass and silently CLOBBER attr.intent of the first two formal
   arguments.  Both should read '=='.  */
393 gcc_assert (sym->formal->sym->attr.intent = INTENT_OUT);
394 gcc_assert (sym->formal->next->sym->attr.intent = INTENT_IN);
395 gfc_conv_elemental_dependencies (&se, &loopse, sym,
399 /* Generate the loop body. */
400 gfc_start_scalarized_body (&loop, &body);
401 gfc_init_block (&block);
403 /* Add the subroutine call to the block. */
404 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual);
405 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
407 gfc_add_block_to_block (&block, &loopse.pre);
408 gfc_add_block_to_block (&block, &loopse.post);
410 /* Finish up the loop block and the loop. */
411 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
412 gfc_trans_scalarizing_loops (&loop, &body);
413 gfc_add_block_to_block (&se.pre, &loop.pre);
414 gfc_add_block_to_block (&se.pre, &loop.post);
415 gfc_add_block_to_block (&se.pre, &se.post);
416 gfc_cleanup_loop (&loop);
419 return gfc_finish_block (&se.pre);
423 /* Translate the RETURN statement.  A plain RETURN becomes a goto to the
   function's single return label; RETURN <expr> (alternate return) first
   stores the selector into the fake result variable.  */
426 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
434 /* if code->expr is not NULL, this return statement must appear
435 in a subroutine and current_fake_result_decl has already
438 result = gfc_get_fake_result_decl (NULL, 0);
/* No result decl: the subroutine has no * dummy arguments, so the
   alternate-return value would be ignored -- warn and fall back to a
   plain return.  */
441 gfc_warning ("An alternate return at %L without a * dummy argument",
443 return build1_v (GOTO_EXPR, gfc_get_return_label ());
446 /* Start a new block for this statement. */
447 gfc_init_se (&se, NULL);
448 gfc_start_block (&se.pre);
450 gfc_conv_expr (&se, code->expr);
452 tmp = build2 (MODIFY_EXPR, TREE_TYPE (result), result, se.expr);
453 gfc_add_expr_to_block (&se.pre, tmp);
455 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
456 gfc_add_expr_to_block (&se.pre, tmp);
457 gfc_add_block_to_block (&se.pre, &se.post);
458 return gfc_finish_block (&se.pre);
/* Plain RETURN (no alternate-return expression).  */
461 return build1_v (GOTO_EXPR, gfc_get_return_label ());
465 /* Translate the PAUSE statement. We have to translate this statement
466 to a runtime library call.  Two library entry points exist: a numeric
   variant taking the stop code and a string variant taking a character
   expression plus its length.  */
469 gfc_trans_pause (gfc_code * code)
471 tree gfc_int4_type_node = gfc_get_int_type (4);
477 /* Start a new block for this statement. */
478 gfc_init_se (&se, NULL);
479 gfc_start_block (&se.pre);
/* PAUSE without an expression -> numeric runtime variant.  */
482 if (code->expr == NULL)
484 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
485 args = gfc_chainon_list (NULL_TREE, tmp);
486 fndecl = gfor_fndecl_pause_numeric;
/* PAUSE 'string' -> string runtime variant (address + length).  */
490 gfc_conv_expr_reference (&se, code->expr);
491 args = gfc_chainon_list (NULL_TREE, se.expr);
492 args = gfc_chainon_list (args, se.string_length);
493 fndecl = gfor_fndecl_pause_string;
496 tmp = build_function_call_expr (fndecl, args);
497 gfc_add_expr_to_block (&se.pre, tmp);
499 gfc_add_block_to_block (&se.pre, &se.post);
501 return gfc_finish_block (&se.pre);
505 /* Translate the STOP statement. We have to translate this statement
506 to a runtime library call.  Structure mirrors gfc_trans_pause: a
   numeric variant for STOP <code> and a string variant for STOP 'msg'.  */
509 gfc_trans_stop (gfc_code * code)
511 tree gfc_int4_type_node = gfc_get_int_type (4);
517 /* Start a new block for this statement. */
518 gfc_init_se (&se, NULL);
519 gfc_start_block (&se.pre);
/* STOP without an expression -> numeric runtime variant.  */
522 if (code->expr == NULL)
524 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
525 args = gfc_chainon_list (NULL_TREE, tmp);
526 fndecl = gfor_fndecl_stop_numeric;
/* STOP 'string' -> string runtime variant (address + length).  */
530 gfc_conv_expr_reference (&se, code->expr);
531 args = gfc_chainon_list (NULL_TREE, se.expr);
532 args = gfc_chainon_list (args, se.string_length);
533 fndecl = gfor_fndecl_stop_string;
536 tmp = build_function_call_expr (fndecl, args);
537 gfc_add_expr_to_block (&se.pre, tmp);
539 gfc_add_block_to_block (&se.pre, &se.post);
541 return gfc_finish_block (&se.pre);
545 /* Generate GENERIC for the IF construct. This function also deals with
546 the simple IF statement, because the front end translates the IF
547 statement into an IF construct.
579 where COND_S is the simplified version of the predicate. PRE_COND_S
580 are the pre side-effects produced by the translation of the
582 We need to build the chain recursively otherwise we run into
583 problems with folding incomplete statements. */
586 gfc_trans_if_1 (gfc_code * code)
591 /* Check for an unconditional ELSE clause. */
/* An ELSE block has no condition expression; just emit its body.  */
593 return gfc_trans_code (code->next);
595 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
596 gfc_init_se (&if_se, NULL);
597 gfc_start_block (&if_se.pre);
599 /* Calculate the IF condition expression. */
600 gfc_conv_expr_val (&if_se, code->expr);
602 /* Translate the THEN clause. */
603 stmt = gfc_trans_code (code->next);
605 /* Translate the ELSE clause.  ELSE IF chains are handled by the
   recursive call; a missing ELSE becomes an empty statement.  */
607 elsestmt = gfc_trans_if_1 (code->block);
609 elsestmt = build_empty_stmt ();
611 /* Build the condition expression and add it to the condition block. */
612 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
614 gfc_add_expr_to_block (&if_se.pre, stmt);
616 /* Finish off this statement. */
617 return gfc_finish_block (&if_se.pre);
/* Translate an IF construct by delegating to gfc_trans_if_1.  */
621 gfc_trans_if (gfc_code * code)
623 /* Ignore the top EXEC_IF, it only announces an IF construct. The
624 actual code we must translate is in code->block. */
626 return gfc_trans_if_1 (code->block);
630 /* Translate an arithmetic IF expression.
632 IF (cond) label1, label2, label3 translates to
644 An optimized version can be generated in case of equal labels.
645 E.g., if label1 is equal to label2, we can translate it to
654 gfc_trans_arithmetic_if (gfc_code * code)
662 /* Start a new block. */
663 gfc_init_se (&se, NULL);
664 gfc_start_block (&se.pre);
666 /* Pre-evaluate COND. */
667 gfc_conv_expr_val (&se, code->expr);
669 /* Build something to compare with.  Zero of COND's own type, so the
   comparisons below are well typed for both integer and real COND.  */
670 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
672 if (code->label->value != code->label2->value)
674 /* If (cond < 0) take branch1 else take branch2.
675 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
676 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
677 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
/* When label1 == label3 only the sign matters, so NE suffices.  */
679 if (code->label->value != code->label3->value)
680 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
682 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
684 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
/* label1 == label2: the < and == cases share one target.  */
687 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
689 if (code->label->value != code->label3->value
690 && code->label2->value != code->label3->value)
692 /* if (cond <= 0) take branch1 else take branch2. */
693 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
694 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
695 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
698 /* Append the COND_EXPR to the evaluation of COND, and return. */
699 gfc_add_expr_to_block (&se.pre, branch1);
700 return gfc_finish_block (&se.pre);
704 /* Translate the simple DO construct. This is where the loop variable has
705 integer type and step +-1. We can't use this in the general case
706 because integer overflow and floating point errors could give incorrect
708 We translate a do loop from:
710 DO dovar = from, to, step
716 [Evaluate loop bounds and step]
718 if ((step > 0) ? (dovar <= to) : (dovar >= to))
724 cond = (dovar == to);
726 if (cond) goto end_label;
731 This helps the optimizers by avoiding the extra induction variable
732 used in the general case. */
735 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
736 tree from, tree to, tree step)
745 type = TREE_TYPE (dovar);
747 /* Initialize the DO variable: dovar = from. */
748 gfc_add_modify_expr (pblock, dovar, from);
750 /* Cycle and exit statements are implemented with gotos. */
751 cycle_label = gfc_build_label_decl (NULL_TREE);
752 exit_label = gfc_build_label_decl (NULL_TREE);
754 /* Put the labels where they can be found later. See gfc_trans_do(). */
755 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
758 gfc_start_block (&body);
760 /* Main loop body. */
761 tmp = gfc_trans_code (code->block->next);
762 gfc_add_expr_to_block (&body, tmp);
764 /* Label for cycle statements (if needed). */
765 if (TREE_USED (cycle_label))
767 tmp = build1_v (LABEL_EXPR, cycle_label);
768 gfc_add_expr_to_block (&body, tmp);
771 /* Evaluate the loop condition.  Done BEFORE the increment so that the
   dovar == to test is exact and cannot overflow.  */
772 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
773 cond = gfc_evaluate_now (cond, &body);
775 /* Increment the loop variable. */
776 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
777 gfc_add_modify_expr (&body, dovar, tmp);
/* Exit the loop when the pre-increment value equalled TO.  */
780 tmp = build1_v (GOTO_EXPR, exit_label);
781 TREE_USED (exit_label) = 1;
782 tmp = fold_build3 (COND_EXPR, void_type_node,
783 cond, tmp, build_empty_stmt ());
784 gfc_add_expr_to_block (&body, tmp);
786 /* Finish the loop body. */
787 tmp = gfc_finish_block (&body);
788 tmp = build1_v (LOOP_EXPR, tmp);
790 /* Only execute the loop if the number of iterations is positive.
   step is known constant +-1 here, so its sign picks the test.  */
791 if (tree_int_cst_sgn (step) > 0)
792 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
794 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
795 tmp = fold_build3 (COND_EXPR, void_type_node,
796 cond, tmp, build_empty_stmt ());
797 gfc_add_expr_to_block (pblock, tmp);
799 /* Add the exit label. */
800 tmp = build1_v (LABEL_EXPR, exit_label);
801 gfc_add_expr_to_block (pblock, tmp);
803 return gfc_finish_block (pblock);
806 /* Translate the DO construct. This obviously is one of the most
807 important ones to get right with any compiler, but especially
810 We special case some loop forms as described in gfc_trans_simple_do.
811 For other cases we implement them with a separate loop count,
812 as described in the standard.
814 We translate a do loop from:
816 DO dovar = from, to, step
822 [evaluate loop bounds and step]
823 count = (to + step - from) / step;
831 if (count <=0) goto exit_label;
835 TODO: Large loop counts
836 The code above assumes the loop count fits into a signed integer kind,
837 i.e. Does not work for loop counts > 2^31 for integer(kind=4) variables
838 We must support the full range. */
841 gfc_trans_do (gfc_code * code)
858 gfc_start_block (&block);
860 /* Evaluate all the expressions in the iterator. */
861 gfc_init_se (&se, NULL);
862 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
863 gfc_add_block_to_block (&block, &se.pre);
865 type = TREE_TYPE (dovar);
867 gfc_init_se (&se, NULL);
868 gfc_conv_expr_val (&se, code->ext.iterator->start);
869 gfc_add_block_to_block (&block, &se.pre);
/* from/to/step are forced into temporaries so they are evaluated
   exactly once, before the loop runs.  */
870 from = gfc_evaluate_now (se.expr, &block);
872 gfc_init_se (&se, NULL);
873 gfc_conv_expr_val (&se, code->ext.iterator->end);
874 gfc_add_block_to_block (&block, &se.pre);
875 to = gfc_evaluate_now (se.expr, &block);
877 gfc_init_se (&se, NULL);
878 gfc_conv_expr_val (&se, code->ext.iterator->step);
879 gfc_add_block_to_block (&block, &se.pre);
880 step = gfc_evaluate_now (se.expr, &block);
882 /* Special case simple loops.  Integer dovar with constant step of
   exactly +1 or -1 goes through gfc_trans_simple_do.  */
883 if (TREE_CODE (type) == INTEGER_TYPE
884 && (integer_onep (step)
885 || tree_int_cst_equal (step, integer_minus_one_node)))
886 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
888 /* Initialize loop count. This code is executed before we enter the
889 loop body. We generate: count = (to + step - from) / step. */
891 tmp = fold_build2 (MINUS_EXPR, type, step, from);
892 tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
893 if (TREE_CODE (type) == INTEGER_TYPE
895 tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
896 count = gfc_create_var (type, "count");
900 /* TODO: We could use the same width as the real type.
901 This would probably cause more problems that it solves
902 when we implement "long double" types. */
/* Real-typed loop: divide in the real type, then truncate the result
   to an integer iteration count.  */
903 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
904 tmp = fold_build1 (FIX_TRUNC_EXPR, gfc_array_index_type, tmp);
905 count = gfc_create_var (gfc_array_index_type, "count");
907 gfc_add_modify_expr (&block, count, tmp);
909 count_one = convert (TREE_TYPE (count), integer_one_node);
911 /* Initialize the DO variable: dovar = from. */
912 gfc_add_modify_expr (&block, dovar, from);
915 gfc_start_block (&body);
917 /* Cycle and exit statements are implemented with gotos. */
918 cycle_label = gfc_build_label_decl (NULL_TREE);
919 exit_label = gfc_build_label_decl (NULL_TREE);
921 /* Start with the loop condition. Loop until count <= 0. */
922 cond = fold_build2 (LE_EXPR, boolean_type_node, count,
923 build_int_cst (TREE_TYPE (count), 0));
924 tmp = build1_v (GOTO_EXPR, exit_label);
925 TREE_USED (exit_label) = 1;
926 tmp = fold_build3 (COND_EXPR, void_type_node,
927 cond, tmp, build_empty_stmt ());
928 gfc_add_expr_to_block (&body, tmp);
930 /* Put these labels where they can be found later. We put the
931 labels in a TREE_LIST node (because TREE_CHAIN is already
932 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
933 label in TREE_VALUE (backend_decl). */
935 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
937 /* Main loop body. */
938 tmp = gfc_trans_code (code->block->next);
939 gfc_add_expr_to_block (&body, tmp);
941 /* Label for cycle statements (if needed). */
942 if (TREE_USED (cycle_label))
944 tmp = build1_v (LABEL_EXPR, cycle_label);
945 gfc_add_expr_to_block (&body, tmp);
948 /* Increment the loop variable. */
949 tmp = build2 (PLUS_EXPR, type, dovar, step);
950 gfc_add_modify_expr (&body, dovar, tmp);
952 /* Decrement the loop count. */
953 tmp = build2 (MINUS_EXPR, TREE_TYPE (count), count, count_one);
954 gfc_add_modify_expr (&body, count, tmp);
956 /* End of loop body. */
957 tmp = gfc_finish_block (&body);
959 /* The for loop itself. */
960 tmp = build1_v (LOOP_EXPR, tmp);
961 gfc_add_expr_to_block (&block, tmp);
963 /* Add the exit label. */
964 tmp = build1_v (LABEL_EXPR, exit_label);
965 gfc_add_expr_to_block (&block, tmp);
967 return gfc_finish_block (&block);
971 /* Translate the DO WHILE construct.
984 if (! cond) goto exit_label;
990 Because the evaluation of the exit condition `cond' may have side
991 effects, we can't do much for empty loop bodies. The backend optimizers
992 should be smart enough to eliminate any dead loops. */
995 gfc_trans_do_while (gfc_code * code)
1003 /* Everything we build here is part of the loop body. */
1004 gfc_start_block (&block);
1006 /* Cycle and exit statements are implemented with gotos. */
1007 cycle_label = gfc_build_label_decl (NULL_TREE);
1008 exit_label = gfc_build_label_decl (NULL_TREE);
1010 /* Put the labels where they can be found later. See gfc_trans_do(). */
1011 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1013 /* Create a GIMPLE version of the exit condition.  Re-evaluated at the
   top of every iteration, inside the loop body.  */
1014 gfc_init_se (&cond, NULL);
1015 gfc_conv_expr_val (&cond, code->expr);
1016 gfc_add_block_to_block (&block, &cond.pre);
1017 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1019 /* Build "IF (! cond) GOTO exit_label". */
1020 tmp = build1_v (GOTO_EXPR, exit_label);
1021 TREE_USED (exit_label) = 1;
1022 tmp = fold_build3 (COND_EXPR, void_type_node,
1023 cond.expr, tmp, build_empty_stmt ());
1024 gfc_add_expr_to_block (&block, tmp);
1026 /* The main body of the loop. */
1027 tmp = gfc_trans_code (code->block->next);
1028 gfc_add_expr_to_block (&block, tmp);
1030 /* Label for cycle statements (if needed). */
1031 if (TREE_USED (cycle_label))
1033 tmp = build1_v (LABEL_EXPR, cycle_label);
1034 gfc_add_expr_to_block (&block, tmp);
1037 /* End of loop body. */
1038 tmp = gfc_finish_block (&block);
1040 gfc_init_block (&block);
1041 /* Build the loop. */
1042 tmp = build1_v (LOOP_EXPR, tmp);
1043 gfc_add_expr_to_block (&block, tmp);
1045 /* Add the exit label. */
1046 tmp = build1_v (LABEL_EXPR, exit_label);
1047 gfc_add_expr_to_block (&block, tmp);
1049 return gfc_finish_block (&block);
1053 /* Translate the SELECT CASE construct for INTEGER case expressions,
1054 without killing all potential optimizations. The problem is that
1055 Fortran allows unbounded cases, but the back-end does not, so we
1056 need to intercept those before we enter the equivalent SWITCH_EXPR
1059 For example, we translate this,
1062 CASE (:100,101,105:115)
1072 to the GENERIC equivalent,
1076 case (minimum value for typeof(expr) ... 100:
1082 case 200 ... (maximum value for typeof(expr):
1099 gfc_trans_integer_select (gfc_code * code)
1109 gfc_start_block (&block);
1111 /* Calculate the switch expression. */
1112 gfc_init_se (&se, NULL);
1113 gfc_conv_expr_val (&se, code->expr);
1114 gfc_add_block_to_block (&block, &se.pre);
1116 end_label = gfc_build_label_decl (NULL_TREE);
1118 gfc_init_block (&body);
1120 for (c = code->block; c; c = c->block)
1122 for (cp = c->ext.case_list; cp; cp = cp->next)
1127 /* Assume it's the default case. */
1128 low = high = NULL_TREE;
1132 low = gfc_conv_constant_to_tree (cp->low);
1134 /* If there's only a lower bound, set the high bound to the
1135 maximum value of the case expression. */
1137 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1142 /* Three cases are possible here:
1144 1) There is no lower bound, e.g. CASE (:N).
1145 2) There is a lower bound .NE. high bound, that is
1146 a case range, e.g. CASE (N:M) where M>N (we make
1147 sure that M>N during type resolution).
1148 3) There is a lower bound, and it has the same value
1149 as the high bound, e.g. CASE (N:N). This is our
1150 internal representation of CASE(N).
1152 In the first and second case, we need to set a value for
1153 high. In the third case, we don't because the GCC middle
1154 end represents a single case value by just letting high be
1155 a NULL_TREE. We can't do that because we need to be able
1156 to represent unbounded cases. */
1160 && mpz_cmp (cp->low->value.integer,
1161 cp->high->value.integer) != 0))
1162 high = gfc_conv_constant_to_tree (cp->high)
1164 /* Unbounded case. */
1166 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1169 /* Build a label. */
1170 label = gfc_build_label_decl (NULL_TREE);
1172 /* Add this case label.
1173 Add parameter 'label', make it match GCC backend. */
1174 tmp = build3 (CASE_LABEL_EXPR, void_type_node, low, high, label);
1175 gfc_add_expr_to_block (&body, tmp);
1178 /* Add the statements for this case. */
1179 tmp = gfc_trans_code (c->next);
1180 gfc_add_expr_to_block (&body, tmp);
1182 /* Break to the end of the construct.  SWITCH_EXPR bodies fall
   through, so every case block ends with an explicit goto.  */
1183 tmp = build1_v (GOTO_EXPR, end_label);
1184 gfc_add_expr_to_block (&body, tmp);
1187 tmp = gfc_finish_block (&body);
1188 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1189 gfc_add_expr_to_block (&block, tmp);
1191 tmp = build1_v (LABEL_EXPR, end_label);
1192 gfc_add_expr_to_block (&block, tmp);
1194 return gfc_finish_block (&block);
1198 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1200 There are only two cases possible here, even though the standard
1201 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1202 .FALSE., and DEFAULT.
1204 We never generate more than two blocks here. Instead, we always
1205 try to eliminate the DEFAULT case. This way, we can translate this
1206 kind of SELECT construct to a simple
1210 expression in GENERIC. */
1213 gfc_trans_logical_select (gfc_code * code)
1216 gfc_code *t, *f, *d;
1221 /* Assume we don't have any cases at all. */
1224 /* Now see which ones we actually do have. We can have at most two
1225 cases in a single case list: one for .TRUE. and one for .FALSE.
1226 The default case is always separate. If the cases for .TRUE. and
1227 .FALSE. are in the same case list, the block for that case list is
1228 always executed, and we don't generate a COND_EXPR. */
1229 for (c = code->block; c; c = c->block)
1231 for (cp = c->ext.case_list; cp; cp = cp->next)
1235 if (cp->low->value.logical == 0) /* .FALSE. */
1237 else /* if (cp->value.logical != 0), thus .TRUE. */
1245 /* Start a new block. */
1246 gfc_start_block (&block);
1248 /* Calculate the switch expression. We always need to do this
1249 because it may have side effects. */
1250 gfc_init_se (&se, NULL);
1251 gfc_conv_expr_val (&se, code->expr);
1252 gfc_add_block_to_block (&block, &se.pre);
1254 if (t == f && t != NULL)
1256 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1257 translate the code for these cases, append it to the current
1259 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1263 tree true_tree, false_tree, stmt;
/* Both branches default to empty so a one-sided SELECT still
   produces a well-formed COND_EXPR.  */
1265 true_tree = build_empty_stmt ();
1266 false_tree = build_empty_stmt ();
1268 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1269 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1270 make the missing case the default case. */
1271 if (t != NULL && f != NULL)
1281 /* Translate the code for each of these blocks, and append it to
1282 the current block. */
1284 true_tree = gfc_trans_code (t->next);
1287 false_tree = gfc_trans_code (f->next);
1289 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1290 true_tree, false_tree);
1291 gfc_add_expr_to_block (&block, stmt);
1294 return gfc_finish_block (&block);
1298 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1299 Instead of generating compares and jumps, it is far simpler to
1300 generate a data structure describing the cases in order and call a
1301 library subroutine that locates the right case.
1302 This is particularly true because this is the only case where we
1303 might have to dispose of a temporary.
1304 The library subroutine returns a pointer to jump to or NULL if no
1305 branches are to be taken. */
1308 gfc_trans_character_select (gfc_code *code)
1310 tree init, node, end_label, tmp, type, args, *labels;
1311 stmtblock_t block, body;
1317 static tree select_struct;
1318 static tree ss_string1, ss_string1_len;
1319 static tree ss_string2, ss_string2_len;
1320 static tree ss_target;
/* Lazily build the RECORD_TYPE describing one jump-table entry
   (string1/string1_len, string2/string2_len, target).  The type and its
   field decls are static, so they are created only once per run.  */
1322 if (select_struct == NULL)
1324 tree gfc_int4_type_node = gfc_get_int_type (4);
1326 select_struct = make_node (RECORD_TYPE);
1327 TYPE_NAME (select_struct) = get_identifier ("_jump_struct");
1330 #define ADD_FIELD(NAME, TYPE) \
1331 ss_##NAME = gfc_add_field_to_struct \
1332 (&(TYPE_FIELDS (select_struct)), select_struct, \
1333 get_identifier (stringize(NAME)), TYPE)
1335 ADD_FIELD (string1, pchar_type_node);
1336 ADD_FIELD (string1_len, gfc_int4_type_node);
1338 ADD_FIELD (string2, pchar_type_node);
1339 ADD_FIELD (string2_len, gfc_int4_type_node);
/* Address of the label to jump to when this entry matches.  */
1341 ADD_FIELD (target, pvoid_type_node);
1344 gfc_finish_type (select_struct);
/* Walk to the leftmost case so the whole list can then be traversed in
   sorted order through the 'right' links.  */
1347 cp = code->block->ext.case_list;
1348 while (cp->left != NULL)
1352 for (d = cp; d; d = d->right)
1356 labels = gfc_getmem (n * sizeof (tree));
/* One label per case range.  FORCED_LABEL because their addresses are
   taken in a static initializer below.  */
1360 for(i = 0; i < n; i++)
1362 labels[i] = gfc_build_label_decl (NULL_TREE);
1363 TREE_USED (labels[i]) = 1;
1364 /* TODO: The gimplifier should do this for us, but it has
1365 inadequacies when dealing with static initializers. */
1366 FORCED_LABEL (labels[i]) = 1;
1369 end_label = gfc_build_label_decl (NULL_TREE);
1371 /* Generate the body */
1372 gfc_start_block (&block);
1373 gfc_init_block (&body);
/* Emit each case block preceded by its label(s) and followed by a jump
   past the whole construct (Fortran has no fall-through).  */
1375 for (c = code->block; c; c = c->block)
1377 for (d = c->ext.case_list; d; d = d->next)
1379 tmp = build1_v (LABEL_EXPR, labels[d->n]);
1380 gfc_add_expr_to_block (&body, tmp);
1383 tmp = gfc_trans_code (c->next);
1384 gfc_add_expr_to_block (&body, tmp);
1386 tmp = build1_v (GOTO_EXPR, end_label);
1387 gfc_add_expr_to_block (&body, tmp);
1390 /* Generate the structure describing the branches */
1394 for(d = cp; d; d = d->right, i++)
1398 gfc_init_se (&se, NULL);
/* An absent low bound (case (:high)) is encoded as a null string
   pointer with zero length.  */
1402 node = tree_cons (ss_string1, null_pointer_node, node);
1403 node = tree_cons (ss_string1_len, integer_zero_node, node);
1407 gfc_conv_expr_reference (&se, d->low);
1409 node = tree_cons (ss_string1, se.expr, node);
1410 node = tree_cons (ss_string1_len, se.string_length, node);
/* Likewise for an absent high bound (case (low:)).  */
1413 if (d->high == NULL)
1415 node = tree_cons (ss_string2, null_pointer_node, node);
1416 node = tree_cons (ss_string2_len, integer_zero_node, node);
1420 gfc_init_se (&se, NULL);
1421 gfc_conv_expr_reference (&se, d->high);
1423 node = tree_cons (ss_string2, se.expr, node);
1424 node = tree_cons (ss_string2_len, se.string_length, node);
1427 tmp = gfc_build_addr_expr (pvoid_type_node, labels[i]);
1428 node = tree_cons (ss_target, tmp, node);
1430 tmp = build_constructor_from_list (select_struct, nreverse (node));
1431 init = tree_cons (NULL_TREE, tmp, init);
/* The jump table is an array of select_struct with one entry per case;
   mark it constant/static so it becomes a static initializer.  */
1434 type = build_array_type (select_struct, build_index_type
1435 (build_int_cst (NULL_TREE, n - 1)));
1437 init = build_constructor_from_list (type, nreverse(init));
1438 TREE_CONSTANT (init) = 1;
1439 TREE_INVARIANT (init) = 1;
1440 TREE_STATIC (init) = 1;
1441 /* Create a static variable to hold the jump table. */
1442 tmp = gfc_create_var (type, "jumptable");
1443 TREE_CONSTANT (tmp) = 1;
1444 TREE_INVARIANT (tmp) = 1;
1445 TREE_STATIC (tmp) = 1;
1446 DECL_INITIAL (tmp) = init;
1449 /* Build an argument list for the library call */
1450 init = gfc_build_addr_expr (pvoid_type_node, init);
1451 args = gfc_chainon_list (NULL_TREE, init);
1453 tmp = build_int_cst (NULL_TREE, n);
1454 args = gfc_chainon_list (args, tmp);
1456 tmp = gfc_build_addr_expr (pvoid_type_node, end_label);
1457 args = gfc_chainon_list (args, tmp);
1459 gfc_init_se (&se, NULL);
1460 gfc_conv_expr_reference (&se, code->expr);
1462 args = gfc_chainon_list (args, se.expr);
1463 args = gfc_chainon_list (args, se.string_length);
1465 gfc_add_block_to_block (&block, &se.pre);
/* The library routine returns the address of the label to branch to;
   jump to it with a computed goto.  */
1467 tmp = build_function_call_expr (gfor_fndecl_select_string, args);
1468 tmp = build1 (GOTO_EXPR, void_type_node, tmp);
1469 gfc_add_expr_to_block (&block, tmp);
1471 tmp = gfc_finish_block (&body);
1472 gfc_add_expr_to_block (&block, tmp);
1473 tmp = build1_v (LABEL_EXPR, end_label);
1474 gfc_add_expr_to_block (&block, tmp);
1479 return gfc_finish_block (&block);
1483 /* Translate the three variants of the SELECT CASE construct.
1485 SELECT CASEs with INTEGER case expressions can be translated to an
1486 equivalent GENERIC switch statement, and for LOGICAL case
1487 expressions we build one or two if-else compares.
1489 SELECT CASEs with CHARACTER case expressions are a whole different
1490 story, because they don't exist in GENERIC. So we sort them and
1491 do a binary search at runtime.
1493 Fortran has no BREAK statement, and it does not allow jumps from
1494 one case block to another. That makes things a lot easier for
1498 gfc_trans_select (gfc_code * code)
1500 gcc_assert (code && code->expr);
1502 /* Empty SELECT constructs are legal. */
1503 if (code->block == NULL)
1504 return build_empty_stmt ();
1506 /* Select the correct translation function. */
/* Dispatch on the type of the case expression; each variant has its own
   translation strategy (see the comments above each helper).  */
1507 switch (code->expr->ts.type)
1509 case BT_LOGICAL: return gfc_trans_logical_select (code);
1510 case BT_INTEGER: return gfc_trans_integer_select (code);
1511 case BT_CHARACTER: return gfc_trans_character_select (code);
/* Any other type is a front-end bug, not a user error.  */
1513 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1519 /* Generate the loops for a FORALL block. The normal loop format:
1520 count = (end - start + step) / step
1533 gfc_trans_forall_loop (forall_info *forall_tmp, int nvar, tree body, int mask_flag)
1541 tree var, start, end, step;
/* Wrap BODY in one counted loop per FORALL index of this level,
   innermost first, following the iter_info chain.  */
1544 iter = forall_tmp->this_loop;
1545 for (n = 0; n < nvar; n++)
1548 start = iter->start;
1552 exit_label = gfc_build_label_decl (NULL_TREE);
1553 TREE_USED (exit_label) = 1;
1555 /* The loop counter. */
1556 count = gfc_create_var (TREE_TYPE (var), "count");
1558 /* The body of the loop. */
1559 gfc_init_block (&block);
/* Leave the loop once the remaining iteration count drops to zero.  */
1561 /* The exit condition. */
1562 cond = fold_build2 (LE_EXPR, boolean_type_node,
1563 count, build_int_cst (TREE_TYPE (count), 0))
1564 tmp = build1_v (GOTO_EXPR, exit_label);
1565 tmp = fold_build3 (COND_EXPR, void_type_node,
1566 cond, tmp, build_empty_stmt ());
1567 gfc_add_expr_to_block (&block, tmp);
1569 /* The main loop body. */
1570 gfc_add_expr_to_block (&block, body);
1572 /* Increment the loop variable. */
1573 tmp = build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1574 gfc_add_modify_expr (&block, var, tmp);
/* The mask is a flat array walked by MASKINDEX, so it is stepped only
   in the innermost (n == 0) loop.  */
1576 /* Advance to the next mask element. Only do this for the
1578 if (n == 0 && mask_flag && forall_tmp->mask)
1580 tree maskindex = forall_tmp->maskindex;
1581 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
1582 maskindex, gfc_index_one_node);
1583 gfc_add_modify_expr (&block, maskindex, tmp);
1586 /* Decrement the loop counter. */
1587 tmp = build2 (MINUS_EXPR, TREE_TYPE (var), count, gfc_index_one_node);
1588 gfc_add_modify_expr (&block, count, tmp);
1590 body = gfc_finish_block (&block);
1592 /* Loop var initialization. */
1593 gfc_init_block (&block);
1594 gfc_add_modify_expr (&block, var, start);
/* Reset the mask index only outside the outermost (n == nvar-1) loop,
   so it sweeps linearly over the whole mask temporary.  */
1596 /* Initialize maskindex counter. Only do this before the
1598 if (n == nvar - 1 && mask_flag && forall_tmp->mask)
1599 gfc_add_modify_expr (&block, forall_tmp->maskindex,
1600 gfc_index_zero_node);
/* count = (end - start + step) / step: the iteration count described in
   the function's header comment.  */
1602 /* Initialize the loop counter. */
1603 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1604 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1605 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1606 gfc_add_modify_expr (&block, count, tmp);
1608 /* The loop expression. */
1609 tmp = build1_v (LOOP_EXPR, body);
1610 gfc_add_expr_to_block (&block, tmp);
1612 /* The exit label. */
1613 tmp = build1_v (LABEL_EXPR, exit_label);
1614 gfc_add_expr_to_block (&block, tmp);
1616 body = gfc_finish_block (&block);
1623 /* Generate the body and loops according to MASK_FLAG and NEST_FLAG.
1624 If MASK_FLAG is nonzero, the body is controlled by the masks in the
1625 forall nest; otherwise the body is not mask-controlled.
1626 If NEST_FLAG is nonzero, generate loops for the nested forall; otherwise
1627 only generate loops for the current forall level. */
1630 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1631 int mask_flag, int nest_flag)
1635 forall_info *forall_tmp;
1636 tree pmask, mask, maskindex;
1638 forall_tmp = nested_forall_info;
1639 /* Generate loops for nested forall. */
/* Descend to the innermost nesting level first, then wrap BODY outwards
   through the 'outer' links.  */
1642 while (forall_tmp->next_nest != NULL)
1643 forall_tmp = forall_tmp->next_nest;
1644 while (forall_tmp != NULL)
1646 /* Generate body with masks' control. */
1649 pmask = forall_tmp->pmask;
1650 mask = forall_tmp->mask;
1651 maskindex = forall_tmp->maskindex;
1655 /* If a mask was specified make the assignment conditional. */
/* NOTE(review): when PMASK is set the mask temporary appears to be
   heap-allocated and MASK is a pointer needing a dereference — confirm
   against gfc_do_allocate.  */
1657 tmp = build_fold_indirect_ref (mask);
1660 tmp = gfc_build_array_ref (tmp, maskindex);
1662 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1665 nvar = forall_tmp->nvar;
1666 body = gfc_trans_forall_loop (forall_tmp, nvar, body, mask_flag);
1667 forall_tmp = forall_tmp->outer;
/* nest_flag == 0: only the current (innermost) forall level gets its
   loops generated.  */
1672 nvar = forall_tmp->nvar;
1673 body = gfc_trans_forall_loop (forall_tmp, nvar, body, mask_flag);
1680 /* Allocate data for holding a temporary array. Returns either a local
1681 temporary array or a pointer variable. */
1684 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
/* For a compile-time-constant SIZE, the array type gets bounds
   [0 .. size-1].  */
1692 if (INTEGER_CST_P (size))
1694 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1695 gfc_index_one_node);
1700 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1701 type = build_array_type (elem_type, type);
/* Stack allocation is only possible for known, suitably small sizes.  */
1702 if (gfc_can_put_var_on_stack (bytesize))
1704 gcc_assert (INTEGER_CST_P (size));
1705 tmpvar = gfc_create_var (type, "temp");
/* Otherwise allocate from the heap through the runtime library; *PDATA
   receives the raw pointer so the caller can free it later.  */
1710 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1711 *pdata = convert (pvoid_type_node, tmpvar);
1713 args = gfc_chainon_list (NULL_TREE, bytesize);
/* Pick the malloc entry point matching the index-integer width.  */
1714 if (gfc_index_integer_kind == 4)
1715 tmp = gfor_fndecl_internal_malloc;
1716 else if (gfc_index_integer_kind == 8)
1717 tmp = gfor_fndecl_internal_malloc64;
1720 tmp = build_function_call_expr (tmp, args);
1721 tmp = convert (TREE_TYPE (tmpvar), tmp);
1722 gfc_add_modify_expr (pblock, tmpvar, tmp);
1728 /* Generate code to copy the temporary back to the actual lhs. */
1731 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1732 tree count1, tree wheremask, bool invert)
1736 stmtblock_t block, body;
/* Walk the lhs to find out whether it is scalar or needs the
   scalarizer.  */
1742 lss = gfc_walk_expr (expr)
1744 if (lss == gfc_ss_terminator)
/* Scalar lhs: a single assignment from temp[count1], no loop needed.  */
1746 gfc_start_block (&block);
1748 gfc_init_se (&lse, NULL);
1750 /* Translate the expression. */
1751 gfc_conv_expr (&lse, expr);
1753 /* Form the expression for the temporary. */
1754 tmp = gfc_build_array_ref (tmp1, count1);
1756 /* Use the scalar assignment as is. */
1757 gfc_add_block_to_block (&block, &lse.pre);
1758 gfc_add_modify_expr (&block, lse.expr, tmp);
1759 gfc_add_block_to_block (&block, &lse.post);
1761 /* Increment the count1. */
1762 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1763 gfc_index_one_node);
1764 gfc_add_modify_expr (&block, count1, tmp);
1766 tmp = gfc_finish_block (&block);
/* Array lhs: build a scalarized loop that copies temp[count1++] into
   each lhs element, optionally under WHEREMASK control.  */
1770 gfc_start_block (&block);
1772 gfc_init_loopinfo (&loop1);
1773 gfc_init_se (&rse, NULL);
1774 gfc_init_se (&lse, NULL);
1776 /* Associate the lss with the loop. */
1777 gfc_add_ss_to_loop (&loop1, lss);
1779 /* Calculate the bounds of the scalarization. */
1780 gfc_conv_ss_startstride (&loop1);
1781 /* Setup the scalarizing loops. */
1782 gfc_conv_loop_setup (&loop1);
1784 gfc_mark_ss_chain_used (lss, 1);
1786 /* Start the scalarized loop body. */
1787 gfc_start_scalarized_body (&loop1, &body);
1789 /* Setup the gfc_se structures. */
1790 gfc_copy_loopinfo_to_se (&lse, &loop1);
1793 /* Form the expression of the temporary. */
1794 if (lss != gfc_ss_terminator)
1795 rse.expr = gfc_build_array_ref (tmp1, count1);
1796 /* Translate expr. */
1797 gfc_conv_expr (&lse, expr);
1799 /* Use the scalar assignment. */
1800 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts.type);
1802 /* Form the mask expression according to the mask tree list. */
/* INVERT flips the sense of the mask (for ELSEWHERE parts).  */
1805 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1807 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1808 TREE_TYPE (wheremaskexpr),
1810 tmp = fold_build3 (COND_EXPR, void_type_node,
1811 wheremaskexpr, tmp, build_empty_stmt ());
1814 gfc_add_expr_to_block (&body, tmp);
1816 /* Increment count1. */
1817 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1818 count1, gfc_index_one_node);
1819 gfc_add_modify_expr (&body, count1, tmp);
/* count3 indexes the where-mask and is advanced in step with it.  */
1821 /* Increment count3. */
1824 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1825 count3, gfc_index_one_node);
1826 gfc_add_modify_expr (&body, count3, tmp);
1829 /* Generate the copying loops. */
1830 gfc_trans_scalarizing_loops (&loop1, &body);
1831 gfc_add_block_to_block (&block, &loop1.pre);
1832 gfc_add_block_to_block (&block, &loop1.post);
1833 gfc_cleanup_loop (&loop1);
1835 tmp = gfc_finish_block (&block);
1841 /* Generate code to copy the rhs into the temporary. TMP1 is the address
1842 of the temporary; LSS and RSS are formed in compute_inner_temp_size()
1843 and should not be freed. WHEREMASK is the conditional execution mask
1844 whose sense may be inverted by INVERT. */
1847 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
1848 tree count1, gfc_ss *lss, gfc_ss *rss,
1849 tree wheremask, bool invert)
1851 stmtblock_t block, body1;
1858 gfc_start_block (&block);
1860 gfc_init_se (&rse, NULL);
1861 gfc_init_se (&lse, NULL);
/* Scalar case: evaluate the rhs once and store it at temp[count1].  */
1863 if (lss == gfc_ss_terminator)
1865 gfc_init_block (&body1);
1866 gfc_conv_expr (&rse, expr2);
1867 lse.expr = gfc_build_array_ref (tmp1, count1);
/* Array case: reuse the LSS/RSS chains computed by
   compute_inner_temp_size () to drive the scalarizer.  */
1871 /* Initialize the loop. */
1872 gfc_init_loopinfo (&loop);
1874 /* We may need LSS to determine the shape of the expression. */
1875 gfc_add_ss_to_loop (&loop, lss);
1876 gfc_add_ss_to_loop (&loop, rss);
1878 gfc_conv_ss_startstride (&loop);
1879 gfc_conv_loop_setup (&loop);
1881 gfc_mark_ss_chain_used (rss, 1);
1882 /* Start the loop body. */
1883 gfc_start_scalarized_body (&loop, &body1);
1885 /* Translate the expression. */
1886 gfc_copy_loopinfo_to_se (&rse, &loop);
1888 gfc_conv_expr (&rse, expr2);
1890 /* Form the expression of the temporary. */
1891 lse.expr = gfc_build_array_ref (tmp1, count1);
1894 /* Use the scalar assignment. */
1895 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts.type);
1897 /* Form the mask expression according to the mask tree list. */
/* INVERT flips the sense of the mask (for ELSEWHERE parts).  */
1900 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1902 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1903 TREE_TYPE (wheremaskexpr),
1905 tmp = fold_build3 (COND_EXPR, void_type_node,
1906 wheremaskexpr, tmp, build_empty_stmt ());
1909 gfc_add_expr_to_block (&body1, tmp);
/* For the scalar case the single assignment and the count1 increment go
   straight into the outer block; there is no scalarized loop.  */
1911 if (lss == gfc_ss_terminator)
1913 gfc_add_block_to_block (&block, &body1);
1915 /* Increment count1. */
1916 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1917 gfc_index_one_node);
1918 gfc_add_modify_expr (&block, count1, tmp);
1922 /* Increment count1. */
1923 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1924 count1, gfc_index_one_node);
1925 gfc_add_modify_expr (&body1, count1, tmp);
/* count3 tracks the where-mask element in step with the copy.  */
1927 /* Increment count3. */
1930 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1931 count3, gfc_index_one_node);
1932 gfc_add_modify_expr (&body1, count3, tmp);
1935 /* Generate the copying loops. */
1936 gfc_trans_scalarizing_loops (&loop, &body1);
1938 gfc_add_block_to_block (&block, &loop.pre);
1939 gfc_add_block_to_block (&block, &loop.post);
1941 gfc_cleanup_loop (&loop);
1942 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
1943 as tree nodes in SS may not be valid in different scope. */
1946 tmp = gfc_finish_block (&block);
1951 /* Calculate the size of temporary needed in the assignment inside forall.
1952 LSS and RSS are filled in this function. */
1955 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
1956 stmtblock_t * pblock,
1957 gfc_ss **lss, gfc_ss **rss)
/* Walk the lhs; a scalar assignment needs exactly one element.  */
1964 *lss = gfc_walk_expr (expr1);
1967 size = gfc_index_one_node;
1968 if (*lss != gfc_ss_terminator)
1970 gfc_init_loopinfo (&loop);
1972 /* Walk the RHS of the expression. */
1973 *rss = gfc_walk_expr (expr2);
1974 if (*rss == gfc_ss_terminator)
1976 /* The rhs is scalar. Add a ss for the expression. */
1977 *rss = gfc_get_ss ();
1978 (*rss)->next = gfc_ss_terminator;
1979 (*rss)->type = GFC_SS_SCALAR;
1980 (*rss)->expr = expr2;
1983 /* Associate the SS with the loop. */
1984 gfc_add_ss_to_loop (&loop, *lss);
1985 /* We don't actually need to add the rhs at this point, but it might
1986 make guessing the loop bounds a bit easier. */
1987 gfc_add_ss_to_loop (&loop, *rss);
1989 /* We only want the shape of the expression, not rest of the junk
1990 generated by the scalarizer. */
1991 loop.array_parameter = 1;
1993 /* Calculate the bounds of the scalarization. */
1994 gfc_conv_ss_startstride (&loop);
1995 gfc_conv_loop_setup (&loop);
/* size = product over all dimensions of the extent
   (to - from + 1) — the total element count of the assignment.  */
1997 /* Figure out how many elements we need. */
1998 for (i = 0; i < loop.dimen; i++)
2000 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2001 gfc_index_one_node, loop.from[i]);
2002 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2004 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
/* Force evaluation into a temporary now so callers can reuse it.  */
2006 gfc_add_block_to_block (pblock, &loop.pre);
2007 size = gfc_evaluate_now (size, pblock);
2008 gfc_add_block_to_block (pblock, &loop.post);
2010 /* TODO: write a function that cleans up a loopinfo without freeing
2011 the SS chains. Currently a NOP. */
2018 /* Calculate the overall iterator number of the nested forall construct. */
2021 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2022 stmtblock_t *inner_size_body, stmtblock_t *block)
/* Accumulate INNER_SIZE over every iteration of the forall nest:
   num starts at zero and the body below adds INNER_SIZE once per
   iteration when run inside the generated loops.  */
2027 /* TODO: optimizing the computing process. */
2028 number = gfc_create_var (gfc_array_index_type, "num");
2029 gfc_add_modify_expr (block, number, gfc_index_zero_node);
2031 gfc_start_block (&body);
/* INNER_SIZE_BODY holds any statements needed to (re)compute
   INNER_SIZE; they must run inside the loops too.  */
2032 if (inner_size_body)
2033 gfc_add_block_to_block (&body, inner_size_body);
2034 if (nested_forall_info)
2035 tmp = build2 (PLUS_EXPR, gfc_array_index_type, number,
2039 gfc_add_modify_expr (&body, number, tmp);
2040 tmp = gfc_finish_block (&body);
2042 /* Generate loops. */
/* Loops run without mask control (mask_flag 0): the temporary must be
   big enough for all iterations, masked or not.  */
2043 if (nested_forall_info != NULL)
2044 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 0, 1);
2046 gfc_add_expr_to_block (block, tmp);
2052 /* Allocate temporary for forall construct. SIZE is the size of the
2053 temporary needed. PTEMP1 is returned so the space can be freed. */
2056 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
/* bytesize = size * sizeof (type); the raw allocation is done by
   gfc_do_allocate, and a heap pointer result is dereferenced so the
   caller always sees the array itself.  */
2064 unit = TYPE_SIZE_UNIT (type);
2065 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2068 temp1 = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2071 tmp = build_fold_indirect_ref (temp1);
2079 /* Allocate temporary for forall construct according to the information in
2080 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2081 assignment inside forall. PTEMP1 is returned for space free. */
2084 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2085 tree inner_size, stmtblock_t * inner_size_body,
2086 stmtblock_t * block)
2090 /* Calculate the total size of temporary needed in forall construct. */
/* Total size = INNER_SIZE summed over every iteration of the nest.  */
2091 size = compute_overall_iter_number (nested_forall_info, inner_size,
2092 inner_size_body, block);
2094 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2098 /* Handle assignments inside forall which need temporary.
2100 forall (i=start:end:stride; maskexpr)
2103 (where e,f<i> are arbitrary expressions possibly involving i
2104 and there is a dependency between e<i> and f<i>)
2106 masktmp(:) = maskexpr(:)
2111 for (i = start; i <= end; i += stride)
2115 for (i = start; i <= end; i += stride)
2117 if (masktmp[maskindex++])
2118 tmp[count1++] = f<i>
2122 for (i = start; i <= end; i += stride)
2124 if (masktmp[maskindex++])
2125 e<i> = tmp[count1++]
2130 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2131 tree wheremask, bool invert,
2132 forall_info * nested_forall_info,
2133 stmtblock_t * block)
2141 stmtblock_t inner_size_body;
2143 /* Create vars. count1 is the current iterator number of the nested
2145 count1 = gfc_create_var (gfc_array_index_type, "count1");
2147 /* Count is the wheremask index. */
2150 count = gfc_create_var (gfc_array_index_type, "count");
2151 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2156 /* Initialize count1. */
2157 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2159 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2160 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2161 gfc_init_block (&inner_size_body);
2162 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2165 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
2166 type = gfc_typenode_for_spec (&expr1->ts);
2168 /* Allocate temporary for nested forall construct according to the
2169 information in nested_forall_info and inner_size. */
2170 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2171 &inner_size_body, block, &ptemp1);
/* Pass 1: evaluate every rhs value into the temporary, running the copy
   loop inside the full (masked) forall nest.  */
2173 /* Generate codes to copy rhs to the temporary . */
2174 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2177 /* Generate body and loops according to the information in
2178 nested_forall_info. */
2179 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2180 gfc_add_expr_to_block (block, tmp);
/* Reset both counters so pass 2 replays the same element order.  */
2183 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2187 gfc_add_modify_expr (block, count, gfc_index_zero_node);
/* Pass 2: copy the saved values from the temporary into the lhs.  */
2189 /* Generate codes to copy the temporary to lhs. */
2190 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2193 /* Generate body and loops according to the information in
2194 nested_forall_info. */
2195 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2196 gfc_add_expr_to_block (block, tmp);
/* Release the heap temporary, if one was allocated (ptemp1 set).  */
2200 /* Free the temporary. */
2201 tmp = gfc_chainon_list (NULL_TREE, ptemp1);
2202 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2203 gfc_add_expr_to_block (block, tmp);
2208 /* Translate pointer assignment inside FORALL which need temporary. */
2211 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2212 forall_info * nested_forall_info,
2213 stmtblock_t * block)
2227 tree tmp, tmp1, ptemp1;
2229 count = gfc_create_var (gfc_array_index_type, "count");
2230 gfc_add_modify_expr (block, count, gfc_index_zero_node);
/* One temp slot per forall iteration; each slot holds either a bare
   pointer (scalar case) or a whole array descriptor (array case).  */
2232 inner_size = integer_one_node;
2233 lss = gfc_walk_expr (expr1);
2234 rss = gfc_walk_expr (expr2);
/* Scalar pointer assignment: stash the rhs pointer values first, then
   assign them to the lhs in a second pass.  */
2235 if (lss == gfc_ss_terminator)
2237 type = gfc_typenode_for_spec (&expr1->ts);
2238 type = build_pointer_type (type);
2240 /* Allocate temporary for nested forall construct according to the
2241 information in nested_forall_info and inner_size. */
2242 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2243 inner_size, NULL, block, &ptemp1);
/* Pass 1: temp[count++] = address of rhs, inside the masked nest.  */
2244 gfc_start_block (&body);
2245 gfc_init_se (&lse, NULL);
2246 lse.expr = gfc_build_array_ref (tmp1, count);
2247 gfc_init_se (&rse, NULL);
2248 rse.want_pointer = 1;
2249 gfc_conv_expr (&rse, expr2);
2250 gfc_add_block_to_block (&body, &rse.pre);
2251 gfc_add_modify_expr (&body, lse.expr,
2252 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2253 gfc_add_block_to_block (&body, &rse.post);
2255 /* Increment count. */
2256 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2257 count, gfc_index_one_node);
2258 gfc_add_modify_expr (&body, count, tmp);
2260 tmp = gfc_finish_block (&body);
2262 /* Generate body and loops according to the information in
2263 nested_forall_info. */
2264 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2265 gfc_add_expr_to_block (block, tmp);
/* Pass 2: replay the element order and do lhs => temp[count++].  */
2268 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2270 gfc_start_block (&body);
2271 gfc_init_se (&lse, NULL);
2272 gfc_init_se (&rse, NULL);
2273 rse.expr = gfc_build_array_ref (tmp1, count);
2274 lse.want_pointer = 1;
2275 gfc_conv_expr (&lse, expr1);
2276 gfc_add_block_to_block (&body, &lse.pre);
2277 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2278 gfc_add_block_to_block (&body, &lse.post);
2279 /* Increment count. */
2280 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2281 count, gfc_index_one_node);
2282 gfc_add_modify_expr (&body, count, tmp);
2283 tmp = gfc_finish_block (&body);
2285 /* Generate body and loops according to the information in
2286 nested_forall_info. */
2287 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2288 gfc_add_expr_to_block (block, tmp);
/* Array pointer assignment: save full array descriptors instead of bare
   pointers.  Scalarizer setup is used only to get the rhs bounds.  */
2292 gfc_init_loopinfo (&loop);
2294 /* Associate the SS with the loop. */
2295 gfc_add_ss_to_loop (&loop, rss);
2297 /* Setup the scalarizing loops and bounds. */
2298 gfc_conv_ss_startstride (&loop);
2300 gfc_conv_loop_setup (&loop);
2302 info = &rss->data.info;
2303 desc = info->descriptor;
2305 /* Make a new descriptor. */
2306 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2307 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2308 loop.from, loop.to, 1);
2310 /* Allocate temporary for nested forall construct. */
2311 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2312 inner_size, NULL, block, &ptemp1);
/* Pass 1: build the rhs descriptor straight into temp[count++] via
   direct_byref.  */
2313 gfc_start_block (&body);
2314 gfc_init_se (&lse, NULL);
2315 lse.expr = gfc_build_array_ref (tmp1, count);
2316 lse.direct_byref = 1;
2317 rss = gfc_walk_expr (expr2);
2318 gfc_conv_expr_descriptor (&lse, expr2, rss);
2320 gfc_add_block_to_block (&body, &lse.pre);
2321 gfc_add_block_to_block (&body, &lse.post);
2323 /* Increment count. */
2324 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2325 count, gfc_index_one_node);
2326 gfc_add_modify_expr (&body, count, tmp);
2328 tmp = gfc_finish_block (&body);
2330 /* Generate body and loops according to the information in
2331 nested_forall_info. */
2332 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2333 gfc_add_expr_to_block (block, tmp);
/* Pass 2: copy each saved descriptor into the lhs descriptor.  */
2336 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2338 parm = gfc_build_array_ref (tmp1, count);
2339 lss = gfc_walk_expr (expr1);
2340 gfc_init_se (&lse, NULL);
2341 gfc_conv_expr_descriptor (&lse, expr1, lss);
2342 gfc_add_modify_expr (&lse.pre, lse.expr, parm);
2343 gfc_start_block (&body);
2344 gfc_add_block_to_block (&body, &lse.pre);
2345 gfc_add_block_to_block (&body, &lse.post);
2347 /* Increment count. */
2348 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2349 count, gfc_index_one_node);
2350 gfc_add_modify_expr (&body, count, tmp);
2352 tmp = gfc_finish_block (&body);
2354 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2355 gfc_add_expr_to_block (block, tmp);
/* Release the heap temporary, if one was allocated (ptemp1 set).  */
2357 /* Free the temporary. */
2360 tmp = gfc_chainon_list (NULL_TREE, ptemp1);
2361 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2362 gfc_add_expr_to_block (block, tmp);
2367 /* FORALL and WHERE statements are really nasty, especially when you nest
2368 them. All the rhs of a forall assignment must be evaluated before the
2369 actual assignments are performed. Presumably this also applies to all the
2370 assignments in an inner where statement. */
2372 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2373 linear array, relying on the fact that we process in the same order in all
2376 forall (i=start:end:stride; maskexpr)
2380 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2382 count = ((end + 1 - start) / stride)
2383 masktmp(:) = maskexpr(:)
2386 for (i = start; i <= end; i += stride)
2388 if (masktmp[maskindex++])
2392 for (i = start; i <= end; i += stride)
2394 if (masktmp[maskindex++])
2398 Note that this code only works when there are no dependencies.
2399 Forall loop with array assignments and data dependencies are a real pain,
2400 because the size of the temporary cannot always be determined before the
2401 loop is executed. This problem is compounded by the presence of nested
2406 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2428 gfc_forall_iterator *fa;
2431 gfc_saved_var *saved_vars;
2432 iter_info *this_forall, *iter_tmp;
2433 forall_info *info, *forall_tmp;
2435 gfc_start_block (&block);
2438 /* Count the FORALL index number. */
2439 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2443 /* Allocate the space for var, start, end, step, varexpr. */
2444 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2445 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2446 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2447 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2448 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2449 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2451 /* Allocate the space for info. */
2452 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2454 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2456 gfc_symbol *sym = fa->var->symtree->n.sym;
2458 /* allocate space for this_forall. */
2459 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2461 /* Create a temporary variable for the FORALL index. */
2462 tmp = gfc_typenode_for_spec (&sym->ts);
2463 var[n] = gfc_create_var (tmp, sym->name);
2464 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2466 /* Record it in this_forall. */
2467 this_forall->var = var[n];
2469 /* Replace the index symbol's backend_decl with the temporary decl. */
2470 sym->backend_decl = var[n];
2472 /* Work out the start, end and stride for the loop. */
2473 gfc_init_se (&se, NULL);
2474 gfc_conv_expr_val (&se, fa->start);
2475 /* Record it in this_forall. */
2476 this_forall->start = se.expr;
2477 gfc_add_block_to_block (&block, &se.pre);
2480 gfc_init_se (&se, NULL);
2481 gfc_conv_expr_val (&se, fa->end);
2482 /* Record it in this_forall. */
2483 this_forall->end = se.expr;
2484 gfc_make_safe_expr (&se);
2485 gfc_add_block_to_block (&block, &se.pre);
2488 gfc_init_se (&se, NULL);
2489 gfc_conv_expr_val (&se, fa->stride);
2490 /* Record it in this_forall. */
2491 this_forall->step = se.expr;
2492 gfc_make_safe_expr (&se);
2493 gfc_add_block_to_block (&block, &se.pre);
2496 /* Set the NEXT field of this_forall to NULL. */
2497 this_forall->next = NULL;
2498 /* Link this_forall to the info construct. */
2499 if (info->this_loop == NULL)
2500 info->this_loop = this_forall;
2503 iter_tmp = info->this_loop;
2504 while (iter_tmp->next != NULL)
2505 iter_tmp = iter_tmp->next;
2506 iter_tmp->next = this_forall;
2513 /* Work out the number of elements in the mask array. */
2516 size = gfc_index_one_node;
2517 sizevar = NULL_TREE;
2519 for (n = 0; n < nvar; n++)
2521 if (lenvar && TREE_TYPE (lenvar) != TREE_TYPE (start[n]))
2524 /* size = (end + step - start) / step. */
2525 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2527 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2529 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2530 tmp = convert (gfc_array_index_type, tmp);
2532 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2535 /* Record the nvar and size of current forall level. */
2539 /* Link the current forall level to nested_forall_info. */
2540 forall_tmp = nested_forall_info;
2541 if (forall_tmp == NULL)
2542 nested_forall_info = info;
2545 while (forall_tmp->next_nest != NULL)
2546 forall_tmp = forall_tmp->next_nest;
2547 info->outer = forall_tmp;
2548 forall_tmp->next_nest = info;
2551 /* Copy the mask into a temporary variable if required.
2552 For now we assume a mask temporary is needed. */
2555 /* As the mask array can be very big, prefer compact
2557 tree smallest_boolean_type_node
2558 = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2560 /* Allocate the mask temporary. */
2561 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
2562 TYPE_SIZE_UNIT (smallest_boolean_type_node));
2564 mask = gfc_do_allocate (bytesize, size, &pmask, &block,
2565 smallest_boolean_type_node);
2567 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2568 /* Record them in the info structure. */
2569 info->pmask = pmask;
2571 info->maskindex = maskindex;
2573 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2575 /* Start of mask assignment loop body. */
2576 gfc_start_block (&body);
2578 /* Evaluate the mask expression. */
2579 gfc_init_se (&se, NULL);
2580 gfc_conv_expr_val (&se, code->expr);
2581 gfc_add_block_to_block (&body, &se.pre);
2583 /* Store the mask. */
2584 se.expr = convert (smallest_boolean_type_node, se.expr);
2587 tmp = build_fold_indirect_ref (mask);
2590 tmp = gfc_build_array_ref (tmp, maskindex);
2591 gfc_add_modify_expr (&body, tmp, se.expr);
2593 /* Advance to the next mask element. */
2594 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
2595 maskindex, gfc_index_one_node);
2596 gfc_add_modify_expr (&body, maskindex, tmp);
2598 /* Generate the loops. */
2599 tmp = gfc_finish_block (&body);
2600 tmp = gfc_trans_nested_forall_loop (info, tmp, 0, 0);
2601 gfc_add_expr_to_block (&block, tmp);
2605 /* No mask was specified. */
2606 maskindex = NULL_TREE;
2607 mask = pmask = NULL_TREE;
2610 c = code->block->next;
2612 /* TODO: loop merging in FORALL statements. */
2613 /* Now that we've got a copy of the mask, generate the assignment loops. */
2619 /* A scalar or array assignment. */
2620 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2621 /* Temporaries due to array assignment data dependencies introduce
2622 no end of problems. */
2624 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2625 nested_forall_info, &block);
2628 /* Use the normal assignment copying routines. */
2629 assign = gfc_trans_assignment (c->expr, c->expr2);
2631 /* Generate body and loops. */
2632 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1, 1);
2633 gfc_add_expr_to_block (&block, tmp);
2639 /* Translate WHERE or WHERE construct nested in FORALL. */
2640 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2643 /* Pointer assignment inside FORALL. */
2644 case EXEC_POINTER_ASSIGN:
2645 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2647 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2648 nested_forall_info, &block);
2651 /* Use the normal assignment copying routines. */
2652 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2654 /* Generate body and loops. */
2655 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign,
2657 gfc_add_expr_to_block (&block, tmp);
2662 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2663 gfc_add_expr_to_block (&block, tmp);
2666 /* Explicit subroutine calls are prevented by the frontend but interface
2667 assignments can legitimately produce them. */
2668 case EXEC_ASSIGN_CALL:
2669 assign = gfc_trans_call (c, true);
2670 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1, 1);
2671 gfc_add_expr_to_block (&block, tmp);
2681 /* Restore the original index variables. */
2682 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2683 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2685 /* Free the space for var, start, end, step, varexpr. */
2691 gfc_free (saved_vars);
2695 /* Free the temporary for the mask. */
2696 tmp = gfc_chainon_list (NULL_TREE, pmask);
2697 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2698 gfc_add_expr_to_block (&block, tmp);
2701 pushdecl (maskindex);
2703 return gfc_finish_block (&block);
2707 /* Translate the FORALL statement or construct. */
2709 tree gfc_trans_forall (gfc_code * code)
2711 return gfc_trans_forall_1 (code, NULL);
2715 /* Evaluate the WHERE mask expression, copy its value to a temporary.
2716 If the WHERE construct is nested in FORALL, compute the overall temporary
2717 needed by the WHERE mask expression multiplied by the iterator number of
2719 ME is the WHERE mask expression.
2720 MASK is the current execution mask upon input, whose sense may or may
2721 not be inverted as specified by the INVERT argument.
2722 CMASK is the updated execution mask on output, or NULL if not required.
2723 PMASK is the pending execution mask on output, or NULL if not required.
2724 BLOCK is the block in which to place the condition evaluation loops. */
/* NOTE(review): the embedded original line numbers are non-contiguous, so
   some statements (declarations, braces, else-arms) are missing from this
   extraction.  Code below is kept byte-identical; only comments added.  */
2727 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2728 tree mask, bool invert, tree cmask, tree pmask,
2729 tree mask_type, stmtblock_t * block)
2734 stmtblock_t body, body1;
2735 tree count, cond, mtmp;
2738 gfc_init_loopinfo (&loop);
/* ME is walked twice: LSS fixes the shape, RSS is scalarized below.  */
2740 lss = gfc_walk_expr (me);
2741 rss = gfc_walk_expr (me);
2743 /* Variable to index the temporary. */
2744 count = gfc_create_var (gfc_array_index_type, "count");
2745 /* Initialize count. */
2746 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2748 gfc_start_block (&body);
2750 gfc_init_se (&rse, NULL);
2751 gfc_init_se (&lse, NULL);
/* Scalar mask expression: no scalarization loop is required.  */
2753 if (lss == gfc_ss_terminator)
2755 gfc_init_block (&body1);
2759 /* Initialize the loop. */
2760 gfc_init_loopinfo (&loop);
2762 /* We may need LSS to determine the shape of the expression. */
2763 gfc_add_ss_to_loop (&loop, lss);
2764 gfc_add_ss_to_loop (&loop, rss);
2766 gfc_conv_ss_startstride (&loop);
2767 gfc_conv_loop_setup (&loop);
2769 gfc_mark_ss_chain_used (rss, 1);
2770 /* Start the loop body. */
2771 gfc_start_scalarized_body (&loop, &body1);
2773 /* Translate the expression. */
2774 gfc_copy_loopinfo_to_se (&rse, &loop);
2776 gfc_conv_expr (&rse, me);
2779 /* Variable to evaluate mask condition. */
2780 cond = gfc_create_var (mask_type, "cond");
/* MTMP caches the (possibly inverted) incoming mask element; it is only
   needed when there is an incoming mask and at least one output mask.  */
2781 if (mask && (cmask || pmask))
2782 mtmp = gfc_create_var (mask_type, "mask");
2783 else mtmp = NULL_TREE;
2785 gfc_add_block_to_block (&body1, &lse.pre);
2786 gfc_add_block_to_block (&body1, &rse.pre);
2788 gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));
2790 if (mask && (cmask || pmask))
2792 tmp = gfc_build_array_ref (mask, count);
/* Presumably guarded by INVERT — the controlling `if' is not visible
   in this extraction; confirm against the original source.  */
2794 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
2795 gfc_add_modify_expr (&body1, mtmp, tmp);
/* Control mask for this clause: cmask[count] = mtmp && cond.  */
2800 tmp1 = gfc_build_array_ref (cmask, count);
2803 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2804 gfc_add_modify_expr (&body1, tmp1, tmp);
/* Pending mask for later ELSEWHERE clauses: pmask[count] = mtmp && !cond.  */
2809 tmp1 = gfc_build_array_ref (pmask, count);
2810 tmp = build1 (TRUTH_NOT_EXPR, mask_type, cond);
2812 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2813 gfc_add_modify_expr (&body1, tmp1, tmp);
2816 gfc_add_block_to_block (&body1, &lse.post);
2817 gfc_add_block_to_block (&body1, &rse.post);
2819 if (lss == gfc_ss_terminator)
2821 gfc_add_block_to_block (&body, &body1);
2825 /* Increment count. */
2826 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
2827 gfc_index_one_node);
2828 gfc_add_modify_expr (&body1, count, tmp1);
2830 /* Generate the copying loops. */
2831 gfc_trans_scalarizing_loops (&loop, &body1);
2833 gfc_add_block_to_block (&body, &loop.pre);
2834 gfc_add_block_to_block (&body, &loop.post);
2836 gfc_cleanup_loop (&loop);
2837 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2838 as tree nodes in SS may not be valid in different scope. */
2841 tmp1 = gfc_finish_block (&body);
2842 /* If the WHERE construct is inside FORALL, fill the full temporary. */
2843 if (nested_forall_info != NULL)
2844 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1, 1);
2846 gfc_add_expr_to_block (block, tmp1)
2850 /* Translate an assignment statement in a WHERE statement or construct
2851 statement. The MASK expression is used to control which elements
2852 of EXPR1 shall be assigned. The sense of MASK is specified by
/* NOTE(review): embedded original line numbers are non-contiguous; some
   statements and braces are missing from this extraction.  Code kept
   byte-identical; only comments added.  COUNT1/COUNT2 index the mask
   temporary for the lhs and for the temp->lhs copy loop respectively.  */
2856 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
2857 tree mask, bool invert,
2858 tree count1, tree count2)
2863 gfc_ss *lss_section;
2870 tree index, maskexpr;
2873 /* TODO: handle this special case.
2874 Special case a single function returning an array. */
2875 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
2877 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
2883 /* Assignment of the form lhs = rhs. */
2884 gfc_start_block (&block);
2886 gfc_init_se (&lse, NULL);
2887 gfc_init_se (&rse, NULL);
2890 lss = gfc_walk_expr (expr1);
2893 /* In each where-assign-stmt, the mask-expr and the variable being
2894 defined shall be arrays of the same shape. */
2895 gcc_assert (lss != gfc_ss_terminator);
2897 /* The assignment needs scalarization. */
2900 /* Find a non-scalar SS from the lhs. */
2901 while (lss_section != gfc_ss_terminator
2902 && lss_section->type != GFC_SS_SECTION)
2903 lss_section = lss_section->next;
2905 gcc_assert (lss_section != gfc_ss_terminator);
2907 /* Initialize the scalarizer. */
2908 gfc_init_loopinfo (&loop);
2911 rss = gfc_walk_expr (expr2);
2912 if (rss == gfc_ss_terminator)
2914 /* The rhs is scalar. Add a ss for the expression. */
2915 rss = gfc_get_ss ();
2916 rss->next = gfc_ss_terminator;
2917 rss->type = GFC_SS_SCALAR;
2921 /* Associate the SS with the loop. */
2922 gfc_add_ss_to_loop (&loop, lss);
2923 gfc_add_ss_to_loop (&loop, rss);
2925 /* Calculate the bounds of the scalarization. */
2926 gfc_conv_ss_startstride (&loop);
2928 /* Resolve any data dependencies in the statement. */
2929 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
2931 /* Setup the scalarizing loops. */
2932 gfc_conv_loop_setup (&loop);
2934 /* Setup the gfc_se structures. */
2935 gfc_copy_loopinfo_to_se (&lse, &loop);
2936 gfc_copy_loopinfo_to_se (&rse, &loop);
2939 gfc_mark_ss_chain_used (rss, 1);
/* temp_ss == NULL means no temporary was needed for dependencies, so the
   lhs can be assigned directly inside the main scalarized loop.  */
2940 if (loop.temp_ss == NULL)
2943 gfc_mark_ss_chain_used (lss, 1);
2947 lse.ss = loop.temp_ss;
2948 gfc_mark_ss_chain_used (lss, 3);
2949 gfc_mark_ss_chain_used (loop.temp_ss, 3);
2952 /* Start the scalarized loop body. */
2953 gfc_start_scalarized_body (&loop, &body);
2955 /* Translate the expression. */
2956 gfc_conv_expr (&rse, expr2);
2957 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
2959 gfc_conv_tmp_array_ref (&lse);
2960 gfc_advance_se_ss_chain (&lse);
2963 gfc_conv_expr (&lse, expr1);
2965 /* Form the mask expression according to the mask. */
2967 maskexpr = gfc_build_array_ref (mask, index);
/* Presumably guarded by INVERT — the controlling `if' is not visible in
   this extraction; confirm against the original source.  */
2969 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
2971 /* Use the scalar assignment as is. */
2972 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts.type);
2973 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
2975 gfc_add_expr_to_block (&body, tmp);
2977 if (lss == gfc_ss_terminator)
2979 /* Increment count1. */
2980 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2981 count1, gfc_index_one_node);
2982 gfc_add_modify_expr (&body, count1, tmp);
2984 /* Use the scalar assignment as is. */
2985 gfc_add_block_to_block (&block, &body);
2989 gcc_assert (lse.ss == gfc_ss_terminator
2990 && rse.ss == gfc_ss_terminator);
/* A temporary was created: a second loop copies temp -> actual lhs.  */
2992 if (loop.temp_ss != NULL)
2994 /* Increment count1 before finish the main body of a scalarized
2996 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2997 count1, gfc_index_one_node);
2998 gfc_add_modify_expr (&body, count1, tmp);
2999 gfc_trans_scalarized_loop_boundary (&loop, &body);
3001 /* We need to copy the temporary to the actual lhs. */
3002 gfc_init_se (&lse, NULL);
3003 gfc_init_se (&rse, NULL);
3004 gfc_copy_loopinfo_to_se (&lse, &loop);
3005 gfc_copy_loopinfo_to_se (&rse, &loop);
3007 rse.ss = loop.temp_ss;
3010 gfc_conv_tmp_array_ref (&rse);
3011 gfc_advance_se_ss_chain (&rse);
3012 gfc_conv_expr (&lse, expr1);
3014 gcc_assert (lse.ss == gfc_ss_terminator
3015 && rse.ss == gfc_ss_terminator);
3017 /* Form the mask expression according to the mask tree list. */
3019 maskexpr = gfc_build_array_ref (mask, index);
3021 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3024 /* Use the scalar assignment as is. */
3025 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts.type);
3026 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3027 gfc_add_expr_to_block (&body, tmp);
3029 /* Increment count2. */
3030 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3031 count2, gfc_index_one_node);
3032 gfc_add_modify_expr (&body, count2, tmp);
3036 /* Increment count1. */
3037 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3038 count1, gfc_index_one_node);
3039 gfc_add_modify_expr (&body, count1, tmp);
3042 /* Generate the copying loops. */
3043 gfc_trans_scalarizing_loops (&loop, &body);
3045 /* Wrap the whole thing up. */
3046 gfc_add_block_to_block (&block, &loop.pre);
3047 gfc_add_block_to_block (&block, &loop.post);
3048 gfc_cleanup_loop (&loop);
3051 return gfc_finish_block (&block);
3055 /* Translate the WHERE construct or statement.
3056 This function can be called iteratively to translate the nested WHERE
3057 construct or statement.
3058 MASK is the control mask. */
/* NOTE(review): embedded original line numbers are non-contiguous; some
   statements and braces are missing from this extraction.  Code kept
   byte-identical; only comments added.  CMASK controls the current
   clause; PMASK is pending for later ELSEWHERE clauses.  */
3061 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3062 forall_info * nested_forall_info, stmtblock_t * block)
3064 stmtblock_t inner_size_body;
3065 tree inner_size, size;
3073 tree count1, count2;
3077 tree pcmask = NULL_TREE;
3078 tree ppmask = NULL_TREE;
3079 tree cmask = NULL_TREE;
3080 tree pmask = NULL_TREE;
3082 /* the WHERE statement or the WHERE construct statement. */
3083 cblock = code->block;
3085 /* As the mask array can be very big, prefer compact boolean types. */
3086 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
/* Decide up front whether CMASK and/or PMASK temporaries are needed,
   based on the number of WHERE/ELSEWHERE clauses and their contents.  */
3088 /* Determine which temporary masks are needed. */
3091 /* One clause: No ELSEWHEREs. */
3092 need_cmask = (cblock->next != 0);
3095 else if (cblock->block->block)
3097 /* Three or more clauses: Conditional ELSEWHEREs. */
3101 else if (cblock->next)
3103 /* Two clauses, the first non-empty. */
3105 need_pmask = (mask != NULL_TREE
3106 && cblock->block->next != 0);
3108 else if (!cblock->block->next)
3110 /* Two clauses, both empty. */
3114 /* Two clauses, the first empty, the second non-empty. */
3117 need_cmask = (cblock->block->expr != 0);
3126 if (need_cmask || need_pmask)
3128 /* Calculate the size of temporary needed by the mask-expr. */
3129 gfc_init_block (&inner_size_body);
3130 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3131 &inner_size_body, &lss, &rss);
3133 /* Calculate the total size of temporary needed. */
3134 size = compute_overall_iter_number (nested_forall_info, inner_size,
3135 &inner_size_body, block);
3137 /* Allocate temporary for WHERE mask if needed. */
3139 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3142 /* Allocate temporary for !mask if needed. */
3144 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3150 /* Each time around this loop, the where clause is conditional
3151 on the value of mask and invert, which are updated at the
3152 bottom of the loop. */
3154 /* Has mask-expr. */
3157 /* Ensure that the WHERE mask will be evaluated exactly once.
3158 If there are no statements in this WHERE/ELSEWHERE clause,
3159 then we don't need to update the control mask (cmask).
3160 If this is the last clause of the WHERE construct, then
3161 we don't need to update the pending control mask (pmask). */
3163 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3165 cblock->next ? cmask : NULL_TREE,
3166 cblock->block ? pmask : NULL_TREE,
3169 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3171 (cblock->next || cblock->block)
3172 ? cmask : NULL_TREE,
3173 NULL_TREE, mask_type, block);
3177 /* It's a final elsewhere-stmt. No mask-expr is present. */
3181 /* The body of this where clause are controlled by cmask with
3182 sense specified by invert. */
3184 /* Get the assignment statement of a WHERE statement, or the first
3185 statement in where-body-construct of a WHERE construct. */
3186 cnext = cblock->next;
3191 /* WHERE assignment statement. */
3193 expr1 = cnext->expr;
3194 expr2 = cnext->expr2;
3195 if (nested_forall_info != NULL)
3197 need_temp = gfc_check_dependency (expr1, expr2, 0);
3199 gfc_trans_assign_need_temp (expr1, expr2,
3201 nested_forall_info, block);
3204 /* Variables to control maskexpr. */
3205 count1 = gfc_create_var (gfc_array_index_type, "count1");
3206 count2 = gfc_create_var (gfc_array_index_type, "count2");
3207 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3208 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3210 tmp = gfc_trans_where_assign (expr1, expr2,
/* Inside FORALL the masked assignment is wrapped in the forall loops.  */
3214 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3216 gfc_add_expr_to_block (block, tmp);
3221 /* Variables to control maskexpr. */
3222 count1 = gfc_create_var (gfc_array_index_type, "count1");
3223 count2 = gfc_create_var (gfc_array_index_type, "count2");
3224 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3225 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3227 tmp = gfc_trans_where_assign (expr1, expr2,
3230 gfc_add_expr_to_block (block, tmp);
3235 /* WHERE or WHERE construct is part of a where-body-construct. */
/* Nested WHERE: recurse with the current control mask.  */
3237 gfc_trans_where_2 (cnext, cmask, invert,
3238 nested_forall_info, block);
3245 /* The next statement within the same where-body-construct. */
3246 cnext = cnext->next;
3248 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3249 cblock = cblock->block;
3250 if (mask == NULL_TREE)
3252 /* If we're the initial WHERE, we can simply invert the sense
3253 of the current mask to obtain the "mask" for the remaining
3260 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3266 /* If we allocated a pending mask array, deallocate it now. */
3269 tree args = gfc_chainon_list (NULL_TREE, ppmask);
3270 tmp = build_function_call_expr (gfor_fndecl_internal_free, args);
3271 gfc_add_expr_to_block (block, tmp);
3274 /* If we allocated a current mask array, deallocate it now. */
3277 tree args = gfc_chainon_list (NULL_TREE, pcmask);
3278 tmp = build_function_call_expr (gfor_fndecl_internal_free, args);
3279 gfc_add_expr_to_block (block, tmp);
3283 /* Translate a simple WHERE construct or statement without dependencies.
3284 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3285 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3286 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
/* NOTE(review): embedded original line numbers are non-contiguous; some
   statements (braces, declarations, `if (eblock)' guards) are missing
   from this extraction.  Code kept byte-identical; only comments added.
   Fast path: one scalarized loop with a COND_EXPR per element, no mask
   temporary is allocated.  */
3289 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3291 stmtblock_t block, body;
3292 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3293 tree tmp, cexpr, tstmt, estmt;
3294 gfc_ss *css, *tdss, *tsss;
3295 gfc_se cse, tdse, tsse, edse, esse;
/* Pick apart the single assignment in each clause.  */
3300 cond = cblock->expr;
3301 tdst = cblock->next->expr;
3302 tsrc = cblock->next->expr2;
3303 edst = eblock ? eblock->next->expr : NULL;
3304 esrc = eblock ? eblock->next->expr2 : NULL;
3306 gfc_start_block (&block);
3307 gfc_init_loopinfo (&loop);
3309 /* Handle the condition. */
3310 gfc_init_se (&cse, NULL);
3311 css = gfc_walk_expr (cond);
3312 gfc_add_ss_to_loop (&loop, css);
3314 /* Handle the then-clause. */
3315 gfc_init_se (&tdse, NULL);
3316 gfc_init_se (&tsse, NULL);
3317 tdss = gfc_walk_expr (tdst);
3318 tsss = gfc_walk_expr (tsrc);
/* Scalar rhs: wrap it in a GFC_SS_SCALAR ss for the scalarizer.  */
3319 if (tsss == gfc_ss_terminator)
3321 tsss = gfc_get_ss ();
3322 tsss->next = gfc_ss_terminator;
3323 tsss->type = GFC_SS_SCALAR;
3326 gfc_add_ss_to_loop (&loop, tdss);
3327 gfc_add_ss_to_loop (&loop, tsss);
3331 /* Handle the else clause. */
3332 gfc_init_se (&edse, NULL);
3333 gfc_init_se (&esse, NULL);
3334 edss = gfc_walk_expr (edst);
3335 esss = gfc_walk_expr (esrc);
3336 if (esss == gfc_ss_terminator)
3338 esss = gfc_get_ss ();
3339 esss->next = gfc_ss_terminator;
3340 esss->type = GFC_SS_SCALAR;
3343 gfc_add_ss_to_loop (&loop, edss);
3344 gfc_add_ss_to_loop (&loop, esss);
3347 gfc_conv_ss_startstride (&loop);
3348 gfc_conv_loop_setup (&loop);
3350 gfc_mark_ss_chain_used (css, 1);
3351 gfc_mark_ss_chain_used (tdss, 1);
3352 gfc_mark_ss_chain_used (tsss, 1);
3355 gfc_mark_ss_chain_used (edss, 1);
3356 gfc_mark_ss_chain_used (esss, 1);
3359 gfc_start_scalarized_body (&loop, &body);
3361 gfc_copy_loopinfo_to_se (&cse, &loop);
3362 gfc_copy_loopinfo_to_se (&tdse, &loop);
3363 gfc_copy_loopinfo_to_se (&tsse, &loop);
3369 gfc_copy_loopinfo_to_se (&edse, &loop);
3370 gfc_copy_loopinfo_to_se (&esse, &loop);
3375 gfc_conv_expr (&cse, cond);
3376 gfc_add_block_to_block (&body, &cse.pre);
3379 gfc_conv_expr (&tsse, tsrc);
3380 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3382 gfc_conv_tmp_array_ref (&tdse);
3383 gfc_advance_se_ss_chain (&tdse);
3386 gfc_conv_expr (&tdse, tdst);
3390 gfc_conv_expr (&esse, esrc);
3391 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3393 gfc_conv_tmp_array_ref (&edse);
3394 gfc_advance_se_ss_chain (&edse);
3397 gfc_conv_expr (&edse, edst);
/* Emit: if (cond) then-assign else else-assign (or empty stmt).  */
3400 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts.type);
3401 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts.type)
3402 : build_empty_stmt ();
3403 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3404 gfc_add_expr_to_block (&body, tmp);
3405 gfc_add_block_to_block (&body, &cse.post);
3407 gfc_trans_scalarizing_loops (&loop, &body);
3408 gfc_add_block_to_block (&block, &loop.pre);
3409 gfc_add_block_to_block (&block, &loop.post);
3410 gfc_cleanup_loop (&loop);
3412 return gfc_finish_block (&block);
3415 /* As the WHERE or WHERE construct statement can be nested, we call
3416 gfc_trans_where_2 to do the translation, and pass the initial
3417 NULL values for both the control mask and the pending control mask. */
/* NOTE(review): embedded original line numbers are non-contiguous; some
   condition lines are missing from this extraction.  Code kept
   byte-identical; only comments added.  Dispatch: use the dependence-free
   fast path (gfc_trans_where_3) when possible, else the general
   mask-temporary path (gfc_trans_where_2).  */
3420 gfc_trans_where (gfc_code * code)
3426 cblock = code->block;
/* Candidate for the fast path: a single assignment in the WHERE body.  */
3428 && cblock->next->op == EXEC_ASSIGN
3429 && !cblock->next->next)
3431 eblock = cblock->block;
3434 /* A simple "WHERE (cond) x = y" statement or block is
3435 dependence free if cond is not dependent upon writing x,
3436 and the source y is unaffected by the destination x. */
3437 if (!gfc_check_dependency (cblock->next->expr,
3439 && !gfc_check_dependency (cblock->next->expr,
3440 cblock->next->expr2, 0))
3441 return gfc_trans_where_3 (cblock, NULL);
3443 else if (!eblock->expr
3446 && eblock->next->op == EXEC_ASSIGN
3447 && !eblock->next->next)
3449 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3450 block is dependence free if cond is not dependent on writes
3451 to x1 and x2, y1 is not dependent on writes to x2, and y2
3452 is not dependent on writes to x1, and both y's are not
3453 dependent upon their own x's. */
3454 if (!gfc_check_dependency(cblock->next->expr,
3456 && !gfc_check_dependency(eblock->next->expr,
3458 && !gfc_check_dependency(cblock->next->expr,
3459 eblock->next->expr2, 0)
3460 && !gfc_check_dependency(eblock->next->expr,
3461 cblock->next->expr2, 0)
3462 && !gfc_check_dependency(cblock->next->expr,
3463 cblock->next->expr2, 0)
3464 && !gfc_check_dependency(eblock->next->expr,
3465 eblock->next->expr2, 0))
3466 return gfc_trans_where_3 (cblock, eblock);
/* General case: full translation with control/pending masks.  */
3470 gfc_start_block (&block);
3472 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3474 return gfc_finish_block (&block);
3478 /* CYCLE a DO loop. The label decl has already been created by
3479 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
3480 node at the head of the loop. We must mark the label as used. */
3483 gfc_trans_cycle (gfc_code * code)
3487 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3488 TREE_USED (cycle_label) = 1;
3489 return build1_v (GOTO_EXPR, cycle_label);
3493 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3494 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3498 gfc_trans_exit (gfc_code * code)
3502 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3503 TREE_USED (exit_label) = 1;
3504 return build1_v (GOTO_EXPR, exit_label);
3508 /* Translate the ALLOCATE statement. */
/* NOTE(review): embedded original line numbers are non-contiguous; some
   statements, braces and else-arms are missing from this extraction.
   Code kept byte-identical; only comments added.  With STAT= present,
   each failed allocation branches to ERROR_LABEL and the status value
   is copied into the user's variable at the end.  */
3511 gfc_trans_allocate (gfc_code * code)
3523 if (!code->ext.alloc_list)
3526 gfc_start_block (&block);
/* STAT= given: create the status variable and the error label.  */
3530 tree gfc_int4_type_node = gfc_get_int_type (4);
3532 stat = gfc_create_var (gfc_int4_type_node, "stat");
3533 pstat = build_fold_addr_expr (stat);
3535 error_label = gfc_build_label_decl (NULL_TREE);
3536 TREE_USED (error_label) = 1;
/* No STAT=: pass a null status pointer to the library.  */
3540 pstat = integer_zero_node;
3541 stat = error_label = NULL_TREE;
3545 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3549 gfc_init_se (&se, NULL);
3550 gfc_start_block (&se.pre);
3552 se.want_pointer = 1;
3553 se.descriptor_only = 1;
3554 gfc_conv_expr (&se, expr);
/* gfc_array_allocate handles array targets; on false fall through to
   the scalar/derived-type path below.  */
3556 if (!gfc_array_allocate (&se, expr, pstat))
3558 /* A scalar or derived type. */
3561 val = gfc_create_var (ppvoid_type_node, "ptr");
3562 tmp = gfc_build_addr_expr (ppvoid_type_node, se.expr);
3563 gfc_add_modify_expr (&se.pre, val, tmp);
3565 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
/* Deferred-length CHARACTER has no static size; use string_length.  */
3567 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3568 tmp = se.string_length;
3570 parm = gfc_chainon_list (NULL_TREE, val);
3571 parm = gfc_chainon_list (parm, tmp);
3572 parm = gfc_chainon_list (parm, pstat);
3573 tmp = build_function_call_expr (gfor_fndecl_allocate, parm);
3574 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT=: if (stat != 0) goto error_label;  */
3578 tmp = build1_v (GOTO_EXPR, error_label);
3579 parm = fold_build2 (NE_EXPR, boolean_type_node,
3580 stat, build_int_cst (TREE_TYPE (stat), 0));
3581 tmp = fold_build3 (COND_EXPR, void_type_node,
3582 parm, tmp, build_empty_stmt ());
3583 gfc_add_expr_to_block (&se.pre, tmp);
3587 tmp = gfc_finish_block (&se.pre);
3588 gfc_add_expr_to_block (&block, tmp);
3591 /* Assign the value to the status variable. */
3594 tmp = build1_v (LABEL_EXPR, error_label);
3595 gfc_add_expr_to_block (&block, tmp);
3597 gfc_init_se (&se, NULL);
3598 gfc_conv_expr_lhs (&se, code->expr);
3599 tmp = convert (TREE_TYPE (se.expr), stat);
3600 gfc_add_modify_expr (&block, se.expr, tmp);
3603 return gfc_finish_block (&block);
3607 /* Translate a DEALLOCATE statement.
3608 There are two cases within the for loop:
3609 (1) deallocate(a1, a2, a3) is translated into the following sequence
3610 _gfortran_deallocate(a1, 0B)
3611 _gfortran_deallocate(a2, 0B)
3612 _gfortran_deallocate(a3, 0B)
3613 where the STAT= variable is passed a NULL pointer.
3614 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
3616 _gfortran_deallocate(a1, &stat)
3617 astat = astat + stat
3618 _gfortran_deallocate(a2, &stat)
3619 astat = astat + stat
3620 _gfortran_deallocate(a3, &stat)
3621 astat = astat + stat
3622 In case (1), we simply return at the end of the for loop. In case (2)
3623 we set STAT= astat. */
/* NOTE(review): embedded original line numbers are non-contiguous; some
   statements, braces and else-arms are missing from this extraction.
   Code kept byte-identical; only comments added.  */
3625 gfc_trans_deallocate (gfc_code * code)
3630 tree apstat, astat, parm, pstat, stat, tmp, type, var;
3633 gfc_start_block (&block);
3635 /* Set up the optional STAT= */
3638 tree gfc_int4_type_node = gfc_get_int_type (4);
3640 /* Variable used with the library call. */
3641 stat = gfc_create_var (gfc_int4_type_node, "stat");
3642 pstat = build_fold_addr_expr (stat);
3644 /* Running total of possible deallocation failures. */
3645 astat = gfc_create_var (gfc_int4_type_node, "astat");
3646 apstat = build_fold_addr_expr (astat);
3648 /* Initialize astat to 0. */
3649 gfc_add_modify_expr (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
/* No STAT=: pass a NULL status pointer to the library.  */
3653 pstat = apstat = null_pointer_node;
3654 stat = astat = NULL_TREE;
3657 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3660 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3662 gfc_init_se (&se, NULL);
3663 gfc_start_block (&se.pre);
3665 se.want_pointer = 1;
3666 se.descriptor_only = 1;
3667 gfc_conv_expr (&se, expr);
/* Array target: dedicated array-descriptor deallocation.  */
3670 tmp = gfc_array_deallocate (se.expr, pstat);
/* Scalar/derived target: call _gfortran_deallocate (ptr, pstat).  */
3673 type = build_pointer_type (TREE_TYPE (se.expr));
3674 var = gfc_create_var (type, "ptr");
3675 tmp = gfc_build_addr_expr (type, se.expr);
3676 gfc_add_modify_expr (&se.pre, var, tmp);
3678 parm = gfc_chainon_list (NULL_TREE, var);
3679 parm = gfc_chainon_list (parm, pstat);
3680 tmp = build_function_call_expr (gfor_fndecl_deallocate, parm);
3683 gfc_add_expr_to_block (&se.pre, tmp);
3685 /* Keep track of the number of failed deallocations by adding stat
3686 of the last deallocation to the running total. */
3689 apstat = build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
3690 gfc_add_modify_expr (&se.pre, astat, apstat);
3693 tmp = gfc_finish_block (&se.pre);
3694 gfc_add_expr_to_block (&block, tmp);
3698 /* Assign the value to the status variable. */
3701 gfc_init_se (&se, NULL);
3702 gfc_conv_expr_lhs (&se, code->expr);
3703 tmp = convert (TREE_TYPE (se.expr), astat);
3704 gfc_add_modify_expr (&block, se.expr, tmp);
3707 return gfc_finish_block (&block);