1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
40 #include "dependency.h"
/* Linked-list node describing one FORALL iterator variable.
   NOTE(review): the listing is gapped here -- the struct bodies are only
   partially visible; the remaining members are not shown.  */
42 typedef struct iter_info
48 struct iter_info *next;
/* Per-nesting-level information for a FORALL construct; prev_nest links
   back to the enclosing FORALL level.  */
52 typedef struct forall_info
59 struct forall_info *prev_nest;
/* Forward declaration: WHERE translation participates in the FORALL
   machinery, so it is declared before its definition below.  */
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
66 /* Translate a F95 label number to a LABEL_EXPR. */
69 gfc_trans_label_here (gfc_code * code)
/* Emit the GCC label decl for the statement label attached to this
   gfc_code node (code->here).  */
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables. For variables in common blocks this
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only variables that appeared in an ASSIGN statement carry the auxiliary
   label storage; anything else here is a front-end bug.  */
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deals with variable in common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deals with dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
92 /* Translate a label assignment statement. */
95 gfc_trans_label_assign (gfc_code * code)
104 /* Start a new block. */
105 gfc_init_se (&se, NULL);
106 gfc_start_block (&se.pre);
107 gfc_conv_label_variable (&se, code->expr);
/* Fetch the auxiliary string-length and address slots stored with the
   ASSIGNed variable's decl.  */
109 len = GFC_DECL_STRING_LEN (se.expr);
110 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
112 label_tree = gfc_get_label_decl (code->label);
114 if (code->label->defined == ST_LABEL_TARGET)
/* Branch-target label: store its address and flag the length slot with
   -1 so an assigned GOTO can distinguish it from a FORMAT label.  */
116 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
117 len_tree = integer_minus_one_node;
/* Otherwise this is a FORMAT label: store the format string itself
   together with its (non-negative) length.  */
121 gfc_expr *format = code->label->format;
123 label_len = format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
125 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
126 format->value.character.string);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
/* Commit both auxiliary slots, then hand back the finished block.  */
130 gfc_add_modify (&se.pre, len, len_tree);
131 gfc_add_modify (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
136 /* Translate a GOTO statement. */
139 gfc_trans_goto (gfc_code * code)
141 locus loc = code->loc;
/* Simple (unassigned) GOTO: jump straight to the named label.  */
147 if (code->label != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
/* Assigned GOTO: the target address lives in the auxiliary slots that
   gfc_trans_label_assign stored alongside the variable's decl.  */
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr);
/* A stored length of -1 marks a branch target; anything else means the
   variable currently holds a FORMAT label, which is not a valid GOTO
   destination -- diagnose that at run time.  */
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
158 "Assigned label is not a target label");
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
/* No label list supplied: jump indirectly through the stored address.  */
165 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
166 gfc_add_expr_to_block (&se.pre, target);
167 return gfc_finish_block (&se.pre);
170 /* Check the label list. */
/* With a label list, compare the stored address against each listed
   label in turn and jump as soon as one matches.  */
173 target = gfc_get_label_decl (code->label);
174 tmp = gfc_build_addr_expr (pvoid_type_node, target);
175 tmp = fold_build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
176 tmp = build3_v (COND_EXPR, tmp,
177 fold_build1 (GOTO_EXPR, void_type_node, target),
178 build_empty_stmt ());
179 gfc_add_expr_to_block (&se.pre, tmp);
182 while (code != NULL);
/* Fell off the end of the list: the stored label was not a candidate.  */
183 gfc_trans_runtime_check (true, false, boolean_true_node, &se.pre, &loc,
184 "Assigned label is not in the list");
186 return gfc_finish_block (&se.pre);
190 /* Translate an ENTRY statement. Just adds a label for this entry point. */
192 gfc_trans_entry (gfc_code * code)
/* The entry point's label decl was created during resolution; all we
   emit here is the label itself.  */
194 return build1_v (LABEL_EXPR, code->ext.entry->label);
198 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
199 elemental subroutines. Make temporaries for output arguments if any such
200 dependencies are found. Output arguments are chosen because internal_unpack
201 can be used, as is, to copy the result back to the variable. */
203 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
204 gfc_symbol * sym, gfc_actual_arglist * arg,
205 gfc_dep_check check_variable)
207 gfc_actual_arglist *arg0;
209 gfc_formal_arglist *formal;
210 gfc_loopinfo tmp_loop;
/* Nothing to do unless the call was scalarized (has an ss chain).  */
221 if (loopse->ss == NULL)
226 formal = sym->formal;
228 /* Loop over all the arguments testing for dependencies. */
229 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
235 /* Obtain the info structure for the current argument. */
237 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
241 info = &ss->data.info;
245 /* If there is a dependency, create a temporary and use it
246 instead of the variable. */
247 fsym = formal ? formal->sym : NULL;
248 if (e->expr_type == EXPR_VARIABLE
250 && fsym->attr.intent != INTENT_IN
251 && gfc_check_fncall_dependency (e, fsym->attr.intent,
252 sym, arg0, check_variable))
254 tree initial, temptype;
255 stmtblock_t temp_post;
257 /* Make a local loopinfo for the temporary creation, so that
258 none of the other ss->info's have to be renormalized. */
259 gfc_init_loopinfo (&tmp_loop);
260 for (n = 0; n < info->dimen; n++)
262 tmp_loop.to[n] = loopse->loop->to[n];
263 tmp_loop.from[n] = loopse->loop->from[n];
264 tmp_loop.order[n] = loopse->loop->order[n];
267 /* Obtain the argument descriptor for unpacking. */
268 gfc_init_se (&parmse, NULL);
269 parmse.want_pointer = 1;
270 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
271 gfc_add_block_to_block (&se->pre, &parmse.pre);
273 /* If we've got INTENT(INOUT), initialize the array temporary with
274 a copy of the values. */
275 if (fsym->attr.intent == INTENT_INOUT)
276 initial = parmse.expr;
280 /* Find the type of the temporary to create; we don't use the type
281 of e itself as this breaks for subcomponent-references in e (where
282 the type of e is that of the final reference, but parmse.expr's
283 type corresponds to the full derived-type). */
284 /* TODO: Fix this somehow so we don't need a temporary of the whole
285 array but instead only the components referenced. */
/* parmse.expr is a pointer to the descriptor; strip the pointer and
   descriptor layers to reach the element type.  */
286 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
287 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
288 temptype = TREE_TYPE (temptype);
289 temptype = gfc_get_element_type (temptype);
291 /* Generate the temporary. Cleaning up the temporary should be the
292 very last thing done, so we add the code to a new block and add it
293 to se->post as last instructions. */
294 size = gfc_create_var (gfc_array_index_type, NULL);
295 data = gfc_create_var (pvoid_type_node, NULL);
296 gfc_init_block (&temp_post);
297 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
298 &tmp_loop, info, temptype,
302 gfc_add_modify (&se->pre, size, tmp);
303 tmp = fold_convert (pvoid_type_node, info->data);
304 gfc_add_modify (&se->pre, data, tmp);
306 /* Calculate the offset for the temporary. */
/* offset = -sum_n (loop->from[n] * stride[n]), accumulated below.  */
307 offset = gfc_index_zero_node;
308 for (n = 0; n < info->dimen; n++)
310 tmp = gfc_conv_descriptor_stride (info->descriptor,
312 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
313 loopse->loop->from[n], tmp);
314 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
317 info->offset = gfc_create_var (gfc_array_index_type, NULL);
318 gfc_add_modify (&se->pre, info->offset, offset);
320 /* Copy the result back using unpack. */
321 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
322 gfc_add_expr_to_block (&se->post, tmp);
324 /* parmse.pre is already added above. */
325 gfc_add_block_to_block (&se->post, &parmse.post);
326 gfc_add_block_to_block (&se->post, &temp_post);
332 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
335 gfc_trans_call (gfc_code * code, bool dependency_check)
339 int has_alternate_specifier;
340 gfc_dep_check check_variable;
342 /* A CALL starts a new block because the actual arguments may have to
343 be evaluated first. */
344 gfc_init_se (&se, NULL);
345 gfc_start_block (&se.pre);
347 gcc_assert (code->resolved_sym);
/* Walk the arguments of an elemental subroutine; a non-empty ss chain
   means at least one array-valued argument needs scalarization.  */
349 ss = gfc_ss_terminator;
350 if (code->resolved_sym->attr.elemental)
351 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
353 /* Is not an elemental subroutine call with array valued arguments. */
354 if (ss == gfc_ss_terminator)
357 /* Translate the call. */
358 has_alternate_specifier
359 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
362 /* A subroutine without side-effect, by definition, does nothing! */
363 TREE_SIDE_EFFECTS (se.expr) = 1;
365 /* Chain the pieces together and return the block. */
/* An alternate return specifier turns the call's return value into a
   selector for the EXEC_SELECT that the front end placed right after
   the call; store it into that select's variable.  */
366 if (has_alternate_specifier)
368 gfc_code *select_code;
370 select_code = code->next;
371 gcc_assert(select_code->op == EXEC_SELECT);
372 sym = select_code->expr->symtree->n.sym;
373 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
374 if (sym->backend_decl == NULL)
375 sym->backend_decl = gfc_get_symbol_decl (sym);
376 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
379 gfc_add_expr_to_block (&se.pre, se.expr);
381 gfc_add_block_to_block (&se.pre, &se.post);
386 /* An elemental subroutine call with array valued arguments has
394 /* gfc_walk_elemental_function_args renders the ss chain in the
395 reverse order to the actual argument order. */
396 ss = gfc_reverse_ss (ss);
398 /* Initialize the loop. */
399 gfc_init_se (&loopse, NULL);
400 gfc_init_loopinfo (&loop);
401 gfc_add_ss_to_loop (&loop, ss);
403 gfc_conv_ss_startstride (&loop);
404 /* TODO: gfc_conv_loop_setup generates a temporary for vector
405 subscripts. This could be prevented in the elemental case
406 as temporaries are handled separately
407 (below in gfc_conv_elemental_dependencies). */
408 gfc_conv_loop_setup (&loop, &code->expr->where);
409 gfc_mark_ss_chain_used (ss, 1);
411 /* Convert the arguments, checking for dependencies. */
412 gfc_copy_loopinfo_to_se (&loopse, &loop);
415 /* For operator assignment, do dependency checking. */
416 if (dependency_check)
417 check_variable = ELEM_CHECK_VARIABLE;
419 check_variable = ELEM_DONT_CHECK_VARIABLE;
/* Create temporaries for any arguments that alias; their setup code
   goes before the loop, their copy-back after it.  */
421 gfc_init_se (&depse, NULL);
422 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
423 code->ext.actual, check_variable);
425 gfc_add_block_to_block (&loop.pre, &depse.pre);
426 gfc_add_block_to_block (&loop.post, &depse.post);
428 /* Generate the loop body. */
429 gfc_start_scalarized_body (&loop, &body);
430 gfc_init_block (&block);
432 /* Add the subroutine call to the block. */
433 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
435 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
437 gfc_add_block_to_block (&block, &loopse.pre);
438 gfc_add_block_to_block (&block, &loopse.post);
440 /* Finish up the loop block and the loop. */
441 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
442 gfc_trans_scalarizing_loops (&loop, &body);
443 gfc_add_block_to_block (&se.pre, &loop.pre);
444 gfc_add_block_to_block (&se.pre, &loop.post);
445 gfc_add_block_to_block (&se.pre, &se.post);
446 gfc_cleanup_loop (&loop);
449 return gfc_finish_block (&se.pre);
453 /* Translate the RETURN statement. */
456 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
464 /* If code->expr is not NULL, this return statement must appear
465 in a subroutine and current_fake_result_decl has already
468 result = gfc_get_fake_result_decl (NULL, 0);
/* An alternate return (RETURN n) in a subroutine with no * dummy has
   nowhere to store n; warn and fall back to a plain return.  */
471 gfc_warning ("An alternate return at %L without a * dummy argument",
473 return build1_v (GOTO_EXPR, gfc_get_return_label ());
476 /* Start a new block for this statement. */
477 gfc_init_se (&se, NULL);
478 gfc_start_block (&se.pre);
/* Store the alternate-return value into the fake result decl, then
   branch to the function's shared return label.  */
480 gfc_conv_expr (&se, code->expr);
482 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
483 fold_convert (TREE_TYPE (result), se.expr));
484 gfc_add_expr_to_block (&se.pre, tmp);
486 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
487 gfc_add_expr_to_block (&se.pre, tmp);
488 gfc_add_block_to_block (&se.pre, &se.post);
489 return gfc_finish_block (&se.pre);
/* Plain RETURN: just jump to the return label.  */
492 return build1_v (GOTO_EXPR, gfc_get_return_label ());
496 /* Translate the PAUSE statement. We have to translate this statement
497 to a runtime library call. */
500 gfc_trans_pause (gfc_code * code)
502 tree gfc_int4_type_node = gfc_get_int_type (4);
506 /* Start a new block for this statement. */
507 gfc_init_se (&se, NULL);
508 gfc_start_block (&se.pre);
/* PAUSE with no expression (or a numeric stop code) calls the numeric
   library entry point; PAUSE 'string' calls the string variant.  */
511 if (code->expr == NULL)
513 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
514 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
518 gfc_conv_expr_reference (&se, code->expr);
519 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
520 se.expr, se.string_length);
523 gfc_add_expr_to_block (&se.pre, tmp);
525 gfc_add_block_to_block (&se.pre, &se.post);
527 return gfc_finish_block (&se.pre);
531 /* Translate the STOP statement. We have to translate this statement
532 to a runtime library call. */
535 gfc_trans_stop (gfc_code * code)
537 tree gfc_int4_type_node = gfc_get_int_type (4);
541 /* Start a new block for this statement. */
542 gfc_init_se (&se, NULL);
543 gfc_start_block (&se.pre);
/* Same structure as PAUSE: numeric stop code vs. STOP 'string'.  */
546 if (code->expr == NULL)
548 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
549 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
553 gfc_conv_expr_reference (&se, code->expr);
554 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
555 se.expr, se.string_length);
558 gfc_add_expr_to_block (&se.pre, tmp);
560 gfc_add_block_to_block (&se.pre, &se.post);
562 return gfc_finish_block (&se.pre);
566 /* Generate GENERIC for the IF construct. This function also deals with
567 the simple IF statement, because the front end translates the IF
568 statement into an IF construct.
600 where COND_S is the simplified version of the predicate. PRE_COND_S
601 are the pre side-effects produced by the translation of the
603 We need to build the chain recursively otherwise we run into
604 problems with folding incomplete statements. */
607 gfc_trans_if_1 (gfc_code * code)
612 /* Check for an unconditional ELSE clause. */
/* A block with no condition expression is the trailing ELSE; translate
   its body directly.  */
614 return gfc_trans_code (code->next);
616 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
617 gfc_init_se (&if_se, NULL);
618 gfc_start_block (&if_se.pre);
620 /* Calculate the IF condition expression. */
621 gfc_conv_expr_val (&if_se, code->expr);
623 /* Translate the THEN clause. */
624 stmt = gfc_trans_code (code->next);
626 /* Translate the ELSE clause. */
/* Recurse on code->block for ELSE IF chains; an absent ELSE becomes an
   empty statement.  */
628 elsestmt = gfc_trans_if_1 (code->block);
630 elsestmt = build_empty_stmt ();
632 /* Build the condition expression and add it to the condition block. */
633 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
635 gfc_add_expr_to_block (&if_se.pre, stmt);
637 /* Finish off this statement. */
638 return gfc_finish_block (&if_se.pre);
/* Public entry point for IF translation; delegates to gfc_trans_if_1.  */
642 gfc_trans_if (gfc_code * code)
644 /* Ignore the top EXEC_IF, it only announces an IF construct. The
645 actual code we must translate is in code->block. */
647 return gfc_trans_if_1 (code->block);
651 /* Translate an arithmetic IF expression.
653 IF (cond) label1, label2, label3 translates to
665 An optimized version can be generated in case of equal labels.
666 E.g., if label1 is equal to label2, we can translate it to
675 gfc_trans_arithmetic_if (gfc_code * code)
683 /* Start a new block. */
684 gfc_init_se (&se, NULL);
685 gfc_start_block (&se.pre);
687 /* Pre-evaluate COND. */
/* Evaluate the condition exactly once; it is compared against zero up
   to two times below.  */
688 gfc_conv_expr_val (&se, code->expr);
689 se.expr = gfc_evaluate_now (se.expr, &se.pre);
691 /* Build something to compare with. */
692 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
/* label/label2/label3 are the <0, ==0 and >0 targets; equal labels let
   us collapse comparisons.  */
694 if (code->label->value != code->label2->value)
696 /* If (cond < 0) take branch1 else take branch2.
697 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
698 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
699 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
701 if (code->label->value != code->label3->value)
702 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
704 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
706 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
/* label1 == label2: the <0 and ==0 cases share one target.  */
709 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
711 if (code->label->value != code->label3->value
712 && code->label2->value != code->label3->value)
714 /* if (cond <= 0) take branch1 else take branch2. */
715 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
716 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
717 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
720 /* Append the COND_EXPR to the evaluation of COND, and return. */
721 gfc_add_expr_to_block (&se.pre, branch1);
722 return gfc_finish_block (&se.pre);
726 /* Translate the simple DO construct. This is where the loop variable has
727 integer type and step +-1. We can't use this in the general case
728 because integer overflow and floating point errors could give incorrect
730 We translate a do loop from:
732 DO dovar = from, to, step
738 [Evaluate loop bounds and step]
740 if ((step > 0) ? (dovar <= to) : (dovar => to))
746 cond = (dovar == to);
748 if (cond) goto end_label;
753 This helps the optimizers by avoiding the extra induction variable
754 used in the general case. */
757 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
758 tree from, tree to, tree step)
764 tree saved_dovar = NULL;
768 type = TREE_TYPE (dovar);
770 /* Initialize the DO variable: dovar = from. */
771 gfc_add_modify (pblock, dovar, from);
773 /* Save value for do-tinkering checking. */
/* With -fcheck=do, keep a shadow copy of dovar so modifications of the
   loop variable inside the body can be diagnosed.  */
774 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
776 saved_dovar = gfc_create_var (type, ".saved_dovar");
777 gfc_add_modify (pblock, saved_dovar, dovar);
780 /* Cycle and exit statements are implemented with gotos. */
781 cycle_label = gfc_build_label_decl (NULL_TREE);
782 exit_label = gfc_build_label_decl (NULL_TREE);
784 /* Put the labels where they can be found later. See gfc_trans_do(). */
785 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
788 gfc_start_block (&body);
790 /* Main loop body. */
791 tmp = gfc_trans_code (code->block->next);
792 gfc_add_expr_to_block (&body, tmp);
794 /* Label for cycle statements (if needed). */
795 if (TREE_USED (cycle_label))
797 tmp = build1_v (LABEL_EXPR, cycle_label);
798 gfc_add_expr_to_block (&body, tmp);
801 /* Check whether someone has modified the loop variable. */
802 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
804 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
805 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
806 "Loop variable has been modified");
809 /* Evaluate the loop condition. */
/* Test dovar == to BEFORE the increment, so the final iteration does
   not overflow dovar past 'to'.  */
810 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
811 cond = gfc_evaluate_now (cond, &body);
813 /* Increment the loop variable. */
814 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
815 gfc_add_modify (&body, dovar, tmp);
817 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
818 gfc_add_modify (&body, saved_dovar, dovar);
821 tmp = build1_v (GOTO_EXPR, exit_label);
822 TREE_USED (exit_label) = 1;
823 tmp = fold_build3 (COND_EXPR, void_type_node,
824 cond, tmp, build_empty_stmt ());
825 gfc_add_expr_to_block (&body, tmp);
827 /* Finish the loop body. */
828 tmp = gfc_finish_block (&body);
829 tmp = build1_v (LOOP_EXPR, tmp);
831 /* Only execute the loop if the number of iterations is positive. */
/* step is +-1 here (checked by the caller), so its sign is known at
   compile time and picks the right entry guard.  */
832 if (tree_int_cst_sgn (step) > 0)
833 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
835 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
836 tmp = fold_build3 (COND_EXPR, void_type_node,
837 cond, tmp, build_empty_stmt ());
838 gfc_add_expr_to_block (pblock, tmp);
840 /* Add the exit label. */
841 tmp = build1_v (LABEL_EXPR, exit_label);
842 gfc_add_expr_to_block (pblock, tmp);
844 return gfc_finish_block (pblock);
847 /* Translate the DO construct. This obviously is one of the most
848 important ones to get right with any compiler, but especially
851 We special case some loop forms as described in gfc_trans_simple_do.
852 For other cases we implement them with a separate loop count,
853 as described in the standard.
855 We translate a do loop from:
857 DO dovar = from, to, step
863 [evaluate loop bounds and step]
864 empty = (step > 0 ? to < from : to > from);
865 countm1 = (to - from) / step;
867 if (empty) goto exit_label;
873 if (countm1 == 0) goto exit_label;
878 countm1 is an unsigned integer. It is equal to the loop count minus one,
879 because the loop count itself can overflow. */
882 gfc_trans_do (gfc_code * code)
886 tree saved_dovar = NULL;
901 gfc_start_block (&block);
903 /* Evaluate all the expressions in the iterator. */
904 gfc_init_se (&se, NULL);
905 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
906 gfc_add_block_to_block (&block, &se.pre);
908 type = TREE_TYPE (dovar);
910 gfc_init_se (&se, NULL);
911 gfc_conv_expr_val (&se, code->ext.iterator->start);
912 gfc_add_block_to_block (&block, &se.pre);
913 from = gfc_evaluate_now (se.expr, &block);
915 gfc_init_se (&se, NULL);
916 gfc_conv_expr_val (&se, code->ext.iterator->end);
917 gfc_add_block_to_block (&block, &se.pre);
918 to = gfc_evaluate_now (se.expr, &block);
920 gfc_init_se (&se, NULL);
921 gfc_conv_expr_val (&se, code->ext.iterator->step);
922 gfc_add_block_to_block (&block, &se.pre);
923 step = gfc_evaluate_now (se.expr, &block);
/* With -fcheck=do, a zero step is a runtime error (would loop forever
   or divide by zero in the count computation).  */
925 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
927 tmp = fold_build2 (EQ_EXPR, boolean_type_node, step,
928 fold_convert (type, integer_zero_node));
929 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
930 "DO step value is zero");
933 /* Special case simple loops. */
934 if (TREE_CODE (type) == INTEGER_TYPE
935 && (integer_onep (step)
936 || tree_int_cst_equal (step, integer_minus_one_node)))
937 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
939 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
940 fold_convert (type, integer_zero_node));
/* countm1 must be unsigned: the iteration count can exceed the range
   of the signed loop-variable type by one.  */
942 if (TREE_CODE (type) == INTEGER_TYPE)
943 utype = unsigned_type_for (type);
945 utype = unsigned_type_for (gfc_array_index_type);
946 countm1 = gfc_create_var (utype, "countm1");
948 /* Cycle and exit statements are implemented with gotos. */
949 cycle_label = gfc_build_label_decl (NULL_TREE);
950 exit_label = gfc_build_label_decl (NULL_TREE);
951 TREE_USED (exit_label) = 1;
953 /* Initialize the DO variable: dovar = from. */
954 gfc_add_modify (&block, dovar, from);
956 /* Save value for do-tinkering checking. */
957 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
959 saved_dovar = gfc_create_var (type, ".saved_dovar");
960 gfc_add_modify (&block, saved_dovar, dovar);
963 /* Initialize loop count and jump to exit label if the loop is empty.
964 This code is executed before we enter the loop body. We generate:
967 if (to < from) goto exit_label;
968 countm1 = (to - from) / step;
972 if (to > from) goto exit_label;
973 countm1 = (from - to) / -step;
975 if (TREE_CODE (type) == INTEGER_TYPE)
/* Integer case: compute countm1 separately for positive and negative
   step so the subtraction is always non-negative, then select the
   right branch on pos_step at run time.  */
979 tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
980 pos = fold_build3 (COND_EXPR, void_type_node, tmp,
981 build1_v (GOTO_EXPR, exit_label),
982 build_empty_stmt ());
983 tmp = fold_build2 (MINUS_EXPR, type, to, from);
984 tmp = fold_convert (utype, tmp);
985 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
986 fold_convert (utype, step));
987 tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
988 pos = build2 (COMPOUND_EXPR, void_type_node, pos, tmp);
990 tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
991 neg = fold_build3 (COND_EXPR, void_type_node, tmp,
992 build1_v (GOTO_EXPR, exit_label),
993 build_empty_stmt ());
994 tmp = fold_build2 (MINUS_EXPR, type, from, to);
995 tmp = fold_convert (utype, tmp);
996 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
997 fold_convert (utype, fold_build1 (NEGATE_EXPR,
999 tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
1000 neg = build2 (COMPOUND_EXPR, void_type_node, neg, tmp);
1002 tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
1003 gfc_add_expr_to_block (&block, tmp);
/* Real-typed iterator: compute the trip count in floating point and
   truncate to the unsigned counter type.  */
1007 /* TODO: We could use the same width as the real type.
1008 This would probably cause more problems that it solves
1009 when we implement "long double" types. */
1011 tmp = fold_build2 (MINUS_EXPR, type, to, from);
1012 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
1013 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
1014 gfc_add_modify (&block, countm1, tmp);
1016 /* We need a special check for empty loops:
1017 empty = (step > 0 ? to < from : to > from); */
1018 tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
1019 fold_build2 (LT_EXPR, boolean_type_node, to, from),
1020 fold_build2 (GT_EXPR, boolean_type_node, to, from));
1021 /* If the loop is empty, go directly to the exit label. */
1022 tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
1023 build1_v (GOTO_EXPR, exit_label),
1024 build_empty_stmt ());
1025 gfc_add_expr_to_block (&block, tmp);
1029 gfc_start_block (&body);
1031 /* Put these labels where they can be found later. We put the
1032 labels in a TREE_LIST node (because TREE_CHAIN is already
1033 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
1034 label in TREE_VALUE (backend_decl). */
1036 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1038 /* Main loop body. */
1039 tmp = gfc_trans_code (code->block->next);
1040 gfc_add_expr_to_block (&body, tmp);
1042 /* Label for cycle statements (if needed). */
1043 if (TREE_USED (cycle_label))
1045 tmp = build1_v (LABEL_EXPR, cycle_label);
1046 gfc_add_expr_to_block (&body, tmp);
1049 /* Check whether someone has modified the loop variable. */
1050 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1052 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
1053 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1054 "Loop variable has been modified");
1057 /* Increment the loop variable. */
1058 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
1059 gfc_add_modify (&body, dovar, tmp);
1061 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1062 gfc_add_modify (&body, saved_dovar, dovar);
1064 /* End with the loop condition. Loop until countm1 == 0. */
/* Exit is tested BEFORE decrementing countm1, so the loop runs exactly
   countm1 + 1 times even when the count wraps the signed range.  */
1065 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
1066 build_int_cst (utype, 0));
1067 tmp = build1_v (GOTO_EXPR, exit_label);
1068 tmp = fold_build3 (COND_EXPR, void_type_node,
1069 cond, tmp, build_empty_stmt ());
1070 gfc_add_expr_to_block (&body, tmp);
1072 /* Decrement the loop count. */
1073 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
1074 gfc_add_modify (&body, countm1, tmp);
1076 /* End of loop body. */
1077 tmp = gfc_finish_block (&body);
1079 /* The for loop itself. */
1080 tmp = build1_v (LOOP_EXPR, tmp);
1081 gfc_add_expr_to_block (&block, tmp);
1083 /* Add the exit label. */
1084 tmp = build1_v (LABEL_EXPR, exit_label);
1085 gfc_add_expr_to_block (&block, tmp);
1087 return gfc_finish_block (&block);
1091 /* Translate the DO WHILE construct.
1104 if (! cond) goto exit_label;
1110 Because the evaluation of the exit condition `cond' may have side
1111 effects, we can't do much for empty loop bodies. The backend optimizers
1112 should be smart enough to eliminate any dead loops. */
1115 gfc_trans_do_while (gfc_code * code)
1123 /* Everything we build here is part of the loop body. */
1124 gfc_start_block (&block);
1126 /* Cycle and exit statements are implemented with gotos. */
1127 cycle_label = gfc_build_label_decl (NULL_TREE);
1128 exit_label = gfc_build_label_decl (NULL_TREE);
1130 /* Put the labels where they can be found later. See gfc_trans_do(). */
1131 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1133 /* Create a GIMPLE version of the exit condition. */
/* The condition is (re)evaluated at the top of every iteration, so its
   side effects happen once per pass.  */
1134 gfc_init_se (&cond, NULL);
1135 gfc_conv_expr_val (&cond, code->expr);
1136 gfc_add_block_to_block (&block, &cond.pre);
1137 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1139 /* Build "IF (! cond) GOTO exit_label". */
1140 tmp = build1_v (GOTO_EXPR, exit_label);
1141 TREE_USED (exit_label) = 1;
1142 tmp = fold_build3 (COND_EXPR, void_type_node,
1143 cond.expr, tmp, build_empty_stmt ());
1144 gfc_add_expr_to_block (&block, tmp);
1146 /* The main body of the loop. */
1147 tmp = gfc_trans_code (code->block->next);
1148 gfc_add_expr_to_block (&block, tmp);
1150 /* Label for cycle statements (if needed). */
1151 if (TREE_USED (cycle_label))
1153 tmp = build1_v (LABEL_EXPR, cycle_label);
1154 gfc_add_expr_to_block (&block, tmp);
1157 /* End of loop body. */
1158 tmp = gfc_finish_block (&block);
1160 gfc_init_block (&block);
1161 /* Build the loop. */
1162 tmp = build1_v (LOOP_EXPR, tmp);
1163 gfc_add_expr_to_block (&block, tmp);
1165 /* Add the exit label. */
1166 tmp = build1_v (LABEL_EXPR, exit_label);
1167 gfc_add_expr_to_block (&block, tmp);
1169 return gfc_finish_block (&block);
1173 /* Translate the SELECT CASE construct for INTEGER case expressions,
1174 without killing all potential optimizations. The problem is that
1175 Fortran allows unbounded cases, but the back-end does not, so we
1176 need to intercept those before we enter the equivalent SWITCH_EXPR
1179 For example, we translate this,
1182 CASE (:100,101,105:115)
1192 to the GENERIC equivalent,
1196 case (minimum value for typeof(expr) ... 100:
1202 case 200 ... (maximum value for typeof(expr):
1219 gfc_trans_integer_select (gfc_code * code)
1229 gfc_start_block (&block);
1231 /* Calculate the switch expression. */
1232 gfc_init_se (&se, NULL);
1233 gfc_conv_expr_val (&se, code->expr);
1234 gfc_add_block_to_block (&block, &se.pre);
1236 end_label = gfc_build_label_decl (NULL_TREE);
1238 gfc_init_block (&body);
/* Outer loop: one CASE block per alternative; inner loop: each case
   range in that block's case list.  */
1240 for (c = code->block; c; c = c->block)
1242 for (cp = c->ext.case_list; cp; cp = cp->next)
1247 /* Assume it's the default case. */
1248 low = high = NULL_TREE;
1252 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1255 /* If there's only a lower bound, set the high bound to the
1256 maximum value of the case expression. */
1258 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1263 /* Three cases are possible here:
1265 1) There is no lower bound, e.g. CASE (:N).
1266 2) There is a lower bound .NE. high bound, that is
1267 a case range, e.g. CASE (N:M) where M>N (we make
1268 sure that M>N during type resolution).
1269 3) There is a lower bound, and it has the same value
1270 as the high bound, e.g. CASE (N:N). This is our
1271 internal representation of CASE(N).
1273 In the first and second case, we need to set a value for
1274 high. In the third case, we don't because the GCC middle
1275 end represents a single case value by just letting high be
1276 a NULL_TREE. We can't do that because we need to be able
1277 to represent unbounded cases. */
1281 && mpz_cmp (cp->low->value.integer,
1282 cp->high->value.integer) != 0))
1283 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1286 /* Unbounded case. */
1288 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1291 /* Build a label. */
1292 label = gfc_build_label_decl (NULL_TREE);
1294 /* Add this case label.
1295 Add parameter 'label', make it match GCC backend. */
1296 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1298 gfc_add_expr_to_block (&body, tmp);
1301 /* Add the statements for this case. */
1302 tmp = gfc_trans_code (c->next);
1303 gfc_add_expr_to_block (&body, tmp);
1305 /* Break to the end of the construct. */
/* Fortran CASE blocks never fall through; emit the implicit break.  */
1306 tmp = build1_v (GOTO_EXPR, end_label);
1307 gfc_add_expr_to_block (&body, tmp);
1310 tmp = gfc_finish_block (&body);
1311 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1312 gfc_add_expr_to_block (&block, tmp);
1314 tmp = build1_v (LABEL_EXPR, end_label);
1315 gfc_add_expr_to_block (&block, tmp);
1317 return gfc_finish_block (&block);
1321 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1323 There are only two cases possible here, even though the standard
1324 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1325 .FALSE., and DEFAULT.
1327 We never generate more than two blocks here. Instead, we always
1328 try to eliminate the DEFAULT case. This way, we can translate this
1329 kind of SELECT construct to a simple
1333 expression in GENERIC. */
/* Translate SELECT CASE with a LOGICAL case expression into a COND_EXPR.
   At most a .TRUE. case, a .FALSE. case and a DEFAULT exist; the default is
   folded into whichever of .TRUE./.FALSE. is missing.
   NOTE(review): partial extraction -- declarations and the code assigning
   t/f/d from the case lists are largely missing from this view.  */
1336 gfc_trans_logical_select (gfc_code * code)
1339 gfc_code *t, *f, *d;
1344 /* Assume we don't have any cases at all. */
1347 /* Now see which ones we actually do have. We can have at most two
1348 cases in a single case list: one for .TRUE. and one for .FALSE.
1349 The default case is always separate. If the cases for .TRUE. and
1350 .FALSE. are in the same case list, the block for that case list
1351 always executed, and we don't generate code a COND_EXPR. */
1352 for (c = code->block; c; c = c->block)
1354 for (cp = c->ext.case_list; cp; cp = cp->next)
1358 if (cp->low->value.logical == 0) /* .FALSE. */
1360 else /* if (cp->value.logical != 0), thus .TRUE. */
1368 /* Start a new block. */
1369 gfc_start_block (&block);
1371 /* Calculate the switch expression. We always need to do this
1372 because it may have side effects. */
1373 gfc_init_se (&se, NULL);
1374 gfc_conv_expr_val (&se, code->expr);
1375 gfc_add_block_to_block (&block, &se.pre);
/* Both logical values select the same case list: no branch is needed.  */
1377 if (t == f && t != NULL)
1379 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1380 translate the code for these cases, append it to the current
1382 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
/* Otherwise build an if/else; missing branches default to empty.  */
1386 tree true_tree, false_tree, stmt;
1388 true_tree = build_empty_stmt ();
1389 false_tree = build_empty_stmt ();
1391 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1392 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1393 make the missing case the default case. */
1394 if (t != NULL && f != NULL)
1404 /* Translate the code for each of these blocks, and append it to
1405 the current block. */
1407 true_tree = gfc_trans_code (t->next);
1410 false_tree = gfc_trans_code (f->next);
1412 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1413 true_tree, false_tree);
1414 gfc_add_expr_to_block (&block, stmt);
1417 return gfc_finish_block (&block);
1421 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1422 Instead of generating compares and jumps, it is far simpler to
1423 generate a data structure describing the cases in order and call a
1424 library subroutine that locates the right case.
1425 This is particularly true because this is the only case where we
1426 might have to dispose of a temporary.
1427 The library subroutine returns a pointer to jump to or NULL if no
1428 branches are to be taken. */
/* Translate SELECT CASE with a CHARACTER case expression.  Builds a static
   jump table of (string1, string2, target) records and calls a runtime
   routine (_gfortran_select_string / ..._char4) that returns the case
   number to switch on.
   NOTE(review): partial extraction -- several declarations and statements
   between the visible lines are missing.  */
1431 gfc_trans_character_select (gfc_code *code)
1433 tree init, node, end_label, tmp, type, case_num, label, fndecl;
1434 stmtblock_t block, body;
1440 /* The jump table types are stored in static variables to avoid
1441 constructing them from scratch every single time. */
/* Index [0] is used for kind=1 strings, [1] for kind=4 (see the kind
   checks below); `k' selects between them.  */
1442 static tree select_struct[2];
1443 static tree ss_string1[2], ss_string1_len[2];
1444 static tree ss_string2[2], ss_string2_len[2];
1445 static tree ss_target[2];
1447 tree pchartype = gfc_get_pchar_type (code->expr->ts.kind);
1449 if (code->expr->ts.kind == 1)
1451 else if (code->expr->ts.kind == 4)
/* Lazily build the RECORD_TYPE describing one jump-table entry.  */
1456 if (select_struct[k] == NULL)
1458 select_struct[k] = make_node (RECORD_TYPE);
1460 if (code->expr->ts.kind == 1)
1461 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1462 else if (code->expr->ts.kind == 4)
1463 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
1468 #define ADD_FIELD(NAME, TYPE) \
1469 ss_##NAME[k] = gfc_add_field_to_struct \
1470 (&(TYPE_FIELDS (select_struct[k])), select_struct[k], \
1471 get_identifier (stringize(NAME)), TYPE)
1473 ADD_FIELD (string1, pchartype);
1474 ADD_FIELD (string1_len, gfc_charlen_type_node);
1476 ADD_FIELD (string2, pchartype);
1477 ADD_FIELD (string2_len, gfc_charlen_type_node);
/* `target' holds the integer case number the runtime returns.  */
1479 ADD_FIELD (target, integer_type_node);
1482 gfc_finish_type (select_struct[k]);
/* Walk to the leftmost case, then count/number the cases left-to-right
   (the count presumably ends up in `n' -- the lines are missing here).  */
1485 cp = code->block->ext.case_list;
1486 while (cp->left != NULL)
1490 for (d = cp; d; d = d->right)
1493 end_label = gfc_build_label_decl (NULL_TREE);
1495 /* Generate the body */
1496 gfc_start_block (&block);
1497 gfc_init_block (&body);
/* One CASE_LABEL_EXPR per case, keyed by its assigned number d->n.  */
1499 for (c = code->block; c; c = c->block)
1501 for (d = c->ext.case_list; d; d = d->next)
1503 label = gfc_build_label_decl (NULL_TREE);
1504 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1505 build_int_cst (NULL_TREE, d->n),
1506 build_int_cst (NULL_TREE, d->n), label);
1507 gfc_add_expr_to_block (&body, tmp);
1510 tmp = gfc_trans_code (c->next);
1511 gfc_add_expr_to_block (&body, tmp);
1513 tmp = build1_v (GOTO_EXPR, end_label);
1514 gfc_add_expr_to_block (&body, tmp);
1517 /* Generate the structure describing the branches */
/* Each entry: low bound (or NULL/0 for unbounded), high bound (or NULL/0),
   and the target case number.  */
1520 for(d = cp; d; d = d->right)
1524 gfc_init_se (&se, NULL);
1528 node = tree_cons (ss_string1[k], null_pointer_node, node);
1529 node = tree_cons (ss_string1_len[k], integer_zero_node, node);
1533 gfc_conv_expr_reference (&se, d->low);
1535 node = tree_cons (ss_string1[k], se.expr, node);
1536 node = tree_cons (ss_string1_len[k], se.string_length, node);
1539 if (d->high == NULL)
1541 node = tree_cons (ss_string2[k], null_pointer_node, node);
1542 node = tree_cons (ss_string2_len[k], integer_zero_node, node);
1546 gfc_init_se (&se, NULL);
1547 gfc_conv_expr_reference (&se, d->high);
1549 node = tree_cons (ss_string2[k], se.expr, node);
1550 node = tree_cons (ss_string2_len[k], se.string_length, node);
1553 node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n),
1556 tmp = build_constructor_from_list (select_struct[k], nreverse (node));
1557 init = tree_cons (NULL_TREE, tmp, init);
1560 type = build_array_type (select_struct[k],
1561 build_index_type (build_int_cst (NULL_TREE, n-1)));
1563 init = build_constructor_from_list (type, nreverse(init));
1564 TREE_CONSTANT (init) = 1;
1565 TREE_STATIC (init) = 1;
1566 /* Create a static variable to hold the jump table. */
1567 tmp = gfc_create_var (type, "jumptable");
1568 TREE_CONSTANT (tmp) = 1;
1569 TREE_STATIC (tmp) = 1;
1570 TREE_READONLY (tmp) = 1;
1571 DECL_INITIAL (tmp) = init;
1574 /* Build the library call */
1575 init = gfc_build_addr_expr (pvoid_type_node, init);
1577 gfc_init_se (&se, NULL);
1578 gfc_conv_expr_reference (&se, code->expr);
1580 gfc_add_block_to_block (&block, &se.pre);
/* Pick the runtime lookup routine matching the character kind.  */
1582 if (code->expr->ts.kind == 1)
1583 fndecl = gfor_fndecl_select_string;
1584 else if (code->expr->ts.kind == 4)
1585 fndecl = gfor_fndecl_select_string_char4;
1589 tmp = build_call_expr (fndecl, 4, init, build_int_cst (NULL_TREE, n),
1590 se.expr, se.string_length);
1591 case_num = gfc_create_var (integer_type_node, "case_num");
1592 gfc_add_modify (&block, case_num, tmp);
/* se.post may dispose of a temporary created for the select expression.  */
1594 gfc_add_block_to_block (&block, &se.post);
1596 tmp = gfc_finish_block (&body);
1597 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1598 gfc_add_expr_to_block (&block, tmp);
1600 tmp = build1_v (LABEL_EXPR, end_label);
1601 gfc_add_expr_to_block (&block, tmp);
1603 return gfc_finish_block (&block);
1607 /* Translate the three variants of the SELECT CASE construct.
1609 SELECT CASEs with INTEGER case expressions can be translated to an
1610 equivalent GENERIC switch statement, and for LOGICAL case
1611 expressions we build one or two if-else compares.
1613 SELECT CASEs with CHARACTER case expressions are a whole different
1614 story, because they don't exist in GENERIC. So we sort them and
1615 do a binary search at runtime.
1617 Fortran has no BREAK statement, and it does not allow jumps from
1618 one case block to another. That makes things a lot easier for
/* Dispatch a SELECT CASE construct to the type-specific translator
   (logical / integer / character).  An empty construct yields an empty
   statement; any other case-expression type is an internal error.  */
1622 gfc_trans_select (gfc_code * code)
1624 gcc_assert (code && code->expr);
1626 /* Empty SELECT constructs are legal. */
1627 if (code->block == NULL)
1628 return build_empty_stmt ();
1630 /* Select the correct translation function. */
1631 switch (code->expr->ts.type)
1633 case BT_LOGICAL: return gfc_trans_logical_select (code);
1634 case BT_INTEGER: return gfc_trans_integer_select (code);
1635 case BT_CHARACTER: return gfc_trans_character_select (code);
1637 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1643 /* Traversal function to substitute a replacement symtree if the symbol
1644 in the expression is the same as that passed. f == 2 signals that
1645 that variable itself is not to be checked - only the references.
1646 This group of functions is used when the variable expression in a
1647 FORALL assignment has internal references. For example:
1648 FORALL (i = 1:4) p(p(i)) = i
1649 The only recourse here is to store a copy of 'p' for the index
/* Shared state for the forall_replace / forall_restore traversals below:
   the replacement symtree (pointing at the temporary copy) and the original
   symtree it stands in for.  */
1652 static gfc_symtree *new_symtree;
1653 static gfc_symtree *old_symtree;
/* Traversal callback: if EXPR is a variable whose symbol matches SYM,
   redirect its symtree to the replacement (new_symtree).
   NOTE(review): partial extraction -- the handling of the *f flag (f == 2
   skips the variable itself per the file's comment above) is missing here. */
1656 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1658 if (expr->expr_type != EXPR_VARIABLE)
1663 else if (expr->symtree->n.sym == sym)
1664 expr->symtree = new_symtree;
/* Walk expression E, replacing references to SYM's symtree with the
   temporary's symtree.  F is forwarded to forall_replace as its flag.  */
1670 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1672 gfc_traverse_expr (e, sym, forall_replace, f);
/* Traversal callback: undo forall_replace by pointing any variable that
   references the temporary's symtree back at the original symtree.  */
1676 forall_restore (gfc_expr *expr,
1677 gfc_symbol *sym ATTRIBUTE_UNUSED,
1678 int *f ATTRIBUTE_UNUSED)
1680 if (expr->expr_type != EXPR_VARIABLE)
1683 if (expr->symtree == new_symtree)
1684 expr->symtree = old_symtree;
/* Walk expression E restoring every replaced symtree to the original.  */
1690 forall_restore_symtree (gfc_expr *e)
1692 gfc_traverse_expr (e, NULL, forall_restore, 0);
/* Make a temporary copy of the FORALL assignment's lvalue variable and
   redirect all references in C's lhs/rhs expressions to it, so that
   internal dependencies (e.g. FORALL (i=1:4) p(p(i)) = i) read the copy.
   Setup code goes to PRE, cleanup to POST.
   NOTE(review): partial extraction -- some statements between the visible
   lines are missing.  */
1696 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1701 gfc_symbol *new_sym;
1702 gfc_symbol *old_sym;
1706 /* Build a copy of the lvalue. */
1707 old_symtree = c->expr->symtree;
1708 old_sym = old_symtree->n.sym;
1709 e = gfc_lval_expr_from_sym (old_sym);
/* Array case: copy via a subreference array argument temporary.  */
1710 if (old_sym->attr.dimension)
1712 gfc_init_se (&tse, NULL);
1713 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
1714 gfc_add_block_to_block (pre, &tse.pre);
1715 gfc_add_block_to_block (post, &tse.post);
1716 tse.expr = build_fold_indirect_ref (tse.expr);
1718 if (e->ts.type != BT_CHARACTER)
1720 /* Use the variable offset for the temporary. */
1721 tmp = gfc_conv_descriptor_offset (tse.expr);
1722 gfc_add_modify (pre, tmp,
1723 gfc_conv_array_offset (old_sym->backend_decl));
/* Scalar case: create a temporary and assign the current value to it.  */
1728 gfc_init_se (&tse, NULL);
1729 gfc_init_se (&rse, NULL);
1730 gfc_conv_expr (&rse, e);
1731 if (e->ts.type == BT_CHARACTER)
1733 tse.string_length = rse.string_length;
1734 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1736 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1738 gfc_add_block_to_block (pre, &tse.pre);
1739 gfc_add_block_to_block (post, &tse.post);
1743 tmp = gfc_typenode_for_spec (&e->ts);
1744 tse.expr = gfc_create_var (tmp, "temp");
1747 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1748 e->expr_type == EXPR_VARIABLE);
1749 gfc_add_expr_to_block (pre, tmp);
1753 /* Create a new symbol to represent the lvalue. */
1754 new_sym = gfc_new_symbol (old_sym->name, NULL);
1755 new_sym->ts = old_sym->ts;
1756 new_sym->attr.referenced = 1;
1757 new_sym->attr.dimension = old_sym->attr.dimension;
1758 new_sym->attr.flavor = old_sym->attr.flavor;
1760 /* Use the temporary as the backend_decl. */
1761 new_sym->backend_decl = tse.expr;
1763 /* Create a fake symtree for it. */
1765 new_symtree = gfc_new_symtree (&root, old_sym->name);
1766 new_symtree->n.sym = new_sym;
1767 gcc_assert (new_symtree == root);
1769 /* Go through the expression reference replacing the old_symtree
1771 forall_replace_symtree (c->expr, old_sym, 2);
1773 /* Now we have made this temporary, we might as well use it for
1774 the right hand side. */
1775 forall_replace_symtree (c->expr2, old_sym, 1);
1779 /* Handles dependencies in forall assignments. */
/* Decide whether a FORALL assignment needs a temporary because of
   dependencies, creating the lvalue copy via forall_make_variable_temp
   where required.  PRE/POST collect setup and cleanup code.
   NOTE(review): partial extraction -- the return statements and some
   conditions between the visible lines are missing.  */
1781 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1788 lsym = c->expr->symtree->n.sym;
1789 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
1791 /* Now check for dependencies within the 'variable'
1792 expression itself. These are treated by making a complete
1793 copy of variable and changing all the references to it
1794 point to the copy instead. Note that the shallow copy of
1795 the variable will not suffice for derived types with
1796 pointer components. We therefore leave these to their
1798 if (lsym->ts.type == BT_DERIVED
1799 && lsym->ts.derived->attr.pointer_comp)
/* The lvalue uses a FORALL index inside its own reference chain: copy it. */
1803 if (find_forall_index (c->expr, lsym, 2) == SUCCESS)
1805 forall_make_variable_temp (c, pre, post);
1809 /* Substrings with dependencies are treated in the same
/* Same variable on both sides with overlapping substrings.  */
1811 if (c->expr->ts.type == BT_CHARACTER
1813 && c->expr2->expr_type == EXPR_VARIABLE
1814 && lsym == c->expr2->symtree->n.sym)
1816 for (lref = c->expr->ref; lref; lref = lref->next)
1817 if (lref->type == REF_SUBSTRING)
1819 for (rref = c->expr2->ref; rref; rref = rref->next)
1820 if (rref->type == REF_SUBSTRING)
/* RHS substring starts before the LHS substring: overlap, need a copy.  */
1824 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
1826 forall_make_variable_temp (c, pre, post);
/* Undo the symtree substitution on both sides of the assignment and free
   the fake symbol/symtree created by forall_make_variable_temp.  */
1835 cleanup_forall_symtrees (gfc_code *c)
1837 forall_restore_symtree (c->expr);
1838 forall_restore_symtree (c->expr2);
1839 gfc_free (new_symtree->n.sym);
1840 gfc_free (new_symtree);
1844 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1845 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1846 indicates whether we should generate code to test the FORALLs mask
1847 array. OUTER is the loop header to be used for initializing mask
1850 The generated loop format is:
1851 count = (end - start + step) / step
/* Wrap BODY in the iteration loops for one FORALL level (FORALL_TMP),
   innermost variable first.  Each loop runs a down-counting `count'
   computed as (end - start + step) / step, with an explicit exit label.
   MASK_FLAG controls mask-index bookkeeping; OUTER receives the mask-index
   initialization so it happens outside the nest.
   NOTE(review): partial extraction -- declarations and a few statements
   between the visible lines are missing.  */
1864 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1865 int mask_flag, stmtblock_t *outer)
1873 tree var, start, end, step;
1876 /* Initialize the mask index outside the FORALL nest. */
1877 if (mask_flag && forall_tmp->mask)
1878 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
1880 iter = forall_tmp->this_loop;
1881 nvar = forall_tmp->nvar;
/* One loop per FORALL index variable; BODY is re-wrapped each iteration.  */
1882 for (n = 0; n < nvar; n++)
1885 start = iter->start;
1889 exit_label = gfc_build_label_decl (NULL_TREE);
1890 TREE_USED (exit_label) = 1;
1892 /* The loop counter. */
1893 count = gfc_create_var (TREE_TYPE (var), "count");
1895 /* The body of the loop. */
1896 gfc_init_block (&block);
1898 /* The exit condition. */
/* Leave the loop once the down-counter reaches zero.  */
1899 cond = fold_build2 (LE_EXPR, boolean_type_node,
1900 count, build_int_cst (TREE_TYPE (count), 0));
1901 tmp = build1_v (GOTO_EXPR, exit_label);
1902 tmp = fold_build3 (COND_EXPR, void_type_node,
1903 cond, tmp, build_empty_stmt ());
1904 gfc_add_expr_to_block (&block, tmp);
1906 /* The main loop body. */
1907 gfc_add_expr_to_block (&block, body);
1909 /* Increment the loop variable. */
1910 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1911 gfc_add_modify (&block, var, tmp);
1913 /* Advance to the next mask element. Only do this for the
/* ... innermost loop (n == 0): the mask is indexed linearly.  */
1915 if (n == 0 && mask_flag && forall_tmp->mask)
1917 tree maskindex = forall_tmp->maskindex;
1918 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1919 maskindex, gfc_index_one_node);
1920 gfc_add_modify (&block, maskindex, tmp);
1923 /* Decrement the loop counter. */
1924 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
1925 build_int_cst (TREE_TYPE (var), 1));
1926 gfc_add_modify (&block, count, tmp);
1928 body = gfc_finish_block (&block);
1930 /* Loop var initialization. */
1931 gfc_init_block (&block);
1932 gfc_add_modify (&block, var, start);
1935 /* Initialize the loop counter. */
/* count = (end - start + step) / step, i.e. the trip count.  */
1936 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1937 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1938 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1939 gfc_add_modify (&block, count, tmp);
1941 /* The loop expression. */
1942 tmp = build1_v (LOOP_EXPR, body);
1943 gfc_add_expr_to_block (&block, tmp);
1945 /* The exit label. */
1946 tmp = build1_v (LABEL_EXPR, exit_label);
1947 gfc_add_expr_to_block (&block, tmp);
1949 body = gfc_finish_block (&block);
1956 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1957 is nonzero, the body is controlled by all masks in the forall nest.
1958 Otherwise, the innermost loop is not controlled by it's mask. This
1959 is used for initializing that mask. */
/* Wrap BODY in the loops of the whole FORALL nest, innermost level first
   (walking prev_nest outward).  When a level has a mask and masking is
   requested, guard BODY with a test of the mask element.  */
1962 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1967 forall_info *forall_tmp;
1968 tree mask, maskindex;
1970 gfc_start_block (&header);
1972 forall_tmp = nested_forall_info;
1973 while (forall_tmp != NULL)
1975 /* Generate body with masks' control. */
1978 mask = forall_tmp->mask;
1979 maskindex = forall_tmp->maskindex;
1981 /* If a mask was specified make the assignment conditional. */
1984 tmp = gfc_build_array_ref (mask, maskindex, NULL);
1985 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
/* Mask-index initializations accumulate into HEADER, before the nest.  */
1988 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1989 forall_tmp = forall_tmp->prev_nest;
1993 gfc_add_expr_to_block (&header, body);
1994 return gfc_finish_block (&header);
1998 /* Allocate data for holding a temporary array. Returns either a local
1999 temporary array or a pointer variable. */
/* Allocate storage for a temporary array of SIZE elements / BYTESIZE bytes.
   Small constant-size temporaries go on the stack; otherwise malloc is
   called and *PDATA receives the pointer for later freeing.  */
2002 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2009 if (INTEGER_CST_P (size))
/* Index range is 0 .. size-1.  */
2011 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
2012 gfc_index_one_node);
2017 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2018 type = build_array_type (elem_type, type);
2019 if (gfc_can_put_var_on_stack (bytesize))
2021 gcc_assert (INTEGER_CST_P (size));
2022 tmpvar = gfc_create_var (type, "temp");
/* Heap path: a pointer variable, reported back through *pdata.  */
2027 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2028 *pdata = convert (pvoid_type_node, tmpvar);
2030 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2031 gfc_add_modify (pblock, tmpvar, tmp);
2037 /* Generate codes to copy the temporary to the actual lhs. */
/* Build code copying the temporary TMP1 (indexed by COUNT1) back to the
   actual LHS of the FORALL assignment EXPR.  WHEREMASK (optionally INVERTed)
   guards each element copy; COUNT3 tracks the wheremask index.
   NOTE(review): partial extraction -- some declarations/conditions between
   the visible lines are missing.  */
2040 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2041 tree count1, tree wheremask, bool invert)
2045 stmtblock_t block, body;
2051 lss = gfc_walk_expr (expr);
/* Scalar LHS: a single guarded assignment, no scalarization loop.  */
2053 if (lss == gfc_ss_terminator)
2055 gfc_start_block (&block);
2057 gfc_init_se (&lse, NULL);
2059 /* Translate the expression. */
2060 gfc_conv_expr (&lse, expr);
2062 /* Form the expression for the temporary. */
2063 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2065 /* Use the scalar assignment as is. */
2066 gfc_add_block_to_block (&block, &lse.pre);
2067 gfc_add_modify (&block, lse.expr, tmp);
2068 gfc_add_block_to_block (&block, &lse.post);
2070 /* Increment the count1. */
2071 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2072 gfc_index_one_node);
2073 gfc_add_modify (&block, count1, tmp);
2075 tmp = gfc_finish_block (&block);
/* Array LHS: drive a scalarized loop over the LHS shape.  */
2079 gfc_start_block (&block);
2081 gfc_init_loopinfo (&loop1);
2082 gfc_init_se (&rse, NULL);
2083 gfc_init_se (&lse, NULL);
2085 /* Associate the lss with the loop. */
2086 gfc_add_ss_to_loop (&loop1, lss);
2088 /* Calculate the bounds of the scalarization. */
2089 gfc_conv_ss_startstride (&loop1);
2090 /* Setup the scalarizing loops. */
2091 gfc_conv_loop_setup (&loop1, &expr->where);
2093 gfc_mark_ss_chain_used (lss, 1);
2095 /* Start the scalarized loop body. */
2096 gfc_start_scalarized_body (&loop1, &body);
2098 /* Setup the gfc_se structures. */
2099 gfc_copy_loopinfo_to_se (&lse, &loop1);
2102 /* Form the expression of the temporary. */
2103 if (lss != gfc_ss_terminator)
2104 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2105 /* Translate expr. */
2106 gfc_conv_expr (&lse, expr);
2108 /* Use the scalar assignment. */
2109 rse.string_length = lse.string_length;
2110 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
2112 /* Form the mask expression according to the mask tree list. */
2115 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
/* INVERT selects the ELSEWHERE sense of the mask.  */
2117 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2118 TREE_TYPE (wheremaskexpr),
2120 tmp = fold_build3 (COND_EXPR, void_type_node,
2121 wheremaskexpr, tmp, build_empty_stmt ());
2124 gfc_add_expr_to_block (&body, tmp);
2126 /* Increment count1. */
2127 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2128 count1, gfc_index_one_node);
2129 gfc_add_modify (&body, count1, tmp);
2131 /* Increment count3. */
2134 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2135 count3, gfc_index_one_node);
2136 gfc_add_modify (&body, count3, tmp);
2139 /* Generate the copying loops. */
2140 gfc_trans_scalarizing_loops (&loop1, &body);
2141 gfc_add_block_to_block (&block, &loop1.pre);
2142 gfc_add_block_to_block (&block, &loop1.post);
2143 gfc_cleanup_loop (&loop1);
2145 tmp = gfc_finish_block (&block);
2151 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2152 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2153 and should not be freed. WHEREMASK is the conditional execution mask
2154 whose sense may be inverted by INVERT. */
/* Build code copying the RHS EXPR2 into the temporary TMP1 (indexed by
   COUNT1).  LSS/RSS come from compute_inner_temp_size and must not be
   freed.  WHEREMASK (optionally INVERTed, indexed by COUNT3) guards each
   element.
   NOTE(review): partial extraction -- some statements between the visible
   lines are missing.  */
2157 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2158 tree count1, gfc_ss *lss, gfc_ss *rss,
2159 tree wheremask, bool invert)
2161 stmtblock_t block, body1;
2168 gfc_start_block (&block);
2170 gfc_init_se (&rse, NULL);
2171 gfc_init_se (&lse, NULL);
/* Scalar case: single element copy, no scalarization loop.  */
2173 if (lss == gfc_ss_terminator)
2175 gfc_init_block (&body1);
2176 gfc_conv_expr (&rse, expr2);
2177 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2181 /* Initialize the loop. */
2182 gfc_init_loopinfo (&loop);
2184 /* We may need LSS to determine the shape of the expression. */
2185 gfc_add_ss_to_loop (&loop, lss);
2186 gfc_add_ss_to_loop (&loop, rss);
2188 gfc_conv_ss_startstride (&loop);
2189 gfc_conv_loop_setup (&loop, &expr2->where);
2191 gfc_mark_ss_chain_used (rss, 1);
2192 /* Start the loop body. */
2193 gfc_start_scalarized_body (&loop, &body1);
2195 /* Translate the expression. */
2196 gfc_copy_loopinfo_to_se (&rse, &loop);
2198 gfc_conv_expr (&rse, expr2);
2200 /* Form the expression of the temporary. */
2201 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2204 /* Use the scalar assignment. */
2205 lse.string_length = rse.string_length;
2206 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2207 expr2->expr_type == EXPR_VARIABLE);
2209 /* Form the mask expression according to the mask tree list. */
2212 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
/* INVERT selects the ELSEWHERE sense of the mask.  */
2214 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2215 TREE_TYPE (wheremaskexpr),
2217 tmp = fold_build3 (COND_EXPR, void_type_node,
2218 wheremaskexpr, tmp, build_empty_stmt ());
2221 gfc_add_expr_to_block (&body1, tmp);
2223 if (lss == gfc_ss_terminator)
2225 gfc_add_block_to_block (&block, &body1);
2227 /* Increment count1. */
2228 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2229 gfc_index_one_node);
2230 gfc_add_modify (&block, count1, tmp);
2234 /* Increment count1. */
2235 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2236 count1, gfc_index_one_node);
2237 gfc_add_modify (&body1, count1, tmp);
2239 /* Increment count3. */
2242 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2243 count3, gfc_index_one_node);
2244 gfc_add_modify (&body1, count3, tmp);
2247 /* Generate the copying loops. */
2248 gfc_trans_scalarizing_loops (&loop, &body1);
2250 gfc_add_block_to_block (&block, &loop.pre);
2251 gfc_add_block_to_block (&block, &loop.post);
2253 gfc_cleanup_loop (&loop);
2254 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2255 as tree nodes in SS may not be valid in different scope. */
2258 tmp = gfc_finish_block (&block);
2263 /* Calculate the size of temporary needed in the assignment inside forall.
2264 LSS and RSS are filled in this function. */
/* Compute the element count of the temporary needed for one assignment
   inside a FORALL: walks EXPR1/EXPR2, sets up a throw-away scalarization to
   get the loop bounds, and multiplies the extents.  Fills *LSS and *RSS for
   reuse by generate_loop_for_rhs_to_temp.
   NOTE(review): partial extraction -- declarations and a few statements
   between the visible lines are missing.  */
2267 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2268 stmtblock_t * pblock,
2269 gfc_ss **lss, gfc_ss **rss)
2277 *lss = gfc_walk_expr (expr1);
/* Scalar LHS leaves size at one element.  */
2280 size = gfc_index_one_node;
2281 if (*lss != gfc_ss_terminator)
2283 gfc_init_loopinfo (&loop);
2285 /* Walk the RHS of the expression. */
2286 *rss = gfc_walk_expr (expr2);
2287 if (*rss == gfc_ss_terminator)
2289 /* The rhs is scalar. Add a ss for the expression. */
2290 *rss = gfc_get_ss ();
2291 (*rss)->next = gfc_ss_terminator;
2292 (*rss)->type = GFC_SS_SCALAR;
2293 (*rss)->expr = expr2;
2296 /* Associate the SS with the loop. */
2297 gfc_add_ss_to_loop (&loop, *lss);
2298 /* We don't actually need to add the rhs at this point, but it might
2299 make guessing the loop bounds a bit easier. */
2300 gfc_add_ss_to_loop (&loop, *rss);
2302 /* We only want the shape of the expression, not rest of the junk
2303 generated by the scalarizer. */
2304 loop.array_parameter = 1;
2306 /* Calculate the bounds of the scalarization. */
/* Temporarily disable bounds checking: only the shape matters here.  */
2307 save_flag = gfc_option.rtcheck;
2308 gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
2309 gfc_conv_ss_startstride (&loop);
2310 gfc_option.rtcheck = save_flag;
2311 gfc_conv_loop_setup (&loop, &expr2->where);
2313 /* Figure out how many elements we need. */
/* size = product over dims of (to - from + 1); the `to' term is on a
   missing line here.  */
2314 for (i = 0; i < loop.dimen; i++)
2316 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2317 gfc_index_one_node, loop.from[i]);
2318 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2320 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2322 gfc_add_block_to_block (pblock, &loop.pre);
2323 size = gfc_evaluate_now (size, pblock);
2324 gfc_add_block_to_block (pblock, &loop.post);
2326 /* TODO: write a function that cleans up a loopinfo without freeing
2327 the SS chains. Currently a NOP. */
2334 /* Calculate the overall iterator number of the nested forall construct.
2335 This routine actually calculates the number of times the body of the
2336 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2337 that by the expression INNER_SIZE. The BLOCK argument specifies the
2338 block in which to calculate the result, and the optional INNER_SIZE_BODY
2339 argument contains any statements that need to executed (inside the loop)
2340 to initialize or calculate INNER_SIZE. */
/* Compute INNER_SIZE times the total trip count of the FORALL nest.
   Constant unconditional inner loops are folded at compile time; anything
   else accumulates into a runtime counter inside generated loops.
   INNER_SIZE_BODY holds statements needed to evaluate INNER_SIZE.
   NOTE(review): partial extraction -- the loop over constant-size levels
   and the returns are partly missing.  */
2343 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2344 stmtblock_t *inner_size_body, stmtblock_t *block)
2346 forall_info *forall_tmp = nested_forall_info;
2350 /* We can eliminate the innermost unconditional loops with constant
2352 if (INTEGER_CST_P (inner_size))
2355 && !forall_tmp->mask
2356 && INTEGER_CST_P (forall_tmp->size))
2358 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2359 inner_size, forall_tmp->size);
2360 forall_tmp = forall_tmp->prev_nest;
2363 /* If there are no loops left, we have our constant result. */
2368 /* Otherwise, create a temporary variable to compute the result. */
2369 number = gfc_create_var (gfc_array_index_type, "num");
2370 gfc_add_modify (block, number, gfc_index_zero_node);
/* num += inner_size, executed once per remaining iteration.  */
2372 gfc_start_block (&body);
2373 if (inner_size_body)
2374 gfc_add_block_to_block (&body, inner_size_body);
2376 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2377 number, inner_size);
2380 gfc_add_modify (&body, number, tmp);
2381 tmp = gfc_finish_block (&body);
2383 /* Generate loops. */
2384 if (forall_tmp != NULL)
2385 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2387 gfc_add_expr_to_block (block, tmp);
2393 /* Allocate temporary for forall construct. SIZE is the size of temporary
2394 needed. PTEMP1 is returned for space free. */
/* Allocate a temporary of SIZE elements of TYPE for a FORALL construct.
   Converts the element count to bytes and delegates to gfc_do_allocate;
   *PTEMP1 receives the heap pointer (for freeing) when one is used.  */
2397 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2404 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
/* Skip the multiply when the element size is one byte.  */
2405 if (!integer_onep (unit))
2406 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2411 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2414 tmp = build_fold_indirect_ref (tmp);
2419 /* Allocate temporary for forall construct according to the information in
2420 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2421 assignment inside forall. PTEMP1 is returned for space free. */
/* Allocate the temporary for a whole FORALL nest: total size is the nest's
   iteration count times INNER_SIZE (per-assignment element count).
   *PTEMP1 receives the heap pointer for later freeing.  */
2424 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2425 tree inner_size, stmtblock_t * inner_size_body,
2426 stmtblock_t * block, tree * ptemp1)
2430 /* Calculate the total size of temporary needed in forall construct. */
2431 size = compute_overall_iter_number (nested_forall_info, inner_size,
2432 inner_size_body, block);
2434 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2438 /* Handle assignments inside forall which need temporary.
2440 forall (i=start:end:stride; maskexpr)
2443 (where e,f<i> are arbitrary expressions possibly involving i
2444 and there is a dependency between e<i> and f<i>)
2446 masktmp(:) = maskexpr(:)
2451 for (i = start; i <= end; i += stride)
2455 for (i = start; i <= end; i += stride)
2457 if (masktmp[maskindex++])
2458 tmp[count1++] = f<i>
2462 for (i = start; i <= end; i += stride)
2464 if (masktmp[maskindex++])
2465 e<i> = tmp[count1++]
/* Translate a FORALL assignment that needs a temporary (because the RHS
   depends on the LHS): first a loop nest copying the RHS into a temporary,
   then a second nest copying the temporary into the LHS, then free the
   temporary.  WHEREMASK/INVERT guard the copies for WHERE inside FORALL.
   NOTE(review): partial extraction -- declarations and several statements
   between the visible lines are missing.  */
2470 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2471 tree wheremask, bool invert,
2472 forall_info * nested_forall_info,
2473 stmtblock_t * block)
2481 stmtblock_t inner_size_body;
2483 /* Create vars. count1 is the current iterator number of the nested
2485 count1 = gfc_create_var (gfc_array_index_type, "count1");
2487 /* Count is the wheremask index. */
2490 count = gfc_create_var (gfc_array_index_type, "count");
2491 gfc_add_modify (block, count, gfc_index_zero_node);
2496 /* Initialize count1. */
2497 gfc_add_modify (block, count1, gfc_index_zero_node);
2499 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2500 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2501 gfc_init_block (&inner_size_body);
2502 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2505 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
/* Character LHS: make sure the length has a backend decl so the temporary
   can be typed with it.  */
2506 if (expr1->ts.type == BT_CHARACTER && expr1->ts.cl->length)
2508 if (!expr1->ts.cl->backend_decl)
2511 gfc_init_se (&tse, NULL);
2512 gfc_conv_expr (&tse, expr1->ts.cl->length);
2513 expr1->ts.cl->backend_decl = tse.expr;
2515 type = gfc_get_character_type_len (gfc_default_character_kind,
2516 expr1->ts.cl->backend_decl);
2519 type = gfc_typenode_for_spec (&expr1->ts);
2521 /* Allocate temporary for nested forall construct according to the
2522 information in nested_forall_info and inner_size. */
2523 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2524 &inner_size_body, block, &ptemp1);
2526 /* Generate codes to copy rhs to the temporary . */
2527 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2530 /* Generate body and loops according to the information in
2531 nested_forall_info. */
2532 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2533 gfc_add_expr_to_block (block, tmp);
/* Reset the counters before the second (temp -> lhs) pass.  */
2536 gfc_add_modify (block, count1, gfc_index_zero_node);
2540 gfc_add_modify (block, count, gfc_index_zero_node);
2542 /* Generate codes to copy the temporary to lhs. */
2543 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2546 /* Generate body and loops according to the information in
2547 nested_forall_info. */
2548 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2549 gfc_add_expr_to_block (block, tmp);
2553 /* Free the temporary. */
2554 tmp = gfc_call_free (ptemp1);
2555 gfc_add_expr_to_block (block, tmp);
2560 /* Translate pointer assignment inside FORALL which need temporary. */
/* Translate EXPR1 => EXPR2 nested inside a FORALL when a temporary is
   required: all right-hand-side pointers/descriptors are first gathered
   into a flat temporary indexed by COUNT over the whole FORALL nest
   (NESTED_FORALL_INFO), then a second pass over the same nest performs
   the actual pointer assignments.  Generated code is appended to BLOCK.
   NOTE(review): this excerpt is missing interior lines (the embedded
   original line numbers skip); the visible tokens are kept verbatim.  */
2563 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2564 forall_info * nested_forall_info,
2565 stmtblock_t * block)
2579 tree tmp, tmp1, ptemp1;
/* COUNT indexes the temporary array across all iterations of the nest.  */
2581 count = gfc_create_var (gfc_array_index_type, "count");
2582 gfc_add_modify (block, count, gfc_index_zero_node);
2584 inner_size = integer_one_node;
2585 lss = gfc_walk_expr (expr1);
2586 rss = gfc_walk_expr (expr2);
/* Scalar LHS (lss == gfc_ss_terminator): store plain pointers.  */
2587 if (lss == gfc_ss_terminator)
2589 type = gfc_typenode_for_spec (&expr1->ts);
2590 type = build_pointer_type (type);
2592 /* Allocate temporary for nested forall construct according to the
2593 information in nested_forall_info and inner_size. */
2594 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2595 inner_size, NULL, block, &ptemp1);
/* Pass 1: evaluate each RHS pointer into temp[count].  */
2596 gfc_start_block (&body);
2597 gfc_init_se (&lse, NULL);
2598 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2599 gfc_init_se (&rse, NULL);
2600 rse.want_pointer = 1;
2601 gfc_conv_expr (&rse, expr2);
2602 gfc_add_block_to_block (&body, &rse.pre);
2603 gfc_add_modify (&body, lse.expr,
2604 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2605 gfc_add_block_to_block (&body, &rse.post);
2607 /* Increment count. */
2608 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2609 count, gfc_index_one_node);
2610 gfc_add_modify (&body, count, tmp);
2612 tmp = gfc_finish_block (&body);
2614 /* Generate body and loops according to the information in
2615 nested_forall_info. */
2616 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2617 gfc_add_expr_to_block (block, tmp);
/* Pass 2: rewind COUNT and assign temp[count] to each LHS pointer.  */
2620 gfc_add_modify (block, count, gfc_index_zero_node);
2622 gfc_start_block (&body)
2623 gfc_init_se (&lse, NULL);
2624 gfc_init_se (&rse, NULL);
2625 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2626 lse.want_pointer = 1;
2627 gfc_conv_expr (&lse, expr1);
2628 gfc_add_block_to_block (&body, &lse.pre);
2629 gfc_add_modify (&body, lse.expr, rse.expr);
2630 gfc_add_block_to_block (&body, &lse.post);
2631 /* Increment count. */
2632 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2633 count, gfc_index_one_node);
2634 gfc_add_modify (&body, count, tmp);
2635 tmp = gfc_finish_block (&body);
2637 /* Generate body and loops according to the information in
2638 nested_forall_info. */
2639 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2640 gfc_add_expr_to_block (block, tmp);
/* Array LHS: the temporary holds one array descriptor per iteration.  */
2644 gfc_init_loopinfo (&loop);
2646 /* Associate the SS with the loop. */
2647 gfc_add_ss_to_loop (&loop, rss);
2649 /* Setup the scalarizing loops and bounds. */
2650 gfc_conv_ss_startstride (&loop);
2652 gfc_conv_loop_setup (&loop, &expr2->where);
2654 info = &rss->data.info;
2655 desc = info->descriptor;
2657 /* Make a new descriptor. */
2658 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2659 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2660 loop.from, loop.to, 1,
2663 /* Allocate temporary for nested forall construct. */
2664 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2665 inner_size, NULL, block, &ptemp1);
/* Pass 1: build the RHS descriptor directly into temp[count]
   (direct_byref avoids an extra copy).  */
2666 gfc_start_block (&body);
2667 gfc_init_se (&lse, NULL);
2668 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2669 lse.direct_byref = 1;
2670 rss = gfc_walk_expr (expr2);
2671 gfc_conv_expr_descriptor (&lse, expr2, rss);
2673 gfc_add_block_to_block (&body, &lse.pre);
2674 gfc_add_block_to_block (&body, &lse.post);
2676 /* Increment count. */
2677 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2678 count, gfc_index_one_node);
2679 gfc_add_modify (&body, count, tmp);
2681 tmp = gfc_finish_block (&body);
2683 /* Generate body and loops according to the information in
2684 nested_forall_info. */
2685 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2686 gfc_add_expr_to_block (block, tmp);
/* Pass 2: copy each saved descriptor into the LHS descriptor.  */
2689 gfc_add_modify (block, count, gfc_index_zero_node);
2691 parm = gfc_build_array_ref (tmp1, count, NULL);
2692 lss = gfc_walk_expr (expr1);
2693 gfc_init_se (&lse, NULL);
2694 gfc_conv_expr_descriptor (&lse, expr1, lss);
2695 gfc_add_modify (&lse.pre, lse.expr, parm);
2696 gfc_start_block (&body);
2697 gfc_add_block_to_block (&body, &lse.pre);
2698 gfc_add_block_to_block (&body, &lse.post);
2700 /* Increment count. */
2701 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2702 count, gfc_index_one_node);
2703 gfc_add_modify (&body, count, tmp);
2705 tmp = gfc_finish_block (&body);
2707 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2708 gfc_add_expr_to_block (block, tmp);
2710 /* Free the temporary. */
/* presumably guarded by "if (ptemp1)" on a missing line — the
   temporary may have been stack-allocated; TODO confirm.  */
2713 tmp = gfc_call_free (ptemp1);
2714 gfc_add_expr_to_block (block, tmp);
2719 /* FORALL and WHERE statements are really nasty, especially when you nest
2720 them. All the rhs of a forall assignment must be evaluated before the
2721 actual assignments are performed. Presumably this also applies to all the
2722 assignments in an inner where statement. */
2724 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2725 linear array, relying on the fact that we process in the same order in all
2728 forall (i=start:end:stride; maskexpr)
2732 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2734 count = ((end + 1 - start) / stride)
2735 masktmp(:) = maskexpr(:)
2738 for (i = start; i <= end; i += stride)
2740 if (masktmp[maskindex++])
2744 for (i = start; i <= end; i += stride)
2746 if (masktmp[maskindex++])
2750 Note that this code only works when there are no dependencies.
2751 Forall loop with array assignments and data dependencies are a real pain,
2752 because the size of the temporary cannot always be determined before the
2753 loop is executed. This problem is compounded by the presence of nested
/* Worker for gfc_trans_forall: translate one FORALL level.  CODE is the
   FORALL statement/construct; NESTED_FORALL_INFO describes enclosing
   FORALL levels (NULL at the outermost).  Builds per-index shadow
   variables, an optional packed mask temporary, then dispatches on each
   statement in the FORALL body (assignment, pointer assignment, nested
   WHERE/FORALL, assignment calls).  Returns the finished statement tree.
   NOTE(review): interior lines are missing from this excerpt (embedded
   line numbers skip); visible tokens are preserved verbatim.  */
2758 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2778 gfc_forall_iterator *fa;
2781 gfc_saved_var *saved_vars;
2782 iter_info *this_forall;
2786 /* Do nothing if the mask is false. */
2788 && code->expr->expr_type == EXPR_CONSTANT
2789 && !code->expr->value.logical)
2790 return build_empty_stmt ();
2793 /* Count the FORALL index number. */
2794 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2798 /* Allocate the space for var, start, end, step, varexpr. */
2799 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2800 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2801 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2802 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2803 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2804 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2806 /* Allocate the space for info. */
2807 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2809 gfc_start_block (&pre);
2810 gfc_init_block (&post);
2811 gfc_init_block (&block);
/* For each FORALL index, shadow the symbol with a fresh temporary and
   record start/end/stride trees in the iter_info chain.  */
2814 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2816 gfc_symbol *sym = fa->var->symtree->n.sym;
2818 /* Allocate space for this_forall. */
2819 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2821 /* Create a temporary variable for the FORALL index. */
2822 tmp = gfc_typenode_for_spec (&sym->ts);
2823 var[n] = gfc_create_var (tmp, sym->name);
2824 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2826 /* Record it in this_forall. */
2827 this_forall->var = var[n];
2829 /* Replace the index symbol's backend_decl with the temporary decl. */
2830 sym->backend_decl = var[n];
2832 /* Work out the start, end and stride for the loop. */
2833 gfc_init_se (&se, NULL);
2834 gfc_conv_expr_val (&se, fa->start);
2835 /* Record it in this_forall. */
2836 this_forall->start = se.expr;
2837 gfc_add_block_to_block (&block, &se.pre);
2840 gfc_init_se (&se, NULL);
2841 gfc_conv_expr_val (&se, fa->end);
2842 /* Record it in this_forall. */
2843 this_forall->end = se.expr;
2844 gfc_make_safe_expr (&se);
2845 gfc_add_block_to_block (&block, &se.pre);
2848 gfc_init_se (&se, NULL);
2849 gfc_conv_expr_val (&se, fa->stride);
2850 /* Record it in this_forall. */
2851 this_forall->step = se.expr;
2852 gfc_make_safe_expr (&se);
2853 gfc_add_block_to_block (&block, &se.pre);
2856 /* Set the NEXT field of this_forall to NULL. */
2857 this_forall->next = NULL;
2858 /* Link this_forall to the info construct. */
/* Append to the tail so iteration order matches source order.  */
2859 if (info->this_loop)
2861 iter_info *iter_tmp = info->this_loop;
2862 while (iter_tmp->next != NULL)
2863 iter_tmp = iter_tmp->next;
2864 iter_tmp->next = this_forall;
2867 info->this_loop = this_forall;
2873 /* Calculate the size needed for the current forall level. */
/* size = product over all indices of the per-index trip count.  */
2874 size = gfc_index_one_node;
2875 for (n = 0; n < nvar; n++)
2877 /* size = (end + step - start) / step. */
2878 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2880 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2882 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2883 tmp = convert (gfc_array_index_type, tmp);
2885 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2888 /* Record the nvar and size of current forall level. */
2894 /* If the mask is .true., consider the FORALL unconditional. */
2895 if (code->expr->expr_type == EXPR_CONSTANT
2896 && code->expr->value.logical)
2904 /* First we need to allocate the mask. */
2907 /* As the mask array can be very big, prefer compact boolean types. */
2908 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2909 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2910 size, NULL, &block, &pmask);
2911 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2913 /* Record them in the info structure. */
2914 info->maskindex = maskindex;
2919 /* No mask was specified. */
2920 maskindex = NULL_TREE;
2921 mask = pmask = NULL_TREE;
2924 /* Link the current forall level to nested_forall_info. */
2925 info->prev_nest = nested_forall_info;
2926 nested_forall_info = info;
2928 /* Copy the mask into a temporary variable if required.
2929 For now we assume a mask temporary is needed. */
2932 /* As the mask array can be very big, prefer compact boolean types. */
2933 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2935 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
2937 /* Start of mask assignment loop body. */
2938 gfc_start_block (&body);
2940 /* Evaluate the mask expression. */
2941 gfc_init_se (&se, NULL);
2942 gfc_conv_expr_val (&se, code->expr);
2943 gfc_add_block_to_block (&body, &se.pre);
2945 /* Store the mask. */
2946 se.expr = convert (mask_type, se.expr);
2948 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2949 gfc_add_modify (&body, tmp, se.expr);
2951 /* Advance to the next mask element. */
2952 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2953 maskindex, gfc_index_one_node);
2954 gfc_add_modify (&body, maskindex, tmp);
2956 /* Generate the loops. */
2957 tmp = gfc_finish_block (&body);
/* Mask fill uses only this level's loops (info), not the whole nest.  */
2958 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2959 gfc_add_expr_to_block (&block, tmp);
2962 c = code->block->next;
2964 /* TODO: loop merging in FORALL statements. */
2965 /* Now that we've got a copy of the mask, generate the assignment loops. */
/* Body dispatch — a switch on c->op is on missing lines here; the
   visible cases follow.  TODO confirm against the full source.  */
2971 /* A scalar or array assignment. DO the simple check for
2972 lhs to rhs dependencies. These make a temporary for the
2973 rhs and form a second forall block to copy to variable. */
2974 need_temp = check_forall_dependencies(c, &pre, &post);
2976 /* Temporaries due to array assignment data dependencies introduce
2977 no end of problems. */
2979 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2980 nested_forall_info, &block);
2983 /* Use the normal assignment copying routines. */
2984 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2986 /* Generate body and loops. */
2987 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2989 gfc_add_expr_to_block (&block, tmp);
2992 /* Cleanup any temporary symtrees that have been made to deal
2993 with dependencies. */
2995 cleanup_forall_symtrees (c);
3000 /* Translate WHERE or WHERE construct nested in FORALL. */
3001 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3004 /* Pointer assignment inside FORALL. */
3005 case EXEC_POINTER_ASSIGN:
3006 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
3008 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
3009 nested_forall_info, &block);
3012 /* Use the normal assignment copying routines. */
3013 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
3015 /* Generate body and loops. */
3016 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3018 gfc_add_expr_to_block (&block, tmp);
/* Nested FORALL: recurse with the extended nest info.  */
3023 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3024 gfc_add_expr_to_block (&block, tmp);
3027 /* Explicit subroutine calls are prevented by the frontend but interface
3028 assignments can legitimately produce them. */
3029 case EXEC_ASSIGN_CALL:
3030 assign = gfc_trans_call (c, true);
3031 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3032 gfc_add_expr_to_block (&block, tmp);
3042 /* Restore the original index variables. */
3043 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3044 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3046 /* Free the space for var, start, end, step, varexpr. */
3052 gfc_free (saved_vars);
3054 /* Free the space for this forall_info. */
3059 /* Free the temporary for the mask. */
3060 tmp = gfc_call_free (pmask);
3061 gfc_add_expr_to_block (&block, tmp);
/* Make the mask index visible to the gimplifier in the current scope.  */
3064 pushdecl (maskindex);
3066 gfc_add_block_to_block (&pre, &block);
3067 gfc_add_block_to_block (&pre, &post);
3069 return gfc_finish_block (&pre);
3073 /* Translate the FORALL statement or construct. */
/* Public entry point: translate CODE by delegating to gfc_trans_forall_1
   with a NULL nesting context (this is the outermost FORALL level).  */
3075 tree gfc_trans_forall (gfc_code * code)
3077 return gfc_trans_forall_1 (code, NULL);
3081 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3082 If the WHERE construct is nested in FORALL, compute the overall temporary
3083 needed by the WHERE mask expression multiplied by the iterator number of
3085 ME is the WHERE mask expression.
3086 MASK is the current execution mask upon input, whose sense may or may
3087 not be inverted as specified by the INVERT argument.
3088 CMASK is the updated execution mask on output, or NULL if not required.
3089 PMASK is the pending execution mask on output, or NULL if not required.
3090 BLOCK is the block in which to place the condition evaluation loops. */
/* Evaluate the WHERE mask expression ME once into packed temporaries.
   MASK is the incoming control mask (sense given by INVERT); CMASK
   receives mask AND me, PMASK receives mask AND NOT me — either may be
   NULL_TREE if not needed.  MASK_TYPE is the compact logical element
   type; generated loops are appended to BLOCK, wrapped in the FORALL
   nest loops when NESTED_FORALL_INFO is non-NULL.
   NOTE(review): interior lines are missing from this excerpt; visible
   tokens are preserved verbatim.  */
3093 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3094 tree mask, bool invert, tree cmask, tree pmask,
3095 tree mask_type, stmtblock_t * block)
3100 stmtblock_t body, body1;
3101 tree count, cond, mtmp;
3104 gfc_init_loopinfo (&loop);
3106 lss = gfc_walk_expr (me);
3107 rss = gfc_walk_expr (me);
3109 /* Variable to index the temporary. */
3110 count = gfc_create_var (gfc_array_index_type, "count");
3111 /* Initialize count. */
3112 gfc_add_modify (block, count, gfc_index_zero_node);
3114 gfc_start_block (&body);
3116 gfc_init_se (&rse, NULL);
3117 gfc_init_se (&lse, NULL);
/* Scalar mask expression: no scalarizer loop needed.  */
3119 if (lss == gfc_ss_terminator)
3121 gfc_init_block (&body1);
3125 /* Initialize the loop. */
3126 gfc_init_loopinfo (&loop);
3128 /* We may need LSS to determine the shape of the expression. */
3129 gfc_add_ss_to_loop (&loop, lss);
3130 gfc_add_ss_to_loop (&loop, rss);
3132 gfc_conv_ss_startstride (&loop);
3133 gfc_conv_loop_setup (&loop, &me->where);
3135 gfc_mark_ss_chain_used (rss, 1);
3136 /* Start the loop body. */
3137 gfc_start_scalarized_body (&loop, &body1);
3139 /* Translate the expression. */
3140 gfc_copy_loopinfo_to_se (&rse, &loop);
3142 gfc_conv_expr (&rse, me);
3145 /* Variable to evaluate mask condition. */
3146 cond = gfc_create_var (mask_type, "cond");
/* mtmp caches the (possibly inverted) incoming mask element so it is
   read only once per iteration.  */
3147 if (mask && (cmask || pmask))
3148 mtmp = gfc_create_var (mask_type, "mask");
3149 else mtmp = NULL_TREE;
3151 gfc_add_block_to_block (&body1, &lse.pre);
3152 gfc_add_block_to_block (&body1, &rse.pre);
3154 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3156 if (mask && (cmask || pmask))
3158 tmp = gfc_build_array_ref (mask, count, NULL);
/* Invert the incoming mask sense when requested (guard on a missing
   line — presumably "if (invert)"; TODO confirm).  */
3160 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3161 gfc_add_modify (&body1, mtmp, tmp);
/* cmask[count] = mask AND cond.  */
3166 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3169 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3170 gfc_add_modify (&body1, tmp1, tmp);
/* pmask[count] = mask AND NOT cond.  */
3175 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3176 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3178 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3179 gfc_add_modify (&body1, tmp1, tmp);
3182 gfc_add_block_to_block (&body1, &lse.post);
3183 gfc_add_block_to_block (&body1, &rse.post);
3185 if (lss == gfc_ss_terminator)
3187 gfc_add_block_to_block (&body, &body1);
3191 /* Increment count. */
3192 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3193 gfc_index_one_node);
3194 gfc_add_modify (&body1, count, tmp1);
3196 /* Generate the copying loops. */
3197 gfc_trans_scalarizing_loops (&loop, &body1);
3199 gfc_add_block_to_block (&body, &loop.pre);
3200 gfc_add_block_to_block (&body, &loop.post);
3202 gfc_cleanup_loop (&loop);
3203 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3204 as tree nodes in SS may not be valid in different scope. */
3207 tmp1 = gfc_finish_block (&body);
3208 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3209 if (nested_forall_info != NULL)
3210 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3212 gfc_add_expr_to_block (block, tmp1);
3216 /* Translate an assignment statement in a WHERE statement or construct
3217 statement. The MASK expression is used to control which elements
3218 of EXPR1 shall be assigned. The sense of MASK is specified by
/* Translate one masked assignment EXPR1 = EXPR2 inside WHERE.  MASK is
   the packed control-mask temporary (sense given by INVERT); COUNT1 and
   COUNT2 index into it (COUNT2 is used for the temp-to-lhs copy loop
   when the scalarizer created a temporary).  SYM, when set, names an
   operator-assignment subroutine — see resolved_sym use below.
   NOTE(review): interior lines are missing from this excerpt; visible
   tokens are preserved verbatim.  */
3222 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3223 tree mask, bool invert,
3224 tree count1, tree count2,
3230 gfc_ss *lss_section;
3237 tree index, maskexpr;
3240 /* TODO: handle this special case.
3241 Special case a single function returning an array. */
3242 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3244 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3250 /* Assignment of the form lhs = rhs. */
3251 gfc_start_block (&block);
3253 gfc_init_se (&lse, NULL);
3254 gfc_init_se (&rse, NULL);
3257 lss = gfc_walk_expr (expr1);
3260 /* In each where-assign-stmt, the mask-expr and the variable being
3261 defined shall be arrays of the same shape. */
3262 gcc_assert (lss != gfc_ss_terminator);
3264 /* The assignment needs scalarization. */
3267 /* Find a non-scalar SS from the lhs. */
3268 while (lss_section != gfc_ss_terminator
3269 && lss_section->type != GFC_SS_SECTION)
3270 lss_section = lss_section->next;
3272 gcc_assert (lss_section != gfc_ss_terminator);
3274 /* Initialize the scalarizer. */
3275 gfc_init_loopinfo (&loop);
3278 rss = gfc_walk_expr (expr2);
3279 if (rss == gfc_ss_terminator)
3281 /* The rhs is scalar. Add a ss for the expression. */
3282 rss = gfc_get_ss ();
3284 rss->next = gfc_ss_terminator;
3285 rss->type = GFC_SS_SCALAR;
3289 /* Associate the SS with the loop. */
3290 gfc_add_ss_to_loop (&loop, lss);
3291 gfc_add_ss_to_loop (&loop, rss);
3293 /* Calculate the bounds of the scalarization. */
3294 gfc_conv_ss_startstride (&loop);
3296 /* Resolve any data dependencies in the statement. */
/* May create loop.temp_ss, forcing the two-pass temp copy below.  */
3297 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3299 /* Setup the scalarizing loops. */
3300 gfc_conv_loop_setup (&loop, &expr2->where);
3302 /* Setup the gfc_se structures. */
3303 gfc_copy_loopinfo_to_se (&lse, &loop);
3304 gfc_copy_loopinfo_to_se (&rse, &loop);
3307 gfc_mark_ss_chain_used (rss, 1);
3308 if (loop.temp_ss == NULL)
3311 gfc_mark_ss_chain_used (lss, 1);
3315 lse.ss = loop.temp_ss;
3316 gfc_mark_ss_chain_used (lss, 3);
3317 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3320 /* Start the scalarized loop body. */
3321 gfc_start_scalarized_body (&loop, &body);
3323 /* Translate the expression. */
3324 gfc_conv_expr (&rse, expr2);
3325 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3327 gfc_conv_tmp_array_ref (&lse);
3328 gfc_advance_se_ss_chain (&lse);
3331 gfc_conv_expr (&lse, expr1);
3333 /* Form the mask expression according to the mask. */
/* index is set on a missing line — presumably count1; TODO confirm.  */
3335 maskexpr = gfc_build_array_ref (mask, index, NULL);
3337 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3339 /* Use the scalar assignment as is. */
3341 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3342 loop.temp_ss != NULL, false);
/* Operator assignment: call the user-defined subroutine instead.  */
3344 tmp = gfc_conv_operator_assign (&lse, &rse, sym);
3346 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3348 gfc_add_expr_to_block (&body, tmp);
3350 if (lss == gfc_ss_terminator)
3352 /* Increment count1. */
3353 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3354 count1, gfc_index_one_node);
3355 gfc_add_modify (&body, count1, tmp);
3357 /* Use the scalar assignment as is. */
3358 gfc_add_block_to_block (&block, &body);
3362 gcc_assert (lse.ss == gfc_ss_terminator
3363 && rse.ss == gfc_ss_terminator);
3365 if (loop.temp_ss != NULL)
3367 /* Increment count1 before finish the main body of a scalarized
3369 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3370 count1, gfc_index_one_node);
3371 gfc_add_modify (&body, count1, tmp);
3372 gfc_trans_scalarized_loop_boundary (&loop, &body);
3374 /* We need to copy the temporary to the actual lhs. */
3375 gfc_init_se (&lse, NULL);
3376 gfc_init_se (&rse, NULL);
3377 gfc_copy_loopinfo_to_se (&lse, &loop);
3378 gfc_copy_loopinfo_to_se (&rse, &loop);
3380 rse.ss = loop.temp_ss;
3383 gfc_conv_tmp_array_ref (&rse);
3384 gfc_advance_se_ss_chain (&rse);
3385 gfc_conv_expr (&lse, expr1);
3387 gcc_assert (lse.ss == gfc_ss_terminator
3388 && rse.ss == gfc_ss_terminator);
3390 /* Form the mask expression according to the mask tree list. */
/* Second pass indexes the mask with count2 (via index, set on a
   missing line — TODO confirm).  */
3392 maskexpr = gfc_build_array_ref (mask, index, NULL);
3394 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3397 /* Use the scalar assignment as is. */
3398 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3399 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3400 gfc_add_expr_to_block (&body, tmp);
3402 /* Increment count2. */
3403 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3404 count2, gfc_index_one_node);
3405 gfc_add_modify (&body, count2, tmp);
3409 /* Increment count1. */
3410 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3411 count1, gfc_index_one_node);
3412 gfc_add_modify (&body, count1, tmp);
3415 /* Generate the copying loops. */
3416 gfc_trans_scalarizing_loops (&loop, &body);
3418 /* Wrap the whole thing up. */
3419 gfc_add_block_to_block (&block, &loop.pre);
3420 gfc_add_block_to_block (&block, &loop.post);
3421 gfc_cleanup_loop (&loop);
3424 return gfc_finish_block (&block);
3428 /* Translate the WHERE construct or statement.
3429 This function can be called iteratively to translate the nested WHERE
3430 construct or statement.
3431 MASK is the control mask. */
/* Recursive translator for WHERE statements/constructs (possibly nested
   in FORALL).  MASK/INVERT describe the incoming control mask; CMASK
   and PMASK temporaries are allocated only when the clause structure
   requires them, and freed at the end.  Generated code goes to BLOCK.
   NOTE(review): interior lines are missing from this excerpt; visible
   tokens are preserved verbatim.  */
3434 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3435 forall_info * nested_forall_info, stmtblock_t * block)
3437 stmtblock_t inner_size_body;
3438 tree inner_size, size;
3447 tree count1, count2;
3451 tree pcmask = NULL_TREE;
3452 tree ppmask = NULL_TREE;
3453 tree cmask = NULL_TREE;
3454 tree pmask = NULL_TREE;
3455 gfc_actual_arglist *arg;
3457 /* the WHERE statement or the WHERE construct statement. */
3458 cblock = code->block;
3460 /* As the mask array can be very big, prefer compact boolean types. */
3461 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3463 /* Determine which temporary masks are needed. */
3466 /* One clause: No ELSEWHEREs. */
3467 need_cmask = (cblock->next != 0);
3470 else if (cblock->block->block)
3472 /* Three or more clauses: Conditional ELSEWHEREs. */
3476 else if (cblock->next)
3478 /* Two clauses, the first non-empty. */
/* pmask needed only when nested (mask set) and the ELSEWHERE body is
   non-empty.  */
3480 need_pmask = (mask != NULL_TREE
3481 && cblock->block->next != 0);
3483 else if (!cblock->block->next)
3485 /* Two clauses, both empty. */
3489 /* Two clauses, the first empty, the second non-empty. */
3492 need_cmask = (cblock->block->expr != 0);
3501 if (need_cmask || need_pmask)
3503 /* Calculate the size of temporary needed by the mask-expr. */
3504 gfc_init_block (&inner_size_body);
3505 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3506 &inner_size_body, &lss, &rss);
3508 /* Calculate the total size of temporary needed. */
3509 size = compute_overall_iter_number (nested_forall_info, inner_size,
3510 &inner_size_body, block);
3512 /* Check whether the size is negative. */
/* Clamp a negative extent to zero (empty iteration space).  */
3513 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3514 gfc_index_zero_node);
3515 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3516 gfc_index_zero_node, size);
3517 size = gfc_evaluate_now (size, block);
3519 /* Allocate temporary for WHERE mask if needed. */
3521 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3524 /* Allocate temporary for !mask if needed. */
3526 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3532 /* Each time around this loop, the where clause is conditional
3533 on the value of mask and invert, which are updated at the
3534 bottom of the loop. */
3536 /* Has mask-expr. */
3539 /* Ensure that the WHERE mask will be evaluated exactly once.
3540 If there are no statements in this WHERE/ELSEWHERE clause,
3541 then we don't need to update the control mask (cmask).
3542 If this is the last clause of the WHERE construct, then
3543 we don't need to update the pending control mask (pmask). */
3545 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3547 cblock->next ? cmask : NULL_TREE,
3548 cblock->block ? pmask : NULL_TREE,
3551 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3553 (cblock->next || cblock->block)
3554 ? cmask : NULL_TREE,
3555 NULL_TREE, mask_type, block);
3559 /* It's a final elsewhere-stmt. No mask-expr is present. */
3563 /* The body of this where clause are controlled by cmask with
3564 sense specified by invert. */
3566 /* Get the assignment statement of a WHERE statement, or the first
3567 statement in where-body-construct of a WHERE construct. */
3568 cnext = cblock->next;
3573 /* WHERE assignment statement. */
3574 case EXEC_ASSIGN_CALL:
/* Operator assignment: LHS/RHS arrive as actual arguments.  */
3576 arg = cnext->ext.actual;
3577 expr1 = expr2 = NULL;
3578 for (; arg; arg = arg->next)
3590 expr1 = cnext->expr;
3591 expr2 = cnext->expr2;
3593 if (nested_forall_info != NULL)
3595 need_temp = gfc_check_dependency (expr1, expr2, 0);
3596 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3597 gfc_trans_assign_need_temp (expr1, expr2,
3599 nested_forall_info, block);
3602 /* Variables to control maskexpr. */
3603 count1 = gfc_create_var (gfc_array_index_type, "count1");
3604 count2 = gfc_create_var (gfc_array_index_type, "count2");
3605 gfc_add_modify (block, count1, gfc_index_zero_node);
3606 gfc_add_modify (block, count2, gfc_index_zero_node);
3608 tmp = gfc_trans_where_assign (expr1, expr2,
3611 cnext->resolved_sym);
3613 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3615 gfc_add_expr_to_block (block, tmp);
/* Not nested in FORALL: plain masked assignment, no wrapping loops.  */
3620 /* Variables to control maskexpr. */
3621 count1 = gfc_create_var (gfc_array_index_type, "count1");
3622 count2 = gfc_create_var (gfc_array_index_type, "count2");
3623 gfc_add_modify (block, count1, gfc_index_zero_node);
3624 gfc_add_modify (block, count2, gfc_index_zero_node);
3626 tmp = gfc_trans_where_assign (expr1, expr2,
3629 cnext->resolved_sym);
3630 gfc_add_expr_to_block (block, tmp);
3635 /* WHERE or WHERE construct is part of a where-body-construct. */
3637 gfc_trans_where_2 (cnext, cmask, invert,
3638 nested_forall_info, block);
3645 /* The next statement within the same where-body-construct. */
3646 cnext = cnext->next;
3648 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3649 cblock = cblock->block;
3650 if (mask == NULL_TREE)
3652 /* If we're the initial WHERE, we can simply invert the sense
3653 of the current mask to obtain the "mask" for the remaining
3660 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3666 /* If we allocated a pending mask array, deallocate it now. */
3669 tmp = gfc_call_free (ppmask);
3670 gfc_add_expr_to_block (block, tmp);
3673 /* If we allocated a current mask array, deallocate it now. */
3676 tmp = gfc_call_free (pcmask);
3677 gfc_add_expr_to_block (block, tmp);
3681 /* Translate a simple WHERE construct or statement without dependencies.
3682 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3683 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3684 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
3687 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
/* Fast path for a dependence-free WHERE (and optional plain ELSEWHERE):
   both assignments are fused into one scalarized loop guarded by a
   COND_EXPR on the mask value — no mask temporary is allocated.
   Caller (gfc_trans_where) has already proven there are no LHS/RHS
   dependencies.  NOTE(review): interior lines are missing from this
   excerpt; visible tokens are preserved verbatim.  */
3689 stmtblock_t block, body;
3690 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3691 tree tmp, cexpr, tstmt, estmt;
3692 gfc_ss *css, *tdss, *tsss;
3693 gfc_se cse, tdse, tsse, edse, esse;
3698 cond = cblock->expr;
3699 tdst = cblock->next->expr;
3700 tsrc = cblock->next->expr2;
3701 edst = eblock ? eblock->next->expr : NULL;
3702 esrc = eblock ? eblock->next->expr2 : NULL;
3704 gfc_start_block (&block);
3705 gfc_init_loopinfo (&loop);
3707 /* Handle the condition. */
3708 gfc_init_se (&cse, NULL);
3709 css = gfc_walk_expr (cond);
3710 gfc_add_ss_to_loop (&loop, css);
3712 /* Handle the then-clause. */
3713 gfc_init_se (&tdse, NULL);
3714 gfc_init_se (&tsse, NULL);
3715 tdss = gfc_walk_expr (tdst);
3716 tsss = gfc_walk_expr (tsrc);
/* Scalar RHS: wrap it in a GFC_SS_SCALAR entry so the scalarizer can
   carry it alongside the array SS chains.  */
3717 if (tsss == gfc_ss_terminator)
3719 tsss = gfc_get_ss ();
3721 tsss->next = gfc_ss_terminator;
3722 tsss->type = GFC_SS_SCALAR;
3725 gfc_add_ss_to_loop (&loop, tdss);
3726 gfc_add_ss_to_loop (&loop, tsss);
3730 /* Handle the else clause. */
3731 gfc_init_se (&edse, NULL);
3732 gfc_init_se (&esse, NULL);
3733 edss = gfc_walk_expr (edst);
3734 esss = gfc_walk_expr (esrc);
3735 if (esss == gfc_ss_terminator)
3737 esss = gfc_get_ss ();
3739 esss->next = gfc_ss_terminator;
3740 esss->type = GFC_SS_SCALAR;
3743 gfc_add_ss_to_loop (&loop, edss);
3744 gfc_add_ss_to_loop (&loop, esss);
3747 gfc_conv_ss_startstride (&loop);
3748 gfc_conv_loop_setup (&loop, &tdst->where);
3750 gfc_mark_ss_chain_used (css, 1);
3751 gfc_mark_ss_chain_used (tdss, 1);
3752 gfc_mark_ss_chain_used (tsss, 1);
3755 gfc_mark_ss_chain_used (edss, 1);
3756 gfc_mark_ss_chain_used (esss, 1);
3759 gfc_start_scalarized_body (&loop, &body);
3761 gfc_copy_loopinfo_to_se (&cse, &loop);
3762 gfc_copy_loopinfo_to_se (&tdse, &loop);
3763 gfc_copy_loopinfo_to_se (&tsse, &loop);
3769 gfc_copy_loopinfo_to_se (&edse, &loop);
3770 gfc_copy_loopinfo_to_se (&esse, &loop);
3775 gfc_conv_expr (&cse, cond);
3776 gfc_add_block_to_block (&body, &cse.pre);
3779 gfc_conv_expr (&tsse, tsrc);
3780 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3782 gfc_conv_tmp_array_ref (&tdse);
3783 gfc_advance_se_ss_chain (&tdse);
3786 gfc_conv_expr (&tdse, tdst);
3790 gfc_conv_expr (&esse, esrc);
3791 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3793 gfc_conv_tmp_array_ref (&edse);
3794 gfc_advance_se_ss_chain (&edse);
3797 gfc_conv_expr (&edse, edst);
/* One conditional per element: cond ? then-assign : else-assign.  */
3800 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3801 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3802 : build_empty_stmt ();
3803 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3804 gfc_add_expr_to_block (&body, tmp);
3805 gfc_add_block_to_block (&body, &cse.post);
3807 gfc_trans_scalarizing_loops (&loop, &body);
3808 gfc_add_block_to_block (&block, &loop.pre);
3809 gfc_add_block_to_block (&block, &loop.post);
3810 gfc_cleanup_loop (&loop);
3812 return gfc_finish_block (&block);
3815 /* As the WHERE or WHERE construct statement can be nested, we call
3816 gfc_trans_where_2 to do the translation, and pass the initial
3817 NULL values for both the control mask and the pending control mask. */
3820 gfc_trans_where (gfc_code * code)
/* Entry point for WHERE.  First try the fused single-loop fast path
   (gfc_trans_where_3) for simple dependence-free one- or two-clause
   forms; otherwise fall back to the general mask-temporary translation
   (gfc_trans_where_2) with NULL initial control and pending masks.
   NOTE(review): interior lines are missing from this excerpt; visible
   tokens are preserved verbatim.  */
3826 cblock = code->block;
3828 && cblock->next->op == EXEC_ASSIGN
3829 && !cblock->next->next)
3831 eblock = cblock->block;
3834 /* A simple "WHERE (cond) x = y" statement or block is
3835 dependence free if cond is not dependent upon writing x,
3836 and the source y is unaffected by the destination x. */
3837 if (!gfc_check_dependency (cblock->next->expr,
3839 && !gfc_check_dependency (cblock->next->expr,
3840 cblock->next->expr2, 0))
3841 return gfc_trans_where_3 (cblock, NULL);
3843 else if (!eblock->expr
3846 && eblock->next->op == EXEC_ASSIGN
3847 && !eblock->next->next)
3849 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3850 block is dependence free if cond is not dependent on writes
3851 to x1 and x2, y1 is not dependent on writes to x2, and y2
3852 is not dependent on writes to x1, and both y's are not
3853 dependent upon their own x's. In addition to this, the
3854 final two dependency checks below exclude all but the same
3855 array reference if the where and elswhere destinations
3856 are the same. In short, this is VERY conservative and this
3857 is needed because the two loops, required by the standard
3858 are coalesced in gfc_trans_where_3. */
3859 if (!gfc_check_dependency(cblock->next->expr,
3861 && !gfc_check_dependency(eblock->next->expr,
3863 && !gfc_check_dependency(cblock->next->expr,
3864 eblock->next->expr2, 1)
3865 && !gfc_check_dependency(eblock->next->expr,
3866 cblock->next->expr2, 1)
3867 && !gfc_check_dependency(cblock->next->expr,
3868 cblock->next->expr2, 1)
3869 && !gfc_check_dependency(eblock->next->expr,
3870 eblock->next->expr2, 1)
3871 && !gfc_check_dependency(cblock->next->expr,
3872 eblock->next->expr, 0)
3873 && !gfc_check_dependency(eblock->next->expr,
3874 cblock->next->expr, 0))
3875 return gfc_trans_where_3 (cblock, eblock);
/* General case: full mask-temporary translation.  */
3879 gfc_start_block (&block);
3881 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3883 return gfc_finish_block (&block);
3887 /* CYCLE a DO loop. The label decl has already been created by
3888 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
3889 node at the head of the loop. We must mark the label as used. */
3892 gfc_trans_cycle (gfc_code * code)
3896 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3897 TREE_USED (cycle_label) = 1;
3898 return build1_v (GOTO_EXPR, cycle_label);
3902 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3903 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3907 gfc_trans_exit (gfc_code * code)
3911 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3912 TREE_USED (exit_label) = 1;
3913 return build1_v (GOTO_EXPR, exit_label);
3917 /* Translate the ALLOCATE statement. */
/* NOTE(review): elided listing -- the return-type line, local declarations
   (block, se, al, expr, parm, ...) and several braces are missing from the
   visible text; comments annotate only the statements shown.  */
3920 gfc_trans_allocate (gfc_code * code)
/* Nothing to allocate: bail out (elided return follows).  */
3932 if (!code->ext.alloc_list)
3935 pstat = stat = error_label = tmp = NULL_TREE;
3937 gfc_start_block (&block);
3939 /* Either STAT= and/or ERRMSG is present. */
3940 if (code->expr || code->expr2)
/* STAT is declared INTEGER(4) by the library interface; pstat is its
   address, passed to the allocation helpers so they can record errors.  */
3942 tree gfc_int4_type_node = gfc_get_int_type (4);
3944 stat = gfc_create_var (gfc_int4_type_node, "stat");
3945 pstat = gfc_build_addr_expr (NULL_TREE, stat);
/* Label jumped to as soon as any single allocation fails.  */
3947 error_label = gfc_build_label_decl (NULL_TREE);
3948 TREE_USED (error_label) = 1;
/* Translate each object in the ALLOCATE list in turn.  */
3951 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3955 gfc_init_se (&se, NULL);
3956 gfc_start_block (&se.pre);
/* We want the pointer/descriptor itself, not the pointed-to value.  */
3958 se.want_pointer = 1;
3959 se.descriptor_only = 1;
3960 gfc_conv_expr (&se, expr);
/* gfc_array_allocate handles array objects and returns nonzero when it
   did; otherwise we allocate a scalar here.  */
3962 if (!gfc_array_allocate (&se, expr, pstat))
3964 /* A scalar or derived type. */
3965 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
/* Deferred-length CHARACTER has no static size; use the evaluated
   string length instead.  */
3967 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3968 tmp = se.string_length;
/* Allocate, then store the returned address into the pointer.  */
3970 tmp = gfc_allocate_with_status (&se.pre, tmp, pstat);
3971 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
3972 fold_convert (TREE_TYPE (se.expr), tmp));
3973 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT=/ERRMSG=, stop allocating further objects on the first
   failure: if (stat != 0) goto error_label;  */
3975 if (code->expr || code->expr2)
3977 tmp = build1_v (GOTO_EXPR, error_label);
3978 parm = fold_build2 (NE_EXPR, boolean_type_node,
3979 stat, build_int_cst (TREE_TYPE (stat), 0));
3980 tmp = fold_build3 (COND_EXPR, void_type_node,
3981 parm, tmp, build_empty_stmt ());
3982 gfc_add_expr_to_block (&se.pre, tmp);
/* Derived types with allocatable components: NULL-initialize those
   components so later (re)allocation/deallocation is well defined.  */
3985 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3987 tmp = build_fold_indirect_ref (se.expr);
3988 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3989 gfc_add_expr_to_block (&se.pre, tmp);
3994 tmp = gfc_finish_block (&se.pre);
3995 gfc_add_expr_to_block (&block, tmp);
/* Error landing pad (presumably emitted only when STAT=/ERRMSG= is
   present -- the guarding condition is elided).  */
4001 tmp = build1_v (LABEL_EXPR, error_label);
4002 gfc_add_expr_to_block (&block, tmp);
/* STAT= : copy the library status value into the user variable.  */
4004 gfc_init_se (&se, NULL);
4005 gfc_conv_expr_lhs (&se, code->expr);
4006 tmp = convert (TREE_TYPE (se.expr), stat);
4007 gfc_add_modify (&block, se.expr, tmp);
/* ERRMSG= : copy a fixed diagnostic string into the user variable,
   but only when stat is nonzero.  */
4013 /* A better error message may be possible, but not required. */
4014 const char *msg = "Attempt to allocate an allocated object";
4015 tree errmsg, slen, dlen;
4017 gfc_init_se (&se, NULL);
4018 gfc_conv_expr_lhs (&se, code->expr2);
4020 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4022 gfc_add_modify (&block, errmsg,
4023 gfc_build_addr_expr (pchar_type_node,
4024 gfc_build_localized_cstring_const (msg)));
/* Copy min (strlen (msg), len (errmsg-var)) bytes -- never overrun the
   user's character variable.  */
4026 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4027 dlen = gfc_get_expr_charlen (code->expr2);
4028 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4030 dlen = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
4031 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
/* Guard the memcpy: only executed if stat != 0.  */
4033 tmp = fold_build2 (NE_EXPR, boolean_type_node, stat,
4034 build_int_cst (TREE_TYPE (stat), 0));
4036 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt ());
4038 gfc_add_expr_to_block (&block, tmp);
4041 return gfc_finish_block (&block);
4045 /* Translate a DEALLOCATE statement. */
/* NOTE(review): elided listing -- the return-type line, some locals and
   several braces are not visible; comments annotate only the statements
   shown.  */
4048 gfc_trans_deallocate (gfc_code *code)
4053 tree apstat, astat, pstat, stat, tmp;
4056 pstat = apstat = stat = astat = tmp = NULL_TREE;
4058 gfc_start_block (&block);
4060 /* Count the number of failed deallocations. If deallocate() was
4061 called with STAT= , then set STAT to the count. If deallocate
4062 was called with ERRMSG, then set ERRMSG to a string. */
4063 if (code->expr || code->expr2)
4065 tree gfc_int4_type_node = gfc_get_int_type (4);
/* stat holds the status of the most recent single deallocation.  */
4067 stat = gfc_create_var (gfc_int4_type_node, "stat");
4068 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4070 /* Running total of possible deallocation failures. */
4071 astat = gfc_create_var (gfc_int4_type_node, "astat");
4072 apstat = gfc_build_addr_expr (NULL_TREE, astat);
4074 /* Initialize astat to 0. */
4075 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
/* Translate each object in the DEALLOCATE list in turn.  */
4078 for (al = code->ext.alloc_list; al != NULL; al = al->next)
4081 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4083 gfc_init_se (&se, NULL);
4084 gfc_start_block (&se.pre);
/* We need the pointer/descriptor itself, not the dereferenced value.  */
4086 se.want_pointer = 1;
4087 se.descriptor_only = 1;
4088 gfc_conv_expr (&se, expr);
/* Derived type with allocatable components: free those components
   first, unless the object is reached through a pointer (component or
   symbol), whose target we must not tear apart.  */
4090 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
/* Find the last component reference, if any.  */
4093 gfc_ref *last = NULL;
4094 for (ref = expr->ref; ref; ref = ref->next)
4095 if (ref->type == REF_COMPONENT)
4098 /* Do not deallocate the components of a derived type
4099 ultimate pointer component. */
4100 if (!(last && last->u.c.component->attr.pointer)
4101 && !(!last && expr->symtree->n.sym->attr.pointer))
4103 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
4105 gfc_add_expr_to_block (&se.pre, tmp);
/* Arrays and scalars are freed by different library helpers; both take
   pstat so a failure is recorded rather than aborting (the selecting
   if/else structure is elided here).  */
4110 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4113 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4114 gfc_add_expr_to_block (&se.pre, tmp);
/* Zero the pointer after freeing so it reads as disassociated.  */
4116 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4117 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4120 gfc_add_expr_to_block (&se.pre, tmp);
4122 /* Keep track of the number of failed deallocations by adding stat
4123 of the last deallocation to the running total. */
4124 if (code->expr || code->expr2)
4126 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4127 gfc_add_modify (&se.pre, astat, apstat);
4130 tmp = gfc_finish_block (&se.pre);
4131 gfc_add_expr_to_block (&block, tmp);
/* STAT= : copy the accumulated failure count into the user variable.  */
4138 gfc_init_se (&se, NULL);
4139 gfc_conv_expr_lhs (&se, code->expr);
4140 tmp = convert (TREE_TYPE (se.expr), astat);
4141 gfc_add_modify (&block, se.expr, tmp);
/* ERRMSG= : copy a fixed diagnostic into the user variable, guarded by
   astat != 0 below.  */
4147 /* A better error message may be possible, but not required. */
4148 const char *msg = "Attempt to deallocate an unallocated object";
4149 tree errmsg, slen, dlen;
4151 gfc_init_se (&se, NULL);
4152 gfc_conv_expr_lhs (&se, code->expr2);
4154 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4156 gfc_add_modify (&block, errmsg,
4157 gfc_build_addr_expr (pchar_type_node,
4158 gfc_build_localized_cstring_const (msg)));
/* Copy min (strlen (msg), len (errmsg-var)) bytes so the user's
   character variable is never overrun.  */
4160 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4161 dlen = gfc_get_expr_charlen (code->expr2);
4162 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4164 dlen = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
4165 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
/* Guard: only copy the message when at least one deallocation failed.  */
4167 tmp = fold_build2 (NE_EXPR, boolean_type_node, astat,
4168 build_int_cst (TREE_TYPE (astat), 0));
4170 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt ());
4172 gfc_add_expr_to_block (&block, tmp);
4175 return gfc_finish_block (&block);