1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
40 #include "dependency.h"
/* NOTE(review): this numbered listing has gaps; the struct bodies below are
   only partially visible, so their remaining fields are unknown from here.  */
42 typedef struct iter_info
/* Link to the next iterator record in the chain.  */
48 struct iter_info *next;
52 typedef struct forall_info
/* Link to the enclosing (outer) FORALL nesting level, if any.  */
59 struct forall_info *prev_nest;
/* Forward declaration; the definition appears later in the file.  */
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
66 /* Translate a F95 label number to a LABEL_EXPR. */
/* Emit a LABEL_EXPR for the statement label attached to CODE.
   (Function braces are elided in this numbered listing.)  */
69 gfc_trans_label_here (gfc_code * code)
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables. For variables in common blocks this
/* Resolve an ASSIGN'ed variable to the decl carrying its auxiliary label
   data, stripping COMPONENT_REF / INDIRECT_REF wrappers so callers can
   apply GFC_DECL_STRING_LEN / GFC_DECL_ASSIGN_ADDR to se->expr.  */
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only symbols that appeared in an ASSIGN statement are valid here.  */
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deals with variable in common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deals with dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
92 /* Translate a label assignment statement. */
/* Translate "ASSIGN label TO var": store the label's address and an
   auxiliary length into the variable's assign decls.  (Interior lines,
   including local declarations and braces, are elided in this listing.)  */
95 gfc_trans_label_assign (gfc_code * code)
104 /* Start a new block. */
105 gfc_init_se (&se, NULL);
106 gfc_start_block (&se.pre);
107 gfc_conv_label_variable (&se, code->expr1);
109 len = GFC_DECL_STRING_LEN (se.expr);
110 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
112 label_tree = gfc_get_label_decl (code->label1);
/* A branch-target label stores length -1 and the label's address.  */
114 if (code->label1->defined == ST_LABEL_TARGET)
116 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
117 len_tree = integer_minus_one_node;
/* NOTE(review): the `else` branch marker is elided; the lines below handle
   a FORMAT label by storing the format string and its length.  */
121 gfc_expr *format = code->label1->format;
123 label_len = format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
125 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
126 format->value.character.string);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
/* Record length and address in the auxiliary decls.  */
130 gfc_add_modify (&se.pre, len, len_tree);
131 gfc_add_modify (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
136 /* Translate a GOTO statement. */
/* Translate GOTO, including assigned GOTO.  A plain GOTO becomes a single
   GOTO_EXPR; an assigned GOTO checks at runtime that the variable holds a
   target label and then jumps indirectly.  (Interior declarations, braces
   and the list-walking loop header are elided in this listing.)  */
139 gfc_trans_goto (gfc_code * code)
141 locus loc = code->loc;
/* Ordinary (non-assigned) GOTO: jump straight to the label.  */
147 if (code->label1 != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
/* Assigned GOTO from here on.  */
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr1);
/* The stored length is -1 only for target labels (see
   gfc_trans_label_assign); anything else triggers the runtime error.  */
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
158 "Assigned label is not a target label");
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
/* NOTE(review): elided lines presumably distinguish the no-label-list case;
   this path jumps indirectly through the stored address.  */
165 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
166 gfc_add_expr_to_block (&se.pre, target);
167 return gfc_finish_block (&se.pre);
170 /* Check the label list. */
/* NOTE(review): the do-loop header over the label list is elided; see the
   `while (code != NULL)` terminator below.  Each iteration compares the
   stored address to one permitted label and jumps on a match.  */
173 target = gfc_get_label_decl (code->label1);
174 tmp = gfc_build_addr_expr (pvoid_type_node, target);
175 tmp = fold_build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
176 tmp = build3_v (COND_EXPR, tmp,
177 fold_build1 (GOTO_EXPR, void_type_node, target),
178 build_empty_stmt ());
179 gfc_add_expr_to_block (&se.pre, tmp);
182 while (code != NULL);
/* No label matched: unconditional runtime error.  */
183 gfc_trans_runtime_check (true, false, boolean_true_node, &se.pre, &loc,
184 "Assigned label is not in the list");
186 return gfc_finish_block (&se.pre);
190 /* Translate an ENTRY statement. Just adds a label for this entry point. */
/* Translate ENTRY: emit the label decl that marks this entry point.  */
192 gfc_trans_entry (gfc_code * code)
194 return build1_v (LABEL_EXPR, code->ext.entry->label);
198 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
199 elemental subroutines. Make temporaries for output arguments if any such
200 dependencies are found. Output arguments are chosen because internal_unpack
201 can be used, as is, to copy the result back to the variable. */
/* For an elemental subroutine call, detect dependencies between actual
   arguments and create array temporaries for the affected output
   arguments; results are copied back via internal_unpack in se->post.
   (Many interior lines — declarations, braces, early return — are elided
   in this numbered listing.)  */
203 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
204 gfc_symbol * sym, gfc_actual_arglist * arg,
205 gfc_dep_check check_variable)
207 gfc_actual_arglist *arg0;
209 gfc_formal_arglist *formal;
210 gfc_loopinfo tmp_loop;
/* Nothing to do for a purely scalar call (no scalarization chain).  */
221 if (loopse->ss == NULL)
226 formal = sym->formal;
228 /* Loop over all the arguments testing for dependencies. */
229 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
235 /* Obtain the info structure for the current argument. */
237 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
241 info = &ss->data.info;
245 /* If there is a dependency, create a temporary and use it
246 instead of the variable. */
247 fsym = formal ? formal->sym : NULL;
/* NOTE(review): line 249 is elided here; it presumably guards fsym against
   NULL before the dereferences below — confirm against the full source.  */
248 if (e->expr_type == EXPR_VARIABLE
250 && fsym->attr.intent != INTENT_IN
251 && gfc_check_fncall_dependency (e, fsym->attr.intent,
252 sym, arg0, check_variable))
254 tree initial, temptype;
255 stmtblock_t temp_post;
257 /* Make a local loopinfo for the temporary creation, so that
258 none of the other ss->info's have to be renormalized. */
259 gfc_init_loopinfo (&tmp_loop);
260 for (n = 0; n < info->dimen; n++)
262 tmp_loop.to[n] = loopse->loop->to[n];
263 tmp_loop.from[n] = loopse->loop->from[n];
264 tmp_loop.order[n] = loopse->loop->order[n];
267 /* Obtain the argument descriptor for unpacking. */
268 gfc_init_se (&parmse, NULL);
269 parmse.want_pointer = 1;
270 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
271 gfc_add_block_to_block (&se->pre, &parmse.pre);
273 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
274 initialize the array temporary with a copy of the values. */
275 if (fsym->attr.intent == INTENT_INOUT
276 || (fsym->ts.type ==BT_DERIVED
277 && fsym->attr.intent == INTENT_OUT))
278 initial = parmse.expr;
282 /* Find the type of the temporary to create; we don't use the type
283 of e itself as this breaks for subcomponent-references in e (where
284 the type of e is that of the final reference, but parmse.expr's
285 type corresponds to the full derived-type). */
286 /* TODO: Fix this somehow so we don't need a temporary of the whole
287 array but instead only the components referenced. */
288 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
289 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
290 temptype = TREE_TYPE (temptype);
291 temptype = gfc_get_element_type (temptype);
293 /* Generate the temporary. Cleaning up the temporary should be the
294 very last thing done, so we add the code to a new block and add it
295 to se->post as last instructions. */
296 size = gfc_create_var (gfc_array_index_type, NULL);
297 data = gfc_create_var (pvoid_type_node, NULL);
298 gfc_init_block (&temp_post);
/* NOTE(review): trailing arguments of this call (lines 301-303) are
   elided in this listing.  */
299 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
300 &tmp_loop, info, temptype,
304 gfc_add_modify (&se->pre, size, tmp);
305 tmp = fold_convert (pvoid_type_node, info->data);
306 gfc_add_modify (&se->pre, data, tmp);
308 /* Calculate the offset for the temporary. */
309 offset = gfc_index_zero_node;
/* offset -= from[n] * stride[n] for each dimension (second operand of
   the MINUS_EXPR is on an elided line).  */
310 for (n = 0; n < info->dimen; n++)
312 tmp = gfc_conv_descriptor_stride (info->descriptor,
314 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
315 loopse->loop->from[n], tmp);
316 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
319 info->offset = gfc_create_var (gfc_array_index_type, NULL);
320 gfc_add_modify (&se->pre, info->offset, offset);
322 /* Copy the result back using unpack. */
323 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
324 gfc_add_expr_to_block (&se->post, tmp);
326 /* parmse.pre is already added above. */
327 gfc_add_block_to_block (&se->post, &parmse.post);
328 gfc_add_block_to_block (&se->post, &temp_post);
334 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
/* Translate CALL.  A plain call emits one CALL_EXPR (with SELECT handling
   for alternate returns); an elemental call with array arguments is
   wrapped in a scalarization loop, with dependency temporaries and an
   optional mask (used by WHERE/FORALL via MASK/COUNT1/INVERT).
   (Interior declarations, braces and else-branches are elided in this
   numbered listing.)  */
337 gfc_trans_call (gfc_code * code, bool dependency_check,
338 tree mask, tree count1, bool invert)
342 int has_alternate_specifier;
343 gfc_dep_check check_variable;
344 tree index = NULL_TREE;
345 tree maskexpr = NULL_TREE;
348 /* A CALL starts a new block because the actual arguments may have to
349 be evaluated first. */
350 gfc_init_se (&se, NULL);
351 gfc_start_block (&se.pre);
353 gcc_assert (code->resolved_sym);
355 ss = gfc_ss_terminator;
356 if (code->resolved_sym->attr.elemental)
357 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
359 /* Is not an elemental subroutine call with array valued arguments. */
360 if (ss == gfc_ss_terminator)
363 /* Translate the call. */
364 has_alternate_specifier
365 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
366 code->expr1, NULL_TREE);
368 /* A subroutine without side-effect, by definition, does nothing! */
369 TREE_SIDE_EFFECTS (se.expr) = 1;
371 /* Chain the pieces together and return the block. */
/* Alternate return: store the specifier value into the variable that the
   following (front-end generated) SELECT statement switches on.  */
372 if (has_alternate_specifier)
374 gfc_code *select_code;
376 select_code = code->next;
377 gcc_assert(select_code->op == EXEC_SELECT);
378 sym = select_code->expr1->symtree->n.sym;
379 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
380 if (sym->backend_decl == NULL)
381 sym->backend_decl = gfc_get_symbol_decl (sym);
382 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
385 gfc_add_expr_to_block (&se.pre, se.expr);
387 gfc_add_block_to_block (&se.pre, &se.post);
/* NOTE(review): the `else` branch marker is elided; the code below is the
   elemental (scalarized) path.  */
392 /* An elemental subroutine call with array valued arguments has
400 /* gfc_walk_elemental_function_args renders the ss chain in the
401 reverse order to the actual argument order. */
402 ss = gfc_reverse_ss (ss);
404 /* Initialize the loop. */
405 gfc_init_se (&loopse, NULL);
406 gfc_init_loopinfo (&loop);
407 gfc_add_ss_to_loop (&loop, ss);
409 gfc_conv_ss_startstride (&loop);
410 /* TODO: gfc_conv_loop_setup generates a temporary for vector
411 subscripts. This could be prevented in the elemental case
412 as temporaries are handled separatedly
413 (below in gfc_conv_elemental_dependencies). */
414 gfc_conv_loop_setup (&loop, &code->expr1->where);
415 gfc_mark_ss_chain_used (ss, 1);
417 /* Convert the arguments, checking for dependencies. */
418 gfc_copy_loopinfo_to_se (&loopse, &loop);
421 /* For operator assignment, do dependency checking. */
422 if (dependency_check)
423 check_variable = ELEM_CHECK_VARIABLE;
425 check_variable = ELEM_DONT_CHECK_VARIABLE;
427 gfc_init_se (&depse, NULL);
428 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
429 code->ext.actual, check_variable);
431 gfc_add_block_to_block (&loop.pre, &depse.pre);
432 gfc_add_block_to_block (&loop.post, &depse.post);
434 /* Generate the loop body. */
435 gfc_start_scalarized_body (&loop, &body);
436 gfc_init_block (&block);
440 /* Form the mask expression according to the mask. */
/* With INVERT the mask sense is flipped (second fold operand elided).  */
442 maskexpr = gfc_build_array_ref (mask, index, NULL);
444 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
448 /* Add the subroutine call to the block. */
449 gfc_conv_procedure_call (&loopse, code->resolved_sym,
450 code->ext.actual, code->expr1,
/* Masked path: only call where the mask is true, then advance COUNT1.  */
455 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
456 build_empty_stmt ());
457 gfc_add_expr_to_block (&loopse.pre, tmp);
458 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
459 count1, gfc_index_one_node);
460 gfc_add_modify (&loopse.pre, count1, tmp);
/* Unmasked path (else branch marker elided): emit the call directly.  */
463 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
465 gfc_add_block_to_block (&block, &loopse.pre);
466 gfc_add_block_to_block (&block, &loopse.post);
468 /* Finish up the loop block and the loop. */
469 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
470 gfc_trans_scalarizing_loops (&loop, &body);
471 gfc_add_block_to_block (&se.pre, &loop.pre);
472 gfc_add_block_to_block (&se.pre, &loop.post);
473 gfc_add_block_to_block (&se.pre, &se.post);
474 gfc_cleanup_loop (&loop);
477 return gfc_finish_block (&se.pre);
481 /* Translate the RETURN statement. */
/* Translate RETURN.  With an alternate-return expression the value is
   stored into the fake result decl before jumping to the return label;
   otherwise a bare jump to the return label is emitted.  (Interior
   declarations and braces are elided in this numbered listing.)  */
484 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
492 /* If code->expr is not NULL, this return statement must appear
493 in a subroutine and current_fake_result_decl has already
496 result = gfc_get_fake_result_decl (NULL, 0);
/* No fake result decl: warn and degrade to a plain jump.  */
499 gfc_warning ("An alternate return at %L without a * dummy argument",
500 &code->expr1->where);
501 return build1_v (GOTO_EXPR, gfc_get_return_label ());
504 /* Start a new block for this statement. */
505 gfc_init_se (&se, NULL);
506 gfc_start_block (&se.pre);
508 gfc_conv_expr (&se, code->expr1);
/* Store the alternate-return value, then jump to the return label.  */
510 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
511 fold_convert (TREE_TYPE (result), se.expr));
512 gfc_add_expr_to_block (&se.pre, tmp);
514 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
515 gfc_add_expr_to_block (&se.pre, tmp);
516 gfc_add_block_to_block (&se.pre, &se.post);
517 return gfc_finish_block (&se.pre);
/* No expression at all: simple jump to the return label.  */
520 return build1_v (GOTO_EXPR, gfc_get_return_label ());
524 /* Translate the PAUSE statement. We have to translate this statement
525 to a runtime library call. */
/* Translate PAUSE as a libgfortran runtime call: numeric form calls
   _gfortran_pause_numeric, string form calls _gfortran_pause_string.
   (Interior declarations and braces are elided in this listing.)  */
528 gfc_trans_pause (gfc_code * code)
530 tree gfc_int4_type_node = gfc_get_int_type (4);
534 /* Start a new block for this statement. */
535 gfc_init_se (&se, NULL);
536 gfc_start_block (&se.pre);
/* No expression: pass the numeric stop code.  */
539 if (code->expr1 == NULL)
541 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
542 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
/* Otherwise (else marker elided) pass the character expression by
   reference along with its length.  */
546 gfc_conv_expr_reference (&se, code->expr1);
547 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
548 se.expr, se.string_length);
551 gfc_add_expr_to_block (&se.pre, tmp);
553 gfc_add_block_to_block (&se.pre, &se.post);
555 return gfc_finish_block (&se.pre);
559 /* Translate the STOP statement. We have to translate this statement
560 to a runtime library call. */
/* Translate STOP as a libgfortran runtime call; structure mirrors
   gfc_trans_pause with the stop_numeric / stop_string entry points.
   (Interior declarations and braces are elided in this listing.)  */
563 gfc_trans_stop (gfc_code * code)
565 tree gfc_int4_type_node = gfc_get_int_type (4);
569 /* Start a new block for this statement. */
570 gfc_init_se (&se, NULL);
571 gfc_start_block (&se.pre);
/* No expression: pass the numeric stop code.  */
574 if (code->expr1 == NULL)
576 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
577 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
/* Otherwise (else marker elided) pass the character expression by
   reference along with its length.  */
581 gfc_conv_expr_reference (&se, code->expr1);
582 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
583 se.expr, se.string_length);
586 gfc_add_expr_to_block (&se.pre, tmp);
588 gfc_add_block_to_block (&se.pre, &se.post);
590 return gfc_finish_block (&se.pre);
594 /* Generate GENERIC for the IF construct. This function also deals with
595 the simple IF statement, because the front end translates the IF
596 statement into an IF construct.
628 where COND_S is the simplified version of the predicate. PRE_COND_S
629 are the pre side-effects produced by the translation of the
631 We need to build the chain recursively otherwise we run into
632 problems with folding incomplete statements. */
/* Recursively translate one arm of an IF construct into a COND_EXPR chain;
   the recursion on code->block builds the ELSE/ELSE IF tail.  (Interior
   declarations, braces and conditions are elided in this listing.)  */
635 gfc_trans_if_1 (gfc_code * code)
640 /* Check for an unconditional ELSE clause. */
642 return gfc_trans_code (code->next);
644 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
645 gfc_init_se (&if_se, NULL);
646 gfc_start_block (&if_se.pre);
648 /* Calculate the IF condition expression. */
649 gfc_conv_expr_val (&if_se, code->expr1);
651 /* Translate the THEN clause. */
652 stmt = gfc_trans_code (code->next);
654 /* Translate the ELSE clause. */
656 elsestmt = gfc_trans_if_1 (code->block);
/* No ELSE part (guard elided): use an empty statement.  */
658 elsestmt = build_empty_stmt ();
660 /* Build the condition expression and add it to the condition block. */
661 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
663 gfc_add_expr_to_block (&if_se.pre, stmt);
665 /* Finish off this statement. */
666 return gfc_finish_block (&if_se.pre);
/* Entry point for IF translation; delegates to gfc_trans_if_1.  */
670 gfc_trans_if (gfc_code * code)
672 /* Ignore the top EXEC_IF, it only announces an IF construct. The
673 actual code we must translate is in code->block. */
675 return gfc_trans_if_1 (code->block);
679 /* Translate an arithmetic IF expression.
681 IF (cond) label1, label2, label3 translates to
693 An optimized version can be generated in case of equal labels.
694 E.g., if label1 is equal to label2, we can translate it to
/* Translate arithmetic IF (cond) label1, label2, label3 into nested
   COND_EXPRs comparing COND against zero, collapsing equal labels into
   fewer branches.  (Interior declarations, braces and else markers are
   elided in this numbered listing.)  */
703 gfc_trans_arithmetic_if (gfc_code * code)
711 /* Start a new block. */
712 gfc_init_se (&se, NULL);
713 gfc_start_block (&se.pre);
715 /* Pre-evaluate COND. */
716 gfc_conv_expr_val (&se, code->expr1);
717 se.expr = gfc_evaluate_now (se.expr, &se.pre);
719 /* Build something to compare with. */
720 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
722 if (code->label1->value != code->label2->value)
724 /* If (cond < 0) take branch1 else take branch2.
725 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
726 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
727 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
/* label1 == label3 collapses < and > into a NE comparison.  */
729 if (code->label1->value != code->label3->value)
730 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
732 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
734 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
/* label1 == label2 (else marker elided): the first test degenerates to
   an unconditional jump candidate.  */
737 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
739 if (code->label1->value != code->label3->value
740 && code->label2->value != code->label3->value)
742 /* if (cond <= 0) take branch1 else take branch2. */
743 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
744 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
745 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
748 /* Append the COND_EXPR to the evaluation of COND, and return. */
749 gfc_add_expr_to_block (&se.pre, branch1);
750 return gfc_finish_block (&se.pre);
754 /* Translate the simple DO construct. This is where the loop variable has
755 integer type and step +-1. We can't use this in the general case
756 because integer overflow and floating point errors could give incorrect
758 We translate a do loop from:
760 DO dovar = from, to, step
766 [Evaluate loop bounds and step]
768 if ((step > 0) ? (dovar <= to) : (dovar => to))
774 cond = (dovar == to);
776 if (cond) goto end_label;
781 This helps the optimizers by avoiding the extra induction variable
782 used in the general case. */
/* Translate the simple DO form (integer loop variable, step +-1): the
   exit test `dovar == to` is evaluated before the increment, avoiding a
   separate iteration counter.  Caller supplies the evaluated bounds.
   (Interior declarations and braces are elided in this listing.)  */
785 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
786 tree from, tree to, tree step)
792 tree saved_dovar = NULL;
796 type = TREE_TYPE (dovar);
798 /* Initialize the DO variable: dovar = from. */
799 gfc_add_modify (pblock, dovar, from);
801 /* Save value for do-tinkering checking. */
802 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
804 saved_dovar = gfc_create_var (type, ".saved_dovar");
805 gfc_add_modify (pblock, saved_dovar, dovar);
808 /* Cycle and exit statements are implemented with gotos. */
809 cycle_label = gfc_build_label_decl (NULL_TREE);
810 exit_label = gfc_build_label_decl (NULL_TREE);
812 /* Put the labels where they can be found later. See gfc_trans_do(). */
813 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
816 gfc_start_block (&body);
818 /* Main loop body. */
819 tmp = gfc_trans_code (code->block->next);
820 gfc_add_expr_to_block (&body, tmp);
822 /* Label for cycle statements (if needed). */
823 if (TREE_USED (cycle_label))
825 tmp = build1_v (LABEL_EXPR, cycle_label);
826 gfc_add_expr_to_block (&body, tmp);
829 /* Check whether someone has modified the loop variable. */
830 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
832 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
833 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
834 "Loop variable has been modified");
837 /* Evaluate the loop condition. */
/* Compare BEFORE incrementing, so `to` can be the type's extreme value
   without the increment overflowing past it.  */
838 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
839 cond = gfc_evaluate_now (cond, &body);
841 /* Increment the loop variable. */
842 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
843 gfc_add_modify (&body, dovar, tmp);
845 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
846 gfc_add_modify (&body, saved_dovar, dovar);
/* Conditional jump out of the LOOP_EXPR when the final value was hit.  */
849 tmp = build1_v (GOTO_EXPR, exit_label);
850 TREE_USED (exit_label) = 1;
851 tmp = fold_build3 (COND_EXPR, void_type_node,
852 cond, tmp, build_empty_stmt ());
853 gfc_add_expr_to_block (&body, tmp);
855 /* Finish the loop body. */
856 tmp = gfc_finish_block (&body);
857 tmp = build1_v (LOOP_EXPR, tmp);
859 /* Only execute the loop if the number of iterations is positive. */
/* STEP is a constant +-1 here, so its sign picks the guard direction
   (the else marker before the GE_EXPR line is elided).  */
860 if (tree_int_cst_sgn (step) > 0)
861 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
863 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
864 tmp = fold_build3 (COND_EXPR, void_type_node,
865 cond, tmp, build_empty_stmt ());
866 gfc_add_expr_to_block (pblock, tmp);
868 /* Add the exit label. */
869 tmp = build1_v (LABEL_EXPR, exit_label);
870 gfc_add_expr_to_block (pblock, tmp);
872 return gfc_finish_block (pblock);
875 /* Translate the DO construct. This obviously is one of the most
876 important ones to get right with any compiler, but especially
879 We special case some loop forms as described in gfc_trans_simple_do.
880 For other cases we implement them with a separate loop count,
881 as described in the standard.
883 We translate a do loop from:
885 DO dovar = from, to, step
891 [evaluate loop bounds and step]
892 empty = (step > 0 ? to < from : to > from);
893 countm1 = (to - from) / step;
895 if (empty) goto exit_label;
901 if (countm1 ==0) goto exit_label;
906 countm1 is an unsigned integer. It is equal to the loop count minus one,
907 because the loop count itself can overflow. */
/* Translate the general DO construct: evaluate bounds and step once,
   dispatch simple +-1 integer loops to gfc_trans_simple_do, otherwise
   drive the loop by an unsigned iteration counter countm1 (loop count
   minus one, so the count itself cannot overflow).  (Interior
   declarations, braces and else markers are elided in this listing.)  */
910 gfc_trans_do (gfc_code * code)
914 tree saved_dovar = NULL;
929 gfc_start_block (&block);
931 /* Evaluate all the expressions in the iterator. */
932 gfc_init_se (&se, NULL);
933 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
934 gfc_add_block_to_block (&block, &se.pre);
/* NOTE(review): the assignment of `dovar` from se.expr (line 935) is
   elided in this listing.  */
936 type = TREE_TYPE (dovar);
938 gfc_init_se (&se, NULL);
939 gfc_conv_expr_val (&se, code->ext.iterator->start);
940 gfc_add_block_to_block (&block, &se.pre);
941 from = gfc_evaluate_now (se.expr, &block);
943 gfc_init_se (&se, NULL);
944 gfc_conv_expr_val (&se, code->ext.iterator->end);
945 gfc_add_block_to_block (&block, &se.pre);
946 to = gfc_evaluate_now (se.expr, &block);
948 gfc_init_se (&se, NULL);
949 gfc_conv_expr_val (&se, code->ext.iterator->step);
950 gfc_add_block_to_block (&block, &se.pre);
951 step = gfc_evaluate_now (se.expr, &block);
/* Optional runtime diagnostic for a zero DO step.  */
953 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
955 tmp = fold_build2 (EQ_EXPR, boolean_type_node, step,
956 fold_convert (type, integer_zero_node));
957 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
958 "DO step value is zero");
961 /* Special case simple loops. */
962 if (TREE_CODE (type) == INTEGER_TYPE
963 && (integer_onep (step)
964 || tree_int_cst_equal (step, integer_minus_one_node)))
965 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
967 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
968 fold_convert (type, integer_zero_node));
/* The counter type: unsigned version of the loop type for integers,
   unsigned array index type otherwise (real-valued DO).  */
970 if (TREE_CODE (type) == INTEGER_TYPE)
971 utype = unsigned_type_for (type);
973 utype = unsigned_type_for (gfc_array_index_type);
974 countm1 = gfc_create_var (utype, "countm1");
976 /* Cycle and exit statements are implemented with gotos. */
977 cycle_label = gfc_build_label_decl (NULL_TREE);
978 exit_label = gfc_build_label_decl (NULL_TREE);
979 TREE_USED (exit_label) = 1;
981 /* Initialize the DO variable: dovar = from. */
982 gfc_add_modify (&block, dovar, from);
984 /* Save value for do-tinkering checking. */
985 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
987 saved_dovar = gfc_create_var (type, ".saved_dovar");
988 gfc_add_modify (&block, saved_dovar, dovar);
991 /* Initialize loop count and jump to exit label if the loop is empty.
992 This code is executed before we enter the loop body. We generate:
995 if (to < from) goto exit_label;
996 countm1 = (to - from) / step;
1000 if (to > from) goto exit_label;
1001 countm1 = (from - to) / -step;
1003 if (TREE_CODE (type) == INTEGER_TYPE)
/* Positive-step arm: guard against an empty loop, then compute
   countm1 in the unsigned type so the subtraction cannot trap.  */
1007 tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
1008 pos = fold_build3 (COND_EXPR, void_type_node, tmp,
1009 build1_v (GOTO_EXPR, exit_label),
1010 build_empty_stmt ());
1011 tmp = fold_build2 (MINUS_EXPR, type, to, from);
1012 tmp = fold_convert (utype, tmp);
1013 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
1014 fold_convert (utype, step));
1015 tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
1016 pos = build2 (COMPOUND_EXPR, void_type_node, pos, tmp);
/* Negative-step arm: mirror image, dividing by -step.  */
1018 tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
1019 neg = fold_build3 (COND_EXPR, void_type_node, tmp,
1020 build1_v (GOTO_EXPR, exit_label),
1021 build_empty_stmt ());
1022 tmp = fold_build2 (MINUS_EXPR, type, from, to);
1023 tmp = fold_convert (utype, tmp);
1024 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
1025 fold_convert (utype, fold_build1 (NEGATE_EXPR,
1027 tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
1028 neg = build2 (COMPOUND_EXPR, void_type_node, neg, tmp);
/* Select the arm at runtime by the sign of the step.  */
1030 tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
1031 gfc_add_expr_to_block (&block, tmp);
/* Non-integer (real) loop variable path (else marker elided).  */
1035 /* TODO: We could use the same width as the real type.
1036 This would probably cause more problems that it solves
1037 when we implement "long double" types. */
1039 tmp = fold_build2 (MINUS_EXPR, type, to, from);
1040 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
1041 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
1042 gfc_add_modify (&block, countm1, tmp);
1044 /* We need a special check for empty loops:
1045 empty = (step > 0 ? to < from : to > from); */
1046 tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
1047 fold_build2 (LT_EXPR, boolean_type_node, to, from),
1048 fold_build2 (GT_EXPR, boolean_type_node, to, from));
1049 /* If the loop is empty, go directly to the exit label. */
1050 tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
1051 build1_v (GOTO_EXPR, exit_label),
1052 build_empty_stmt ());
1053 gfc_add_expr_to_block (&block, tmp);
1057 gfc_start_block (&body);
1059 /* Put these labels where they can be found later. We put the
1060 labels in a TREE_LIST node (because TREE_CHAIN is already
1061 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
1062 label in TREE_VALUE (backend_decl). */
1064 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1066 /* Main loop body. */
1067 tmp = gfc_trans_code (code->block->next);
1068 gfc_add_expr_to_block (&body, tmp);
1070 /* Label for cycle statements (if needed). */
1071 if (TREE_USED (cycle_label))
1073 tmp = build1_v (LABEL_EXPR, cycle_label);
1074 gfc_add_expr_to_block (&body, tmp);
1077 /* Check whether someone has modified the loop variable. */
1078 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1080 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
1081 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1082 "Loop variable has been modified");
1085 /* Increment the loop variable. */
1086 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
1087 gfc_add_modify (&body, dovar, tmp);
1089 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1090 gfc_add_modify (&body, saved_dovar, dovar);
1092 /* End with the loop condition. Loop until countm1 == 0. */
1093 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
1094 build_int_cst (utype, 0));
1095 tmp = build1_v (GOTO_EXPR, exit_label);
1096 tmp = fold_build3 (COND_EXPR, void_type_node,
1097 cond, tmp, build_empty_stmt ());
1098 gfc_add_expr_to_block (&body, tmp);
1100 /* Decrement the loop count. */
1101 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
1102 gfc_add_modify (&body, countm1, tmp);
1104 /* End of loop body. */
1105 tmp = gfc_finish_block (&body);
1107 /* The for loop itself. */
1108 tmp = build1_v (LOOP_EXPR, tmp);
1109 gfc_add_expr_to_block (&block, tmp);
1111 /* Add the exit label. */
1112 tmp = build1_v (LABEL_EXPR, exit_label);
1113 gfc_add_expr_to_block (&block, tmp);
1115 return gfc_finish_block (&block);
1119 /* Translate the DO WHILE construct.
1132 if (! cond) goto exit_label;
1138 Because the evaluation of the exit condition `cond' may have side
1139 effects, we can't do much for empty loop bodies. The backend optimizers
1140 should be smart enough to eliminate any dead loops. */
/* Translate DO WHILE: a LOOP_EXPR whose body first tests the negated
   condition and jumps to the exit label when false.  (Interior
   declarations and braces are elided in this listing.)  */
1143 gfc_trans_do_while (gfc_code * code)
1151 /* Everything we build here is part of the loop body. */
1152 gfc_start_block (&block);
1154 /* Cycle and exit statements are implemented with gotos. */
1155 cycle_label = gfc_build_label_decl (NULL_TREE);
1156 exit_label = gfc_build_label_decl (NULL_TREE);
1158 /* Put the labels where they can be found later. See gfc_trans_do(). */
1159 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1161 /* Create a GIMPLE version of the exit condition. */
1162 gfc_init_se (&cond, NULL);
1163 gfc_conv_expr_val (&cond, code->expr1);
1164 gfc_add_block_to_block (&block, &cond.pre);
/* Negate once here so the COND_EXPR below can jump out directly.  */
1165 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1167 /* Build "IF (! cond) GOTO exit_label". */
1168 tmp = build1_v (GOTO_EXPR, exit_label);
1169 TREE_USED (exit_label) = 1;
1170 tmp = fold_build3 (COND_EXPR, void_type_node,
1171 cond.expr, tmp, build_empty_stmt ());
1172 gfc_add_expr_to_block (&block, tmp);
1174 /* The main body of the loop. */
1175 tmp = gfc_trans_code (code->block->next);
1176 gfc_add_expr_to_block (&block, tmp);
1178 /* Label for cycle statements (if needed). */
1179 if (TREE_USED (cycle_label))
1181 tmp = build1_v (LABEL_EXPR, cycle_label);
1182 gfc_add_expr_to_block (&block, tmp);
1185 /* End of loop body. */
1186 tmp = gfc_finish_block (&block);
1188 gfc_init_block (&block);
1189 /* Build the loop. */
1190 tmp = build1_v (LOOP_EXPR, tmp);
1191 gfc_add_expr_to_block (&block, tmp);
1193 /* Add the exit label. */
1194 tmp = build1_v (LABEL_EXPR, exit_label);
1195 gfc_add_expr_to_block (&block, tmp);
1197 return gfc_finish_block (&block);
1201 /* Translate the SELECT CASE construct for INTEGER case expressions,
1202 without killing all potential optimizations. The problem is that
1203 Fortran allows unbounded cases, but the back-end does not, so we
1204 need to intercept those before we enter the equivalent SWITCH_EXPR
1207 For example, we translate this,
1210 CASE (:100,101,105:115)
1220 to the GENERIC equivalent,
1224 case (minimum value for typeof(expr) ... 100:
1230 case 200 ... (maximum value for typeof(expr):
/* Translate SELECT CASE with an INTEGER selector into a SWITCH_EXPR,
   mapping Fortran's unbounded case ranges onto the type's MIN/MAX values
   as explained in the comment block above.  (Interior declarations,
   braces and some guards are elided in this numbered listing.)  */
1247 gfc_trans_integer_select (gfc_code * code)
1257 gfc_start_block (&block);
1259 /* Calculate the switch expression. */
1260 gfc_init_se (&se, NULL);
1261 gfc_conv_expr_val (&se, code->expr1);
1262 gfc_add_block_to_block (&block, &se.pre);
1264 end_label = gfc_build_label_decl (NULL_TREE);
1266 gfc_init_block (&body);
/* Walk each CASE block and each range within its case list.  */
1268 for (c = code->block; c; c = c->block)
1270 for (cp = c->ext.case_list; cp; cp = cp->next)
1275 /* Assume it's the default case. */
1276 low = high = NULL_TREE;
/* Guard for cp->low (elided) precedes this conversion.  */
1280 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1283 /* If there's only a lower bound, set the high bound to the
1284 maximum value of the case expression. */
1286 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1291 /* Three cases are possible here:
1293 1) There is no lower bound, e.g. CASE (:N).
1294 2) There is a lower bound .NE. high bound, that is
1295 a case range, e.g. CASE (N:M) where M>N (we make
1296 sure that M>N during type resolution).
1297 3) There is a lower bound, and it has the same value
1298 as the high bound, e.g. CASE (N:N). This is our
1299 internal representation of CASE(N).
1301 In the first and second case, we need to set a value for
1302 high. In the third case, we don't because the GCC middle
1303 end represents a single case value by just letting high be
1304 a NULL_TREE. We can't do that because we need to be able
1305 to represent unbounded cases. */
/* NOTE(review): the opening of this condition (lines 1306-1308) is
   elided; the visible part compares low against high.  */
1309 && mpz_cmp (cp->low->value.integer,
1310 cp->high->value.integer) != 0))
1311 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1314 /* Unbounded case. */
1316 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1319 /* Build a label. */
1320 label = gfc_build_label_decl (NULL_TREE);
1322 /* Add this case label.
1323 Add parameter 'label', make it match GCC backend. */
1324 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1326 gfc_add_expr_to_block (&body, tmp);
1329 /* Add the statements for this case. */
1330 tmp = gfc_trans_code (c->next);
1331 gfc_add_expr_to_block (&body, tmp);
1333 /* Break to the end of the construct. */
1334 tmp = build1_v (GOTO_EXPR, end_label);
1335 gfc_add_expr_to_block (&body, tmp);
/* Wrap the accumulated cases in the SWITCH_EXPR and terminate with
   the shared end label.  */
1338 tmp = gfc_finish_block (&body);
1339 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1340 gfc_add_expr_to_block (&block, tmp);
1342 tmp = build1_v (LABEL_EXPR, end_label);
1343 gfc_add_expr_to_block (&block, tmp);
1345 return gfc_finish_block (&block);
1349 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1351 There are only two cases possible here, even though the standard
1352 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1353 .FALSE., and DEFAULT.
1355 We never generate more than two blocks here. Instead, we always
1356 try to eliminate the DEFAULT case. This way, we can translate this
1357 kind of SELECT construct to a simple
1361 expression in GENERIC. */
1364 gfc_trans_logical_select (gfc_code * code)
1367 gfc_code *t, *f, *d;
1372 /* Assume we don't have any cases at all. */
1375 /* Now see which ones we actually do have. We can have at most two
1376 cases in a single case list: one for .TRUE. and one for .FALSE.
1377 The default case is always separate. If the cases for .TRUE. and
1378 .FALSE. are in the same case list, the block for that case list
1379 always executed, and we don't generate code a COND_EXPR. */
1380 for (c = code->block; c; c = c->block)
1382 for (cp = c->ext.case_list; cp; cp = cp->next)
1386 if (cp->low->value.logical == 0) /* .FALSE. */
1388 else /* if (cp->value.logical != 0), thus .TRUE. */
1396 /* Start a new block. */
1397 gfc_start_block (&block);
1399 /* Calculate the switch expression. We always need to do this
1400 because it may have side effects. */
1401 gfc_init_se (&se, NULL);
1402 gfc_conv_expr_val (&se, code->expr1);
1403 gfc_add_block_to_block (&block, &se.pre);
1405 if (t == f && t != NULL)
1407 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1408 translate the code for these cases, append it to the current
1410 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1414 tree true_tree, false_tree, stmt;
1416 true_tree = build_empty_stmt ();
1417 false_tree = build_empty_stmt ();
1419 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1420 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1421 make the missing case the default case. */
1422 if (t != NULL && f != NULL)
1432 /* Translate the code for each of these blocks, and append it to
1433 the current block. */
1435 true_tree = gfc_trans_code (t->next);
1438 false_tree = gfc_trans_code (f->next);
1440 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1441 true_tree, false_tree);
1442 gfc_add_expr_to_block (&block, stmt);
1445 return gfc_finish_block (&block);
1449 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1450 Instead of generating compares and jumps, it is far simpler to
1451 generate a data structure describing the cases in order and call a
1452 library subroutine that locates the right case.
1453 This is particularly true because this is the only case where we
1454 might have to dispose of a temporary.
1455 The library subroutine returns a pointer to jump to or NULL if no
1456 branches are to be taken. */
/* NOTE(review): this listing appears to have lost several lines
   (local declarations of c, cp, d, se, n, k; the assignments to k;
   some else-branches and most braces).  Only comments were added
   below -- the surviving code is unchanged.  */
gfc_trans_character_select (gfc_code *code)
  tree init, node, end_label, tmp, type, case_num, label, fndecl;
  stmtblock_t block, body;

  /* The jump table types are stored in static variables to avoid
     constructing them from scratch every single time.  */
  static tree select_struct[2];
  static tree ss_string1[2], ss_string1_len[2];
  static tree ss_string2[2], ss_string2_len[2];
  static tree ss_target[2];

  /* Character pointer type matching the kind of the selector.  */
  tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);

  /* Pick the jump-table variant for the selector kind -- presumably
     k = 0 for kind=1 and k = 1 for kind=4; the assignments are not
     visible in this listing (TODO confirm against upstream).  */
  if (code->expr1->ts.kind == 1)
  else if (code->expr1->ts.kind == 4)

  /* Lazily build the RECORD_TYPE describing one jump-table entry.  */
  if (select_struct[k] == NULL)
      select_struct[k] = make_node (RECORD_TYPE);

      if (code->expr1->ts.kind == 1)
	TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
      else if (code->expr1->ts.kind == 4)
	TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");

/* Helper to append one field to the jump-table record type.  */
#define ADD_FIELD(NAME, TYPE) \
  ss_##NAME[k] = gfc_add_field_to_struct \
     (&(TYPE_FIELDS (select_struct[k])), select_struct[k], \
      get_identifier (stringize(NAME)), TYPE)

      /* Lower bound string and its length.  */
      ADD_FIELD (string1, pchartype);
      ADD_FIELD (string1_len, gfc_charlen_type_node);

      /* Upper bound string and its length.  */
      ADD_FIELD (string2, pchartype);
      ADD_FIELD (string2_len, gfc_charlen_type_node);

      /* Ordinal of the case, returned by the runtime lookup.  */
      ADD_FIELD (target, integer_type_node);

      gfc_finish_type (select_struct[k]);

  /* Rewind to the leftmost (lowest) case in the sorted case tree.  */
  cp = code->block->ext.case_list;
  while (cp->left != NULL)

  /* Number the cases left-to-right (loop body not visible here).  */
  for (d = cp; d; d = d->right)

  end_label = gfc_build_label_decl (NULL_TREE);

  /* Generate the body */
  gfc_start_block (&block);
  gfc_init_block (&body);

  for (c = code->block; c; c = c->block)
      for (d = c->ext.case_list; d; d = d->next)
	  label = gfc_build_label_decl (NULL_TREE);
	  /* One CASE_LABEL_EXPR per case, keyed by the case ordinal.  */
	  tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
			     build_int_cst (NULL_TREE, d->n),
			     build_int_cst (NULL_TREE, d->n), label);
	  gfc_add_expr_to_block (&body, tmp);

      /* The statements of this CASE block.  */
      tmp = gfc_trans_code (c->next);
      gfc_add_expr_to_block (&body, tmp);

      /* Jump past the SWITCH at the end of each CASE block.  */
      tmp = build1_v (GOTO_EXPR, end_label);
      gfc_add_expr_to_block (&body, tmp);

  /* Generate the structure describing the branches */
  for(d = cp; d; d = d->right)
      gfc_init_se (&se, NULL);

      /* Missing lower bound: store a null string of length zero.  */
      node = tree_cons (ss_string1[k], null_pointer_node, node);
      node = tree_cons (ss_string1_len[k], integer_zero_node, node);

      gfc_conv_expr_reference (&se, d->low);

      node = tree_cons (ss_string1[k], se.expr, node);
      node = tree_cons (ss_string1_len[k], se.string_length, node);

      /* Same treatment for the upper bound.  */
      if (d->high == NULL)
	  node = tree_cons (ss_string2[k], null_pointer_node, node);
	  node = tree_cons (ss_string2_len[k], integer_zero_node, node);

	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_reference (&se, d->high);

	  node = tree_cons (ss_string2[k], se.expr, node);
	  node = tree_cons (ss_string2_len[k], se.string_length, node);

      node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n),

      tmp = build_constructor_from_list (select_struct[k], nreverse (node));
      init = tree_cons (NULL_TREE, tmp, init);

  /* Array type for the whole jump table, indexed 0 .. n-1.  */
  type = build_array_type (select_struct[k],
			   build_index_type (build_int_cst (NULL_TREE, n-1)));

  init = build_constructor_from_list (type, nreverse(init));
  TREE_CONSTANT (init) = 1;
  TREE_STATIC (init) = 1;
  /* Create a static variable to hold the jump table.  */
  tmp = gfc_create_var (type, "jumptable");
  TREE_CONSTANT (tmp) = 1;
  TREE_STATIC (tmp) = 1;
  TREE_READONLY (tmp) = 1;
  DECL_INITIAL (tmp) = init;

  /* Build the library call */
  init = gfc_build_addr_expr (pvoid_type_node, init);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_reference (&se, code->expr1);

  gfc_add_block_to_block (&block, &se.pre);

  /* The runtime lookup returns the ordinal of the matching case.  */
  if (code->expr1->ts.kind == 1)
    fndecl = gfor_fndecl_select_string;
  else if (code->expr1->ts.kind == 4)
    fndecl = gfor_fndecl_select_string_char4;

  tmp = build_call_expr (fndecl, 4, init, build_int_cst (NULL_TREE, n),
			 se.expr, se.string_length);
  case_num = gfc_create_var (integer_type_node, "case_num");
  gfc_add_modify (&block, case_num, tmp);

  gfc_add_block_to_block (&block, &se.post);

  /* Dispatch on the returned case number.  */
  tmp = gfc_finish_block (&body);
  tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
  gfc_add_expr_to_block (&block, tmp);

  tmp = build1_v (LABEL_EXPR, end_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
1635 /* Translate the three variants of the SELECT CASE construct.
1637 SELECT CASEs with INTEGER case expressions can be translated to an
1638 equivalent GENERIC switch statement, and for LOGICAL case
1639 expressions we build one or two if-else compares.
1641 SELECT CASEs with CHARACTER case expressions are a whole different
1642 story, because they don't exist in GENERIC. So we sort them and
1643 do a binary search at runtime.
1645 Fortran has no BREAK statement, and it does not allow jumps from
   one case block to another.  That makes things a lot easier for
   the optimizers.  */
1650 gfc_trans_select (gfc_code * code)
1652 gcc_assert (code && code->expr1);
1654 /* Empty SELECT constructs are legal. */
1655 if (code->block == NULL)
1656 return build_empty_stmt ();
1658 /* Select the correct translation function. */
1659 switch (code->expr1->ts.type)
1661 case BT_LOGICAL: return gfc_trans_logical_select (code);
1662 case BT_INTEGER: return gfc_trans_integer_select (code);
1663 case BT_CHARACTER: return gfc_trans_character_select (code);
1665 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1671 /* Traversal function to substitute a replacement symtree if the symbol
1672 in the expression is the same as that passed. f == 2 signals that
1673 that variable itself is not to be checked - only the references.
1674 This group of functions is used when the variable expression in a
1675 FORALL assignment has internal references. For example:
1676 FORALL (i = 1:4) p(p(i)) = i
   The only recourse here is to store a copy of 'p' for the index
   expression.  */
/* Symtree of the temporary that replaces the FORALL lvalue symbol, and
   the original symtree it stands in for.  Shared state for the
   forall_replace / forall_restore traversal callbacks.  */
static gfc_symtree *new_symtree;
static gfc_symtree *old_symtree;
1684 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1686 if (expr->expr_type != EXPR_VARIABLE)
1691 else if (expr->symtree->n.sym == sym)
1692 expr->symtree = new_symtree;
1698 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1700 gfc_traverse_expr (e, sym, forall_replace, f);
1704 forall_restore (gfc_expr *expr,
1705 gfc_symbol *sym ATTRIBUTE_UNUSED,
1706 int *f ATTRIBUTE_UNUSED)
1708 if (expr->expr_type != EXPR_VARIABLE)
1711 if (expr->symtree == new_symtree)
1712 expr->symtree = old_symtree;
1718 forall_restore_symtree (gfc_expr *e)
1720 gfc_traverse_expr (e, NULL, forall_restore, 0);
/* Make a temporary copy of the FORALL lvalue variable, redirect all
   references in the assignment to it, and emit the copy-in code into
   PRE (copy-out/cleanup into POST).  NOTE(review): this listing has
   lost several lines (declarations of e, tse, rse, tmp; some braces
   and else keywords); the surviving code is unchanged.  */
forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
  gfc_symbol *new_sym;
  gfc_symbol *old_sym;

  /* Build a copy of the lvalue.  */
  old_symtree = c->expr1->symtree;
  old_sym = old_symtree->n.sym;
  e = gfc_lval_expr_from_sym (old_sym);
  /* Array case: copy through a descriptor temporary.  */
  if (old_sym->attr.dimension)
      gfc_init_se (&tse, NULL);
      gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
      gfc_add_block_to_block (pre, &tse.pre);
      gfc_add_block_to_block (post, &tse.post);
      tse.expr = build_fold_indirect_ref (tse.expr);
      if (e->ts.type != BT_CHARACTER)
	  /* Use the variable offset for the temporary.  */
	  tmp = gfc_conv_descriptor_offset (tse.expr);
	  gfc_add_modify (pre, tmp,
			  gfc_conv_array_offset (old_sym->backend_decl));
  /* Scalar case: evaluate into a simple temporary variable.  */
      gfc_init_se (&tse, NULL);
      gfc_init_se (&rse, NULL);
      gfc_conv_expr (&rse, e);
      if (e->ts.type == BT_CHARACTER)
	  tse.string_length = rse.string_length;
	  tmp = gfc_get_character_type_len (gfc_default_character_kind,
	  tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
	  gfc_add_block_to_block (pre, &tse.pre);
	  gfc_add_block_to_block (post, &tse.post);
	  tmp = gfc_typenode_for_spec (&e->ts);
	  tse.expr = gfc_create_var (tmp, "temp");

      /* Copy the current value of the variable into the temporary.  */
      tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
				     e->expr_type == EXPR_VARIABLE);
      gfc_add_expr_to_block (pre, tmp);

  /* Create a new symbol to represent the lvalue.  */
  new_sym = gfc_new_symbol (old_sym->name, NULL);
  new_sym->ts = old_sym->ts;
  new_sym->attr.referenced = 1;
  new_sym->attr.temporary = 1;
  new_sym->attr.dimension = old_sym->attr.dimension;
  new_sym->attr.flavor = old_sym->attr.flavor;

  /* Use the temporary as the backend_decl.  */
  new_sym->backend_decl = tse.expr;

  /* Create a fake symtree for it.  */
  new_symtree = gfc_new_symtree (&root, old_sym->name);
  new_symtree->n.sym = new_sym;
  gcc_assert (new_symtree == root);

  /* Go through the expression reference replacing the old_symtree
     with the new one.  */
  forall_replace_symtree (c->expr1, old_sym, 2);

  /* Now we have made this temporary, we might as well use it for
     the right hand side.  */
  forall_replace_symtree (c->expr2, old_sym, 1);
/* Handles dependencies in forall assignments.  Decides whether the
   lvalue must be copied to a temporary before the FORALL body runs
   (NOTE(review): declarations and return statements are missing from
   this listing; the surviving code is unchanged).  */
check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
  lsym = c->expr1->symtree->n.sym;
  /* Classic lhs/rhs overlap check.  */
  need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);

  /* Now check for dependencies within the 'variable'
     expression itself.  These are treated by making a complete
     copy of variable and changing all the references to it
     point to the copy instead.  Note that the shallow copy of
     the variable will not suffice for derived types with
     pointer components.  We therefore leave these to their
     own devices.  */
  if (lsym->ts.type == BT_DERIVED
	&& lsym->ts.derived->attr.pointer_comp)

  /* A FORALL index appearing inside the lvalue's own reference chain
     (e.g. FORALL (i = 1:4) p(p(i)) = i) forces a full copy.  */
  if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
      forall_make_variable_temp (c, pre, post);

  /* Substrings with dependencies are treated in the same
     way.  */
  if (c->expr1->ts.type == BT_CHARACTER
	&& c->expr2->expr_type == EXPR_VARIABLE
	&& lsym == c->expr2->symtree->n.sym)
      for (lref = c->expr1->ref; lref; lref = lref->next)
	if (lref->type == REF_SUBSTRING)
      for (rref = c->expr2->ref; rref; rref = rref->next)
	if (rref->type == REF_SUBSTRING)

      /* An rhs substring starting before the lhs substring overlaps;
	 copy through a temporary.  */
	  && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
	  forall_make_variable_temp (c, pre, post);
1864 cleanup_forall_symtrees (gfc_code *c)
1866 forall_restore_symtree (c->expr1);
1867 forall_restore_symtree (c->expr2);
1868 gfc_free (new_symtree->n.sym);
1869 gfc_free (new_symtree);
1873 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1874 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1875 indicates whether we should generate code to test the FORALLs mask
1876 array. OUTER is the loop header to be used for initializing mask
   The generated loop format is:
       count = (end - start + step) / step
       loopvar = start
       while (1)
	 {
	   if (count <= 0)
	     goto end_of_loop
	   <body>
	   loopvar += step
	   count --
	 }
       end_of_loop:  */
1893 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1894 int mask_flag, stmtblock_t *outer)
1902 tree var, start, end, step;
1905 /* Initialize the mask index outside the FORALL nest. */
1906 if (mask_flag && forall_tmp->mask)
1907 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
1909 iter = forall_tmp->this_loop;
1910 nvar = forall_tmp->nvar;
1911 for (n = 0; n < nvar; n++)
1914 start = iter->start;
1918 exit_label = gfc_build_label_decl (NULL_TREE);
1919 TREE_USED (exit_label) = 1;
1921 /* The loop counter. */
1922 count = gfc_create_var (TREE_TYPE (var), "count");
1924 /* The body of the loop. */
1925 gfc_init_block (&block);
1927 /* The exit condition. */
1928 cond = fold_build2 (LE_EXPR, boolean_type_node,
1929 count, build_int_cst (TREE_TYPE (count), 0));
1930 tmp = build1_v (GOTO_EXPR, exit_label);
1931 tmp = fold_build3 (COND_EXPR, void_type_node,
1932 cond, tmp, build_empty_stmt ());
1933 gfc_add_expr_to_block (&block, tmp);
1935 /* The main loop body. */
1936 gfc_add_expr_to_block (&block, body);
1938 /* Increment the loop variable. */
1939 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1940 gfc_add_modify (&block, var, tmp);
1942 /* Advance to the next mask element. Only do this for the
1944 if (n == 0 && mask_flag && forall_tmp->mask)
1946 tree maskindex = forall_tmp->maskindex;
1947 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1948 maskindex, gfc_index_one_node);
1949 gfc_add_modify (&block, maskindex, tmp);
1952 /* Decrement the loop counter. */
1953 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
1954 build_int_cst (TREE_TYPE (var), 1));
1955 gfc_add_modify (&block, count, tmp);
1957 body = gfc_finish_block (&block);
1959 /* Loop var initialization. */
1960 gfc_init_block (&block);
1961 gfc_add_modify (&block, var, start);
1964 /* Initialize the loop counter. */
1965 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1966 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1967 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1968 gfc_add_modify (&block, count, tmp);
1970 /* The loop expression. */
1971 tmp = build1_v (LOOP_EXPR, body);
1972 gfc_add_expr_to_block (&block, tmp);
1974 /* The exit label. */
1975 tmp = build1_v (LABEL_EXPR, exit_label);
1976 gfc_add_expr_to_block (&block, tmp);
1978 body = gfc_finish_block (&block);
1985 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1986 is nonzero, the body is controlled by all masks in the forall nest.
1987 Otherwise, the innermost loop is not controlled by it's mask. This
1988 is used for initializing that mask. */
1991 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1996 forall_info *forall_tmp;
1997 tree mask, maskindex;
1999 gfc_start_block (&header);
2001 forall_tmp = nested_forall_info;
2002 while (forall_tmp != NULL)
2004 /* Generate body with masks' control. */
2007 mask = forall_tmp->mask;
2008 maskindex = forall_tmp->maskindex;
2010 /* If a mask was specified make the assignment conditional. */
2013 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2014 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
2017 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2018 forall_tmp = forall_tmp->prev_nest;
2022 gfc_add_expr_to_block (&header, body);
2023 return gfc_finish_block (&header);
2027 /* Allocate data for holding a temporary array. Returns either a local
2028 temporary array or a pointer variable. */
2031 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2038 if (INTEGER_CST_P (size))
2040 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
2041 gfc_index_one_node);
2046 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2047 type = build_array_type (elem_type, type);
2048 if (gfc_can_put_var_on_stack (bytesize))
2050 gcc_assert (INTEGER_CST_P (size));
2051 tmpvar = gfc_create_var (type, "temp");
2056 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2057 *pdata = convert (pvoid_type_node, tmpvar);
2059 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2060 gfc_add_modify (pblock, tmpvar, tmp);
2066 /* Generate codes to copy the temporary to the actual lhs. */
/* Copy the temporary TMP1 back into the lhs EXPR, indexing the
   temporary with COUNT1 and the WHEREMASK (if any) with COUNT3.
   NOTE(review): declarations and some braces are missing from this
   listing; the surviving code is unchanged.  */
generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
			       tree count1, tree wheremask, bool invert)
  stmtblock_t block, body;

  lss = gfc_walk_expr (expr);

  /* Scalar lhs: a single assignment, no scalarizer needed.  */
  if (lss == gfc_ss_terminator)
      gfc_start_block (&block);

      gfc_init_se (&lse, NULL);

      /* Translate the expression.  */
      gfc_conv_expr (&lse, expr);

      /* Form the expression for the temporary.  */
      tmp = gfc_build_array_ref (tmp1, count1, NULL);

      /* Use the scalar assignment as is.  */
      gfc_add_block_to_block (&block, &lse.pre);
      gfc_add_modify (&block, lse.expr, tmp);
      gfc_add_block_to_block (&block, &lse.post);

      /* Increment the count1.  */
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
			 gfc_index_one_node);
      gfc_add_modify (&block, count1, tmp);

      tmp = gfc_finish_block (&block);

  /* Array lhs: run a scalarized copy loop.  */
      gfc_start_block (&block);

      gfc_init_loopinfo (&loop1);
      gfc_init_se (&rse, NULL);
      gfc_init_se (&lse, NULL);

      /* Associate the lss with the loop.  */
      gfc_add_ss_to_loop (&loop1, lss);

      /* Calculate the bounds of the scalarization.  */
      gfc_conv_ss_startstride (&loop1);
      /* Setup the scalarizing loops.  */
      gfc_conv_loop_setup (&loop1, &expr->where);

      gfc_mark_ss_chain_used (lss, 1);

      /* Start the scalarized loop body.  */
      gfc_start_scalarized_body (&loop1, &body);

      /* Setup the gfc_se structures.  */
      gfc_copy_loopinfo_to_se (&lse, &loop1);

      /* Form the expression of the temporary.  */
      if (lss != gfc_ss_terminator)
	rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
      /* Translate expr.  */
      gfc_conv_expr (&lse, expr);

      /* Use the scalar assignment.  */
      rse.string_length = lse.string_length;
      tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);

      /* Form the mask expression according to the mask tree list.  */
	  wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
	  /* INVERT flips the sense of the WHERE mask (ELSEWHERE).  */
	    wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
					 TREE_TYPE (wheremaskexpr),
	  tmp = fold_build3 (COND_EXPR, void_type_node,
			     wheremaskexpr, tmp, build_empty_stmt ());

      gfc_add_expr_to_block (&body, tmp);

      /* Increment count1.  */
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
			 count1, gfc_index_one_node);
      gfc_add_modify (&body, count1, tmp);

      /* Increment count3 (the wheremask index), when present.  */
	  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
			     count3, gfc_index_one_node);
	  gfc_add_modify (&body, count3, tmp);

      /* Generate the copying loops.  */
      gfc_trans_scalarizing_loops (&loop1, &body);
      gfc_add_block_to_block (&block, &loop1.pre);
      gfc_add_block_to_block (&block, &loop1.post);
      gfc_cleanup_loop (&loop1);

      tmp = gfc_finish_block (&block);
2180 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2181 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2182 and should not be freed. WHEREMASK is the conditional execution mask
2183 whose sense may be inverted by INVERT. */
/* Copy the rhs EXPR2 into the temporary TMP1 (indexed by COUNT1),
   under control of WHEREMASK/INVERT when present.  LSS/RSS come from
   compute_inner_temp_size and are reused, not freed.  NOTE(review):
   declarations and some braces are missing from this listing; the
   surviving code is unchanged.  */
generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
			       tree count1, gfc_ss *lss, gfc_ss *rss,
			       tree wheremask, bool invert)
  stmtblock_t block, body1;

  gfc_start_block (&block);

  gfc_init_se (&rse, NULL);
  gfc_init_se (&lse, NULL);

  /* Scalar case: one direct store into the temporary.  */
  if (lss == gfc_ss_terminator)
      gfc_init_block (&body1);
      gfc_conv_expr (&rse, expr2);
      lse.expr = gfc_build_array_ref (tmp1, count1, NULL);

  /* Array case: scalarize over LSS/RSS.  */
      /* Initialize the loop.  */
      gfc_init_loopinfo (&loop);

      /* We may need LSS to determine the shape of the expression.  */
      gfc_add_ss_to_loop (&loop, lss);
      gfc_add_ss_to_loop (&loop, rss);

      gfc_conv_ss_startstride (&loop);
      gfc_conv_loop_setup (&loop, &expr2->where);

      gfc_mark_ss_chain_used (rss, 1);
      /* Start the loop body.  */
      gfc_start_scalarized_body (&loop, &body1);

      /* Translate the expression.  */
      gfc_copy_loopinfo_to_se (&rse, &loop);
      gfc_conv_expr (&rse, expr2);

      /* Form the expression of the temporary.  */
      lse.expr = gfc_build_array_ref (tmp1, count1, NULL);

  /* Use the scalar assignment.  */
  lse.string_length = rse.string_length;
  tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
				 expr2->expr_type == EXPR_VARIABLE);

  /* Form the mask expression according to the mask tree list.  */
      wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
      /* INVERT flips the sense of the WHERE mask (ELSEWHERE).  */
	wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
				     TREE_TYPE (wheremaskexpr),
      tmp = fold_build3 (COND_EXPR, void_type_node,
			 wheremaskexpr, tmp, build_empty_stmt ());

  gfc_add_expr_to_block (&body1, tmp);

  if (lss == gfc_ss_terminator)
      gfc_add_block_to_block (&block, &body1);

      /* Increment count1.  */
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
			 gfc_index_one_node);
      gfc_add_modify (&block, count1, tmp);

      /* Increment count1.  */
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
			 count1, gfc_index_one_node);
      gfc_add_modify (&body1, count1, tmp);

      /* Increment count3 (the wheremask index), when present.  */
	  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
			     count3, gfc_index_one_node);
	  gfc_add_modify (&body1, count3, tmp);

      /* Generate the copying loops.  */
      gfc_trans_scalarizing_loops (&loop, &body1);

      gfc_add_block_to_block (&block, &loop.pre);
      gfc_add_block_to_block (&block, &loop.post);

      gfc_cleanup_loop (&loop);
      /* TODO: Reuse lss and rss when copying temp->lhs.  Need to be careful
	 as tree nodes in SS may not be valid in different scope.  */

  tmp = gfc_finish_block (&block);
2292 /* Calculate the size of temporary needed in the assignment inside forall.
2293 LSS and RSS are filled in this function. */
/* Compute the number of elements one iteration of the FORALL body
   assigns, filling in *LSS and *RSS for later reuse.  NOTE(review):
   declarations, braces and the return statement are missing from this
   listing; the surviving code is unchanged.  */
compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
			 stmtblock_t * pblock,
			 gfc_ss **lss, gfc_ss **rss)
  *lss = gfc_walk_expr (expr1);

  /* A scalar lhs needs exactly one element.  */
  size = gfc_index_one_node;
  if (*lss != gfc_ss_terminator)
      gfc_init_loopinfo (&loop);

      /* Walk the RHS of the expression.  */
      *rss = gfc_walk_expr (expr2);
      if (*rss == gfc_ss_terminator)
	  /* The rhs is scalar.  Add a ss for the expression.  */
	  *rss = gfc_get_ss ();
	  (*rss)->next = gfc_ss_terminator;
	  (*rss)->type = GFC_SS_SCALAR;
	  (*rss)->expr = expr2;

      /* Associate the SS with the loop.  */
      gfc_add_ss_to_loop (&loop, *lss);
      /* We don't actually need to add the rhs at this point, but it might
	 make guessing the loop bounds a bit easier.  */
      gfc_add_ss_to_loop (&loop, *rss);

      /* We only want the shape of the expression, not rest of the junk
	 generated by the scalarizer.  */
      loop.array_parameter = 1;

      /* Calculate the bounds of the scalarization.  */
      save_flag = gfc_option.rtcheck;
      /* NOTE(review): `!` here is a logical NOT; clearing a bit mask
	 needs bitwise `~GFC_RTCHECK_BOUNDS`.  This matches a known
	 upstream bug later fixed in GCC -- confirm before changing.  */
      gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
      gfc_conv_ss_startstride (&loop);
      gfc_option.rtcheck = save_flag;
      gfc_conv_loop_setup (&loop, &expr2->where);

      /* Figure out how many elements we need.  */
      for (i = 0; i < loop.dimen; i++)
	  tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
			     gfc_index_one_node, loop.from[i]);
	  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
	  /* size *= extent of dimension i.  */
	  size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);

      gfc_add_block_to_block (pblock, &loop.pre);
      size = gfc_evaluate_now (size, pblock);
      gfc_add_block_to_block (pblock, &loop.post);

      /* TODO: write a function that cleans up a loopinfo without freeing
	 the SS chains.  Currently a NOP.  */
2363 /* Calculate the overall iterator number of the nested forall construct.
2364 This routine actually calculates the number of times the body of the
2365 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2366 that by the expression INNER_SIZE. The BLOCK argument specifies the
2367 block in which to calculate the result, and the optional INNER_SIZE_BODY
2368 argument contains any statements that need to executed (inside the loop)
2369 to initialize or calculate INNER_SIZE. */
2372 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2373 stmtblock_t *inner_size_body, stmtblock_t *block)
2375 forall_info *forall_tmp = nested_forall_info;
2379 /* We can eliminate the innermost unconditional loops with constant
2381 if (INTEGER_CST_P (inner_size))
2384 && !forall_tmp->mask
2385 && INTEGER_CST_P (forall_tmp->size))
2387 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2388 inner_size, forall_tmp->size);
2389 forall_tmp = forall_tmp->prev_nest;
2392 /* If there are no loops left, we have our constant result. */
2397 /* Otherwise, create a temporary variable to compute the result. */
2398 number = gfc_create_var (gfc_array_index_type, "num");
2399 gfc_add_modify (block, number, gfc_index_zero_node);
2401 gfc_start_block (&body);
2402 if (inner_size_body)
2403 gfc_add_block_to_block (&body, inner_size_body);
2405 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2406 number, inner_size);
2409 gfc_add_modify (&body, number, tmp);
2410 tmp = gfc_finish_block (&body);
2412 /* Generate loops. */
2413 if (forall_tmp != NULL)
2414 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2416 gfc_add_expr_to_block (block, tmp);
2422 /* Allocate temporary for forall construct. SIZE is the size of temporary
2423 needed. PTEMP1 is returned for space free. */
2426 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2433 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2434 if (!integer_onep (unit))
2435 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2440 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2443 tmp = build_fold_indirect_ref (tmp);
2448 /* Allocate temporary for forall construct according to the information in
2449 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2450 assignment inside forall. PTEMP1 is returned for space free. */
2453 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2454 tree inner_size, stmtblock_t * inner_size_body,
2455 stmtblock_t * block, tree * ptemp1)
2459 /* Calculate the total size of temporary needed in forall construct. */
2460 size = compute_overall_iter_number (nested_forall_info, inner_size,
2461 inner_size_body, block);
2463 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2467 /* Handle assignments inside forall which need temporary.
2469 forall (i=start:end:stride; maskexpr)
2472 (where e,f<i> are arbitrary expressions possibly involving i
2473 and there is a dependency between e<i> and f<i>)
2475 masktmp(:) = maskexpr(:)
2480 for (i = start; i <= end; i += stride)
2484 for (i = start; i <= end; i += stride)
2486 if (masktmp[maskindex++])
2487 tmp[count1++] = f<i>
2491 for (i = start; i <= end; i += stride)
2493 if (masktmp[maskindex++])
	    e<i> = tmp[count1++]
    */
/* Emit a FORALL assignment EXPR1 = EXPR2 through a temporary: first a
   nested-loop pass copying the rhs into the temporary, then a second
   pass copying it to the lhs.  NOTE(review): declarations and some
   lines are missing from this listing; the surviving code is
   unchanged.  */
gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
			    tree wheremask, bool invert,
			    forall_info * nested_forall_info,
			    stmtblock_t * block)
  stmtblock_t inner_size_body;

  /* Create vars.  count1 is the current iterator number of the nested
     forall.  */
  count1 = gfc_create_var (gfc_array_index_type, "count1");

  /* Count is the wheremask index.  */
      count = gfc_create_var (gfc_array_index_type, "count");
      gfc_add_modify (block, count, gfc_index_zero_node);

  /* Initialize count1.  */
  gfc_add_modify (block, count1, gfc_index_zero_node);

  /* Calculate the size of temporary needed in the assignment.  Return loop, lss
     and rss which are used in function generate_loop_for_rhs_to_temp().  */
  gfc_init_block (&inner_size_body);
  inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,

  /* The type of LHS.  Used in function allocate_temp_for_forall_nest */
  if (expr1->ts.type == BT_CHARACTER && expr1->ts.cl->length)
      /* Evaluate the character length once if not done yet.  */
      if (!expr1->ts.cl->backend_decl)
	  gfc_init_se (&tse, NULL);
	  gfc_conv_expr (&tse, expr1->ts.cl->length);
	  expr1->ts.cl->backend_decl = tse.expr;
      type = gfc_get_character_type_len (gfc_default_character_kind,
					 expr1->ts.cl->backend_decl);
      type = gfc_typenode_for_spec (&expr1->ts);

  /* Allocate temporary for nested forall construct according to the
     information in nested_forall_info and inner_size.  */
  tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
					&inner_size_body, block, &ptemp1);

  /* Generate codes to copy rhs to the temporary .  */
  tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,

  /* Generate body and loops according to the information in
     nested_forall_info.  */
  tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
  gfc_add_expr_to_block (block, tmp);

  /* Reset the counters for the second (temp -> lhs) pass.  */
  gfc_add_modify (block, count1, gfc_index_zero_node);
    gfc_add_modify (block, count, gfc_index_zero_node);

  /* Generate codes to copy the temporary to lhs.  */
  tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,

  /* Generate body and loops according to the information in
     nested_forall_info.  */
  tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
  gfc_add_expr_to_block (block, tmp);

  /* Free the temporary.  */
      tmp = gfc_call_free (ptemp1);
      gfc_add_expr_to_block (block, tmp);
2589 /* Translate pointer assignment inside FORALL which need temporary. */
/* EXPR1 is the pointer being assigned, EXPR2 its target, and BLOCK
   receives the generated code.  Because the FORALL body may reference
   the pointers being redefined, the assignment is split into two
   passes over the FORALL nest: pass 1 stores every RHS pointer (or,
   for the array case, a whole array descriptor) into a linear
   temporary TMP1 indexed by COUNT; pass 2 resets COUNT and copies the
   saved values to the LHS.  The temporary is freed at the end.  */
2592 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2593 forall_info * nested_forall_info,
2594 stmtblock_t * block)
2608 tree tmp, tmp1, ptemp1;
2610 count = gfc_create_var (gfc_array_index_type, "count");
2611 gfc_add_modify (block, count, gfc_index_zero_node);
2613 inner_size = integer_one_node;
2614 lss = gfc_walk_expr (expr1);
2615 rss = gfc_walk_expr (expr2);
/* Scalar pointer case: LSS is the terminator, so one pointer value is
   saved per FORALL iteration.  */
2616 if (lss == gfc_ss_terminator)
2618 type = gfc_typenode_for_spec (&expr1->ts);
2619 type = build_pointer_type (type);
2621 /* Allocate temporary for nested forall construct according to the
2622 information in nested_forall_info and inner_size. */
2623 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2624 inner_size, NULL, block, &ptemp1);
2625 gfc_start_block (&body);
2626 gfc_init_se (&lse, NULL);
2627 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2628 gfc_init_se (&rse, NULL);
2629 rse.want_pointer = 1;
2630 gfc_conv_expr (&rse, expr2);
2631 gfc_add_block_to_block (&body, &rse.pre);
2632 gfc_add_modify (&body, lse.expr,
2633 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2634 gfc_add_block_to_block (&body, &rse.post);
2636 /* Increment count. */
2637 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2638 count, gfc_index_one_node);
2639 gfc_add_modify (&body, count, tmp);
2641 tmp = gfc_finish_block (&body);
2643 /* Generate body and loops according to the information in
2644 nested_forall_info. */
2645 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2646 gfc_add_expr_to_block (block, tmp);
/* Pass 2: rewind COUNT and copy the saved pointers to the LHS.  */
2649 gfc_add_modify (block, count, gfc_index_zero_node);
2651 gfc_start_block (&body);
2652 gfc_init_se (&lse, NULL);
2653 gfc_init_se (&rse, NULL);
2654 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2655 lse.want_pointer = 1;
2656 gfc_conv_expr (&lse, expr1);
2657 gfc_add_block_to_block (&body, &lse.pre);
2658 gfc_add_modify (&body, lse.expr, rse.expr);
2659 gfc_add_block_to_block (&body, &lse.post);
2660 /* Increment count. */
2661 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2662 count, gfc_index_one_node);
2663 gfc_add_modify (&body, count, tmp);
2664 tmp = gfc_finish_block (&body);
2666 /* Generate body and loops according to the information in
2667 nested_forall_info. */
2668 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2669 gfc_add_expr_to_block (block, tmp);
/* Array pointer case: save a full array descriptor per iteration
   instead of a bare pointer.  */
2673 gfc_init_loopinfo (&loop);
2675 /* Associate the SS with the loop. */
2676 gfc_add_ss_to_loop (&loop, rss);
2678 /* Setup the scalarizing loops and bounds. */
2679 gfc_conv_ss_startstride (&loop);
2681 gfc_conv_loop_setup (&loop, &expr2->where);
2683 info = &rss->data.info;
2684 desc = info->descriptor;
2686 /* Make a new descriptor. */
2687 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2688 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2689 loop.from, loop.to, 1,
2692 /* Allocate temporary for nested forall construct. */
2693 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2694 inner_size, NULL, block, &ptemp1)
2695 gfc_start_block (&body);
2696 gfc_init_se (&lse, NULL);
2697 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
/* direct_byref makes gfc_conv_expr_descriptor fill the descriptor
   element of the temporary in place.  */
2698 lse.direct_byref = 1;
2699 rss = gfc_walk_expr (expr2);
2700 gfc_conv_expr_descriptor (&lse, expr2, rss);
2702 gfc_add_block_to_block (&body, &lse.pre);
2703 gfc_add_block_to_block (&body, &lse.post);
2705 /* Increment count. */
2706 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2707 count, gfc_index_one_node);
2708 gfc_add_modify (&body, count, tmp);
2710 tmp = gfc_finish_block (&body);
2712 /* Generate body and loops according to the information in
2713 nested_forall_info. */
2714 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2715 gfc_add_expr_to_block (block, tmp);
/* Pass 2 for the array case: assign each saved descriptor to the
   LHS pointer descriptor.  */
2718 gfc_add_modify (block, count, gfc_index_zero_node);
2720 parm = gfc_build_array_ref (tmp1, count, NULL);
2721 lss = gfc_walk_expr (expr1);
2722 gfc_init_se (&lse, NULL);
2723 gfc_conv_expr_descriptor (&lse, expr1, lss);
2724 gfc_add_modify (&lse.pre, lse.expr, parm);
2725 gfc_start_block (&body);
2726 gfc_add_block_to_block (&body, &lse.pre);
2727 gfc_add_block_to_block (&body, &lse.post);
2729 /* Increment count. */
2730 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2731 count, gfc_index_one_node);
2732 gfc_add_modify (&body, count, tmp);
2734 tmp = gfc_finish_block (&body);
2736 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2737 gfc_add_expr_to_block (block, tmp);
2739 /* Free the temporary. */
2742 tmp = gfc_call_free (ptemp1);
2743 gfc_add_expr_to_block (block, tmp);
2748 /* FORALL and WHERE statements are really nasty, especially when you nest
2749 them. All the rhs of a forall assignment must be evaluated before the
2750 actual assignments are performed. Presumably this also applies to all the
2751 assignments in an inner where statement. */
2753 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2754 linear array, relying on the fact that we process in the same order in all
2757 forall (i=start:end:stride; maskexpr)
2761 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2763 count = ((end + 1 - start) / stride)
2764 masktmp(:) = maskexpr(:)
2767 for (i = start; i <= end; i += stride)
2769 if (masktmp[maskindex++])
2773 for (i = start; i <= end; i += stride)
2775 if (masktmp[maskindex++])
2779 Note that this code only works when there are no dependencies.
2780 Forall loop with array assignments and data dependencies are a real pain,
2781 because the size of the temporary cannot always be determined before the
2782 loop is executed. This problem is compounded by the presence of nested
/* Translate one level of a FORALL statement or construct.  CODE is the
   EXEC_FORALL node and NESTED_FORALL_INFO links any enclosing FORALL
   levels (NULL at the outermost level).  The routine (1) shadows each
   index symbol with a fresh temporary, (2) computes the iteration
   count for this level, (3) pre-evaluates the mask (if any) into a
   compact temporary array, then (4) dispatches on each statement of
   the FORALL body, and finally restores the shadowed symbols and
   releases the temporaries.  Called recursively for nested FORALLs.  */
2787 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2807 gfc_forall_iterator *fa;
2810 gfc_saved_var *saved_vars;
2811 iter_info *this_forall;
2815 /* Do nothing if the mask is false. */
2817 && code->expr1->expr_type == EXPR_CONSTANT
2818 && !code->expr1->value.logical)
2819 return build_empty_stmt ();
2822 /* Count the FORALL index number. */
2823 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2827 /* Allocate the space for var, start, end, step, varexpr. */
2828 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2829 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2830 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2831 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2832 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2833 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2835 /* Allocate the space for info. */
2836 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2838 gfc_start_block (&pre);
2839 gfc_init_block (&post);
2840 gfc_init_block (&block);
/* Evaluate each triplet once up front; gfc_make_safe_expr forces the
   bounds into temporaries so the body cannot change them.  */
2843 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2845 gfc_symbol *sym = fa->var->symtree->n.sym;
2847 /* Allocate space for this_forall. */
2848 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2850 /* Create a temporary variable for the FORALL index. */
2851 tmp = gfc_typenode_for_spec (&sym->ts);
2852 var[n] = gfc_create_var (tmp, sym->name);
2853 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2855 /* Record it in this_forall. */
2856 this_forall->var = var[n];
2858 /* Replace the index symbol's backend_decl with the temporary decl. */
2859 sym->backend_decl = var[n];
2861 /* Work out the start, end and stride for the loop. */
2862 gfc_init_se (&se, NULL);
2863 gfc_conv_expr_val (&se, fa->start);
2864 /* Record it in this_forall. */
2865 this_forall->start = se.expr;
2866 gfc_add_block_to_block (&block, &se.pre);
2869 gfc_init_se (&se, NULL);
2870 gfc_conv_expr_val (&se, fa->end);
2871 /* Record it in this_forall. */
2872 this_forall->end = se.expr;
2873 gfc_make_safe_expr (&se);
2874 gfc_add_block_to_block (&block, &se.pre);
2877 gfc_init_se (&se, NULL);
2878 gfc_conv_expr_val (&se, fa->stride);
2879 /* Record it in this_forall. */
2880 this_forall->step = se.expr;
2881 gfc_make_safe_expr (&se);
2882 gfc_add_block_to_block (&block, &se.pre);
2885 /* Set the NEXT field of this_forall to NULL. */
2886 this_forall->next = NULL;
2887 /* Link this_forall to the info construct. */
2888 if (info->this_loop)
2890 iter_info *iter_tmp = info->this_loop;
2891 while (iter_tmp->next != NULL)
2892 iter_tmp = iter_tmp->next;
2893 iter_tmp->next = this_forall;
2896 info->this_loop = this_forall;
2902 /* Calculate the size needed for the current forall level. */
2903 size = gfc_index_one_node;
2904 for (n = 0; n < nvar; n++)
2906 /* size = (end + step - start) / step. */
2907 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2909 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2911 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2912 tmp = convert (gfc_array_index_type, tmp);
/* Total size is the product of the trip counts of all indices.  */
2914 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2917 /* Record the nvar and size of current forall level. */
2923 /* If the mask is .true., consider the FORALL unconditional. */
2924 if (code->expr1->expr_type == EXPR_CONSTANT
2925 && code->expr1->value.logical)
2933 /* First we need to allocate the mask. */
2936 /* As the mask array can be very big, prefer compact boolean types. */
2937 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2938 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2939 size, NULL, &block, &pmask);
2940 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2942 /* Record them in the info structure. */
2943 info->maskindex = maskindex;
2948 /* No mask was specified. */
2949 maskindex = NULL_TREE;
2950 mask = pmask = NULL_TREE;
2953 /* Link the current forall level to nested_forall_info. */
2954 info->prev_nest = nested_forall_info;
2955 nested_forall_info = info;
2957 /* Copy the mask into a temporary variable if required.
2958 For now we assume a mask temporary is needed. */
2961 /* As the mask array can be very big, prefer compact boolean types. */
2962 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2964 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
2966 /* Start of mask assignment loop body. */
2967 gfc_start_block (&body);
2969 /* Evaluate the mask expression. */
2970 gfc_init_se (&se, NULL);
2971 gfc_conv_expr_val (&se, code->expr1);
2972 gfc_add_block_to_block (&body, &se.pre);
2974 /* Store the mask. */
2975 se.expr = convert (mask_type, se.expr);
2977 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2978 gfc_add_modify (&body, tmp, se.expr);
2980 /* Advance to the next mask element. */
2981 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2982 maskindex, gfc_index_one_node);
2983 gfc_add_modify (&body, maskindex, tmp);
2985 /* Generate the loops. */
2986 tmp = gfc_finish_block (&body);
/* Note: the mask is evaluated over THIS level only (info), not the
   whole nest, and without mask guarding (last argument 0).  */
2987 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2988 gfc_add_expr_to_block (&block, tmp);
2991 c = code->block->next;
2993 /* TODO: loop merging in FORALL statements. */
2994 /* Now that we've got a copy of the mask, generate the assignment loops. */
3000 /* A scalar or array assignment. DO the simple check for
3001 lhs to rhs dependencies. These make a temporary for the
3002 rhs and form a second forall block to copy to variable. */
3003 need_temp = check_forall_dependencies(c, &pre, &post);
3005 /* Temporaries due to array assignment data dependencies introduce
3006 no end of problems. */
3008 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3009 nested_forall_info, &block);
3012 /* Use the normal assignment copying routines. */
3013 assign = gfc_trans_assignment (c->expr1, c->expr2, false);
3015 /* Generate body and loops. */
3016 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3018 gfc_add_expr_to_block (&block, tmp);
3021 /* Cleanup any temporary symtrees that have been made to deal
3022 with dependencies. */
3024 cleanup_forall_symtrees (c);
3029 /* Translate WHERE or WHERE construct nested in FORALL. */
3030 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3033 /* Pointer assignment inside FORALL. */
3034 case EXEC_POINTER_ASSIGN:
3035 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3037 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3038 nested_forall_info, &block);
3041 /* Use the normal assignment copying routines. */
3042 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3044 /* Generate body and loops. */
3045 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3047 gfc_add_expr_to_block (&block, tmp);
/* Nested FORALL: recurse with the current level pushed onto the
   forall_info chain.  */
3052 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3053 gfc_add_expr_to_block (&block, tmp);
3056 /* Explicit subroutine calls are prevented by the frontend but interface
3057 assignments can legitimately produce them. */
3058 case EXEC_ASSIGN_CALL:
3059 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3060 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3061 gfc_add_expr_to_block (&block, tmp);
3071 /* Restore the original index variables. */
3072 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3073 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3075 /* Free the space for var, start, end, step, varexpr. */
3081 gfc_free (saved_vars);
3083 /* Free the space for this forall_info. */
3088 /* Free the temporary for the mask. */
3089 tmp = gfc_call_free (pmask);
3090 gfc_add_expr_to_block (&block, tmp);
3093 pushdecl (maskindex);
3095 gfc_add_block_to_block (&pre, &block);
3096 gfc_add_block_to_block (&pre, &post);
3098 return gfc_finish_block (&pre);
3102 /* Translate the FORALL statement or construct. */
/* Entry point: delegates to gfc_trans_forall_1 with a NULL
   nested_forall_info, marking this as the outermost FORALL level.  */
3104 tree gfc_trans_forall (gfc_code * code)
3106 return gfc_trans_forall_1 (code, NULL);
3110 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3111 If the WHERE construct is nested in FORALL, compute the overall temporary
3112 needed by the WHERE mask expression multiplied by the iterator number of
3114 ME is the WHERE mask expression.
3115 MASK is the current execution mask upon input, whose sense may or may
3116 not be inverted as specified by the INVERT argument.
3117 CMASK is the updated execution mask on output, or NULL if not required.
3118 PMASK is the pending execution mask on output, or NULL if not required.
3119 BLOCK is the block in which to place the condition evaluation loops. */
3122 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3123 tree mask, bool invert, tree cmask, tree pmask,
3124 tree mask_type, stmtblock_t * block)
3129 stmtblock_t body, body1;
3130 tree count, cond, mtmp;
3133 gfc_init_loopinfo (&loop);
/* ME is walked twice: LSS gives the shape, RSS drives evaluation.  */
3135 lss = gfc_walk_expr (me);
3136 rss = gfc_walk_expr (me);
3138 /* Variable to index the temporary. */
3139 count = gfc_create_var (gfc_array_index_type, "count");
3140 /* Initialize count. */
3141 gfc_add_modify (block, count, gfc_index_zero_node);
3143 gfc_start_block (&body);
3145 gfc_init_se (&rse, NULL);
3146 gfc_init_se (&lse, NULL);
/* Scalar mask expression: no scalarization loop is needed.  */
3148 if (lss == gfc_ss_terminator)
3150 gfc_init_block (&body1);
3154 /* Initialize the loop. */
3155 gfc_init_loopinfo (&loop);
3157 /* We may need LSS to determine the shape of the expression. */
3158 gfc_add_ss_to_loop (&loop, lss);
3159 gfc_add_ss_to_loop (&loop, rss);
3161 gfc_conv_ss_startstride (&loop);
3162 gfc_conv_loop_setup (&loop, &me->where);
3164 gfc_mark_ss_chain_used (rss, 1);
3165 /* Start the loop body. */
3166 gfc_start_scalarized_body (&loop, &body1);
3168 /* Translate the expression. */
3169 gfc_copy_loopinfo_to_se (&rse, &loop);
3171 gfc_conv_expr (&rse, me);
3174 /* Variable to evaluate mask condition. */
3175 cond = gfc_create_var (mask_type, "cond");
/* MTMP caches the incoming execution mask element (possibly
   negated by INVERT) so it is read only once per element.  */
3176 if (mask && (cmask || pmask))
3177 mtmp = gfc_create_var (mask_type, "mask");
3178 else mtmp = NULL_TREE;
3180 gfc_add_block_to_block (&body1, &lse.pre);
3181 gfc_add_block_to_block (&body1, &rse.pre);
3183 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3185 if (mask && (cmask || pmask))
3187 tmp = gfc_build_array_ref (mask, count, NULL);
3189 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3190 gfc_add_modify (&body1, mtmp, tmp);
/* cmask = current-mask AND cond: elements controlled by this
   WHERE clause.  */
3195 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3198 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3199 gfc_add_modify (&body1, tmp1, tmp);
/* pmask = current-mask AND NOT cond: elements pending for the
   following ELSEWHERE clauses.  */
3204 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3205 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3207 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3208 gfc_add_modify (&body1, tmp1, tmp);
3211 gfc_add_block_to_block (&body1, &lse.post);
3212 gfc_add_block_to_block (&body1, &rse.post);
3214 if (lss == gfc_ss_terminator)
3216 gfc_add_block_to_block (&body, &body1);
3220 /* Increment count. */
3221 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3222 gfc_index_one_node);
3223 gfc_add_modify (&body1, count, tmp1);
3225 /* Generate the copying loops. */
3226 gfc_trans_scalarizing_loops (&loop, &body1);
3228 gfc_add_block_to_block (&body, &loop.pre);
3229 gfc_add_block_to_block (&body, &loop.post);
3231 gfc_cleanup_loop (&loop);
3232 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3233 as tree nodes in SS may not be valid in different scope. */
3236 tmp1 = gfc_finish_block (&body);
3237 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3238 if (nested_forall_info != NULL)
3239 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3241 gfc_add_expr_to_block (block, tmp1);
3245 /* Translate an assignment statement in a WHERE statement or construct
3246 statement. The MASK expression is used to control which elements
3247 of EXPR1 shall be assigned. The sense of MASK is specified by
/* INVERT.  COUNT1 indexes MASK for the main assignment loop; COUNT2
   indexes it again for the temporary-to-LHS copy-back loop generated
   when the scalarizer needs an RHS temporary.  CNEXT, when it has a
   resolved_sym, denotes a defined (type-bound/interface) assignment
   that is dispatched to gfc_trans_call instead.  */
3251 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3252 tree mask, bool invert,
3253 tree count1, tree count2,
3259 gfc_ss *lss_section;
3266 tree index, maskexpr;
3268 /* A defined assignment. */
3269 if (cnext && cnext->resolved_sym)
3270 return gfc_trans_call (cnext, true, mask, count1, invert);
3273 /* TODO: handle this special case.
3274 Special case a single function returning an array. */
3275 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3277 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3283 /* Assignment of the form lhs = rhs. */
3284 gfc_start_block (&block);
3286 gfc_init_se (&lse, NULL);
3287 gfc_init_se (&rse, NULL);
3290 lss = gfc_walk_expr (expr1);
3293 /* In each where-assign-stmt, the mask-expr and the variable being
3294 defined shall be arrays of the same shape. */
3295 gcc_assert (lss != gfc_ss_terminator);
3297 /* The assignment needs scalarization. */
3300 /* Find a non-scalar SS from the lhs. */
3301 while (lss_section != gfc_ss_terminator
3302 && lss_section->type != GFC_SS_SECTION)
3303 lss_section = lss_section->next;
3305 gcc_assert (lss_section != gfc_ss_terminator);
3307 /* Initialize the scalarizer. */
3308 gfc_init_loopinfo (&loop);
3311 rss = gfc_walk_expr (expr2);
3312 if (rss == gfc_ss_terminator)
3314 /* The rhs is scalar. Add a ss for the expression. */
3315 rss = gfc_get_ss ();
3317 rss->next = gfc_ss_terminator;
3318 rss->type = GFC_SS_SCALAR;
3322 /* Associate the SS with the loop. */
3323 gfc_add_ss_to_loop (&loop, lss);
3324 gfc_add_ss_to_loop (&loop, rss);
3326 /* Calculate the bounds of the scalarization. */
3327 gfc_conv_ss_startstride (&loop);
3329 /* Resolve any data dependencies in the statement. */
/* May create loop.temp_ss; if so, the RHS is first written to a
   temporary and copied back below.  */
3330 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3332 /* Setup the scalarizing loops. */
3333 gfc_conv_loop_setup (&loop, &expr2->where);
3335 /* Setup the gfc_se structures. */
3336 gfc_copy_loopinfo_to_se (&lse, &loop);
3337 gfc_copy_loopinfo_to_se (&rse, &loop);
3340 gfc_mark_ss_chain_used (rss, 1);
3341 if (loop.temp_ss == NULL)
3344 gfc_mark_ss_chain_used (lss, 1);
3348 lse.ss = loop.temp_ss;
3349 gfc_mark_ss_chain_used (lss, 3);
3350 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3353 /* Start the scalarized loop body. */
3354 gfc_start_scalarized_body (&loop, &body);
3356 /* Translate the expression. */
3357 gfc_conv_expr (&rse, expr2);
3358 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3360 gfc_conv_tmp_array_ref (&lse);
3361 gfc_advance_se_ss_chain (&lse);
3364 gfc_conv_expr (&lse, expr1);
3366 /* Form the mask expression according to the mask. */
3368 maskexpr = gfc_build_array_ref (mask, index, NULL);
3370 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
/* Guard the scalar assignment with the mask element: the store
   happens only where maskexpr is true.  */
3372 /* Use the scalar assignment as is. */
3373 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3374 loop.temp_ss != NULL, false);
3376 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3378 gfc_add_expr_to_block (&body, tmp);
3380 if (lss == gfc_ss_terminator)
3382 /* Increment count1. */
3383 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3384 count1, gfc_index_one_node);
3385 gfc_add_modify (&body, count1, tmp);
3387 /* Use the scalar assignment as is. */
3388 gfc_add_block_to_block (&block, &body);
3392 gcc_assert (lse.ss == gfc_ss_terminator
3393 && rse.ss == gfc_ss_terminator);
3395 if (loop.temp_ss != NULL)
3397 /* Increment count1 before finish the main body of a scalarized
3399 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3400 count1, gfc_index_one_node);
3401 gfc_add_modify (&body, count1, tmp);
3402 gfc_trans_scalarized_loop_boundary (&loop, &body);
3404 /* We need to copy the temporary to the actual lhs. */
3405 gfc_init_se (&lse, NULL);
3406 gfc_init_se (&rse, NULL);
3407 gfc_copy_loopinfo_to_se (&lse, &loop);
3408 gfc_copy_loopinfo_to_se (&rse, &loop);
3410 rse.ss = loop.temp_ss;
3413 gfc_conv_tmp_array_ref (&rse);
3414 gfc_advance_se_ss_chain (&rse);
3415 gfc_conv_expr (&lse, expr1);
3417 gcc_assert (lse.ss == gfc_ss_terminator
3418 && rse.ss == gfc_ss_terminator);
3420 /* Form the mask expression according to the mask tree list. */
/* The copy-back loop re-reads the mask via COUNT2, since COUNT1
   has already been advanced by the main loop.  */
3422 maskexpr = gfc_build_array_ref (mask, index, NULL);
3424 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3427 /* Use the scalar assignment as is. */
3428 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3429 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3430 gfc_add_expr_to_block (&body, tmp);
3432 /* Increment count2. */
3433 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3434 count2, gfc_index_one_node);
3435 gfc_add_modify (&body, count2, tmp);
3439 /* Increment count1. */
3440 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3441 count1, gfc_index_one_node);
3442 gfc_add_modify (&body, count1, tmp);
3445 /* Generate the copying loops. */
3446 gfc_trans_scalarizing_loops (&loop, &body);
3448 /* Wrap the whole thing up. */
3449 gfc_add_block_to_block (&block, &loop.pre);
3450 gfc_add_block_to_block (&block, &loop.post);
3451 gfc_cleanup_loop (&loop);
3454 return gfc_finish_block (&block);
3458 /* Translate the WHERE construct or statement.
3459 This function can be called iteratively to translate the nested WHERE
3460 construct or statement.
3461 MASK is the control mask. */
/* INVERT gives the sense of MASK.  NESTED_FORALL_INFO is non-NULL when
   the WHERE is inside a FORALL; BLOCK receives the generated code.
   CMASK/PMASK are the control and pending-control masks for this
   nesting level, allocated only when the clause structure requires
   them (see the need_cmask/need_pmask analysis below) and freed at
   the end via the pcmask/ppmask cookies.  */
3464 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3465 forall_info * nested_forall_info, stmtblock_t * block)
3467 stmtblock_t inner_size_body;
3468 tree inner_size, size;
3477 tree count1, count2;
3481 tree pcmask = NULL_TREE;
3482 tree ppmask = NULL_TREE;
3483 tree cmask = NULL_TREE;
3484 tree pmask = NULL_TREE;
3485 gfc_actual_arglist *arg;
3487 /* the WHERE statement or the WHERE construct statement. */
3488 cblock = code->block;
3490 /* As the mask array can be very big, prefer compact boolean types. */
3491 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3493 /* Determine which temporary masks are needed. */
3496 /* One clause: No ELSEWHEREs. */
3497 need_cmask = (cblock->next != 0);
3500 else if (cblock->block->block)
3502 /* Three or more clauses: Conditional ELSEWHEREs. */
3506 else if (cblock->next)
3508 /* Two clauses, the first non-empty. */
3510 need_pmask = (mask != NULL_TREE
3511 && cblock->block->next != 0);
3513 else if (!cblock->block->next)
3515 /* Two clauses, both empty. */
3519 /* Two clauses, the first empty, the second non-empty. */
3522 need_cmask = (cblock->block->expr1 != 0);
3531 if (need_cmask || need_pmask)
3533 /* Calculate the size of temporary needed by the mask-expr. */
3534 gfc_init_block (&inner_size_body);
3535 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
3536 &inner_size_body, &lss, &rss);
3538 /* Calculate the total size of temporary needed. */
3539 size = compute_overall_iter_number (nested_forall_info, inner_size,
3540 &inner_size_body, block);
3542 /* Check whether the size is negative. */
/* Clamp a negative extent (zero-trip loop) to zero before
   allocating, so the temporary allocation never underflows.  */
3543 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3544 gfc_index_zero_node);
3545 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3546 gfc_index_zero_node, size);
3547 size = gfc_evaluate_now (size, block);
3549 /* Allocate temporary for WHERE mask if needed. */
3551 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3554 /* Allocate temporary for !mask if needed. */
3556 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3562 /* Each time around this loop, the where clause is conditional
3563 on the value of mask and invert, which are updated at the
3564 bottom of the loop. */
3566 /* Has mask-expr. */
3569 /* Ensure that the WHERE mask will be evaluated exactly once.
3570 If there are no statements in this WHERE/ELSEWHERE clause,
3571 then we don't need to update the control mask (cmask).
3572 If this is the last clause of the WHERE construct, then
3573 we don't need to update the pending control mask (pmask). */
3575 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3577 cblock->next ? cmask : NULL_TREE,
3578 cblock->block ? pmask : NULL_TREE,
3581 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3583 (cblock->next || cblock->block)
3584 ? cmask : NULL_TREE,
3585 NULL_TREE, mask_type, block);
3589 /* It's a final elsewhere-stmt. No mask-expr is present. */
3593 /* The body of this where clause are controlled by cmask with
3594 sense specified by invert. */
3596 /* Get the assignment statement of a WHERE statement, or the first
3597 statement in where-body-construct of a WHERE construct. */
3598 cnext = cblock->next;
3603 /* WHERE assignment statement. */
3604 case EXEC_ASSIGN_CALL:
/* Defined assignment: dig the lhs/rhs out of the actual
   argument list of the resolved call.  */
3606 arg = cnext->ext.actual;
3607 expr1 = expr2 = NULL;
3608 for (; arg; arg = arg->next)
3620 expr1 = cnext->expr1;
3621 expr2 = cnext->expr2;
3623 if (nested_forall_info != NULL)
3625 need_temp = gfc_check_dependency (expr1, expr2, 0);
3626 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3627 gfc_trans_assign_need_temp (expr1, expr2,
3629 nested_forall_info, block);
3632 /* Variables to control maskexpr. */
3633 count1 = gfc_create_var (gfc_array_index_type, "count1");
3634 count2 = gfc_create_var (gfc_array_index_type, "count2");
3635 gfc_add_modify (block, count1, gfc_index_zero_node);
3636 gfc_add_modify (block, count2, gfc_index_zero_node);
3638 tmp = gfc_trans_where_assign (expr1, expr2,
3643 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3645 gfc_add_expr_to_block (block, tmp);
3650 /* Variables to control maskexpr. */
3651 count1 = gfc_create_var (gfc_array_index_type, "count1");
3652 count2 = gfc_create_var (gfc_array_index_type, "count2");
3653 gfc_add_modify (block, count1, gfc_index_zero_node);
3654 gfc_add_modify (block, count2, gfc_index_zero_node);
3656 tmp = gfc_trans_where_assign (expr1, expr2,
3660 gfc_add_expr_to_block (block, tmp);
3665 /* WHERE or WHERE construct is part of a where-body-construct. */
/* Recurse for a nested WHERE, passing the current control
   mask down as the new execution mask.  */
3667 gfc_trans_where_2 (cnext, cmask, invert,
3668 nested_forall_info, block);
3675 /* The next statement within the same where-body-construct. */
3676 cnext = cnext->next;
3678 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3679 cblock = cblock->block;
3680 if (mask == NULL_TREE)
3682 /* If we're the initial WHERE, we can simply invert the sense
3683 of the current mask to obtain the "mask" for the remaining
3690 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3696 /* If we allocated a pending mask array, deallocate it now. */
3699 tmp = gfc_call_free (ppmask);
3700 gfc_add_expr_to_block (block, tmp);
3703 /* If we allocated a current mask array, deallocate it now. */
3706 tmp = gfc_call_free (pcmask);
3707 gfc_add_expr_to_block (block, tmp);
3711 /* Translate a simple WHERE construct or statement without dependencies.
3712 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3713 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3714 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
/* Fast path: instead of materializing mask temporaries as in
   gfc_trans_where_2, both assignments are coalesced into ONE
   scalarized loop whose body is a COND_EXPR selecting the then- or
   else-assignment per element.  Only valid when the caller
   (gfc_trans_where) has proven there are no dependencies.  */
3717 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3719 stmtblock_t block, body;
3720 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3721 tree tmp, cexpr, tstmt, estmt;
3722 gfc_ss *css, *tdss, *tsss;
3723 gfc_se cse, tdse, tsse, edse, esse;
3728 /* Allow the scalarizer to workshare simple where loops. */
3729 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
3730 ompws_flags |= OMPWS_SCALARIZER_WS;
3732 cond = cblock->expr1;
3733 tdst = cblock->next->expr1;
3734 tsrc = cblock->next->expr2;
3735 edst = eblock ? eblock->next->expr1 : NULL;
3736 esrc = eblock ? eblock->next->expr2 : NULL;
3738 gfc_start_block (&block);
3739 gfc_init_loopinfo (&loop);
3741 /* Handle the condition. */
3742 gfc_init_se (&cse, NULL);
3743 css = gfc_walk_expr (cond);
3744 gfc_add_ss_to_loop (&loop, css);
3746 /* Handle the then-clause. */
3747 gfc_init_se (&tdse, NULL);
3748 gfc_init_se (&tsse, NULL);
3749 tdss = gfc_walk_expr (tdst);
3750 tsss = gfc_walk_expr (tsrc);
/* A scalar RHS still needs an SS entry so the scalarizer can
   carry it through the loop.  */
3751 if (tsss == gfc_ss_terminator)
3753 tsss = gfc_get_ss ();
3755 tsss->next = gfc_ss_terminator;
3756 tsss->type = GFC_SS_SCALAR;
3759 gfc_add_ss_to_loop (&loop, tdss);
3760 gfc_add_ss_to_loop (&loop, tsss);
3764 /* Handle the else clause. */
3765 gfc_init_se (&edse, NULL);
3766 gfc_init_se (&esse, NULL);
3767 edss = gfc_walk_expr (edst);
3768 esss = gfc_walk_expr (esrc);
3769 if (esss == gfc_ss_terminator)
3771 esss = gfc_get_ss ();
3773 esss->next = gfc_ss_terminator;
3774 esss->type = GFC_SS_SCALAR;
3777 gfc_add_ss_to_loop (&loop, edss);
3778 gfc_add_ss_to_loop (&loop, esss);
3781 gfc_conv_ss_startstride (&loop);
3782 gfc_conv_loop_setup (&loop, &tdst->where);
3784 gfc_mark_ss_chain_used (css, 1);
3785 gfc_mark_ss_chain_used (tdss, 1);
3786 gfc_mark_ss_chain_used (tsss, 1);
3789 gfc_mark_ss_chain_used (edss, 1);
3790 gfc_mark_ss_chain_used (esss, 1);
3793 gfc_start_scalarized_body (&loop, &body);
3795 gfc_copy_loopinfo_to_se (&cse, &loop);
3796 gfc_copy_loopinfo_to_se (&tdse, &loop);
3797 gfc_copy_loopinfo_to_se (&tsse, &loop);
3803 gfc_copy_loopinfo_to_se (&edse, &loop);
3804 gfc_copy_loopinfo_to_se (&esse, &loop);
3809 gfc_conv_expr (&cse, cond);
3810 gfc_add_block_to_block (&body, &cse.pre);
3813 gfc_conv_expr (&tsse, tsrc);
3814 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3816 gfc_conv_tmp_array_ref (&tdse);
3817 gfc_advance_se_ss_chain (&tdse);
3820 gfc_conv_expr (&tdse, tdst);
3824 gfc_conv_expr (&esse, esrc);
3825 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3827 gfc_conv_tmp_array_ref (&edse);
3828 gfc_advance_se_ss_chain (&edse);
3831 gfc_conv_expr (&edse, edst);
/* Per-element select: where cond is true do the then-assignment,
   otherwise the else-assignment (or nothing without ELSEWHERE).  */
3834 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3835 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3836 : build_empty_stmt ();
3837 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3838 gfc_add_expr_to_block (&body, tmp);
3839 gfc_add_block_to_block (&body, &cse.post);
3841 gfc_trans_scalarizing_loops (&loop, &body);
3842 gfc_add_block_to_block (&block, &loop.pre);
3843 gfc_add_block_to_block (&block, &loop.post);
3844 gfc_cleanup_loop (&loop);
3846 return gfc_finish_block (&block);
3849 /* As the WHERE or WHERE construct statement can be nested, we call
3850 gfc_trans_where_2 to do the translation, and pass the initial
3851 NULL values for both the control mask and the pending control mask. */
3854 gfc_trans_where (gfc_code * code)
/* NOTE(review): this extraction is missing some original lines
   (declarations, braces and condition fragments); the numbers at the
   left margin are the original file's line numbers.  */
/* Fast path: if the WHERE (or WHERE/ELSEWHERE) body is a single
   assignment with no cross dependencies, gfc_trans_where_3 can emit a
   single unmasked loop instead of the general masked translation.  */
3860 cblock = code->block;
3862 && cblock->next->op == EXEC_ASSIGN
3863 && !cblock->next->next)
3865 eblock = cblock->block;
/* Case 1: plain WHERE with no ELSEWHERE block.  */
3868 /* A simple "WHERE (cond) x = y" statement or block is
3869 dependence free if cond is not dependent upon writing x,
3870 and the source y is unaffected by the destination x. */
3871 if (!gfc_check_dependency (cblock->next->expr1,
3873 && !gfc_check_dependency (cblock->next->expr1,
3874 cblock->next->expr2, 0))
3875 return gfc_trans_where_3 (cblock, NULL);
/* Case 2: a WHERE/ELSEWHERE pair, each with a single assignment body.  */
3877 else if (!eblock->expr1
3880 && eblock->next->op == EXEC_ASSIGN
3881 && !eblock->next->next)
3883 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3884 block is dependence free if cond is not dependent on writes
3885 to x1 and x2, y1 is not dependent on writes to x2, and y2
3886 is not dependent on writes to x1, and both y's are not
3887 dependent upon their own x's. In addition to this, the
3888 final two dependency checks below exclude all but the same
3889 array reference if the where and elsewhere destinations
3890 are the same. In short, this is VERY conservative and this
3891 is needed because the two loops, required by the standard
3892 are coalesced in gfc_trans_where_3. */
3893 if (!gfc_check_dependency(cblock->next->expr1,
3895 && !gfc_check_dependency(eblock->next->expr1,
3897 && !gfc_check_dependency(cblock->next->expr1,
3898 eblock->next->expr2, 1)
3899 && !gfc_check_dependency(eblock->next->expr1,
3900 cblock->next->expr2, 1)
3901 && !gfc_check_dependency(cblock->next->expr1,
3902 cblock->next->expr2, 1)
3903 && !gfc_check_dependency(eblock->next->expr1,
3904 eblock->next->expr2, 1)
3905 && !gfc_check_dependency(cblock->next->expr1,
3906 eblock->next->expr1, 0)
3907 && !gfc_check_dependency(eblock->next->expr1,
3908 cblock->next->expr1, 0))
3909 return gfc_trans_where_3 (cblock, eblock);
/* General case: fall back to the fully masked translation, starting
   with NULL control and pending-control masks.  */
3913 gfc_start_block (&block);
3915 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3917 return gfc_finish_block (&block);
3921 /* CYCLE a DO loop. The label decl has already been created by
3922 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
3923 node at the head of the loop. We must mark the label as used. */
3926 gfc_trans_cycle (gfc_code * code)
3930 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3931 TREE_USED (cycle_label) = 1;
3932 return build1_v (GOTO_EXPR, cycle_label);
3936 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3937 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3941 gfc_trans_exit (gfc_code * code)
3945 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3946 TREE_USED (exit_label) = 1;
3947 return build1_v (GOTO_EXPR, exit_label);
3951 /* Translate the ALLOCATE statement. */
3954 gfc_trans_allocate (gfc_code * code)
/* NOTE(review): extraction has dropped some original lines
   (declarations, braces); left-margin numbers are original file lines.
   code->expr1 is the STAT= variable and code->expr2 the ERRMSG=
   variable, judging by their use below.  */
/* Nothing to generate when the allocation list is empty.  */
3966 if (!code->ext.alloc_list)
3969 pstat = stat = error_label = tmp = NULL_TREE;
3971 gfc_start_block (&block);
3973 /* Either STAT= and/or ERRMSG is present. */
3974 if (code->expr1 || code->expr2)
/* Create an int4 status variable, a pointer to it for the library
   calls, and a label to jump to as soon as one allocation fails.  */
3976 tree gfc_int4_type_node = gfc_get_int_type (4);
3978 stat = gfc_create_var (gfc_int4_type_node, "stat");
3979 pstat = gfc_build_addr_expr (NULL_TREE, stat);
3981 error_label = gfc_build_label_decl (NULL_TREE);
3982 TREE_USED (error_label) = 1;
/* Allocate each object in the ALLOCATE list in turn.  */
3985 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3989 gfc_init_se (&se, NULL);
3990 gfc_start_block (&se.pre);
3992 se.want_pointer = 1;
3993 se.descriptor_only = 1;
3994 gfc_conv_expr (&se, expr);
/* gfc_array_allocate handles array objects itself; when it declines,
   this is a scalar or derived-type allocation.  */
3996 if (!gfc_array_allocate (&se, expr, pstat))
3998 /* A scalar or derived type. */
3999 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
/* Characters with no known compile-time size use the run-time
   string length instead.  */
4001 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
4002 tmp = se.string_length;
4004 tmp = gfc_allocate_with_status (&se.pre, tmp, pstat)_
4005 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
4006 fold_convert (TREE_TYPE (se.expr), tmp));
4007 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT=/ERRMSG=, bail out to the error label on the first
   failed allocation (stat != 0).  */
4009 if (code->expr1 || code->expr2)
4011 tmp = build1_v (GOTO_EXPR, error_label);
4012 parm = fold_build2 (NE_EXPR, boolean_type_node,
4013 stat, build_int_cst (TREE_TYPE (stat), 0));
4014 tmp = fold_build3 (COND_EXPR, void_type_node,
4015 parm, tmp, build_empty_stmt ());
4016 gfc_add_expr_to_block (&se.pre, tmp);
/* Derived types with allocatable components need those components
   nullified so later (de)allocation sees a defined state.  */
4019 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
4021 tmp = build_fold_indirect_ref (se.expr);
4022 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
4023 gfc_add_expr_to_block (&se.pre, tmp);
4028 tmp = gfc_finish_block (&se.pre);
4029 gfc_add_expr_to_block (&block, tmp);
/* Error landing pad; then assign the status code to the STAT=
   variable (code->expr1).  */
4035 tmp = build1_v (LABEL_EXPR, error_label);
4036 gfc_add_expr_to_block (&block, tmp);
4038 gfc_init_se (&se, NULL);
4039 gfc_conv_expr_lhs (&se, code->expr1);
4040 tmp = convert (TREE_TYPE (se.expr), stat);
4041 gfc_add_modify (&block, se.expr, tmp);
/* ERRMSG= handling: copy a fixed message into the user's character
   variable, truncated to its length, but only when stat != 0.  */
4047 /* A better error message may be possible, but not required. */
4048 const char *msg = "Attempt to allocate an allocated object";
4049 tree errmsg, slen, dlen;
4051 gfc_init_se (&se, NULL);
4052 gfc_conv_expr_lhs (&se, code->expr2);
4054 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4056 gfc_add_modify (&block, errmsg,
4057 gfc_build_addr_expr (pchar_type_node,
4058 gfc_build_localized_cstring_const (msg)));
/* Copy min (strlen (msg), len (ERRMSG variable)) bytes.  */
4060 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4061 dlen = gfc_get_expr_charlen (code->expr2);
4062 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4064 dlen = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
4065 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4067 tmp = fold_build2 (NE_EXPR, boolean_type_node, stat,
4068 build_int_cst (TREE_TYPE (stat), 0));
4070 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt ());
4072 gfc_add_expr_to_block (&block, tmp);
4075 return gfc_finish_block (&block);
4079 /* Translate a DEALLOCATE statement. */
4082 gfc_trans_deallocate (gfc_code *code)
/* NOTE(review): extraction has dropped some original lines
   (declarations, braces); left-margin numbers are original file lines.
   code->expr1 is the STAT= variable and code->expr2 the ERRMSG=
   variable, judging by their use below.  */
4087 tree apstat, astat, pstat, stat, tmp;
4090 pstat = apstat = stat = astat = tmp = NULL_TREE;
4092 gfc_start_block (&block);
4094 /* Count the number of failed deallocations. If deallocate() was
4095 called with STAT= , then set STAT to the count. If deallocate
4096 was called with ERRMSG, then set ERRMSG to a string. */
4097 if (code->expr1 || code->expr2)
/* "stat" holds the status of the most recent deallocation; "astat"
   accumulates failures across the whole list.  */
4099 tree gfc_int4_type_node = gfc_get_int_type (4);
4101 stat = gfc_create_var (gfc_int4_type_node, "stat");
4102 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4104 /* Running total of possible deallocation failures. */
4105 astat = gfc_create_var (gfc_int4_type_node, "astat");
4106 apstat = gfc_build_addr_expr (NULL_TREE, astat);
4108 /* Initialize astat to 0. */
4109 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
/* Deallocate each object in the DEALLOCATE list in turn.  */
4112 for (al = code->ext.alloc_list; al != NULL; al = al->next)
4115 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4117 gfc_init_se (&se, NULL);
4118 gfc_start_block (&se.pre);
4120 se.want_pointer = 1;
4121 se.descriptor_only = 1;
4122 gfc_conv_expr (&se, expr);
/* Derived types with allocatable components: free the components
   first, unless the object is reached through a pointer (then the
   components are not owned here).  */
4124 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
4127 gfc_ref *last = NULL;
/* Find the last component reference, if any.  */
4128 for (ref = expr->ref; ref; ref = ref->next)
4129 if (ref->type == REF_COMPONENT)
4132 /* Do not deallocate the components of a derived type
4133 ultimate pointer component. */
4134 if (!(last && last->u.c.component->attr.pointer)
4135 && !(!last && expr->symtree->n.sym->attr.pointer))
4137 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
4139 gfc_add_expr_to_block (&se.pre, tmp);
/* Arrays are freed via the descriptor; scalars via the generic
   status-reporting helper.  */
4144 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4147 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4148 gfc_add_expr_to_block (&se.pre, tmp);
/* Null out the pointer after freeing it.  */
4150 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4151 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4154 gfc_add_expr_to_block (&se.pre, tmp);
4156 /* Keep track of the number of failed deallocations by adding stat
4157 of the last deallocation to the running total. */
4158 if (code->expr1 || code->expr2)
4160 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4161 gfc_add_modify (&se.pre, astat, apstat);
4164 tmp = gfc_finish_block (&se.pre);
4165 gfc_add_expr_to_block (&block, tmp);
/* Assign the accumulated failure count to the STAT= variable
   (code->expr1).  */
4172 gfc_init_se (&se, NULL);
4173 gfc_conv_expr_lhs (&se, code->expr1);
4174 tmp = convert (TREE_TYPE (se.expr), astat);
4175 gfc_add_modify (&block, se.expr, tmp);
/* ERRMSG= handling: copy a fixed message into the user's character
   variable, truncated to its length, but only when astat != 0.  */
4181 /* A better error message may be possible, but not required. */
4182 const char *msg = "Attempt to deallocate an unallocated object";
4183 tree errmsg, slen, dlen;
4185 gfc_init_se (&se, NULL);
4186 gfc_conv_expr_lhs (&se, code->expr2);
4188 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4190 gfc_add_modify (&block, errmsg,
4191 gfc_build_addr_expr (pchar_type_node,
4192 gfc_build_localized_cstring_const (msg)));
/* Copy min (strlen (msg), len (ERRMSG variable)) bytes.  */
4194 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4195 dlen = gfc_get_expr_charlen (code->expr2);
4196 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4198 dlen = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
4199 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4201 tmp = fold_build2 (NE_EXPR, boolean_type_node, astat,
4202 build_int_cst (TREE_TYPE (astat), 0));
4204 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt ());
4206 gfc_add_expr_to_block (&block, tmp);
4209 return gfc_finish_block (&block);