1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
40 #include "dependency.h"
/* Per-iteration state used when translating FORALL constructs; records
   chain to the next iteration variable via NEXT.
   NOTE(review): most members of this struct are not visible in this
   chunk -- the declaration appears truncated here.  */
42 typedef struct iter_info
48 struct iter_info *next;
/* Nesting record for a FORALL construct; PREV_NEST links back to the
   enclosing FORALL level.
   NOTE(review): remaining members not visible in this chunk.  */
52 typedef struct forall_info
59 struct forall_info *prev_nest;
/* Forward declaration of the WHERE translation worker defined later in
   this file; needed because FORALL translation calls into it.  */
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
66 /* Translate a F95 label number to a LABEL_EXPR. */
/* Emits the GENERIC label for a statement label so that GOTOs resolved
   elsewhere can target it.  NOTE(review): the return-type line and the
   function braces are not visible in this chunk (appears truncated).  */
69 gfc_trans_label_here (gfc_code * code)
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables. For variables in common blocks this
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only variables that were the target of an ASSIGN statement carry the
   auxiliary label decls; anything else is a front-end bug.  */
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deals with variable in common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deals with dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
92 /* Translate a label assignment statement. */
/* Stores either the address of a target label (with length -1 as a
   sentinel) or the text of a FORMAT label (with its real length) into the
   auxiliary ASSIGN variables found by gfc_conv_label_variable.
   NOTE(review): local declarations and some braces are not visible in
   this chunk (appears truncated).  */
95 gfc_trans_label_assign (gfc_code * code)
104 /* Start a new block. */
105 gfc_init_se (&se, NULL);
106 gfc_start_block (&se.pre);
107 gfc_conv_label_variable (&se, code->expr1);
109 len = GFC_DECL_STRING_LEN (se.expr);
110 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
112 label_tree = gfc_get_label_decl (code->label1);
114 if (code->label1->defined == ST_LABEL_TARGET)
/* Branch target: store its address; length -1 marks "not a format".  */
116 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
117 len_tree = integer_minus_one_node;
/* FORMAT label: store the format string itself and its length.  */
121 gfc_expr *format = code->label1->format;
123 label_len = format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
125 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
126 format->value.character.string);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
130 gfc_add_modify (&se.pre, len, len_tree);
131 gfc_add_modify (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
136 /* Translate a GOTO statement. */
/* Handles both plain GOTO (direct jump) and assigned GOTO (indirect jump
   through the address stored by a prior ASSIGN, with a runtime check that
   the stored label is a branch target, i.e. string length == -1).
   NOTE(review): some local declarations are not visible in this chunk
   (appears truncated).  */
139 gfc_trans_goto (gfc_code * code)
141 locus loc = code->loc;
/* Simple unconditional GOTO to a known label.  */
147 if (code->label1 != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
/* Assigned GOTO.  */
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr1);
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
158 "Assigned label is not a target label");
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
162 /* We're going to ignore a label list. It does not really change the
163 statement's semantics (because it is just a further restriction on
164 what's legal code); before, we were comparing label addresses here, but
165 that's a very fragile business and may break with optimization. So
168 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
169 gfc_add_expr_to_block (&se.pre, target);
170 return gfc_finish_block (&se.pre);
174 /* Translate an ENTRY statement. Just adds a label for this entry point. */
/* NOTE(review): return-type line and braces not visible in this chunk.  */
176 gfc_trans_entry (gfc_code * code)
178 return build1_v (LABEL_EXPR, code->ext.entry->label);
182 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
183 elemental subroutines. Make temporaries for output arguments if any such
184 dependencies are found. Output arguments are chosen because internal_unpack
185 can be used, as is, to copy the result back to the variable. */
/* SE collects setup (pre) and copy-back (post) code; LOOPSE carries the
   scalarization state of the enclosing elemental loop; SYM/ARG describe
   the called subroutine and its actual arguments; CHECK_VARIABLE selects
   the dependency-checking mode.
   NOTE(review): several declarations, braces and statements are not
   visible in this chunk (appears truncated).  */
187 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
188 gfc_symbol * sym, gfc_actual_arglist * arg,
189 gfc_dep_check check_variable)
191 gfc_actual_arglist *arg0;
193 gfc_formal_arglist *formal;
194 gfc_loopinfo tmp_loop;
/* Nothing to do unless the call is being scalarized.  */
205 if (loopse->ss == NULL)
210 formal = sym->formal;
212 /* Loop over all the arguments testing for dependencies. */
213 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
219 /* Obtain the info structure for the current argument. */
221 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
225 info = &ss->data.info;
229 /* If there is a dependency, create a temporary and use it
230 instead of the variable. */
231 fsym = formal ? formal->sym : NULL;
232 if (e->expr_type == EXPR_VARIABLE
234 && fsym->attr.intent != INTENT_IN
235 && gfc_check_fncall_dependency (e, fsym->attr.intent,
236 sym, arg0, check_variable))
238 tree initial, temptype;
239 stmtblock_t temp_post;
241 /* Make a local loopinfo for the temporary creation, so that
242 none of the other ss->info's have to be renormalized. */
243 gfc_init_loopinfo (&tmp_loop);
244 for (n = 0; n < info->dimen; n++)
246 tmp_loop.to[n] = loopse->loop->to[n];
247 tmp_loop.from[n] = loopse->loop->from[n];
248 tmp_loop.order[n] = loopse->loop->order[n];
251 /* Obtain the argument descriptor for unpacking. */
252 gfc_init_se (&parmse, NULL);
253 parmse.want_pointer = 1;
254 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
255 gfc_add_block_to_block (&se->pre, &parmse.pre);
257 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
258 initialize the array temporary with a copy of the values. */
259 if (fsym->attr.intent == INTENT_INOUT
260 || (fsym->ts.type ==BT_DERIVED
261 && fsym->attr.intent == INTENT_OUT))
262 initial = parmse.expr;
266 /* Find the type of the temporary to create; we don't use the type
267 of e itself as this breaks for subcomponent-references in e (where
268 the type of e is that of the final reference, but parmse.expr's
269 type corresponds to the full derived-type). */
270 /* TODO: Fix this somehow so we don't need a temporary of the whole
271 array but instead only the components referenced. */
272 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
273 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
274 temptype = TREE_TYPE (temptype);
275 temptype = gfc_get_element_type (temptype);
277 /* Generate the temporary. Cleaning up the temporary should be the
278 very last thing done, so we add the code to a new block and add it
279 to se->post as last instructions. */
280 size = gfc_create_var (gfc_array_index_type, NULL);
281 data = gfc_create_var (pvoid_type_node, NULL);
282 gfc_init_block (&temp_post);
283 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
284 &tmp_loop, info, temptype,
288 gfc_add_modify (&se->pre, size, tmp);
289 tmp = fold_convert (pvoid_type_node, info->data);
290 gfc_add_modify (&se->pre, data, tmp);
292 /* Calculate the offset for the temporary. */
293 offset = gfc_index_zero_node;
294 for (n = 0; n < info->dimen; n++)
296 tmp = gfc_conv_descriptor_stride_get (info->descriptor,
298 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
299 loopse->loop->from[n], tmp);
300 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
303 info->offset = gfc_create_var (gfc_array_index_type, NULL);
304 gfc_add_modify (&se->pre, info->offset, offset);
306 /* Copy the result back using unpack. */
307 tmp = build_call_expr_loc (input_location,
308 gfor_fndecl_in_unpack, 2, parmse.expr, data);
309 gfc_add_expr_to_block (&se->post, tmp);
311 /* parmse.pre is already added above. */
312 gfc_add_block_to_block (&se->post, &parmse.post);
313 gfc_add_block_to_block (&se->post, &temp_post);
319 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
/* DEPENDENCY_CHECK requests dependency analysis (used for operator
   assignment); MASK/COUNT1/INVERT carry WHERE-mask state when the call is
   made under a mask.  Elemental calls with array arguments are scalarized
   via the gfc_loopinfo machinery.
   NOTE(review): several declarations, braces and statements are not
   visible in this chunk (appears truncated).  */
322 gfc_trans_call (gfc_code * code, bool dependency_check,
323 tree mask, tree count1, bool invert)
327 int has_alternate_specifier;
328 gfc_dep_check check_variable;
329 tree index = NULL_TREE;
330 tree maskexpr = NULL_TREE;
333 /* A CALL starts a new block because the actual arguments may have to
334 be evaluated first. */
335 gfc_init_se (&se, NULL);
336 gfc_start_block (&se.pre);
338 gcc_assert (code->resolved_sym);
340 ss = gfc_ss_terminator;
341 if (code->resolved_sym->attr.elemental)
342 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
344 /* Is not an elemental subroutine call with array valued arguments. */
345 if (ss == gfc_ss_terminator)
348 /* Translate the call. */
349 has_alternate_specifier
350 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
351 code->expr1, NULL_TREE);
353 /* A subroutine without side-effect, by definition, does nothing!
   Force side effects so the call is not optimized away. */
354 TREE_SIDE_EFFECTS (se.expr) = 1;
356 /* Chain the pieces together and return the block. */
357 if (has_alternate_specifier)
359 gfc_code *select_code;
/* An alternate return is lowered as an integer result consumed by the
   SELECT that the resolver placed immediately after the call.  */
361 select_code = code->next;
362 gcc_assert(select_code->op == EXEC_SELECT);
363 sym = select_code->expr1->symtree->n.sym;
364 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
365 if (sym->backend_decl == NULL)
366 sym->backend_decl = gfc_get_symbol_decl (sym);
367 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
370 gfc_add_expr_to_block (&se.pre, se.expr);
372 gfc_add_block_to_block (&se.pre, &se.post);
377 /* An elemental subroutine call with array valued arguments has
385 /* gfc_walk_elemental_function_args renders the ss chain in the
386 reverse order to the actual argument order. */
387 ss = gfc_reverse_ss (ss);
389 /* Initialize the loop. */
390 gfc_init_se (&loopse, NULL);
391 gfc_init_loopinfo (&loop);
392 gfc_add_ss_to_loop (&loop, ss);
394 gfc_conv_ss_startstride (&loop);
395 /* TODO: gfc_conv_loop_setup generates a temporary for vector
396 subscripts. This could be prevented in the elemental case
397 as temporaries are handled separately
398 (below in gfc_conv_elemental_dependencies). */
399 gfc_conv_loop_setup (&loop, &code->expr1->where);
400 gfc_mark_ss_chain_used (ss, 1);
402 /* Convert the arguments, checking for dependencies. */
403 gfc_copy_loopinfo_to_se (&loopse, &loop);
406 /* For operator assignment, do dependency checking. */
407 if (dependency_check)
408 check_variable = ELEM_CHECK_VARIABLE;
410 check_variable = ELEM_DONT_CHECK_VARIABLE;
412 gfc_init_se (&depse, NULL);
413 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
414 code->ext.actual, check_variable);
416 gfc_add_block_to_block (&loop.pre, &depse.pre);
417 gfc_add_block_to_block (&loop.post, &depse.post);
419 /* Generate the loop body. */
420 gfc_start_scalarized_body (&loop, &body);
421 gfc_init_block (&block);
425 /* Form the mask expression according to the mask. */
427 maskexpr = gfc_build_array_ref (mask, index, NULL);
429 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
433 /* Add the subroutine call to the block. */
434 gfc_conv_procedure_call (&loopse, code->resolved_sym,
435 code->ext.actual, code->expr1,
/* Guard the call with the mask and keep the mask counter in sync.  */
440 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
441 build_empty_stmt (input_location));
442 gfc_add_expr_to_block (&loopse.pre, tmp);
443 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
444 count1, gfc_index_one_node);
445 gfc_add_modify (&loopse.pre, count1, tmp);
448 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
450 gfc_add_block_to_block (&block, &loopse.pre);
451 gfc_add_block_to_block (&block, &loopse.post);
453 /* Finish up the loop block and the loop. */
454 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
455 gfc_trans_scalarizing_loops (&loop, &body);
456 gfc_add_block_to_block (&se.pre, &loop.pre);
457 gfc_add_block_to_block (&se.pre, &loop.post);
458 gfc_add_block_to_block (&se.pre, &se.post);
459 gfc_cleanup_loop (&loop);
462 return gfc_finish_block (&se.pre);
466 /* Translate the RETURN statement. */
/* RETURN in a function stores the alternate-return/result expression into
   the fake result decl, then jumps to the shared return label; a bare
   RETURN just jumps.  NOTE(review): several declarations and braces are
   not visible in this chunk (appears truncated).  */
469 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
477 /* If code->expr is not NULL, this return statement must appear
478 in a subroutine and current_fake_result_decl has already
481 result = gfc_get_fake_result_decl (NULL, 0);
/* Alternate return without a corresponding '*' dummy: warn and fall back
   to a plain jump to the return label.  */
484 gfc_warning ("An alternate return at %L without a * dummy argument",
485 &code->expr1->where);
486 return build1_v (GOTO_EXPR, gfc_get_return_label ());
489 /* Start a new block for this statement. */
490 gfc_init_se (&se, NULL);
491 gfc_start_block (&se.pre);
493 gfc_conv_expr (&se, code->expr1);
495 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
496 fold_convert (TREE_TYPE (result), se.expr));
497 gfc_add_expr_to_block (&se.pre, tmp);
499 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
500 gfc_add_expr_to_block (&se.pre, tmp);
501 gfc_add_block_to_block (&se.pre, &se.post);
502 return gfc_finish_block (&se.pre);
505 return build1_v (GOTO_EXPR, gfc_get_return_label ());
509 /* Translate the PAUSE statement. We have to translate this statement
510 to a runtime library call. */
/* PAUSE with no operand (or a numeric stop code) calls _gfortran_pause_numeric;
   PAUSE with a character operand calls _gfortran_pause_string.
   NOTE(review): some declarations and braces are not visible in this
   chunk (appears truncated).  */
513 gfc_trans_pause (gfc_code * code)
515 tree gfc_int4_type_node = gfc_get_int_type (4);
519 /* Start a new block for this statement. */
520 gfc_init_se (&se, NULL);
521 gfc_start_block (&se.pre);
524 if (code->expr1 == NULL)
526 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
527 tmp = build_call_expr_loc (input_location,
528 gfor_fndecl_pause_numeric, 1, tmp);
532 gfc_conv_expr_reference (&se, code->expr1);
533 tmp = build_call_expr_loc (input_location,
534 gfor_fndecl_pause_string, 2,
535 se.expr, se.string_length);
538 gfc_add_expr_to_block (&se.pre, tmp);
540 gfc_add_block_to_block (&se.pre, &se.post);
542 return gfc_finish_block (&se.pre);
546 /* Translate the STOP statement. We have to translate this statement
547 to a runtime library call. */
/* Mirrors gfc_trans_pause: numeric stop code goes to the *_stop_numeric
   library entry, a character operand to *_stop_string.
   NOTE(review): some declarations and braces are not visible in this
   chunk (appears truncated).  */
550 gfc_trans_stop (gfc_code * code)
552 tree gfc_int4_type_node = gfc_get_int_type (4);
556 /* Start a new block for this statement. */
557 gfc_init_se (&se, NULL);
558 gfc_start_block (&se.pre);
561 if (code->expr1 == NULL)
563 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
564 tmp = build_call_expr_loc (input_location,
565 gfor_fndecl_stop_numeric, 1, tmp);
569 gfc_conv_expr_reference (&se, code->expr1);
570 tmp = build_call_expr_loc (input_location,
571 gfor_fndecl_stop_string, 2,
572 se.expr, se.string_length);
575 gfc_add_expr_to_block (&se.pre, tmp);
577 gfc_add_block_to_block (&se.pre, &se.post);
579 return gfc_finish_block (&se.pre);
583 /* Generate GENERIC for the IF construct. This function also deals with
584 the simple IF statement, because the front end translates the IF
585 statement into an IF construct.
617 where COND_S is the simplified version of the predicate. PRE_COND_S
618 are the pre side-effects produced by the translation of the
620 We need to build the chain recursively otherwise we run into
621 problems with folding incomplete statements. */
/* NOTE(review): parts of the header comment, the return-type line, local
   declarations and braces are not visible in this chunk (truncated).  */
624 gfc_trans_if_1 (gfc_code * code)
629 /* Check for an unconditional ELSE clause. */
631 return gfc_trans_code (code->next);
633 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
634 gfc_init_se (&if_se, NULL);
635 gfc_start_block (&if_se.pre);
637 /* Calculate the IF condition expression. */
638 gfc_conv_expr_val (&if_se, code->expr1);
640 /* Translate the THEN clause. */
641 stmt = gfc_trans_code (code->next);
643 /* Translate the ELSE clause.  Recurse to build the ELSE IF chain. */
645 elsestmt = gfc_trans_if_1 (code->block);
647 elsestmt = build_empty_stmt (input_location);
649 /* Build the condition expression and add it to the condition block. */
650 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
652 gfc_add_expr_to_block (&if_se.pre, stmt);
654 /* Finish off this statement. */
655 return gfc_finish_block (&if_se.pre);
/* Public entry point for IF translation; delegates to gfc_trans_if_1.
   NOTE(review): return-type line and braces not visible in this chunk.  */
659 gfc_trans_if (gfc_code * code)
661 /* Ignore the top EXEC_IF, it only announces an IF construct. The
662 actual code we must translate is in code->block. */
664 return gfc_trans_if_1 (code->block);
668 /* Translate an arithmetic IF expression.
670 IF (cond) label1, label2, label3 translates to
682 An optimized version can be generated in case of equal labels.
683 E.g., if label1 is equal to label2, we can translate it to
/* NOTE(review): parts of the header comment, the return-type line, local
   declarations and braces are not visible in this chunk (truncated).  */
692 gfc_trans_arithmetic_if (gfc_code * code)
700 /* Start a new block. */
701 gfc_init_se (&se, NULL);
702 gfc_start_block (&se.pre);
704 /* Pre-evaluate COND. */
705 gfc_conv_expr_val (&se, code->expr1);
706 se.expr = gfc_evaluate_now (se.expr, &se.pre);
708 /* Build something to compare with. */
709 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
711 if (code->label1->value != code->label2->value)
713 /* If (cond < 0) take branch1 else take branch2.
714 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
715 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
716 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
718 if (code->label1->value != code->label3->value)
719 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
721 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
723 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
/* label1 == label2: a single unconditional jump covers both cases.  */
726 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
728 if (code->label1->value != code->label3->value
729 && code->label2->value != code->label3->value)
731 /* if (cond <= 0) take branch1 else take branch2. */
732 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
733 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
734 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
737 /* Append the COND_EXPR to the evaluation of COND, and return. */
738 gfc_add_expr_to_block (&se.pre, branch1);
739 return gfc_finish_block (&se.pre);
743 /* Translate a BLOCK construct. This is basically what we would do for a
/* Translates the BLOCK's namespace: set up block-local variables, emit
   the contained code, then wrap with deferred-variable handling.
   NOTE(review): declarations of sym/ns/body/tmp and braces are not
   visible in this chunk (appears truncated).  */
747 gfc_trans_block_construct (gfc_code* code)
759 gcc_assert (!sym->tlink);
762 gfc_start_block (&body);
763 gfc_process_block_locals (ns);
765 tmp = gfc_trans_code (ns->code);
766 tmp = gfc_trans_deferred_vars (sym, tmp);
768 gfc_add_expr_to_block (&body, tmp);
769 return gfc_finish_block (&body);
773 /* Translate the simple DO construct. This is where the loop variable has
774 integer type and step +-1. We can't use this in the general case
775 because integer overflow and floating point errors could give incorrect
777 We translate a do loop from:
779 DO dovar = from, to, step
785 [Evaluate loop bounds and step]
787 if ((step > 0) ? (dovar <= to) : (dovar >= to))
793 cond = (dovar == to);
795 if (cond) goto end_label;
800 This helps the optimizers by avoiding the extra induction variable
801 used in the general case. */
/* NOTE(review): parts of the pseudocode comment, local declarations and
   braces are not visible in this chunk (appears truncated).  */
804 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
805 tree from, tree to, tree step)
811 tree saved_dovar = NULL;
815 type = TREE_TYPE (dovar);
817 /* Initialize the DO variable: dovar = from. */
818 gfc_add_modify (pblock, dovar, from);
820 /* Save value for do-tinkering checking. */
821 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
823 saved_dovar = gfc_create_var (type, ".saved_dovar");
824 gfc_add_modify (pblock, saved_dovar, dovar);
827 /* Cycle and exit statements are implemented with gotos. */
828 cycle_label = gfc_build_label_decl (NULL_TREE);
829 exit_label = gfc_build_label_decl (NULL_TREE);
831 /* Put the labels where they can be found later. See gfc_trans_do(). */
832 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
835 gfc_start_block (&body);
837 /* Main loop body. */
838 tmp = gfc_trans_code (code->block->next);
839 gfc_add_expr_to_block (&body, tmp);
841 /* Label for cycle statements (if needed). */
842 if (TREE_USED (cycle_label))
844 tmp = build1_v (LABEL_EXPR, cycle_label);
845 gfc_add_expr_to_block (&body, tmp);
848 /* Check whether someone has modified the loop variable. */
849 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
851 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
852 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
853 "Loop variable has been modified");
856 /* Evaluate the loop condition.  Tested BEFORE the increment so the
   comparison uses the value of this iteration, avoiding overflow on
   the final increment. */
857 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
858 cond = gfc_evaluate_now (cond, &body);
860 /* Increment the loop variable. */
861 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
862 gfc_add_modify (&body, dovar, tmp);
864 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
865 gfc_add_modify (&body, saved_dovar, dovar);
868 tmp = build1_v (GOTO_EXPR, exit_label);
869 TREE_USED (exit_label) = 1;
870 tmp = fold_build3 (COND_EXPR, void_type_node,
871 cond, tmp, build_empty_stmt (input_location));
872 gfc_add_expr_to_block (&body, tmp);
874 /* Finish the loop body. */
875 tmp = gfc_finish_block (&body);
876 tmp = build1_v (LOOP_EXPR, tmp);
878 /* Only execute the loop if the number of iterations is positive. */
879 if (tree_int_cst_sgn (step) > 0)
880 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
882 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
883 tmp = fold_build3 (COND_EXPR, void_type_node,
884 cond, tmp, build_empty_stmt (input_location));
885 gfc_add_expr_to_block (pblock, tmp);
887 /* Add the exit label. */
888 tmp = build1_v (LABEL_EXPR, exit_label);
889 gfc_add_expr_to_block (pblock, tmp);
891 return gfc_finish_block (pblock);
894 /* Translate the DO construct. This obviously is one of the most
895 important ones to get right with any compiler, but especially
898 We special case some loop forms as described in gfc_trans_simple_do.
899 For other cases we implement them with a separate loop count,
900 as described in the standard.
902 We translate a do loop from:
904 DO dovar = from, to, step
910 [evaluate loop bounds and step]
911 empty = (step > 0 ? to < from : to > from);
912 countm1 = (to - from) / step;
914 if (empty) goto exit_label;
920 if (countm1 == 0) goto exit_label;
925 countm1 is an unsigned integer. It is equal to the loop count minus one,
926 because the loop count itself can overflow. */
/* NOTE(review): parts of the pseudocode comment, several declarations and
   braces are not visible in this chunk (appears truncated).  */
929 gfc_trans_do (gfc_code * code)
933 tree saved_dovar = NULL;
948 gfc_start_block (&block);
950 /* Evaluate all the expressions in the iterator. */
951 gfc_init_se (&se, NULL);
952 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
953 gfc_add_block_to_block (&block, &se.pre);
955 type = TREE_TYPE (dovar);
957 gfc_init_se (&se, NULL);
958 gfc_conv_expr_val (&se, code->ext.iterator->start);
959 gfc_add_block_to_block (&block, &se.pre);
960 from = gfc_evaluate_now (se.expr, &block);
962 gfc_init_se (&se, NULL);
963 gfc_conv_expr_val (&se, code->ext.iterator->end);
964 gfc_add_block_to_block (&block, &se.pre);
965 to = gfc_evaluate_now (se.expr, &block);
967 gfc_init_se (&se, NULL);
968 gfc_conv_expr_val (&se, code->ext.iterator->step);
969 gfc_add_block_to_block (&block, &se.pre);
970 step = gfc_evaluate_now (se.expr, &block);
972 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
974 tmp = fold_build2 (EQ_EXPR, boolean_type_node, step,
975 fold_convert (type, integer_zero_node));
976 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
977 "DO step value is zero");
980 /* Special case simple loops. */
981 if (TREE_CODE (type) == INTEGER_TYPE
982 && (integer_onep (step)
983 || tree_int_cst_equal (step, integer_minus_one_node)))
984 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
986 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
987 fold_convert (type, integer_zero_node));
989 if (TREE_CODE (type) == INTEGER_TYPE)
990 utype = unsigned_type_for (type);
992 utype = unsigned_type_for (gfc_array_index_type);
993 countm1 = gfc_create_var (utype, "countm1");
995 /* Cycle and exit statements are implemented with gotos. */
996 cycle_label = gfc_build_label_decl (NULL_TREE);
997 exit_label = gfc_build_label_decl (NULL_TREE);
998 TREE_USED (exit_label) = 1;
1000 /* Initialize the DO variable: dovar = from. */
1001 gfc_add_modify (&block, dovar, from);
1003 /* Save value for do-tinkering checking. */
1004 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1006 saved_dovar = gfc_create_var (type, ".saved_dovar");
1007 gfc_add_modify (&block, saved_dovar, dovar);
1010 /* Initialize loop count and jump to exit label if the loop is empty.
1011 This code is executed before we enter the loop body. We generate:
1012 step_sign = sign(1,step);
1023 countm1 = (to*step_sign - from*step_sign) / (step*step_sign);
1027 if (TREE_CODE (type) == INTEGER_TYPE)
1029 tree pos, neg, step_sign, to2, from2, step2;
1031 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1033 tmp = fold_build2 (LT_EXPR, boolean_type_node, step,
1034 build_int_cst (TREE_TYPE (step), 0));
1035 step_sign = fold_build3 (COND_EXPR, type, tmp,
1036 build_int_cst (type, -1),
1037 build_int_cst (type, 1));
/* Empty-loop tests: with a positive step the loop is empty when
   to < from; with a negative step, when to > from.  */
1039 tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
1040 pos = fold_build3 (COND_EXPR, void_type_node, tmp,
1041 build1_v (GOTO_EXPR, exit_label),
1042 build_empty_stmt (input_location));
1044 tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
1045 neg = fold_build3 (COND_EXPR, void_type_node, tmp,
1046 build1_v (GOTO_EXPR, exit_label),
1047 build_empty_stmt (input_location));
1048 tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
1050 gfc_add_expr_to_block (&block, tmp);
1052 /* Calculate the loop count. to-from can overflow, so
1053 we cast to unsigned. */
1055 to2 = fold_build2 (MULT_EXPR, type, step_sign, to);
1056 from2 = fold_build2 (MULT_EXPR, type, step_sign, from);
1057 step2 = fold_build2 (MULT_EXPR, type, step_sign, step);
1058 step2 = fold_convert (utype, step2);
1059 tmp = fold_build2 (MINUS_EXPR, type, to2, from2);
1060 tmp = fold_convert (utype, tmp);
1061 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp, step2);
1062 tmp = fold_build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
1063 gfc_add_expr_to_block (&block, tmp);
1067 /* TODO: We could use the same width as the real type.
1068 This would probably cause more problems that it solves
1069 when we implement "long double" types. */
/* Non-integer (real) DO variable: compute the trip count in floating
   point and truncate to the unsigned counter type.  */
1071 tmp = fold_build2 (MINUS_EXPR, type, to, from);
1072 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
1073 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
1074 gfc_add_modify (&block, countm1, tmp);
1076 /* We need a special check for empty loops:
1077 empty = (step > 0 ? to < from : to > from); */
1078 tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
1079 fold_build2 (LT_EXPR, boolean_type_node, to, from),
1080 fold_build2 (GT_EXPR, boolean_type_node, to, from));
1081 /* If the loop is empty, go directly to the exit label. */
1082 tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
1083 build1_v (GOTO_EXPR, exit_label),
1084 build_empty_stmt (input_location));
1085 gfc_add_expr_to_block (&block, tmp);
1089 gfc_start_block (&body);
1091 /* Put these labels where they can be found later. We put the
1092 labels in a TREE_LIST node (because TREE_CHAIN is already
1093 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
1094 label in TREE_VALUE (backend_decl). */
1096 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1098 /* Main loop body. */
1099 tmp = gfc_trans_code (code->block->next);
1100 gfc_add_expr_to_block (&body, tmp);
1102 /* Label for cycle statements (if needed). */
1103 if (TREE_USED (cycle_label))
1105 tmp = build1_v (LABEL_EXPR, cycle_label);
1106 gfc_add_expr_to_block (&body, tmp);
1109 /* Check whether someone has modified the loop variable. */
1110 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1112 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
1113 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1114 "Loop variable has been modified");
1117 /* Increment the loop variable. */
1118 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
1119 gfc_add_modify (&body, dovar, tmp);
1121 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1122 gfc_add_modify (&body, saved_dovar, dovar);
1124 /* End with the loop condition. Loop until countm1 == 0. */
1125 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
1126 build_int_cst (utype, 0));
1127 tmp = build1_v (GOTO_EXPR, exit_label);
1128 tmp = fold_build3 (COND_EXPR, void_type_node,
1129 cond, tmp, build_empty_stmt (input_location));
1130 gfc_add_expr_to_block (&body, tmp);
1132 /* Decrement the loop count. */
1133 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
1134 gfc_add_modify (&body, countm1, tmp);
1136 /* End of loop body. */
1137 tmp = gfc_finish_block (&body);
1139 /* The for loop itself. */
1140 tmp = build1_v (LOOP_EXPR, tmp);
1141 gfc_add_expr_to_block (&block, tmp);
1143 /* Add the exit label. */
1144 tmp = build1_v (LABEL_EXPR, exit_label);
1145 gfc_add_expr_to_block (&block, tmp);
1147 return gfc_finish_block (&block);
1151 /* Translate the DO WHILE construct.
1164 if (! cond) goto exit_label;
1170 Because the evaluation of the exit condition `cond' may have side
1171 effects, we can't do much for empty loop bodies. The backend optimizers
1172 should be smart enough to eliminate any dead loops. */
/* NOTE(review): parts of the pseudocode comment, local declarations and
   braces are not visible in this chunk (appears truncated).  */
1175 gfc_trans_do_while (gfc_code * code)
1183 /* Everything we build here is part of the loop body. */
1184 gfc_start_block (&block);
1186 /* Cycle and exit statements are implemented with gotos. */
1187 cycle_label = gfc_build_label_decl (NULL_TREE);
1188 exit_label = gfc_build_label_decl (NULL_TREE);
1190 /* Put the labels where they can be found later. See gfc_trans_do(). */
1191 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1193 /* Create a GIMPLE version of the exit condition. */
1194 gfc_init_se (&cond, NULL);
1195 gfc_conv_expr_val (&cond, code->expr1);
1196 gfc_add_block_to_block (&block, &cond.pre);
1197 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1199 /* Build "IF (! cond) GOTO exit_label". */
1200 tmp = build1_v (GOTO_EXPR, exit_label);
1201 TREE_USED (exit_label) = 1;
1202 tmp = fold_build3 (COND_EXPR, void_type_node,
1203 cond.expr, tmp, build_empty_stmt (input_location));
1204 gfc_add_expr_to_block (&block, tmp);
1206 /* The main body of the loop. */
1207 tmp = gfc_trans_code (code->block->next);
1208 gfc_add_expr_to_block (&block, tmp);
1210 /* Label for cycle statements (if needed). */
1211 if (TREE_USED (cycle_label))
1213 tmp = build1_v (LABEL_EXPR, cycle_label);
1214 gfc_add_expr_to_block (&block, tmp);
1217 /* End of loop body. */
1218 tmp = gfc_finish_block (&block);
1220 gfc_init_block (&block);
1221 /* Build the loop. */
1222 tmp = build1_v (LOOP_EXPR, tmp);
1223 gfc_add_expr_to_block (&block, tmp);
1225 /* Add the exit label. */
1226 tmp = build1_v (LABEL_EXPR, exit_label);
1227 gfc_add_expr_to_block (&block, tmp);
1229 return gfc_finish_block (&block);
1233 /* Translate the SELECT CASE construct for INTEGER case expressions,
1234 without killing all potential optimizations. The problem is that
1235 Fortran allows unbounded cases, but the back-end does not, so we
1236 need to intercept those before we enter the equivalent SWITCH_EXPR
1239 For example, we translate this,
1242 CASE (:100,101,105:115)
1252 to the GENERIC equivalent,
1256 case (minimum value for typeof(expr) ... 100:
1262 case 200 ... (maximum value for typeof(expr):
/* NOTE(review): parts of the example comment, local declarations and
   braces are not visible in this chunk (appears truncated).  */
1279 gfc_trans_integer_select (gfc_code * code)
1289 gfc_start_block (&block);
1291 /* Calculate the switch expression. */
1292 gfc_init_se (&se, NULL);
1293 gfc_conv_expr_val (&se, code->expr1);
1294 gfc_add_block_to_block (&block, &se.pre);
1296 end_label = gfc_build_label_decl (NULL_TREE);
1298 gfc_init_block (&body);
1300 for (c = code->block; c; c = c->block)
1302 for (cp = c->ext.case_list; cp; cp = cp->next)
1307 /* Assume it's the default case. */
1308 low = high = NULL_TREE;
1312 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1315 /* If there's only a lower bound, set the high bound to the
1316 maximum value of the case expression. */
1318 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1323 /* Three cases are possible here:
1325 1) There is no lower bound, e.g. CASE (:N).
1326 2) There is a lower bound .NE. high bound, that is
1327 a case range, e.g. CASE (N:M) where M>N (we make
1328 sure that M>N during type resolution).
1329 3) There is a lower bound, and it has the same value
1330 as the high bound, e.g. CASE (N:N). This is our
1331 internal representation of CASE(N).
1333 In the first and second case, we need to set a value for
1334 high. In the third case, we don't because the GCC middle
1335 end represents a single case value by just letting high be
1336 a NULL_TREE. We can't do that because we need to be able
1337 to represent unbounded cases. */
1341 && mpz_cmp (cp->low->value.integer,
1342 cp->high->value.integer) != 0))
1343 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1346 /* Unbounded case. */
1348 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1351 /* Build a label. */
1352 label = gfc_build_label_decl (NULL_TREE);
1354 /* Add this case label.
1355 Add parameter 'label', make it match GCC backend. */
1356 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1358 gfc_add_expr_to_block (&body, tmp);
1361 /* Add the statements for this case. */
1362 tmp = gfc_trans_code (c->next);
1363 gfc_add_expr_to_block (&body, tmp);
1365 /* Break to the end of the construct. */
1366 tmp = build1_v (GOTO_EXPR, end_label);
1367 gfc_add_expr_to_block (&body, tmp);
1370 tmp = gfc_finish_block (&body);
1371 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1372 gfc_add_expr_to_block (&block, tmp);
1374 tmp = build1_v (LABEL_EXPR, end_label);
1375 gfc_add_expr_to_block (&block, tmp);
1377 return gfc_finish_block (&block);
1381 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1383 There are only two cases possible here, even though the standard
1384 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1385 .FALSE., and DEFAULT.
1387 We never generate more than two blocks here. Instead, we always
1388 try to eliminate the DEFAULT case. This way, we can translate this
1389 kind of SELECT construct to a simple
1393 expression in GENERIC. */
/* Translate SELECT CASE with a LOGICAL case expression.  Scans the case
   lists to find the .TRUE. block (t), the .FALSE. block (f) and the
   default block (d), then emits either a straight-line block (when .TRUE.
   and .FALSE. share a case list) or a single COND_EXPR.
   NOTE(review): the declarations of block, se, c, cp and some brace lines
   are not visible in this fragment.  */
1396 gfc_trans_logical_select (gfc_code * code)
1399 gfc_code *t, *f, *d;
1404 /* Assume we don't have any cases at all. */
1407 /* Now see which ones we actually do have. We can have at most two
1408 cases in a single case list: one for .TRUE. and one for .FALSE.
1409 The default case is always separate. If the cases for .TRUE. and
1410 .FALSE. are in the same case list, the block for that case list
1411 is always executed, and we don't need to generate a COND_EXPR. */
1412 for (c = code->block; c; c = c->block)
1414 for (cp = c->ext.case_list; cp; cp = cp->next)
1418 if (cp->low->value.logical == 0) /* .FALSE. */
1420 else /* if (cp->value.logical != 0), thus .TRUE. */
1428 /* Start a new block. */
1429 gfc_start_block (&block);
1431 /* Calculate the switch expression. We always need to do this
1432 because it may have side effects. */
1433 gfc_init_se (&se, NULL);
1434 gfc_conv_expr_val (&se, code->expr1);
1435 gfc_add_block_to_block (&block, &se.pre);
1437 if (t == f && t != NULL)
1439 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1440 translate the code for these cases, append it to the current
1442 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1446 tree true_tree, false_tree, stmt;
/* Start both arms empty; each is filled in only when a corresponding
   case (or the default) exists.  */
1448 true_tree = build_empty_stmt (input_location);
1449 false_tree = build_empty_stmt (input_location);
1451 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1452 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1453 make the missing case the default case. */
1454 if (t != NULL && f != NULL)
1464 /* Translate the code for each of these blocks, and append it to
1465 the current block. */
1467 true_tree = gfc_trans_code (t->next);
1470 false_tree = gfc_trans_code (f->next);
1472 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1473 true_tree, false_tree);
1474 gfc_add_expr_to_block (&block, stmt);
1477 return gfc_finish_block (&block);
1481 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1482 Instead of generating compares and jumps, it is far simpler to
1483 generate a data structure describing the cases in order and call a
1484 library subroutine that locates the right case.
1485 This is particularly true because this is the only case where we
1486 might have to dispose of a temporary.
1487 The library subroutine returns a pointer to jump to or NULL if no
1488 branches are to be taken. */
/* Translate SELECT CASE with a CHARACTER case expression.  Builds a static
   jump table of {string1, string1_len, string2, string2_len, target}
   records, calls the library routine _gfortran_select_string (or the
   char4 variant) to pick the matching case number at run time, and then
   switches on that number.  The index k selects between the kind=1 and
   kind=4 variants of the cached record types.
   NOTE(review): several declaration and brace lines are not visible in
   this fragment.  */
1491 gfc_trans_character_select (gfc_code *code)
1493 tree init, node, end_label, tmp, type, case_num, label, fndecl;
1494 stmtblock_t block, body;
1500 /* The jump table types are stored in static variables to avoid
1501 constructing them from scratch every single time. */
1502 static tree select_struct[2];
1503 static tree ss_string1[2], ss_string1_len[2];
1504 static tree ss_string2[2], ss_string2_len[2];
1505 static tree ss_target[2];
1507 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
/* Pick the cached record type slot based on the character kind.  */
1509 if (code->expr1->ts.kind == 1)
1511 else if (code->expr1->ts.kind == 4)
1516 if (select_struct[k] == NULL)
1518 select_struct[k] = make_node (RECORD_TYPE);
1520 if (code->expr1->ts.kind == 1)
1521 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1522 else if (code->expr1->ts.kind == 4)
1523 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
/* Helper to append one field to the jump-table record type.  */
1528 #define ADD_FIELD(NAME, TYPE) \
1529 ss_##NAME[k] = gfc_add_field_to_struct \
1530 (&(TYPE_FIELDS (select_struct[k])), select_struct[k], \
1531 get_identifier (stringize(NAME)), TYPE)
1533 ADD_FIELD (string1, pchartype);
1534 ADD_FIELD (string1_len, gfc_charlen_type_node);
1536 ADD_FIELD (string2, pchartype);
1537 ADD_FIELD (string2_len, gfc_charlen_type_node);
1539 ADD_FIELD (target, integer_type_node);
1542 gfc_finish_type (select_struct[k]);
/* Walk to the leftmost case and count the cases via the right links.  */
1545 cp = code->block->ext.case_list;
1546 while (cp->left != NULL)
1550 for (d = cp; d; d = d->right)
1553 end_label = gfc_build_label_decl (NULL_TREE);
1555 /* Generate the body */
1556 gfc_start_block (&block);
1557 gfc_init_block (&body);
/* One CASE_LABEL_EXPR per case, keyed by the case number d->n that the
   library routine will return.  */
1559 for (c = code->block; c; c = c->block)
1561 for (d = c->ext.case_list; d; d = d->next)
1563 label = gfc_build_label_decl (NULL_TREE);
1564 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1565 build_int_cst (NULL_TREE, d->n),
1566 build_int_cst (NULL_TREE, d->n), label);
1567 gfc_add_expr_to_block (&body, tmp);
1570 tmp = gfc_trans_code (c->next);
1571 gfc_add_expr_to_block (&body, tmp);
1573 tmp = build1_v (GOTO_EXPR, end_label);
1574 gfc_add_expr_to_block (&body, tmp);
1577 /* Generate the structure describing the branches */
1580 for(d = cp; d; d = d->right)
1584 gfc_init_se (&se, NULL);
/* A NULL low bound is encoded as a null pointer with zero length.  */
1588 node = tree_cons (ss_string1[k], null_pointer_node, node);
1589 node = tree_cons (ss_string1_len[k], integer_zero_node, node);
1593 gfc_conv_expr_reference (&se, d->low);
1595 node = tree_cons (ss_string1[k], se.expr, node);
1596 node = tree_cons (ss_string1_len[k], se.string_length, node);
1599 if (d->high == NULL)
1601 node = tree_cons (ss_string2[k], null_pointer_node, node);
1602 node = tree_cons (ss_string2_len[k], integer_zero_node, node);
1606 gfc_init_se (&se, NULL);
1607 gfc_conv_expr_reference (&se, d->high);
1609 node = tree_cons (ss_string2[k], se.expr, node);
1610 node = tree_cons (ss_string2_len[k], se.string_length, node);
1613 node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n),
1616 tmp = build_constructor_from_list (select_struct[k], nreverse (node));
1617 init = tree_cons (NULL_TREE, tmp, init);
1620 type = build_array_type (select_struct[k],
1621 build_index_type (build_int_cst (NULL_TREE, n-1)));
1623 init = build_constructor_from_list (type, nreverse(init));
1624 TREE_CONSTANT (init) = 1;
1625 TREE_STATIC (init) = 1;
1626 /* Create a static variable to hold the jump table. */
1627 tmp = gfc_create_var (type, "jumptable");
1628 TREE_CONSTANT (tmp) = 1;
1629 TREE_STATIC (tmp) = 1;
1630 TREE_READONLY (tmp) = 1;
1631 DECL_INITIAL (tmp) = init;
1634 /* Build the library call */
1635 init = gfc_build_addr_expr (pvoid_type_node, init);
1637 gfc_init_se (&se, NULL);
1638 gfc_conv_expr_reference (&se, code->expr1);
1640 gfc_add_block_to_block (&block, &se.pre);
/* Select the runtime lookup routine matching the character kind.  */
1642 if (code->expr1->ts.kind == 1)
1643 fndecl = gfor_fndecl_select_string;
1644 else if (code->expr1->ts.kind == 4)
1645 fndecl = gfor_fndecl_select_string_char4;
1649 tmp = build_call_expr_loc (input_location,
1650 fndecl, 4, init, build_int_cst (NULL_TREE, n),
1651 se.expr, se.string_length);
1652 case_num = gfc_create_var (integer_type_node, "case_num");
1653 gfc_add_modify (&block, case_num, tmp);
1655 gfc_add_block_to_block (&block, &se.post);
/* Switch on the case number returned by the library routine.  */
1657 tmp = gfc_finish_block (&body);
1658 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1659 gfc_add_expr_to_block (&block, tmp);
1661 tmp = build1_v (LABEL_EXPR, end_label);
1662 gfc_add_expr_to_block (&block, tmp);
1664 return gfc_finish_block (&block);
1668 /* Translate the three variants of the SELECT CASE construct.
1670 SELECT CASEs with INTEGER case expressions can be translated to an
1671 equivalent GENERIC switch statement, and for LOGICAL case
1672 expressions we build one or two if-else compares.
1674 SELECT CASEs with CHARACTER case expressions are a whole different
1675 story, because they don't exist in GENERIC. So we sort them and
1676 do a binary search at runtime.
1678 Fortran has no BREAK statement, and it does not allow jumps from
1679 one case block to another. That makes things a lot easier for
/* Dispatch a SELECT CASE construct to the type-specific translator above,
   based on the type of the case expression.  An empty SELECT (no CASE
   blocks at all) translates to an empty statement.  */
1683 gfc_trans_select (gfc_code * code)
1685 gcc_assert (code && code->expr1);
1687 /* Empty SELECT constructs are legal. */
1688 if (code->block == NULL)
1689 return build_empty_stmt (input_location);
1691 /* Select the correct translation function. */
1692 switch (code->expr1->ts.type)
1694 case BT_LOGICAL: return gfc_trans_logical_select (code);
1695 case BT_INTEGER: return gfc_trans_integer_select (code);
1696 case BT_CHARACTER: return gfc_trans_character_select (code);
/* Any other case-expression type is a front-end bug.  */
1698 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1704 /* Traversal function to substitute a replacement symtree if the symbol
1705 in the expression is the same as that passed. f == 2 signals that
1706 that variable itself is not to be checked - only the references.
1707 This group of functions is used when the variable expression in a
1708 FORALL assignment has internal references. For example:
1709 FORALL (i = 1:4) p(p(i)) = i
1710 The only recourse here is to store a copy of 'p' for the index
1713 static gfc_symtree *new_symtree;
1714 static gfc_symtree *old_symtree;
/* Traversal callback for gfc_traverse_expr: substitute the file-static
   new_symtree for any reference to SYM inside a variable expression.
   Per the comment above, *f == 2 means the outermost variable itself is
   exempt and only its references are rewritten.
   NOTE(review): the return statements of this callback are not visible in
   this fragment.  */
1717 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1719 if (expr->expr_type != EXPR_VARIABLE)
1724 else if (expr->symtree->n.sym == sym)
1725 expr->symtree = new_symtree;
/* Walk expression E, replacing references to SYM with the temporary's
   symtree via the forall_replace callback.  F selects the replacement
   mode passed through to the callback (2 = skip the variable itself).  */
1731 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1733 gfc_traverse_expr (e, sym, forall_replace, f);
/* Traversal callback that undoes forall_replace: any reference to the
   file-static new_symtree is pointed back at old_symtree.
   NOTE(review): the return statements of this callback are not visible in
   this fragment.  */
1737 forall_restore (gfc_expr *expr,
1738 gfc_symbol *sym ATTRIBUTE_UNUSED,
1739 int *f ATTRIBUTE_UNUSED)
1741 if (expr->expr_type != EXPR_VARIABLE)
1744 if (expr->symtree == new_symtree)
1745 expr->symtree = old_symtree;
/* Walk expression E restoring the original symtree in place of the
   temporary one installed by forall_replace_symtree.  */
1751 forall_restore_symtree (gfc_expr *e)
1753 gfc_traverse_expr (e, NULL, forall_restore, 0);
/* Make a complete copy of the FORALL assignment's lvalue variable into a
   temporary, then redirect both sides of the assignment (C->expr1 and
   C->expr2) to refer to the copy through a freshly built fake symtree.
   Setup code is appended to PRE and cleanup code to POST.  The original
   and replacement symtrees are recorded in the file-statics old_symtree /
   new_symtree so cleanup_forall_symtrees can undo the substitution.
   NOTE(review): some declarations and brace lines of this function are
   not visible in this fragment.  */
1757 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1762 gfc_symbol *new_sym;
1763 gfc_symbol *old_sym;
1767 /* Build a copy of the lvalue. */
1768 old_symtree = c->expr1->symtree;
1769 old_sym = old_symtree->n.sym;
1770 e = gfc_lval_expr_from_sym (old_sym);
/* Array case: copy via a subreference array argument temporary.  */
1771 if (old_sym->attr.dimension)
1773 gfc_init_se (&tse, NULL);
1774 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
1775 gfc_add_block_to_block (pre, &tse.pre);
1776 gfc_add_block_to_block (post, &tse.post);
1777 tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
1779 if (e->ts.type != BT_CHARACTER)
1781 /* Use the variable offset for the temporary. */
1782 tmp = gfc_conv_array_offset (old_sym->backend_decl);
1783 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
/* Scalar case: evaluate the variable and assign it into a scalar (or
   character) temporary.  */
1788 gfc_init_se (&tse, NULL);
1789 gfc_init_se (&rse, NULL);
1790 gfc_conv_expr (&rse, e);
1791 if (e->ts.type == BT_CHARACTER)
1793 tse.string_length = rse.string_length;
1794 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1796 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1798 gfc_add_block_to_block (pre, &tse.pre);
1799 gfc_add_block_to_block (post, &tse.post);
1803 tmp = gfc_typenode_for_spec (&e->ts);
1804 tse.expr = gfc_create_var (tmp, "temp");
1807 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1808 e->expr_type == EXPR_VARIABLE);
1809 gfc_add_expr_to_block (pre, tmp);
1813 /* Create a new symbol to represent the lvalue. */
1814 new_sym = gfc_new_symbol (old_sym->name, NULL);
1815 new_sym->ts = old_sym->ts;
1816 new_sym->attr.referenced = 1;
1817 new_sym->attr.temporary = 1;
1818 new_sym->attr.dimension = old_sym->attr.dimension;
1819 new_sym->attr.flavor = old_sym->attr.flavor;
1821 /* Use the temporary as the backend_decl. */
1822 new_sym->backend_decl = tse.expr;
1824 /* Create a fake symtree for it. */
1826 new_symtree = gfc_new_symtree (&root, old_sym->name);
1827 new_symtree->n.sym = new_sym;
1828 gcc_assert (new_symtree == root);
1830 /* Go through the expression reference replacing the old_symtree
1832 forall_replace_symtree (c->expr1, old_sym, 2);
1834 /* Now we have made this temporary, we might as well use it for
1835 the right hand side. */
1836 forall_replace_symtree (c->expr2, old_sym, 1);
1840 /* Handles dependencies in forall assignments. */
/* Decide whether the FORALL assignment in C needs a temporary because of
   dependencies between its left- and right-hand sides, and create one via
   forall_make_variable_temp when the lvalue refers to itself (e.g.
   p(p(i)) = i) or when overlapping substrings demand it.  Presumably
   returns the need_temp flag to the caller — the return statements are
   not visible in this fragment.  */
1842 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1849 lsym = c->expr1->symtree->n.sym;
1850 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
1852 /* Now check for dependencies within the 'variable'
1853 expression itself. These are treated by making a complete
1854 copy of variable and changing all the references to it
1855 point to the copy instead. Note that the shallow copy of
1856 the variable will not suffice for derived types with
1857 pointer components. We therefore leave these to their
1859 if (lsym->ts.type == BT_DERIVED
1860 && lsym->ts.u.derived->attr.pointer_comp)
/* The lvalue is indexed by itself — copy it into a temporary.  */
1864 if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
1866 forall_make_variable_temp (c, pre, post);
1870 /* Substrings with dependencies are treated in the same
1872 if (c->expr1->ts.type == BT_CHARACTER
1874 && c->expr2->expr_type == EXPR_VARIABLE
1875 && lsym == c->expr2->symtree->n.sym)
1877 for (lref = c->expr1->ref; lref; lref = lref->next)
1878 if (lref->type == REF_SUBSTRING)
1880 for (rref = c->expr2->ref; rref; rref = rref->next)
1881 if (rref->type == REF_SUBSTRING)
/* Overlap with the rhs substring starting earlier forces a copy.  */
1885 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
1887 forall_make_variable_temp (c, pre, post);
/* Undo the symtree substitution made by forall_make_variable_temp on both
   sides of the assignment in C, then release the temporary symbol and its
   fake symtree.  */
1896 cleanup_forall_symtrees (gfc_code *c)
1898 forall_restore_symtree (c->expr1);
1899 forall_restore_symtree (c->expr2);
1900 gfc_free (new_symtree->n.sym);
1901 gfc_free (new_symtree);
1905 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1906 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1907 indicates whether we should generate code to test the FORALLs mask
1908 array. OUTER is the loop header to be used for initializing mask
1911 The generated loop format is:
1912 count = (end - start + step) / step
/* Wrap BODY in one counted loop per FORALL index variable of FORALL_TMP,
   innermost first, following the count-down scheme in the comment above.
   Each level is a LOOP_EXPR guarded by "count <= 0 -> goto exit_label";
   the index variable is stepped and the count decremented each iteration.
   The mask index, when present, is reset in OUTER and advanced only by
   the innermost loop (n == 0).
   NOTE(review): some declarations and brace lines of this function are
   not visible in this fragment.  */
1925 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1926 int mask_flag, stmtblock_t *outer)
1934 tree var, start, end, step;
1937 /* Initialize the mask index outside the FORALL nest. */
1938 if (mask_flag && forall_tmp->mask)
1939 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
1941 iter = forall_tmp->this_loop;
1942 nvar = forall_tmp->nvar;
/* Build the loops inside-out: after each pass BODY becomes the body of
   the next enclosing loop.  */
1943 for (n = 0; n < nvar; n++)
1946 start = iter->start;
1950 exit_label = gfc_build_label_decl (NULL_TREE);
1951 TREE_USED (exit_label) = 1;
1953 /* The loop counter. */
1954 count = gfc_create_var (TREE_TYPE (var), "count");
1956 /* The body of the loop. */
1957 gfc_init_block (&block);
1959 /* The exit condition. */
1960 cond = fold_build2 (LE_EXPR, boolean_type_node,
1961 count, build_int_cst (TREE_TYPE (count), 0));
1962 tmp = build1_v (GOTO_EXPR, exit_label);
1963 tmp = fold_build3 (COND_EXPR, void_type_node,
1964 cond, tmp, build_empty_stmt (input_location));
1965 gfc_add_expr_to_block (&block, tmp);
1967 /* The main loop body. */
1968 gfc_add_expr_to_block (&block, body);
1970 /* Increment the loop variable. */
1971 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1972 gfc_add_modify (&block, var, tmp);
1974 /* Advance to the next mask element. Only do this for the
1976 if (n == 0 && mask_flag && forall_tmp->mask)
1978 tree maskindex = forall_tmp->maskindex;
1979 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1980 maskindex, gfc_index_one_node);
1981 gfc_add_modify (&block, maskindex, tmp);
1984 /* Decrement the loop counter. */
1985 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
1986 build_int_cst (TREE_TYPE (var), 1));
1987 gfc_add_modify (&block, count, tmp);
1989 body = gfc_finish_block (&block);
1991 /* Loop var initialization. */
1992 gfc_init_block (&block);
1993 gfc_add_modify (&block, var, start);
1996 /* Initialize the loop counter. */
/* count = (end - start + step) / step, matching the scheme above.  */
1997 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1998 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1999 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
2000 gfc_add_modify (&block, count, tmp);
2002 /* The loop expression. */
2003 tmp = build1_v (LOOP_EXPR, body);
2004 gfc_add_expr_to_block (&block, tmp);
2006 /* The exit label. */
2007 tmp = build1_v (LABEL_EXPR, exit_label);
2008 gfc_add_expr_to_block (&block, tmp);
2010 body = gfc_finish_block (&block);
2017 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2018 is nonzero, the body is controlled by all masks in the forall nest.
2019 Otherwise, the innermost loop is not controlled by its mask. This
2020 is used for initializing that mask. */
/* Wrap BODY in the full nest of FORALL loops described by
   NESTED_FORALL_INFO, walking from the innermost level outward via
   prev_nest.  At each level with a mask, BODY is first made conditional
   on the current mask element; mask handling at the innermost level is
   governed by MASK_FLAG as described in the comment above.
   NOTE(review): the MASK_FLAG parameter line and some brace lines are not
   visible in this fragment.  */
2023 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
2028 forall_info *forall_tmp;
2029 tree mask, maskindex;
2031 gfc_start_block (&header);
2033 forall_tmp = nested_forall_info;
2034 while (forall_tmp != NULL)
2036 /* Generate body with masks' control. */
2039 mask = forall_tmp->mask;
2040 maskindex = forall_tmp->maskindex;
2042 /* If a mask was specified make the assignment conditional. */
2045 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2046 body = build3_v (COND_EXPR, tmp, body,
2047 build_empty_stmt (input_location));
2050 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2051 forall_tmp = forall_tmp->prev_nest;
2055 gfc_add_expr_to_block (&header, body);
2056 return gfc_finish_block (&header);
2060 /* Allocate data for holding a temporary array. Returns either a local
2061 temporary array or a pointer variable. */
/* Allocate storage of BYTESIZE bytes for SIZE elements of ELEM_TYPE,
   placing a constant-sized temporary on the stack when possible and
   otherwise malloc'ing it (initialization code goes into PBLOCK, and the
   raw pointer for the later free is stored through PDATA).  Returns the
   variable (or pointer variable) holding the temporary.
   NOTE(review): the elem_type parameter line, some declarations and the
   return statement are not visible in this fragment.  */
2064 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2071 if (INTEGER_CST_P (size))
2073 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
2074 gfc_index_one_node);
/* Build the array type [0 .. size-1] of the element type.  */
2079 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2080 type = build_array_type (elem_type, type);
2081 if (gfc_can_put_var_on_stack (bytesize))
2083 gcc_assert (INTEGER_CST_P (size));
2084 tmpvar = gfc_create_var (type, "temp");
2089 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2090 *pdata = convert (pvoid_type_node, tmpvar);
2092 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2093 gfc_add_modify (pblock, tmpvar, tmp);
2099 /* Generate codes to copy the temporary to the actual lhs. */
/* Generate the code that copies the temporary TMP1 back into the lhs
   EXPR.  COUNT1 indexes the temporary and is advanced per element;
   COUNT3, when non-NULL (judging by the increments below), indexes the
   WHEREMASK whose sense may be inverted by INVERT.  Scalar lhs is a
   single assignment; array lhs uses a scalarized loop.
   NOTE(review): several declarations, brace lines and the return
   statements are not visible in this fragment.  */
2102 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2103 tree count1, tree wheremask, bool invert)
2107 stmtblock_t block, body;
2113 lss = gfc_walk_expr (expr);
/* Scalar lhs: one assignment from tmp1[count1], no scalarizer needed.  */
2115 if (lss == gfc_ss_terminator)
2117 gfc_start_block (&block);
2119 gfc_init_se (&lse, NULL);
2121 /* Translate the expression. */
2122 gfc_conv_expr (&lse, expr);
2124 /* Form the expression for the temporary. */
2125 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2127 /* Use the scalar assignment as is. */
2128 gfc_add_block_to_block (&block, &lse.pre);
2129 gfc_add_modify (&block, lse.expr, tmp);
2130 gfc_add_block_to_block (&block, &lse.post);
2132 /* Increment the count1. */
2133 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2134 gfc_index_one_node);
2135 gfc_add_modify (&block, count1, tmp);
2137 tmp = gfc_finish_block (&block);
/* Array lhs: scalarize over the lhs and assign tmp1[count1] to each
   element, optionally under the where-mask.  */
2141 gfc_start_block (&block);
2143 gfc_init_loopinfo (&loop1);
2144 gfc_init_se (&rse, NULL);
2145 gfc_init_se (&lse, NULL);
2147 /* Associate the lss with the loop. */
2148 gfc_add_ss_to_loop (&loop1, lss);
2150 /* Calculate the bounds of the scalarization. */
2151 gfc_conv_ss_startstride (&loop1);
2152 /* Setup the scalarizing loops. */
2153 gfc_conv_loop_setup (&loop1, &expr->where);
2155 gfc_mark_ss_chain_used (lss, 1);
2157 /* Start the scalarized loop body. */
2158 gfc_start_scalarized_body (&loop1, &body);
2160 /* Setup the gfc_se structures. */
2161 gfc_copy_loopinfo_to_se (&lse, &loop1);
2164 /* Form the expression of the temporary. */
2165 if (lss != gfc_ss_terminator)
2166 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2167 /* Translate expr. */
2168 gfc_conv_expr (&lse, expr);
2170 /* Use the scalar assignment. */
2171 rse.string_length = lse.string_length;
2172 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
2174 /* Form the mask expression according to the mask tree list. */
2177 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2179 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2180 TREE_TYPE (wheremaskexpr),
2182 tmp = fold_build3 (COND_EXPR, void_type_node,
2184 build_empty_stmt (input_location));
2187 gfc_add_expr_to_block (&body, tmp);
2189 /* Increment count1. */
2190 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2191 count1, gfc_index_one_node);
2192 gfc_add_modify (&body, count1, tmp);
2194 /* Increment count3. */
2197 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2198 count3, gfc_index_one_node);
2199 gfc_add_modify (&body, count3, tmp);
2202 /* Generate the copying loops. */
2203 gfc_trans_scalarizing_loops (&loop1, &body);
2204 gfc_add_block_to_block (&block, &loop1.pre);
2205 gfc_add_block_to_block (&block, &loop1.post);
2206 gfc_cleanup_loop (&loop1);
2208 tmp = gfc_finish_block (&block);
2214 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2215 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2216 and should not be freed. WHEREMASK is the conditional execution mask
2217 whose sense may be inverted by INVERT. */
/* Generate the code that evaluates the rhs EXPR2 and stores it into the
   temporary TMP1 at index COUNT1 (advanced per element).  LSS/RSS come
   from compute_inner_temp_size and must not be freed here; COUNT3 indexes
   the WHEREMASK, whose sense may be inverted by INVERT, as documented in
   the comment above.  Scalar rhs is a single store; otherwise the rhs is
   scalarized.
   NOTE(review): several declarations and brace lines are not visible in
   this fragment.  */
2220 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2221 tree count1, gfc_ss *lss, gfc_ss *rss,
2222 tree wheremask, bool invert)
2224 stmtblock_t block, body1;
2231 gfc_start_block (&block);
2233 gfc_init_se (&rse, NULL);
2234 gfc_init_se (&lse, NULL);
/* Scalar case: evaluate the rhs once and store to tmp1[count1].  */
2236 if (lss == gfc_ss_terminator)
2238 gfc_init_block (&body1);
2239 gfc_conv_expr (&rse, expr2);
2240 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2244 /* Initialize the loop. */
2245 gfc_init_loopinfo (&loop);
2247 /* We may need LSS to determine the shape of the expression. */
2248 gfc_add_ss_to_loop (&loop, lss);
2249 gfc_add_ss_to_loop (&loop, rss);
2251 gfc_conv_ss_startstride (&loop);
2252 gfc_conv_loop_setup (&loop, &expr2->where);
2254 gfc_mark_ss_chain_used (rss, 1);
2255 /* Start the loop body. */
2256 gfc_start_scalarized_body (&loop, &body1);
2258 /* Translate the expression. */
2259 gfc_copy_loopinfo_to_se (&rse, &loop);
2261 gfc_conv_expr (&rse, expr2);
2263 /* Form the expression of the temporary. */
2264 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2267 /* Use the scalar assignment. */
2268 lse.string_length = rse.string_length;
2269 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2270 expr2->expr_type == EXPR_VARIABLE);
2272 /* Form the mask expression according to the mask tree list. */
2275 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2277 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2278 TREE_TYPE (wheremaskexpr),
2280 tmp = fold_build3 (COND_EXPR, void_type_node,
2281 wheremaskexpr, tmp, build_empty_stmt (input_location));
2284 gfc_add_expr_to_block (&body1, tmp);
2286 if (lss == gfc_ss_terminator)
2288 gfc_add_block_to_block (&block, &body1);
2290 /* Increment count1. */
2291 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2292 gfc_index_one_node);
2293 gfc_add_modify (&block, count1, tmp);
2297 /* Increment count1. */
2298 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2299 count1, gfc_index_one_node);
2300 gfc_add_modify (&body1, count1, tmp);
2302 /* Increment count3. */
2305 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2306 count3, gfc_index_one_node);
2307 gfc_add_modify (&body1, count3, tmp);
2310 /* Generate the copying loops. */
2311 gfc_trans_scalarizing_loops (&loop, &body1);
2313 gfc_add_block_to_block (&block, &loop.pre);
2314 gfc_add_block_to_block (&block, &loop.post);
2316 gfc_cleanup_loop (&loop);
2317 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2318 as tree nodes in SS may not be valid in different scope. */
2321 tmp = gfc_finish_block (&block);
2326 /* Calculate the size of temporary needed in the assignment inside forall.
2327 LSS and RSS are filled in this function. */
/* Compute the per-iteration element count of the temporary needed for the
   assignment EXPR1 = EXPR2 inside a FORALL, filling in *LSS and *RSS for
   reuse by the copy loops.  A scalar lhs yields size one; otherwise the
   scalarizer is set up (with bounds checking temporarily disabled) and
   the loop extents are multiplied together.  Code is emitted into PBLOCK.
   NOTE(review): some declarations, brace lines and the return statement
   are not visible in this fragment.  */
2330 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2331 stmtblock_t * pblock,
2332 gfc_ss **lss, gfc_ss **rss)
2340 *lss = gfc_walk_expr (expr1);
2343 size = gfc_index_one_node;
2344 if (*lss != gfc_ss_terminator)
2346 gfc_init_loopinfo (&loop);
2348 /* Walk the RHS of the expression. */
2349 *rss = gfc_walk_expr (expr2);
2350 if (*rss == gfc_ss_terminator)
2352 /* The rhs is scalar. Add a ss for the expression. */
2353 *rss = gfc_get_ss ();
2354 (*rss)->next = gfc_ss_terminator;
2355 (*rss)->type = GFC_SS_SCALAR;
2356 (*rss)->expr = expr2;
2359 /* Associate the SS with the loop. */
2360 gfc_add_ss_to_loop (&loop, *lss);
2361 /* We don't actually need to add the rhs at this point, but it might
2362 make guessing the loop bounds a bit easier. */
2363 gfc_add_ss_to_loop (&loop, *rss);
2365 /* We only want the shape of the expression, not rest of the junk
2366 generated by the scalarizer. */
2367 loop.array_parameter = 1;
2369 /* Calculate the bounds of the scalarization. */
/* Temporarily suppress run-time bounds checking while sizing.  */
2370 save_flag = gfc_option.rtcheck;
2371 gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
2372 gfc_conv_ss_startstride (&loop);
2373 gfc_option.rtcheck = save_flag;
2374 gfc_conv_loop_setup (&loop, &expr2->where);
2376 /* Figure out how many elements we need. */
2377 for (i = 0; i < loop.dimen; i++)
2379 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2380 gfc_index_one_node, loop.from[i]);
2381 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2383 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2385 gfc_add_block_to_block (pblock, &loop.pre);
2386 size = gfc_evaluate_now (size, pblock);
2387 gfc_add_block_to_block (pblock, &loop.post);
2389 /* TODO: write a function that cleans up a loopinfo without freeing
2390 the SS chains. Currently a NOP. */
2397 /* Calculate the overall iterator number of the nested forall construct.
2398 This routine actually calculates the number of times the body of the
2399 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2400 that by the expression INNER_SIZE. The BLOCK argument specifies the
2401 block in which to calculate the result, and the optional INNER_SIZE_BODY
2402 argument contains any statements that need to be executed (inside the loop)
2403 to initialize or calculate INNER_SIZE. */
/* Compute the total iteration count of the FORALL nest times INNER_SIZE,
   as described in the comment above.  Innermost unconditional loops with
   constant sizes are folded into the constant directly; any remaining
   levels are accumulated at run time into a "num" variable inside
   generated loops.  Code goes into BLOCK; INNER_SIZE_BODY (optional)
   holds statements needed to evaluate INNER_SIZE inside the loop.
   NOTE(review): some declarations, brace lines and the return statement
   are not visible in this fragment.  */
2406 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2407 stmtblock_t *inner_size_body, stmtblock_t *block)
2409 forall_info *forall_tmp = nested_forall_info;
2413 /* We can eliminate the innermost unconditional loops with constant
2415 if (INTEGER_CST_P (inner_size))
2418 && !forall_tmp->mask
2419 && INTEGER_CST_P (forall_tmp->size))
2421 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2422 inner_size, forall_tmp->size);
2423 forall_tmp = forall_tmp->prev_nest;
2426 /* If there are no loops left, we have our constant result. */
2431 /* Otherwise, create a temporary variable to compute the result. */
2432 number = gfc_create_var (gfc_array_index_type, "num");
2433 gfc_add_modify (block, number, gfc_index_zero_node);
2435 gfc_start_block (&body);
2436 if (inner_size_body)
2437 gfc_add_block_to_block (&body, inner_size_body);
/* num += inner_size, executed once per remaining nest iteration.  */
2439 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2440 number, inner_size);
2443 gfc_add_modify (&body, number, tmp);
2444 tmp = gfc_finish_block (&body);
2446 /* Generate loops. */
2447 if (forall_tmp != NULL)
2448 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2450 gfc_add_expr_to_block (block, tmp);
2456 /* Allocate temporary for forall construct. SIZE is the size of temporary
2457 needed. PTEMP1 is returned for space free. */
/* Allocate a temporary of SIZE elements of TYPE for a FORALL construct,
   scaling SIZE by the element size to get the byte count, and return the
   (possibly dereferenced) temporary.  The raw pointer for the later free
   is stored through PTEMP1.
   NOTE(review): the declarations and the return statement are not visible
   in this fragment.  */
2460 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2467 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2468 if (!integer_onep (unit))
2469 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2474 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2477 tmp = build_fold_indirect_ref_loc (input_location, tmp);
2482 /* Allocate temporary for forall construct according to the information in
2483 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2484 assignment inside forall. PTEMP1 is returned for space free. */
/* Allocate the temporary for a whole FORALL nest: the overall size is the
   nest's total iteration count (from compute_overall_iter_number) times
   INNER_SIZE, and the actual allocation is delegated to
   allocate_temp_for_forall_nest_1.  PTEMP1 receives the raw pointer for
   the later free.  */
2487 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2488 tree inner_size, stmtblock_t * inner_size_body,
2489 stmtblock_t * block, tree * ptemp1)
2493 /* Calculate the total size of temporary needed in forall construct. */
2494 size = compute_overall_iter_number (nested_forall_info, inner_size,
2495 inner_size_body, block);
2497 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2501 /* Handle assignments inside forall which need temporary.
2503 forall (i=start:end:stride; maskexpr)
2506 (where e,f<i> are arbitrary expressions possibly involving i
2507 and there is a dependency between e<i> and f<i>)
2509 masktmp(:) = maskexpr(:)
2514 for (i = start; i <= end; i += stride)
2518 for (i = start; i <= end; i += stride)
2520 if (masktmp[maskindex++])
2521 tmp[count1++] = f<i>
2525 for (i = start; i <= end; i += stride)
2527 if (masktmp[maskindex++])
2528 e<i> = tmp[count1++]
/* Translate a FORALL assignment that needs a temporary, following the
   two-pass scheme in the comment above: first run the nest storing every
   rhs value f<i> into a temporary, then run it again copying the
   temporary into the lhs e<i>.  WHEREMASK (sense invertible via INVERT)
   gates both passes; counters are reset between them.
   NOTE(review): some declarations and brace lines of this function are
   not visible in this fragment.  */
2533 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2534 tree wheremask, bool invert,
2535 forall_info * nested_forall_info,
2536 stmtblock_t * block)
2544 stmtblock_t inner_size_body;
2546 /* Create vars. count1 is the current iterator number of the nested
2548 count1 = gfc_create_var (gfc_array_index_type, "count1");
2550 /* Count is the wheremask index. */
2553 count = gfc_create_var (gfc_array_index_type, "count");
2554 gfc_add_modify (block, count, gfc_index_zero_node);
2559 /* Initialize count1. */
2560 gfc_add_modify (block, count1, gfc_index_zero_node);
2562 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2563 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2564 gfc_init_block (&inner_size_body);
2565 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2568 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
2569 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
2571 if (!expr1->ts.u.cl->backend_decl)
2574 gfc_init_se (&tse, NULL);
2575 gfc_conv_expr (&tse, expr1->ts.u.cl->length)
2576 expr1->ts.u.cl->backend_decl = tse.expr;
2578 type = gfc_get_character_type_len (gfc_default_character_kind,
2579 expr1->ts.u.cl->backend_decl);
2582 type = gfc_typenode_for_spec (&expr1->ts);
2584 /* Allocate temporary for nested forall construct according to the
2585 information in nested_forall_info and inner_size. */
2586 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2587 &inner_size_body, block, &ptemp1);
2589 /* Generate codes to copy rhs to the temporary. */
2590 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2593 /* Generate body and loops according to the information in
2594 nested_forall_info. */
2595 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2596 gfc_add_expr_to_block (block, tmp);
/* Reset the counters before the second (temp -> lhs) pass.  */
2599 gfc_add_modify (block, count1, gfc_index_zero_node);
2603 gfc_add_modify (block, count, gfc_index_zero_node);
2605 /* Generate codes to copy the temporary to lhs. */
2606 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2609 /* Generate body and loops according to the information in
2610 nested_forall_info. */
2611 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2612 gfc_add_expr_to_block (block, tmp);
2616 /* Free the temporary. */
2617 tmp = gfc_call_free (ptemp1);
2618 gfc_add_expr_to_block (block, tmp);
2623 /* Translate pointer assignment inside FORALL which need temporary. */
2626 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2627 forall_info * nested_forall_info,
2628 stmtblock_t * block)
2642 tree tmp, tmp1, ptemp1;
2644 count = gfc_create_var (gfc_array_index_type, "count");
2645 gfc_add_modify (block, count, gfc_index_zero_node);
2647 inner_size = integer_one_node;
2648 lss = gfc_walk_expr (expr1);
2649 rss = gfc_walk_expr (expr2);
     /* Scalar pointer assignment: save each rhs pointer value in a flat
        temporary over the whole forall nest, then assign the saved values
        to the lhs in a second pass. */
2650 if (lss == gfc_ss_terminator)
2652 type = gfc_typenode_for_spec (&expr1->ts);
2653 type = build_pointer_type (type);
2655 /* Allocate temporary for nested forall construct according to the
2656 information in nested_forall_info and inner_size. */
2657 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2658 inner_size, NULL, block, &ptemp1);
2659 gfc_start_block (&body);
2660 gfc_init_se (&lse, NULL);
2661 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2662 gfc_init_se (&rse, NULL);
2663 rse.want_pointer = 1;
2664 gfc_conv_expr (&rse, expr2);
2665 gfc_add_block_to_block (&body, &rse.pre);
2666 gfc_add_modify (&body, lse.expr,
2667 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2668 gfc_add_block_to_block (&body, &rse.post);
2670 /* Increment count. */
2671 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2672 count, gfc_index_one_node);
2673 gfc_add_modify (&body, count, tmp);
2675 tmp = gfc_finish_block (&body);
2677 /* Generate body and loops according to the information in
2678 nested_forall_info. */
2679 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2680 gfc_add_expr_to_block (block, tmp);
     /* Reset count for the copy-back pass. */
2683 gfc_add_modify (block, count, gfc_index_zero_node);
2685 gfc_start_block (&body);
2686 gfc_init_se (&lse, NULL);
2687 gfc_init_se (&rse, NULL);
2688 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2689 lse.want_pointer = 1;
2690 gfc_conv_expr (&lse, expr1);
2691 gfc_add_block_to_block (&body, &lse.pre);
2692 gfc_add_modify (&body, lse.expr, rse.expr);
2693 gfc_add_block_to_block (&body, &lse.post);
2694 /* Increment count. */
2695 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2696 count, gfc_index_one_node);
2697 gfc_add_modify (&body, count, tmp);
2698 tmp = gfc_finish_block (&body);
2700 /* Generate body and loops according to the information in
2701 nested_forall_info. */
2702 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2703 gfc_add_expr_to_block (block, tmp);
     /* Array pointer assignment: save a descriptor for each rhs section
        in the temporary, then assign the saved descriptors to the lhs in
        a second pass. */
2707 gfc_init_loopinfo (&loop);
2709 /* Associate the SS with the loop. */
2710 gfc_add_ss_to_loop (&loop, rss);
2712 /* Setup the scalarizing loops and bounds. */
2713 gfc_conv_ss_startstride (&loop);
2715 gfc_conv_loop_setup (&loop, &expr2->where);
2717 info = &rss->data.info;
2718 desc = info->descriptor;
2720 /* Make a new descriptor. */
2721 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2722 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2723 loop.from, loop.to, 1,
2724 GFC_ARRAY_UNKNOWN, true);
2726 /* Allocate temporary for nested forall construct. */
2727 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2728 inner_size, NULL, block, &ptemp1);
2729 gfc_start_block (&body);
2730 gfc_init_se (&lse, NULL);
2731 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2732 lse.direct_byref = 1;
2733 rss = gfc_walk_expr (expr2);
2734 gfc_conv_expr_descriptor (&lse, expr2, rss);
2736 gfc_add_block_to_block (&body, &lse.pre);
2737 gfc_add_block_to_block (&body, &lse.post);
2739 /* Increment count. */
2740 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2741 count, gfc_index_one_node);
2742 gfc_add_modify (&body, count, tmp);
2744 tmp = gfc_finish_block (&body);
2746 /* Generate body and loops according to the information in
2747 nested_forall_info. */
2748 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2749 gfc_add_expr_to_block (block, tmp);
     /* Reset count for the copy-back pass. */
2752 gfc_add_modify (block, count, gfc_index_zero_node);
2754 parm = gfc_build_array_ref (tmp1, count, NULL);
2755 lss = gfc_walk_expr (expr1);
2756 gfc_init_se (&lse, NULL);
2757 gfc_conv_expr_descriptor (&lse, expr1, lss);
2758 gfc_add_modify (&lse.pre, lse.expr, parm);
2759 gfc_start_block (&body);
2760 gfc_add_block_to_block (&body, &lse.pre);
2761 gfc_add_block_to_block (&body, &lse.post);
2763 /* Increment count. */
2764 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2765 count, gfc_index_one_node);
2766 gfc_add_modify (&body, count, tmp);
2768 tmp = gfc_finish_block (&body);
2770 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2771 gfc_add_expr_to_block (block, tmp);
2773 /* Free the temporary. */
2776 tmp = gfc_call_free (ptemp1);
2777 gfc_add_expr_to_block (block, tmp);
2782 /* FORALL and WHERE statements are really nasty, especially when you nest
2783 them. All the rhs of a forall assignment must be evaluated before the
2784 actual assignments are performed. Presumably this also applies to all the
2785 assignments in an inner where statement. */
2787 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2788 linear array, relying on the fact that we process in the same order in all
2791 forall (i=start:end:stride; maskexpr)
2795 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2797 count = ((end + 1 - start) / stride)
2798 masktmp(:) = maskexpr(:)
2801 for (i = start; i <= end; i += stride)
2803 if (masktmp[maskindex++])
2807 for (i = start; i <= end; i += stride)
2809 if (masktmp[maskindex++])
2813 Note that this code only works when there are no dependencies.
2814 Forall loop with array assignments and data dependencies are a real pain,
2815 because the size of the temporary cannot always be determined before the
2816 loop is executed. This problem is compounded by the presence of nested
2821 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2841 gfc_forall_iterator *fa;
2844 gfc_saved_var *saved_vars;
2845 iter_info *this_forall;
2849 /* Do nothing if the mask is false. */
2851 && code->expr1->expr_type == EXPR_CONSTANT
2852 && !code->expr1->value.logical)
2853 return build_empty_stmt (input_location);
2856 /* Count the FORALL index number. */
2857 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2861 /* Allocate the space for var, start, end, step, varexpr. */
2862 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2863 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2864 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2865 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2866 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2867 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2869 /* Allocate the space for info. */
2870 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2872 gfc_start_block (&pre);
2873 gfc_init_block (&post);
2874 gfc_init_block (&block);
     /* Translate each FORALL iterator: shadow the index symbol with a
        fresh temporary and record its start/end/stride in an iter_info. */
2877 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2879 gfc_symbol *sym = fa->var->symtree->n.sym;
2881 /* Allocate space for this_forall. */
2882 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2884 /* Create a temporary variable for the FORALL index. */
2885 tmp = gfc_typenode_for_spec (&sym->ts);
2886 var[n] = gfc_create_var (tmp, sym->name);
2887 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2889 /* Record it in this_forall. */
2890 this_forall->var = var[n];
2892 /* Replace the index symbol's backend_decl with the temporary decl. */
2893 sym->backend_decl = var[n];
2895 /* Work out the start, end and stride for the loop. */
2896 gfc_init_se (&se, NULL);
2897 gfc_conv_expr_val (&se, fa->start);
2898 /* Record it in this_forall. */
2899 this_forall->start = se.expr;
2900 gfc_add_block_to_block (&block, &se.pre);
2903 gfc_init_se (&se, NULL);
2904 gfc_conv_expr_val (&se, fa->end);
2905 /* Record it in this_forall. */
2906 this_forall->end = se.expr;
2907 gfc_make_safe_expr (&se);
2908 gfc_add_block_to_block (&block, &se.pre);
2911 gfc_init_se (&se, NULL);
2912 gfc_conv_expr_val (&se, fa->stride);
2913 /* Record it in this_forall. */
2914 this_forall->step = se.expr;
2915 gfc_make_safe_expr (&se);
2916 gfc_add_block_to_block (&block, &se.pre);
2919 /* Set the NEXT field of this_forall to NULL. */
2920 this_forall->next = NULL;
2921 /* Link this_forall to the info construct. */
2922 if (info->this_loop)
2924 iter_info *iter_tmp = info->this_loop;
2925 while (iter_tmp->next != NULL)
2926 iter_tmp = iter_tmp->next;
2927 iter_tmp->next = this_forall;
2930 info->this_loop = this_forall;
2936 /* Calculate the size needed for the current forall level. */
2937 size = gfc_index_one_node;
2938 for (n = 0; n < nvar; n++)
2940 /* size = (end + step - start) / step. */
2941 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2943 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2945 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2946 tmp = convert (gfc_array_index_type, tmp);
2948 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2951 /* Record the nvar and size of current forall level. */
2957 /* If the mask is .true., consider the FORALL unconditional. */
2958 if (code->expr1->expr_type == EXPR_CONSTANT
2959 && code->expr1->value.logical)
2967 /* First we need to allocate the mask. */
2970 /* As the mask array can be very big, prefer compact boolean types. */
2971 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2972 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2973 size, NULL, &block, &pmask);
2974 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2976 /* Record them in the info structure. */
2977 info->maskindex = maskindex;
2982 /* No mask was specified. */
2983 maskindex = NULL_TREE;
2984 mask = pmask = NULL_TREE;
2987 /* Link the current forall level to nested_forall_info. */
2988 info->prev_nest = nested_forall_info;
2989 nested_forall_info = info;
2991 /* Copy the mask into a temporary variable if required.
2992 For now we assume a mask temporary is needed. */
2995 /* As the mask array can be very big, prefer compact boolean types. */
2996 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2998 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3000 /* Start of mask assignment loop body. */
3001 gfc_start_block (&body);
3003 /* Evaluate the mask expression. */
3004 gfc_init_se (&se, NULL);
3005 gfc_conv_expr_val (&se, code->expr1);
3006 gfc_add_block_to_block (&body, &se.pre);
3008 /* Store the mask. */
3009 se.expr = convert (mask_type, se.expr);
3011 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3012 gfc_add_modify (&body, tmp, se.expr);
3014 /* Advance to the next mask element. */
3015 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3016 maskindex, gfc_index_one_node);
3017 gfc_add_modify (&body, maskindex, tmp);
3019 /* Generate the loops. */
3020 tmp = gfc_finish_block (&body);
3021 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3022 gfc_add_expr_to_block (&block, tmp);
     /* Translate the statements in the body of the FORALL construct. */
3025 c = code->block->next;
3027 /* TODO: loop merging in FORALL statements. */
3028 /* Now that we've got a copy of the mask, generate the assignment loops. */
3034 /* A scalar or array assignment. DO the simple check for
3035 lhs to rhs dependencies. These make a temporary for the
3036 rhs and form a second forall block to copy to variable. */
3037 need_temp = check_forall_dependencies(c, &pre, &post);
3039 /* Temporaries due to array assignment data dependencies introduce
3040 no end of problems. */
3042 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3043 nested_forall_info, &block);
3046 /* Use the normal assignment copying routines. */
3047 assign = gfc_trans_assignment (c->expr1, c->expr2, false);
3049 /* Generate body and loops. */
3050 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3052 gfc_add_expr_to_block (&block, tmp);
3055 /* Cleanup any temporary symtrees that have been made to deal
3056 with dependencies. */
3058 cleanup_forall_symtrees (c);
3063 /* Translate WHERE or WHERE construct nested in FORALL. */
3064 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3067 /* Pointer assignment inside FORALL. */
3068 case EXEC_POINTER_ASSIGN:
3069 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3071 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3072 nested_forall_info, &block);
3075 /* Use the normal assignment copying routines. */
3076 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3078 /* Generate body and loops. */
3079 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3081 gfc_add_expr_to_block (&block, tmp);
3086 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3087 gfc_add_expr_to_block (&block, tmp);
3090 /* Explicit subroutine calls are prevented by the frontend but interface
3091 assignments can legitimately produce them. */
3092 case EXEC_ASSIGN_CALL:
3093 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3094 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3095 gfc_add_expr_to_block (&block, tmp);
3105 /* Restore the original index variables. */
3106 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3107 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3109 /* Free the space for var, start, end, step, varexpr. */
3115 gfc_free (saved_vars);
3117 /* Free the space for this forall_info. */
3122 /* Free the temporary for the mask. */
3123 tmp = gfc_call_free (pmask);
3124 gfc_add_expr_to_block (&block, tmp);
     /* NOTE(review): maskindex was created with gfc_create_var_np above,
        so presumably it still has to be attached to the current scope
        here -- confirm against gfc_create_var_np's contract. */
3127 pushdecl (maskindex);
3129 gfc_add_block_to_block (&pre, &block);
3130 gfc_add_block_to_block (&pre, &post);
3132 return gfc_finish_block (&pre);
3136 /* Translate the FORALL statement or construct.  This is the entry
     point: the outermost FORALL has no enclosing forall nest, hence the
     NULL nested_forall_info passed to gfc_trans_forall_1. */
3138 tree gfc_trans_forall (gfc_code * code)
3140 return gfc_trans_forall_1 (code, NULL);
3144 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3145 If the WHERE construct is nested in FORALL, compute the overall temporary
3146 needed by the WHERE mask expression multiplied by the iterator number of
3148 ME is the WHERE mask expression.
3149 MASK is the current execution mask upon input, whose sense may or may
3150 not be inverted as specified by the INVERT argument.
3151 CMASK is the updated execution mask on output, or NULL if not required.
3152 PMASK is the pending execution mask on output, or NULL if not required.
3153 BLOCK is the block in which to place the condition evaluation loops. */
3156 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3157 tree mask, bool invert, tree cmask, tree pmask,
3158 tree mask_type, stmtblock_t * block)
3163 stmtblock_t body, body1;
3164 tree count, cond, mtmp;
3167 gfc_init_loopinfo (&loop);
     /* Walk the mask expression twice; LSS may be needed to determine
        its shape while RSS is scalarized to evaluate ME. */
3169 lss = gfc_walk_expr (me);
3170 rss = gfc_walk_expr (me);
3172 /* Variable to index the temporary. */
3173 count = gfc_create_var (gfc_array_index_type, "count");
3174 /* Initialize count. */
3175 gfc_add_modify (block, count, gfc_index_zero_node);
3177 gfc_start_block (&body);
3179 gfc_init_se (&rse, NULL);
3180 gfc_init_se (&lse, NULL);
3182 if (lss == gfc_ss_terminator)
3184 gfc_init_block (&body1);
3188 /* Initialize the loop. */
3189 gfc_init_loopinfo (&loop);
3191 /* We may need LSS to determine the shape of the expression. */
3192 gfc_add_ss_to_loop (&loop, lss);
3193 gfc_add_ss_to_loop (&loop, rss);
3195 gfc_conv_ss_startstride (&loop);
3196 gfc_conv_loop_setup (&loop, &me->where);
3198 gfc_mark_ss_chain_used (rss, 1);
3199 /* Start the loop body. */
3200 gfc_start_scalarized_body (&loop, &body1);
3202 /* Translate the expression. */
3203 gfc_copy_loopinfo_to_se (&rse, &loop);
3205 gfc_conv_expr (&rse, me);
3208 /* Variable to evaluate mask condition. */
3209 cond = gfc_create_var (mask_type, "cond");
3210 if (mask && (cmask || pmask))
3211 mtmp = gfc_create_var (mask_type, "mask");
3212 else mtmp = NULL_TREE;
3214 gfc_add_block_to_block (&body1, &lse.pre);
3215 gfc_add_block_to_block (&body1, &rse.pre);
3217 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
     /* Fold the incoming control mask (optionally inverted) into the new
        current (cmask) and pending (pmask) masks. */
3219 if (mask && (cmask || pmask))
3221 tmp = gfc_build_array_ref (mask, count, NULL);
3223 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3224 gfc_add_modify (&body1, mtmp, tmp);
3229 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3232 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3233 gfc_add_modify (&body1, tmp1, tmp);
3238 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3239 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3241 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3242 gfc_add_modify (&body1, tmp1, tmp);
3245 gfc_add_block_to_block (&body1, &lse.post);
3246 gfc_add_block_to_block (&body1, &rse.post);
3248 if (lss == gfc_ss_terminator)
3250 gfc_add_block_to_block (&body, &body1);
3254 /* Increment count. */
3255 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3256 gfc_index_one_node);
3257 gfc_add_modify (&body1, count, tmp1);
3259 /* Generate the copying loops. */
3260 gfc_trans_scalarizing_loops (&loop, &body1);
3262 gfc_add_block_to_block (&body, &loop.pre);
3263 gfc_add_block_to_block (&body, &loop.post);
3265 gfc_cleanup_loop (&loop);
3266 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3267 as tree nodes in SS may not be valid in different scope. */
3270 tmp1 = gfc_finish_block (&body);
3271 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3272 if (nested_forall_info != NULL)
3273 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3275 gfc_add_expr_to_block (block, tmp1);
3279 /* Translate an assignment statement in a WHERE statement or construct
3280 statement. The MASK expression is used to control which elements
3281 of EXPR1 shall be assigned. The sense of MASK is specified by
3285 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3286 tree mask, bool invert,
3287 tree count1, tree count2,
3293 gfc_ss *lss_section;
3300 tree index, maskexpr;
3302 /* A defined assignment. */
3303 if (cnext && cnext->resolved_sym)
3304 return gfc_trans_call (cnext, true, mask, count1, invert);
3307 /* TODO: handle this special case.
3308 Special case a single function returning an array. */
3309 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3311 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3317 /* Assignment of the form lhs = rhs. */
3318 gfc_start_block (&block);
3320 gfc_init_se (&lse, NULL);
3321 gfc_init_se (&rse, NULL);
3324 lss = gfc_walk_expr (expr1);
3327 /* In each where-assign-stmt, the mask-expr and the variable being
3328 defined shall be arrays of the same shape. */
3329 gcc_assert (lss != gfc_ss_terminator);
3331 /* The assignment needs scalarization. */
3334 /* Find a non-scalar SS from the lhs. */
3335 while (lss_section != gfc_ss_terminator
3336 && lss_section->type != GFC_SS_SECTION)
3337 lss_section = lss_section->next;
3339 gcc_assert (lss_section != gfc_ss_terminator);
3341 /* Initialize the scalarizer. */
3342 gfc_init_loopinfo (&loop);
3345 rss = gfc_walk_expr (expr2);
3346 if (rss == gfc_ss_terminator)
3348 /* The rhs is scalar. Add a ss for the expression. */
3349 rss = gfc_get_ss ();
3351 rss->next = gfc_ss_terminator;
3352 rss->type = GFC_SS_SCALAR;
3356 /* Associate the SS with the loop. */
3357 gfc_add_ss_to_loop (&loop, lss);
3358 gfc_add_ss_to_loop (&loop, rss);
3360 /* Calculate the bounds of the scalarization. */
3361 gfc_conv_ss_startstride (&loop);
3363 /* Resolve any data dependencies in the statement. */
3364 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3366 /* Setup the scalarizing loops. */
3367 gfc_conv_loop_setup (&loop, &expr2->where);
3369 /* Setup the gfc_se structures. */
3370 gfc_copy_loopinfo_to_se (&lse, &loop);
3371 gfc_copy_loopinfo_to_se (&rse, &loop);
3374 gfc_mark_ss_chain_used (rss, 1);
3375 if (loop.temp_ss == NULL)
3378 gfc_mark_ss_chain_used (lss, 1);
3382 lse.ss = loop.temp_ss;
3383 gfc_mark_ss_chain_used (lss, 3);
3384 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3387 /* Start the scalarized loop body. */
3388 gfc_start_scalarized_body (&loop, &body);
3390 /* Translate the expression. */
3391 gfc_conv_expr (&rse, expr2);
3392 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3394 gfc_conv_tmp_array_ref (&lse);
3395 gfc_advance_se_ss_chain (&lse);
3398 gfc_conv_expr (&lse, expr1);
3400 /* Form the mask expression according to the mask. */
3402 maskexpr = gfc_build_array_ref (mask, index, NULL);
3404 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3406 /* Use the scalar assignment as is. */
3407 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3408 loop.temp_ss != NULL, false);
     /* Guard the assignment with the (possibly inverted) mask element. */
3410 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
3412 gfc_add_expr_to_block (&body, tmp);
3414 if (lss == gfc_ss_terminator)
3416 /* Increment count1. */
3417 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3418 count1, gfc_index_one_node);
3419 gfc_add_modify (&body, count1, tmp);
3421 /* Use the scalar assignment as is. */
3422 gfc_add_block_to_block (&block, &body);
3426 gcc_assert (lse.ss == gfc_ss_terminator
3427 && rse.ss == gfc_ss_terminator);
3429 if (loop.temp_ss != NULL)
3431 /* Increment count1 before finishing the main body of the
     scalarized loop. */
3433 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3434 count1, gfc_index_one_node);
3435 gfc_add_modify (&body, count1, tmp);
3436 gfc_trans_scalarized_loop_boundary (&loop, &body);
3438 /* We need to copy the temporary to the actual lhs. */
3439 gfc_init_se (&lse, NULL);
3440 gfc_init_se (&rse, NULL);
3441 gfc_copy_loopinfo_to_se (&lse, &loop);
3442 gfc_copy_loopinfo_to_se (&rse, &loop);
3444 rse.ss = loop.temp_ss;
3447 gfc_conv_tmp_array_ref (&rse);
3448 gfc_advance_se_ss_chain (&rse);
3449 gfc_conv_expr (&lse, expr1);
3451 gcc_assert (lse.ss == gfc_ss_terminator
3452 && rse.ss == gfc_ss_terminator);
3454 /* Form the mask expression according to the mask tree list. */
3456 maskexpr = gfc_build_array_ref (mask, index, NULL);
3458 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3461 /* Use the scalar assignment as is. */
3462 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3463 tmp = build3_v (COND_EXPR, maskexpr, tmp,
3464 build_empty_stmt (input_location));
3465 gfc_add_expr_to_block (&body, tmp);
3467 /* Increment count2. */
3468 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3469 count2, gfc_index_one_node);
3470 gfc_add_modify (&body, count2, tmp);
3474 /* Increment count1. */
3475 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3476 count1, gfc_index_one_node);
3477 gfc_add_modify (&body, count1, tmp);
3480 /* Generate the copying loops. */
3481 gfc_trans_scalarizing_loops (&loop, &body);
3483 /* Wrap the whole thing up. */
3484 gfc_add_block_to_block (&block, &loop.pre);
3485 gfc_add_block_to_block (&block, &loop.post);
3486 gfc_cleanup_loop (&loop);
3489 return gfc_finish_block (&block);
3493 /* Translate the WHERE construct or statement.
3494 This function can be called iteratively to translate the nested WHERE
3495 construct or statement.
3496 MASK is the control mask. */
3499 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3500 forall_info * nested_forall_info, stmtblock_t * block)
3502 stmtblock_t inner_size_body;
3503 tree inner_size, size;
3512 tree count1, count2;
3516 tree pcmask = NULL_TREE;
3517 tree ppmask = NULL_TREE;
3518 tree cmask = NULL_TREE;
3519 tree pmask = NULL_TREE;
3520 gfc_actual_arglist *arg;
3522 /* The WHERE statement or the WHERE construct statement. */
3523 cblock = code->block;
3525 /* As the mask array can be very big, prefer compact boolean types. */
3526 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3528 /* Determine which temporary masks are needed. */
3531 /* One clause: No ELSEWHEREs. */
3532 need_cmask = (cblock->next != 0);
3535 else if (cblock->block->block)
3537 /* Three or more clauses: Conditional ELSEWHEREs. */
3541 else if (cblock->next)
3543 /* Two clauses, the first non-empty. */
3545 need_pmask = (mask != NULL_TREE
3546 && cblock->block->next != 0);
3548 else if (!cblock->block->next)
3550 /* Two clauses, both empty. */
3554 /* Two clauses, the first empty, the second non-empty. */
3557 need_cmask = (cblock->block->expr1 != 0);
3566 if (need_cmask || need_pmask)
3568 /* Calculate the size of temporary needed by the mask-expr. */
3569 gfc_init_block (&inner_size_body);
3570 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
3571 &inner_size_body, &lss, &rss);
3573 /* Calculate the total size of temporary needed. */
3574 size = compute_overall_iter_number (nested_forall_info, inner_size,
3575 &inner_size_body, block);
3577 /* Check whether the size is negative. */
3578 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3579 gfc_index_zero_node);
3580 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3581 gfc_index_zero_node, size);
3582 size = gfc_evaluate_now (size, block);
3584 /* Allocate temporary for WHERE mask if needed. */
3586 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3589 /* Allocate temporary for !mask if needed. */
3591 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3597 /* Each time around this loop, the where clause is conditional
3598 on the value of mask and invert, which are updated at the
3599 bottom of the loop. */
3601 /* Has mask-expr. */
3604 /* Ensure that the WHERE mask will be evaluated exactly once.
3605 If there are no statements in this WHERE/ELSEWHERE clause,
3606 then we don't need to update the control mask (cmask).
3607 If this is the last clause of the WHERE construct, then
3608 we don't need to update the pending control mask (pmask). */
3610 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3612 cblock->next ? cmask : NULL_TREE,
3613 cblock->block ? pmask : NULL_TREE,
3616 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3618 (cblock->next || cblock->block)
3619 ? cmask : NULL_TREE,
3620 NULL_TREE, mask_type, block);
3624 /* It's a final elsewhere-stmt. No mask-expr is present. */
3628 /* The body of this where clause is controlled by cmask with
3629 sense specified by invert. */
3631 /* Get the assignment statement of a WHERE statement, or the first
3632 statement in where-body-construct of a WHERE construct. */
3633 cnext = cblock->next;
3638 /* WHERE assignment statement. */
3639 case EXEC_ASSIGN_CALL:
3641 arg = cnext->ext.actual;
3642 expr1 = expr2 = NULL;
3643 for (; arg; arg = arg->next)
3655 expr1 = cnext->expr1;
3656 expr2 = cnext->expr2;
     /* Inside a FORALL, an assignment whose lhs and rhs depend on each
        other needs a temporary; defined assignments are excluded. */
3658 if (nested_forall_info != NULL)
3660 need_temp = gfc_check_dependency (expr1, expr2, 0);
3661 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3662 gfc_trans_assign_need_temp (expr1, expr2,
3664 nested_forall_info, block);
3667 /* Variables to control maskexpr. */
3668 count1 = gfc_create_var (gfc_array_index_type, "count1");
3669 count2 = gfc_create_var (gfc_array_index_type, "count2");
3670 gfc_add_modify (block, count1, gfc_index_zero_node);
3671 gfc_add_modify (block, count2, gfc_index_zero_node);
3673 tmp = gfc_trans_where_assign (expr1, expr2,
3678 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3680 gfc_add_expr_to_block (block, tmp);
3685 /* Variables to control maskexpr. */
3686 count1 = gfc_create_var (gfc_array_index_type, "count1");
3687 count2 = gfc_create_var (gfc_array_index_type, "count2");
3688 gfc_add_modify (block, count1, gfc_index_zero_node);
3689 gfc_add_modify (block, count2, gfc_index_zero_node);
3691 tmp = gfc_trans_where_assign (expr1, expr2,
3695 gfc_add_expr_to_block (block, tmp);
3700 /* WHERE or WHERE construct is part of a where-body-construct. */
3702 gfc_trans_where_2 (cnext, cmask, invert,
3703 nested_forall_info, block);
3710 /* The next statement within the same where-body-construct. */
3711 cnext = cnext->next;
3713 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3714 cblock = cblock->block;
3715 if (mask == NULL_TREE)
3717 /* If we're the initial WHERE, we can simply invert the sense
3718 of the current mask to obtain the "mask" for the remaining
3725 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3731 /* If we allocated a pending mask array, deallocate it now. */
3734 tmp = gfc_call_free (ppmask);
3735 gfc_add_expr_to_block (block, tmp);
3738 /* If we allocated a current mask array, deallocate it now. */
3741 tmp = gfc_call_free (pcmask);
3742 gfc_add_expr_to_block (block, tmp);
3746 /* Translate a simple WHERE construct or statement without dependencies.
3747 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3748 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3749 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
3752 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3754 stmtblock_t block, body;
3755 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3756 tree tmp, cexpr, tstmt, estmt;
3757 gfc_ss *css, *tdss, *tsss;
3758 gfc_se cse, tdse, tsse, edse, esse;
3763 /* Allow the scalarizer to workshare simple where loops. */
3764 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
3765 ompws_flags |= OMPWS_SCALARIZER_WS;
3767 cond = cblock->expr1;
3768 tdst = cblock->next->expr1;
3769 tsrc = cblock->next->expr2;
3770 edst = eblock ? eblock->next->expr1 : NULL;
3771 esrc = eblock ? eblock->next->expr2 : NULL;
3773 gfc_start_block (&block);
3774 gfc_init_loopinfo (&loop);
3776 /* Handle the condition. */
3777 gfc_init_se (&cse, NULL);
3778 css = gfc_walk_expr (cond);
3779 gfc_add_ss_to_loop (&loop, css);
3781 /* Handle the then-clause. */
3782 gfc_init_se (&tdse, NULL);
3783 gfc_init_se (&tsse, NULL);
3784 tdss = gfc_walk_expr (tdst);
3785 tsss = gfc_walk_expr (tsrc);
     /* The then-clause rhs is scalar: add a scalar SS for it. */
3786 if (tsss == gfc_ss_terminator)
3788 tsss = gfc_get_ss ();
3790 tsss->next = gfc_ss_terminator;
3791 tsss->type = GFC_SS_SCALAR;
3794 gfc_add_ss_to_loop (&loop, tdss);
3795 gfc_add_ss_to_loop (&loop, tsss);
3799 /* Handle the else clause. */
3800 gfc_init_se (&edse, NULL);
3801 gfc_init_se (&esse, NULL);
3802 edss = gfc_walk_expr (edst);
3803 esss = gfc_walk_expr (esrc);
     /* The else-clause rhs is scalar: add a scalar SS for it. */
3804 if (esss == gfc_ss_terminator)
3806 esss = gfc_get_ss ();
3808 esss->next = gfc_ss_terminator;
3809 esss->type = GFC_SS_SCALAR;
3812 gfc_add_ss_to_loop (&loop, edss);
3813 gfc_add_ss_to_loop (&loop, esss);
3816 gfc_conv_ss_startstride (&loop);
3817 gfc_conv_loop_setup (&loop, &tdst->where);
3819 gfc_mark_ss_chain_used (css, 1);
3820 gfc_mark_ss_chain_used (tdss, 1);
3821 gfc_mark_ss_chain_used (tsss, 1);
3824 gfc_mark_ss_chain_used (edss, 1);
3825 gfc_mark_ss_chain_used (esss, 1);
3828 gfc_start_scalarized_body (&loop, &body);
3830 gfc_copy_loopinfo_to_se (&cse, &loop);
3831 gfc_copy_loopinfo_to_se (&tdse, &loop);
3832 gfc_copy_loopinfo_to_se (&tsse, &loop);
3838 gfc_copy_loopinfo_to_se (&edse, &loop);
3839 gfc_copy_loopinfo_to_se (&esse, &loop);
3844 gfc_conv_expr (&cse, cond);
3845 gfc_add_block_to_block (&body, &cse.pre);
3848 gfc_conv_expr (&tsse, tsrc);
3849 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3851 gfc_conv_tmp_array_ref (&tdse);
3852 gfc_advance_se_ss_chain (&tdse);
3855 gfc_conv_expr (&tdse, tdst);
3859 gfc_conv_expr (&esse, esrc);
3860 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3862 gfc_conv_tmp_array_ref (&edse);
3863 gfc_advance_se_ss_chain (&edse);
3866 gfc_conv_expr (&edse, edst);
     /* Build the masked conditional assignment:
        if (cond) then-assignment else else-assignment. */
3869 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3870 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3871 : build_empty_stmt (input_location);
3872 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3873 gfc_add_expr_to_block (&body, tmp);
3874 gfc_add_block_to_block (&body, &cse.post);
3876 gfc_trans_scalarizing_loops (&loop, &body);
3877 gfc_add_block_to_block (&block, &loop.pre);
3878 gfc_add_block_to_block (&block, &loop.post);
3879 gfc_cleanup_loop (&loop);
3881 return gfc_finish_block (&block);
3884 /* As the WHERE or WHERE construct statement can be nested, we call
3885 gfc_trans_where_2 to do the translation, and pass the initial
3886 NULL values for both the control mask and the pending control mask. */
/* NOTE(review): several lines of this function (return type, locals,
   braces, and parts of the conditions) are elided in this view; the
   comments below describe only what the visible lines establish.  */
3889 gfc_trans_where (gfc_code * code)
3895 cblock = code->block;
/* Fast path 1: the WHERE body is a single assignment with no further
   statements (cblock->next->op == EXEC_ASSIGN, no ->next->next).  */
3897 && cblock->next->op == EXEC_ASSIGN
3898 && !cblock->next->next)
3900 eblock = cblock->block;
3903 /* A simple "WHERE (cond) x = y" statement or block is
3904 dependence free if cond is not dependent upon writing x,
3905 and the source y is unaffected by the destination x. */
3906 if (!gfc_check_dependency (cblock->next->expr1,
3908 && !gfc_check_dependency (cblock->next->expr1,
3909 cblock->next->expr2, 0))
/* No dependence: translate via the single coalesced scalarized
   loop of gfc_trans_where_3, with no ELSEWHERE block.  */
3910 return gfc_trans_where_3 (cblock, NULL);
/* Fast path 2: WHERE ... ELSEWHERE ... ENDWHERE where the ELSEWHERE
   block (eblock) is unmasked (!eblock->expr1) and also consists of a
   single assignment.  */
3912 else if (!eblock->expr1
3915 && eblock->next->op == EXEC_ASSIGN
3916 && !eblock->next->next)
3918 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3919 block is dependence free if cond is not dependent on writes
3920 to x1 and x2, y1 is not dependent on writes to x2, and y2
3921 is not dependent on writes to x1, and both y's are not
3922 dependent upon their own x's. In addition to this, the
3923 final two dependency checks below exclude all but the same
3924 array reference if the where and elswhere destinations
3925 are the same. In short, this is VERY conservative and this
3926 is needed because the two loops, required by the standard
3927 are coalesced in gfc_trans_where_3. */
3928 if (!gfc_check_dependency(cblock->next->expr1,
3930 && !gfc_check_dependency(eblock->next->expr1,
3932 && !gfc_check_dependency(cblock->next->expr1,
3933 eblock->next->expr2, 1)
3934 && !gfc_check_dependency(eblock->next->expr1,
3935 cblock->next->expr2, 1)
3936 && !gfc_check_dependency(cblock->next->expr1,
3937 cblock->next->expr2, 1)
3938 && !gfc_check_dependency(eblock->next->expr1,
3939 eblock->next->expr2, 1)
3940 && !gfc_check_dependency(cblock->next->expr1,
3941 eblock->next->expr1, 0)
3942 && !gfc_check_dependency(eblock->next->expr1,
3943 cblock->next->expr1, 0))
3944 return gfc_trans_where_3 (cblock, eblock);
/* General (possibly nested) case: fall back to gfc_trans_where_2
   with NULL initial control mask and pending control mask, which
   generates explicit mask temporaries.  */
3948 gfc_start_block (&block);
3950 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3952 return gfc_finish_block (&block);
3956 /* CYCLE a DO loop. The label decl has already been created by
3957 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
3958 node at the head of the loop. We must mark the label as used. */
3961 gfc_trans_cycle (gfc_code * code)
/* Fetch the cycle label stored by the loop translator; marking it
   TREE_USED prevents an unused-label warning/removal.  */
3965 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3966 TREE_USED (cycle_label) = 1;
/* CYCLE is just a goto to the loop's increment/test point.  */
3967 return build1_v (GOTO_EXPR, cycle_label);
3971 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3972 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
/* NOTE(review): the tail of this header comment and the return-type
   line are elided in this view.  */
3976 gfc_trans_exit (gfc_code * code)
/* TREE_VALUE holds the exit label (TREE_PURPOSE holds the cycle
   label -- see gfc_trans_cycle above).  */
3980 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3981 TREE_USED (exit_label) = 1;
/* EXIT is a goto past the end of the loop.  */
3982 return build1_v (GOTO_EXPR, exit_label);
3986 /* Translate the ALLOCATE statement. */
/* NOTE(review): many interior lines (locals, braces, else-arms) are
   elided in this view; hedged comments below mark what cannot be
   confirmed from the visible code.  */
3989 gfc_trans_allocate (gfc_code * code)
3992 gfc_expr *expr, *init_e;
/* Nothing to allocate -- presumably returns early; the return
   statement itself is elided here.  */
4002 if (!code->ext.alloc.list)
4005 pstat = stat = error_label = tmp = memsz = NULL_TREE;
4007 gfc_start_block (&block);
4009 /* Either STAT= and/or ERRMSG is present. */
4010 if (code->expr1 || code->expr2)
/* expr1 is the STAT= variable, expr2 the ERRMSG= variable (see the
   error-reporting code at the end of this function).  */
4012 tree gfc_int4_type_node = gfc_get_int_type (4);
4014 stat = gfc_create_var (gfc_int4_type_node, "stat");
4015 pstat = gfc_build_addr_expr (NULL_TREE, stat);
/* Label jumped to as soon as one allocation fails, so later
   allocations in the same statement are skipped.  */
4017 error_label = gfc_build_label_decl (NULL_TREE);
4018 TREE_USED (error_label) = 1;
/* Process each object in the ALLOCATE list.  */
4021 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4023 expr = gfc_copy_expr (al->expr);
/* For CLASS objects, allocation acts on the $data component of the
   class container.  */
4025 if (expr->ts.type == BT_CLASS)
4026 gfc_add_component_ref (expr, "$data");
4028 gfc_init_se (&se, NULL);
4029 gfc_start_block (&se.pre);
4031 se.want_pointer = 1;
4032 se.descriptor_only = 1;
4033 gfc_conv_expr (&se, expr);
/* gfc_array_allocate handles array targets; a false return means
   this is a scalar or derived-type object allocated below.  */
4035 if (!gfc_array_allocate (&se, expr, pstat))
4037 /* A scalar or derived type. */
4039 /* Determine allocate size. */
4040 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
/* SOURCE= is polymorphic: read the dynamic size from the source's
   vtab ($vptr->$size) at run time.  */
4044 sz = gfc_copy_expr (code->expr3);
4045 gfc_add_component_ref (sz, "$vptr");
4046 gfc_add_component_ref (sz, "$size");
4047 gfc_init_se (&se_sz, NULL);
4048 gfc_conv_expr (&se_sz, sz);
/* Non-polymorphic SOURCE=: size is the static size of its type.  */
4052 else if (code->expr3 && code->expr3->ts.type != BT_CLASS)
4053 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
/* Type-spec form (ALLOCATE (type-spec :: ...)).  */
4054 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4055 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
/* Otherwise fall back to the declared type of the object itself
   (pointee type of the converted expression).  */
4057 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4059 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4060 memsz = se.string_length;
4062 /* Allocate - for non-pointers with re-alloc checking. */
4069 /* Find the last reference in the chain. */
4070 while (ref && ref->next != NULL)
4072 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT);
/* Whether the target is ALLOCATABLE (vs POINTER) decides which
   runtime allocation helper is used below; the component branch's
   condition line is elided in this view.  */
4077 allocatable = expr->symtree->n.sym->attr.allocatable;
4079 allocatable = ref->u.c.component->attr.allocatable;
4082 tmp = gfc_allocate_array_with_status (&se.pre, se.expr, memsz,
4085 tmp = gfc_allocate_with_status (&se.pre, memsz, pstat);
/* Store the returned pointer into the object.  */
4088 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
4089 fold_convert (TREE_TYPE (se.expr), tmp));
4090 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT=/ERRMSG=, branch to error_label when stat != 0 so the
   remaining allocations are abandoned.  */
4092 if (code->expr1 || code->expr2)
4094 tmp = build1_v (GOTO_EXPR, error_label);
4095 parm = fold_build2 (NE_EXPR, boolean_type_node,
4096 stat, build_int_cst (TREE_TYPE (stat), 0));
4097 tmp = fold_build3 (COND_EXPR, void_type_node,
4098 parm, tmp, build_empty_stmt (input_location))
4099 gfc_add_expr_to_block (&se.pre, tmp);
/* Derived types with allocatable components: null those components
   so later (re)allocation/deallocation sees a defined state.  */
4102 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4104 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4105 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4106 gfc_add_expr_to_block (&se.pre, tmp);
4111 tmp = gfc_finish_block (&se.pre);
4112 gfc_add_expr_to_block (&block, tmp);
4114 /* Initialization via SOURCE block. */
4117 gfc_expr *rhs = gfc_copy_expr (code->expr3);
/* CLASS target with SOURCE=: copy raw bytes ($data to $data) via
   memcpy of memsz bytes rather than a full assignment.  */
4118 if (al->expr->ts.type == BT_CLASS)
4121 if (rhs->ts.type == BT_CLASS)
4122 gfc_add_component_ref (rhs, "$data");
4123 gfc_init_se (&dst, NULL);
4124 gfc_init_se (&src, NULL);
4125 gfc_conv_expr (&dst, expr);
4126 gfc_conv_expr (&src, rhs);
4127 gfc_add_block_to_block (&block, &src.pre);
4128 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
/* Non-CLASS target: a regular assignment from SOURCE=.  */
4131 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4133 gfc_free_expr (rhs);
4134 gfc_add_expr_to_block (&block, tmp);
4136 /* Default initializer for CLASS variables. */
4137 else if (al->expr->ts.type == BT_CLASS
4138 && code->ext.alloc.ts.type == BT_DERIVED
4139 && (init_e = gfc_default_initializer (&code->ext.alloc.ts)))
4142 gfc_init_se (&dst, NULL);
4143 gfc_init_se (&src, NULL);
4144 gfc_conv_expr (&dst, expr);
4145 gfc_conv_expr (&src, init_e);
4146 gfc_add_block_to_block (&block, &src.pre);
4147 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
4148 gfc_add_expr_to_block (&block, tmp);
4150 /* Add default initializer for those derived types that need them. */
4151 else if (expr->ts.type == BT_DERIVED
4152 && (init_e = gfc_default_initializer (&expr->ts)))
4154 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4156 gfc_add_expr_to_block (&block, tmp);
4159 /* Allocation of CLASS entities. */
4160 gfc_free_expr (expr);
4162 if (expr->ts.type == BT_CLASS)
4167 /* Initialize VPTR for CLASS objects. */
4168 lhs = gfc_expr_to_initialize (expr);
4169 gfc_add_component_ref (lhs, "$vptr");
4171 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4173 /* VPTR must be determined at run time. */
/* Pointer-assign the source's $vptr to the target's $vptr.  */
4174 rhs = gfc_copy_expr (code->expr3);
4175 gfc_add_component_ref (rhs, "$vptr");
4176 tmp = gfc_trans_pointer_assignment (lhs, rhs);
4177 gfc_add_expr_to_block (&block, tmp);
4178 gfc_free_expr (rhs);
4182 /* VPTR is fixed at compile time. */
/* Pick the typespec that determines the vtab: SOURCE= type,
   the object's own derived type, the ALLOCATE type-spec, or the
   CLASS declared type; some condition lines are elided here.  */
4186 ts = &code->expr3->ts;
4187 else if (expr->ts.type == BT_DERIVED)
4189 else if (code->ext.alloc.ts.type == BT_DERIVED)
4190 ts = &code->ext.alloc.ts;
4191 else if (expr->ts.type == BT_CLASS)
4192 ts = &expr->ts.u.derived->components->ts;
4196 if (ts->type == BT_DERIVED)
/* Store the address of the derived type's vtab in $vptr.  */
4198 vtab = gfc_find_derived_vtab (ts->u.derived);
4200 gfc_init_se (&lse, NULL);
4201 lse.want_pointer = 1;
4202 gfc_conv_expr (&lse, lhs);
4203 tmp = gfc_build_addr_expr (NULL_TREE,
4204 gfc_get_symbol_decl (vtab));
4205 gfc_add_modify (&block, lse.expr,
4206 fold_convert (TREE_TYPE (lse.expr), tmp));
/* Error landing pad: assign the collected status to the STAT=
   variable (the guarding condition line is elided in this view).  */
4216 tmp = build1_v (LABEL_EXPR, error_label);
4217 gfc_add_expr_to_block (&block, tmp);
4219 gfc_init_se (&se, NULL);
4220 gfc_conv_expr_lhs (&se, code->expr1);
4221 tmp = convert (TREE_TYPE (se.expr), stat);
4222 gfc_add_modify (&block, se.expr, tmp);
4228 /* A better error message may be possible, but not required. */
4229 const char *msg = "Attempt to allocate an allocated object";
4230 tree errmsg, slen, dlen;
/* ERRMSG= handling: copy min(strlen(msg), len(errmsg-var)) bytes of
   the canned message into the user's variable, but only when
   stat != 0.  */
4232 gfc_init_se (&se, NULL);
4233 gfc_conv_expr_lhs (&se, code->expr2);
4235 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4237 gfc_add_modify (&block, errmsg,
4238 gfc_build_addr_expr (pchar_type_node,
4239 gfc_build_localized_cstring_const (msg)));
4241 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4242 dlen = gfc_get_expr_charlen (code->expr2);
4243 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4245 dlen = build_call_expr_loc (input_location,
4246 built_in_decls[BUILT_IN_MEMCPY], 3,
4247 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4249 tmp = fold_build2 (NE_EXPR, boolean_type_node, stat,
4250 build_int_cst (TREE_TYPE (stat), 0));
4252 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4254 gfc_add_expr_to_block (&block, tmp);
4257 return gfc_finish_block (&block);
4261 /* Translate a DEALLOCATE statement. */
/* NOTE(review): locals, braces, and some condition lines of this
   function are elided in this view; comments are limited to what the
   visible lines establish.  */
4264 gfc_trans_deallocate (gfc_code *code)
4269 tree apstat, astat, pstat, stat, tmp;
4272 pstat = apstat = stat = astat = tmp = NULL_TREE;
4274 gfc_start_block (&block);
4276 /* Count the number of failed deallocations. If deallocate() was
4277 called with STAT= , then set STAT to the count. If deallocate
4278 was called with ERRMSG, then set ERRMG to a string. */
4279 if (code->expr1 || code->expr2)
/* stat receives each individual deallocation's status; astat
   accumulates them across the whole list.  */
4281 tree gfc_int4_type_node = gfc_get_int_type (4);
4283 stat = gfc_create_var (gfc_int4_type_node, "stat");
4284 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4286 /* Running total of possible deallocation failures. */
4287 astat = gfc_create_var (gfc_int4_type_node, "astat");
4288 apstat = gfc_build_addr_expr (NULL_TREE, astat);
4290 /* Initialize astat to 0. */
4291 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
/* Process each object in the DEALLOCATE list.  */
4294 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4297 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4299 gfc_init_se (&se, NULL);
4300 gfc_start_block (&se.pre);
4302 se.want_pointer = 1;
4303 se.descriptor_only = 1;
4304 gfc_conv_expr (&se, expr);
/* Derived types with allocatable components need those components
   freed before the object itself.  */
4306 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
/* Locate the last component reference so pointer components can be
   recognized below.  */
4309 gfc_ref *last = NULL;
4310 for (ref = expr->ref; ref; ref = ref->next)
4311 if (ref->type == REF_COMPONENT)
4314 /* Do not deallocate the components of a derived type
4315 ultimate pointer component. */
4316 if (!(last && last->u.c.component->attr.pointer)
4317 && !(!last && expr->symtree->n.sym->attr.pointer))
4319 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
4321 gfc_add_expr_to_block (&se.pre, tmp);
/* Free the object itself: array path vs scalar path (the selecting
   condition line is elided in this view).  */
4326 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4329 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4330 gfc_add_expr_to_block (&se.pre, tmp);
/* Null the pointer after freeing so the object reads as
   unallocated.  */
4332 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4333 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4336 gfc_add_expr_to_block (&se.pre, tmp);
4338 /* Keep track of the number of failed deallocations by adding stat
4339 of the last deallocation to the running total. */
4340 if (code->expr1 || code->expr2)
4342 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4343 gfc_add_modify (&se.pre, astat, apstat);
4346 tmp = gfc_finish_block (&se.pre);
4347 gfc_add_expr_to_block (&block, tmp);
/* STAT= variable gets the accumulated failure count (the guarding
   condition line is elided in this view).  */
4354 gfc_init_se (&se, NULL);
4355 gfc_conv_expr_lhs (&se, code->expr1);
4356 tmp = convert (TREE_TYPE (se.expr), astat);
4357 gfc_add_modify (&block, se.expr, tmp);
4363 /* A better error message may be possible, but not required. */
4364 const char *msg = "Attempt to deallocate an unallocated object";
4365 tree errmsg, slen, dlen;
/* ERRMSG= handling mirrors gfc_trans_allocate: memcpy the canned
   message into the user's variable only when astat != 0, truncated
   to the variable's length.  */
4367 gfc_init_se (&se, NULL);
4368 gfc_conv_expr_lhs (&se, code->expr2);
4370 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4372 gfc_add_modify (&block, errmsg,
4373 gfc_build_addr_expr (pchar_type_node,
4374 gfc_build_localized_cstring_const (msg)));
4376 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4377 dlen = gfc_get_expr_charlen (code->expr2);
4378 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4380 dlen = build_call_expr_loc (input_location,
4381 built_in_decls[BUILT_IN_MEMCPY], 3,
4382 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4384 tmp = fold_build2 (NE_EXPR, boolean_type_node, astat,
4385 build_int_cst (TREE_TYPE (astat), 0));
4387 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4389 gfc_add_expr_to_block (&block, tmp);
4392 return gfc_finish_block (&block);