1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
31 #include "trans-stmt.h"
32 #include "trans-types.h"
33 #include "trans-array.h"
34 #include "trans-const.h"
36 #include "dependency.h"
/* Per-iterator bookkeeping for FORALL translation; instances are chained
   through NEXT.  NOTE(review): this view of the file is fragmentary -- the
   struct members between these lines are not visible here.  */
38 typedef struct iter_info
44 struct iter_info *next;
/* State for one level of FORALL nesting; PREV_NEST links to the enclosing
   (outer) FORALL level.  */
48 typedef struct forall_info
55 struct forall_info *prev_nest;
/* Forward declaration of the WHERE-construct translation worker, needed
   because WHERE may appear inside FORALL and vice versa.  */
59 static void gfc_trans_where_2 (gfc_code *, tree, bool,
60 forall_info *, stmtblock_t *);
62 /* Translate a F95 label number to a LABEL_EXPR. */
65 gfc_trans_label_here (gfc_code * code)
/* Emit the GCC label decl associated with the statement label CODE->here.  */
67 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
71 /* Given a variable expression which has been ASSIGNed to, find the decl
72 containing the auxiliary variables. For variables in common blocks this
76 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only variables carrying the ASSIGN attribute may reach this point.  */
78 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
79 gfc_conv_expr (se, expr);
80 /* Deals with variable in common block. Get the field declaration. */
81 if (TREE_CODE (se->expr) == COMPONENT_REF)
82 se->expr = TREE_OPERAND (se->expr, 1);
83 /* Deals with dummy argument. Get the parameter declaration. */
84 else if (TREE_CODE (se->expr) == INDIRECT_REF)
85 se->expr = TREE_OPERAND (se->expr, 0);
88 /* Translate a label assignment statement. */
91 gfc_trans_label_assign (gfc_code * code)
100 /* Start a new block. */
101 gfc_init_se (&se, NULL);
102 gfc_start_block (&se.pre);
103 gfc_conv_label_variable (&se, code->expr1);
/* The ASSIGN variable carries two auxiliaries: a string length and an
   address slot; fetch both from the decl found above.  */
105 len = GFC_DECL_STRING_LEN (se.expr);
106 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
108 label_tree = gfc_get_label_decl (code->label1);
/* Branch target: store the label's address, and -1 in the length slot as
   the "this is a branch target, not a format" marker (see gfc_trans_goto).  */
110 if (code->label1->defined == ST_LABEL_TARGET)
112 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
113 len_tree = integer_minus_one_node;
/* Otherwise the label names a FORMAT statement: store the format string's
   address and its character length.  */
117 gfc_expr *format = code->label1->format;
119 label_len = format->value.character.length;
120 len_tree = build_int_cst (NULL_TREE, label_len);
121 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
122 format->value.character.string);
123 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
/* Commit both auxiliary assignments and return the finished block.  */
126 gfc_add_modify (&se.pre, len, len_tree);
127 gfc_add_modify (&se.pre, addr, label_tree);
129 return gfc_finish_block (&se.pre);
132 /* Translate a GOTO statement. */
135 gfc_trans_goto (gfc_code * code)
137 locus loc = code->loc;
/* Ordinary GOTO: an explicit label is present, jump straight to it.  */
143 if (code->label1 != NULL)
144 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
/* Assigned GOTO: the target lives in the ASSIGN variable's auxiliary
   address slot.  First check at run time that the stored length marker is
   -1, i.e. that a target label (not a FORMAT) was ASSIGNed.  */
147 gfc_init_se (&se, NULL);
148 gfc_start_block (&se.pre);
149 gfc_conv_label_variable (&se, code->expr1);
150 tmp = GFC_DECL_STRING_LEN (se.expr);
151 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
152 build_int_cst (TREE_TYPE (tmp), -1));
153 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
154 "Assigned label is not a target label");
156 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
158 /* We're going to ignore a label list. It does not really change the
159 statement's semantics (because it is just a further restriction on
160 what's legal code); before, we were comparing label addresses here, but
161 that's a very fragile business and may break with optimization. So
/* Emit the indirect jump through the stored address.  */
164 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
165 gfc_add_expr_to_block (&se.pre, target);
166 return gfc_finish_block (&se.pre);
170 /* Translate an ENTRY statement. Just adds a label for this entry point. */
172 gfc_trans_entry (gfc_code * code)
174 return build1_v (LABEL_EXPR, code->ext.entry->label);
178 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
179 elemental subroutines. Make temporaries for output arguments if any such
180 dependencies are found. Output arguments are chosen because internal_unpack
181 can be used, as is, to copy the result back to the variable. */
183 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
184 gfc_symbol * sym, gfc_actual_arglist * arg,
185 gfc_dep_check check_variable)
187 gfc_actual_arglist *arg0;
189 gfc_formal_arglist *formal;
190 gfc_loopinfo tmp_loop;
/* Nothing to do unless the call has been scalarized.  */
202 if (loopse->ss == NULL)
207 formal = sym->formal;
209 /* Loop over all the arguments testing for dependencies. */
210 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
216 /* Obtain the info structure for the current argument. */
218 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
222 info = &ss->data.info;
226 /* If there is a dependency, create a temporary and use it
227 instead of the variable. */
228 fsym = formal ? formal->sym : NULL;
/* Only non-INTENT(IN) variable actuals can alias an earlier argument in
   a way that needs a temporary; gfc_check_fncall_dependency decides.  */
229 if (e->expr_type == EXPR_VARIABLE
231 && fsym->attr.intent != INTENT_IN
232 && gfc_check_fncall_dependency (e, fsym->attr.intent,
233 sym, arg0, check_variable))
235 tree initial, temptype;
236 stmtblock_t temp_post;
238 /* Make a local loopinfo for the temporary creation, so that
239 none of the other ss->info's have to be renormalized. */
240 gfc_init_loopinfo (&tmp_loop);
241 for (n = 0; n < info->dimen; n++)
243 tmp_loop.to[n] = loopse->loop->to[n];
244 tmp_loop.from[n] = loopse->loop->from[n];
245 tmp_loop.order[n] = loopse->loop->order[n];
248 /* Obtain the argument descriptor for unpacking. */
249 gfc_init_se (&parmse, NULL);
250 parmse.want_pointer = 1;
252 /* The scalarizer introduces some specific peculiarities when
253 handling elemental subroutines; the stride can be needed up to
254 the dim_array - 1, rather than dim_loop - 1 to calculate
255 offsets outside the loop. For this reason, we make sure that
256 the descriptor has the dimensionality of the array by converting
257 trailing elements into ranges with end = start. */
258 for (ref = e->ref; ref; ref = ref->next)
259 if (ref->type == REF_ARRAY && ref->u.ar.type == AR_SECTION)
264 bool seen_range = false;
265 for (n = 0; n < ref->u.ar.dimen; n++)
267 if (ref->u.ar.dimen_type[n] == DIMEN_RANGE)
271 || ref->u.ar.dimen_type[n] != DIMEN_ELEMENT)
/* Turn a trailing element into a degenerate range (end = start).  */
274 ref->u.ar.end[n] = gfc_copy_expr (ref->u.ar.start[n]);
275 ref->u.ar.dimen_type[n] = DIMEN_RANGE;
279 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
280 gfc_add_block_to_block (&se->pre, &parmse.pre);
282 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
283 initialize the array temporary with a copy of the values. */
284 if (fsym->attr.intent == INTENT_INOUT
285 || (fsym->ts.type ==BT_DERIVED
286 && fsym->attr.intent == INTENT_OUT))
287 initial = parmse.expr;
291 /* Find the type of the temporary to create; we don't use the type
292 of e itself as this breaks for subcomponent-references in e (where
293 the type of e is that of the final reference, but parmse.expr's
294 type corresponds to the full derived-type). */
295 /* TODO: Fix this somehow so we don't need a temporary of the whole
296 array but instead only the components referenced. */
297 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
298 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
299 temptype = TREE_TYPE (temptype);
300 temptype = gfc_get_element_type (temptype);
302 /* Generate the temporary. Cleaning up the temporary should be the
303 very last thing done, so we add the code to a new block and add it
304 to se->post as last instructions. */
305 size = gfc_create_var (gfc_array_index_type, NULL);
306 data = gfc_create_var (pvoid_type_node, NULL);
307 gfc_init_block (&temp_post);
308 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
309 &tmp_loop, info, temptype,
313 gfc_add_modify (&se->pre, size, tmp);
314 tmp = fold_convert (pvoid_type_node, info->data);
315 gfc_add_modify (&se->pre, data, tmp);
317 /* Calculate the offset for the temporary. */
318 offset = gfc_index_zero_node;
319 for (n = 0; n < info->dimen; n++)
321 tmp = gfc_conv_descriptor_stride_get (info->descriptor,
/* offset -= from[n] * stride[n] for each dimension.  */
323 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
324 loopse->loop->from[n], tmp);
325 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
328 info->offset = gfc_create_var (gfc_array_index_type, NULL);
329 gfc_add_modify (&se->pre, info->offset, offset);
331 /* Copy the result back using unpack. */
332 tmp = build_call_expr_loc (input_location,
333 gfor_fndecl_in_unpack, 2, parmse.expr, data);
334 gfc_add_expr_to_block (&se->post, tmp);
336 /* parmse.pre is already added above. */
337 gfc_add_block_to_block (&se->post, &parmse.post);
338 gfc_add_block_to_block (&se->post, &temp_post);
344 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
347 gfc_trans_call (gfc_code * code, bool dependency_check,
348 tree mask, tree count1, bool invert)
352 int has_alternate_specifier;
353 gfc_dep_check check_variable;
354 tree index = NULL_TREE;
355 tree maskexpr = NULL_TREE;
358 /* A CALL starts a new block because the actual arguments may have to
359 be evaluated first. */
360 gfc_init_se (&se, NULL);
361 gfc_start_block (&se.pre);
363 gcc_assert (code->resolved_sym);
/* Walk the arguments of an elemental subroutine; a non-terminator SS
   chain means at least one argument is array valued.  */
365 ss = gfc_ss_terminator;
366 if (code->resolved_sym->attr.elemental)
367 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
369 /* Not an elemental subroutine call with array valued arguments.  */
370 if (ss == gfc_ss_terminator)
373 /* Translate the call. */
374 has_alternate_specifier
375 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
378 /* A subroutine without side-effect, by definition, does nothing!  Force
379 TREE_SIDE_EFFECTS so the call is not optimized away. */
379 TREE_SIDE_EFFECTS (se.expr) = 1;
381 /* Chain the pieces together and return the block. */
382 if (has_alternate_specifier)
384 gfc_code *select_code;
/* Alternate returns: the following statement is the SELECT that the
   front end builds; store the specifier value into its variable.  */
386 select_code = code->next;
387 gcc_assert(select_code->op == EXEC_SELECT);
388 sym = select_code->expr1->symtree->n.sym;
389 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
390 if (sym->backend_decl == NULL)
391 sym->backend_decl = gfc_get_symbol_decl (sym);
392 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
395 gfc_add_expr_to_block (&se.pre, se.expr);
397 gfc_add_block_to_block (&se.pre, &se.post);
402 /* An elemental subroutine call with array valued arguments has
410 /* gfc_walk_elemental_function_args renders the ss chain in the
411 reverse order to the actual argument order. */
412 ss = gfc_reverse_ss (ss);
414 /* Initialize the loop. */
415 gfc_init_se (&loopse, NULL);
416 gfc_init_loopinfo (&loop);
417 gfc_add_ss_to_loop (&loop, ss);
419 gfc_conv_ss_startstride (&loop);
420 /* TODO: gfc_conv_loop_setup generates a temporary for vector
421 subscripts. This could be prevented in the elemental case
422 as temporaries are handled separately
423 (below in gfc_conv_elemental_dependencies). */
424 gfc_conv_loop_setup (&loop, &code->expr1->where);
425 gfc_mark_ss_chain_used (ss, 1);
427 /* Convert the arguments, checking for dependencies. */
428 gfc_copy_loopinfo_to_se (&loopse, &loop);
431 /* For operator assignment, do dependency checking. */
432 if (dependency_check)
433 check_variable = ELEM_CHECK_VARIABLE;
435 check_variable = ELEM_DONT_CHECK_VARIABLE;
/* Create temporaries for any aliasing output arguments.  */
437 gfc_init_se (&depse, NULL);
438 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
439 code->ext.actual, check_variable);
441 gfc_add_block_to_block (&loop.pre, &depse.pre);
442 gfc_add_block_to_block (&loop.post, &depse.post);
444 /* Generate the loop body. */
445 gfc_start_scalarized_body (&loop, &body);
446 gfc_init_block (&block);
450 /* Form the mask expression according to the mask (used when this call
451 is the body of a masked WHERE/FORALL; INVERT selects .NOT. mask). */
452 maskexpr = gfc_build_array_ref (mask, index, NULL);
454 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
458 /* Add the subroutine call to the block. */
459 gfc_conv_procedure_call (&loopse, code->resolved_sym,
460 code->ext.actual, code->expr1, NULL);
/* Guard the call with the mask and advance the mask index COUNT1.  */
464 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
465 build_empty_stmt (input_location));
466 gfc_add_expr_to_block (&loopse.pre, tmp);
467 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
468 count1, gfc_index_one_node);
469 gfc_add_modify (&loopse.pre, count1, tmp);
472 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
474 gfc_add_block_to_block (&block, &loopse.pre);
475 gfc_add_block_to_block (&block, &loopse.post);
477 /* Finish up the loop block and the loop. */
478 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
479 gfc_trans_scalarizing_loops (&loop, &body);
480 gfc_add_block_to_block (&se.pre, &loop.pre);
481 gfc_add_block_to_block (&se.pre, &loop.post);
482 gfc_add_block_to_block (&se.pre, &se.post);
483 gfc_cleanup_loop (&loop);
486 return gfc_finish_block (&se.pre);
490 /* Translate the RETURN statement. */
493 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
501 /* If code->expr is not NULL, this return statement must appear
502 in a subroutine and current_fake_result_decl has already
505 result = gfc_get_fake_result_decl (NULL, 0);
/* No result decl: warn about an alternate return with no matching '*'
   dummy and fall back to a plain jump to the return label.  */
508 gfc_warning ("An alternate return at %L without a * dummy argument",
509 &code->expr1->where);
510 return build1_v (GOTO_EXPR, gfc_get_return_label ());
513 /* Start a new block for this statement. */
514 gfc_init_se (&se, NULL);
515 gfc_start_block (&se.pre);
/* Store the alternate-return specifier into the fake result, then jump
   to the function's single return label.  */
517 gfc_conv_expr (&se, code->expr1);
519 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
520 fold_convert (TREE_TYPE (result), se.expr));
521 gfc_add_expr_to_block (&se.pre, tmp);
523 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
524 gfc_add_expr_to_block (&se.pre, tmp);
525 gfc_add_block_to_block (&se.pre, &se.post);
526 return gfc_finish_block (&se.pre);
/* Simple RETURN (no expression): just jump to the return label.  */
529 return build1_v (GOTO_EXPR, gfc_get_return_label ());
533 /* Translate the PAUSE statement. We have to translate this statement
534 to a runtime library call. */
537 gfc_trans_pause (gfc_code * code)
539 tree gfc_int4_type_node = gfc_get_int_type (4);
543 /* Start a new block for this statement. */
544 gfc_init_se (&se, NULL);
545 gfc_start_block (&se.pre);
/* PAUSE with no stop-code: call _gfortran_pause_string with a NULL
   string and zero length.  */
548 if (code->expr1 == NULL)
550 tmp = build_int_cst (gfc_int4_type_node, 0);
551 tmp = build_call_expr_loc (input_location,
552 gfor_fndecl_pause_string, 2,
553 build_int_cst (pchar_type_node, 0), tmp);
/* Integer stop-code: use the numeric runtime entry point.  */
555 else if (code->expr1->ts.type == BT_INTEGER)
557 gfc_conv_expr (&se, code->expr1);
558 tmp = build_call_expr_loc (input_location,
559 gfor_fndecl_pause_numeric, 1,
560 fold_convert (gfc_int4_type_node, se.expr));
/* Character stop-code: pass the string by reference with its length.  */
564 gfc_conv_expr_reference (&se, code->expr1);
565 tmp = build_call_expr_loc (input_location,
566 gfor_fndecl_pause_string, 2,
567 se.expr, se.string_length);
570 gfc_add_expr_to_block (&se.pre, tmp);
572 gfc_add_block_to_block (&se.pre, &se.post);
574 return gfc_finish_block (&se.pre);
578 /* Translate the STOP statement. We have to translate this statement
579 to a runtime library call.  ERROR_STOP selects the ERROR STOP
580 (Fortran 2008) runtime entry points instead of the plain STOP ones. */
582 gfc_trans_stop (gfc_code *code, bool error_stop)
584 tree gfc_int4_type_node = gfc_get_int_type (4);
588 /* Start a new block for this statement. */
589 gfc_init_se (&se, NULL);
590 gfc_start_block (&se.pre);
/* No stop-code: call the string variant with a NULL string.  */
592 if (code->expr1 == NULL)
594 tmp = build_int_cst (gfc_int4_type_node, 0);
595 tmp = build_call_expr_loc (input_location,
596 error_stop ? gfor_fndecl_error_stop_string
597 : gfor_fndecl_stop_string,
598 2, build_int_cst (pchar_type_node, 0), tmp);
/* Integer stop-code: numeric runtime entry point.  */
600 else if (code->expr1->ts.type == BT_INTEGER)
602 gfc_conv_expr (&se, code->expr1);
603 tmp = build_call_expr_loc (input_location,
604 error_stop ? gfor_fndecl_error_stop_numeric
605 : gfor_fndecl_stop_numeric, 1,
606 fold_convert (gfc_int4_type_node, se.expr));
/* Character stop-code: string runtime entry point.  */
610 gfc_conv_expr_reference (&se, code->expr1);
611 tmp = build_call_expr_loc (input_location,
612 error_stop ? gfor_fndecl_error_stop_string
613 : gfor_fndecl_stop_string,
614 2, se.expr, se.string_length);
617 gfc_add_expr_to_block (&se.pre, tmp);
619 gfc_add_block_to_block (&se.pre, &se.post);
621 return gfc_finish_block (&se.pre);
/* Translate SYNC ALL / SYNC IMAGES / SYNC MEMORY.  expr1 is the image
   set (if any), expr2 the STAT variable (if any).  */
626 gfc_trans_sync (gfc_code *code, gfc_exec_op type __attribute__ ((unused)))
/* Only emit code when there is something to check or to assign.  */
630 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
632 gfc_init_se (&se, NULL);
633 gfc_start_block (&se.pre);
636 /* Check SYNC IMAGES(imageset) for valid image index.
637 FIXME: Add a check for image-set arrays. */
638 if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
639 && code->expr1->rank == 0)
/* NOTE(review): the check compares against 1; presumably only a single
   image is supported here -- confirm against the coarray support level.  */
642 gfc_conv_expr (&se, code->expr1);
643 cond = fold_build2 (NE_EXPR, boolean_type_node, se.expr,
644 build_int_cst (TREE_TYPE (se.expr), 1));
645 gfc_trans_runtime_check (true, false, cond, &se.pre,
646 &code->expr1->where, "Invalid image number "
648 fold_convert (integer_type_node, se.expr));
651 /* If STAT is present, set it to zero. */
654 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
655 gfc_conv_expr (&se, code->expr2);
656 gfc_add_modify (&se.pre, se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
659 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
660 return gfc_finish_block (&se.pre);
666 /* Generate GENERIC for the IF construct. This function also deals with
667 the simple IF statement, because the front end translates the IF
668 statement into an IF construct.
700 where COND_S is the simplified version of the predicate. PRE_COND_S
701 are the pre side-effects produced by the translation of the
703 We need to build the chain recursively otherwise we run into
704 problems with folding incomplete statements. */
707 gfc_trans_if_1 (gfc_code * code)
712 /* Check for an unconditional ELSE clause. */
714 return gfc_trans_code (code->next);
716 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
717 gfc_init_se (&if_se, NULL);
718 gfc_start_block (&if_se.pre);
720 /* Calculate the IF condition expression. */
721 gfc_conv_expr_val (&if_se, code->expr1);
723 /* Translate the THEN clause. */
724 stmt = gfc_trans_code (code->next);
726 /* Translate the ELSE clause; recurse for ELSE IF chains, or use an
727 empty statement when there is no ELSE. */
728 elsestmt = gfc_trans_if_1 (code->block);
730 elsestmt = build_empty_stmt (input_location);
732 /* Build the condition expression and add it to the condition block. */
733 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
735 gfc_add_expr_to_block (&if_se.pre, stmt);
737 /* Finish off this statement. */
738 return gfc_finish_block (&if_se.pre);
/* Entry point for IF translation; delegates to gfc_trans_if_1.  */
742 gfc_trans_if (gfc_code * code)
744 /* Ignore the top EXEC_IF, it only announces an IF construct. The
745 actual code we must translate is in code->block. */
747 return gfc_trans_if_1 (code->block);
751 /* Translate an arithmetic IF expression.
753 IF (cond) label1, label2, label3 translates to
765 An optimized version can be generated in case of equal labels.
766 E.g., if label1 is equal to label2, we can translate it to
775 gfc_trans_arithmetic_if (gfc_code * code)
783 /* Start a new block. */
784 gfc_init_se (&se, NULL);
785 gfc_start_block (&se.pre);
787 /* Pre-evaluate COND. */
788 gfc_conv_expr_val (&se, code->expr1);
/* Evaluate once so COND is not recomputed for each comparison.  */
789 se.expr = gfc_evaluate_now (se.expr, &se.pre);
791 /* Build something to compare with. */
792 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
794 if (code->label1->value != code->label2->value)
796 /* If (cond < 0) take branch1 else take branch2.
797 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
798 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
799 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
/* label1 == label3 means the < and > cases merge, so compare with
   NE instead of LT.  */
801 if (code->label1->value != code->label3->value)
802 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
804 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
806 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
/* label1 == label2: the < and == cases share a target.  */
809 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
811 if (code->label1->value != code->label3->value
812 && code->label2->value != code->label3->value)
814 /* if (cond <= 0) take branch1 else take branch2. */
815 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
816 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
817 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
820 /* Append the COND_EXPR to the evaluation of COND, and return. */
821 gfc_add_expr_to_block (&se.pre, branch1);
822 return gfc_finish_block (&se.pre);
826 /* Translate a CRITICAL block. */
828 gfc_trans_critical (gfc_code *code)
/* Translate the statements inside the construct into a single block.
   NOTE(review): no locking calls are visible in this fragment -- any
   image-synchronization presumably lives in the lines not shown here.  */
833 gfc_start_block (&block);
834 tmp = gfc_trans_code (code->block->next);
835 gfc_add_expr_to_block (&block, tmp);
837 return gfc_finish_block (&block);
841 /* Translate a BLOCK construct. This is basically what we would do for a
845 gfc_trans_block_construct (gfc_code* code)
/* The BLOCK's own namespace holds its local declarations.  */
852 ns = code->ext.block.ns;
857 gcc_assert (!sym->tlink);
860 gfc_start_block (&body);
/* Declare the block-local variables, translate the contained code, and
   wrap it with initialization/cleanup of deferred variables.  */
861 gfc_process_block_locals (ns);
863 tmp = gfc_trans_code (ns->code);
864 tmp = gfc_trans_deferred_vars (sym, tmp);
866 gfc_add_expr_to_block (&body, tmp);
867 return gfc_finish_block (&body);
871 /* Translate the simple DO construct. This is where the loop variable has
872 integer type and step +-1. We can't use this in the general case
873 because integer overflow and floating point errors could give incorrect
875 We translate a do loop from:
877 DO dovar = from, to, step
883 [Evaluate loop bounds and step]
885 if ((step > 0) ? (dovar <= to) : (dovar >= to))
891 cond = (dovar == to);
893 if (cond) goto end_label;
898 This helps the optimizers by avoiding the extra induction variable
899 used in the general case. */
902 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
903 tree from, tree to, tree step, tree exit_cond)
909 tree saved_dovar = NULL;
913 type = TREE_TYPE (dovar);
915 /* Initialize the DO variable: dovar = from. */
916 gfc_add_modify (pblock, dovar, from);
918 /* Save value for do-tinkering checking. */
919 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
921 saved_dovar = gfc_create_var (type, ".saved_dovar");
922 gfc_add_modify (pblock, saved_dovar, dovar);
925 /* Cycle and exit statements are implemented with gotos. */
926 cycle_label = gfc_build_label_decl (NULL_TREE);
927 exit_label = gfc_build_label_decl (NULL_TREE);
929 /* Put the labels where they can be found later. See gfc_trans_do(). */
930 code->block->cycle_label = cycle_label;
931 code->block->exit_label = exit_label;
934 gfc_start_block (&body);
936 /* Main loop body. */
937 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
938 gfc_add_expr_to_block (&body, tmp);
940 /* Label for cycle statements (if needed). */
941 if (TREE_USED (cycle_label))
943 tmp = build1_v (LABEL_EXPR, cycle_label);
944 gfc_add_expr_to_block (&body, tmp);
947 /* Check whether someone has modified the loop variable. */
948 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
950 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
951 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
952 "Loop variable has been modified");
955 /* Exit the loop if there is an I/O result condition or error. */
958 tmp = build1_v (GOTO_EXPR, exit_label);
959 tmp = fold_build3 (COND_EXPR, void_type_node, exit_cond, tmp,
960 build_empty_stmt (input_location));
961 gfc_add_expr_to_block (&body, tmp);
964 /* Evaluate the loop condition BEFORE the increment so dovar == to is
965 tested against the pre-increment value (avoids overflow at TO). */
965 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
966 cond = gfc_evaluate_now (cond, &body);
968 /* Increment the loop variable. */
969 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
970 gfc_add_modify (&body, dovar, tmp);
972 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
973 gfc_add_modify (&body, saved_dovar, dovar);
/* Jump to the exit label when the saved termination condition holds.  */
976 tmp = build1_v (GOTO_EXPR, exit_label);
977 TREE_USED (exit_label) = 1;
978 tmp = fold_build3 (COND_EXPR, void_type_node,
979 cond, tmp, build_empty_stmt (input_location));
980 gfc_add_expr_to_block (&body, tmp);
982 /* Finish the loop body. */
983 tmp = gfc_finish_block (&body);
984 tmp = build1_v (LOOP_EXPR, tmp);
986 /* Only execute the loop if the number of iterations is positive. */
987 if (tree_int_cst_sgn (step) > 0)
988 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
990 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
991 tmp = fold_build3 (COND_EXPR, void_type_node,
992 cond, tmp, build_empty_stmt (input_location));
993 gfc_add_expr_to_block (pblock, tmp);
995 /* Add the exit label. */
996 tmp = build1_v (LABEL_EXPR, exit_label);
997 gfc_add_expr_to_block (pblock, tmp);
999 return gfc_finish_block (pblock);
1002 /* Translate the DO construct. This obviously is one of the most
1003 important ones to get right with any compiler, but especially
1006 We special case some loop forms as described in gfc_trans_simple_do.
1007 For other cases we implement them with a separate loop count,
1008 as described in the standard.
1010 We translate a do loop from:
1012 DO dovar = from, to, step
1018 [evaluate loop bounds and step]
1019 empty = (step > 0 ? to < from : to > from);
1020 countm1 = (to - from) / step;
1022 if (empty) goto exit_label;
1028 if (countm1 == 0) goto exit_label;
1033 countm1 is an unsigned integer. It is equal to the loop count minus one,
1034 because the loop count itself can overflow. */
1037 gfc_trans_do (gfc_code * code, tree exit_cond)
1041 tree saved_dovar = NULL;
1056 gfc_start_block (&block);
1058 /* Evaluate all the expressions in the iterator. */
1059 gfc_init_se (&se, NULL);
1060 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1061 gfc_add_block_to_block (&block, &se.pre);
1063 type = TREE_TYPE (dovar);
1065 gfc_init_se (&se, NULL);
1066 gfc_conv_expr_val (&se, code->ext.iterator->start);
1067 gfc_add_block_to_block (&block, &se.pre);
1068 from = gfc_evaluate_now (se.expr, &block);
1070 gfc_init_se (&se, NULL);
1071 gfc_conv_expr_val (&se, code->ext.iterator->end);
1072 gfc_add_block_to_block (&block, &se.pre);
1073 to = gfc_evaluate_now (se.expr, &block);
1075 gfc_init_se (&se, NULL);
1076 gfc_conv_expr_val (&se, code->ext.iterator->step);
1077 gfc_add_block_to_block (&block, &se.pre);
1078 step = gfc_evaluate_now (se.expr, &block);
/* Optional run-time diagnostic for a zero STEP.  */
1080 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1082 tmp = fold_build2 (EQ_EXPR, boolean_type_node, step,
1083 fold_convert (type, integer_zero_node));
1084 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1085 "DO step value is zero");
1088 /* Special case simple loops: integer dovar with step +-1. */
1089 if (TREE_CODE (type) == INTEGER_TYPE
1090 && (integer_onep (step)
1091 || tree_int_cst_equal (step, integer_minus_one_node)))
1092 return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);
1094 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
1095 fold_convert (type, integer_zero_node));
/* The iteration count is kept unsigned so it cannot overflow.  */
1097 if (TREE_CODE (type) == INTEGER_TYPE)
1098 utype = unsigned_type_for (type);
1100 utype = unsigned_type_for (gfc_array_index_type);
1101 countm1 = gfc_create_var (utype, "countm1");
1103 /* Cycle and exit statements are implemented with gotos. */
1104 cycle_label = gfc_build_label_decl (NULL_TREE);
1105 exit_label = gfc_build_label_decl (NULL_TREE);
1106 TREE_USED (exit_label) = 1;
1108 /* Initialize the DO variable: dovar = from. */
1109 gfc_add_modify (&block, dovar, from);
1111 /* Save value for do-tinkering checking. */
1112 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1114 saved_dovar = gfc_create_var (type, ".saved_dovar");
1115 gfc_add_modify (&block, saved_dovar, dovar);
1118 /* Initialize loop count and jump to exit label if the loop is empty.
1119 This code is executed before we enter the loop body. We generate:
1120 step_sign = sign(1,step);
1131 countm1 = (to*step_sign - from*step_sign) / (step*step_sign);
1135 if (TREE_CODE (type) == INTEGER_TYPE)
1137 tree pos, neg, step_sign, to2, from2, step2;
1139 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1141 tmp = fold_build2 (LT_EXPR, boolean_type_node, step,
1142 build_int_cst (TREE_TYPE (step), 0));
1143 step_sign = fold_build3 (COND_EXPR, type, tmp,
1144 build_int_cst (type, -1),
1145 build_int_cst (type, 1));
/* Empty-loop tests: positive step and to < from, or negative step and
   to > from, jump straight to the exit label.  */
1147 tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
1148 pos = fold_build3 (COND_EXPR, void_type_node, tmp,
1149 build1_v (GOTO_EXPR, exit_label),
1150 build_empty_stmt (input_location));
1152 tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
1153 neg = fold_build3 (COND_EXPR, void_type_node, tmp,
1154 build1_v (GOTO_EXPR, exit_label),
1155 build_empty_stmt (input_location));
1156 tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
1158 gfc_add_expr_to_block (&block, tmp);
1160 /* Calculate the loop count. to-from can overflow, so
1161 we cast to unsigned. */
1163 to2 = fold_build2 (MULT_EXPR, type, step_sign, to);
1164 from2 = fold_build2 (MULT_EXPR, type, step_sign, from);
1165 step2 = fold_build2 (MULT_EXPR, type, step_sign, step);
1166 step2 = fold_convert (utype, step2);
1167 tmp = fold_build2 (MINUS_EXPR, type, to2, from2);
1168 tmp = fold_convert (utype, tmp);
1169 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp, step2);
1170 tmp = fold_build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
1171 gfc_add_expr_to_block (&block, tmp);
/* Real-typed DO variable (legacy extension): compute the trip count in
   the real type and truncate.  */
1175 /* TODO: We could use the same width as the real type.
1176 This would probably cause more problems that it solves
1177 when we implement "long double" types. */
1179 tmp = fold_build2 (MINUS_EXPR, type, to, from);
1180 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
1181 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
1182 gfc_add_modify (&block, countm1, tmp);
1184 /* We need a special check for empty loops:
1185 empty = (step > 0 ? to < from : to > from); */
1186 tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
1187 fold_build2 (LT_EXPR, boolean_type_node, to, from),
1188 fold_build2 (GT_EXPR, boolean_type_node, to, from));
1189 /* If the loop is empty, go directly to the exit label. */
1190 tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
1191 build1_v (GOTO_EXPR, exit_label),
1192 build_empty_stmt (input_location));
1193 gfc_add_expr_to_block (&block, tmp);
1197 gfc_start_block (&body);
1199 /* Put these labels where they can be found later. */
1201 code->block->cycle_label = cycle_label;
1202 code->block->exit_label = exit_label;
1204 /* Main loop body. */
1205 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1206 gfc_add_expr_to_block (&body, tmp);
1208 /* Label for cycle statements (if needed). */
1209 if (TREE_USED (cycle_label))
1211 tmp = build1_v (LABEL_EXPR, cycle_label);
1212 gfc_add_expr_to_block (&body, tmp);
1215 /* Check whether someone has modified the loop variable. */
1216 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1218 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
1219 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1220 "Loop variable has been modified");
1223 /* Exit the loop if there is an I/O result condition or error. */
1226 tmp = build1_v (GOTO_EXPR, exit_label);
1227 tmp = fold_build3 (COND_EXPR, void_type_node, exit_cond, tmp,
1228 build_empty_stmt (input_location));
1229 gfc_add_expr_to_block (&body, tmp);
1232 /* Increment the loop variable. */
1233 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
1234 gfc_add_modify (&body, dovar, tmp);
1236 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1237 gfc_add_modify (&body, saved_dovar, dovar);
1239 /* End with the loop condition. Loop until countm1 == 0. */
1240 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
1241 build_int_cst (utype, 0));
1242 tmp = build1_v (GOTO_EXPR, exit_label);
1243 tmp = fold_build3 (COND_EXPR, void_type_node,
1244 cond, tmp, build_empty_stmt (input_location));
1245 gfc_add_expr_to_block (&body, tmp);
1247 /* Decrement the loop count. */
1248 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
1249 gfc_add_modify (&body, countm1, tmp);
1251 /* End of loop body. */
1252 tmp = gfc_finish_block (&body);
1254 /* The for loop itself. */
1255 tmp = build1_v (LOOP_EXPR, tmp);
1256 gfc_add_expr_to_block (&block, tmp);
1258 /* Add the exit label. */
1259 tmp = build1_v (LABEL_EXPR, exit_label);
1260 gfc_add_expr_to_block (&block, tmp);
1262 return gfc_finish_block (&block);
1266 /* Translate the DO WHILE construct.
1279 if (! cond) goto exit_label;
1285 Because the evaluation of the exit condition `cond' may have side
1286 effects, we can't do much for empty loop bodies. The backend optimizers
1287 should be smart enough to eliminate any dead loops. */
/* Lower a DO WHILE loop to a LOOP_EXPR whose body first evaluates the
   (negated) condition and jumps to exit_label when the loop must stop.
   Returns the finished statement tree for the whole construct.  */
1290 gfc_trans_do_while (gfc_code * code)
1298 /* Everything we build here is part of the loop body. */
1299 gfc_start_block (&block);
1301 /* Cycle and exit statements are implemented with gotos. */
1302 cycle_label = gfc_build_label_decl (NULL_TREE);
1303 exit_label = gfc_build_label_decl (NULL_TREE);
1305 /* Put the labels where they can be found later. See gfc_trans_do(). */
1306 code->block->cycle_label = cycle_label;
1307 code->block->exit_label = exit_label;
1309 /* Create a GIMPLE version of the exit condition. */
1310 gfc_init_se (&cond, NULL);
1311 gfc_conv_expr_val (&cond, code->expr1);
1312 gfc_add_block_to_block (&block, &cond.pre)
/* Negate the condition so the COND_EXPR below can jump straight to the
   exit label when the original WHILE condition is false.  */;
1313 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1315 /* Build "IF (! cond) GOTO exit_label". */
1316 tmp = build1_v (GOTO_EXPR, exit_label);
/* The exit label is always reachable here, so mark it used up front.  */
1317 TREE_USED (exit_label) = 1;
1318 tmp = fold_build3 (COND_EXPR, void_type_node,
1319 cond.expr, tmp, build_empty_stmt (input_location));
1320 gfc_add_expr_to_block (&block, tmp);
1322 /* The main body of the loop. */
1323 tmp = gfc_trans_code (code->block->next);
1324 gfc_add_expr_to_block (&block, tmp);
1326 /* Label for cycle statements (if needed). */
1327 if (TREE_USED (cycle_label))
1329 tmp = build1_v (LABEL_EXPR, cycle_label);
1330 gfc_add_expr_to_block (&block, tmp);
1333 /* End of loop body. */
1334 tmp = gfc_finish_block (&block);
1336 gfc_init_block (&block);
1337 /* Build the loop. */
1338 tmp = build1_v (LOOP_EXPR, tmp);
1339 gfc_add_expr_to_block (&block, tmp);
1341 /* Add the exit label. */
1342 tmp = build1_v (LABEL_EXPR, exit_label);
1343 gfc_add_expr_to_block (&block, tmp);
1345 return gfc_finish_block (&block);
1349 /* Translate the SELECT CASE construct for INTEGER case expressions,
1350 without killing all potential optimizations. The problem is that
1351 Fortran allows unbounded cases, but the back-end does not, so we
1352 need to intercept those before we enter the equivalent SWITCH_EXPR
1355 For example, we translate this,
1358 CASE (:100,101,105:115)
1368 to the GENERIC equivalent,
1372 case (minimum value for typeof(expr) ... 100:
1378 case 200 ... (maximum value for typeof(expr):
/* Returns the statement tree for the whole construct.  Unbounded case
   ranges are clamped to TYPE_MIN_VALUE/TYPE_MAX_VALUE of the switch
   expression's type so the middle end sees only bounded ranges.  */
1395 gfc_trans_integer_select (gfc_code * code)
1405 gfc_start_block (&block);
1407 /* Calculate the switch expression. */
1408 gfc_init_se (&se, NULL);
1409 gfc_conv_expr_val (&se, code->expr1);
1410 gfc_add_block_to_block (&block, &se.pre);
1412 end_label = gfc_build_label_decl (NULL_TREE);
1414 gfc_init_block (&body);
/* Walk each CASE block, and within it each case range in the list.  */
1416 for (c = code->block; c; c = c->block)
1418 for (cp = c->ext.case_list; cp; cp = cp->next)
1423 /* Assume it's the default case. */
1424 low = high = NULL_TREE;
1428 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1431 /* If there's only a lower bound, set the high bound to the
1432 maximum value of the case expression. */
1434 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1439 /* Three cases are possible here:
1441 1) There is no lower bound, e.g. CASE (:N).
1442 2) There is a lower bound .NE. high bound, that is
1443 a case range, e.g. CASE (N:M) where M>N (we make
1444 sure that M>N during type resolution).
1445 3) There is a lower bound, and it has the same value
1446 as the high bound, e.g. CASE (N:N). This is our
1447 internal representation of CASE(N).
1449 In the first and second case, we need to set a value for
1450 high. In the third case, we don't because the GCC middle
1451 end represents a single case value by just letting high be
1452 a NULL_TREE. We can't do that because we need to be able
1453 to represent unbounded cases. */
1457 && mpz_cmp (cp->low->value.integer,
1458 cp->high->value.integer) != 0))
1459 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1462 /* Unbounded case. */
1464 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1467 /* Build a label. */
1468 label = gfc_build_label_decl (NULL_TREE);
1470 /* Add this case label.
1471 Add parameter 'label', make it match GCC backend. */
1472 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1474 gfc_add_expr_to_block (&body, tmp);
1477 /* Add the statements for this case. */
1478 tmp = gfc_trans_code (c->next);
1479 gfc_add_expr_to_block (&body, tmp);
1481 /* Break to the end of the construct. */
1482 tmp = build1_v (GOTO_EXPR, end_label);
1483 gfc_add_expr_to_block (&body, tmp);
1486 tmp = gfc_finish_block (&body);
1487 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1488 gfc_add_expr_to_block (&block, tmp);
1490 tmp = build1_v (LABEL_EXPR, end_label);
1491 gfc_add_expr_to_block (&block, tmp);
1493 return gfc_finish_block (&block);
1497 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1499 There are only two cases possible here, even though the standard
1500 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1501 .FALSE., and DEFAULT.
1503 We never generate more than two blocks here. Instead, we always
1504 try to eliminate the DEFAULT case. This way, we can translate this
1505 kind of SELECT construct to a simple
1509 expression in GENERIC. */
/* t, f and d below hold the code blocks selected for .TRUE., .FALSE.
   and DEFAULT respectively; any of them may remain NULL.  */
1512 gfc_trans_logical_select (gfc_code * code)
1515 gfc_code *t, *f, *d;
1520 /* Assume we don't have any cases at all. */
1523 /* Now see which ones we actually do have. We can have at most two
1524 cases in a single case list: one for .TRUE. and one for .FALSE.
1525 The default case is always separate. If the cases for .TRUE. and
1526 .FALSE. are in the same case list, the block for that case list
1527 is always executed, and we don't need to generate a COND_EXPR. */
1528 for (c = code->block; c; c = c->block)
1530 for (cp = c->ext.case_list; cp; cp = cp->next)
1534 if (cp->low->value.logical == 0) /* .FALSE. */
1536 else /* if (cp->value.logical != 0), thus .TRUE. */
1544 /* Start a new block. */
1545 gfc_start_block (&block);
1547 /* Calculate the switch expression. We always need to do this
1548 because it may have side effects. */
1549 gfc_init_se (&se, NULL);
1550 gfc_conv_expr_val (&se, code->expr1);
1551 gfc_add_block_to_block (&block, &se.pre);
1553 if (t == f && t != NULL)
1555 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1556 translate the code for these cases, append it to the current
1558 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1562 tree true_tree, false_tree, stmt;
/* Default both arms to empty; missing arms then fall through to a no-op.  */
1564 true_tree = build_empty_stmt (input_location);
1565 false_tree = build_empty_stmt (input_location);
1567 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1568 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1569 make the missing case the default case. */
1570 if (t != NULL && f != NULL)
1580 /* Translate the code for each of these blocks, and append it to
1581 the current block. */
1583 true_tree = gfc_trans_code (t->next);
1586 false_tree = gfc_trans_code (f->next);
1588 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1589 true_tree, false_tree);
1590 gfc_add_expr_to_block (&block, stmt);
1593 return gfc_finish_block (&block);
1597 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1598 Instead of generating compares and jumps, it is far simpler to
1599 generate a data structure describing the cases in order and call a
1600 library subroutine that locates the right case.
1601 This is particularly true because this is the only case where we
1602 might have to dispose of a temporary.
1603 The library subroutine returns a pointer to jump to or NULL if no
1604 branches are to be taken. */
1607 gfc_trans_character_select (gfc_code *code)
1609 tree init, end_label, tmp, type, case_num, label, fndecl;
1610 stmtblock_t block, body;
1615 VEC(constructor_elt,gc) *inits = NULL;
1617 /* The jump table types are stored in static variables to avoid
1618 constructing them from scratch every single time. */
/* Index [k] selects between the kind=1 and kind=4 character variants.  */
1619 static tree select_struct[2];
1620 static tree ss_string1[2], ss_string1_len[2];
1621 static tree ss_string2[2], ss_string2_len[2];
1622 static tree ss_target[2];
1624 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
1626 if (code->expr1->ts.kind == 1)
1628 else if (code->expr1->ts.kind == 4)
/* Lazily build the RECORD_TYPE describing one jump-table entry:
   {string1, string1_len, string2, string2_len, target}.  */
1633 if (select_struct[k] == NULL)
1636 select_struct[k] = make_node (RECORD_TYPE);
1638 if (code->expr1->ts.kind == 1)
1639 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1640 else if (code->expr1->ts.kind == 4)
1641 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
1646 #define ADD_FIELD(NAME, TYPE) \
1647 ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \
1648 get_identifier (stringize(NAME)), \
1652 ADD_FIELD (string1, pchartype);
1653 ADD_FIELD (string1_len, gfc_charlen_type_node);
1655 ADD_FIELD (string2, pchartype);
1656 ADD_FIELD (string2_len, gfc_charlen_type_node);
1658 ADD_FIELD (target, integer_type_node);
1661 gfc_finish_type (select_struct[k]);
/* Rewind to the leftmost node of the case list, then walk right —
   presumably to count the cases; the count appears to land in 'n'
   (assignment not visible here — TODO confirm).  */
1664 cp = code->block->ext.case_list;
1665 while (cp->left != NULL)
1669 for (d = cp; d; d = d->right)
1672 end_label = gfc_build_label_decl (NULL_TREE);
1674 /* Generate the body */
1675 gfc_start_block (&block);
1676 gfc_init_block (&body);
1678 for (c = code->block; c; c = c->block)
1680 for (d = c->ext.case_list; d; d = d->next)
1682 label = gfc_build_label_decl (NULL_TREE);
1683 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1684 build_int_cst (NULL_TREE, d->n),
1685 build_int_cst (NULL_TREE, d->n), label);
1686 gfc_add_expr_to_block (&body, tmp);
1689 tmp = gfc_trans_code (c->next);
1690 gfc_add_expr_to_block (&body, tmp);
1692 tmp = build1_v (GOTO_EXPR, end_label);
1693 gfc_add_expr_to_block (&body, tmp);
1696 /* Generate the structure describing the branches */
1697 for(d = cp; d; d = d->right)
1699 VEC(constructor_elt,gc) *node = NULL;
1701 gfc_init_se (&se, NULL);
/* A NULL low bound marks an unbounded lower end: encode it as a null
   string pointer with zero length.  */
1705 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node);
1706 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node);
1710 gfc_conv_expr_reference (&se, d->low);
1712 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr);
1713 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length);
1716 if (d->high == NULL)
1718 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node);
1719 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node);
1723 gfc_init_se (&se, NULL);
1724 gfc_conv_expr_reference (&se, d->high);
1726 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr);
1727 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length);
1730 CONSTRUCTOR_APPEND_ELT (node, ss_target[k],
1731 build_int_cst (integer_type_node, d->n));
1733 tmp = build_constructor (select_struct[k], node);
1734 CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp);
1737 type = build_array_type (select_struct[k],
1738 build_index_type (build_int_cst (NULL_TREE, n-1)));
1740 init = build_constructor (type, inits);
1741 TREE_CONSTANT (init) = 1;
1742 TREE_STATIC (init) = 1;
1743 /* Create a static variable to hold the jump table. */
1744 tmp = gfc_create_var (type, "jumptable");
1745 TREE_CONSTANT (tmp) = 1;
1746 TREE_STATIC (tmp) = 1;
1747 TREE_READONLY (tmp) = 1;
1748 DECL_INITIAL (tmp) = init;
1751 /* Build the library call */
1752 init = gfc_build_addr_expr (pvoid_type_node, init);
1754 gfc_init_se (&se, NULL);
1755 gfc_conv_expr_reference (&se, code->expr1);
1757 gfc_add_block_to_block (&block, &se.pre);
/* Pick the runtime lookup routine matching the character kind.  */
1759 if (code->expr1->ts.kind == 1)
1760 fndecl = gfor_fndecl_select_string;
1761 else if (code->expr1->ts.kind == 4)
1762 fndecl = gfor_fndecl_select_string_char4;
1766 tmp = build_call_expr_loc (input_location,
1767 fndecl, 4, init, build_int_cst (NULL_TREE, n),
1768 se.expr, se.string_length);
1769 case_num = gfc_create_var (integer_type_node, "case_num");
1770 gfc_add_modify (&block, case_num, tmp);
1772 gfc_add_block_to_block (&block, &se.post);
/* Dispatch on the case number the library returned.  */
1774 tmp = gfc_finish_block (&body);
1775 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1776 gfc_add_expr_to_block (&block, tmp);
1778 tmp = build1_v (LABEL_EXPR, end_label);
1779 gfc_add_expr_to_block (&block, tmp);
1781 return gfc_finish_block (&block);
1785 /* Translate the three variants of the SELECT CASE construct.
1787 SELECT CASEs with INTEGER case expressions can be translated to an
1788 equivalent GENERIC switch statement, and for LOGICAL case
1789 expressions we build one or two if-else compares.
1791 SELECT CASEs with CHARACTER case expressions are a whole different
1792 story, because they don't exist in GENERIC. So we sort them and
1793 do a binary search at runtime.
1795 Fortran has no BREAK statement, and it does not allow jumps from
1796 one case block to another. That makes things a lot easier for
/* Dispatcher: picks one of the three translators above based on the
   type of the case expression.  */
1800 gfc_trans_select (gfc_code * code)
1802 gcc_assert (code && code->expr1);
1804 /* Empty SELECT constructs are legal. */
1805 if (code->block == NULL)
1806 return build_empty_stmt (input_location);
1808 /* Select the correct translation function. */
1809 switch (code->expr1->ts.type)
1811 case BT_LOGICAL: return gfc_trans_logical_select (code);
1812 case BT_INTEGER: return gfc_trans_integer_select (code);
1813 case BT_CHARACTER: return gfc_trans_character_select (code);
/* Any other case-expression type is a front-end bug.  */
1815 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1821 /* Traversal function to substitute a replacement symtree if the symbol
1822 in the expression is the same as that passed. f == 2 signals that
1823 that variable itself is not to be checked - only the references.
1824 This group of functions is used when the variable expression in a
1825 FORALL assignment has internal references. For example:
1826 FORALL (i = 1:4) p(p(i)) = i
1827 The only recourse here is to store a copy of 'p' for the index
/* File-scope state shared by the forall_replace/forall_restore pair:
   the temporary symtree and the one it replaced.  */
1830 static gfc_symtree *new_symtree;
1831 static gfc_symtree *old_symtree;
/* Callback for gfc_traverse_expr: redirect references to SYM to the
   replacement symtree.  */
1834 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1836 if (expr->expr_type != EXPR_VARIABLE)
1841 else if (expr->symtree->n.sym == sym)
1842 expr->symtree = new_symtree;
/* Substitute new_symtree for every reference to SYM inside E.  F is
   forwarded to forall_replace (f == 2 skips the top-level variable).  */
1848 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1850 gfc_traverse_expr (e, sym, forall_replace, f);
/* Callback for gfc_traverse_expr: undo forall_replace by restoring the
   original symtree wherever the temporary one was installed.  */
1854 forall_restore (gfc_expr *expr,
1855 gfc_symbol *sym ATTRIBUTE_UNUSED,
1856 int *f ATTRIBUTE_UNUSED)
1858 if (expr->expr_type != EXPR_VARIABLE)
1861 if (expr->symtree == new_symtree)
1862 expr->symtree = old_symtree;
/* Restore the original symtree throughout expression E.  */
1868 forall_restore_symtree (gfc_expr *e)
1870 gfc_traverse_expr (e, NULL, forall_restore, 0);
/* Make a complete temporary copy of the FORALL assignment's lvalue
   variable and redirect both sides of the assignment C to it.
   Initialization code is appended to PRE, cleanup to POST.  Leaves
   old_symtree/new_symtree set for a later cleanup_forall_symtrees.  */
1874 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1879 gfc_symbol *new_sym;
1880 gfc_symbol *old_sym;
1884 /* Build a copy of the lvalue. */
1885 old_symtree = c->expr1->symtree;
1886 old_sym = old_symtree->n.sym;
1887 e = gfc_lval_expr_from_sym (old_sym);
1888 if (old_sym->attr.dimension)
1890 gfc_init_se (&tse, NULL);
/* Array case: let the subref machinery produce a packed temporary.  */
1891 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
1892 gfc_add_block_to_block (pre, &tse.pre);
1893 gfc_add_block_to_block (post, &tse.post);
1894 tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
1896 if (e->ts.type != BT_CHARACTER)
1898 /* Use the variable offset for the temporary. */
1899 tmp = gfc_conv_array_offset (old_sym->backend_decl);
1900 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
/* Scalar case: evaluate the variable and copy it into a new temp.  */
1905 gfc_init_se (&tse, NULL);
1906 gfc_init_se (&rse, NULL);
1907 gfc_conv_expr (&rse, e);
1908 if (e->ts.type == BT_CHARACTER)
1910 tse.string_length = rse.string_length;
1911 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1913 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1915 gfc_add_block_to_block (pre, &tse.pre);
1916 gfc_add_block_to_block (post, &tse.post);
1920 tmp = gfc_typenode_for_spec (&e->ts);
1921 tse.expr = gfc_create_var (tmp, "temp");
1924 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1925 e->expr_type == EXPR_VARIABLE, true);
1926 gfc_add_expr_to_block (pre, tmp);
1930 /* Create a new symbol to represent the lvalue. */
1931 new_sym = gfc_new_symbol (old_sym->name, NULL);
1932 new_sym->ts = old_sym->ts;
1933 new_sym->attr.referenced = 1;
1934 new_sym->attr.temporary = 1;
1935 new_sym->attr.dimension = old_sym->attr.dimension;
1936 new_sym->attr.flavor = old_sym->attr.flavor;
1938 /* Use the temporary as the backend_decl. */
1939 new_sym->backend_decl = tse.expr;
1941 /* Create a fake symtree for it. */
1943 new_symtree = gfc_new_symtree (&root, old_sym->name);
1944 new_symtree->n.sym = new_sym;
1945 gcc_assert (new_symtree == root);
1947 /* Go through the expression reference replacing the old_symtree
1949 forall_replace_symtree (c->expr1, old_sym, 2);
1951 /* Now we have made this temporary, we might as well use it for
1952 the right hand side. */
1953 forall_replace_symtree (c->expr2, old_sym, 1);
1957 /* Handles dependencies in forall assignments. */
/* Decide whether the FORALL assignment C needs a temporary because the
   lvalue depends on itself (e.g. FORALL (i=1:4) p(p(i)) = i) or because
   overlapping substrings are involved.  When a copy is needed,
   forall_make_variable_temp emits it into PRE/POST.  The visible return
   statements are elided here; presumably returns whether a temporary is
   required — TODO confirm against the full source.  */
1959 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1966 lsym = c->expr1->symtree->n.sym;
1967 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
1969 /* Now check for dependencies within the 'variable'
1970 expression itself. These are treated by making a complete
1971 copy of variable and changing all the references to it
1972 point to the copy instead. Note that the shallow copy of
1973 the variable will not suffice for derived types with
1974 pointer components. We therefore leave these to their
1976 if (lsym->ts.type == BT_DERIVED
1977 && lsym->ts.u.derived->attr.pointer_comp)
1981 if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
1983 forall_make_variable_temp (c, pre, post);
1987 /* Substrings with dependencies are treated in the same
1989 if (c->expr1->ts.type == BT_CHARACTER
1991 && c->expr2->expr_type == EXPR_VARIABLE
1992 && lsym == c->expr2->symtree->n.sym)
1994 for (lref = c->expr1->ref; lref; lref = lref->next)
1995 if (lref->type == REF_SUBSTRING)
1997 for (rref = c->expr2->ref; rref; rref = rref->next)
1998 if (rref->type == REF_SUBSTRING)
2002 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
2004 forall_make_variable_temp (c, pre, post);
/* Undo the symtree substitution made by forall_make_variable_temp and
   release the fake symtree and its symbol.  */
2013 cleanup_forall_symtrees (gfc_code *c)
2015 forall_restore_symtree (c->expr1);
2016 forall_restore_symtree (c->expr2);
2017 gfc_free (new_symtree->n.sym);
2018 gfc_free (new_symtree);
2022 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2023 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2024 indicates whether we should generate code to test the FORALLs mask
2025 array. OUTER is the loop header to be used for initializing mask
2028 The generated loop format is:
2029 count = (end - start + step) / step
/* Wraps BODY in one counted loop per FORALL index (innermost first),
   counting DOWN from the precomputed trip count to zero so the loop
   condition is independent of the index variable itself.  */
2042 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
2043 int mask_flag, stmtblock_t *outer)
2051 tree var, start, end, step;
2054 /* Initialize the mask index outside the FORALL nest. */
2055 if (mask_flag && forall_tmp->mask)
2056 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
2058 iter = forall_tmp->this_loop;
2059 nvar = forall_tmp->nvar;
2060 for (n = 0; n < nvar; n++)
2063 start = iter->start;
2067 exit_label = gfc_build_label_decl (NULL_TREE);
2068 TREE_USED (exit_label) = 1;
2070 /* The loop counter. */
2071 count = gfc_create_var (TREE_TYPE (var), "count");
2073 /* The body of the loop. */
2074 gfc_init_block (&block);
2076 /* The exit condition. */
2077 cond = fold_build2 (LE_EXPR, boolean_type_node,
2078 count, build_int_cst (TREE_TYPE (count), 0));
2079 tmp = build1_v (GOTO_EXPR, exit_label);
2080 tmp = fold_build3 (COND_EXPR, void_type_node,
2081 cond, tmp, build_empty_stmt (input_location));
2082 gfc_add_expr_to_block (&block, tmp);
2084 /* The main loop body. */
2085 gfc_add_expr_to_block (&block, body);
2087 /* Increment the loop variable. */
2088 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
2089 gfc_add_modify (&block, var, tmp);
2091 /* Advance to the next mask element. Only do this for the
2093 if (n == 0 && mask_flag && forall_tmp->mask)
2095 tree maskindex = forall_tmp->maskindex;
2096 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2097 maskindex, gfc_index_one_node);
2098 gfc_add_modify (&block, maskindex, tmp);
2101 /* Decrement the loop counter. */
2102 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
2103 build_int_cst (TREE_TYPE (var), 1));
2104 gfc_add_modify (&block, count, tmp);
2106 body = gfc_finish_block (&block);
2108 /* Loop var initialization. */
2109 gfc_init_block (&block);
2110 gfc_add_modify (&block, var, start);
2113 /* Initialize the loop counter. */
/* Trip count = (end - start + step) / step, as per the header comment.  */
2114 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
2115 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
2116 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
2117 gfc_add_modify (&block, count, tmp);
2119 /* The loop expression. */
2120 tmp = build1_v (LOOP_EXPR, body);
2121 gfc_add_expr_to_block (&block, tmp);
2123 /* The exit label. */
2124 tmp = build1_v (LABEL_EXPR, exit_label);
2125 gfc_add_expr_to_block (&block, tmp);
2127 body = gfc_finish_block (&block);
2134 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2135 is nonzero, the body is controlled by all masks in the forall nest.
2136 Otherwise, the innermost loop is not controlled by its mask. This
2137 is used for initializing that mask. */
/* Walks the forall_info chain from innermost to outermost (prev_nest),
   wrapping BODY in each level's loops and, where present, its mask test.  */
2140 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
2145 forall_info *forall_tmp;
2146 tree mask, maskindex;
2148 gfc_start_block (&header);
2150 forall_tmp = nested_forall_info;
2151 while (forall_tmp != NULL)
2153 /* Generate body with masks' control. */
2156 mask = forall_tmp->mask;
2157 maskindex = forall_tmp->maskindex;
2159 /* If a mask was specified make the assignment conditional. */
2162 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2163 body = build3_v (COND_EXPR, tmp, body,
2164 build_empty_stmt (input_location));
2167 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2168 forall_tmp = forall_tmp->prev_nest;
2172 gfc_add_expr_to_block (&header, body);
2173 return gfc_finish_block (&header);
2177 /* Allocate data for holding a temporary array. Returns either a local
2178 temporary array or a pointer variable. */
/* Stack allocation is used when BYTESIZE is a compile-time constant small
   enough for gfc_can_put_var_on_stack; otherwise a heap pointer is
   returned and *PDATA receives the pvoid-converted pointer for a later
   free.  */
2181 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2188 if (INTEGER_CST_P (size))
2190 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
2191 gfc_index_one_node);
2196 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2197 type = build_array_type (elem_type, type);
2198 if (gfc_can_put_var_on_stack (bytesize))
2200 gcc_assert (INTEGER_CST_P (size));
2201 tmpvar = gfc_create_var (type, "temp");
2206 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2207 *pdata = convert (pvoid_type_node, tmpvar);
2209 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2210 gfc_add_modify (pblock, tmpvar, tmp);
2216 /* Generate codes to copy the temporary to the actual lhs. */
/* EXPR is the lhs; TMP1 is the temporary array, indexed by COUNT1.
   COUNT3 indexes the optional WHEREMASK (sense inverted when INVERT).
   A scalar lhs takes the fast path; an array lhs goes through the
   scalarizer (loop1).  */
2219 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2220 tree count1, tree wheremask, bool invert)
2224 stmtblock_t block, body;
2230 lss = gfc_walk_expr (expr);
/* Scalar lhs: a single assignment from tmp1[count1], no loops needed.  */
2232 if (lss == gfc_ss_terminator)
2234 gfc_start_block (&block);
2236 gfc_init_se (&lse, NULL);
2238 /* Translate the expression. */
2239 gfc_conv_expr (&lse, expr);
2241 /* Form the expression for the temporary. */
2242 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2244 /* Use the scalar assignment as is. */
2245 gfc_add_block_to_block (&block, &lse.pre);
2246 gfc_add_modify (&block, lse.expr, tmp);
2247 gfc_add_block_to_block (&block, &lse.post);
2249 /* Increment the count1. */
2250 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2251 gfc_index_one_node);
2252 gfc_add_modify (&block, count1, tmp);
2254 tmp = gfc_finish_block (&block);
/* Array lhs: build a scalarized copy loop.  */
2258 gfc_start_block (&block);
2260 gfc_init_loopinfo (&loop1);
2261 gfc_init_se (&rse, NULL);
2262 gfc_init_se (&lse, NULL);
2264 /* Associate the lss with the loop. */
2265 gfc_add_ss_to_loop (&loop1, lss);
2267 /* Calculate the bounds of the scalarization. */
2268 gfc_conv_ss_startstride (&loop1);
2269 /* Setup the scalarizing loops. */
2270 gfc_conv_loop_setup (&loop1, &expr->where);
2272 gfc_mark_ss_chain_used (lss, 1);
2274 /* Start the scalarized loop body. */
2275 gfc_start_scalarized_body (&loop1, &body);
2277 /* Setup the gfc_se structures. */
2278 gfc_copy_loopinfo_to_se (&lse, &loop1);
2281 /* Form the expression of the temporary. */
2282 if (lss != gfc_ss_terminator)
2283 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2284 /* Translate expr. */
2285 gfc_conv_expr (&lse, expr);
2287 /* Use the scalar assignment. */
2288 rse.string_length = lse.string_length;
2289 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);
2291 /* Form the mask expression according to the mask tree list. */
2294 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2296 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2297 TREE_TYPE (wheremaskexpr),
2299 tmp = fold_build3 (COND_EXPR, void_type_node,
2301 build_empty_stmt (input_location));
2304 gfc_add_expr_to_block (&body, tmp);
2306 /* Increment count1. */
2307 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2308 count1, gfc_index_one_node);
2309 gfc_add_modify (&body, count1, tmp);
2311 /* Increment count3. */
2314 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2315 count3, gfc_index_one_node);
2316 gfc_add_modify (&body, count3, tmp);
2319 /* Generate the copying loops. */
2320 gfc_trans_scalarizing_loops (&loop1, &body);
2321 gfc_add_block_to_block (&block, &loop1.pre);
2322 gfc_add_block_to_block (&block, &loop1.post);
2323 gfc_cleanup_loop (&loop1);
2325 tmp = gfc_finish_block (&block);
2331 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2332 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2333 and should not be freed. WHEREMASK is the conditional execution mask
2334 whose sense may be inverted by INVERT. */
/* Mirror of generate_loop_for_temp_to_lhs, copying in the other
   direction: EXPR2 (rhs) is evaluated into tmp1[count1].  Scalar rhs
   (lss == gfc_ss_terminator) skips the scalarizer entirely.  */
2337 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2338 tree count1, gfc_ss *lss, gfc_ss *rss,
2339 tree wheremask, bool invert)
2341 stmtblock_t block, body1;
2348 gfc_start_block (&block);
2350 gfc_init_se (&rse, NULL);
2351 gfc_init_se (&lse, NULL);
2353 if (lss == gfc_ss_terminator)
2355 gfc_init_block (&body1);
2356 gfc_conv_expr (&rse, expr2);
2357 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2361 /* Initialize the loop. */
2362 gfc_init_loopinfo (&loop);
2364 /* We may need LSS to determine the shape of the expression. */
2365 gfc_add_ss_to_loop (&loop, lss);
2366 gfc_add_ss_to_loop (&loop, rss);
2368 gfc_conv_ss_startstride (&loop);
2369 gfc_conv_loop_setup (&loop, &expr2->where);
2371 gfc_mark_ss_chain_used (rss, 1);
2372 /* Start the loop body. */
2373 gfc_start_scalarized_body (&loop, &body1);
2375 /* Translate the expression. */
2376 gfc_copy_loopinfo_to_se (&rse, &loop);
2378 gfc_conv_expr (&rse, expr2);
2380 /* Form the expression of the temporary. */
2381 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2384 /* Use the scalar assignment. */
2385 lse.string_length = rse.string_length;
2386 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2387 expr2->expr_type == EXPR_VARIABLE, true);
2389 /* Form the mask expression according to the mask tree list. */
2392 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2394 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2395 TREE_TYPE (wheremaskexpr),
2397 tmp = fold_build3 (COND_EXPR, void_type_node,
2398 wheremaskexpr, tmp, build_empty_stmt (input_location));
2401 gfc_add_expr_to_block (&body1, tmp);
2403 if (lss == gfc_ss_terminator)
2405 gfc_add_block_to_block (&block, &body1);
2407 /* Increment count1. */
2408 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2409 gfc_index_one_node);
2410 gfc_add_modify (&block, count1, tmp);
2414 /* Increment count1. */
2415 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2416 count1, gfc_index_one_node);
2417 gfc_add_modify (&body1, count1, tmp);
2419 /* Increment count3. */
2422 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2423 count3, gfc_index_one_node);
2424 gfc_add_modify (&body1, count3, tmp);
2427 /* Generate the copying loops. */
2428 gfc_trans_scalarizing_loops (&loop, &body1);
2430 gfc_add_block_to_block (&block, &loop.pre);
2431 gfc_add_block_to_block (&block, &loop.post);
2433 gfc_cleanup_loop (&loop);
2434 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2435 as tree nodes in SS may not be valid in different scope. */
2438 tmp = gfc_finish_block (&block);
2443 /* Calculate the size of temporary needed in the assignment inside forall.
2444 LSS and RSS are filled in this function. */
/* Returns the element count of one assignment's temporary, computed by
   running the scalarizer's bounds setup with bounds checking temporarily
   disabled.  A scalar lhs yields gfc_index_one_node.  */
2447 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2448 stmtblock_t * pblock,
2449 gfc_ss **lss, gfc_ss **rss)
2457 *lss = gfc_walk_expr (expr1);
2460 size = gfc_index_one_node;
2461 if (*lss != gfc_ss_terminator)
2463 gfc_init_loopinfo (&loop);
2465 /* Walk the RHS of the expression. */
2466 *rss = gfc_walk_expr (expr2);
2467 if (*rss == gfc_ss_terminator)
2469 /* The rhs is scalar. Add a ss for the expression. */
2470 *rss = gfc_get_ss ();
2471 (*rss)->next = gfc_ss_terminator;
2472 (*rss)->type = GFC_SS_SCALAR;
2473 (*rss)->expr = expr2;
2476 /* Associate the SS with the loop. */
2477 gfc_add_ss_to_loop (&loop, *lss);
2478 /* We don't actually need to add the rhs at this point, but it might
2479 make guessing the loop bounds a bit easier. */
2480 gfc_add_ss_to_loop (&loop, *rss);
2482 /* We only want the shape of the expression, not rest of the junk
2483 generated by the scalarizer. */
2484 loop.array_parameter = 1;
2486 /* Calculate the bounds of the scalarization. */
/* Suppress bounds checking while measuring the shape; restore after.  */
2487 save_flag = gfc_option.rtcheck;
2488 gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
2489 gfc_conv_ss_startstride (&loop);
2490 gfc_option.rtcheck = save_flag;
2491 gfc_conv_loop_setup (&loop, &expr2->where);
2493 /* Figure out how many elements we need. */
2494 for (i = 0; i < loop.dimen; i++)
2496 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2497 gfc_index_one_node, loop.from[i]);
2498 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2500 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2502 gfc_add_block_to_block (pblock, &loop.pre);
2503 size = gfc_evaluate_now (size, pblock);
2504 gfc_add_block_to_block (pblock, &loop.post);
2506 /* TODO: write a function that cleans up a loopinfo without freeing
2507 the SS chains. Currently a NOP. */
2514 /* Calculate the overall iterator number of the nested forall construct.
2515 This routine actually calculates the number of times the body of the
2516 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2517 that by the expression INNER_SIZE. The BLOCK argument specifies the
2518 block in which to calculate the result, and the optional INNER_SIZE_BODY
2519 argument contains any statements that need to executed (inside the loop)
2520 to initialize or calculate INNER_SIZE. */
2523 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2524 stmtblock_t *inner_size_body, stmtblock_t *block)
2526 forall_info *forall_tmp = nested_forall_info;
2530 /* We can eliminate the innermost unconditional loops with constant
2532 if (INTEGER_CST_P (inner_size))
2535 && !forall_tmp->mask
2536 && INTEGER_CST_P (forall_tmp->size))
/* Fold constant loop levels directly into inner_size, avoiding any
   runtime accumulation for them.  */
2538 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2539 inner_size, forall_tmp->size);
2540 forall_tmp = forall_tmp->prev_nest;
2543 /* If there are no loops left, we have our constant result. */
2548 /* Otherwise, create a temporary variable to compute the result. */
2549 number = gfc_create_var (gfc_array_index_type, "num");
2550 gfc_add_modify (block, number, gfc_index_zero_node);
2552 gfc_start_block (&body);
2553 if (inner_size_body)
2554 gfc_add_block_to_block (&body, inner_size_body);
/* Accumulate: num += inner_size, once per (possibly masked) iteration.  */
2556 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2557 number, inner_size);
2560 gfc_add_modify (&body, number, tmp);
2561 tmp = gfc_finish_block (&body);
2563 /* Generate loops. */
2564 if (forall_tmp != NULL)
2565 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2567 gfc_add_expr_to_block (block, tmp);
2573 /* Allocate temporary for forall construct. SIZE is the size of temporary
2574 needed. PTEMP1 is returned for space free. */
2577 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
/* NOTE(review): lines between the signature and the statements below
   (remaining parameters, braces, declarations, return) are missing from this
   listing; only the visible statements are documented.  */
/* Convert the element size of TYPE to a gfc_array_index_type byte count;
   the multiply is skipped when each element is a single byte.  */
2584 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2585 if (!integer_onep (unit))
2586 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
/* Allocate the raw storage; per the header comment, PTEMP1 receives the
   pointer so the caller can free it later.  */
2591 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2594 tmp = build_fold_indirect_ref_loc (input_location, tmp);
2599 /* Allocate temporary for forall construct according to the information in
2600 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2601 assignment inside forall. PTEMP1 is returned for space free. */
2604 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2605 tree inner_size, stmtblock_t * inner_size_body,
2606 stmtblock_t * block, tree * ptemp1)
/* Convenience wrapper: total size = (iteration count of the nested FORALL)
   * INNER_SIZE, then delegate the actual allocation.  */
2610 /* Calculate the total size of temporary needed in forall construct. */
2611 size = compute_overall_iter_number (nested_forall_info, inner_size,
2612 inner_size_body, block);
2614 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2618 /* Handle assignments inside forall which need temporary.
2620 forall (i=start:end:stride; maskexpr)
2623 (where e,f<i> are arbitrary expressions possibly involving i
2624 and there is a dependency between e<i> and f<i>)
2626 masktmp(:) = maskexpr(:)
2631 for (i = start; i <= end; i += stride)
2635 for (i = start; i <= end; i += stride)
2637 if (masktmp[maskindex++])
2638 tmp[count1++] = f<i>
2642 for (i = start; i <= end; i += stride)
2644 if (masktmp[maskindex++])
2645 e<i> = tmp[count1++]
2650 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2651 tree wheremask, bool invert,
2652 forall_info * nested_forall_info,
2653 stmtblock_t * block)
/* NOTE(review): this listing has gaps (missing declarations, braces and a few
   statements); comments describe only the visible code.  Overall scheme per
   the header comment: evaluate rhs into a temporary in one FORALL sweep, then
   copy the temporary to the lhs in a second sweep.  */
2661 stmtblock_t inner_size_body;
2663 /* Create vars. count1 is the current iterator number of the nested
2665 count1 = gfc_create_var (gfc_array_index_type, "count1");
2667 /* Count is the wheremask index. */
2670 count = gfc_create_var (gfc_array_index_type, "count");
2671 gfc_add_modify (block, count, gfc_index_zero_node);
2676 /* Initialize count1. */
2677 gfc_add_modify (block, count1, gfc_index_zero_node);
2679 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2680 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2681 gfc_init_block (&inner_size_body);
2682 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2685 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
/* For character lhs, make sure the length has a backend decl before building
   the character type for the temporary.  */
2686 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
2688 if (!expr1->ts.u.cl->backend_decl)
2691 gfc_init_se (&tse, NULL);
2692 gfc_conv_expr (&tse, expr1->ts.u.cl->length);
2693 expr1->ts.u.cl->backend_decl = tse.expr;
2695 type = gfc_get_character_type_len (gfc_default_character_kind,
2696 expr1->ts.u.cl->backend_decl);
2699 type = gfc_typenode_for_spec (&expr1->ts);
2701 /* Allocate temporary for nested forall construct according to the
2702 information in nested_forall_info and inner_size. */
2703 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2704 &inner_size_body, block, &ptemp1);
2706 /* Generate codes to copy rhs to the temporary . */
2707 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2710 /* Generate body and loops according to the information in
2711 nested_forall_info. */
2712 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2713 gfc_add_expr_to_block (block, tmp);
/* Reset the counters before the second sweep (temporary -> lhs).  */
2716 gfc_add_modify (block, count1, gfc_index_zero_node);
2720 gfc_add_modify (block, count, gfc_index_zero_node);
2722 /* Generate codes to copy the temporary to lhs. */
2723 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2726 /* Generate body and loops according to the information in
2727 nested_forall_info. */
2728 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2729 gfc_add_expr_to_block (block, tmp);
2733 /* Free the temporary. */
2734 tmp = gfc_call_free (ptemp1);
2735 gfc_add_expr_to_block (block, tmp);
2740 /* Translate pointer assignment inside FORALL which need temporary. */
2743 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2744 forall_info * nested_forall_info,
2745 stmtblock_t * block)
/* NOTE(review): lines are missing from this listing (declarations, braces,
   the else-branch boundary).  Two visible cases: a scalar pointer assignment
   (lss == gfc_ss_terminator) using a temporary array of pointers, and an
   array/section case using a temporary array of descriptors.  */
2759 tree tmp, tmp1, ptemp1;
2761 count = gfc_create_var (gfc_array_index_type, "count");
2762 gfc_add_modify (block, count, gfc_index_zero_node);
2764 inner_size = integer_one_node;
2765 lss = gfc_walk_expr (expr1);
2766 rss = gfc_walk_expr (expr2);
2767 if (lss == gfc_ss_terminator)
/* --- Scalar pointer case: stash the rhs pointer values in a temporary. --- */
2769 type = gfc_typenode_for_spec (&expr1->ts);
2770 type = build_pointer_type (type);
2772 /* Allocate temporary for nested forall construct according to the
2773 information in nested_forall_info and inner_size. */
2774 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2775 inner_size, NULL, block, &ptemp1);
2776 gfc_start_block (&body);
2777 gfc_init_se (&lse, NULL);
2778 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2779 gfc_init_se (&rse, NULL);
2780 rse.want_pointer = 1;
2781 gfc_conv_expr (&rse, expr2);
2782 gfc_add_block_to_block (&body, &rse.pre);
2783 gfc_add_modify (&body, lse.expr,
2784 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2785 gfc_add_block_to_block (&body, &rse.post);
2787 /* Increment count. */
2788 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2789 count, gfc_index_one_node);
2790 gfc_add_modify (&body, count, tmp);
2792 tmp = gfc_finish_block (&body);
2794 /* Generate body and loops according to the information in
2795 nested_forall_info. */
2796 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2797 gfc_add_expr_to_block (block, tmp);
/* Second sweep: read the saved pointers back and assign them to the lhs.  */
2800 gfc_add_modify (block, count, gfc_index_zero_node);
2802 gfc_start_block (&body);
2803 gfc_init_se (&lse, NULL);
2804 gfc_init_se (&rse, NULL);
2805 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2806 lse.want_pointer = 1;
2807 gfc_conv_expr (&lse, expr1);
2808 gfc_add_block_to_block (&body, &lse.pre);
2809 gfc_add_modify (&body, lse.expr, rse.expr);
2810 gfc_add_block_to_block (&body, &lse.post);
2811 /* Increment count. */
2812 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2813 count, gfc_index_one_node);
2814 gfc_add_modify (&body, count, tmp);
2815 tmp = gfc_finish_block (&body);
2817 /* Generate body and loops according to the information in
2818 nested_forall_info. */
2819 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2820 gfc_add_expr_to_block (block, tmp);
/* --- Array case: store one descriptor per iteration in the temporary. --- */
2824 gfc_init_loopinfo (&loop);
2826 /* Associate the SS with the loop. */
2827 gfc_add_ss_to_loop (&loop, rss);
2829 /* Setup the scalarizing loops and bounds. */
2830 gfc_conv_ss_startstride (&loop);
2832 gfc_conv_loop_setup (&loop, &expr2->where);
2834 info = &rss->data.info;
2835 desc = info->descriptor;
2837 /* Make a new descriptor. */
2838 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2839 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0,
2840 loop.from, loop.to, 1,
2841 GFC_ARRAY_UNKNOWN, true);
2843 /* Allocate temporary for nested forall construct. */
2844 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2845 inner_size, NULL, block, &ptemp1);
2846 gfc_start_block (&body);
2847 gfc_init_se (&lse, NULL);
2848 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2849 lse.direct_byref = 1;
2850 rss = gfc_walk_expr (expr2);
2851 gfc_conv_expr_descriptor (&lse, expr2, rss);
2853 gfc_add_block_to_block (&body, &lse.pre);
2854 gfc_add_block_to_block (&body, &lse.post);
2856 /* Increment count. */
2857 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2858 count, gfc_index_one_node);
2859 gfc_add_modify (&body, count, tmp);
2861 tmp = gfc_finish_block (&body);
2863 /* Generate body and loops according to the information in
2864 nested_forall_info. */
2865 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2866 gfc_add_expr_to_block (block, tmp);
/* Second sweep: copy the saved descriptors into the lhs pointer.  */
2869 gfc_add_modify (block, count, gfc_index_zero_node);
2871 parm = gfc_build_array_ref (tmp1, count, NULL);
2872 lss = gfc_walk_expr (expr1);
2873 gfc_init_se (&lse, NULL);
2874 gfc_conv_expr_descriptor (&lse, expr1, lss);
2875 gfc_add_modify (&lse.pre, lse.expr, parm);
2876 gfc_start_block (&body);
2877 gfc_add_block_to_block (&body, &lse.pre);
2878 gfc_add_block_to_block (&body, &lse.post);
2880 /* Increment count. */
2881 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2882 count, gfc_index_one_node);
2883 gfc_add_modify (&body, count, tmp);
2885 tmp = gfc_finish_block (&body);
2887 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2888 gfc_add_expr_to_block (block, tmp);
2890 /* Free the temporary. */
2893 tmp = gfc_call_free (ptemp1);
2894 gfc_add_expr_to_block (block, tmp);
2899 /* FORALL and WHERE statements are really nasty, especially when you nest
2900 them. All the rhs of a forall assignment must be evaluated before the
2901 actual assignments are performed. Presumably this also applies to all the
2902 assignments in an inner where statement. */
2904 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2905 linear array, relying on the fact that we process in the same order in all
2908 forall (i=start:end:stride; maskexpr)
2912 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2914 count = ((end + 1 - start) / stride)
2915 masktmp(:) = maskexpr(:)
2918 for (i = start; i <= end; i += stride)
2920 if (masktmp[maskindex++])
2924 for (i = start; i <= end; i += stride)
2926 if (masktmp[maskindex++])
2930 Note that this code only works when there are no dependencies.
2931 Forall loop with array assignments and data dependencies are a real pain,
2932 because the size of the temporary cannot always be determined before the
2933 loop is executed. This problem is compounded by the presence of nested
2938 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
/* NOTE(review): this listing has many gaps (missing declarations, braces,
   switch/case framing, several statements).  Comments below describe only
   the visible code and are positioned at section boundaries.  */
2958 gfc_forall_iterator *fa;
2961 gfc_saved_var *saved_vars;
2962 iter_info *this_forall;
2966 /* Do nothing if the mask is false. */
2968 && code->expr1->expr_type == EXPR_CONSTANT
2969 && !code->expr1->value.logical)
2970 return build_empty_stmt (input_location);
2973 /* Count the FORALL index number. */
2974 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2978 /* Allocate the space for var, start, end, step, varexpr. */
2979 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2980 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2981 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2982 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2983 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2984 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2986 /* Allocate the space for info. */
2987 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2989 gfc_start_block (&pre);
2990 gfc_init_block (&post);
2991 gfc_init_block (&block);
/* Per-iterator setup: shadow each index symbol with a fresh temporary and
   evaluate start/end/stride once, recording them in an iter_info chain.  */
2994 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2996 gfc_symbol *sym = fa->var->symtree->n.sym;
2998 /* Allocate space for this_forall. */
2999 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
3001 /* Create a temporary variable for the FORALL index. */
3002 tmp = gfc_typenode_for_spec (&sym->ts);
3003 var[n] = gfc_create_var (tmp, sym->name);
3004 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
3006 /* Record it in this_forall. */
3007 this_forall->var = var[n];
3009 /* Replace the index symbol's backend_decl with the temporary decl. */
3010 sym->backend_decl = var[n];
3012 /* Work out the start, end and stride for the loop. */
3013 gfc_init_se (&se, NULL);
3014 gfc_conv_expr_val (&se, fa->start);
3015 /* Record it in this_forall. */
3016 this_forall->start = se.expr;
3017 gfc_add_block_to_block (&block, &se.pre);
3020 gfc_init_se (&se, NULL);
3021 gfc_conv_expr_val (&se, fa->end);
3022 /* Record it in this_forall. */
3023 this_forall->end = se.expr;
3024 gfc_make_safe_expr (&se);
3025 gfc_add_block_to_block (&block, &se.pre);
3028 gfc_init_se (&se, NULL);
3029 gfc_conv_expr_val (&se, fa->stride);
3030 /* Record it in this_forall. */
3031 this_forall->step = se.expr;
3032 gfc_make_safe_expr (&se);
3033 gfc_add_block_to_block (&block, &se.pre);
3036 /* Set the NEXT field of this_forall to NULL. */
3037 this_forall->next = NULL;
3038 /* Link this_forall to the info construct. */
3039 if (info->this_loop)
3041 iter_info *iter_tmp = info->this_loop;
3042 while (iter_tmp->next != NULL)
3043 iter_tmp = iter_tmp->next;
3044 iter_tmp->next = this_forall;
3047 info->this_loop = this_forall;
3053 /* Calculate the size needed for the current forall level. */
3054 size = gfc_index_one_node;
3055 for (n = 0; n < nvar; n++)
3057 /* size = (end + step - start) / step. */
3058 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
3060 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
3062 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
3063 tmp = convert (gfc_array_index_type, tmp);
3065 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
3068 /* Record the nvar and size of current forall level. */
3074 /* If the mask is .true., consider the FORALL unconditional. */
3075 if (code->expr1->expr_type == EXPR_CONSTANT
3076 && code->expr1->value.logical)
3084 /* First we need to allocate the mask. */
3087 /* As the mask array can be very big, prefer compact boolean types. */
3088 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3089 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3090 size, NULL, &block, &pmask);
3091 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3093 /* Record them in the info structure. */
3094 info->maskindex = maskindex;
3099 /* No mask was specified. */
3100 maskindex = NULL_TREE;
3101 mask = pmask = NULL_TREE;
3104 /* Link the current forall level to nested_forall_info. */
3105 info->prev_nest = nested_forall_info;
3106 nested_forall_info = info;
3108 /* Copy the mask into a temporary variable if required.
3109 For now we assume a mask temporary is needed. */
3112 /* As the mask array can be very big, prefer compact boolean types. */
3113 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3115 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3117 /* Start of mask assignment loop body. */
3118 gfc_start_block (&body);
3120 /* Evaluate the mask expression. */
3121 gfc_init_se (&se, NULL);
3122 gfc_conv_expr_val (&se, code->expr1);
3123 gfc_add_block_to_block (&body, &se.pre);
3125 /* Store the mask. */
3126 se.expr = convert (mask_type, se.expr);
3128 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3129 gfc_add_modify (&body, tmp, se.expr);
3131 /* Advance to the next mask element. */
3132 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3133 maskindex, gfc_index_one_node);
3134 gfc_add_modify (&body, maskindex, tmp);
3136 /* Generate the loops. */
3137 tmp = gfc_finish_block (&body);
3138 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3139 gfc_add_expr_to_block (&block, tmp);
/* Dispatch on the kind of each statement inside the FORALL body.  */
3142 c = code->block->next;
3144 /* TODO: loop merging in FORALL statements. */
3145 /* Now that we've got a copy of the mask, generate the assignment loops. */
3151 /* A scalar or array assignment. DO the simple check for
3152 lhs to rhs dependencies. These make a temporary for the
3153 rhs and form a second forall block to copy to variable. */
3154 need_temp = check_forall_dependencies(c, &pre, &post);
3156 /* Temporaries due to array assignment data dependencies introduce
3157 no end of problems. */
3159 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3160 nested_forall_info, &block);
3163 /* Use the normal assignment copying routines. */
3164 assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);
3166 /* Generate body and loops. */
3167 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3169 gfc_add_expr_to_block (&block, tmp);
3172 /* Cleanup any temporary symtrees that have been made to deal
3173 with dependencies. */
3175 cleanup_forall_symtrees (c);
3180 /* Translate WHERE or WHERE construct nested in FORALL. */
3181 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3184 /* Pointer assignment inside FORALL. */
3185 case EXEC_POINTER_ASSIGN:
3186 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3188 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3189 nested_forall_info, &block);
3192 /* Use the normal assignment copying routines. */
3193 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3195 /* Generate body and loops. */
3196 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3198 gfc_add_expr_to_block (&block, tmp);
/* Nested FORALL: recurse with the current level linked in.  */
3203 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3204 gfc_add_expr_to_block (&block, tmp);
3207 /* Explicit subroutine calls are prevented by the frontend but interface
3208 assignments can legitimately produce them. */
3209 case EXEC_ASSIGN_CALL:
3210 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3211 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3212 gfc_add_expr_to_block (&block, tmp);
3222 /* Restore the original index variables. */
3223 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3224 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3226 /* Free the space for var, start, end, step, varexpr. */
3232 gfc_free (saved_vars);
3234 /* Free the space for this forall_info. */
3239 /* Free the temporary for the mask. */
3240 tmp = gfc_call_free (pmask);
3241 gfc_add_expr_to_block (&block, tmp);
3244 pushdecl (maskindex);
3246 gfc_add_block_to_block (&pre, &block);
3247 gfc_add_block_to_block (&pre, &post);
3249 return gfc_finish_block (&pre);
3253 /* Translate the FORALL statement or construct. */
3255 tree gfc_trans_forall (gfc_code * code)
/* Entry point: delegate to gfc_trans_forall_1 with no enclosing
   (nested) FORALL information.  */
3257 return gfc_trans_forall_1 (code, NULL);
3261 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3262 If the WHERE construct is nested in FORALL, compute the overall temporary
3263 needed by the WHERE mask expression multiplied by the iterator number of
3265 ME is the WHERE mask expression.
3266 MASK is the current execution mask upon input, whose sense may or may
3267 not be inverted as specified by the INVERT argument.
3268 CMASK is the updated execution mask on output, or NULL if not required.
3269 PMASK is the pending execution mask on output, or NULL if not required.
3270 BLOCK is the block in which to place the condition evaluation loops. */
3273 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3274 tree mask, bool invert, tree cmask, tree pmask,
3275 tree mask_type, stmtblock_t * block)
/* NOTE(review): some original lines are missing from this listing (local
   declarations, braces, and a few conditional guards around the cmask/pmask
   stores).  Comments describe only the visible code.  */
3280 stmtblock_t body, body1;
3281 tree count, cond, mtmp;
3284 gfc_init_loopinfo (&loop);
3286 lss = gfc_walk_expr (me);
3287 rss = gfc_walk_expr (me);
3289 /* Variable to index the temporary. */
3290 count = gfc_create_var (gfc_array_index_type, "count");
3291 /* Initialize count. */
3292 gfc_add_modify (block, count, gfc_index_zero_node);
3294 gfc_start_block (&body);
3296 gfc_init_se (&rse, NULL);
3297 gfc_init_se (&lse, NULL);
/* Scalar mask expressions need no scalarizer; array ones do.  */
3299 if (lss == gfc_ss_terminator)
3301 gfc_init_block (&body1);
3305 /* Initialize the loop. */
3306 gfc_init_loopinfo (&loop);
3308 /* We may need LSS to determine the shape of the expression. */
3309 gfc_add_ss_to_loop (&loop, lss);
3310 gfc_add_ss_to_loop (&loop, rss);
3312 gfc_conv_ss_startstride (&loop);
3313 gfc_conv_loop_setup (&loop, &me->where);
3315 gfc_mark_ss_chain_used (rss, 1);
3316 /* Start the loop body. */
3317 gfc_start_scalarized_body (&loop, &body1);
3319 /* Translate the expression. */
3320 gfc_copy_loopinfo_to_se (&rse, &loop);
3322 gfc_conv_expr (&rse, me);
3325 /* Variable to evaluate mask condition. */
3326 cond = gfc_create_var (mask_type, "cond");
3327 if (mask && (cmask || pmask))
3328 mtmp = gfc_create_var (mask_type, "mask");
3329 else mtmp = NULL_TREE;
3331 gfc_add_block_to_block (&body1, &lse.pre);
3332 gfc_add_block_to_block (&body1, &rse.pre);
3334 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
/* Combine the incoming control mask (possibly inverted) with the freshly
   evaluated condition: cmask gets mask AND cond, pmask gets mask AND !cond,
   per the header comment above this function.  */
3336 if (mask && (cmask || pmask))
3338 tmp = gfc_build_array_ref (mask, count, NULL);
3340 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3341 gfc_add_modify (&body1, mtmp, tmp);
3346 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3349 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3350 gfc_add_modify (&body1, tmp1, tmp);
3355 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3356 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3358 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3359 gfc_add_modify (&body1, tmp1, tmp);
3362 gfc_add_block_to_block (&body1, &lse.post);
3363 gfc_add_block_to_block (&body1, &rse.post);
3365 if (lss == gfc_ss_terminator)
3367 gfc_add_block_to_block (&body, &body1);
3371 /* Increment count. */
3372 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3373 gfc_index_one_node);
3374 gfc_add_modify (&body1, count, tmp1);
3376 /* Generate the copying loops. */
3377 gfc_trans_scalarizing_loops (&loop, &body1);
3379 gfc_add_block_to_block (&body, &loop.pre);
3380 gfc_add_block_to_block (&body, &loop.post);
3382 gfc_cleanup_loop (&loop);
3383 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3384 as tree nodes in SS may not be valid in different scope. */
3387 tmp1 = gfc_finish_block (&body);
3388 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3389 if (nested_forall_info != NULL)
3390 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3392 gfc_add_expr_to_block (block, tmp1);
3396 /* Translate an assignment statement in a WHERE statement or construct
3397 statement. The MASK expression is used to control which elements
3398 of EXPR1 shall be assigned. The sense of MASK is specified by
3402 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3403 tree mask, bool invert,
3404 tree count1, tree count2,
/* NOTE(review): this listing omits lines (remaining parameters such as the
   code pointer 'cnext', declarations, braces, and some guards around the
   mask/index uses).  Comments describe only the visible code.  */
3410 gfc_ss *lss_section;
3417 tree index, maskexpr;
3419 /* A defined assignment. */
3420 if (cnext && cnext->resolved_sym)
3421 return gfc_trans_call (cnext, true, mask, count1, invert);
3424 /* TODO: handle this special case.
3425 Special case a single function returning an array. */
3426 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3428 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3434 /* Assignment of the form lhs = rhs. */
3435 gfc_start_block (&block);
3437 gfc_init_se (&lse, NULL);
3438 gfc_init_se (&rse, NULL);
3441 lss = gfc_walk_expr (expr1);
3444 /* In each where-assign-stmt, the mask-expr and the variable being
3445 defined shall be arrays of the same shape. */
3446 gcc_assert (lss != gfc_ss_terminator);
3448 /* The assignment needs scalarization. */
3451 /* Find a non-scalar SS from the lhs. */
3452 while (lss_section != gfc_ss_terminator
3453 && lss_section->type != GFC_SS_SECTION)
3454 lss_section = lss_section->next;
3456 gcc_assert (lss_section != gfc_ss_terminator);
3458 /* Initialize the scalarizer. */
3459 gfc_init_loopinfo (&loop);
3462 rss = gfc_walk_expr (expr2);
3463 if (rss == gfc_ss_terminator)
3465 /* The rhs is scalar. Add a ss for the expression. */
3466 rss = gfc_get_ss ();
3468 rss->next = gfc_ss_terminator;
3469 rss->type = GFC_SS_SCALAR;
3473 /* Associate the SS with the loop. */
3474 gfc_add_ss_to_loop (&loop, lss);
3475 gfc_add_ss_to_loop (&loop, rss);
3477 /* Calculate the bounds of the scalarization. */
3478 gfc_conv_ss_startstride (&loop);
3480 /* Resolve any data dependencies in the statement. */
3481 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3483 /* Setup the scalarizing loops. */
3484 gfc_conv_loop_setup (&loop, &expr2->where);
3486 /* Setup the gfc_se structures. */
3487 gfc_copy_loopinfo_to_se (&lse, &loop);
3488 gfc_copy_loopinfo_to_se (&rse, &loop);
/* loop.temp_ss != NULL means the dependency resolver inserted a temporary;
   the assignment then goes rhs -> temp here and temp -> lhs further below.  */
3491 gfc_mark_ss_chain_used (rss, 1);
3492 if (loop.temp_ss == NULL)
3495 gfc_mark_ss_chain_used (lss, 1);
3499 lse.ss = loop.temp_ss;
3500 gfc_mark_ss_chain_used (lss, 3);
3501 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3504 /* Start the scalarized loop body. */
3505 gfc_start_scalarized_body (&loop, &body);
3507 /* Translate the expression. */
3508 gfc_conv_expr (&rse, expr2);
3509 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3511 gfc_conv_tmp_array_ref (&lse);
3512 gfc_advance_se_ss_chain (&lse);
3515 gfc_conv_expr (&lse, expr1);
3517 /* Form the mask expression according to the mask. */
3519 maskexpr = gfc_build_array_ref (mask, index, NULL);
3521 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3523 /* Use the scalar assignment as is. */
3524 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3525 loop.temp_ss != NULL, false, true);
/* The assignment is executed only where the (possibly inverted) mask holds. */
3527 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
3529 gfc_add_expr_to_block (&body, tmp);
3531 if (lss == gfc_ss_terminator)
3533 /* Increment count1. */
3534 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3535 count1, gfc_index_one_node);
3536 gfc_add_modify (&body, count1, tmp);
3538 /* Use the scalar assignment as is. */
3539 gfc_add_block_to_block (&block, &body);
3543 gcc_assert (lse.ss == gfc_ss_terminator
3544 && rse.ss == gfc_ss_terminator);
3546 if (loop.temp_ss != NULL)
3548 /* Increment count1 before finish the main body of a scalarized
3550 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3551 count1, gfc_index_one_node);
3552 gfc_add_modify (&body, count1, tmp);
3553 gfc_trans_scalarized_loop_boundary (&loop, &body);
3555 /* We need to copy the temporary to the actual lhs. */
3556 gfc_init_se (&lse, NULL);
3557 gfc_init_se (&rse, NULL);
3558 gfc_copy_loopinfo_to_se (&lse, &loop);
3559 gfc_copy_loopinfo_to_se (&rse, &loop);
3561 rse.ss = loop.temp_ss;
3564 gfc_conv_tmp_array_ref (&rse);
3565 gfc_advance_se_ss_chain (&rse);
3566 gfc_conv_expr (&lse, expr1);
3568 gcc_assert (lse.ss == gfc_ss_terminator
3569 && rse.ss == gfc_ss_terminator);
3571 /* Form the mask expression according to the mask tree list. */
3573 maskexpr = gfc_build_array_ref (mask, index, NULL);
3575 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3578 /* Use the scalar assignment as is. */
3579 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
3581 tmp = build3_v (COND_EXPR, maskexpr, tmp,
3582 build_empty_stmt (input_location));
3583 gfc_add_expr_to_block (&body, tmp);
3585 /* Increment count2. */
3586 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3587 count2, gfc_index_one_node);
3588 gfc_add_modify (&body, count2, tmp);
3592 /* Increment count1. */
3593 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3594 count1, gfc_index_one_node);
3595 gfc_add_modify (&body, count1, tmp);
3598 /* Generate the copying loops. */
3599 gfc_trans_scalarizing_loops (&loop, &body);
3601 /* Wrap the whole thing up. */
3602 gfc_add_block_to_block (&block, &loop.pre);
3603 gfc_add_block_to_block (&block, &loop.post);
3604 gfc_cleanup_loop (&loop);
3607 return gfc_finish_block (&block);
3611 /* Translate the WHERE construct or statement.
3612 This function can be called iteratively to translate the nested WHERE
3613 construct or statement.
3614 MASK is the control mask. */
3617 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3618 forall_info * nested_forall_info, stmtblock_t * block)
/* NOTE(review): gaps in this listing (declarations, braces, the loop framing
   around the clause walk, switch/case labels).  Comments describe only the
   visible code.  */
3620 stmtblock_t inner_size_body;
3621 tree inner_size, size;
3630 tree count1, count2;
3634 tree pcmask = NULL_TREE;
3635 tree ppmask = NULL_TREE;
3636 tree cmask = NULL_TREE;
3637 tree pmask = NULL_TREE;
3638 gfc_actual_arglist *arg;
3640 /* the WHERE statement or the WHERE construct statement. */
3641 cblock = code->block;
3643 /* As the mask array can be very big, prefer compact boolean types. */
3644 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3646 /* Determine which temporary masks are needed. */
3649 /* One clause: No ELSEWHEREs. */
3650 need_cmask = (cblock->next != 0);
3653 else if (cblock->block->block)
3655 /* Three or more clauses: Conditional ELSEWHEREs. */
3659 else if (cblock->next)
3661 /* Two clauses, the first non-empty. */
3663 need_pmask = (mask != NULL_TREE
3664 && cblock->block->next != 0);
3666 else if (!cblock->block->next)
3668 /* Two clauses, both empty. */
3672 /* Two clauses, the first empty, the second non-empty. */
3675 need_cmask = (cblock->block->expr1 != 0);
3684 if (need_cmask || need_pmask)
3686 /* Calculate the size of temporary needed by the mask-expr. */
3687 gfc_init_block (&inner_size_body);
3688 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
3689 &inner_size_body, &lss, &rss);
3691 /* Calculate the total size of temporary needed. */
3692 size = compute_overall_iter_number (nested_forall_info, inner_size,
3693 &inner_size_body, block);
3695 /* Check whether the size is negative. */
/* Clamp a negative size to zero (zero-trip loops) before allocating.  */
3696 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3697 gfc_index_zero_node);
3698 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3699 gfc_index_zero_node, size);
3700 size = gfc_evaluate_now (size, block);
3702 /* Allocate temporary for WHERE mask if needed. */
3704 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3707 /* Allocate temporary for !mask if needed. */
3709 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3715 /* Each time around this loop, the where clause is conditional
3716 on the value of mask and invert, which are updated at the
3717 bottom of the loop. */
3719 /* Has mask-expr. */
3722 /* Ensure that the WHERE mask will be evaluated exactly once.
3723 If there are no statements in this WHERE/ELSEWHERE clause,
3724 then we don't need to update the control mask (cmask).
3725 If this is the last clause of the WHERE construct, then
3726 we don't need to update the pending control mask (pmask). */
3728 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3730 cblock->next ? cmask : NULL_TREE,
3731 cblock->block ? pmask : NULL_TREE,
3734 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3736 (cblock->next || cblock->block)
3737 ? cmask : NULL_TREE,
3738 NULL_TREE, mask_type, block);
3742 /* It's a final elsewhere-stmt. No mask-expr is present. */
3746 /* The body of this where clause are controlled by cmask with
3747 sense specified by invert. */
3749 /* Get the assignment statement of a WHERE statement, or the first
3750 statement in where-body-construct of a WHERE construct. */
3751 cnext = cblock->next;
3756 /* WHERE assignment statement. */
3757 case EXEC_ASSIGN_CALL:
/* For a defined-assignment call, recover expr1/expr2 from the actual
   argument list before the dependency check below.  */
3759 arg = cnext->ext.actual;
3760 expr1 = expr2 = NULL;
3761 for (; arg; arg = arg->next)
3773 expr1 = cnext->expr1;
3774 expr2 = cnext->expr2;
3776 if (nested_forall_info != NULL)
3778 need_temp = gfc_check_dependency (expr1, expr2, 0);
3779 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3780 gfc_trans_assign_need_temp (expr1, expr2,
3782 nested_forall_info, block);
3785 /* Variables to control maskexpr. */
3786 count1 = gfc_create_var (gfc_array_index_type, "count1");
3787 count2 = gfc_create_var (gfc_array_index_type, "count2");
3788 gfc_add_modify (block, count1, gfc_index_zero_node);
3789 gfc_add_modify (block, count2, gfc_index_zero_node);
3791 tmp = gfc_trans_where_assign (expr1, expr2,
3796 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3798 gfc_add_expr_to_block (block, tmp);
3803 /* Variables to control maskexpr. */
3804 count1 = gfc_create_var (gfc_array_index_type, "count1");
3805 count2 = gfc_create_var (gfc_array_index_type, "count2");
3806 gfc_add_modify (block, count1, gfc_index_zero_node);
3807 gfc_add_modify (block, count2, gfc_index_zero_node);
3809 tmp = gfc_trans_where_assign (expr1, expr2,
3813 gfc_add_expr_to_block (block, tmp);
3818 /* WHERE or WHERE construct is part of a where-body-construct. */
/* Nested WHERE: recurse with the current clause's control mask.  */
3820 gfc_trans_where_2 (cnext, cmask, invert,
3821 nested_forall_info, block);
3828 /* The next statement within the same where-body-construct. */
3829 cnext = cnext->next;
3831 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3832 cblock = cblock->block;
3833 if (mask == NULL_TREE)
3835 /* If we're the initial WHERE, we can simply invert the sense
3836 of the current mask to obtain the "mask" for the remaining
3843 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3849 /* If we allocated a pending mask array, deallocate it now. */
3852 tmp = gfc_call_free (ppmask);
3853 gfc_add_expr_to_block (block, tmp);
3856 /* If we allocated a current mask array, deallocate it now. */
3859 tmp = gfc_call_free (pcmask);
3860 gfc_add_expr_to_block (block, tmp);
3864 /* Translate a simple WHERE construct or statement without dependencies.
3865 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3866 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3867 Currently both CBLOCK and EBLOCK are restricted to single assignments.
3867 Because the construct is dependence free (checked by the caller,
3867 gfc_trans_where), the mask evaluation and both assignments can be
3867 fused into a single scalarizer loop with a per-element COND_EXPR. */
3870 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3872 stmtblock_t block, body;
3873 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3874 tree tmp, cexpr, tstmt, estmt;
3875 gfc_ss *css, *tdss, *tsss;
3876 gfc_se cse, tdse, tsse, edse, esse;
3881 /* Allow the scalarizer to workshare simple where loops. */
3882 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
3883 ompws_flags |= OMPWS_SCALARIZER_WS;
3885 cond = cblock->expr1;
3886 tdst = cblock->next->expr1;
3887 tsrc = cblock->next->expr2;
3888 edst = eblock ? eblock->next->expr1 : NULL;
3889 esrc = eblock ? eblock->next->expr2 : NULL;
3891 gfc_start_block (&block);
3892 gfc_init_loopinfo (&loop);
3894 /* Handle the condition. */
3895 gfc_init_se (&cse, NULL);
3896 css = gfc_walk_expr (cond);
3897 gfc_add_ss_to_loop (&loop, css);
3899 /* Handle the then-clause. */
3900 gfc_init_se (&tdse, NULL);
3901 gfc_init_se (&tsse, NULL);
3902 tdss = gfc_walk_expr (tdst);
3903 tsss = gfc_walk_expr (tsrc);
3904 if (tsss == gfc_ss_terminator)
/* A scalar RHS produces an empty SS chain; wrap it in a GFC_SS_SCALAR
   node so the scalarizer can still drive its evaluation.  */
3906 tsss = gfc_get_ss ();
3908 tsss->next = gfc_ss_terminator;
3909 tsss->type = GFC_SS_SCALAR;
3912 gfc_add_ss_to_loop (&loop, tdss);
3913 gfc_add_ss_to_loop (&loop, tsss);
3917 /* Handle the else clause. */
3918 gfc_init_se (&edse, NULL);
3919 gfc_init_se (&esse, NULL);
3920 edss = gfc_walk_expr (edst);
3921 esss = gfc_walk_expr (esrc);
3922 if (esss == gfc_ss_terminator)
/* Same scalar-RHS wrapping for the elsewhere assignment.  */
3924 esss = gfc_get_ss ();
3926 esss->next = gfc_ss_terminator;
3927 esss->type = GFC_SS_SCALAR;
3930 gfc_add_ss_to_loop (&loop, edss);
3931 gfc_add_ss_to_loop (&loop, esss);
/* Compute bounds/strides for every chain, then set up the fused loop.  */
3934 gfc_conv_ss_startstride (&loop);
3935 gfc_conv_loop_setup (&loop, &tdst->where);
3937 gfc_mark_ss_chain_used (css, 1);
3938 gfc_mark_ss_chain_used (tdss, 1);
3939 gfc_mark_ss_chain_used (tsss, 1);
3942 gfc_mark_ss_chain_used (edss, 1);
3943 gfc_mark_ss_chain_used (esss, 1);
3946 gfc_start_scalarized_body (&loop, &body);
3948 gfc_copy_loopinfo_to_se (&cse, &loop);
3949 gfc_copy_loopinfo_to_se (&tdse, &loop);
3950 gfc_copy_loopinfo_to_se (&tsse, &loop);
3956 gfc_copy_loopinfo_to_se (&edse, &loop);
3957 gfc_copy_loopinfo_to_se (&esse, &loop);
/* Evaluate the mask expression once per iteration.  */
3962 gfc_conv_expr (&cse, cond);
3963 gfc_add_block_to_block (&body, &cse.pre);
3966 gfc_conv_expr (&tsse, tsrc);
3967 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
/* A loop temporary was created; redirect the destination reference
   into it instead of the real LHS array.  */
3969 gfc_conv_tmp_array_ref (&tdse);
3970 gfc_advance_se_ss_chain (&tdse);
3973 gfc_conv_expr (&tdse, tdst);
3977 gfc_conv_expr (&esse, esrc);
3978 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3980 gfc_conv_tmp_array_ref (&edse);
3981 gfc_advance_se_ss_chain (&edse);
3984 gfc_conv_expr (&edse, edst);
/* Build "if (mask) then-assign; else else-assign;" for one element.
   This is where the two standard-mandated WHERE loops are coalesced
   into a single loop body (see the caveats in gfc_trans_where).  */
3987 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
3988 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
3990 : build_empty_stmt (input_location);
3991 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3992 gfc_add_expr_to_block (&body, tmp);
3993 gfc_add_block_to_block (&body, &cse.post);
3995 gfc_trans_scalarizing_loops (&loop, &body);
3996 gfc_add_block_to_block (&block, &loop.pre);
3997 gfc_add_block_to_block (&block, &loop.post);
3998 gfc_cleanup_loop (&loop);
4000 return gfc_finish_block (&block);
4003 /* As the WHERE or WHERE construct statement can be nested, we call
4004 gfc_trans_where_2 to do the translation, and pass the initial
4005 NULL values for both the control mask and the pending control mask.
4005 Before falling back to the general masked translation, try to prove
4005 the construct dependence-free so the much cheaper single-loop form
4005 (gfc_trans_where_3) can be used. */
4008 gfc_trans_where (gfc_code * code)
4014 cblock = code->block;
/* Fast path 1: the WHERE body is exactly one ordinary assignment.  */
4016 && cblock->next->op == EXEC_ASSIGN
4017 && !cblock->next->next)
4019 eblock = cblock->block;
4022 /* A simple "WHERE (cond) x = y" statement or block is
4023 dependence free if cond is not dependent upon writing x,
4024 and the source y is unaffected by the destination x. */
4025 if (!gfc_check_dependency (cblock->next->expr1,
4027 && !gfc_check_dependency (cblock->next->expr1,
4028 cblock->next->expr2, 0))
4029 return gfc_trans_where_3 (cblock, NULL);
/* Fast path 2: WHERE ... ELSEWHERE ... ENDWHERE, each side a single
   assignment, with an unconditional (mask-less) ELSEWHERE.  */
4031 else if (!eblock->expr1
4034 && eblock->next->op == EXEC_ASSIGN
4035 && !eblock->next->next)
4037 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4038 block is dependence free if cond is not dependent on writes
4039 to x1 and x2, y1 is not dependent on writes to x2, and y2
4040 is not dependent on writes to x1, and both y's are not
4041 dependent upon their own x's. In addition to this, the
4042 final two dependency checks below exclude all but the same
4043 array reference if the where and elsewhere destinations
4044 are the same. In short, this is VERY conservative and this
4045 is needed because the two loops, required by the standard
4046 are coalesced in gfc_trans_where_3. */
4047 if (!gfc_check_dependency(cblock->next->expr1,
4049 && !gfc_check_dependency(eblock->next->expr1,
4051 && !gfc_check_dependency(cblock->next->expr1,
4052 eblock->next->expr2, 1)
4053 && !gfc_check_dependency(eblock->next->expr1,
4054 cblock->next->expr2, 1)
4055 && !gfc_check_dependency(cblock->next->expr1,
4056 cblock->next->expr2, 1)
4057 && !gfc_check_dependency(eblock->next->expr1,
4058 eblock->next->expr2, 1)
4059 && !gfc_check_dependency(cblock->next->expr1,
4060 eblock->next->expr1, 0)
4061 && !gfc_check_dependency(eblock->next->expr1,
4062 cblock->next->expr1, 0))
4063 return gfc_trans_where_3 (cblock, eblock);
/* General case: full mask-based translation with NULL initial
   control mask and pending control mask.  */
4067 gfc_start_block (&block);
4069 gfc_trans_where_2 (code, NULL, false, NULL, &block);
4071 return gfc_finish_block (&block);
4075 /* CYCLE a DO loop. The label decl has already been created by
4076 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4077 node at the head of the loop. We must mark the label as used. */
4080 gfc_trans_cycle (gfc_code * code)
4084 cycle_label = code->ext.whichloop->cycle_label;
/* Setting TREE_USED keeps the label from being treated as
   unreferenced; the CYCLE is then a plain GOTO to it.  */
4085 TREE_USED (cycle_label) = 1;
4086 return build1_v (GOTO_EXPR, cycle_label);
4090 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4091 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
4095 gfc_trans_exit (gfc_code * code)
4099 exit_label = code->ext.whichloop->exit_label;
/* Mark the label used and emit a GOTO past the end of the loop.  */
4100 TREE_USED (exit_label) = 1;
4101 return build1_v (GOTO_EXPR, exit_label);
4105 /* Translate the ALLOCATE statement.
4105 For each object in the allocation list: arrays are handled by
4105 gfc_array_allocate; otherwise a scalar/derived allocation is
4105 emitted.  Also handles STAT= (code->expr1) and ERRMSG=
4105 (code->expr2), SOURCE= initialization (code->expr3), and vptr
4105 setup for CLASS entities. */
4108 gfc_trans_allocate (gfc_code * code)
4121 if (!code->ext.alloc.list)
4124 pstat = stat = error_label = tmp = memsz = NULL_TREE;
4126 gfc_start_block (&block);
4128 /* Either STAT= and/or ERRMSG is present. */
4129 if (code->expr1 || code->expr2)
4131 tree gfc_int4_type_node = gfc_get_int_type (4);
4133 stat = gfc_create_var (gfc_int4_type_node, "stat");
4134 pstat = gfc_build_addr_expr (NULL_TREE, stat);
/* Allocation failures jump here instead of aborting.  */
4136 error_label = gfc_build_label_decl (NULL_TREE);
4137 TREE_USED (error_label) = 1;
/* Process each object in the allocation list.  */
4140 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4142 expr = gfc_copy_expr (al->expr);
/* For CLASS objects, allocation acts on the $data component.  */
4144 if (expr->ts.type == BT_CLASS)
4145 gfc_add_component_ref (expr, "$data");
4147 gfc_init_se (&se, NULL);
4148 gfc_start_block (&se.pre);
4150 se.want_pointer = 1;
4151 se.descriptor_only = 1;
4152 gfc_conv_expr (&se, expr);
4154 if (!gfc_array_allocate (&se, expr, pstat))
4156 /* A scalar or derived type. */
4158 /* Determine allocate size. */
4159 if (al->expr->ts.type == BT_CLASS && code->expr3)
4161 if (code->expr3->ts.type == BT_CLASS)
/* Polymorphic SOURCE=: size comes from the dynamic type's
   vtab ($vptr->$size), evaluated at run time.  */
4165 sz = gfc_copy_expr (code->expr3);
4166 gfc_add_component_ref (sz, "$vptr");
4167 gfc_add_component_ref (sz, "$size");
4168 gfc_init_se (&se_sz, NULL);
4169 gfc_conv_expr (&se_sz, sz);
4174 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
4176 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4177 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
4179 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4181 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4182 memsz = se.string_length;
4184 /* Allocate - for non-pointers with re-alloc checking. */
4191 /* Find the last reference in the chain. */
4192 while (ref && ref->next != NULL)
4194 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT);
4199 allocatable = expr->symtree->n.sym->attr.allocatable;
4201 allocatable = ref->u.c.component->attr.allocatable;
4204 tmp = gfc_allocate_array_with_status (&se.pre, se.expr, memsz,
4207 tmp = gfc_allocate_with_status (&se.pre, memsz, pstat);
/* Store the newly allocated address into the object's pointer.  */
4210 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
4211 fold_convert (TREE_TYPE (se.expr), tmp));
4212 gfc_add_expr_to_block (&se.pre, tmp);
4214 if (code->expr1 || code->expr2)
/* STAT=/ERRMSG=: on nonzero status, branch to the error label
   instead of continuing with the remaining allocations.  */
4216 tmp = build1_v (GOTO_EXPR, error_label);
4217 parm = fold_build2 (NE_EXPR, boolean_type_node,
4218 stat, build_int_cst (TREE_TYPE (stat), 0));
4219 tmp = fold_build3 (COND_EXPR, void_type_node,
4220 parm, tmp, build_empty_stmt (input_location));
4221 gfc_add_expr_to_block (&se.pre, tmp);
4224 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
/* Nullify allocatable components so later (re)allocation and
   deallocation of components behaves correctly.  */
4226 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4227 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4228 gfc_add_expr_to_block (&se.pre, tmp);
4233 tmp = gfc_finish_block (&se.pre);
4234 gfc_add_expr_to_block (&block, tmp);
4236 /* Initialization via SOURCE block. */
4237 if (code->expr3 && !code->expr3->mold)
4239 gfc_expr *rhs = gfc_copy_expr (code->expr3);
4240 if (al->expr->ts.type == BT_CLASS)
/* CLASS: copy the source data with memcpy using the size
   computed above.  */
4243 if (rhs->ts.type == BT_CLASS)
4244 gfc_add_component_ref (rhs, "$data");
4245 gfc_init_se (&dst, NULL);
4246 gfc_init_se (&src, NULL);
4247 gfc_conv_expr (&dst, expr);
4248 gfc_conv_expr (&src, rhs);
4249 gfc_add_block_to_block (&block, &src.pre);
4250 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
/* Non-CLASS: a normal assignment performs the SOURCE= init.  */
4253 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4255 gfc_free_expr (rhs);
4256 gfc_add_expr_to_block (&block, tmp);
4259 /* Allocation of CLASS entities. */
4260 gfc_free_expr (expr);
4262 if (expr->ts.type == BT_CLASS)
4267 /* Initialize VPTR for CLASS objects. */
4268 lhs = gfc_expr_to_initialize (expr);
4269 gfc_add_component_ref (lhs, "$vptr");
4271 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4273 /* Polymorphic SOURCE: VPTR must be determined at run time. */
4274 rhs = gfc_copy_expr (code->expr3);
4275 gfc_add_component_ref (rhs, "$vptr");
4276 tmp = gfc_trans_pointer_assignment (lhs, rhs);
4277 gfc_add_expr_to_block (&block, tmp);
4278 gfc_free_expr (rhs);
4282 /* VPTR is fixed at compile time. */
4286 ts = &code->expr3->ts;
4287 else if (expr->ts.type == BT_DERIVED)
4289 else if (code->ext.alloc.ts.type == BT_DERIVED)
4290 ts = &code->ext.alloc.ts;
4291 else if (expr->ts.type == BT_CLASS)
4292 ts = &CLASS_DATA (expr)->ts;
4296 if (ts->type == BT_DERIVED)
/* Point the vptr at the statically known vtab for TS.  */
4298 vtab = gfc_find_derived_vtab (ts->u.derived);
4300 gfc_trans_assign_vtab_procs (&block, ts->u.derived, vtab);
4301 gfc_init_se (&lse, NULL);
4302 lse.want_pointer = 1;
4303 gfc_conv_expr (&lse, lhs);
4304 tmp = gfc_build_addr_expr (NULL_TREE,
4305 gfc_get_symbol_decl (vtab));
4306 gfc_add_modify (&block, lse.expr,
4307 fold_convert (TREE_TYPE (lse.expr), tmp));
/* Error landing pad: assign the status to the STAT= variable.  */
4317 tmp = build1_v (LABEL_EXPR, error_label);
4318 gfc_add_expr_to_block (&block, tmp);
4320 gfc_init_se (&se, NULL);
4321 gfc_conv_expr_lhs (&se, code->expr1);
4322 tmp = convert (TREE_TYPE (se.expr), stat);
4323 gfc_add_modify (&block, se.expr, tmp);
4329 /* A better error message may be possible, but not required. */
4330 const char *msg = "Attempt to allocate an allocated object";
4331 tree errmsg, slen, dlen;
4333 gfc_init_se (&se, NULL);
4334 gfc_conv_expr_lhs (&se, code->expr2);
4336 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4338 gfc_add_modify (&block, errmsg,
4339 gfc_build_addr_expr (pchar_type_node,
4340 gfc_build_localized_cstring_const (msg)));
/* Copy min(strlen(msg), len(ERRMSG var)) bytes, but only when
   stat is nonzero -- ERRMSG= is untouched on success.  */
4342 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4343 dlen = gfc_get_expr_charlen (code->expr2);
4344 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4346 dlen = build_call_expr_loc (input_location,
4347 built_in_decls[BUILT_IN_MEMCPY], 3,
4348 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4350 tmp = fold_build2 (NE_EXPR, boolean_type_node, stat,
4351 build_int_cst (TREE_TYPE (stat), 0));
4353 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4355 gfc_add_expr_to_block (&block, tmp);
4358 return gfc_finish_block (&block);
4362 /* Translate a DEALLOCATE statement.
4362 Deallocates every object in the list; unlike ALLOCATE there is no
4362 early exit -- failures are accumulated in a running total (astat)
4362 which feeds STAT= and gates the ERRMSG= copy. */
4365 gfc_trans_deallocate (gfc_code *code)
4370 tree apstat, astat, pstat, stat, tmp;
4373 pstat = apstat = stat = astat = tmp = NULL_TREE;
4375 gfc_start_block (&block);
4377 /* Count the number of failed deallocations. If deallocate() was
4378 called with STAT= , then set STAT to the count. If deallocate
4379 was called with ERRMSG, then set ERRMSG to a string. */
4380 if (code->expr1 || code->expr2)
4382 tree gfc_int4_type_node = gfc_get_int_type (4);
/* stat: status of the most recent deallocation.  */
4384 stat = gfc_create_var (gfc_int4_type_node, "stat");
4385 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4387 /* Running total of possible deallocation failures. */
4388 astat = gfc_create_var (gfc_int4_type_node, "astat");
4389 apstat = gfc_build_addr_expr (NULL_TREE, astat);
4391 /* Initialize astat to 0. */
4392 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
/* Process each object in the deallocation list.  */
4395 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4398 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4400 gfc_init_se (&se, NULL);
4401 gfc_start_block (&se.pre);
4403 se.want_pointer = 1;
4404 se.descriptor_only = 1;
4405 gfc_conv_expr (&se, expr);
4407 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
/* Derived type with allocatable components: those components must
   be freed first, except through an ultimate pointer component.  */
4410 gfc_ref *last = NULL;
4411 for (ref = expr->ref; ref; ref = ref->next)
4412 if (ref->type == REF_COMPONENT)
4415 /* Do not deallocate the components of a derived type
4416 ultimate pointer component. */
4417 if (!(last && last->u.c.component->attr.pointer)
4418 && !(!last && expr->symtree->n.sym->attr.pointer))
4420 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
4422 gfc_add_expr_to_block (&se.pre, tmp);
/* Array vs. scalar deallocation paths.  */
4427 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4430 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4431 gfc_add_expr_to_block (&se.pre, tmp);
/* Null out the pointer after freeing it.  */
4433 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4434 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4437 gfc_add_expr_to_block (&se.pre, tmp);
4439 /* Keep track of the number of failed deallocations by adding stat
4440 of the last deallocation to the running total. */
4441 if (code->expr1 || code->expr2)
4443 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4444 gfc_add_modify (&se.pre, astat, apstat);
4447 tmp = gfc_finish_block (&se.pre);
4448 gfc_add_expr_to_block (&block, tmp);
/* STAT=: store the accumulated failure count.  */
4455 gfc_init_se (&se, NULL);
4456 gfc_conv_expr_lhs (&se, code->expr1);
4457 tmp = convert (TREE_TYPE (se.expr), astat);
4458 gfc_add_modify (&block, se.expr, tmp);
4464 /* A better error message may be possible, but not required. */
4465 const char *msg = "Attempt to deallocate an unallocated object";
4466 tree errmsg, slen, dlen;
4468 gfc_init_se (&se, NULL);
4469 gfc_conv_expr_lhs (&se, code->expr2);
4471 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4473 gfc_add_modify (&block, errmsg,
4474 gfc_build_addr_expr (pchar_type_node,
4475 gfc_build_localized_cstring_const (msg)));
/* Copy min(strlen(msg), len(ERRMSG var)) bytes into ERRMSG=, but
   only when the accumulated status astat is nonzero.  */
4477 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4478 dlen = gfc_get_expr_charlen (code->expr2);
4479 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4481 dlen = build_call_expr_loc (input_location,
4482 built_in_decls[BUILT_IN_MEMCPY], 3,
4483 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4485 tmp = fold_build2 (NE_EXPR, boolean_type_node, astat,
4486 build_int_cst (TREE_TYPE (astat), 0));
4488 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4490 gfc_add_expr_to_block (&block, tmp);
4493 return gfc_finish_block (&block);