1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
32 #include "trans-stmt.h"
33 #include "trans-types.h"
34 #include "trans-array.h"
35 #include "trans-const.h"
37 #include "dependency.h"
/* Linked-list node describing one FORALL iterator.
   NOTE(review): this numbered listing elides most of the struct bodies
   (original lines 40-44, 46-48, 50-55, 57-59 are missing), so only the
   chain pointers are visible here.  */
39 typedef struct iter_info
45 struct iter_info *next;
/* Nesting information for a FORALL construct; prev_nest links to the
   enclosing FORALL level.  */
49 typedef struct forall_info
56 struct forall_info *prev_nest;
/* Forward declaration: WHERE translation is mutually recursive with the
   FORALL machinery above.  */
60 static void gfc_trans_where_2 (gfc_code *, tree, bool,
61 forall_info *, stmtblock_t *);
63 /* Translate a F95 label number to a LABEL_EXPR. */
/* CODE->here is the statement label; emit the corresponding GCC label.
   NOTE(review): the return type and braces are elided from this listing.  */
66 gfc_trans_label_here (gfc_code * code)
68 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
72 /* Given a variable expression which has been ASSIGNed to, find the decl
73 containing the auxiliary variables. For variables in common blocks this
77 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only variables carrying the ASSIGN attribute are legal here.  */
79 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
80 gfc_conv_expr (se, expr);
81 /* Deals with variable in common block. Get the field declaration. */
82 if (TREE_CODE (se->expr) == COMPONENT_REF)
83 se->expr = TREE_OPERAND (se->expr, 1);
84 /* Deals with dummy argument. Get the parameter declaration. */
85 else if (TREE_CODE (se->expr) == INDIRECT_REF)
86 se->expr = TREE_OPERAND (se->expr, 0);
89 /* Translate a label assignment statement. */
/* Stores either the address of a target label (len = -1) or a FORMAT
   string (len = string length) into the auxiliary variables attached to
   the ASSIGN variable.  */
92 gfc_trans_label_assign (gfc_code * code)
101 /* Start a new block. */
102 gfc_init_se (&se, NULL);
103 gfc_start_block (&se.pre);
104 gfc_conv_label_variable (&se, code->expr1);
106 len = GFC_DECL_STRING_LEN (se.expr);
107 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
109 label_tree = gfc_get_label_decl (code->label1);
111 if (code->label1->defined == ST_LABEL_TARGET)
/* Branch target: store its address and flag it with length -1.  */
113 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
114 len_tree = integer_minus_one_node;
/* Otherwise the label names a FORMAT; store the format string and its
   length instead.  */
118 gfc_expr *format = code->label1->format;
120 label_len = format->value.character.length;
121 len_tree = build_int_cst (NULL_TREE, label_len);
122 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
123 format->value.character.string);
124 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
/* Commit both auxiliary variables.  */
127 gfc_add_modify (&se.pre, len, len_tree);
128 gfc_add_modify (&se.pre, addr, label_tree);
130 return gfc_finish_block (&se.pre);
133 /* Translate a GOTO statement. */
/* Handles both the plain GOTO (fixed label) and the assigned GOTO
   (computed jump through an ASSIGN variable).  */
136 gfc_trans_goto (gfc_code * code)
138 locus loc = code->loc;
/* Ordinary GOTO: jump straight to the label.  */
144 if (code->label1 != NULL)
145 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
/* Assigned GOTO.  */
148 gfc_init_se (&se, NULL);
149 gfc_start_block (&se.pre);
150 gfc_conv_label_variable (&se, code->expr1);
/* A stored length of -1 marks a label address (see
   gfc_trans_label_assign); anything else means the variable holds a
   FORMAT and jumping through it is a runtime error.  */
151 tmp = GFC_DECL_STRING_LEN (se.expr);
152 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
153 build_int_cst (TREE_TYPE (tmp), -1));
154 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
155 "Assigned label is not a target label");
157 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
159 /* We're going to ignore a label list. It does not really change the
160 statement's semantics (because it is just a further restriction on
161 what's legal code); before, we were comparing label addresses here, but
162 that's a very fragile business and may break with optimization. So
165 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
166 gfc_add_expr_to_block (&se.pre, target);
167 return gfc_finish_block (&se.pre);
171 /* Translate an ENTRY statement. Just adds a label for this entry point. */
173 gfc_trans_entry (gfc_code * code)
175 return build1_v (LABEL_EXPR, code->ext.entry->label);
179 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
180 elemental subroutines. Make temporaries for output arguments if any such
181 dependencies are found. Output arguments are chosen because internal_unpack
182 can be used, as is, to copy the result back to the variable. */
184 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
185 gfc_symbol * sym, gfc_actual_arglist * arg,
186 gfc_dep_check check_variable)
188 gfc_actual_arglist *arg0;
190 gfc_formal_arglist *formal;
191 gfc_loopinfo tmp_loop;
/* Nothing to do unless the call was scalarized.  */
203 if (loopse->ss == NULL)
208 formal = sym->formal;
210 /* Loop over all the arguments testing for dependencies. */
211 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
217 /* Obtain the info structure for the current argument. */
219 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
223 info = &ss->data.info;
227 /* If there is a dependency, create a temporary and use it
228 instead of the variable. */
229 fsym = formal ? formal->sym : NULL;
230 if (e->expr_type == EXPR_VARIABLE
232 && fsym->attr.intent != INTENT_IN
233 && gfc_check_fncall_dependency (e, fsym->attr.intent,
234 sym, arg0, check_variable))
236 tree initial, temptype;
237 stmtblock_t temp_post;
239 /* Make a local loopinfo for the temporary creation, so that
240 none of the other ss->info's have to be renormalized. */
241 gfc_init_loopinfo (&tmp_loop);
242 for (n = 0; n < info->dimen; n++)
244 tmp_loop.to[n] = loopse->loop->to[n];
245 tmp_loop.from[n] = loopse->loop->from[n];
246 tmp_loop.order[n] = loopse->loop->order[n];
249 /* Obtain the argument descriptor for unpacking. */
250 gfc_init_se (&parmse, NULL);
251 parmse.want_pointer = 1;
253 /* The scalarizer introduces some specific peculiarities when
254 handling elemental subroutines; the stride can be needed up to
255 the dim_array - 1, rather than dim_loop - 1 to calculate
256 offsets outside the loop. For this reason, we make sure that
257 the descriptor has the dimensionality of the array by converting
258 trailing elements into ranges with end = start. */
259 for (ref = e->ref; ref; ref = ref->next)
260 if (ref->type == REF_ARRAY && ref->u.ar.type == AR_SECTION)
265 bool seen_range = false;
266 for (n = 0; n < ref->u.ar.dimen; n++)
268 if (ref->u.ar.dimen_type[n] == DIMEN_RANGE)
272 || ref->u.ar.dimen_type[n] != DIMEN_ELEMENT)
/* Promote a trailing element subscript to a degenerate range
   (start:start) so the descriptor keeps full rank.  */
275 ref->u.ar.end[n] = gfc_copy_expr (ref->u.ar.start[n]);
276 ref->u.ar.dimen_type[n] = DIMEN_RANGE;
280 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
281 gfc_add_block_to_block (&se->pre, &parmse.pre);
283 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
284 initialize the array temporary with a copy of the values. */
285 if (fsym->attr.intent == INTENT_INOUT
286 || (fsym->ts.type ==BT_DERIVED
287 && fsym->attr.intent == INTENT_OUT))
288 initial = parmse.expr;
292 /* Find the type of the temporary to create; we don't use the type
293 of e itself as this breaks for subcomponent-references in e (where
294 the type of e is that of the final reference, but parmse.expr's
295 type corresponds to the full derived-type). */
296 /* TODO: Fix this somehow so we don't need a temporary of the whole
297 array but instead only the components referenced. */
298 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
299 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
300 temptype = TREE_TYPE (temptype);
301 temptype = gfc_get_element_type (temptype);
303 /* Generate the temporary. Cleaning up the temporary should be the
304 very last thing done, so we add the code to a new block and add it
305 to se->post as last instructions. */
306 size = gfc_create_var (gfc_array_index_type, NULL);
307 data = gfc_create_var (pvoid_type_node, NULL);
308 gfc_init_block (&temp_post);
309 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
310 &tmp_loop, info, temptype,
/* Remember the temporary's size and data pointer for the copy-back.  */
314 gfc_add_modify (&se->pre, size, tmp);
315 tmp = fold_convert (pvoid_type_node, info->data);
316 gfc_add_modify (&se->pre, data, tmp);
318 /* Calculate the offset for the temporary. */
319 offset = gfc_index_zero_node;
320 for (n = 0; n < info->dimen; n++)
322 tmp = gfc_conv_descriptor_stride_get (info->descriptor,
324 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
325 loopse->loop->from[n], tmp);
326 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
329 info->offset = gfc_create_var (gfc_array_index_type, NULL);
330 gfc_add_modify (&se->pre, info->offset, offset);
332 /* Copy the result back using unpack. */
333 tmp = build_call_expr_loc (input_location,
334 gfor_fndecl_in_unpack, 2, parmse.expr, data);
335 gfc_add_expr_to_block (&se->post, tmp);
337 /* parmse.pre is already added above. */
338 gfc_add_block_to_block (&se->post, &parmse.post);
339 gfc_add_block_to_block (&se->post, &temp_post);
345 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
/* MASK/COUNT1/INVERT are used when the call appears inside a masked
   context (operator assignment inside WHERE/FORALL); DEPENDENCY_CHECK
   selects argument dependency checking for operator assignment.  */
348 gfc_trans_call (gfc_code * code, bool dependency_check,
349 tree mask, tree count1, bool invert)
353 int has_alternate_specifier;
354 gfc_dep_check check_variable;
355 tree index = NULL_TREE;
356 tree maskexpr = NULL_TREE;
359 /* A CALL starts a new block because the actual arguments may have to
360 be evaluated first. */
361 gfc_init_se (&se, NULL);
362 gfc_start_block (&se.pre);
364 gcc_assert (code->resolved_sym);
366 ss = gfc_ss_terminator;
367 if (code->resolved_sym->attr.elemental)
368 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
370 /* Is not an elemental subroutine call with array valued arguments. */
371 if (ss == gfc_ss_terminator)
374 /* Translate the call. */
375 has_alternate_specifier
376 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
377 code->expr1, NULL_TREE);
379 /* A subroutine without side-effect, by definition, does nothing! */
380 TREE_SIDE_EFFECTS (se.expr) = 1;
382 /* Chain the pieces together and return the block. */
383 if (has_alternate_specifier)
/* An alternate return: store the specifier into the variable the
   following SELECT dispatches on.  */
385 gfc_code *select_code;
387 select_code = code->next;
388 gcc_assert(select_code->op == EXEC_SELECT);
389 sym = select_code->expr1->symtree->n.sym;
390 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
391 if (sym->backend_decl == NULL)
392 sym->backend_decl = gfc_get_symbol_decl (sym);
393 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
396 gfc_add_expr_to_block (&se.pre, se.expr);
398 gfc_add_block_to_block (&se.pre, &se.post);
403 /* An elemental subroutine call with array valued arguments has
411 /* gfc_walk_elemental_function_args renders the ss chain in the
412 reverse order to the actual argument order. */
413 ss = gfc_reverse_ss (ss);
415 /* Initialize the loop. */
416 gfc_init_se (&loopse, NULL);
417 gfc_init_loopinfo (&loop);
418 gfc_add_ss_to_loop (&loop, ss);
420 gfc_conv_ss_startstride (&loop);
421 /* TODO: gfc_conv_loop_setup generates a temporary for vector
422 subscripts. This could be prevented in the elemental case
423 as temporaries are handled separatedly
424 (below in gfc_conv_elemental_dependencies). */
425 gfc_conv_loop_setup (&loop, &code->expr1->where);
426 gfc_mark_ss_chain_used (ss, 1);
428 /* Convert the arguments, checking for dependencies. */
429 gfc_copy_loopinfo_to_se (&loopse, &loop);
432 /* For operator assignment, do dependency checking. */
433 if (dependency_check)
434 check_variable = ELEM_CHECK_VARIABLE;
436 check_variable = ELEM_DONT_CHECK_VARIABLE;
438 gfc_init_se (&depse, NULL);
439 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
440 code->ext.actual, check_variable);
442 gfc_add_block_to_block (&loop.pre, &depse.pre);
443 gfc_add_block_to_block (&loop.post, &depse.post);
445 /* Generate the loop body. */
446 gfc_start_scalarized_body (&loop, &body);
447 gfc_init_block (&block);
451 /* Form the mask expression according to the mask. */
453 maskexpr = gfc_build_array_ref (mask, index, NULL);
/* INVERT selects the complement of the mask (ELSEWHERE).  */
455 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
459 /* Add the subroutine call to the block. */
460 gfc_conv_procedure_call (&loopse, code->resolved_sym,
461 code->ext.actual, code->expr1,
/* Masked call: guard the call with the mask and bump the mask index.  */
466 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
467 build_empty_stmt (input_location));
468 gfc_add_expr_to_block (&loopse.pre, tmp);
469 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
470 count1, gfc_index_one_node);
471 gfc_add_modify (&loopse.pre, count1, tmp);
474 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
476 gfc_add_block_to_block (&block, &loopse.pre);
477 gfc_add_block_to_block (&block, &loopse.post);
479 /* Finish up the loop block and the loop. */
480 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
481 gfc_trans_scalarizing_loops (&loop, &body);
482 gfc_add_block_to_block (&se.pre, &loop.pre);
483 gfc_add_block_to_block (&se.pre, &loop.post);
484 gfc_add_block_to_block (&se.pre, &se.post);
485 gfc_cleanup_loop (&loop);
488 return gfc_finish_block (&se.pre);
492 /* Translate the RETURN statement. */
495 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
503 /* If code->expr is not NULL, this return statement must appear
504 in a subroutine and current_fake_result_decl has already
507 result = gfc_get_fake_result_decl (NULL, 0);
/* No fake result decl: an alternate return specifier with no matching
   '*' dummy — warn and fall back to a plain jump to the return label.  */
510 gfc_warning ("An alternate return at %L without a * dummy argument",
511 &code->expr1->where);
512 return build1_v (GOTO_EXPR, gfc_get_return_label ());
515 /* Start a new block for this statement. */
516 gfc_init_se (&se, NULL);
517 gfc_start_block (&se.pre);
/* Store the alternate-return value into the fake result, then jump to
   the function's return label.  */
519 gfc_conv_expr (&se, code->expr1);
521 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
522 fold_convert (TREE_TYPE (result), se.expr));
523 gfc_add_expr_to_block (&se.pre, tmp);
525 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
526 gfc_add_expr_to_block (&se.pre, tmp);
527 gfc_add_block_to_block (&se.pre, &se.post);
528 return gfc_finish_block (&se.pre);
/* RETURN with no expression: just jump to the return label.  */
531 return build1_v (GOTO_EXPR, gfc_get_return_label ());
535 /* Translate the PAUSE statement. We have to translate this statement
536 to a runtime library call. */
539 gfc_trans_pause (gfc_code * code)
541 tree gfc_int4_type_node = gfc_get_int_type (4);
545 /* Start a new block for this statement. */
546 gfc_init_se (&se, NULL);
547 gfc_start_block (&se.pre);
/* Three forms: bare PAUSE, PAUSE <integer>, PAUSE <character>.  */
550 if (code->expr1 == NULL)
/* Bare PAUSE: call the string variant with a NULL string.  */
552 tmp = build_int_cst (gfc_int4_type_node, 0);
553 tmp = build_call_expr_loc (input_location,
554 gfor_fndecl_pause_string, 2,
555 build_int_cst (pchar_type_node, 0), tmp);
557 else if (code->expr1->ts.type == BT_INTEGER)
559 gfc_conv_expr (&se, code->expr1);
560 tmp = build_call_expr_loc (input_location,
561 gfor_fndecl_pause_numeric, 1,
562 fold_convert (gfc_int4_type_node, se.expr));
/* Character stop-code: pass address and length.  */
566 gfc_conv_expr_reference (&se, code->expr1);
567 tmp = build_call_expr_loc (input_location,
568 gfor_fndecl_pause_string, 2,
569 se.expr, se.string_length);
572 gfc_add_expr_to_block (&se.pre, tmp);
574 gfc_add_block_to_block (&se.pre, &se.post);
576 return gfc_finish_block (&se.pre);
580 /* Translate the STOP statement. We have to translate this statement
581 to a runtime library call. */
/* ERROR_STOP selects the ERROR STOP library entry points (F2008)
   instead of the plain STOP ones; the structure mirrors
   gfc_trans_pause above.  */
584 gfc_trans_stop (gfc_code *code, bool error_stop)
586 tree gfc_int4_type_node = gfc_get_int_type (4);
590 /* Start a new block for this statement. */
591 gfc_init_se (&se, NULL);
592 gfc_start_block (&se.pre);
594 if (code->expr1 == NULL)
/* Bare STOP: string variant with a NULL string.  */
596 tmp = build_int_cst (gfc_int4_type_node, 0);
597 tmp = build_call_expr_loc (input_location,
598 error_stop ? gfor_fndecl_error_stop_string
599 : gfor_fndecl_stop_string,
600 2, build_int_cst (pchar_type_node, 0), tmp);
602 else if (code->expr1->ts.type == BT_INTEGER)
604 gfc_conv_expr (&se, code->expr1);
605 tmp = build_call_expr_loc (input_location,
606 error_stop ? gfor_fndecl_error_stop_numeric
607 : gfor_fndecl_stop_numeric, 1,
608 fold_convert (gfc_int4_type_node, se.expr));
/* Character stop-code: pass address and length.  */
612 gfc_conv_expr_reference (&se, code->expr1);
613 tmp = build_call_expr_loc (input_location,
614 error_stop ? gfor_fndecl_error_stop_string
615 : gfor_fndecl_stop_string,
616 2, se.expr, se.string_length);
619 gfc_add_expr_to_block (&se.pre, tmp);
621 gfc_add_block_to_block (&se.pre, &se.post);
623 return gfc_finish_block (&se.pre);
/* Translate SYNC ALL / SYNC MEMORY / SYNC IMAGES.  Only generates code
   when bounds checking of the image set or a STAT= variable requires
   it; TYPE is otherwise unused.  */
628 gfc_trans_sync (gfc_code *code, gfc_exec_op type __attribute__ ((unused)))
632 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
634 gfc_init_se (&se, NULL);
635 gfc_start_block (&se.pre);
638 /* Check SYNC IMAGES(imageset) for valid image index.
639 FIXME: Add a check for image-set arrays. */
640 if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
641 && code->expr1->rank == 0)
/* NOTE(review): this check compares the image number against 1,
   consistent with a single-image execution model — confirm against
   the coarray runtime in use.  */
644 gfc_conv_expr (&se, code->expr1);
645 cond = fold_build2 (NE_EXPR, boolean_type_node, se.expr,
646 build_int_cst (TREE_TYPE (se.expr), 1));
647 gfc_trans_runtime_check (true, false, cond, &se.pre,
648 &code->expr1->where, "Invalid image number "
650 fold_convert (integer_type_node, se.expr));
653 /* If STAT is present, set it to zero. */
656 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
657 gfc_conv_expr (&se, code->expr2);
658 gfc_add_modify (&se.pre, se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
661 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
662 return gfc_finish_block (&se.pre);
668 /* Generate GENERIC for the IF construct. This function also deals with
669 the simple IF statement, because the front end translates the IF
670 statement into an IF construct.
702 where COND_S is the simplified version of the predicate. PRE_COND_S
703 are the pre side-effects produced by the translation of the
705 We need to build the chain recursively otherwise we run into
706 problems with folding incomplete statements. */
709 gfc_trans_if_1 (gfc_code * code)
714 /* Check for an unconditional ELSE clause. */
716 return gfc_trans_code (code->next);
718 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
719 gfc_init_se (&if_se, NULL);
720 gfc_start_block (&if_se.pre);
722 /* Calculate the IF condition expression. */
723 gfc_conv_expr_val (&if_se, code->expr1);
725 /* Translate the THEN clause. */
726 stmt = gfc_trans_code (code->next);
728 /* Translate the ELSE clause.  Recurse to build the ELSE IF chain.  */
730 elsestmt = gfc_trans_if_1 (code->block);
732 elsestmt = build_empty_stmt (input_location);
734 /* Build the condition expression and add it to the condition block. */
735 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
737 gfc_add_expr_to_block (&if_se.pre, stmt);
739 /* Finish off this statement. */
740 return gfc_finish_block (&if_se.pre);
/* Entry point for IF translation; delegates to gfc_trans_if_1.  */
744 gfc_trans_if (gfc_code * code)
746 /* Ignore the top EXEC_IF, it only announces an IF construct. The
747 actual code we must translate is in code->block. */
749 return gfc_trans_if_1 (code->block);
753 /* Translate an arithmetic IF expression.
755 IF (cond) label1, label2, label3 translates to
767 An optimized version can be generated in case of equal labels.
768 E.g., if label1 is equal to label2, we can translate it to
777 gfc_trans_arithmetic_if (gfc_code * code)
785 /* Start a new block. */
786 gfc_init_se (&se, NULL);
787 gfc_start_block (&se.pre);
789 /* Pre-evaluate COND. */
790 gfc_conv_expr_val (&se, code->expr1);
791 se.expr = gfc_evaluate_now (se.expr, &se.pre);
793 /* Build something to compare with. */
794 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
796 if (code->label1->value != code->label2->value)
798 /* If (cond < 0) take branch1 else take branch2.
799 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
800 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
801 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
/* If label1 == label3 only the sign of zero distinguishes the
   branches, so NE suffices.  */
803 if (code->label1->value != code->label3->value)
804 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
806 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
808 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
/* label1 == label2: negative and zero share a target.  */
811 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
813 if (code->label1->value != code->label3->value
814 && code->label2->value != code->label3->value)
816 /* if (cond <= 0) take branch1 else take branch2. */
817 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
818 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
819 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
822 /* Append the COND_EXPR to the evaluation of COND, and return. */
823 gfc_add_expr_to_block (&se.pre, branch1);
824 return gfc_finish_block (&se.pre);
828 /* Translate a CRITICAL block. */
/* NOTE(review): as shown, this only translates the body — no locking
   calls are visible in this listing; confirm against the elided lines
   whether the runtime lock/unlock is emitted elsewhere.  */
830 gfc_trans_critical (gfc_code *code)
835 gfc_start_block (&block);
836 tmp = gfc_trans_code (code->block->next);
837 gfc_add_expr_to_block (&block, tmp);
839 return gfc_finish_block (&block);
843 /* Translate a BLOCK construct. This is basically what we would do for a
847 gfc_trans_block_construct (gfc_code* code)
859 gcc_assert (!sym->tlink);
862 gfc_start_block (&body);
/* Declare the BLOCK's local variables, translate its body, then wrap
   with deferred-variable setup/cleanup.  */
863 gfc_process_block_locals (ns);
865 tmp = gfc_trans_code (ns->code);
866 tmp = gfc_trans_deferred_vars (sym, tmp);
868 gfc_add_expr_to_block (&body, tmp);
869 return gfc_finish_block (&body);
873 /* Translate the simple DO construct. This is where the loop variable has
874 integer type and step +-1. We can't use this in the general case
875 because integer overflow and floating point errors could give incorrect
877 We translate a do loop from:
879 DO dovar = from, to, step
885 [Evaluate loop bounds and step]
887 if ((step > 0) ? (dovar <= to) : (dovar => to))
893 cond = (dovar == to);
895 if (cond) goto end_label;
900 This helps the optimizers by avoiding the extra induction variable
901 used in the general case. */
904 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
905 tree from, tree to, tree step, tree exit_cond)
911 tree saved_dovar = NULL;
915 type = TREE_TYPE (dovar);
917 /* Initialize the DO variable: dovar = from. */
918 gfc_add_modify (pblock, dovar, from);
920 /* Save value for do-tinkering checking. */
921 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
923 saved_dovar = gfc_create_var (type, ".saved_dovar");
924 gfc_add_modify (pblock, saved_dovar, dovar);
927 /* Cycle and exit statements are implemented with gotos. */
928 cycle_label = gfc_build_label_decl (NULL_TREE);
929 exit_label = gfc_build_label_decl (NULL_TREE);
931 /* Put the labels where they can be found later. See gfc_trans_do(). */
932 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
935 gfc_start_block (&body);
937 /* Main loop body. */
938 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
939 gfc_add_expr_to_block (&body, tmp);
941 /* Label for cycle statements (if needed). */
942 if (TREE_USED (cycle_label))
944 tmp = build1_v (LABEL_EXPR, cycle_label);
945 gfc_add_expr_to_block (&body, tmp);
948 /* Check whether someone has modified the loop variable. */
949 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
951 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
952 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
953 "Loop variable has been modified");
956 /* Exit the loop if there is an I/O result condition or error. */
959 tmp = build1_v (GOTO_EXPR, exit_label);
960 tmp = fold_build3 (COND_EXPR, void_type_node, exit_cond, tmp,
961 build_empty_stmt (input_location));
962 gfc_add_expr_to_block (&body, tmp);
965 /* Evaluate the loop condition.  Testing dovar == to BEFORE the
966 increment avoids overflowing dovar past TO.  */
966 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
967 cond = gfc_evaluate_now (cond, &body);
969 /* Increment the loop variable. */
970 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
971 gfc_add_modify (&body, dovar, tmp);
973 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
974 gfc_add_modify (&body, saved_dovar, dovar);
/* Leave the loop when the pre-increment value equalled TO.  */
977 tmp = build1_v (GOTO_EXPR, exit_label);
978 TREE_USED (exit_label) = 1;
979 tmp = fold_build3 (COND_EXPR, void_type_node,
980 cond, tmp, build_empty_stmt (input_location));
981 gfc_add_expr_to_block (&body, tmp);
983 /* Finish the loop body. */
984 tmp = gfc_finish_block (&body);
985 tmp = build1_v (LOOP_EXPR, tmp);
987 /* Only execute the loop if the number of iterations is positive. */
988 if (tree_int_cst_sgn (step) > 0)
989 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
991 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
992 tmp = fold_build3 (COND_EXPR, void_type_node,
993 cond, tmp, build_empty_stmt (input_location));
994 gfc_add_expr_to_block (pblock, tmp);
996 /* Add the exit label. */
997 tmp = build1_v (LABEL_EXPR, exit_label);
998 gfc_add_expr_to_block (pblock, tmp);
1000 return gfc_finish_block (pblock);
1003 /* Translate the DO construct. This obviously is one of the most
1004 important ones to get right with any compiler, but especially
1007 We special case some loop forms as described in gfc_trans_simple_do.
1008 For other cases we implement them with a separate loop count,
1009 as described in the standard.
1011 We translate a do loop from:
1013 DO dovar = from, to, step
1019 [evaluate loop bounds and step]
1020 empty = (step > 0 ? to < from : to > from);
1021 countm1 = (to - from) / step;
1023 if (empty) goto exit_label;
1029 if (countm1 ==0) goto exit_label;
1034 countm1 is an unsigned integer. It is equal to the loop count minus one,
1035 because the loop count itself can overflow. */
1038 gfc_trans_do (gfc_code * code, tree exit_cond)
1042 tree saved_dovar = NULL;
1057 gfc_start_block (&block);
1059 /* Evaluate all the expressions in the iterator. */
1060 gfc_init_se (&se, NULL);
1061 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1062 gfc_add_block_to_block (&block, &se.pre);
1064 type = TREE_TYPE (dovar);
1066 gfc_init_se (&se, NULL);
1067 gfc_conv_expr_val (&se, code->ext.iterator->start);
1068 gfc_add_block_to_block (&block, &se.pre);
1069 from = gfc_evaluate_now (se.expr, &block);
1071 gfc_init_se (&se, NULL);
1072 gfc_conv_expr_val (&se, code->ext.iterator->end);
1073 gfc_add_block_to_block (&block, &se.pre);
1074 to = gfc_evaluate_now (se.expr, &block);
1076 gfc_init_se (&se, NULL);
1077 gfc_conv_expr_val (&se, code->ext.iterator->step);
1078 gfc_add_block_to_block (&block, &se.pre);
1079 step = gfc_evaluate_now (se.expr, &block);
/* With -fcheck=do, reject a zero step at runtime.  */
1081 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1083 tmp = fold_build2 (EQ_EXPR, boolean_type_node, step,
1084 fold_convert (type, integer_zero_node));
1085 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1086 "DO step value is zero");
1089 /* Special case simple loops. */
1090 if (TREE_CODE (type) == INTEGER_TYPE
1091 && (integer_onep (step)
1092 || tree_int_cst_equal (step, integer_minus_one_node)))
1093 return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);
1095 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
1096 fold_convert (type, integer_zero_node));
/* countm1 is unsigned so that the iteration count minus one cannot
   overflow for any representable bounds.  */
1098 if (TREE_CODE (type) == INTEGER_TYPE)
1099 utype = unsigned_type_for (type);
1101 utype = unsigned_type_for (gfc_array_index_type);
1102 countm1 = gfc_create_var (utype, "countm1");
1104 /* Cycle and exit statements are implemented with gotos. */
1105 cycle_label = gfc_build_label_decl (NULL_TREE);
1106 exit_label = gfc_build_label_decl (NULL_TREE);
1107 TREE_USED (exit_label) = 1;
1109 /* Initialize the DO variable: dovar = from. */
1110 gfc_add_modify (&block, dovar, from);
1112 /* Save value for do-tinkering checking. */
1113 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1115 saved_dovar = gfc_create_var (type, ".saved_dovar");
1116 gfc_add_modify (&block, saved_dovar, dovar);
1119 /* Initialize loop count and jump to exit label if the loop is empty.
1120 This code is executed before we enter the loop body. We generate:
1121 step_sign = sign(1,step);
1132 countm1 = (to*step_sign - from*step_sign) / (step*step_sign);
1136 if (TREE_CODE (type) == INTEGER_TYPE)
1138 tree pos, neg, step_sign, to2, from2, step2;
1140 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1142 tmp = fold_build2 (LT_EXPR, boolean_type_node, step,
1143 build_int_cst (TREE_TYPE (step), 0));
1144 step_sign = fold_build3 (COND_EXPR, type, tmp,
1145 build_int_cst (type, -1),
1146 build_int_cst (type, 1));
/* Empty-loop tests: for positive step exit when to < from, for
   negative step exit when to > from.  */
1148 tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
1149 pos = fold_build3 (COND_EXPR, void_type_node, tmp,
1150 build1_v (GOTO_EXPR, exit_label),
1151 build_empty_stmt (input_location));
1153 tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
1154 neg = fold_build3 (COND_EXPR, void_type_node, tmp,
1155 build1_v (GOTO_EXPR, exit_label),
1156 build_empty_stmt (input_location));
1157 tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
1159 gfc_add_expr_to_block (&block, tmp);
1161 /* Calculate the loop count. to-from can overflow, so
1162 we cast to unsigned. */
/* Multiplying by step_sign normalizes to a positive step before the
   unsigned division.  */
1164 to2 = fold_build2 (MULT_EXPR, type, step_sign, to);
1165 from2 = fold_build2 (MULT_EXPR, type, step_sign, from);
1166 step2 = fold_build2 (MULT_EXPR, type, step_sign, step);
1167 step2 = fold_convert (utype, step2);
1168 tmp = fold_build2 (MINUS_EXPR, type, to2, from2);
1169 tmp = fold_convert (utype, tmp);
1170 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp, step2);
1171 tmp = fold_build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
1172 gfc_add_expr_to_block (&block, tmp);
/* Real-typed DO variable (legacy extension): compute the trip count
   in floating point and truncate.  */
1176 /* TODO: We could use the same width as the real type.
1177 This would probably cause more problems that it solves
1178 when we implement "long double" types. */
1180 tmp = fold_build2 (MINUS_EXPR, type, to, from);
1181 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
1182 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
1183 gfc_add_modify (&block, countm1, tmp);
1185 /* We need a special check for empty loops:
1186 empty = (step > 0 ? to < from : to > from); */
1187 tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
1188 fold_build2 (LT_EXPR, boolean_type_node, to, from),
1189 fold_build2 (GT_EXPR, boolean_type_node, to, from));
1190 /* If the loop is empty, go directly to the exit label. */
1191 tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
1192 build1_v (GOTO_EXPR, exit_label),
1193 build_empty_stmt (input_location));
1194 gfc_add_expr_to_block (&block, tmp);
1198 gfc_start_block (&body);
1200 /* Put these labels where they can be found later. We put the
1201 labels in a TREE_LIST node (because TREE_CHAIN is already
1202 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
1203 label in TREE_VALUE (backend_decl). */
1205 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1207 /* Main loop body. */
1208 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1209 gfc_add_expr_to_block (&body, tmp);
1211 /* Label for cycle statements (if needed). */
1212 if (TREE_USED (cycle_label))
1214 tmp = build1_v (LABEL_EXPR, cycle_label);
1215 gfc_add_expr_to_block (&body, tmp);
1218 /* Check whether someone has modified the loop variable. */
1219 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1221 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
1222 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1223 "Loop variable has been modified");
1226 /* Exit the loop if there is an I/O result condition or error. */
1229 tmp = build1_v (GOTO_EXPR, exit_label);
1230 tmp = fold_build3 (COND_EXPR, void_type_node, exit_cond, tmp,
1231 build_empty_stmt (input_location));
1232 gfc_add_expr_to_block (&body, tmp);
1235 /* Increment the loop variable. */
1236 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
1237 gfc_add_modify (&body, dovar, tmp);
1239 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1240 gfc_add_modify (&body, saved_dovar, dovar);
1242 /* End with the loop condition. Loop until countm1 == 0. */
1243 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
1244 build_int_cst (utype, 0));
1245 tmp = build1_v (GOTO_EXPR, exit_label);
1246 tmp = fold_build3 (COND_EXPR, void_type_node,
1247 cond, tmp, build_empty_stmt (input_location));
1248 gfc_add_expr_to_block (&body, tmp);
1250 /* Decrement the loop count.  Tested before the decrement, so the
1251 unsigned counter never wraps.  */
1251 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
1252 gfc_add_modify (&body, countm1, tmp);
1254 /* End of loop body. */
1255 tmp = gfc_finish_block (&body);
1257 /* The for loop itself. */
1258 tmp = build1_v (LOOP_EXPR, tmp);
1259 gfc_add_expr_to_block (&block, tmp);
1261 /* Add the exit label. */
1262 tmp = build1_v (LABEL_EXPR, exit_label);
1263 gfc_add_expr_to_block (&block, tmp);
1265 return gfc_finish_block (&block);
1269 /* Translate the DO WHILE construct.
1282 if (! cond) goto exit_label;
1288 Because the evaluation of the exit condition `cond' may have side
1289 effects, we can't do much for empty loop bodies. The backend optimizers
1290 should be smart enough to eliminate any dead loops. */
/* CODE is the DO WHILE statement; returns the translated loop as a tree. */
1293 gfc_trans_do_while (gfc_code * code)
1301 /* Everything we build here is part of the loop body. */
1302 gfc_start_block (&block);
1304 /* Cycle and exit statements are implemented with gotos. */
1305 cycle_label = gfc_build_label_decl (NULL_TREE);
1306 exit_label = gfc_build_label_decl (NULL_TREE);
1308 /* Put the labels where they can be found later. See gfc_trans_do(). */
1309 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1311 /* Create a GIMPLE version of the exit condition. */
1312 gfc_init_se (&cond, NULL);
1313 gfc_conv_expr_val (&cond, code->expr1);
1314 gfc_add_block_to_block (&block, &cond.pre)
/* Negate so the COND_EXPR branches to the exit when the Fortran
   condition is false. */;
1315 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1317 /* Build "IF (! cond) GOTO exit_label". */
1318 tmp = build1_v (GOTO_EXPR, exit_label);
1319 TREE_USED (exit_label) = 1;
1320 tmp = fold_build3 (COND_EXPR, void_type_node,
1321 cond.expr, tmp, build_empty_stmt (input_location));
1322 gfc_add_expr_to_block (&block, tmp);
1324 /* The main body of the loop. */
1325 tmp = gfc_trans_code (code->block->next);
1326 gfc_add_expr_to_block (&block, tmp);
1328 /* Label for cycle statements (if needed). */
1329 if (TREE_USED (cycle_label))
1331 tmp = build1_v (LABEL_EXPR, cycle_label);
1332 gfc_add_expr_to_block (&block, tmp);
1335 /* End of loop body. */
1336 tmp = gfc_finish_block (&block);
1338 gfc_init_block (&block);
1339 /* Build the loop. */
1340 tmp = build1_v (LOOP_EXPR, tmp);
1341 gfc_add_expr_to_block (&block, tmp);
1343 /* Add the exit label. */
1344 tmp = build1_v (LABEL_EXPR, exit_label);
1345 gfc_add_expr_to_block (&block, tmp);
1347 return gfc_finish_block (&block);
1351 /* Translate the SELECT CASE construct for INTEGER case expressions,
1352 without killing all potential optimizations. The problem is that
1353 Fortran allows unbounded cases, but the back-end does not, so we
1354 need to intercept those before we enter the equivalent SWITCH_EXPR
1357 For example, we translate this,
1360 CASE (:100,101,105:115)
1370 to the GENERIC equivalent,
1374 case (minimum value for typeof(expr) ... 100:
1380 case 200 ... (maximum value for typeof(expr):
1397 gfc_trans_integer_select (gfc_code * code)
1407 gfc_start_block (&block);
1409 /* Calculate the switch expression. */
1410 gfc_init_se (&se, NULL);
1411 gfc_conv_expr_val (&se, code->expr1);
1412 gfc_add_block_to_block (&block, &se.pre);
/* Label jumped to by each case body in lieu of a BREAK statement. */
1414 end_label = gfc_build_label_decl (NULL_TREE);
1416 gfc_init_block (&body);
/* Walk every CASE block of the construct, and within it every case
   range in its case list. */
1418 for (c = code->block; c; c = c->block)
1420 for (cp = c->ext.case_list; cp; cp = cp->next)
1425 /* Assume it's the default case. */
1426 low = high = NULL_TREE;
1430 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1433 /* If there's only a lower bound, set the high bound to the
1434 maximum value of the case expression. */
1436 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1441 /* Three cases are possible here:
1443 1) There is no lower bound, e.g. CASE (:N).
1444 2) There is a lower bound .NE. high bound, that is
1445 a case range, e.g. CASE (N:M) where M>N (we make
1446 sure that M>N during type resolution).
1447 3) There is a lower bound, and it has the same value
1448 as the high bound, e.g. CASE (N:N). This is our
1449 internal representation of CASE(N).
1451 In the first and second case, we need to set a value for
1452 high. In the third case, we don't because the GCC middle
1453 end represents a single case value by just letting high be
1454 a NULL_TREE. We can't do that because we need to be able
1455 to represent unbounded cases. */
1459 && mpz_cmp (cp->low->value.integer,
1460 cp->high->value.integer) != 0))
1461 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1464 /* Unbounded case. */
1466 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1469 /* Build a label. */
1470 label = gfc_build_label_decl (NULL_TREE);
1472 /* Add this case label.
1473 Add parameter 'label', make it match GCC backend. */
1474 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1476 gfc_add_expr_to_block (&body, tmp);
1479 /* Add the statements for this case. */
1480 tmp = gfc_trans_code (c->next);
1481 gfc_add_expr_to_block (&body, tmp);
1483 /* Break to the end of the construct. */
1484 tmp = build1_v (GOTO_EXPR, end_label);
1485 gfc_add_expr_to_block (&body, tmp);
/* Wrap the accumulated case labels and bodies in a SWITCH_EXPR,
   followed by the common end label the per-case GOTOs target. */
1488 tmp = gfc_finish_block (&body);
1489 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1490 gfc_add_expr_to_block (&block, tmp);
1492 tmp = build1_v (LABEL_EXPR, end_label);
1493 gfc_add_expr_to_block (&block, tmp);
1495 return gfc_finish_block (&block);
1499 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1501 There are only two cases possible here, even though the standard
1502 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1503 .FALSE., and DEFAULT.
1505 We never generate more than two blocks here. Instead, we always
1506 try to eliminate the DEFAULT case. This way, we can translate this
1507 kind of SELECT construct to a simple
1511 expression in GENERIC. */
1514 gfc_trans_logical_select (gfc_code * code)
/* t/f/d track the CASE blocks for .TRUE., .FALSE. and DEFAULT. */
1517 gfc_code *t, *f, *d;
1522 /* Assume we don't have any cases at all. */
1525 /* Now see which ones we actually do have. We can have at most two
1526 cases in a single case list: one for .TRUE. and one for .FALSE.
1527 The default case is always separate. If the cases for .TRUE. and
1528 .FALSE. are in the same case list, the block for that case list
1529 always executed, and we don't generate code a COND_EXPR. */
1530 for (c = code->block; c; c = c->block)
1532 for (cp = c->ext.case_list; cp; cp = cp->next)
1536 if (cp->low->value.logical == 0) /* .FALSE. */
1538 else /* if (cp->value.logical != 0), thus .TRUE. */
1546 /* Start a new block. */
1547 gfc_start_block (&block);
1549 /* Calculate the switch expression. We always need to do this
1550 because it may have side effects. */
1551 gfc_init_se (&se, NULL);
1552 gfc_conv_expr_val (&se, code->expr1);
1553 gfc_add_block_to_block (&block, &se.pre);
1555 if (t == f && t != NULL)
1557 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1558 translate the code for these cases, append it to the current
1560 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1564 tree true_tree, false_tree, stmt;
/* Missing branches default to an empty statement. */
1566 true_tree = build_empty_stmt (input_location);
1567 false_tree = build_empty_stmt (input_location);
1569 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1570 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1571 make the missing case the default case. */
1572 if (t != NULL && f != NULL)
1582 /* Translate the code for each of these blocks, and append it to
1583 the current block. */
1585 true_tree = gfc_trans_code (t->next);
1588 false_tree = gfc_trans_code (f->next);
/* Emit the whole construct as a single COND_EXPR on the selector. */
1590 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1591 true_tree, false_tree);
1592 gfc_add_expr_to_block (&block, stmt);
1595 return gfc_finish_block (&block);
1599 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1600 Instead of generating compares and jumps, it is far simpler to
1601 generate a data structure describing the cases in order and call a
1602 library subroutine that locates the right case.
1603 This is particularly true because this is the only case where we
1604 might have to dispose of a temporary.
1605 The library subroutine returns a pointer to jump to or NULL if no
1606 branches are to be taken. */
1609 gfc_trans_character_select (gfc_code *code)
1611 tree init, end_label, tmp, type, case_num, label, fndecl;
1612 stmtblock_t block, body;
1617 VEC(constructor_elt,gc) *inits = NULL;
1619 /* The jump table types are stored in static variables to avoid
1620 constructing them from scratch every single time. */
/* Index 0/1 selects the variant for character kind 1 vs kind 4. */
1621 static tree select_struct[2];
1622 static tree ss_string1[2], ss_string1_len[2];
1623 static tree ss_string2[2], ss_string2_len[2];
1624 static tree ss_target[2];
1626 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
1628 if (code->expr1->ts.kind == 1)
1630 else if (code->expr1->ts.kind == 4)
/* Lazily build the jump-table record type for this kind. */
1635 if (select_struct[k] == NULL)
1637 select_struct[k] = make_node (RECORD_TYPE);
1639 if (code->expr1->ts.kind == 1)
1640 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1641 else if (code->expr1->ts.kind == 4)
1642 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
/* Helper to append one named field to the jump-table struct and
   remember its FIELD_DECL in the matching ss_* cache. */
1647 #define ADD_FIELD(NAME, TYPE) \
1648 ss_##NAME[k] = gfc_add_field_to_struct \
1649 (&(TYPE_FIELDS (select_struct[k])), select_struct[k], \
1650 get_identifier (stringize(NAME)), TYPE)
1652 ADD_FIELD (string1, pchartype);
1653 ADD_FIELD (string1_len, gfc_charlen_type_node);
1655 ADD_FIELD (string2, pchartype);
1656 ADD_FIELD (string2_len, gfc_charlen_type_node);
1658 ADD_FIELD (target, integer_type_node);
1661 gfc_finish_type (select_struct[k]);
/* Walk to the leftmost case, then count the ordered cases. */
1664 cp = code->block->ext.case_list;
1665 while (cp->left != NULL)
1669 for (d = cp; d; d = d->right)
1672 end_label = gfc_build_label_decl (NULL_TREE);
1674 /* Generate the body */
1675 gfc_start_block (&block);
1676 gfc_init_block (&body);
/* One CASE_LABEL_EXPR per case, keyed by its ordinal number d->n,
   which is what the library routine returns at run time. */
1678 for (c = code->block; c; c = c->block)
1680 for (d = c->ext.case_list; d; d = d->next)
1682 label = gfc_build_label_decl (NULL_TREE);
1683 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1684 build_int_cst (NULL_TREE, d->n),
1685 build_int_cst (NULL_TREE, d->n), label);
1686 gfc_add_expr_to_block (&body, tmp);
1689 tmp = gfc_trans_code (c->next);
1690 gfc_add_expr_to_block (&body, tmp);
1692 tmp = build1_v (GOTO_EXPR, end_label);
1693 gfc_add_expr_to_block (&body, tmp);
1696 /* Generate the structure describing the branches */
1697 for(d = cp; d; d = d->right)
1699 VEC(constructor_elt,gc) *node = NULL;
1701 gfc_init_se (&se, NULL);
/* Unbounded low end: NULL string pointer with zero length. */
1705 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node);
1706 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node);
1710 gfc_conv_expr_reference (&se, d->low);
1712 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr);
1713 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length);
1716 if (d->high == NULL)
1718 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node);
1719 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node);
1723 gfc_init_se (&se, NULL);
1724 gfc_conv_expr_reference (&se, d->high);
1726 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr);
1727 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length);
1730 CONSTRUCTOR_APPEND_ELT (node, ss_target[k],
1731 build_int_cst (integer_type_node, d->n));
1733 tmp = build_constructor (select_struct[k], node);
1734 CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp);
1737 type = build_array_type (select_struct[k],
1738 build_index_type (build_int_cst (NULL_TREE, n-1)));
1740 init = build_constructor (type, inits);
1741 TREE_CONSTANT (init) = 1;
1742 TREE_STATIC (init) = 1;
1743 /* Create a static variable to hold the jump table. */
1744 tmp = gfc_create_var (type, "jumptable");
1745 TREE_CONSTANT (tmp) = 1;
1746 TREE_STATIC (tmp) = 1;
1747 TREE_READONLY (tmp) = 1;
1748 DECL_INITIAL (tmp) = init;
1751 /* Build the library call */
1752 init = gfc_build_addr_expr (pvoid_type_node, init);
1754 gfc_init_se (&se, NULL);
1755 gfc_conv_expr_reference (&se, code->expr1);
1757 gfc_add_block_to_block (&block, &se.pre);
/* Pick the runtime lookup routine matching the character kind. */
1759 if (code->expr1->ts.kind == 1)
1760 fndecl = gfor_fndecl_select_string;
1761 else if (code->expr1->ts.kind == 4)
1762 fndecl = gfor_fndecl_select_string_char4;
1766 tmp = build_call_expr_loc (input_location,
1767 fndecl, 4, init, build_int_cst (NULL_TREE, n),
1768 se.expr, se.string_length);
1769 case_num = gfc_create_var (integer_type_node, "case_num");
1770 gfc_add_modify (&block, case_num, tmp);
1772 gfc_add_block_to_block (&block, &se.post);
/* Switch on the case number the library call produced. */
1774 tmp = gfc_finish_block (&body);
1775 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1776 gfc_add_expr_to_block (&block, tmp);
1778 tmp = build1_v (LABEL_EXPR, end_label);
1779 gfc_add_expr_to_block (&block, tmp);
1781 return gfc_finish_block (&block);
1785 /* Translate the three variants of the SELECT CASE construct.
1787 SELECT CASEs with INTEGER case expressions can be translated to an
1788 equivalent GENERIC switch statement, and for LOGICAL case
1789 expressions we build one or two if-else compares.
1791 SELECT CASEs with CHARACTER case expressions are a whole different
1792 story, because they don't exist in GENERIC. So we sort them and
1793 do a binary search at runtime.
1795 Fortran has no BREAK statement, and it does not allow jumps from
1796 one case block to another. That makes things a lot easier for
1800 gfc_trans_select (gfc_code * code)
1802 gcc_assert (code && code->expr1);
1804 /* Empty SELECT constructs are legal. */
1805 if (code->block == NULL)
1806 return build_empty_stmt (input_location);
1808 /* Select the correct translation function. */
1809 switch (code->expr1->ts.type)
1811 case BT_LOGICAL: return gfc_trans_logical_select (code);
1812 case BT_INTEGER: return gfc_trans_integer_select (code);
1813 case BT_CHARACTER: return gfc_trans_character_select (code);
/* Any other selector type is a front-end bug. */
1815 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1821 /* Traversal function to substitute a replacement symtree if the symbol
1822 in the expression is the same as that passed. f == 2 signals that
1823 that variable itself is not to be checked - only the references.
1824 This group of functions is used when the variable expression in a
1825 FORALL assignment has internal references. For example:
1826 FORALL (i = 1:4) p(p(i)) = i
1827 The only recourse here is to store a copy of 'p' for the index
/* State shared by forall_replace/forall_restore: the substitute
   symtree and the one it replaced. */
1830 static gfc_symtree *new_symtree;
1831 static gfc_symtree *old_symtree;
1834 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
/* Only variable references can carry the symtree we substitute. */
1836 if (expr->expr_type != EXPR_VARIABLE)
1841 else if (expr->symtree->n.sym == sym)
1842 expr->symtree = new_symtree;
/* Substitute new_symtree for SYM throughout expression E.  F is
   forwarded to forall_replace (f == 2 skips the variable itself). */
1848 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1850 gfc_traverse_expr (e, sym, forall_replace, f);
/* Traversal callback undoing forall_replace: put old_symtree back
   wherever new_symtree had been substituted. */
1854 forall_restore (gfc_expr *expr,
1855 gfc_symbol *sym ATTRIBUTE_UNUSED,
1856 int *f ATTRIBUTE_UNUSED)
1858 if (expr->expr_type != EXPR_VARIABLE)
1861 if (expr->symtree == new_symtree)
1862 expr->symtree = old_symtree;
/* Restore the original symtree throughout expression E. */
1868 forall_restore_symtree (gfc_expr *e)
1870 gfc_traverse_expr (e, NULL, forall_restore, 0);
/* Make a temporary copy of the lvalue in FORALL assignment C and
   redirect both sides of the assignment to it, recording the
   old/new symtrees in the file-scope variables above.  PRE receives
   the copy-in code, POST any cleanup. */
1874 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1879 gfc_symbol *new_sym;
1880 gfc_symbol *old_sym;
1884 /* Build a copy of the lvalue. */
1885 old_symtree = c->expr1->symtree;
1886 old_sym = old_symtree->n.sym;
1887 e = gfc_lval_expr_from_sym (old_sym);
1888 if (old_sym->attr.dimension)
/* Array case: copy through a descriptor temporary. */
1890 gfc_init_se (&tse, NULL);
1891 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
1892 gfc_add_block_to_block (pre, &tse.pre);
1893 gfc_add_block_to_block (post, &tse.post);
1894 tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
1896 if (e->ts.type != BT_CHARACTER)
1898 /* Use the variable offset for the temporary. */
1899 tmp = gfc_conv_array_offset (old_sym->backend_decl);
1900 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
/* Scalar case: assign the value into a scalar temporary. */
1905 gfc_init_se (&tse, NULL);
1906 gfc_init_se (&rse, NULL);
1907 gfc_conv_expr (&rse, e);
1908 if (e->ts.type == BT_CHARACTER)
1910 tse.string_length = rse.string_length;
1911 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1913 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1915 gfc_add_block_to_block (pre, &tse.pre);
1916 gfc_add_block_to_block (post, &tse.post);
1920 tmp = gfc_typenode_for_spec (&e->ts);
1921 tse.expr = gfc_create_var (tmp, "temp");
1924 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1925 e->expr_type == EXPR_VARIABLE, true);
1926 gfc_add_expr_to_block (pre, tmp);
1930 /* Create a new symbol to represent the lvalue. */
1931 new_sym = gfc_new_symbol (old_sym->name, NULL);
1932 new_sym->ts = old_sym->ts;
1933 new_sym->attr.referenced = 1;
1934 new_sym->attr.temporary = 1;
1935 new_sym->attr.dimension = old_sym->attr.dimension;
1936 new_sym->attr.flavor = old_sym->attr.flavor;
1938 /* Use the temporary as the backend_decl. */
1939 new_sym->backend_decl = tse.expr;
1941 /* Create a fake symtree for it. */
1943 new_symtree = gfc_new_symtree (&root, old_sym->name);
1944 new_symtree->n.sym = new_sym;
1945 gcc_assert (new_symtree == root);
1947 /* Go through the expression reference replacing the old_symtree
1949 forall_replace_symtree (c->expr1, old_sym, 2);
1951 /* Now we have made this temporary, we might as well use it for
1952 the right hand side. */
1953 forall_replace_symtree (c->expr2, old_sym, 1);
1957 /* Handles dependencies in forall assignments. */
1959 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1966 lsym = c->expr1->symtree->n.sym;
/* First the classic lhs/rhs overlap test. */
1967 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
1969 /* Now check for dependencies within the 'variable'
1970 expression itself. These are treated by making a complete
1971 copy of variable and changing all the references to it
1972 point to the copy instead. Note that the shallow copy of
1973 the variable will not suffice for derived types with
1974 pointer components. We therefore leave these to their
1976 if (lsym->ts.type == BT_DERIVED
1977 && lsym->ts.u.derived->attr.pointer_comp)
/* The lhs variable is used as its own index, e.g. p(p(i)):
   copy it wholesale. */
1981 if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
1983 forall_make_variable_temp (c, pre, post);
1987 /* Substrings with dependencies are treated in the same
1989 if (c->expr1->ts.type == BT_CHARACTER
1991 && c->expr2->expr_type == EXPR_VARIABLE
1992 && lsym == c->expr2->symtree->n.sym)
1994 for (lref = c->expr1->ref; lref; lref = lref->next)
1995 if (lref->type == REF_SUBSTRING)
1997 for (rref = c->expr2->ref; rref; rref = rref->next)
1998 if (rref->type == REF_SUBSTRING)
/* Overlapping substrings of the same variable where the rhs
   starts earlier also need the temporary copy. */
2002 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
2004 forall_make_variable_temp (c, pre, post);
/* Undo the symtree substitutions made for assignment C and release
   the temporary symbol and symtree created by
   forall_make_variable_temp. */
2013 cleanup_forall_symtrees (gfc_code *c)
2015 forall_restore_symtree (c->expr1);
2016 forall_restore_symtree (c->expr2);
2017 gfc_free (new_symtree->n.sym);
2018 gfc_free (new_symtree);
2022 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2023 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2024 indicates whether we should generate code to test the FORALLs mask
2025 array. OUTER is the loop header to be used for initializing mask
2028 The generated loop format is:
2029 count = (end - start + step) / step
2042 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
2043 int mask_flag, stmtblock_t *outer)
2051 tree var, start, end, step;
2054 /* Initialize the mask index outside the FORALL nest. */
2055 if (mask_flag && forall_tmp->mask)
2056 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
/* Wrap BODY in one counted loop per FORALL index variable, innermost
   first. */
2058 iter = forall_tmp->this_loop;
2059 nvar = forall_tmp->nvar;
2060 for (n = 0; n < nvar; n++)
2063 start = iter->start;
2067 exit_label = gfc_build_label_decl (NULL_TREE);
2068 TREE_USED (exit_label) = 1;
2070 /* The loop counter. */
2071 count = gfc_create_var (TREE_TYPE (var), "count");
2073 /* The body of the loop. */
2074 gfc_init_block (&block);
2076 /* The exit condition. */
2077 cond = fold_build2 (LE_EXPR, boolean_type_node,
2078 count, build_int_cst (TREE_TYPE (count), 0));
2079 tmp = build1_v (GOTO_EXPR, exit_label);
2080 tmp = fold_build3 (COND_EXPR, void_type_node,
2081 cond, tmp, build_empty_stmt (input_location));
2082 gfc_add_expr_to_block (&block, tmp);
2084 /* The main loop body. */
2085 gfc_add_expr_to_block (&block, body);
2086 /* Increment the loop variable. */
2088 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
2089 gfc_add_modify (&block, var, tmp);
2091 /* Advance to the next mask element. Only do this for the
2093 if (n == 0 && mask_flag && forall_tmp->mask)
2095 tree maskindex = forall_tmp->maskindex;
2096 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2097 maskindex, gfc_index_one_node);
2098 gfc_add_modify (&block, maskindex, tmp);
2101 /* Decrement the loop counter. */
2102 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
2103 build_int_cst (TREE_TYPE (var), 1));
2104 gfc_add_modify (&block, count, tmp);
2106 body = gfc_finish_block (&block);
2108 /* Loop var initialization. */
2109 gfc_init_block (&block);
2110 gfc_add_modify (&block, var, start);
2113 /* Initialize the loop counter. */
2114 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
2115 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
2116 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
2117 gfc_add_modify (&block, count, tmp);
2119 /* The loop expression. */
2120 tmp = build1_v (LOOP_EXPR, body);
2121 gfc_add_expr_to_block (&block, tmp);
2123 /* The exit label. */
2124 tmp = build1_v (LABEL_EXPR, exit_label);
2125 gfc_add_expr_to_block (&block, tmp);
/* The finished loop becomes the body of the next-outer index loop. */
2127 body = gfc_finish_block (&block);
2134 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2135 is nonzero, the body is controlled by all masks in the forall nest.
2136 Otherwise, the innermost loop is not controlled by it's mask. This
2137 is used for initializing that mask. */
2140 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
2145 forall_info *forall_tmp;
2146 tree mask, maskindex;
2148 gfc_start_block (&header);
/* Walk from the innermost nest level outwards, wrapping BODY in each
   level's mask test and loops. */
2150 forall_tmp = nested_forall_info;
2151 while (forall_tmp != NULL)
2153 /* Generate body with masks' control. */
2156 mask = forall_tmp->mask;
2157 maskindex = forall_tmp->maskindex;
2159 /* If a mask was specified make the assignment conditional. */
2162 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2163 body = build3_v (COND_EXPR, tmp, body,
2164 build_empty_stmt (input_location));
2167 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2168 forall_tmp = forall_tmp->prev_nest;
2172 gfc_add_expr_to_block (&header, body);
2173 return gfc_finish_block (&header);
2177 /* Allocate data for holding a temporary array. Returns either a local
2178 temporary array or a pointer variable. */
2181 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2188 if (INTEGER_CST_P (size))
/* Constant element count: upper array bound is size - 1. */
2190 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
2191 gfc_index_one_node);
2196 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2197 type = build_array_type (elem_type, type);
2198 if (gfc_can_put_var_on_stack (bytesize))
/* Small constant-sized temporaries live on the stack. */
2200 gcc_assert (INTEGER_CST_P (size));
2201 tmpvar = gfc_create_var (type, "temp");
/* Otherwise heap-allocate and hand the raw pointer back via *pdata
   so the caller can free it. */
2206 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2207 *pdata = convert (pvoid_type_node, tmpvar);
2209 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2210 gfc_add_modify (pblock, tmpvar, tmp);
2216 /* Generate codes to copy the temporary to the actual lhs. */
2219 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2220 tree count1, gfc_expr *wheremask-placeholder-removed, bool invert)
2224 stmtblock_t block, body;
2230 lss = gfc_walk_expr (expr);
/* Scalar lhs: a single element copy, no scalarizer needed. */
2232 if (lss == gfc_ss_terminator)
2234 gfc_start_block (&block);
2236 gfc_init_se (&lse, NULL);
2238 /* Translate the expression. */
2239 gfc_conv_expr (&lse, expr);
2241 /* Form the expression for the temporary. */
2242 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2244 /* Use the scalar assignment as is. */
2245 gfc_add_block_to_block (&block, &lse.pre);
2246 gfc_add_modify (&block, lse.expr, tmp);
2247 gfc_add_block_to_block (&block, &lse.post);
2249 /* Increment the count1. */
2250 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2251 gfc_index_one_node);
2252 gfc_add_modify (&block, count1, tmp);
2254 tmp = gfc_finish_block (&block);
/* Array lhs: build a scalarized copy loop over the lhs shape. */
2258 gfc_start_block (&block);
2260 gfc_init_loopinfo (&loop1);
2261 gfc_init_se (&rse, NULL);
2262 gfc_init_se (&lse, NULL);
2264 /* Associate the lss with the loop. */
2265 gfc_add_ss_to_loop (&loop1, lss);
2267 /* Calculate the bounds of the scalarization. */
2268 gfc_conv_ss_startstride (&loop1);
2269 /* Setup the scalarizing loops. */
2270 gfc_conv_loop_setup (&loop1, &expr->where);
2272 gfc_mark_ss_chain_used (lss, 1);
2274 /* Start the scalarized loop body. */
2275 gfc_start_scalarized_body (&loop1, &body);
2277 /* Setup the gfc_se structures. */
2278 gfc_copy_loopinfo_to_se (&lse, &loop1);
2281 /* Form the expression of the temporary. */
2282 if (lss != gfc_ss_terminator)
2283 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2284 /* Translate expr. */
2285 gfc_conv_expr (&lse, expr);
2287 /* Use the scalar assignment. */
2288 rse.string_length = lse.string_length;
2289 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);
2291 /* Form the mask expression according to the mask tree list. */
2294 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
/* INVERT selects the ELSEWHERE branch of the controlling WHERE. */
2296 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2297 TREE_TYPE (wheremaskexpr),
2299 tmp = fold_build3 (COND_EXPR, void_type_node,
2301 build_empty_stmt (input_location));
2304 gfc_add_expr_to_block (&body, tmp);
2306 /* Increment count1. */
2307 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2308 count1, gfc_index_one_node);
2309 gfc_add_modify (&body, count1, tmp);
2311 /* Increment count3. */
2314 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2315 count3, gfc_index_one_node);
2316 gfc_add_modify (&body, count3, tmp);
2319 /* Generate the copying loops. */
2320 gfc_trans_scalarizing_loops (&loop1, &body);
2321 gfc_add_block_to_block (&block, &loop1.pre);
2322 gfc_add_block_to_block (&block, &loop1.post);
2323 gfc_cleanup_loop (&loop1);
2325 tmp = gfc_finish_block (&block);
2331 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2332 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2333 and should not be freed. WHEREMASK is the conditional execution mask
2334 whose sense may be inverted by INVERT. */
2337 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2338 tree count1, gfc_ss *lss, gfc_ss *rss,
2339 tree wheremask, bool invert)
2341 stmtblock_t block, body1;
2348 gfc_start_block (&block);
2350 gfc_init_se (&rse, NULL);
2351 gfc_init_se (&lse, NULL);
/* Scalar case: single element, no scalarizer loop. */
2353 if (lss == gfc_ss_terminator)
2355 gfc_init_block (&body1);
2356 gfc_conv_expr (&rse, expr2);
2357 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2361 /* Initialize the loop. */
2362 gfc_init_loopinfo (&loop);
2364 /* We may need LSS to determine the shape of the expression. */
2365 gfc_add_ss_to_loop (&loop, lss);
2366 gfc_add_ss_to_loop (&loop, rss);
2368 gfc_conv_ss_startstride (&loop);
2369 gfc_conv_loop_setup (&loop, &expr2->where);
2371 gfc_mark_ss_chain_used (rss, 1);
2372 /* Start the loop body. */
2373 gfc_start_scalarized_body (&loop, &body1);
2375 /* Translate the expression. */
2376 gfc_copy_loopinfo_to_se (&rse, &loop);
2378 gfc_conv_expr (&rse, expr2);
2380 /* Form the expression of the temporary. */
2381 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2384 /* Use the scalar assignment. */
2385 lse.string_length = rse.string_length;
2386 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2387 expr2->expr_type == EXPR_VARIABLE, true);
2389 /* Form the mask expression according to the mask tree list. */
2392 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
/* INVERT selects the ELSEWHERE branch of the controlling WHERE. */
2394 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2395 TREE_TYPE (wheremaskexpr),
2397 tmp = fold_build3 (COND_EXPR, void_type_node,
2398 wheremaskexpr, tmp, build_empty_stmt (input_location));
2401 gfc_add_expr_to_block (&body1, tmp);
2403 if (lss == gfc_ss_terminator)
2405 gfc_add_block_to_block (&block, &body1);
2407 /* Increment count1. */
2408 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2409 gfc_index_one_node);
2410 gfc_add_modify (&block, count1, tmp);
2414 /* Increment count1. */
2415 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2416 count1, gfc_index_one_node);
2417 gfc_add_modify (&body1, count1, tmp);
2419 /* Increment count3. */
2422 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2423 count3, gfc_index_one_node);
2424 gfc_add_modify (&body1, count3, tmp);
2427 /* Generate the copying loops. */
2428 gfc_trans_scalarizing_loops (&loop, &body1);
2430 gfc_add_block_to_block (&block, &loop.pre);
2431 gfc_add_block_to_block (&block, &loop.post);
2433 gfc_cleanup_loop (&loop);
2434 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2435 as tree nodes in SS may not be valid in different scope. */
2438 tmp = gfc_finish_block (&block);
2443 /* Calculate the size of temporary needed in the assignment inside forall.
2444 LSS and RSS are filled in this function. */
2447 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2448 stmtblock_t * pblock,
2449 gfc_ss **lss, gfc_ss **rss)
2457 *lss = gfc_walk_expr (expr1);
/* A scalar lhs needs exactly one element. */
2460 size = gfc_index_one_node;
2461 if (*lss != gfc_ss_terminator)
2463 gfc_init_loopinfo (&loop);
2465 /* Walk the RHS of the expression. */
2466 *rss = gfc_walk_expr (expr2);
2467 if (*rss == gfc_ss_terminator)
2469 /* The rhs is scalar. Add a ss for the expression. */
2470 *rss = gfc_get_ss ();
2471 (*rss)->next = gfc_ss_terminator;
2472 (*rss)->type = GFC_SS_SCALAR;
2473 (*rss)->expr = expr2;
2476 /* Associate the SS with the loop. */
2477 gfc_add_ss_to_loop (&loop, *lss);
2478 /* We don't actually need to add the rhs at this point, but it might
2479 make guessing the loop bounds a bit easier. */
2480 gfc_add_ss_to_loop (&loop, *rss);
2482 /* We only want the shape of the expression, not rest of the junk
2483 generated by the scalarizer. */
2484 loop.array_parameter = 1;
2486 /* Calculate the bounds of the scalarization. */
2487 save_flag = gfc_option.rtcheck;
/* Temporarily suppress only -fcheck=bounds while sizing the
   temporary; rtcheck is restored right after.  Note: this must be
   bitwise NOT -- the previous "&= !GFC_RTCHECK_BOUNDS" evaluated
   to "&= 0" and wiped out every runtime-check flag. */
2488 gfc_option.rtcheck &= ~GFC_RTCHECK_BOUNDS;
2489 gfc_conv_ss_startstride (&loop);
2490 gfc_option.rtcheck = save_flag;
2491 gfc_conv_loop_setup (&loop, &expr2->where);
2493 /* Figure out how many elements we need. */
2494 for (i = 0; i < loop.dimen; i++)
/* Extent of dimension i is (to - from + 1). */
2496 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2497 gfc_index_one_node, loop.from[i]);
2498 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2500 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2502 gfc_add_block_to_block (pblock, &loop.pre);
2503 size = gfc_evaluate_now (size, pblock);
2504 gfc_add_block_to_block (pblock, &loop.post);
2506 /* TODO: write a function that cleans up a loopinfo without freeing
2507 the SS chains. Currently a NOP. */
2514 /* Calculate the overall iterator number of the nested forall construct.
2515 This routine actually calculates the number of times the body of the
2516 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2517 that by the expression INNER_SIZE. The BLOCK argument specifies the
2518 block in which to calculate the result, and the optional INNER_SIZE_BODY
2519 argument contains any statements that need to executed (inside the loop)
2520 to initialize or calculate INNER_SIZE. */
2523 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2524 stmtblock_t *inner_size_body, stmtblock_t *block)
2526 forall_info *forall_tmp = nested_forall_info;
2530 /* We can eliminate the innermost unconditional loops with constant
2532 if (INTEGER_CST_P (inner_size))
/* Fold unmasked nest levels with constant trip counts straight
   into the constant size. */
2535 && !forall_tmp->mask
2536 && INTEGER_CST_P (forall_tmp->size))
2538 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2539 inner_size, forall_tmp->size);
2540 forall_tmp = forall_tmp->prev_nest;
2543 /* If there are no loops left, we have our constant result. */
2548 /* Otherwise, create a temporary variable to compute the result. */
2549 number = gfc_create_var (gfc_array_index_type, "num");
2550 gfc_add_modify (block, number, gfc_index_zero_node);
2552 gfc_start_block (&body);
2553 if (inner_size_body)
2554 gfc_add_block_to_block (&body, inner_size_body);
/* num += inner_size, executed once per (masked) iteration. */
2556 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2557 number, inner_size);
2560 gfc_add_modify (&body, number, tmp);
2561 tmp = gfc_finish_block (&body);
2563 /* Generate loops. */
2564 if (forall_tmp != NULL)
2565 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2567 gfc_add_expr_to_block (block, tmp);
2573 /* Allocate temporary for forall construct. SIZE is the size of temporary
2574 needed. PTEMP1 is returned for space free. */
2577 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2584 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2585 if (!integer_onep (unit))
2586 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2591 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2594 tmp = build_fold_indirect_ref_loc (input_location, tmp);
2599 /* Allocate temporary for forall construct according to the information in
2600 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2601 assignment inside forall. PTEMP1 is returned for space free. */
2604 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2605 tree inner_size, stmtblock_t * inner_size_body,
2606 stmtblock_t * block, tree * ptemp1)
2610 /* Calculate the total size of temporary needed in forall construct. */
2611 size = compute_overall_iter_number (nested_forall_info, inner_size,
2612 inner_size_body, block);
2614 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
/* NOTE(review): this listing is an incomplete excerpt -- the embedded
   original line numbers are non-contiguous (e.g. 2653 -> 2661), so the
   return type, several declarations, braces and some statements are
   elided here.  Consult the complete file before editing.  */
2618 /* Handle assignments inside forall which need temporary.
2620 forall (i=start:end:stride; maskexpr)
2623 (where e,f<i> are arbitrary expressions possibly involving i
2624 and there is a dependency between e<i> and f<i>)
2626 masktmp(:) = maskexpr(:)
2631 for (i = start; i <= end; i += stride)
2635 for (i = start; i <= end; i += stride)
2637 if (masktmp[maskindex++])
2638 tmp[count1++] = f<i>
2642 for (i = start; i <= end; i += stride)
2644 if (masktmp[maskindex++])
2645 e<i> = tmp[count1++]
2650 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2651 tree wheremask, bool invert,
2652 forall_info * nested_forall_info,
2653 stmtblock_t * block)
2661 stmtblock_t inner_size_body;
2663 /* Create vars. count1 is the current iterator number of the nested
2665 count1 = gfc_create_var (gfc_array_index_type, "count1");
2667 /* Count is the wheremask index. */
2670 count = gfc_create_var (gfc_array_index_type, "count");
2671 gfc_add_modify (block, count, gfc_index_zero_node);
2676 /* Initialize count1. */
2677 gfc_add_modify (block, count1, gfc_index_zero_node);
2679 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2680 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2681 gfc_init_block (&inner_size_body);
2682 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
/* For character lhs, make sure the string length has a backend decl
   before it is used to build the temporary's type.  */
2685 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
2686 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
2688 if (!expr1->ts.u.cl->backend_decl)
2691 gfc_init_se (&tse, NULL);
2692 gfc_conv_expr (&tse, expr1->ts.u.cl->length);
2693 expr1->ts.u.cl->backend_decl = tse.expr;
2695 type = gfc_get_character_type_len (gfc_default_character_kind,
2696 expr1->ts.u.cl->backend_decl);
2699 type = gfc_typenode_for_spec (&expr1->ts);
2701 /* Allocate temporary for nested forall construct according to the
2702 information in nested_forall_info and inner_size. */
2703 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2704 &inner_size_body, block, &ptemp1);
2706 /* Generate codes to copy rhs to the temporary . */
2707 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2710 /* Generate body and loops according to the information in
2711 nested_forall_info. */
2712 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2713 gfc_add_expr_to_block (block, tmp);
/* Reset the counters before replaying the loops to copy the
   temporary back to the lhs.  */
2716 gfc_add_modify (block, count1, gfc_index_zero_node);
2720 gfc_add_modify (block, count, gfc_index_zero_node);
2722 /* Generate codes to copy the temporary to lhs. */
2723 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2726 /* Generate body and loops according to the information in
2727 nested_forall_info. */
2728 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2729 gfc_add_expr_to_block (block, tmp);
/* presumably guarded by "if (ptemp1)" in the elided line above --
   TODO confirm against the full source.  */
2733 /* Free the temporary. */
2734 tmp = gfc_call_free (ptemp1);
2735 gfc_add_expr_to_block (block, tmp);
/* NOTE(review): incomplete excerpt -- the embedded original line numbers
   jump (e.g. 2745 -> 2759), so declarations, braces, the if/else
   structure and some statements are elided.  The visible code handles
   two cases: scalar pointer assignment (lss == gfc_ss_terminator) and
   array-section pointer assignment via a descriptor temporary.  */
2740 /* Translate pointer assignment inside FORALL which need temporary. */
2743 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2744 forall_info * nested_forall_info,
2745 stmtblock_t * block)
2759 tree tmp, tmp1, ptemp1;
2761 count = gfc_create_var (gfc_array_index_type, "count");
2762 gfc_add_modify (block, count, gfc_index_zero_node);
2764 inner_size = integer_one_node;
2765 lss = gfc_walk_expr (expr1);
2766 rss = gfc_walk_expr (expr2);
2767 if (lss == gfc_ss_terminator)
/* Scalar case: stash one pointer per iteration in a flat temporary.  */
2769 type = gfc_typenode_for_spec (&expr1->ts);
2770 type = build_pointer_type (type);
2772 /* Allocate temporary for nested forall construct according to the
2773 information in nested_forall_info and inner_size. */
2774 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2775 inner_size, NULL, block, &ptemp1);
2776 gfc_start_block (&body);
2777 gfc_init_se (&lse, NULL);
2778 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2779 gfc_init_se (&rse, NULL);
2780 rse.want_pointer = 1;
2781 gfc_conv_expr (&rse, expr2);
2782 gfc_add_block_to_block (&body, &rse.pre);
2783 gfc_add_modify (&body, lse.expr,
2784 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2785 gfc_add_block_to_block (&body, &rse.post);
2787 /* Increment count. */
2788 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2789 count, gfc_index_one_node);
2790 gfc_add_modify (&body, count, tmp);
2792 tmp = gfc_finish_block (&body);
2794 /* Generate body and loops according to the information in
2795 nested_forall_info. */
2796 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2797 gfc_add_expr_to_block (block, tmp);
/* Second pass: replay the loops and assign the saved pointers to the
   lhs, indexed by the same counter (reset to zero first).  */
2800 gfc_add_modify (block, count, gfc_index_zero_node);
2802 gfc_start_block (&body);
2803 gfc_init_se (&lse, NULL);
2804 gfc_init_se (&rse, NULL);
2805 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2806 lse.want_pointer = 1;
2807 gfc_conv_expr (&lse, expr1);
2808 gfc_add_block_to_block (&body, &lse.pre);
2809 gfc_add_modify (&body, lse.expr, rse.expr);
2810 gfc_add_block_to_block (&body, &lse.post);
2811 /* Increment count. */
2812 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2813 count, gfc_index_one_node);
2814 gfc_add_modify (&body, count, tmp);
2815 tmp = gfc_finish_block (&body);
2817 /* Generate body and loops according to the information in
2818 nested_forall_info. */
2819 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2820 gfc_add_expr_to_block (block, tmp);
/* Array-section case: save one array descriptor per iteration.  */
2824 gfc_init_loopinfo (&loop);
2826 /* Associate the SS with the loop. */
2827 gfc_add_ss_to_loop (&loop, rss);
2829 /* Setup the scalarizing loops and bounds. */
2830 gfc_conv_ss_startstride (&loop);
2832 gfc_conv_loop_setup (&loop, &expr2->where);
2834 info = &rss->data.info;
2835 desc = info->descriptor;
2837 /* Make a new descriptor. */
2838 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2839 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0,
2840 loop.from, loop.to, 1,
2841 GFC_ARRAY_UNKNOWN, true);
2843 /* Allocate temporary for nested forall construct. */
2844 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2845 inner_size, NULL, block, &ptemp1);
2846 gfc_start_block (&body);
2847 gfc_init_se (&lse, NULL);
2848 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2849 lse.direct_byref = 1;
2850 rss = gfc_walk_expr (expr2);
2851 gfc_conv_expr_descriptor (&lse, expr2, rss);
2853 gfc_add_block_to_block (&body, &lse.pre);
2854 gfc_add_block_to_block (&body, &lse.post);
2856 /* Increment count. */
2857 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2858 count, gfc_index_one_node);
2859 gfc_add_modify (&body, count, tmp);
2861 tmp = gfc_finish_block (&body);
2863 /* Generate body and loops according to the information in
2864 nested_forall_info. */
2865 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2866 gfc_add_expr_to_block (block, tmp);
/* Second pass: copy each saved descriptor into the lhs pointer.  */
2869 gfc_add_modify (block, count, gfc_index_zero_node);
2871 parm = gfc_build_array_ref (tmp1, count, NULL);
2872 lss = gfc_walk_expr (expr1);
2873 gfc_init_se (&lse, NULL);
2874 gfc_conv_expr_descriptor (&lse, expr1, lss);
2875 gfc_add_modify (&lse.pre, lse.expr, parm);
2876 gfc_start_block (&body);
2877 gfc_add_block_to_block (&body, &lse.pre);
2878 gfc_add_block_to_block (&body, &lse.post);
2880 /* Increment count. */
2881 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2882 count, gfc_index_one_node);
2883 gfc_add_modify (&body, count, tmp);
2885 tmp = gfc_finish_block (&body);
2887 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2888 gfc_add_expr_to_block (block, tmp);
/* presumably guarded by "if (ptemp1)" in the elided line above --
   TODO confirm against the full source.  */
2890 /* Free the temporary. */
2893 tmp = gfc_call_free (ptemp1);
2894 gfc_add_expr_to_block (block, tmp);
/* NOTE(review): incomplete excerpt -- the embedded original line numbers
   jump throughout (e.g. 2938 -> 2958, 3212 -> 3222), so the declaration
   list, many braces, if/else and switch scaffolding, and several
   statements are elided.  The visible structure is: evaluate the
   iteration bounds and mask, then dispatch each statement in the FORALL
   body through a (mostly elided) switch on the statement opcode.  */
2899 /* FORALL and WHERE statements are really nasty, especially when you nest
2900 them. All the rhs of a forall assignment must be evaluated before the
2901 actual assignments are performed. Presumably this also applies to all the
2902 assignments in an inner where statement. */
2904 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2905 linear array, relying on the fact that we process in the same order in all
2908 forall (i=start:end:stride; maskexpr)
2912 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2914 count = ((end + 1 - start) / stride)
2915 masktmp(:) = maskexpr(:)
2918 for (i = start; i <= end; i += stride)
2920 if (masktmp[maskindex++])
2924 for (i = start; i <= end; i += stride)
2926 if (masktmp[maskindex++])
2930 Note that this code only works when there are no dependencies.
2931 Forall loop with array assignments and data dependencies are a real pain,
2932 because the size of the temporary cannot always be determined before the
2933 loop is executed. This problem is compounded by the presence of nested
2938 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2958 gfc_forall_iterator *fa;
2961 gfc_saved_var *saved_vars;
2962 iter_info *this_forall;
2966 /* Do nothing if the mask is false. */
2968 && code->expr1->expr_type == EXPR_CONSTANT
2969 && !code->expr1->value.logical)
2970 return build_empty_stmt (input_location);
2973 /* Count the FORALL index number. */
2974 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2978 /* Allocate the space for var, start, end, step, varexpr. */
2979 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2980 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2981 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2982 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2983 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2984 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2986 /* Allocate the space for info. */
2987 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2989 gfc_start_block (&pre);
2990 gfc_init_block (&post);
2991 gfc_init_block (&block);
/* Build one iter_info per FORALL index and evaluate its bounds once.  */
2994 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2996 gfc_symbol *sym = fa->var->symtree->n.sym;
2998 /* Allocate space for this_forall. */
2999 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
3001 /* Create a temporary variable for the FORALL index. */
3002 tmp = gfc_typenode_for_spec (&sym->ts);
3003 var[n] = gfc_create_var (tmp, sym->name);
3004 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
3006 /* Record it in this_forall. */
3007 this_forall->var = var[n];
3009 /* Replace the index symbol's backend_decl with the temporary decl. */
3010 sym->backend_decl = var[n];
3012 /* Work out the start, end and stride for the loop. */
3013 gfc_init_se (&se, NULL);
3014 gfc_conv_expr_val (&se, fa->start);
3015 /* Record it in this_forall. */
3016 this_forall->start = se.expr;
3017 gfc_add_block_to_block (&block, &se.pre);
3020 gfc_init_se (&se, NULL);
3021 gfc_conv_expr_val (&se, fa->end);
3022 /* Record it in this_forall. */
3023 this_forall->end = se.expr;
3024 gfc_make_safe_expr (&se);
3025 gfc_add_block_to_block (&block, &se.pre);
3028 gfc_init_se (&se, NULL);
3029 gfc_conv_expr_val (&se, fa->stride);
3030 /* Record it in this_forall. */
3031 this_forall->step = se.expr;
3032 gfc_make_safe_expr (&se);
3033 gfc_add_block_to_block (&block, &se.pre);
3036 /* Set the NEXT field of this_forall to NULL. */
3037 this_forall->next = NULL;
3038 /* Link this_forall to the info construct. */
3039 if (info->this_loop)
3041 iter_info *iter_tmp = info->this_loop;
3042 while (iter_tmp->next != NULL)
3043 iter_tmp = iter_tmp->next;
3044 iter_tmp->next = this_forall;
3047 info->this_loop = this_forall;
3053 /* Calculate the size needed for the current forall level. */
3054 size = gfc_index_one_node;
3055 for (n = 0; n < nvar; n++)
3057 /* size = (end + step - start) / step. */
3058 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
3060 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
3062 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
3063 tmp = convert (gfc_array_index_type, tmp);
3065 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
3068 /* Record the nvar and size of current forall level. */
3074 /* If the mask is .true., consider the FORALL unconditional. */
3075 if (code->expr1->expr_type == EXPR_CONSTANT
3076 && code->expr1->value.logical)
3084 /* First we need to allocate the mask. */
3087 /* As the mask array can be very big, prefer compact boolean types. */
3088 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3089 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3090 size, NULL, &block, &pmask);
3091 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3093 /* Record them in the info structure. */
3094 info->maskindex = maskindex;
3099 /* No mask was specified. */
3100 maskindex = NULL_TREE;
3101 mask = pmask = NULL_TREE;
3104 /* Link the current forall level to nested_forall_info. */
3105 info->prev_nest = nested_forall_info;
3106 nested_forall_info = info;
3108 /* Copy the mask into a temporary variable if required.
3109 For now we assume a mask temporary is needed. */
3112 /* As the mask array can be very big, prefer compact boolean types. */
3113 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3115 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3117 /* Start of mask assignment loop body. */
3118 gfc_start_block (&body);
3120 /* Evaluate the mask expression. */
3121 gfc_init_se (&se, NULL);
3122 gfc_conv_expr_val (&se, code->expr1);
3123 gfc_add_block_to_block (&body, &se.pre);
3125 /* Store the mask. */
3126 se.expr = convert (mask_type, se.expr);
3128 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3129 gfc_add_modify (&body, tmp, se.expr);
3131 /* Advance to the next mask element. */
3132 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3133 maskindex, gfc_index_one_node);
3134 gfc_add_modify (&body, maskindex, tmp);
3136 /* Generate the loops. */
3137 tmp = gfc_finish_block (&body);
3138 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3139 gfc_add_expr_to_block (&block, tmp);
3142 c = code->block->next;
3144 /* TODO: loop merging in FORALL statements. */
3145 /* Now that we've got a copy of the mask, generate the assignment loops. */
/* The enclosing switch on the statement opcode is elided in this
   excerpt; only some case bodies are visible below.  */
3151 /* A scalar or array assignment. DO the simple check for
3152 lhs to rhs dependencies. These make a temporary for the
3153 rhs and form a second forall block to copy to variable. */
3154 need_temp = check_forall_dependencies(c, &pre, &post);
3156 /* Temporaries due to array assignment data dependencies introduce
3157 no end of problems. */
3159 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3160 nested_forall_info, &block);
3163 /* Use the normal assignment copying routines. */
3164 assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);
3166 /* Generate body and loops. */
3167 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3169 gfc_add_expr_to_block (&block, tmp);
3172 /* Cleanup any temporary symtrees that have been made to deal
3173 with dependencies. */
3175 cleanup_forall_symtrees (c);
3180 /* Translate WHERE or WHERE construct nested in FORALL. */
3181 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3184 /* Pointer assignment inside FORALL. */
3185 case EXEC_POINTER_ASSIGN:
3186 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3188 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3189 nested_forall_info, &block);
3192 /* Use the normal assignment copying routines. */
3193 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3195 /* Generate body and loops. */
3196 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3198 gfc_add_expr_to_block (&block, tmp);
/* Nested FORALL: recurse with the current level linked in.  */
3203 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3204 gfc_add_expr_to_block (&block, tmp);
3207 /* Explicit subroutine calls are prevented by the frontend but interface
3208 assignments can legitimately produce them. */
3209 case EXEC_ASSIGN_CALL:
3210 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3211 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3212 gfc_add_expr_to_block (&block, tmp);
3222 /* Restore the original index variables. */
3223 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3224 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3226 /* Free the space for var, start, end, step, varexpr. */
3232 gfc_free (saved_vars);
3234 /* Free the space for this forall_info. */
3239 /* Free the temporary for the mask. */
3240 tmp = gfc_call_free (pmask);
3241 gfc_add_expr_to_block (&block, tmp);
3244 pushdecl (maskindex);
3246 gfc_add_block_to_block (&pre, &block);
3247 gfc_add_block_to_block (&pre, &post);
3249 return gfc_finish_block (&pre);
3253 /* Translate the FORALL statement or construct. */
3255 tree gfc_trans_forall (gfc_code * code)
3257 return gfc_trans_forall_1 (code, NULL);
/* NOTE(review): incomplete excerpt -- the embedded original line numbers
   jump (e.g. 3275 -> 3280, 3341 -> 3346), so braces and the if/else
   scaffolding around several statements are elided.  */
3261 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3262 If the WHERE construct is nested in FORALL, compute the overall temporary
3263 needed by the WHERE mask expression multiplied by the iterator number of
3265 ME is the WHERE mask expression.
3266 MASK is the current execution mask upon input, whose sense may or may
3267 not be inverted as specified by the INVERT argument.
3268 CMASK is the updated execution mask on output, or NULL if not required.
3269 PMASK is the pending execution mask on output, or NULL if not required.
3270 BLOCK is the block in which to place the condition evaluation loops. */
3273 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3274 tree mask, bool invert, tree cmask, tree pmask,
3275 tree mask_type, stmtblock_t * block)
3280 stmtblock_t body, body1;
3281 tree count, cond, mtmp;
3284 gfc_init_loopinfo (&loop);
3286 lss = gfc_walk_expr (me);
3287 rss = gfc_walk_expr (me);
3289 /* Variable to index the temporary. */
3290 count = gfc_create_var (gfc_array_index_type, "count");
3291 /* Initialize count. */
3292 gfc_add_modify (block, count, gfc_index_zero_node);
3294 gfc_start_block (&body);
3296 gfc_init_se (&rse, NULL);
3297 gfc_init_se (&lse, NULL);
/* Scalar mask expression: no scalarizer loop needed.  */
3299 if (lss == gfc_ss_terminator)
3301 gfc_init_block (&body1);
3305 /* Initialize the loop. */
3306 gfc_init_loopinfo (&loop);
3308 /* We may need LSS to determine the shape of the expression. */
3309 gfc_add_ss_to_loop (&loop, lss);
3310 gfc_add_ss_to_loop (&loop, rss);
3312 gfc_conv_ss_startstride (&loop);
3313 gfc_conv_loop_setup (&loop, &me->where);
3315 gfc_mark_ss_chain_used (rss, 1);
3316 /* Start the loop body. */
3317 gfc_start_scalarized_body (&loop, &body1);
3319 /* Translate the expression. */
3320 gfc_copy_loopinfo_to_se (&rse, &loop);
3322 gfc_conv_expr (&rse, me);
3325 /* Variable to evaluate mask condition. */
3326 cond = gfc_create_var (mask_type, "cond");
3327 if (mask && (cmask || pmask))
3328 mtmp = gfc_create_var (mask_type, "mask");
3329 else mtmp = NULL_TREE;
3331 gfc_add_block_to_block (&body1, &lse.pre);
3332 gfc_add_block_to_block (&body1, &rse.pre);
3334 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
/* Load the incoming execution mask element; the TRUTH_NOT_EXPR applies
   when INVERT is set (its guarding "if" is elided in this excerpt).  */
3336 if (mask && (cmask || pmask))
3338 tmp = gfc_build_array_ref (mask, count, NULL);
3340 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3341 gfc_add_modify (&body1, mtmp, tmp);
/* cmask[count] = cond [&& mask]; pmask[count] = !cond [&& mask].  */
3346 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3349 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3350 gfc_add_modify (&body1, tmp1, tmp);
3355 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3356 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3358 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3359 gfc_add_modify (&body1, tmp1, tmp);
3362 gfc_add_block_to_block (&body1, &lse.post);
3363 gfc_add_block_to_block (&body1, &rse.post);
3365 if (lss == gfc_ss_terminator)
3367 gfc_add_block_to_block (&body, &body1);
3371 /* Increment count. */
3372 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3373 gfc_index_one_node);
3374 gfc_add_modify (&body1, count, tmp1);
3376 /* Generate the copying loops. */
3377 gfc_trans_scalarizing_loops (&loop, &body1);
3379 gfc_add_block_to_block (&body, &loop.pre);
3380 gfc_add_block_to_block (&body, &loop.post);
3382 gfc_cleanup_loop (&loop);
3383 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3384 as tree nodes in SS may not be valid in different scope. */
3387 tmp1 = gfc_finish_block (&body);
3388 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3389 if (nested_forall_info != NULL)
3390 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3392 gfc_add_expr_to_block (block, tmp1);
/* NOTE(review): incomplete excerpt -- the embedded original line numbers
   jump (e.g. 3404 -> 3410, 3544 -> 3546), so the declaration list,
   braces and some statements (including the guards around the mask
   TRUTH_NOT_EXPR inversions) are elided.  */
3396 /* Translate an assignment statement in a WHERE statement or construct
3397 statement. The MASK expression is used to control which elements
3398 of EXPR1 shall be assigned. The sense of MASK is specified by
3402 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3403 tree mask, bool invert,
3404 tree count1, tree count2,
3410 gfc_ss *lss_section;
3417 tree index, maskexpr;
3419 /* A defined assignment. */
3420 if (cnext && cnext->resolved_sym)
3421 return gfc_trans_call (cnext, true, mask, count1, invert);
3424 /* TODO: handle this special case.
3425 Special case a single function returning an array. */
3426 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3428 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3434 /* Assignment of the form lhs = rhs. */
3435 gfc_start_block (&block);
3437 gfc_init_se (&lse, NULL);
3438 gfc_init_se (&rse, NULL);
3441 lss = gfc_walk_expr (expr1);
3444 /* In each where-assign-stmt, the mask-expr and the variable being
3445 defined shall be arrays of the same shape. */
3446 gcc_assert (lss != gfc_ss_terminator);
3448 /* The assignment needs scalarization. */
3451 /* Find a non-scalar SS from the lhs. */
3452 while (lss_section != gfc_ss_terminator
3453 && lss_section->type != GFC_SS_SECTION)
3454 lss_section = lss_section->next;
3456 gcc_assert (lss_section != gfc_ss_terminator);
3458 /* Initialize the scalarizer. */
3459 gfc_init_loopinfo (&loop);
3462 rss = gfc_walk_expr (expr2);
3463 if (rss == gfc_ss_terminator)
3465 /* The rhs is scalar. Add a ss for the expression. */
3466 rss = gfc_get_ss ();
3468 rss->next = gfc_ss_terminator;
3469 rss->type = GFC_SS_SCALAR;
3473 /* Associate the SS with the loop. */
3474 gfc_add_ss_to_loop (&loop, lss);
3475 gfc_add_ss_to_loop (&loop, rss);
3477 /* Calculate the bounds of the scalarization. */
3478 gfc_conv_ss_startstride (&loop);
3480 /* Resolve any data dependencies in the statement. */
3481 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3483 /* Setup the scalarizing loops. */
3484 gfc_conv_loop_setup (&loop, &expr2->where);
3486 /* Setup the gfc_se structures. */
3487 gfc_copy_loopinfo_to_se (&lse, &loop);
3488 gfc_copy_loopinfo_to_se (&rse, &loop);
3491 gfc_mark_ss_chain_used (rss, 1);
3492 if (loop.temp_ss == NULL)
3495 gfc_mark_ss_chain_used (lss, 1);
/* A dependency-breaking temporary is in use; route the store through it.  */
3499 lse.ss = loop.temp_ss;
3500 gfc_mark_ss_chain_used (lss, 3);
3501 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3504 /* Start the scalarized loop body. */
3505 gfc_start_scalarized_body (&loop, &body);
3507 /* Translate the expression. */
3508 gfc_conv_expr (&rse, expr2);
3509 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3511 gfc_conv_tmp_array_ref (&lse);
3512 gfc_advance_se_ss_chain (&lse);
3515 gfc_conv_expr (&lse, expr1);
3517 /* Form the mask expression according to the mask. */
3519 maskexpr = gfc_build_array_ref (mask, index, NULL);
3521 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3523 /* Use the scalar assignment as is. */
3524 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3525 loop.temp_ss != NULL, false, true);
3527 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
3529 gfc_add_expr_to_block (&body, tmp);
3531 if (lss == gfc_ss_terminator)
3533 /* Increment count1. */
3534 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3535 count1, gfc_index_one_node);
3536 gfc_add_modify (&body, count1, tmp);
3538 /* Use the scalar assignment as is. */
3539 gfc_add_block_to_block (&block, &body);
3543 gcc_assert (lse.ss == gfc_ss_terminator
3544 && rse.ss == gfc_ss_terminator);
3546 if (loop.temp_ss != NULL)
3548 /* Increment count1 before finish the main body of a scalarized
3550 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3551 count1, gfc_index_one_node);
3552 gfc_add_modify (&body, count1, tmp);
3553 gfc_trans_scalarized_loop_boundary (&loop, &body);
3555 /* We need to copy the temporary to the actual lhs. */
3556 gfc_init_se (&lse, NULL);
3557 gfc_init_se (&rse, NULL);
3558 gfc_copy_loopinfo_to_se (&lse, &loop);
3559 gfc_copy_loopinfo_to_se (&rse, &loop);
3561 rse.ss = loop.temp_ss;
3564 gfc_conv_tmp_array_ref (&rse);
3565 gfc_advance_se_ss_chain (&rse);
3566 gfc_conv_expr (&lse, expr1);
3568 gcc_assert (lse.ss == gfc_ss_terminator
3569 && rse.ss == gfc_ss_terminator);
3571 /* Form the mask expression according to the mask tree list. */
3573 maskexpr = gfc_build_array_ref (mask, index, NULL);
3575 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3578 /* Use the scalar assignment as is. */
3579 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
3581 tmp = build3_v (COND_EXPR, maskexpr, tmp,
3582 build_empty_stmt (input_location));
3583 gfc_add_expr_to_block (&body, tmp);
3585 /* Increment count2. */
3586 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3587 count2, gfc_index_one_node);
3588 gfc_add_modify (&body, count2, tmp);
3592 /* Increment count1. */
3593 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3594 count1, gfc_index_one_node);
3595 gfc_add_modify (&body, count1, tmp);
3598 /* Generate the copying loops. */
3599 gfc_trans_scalarizing_loops (&loop, &body);
3601 /* Wrap the whole thing up. */
3602 gfc_add_block_to_block (&block, &loop.pre);
3603 gfc_add_block_to_block (&block, &loop.post);
3604 gfc_cleanup_loop (&loop);
3607 return gfc_finish_block (&block);
/* NOTE(review): incomplete excerpt -- the embedded original line numbers
   jump (e.g. 3621 -> 3630, 3793 -> 3796), so declarations, braces, the
   outer iteration loop and the switch scaffolding are elided.  The
   visible flow: decide which mask temporaries are needed, allocate
   them, then walk each WHERE/ELSEWHERE clause evaluating masks and
   translating the clause bodies.  */
3611 /* Translate the WHERE construct or statement.
3612 This function can be called iteratively to translate the nested WHERE
3613 construct or statement.
3614 MASK is the control mask. */
3617 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3618 forall_info * nested_forall_info, stmtblock_t * block)
3620 stmtblock_t inner_size_body;
3621 tree inner_size, size;
3630 tree count1, count2;
3634 tree pcmask = NULL_TREE;
3635 tree ppmask = NULL_TREE;
3636 tree cmask = NULL_TREE;
3637 tree pmask = NULL_TREE;
3638 gfc_actual_arglist *arg;
3640 /* the WHERE statement or the WHERE construct statement. */
3641 cblock = code->block;
3643 /* As the mask array can be very big, prefer compact boolean types. */
3644 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3646 /* Determine which temporary masks are needed. */
3649 /* One clause: No ELSEWHEREs. */
3650 need_cmask = (cblock->next != 0);
3653 else if (cblock->block->block)
3655 /* Three or more clauses: Conditional ELSEWHEREs. */
3659 else if (cblock->next)
3661 /* Two clauses, the first non-empty. */
3663 need_pmask = (mask != NULL_TREE
3664 && cblock->block->next != 0);
3666 else if (!cblock->block->next)
3668 /* Two clauses, both empty. */
3672 /* Two clauses, the first empty, the second non-empty. */
3675 need_cmask = (cblock->block->expr1 != 0);
3684 if (need_cmask || need_pmask)
3686 /* Calculate the size of temporary needed by the mask-expr. */
3687 gfc_init_block (&inner_size_body);
3688 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
3689 &inner_size_body, &lss, &rss);
3691 /* Calculate the total size of temporary needed. */
3692 size = compute_overall_iter_number (nested_forall_info, inner_size,
3693 &inner_size_body, block);
/* Clamp a possibly negative extent to zero so the allocation size is
   never negative.  */
3695 /* Check whether the size is negative. */
3696 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3697 gfc_index_zero_node);
3698 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3699 gfc_index_zero_node, size);
3700 size = gfc_evaluate_now (size, block);
3702 /* Allocate temporary for WHERE mask if needed. */
3704 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3707 /* Allocate temporary for !mask if needed. */
3709 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3715 /* Each time around this loop, the where clause is conditional
3716 on the value of mask and invert, which are updated at the
3717 bottom of the loop. */
3719 /* Has mask-expr. */
3722 /* Ensure that the WHERE mask will be evaluated exactly once.
3723 If there are no statements in this WHERE/ELSEWHERE clause,
3724 then we don't need to update the control mask (cmask).
3725 If this is the last clause of the WHERE construct, then
3726 we don't need to update the pending control mask (pmask). */
3728 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3730 cblock->next ? cmask : NULL_TREE,
3731 cblock->block ? pmask : NULL_TREE,
3734 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3736 (cblock->next || cblock->block)
3737 ? cmask : NULL_TREE,
3738 NULL_TREE, mask_type, block);
3742 /* It's a final elsewhere-stmt. No mask-expr is present. */
3746 /* The body of this where clause are controlled by cmask with
3747 sense specified by invert. */
3749 /* Get the assignment statement of a WHERE statement, or the first
3750 statement in where-body-construct of a WHERE construct. */
3751 cnext = cblock->next;
3756 /* WHERE assignment statement. */
3757 case EXEC_ASSIGN_CALL:
/* Recover lhs/rhs from the actual-argument list of a defined
   assignment call (the guarding case label/body braces are elided).  */
3759 arg = cnext->ext.actual;
3760 expr1 = expr2 = NULL;
3761 for (; arg; arg = arg->next)
3773 expr1 = cnext->expr1;
3774 expr2 = cnext->expr2;
3776 if (nested_forall_info != NULL)
3778 need_temp = gfc_check_dependency (expr1, expr2, 0);
3779 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3780 gfc_trans_assign_need_temp (expr1, expr2,
3782 nested_forall_info, block);
3785 /* Variables to control maskexpr. */
3786 count1 = gfc_create_var (gfc_array_index_type, "count1");
3787 count2 = gfc_create_var (gfc_array_index_type, "count2");
3788 gfc_add_modify (block, count1, gfc_index_zero_node);
3789 gfc_add_modify (block, count2, gfc_index_zero_node);
3791 tmp = gfc_trans_where_assign (expr1, expr2,
3796 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3798 gfc_add_expr_to_block (block, tmp);
3803 /* Variables to control maskexpr. */
3804 count1 = gfc_create_var (gfc_array_index_type, "count1");
3805 count2 = gfc_create_var (gfc_array_index_type, "count2");
3806 gfc_add_modify (block, count1, gfc_index_zero_node);
3807 gfc_add_modify (block, count2, gfc_index_zero_node);
3809 tmp = gfc_trans_where_assign (expr1, expr2,
3813 gfc_add_expr_to_block (block, tmp);
3818 /* WHERE or WHERE construct is part of a where-body-construct. */
3820 gfc_trans_where_2 (cnext, cmask, invert,
3821 nested_forall_info, block);
3828 /* The next statement within the same where-body-construct. */
3829 cnext = cnext->next;
3831 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3832 cblock = cblock->block;
3833 if (mask == NULL_TREE)
3835 /* If we're the initial WHERE, we can simply invert the sense
3836 of the current mask to obtain the "mask" for the remaining
3843 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3849 /* If we allocated a pending mask array, deallocate it now. */
3852 tmp = gfc_call_free (ppmask);
3853 gfc_add_expr_to_block (block, tmp);
3856 /* If we allocated a current mask array, deallocate it now. */
3859 tmp = gfc_call_free (pcmask);
3860 gfc_add_expr_to_block (block, tmp);
3864 /* Translate a simple WHERE construct or statement without dependencies.
3865 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3866 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3867 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
3870 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3872 stmtblock_t block, body;
3873 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3874 tree tmp, cexpr, tstmt, estmt;
3875 gfc_ss *css, *tdss, *tsss;
3876 gfc_se cse, tdse, tsse, edse, esse;
3881 /* Allow the scalarizer to workshare simple where loops. */
3882 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
3883 ompws_flags |= OMPWS_SCALARIZER_WS;
/* Pick apart the mask expression and the single assignment of each
   clause.  EDST/ESRC remain NULL when there is no ELSEWHERE block.  */
3885 cond = cblock->expr1;
3886 tdst = cblock->next->expr1;
3887 tsrc = cblock->next->expr2;
3888 edst = eblock ? eblock->next->expr1 : NULL;
3889 esrc = eblock ? eblock->next->expr2 : NULL;
3891 gfc_start_block (&block);
3892 gfc_init_loopinfo (&loop);
3894 /* Handle the condition. */
3895 gfc_init_se (&cse, NULL);
3896 css = gfc_walk_expr (cond);
3897 gfc_add_ss_to_loop (&loop, css);
3899 /* Handle the then-clause. */
3900 gfc_init_se (&tdse, NULL);
3901 gfc_init_se (&tsse, NULL);
3902 tdss = gfc_walk_expr (tdst);
3903 tsss = gfc_walk_expr (tsrc);
/* A scalar rhs gets wrapped in a GFC_SS_SCALAR node so the scalarizer
   can still advance it together with the array destination.  */
3904 if (tsss == gfc_ss_terminator)
3906 tsss = gfc_get_ss ();
3908 tsss->next = gfc_ss_terminator;
3909 tsss->type = GFC_SS_SCALAR;
3912 gfc_add_ss_to_loop (&loop, tdss);
3913 gfc_add_ss_to_loop (&loop, tsss);
3917 /* Handle the else clause. */
3918 gfc_init_se (&edse, NULL);
3919 gfc_init_se (&esse, NULL);
3920 edss = gfc_walk_expr (edst);
3921 esss = gfc_walk_expr (esrc);
/* Same scalar-rhs wrapping for the ELSEWHERE assignment.  */
3922 if (esss == gfc_ss_terminator)
3924 esss = gfc_get_ss ();
3926 esss->next = gfc_ss_terminator;
3927 esss->type = GFC_SS_SCALAR;
3930 gfc_add_ss_to_loop (&loop, edss);
3931 gfc_add_ss_to_loop (&loop, esss);
/* Compute bounds, set up the scalarization loop, and mark every SS
   chain as used so the loop variables get generated for them.  */
3934 gfc_conv_ss_startstride (&loop);
3935 gfc_conv_loop_setup (&loop, &tdst->where);
3937 gfc_mark_ss_chain_used (css, 1);
3938 gfc_mark_ss_chain_used (tdss, 1);
3939 gfc_mark_ss_chain_used (tsss, 1);
3942 gfc_mark_ss_chain_used (edss, 1);
3943 gfc_mark_ss_chain_used (esss, 1);
3946 gfc_start_scalarized_body (&loop, &body);
3948 gfc_copy_loopinfo_to_se (&cse, &loop);
3949 gfc_copy_loopinfo_to_se (&tdse, &loop);
3950 gfc_copy_loopinfo_to_se (&tsse, &loop);
3956 gfc_copy_loopinfo_to_se (&edse, &loop);
3957 gfc_copy_loopinfo_to_se (&esse, &loop);
/* Convert the mask and both assignments to scalar element form
   inside the loop body.  */
3962 gfc_conv_expr (&cse, cond);
3963 gfc_add_block_to_block (&body, &cse.pre);
3966 gfc_conv_expr (&tsse, tsrc);
3967 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3969 gfc_conv_tmp_array_ref (&tdse);
3970 gfc_advance_se_ss_chain (&tdse);
3973 gfc_conv_expr (&tdse, tdst);
3977 gfc_conv_expr (&esse, esrc);
3978 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3980 gfc_conv_tmp_array_ref (&edse);
3981 gfc_advance_se_ss_chain (&edse);
3984 gfc_conv_expr (&edse, edst);
/* Build the guarded element assignment: if (mask) then-assign else
   else-assign (or an empty stmt when there is no ELSEWHERE).
   NOTE(review): CEXPR is expected to carry the converted mask value
   from CSE -- its assignment is not visible in this excerpt; confirm.  */
3987 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
3988 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
3990 : build_empty_stmt (input_location);
3991 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3992 gfc_add_expr_to_block (&body, tmp);
3993 gfc_add_block_to_block (&body, &cse.post);
/* Generate the surrounding loops and assemble the final block.  */
3995 gfc_trans_scalarizing_loops (&loop, &body);
3996 gfc_add_block_to_block (&block, &loop.pre);
3997 gfc_add_block_to_block (&block, &loop.post);
3998 gfc_cleanup_loop (&loop);
4000 return gfc_finish_block (&block);
4003 /* As the WHERE or WHERE construct statement can be nested, we call
4004 gfc_trans_where_2 to do the translation, and pass the initial
4005 NULL values for both the control mask and the pending control mask. */
4008 gfc_trans_where (gfc_code * code)
/* Fast path: a WHERE whose then-clause is a single assignment (and
   whose optional ELSEWHERE, if any, is likewise a single assignment)
   can be emitted as one masked loop by gfc_trans_where_3, provided the
   dependence checks below all pass.  */
4014 cblock = code->block;
4016 && cblock->next->op == EXEC_ASSIGN
4017 && !cblock->next->next)
4019 eblock = cblock->block;
4022 /* A simple "WHERE (cond) x = y" statement or block is
4023 dependence free if cond is not dependent upon writing x,
4024 and the source y is unaffected by the destination x. */
4025 if (!gfc_check_dependency (cblock->next->expr1,
4027 && !gfc_check_dependency (cblock->next->expr1,
4028 cblock->next->expr2, 0))
4029 return gfc_trans_where_3 (cblock, NULL);
4031 else if (!eblock->expr1
4034 && eblock->next->op == EXEC_ASSIGN
4035 && !eblock->next->next)
4037 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4038 block is dependence free if cond is not dependent on writes
4039 to x1 and x2, y1 is not dependent on writes to x2, and y2
4040 is not dependent on writes to x1, and both y's are not
4041 dependent upon their own x's. In addition to this, the
4042 final two dependency checks below exclude all but the same
4043 array reference if the where and elswhere destinations
4044 are the same. In short, this is VERY conservative and this
4045 is needed because the two loops, required by the standard
4046 are coalesced in gfc_trans_where_3. */
4047 if (!gfc_check_dependency(cblock->next->expr1,
4049 && !gfc_check_dependency(eblock->next->expr1,
4051 && !gfc_check_dependency(cblock->next->expr1,
4052 eblock->next->expr2, 1)
4053 && !gfc_check_dependency(eblock->next->expr1,
4054 cblock->next->expr2, 1)
4055 && !gfc_check_dependency(cblock->next->expr1,
4056 cblock->next->expr2, 1)
4057 && !gfc_check_dependency(eblock->next->expr1,
4058 eblock->next->expr2, 1)
4059 && !gfc_check_dependency(cblock->next->expr1,
4060 eblock->next->expr1, 0)
4061 && !gfc_check_dependency(eblock->next->expr1,
4062 cblock->next->expr1, 0))
4063 return gfc_trans_where_3 (cblock, eblock);
/* General case: fall back to the full mask-based translation, with
   NULL initial control mask and pending control mask.  */
4067 gfc_start_block (&block);
4069 gfc_trans_where_2 (code, NULL, false, NULL, &block);
4071 return gfc_finish_block (&block);
4075 /* CYCLE a DO loop. The label decl has already been created by
4076 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4077 node at the head of the loop. We must mark the label as used. */
4080 gfc_trans_cycle (gfc_code * code)
/* Fetch the cycle label stashed by gfc_trans_do and emit a goto to it.
   TREE_USED must be set so the label is not discarded as unreferenced.  */
4084 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
4085 TREE_USED (cycle_label) = 1;
4086 return build1_v (GOTO_EXPR, cycle_label);
4090 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4091 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
4095 gfc_trans_exit (gfc_code * code)
/* Same pattern as gfc_trans_cycle, but using the exit label stored in
   TREE_VALUE of the loop's backend_decl.  */
4099 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
4100 TREE_USED (exit_label) = 1;
4101 return build1_v (GOTO_EXPR, exit_label);
4105 /* Translate the ALLOCATE statement. */
4108 gfc_trans_allocate (gfc_code * code)
4121 if (!code->ext.alloc.list)
4124 pstat = stat = error_label = tmp = memsz = NULL_TREE;
4126 gfc_start_block (&block);
4128 /* Either STAT= and/or ERRMSG is present. */
/* When STAT= (code->expr1) or ERRMSG= (code->expr2) is given, allocate
   a local integer status variable and a label to branch to as soon as
   any individual allocation fails.  */
4129 if (code->expr1 || code->expr2)
4131 tree gfc_int4_type_node = gfc_get_int_type (4);
4133 stat = gfc_create_var (gfc_int4_type_node, "stat");
4134 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4136 error_label = gfc_build_label_decl (NULL_TREE);
4137 TREE_USED (error_label) = 1;
/* Process each object in the allocation list in turn.  */
4140 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4142 expr = gfc_copy_expr (al->expr);
/* For CLASS objects, the allocation targets the $data component.  */
4144 if (expr->ts.type == BT_CLASS)
4145 gfc_add_component_ref (expr, "$data");
4147 gfc_init_se (&se, NULL);
4148 gfc_start_block (&se.pre);
4150 se.want_pointer = 1;
4151 se.descriptor_only = 1;
4152 gfc_conv_expr (&se, expr);
/* gfc_array_allocate handles array targets; if it declines we have a
   scalar or derived-type object and allocate it here.  */
4154 if (!gfc_array_allocate (&se, expr, pstat))
4156 /* A scalar or derived type. */
4158 /* Determine allocate size. */
/* For SOURCE= of CLASS type the size lives in the vtab ($vptr->$size)
   and must be read at run time; otherwise it is a compile-time
   TYPE_SIZE_UNIT of the appropriate type.  */
4159 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4163 sz = gfc_copy_expr (code->expr3);
4164 gfc_add_component_ref (sz, "$vptr");
4165 gfc_add_component_ref (sz, "$size");
4166 gfc_init_se (&se_sz, NULL);
4167 gfc_conv_expr (&se_sz, sz);
4171 else if (code->expr3 && code->expr3->ts.type != BT_CLASS)
4172 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
4173 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4174 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
4176 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4178 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4179 memsz = se.string_length;
4181 /* Allocate - for non-pointers with re-alloc checking. */
4188 /* Find the last reference in the chain. */
4189 while (ref && ref->next != NULL)
4191 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT);
/* Whether the object (or its ultimate component) is ALLOCATABLE
   decides between the status-checking allocation wrappers below.  */
4196 allocatable = expr->symtree->n.sym->attr.allocatable;
4198 allocatable = ref->u.c.component->attr.allocatable;
4201 tmp = gfc_allocate_array_with_status (&se.pre, se.expr, memsz,
4204 tmp = gfc_allocate_with_status (&se.pre, memsz, pstat);
4207 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
4208 fold_convert (TREE_TYPE (se.expr), tmp));
4209 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT=/ERRMSG=, branch to the error label when the runtime
   reported a nonzero status for this allocation.  */
4211 if (code->expr1 || code->expr2)
4213 tmp = build1_v (GOTO_EXPR, error_label);
4214 parm = fold_build2 (NE_EXPR, boolean_type_node,
4215 stat, build_int_cst (TREE_TYPE (stat), 0));
4216 tmp = fold_build3 (COND_EXPR, void_type_node,
4217 parm, tmp, build_empty_stmt (input_location))
4218 gfc_add_expr_to_block (&se.pre, tmp);
/* Derived types with allocatable components need their component
   pointers nullified so later (re)allocation checks behave.  */
4221 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4223 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4224 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4225 gfc_add_expr_to_block (&se.pre, tmp);
4230 tmp = gfc_finish_block (&se.pre);
4231 gfc_add_expr_to_block (&block, tmp);
4233 /* Initialization via SOURCE block. */
/* ALLOCATE (x, SOURCE=y): CLASS targets are filled with a raw memcpy
   of MEMSZ bytes; other targets use a normal assignment.  */
4236 gfc_expr *rhs = gfc_copy_expr (code->expr3);
4237 if (al->expr->ts.type == BT_CLASS)
4240 if (rhs->ts.type == BT_CLASS)
4241 gfc_add_component_ref (rhs, "$data");
4242 gfc_init_se (&dst, NULL);
4243 gfc_init_se (&src, NULL);
4244 gfc_conv_expr (&dst, expr);
4245 gfc_conv_expr (&src, rhs);
4246 gfc_add_block_to_block (&block, &src.pre);
4247 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
4250 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4252 gfc_free_expr (rhs);
4253 gfc_add_expr_to_block (&block, tmp);
4256 /* Allocation of CLASS entities. */
4257 gfc_free_expr (expr);
/* NOTE(review): expr is freed just above and then tested below --
   looks like a use-after-free ordering issue in the original; the
   elided lines may recompute expr, confirm against the full file.  */
4259 if (expr->ts.type == BT_CLASS)
4264 /* Initialize VPTR for CLASS objects. */
4265 lhs = gfc_expr_to_initialize (expr);
4266 gfc_add_component_ref (lhs, "$vptr");
4268 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4270 /* VPTR must be determined at run time. */
4271 rhs = gfc_copy_expr (code->expr3);
4272 gfc_add_component_ref (rhs, "$vptr");
4273 tmp = gfc_trans_pointer_assignment (lhs, rhs);
4274 gfc_add_expr_to_block (&block, tmp);
4275 gfc_free_expr (rhs);
4279 /* VPTR is fixed at compile time. */
/* Pick the declared type spec to look up the static vtab with.  */
4283 ts = &code->expr3->ts;
4284 else if (expr->ts.type == BT_DERIVED)
4286 else if (code->ext.alloc.ts.type == BT_DERIVED)
4287 ts = &code->ext.alloc.ts;
4288 else if (expr->ts.type == BT_CLASS)
4289 ts = &expr->ts.u.derived->components->ts;
4293 if (ts->type == BT_DERIVED)
4295 vtab = gfc_find_derived_vtab (ts->u.derived, true);
4297 gfc_trans_assign_vtab_procs (&block, ts->u.derived, vtab);
4298 gfc_init_se (&lse, NULL);
4299 lse.want_pointer = 1;
4300 gfc_conv_expr (&lse, lhs);
4301 tmp = gfc_build_addr_expr (NULL_TREE,
4302 gfc_get_symbol_decl (vtab));
4303 gfc_add_modify (&block, lse.expr,
4304 fold_convert (TREE_TYPE (lse.expr), tmp));
/* Error handling tail: place the error label, then store the status
   into the STAT= variable if one was given.  */
4314 tmp = build1_v (LABEL_EXPR, error_label);
4315 gfc_add_expr_to_block (&block, tmp);
4317 gfc_init_se (&se, NULL);
4318 gfc_conv_expr_lhs (&se, code->expr1);
4319 tmp = convert (TREE_TYPE (se.expr), stat);
4320 gfc_add_modify (&block, se.expr, tmp);
/* ERRMSG= handling: copy a fixed message into the user's character
   variable, truncated to its length, but only when stat != 0.  */
4326 /* A better error message may be possible, but not required. */
4327 const char *msg = "Attempt to allocate an allocated object";
4328 tree errmsg, slen, dlen;
4330 gfc_init_se (&se, NULL);
4331 gfc_conv_expr_lhs (&se, code->expr2);
4333 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4335 gfc_add_modify (&block, errmsg,
4336 gfc_build_addr_expr (pchar_type_node,
4337 gfc_build_localized_cstring_const (msg)));
4339 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4340 dlen = gfc_get_expr_charlen (code->expr2);
4341 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4343 dlen = build_call_expr_loc (input_location,
4344 built_in_decls[BUILT_IN_MEMCPY], 3,
4345 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4347 tmp = fold_build2 (NE_EXPR, boolean_type_node, stat,
4348 build_int_cst (TREE_TYPE (stat), 0));
4350 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4352 gfc_add_expr_to_block (&block, tmp);
4355 return gfc_finish_block (&block);
4359 /* Translate a DEALLOCATE statement. */
4362 gfc_trans_deallocate (gfc_code *code)
4367 tree apstat, astat, pstat, stat, tmp;
4370 pstat = apstat = stat = astat = tmp = NULL_TREE;
4372 gfc_start_block (&block);
4374 /* Count the number of failed deallocations. If deallocate() was
4375 called with STAT= , then set STAT to the count. If deallocate
4376 was called with ERRMSG, then set ERRMG to a string. */
/* STAT (per-object status) and ASTAT (running total of failures) are
   only created when the user asked for STAT= or ERRMSG=.  */
4377 if (code->expr1 || code->expr2)
4379 tree gfc_int4_type_node = gfc_get_int_type (4);
4381 stat = gfc_create_var (gfc_int4_type_node, "stat");
4382 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4384 /* Running total of possible deallocation failures. */
4385 astat = gfc_create_var (gfc_int4_type_node, "astat");
4386 apstat = gfc_build_addr_expr (NULL_TREE, astat);
4388 /* Initialize astat to 0. */
4389 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
/* Deallocate each object in the list in turn.  */
4392 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4395 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4397 gfc_init_se (&se, NULL);
4398 gfc_start_block (&se.pre);
4400 se.want_pointer = 1;
4401 se.descriptor_only = 1;
4402 gfc_conv_expr (&se, expr);
/* Derived types with allocatable components: free the components
   first, unless the object is reached through an ultimate POINTER
   (those components are not owned here).  */
4404 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4407 gfc_ref *last = NULL;
4408 for (ref = expr->ref; ref; ref = ref->next)
4409 if (ref->type == REF_COMPONENT)
4412 /* Do not deallocate the components of a derived type
4413 ultimate pointer component. */
4414 if (!(last && last->u.c.component->attr.pointer)
4415 && !(!last && expr->symtree->n.sym->attr.pointer))
4417 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
4419 gfc_add_expr_to_block (&se.pre, tmp);
/* Free the object itself: arrays via gfc_array_deallocate, scalars
   via the status-reporting runtime call; then null the pointer.  */
4424 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4427 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4428 gfc_add_expr_to_block (&se.pre, tmp);
4430 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4431 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4434 gfc_add_expr_to_block (&se.pre, tmp);
4436 /* Keep track of the number of failed deallocations by adding stat
4437 of the last deallocation to the running total. */
4438 if (code->expr1 || code->expr2)
4440 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4441 gfc_add_modify (&se.pre, astat, apstat);
4444 tmp = gfc_finish_block (&se.pre);
4445 gfc_add_expr_to_block (&block, tmp);
/* STAT= : copy the accumulated failure count into the user variable.  */
4452 gfc_init_se (&se, NULL);
4453 gfc_conv_expr_lhs (&se, code->expr1);
4454 tmp = convert (TREE_TYPE (se.expr), astat);
4455 gfc_add_modify (&block, se.expr, tmp);
/* ERRMSG= : copy a fixed message, truncated to the variable's length,
   but only when astat != 0 (mirrors the ALLOCATE error path).  */
4461 /* A better error message may be possible, but not required. */
4462 const char *msg = "Attempt to deallocate an unallocated object";
4463 tree errmsg, slen, dlen;
4465 gfc_init_se (&se, NULL);
4466 gfc_conv_expr_lhs (&se, code->expr2);
4468 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4470 gfc_add_modify (&block, errmsg,
4471 gfc_build_addr_expr (pchar_type_node,
4472 gfc_build_localized_cstring_const (msg)));
4474 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4475 dlen = gfc_get_expr_charlen (code->expr2);
4476 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4478 dlen = build_call_expr_loc (input_location,
4479 built_in_decls[BUILT_IN_MEMCPY], 3,
4480 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4482 tmp = fold_build2 (NE_EXPR, boolean_type_node, astat,
4483 build_int_cst (TREE_TYPE (astat), 0));
4485 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4487 gfc_add_expr_to_block (&block, tmp);
4490 return gfc_finish_block (&block);