1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
31 #include "trans-stmt.h"
32 #include "trans-types.h"
33 #include "trans-array.h"
34 #include "trans-const.h"
36 #include "dependency.h"
/* Per-dimension iterator state for FORALL translation; instances are
   chained through NEXT.  (NOTE(review): member list is elided in this
   listing; only the link field is visible.)  */
39 typedef struct iter_info
45 struct iter_info *next;
/* Context for one level of a (possibly nested) FORALL construct,
   linked outward through PREV_NEST.  */
49 typedef struct forall_info
56 struct forall_info *prev_nest;
/* Forward declaration: translating WHERE needs the enclosing FORALL
   context, so gfc_trans_where_2 takes a forall_info.  */
60 static void gfc_trans_where_2 (gfc_code *, tree, bool,
61 forall_info *, stmtblock_t *);
63 /* Translate a F95 label number to a LABEL_EXPR. */
/* Emit the LABEL_EXPR for the statement label attached to CODE->here.  */
66 gfc_trans_label_here (gfc_code * code)
68 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
72 /* Given a variable expression which has been ASSIGNed to, find the decl
73 containing the auxiliary variables. For variables in common blocks this
77 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only valid for variables the resolver marked as ASSIGN targets.  */
79 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
80 gfc_conv_expr (se, expr);
81 /* Deals with variable in common block. Get the field declaration. */
82 if (TREE_CODE (se->expr) == COMPONENT_REF)
83 se->expr = TREE_OPERAND (se->expr, 1);
84 /* Deals with dummy argument. Get the parameter declaration. */
85 else if (TREE_CODE (se->expr) == INDIRECT_REF)
86 se->expr = TREE_OPERAND (se->expr, 0);
89 /* Translate a label assignment statement. */
/* ASSIGN label TO var: store the label's address and a length tag in the
   auxiliary decls (GFC_DECL_ASSIGN_ADDR / GFC_DECL_STRING_LEN) so a later
   assigned GOTO or FORMAT reference can distinguish the two uses.  */
92 gfc_trans_label_assign (gfc_code * code)
101 /* Start a new block. */
102 gfc_init_se (&se, NULL);
103 gfc_start_block (&se.pre);
104 gfc_conv_label_variable (&se, code->expr1);
106 len = GFC_DECL_STRING_LEN (se.expr);
107 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
109 label_tree = gfc_get_label_decl (code->label1);
111 if (code->label1->defined == ST_LABEL_TARGET)
/* Branch target: store the label address; length -1 marks "not a
   FORMAT string".  */
113 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
114 len_tree = integer_minus_one_node;
/* Otherwise the label names a FORMAT; store the format string and its
   length instead.  */
118 gfc_expr *format = code->label1->format;
120 label_len = format->value.character.length;
121 len_tree = build_int_cst (NULL_TREE, label_len);
122 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
123 format->value.character.string);
124 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
127 gfc_add_modify (&se.pre, len, len_tree);
128 gfc_add_modify (&se.pre, addr, label_tree);
130 return gfc_finish_block (&se.pre);
133 /* Translate a GOTO statement. */
/* Handles both a plain GOTO to a statement label and an assigned GOTO
   through an ASSIGNed integer variable (with a runtime check that the
   stored value is a target label, i.e. length tag == -1).  */
136 gfc_trans_goto (gfc_code * code)
138 locus loc = code->loc;
144 if (code->label1 != NULL)
145 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
/* Assigned GOTO.  */
148 gfc_init_se (&se, NULL);
149 gfc_start_block (&se.pre);
150 gfc_conv_label_variable (&se, code->expr1);
151 tmp = GFC_DECL_STRING_LEN (se.expr);
152 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
153 build_int_cst (TREE_TYPE (tmp), -1));
154 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
155 "Assigned label is not a target label");
157 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
159 /* We're going to ignore a label list. It does not really change the
160 statement's semantics (because it is just a further restriction on
161 what's legal code); before, we were comparing label addresses here, but
162 that's a very fragile business and may break with optimization. So
165 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
166 gfc_add_expr_to_block (&se.pre, target);
167 return gfc_finish_block (&se.pre);
171 /* Translate an ENTRY statement. Just adds a label for this entry point. */
173 gfc_trans_entry (gfc_code * code)
175 return build1_v (LABEL_EXPR, code->ext.entry->label);
179 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
180 elemental subroutines. Make temporaries for output arguments if any such
181 dependencies are found. Output arguments are chosen because internal_unpack
182 can be used, as is, to copy the result back to the variable. */
184 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
185 gfc_symbol * sym, gfc_actual_arglist * arg,
186 gfc_dep_check check_variable)
188 gfc_actual_arglist *arg0;
190 gfc_formal_arglist *formal;
191 gfc_loopinfo tmp_loop;
/* Nothing to do unless the call was scalarized.  */
203 if (loopse->ss == NULL)
208 formal = sym->formal;
210 /* Loop over all the arguments testing for dependencies. */
211 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
217 /* Obtain the info structure for the current argument. */
219 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
223 info = &ss->data.info;
227 /* If there is a dependency, create a temporary and use it
228 instead of the variable. */
229 fsym = formal ? formal->sym : NULL;
230 if (e->expr_type == EXPR_VARIABLE
232 && fsym->attr.intent != INTENT_IN
233 && gfc_check_fncall_dependency (e, fsym->attr.intent,
234 sym, arg0, check_variable))
236 tree initial, temptype;
237 stmtblock_t temp_post;
239 /* Make a local loopinfo for the temporary creation, so that
240 none of the other ss->info's have to be renormalized. */
241 gfc_init_loopinfo (&tmp_loop);
242 for (n = 0; n < info->dimen; n++)
244 tmp_loop.to[n] = loopse->loop->to[n];
245 tmp_loop.from[n] = loopse->loop->from[n];
246 tmp_loop.order[n] = loopse->loop->order[n];
249 /* Obtain the argument descriptor for unpacking. */
250 gfc_init_se (&parmse, NULL);
251 parmse.want_pointer = 1;
253 /* The scalarizer introduces some specific peculiarities when
254 handling elemental subroutines; the stride can be needed up to
255 the dim_array - 1, rather than dim_loop - 1 to calculate
256 offsets outside the loop. For this reason, we make sure that
257 the descriptor has the dimensionality of the array by converting
258 trailing elements into ranges with end = start. */
259 for (ref = e->ref; ref; ref = ref->next)
260 if (ref->type == REF_ARRAY && ref->u.ar.type == AR_SECTION)
265 bool seen_range = false;
266 for (n = 0; n < ref->u.ar.dimen; n++)
268 if (ref->u.ar.dimen_type[n] == DIMEN_RANGE)
272 || ref->u.ar.dimen_type[n] != DIMEN_ELEMENT)
/* Promote a trailing element subscript to a degenerate range
   (end == start) so the descriptor keeps full rank.  */
275 ref->u.ar.end[n] = gfc_copy_expr (ref->u.ar.start[n]);
276 ref->u.ar.dimen_type[n] = DIMEN_RANGE;
280 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
281 gfc_add_block_to_block (&se->pre, &parmse.pre);
283 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
284 initialize the array temporary with a copy of the values. */
285 if (fsym->attr.intent == INTENT_INOUT
286 || (fsym->ts.type ==BT_DERIVED
287 && fsym->attr.intent == INTENT_OUT))
288 initial = parmse.expr;
292 /* Find the type of the temporary to create; we don't use the type
293 of e itself as this breaks for subcomponent-references in e (where
294 the type of e is that of the final reference, but parmse.expr's
295 type corresponds to the full derived-type). */
296 /* TODO: Fix this somehow so we don't need a temporary of the whole
297 array but instead only the components referenced. */
298 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
299 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
300 temptype = TREE_TYPE (temptype);
301 temptype = gfc_get_element_type (temptype);
303 /* Generate the temporary. Cleaning up the temporary should be the
304 very last thing done, so we add the code to a new block and add it
305 to se->post as last instructions. */
306 size = gfc_create_var (gfc_array_index_type, NULL);
307 data = gfc_create_var (pvoid_type_node, NULL);
308 gfc_init_block (&temp_post);
309 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
310 &tmp_loop, info, temptype,
314 gfc_add_modify (&se->pre, size, tmp);
315 tmp = fold_convert (pvoid_type_node, info->data);
316 gfc_add_modify (&se->pre, data, tmp);
318 /* Calculate the offset for the temporary. */
319 offset = gfc_index_zero_node;
320 for (n = 0; n < info->dimen; n++)
322 tmp = gfc_conv_descriptor_stride_get (info->descriptor,
324 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
325 loopse->loop->from[n], tmp);
326 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
329 info->offset = gfc_create_var (gfc_array_index_type, NULL);
330 gfc_add_modify (&se->pre, info->offset, offset);
332 /* Copy the result back using unpack. */
333 tmp = build_call_expr_loc (input_location,
334 gfor_fndecl_in_unpack, 2, parmse.expr, data);
335 gfc_add_expr_to_block (&se->post, tmp);
337 /* parmse.pre is already added above. */
338 gfc_add_block_to_block (&se->post, &parmse.post);
339 gfc_add_block_to_block (&se->post, &temp_post);
345 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
/* MASK/COUNT1/INVERT are used when the call is executed under a WHERE
   mask (elemental subroutine inside WHERE); DEPENDENCY_CHECK requests
   argument dependency analysis (operator assignment).  */
348 gfc_trans_call (gfc_code * code, bool dependency_check,
349 tree mask, tree count1, bool invert)
353 int has_alternate_specifier;
354 gfc_dep_check check_variable;
355 tree index = NULL_TREE;
356 tree maskexpr = NULL_TREE;
359 /* A CALL starts a new block because the actual arguments may have to
360 be evaluated first. */
361 gfc_init_se (&se, NULL);
362 gfc_start_block (&se.pre);
364 gcc_assert (code->resolved_sym);
366 ss = gfc_ss_terminator;
367 if (code->resolved_sym->attr.elemental)
368 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
370 /* Is not an elemental subroutine call with array valued arguments. */
371 if (ss == gfc_ss_terminator)
374 /* Translate the call. */
375 has_alternate_specifier
376 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
379 /* A subroutine without side-effect, by definition, does nothing! */
380 TREE_SIDE_EFFECTS (se.expr) = 1;
382 /* Chain the pieces together and return the block. */
383 if (has_alternate_specifier)
385 gfc_code *select_code;
/* Alternate returns: the front end follows the call with a SELECT on
   a hidden integer; store the call's result into that variable.  */
387 select_code = code->next;
388 gcc_assert(select_code->op == EXEC_SELECT);
389 sym = select_code->expr1->symtree->n.sym;
390 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
391 if (sym->backend_decl == NULL)
392 sym->backend_decl = gfc_get_symbol_decl (sym);
393 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
396 gfc_add_expr_to_block (&se.pre, se.expr);
398 gfc_add_block_to_block (&se.pre, &se.post);
403 /* An elemental subroutine call with array valued arguments has
411 /* gfc_walk_elemental_function_args renders the ss chain in the
412 reverse order to the actual argument order. */
413 ss = gfc_reverse_ss (ss);
415 /* Initialize the loop. */
416 gfc_init_se (&loopse, NULL);
417 gfc_init_loopinfo (&loop);
418 gfc_add_ss_to_loop (&loop, ss);
420 gfc_conv_ss_startstride (&loop);
421 /* TODO: gfc_conv_loop_setup generates a temporary for vector
422 subscripts. This could be prevented in the elemental case
423 as temporaries are handled separatedly
424 (below in gfc_conv_elemental_dependencies). */
425 gfc_conv_loop_setup (&loop, &code->expr1->where);
426 gfc_mark_ss_chain_used (ss, 1);
428 /* Convert the arguments, checking for dependencies. */
429 gfc_copy_loopinfo_to_se (&loopse, &loop);
432 /* For operator assignment, do dependency checking. */
433 if (dependency_check)
434 check_variable = ELEM_CHECK_VARIABLE;
436 check_variable = ELEM_DONT_CHECK_VARIABLE;
438 gfc_init_se (&depse, NULL);
439 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
440 code->ext.actual, check_variable);
442 gfc_add_block_to_block (&loop.pre, &depse.pre);
443 gfc_add_block_to_block (&loop.post, &depse.post);
445 /* Generate the loop body. */
446 gfc_start_scalarized_body (&loop, &body);
447 gfc_init_block (&block);
451 /* Form the mask expression according to the mask. */
453 maskexpr = gfc_build_array_ref (mask, index, NULL);
455 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
459 /* Add the subroutine call to the block. */
460 gfc_conv_procedure_call (&loopse, code->resolved_sym,
461 code->ext.actual, code->expr1, NULL);
/* Under a mask: call only where the mask is set, and advance the
   mask index COUNT1.  */
465 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
466 build_empty_stmt (input_location));
467 gfc_add_expr_to_block (&loopse.pre, tmp);
468 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
469 count1, gfc_index_one_node);
470 gfc_add_modify (&loopse.pre, count1, tmp);
473 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
475 gfc_add_block_to_block (&block, &loopse.pre);
476 gfc_add_block_to_block (&block, &loopse.post);
478 /* Finish up the loop block and the loop. */
479 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
480 gfc_trans_scalarizing_loops (&loop, &body);
481 gfc_add_block_to_block (&se.pre, &loop.pre);
482 gfc_add_block_to_block (&se.pre, &loop.post);
483 gfc_add_block_to_block (&se.pre, &se.post);
484 gfc_cleanup_loop (&loop);
487 return gfc_finish_block (&se.pre);
491 /* Translate the RETURN statement. */
/* Plain RETURN emits the generated return; RETURN <expr> (alternate
   return) stores the selector into the fake result variable first.  */
494 gfc_trans_return (gfc_code * code)
502 /* If code->expr is not NULL, this return statement must appear
503 in a subroutine and current_fake_result_decl has already
506 result = gfc_get_fake_result_decl (NULL, 0);
/* No * dummy to return through: warn and fall back to a plain return.  */
509 gfc_warning ("An alternate return at %L without a * dummy argument",
510 &code->expr1->where);
511 return gfc_generate_return ();
514 /* Start a new block for this statement. */
515 gfc_init_se (&se, NULL);
516 gfc_start_block (&se.pre);
518 gfc_conv_expr (&se, code->expr1);
520 /* Note that the actually returned expression is a simple value and
521 does not depend on any pointers or such; thus we can clean-up with
522 se.post before returning. */
523 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
524 fold_convert (TREE_TYPE (result), se.expr));
525 gfc_add_expr_to_block (&se.pre, tmp);
526 gfc_add_block_to_block (&se.pre, &se.post);
528 tmp = gfc_generate_return ();
529 gfc_add_expr_to_block (&se.pre, tmp);
530 return gfc_finish_block (&se.pre);
533 return gfc_generate_return ();
537 /* Translate the PAUSE statement. We have to translate this statement
538 to a runtime library call. */
541 gfc_trans_pause (gfc_code * code)
543 tree gfc_int4_type_node = gfc_get_int_type (4);
547 /* Start a new block for this statement. */
548 gfc_init_se (&se, NULL);
549 gfc_start_block (&se.pre);
/* Three forms: bare PAUSE, PAUSE <integer>, PAUSE <character>.  */
552 if (code->expr1 == NULL)
554 tmp = build_int_cst (gfc_int4_type_node, 0);
555 tmp = build_call_expr_loc (input_location,
556 gfor_fndecl_pause_string, 2,
557 build_int_cst (pchar_type_node, 0), tmp);
559 else if (code->expr1->ts.type == BT_INTEGER)
561 gfc_conv_expr (&se, code->expr1);
562 tmp = build_call_expr_loc (input_location,
563 gfor_fndecl_pause_numeric, 1,
564 fold_convert (gfc_int4_type_node, se.expr));
568 gfc_conv_expr_reference (&se, code->expr1);
569 tmp = build_call_expr_loc (input_location,
570 gfor_fndecl_pause_string, 2,
571 se.expr, se.string_length);
574 gfc_add_expr_to_block (&se.pre, tmp);
576 gfc_add_block_to_block (&se.pre, &se.post);
578 return gfc_finish_block (&se.pre);
582 /* Translate the STOP statement. We have to translate this statement
583 to a runtime library call. */
/* ERROR_STOP selects the ERROR STOP runtime entry points instead of the
   plain STOP ones; otherwise the three argument forms mirror PAUSE.  */
586 gfc_trans_stop (gfc_code *code, bool error_stop)
588 tree gfc_int4_type_node = gfc_get_int_type (4);
592 /* Start a new block for this statement. */
593 gfc_init_se (&se, NULL);
594 gfc_start_block (&se.pre);
596 if (code->expr1 == NULL)
598 tmp = build_int_cst (gfc_int4_type_node, 0);
599 tmp = build_call_expr_loc (input_location,
600 error_stop ? gfor_fndecl_error_stop_string
601 : gfor_fndecl_stop_string,
602 2, build_int_cst (pchar_type_node, 0), tmp);
604 else if (code->expr1->ts.type == BT_INTEGER)
606 gfc_conv_expr (&se, code->expr1);
607 tmp = build_call_expr_loc (input_location,
608 error_stop ? gfor_fndecl_error_stop_numeric
609 : gfor_fndecl_stop_numeric, 1,
610 fold_convert (gfc_int4_type_node, se.expr));
614 gfc_conv_expr_reference (&se, code->expr1);
615 tmp = build_call_expr_loc (input_location,
616 error_stop ? gfor_fndecl_error_stop_string
617 : gfor_fndecl_stop_string,
618 2, se.expr, se.string_length);
621 gfc_add_expr_to_block (&se.pre, tmp);
623 gfc_add_block_to_block (&se.pre, &se.post);
625 return gfc_finish_block (&se.pre);
/* Translate a SYNC image-control statement (TYPE is currently unused).
   Generates an image-number bounds check for SYNC IMAGES when -fcheck
   is active, and zeroes STAT (expr2) when present.  NOTE(review): with
   neither check nor STAT this appears to emit nothing — presumably
   single-image semantics make the sync a no-op; confirm against caller.  */
630 gfc_trans_sync (gfc_code *code, gfc_exec_op type __attribute__ ((unused)))
634 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
636 gfc_init_se (&se, NULL);
637 gfc_start_block (&se.pre);
640 /* Check SYNC IMAGES(imageset) for valid image index.
641 FIXME: Add a check for image-set arrays. */
642 if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
643 && code->expr1->rank == 0)
646 gfc_conv_expr (&se, code->expr1);
647 cond = fold_build2 (NE_EXPR, boolean_type_node, se.expr,
648 build_int_cst (TREE_TYPE (se.expr), 1));
649 gfc_trans_runtime_check (true, false, cond, &se.pre,
650 &code->expr1->where, "Invalid image number "
652 fold_convert (integer_type_node, se.expr));
655 /* If STAT is present, set it to zero. */
658 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
659 gfc_conv_expr (&se, code->expr2);
660 gfc_add_modify (&se.pre, se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
663 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
664 return gfc_finish_block (&se.pre);
670 /* Generate GENERIC for the IF construct. This function also deals with
671 the simple IF statement, because the front end translates the IF
672 statement into an IF construct.
704 where COND_S is the simplified version of the predicate. PRE_COND_S
705 are the pre side-effects produced by the translation of the
707 We need to build the chain recursively otherwise we run into
708 problems with folding incomplete statements. */
711 gfc_trans_if_1 (gfc_code * code)
716 /* Check for an unconditional ELSE clause. */
718 return gfc_trans_code (code->next);
720 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
721 gfc_init_se (&if_se, NULL);
722 gfc_start_block (&if_se.pre);
724 /* Calculate the IF condition expression. */
725 gfc_conv_expr_val (&if_se, code->expr1);
727 /* Translate the THEN clause. */
728 stmt = gfc_trans_code (code->next);
730 /* Translate the ELSE clause.  Recurses so each ELSE IF link in the
   chain is folded as its own complete COND_EXPR. */
732 elsestmt = gfc_trans_if_1 (code->block);
734 elsestmt = build_empty_stmt (input_location);
736 /* Build the condition expression and add it to the condition block. */
737 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
739 gfc_add_expr_to_block (&if_se.pre, stmt);
741 /* Finish off this statement. */
742 return gfc_finish_block (&if_se.pre);
/* Entry point for IF translation; delegates to gfc_trans_if_1.  */
746 gfc_trans_if (gfc_code * code)
748 /* Ignore the top EXEC_IF, it only announces an IF construct. The
749 actual code we must translate is in code->block. */
751 return gfc_trans_if_1 (code->block);
755 /* Translate an arithmetic IF expression.
757 IF (cond) label1, label2, label3 translates to
769 An optimized version can be generated in case of equal labels.
770 E.g., if label1 is equal to label2, we can translate it to
779 gfc_trans_arithmetic_if (gfc_code * code)
787 /* Start a new block. */
788 gfc_init_se (&se, NULL);
789 gfc_start_block (&se.pre);
791 /* Pre-evaluate COND. */
792 gfc_conv_expr_val (&se, code->expr1);
/* Evaluate once so the value is not recomputed per comparison.  */
793 se.expr = gfc_evaluate_now (se.expr, &se.pre);
795 /* Build something to compare with. */
796 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
798 if (code->label1->value != code->label2->value)
800 /* If (cond < 0) take branch1 else take branch2.
801 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
802 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
803 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
805 if (code->label1->value != code->label3->value)
806 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
808 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
810 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
/* label1 == label2: the negative and zero cases share a target.  */
813 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
815 if (code->label1->value != code->label3->value
816 && code->label2->value != code->label3->value)
818 /* if (cond <= 0) take branch1 else take branch2. */
819 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
820 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
821 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
824 /* Append the COND_EXPR to the evaluation of COND, and return. */
825 gfc_add_expr_to_block (&se.pre, branch1);
826 return gfc_finish_block (&se.pre);
830 /* Translate a CRITICAL block. */
/* NOTE(review): only the body is translated here — no locking is
   visible in this listing; presumably single-image semantics need
   no synchronization.  Confirm against the full file.  */
832 gfc_trans_critical (gfc_code *code)
837 gfc_start_block (&block);
838 tmp = gfc_trans_code (code->block->next);
839 gfc_add_expr_to_block (&block, tmp);
841 return gfc_finish_block (&block);
845 /* Translate a BLOCK construct. This is basically what we would do for a
849 gfc_trans_block_construct (gfc_code* code)
853 gfc_wrapped_block body;
/* The BLOCK carries its own namespace with local declarations.  */
855 ns = code->ext.block.ns;
860 gcc_assert (!sym->tlink);
863 gfc_process_block_locals (ns, code->ext.block.assoc);
865 gfc_start_wrapped_block (&body, gfc_trans_code (ns->code));
866 gfc_trans_deferred_vars (sym, &body);
868 return gfc_finish_wrapped_block (&body);
872 /* Translate the simple DO construct. This is where the loop variable has
873 integer type and step +-1. We can't use this in the general case
874 because integer overflow and floating point errors could give incorrect
876 We translate a do loop from:
878 DO dovar = from, to, step
884 [Evaluate loop bounds and step]
886 if ((step > 0) ? (dovar <= to) : (dovar => to))
892 cond = (dovar == to);
894 if (cond) goto end_label;
899 This helps the optimizers by avoiding the extra induction variable
900 used in the general case. */
903 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
904 tree from, tree to, tree step, tree exit_cond)
910 tree saved_dovar = NULL;
914 type = TREE_TYPE (dovar);
916 /* Initialize the DO variable: dovar = from. */
917 gfc_add_modify (pblock, dovar, from);
919 /* Save value for do-tinkering checking. */
920 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
922 saved_dovar = gfc_create_var (type, ".saved_dovar");
923 gfc_add_modify (pblock, saved_dovar, dovar);
926 /* Cycle and exit statements are implemented with gotos. */
927 cycle_label = gfc_build_label_decl (NULL_TREE);
928 exit_label = gfc_build_label_decl (NULL_TREE);
930 /* Put the labels where they can be found later. See gfc_trans_do(). */
931 code->block->cycle_label = cycle_label;
932 code->block->exit_label = exit_label;
935 gfc_start_block (&body);
937 /* Main loop body. */
938 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
939 gfc_add_expr_to_block (&body, tmp);
941 /* Label for cycle statements (if needed). */
942 if (TREE_USED (cycle_label))
944 tmp = build1_v (LABEL_EXPR, cycle_label);
945 gfc_add_expr_to_block (&body, tmp);
948 /* Check whether someone has modified the loop variable. */
949 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
951 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
952 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
953 "Loop variable has been modified");
956 /* Exit the loop if there is an I/O result condition or error. */
959 tmp = build1_v (GOTO_EXPR, exit_label);
960 tmp = fold_build3 (COND_EXPR, void_type_node, exit_cond, tmp,
961 build_empty_stmt (input_location));
962 gfc_add_expr_to_block (&body, tmp);
965 /* Evaluate the loop condition.  Tested BEFORE the increment so that
   dovar == to exits without overflowing past the bound. */
966 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
967 cond = gfc_evaluate_now (cond, &body);
969 /* Increment the loop variable. */
970 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
971 gfc_add_modify (&body, dovar, tmp);
973 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
974 gfc_add_modify (&body, saved_dovar, dovar);
977 tmp = build1_v (GOTO_EXPR, exit_label);
978 TREE_USED (exit_label) = 1;
979 tmp = fold_build3 (COND_EXPR, void_type_node,
980 cond, tmp, build_empty_stmt (input_location));
981 gfc_add_expr_to_block (&body, tmp);
983 /* Finish the loop body. */
984 tmp = gfc_finish_block (&body);
985 tmp = build1_v (LOOP_EXPR, tmp);
987 /* Only execute the loop if the number of iterations is positive. */
988 if (tree_int_cst_sgn (step) > 0)
989 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
991 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
992 tmp = fold_build3 (COND_EXPR, void_type_node,
993 cond, tmp, build_empty_stmt (input_location));
994 gfc_add_expr_to_block (pblock, tmp);
996 /* Add the exit label. */
997 tmp = build1_v (LABEL_EXPR, exit_label);
998 gfc_add_expr_to_block (pblock, tmp);
1000 return gfc_finish_block (pblock);
1003 /* Translate the DO construct. This obviously is one of the most
1004 important ones to get right with any compiler, but especially
1007 We special case some loop forms as described in gfc_trans_simple_do.
1008 For other cases we implement them with a separate loop count,
1009 as described in the standard.
1011 We translate a do loop from:
1013 DO dovar = from, to, step
1019 [evaluate loop bounds and step]
1020 empty = (step > 0 ? to < from : to > from);
1021 countm1 = (to - from) / step;
1023 if (empty) goto exit_label;
1029 if (countm1 ==0) goto exit_label;
1034 countm1 is an unsigned integer. It is equal to the loop count minus one,
1035 because the loop count itself can overflow. */
1038 gfc_trans_do (gfc_code * code, tree exit_cond)
1042 tree saved_dovar = NULL;
1057 gfc_start_block (&block);
1059 /* Evaluate all the expressions in the iterator. */
1060 gfc_init_se (&se, NULL);
1061 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1062 gfc_add_block_to_block (&block, &se.pre);
1064 type = TREE_TYPE (dovar);
1066 gfc_init_se (&se, NULL);
1067 gfc_conv_expr_val (&se, code->ext.iterator->start);
1068 gfc_add_block_to_block (&block, &se.pre);
/* Bounds and step are evaluated exactly once, up front.  */
1069 from = gfc_evaluate_now (se.expr, &block);
1071 gfc_init_se (&se, NULL);
1072 gfc_conv_expr_val (&se, code->ext.iterator->end);
1073 gfc_add_block_to_block (&block, &se.pre);
1074 to = gfc_evaluate_now (se.expr, &block);
1076 gfc_init_se (&se, NULL);
1077 gfc_conv_expr_val (&se, code->ext.iterator->step);
1078 gfc_add_block_to_block (&block, &se.pre);
1079 step = gfc_evaluate_now (se.expr, &block);
1081 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1083 tmp = fold_build2 (EQ_EXPR, boolean_type_node, step,
1084 fold_convert (type, integer_zero_node));
1085 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1086 "DO step value is zero");
1089 /* Special case simple loops. */
1090 if (TREE_CODE (type) == INTEGER_TYPE
1091 && (integer_onep (step)
1092 || tree_int_cst_equal (step, integer_minus_one_node)))
1093 return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);
1095 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
1096 fold_convert (type, integer_zero_node));
/* COUNTM1 is kept unsigned so the iteration count cannot overflow.  */
1098 if (TREE_CODE (type) == INTEGER_TYPE)
1099 utype = unsigned_type_for (type);
1101 utype = unsigned_type_for (gfc_array_index_type);
1102 countm1 = gfc_create_var (utype, "countm1");
1104 /* Cycle and exit statements are implemented with gotos. */
1105 cycle_label = gfc_build_label_decl (NULL_TREE);
1106 exit_label = gfc_build_label_decl (NULL_TREE);
1107 TREE_USED (exit_label) = 1;
1109 /* Initialize the DO variable: dovar = from. */
1110 gfc_add_modify (&block, dovar, from);
1112 /* Save value for do-tinkering checking. */
1113 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1115 saved_dovar = gfc_create_var (type, ".saved_dovar");
1116 gfc_add_modify (&block, saved_dovar, dovar);
1119 /* Initialize loop count and jump to exit label if the loop is empty.
1120 This code is executed before we enter the loop body. We generate:
1121 step_sign = sign(1,step);
1132 countm1 = (to*step_sign - from*step_sign) / (step*step_sign);
1136 if (TREE_CODE (type) == INTEGER_TYPE)
1138 tree pos, neg, step_sign, to2, from2, step2;
1140 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1142 tmp = fold_build2 (LT_EXPR, boolean_type_node, step,
1143 build_int_cst (TREE_TYPE (step), 0));
1144 step_sign = fold_build3 (COND_EXPR, type, tmp,
1145 build_int_cst (type, -1),
1146 build_int_cst (type, 1));
/* Empty-loop tests: with a positive step, to < from means zero
   iterations; with a negative step, to > from does.  */
1148 tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
1149 pos = fold_build3 (COND_EXPR, void_type_node, tmp,
1150 build1_v (GOTO_EXPR, exit_label),
1151 build_empty_stmt (input_location));
1153 tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
1154 neg = fold_build3 (COND_EXPR, void_type_node, tmp,
1155 build1_v (GOTO_EXPR, exit_label),
1156 build_empty_stmt (input_location));
1157 tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
1159 gfc_add_expr_to_block (&block, tmp);
1161 /* Calculate the loop count. to-from can overflow, so
1162 we cast to unsigned. */
1164 to2 = fold_build2 (MULT_EXPR, type, step_sign, to);
1165 from2 = fold_build2 (MULT_EXPR, type, step_sign, from);
1166 step2 = fold_build2 (MULT_EXPR, type, step_sign, step);
1167 step2 = fold_convert (utype, step2);
1168 tmp = fold_build2 (MINUS_EXPR, type, to2, from2);
1169 tmp = fold_convert (utype, tmp);
1170 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp, step2);
1171 tmp = fold_build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
1172 gfc_add_expr_to_block (&block, tmp);
/* Non-integer (real) DO variable.  */
1176 /* TODO: We could use the same width as the real type.
1177 This would probably cause more problems that it solves
1178 when we implement "long double" types. */
1180 tmp = fold_build2 (MINUS_EXPR, type, to, from);
1181 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
1182 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
1183 gfc_add_modify (&block, countm1, tmp);
1185 /* We need a special check for empty loops:
1186 empty = (step > 0 ? to < from : to > from); */
1187 tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
1188 fold_build2 (LT_EXPR, boolean_type_node, to, from),
1189 fold_build2 (GT_EXPR, boolean_type_node, to, from));
1190 /* If the loop is empty, go directly to the exit label. */
1191 tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
1192 build1_v (GOTO_EXPR, exit_label),
1193 build_empty_stmt (input_location));
1194 gfc_add_expr_to_block (&block, tmp);
1198 gfc_start_block (&body);
1200 /* Put these labels where they can be found later. */
1202 code->block->cycle_label = cycle_label;
1203 code->block->exit_label = exit_label;
1205 /* Main loop body. */
1206 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1207 gfc_add_expr_to_block (&body, tmp);
1209 /* Label for cycle statements (if needed). */
1210 if (TREE_USED (cycle_label))
1212 tmp = build1_v (LABEL_EXPR, cycle_label);
1213 gfc_add_expr_to_block (&body, tmp);
1216 /* Check whether someone has modified the loop variable. */
1217 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1219 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
1220 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1221 "Loop variable has been modified");
1224 /* Exit the loop if there is an I/O result condition or error. */
1227 tmp = build1_v (GOTO_EXPR, exit_label);
1228 tmp = fold_build3 (COND_EXPR, void_type_node, exit_cond, tmp,
1229 build_empty_stmt (input_location));
1230 gfc_add_expr_to_block (&body, tmp);
1233 /* Increment the loop variable. */
1234 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
1235 gfc_add_modify (&body, dovar, tmp);
1237 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1238 gfc_add_modify (&body, saved_dovar, dovar);
1240 /* End with the loop condition. Loop until countm1 == 0. */
1241 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
1242 build_int_cst (utype, 0));
1243 tmp = build1_v (GOTO_EXPR, exit_label);
1244 tmp = fold_build3 (COND_EXPR, void_type_node,
1245 cond, tmp, build_empty_stmt (input_location));
1246 gfc_add_expr_to_block (&body, tmp);
1248 /* Decrement the loop count. */
1249 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
1250 gfc_add_modify (&body, countm1, tmp);
1252 /* End of loop body. */
1253 tmp = gfc_finish_block (&body);
1255 /* The for loop itself. */
1256 tmp = build1_v (LOOP_EXPR, tmp);
1257 gfc_add_expr_to_block (&block, tmp);
1259 /* Add the exit label. */
1260 tmp = build1_v (LABEL_EXPR, exit_label);
1261 gfc_add_expr_to_block (&block, tmp);
1263 return gfc_finish_block (&block);
1267 /* Translate the DO WHILE construct.
1280 if (! cond) goto exit_label;
1286 Because the evaluation of the exit condition `cond' may have side
1287 effects, we can't do much for empty loop bodies. The backend optimizers
1288 should be smart enough to eliminate any dead loops. */
/* Returns the GENERIC tree for the whole DO WHILE construct.
   NOTE(review): this excerpt elides the return type, the local
   declarations and some braces.  */
1291 gfc_trans_do_while (gfc_code * code)
1299 /* Everything we build here is part of the loop body. */
1300 gfc_start_block (&block);
1302 /* Cycle and exit statements are implemented with gotos. */
1303 cycle_label = gfc_build_label_decl (NULL_TREE);
1304 exit_label = gfc_build_label_decl (NULL_TREE);
1306 /* Put the labels where they can be found later. See gfc_trans_do(). */
1307 code->block->cycle_label = cycle_label;
1308 code->block->exit_label = exit_label;
1310 /* Create a GIMPLE version of the exit condition. */
1311 gfc_init_se (&cond, NULL);
1312 gfc_conv_expr_val (&cond, code->expr1);
/* Any statements needed to evaluate the condition go before the test.  */
1313 gfc_add_block_to_block (&block, &cond.pre);
1314 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1316 /* Build "IF (! cond) GOTO exit_label". */
1317 tmp = build1_v (GOTO_EXPR, exit_label);
1318 TREE_USED (exit_label) = 1;
1319 tmp = fold_build3 (COND_EXPR, void_type_node,
1320 cond.expr, tmp, build_empty_stmt (input_location));
1321 gfc_add_expr_to_block (&block, tmp);
1323 /* The main body of the loop. */
1324 tmp = gfc_trans_code (code->block->next);
1325 gfc_add_expr_to_block (&block, tmp);
1327 /* Label for cycle statements (if needed). */
1328 if (TREE_USED (cycle_label))
1330 tmp = build1_v (LABEL_EXPR, cycle_label);
1331 gfc_add_expr_to_block (&block, tmp);
1334 /* End of loop body. */
1335 tmp = gfc_finish_block (&block);
/* Wrap the finished body in an (infinite) LOOP_EXPR; the conditional
   GOTO built above is what terminates it.  */
1337 gfc_init_block (&block);
1338 /* Build the loop. */
1339 tmp = build1_v (LOOP_EXPR, tmp);
1340 gfc_add_expr_to_block (&block, tmp);
1342 /* Add the exit label. */
1343 tmp = build1_v (LABEL_EXPR, exit_label);
1344 gfc_add_expr_to_block (&block, tmp);
1346 return gfc_finish_block (&block);
1350 /* Translate the SELECT CASE construct for INTEGER case expressions,
1351 without killing all potential optimizations. The problem is that
1352 Fortran allows unbounded cases, but the back-end does not, so we
1353 need to intercept those before we enter the equivalent SWITCH_EXPR
1356 For example, we translate this,
1359 CASE (:100,101,105:115)
1369 to the GENERIC equivalent,
1373 case (minimum value for typeof(expr) ... 100:
1379 case 200 ... (maximum value for typeof(expr):
/* Returns the translated construct as a GENERIC tree built around a
   SWITCH_EXPR.  NOTE(review): the return type, local declarations and
   some braces are elided in this excerpt.  */
1396 gfc_trans_integer_select (gfc_code * code)
1406 gfc_start_block (&block);
1408 /* Calculate the switch expression. */
1409 gfc_init_se (&se, NULL);
1410 gfc_conv_expr_val (&se, code->expr1);
1411 gfc_add_block_to_block (&block, &se.pre);
/* Shared branch target used to "break" out of each case.  */
1413 end_label = gfc_build_label_decl (NULL_TREE);
1415 gfc_init_block (&body)
1417 for (c = code->block; c; c = c->block)
1419 for (cp = c->ext.case_list; cp; cp = cp->next)
1424 /* Assume it's the default case. */
1425 low = high = NULL_TREE;
1429 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1432 /* If there's only a lower bound, set the high bound to the
1433 maximum value of the case expression. */
1435 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1440 /* Three cases are possible here:
1442 1) There is no lower bound, e.g. CASE (:N).
1443 2) There is a lower bound .NE. high bound, that is
1444 a case range, e.g. CASE (N:M) where M>N (we make
1445 sure that M>N during type resolution).
1446 3) There is a lower bound, and it has the same value
1447 as the high bound, e.g. CASE (N:N). This is our
1448 internal representation of CASE(N).
1450 In the first and second case, we need to set a value for
1451 high. In the third case, we don't because the GCC middle
1452 end represents a single case value by just letting high be
1453 a NULL_TREE. We can't do that because we need to be able
1454 to represent unbounded cases. */
1458 && mpz_cmp (cp->low->value.integer,
1459 cp->high->value.integer) != 0))
1460 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1463 /* Unbounded case. */
1465 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1468 /* Build a label. */
1469 label = gfc_build_label_decl (NULL_TREE);
1471 /* Add this case label.
1472 Add parameter 'label', make it match GCC backend. */
1473 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1475 gfc_add_expr_to_block (&body, tmp);
1478 /* Add the statements for this case. */
1479 tmp = gfc_trans_code (c->next);
1480 gfc_add_expr_to_block (&body, tmp);
1482 /* Break to the end of the construct. */
1483 tmp = build1_v (GOTO_EXPR, end_label);
1484 gfc_add_expr_to_block (&body, tmp);
/* Wrap all case bodies in a SWITCH_EXPR and emit the shared end label
   that the per-case GOTOs jump to.  */
1487 tmp = gfc_finish_block (&body);
1488 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1489 gfc_add_expr_to_block (&block, tmp);
1491 tmp = build1_v (LABEL_EXPR, end_label);
1492 gfc_add_expr_to_block (&block, tmp);
1494 return gfc_finish_block (&block);
1498 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1500 There are only two cases possible here, even though the standard
1501 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1502 .FALSE., and DEFAULT.
1504 We never generate more than two blocks here. Instead, we always
1505 try to eliminate the DEFAULT case. This way, we can translate this
1506 kind of SELECT construct to a simple
1510 expression in GENERIC. */
/* Returns the translated construct as a GENERIC tree (a COND_EXPR or a
   straight-line block).  NOTE(review): return type, remaining locals and
   braces are elided in this excerpt.  */
1513 gfc_trans_logical_select (gfc_code * code)
/* t, f and d track the case lists for .TRUE., .FALSE. and DEFAULT
   respectively.  NOTE(review): the assignments to d are elided here --
   confirm against the full source.  */
1516 gfc_code *t, *f, *d;
1521 /* Assume we don't have any cases at all. */
1524 /* Now see which ones we actually do have. We can have at most two
1525 cases in a single case list: one for .TRUE. and one for .FALSE.
1526 The default case is always separate. If the cases for .TRUE. and
1527 .FALSE. are in the same case list, the block for that case list
1528 always executed, and we don't generate code a COND_EXPR. */
1529 for (c = code->block; c; c = c->block)
1531 for (cp = c->ext.case_list; cp; cp = cp->next)
1535 if (cp->low->value.logical == 0) /* .FALSE. */
1537 else /* if (cp->value.logical != 0), thus .TRUE. */
1545 /* Start a new block. */
1546 gfc_start_block (&block);
1548 /* Calculate the switch expression. We always need to do this
1549 because it may have side effects. */
1550 gfc_init_se (&se, NULL);
1551 gfc_conv_expr_val (&se, code->expr1);
1552 gfc_add_block_to_block (&block, &se.pre);
1554 if (t == f && t != NULL)
1556 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1557 translate the code for these cases, append it to the current
1559 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1563 tree true_tree, false_tree, stmt;
/* Default to empty statements so a missing branch is a no-op.  */
1565 true_tree = build_empty_stmt (input_location);
1566 false_tree = build_empty_stmt (input_location);
1568 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1569 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1570 make the missing case the default case. */
1571 if (t != NULL && f != NULL)
1581 /* Translate the code for each of these blocks, and append it to
1582 the current block. */
1584 true_tree = gfc_trans_code (t->next);
1587 false_tree = gfc_trans_code (f->next);
1589 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1590 true_tree, false_tree);
1591 gfc_add_expr_to_block (&block, stmt);
1594 return gfc_finish_block (&block);
1598 /* The jump table types are stored in static variables to avoid
1599 constructing them from scratch every single time. */
/* Index 0 is used for character kind 1, index 1 for kind 4 (see the
   kind checks below that compute 'k').  */
1600 static GTY(()) tree select_struct[2];
1602 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1603 Instead of generating compares and jumps, it is far simpler to
1604 generate a data structure describing the cases in order and call a
1605 library subroutine that locates the right case.
1606 This is particularly true because this is the only case where we
1607 might have to dispose of a temporary.
1608 The library subroutine returns a pointer to jump to or NULL if no
1609 branches are to be taken. */
/* Returns the translated construct as a GENERIC tree.  NOTE(review):
   the return type, several local declarations and braces are elided in
   this excerpt.  */
1612 gfc_trans_character_select (gfc_code *code)
1614 tree init, end_label, tmp, type, case_num, label, fndecl;
1615 stmtblock_t block, body;
1620 VEC(constructor_elt,gc) *inits = NULL;
1622 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
1624 /* The jump table types are stored in static variables to avoid
1625 constructing them from scratch every single time. */
1626 static tree ss_string1[2], ss_string1_len[2];
1627 static tree ss_string2[2], ss_string2_len[2];
1628 static tree ss_target[2];
/* Rewind to the leftmost (lowest) case in the sorted case tree.  */
1630 cp = code->block->ext.case_list;
1631 while (cp->left != NULL)
1634 /* Generate the body */
1635 gfc_start_block (&block);
1636 gfc_init_se (&expr1se, NULL);
1637 gfc_conv_expr_reference (&expr1se, code->expr1);
1639 gfc_add_block_to_block (&block, &expr1se.pre);
1641 end_label = gfc_build_label_decl (NULL_TREE);
1643 gfc_init_block (&body);
1645 /* Attempt to optimize length 1 selects. */
/* Fast path: when the selector has length 1 the whole construct can be
   lowered to an integer SWITCH_EXPR on the single character, provided
   every case label is distinguishable by its first character.  */
1646 if (expr1se.string_length == integer_one_node)
1648 for (d = cp; d; d = d->right)
1653 gcc_assert (d->low->expr_type == EXPR_CONSTANT
1654 && d->low->ts.type == BT_CHARACTER);
1655 if (d->low->value.character.length > 1)
1657 for (i = 1; i < d->low->value.character.length; i++)
1658 if (d->low->value.character.string[i] != ' ')
/* Trailing blanks are insignificant in Fortran character comparison,
   so only a non-blank past position 0 disqualifies the fast path.  */
1660 if (i != d->low->value.character.length)
1662 if (optimize && d->high && i == 1)
1664 gcc_assert (d->high->expr_type == EXPR_CONSTANT
1665 && d->high->ts.type == BT_CHARACTER);
1666 if (d->high->value.character.length > 1
1667 && (d->low->value.character.string[0]
1668 == d->high->value.character.string[0])
1669 && d->high->value.character.string[1] != ' '
1670 && ((d->low->value.character.string[1] < ' ')
1671 == (d->high->value.character.string[1]
1681 gcc_assert (d->high->expr_type == EXPR_CONSTANT
1682 && d->high->ts.type == BT_CHARACTER);
1683 if (d->high->value.character.length > 1)
1685 for (i = 1; i < d->high->value.character.length; i++)
1686 if (d->high->value.character.string[i] != ' ')
1688 if (i != d->high->value.character.length)
1695 tree ctype = gfc_get_char_type (code->expr1->ts.kind);
1697 for (c = code->block; c; c = c->block)
1699 for (cp = c->ext.case_list; cp; cp = cp->next)
1705 /* Assume it's the default case. */
1706 low = high = NULL_TREE;
1710 /* CASE ('ab') or CASE ('ab':'az') will never match
1711 any length 1 character. */
1712 if (cp->low->value.character.length > 1
1713 && cp->low->value.character.string[1] != ' ')
1716 if (cp->low->value.character.length > 0)
1717 r = cp->low->value.character.string[0];
1720 low = build_int_cst (ctype, r);
1722 /* If there's only a lower bound, set the high bound
1723 to the maximum value of the case expression. */
1725 high = TYPE_MAX_VALUE (ctype);
1731 || (cp->low->value.character.string[0]
1732 != cp->high->value.character.string[0]))
1734 if (cp->high->value.character.length > 0)
1735 r = cp->high->value.character.string[0];
1738 high = build_int_cst (ctype, r);
1741 /* Unbounded case. */
1743 low = TYPE_MIN_VALUE (ctype);
1746 /* Build a label. */
1747 label = gfc_build_label_decl (NULL_TREE);
1749 /* Add this case label.
1750 Add parameter 'label', make it match GCC backend. */
1751 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1753 gfc_add_expr_to_block (&body, tmp);
1756 /* Add the statements for this case. */
1757 tmp = gfc_trans_code (c->next);
1758 gfc_add_expr_to_block (&body, tmp);
1760 /* Break to the end of the construct. */
1761 tmp = build1_v (GOTO_EXPR, end_label);
1762 gfc_add_expr_to_block (&body, tmp);
/* Extract the single character of the selector and switch on it.  */
1765 tmp = gfc_string_to_single_character (expr1se.string_length,
1767 code->expr1->ts.kind);
1768 case_num = gfc_create_var (ctype, "case_num");
1769 gfc_add_modify (&block, case_num, tmp);
1771 gfc_add_block_to_block (&block, &expr1se.post);
1773 tmp = gfc_finish_block (&body);
1774 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1775 gfc_add_expr_to_block (&block, tmp);
1777 tmp = build1_v (LABEL_EXPR, end_label);
1778 gfc_add_expr_to_block (&block, tmp);
1780 return gfc_finish_block (&block);
/* General path: build a static jump table and let a library routine
   pick the matching case at run time.  'k' indexes the per-kind static
   type caches (kind=1 vs kind=4).  NOTE(review): the assignments to k
   are elided in this excerpt.  */
1784 if (code->expr1->ts.kind == 1)
1786 else if (code->expr1->ts.kind == 4)
1791 if (select_struct[k] == NULL)
1794 select_struct[k] = make_node (RECORD_TYPE);
1796 if (code->expr1->ts.kind == 1)
1797 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1798 else if (code->expr1->ts.kind == 4)
1799 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
1804 #define ADD_FIELD(NAME, TYPE) \
1805 ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \
1806 get_identifier (stringize(NAME)), \
1810 ADD_FIELD (string1, pchartype);
1811 ADD_FIELD (string1_len, gfc_charlen_type_node);
1813 ADD_FIELD (string2, pchartype);
1814 ADD_FIELD (string2_len, gfc_charlen_type_node);
1816 ADD_FIELD (target, integer_type_node);
1819 gfc_finish_type (select_struct[k]);
/* NOTE(review): loop body elided here; presumably this pass numbers
   the cases / counts them into 'n' -- confirm against full source.  */
1823 for (d = cp; d; d = d->right)
1826 for (c = code->block; c; c = c->block)
1828 for (d = c->ext.case_list; d; d = d->next)
1830 label = gfc_build_label_decl (NULL_TREE);
1831 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1832 (d->low == NULL && d->high == NULL)
1833 ? NULL : build_int_cst (NULL_TREE, d->n),
1835 gfc_add_expr_to_block (&body, tmp);
1838 tmp = gfc_trans_code (c->next);
1839 gfc_add_expr_to_block (&body, tmp);
1841 tmp = build1_v (GOTO_EXPR, end_label);
1842 gfc_add_expr_to_block (&body, tmp);
1845 /* Generate the structure describing the branches */
1846 for (d = cp; d; d = d->right)
1848 VEC(constructor_elt,gc) *node = NULL;
1850 gfc_init_se (&se, NULL);
/* A NULL low/high bound is encoded as a null string pointer with a
   zero length so the library routine can recognize unbounded cases.  */
1854 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node);
1855 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node);
1859 gfc_conv_expr_reference (&se, d->low);
1861 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr);
1862 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length);
1865 if (d->high == NULL)
1867 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node);
1868 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node);
1872 gfc_init_se (&se, NULL);
1873 gfc_conv_expr_reference (&se, d->high);
1875 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr);
1876 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length);
1879 CONSTRUCTOR_APPEND_ELT (node, ss_target[k],
1880 build_int_cst (integer_type_node, d->n));
1882 tmp = build_constructor (select_struct[k], node);
1883 CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp);
1886 type = build_array_type (select_struct[k],
1887 build_index_type (build_int_cst (NULL_TREE, n-1)));
1889 init = build_constructor (type, inits);
1890 TREE_CONSTANT (init) = 1;
1891 TREE_STATIC (init) = 1;
1892 /* Create a static variable to hold the jump table. */
1893 tmp = gfc_create_var (type, "jumptable");
1894 TREE_CONSTANT (tmp) = 1;
1895 TREE_STATIC (tmp) = 1;
1896 TREE_READONLY (tmp) = 1;
1897 DECL_INITIAL (tmp) = init;
1900 /* Build the library call */
1901 init = gfc_build_addr_expr (pvoid_type_node, init);
1903 if (code->expr1->ts.kind == 1)
1904 fndecl = gfor_fndecl_select_string;
1905 else if (code->expr1->ts.kind == 4)
1906 fndecl = gfor_fndecl_select_string_char4;
1910 tmp = build_call_expr_loc (input_location,
1911 fndecl, 4, init, build_int_cst (NULL_TREE, n),
1912 expr1se.expr, expr1se.string_length);
/* The library routine returns the case number; switch on it.  */
1913 case_num = gfc_create_var (integer_type_node, "case_num");
1914 gfc_add_modify (&block, case_num, tmp);
1916 gfc_add_block_to_block (&block, &expr1se.post);
1918 tmp = gfc_finish_block (&body);
1919 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1920 gfc_add_expr_to_block (&block, tmp);
1922 tmp = build1_v (LABEL_EXPR, end_label);
1923 gfc_add_expr_to_block (&block, tmp);
1925 return gfc_finish_block (&block);
1929 /* Translate the three variants of the SELECT CASE construct.
1931 SELECT CASEs with INTEGER case expressions can be translated to an
1932 equivalent GENERIC switch statement, and for LOGICAL case
1933 expressions we build one or two if-else compares.
1935 SELECT CASEs with CHARACTER case expressions are a whole different
1936 story, because they don't exist in GENERIC. So we sort them and
1937 do a binary search at runtime.
1939 Fortran has no BREAK statement, and it does not allow jumps from
1940 one case block to another. That makes things a lot easier for
/* Dispatcher: picks the per-type translation routine above based on the
   type of the case expression.  NOTE(review): the return type line is
   elided in this excerpt.  */
1944 gfc_trans_select (gfc_code * code)
1946 gcc_assert (code && code->expr1);
1948 /* Empty SELECT constructs are legal. */
1949 if (code->block == NULL)
1950 return build_empty_stmt (input_location);
1952 /* Select the correct translation function. */
1953 switch (code->expr1->ts.type)
1955 case BT_LOGICAL: return gfc_trans_logical_select (code);
1956 case BT_INTEGER: return gfc_trans_integer_select (code);
1957 case BT_CHARACTER: return gfc_trans_character_select (code);
/* Any other type is a front-end bug, not a user error.  */
1959 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1965 /* Traversal function to substitute a replacement symtree if the symbol
1966 in the expression is the same as that passed. f == 2 signals that
1967 that variable itself is not to be checked - only the references.
1968 This group of functions is used when the variable expression in a
1969 FORALL assignment has internal references. For example:
1970 FORALL (i = 1:4) p(p(i)) = i
1971 The only recourse here is to store a copy of 'p' for the index
/* Shared state for the replace/restore pair below: old_symtree is the
   original symtree, new_symtree the temporary standing in for it.  */
1974 static gfc_symtree *new_symtree;
1975 static gfc_symtree *old_symtree;
/* Callback for gfc_traverse_expr; redirects references to SYM to the
   replacement symtree.  NOTE(review): return type and braces elided.  */
1978 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1980 if (expr->expr_type != EXPR_VARIABLE)
1985 else if (expr->symtree->n.sym == sym)
1986 expr->symtree = new_symtree;
/* Walk expression E replacing every reference to SYM with the temporary
   symtree, via the forall_replace callback.  F is forwarded to the
   callback (f == 2 skips the top-level variable itself).  */
1992 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1994 gfc_traverse_expr (e, sym, forall_replace, f);
/* Callback for gfc_traverse_expr; undoes forall_replace by pointing
   references back at the original symtree.  */
1998 forall_restore (gfc_expr *expr,
1999 gfc_symbol *sym ATTRIBUTE_UNUSED,
2000 int *f ATTRIBUTE_UNUSED)
2002 if (expr->expr_type != EXPR_VARIABLE)
2005 if (expr->symtree == new_symtree)
2006 expr->symtree = old_symtree;
/* Walk expression E restoring the original symtree everywhere the
   temporary one was substituted.  */
2012 forall_restore_symtree (gfc_expr *e)
2014 gfc_traverse_expr (e, NULL, forall_restore, 0);
/* Make a complete copy of the FORALL assignment's lvalue variable into a
   temporary, create a fake symbol/symtree for the copy, and redirect both
   sides of the assignment C to use it.  Setup code is appended to PRE and
   cleanup code to POST.  NOTE(review): return type, some declarations and
   braces are elided in this excerpt.  */
2018 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2023 gfc_symbol *new_sym;
2024 gfc_symbol *old_sym;
2028 /* Build a copy of the lvalue. */
2029 old_symtree = c->expr1->symtree;
2030 old_sym = old_symtree->n.sym;
2031 e = gfc_lval_expr_from_sym (old_sym);
2032 if (old_sym->attr.dimension)
2034 gfc_init_se (&tse, NULL);
/* Array case: pack the variable into a temporary descriptor.  */
2035 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
2036 gfc_add_block_to_block (pre, &tse.pre);
2037 gfc_add_block_to_block (post, &tse.post);
2038 tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
2040 if (e->ts.type != BT_CHARACTER)
2042 /* Use the variable offset for the temporary. */
2043 tmp = gfc_conv_array_offset (old_sym->backend_decl);
2044 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
/* Scalar case: create a plain temporary and assign the value.  */
2049 gfc_init_se (&tse, NULL);
2050 gfc_init_se (&rse, NULL);
2051 gfc_conv_expr (&rse, e);
2052 if (e->ts.type == BT_CHARACTER)
2054 tse.string_length = rse.string_length;
2055 tmp = gfc_get_character_type_len (gfc_default_character_kind,
2057 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
2059 gfc_add_block_to_block (pre, &tse.pre);
2060 gfc_add_block_to_block (post, &tse.post);
2064 tmp = gfc_typenode_for_spec (&e->ts);
2065 tse.expr = gfc_create_var (tmp, "temp");
2068 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
2069 e->expr_type == EXPR_VARIABLE, true);
2070 gfc_add_expr_to_block (pre, tmp);
2074 /* Create a new symbol to represent the lvalue. */
2075 new_sym = gfc_new_symbol (old_sym->name, NULL);
2076 new_sym->ts = old_sym->ts;
2077 new_sym->attr.referenced = 1;
2078 new_sym->attr.temporary = 1;
2079 new_sym->attr.dimension = old_sym->attr.dimension;
2080 new_sym->attr.flavor = old_sym->attr.flavor;
2082 /* Use the temporary as the backend_decl. */
2083 new_sym->backend_decl = tse.expr;
2085 /* Create a fake symtree for it. */
2087 new_symtree = gfc_new_symtree (&root, old_sym->name);
2088 new_symtree->n.sym = new_sym;
2089 gcc_assert (new_symtree == root);
2091 /* Go through the expression reference replacing the old_symtree
2093 forall_replace_symtree (c->expr1, old_sym, 2);
2095 /* Now we have made this temporary, we might as well use it for
2096 the right hand side. */
2097 forall_replace_symtree (c->expr2, old_sym, 1);
2101 /* Handles dependencies in forall assignments. */
/* Checks assignment C for dependencies between its lhs and rhs and, when
   needed, rewrites it to assign through a temporary (setup in PRE,
   cleanup in POST).  NOTE(review): return type, some declarations,
   braces and the final return are elided in this excerpt.  */
2103 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2110 lsym = c->expr1->symtree->n.sym;
2111 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
2113 /* Now check for dependencies within the 'variable'
2114 expression itself. These are treated by making a complete
2115 copy of variable and changing all the references to it
2116 point to the copy instead. Note that the shallow copy of
2117 the variable will not suffice for derived types with
2118 pointer components. We therefore leave these to their
2120 if (lsym->ts.type == BT_DERIVED
2121 && lsym->ts.u.derived->attr.pointer_comp)
2125 if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
2127 forall_make_variable_temp (c, pre, post);
2131 /* Substrings with dependencies are treated in the same
2133 if (c->expr1->ts.type == BT_CHARACTER
2135 && c->expr2->expr_type == EXPR_VARIABLE
2136 && lsym == c->expr2->symtree->n.sym)
2138 for (lref = c->expr1->ref; lref; lref = lref->next)
2139 if (lref->type == REF_SUBSTRING)
2141 for (rref = c->expr2->ref; rref; rref = rref->next)
2142 if (rref->type == REF_SUBSTRING)
/* Only an rhs substring that starts before the lhs substring can
   overwrite data still to be read.  */
2146 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
2148 forall_make_variable_temp (c, pre, post);
/* Undo the symtree substitution made by forall_make_variable_temp for
   assignment C and release the fake symbol/symtree.  */
2157 cleanup_forall_symtrees (gfc_code *c)
2159 forall_restore_symtree (c->expr1);
2160 forall_restore_symtree (c->expr2);
2161 gfc_free (new_symtree->n.sym);
2162 gfc_free (new_symtree);
2166 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2167 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2168 indicates whether we should generate code to test the FORALLs mask
2169 array. OUTER is the loop header to be used for initializing mask
2172 The generated loop format is:
2173 count = (end - start + step) / step
/* Returns BODY wrapped in one counted loop per FORALL index variable.
   NOTE(review): return type, some declarations and braces are elided in
   this excerpt; the final return (of 'body') is also outside it.  */
2186 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
2187 int mask_flag, stmtblock_t *outer)
2195 tree var, start, end, step;
2198 /* Initialize the mask index outside the FORALL nest. */
2199 if (mask_flag && forall_tmp->mask)
2200 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
/* Wrap the body in the loops, innermost index first.  */
2202 iter = forall_tmp->this_loop;
2203 nvar = forall_tmp->nvar;
2204 for (n = 0; n < nvar; n++)
2207 start = iter->start;
2211 exit_label = gfc_build_label_decl (NULL_TREE);
2212 TREE_USED (exit_label) = 1;
2214 /* The loop counter. */
2215 count = gfc_create_var (TREE_TYPE (var), "count");
2217 /* The body of the loop. */
2218 gfc_init_block (&block);
2220 /* The exit condition. */
2221 cond = fold_build2 (LE_EXPR, boolean_type_node,
2222 count, build_int_cst (TREE_TYPE (count), 0));
2223 tmp = build1_v (GOTO_EXPR, exit_label);
2224 tmp = fold_build3 (COND_EXPR, void_type_node,
2225 cond, tmp, build_empty_stmt (input_location));
2226 gfc_add_expr_to_block (&block, tmp);
2228 /* The main loop body. */
2229 gfc_add_expr_to_block (&block, body);
2231 /* Increment the loop variable. */
2232 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
2233 gfc_add_modify (&block, var, tmp);
2235 /* Advance to the next mask element. Only do this for the
2237 if (n == 0 && mask_flag && forall_tmp->mask)
2239 tree maskindex = forall_tmp->maskindex;
2240 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2241 maskindex, gfc_index_one_node);
2242 gfc_add_modify (&block, maskindex, tmp);
2245 /* Decrement the loop counter. */
2246 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
2247 build_int_cst (TREE_TYPE (var), 1));
2248 gfc_add_modify (&block, count, tmp);
2250 body = gfc_finish_block (&block);
2252 /* Loop var initialization. */
2253 gfc_init_block (&block);
2254 gfc_add_modify (&block, var, start);
2257 /* Initialize the loop counter. */
/* count = (end - start + step) / step, per the comment above.  */
2258 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
2259 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
2260 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
2261 gfc_add_modify (&block, count, tmp);
2263 /* The loop expression. */
2264 tmp = build1_v (LOOP_EXPR, body);
2265 gfc_add_expr_to_block (&block, tmp);
2267 /* The exit label. */
2268 tmp = build1_v (LABEL_EXPR, exit_label);
2269 gfc_add_expr_to_block (&block, tmp);
2271 body = gfc_finish_block (&block);
2278 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2279 is nonzero, the body is controlled by all masks in the forall nest.
2280 Otherwise, the innermost loop is not controlled by it's mask. This
2281 is used for initializing that mask. */
/* Returns BODY wrapped in the loops of the whole FORALL nest, walking
   from the innermost nest level outwards via prev_nest.  NOTE(review):
   return type, some declarations and braces are elided here.  */
2284 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
2289 forall_info *forall_tmp;
2290 tree mask, maskindex;
2292 gfc_start_block (&header);
2294 forall_tmp = nested_forall_info;
2295 while (forall_tmp != NULL)
2297 /* Generate body with masks' control. */
2300 mask = forall_tmp->mask;
2301 maskindex = forall_tmp->maskindex;
2303 /* If a mask was specified make the assignment conditional. */
2306 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2307 body = build3_v (COND_EXPR, tmp, body,
2308 build_empty_stmt (input_location));
2311 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2312 forall_tmp = forall_tmp->prev_nest;
2316 gfc_add_expr_to_block (&header, body);
2317 return gfc_finish_block (&header);
2321 /* Allocate data for holding a temporary array. Returns either a local
2322 temporary array or a pointer variable. */
/* On the heap path, *PDATA receives the raw pointer (for later freeing);
   allocation code is emitted into PBLOCK.  NOTE(review): return type,
   some declarations, braces and the return statements are elided.  */
2325 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2332 if (INTEGER_CST_P (size))
2334 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
2335 gfc_index_one_node);
/* Build the array type [0 .. size-1] of elem_type.  */
2340 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2341 type = build_array_type (elem_type, type);
2342 if (gfc_can_put_var_on_stack (bytesize))
2344 gcc_assert (INTEGER_CST_P (size));
2345 tmpvar = gfc_create_var (type, "temp");
/* Otherwise allocate on the heap via the malloc wrapper.  */
2350 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2351 *pdata = convert (pvoid_type_node, tmpvar);
2353 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2354 gfc_add_modify (pblock, tmpvar, tmp);
2360 /* Generate codes to copy the temporary to the actual lhs. */
/* TMP1 is the temporary's data, COUNT1 its running element index, COUNT3
   the index into WHEREMASK (whose sense INVERT may flip).  Returns the
   copy code as a tree.  NOTE(review): return type, some declarations,
   braces and the return statements are elided in this excerpt.  */
2363 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2364 tree count1, tree wheremask, bool invert)
2368 stmtblock_t block, body;
2374 lss = gfc_walk_expr (expr);
2376 if (lss == gfc_ss_terminator)
/* Scalar lhs: a single assignment from the temporary, no loop.  */
2378 gfc_start_block (&block);
2380 gfc_init_se (&lse, NULL);
2382 /* Translate the expression. */
2383 gfc_conv_expr (&lse, expr);
2385 /* Form the expression for the temporary. */
2386 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2388 /* Use the scalar assignment as is. */
2389 gfc_add_block_to_block (&block, &lse.pre);
2390 gfc_add_modify (&block, lse.expr, tmp);
2391 gfc_add_block_to_block (&block, &lse.post);
2393 /* Increment the count1. */
2394 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2395 gfc_index_one_node);
2396 gfc_add_modify (&block, count1, tmp);
2398 tmp = gfc_finish_block (&block);
/* Array lhs: build a scalarized loop over the lhs.  */
2402 gfc_start_block (&block);
2404 gfc_init_loopinfo (&loop1);
2405 gfc_init_se (&rse, NULL);
2406 gfc_init_se (&lse, NULL);
2408 /* Associate the lss with the loop. */
2409 gfc_add_ss_to_loop (&loop1, lss);
2411 /* Calculate the bounds of the scalarization. */
2412 gfc_conv_ss_startstride (&loop1);
2413 /* Setup the scalarizing loops. */
2414 gfc_conv_loop_setup (&loop1, &expr->where);
2416 gfc_mark_ss_chain_used (lss, 1);
2418 /* Start the scalarized loop body. */
2419 gfc_start_scalarized_body (&loop1, &body);
2421 /* Setup the gfc_se structures. */
2422 gfc_copy_loopinfo_to_se (&lse, &loop1);
2425 /* Form the expression of the temporary. */
2426 if (lss != gfc_ss_terminator)
2427 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2428 /* Translate expr. */
2429 gfc_conv_expr (&lse, expr);
2431 /* Use the scalar assignment. */
2432 rse.string_length = lse.string_length;
2433 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);
2435 /* Form the mask expression according to the mask tree list. */
2438 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2440 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2441 TREE_TYPE (wheremaskexpr),
/* Only perform the assignment where the (possibly inverted) mask
   element is true.  */
2443 tmp = fold_build3 (COND_EXPR, void_type_node,
2445 build_empty_stmt (input_location));
2448 gfc_add_expr_to_block (&body, tmp);
2450 /* Increment count1. */
2451 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2452 count1, gfc_index_one_node);
2453 gfc_add_modify (&body, count1, tmp);
2455 /* Increment count3. */
2458 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2459 count3, gfc_index_one_node);
2460 gfc_add_modify (&body, count3, tmp);
2463 /* Generate the copying loops. */
2464 gfc_trans_scalarizing_loops (&loop1, &body);
2465 gfc_add_block_to_block (&block, &loop1.pre);
2466 gfc_add_block_to_block (&block, &loop1.post);
2467 gfc_cleanup_loop (&loop1);
2469 tmp = gfc_finish_block (&block);
2475 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2476 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2477 and should not be freed. WHEREMASK is the conditional execution mask
2478 whose sense may be inverted by INVERT. */
/* COUNT1 indexes the temporary, COUNT3 the where-mask.  Returns the copy
   code as a tree.  NOTE(review): return type, some declarations, braces
   and the final return are elided in this excerpt.  */
2481 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2482 tree count1, gfc_ss *lss, gfc_ss *rss,
2483 tree wheremask, bool invert)
2485 stmtblock_t block, body1;
2492 gfc_start_block (&block);
2494 gfc_init_se (&rse, NULL);
2495 gfc_init_se (&lse, NULL);
2497 if (lss == gfc_ss_terminator)
/* Scalar case: one element, no scalarized loop.  */
2499 gfc_init_block (&body1);
2500 gfc_conv_expr (&rse, expr2);
2501 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2505 /* Initialize the loop. */
2506 gfc_init_loopinfo (&loop);
2508 /* We may need LSS to determine the shape of the expression. */
2509 gfc_add_ss_to_loop (&loop, lss);
2510 gfc_add_ss_to_loop (&loop, rss);
2512 gfc_conv_ss_startstride (&loop);
2513 gfc_conv_loop_setup (&loop, &expr2->where);
2515 gfc_mark_ss_chain_used (rss, 1);
2516 /* Start the loop body. */
2517 gfc_start_scalarized_body (&loop, &body1);
2519 /* Translate the expression. */
2520 gfc_copy_loopinfo_to_se (&rse, &loop);
2522 gfc_conv_expr (&rse, expr2);
2524 /* Form the expression of the temporary. */
2525 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2528 /* Use the scalar assignment. */
2529 lse.string_length = rse.string_length;
2530 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2531 expr2->expr_type == EXPR_VARIABLE, true);
2533 /* Form the mask expression according to the mask tree list. */
2536 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2538 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2539 TREE_TYPE (wheremaskexpr),
/* Only perform the copy where the (possibly inverted) mask element
   is true.  */
2541 tmp = fold_build3 (COND_EXPR, void_type_node,
2542 wheremaskexpr, tmp, build_empty_stmt (input_location));
2545 gfc_add_expr_to_block (&body1, tmp);
2547 if (lss == gfc_ss_terminator)
2549 gfc_add_block_to_block (&block, &body1);
2551 /* Increment count1. */
2552 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2553 gfc_index_one_node);
2554 gfc_add_modify (&block, count1, tmp);
2558 /* Increment count1. */
2559 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2560 count1, gfc_index_one_node);
2561 gfc_add_modify (&body1, count1, tmp);
2563 /* Increment count3. */
2566 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2567 count3, gfc_index_one_node);
2568 gfc_add_modify (&body1, count3, tmp);
2571 /* Generate the copying loops. */
2572 gfc_trans_scalarizing_loops (&loop, &body1);
2574 gfc_add_block_to_block (&block, &loop.pre);
2575 gfc_add_block_to_block (&block, &loop.post);
2577 gfc_cleanup_loop (&loop);
2578 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2579 as tree nodes in SS may not be valid in different scope. */
2582 tmp = gfc_finish_block (&block);
2587 /* Calculate the size of temporary needed in the assignment inside forall.
2588 LSS and RSS are filled in this function. */
/* Returns the element count of the scalarized assignment (the product of
   the loop extents), evaluated into PBLOCK.  NOTE(review): numbering gaps
   in this listing mark elided lines (declarations, braces, final return).  */
2591 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2592 stmtblock_t * pblock,
2593 gfc_ss **lss, gfc_ss **rss)
2601 *lss = gfc_walk_expr (expr1);
2604 size = gfc_index_one_node;
2605 if (*lss != gfc_ss_terminator)
2607 gfc_init_loopinfo (&loop);
2609 /* Walk the RHS of the expression. */
2610 *rss = gfc_walk_expr (expr2);
2611 if (*rss == gfc_ss_terminator)
2613 /* The rhs is scalar. Add a ss for the expression. */
2614 *rss = gfc_get_ss ();
2615 (*rss)->next = gfc_ss_terminator;
2616 (*rss)->type = GFC_SS_SCALAR;
2617 (*rss)->expr = expr2;
2620 /* Associate the SS with the loop. */
2621 gfc_add_ss_to_loop (&loop, *lss);
2622 /* We don't actually need to add the rhs at this point, but it might
2623 make guessing the loop bounds a bit easier. */
2624 gfc_add_ss_to_loop (&loop, *rss);
2626 /* We only want the shape of the expression, not rest of the junk
2627 generated by the scalarizer. */
2628 loop.array_parameter = 1;
2630 /* Calculate the bounds of the scalarization. */
2631 save_flag = gfc_option.rtcheck;
/* Temporarily disable only -fcheck=bounds: this must be bitwise ~, not
   logical !.  "!GFC_RTCHECK_BOUNDS" evaluates to 0 (the macro is a nonzero
   bit), so "&= !" would wrongly clear every runtime-check bit until the
   restore below.  */
2632 gfc_option.rtcheck &= ~GFC_RTCHECK_BOUNDS;
2633 gfc_conv_ss_startstride (&loop);
2634 gfc_option.rtcheck = save_flag;
2635 gfc_conv_loop_setup (&loop, &expr2->where);
2637 /* Figure out how many elements we need. */
2638 for (i = 0; i < loop.dimen; i++)
2640 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2641 gfc_index_one_node, loop.from[i]);
2642 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2644 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2646 gfc_add_block_to_block (pblock, &loop.pre);
2647 size = gfc_evaluate_now (size, pblock);
2648 gfc_add_block_to_block (pblock, &loop.post);
2650 /* TODO: write a function that cleans up a loopinfo without freeing
2651 the SS chains. Currently a NOP. */
2658 /* Calculate the overall iterator number of the nested forall construct.
2659 This routine actually calculates the number of times the body of the
2660 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2661 that by the expression INNER_SIZE. The BLOCK argument specifies the
2662 block in which to calculate the result, and the optional INNER_SIZE_BODY
2663 argument contains any statements that need to be executed (inside the loop)
2664 to initialize or calculate INNER_SIZE. */
2667 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2668 stmtblock_t *inner_size_body, stmtblock_t *block)
2670 forall_info *forall_tmp = nested_forall_info;
2674 /* We can eliminate the innermost unconditional loops with constant
2676 if (INTEGER_CST_P (inner_size))
2679 && !forall_tmp->mask
2680 && INTEGER_CST_P (forall_tmp->size))
2682 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2683 inner_size, forall_tmp->size);
2684 forall_tmp = forall_tmp->prev_nest;
2687 /* If there are no loops left, we have our constant result. */
2692 /* Otherwise, create a temporary variable to compute the result.
   NUMBER accumulates INNER_SIZE once per iteration of the remaining
   (masked or non-constant) forall levels. */
2693 number = gfc_create_var (gfc_array_index_type, "num");
2694 gfc_add_modify (block, number, gfc_index_zero_node);
2696 gfc_start_block (&body);
2697 if (inner_size_body)
2698 gfc_add_block_to_block (&body, inner_size_body);
2700 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2701 number, inner_size);
2704 gfc_add_modify (&body, number, tmp);
2705 tmp = gfc_finish_block (&body);
2707 /* Generate loops. */
2708 if (forall_tmp != NULL)
2709 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2711 gfc_add_expr_to_block (block, tmp);
2717 /* Allocate temporary for forall construct. SIZE is the size of temporary
2718 needed. PTEMP1 is returned so the allocated space can be freed later. */
2721 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
/* Convert the element count SIZE into a byte count before allocating,
   skipping the multiply when the element size is one byte. */
2728 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2729 if (!integer_onep (unit))
2730 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
/* NOTE(review): an elided line presumably sets BYTESIZE = SIZE in the
   one-byte case -- confirm against the full source. */
2735 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2738 tmp = build_fold_indirect_ref_loc (input_location, tmp);
2743 /* Allocate temporary for forall construct according to the information in
2744 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2745 assignment inside forall. PTEMP1 is returned so the space can be freed later. */
2748 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2749 tree inner_size, stmtblock_t * inner_size_body,
2750 stmtblock_t * block, tree * ptemp1)
2754 /* Calculate the total size of temporary needed in forall construct. */
2755 size = compute_overall_iter_number (nested_forall_info, inner_size,
2756 inner_size_body, block);
/* Delegate the actual allocation to the size-based helper above. */
2758 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2762 /* Handle assignments inside forall which need temporary.
2764 forall (i=start:end:stride; maskexpr)
2767 (where e,f<i> are arbitrary expressions possibly involving i
2768 and there is a dependency between e<i> and f<i>)
2770 masktmp(:) = maskexpr(:)
2775 for (i = start; i <= end; i += stride)
2779 for (i = start; i <= end; i += stride)
2781 if (masktmp[maskindex++])
2782 tmp[count1++] = f<i>
2786 for (i = start; i <= end; i += stride)
2788 if (masktmp[maskindex++])
2789 e<i> = tmp[count1++]
2794 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2795 tree wheremask, bool invert,
2796 forall_info * nested_forall_info,
2797 stmtblock_t * block)
2805 stmtblock_t inner_size_body;
2807 /* Create vars. count1 is the current iterator number of the nested
2809 count1 = gfc_create_var (gfc_array_index_type, "count1");
2811 /* Count is the wheremask index. */
2814 count = gfc_create_var (gfc_array_index_type, "count");
2815 gfc_add_modify (block, count, gfc_index_zero_node);
2820 /* Initialize count1. */
2821 gfc_add_modify (block, count1, gfc_index_zero_node);
2823 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2824 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2825 gfc_init_block (&inner_size_body);
2826 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2829 /* The type of LHS. Used in function allocate_temp_for_forall_nest. */
2830 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
2832 if (!expr1->ts.u.cl->backend_decl)
/* Lazily translate the character length expression so the temporary's
   character type can carry it. */
2835 gfc_init_se (&tse, NULL);
2836 gfc_conv_expr (&tse, expr1->ts.u.cl->length);
2837 expr1->ts.u.cl->backend_decl = tse.expr;
2839 type = gfc_get_character_type_len (gfc_default_character_kind,
2840 expr1->ts.u.cl->backend_decl);
2843 type = gfc_typenode_for_spec (&expr1->ts);
2845 /* Allocate temporary for nested forall construct according to the
2846 information in nested_forall_info and inner_size. */
2847 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2848 &inner_size_body, block, &ptemp1);
2850 /* Generate code to copy rhs to the temporary. */
2851 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2854 /* Generate body and loops according to the information in
2855 nested_forall_info. */
2856 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2857 gfc_add_expr_to_block (block, tmp);
/* Reset the counters before the second pass (temp -> lhs). */
2860 gfc_add_modify (block, count1, gfc_index_zero_node);
2864 gfc_add_modify (block, count, gfc_index_zero_node);
2866 /* Generate code to copy the temporary to lhs. */
2867 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2870 /* Generate body and loops according to the information in
2871 nested_forall_info. */
2872 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2873 gfc_add_expr_to_block (block, tmp);
2877 /* Free the temporary. */
2878 tmp = gfc_call_free (ptemp1);
2879 gfc_add_expr_to_block (block, tmp);
2884 /* Translate pointer assignment inside FORALL which needs a temporary. */
2887 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2888 forall_info * nested_forall_info,
2889 stmtblock_t * block)
2903 tree tmp, tmp1, ptemp1;
2905 count = gfc_create_var (gfc_array_index_type, "count");
2906 gfc_add_modify (block, count, gfc_index_zero_node);
2908 inner_size = integer_one_node;
2909 lss = gfc_walk_expr (expr1);
2910 rss = gfc_walk_expr (expr2);
/* Scalar pointer case: store each pointer value in a flat temporary of
   pointer type, then assign them back in a second pass. */
2911 if (lss == gfc_ss_terminator)
2913 type = gfc_typenode_for_spec (&expr1->ts);
2914 type = build_pointer_type (type);
2916 /* Allocate temporary for nested forall construct according to the
2917 information in nested_forall_info and inner_size. */
2918 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2919 inner_size, NULL, block, &ptemp1);
2920 gfc_start_block (&body);
2921 gfc_init_se (&lse, NULL);
2922 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2923 gfc_init_se (&rse, NULL);
2924 rse.want_pointer = 1;
2925 gfc_conv_expr (&rse, expr2);
2926 gfc_add_block_to_block (&body, &rse.pre);
2927 gfc_add_modify (&body, lse.expr,
2928 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2929 gfc_add_block_to_block (&body, &rse.post);
2931 /* Increment count. */
2932 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2933 count, gfc_index_one_node);
2934 gfc_add_modify (&body, count, tmp);
2936 tmp = gfc_finish_block (&body);
2938 /* Generate body and loops according to the information in
2939 nested_forall_info. */
2940 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2941 gfc_add_expr_to_block (block, tmp);
/* Second pass: reset count and copy the saved pointers into the lhs. */
2944 gfc_add_modify (block, count, gfc_index_zero_node);
2946 gfc_start_block (&body);
2947 gfc_init_se (&lse, NULL);
2948 gfc_init_se (&rse, NULL);
2949 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2950 lse.want_pointer = 1;
2951 gfc_conv_expr (&lse, expr1);
2952 gfc_add_block_to_block (&body, &lse.pre);
2953 gfc_add_modify (&body, lse.expr, rse.expr);
2954 gfc_add_block_to_block (&body, &lse.post);
2955 /* Increment count. */
2956 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2957 count, gfc_index_one_node);
2958 gfc_add_modify (&body, count, tmp);
2959 tmp = gfc_finish_block (&body);
2961 /* Generate body and loops according to the information in
2962 nested_forall_info. */
2963 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2964 gfc_add_expr_to_block (block, tmp);
/* Array pointer case: save a full array descriptor per iteration. */
2968 gfc_init_loopinfo (&loop);
2970 /* Associate the SS with the loop. */
2971 gfc_add_ss_to_loop (&loop, rss);
2973 /* Setup the scalarizing loops and bounds. */
2974 gfc_conv_ss_startstride (&loop);
2976 gfc_conv_loop_setup (&loop, &expr2->where);
2978 info = &rss->data.info;
2979 desc = info->descriptor;
2981 /* Make a new descriptor. */
2982 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2983 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0,
2984 loop.from, loop.to, 1,
2985 GFC_ARRAY_UNKNOWN, true);
2987 /* Allocate temporary for nested forall construct. */
2988 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2989 inner_size, NULL, block, &ptemp1);
2990 gfc_start_block (&body);
2991 gfc_init_se (&lse, NULL);
2992 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2993 lse.direct_byref = 1;
2994 rss = gfc_walk_expr (expr2);
2995 gfc_conv_expr_descriptor (&lse, expr2, rss);
2997 gfc_add_block_to_block (&body, &lse.pre);
2998 gfc_add_block_to_block (&body, &lse.post);
3000 /* Increment count. */
3001 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3002 count, gfc_index_one_node);
3003 gfc_add_modify (&body, count, tmp);
3005 tmp = gfc_finish_block (&body);
3007 /* Generate body and loops according to the information in
3008 nested_forall_info. */
3009 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3010 gfc_add_expr_to_block (block, tmp);
/* Second pass: reset count and assign the saved descriptors to the lhs. */
3013 gfc_add_modify (block, count, gfc_index_zero_node);
3015 parm = gfc_build_array_ref (tmp1, count, NULL);
3016 lss = gfc_walk_expr (expr1);
3017 gfc_init_se (&lse, NULL);
3018 gfc_conv_expr_descriptor (&lse, expr1, lss);
3019 gfc_add_modify (&lse.pre, lse.expr, parm);
3020 gfc_start_block (&body);
3021 gfc_add_block_to_block (&body, &lse.pre);
3022 gfc_add_block_to_block (&body, &lse.post);
3024 /* Increment count. */
3025 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3026 count, gfc_index_one_node);
3027 gfc_add_modify (&body, count, tmp);
3029 tmp = gfc_finish_block (&body);
3031 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3032 gfc_add_expr_to_block (block, tmp);
3034 /* Free the temporary. */
3037 tmp = gfc_call_free (ptemp1);
3038 gfc_add_expr_to_block (block, tmp);
3043 /* FORALL and WHERE statements are really nasty, especially when you nest
3044 them. All the rhs of a forall assignment must be evaluated before the
3045 actual assignments are performed. Presumably this also applies to all the
3046 assignments in an inner where statement. */
3048 /* Generate code for a FORALL statement. Any temporaries are allocated as a
3049 linear array, relying on the fact that we process in the same order in all
3052 forall (i=start:end:stride; maskexpr)
3056 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
3058 count = ((end + 1 - start) / stride)
3059 masktmp(:) = maskexpr(:)
3062 for (i = start; i <= end; i += stride)
3064 if (masktmp[maskindex++])
3068 for (i = start; i <= end; i += stride)
3070 if (masktmp[maskindex++])
3074 Note that this code only works when there are no dependencies.
3075 Forall loop with array assignments and data dependencies are a real pain,
3076 because the size of the temporary cannot always be determined before the
3077 loop is executed. This problem is compounded by the presence of nested
3082 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
3102 gfc_forall_iterator *fa;
3105 gfc_saved_var *saved_vars;
3106 iter_info *this_forall;
3110 /* Do nothing if the mask is false. */
3112 && code->expr1->expr_type == EXPR_CONSTANT
3113 && !code->expr1->value.logical)
3114 return build_empty_stmt (input_location);
3117 /* Count the FORALL index number. */
3118 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3122 /* Allocate the space for var, start, end, step, varexpr. */
3123 var = (tree *) gfc_getmem (nvar * sizeof (tree));
3124 start = (tree *) gfc_getmem (nvar * sizeof (tree));
3125 end = (tree *) gfc_getmem (nvar * sizeof (tree));
3126 step = (tree *) gfc_getmem (nvar * sizeof (tree));
3127 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
3128 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
3130 /* Allocate the space for info. */
3131 info = (forall_info *) gfc_getmem (sizeof (forall_info));
3133 gfc_start_block (&pre);
3134 gfc_init_block (&post);
3135 gfc_init_block (&block);
/* For each forall iterator: shadow the index symbol with a fresh backend
   decl and evaluate start/end/stride once, up front. */
3138 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3140 gfc_symbol *sym = fa->var->symtree->n.sym;
3142 /* Allocate space for this_forall. */
3143 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
3145 /* Create a temporary variable for the FORALL index. */
3146 tmp = gfc_typenode_for_spec (&sym->ts);
3147 var[n] = gfc_create_var (tmp, sym->name);
3148 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
3150 /* Record it in this_forall. */
3151 this_forall->var = var[n];
3153 /* Replace the index symbol's backend_decl with the temporary decl. */
3154 sym->backend_decl = var[n];
3156 /* Work out the start, end and stride for the loop. */
3157 gfc_init_se (&se, NULL);
3158 gfc_conv_expr_val (&se, fa->start);
3159 /* Record it in this_forall. */
3160 this_forall->start = se.expr;
3161 gfc_add_block_to_block (&block, &se.pre);
3164 gfc_init_se (&se, NULL);
3165 gfc_conv_expr_val (&se, fa->end);
3166 /* Record it in this_forall. */
3167 this_forall->end = se.expr;
3168 gfc_make_safe_expr (&se);
3169 gfc_add_block_to_block (&block, &se.pre);
3172 gfc_init_se (&se, NULL);
3173 gfc_conv_expr_val (&se, fa->stride);
3174 /* Record it in this_forall. */
3175 this_forall->step = se.expr;
3176 gfc_make_safe_expr (&se);
3177 gfc_add_block_to_block (&block, &se.pre);
3180 /* Set the NEXT field of this_forall to NULL. */
3181 this_forall->next = NULL;
3182 /* Link this_forall to the info construct (append to the tail). */
3183 if (info->this_loop)
3185 iter_info *iter_tmp = info->this_loop;
3186 while (iter_tmp->next != NULL)
3187 iter_tmp = iter_tmp->next;
3188 iter_tmp->next = this_forall;
3191 info->this_loop = this_forall;
3197 /* Calculate the size needed for the current forall level. */
3198 size = gfc_index_one_node;
3199 for (n = 0; n < nvar; n++)
3201 /* size = (end + step - start) / step. */
3202 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
3204 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
3206 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
3207 tmp = convert (gfc_array_index_type, tmp);
3209 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
3212 /* Record the nvar and size of current forall level. */
3218 /* If the mask is .true., consider the FORALL unconditional. */
3219 if (code->expr1->expr_type == EXPR_CONSTANT
3220 && code->expr1->value.logical)
3228 /* First we need to allocate the mask. */
3231 /* As the mask array can be very big, prefer compact boolean types. */
3232 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3233 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3234 size, NULL, &block, &pmask);
3235 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3237 /* Record them in the info structure. */
3238 info->maskindex = maskindex;
3243 /* No mask was specified. */
3244 maskindex = NULL_TREE;
3245 mask = pmask = NULL_TREE;
3248 /* Link the current forall level to nested_forall_info. */
3249 info->prev_nest = nested_forall_info;
3250 nested_forall_info = info;
3252 /* Copy the mask into a temporary variable if required.
3253 For now we assume a mask temporary is needed. */
3256 /* As the mask array can be very big, prefer compact boolean types. */
3257 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3259 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3261 /* Start of mask assignment loop body. */
3262 gfc_start_block (&body);
3264 /* Evaluate the mask expression. */
3265 gfc_init_se (&se, NULL);
3266 gfc_conv_expr_val (&se, code->expr1);
3267 gfc_add_block_to_block (&body, &se.pre);
3269 /* Store the mask. */
3270 se.expr = convert (mask_type, se.expr);
3272 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3273 gfc_add_modify (&body, tmp, se.expr);
3275 /* Advance to the next mask element. */
3276 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3277 maskindex, gfc_index_one_node);
3278 gfc_add_modify (&body, maskindex, tmp);
3280 /* Generate the loops. */
3281 tmp = gfc_finish_block (&body);
3282 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3283 gfc_add_expr_to_block (&block, tmp);
3286 c = code->block->next;
3288 /* TODO: loop merging in FORALL statements. */
3289 /* Now that we've got a copy of the mask, generate the assignment loops. */
3295 /* A scalar or array assignment. Do the simple check for
3296 lhs to rhs dependencies. These make a temporary for the
3297 rhs and form a second forall block to copy to variable. */
3298 need_temp = check_forall_dependencies(c, &pre, &post);
3300 /* Temporaries due to array assignment data dependencies introduce
3301 no end of problems. */
3303 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3304 nested_forall_info, &block);
3307 /* Use the normal assignment copying routines. */
3308 assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);
3310 /* Generate body and loops. */
3311 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3313 gfc_add_expr_to_block (&block, tmp);
3316 /* Cleanup any temporary symtrees that have been made to deal
3317 with dependencies. */
3319 cleanup_forall_symtrees (c);
3324 /* Translate WHERE or WHERE construct nested in FORALL. */
3325 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3328 /* Pointer assignment inside FORALL. */
3329 case EXEC_POINTER_ASSIGN:
3330 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3332 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3333 nested_forall_info, &block);
3336 /* Use the normal assignment copying routines. */
3337 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3339 /* Generate body and loops. */
3340 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3342 gfc_add_expr_to_block (&block, tmp);
/* Nested FORALL: recurse with the current nest info linked in. */
3347 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3348 gfc_add_expr_to_block (&block, tmp);
3351 /* Explicit subroutine calls are prevented by the frontend but interface
3352 assignments can legitimately produce them. */
3353 case EXEC_ASSIGN_CALL:
3354 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3355 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3356 gfc_add_expr_to_block (&block, tmp);
3366 /* Restore the original index variables. */
3367 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3368 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3370 /* Free the space for var, start, end, step, varexpr. */
3376 gfc_free (saved_vars);
3378 /* Free the space for this forall_info. */
3383 /* Free the temporary for the mask. */
3384 tmp = gfc_call_free (pmask);
3385 gfc_add_expr_to_block (&block, tmp);
3388 pushdecl (maskindex);
3390 gfc_add_block_to_block (&pre, &block);
3391 gfc_add_block_to_block (&pre, &post);
3393 return gfc_finish_block (&pre);
3397 /* Translate the FORALL statement or construct. */
/* Public entry point: delegates to gfc_trans_forall_1 with no enclosing
   forall nest (NULL nested_forall_info). */
3399 tree gfc_trans_forall (gfc_code * code)
3401 return gfc_trans_forall_1 (code, NULL);
3405 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3406 If the WHERE construct is nested in FORALL, compute the overall temporary
3407 needed by the WHERE mask expression multiplied by the iterator number of
3409 ME is the WHERE mask expression.
3410 MASK is the current execution mask upon input, whose sense may or may
3411 not be inverted as specified by the INVERT argument.
3412 CMASK is the updated execution mask on output, or NULL if not required.
3413 PMASK is the pending execution mask on output, or NULL if not required.
3414 BLOCK is the block in which to place the condition evaluation loops. */
3417 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3418 tree mask, bool invert, tree cmask, tree pmask,
3419 tree mask_type, stmtblock_t * block)
3424 stmtblock_t body, body1;
3425 tree count, cond, mtmp;
3428 gfc_init_loopinfo (&loop);
3430 lss = gfc_walk_expr (me);
3431 rss = gfc_walk_expr (me);
3433 /* Variable to index the temporary. */
3434 count = gfc_create_var (gfc_array_index_type, "count");
3435 /* Initialize count. */
3436 gfc_add_modify (block, count, gfc_index_zero_node);
3438 gfc_start_block (&body);
3440 gfc_init_se (&rse, NULL);
3441 gfc_init_se (&lse, NULL);
/* Scalar mask expression needs no scalarizer; array masks get one. */
3443 if (lss == gfc_ss_terminator)
3445 gfc_init_block (&body1);
3449 /* Initialize the loop. */
3450 gfc_init_loopinfo (&loop);
3452 /* We may need LSS to determine the shape of the expression. */
3453 gfc_add_ss_to_loop (&loop, lss);
3454 gfc_add_ss_to_loop (&loop, rss);
3456 gfc_conv_ss_startstride (&loop);
3457 gfc_conv_loop_setup (&loop, &me->where);
3459 gfc_mark_ss_chain_used (rss, 1);
3460 /* Start the loop body. */
3461 gfc_start_scalarized_body (&loop, &body1);
3463 /* Translate the expression. */
3464 gfc_copy_loopinfo_to_se (&rse, &loop);
3466 gfc_conv_expr (&rse, me);
3469 /* Variable to evaluate mask condition. */
3470 cond = gfc_create_var (mask_type, "cond");
3471 if (mask && (cmask || pmask))
3472 mtmp = gfc_create_var (mask_type, "mask");
3473 else mtmp = NULL_TREE;
3475 gfc_add_block_to_block (&body1, &lse.pre);
3476 gfc_add_block_to_block (&body1, &rse.pre);
3478 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3480 if (mask && (cmask || pmask))
/* Load the incoming execution mask element, inverting if requested. */
3482 tmp = gfc_build_array_ref (mask, count, NULL);
3484 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3485 gfc_add_modify (&body1, mtmp, tmp);
/* cmask[i] = cond AND current-mask: elements to execute now. */
3490 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3493 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3494 gfc_add_modify (&body1, tmp1, tmp);
/* pmask[i] = NOT cond AND current-mask: elements pending for ELSEWHERE. */
3499 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3500 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3502 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3503 gfc_add_modify (&body1, tmp1, tmp);
3506 gfc_add_block_to_block (&body1, &lse.post);
3507 gfc_add_block_to_block (&body1, &rse.post);
3509 if (lss == gfc_ss_terminator)
3511 gfc_add_block_to_block (&body, &body1);
3515 /* Increment count. */
3516 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3517 gfc_index_one_node);
3518 gfc_add_modify (&body1, count, tmp1);
3520 /* Generate the copying loops. */
3521 gfc_trans_scalarizing_loops (&loop, &body1);
3523 gfc_add_block_to_block (&body, &loop.pre);
3524 gfc_add_block_to_block (&body, &loop.post);
3526 gfc_cleanup_loop (&loop);
3527 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3528 as tree nodes in SS may not be valid in different scope. */
3531 tmp1 = gfc_finish_block (&body);
3532 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3533 if (nested_forall_info != NULL)
3534 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3536 gfc_add_expr_to_block (block, tmp1);
3540 /* Translate an assignment statement in a WHERE statement or construct
3541 statement. The MASK expression is used to control which elements
3542 of EXPR1 shall be assigned. The sense of MASK is specified by
3546 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3547 tree mask, bool invert,
3548 tree count1, tree count2,
3554 gfc_ss *lss_section;
3561 tree index, maskexpr;
3563 /* A defined assignment. */
3564 if (cnext && cnext->resolved_sym)
3565 return gfc_trans_call (cnext, true, mask, count1, invert);
3568 /* TODO: handle this special case.
3569 Special case a single function returning an array. */
3570 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3572 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3578 /* Assignment of the form lhs = rhs. */
3579 gfc_start_block (&block);
3581 gfc_init_se (&lse, NULL);
3582 gfc_init_se (&rse, NULL);
3585 lss = gfc_walk_expr (expr1);
3588 /* In each where-assign-stmt, the mask-expr and the variable being
3589 defined shall be arrays of the same shape. */
3590 gcc_assert (lss != gfc_ss_terminator);
3592 /* The assignment needs scalarization. */
3595 /* Find a non-scalar SS from the lhs. */
3596 while (lss_section != gfc_ss_terminator
3597 && lss_section->type != GFC_SS_SECTION)
3598 lss_section = lss_section->next;
3600 gcc_assert (lss_section != gfc_ss_terminator);
3602 /* Initialize the scalarizer. */
3603 gfc_init_loopinfo (&loop);
3606 rss = gfc_walk_expr (expr2);
3607 if (rss == gfc_ss_terminator)
3609 /* The rhs is scalar. Add a ss for the expression. */
3610 rss = gfc_get_ss ();
3612 rss->next = gfc_ss_terminator;
3613 rss->type = GFC_SS_SCALAR;
3617 /* Associate the SS with the loop. */
3618 gfc_add_ss_to_loop (&loop, lss);
3619 gfc_add_ss_to_loop (&loop, rss);
3621 /* Calculate the bounds of the scalarization. */
3622 gfc_conv_ss_startstride (&loop);
3624 /* Resolve any data dependencies in the statement. */
3625 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3627 /* Setup the scalarizing loops. */
3628 gfc_conv_loop_setup (&loop, &expr2->where);
3630 /* Setup the gfc_se structures. */
3631 gfc_copy_loopinfo_to_se (&lse, &loop);
3632 gfc_copy_loopinfo_to_se (&rse, &loop);
3635 gfc_mark_ss_chain_used (rss, 1);
/* loop.temp_ss != NULL means the dependency resolution above created a
   temporary; the lhs is then written via that temporary. */
3636 if (loop.temp_ss == NULL)
3639 gfc_mark_ss_chain_used (lss, 1);
3643 lse.ss = loop.temp_ss;
3644 gfc_mark_ss_chain_used (lss, 3);
3645 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3648 /* Start the scalarized loop body. */
3649 gfc_start_scalarized_body (&loop, &body);
3651 /* Translate the expression. */
3652 gfc_conv_expr (&rse, expr2);
3653 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3655 gfc_conv_tmp_array_ref (&lse);
3656 gfc_advance_se_ss_chain (&lse);
3659 gfc_conv_expr (&lse, expr1);
3661 /* Form the mask expression according to the mask. */
3663 maskexpr = gfc_build_array_ref (mask, index, NULL);
3665 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3667 /* Use the scalar assignment as is. */
3668 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3669 loop.temp_ss != NULL, false, true);
3671 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
3673 gfc_add_expr_to_block (&body, tmp);
3675 if (lss == gfc_ss_terminator)
3677 /* Increment count1. */
3678 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3679 count1, gfc_index_one_node);
3680 gfc_add_modify (&body, count1, tmp);
3682 /* Use the scalar assignment as is. */
3683 gfc_add_block_to_block (&block, &body);
3687 gcc_assert (lse.ss == gfc_ss_terminator
3688 && rse.ss == gfc_ss_terminator);
3690 if (loop.temp_ss != NULL)
3692 /* Increment count1 before finishing the main body of a scalarized
3694 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3695 count1, gfc_index_one_node);
3696 gfc_add_modify (&body, count1, tmp);
3697 gfc_trans_scalarized_loop_boundary (&loop, &body);
3699 /* We need to copy the temporary to the actual lhs. */
3700 gfc_init_se (&lse, NULL);
3701 gfc_init_se (&rse, NULL);
3702 gfc_copy_loopinfo_to_se (&lse, &loop);
3703 gfc_copy_loopinfo_to_se (&rse, &loop);
3705 rse.ss = loop.temp_ss;
3708 gfc_conv_tmp_array_ref (&rse);
3709 gfc_advance_se_ss_chain (&rse);
3710 gfc_conv_expr (&lse, expr1);
3712 gcc_assert (lse.ss == gfc_ss_terminator
3713 && rse.ss == gfc_ss_terminator);
3715 /* Form the mask expression according to the mask tree list. */
3717 maskexpr = gfc_build_array_ref (mask, index, NULL);
3719 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3722 /* Use the scalar assignment as is. */
3723 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
3725 tmp = build3_v (COND_EXPR, maskexpr, tmp,
3726 build_empty_stmt (input_location));
3727 gfc_add_expr_to_block (&body, tmp);
3729 /* Increment count2. */
3730 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3731 count2, gfc_index_one_node);
3732 gfc_add_modify (&body, count2, tmp);
3736 /* Increment count1. */
3737 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3738 count1, gfc_index_one_node);
3739 gfc_add_modify (&body, count1, tmp);
3742 /* Generate the copying loops. */
3743 gfc_trans_scalarizing_loops (&loop, &body);
3745 /* Wrap the whole thing up. */
3746 gfc_add_block_to_block (&block, &loop.pre);
3747 gfc_add_block_to_block (&block, &loop.post);
3748 gfc_cleanup_loop (&loop);
3751 return gfc_finish_block (&block);
3755 /* Translate the WHERE construct or statement.
3756 This function can be called iteratively to translate the nested WHERE
3757 construct or statement.
3758 MASK is the control mask; INVERT, when true, means the clause applies
   where MASK is false.  NESTED_FORALL_INFO describes any enclosing
   FORALL nest (NULL outside FORALL).  Generated statements are
   appended to BLOCK.  */
3761 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3762 forall_info * nested_forall_info, stmtblock_t * block)
3764 stmtblock_t inner_size_body;
3765 tree inner_size, size;
/* count1/count2 index the flattened control mask while the masked
   assignment loops run.  */
3774 tree count1, count2;
/* pcmask/ppmask remember the allocated mask storage so it can be freed
   at the end; cmask/pmask are the current and pending control masks.  */
3778 tree pcmask = NULL_TREE;
3779 tree ppmask = NULL_TREE;
3780 tree cmask = NULL_TREE;
3781 tree pmask = NULL_TREE;
3782 gfc_actual_arglist *arg;
3784 /* the WHERE statement or the WHERE construct statement.  */
3785 cblock = code->block;
3787 /* As the mask array can be very big, prefer compact boolean types.  */
3788 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3790 /* Determine which temporary masks are needed.  */
3793 /* One clause: No ELSEWHEREs.  */
3794 need_cmask = (cblock->next != 0);
3797 else if (cblock->block->block)
3799 /* Three or more clauses: Conditional ELSEWHEREs.  */
3803 else if (cblock->next)
3805 /* Two clauses, the first non-empty.  */
3807 need_pmask = (mask != NULL_TREE
3808 && cblock->block->next != 0);
3810 else if (!cblock->block->next)
3812 /* Two clauses, both empty.  */
3816 /* Two clauses, the first empty, the second non-empty.  */
3819 need_cmask = (cblock->block->expr1 != 0);
3828 if (need_cmask || need_pmask)
3830 /* Calculate the size of temporary needed by the mask-expr.  */
3831 gfc_init_block (&inner_size_body);
3832 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
3833 &inner_size_body, &lss, &rss);
3835 /* Calculate the total size of temporary needed.  */
3836 size = compute_overall_iter_number (nested_forall_info, inner_size,
3837 &inner_size_body, block);
3839 /* Check whether the size is negative.  */
3840 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3841 gfc_index_zero_node);
3842 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3843 gfc_index_zero_node, size);
3844 size = gfc_evaluate_now (size, block);
3846 /* Allocate temporary for WHERE mask if needed.  */
3848 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3851 /* Allocate temporary for !mask if needed.  */
3853 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3859 /* Each time around this loop, the where clause is conditional
3860 on the value of mask and invert, which are updated at the
3861 bottom of the loop.  */
3863 /* Has mask-expr.  */
3866 /* Ensure that the WHERE mask will be evaluated exactly once.
3867 If there are no statements in this WHERE/ELSEWHERE clause,
3868 then we don't need to update the control mask (cmask).
3869 If this is the last clause of the WHERE construct, then
3870 we don't need to update the pending control mask (pmask).  */
3872 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3874 cblock->next ? cmask : NULL_TREE,
3875 cblock->block ? pmask : NULL_TREE,
3878 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3880 (cblock->next || cblock->block)
3881 ? cmask : NULL_TREE,
3882 NULL_TREE, mask_type, block);
3886 /* It's a final elsewhere-stmt.  No mask-expr is present.  */
3890 /* The body of this where clause is controlled by cmask with
3891 sense specified by invert.  */
3893 /* Get the assignment statement of a WHERE statement, or the first
3894 statement in where-body-construct of a WHERE construct.  */
3895 cnext = cblock->next;
3900 /* WHERE assignment statement.  */
/* NOTE(review): the EXEC_ASSIGN case label presumably precedes this
   one and falls through to share the handling below -- confirm
   against the full source (the EXEC_ASSIGN_CALL test at 3923 implies
   both ops reach this code).  */
3901 case EXEC_ASSIGN_CALL:
/* Defined assignment: dig the destination/source out of the actual
   argument list of the call.  */
3903 arg = cnext->ext.actual;
3904 expr1 = expr2 = NULL;
3905 for (; arg; arg = arg->next)
3917 expr1 = cnext->expr1;
3918 expr2 = cnext->expr2;
3920 if (nested_forall_info != NULL)
/* Inside a FORALL nest a temporary may be needed when the LHS and
   RHS overlap.  */
3922 need_temp = gfc_check_dependency (expr1, expr2, 0);
3923 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3924 gfc_trans_assign_need_temp (expr1, expr2,
3926 nested_forall_info, block);
3929 /* Variables to control maskexpr.  */
3930 count1 = gfc_create_var (gfc_array_index_type, "count1");
3931 count2 = gfc_create_var (gfc_array_index_type, "count2");
3932 gfc_add_modify (block, count1, gfc_index_zero_node);
3933 gfc_add_modify (block, count2, gfc_index_zero_node);
/* Translate the masked assignment and wrap it in the surrounding
   FORALL loop nest.  */
3935 tmp = gfc_trans_where_assign (expr1, expr2,
3940 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3942 gfc_add_expr_to_block (block, tmp);
3947 /* Variables to control maskexpr.  */
3948 count1 = gfc_create_var (gfc_array_index_type, "count1");
3949 count2 = gfc_create_var (gfc_array_index_type, "count2");
3950 gfc_add_modify (block, count1, gfc_index_zero_node);
3951 gfc_add_modify (block, count2, gfc_index_zero_node);
3953 tmp = gfc_trans_where_assign (expr1, expr2,
3957 gfc_add_expr_to_block (block, tmp);
3962 /* WHERE or WHERE construct is part of a where-body-construct.  */
/* Recurse, using the current control mask as the nested WHERE's
   outer mask.  */
3964 gfc_trans_where_2 (cnext, cmask, invert,
3965 nested_forall_info, block);
3972 /* The next statement within the same where-body-construct.  */
3973 cnext = cnext->next;
3975 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt.  */
3976 cblock = cblock->block;
3977 if (mask == NULL_TREE)
3979 /* If we're the initial WHERE, we can simply invert the sense
3980 of the current mask to obtain the "mask" for the remaining
3987 /* Otherwise, for nested WHERE's we need to use the pending mask.  */
3993 /* If we allocated a pending mask array, deallocate it now.  */
3996 tmp = gfc_call_free (ppmask);
3997 gfc_add_expr_to_block (block, tmp);
4000 /* If we allocated a current mask array, deallocate it now.  */
4003 tmp = gfc_call_free (pcmask);
4004 gfc_add_expr_to_block (block, tmp);
4008 /* Translate a simple WHERE construct or statement without dependencies.
4009 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
4010 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
4011 Currently both CBLOCK and EBLOCK are restricted to single assignments.
   The two assignments are fused into one scalarized loop whose body
   selects between them with a COND_EXPR on the mask value.  */
4014 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
4016 stmtblock_t block, body;
4017 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
4018 tree tmp, cexpr, tstmt, estmt;
4019 gfc_ss *css, *tdss, *tsss;
4020 gfc_se cse, tdse, tsse, edse, esse;
4025 /* Allow the scalarizer to workshare simple where loops.  */
4026 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
4027 ompws_flags |= OMPWS_SCALARIZER_WS;
/* Pick apart the mask and the two destination/source pairs.  */
4029 cond = cblock->expr1;
4030 tdst = cblock->next->expr1;
4031 tsrc = cblock->next->expr2;
4032 edst = eblock ? eblock->next->expr1 : NULL;
4033 esrc = eblock ? eblock->next->expr2 : NULL;
4035 gfc_start_block (&block);
4036 gfc_init_loopinfo (&loop);
4038 /* Handle the condition.  */
4039 gfc_init_se (&cse, NULL);
4040 css = gfc_walk_expr (cond);
4041 gfc_add_ss_to_loop (&loop, css);
4043 /* Handle the then-clause.  */
4044 gfc_init_se (&tdse, NULL);
4045 gfc_init_se (&tsse, NULL);
4046 tdss = gfc_walk_expr (tdst);
4047 tsss = gfc_walk_expr (tsrc);
/* A scalar RHS gets wrapped in a GFC_SS_SCALAR entry so the
   scalarizer can carry it along with the array chains.  */
4048 if (tsss == gfc_ss_terminator)
4050 tsss = gfc_get_ss ();
4052 tsss->next = gfc_ss_terminator;
4053 tsss->type = GFC_SS_SCALAR;
4056 gfc_add_ss_to_loop (&loop, tdss);
4057 gfc_add_ss_to_loop (&loop, tsss);
4061 /* Handle the else clause.  */
4062 gfc_init_se (&edse, NULL);
4063 gfc_init_se (&esse, NULL);
4064 edss = gfc_walk_expr (edst);
4065 esss = gfc_walk_expr (esrc);
/* Same scalar-RHS wrapping for the else clause.  */
4066 if (esss == gfc_ss_terminator)
4068 esss = gfc_get_ss ();
4070 esss->next = gfc_ss_terminator;
4071 esss->type = GFC_SS_SCALAR;
4074 gfc_add_ss_to_loop (&loop, edss);
4075 gfc_add_ss_to_loop (&loop, esss);
4078 gfc_conv_ss_startstride (&loop);
4079 gfc_conv_loop_setup (&loop, &tdst->where);
4081 gfc_mark_ss_chain_used (css, 1);
4082 gfc_mark_ss_chain_used (tdss, 1);
4083 gfc_mark_ss_chain_used (tsss, 1);
4086 gfc_mark_ss_chain_used (edss, 1);
4087 gfc_mark_ss_chain_used (esss, 1);
4090 gfc_start_scalarized_body (&loop, &body);
4092 gfc_copy_loopinfo_to_se (&cse, &loop);
4093 gfc_copy_loopinfo_to_se (&tdse, &loop);
4094 gfc_copy_loopinfo_to_se (&tsse, &loop);
4100 gfc_copy_loopinfo_to_se (&edse, &loop);
4101 gfc_copy_loopinfo_to_se (&esse, &loop);
/* Evaluate the mask inside the scalarized body.  */
4106 gfc_conv_expr (&cse, cond);
4107 gfc_add_block_to_block (&body, &cse.pre);
4110 gfc_conv_expr (&tsse, tsrc);
/* Write through the loop temporary when one was created for the
   destination.  */
4111 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
4113 gfc_conv_tmp_array_ref (&tdse);
4114 gfc_advance_se_ss_chain (&tdse);
4117 gfc_conv_expr (&tdse, tdst);
4121 gfc_conv_expr (&esse, esrc);
4122 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
4124 gfc_conv_tmp_array_ref (&edse);
4125 gfc_advance_se_ss_chain (&edse);
4128 gfc_conv_expr (&edse, edst);
/* Guard the two scalar assignments with the mask value; with no
   else clause the alternative is an empty statement.  */
4131 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
4132 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
4134 : build_empty_stmt (input_location);
4135 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
4136 gfc_add_expr_to_block (&body, tmp);
4137 gfc_add_block_to_block (&body, &cse.post);
4139 gfc_trans_scalarizing_loops (&loop, &body);
4140 gfc_add_block_to_block (&block, &loop.pre);
4141 gfc_add_block_to_block (&block, &loop.post);
4142 gfc_cleanup_loop (&loop);
4144 return gfc_finish_block (&block);
4147 /* As the WHERE or WHERE construct statement can be nested, we call
4148 gfc_trans_where_2 to do the translation, and pass the initial
4149 NULL values for both the control mask and the pending control mask.
   Simple dependence-free single-assignment forms are recognized first
   and dispatched to the cheaper gfc_trans_where_3.  */
4152 gfc_trans_where (gfc_code * code)
4158 cblock = code->block;
/* Candidate fast path: the first clause is a single assignment.  */
4160 && cblock->next->op == EXEC_ASSIGN
4161 && !cblock->next->next)
4163 eblock = cblock->block;
4166 /* A simple "WHERE (cond) x = y" statement or block is
4167 dependence free if cond is not dependent upon writing x,
4168 and the source y is unaffected by the destination x.  */
4169 if (!gfc_check_dependency (cblock->next->expr1,
4171 && !gfc_check_dependency (cblock->next->expr1,
4172 cblock->next->expr2, 0))
4173 return gfc_trans_where_3 (cblock, NULL);
/* Candidate fast path: an unconditional ELSEWHERE whose body is a
   single assignment.  */
4175 else if (!eblock->expr1
4178 && eblock->next->op == EXEC_ASSIGN
4179 && !eblock->next->next)
4181 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4182 block is dependence free if cond is not dependent on writes
4183 to x1 and x2, y1 is not dependent on writes to x2, and y2
4184 is not dependent on writes to x1, and both y's are not
4185 dependent upon their own x's.  In addition to this, the
4186 final two dependency checks below exclude all but the same
4187 array reference if the where and elswhere destinations
4188 are the same.  In short, this is VERY conservative and this
4189 is needed because the two loops, required by the standard
4190 are coalesced in gfc_trans_where_3.  */
4191 if (!gfc_check_dependency(cblock->next->expr1,
4193 && !gfc_check_dependency(eblock->next->expr1,
4195 && !gfc_check_dependency(cblock->next->expr1,
4196 eblock->next->expr2, 1)
4197 && !gfc_check_dependency(eblock->next->expr1,
4198 cblock->next->expr2, 1)
4199 && !gfc_check_dependency(cblock->next->expr1,
4200 cblock->next->expr2, 1)
4201 && !gfc_check_dependency(eblock->next->expr1,
4202 eblock->next->expr2, 1)
4203 && !gfc_check_dependency(cblock->next->expr1,
4204 eblock->next->expr1, 0)
4205 && !gfc_check_dependency(eblock->next->expr1,
4206 cblock->next->expr1, 0))
4207 return gfc_trans_where_3 (cblock, eblock);
/* General case: full mask-based translation with NULL initial
   control and pending masks.  */
4211 gfc_start_block (&block);
4213 gfc_trans_where_2 (code, NULL, false, NULL, &block);
4215 return gfc_finish_block (&block);
4219 /* CYCLE a DO loop. The label decl has already been created by
4220 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4221 node at the head of the loop. We must mark the label as used. */
4224 gfc_trans_cycle (gfc_code * code)
4228 cycle_label = code->ext.whichloop->cycle_label;
4229 TREE_USED (cycle_label) = 1;
4230 return build1_v (GOTO_EXPR, cycle_label);
4234 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4235 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
4239 gfc_trans_exit (gfc_code * code)
4243 exit_label = code->ext.whichloop->exit_label;
4244 TREE_USED (exit_label) = 1;
4245 return build1_v (GOTO_EXPR, exit_label);
4249 /* Translate the ALLOCATE statement.  Handles STAT= (expr1) and
   ERRMSG= (expr2) reporting, SOURCE= (expr3) initialization, default
   initializers, and CLASS bookkeeping ($data/$vptr components).  */
4252 gfc_trans_allocate (gfc_code * code)
4265 if (!code->ext.alloc.list)
4268 pstat = stat = error_label = tmp = memsz = NULL_TREE;
4270 gfc_start_block (&block);
4272 /* Either STAT= and/or ERRMSG is present.  */
4273 if (code->expr1 || code->expr2)
4275 tree gfc_int4_type_node = gfc_get_int_type (4);
4277 stat = gfc_create_var (gfc_int4_type_node, "stat");
4278 pstat = gfc_build_addr_expr (NULL_TREE, stat);
/* Failed allocations jump here so STAT/ERRMSG can be set.  */
4280 error_label = gfc_build_label_decl (NULL_TREE);
4281 TREE_USED (error_label) = 1;
/* Translate each object in the allocation list.  */
4284 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4286 expr = gfc_copy_expr (al->expr);
/* For CLASS entities the allocation goes through the $data
   component.  */
4288 if (expr->ts.type == BT_CLASS)
4289 gfc_add_component_ref (expr, "$data");
4291 gfc_init_se (&se, NULL);
4292 gfc_start_block (&se.pre);
4294 se.want_pointer = 1;
4295 se.descriptor_only = 1;
4296 gfc_conv_expr (&se, expr);
4298 if (!gfc_array_allocate (&se, expr, pstat))
4300 /* A scalar or derived type.  */
4302 /* Determine allocate size.  */
4303 if (al->expr->ts.type == BT_CLASS && code->expr3)
/* Polymorphic SOURCE: take the dynamic size from the source's
   vtab ($vptr->$size).  */
4305 if (code->expr3->ts.type == BT_CLASS)
4309 sz = gfc_copy_expr (code->expr3);
4310 gfc_add_component_ref (sz, "$vptr");
4311 gfc_add_component_ref (sz, "$size");
4312 gfc_init_se (&se_sz, NULL);
4313 gfc_conv_expr (&se_sz, sz);
4318 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
4320 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4321 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
4323 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4325 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4326 memsz = se.string_length;
4328 /* Allocate - for non-pointers with re-alloc checking.  */
4335 /* Find the last reference in the chain.  */
4336 while (ref && ref->next != NULL)
4338 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT);
/* Whether the object (or its ultimate component) is ALLOCATABLE
   decides which runtime allocation helper is used.  */
4343 allocatable = expr->symtree->n.sym->attr.allocatable;
4345 allocatable = ref->u.c.component->attr.allocatable;
4348 tmp = gfc_allocate_array_with_status (&se.pre, se.expr, memsz,
4351 tmp = gfc_allocate_with_status (&se.pre, memsz, pstat);
4354 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
4355 fold_convert (TREE_TYPE (se.expr), tmp));
4356 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT=/ERRMSG=, branch to the error label when the runtime
   status is nonzero.  */
4358 if (code->expr1 || code->expr2)
4360 tmp = build1_v (GOTO_EXPR, error_label);
4361 parm = fold_build2 (NE_EXPR, boolean_type_node,
4362 stat, build_int_cst (TREE_TYPE (stat), 0));
4363 tmp = fold_build3 (COND_EXPR, void_type_node,
4364 parm, tmp, build_empty_stmt (input_location));
4365 gfc_add_expr_to_block (&se.pre, tmp);
/* Nullify allocatable components of a freshly allocated derived
   type object.  */
4368 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4370 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4371 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4372 gfc_add_expr_to_block (&se.pre, tmp);
4377 tmp = gfc_finish_block (&se.pre);
4378 gfc_add_expr_to_block (&block, tmp);
4380 /* Initialization via SOURCE block.  */
4381 if (code->expr3 && !code->expr3->mold)
4383 gfc_expr *rhs = gfc_copy_expr (code->expr3);
/* CLASS target: copy the source data by a memcpy of memsz bytes.  */
4384 if (al->expr->ts.type == BT_CLASS)
4387 if (rhs->ts.type == BT_CLASS)
4388 gfc_add_component_ref (rhs, "$data");
4389 gfc_init_se (&dst, NULL);
4390 gfc_init_se (&src, NULL);
4391 gfc_conv_expr (&dst, expr);
4392 gfc_conv_expr (&src, rhs);
4393 gfc_add_block_to_block (&block, &src.pre);
4394 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
4397 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4399 gfc_free_expr (rhs);
4400 gfc_add_expr_to_block (&block, tmp);
4404 /* Add default initializer for those derived types that need them.  */
4405 gfc_expr *rhs = NULL;
/* Pick the relevant typespec: an explicit type-spec in the
   ALLOCATE, the SOURCE= expression, or the object itself.  */
4408 if (code->ext.alloc.ts.type == BT_DERIVED)
4409 ts = code->ext.alloc.ts;
4410 else if (code->expr3)
4411 ts = code->expr3->ts;
4415 if (ts.type == BT_DERIVED)
4417 rhs = gfc_default_initializer (&ts);
4418 gfc_resolve_expr (rhs);
/* CLASS: the default initializer lives in the vtab ($def_init).  */
4420 else if (ts.type == BT_CLASS)
4422 rhs = gfc_copy_expr (code->expr3);
4423 gfc_add_component_ref (rhs, "$vptr");
4424 gfc_add_component_ref (rhs, "$def_init");
4429 gfc_expr *lhs = gfc_expr_to_initialize (expr);
4430 if (al->expr->ts.type == BT_DERIVED)
4432 tmp = gfc_trans_assignment (lhs, rhs, true, false);
4433 gfc_add_expr_to_block (&block, tmp);
4435 else if (al->expr->ts.type == BT_CLASS)
4438 gfc_init_se (&dst, NULL);
4439 gfc_init_se (&src, NULL);
4440 gfc_conv_expr (&dst, lhs);
4441 gfc_conv_expr (&src, rhs);
4442 gfc_add_block_to_block (&block, &src.pre);
4443 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
4444 gfc_add_expr_to_block (&block, tmp);
4446 gfc_free_expr (lhs);
4447 gfc_free_expr (rhs);
4451 /* Allocation of CLASS entities.  */
/* NOTE(review): expr is used below after being freed here;
   presumably it is re-pointed at al->expr on a line elided from
   this listing -- confirm against the full source.  */
4452 gfc_free_expr (expr);
4454 if (expr->ts.type == BT_CLASS)
4459 /* Initialize VPTR for CLASS objects.  */
4460 lhs = gfc_expr_to_initialize (expr);
4461 gfc_add_component_ref (lhs, "$vptr");
4463 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4465 /* Polymorphic SOURCE: VPTR must be determined at run time.  */
4466 rhs = gfc_copy_expr (code->expr3);
4467 gfc_add_component_ref (rhs, "$vptr");
4468 tmp = gfc_trans_pointer_assignment (lhs, rhs);
4469 gfc_add_expr_to_block (&block, tmp);
4470 gfc_free_expr (rhs);
4474 /* VPTR is fixed at compile time.  */
4478 ts = &code->expr3->ts;
4479 else if (expr->ts.type == BT_DERIVED)
4481 else if (code->ext.alloc.ts.type == BT_DERIVED)
4482 ts = &code->ext.alloc.ts;
4483 else if (expr->ts.type == BT_CLASS)
4484 ts = &CLASS_DATA (expr)->ts;
4488 if (ts->type == BT_DERIVED)
4490 vtab = gfc_find_derived_vtab (ts->u.derived);
4492 gfc_init_se (&lse, NULL);
4493 lse.want_pointer = 1;
4494 gfc_conv_expr (&lse, lhs);
/* Store the address of the statically known vtab.  */
4495 tmp = gfc_build_addr_expr (NULL_TREE,
4496 gfc_get_symbol_decl (vtab));
4497 gfc_add_modify (&block, lse.expr,
4498 fold_convert (TREE_TYPE (lse.expr), tmp));
/* Error landing pad: set the user's STAT variable from the runtime
   status.  */
4508 tmp = build1_v (LABEL_EXPR, error_label);
4509 gfc_add_expr_to_block (&block, tmp);
4511 gfc_init_se (&se, NULL);
4512 gfc_conv_expr_lhs (&se, code->expr1);
4513 tmp = convert (TREE_TYPE (se.expr), stat);
4514 gfc_add_modify (&block, se.expr, tmp);
4520 /* A better error message may be possible, but not required.  */
4521 const char *msg = "Attempt to allocate an allocated object";
4522 tree errmsg, slen, dlen;
4524 gfc_init_se (&se, NULL);
4525 gfc_conv_expr_lhs (&se, code->expr2);
4527 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4529 gfc_add_modify (&block, errmsg,
4530 gfc_build_addr_expr (pchar_type_node,
4531 gfc_build_localized_cstring_const (msg)));
/* Copy min(strlen(msg), len(ERRMSG)) characters into the user's
   ERRMSG variable, but only when stat is nonzero.  */
4533 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4534 dlen = gfc_get_expr_charlen (code->expr2);
4535 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4537 dlen = build_call_expr_loc (input_location,
4538 built_in_decls[BUILT_IN_MEMCPY], 3,
4539 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4541 tmp = fold_build2 (NE_EXPR, boolean_type_node, stat,
4542 build_int_cst (TREE_TYPE (stat), 0));
4544 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4546 gfc_add_expr_to_block (&block, tmp);
4549 return gfc_finish_block (&block);
4553 /* Translate a DEALLOCATE statement.  STAT= (expr1) receives the count
   of failed deallocations; ERRMSG= (expr2) is set when any failed.  */
4556 gfc_trans_deallocate (gfc_code *code)
4561 tree apstat, astat, pstat, stat, tmp;
4564 pstat = apstat = stat = astat = tmp = NULL_TREE;
4566 gfc_start_block (&block);
4568 /* Count the number of failed deallocations.  If deallocate() was
4569 called with STAT= , then set STAT to the count.  If deallocate
4570 was called with ERRMSG, then set ERRMSG to a string.  */
4571 if (code->expr1 || code->expr2)
4573 tree gfc_int4_type_node = gfc_get_int_type (4);
/* Per-object status...  */
4575 stat = gfc_create_var (gfc_int4_type_node, "stat");
4576 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4578 /* Running total of possible deallocation failures.  */
4579 astat = gfc_create_var (gfc_int4_type_node, "astat");
4580 apstat = gfc_build_addr_expr (NULL_TREE, astat);
4582 /* Initialize astat to 0.  */
4583 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
/* Translate each object in the deallocation list.  */
4586 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4589 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4591 gfc_init_se (&se, NULL);
4592 gfc_start_block (&se.pre);
4594 se.want_pointer = 1;
4595 se.descriptor_only = 1;
4596 gfc_conv_expr (&se, expr);
/* Derived types with allocatable components: free those components
   first (unless reached through a pointer).  */
4598 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4601 gfc_ref *last = NULL;
/* Remember the last component reference in the chain.  */
4602 for (ref = expr->ref; ref; ref = ref->next)
4603 if (ref->type == REF_COMPONENT)
4606 /* Do not deallocate the components of a derived type
4607 ultimate pointer component.  */
4608 if (!(last && last->u.c.component->attr.pointer)
4609 && !(!last && expr->symtree->n.sym->attr.pointer))
4611 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
4613 gfc_add_expr_to_block (&se.pre, tmp);
/* Deallocate the object itself: arrays via the descriptor helper,
   scalars with status support.  */
4618 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4621 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4622 gfc_add_expr_to_block (&se.pre, tmp);
/* Null out the pointer after the scalar deallocation.  */
4624 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4625 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4628 gfc_add_expr_to_block (&se.pre, tmp);
4630 /* Keep track of the number of failed deallocations by adding stat
4631 of the last deallocation to the running total.  */
4632 if (code->expr1 || code->expr2)
4634 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4635 gfc_add_modify (&se.pre, astat, apstat);
4638 tmp = gfc_finish_block (&se.pre);
4639 gfc_add_expr_to_block (&block, tmp);
/* Set the STAT variable (if present) to the accumulated count.  */
4646 gfc_init_se (&se, NULL);
4647 gfc_conv_expr_lhs (&se, code->expr1);
4648 tmp = convert (TREE_TYPE (se.expr), astat);
4649 gfc_add_modify (&block, se.expr, tmp);
4655 /* A better error message may be possible, but not required.  */
4656 const char *msg = "Attempt to deallocate an unallocated object";
4657 tree errmsg, slen, dlen;
4659 gfc_init_se (&se, NULL);
4660 gfc_conv_expr_lhs (&se, code->expr2);
4662 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4664 gfc_add_modify (&block, errmsg,
4665 gfc_build_addr_expr (pchar_type_node,
4666 gfc_build_localized_cstring_const (msg)));
/* Copy min(strlen(msg), len(ERRMSG)) characters into the user's
   ERRMSG variable, guarded by astat != 0.  */
4668 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4669 dlen = gfc_get_expr_charlen (code->expr2);
4670 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4672 dlen = build_call_expr_loc (input_location,
4673 built_in_decls[BUILT_IN_MEMCPY], 3,
4674 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4676 tmp = fold_build2 (NE_EXPR, boolean_type_node, astat,
4677 build_int_cst (TREE_TYPE (astat), 0));
4679 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4681 gfc_add_expr_to_block (&block, tmp);
4684 return gfc_finish_block (&block);
4687 #include "gt-fortran-trans-stmt.h"