1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
31 #include "trans-stmt.h"
32 #include "trans-types.h"
33 #include "trans-array.h"
34 #include "trans-const.h"
36 #include "dependency.h"
/* Per-dimension iterator state used while translating FORALL; entries
   are chained through NEXT.  (Struct body only partially visible here.)  */
39 typedef struct iter_info
45 struct iter_info *next;
/* Book-keeping for one nesting level of a FORALL construct; PREV_NEST
   links back to the enclosing FORALL level.  */
49 typedef struct forall_info
56 struct forall_info *prev_nest;
/* Forward declaration: WHERE translation needs the surrounding FORALL
   context, so it is declared before the code that uses it.  */
60 static void gfc_trans_where_2 (gfc_code *, tree, bool,
61 forall_info *, stmtblock_t *);
63 /* Translate a F95 label number to a LABEL_EXPR. */
66 gfc_trans_label_here (gfc_code * code)
/* Emit the GENERIC label for statement label CODE->HERE so that GOTOs
   translated elsewhere can target it.  */
68 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
72 /* Given a variable expression which has been ASSIGNed to, find the decl
73 containing the auxiliary variables. For variables in common blocks this
77 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only variables with the ASSIGN attribute carry the auxiliary
   string-length/address decls this function exposes.  */
79 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
80 gfc_conv_expr (se, expr);
81 /* Deals with variable in common block. Get the field declaration. */
82 if (TREE_CODE (se->expr) == COMPONENT_REF)
/* Operand 1 of a COMPONENT_REF is the FIELD_DECL itself.  */
83 se->expr = TREE_OPERAND (se->expr, 1);
84 /* Deals with dummy argument. Get the parameter declaration. */
85 else if (TREE_CODE (se->expr) == INDIRECT_REF)
/* Strip the dereference to recover the PARM_DECL.  */
86 se->expr = TREE_OPERAND (se->expr, 0);
89 /* Translate a label assignment statement. */
92 gfc_trans_label_assign (gfc_code * code)
101 /* Start a new block. */
102 gfc_init_se (&se, NULL);
103 gfc_start_block (&se.pre);
104 gfc_conv_label_variable (&se, code->expr1);
/* The ASSIGNed variable keeps two hidden decls: a string length and
   the address of the label/format to jump to or print with.  */
106 len = GFC_DECL_STRING_LEN (se.expr);
107 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
109 label_tree = gfc_get_label_decl (code->label1);
111 if (code->label1->defined == ST_LABEL_TARGET)
/* Branch target: store the label address and use length -1 as the
   marker that this is a GOTO target, not a FORMAT.  */
113 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
114 len_tree = integer_minus_one_node;
/* Otherwise the label names a FORMAT: store the format string itself
   (as a wide string constant) together with its real length.  */
118 gfc_expr *format = code->label1->format;
120 label_len = format->value.character.length;
121 len_tree = build_int_cst (NULL_TREE, label_len);
122 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
123 format->value.character.string);
124 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
/* Commit both hidden fields.  */
127 gfc_add_modify (&se.pre, len, len_tree);
128 gfc_add_modify (&se.pre, addr, label_tree);
130 return gfc_finish_block (&se.pre);
133 /* Translate a GOTO statement. */
136 gfc_trans_goto (gfc_code * code)
138 locus loc = code->loc;
/* Plain (non-assigned) GOTO: a direct jump to the label.  */
144 if (code->label1 != NULL)
145 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
/* Assigned GOTO: jump through the address stored by a prior ASSIGN.  */
148 gfc_init_se (&se, NULL);
149 gfc_start_block (&se.pre);
150 gfc_conv_label_variable (&se, code->expr1);
/* A stored length of -1 marks the variable as holding a branch target
   (see gfc_trans_label_assign); anything else means it holds a FORMAT
   and jumping through it would be invalid.  */
151 tmp = GFC_DECL_STRING_LEN (se.expr);
152 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, tmp,
153 build_int_cst (TREE_TYPE (tmp), -1));
154 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
155 "Assigned label is not a target label");
157 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
159 /* We're going to ignore a label list. It does not really change the
160 statement's semantics (because it is just a further restriction on
161 what's legal code); before, we were comparing label addresses here, but
162 that's a very fragile business and may break with optimization. So
165 target = fold_build1_loc (input_location, GOTO_EXPR, void_type_node,
167 gfc_add_expr_to_block (&se.pre, target);
168 return gfc_finish_block (&se.pre);
172 /* Translate an ENTRY statement. Just adds a label for this entry point. */
174 gfc_trans_entry (gfc_code * code)
/* The actual dispatch to this entry point is generated elsewhere; here
   we only need the label it jumps to.  */
176 return build1_v (LABEL_EXPR, code->ext.entry->label);
180 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
181 elemental subroutines. Make temporaries for output arguments if any such
182 dependencies are found. Output arguments are chosen because internal_unpack
183 can be used, as is, to copy the result back to the variable. */
185 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
186 gfc_symbol * sym, gfc_actual_arglist * arg,
187 gfc_dep_check check_variable)
189 gfc_actual_arglist *arg0;
191 gfc_formal_arglist *formal;
192 gfc_loopinfo tmp_loop;
/* No scalarization chain means no array-valued arguments, hence no
   possible elemental dependency to break.  */
204 if (loopse->ss == NULL)
209 formal = sym->formal;
211 /* Loop over all the arguments testing for dependencies. */
212 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
218 /* Obtain the info structure for the current argument. */
220 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
224 info = &ss->data.info;
228 /* If there is a dependency, create a temporary and use it
229 instead of the variable. */
/* Dummies without an explicit interface have no fsym.  */
230 fsym = formal ? formal->sym : NULL;
231 if (e->expr_type == EXPR_VARIABLE
233 && fsym->attr.intent != INTENT_IN
234 && gfc_check_fncall_dependency (e, fsym->attr.intent,
235 sym, arg0, check_variable))
237 tree initial, temptype;
238 stmtblock_t temp_post;
240 /* Make a local loopinfo for the temporary creation, so that
241 none of the other ss->info's have to be renormalized. */
242 gfc_init_loopinfo (&tmp_loop);
243 tmp_loop.dimen = info->dimen;
244 for (n = 0; n < info->dimen; n++)
246 tmp_loop.to[n] = loopse->loop->to[n];
247 tmp_loop.from[n] = loopse->loop->from[n];
248 tmp_loop.order[n] = loopse->loop->order[n];
251 /* Obtain the argument descriptor for unpacking. */
252 gfc_init_se (&parmse, NULL);
253 parmse.want_pointer = 1;
255 /* The scalarizer introduces some specific peculiarities when
256 handling elemental subroutines; the stride can be needed up to
257 the dim_array - 1, rather than dim_loop - 1 to calculate
258 offsets outside the loop. For this reason, we make sure that
259 the descriptor has the dimensionality of the array by converting
260 trailing elements into ranges with end = start. */
261 for (ref = e->ref; ref; ref = ref->next)
262 if (ref->type == REF_ARRAY && ref->u.ar.type == AR_SECTION)
267 bool seen_range = false;
268 for (n = 0; n < ref->u.ar.dimen; n++)
270 if (ref->u.ar.dimen_type[n] == DIMEN_RANGE)
274 || ref->u.ar.dimen_type[n] != DIMEN_ELEMENT)
/* Degenerate element -> single-iteration range (end == start),
   so every array dimension survives into the descriptor.  */
277 ref->u.ar.end[n] = gfc_copy_expr (ref->u.ar.start[n]);
278 ref->u.ar.dimen_type[n] = DIMEN_RANGE;
282 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
283 gfc_add_block_to_block (&se->pre, &parmse.pre);
285 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
286 initialize the array temporary with a copy of the values. */
287 if (fsym->attr.intent == INTENT_INOUT
288 || (fsym->ts.type ==BT_DERIVED
289 && fsym->attr.intent == INTENT_OUT))
290 initial = parmse.expr;
294 /* Find the type of the temporary to create; we don't use the type
295 of e itself as this breaks for subcomponent-references in e (where
296 the type of e is that of the final reference, but parmse.expr's
297 type corresponds to the full derived-type). */
298 /* TODO: Fix this somehow so we don't need a temporary of the whole
299 array but instead only the components referenced. */
300 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
301 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
302 temptype = TREE_TYPE (temptype);
303 temptype = gfc_get_element_type (temptype);
305 /* Generate the temporary. Cleaning up the temporary should be the
306 very last thing done, so we add the code to a new block and add it
307 to se->post as last instructions. */
308 size = gfc_create_var (gfc_array_index_type, NULL);
309 data = gfc_create_var (pvoid_type_node, NULL);
310 gfc_init_block (&temp_post);
311 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
312 &tmp_loop, info, temptype,
316 gfc_add_modify (&se->pre, size, tmp);
317 tmp = fold_convert (pvoid_type_node, info->data);
318 gfc_add_modify (&se->pre, data, tmp);
320 /* Calculate the offset for the temporary. */
/* offset = -SUM (from[n] * stride[n]), accumulated by repeated
   subtraction below.  */
321 offset = gfc_index_zero_node;
322 for (n = 0; n < info->dimen; n++)
324 tmp = gfc_conv_descriptor_stride_get (info->descriptor,
326 tmp = fold_build2_loc (input_location, MULT_EXPR,
327 gfc_array_index_type,
328 loopse->loop->from[n], tmp);
329 offset = fold_build2_loc (input_location, MINUS_EXPR,
330 gfc_array_index_type, offset, tmp);
332 info->offset = gfc_create_var (gfc_array_index_type, NULL);
333 gfc_add_modify (&se->pre, info->offset, offset);
335 /* Copy the result back using unpack. */
336 tmp = build_call_expr_loc (input_location,
337 gfor_fndecl_in_unpack, 2, parmse.expr, data);
338 gfc_add_expr_to_block (&se->post, tmp);
340 /* parmse.pre is already added above. */
341 gfc_add_block_to_block (&se->post, &parmse.post);
342 gfc_add_block_to_block (&se->post, &temp_post);
348 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
351 gfc_trans_call (gfc_code * code, bool dependency_check,
352 tree mask, tree count1, bool invert)
356 int has_alternate_specifier;
357 gfc_dep_check check_variable;
358 tree index = NULL_TREE;
359 tree maskexpr = NULL_TREE;
362 /* A CALL starts a new block because the actual arguments may have to
363 be evaluated first. */
364 gfc_init_se (&se, NULL);
365 gfc_start_block (&se.pre);
367 gcc_assert (code->resolved_sym);
369 ss = gfc_ss_terminator;
/* Only elemental subroutines can need scalarization of their actual
   arguments; walk them to find array-valued ones.  */
370 if (code->resolved_sym->attr.elemental)
371 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
373 /* Is not an elemental subroutine call with array valued arguments. */
374 if (ss == gfc_ss_terminator)
377 /* Translate the call. */
378 has_alternate_specifier
379 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
382 /* A subroutine without side-effect, by definition, does nothing! */
383 TREE_SIDE_EFFECTS (se.expr) = 1;
385 /* Chain the pieces together and return the block. */
386 if (has_alternate_specifier)
388 gfc_code *select_code;
/* Alternate returns: the front end puts a SELECT right after the
   CALL; store the specifier value into its selector variable.  */
390 select_code = code->next;
391 gcc_assert(select_code->op == EXEC_SELECT);
392 sym = select_code->expr1->symtree->n.sym;
393 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
394 if (sym->backend_decl == NULL)
395 sym->backend_decl = gfc_get_symbol_decl (sym);
396 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
399 gfc_add_expr_to_block (&se.pre, se.expr);
401 gfc_add_block_to_block (&se.pre, &se.post);
406 /* An elemental subroutine call with array valued arguments has
414 /* gfc_walk_elemental_function_args renders the ss chain in the
415 reverse order to the actual argument order. */
416 ss = gfc_reverse_ss (ss);
418 /* Initialize the loop. */
419 gfc_init_se (&loopse, NULL);
420 gfc_init_loopinfo (&loop);
421 gfc_add_ss_to_loop (&loop, ss);
423 gfc_conv_ss_startstride (&loop);
424 /* TODO: gfc_conv_loop_setup generates a temporary for vector
425 subscripts. This could be prevented in the elemental case
426 as temporaries are handled separatedly
427 (below in gfc_conv_elemental_dependencies). */
428 gfc_conv_loop_setup (&loop, &code->expr1->where);
429 gfc_mark_ss_chain_used (ss, 1);
431 /* Convert the arguments, checking for dependencies. */
432 gfc_copy_loopinfo_to_se (&loopse, &loop);
435 /* For operator assignment, do dependency checking. */
436 if (dependency_check)
437 check_variable = ELEM_CHECK_VARIABLE;
439 check_variable = ELEM_DONT_CHECK_VARIABLE;
/* Break argument aliasing by creating temporaries where needed.  */
441 gfc_init_se (&depse, NULL);
442 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
443 code->ext.actual, check_variable);
445 gfc_add_block_to_block (&loop.pre, &depse.pre);
446 gfc_add_block_to_block (&loop.post, &depse.post);
448 /* Generate the loop body. */
449 gfc_start_scalarized_body (&loop, &body);
450 gfc_init_block (&block);
454 /* Form the mask expression according to the mask. */
/* MASK/COUNT1 are supplied when this CALL is inside WHERE/FORALL;
   INVERT selects the complement of the mask (ELSEWHERE).  */
456 maskexpr = gfc_build_array_ref (mask, index, NULL);
458 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
459 TREE_TYPE (maskexpr), maskexpr);
462 /* Add the subroutine call to the block. */
463 gfc_conv_procedure_call (&loopse, code->resolved_sym,
464 code->ext.actual, code->expr1, NULL);
/* Under a mask: call only where the mask is true, and bump the
   mask index counter each iteration.  */
468 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
469 build_empty_stmt (input_location));
470 gfc_add_expr_to_block (&loopse.pre, tmp);
471 tmp = fold_build2_loc (input_location, PLUS_EXPR,
472 gfc_array_index_type,
473 count1, gfc_index_one_node);
474 gfc_add_modify (&loopse.pre, count1, tmp);
477 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
479 gfc_add_block_to_block (&block, &loopse.pre);
480 gfc_add_block_to_block (&block, &loopse.post);
482 /* Finish up the loop block and the loop. */
483 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
484 gfc_trans_scalarizing_loops (&loop, &body);
485 gfc_add_block_to_block (&se.pre, &loop.pre);
486 gfc_add_block_to_block (&se.pre, &loop.post);
487 gfc_add_block_to_block (&se.pre, &se.post);
488 gfc_cleanup_loop (&loop);
491 return gfc_finish_block (&se.pre);
495 /* Translate the RETURN statement. */
498 gfc_trans_return (gfc_code * code)
506 /* If code->expr is not NULL, this return statement must appear
507 in a subroutine and current_fake_result_decl has already
510 result = gfc_get_fake_result_decl (NULL, 0);
/* No fake result decl: an alternate return specifier with no matching
   '*' dummy — warn and emit a plain return.  */
513 gfc_warning ("An alternate return at %L without a * dummy argument",
514 &code->expr1->where);
515 return gfc_generate_return ();
518 /* Start a new block for this statement. */
519 gfc_init_se (&se, NULL);
520 gfc_start_block (&se.pre);
/* Evaluate the alternate-return specifier expression.  */
522 gfc_conv_expr (&se, code->expr1);
524 /* Note that the actually returned expression is a simple value and
525 does not depend on any pointers or such; thus we can clean-up with
526 se.post before returning. */
527 tmp = fold_build2_loc (input_location, MODIFY_EXPR, TREE_TYPE (result),
528 result, fold_convert (TREE_TYPE (result),
530 gfc_add_expr_to_block (&se.pre, tmp);
531 gfc_add_block_to_block (&se.pre, &se.post);
533 tmp = gfc_generate_return ();
534 gfc_add_expr_to_block (&se.pre, tmp);
535 return gfc_finish_block (&se.pre);
/* RETURN without an expression.  */
538 return gfc_generate_return ();
542 /* Translate the PAUSE statement. We have to translate this statement
543 to a runtime library call. */
546 gfc_trans_pause (gfc_code * code)
548 tree gfc_int4_type_node = gfc_get_int_type (4);
552 /* Start a new block for this statement. */
553 gfc_init_se (&se, NULL);
554 gfc_start_block (&se.pre);
/* Three forms: bare PAUSE, PAUSE <integer>, PAUSE <string>; each maps
   to the matching libgfortran entry point.  */
557 if (code->expr1 == NULL)
/* Bare PAUSE: call the string variant with a NULL string/zero length.  */
559 tmp = build_int_cst (gfc_int4_type_node, 0);
560 tmp = build_call_expr_loc (input_location,
561 gfor_fndecl_pause_string, 2,
562 build_int_cst (pchar_type_node, 0), tmp);
564 else if (code->expr1->ts.type == BT_INTEGER)
566 gfc_conv_expr (&se, code->expr1);
567 tmp = build_call_expr_loc (input_location,
568 gfor_fndecl_pause_numeric, 1,
569 fold_convert (gfc_int4_type_node, se.expr));
/* Character stop-code: pass address and length.  */
573 gfc_conv_expr_reference (&se, code->expr1);
574 tmp = build_call_expr_loc (input_location,
575 gfor_fndecl_pause_string, 2,
576 se.expr, se.string_length);
579 gfc_add_expr_to_block (&se.pre, tmp);
581 gfc_add_block_to_block (&se.pre, &se.post);
583 return gfc_finish_block (&se.pre);
587 /* Translate the STOP statement. We have to translate this statement
588 to a runtime library call. */
591 gfc_trans_stop (gfc_code *code, bool error_stop)
593 tree gfc_int4_type_node = gfc_get_int_type (4);
597 /* Start a new block for this statement. */
598 gfc_init_se (&se, NULL);
599 gfc_start_block (&se.pre);
/* ERROR_STOP selects the error_stop_* library entry points; otherwise
   the plain stop_* ones.  Same three forms as PAUSE.  */
601 if (code->expr1 == NULL)
/* Bare STOP: string variant with NULL string and zero length.  */
603 tmp = build_int_cst (gfc_int4_type_node, 0);
604 tmp = build_call_expr_loc (input_location,
605 error_stop ? gfor_fndecl_error_stop_string
606 : gfor_fndecl_stop_string,
607 2, build_int_cst (pchar_type_node, 0), tmp);
609 else if (code->expr1->ts.type == BT_INTEGER)
611 gfc_conv_expr (&se, code->expr1);
612 tmp = build_call_expr_loc (input_location,
613 error_stop ? gfor_fndecl_error_stop_numeric
614 : gfor_fndecl_stop_numeric, 1,
615 fold_convert (gfc_int4_type_node, se.expr));
/* Character stop-code: pass address and length.  */
619 gfc_conv_expr_reference (&se, code->expr1);
620 tmp = build_call_expr_loc (input_location,
621 error_stop ? gfor_fndecl_error_stop_string
622 : gfor_fndecl_stop_string,
623 2, se.expr, se.string_length);
626 gfc_add_expr_to_block (&se.pre, tmp);
628 gfc_add_block_to_block (&se.pre, &se.post);
630 return gfc_finish_block (&se.pre);
/* Translate SYNC ALL / SYNC MEMORY / SYNC IMAGES.  With single-image
   coarray semantics the statement is mostly a no-op; only the bounds
   check on the image set and zeroing of STAT are generated here.  */
635 gfc_trans_sync (gfc_code *code, gfc_exec_op type __attribute__ ((unused)))
/* Only open a block if we will actually emit something.  */
639 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
641 gfc_init_se (&se, NULL);
642 gfc_start_block (&se.pre);
645 /* Check SYNC IMAGES(imageset) for valid image index.
646 FIXME: Add a check for image-set arrays. */
647 if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
648 && code->expr1->rank == 0)
/* Single image: the only valid image number is 1.  */
651 gfc_conv_expr (&se, code->expr1);
652 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
653 se.expr, build_int_cst (TREE_TYPE (se.expr), 1));
654 gfc_trans_runtime_check (true, false, cond, &se.pre,
655 &code->expr1->where, "Invalid image number "
657 fold_convert (integer_type_node, se.expr));
660 /* If STAT is present, set it to zero. */
663 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
664 gfc_conv_expr (&se, code->expr2);
665 gfc_add_modify (&se.pre, se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
668 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
669 return gfc_finish_block (&se.pre);
675 /* Generate GENERIC for the IF construct. This function also deals with
676 the simple IF statement, because the front end translates the IF
677 statement into an IF construct.
709 where COND_S is the simplified version of the predicate. PRE_COND_S
710 are the pre side-effects produced by the translation of the
712 We need to build the chain recursively otherwise we run into
713 problems with folding incomplete statements. */
716 gfc_trans_if_1 (gfc_code * code)
721 /* Check for an unconditional ELSE clause. */
/* A block with no expr is the trailing ELSE: just translate its body.  */
723 return gfc_trans_code (code->next);
725 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
726 gfc_init_se (&if_se, NULL);
727 gfc_start_block (&if_se.pre);
729 /* Calculate the IF condition expression. */
730 gfc_conv_expr_val (&if_se, code->expr1);
732 /* Translate the THEN clause. */
733 stmt = gfc_trans_code (code->next);
735 /* Translate the ELSE clause. */
/* ELSE IF chains are handled by recursing on the next block.  */
737 elsestmt = gfc_trans_if_1 (code->block);
739 elsestmt = build_empty_stmt (input_location);
741 /* Build the condition expression and add it to the condition block. */
742 stmt = fold_build3_loc (input_location, COND_EXPR, void_type_node,
743 if_se.expr, stmt, elsestmt);
745 gfc_add_expr_to_block (&if_se.pre, stmt);
747 /* Finish off this statement. */
748 return gfc_finish_block (&if_se.pre);
/* Translate an IF construct: wrap gfc_trans_if_1 with the exit label
   that EXIT statements inside the construct jump to.  */
752 gfc_trans_if (gfc_code * code)
757 /* Create exit label so it is available for trans'ing the body code. */
758 exit_label = gfc_build_label_decl (NULL_TREE);
759 code->exit_label = exit_label;
761 /* Translate the actual code in code->block. */
762 gfc_init_block (&body);
763 gfc_add_expr_to_block (&body, gfc_trans_if_1 (code->block));
765 /* Add exit label. */
766 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
768 return gfc_finish_block (&body);
772 /* Translate an arithmetic IF expression.
774 IF (cond) label1, label2, label3 translates to
786 An optimized version can be generated in case of equal labels.
787 E.g., if label1 is equal to label2, we can translate it to
796 gfc_trans_arithmetic_if (gfc_code * code)
804 /* Start a new block. */
805 gfc_init_se (&se, NULL);
806 gfc_start_block (&se.pre);
808 /* Pre-evaluate COND. */
/* COND is compared twice below, so force a single evaluation.  */
809 gfc_conv_expr_val (&se, code->expr1);
810 se.expr = gfc_evaluate_now (se.expr, &se.pre);
812 /* Build something to compare with. */
813 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
815 if (code->label1->value != code->label2->value)
817 /* If (cond < 0) take branch1 else take branch2.
818 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
819 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
820 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
/* label1 == label3 allows testing NE instead of LT.  */
822 if (code->label1->value != code->label3->value)
823 tmp = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
826 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
829 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
830 tmp, branch1, branch2);
/* label1 == label2: negative and zero cases share one target.  */
833 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
835 if (code->label1->value != code->label3->value
836 && code->label2->value != code->label3->value)
838 /* if (cond <= 0) take branch1 else take branch2. */
839 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
840 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
842 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
843 tmp, branch1, branch2);
846 /* Append the COND_EXPR to the evaluation of COND, and return. */
847 gfc_add_expr_to_block (&se.pre, branch1);
848 return gfc_finish_block (&se.pre);
852 /* Translate a CRITICAL block. */
854 gfc_trans_critical (gfc_code *code)
/* NOTE(review): with single-image coarray semantics only the body is
   emitted here; no locking call is visible in this code.  */
859 gfc_start_block (&block);
860 tmp = gfc_trans_code (code->block->next);
861 gfc_add_expr_to_block (&block, tmp);
863 return gfc_finish_block (&block);
867 /* Translate a BLOCK construct. This is basically what we would do for a
871 gfc_trans_block_construct (gfc_code* code)
875 gfc_wrapped_block block;
/* The BLOCK construct carries its own namespace with local symbols.  */
879 ns = code->ext.block.ns;
884 /* Process local variables. */
885 gcc_assert (!sym->tlink);
887 gfc_process_block_locals (ns, code->ext.block.assoc);
889 /* Generate code including exit-label. */
890 gfc_init_block (&body);
891 exit_label = gfc_build_label_decl (NULL_TREE);
892 code->exit_label = exit_label;
893 gfc_add_expr_to_block (&body, gfc_trans_code (ns->code));
894 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
896 /* Finish everything. */
/* Wrap the body so init/cleanup for deferred variables (allocatables
   etc.) can be attached around it.  */
897 gfc_start_wrapped_block (&block, gfc_finish_block (&body));
898 gfc_trans_deferred_vars (sym, &block);
900 return gfc_finish_wrapped_block (&block);
904 /* Translate the simple DO construct. This is where the loop variable has
905 integer type and step +-1. We can't use this in the general case
906 because integer overflow and floating point errors could give incorrect
908 We translate a do loop from:
910 DO dovar = from, to, step
916 [Evaluate loop bounds and step]
918 if ((step > 0) ? (dovar <= to) : (dovar => to))
924 cond = (dovar == to);
926 if (cond) goto end_label;
931 This helps the optimizers by avoiding the extra induction variable
932 used in the general case. */
935 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
936 tree from, tree to, tree step, tree exit_cond)
942 tree saved_dovar = NULL;
946 type = TREE_TYPE (dovar);
948 /* Initialize the DO variable: dovar = from. */
949 gfc_add_modify (pblock, dovar, from);
951 /* Save value for do-tinkering checking. */
/* With -fcheck=do, keep a shadow copy so modification of the loop
   variable inside the body can be detected at run time.  */
952 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
954 saved_dovar = gfc_create_var (type, ".saved_dovar");
955 gfc_add_modify (pblock, saved_dovar, dovar);
958 /* Cycle and exit statements are implemented with gotos. */
959 cycle_label = gfc_build_label_decl (NULL_TREE);
960 exit_label = gfc_build_label_decl (NULL_TREE);
962 /* Put the labels where they can be found later. See gfc_trans_do(). */
963 code->cycle_label = cycle_label;
964 code->exit_label = exit_label;
967 gfc_start_block (&body);
969 /* Main loop body. */
970 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
971 gfc_add_expr_to_block (&body, tmp);
973 /* Label for cycle statements (if needed). */
974 if (TREE_USED (cycle_label))
976 tmp = build1_v (LABEL_EXPR, cycle_label);
977 gfc_add_expr_to_block (&body, tmp);
980 /* Check whether someone has modified the loop variable. */
981 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
983 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
985 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
986 "Loop variable has been modified");
989 /* Exit the loop if there is an I/O result condition or error. */
992 tmp = build1_v (GOTO_EXPR, exit_label);
993 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
995 build_empty_stmt (input_location));
996 gfc_add_expr_to_block (&body, tmp);
999 /* Evaluate the loop condition. */
/* Test DOVAR == TO *before* incrementing, so the +-1 step can never
   overflow past TO.  */
1000 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, dovar,
1002 cond = gfc_evaluate_now (cond, &body);
1004 /* Increment the loop variable. */
1005 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, dovar, step);
1006 gfc_add_modify (&body, dovar, tmp);
1008 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1009 gfc_add_modify (&body, saved_dovar, dovar);
1011 /* The loop exit. */
1012 tmp = build1_v (GOTO_EXPR, exit_label);
1013 TREE_USED (exit_label) = 1;
1014 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1015 cond, tmp, build_empty_stmt (input_location));
1016 gfc_add_expr_to_block (&body, tmp);
1018 /* Finish the loop body. */
1019 tmp = gfc_finish_block (&body);
1020 tmp = build1_v (LOOP_EXPR, tmp);
1022 /* Only execute the loop if the number of iterations is positive. */
/* STEP is a +-1 constant here, so its sign picks the comparison.  */
1023 if (tree_int_cst_sgn (step) > 0)
1024 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, dovar,
1027 cond = fold_build2_loc (input_location, GE_EXPR, boolean_type_node, dovar,
1029 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond, tmp,
1030 build_empty_stmt (input_location));
1031 gfc_add_expr_to_block (pblock, tmp);
1033 /* Add the exit label. */
1034 tmp = build1_v (LABEL_EXPR, exit_label);
1035 gfc_add_expr_to_block (pblock, tmp);
1037 return gfc_finish_block (pblock);
1040 /* Translate the DO construct. This obviously is one of the most
1041 important ones to get right with any compiler, but especially
1044 We special case some loop forms as described in gfc_trans_simple_do.
1045 For other cases we implement them with a separate loop count,
1046 as described in the standard.
1048 We translate a do loop from:
1050 DO dovar = from, to, step
1056 [evaluate loop bounds and step]
1057 empty = (step > 0 ? to < from : to > from);
1058 countm1 = (to - from) / step;
1060 if (empty) goto exit_label;
1066 if (countm1 ==0) goto exit_label;
1071 countm1 is an unsigned integer. It is equal to the loop count minus one,
1072 because the loop count itself can overflow. */
1075 gfc_trans_do (gfc_code * code, tree exit_cond)
1079 tree saved_dovar = NULL;
1094 gfc_start_block (&block);
1096 /* Evaluate all the expressions in the iterator. */
1097 gfc_init_se (&se, NULL);
1098 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1099 gfc_add_block_to_block (&block, &se.pre);
1101 type = TREE_TYPE (dovar);
1103 gfc_init_se (&se, NULL);
1104 gfc_conv_expr_val (&se, code->ext.iterator->start);
1105 gfc_add_block_to_block (&block, &se.pre);
/* Bounds and step are evaluated exactly once, before the loop.  */
1106 from = gfc_evaluate_now (se.expr, &block);
1108 gfc_init_se (&se, NULL);
1109 gfc_conv_expr_val (&se, code->ext.iterator->end);
1110 gfc_add_block_to_block (&block, &se.pre);
1111 to = gfc_evaluate_now (se.expr, &block);
1113 gfc_init_se (&se, NULL);
1114 gfc_conv_expr_val (&se, code->ext.iterator->step);
1115 gfc_add_block_to_block (&block, &se.pre);
1116 step = gfc_evaluate_now (se.expr, &block);
/* -fcheck=do: a zero step would make the count computation divide
   by zero, so diagnose it at run time first.  */
1118 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1120 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, step,
1121 fold_convert (type, integer_zero_node));
1122 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1123 "DO step value is zero");
1126 /* Special case simple loops. */
/* Integer loop variable with constant step +-1: delegate to the
   simpler, overflow-safe form without a separate counter.  */
1127 if (TREE_CODE (type) == INTEGER_TYPE
1128 && (integer_onep (step)
1129 || tree_int_cst_equal (step, integer_minus_one_node)))
1130 return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);
1132 pos_step = fold_build2_loc (input_location, GT_EXPR, boolean_type_node, step,
1133 fold_convert (type, integer_zero_node));
/* The iteration count lives in an unsigned type because the count
   itself may exceed the signed range (see function comment).  */
1135 if (TREE_CODE (type) == INTEGER_TYPE)
1136 utype = unsigned_type_for (type);
1138 utype = unsigned_type_for (gfc_array_index_type);
1139 countm1 = gfc_create_var (utype, "countm1");
1141 /* Cycle and exit statements are implemented with gotos. */
1142 cycle_label = gfc_build_label_decl (NULL_TREE);
1143 exit_label = gfc_build_label_decl (NULL_TREE);
1144 TREE_USED (exit_label) = 1;
1146 /* Put these labels where they can be found later. */
1147 code->cycle_label = cycle_label;
1148 code->exit_label = exit_label;
1150 /* Initialize the DO variable: dovar = from. */
1151 gfc_add_modify (&block, dovar, from);
1153 /* Save value for do-tinkering checking. */
1154 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1156 saved_dovar = gfc_create_var (type, ".saved_dovar");
1157 gfc_add_modify (&block, saved_dovar, dovar);
1160 /* Initialize loop count and jump to exit label if the loop is empty.
1161 This code is executed before we enter the loop body. We generate:
1162 step_sign = sign(1,step);
1173 countm1 = (to*step_sign - from*step_sign) / (step*step_sign);
1177 if (TREE_CODE (type) == INTEGER_TYPE)
1179 tree pos, neg, step_sign, to2, from2, step2;
1181 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1183 tmp = fold_build2_loc (input_location, LT_EXPR, boolean_type_node, step,
1184 build_int_cst (TREE_TYPE (step), 0));
1185 step_sign = fold_build3_loc (input_location, COND_EXPR, type, tmp,
1186 build_int_cst (type, -1),
1187 build_int_cst (type, 1));
/* Empty-loop test, split by step sign: POS covers step > 0
   (exit when to < from), NEG covers step < 0 (exit when to > from).  */
1189 tmp = fold_build2_loc (input_location, LT_EXPR, boolean_type_node, to,
1191 pos = fold_build3_loc (input_location, COND_EXPR, void_type_node, tmp,
1192 build1_v (GOTO_EXPR, exit_label),
1193 build_empty_stmt (input_location));
1195 tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node, to,
1197 neg = fold_build3_loc (input_location, COND_EXPR, void_type_node, tmp,
1198 build1_v (GOTO_EXPR, exit_label),
1199 build_empty_stmt (input_location));
1200 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1201 pos_step, pos, neg);
1203 gfc_add_expr_to_block (&block, tmp);
1205 /* Calculate the loop count. to-from can overflow, so
1206 we cast to unsigned. */
/* Multiplying through by step_sign normalizes to a positive step,
   so one unsigned division computes the count for both directions.  */
1208 to2 = fold_build2_loc (input_location, MULT_EXPR, type, step_sign, to);
1209 from2 = fold_build2_loc (input_location, MULT_EXPR, type, step_sign,
1211 step2 = fold_build2_loc (input_location, MULT_EXPR, type, step_sign,
1213 step2 = fold_convert (utype, step2);
1214 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, to2, from2);
1215 tmp = fold_convert (utype, tmp);
1216 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, utype, tmp,
1218 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
1220 gfc_add_expr_to_block (&block, tmp);
1224 /* TODO: We could use the same width as the real type.
1225 This would probably cause more problems that it solves
1226 when we implement "long double" types. */
/* Non-integer (REAL) loop control: count via floating division,
   truncated toward zero.  */
1228 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, to, from);
1229 tmp = fold_build2_loc (input_location, RDIV_EXPR, type, tmp, step);
1230 tmp = fold_build1_loc (input_location, FIX_TRUNC_EXPR, utype, tmp);
1231 gfc_add_modify (&block, countm1, tmp);
1233 /* We need a special check for empty loops:
1234 empty = (step > 0 ? to < from : to > from); */
1235 tmp = fold_build3_loc (input_location, COND_EXPR, boolean_type_node,
1237 fold_build2_loc (input_location, LT_EXPR,
1238 boolean_type_node, to, from),
1239 fold_build2_loc (input_location, GT_EXPR,
1240 boolean_type_node, to, from));
1241 /* If the loop is empty, go directly to the exit label. */
1242 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, tmp,
1243 build1_v (GOTO_EXPR, exit_label),
1244 build_empty_stmt (input_location));
1245 gfc_add_expr_to_block (&block, tmp);
1249 gfc_start_block (&body);
1251 /* Main loop body. */
1252 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1253 gfc_add_expr_to_block (&body, tmp);
1255 /* Label for cycle statements (if needed). */
1256 if (TREE_USED (cycle_label))
1258 tmp = build1_v (LABEL_EXPR, cycle_label);
1259 gfc_add_expr_to_block (&body, tmp);
1262 /* Check whether someone has modified the loop variable. */
1263 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1265 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, dovar,
1267 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1268 "Loop variable has been modified");
1271 /* Exit the loop if there is an I/O result condition or error. */
1274 tmp = build1_v (GOTO_EXPR, exit_label);
1275 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1277 build_empty_stmt (input_location));
1278 gfc_add_expr_to_block (&body, tmp);
1281 /* Increment the loop variable. */
1282 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, dovar, step);
1283 gfc_add_modify (&body, dovar, tmp);
1285 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1286 gfc_add_modify (&body, saved_dovar, dovar);
1288 /* End with the loop condition. Loop until countm1 == 0. */
1289 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, countm1,
1290 build_int_cst (utype, 0));
1291 tmp = build1_v (GOTO_EXPR, exit_label);
1292 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1293 cond, tmp, build_empty_stmt (input_location));
1294 gfc_add_expr_to_block (&body, tmp);
1296 /* Decrement the loop count. */
/* Safe: the countm1 == 0 test above already exited, so this never
   wraps below zero.  */
1297 tmp = fold_build2_loc (input_location, MINUS_EXPR, utype, countm1,
1298 build_int_cst (utype, 1));
1299 gfc_add_modify (&body, countm1, tmp);
1301 /* End of loop body. */
1302 tmp = gfc_finish_block (&body);
1304 /* The for loop itself. */
1305 tmp = build1_v (LOOP_EXPR, tmp);
1306 gfc_add_expr_to_block (&block, tmp);
1308 /* Add the exit label. */
1309 tmp = build1_v (LABEL_EXPR, exit_label);
1310 gfc_add_expr_to_block (&block, tmp);
1312 return gfc_finish_block (&block);
1316 /* Translate the DO WHILE construct.
1329 if (! cond) goto exit_label;
1335 Because the evaluation of the exit condition `cond' may have side
1336 effects, we can't do much for empty loop bodies. The backend optimizers
1337 should be smart enough to eliminate any dead loops. */
1340 gfc_trans_do_while (gfc_code * code)
/* NOTE(review): this listing is elided — the return type ("static tree"
   presumably), opening brace and the local declarations (tmp, cycle_label,
   exit_label, block, cond) fall on omitted lines.  */
1348 /* Everything we build here is part of the loop body. */
1349 gfc_start_block (&block);
1351 /* Cycle and exit statements are implemented with gotos. */
1352 cycle_label = gfc_build_label_decl (NULL_TREE);
1353 exit_label = gfc_build_label_decl (NULL_TREE);
1355 /* Put the labels where they can be found later. See gfc_trans_do(). */
1356 code->cycle_label = cycle_label;
1357 code->exit_label = exit_label;
1359 /* Create a GIMPLE version of the exit condition. */
1360 gfc_init_se (&cond, NULL);
1361 gfc_conv_expr_val (&cond, code->expr1);
/* Note: the condition is negated here so the COND_EXPR below can jump to
   the exit label on the "true" (i.e. NOT cond) arm.  */
1362 gfc_add_block_to_block (&block, &cond.pre);
1363 cond.expr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
1364 boolean_type_node, cond.expr);
1366 /* Build "IF (! cond) GOTO exit_label". */
1367 tmp = build1_v (GOTO_EXPR, exit_label);
1368 TREE_USED (exit_label) = 1;
1369 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1370 cond.expr, tmp, build_empty_stmt (input_location));
1371 gfc_add_expr_to_block (&block, tmp);
1373 /* The main body of the loop. */
1374 tmp = gfc_trans_code (code->block->next);
1375 gfc_add_expr_to_block (&block, tmp);
1377 /* Label for cycle statements (if needed).  Only emitted when some
1378 CYCLE statement inside the body actually marked the label used. */
1378 if (TREE_USED (cycle_label))
1380 tmp = build1_v (LABEL_EXPR, cycle_label);
1381 gfc_add_expr_to_block (&block, tmp);
1384 /* End of loop body. */
1385 tmp = gfc_finish_block (&block);
1387 gfc_init_block (&block);
1388 /* Build the loop. */
1389 tmp = build1_v (LOOP_EXPR, tmp);
1390 gfc_add_expr_to_block (&block, tmp);
1392 /* Add the exit label. */
1393 tmp = build1_v (LABEL_EXPR, exit_label);
1394 gfc_add_expr_to_block (&block, tmp);
1396 return gfc_finish_block (&block);
1400 /* Translate the SELECT CASE construct for INTEGER case expressions,
1401 without killing all potential optimizations. The problem is that
1402 Fortran allows unbounded cases, but the back-end does not, so we
1403 need to intercept those before we enter the equivalent SWITCH_EXPR
1406 For example, we translate this,
1409 CASE (:100,101,105:115)
1419 to the GENERIC equivalent,
1423 case (minimum value for typeof(expr) ... 100:
1429 case 200 ... (maximum value for typeof(expr):
1446 gfc_trans_integer_select (gfc_code * code)
/* NOTE(review): elided listing — local declarations (block, body, se, c,
   cp, low, high, label, end_label, tmp) and the opening brace fall on
   omitted lines.  */
1456 gfc_start_block (&block);
1458 /* Calculate the switch expression once, up front; its side effects
1459 (se.pre) are emitted before the SWITCH_EXPR. */
1459 gfc_init_se (&se, NULL);
1460 gfc_conv_expr_val (&se, code->expr1);
1461 gfc_add_block_to_block (&block, &se.pre);
1463 end_label = gfc_build_label_decl (NULL_TREE);
1465 gfc_init_block (&body);
/* Outer loop: one iteration per CASE block; inner loop: one iteration
   per case range within that block's case list. */
1467 for (c = code->block; c; c = c->block)
1469 for (cp = c->ext.case_list; cp; cp = cp->next)
1474 /* Assume it's the default case. */
1475 low = high = NULL_TREE;
1479 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1482 /* If there's only a lower bound, set the high bound to the
1483 maximum value of the case expression. */
1485 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1490 /* Three cases are possible here:
1492 1) There is no lower bound, e.g. CASE (:N).
1493 2) There is a lower bound .NE. high bound, that is
1494 a case range, e.g. CASE (N:M) where M>N (we make
1495 sure that M>N during type resolution).
1496 3) There is a lower bound, and it has the same value
1497 as the high bound, e.g. CASE (N:N). This is our
1498 internal representation of CASE(N).
1500 In the first and second case, we need to set a value for
1501 high. In the third case, we don't because the GCC middle
1502 end represents a single case value by just letting high be
1503 a NULL_TREE. We can't do that because we need to be able
1504 to represent unbounded cases. */
1508 && mpz_cmp (cp->low->value.integer,
1509 cp->high->value.integer) != 0))
1510 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1513 /* Unbounded case. */
1515 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1518 /* Build a label. */
1519 label = gfc_build_label_decl (NULL_TREE);
1521 /* Add this case label.
1522 Add parameter 'label', make it match GCC backend. */
1523 tmp = fold_build3_loc (input_location, CASE_LABEL_EXPR,
1524 void_type_node, low, high, label);
1525 gfc_add_expr_to_block (&body, tmp);
1528 /* Add the statements for this case. */
1529 tmp = gfc_trans_code (c->next);
1530 gfc_add_expr_to_block (&body, tmp);
1532 /* Break to the end of the construct.  Fortran has no fallthrough,
1533 so every case block ends with a goto past the switch. */
1533 tmp = build1_v (GOTO_EXPR, end_label);
1534 gfc_add_expr_to_block (&body, tmp);
1537 tmp = gfc_finish_block (&body);
1538 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1539 gfc_add_expr_to_block (&block, tmp);
1541 tmp = build1_v (LABEL_EXPR, end_label);
1542 gfc_add_expr_to_block (&block, tmp);
1544 return gfc_finish_block (&block);
1548 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1550 There are only two cases possible here, even though the standard
1551 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1552 .FALSE., and DEFAULT.
1554 We never generate more than two blocks here. Instead, we always
1555 try to eliminate the DEFAULT case. This way, we can translate this
1556 kind of SELECT construct to a simple
1560 expression in GENERIC. */
1563 gfc_trans_logical_select (gfc_code * code)
/* t/f/d record which gfc_code block handles .TRUE., .FALSE. and DEFAULT
   respectively (NULL when absent).  NOTE(review): elided listing — the
   declarations of block, se, c, cp and the initializations of t/f/d fall
   on omitted lines. */
1566 gfc_code *t, *f, *d;
1571 /* Assume we don't have any cases at all. */
1574 /* Now see which ones we actually do have. We can have at most two
1575 cases in a single case list: one for .TRUE. and one for .FALSE.
1576 The default case is always separate. If the cases for .TRUE. and
1577 .FALSE. are in the same case list, the block for that case list
1578 always executed, and we don't generate code a COND_EXPR. */
1579 for (c = code->block; c; c = c->block)
1581 for (cp = c->ext.case_list; cp; cp = cp->next)
1585 if (cp->low->value.logical == 0) /* .FALSE. */
1587 else /* if (cp->value.logical != 0), thus .TRUE. */
1595 /* Start a new block. */
1596 gfc_start_block (&block);
1598 /* Calculate the switch expression. We always need to do this
1599 because it may have side effects. */
1600 gfc_init_se (&se, NULL);
1601 gfc_conv_expr_val (&se, code->expr1);
1602 gfc_add_block_to_block (&block, &se.pre);
1604 if (t == f && t != NULL)
1606 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1607 translate the code for these cases, append it to the current
1609 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1613 tree true_tree, false_tree, stmt;
/* Default both arms to empty so a missing case compiles to a no-op. */
1615 true_tree = build_empty_stmt (input_location);
1616 false_tree = build_empty_stmt (input_location);
1618 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1619 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1620 make the missing case the default case. */
1621 if (t != NULL && f != NULL)
1631 /* Translate the code for each of these blocks, and append it to
1632 the current block. */
1634 true_tree = gfc_trans_code (t->next);
1637 false_tree = gfc_trans_code (f->next);
/* The whole LOGICAL select collapses to a single if/else in GENERIC. */
1639 stmt = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1640 se.expr, true_tree, false_tree);
1641 gfc_add_expr_to_block (&block, stmt);
1644 return gfc_finish_block (&block);
1648 /* The jump table types are stored in static variables to avoid
1649 constructing them from scratch every single time. */
1650 static GTY(()) tree select_struct[2];
1652 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1653 Instead of generating compares and jumps, it is far simpler to
1654 generate a data structure describing the cases in order and call a
1655 library subroutine that locates the right case.
1656 This is particularly true because this is the only case where we
1657 might have to dispose of a temporary.
1658 The library subroutine returns a pointer to jump to or NULL if no
1659 branches are to be taken. */
1662 gfc_trans_character_select (gfc_code *code)
1664 tree init, end_label, tmp, type, case_num, label, fndecl;
1665 stmtblock_t block, body;
1670 VEC(constructor_elt,gc) *inits = NULL;
1672 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
1674 /* The jump table types are stored in static variables to avoid
1675 constructing them from scratch every single time. */
1676 static tree ss_string1[2], ss_string1_len[2];
1677 static tree ss_string2[2], ss_string2_len[2];
1678 static tree ss_target[2];
/* NOTE(review): elided listing — further locals (cp, c, d, expr1se, se,
   n, i, k, ...) and several statements fall on omitted lines throughout
   this function. */
1680 cp = code->block->ext.case_list;
1681 while (cp->left != NULL)
1684 /* Generate the body */
1685 gfc_start_block (&block);
1686 gfc_init_se (&expr1se, NULL);
1687 gfc_conv_expr_reference (&expr1se, code->expr1);
1689 gfc_add_block_to_block (&block, &expr1se.pre);
1691 end_label = gfc_build_label_decl (NULL_TREE);
1693 gfc_init_block (&body);
1695 /* Attempt to optimize length 1 selects: when the selector is a
1696 single character, the whole construct can become an integer
1696 SWITCH_EXPR on the character code instead of a library call. */
1696 if (integer_onep (expr1se.string_length))
/* First pass: verify every case low/high bound is effectively a single
   character (trailing blanks ignored, per Fortran comparison rules);
   otherwise fall through to the jump-table strategy below. */
1698 for (d = cp; d; d = d->right)
1703 gcc_assert (d->low->expr_type == EXPR_CONSTANT
1704 && d->low->ts.type == BT_CHARACTER);
1705 if (d->low->value.character.length > 1)
1707 for (i = 1; i < d->low->value.character.length; i++)
1708 if (d->low->value.character.string[i] != ' ')
1710 if (i != d->low->value.character.length)
1712 if (optimize && d->high && i == 1)
1714 gcc_assert (d->high->expr_type == EXPR_CONSTANT
1715 && d->high->ts.type == BT_CHARACTER);
1716 if (d->high->value.character.length > 1
1717 && (d->low->value.character.string[0]
1718 == d->high->value.character.string[0])
1719 && d->high->value.character.string[1] != ' '
1720 && ((d->low->value.character.string[1] < ' ')
1721 == (d->high->value.character.string[1]
1731 gcc_assert (d->high->expr_type == EXPR_CONSTANT
1732 && d->high->ts.type == BT_CHARACTER);
1733 if (d->high->value.character.length > 1)
1735 for (i = 1; i < d->high->value.character.length; i++)
1736 if (d->high->value.character.string[i] != ' ')
1738 if (i != d->high->value.character.length)
/* Second pass (length-1 path): build a CASE_LABEL_EXPR per case range
   over the character code, mirroring gfc_trans_integer_select. */
1745 tree ctype = gfc_get_char_type (code->expr1->ts.kind);
1747 for (c = code->block; c; c = c->block)
1749 for (cp = c->ext.case_list; cp; cp = cp->next)
1755 /* Assume it's the default case. */
1756 low = high = NULL_TREE;
1760 /* CASE ('ab') or CASE ('ab':'az') will never match
1761 any length 1 character. */
1762 if (cp->low->value.character.length > 1
1763 && cp->low->value.character.string[1] != ' ')
1766 if (cp->low->value.character.length > 0)
1767 r = cp->low->value.character.string[0];
1770 low = build_int_cst (ctype, r);
1772 /* If there's only a lower bound, set the high bound
1773 to the maximum value of the case expression. */
1775 high = TYPE_MAX_VALUE (ctype);
1781 || (cp->low->value.character.string[0]
1782 != cp->high->value.character.string[0]))
1784 if (cp->high->value.character.length > 0)
1785 r = cp->high->value.character.string[0];
1788 high = build_int_cst (ctype, r);
1791 /* Unbounded case. */
1793 low = TYPE_MIN_VALUE (ctype);
1796 /* Build a label. */
1797 label = gfc_build_label_decl (NULL_TREE);
1799 /* Add this case label.
1800 Add parameter 'label', make it match GCC backend. */
1801 tmp = fold_build3_loc (input_location, CASE_LABEL_EXPR,
1802 void_type_node, low, high, label);
1803 gfc_add_expr_to_block (&body, tmp);
1806 /* Add the statements for this case. */
1807 tmp = gfc_trans_code (c->next);
1808 gfc_add_expr_to_block (&body, tmp);
1810 /* Break to the end of the construct. */
1811 tmp = build1_v (GOTO_EXPR, end_label);
1812 gfc_add_expr_to_block (&body, tmp);
/* Extract the single significant character of the selector and switch
   on it. */
1815 tmp = gfc_string_to_single_character (expr1se.string_length,
1817 code->expr1->ts.kind);
1818 case_num = gfc_create_var (ctype, "case_num");
1819 gfc_add_modify (&block, case_num, tmp);
1821 gfc_add_block_to_block (&block, &expr1se.post);
1823 tmp = gfc_finish_block (&body);
1824 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1825 gfc_add_expr_to_block (&block, tmp);
1827 tmp = build1_v (LABEL_EXPR, end_label);
1828 gfc_add_expr_to_block (&block, tmp);
1830 return gfc_finish_block (&block);
/* General path: build a static jump table of {string1, len1, string2,
   len2, target} records and dispatch through the library's select
   routine.  k indexes select_struct[] by character kind (1 or 4). */
1834 if (code->expr1->ts.kind == 1)
1836 else if (code->expr1->ts.kind == 4)
1841 if (select_struct[k] == NULL)
1844 select_struct[k] = make_node (RECORD_TYPE);
1846 if (code->expr1->ts.kind == 1)
1847 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1848 else if (code->expr1->ts.kind == 4)
1849 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
1854 #define ADD_FIELD(NAME, TYPE) \
1855 ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \
1856 get_identifier (stringize(NAME)), \
1860 ADD_FIELD (string1, pchartype);
1861 ADD_FIELD (string1_len, gfc_charlen_type_node);
1863 ADD_FIELD (string2, pchartype);
1864 ADD_FIELD (string2_len, gfc_charlen_type_node);
1866 ADD_FIELD (target, integer_type_node);
1869 gfc_finish_type (select_struct[k]);
/* Number the cases (n entries) and emit the per-case bodies, each
   labelled by its table index d->n. */
1873 for (d = cp; d; d = d->right)
1876 for (c = code->block; c; c = c->block)
1878 for (d = c->ext.case_list; d; d = d->next)
1880 label = gfc_build_label_decl (NULL_TREE);
1881 tmp = fold_build3_loc (input_location, CASE_LABEL_EXPR,
1883 (d->low == NULL && d->high == NULL)
1884 ? NULL : build_int_cst (NULL_TREE, d->n),
1886 gfc_add_expr_to_block (&body, tmp);
1889 tmp = gfc_trans_code (c->next);
1890 gfc_add_expr_to_block (&body, tmp);
1892 tmp = build1_v (GOTO_EXPR, end_label);
1893 gfc_add_expr_to_block (&body, tmp);
1896 /* Generate the structure describing the branches */
1897 for (d = cp; d; d = d->right)
1899 VEC(constructor_elt,gc) *node = NULL;
1901 gfc_init_se (&se, NULL);
/* A NULL low/high bound is encoded as a null pointer + zero length in
   the table; the runtime treats it as unbounded. */
1905 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node);
1906 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node);
1910 gfc_conv_expr_reference (&se, d->low);
1912 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr);
1913 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length);
1916 if (d->high == NULL)
1918 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node);
1919 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node);
1923 gfc_init_se (&se, NULL);
1924 gfc_conv_expr_reference (&se, d->high);
1926 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr);
1927 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length);
1930 CONSTRUCTOR_APPEND_ELT (node, ss_target[k],
1931 build_int_cst (integer_type_node, d->n));
1933 tmp = build_constructor (select_struct[k], node);
1934 CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp);
1937 type = build_array_type (select_struct[k],
1938 build_index_type (build_int_cst (NULL_TREE, n-1)));
1940 init = build_constructor (type, inits);
1941 TREE_CONSTANT (init) = 1;
1942 TREE_STATIC (init) = 1;
1943 /* Create a static variable to hold the jump table. */
1944 tmp = gfc_create_var (type, "jumptable");
1945 TREE_CONSTANT (tmp) = 1;
1946 TREE_STATIC (tmp) = 1;
1947 TREE_READONLY (tmp) = 1;
1948 DECL_INITIAL (tmp) = init;
1951 /* Build the library call: returns the matching case index, used as
1952 the SWITCH_EXPR selector below. */
1952 init = gfc_build_addr_expr (pvoid_type_node, init);
1954 if (code->expr1->ts.kind == 1)
1955 fndecl = gfor_fndecl_select_string;
1956 else if (code->expr1->ts.kind == 4)
1957 fndecl = gfor_fndecl_select_string_char4;
1961 tmp = build_call_expr_loc (input_location,
1962 fndecl, 4, init, build_int_cst (NULL_TREE, n),
1963 expr1se.expr, expr1se.string_length);
1964 case_num = gfc_create_var (integer_type_node, "case_num");
1965 gfc_add_modify (&block, case_num, tmp);
1967 gfc_add_block_to_block (&block, &expr1se.post);
1969 tmp = gfc_finish_block (&body);
1970 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1971 gfc_add_expr_to_block (&block, tmp);
1973 tmp = build1_v (LABEL_EXPR, end_label);
1974 gfc_add_expr_to_block (&block, tmp);
1976 return gfc_finish_block (&block);
1980 /* Translate the three variants of the SELECT CASE construct.
1982 SELECT CASEs with INTEGER case expressions can be translated to an
1983 equivalent GENERIC switch statement, and for LOGICAL case
1984 expressions we build one or two if-else compares.
1986 SELECT CASEs with CHARACTER case expressions are a whole different
1987 story, because they don't exist in GENERIC. So we sort them and
1988 do a binary search at runtime.
1990 Fortran has no BREAK statement, and it does not allow jumps from
1991 one case block to another. That makes things a lot easier for
1995 gfc_trans_select (gfc_code * code)
/* Dispatcher: picks the type-specific translator and appends the shared
   exit label.  NOTE(review): elided listing — return type, opening brace
   and the declarations of block, body, exit_label are on omitted lines. */
2001 gcc_assert (code && code->expr1);
2002 gfc_init_block (&block);
2004 /* Build the exit label and hang it in.  EXIT statements inside the
2005 construct branch to it via code->exit_label. */
2005 exit_label = gfc_build_label_decl (NULL_TREE);
2006 code->exit_label = exit_label;
2008 /* Empty SELECT constructs are legal. */
2009 if (code->block == NULL)
2010 body = build_empty_stmt (input_location);
2012 /* Select the correct translation function. */
2014 switch (code->expr1->ts.type)
2017 body = gfc_trans_logical_select (code);
2021 body = gfc_trans_integer_select (code);
2025 body = gfc_trans_character_select (code);
2029 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
2033 /* Build everything together. */
2034 gfc_add_expr_to_block (&block, body);
2035 gfc_add_expr_to_block (&block, build1_v (LABEL_EXPR, exit_label));
2037 return gfc_finish_block (&block);
2041 /* Traversal function to substitute a replacement symtree if the symbol
2042 in the expression is the same as that passed. f == 2 signals that
2043 that variable itself is not to be checked - only the references.
2044 This group of functions is used when the variable expression in a
2045 FORALL assignment has internal references. For example:
2046 FORALL (i = 1:4) p(p(i)) = i
2047 The only recourse here is to store a copy of 'p' for the index
2050 static gfc_symtree *new_symtree;
2051 static gfc_symtree *old_symtree;
/* Callback for gfc_traverse_expr: rewrites references to SYM so they
   point at the temporary's symtree (new_symtree, set up by
   forall_make_variable_temp). */
2054 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
2056 if (expr->expr_type != EXPR_VARIABLE)
2061 else if (expr->symtree->n.sym == sym)
2062 expr->symtree = new_symtree;
/* Walk expression E replacing every reference to SYM with the temporary
   symtree; F is forwarded to forall_replace (2 = skip the root variable
   itself, check only its references). */
2068 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
2070 gfc_traverse_expr (e, sym, forall_replace, f);
/* Inverse callback of forall_replace: undo the substitution, restoring
   old_symtree wherever new_symtree was installed. */
2074 forall_restore (gfc_expr *expr,
2075 gfc_symbol *sym ATTRIBUTE_UNUSED,
2076 int *f ATTRIBUTE_UNUSED)
2078 if (expr->expr_type != EXPR_VARIABLE)
2081 if (expr->symtree == new_symtree)
2082 expr->symtree = old_symtree;
/* Walk expression E undoing the temporary-symtree substitution made by
   forall_replace_symtree. */
2088 forall_restore_symtree (gfc_expr *e)
2090 gfc_traverse_expr (e, NULL, forall_restore, 0);
/* Make a temporary copy of the FORALL assignment's lvalue variable and
   redirect both sides of the assignment (c->expr1, c->expr2) to use it.
   Initialization code goes into PRE, cleanup into POST.  Used when the
   lvalue has internal references (see comment above forall_replace). */
2094 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2099 gfc_symbol *new_sym;
2100 gfc_symbol *old_sym;
/* NOTE(review): elided listing — declarations of e, tse, rse, tmp, root
   and some statements fall on omitted lines. */
2104 /* Build a copy of the lvalue. */
2105 old_symtree = c->expr1->symtree;
2106 old_sym = old_symtree->n.sym;
2107 e = gfc_lval_expr_from_sym (old_sym);
2108 if (old_sym->attr.dimension)
/* Array case: copy via gfc_conv_subref_array_arg into a descriptor. */
2110 gfc_init_se (&tse, NULL);
2111 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
2112 gfc_add_block_to_block (pre, &tse.pre);
2113 gfc_add_block_to_block (post, &tse.post);
2114 tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
2116 if (e->ts.type != BT_CHARACTER)
2118 /* Use the variable offset for the temporary. */
2119 tmp = gfc_conv_array_offset (old_sym->backend_decl);
2120 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
/* Scalar case: create a plain temporary and assign the value in. */
2125 gfc_init_se (&tse, NULL);
2126 gfc_init_se (&rse, NULL);
2127 gfc_conv_expr (&rse, e);
2128 if (e->ts.type == BT_CHARACTER)
2130 tse.string_length = rse.string_length;
2131 tmp = gfc_get_character_type_len (gfc_default_character_kind,
2133 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
2135 gfc_add_block_to_block (pre, &tse.pre);
2136 gfc_add_block_to_block (post, &tse.post);
2140 tmp = gfc_typenode_for_spec (&e->ts);
2141 tse.expr = gfc_create_var (tmp, "temp");
2144 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
2145 e->expr_type == EXPR_VARIABLE, true);
2146 gfc_add_expr_to_block (pre, tmp);
2150 /* Create a new symbol to represent the lvalue.  Shallow-copies the
2151 relevant attributes of the original symbol. */
2151 new_sym = gfc_new_symbol (old_sym->name, NULL);
2152 new_sym->ts = old_sym->ts;
2153 new_sym->attr.referenced = 1;
2154 new_sym->attr.temporary = 1;
2155 new_sym->attr.dimension = old_sym->attr.dimension;
2156 new_sym->attr.flavor = old_sym->attr.flavor;
2158 /* Use the temporary as the backend_decl. */
2159 new_sym->backend_decl = tse.expr;
2161 /* Create a fake symtree for it.  new_symtree/old_symtree are the
2162 file-scope statics consumed by forall_replace/forall_restore. */
2163 new_symtree = gfc_new_symtree (&root, old_sym->name);
2164 new_symtree->n.sym = new_sym;
2165 gcc_assert (new_symtree == root);
2167 /* Go through the expression reference replacing the old_symtree
2169 forall_replace_symtree (c->expr1, old_sym, 2);
2171 /* Now we have made this temporary, we might as well use it for
2172 the right hand side. */
2173 forall_replace_symtree (c->expr2, old_sym, 1);
2177 /* Handles dependencies in forall assignments.  Returns whether a
2178 temporary is needed (NOTE(review): the return type/value statements
2178 are on elided lines — confirm against the full source). */
2179 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2186 lsym = c->expr1->symtree->n.sym;
2187 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
2189 /* Now check for dependencies within the 'variable'
2190 expression itself. These are treated by making a complete
2191 copy of variable and changing all the references to it
2192 point to the copy instead. Note that the shallow copy of
2193 the variable will not suffice for derived types with
2194 pointer components. We therefore leave these to their
2196 if (lsym->ts.type == BT_DERIVED
2197 && lsym->ts.u.derived->attr.pointer_comp)
2201 if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
2203 forall_make_variable_temp (c, pre, post);
2207 /* Substrings with dependencies are treated in the same
2209 way: overlapping substring assignment from the same symbol. */
2209 if (c->expr1->ts.type == BT_CHARACTER
2211 && c->expr2->expr_type == EXPR_VARIABLE
2212 && lsym == c->expr2->symtree->n.sym)
2214 for (lref = c->expr1->ref; lref; lref = lref->next)
2215 if (lref->type == REF_SUBSTRING)
2217 for (rref = c->expr2->ref; rref; rref = rref->next)
2218 if (rref->type == REF_SUBSTRING)
/* A temp is needed when the RHS substring starts before the LHS
   substring, i.e. the copy would read locations already written. */
2222 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
2224 forall_make_variable_temp (c, pre, post);
/* Undo the symtree substitution on both sides of the assignment and
   release the fake symtree created by forall_make_variable_temp. */
2233 cleanup_forall_symtrees (gfc_code *c)
2235 forall_restore_symtree (c->expr1);
2236 forall_restore_symtree (c->expr2);
2237 gfc_free (new_symtree->n.sym);
2238 gfc_free (new_symtree);
2242 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2243 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2244 indicates whether we should generate code to test the FORALLs mask
2245 array. OUTER is the loop header to be used for initializing mask
2248 The generated loop format is:
2249 count = (end - start + step) / step
2262 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
2263 int mask_flag, stmtblock_t *outer)
2271 tree var, start, end, step;
/* NOTE(review): elided listing — declarations of n, nvar, iter, count,
   cond, tmp, block, exit_label and some loop-setup statements fall on
   omitted lines. */
2274 /* Initialize the mask index outside the FORALL nest. */
2275 if (mask_flag && forall_tmp->mask)
2276 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
/* Wrap BODY in one counted loop per FORALL index variable, innermost
   first (the list order of forall_tmp->this_loop). */
2278 iter = forall_tmp->this_loop;
2279 nvar = forall_tmp->nvar;
2280 for (n = 0; n < nvar; n++)
2283 start = iter->start;
2287 exit_label = gfc_build_label_decl (NULL_TREE);
2288 TREE_USED (exit_label) = 1;
2290 /* The loop counter. */
2291 count = gfc_create_var (TREE_TYPE (var), "count");
2293 /* The body of the loop. */
2294 gfc_init_block (&block);
2296 /* The exit condition: leave when count has been decremented to 0. */
2297 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
2298 count, build_int_cst (TREE_TYPE (count), 0));
2299 tmp = build1_v (GOTO_EXPR, exit_label);
2300 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2301 cond, tmp, build_empty_stmt (input_location));
2302 gfc_add_expr_to_block (&block, tmp);
2304 /* The main loop body. */
2305 gfc_add_expr_to_block (&block, body);
2307 /* Increment the loop variable. */
2308 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), var,
2310 gfc_add_modify (&block, var, tmp);
2312 /* Advance to the next mask element. Only do this for the
2314 innermost loop (n == 0); the mask is indexed linearly. */
2314 if (n == 0 && mask_flag && forall_tmp->mask)
2316 tree maskindex = forall_tmp->maskindex;
2317 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2318 maskindex, gfc_index_one_node);
2319 gfc_add_modify (&block, maskindex, tmp);
2322 /* Decrement the loop counter. */
2323 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), count,
2324 build_int_cst (TREE_TYPE (var), 1));
2325 gfc_add_modify (&block, count, tmp);
2327 body = gfc_finish_block (&block);
2329 /* Loop var initialization. */
2330 gfc_init_block (&block);
2331 gfc_add_modify (&block, var, start);
2334 /* Initialize the loop counter: count = (end - start + step) / step,
2335 per the scheme documented in the function header. */
2335 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), step,
2337 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), end,
2339 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, TREE_TYPE (var),
2341 gfc_add_modify (&block, count, tmp);
2343 /* The loop expression. */
2344 tmp = build1_v (LOOP_EXPR, body);
2345 gfc_add_expr_to_block (&block, tmp);
2347 /* The exit label. */
2348 tmp = build1_v (LABEL_EXPR, exit_label);
2349 gfc_add_expr_to_block (&block, tmp);
2351 body = gfc_finish_block (&block);
2358 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2359 is nonzero, the body is controlled by all masks in the forall nest.
2360 Otherwise, the innermost loop is not controlled by it's mask. This
2361 is used for initializing that mask. */
2364 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
2369 forall_info *forall_tmp;
2370 tree mask, maskindex;
/* NOTE(review): elided listing — the mask_flag parameter line and the
   declarations of tmp and header fall on omitted lines. */
2372 gfc_start_block (&header);
/* Walk the FORALL nest inside-out (innermost level first via prev_nest),
   wrapping BODY in each level's mask test and loop. */
2374 forall_tmp = nested_forall_info;
2375 while (forall_tmp != NULL)
2377 /* Generate body with masks' control. */
2380 mask = forall_tmp->mask;
2381 maskindex = forall_tmp->maskindex;
2383 /* If a mask was specified make the assignment conditional. */
2386 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2387 body = build3_v (COND_EXPR, tmp, body,
2388 build_empty_stmt (input_location));
2391 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2392 forall_tmp = forall_tmp->prev_nest;
2396 gfc_add_expr_to_block (&header, body);
2397 return gfc_finish_block (&header);
2401 /* Allocate data for holding a temporary array. Returns either a local
2402 temporary array or a pointer variable. */
2405 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
/* NOTE(review): elided listing — the elem_type parameter line, locals
   (tmpvar, type, tmp) and the return statement fall on omitted lines. */
2412 if (INTEGER_CST_P (size))
2413 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
2414 size, gfc_index_one_node);
2418 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2419 type = build_array_type (elem_type, type);
2420 if (gfc_can_put_var_on_stack (bytesize))
/* Small constant-sized temporaries live on the stack ... */
2422 gcc_assert (INTEGER_CST_P (size));
2423 tmpvar = gfc_create_var (type, "temp");
/* ... otherwise heap-allocate via the runtime's malloc wrapper; *pdata
   receives the pvoid alias so the caller can free it later. */
2428 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2429 *pdata = convert (pvoid_type_node, tmpvar);
2431 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2432 gfc_add_modify (pblock, tmpvar, tmp);
2438 /* Generate codes to copy the temporary to the actual lhs.
2439 TMP1 is the temporary array, COUNT1 the running index into it,
2439 COUNT3 the index into the WHEREMASK (sense inverted by INVERT). */
2441 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2442 tree count1, tree wheremask, bool invert)
2446 stmtblock_t block, body;
/* NOTE(review): elided listing — locals (lss, lse, rse, loop1, tmp,
   wheremaskexpr) and the return statements fall on omitted lines. */
2452 lss = gfc_walk_expr (expr);
/* Scalar LHS: a single assignment, no scalarization loop needed. */
2454 if (lss == gfc_ss_terminator)
2456 gfc_start_block (&block);
2458 gfc_init_se (&lse, NULL);
2460 /* Translate the expression. */
2461 gfc_conv_expr (&lse, expr);
2463 /* Form the expression for the temporary. */
2464 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2466 /* Use the scalar assignment as is. */
2467 gfc_add_block_to_block (&block, &lse.pre);
2468 gfc_add_modify (&block, lse.expr, tmp);
2469 gfc_add_block_to_block (&block, &lse.post);
2471 /* Increment the count1. */
2472 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
2473 count1, gfc_index_one_node);
2474 gfc_add_modify (&block, count1, tmp);
2476 tmp = gfc_finish_block (&block);
/* Array LHS: build a scalarized copy loop over the LHS shape. */
2480 gfc_start_block (&block);
2482 gfc_init_loopinfo (&loop1);
2483 gfc_init_se (&rse, NULL);
2484 gfc_init_se (&lse, NULL);
2486 /* Associate the lss with the loop. */
2487 gfc_add_ss_to_loop (&loop1, lss);
2489 /* Calculate the bounds of the scalarization. */
2490 gfc_conv_ss_startstride (&loop1);
2491 /* Setup the scalarizing loops. */
2492 gfc_conv_loop_setup (&loop1, &expr->where);
2494 gfc_mark_ss_chain_used (lss, 1);
2496 /* Start the scalarized loop body. */
2497 gfc_start_scalarized_body (&loop1, &body);
2499 /* Setup the gfc_se structures. */
2500 gfc_copy_loopinfo_to_se (&lse, &loop1);
2503 /* Form the expression of the temporary. */
2504 if (lss != gfc_ss_terminator)
2505 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2506 /* Translate expr. */
2507 gfc_conv_expr (&lse, expr);
2509 /* Use the scalar assignment. */
2510 rse.string_length = lse.string_length;
2511 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);
2513 /* Form the mask expression according to the mask tree list: the
2514 assignment is guarded by wheremask[count3], optionally inverted. */
2516 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2518 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
2519 TREE_TYPE (wheremaskexpr),
2521 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2523 build_empty_stmt (input_location));
2526 gfc_add_expr_to_block (&body, tmp);
2528 /* Increment count1. */
2529 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2530 count1, gfc_index_one_node);
2531 gfc_add_modify (&body, count1, tmp);
2533 /* Increment count3. */
2536 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2537 gfc_array_index_type, count3,
2538 gfc_index_one_node);
2539 gfc_add_modify (&body, count3, tmp);
2542 /* Generate the copying loops. */
2543 gfc_trans_scalarizing_loops (&loop1, &body);
2544 gfc_add_block_to_block (&block, &loop1.pre);
2545 gfc_add_block_to_block (&block, &loop1.post);
2546 gfc_cleanup_loop (&loop1);
2548 tmp = gfc_finish_block (&block);
2554 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2555 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2556 and should not be freed. WHEREMASK is the conditional execution mask
2557 whose sense may be inverted by INVERT. */
/* Body of generate_loop_for_rhs_to_temp: store the RHS values of a FORALL
   assignment into the linear temporary TMP1, indexed by COUNT1, optionally
   guarded by the WHERE mask element at COUNT3 (sense flipped by INVERT).
   NOTE(review): this chunk is an elided extraction -- gaps in the embedded
   line numbers mark dropped lines (braces, declarations); confirm against
   the full trans-stmt.c before editing.  */
2560 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2561 tree count1, gfc_ss *lss, gfc_ss *rss,
2562 tree wheremask, bool invert)
2564 stmtblock_t block, body1;
2571 gfc_start_block (&block);
2573 gfc_init_se (&rse, NULL);
2574 gfc_init_se (&lse, NULL);
/* Scalar case (LSS exhausted): evaluate EXPR2 once and address TMP1[COUNT1]
   directly -- no scalarizer loop is needed.  */
2576 if (lss == gfc_ss_terminator)
2578 gfc_init_block (&body1);
2579 gfc_conv_expr (&rse, expr2);
2580 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
/* Array case: drive the scalarizer over LSS/RSS and store element-wise.  */
2584 /* Initialize the loop. */
2585 gfc_init_loopinfo (&loop);
2587 /* We may need LSS to determine the shape of the expression. */
2588 gfc_add_ss_to_loop (&loop, lss);
2589 gfc_add_ss_to_loop (&loop, rss);
2591 gfc_conv_ss_startstride (&loop);
2592 gfc_conv_loop_setup (&loop, &expr2->where);
2594 gfc_mark_ss_chain_used (rss, 1);
2595 /* Start the loop body. */
2596 gfc_start_scalarized_body (&loop, &body1);
2598 /* Translate the expression. */
2599 gfc_copy_loopinfo_to_se (&rse, &loop);
2601 gfc_conv_expr (&rse, expr2);
2603 /* Form the expression of the temporary. */
2604 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2607 /* Use the scalar assignment. */
2608 lse.string_length = rse.string_length;
2609 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2610 expr2->expr_type == EXPR_VARIABLE, true);
/* Wrap the store in a COND_EXPR on the mask element; INVERT negates the
   mask with TRUTH_NOT_EXPR so the else-branch is an empty statement.  */
2612 /* Form the mask expression according to the mask tree list. */
2615 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2617 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
2618 TREE_TYPE (wheremaskexpr),
2620 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2622 build_empty_stmt (input_location));
2625 gfc_add_expr_to_block (&body1, tmp);
/* Scalar path: the body runs once, so COUNT1 is bumped in the outer block;
   array path: COUNT1 (and the mask index COUNT3) advance inside the loop.  */
2627 if (lss == gfc_ss_terminator)
2629 gfc_add_block_to_block (&block, &body1);
2631 /* Increment count1. */
2632 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
2633 count1, gfc_index_one_node);
2634 gfc_add_modify (&block, count1, tmp);
2638 /* Increment count1. */
2639 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2640 count1, gfc_index_one_node);
2641 gfc_add_modify (&body1, count1, tmp);
2643 /* Increment count3. */
2646 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2647 gfc_array_index_type, count3,
2648 gfc_index_one_node);
2649 gfc_add_modify (&body1, count3, tmp);
2652 /* Generate the copying loops. */
2653 gfc_trans_scalarizing_loops (&loop, &body1);
2655 gfc_add_block_to_block (&block, &loop.pre);
2656 gfc_add_block_to_block (&block, &loop.post);
2658 gfc_cleanup_loop (&loop);
2659 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2660 as tree nodes in SS may not be valid in different scope. */
2663 tmp = gfc_finish_block (&block);
2668 /* Calculate the size of temporary needed in the assignment inside forall.
2669 LSS and RSS are filled in this function. */
/* Compute the number of elements needed for the per-iteration temporary of
   a FORALL assignment EXPR1 = EXPR2, as a gfc_array_index_type tree.
   *LSS / *RSS receive the walked SS chains so the copy loops can reuse them
   (the caller must not free them -- see generate_loop_for_rhs_to_temp).
   NOTE(review): elided extraction -- numbering gaps mark dropped lines.  */
2672 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2673 stmtblock_t * pblock,
2674 gfc_ss **lss, gfc_ss **rss)
2682 *lss = gfc_walk_expr (expr1);
/* A scalar LHS needs exactly one element.  */
2685 size = gfc_index_one_node;
2686 if (*lss != gfc_ss_terminator)
2688 gfc_init_loopinfo (&loop);
2690 /* Walk the RHS of the expression. */
2691 *rss = gfc_walk_expr (expr2);
2692 if (*rss == gfc_ss_terminator)
2694 /* The rhs is scalar. Add a ss for the expression. */
2695 *rss = gfc_get_ss ();
2696 (*rss)->next = gfc_ss_terminator;
2697 (*rss)->type = GFC_SS_SCALAR;
2698 (*rss)->expr = expr2;
2701 /* Associate the SS with the loop. */
2702 gfc_add_ss_to_loop (&loop, *lss);
2703 /* We don't actually need to add the rhs at this point, but it might
2704 make guessing the loop bounds a bit easier. */
2705 gfc_add_ss_to_loop (&loop, *rss);
2707 /* We only want the shape of the expression, not rest of the junk
2708 generated by the scalarizer. */
2709 loop.array_parameter = 1;
2711 /* Calculate the bounds of the scalarization. */
2712 save_flag = gfc_option.rtcheck;
/* Mask off only the bounds check while computing the shape.  The previous
   "!GFC_RTCHECK_BOUNDS" (logical NOT) evaluated to 0 and silently cleared
   every runtime check; bitwise "~" disables just GFC_RTCHECK_BOUNDS.  */
2713 gfc_option.rtcheck &= ~GFC_RTCHECK_BOUNDS;
2714 gfc_conv_ss_startstride (&loop);
2715 gfc_option.rtcheck = save_flag;
2716 gfc_conv_loop_setup (&loop, &expr2->where);
2718 /* Figure out how many elements we need. */
/* size *= (to[i] - from[i] + 1) over every scalarized dimension.  */
2719 for (i = 0; i < loop.dimen; i++)
2721 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2722 gfc_array_index_type,
2723 gfc_index_one_node, loop.from[i]);
2724 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2725 gfc_array_index_type, tmp, loop.to[i]);
2726 size = fold_build2_loc (input_location, MULT_EXPR,
2727 gfc_array_index_type, size, tmp);
/* Force evaluation into a temporary now, between the loop's setup and
   cleanup code, so the caller gets a stable value.  */
2729 gfc_add_block_to_block (pblock, &loop.pre);
2730 size = gfc_evaluate_now (size, pblock);
2731 gfc_add_block_to_block (pblock, &loop.post);
2733 /* TODO: write a function that cleans up a loopinfo without freeing
2734 the SS chains. Currently a NOP. */
2741 /* Calculate the overall iterator number of the nested forall construct.
2742 This routine actually calculates the number of times the body of the
2743 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2744 that by the expression INNER_SIZE. The BLOCK argument specifies the
2745 block in which to calculate the result, and the optional INNER_SIZE_BODY
2746 argument contains any statements that need to be executed (inside the loop)
2747 to initialize or calculate INNER_SIZE. */
/* Body of compute_overall_iter_number: total iterations of the forall nest
   times INNER_SIZE.  NOTE(review): elided extraction -- numbering gaps mark
   dropped lines.  */
2750 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2751 stmtblock_t *inner_size_body, stmtblock_t *block)
2753 forall_info *forall_tmp = nested_forall_info;
/* Fold unconditional, constant-sized inner levels into INNER_SIZE at
   compile time, peeling them off the nest as we go.  */
2757 /* We can eliminate the innermost unconditional loops with constant
2759 if (INTEGER_CST_P (inner_size))
2762 && !forall_tmp->mask
2763 && INTEGER_CST_P (forall_tmp->size))
2765 inner_size = fold_build2_loc (input_location, MULT_EXPR,
2766 gfc_array_index_type,
2767 inner_size, forall_tmp->size);
2768 forall_tmp = forall_tmp->prev_nest;
2771 /* If there are no loops left, we have our constant result. */
2776 /* Otherwise, create a temporary variable to compute the result. */
2777 number = gfc_create_var (gfc_array_index_type, "num");
2778 gfc_add_modify (block, number, gfc_index_zero_node);
/* Emit "num += inner_size" (preceded by INNER_SIZE_BODY, if any) and run
   it inside whatever forall levels remain after the constant folding.  */
2780 gfc_start_block (&body);
2781 if (inner_size_body)
2782 gfc_add_block_to_block (&body, inner_size_body);
2784 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2785 gfc_array_index_type, number, inner_size);
2788 gfc_add_modify (&body, number, tmp);
2789 tmp = gfc_finish_block (&body);
2791 /* Generate loops. */
2792 if (forall_tmp != NULL)
2793 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2795 gfc_add_expr_to_block (block, tmp);
2801 /* Allocate temporary for forall construct. SIZE is the size of temporary
2802 needed. PTEMP1 is returned for space free. */
/* Body of allocate_temp_for_forall_nest_1: allocate SIZE elements of TYPE,
   returning the dereferenced allocation; *PTEMP1 keeps the raw pointer for
   the later free.  NOTE(review): elided extraction -- numbering gaps mark
   dropped lines.  */
2805 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
/* Convert the element count to bytes; skip the multiply when the element
   size is one byte.  */
2812 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2813 if (!integer_onep (unit))
2814 bytesize = fold_build2_loc (input_location, MULT_EXPR,
2815 gfc_array_index_type, size, unit);
2820 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2823 tmp = build_fold_indirect_ref_loc (input_location, tmp);
2828 /* Allocate temporary for forall construct according to the information in
2829 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2830 assignment inside forall. PTEMP1 is returned for space free. */
/* Body of allocate_temp_for_forall_nest: thin wrapper -- compute the overall
   element count for the whole nest, then do the raw allocation.  */
2833 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2834 tree inner_size, stmtblock_t * inner_size_body,
2835 stmtblock_t * block, tree * ptemp1)
2839 /* Calculate the total size of temporary needed in forall construct. */
2840 size = compute_overall_iter_number (nested_forall_info, inner_size,
2841 inner_size_body, block);
2843 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2847 /* Handle assignments inside forall which need temporary.
2849 forall (i=start:end:stride; maskexpr)
2852 (where e,f<i> are arbitrary expressions possibly involving i
2853 and there is a dependency between e<i> and f<i>)
2855 masktmp(:) = maskexpr(:)
2860 for (i = start; i <= end; i += stride)
2864 for (i = start; i <= end; i += stride)
2866 if (masktmp[maskindex++])
2867 tmp[count1++] = f<i>
2871 for (i = start; i <= end; i += stride)
2873 if (masktmp[maskindex++])
2874 e<i> = tmp[count1++]
/* Body of gfc_trans_assign_need_temp: two-pass FORALL assignment through a
   temporary -- pass 1 copies the RHS into the temporary under the forall
   loops, counters are reset, pass 2 copies the temporary to the LHS under
   a second identical loop nest.  NOTE(review): elided extraction --
   numbering gaps mark dropped lines.  */
2879 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2880 tree wheremask, bool invert,
2881 forall_info * nested_forall_info,
2882 stmtblock_t * block)
2890 stmtblock_t inner_size_body;
2892 /* Create vars. count1 is the current iterator number of the nested
2894 count1 = gfc_create_var (gfc_array_index_type, "count1");
2896 /* Count is the wheremask index. */
2899 count = gfc_create_var (gfc_array_index_type, "count");
2900 gfc_add_modify (block, count, gfc_index_zero_node);
2905 /* Initialize count1. */
2906 gfc_add_modify (block, count1, gfc_index_zero_node);
2908 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2909 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2910 gfc_init_block (&inner_size_body);
2911 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
/* CHARACTER LHS: lazily translate the length expression so the element
   type carries the right length; otherwise use the plain type node.  */
2914 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
2915 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
2917 if (!expr1->ts.u.cl->backend_decl)
2920 gfc_init_se (&tse, NULL);
2921 gfc_conv_expr (&tse, expr1->ts.u.cl->length);
2922 expr1->ts.u.cl->backend_decl = tse.expr;
2924 type = gfc_get_character_type_len (gfc_default_character_kind,
2925 expr1->ts.u.cl->backend_decl);
2928 type = gfc_typenode_for_spec (&expr1->ts);
2930 /* Allocate temporary for nested forall construct according to the
2931 information in nested_forall_info and inner_size. */
2932 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2933 &inner_size_body, block, &ptemp1);
/* Pass 1: rhs -> temporary, wrapped in the full forall loop nest.  */
2935 /* Generate codes to copy rhs to the temporary . */
2936 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2939 /* Generate body and loops according to the information in
2940 nested_forall_info. */
2941 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2942 gfc_add_expr_to_block (block, tmp);
/* Reset both counters before the second pass so it replays the same
   element order.  */
2945 gfc_add_modify (block, count1, gfc_index_zero_node);
2949 gfc_add_modify (block, count, gfc_index_zero_node);
/* Pass 2: temporary -> lhs, under an identical loop nest.  */
2951 /* Generate codes to copy the temporary to lhs. */
2952 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2955 /* Generate body and loops according to the information in
2956 nested_forall_info. */
2957 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2958 gfc_add_expr_to_block (block, tmp);
2962 /* Free the temporary. */
2963 tmp = gfc_call_free (ptemp1);
2964 gfc_add_expr_to_block (block, tmp);
2969 /* Translate pointer assignment inside FORALL which need temporary. */
/* Body of gfc_trans_pointer_assign_need_temp: pointer assignment inside
   FORALL via a temporary.  Scalar-pointer targets store raw pointer values;
   array-pointer targets store whole descriptors.  Both use the same
   two-pass scheme: fill the temporary under the forall loops, reset COUNT,
   then assign from the temporary under a second identical nest.
   NOTE(review): elided extraction -- numbering gaps mark dropped lines.  */
2972 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2973 forall_info * nested_forall_info,
2974 stmtblock_t * block)
2988 tree tmp, tmp1, ptemp1;
2990 count = gfc_create_var (gfc_array_index_type, "count");
2991 gfc_add_modify (block, count, gfc_index_zero_node);
/* One temporary slot per forall iteration.  */
2993 inner_size = integer_one_node;
2994 lss = gfc_walk_expr (expr1);
2995 rss = gfc_walk_expr (expr2);
/* Scalar-pointer case: the temporary is an array of pointers to the
   LHS element type.  */
2996 if (lss == gfc_ss_terminator)
2998 type = gfc_typenode_for_spec (&expr1->ts);
2999 type = build_pointer_type (type);
3001 /* Allocate temporary for nested forall construct according to the
3002 information in nested_forall_info and inner_size. */
3003 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
3004 inner_size, NULL, block, &ptemp1);
/* Pass 1: evaluate the target's address and store it at TMP1[COUNT].  */
3005 gfc_start_block (&body);
3006 gfc_init_se (&lse, NULL);
3007 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3008 gfc_init_se (&rse, NULL);
3009 rse.want_pointer = 1;
3010 gfc_conv_expr (&rse, expr2);
3011 gfc_add_block_to_block (&body, &rse.pre);
3012 gfc_add_modify (&body, lse.expr,
3013 fold_convert (TREE_TYPE (lse.expr), rse.expr));
3014 gfc_add_block_to_block (&body, &rse.post);
3016 /* Increment count. */
3017 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3018 count, gfc_index_one_node);
3019 gfc_add_modify (&body, count, tmp);
3021 tmp = gfc_finish_block (&body);
3023 /* Generate body and loops according to the information in
3024 nested_forall_info. */
3025 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3026 gfc_add_expr_to_block (block, tmp);
/* Pass 2: replay with COUNT reset and assign the saved pointers to the
   LHS pointer.  */
3029 gfc_add_modify (block, count, gfc_index_zero_node);
3031 gfc_start_block (&body);
3032 gfc_init_se (&lse, NULL);
3033 gfc_init_se (&rse, NULL);
3034 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
3035 lse.want_pointer = 1;
3036 gfc_conv_expr (&lse, expr1);
3037 gfc_add_block_to_block (&body, &lse.pre);
3038 gfc_add_modify (&body, lse.expr, rse.expr);
3039 gfc_add_block_to_block (&body, &lse.post);
3040 /* Increment count. */
3041 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3042 count, gfc_index_one_node);
3043 gfc_add_modify (&body, count, tmp);
3044 tmp = gfc_finish_block (&body);
3046 /* Generate body and loops according to the information in
3047 nested_forall_info. */
3048 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3049 gfc_add_expr_to_block (block, tmp);
/* Array-pointer case: build an array-descriptor type from the RHS shape
   and store one descriptor per iteration.  */
3053 gfc_init_loopinfo (&loop);
3055 /* Associate the SS with the loop. */
3056 gfc_add_ss_to_loop (&loop, rss);
3058 /* Setup the scalarizing loops and bounds. */
3059 gfc_conv_ss_startstride (&loop);
3061 gfc_conv_loop_setup (&loop, &expr2->where);
3063 info = &rss->data.info;
3064 desc = info->descriptor;
3066 /* Make a new descriptor. */
3067 parmtype = gfc_get_element_type (TREE_TYPE (desc));
3068 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0,
3069 loop.from, loop.to, 1,
3070 GFC_ARRAY_UNKNOWN, true);
3072 /* Allocate temporary for nested forall construct. */
3073 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
3074 inner_size, NULL, block, &ptemp1);
/* Pass 1: convert the RHS descriptor directly into TMP1[COUNT]
   (direct_byref avoids an intermediate copy).  */
3075 gfc_start_block (&body);
3076 gfc_init_se (&lse, NULL);
3077 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3078 lse.direct_byref = 1;
3079 rss = gfc_walk_expr (expr2);
3080 gfc_conv_expr_descriptor (&lse, expr2, rss);
3082 gfc_add_block_to_block (&body, &lse.pre);
3083 gfc_add_block_to_block (&body, &lse.post);
3085 /* Increment count. */
3086 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3087 count, gfc_index_one_node);
3088 gfc_add_modify (&body, count, tmp);
3090 tmp = gfc_finish_block (&body);
3092 /* Generate body and loops according to the information in
3093 nested_forall_info. */
3094 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3095 gfc_add_expr_to_block (block, tmp);
/* Pass 2: assign the saved descriptors to the LHS descriptor.  */
3098 gfc_add_modify (block, count, gfc_index_zero_node);
3100 parm = gfc_build_array_ref (tmp1, count, NULL);
3101 lss = gfc_walk_expr (expr1);
3102 gfc_init_se (&lse, NULL);
3103 gfc_conv_expr_descriptor (&lse, expr1, lss);
3104 gfc_add_modify (&lse.pre, lse.expr, parm);
3105 gfc_start_block (&body);
3106 gfc_add_block_to_block (&body, &lse.pre);
3107 gfc_add_block_to_block (&body, &lse.post);
3109 /* Increment count. */
3110 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3111 count, gfc_index_one_node);
3112 gfc_add_modify (&body, count, tmp);
3114 tmp = gfc_finish_block (&body);
3116 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3117 gfc_add_expr_to_block (block, tmp);
3119 /* Free the temporary. */
3122 tmp = gfc_call_free (ptemp1);
3123 gfc_add_expr_to_block (block, tmp);
3128 /* FORALL and WHERE statements are really nasty, especially when you nest
3129 them. All the rhs of a forall assignment must be evaluated before the
3130 actual assignments are performed. Presumably this also applies to all the
3131 assignments in an inner where statement. */
3133 /* Generate code for a FORALL statement. Any temporaries are allocated as a
3134 linear array, relying on the fact that we process in the same order in all
3137 forall (i=start:end:stride; maskexpr)
3141 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
3143 count = ((end + 1 - start) / stride)
3144 masktmp(:) = maskexpr(:)
3147 for (i = start; i <= end; i += stride)
3149 if (masktmp[maskindex++])
3153 for (i = start; i <= end; i += stride)
3155 if (masktmp[maskindex++])
3159 Note that this code only works when there are no dependencies.
3160 Forall loop with array assignments and data dependencies are a real pain,
3161 because the size of the temporary cannot always be determined before the
3162 loop is executed. This problem is compounded by the presence of nested
/* Body of gfc_trans_forall_1: translate one FORALL level.  Builds the
   iterator bookkeeping (forall_info / iter_info), evaluates the mask into
   a compact temporary, then dispatches each statement in the FORALL body,
   recursing for nested FORALLs.  NOTE(review): elided extraction --
   numbering gaps mark dropped lines (switch/if framing is incomplete
   here); confirm against the full trans-stmt.c.  */
3167 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
3187 gfc_forall_iterator *fa;
3190 gfc_saved_var *saved_vars;
3191 iter_info *this_forall;
/* A constant .false. mask means the whole construct is dead code.  */
3195 /* Do nothing if the mask is false. */
3197 && code->expr1->expr_type == EXPR_CONSTANT
3198 && !code->expr1->value.logical)
3199 return build_empty_stmt (input_location);
3202 /* Count the FORALL index number. */
3203 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3207 /* Allocate the space for var, start, end, step, varexpr. */
3208 var = (tree *) gfc_getmem (nvar * sizeof (tree));
3209 start = (tree *) gfc_getmem (nvar * sizeof (tree));
3210 end = (tree *) gfc_getmem (nvar * sizeof (tree));
3211 step = (tree *) gfc_getmem (nvar * sizeof (tree));
3212 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
3213 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
3215 /* Allocate the space for info. */
3216 info = (forall_info *) gfc_getmem (sizeof (forall_info));
3218 gfc_start_block (&pre);
3219 gfc_init_block (&post);
3220 gfc_init_block (&block);
/* For each iterator: shadow the index symbol with a fresh temporary and
   record its start/end/stride trees in an iter_info node.  */
3223 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3225 gfc_symbol *sym = fa->var->symtree->n.sym;
3227 /* Allocate space for this_forall. */
3228 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
3230 /* Create a temporary variable for the FORALL index. */
3231 tmp = gfc_typenode_for_spec (&sym->ts);
3232 var[n] = gfc_create_var (tmp, sym->name);
3233 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
3235 /* Record it in this_forall. */
3236 this_forall->var = var[n];
3238 /* Replace the index symbol's backend_decl with the temporary decl. */
3239 sym->backend_decl = var[n];
3241 /* Work out the start, end and stride for the loop. */
3242 gfc_init_se (&se, NULL);
3243 gfc_conv_expr_val (&se, fa->start);
3244 /* Record it in this_forall. */
3245 this_forall->start = se.expr;
3246 gfc_add_block_to_block (&block, &se.pre);
3249 gfc_init_se (&se, NULL);
3250 gfc_conv_expr_val (&se, fa->end);
3251 /* Record it in this_forall. */
3252 this_forall->end = se.expr;
3253 gfc_make_safe_expr (&se);
3254 gfc_add_block_to_block (&block, &se.pre);
3257 gfc_init_se (&se, NULL);
3258 gfc_conv_expr_val (&se, fa->stride);
3259 /* Record it in this_forall. */
3260 this_forall->step = se.expr;
3261 gfc_make_safe_expr (&se);
3262 gfc_add_block_to_block (&block, &se.pre);
3265 /* Set the NEXT field of this_forall to NULL. */
3266 this_forall->next = NULL;
/* Append this_forall at the tail so iterators keep source order.  */
3267 /* Link this_forall to the info construct. */
3268 if (info->this_loop)
3270 iter_info *iter_tmp = info->this_loop;
3271 while (iter_tmp->next != NULL)
3272 iter_tmp = iter_tmp->next;
3273 iter_tmp->next = this_forall;
3276 info->this_loop = this_forall;
3282 /* Calculate the size needed for the current forall level. */
3283 size = gfc_index_one_node;
3284 for (n = 0; n < nvar; n++)
3286 /* size = (end + step - start) / step. */
3287 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (start[n]),
3289 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (end[n]),
3291 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, TREE_TYPE (tmp),
3293 tmp = convert (gfc_array_index_type, tmp);
3295 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
3299 /* Record the nvar and size of current forall level. */
3305 /* If the mask is .true., consider the FORALL unconditional. */
3306 if (code->expr1->expr_type == EXPR_CONSTANT
3307 && code->expr1->value.logical)
3315 /* First we need to allocate the mask. */
3318 /* As the mask array can be very big, prefer compact boolean types. */
3319 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3320 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3321 size, NULL, &block, &pmask);
3322 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3324 /* Record them in the info structure. */
3325 info->maskindex = maskindex;
3330 /* No mask was specified. */
3331 maskindex = NULL_TREE;
3332 mask = pmask = NULL_TREE;
3335 /* Link the current forall level to nested_forall_info. */
3336 info->prev_nest = nested_forall_info;
3337 nested_forall_info = info;
/* Evaluate the mask once up front, element by element, into the compact
   mask temporary under this level's loops (mask_copy = 0).  */
3339 /* Copy the mask into a temporary variable if required.
3340 For now we assume a mask temporary is needed. */
3343 /* As the mask array can be very big, prefer compact boolean types. */
3344 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3346 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3348 /* Start of mask assignment loop body. */
3349 gfc_start_block (&body);
3351 /* Evaluate the mask expression. */
3352 gfc_init_se (&se, NULL);
3353 gfc_conv_expr_val (&se, code->expr1);
3354 gfc_add_block_to_block (&body, &se.pre);
3356 /* Store the mask. */
3357 se.expr = convert (mask_type, se.expr);
3359 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3360 gfc_add_modify (&body, tmp, se.expr);
3362 /* Advance to the next mask element. */
3363 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3364 maskindex, gfc_index_one_node);
3365 gfc_add_modify (&body, maskindex, tmp);
3367 /* Generate the loops. */
3368 tmp = gfc_finish_block (&body);
3369 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3370 gfc_add_expr_to_block (&block, tmp);
/* Dispatch over the statements of the FORALL body.  */
3373 c = code->block->next;
3375 /* TODO: loop merging in FORALL statements. */
3376 /* Now that we've got a copy of the mask, generate the assignment loops. */
3382 /* A scalar or array assignment. DO the simple check for
3383 lhs to rhs dependencies. These make a temporary for the
3384 rhs and form a second forall block to copy to variable. */
3385 need_temp = check_forall_dependencies(c, &pre, &post);
3387 /* Temporaries due to array assignment data dependencies introduce
3388 no end of problems. */
3390 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3391 nested_forall_info, &block);
3394 /* Use the normal assignment copying routines. */
3395 assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);
3397 /* Generate body and loops. */
3398 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3400 gfc_add_expr_to_block (&block, tmp);
3403 /* Cleanup any temporary symtrees that have been made to deal
3404 with dependencies. */
3406 cleanup_forall_symtrees (c);
3411 /* Translate WHERE or WHERE construct nested in FORALL. */
3412 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3415 /* Pointer assignment inside FORALL. */
3416 case EXEC_POINTER_ASSIGN:
3417 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3419 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3420 nested_forall_info, &block);
3423 /* Use the normal assignment copying routines. */
3424 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3426 /* Generate body and loops. */
3427 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3429 gfc_add_expr_to_block (&block, tmp);
/* Nested FORALL: recurse with the extended nest.  */
3434 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3435 gfc_add_expr_to_block (&block, tmp);
3438 /* Explicit subroutine calls are prevented by the frontend but interface
3439 assignments can legitimately produce them. */
3440 case EXEC_ASSIGN_CALL:
3441 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3442 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3443 gfc_add_expr_to_block (&block, tmp);
3453 /* Restore the original index variables. */
3454 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3455 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3457 /* Free the space for var, start, end, step, varexpr. */
3463 gfc_free (saved_vars);
3465 /* Free the space for this forall_info. */
3470 /* Free the temporary for the mask. */
3471 tmp = gfc_call_free (pmask);
3472 gfc_add_expr_to_block (&block, tmp);
3475 pushdecl (maskindex);
3477 gfc_add_block_to_block (&pre, &block);
3478 gfc_add_block_to_block (&pre, &post);
3480 return gfc_finish_block (&pre);
3484 /* Translate the FORALL statement or construct. */
/* Public entry point: translate a top-level FORALL, starting with an empty
   enclosing nest.  */
3486 tree gfc_trans_forall (gfc_code * code)
3488 return gfc_trans_forall_1 (code, NULL);
3492 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3493 If the WHERE construct is nested in FORALL, compute the overall temporary
3494 needed by the WHERE mask expression multiplied by the iterator number of
3496 ME is the WHERE mask expression.
3497 MASK is the current execution mask upon input, whose sense may or may
3498 not be inverted as specified by the INVERT argument.
3499 CMASK is the updated execution mask on output, or NULL if not required.
3500 PMASK is the pending execution mask on output, or NULL if not required.
3501 BLOCK is the block in which to place the condition evaluation loops. */
/* Body of gfc_evaluate_where_mask: evaluate WHERE mask ME element-wise and
   update CMASK (current) and PMASK (pending, i.e. ELSEWHERE) under the
   incoming MASK whose sense INVERT may flip.  NOTE(review): elided
   extraction -- numbering gaps mark dropped lines.  */
3504 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3505 tree mask, bool invert, tree cmask, tree pmask,
3506 tree mask_type, stmtblock_t * block)
3511 stmtblock_t body, body1;
3512 tree count, cond, mtmp;
3515 gfc_init_loopinfo (&loop);
3517 lss = gfc_walk_expr (me);
3518 rss = gfc_walk_expr (me);
3520 /* Variable to index the temporary. */
3521 count = gfc_create_var (gfc_array_index_type, "count");
3522 /* Initialize count. */
3523 gfc_add_modify (block, count, gfc_index_zero_node);
3525 gfc_start_block (&body);
3527 gfc_init_se (&rse, NULL);
3528 gfc_init_se (&lse, NULL);
/* Scalar mask expression: evaluate once; otherwise scalarize over ME.  */
3530 if (lss == gfc_ss_terminator)
3532 gfc_init_block (&body1);
3536 /* Initialize the loop. */
3537 gfc_init_loopinfo (&loop);
3539 /* We may need LSS to determine the shape of the expression. */
3540 gfc_add_ss_to_loop (&loop, lss);
3541 gfc_add_ss_to_loop (&loop, rss);
3543 gfc_conv_ss_startstride (&loop);
3544 gfc_conv_loop_setup (&loop, &me->where);
3546 gfc_mark_ss_chain_used (rss, 1);
3547 /* Start the loop body. */
3548 gfc_start_scalarized_body (&loop, &body1);
3550 /* Translate the expression. */
3551 gfc_copy_loopinfo_to_se (&rse, &loop);
3553 gfc_conv_expr (&rse, me);
3556 /* Variable to evaluate mask condition. */
3557 cond = gfc_create_var (mask_type, "cond");
/* MTMP caches the incoming mask element (possibly negated by INVERT) so
   the cmask and pmask updates see a consistent value.  */
3558 if (mask && (cmask || pmask))
3559 mtmp = gfc_create_var (mask_type, "mask");
3560 else mtmp = NULL_TREE;
3562 gfc_add_block_to_block (&body1, &lse.pre);
3563 gfc_add_block_to_block (&body1, &rse.pre);
3565 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3567 if (mask && (cmask || pmask))
3569 tmp = gfc_build_array_ref (mask, count, NULL);
3571 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, tmp);
3572 gfc_add_modify (&body1, mtmp, tmp);
/* cmask[count] = incoming-mask AND cond.  */
3577 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3580 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type,
3582 gfc_add_modify (&body1, tmp1, tmp);
/* pmask[count] = incoming-mask AND NOT cond (the ELSEWHERE part).  */
3587 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3588 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, cond);
3590 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type, mtmp,
3592 gfc_add_modify (&body1, tmp1, tmp);
3595 gfc_add_block_to_block (&body1, &lse.post);
3596 gfc_add_block_to_block (&body1, &rse.post);
3598 if (lss == gfc_ss_terminator)
3600 gfc_add_block_to_block (&body, &body1);
3604 /* Increment count. */
3605 tmp1 = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3606 count, gfc_index_one_node);
3607 gfc_add_modify (&body1, count, tmp1);
3609 /* Generate the copying loops. */
3610 gfc_trans_scalarizing_loops (&loop, &body1);
3612 gfc_add_block_to_block (&body, &loop.pre);
3613 gfc_add_block_to_block (&body, &loop.post);
3615 gfc_cleanup_loop (&loop);
3616 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3617 as tree nodes in SS may not be valid in different scope. */
3620 tmp1 = gfc_finish_block (&body);
3621 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3622 if (nested_forall_info != NULL)
3623 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3625 gfc_add_expr_to_block (block, tmp1);
3629 /* Translate an assignment statement in a WHERE statement or construct
3630 statement. The MASK expression is used to control which elements
3631 of EXPR1 shall be assigned. The sense of MASK is specified by
/* Body of gfc_trans_where_assign: translate one masked assignment
   EXPR1 = EXPR2 under control MASK (sense flipped by INVERT).  COUNT1 and
   COUNT2 index the mask for the main pass and for the temp-to-lhs copy
   pass when the scalarizer needs a dependency temporary.  NOTE(review):
   elided extraction -- numbering gaps mark dropped lines.  */
3635 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3636 tree mask, bool invert,
3637 tree count1, tree count2,
3643 gfc_ss *lss_section;
3650 tree index, maskexpr;
/* User-defined assignment is handled as a masked call instead.  */
3652 /* A defined assignment. */
3653 if (cnext && cnext->resolved_sym)
3654 return gfc_trans_call (cnext, true, mask, count1, invert);
3657 /* TODO: handle this special case.
3658 Special case a single function returning an array. */
3659 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3661 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3667 /* Assignment of the form lhs = rhs. */
3668 gfc_start_block (&block);
3670 gfc_init_se (&lse, NULL);
3671 gfc_init_se (&rse, NULL);
3674 lss = gfc_walk_expr (expr1);
3677 /* In each where-assign-stmt, the mask-expr and the variable being
3678 defined shall be arrays of the same shape. */
3679 gcc_assert (lss != gfc_ss_terminator);
3681 /* The assignment needs scalarization. */
3684 /* Find a non-scalar SS from the lhs. */
3685 while (lss_section != gfc_ss_terminator
3686 && lss_section->type != GFC_SS_SECTION)
3687 lss_section = lss_section->next;
3689 gcc_assert (lss_section != gfc_ss_terminator);
3691 /* Initialize the scalarizer. */
3692 gfc_init_loopinfo (&loop);
3695 rss = gfc_walk_expr (expr2);
3696 if (rss == gfc_ss_terminator)
3698 /* The rhs is scalar. Add a ss for the expression. */
3699 rss = gfc_get_ss ();
3701 rss->next = gfc_ss_terminator;
3702 rss->type = GFC_SS_SCALAR;
3706 /* Associate the SS with the loop. */
3707 gfc_add_ss_to_loop (&loop, lss);
3708 gfc_add_ss_to_loop (&loop, rss);
3710 /* Calculate the bounds of the scalarization. */
3711 gfc_conv_ss_startstride (&loop);
/* May create loop.temp_ss if the lhs/rhs overlap -- see the temp copy
   pass below.  */
3713 /* Resolve any data dependencies in the statement. */
3714 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3716 /* Setup the scalarizing loops. */
3717 gfc_conv_loop_setup (&loop, &expr2->where);
3719 /* Setup the gfc_se structures. */
3720 gfc_copy_loopinfo_to_se (&lse, &loop);
3721 gfc_copy_loopinfo_to_se (&rse, &loop);
3724 gfc_mark_ss_chain_used (rss, 1);
3725 if (loop.temp_ss == NULL)
3728 gfc_mark_ss_chain_used (lss, 1);
3732 lse.ss = loop.temp_ss;
3733 gfc_mark_ss_chain_used (lss, 3);
3734 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3737 /* Start the scalarized loop body. */
3738 gfc_start_scalarized_body (&loop, &body);
3740 /* Translate the expression. */
3741 gfc_conv_expr (&rse, expr2);
/* With a dependency temporary, the main pass assigns into the temp, not
   the real lhs.  */
3742 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3744 gfc_conv_tmp_array_ref (&lse);
3745 gfc_advance_se_ss_chain (&lse);
3748 gfc_conv_expr (&lse, expr1);
3750 /* Form the mask expression according to the mask. */
3752 maskexpr = gfc_build_array_ref (mask, index, NULL);
3754 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
3755 TREE_TYPE (maskexpr), maskexpr);
3757 /* Use the scalar assignment as is. */
3758 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3759 loop.temp_ss != NULL, false, true);
3761 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
3763 gfc_add_expr_to_block (&body, tmp);
3765 if (lss == gfc_ss_terminator)
3767 /* Increment count1. */
3768 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3769 count1, gfc_index_one_node);
3770 gfc_add_modify (&body, count1, tmp);
3772 /* Use the scalar assignment as is. */
3773 gfc_add_block_to_block (&block, &body);
3777 gcc_assert (lse.ss == gfc_ss_terminator
3778 && rse.ss == gfc_ss_terminator);
/* Second pass (only with a dependency temp): after the main loop body,
   copy temp -> real lhs, indexing the mask with COUNT2.  */
3780 if (loop.temp_ss != NULL)
3782 /* Increment count1 before finishing the main body of a scalarized
3784 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3785 gfc_array_index_type, count1, gfc_index_one_node);
3786 gfc_add_modify (&body, count1, tmp);
3787 gfc_trans_scalarized_loop_boundary (&loop, &body);
3789 /* We need to copy the temporary to the actual lhs. */
3790 gfc_init_se (&lse, NULL);
3791 gfc_init_se (&rse, NULL);
3792 gfc_copy_loopinfo_to_se (&lse, &loop);
3793 gfc_copy_loopinfo_to_se (&rse, &loop);
3795 rse.ss = loop.temp_ss;
3798 gfc_conv_tmp_array_ref (&rse);
3799 gfc_advance_se_ss_chain (&rse);
3800 gfc_conv_expr (&lse, expr1);
3802 gcc_assert (lse.ss == gfc_ss_terminator
3803 && rse.ss == gfc_ss_terminator);
3805 /* Form the mask expression according to the mask tree list. */
3807 maskexpr = gfc_build_array_ref (mask, index, NULL);
3809 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
3810 TREE_TYPE (maskexpr), maskexpr);
3812 /* Use the scalar assignment as is. */
3813 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
3815 tmp = build3_v (COND_EXPR, maskexpr, tmp,
3816 build_empty_stmt (input_location));
3817 gfc_add_expr_to_block (&body, tmp);
3819 /* Increment count2. */
3820 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3821 gfc_array_index_type, count2,
3822 gfc_index_one_node);
3823 gfc_add_modify (&body, count2, tmp);
3827 /* Increment count1. */
3828 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3829 gfc_array_index_type, count1,
3830 gfc_index_one_node);
3831 gfc_add_modify (&body, count1, tmp);
3834 /* Generate the copying loops. */
3835 gfc_trans_scalarizing_loops (&loop, &body);
3837 /* Wrap the whole thing up. */
3838 gfc_add_block_to_block (&block, &loop.pre);
3839 gfc_add_block_to_block (&block, &loop.post);
3840 gfc_cleanup_loop (&loop);
3843 return gfc_finish_block (&block);
3847 /* Translate the WHERE construct or statement.
3848 This function can be called iteratively to translate the nested WHERE
3849 construct or statement.
3850 MASK is the control mask. */
/* INVERT gives the sense in which MASK is applied; NESTED_FORALL_INFO
   describes any enclosing FORALL nest (NULL at the outermost level); all
   generated statements are appended to BLOCK.  */
3853 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3854 forall_info * nested_forall_info, stmtblock_t * block)
3856 stmtblock_t inner_size_body;
3857 tree inner_size, size;
3866 tree count1, count2;
/* pcmask/ppmask hold the allocated mask arrays so they can be freed at
   the end; cmask/pmask are the current and pending control masks.  */
3870 tree pcmask = NULL_TREE;
3871 tree ppmask = NULL_TREE;
3872 tree cmask = NULL_TREE;
3873 tree pmask = NULL_TREE;
3874 gfc_actual_arglist *arg;
3876 /* the WHERE statement or the WHERE construct statement. */
3877 cblock = code->block;
3879 /* As the mask array can be very big, prefer compact boolean types. */
3880 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3882 /* Determine which temporary masks are needed. */
3885 /* One clause: No ELSEWHEREs. */
3886 need_cmask = (cblock->next != 0);
3889 else if (cblock->block->block)
3891 /* Three or more clauses: Conditional ELSEWHEREs. */
3895 else if (cblock->next)
3897 /* Two clauses, the first non-empty. */
/* The pending mask is only needed for a nested WHERE; at the top
   level the remaining clauses can reuse cmask with inverted sense.  */
3899 need_pmask = (mask != NULL_TREE
3900 && cblock->block->next != 0);
3902 else if (!cblock->block->next)
3904 /* Two clauses, both empty. */
3908 /* Two clauses, the first empty, the second non-empty. */
3911 need_cmask = (cblock->block->expr1 != 0);
3920 if (need_cmask || need_pmask)
3922 /* Calculate the size of temporary needed by the mask-expr. */
3923 gfc_init_block (&inner_size_body);
3924 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
3925 &inner_size_body, &lss, &rss);
3927 /* Calculate the total size of temporary needed. */
3928 size = compute_overall_iter_number (nested_forall_info, inner_size,
3929 &inner_size_body, block);
3931 /* Check whether the size is negative. */
3932 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, size,
3933 gfc_index_zero_node);
/* Clamp a negative extent to zero so the allocation below is sane.  */
3934 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
3935 cond, gfc_index_zero_node, size);
3936 size = gfc_evaluate_now (size, block);
3938 /* Allocate temporary for WHERE mask if needed. */
3940 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3943 /* Allocate temporary for !mask if needed. */
3945 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3951 /* Each time around this loop, the where clause is conditional
3952 on the value of mask and invert, which are updated at the
3953 bottom of the loop. */
3955 /* Has mask-expr. */
3958 /* Ensure that the WHERE mask will be evaluated exactly once.
3959 If there are no statements in this WHERE/ELSEWHERE clause,
3960 then we don't need to update the control mask (cmask).
3961 If this is the last clause of the WHERE construct, then
3962 we don't need to update the pending control mask (pmask). */
3964 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3966 cblock->next ? cmask : NULL_TREE,
3967 cblock->block ? pmask : NULL_TREE,
3970 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3972 (cblock->next || cblock->block)
3973 ? cmask : NULL_TREE,
3974 NULL_TREE, mask_type, block);
3978 /* It's a final elsewhere-stmt. No mask-expr is present. */
3982 /* The body of this where clause are controlled by cmask with
3983 sense specified by invert. */
3985 /* Get the assignment statement of a WHERE statement, or the first
3986 statement in where-body-construct of a WHERE construct. */
3987 cnext = cblock->next;
3992 /* WHERE assignment statement. */
3993 case EXEC_ASSIGN_CALL:
/* For an ASSIGN_CALL (defined assignment) the lhs/rhs come from
   the actual argument list rather than expr1/expr2.  */
3995 arg = cnext->ext.actual;
3996 expr1 = expr2 = NULL;
3997 for (; arg; arg = arg->next)
4009 expr1 = cnext->expr1;
4010 expr2 = cnext->expr2;
4012 if (nested_forall_info != NULL)
/* Inside a FORALL nest: a temporary is required when the rhs
   depends on the lhs being assigned.  */
4014 need_temp = gfc_check_dependency (expr1, expr2, 0);
4015 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
4016 gfc_trans_assign_need_temp (expr1, expr2,
4018 nested_forall_info, block);
4021 /* Variables to control maskexpr. */
4022 count1 = gfc_create_var (gfc_array_index_type, "count1");
4023 count2 = gfc_create_var (gfc_array_index_type, "count2");
4024 gfc_add_modify (block, count1, gfc_index_zero_node);
4025 gfc_add_modify (block, count2, gfc_index_zero_node);
4027 tmp = gfc_trans_where_assign (expr1, expr2,
4032 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
4034 gfc_add_expr_to_block (block, tmp);
4039 /* Variables to control maskexpr. */
4040 count1 = gfc_create_var (gfc_array_index_type, "count1");
4041 count2 = gfc_create_var (gfc_array_index_type, "count2");
4042 gfc_add_modify (block, count1, gfc_index_zero_node);
4043 gfc_add_modify (block, count2, gfc_index_zero_node);
4045 tmp = gfc_trans_where_assign (expr1, expr2,
4049 gfc_add_expr_to_block (block, tmp);
4054 /* WHERE or WHERE construct is part of a where-body-construct. */
/* Recurse for a nested WHERE, passing the current control mask.  */
4056 gfc_trans_where_2 (cnext, cmask, invert,
4057 nested_forall_info, block);
4064 /* The next statement within the same where-body-construct. */
4065 cnext = cnext->next;
4067 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
4068 cblock = cblock->block;
4069 if (mask == NULL_TREE)
4071 /* If we're the initial WHERE, we can simply invert the sense
4072 of the current mask to obtain the "mask" for the remaining
4079 /* Otherwise, for nested WHERE's we need to use the pending mask. */
4085 /* If we allocated a pending mask array, deallocate it now. */
4088 tmp = gfc_call_free (ppmask);
4089 gfc_add_expr_to_block (block, tmp);
4092 /* If we allocated a current mask array, deallocate it now. */
4095 tmp = gfc_call_free (pcmask);
4096 gfc_add_expr_to_block (block, tmp);
4100 /* Translate a simple WHERE construct or statement without dependencies.
4101 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
4102 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
4103 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
/* Returns the translated statement tree.  Both assignments are fused into
   a single scalarized loop guarded by a COND_EXPR on the mask value.  */
4106 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
4108 stmtblock_t block, body;
4109 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
4110 tree tmp, cexpr, tstmt, estmt;
4111 gfc_ss *css, *tdss, *tsss;
4112 gfc_se cse, tdse, tsse, edse, esse;
4117 /* Allow the scalarizer to workshare simple where loops. */
4118 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
4119 ompws_flags |= OMPWS_SCALARIZER_WS;
/* Pull out the mask and the destination/source of each clause.  */
4121 cond = cblock->expr1;
4122 tdst = cblock->next->expr1;
4123 tsrc = cblock->next->expr2;
4124 edst = eblock ? eblock->next->expr1 : NULL;
4125 esrc = eblock ? eblock->next->expr2 : NULL;
4127 gfc_start_block (&block);
4128 gfc_init_loopinfo (&loop);
4130 /* Handle the condition. */
4131 gfc_init_se (&cse, NULL);
4132 css = gfc_walk_expr (cond);
4133 gfc_add_ss_to_loop (&loop, css);
4135 /* Handle the then-clause. */
4136 gfc_init_se (&tdse, NULL);
4137 gfc_init_se (&tsse, NULL);
4138 tdss = gfc_walk_expr (tdst);
4139 tsss = gfc_walk_expr (tsrc);
4140 if (tsss == gfc_ss_terminator)
/* Scalar rhs: build a GFC_SS_SCALAR entry by hand so the scalarizer
   still has an SS chain to drive.  */
4142 tsss = gfc_get_ss ();
4144 tsss->next = gfc_ss_terminator;
4145 tsss->type = GFC_SS_SCALAR;
4148 gfc_add_ss_to_loop (&loop, tdss);
4149 gfc_add_ss_to_loop (&loop, tsss);
4153 /* Handle the else clause. */
4154 gfc_init_se (&edse, NULL);
4155 gfc_init_se (&esse, NULL);
4156 edss = gfc_walk_expr (edst);
4157 esss = gfc_walk_expr (esrc);
4158 if (esss == gfc_ss_terminator)
4160 esss = gfc_get_ss ();
4162 esss->next = gfc_ss_terminator;
4163 esss->type = GFC_SS_SCALAR;
4166 gfc_add_ss_to_loop (&loop, edss);
4167 gfc_add_ss_to_loop (&loop, esss);
4170 gfc_conv_ss_startstride (&loop);
4171 gfc_conv_loop_setup (&loop, &tdst->where);
4173 gfc_mark_ss_chain_used (css, 1);
4174 gfc_mark_ss_chain_used (tdss, 1);
4175 gfc_mark_ss_chain_used (tsss, 1);
4178 gfc_mark_ss_chain_used (edss, 1);
4179 gfc_mark_ss_chain_used (esss, 1);
4182 gfc_start_scalarized_body (&loop, &body);
4184 gfc_copy_loopinfo_to_se (&cse, &loop);
4185 gfc_copy_loopinfo_to_se (&tdse, &loop);
4186 gfc_copy_loopinfo_to_se (&tsse, &loop);
4192 gfc_copy_loopinfo_to_se (&edse, &loop);
4193 gfc_copy_loopinfo_to_se (&esse, &loop);
/* Evaluate the mask expression for this iteration.  */
4198 gfc_conv_expr (&cse, cond);
4199 gfc_add_block_to_block (&body, &cse.pre);
4202 gfc_conv_expr (&tsse, tsrc);
4203 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
4205 gfc_conv_tmp_array_ref (&tdse);
4206 gfc_advance_se_ss_chain (&tdse);
4209 gfc_conv_expr (&tdse, tdst);
4213 gfc_conv_expr (&esse, esrc);
4214 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
4216 gfc_conv_tmp_array_ref (&edse);
4217 gfc_advance_se_ss_chain (&edse);
4220 gfc_conv_expr (&edse, edst);
/* Select the then- or else-assignment per element via COND_EXPR.  */
4223 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
4224 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
4226 : build_empty_stmt (input_location);
4227 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
4228 gfc_add_expr_to_block (&body, tmp);
4229 gfc_add_block_to_block (&body, &cse.post);
4231 gfc_trans_scalarizing_loops (&loop, &body);
4232 gfc_add_block_to_block (&block, &loop.pre);
4233 gfc_add_block_to_block (&block, &loop.post);
4234 gfc_cleanup_loop (&loop);
4236 return gfc_finish_block (&block);
4239 /* As the WHERE or WHERE construct statement can be nested, we call
4240 gfc_trans_where_2 to do the translation, and pass the initial
4241 NULL values for both the control mask and the pending control mask. */
/* Entry point for WHERE translation.  First try to recognize the two
   dependence-free special cases handled by the fast path in
   gfc_trans_where_3; otherwise fall back to the general (mask-array)
   translation in gfc_trans_where_2.  */
4244 gfc_trans_where (gfc_code * code)
4250 cblock = code->block;
4252 && cblock->next->op == EXEC_ASSIGN
4253 && !cblock->next->next)
4255 eblock = cblock->block;
4258 /* A simple "WHERE (cond) x = y" statement or block is
4259 dependence free if cond is not dependent upon writing x,
4260 and the source y is unaffected by the destination x. */
4261 if (!gfc_check_dependency (cblock->next->expr1,
4263 && !gfc_check_dependency (cblock->next->expr1,
4264 cblock->next->expr2, 0))
4265 return gfc_trans_where_3 (cblock, NULL);
4267 else if (!eblock->expr1
4270 && eblock->next->op == EXEC_ASSIGN
4271 && !eblock->next->next)
4273 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4274 block is dependence free if cond is not dependent on writes
4275 to x1 and x2, y1 is not dependent on writes to x2, and y2
4276 is not dependent on writes to x1, and both y's are not
4277 dependent upon their own x's. In addition to this, the
4278 final two dependency checks below exclude all but the same
4279 array reference if the where and elswhere destinations
4280 are the same. In short, this is VERY conservative and this
4281 is needed because the two loops, required by the standard
4282 are coalesced in gfc_trans_where_3. */
4283 if (!gfc_check_dependency(cblock->next->expr1,
4285 && !gfc_check_dependency(eblock->next->expr1,
4287 && !gfc_check_dependency(cblock->next->expr1,
4288 eblock->next->expr2, 1)
4289 && !gfc_check_dependency(eblock->next->expr1,
4290 cblock->next->expr2, 1)
4291 && !gfc_check_dependency(cblock->next->expr1,
4292 cblock->next->expr2, 1)
4293 && !gfc_check_dependency(eblock->next->expr1,
4294 eblock->next->expr2, 1)
4295 && !gfc_check_dependency(cblock->next->expr1,
4296 eblock->next->expr1, 0)
4297 && !gfc_check_dependency(eblock->next->expr1,
4298 cblock->next->expr1, 0))
4299 return gfc_trans_where_3 (cblock, eblock);
/* General case: no control mask and no pending mask at the top level.  */
4303 gfc_start_block (&block);
4305 gfc_trans_where_2 (code, NULL, false, NULL, &block);
4307 return gfc_finish_block (&block);
4311 /* CYCLE a DO loop. The label decl has already been created by
4312 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4313 node at the head of the loop. We must mark the label as used. */
/* Translates to a plain GOTO to the loop's cycle label.  */
4316 gfc_trans_cycle (gfc_code * code)
4320 cycle_label = code->ext.which_construct->cycle_label;
4321 gcc_assert (cycle_label);
/* Marking TREE_USED suppresses spurious unused-label diagnostics.  */
4323 TREE_USED (cycle_label) = 1;
4324 return build1_v (GOTO_EXPR, cycle_label);
4328 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4329 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
/* Translates to a plain GOTO to the loop's exit label.  */
4333 gfc_trans_exit (gfc_code * code)
4337 exit_label = code->ext.which_construct->exit_label;
4338 gcc_assert (exit_label);
/* Marking TREE_USED suppresses spurious unused-label diagnostics.  */
4340 TREE_USED (exit_label) = 1;
4341 return build1_v (GOTO_EXPR, exit_label);
4345 /* Translate the ALLOCATE statement. */
/* Generates allocation code for each object in the allocation list,
   including STAT=/ERRMSG= handling (code->expr1 / code->expr2), SOURCE=
   or MOLD= initialization (code->expr3), and vptr setup for CLASS
   (polymorphic) entities.  Returns the finished statement block.  */
4348 gfc_trans_allocate (gfc_code * code)
4361 if (!code->ext.alloc.list)
4364 pstat = stat = error_label = tmp = memsz = NULL_TREE;
4366 gfc_start_block (&block);
4368 /* Either STAT= and/or ERRMSG is present. */
4369 if (code->expr1 || code->expr2)
4371 tree gfc_int4_type_node = gfc_get_int_type (4);
4373 stat = gfc_create_var (gfc_int4_type_node, "stat");
4374 pstat = gfc_build_addr_expr (NULL_TREE, stat);
/* On failure the generated code jumps here instead of aborting.  */
4376 error_label = gfc_build_label_decl (NULL_TREE);
4377 TREE_USED (error_label) = 1;
4380 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4382 expr = gfc_copy_expr (al->expr);
/* For CLASS objects, allocation targets the $data component.  */
4384 if (expr->ts.type == BT_CLASS)
4385 gfc_add_component_ref (expr, "$data");
4387 gfc_init_se (&se, NULL);
4388 gfc_start_block (&se.pre);
4390 se.want_pointer = 1;
4391 se.descriptor_only = 1;
4392 gfc_conv_expr (&se, expr);
4394 if (!gfc_array_allocate (&se, expr, pstat))
4396 /* A scalar or derived type. */
4398 /* Determine allocate size. */
4399 if (al->expr->ts.type == BT_CLASS && code->expr3)
4401 if (code->expr3->ts.type == BT_CLASS)
/* Polymorphic SOURCE: the size is only known at run time, so
   read it from the source's vtab ($vptr->$size).  */
4405 sz = gfc_copy_expr (code->expr3);
4406 gfc_add_component_ref (sz, "$vptr");
4407 gfc_add_component_ref (sz, "$size");
4408 gfc_init_se (&se_sz, NULL);
4409 gfc_conv_expr (&se_sz, sz);
4414 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
4416 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4417 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
4419 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4421 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4422 memsz = se.string_length;
4424 /* Allocate - for non-pointers with re-alloc checking. */
4431 /* Find the last reference in the chain. */
4432 while (ref && ref->next != NULL)
4434 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT);
4439 allocatable = expr->symtree->n.sym->attr.allocatable;
4441 allocatable = ref->u.c.component->attr.allocatable;
4444 tmp = gfc_allocate_array_with_status (&se.pre, se.expr, memsz,
4447 tmp = gfc_allocate_with_status (&se.pre, memsz, pstat);
4450 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
4452 fold_convert (TREE_TYPE (se.expr), tmp));
4453 gfc_add_expr_to_block (&se.pre, tmp);
4455 if (code->expr1 || code->expr2)
/* STAT=/ERRMSG= present: branch to error_label on nonzero stat
   instead of letting the runtime abort.  */
4457 tmp = build1_v (GOTO_EXPR, error_label);
4458 parm = fold_build2_loc (input_location, NE_EXPR,
4459 boolean_type_node, stat,
4460 build_int_cst (TREE_TYPE (stat), 0));
4461 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
4463 build_empty_stmt (input_location));
4464 gfc_add_expr_to_block (&se.pre, tmp);
4467 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
/* Null the allocatable components of a freshly allocated derived
   type so later (re)allocation/deallocation sees a clean state.  */
4469 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4470 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4471 gfc_add_expr_to_block (&se.pre, tmp);
4476 tmp = gfc_finish_block (&se.pre);
4477 gfc_add_expr_to_block (&block, tmp);
4479 if (code->expr3 && !code->expr3->mold)
4481 /* Initialization via SOURCE block
4482 (or static default initializer). */
4483 gfc_expr *rhs = gfc_copy_expr (code->expr3);
4484 if (al->expr->ts.type == BT_CLASS)
4487 if (rhs->ts.type == BT_CLASS)
4488 gfc_add_component_ref (rhs, "$data");
4489 gfc_init_se (&dst, NULL);
4490 gfc_init_se (&src, NULL);
4491 gfc_conv_expr (&dst, expr);
4492 gfc_conv_expr (&src, rhs);
4493 gfc_add_block_to_block (&block, &src.pre);
4494 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
4497 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4499 gfc_free_expr (rhs);
4500 gfc_add_expr_to_block (&block, tmp);
4502 else if (code->expr3 && code->expr3->mold
4503 && code->expr3->ts.type == BT_CLASS)
4505 /* Default-initialization via MOLD (polymorphic). */
/* Copy the default initializer out of the mold's vtab.  */
4506 gfc_expr *rhs = gfc_copy_expr (code->expr3);
4508 gfc_add_component_ref (rhs, "$vptr");
4509 gfc_add_component_ref (rhs, "$def_init");
4510 gfc_init_se (&dst, NULL);
4511 gfc_init_se (&src, NULL);
4512 gfc_conv_expr (&dst, expr);
4513 gfc_conv_expr (&src, rhs);
4514 gfc_add_block_to_block (&block, &src.pre);
4515 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
4516 gfc_add_expr_to_block (&block, tmp);
4517 gfc_free_expr (rhs);
4520 /* Allocation of CLASS entities. */
/* NOTE(review): an original line elided from this extract (between the
   free below and the BT_CLASS test) presumably re-points EXPR (e.g. back
   to al->expr); otherwise the test would read freed memory -- confirm
   against the complete source.  */
4521 gfc_free_expr (expr);
4523 if (expr->ts.type == BT_CLASS)
4528 /* Initialize VPTR for CLASS objects. */
4529 lhs = gfc_expr_to_initialize (expr);
4530 gfc_add_component_ref (lhs, "$vptr");
4532 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4534 /* Polymorphic SOURCE: VPTR must be determined at run time. */
4535 rhs = gfc_copy_expr (code->expr3);
4536 gfc_add_component_ref (rhs, "$vptr");
4537 tmp = gfc_trans_pointer_assignment (lhs, rhs);
4538 gfc_add_expr_to_block (&block, tmp);
4539 gfc_free_expr (rhs);
4543 /* VPTR is fixed at compile time. */
/* Pick the declared type spec: SOURCE=, the object itself, or the
   type-spec given in the ALLOCATE statement.  */
4547 ts = &code->expr3->ts;
4548 else if (expr->ts.type == BT_DERIVED)
4550 else if (code->ext.alloc.ts.type == BT_DERIVED)
4551 ts = &code->ext.alloc.ts;
4552 else if (expr->ts.type == BT_CLASS)
4553 ts = &CLASS_DATA (expr)->ts;
4557 if (ts->type == BT_DERIVED)
4559 vtab = gfc_find_derived_vtab (ts->u.derived);
4561 gfc_init_se (&lse, NULL);
4562 lse.want_pointer = 1;
4563 gfc_conv_expr (&lse, lhs);
4564 tmp = gfc_build_addr_expr (NULL_TREE,
4565 gfc_get_symbol_decl (vtab));
4566 gfc_add_modify (&block, lse.expr,
4567 fold_convert (TREE_TYPE (lse.expr), tmp));
/* Error landing pad: store the runtime stat into the STAT= variable.  */
4577 tmp = build1_v (LABEL_EXPR, error_label);
4578 gfc_add_expr_to_block (&block, tmp);
4580 gfc_init_se (&se, NULL);
4581 gfc_conv_expr_lhs (&se, code->expr1);
4582 tmp = convert (TREE_TYPE (se.expr), stat);
4583 gfc_add_modify (&block, se.expr, tmp);
4589 /* A better error message may be possible, but not required. */
4590 const char *msg = "Attempt to allocate an allocated object";
4591 tree errmsg, slen, dlen;
4593 gfc_init_se (&se, NULL);
4594 gfc_conv_expr_lhs (&se, code->expr2);
4596 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4598 gfc_add_modify (&block, errmsg,
4599 gfc_build_addr_expr (pchar_type_node,
4600 gfc_build_localized_cstring_const (msg)));
/* Copy at most min(strlen(msg), len(ERRMSG=)) characters.  */
4602 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4603 dlen = gfc_get_expr_charlen (code->expr2);
4604 slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
4607 dlen = build_call_expr_loc (input_location,
4608 built_in_decls[BUILT_IN_MEMCPY], 3,
4609 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
/* ERRMSG= is only written when stat is nonzero.  */
4611 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
4612 build_int_cst (TREE_TYPE (stat), 0));
4614 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4616 gfc_add_expr_to_block (&block, tmp);
4619 return gfc_finish_block (&block);
4623 /* Translate a DEALLOCATE statement. */
/* Deallocates every object in the list; STAT= (code->expr1) receives the
   count of failed deallocations accumulated in ASTAT, and ERRMSG=
   (code->expr2) receives a message when any deallocation failed.  */
4626 gfc_trans_deallocate (gfc_code *code)
4631 tree apstat, astat, pstat, stat, tmp;
4634 pstat = apstat = stat = astat = tmp = NULL_TREE;
4636 gfc_start_block (&block);
4638 /* Count the number of failed deallocations. If deallocate() was
4639 called with STAT= , then set STAT to the count. If deallocate
4640 was called with ERRMSG, then set ERRMG to a string. */
4641 if (code->expr1 || code->expr2)
4643 tree gfc_int4_type_node = gfc_get_int_type (4);
/* STAT holds the result of a single deallocation; ASTAT the running
   total across the list.  */
4645 stat = gfc_create_var (gfc_int4_type_node, "stat");
4646 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4648 /* Running total of possible deallocation failures. */
4649 astat = gfc_create_var (gfc_int4_type_node, "astat");
4650 apstat = gfc_build_addr_expr (NULL_TREE, astat);
4652 /* Initialize astat to 0. */
4653 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
4656 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4659 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4661 gfc_init_se (&se, NULL);
4662 gfc_start_block (&se.pre);
4664 se.want_pointer = 1;
4665 se.descriptor_only = 1;
4666 gfc_conv_expr (&se, expr);
4668 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
/* Recursively free allocatable components first.  LAST tracks the
   final component reference so pointer components are not followed.  */
4671 gfc_ref *last = NULL;
4672 for (ref = expr->ref; ref; ref = ref->next)
4673 if (ref->type == REF_COMPONENT)
4676 /* Do not deallocate the components of a derived type
4677 ultimate pointer component. */
4678 if (!(last && last->u.c.component->attr.pointer)
4679 && !(!last && expr->symtree->n.sym->attr.pointer))
4681 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
4683 gfc_add_expr_to_block (&se.pre, tmp);
4688 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4691 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4692 gfc_add_expr_to_block (&se.pre, tmp);
/* Null the pointer/descriptor data after deallocation.  */
4694 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
4696 build_int_cst (TREE_TYPE (se.expr), 0));
4699 gfc_add_expr_to_block (&se.pre, tmp);
4701 /* Keep track of the number of failed deallocations by adding stat
4702 of the last deallocation to the running total. */
4703 if (code->expr1 || code->expr2)
4705 apstat = fold_build2_loc (input_location, PLUS_EXPR,
4706 TREE_TYPE (stat), astat, stat);
4707 gfc_add_modify (&se.pre, astat, apstat);
4710 tmp = gfc_finish_block (&se.pre);
4711 gfc_add_expr_to_block (&block, tmp);
/* STAT= present: store the accumulated failure count.  */
4718 gfc_init_se (&se, NULL);
4719 gfc_conv_expr_lhs (&se, code->expr1);
4720 tmp = convert (TREE_TYPE (se.expr), astat);
4721 gfc_add_modify (&block, se.expr, tmp);
4727 /* A better error message may be possible, but not required. */
4728 const char *msg = "Attempt to deallocate an unallocated object";
4729 tree errmsg, slen, dlen;
4731 gfc_init_se (&se, NULL);
4732 gfc_conv_expr_lhs (&se, code->expr2);
4734 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4736 gfc_add_modify (&block, errmsg,
4737 gfc_build_addr_expr (pchar_type_node,
4738 gfc_build_localized_cstring_const (msg)));
/* Copy at most min(strlen(msg), len(ERRMSG=)) characters.  */
4740 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4741 dlen = gfc_get_expr_charlen (code->expr2);
4742 slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
4745 dlen = build_call_expr_loc (input_location,
4746 built_in_decls[BUILT_IN_MEMCPY], 3,
4747 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
/* ERRMSG= is only written when at least one deallocation failed.  */
4749 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, astat,
4750 build_int_cst (TREE_TYPE (astat), 0));
4752 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4754 gfc_add_expr_to_block (&block, tmp);
4757 return gfc_finish_block (&block);
4760 #include "gt-fortran-trans-stmt.h"